From d78baa48a13964748619e3d787a3966e38b5f1f1 Mon Sep 17 00:00:00 2001 From: Tiago Gomes Date: Thu, 13 Dec 2012 16:50:39 +0000 Subject: [PATCH 01/89] Don't special case the tables in city.pfl Its hard to make them work in lifted networks : --- packages/CLPBN/examples/city.pfl | 37 ++++++++++++++------------------ 1 file changed, 16 insertions(+), 21 deletions(-) diff --git a/packages/CLPBN/examples/city.pfl b/packages/CLPBN/examples/city.pfl index 198284498..c891ae163 100644 --- a/packages/CLPBN/examples/city.pfl +++ b/packages/CLPBN/examples/city.pfl @@ -26,36 +26,36 @@ ev(descn(p4, fits)). ev(descn(p5, fits)). bayes city_conservativeness(C)::[high,low] ; - cons_table(C) ; + cons_table ; [people(_,C)]. bayes gender(P)::[male,female] ; - gender_table(P) ; + gender_table ; [people(P,_)]. bayes hair_color(P)::[dark,bright], city_conservativeness(C) ; - hair_color_table(P) ; + hair_color_table ; [people(P,C)]. bayes car_color(P)::[dark,bright], hair_color(P) ; - car_color_table(P) ; + car_color_table ; [people(P,_)]. bayes height(P)::[tall,short], gender(P) ; - height_table(P) ; + height_table ; [people(P,_)]. bayes shoe_size(P)::[big,small], height(P) ; - shoe_size_table(P) ; + shoe_size_table ; [people(P,_)]. bayes guilty(P)::[y,n] ; - guilty_table(P) ; + guilty_table ; [people(P,_)]. bayes descn(P)::[fits,dont_fit], car_color(P), hair_color(P), height(P), guilty(P) ; - descn_table(P) ; + descn_table ; [people(P,_)]. bayes witness(C), descn(Joe), descn(P2) ; @@ -63,44 +63,39 @@ bayes witness(C), descn(Joe), descn(P2) ; [people(_,C), Joe=joe, P2=p2]. -cons_table(amsterdam, -% special case for amsterdam: amsterdam is -% less conservative than other cities (is it?) -/* y */ [ 0.2, -/* n */ 0.8 ]) :- !. % FIXME -cons_table(_, +cons_table( /* y */ [ 0.8, /* n */ 0.2 ]). -gender_table(_, +gender_table( /* male */ [ 0.55, /* female */ 0.45 ]). -hair_color_table(_, +hair_color_table( /* high low */ /* dark */ [ 0.05, 0.1, /* bright */ 0.95, 0.9 ]). -car_color_table(_, +car_color_table( /* dark bright */ /* dark */ [ 0.9, 0.2, /* bright */ 0.1, 0.8 ]). -height_table(_, +height_table( /* male female */ /* tall */ [ 0.6, 0.4, /* short */ 0.4, 0.6 ]). -shoe_size_table(_, +shoe_size_table( /* tall short */ /* big */ [ 0.9, 0.1, /* small */ 0.1, 0.9 ]). -guilty_table(_, +guilty_table( /* yes */ [ 0.23, /* no */ 0.77 ]). -descn_table(_, +descn_table( /* car_color(P), hair_color(P), height(P), guilty(P) */ /* fits */ [ 0.99, 0.5, 0.23, 0.88, 0.41, 0.3, 0.76, 0.87, /* fits */ 0.44, 0.43, 0.29, 0.72, 0.23, 0.91, 0.95, 0.92, From 6b8301d9c46441fbf7888f4e080a182b3a649efb Mon Sep 17 00:00:00 2001 From: Tiago Gomes Date: Thu, 13 Dec 2012 17:04:14 +0000 Subject: [PATCH 02/89] CLP(BN) is dead. 
Long live the PFL --- packages/CLPBN/Makefile.in | 2 - packages/CLPBN/examples/School/README | 3 - packages/CLPBN/examples/School/schema.yap | 68 ------------------- packages/CLPBN/examples/School/school_128.yap | 6 +- packages/CLPBN/examples/School/school_32.yap | 23 ------- packages/CLPBN/examples/School/school_64.yap | 7 +- packages/CLPBN/examples/School/tables.yap | 27 +------- 7 files changed, 10 insertions(+), 126 deletions(-) delete mode 100644 packages/CLPBN/examples/School/schema.yap delete mode 100644 packages/CLPBN/examples/School/school_32.yap diff --git a/packages/CLPBN/Makefile.in b/packages/CLPBN/Makefile.in index 4ad7d2e9b..f4276d85e 100644 --- a/packages/CLPBN/Makefile.in +++ b/packages/CLPBN/Makefile.in @@ -77,10 +77,8 @@ CLPBN_LEARNING_PROGRAMS= \ CLPBN_SCHOOL_EXAMPLES= \ $(CLPBN_EXDIR)/School/README \ $(CLPBN_EXDIR)/School/evidence_128.yap \ - $(CLPBN_EXDIR)/School/schema.yap \ $(CLPBN_EXDIR)/School/parschema.pfl \ $(CLPBN_EXDIR)/School/school_128.yap \ - $(CLPBN_EXDIR)/School/school_32.yap \ $(CLPBN_EXDIR)/School/sch32.yap \ $(CLPBN_EXDIR)/School/school32_data.yap \ $(CLPBN_EXDIR)/School/school_64.yap \ diff --git a/packages/CLPBN/examples/School/README b/packages/CLPBN/examples/School/README index 42160dd35..bbe305335 100644 --- a/packages/CLPBN/examples/School/README +++ b/packages/CLPBN/examples/School/README @@ -5,14 +5,11 @@ There are four main files: school_128.yap: a school with 128 professors, 256 courses and 4096 students. school_64.yap: medium size school -school_32.yap: small school (CLP(BN)) sch32.yap: small school (PFL) parschema.pfl: the PFL schema -schema.yap: the CLP(BN) schema - tables: CPTs ============================================================================= diff --git a/packages/CLPBN/examples/School/schema.yap b/packages/CLPBN/examples/School/schema.yap deleted file mode 100644 index d6eefb4e0..000000000 --- a/packages/CLPBN/examples/School/schema.yap +++ /dev/null @@ -1,68 +0,0 @@ -/* Base file for school database. Supposed to be called from school_*.yap */ - -professor_key(Key) :- - professor(Key). - -professor_ability(Key,Abi) :- - abi_table(Key, AbiDist), - { Abi = ability(Key) with p([h,m,l], AbiDist) }. - -professor_popularity(Key, Pop) :- - professor_ability(Key, Abi), - pop_table(Key,PopTable), - { Pop = popularity(Key) with - p([h,m,l], PopTable,[Abi]) }. - -registration_key(Key) :- - registration(Key, _, _). - -registration_course(Key, CKey) :- - registration(Key, CKey, _). - -registration_student(Key, SKey) :- - registration(Key, _, SKey). - -registration_grade(Key, Grade) :- - registration(Key, CKey, SKey), - course_difficulty(CKey, Dif), - student_intelligence(SKey, Int), - grade_table(Int, Dif, Table), - { Grade = grade(Key) with Table }. - -% registration_satisfaction(r0, h) :- {}. -registration_satisfaction(Key, Sat) :- - registration_course(Key, CKey), - course_professor(CKey, PKey), - professor_ability(PKey, Abi), - registration_grade(Key, Grade), - satisfaction_table(Abi, Grade, Table), - { Sat = satisfaction(Key) with Table }. - -course_key(Key) :- - course(Key,_). - -course_professor(Key, PKey) :- - course(Key, PKey). - -course_rating(CKey, Rat) :- - setof(Sat, RKey^(registration_course(RKey,CKey), registration_satisfaction(RKey,Sat)), Sats), - { Rat = rating(CKey) with avg([h,m,l],Sats) }. - -course_difficulty(Key, Dif) :- - dif_table(Key, Dist), - { Dif = difficulty(Key) with p([h,m,l], Dist) }. - -student_key(Key) :- - student(Key). 
- -student_intelligence(Key, Int) :- - int_table(Key, IDist, Domain), - { Int = intelligence(Key) with p(Domain, IDist) }. - -student_ranking(Key, Rank) :- - setof(Grade, CKey^(registration_student(CKey,Key), - registration_grade(CKey, Grade)), Grades), - { Rank = ranking(Key) with avg([a,b,c,d],Grades) }. - -:- ensure_loaded(tables). - diff --git a/packages/CLPBN/examples/School/school_128.yap b/packages/CLPBN/examples/School/school_128.yap index 034df01d9..b650a71f9 100644 --- a/packages/CLPBN/examples/School/school_128.yap +++ b/packages/CLPBN/examples/School/school_128.yap @@ -7,6 +7,8 @@ total_students(4096). */ +:- use_module(library(pfl)). + :- source. :- style_check(all). @@ -15,9 +17,9 @@ total_students(4096). :- yap_flag(write_strings,on). -:- use_module(library(clpbn)). +:- ensure_loaded('parschema.pfl'). -:- [-schema]. +:- set_solver(hve). professor(p0). professor(p1). diff --git a/packages/CLPBN/examples/School/school_32.yap b/packages/CLPBN/examples/School/school_32.yap deleted file mode 100644 index cc9349460..000000000 --- a/packages/CLPBN/examples/School/school_32.yap +++ /dev/null @@ -1,23 +0,0 @@ -/* -total_professors(32). - -total_courses(64). - -total_students(256). - -*/ - -:- source. - -:- style_check(all). - -:- yap_flag(unknown,error). - -:- yap_flag(write_strings,on). - -:- use_module(library(clpbn)). - -:- [-schema]. - -:- ensure_loaded(school32_data). - diff --git a/packages/CLPBN/examples/School/school_64.yap b/packages/CLPBN/examples/School/school_64.yap index 3b2df93d6..9a6ba140e 100644 --- a/packages/CLPBN/examples/School/school_64.yap +++ b/packages/CLPBN/examples/School/school_64.yap @@ -7,6 +7,9 @@ total_students(1024). */ + +:- use_module(library(pfl)). + :- source. :- style_check(all). @@ -15,9 +18,9 @@ total_students(1024). :- yap_flag(write_strings,on). -:- use_module(library(clpbn)). +:- ensure_loaded('parschema.pfl'). -:- [-schema]. +:- set_solver(hve). professor(p0). professor(p1). diff --git a/packages/CLPBN/examples/School/tables.yap b/packages/CLPBN/examples/School/tables.yap index a028aeb27..9c7fbe8ac 100644 --- a/packages/CLPBN/examples/School/tables.yap +++ b/packages/CLPBN/examples/School/tables.yap @@ -1,33 +1,26 @@ +/* CTPs for school database. */ abi_table( /* h */ [ 0.50, /* m */ 0.40, /* l */ 0.10 ]). -abi_table(_, T) :- abi_table(T). - pop_table( /* h m l */ /* h */ [ 0.9, 0.2, 0.01, /* m */ 0.09, 0.6, 0.09, /* l */ 0.01, 0.2, 0.9 ]). -pop_table(_, T) :- pop_table(T). - diff_table( /* h */ [ 0.25, /* m */ 0.50, /* l */ 0.25 ]). -dif_table(_, T) :- diff_table(T). - int_table( /* h */ [ 0.5, /* m */ 0.4, /* l */ 0.1 ]). -int_table(_,T ,[h,m,l]) :- int_table(T). - grade_table( /* h h h m h l m h m m m l l h l m l l */ /* a */ [ 0.2, 0.7, 0.85, 0.1, 0.2, 0.5, 0.01, 0.05, 0.1, @@ -35,27 +28,9 @@ grade_table( /* c */ 0.15, 0.04, 0.02, 0.4, 0.15, 0.12, 0.5, 0.6, 0.4, /* d */ 0.05, 0.01, 0.01, 0.2, 0.05, 0.03, 0.45, 0.2, 0.1 ]). -grade_table(I, D, - p([a,b,c,d], T, [I,D])) :- grade_table(T). - sat_table( /* h a h b h c h d m a m b m c m d l a l b l c l d */ /* h */ [ 0.98, 0.9, 0.8 , 0.6, 0.9, 0.4, 0.2, 0.01, 0.5, 0.2, 0.01, 0.01, /* m */ 0.01, 0.09, 0.15, 0.3, 0.05, 0.4, 0.3, 0.04, 0.35, 0.3, 0.09, 0.01, /* l */ 0.01, 0.01, 0.05, 0.1, 0.05, 0.2, 0.5, 0.95, 0.15, 0.5, 0.9, 0.98 ]). -satisfaction_table(A, G, p([h,m,l], T, [A,G])) :- sat_table(T). - - -% The idea is quite simple: -% hs = h -> r = ( 0.9, 0.1, 0) -% hs = m -> r = ( 0.2, 0.6, 0.2) -% hs = l -> r = ( 0, 0.1, 0.9) -% -% add all and divide on the number of elements on the table! 
-% -rating_prob_table( - [ 0.9, 0.05, 0.01, - 0.09, 0.9, 0.09, - 0.01, 0.05, 0.9 ]). - From 05e53eaf3701aa1d5703751d6f1e1e71b3044bf2 Mon Sep 17 00:00:00 2001 From: Tiago Gomes Date: Thu, 13 Dec 2012 17:08:54 +0000 Subject: [PATCH 03/89] Merge sch32 and school32_data and rename to school_32 --- packages/CLPBN/Makefile.in | 3 +- packages/CLPBN/examples/School/sch32.yap | 25 ----------------- .../{school32_data.yap => school_32.yap} | 28 +++++++++++++++++++ 3 files changed, 29 insertions(+), 27 deletions(-) delete mode 100644 packages/CLPBN/examples/School/sch32.yap rename packages/CLPBN/examples/School/{school32_data.yap => school_32.yap} (99%) diff --git a/packages/CLPBN/Makefile.in b/packages/CLPBN/Makefile.in index f4276d85e..602565138 100644 --- a/packages/CLPBN/Makefile.in +++ b/packages/CLPBN/Makefile.in @@ -79,8 +79,7 @@ CLPBN_SCHOOL_EXAMPLES= \ $(CLPBN_EXDIR)/School/evidence_128.yap \ $(CLPBN_EXDIR)/School/parschema.pfl \ $(CLPBN_EXDIR)/School/school_128.yap \ - $(CLPBN_EXDIR)/School/sch32.yap \ - $(CLPBN_EXDIR)/School/school32_data.yap \ + $(CLPBN_EXDIR)/School/school32.yap \ $(CLPBN_EXDIR)/School/school_64.yap \ $(CLPBN_EXDIR)/School/tables.yap diff --git a/packages/CLPBN/examples/School/sch32.yap b/packages/CLPBN/examples/School/sch32.yap deleted file mode 100644 index c455698d6..000000000 --- a/packages/CLPBN/examples/School/sch32.yap +++ /dev/null @@ -1,25 +0,0 @@ -/* -total_professors(32). - -total_courses(64). - -total_students(256). - -*/ - -:- use_module(library(pfl)). - -:- source. - -:- style_check(all). - -:- yap_flag(unknown,error). - -:- yap_flag(write_strings,on). - -:- ensure_loaded('parschema.pfl'). - -:- ensure_loaded(school32_data). - -:- set_solver(hve). - diff --git a/packages/CLPBN/examples/School/school32_data.yap b/packages/CLPBN/examples/School/school_32.yap similarity index 99% rename from packages/CLPBN/examples/School/school32_data.yap rename to packages/CLPBN/examples/School/school_32.yap index c6c104026..60323f94e 100644 --- a/packages/CLPBN/examples/School/school32_data.yap +++ b/packages/CLPBN/examples/School/school_32.yap @@ -1,3 +1,30 @@ +/* +total_professors(32). + +total_courses(64). + +total_students(256). + +*/ + +:- use_module(library(pfl)). + +:- source. + +:- style_check(all). + +:- yap_flag(unknown,error). + +:- yap_flag(write_strings,on). + +:- ensure_loaded('parschema.pfl'). + +:- ensure_loaded(school32_data). + +:- set_solver(hve). + + + professor(p0). professor(p1). professor(p2). @@ -1215,3 +1242,4 @@ registration(r855,c48,s255). registration(r856,c0,s255). + From 0594d4353982abf6ce1cac137f53ca39eee2f420 Mon Sep 17 00:00:00 2001 From: Tiago Gomes Date: Thu, 13 Dec 2012 17:51:43 +0000 Subject: [PATCH 04/89] Cosmetic fixes --- packages/CLPBN/examples/School/README | 13 ++++++------- packages/CLPBN/examples/School/school_128.yap | 4 ++-- packages/CLPBN/examples/School/school_32.yap | 6 ------ packages/CLPBN/examples/School/school_64.yap | 3 +-- 4 files changed, 9 insertions(+), 17 deletions(-) diff --git a/packages/CLPBN/examples/School/README b/packages/CLPBN/examples/School/README index bbe305335..a3ec52a17 100644 --- a/packages/CLPBN/examples/School/README +++ b/packages/CLPBN/examples/School/README @@ -1,16 +1,15 @@ This is a version of the school database, based on the PRM School example. -There are four main files: +There are the following main files: -school_128.yap: a school with 128 professors, 256 courses and 4096 students. 
-school_64.yap: medium size school +school_32.yap: school with 32 professors, 64 courses and 256 students +school_64.yap: school with 64 professors, 128 courses and 1024 students +school_128.yap: school with 128 professors, 256 courses and 4096 students -sch32.yap: small school (PFL) +parschema.pfl: the PFL schema -parschema.pfl: the PFL schema - -tables: CPTs +tables: CPTs ============================================================================= diff --git a/packages/CLPBN/examples/School/school_128.yap b/packages/CLPBN/examples/School/school_128.yap index b650a71f9..ecbf398cb 100644 --- a/packages/CLPBN/examples/School/school_128.yap +++ b/packages/CLPBN/examples/School/school_128.yap @@ -4,7 +4,6 @@ total_professors(128). total_courses(256). total_students(4096). - */ :- use_module(library(pfl)). @@ -21,6 +20,7 @@ total_students(4096). :- set_solver(hve). + professor(p0). professor(p1). professor(p2). @@ -18430,5 +18430,5 @@ registration(r13919,c221,s4095). registration(r13920,c39,s4095). - :- [evidence_128]. +:- [evidence_128]. diff --git a/packages/CLPBN/examples/School/school_32.yap b/packages/CLPBN/examples/School/school_32.yap index 60323f94e..48fcbcabc 100644 --- a/packages/CLPBN/examples/School/school_32.yap +++ b/packages/CLPBN/examples/School/school_32.yap @@ -4,7 +4,6 @@ total_professors(32). total_courses(64). total_students(256). - */ :- use_module(library(pfl)). @@ -19,12 +18,9 @@ total_students(256). :- ensure_loaded('parschema.pfl'). -:- ensure_loaded(school32_data). - :- set_solver(hve). - professor(p0). professor(p1). professor(p2). @@ -1241,5 +1237,3 @@ registration(r854,c60,s255). registration(r855,c48,s255). registration(r856,c0,s255). - - diff --git a/packages/CLPBN/examples/School/school_64.yap b/packages/CLPBN/examples/School/school_64.yap index 9a6ba140e..7564cc1b9 100644 --- a/packages/CLPBN/examples/School/school_64.yap +++ b/packages/CLPBN/examples/School/school_64.yap @@ -4,10 +4,8 @@ total_professors(64). total_courses(128). total_students(1024). - */ - :- use_module(library(pfl)). :- source. @@ -22,6 +20,7 @@ total_students(1024). :- set_solver(hve). + professor(p0). professor(p1). professor(p2). From eea7ce2885fb9904d7d370ef8e1ed3d89203be78 Mon Sep 17 00:00:00 2001 From: Tiago Gomes Date: Thu, 13 Dec 2012 19:00:28 +0000 Subject: [PATCH 05/89] Fix file names --- packages/CLPBN/examples/learning/debug_school.yap | 2 +- packages/CLPBN/examples/learning/school_params.yap | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/packages/CLPBN/examples/learning/debug_school.yap b/packages/CLPBN/examples/learning/debug_school.yap index 47be3bf2d..3ee14fa96 100644 --- a/packages/CLPBN/examples/learning/debug_school.yap +++ b/packages/CLPBN/examples/learning/debug_school.yap @@ -4,7 +4,7 @@ :- [pos:train]. -:- ['../../examples/School/sch32']. +:- ['../../examples/School/parschema.pfl']. :- use_module(library(clpbn/learning/em)). diff --git a/packages/CLPBN/examples/learning/school_params.yap b/packages/CLPBN/examples/learning/school_params.yap index 2ef41865f..61c535b5f 100644 --- a/packages/CLPBN/examples/learning/school_params.yap +++ b/packages/CLPBN/examples/learning/school_params.yap @@ -4,14 +4,14 @@ :- [pos:train]. -:- ['../../examples/School/sch32']. +:- ['../../examples/School/school_32']. :- use_module(library(clpbn/learning/em)). %:- clpbn:set_clpbn_flag(em_solver,gibbs). %:- clpbn:set_clpbn_flag(em_solver,jt). - :- clpbn:set_clpbn_flag(em_solver,ve). -%:- clpbn:set_clpbn_flag(em_solver,bp). +% :- clpbn:set_clpbn_flag(em_solver,ve). 
+:- clpbn:set_clpbn_flag(em_solver,bp). timed_main :- statistics(runtime, _), From e5479ff17cc6b6a71c4c658ee6dc7885c27633cd Mon Sep 17 00:00:00 2001 From: Tiago Gomes Date: Sat, 15 Dec 2012 00:13:53 +0000 Subject: [PATCH 06/89] CLPBNs first, PFLs next --- packages/CLPBN/clpbn.yap | 43 +++++++++++++++++++++++----------------- 1 file changed, 25 insertions(+), 18 deletions(-) diff --git a/packages/CLPBN/clpbn.yap b/packages/CLPBN/clpbn.yap index e7b8350ee..adb5c79f8 100644 --- a/packages/CLPBN/clpbn.yap +++ b/packages/CLPBN/clpbn.yap @@ -557,24 +557,6 @@ clpbn_init_solver(bdd, LVs, Vs0, VarsWithUnboundKeys, State) :- clpbn_init_solver(pcg, LVs, Vs0, VarsWithUnboundKeys, State) :- init_pcg_solver(LVs, Vs0, VarsWithUnboundKeys, State). -% -% This is a routine to start a solver, called by the learning procedures (ie, em). -% LVs is a list of lists of variables one is interested in eventually marginalising out -% Vs0 gives the original graph -% AllDiffs gives variables that are not fully constrainted, ie, we don't fully know -% the key. In this case, we assume different instances will be bound to different -% values at the end of the day. -% -pfl_init_solver(QueryKeys, AllKeys, Factors, Evidence, VE, bdd) :- - init_bdd_ground_solver(QueryKeys, AllKeys, Factors, Evidence, VE). -pfl_init_solver(QueryKeys, AllKeys, Factors, Evidence, VE, ve) :- - init_ve_ground_solver(QueryKeys, AllKeys, Factors, Evidence, VE). -pfl_init_solver(QueryKeys, AllKeys, Factors, Evidence, VE, bp) :- - init_horus_ground_solver(QueryKeys, AllKeys, Factors, Evidence, VE). -pfl_init_solver(QueryKeys, AllKeys, Factors, Evidence, VE, hve) :- - init_horus_ground_solver(QueryKeys, AllKeys, Factors, Evidence, VE). - - % % LVs is the list of lists of variables to marginalise % Vs is the full graph @@ -603,13 +585,38 @@ clpbn_run_solver(bdd, LVs, LPs, State) :- clpbn_run_solver(pcg, LVs, LPs, State) :- run_pcg_solver(LVs, LPs, State). + +% +% This is a routine to start a solver, called by the learning procedures (ie, em). +% LVs is a list of lists of variables one is interested in eventually marginalising out +% Vs0 gives the original graph +% AllDiffs gives variables that are not fully constrainted, ie, we don't fully know +% the key. In this case, we assume different instances will be bound to different +% values at the end of the day. +% +pfl_init_solver(QueryKeys, AllKeys, Factors, Evidence, VE, bdd) :- + init_bdd_ground_solver(QueryKeys, AllKeys, Factors, Evidence, VE). + +pfl_init_solver(QueryKeys, AllKeys, Factors, Evidence, VE, ve) :- + init_ve_ground_solver(QueryKeys, AllKeys, Factors, Evidence, VE). + +pfl_init_solver(QueryKeys, AllKeys, Factors, Evidence, VE, bp) :- + init_horus_ground_solver(QueryKeys, AllKeys, Factors, Evidence, VE). + +pfl_init_solver(QueryKeys, AllKeys, Factors, Evidence, VE, hve) :- + init_horus_ground_solver(QueryKeys, AllKeys, Factors, Evidence, VE). + pfl_run_solver(LVs, LPs, State, ve) :- run_ve_ground_solver(LVs, LPs, State). + pfl_run_solver(LVs, LPs, State, bdd) :- run_bdd_ground_solver(LVs, LPs, State). + pfl_run_solver(LVs, LPs, State, bp) :- run_horus_ground_solver(LVs, LPs, State, bp). + pfl_run_solver(LVs, LPs, State, hve) :- + run_horus_ground_solver(LVs, LPs, State, hve). 
From 2996a0b8b31ac96996c4e59aa9540ca9c45adad8 Mon Sep 17 00:00:00 2001 From: Tiago Gomes Date: Sat, 15 Dec 2012 00:14:45 +0000 Subject: [PATCH 07/89] Comment does not apply --- packages/CLPBN/clpbn.yap | 5 ----- 1 file changed, 5 deletions(-) diff --git a/packages/CLPBN/clpbn.yap b/packages/CLPBN/clpbn.yap index adb5c79f8..a97e547b7 100644 --- a/packages/CLPBN/clpbn.yap +++ b/packages/CLPBN/clpbn.yap @@ -588,11 +588,6 @@ clpbn_run_solver(pcg, LVs, LPs, State) :- % % This is a routine to start a solver, called by the learning procedures (ie, em). -% LVs is a list of lists of variables one is interested in eventually marginalising out -% Vs0 gives the original graph -% AllDiffs gives variables that are not fully constrainted, ie, we don't fully know -% the key. In this case, we assume different instances will be bound to different -% values at the end of the day. % pfl_init_solver(QueryKeys, AllKeys, Factors, Evidence, VE, bdd) :- init_bdd_ground_solver(QueryKeys, AllKeys, Factors, Evidence, VE). From 1174486576b09b545bf035b91ca383e1c45863ba Mon Sep 17 00:00:00 2001 From: Tiago Gomes Date: Sat, 15 Dec 2012 00:16:06 +0000 Subject: [PATCH 08/89] VE??? --- packages/CLPBN/clpbn.yap | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/packages/CLPBN/clpbn.yap b/packages/CLPBN/clpbn.yap index a97e547b7..7737d1787 100644 --- a/packages/CLPBN/clpbn.yap +++ b/packages/CLPBN/clpbn.yap @@ -589,17 +589,17 @@ clpbn_run_solver(pcg, LVs, LPs, State) :- % % This is a routine to start a solver, called by the learning procedures (ie, em). % -pfl_init_solver(QueryKeys, AllKeys, Factors, Evidence, VE, bdd) :- - init_bdd_ground_solver(QueryKeys, AllKeys, Factors, Evidence, VE). +pfl_init_solver(QueryKeys, AllKeys, Factors, Evidence, State, bdd) :- + init_bdd_ground_solver(QueryKeys, AllKeys, Factors, Evidence, State). -pfl_init_solver(QueryKeys, AllKeys, Factors, Evidence, VE, ve) :- - init_ve_ground_solver(QueryKeys, AllKeys, Factors, Evidence, VE). +pfl_init_solver(QueryKeys, AllKeys, Factors, Evidence, State, ve) :- + init_ve_ground_solver(QueryKeys, AllKeys, Factors, Evidence, State). -pfl_init_solver(QueryKeys, AllKeys, Factors, Evidence, VE, bp) :- - init_horus_ground_solver(QueryKeys, AllKeys, Factors, Evidence, VE). +pfl_init_solver(QueryKeys, AllKeys, Factors, Evidence, State, bp) :- + init_horus_ground_solver(QueryKeys, AllKeys, Factors, Evidence, State). -pfl_init_solver(QueryKeys, AllKeys, Factors, Evidence, VE, hve) :- - init_horus_ground_solver(QueryKeys, AllKeys, Factors, Evidence, VE). +pfl_init_solver(QueryKeys, AllKeys, Factors, Evidence, State, hve) :- + init_horus_ground_solver(QueryKeys, AllKeys, Factors, Evidence, State). pfl_run_solver(LVs, LPs, State, ve) :- run_ve_ground_solver(LVs, LPs, State). From 55292ab7457ac5b21b43c7e43d948b5fe9b042b1 Mon Sep 17 00:00:00 2001 From: Tiago Gomes Date: Sat, 15 Dec 2012 00:29:03 +0000 Subject: [PATCH 09/89] Clean ups and don't forget cbp --- packages/CLPBN/clpbn.yap | 26 +++++++++++++++++--------- 1 file changed, 17 insertions(+), 9 deletions(-) diff --git a/packages/CLPBN/clpbn.yap b/packages/CLPBN/clpbn.yap index 7737d1787..79da04fd8 100644 --- a/packages/CLPBN/clpbn.yap +++ b/packages/CLPBN/clpbn.yap @@ -589,16 +589,22 @@ clpbn_run_solver(pcg, LVs, LPs, State) :- % % This is a routine to start a solver, called by the learning procedures (ie, em). % -pfl_init_solver(QueryKeys, AllKeys, Factors, Evidence, State, bdd) :- - init_bdd_ground_solver(QueryKeys, AllKeys, Factors, Evidence, State). 
- pfl_init_solver(QueryKeys, AllKeys, Factors, Evidence, State, ve) :- init_ve_ground_solver(QueryKeys, AllKeys, Factors, Evidence, State). -pfl_init_solver(QueryKeys, AllKeys, Factors, Evidence, State, bp) :- - init_horus_ground_solver(QueryKeys, AllKeys, Factors, Evidence, State). +pfl_init_solver(QueryKeys, AllKeys, Factors, Evidence, State, bdd) :- + init_bdd_ground_solver(QueryKeys, AllKeys, Factors, Evidence, State). pfl_init_solver(QueryKeys, AllKeys, Factors, Evidence, State, hve) :- + clpbn_horus:set_horus_flag(ground_solver, ve), + init_horus_ground_solver(QueryKeys, AllKeys, Factors, Evidence, State). + +pfl_init_solver(QueryKeys, AllKeys, Factors, Evidence, State, bp) :- + clpbn_horus:set_horus_flag(ground_solver, bp), + init_horus_ground_solver(QueryKeys, AllKeys, Factors, Evidence, State). + +pfl_init_solver(QueryKeys, AllKeys, Factors, Evidence, State, cbp) :- + clpbn_horus:set_horus_flag(ground_solver, cbp), init_horus_ground_solver(QueryKeys, AllKeys, Factors, Evidence, State). pfl_run_solver(LVs, LPs, State, ve) :- @@ -607,12 +613,14 @@ pfl_run_solver(LVs, LPs, State, ve) :- pfl_run_solver(LVs, LPs, State, bdd) :- run_bdd_ground_solver(LVs, LPs, State). +pfl_run_solver(LVs, LPs, State, hve), + run_horus_ground_solver(LVs, LPs, State, hve). + pfl_run_solver(LVs, LPs, State, bp) :- run_horus_ground_solver(LVs, LPs, State, bp). - -pfl_run_solver(LVs, LPs, State, hve) :- - - run_horus_ground_solver(LVs, LPs, State, hve). + +pfl_run_solver(LVs, LPs, State, cbp) :- + run_horus_ground_solver(LVs, LPs, State, cbp). add_keys(Key1+V1,_Key2,Key1+V1). From ea931f2f936ca1989661a7fa25a4d0e7d78da464 Mon Sep 17 00:00:00 2001 From: Tiago Gomes Date: Sat, 15 Dec 2012 00:41:00 +0000 Subject: [PATCH 10/89] Comment --- packages/CLPBN/learning/em.yap | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/CLPBN/learning/em.yap b/packages/CLPBN/learning/em.yap index 335612442..ec034b641 100644 --- a/packages/CLPBN/learning/em.yap +++ b/packages/CLPBN/learning/em.yap @@ -166,7 +166,7 @@ em_loop(Its, Likelihood0, State, MaxError, MaxIts, LikelihoodF, FTables) :- estimate(State, LPs), maximise(State, Tables, LPs, Likelihood), ltables(Tables, F0Tables), - writeln(iteration:Its:Likelihood:Its:Likelihood0:F0Tables), + %writeln(iteration:Its:Likelihood:Its:Likelihood0:F0Tables), ( ( abs((Likelihood - Likelihood0)/Likelihood) < MaxError From 81ac6f191314cef0410f1203563cb48b9f8a669b Mon Sep 17 00:00:00 2001 From: Tiago Gomes Date: Sat, 15 Dec 2012 15:56:05 +0000 Subject: [PATCH 11/89] Don't pass around the Solver in run_horus_ground_solver --- packages/CLPBN/clpbn.yap | 15 ++++++++++----- packages/CLPBN/clpbn/horus_ground.yap | 23 +++++++++-------------- 2 files changed, 19 insertions(+), 19 deletions(-) diff --git a/packages/CLPBN/clpbn.yap b/packages/CLPBN/clpbn.yap index 79da04fd8..3f68aa024 100644 --- a/packages/CLPBN/clpbn.yap +++ b/packages/CLPBN/clpbn.yap @@ -49,7 +49,7 @@ [call_horus_ground_solver/6, check_if_horus_ground_solver_done/1, init_horus_ground_solver/5, - run_horus_ground_solver/4, + run_horus_ground_solver/3, finalize_horus_ground_solver/1 ]). @@ -606,6 +606,11 @@ pfl_init_solver(QueryKeys, AllKeys, Factors, Evidence, State, bp) :- pfl_init_solver(QueryKeys, AllKeys, Factors, Evidence, State, cbp) :- clpbn_horus:set_horus_flag(ground_solver, cbp), init_horus_ground_solver(QueryKeys, AllKeys, Factors, Evidence, State). + +pfl_init_solver(_, _, _, _, _, Solver) :- + write('Error: solver `'), + write(Solver), + write('\' cannot be used for learning'). 
pfl_run_solver(LVs, LPs, State, ve) :- run_ve_ground_solver(LVs, LPs, State). @@ -613,14 +618,14 @@ pfl_run_solver(LVs, LPs, State, ve) :- pfl_run_solver(LVs, LPs, State, bdd) :- run_bdd_ground_solver(LVs, LPs, State). -pfl_run_solver(LVs, LPs, State, hve), - run_horus_ground_solver(LVs, LPs, State, hve). +pfl_run_solver(LVs, LPs, State, hve) :- + run_horus_ground_solver(LVs, LPs, State). pfl_run_solver(LVs, LPs, State, bp) :- - run_horus_ground_solver(LVs, LPs, State, bp). + run_horus_ground_solver(LVs, LPs, State). pfl_run_solver(LVs, LPs, State, cbp) :- - run_horus_ground_solver(LVs, LPs, State, cbp). + run_horus_ground_solver(LVs, LPs, State). add_keys(Key1+V1,_Key2,Key1+V1). diff --git a/packages/CLPBN/clpbn/horus_ground.yap b/packages/CLPBN/clpbn/horus_ground.yap index 993fea5df..03b5764ec 100644 --- a/packages/CLPBN/clpbn/horus_ground.yap +++ b/packages/CLPBN/clpbn/horus_ground.yap @@ -11,7 +11,7 @@ [call_horus_ground_solver/6, check_if_horus_ground_solver_done/1, init_horus_ground_solver/5, - run_horus_ground_solver/4, + run_horus_ground_solver/3, finalize_horus_ground_solver/1 ]). @@ -53,9 +53,16 @@ call_horus_ground_solver(QueryVars, QueryKeys, AllKeys, Factors, Evidence, Output) :- init_horus_ground_solver(QueryKeys, AllKeys, Factors, Evidence, State), - run_solver(State, [QueryKeys], Solutions), + run_horus_ground_solver([QueryKeys], Solutions, State), clpbn_bind_vals([QueryVars], Solutions, Output), finalize_horus_ground_solver(State). + + +run_horus_ground_solver(QueryKeys, Solutions, state(Network,Hash,Id)) :- + %get_dists_parameters(DistIds, DistsParams), + %cpp_set_factors_params(Network, DistsParams), + lists_of_keys_to_ids(QueryKeys, QueryIds, Hash, _, Id, _), + cpp_run_ground_solver(Network, QueryIds, Solutions). init_horus_ground_solver(QueryKeys, AllKeys, Factors, Evidence, state(Network,Hash4,Id4)) :- @@ -68,23 +75,11 @@ init_horus_ground_solver(QueryKeys, AllKeys, Factors, Evidence, state(Network,Ha cpp_set_vars_information(KeysAtoms, StatesNames). -run_horus_ground_solver(_QueryVars, Solutions, horus(GKeys, Keys, Factors, Evidence), Solver) :- - set_solver(Solver), - call_horus_ground_solver_for_probabilities(GKeys, Keys, Factors, Evidence, Solutions). - - % TODO this is not beeing called! finalize_horus_ground_solver(state(Network,_Hash,_Id)) :- cpp_free_ground_network(Network). -run_solver(state(Network,Hash,Id), QueryKeys, Solutions) :- - %get_dists_parameters(DistIds, DistsParams), - %cpp_set_factors_params(Network, DistsParams), - lists_of_keys_to_ids(QueryKeys, QueryIds, Hash, _, Id, _), - cpp_run_ground_solver(Network, QueryIds, Solutions). - - get_factors_type([f(bayes, _, _)|_], bayes) :- ! . get_factors_type([f(markov, _, _)|_], markov) :- ! . 
From 9ff9be2f4903ecbc18e08880a45571056c2868ed Mon Sep 17 00:00:00 2001 From: Tiago Gomes Date: Sat, 15 Dec 2012 16:11:03 +0000 Subject: [PATCH 12/89] Don't pass around the Solver for EM --- packages/CLPBN/clpbn.yap | 35 ++++++++++++++++++++-------------- packages/CLPBN/learning/em.yap | 18 ++++++++--------- packages/CLPBN/pfl.yap | 4 ++-- 3 files changed, 31 insertions(+), 26 deletions(-) diff --git a/packages/CLPBN/clpbn.yap b/packages/CLPBN/clpbn.yap index 3f68aa024..f771e257b 100644 --- a/packages/CLPBN/clpbn.yap +++ b/packages/CLPBN/clpbn.yap @@ -6,11 +6,9 @@ clpbn_key/2, clpbn_init_solver/4, clpbn_run_solver/3, - pfl_init_solver/6, - pfl_run_solver/4, + pfl_init_solver/5, + pfl_run_solver/3, clpbn_finalize_solver/1, - clpbn_init_solver/5, - clpbn_run_solver/4, clpbn_init_graph/1, probability/2, conditional_probability/3, @@ -589,21 +587,26 @@ clpbn_run_solver(pcg, LVs, LPs, State) :- % % This is a routine to start a solver, called by the learning procedures (ie, em). % -pfl_init_solver(QueryKeys, AllKeys, Factors, Evidence, State, ve) :- + +pfl_init_solver(QueryKeys, AllKeys, Factors, Evidence, State) :- + solver(Solver), + pfl_init_solver(QueryKeys, AllKeys, Factors, Evidence, State, Solver). + +pfl_init_solver(QueryKeys, AllKeys, Factors, Evidence, State, ve) :- !, init_ve_ground_solver(QueryKeys, AllKeys, Factors, Evidence, State). -pfl_init_solver(QueryKeys, AllKeys, Factors, Evidence, State, bdd) :- +pfl_init_solver(QueryKeys, AllKeys, Factors, Evidence, State, bdd) :- !, init_bdd_ground_solver(QueryKeys, AllKeys, Factors, Evidence, State). -pfl_init_solver(QueryKeys, AllKeys, Factors, Evidence, State, hve) :- +pfl_init_solver(QueryKeys, AllKeys, Factors, Evidence, State, hve) :- !, clpbn_horus:set_horus_flag(ground_solver, ve), init_horus_ground_solver(QueryKeys, AllKeys, Factors, Evidence, State). -pfl_init_solver(QueryKeys, AllKeys, Factors, Evidence, State, bp) :- +pfl_init_solver(QueryKeys, AllKeys, Factors, Evidence, State, bp) :- !, clpbn_horus:set_horus_flag(ground_solver, bp), init_horus_ground_solver(QueryKeys, AllKeys, Factors, Evidence, State). -pfl_init_solver(QueryKeys, AllKeys, Factors, Evidence, State, cbp) :- +pfl_init_solver(QueryKeys, AllKeys, Factors, Evidence, State, cbp) :- !, clpbn_horus:set_horus_flag(ground_solver, cbp), init_horus_ground_solver(QueryKeys, AllKeys, Factors, Evidence, State). @@ -611,20 +614,24 @@ pfl_init_solver(_, _, _, _, _, Solver) :- write('Error: solver `'), write(Solver), write('\' cannot be used for learning'). + +pfl_run_solver(LVs, LPs, State) :- + solver(Solver), + pfl_run_solver(LVs, LPs, State, Solver). -pfl_run_solver(LVs, LPs, State, ve) :- +pfl_run_solver(LVs, LPs, State, ve) :- !, run_ve_ground_solver(LVs, LPs, State). -pfl_run_solver(LVs, LPs, State, bdd) :- +pfl_run_solver(LVs, LPs, State, bdd) :- !, run_bdd_ground_solver(LVs, LPs, State). -pfl_run_solver(LVs, LPs, State, hve) :- +pfl_run_solver(LVs, LPs, State, hve) :- !, run_horus_ground_solver(LVs, LPs, State). -pfl_run_solver(LVs, LPs, State, bp) :- +pfl_run_solver(LVs, LPs, State, bp) :- !, run_horus_ground_solver(LVs, LPs, State). -pfl_run_solver(LVs, LPs, State, cbp) :- +pfl_run_solver(LVs, LPs, State, cbp) :- !, run_horus_ground_solver(LVs, LPs, State). 
diff --git a/packages/CLPBN/learning/em.yap b/packages/CLPBN/learning/em.yap index ec034b641..2dead04bb 100644 --- a/packages/CLPBN/learning/em.yap +++ b/packages/CLPBN/learning/em.yap @@ -15,10 +15,10 @@ :- use_module(library(clpbn), [clpbn_init_graph/1, - clpbn_init_solver/5, - clpbn_run_solver/4, - pfl_init_solver/6, - pfl_run_solver/4, + clpbn_init_solver/4, + clpbn_run_solver/3, + pfl_init_solver/5, + pfl_run_solver/3, clpbn_finalize_solver/1, conditional_probability/3, clpbn_flag/2]). @@ -110,7 +110,7 @@ setup_em_network(Items, Solver, state( AllDists, AllDistInstances, MargKeys, Sol % get the EM CPT connections info from the factors generate_dists(Factors, EList, AllDists, AllDistInstances, MargKeys), % setup solver, if necessary - pfl_init_solver(MargKeys, Keys, Factors, EList, SolverState, Solver). + pfl_init_solver(MargKeys, Keys, Factors, EList, SolverState). setup_em_network(Items, Solver, state( AllDists, AllDistInstances, MargVars, SolverVars)) :- % create the ground network call_run_all(Items), @@ -121,7 +121,7 @@ setup_em_network(Items, Solver, state( AllDists, AllDistInstances, MargVars, Sol % remove variables that do not have to do with this query. different_dists(AllVars, AllDists, AllDistInstances, MargVars), % setup solver by doing parameter independent work. - clpbn_init_solver(Solver, MargVars, AllVars, _, SolverVars). + clpbn_init_solver(MargVars, AllVars, _, SolverVars). run_examples(user:Exs, Keys, Factors, EList) :- Exs = [_:_|_], !, @@ -297,11 +297,9 @@ compact_mvars([X|MargVars], [X|CMVars]) :- !, estimate(state(_, _, Margs, SolverState), LPs) :- clpbn:use_parfactors(on), !, - clpbn_flag(em_solver, Solver), - pfl_run_solver(Margs, LPs, SolverState, Solver). + pfl_run_solver(Margs, LPs, SolverState). estimate(state(_, _, Margs, SolverState), LPs) :- - clpbn_flag(em_solver, Solver), - clpbn_run_solver(Solver, Margs, LPs, SolverState). + clpbn_run_solver(Margs, LPs, SolverState). maximise(state(_,DistInstances,MargVars,_), Tables, LPs, Likelihood) :- rb_new(MDistTable0), diff --git a/packages/CLPBN/pfl.yap b/packages/CLPBN/pfl.yap index 3da42d0c7..97f3e972f 100644 --- a/packages/CLPBN/pfl.yap +++ b/packages/CLPBN/pfl.yap @@ -23,8 +23,8 @@ [clpbn_flag/2 as pfl_flag, set_clpbn_flag/2 as set_pfl_flag, conditional_probability/3, - pfl_init_solver/6, - pfl_run_solver/4]). + pfl_init_solver/5, + pfl_run_solver/3]). :- reexport(library(clpbn/horus), [set_solver/1]). From e35cf51476b92d52a8f7a96043daed7987ebfd7b Mon Sep 17 00:00:00 2001 From: Tiago Gomes Date: Mon, 17 Dec 2012 11:53:57 +0000 Subject: [PATCH 13/89] Reorders and some reindentation --- packages/CLPBN/clpbn.yap | 223 +++++++++++++----------- packages/CLPBN/clpbn/display.yap | 2 + packages/CLPBN/clpbn/ground_factors.yap | 115 ++++++------ packages/CLPBN/clpbn/horus_ground.yap | 4 +- packages/CLPBN/clpbn/ve.yap | 6 +- packages/CLPBN/learning/em.yap | 112 ++++++------ packages/CLPBN/pfl.yap | 48 ++--- 7 files changed, 263 insertions(+), 247 deletions(-) diff --git a/packages/CLPBN/clpbn.yap b/packages/CLPBN/clpbn.yap index f771e257b..ed5e0b7b3 100644 --- a/packages/CLPBN/clpbn.yap +++ b/packages/CLPBN/clpbn.yap @@ -1,38 +1,34 @@ -:- module(clpbn, [{}/1, - clpbn_flag/2, - set_clpbn_flag/2, - clpbn_flag/3, - clpbn_key/2, - clpbn_init_solver/4, - clpbn_run_solver/3, - pfl_init_solver/5, - pfl_run_solver/3, - clpbn_finalize_solver/1, - clpbn_init_graph/1, - probability/2, - conditional_probability/3, - use_parfactors/1, - op( 500, xfy, with)]). 
- +:- module(clpbn, + [{}/1, + clpbn_flag/2, + set_clpbn_flag/2, + clpbn_flag/3, + clpbn_key/2, + clpbn_init_graph/1, + clpbn_init_solver/4, + clpbn_run_solver/3, + clpbn_finalize_solver/1, + pfl_init_solver/5, + pfl_run_solver/3, + probability/2, + conditional_probability/3, + use_parfactors/1, + op(500, xfy, with) + ]). + :- use_module(library(atts)). + :- use_module(library(bhash)). + :- use_module(library(lists)). + :- use_module(library(terms)). + :- use_module(library(maplist)). -% -% avoid the overhead of using goal_expansion/2. -% -:- multifile - user:term_expansion/2. - -:- dynamic - user:term_expansion/2. - :- attribute key/1, dist/2, evidence/1. - :- use_module('clpbn/ve', [ve/3, check_if_ve_done/1, @@ -43,6 +39,39 @@ call_ve_ground_solver/6 ]). +:- use_module('clpbn/jt', + [jt/3, + init_jt_solver/4, + run_jt_solver/3 + ]). + +:- use_module('clpbn/bdd', + [bdd/3, + init_bdd_solver/4, + run_bdd_solver/3, + init_bdd_ground_solver/5, + run_bdd_ground_solver/3, + call_bdd_ground_solver/6 + ]). + +:- use_module('clpbn/gibbs', + [gibbs/3, + check_if_gibbs_done/1, + init_gibbs_solver/4, + run_gibbs_solver/3 + ]). + +%% :- use_module('clpbn/bnt', +%% [do_bnt/3, +%% check_if_bnt_done/1 +%% ]). + +:- use_module('clpbn/pgrammar', + [init_pcg_solver/4, + run_pcg_solver/3, + pcg_init_graph/0 + ]). + :- use_module('clpbn/horus_ground', [call_horus_ground_solver/6, check_if_horus_ground_solver_done/1, @@ -59,47 +88,8 @@ finalize_horus_lifted_solver/1 ]). -:- use_module('clpbn/jt', - [jt/3, - init_jt_solver/4, - run_jt_solver/3 - ]). - -:- use_module('clpbn/bdd', - [bdd/3, - init_bdd_solver/4, - run_bdd_solver/3, - init_bdd_ground_solver/5, - run_bdd_ground_solver/3, - call_bdd_ground_solver/6 - ]). - -%% :- use_module('clpbn/bnt', -%% [do_bnt/3, -%% check_if_bnt_done/1 -%% ]). - -:- use_module('clpbn/gibbs', - [gibbs/3, - check_if_gibbs_done/1, - init_gibbs_solver/4, - run_gibbs_solver/3 - ]). - -:- use_module('clpbn/pgrammar', - [init_pcg_solver/4, - run_pcg_solver/3, - pcg_init_graph/0 - ]). - -:- use_module('clpbn/graphs', - [ - clpbn2graph/1 - ]). - :- use_module('clpbn/dists', - [ - dist/4, + [dist/4, get_dist/4, get_evidence_position/3, get_evidence_from_position/3, @@ -107,33 +97,47 @@ ]). :- use_module('clpbn/evidence', - [ - store_evidence/1, + [store_evidence/1, add_stored_evidence/2, incorporate_evidence/2, check_stored_evidence/2, put_evidence/2 ]). + +:- use_module('clpbn/ground_factors', + [generate_network/5]). :- use_module('clpbn/utils', - [ - sort_vars_by_key/3 - ]). + [sort_vars_by_key/3]). + +:- use_module('clpbn/graphs', + [clpbn2graph/1]). :- use_module('clpbn/graphviz', - [clpbn2gviz/4]). - -:- use_module(clpbn/ground_factors, - [generate_network/5]). + [clpbn2gviz/4]). -:- dynamic solver/1,output/1,use/1,suppress_attribute_display/1, parameter_softening/1, em_solver/1, use_parfactors/1. +% +% avoid the overhead of using goal_expansion/2. +% +:- multifile user:term_expansion/2. + +:- dynamic user:term_expansion/2. + +:- dynamic + solver/1, + output/1, + use/1, + suppress_attribute_display/1, + parameter_softening/1, + em_solver/1, + use_parfactors/1. + +:- meta_predicate probability(:,-), conditional_probability(:,:,-). + solver(ve). em_solver(bp). - -:- meta_predicate probability(:,-), conditional_probability(:,:,-). - %output(xbif(user_error)). %output(gviz(user_error)). output(no). @@ -141,6 +145,7 @@ suppress_attribute_display(false). parameter_softening(m_estimate(10)). use_parfactors(off). + clpbn_flag(Flag,Option) :- clpbn_flag(Flag, Option, Option). 
@@ -182,7 +187,7 @@ clpbn_flag(use_factors,Before,After) :- dist(Dist, DistInfo, Key, Parents), add_evidence(Var,Key,DistInfo,El) % ,writeln({Var = Key with Dist}) -. + . % % make sure a query variable is reachable by the garbage collector. @@ -331,8 +336,7 @@ write_out(jt, GVars, AVars, DiffVars) :- write_out(bdd, GVars, AVars, DiffVars) :- bdd(GVars, AVars, DiffVars). write_out(bp, _GVars, _AVars, _DiffVars) :- - writeln('interface not supported any longer'). - %bp(GVars, AVars, DiffVars). + writeln('interface not supported any longer'). write_out(gibbs, GVars, AVars, DiffVars) :- gibbs(GVars, AVars, DiffVars). write_out(bnt, GVars, AVars, DiffVars) :- @@ -427,7 +431,7 @@ find_var([_|DVars], V, Key, [_|DKeys]) :- process_vars([], []). process_vars([V|Vs], [K|Ks]) :- - process_var(V, K), + process_var(V, K), process_vars(Vs, Ks). process_var(V, K) :- get_atts(V, [key(K)]), !. @@ -529,6 +533,15 @@ user:term_expansion((A :- {}), ( :- true )) :- !, % evidence clpbn_key(Var,Key) :- get_atts(Var, [key(Key)]). + + +% +% only useful for probabilistic context free grammars +% +clpbn_init_graph(pcg) :- !, + pcg_init_graph. +clpbn_init_graph(_). + % % This is a routine to start a solver, called by the learning procedures (ie, em). @@ -544,22 +557,26 @@ clpbn_init_solver(LVs, Vs0, VarsWithUnboundKeys, State) :- clpbn_init_solver(gibbs, LVs, Vs0, VarsWithUnboundKeys, State) :- init_gibbs_solver(LVs, Vs0, VarsWithUnboundKeys, State). + clpbn_init_solver(ve, LVs, Vs0, VarsWithUnboundKeys, State) :- init_ve_solver(LVs, Vs0, VarsWithUnboundKeys, State). + clpbn_init_solver(bp, LVs, Vs0, VarsWithUnboundKeys, State) :- init_horus_ground_solver(LVs, Vs0, VarsWithUnboundKeys, State). + clpbn_init_solver(jt, LVs, Vs0, VarsWithUnboundKeys, State) :- init_jt_solver(LVs, Vs0, VarsWithUnboundKeys, State). + clpbn_init_solver(bdd, LVs, Vs0, VarsWithUnboundKeys, State) :- init_bdd_solver(LVs, Vs0, VarsWithUnboundKeys, State). + clpbn_init_solver(pcg, LVs, Vs0, VarsWithUnboundKeys, State) :- init_pcg_solver(LVs, Vs0, VarsWithUnboundKeys, State). % % LVs is the list of lists of variables to marginalise % Vs is the full graph -% Ps are the probabilities on LVs. -% +% Ps are the probabilities on LVs. % clpbn_run_solver(LVs, LPs, State) :- solver(Solver), @@ -582,6 +599,13 @@ clpbn_run_solver(bdd, LVs, LPs, State) :- clpbn_run_solver(pcg, LVs, LPs, State) :- run_pcg_solver(LVs, LPs, State). + +clpbn_finalize_solver(State) :- + solver(bp), !, + functor(State, _, Last), + arg(Last, State, Info), + finalize_horus_ground_solver(Info). +clpbn_finalize_solver(_State). % @@ -637,19 +661,6 @@ pfl_run_solver(LVs, LPs, State, cbp) :- !, add_keys(Key1+V1,_Key2,Key1+V1). -% -% only useful for probabilistic context free grammars -% -clpbn_init_graph(pcg) :- !, - pcg_init_graph. -clpbn_init_graph(_). - -clpbn_finalize_solver(State) :- - solver(bp), !, - functor(State, _, Last), - arg(Last, State, Info), - finalize_horus_ground_solver(Info). -clpbn_finalize_solver(_State). probability(Goal, Prob) :- findall(Prob, do_probability(Goal, [], Prob), [Prob]). @@ -693,20 +704,20 @@ variabilise_last([Arg1,Arg2|Args], Arg, Arg1.NArgs, V) :- variabilise_last(Arg2.Args, Arg, NArgs, V). match_probability(VPs, Goal, C, V, Prob) :- - match_probabilities(VPs, Goal, C, V, Prob). + match_probabilities(VPs, Goal, C, V, Prob). match_probabilities([p(V0=C)=Prob|_], _, C, V, Prob) :- - V0 == V, - !. + V0 == V, + !. match_probabilities([_|Probs], G, C, V, Prob) :- - match_probabilities(Probs, G, C, V, Prob). + match_probabilities(Probs, G, C, V, Prob). 
goal_to_key(_:Goal, Skolem) :- - goal_to_key(Goal, Skolem). + goal_to_key(Goal, Skolem). goal_to_key(Goal, Skolem) :- - functor(Goal, Na, Ar), - Ar1 is Ar-1, - functor(Skolem, Na, Ar1). + functor(Goal, Na, Ar), + Ar1 is Ar-1, + functor(Skolem, Na, Ar1). :- use_parfactors(on) -> true ; assert(use_parfactors(off)). diff --git a/packages/CLPBN/clpbn/display.yap b/packages/CLPBN/clpbn/display.yap index b8c9575c6..7b843cd77 100644 --- a/packages/CLPBN/clpbn/display.yap +++ b/packages/CLPBN/clpbn/display.yap @@ -12,6 +12,8 @@ :- use_module(library(maplist)). +:- use_module(library(atts)). + :- attribute posterior/4. diff --git a/packages/CLPBN/clpbn/ground_factors.yap b/packages/CLPBN/clpbn/ground_factors.yap index d2732f238..21575cf2d 100644 --- a/packages/CLPBN/clpbn/ground_factors.yap +++ b/packages/CLPBN/clpbn/ground_factors.yap @@ -1,40 +1,34 @@ -%parfactor( -% [ability(P),grade(C,S), satisfaction(C,S,P)], -% \phi = [....], -% [P,C,S], -% [P \in [p1,p2,p4], C \in [c1,c3], S \in [s2,s3]]). -% [S \= s2]) - - -:- module(pfl_ground_factors, [ - generate_network/5, - f/3 +:- module(pfl_ground_factors, + [generate_network/5, + f/3 ]). -:- use_module(library(bhash), [ - b_hash_new/1, - b_hash_lookup/3, - b_hash_insert/4, - b_hash_to_list/2]). +:- use_module(library(bhash), + [b_hash_new/1, + b_hash_lookup/3, + b_hash_insert/4, + b_hash_to_list/2 + ]). -:- use_module(library(lists), [ - delete/3, - nth0/3, - member/2]). +:- use_module(library(lists), + [member/2]). :- use_module(library(maplist)). -:- use_module(library(pfl), [ - factor/6, - defined_in_factor/2, - skolem/2]). +:- use_module(library(atts)). -:- use_module(library(clpbn/aggregates), [ - avg_factors/5]). +:- use_module(library(pfl), + [factor/6, + defined_in_factor/2, + skolem/2 + ]). -:- use_module(library(clpbn/dists), [ - dist/4]). +:- use_module(library(clpbn/aggregates), + [avg_factors/5]). + +:- use_module(library(clpbn/dists), + [dist/4]). :- dynamic currently_defined/1, queue/1, f/4. @@ -59,20 +53,20 @@ generate_network(QueryVars, QueryKeys, Keys, Factors, EList) :- % clean global stateq % init_global_search :- - retractall(queue(_)), - retractall(currently_defined(_)), - retractall(f(_,_,_)). + retractall(queue(_)), + retractall(currently_defined(_)), + retractall(f(_,_,_)). pair_to_evidence(K-E, K=E). include_evidence(V, Evidence0, Evidence) :- clpbn:get_atts(V,[key(K),evidence(E)]), !, ( - b_hash_lookup(K, E1, Evidence0) + b_hash_lookup(K, E1, Evidence0) -> - (E \= E1 -> throw(clpbn:incompatible_evidence(K,E,E1)) ; Evidence = Evidence0) + (E \= E1 -> throw(clpbn:incompatible_evidence(K,E,E1)) ; Evidence = Evidence0) ; - b_hash_insert(Evidence0, K, E, Evidence) + b_hash_insert(Evidence0, K, E, Evidence) ). include_evidence(_, Evidence, Evidence). @@ -82,11 +76,11 @@ static_evidence(Evidence0, Evidence) :- include_static_evidence(K=E, Evidence0, Evidence) :- ( - b_hash_lookup(K, E1, Evidence0) + b_hash_lookup(K, E1, Evidence0) -> - (E \= E1 -> throw(incompatible_evidence(K,E,E1)) ; Evidence = Evidence0) + (E \= E1 -> throw(incompatible_evidence(K,E,E1)) ; Evidence = Evidence0) ; - b_hash_insert(Evidence0, K, E, Evidence) + b_hash_insert(Evidence0, K, E, Evidence) ). @@ -122,14 +116,14 @@ propagate. 
do_propagate(K) :- %writeln(-K), \+ currently_defined(K), - ( ground(K) -> assert(currently_defined(K)) ; true), + ( ground(K) -> assert(currently_defined(K)) ; true), ( - defined_in_factor(K, ParFactor), - add_factor(ParFactor, Ks) - *-> - true + defined_in_factor(K, ParFactor), + add_factor(ParFactor, Ks) + *-> + true ; - throw(error(no_defining_factor(K))) + throw(error(no_defining_factor(K))) ), member(K1, Ks), \+ currently_defined(K1), @@ -139,25 +133,26 @@ do_propagate(_K) :- propagate. add_factor(factor(Type, Id, Ks, _, _Phi, Constraints), NKs) :- -% writeln(+Ks), - ( Ks = [K,Els], var(Els) - -> - % aggregate factor - once(run(Constraints)), - avg_factors(K, Els, 0.0, NewKeys, NewId), - NKs = [K|NewKeys] - ; - run(Constraints), - NKs = Ks, - Id = NewId +% writeln(+Ks), + ( + Ks = [K,Els], var(Els) + -> + % aggregate factor + once(run(Constraints)), + avg_factors(K, Els, 0.0, NewKeys, NewId), + NKs = [K|NewKeys] + ; + run(Constraints), + NKs = Ks, + Id = NewId ), ( - f(Type, NewId, NKs) - -> - true - ; - assert(f(Type, NewId, NKs)) - ). + f(Type, NewId, NKs) + -> + true + ; + assert(f(Type, NewId, NKs)) + ). run([Goal|Goals]) :- call(user:Goal), diff --git a/packages/CLPBN/clpbn/horus_ground.yap b/packages/CLPBN/clpbn/horus_ground.yap index 03b5764ec..54631100f 100644 --- a/packages/CLPBN/clpbn/horus_ground.yap +++ b/packages/CLPBN/clpbn/horus_ground.yap @@ -21,7 +21,7 @@ cpp_run_ground_solver/3, cpp_set_vars_information/2, cpp_free_ground_network/1, - set_solver/1 + set_solver/1 ]). :- use_module(library('clpbn/dists'), @@ -34,7 +34,7 @@ :- use_module(library('clpbn/display'), [clpbn_bind_vals/3]). -:- use_module(library(clpbn/numbers)). +:- use_module(library('clpbn/numbers')). :- use_module(library(charsio), [term_to_atom/2]). diff --git a/packages/CLPBN/clpbn/ve.yap b/packages/CLPBN/clpbn/ve.yap index b19bf020c..5ef49a3d4 100644 --- a/packages/CLPBN/clpbn/ve.yap +++ b/packages/CLPBN/clpbn/ve.yap @@ -21,8 +21,8 @@ init_ve_ground_solver/5, run_ve_ground_solver/3, call_ve_ground_solver/6]). - -:- attribute size/1, all_diffs/1. + +:- use_module(library(atts)). :- use_module(library(ordsets), [ord_union/3, @@ -72,6 +72,8 @@ :- use_module(library('clpbn/aggregates'), [check_for_agg_vars/2]). + +:- attribute size/1, all_diffs/1. % % uses a bipartite graph where bigraph(Vs, NFs, Fs) diff --git a/packages/CLPBN/learning/em.yap b/packages/CLPBN/learning/em.yap index 2dead04bb..5750e6c59 100644 --- a/packages/CLPBN/learning/em.yap +++ b/packages/CLPBN/learning/em.yap @@ -4,67 +4,71 @@ :- module(clpbn_em, [em/5]). -:- use_module(library(lists), - [append/3, - delete/3]). - :- reexport(library(clpbn), - [ - clpbn_flag/2, - clpbn_flag/3]). + [clpbn_flag/2, + clpbn_flag/3 + ]). :- use_module(library(clpbn), - [clpbn_init_graph/1, - clpbn_init_solver/4, - clpbn_run_solver/3, - pfl_init_solver/5, - pfl_run_solver/3, - clpbn_finalize_solver/1, - conditional_probability/3, - clpbn_flag/2]). + [clpbn_init_graph/1, + clpbn_init_solver/4, + clpbn_run_solver/3, + clpbn_finalize_solver/1, + pfl_init_solver/5, + pfl_run_solver/3, + conditional_probability/3, + clpbn_flag/2 + ]). :- use_module(library('clpbn/dists'), - [get_dist_domain_size/2, - empty_dist/2, - dist_new_table/2, - get_dist_key/2, - randomise_all_dists/0, - uniformise_all_dists/0]). + [get_dist_domain_size/2, + empty_dist/2, + dist_new_table/2, + get_dist_key/2, + randomise_all_dists/0, + uniformise_all_dists/0 + ]). -:- use_module(library(clpbn/ground_factors), - [generate_network/5, - f/3]). 
- -:- use_module(library(bhash), [ - b_hash_new/1, - b_hash_lookup/3, - b_hash_insert/4]). +:- use_module(library('clpbn/ground_factors'), + [generate_network/5, + f/3 + ]). + +:- use_module(library('clpbn/utils'), + [check_for_hidden_vars/3, + sort_vars_by_key/3 + ]). :- use_module(library('clpbn/learning/learn_utils'), - [run_all/1, - clpbn_vars/2, - normalise_counts/2, - compute_likelihood/3, - soften_sample/2]). + [run_all/1, + clpbn_vars/2, + normalise_counts/2, + compute_likelihood/3, + soften_sample/2 + ]). + +:- use_module(library(bhash), + [b_hash_new/1, + b_hash_lookup/3, + b_hash_insert/4 + ]). +:- use_module(library(matrix), + [matrix_add/3, + matrix_to_list/2 + ]). + :- use_module(library(lists), - [member/2]). + [member/2]). + +:- use_module(library(rbtrees), + [rb_new/1, + rb_insert/4, + rb_lookup/3 + ]). :- use_module(library(maplist)). -:- use_module(library(matrix), - [matrix_add/3, - matrix_to_list/2]). - -:- use_module(library(rbtrees), - [rb_new/1, - rb_insert/4, - rb_lookup/3]). - -:- use_module(library('clpbn/utils'), - [ - check_for_hidden_vars/3, - sort_vars_by_key/3]). :- meta_predicate em(:,+,+,-,-), init_em(:,-). @@ -101,9 +105,9 @@ init_em(Items, State) :- % randomise_all_dists, % set initial values for distributions uniformise_all_dists, - setup_em_network(Items, Solver, State). + setup_em_network(Items, State). -setup_em_network(Items, Solver, state( AllDists, AllDistInstances, MargKeys, SolverState)) :- +setup_em_network(Items, state(AllDists, AllDistInstances, MargKeys, SolverState)) :- clpbn:use_parfactors(on), !, % get all variables to marginalise run_examples(Items, Keys, Factors, EList), @@ -111,7 +115,7 @@ setup_em_network(Items, Solver, state( AllDists, AllDistInstances, MargKeys, Sol generate_dists(Factors, EList, AllDists, AllDistInstances, MargKeys), % setup solver, if necessary pfl_init_solver(MargKeys, Keys, Factors, EList, SolverState). -setup_em_network(Items, Solver, state( AllDists, AllDistInstances, MargVars, SolverVars)) :- +setup_em_network(Items, state(AllDists, AllDistInstances, MargVars, SolverState)) :- % create the ground network call_run_all(Items), % get all variables to marginalise @@ -121,7 +125,7 @@ setup_em_network(Items, Solver, state( AllDists, AllDistInstances, MargVars, Sol % remove variables that do not have to do with this query. different_dists(AllVars, AllDists, AllDistInstances, MargVars), % setup solver by doing parameter independent work. - clpbn_init_solver(MargVars, AllVars, _, SolverVars). + clpbn_init_solver(MargVars, AllVars, _, SolverState). run_examples(user:Exs, Keys, Factors, EList) :- Exs = [_:_|_], !, @@ -232,9 +236,9 @@ all_dists([], _, []). all_dists([V|AllVars], AllVars0, [i(Id, [V|Parents], Cases, Hiddens)|Dists]) :- % V is an instance of Id clpbn:get_atts(V, [dist(Id,Parents)]), - sort([V|Parents], Sorted), + sort([V|Parents], Sorted), length(Sorted, LengSorted), - length(Parents, LengParents), + length(Parents, LengParents), ( LengParents+1 =:= LengSorted -> diff --git a/packages/CLPBN/pfl.yap b/packages/CLPBN/pfl.yap index 97f3e972f..00b865d8e 100644 --- a/packages/CLPBN/pfl.yap +++ b/packages/CLPBN/pfl.yap @@ -1,30 +1,31 @@ % -% This module defines PFL, the prolog factor language. +% This module defines PFL, the Prolog Factor Language. 
% % -:- module(pfl, [ - op(550,yfx,@), - op(550,yfx,::), - op(1150,fx,bayes), - op(1150,fx,markov), - factor/6, - skolem/2, - defined_in_factor/2, - get_pfl_cpt/5, % given id and keys, return new keys and cpt - get_pfl_parameters/2, % given id return par factor parameter - new_pfl_parameters/2, % given id set new parameters - get_first_pvariable/2, % given id get firt pvar (useful in bayesian) - get_factor_pvariable/2, % given id get any pvar - add_ground_factor/5 %add a new bayesian variable (for now) - ]). +:- module(pfl, + [op(550,yfx,@), + op(550,yfx,::), + op(1150,fx,bayes), + op(1150,fx,markov), + factor/6, + skolem/2, + defined_in_factor/2, + get_pfl_cpt/5, % given id and keys, return new keys and cpt + get_pfl_parameters/2, % given id return par factor parameter + new_pfl_parameters/2, % given id set new parameters + get_first_pvariable/2, % given id get firt pvar (useful in bayesian) + get_factor_pvariable/2, % given id get any pvar + add_ground_factor/5 %add a new bayesian variable (for now) + ]). :- reexport(library(clpbn), [clpbn_flag/2 as pfl_flag, set_clpbn_flag/2 as set_pfl_flag, conditional_probability/3, pfl_init_solver/5, - pfl_run_solver/3]). + pfl_run_solver/3 + ]). :- reexport(library(clpbn/horus), [set_solver/1]). @@ -32,24 +33,25 @@ :- reexport(library(clpbn/aggregates), [avg_factors/5]). - :- ( % if clp(bn) has done loading, we're top-level predicate_property(set_pfl_flag(_,_), imported_from(clpbn)) - -> + -> % we're using factor language % set appropriate flag set_pfl_flag(use_factors,on) - ; + ; % we're within clp(bn), no need to do anything true - ). + ). +:- use_module(library(atts)). :- use_module(library(lists), [nth0/3, append/3, - member/2]). - + member/2 + ]). + :- dynamic factor/6, skolem_in/2, skolem/2, preprocess/3, evidence/2, id/1. user:term_expansion( bayes((Formula ; Phi ; Constraints)), pfl:factor(bayes,Id,FList,FV,Phi,Constraints)) :- From d03ea1509eb92bea3b2a235be56620852f2c6d2b Mon Sep 17 00:00:00 2001 From: Tiago Gomes Date: Mon, 17 Dec 2012 11:56:15 +0000 Subject: [PATCH 14/89] Trivial --- packages/CLPBN/learning/em.yap | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/packages/CLPBN/learning/em.yap b/packages/CLPBN/learning/em.yap index 5750e6c59..fd05d591d 100644 --- a/packages/CLPBN/learning/em.yap +++ b/packages/CLPBN/learning/em.yap @@ -193,12 +193,12 @@ ltables([Id-T|Tables], [Key-LTable|FTables]) :- generate_dists(Factors, EList, AllDists, AllInfo, MargVars) :- - b_hash_new(Ev0), - foldl(elist_to_hash, EList, Ev0, Ev), - maplist(process_factor(Ev), Factors, Dists0), - sort(Dists0, Dists1), - group(Dists1, AllDists, AllInfo, MargVars0, []), - sort(MargVars0, MargVars). + b_hash_new(Ev0), + foldl(elist_to_hash, EList, Ev0, Ev), + maplist(process_factor(Ev), Factors, Dists0), + sort(Dists0, Dists1), + group(Dists1, AllDists, AllInfo, MargVars0, []), + sort(MargVars0, MargVars). elist_to_hash(K=V, Ev0, Ev) :- b_hash_insert(Ev0, K, V, Ev). 
From 2738c0fb56d27cd47e91789bc53b1e40d26a05ed Mon Sep 17 00:00:00 2001 From: Tiago Gomes Date: Mon, 17 Dec 2012 12:13:08 +0000 Subject: [PATCH 15/89] Use tabs instead of spaces for consistency --- packages/CLPBN/clpbn/horus.yap | 34 +++---- packages/CLPBN/clpbn/horus_ground.yap | 90 ++++++++--------- packages/CLPBN/clpbn/horus_lifted.yap | 135 +++++++++++++------------- 3 files changed, 130 insertions(+), 129 deletions(-) diff --git a/packages/CLPBN/clpbn/horus.yap b/packages/CLPBN/clpbn/horus.yap index 6bed62fa2..60c50eb21 100644 --- a/packages/CLPBN/clpbn/horus.yap +++ b/packages/CLPBN/clpbn/horus.yap @@ -5,34 +5,34 @@ ********************************************************/ :- module(clpbn_horus, - [set_solver/1, - set_horus_flag/1, - cpp_create_lifted_network/3, - cpp_create_ground_network/4, - cpp_set_parfactors_params/2, - cpp_set_factors_params/2, - cpp_run_lifted_solver/3, - cpp_run_ground_solver/3, - cpp_set_vars_information/2, - cpp_set_horus_flag/2, - cpp_free_lifted_network/1, - cpp_free_ground_network/1 - ]). + [set_solver/1, + set_horus_flag/1, + cpp_create_lifted_network/3, + cpp_create_ground_network/4, + cpp_set_parfactors_params/2, + cpp_set_factors_params/2, + cpp_run_lifted_solver/3, + cpp_run_ground_solver/3, + cpp_set_vars_information/2, + cpp_set_horus_flag/2, + cpp_free_lifted_network/1, + cpp_free_ground_network/1 + ]). :- use_module(library(clpbn), - [set_clpbn_flag/2]). + [set_clpbn_flag/2]). patch_things_up :- - assert_static(clpbn_horus:cpp_set_horus_flag(_,_)). + assert_static(clpbn_horus:cpp_set_horus_flag(_,_)). warning :- - format(user_error,"Horus library not installed: cannot use bp, fove~n.",[]). + format(user_error,"Horus library not installed: cannot use bp, fove~n.",[]). :- catch(load_foreign_files([horus], [], init_predicates), _, patch_things_up) - -> true ; warning. + -> true ; warning. set_solver(ve) :- !, set_clpbn_flag(solver,ve). diff --git a/packages/CLPBN/clpbn/horus_ground.yap b/packages/CLPBN/clpbn/horus_ground.yap index 54631100f..e170e796a 100644 --- a/packages/CLPBN/clpbn/horus_ground.yap +++ b/packages/CLPBN/clpbn/horus_ground.yap @@ -1,46 +1,46 @@ /******************************************************* Interface to Horus Ground Solvers. Used by: - - Variable Elimination - - Belief Propagation - - Counting Belief Propagation + - Variable Elimination + - Belief Propagation + - Counting Belief Propagation ********************************************************/ :- module(clpbn_horus_ground, - [call_horus_ground_solver/6, - check_if_horus_ground_solver_done/1, - init_horus_ground_solver/5, - run_horus_ground_solver/3, - finalize_horus_ground_solver/1 - ]). + [call_horus_ground_solver/6, + check_if_horus_ground_solver_done/1, + init_horus_ground_solver/5, + run_horus_ground_solver/3, + finalize_horus_ground_solver/1 + ]). :- use_module(horus, - [cpp_create_ground_network/4, - cpp_set_factors_params/2, - cpp_run_ground_solver/3, - cpp_set_vars_information/2, - cpp_free_ground_network/1, - set_solver/1 - ]). + [cpp_create_ground_network/4, + cpp_set_factors_params/2, + cpp_run_ground_solver/3, + cpp_set_vars_information/2, + cpp_free_ground_network/1, + set_solver/1 + ]). :- use_module(library('clpbn/dists'), - [dist/4, - get_dist_domain/2, - get_dist_domain_size/2, - get_dist_params/2 - ]). + [dist/4, + get_dist_domain/2, + get_dist_domain_size/2, + get_dist_params/2 + ]). :- use_module(library('clpbn/display'), - [clpbn_bind_vals/3]). + [clpbn_bind_vals/3]). :- use_module(library('clpbn/numbers')). 
:- use_module(library(charsio), - [term_to_atom/2]). + [term_to_atom/2]). :- use_module(library(pfl), - [skolem/2]). + [skolem/2]). :- use_module(library(maplist)). @@ -52,32 +52,32 @@ call_horus_ground_solver(QueryVars, QueryKeys, AllKeys, Factors, Evidence, Output) :- - init_horus_ground_solver(QueryKeys, AllKeys, Factors, Evidence, State), - run_horus_ground_solver([QueryKeys], Solutions, State), - clpbn_bind_vals([QueryVars], Solutions, Output), - finalize_horus_ground_solver(State). - + init_horus_ground_solver(QueryKeys, AllKeys, Factors, Evidence, State), + run_horus_ground_solver([QueryKeys], Solutions, State), + clpbn_bind_vals([QueryVars], Solutions, Output), + finalize_horus_ground_solver(State). + run_horus_ground_solver(QueryKeys, Solutions, state(Network,Hash,Id)) :- - %get_dists_parameters(DistIds, DistsParams), - %cpp_set_factors_params(Network, DistsParams), - lists_of_keys_to_ids(QueryKeys, QueryIds, Hash, _, Id, _), - cpp_run_ground_solver(Network, QueryIds, Solutions). + %get_dists_parameters(DistIds, DistsParams), + %cpp_set_factors_params(Network, DistsParams), + lists_of_keys_to_ids(QueryKeys, QueryIds, Hash, _, Id, _), + cpp_run_ground_solver(Network, QueryIds, Solutions). init_horus_ground_solver(QueryKeys, AllKeys, Factors, Evidence, state(Network,Hash4,Id4)) :- - get_factors_type(Factors, Type), - keys_to_numbers(AllKeys, Factors, Evidence, Hash4, Id4, FactorIds, EvidenceIds), - cpp_create_ground_network(Type, FactorIds, EvidenceIds, Network), - %writeln(network:(Type, FactorIds, EvidenceIds, Network)), writeln(''), - maplist(get_var_information, AllKeys, StatesNames), - maplist(term_to_atom, AllKeys, KeysAtoms), - cpp_set_vars_information(KeysAtoms, StatesNames). + get_factors_type(Factors, Type), + keys_to_numbers(AllKeys, Factors, Evidence, Hash4, Id4, FactorIds, EvidenceIds), + cpp_create_ground_network(Type, FactorIds, EvidenceIds, Network), + %writeln(network:(Type, FactorIds, EvidenceIds, Network)), writeln(''), + maplist(get_var_information, AllKeys, StatesNames), + maplist(term_to_atom, AllKeys, KeysAtoms), + cpp_set_vars_information(KeysAtoms, StatesNames). % TODO this is not beeing called! finalize_horus_ground_solver(state(Network,_Hash,_Id)) :- - cpp_free_ground_network(Network). + cpp_free_ground_network(Network). get_factors_type([f(bayes, _, _)|_], bayes) :- ! . @@ -85,13 +85,13 @@ get_factors_type([f(markov, _, _)|_], markov) :- ! . get_var_information(_:Key, Domain) :- !, - skolem(Key, Domain). + skolem(Key, Domain). get_var_information(Key, Domain) :- - skolem(Key, Domain). + skolem(Key, Domain). %get_dists_parameters([],[]). %get_dists_parameters([Id|Ids], [dist(Id, Params)|DistsInfo]) :- -% get_dist_params(Id, Params), -% get_dists_parameters(Ids, DistsInfo). +% get_dist_params(Id, Params), +% get_dists_parameters(Ids, DistsInfo). diff --git a/packages/CLPBN/clpbn/horus_lifted.yap b/packages/CLPBN/clpbn/horus_lifted.yap index 3de7d0481..b76e08378 100644 --- a/packages/CLPBN/clpbn/horus_lifted.yap +++ b/packages/CLPBN/clpbn/horus_lifted.yap @@ -1,67 +1,67 @@ /******************************************************* Interface to Horus Lifted Solvers. 
Used by: - - Generalized Counting First-Order Variable Elimination (GC-FOVE) - - Lifted First-Order Belief Propagation - - Lifted First-Order Knowledge Compilation + - Generalized Counting First-Order Variable Elimination (GC-FOVE) + - Lifted First-Order Belief Propagation + - Lifted First-Order Knowledge Compilation ********************************************************/ :- module(clpbn_horus_lifted, - [call_horus_lifted_solver/3, - check_if_horus_lifted_solver_done/1, - init_horus_lifted_solver/4, - run_horus_lifted_solver/3, - finalize_horus_lifted_solver/1 - ]). + [call_horus_lifted_solver/3, + check_if_horus_lifted_solver_done/1, + init_horus_lifted_solver/4, + run_horus_lifted_solver/3, + finalize_horus_lifted_solver/1 + ]). :- use_module(horus, - [cpp_create_lifted_network/3, - cpp_set_parfactors_params/2, - cpp_run_lifted_solver/3, - cpp_free_lifted_network/1 - ]). + [cpp_create_lifted_network/3, + cpp_set_parfactors_params/2, + cpp_run_lifted_solver/3, + cpp_free_lifted_network/1 + ]). :- use_module(library('clpbn/display'), - [clpbn_bind_vals/3]). + [clpbn_bind_vals/3]). :- use_module(library('clpbn/dists'), - [get_dist_params/2]). + [get_dist_params/2]). :- use_module(library(pfl), - [factor/6, - skolem/2, - get_pfl_parameters/2 - ]). + [factor/6, + skolem/2, + get_pfl_parameters/2 + ]). call_horus_lifted_solver(QueryVars, AllVars, Output) :- - init_horus_lifted_solver(_, AllVars, _, State), - run_horus_lifted_solver(QueryVars, Solutions, State), - clpbn_bind_vals(QueryVars, Solutions, Output), - finalize_horus_lifted_solver(State). + init_horus_lifted_solver(_, AllVars, _, State), + run_horus_lifted_solver(QueryVars, Solutions, State), + clpbn_bind_vals(QueryVars, Solutions, Output), + finalize_horus_lifted_solver(State). init_horus_lifted_solver(_, AllVars, _, state(ParfactorList, DistIds)) :- - get_parfactors(Parfactors), - get_dist_ids(Parfactors, DistIds0), - sort(DistIds0, DistIds), - get_observed_vars(AllVars, ObservedVars), - %writeln(parfactors:Parfactors:'\n'), - %writeln(evidence:ObservedVars:'\n'), - cpp_create_lifted_network(Parfactors, ObservedVars, ParfactorList). + get_parfactors(Parfactors), + get_dist_ids(Parfactors, DistIds0), + sort(DistIds0, DistIds), + get_observed_vars(AllVars, ObservedVars), + %writeln(parfactors:Parfactors:'\n'), + %writeln(evidence:ObservedVars:'\n'), + cpp_create_lifted_network(Parfactors, ObservedVars, ParfactorList). run_horus_lifted_solver(QueryVars, Solutions, state(ParfactorList, DistIds)) :- - get_query_keys(QueryVars, QueryKeys), - get_dists_parameters(DistIds, DistsParams), - %writeln(dists:DistsParams), writeln(''), - cpp_set_parfactors_params(ParfactorList, DistsParams), - cpp_run_lifted_solver(ParfactorList, QueryKeys, Solutions). + get_query_keys(QueryVars, QueryKeys), + get_dists_parameters(DistIds, DistsParams), + %writeln(dists:DistsParams), writeln(''), + cpp_set_parfactors_params(ParfactorList, DistsParams), + cpp_run_lifted_solver(ParfactorList, QueryKeys, Solutions). finalize_horus_lifted_solver(state(ParfactorList, _)) :- - cpp_free_lifted_network(ParfactorList). + cpp_free_lifted_network(ParfactorList). :- table get_parfactors/1. @@ -77,72 +77,73 @@ finalize_horus_lifted_solver(state(ParfactorList, _)) :- % Tuples: ground bindings for variables in Vs, of the form [fv(x,y)] % get_parfactors(Factors) :- - findall(F, is_factor(F), Factors). + findall(F, is_factor(F), Factors). 
is_factor(pf(Id, Ks, Rs, Phi, Tuples)) :- - factor(_Type, Id, Ks, Vs, Table, Constraints), - get_ranges(Ks,Rs), - Table \= avg, - gen_table(Table, Phi), - all_tuples(Constraints, Vs, Tuples). + factor(_Type, Id, Ks, Vs, Table, Constraints), + get_ranges(Ks,Rs), + Table \= avg, + gen_table(Table, Phi), + all_tuples(Constraints, Vs, Tuples). get_ranges([],[]). get_ranges(K.Ks, Range.Rs) :- !, - skolem(K,Domain), - length(Domain,Range), - get_ranges(Ks, Rs). + skolem(K,Domain), + length(Domain,Range), + get_ranges(Ks, Rs). gen_table(Table, Phi) :- - ( is_list(Table) - -> - Phi = Table - ; - call(user:Table, Phi) - ). + ( + is_list(Table) + -> + Phi = Table + ; + call(user:Table, Phi) + ). all_tuples(Constraints, Tuple, Tuples) :- - setof(Tuple, Constraints^run(Constraints), Tuples). + setof(Tuple, Constraints^run(Constraints), Tuples). run([]). run(Goal.Constraints) :- - user:Goal, - run(Constraints). + user:Goal, + run(Constraints). get_dist_ids([], []). get_dist_ids(pf(Id, _, _, _, _).Parfactors, Id.DistIds) :- - get_dist_ids(Parfactors, DistIds). + get_dist_ids(Parfactors, DistIds). get_observed_vars([], []). get_observed_vars(V.AllAttVars, [K:E|ObservedVars]) :- - clpbn:get_atts(V,[key(K)]), - ( clpbn:get_atts(V,[evidence(E)]) ; pfl:evidence(K,E) ), !, - get_observed_vars(AllAttVars, ObservedVars). + clpbn:get_atts(V,[key(K)]), + ( clpbn:get_atts(V,[evidence(E)]) ; pfl:evidence(K,E) ), !, + get_observed_vars(AllAttVars, ObservedVars). get_observed_vars(V.AllAttVars, ObservedVars) :- - clpbn:get_atts(V,[key(_K)]), !, - get_observed_vars(AllAttVars, ObservedVars). + clpbn:get_atts(V,[key(_K)]), !, + get_observed_vars(AllAttVars, ObservedVars). get_query_keys([], []). get_query_keys(E1.L1, E2.L2) :- - get_query_keys_2(E1,E2), - get_query_keys(L1, L2). + get_query_keys_2(E1,E2), + get_query_keys(L1, L2). get_query_keys_2([], []). get_query_keys_2(V.AttVars, [RV|RVs]) :- - clpbn:get_atts(V,[key(RV)]), !, - get_query_keys_2(AttVars, RVs). + clpbn:get_atts(V,[key(RV)]), !, + get_query_keys_2(AttVars, RVs). get_dists_parameters([], []). get_dists_parameters([Id|Ids], [dist(Id, Params)|DistsInfo]) :- - get_pfl_parameters(Id, Params), - get_dists_parameters(Ids, DistsInfo). + get_pfl_parameters(Id, Params), + get_dists_parameters(Ids, DistsInfo). 
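Patch 15 only changes indentation, but the reformatted gen_table/2 clause above also documents a small piece of the factor interface: the table of a parfactor may be given either as an explicit list of parameters or as the name of a user-level predicate that yields that list. A minimal sketch of the two call forms, assuming horus_lifted.yap is loaded; my_phi/1 and demo/2 are hypothetical names defined in the user module, and gen_table/2 is reached through an explicit module qualifier since it is not exported.

% hypothetical user-level table: yields the parameter list
my_phi([0.8, 0.2]).

% both goals bind their output to [0.8, 0.2]
demo(Phi1, Phi2) :-
    clpbn_horus_lifted:gen_table([0.8, 0.2], Phi1),  % inline list
    clpbn_horus_lifted:gen_table(my_phi, Phi2).      % table given by predicate name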
From 409a230826a84a8505fede5bd72d9112be23ddb6 Mon Sep 17 00:00:00 2001 From: Tiago Gomes Date: Mon, 17 Dec 2012 14:50:12 +0000 Subject: [PATCH 16/89] Use the same convetion for module import and export --- packages/CLPBN/clpbn.yap | 151 +++++++++++----------- packages/CLPBN/clpbn/aggregates.yap | 55 ++++---- packages/CLPBN/clpbn/bdd.yap | 37 +++--- packages/CLPBN/clpbn/bnt.yap | 78 +++++------ packages/CLPBN/clpbn/connected.yap | 32 ++--- packages/CLPBN/clpbn/discrete_utils.yap | 15 ++- packages/CLPBN/clpbn/display.yap | 14 +- packages/CLPBN/clpbn/dists.yap | 85 ++++++------ packages/CLPBN/clpbn/evidence.yap | 38 +++--- packages/CLPBN/clpbn/gibbs.yap | 69 +++++----- packages/CLPBN/clpbn/graphs.yap | 15 +-- packages/CLPBN/clpbn/graphviz.yap | 4 +- packages/CLPBN/clpbn/ground_factors.yap | 30 ++--- packages/CLPBN/clpbn/hmm.yap | 21 ++- packages/CLPBN/clpbn/jt.yap | 130 ++++++++++--------- packages/CLPBN/clpbn/matrix_cpt_utils.yap | 92 ++++++------- packages/CLPBN/clpbn/numbers.yap | 17 ++- packages/CLPBN/clpbn/pgrammar.yap | 35 +++-- packages/CLPBN/clpbn/table.yap | 55 ++++---- packages/CLPBN/clpbn/topsort.yap | 13 +- packages/CLPBN/clpbn/utils.yap | 15 ++- packages/CLPBN/clpbn/ve.yap | 68 +++++----- packages/CLPBN/clpbn/viterbi.yap | 14 +- packages/CLPBN/clpbn/vmap.yap | 18 +-- packages/CLPBN/clpbn/xbif.yap | 7 +- packages/CLPBN/learning/em.yap | 86 ++++++------ packages/CLPBN/pfl.yap | 52 ++++---- 27 files changed, 632 insertions(+), 614 deletions(-) diff --git a/packages/CLPBN/clpbn.yap b/packages/CLPBN/clpbn.yap index ed5e0b7b3..a77d13d32 100644 --- a/packages/CLPBN/clpbn.yap +++ b/packages/CLPBN/clpbn.yap @@ -1,21 +1,21 @@ :- module(clpbn, - [{}/1, - clpbn_flag/2, - set_clpbn_flag/2, - clpbn_flag/3, - clpbn_key/2, - clpbn_init_graph/1, - clpbn_init_solver/4, - clpbn_run_solver/3, - clpbn_finalize_solver/1, - pfl_init_solver/5, - pfl_run_solver/3, - probability/2, - conditional_probability/3, - use_parfactors/1, - op(500, xfy, with) - ]). + [{}/1, + clpbn_flag/2, + set_clpbn_flag/2, + clpbn_flag/3, + clpbn_key/2, + clpbn_init_graph/1, + clpbn_init_solver/4, + clpbn_run_solver/3, + clpbn_finalize_solver/1, + pfl_init_solver/5, + pfl_run_solver/3, + probability/2, + conditional_probability/3, + use_parfactors/1, + op(500, xfy, with) + ]). :- use_module(library(atts)). @@ -30,92 +30,91 @@ :- attribute key/1, dist/2, evidence/1. :- use_module('clpbn/ve', - [ve/3, - check_if_ve_done/1, - init_ve_solver/4, - run_ve_solver/3, - init_ve_ground_solver/5, - run_ve_ground_solver/3, - call_ve_ground_solver/6 - ]). + [ve/3, + check_if_ve_done/1, + init_ve_solver/4, + run_ve_solver/3, + init_ve_ground_solver/5, + run_ve_ground_solver/3, + call_ve_ground_solver/6 + ]). :- use_module('clpbn/jt', - [jt/3, - init_jt_solver/4, - run_jt_solver/3 - ]). + [jt/3, + init_jt_solver/4, + run_jt_solver/3 + ]). :- use_module('clpbn/bdd', - [bdd/3, - init_bdd_solver/4, - run_bdd_solver/3, - init_bdd_ground_solver/5, - run_bdd_ground_solver/3, - call_bdd_ground_solver/6 - ]). + [bdd/3, + init_bdd_solver/4, + run_bdd_solver/3, + init_bdd_ground_solver/5, + run_bdd_ground_solver/3, + call_bdd_ground_solver/6 + ]). :- use_module('clpbn/gibbs', - [gibbs/3, - check_if_gibbs_done/1, - init_gibbs_solver/4, - run_gibbs_solver/3 - ]). + [gibbs/3, + check_if_gibbs_done/1, + init_gibbs_solver/4, + run_gibbs_solver/3 + ]). %% :- use_module('clpbn/bnt', -%% [do_bnt/3, -%% check_if_bnt_done/1 -%% ]). +%% [do_bnt/3, +%% check_if_bnt_done/1 +%% ]). 
:- use_module('clpbn/pgrammar', - [init_pcg_solver/4, - run_pcg_solver/3, - pcg_init_graph/0 - ]). + [init_pcg_solver/4, + run_pcg_solver/3, + pcg_init_graph/0 + ]). :- use_module('clpbn/horus_ground', - [call_horus_ground_solver/6, - check_if_horus_ground_solver_done/1, - init_horus_ground_solver/5, - run_horus_ground_solver/3, - finalize_horus_ground_solver/1 - ]). + [call_horus_ground_solver/6, + check_if_horus_ground_solver_done/1, + init_horus_ground_solver/5, + run_horus_ground_solver/3, + finalize_horus_ground_solver/1 + ]). :- use_module('clpbn/horus_lifted', - [call_horus_lifted_solver/3, - check_if_horus_lifted_solver_done/1, - init_horus_lifted_solver/4, - run_horus_lifted_solver/3, - finalize_horus_lifted_solver/1 - ]). + [call_horus_lifted_solver/3, + check_if_horus_lifted_solver_done/1, + init_horus_lifted_solver/4, + run_horus_lifted_solver/3, + finalize_horus_lifted_solver/1 + ]). :- use_module('clpbn/dists', - [dist/4, - get_dist/4, - get_evidence_position/3, - get_evidence_from_position/3, - additive_dists/6 - ]). + [dist/4, + get_dist/4, + get_evidence_position/3, + get_evidence_from_position/3, + additive_dists/6 + ]). :- use_module('clpbn/evidence', - [store_evidence/1, - add_stored_evidence/2, - incorporate_evidence/2, - check_stored_evidence/2, - put_evidence/2 - ]). + [store_evidence/1, + add_stored_evidence/2, + incorporate_evidence/2, + check_stored_evidence/2, + put_evidence/2 + ]). :- use_module('clpbn/ground_factors', - [generate_network/5]). + [generate_network/5]). :- use_module('clpbn/utils', - [sort_vars_by_key/3]). + [sort_vars_by_key/3]). :- use_module('clpbn/graphs', - [clpbn2graph/1]). + [clpbn2graph/1]). :- use_module('clpbn/graphviz', - [clpbn2gviz/4]). - + [clpbn2gviz/4]). % % avoid the overhead of using goal_expansion/2. diff --git a/packages/CLPBN/clpbn/aggregates.yap b/packages/CLPBN/clpbn/aggregates.yap index 20394516b..5fa7d0718 100644 --- a/packages/CLPBN/clpbn/aggregates.yap +++ b/packages/CLPBN/clpbn/aggregates.yap @@ -1,42 +1,45 @@ % % generate explicit CPTs % -:- module(clpbn_aggregates, [ - check_for_agg_vars/2, - cpt_average/6, - cpt_average/7, - cpt_max/6, - cpt_min/6, - avg_factors/5 - ]). +:- module(clpbn_aggregates, + [check_for_agg_vars/2, + cpt_average/6, + cpt_average/7, + cpt_max/6, + cpt_min/6, + avg_factors/5 + ]). -:- use_module(library(clpbn), [{}/1]). +:- use_module(library(clpbn), + [{}/1]). :- use_module(library(lists), - [last/2, - sumlist/2, - sum_list/3, - max_list/2, - min_list/2, - nth0/3 - ]). + [last/2, + sumlist/2, + sum_list/3, + max_list/2, + min_list/2, + nth0/3 + ]). :- use_module(library(matrix), - [matrix_new/3, - matrix_to_list/2, - matrix_set/3]). + [matrix_new/3, + matrix_to_list/2, + matrix_set/3 + ]). :- use_module(library(clpbn/dists), - [ - add_dist/6, - get_dist_domain_size/2]). + [add_dist/6, + get_dist_domain_size/2 + ]). :- use_module(library(clpbn/matrix_cpt_utils), - [normalise_CPT_on_lines/3]). + [normalise_CPT_on_lines/3]). :- use_module(library(pfl), - [skolem/2, - add_ground_factor/5]). + [skolem/2, + add_ground_factor/5 + ]). :- use_module(library(bhash)). @@ -369,7 +372,7 @@ fill_in_min(_,_). get_vdist_size(V, Sz) :- - var(V), !, + var(V), !, clpbn:get_atts(V, [dist(Dist,_)]), get_dist_domain_size(Dist, Sz). get_vdist_size(V, Sz) :- diff --git a/packages/CLPBN/clpbn/bdd.yap b/packages/CLPBN/clpbn/bdd.yap index 4c45084fc..3040a754f 100644 --- a/packages/CLPBN/clpbn/bdd.yap +++ b/packages/CLPBN/clpbn/bdd.yap @@ -18,32 +18,32 @@ Va <- P*X1*Y1 + Q*X2*Y2 + ... 
**************************************************/ :- module(clpbn_bdd, - [bdd/3, - set_solver_parameter/2, - init_bdd_solver/4, - init_bdd_ground_solver/5, - run_bdd_solver/3, - run_bdd_ground_solver/3, - finalize_bdd_solver/1, - check_if_bdd_done/1, - call_bdd_ground_solver/6 - ]). + [bdd/3, + set_solver_parameter/2, + init_bdd_solver/4, + init_bdd_ground_solver/5, + run_bdd_solver/3, + run_bdd_ground_solver/3, + finalize_bdd_solver/1, + check_if_bdd_done/1, + call_bdd_ground_solver/6 + ]). :- use_module(library('clpbn/dists'), - [dist/4, - get_dist_domain/2, - get_dist_domain_size/2, - get_dist_all_sizes/2, - get_dist_params/2 - ]). + [dist/4, + get_dist_domain/2, + get_dist_domain_size/2, + get_dist_all_sizes/2, + get_dist_params/2 + ]). :- use_module(library('clpbn/display'), - [clpbn_bind_vals/3]). + [clpbn_bind_vals/3]). :- use_module(library('clpbn/aggregates'), - [check_for_agg_vars/2]). + [check_for_agg_vars/2]). :- use_module(library(atts)). @@ -1064,4 +1064,3 @@ build_cnf(CNF, IVs, Indics, AllParms, AllParmValues, Val) :- set_to_ones(Extra), ddnnf_is(F, Val). - diff --git a/packages/CLPBN/clpbn/bnt.yap b/packages/CLPBN/clpbn/bnt.yap index f510abacd..4f8dddf49 100644 --- a/packages/CLPBN/clpbn/bnt.yap +++ b/packages/CLPBN/clpbn/bnt.yap @@ -1,45 +1,51 @@ -:- module(bnt, [do_bnt/3, - create_bnt_graph/2, - check_if_bnt_done/1]). +:- module(bnt, + [do_bnt/3, + create_bnt_graph/2, + check_if_bnt_done/1 + ]). -:- use_module(library('clpbn/display'), [ - clpbn_bind_vals/3]). +:- use_module(library('clpbn/display'), + [clpbn_bind_vals/3]). -:- use_module(library('clpbn/dists'), [ - get_dist_domain_size/2, - get_dist_domain/2, - get_dist_params/2 - ]). +:- use_module(library('clpbn/dists'), + [get_dist_domain_size/2, + get_dist_domain/2, + get_dist_params/2 + ]). -:- use_module(library('clpbn/discrete_utils'), [ - reorder_CPT/5]). +:- use_module(library('clpbn/discrete_utils'), + [reorder_CPT/5]). -:- use_module(library(matlab), [start_matlab/1, - close_matlab/0, - matlab_on/0, - matlab_eval_string/1, - matlab_eval_string/2, - matlab_matrix/4, - matlab_vector/2, - matlab_sequence/3, - matlab_initialized_cells/4, - matlab_get_variable/2, - matlab_call/2 - ]). +:- use_module(library(matlab), + [start_matlab/1, + close_matlab/0, + matlab_on/0, + matlab_eval_string/1, + matlab_eval_string/2, + matlab_matrix/4, + matlab_vector/2, + matlab_sequence/3, + matlab_initialized_cells/4, + matlab_get_variable/2, + matlab_call/2 + ]). -:- use_module(library(dgraphs), [dgraph_new/1, - dgraph_add_vertices/3, - dgraph_add_edges/3, - dgraph_top_sort/2, - dgraph_vertices/2, - dgraph_edges/2 - ]). +:- use_module(library(dgraphs), + [dgraph_new/1, + dgraph_add_vertices/3, + dgraph_add_edges/3, + dgraph_top_sort/2, + dgraph_vertices/2, + dgraph_edges/2 + ]). -:- use_module(library(lists), [append/3, - member/2,nth/3]). +:- use_module(library(lists), + [append/3, + member/2,nth/3 + ]). -:- use_module(library(ordsets), [ - ord_insert/3]). +:- use_module(library(ordsets), + [ord_insert/3]). :- yap_flag(write_strings,on). @@ -421,5 +427,5 @@ mk_evidence_query([V|L], [H|T], [ar(1,Pos,El)|LN]) :- get_dist_domain(Id,D), nth(El,D,H), mk_evidence_query(L, T, LN). - + diff --git a/packages/CLPBN/clpbn/connected.yap b/packages/CLPBN/clpbn/connected.yap index 450c61bd9..773511b8f 100644 --- a/packages/CLPBN/clpbn/connected.yap +++ b/packages/CLPBN/clpbn/connected.yap @@ -1,26 +1,28 @@ :- module(clpbn_connected, - [influences/3, - factor_influences/4, - init_influences/3, - influences/4] - ). 
+ [influences/3, + factor_influences/4, + init_influences/3, + influences/4 + ]). :- use_module(library(maplist)). :- use_module(library(dgraphs), - [dgraph_new/1, - dgraph_add_edges/3, - dgraph_add_vertex/3, - dgraph_neighbors/3, - dgraph_edge/3, - dgraph_transpose/2]). + [dgraph_new/1, + dgraph_add_edges/3, + dgraph_add_vertex/3, + dgraph_neighbors/3, + dgraph_edge/3, + dgraph_transpose/2 + ]). :- use_module(library(rbtrees), - [rb_new/1, - rb_lookup/3, - rb_insert/4, - rb_visit/2]). + [rb_new/1, + rb_lookup/3, + rb_insert/4, + rb_visit/2 + ]). factor_influences(Vs, QVars, Ev, LV) :- init_factor_influences(Vs, G, RG), diff --git a/packages/CLPBN/clpbn/discrete_utils.yap b/packages/CLPBN/clpbn/discrete_utils.yap index f8f3c9906..d6b718074 100644 --- a/packages/CLPBN/clpbn/discrete_utils.yap +++ b/packages/CLPBN/clpbn/discrete_utils.yap @@ -1,10 +1,14 @@ -:- module(discrete_utils, [project_from_CPT/3, - reorder_CPT/5, - get_dist_size/2]). +:- module(discrete_utils, + [project_from_CPT/3, + reorder_CPT/5, + get_dist_size/2 + ]). -:- use_module(library(clpbn/dists), [get_dist_domain_size/2, - get_dist_domain/2]). +:- use_module(library(clpbn/dists), + [get_dist_domain_size/2, + get_dist_domain/2 + ]). % % remove columns from a table % @@ -143,4 +147,3 @@ get_sizes([V|Deps], [Sz|Sizes]) :- get_dist_domain_size(Id,Sz), get_sizes(Deps, Sizes). - diff --git a/packages/CLPBN/clpbn/display.yap b/packages/CLPBN/clpbn/display.yap index 7b843cd77..006f7c77e 100644 --- a/packages/CLPBN/clpbn/display.yap +++ b/packages/CLPBN/clpbn/display.yap @@ -1,14 +1,14 @@ -:- module(clpbn_display, [ - clpbn_bind_vals/3]). +:- module(clpbn_display, + [clpbn_bind_vals/3]). :- use_module(library(lists), - [ - member/2 - ]). + [member/2]). -:- use_module(library(clpbn/dists), [get_dist_domain/2]). +:- use_module(library(clpbn/dists), + [get_dist_domain/2]). -:- use_module(library(clpbn), [use_parfactors/1]). +:- use_module(library(clpbn), + [use_parfactors/1]). :- use_module(library(maplist)). diff --git a/packages/CLPBN/clpbn/dists.yap b/packages/CLPBN/clpbn/dists.yap index 8f71e6fae..81a604d3a 100644 --- a/packages/CLPBN/clpbn/dists.yap +++ b/packages/CLPBN/clpbn/dists.yap @@ -3,47 +3,51 @@ % :- module(clpbn_dist, - [ - dist/1, - dist/4, - dists/1, - dist_new_table/2, - get_dist/4, - get_dist_matrix/5, - get_possibly_deterministic_dist_matrix/5, - get_dist_domain/2, - get_dist_domain_size/2, - get_dist_params/2, - get_dist_key/2, - get_dist_all_sizes/2, - get_evidence_position/3, - get_evidence_from_position/3, - dist_to_term/2, - empty_dist/2, - all_dist_ids/1, - randomise_all_dists/0, - randomise_dist/1, - uniformise_all_dists/0, - uniformise_dist/1, - reset_all_dists/0, - add_dist/6, - additive_dists/6 - ]). + [dist/1, + dist/4, + dists/1, + dist_new_table/2, + get_dist/4, + get_dist_matrix/5, + get_possibly_deterministic_dist_matrix/5, + get_dist_domain/2, + get_dist_domain_size/2, + get_dist_params/2, + get_dist_key/2, + get_dist_all_sizes/2, + get_evidence_position/3, + get_evidence_from_position/3, + dist_to_term/2, + empty_dist/2, + all_dist_ids/1, + randomise_all_dists/0, + randomise_dist/1, + uniformise_all_dists/0, + uniformise_dist/1, + reset_all_dists/0, + add_dist/6, + additive_dists/6 + ]). -:- use_module(library(lists),[nth0/3,append/3]). +:- use_module(library(lists), + [nth0/3, + append/3 + ]). :- use_module(library(clpbn), - [use_parfactors/1]). + [use_parfactors/1]). :- use_module(library(matrix), - [matrix_new/4, - matrix_new/3, - matrix_to_list/2, - matrix_to_logs/1]). 
+ [matrix_new/4, + matrix_new/3, + matrix_to_list/2, + matrix_to_logs/1 + ]). :- use_module(library(clpbn/matrix_cpt_utils), - [random_CPT/2, - uniform_CPT/2]). + [random_CPT/2, + uniform_CPT/2 + ]). /* :- mode dist(+, -). @@ -322,11 +326,11 @@ randomise_all_dists. randomise_dist(Dist) :- ( - use_parfactors(on) + use_parfactors(on) -> - pfl:get_pfl_factor_sizes(Dist, DSizes) + pfl:get_pfl_factor_sizes(Dist, DSizes) ; - recorded(clpbn_dist_psizes, db(Dist,DSizes), _) + recorded(clpbn_dist_psizes, db(Dist,DSizes), _) ), random_CPT(DSizes, NewCPT), dist_new_table(Dist, NewCPT). @@ -338,11 +342,11 @@ uniformise_all_dists. uniformise_dist(Dist) :- ( - use_parfactors(on) + use_parfactors(on) -> - pfl:get_pfl_factor_sizes(Dist, DSizes) + pfl:get_pfl_factor_sizes(Dist, DSizes) ; - recorded(clpbn_dist_psizes, db(Dist,DSizes), _) + recorded(clpbn_dist_psizes, db(Dist,DSizes), _) ), uniform_CPT(DSizes, NewCPT), dist_new_table(Dist, NewCPT). @@ -365,3 +369,4 @@ reset_all_dists. additive_dists(ip(Domain,Tabs1), ip(Domain,Tabs2), Parents1, Parents2, ip(Domain,Tabs), Parents) :- append(Tabs1, Tabs2, Tabs), append(Parents1, Parents2, Parents). + diff --git a/packages/CLPBN/clpbn/evidence.yap b/packages/CLPBN/clpbn/evidence.yap index 62b221858..450413e29 100644 --- a/packages/CLPBN/clpbn/evidence.yap +++ b/packages/CLPBN/clpbn/evidence.yap @@ -4,29 +4,27 @@ % :- module(clpbn_evidence, - [ - store_evidence/1, - incorporate_evidence/2, - check_stored_evidence/2, - add_stored_evidence/2, - put_evidence/2 - ]). + [store_evidence/1, + incorporate_evidence/2, + check_stored_evidence/2, + add_stored_evidence/2, + put_evidence/2 + ]). -:- use_module(library(clpbn), [ - {}/1, - clpbn_flag/3, - set_clpbn_flag/2 - ]). +:- use_module(library(clpbn), + [{}/1, + clpbn_flag/3, + set_clpbn_flag/2 + ]). -:- use_module(library('clpbn/dists'), [ - get_dist/4 - ]). +:- use_module(library('clpbn/dists'), + [get_dist/4]). -:- use_module(library(rbtrees), [ - rb_new/1, - rb_lookup/3, - rb_insert/4 - ]). +:- use_module(library(rbtrees), + [rb_new/1, + rb_lookup/3, + rb_insert/4 + ]). :- meta_predicate store_evidence(:). diff --git a/packages/CLPBN/clpbn/gibbs.yap b/packages/CLPBN/clpbn/gibbs.yap index 2a499b302..a8cccee0d 100644 --- a/packages/CLPBN/clpbn/gibbs.yap +++ b/packages/CLPBN/clpbn/gibbs.yap @@ -8,51 +8,54 @@ % :- module(clpbn_gibbs, - [gibbs/3, - check_if_gibbs_done/1, - init_gibbs_solver/4, - run_gibbs_solver/3]). + [gibbs/3, + check_if_gibbs_done/1, + init_gibbs_solver/4, + run_gibbs_solver/3 + ]). :- use_module(library(rbtrees), - [rb_new/1, - rb_insert/4, - rb_lookup/3]). + [rb_new/1, + rb_insert/4, + rb_lookup/3 + ]). :- use_module(library(lists), - [member/2, - append/3, - delete/3, - max_list/2, - sum_list/2]). + [member/2, + append/3, + delete/3, + max_list/2, + sum_list/2 + ]). :- use_module(library(ordsets), - [ord_subtract/3]). + [ord_subtract/3]). -:- use_module(library('clpbn/matrix_cpt_utils'), [ - project_from_CPT/3, - reorder_CPT/5, - multiply_possibly_deterministic_factors/3, - column_from_possibly_deterministic_CPT/3, - normalise_possibly_deterministic_CPT/2, - list_from_CPT/2]). +:- use_module(library('clpbn/matrix_cpt_utils'), + [project_from_CPT/3, + reorder_CPT/5, + multiply_possibly_deterministic_factors/3, + column_from_possibly_deterministic_CPT/3, + normalise_possibly_deterministic_CPT/2, + list_from_CPT/2 + ]). -:- use_module(library('clpbn/utils'), [ - check_for_hidden_vars/3]). +:- use_module(library('clpbn/utils'), + [check_for_hidden_vars/3]). 
-:- use_module(library('clpbn/dists'), [ - get_possibly_deterministic_dist_matrix/5, - get_dist_domain_size/2]). +:- use_module(library('clpbn/dists'), + [get_possibly_deterministic_dist_matrix/5, + get_dist_domain_size/2 + ]). -:- use_module(library('clpbn/topsort'), [ - topsort/2]). +:- use_module(library('clpbn/topsort'), + [topsort/2]). -:- use_module(library('clpbn/display'), [ - clpbn_bind_vals/3]). +:- use_module(library('clpbn/display'), + [clpbn_bind_vals/3]). :- use_module(library('clpbn/connected'), - [ - influences/3 - ]). + [influences/3]). :- dynamic gibbs_params/3. @@ -542,5 +545,3 @@ divide_list([C|Add], Sum, [P|Dist]) :- P is C/Sum, divide_list(Add, Sum, Dist). - - diff --git a/packages/CLPBN/clpbn/graphs.yap b/packages/CLPBN/clpbn/graphs.yap index e481161fb..c13afcb7b 100644 --- a/packages/CLPBN/clpbn/graphs.yap +++ b/packages/CLPBN/clpbn/graphs.yap @@ -3,13 +3,14 @@ % Just output a graph with all the variables. % -:- module(clpbn2graph, [clpbn2graph/1]). +:- module(clpbn2graph, + [clpbn2graph/1]). -:- use_module(library('clpbn/utils'), [ - check_for_hidden_vars/3]). +:- use_module(library('clpbn/utils'), + [check_for_hidden_vars/3]). -:- use_module(library('clpbn/dists'), [ - get_dist/4]). +:- use_module(library('clpbn/dists'), + [get_dist/4]). :- attribute node/0. @@ -37,7 +38,3 @@ translate_vars([V|Vs],[K|Ks]) :- clpbn:get_atts(V, [key(K)]), translate_vars(Vs,Ks). - - - - diff --git a/packages/CLPBN/clpbn/graphviz.yap b/packages/CLPBN/clpbn/graphviz.yap index 825fae3a1..4a75e4022 100644 --- a/packages/CLPBN/clpbn/graphviz.yap +++ b/packages/CLPBN/clpbn/graphviz.yap @@ -1,4 +1,6 @@ -:- module(clpbn_gviz, [clpbn2gviz/4]). + +:- module(clpbn_gviz, + [clpbn2gviz/4]). clpbn2gviz(Stream, Name, Network, Output) :- format(Stream, 'digraph ~w { diff --git a/packages/CLPBN/clpbn/ground_factors.yap b/packages/CLPBN/clpbn/ground_factors.yap index 21575cf2d..0a364408c 100644 --- a/packages/CLPBN/clpbn/ground_factors.yap +++ b/packages/CLPBN/clpbn/ground_factors.yap @@ -1,34 +1,34 @@ :- module(pfl_ground_factors, - [generate_network/5, - f/3 - ]). + [generate_network/5, + f/3 + ]). :- use_module(library(bhash), - [b_hash_new/1, - b_hash_lookup/3, - b_hash_insert/4, - b_hash_to_list/2 - ]). + [b_hash_new/1, + b_hash_lookup/3, + b_hash_insert/4, + b_hash_to_list/2 + ]). :- use_module(library(lists), - [member/2]). + [member/2]). :- use_module(library(maplist)). :- use_module(library(atts)). :- use_module(library(pfl), - [factor/6, - defined_in_factor/2, - skolem/2 - ]). + [factor/6, + defined_in_factor/2, + skolem/2 + ]). :- use_module(library(clpbn/aggregates), - [avg_factors/5]). + [avg_factors/5]). :- use_module(library(clpbn/dists), - [dist/4]). + [dist/4]). :- dynamic currently_defined/1, queue/1, f/4. diff --git a/packages/CLPBN/clpbn/hmm.yap b/packages/CLPBN/clpbn/hmm.yap index 9cea42f0e..fc6c38388 100644 --- a/packages/CLPBN/clpbn/hmm.yap +++ b/packages/CLPBN/clpbn/hmm.yap @@ -1,19 +1,20 @@ - -:- module(hmm, [init_hmm/0, - hmm_state/1, - emission/1]). +:- module(hmm, + [init_hmm/0, + hmm_state/1, + emission/1 + ]). :- ensure_loaded(library(clpbn)). :- use_module(library(lists), - [nth/3]). + [nth/3]). :- use_module(library(nbhash), - [nb_hash_new/2, - nb_hash_lookup/3, - nb_hash_insert/3 - ]). + [nb_hash_new/2, + nb_hash_lookup/3, + nb_hash_insert/3 + ]). :- ensure_loaded(library(tries)). @@ -79,5 +80,3 @@ cvt_vals([A|B],[A|B]). find_probs(Logs,Nth,Log) :- arg(Nth,Logs,Log). 
- - diff --git a/packages/CLPBN/clpbn/jt.yap b/packages/CLPBN/clpbn/jt.yap index 9a9aa9027..4a3e70f7f 100644 --- a/packages/CLPBN/clpbn/jt.yap +++ b/packages/CLPBN/clpbn/jt.yap @@ -1,89 +1,93 @@ -:- module(jt, [jt/3, - init_jt_solver/4, - run_jt_solver/3]). - +:- module(jt, + [jt/3, + init_jt_solver/4, + run_jt_solver/3 + ]). :- use_module(library(dgraphs), - [dgraph_new/1, - dgraph_add_edges/3, - dgraph_add_vertex/3, - dgraph_add_vertices/3, - dgraph_edges/2, - dgraph_vertices/2, - dgraph_transpose/2, - dgraph_to_ugraph/2, - ugraph_to_dgraph/2, - dgraph_neighbors/3 - ]). + [dgraph_new/1, + dgraph_add_edges/3, + dgraph_add_vertex/3, + dgraph_add_vertices/3, + dgraph_edges/2, + dgraph_vertices/2, + dgraph_transpose/2, + dgraph_to_ugraph/2, + ugraph_to_dgraph/2, + dgraph_neighbors/3 + ]). :- use_module(library(undgraphs), - [undgraph_new/1, - undgraph_add_edge/4, - undgraph_add_edges/3, - undgraph_del_vertex/3, - undgraph_del_vertices/3, - undgraph_vertices/2, - undgraph_edges/2, - undgraph_neighbors/3, - undgraph_edge/3, - dgraph_to_undgraph/2 - ]). + [undgraph_new/1, + undgraph_add_edge/4, + undgraph_add_edges/3, + undgraph_del_vertex/3, + undgraph_del_vertices/3, + undgraph_vertices/2, + undgraph_edges/2, + undgraph_neighbors/3, + undgraph_edge/3, + dgraph_to_undgraph/2 + ]). :- use_module(library(wundgraphs), - [wundgraph_new/1, - wundgraph_max_tree/3, - wundgraph_add_edges/3, - wundgraph_add_vertices/3, - wundgraph_to_undgraph/2 - ]). + [wundgraph_new/1, + wundgraph_max_tree/3, + wundgraph_add_edges/3, + wundgraph_add_vertices/3, + wundgraph_to_undgraph/2 + ]). :- use_module(library(rbtrees), - [rb_new/1, - rb_insert/4, - rb_lookup/3]). + [rb_new/1, + rb_insert/4, + rb_lookup/3 + ]). :- use_module(library(ordsets), - [ord_subset/2, - ord_insert/3, - ord_intersection/3, - ord_del_element/3, - ord_memberchk/2]). + [ord_subset/2, + ord_insert/3, + ord_intersection/3, + ord_del_element/3, + ord_memberchk/2 + ]). :- use_module(library(lists), - [reverse/2]). + [reverse/2]). :- use_module(library(maplist)). :- use_module(library('clpbn/aggregates'), - [check_for_agg_vars/2]). + [check_for_agg_vars/2]). :- use_module(library('clpbn/dists'), - [get_dist_domain_size/2, - get_dist_domain/2, - get_dist_matrix/5]). + [get_dist_domain_size/2, + get_dist_domain/2, + get_dist_matrix/5 + ]). :- use_module(library('clpbn/matrix_cpt_utils'), - [project_from_CPT/3, - reorder_CPT/5, - unit_CPT/2, - multiply_CPTs/4, - divide_CPTs/3, - normalise_CPT/2, - expand_CPT/4, - get_CPT_sizes/2, - reset_CPT_that_disagrees/5, - sum_out_from_CPT/4, - list_from_CPT/2]). + [project_from_CPT/3, + reorder_CPT/5, + unit_CPT/2, + multiply_CPTs/4, + divide_CPTs/3, + normalise_CPT/2, + expand_CPT/4, + get_CPT_sizes/2, + reset_CPT_that_disagrees/5, + sum_out_from_CPT/4, + list_from_CPT/2 + ]). -:- use_module(library('clpbn/display'), [ - clpbn_bind_vals/3]). +:- use_module(library('clpbn/display'), + [clpbn_bind_vals/3]). :- use_module(library('clpbn/connected'), - [ - init_influences/3, - influences/4 - ]). + [init_influences/3, + influences/4 + ]). jt([[]],_,_) :- !. @@ -171,7 +175,7 @@ add_parents([], _, Graph, Graph). add_parents([P|Parents], V, Graph0, [P-V|GraphF]) :- add_parents(Parents, V, Graph0, GraphF). 
- + % From David Page's lectures test_graph(0, [1-3,2-3,2-4,5-4,5-7,10-7,10-9,11-9,3-6,4-6,7-8,9-8,6-12,8-12], diff --git a/packages/CLPBN/clpbn/matrix_cpt_utils.yap b/packages/CLPBN/clpbn/matrix_cpt_utils.yap index da69fad2b..3c68dab8e 100644 --- a/packages/CLPBN/clpbn/matrix_cpt_utils.yap +++ b/packages/CLPBN/clpbn/matrix_cpt_utils.yap @@ -1,51 +1,53 @@ :- module(clpbn_matrix_utils, - [init_CPT/3, - project_from_CPT/3, - sum_out_from_CPT/5, - project_from_CPT/6, - reorder_CPT/5, - get_CPT_sizes/2, - normalise_CPT/2, - multiply_CPTs/4, - multiply_CPTs/6, - divide_CPTs/3, - expand_CPT/4, - reset_CPT_that_disagrees/5, - unit_CPT/2, - sum_out_from_CPT/4, - list_from_CPT/2, - multiply_factors/3, - normalise_possibly_deterministic_CPT/2, - column_from_possibly_deterministic_CPT/3, - multiply_possibly_deterministic_factors/3, - random_CPT/2, - uniform_CPT/2, - uniform_CPT_as_list/2, - normalise_CPT_on_lines/3]). + [init_CPT/3, + project_from_CPT/3, + sum_out_from_CPT/5, + project_from_CPT/6, + reorder_CPT/5, + get_CPT_sizes/2, + normalise_CPT/2, + multiply_CPTs/4, + multiply_CPTs/6, + divide_CPTs/3, + expand_CPT/4, + reset_CPT_that_disagrees/5, + unit_CPT/2, + sum_out_from_CPT/4, + list_from_CPT/2, + multiply_factors/3, + normalise_possibly_deterministic_CPT/2, + column_from_possibly_deterministic_CPT/3, + multiply_possibly_deterministic_factors/3, + random_CPT/2, + uniform_CPT/2, + uniform_CPT_as_list/2, + normalise_CPT_on_lines/3 + ]). :- use_module(library(matrix), - [matrix_new/4, - matrix_new_set/4, - matrix_select/4, - matrix_dims/2, - matrix_size/2, - matrix_shuffle/3, - matrix_expand/3, - matrix_op/4, - matrix_dims/2, - matrix_sum/2, - matrix_sum_logs_out/3, - matrix_sum_out/3, - matrix_sum_logs_out_several/3, - matrix_op_to_all/4, - matrix_to_exps2/1, - matrix_to_logs/1, - matrix_set_all_that_disagree/5, - matrix_to_list/2, - matrix_agg_lines/3, - matrix_agg_cols/3, - matrix_op_to_lines/4, - matrix_column/3]). + [matrix_new/4, + matrix_new_set/4, + matrix_select/4, + matrix_dims/2, + matrix_size/2, + matrix_shuffle/3, + matrix_expand/3, + matrix_op/4, + matrix_dims/2, + matrix_sum/2, + matrix_sum_logs_out/3, + matrix_sum_out/3, + matrix_sum_logs_out_several/3, + matrix_op_to_all/4, + matrix_to_exps2/1, + matrix_to_logs/1, + matrix_set_all_that_disagree/5, + matrix_to_list/2, + matrix_agg_lines/3, + matrix_agg_cols/3, + matrix_op_to_lines/4, + matrix_column/3 + ]). init_CPT(List, Sizes, TAB) :- matrix_new(floats, Sizes, List, TAB), diff --git a/packages/CLPBN/clpbn/numbers.yap b/packages/CLPBN/clpbn/numbers.yap index f7805a397..85aca9f77 100644 --- a/packages/CLPBN/clpbn/numbers.yap +++ b/packages/CLPBN/clpbn/numbers.yap @@ -1,17 +1,17 @@ :- module(clpbn_numbers, - [ - keys_to_numbers/7, - keys_to_numbers/9, - lists_of_keys_to_ids/6 - ]). + [keys_to_numbers/7, + keys_to_numbers/9, + lists_of_keys_to_ids/6 + ]). :- use_module(library(bhash)). :- use_module(library(maplist)). + :- use_module(library(pfl), - [skolem/2, - get_pfl_cpt/5 - ]). + [skolem/2, + get_pfl_cpt/5 + ]). % % convert key representation into numeric representation @@ -60,4 +60,3 @@ evidence_to_id(Key=Ev, I0=Ev, Hash0, Hash, I0, I) :- b_hash_insert(Hash0, Key, I0, Hash), I is I0+1. - diff --git a/packages/CLPBN/clpbn/pgrammar.yap b/packages/CLPBN/clpbn/pgrammar.yap index 6466253e4..0a54ba91a 100644 --- a/packages/CLPBN/clpbn/pgrammar.yap +++ b/packages/CLPBN/clpbn/pgrammar.yap @@ -2,30 +2,29 @@ :- style_check(all). 
-:- module(clpbn_pgrammar,[grammar_to_atts/1, - grammar_prob/2, - grammar_mle/2, - init_pcg_solver/4, - run_pcg_solver/3, - pcg_init_graph/0]). +:- module(clpbn_pgrammar, + [grammar_to_atts/1, + grammar_prob/2, + grammar_mle/2, + init_pcg_solver/4, + run_pcg_solver/3, + pcg_init_graph/0 + ]). :- load_files([library(clpbn)], - [ if(not_loaded), - silent(true) - ]). + [if(not_loaded), silent(true)]). :- use_module([library(lists)], - [ sum_list/2 - ]). + [sum_list/2]). :- use_module([library(matrix)], - [ matrix_new/3, - matrix_add/3, - matrix_get/3, - matrix_op/4, - matrix_op_to_all/4, - matrix_set_all/2 - ]). + [matrix_new/3, + matrix_add/3, + matrix_get/3, + matrix_op/4, + matrix_op_to_all/4, + matrix_set_all/2 + ]). :- op(600, xfy,'::'). diff --git a/packages/CLPBN/clpbn/table.yap b/packages/CLPBN/clpbn/table.yap index 9153c6ec9..3ebfac6c6 100644 --- a/packages/CLPBN/clpbn/table.yap +++ b/packages/CLPBN/clpbn/table.yap @@ -8,28 +8,29 @@ */ :- module(clpbn_table, - [clpbn_table/1, - clpbn_tableallargs/1, - clpbn_table_nondet/1, - clpbn_tabled_clause/2, - clpbn_tabled_clause_ref/3, - clpbn_tabled_retract/2, - clpbn_tabled_abolish/1, - clpbn_tabled_asserta/1, - clpbn_tabled_assertz/1, - clpbn_tabled_asserta/2, - clpbn_tabled_assertz/2, - clpbn_tabled_dynamic/1, - clpbn_tabled_number_of_clauses/2, - clpbn_reset_tables/0, - clpbn_reset_tables/1, - clpbn_is_tabled/1 - ]). + [clpbn_table/1, + clpbn_tableallargs/1, + clpbn_table_nondet/1, + clpbn_tabled_clause/2, + clpbn_tabled_clause_ref/3, + clpbn_tabled_retract/2, + clpbn_tabled_abolish/1, + clpbn_tabled_asserta/1, + clpbn_tabled_assertz/1, + clpbn_tabled_asserta/2, + clpbn_tabled_assertz/2, + clpbn_tabled_dynamic/1, + clpbn_tabled_number_of_clauses/2, + clpbn_reset_tables/0, + clpbn_reset_tables/1, + clpbn_is_tabled/1 + ]). :- use_module(library(bhash), - [b_hash_new/4, - b_hash_lookup/3, - b_hash_insert/4]). + [b_hash_new/4, + b_hash_lookup/3, + b_hash_insert/4 + ]). :- meta_predicate clpbn_table(:), clpbn_tabled_clause(:.?), @@ -43,14 +44,13 @@ clpbn_tabled_number_of_clauses(:,-), clpbn_is_tabled(:). -:- use_module(library(terms), [ - instantiated_term_hash/4, - variant/2 - ]). +:- use_module(library(terms), + [instantiated_term_hash/4, + variant/2 + ]). -:- use_module(evidence, [ - put_evidence/2 - ]). +:- use_module(evidence, + [put_evidence/2]). :- dynamic clpbn_table/3. @@ -364,4 +364,3 @@ clpbn_is_tabled(M:Clause, _) :- !, clpbn_is_tabled(Head, M) :- clpbn_table(Head, M, _). - diff --git a/packages/CLPBN/clpbn/topsort.yap b/packages/CLPBN/clpbn/topsort.yap index 9c40f9ad9..7beb31e05 100644 --- a/packages/CLPBN/clpbn/topsort.yap +++ b/packages/CLPBN/clpbn/topsort.yap @@ -1,11 +1,13 @@ -:- module(topsort, [topsort/2]). +:- module(topsort, + [topsort/2]). :- use_module(library(dgraphs), - [dgraph_new/1, - dgraph_add_edges/3, - dgraph_add_vertices/3, - dgraph_top_sort/2]). + [dgraph_new/1, + dgraph_add_edges/3, + dgraph_add_vertices/3, + dgraph_top_sort/2 + ]). /* simple implementation of a topological sorting algorithm */ /* graph is as Node-[Parents] */ @@ -31,4 +33,3 @@ add_edges([], _V) --> []. add_edges([P|Parents], V) --> [P-V], add_edges(Parents, V). - diff --git a/packages/CLPBN/clpbn/utils.yap b/packages/CLPBN/clpbn/utils.yap index 67f7cfe15..7ebf4d332 100644 --- a/packages/CLPBN/clpbn/utils.yap +++ b/packages/CLPBN/clpbn/utils.yap @@ -1,9 +1,11 @@ -:- module(clpbn_utils, [ - clpbn_not_var_member/2, - clpbn_var_member/2, - check_for_hidden_vars/3, - sort_vars_by_key/3, - sort_vars_by_key_and_parents/3]). 
+ +:- module(clpbn_utils, + [clpbn_not_var_member/2, + clpbn_var_member/2, + check_for_hidden_vars/3, + sort_vars_by_key/3, + sort_vars_by_key_and_parents/3 + ]). % % It may happen that variables from a previous query may still be around. @@ -113,4 +115,3 @@ transform_parents([P|Parents0],[P|NParents],KeyVarsF,KeyVars0) :- transform_parents([P|Parents0],[V|NParents],[P-V|KeyVarsF],KeyVars0) :- transform_parents(Parents0,NParents,KeyVarsF,KeyVars0). - diff --git a/packages/CLPBN/clpbn/ve.yap b/packages/CLPBN/clpbn/ve.yap index 5ef49a3d4..a5b0f8dee 100644 --- a/packages/CLPBN/clpbn/ve.yap +++ b/packages/CLPBN/clpbn/ve.yap @@ -14,55 +14,58 @@ *********************************/ -:- module(clpbn_ve, [ve/3, - check_if_ve_done/1, - init_ve_solver/4, - run_ve_solver/3, - init_ve_ground_solver/5, - run_ve_ground_solver/3, - call_ve_ground_solver/6]). +:- module(clpbn_ve, + [ve/3, + check_if_ve_done/1, + init_ve_solver/4, + run_ve_solver/3, + init_ve_ground_solver/5, + run_ve_ground_solver/3, + call_ve_ground_solver/6 + ]). :- use_module(library(atts)). :- use_module(library(ordsets), - [ord_union/3, - ord_member/2]). + [ord_union/3, + ord_member/2 + ]). -:- use_module(library('clpbn/xbif'), [clpbn2xbif/3]). +:- use_module(library('clpbn/xbif'), + [clpbn2xbif/3]). -:- use_module(library('clpbn/graphviz'), [clpbn2gviz/4]). +:- use_module(library('clpbn/graphviz'), + [clpbn2gviz/4]). :- use_module(library('clpbn/dists'), - [ - dist/4, - get_dist_domain_size/2, - get_dist_params/2, - get_dist_domain_size/2, - get_dist_matrix/5]). + [dist/4, + get_dist_domain_size/2, + get_dist_params/2, + get_dist_domain_size/2, + get_dist_matrix/5 + ]). -:- use_module(library('clpbn/utils'), [ - clpbn_not_var_member/2]). +:- use_module(library('clpbn/utils'), + [clpbn_not_var_member/2]). -:- use_module(library('clpbn/display'), [ - clpbn_bind_vals/3]). +:- use_module(library('clpbn/display'), + [clpbn_bind_vals/3]). :- use_module(library('clpbn/connected'), - [ - init_influences/3, - influences/4, - factor_influences/4 - ]). + [init_influences/3, + influences/4, + factor_influences/4 + ]). :- use_module(library(clpbn/matrix_cpt_utils)). :- use_module(library(clpbn/numbers)). :- use_module(library(lists), - [ - member/2, - append/3, - delete/3 - ]). + [member/2, + append/3, + delete/3 + ]). :- use_module(library(maplist)). @@ -71,7 +74,7 @@ :- use_module(library(clpbn/vmap)). :- use_module(library('clpbn/aggregates'), - [check_for_agg_vars/2]). + [check_for_agg_vars/2]). :- attribute size/1, all_diffs/1. @@ -474,4 +477,3 @@ multiply([F0|Fs], Vs, T) :- multiply_factor(f(_,Vs1,T1), f(_,Vs0,T0), f(_,Vs,T)) :- multiply_CPTs(T1, Vs1, T0, Vs0, T, Vs). - diff --git a/packages/CLPBN/clpbn/viterbi.yap b/packages/CLPBN/clpbn/viterbi.yap index 1dc1037f5..b71befc68 100644 --- a/packages/CLPBN/clpbn/viterbi.yap +++ b/packages/CLPBN/clpbn/viterbi.yap @@ -1,11 +1,13 @@ %:- style_check(all). -:- module(viterbi, [viterbi/4]). +:- module(viterbi, + [viterbi/4]). :- use_module(library(lists), - [nth/3, - member/2]). + [nth/3, + member/2 + ]). :- use_module(library(assoc)). @@ -17,8 +19,8 @@ :- ensure_loaded(library('clpbn/hmm')). -:- use_module(library('clpbn/dists'), [ - get_dist_params/2]). +:- use_module(library('clpbn/dists'), + [get_dist_params/2]). :- meta_predicate viterbi(:,:,+,-). @@ -231,5 +233,3 @@ trace(L1,Next,Dump,Map,Trace0,Trace) :- matrix_get(Dump,[NL,P],New), trace(NL,New,Dump,Map,[Key|Trace0],Trace). 
- - diff --git a/packages/CLPBN/clpbn/vmap.yap b/packages/CLPBN/clpbn/vmap.yap index ac0fb83df..db7605646 100644 --- a/packages/CLPBN/clpbn/vmap.yap +++ b/packages/CLPBN/clpbn/vmap.yap @@ -1,13 +1,12 @@ :- module(clpbn_vmap, - [ - init_vmap/1, % init_vmap(-Vmap) - add_to_vmap/4, % add_to_vmap(+V,-I,+VMap0,VMapF) - get_from_vmap/3, % add_to_vmap(+V,-I,+VMap0) - vars_to_numbers/4, % vars_to_numbers(+Vs,-Is,+VMap0,VMapF) - lvars_to_numbers/4, % lvars_to_numbers(+LVs,-LIs,+VMap0,VMapF) - vmap_to_list/2 - ]). + [init_vmap/1, % init_vmap(-Vmap) + add_to_vmap/4, % add_to_vmap(+V,-I,+VMap0,VMapF) + get_from_vmap/3, % add_to_vmap(+V,-I,+VMap0) + vars_to_numbers/4, % vars_to_numbers(+Vs,-Is,+VMap0,VMapF) + lvars_to_numbers/4, % lvars_to_numbers(+LVs,-LIs,+VMap0,VMapF) + vmap_to_list/2 + ]). :- use_module(library(rbtrees)). :- use_module(library(maplist)). @@ -39,6 +38,3 @@ lvars_to_numbers(LVs, LIs, VMap0, VMap) :- vmap_to_list(vmap(_,Map), L) :- rb_visit(Map, L). - - - diff --git a/packages/CLPBN/clpbn/xbif.yap b/packages/CLPBN/clpbn/xbif.yap index df010c6bf..657e350ba 100644 --- a/packages/CLPBN/clpbn/xbif.yap +++ b/packages/CLPBN/clpbn/xbif.yap @@ -2,10 +2,11 @@ % XMLBIF support for CLP(BN) % -:- module(xbif, [clpbn2xbif/3]). +:- module(xbif, + [clpbn2xbif/3]). -:- use_module(library('clpbn/dists'), [ - get_dist_domain/2]). +:- use_module(library('clpbn/dists'), + [get_dist_domain/2]). clpbn2xbif(Stream, Name, Network) :- format(Stream, ' diff --git a/packages/CLPBN/learning/em.yap b/packages/CLPBN/learning/em.yap index fd05d591d..aa2119f47 100644 --- a/packages/CLPBN/learning/em.yap +++ b/packages/CLPBN/learning/em.yap @@ -5,67 +5,67 @@ :- module(clpbn_em, [em/5]). :- reexport(library(clpbn), - [clpbn_flag/2, - clpbn_flag/3 - ]). + [clpbn_flag/2, + clpbn_flag/3 + ]). :- use_module(library(clpbn), - [clpbn_init_graph/1, - clpbn_init_solver/4, - clpbn_run_solver/3, - clpbn_finalize_solver/1, - pfl_init_solver/5, - pfl_run_solver/3, - conditional_probability/3, - clpbn_flag/2 - ]). + [clpbn_init_graph/1, + clpbn_init_solver/4, + clpbn_run_solver/3, + clpbn_finalize_solver/1, + pfl_init_solver/5, + pfl_run_solver/3, + conditional_probability/3, + clpbn_flag/2 + ]). :- use_module(library('clpbn/dists'), - [get_dist_domain_size/2, - empty_dist/2, - dist_new_table/2, - get_dist_key/2, - randomise_all_dists/0, - uniformise_all_dists/0 - ]). + [get_dist_domain_size/2, + empty_dist/2, + dist_new_table/2, + get_dist_key/2, + randomise_all_dists/0, + uniformise_all_dists/0 + ]). :- use_module(library('clpbn/ground_factors'), - [generate_network/5, - f/3 - ]). + [generate_network/5, + f/3 + ]). :- use_module(library('clpbn/utils'), - [check_for_hidden_vars/3, - sort_vars_by_key/3 - ]). + [check_for_hidden_vars/3, + sort_vars_by_key/3 + ]). :- use_module(library('clpbn/learning/learn_utils'), - [run_all/1, - clpbn_vars/2, - normalise_counts/2, - compute_likelihood/3, - soften_sample/2 - ]). + [run_all/1, + clpbn_vars/2, + normalise_counts/2, + compute_likelihood/3, + soften_sample/2 + ]). :- use_module(library(bhash), - [b_hash_new/1, - b_hash_lookup/3, - b_hash_insert/4 - ]). + [b_hash_new/1, + b_hash_lookup/3, + b_hash_insert/4 + ]). :- use_module(library(matrix), - [matrix_add/3, - matrix_to_list/2 - ]). + [matrix_add/3, + matrix_to_list/2 + ]). :- use_module(library(lists), - [member/2]). + [member/2]). :- use_module(library(rbtrees), - [rb_new/1, - rb_insert/4, - rb_lookup/3 - ]). + [rb_new/1, + rb_insert/4, + rb_lookup/3 + ]). :- use_module(library(maplist)). 
diff --git a/packages/CLPBN/pfl.yap b/packages/CLPBN/pfl.yap index 00b865d8e..7e1194568 100644 --- a/packages/CLPBN/pfl.yap +++ b/packages/CLPBN/pfl.yap @@ -4,34 +4,34 @@ % :- module(pfl, - [op(550,yfx,@), - op(550,yfx,::), - op(1150,fx,bayes), - op(1150,fx,markov), - factor/6, - skolem/2, - defined_in_factor/2, - get_pfl_cpt/5, % given id and keys, return new keys and cpt - get_pfl_parameters/2, % given id return par factor parameter - new_pfl_parameters/2, % given id set new parameters - get_first_pvariable/2, % given id get firt pvar (useful in bayesian) - get_factor_pvariable/2, % given id get any pvar - add_ground_factor/5 %add a new bayesian variable (for now) - ]). + [op(550,yfx,@), + op(550,yfx,::), + op(1150,fx,bayes), + op(1150,fx,markov), + factor/6, + skolem/2, + defined_in_factor/2, + get_pfl_cpt/5, % given id and keys, return new keys and cpt + get_pfl_parameters/2, % given id return par factor parameter + new_pfl_parameters/2, % given id set new parameters + get_first_pvariable/2, % given id get firt pvar (useful in bayesian) + get_factor_pvariable/2, % given id get any pvar + add_ground_factor/5 %add a new bayesian variable (for now) + ]). :- reexport(library(clpbn), - [clpbn_flag/2 as pfl_flag, - set_clpbn_flag/2 as set_pfl_flag, - conditional_probability/3, - pfl_init_solver/5, - pfl_run_solver/3 - ]). + [clpbn_flag/2 as pfl_flag, + set_clpbn_flag/2 as set_pfl_flag, + conditional_probability/3, + pfl_init_solver/5, + pfl_run_solver/3 + ]). :- reexport(library(clpbn/horus), - [set_solver/1]). + [set_solver/1]). :- reexport(library(clpbn/aggregates), - [avg_factors/5]). + [avg_factors/5]). :- ( % if clp(bn) has done loading, we're top-level predicate_property(set_pfl_flag(_,_), imported_from(clpbn)) @@ -47,10 +47,10 @@ :- use_module(library(atts)). :- use_module(library(lists), - [nth0/3, - append/3, - member/2 - ]). + [nth0/3, + append/3, + member/2 + ]). :- dynamic factor/6, skolem_in/2, skolem/2, preprocess/3, evidence/2, id/1. 
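The layout convention adopted by this patch puts the module name alone on the first line of each module/2 or use_module/2 directive, lists one exported or imported predicate per line, and gives the closing bracket its own line. A minimal module skeleton in that style; my_example, items/1 and run/1 are placeholder names, not part of CLP(BN) or PFL.

:- module(my_example,
    [items/1,
     run/1
    ]).

:- use_module(library(lists),
    [append/3,
     member/2
    ]).

items([a, b, c]).

% run(-X): enumerate the items plus an extra element, one at a time
run(X) :-
    items(Xs0),
    append(Xs0, [d], Xs),
    member(X, Xs).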
From 2f2f88e57190831aca9960094b9e693420c067ba Mon Sep 17 00:00:00 2001 From: Tiago Gomes Date: Mon, 17 Dec 2012 17:57:00 +0000 Subject: [PATCH 17/89] Several whitespace fixes --- packages/CLPBN/clpbn.yap | 83 ++++----- packages/CLPBN/clpbn/aggregates.yap | 50 +++--- packages/CLPBN/clpbn/bdd.yap | 210 +++++++++++----------- packages/CLPBN/clpbn/bnt.yap | 13 +- packages/CLPBN/clpbn/connected.yap | 84 ++++----- packages/CLPBN/clpbn/discrete_utils.yap | 6 +- packages/CLPBN/clpbn/display.yap | 1 + packages/CLPBN/clpbn/dists.yap | 12 +- packages/CLPBN/clpbn/evidence.yap | 3 +- packages/CLPBN/clpbn/gibbs.yap | 14 +- packages/CLPBN/clpbn/ground_factors.yap | 48 ++--- packages/CLPBN/clpbn/hmm.yap | 29 ++- packages/CLPBN/clpbn/jt.yap | 67 ++++--- packages/CLPBN/clpbn/matrix_cpt_utils.yap | 51 +++--- packages/CLPBN/clpbn/numbers.yap | 8 +- packages/CLPBN/clpbn/pgrammar.yap | 30 ++-- packages/CLPBN/clpbn/table.yap | 93 +++++----- packages/CLPBN/clpbn/utils.yap | 16 +- packages/CLPBN/clpbn/ve.yap | 116 ++++++------ packages/CLPBN/clpbn/viterbi.yap | 60 +++---- packages/CLPBN/clpbn/vmap.yap | 2 +- packages/CLPBN/learning/aleph_params.yap | 131 +++++++------- packages/CLPBN/learning/bnt_parms.yap | 40 ++--- packages/CLPBN/learning/em.yap | 65 ++++--- packages/CLPBN/learning/learn_utils.yap | 40 +++-- packages/CLPBN/learning/mle.yap | 28 +-- packages/CLPBN/pfl.yap | 33 ++-- 27 files changed, 667 insertions(+), 666 deletions(-) diff --git a/packages/CLPBN/clpbn.yap b/packages/CLPBN/clpbn.yap index a77d13d32..3fa79d7c0 100644 --- a/packages/CLPBN/clpbn.yap +++ b/packages/CLPBN/clpbn.yap @@ -5,10 +5,10 @@ set_clpbn_flag/2, clpbn_flag/3, clpbn_key/2, - clpbn_init_graph/1, + clpbn_init_graph/1, clpbn_init_solver/4, clpbn_run_solver/3, - clpbn_finalize_solver/1, + clpbn_finalize_solver/1, pfl_init_solver/5, pfl_run_solver/3, probability/2, @@ -16,7 +16,7 @@ use_parfactors/1, op(500, xfy, with) ]). - + :- use_module(library(atts)). :- use_module(library(bhash)). @@ -103,7 +103,7 @@ check_stored_evidence/2, put_evidence/2 ]). - + :- use_module('clpbn/ground_factors', [generate_network/5]). @@ -131,7 +131,7 @@ parameter_softening/1, em_solver/1, use_parfactors/1. - + :- meta_predicate probability(:,-), conditional_probability(:,:,-). @@ -199,7 +199,7 @@ store_var(El) :- get_mutable(Tail, Mutable), update_mutable(El.Tail, Mutable). store_var(El) :- - init_clpbn_vars(El). + init_clpbn_vars(El). init_clpbn_vars(El) :- create_mutable(El, Mutable), @@ -246,13 +246,14 @@ project_attributes(GVars0, _AVars0) :- generate_network(GVars0, GKeys, Keys, Factors, Evidence), b_setval(clpbn_query_variables, f(GVars0,Evidence)), simplify_query(GVars0, GVars), - ( GKeys = [] - -> + ( + GKeys = [] + -> GVars0 = [V|_], clpbn_display:put_atts(V, [posterior([],[],[],[])]) ; call_ground_solver(Solver, GVars, GKeys, Keys, Factors, Evidence) - ). + ). project_attributes(GVars, AVars) :- suppress_attribute_display(false), AVars = [_|_], @@ -266,11 +267,11 @@ project_attributes(GVars, AVars) :- (output(xbif(XBifStream)) -> clpbn2xbif(XBifStream,ve,AllVars) ; true), (output(gviz(XBifStream)) -> clpbn2gviz(XBifStream,sort,AllVars,GVars) ; true), ( - Solver = graphs + Solver = graphs -> - write_out(Solver, [[]], AllVars, DiffVars) + write_out(Solver, [[]], AllVars, DiffVars) ; - write_out(Solver, [CLPBNGVars], AllVars, DiffVars) + write_out(Solver, [CLPBNGVars], AllVars, DiffVars) ). project_attributes(_, _). @@ -334,7 +335,7 @@ write_out(jt, GVars, AVars, DiffVars) :- jt(GVars, AVars, DiffVars). 
write_out(bdd, GVars, AVars, DiffVars) :- bdd(GVars, AVars, DiffVars). -write_out(bp, _GVars, _AVars, _DiffVars) :- +write_out(bp, _GVars, _AVars, _DiffVars) :- writeln('interface not supported any longer'). write_out(gibbs, GVars, AVars, DiffVars) :- gibbs(GVars, AVars, DiffVars). @@ -453,19 +454,19 @@ bind_clpbn(T, Var, _, _, _, do_not_bind_variable([put_evidence(T,Var)])) :- bind_clpbn(T, Var, Key, Dist, Parents, []) :- var(T), get_atts(T, [key(Key1),dist(Dist1,Parents1)]), ( - bind_clpbns(Key, Dist, Parents, Key1, Dist1, Parents1) + bind_clpbns(Key, Dist, Parents, Key1, Dist1, Parents1) -> - ( - get_atts(T, [evidence(Ev1)]) -> - bind_evidence_from_extra_var(Ev1,Var) - ; - get_atts(Var, [evidence(Ev)]) -> - bind_evidence_from_extra_var(Ev,T) - ; - true - ) + ( + get_atts(T, [evidence(Ev1)]) -> + bind_evidence_from_extra_var(Ev1,Var) + ; + get_atts(Var, [evidence(Ev)]) -> + bind_evidence_from_extra_var(Ev,T) + ; + true + ) ; - fail + fail ). bind_clpbn(_, Var, _, _, _, _, []) :- use(bnt), @@ -487,7 +488,7 @@ bind_clpbn(T, Var, Key0, _, _, _, []) :- ( Key = Key0 -> true ; - % let us not loose whatever we had. + % let us not loose whatever we had. put_evidence(T,Var) ). @@ -497,7 +498,7 @@ fresh_attvar(Var, NVar) :- % I will now allow two CLPBN variables to be bound together. %bind_clpbns(Key, Dist, Parents, Key, Dist, Parents). -bind_clpbns(Key, Dist, Parents, Key1, Dist1, Parents1) :- +bind_clpbns(Key, Dist, Parents, Key1, Dist1, Parents1) :- Key == Key1, !, get_dist(Dist,_Type,_Domain,_Table), get_dist(Dist1,_Type1,_Domain1,_Table1), @@ -526,14 +527,14 @@ bind_evidence_from_extra_var(Ev1,Var) :- bind_evidence_from_extra_var(Ev1,Var) :- put_atts(Var, [evidence(Ev1)]). -user:term_expansion((A :- {}), ( :- true )) :- !, % evidence +user:term_expansion((A :- {}), ( :- true )) :- !, % evidence prolog_load_context(module, M), store_evidence(M:A). clpbn_key(Var,Key) :- get_atts(Var, [key(Key)]). - - + + % % only useful for probabilistic context free grammars % @@ -556,19 +557,19 @@ clpbn_init_solver(LVs, Vs0, VarsWithUnboundKeys, State) :- clpbn_init_solver(gibbs, LVs, Vs0, VarsWithUnboundKeys, State) :- init_gibbs_solver(LVs, Vs0, VarsWithUnboundKeys, State). - + clpbn_init_solver(ve, LVs, Vs0, VarsWithUnboundKeys, State) :- init_ve_solver(LVs, Vs0, VarsWithUnboundKeys, State). - + clpbn_init_solver(bp, LVs, Vs0, VarsWithUnboundKeys, State) :- init_horus_ground_solver(LVs, Vs0, VarsWithUnboundKeys, State). - + clpbn_init_solver(jt, LVs, Vs0, VarsWithUnboundKeys, State) :- init_jt_solver(LVs, Vs0, VarsWithUnboundKeys, State). - + clpbn_init_solver(bdd, LVs, Vs0, VarsWithUnboundKeys, State) :- init_bdd_solver(LVs, Vs0, VarsWithUnboundKeys, State). - + clpbn_init_solver(pcg, LVs, Vs0, VarsWithUnboundKeys, State) :- init_pcg_solver(LVs, Vs0, VarsWithUnboundKeys, State). @@ -598,7 +599,7 @@ clpbn_run_solver(bdd, LVs, LPs, State) :- clpbn_run_solver(pcg, LVs, LPs, State) :- run_pcg_solver(LVs, LPs, State). - + clpbn_finalize_solver(State) :- solver(bp), !, functor(State, _, Last), @@ -622,22 +623,22 @@ pfl_init_solver(QueryKeys, AllKeys, Factors, Evidence, State, bdd) :- !, init_bdd_ground_solver(QueryKeys, AllKeys, Factors, Evidence, State). pfl_init_solver(QueryKeys, AllKeys, Factors, Evidence, State, hve) :- !, - clpbn_horus:set_horus_flag(ground_solver, ve), + clpbn_horus:set_horus_flag(ground_solver, ve), init_horus_ground_solver(QueryKeys, AllKeys, Factors, Evidence, State). 
pfl_init_solver(QueryKeys, AllKeys, Factors, Evidence, State, bp) :- !, - clpbn_horus:set_horus_flag(ground_solver, bp), + clpbn_horus:set_horus_flag(ground_solver, bp), init_horus_ground_solver(QueryKeys, AllKeys, Factors, Evidence, State). pfl_init_solver(QueryKeys, AllKeys, Factors, Evidence, State, cbp) :- !, - clpbn_horus:set_horus_flag(ground_solver, cbp), + clpbn_horus:set_horus_flag(ground_solver, cbp), init_horus_ground_solver(QueryKeys, AllKeys, Factors, Evidence, State). - + pfl_init_solver(_, _, _, _, _, Solver) :- write('Error: solver `'), write(Solver), write('\' cannot be used for learning'). - + pfl_run_solver(LVs, LPs, State) :- solver(Solver), pfl_run_solver(LVs, LPs, State, Solver). @@ -653,7 +654,7 @@ pfl_run_solver(LVs, LPs, State, hve) :- !, pfl_run_solver(LVs, LPs, State, bp) :- !, run_horus_ground_solver(LVs, LPs, State). - + pfl_run_solver(LVs, LPs, State, cbp) :- !, run_horus_ground_solver(LVs, LPs, State). diff --git a/packages/CLPBN/clpbn/aggregates.yap b/packages/CLPBN/clpbn/aggregates.yap index 5fa7d0718..71e08795b 100644 --- a/packages/CLPBN/clpbn/aggregates.yap +++ b/packages/CLPBN/clpbn/aggregates.yap @@ -1,4 +1,4 @@ - % +% % generate explicit CPTs % :- module(clpbn_aggregates, @@ -63,9 +63,9 @@ simplify_dist(_, _, _, _, Vs0, Vs0). % avg_factors(Key, Parents, _Smoothing, NewParents, Id) :- - % we keep ev as a list - skolem(Key, Domain), - avg_table(Parents, Parents, Domain, Key, 0, 1.0, NewParents, [], _ExtraSkolems, Id). + % we keep ev as a list + skolem(Key, Domain), + avg_table(Parents, Parents, Domain, Key, 0, 1.0, NewParents, [], _ExtraSkolems, Id). % there are 4 cases: % no evidence on top node @@ -73,17 +73,17 @@ avg_factors(Key, Parents, _Smoothing, NewParents, Id) :- % evidence on top node *entailed* by values of parents (so there is no real connection) % evidence incompatible with parents query_evidence(Key, EvHash, MAT0, MAT, NewParents0, NewParents, Vs, IVs, NewVs) :- - b_hash_lookup(Key, Ev, EvHash), !, - normalise_CPT_on_lines(MAT0, MAT1, L1), - check_consistency(L1, Ev, MAT0, MAT1, L1, MAT, NewParents0, NewParents, Vs, IVs, NewVs). + b_hash_lookup(Key, Ev, EvHash), !, + normalise_CPT_on_lines(MAT0, MAT1, L1), + check_consistency(L1, Ev, MAT0, MAT1, L1, MAT, NewParents0, NewParents, Vs, IVs, NewVs). query_evidence(_, _, MAT, MAT, NewParents, NewParents, _, Vs, Vs). hash_ev(K=V, Es0, Es) :- - b_hash_insert(Es0, K, V, Es). + b_hash_insert(Es0, K, V, Es). find_ev(Ev, Key, RemKeys, RemKeys, Ev0, EvF) :- - b_hash_lookup(Key, V, Ev), !, - EvF is Ev0+V. + b_hash_lookup(Key, V, Ev), !, + EvF is Ev0+V. find_ev(_Evs, Key, RemKeys, [Key|RemKeys], Ev, Ev). @@ -118,7 +118,7 @@ avg_table(Vars, OVars, Domain, Key, TotEvidence, Softness, [V1,V2], Vs, [V1,V2|N average_cpt([V1,V2], OVars, Domain, TotEvidence, Softness, CPT), matrix_to_list(CPT, Mat), add_ground_factor(bayes, Domain, [Key,V1,V2], Mat, Id). - + intermediate_table(1,_,[V],V, _, _, I, I, Vs, Vs) :- !. intermediate_table(2, Op, [V1,V2], V, Key, Softness, I0, If, Vs, Vs) :- !, If is I0+1, @@ -184,11 +184,11 @@ build_avg_table(Vars, OVars, Domain, Key, TotEvidence, Softness, CPT, [V1,V2], V build_intermediate_table(LL1, sum(Min,Max), L1, V1, Key, 1.0, 0, I1, Vs, Vs1), build_intermediate_table(LL2, sum(Min,Max), L2, V2, Key, 1.0, I1, _, Vs1, NewVs), average_cpt([V1,V2], OVars, Domain, TotEvidence, Softness, CPT). 
- + build_max_table(Vars, Domain, Softness, p(Domain, CPT, Vars), Vs, Vs) :- length(Domain, SDomain), int_power(Vars, SDomain, 1, TabSize), - TabSize =< 16, + TabSize =< 16, /* case gmp is not there !! */ TabSize > 0, !, max_cpt(Vars, Domain, Softness, CPT). @@ -200,11 +200,11 @@ build_max_table(Vars, Domain, Softness, p(Domain, CPT, [V1,V2]), Vs, [V1,V2|NewV build_intermediate_table(LL1, max(Domain,CPT), L1, V1, Key, 1.0, 0, I1, Vs, Vs1), build_intermediate_table(LL2, max(Domain,CPT), L2, V2, Key, 1.0, I1, _, Vs1, NewVs), max_cpt([V1,V2], Domain, Softness, CPT). - + build_min_table(Vars, Domain, Softness, p(Domain, CPT, Vars), Vs, Vs) :- length(Domain, SDomain), int_power(Vars, SDomain, 1, TabSize), - TabSize =< 16, + TabSize =< 16, /* case gmp is not there !! */ TabSize > 0, !, min_cpt(Vars, Domain, Softness, CPT). @@ -216,7 +216,7 @@ build_min_table(Vars, Domain, Softness, p(Domain, CPT, [V1,V2]), Vs, [V1,V2|NewV build_intermediate_table(LL1, min(Domain,CPT), L1, V1, Key, 1.0, 0, I1, Vs, Vs1), build_intermediate_table(LL2, min(Domain,CPT), L2, V2, Key, 1.0, I1, _, Vs1, NewVs), min_cpt([V1,V2], Domain, Softness, CPT). - + int_power([], _, TabSize, TabSize). int_power([_|L], X, I0, TabSize) :- I is I0*X, @@ -273,19 +273,21 @@ include_qevidence(_, MAT, MAT, NewParents, NewParents, _, Vs, Vs). check_consistency(L1, Ev, MAT0, MAT1, L1, MAT, NewParents0, NewParents, Vs, IVs, NewVs) :- sumlist(L1, Tot), nth0(Ev, L1, Val), - (Val == Tot -> - MAT1 = MAT, - NewParents = [], - Vs = NewVs + ( + Val == Tot + -> + MAT1 = MAT, + NewParents = [], + Vs = NewVs ; - Val == 0.0 -> + Val == 0.0 -> throw(error(domain_error(incompatible_evidence),evidence(Ev))) - ; + ; MAT0 = MAT, NewParents = NewParents0, IVs = NewVs ). - + % % generate actual table, instead of trusting the solver @@ -376,6 +378,6 @@ get_vdist_size(V, Sz) :- clpbn:get_atts(V, [dist(Dist,_)]), get_dist_domain_size(Dist, Sz). get_vdist_size(V, Sz) :- - skolem(V, Dom), + skolem(V, Dom), length(Dom, Sz). diff --git a/packages/CLPBN/clpbn/bdd.yap b/packages/CLPBN/clpbn/bdd.yap index 3040a754f..a7bc3abd7 100644 --- a/packages/CLPBN/clpbn/bdd.yap +++ b/packages/CLPBN/clpbn/bdd.yap @@ -93,37 +93,37 @@ run_bdd_ground_solver(_QueryVars, Solutions, bdd(GKeys, Keys, Factors, Evidence) check_if_bdd_done(_Var). call_bdd_ground_solver(QueryVars, QueryKeys, AllKeys, Factors, Evidence, Output) :- - call_bdd_ground_solver_for_probabilities([QueryKeys], AllKeys, Factors, Evidence, Solutions), - clpbn_bind_vals([QueryVars], Solutions, Output). + call_bdd_ground_solver_for_probabilities([QueryKeys], AllKeys, Factors, Evidence, Solutions), + clpbn_bind_vals([QueryVars], Solutions, Output). call_bdd_ground_solver_for_probabilities(QueryKeys, AllKeys, Factors, Evidence, Solutions) :- - keys_to_numbers(AllKeys, Factors, Evidence, Hash4, Id4, FactorIds, EvidenceIds), - init_bdd(FactorIds, EvidenceIds, Hash4, Id4, BDD), - run_solver(QueryKeys, Solutions, BDD). + keys_to_numbers(AllKeys, Factors, Evidence, Hash4, Id4, FactorIds, EvidenceIds), + init_bdd(FactorIds, EvidenceIds, Hash4, Id4, BDD), + run_solver(QueryKeys, Solutions, BDD). 
init_bdd(FactorIds, EvidenceIds, Hash, Id, bdd(Term, Leaves, Tops, Hash, Id)) :- - sort_keys(FactorIds, AllVars, Leaves), - rb_new(OrderVs0), - foldl2(order_key, AllVars, 0, _, OrderVs0, OrderVs), - rb_new(Vars0), - rb_new(Pars0), - rb_new(Ev0), - foldl(evtotree,EvidenceIds,Ev0,Ev), - rb_new(Fs0), - foldl(ftotree,FactorIds,Fs0,Fs), - init_tops(Leaves,Tops), - get_keys_info(AllVars, Ev, Fs, OrderVs, Vars0, _Vars, Pars0, _Pars, Leaves, Tops, Term, []). + sort_keys(FactorIds, AllVars, Leaves), + rb_new(OrderVs0), + foldl2(order_key, AllVars, 0, _, OrderVs0, OrderVs), + rb_new(Vars0), + rb_new(Pars0), + rb_new(Ev0), + foldl(evtotree,EvidenceIds,Ev0,Ev), + rb_new(Fs0), + foldl(ftotree,FactorIds,Fs0,Fs), + init_tops(Leaves,Tops), + get_keys_info(AllVars, Ev, Fs, OrderVs, Vars0, _Vars, Pars0, _Pars, Leaves, Tops, Term, []). order_key( Id, I0, I, OrderVs0, OrderVs) :- - I is I0+1, - rb_insert(OrderVs0, Id, I0, OrderVs). + I is I0+1, + rb_insert(OrderVs0, Id, I0, OrderVs). evtotree(K=V,Ev0,Ev) :- - rb_insert(Ev0, K, V, Ev). + rb_insert(Ev0, K, V, Ev). ftotree(F, Fs0, Fs) :- - F = f([K|_Parents],_,_,_), - rb_insert(Fs0, K, F, Fs). + F = f([K|_Parents],_,_,_), + rb_insert(Fs0, K, F, Fs). bdd([[]],_,_) :- !. bdd([QueryVars], AllVars, AllDiffs) :- @@ -155,59 +155,59 @@ init_tops([_|Leaves],[_|Tops]) :- init_tops(Leaves,Tops). sort_keys(AllFs, AllVars, Leaves) :- - dgraph_new(Graph0), - foldl(add_node, AllFs, Graph0, Graph), - dgraph_leaves(Graph, Leaves), - dgraph_top_sort(Graph, AllVars). + dgraph_new(Graph0), + foldl(add_node, AllFs, Graph0, Graph), + dgraph_leaves(Graph, Leaves), + dgraph_top_sort(Graph, AllVars). add_node(f([K|Parents],_,_,_), Graph0, Graph) :- - dgraph_add_vertex(Graph0, K, Graph1), - foldl(add_edge(K), Parents, Graph1, Graph). - + dgraph_add_vertex(Graph0, K, Graph1), + foldl(add_edge(K), Parents, Graph1, Graph). + add_edge(K, K0, Graph0, Graph) :- - dgraph_add_edge(Graph0, K0, K, Graph). + dgraph_add_edge(Graph0, K0, K, Graph). sort_vars(AllVars0, AllVars, Leaves) :- - dgraph_new(Graph0), - build_graph(AllVars0, Graph0, Graph), - dgraph_leaves(Graph, Leaves), - dgraph_top_sort(Graph, AllVars). + dgraph_new(Graph0), + build_graph(AllVars0, Graph0, Graph), + dgraph_leaves(Graph, Leaves), + dgraph_top_sort(Graph, AllVars). build_graph([], Graph, Graph). build_graph([V|AllVars0], Graph0, Graph) :- - clpbn:get_atts(V, [dist(_DistId, Parents)]), !, - dgraph_add_vertex(Graph0, V, Graph1), - add_parents(Parents, V, Graph1, GraphI), - build_graph(AllVars0, GraphI, Graph). + clpbn:get_atts(V, [dist(_DistId, Parents)]), !, + dgraph_add_vertex(Graph0, V, Graph1), + add_parents(Parents, V, Graph1, GraphI), + build_graph(AllVars0, GraphI, Graph). build_graph(_V.AllVars0, Graph0, Graph) :- - build_graph(AllVars0, Graph0, Graph). + build_graph(AllVars0, Graph0, Graph). add_parents([], _V, Graph, Graph). add_parents([V0|Parents], V, Graph0, GraphF) :- - dgraph_add_edge(Graph0, V0, V, GraphI), - add_parents(Parents, V, GraphI, GraphF). + dgraph_add_edge(Graph0, V0, V, GraphI), + add_parents(Parents, V, GraphI, GraphF). get_keys_info([], _, _, _, Vs, Vs, Ps, Ps, _, _) --> []. get_keys_info([V|MoreVs], Evs, Fs, OrderVs, Vs, VsF, Ps, PsF, Lvs, Outs) --> - { rb_lookup(V, F, Fs) }, !, - { F = f([V|Parents], _, _, DistId) }, + { rb_lookup(V, F, Fs) }, !, + { F = f([V|Parents], _, _, DistId) }, %{writeln(v:DistId:Parents)}, - [DIST], - { get_key_info(V, F, Fs, Evs, OrderVs, DistId, Parents, Vs, Vs2, Ps, Ps1, Lvs, Outs, DIST) }, - get_keys_info(MoreVs, Evs, Fs, OrderVs, Vs2, VsF, Ps1, PsF, Lvs, Outs). 
+ [DIST], + { get_key_info(V, F, Fs, Evs, OrderVs, DistId, Parents, Vs, Vs2, Ps, Ps1, Lvs, Outs, DIST) }, + get_keys_info(MoreVs, Evs, Fs, OrderVs, Vs2, VsF, Ps1, PsF, Lvs, Outs). get_key_info(V, F, Fs, Evs, OrderVs, DistId, Parents0, Vs, Vs2, Ps, Ps1, Lvs, Outs, DIST) :- - reorder_keys(Parents0, OrderVs, Parents, Map), - check_key_p(DistId, F, Map, Parms, _ParmVars, Ps, Ps1), - unbound_parms(Parms, ParmVars), - F = f(_,[Size|_],_,_), - check_key(V, Size, DIST, Vs, Vs1), - DIST = info(V, Tree, Ev, Values, Formula, ParmVars, Parms), - % get a list of form [[P00,P01], [P10,P11], [P20,P21]] - foldl(get_key_parent(Fs), Parents, PVars, Vs1, Vs2), - cross_product(Values, Ev, PVars, ParmVars, Formula0), + reorder_keys(Parents0, OrderVs, Parents, Map), + check_key_p(DistId, F, Map, Parms, _ParmVars, Ps, Ps1), + unbound_parms(Parms, ParmVars), + F = f(_,[Size|_],_,_), + check_key(V, Size, DIST, Vs, Vs1), + DIST = info(V, Tree, Ev, Values, Formula, ParmVars, Parms), + % get a list of form [[P00,P01], [P10,P11], [P20,P21]] + foldl(get_key_parent(Fs), Parents, PVars, Vs1, Vs2), + cross_product(Values, Ev, PVars, ParmVars, Formula0), % (numbervars(Formula0,0,_),writeln(formula0:Ev:Formula0), fail ; true), - get_key_evidence(V, Evs, DistId, Tree, Ev, Formula0, Formula, Lvs, Outs). + get_key_evidence(V, Evs, DistId, Tree, Ev, Formula0, Formula, Lvs, Outs). % (numbervars(Formula,0,_),writeln(formula:Formula), fail ; true). get_vars_info([], Vs, Vs, Ps, Ps, _, _) --> []. @@ -215,7 +215,7 @@ get_vars_info([V|MoreVs], Vs, VsF, Ps, PsF, Lvs, Outs) --> { clpbn:get_atts(V, [dist(DistId, Parents)]) }, !, %{writeln(v:DistId:Parents)}, [DIST], - { get_var_info(V, DistId, Parents, Vs, Vs2, Ps, Ps1, Lvs, Outs, DIST) }, + { get_var_info(V, DistId, Parents, Vs, Vs2, Ps, Ps1, Lvs, Outs, DIST) }, get_vars_info(MoreVs, Vs2, VsF, Ps1, PsF, Lvs, Outs). get_vars_info([_|MoreVs], Vs0, VsF, Ps0, PsF, VarsInfo, Lvs, Outs) :- get_vars_info(MoreVs, Vs0, VsF, Ps0, PsF, VarsInfo, Lvs, Outs). @@ -298,17 +298,17 @@ generate_3tree(OUT, [[P0,P1,P2]], I00, I10, I20, IR0, N0, N1, N2, R, Exp, _ExpF) IR is IR0-1, ( satisf(I00+1, I10, I20, IR, N0, N1, N2, R, Exp) -> L0 = [P0|L1] - ; + ; L0 = L1 ), ( satisf(I00, I10+1, I20, IR, N0, N1, N2, R, Exp) -> L1 = [P1|L2] - ; + ; L1 = L2 ), ( satisf(I00, I10, I20+1, IR, N0, N1, N2, R, Exp) -> L2 = [P2] - ; + ; L2 = [] ), to_disj(L0, OUT). @@ -316,23 +316,23 @@ generate_3tree(OUT, [[P0,P1,P2]|Ps], I00, I10, I20, IR0, N0, N1, N2, R, Exp, Exp IR is IR0-1, ( satisf(I00+1, I10, I20, IR, N0, N1, N2, R, Exp) -> I0 is I00+1, generate_3tree(O0, Ps, I0, I10, I20, IR, N0, N1, N2, R, Exp, ExpF) - -> + -> L0 = [P0*O0|L1] - ; + ; L0 = L1 ), ( satisf(I00, I10+1, I20, IR0, N0, N1, N2, R, Exp) -> I1 is I10+1, generate_3tree(O1, Ps, I00, I1, I20, IR, N0, N1, N2, R, Exp, ExpF) - -> + -> L1 = [P1*O1|L2] - ; + ; L1 = L2 ), ( satisf(I00, I10, I20+1, IR0, N0, N1, N2, R, Exp) -> I2 is I20+1, generate_3tree(O2, Ps, I00, I10, I2, IR, N0, N1, N2, R, Exp, ExpF) - -> + -> L2 = [P2*O2] - ; + ; L2 = [] ), to_disj(L0, OUT). 
@@ -384,12 +384,12 @@ avg_exp([Val|Vals], PVars, I0, P0, Max, Size, Im, IM, HI, HF, O) :- (Vals = [] -> O=O1 ; O = Val*O1+not(Val)*O2 ), Im1 is max(0, Im-I0), IM1 is IM-I0, - ( IM1 < 0 -> O1 = 0, H2 = HI; /* we have exceed maximum */ - Im1 > Max -> O1 = 0, H2 = HI; /* we cannot make to minimum */ - Im1 = 0, IM1 > Max -> O1 = 1, H2 = HI; /* we cannot exceed maximum */ + ( IM1 < 0 -> O1 = 0, H2 = HI ; /* we have exceed maximum */ + Im1 > Max -> O1 = 0, H2 = HI ; /* we cannot make to minimum */ + Im1 = 0, IM1 > Max -> O1 = 1, H2 = HI ; /* we cannot exceed maximum */ P is P0+1, avg_tree(PVars, P, Max, Im1, IM1, Size, O1, HI, H2) - ), + ), I is I0+1, avg_exp(Vals, PVars, I, P0, Max, Size, Im, IM, H2, HF, O2). @@ -437,11 +437,11 @@ bin_sums(Vs, Sums, F) :- vs_to_sums([], []). vs_to_sums([V|Vs], [Sum|Sums0]) :- - Sum =.. [sum|V], - vs_to_sums(Vs, Sums0). + Sum =.. [sum|V], + vs_to_sums(Vs, Sums0). bin_sums([Sum], Sum) --> !. -bin_sums(LSums, Sum) --> +bin_sums(LSums, Sum) --> { halve(LSums, Sums1, Sums2) }, bin_sums(Sums1, Sum1), bin_sums(Sums2, Sum2), @@ -458,14 +458,14 @@ head(Take, [H|L], [H|Sums1], Sum2) :- head(Take1, L, Sums1, Sum2). sum(Sum1, Sum2, Sum) --> - { functor(Sum1, _, M1), - functor(Sum2, _, M2), - Max is M1+M2-2, - Max1 is Max+1, - Max0 is M2-1, - functor(Sum, sum, Max1), - Sum1 =.. [_|PVals] }, - expand_sums(PVals, 0, Max0, Max1, M2, Sum2, Sum). + { functor(Sum1, _, M1), + functor(Sum2, _, M2), + Max is M1+M2-2, + Max1 is Max+1, + Max0 is M2-1, + functor(Sum, sum, Max1), + Sum1 =.. [_|PVals] }, + expand_sums(PVals, 0, Max0, Max1, M2, Sum2, Sum). % % bottom up step by step @@ -509,12 +509,12 @@ expand_sums(Parents, I0, Max0, Max, Size, Sums, Prot, NewSums, [O=SUM*1|F], F0) arg(I, NewSums, O), sum_all(Parents, 0, I0, Max0, Sums, List), to_disj(List, SUM), - expand_sums(Parents, I, Max0, Max, Size, Sums, Prot, NewSums, F, F0). + expand_sums(Parents, I, Max0, Max, Size, Sums, Prot, NewSums, F, F0). expand_sums(Parents, I0, Max0, Max, Size, Sums, Prot, NewSums, F, F0) :- I is I0+1, arg(I, Sums, O), arg(I, NewSums, O), - expand_sums(Parents, I, Max0, Max, Size, Sums, Prot, NewSums, F, F0). + expand_sums(Parents, I, Max0, Max, Size, Sums, Prot, NewSums, F, F0). % %inner loop: find all parents that contribute to A_ji, @@ -538,12 +538,12 @@ gen_arg(J, Sums, Max, S0) :- gen_arg(0, Max, J, Sums, S0). gen_arg(Max, Max, J, Sums, S0) :- !, - I is Max+1, - arg(I, Sums, A), + I is Max+1, + arg(I, Sums, A), ( Max = J -> S0 = A ; S0 = not(A)). gen_arg(I0, Max, J, Sums, S) :- - I is I0+1, - arg(I, Sums, A), + I is I0+1, + arg(I, Sums, A), ( I0 = J -> S = A*S0 ; S = not(A)*S0), gen_arg(I, Max, J, Sums, S0). @@ -692,9 +692,9 @@ get_parents(V.Parents, Values.PVars, Vs0, Vs) :- get_parents(Parents, PVars, Vs1, Vs). get_key_parent(Fs, V, Values, Vs0, Vs) :- - INFO = info(V, _Parent, _Ev, Values, _, _, _), - rb_lookup(V, f(_, [Size|_], _, _), Fs), - check_key(V, Size, INFO, Vs0, Vs). + INFO = info(V, _Parent, _Ev, Values, _, _, _), + rb_lookup(V, f(_, [Size|_], _, _), Fs), + check_key(V, Size, INFO, Vs0, Vs). check_key(V, _, INFO, Vs, Vs) :- rb_lookup(V, INFO, Vs), !. @@ -809,20 +809,20 @@ skim_for_theta([[P|Other]|More], not(P)*Ps, [Other|Left], New ) :- skim_for_theta(More, Ps, Left, New ). get_key_evidence(V, Evs, _, Tree, Ev, F0, F, Leaves, Finals) :- - rb_lookup(V, Pos, Evs), !, - zero_pos(0, Pos, Ev), - insert_output(Leaves, V, Finals, Tree, Outs, SendOut), - get_outs(F0, F, SendOut, Outs). 
+ rb_lookup(V, Pos, Evs), !, + zero_pos(0, Pos, Ev), + insert_output(Leaves, V, Finals, Tree, Outs, SendOut), + get_outs(F0, F, SendOut, Outs). % hidden deterministic node, can be removed. %% get_key_evidence(V, _, DistId, _Tree, Ev, F0, [], _Leaves, _Finals) :- -%% deterministic(V, DistId), +%% deterministic(V, DistId), %% !, %% one_list(Ev), %% eval_outs(F0). %% no evidence !!! get_key_evidence(V, _, _, Tree, _Values, F0, F1, Leaves, Finals) :- - insert_output(Leaves, V, Finals, Tree, Outs, SendOut), - get_outs(F0, F1, SendOut, Outs). + insert_output(Leaves, V, Finals, Tree, Outs, SendOut), + get_outs(F0, F1, SendOut, Outs). get_evidence(V, Tree, Ev, F0, F, Leaves, Finals) :- clpbn:get_atts(V, [evidence(Pos)]), !, @@ -846,7 +846,7 @@ zero_pos(_, _Pos, []). zero_pos(Pos, Pos, [1|Values]) :- !, I is Pos+1, zero_pos(I, Pos, Values). -zero_pos(I0, Pos, [0|Values]) :- +zero_pos(I0, Pos, [0|Values]) :- I is I0+1, zero_pos(I, Pos, Values). @@ -863,7 +863,7 @@ insert_output(_.Leaves, V, _.Finals, Top, Outs, SendOut) :- insert_output(Leaves, V, Finals, Top, Outs, SendOut). -get_outs([V=F], [V=NF|End], End, V) :- !, +get_outs([V=F], [V=NF|End], End, V) :- !, % writeln(f0:F), simplify_exp(F,NF). get_outs([(V=F)|Outs], [(V=NF)|NOuts], End, (F0 + V)) :- @@ -878,11 +878,11 @@ eval_outs([(V=F)|Outs]) :- eval_outs(Outs). run_solver(Qs, LLPs, bdd(Term, Leaves, Nodes, Hash, Id)) :- - lists_of_keys_to_ids(Qs, QIds, Hash, _, Id, _), - findall(LPs, - (member(Q, QIds), - run_bdd_solver([Q],LPs,bdd(Term,Leaves,Nodes))), - LLPs). + lists_of_keys_to_ids(Qs, QIds, Hash, _, Id, _), + findall(LPs, + (member(Q, QIds), + run_bdd_solver([Q],LPs,bdd(Term,Leaves,Nodes))), + LLPs). run_bdd_solver([Vs], LPs, bdd(Term, _Leaves, Nodes)) :- build_out_node(Nodes, Node), @@ -988,7 +988,7 @@ all_cnfs([info(_V, Tree, Ev, Values, Formula, ParmVars, Parms)|Term], BindsF, IV v_in(V, [V0|_]) :- V == V0, !. v_in(V, [_|Vs]) :- - v_in(V, Vs). + v_in(V, Vs). all_indicators(Values) --> { values_to_disj(Values, Disj) }, @@ -1017,7 +1017,7 @@ parameters([(V0=Disj*_I0)|Formula], Tree) --> parameters(Formula, Tree). % transform V0<- A*B+C*(D+not(E)) -% [V0+not(A)+not(B),V0+not(C)+not(D),V0+not(C)+E] +% [V0+not(A)+not(B),V0+not(C)+not(D),V0+not(C)+E] conj(Disj, V0) --> { conj2(Disj, [[V0]], LVs) }, to_disjs(LVs). diff --git a/packages/CLPBN/clpbn/bnt.yap b/packages/CLPBN/clpbn/bnt.yap index 4f8dddf49..9fd0d8d65 100644 --- a/packages/CLPBN/clpbn/bnt.yap +++ b/packages/CLPBN/clpbn/bnt.yap @@ -154,7 +154,7 @@ extract_kvars([V|AllVars],[N-i(V,Parents)|KVars]) :- extract_kvars(AllVars,KVars). split_tied_vars([],[],[]). -split_tied_vars([N-i(V,Par)|More],[N-g(Vs,Ns,Es)|TVars],[N|LNs]) :- +split_tied_vars([N-i(V,Par)|More],[N-g(Vs,Ns,Es)|TVars],[N|LNs]) :- get_pars(Par,N,V,NPs,[],Es0,Es), get_tied(More,N,Vs,[V],Ns,NPs,Es,Es0,SVars), split_tied_vars(SVars,TVars,LNs). @@ -206,7 +206,7 @@ extract_graph(AllVars, Graph) :- dgraph_add_vertices(Graph0, AllVars, Graph1), get_edges(AllVars,Edges), dgraph_add_edges(Graph1, Edges, Graph). - + get_edges([],[]). get_edges([V|AllVars],Edges) :- clpbn:get_atts(V, [dist(_,Parents)]), @@ -224,13 +224,13 @@ number_graph([V|SortedGraph], [I|Is], I0, IF) :- % clpbn:get_atts(V,[key(K)]), % write(I:K),nl, number_graph(SortedGraph, Is, I, IF). - + init_bnet(propositional, SortedGraph, NumberedGraph, Size, []) :- build_dag(SortedGraph, Size), init_discrete_nodes(SortedGraph, Size), bnet <-- mk_bnet(dag, node_sizes, \discrete, discrete_nodes), dump_cpts(SortedGraph, NumberedGraph). 
- + init_bnet(tied, SortedGraph, NumberedGraph, Size, Representatives) :- build_dag(SortedGraph, Size), init_discrete_nodes(SortedGraph, Size), @@ -382,7 +382,7 @@ add_evidence(Graph, Size, Is) :- mk_evidence(Graph, Is, LN), matlab_initialized_cells( 1, Size, LN, evidence), [engine_ev, loglik] <-- enter_evidence(engine, evidence). - + mk_evidence([], [], []). mk_evidence([V|L], [I|Is], [ar(1,I,EvVal1)|LN]) :- clpbn:get_atts(V, [evidence(EvVal)]), !, @@ -409,7 +409,7 @@ marginalize([Vs], SortedVars, NumberedVars,Ps) :- length(SortedVars,L), cycle_values(Den, Ev, Vs, L, Vals, Ps). -cycle_values(_D, _Ev, _Vs, _Size, [], []). +cycle_values(_D, _Ev, _Vs, _Size, [], []). cycle_values(Den,Ev,Vs,Size,[H|T],[HP|TP]):- mk_evidence_query(Vs, H, EvQuery), @@ -428,4 +428,3 @@ mk_evidence_query([V|L], [H|T], [ar(1,Pos,El)|LN]) :- nth(El,D,H), mk_evidence_query(L, T, LN). - diff --git a/packages/CLPBN/clpbn/connected.yap b/packages/CLPBN/clpbn/connected.yap index 773511b8f..e71d90bee 100644 --- a/packages/CLPBN/clpbn/connected.yap +++ b/packages/CLPBN/clpbn/connected.yap @@ -61,13 +61,13 @@ build_edges([P|Parents], V, [P-V|Edges]) :- % search for the set of variables that influence V influences(Vs, G, RG, Vars) :- - influences(Vs, [], G, RG, Vars). + influences(Vs, [], G, RG, Vars). % search for the set of variables that influence V influences(Vs, Evs, G, RG, Vars) :- - rb_new(Visited0), - foldl(influence(Evs, G, RG), Vs, Visited0, Visited), - all_top(Visited, Evs, Vars). + rb_new(Visited0), + foldl(influence(Evs, G, RG), Vs, Visited0, Visited), + all_top(Visited, Evs, Vars). influence(_, _G, _RG, V, Vs, Vs) :- rb_lookup(V, [T|B], Vs), T == t, B == b, !. @@ -91,76 +91,78 @@ process_new_variable(V, Evs, G, RG, Vs0, Vs2) :- % visited throw_below(Evs, G, RG, Child, Vs0, Vs1) :- rb_lookup(Child, [_|B], Vs0), !, - ( - B == b -> + ( + B == b + -> Vs0 = Vs1 % been there before - ; + ; B = b, % mark it - handle_ball_from_above(Child, Evs, G, RG, Vs0, Vs1) - ). + handle_ball_from_above(Child, Evs, G, RG, Vs0, Vs1) + ). throw_below(Evs, G, RG, Child, Vs0, Vs2) :- rb_insert(Vs0, Child, [_|b], Vs1), handle_ball_from_above(Child, Evs, G, RG, Vs1, Vs2). % share this with parents, if we have evidence handle_ball_from_above(V, Evs, G, RG, Vs0, Vs1) :- - var(V), - clpbn:get_atts(V,[evidence(_)]), !, - dgraph_neighbors(V, RG, Parents), - foldl(throw_above(Evs, G, RG), Parents, Vs0, Vs1). + var(V), + clpbn:get_atts(V,[evidence(_)]), !, + dgraph_neighbors(V, RG, Parents), + foldl(throw_above(Evs, G, RG), Parents, Vs0, Vs1). handle_ball_from_above(V, Evs, G, RG, Vs0, Vs1) :- - nonvar(V), - rb_lookup(V,_,Evs), !, - dgraph_neighbors(V, RG, Parents), - foldl(throw_above(Evs, G, RG), Parents, Vs0, Vs1). + nonvar(V), + rb_lookup(V,_,Evs), !, + dgraph_neighbors(V, RG, Parents), + foldl(throw_above(Evs, G, RG), Parents, Vs0, Vs1). % propagate to kids, if we do not handle_ball_from_above(V, Evs, G, RG, Vs0, Vs1) :- - dgraph_neighbors(V, G, Children), - foldl(throw_below(Evs, G, RG), Children, Vs0, Vs1). - + dgraph_neighbors(V, G, Children), + foldl(throw_below(Evs, G, RG), Children, Vs0, Vs1). + % visited throw_above(Evs, G, RG, Parent, Vs0, Vs1) :- rb_lookup(Parent, [T|_], Vs0), !, - ( - T == t -> + ( + T == t + -> Vs1 = Vs0 % been there before - ; + ; T = t, % mark it - handle_ball_from_below(Parent, Evs, G, RG, Vs0, Vs1) - ). + handle_ball_from_below(Parent, Evs, G, RG, Vs0, Vs1) + ). throw_above(Evs, G, RG, Parent, Vs0, Vs2) :- rb_insert(Vs0, Parent, [t|_], Vs1), handle_ball_from_below(Parent, Evs, G, RG, Vs1, Vs2). 
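%
% Reading of the marking scheme used by influences/5 above (a
% descriptive summary inferred from the code; it follows the usual
% Bayes-ball / d-separation test): every visited node is mapped to a
% pair [T|B].  throw_below sets B = b when the ball reaches a node
% from one of its parents, throw_above sets T = t when it reaches a
% node from one of its children, and each direction is processed at
% most once.  A ball arriving from above at an evidence node bounces
% back to the node's parents, otherwise it is passed on to the
% children; a ball arriving from below stops at evidence nodes, and
% otherwise goes up to the parents (or down to the children when
% there are none).  all_top then returns the nodes marked t, together
% with the evidence nodes, as the set of variables relevant to the
% query.
%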
% share this with parents, if we have evidence handle_ball_from_below(V, _Evs, _, _, Vs, Vs) :- - var(V), - clpbn:get_atts(V,[evidence(_)]), !. + var(V), + clpbn:get_atts(V,[evidence(_)]), !. handle_ball_from_below(V, Evs, _, _, Vs, Vs) :- - nonvar(V), - rb_lookup(V, _, Evs), !. + nonvar(V), + rb_lookup(V, _, Evs), !. % propagate to kids, if we do not handle_ball_from_below(V, Evs, G, RG, Vs0, Vs1) :- - dgraph_neighbors(V, RG, Parents), - propagate_ball_from_below(Parents, Evs, V, G, RG, Vs0, Vs1). + dgraph_neighbors(V, RG, Parents), + propagate_ball_from_below(Parents, Evs, V, G, RG, Vs0, Vs1). propagate_ball_from_below([], Evs, V, G, RG, Vs0, Vs1) :- !, - dgraph_neighbors(V, G, Children), - foldl(throw_below(Evs, G, RG), Children, Vs0, Vs1). + dgraph_neighbors(V, G, Children), + foldl(throw_below(Evs, G, RG), Children, Vs0, Vs1). propagate_ball_from_below(Parents, Evs, _V, G, RG, Vs0, Vs1) :- - foldl(throw_above(Evs, G, RG), Parents, Vs0, Vs1). + foldl(throw_above(Evs, G, RG), Parents, Vs0, Vs1). all_top(T, Evs, Vs) :- - rb_visit(T, Pairs), - foldl( get_top(Evs), Pairs, [], Vs). + rb_visit(T, Pairs), + foldl( get_top(Evs), Pairs, [], Vs). get_top(_EVs, V-[T|_], Vs, [V|Vs]) :- - T == t, !. + T == t, !. get_top(_EVs, V-_, Vs, [V|Vs]) :- - var(V), - clpbn:get_atts(V,[evidence(_)]), !. + var(V), + clpbn:get_atts(V,[evidence(_)]), !. get_top(EVs, V-_, Vs, [V|Vs]) :- - nonvar(V), - rb_lookup(V, _, EVs), !. + nonvar(V), + rb_lookup(V, _, EVs), !. get_top(_, _, Vs, Vs). diff --git a/packages/CLPBN/clpbn/discrete_utils.yap b/packages/CLPBN/clpbn/discrete_utils.yap index d6b718074..ebe8ab376 100644 --- a/packages/CLPBN/clpbn/discrete_utils.yap +++ b/packages/CLPBN/clpbn/discrete_utils.yap @@ -25,10 +25,10 @@ propagate_evidence(V, Evs) :- get_dist_domain(Id, Out), generate_szs_with_evidence(Out,Ev,0,Evs,Found), (var(Found) -> - clpbn:get_atts(V, [key(K)]), - throw(clpbn(evidence_does_not_match,K,Ev,[Out])) + clpbn:get_atts(V, [key(K)]), + throw(clpbn(evidence_does_not_match,K,Ev,[Out])) ; - true + true ). propagate_evidence(_, _). diff --git a/packages/CLPBN/clpbn/display.yap b/packages/CLPBN/clpbn/display.yap index 006f7c77e..5d6afb6ea 100644 --- a/packages/CLPBN/clpbn/display.yap +++ b/packages/CLPBN/clpbn/display.yap @@ -1,3 +1,4 @@ + :- module(clpbn_display, [clpbn_bind_vals/3]). diff --git a/packages/CLPBN/clpbn/dists.yap b/packages/CLPBN/clpbn/dists.yap index 81a604d3a..be4b63b20 100644 --- a/packages/CLPBN/clpbn/dists.yap +++ b/packages/CLPBN/clpbn/dists.yap @@ -326,11 +326,11 @@ randomise_all_dists. randomise_dist(Dist) :- ( - use_parfactors(on) + use_parfactors(on) -> - pfl:get_pfl_factor_sizes(Dist, DSizes) + pfl:get_pfl_factor_sizes(Dist, DSizes) ; - recorded(clpbn_dist_psizes, db(Dist,DSizes), _) + recorded(clpbn_dist_psizes, db(Dist,DSizes), _) ), random_CPT(DSizes, NewCPT), dist_new_table(Dist, NewCPT). @@ -342,11 +342,11 @@ uniformise_all_dists. uniformise_dist(Dist) :- ( - use_parfactors(on) + use_parfactors(on) -> - pfl:get_pfl_factor_sizes(Dist, DSizes) + pfl:get_pfl_factor_sizes(Dist, DSizes) ; - recorded(clpbn_dist_psizes, db(Dist,DSizes), _) + recorded(clpbn_dist_psizes, db(Dist,DSizes), _) ), uniform_CPT(DSizes, NewCPT), dist_new_table(Dist, NewCPT). diff --git a/packages/CLPBN/clpbn/evidence.yap b/packages/CLPBN/clpbn/evidence.yap index 450413e29..8fd4ee9bc 100644 --- a/packages/CLPBN/clpbn/evidence.yap +++ b/packages/CLPBN/clpbn/evidence.yap @@ -61,7 +61,7 @@ evidence_error(Ball,PreviousSolver) :- store_graph([]). 
store_graph([V|Vars]) :- - clpbn:get_atts(V,[key(K),dist(Id,Vs)]), + clpbn:get_atts(V,[key(K),dist(Id,Vs)]), \+ node(K, Id, _), !, translate_vars(Vs,TVs), assert(node(K,Id,TVs)), @@ -84,7 +84,6 @@ add_links([K0|TVs],K) :- assert(edge(K,K0)), add_links(TVs,K). - incorporate_evidence(Vs,AllVs) :- rb_new(Cache0), create_open_list(Vs, OL, FL, Cache0, CacheI), diff --git a/packages/CLPBN/clpbn/gibbs.yap b/packages/CLPBN/clpbn/gibbs.yap index a8cccee0d..3349f4d29 100644 --- a/packages/CLPBN/clpbn/gibbs.yap +++ b/packages/CLPBN/clpbn/gibbs.yap @@ -249,11 +249,11 @@ compile_var(_,_,_,_,_,_,_,_). multiply_all(I,Parents,CPTs,Sz,Graph) :- markov_blanket_instance(Parents,Graph,Values), ( - multiply_all(CPTs,Graph,Probs) + multiply_all(CPTs,Graph,Probs) -> - store_mblanket(I,Values,Probs) + store_mblanket(I,Values,Probs) ; - throw(error(domain_error(bayesian_domain),gibbs_cpt(I,Parents,Values,Sz))) + throw(error(domain_error(bayesian_domain),gibbs_cpt(I,Parents,Values,Sz))) ), fail. multiply_all(I,_,_,_,_) :- @@ -283,7 +283,7 @@ fetch_parents([], _, []). fetch_parents([P|Parents], Graph, [Val|Vals]) :- arg(P,Graph,var(_,_,Val,_,_,_,_,_,_)), fetch_parents(Parents, Graph, Vals). - + multiply_more([],_,Probs0,LProbs) :- normalise_possibly_deterministic_CPT(Probs0, Probs), list_from_CPT(Probs, LProbs0), @@ -299,7 +299,7 @@ accumulate_up_list([P|LProbs], P0, [P1|L]) :- P1 is P0+P, accumulate_up_list(LProbs, P1, L). - + store_mblanket(I,Values,Probs) :- recordz(mblanket,m(I,Values,Probs),_). @@ -458,7 +458,7 @@ get_estimate_pos([I|Is], Sample, [M|Mult], V0, V) :- get_estimate_pos(Is, Sample, Mult, VI, V). update_estimate_for_var(V0,[X|T],[X1|NT]) :- - ( V0 == 0 -> + (V0 == 0 -> X1 is X+1, NT = T ; @@ -499,7 +499,7 @@ do_probs([E|Es],Sum,[P|Ps]) :- show_sorted([], _) :- nl. show_sorted([I|VarOrder], Graph) :- - arg(I,Graph,var(V,I,_,_,_,_,_,_,_)), + arg(I,Graph,var(V,I,_,_,_,_,_,_,_)), clpbn:get_atts(V,[key(K)]), format('~w ',[K]), show_sorted(VarOrder, Graph). diff --git a/packages/CLPBN/clpbn/ground_factors.yap b/packages/CLPBN/clpbn/ground_factors.yap index 0a364408c..50d7a113d 100644 --- a/packages/CLPBN/clpbn/ground_factors.yap +++ b/packages/CLPBN/clpbn/ground_factors.yap @@ -42,7 +42,7 @@ generate_network(QueryVars, QueryKeys, Keys, Factors, EList) :- b_hash_new(Evidence0), foldl(include_evidence,AVars, Evidence0, Evidence1), static_evidence(Evidence1, Evidence), - b_hash_to_list(Evidence, EList0), + b_hash_to_list(Evidence, EList0), maplist(pair_to_evidence,EList0, EList), maplist(queue_evidence, EList), foldl(run_through_query(Evidence), QueryVars, [], QueryKeys), @@ -62,11 +62,11 @@ pair_to_evidence(K-E, K=E). include_evidence(V, Evidence0, Evidence) :- clpbn:get_atts(V,[key(K),evidence(E)]), !, ( - b_hash_lookup(K, E1, Evidence0) + b_hash_lookup(K, E1, Evidence0) -> - (E \= E1 -> throw(clpbn:incompatible_evidence(K,E,E1)) ; Evidence = Evidence0) + (E \= E1 -> throw(clpbn:incompatible_evidence(K,E,E1)) ; Evidence = Evidence0) ; - b_hash_insert(Evidence0, K, E, Evidence) + b_hash_insert(Evidence0, K, E, Evidence) ). include_evidence(_, Evidence, Evidence). @@ -76,16 +76,16 @@ static_evidence(Evidence0, Evidence) :- include_static_evidence(K=E, Evidence0, Evidence) :- ( - b_hash_lookup(K, E1, Evidence0) + b_hash_lookup(K, E1, Evidence0) -> - (E \= E1 -> throw(incompatible_evidence(K,E,E1)) ; Evidence = Evidence0) + (E \= E1 -> throw(incompatible_evidence(K,E,E1)) ; Evidence = Evidence0) ; - b_hash_insert(Evidence0, K, E, Evidence) + b_hash_insert(Evidence0, K, E, Evidence) ). 
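%
% Both evidence collectors above (include_evidence/3 and
% include_static_evidence/3) enforce consistency: observing two
% different values for the same key raises an error.  A hypothetical
% example, kept in a comment since these predicates are internal to
% the module (the key and values are made up):
%
%   ?- b_hash_new(E0),
%      b_hash_insert(E0, rain(day1), yes, E1),
%      include_static_evidence(rain(day1) = no, E1, _).
%   % throws incompatible_evidence(rain(day1), no, yes)
%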
queue_evidence(K=_) :- - queue_in(K). + queue_in(K). run_through_query(Evidence, V, QueryKeys, QueryKeys) :- clpbn:get_atts(V,[key(K)]), @@ -118,40 +118,40 @@ do_propagate(K) :- \+ currently_defined(K), ( ground(K) -> assert(currently_defined(K)) ; true), ( - defined_in_factor(K, ParFactor), - add_factor(ParFactor, Ks) + defined_in_factor(K, ParFactor), + add_factor(ParFactor, Ks) *-> - true + true ; - throw(error(no_defining_factor(K))) + throw(error(no_defining_factor(K))) ), member(K1, Ks), \+ currently_defined(K1), queue_in(K1), fail. do_propagate(_K) :- - propagate. + propagate. add_factor(factor(Type, Id, Ks, _, _Phi, Constraints), NKs) :- % writeln(+Ks), ( - Ks = [K,Els], var(Els) + Ks = [K,Els], var(Els) -> - % aggregate factor - once(run(Constraints)), - avg_factors(K, Els, 0.0, NewKeys, NewId), - NKs = [K|NewKeys] + % aggregate factor + once(run(Constraints)), + avg_factors(K, Els, 0.0, NewKeys, NewId), + NKs = [K|NewKeys] ; - run(Constraints), - NKs = Ks, - Id = NewId + run(Constraints), + NKs = Ks, + Id = NewId ), ( - f(Type, NewId, NKs) + f(Type, NewId, NKs) -> - true + true ; - assert(f(Type, NewId, NKs)) + assert(f(Type, NewId, NKs)) ). run([Goal|Goals]) :- diff --git a/packages/CLPBN/clpbn/hmm.yap b/packages/CLPBN/clpbn/hmm.yap index fc6c38388..623955160 100644 --- a/packages/CLPBN/clpbn/hmm.yap +++ b/packages/CLPBN/clpbn/hmm.yap @@ -47,22 +47,19 @@ hmm_state(N/A,Mod) :- Key =.. [T|KArgs], Head =.. [N|LArgs], asserta_static( (Mod:Head :- - ( First > 2 -> - Last = Key, ! - ; - nb_getval(trie, Trie), trie_check_entry(Trie, Key, _) - -> - % leave work for solver! - % - Last = Key, ! - ; - % first time we saw this entry - nb_getval(trie, Trie), trie_put_entry(Trie, Key, _), - fail - ) - ) - ). - + (First > 2 -> + Last = Key, ! + ; + nb_getval(trie, Trie), trie_check_entry(Trie, Key, _) -> + % leave work for solver! + Last = Key, ! + ; + % first time we saw this entry + nb_getval(trie, Trie), trie_put_entry(Trie, Key, _), + fail + ) + )). + build_args(4,[A,B,C,D],[A,B,C],A,D). build_args(3, [A,B,C], [A,B],A,C). build_args(2, [A,B], [A],A,B). diff --git a/packages/CLPBN/clpbn/jt.yap b/packages/CLPBN/clpbn/jt.yap index 4a3e70f7f..7eb3c191f 100644 --- a/packages/CLPBN/clpbn/jt.yap +++ b/packages/CLPBN/clpbn/jt.yap @@ -135,7 +135,7 @@ run_vars([V|LVs], Edges, [V|Vs], [CPTVars-dist([V|Parents],Id)|CPTs], Ev) :- add_evidence_from_vars(V, [e(V,P)|Evs], Evs) :- clpbn:get_atts(V, [evidence(P)]), !. add_evidence_from_vars(_, Evs, Evs). - + find_nth0([Id|_], Id, P, P) :- !. find_nth0([_|D], Id, P0, P) :- P1 is P0+1, @@ -175,7 +175,7 @@ add_parents([], _, Graph, Graph). add_parents([P|Parents], V, Graph0, [P-V|GraphF]) :- add_parents(Parents, V, Graph0, GraphF). 
- + % From David Page's lectures test_graph(0, [1-3,2-3,2-4,5-4,5-7,10-7,10-9,11-9,3-6,4-6,7-8,9-8,6-12,8-12], @@ -232,19 +232,19 @@ choose([V|Vertices], Graph, Score0, _, _, Best, _, Cliques0, Cliques, EdgesF) :- ord_insert(Neighbors, V, PossibleClique), new_edges(Neighbors, Graph, NewEdges), ( - % simplicial edge - NewEdges == [] + % simplicial edge + NewEdges == [] -> - !, - Best = V, - NewEdges = EdgesF, - length(PossibleClique,L), - Cliques = [L-PossibleClique|Cliques0] + !, + Best = V, + NewEdges = EdgesF, + length(PossibleClique,L), + Cliques = [L-PossibleClique|Cliques0] ; -% cliquelength(PossibleClique,1,CL), - length(PossibleClique,CL), - CL < Score0, !, - choose(Vertices,Graph,CL,NewEdges, V, Best, CL-PossibleClique, Cliques0,Cliques,EdgesF) +% cliquelength(PossibleClique,1,CL), + length(PossibleClique,CL), + CL < Score0, !, + choose(Vertices,Graph,CL,NewEdges, V, Best, CL-PossibleClique, Cliques0,Cliques,EdgesF) ). choose([_|Vertices], Graph, Score0, Edges0, BestSoFar, Best, Clique, Cliques0, Cliques, EdgesF) :- choose(Vertices,Graph,Score0,Edges0, BestSoFar, Best, Clique, Cliques0,Cliques,EdgesF). @@ -289,18 +289,17 @@ get_links([Sz-Clique|Cliques], SoFar, Vertices, Edges0, Edges) :- get_links(Cliques, [Clique|SoFar], Vertices, EdgesI, Edges). get_links([_|Cliques], SoFar, Vertices, Edges0, Edges) :- get_links(Cliques, SoFar, Vertices, Edges0, Edges). - + add_clique_edges([], _, _, Edges, Edges). add_clique_edges([Clique1|Cliques], Clique, Sz, Edges0, EdgesF) :- ord_intersection(Clique1, Clique, Int), Int \== Clique, - ( - Int = [] -> - add_clique_edges(Cliques, Clique, Sz, Edges0, EdgesF) + (Int = [] -> + add_clique_edges(Cliques, Clique, Sz, Edges0, EdgesF) ; - % we connect - length(Int, LSz), - add_clique_edges(Cliques, Clique, Sz, [Clique-(Clique1-LSz)|Edges0], EdgesF) + % we connect + length(Int, LSz), + add_clique_edges(Cliques, Clique, Sz, [Clique-(Clique1-LSz)|Edges0], EdgesF) ). root(WTree, JTree) :- @@ -362,25 +361,25 @@ get_cpts([], _, [], []). get_cpts([CPT|CPts], [], [], [CPT|CPts]) :- !. get_cpts([[I|MCPT]-Info|CPTs], [J|Clique], MyCPTs, MoreCPTs) :- compare(C,I,J), - ( C == < -> + (C == < -> % our CPT cannot be a part of the clique. MoreCPTs = [[I|MCPT]-Info|LeftoverCPTs], get_cpts(CPTs, [J|Clique], MyCPTs, LeftoverCPTs) ; - C == = -> - % our CPT cannot be a part of the clique. - get_cpt(MCPT, Clique, I, Info, MyCPTs, MyCPTs0, MoreCPTs, MoreCPTs0), - get_cpts(CPTs, [J|Clique], MyCPTs0, MoreCPTs0) - ; - % the first element in our CPT may not be in a clique - get_cpts([[I|MCPT]-Info|CPTs], Clique, MyCPTs, MoreCPTs) + C == = -> + % our CPT cannot be a part of the clique. + get_cpt(MCPT, Clique, I, Info, MyCPTs, MyCPTs0, MoreCPTs, MoreCPTs0), + get_cpts(CPTs, [J|Clique], MyCPTs0, MoreCPTs0) + ; + % the first element in our CPT may not be in a clique + get_cpts([[I|MCPT]-Info|CPTs], Clique, MyCPTs, MoreCPTs) ). get_cpt(MCPT, Clique, I, Info, [[I|MCPT]-Info|MyCPTs], MyCPTs, MoreCPTs, MoreCPTs) :- ord_subset(MCPT, Clique), !. get_cpt(MCPT, _, I, Info, MyCPTs, MyCPTs, [[I|MCPT]-Info|MoreCPTs], MoreCPTs). - + translate_edges([], [], []). translate_edges([E1-E2|Edges], [(E1-A)-(E2-B)|NEdges], [E1-A,E2-B|Vs]) :- translate_edges(Edges, NEdges, Vs). @@ -389,13 +388,13 @@ match_vs(_,[]). match_vs([K-A|Cls],[K1-B|KVs]) :- compare(C, K, K1), (C == = -> - A = B, - match_vs([K-A|Cls], KVs) + A = B, + match_vs([K-A|Cls], KVs) ; - C = < -> - match_vs(Cls,[K1-B|KVs]) + C = < -> + match_vs(Cls,[K1-B|KVs]) ; - match_vs([K-A|Cls],KVs) + match_vs([K-A|Cls],KVs) ). 
fill_with_cpts(tree(Clique-Dists,Leafs), tree(Clique-NewDists,NewLeafs)) :- diff --git a/packages/CLPBN/clpbn/matrix_cpt_utils.yap b/packages/CLPBN/clpbn/matrix_cpt_utils.yap index 3c68dab8e..c12d5b6e6 100644 --- a/packages/CLPBN/clpbn/matrix_cpt_utils.yap +++ b/packages/CLPBN/clpbn/matrix_cpt_utils.yap @@ -1,3 +1,4 @@ + :- module(clpbn_matrix_utils, [init_CPT/3, project_from_CPT/3, @@ -95,21 +96,21 @@ reorder_CPT(Vs0,T0,Vs,TF,Sizes) :- var(Vs), !, order_vec(Vs0,Vs,Map), ( - Vs == Vs0 + Vs == Vs0 -> - TF = T0 + TF = T0 ; - matrix_shuffle(T0,Map,TF) + matrix_shuffle(T0,Map,TF) ), matrix_dims(TF, Sizes). reorder_CPT(Vs0,T0,Vs,TF,Sizes) :- mapping(Vs0,Vs,Map), ( - Vs == Vs0 + Vs == Vs0 -> - TF = T0 + TF = T0 ; - matrix_shuffle(T0,Map,TF) + matrix_shuffle(T0,Map,TF) ), matrix_dims(TF, Sizes). @@ -126,7 +127,7 @@ add_indices([V|Vs0],I0,[V-I0|Is]) :- get_els([], [], []). get_els([V-I|NIs], [V|Vs], [I|Map]) :- get_els(NIs, Vs, Map). - + mapping(Vs0,Vs,Map) :- add_indices(Vs0,0,I1s), add_indices( Vs,I2s), @@ -169,26 +170,26 @@ expand_tabs([], [], [V2|Deps2], [S2|Sz2], [S2|Map1], [0|Map2], [V2|NDeps]) :- expand_tabs([V1|Deps1], [S1|Sz1], [V2|Deps2], [S2|Sz2], Map1, Map2, NDeps) :- compare(C,V1,V2), (C == = -> - NDeps = [V1|MDeps], - Map1 = [0|M1], - Map2 = [0|M2], - NDeps = [V1|MDeps], - expand_tabs(Deps1, Sz1, Deps2, Sz2, M1, M2, MDeps) + NDeps = [V1|MDeps], + Map1 = [0|M1], + Map2 = [0|M2], + NDeps = [V1|MDeps], + expand_tabs(Deps1, Sz1, Deps2, Sz2, M1, M2, MDeps) ; - C == < -> - NDeps = [V1|MDeps], - Map1 = [0|M1], - Map2 = [S1|M2], - NDeps = [V1|MDeps], - expand_tabs(Deps1, Sz1, [V2|Deps2], [S2|Sz2], M1, M2, MDeps) - ; - NDeps = [V2|MDeps], - Map1 = [S2|M1], - Map2 = [0|M2], - NDeps = [V2|MDeps], - expand_tabs([V1|Deps1], [S1|Sz1], Deps2, Sz2, M1, M2, MDeps) + C == < -> + NDeps = [V1|MDeps], + Map1 = [0|M1], + Map2 = [S1|M2], + NDeps = [V1|MDeps], + expand_tabs(Deps1, Sz1, [V2|Deps2], [S2|Sz2], M1, M2, MDeps) + ; + NDeps = [V2|MDeps], + Map1 = [S2|M1], + Map2 = [0|M2], + NDeps = [V2|MDeps], + expand_tabs([V1|Deps1], [S1|Sz1], Deps2, Sz2, M1, M2, MDeps) ). - + normalise_CPT(MAT,NMAT) :- matrix_to_exps2(MAT), matrix_sum(MAT, Sum), diff --git a/packages/CLPBN/clpbn/numbers.yap b/packages/CLPBN/clpbn/numbers.yap index 85aca9f77..88c65c915 100644 --- a/packages/CLPBN/clpbn/numbers.yap +++ b/packages/CLPBN/clpbn/numbers.yap @@ -30,16 +30,16 @@ keys_to_numbers(AllKeys, Factors, Evidence, Hash0, Hash4, Id0, Id4, FactorIds, E foldl2(key_to_id, SKeys, _, Hash3, Hash4, Id3, Id4). lists_of_keys_to_ids(QueryKeys, QueryIds, Hash0, Hash, Id0, Id) :- - foldl2(list_of_keys_to_ids, QueryKeys, QueryIds, Hash0, Hash, Id0, Id). + foldl2(list_of_keys_to_ids, QueryKeys, QueryIds, Hash0, Hash, Id0, Id). list_of_keys_to_ids(List, IdList, Hash0, Hash, I0, I) :- foldl2(key_to_id, List, IdList, Hash0, Hash, I0, I). key_to_id(Key, Id, Hash0, Hash0, I0, I0) :- - b_hash_lookup(Key, Id, Hash0), !. + b_hash_lookup(Key, Id, Hash0), !. key_to_id(Key, I0, Hash0, Hash, I0, I) :- - b_hash_insert(Hash0, Key, I0, Hash), - I is I0+1. + b_hash_insert(Hash0, Key, I0, Hash), + I is I0+1. factor_to_id(Ev, f(_, DistId, Keys), f(Ids, Ranges, CPT, DistId), Hash0, Hash, I0, I) :- get_pfl_cpt(DistId, Keys, Ev, NKeys, CPT), diff --git a/packages/CLPBN/clpbn/pgrammar.yap b/packages/CLPBN/clpbn/pgrammar.yap index 0a54ba91a..f4739cb84 100644 --- a/packages/CLPBN/clpbn/pgrammar.yap +++ b/packages/CLPBN/clpbn/pgrammar.yap @@ -70,9 +70,9 @@ grammar_mle(S,_,P) :- nb_getval(best,p(P,S)), P > 0.0. 
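%
% The term_expansion clause below accepts probability-annotated DCG
% rules written as  P::Head --> Body.  A tiny, purely illustrative
% fragment in that syntax (the nonterminals and weights are made up):
%
%   0.7::s --> np, vp.
%   0.3::s --> vp.
%   1.0::np --> [john].
%
% Each such rule is rewritten into an internal version whose head
% carries an extra argument recording the rule's key, probability and
% choice, as sketched in the comment inside the clause.
%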
user:term_expansion((P::H --> B), Goal) :- - functor(H,A0,_), - % a-->b to a(p(K,P,C,[Cs])) --> b(Cs) - convert_to_internal(H, B, IH, IB, Id), + functor(H,A0,_), + % a-->b to a(p(K,P,C,[Cs])) --> b(Cs) + convert_to_internal(H, B, IH, IB, Id), expand_term((IH --> IB),(NH :- NB)), prolog_load_context(module, Mod), functor(NH,N,A), @@ -98,8 +98,8 @@ add_to_predicate(M:EH1,M:EH,M:H0,NH,NB,Key,Choice,P,Id,(EH1:-NB)) :- % now ensure_tabled works. ensure_tabled(M,H0,EH), assert_static(M:(EH :- - clpbn_pgrammar:p_rule(M,EH,Key,Choice), - M:EH1)), + clpbn_pgrammar:p_rule(M,EH,Key,Choice), + M:EH1)), Choice = 1, new_id(Key,P,Choice,Id), assert_static(M:ptab(EH,Choice,P)), @@ -139,18 +139,18 @@ convert_body_to_internal({A}, {A}) --> !. convert_body_to_internal(B, IB) --> [V], { - B =.. [Na|Args], - build_internal(Na,NaInternal), - IB =.. [NaInternal,V|Args] + B =.. [Na|Args], + build_internal(Na,NaInternal), + IB =.. [NaInternal,V|Args] }. new_id(Key,P,Choice,Id) :- ( - predicate_property(id(_,_,_,_),number_of_clauses(Id)) + predicate_property(id(_,_,_,_),number_of_clauses(Id)) -> - true + true ; - Id = 0 + Id = 0 ), assert(id(Id,Key,P,Choice)). @@ -210,11 +210,11 @@ path_choices(InternalS, Proof) :- new_id(Id) :- (nb_getval(grammar_id,Id) -> - I1 is Id+1, - nb_setval(grammar_id,I1) + I1 is Id+1, + nb_setval(grammar_id,I1) ; - nb_setval(grammar_id,1), - Id = 0 + nb_setval(grammar_id,1), + Id = 0 ). find_dom(K, Vs, Ps) :- diff --git a/packages/CLPBN/clpbn/table.yap b/packages/CLPBN/clpbn/table.yap index 3ebfac6c6..748a2757d 100644 --- a/packages/CLPBN/clpbn/table.yap +++ b/packages/CLPBN/clpbn/table.yap @@ -108,30 +108,28 @@ clpbn_table(F/N,M) :- L0 = [_|Args0], IGoal =.. [NF|Args0], asserta(clpbn_table(S, M, IGoal)), - assert( - (M:S :- - !, -% write(S: ' ' ), - b_getval(clpbn_tables, Tab), - % V2 is unbound. - ( b_hash_lookup(Key, V2, Tab) -> -% (attvar(V2) -> writeln(ok:A0:V2) ; writeln(error(V2:should_be_attvar(S)))), - ( var(A0) -> A0 = V2 ; put_evidence(A0, V2) ) - ; -% writeln(new), - b_hash_insert(Tab, Key, V2, NewTab), - b_setval(clpbn_tables,NewTab), - once(M:Goal), !, - % enter evidence after binding. - ( var(A0) -> A0 = V2 ; put_evidence(A0, V2) ) - ; - clpbn:clpbn_flag(solver,none) -> - true - ; - throw(error(tabled_clpbn_predicate_should_never_fail,S)) - ) - ) - ). + assert((M:S :- + !, +% write(S: ' ' ), + b_getval(clpbn_tables, Tab), + % V2 is unbound. + (b_hash_lookup(Key, V2, Tab) -> +% (attvar(V2) -> writeln(ok:A0:V2) ; writeln(error(V2:should_be_attvar(S)))), + (var(A0) -> A0 = V2 ; put_evidence(A0, V2)) + ; +% writeln(new), + b_hash_insert(Tab, Key, V2, NewTab), + b_setval(clpbn_tables,NewTab), + once(M:Goal), !, + % enter evidence after binding. + (var(A0) -> A0 = V2 ; put_evidence(A0, V2)) + ; + clpbn:clpbn_flag(solver,none) -> + true + ; + throw(error(tabled_clpbn_predicate_should_never_fail,S)) + ) + )). take_tail([V], V, [], V1, [V1]) :- !. take_tail([A|L0], V, [A|L1], V1, [A|L2]) :- @@ -154,19 +152,17 @@ clpbn_tableallargs(F/N,M) :- atom_concat(F, '___tabled', NF), NKey =.. [NF|Args], asserta(clpbn_table(Key, M, NKey)), - assert( - (M:Key :- - !, - b_getval(clpbn_tables, Tab), - ( b_hash_lookup(Key, Out, Tab) -> - true - ; - b_hash_insert(Tab, Key, Out, NewTab), - b_setval(clpbn_tables, NewTab), - once(M:NKey) - ) - ) - ). + assert((M:Key :- + !, + b_getval(clpbn_tables, Tab), + (b_hash_lookup(Key, Out, Tab) -> + true + ; + b_hash_insert(Tab, Key, Out, NewTab), + b_setval(clpbn_tables, NewTab), + once(M:NKey) + ) + )). clpbn_table_nondet(M:X) :- !, clpbn_table_nondet(X,M). 
@@ -185,18 +181,17 @@ clpbn_table_nondet(F/N,M) :- atom_concat(F, '___tabled', NF), NKey =.. [NF|Args], asserta(clpbn_table(Key, M, NKey)), - assert( - (M:Key :- % writeln(in:Key), - b_getval(clpbn_tables, Tab), - ( b_hash_lookup(Key, Out, Tab) -> - fail - ; - b_hash_insert(Tab, Key, Out, NewTab), - b_setval(clpbn_tables, NewTab), - M:NKey - ) - ) - ). + assert((M:Key :- + % writeln(in:Key), + b_getval(clpbn_tables, Tab), + (b_hash_lookup(Key, Out, Tab) -> + fail + ; + b_hash_insert(Tab, Key, Out, NewTab), + b_setval(clpbn_tables, NewTab), + M:NKey + ) + )). user:term_expansion((P :- Gs), NC) :- clpbn_table(P, M, NP), diff --git a/packages/CLPBN/clpbn/utils.yap b/packages/CLPBN/clpbn/utils.yap index 7ebf4d332..5bce1c943 100644 --- a/packages/CLPBN/clpbn/utils.yap +++ b/packages/CLPBN/clpbn/utils.yap @@ -54,15 +54,13 @@ get_keys([_|AVars], KeysVars) :- % may be non-CLPBN vars. merge_same_key([], [], _, []). merge_same_key([K1-V1,K2-V2|Vs], SortedAVars, Ks, UnifiableVars) :- K1 == K2, !, - (clpbn:get_atts(V1, [evidence(E)]) - -> - clpbn:put_atts(V2, [evidence(E)]) + (clpbn:get_atts(V1, [evidence(E)]) -> + clpbn:put_atts(V2, [evidence(E)]) ; - clpbn:get_atts(V2, [evidence(E)]) - -> + clpbn:get_atts(V2, [evidence(E)]) -> clpbn:put_atts(V1, [evidence(E)]) - ; - true + ; + true ), % V1 = V2, attributes:fast_unify_attributed(V1,V2), @@ -78,7 +76,7 @@ merge_same_key([K-V|Vs], [V|SortedAVars], Ks, UnifiableVars) :- in_keys(K1,[K|_]) :- \+ \+ K1 = K, !. in_keys(K1,[_|Ks]) :- in_keys(K1,Ks). - + add_to_keys(K1, Ks, Ks) :- ground(K1), !. add_to_keys(K1, Ks, [K1|Ks]). @@ -104,7 +102,7 @@ add_parents(Parents,V,Id,KeyVarsF,KeyVars0) :- all_vars([]). all_vars([P|Parents]) :- - var(P), + var(P), all_vars(Parents). diff --git a/packages/CLPBN/clpbn/ve.yap b/packages/CLPBN/clpbn/ve.yap index a5b0f8dee..b2e8d9ea4 100644 --- a/packages/CLPBN/clpbn/ve.yap +++ b/packages/CLPBN/clpbn/ve.yap @@ -23,7 +23,7 @@ run_ve_ground_solver/3, call_ve_ground_solver/6 ]). - + :- use_module(library(atts)). :- use_module(library(ordsets), @@ -75,8 +75,8 @@ :- use_module(library('clpbn/aggregates'), [check_for_agg_vars/2]). - -:- attribute size/1, all_diffs/1. + +:- attribute size/1, all_diffs/1. % % uses a bipartite graph where bigraph(Vs, NFs, Fs) @@ -93,23 +93,23 @@ check_if_ve_done(Var) :- % new PFL like interface... % call_ve_ground_solver(QueryVars, QueryKeys, AllKeys, Factors, Evidence, Output) :- - call_ve_ground_solver_for_probabilities([QueryKeys], AllKeys, Factors, Evidence, Solutions), - clpbn_bind_vals([QueryVars], Solutions, Output). + call_ve_ground_solver_for_probabilities([QueryKeys], AllKeys, Factors, Evidence, Solutions), + clpbn_bind_vals([QueryVars], Solutions, Output). call_ve_ground_solver_for_probabilities(QueryKeys, AllKeys, Factors, Evidence, Solutions) :- - init_ve_ground_solver(QueryKeys, AllKeys, Factors, Evidence, VE), - run_ve_ground_solver(QueryKeys, Solutions, VE). + init_ve_ground_solver(QueryKeys, AllKeys, Factors, Evidence, VE), + run_ve_ground_solver(QueryKeys, Solutions, VE). simulate_ve_ground_solver(_QueryVars, QueryKeys, AllKeys, Factors, Evidence, Output) :- - simulate_ve_ground_solver_for_probabilities([QueryKeys], AllKeys, Factors, Evidence, Output). + simulate_ve_ground_solver_for_probabilities([QueryKeys], AllKeys, Factors, Evidence, Output). simulate_ve_ground_solver_for_probabilities(QueryKeys, AllKeys, Factors, Evidence, Solutions) :- - init_ve_ground_solver(QueryKeys, AllKeys, Factors, Evidence, VE), - simulate_solver(QueryKeys, Solutions, VE). 
+ init_ve_ground_solver(QueryKeys, AllKeys, Factors, Evidence, VE), + simulate_solver(QueryKeys, Solutions, VE). init_ve_ground_solver(_QueryKeys, AllKeys, Factors, Evidence, VE) :- - keys_to_numbers(AllKeys, Factors, Evidence, Hash4, Id4, FactorIds, EvidenceIds), - init_ve(FactorIds, EvidenceIds, Hash4, Id4, VE). + keys_to_numbers(AllKeys, Factors, Evidence, Hash4, Id4, FactorIds, EvidenceIds), + init_ve(FactorIds, EvidenceIds, Hash4, Id4, VE). % @@ -117,11 +117,11 @@ init_ve_ground_solver(_QueryKeys, AllKeys, Factors, Evidence, VE) :- % ve([[]],_,_) :- !. ve(LLVs,Vs0,AllDiffs) :- - init_ve_solver(LLVs, Vs0, AllDiffs, State), - % variable elimination proper - run_ve_solver(LLVs, LLPs, State), - % bind Probs back to variables so that they can be output. - clpbn_bind_vals(LLVs,LLPs,AllDiffs). + init_ve_solver(LLVs, Vs0, AllDiffs, State), + % variable elimination proper + run_ve_solver(LLVs, LLPs, State), + % bind Probs back to variables so that they can be output. + clpbn_bind_vals(LLVs,LLPs,AllDiffs). init_ve(FactorIds, EvidenceIds, Hash, Id, ve(FactorIds, Hash, Id, Ev)) :- @@ -177,7 +177,7 @@ vars_to_bigraph(VMap, bigraph(VInfo, IF, Fs), Evs) :- id_to_factor(VMap, V-I, IF0, IF, Fs0, Fs, Evs0, Evs) :- % process evidence for variable - clpbn:get_atts(V, [evidence(E), dist(_,Ps)]), + clpbn:get_atts(V, [evidence(E), dist(_,Ps)]), checklist(noparent_of_interest(VMap), Ps), !, % I don't need to get a factor here Evs = [I=E|Evs0], @@ -186,12 +186,12 @@ id_to_factor(VMap, V-I, IF0, IF, Fs0, Fs, Evs0, Evs) :- id_to_factor(VMap, V-I, IF0, IF, Fs0, Fs, Evs0, Evs) :- % process distribution/factors ( - clpbn:get_atts(V, [evidence(E)]) - -> - Evs = [I=E|Evs0] + clpbn:get_atts(V, [evidence(E)]) + -> + Evs = [I=E|Evs0] ; - Evs = Evs0 - ), + Evs = Evs0 + ), clpbn:get_atts(V, [dist(D, Ps)]), get_dist_params(D, Pars0), get_dist_domain_size(D, DS), @@ -244,29 +244,29 @@ collect_factors(SFVs, _Fs, _V, [], SFVs). % solve each query independently % use a findall to recover space without needing for GC run_ve_ground_solver(LQVs, LLPs, ve(FactorIds, Hash, Id, Ev)) :- - rb_new(Fs0), - foldl3(factor_to_graph, FactorIds, Fs0, Fs, [], FVs, 0, IF), - sort(FVs, SFVs), - rb_new(VInfo0), - add_vs(SFVs, Fs, VInfo0, VInfo), - BG = bigraph(VInfo, IF, Fs), - lists_of_keys_to_ids(LQVs, LQIds, Hash, _, Id, _), - findall(LPs, solve(LQIds, FactorIds, BG, Ev, LPs), LLPs). + rb_new(Fs0), + foldl3(factor_to_graph, FactorIds, Fs0, Fs, [], FVs, 0, IF), + sort(FVs, SFVs), + rb_new(VInfo0), + add_vs(SFVs, Fs, VInfo0, VInfo), + BG = bigraph(VInfo, IF, Fs), + lists_of_keys_to_ids(LQVs, LQIds, Hash, _, Id, _), + findall(LPs, solve(LQIds, FactorIds, BG, Ev, LPs), LLPs). solve([QVs|_], FIds, Bigraph, Evs, LPs) :- - factor_influences(FIds, QVs, Evs, LVs), - do_solve(QVs, LVs, Bigraph, Evs, LPs). + factor_influences(FIds, QVs, Evs, LVs), + do_solve(QVs, LVs, Bigraph, Evs, LPs). solve([_|LQVs], FIds, Bigraph, Ev, LPs) :- - solve(LQVs, FIds, Bigraph, Ev, LPs). + solve(LQVs, FIds, Bigraph, Ev, LPs). 
do_solve(IQVs, IVs, bigraph(OldVs, IF, _Fs), Ev, Ps) :- - % get only what is relevant to query, - project_to_query_related(IVs, OldVs, SVs, Fs1), - % and also prune using evidence - rb_visit(Ev, EvL), - foldl2(clean_v_ev, EvL, Fs1, Fs2, SVs, EVs), - % eliminate - eliminate(IQVs, digraph(EVs, IF, Fs2), Dist), + % get only what is relevant to query, + project_to_query_related(IVs, OldVs, SVs, Fs1), + % and also prune using evidence + rb_visit(Ev, EvL), + foldl2(clean_v_ev, EvL, Fs1, Fs2, SVs, EVs), + % eliminate + eliminate(IQVs, digraph(EVs, IF, Fs2), Dist), % writeln(m:Dist),matrix:matrix_to_list(Dist,LD),writeln(LD), %exps(LD,LDE),writeln(LDE), % move from potentials back to probabilities @@ -274,18 +274,18 @@ do_solve(IQVs, IVs, bigraph(OldVs, IF, _Fs), Ev, Ps) :- list_from_CPT(MPs, Ps). simulate_solver(LQVs, Choices, ve(FIds, Hash, Id, BG, Evs)) :- - lists_of_keys_to_ids(LQVs, [QVs], Hash, _, Id, _), - factor_influences(FIds, QVs, Evs, LVs), - do_simulate(QVs, LVs, BG, Evs, Choices). + lists_of_keys_to_ids(LQVs, [QVs], Hash, _, Id, _), + factor_influences(FIds, QVs, Evs, LVs), + do_simulate(QVs, LVs, BG, Evs, Choices). do_simulate(IQVs, IVs, bigraph(OldVs, IF, _Fs), Ev, Choices) :- - % get only what is relevant to query, - project_to_query_related(IVs, OldVs, SVs, Fs1), - % and also prune using evidence - rb_visit(Ev, EvL), - foldl2(clean_v_ev, EvL, Fs1, Fs2, SVs, EVs), - % eliminate - simulate_eiminate(IQVs, digraph(EVs, IF, Fs2), Choices). + % get only what is relevant to query, + project_to_query_related(IVs, OldVs, SVs, Fs1), + % and also prune using evidence + rb_visit(Ev, EvL), + foldl2(clean_v_ev, EvL, Fs1, Fs2, SVs, EVs), + % eliminate + simulate_eiminate(IQVs, digraph(EVs, IF, Fs2), Choices). % solve each query independently % use a findall to recover space without needing for GC @@ -355,19 +355,19 @@ check_factor(V, NVs, F, NFs0, NFs, RemFs, NewRemFs) :- -> rb_insert(NFs0, IF, F, NFs), NewRemFs = [F|RemFs] - ; + ; NFs0 = NFs, NewRemFs = RemFs - ). + ). check_factor(_V, _NVs, F, NFs, NFs, RemFs, NewRemFs) :- F = f(Id, _, _), ( rb_lookup(Id, F, NFs) -> NewRemFs = [F|RemFs] - ; + ; NewRemFs = RemFs - ). + ). check_v(NVs, V) :- rb_lookup(V, _, NVs). @@ -430,15 +430,15 @@ best_var(QVs, I, _Node, Info, Info) :- !. % pick the variable with less factors best_var(_Qs, I, Node, i(ValSoFar,_,_), i(NewVal,I,Node)) :- - foldl(szfac,Node,1,NewVal), + foldl(szfac,Node,1,NewVal), %length(Node, NewVal), NewVal < ValSoFar, !. best_var(_, _I, _Node, Info, Info). szfac(f(_,Vs,_), I0, I) :- - length(Vs,L), - I is I0*L. + length(Vs,L), + I is I0*L. % delete one factor, need to also touch all variables del_fac(f(I,FVs,_), Fs0, Fs, Vs0, Vs) :- diff --git a/packages/CLPBN/clpbn/viterbi.yap b/packages/CLPBN/clpbn/viterbi.yap index b71befc68..0d496d63b 100644 --- a/packages/CLPBN/clpbn/viterbi.yap +++ b/packages/CLPBN/clpbn/viterbi.yap @@ -77,21 +77,21 @@ fetch_edges([V|Parents], Key0, EdgesF, Edges0, [Slice-AKey|PKeys]) :- clpbn:get_atts(V,[key(Key)]), abstract_key(Key, AKey, Slice), ( - Slice < 3 + Slice < 3 -> - EdgesF = [Key0-AKey|EdgesI] + EdgesF = [Key0-AKey|EdgesI] ; - EdgesF = EdgesI + EdgesF = EdgesI ), fetch_edges(Parents, Key0, EdgesI, Edges0, PKeys). fetch_edges([Key|Parents], Key0, EdgesF, Edges0, [Slice-AKey|PKeys]) :- abstract_key(Key, AKey, Slice), ( - Slice < 3 + Slice < 3 -> - EdgesF = [Key0-AKey|EdgesI] + EdgesF = [Key0-AKey|EdgesI] ; - EdgesF = EdgesI + EdgesF = EdgesI ), fetch_edges(Parents, Key0, EdgesI, Edges0, PKeys). fetch_edges([], _, Edges, Edges, []). 
@@ -124,20 +124,20 @@ compile_keys([], _, []). % add a random symbol to the end. compile_emission([],_) --> !, []. compile_emission(EmissionTerm,IKey) --> [emit(IKey,EmissionTerm)]. - + compile_propagation([],[],_,_) --> []. compile_propagation([0-PKey|Ps], [Prob|Probs], IKey, KeyMap) --> - [prop_same(IKey,Parent,Prob)], - { get_assoc(PKey,KeyMap,nodeinfo(Parent,_,_,_)) }, - compile_propagation(Ps, Probs, IKey, KeyMap). + [prop_same(IKey,Parent,Prob)], + { get_assoc(PKey,KeyMap,nodeinfo(Parent,_,_,_)) }, + compile_propagation(Ps, Probs, IKey, KeyMap). compile_propagation([2-PKey|Ps], [Prob|Probs], IKey, KeyMap) --> - [prop_same(IKey,Parent,Prob)], - { get_assoc(PKey,KeyMap,nodeinfo(Parent,_,_,_)) }, - compile_propagation(Ps, Probs, IKey, KeyMap). + [prop_same(IKey,Parent,Prob)], + { get_assoc(PKey,KeyMap,nodeinfo(Parent,_,_,_)) }, + compile_propagation(Ps, Probs, IKey, KeyMap). compile_propagation([3-PKey|Ps], [Prob|Probs], IKey, KeyMap) --> - [prop_next(IKey,Parent,Prob)], - { get_assoc(PKey,KeyMap,nodeinfo(Parent,_,_,_)) }, - compile_propagation(Ps, Probs, IKey, KeyMap). + [prop_next(IKey,Parent,Prob)], + { get_assoc(PKey,KeyMap,nodeinfo(Parent,_,_,_)) }, + compile_propagation(Ps, Probs, IKey, KeyMap). get_id(_:S, Map, SI) :- !, get_id(S, Map, SI). @@ -150,9 +150,9 @@ get_id(S, Map, SI) :- compile_trace(Trace, Emissions) :- user:hmm_domain(Domain), (atom(Domain) -> - hmm:cvt_vals(Domain, Vals) + hmm:cvt_vals(Domain, Vals) ; - Vals = Domain + Vals = Domain ), compile_trace(Trace, Vals, Emissions). @@ -194,22 +194,22 @@ run_inst(prop_same(I,P,Prob), _, SP, Current, _, Trace) :- NP is PI+Prob, matrix_get(Current, [P], P0), (NP > P0 -> - matrix_set(Current, [P], NP), - matrix_set(Trace, [SP,P], I) + matrix_set(Current, [P], NP), + matrix_set(Trace, [SP,P], I) ; - true + true ). run_inst(prop_next(I,P,Prob), _, SP, Current, Next, Trace) :- matrix_get(Current, [I], PI), NP is PI+Prob, matrix_get(Next, [P], P0), (NP > P0 -> - matrix_set(Next, [P], NP), - SP1 is SP+1, - IN is -I, - matrix_set(Trace, [SP1,P], IN) + matrix_set(Next, [P], NP), + SP1 is SP+1, + IN is -I, + matrix_set(Trace, [SP1,P], IN) ; - true + true ). backtrace(Dump, EI, Map, L, Trace) :- @@ -221,11 +221,11 @@ backtrace(Dump, EI, Map, L, Trace) :- trace(0,0,_,_,Trace,Trace) :- !. trace(L1,Next,Dump,Map,Trace0,Trace) :- (Next < 0 -> - NL is L1-1, - P is -Next + NL is L1-1, + P is -Next ; - NL = L1, - P = Next + NL = L1, + P = Next ), once(member(P-AKey,Map)), AKey=..[N|Args], diff --git a/packages/CLPBN/clpbn/vmap.yap b/packages/CLPBN/clpbn/vmap.yap index db7605646..98e3df357 100644 --- a/packages/CLPBN/clpbn/vmap.yap +++ b/packages/CLPBN/clpbn/vmap.yap @@ -16,7 +16,7 @@ % contiguous Vs to contiguous integers % init_vmap(vmap(0,Empty)) :- - rb_new(Empty). + rb_new(Empty). get_from_vmap(V, I, VMap0) :- VMap0 = vmap(_I,Map0), diff --git a/packages/CLPBN/learning/aleph_params.yap b/packages/CLPBN/learning/aleph_params.yap index 8162c8bf7..cb6070eb4 100644 --- a/packages/CLPBN/learning/aleph_params.yap +++ b/packages/CLPBN/learning/aleph_params.yap @@ -10,39 +10,42 @@ % but some variables are of special type random. % :- module(clpbn_aleph, - [init_clpbn_cost/0, - random_type/2]). + [init_clpbn_cost/0, + random_type/2 + ]). :- dynamic rt/2, inited/1. :- use_module(library('clpbn'), - [{}/1, - clpbn_flag/2, - clpbn_flag/3, - set_clpbn_flag/2]). + [{}/1, + clpbn_flag/2, + clpbn_flag/3, + set_clpbn_flag/2 + ]). :- use_module(library('clpbn/learning/em')). :- use_module(library('clpbn/matrix_cpt_utils'), - [uniform_CPT_as_list/2]). 
+ [uniform_CPT_as_list/2]). :- use_module(library('clpbn/dists'), - [reset_all_dists/0, - get_dist_key/2, - get_dist_params/2 - ]). + [reset_all_dists/0, + get_dist_key/2, + get_dist_params/2 + ]). :- use_module(library('clpbn/table'), - [clpbn_tabled_abolish/1, - clpbn_tabled_asserta/1, - clpbn_tabled_asserta/2, - clpbn_tabled_assertz/1, - clpbn_tabled_clause/2, - clpbn_tabled_clause_ref/3, - clpbn_tabled_number_of_clauses/2, - clpbn_is_tabled/1, - clpbn_reset_tables/0, - clpbn_tabled_dynamic/1]). + [clpbn_tabled_abolish/1, + clpbn_tabled_asserta/1, + clpbn_tabled_asserta/2, + clpbn_tabled_assertz/1, + clpbn_tabled_clause/2, + clpbn_tabled_clause_ref/3, + clpbn_tabled_number_of_clauses/2, + clpbn_is_tabled/1, + clpbn_reset_tables/0, + clpbn_tabled_dynamic/1 + ]). % % Tell Aleph not to use default solver during saturation @@ -94,11 +97,11 @@ enable_solver :- add_new_clause(_,(H :- _),_,_) :- ( clpbn_is_tabled(user:H) - -> + -> update_tabled_theory(H) - ; + ; update_theory(H) - ), + ), fail. % step 2: add clause add_new_clause(_,(_ :- true),_,_) :- !. @@ -113,18 +116,18 @@ add_new_clause(_,(H :- B),_,_) :- get_dist_key(Id, K), get_dist_params(Id, CPTList), ( - clpbn_is_tabled(user:H) + clpbn_is_tabled(user:H) -> - clpbn_tabled_asserta(user:(H :- IB)) + clpbn_tabled_asserta(user:(H :- IB)) ; - asserta(user:(H :- IB)) + asserta(user:(H :- IB)) ), user:setting(verbosity,V), ( V >= 1 -> - user:p_message('CLP(BN) Theory'), - functor(H,N,Ar), listing(user:N/Ar) + user:p_message('CLP(BN) Theory'), + functor(H,N,Ar), listing(user:N/Ar) ; - true + true ). @@ -165,22 +168,22 @@ user:cost((H :- B),Inf,Score) :- rewrite_body(B, IB, Vs, Ds, ( !, { V = K with p(D, CPTList, Vs) })), uniform_cpt([D|Ds], CPTList), ( - clpbn_is_tabled(user:H) + clpbn_is_tabled(user:H) -> - clpbn_reset_tables, - clpbn_tabled_asserta(user:(H :- IB), R) + clpbn_reset_tables, + clpbn_tabled_asserta(user:(H :- IB), R) ; - asserta(user:(H :- IB), R) + asserta(user:(H :- IB), R) ), ( - cpt_score(Score0) + cpt_score(Score0) -> - erase(R), - Score is -Score0 - ; - % illegal clause, just get out of here. - erase(R), - fail + erase(R), + Score is -Score0 + ; + % illegal clause, just get out of here. + erase(R), + fail ). user:cost(H,_Inf,Score) :- !, init_clpbn_cost(H, Score0), @@ -196,38 +199,38 @@ init_clpbn_cost(H, Score) :- functor(H,N,A), % get rid of Aleph crap ( - clpbn_is_tabled(user:H) + clpbn_is_tabled(user:H) -> - clpbn_tabled_abolish(user:N/A), - clpbn_tabled_dynamic(user:N/A) + clpbn_tabled_abolish(user:N/A), + clpbn_tabled_dynamic(user:N/A) ; - abolish(user:N/A), - % make it easy to add and remove clauses. - dynamic(user:N/A) + abolish(user:N/A), + % make it easy to add and remove clauses. + dynamic(user:N/A) ), domain(H, K, V, D), uniform_cpt([D], CPTList), % This will be the default cause, called when the other rules fail. ( - clpbn_is_tabled(user:H) + clpbn_is_tabled(user:H) -> - clpbn_tabled_assertz(user:(H :- !, { V = K with p(D, CPTList) })) + clpbn_tabled_assertz(user:(H :- !, { V = K with p(D, CPTList) })) ; - assert(user:(H :- !, { V = K with p(D, CPTList) })) - ), + assert(user:(H :- !, { V = K with p(D, CPTList) })) + ), cpt_score(Score), assert(inited(Score)). -% receives H, and generates a key K, a random variable RV, and a domain D. +% receives H, and generates a key K, a random variable RV, and a domain D. 
domain(H, K, RV, D) :- functor(H,Name,Arity), functor(Pred,Name,Arity), ( - recorded(aleph,modeh(_,Pred),_) - -> - true + recorded(aleph,modeh(_,Pred),_) + -> + true ; - user:'$aleph_global'(modeh,modeh(_,Pred)) + user:'$aleph_global'(modeh,modeh(_,Pred)) ), arg(Arity,Pred,+RType), rt(RType,D), !, @@ -240,11 +243,11 @@ domain(H, K, V, D) :- key_from_head(H,K,V) :- H =.. [Name|Args], ( - clpbn_is_tabled(user:H) + clpbn_is_tabled(user:H) -> - clpbn_tabled_number_of_clauses(user:H,NClauses) + clpbn_tabled_number_of_clauses(user:H,NClauses) ; - predicate_property(user:H,number_of_clauses(NClauses)) + predicate_property(user:H,number_of_clauses(NClauses)) ), atomic_concat(Name,NClauses,NName), append(H0L,[V],Args), @@ -267,11 +270,11 @@ rewrite_goal(A,V,D,NA) :- functor(A,Name,Arity), functor(Pred,Name,Arity), ( - recorded(aleph,modeb(_,Pred),_) - -> - true + recorded(aleph,modeb(_,Pred),_) + -> + true ; - user:'$aleph_global'(modeb,modeb(_,Pred)) + user:'$aleph_global'(modeb,modeb(_,Pred)) ), arg(Arity,Pred,-RType), rt(RType,D), !, @@ -288,7 +291,7 @@ replace_last_var([A|Args],V,[A|NArgs]) :- % This is the key % cpt_score(Lik) :- - findall(user:Ex, user:example(_,pos,Ex), Exs), + findall(user:Ex, user:example(_,pos,Ex), Exs), clpbn_flag(solver, Solver), clpbn_flag(em_solver, EMSolver), set_clpbn_flag(solver, EMSolver), diff --git a/packages/CLPBN/learning/bnt_parms.yap b/packages/CLPBN/learning/bnt_parms.yap index d3e8d9734..55be9d0ce 100644 --- a/packages/CLPBN/learning/bnt_parms.yap +++ b/packages/CLPBN/learning/bnt_parms.yap @@ -8,23 +8,23 @@ :- module(bnt_parameters, [learn_parameters/2]). -:- use_module(library('clpbn'), [ - clpbn_flag/3]). +:- use_module(library('clpbn'), + [clpbn_flag/3]). -:- use_module(library('clpbn/bnt'), [ - create_bnt_graph/2]). +:- use_module(library('clpbn/bnt'), + [create_bnt_graph/2]). -:- use_module(library('clpbn/display'), [ - clpbn_bind_vals/3]). +:- use_module(library('clpbn/display'), + [clpbn_bind_vals/3]). -:- use_module(library('clpbn/dists'), [ - get_dist_domain/2 - ]). +:- use_module(library('clpbn/dists'), + [get_dist_domain/2]). -:- use_module(library(matlab), [matlab_initialized_cells/4, - matlab_call/2, - matlab_get_variable/2 - ]). +:- use_module(library(matlab), + [matlab_initialized_cells/4, + matlab_call/2, + matlab_get_variable/2 + ]). :- dynamic bnt_em_max_iter/1. bnt_em_max_iter(10). @@ -61,7 +61,7 @@ clpbn_vars(Vs,BVars) :- get_clpbn_vars(Vs,CVs), keysort(CVs,KVs), merge_vars(KVs,BVars). - + get_clpbn_vars([],[]). get_clpbn_vars([V|GVars],[K-V|CLPBNGVars]) :- clpbn:get_atts(V, [key(K)]), !, @@ -73,8 +73,8 @@ merge_vars([],[]). merge_vars([K-V|KVs],[V|BVars]) :- get_var_has_same_key(KVs,K,V,KVs0), merge_vars(KVs0,BVars). - -get_var_has_same_key([K-V|KVs],K,V,KVs0) :- !, + +get_var_has_same_key([K-V|KVs],K,V,KVs0) :- !, get_var_has_same_key(KVs,K,V,KVs0). get_var_has_same_key(KVs,_,_,KVs). @@ -84,7 +84,7 @@ mk_sample(AllVars,NVars, LL) :- length(LN,LL), matlab_initialized_cells( NVars, 1, LN, sample). -add2sample([], []). +add2sample([], []). add2sample([V|Vs],[val(VId,1,Val)|Vals]) :- clpbn:get_atts(V, [evidence(Ev),dist(Id,_)]), !, bnt:get_atts(V,[bnt_id(VId)]), @@ -113,9 +113,9 @@ get_parameters([],[]). get_parameters([Rep-v(_,_,_)|Reps],[CPT|CPTs]) :- get_new_table(Rep,CPT), get_parameters(Reps,CPTs). - + get_new_table(Rep,CPT) :- s <-- struct(new_bnet.'CPD'({Rep})), matlab_get_variable( s.'CPT', CPT). 
- - + + diff --git a/packages/CLPBN/learning/em.yap b/packages/CLPBN/learning/em.yap index aa2119f47..0e28d3136 100644 --- a/packages/CLPBN/learning/em.yap +++ b/packages/CLPBN/learning/em.yap @@ -13,7 +13,7 @@ [clpbn_init_graph/1, clpbn_init_solver/4, clpbn_run_solver/3, - clpbn_finalize_solver/1, + clpbn_finalize_solver/1, pfl_init_solver/5, pfl_run_solver/3, conditional_probability/3, @@ -57,10 +57,10 @@ [matrix_add/3, matrix_to_list/2 ]). - + :- use_module(library(lists), [member/2]). - + :- use_module(library(rbtrees), [rb_new/1, rb_insert/4, @@ -85,9 +85,9 @@ em(_, _, _, Tables, Likelihood) :- handle_em(error(repeated_parents)) :- !, assert(em_found(_, -inf)), - fail. + fail. handle_em(Error) :- - throw(Error). + throw(Error). % This gets you an initial configuration. If there is a lot of evidence % tables may be filled in close to optimal, otherwise they may be @@ -128,32 +128,31 @@ setup_em_network(Items, state(AllDists, AllDistInstances, MargVars, SolverState) clpbn_init_solver(MargVars, AllVars, _, SolverState). run_examples(user:Exs, Keys, Factors, EList) :- - Exs = [_:_|_], !, - findall(ex(EKs, EFs, EEs), run_example(Exs, EKs, EFs, EEs), - VExs), - foldl4(join_example, VExs, [], Keys, [], Factors, [], EList, 0, _). + Exs = [_:_|_], !, + findall(ex(EKs, EFs, EEs), run_example(Exs, EKs, EFs, EEs), VExs), + foldl4(join_example, VExs, [], Keys, [], Factors, [], EList, 0, _). run_examples(Items, Keys, Factors, EList) :- - run_ex(Items, Keys, Factors, EList). + run_ex(Items, Keys, Factors, EList). join_example( ex(EKs, EFs, EEs), Keys0, Keys, Factors0, Factors, EList0, EList, I0, I) :- - I is I0+1, - foldl(process_key(I0), EKs, Keys0, Keys), - foldl(process_factor(I0), EFs, Factors0, Factors), - foldl(process_ev(I0), EEs, EList0, EList). + I is I0+1, + foldl(process_key(I0), EKs, Keys0, Keys), + foldl(process_factor(I0), EFs, Factors0, Factors), + foldl(process_ev(I0), EEs, EList0, EList). process_key(I0, K, Keys0, [I0:K|Keys0]). process_factor(I0, f(Type, Id, Keys), Keys0, [f(Type, Id, NKeys)|Keys0]) :- - maplist(update_key(I0), Keys, NKeys). + maplist(update_key(I0), Keys, NKeys). update_key(I0, K, I0:K). process_ev(I0, K=V, Es0, [(I0:K)=V|Es0]). run_example([_:Items|_], Keys, Factors, EList) :- - run_ex(user:Items, Keys, Factors, EList). + run_ex(user:Items, Keys, Factors, EList). run_example([_|LItems], Keys, Factors, EList) :- - run_example(LItems, Keys, Factors, EList). + run_example(LItems, Keys, Factors, EList). run_ex(Items, Keys, Factors, EList) :- % create the ground network @@ -172,17 +171,17 @@ em_loop(Its, Likelihood0, State, MaxError, MaxIts, LikelihoodF, FTables) :- ltables(Tables, F0Tables), %writeln(iteration:Its:Likelihood:Its:Likelihood0:F0Tables), ( - ( - abs((Likelihood - Likelihood0)/Likelihood) < MaxError - ; - Its == MaxIts - ) + ( + abs((Likelihood - Likelihood0)/Likelihood) < MaxError + ; + Its == MaxIts + ) -> - ltables(Tables, FTables), - LikelihoodF = Likelihood + ltables(Tables, FTables), + LikelihoodF = Likelihood ; - Its1 is Its+1, - em_loop(Its1, Likelihood, State, MaxError, MaxIts, LikelihoodF, FTables) + Its1 is Its+1, + em_loop(Its1, Likelihood, State, MaxError, MaxIts, LikelihoodF, FTables) ). ltables([], []). @@ -192,7 +191,7 @@ ltables([Id-T|Tables], [Key-LTable|FTables]) :- ltables(Tables, FTables). 
-generate_dists(Factors, EList, AllDists, AllInfo, MargVars) :- +generate_dists(Factors, EList, AllDists, AllInfo, MargVars) :- b_hash_new(Ev0), foldl(elist_to_hash, EList, Ev0, Ev), maplist(process_factor(Ev), Factors, Dists0), @@ -240,11 +239,11 @@ all_dists([V|AllVars], AllVars0, [i(Id, [V|Parents], Cases, Hiddens)|Dists]) :- length(Sorted, LengSorted), length(Parents, LengParents), ( - LengParents+1 =:= LengSorted - -> - true + LengParents+1 =:= LengSorted + -> + true ; - throw(error(repeated_parents)) + throw(error(repeated_parents)) ), generate_hidden_cases([V|Parents], CompactCases, Hiddens), uncompact_cases(CompactCases, Cases), @@ -314,7 +313,7 @@ create_mdist_table(Vs, Ps, MDistTable0, MDistTable) :- rb_insert(MDistTable0, Vs, Ps, MDistTable). compute_parameters([], [], _, Lik, Lik, _). -compute_parameters([Id-Samples|Dists], [Id-NewTable|Tables], MDistTable, Lik0, Lik, LPs:MargVars) :- +compute_parameters([Id-Samples|Dists], [Id-NewTable|Tables], MDistTable, Lik0, Lik, LPs:MargVars) :- empty_dist(Id, Table0), add_samples(Samples, Table0, MDistTable), %matrix_to_list(Table0,Mat), lists:sumlist(Mat, Sum), format(user_error, 'FINAL ~d ~w ~w~n', [Id,Sum,Mat]), @@ -324,7 +323,7 @@ compute_parameters([Id-Samples|Dists], [Id-NewTable|Tables], MDistTable, Lik0, compute_likelihood(Table0, NewTable, DeltaLik), dist_new_table(Id, NewTable), NewLik is Lik0+DeltaLik, - compute_parameters(Dists, Tables, MDistTable, NewLik, Lik, LPs:MargVars). + compute_parameters(Dists, Tables, MDistTable, NewLik, Lik, LPs:MargVars). add_samples([], _, _). add_samples([i(_,_,[Case],[])|Samples], Table, MDistTable) :- !, diff --git a/packages/CLPBN/learning/learn_utils.yap b/packages/CLPBN/learning/learn_utils.yap index fee6cf5df..463b9030a 100644 --- a/packages/CLPBN/learning/learn_utils.yap +++ b/packages/CLPBN/learning/learn_utils.yap @@ -2,29 +2,31 @@ % Utilities for learning % -:- module(clpbn_learn_utils, [run_all/1, - clpbn_vars/2, - normalise_counts/2, - compute_likelihood/3, - soften_sample/2, - soften_sample/3]). +:- module(clpbn_learn_utils, + [run_all/1, + clpbn_vars/2, + normalise_counts/2, + compute_likelihood/3, + soften_sample/2, + soften_sample/3 + ]). :- use_module(library(clpbn), - [clpbn_flag/2]). + [clpbn_flag/2]). :- use_module(library('clpbn/table'), - [clpbn_reset_tables/0]). + [clpbn_reset_tables/0]). :- use_module(library(matrix), - [matrix_agg_lines/3, - matrix_op_to_lines/4, - matrix_agg_cols/3, - matrix_op_to_cols/4, - matrix_to_logs/2, - matrix_op/4, - matrix_sum/2, - matrix_to_list/2, - matrix_op_to_all/4]). + [matrix_agg_lines/3, + matrix_op_to_lines/4, + matrix_agg_cols/3, + matrix_op_to_cols/4, + matrix_to_logs/2, + matrix_op/4, + matrix_sum/2, + matrix_to_list/2, + matrix_op_to_all/4]). :- meta_predicate run_all(:). @@ -47,7 +49,7 @@ clpbn_vars(Vs,BVars) :- get_clpbn_vars(Vs,CVs), keysort(CVs,KVs), merge_vars(KVs,BVars). - + get_clpbn_vars([],[]). get_clpbn_vars([V|GVars],[K-V|CLPBNGVars]) :- clpbn:get_atts(V, [key(K)]), !, @@ -59,7 +61,7 @@ merge_vars([],[]). merge_vars([K-V|KVs],[V|BVars]) :- get_var_has_same_key(KVs,K,V,KVs0), merge_vars(KVs0,BVars). - + get_var_has_same_key([K-V|KVs],K,V,KVs0) :- !, get_var_has_same_key(KVs,K,V,KVs0). get_var_has_same_key(KVs,_,_,KVs). diff --git a/packages/CLPBN/learning/mle.yap b/packages/CLPBN/learning/mle.yap index ce6cd0132..14080fe69 100644 --- a/packages/CLPBN/learning/mle.yap +++ b/packages/CLPBN/learning/mle.yap @@ -5,25 +5,29 @@ % This assumes we have a single big example. 
% -:- module(clpbn_mle, [learn_parameters/2, - learn_parameters/3, - parameters_from_evidence/3]). +:- module(clpbn_mle, + [learn_parameters/2, + learn_parameters/3, + parameters_from_evidence/3 + ]). :- use_module(library('clpbn')). - + :- use_module(library('clpbn/learning/learn_utils'), - [run_all/1, - clpbn_vars/2, - normalise_counts/2, - soften_table/2, - normalise_counts/2]). + [run_all/1, + clpbn_vars/2, + normalise_counts/2, + soften_table/2, + normalise_counts/2 + ]). :- use_module(library('clpbn/dists'), - [empty_dist/2, - dist_new_table/2]). + [empty_dist/2, + dist_new_table/2 + ]). :- use_module(library(matrix), - [matrix_inc/2]). + [matrix_inc/2]). learn_parameters(Items, Tables) :- diff --git a/packages/CLPBN/pfl.yap b/packages/CLPBN/pfl.yap index 7e1194568..fe7fbc359 100644 --- a/packages/CLPBN/pfl.yap +++ b/packages/CLPBN/pfl.yap @@ -6,7 +6,7 @@ :- module(pfl, [op(550,yfx,@), op(550,yfx,::), - op(1150,fx,bayes), + op(1150,fx,bayes), op(1150,fx,markov), factor/6, skolem/2, @@ -133,19 +133,19 @@ process_args(Arg1, Id, I0, I ) --> process_arg(Sk::D, Id, _I) --> !, { - new_skolem(Sk,D), - assert(skolem_in(Sk, Id)) - }, + new_skolem(Sk,D), + assert(skolem_in(Sk, Id)) + }, [Sk]. process_arg(Sk, Id, _I) --> !, { - % if :: been used before for this skolem - % just keep on using it, - % otherwise, assume it is t,f - ( \+ \+ skolem(Sk,_D) -> true ; new_skolem(Sk,[t,f]) ), - assert(skolem_in(Sk, Id)) - }, + % if :: been used before for this skolem + % just keep on using it, + % otherwise, assume it is t,f + ( \+ \+ skolem(Sk,_D) -> true ; new_skolem(Sk,[t,f]) ), + assert(skolem_in(Sk, Id)) + }, [Sk]. new_skolem(Sk,D) :- @@ -165,11 +165,10 @@ interface_predicate(Sk) :- assert(preprocess(ESk, Sk, Var)), % transform from PFL to CLP(BN) call assert_static((user:ESk :- - evidence(Sk,Ev) -> Ev = Var; - var(Var) -> insert_atts(Var,Sk) ; - add_evidence(Sk,Var) - ) - ). + evidence(Sk,Ev) -> Ev = Var; + var(Var) -> insert_atts(Var,Sk) ; + add_evidence(Sk,Var) + )). insert_atts(Var,Sk) :- clpbn:put_atts(Var,[key(Sk)]). @@ -186,7 +185,7 @@ add_evidence(Sk,Var) :- %% writeln(Key:Parents), %% avg_factors(Key, Parents, 0.0, Ev, NewKeys, Out). get_pfl_cpt(Id, Keys, _, Keys, Out) :- - get_pfl_parameters(Id,Out). + get_pfl_parameters(Id,Out). get_pfl_parameters(Id,Out) :- factor(_Type,Id,_FList,_FV,Phi,_Constraints), @@ -208,7 +207,7 @@ get_sizes(Key.FList, Sz.DSizes) :- skolem(Key, Domain), length(Domain, Sz), get_sizes(FList, DSizes). - + % only makes sense for bayesian networks get_first_pvariable(Id,Var) :- factor(_Type, Id,Var._FList,_FV,_Phi,_Constraints). 
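A note on the pfl.yap hunk above, with an illustrative sketch rather than code
from any commit: interface_predicate/1 asserts one clause per skolem, and the
reindented assert_static/1 call shows its shape. Assuming a hypothetical skolem
foo(X) whose interface predicate carries one extra argument Var (the role played
by ESk in the source), the asserted clause behaves roughly as:

foo(X, Var) :-
	(   evidence(foo(X), Ev) -> Ev = Var
	;   var(Var)             -> insert_atts(Var, foo(X))
	;   add_evidence(foo(X), Var)
	).

That is: recorded evidence for the skolem is returned first, an unbound argument
becomes a CLP(BN) attributed variable keyed on the skolem, and a bound argument
is stored as new evidence.
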
From 3eb6b76dddf452ea36a760f34e4d85bad8737d08 Mon Sep 17 00:00:00 2001 From: Tiago Gomes Date: Mon, 17 Dec 2012 18:39:42 +0000 Subject: [PATCH 18/89] Whitespace clean --- packages/CLPBN/horus/BayesBallGraph.h | 2 +- packages/CLPBN/horus/BeliefProp.h | 4 +-- packages/CLPBN/horus/ConstraintTree.h | 20 ++++++------- packages/CLPBN/horus/CountingBp.h | 8 +++--- packages/CLPBN/horus/ElimGraph.cpp | 2 +- packages/CLPBN/horus/ElimGraph.h | 4 +-- packages/CLPBN/horus/Factor.h | 4 +-- packages/CLPBN/horus/FactorGraph.h | 14 ++++----- packages/CLPBN/horus/GroundSolver.h | 2 +- packages/CLPBN/horus/Histogram.h | 12 ++++---- packages/CLPBN/horus/HorusYap.cpp | 4 +-- packages/CLPBN/horus/LiftedBp.cpp | 2 +- packages/CLPBN/horus/LiftedBp.h | 2 +- packages/CLPBN/horus/LiftedCircuit.cpp | 18 ++++++------ packages/CLPBN/horus/LiftedCircuit.h | 17 ++++++----- packages/CLPBN/horus/LiftedOperations.h | 9 +++--- packages/CLPBN/horus/LiftedUtils.h | 2 +- packages/CLPBN/horus/LiftedWCNF.cpp | 6 ++-- packages/CLPBN/horus/LiftedWCNF.h | 38 ++++++++++++------------- packages/CLPBN/horus/Makefile.in | 36 +++++++++++------------ packages/CLPBN/horus/Parfactor.h | 16 +++++------ packages/CLPBN/horus/ParfactorList.cpp | 4 +-- packages/CLPBN/horus/ParfactorList.h | 5 ++-- packages/CLPBN/horus/ProbFormula.h | 12 ++++---- packages/CLPBN/horus/TinySet.h | 2 +- packages/CLPBN/horus/VarElim.cpp | 2 +- packages/CLPBN/horus/WeightedBp.h | 8 +++--- 27 files changed, 127 insertions(+), 128 deletions(-) diff --git a/packages/CLPBN/horus/BayesBallGraph.h b/packages/CLPBN/horus/BayesBallGraph.h index 8359b598f..72a0f90d0 100644 --- a/packages/CLPBN/horus/BayesBallGraph.h +++ b/packages/CLPBN/horus/BayesBallGraph.h @@ -63,7 +63,7 @@ class BayesBallGraph void addEdge (VarId vid1, VarId vid2); const BBNode* getNode (VarId vid) const; - + BBNode* getNode (VarId vid); bool empty (void) const { return nodes_.empty(); } diff --git a/packages/CLPBN/horus/BeliefProp.h b/packages/CLPBN/horus/BeliefProp.h index 6c1d5c46b..64a41d916 100644 --- a/packages/CLPBN/horus/BeliefProp.h +++ b/packages/CLPBN/horus/BeliefProp.h @@ -59,7 +59,7 @@ class BpLink ss << var_->label(); return ss.str(); } - + protected: FacNode* fac_; VarNode* var_; @@ -97,7 +97,7 @@ class BeliefProp : public GroundSolver virtual Params getPosterioriOf (VarId); virtual Params getJointDistributionOf (const VarIds&); - + protected: void runSolver (void); diff --git a/packages/CLPBN/horus/ConstraintTree.h b/packages/CLPBN/horus/ConstraintTree.h index 0b48c3650..c50f45dc3 100644 --- a/packages/CLPBN/horus/ConstraintTree.h +++ b/packages/CLPBN/horus/ConstraintTree.h @@ -57,7 +57,7 @@ class CTNode CTChilds_& childs (void) { return childs_; } const CTChilds_& childs (void) const { return childs_; } - + size_t nrChilds (void) const { return childs_.size(); } bool isRoot (void) const { return level_ == 0; } @@ -108,7 +108,7 @@ class ConstraintTree ConstraintTree (const LogVars&); ConstraintTree (const LogVars&, const Tuples&); - + ConstraintTree (vector> names); ConstraintTree (const ConstraintTree&); @@ -121,7 +121,7 @@ class ConstraintTree ~ConstraintTree (void); CTNode* root (void) const { return root_; } - + bool empty (void) const { return root_->childs().empty(); } const LogVars& logVars (void) const @@ -135,17 +135,17 @@ class ConstraintTree assert (LogVarSet (logVars_) == logVarSet_); return logVarSet_; } - + size_t nrLogVars (void) const { return logVars_.size(); assert (LogVarSet (logVars_) == logVarSet_); } - + void addTuple (const Tuple&); - + bool containsTuple (const 
Tuple&); - + void moveToTop (const LogVars&); void moveToBottom (const LogVars&); @@ -159,7 +159,7 @@ class ConstraintTree void applySubstitution (const Substitution&); void project (const LogVarSet&); - + ConstraintTree projectedCopy (const LogVarSet&); void remove (const LogVarSet&); @@ -201,9 +201,9 @@ class ConstraintTree ConstraintTrees ground (LogVar); void copyLogVar (LogVar,LogVar); - + ConstraintTree& operator= (const ConstraintTree& ct); - + private: unsigned countTuples (const CTNode*) const; diff --git a/packages/CLPBN/horus/CountingBp.h b/packages/CLPBN/horus/CountingBp.h index a553e9307..2cbd2f995 100644 --- a/packages/CLPBN/horus/CountingBp.h +++ b/packages/CLPBN/horus/CountingBp.h @@ -88,13 +88,13 @@ class FacCluster const FacNode* first (void) const { return members_.front(); } const FacNodes& members (void) const { return members_; } - + FacNode* representative (void) const { return repr_; } void setRepresentative (FacNode* fn) { repr_ = fn; } VarClusters& varClusters (void) { return varClusters_; } - + private: FacNodes members_; FacNode* repr_; @@ -112,9 +112,9 @@ class CountingBp : public GroundSolver void printSolverFlags (void) const; Params solveQuery (VarIds); - + static bool checkForIdenticalFactors; - + private: Color getNewColor (void) { diff --git a/packages/CLPBN/horus/ElimGraph.cpp b/packages/CLPBN/horus/ElimGraph.cpp index 50870d1b6..93c8527e4 100644 --- a/packages/CLPBN/horus/ElimGraph.cpp +++ b/packages/CLPBN/horus/ElimGraph.cpp @@ -44,7 +44,7 @@ ElimGraph::ElimGraph (const vector& factors) ElimGraph::~ElimGraph (void) { for (size_t i = 0; i < nodes_.size(); i++) { - delete nodes_[i]; + delete nodes_[i]; } } diff --git a/packages/CLPBN/horus/ElimGraph.h b/packages/CLPBN/horus/ElimGraph.h index 2f4d60d15..8188b5ba6 100644 --- a/packages/CLPBN/horus/ElimGraph.h +++ b/packages/CLPBN/horus/ElimGraph.h @@ -10,7 +10,7 @@ using namespace std; -enum ElimHeuristic +enum ElimHeuristic { SEQUENTIAL, MIN_NEIGHBORS, @@ -49,7 +49,7 @@ class ElimGraph ElimGraph (const Factors&); ~ElimGraph (void); - + VarIds getEliminatingOrder (const VarIds&); void print (void) const; diff --git a/packages/CLPBN/horus/Factor.h b/packages/CLPBN/horus/Factor.h index ca330e4c3..742f20f7a 100644 --- a/packages/CLPBN/horus/Factor.h +++ b/packages/CLPBN/horus/Factor.h @@ -34,7 +34,7 @@ class TFactor void setDistId (unsigned id) { distId_ = id; } void normalize (void) { LogAware::normalize (params_); } - + void randomize (void) { for (size_t i = 0; i < params_.size(); ++i) { @@ -207,7 +207,7 @@ class TFactor Ranges ranges_; Params params_; unsigned distId_; - + private: void extend (unsigned range_prod) { diff --git a/packages/CLPBN/horus/FactorGraph.h b/packages/CLPBN/horus/FactorGraph.h index c2ed01046..b2b03369d 100644 --- a/packages/CLPBN/horus/FactorGraph.h +++ b/packages/CLPBN/horus/FactorGraph.h @@ -15,8 +15,8 @@ class FacNode; class VarNode : public Var { public: - VarNode (VarId varId, unsigned nrStates, - int evidence = Constants::NO_EVIDENCE) + VarNode (VarId varId, unsigned nrStates, + int evidence = Constants::NO_EVIDENCE) : Var (varId, nrStates, evidence) { } VarNode (const Var* v) : Var (v) { } @@ -77,7 +77,7 @@ class FactorGraph void setFactorsAsBayesian (void) { bayesFactors_ = true; } - bool bayesianFactors (void) const { return bayesFactors_ ; } + bool bayesianFactors (void) const { return bayesFactors_; } size_t nrVarNodes (void) const { return varNodes_.size(); } @@ -112,7 +112,7 @@ class FactorGraph void exportToUaiFormat (const char*) const; void exportToLibDaiFormat 
(const char*) const; - + private: // DISALLOW_COPY_AND_ASSIGN (FactorGraph); @@ -130,7 +130,7 @@ class FactorGraph FacNodes facNodes_; BayesBallGraph structure_; - bool bayesFactors_; + bool bayesFactors_; typedef unordered_map VarMap; VarMap varMap_; @@ -139,8 +139,8 @@ class FactorGraph struct sortByVarId -{ - bool operator()(VarNode* vn1, VarNode* vn2) { +{ + bool operator()(VarNode* vn1, VarNode* vn2) { return vn1->varId() < vn2->varId(); } }; diff --git a/packages/CLPBN/horus/GroundSolver.h b/packages/CLPBN/horus/GroundSolver.h index 3e2959605..18b81454b 100644 --- a/packages/CLPBN/horus/GroundSolver.h +++ b/packages/CLPBN/horus/GroundSolver.h @@ -27,7 +27,7 @@ class GroundSolver Params getJointByConditioning (GroundSolverType, FactorGraph, const VarIds& jointVarIds) const; - + protected: const FactorGraph& fg; }; diff --git a/packages/CLPBN/horus/Histogram.h b/packages/CLPBN/horus/Histogram.h index 6e0f93411..af0c4595e 100644 --- a/packages/CLPBN/horus/Histogram.h +++ b/packages/CLPBN/horus/Histogram.h @@ -12,17 +12,17 @@ class HistogramSet { public: HistogramSet (unsigned, unsigned); - + void nextHistogram (void); unsigned operator[] (size_t idx) const; - + unsigned nrHistograms (void) const; void reset (void); static vector getHistograms (unsigned ,unsigned); - + static unsigned nrHistograms (unsigned, unsigned); static size_t findIndex ( @@ -31,14 +31,14 @@ class HistogramSet static vector getNumAssigns (unsigned, unsigned); friend std::ostream& operator<< (ostream &os, const HistogramSet& hs); - + private: unsigned maxCount (size_t) const; void clearAfter (size_t); - unsigned size_; - Histogram hist_; + unsigned size_; + Histogram hist_; }; #endif // HORUS_HISTOGRAM_H diff --git a/packages/CLPBN/horus/HorusYap.cpp b/packages/CLPBN/horus/HorusYap.cpp index 586d5a170..33767344e 100644 --- a/packages/CLPBN/horus/HorusYap.cpp +++ b/packages/CLPBN/horus/HorusYap.cpp @@ -133,12 +133,12 @@ runLiftedSolver (void) case LiftedSolverType::LBP: solver = new LiftedBp (pfListCopy); break; case LiftedSolverType::LKC: solver = new LiftedKc (pfListCopy); break; } - + if (Globals::verbosity > 0) { solver->printSolverFlags(); cout << endl; } - + YAP_Term taskList = YAP_ARG2; vector results; while (taskList != YAP_TermNil()) { diff --git a/packages/CLPBN/horus/LiftedBp.cpp b/packages/CLPBN/horus/LiftedBp.cpp index 66e82a8c4..d3f757704 100644 --- a/packages/CLPBN/horus/LiftedBp.cpp +++ b/packages/CLPBN/horus/LiftedBp.cpp @@ -185,7 +185,7 @@ LiftedBp::rangeOfGround (const Ground& gr) return std::numeric_limits::max(); } - + Params LiftedBp::getJointByConditioning ( diff --git a/packages/CLPBN/horus/LiftedBp.h b/packages/CLPBN/horus/LiftedBp.h index cb6e9f3a4..274503f29 100644 --- a/packages/CLPBN/horus/LiftedBp.h +++ b/packages/CLPBN/horus/LiftedBp.h @@ -28,7 +28,7 @@ class LiftedBp : public LiftedSolver void createFactorGraph (void); vector> getWeights (void) const; - + unsigned rangeOfGround (const Ground&); Params getJointByConditioning (const ParfactorList&, const Grounds&); diff --git a/packages/CLPBN/horus/LiftedCircuit.cpp b/packages/CLPBN/horus/LiftedCircuit.cpp index 41ea4f2ae..7eb11b83b 100644 --- a/packages/CLPBN/horus/LiftedCircuit.cpp +++ b/packages/CLPBN/horus/LiftedCircuit.cpp @@ -238,7 +238,7 @@ LiftedCircuit::compile ( *follow = new TrueNode(); return; } - + if (clauses.size() == 1 && clauses[0]->isUnit()) { *follow = new LeafNode (clauses[0], *lwcnf_); return; @@ -326,13 +326,13 @@ LiftedCircuit::tryUnitPropagation ( explanation << " UP on " << clauses[i]->literals()[0]; 
explanationMap_[andNode] = explanation.str(); } - + Clauses unitClause = { clauses[i] }; compile (andNode->leftBranch(), unitClause); compile (andNode->rightBranch(), propagClauses); (*follow) = andNode; - return true; - } + return true; + } } return false; } @@ -409,11 +409,11 @@ LiftedCircuit::tryShannonDecomp ( explanation << " SD on " << literals[j]; explanationMap_[orNode] = explanation.str(); } - + compile (orNode->leftBranch(), clauses); compile (orNode->rightBranch(), otherClauses); (*follow) = orNode; - return true; + return true; } } } @@ -949,7 +949,7 @@ LiftedCircuit::exportToGraphViz (CircuitNode* node, ofstream& os) { assert (node != 0); - static unsigned nrAuxNodes = 0; + static unsigned nrAuxNodes = 0; stringstream ss; ss << "n" << nrAuxNodes; string auxNode = ss.str(); @@ -957,7 +957,7 @@ LiftedCircuit::exportToGraphViz (CircuitNode* node, ofstream& os) string opStyle = "shape=circle,width=0.7,margin=\"0.0,0.0\"," ; switch (getCircuitNodeType (node)) { - + case OR_NODE: { OrNode* casted = dynamic_cast(node); printClauses (casted, os); @@ -966,7 +966,7 @@ LiftedCircuit::exportToGraphViz (CircuitNode* node, ofstream& os) os << escapeNode (node) << " -> " << auxNode; os << " [label=\"" << getExplanationString (node) << "\"]" ; os << endl; - + os << auxNode << " -> " ; os << escapeNode (*casted->leftBranch()); os << " [label=\" " << (*casted->leftBranch())->weight() << "\"]" ; diff --git a/packages/CLPBN/horus/LiftedCircuit.h b/packages/CLPBN/horus/LiftedCircuit.h index 3acdf7285..2c32945ee 100644 --- a/packages/CLPBN/horus/LiftedCircuit.h +++ b/packages/CLPBN/horus/LiftedCircuit.h @@ -33,7 +33,7 @@ class CircuitNode class OrNode : public CircuitNode { public: - OrNode (void) : CircuitNode(), leftBranch_(0), rightBranch_(0) { } + OrNode (void) : CircuitNode(), leftBranch_(0), rightBranch_(0) { } CircuitNode** leftBranch (void) { return &leftBranch_; } CircuitNode** rightBranch (void) { return &rightBranch_; } @@ -51,7 +51,7 @@ class AndNode : public CircuitNode { public: AndNode (void) : CircuitNode(), leftBranch_(0), rightBranch_(0) { } - + AndNode (CircuitNode* leftBranch, CircuitNode* rightBranch) : CircuitNode(), leftBranch_(leftBranch), rightBranch_(rightBranch) { } @@ -153,13 +153,13 @@ class SmoothNode : public CircuitNode public: SmoothNode (const Clauses& clauses, const LiftedWCNF& lwcnf) : CircuitNode(), clauses_(clauses), lwcnf_(lwcnf) { } - + const Clauses& clauses (void) const { return clauses_; } Clauses clauses (void) { return clauses_; } - - double weight (void) const; - + + double weight (void) const; + private: Clauses clauses_; const LiftedWCNF& lwcnf_; @@ -246,16 +246,15 @@ class LiftedCircuit string extraOptions = ""); string escapeNode (const CircuitNode* node) const; - + string getExplanationString (CircuitNode* node); CircuitNode* root_; const LiftedWCNF* lwcnf_; - + bool compilationSucceeded_; Clauses backupClauses_; unordered_map originClausesMap_; unordered_map explanationMap_; - bool compilationSucceeded_; }; #endif // HORUS_LIFTEDCIRCUIT_H diff --git a/packages/CLPBN/horus/LiftedOperations.h b/packages/CLPBN/horus/LiftedOperations.h index 1e21f317c..fc25363d3 100644 --- a/packages/CLPBN/horus/LiftedOperations.h +++ b/packages/CLPBN/horus/LiftedOperations.h @@ -8,19 +8,20 @@ class LiftedOperations public: static void shatterAgainstQuery ( ParfactorList& pfList, const Grounds& query); - + static void runWeakBayesBall ( ParfactorList& pfList, const Grounds&); - + static void absorveEvidence ( ParfactorList& pfList, ObservedFormulas& obsFormulas); - + 
static Parfactors countNormalize (Parfactor*, const LogVarSet&); static Parfactor calcGroundMultiplication (Parfactor pf); private: - static Parfactors absorve (ObservedFormula&, Parfactor*); + static Parfactors absorve (ObservedFormula&, Parfactor*); }; #endif // HORUS_LIFTEDOPERATIONS_H + diff --git a/packages/CLPBN/horus/LiftedUtils.h b/packages/CLPBN/horus/LiftedUtils.h index 458afdf7d..1f563eaf7 100644 --- a/packages/CLPBN/horus/LiftedUtils.h +++ b/packages/CLPBN/horus/LiftedUtils.h @@ -149,7 +149,7 @@ class Substitution { return Util::contains (subs_, X); } - + size_t nrReplacements (void) const { return subs_.size(); } LogVars getDiscardedLogVars (void) const; diff --git a/packages/CLPBN/horus/LiftedWCNF.cpp b/packages/CLPBN/horus/LiftedWCNF.cpp index b2272b6d6..c62ead03d 100644 --- a/packages/CLPBN/horus/LiftedWCNF.cpp +++ b/packages/CLPBN/horus/LiftedWCNF.cpp @@ -108,7 +108,7 @@ Clause::containsPositiveLiteral ( } - + bool Clause::containsNegativeLiteral ( LiteralId lid, @@ -427,7 +427,7 @@ LiftedWCNF::LiftedWCNF (const ParfactorList& pfList) c2->addLiteralComplemented (Literal (1, {1,0})); clauses_.push_back(c2); */ - + if (Globals::verbosity > 1) { cout << "FORMULA INDICATORS:" << endl; printFormulaIndicators(); @@ -631,7 +631,7 @@ LiftedWCNF::printWeights (void) const unordered_map>::const_iterator it; it = weights_.begin(); while (it != weights_.end()) { - cout << "λ" << it->first << " weights: " ; + cout << "λ" << it->first << " weights: " ; cout << it->second.first << " " << it->second.second; cout << endl; ++ it; diff --git a/packages/CLPBN/horus/LiftedWCNF.h b/packages/CLPBN/horus/LiftedWCNF.h index 92f07d192..68c169ce0 100644 --- a/packages/CLPBN/horus/LiftedWCNF.h +++ b/packages/CLPBN/horus/LiftedWCNF.h @@ -33,19 +33,19 @@ class Literal LiteralId lid (void) const { return lid_; } LogVars logVars (void) const { return logVars_; } - - size_t nrLogVars (void) const { return logVars_.size(); } + + size_t nrLogVars (void) const { return logVars_.size(); } LogVarSet logVarSet (void) const { return LogVarSet (logVars_); } - + void complement (void) { negated_ = !negated_; } bool isPositive (void) const { return negated_ == false; } bool isNegative (void) const { return negated_; } - + bool isGround (ConstraintTree constr, LogVarSet ipgLogVars) const; - + size_t indexOfLogVar (LogVar X) const; string toString (LogVarSet ipgLogVars = LogVarSet(), @@ -74,9 +74,9 @@ class Clause void addLiteral (const Literal& l) { literals_.push_back (l); } const Literals& literals (void) const { return literals_; } - + Literals& literals (void) { return literals_; } - + size_t nrLiterals (void) const { return literals_.size(); } const ConstraintTree& constr (void) const { return constr_; } @@ -100,7 +100,7 @@ class Clause unsigned nrPosCountedLogVars (void) const { return posCountedLvs_.size(); } unsigned nrNegCountedLogVars (void) const { return negCountedLvs_.size(); } - + void addLiteralComplemented (const Literal& lit); bool containsLiteral (LiteralId lid) const; @@ -119,8 +119,8 @@ class Clause bool isPositiveCountedLogVar (LogVar X) const; - bool isNegativeCountedLogVar (LogVar X) const; - + bool isNegativeCountedLogVar (LogVar X) const; + bool isIpgLogVar (LogVar X) const; TinySet lidSet (void) const; @@ -130,13 +130,13 @@ class Clause LogVarTypes logVarTypes (size_t litIdx) const; void removeLiteral (size_t litIdx); - + static bool independentClauses (Clause& c1, Clause& c2); - - static vector copyClauses (const vector& clauses); + + static vector copyClauses (const vector& clauses); 
static void printClauses (const vector& clauses); - + friend std::ostream& operator<< (ostream &os, const Clause& clause); private: @@ -171,14 +171,14 @@ class LitLvTypes return false; } }; - + LitLvTypes (LiteralId lid, const LogVarTypes& lvTypes) : lid_(lid), lvTypes_(lvTypes) { } - + LiteralId lid (void) const { return lid_; } - + const LogVarTypes& logVarTypes (void) const { return lvTypes_; } - + void setAllFullLogVars (void) { std::fill (lvTypes_.begin(), lvTypes_.end(), LogVarType::FULL_LV); } @@ -228,7 +228,7 @@ class LiftedWCNF Clauses clauses_; LiteralId freeLiteralId_; - const ParfactorList& pfList_; + const ParfactorList& pfList_; unordered_map> map_; unordered_map> weights_; }; diff --git a/packages/CLPBN/horus/Makefile.in b/packages/CLPBN/horus/Makefile.in index 59936c776..61a72d553 100644 --- a/packages/CLPBN/horus/Makefile.in +++ b/packages/CLPBN/horus/Makefile.in @@ -23,10 +23,10 @@ CC=@CC@ CXX=@CXX@ # normal -#CXXFLAGS= -std=c++0x @SHLIB_CXXFLAGS@ $(YAP_EXTRAS) $(DEFS) -D_YAP_NOT_INSTALLED_=1 -I$(srcdir) -I../../.. -I$(srcdir)/../../../include @CPPFLAGS@ -DNDEBUG +CXXFLAGS= -std=c++0x @SHLIB_CXXFLAGS@ $(YAP_EXTRAS) $(DEFS) -D_YAP_NOT_INSTALLED_=1 -I$(srcdir) -I../../.. -I$(srcdir)/../../../include @CPPFLAGS@ -DNDEBUG # debug -CXXFLAGS= -std=c++0x @SHLIB_CXXFLAGS@ $(YAP_EXTRAS) $(DEFS) -D_YAP_NOT_INSTALLED_=1 -I$(srcdir) -I../../.. -I$(srcdir)/../../../include @CPPFLAGS@ -g -O0 -Wextra +#CXXFLAGS= -std=c++0x @SHLIB_CXXFLAGS@ $(YAP_EXTRAS) $(DEFS) -D_YAP_NOT_INSTALLED_=1 -I$(srcdir) -I../../.. -I$(srcdir)/../../../include @CPPFLAGS@ -g -O0 -Wextra # @@ -57,13 +57,13 @@ HEADERS = \ $(srcdir)/Horus.h \ $(srcdir)/Indexer.h \ $(srcdir)/LiftedBp.h \ - $(srcdir)/LiftedCircuit.h \ - $(srcdir)/LiftedKc.h \ - $(srcdir)/LiftedOperations.h \ - $(srcdir)/LiftedSolver.h \ + $(srcdir)/LiftedCircuit.h \ + $(srcdir)/LiftedKc.h \ + $(srcdir)/LiftedOperations.h \ + $(srcdir)/LiftedSolver.h \ $(srcdir)/LiftedUtils.h \ $(srcdir)/LiftedVe.h \ - $(srcdir)/LiftedWCNF.h \ + $(srcdir)/LiftedWCNF.h \ $(srcdir)/Parfactor.h \ $(srcdir)/ParfactorList.h \ $(srcdir)/ProbFormula.h \ @@ -73,7 +73,7 @@ HEADERS = \ $(srcdir)/Var.h \ $(srcdir)/VarElim.h \ $(srcdir)/WeightedBp.h - + CPP_SOURCES = \ $(srcdir)/BayesBall.cpp \ $(srcdir)/BayesBallGraph.cpp \ @@ -87,12 +87,12 @@ CPP_SOURCES = \ $(srcdir)/HorusCli.cpp \ $(srcdir)/HorusYap.cpp \ $(srcdir)/LiftedBp.cpp \ - $(srcdir)/LiftedCircuit.cpp \ - $(srcdir)/LiftedKc.cpp \ - $(srcdir)/LiftedOperations.cpp \ + $(srcdir)/LiftedCircuit.cpp \ + $(srcdir)/LiftedKc.cpp \ + $(srcdir)/LiftedOperations.cpp \ $(srcdir)/LiftedUtils.cpp \ $(srcdir)/LiftedVe.cpp \ - $(srcdir)/LiftedWCNF.cpp \ + $(srcdir)/LiftedWCNF.cpp \ $(srcdir)/Parfactor.cpp \ $(srcdir)/ParfactorList.cpp \ $(srcdir)/ProbFormula.cpp \ @@ -114,12 +114,12 @@ OBJS = \ Histogram.o \ HorusYap.o \ LiftedBp.o \ - LiftedCircuit.o \ - LiftedKc.o \ - LiftedOperations.o \ + LiftedCircuit.o \ + LiftedKc.o \ + LiftedOperations.o \ LiftedUtils.o \ LiftedVe.o \ - LiftedWCNF.o \ + LiftedWCNF.o \ ProbFormula.o \ Parfactor.o \ ParfactorList.o \ @@ -178,8 +178,8 @@ depend: $(HEADERS) $(CPP_SOURCES) -@if test "$(GCC)" = yes; then\ $(CC) -std=c++0x -MM -MG $(CFLAGS) -I$(srcdir) -I$(srcdir)/../../../../include -I$(srcdir)/../../../../H $(CPP_SOURCES) >> Makefile;\ else\ - makedepend -f - -- $(CFLAGS) -I$(srcdir)/../../../../H -I$(srcdir)/../../../../include -- $(CPP_SOURCES) |\ - sed 's|.*/\([^:]*\):|\1:|' >> Makefile ;\ + makedepend -f - -- $(CFLAGS) -I$(srcdir)/../../../../H -I$(srcdir)/../../../../include -- $(CPP_SOURCES) 
|\ + sed 's|.*/\([^:]*\):|\1:|' >> Makefile ;\ fi # DO NOT DELETE THIS LINE -- make depend depends on it. diff --git a/packages/CLPBN/horus/Parfactor.h b/packages/CLPBN/horus/Parfactor.h index 5f6aec550..1c65c2ea0 100644 --- a/packages/CLPBN/horus/Parfactor.h +++ b/packages/CLPBN/horus/Parfactor.h @@ -33,21 +33,21 @@ class Parfactor : public TFactor const ConstraintTree* constr (void) const { return constr_; } const LogVars& logVars (void) const { return constr_->logVars(); } - + const LogVarSet& logVarSet (void) const { return constr_->logVarSet(); } LogVarSet countedLogVars (void) const; LogVarSet uncountedLogVars (void) const; - + LogVarSet elimLogVars (void) const; - + LogVarSet exclusiveLogVars (size_t fIdx) const; - + void sumOutIndex (size_t fIdx); void multiply (Parfactor&); - + bool canCountConvert (LogVar X); void countConvert (LogVar); @@ -75,7 +75,7 @@ class Parfactor : public TFactor bool containsGroup (PrvGroup) const; bool containsGroups (vector) const; - + unsigned nrFormulas (LogVar) const; int indexOfLogVar (LogVar) const; @@ -99,7 +99,7 @@ class Parfactor : public TFactor static bool canMultiply (Parfactor*, Parfactor*); private: - + void simplifyCountingFormulas (size_t fIdx); void simplifyParfactor (size_t fIdx1, size_t fIdx2); @@ -113,7 +113,7 @@ class Parfactor : public TFactor static void alignAndExponentiate (Parfactor*, Parfactor*); static void alignLogicalVars (Parfactor*, Parfactor*); - + ConstraintTree* constr_; }; diff --git a/packages/CLPBN/horus/ParfactorList.cpp b/packages/CLPBN/horus/ParfactorList.cpp index b4496a683..7e829a6d0 100644 --- a/packages/CLPBN/horus/ParfactorList.cpp +++ b/packages/CLPBN/horus/ParfactorList.cpp @@ -221,7 +221,7 @@ ParfactorList::isShattered ( } - + void ParfactorList::addToShatteredList (Parfactor* g) { @@ -481,7 +481,7 @@ ParfactorList::shatter ( ConstraintTree* commCt2 = split2.first; ConstraintTree* exclCt2 = split2.second; - assert (commCt1->tupleSet (f1.logVars()) == + assert (commCt1->tupleSet (f1.logVars()) == commCt2->tupleSet (f2.logVars())); // stringstream ss1; ss1 << "" << count << "_A.dot" ; diff --git a/packages/CLPBN/horus/ParfactorList.h b/packages/CLPBN/horus/ParfactorList.h index 48008b253..1c6404dcb 100644 --- a/packages/CLPBN/horus/ParfactorList.h +++ b/packages/CLPBN/horus/ParfactorList.h @@ -56,11 +56,10 @@ class ParfactorList bool isAllShattered (void) const; void print (void) const; - + ParfactorList& operator= (const ParfactorList& pfList); private: - bool isShattered (const Parfactor*) const; bool isShattered (const Parfactor*, const Parfactor*) const; @@ -73,7 +72,7 @@ class ParfactorList Parfactors shatterAgainstMySelf ( Parfactor* g, size_t fIdx1, size_t fIdx2); - + std::pair shatter ( Parfactor*, Parfactor*); diff --git a/packages/CLPBN/horus/ProbFormula.h b/packages/CLPBN/horus/ProbFormula.h index 61b016288..63086266a 100644 --- a/packages/CLPBN/horus/ProbFormula.h +++ b/packages/CLPBN/horus/ProbFormula.h @@ -30,11 +30,11 @@ class ProbFormula const LogVars& logVars (void) const { return logVars_; } LogVarSet logVarSet (void) const { return LogVarSet (logVars_); } - + PrvGroup group (void) const { return group_; } void setGroup (PrvGroup g) { group_ = g; } - + bool sameSkeletonAs (const ProbFormula&) const; bool contains (LogVar) const; @@ -48,20 +48,20 @@ class ProbFormula bool isCounting (void) const; LogVar countedLogVar (void) const; - + void setCountedLogVar (LogVar); void clearCountedLogVar (void); - + void rename (LogVar, LogVar); - + static PrvGroup getNewGroup (void); friend std::ostream& 
operator<< (ostream &os, const ProbFormula& f); friend bool operator== (const ProbFormula& f1, const ProbFormula& f2); - private: + private: Symbol functor_; LogVars logVars_; unsigned range_; diff --git a/packages/CLPBN/horus/TinySet.h b/packages/CLPBN/horus/TinySet.h index ed810bcde..4b3c4bd83 100644 --- a/packages/CLPBN/horus/TinySet.h +++ b/packages/CLPBN/horus/TinySet.h @@ -225,7 +225,7 @@ class TinySet typename vector::size_type i; for (i = 0; i < s.size(); i++) { out << ((i != 0) ? "," : "") << s.vec_[i]; - } + } out << "}" ; return out; } diff --git a/packages/CLPBN/horus/VarElim.cpp b/packages/CLPBN/horus/VarElim.cpp index b2c4dc4ec..fb4eecf50 100644 --- a/packages/CLPBN/horus/VarElim.cpp +++ b/packages/CLPBN/horus/VarElim.cpp @@ -79,7 +79,7 @@ VarElim::createFactorList (void) it->second.push_back (i); } } -} +} diff --git a/packages/CLPBN/horus/WeightedBp.h b/packages/CLPBN/horus/WeightedBp.h index 844011ab5..7794fd509 100644 --- a/packages/CLPBN/horus/WeightedBp.h +++ b/packages/CLPBN/horus/WeightedBp.h @@ -22,7 +22,7 @@ class WeightedLink : public BpLink swap (currMsg_, nextMsg_); LogAware::pow (pwdMsg_, weight_); } - + private: size_t index_; unsigned weight_; @@ -33,13 +33,13 @@ class WeightedLink : public BpLink class WeightedBp : public BeliefProp { - public: + public: WeightedBp (const FactorGraph& fg, const vector>& weights) : BeliefProp (fg), weights_(weights) { } ~WeightedBp (void); - + Params getPosterioriOf (VarId); private: @@ -53,7 +53,7 @@ class WeightedBp : public BeliefProp Params getVarToFactorMsg (const BpLink*) const; void printLinkInformation (void) const; - + vector> weights_; }; From 9861f4aadb9c72409743d788e16b10a44a57ff8d Mon Sep 17 00:00:00 2001 From: Tiago Gomes Date: Mon, 17 Dec 2012 18:50:36 +0000 Subject: [PATCH 19/89] Is the EM solver that we want --- packages/CLPBN/clpbn.yap | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/CLPBN/clpbn.yap b/packages/CLPBN/clpbn.yap index 3fa79d7c0..b4a7bfdfb 100644 --- a/packages/CLPBN/clpbn.yap +++ b/packages/CLPBN/clpbn.yap @@ -613,7 +613,7 @@ clpbn_finalize_solver(_State). % pfl_init_solver(QueryKeys, AllKeys, Factors, Evidence, State) :- - solver(Solver), + em_solver(Solver), pfl_init_solver(QueryKeys, AllKeys, Factors, Evidence, State, Solver). pfl_init_solver(QueryKeys, AllKeys, Factors, Evidence, State, ve) :- !, @@ -640,7 +640,7 @@ pfl_init_solver(_, _, _, _, _, Solver) :- write('\' cannot be used for learning'). pfl_run_solver(LVs, LPs, State) :- - solver(Solver), + em_solver(Solver), pfl_run_solver(LVs, LPs, State, Solver). pfl_run_solver(LVs, LPs, State, ve) :- !, From 40a72a0a635b18d40fc56f6af06a9873382f67c1 Mon Sep 17 00:00:00 2001 From: Tiago Gomes Date: Mon, 17 Dec 2012 21:03:13 +0000 Subject: [PATCH 20/89] Improve error handling for choosing EM solver --- packages/CLPBN/clpbn.yap | 40 +++++++++++++++++++++++++++++++--------- 1 file changed, 31 insertions(+), 9 deletions(-) diff --git a/packages/CLPBN/clpbn.yap b/packages/CLPBN/clpbn.yap index b4a7bfdfb..919bccf52 100644 --- a/packages/CLPBN/clpbn.yap +++ b/packages/CLPBN/clpbn.yap @@ -608,24 +608,46 @@ clpbn_finalize_solver(State) :- clpbn_finalize_solver(_State). +ground_solver(ve). +ground_solver(hve). +ground_solver(jt). +ground_solver(bdd). +ground_solver(bp). +ground_solver(cbp). +ground_solver(gibbs). + +lifted_solver(lve). +lifted_solver(lkc). +lifted_solver(lbp). + % % This is a routine to start a solver, called by the learning procedures (ie, em). 
 %
 pfl_init_solver(QueryKeys, AllKeys, Factors, Evidence, State) :-
 	em_solver(Solver),
+	(lifted_solver(Solver) ->
+	  format("Error: you cannot use the lifted solver `~w' for learning.", [Solver]), fail
+	;
+	  true
+	),
+	(ground_solver(Solver) ->
+	  true
+	;
+	  format("Error: `~w' is an unknown solver.", [Solver]), fail
+	),
 	pfl_init_solver(QueryKeys, AllKeys, Factors, Evidence, State, Solver).
 
 pfl_init_solver(QueryKeys, AllKeys, Factors, Evidence, State, ve) :- !,
 	init_ve_ground_solver(QueryKeys, AllKeys, Factors, Evidence, State).
 
-pfl_init_solver(QueryKeys, AllKeys, Factors, Evidence, State, bdd) :- !,
-	init_bdd_ground_solver(QueryKeys, AllKeys, Factors, Evidence, State).
-
 pfl_init_solver(QueryKeys, AllKeys, Factors, Evidence, State, hve) :- !,
 	clpbn_horus:set_horus_flag(ground_solver, ve),
 	init_horus_ground_solver(QueryKeys, AllKeys, Factors, Evidence, State).
 
+pfl_init_solver(QueryKeys, AllKeys, Factors, Evidence, State, bdd) :- !,
+	init_bdd_ground_solver(QueryKeys, AllKeys, Factors, Evidence, State).
+
 pfl_init_solver(QueryKeys, AllKeys, Factors, Evidence, State, bp) :- !,
 	clpbn_horus:set_horus_flag(ground_solver, bp),
 	init_horus_ground_solver(QueryKeys, AllKeys, Factors, Evidence, State).
@@ -635,9 +657,9 @@ pfl_init_solver(QueryKeys, AllKeys, Factors, Evidence, State, cbp) :- !,
 	init_horus_ground_solver(QueryKeys, AllKeys, Factors, Evidence, State).
 
 pfl_init_solver(_, _, _, _, _, Solver) :-
-	write('Error: solver `'),
-	write(Solver),
-	write('\' cannot be used for learning').
+	format("Error: solver `~w' cannot be used for learning.", [Solver]),
+	fail.
+
 
 pfl_run_solver(LVs, LPs, State) :-
 	em_solver(Solver),
 	pfl_run_solver(LVs, LPs, State, Solver).
 
 pfl_run_solver(LVs, LPs, State, ve) :- !,
 	run_ve_ground_solver(LVs, LPs, State).
+
+pfl_run_solver(LVs, LPs, State, hve) :- !,
+	run_horus_ground_solver(LVs, LPs, State).
 
 pfl_run_solver(LVs, LPs, State, bdd) :- !,
 	run_bdd_ground_solver(LVs, LPs, State).
 
-pfl_run_solver(LVs, LPs, State, hve) :- !,
-	run_horus_ground_solver(LVs, LPs, State).
-
 pfl_run_solver(LVs, LPs, State, bp) :- !,
 	run_horus_ground_solver(LVs, LPs, State).
 
From f1499f99f320545157a7f0e14448f0eda25db17b Mon Sep 17 00:00:00 2001
From: Tiago Gomes
Date: Mon, 17 Dec 2012 21:19:47 +0000
Subject: [PATCH 21/89] Introduce set_em_solver and use it

---
 packages/CLPBN/clpbn.yap                           |  4 ++++
 packages/CLPBN/examples/School/school_128.yap      |  3 ---
 packages/CLPBN/examples/School/school_32.yap       |  3 ---
 packages/CLPBN/examples/School/school_64.yap       |  3 ---
 packages/CLPBN/examples/learning/debug_school.yap  | 13 +++++++------
 packages/CLPBN/examples/learning/prof_params.pfl   | 11 +++++------
 packages/CLPBN/examples/learning/school_params.yap | 13 +++++++------
 .../CLPBN/examples/learning/sprinkler_params.yap   | 11 +++++------
 packages/CLPBN/pfl.yap                             |  1 +
 9 files changed, 29 insertions(+), 33 deletions(-)

diff --git a/packages/CLPBN/clpbn.yap b/packages/CLPBN/clpbn.yap
index 919bccf52..5ace917a2 100644
--- a/packages/CLPBN/clpbn.yap
+++ b/packages/CLPBN/clpbn.yap
@@ -3,6 +3,7 @@
 	[{}/1,
 	 clpbn_flag/2,
 	 set_clpbn_flag/2,
+	 set_em_solver/1,
 	 clpbn_flag/3,
 	 clpbn_key/2,
 	 clpbn_init_graph/1,
@@ -179,6 +180,9 @@ clpbn_flag(use_factors,Before,After) :-
 	retract(use_parfactors(Before)),
 	assert(use_parfactors(After)).
 
+set_em_solver(Solver) :-
+	set_clpbn_flag(em_solver, Solver).
+
 {_} :- solver(none), !.
 
{Var = Key with Dist} :- diff --git a/packages/CLPBN/examples/School/school_128.yap b/packages/CLPBN/examples/School/school_128.yap index ecbf398cb..3a59cd9d6 100644 --- a/packages/CLPBN/examples/School/school_128.yap +++ b/packages/CLPBN/examples/School/school_128.yap @@ -18,9 +18,6 @@ total_students(4096). :- ensure_loaded('parschema.pfl'). -:- set_solver(hve). - - professor(p0). professor(p1). professor(p2). diff --git a/packages/CLPBN/examples/School/school_32.yap b/packages/CLPBN/examples/School/school_32.yap index 48fcbcabc..7c993ef24 100644 --- a/packages/CLPBN/examples/School/school_32.yap +++ b/packages/CLPBN/examples/School/school_32.yap @@ -18,9 +18,6 @@ total_students(256). :- ensure_loaded('parschema.pfl'). -:- set_solver(hve). - - professor(p0). professor(p1). professor(p2). diff --git a/packages/CLPBN/examples/School/school_64.yap b/packages/CLPBN/examples/School/school_64.yap index 7564cc1b9..d72ecf7e8 100644 --- a/packages/CLPBN/examples/School/school_64.yap +++ b/packages/CLPBN/examples/School/school_64.yap @@ -18,9 +18,6 @@ total_students(1024). :- ensure_loaded('parschema.pfl'). -:- set_solver(hve). - - professor(p0). professor(p1). professor(p2). diff --git a/packages/CLPBN/examples/learning/debug_school.yap b/packages/CLPBN/examples/learning/debug_school.yap index 3ee14fa96..907efc802 100644 --- a/packages/CLPBN/examples/learning/debug_school.yap +++ b/packages/CLPBN/examples/learning/debug_school.yap @@ -2,16 +2,17 @@ /* We do not consider aggregates yet. */ +:- use_module(library(clpbn/learning/em)). + :- [pos:train]. :- ['../../examples/School/parschema.pfl']. -:- use_module(library(clpbn/learning/em)). - -%:- clpbn:set_clpbn_flag(em_solver,gibbs). -%:- clpbn:set_clpbn_flag(em_solver,jt). - :- clpbn:set_clpbn_flag(em_solver,ve). -%:- clpbn:set_clpbn_flag(em_solver,bp). +:- set_em_solver(ve). +%:- set_em_solver(hve). +%:- set_em_solver(bdd). +%:- set_em_solver(bp). +%:- set_em_solver(cbp). debug_school :- graph(L), diff --git a/packages/CLPBN/examples/learning/prof_params.pfl b/packages/CLPBN/examples/learning/prof_params.pfl index d43e5b18d..32df5081b 100644 --- a/packages/CLPBN/examples/learning/prof_params.pfl +++ b/packages/CLPBN/examples/learning/prof_params.pfl @@ -4,12 +4,11 @@ :- use_module(library(clpbn/learning/em)). -%:- clpbn:set_clpbn_flag(em_solver,gibbs). -%:- clpbn:set_clpbn_flag(em_solver,jt). -%:- clpbn:set_clpbn_flag(em_solver,hve). -:- clpbn:set_clpbn_flag(em_solver,ve). -%:- clpbn:set_clpbn_flag(em_solver,bp). -%:- clpbn:set_clpbn_flag(em_solver,bdd). +:- set_em_solver(ve). +%:- set_em_solver(hve). +%:- set_em_solver(bdd). +%:- set_em_solver(bp). +%:- set_em_solver(cbp). professor(p0). professor(p1). diff --git a/packages/CLPBN/examples/learning/school_params.yap b/packages/CLPBN/examples/learning/school_params.yap index 61c535b5f..fd93edb27 100644 --- a/packages/CLPBN/examples/learning/school_params.yap +++ b/packages/CLPBN/examples/learning/school_params.yap @@ -2,16 +2,17 @@ /* We do not consider aggregates yet. */ +:- use_module(library(clpbn/learning/em)). + :- [pos:train]. :- ['../../examples/School/school_32']. -:- use_module(library(clpbn/learning/em)). - -%:- clpbn:set_clpbn_flag(em_solver,gibbs). -%:- clpbn:set_clpbn_flag(em_solver,jt). -% :- clpbn:set_clpbn_flag(em_solver,ve). -:- clpbn:set_clpbn_flag(em_solver,bp). +:- set_em_solver(ve). +%:- set_em_solver(hve). +%:- set_em_solver(bdd). +%:- set_em_solver(bp). +%:- set_em_solver(cbp). 
timed_main :- statistics(runtime, _), diff --git a/packages/CLPBN/examples/learning/sprinkler_params.yap b/packages/CLPBN/examples/learning/sprinkler_params.yap index 05e3ae3c9..730f7fd5c 100644 --- a/packages/CLPBN/examples/learning/sprinkler_params.yap +++ b/packages/CLPBN/examples/learning/sprinkler_params.yap @@ -4,12 +4,11 @@ :- use_module(library(clpbn/learning/em)). -%:- set_pfl_flag(em_solver,gibbs). -%:- set_pfl_flag(em_solver,jt). -%:- set_pfl_flag(em_solver,hve). -%:- set_pfl_flag(em_solver,bp). -%:- set_pfl_flag(em_solver,ve). -:- set_pfl_flag(em_solver,bdd). +:- set_em_solver(ve). +%:- set_em_solver(hve). +%:- set_em_solver(bdd). +%:- set_em_solver(bp). +%:- set_em_solver(cbp). :- dynamic id/1. diff --git a/packages/CLPBN/pfl.yap b/packages/CLPBN/pfl.yap index fe7fbc359..f3f02062f 100644 --- a/packages/CLPBN/pfl.yap +++ b/packages/CLPBN/pfl.yap @@ -22,6 +22,7 @@ :- reexport(library(clpbn), [clpbn_flag/2 as pfl_flag, set_clpbn_flag/2 as set_pfl_flag, + set_em_solver/1, conditional_probability/3, pfl_init_solver/5, pfl_run_solver/3 From e1c862ebbeed862e5b602a089bde7ce8826dc230 Mon Sep 17 00:00:00 2001 From: Tiago Gomes Date: Mon, 17 Dec 2012 23:14:54 +0000 Subject: [PATCH 22/89] Refactor the way we set the inference solver --- packages/CLPBN/clpbn.yap | 175 +++++++++++++++++--------- packages/CLPBN/clpbn/horus.yap | 16 +-- packages/CLPBN/clpbn/horus_ground.yap | 3 +- packages/CLPBN/pfl.yap | 4 +- 4 files changed, 116 insertions(+), 82 deletions(-) diff --git a/packages/CLPBN/clpbn.yap b/packages/CLPBN/clpbn.yap index 5ace917a2..72d62dea6 100644 --- a/packages/CLPBN/clpbn.yap +++ b/packages/CLPBN/clpbn.yap @@ -3,6 +3,7 @@ [{}/1, clpbn_flag/2, set_clpbn_flag/2, + set_solver/1, set_em_solver/1, clpbn_flag/3, clpbn_key/2, @@ -68,9 +69,9 @@ %% ]). :- use_module('clpbn/pgrammar', - [init_pcg_solver/4, - run_pcg_solver/3, - pcg_init_graph/0 + [pcg_init_graph/0, + init_pcg_solver/4, + run_pcg_solver/3 ]). :- use_module('clpbn/horus_ground', @@ -126,66 +127,89 @@ :- dynamic solver/1, - output/1, - use/1, + em_solver/1, suppress_attribute_display/1, parameter_softening/1, - em_solver/1, - use_parfactors/1. + use_parfactors/1, + output/1, + use/1. :- meta_predicate probability(:,-), conditional_probability(:,:,-). solver(ve). em_solver(bp). -%output(xbif(user_error)). -%output(gviz(user_error)). -output(no). suppress_attribute_display(false). parameter_softening(m_estimate(10)). use_parfactors(off). +output(no). +%output(xbif(user_error)). +%output(gviz(user_error)). + +ground_solver(ve). +ground_solver(hve). +ground_solver(jt). +ground_solver(bdd). +ground_solver(bp). +ground_solver(cbp). +ground_solver(gibbs). + +lifted_solver(lve). +lifted_solver(lkc). +lifted_solver(lbp). -clpbn_flag(Flag,Option) :- +clpbn_flag(Flag, Option) :- clpbn_flag(Flag, Option, Option). set_clpbn_flag(Flag,Option) :- clpbn_flag(Flag, _, Option). -clpbn_flag(output,Before,After) :- - retract(output(Before)), - assert(output(After)). clpbn_flag(solver,Before,After) :- retract(solver(Before)), assert(solver(After)). + clpbn_flag(em_solver,Before,After) :- retract(em_solver(Before)), assert(em_solver(After)). + clpbn_flag(bnt_solver,Before,After) :- retract(bnt:bnt_solver(Before)), assert(bnt:bnt_solver(After)). + clpbn_flag(bnt_path,Before,After) :- retract(bnt:bnt_path(Before)), assert(bnt:bnt_path(After)). + clpbn_flag(bnt_model,Before,After) :- retract(bnt:bnt_model(Before)), assert(bnt:bnt_model(After)). 
+ clpbn_flag(suppress_attribute_display,Before,After) :- retract(suppress_attribute_display(Before)), assert(suppress_attribute_display(After)). + clpbn_flag(parameter_softening,Before,After) :- retract(parameter_softening(Before)), assert(parameter_softening(After)). + clpbn_flag(use_factors,Before,After) :- retract(use_parfactors(Before)), assert(use_parfactors(After)). +clpbn_flag(output,Before,After) :- + retract(output(Before)), + assert(output(After)). + +set_solver(Solver) :- + set_clpbn_flag(solver,Solver). + set_em_solver(Solver) :- - set_clpbn_flag(em_solver, Solver). + set_clpbn_flag(em_solver,Solver). {_} :- solver(none), !. -{Var = Key with Dist} :- +{ Var = Key with Dist } :- put_atts(El,[key(Key),dist(DistInfo,Parents)]), dist(Dist, DistInfo, Key, Parents), add_evidence(Var,Key,DistInfo,El) @@ -209,8 +233,10 @@ init_clpbn_vars(El) :- create_mutable(El, Mutable), b_setval(clpbn_qvars, Mutable). -check_constraint(Constraint, _, _, Constraint) :- var(Constraint), !. -check_constraint((A->D), _, _, (A->D)) :- var(A), !. +check_constraint(Constraint, _, _, Constraint) :- + var(Constraint), !. +check_constraint((A->D), _, _, (A->D)) :- + var(A), !. check_constraint((([A|B].L)->D), Vars, NVars, (([A|B].NL)->D)) :- !, check_cpt_input_vars(L, Vars, NVars, NL). check_constraint(Dist, _, _, Dist). @@ -246,7 +272,8 @@ clpbn_marginalise(V, Dist) :- % project_attributes(GVars0, _AVars0) :- use_parfactors(on), - clpbn_flag(solver, Solver), Solver \= fove, !, + clpbn_flag(solver, Solver), + ground_solver(Solver), generate_network(GVars0, GKeys, Keys, Factors, Evidence), b_setval(clpbn_query_variables, f(GVars0,Evidence)), simplify_query(GVars0, GVars), @@ -329,34 +356,27 @@ get_rid_of_ev_vars([V|LVs0],[V|LVs]) :- get_rid_of_ev_vars(LVs0,LVs). -% do nothing if we don't have query variables to compute. -write_out(_, [], _, _) :- !. -write_out(graphs, _, AVars, _) :- - clpbn2graph(AVars). -write_out(ve, GVars, AVars, DiffVars) :- - ve(GVars, AVars, DiffVars). -write_out(jt, GVars, AVars, DiffVars) :- - jt(GVars, AVars, DiffVars). -write_out(bdd, GVars, AVars, DiffVars) :- - bdd(GVars, AVars, DiffVars). -write_out(bp, _GVars, _AVars, _DiffVars) :- - writeln('interface not supported any longer'). -write_out(gibbs, GVars, AVars, DiffVars) :- - gibbs(GVars, AVars, DiffVars). -write_out(bnt, GVars, AVars, DiffVars) :- - do_bnt(GVars, AVars, DiffVars). -write_out(fove, GVars, AVars, DiffVars) :- - call_horus_lifted_solver(GVars, AVars, DiffVars). - -% call a solver with keys, not actual variables -call_ground_solver(bp, GVars, GoalKeys, Keys, Factors, Evidence) :- !, - call_horus_ground_solver(GVars, GoalKeys, Keys, Factors, Evidence, _Answ). -call_ground_solver(bdd, GVars, GoalKeys, Keys, Factors, Evidence) :- !, - call_bdd_ground_solver(GVars, GoalKeys, Keys, Factors, Evidence, _Answ). +% Call a solver with keys, not actual variables call_ground_solver(ve, GVars, GoalKeys, Keys, Factors, Evidence) :- !, call_ve_ground_solver(GVars, GoalKeys, Keys, Factors, Evidence, _Answ). + +call_ground_solver(hve, GVars, GoalKeys, Keys, Factors, Evidence) :- !, + clpbn_horus:set_horus_flag(ground_solver, ve), + call_horus_ground_solver(GVars, GoalKeys, Keys, Factors, Evidence, _Answ). + +call_ground_solver(bdd, GVars, GoalKeys, Keys, Factors, Evidence) :- !, + call_bdd_ground_solver(GVars, GoalKeys, Keys, Factors, Evidence, _Answ). 
+ +call_ground_solver(bp, GVars, GoalKeys, Keys, Factors, Evidence) :- !, + clpbn_horus:set_horus_flag(ground_solver, bp), + call_horus_ground_solver(GVars, GoalKeys, Keys, Factors, Evidence, _Answ). + +call_ground_solver(cbp, GVars, GoalKeys, Keys, Factors, Evidence) :- !, + clpbn_horus:set_horus_flag(ground_solver, cbp), + call_horus_ground_solver(GVars, GoalKeys, Keys, Factors, Evidence, _Answ). + call_ground_solver(Solver, GVars, _GoalKeys, Keys, Factors, Evidence) :- - % traditional solver + % fall back to traditional solver b_hash_new(Hash0), foldl(gvar_in_hash, GVars, Hash0, HashI), foldl(key_to_var, Keys, AllVars, HashI, Hash1), @@ -368,6 +388,44 @@ call_ground_solver(Solver, GVars, _GoalKeys, Keys, Factors, Evidence) :- write_out(Solver, [GVars], AllVars, _), assert(use_parfactors(on)). + +% do nothing if we don't have query variables to compute. +write_out(_, [], _, _) :- !. + +write_out(graphs, _, AVars, _) :- !, + clpbn2graph(AVars). + +write_out(ve, GVars, AVars, DiffVars) :- !, + ve(GVars, AVars, DiffVars). + +write_out(jt, GVars, AVars, DiffVars) :- !, + jt(GVars, AVars, DiffVars). + +write_out(bdd, GVars, AVars, DiffVars) :- !, + bdd(GVars, AVars, DiffVars). + +write_out(gibbs, GVars, AVars, DiffVars) :- !, + gibbs(GVars, AVars, DiffVars). + +write_out(lve, GVars, AVars, DiffVars) :- !, + clpbn_horus:set_horus_flag(lifted_solver, lve), + call_horus_lifted_solver(GVars, AVars, DiffVars). + +write_out(lkc, GVars, AVars, DiffVars) :- !, + clpbn_horus:set_horus_flag(lifted_solver, lkc), + call_horus_lifted_solver(GVars, AVars, DiffVars). + +write_out(lbp, GVars, AVars, DiffVars) :- !, + clpbn_horus:set_horus_flag(lifted_solver, lbp), + call_horus_lifted_solver(GVars, AVars, DiffVars). + +write_out(bnt, GVars, AVars, DiffVars) :- !, + do_bnt(GVars, AVars, DiffVars). + +write_out(Solver, _, _, _) :- + format("Error: solver `~w' is unknown", [Solver]), + fail. + % % convert a PFL network (without constraints) % into CLP(BN) for evaluation @@ -472,14 +530,11 @@ bind_clpbn(T, Var, Key, Dist, Parents, []) :- var(T), ; fail ). -bind_clpbn(_, Var, _, _, _, _, []) :- - use(bnt), - check_if_bnt_done(Var), !. bind_clpbn(_, Var, _, _, _, _, []) :- use(ve), check_if_ve_done(Var), !. bind_clpbn(_, Var, _, _, _, _, []) :- - use(bp), + use(hve), check_if_horus_ground_solver_done(Var), !. bind_clpbn(_, Var, _, _, _, _, []) :- use(jt), @@ -487,6 +542,15 @@ bind_clpbn(_, Var, _, _, _, _, []) :- bind_clpbn(_, Var, _, _, _, _, []) :- use(bdd), check_if_bdd_done(Var), !. +bind_clpbn(_, Var, _, _, _, _, []) :- + use(bp), + check_if_horus_ground_solver_done(Var), !. +bind_clpbn(_, Var, _, _, _, _, []) :- + use(cbp), + check_if_horus_ground_solver_done(Var), !. +bind_clpbn(_, Var, _, _, _, _, []) :- + use(bnt), + check_if_bnt_done(Var), !. bind_clpbn(T, Var, Key0, _, _, _, []) :- get_atts(Var, [key(Key)]), !, ( @@ -501,7 +565,7 @@ fresh_attvar(Var, NVar) :- put_atts(NVar, LAtts). % I will now allow two CLPBN variables to be bound together. -%bind_clpbns(Key, Dist, Parents, Key, Dist, Parents). +% bind_clpbns(Key, Dist, Parents, Key, Dist, Parents). bind_clpbns(Key, Dist, Parents, Key1, Dist1, Parents1) :- Key == Key1, !, get_dist(Dist,_Type,_Domain,_Table), @@ -611,19 +675,6 @@ clpbn_finalize_solver(State) :- finalize_horus_ground_solver(Info). clpbn_finalize_solver(_State). - -ground_solver(ve). -ground_solver(hve). -ground_solver(jt). -ground_solver(bdd). -ground_solver(bp). -ground_solver(cbp). -ground_solver(gibbs). - -lifted_solver(lve). -lifted_solver(lkc). -lifted_solver(lbp). 
- % % This is a routine to start a solver, called by the learning procedures (ie, em). % diff --git a/packages/CLPBN/clpbn/horus.yap b/packages/CLPBN/clpbn/horus.yap index 60c50eb21..0c40f9765 100644 --- a/packages/CLPBN/clpbn/horus.yap +++ b/packages/CLPBN/clpbn/horus.yap @@ -5,8 +5,7 @@ ********************************************************/ :- module(clpbn_horus, - [set_solver/1, - set_horus_flag/1, + [set_horus_flag/1, cpp_create_lifted_network/3, cpp_create_ground_network/4, cpp_set_parfactors_params/2, @@ -35,19 +34,6 @@ warning :- -> true ; warning. -set_solver(ve) :- !, set_clpbn_flag(solver,ve). -set_solver(bdd) :- !, set_clpbn_flag(solver,bdd). -set_solver(jt) :- !, set_clpbn_flag(solver,jt). -set_solver(gibbs) :- !, set_clpbn_flag(solver,gibbs). -set_solver(lve) :- !, set_clpbn_flag(solver,fove), set_horus_flag(lifted_solver, lve). -set_solver(lbp) :- !, set_clpbn_flag(solver,fove), set_horus_flag(lifted_solver, lbp). -set_solver(lkc) :- !, set_clpbn_flag(solver,fove), set_horus_flag(lifted_solver, lkc). -set_solver(hve) :- !, set_clpbn_flag(solver,bp), set_horus_flag(ground_solver, ve). -set_solver(bp) :- !, set_clpbn_flag(solver,bp), set_horus_flag(ground_solver, bp). -set_solver(cbp) :- !, set_clpbn_flag(solver,bp), set_horus_flag(ground_solver, cbp). -set_solver(S) :- throw(error('unknown solver ', S)). - - set_horus_flag(K,V) :- cpp_set_horus_flag(K,V). diff --git a/packages/CLPBN/clpbn/horus_ground.yap b/packages/CLPBN/clpbn/horus_ground.yap index e170e796a..36cf3df00 100644 --- a/packages/CLPBN/clpbn/horus_ground.yap +++ b/packages/CLPBN/clpbn/horus_ground.yap @@ -20,8 +20,7 @@ cpp_set_factors_params/2, cpp_run_ground_solver/3, cpp_set_vars_information/2, - cpp_free_ground_network/1, - set_solver/1 + cpp_free_ground_network/1 ]). :- use_module(library('clpbn/dists'), diff --git a/packages/CLPBN/pfl.yap b/packages/CLPBN/pfl.yap index f3f02062f..5272aa8f5 100644 --- a/packages/CLPBN/pfl.yap +++ b/packages/CLPBN/pfl.yap @@ -22,15 +22,13 @@ :- reexport(library(clpbn), [clpbn_flag/2 as pfl_flag, set_clpbn_flag/2 as set_pfl_flag, + set_solver/1, set_em_solver/1, conditional_probability/3, pfl_init_solver/5, pfl_run_solver/3 ]). -:- reexport(library(clpbn/horus), - [set_solver/1]). - :- reexport(library(clpbn/aggregates), [avg_factors/5]). From 5fa5dda579724cae334436812162cf75555e893d Mon Sep 17 00:00:00 2001 From: Tiago Gomes Date: Mon, 17 Dec 2012 23:29:21 +0000 Subject: [PATCH 23/89] Remove BP from clpbn interface to EM --- packages/CLPBN/clpbn.yap | 38 ++++++++++++++++---------------------- 1 file changed, 16 insertions(+), 22 deletions(-) diff --git a/packages/CLPBN/clpbn.yap b/packages/CLPBN/clpbn.yap index 72d62dea6..91b148b66 100644 --- a/packages/CLPBN/clpbn.yap +++ b/packages/CLPBN/clpbn.yap @@ -63,11 +63,6 @@ run_gibbs_solver/3 ]). -%% :- use_module('clpbn/bnt', -%% [do_bnt/3, -%% check_if_bnt_done/1 -%% ]). - :- use_module('clpbn/pgrammar', [pcg_init_graph/0, init_pcg_solver/4, @@ -90,6 +85,11 @@ finalize_horus_lifted_solver/1 ]). +%% :- use_module('clpbn/bnt', +%% [do_bnt/3, +%% check_if_bnt_done/1 +%% ]). + :- use_module('clpbn/dists', [dist/4, get_dist/4, @@ -623,21 +623,18 @@ clpbn_init_solver(LVs, Vs0, VarsWithUnboundKeys, State) :- solver(Solver), clpbn_init_solver(Solver, LVs, Vs0, VarsWithUnboundKeys, State). -clpbn_init_solver(gibbs, LVs, Vs0, VarsWithUnboundKeys, State) :- - init_gibbs_solver(LVs, Vs0, VarsWithUnboundKeys, State). - clpbn_init_solver(ve, LVs, Vs0, VarsWithUnboundKeys, State) :- init_ve_solver(LVs, Vs0, VarsWithUnboundKeys, State). 
-clpbn_init_solver(bp, LVs, Vs0, VarsWithUnboundKeys, State) :- - init_horus_ground_solver(LVs, Vs0, VarsWithUnboundKeys, State). - clpbn_init_solver(jt, LVs, Vs0, VarsWithUnboundKeys, State) :- init_jt_solver(LVs, Vs0, VarsWithUnboundKeys, State). clpbn_init_solver(bdd, LVs, Vs0, VarsWithUnboundKeys, State) :- init_bdd_solver(LVs, Vs0, VarsWithUnboundKeys, State). +clpbn_init_solver(gibbs, LVs, Vs0, VarsWithUnboundKeys, State) :- + init_gibbs_solver(LVs, Vs0, VarsWithUnboundKeys, State). + clpbn_init_solver(pcg, LVs, Vs0, VarsWithUnboundKeys, State) :- init_pcg_solver(LVs, Vs0, VarsWithUnboundKeys, State). @@ -650,29 +647,26 @@ clpbn_run_solver(LVs, LPs, State) :- solver(Solver), clpbn_run_solver(Solver, LVs, LPs, State). -clpbn_run_solver(gibbs, LVs, LPs, State) :- - run_gibbs_solver(LVs, LPs, State). - clpbn_run_solver(ve, LVs, LPs, State) :- run_ve_solver(LVs, LPs, State). -clpbn_run_solver(bp, LVs, LPs, State) :- - run_horus_ground_solver(LVs, LPs, State). - clpbn_run_solver(jt, LVs, LPs, State) :- run_jt_solver(LVs, LPs, State). clpbn_run_solver(bdd, LVs, LPs, State) :- run_bdd_solver(LVs, LPs, State). +clpbn_run_solver(gibbs, LVs, LPs, State) :- + run_gibbs_solver(LVs, LPs, State). + clpbn_run_solver(pcg, LVs, LPs, State) :- run_pcg_solver(LVs, LPs, State). -clpbn_finalize_solver(State) :- - solver(bp), !, - functor(State, _, Last), - arg(Last, State, Info), - finalize_horus_ground_solver(Info). +%clpbn_finalize_solver(State) :- +% solver(bp), !, +% functor(State, _, Last), +% arg(Last, State, Info), +% finalize_horus_ground_solver(Info). clpbn_finalize_solver(_State). % From c7a4adcefbbd2f8f5ae0d1a2bdbd8a886da78fc4 Mon Sep 17 00:00:00 2001 From: Tiago Gomes Date: Mon, 17 Dec 2012 23:59:52 +0000 Subject: [PATCH 24/89] Fix finalizing horus ground solvers on EM --- packages/CLPBN/clpbn.yap | 15 ++++++--------- packages/CLPBN/learning/em.yap | 10 ++++++++-- 2 files changed, 14 insertions(+), 11 deletions(-) diff --git a/packages/CLPBN/clpbn.yap b/packages/CLPBN/clpbn.yap index 91b148b66..53d31b653 100644 --- a/packages/CLPBN/clpbn.yap +++ b/packages/CLPBN/clpbn.yap @@ -10,9 +10,9 @@ clpbn_init_graph/1, clpbn_init_solver/4, clpbn_run_solver/3, - clpbn_finalize_solver/1, pfl_init_solver/5, pfl_run_solver/3, + pfl_end_solver/1, probability/2, conditional_probability/3, use_parfactors/1, @@ -662,17 +662,9 @@ clpbn_run_solver(gibbs, LVs, LPs, State) :- clpbn_run_solver(pcg, LVs, LPs, State) :- run_pcg_solver(LVs, LPs, State). -%clpbn_finalize_solver(State) :- -% solver(bp), !, -% functor(State, _, Last), -% arg(Last, State, Info), -% finalize_horus_ground_solver(Info). -clpbn_finalize_solver(_State). - % % This is a routine to start a solver, called by the learning procedures (ie, em). % - pfl_init_solver(QueryKeys, AllKeys, Factors, Evidence, State) :- em_solver(Solver), (lifted_solver(Solver) -> @@ -729,6 +721,11 @@ pfl_run_solver(LVs, LPs, State, bp) :- !, pfl_run_solver(LVs, LPs, State, cbp) :- !, run_horus_ground_solver(LVs, LPs, State). +pfl_end_solver(State) :- + (em_solver(hve) ; em_solver(bp) ; em_solver(cbp)), + finalize_horus_ground_solver(State). +pfl_end_solver(_State). + add_keys(Key1+V1,_Key2,Key1+V1). 
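
For reference, a minimal sketch of the lifecycle defined by these three
hooks (argument names are placeholders; the em.yap change below plugs
pfl_end_solver/1 into its end_em/1 step):

    pfl_init_solver(QueryKeys, AllKeys, Factors, Evidence, State),
    pfl_run_solver(QueryKeys, Marginals, State),
    pfl_end_solver(State)

Only the Horus ground solvers (hve, bp and cbp) need the explicit end
step, which frees the C++ network; any other solver falls through to the
catch-all pfl_end_solver(_State) clause.
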
diff --git a/packages/CLPBN/learning/em.yap b/packages/CLPBN/learning/em.yap index 0e28d3136..8abe4e92d 100644 --- a/packages/CLPBN/learning/em.yap +++ b/packages/CLPBN/learning/em.yap @@ -13,9 +13,9 @@ [clpbn_init_graph/1, clpbn_init_solver/4, clpbn_run_solver/3, - clpbn_finalize_solver/1, pfl_init_solver/5, pfl_run_solver/3, + pfl_end_solver/1, conditional_probability/3, clpbn_flag/2 ]). @@ -75,7 +75,7 @@ em(Items, MaxError, MaxIts, Tables, Likelihood) :- catch(init_em(Items, State),Error,handle_em(Error)), em_loop(0, 0.0, State, MaxError, MaxIts, Likelihood, Tables), - clpbn_finalize_solver(State), + end_em(State), assert(em_found(Tables, Likelihood)), fail. % get rid of new random variables the easy way :) @@ -89,6 +89,12 @@ handle_em(error(repeated_parents)) :- !, handle_em(Error) :- throw(Error). + +end_em(state(_AllDists, _AllDistInstances, _MargKeys, SolverState)) :- + clpbn:use_parfactors(on), !, + pfl_end_solver(SolverState). +end_em(_). + % This gets you an initial configuration. If there is a lot of evidence % tables may be filled in close to optimal, otherwise they may be % close to uniform. From 4b0acbf8c1b959ffbccc07f6776906db9dbe3ffb Mon Sep 17 00:00:00 2001 From: Tiago Gomes Date: Tue, 18 Dec 2012 12:11:45 +0000 Subject: [PATCH 25/89] Cleanups --- packages/CLPBN/clpbn.yap | 6 +-- packages/CLPBN/clpbn/horus_ground.yap | 38 ++++--------- packages/CLPBN/clpbn/horus_lifted.yap | 77 +++++++++++---------------- 3 files changed, 44 insertions(+), 77 deletions(-) diff --git a/packages/CLPBN/clpbn.yap b/packages/CLPBN/clpbn.yap index 53d31b653..fb7490600 100644 --- a/packages/CLPBN/clpbn.yap +++ b/packages/CLPBN/clpbn.yap @@ -74,7 +74,7 @@ check_if_horus_ground_solver_done/1, init_horus_ground_solver/5, run_horus_ground_solver/3, - finalize_horus_ground_solver/1 + end_horus_ground_solver/1 ]). :- use_module('clpbn/horus_lifted', @@ -82,7 +82,7 @@ check_if_horus_lifted_solver_done/1, init_horus_lifted_solver/4, run_horus_lifted_solver/3, - finalize_horus_lifted_solver/1 + end_horus_lifted_solver/1 ]). %% :- use_module('clpbn/bnt', @@ -723,7 +723,7 @@ pfl_run_solver(LVs, LPs, State, cbp) :- !, pfl_end_solver(State) :- (em_solver(hve) ; em_solver(bp) ; em_solver(cbp)), - finalize_horus_ground_solver(State). + end_horus_ground_solver(State). pfl_end_solver(_State). diff --git a/packages/CLPBN/clpbn/horus_ground.yap b/packages/CLPBN/clpbn/horus_ground.yap index 36cf3df00..6a7a6a9dd 100644 --- a/packages/CLPBN/clpbn/horus_ground.yap +++ b/packages/CLPBN/clpbn/horus_ground.yap @@ -12,53 +12,40 @@ check_if_horus_ground_solver_done/1, init_horus_ground_solver/5, run_horus_ground_solver/3, - finalize_horus_ground_solver/1 + end_horus_ground_solver/1 ]). :- use_module(horus, [cpp_create_ground_network/4, cpp_set_factors_params/2, cpp_run_ground_solver/3, - cpp_set_vars_information/2, - cpp_free_ground_network/1 + cpp_free_ground_network/1, + cpp_set_vars_information/2 ]). -:- use_module(library('clpbn/dists'), - [dist/4, - get_dist_domain/2, - get_dist_domain_size/2, - get_dist_params/2 - ]). +:- use_module(library('clpbn/numbers')). :- use_module(library('clpbn/display'), [clpbn_bind_vals/3]). -:- use_module(library('clpbn/numbers')). +:- use_module(library(pfl), + [skolem/2]). :- use_module(library(charsio), [term_to_atom/2]). -:- use_module(library(pfl), - [skolem/2]). - :- use_module(library(maplist)). -:- use_module(library(lists)). - -:- use_module(library(atts)). - -:- use_module(library(bhash)). 
- call_horus_ground_solver(QueryVars, QueryKeys, AllKeys, Factors, Evidence, Output) :- init_horus_ground_solver(QueryKeys, AllKeys, Factors, Evidence, State), run_horus_ground_solver([QueryKeys], Solutions, State), clpbn_bind_vals([QueryVars], Solutions, Output), - finalize_horus_ground_solver(State). + end_horus_ground_solver(State). run_horus_ground_solver(QueryKeys, Solutions, state(Network,Hash,Id)) :- - %get_dists_parameters(DistIds, DistsParams), + %maplist(get_dists_parameters, DistIds, DistsParams), %cpp_set_factors_params(Network, DistsParams), lists_of_keys_to_ids(QueryKeys, QueryIds, Hash, _, Id, _), cpp_run_ground_solver(Network, QueryIds, Solutions). @@ -74,8 +61,7 @@ init_horus_ground_solver(QueryKeys, AllKeys, Factors, Evidence, state(Network,Ha cpp_set_vars_information(KeysAtoms, StatesNames). -% TODO this is not beeing called! -finalize_horus_ground_solver(state(Network,_Hash,_Id)) :- +end_horus_ground_solver(state(Network,_Hash,_Id)) :- cpp_free_ground_network(Network). @@ -88,9 +74,3 @@ get_var_information(_:Key, Domain) :- !, get_var_information(Key, Domain) :- skolem(Key, Domain). - -%get_dists_parameters([],[]). -%get_dists_parameters([Id|Ids], [dist(Id, Params)|DistsInfo]) :- -% get_dist_params(Id, Params), -% get_dists_parameters(Ids, DistsInfo). - diff --git a/packages/CLPBN/clpbn/horus_lifted.yap b/packages/CLPBN/clpbn/horus_lifted.yap index b76e08378..174cfd870 100644 --- a/packages/CLPBN/clpbn/horus_lifted.yap +++ b/packages/CLPBN/clpbn/horus_lifted.yap @@ -12,7 +12,7 @@ check_if_horus_lifted_solver_done/1, init_horus_lifted_solver/4, run_horus_lifted_solver/3, - finalize_horus_lifted_solver/1 + end_horus_lifted_solver/1 ]). :- use_module(horus, @@ -25,49 +25,48 @@ :- use_module(library('clpbn/display'), [clpbn_bind_vals/3]). -:- use_module(library('clpbn/dists'), - [get_dist_params/2]). - :- use_module(library(pfl), [factor/6, skolem/2, get_pfl_parameters/2 ]). +:- use_module(library(maplist)). + call_horus_lifted_solver(QueryVars, AllVars, Output) :- init_horus_lifted_solver(_, AllVars, _, State), run_horus_lifted_solver(QueryVars, Solutions, State), clpbn_bind_vals(QueryVars, Solutions, Output), - finalize_horus_lifted_solver(State). + end_horus_lifted_solver(State). -init_horus_lifted_solver(_, AllVars, _, state(ParfactorList, DistIds)) :- +init_horus_lifted_solver(_, AllVars, _, state(Network, DistIds)) :- get_parfactors(Parfactors), get_dist_ids(Parfactors, DistIds0), sort(DistIds0, DistIds), - get_observed_vars(AllVars, ObservedVars), + get_observed_keys(AllVars, ObservedKeys), %writeln(parfactors:Parfactors:'\n'), - %writeln(evidence:ObservedVars:'\n'), - cpp_create_lifted_network(Parfactors, ObservedVars, ParfactorList). + %writeln(evidence:ObservedKeys:'\n'), + cpp_create_lifted_network(Parfactors, ObservedKeys, Network). -run_horus_lifted_solver(QueryVars, Solutions, state(ParfactorList, DistIds)) :- - get_query_keys(QueryVars, QueryKeys), +run_horus_lifted_solver(QueryVars, Solutions, state(Network, DistIds)) :- + maplist(get_query_keys, QueryVars, QueryKeys), get_dists_parameters(DistIds, DistsParams), + %writeln(distparams1:DistsParams), + %maplist(get_pfl_parameters, DistIds,DistsParams2), + %writeln(distparams1:DistsParams2), %writeln(dists:DistsParams), writeln(''), - cpp_set_parfactors_params(ParfactorList, DistsParams), - cpp_run_lifted_solver(ParfactorList, QueryKeys, Solutions). + cpp_set_parfactors_params(Network, DistsParams), + cpp_run_lifted_solver(Network, QueryKeys, Solutions). 
-finalize_horus_lifted_solver(state(ParfactorList, _)) :- - cpp_free_lifted_network(ParfactorList). - - -:- table get_parfactors/1. +end_horus_lifted_solver(state(Network, _)) :- + cpp_free_lifted_network(Network). % -% enumerate all parfactors and enumerate their domain as tuples. +% Enumerate all parfactors and enumerate their domain as tuples. % % output is list of pf( % Id: an unique number @@ -76,33 +75,27 @@ finalize_horus_lifted_solver(state(ParfactorList, _)) :- % Phi: the table following usual CLP(BN) convention % Tuples: ground bindings for variables in Vs, of the form [fv(x,y)] % +:- table get_parfactors/1. + get_parfactors(Factors) :- findall(F, is_factor(F), Factors). is_factor(pf(Id, Ks, Rs, Phi, Tuples)) :- factor(_Type, Id, Ks, Vs, Table, Constraints), - get_ranges(Ks,Rs), + maplist(get_range, Ks, Rs), Table \= avg, gen_table(Table, Phi), all_tuples(Constraints, Vs, Tuples). -get_ranges([],[]). -get_ranges(K.Ks, Range.Rs) :- !, +get_range(K, Range) :- skolem(K,Domain), - length(Domain,Range), - get_ranges(Ks, Rs). + length(Domain,Range). gen_table(Table, Phi) :- - ( - is_list(Table) - -> - Phi = Table - ; - call(user:Table, Phi) - ). + ( is_list(Table) -> Phi = Table ; call(user:Table, Phi) ). all_tuples(Constraints, Tuple, Tuples) :- @@ -120,26 +113,20 @@ get_dist_ids(pf(Id, _, _, _, _).Parfactors, Id.DistIds) :- get_dist_ids(Parfactors, DistIds). -get_observed_vars([], []). -get_observed_vars(V.AllAttVars, [K:E|ObservedVars]) :- +get_observed_keys([], []). +get_observed_keys(V.AllAttVars, [K:E|ObservedKeys]) :- clpbn:get_atts(V,[key(K)]), ( clpbn:get_atts(V,[evidence(E)]) ; pfl:evidence(K,E) ), !, - get_observed_vars(AllAttVars, ObservedVars). -get_observed_vars(V.AllAttVars, ObservedVars) :- + get_observed_keys(AllAttVars, ObservedKeys). +get_observed_keys(V.AllAttVars, ObservedKeys) :- clpbn:get_atts(V,[key(_K)]), !, - get_observed_vars(AllAttVars, ObservedVars). + get_observed_keys(AllAttVars, ObservedKeys). get_query_keys([], []). -get_query_keys(E1.L1, E2.L2) :- - get_query_keys_2(E1,E2), - get_query_keys(L1, L2). - - -get_query_keys_2([], []). -get_query_keys_2(V.AttVars, [RV|RVs]) :- - clpbn:get_atts(V,[key(RV)]), !, - get_query_keys_2(AttVars, RVs). +get_query_keys(V.AttVars, K.Ks) :- + clpbn:get_atts(V,[key(K)]), !, + get_query_keys(AttVars, Ks). get_dists_parameters([], []). From 0a661b0462b56527fc7fe08f574ee1c90232ab8f Mon Sep 17 00:00:00 2001 From: Tiago Gomes Date: Tue, 18 Dec 2012 22:47:43 +0000 Subject: [PATCH 26/89] More cleanups --- packages/CLPBN/clpbn/horus.yap | 15 +++--- packages/CLPBN/clpbn/horus_ground.yap | 59 +++++++++++++--------- packages/CLPBN/clpbn/horus_lifted.yap | 43 +++++----------- packages/CLPBN/horus/HorusYap.cpp | 70 ++++++++++++++++++--------- 4 files changed, 99 insertions(+), 88 deletions(-) diff --git a/packages/CLPBN/clpbn/horus.yap b/packages/CLPBN/clpbn/horus.yap index 0c40f9765..5c4c7b688 100644 --- a/packages/CLPBN/clpbn/horus.yap +++ b/packages/CLPBN/clpbn/horus.yap @@ -8,8 +8,8 @@ [set_horus_flag/1, cpp_create_lifted_network/3, cpp_create_ground_network/4, - cpp_set_parfactors_params/2, - cpp_set_factors_params/2, + cpp_set_parfactors_params/3, + cpp_set_factors_params/3, cpp_run_lifted_solver/3, cpp_run_ground_solver/3, cpp_set_vars_information/2, @@ -18,8 +18,9 @@ cpp_free_ground_network/1 ]). -:- use_module(library(clpbn), - [set_clpbn_flag/2]). + +:- catch(load_foreign_files([horus], [], init_predicates), _, patch_things_up) + -> true ; warning. 
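+
+% patch_things_up/0 and warning/0 below are the fallbacks used when the
+% Horus foreign library cannot be loaded.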
patch_things_up :- @@ -27,11 +28,7 @@ patch_things_up :- warning :- - format(user_error,"Horus library not installed: cannot use bp, fove~n.",[]). - - -:- catch(load_foreign_files([horus], [], init_predicates), _, patch_things_up) - -> true ; warning. + format(user_error,"Horus library not installed: cannot use hve, bp, cbp, lve, lkc and lbp~n.",[]). set_horus_flag(K,V) :- cpp_set_horus_flag(K,V). diff --git a/packages/CLPBN/clpbn/horus_ground.yap b/packages/CLPBN/clpbn/horus_ground.yap index 6a7a6a9dd..f696bccc0 100644 --- a/packages/CLPBN/clpbn/horus_ground.yap +++ b/packages/CLPBN/clpbn/horus_ground.yap @@ -17,19 +17,24 @@ :- use_module(horus, [cpp_create_ground_network/4, - cpp_set_factors_params/2, + cpp_set_factors_params/3, cpp_run_ground_solver/3, cpp_free_ground_network/1, cpp_set_vars_information/2 ]). -:- use_module(library('clpbn/numbers')). +:- use_module(library('clpbn/numbers'), + [lists_of_keys_to_ids/6, + keys_to_numbers/7 + ]). :- use_module(library('clpbn/display'), [clpbn_bind_vals/3]). :- use_module(library(pfl), - [skolem/2]). + [get_pfl_parameters/2, + skolem/2 + ]). :- use_module(library(charsio), [term_to_atom/2]). @@ -37,40 +42,48 @@ :- use_module(library(maplist)). -call_horus_ground_solver(QueryVars, QueryKeys, AllKeys, Factors, Evidence, Output) :- +call_horus_ground_solver(QueryVars, QueryKeys, AllKeys, Factors, Evidence, + Output) :- init_horus_ground_solver(QueryKeys, AllKeys, Factors, Evidence, State), run_horus_ground_solver([QueryKeys], Solutions, State), clpbn_bind_vals([QueryVars], Solutions, Output), end_horus_ground_solver(State). - -run_horus_ground_solver(QueryKeys, Solutions, state(Network,Hash,Id)) :- - %maplist(get_dists_parameters, DistIds, DistsParams), - %cpp_set_factors_params(Network, DistsParams), + +init_horus_ground_solver(QueryKeys, AllKeys, Factors, Evidence, + state(Network,Hash,Id,DistIds)) :- + factors_type(Factors, Type), + keys_to_numbers(AllKeys, Factors, Evidence, Hash, Id, FacIds, EvIds), + %writeln(network:(type=Type, factors=FacIds, evidence=EvIds)), nl, + cpp_create_ground_network(Type, FacIds, EvIds, Network), + %maplist(term_to_atom, AllKeys, VarNames), + %maplist(get_domain, AllKeys, Domains), + %cpp_set_vars_information(VarNames, Domains), + maplist(get_dist_id, FacIds, DistIds0), + sort(DistIds0, DistIds). + + +run_horus_ground_solver(QueryKeys, Solutions, + state(Network,Hash,Id, DistIds)) :- lists_of_keys_to_ids(QueryKeys, QueryIds, Hash, _, Id, _), + %maplist(get_pfl_parameters, DistIds, DistParams), + %cpp_set_factors_params(Network, DistIds, DistParams), cpp_run_ground_solver(Network, QueryIds, Solutions). -init_horus_ground_solver(QueryKeys, AllKeys, Factors, Evidence, state(Network,Hash4,Id4)) :- - get_factors_type(Factors, Type), - keys_to_numbers(AllKeys, Factors, Evidence, Hash4, Id4, FactorIds, EvidenceIds), - cpp_create_ground_network(Type, FactorIds, EvidenceIds, Network), - %writeln(network:(Type, FactorIds, EvidenceIds, Network)), writeln(''), - maplist(get_var_information, AllKeys, StatesNames), - maplist(term_to_atom, AllKeys, KeysAtoms), - cpp_set_vars_information(KeysAtoms, StatesNames). - - -end_horus_ground_solver(state(Network,_Hash,_Id)) :- +end_horus_ground_solver(state(Network,_Hash,_Id, _DistIds)) :- cpp_free_ground_network(Network). -get_factors_type([f(bayes, _, _)|_], bayes) :- ! . -get_factors_type([f(markov, _, _)|_], markov) :- ! . +factors_type([f(bayes, _, _)|_], bayes) :- ! . +factors_type([f(markov, _, _)|_], markov) :- ! . 
-get_var_information(_:Key, Domain) :- !, +get_dist_id(f(_, _, _, DistId), DistId). + + +get_domain(_:Key, Domain) :- !, skolem(Key, Domain). -get_var_information(Key, Domain) :- +get_domain(Key, Domain) :- skolem(Key, Domain). diff --git a/packages/CLPBN/clpbn/horus_lifted.yap b/packages/CLPBN/clpbn/horus_lifted.yap index 174cfd870..0dd13152f 100644 --- a/packages/CLPBN/clpbn/horus_lifted.yap +++ b/packages/CLPBN/clpbn/horus_lifted.yap @@ -17,7 +17,7 @@ :- use_module(horus, [cpp_create_lifted_network/3, - cpp_set_parfactors_params/2, + cpp_set_parfactors_params/3, cpp_run_lifted_solver/3, cpp_free_lifted_network/1 ]). @@ -43,22 +43,17 @@ call_horus_lifted_solver(QueryVars, AllVars, Output) :- init_horus_lifted_solver(_, AllVars, _, state(Network, DistIds)) :- get_parfactors(Parfactors), - get_dist_ids(Parfactors, DistIds0), - sort(DistIds0, DistIds), get_observed_keys(AllVars, ObservedKeys), - %writeln(parfactors:Parfactors:'\n'), - %writeln(evidence:ObservedKeys:'\n'), - cpp_create_lifted_network(Parfactors, ObservedKeys, Network). + %writeln(network:(parfactors=Parfactors, evidence=ObservedKeys)), nl, + cpp_create_lifted_network(Parfactors, ObservedKeys, Network), + maplist(get_dist_id, Parfactors, DistIds0), + sort(DistIds0, DistIds). run_horus_lifted_solver(QueryVars, Solutions, state(Network, DistIds)) :- maplist(get_query_keys, QueryVars, QueryKeys), - get_dists_parameters(DistIds, DistsParams), - %writeln(distparams1:DistsParams), - %maplist(get_pfl_parameters, DistIds,DistsParams2), - %writeln(distparams1:DistsParams2), - %writeln(dists:DistsParams), writeln(''), - cpp_set_parfactors_params(Network, DistsParams), + %maplist(get_pfl_parameters, DistIds,DistsParams), + %cpp_set_parfactors_params(Network, DistIds, DistsParams), cpp_run_lifted_solver(Network, QueryKeys, Solutions). @@ -68,13 +63,6 @@ end_horus_lifted_solver(state(Network, _)) :- % % Enumerate all parfactors and enumerate their domain as tuples. % -% output is list of pf( -% Id: an unique number -% Ks: a list of keys, also known as the pf formula [a(X),b(Y),c(X,Y)] -% Vs: the list of free variables [X,Y] -% Phi: the table following usual CLP(BN) convention -% Tuples: ground bindings for variables in Vs, of the form [fv(x,y)] -% :- table get_parfactors/1. get_parfactors(Factors) :- @@ -90,8 +78,8 @@ is_factor(pf(Id, Ks, Rs, Phi, Tuples)) :- get_range(K, Range) :- - skolem(K,Domain), - length(Domain,Range). + skolem(K, Domain), + length(Domain, Range). gen_table(Table, Phi) :- @@ -108,9 +96,7 @@ run(Goal.Constraints) :- run(Constraints). -get_dist_ids([], []). -get_dist_ids(pf(Id, _, _, _, _).Parfactors, Id.DistIds) :- - get_dist_ids(Parfactors, DistIds). +get_dist_id(pf(DistId, _, _, _, _), DistId). get_observed_keys([], []). @@ -118,8 +104,7 @@ get_observed_keys(V.AllAttVars, [K:E|ObservedKeys]) :- clpbn:get_atts(V,[key(K)]), ( clpbn:get_atts(V,[evidence(E)]) ; pfl:evidence(K,E) ), !, get_observed_keys(AllAttVars, ObservedKeys). -get_observed_keys(V.AllAttVars, ObservedKeys) :- - clpbn:get_atts(V,[key(_K)]), !, +get_observed_keys(_V.AllAttVars, ObservedKeys) :- get_observed_keys(AllAttVars, ObservedKeys). @@ -128,9 +113,3 @@ get_query_keys(V.AttVars, K.Ks) :- clpbn:get_atts(V,[key(K)]), !, get_query_keys(AttVars, Ks). - -get_dists_parameters([], []). -get_dists_parameters([Id|Ids], [dist(Id, Params)|DistsInfo]) :- - get_pfl_parameters(Id, Params), - get_dists_parameters(Ids, DistsInfo). 
- diff --git a/packages/CLPBN/horus/HorusYap.cpp b/packages/CLPBN/horus/HorusYap.cpp index 33767344e..504d17602 100644 --- a/packages/CLPBN/horus/HorusYap.cpp +++ b/packages/CLPBN/horus/HorusYap.cpp @@ -233,19 +233,21 @@ setParfactorsParams (void) { LiftedNetwork* network = (LiftedNetwork*) YAP_IntOfTerm (YAP_ARG1); ParfactorList* pfList = network->first; - YAP_Term distList = YAP_ARG2; + YAP_Term distIdsList = YAP_ARG2; + YAP_Term paramsList = YAP_ARG3; unordered_map paramsMap; - while (distList != YAP_TermNil()) { - YAP_Term dist = YAP_HeadOfTerm (distList); - unsigned distId = (unsigned) YAP_IntOfTerm (YAP_ArgOfTerm (1, dist)); + while (distIdsList != YAP_TermNil()) { + unsigned distId = (unsigned) YAP_IntOfTerm ( + YAP_HeadOfTerm (distIdsList)); assert (Util::contains (paramsMap, distId) == false); - paramsMap[distId] = readParameters (YAP_ArgOfTerm (2, dist)); - distList = YAP_TailOfTerm (distList); + paramsMap[distId] = readParameters (YAP_HeadOfTerm (paramsList)); + distIdsList = YAP_TailOfTerm (distIdsList); + paramsList = YAP_TailOfTerm (paramsList); } ParfactorList::iterator it = pfList->begin(); while (it != pfList->end()) { assert (Util::contains (paramsMap, (*it)->distId())); - // (*it)->setParams (paramsMap[(*it)->distId()]); + (*it)->setParams (paramsMap[(*it)->distId()]); ++ it; } return TRUE; @@ -256,16 +258,17 @@ setParfactorsParams (void) int setFactorsParams (void) { - return TRUE; // TODO FactorGraph* fg = (FactorGraph*) YAP_IntOfTerm (YAP_ARG1); - YAP_Term distList = YAP_ARG2; + YAP_Term distIdsList = YAP_ARG2; + YAP_Term paramsList = YAP_ARG3; unordered_map paramsMap; - while (distList != YAP_TermNil()) { - YAP_Term dist = YAP_HeadOfTerm (distList); - unsigned distId = (unsigned) YAP_IntOfTerm (YAP_ArgOfTerm (1, dist)); + while (distIdsList != YAP_TermNil()) { + unsigned distId = (unsigned) YAP_IntOfTerm ( + YAP_HeadOfTerm (distIdsList)); assert (Util::contains (paramsMap, distId) == false); - paramsMap[distId] = readParameters (YAP_ArgOfTerm (2, dist)); - distList = YAP_TailOfTerm (distList); + paramsMap[distId] = readParameters (YAP_HeadOfTerm (paramsList)); + distIdsList = YAP_TailOfTerm (distIdsList); + paramsList = YAP_TailOfTerm (paramsList); } const FacNodes& facNodes = fg->facNodes(); for (size_t i = 0; i < facNodes.size(); i++) { @@ -534,15 +537,34 @@ fillAnswersPrologList (vector& results) extern "C" void init_predicates (void) { - YAP_UserCPredicate ("cpp_create_lifted_network", createLiftedNetwork, 3); - YAP_UserCPredicate ("cpp_create_ground_network", createGroundNetwork, 4); - YAP_UserCPredicate ("cpp_run_lifted_solver", runLiftedSolver, 3); - YAP_UserCPredicate ("cpp_run_ground_solver", runGroundSolver, 3); - YAP_UserCPredicate ("cpp_set_parfactors_params", setParfactorsParams, 2); - YAP_UserCPredicate ("cpp_cpp_set_factors_params", setFactorsParams, 2); - YAP_UserCPredicate ("cpp_set_vars_information", setVarsInformation, 2); - YAP_UserCPredicate ("cpp_set_horus_flag", setHorusFlag, 2); - YAP_UserCPredicate ("cpp_free_lifted_network", freeLiftedNetwork, 1); - YAP_UserCPredicate ("cpp_free_ground_network", freeGroundNetwork, 1); + YAP_UserCPredicate ("cpp_create_lifted_network", + createLiftedNetwork, 3); + + YAP_UserCPredicate ("cpp_create_ground_network", + createGroundNetwork, 4); + + YAP_UserCPredicate ("cpp_run_lifted_solver", + runLiftedSolver, 3); + + YAP_UserCPredicate ("cpp_run_ground_solver", + runGroundSolver, 3); + + YAP_UserCPredicate ("cpp_set_parfactors_params", + setParfactorsParams, 3); + + YAP_UserCPredicate ("cpp_set_factors_params", 
+ setFactorsParams, 3); + + YAP_UserCPredicate ("cpp_set_vars_information", + setVarsInformation, 2); + + YAP_UserCPredicate ("cpp_set_horus_flag", + setHorusFlag, 2); + + YAP_UserCPredicate ("cpp_free_lifted_network", + freeLiftedNetwork, 1); + + YAP_UserCPredicate ("cpp_free_ground_network", + freeGroundNetwork, 1); } From 691188d5c9a0656df26592e89668f59f10d9c452 Mon Sep 17 00:00:00 2001 From: Tiago Gomes Date: Tue, 18 Dec 2012 23:51:51 +0000 Subject: [PATCH 27/89] Handle better formulas like f(X,X) --- packages/CLPBN/horus/ConstraintTree.cpp | 2 +- packages/CLPBN/horus/ConstraintTree.h | 2 +- packages/CLPBN/horus/Parfactor.cpp | 17 +++++++++++++++++ 3 files changed, 19 insertions(+), 2 deletions(-) diff --git a/packages/CLPBN/horus/ConstraintTree.cpp b/packages/CLPBN/horus/ConstraintTree.cpp index bfabc982c..7b3b6bb6f 100644 --- a/packages/CLPBN/horus/ConstraintTree.cpp +++ b/packages/CLPBN/horus/ConstraintTree.cpp @@ -883,7 +883,7 @@ ConstraintTree::ground (LogVar X) void -ConstraintTree::copyLogVar (LogVar X_1, LogVar X_2) +ConstraintTree::cloneLogVar (LogVar X_1, LogVar X_2) { moveToBottom ({X_1}); CTNodes leafs = getNodesAtLevel (logVars_.size()); diff --git a/packages/CLPBN/horus/ConstraintTree.h b/packages/CLPBN/horus/ConstraintTree.h index c50f45dc3..cccb070b4 100644 --- a/packages/CLPBN/horus/ConstraintTree.h +++ b/packages/CLPBN/horus/ConstraintTree.h @@ -200,7 +200,7 @@ class ConstraintTree ConstraintTrees ground (LogVar); - void copyLogVar (LogVar,LogVar); + void cloneLogVar (LogVar, LogVar); ConstraintTree& operator= (const ConstraintTree& ct); diff --git a/packages/CLPBN/horus/Parfactor.cpp b/packages/CLPBN/horus/Parfactor.cpp index 6eaa32e72..38a77892e 100644 --- a/packages/CLPBN/horus/Parfactor.cpp +++ b/packages/CLPBN/horus/Parfactor.cpp @@ -26,7 +26,24 @@ Parfactor::Parfactor ( } } } + LogVar newLv = logVars.size(); constr_ = new ConstraintTree (logVars, tuples); + // Change formulas like f(X,X), X in {(p1),(p2),...} + // to be like f(X,Y), (X,Y) in {(p1,p1),(p2,p2),...}. + // This will simplify shattering on the constraint tree. 
+ for (size_t i = 0; i < args_.size(); i++) { + LogVarSet lvSet; + LogVars& lvs = args_[i].logVars(); + for (size_t j = 0; j < lvs.size(); j++) { + if (lvSet.contains (lvs[j]) == false) { + lvSet |= lvs[j]; + } else { + constr_->cloneLogVar (lvs[j], newLv); + lvs[j] = newLv; + ++ newLv; + } + } + } assert (params_.size() == Util::sizeExpected (ranges_)); } From 2d1f5edc91a40ff4169cf2eb7b709d887f84abbb Mon Sep 17 00:00:00 2001 From: Tiago Gomes Date: Tue, 18 Dec 2012 23:52:56 +0000 Subject: [PATCH 28/89] There is no more to do --- packages/CLPBN/horus/TODO | 2 -- 1 file changed, 2 deletions(-) delete mode 100644 packages/CLPBN/horus/TODO diff --git a/packages/CLPBN/horus/TODO b/packages/CLPBN/horus/TODO deleted file mode 100644 index 360fa65ca..000000000 --- a/packages/CLPBN/horus/TODO +++ /dev/null @@ -1,2 +0,0 @@ -- Handle formulas like f(X,X) - From 48254ba7d7e37e70f2225b08c929f20abf3cd478 Mon Sep 17 00:00:00 2001 From: Tiago Gomes Date: Wed, 19 Dec 2012 17:45:53 +0000 Subject: [PATCH 29/89] Lkc: fix some memory leaks --- packages/CLPBN/horus/LiftedCircuit.cpp | 84 ++++++++++++++++++++++++++ packages/CLPBN/horus/LiftedCircuit.h | 18 ++++++ packages/CLPBN/horus/LiftedWCNF.cpp | 12 +++- packages/CLPBN/horus/LiftedWCNF.h | 2 + 4 files changed, 115 insertions(+), 1 deletion(-) diff --git a/packages/CLPBN/horus/LiftedCircuit.cpp b/packages/CLPBN/horus/LiftedCircuit.cpp index 7eb11b83b..a31b22692 100644 --- a/packages/CLPBN/horus/LiftedCircuit.cpp +++ b/packages/CLPBN/horus/LiftedCircuit.cpp @@ -3,6 +3,14 @@ #include "LiftedCircuit.h" +OrNode::~OrNode (void) +{ + delete leftBranch_; + delete rightBranch_; +} + + + double OrNode::weight (void) const { @@ -13,6 +21,14 @@ OrNode::weight (void) const +AndNode::~AndNode (void) +{ + delete leftBranch_; + delete rightBranch_; +} + + + double AndNode::weight (void) const { @@ -28,6 +44,13 @@ int SetOrNode::nrNeg_ = -1; +SetOrNode::~SetOrNode (void) +{ + delete follow_; +} + + + double SetOrNode::weight (void) const { @@ -51,6 +74,13 @@ SetOrNode::weight (void) const +SetAndNode::~SetAndNode (void) +{ + delete follow_; +} + + + double SetAndNode::weight (void) const { @@ -59,6 +89,15 @@ SetAndNode::weight (void) const +IncExcNode::~IncExcNode (void) +{ + delete plus1Branch_; + delete plus2Branch_; + delete minusBranch_; +} + + + double IncExcNode::weight (void) const { @@ -75,6 +114,13 @@ IncExcNode::weight (void) const +LeafNode::~LeafNode (void) +{ + delete clause_; +} + + + double LeafNode::weight (void) const { @@ -113,6 +159,13 @@ LeafNode::weight (void) const +SmoothNode::~SmoothNode (void) +{ + Clause::deleteClauses (clauses_); +} + + + double SmoothNode::weight (void) const { @@ -189,6 +242,19 @@ LiftedCircuit::LiftedCircuit (const LiftedWCNF* lwcnf) +LiftedCircuit::~LiftedCircuit (void) +{ + delete root_; + unordered_map::iterator it; + it = originClausesMap_.begin(); + while (it != originClausesMap_.end()) { + Clause::deleteClauses (it->second); + ++ it; + } +} + + + bool LiftedCircuit::isCompilationSucceeded (void) const { @@ -334,6 +400,9 @@ LiftedCircuit::tryUnitPropagation ( return true; } } + if (Globals::verbosity > 1) { + Clause::deleteClauses (backupClauses_); + } return false; } @@ -375,6 +444,9 @@ LiftedCircuit::tryIndependence ( (*follow) = andNode; return true; } + if (Globals::verbosity > 1) { + Clause::deleteClauses (backupClauses_); + } return false; } @@ -417,6 +489,9 @@ LiftedCircuit::tryShannonDecomp ( } } } + if (Globals::verbosity > 1) { + Clause::deleteClauses (backupClauses_); + } return false; } @@ -493,6 +568,9 @@ 
LiftedCircuit::tryInclusionExclusion ( return true; } } + if (Globals::verbosity > 1) { + Clause::deleteClauses (backupClauses_); + } return false; } @@ -528,6 +606,9 @@ LiftedCircuit::tryIndepPartialGrounding ( return true; } } + if (Globals::verbosity > 1) { + Clause::deleteClauses (backupClauses_); + } return false; } @@ -620,6 +701,9 @@ LiftedCircuit::tryAtomCounting ( } } } + if (Globals::verbosity > 1) { + Clause::deleteClauses (backupClauses_); + } return false; } diff --git a/packages/CLPBN/horus/LiftedCircuit.h b/packages/CLPBN/horus/LiftedCircuit.h index 2c32945ee..e3883211b 100644 --- a/packages/CLPBN/horus/LiftedCircuit.h +++ b/packages/CLPBN/horus/LiftedCircuit.h @@ -25,6 +25,8 @@ class CircuitNode public: CircuitNode (void) { } + virtual ~CircuitNode (void) { } + virtual double weight (void) const = 0; }; @@ -35,6 +37,8 @@ class OrNode : public CircuitNode public: OrNode (void) : CircuitNode(), leftBranch_(0), rightBranch_(0) { } + ~OrNode (void); + CircuitNode** leftBranch (void) { return &leftBranch_; } CircuitNode** rightBranch (void) { return &rightBranch_; } @@ -55,6 +59,8 @@ class AndNode : public CircuitNode AndNode (CircuitNode* leftBranch, CircuitNode* rightBranch) : CircuitNode(), leftBranch_(leftBranch), rightBranch_(rightBranch) { } + ~AndNode (void); + CircuitNode** leftBranch (void) { return &leftBranch_; } CircuitNode** rightBranch (void) { return &rightBranch_; } @@ -73,6 +79,8 @@ class SetOrNode : public CircuitNode SetOrNode (unsigned nrGroundings) : CircuitNode(), follow_(0), nrGroundings_(nrGroundings) { } + ~SetOrNode (void); + CircuitNode** follow (void) { return &follow_; } static unsigned nrPositives (void) { return nrPos_; } @@ -98,6 +106,8 @@ class SetAndNode : public CircuitNode SetAndNode (unsigned nrGroundings) : CircuitNode(), follow_(0), nrGroundings_(nrGroundings) { } + ~SetAndNode (void); + CircuitNode** follow (void) { return &follow_; } double weight (void) const; @@ -115,6 +125,8 @@ class IncExcNode : public CircuitNode IncExcNode (void) : CircuitNode(), plus1Branch_(0), plus2Branch_(0), minusBranch_(0) { } + ~IncExcNode (void); + CircuitNode** plus1Branch (void) { return &plus1Branch_; } CircuitNode** plus2Branch (void) { return &plus2Branch_; } CircuitNode** minusBranch (void) { return &minusBranch_; } @@ -135,6 +147,8 @@ class LeafNode : public CircuitNode LeafNode (Clause* clause, const LiftedWCNF& lwcnf) : CircuitNode(), clause_(clause), lwcnf_(lwcnf) { } + ~LeafNode (void); + const Clause* clause (void) const { return clause_; } Clause* clause (void) { return clause_; } @@ -154,6 +168,8 @@ class SmoothNode : public CircuitNode SmoothNode (const Clauses& clauses, const LiftedWCNF& lwcnf) : CircuitNode(), clauses_(clauses), lwcnf_(lwcnf) { } + ~SmoothNode (void); + const Clauses& clauses (void) const { return clauses_; } Clauses clauses (void) { return clauses_; } @@ -192,6 +208,8 @@ class LiftedCircuit public: LiftedCircuit (const LiftedWCNF* lwcnf); + ~LiftedCircuit (void); + bool isCompilationSucceeded (void) const; double getWeightedModelCount (void) const; diff --git a/packages/CLPBN/horus/LiftedWCNF.cpp b/packages/CLPBN/horus/LiftedWCNF.cpp index c62ead03d..ba7097dbf 100644 --- a/packages/CLPBN/horus/LiftedWCNF.cpp +++ b/packages/CLPBN/horus/LiftedWCNF.cpp @@ -327,6 +327,16 @@ Clause::printClauses (const Clauses& clauses) +void +Clause::deleteClauses (Clauses& clauses) +{ + for (size_t i = 0; i < clauses.size(); i++) { + delete clauses[i]; + } +} + + + std::ostream& operator<< (ostream &os, const Clause& clause) { @@ -445,7 +455,7 
@@ LiftedWCNF::LiftedWCNF (const ParfactorList& pfList) LiftedWCNF::~LiftedWCNF (void) { - + Clause::deleteClauses (clauses_); } diff --git a/packages/CLPBN/horus/LiftedWCNF.h b/packages/CLPBN/horus/LiftedWCNF.h index 68c169ce0..e0f901b7c 100644 --- a/packages/CLPBN/horus/LiftedWCNF.h +++ b/packages/CLPBN/horus/LiftedWCNF.h @@ -137,6 +137,8 @@ class Clause static void printClauses (const vector& clauses); + static void deleteClauses (vector& clauses); + friend std::ostream& operator<< (ostream &os, const Clause& clause); private: From 60ff9daafdbbe7b4b08761b06b72e02094d43f90 Mon Sep 17 00:00:00 2001 From: Tiago Gomes Date: Wed, 19 Dec 2012 18:13:47 +0000 Subject: [PATCH 30/89] Remove old example --- packages/CLPBN/examples/cg.yap | 35 ---------------------------------- 1 file changed, 35 deletions(-) delete mode 100644 packages/CLPBN/examples/cg.yap diff --git a/packages/CLPBN/examples/cg.yap b/packages/CLPBN/examples/cg.yap deleted file mode 100644 index 04423248a..000000000 --- a/packages/CLPBN/examples/cg.yap +++ /dev/null @@ -1,35 +0,0 @@ - -% -% adapted from Hendrik Blockeel's ILP04 paper. -% - -:- use_module(library(clpbn)). - -cg(X,1,C):- - father(Y,X), - cg(Y,1,C1),cg(Y,2,C2), - parent_cpt(cg(X,1), C1, C2, C). - -cg(X,2,C):- - mother(Y,X), - cg(Y,1,C1),cg(Y,2,C2), - parent_cpt(cg(X,2), C1, C2, C). - - - -cg(f,X,C) :- - prior_cpt(cg(f,X),C). - -cg(m,X,C) :- - prior_cpt(cg(m,X),C). - - -prior_cpt(CKEY, C) :- - { C = CKEY with p([p,w], [0.5,0.5])}. - -parent_cpt(CKEY, C1, C2, C) :- - { C = CKEY with p([p,w], [ 1,0.5,0.5,0.0, - 0.0,0.5,0.5, 1],[C1,C2])}. - -father(f,s). -mother(m,s). From c407594146f52f2a9487da022783030e6c558c4d Mon Sep 17 00:00:00 2001 From: Tiago Gomes Date: Wed, 19 Dec 2012 18:22:47 +0000 Subject: [PATCH 31/89] Whitespace clean --- packages/CLPBN/examples/HMMer/fasta.yap | 10 ++++++---- packages/CLPBN/examples/HMMer/globin.yap | 6 +++--- packages/CLPBN/examples/HMMer/scan.yap | 19 +++++++++---------- packages/CLPBN/examples/HMMer/score.yap | 9 ++++----- 4 files changed, 22 insertions(+), 22 deletions(-) diff --git a/packages/CLPBN/examples/HMMer/fasta.yap b/packages/CLPBN/examples/HMMer/fasta.yap index 9a3e7450a..d4bf53085 100644 --- a/packages/CLPBN/examples/HMMer/fasta.yap +++ b/packages/CLPBN/examples/HMMer/fasta.yap @@ -5,7 +5,10 @@ % support for a single sequence. % -:- module(fasta, [fa2atoms/2,fa2atoms/3]). +:- module(fasta, + [fa2atoms/2, + fa2atoms/3 + ]). fa2atoms(F, L) :- fa2atoms(F, L, []). @@ -25,8 +28,8 @@ read_chars(10,S) --> !, read_chars(C,S) --> [AC], { - cvt_c(C,AC), - get0(S,MC) + cvt_c(C,AC), + get0(S,MC) }, read_chars(MC, S). @@ -44,4 +47,3 @@ skip_header(_,S) :- skip_header(C,S). - diff --git a/packages/CLPBN/examples/HMMer/globin.yap b/packages/CLPBN/examples/HMMer/globin.yap index 06c896048..704ca40b3 100644 --- a/packages/CLPBN/examples/HMMer/globin.yap +++ b/packages/CLPBN/examples/HMMer/globin.yap @@ -32,9 +32,9 @@ g_f_cpt(-8455,1.0,0.00284964910984409). %Null state emission CPT. nule_cpt( - e(595,-1558,85,338,-294,453,-1158,197,249,902,-1085,-142,-21,-313,45,531,201,384,-1998,-644), - 0.05, - e(0.0755236292781413,0.0169810785568618,0.0530343870684108,0.0632001549226403,0.0407818746669505,0.0684441906545919,0.0224066674892351,0.0573156092864189,0.0594191552528466,0.093432734688318,0.023569613397956,0.0453130969133667,0.0492774668469685,0.0402483068810561,0.051584158965068,0.0722465198961763,0.0574747424017338,0.0652477473844479,0.0125173406963917,0.0319968103461077)). 
+ e(595,-1558,85,338,-294,453,-1158,197,249,902,-1085,-142,-21,-313,45,531,201,384,-1998,-644), + 0.05, + e(0.0755236292781413,0.0169810785568618,0.0530343870684108,0.0632001549226403,0.0407818746669505,0.0684441906545919,0.0224066674892351,0.0573156092864189,0.0594191552528466,0.093432734688318,0.023569613397956,0.0453130969133667,0.0492774668469685,0.0402483068810561,0.051584158965068,0.0722465198961763,0.0574747424017338,0.0652477473844479,0.0125173406963917,0.0319968103461077)). %Reaching first D. b_d_cpt(-110,-3765,-110). diff --git a/packages/CLPBN/examples/HMMer/scan.yap b/packages/CLPBN/examples/HMMer/scan.yap index 33948a57c..a874f2ed7 100644 --- a/packages/CLPBN/examples/HMMer/scan.yap +++ b/packages/CLPBN/examples/HMMer/scan.yap @@ -14,7 +14,7 @@ stop(S,W,Info) :- gen_program(W, Info). stop(_,_,_) :- format(user_error,"Bad HMM~n", []). - + parse_model(S,Info) :- get_line(S, Line, Info), % format('~s~n',[Line]), @@ -45,7 +45,7 @@ match_field(hmmer(_,_,_,Alph,_,_,_,_),_) --> "ALPH", !, % aminos or bases match_field(_,_) --> "RF", !, scanner_skip. match_field(_,_) --> "CS", !, scanner_skip. match_field(hmmer(_,_,_,_,_,_,_,MAP),_) --> "MAP", !, - scanner_skip_blanks, + scanner_skip_blanks, to_lower(Codes), { map_code(Codes,MAP) }. match_field(_,_) --> "COM", !, scanner_skip. @@ -76,11 +76,11 @@ match_field(_,_) --> "EVD", !, match_field(Info,S) --> "HMM", !, scanner_skip, { - get_line(S,_,Info), - Info = hmmer(_,_,NOfStates,Alph,_,_,model(BD,NBD,Transitions),MAP), - nof_symbols(Alph,N), - scan_model(S,NOfStates,N,BD,NBD,Transitions,MAP,Info), - throw(done(Info)) + get_line(S,_,Info), + Info = hmmer(_,_,NOfStates,Alph,_,_,model(BD,NBD,Transitions),MAP), + nof_symbols(Alph,N), + scan_model(S,NOfStates,N,BD,NBD,Transitions,MAP,Info), + throw(done(Info)) }. scan_model(S,NOfStates,N,BD,NBD,Transitions,MAP,Info) :- @@ -95,7 +95,7 @@ scan_states(NOfStates, N, Stream, MAP, [t(E,I,S)|Transitions], Info) :- scan_states(NOfStates1, N, Stream, NMAP, Transitions, Info). scan_state(Stream, E,I,MAP,s(MM,MI,MD,IM,II,DM,DD,BM,ME), N, NMAP, Info) :- - get_line(Stream, ELine, Info), + get_line(Stream, ELine, Info), get_line(Stream, ILine, Info), get_line(Stream, SLine, Info), % format('~s~n~s~n~s~n',[ELine,ILine,SLine]), @@ -265,7 +265,7 @@ gen_model(W, model(BD,NBD,States),PsCPT) :- format(W, '~n%Reaching first D.~n',[]), format(W, 'b_d_cpt(~w,~w,~w).~n',[BD,NBD,BDCPT]), gen_states(W, States,1,PsCPT). - + gen_states(_, [],_,_). gen_states(W, [State|States],StateNo,PsCPT) :- gen_state(W, State,StateNo,PsCPT), @@ -327,4 +327,3 @@ max_index([_|L],I0,Max0,MaxIndex0,Max,MaxIndex) :- I is I0+1, max_index(L,I,Max0,MaxIndex0,Max,MaxIndex). - diff --git a/packages/CLPBN/examples/HMMer/score.yap b/packages/CLPBN/examples/HMMer/score.yap index db7693b3b..ef79b6624 100644 --- a/packages/CLPBN/examples/HMMer/score.yap +++ b/packages/CLPBN/examples/HMMer/score.yap @@ -6,13 +6,12 @@ :- ensure_loaded(library('clpbn/viterbi')). :- use_module(fasta, - [fa2atoms/3]). + [fa2atoms/3]). :- use_module(library(lists), - [ - nth/3, - append/3 - ]). + [nth/3, + append/3 + ]). :- [plan7]. 
From 046af409baed3618fce8f421283b6fbdc0843b6d Mon Sep 17 00:00:00 2001 From: Tiago Gomes Date: Wed, 19 Dec 2012 18:25:47 +0000 Subject: [PATCH 32/89] Change the default solver to be hve --- packages/CLPBN/clpbn.yap | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/CLPBN/clpbn.yap b/packages/CLPBN/clpbn.yap index fb7490600..54a87a96a 100644 --- a/packages/CLPBN/clpbn.yap +++ b/packages/CLPBN/clpbn.yap @@ -137,8 +137,8 @@ :- meta_predicate probability(:,-), conditional_probability(:,:,-). -solver(ve). -em_solver(bp). +solver(hve). +em_solver(hve). suppress_attribute_display(false). parameter_softening(m_estimate(10)). use_parfactors(off). From af8497af6e9cc6faf44a4e1619c65c874493497e Mon Sep 17 00:00:00 2001 From: Tiago Gomes Date: Thu, 20 Dec 2012 16:20:38 +0000 Subject: [PATCH 33/89] Sort the logical vars of the constraint tree when exporting to graphviz --- packages/CLPBN/horus/ConstraintTree.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/CLPBN/horus/ConstraintTree.cpp b/packages/CLPBN/horus/ConstraintTree.cpp index 7b3b6bb6f..6dad4c87e 100644 --- a/packages/CLPBN/horus/ConstraintTree.cpp +++ b/packages/CLPBN/horus/ConstraintTree.cpp @@ -527,7 +527,7 @@ ConstraintTree::exportToGraphViz ( } out << "digraph {" << endl; ConstraintTree copy (*this); - // copy.moveToTop (copy.logVarSet_.elements()); + copy.moveToTop (copy.logVarSet_.elements()); CTNodes nodes = getNodesBelow (copy.root_); out << "\"" << copy.root_ << "\"" << " [label=\"R\"]" << endl; for (CTNodes::const_iterator it = ++ nodes.begin(); From 2ca31ca14ae041539c21831ca2162c5e8d65ad6f Mon Sep 17 00:00:00 2001 From: Tiago Gomes Date: Thu, 20 Dec 2012 17:11:11 +0000 Subject: [PATCH 34/89] Don't abort if opening a file to export to graphviz fails --- packages/CLPBN/horus/BayesBallGraph.cpp | 2 +- packages/CLPBN/horus/ConstraintTree.cpp | 2 +- packages/CLPBN/horus/ElimGraph.cpp | 7 +------ packages/CLPBN/horus/FactorGraph.cpp | 12 +++++++----- packages/CLPBN/horus/LiftedCircuit.cpp | 4 ++-- 5 files changed, 12 insertions(+), 15 deletions(-) diff --git a/packages/CLPBN/horus/BayesBallGraph.cpp b/packages/CLPBN/horus/BayesBallGraph.cpp index 626d940d9..6fd95e44e 100644 --- a/packages/CLPBN/horus/BayesBallGraph.cpp +++ b/packages/CLPBN/horus/BayesBallGraph.cpp @@ -81,7 +81,7 @@ BayesBallGraph::exportToGraphViz (const char* fileName) if (!out.is_open()) { cerr << "error: cannot open file to write at " ; cerr << "BayesBallGraph::exportToDotFile()" << endl; - abort(); + return; } out << "digraph {" << endl; out << "ranksep=1" << endl; diff --git a/packages/CLPBN/horus/ConstraintTree.cpp b/packages/CLPBN/horus/ConstraintTree.cpp index 6dad4c87e..33107e101 100644 --- a/packages/CLPBN/horus/ConstraintTree.cpp +++ b/packages/CLPBN/horus/ConstraintTree.cpp @@ -523,7 +523,7 @@ ConstraintTree::exportToGraphViz ( if (!out.is_open()) { cerr << "error: cannot open file to write at " ; cerr << "ConstraintTree::exportToDotFile()" << endl; - abort(); + return; } out << "digraph {" << endl; ConstraintTree copy (*this); diff --git a/packages/CLPBN/horus/ElimGraph.cpp b/packages/CLPBN/horus/ElimGraph.cpp index 93c8527e4..48d360da8 100644 --- a/packages/CLPBN/horus/ElimGraph.cpp +++ b/packages/CLPBN/horus/ElimGraph.cpp @@ -101,17 +101,14 @@ ElimGraph::exportToGraphViz ( if (!out.is_open()) { cerr << "error: cannot open file to write at " ; cerr << "Markov::exportToDotFile()" << endl; - abort(); + return; } - out << "strict graph {" << endl; - for (size_t i = 0; i < nodes_.size(); i++) { if 
(showNeighborless || nodes_[i]->neighbors().size() != 0) { out << '"' << nodes_[i]->label() << '"' << endl; } } - for (size_t i = 0; i < highlightVarIds.size(); i++) { EgNode* node =getEgNode (highlightVarIds[i]); if (node) { @@ -122,7 +119,6 @@ ElimGraph::exportToGraphViz ( abort(); } } - for (size_t i = 0; i < nodes_.size(); i++) { EGNeighs neighs = nodes_[i]->neighbors(); for (size_t j = 0; j < neighs.size(); j++) { @@ -130,7 +126,6 @@ ElimGraph::exportToGraphViz ( out << '"' << neighs[j]->label() << '"' << endl; } } - out << "}" << endl; out.close(); } diff --git a/packages/CLPBN/horus/FactorGraph.cpp b/packages/CLPBN/horus/FactorGraph.cpp index 417499c4d..5badec4ca 100644 --- a/packages/CLPBN/horus/FactorGraph.cpp +++ b/packages/CLPBN/horus/FactorGraph.cpp @@ -283,7 +283,7 @@ FactorGraph::exportToGraphViz (const char* fileName) const if (!out.is_open()) { cerr << "error: cannot open file to write at " ; cerr << "FactorGraph::exportToDotFile()" << endl; - abort(); + return; } out << "graph \"" << fileName << "\" {" << endl; for (size_t i = 0; i < varNodes_.size(); i++) { @@ -316,8 +316,9 @@ FactorGraph::exportToUaiFormat (const char* fileName) const { ofstream out (fileName); if (!out.is_open()) { - cerr << "error: cannot open file " << fileName << endl; - abort(); + cerr << "error: cannot open file to write at " ; + cerr << "ConstraintTree::exportToUaiFormat()" << endl; + return; } out << "MARKOV" << endl; out << varNodes_.size() << endl; @@ -350,8 +351,9 @@ FactorGraph::exportToLibDaiFormat (const char* fileName) const { ofstream out (fileName); if (!out.is_open()) { - cerr << "error: cannot open file " << fileName << endl; - abort(); + cerr << "error: cannot open file to write at " ; + cerr << "ConstraintTree::exportToUaiFormat()" << endl; + return; } out << facNodes_.size() << endl << endl; for (size_t i = 0; i < facNodes_.size(); i++) { diff --git a/packages/CLPBN/horus/LiftedCircuit.cpp b/packages/CLPBN/horus/LiftedCircuit.cpp index a31b22692..094e04aab 100644 --- a/packages/CLPBN/horus/LiftedCircuit.cpp +++ b/packages/CLPBN/horus/LiftedCircuit.cpp @@ -278,8 +278,8 @@ LiftedCircuit::exportToGraphViz (const char* fileName) ofstream out (fileName); if (!out.is_open()) { cerr << "error: cannot open file to write at " ; - cerr << "BayesBallGraph::exportToDotFile()" << endl; - abort(); + cerr << "LiftedCircuit::exportToDotFile()" << endl; + return; } out << "digraph {" << endl; out << "ranksep=1" << endl; From 053fa31bb23e7463e46c3645376904fa1f0b3a28 Mon Sep 17 00:00:00 2001 From: Tiago Gomes Date: Thu, 20 Dec 2012 17:37:59 +0000 Subject: [PATCH 35/89] Exit is better than abort. Also use a macro instead of integer to indicate failure. 
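
exit() performs normal termination, running atexit handlers and flushing
open streams, whereas abort() terminates abnormally by raising SIGABRT;
EXIT_FAILURE is the portable failure status from <cstdlib>, replacing the
abort() calls and the misleading exit (0) — which reports success — used
before.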
--- packages/CLPBN/horus/ElimGraph.cpp | 2 +- packages/CLPBN/horus/FactorGraph.cpp | 10 ++++----- packages/CLPBN/horus/HorusCli.cpp | 25 +++++++++++------------ packages/CLPBN/horus/HorusYap.cpp | 2 +- packages/CLPBN/horus/LiftedKc.cpp | 2 +- packages/CLPBN/horus/LiftedOperations.cpp | 2 +- packages/CLPBN/horus/Parfactor.cpp | 2 +- packages/CLPBN/horus/ParfactorList.cpp | 2 +- packages/CLPBN/horus/Util.cpp | 2 +- 9 files changed, 24 insertions(+), 25 deletions(-) diff --git a/packages/CLPBN/horus/ElimGraph.cpp b/packages/CLPBN/horus/ElimGraph.cpp index 48d360da8..c1b11f033 100644 --- a/packages/CLPBN/horus/ElimGraph.cpp +++ b/packages/CLPBN/horus/ElimGraph.cpp @@ -116,7 +116,7 @@ ElimGraph::exportToGraphViz ( out << " [shape=box3d]" << endl; } else { cout << "error: invalid variable id: " << highlightVarIds[i] << endl; - abort(); + exit (EXIT_FAILURE); } } for (size_t i = 0; i < nodes_.size(); i++) { diff --git a/packages/CLPBN/horus/FactorGraph.cpp b/packages/CLPBN/horus/FactorGraph.cpp index 5badec4ca..34640476d 100644 --- a/packages/CLPBN/horus/FactorGraph.cpp +++ b/packages/CLPBN/horus/FactorGraph.cpp @@ -38,14 +38,14 @@ FactorGraph::readFromUaiFormat (const char* fileName) std::ifstream is (fileName); if (!is.is_open()) { cerr << "error: cannot read from file " << fileName << endl; - abort(); + exit (EXIT_FAILURE); } ignoreLines (is); string line; getline (is, line); if (line != "MARKOV") { cerr << "error: the network must be a MARKOV network " << endl; - abort(); + exit (EXIT_FAILURE); } // read the number of vars ignoreLines (is); @@ -74,7 +74,7 @@ FactorGraph::readFromUaiFormat (const char* fileName) cerr << "error: invalid variable identifier `" << vid << "'" << endl; cerr << "identifiers must be between 0 and " << ranges.size() - 1 ; cerr << endl; - abort(); + exit (EXIT_FAILURE); } factorVarIds.back().push_back (vid); factorRanges.back().push_back (ranges[vid]); @@ -89,7 +89,7 @@ FactorGraph::readFromUaiFormat (const char* fileName) cerr << "error: invalid number of parameters for factor nº " << i ; cerr << ", expected: " << Util::sizeExpected (factorRanges[i]); cerr << ", given: " << nrParams << endl; - abort(); + exit (EXIT_FAILURE); } Params params (nrParams); for (unsigned j = 0; j < nrParams; j++) { @@ -111,7 +111,7 @@ FactorGraph::readFromLibDaiFormat (const char* fileName) std::ifstream is (fileName); if (!is.is_open()) { cerr << "error: cannot read from file " << fileName << endl; - abort(); + exit (EXIT_FAILURE); } ignoreLines (is); unsigned nrFactors; diff --git a/packages/CLPBN/horus/HorusCli.cpp b/packages/CLPBN/horus/HorusCli.cpp index 639b91739..54f229dfa 100644 --- a/packages/CLPBN/horus/HorusCli.cpp +++ b/packages/CLPBN/horus/HorusCli.cpp @@ -26,7 +26,7 @@ main (int argc, const char* argv[]) if (argc <= 1) { cerr << "error: no graphical model specified" << endl; cerr << USAGE << endl; - exit (0); + exit (EXIT_FAILURE); } int idx = readHorusFlags (argc, argv); FactorGraph fg; @@ -53,12 +53,12 @@ readHorusFlags (int argc, const char* argv[]) if (leftArg.empty()) { cerr << "error: missing left argument" << endl; cerr << USAGE << endl; - exit (0); + exit (EXIT_FAILURE); } if (rightArg.empty()) { cerr << "error: missing right argument" << endl; cerr << USAGE << endl; - exit (0); + exit (EXIT_FAILURE); } Util::setHorusFlag (leftArg, rightArg); } @@ -79,7 +79,7 @@ readFactorGraph (FactorGraph& fg, const char* s) } else { cerr << "error: the graphical model must be defined either " ; cerr << "in a UAI or libDAI file" << endl; - exit (0); + exit (EXIT_FAILURE); } } @@ 
-100,14 +100,14 @@ readQueryAndEvidence ( cerr << "error: `" << arg << "' " ; cerr << "is not a variable id" ; cerr << endl; - exit (0); + exit (EXIT_FAILURE); } VarId vid = Util::stringToUnsigned (arg); VarNode* queryVar = fg.getVarNode (vid); if (queryVar == false) { cerr << "error: unknow variable with id " ; cerr << "`" << vid << "'" << endl; - exit (0); + exit (EXIT_FAILURE); } queryIds.push_back (vid); } else { @@ -117,37 +117,36 @@ readQueryAndEvidence ( if (leftArg.empty()) { cerr << "error: missing left argument" << endl; cerr << USAGE << endl; - exit (0); + exit (EXIT_FAILURE); } if (Util::isInteger (leftArg) == false) { cerr << "error: `" << leftArg << "' " ; cerr << "is not a variable id" << endl ; - exit (0); - continue; + exit (EXIT_FAILURE); } VarId vid = Util::stringToUnsigned (leftArg); VarNode* observedVar = fg.getVarNode (vid); if (observedVar == false) { cerr << "error: unknow variable with id " ; cerr << "`" << vid << "'" << endl; - exit (0); + exit (EXIT_FAILURE); } if (rightArg.empty()) { cerr << "error: missing right argument" << endl; cerr << USAGE << endl; - exit (0); + exit (EXIT_FAILURE); } if (Util::isInteger (rightArg) == false) { cerr << "error: `" << rightArg << "' " ; cerr << "is not a state index" << endl ; - exit (0); + exit (EXIT_FAILURE); } unsigned stateIdx = Util::stringToUnsigned (rightArg); if (observedVar->isValidState (stateIdx) == false) { cerr << "error: `" << stateIdx << "' " ; cerr << "is not a valid state index for variable with id " ; cerr << "`" << vid << "'" << endl; - exit (0); + exit (EXIT_FAILURE); } observedVar->setEvidence (stateIdx); } diff --git a/packages/CLPBN/horus/HorusYap.cpp b/packages/CLPBN/horus/HorusYap.cpp index 504d17602..2e74e68a8 100644 --- a/packages/CLPBN/horus/HorusYap.cpp +++ b/packages/CLPBN/horus/HorusYap.cpp @@ -424,7 +424,7 @@ readParfactor (YAP_Term pfTerm) YAP_Term ti = YAP_ArgOfTerm (i, term); if (YAP_IsAtomTerm (ti) == false) { cerr << "error: constraint has free variables" << endl; - abort(); + exit (EXIT_FAILURE); } string name ((char*) YAP_AtomName (YAP_AtomOfTerm (ti))); tuple[i - 1] = LiftedUtils::getSymbol (name); diff --git a/packages/CLPBN/horus/LiftedKc.cpp b/packages/CLPBN/horus/LiftedKc.cpp index d9560f6df..e6c937c33 100644 --- a/packages/CLPBN/horus/LiftedKc.cpp +++ b/packages/CLPBN/horus/LiftedKc.cpp @@ -23,7 +23,7 @@ LiftedKc::solveQuery (const Grounds& query) circuit_ = new LiftedCircuit (lwcnf_); if (circuit_->isCompilationSucceeded() == false) { cerr << "error: compilation failed" << endl; - abort(); + exit (EXIT_FAILURE); } vector groups; Ranges ranges; diff --git a/packages/CLPBN/horus/LiftedOperations.cpp b/packages/CLPBN/horus/LiftedOperations.cpp index 03cafdee7..e6edfcb27 100644 --- a/packages/CLPBN/horus/LiftedOperations.cpp +++ b/packages/CLPBN/horus/LiftedOperations.cpp @@ -37,7 +37,7 @@ LiftedOperations::shatterAgainstQuery ( if (found == false) { cerr << "error: could not find a parfactor with ground " ; cerr << "`" << query[i] << "'" << endl; - exit (0); + exit (EXIT_FAILURE); } pfList.add (newPfs); } diff --git a/packages/CLPBN/horus/Parfactor.cpp b/packages/CLPBN/horus/Parfactor.cpp index 38a77892e..6ea8ada05 100644 --- a/packages/CLPBN/horus/Parfactor.cpp +++ b/packages/CLPBN/horus/Parfactor.cpp @@ -691,7 +691,7 @@ Parfactor::expandPotential ( if (newSize > params_.max_size()) { cerr << "error: an overflow occurred when performing expansion" ; cerr << endl; - abort(); + exit (EXIT_FAILURE); } Params backup = params_; diff --git a/packages/CLPBN/horus/ParfactorList.cpp 
b/packages/CLPBN/horus/ParfactorList.cpp index 7e829a6d0..41732ee0c 100644 --- a/packages/CLPBN/horus/ParfactorList.cpp +++ b/packages/CLPBN/horus/ParfactorList.cpp @@ -336,7 +336,7 @@ ParfactorList::shatterAgainstMySelf ( if (f1.isAtom()) { cerr << "error: a ground occurs twice in a parfactor" << endl; cerr << endl; - abort(); + exit (EXIT_FAILURE); } assert (g->constr()->empty() == false); ConstraintTree ctCopy (*g->constr()); diff --git a/packages/CLPBN/horus/Util.cpp b/packages/CLPBN/horus/Util.cpp index d3dbd588d..97c5907bd 100644 --- a/packages/CLPBN/horus/Util.cpp +++ b/packages/CLPBN/horus/Util.cpp @@ -54,7 +54,7 @@ stringToUnsigned (string str) ss >> val; if (val < 0) { cerr << "error: the readed number is negative" << endl; - abort(); + exit (EXIT_FAILURE); } return static_cast (val); } From 685f46dc2765f3e50b3a115ef6dc588dfbd51a7b Mon Sep 17 00:00:00 2001 From: Tiago Gomes Date: Thu, 20 Dec 2012 17:41:52 +0000 Subject: [PATCH 36/89] Fix --- packages/CLPBN/horus/ElimGraph.cpp | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/CLPBN/horus/ElimGraph.cpp b/packages/CLPBN/horus/ElimGraph.cpp index c1b11f033..db32f6e33 100644 --- a/packages/CLPBN/horus/ElimGraph.cpp +++ b/packages/CLPBN/horus/ElimGraph.cpp @@ -100,7 +100,7 @@ ElimGraph::exportToGraphViz ( ofstream out (fileName); if (!out.is_open()) { cerr << "error: cannot open file to write at " ; - cerr << "Markov::exportToDotFile()" << endl; + cerr << "ElimGraph::exportToDotFile()" << endl; return; } out << "strict graph {" << endl; @@ -115,7 +115,7 @@ ElimGraph::exportToGraphViz ( out << '"' << node->label() << '"' ; out << " [shape=box3d]" << endl; } else { - cout << "error: invalid variable id: " << highlightVarIds[i] << endl; + cerr << "error: invalid variable id: " << highlightVarIds[i] << endl; exit (EXIT_FAILURE); } } From b44ed7db39be46c839ef394c122ca976534a79e6 Mon Sep 17 00:00:00 2001 From: Tiago Gomes Date: Thu, 20 Dec 2012 18:07:50 +0000 Subject: [PATCH 37/89] Improve the error messages --- packages/CLPBN/horus/BayesBallGraph.cpp | 3 +- packages/CLPBN/horus/ConstraintTree.cpp | 3 +- packages/CLPBN/horus/ElimGraph.cpp | 6 ++-- packages/CLPBN/horus/FactorGraph.cpp | 31 ++++++++---------- packages/CLPBN/horus/HorusCli.cpp | 40 +++++++++++------------ packages/CLPBN/horus/HorusYap.cpp | 2 +- packages/CLPBN/horus/LiftedCircuit.cpp | 3 +- packages/CLPBN/horus/LiftedKc.cpp | 2 +- packages/CLPBN/horus/LiftedOperations.cpp | 4 +-- packages/CLPBN/horus/Makefile.in | 4 +-- packages/CLPBN/horus/Parfactor.cpp | 2 +- packages/CLPBN/horus/ParfactorList.cpp | 2 +- packages/CLPBN/horus/Util.cpp | 2 +- 13 files changed, 49 insertions(+), 55 deletions(-) diff --git a/packages/CLPBN/horus/BayesBallGraph.cpp b/packages/CLPBN/horus/BayesBallGraph.cpp index 6fd95e44e..36fcbb5ee 100644 --- a/packages/CLPBN/horus/BayesBallGraph.cpp +++ b/packages/CLPBN/horus/BayesBallGraph.cpp @@ -79,8 +79,7 @@ BayesBallGraph::exportToGraphViz (const char* fileName) { ofstream out (fileName); if (!out.is_open()) { - cerr << "error: cannot open file to write at " ; - cerr << "BayesBallGraph::exportToDotFile()" << endl; + cerr << "Error: couldn't open file '" << fileName << "'." 
; return; } out << "digraph {" << endl; diff --git a/packages/CLPBN/horus/ConstraintTree.cpp b/packages/CLPBN/horus/ConstraintTree.cpp index 33107e101..0546d0852 100644 --- a/packages/CLPBN/horus/ConstraintTree.cpp +++ b/packages/CLPBN/horus/ConstraintTree.cpp @@ -521,8 +521,7 @@ ConstraintTree::exportToGraphViz ( { ofstream out (fileName); if (!out.is_open()) { - cerr << "error: cannot open file to write at " ; - cerr << "ConstraintTree::exportToDotFile()" << endl; + cerr << "Error: couldn't open file '" << fileName << "'." ; return; } out << "digraph {" << endl; diff --git a/packages/CLPBN/horus/ElimGraph.cpp b/packages/CLPBN/horus/ElimGraph.cpp index db32f6e33..f617d8237 100644 --- a/packages/CLPBN/horus/ElimGraph.cpp +++ b/packages/CLPBN/horus/ElimGraph.cpp @@ -99,8 +99,7 @@ ElimGraph::exportToGraphViz ( { ofstream out (fileName); if (!out.is_open()) { - cerr << "error: cannot open file to write at " ; - cerr << "ElimGraph::exportToDotFile()" << endl; + cerr << "Error: couldn't open file '" << fileName << "'." ; return; } out << "strict graph {" << endl; @@ -115,7 +114,8 @@ ElimGraph::exportToGraphViz ( out << '"' << node->label() << '"' ; out << " [shape=box3d]" << endl; } else { - cerr << "error: invalid variable id: " << highlightVarIds[i] << endl; + cerr << "Error: invalid variable id: " << highlightVarIds[i] << "." ; + cerr << endl; exit (EXIT_FAILURE); } } diff --git a/packages/CLPBN/horus/FactorGraph.cpp b/packages/CLPBN/horus/FactorGraph.cpp index 34640476d..ba31a9faa 100644 --- a/packages/CLPBN/horus/FactorGraph.cpp +++ b/packages/CLPBN/horus/FactorGraph.cpp @@ -37,14 +37,14 @@ FactorGraph::readFromUaiFormat (const char* fileName) { std::ifstream is (fileName); if (!is.is_open()) { - cerr << "error: cannot read from file " << fileName << endl; + cerr << "Error: couldn't open file '" << fileName << "'." ; exit (EXIT_FAILURE); } ignoreLines (is); string line; getline (is, line); if (line != "MARKOV") { - cerr << "error: the network must be a MARKOV network " << endl; + cerr << "Error: the network must be a MARKOV network." << endl; exit (EXIT_FAILURE); } // read the number of vars @@ -71,9 +71,9 @@ FactorGraph::readFromUaiFormat (const char* fileName) for (unsigned j = 0; j < nrArgs; j++) { is >> vid; if (vid >= ranges.size()) { - cerr << "error: invalid variable identifier `" << vid << "'" << endl; - cerr << "identifiers must be between 0 and " << ranges.size() - 1 ; - cerr << endl; + cerr << "Error: invalid variable identifier `" << vid << "'. " ; + cerr << "Identifiers must be between 0 and " << ranges.size() - 1 ; + cerr << "." << endl; exit (EXIT_FAILURE); } factorVarIds.back().push_back (vid); @@ -86,9 +86,9 @@ FactorGraph::readFromUaiFormat (const char* fileName) ignoreLines (is); is >> nrParams; if (nrParams != Util::sizeExpected (factorRanges[i])) { - cerr << "error: invalid number of parameters for factor nº " << i ; - cerr << ", expected: " << Util::sizeExpected (factorRanges[i]); - cerr << ", given: " << nrParams << endl; + cerr << "Error: invalid number of parameters for factor nº " << i ; + cerr << ", " << Util::sizeExpected (factorRanges[i]); + cerr << " expected, " << nrParams << " given." << endl; exit (EXIT_FAILURE); } Params params (nrParams); @@ -110,7 +110,7 @@ FactorGraph::readFromLibDaiFormat (const char* fileName) { std::ifstream is (fileName); if (!is.is_open()) { - cerr << "error: cannot read from file " << fileName << endl; + cerr << "Error: couldn't open file '" << fileName << "'." 
; exit (EXIT_FAILURE); } ignoreLines (is); @@ -135,8 +135,8 @@ FactorGraph::readFromLibDaiFormat (const char* fileName) is >> ranges[j]; VarNode* var = getVarNode (vids[j]); if (var != 0 && ranges[j] != var->range()) { - cerr << "error: variable `" << vids[j] << "' appears in two or " ; - cerr << "more factors with a different range" << endl; + cerr << "Error: variable `" << vids[j] << "' appears in two or " ; + cerr << "more factors with a different range." << endl; } } // read parameters @@ -281,8 +281,7 @@ FactorGraph::exportToGraphViz (const char* fileName) const { ofstream out (fileName); if (!out.is_open()) { - cerr << "error: cannot open file to write at " ; - cerr << "FactorGraph::exportToDotFile()" << endl; + cerr << "Error: couldn't open file '" << fileName << "'." ; return; } out << "graph \"" << fileName << "\" {" << endl; @@ -316,8 +315,7 @@ FactorGraph::exportToUaiFormat (const char* fileName) const { ofstream out (fileName); if (!out.is_open()) { - cerr << "error: cannot open file to write at " ; - cerr << "ConstraintTree::exportToUaiFormat()" << endl; + cerr << "Error: couldn't open file '" << fileName << "'." ; return; } out << "MARKOV" << endl; @@ -351,8 +349,7 @@ FactorGraph::exportToLibDaiFormat (const char* fileName) const { ofstream out (fileName); if (!out.is_open()) { - cerr << "error: cannot open file to write at " ; - cerr << "ConstraintTree::exportToUaiFormat()" << endl; + cerr << "Error: couldn't open file '" << fileName << "'." ; return; } out << facNodes_.size() << endl << endl; diff --git a/packages/CLPBN/horus/HorusCli.cpp b/packages/CLPBN/horus/HorusCli.cpp index 54f229dfa..520603052 100644 --- a/packages/CLPBN/horus/HorusCli.cpp +++ b/packages/CLPBN/horus/HorusCli.cpp @@ -17,14 +17,14 @@ VarIds readQueryAndEvidence (FactorGraph&, int, const char* [], int); void runSolver (const FactorGraph&, const VarIds&); const string USAGE = "usage: ./hcli [HORUS_FLAG=VALUE] \ -NETWORK_FILE [VARIABLE | OBSERVED_VARIABLE=EVIDENCE] ..." ; +MODEL_FILE [VARIABLE | OBSERVED_VARIABLE=EVIDENCE] ..." ; int main (int argc, const char* argv[]) { if (argc <= 1) { - cerr << "error: no graphical model specified" << endl; + cerr << "Error: no probabilistic graphical model was given." << endl; cerr << USAGE << endl; exit (EXIT_FAILURE); } @@ -51,12 +51,12 @@ readHorusFlags (int argc, const char* argv[]) string leftArg = arg.substr (0, pos); string rightArg = arg.substr (pos + 1); if (leftArg.empty()) { - cerr << "error: missing left argument" << endl; + cerr << "Error: missing left argument." << endl; cerr << USAGE << endl; exit (EXIT_FAILURE); } if (rightArg.empty()) { - cerr << "error: missing right argument" << endl; + cerr << "Error: missing right argument." << endl; cerr << USAGE << endl; exit (EXIT_FAILURE); } @@ -77,8 +77,8 @@ readFactorGraph (FactorGraph& fg, const char* s) } else if (extension == "fg") { fg.readFromLibDaiFormat (fileName.c_str()); } else { - cerr << "error: the graphical model must be defined either " ; - cerr << "in a UAI or libDAI file" << endl; + cerr << "Error: the probabilistic graphical model must be " ; + cerr << "defined either in a UAI or libDAI file." << endl; exit (EXIT_FAILURE); } } @@ -97,16 +97,16 @@ readQueryAndEvidence ( const string& arg = argv[i]; if (arg.find ('=') == std::string::npos) { if (Util::isInteger (arg) == false) { - cerr << "error: `" << arg << "' " ; - cerr << "is not a variable id" ; + cerr << "Error: `" << arg << "' " ; + cerr << "is not a variable id." 
; cerr << endl; exit (EXIT_FAILURE); } VarId vid = Util::stringToUnsigned (arg); VarNode* queryVar = fg.getVarNode (vid); if (queryVar == false) { - cerr << "error: unknow variable with id " ; - cerr << "`" << vid << "'" << endl; + cerr << "Error: unknow variable with id " ; + cerr << "`" << vid << "'." << endl; exit (EXIT_FAILURE); } queryIds.push_back (vid); @@ -115,37 +115,37 @@ readQueryAndEvidence ( string leftArg = arg.substr (0, pos); string rightArg = arg.substr (pos + 1); if (leftArg.empty()) { - cerr << "error: missing left argument" << endl; + cerr << "Error: missing left argument." << endl; cerr << USAGE << endl; exit (EXIT_FAILURE); } if (Util::isInteger (leftArg) == false) { - cerr << "error: `" << leftArg << "' " ; - cerr << "is not a variable id" << endl ; + cerr << "Error: `" << leftArg << "' " ; + cerr << "is not a variable id." << endl ; exit (EXIT_FAILURE); } VarId vid = Util::stringToUnsigned (leftArg); VarNode* observedVar = fg.getVarNode (vid); if (observedVar == false) { - cerr << "error: unknow variable with id " ; - cerr << "`" << vid << "'" << endl; + cerr << "Error: unknow variable with id " ; + cerr << "`" << vid << "'." << endl; exit (EXIT_FAILURE); } if (rightArg.empty()) { - cerr << "error: missing right argument" << endl; + cerr << "Error: missing right argument." << endl; cerr << USAGE << endl; exit (EXIT_FAILURE); } if (Util::isInteger (rightArg) == false) { - cerr << "error: `" << rightArg << "' " ; - cerr << "is not a state index" << endl ; + cerr << "Error: `" << rightArg << "' " ; + cerr << "is not a state index." << endl ; exit (EXIT_FAILURE); } unsigned stateIdx = Util::stringToUnsigned (rightArg); if (observedVar->isValidState (stateIdx) == false) { - cerr << "error: `" << stateIdx << "' " ; + cerr << "Error: `" << stateIdx << "' " ; cerr << "is not a valid state index for variable with id " ; - cerr << "`" << vid << "'" << endl; + cerr << "`" << vid << "'." << endl; exit (EXIT_FAILURE); } observedVar->setEvidence (stateIdx); diff --git a/packages/CLPBN/horus/HorusYap.cpp b/packages/CLPBN/horus/HorusYap.cpp index 2e74e68a8..3c566b73a 100644 --- a/packages/CLPBN/horus/HorusYap.cpp +++ b/packages/CLPBN/horus/HorusYap.cpp @@ -423,7 +423,7 @@ readParfactor (YAP_Term pfTerm) for (unsigned i = 1; i <= arity; i++) { YAP_Term ti = YAP_ArgOfTerm (i, term); if (YAP_IsAtomTerm (ti) == false) { - cerr << "error: constraint has free variables" << endl; + cerr << "Error: the constraint contains free variables." << endl; exit (EXIT_FAILURE); } string name ((char*) YAP_AtomName (YAP_AtomOfTerm (ti))); diff --git a/packages/CLPBN/horus/LiftedCircuit.cpp b/packages/CLPBN/horus/LiftedCircuit.cpp index 094e04aab..863f86f29 100644 --- a/packages/CLPBN/horus/LiftedCircuit.cpp +++ b/packages/CLPBN/horus/LiftedCircuit.cpp @@ -277,8 +277,7 @@ LiftedCircuit::exportToGraphViz (const char* fileName) { ofstream out (fileName); if (!out.is_open()) { - cerr << "error: cannot open file to write at " ; - cerr << "LiftedCircuit::exportToDotFile()" << endl; + cerr << "Error: couldn't open file '" << fileName << "'." 
; return; } out << "digraph {" << endl; diff --git a/packages/CLPBN/horus/LiftedKc.cpp b/packages/CLPBN/horus/LiftedKc.cpp index e6c937c33..678bacbec 100644 --- a/packages/CLPBN/horus/LiftedKc.cpp +++ b/packages/CLPBN/horus/LiftedKc.cpp @@ -22,7 +22,7 @@ LiftedKc::solveQuery (const Grounds& query) lwcnf_ = new LiftedWCNF (pfList_); circuit_ = new LiftedCircuit (lwcnf_); if (circuit_->isCompilationSucceeded() == false) { - cerr << "error: compilation failed" << endl; + cerr << "Error: the circuit compilation has failed." << endl; exit (EXIT_FAILURE); } vector groups; diff --git a/packages/CLPBN/horus/LiftedOperations.cpp b/packages/CLPBN/horus/LiftedOperations.cpp index e6edfcb27..e0da2dd3b 100644 --- a/packages/CLPBN/horus/LiftedOperations.cpp +++ b/packages/CLPBN/horus/LiftedOperations.cpp @@ -35,8 +35,8 @@ LiftedOperations::shatterAgainstQuery ( } } if (found == false) { - cerr << "error: could not find a parfactor with ground " ; - cerr << "`" << query[i] << "'" << endl; + cerr << "Error: could not find a parfactor with ground " ; + cerr << "`" << query[i] << "'." << endl; exit (EXIT_FAILURE); } pfList.add (newPfs); diff --git a/packages/CLPBN/horus/Makefile.in b/packages/CLPBN/horus/Makefile.in index 61a72d553..d19803ee7 100644 --- a/packages/CLPBN/horus/Makefile.in +++ b/packages/CLPBN/horus/Makefile.in @@ -23,10 +23,10 @@ CC=@CC@ CXX=@CXX@ # normal -CXXFLAGS= -std=c++0x @SHLIB_CXXFLAGS@ $(YAP_EXTRAS) $(DEFS) -D_YAP_NOT_INSTALLED_=1 -I$(srcdir) -I../../.. -I$(srcdir)/../../../include @CPPFLAGS@ -DNDEBUG +#CXXFLAGS= -std=c++0x @SHLIB_CXXFLAGS@ $(YAP_EXTRAS) $(DEFS) -D_YAP_NOT_INSTALLED_=1 -I$(srcdir) -I../../.. -I$(srcdir)/../../../include @CPPFLAGS@ -DNDEBUG # debug -#CXXFLAGS= -std=c++0x @SHLIB_CXXFLAGS@ $(YAP_EXTRAS) $(DEFS) -D_YAP_NOT_INSTALLED_=1 -I$(srcdir) -I../../.. -I$(srcdir)/../../../include @CPPFLAGS@ -g -O0 -Wextra +CXXFLAGS= -std=c++0x @SHLIB_CXXFLAGS@ $(YAP_EXTRAS) $(DEFS) -D_YAP_NOT_INSTALLED_=1 -I$(srcdir) -I../../.. -I$(srcdir)/../../../include @CPPFLAGS@ -g -O0 -Wextra # diff --git a/packages/CLPBN/horus/Parfactor.cpp b/packages/CLPBN/horus/Parfactor.cpp index 6ea8ada05..ef2301b7b 100644 --- a/packages/CLPBN/horus/Parfactor.cpp +++ b/packages/CLPBN/horus/Parfactor.cpp @@ -689,7 +689,7 @@ Parfactor::expandPotential ( { ullong newSize = (params_.size() / ranges_[fIdx]) * newRange; if (newSize > params_.max_size()) { - cerr << "error: an overflow occurred when performing expansion" ; + cerr << "Error: an overflow occurred when performing expansion." ; cerr << endl; exit (EXIT_FAILURE); } diff --git a/packages/CLPBN/horus/ParfactorList.cpp b/packages/CLPBN/horus/ParfactorList.cpp index 41732ee0c..1de1ccc7d 100644 --- a/packages/CLPBN/horus/ParfactorList.cpp +++ b/packages/CLPBN/horus/ParfactorList.cpp @@ -334,7 +334,7 @@ ParfactorList::shatterAgainstMySelf ( ProbFormula& f1 = g->argument (fIdx1); ProbFormula& f2 = g->argument (fIdx2); if (f1.isAtom()) { - cerr << "error: a ground occurs twice in a parfactor" << endl; + cerr << "Error: a ground occurs twice in the same parfactor." << endl; cerr << endl; exit (EXIT_FAILURE); } diff --git a/packages/CLPBN/horus/Util.cpp b/packages/CLPBN/horus/Util.cpp index 97c5907bd..0f3ce6544 100644 --- a/packages/CLPBN/horus/Util.cpp +++ b/packages/CLPBN/horus/Util.cpp @@ -53,7 +53,7 @@ stringToUnsigned (string str) ss << str; ss >> val; if (val < 0) { - cerr << "error: the readed number is negative" << endl; + cerr << "Error: the number readed is negative." 
<< endl; exit (EXIT_FAILURE); } return static_cast (val); From 57339760b9615eda045303816f47e74ac2bd037c Mon Sep 17 00:00:00 2001 From: Tiago Gomes Date: Thu, 20 Dec 2012 21:11:51 +0000 Subject: [PATCH 38/89] Merge LiftedKc and LiftedCircuit in one file --- packages/CLPBN/horus/LiftedCircuit.cpp | 1232 ----------------------- packages/CLPBN/horus/LiftedCircuit.h | 279 ------ packages/CLPBN/horus/LiftedKc.cpp | 1234 +++++++++++++++++++++++- packages/CLPBN/horus/LiftedKc.h | 274 +++++- packages/CLPBN/horus/Makefile.in | 7 +- 5 files changed, 1506 insertions(+), 1520 deletions(-) delete mode 100644 packages/CLPBN/horus/LiftedCircuit.cpp delete mode 100644 packages/CLPBN/horus/LiftedCircuit.h diff --git a/packages/CLPBN/horus/LiftedCircuit.cpp b/packages/CLPBN/horus/LiftedCircuit.cpp deleted file mode 100644 index 863f86f29..000000000 --- a/packages/CLPBN/horus/LiftedCircuit.cpp +++ /dev/null @@ -1,1232 +0,0 @@ -#include - -#include "LiftedCircuit.h" - - -OrNode::~OrNode (void) -{ - delete leftBranch_; - delete rightBranch_; -} - - - -double -OrNode::weight (void) const -{ - double lw = leftBranch_->weight(); - double rw = rightBranch_->weight(); - return Globals::logDomain ? Util::logSum (lw, rw) : lw + rw; -} - - - -AndNode::~AndNode (void) -{ - delete leftBranch_; - delete rightBranch_; -} - - - -double -AndNode::weight (void) const -{ - double lw = leftBranch_->weight(); - double rw = rightBranch_->weight(); - return Globals::logDomain ? lw + rw : lw * rw; -} - - - -int SetOrNode::nrPos_ = -1; -int SetOrNode::nrNeg_ = -1; - - - -SetOrNode::~SetOrNode (void) -{ - delete follow_; -} - - - -double -SetOrNode::weight (void) const -{ - double weightSum = LogAware::addIdenty(); - for (unsigned i = 0; i < nrGroundings_ + 1; i++) { - nrPos_ = nrGroundings_ - i; - nrNeg_ = i; - if (Globals::logDomain) { - double nrCombs = Util::nrCombinations (nrGroundings_, i); - double w = follow_->weight(); - weightSum = Util::logSum (weightSum, std::log (nrCombs) + w); - } else { - double w = follow_->weight(); - weightSum += Util::nrCombinations (nrGroundings_, i) * w; - } - } - nrPos_ = -1; - nrNeg_ = -1; - return weightSum; -} - - - -SetAndNode::~SetAndNode (void) -{ - delete follow_; -} - - - -double -SetAndNode::weight (void) const -{ - return LogAware::pow (follow_->weight(), nrGroundings_); -} - - - -IncExcNode::~IncExcNode (void) -{ - delete plus1Branch_; - delete plus2Branch_; - delete minusBranch_; -} - - - -double -IncExcNode::weight (void) const -{ - double w = 0.0; - if (Globals::logDomain) { - w = Util::logSum (plus1Branch_->weight(), plus2Branch_->weight()); - w = std::log (std::exp (w) - std::exp (minusBranch_->weight())); - } else { - w = plus1Branch_->weight() + plus2Branch_->weight(); - w -= minusBranch_->weight(); - } - return w; -} - - - -LeafNode::~LeafNode (void) -{ - delete clause_; -} - - - -double -LeafNode::weight (void) const -{ - assert (clause_->isUnit()); - if (clause_->posCountedLogVars().empty() == false - || clause_->negCountedLogVars().empty() == false) { - if (SetOrNode::isSet() == false) { - // return a NaN if we have a SetOrNode - // ancester that is not set. This can only - // happen when calculating the weights - // for the edge labels in graphviz - return 0.0 / 0.0; - } - } - double weight = clause_->literals()[0].isPositive() - ? 
lwcnf_.posWeight (clause_->literals().front().lid()) - : lwcnf_.negWeight (clause_->literals().front().lid()); - LogVarSet lvs = clause_->constr().logVarSet(); - lvs -= clause_->ipgLogVars(); - lvs -= clause_->posCountedLogVars(); - lvs -= clause_->negCountedLogVars(); - unsigned nrGroundings = 1; - if (lvs.empty() == false) { - nrGroundings = clause_->constr().projectedCopy (lvs).size(); - } - if (clause_->posCountedLogVars().empty() == false) { - nrGroundings *= std::pow (SetOrNode::nrPositives(), - clause_->nrPosCountedLogVars()); - } - if (clause_->negCountedLogVars().empty() == false) { - nrGroundings *= std::pow (SetOrNode::nrNegatives(), - clause_->nrNegCountedLogVars()); - } - return LogAware::pow (weight, nrGroundings); -} - - - -SmoothNode::~SmoothNode (void) -{ - Clause::deleteClauses (clauses_); -} - - - -double -SmoothNode::weight (void) const -{ - Clauses cs = clauses(); - double totalWeight = LogAware::multIdenty(); - for (size_t i = 0; i < cs.size(); i++) { - double posWeight = lwcnf_.posWeight (cs[i]->literals()[0].lid()); - double negWeight = lwcnf_.negWeight (cs[i]->literals()[0].lid()); - LogVarSet lvs = cs[i]->constr().logVarSet(); - lvs -= cs[i]->ipgLogVars(); - lvs -= cs[i]->posCountedLogVars(); - lvs -= cs[i]->negCountedLogVars(); - unsigned nrGroundings = 1; - if (lvs.empty() == false) { - nrGroundings = cs[i]->constr().projectedCopy (lvs).size(); - } - if (cs[i]->posCountedLogVars().empty() == false) { - nrGroundings *= std::pow (SetOrNode::nrPositives(), - cs[i]->nrPosCountedLogVars()); - } - if (cs[i]->negCountedLogVars().empty() == false) { - nrGroundings *= std::pow (SetOrNode::nrNegatives(), - cs[i]->nrNegCountedLogVars()); - } - if (Globals::logDomain) { - totalWeight += Util::logSum (posWeight, negWeight) * nrGroundings; - } else { - totalWeight *= std::pow (posWeight + negWeight, nrGroundings); - } - } - return totalWeight; -} - - - -double -TrueNode::weight (void) const -{ - return LogAware::multIdenty(); -} - - - -double -CompilationFailedNode::weight (void) const -{ - // weighted model counting in compilation - // failed nodes should give NaN - return 0.0 / 0.0; -} - - - -LiftedCircuit::LiftedCircuit (const LiftedWCNF* lwcnf) - : lwcnf_(lwcnf) -{ - root_ = 0; - compilationSucceeded_ = true; - Clauses clauses = Clause::copyClauses (lwcnf->clauses()); - compile (&root_, clauses); - if (compilationSucceeded_) { - smoothCircuit (root_); - } - if (Globals::verbosity > 1) { - if (compilationSucceeded_) { - double wmc = LogAware::exp (getWeightedModelCount()); - cout << "Weighted model count = " << wmc << endl << endl; - } - cout << "Exporting circuit to graphviz (circuit.dot)..." ; - cout << endl << endl; - exportToGraphViz ("circuit.dot"); - } -} - - - -LiftedCircuit::~LiftedCircuit (void) -{ - delete root_; - unordered_map::iterator it; - it = originClausesMap_.begin(); - while (it != originClausesMap_.end()) { - Clause::deleteClauses (it->second); - ++ it; - } -} - - - -bool -LiftedCircuit::isCompilationSucceeded (void) const -{ - return compilationSucceeded_; -} - - - -double -LiftedCircuit::getWeightedModelCount (void) const -{ - assert (compilationSucceeded_); - return root_->weight(); -} - - - -void -LiftedCircuit::exportToGraphViz (const char* fileName) -{ - ofstream out (fileName); - if (!out.is_open()) { - cerr << "Error: couldn't open file '" << fileName << "'." 
; - return; - } - out << "digraph {" << endl; - out << "ranksep=1" << endl; - exportToGraphViz (root_, out); - out << "}" << endl; - out.close(); -} - - - -void -LiftedCircuit::compile ( - CircuitNode** follow, - Clauses& clauses) -{ - if (compilationSucceeded_ == false - && Globals::verbosity <= 1) { - return; - } - - if (clauses.empty()) { - *follow = new TrueNode(); - return; - } - - if (clauses.size() == 1 && clauses[0]->isUnit()) { - *follow = new LeafNode (clauses[0], *lwcnf_); - return; - } - - if (tryUnitPropagation (follow, clauses)) { - return; - } - - if (tryIndependence (follow, clauses)) { - return; - } - - if (tryShannonDecomp (follow, clauses)) { - return; - } - - if (tryInclusionExclusion (follow, clauses)) { - return; - } - - if (tryIndepPartialGrounding (follow, clauses)) { - return; - } - - if (tryAtomCounting (follow, clauses)) { - return; - } - - *follow = new CompilationFailedNode(); - if (Globals::verbosity > 1) { - originClausesMap_[*follow] = clauses; - explanationMap_[*follow] = "" ; - } - compilationSucceeded_ = false; -} - - - -bool -LiftedCircuit::tryUnitPropagation ( - CircuitNode** follow, - Clauses& clauses) -{ - if (Globals::verbosity > 1) { - backupClauses_ = Clause::copyClauses (clauses); - } - for (size_t i = 0; i < clauses.size(); i++) { - if (clauses[i]->isUnit()) { - Clauses propagClauses; - for (size_t j = 0; j < clauses.size(); j++) { - if (i != j) { - LiteralId lid = clauses[i]->literals()[0].lid(); - LogVarTypes types = clauses[i]->logVarTypes (0); - if (clauses[i]->literals()[0].isPositive()) { - if (clauses[j]->containsPositiveLiteral (lid, types) == false) { - clauses[j]->removeNegativeLiterals (lid, types); - if (clauses[j]->nrLiterals() > 0) { - propagClauses.push_back (clauses[j]); - } else { - delete clauses[j]; - } - } else { - delete clauses[j]; - } - } else if (clauses[i]->literals()[0].isNegative()) { - if (clauses[j]->containsNegativeLiteral (lid, types) == false) { - clauses[j]->removePositiveLiterals (lid, types); - if (clauses[j]->nrLiterals() > 0) { - propagClauses.push_back (clauses[j]); - } else { - delete clauses[j]; - } - } else { - delete clauses[j]; - } - } - } - } - - AndNode* andNode = new AndNode(); - if (Globals::verbosity > 1) { - originClausesMap_[andNode] = backupClauses_; - stringstream explanation; - explanation << " UP on " << clauses[i]->literals()[0]; - explanationMap_[andNode] = explanation.str(); - } - - Clauses unitClause = { clauses[i] }; - compile (andNode->leftBranch(), unitClause); - compile (andNode->rightBranch(), propagClauses); - (*follow) = andNode; - return true; - } - } - if (Globals::verbosity > 1) { - Clause::deleteClauses (backupClauses_); - } - return false; -} - - - -bool -LiftedCircuit::tryIndependence ( - CircuitNode** follow, - Clauses& clauses) -{ - if (clauses.size() == 1) { - return false; - } - if (Globals::verbosity > 1) { - backupClauses_ = Clause::copyClauses (clauses); - } - Clauses depClauses = { clauses[0] }; - Clauses indepClauses (clauses.begin() + 1, clauses.end()); - bool finish = false; - while (finish == false) { - finish = true; - for (size_t i = 0; i < indepClauses.size(); i++) { - if (independentClause (*indepClauses[i], depClauses) == false) { - depClauses.push_back (indepClauses[i]); - indepClauses.erase (indepClauses.begin() + i); - finish = false; - break; - } - } - } - if (indepClauses.empty() == false) { - AndNode* andNode = new AndNode (); - if (Globals::verbosity > 1) { - originClausesMap_[andNode] = backupClauses_; - explanationMap_[andNode] = " Independence" ; - } 
- compile (andNode->leftBranch(), depClauses); - compile (andNode->rightBranch(), indepClauses); - (*follow) = andNode; - return true; - } - if (Globals::verbosity > 1) { - Clause::deleteClauses (backupClauses_); - } - return false; -} - - - -bool -LiftedCircuit::tryShannonDecomp ( - CircuitNode** follow, - Clauses& clauses) -{ - if (Globals::verbosity > 1) { - backupClauses_ = Clause::copyClauses (clauses); - } - for (size_t i = 0; i < clauses.size(); i++) { - const Literals& literals = clauses[i]->literals(); - for (size_t j = 0; j < literals.size(); j++) { - if (literals[j].isGround ( - clauses[i]->constr(), clauses[i]->ipgLogVars())) { - - Clause* c1 = lwcnf_->createClause (literals[j].lid()); - Clause* c2 = new Clause (*c1); - c2->literals().front().complement(); - - Clauses otherClauses = Clause::copyClauses (clauses); - clauses.push_back (c1); - otherClauses.push_back (c2); - - OrNode* orNode = new OrNode(); - if (Globals::verbosity > 1) { - originClausesMap_[orNode] = backupClauses_; - stringstream explanation; - explanation << " SD on " << literals[j]; - explanationMap_[orNode] = explanation.str(); - } - - compile (orNode->leftBranch(), clauses); - compile (orNode->rightBranch(), otherClauses); - (*follow) = orNode; - return true; - } - } - } - if (Globals::verbosity > 1) { - Clause::deleteClauses (backupClauses_); - } - return false; -} - - - -bool -LiftedCircuit::tryInclusionExclusion ( - CircuitNode** follow, - Clauses& clauses) -{ - if (Globals::verbosity > 1) { - backupClauses_ = Clause::copyClauses (clauses); - } - for (size_t i = 0; i < clauses.size(); i++) { - Literals depLits = { clauses[i]->literals().front() }; - Literals indepLits (clauses[i]->literals().begin() + 1, - clauses[i]->literals().end()); - bool finish = false; - while (finish == false) { - finish = true; - for (size_t j = 0; j < indepLits.size(); j++) { - if (independentLiteral (indepLits[j], depLits) == false) { - depLits.push_back (indepLits[j]); - indepLits.erase (indepLits.begin() + j); - finish = false; - break; - } - } - } - if (indepLits.empty() == false) { - LogVarSet lvs1; - for (size_t j = 0; j < depLits.size(); j++) { - lvs1 |= depLits[j].logVarSet(); - } - if (clauses[i]->constr().isCountNormalized (lvs1) == false) { - break; - } - LogVarSet lvs2; - for (size_t j = 0; j < indepLits.size(); j++) { - lvs2 |= indepLits[j].logVarSet(); - } - if (clauses[i]->constr().isCountNormalized (lvs2) == false) { - break; - } - Clause* c1 = new Clause (clauses[i]->constr().projectedCopy (lvs1)); - for (size_t j = 0; j < depLits.size(); j++) { - c1->addLiteral (depLits[j]); - } - Clause* c2 = new Clause (clauses[i]->constr().projectedCopy (lvs2)); - for (size_t j = 0; j < indepLits.size(); j++) { - c2->addLiteral (indepLits[j]); - } - - clauses.erase (clauses.begin() + i); - Clauses plus1Clauses = Clause::copyClauses (clauses); - Clauses plus2Clauses = Clause::copyClauses (clauses); - - plus1Clauses.push_back (c1); - plus2Clauses.push_back (c2); - clauses.push_back (c1); - clauses.push_back (c2); - - IncExcNode* ieNode = new IncExcNode(); - if (Globals::verbosity > 1) { - originClausesMap_[ieNode] = backupClauses_; - stringstream explanation; - explanation << " IncExc on clause nº " << i + 1; - explanationMap_[ieNode] = explanation.str(); - } - compile (ieNode->plus1Branch(), plus1Clauses); - compile (ieNode->plus2Branch(), plus2Clauses); - compile (ieNode->minusBranch(), clauses); - *follow = ieNode; - return true; - } - } - if (Globals::verbosity > 1) { - Clause::deleteClauses (backupClauses_); - } - return 
false; -} - - - -bool -LiftedCircuit::tryIndepPartialGrounding ( - CircuitNode** follow, - Clauses& clauses) -{ - // assumes that all literals have logical variables - // else, shannon decomp was possible - if (Globals::verbosity > 1) { - backupClauses_ = Clause::copyClauses (clauses); - } - LogVars rootLogVars; - LogVarSet lvs = clauses[0]->ipgCandidates(); - for (size_t i = 0; i < lvs.size(); i++) { - rootLogVars.clear(); - rootLogVars.push_back (lvs[i]); - ConstraintTree ct = clauses[0]->constr().projectedCopy ({lvs[i]}); - if (tryIndepPartialGroundingAux (clauses, ct, rootLogVars)) { - for (size_t j = 0; j < clauses.size(); j++) { - clauses[j]->addIpgLogVar (rootLogVars[j]); - } - SetAndNode* setAndNode = new SetAndNode (ct.size()); - if (Globals::verbosity > 1) { - originClausesMap_[setAndNode] = backupClauses_; - explanationMap_[setAndNode] = " IPG" ; - } - *follow = setAndNode; - compile (setAndNode->follow(), clauses); - return true; - } - } - if (Globals::verbosity > 1) { - Clause::deleteClauses (backupClauses_); - } - return false; -} - - - -bool -LiftedCircuit::tryIndepPartialGroundingAux ( - Clauses& clauses, - ConstraintTree& ct, - LogVars& rootLogVars) -{ - for (size_t i = 1; i < clauses.size(); i++) { - LogVarSet lvs = clauses[i]->ipgCandidates(); - for (size_t j = 0; j < lvs.size(); j++) { - ConstraintTree ct2 = clauses[i]->constr().projectedCopy ({lvs[j]}); - if (ct.tupleSet() == ct2.tupleSet()) { - rootLogVars.push_back (lvs[j]); - break; - } - } - if (rootLogVars.size() != i + 1) { - return false; - } - } - // verifies if the IPG logical vars appear in the same positions - unordered_map positions; - for (size_t i = 0; i < clauses.size(); i++) { - const Literals& literals = clauses[i]->literals(); - for (size_t j = 0; j < literals.size(); j++) { - size_t idx = literals[j].indexOfLogVar (rootLogVars[i]); - assert (idx != literals[j].nrLogVars()); - unordered_map::iterator it; - it = positions.find (literals[j].lid()); - if (it != positions.end()) { - if (it->second != idx) { - return false; - } - } else { - positions[literals[j].lid()] = idx; - } - } - } - return true; -} - - - -bool -LiftedCircuit::tryAtomCounting ( - CircuitNode** follow, - Clauses& clauses) -{ - for (size_t i = 0; i < clauses.size(); i++) { - if (clauses[i]->nrPosCountedLogVars() > 0 - || clauses[i]->nrNegCountedLogVars() > 0) { - // only allow one atom counting node per branch - return false; - } - } - if (Globals::verbosity > 1) { - backupClauses_ = Clause::copyClauses (clauses); - } - for (size_t i = 0; i < clauses.size(); i++) { - Literals literals = clauses[i]->literals(); - for (size_t j = 0; j < literals.size(); j++) { - if (literals[j].nrLogVars() == 1 - && ! clauses[i]->isIpgLogVar (literals[j].logVars().front()) - && ! 
clauses[i]->isCountedLogVar (literals[j].logVars().front())) { - unsigned nrGroundings = clauses[i]->constr().projectedCopy ( - literals[j].logVars()).size(); - SetOrNode* setOrNode = new SetOrNode (nrGroundings); - if (Globals::verbosity > 1) { - originClausesMap_[setOrNode] = backupClauses_; - explanationMap_[setOrNode] = " AC" ; - } - Clause* c1 = new Clause ( - clauses[i]->constr().projectedCopy (literals[j].logVars())); - Clause* c2 = new Clause ( - clauses[i]->constr().projectedCopy (literals[j].logVars())); - c1->addLiteral (literals[j]); - c2->addLiteralComplemented (literals[j]); - c1->addPosCountedLogVar (literals[j].logVars().front()); - c2->addNegCountedLogVar (literals[j].logVars().front()); - clauses.push_back (c1); - clauses.push_back (c2); - shatterCountedLogVars (clauses); - compile (setOrNode->follow(), clauses); - *follow = setOrNode; - return true; - } - } - } - if (Globals::verbosity > 1) { - Clause::deleteClauses (backupClauses_); - } - return false; -} - - - -void -LiftedCircuit::shatterCountedLogVars (Clauses& clauses) -{ - while (shatterCountedLogVarsAux (clauses)) ; -} - - - -bool -LiftedCircuit::shatterCountedLogVarsAux (Clauses& clauses) -{ - for (size_t i = 0; i < clauses.size() - 1; i++) { - for (size_t j = i + 1; j < clauses.size(); j++) { - bool splitedSome = shatterCountedLogVarsAux (clauses, i, j); - if (splitedSome) { - return true; - } - } - } - return false; -} - - - -bool -LiftedCircuit::shatterCountedLogVarsAux ( - Clauses& clauses, - size_t idx1, - size_t idx2) -{ - Literals lits1 = clauses[idx1]->literals(); - Literals lits2 = clauses[idx2]->literals(); - for (size_t i = 0; i < lits1.size(); i++) { - for (size_t j = 0; j < lits2.size(); j++) { - if (lits1[i].lid() == lits2[j].lid()) { - LogVars lvs1 = lits1[i].logVars(); - LogVars lvs2 = lits2[j].logVars(); - for (size_t k = 0; k < lvs1.size(); k++) { - if (clauses[idx1]->isCountedLogVar (lvs1[k]) - && clauses[idx2]->isCountedLogVar (lvs2[k]) == false) { - clauses.push_back (new Clause (*clauses[idx2])); - clauses[idx2]->addPosCountedLogVar (lvs2[k]); - clauses.back()->addNegCountedLogVar (lvs2[k]); - return true; - } - if (clauses[idx2]->isCountedLogVar (lvs2[k]) - && clauses[idx1]->isCountedLogVar (lvs1[k]) == false) { - clauses.push_back (new Clause (*clauses[idx1])); - clauses[idx1]->addPosCountedLogVar (lvs1[k]); - clauses.back()->addNegCountedLogVar (lvs1[k]); - return true; - } - } - } - } - } - return false; -} - - - -bool -LiftedCircuit::independentClause ( - Clause& clause, - Clauses& otherClauses) const -{ - for (size_t i = 0; i < otherClauses.size(); i++) { - if (Clause::independentClauses (clause, *otherClauses[i]) == false) { - return false; - } - } - return true; -} - - - -bool -LiftedCircuit::independentLiteral ( - const Literal& lit, - const Literals& otherLits) const -{ - for (size_t i = 0; i < otherLits.size(); i++) { - if (lit.lid() == otherLits[i].lid() - || (lit.logVarSet() & otherLits[i].logVarSet()).empty() == false) { - return false; - } - } - return true; -} - - - -LitLvTypesSet -LiftedCircuit::smoothCircuit (CircuitNode* node) -{ - assert (node != 0); - LitLvTypesSet propagLits; - - switch (getCircuitNodeType (node)) { - - case CircuitNodeType::OR_NODE: { - OrNode* casted = dynamic_cast(node); - LitLvTypesSet lids1 = smoothCircuit (*casted->leftBranch()); - LitLvTypesSet lids2 = smoothCircuit (*casted->rightBranch()); - LitLvTypesSet missingLeft = lids2 - lids1; - LitLvTypesSet missingRight = lids1 - lids2; - createSmoothNode (missingLeft, casted->leftBranch()); - 
createSmoothNode (missingRight, casted->rightBranch()); - propagLits |= lids1; - propagLits |= lids2; - break; - } - - case CircuitNodeType::AND_NODE: { - AndNode* casted = dynamic_cast(node); - LitLvTypesSet lids1 = smoothCircuit (*casted->leftBranch()); - LitLvTypesSet lids2 = smoothCircuit (*casted->rightBranch()); - propagLits |= lids1; - propagLits |= lids2; - break; - } - - case CircuitNodeType::SET_OR_NODE: { - SetOrNode* casted = dynamic_cast(node); - propagLits = smoothCircuit (*casted->follow()); - TinySet> litSet; - for (size_t i = 0; i < propagLits.size(); i++) { - litSet.insert (make_pair (propagLits[i].lid(), - propagLits[i].logVarTypes().size())); - } - LitLvTypesSet missingLids; - for (size_t i = 0; i < litSet.size(); i++) { - vector allTypes = getAllPossibleTypes (litSet[i].second); - for (size_t j = 0; j < allTypes.size(); j++) { - bool typeFound = false; - for (size_t k = 0; k < propagLits.size(); k++) { - if (litSet[i].first == propagLits[k].lid() - && containsTypes (propagLits[k].logVarTypes(), allTypes[j])) { - typeFound = true; - break; - } - } - if (typeFound == false) { - missingLids.insert (LitLvTypes (litSet[i].first, allTypes[j])); - } - } - } - createSmoothNode (missingLids, casted->follow()); - // setAllFullLogVars() can cause repeated elements in - // the set. Fix this by reconstructing the set again - LitLvTypesSet copy = propagLits; - propagLits.clear(); - for (size_t i = 0; i < copy.size(); i++) { - copy[i].setAllFullLogVars(); - propagLits.insert (copy[i]); - } - break; - } - - case CircuitNodeType::SET_AND_NODE: { - SetAndNode* casted = dynamic_cast(node); - propagLits = smoothCircuit (*casted->follow()); - break; - } - - case CircuitNodeType::INC_EXC_NODE: { - IncExcNode* casted = dynamic_cast(node); - LitLvTypesSet lids1 = smoothCircuit (*casted->plus1Branch()); - LitLvTypesSet lids2 = smoothCircuit (*casted->plus2Branch()); - LitLvTypesSet missingPlus1 = lids2 - lids1; - LitLvTypesSet missingPlus2 = lids1 - lids2; - createSmoothNode (missingPlus1, casted->plus1Branch()); - createSmoothNode (missingPlus2, casted->plus2Branch()); - propagLits |= lids1; - propagLits |= lids2; - break; - } - - case CircuitNodeType::LEAF_NODE: { - LeafNode* casted = dynamic_cast(node); - propagLits.insert (LitLvTypes ( - casted->clause()->literals()[0].lid(), - casted->clause()->logVarTypes(0))); - } - - default: - break; - } - - return propagLits; -} - - - -void -LiftedCircuit::createSmoothNode ( - const LitLvTypesSet& missingLits, - CircuitNode** prev) -{ - if (missingLits.empty() == false) { - if (Globals::verbosity > 1) { - unordered_map::iterator it; - it = originClausesMap_.find (*prev); - if (it != originClausesMap_.end()) { - backupClauses_ = it->second; - } else { - backupClauses_ = Clause::copyClauses ( - {((dynamic_cast(*prev))->clause())}); - } - } - Clauses clauses; - for (size_t i = 0; i < missingLits.size(); i++) { - LiteralId lid = missingLits[i].lid(); - const LogVarTypes& types = missingLits[i].logVarTypes(); - Clause* c = lwcnf_->createClause (lid); - for (size_t j = 0; j < types.size(); j++) { - LogVar X = c->literals().front().logVars()[j]; - if (types[j] == LogVarType::POS_LV) { - c->addPosCountedLogVar (X); - } else if (types[j] == LogVarType::NEG_LV) { - c->addNegCountedLogVar (X); - } - } - c->addLiteralComplemented (c->literals()[0]); - clauses.push_back (c); - } - SmoothNode* smoothNode = new SmoothNode (clauses, *lwcnf_); - *prev = new AndNode (smoothNode, *prev); - if (Globals::verbosity > 1) { - originClausesMap_[*prev] = backupClauses_; - 
explanationMap_[*prev] = " Smoothing" ; - } - } -} - - - -vector -LiftedCircuit::getAllPossibleTypes (unsigned nrLogVars) const -{ - if (nrLogVars == 0) { - return {}; - } - if (nrLogVars == 1) { - return {{LogVarType::POS_LV},{LogVarType::NEG_LV}}; - } - vector res; - Ranges ranges (nrLogVars, 2); - Indexer indexer (ranges); - while (indexer.valid()) { - LogVarTypes types; - for (size_t i = 0; i < nrLogVars; i++) { - if (indexer[i] == 0) { - types.push_back (LogVarType::POS_LV); - } else { - types.push_back (LogVarType::NEG_LV); - } - } - res.push_back (types); - ++ indexer; - } - return res; -} - - - -bool -LiftedCircuit::containsTypes ( - const LogVarTypes& typesA, - const LogVarTypes& typesB) const -{ - for (size_t i = 0; i < typesA.size(); i++) { - if (typesA[i] == LogVarType::FULL_LV) { - - } else if (typesA[i] == LogVarType::POS_LV - && typesB[i] == LogVarType::POS_LV) { - - } else if (typesA[i] == LogVarType::NEG_LV - && typesB[i] == LogVarType::NEG_LV) { - - } else { - return false; - } - } - return true; -} - - - -CircuitNodeType -LiftedCircuit::getCircuitNodeType (const CircuitNode* node) const -{ - CircuitNodeType type; - if (dynamic_cast(node) != 0) { - type = CircuitNodeType::OR_NODE; - } else if (dynamic_cast(node) != 0) { - type = CircuitNodeType::AND_NODE; - } else if (dynamic_cast(node) != 0) { - type = CircuitNodeType::SET_OR_NODE; - } else if (dynamic_cast(node) != 0) { - type = CircuitNodeType::SET_AND_NODE; - } else if (dynamic_cast(node) != 0) { - type = CircuitNodeType::INC_EXC_NODE; - } else if (dynamic_cast(node) != 0) { - type = CircuitNodeType::LEAF_NODE; - } else if (dynamic_cast(node) != 0) { - type = CircuitNodeType::SMOOTH_NODE; - } else if (dynamic_cast(node) != 0) { - type = CircuitNodeType::TRUE_NODE; - } else if (dynamic_cast(node) != 0) { - type = CircuitNodeType::COMPILATION_FAILED_NODE; - } else { - assert (false); - } - return type; -} - - - -void -LiftedCircuit::exportToGraphViz (CircuitNode* node, ofstream& os) -{ - assert (node != 0); - - static unsigned nrAuxNodes = 0; - stringstream ss; - ss << "n" << nrAuxNodes; - string auxNode = ss.str(); - nrAuxNodes ++; - string opStyle = "shape=circle,width=0.7,margin=\"0.0,0.0\"," ; - - switch (getCircuitNodeType (node)) { - - case OR_NODE: { - OrNode* casted = dynamic_cast(node); - printClauses (casted, os); - - os << auxNode << " [" << opStyle << "label=\"∨\"]" << endl; - os << escapeNode (node) << " -> " << auxNode; - os << " [label=\"" << getExplanationString (node) << "\"]" ; - os << endl; - - os << auxNode << " -> " ; - os << escapeNode (*casted->leftBranch()); - os << " [label=\" " << (*casted->leftBranch())->weight() << "\"]" ; - os << endl; - - os << auxNode << " -> " ; - os << escapeNode (*casted->rightBranch()); - os << " [label=\" " << (*casted->rightBranch())->weight() << "\"]" ; - os << endl; - - exportToGraphViz (*casted->leftBranch(), os); - exportToGraphViz (*casted->rightBranch(), os); - break; - } - - case AND_NODE: { - AndNode* casted = dynamic_cast(node); - printClauses (casted, os); - - os << auxNode << " [" << opStyle << "label=\"∧\"]" << endl; - os << escapeNode (node) << " -> " << auxNode; - os << " [label=\"" << getExplanationString (node) << "\"]" ; - os << endl; - - os << auxNode << " -> " ; - os << escapeNode (*casted->leftBranch()); - os << " [label=\" " << (*casted->leftBranch())->weight() << "\"]" ; - os << endl; - - os << auxNode << " -> " ; - os << escapeNode (*casted->rightBranch()) << endl; - os << " [label=\" " << (*casted->rightBranch())->weight() << "\"]" ; - os << 
endl; - - exportToGraphViz (*casted->leftBranch(), os); - exportToGraphViz (*casted->rightBranch(), os); - break; - } - - case SET_OR_NODE: { - SetOrNode* casted = dynamic_cast(node); - printClauses (casted, os); - - os << auxNode << " [" << opStyle << "label=\"∨(X)\"]" << endl; - os << escapeNode (node) << " -> " << auxNode; - os << " [label=\"" << getExplanationString (node) << "\"]" ; - os << endl; - - os << auxNode << " -> " ; - os << escapeNode (*casted->follow()); - os << " [label=\" " << (*casted->follow())->weight() << "\"]" ; - os << endl; - - exportToGraphViz (*casted->follow(), os); - break; - } - - case SET_AND_NODE: { - SetAndNode* casted = dynamic_cast(node); - printClauses (casted, os); - - os << auxNode << " [" << opStyle << "label=\"∧(X)\"]" << endl; - os << escapeNode (node) << " -> " << auxNode; - os << " [label=\"" << getExplanationString (node) << "\"]" ; - os << endl; - - os << auxNode << " -> " ; - os << escapeNode (*casted->follow()); - os << " [label=\" " << (*casted->follow())->weight() << "\"]" ; - os << endl; - - exportToGraphViz (*casted->follow(), os); - break; - } - - case INC_EXC_NODE: { - IncExcNode* casted = dynamic_cast(node); - printClauses (casted, os); - - os << auxNode << " [" << opStyle << "label=\"+ - +\"]" ; - os << endl; - os << escapeNode (node) << " -> " << auxNode; - os << " [label=\"" << getExplanationString (node) << "\"]" ; - os << endl; - - os << auxNode << " -> " ; - os << escapeNode (*casted->plus1Branch()); - os << " [label=\" " << (*casted->plus1Branch())->weight() << "\"]" ; - os << endl; - - os << auxNode << " -> " ; - os << escapeNode (*casted->minusBranch()) << endl; - os << " [label=\" " << (*casted->minusBranch())->weight() << "\"]" ; - os << endl; - - os << auxNode << " -> " ; - os << escapeNode (*casted->plus2Branch()); - os << " [label=\" " << (*casted->plus2Branch())->weight() << "\"]" ; - os << endl; - - exportToGraphViz (*casted->plus1Branch(), os); - exportToGraphViz (*casted->plus2Branch(), os); - exportToGraphViz (*casted->minusBranch(), os); - break; - } - - case LEAF_NODE: { - printClauses (node, os, "style=filled,fillcolor=palegreen,"); - break; - } - - case SMOOTH_NODE: { - printClauses (node, os, "style=filled,fillcolor=lightblue,"); - break; - } - - case TRUE_NODE: { - os << escapeNode (node); - os << " [shape=box,label=\"⊤\"]" ; - os << endl; - break; - } - - case COMPILATION_FAILED_NODE: { - printClauses (node, os, "style=filled,fillcolor=salmon,"); - break; - } - - default: - assert (false); - } -} - - - -string -LiftedCircuit::escapeNode (const CircuitNode* node) const -{ - stringstream ss; - ss << "\"" << node << "\"" ; - return ss.str(); -} - - - -string -LiftedCircuit::getExplanationString (CircuitNode* node) -{ - return Util::contains (explanationMap_, node) - ? 
explanationMap_[node] - : "" ; -} - - - -void -LiftedCircuit::printClauses ( - CircuitNode* node, - ofstream& os, - string extraOptions) -{ - Clauses clauses; - if (Util::contains (originClausesMap_, node)) { - clauses = originClausesMap_[node]; - } else if (getCircuitNodeType (node) == CircuitNodeType::LEAF_NODE) { - clauses = { (dynamic_cast(node))->clause() } ; - } else if (getCircuitNodeType (node) == CircuitNodeType::SMOOTH_NODE) { - clauses = (dynamic_cast(node))->clauses(); - } - assert (clauses.empty() == false); - os << escapeNode (node); - os << " [shape=box," << extraOptions << "label=\"" ; - for (size_t i = 0; i < clauses.size(); i++) { - if (i != 0) os << "\\n" ; - os << *clauses[i]; - } - os << "\"]" ; - os << endl; -} - diff --git a/packages/CLPBN/horus/LiftedCircuit.h b/packages/CLPBN/horus/LiftedCircuit.h deleted file mode 100644 index e3883211b..000000000 --- a/packages/CLPBN/horus/LiftedCircuit.h +++ /dev/null @@ -1,279 +0,0 @@ -#ifndef HORUS_LIFTEDCIRCUIT_H -#define HORUS_LIFTEDCIRCUIT_H - -#include - -#include "LiftedWCNF.h" - - -enum CircuitNodeType { - OR_NODE, - AND_NODE, - SET_OR_NODE, - SET_AND_NODE, - INC_EXC_NODE, - LEAF_NODE, - SMOOTH_NODE, - TRUE_NODE, - COMPILATION_FAILED_NODE -}; - - - -class CircuitNode -{ - public: - CircuitNode (void) { } - - virtual ~CircuitNode (void) { } - - virtual double weight (void) const = 0; -}; - - - -class OrNode : public CircuitNode -{ - public: - OrNode (void) : CircuitNode(), leftBranch_(0), rightBranch_(0) { } - - ~OrNode (void); - - CircuitNode** leftBranch (void) { return &leftBranch_; } - CircuitNode** rightBranch (void) { return &rightBranch_; } - - double weight (void) const; - - private: - CircuitNode* leftBranch_; - CircuitNode* rightBranch_; -}; - - - -class AndNode : public CircuitNode -{ - public: - AndNode (void) : CircuitNode(), leftBranch_(0), rightBranch_(0) { } - - AndNode (CircuitNode* leftBranch, CircuitNode* rightBranch) - : CircuitNode(), leftBranch_(leftBranch), rightBranch_(rightBranch) { } - - ~AndNode (void); - - CircuitNode** leftBranch (void) { return &leftBranch_; } - CircuitNode** rightBranch (void) { return &rightBranch_; } - - double weight (void) const; - - private: - CircuitNode* leftBranch_; - CircuitNode* rightBranch_; -}; - - - -class SetOrNode : public CircuitNode -{ - public: - SetOrNode (unsigned nrGroundings) - : CircuitNode(), follow_(0), nrGroundings_(nrGroundings) { } - - ~SetOrNode (void); - - CircuitNode** follow (void) { return &follow_; } - - static unsigned nrPositives (void) { return nrPos_; } - - static unsigned nrNegatives (void) { return nrNeg_; } - - static bool isSet (void) { return nrPos_ >= 0; } - - double weight (void) const; - - private: - CircuitNode* follow_; - unsigned nrGroundings_; - static int nrPos_; - static int nrNeg_; -}; - - - -class SetAndNode : public CircuitNode -{ - public: - SetAndNode (unsigned nrGroundings) - : CircuitNode(), follow_(0), nrGroundings_(nrGroundings) { } - - ~SetAndNode (void); - - CircuitNode** follow (void) { return &follow_; } - - double weight (void) const; - - private: - CircuitNode* follow_; - unsigned nrGroundings_; -}; - - - -class IncExcNode : public CircuitNode -{ - public: - IncExcNode (void) - : CircuitNode(), plus1Branch_(0), plus2Branch_(0), minusBranch_(0) { } - - ~IncExcNode (void); - - CircuitNode** plus1Branch (void) { return &plus1Branch_; } - CircuitNode** plus2Branch (void) { return &plus2Branch_; } - CircuitNode** minusBranch (void) { return &minusBranch_; } - - double weight (void) const; - - private: - 
CircuitNode* plus1Branch_; - CircuitNode* plus2Branch_; - CircuitNode* minusBranch_; -}; - - - -class LeafNode : public CircuitNode -{ - public: - LeafNode (Clause* clause, const LiftedWCNF& lwcnf) - : CircuitNode(), clause_(clause), lwcnf_(lwcnf) { } - - ~LeafNode (void); - - const Clause* clause (void) const { return clause_; } - - Clause* clause (void) { return clause_; } - - double weight (void) const; - - private: - Clause* clause_; - const LiftedWCNF& lwcnf_; -}; - - - -class SmoothNode : public CircuitNode -{ - public: - SmoothNode (const Clauses& clauses, const LiftedWCNF& lwcnf) - : CircuitNode(), clauses_(clauses), lwcnf_(lwcnf) { } - - ~SmoothNode (void); - - const Clauses& clauses (void) const { return clauses_; } - - Clauses clauses (void) { return clauses_; } - - double weight (void) const; - - private: - Clauses clauses_; - const LiftedWCNF& lwcnf_; -}; - - - -class TrueNode : public CircuitNode -{ - public: - TrueNode (void) : CircuitNode() { } - - double weight (void) const; -}; - - - -class CompilationFailedNode : public CircuitNode -{ - public: - CompilationFailedNode (void) : CircuitNode() { } - - double weight (void) const; -}; - - - -class LiftedCircuit -{ - public: - LiftedCircuit (const LiftedWCNF* lwcnf); - - ~LiftedCircuit (void); - - bool isCompilationSucceeded (void) const; - - double getWeightedModelCount (void) const; - - void exportToGraphViz (const char*); - - private: - - void compile (CircuitNode** follow, Clauses& clauses); - - bool tryUnitPropagation (CircuitNode** follow, Clauses& clauses); - - bool tryIndependence (CircuitNode** follow, Clauses& clauses); - - bool tryShannonDecomp (CircuitNode** follow, Clauses& clauses); - - bool tryInclusionExclusion (CircuitNode** follow, Clauses& clauses); - - bool tryIndepPartialGrounding (CircuitNode** follow, Clauses& clauses); - - bool tryIndepPartialGroundingAux (Clauses& clauses, ConstraintTree& ct, - LogVars& rootLogVars); - - bool tryAtomCounting (CircuitNode** follow, Clauses& clauses); - - void shatterCountedLogVars (Clauses& clauses); - - bool shatterCountedLogVarsAux (Clauses& clauses); - - bool shatterCountedLogVarsAux (Clauses& clauses, size_t idx1, size_t idx2); - - bool independentClause (Clause& clause, Clauses& otherClauses) const; - - bool independentLiteral (const Literal& lit, - const Literals& otherLits) const; - - LitLvTypesSet smoothCircuit (CircuitNode* node); - - void createSmoothNode (const LitLvTypesSet& lids, - CircuitNode** prev); - - vector getAllPossibleTypes (unsigned nrLogVars) const; - - bool containsTypes (const LogVarTypes& typesA, - const LogVarTypes& typesB) const; - - CircuitNodeType getCircuitNodeType (const CircuitNode* node) const; - - void exportToGraphViz (CircuitNode* node, ofstream&); - - void printClauses (CircuitNode* node, ofstream&, - string extraOptions = ""); - - string escapeNode (const CircuitNode* node) const; - - string getExplanationString (CircuitNode* node); - - CircuitNode* root_; - const LiftedWCNF* lwcnf_; - bool compilationSucceeded_; - Clauses backupClauses_; - unordered_map originClausesMap_; - unordered_map explanationMap_; -}; - -#endif // HORUS_LIFTEDCIRCUIT_H - diff --git a/packages/CLPBN/horus/LiftedKc.cpp b/packages/CLPBN/horus/LiftedKc.cpp index 678bacbec..45848ab70 100644 --- a/packages/CLPBN/horus/LiftedKc.cpp +++ b/packages/CLPBN/horus/LiftedKc.cpp @@ -1,10 +1,1240 @@ +#include + #include "LiftedKc.h" -#include "LiftedWCNF.h" -#include "LiftedCircuit.h" #include "LiftedOperations.h" #include "Indexer.h" + +OrNode::~OrNode (void) +{ + delete 
leftBranch_; + delete rightBranch_; +} + + + +double +OrNode::weight (void) const +{ + double lw = leftBranch_->weight(); + double rw = rightBranch_->weight(); + return Globals::logDomain ? Util::logSum (lw, rw) : lw + rw; +} + + + +AndNode::~AndNode (void) +{ + delete leftBranch_; + delete rightBranch_; +} + + + +double +AndNode::weight (void) const +{ + double lw = leftBranch_->weight(); + double rw = rightBranch_->weight(); + return Globals::logDomain ? lw + rw : lw * rw; +} + + + +int SetOrNode::nrPos_ = -1; +int SetOrNode::nrNeg_ = -1; + + + +SetOrNode::~SetOrNode (void) +{ + delete follow_; +} + + + +double +SetOrNode::weight (void) const +{ + double weightSum = LogAware::addIdenty(); + for (unsigned i = 0; i < nrGroundings_ + 1; i++) { + nrPos_ = nrGroundings_ - i; + nrNeg_ = i; + if (Globals::logDomain) { + double nrCombs = Util::nrCombinations (nrGroundings_, i); + double w = follow_->weight(); + weightSum = Util::logSum (weightSum, std::log (nrCombs) + w); + } else { + double w = follow_->weight(); + weightSum += Util::nrCombinations (nrGroundings_, i) * w; + } + } + nrPos_ = -1; + nrNeg_ = -1; + return weightSum; +} + + + +SetAndNode::~SetAndNode (void) +{ + delete follow_; +} + + + +double +SetAndNode::weight (void) const +{ + return LogAware::pow (follow_->weight(), nrGroundings_); +} + + + +IncExcNode::~IncExcNode (void) +{ + delete plus1Branch_; + delete plus2Branch_; + delete minusBranch_; +} + + + +double +IncExcNode::weight (void) const +{ + double w = 0.0; + if (Globals::logDomain) { + w = Util::logSum (plus1Branch_->weight(), plus2Branch_->weight()); + w = std::log (std::exp (w) - std::exp (minusBranch_->weight())); + } else { + w = plus1Branch_->weight() + plus2Branch_->weight(); + w -= minusBranch_->weight(); + } + return w; +} + + + +LeafNode::~LeafNode (void) +{ + delete clause_; +} + + + +double +LeafNode::weight (void) const +{ + assert (clause_->isUnit()); + if (clause_->posCountedLogVars().empty() == false + || clause_->negCountedLogVars().empty() == false) { + if (SetOrNode::isSet() == false) { + // return a NaN if we have a SetOrNode + // ancester that is not set. This can only + // happen when calculating the weights + // for the edge labels in graphviz + return 0.0 / 0.0; + } + } + double weight = clause_->literals()[0].isPositive() + ? 
lwcnf_.posWeight (clause_->literals().front().lid()) + : lwcnf_.negWeight (clause_->literals().front().lid()); + LogVarSet lvs = clause_->constr().logVarSet(); + lvs -= clause_->ipgLogVars(); + lvs -= clause_->posCountedLogVars(); + lvs -= clause_->negCountedLogVars(); + unsigned nrGroundings = 1; + if (lvs.empty() == false) { + nrGroundings = clause_->constr().projectedCopy (lvs).size(); + } + if (clause_->posCountedLogVars().empty() == false) { + nrGroundings *= std::pow (SetOrNode::nrPositives(), + clause_->nrPosCountedLogVars()); + } + if (clause_->negCountedLogVars().empty() == false) { + nrGroundings *= std::pow (SetOrNode::nrNegatives(), + clause_->nrNegCountedLogVars()); + } + return LogAware::pow (weight, nrGroundings); +} + + + +SmoothNode::~SmoothNode (void) +{ + Clause::deleteClauses (clauses_); +} + + + +double +SmoothNode::weight (void) const +{ + Clauses cs = clauses(); + double totalWeight = LogAware::multIdenty(); + for (size_t i = 0; i < cs.size(); i++) { + double posWeight = lwcnf_.posWeight (cs[i]->literals()[0].lid()); + double negWeight = lwcnf_.negWeight (cs[i]->literals()[0].lid()); + LogVarSet lvs = cs[i]->constr().logVarSet(); + lvs -= cs[i]->ipgLogVars(); + lvs -= cs[i]->posCountedLogVars(); + lvs -= cs[i]->negCountedLogVars(); + unsigned nrGroundings = 1; + if (lvs.empty() == false) { + nrGroundings = cs[i]->constr().projectedCopy (lvs).size(); + } + if (cs[i]->posCountedLogVars().empty() == false) { + nrGroundings *= std::pow (SetOrNode::nrPositives(), + cs[i]->nrPosCountedLogVars()); + } + if (cs[i]->negCountedLogVars().empty() == false) { + nrGroundings *= std::pow (SetOrNode::nrNegatives(), + cs[i]->nrNegCountedLogVars()); + } + if (Globals::logDomain) { + totalWeight += Util::logSum (posWeight, negWeight) * nrGroundings; + } else { + totalWeight *= std::pow (posWeight + negWeight, nrGroundings); + } + } + return totalWeight; +} + + + +double +TrueNode::weight (void) const +{ + return LogAware::multIdenty(); +} + + + +double +CompilationFailedNode::weight (void) const +{ + // weighted model counting in compilation + // failed nodes should give NaN + return 0.0 / 0.0; +} + + + +LiftedCircuit::LiftedCircuit (const LiftedWCNF* lwcnf) + : lwcnf_(lwcnf) +{ + root_ = 0; + compilationSucceeded_ = true; + Clauses clauses = Clause::copyClauses (lwcnf->clauses()); + compile (&root_, clauses); + if (compilationSucceeded_) { + smoothCircuit (root_); + } + if (Globals::verbosity > 1) { + if (compilationSucceeded_) { + double wmc = LogAware::exp (getWeightedModelCount()); + cout << "Weighted model count = " << wmc << endl << endl; + } + cout << "Exporting circuit to graphviz (circuit.dot)..." ; + cout << endl << endl; + exportToGraphViz ("circuit.dot"); + } +} + + + +LiftedCircuit::~LiftedCircuit (void) +{ + delete root_; + unordered_map::iterator it; + it = originClausesMap_.begin(); + while (it != originClausesMap_.end()) { + Clause::deleteClauses (it->second); + ++ it; + } +} + + + +bool +LiftedCircuit::isCompilationSucceeded (void) const +{ + return compilationSucceeded_; +} + + + +double +LiftedCircuit::getWeightedModelCount (void) const +{ + assert (compilationSucceeded_); + return root_->weight(); +} + + + +void +LiftedCircuit::exportToGraphViz (const char* fileName) +{ + ofstream out (fileName); + if (!out.is_open()) { + cerr << "Error: couldn't open file '" << fileName << "'." 
; + return; + } + out << "digraph {" << endl; + out << "ranksep=1" << endl; + exportToGraphViz (root_, out); + out << "}" << endl; + out.close(); +} + + + +void +LiftedCircuit::compile ( + CircuitNode** follow, + Clauses& clauses) +{ + if (compilationSucceeded_ == false + && Globals::verbosity <= 1) { + return; + } + + if (clauses.empty()) { + *follow = new TrueNode(); + return; + } + + if (clauses.size() == 1 && clauses[0]->isUnit()) { + *follow = new LeafNode (clauses[0], *lwcnf_); + return; + } + + if (tryUnitPropagation (follow, clauses)) { + return; + } + + if (tryIndependence (follow, clauses)) { + return; + } + + if (tryShannonDecomp (follow, clauses)) { + return; + } + + if (tryInclusionExclusion (follow, clauses)) { + return; + } + + if (tryIndepPartialGrounding (follow, clauses)) { + return; + } + + if (tryAtomCounting (follow, clauses)) { + return; + } + + *follow = new CompilationFailedNode(); + if (Globals::verbosity > 1) { + originClausesMap_[*follow] = clauses; + explanationMap_[*follow] = "" ; + } + compilationSucceeded_ = false; +} + + + +bool +LiftedCircuit::tryUnitPropagation ( + CircuitNode** follow, + Clauses& clauses) +{ + if (Globals::verbosity > 1) { + backupClauses_ = Clause::copyClauses (clauses); + } + for (size_t i = 0; i < clauses.size(); i++) { + if (clauses[i]->isUnit()) { + Clauses propagClauses; + for (size_t j = 0; j < clauses.size(); j++) { + if (i != j) { + LiteralId lid = clauses[i]->literals()[0].lid(); + LogVarTypes types = clauses[i]->logVarTypes (0); + if (clauses[i]->literals()[0].isPositive()) { + if (clauses[j]->containsPositiveLiteral (lid, types) == false) { + clauses[j]->removeNegativeLiterals (lid, types); + if (clauses[j]->nrLiterals() > 0) { + propagClauses.push_back (clauses[j]); + } else { + delete clauses[j]; + } + } else { + delete clauses[j]; + } + } else if (clauses[i]->literals()[0].isNegative()) { + if (clauses[j]->containsNegativeLiteral (lid, types) == false) { + clauses[j]->removePositiveLiterals (lid, types); + if (clauses[j]->nrLiterals() > 0) { + propagClauses.push_back (clauses[j]); + } else { + delete clauses[j]; + } + } else { + delete clauses[j]; + } + } + } + } + + AndNode* andNode = new AndNode(); + if (Globals::verbosity > 1) { + originClausesMap_[andNode] = backupClauses_; + stringstream explanation; + explanation << " UP on " << clauses[i]->literals()[0]; + explanationMap_[andNode] = explanation.str(); + } + + Clauses unitClause = { clauses[i] }; + compile (andNode->leftBranch(), unitClause); + compile (andNode->rightBranch(), propagClauses); + (*follow) = andNode; + return true; + } + } + if (Globals::verbosity > 1) { + Clause::deleteClauses (backupClauses_); + } + return false; +} + + + +bool +LiftedCircuit::tryIndependence ( + CircuitNode** follow, + Clauses& clauses) +{ + if (clauses.size() == 1) { + return false; + } + if (Globals::verbosity > 1) { + backupClauses_ = Clause::copyClauses (clauses); + } + Clauses depClauses = { clauses[0] }; + Clauses indepClauses (clauses.begin() + 1, clauses.end()); + bool finish = false; + while (finish == false) { + finish = true; + for (size_t i = 0; i < indepClauses.size(); i++) { + if (independentClause (*indepClauses[i], depClauses) == false) { + depClauses.push_back (indepClauses[i]); + indepClauses.erase (indepClauses.begin() + i); + finish = false; + break; + } + } + } + if (indepClauses.empty() == false) { + AndNode* andNode = new AndNode (); + if (Globals::verbosity > 1) { + originClausesMap_[andNode] = backupClauses_; + explanationMap_[andNode] = " Independence" ; + } 
+ compile (andNode->leftBranch(), depClauses); + compile (andNode->rightBranch(), indepClauses); + (*follow) = andNode; + return true; + } + if (Globals::verbosity > 1) { + Clause::deleteClauses (backupClauses_); + } + return false; +} + + + +bool +LiftedCircuit::tryShannonDecomp ( + CircuitNode** follow, + Clauses& clauses) +{ + if (Globals::verbosity > 1) { + backupClauses_ = Clause::copyClauses (clauses); + } + for (size_t i = 0; i < clauses.size(); i++) { + const Literals& literals = clauses[i]->literals(); + for (size_t j = 0; j < literals.size(); j++) { + if (literals[j].isGround ( + clauses[i]->constr(), clauses[i]->ipgLogVars())) { + + Clause* c1 = lwcnf_->createClause (literals[j].lid()); + Clause* c2 = new Clause (*c1); + c2->literals().front().complement(); + + Clauses otherClauses = Clause::copyClauses (clauses); + clauses.push_back (c1); + otherClauses.push_back (c2); + + OrNode* orNode = new OrNode(); + if (Globals::verbosity > 1) { + originClausesMap_[orNode] = backupClauses_; + stringstream explanation; + explanation << " SD on " << literals[j]; + explanationMap_[orNode] = explanation.str(); + } + + compile (orNode->leftBranch(), clauses); + compile (orNode->rightBranch(), otherClauses); + (*follow) = orNode; + return true; + } + } + } + if (Globals::verbosity > 1) { + Clause::deleteClauses (backupClauses_); + } + return false; +} + + + +bool +LiftedCircuit::tryInclusionExclusion ( + CircuitNode** follow, + Clauses& clauses) +{ + if (Globals::verbosity > 1) { + backupClauses_ = Clause::copyClauses (clauses); + } + for (size_t i = 0; i < clauses.size(); i++) { + Literals depLits = { clauses[i]->literals().front() }; + Literals indepLits (clauses[i]->literals().begin() + 1, + clauses[i]->literals().end()); + bool finish = false; + while (finish == false) { + finish = true; + for (size_t j = 0; j < indepLits.size(); j++) { + if (independentLiteral (indepLits[j], depLits) == false) { + depLits.push_back (indepLits[j]); + indepLits.erase (indepLits.begin() + j); + finish = false; + break; + } + } + } + if (indepLits.empty() == false) { + LogVarSet lvs1; + for (size_t j = 0; j < depLits.size(); j++) { + lvs1 |= depLits[j].logVarSet(); + } + if (clauses[i]->constr().isCountNormalized (lvs1) == false) { + break; + } + LogVarSet lvs2; + for (size_t j = 0; j < indepLits.size(); j++) { + lvs2 |= indepLits[j].logVarSet(); + } + if (clauses[i]->constr().isCountNormalized (lvs2) == false) { + break; + } + Clause* c1 = new Clause (clauses[i]->constr().projectedCopy (lvs1)); + for (size_t j = 0; j < depLits.size(); j++) { + c1->addLiteral (depLits[j]); + } + Clause* c2 = new Clause (clauses[i]->constr().projectedCopy (lvs2)); + for (size_t j = 0; j < indepLits.size(); j++) { + c2->addLiteral (indepLits[j]); + } + + clauses.erase (clauses.begin() + i); + Clauses plus1Clauses = Clause::copyClauses (clauses); + Clauses plus2Clauses = Clause::copyClauses (clauses); + + plus1Clauses.push_back (c1); + plus2Clauses.push_back (c2); + clauses.push_back (c1); + clauses.push_back (c2); + + IncExcNode* ieNode = new IncExcNode(); + if (Globals::verbosity > 1) { + originClausesMap_[ieNode] = backupClauses_; + stringstream explanation; + explanation << " IncExc on clause nº " << i + 1; + explanationMap_[ieNode] = explanation.str(); + } + compile (ieNode->plus1Branch(), plus1Clauses); + compile (ieNode->plus2Branch(), plus2Clauses); + compile (ieNode->minusBranch(), clauses); + *follow = ieNode; + return true; + } + } + if (Globals::verbosity > 1) { + Clause::deleteClauses (backupClauses_); + } + return 
false; +} + + + +bool +LiftedCircuit::tryIndepPartialGrounding ( + CircuitNode** follow, + Clauses& clauses) +{ + // assumes that all literals have logical variables + // else, shannon decomp was possible + if (Globals::verbosity > 1) { + backupClauses_ = Clause::copyClauses (clauses); + } + LogVars rootLogVars; + LogVarSet lvs = clauses[0]->ipgCandidates(); + for (size_t i = 0; i < lvs.size(); i++) { + rootLogVars.clear(); + rootLogVars.push_back (lvs[i]); + ConstraintTree ct = clauses[0]->constr().projectedCopy ({lvs[i]}); + if (tryIndepPartialGroundingAux (clauses, ct, rootLogVars)) { + for (size_t j = 0; j < clauses.size(); j++) { + clauses[j]->addIpgLogVar (rootLogVars[j]); + } + SetAndNode* setAndNode = new SetAndNode (ct.size()); + if (Globals::verbosity > 1) { + originClausesMap_[setAndNode] = backupClauses_; + explanationMap_[setAndNode] = " IPG" ; + } + *follow = setAndNode; + compile (setAndNode->follow(), clauses); + return true; + } + } + if (Globals::verbosity > 1) { + Clause::deleteClauses (backupClauses_); + } + return false; +} + + + +bool +LiftedCircuit::tryIndepPartialGroundingAux ( + Clauses& clauses, + ConstraintTree& ct, + LogVars& rootLogVars) +{ + for (size_t i = 1; i < clauses.size(); i++) { + LogVarSet lvs = clauses[i]->ipgCandidates(); + for (size_t j = 0; j < lvs.size(); j++) { + ConstraintTree ct2 = clauses[i]->constr().projectedCopy ({lvs[j]}); + if (ct.tupleSet() == ct2.tupleSet()) { + rootLogVars.push_back (lvs[j]); + break; + } + } + if (rootLogVars.size() != i + 1) { + return false; + } + } + // verifies if the IPG logical vars appear in the same positions + unordered_map positions; + for (size_t i = 0; i < clauses.size(); i++) { + const Literals& literals = clauses[i]->literals(); + for (size_t j = 0; j < literals.size(); j++) { + size_t idx = literals[j].indexOfLogVar (rootLogVars[i]); + assert (idx != literals[j].nrLogVars()); + unordered_map::iterator it; + it = positions.find (literals[j].lid()); + if (it != positions.end()) { + if (it->second != idx) { + return false; + } + } else { + positions[literals[j].lid()] = idx; + } + } + } + return true; +} + + + +bool +LiftedCircuit::tryAtomCounting ( + CircuitNode** follow, + Clauses& clauses) +{ + for (size_t i = 0; i < clauses.size(); i++) { + if (clauses[i]->nrPosCountedLogVars() > 0 + || clauses[i]->nrNegCountedLogVars() > 0) { + // only allow one atom counting node per branch + return false; + } + } + if (Globals::verbosity > 1) { + backupClauses_ = Clause::copyClauses (clauses); + } + for (size_t i = 0; i < clauses.size(); i++) { + Literals literals = clauses[i]->literals(); + for (size_t j = 0; j < literals.size(); j++) { + if (literals[j].nrLogVars() == 1 + && ! clauses[i]->isIpgLogVar (literals[j].logVars().front()) + && ! 
clauses[i]->isCountedLogVar (literals[j].logVars().front())) { + unsigned nrGroundings = clauses[i]->constr().projectedCopy ( + literals[j].logVars()).size(); + SetOrNode* setOrNode = new SetOrNode (nrGroundings); + if (Globals::verbosity > 1) { + originClausesMap_[setOrNode] = backupClauses_; + explanationMap_[setOrNode] = " AC" ; + } + Clause* c1 = new Clause ( + clauses[i]->constr().projectedCopy (literals[j].logVars())); + Clause* c2 = new Clause ( + clauses[i]->constr().projectedCopy (literals[j].logVars())); + c1->addLiteral (literals[j]); + c2->addLiteralComplemented (literals[j]); + c1->addPosCountedLogVar (literals[j].logVars().front()); + c2->addNegCountedLogVar (literals[j].logVars().front()); + clauses.push_back (c1); + clauses.push_back (c2); + shatterCountedLogVars (clauses); + compile (setOrNode->follow(), clauses); + *follow = setOrNode; + return true; + } + } + } + if (Globals::verbosity > 1) { + Clause::deleteClauses (backupClauses_); + } + return false; +} + + + +void +LiftedCircuit::shatterCountedLogVars (Clauses& clauses) +{ + while (shatterCountedLogVarsAux (clauses)) ; +} + + + +bool +LiftedCircuit::shatterCountedLogVarsAux (Clauses& clauses) +{ + for (size_t i = 0; i < clauses.size() - 1; i++) { + for (size_t j = i + 1; j < clauses.size(); j++) { + bool splitedSome = shatterCountedLogVarsAux (clauses, i, j); + if (splitedSome) { + return true; + } + } + } + return false; +} + + + +bool +LiftedCircuit::shatterCountedLogVarsAux ( + Clauses& clauses, + size_t idx1, + size_t idx2) +{ + Literals lits1 = clauses[idx1]->literals(); + Literals lits2 = clauses[idx2]->literals(); + for (size_t i = 0; i < lits1.size(); i++) { + for (size_t j = 0; j < lits2.size(); j++) { + if (lits1[i].lid() == lits2[j].lid()) { + LogVars lvs1 = lits1[i].logVars(); + LogVars lvs2 = lits2[j].logVars(); + for (size_t k = 0; k < lvs1.size(); k++) { + if (clauses[idx1]->isCountedLogVar (lvs1[k]) + && clauses[idx2]->isCountedLogVar (lvs2[k]) == false) { + clauses.push_back (new Clause (*clauses[idx2])); + clauses[idx2]->addPosCountedLogVar (lvs2[k]); + clauses.back()->addNegCountedLogVar (lvs2[k]); + return true; + } + if (clauses[idx2]->isCountedLogVar (lvs2[k]) + && clauses[idx1]->isCountedLogVar (lvs1[k]) == false) { + clauses.push_back (new Clause (*clauses[idx1])); + clauses[idx1]->addPosCountedLogVar (lvs1[k]); + clauses.back()->addNegCountedLogVar (lvs1[k]); + return true; + } + } + } + } + } + return false; +} + + + +bool +LiftedCircuit::independentClause ( + Clause& clause, + Clauses& otherClauses) const +{ + for (size_t i = 0; i < otherClauses.size(); i++) { + if (Clause::independentClauses (clause, *otherClauses[i]) == false) { + return false; + } + } + return true; +} + + + +bool +LiftedCircuit::independentLiteral ( + const Literal& lit, + const Literals& otherLits) const +{ + for (size_t i = 0; i < otherLits.size(); i++) { + if (lit.lid() == otherLits[i].lid() + || (lit.logVarSet() & otherLits[i].logVarSet()).empty() == false) { + return false; + } + } + return true; +} + + + +LitLvTypesSet +LiftedCircuit::smoothCircuit (CircuitNode* node) +{ + assert (node != 0); + LitLvTypesSet propagLits; + + switch (getCircuitNodeType (node)) { + + case CircuitNodeType::OR_NODE: { + OrNode* casted = dynamic_cast(node); + LitLvTypesSet lids1 = smoothCircuit (*casted->leftBranch()); + LitLvTypesSet lids2 = smoothCircuit (*casted->rightBranch()); + LitLvTypesSet missingLeft = lids2 - lids1; + LitLvTypesSet missingRight = lids1 - lids2; + createSmoothNode (missingLeft, casted->leftBranch()); + 
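+      // Added note (not in the original patch): smoothing makes both branches
+      // of the disjunction range over the same set of (typed) literals, so the
+      // literals propagated by only one side are inserted into the other side.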
createSmoothNode (missingRight, casted->rightBranch()); + propagLits |= lids1; + propagLits |= lids2; + break; + } + + case CircuitNodeType::AND_NODE: { + AndNode* casted = dynamic_cast(node); + LitLvTypesSet lids1 = smoothCircuit (*casted->leftBranch()); + LitLvTypesSet lids2 = smoothCircuit (*casted->rightBranch()); + propagLits |= lids1; + propagLits |= lids2; + break; + } + + case CircuitNodeType::SET_OR_NODE: { + SetOrNode* casted = dynamic_cast(node); + propagLits = smoothCircuit (*casted->follow()); + TinySet> litSet; + for (size_t i = 0; i < propagLits.size(); i++) { + litSet.insert (make_pair (propagLits[i].lid(), + propagLits[i].logVarTypes().size())); + } + LitLvTypesSet missingLids; + for (size_t i = 0; i < litSet.size(); i++) { + vector allTypes = getAllPossibleTypes (litSet[i].second); + for (size_t j = 0; j < allTypes.size(); j++) { + bool typeFound = false; + for (size_t k = 0; k < propagLits.size(); k++) { + if (litSet[i].first == propagLits[k].lid() + && containsTypes (propagLits[k].logVarTypes(), allTypes[j])) { + typeFound = true; + break; + } + } + if (typeFound == false) { + missingLids.insert (LitLvTypes (litSet[i].first, allTypes[j])); + } + } + } + createSmoothNode (missingLids, casted->follow()); + // setAllFullLogVars() can cause repeated elements in + // the set. Fix this by reconstructing the set again + LitLvTypesSet copy = propagLits; + propagLits.clear(); + for (size_t i = 0; i < copy.size(); i++) { + copy[i].setAllFullLogVars(); + propagLits.insert (copy[i]); + } + break; + } + + case CircuitNodeType::SET_AND_NODE: { + SetAndNode* casted = dynamic_cast(node); + propagLits = smoothCircuit (*casted->follow()); + break; + } + + case CircuitNodeType::INC_EXC_NODE: { + IncExcNode* casted = dynamic_cast(node); + LitLvTypesSet lids1 = smoothCircuit (*casted->plus1Branch()); + LitLvTypesSet lids2 = smoothCircuit (*casted->plus2Branch()); + LitLvTypesSet missingPlus1 = lids2 - lids1; + LitLvTypesSet missingPlus2 = lids1 - lids2; + createSmoothNode (missingPlus1, casted->plus1Branch()); + createSmoothNode (missingPlus2, casted->plus2Branch()); + propagLits |= lids1; + propagLits |= lids2; + break; + } + + case CircuitNodeType::LEAF_NODE: { + LeafNode* casted = dynamic_cast(node); + propagLits.insert (LitLvTypes ( + casted->clause()->literals()[0].lid(), + casted->clause()->logVarTypes(0))); + } + + default: + break; + } + + return propagLits; +} + + + +void +LiftedCircuit::createSmoothNode ( + const LitLvTypesSet& missingLits, + CircuitNode** prev) +{ + if (missingLits.empty() == false) { + if (Globals::verbosity > 1) { + unordered_map::iterator it; + it = originClausesMap_.find (*prev); + if (it != originClausesMap_.end()) { + backupClauses_ = it->second; + } else { + backupClauses_ = Clause::copyClauses ( + {((dynamic_cast(*prev))->clause())}); + } + } + Clauses clauses; + for (size_t i = 0; i < missingLits.size(); i++) { + LiteralId lid = missingLits[i].lid(); + const LogVarTypes& types = missingLits[i].logVarTypes(); + Clause* c = lwcnf_->createClause (lid); + for (size_t j = 0; j < types.size(); j++) { + LogVar X = c->literals().front().logVars()[j]; + if (types[j] == LogVarType::POS_LV) { + c->addPosCountedLogVar (X); + } else if (types[j] == LogVarType::NEG_LV) { + c->addNegCountedLogVar (X); + } + } + c->addLiteralComplemented (c->literals()[0]); + clauses.push_back (c); + } + SmoothNode* smoothNode = new SmoothNode (clauses, *lwcnf_); + *prev = new AndNode (smoothNode, *prev); + if (Globals::verbosity > 1) { + originClausesMap_[*prev] = backupClauses_; + 
explanationMap_[*prev] = " Smoothing" ; + } + } +} + + + +vector +LiftedCircuit::getAllPossibleTypes (unsigned nrLogVars) const +{ + if (nrLogVars == 0) { + return {}; + } + if (nrLogVars == 1) { + return {{LogVarType::POS_LV},{LogVarType::NEG_LV}}; + } + vector res; + Ranges ranges (nrLogVars, 2); + Indexer indexer (ranges); + while (indexer.valid()) { + LogVarTypes types; + for (size_t i = 0; i < nrLogVars; i++) { + if (indexer[i] == 0) { + types.push_back (LogVarType::POS_LV); + } else { + types.push_back (LogVarType::NEG_LV); + } + } + res.push_back (types); + ++ indexer; + } + return res; +} + + + +bool +LiftedCircuit::containsTypes ( + const LogVarTypes& typesA, + const LogVarTypes& typesB) const +{ + for (size_t i = 0; i < typesA.size(); i++) { + if (typesA[i] == LogVarType::FULL_LV) { + + } else if (typesA[i] == LogVarType::POS_LV + && typesB[i] == LogVarType::POS_LV) { + + } else if (typesA[i] == LogVarType::NEG_LV + && typesB[i] == LogVarType::NEG_LV) { + + } else { + return false; + } + } + return true; +} + + + +CircuitNodeType +LiftedCircuit::getCircuitNodeType (const CircuitNode* node) const +{ + CircuitNodeType type; + if (dynamic_cast(node) != 0) { + type = CircuitNodeType::OR_NODE; + } else if (dynamic_cast(node) != 0) { + type = CircuitNodeType::AND_NODE; + } else if (dynamic_cast(node) != 0) { + type = CircuitNodeType::SET_OR_NODE; + } else if (dynamic_cast(node) != 0) { + type = CircuitNodeType::SET_AND_NODE; + } else if (dynamic_cast(node) != 0) { + type = CircuitNodeType::INC_EXC_NODE; + } else if (dynamic_cast(node) != 0) { + type = CircuitNodeType::LEAF_NODE; + } else if (dynamic_cast(node) != 0) { + type = CircuitNodeType::SMOOTH_NODE; + } else if (dynamic_cast(node) != 0) { + type = CircuitNodeType::TRUE_NODE; + } else if (dynamic_cast(node) != 0) { + type = CircuitNodeType::COMPILATION_FAILED_NODE; + } else { + assert (false); + } + return type; +} + + + +void +LiftedCircuit::exportToGraphViz (CircuitNode* node, ofstream& os) +{ + assert (node != 0); + + static unsigned nrAuxNodes = 0; + stringstream ss; + ss << "n" << nrAuxNodes; + string auxNode = ss.str(); + nrAuxNodes ++; + string opStyle = "shape=circle,width=0.7,margin=\"0.0,0.0\"," ; + + switch (getCircuitNodeType (node)) { + + case OR_NODE: { + OrNode* casted = dynamic_cast(node); + printClauses (casted, os); + + os << auxNode << " [" << opStyle << "label=\"∨\"]" << endl; + os << escapeNode (node) << " -> " << auxNode; + os << " [label=\"" << getExplanationString (node) << "\"]" ; + os << endl; + + os << auxNode << " -> " ; + os << escapeNode (*casted->leftBranch()); + os << " [label=\" " << (*casted->leftBranch())->weight() << "\"]" ; + os << endl; + + os << auxNode << " -> " ; + os << escapeNode (*casted->rightBranch()); + os << " [label=\" " << (*casted->rightBranch())->weight() << "\"]" ; + os << endl; + + exportToGraphViz (*casted->leftBranch(), os); + exportToGraphViz (*casted->rightBranch(), os); + break; + } + + case AND_NODE: { + AndNode* casted = dynamic_cast(node); + printClauses (casted, os); + + os << auxNode << " [" << opStyle << "label=\"∧\"]" << endl; + os << escapeNode (node) << " -> " << auxNode; + os << " [label=\"" << getExplanationString (node) << "\"]" ; + os << endl; + + os << auxNode << " -> " ; + os << escapeNode (*casted->leftBranch()); + os << " [label=\" " << (*casted->leftBranch())->weight() << "\"]" ; + os << endl; + + os << auxNode << " -> " ; + os << escapeNode (*casted->rightBranch()) << endl; + os << " [label=\" " << (*casted->rightBranch())->weight() << "\"]" ; + os << 
endl; + + exportToGraphViz (*casted->leftBranch(), os); + exportToGraphViz (*casted->rightBranch(), os); + break; + } + + case SET_OR_NODE: { + SetOrNode* casted = dynamic_cast(node); + printClauses (casted, os); + + os << auxNode << " [" << opStyle << "label=\"∨(X)\"]" << endl; + os << escapeNode (node) << " -> " << auxNode; + os << " [label=\"" << getExplanationString (node) << "\"]" ; + os << endl; + + os << auxNode << " -> " ; + os << escapeNode (*casted->follow()); + os << " [label=\" " << (*casted->follow())->weight() << "\"]" ; + os << endl; + + exportToGraphViz (*casted->follow(), os); + break; + } + + case SET_AND_NODE: { + SetAndNode* casted = dynamic_cast(node); + printClauses (casted, os); + + os << auxNode << " [" << opStyle << "label=\"∧(X)\"]" << endl; + os << escapeNode (node) << " -> " << auxNode; + os << " [label=\"" << getExplanationString (node) << "\"]" ; + os << endl; + + os << auxNode << " -> " ; + os << escapeNode (*casted->follow()); + os << " [label=\" " << (*casted->follow())->weight() << "\"]" ; + os << endl; + + exportToGraphViz (*casted->follow(), os); + break; + } + + case INC_EXC_NODE: { + IncExcNode* casted = dynamic_cast(node); + printClauses (casted, os); + + os << auxNode << " [" << opStyle << "label=\"+ - +\"]" ; + os << endl; + os << escapeNode (node) << " -> " << auxNode; + os << " [label=\"" << getExplanationString (node) << "\"]" ; + os << endl; + + os << auxNode << " -> " ; + os << escapeNode (*casted->plus1Branch()); + os << " [label=\" " << (*casted->plus1Branch())->weight() << "\"]" ; + os << endl; + + os << auxNode << " -> " ; + os << escapeNode (*casted->minusBranch()) << endl; + os << " [label=\" " << (*casted->minusBranch())->weight() << "\"]" ; + os << endl; + + os << auxNode << " -> " ; + os << escapeNode (*casted->plus2Branch()); + os << " [label=\" " << (*casted->plus2Branch())->weight() << "\"]" ; + os << endl; + + exportToGraphViz (*casted->plus1Branch(), os); + exportToGraphViz (*casted->plus2Branch(), os); + exportToGraphViz (*casted->minusBranch(), os); + break; + } + + case LEAF_NODE: { + printClauses (node, os, "style=filled,fillcolor=palegreen,"); + break; + } + + case SMOOTH_NODE: { + printClauses (node, os, "style=filled,fillcolor=lightblue,"); + break; + } + + case TRUE_NODE: { + os << escapeNode (node); + os << " [shape=box,label=\"⊤\"]" ; + os << endl; + break; + } + + case COMPILATION_FAILED_NODE: { + printClauses (node, os, "style=filled,fillcolor=salmon,"); + break; + } + + default: + assert (false); + } +} + + + +string +LiftedCircuit::escapeNode (const CircuitNode* node) const +{ + stringstream ss; + ss << "\"" << node << "\"" ; + return ss.str(); +} + + + +string +LiftedCircuit::getExplanationString (CircuitNode* node) +{ + return Util::contains (explanationMap_, node) + ? 
explanationMap_[node] + : "" ; +} + + + +void +LiftedCircuit::printClauses ( + CircuitNode* node, + ofstream& os, + string extraOptions) +{ + Clauses clauses; + if (Util::contains (originClausesMap_, node)) { + clauses = originClausesMap_[node]; + } else if (getCircuitNodeType (node) == CircuitNodeType::LEAF_NODE) { + clauses = { (dynamic_cast(node))->clause() } ; + } else if (getCircuitNodeType (node) == CircuitNodeType::SMOOTH_NODE) { + clauses = (dynamic_cast(node))->clauses(); + } + assert (clauses.empty() == false); + os << escapeNode (node); + os << " [shape=box," << extraOptions << "label=\"" ; + for (size_t i = 0; i < clauses.size(); i++) { + if (i != 0) os << "\\n" ; + os << *clauses[i]; + } + os << "\"]" ; + os << endl; +} + + + LiftedKc::~LiftedKc (void) { delete lwcnf_; diff --git a/packages/CLPBN/horus/LiftedKc.h b/packages/CLPBN/horus/LiftedKc.h index cba6499e1..a4cd2dbeb 100644 --- a/packages/CLPBN/horus/LiftedKc.h +++ b/packages/CLPBN/horus/LiftedKc.h @@ -1,11 +1,281 @@ #ifndef HORUS_LIFTEDKC_H #define HORUS_LIFTEDKC_H + +#include "LiftedWCNF.h" #include "LiftedSolver.h" #include "ParfactorList.h" -class LiftedWCNF; -class LiftedCircuit; + +enum CircuitNodeType { + OR_NODE, + AND_NODE, + SET_OR_NODE, + SET_AND_NODE, + INC_EXC_NODE, + LEAF_NODE, + SMOOTH_NODE, + TRUE_NODE, + COMPILATION_FAILED_NODE +}; + + + +class CircuitNode +{ + public: + CircuitNode (void) { } + + virtual ~CircuitNode (void) { } + + virtual double weight (void) const = 0; +}; + + + +class OrNode : public CircuitNode +{ + public: + OrNode (void) : CircuitNode(), leftBranch_(0), rightBranch_(0) { } + + ~OrNode (void); + + CircuitNode** leftBranch (void) { return &leftBranch_; } + CircuitNode** rightBranch (void) { return &rightBranch_; } + + double weight (void) const; + + private: + CircuitNode* leftBranch_; + CircuitNode* rightBranch_; +}; + + + +class AndNode : public CircuitNode +{ + public: + AndNode (void) : CircuitNode(), leftBranch_(0), rightBranch_(0) { } + + AndNode (CircuitNode* leftBranch, CircuitNode* rightBranch) + : CircuitNode(), leftBranch_(leftBranch), rightBranch_(rightBranch) { } + + ~AndNode (void); + + CircuitNode** leftBranch (void) { return &leftBranch_; } + CircuitNode** rightBranch (void) { return &rightBranch_; } + + double weight (void) const; + + private: + CircuitNode* leftBranch_; + CircuitNode* rightBranch_; +}; + + + +class SetOrNode : public CircuitNode +{ + public: + SetOrNode (unsigned nrGroundings) + : CircuitNode(), follow_(0), nrGroundings_(nrGroundings) { } + + ~SetOrNode (void); + + CircuitNode** follow (void) { return &follow_; } + + static unsigned nrPositives (void) { return nrPos_; } + + static unsigned nrNegatives (void) { return nrNeg_; } + + static bool isSet (void) { return nrPos_ >= 0; } + + double weight (void) const; + + private: + CircuitNode* follow_; + unsigned nrGroundings_; + static int nrPos_; + static int nrNeg_; +}; + + + +class SetAndNode : public CircuitNode +{ + public: + SetAndNode (unsigned nrGroundings) + : CircuitNode(), follow_(0), nrGroundings_(nrGroundings) { } + + ~SetAndNode (void); + + CircuitNode** follow (void) { return &follow_; } + + double weight (void) const; + + private: + CircuitNode* follow_; + unsigned nrGroundings_; +}; + + + +class IncExcNode : public CircuitNode +{ + public: + IncExcNode (void) + : CircuitNode(), plus1Branch_(0), plus2Branch_(0), minusBranch_(0) { } + + ~IncExcNode (void); + + CircuitNode** plus1Branch (void) { return &plus1Branch_; } + CircuitNode** plus2Branch (void) { return &plus2Branch_; } + 
CircuitNode** minusBranch (void) { return &minusBranch_; } + + double weight (void) const; + + private: + CircuitNode* plus1Branch_; + CircuitNode* plus2Branch_; + CircuitNode* minusBranch_; +}; + + + +class LeafNode : public CircuitNode +{ + public: + LeafNode (Clause* clause, const LiftedWCNF& lwcnf) + : CircuitNode(), clause_(clause), lwcnf_(lwcnf) { } + + ~LeafNode (void); + + const Clause* clause (void) const { return clause_; } + + Clause* clause (void) { return clause_; } + + double weight (void) const; + + private: + Clause* clause_; + const LiftedWCNF& lwcnf_; +}; + + + +class SmoothNode : public CircuitNode +{ + public: + SmoothNode (const Clauses& clauses, const LiftedWCNF& lwcnf) + : CircuitNode(), clauses_(clauses), lwcnf_(lwcnf) { } + + ~SmoothNode (void); + + const Clauses& clauses (void) const { return clauses_; } + + Clauses clauses (void) { return clauses_; } + + double weight (void) const; + + private: + Clauses clauses_; + const LiftedWCNF& lwcnf_; +}; + + + +class TrueNode : public CircuitNode +{ + public: + TrueNode (void) : CircuitNode() { } + + double weight (void) const; +}; + + + +class CompilationFailedNode : public CircuitNode +{ + public: + CompilationFailedNode (void) : CircuitNode() { } + + double weight (void) const; +}; + + + +class LiftedCircuit +{ + public: + LiftedCircuit (const LiftedWCNF* lwcnf); + + ~LiftedCircuit (void); + + bool isCompilationSucceeded (void) const; + + double getWeightedModelCount (void) const; + + void exportToGraphViz (const char*); + + private: + + void compile (CircuitNode** follow, Clauses& clauses); + + bool tryUnitPropagation (CircuitNode** follow, Clauses& clauses); + + bool tryIndependence (CircuitNode** follow, Clauses& clauses); + + bool tryShannonDecomp (CircuitNode** follow, Clauses& clauses); + + bool tryInclusionExclusion (CircuitNode** follow, Clauses& clauses); + + bool tryIndepPartialGrounding (CircuitNode** follow, Clauses& clauses); + + bool tryIndepPartialGroundingAux (Clauses& clauses, ConstraintTree& ct, + LogVars& rootLogVars); + + bool tryAtomCounting (CircuitNode** follow, Clauses& clauses); + + void shatterCountedLogVars (Clauses& clauses); + + bool shatterCountedLogVarsAux (Clauses& clauses); + + bool shatterCountedLogVarsAux (Clauses& clauses, size_t idx1, size_t idx2); + + bool independentClause (Clause& clause, Clauses& otherClauses) const; + + bool independentLiteral (const Literal& lit, + const Literals& otherLits) const; + + LitLvTypesSet smoothCircuit (CircuitNode* node); + + void createSmoothNode (const LitLvTypesSet& lids, + CircuitNode** prev); + + vector getAllPossibleTypes (unsigned nrLogVars) const; + + bool containsTypes (const LogVarTypes& typesA, + const LogVarTypes& typesB) const; + + CircuitNodeType getCircuitNodeType (const CircuitNode* node) const; + + void exportToGraphViz (CircuitNode* node, ofstream&); + + void printClauses (CircuitNode* node, ofstream&, + string extraOptions = ""); + + string escapeNode (const CircuitNode* node) const; + + string getExplanationString (CircuitNode* node); + + CircuitNode* root_; + const LiftedWCNF* lwcnf_; + bool compilationSucceeded_; + Clauses backupClauses_; + unordered_map originClausesMap_; + unordered_map explanationMap_; +}; + class LiftedKc : public LiftedSolver diff --git a/packages/CLPBN/horus/Makefile.in b/packages/CLPBN/horus/Makefile.in index d19803ee7..24e7d0b87 100644 --- a/packages/CLPBN/horus/Makefile.in +++ b/packages/CLPBN/horus/Makefile.in @@ -23,10 +23,10 @@ CC=@CC@ CXX=@CXX@ # normal -#CXXFLAGS= -std=c++0x @SHLIB_CXXFLAGS@ 
$(YAP_EXTRAS) $(DEFS) -D_YAP_NOT_INSTALLED_=1 -I$(srcdir) -I../../.. -I$(srcdir)/../../../include @CPPFLAGS@ -DNDEBUG +CXXFLAGS= -std=c++0x @SHLIB_CXXFLAGS@ $(YAP_EXTRAS) $(DEFS) -D_YAP_NOT_INSTALLED_=1 -I$(srcdir) -I../../.. -I$(srcdir)/../../../include @CPPFLAGS@ -DNDEBUG # debug -CXXFLAGS= -std=c++0x @SHLIB_CXXFLAGS@ $(YAP_EXTRAS) $(DEFS) -D_YAP_NOT_INSTALLED_=1 -I$(srcdir) -I../../.. -I$(srcdir)/../../../include @CPPFLAGS@ -g -O0 -Wextra +#CXXFLAGS= -std=c++0x @SHLIB_CXXFLAGS@ $(YAP_EXTRAS) $(DEFS) -D_YAP_NOT_INSTALLED_=1 -I$(srcdir) -I../../.. -I$(srcdir)/../../../include @CPPFLAGS@ -g -O0 -Wextra # @@ -57,7 +57,6 @@ HEADERS = \ $(srcdir)/Horus.h \ $(srcdir)/Indexer.h \ $(srcdir)/LiftedBp.h \ - $(srcdir)/LiftedCircuit.h \ $(srcdir)/LiftedKc.h \ $(srcdir)/LiftedOperations.h \ $(srcdir)/LiftedSolver.h \ @@ -87,7 +86,6 @@ CPP_SOURCES = \ $(srcdir)/HorusCli.cpp \ $(srcdir)/HorusYap.cpp \ $(srcdir)/LiftedBp.cpp \ - $(srcdir)/LiftedCircuit.cpp \ $(srcdir)/LiftedKc.cpp \ $(srcdir)/LiftedOperations.cpp \ $(srcdir)/LiftedUtils.cpp \ @@ -114,7 +112,6 @@ OBJS = \ Histogram.o \ HorusYap.o \ LiftedBp.o \ - LiftedCircuit.o \ LiftedKc.o \ LiftedOperations.o \ LiftedUtils.o \ From 421d6f72ee55c631ba4578c1097eef6b2c16f54f Mon Sep 17 00:00:00 2001 From: Tiago Gomes Date: Thu, 20 Dec 2012 21:32:40 +0000 Subject: [PATCH 39/89] Place the constructor on the right place --- packages/CLPBN/horus/FactorGraph.cpp | 24 +- packages/CLPBN/horus2/BayesBall.cpp | 84 ++ packages/CLPBN/horus2/BayesBall.h | 85 ++ packages/CLPBN/horus2/BayesBallGraph.cpp | 106 ++ packages/CLPBN/horus2/BayesBallGraph.h | 84 ++ packages/CLPBN/horus2/BeliefProp.cpp | 471 +++++++ packages/CLPBN/horus2/BeliefProp.h | 188 +++ packages/CLPBN/horus2/ConstraintTree.cpp | 1174 ++++++++++++++++++ packages/CLPBN/horus2/ConstraintTree.h | 237 ++++ packages/CLPBN/horus2/CountingBp.cpp | 424 +++++++ packages/CLPBN/horus2/CountingBp.h | 182 +++ packages/CLPBN/horus2/ElimGraph.cpp | 243 ++++ packages/CLPBN/horus2/ElimGraph.h | 139 +++ packages/CLPBN/horus2/Factor.cpp | 237 ++++ packages/CLPBN/horus2/Factor.h | 294 +++++ packages/CLPBN/horus2/FactorGraph.cpp | 454 +++++++ packages/CLPBN/horus2/FactorGraph.h | 150 +++ packages/CLPBN/horus2/GroundSolver.cpp | 107 ++ packages/CLPBN/horus2/GroundSolver.h | 36 + packages/CLPBN/horus2/Histogram.cpp | 146 +++ packages/CLPBN/horus2/Histogram.h | 45 + packages/CLPBN/horus2/Horus.h | 87 ++ packages/CLPBN/horus2/HorusCli.cpp | 187 +++ packages/CLPBN/horus2/HorusYap.cpp | 570 +++++++++ packages/CLPBN/horus2/Indexer.h | 258 ++++ packages/CLPBN/horus2/LiftedBp.cpp | 234 ++++ packages/CLPBN/horus2/LiftedBp.h | 43 + packages/CLPBN/horus2/LiftedKc.cpp | 1309 ++++++++++++++++++++ packages/CLPBN/horus2/LiftedKc.h | 300 +++++ packages/CLPBN/horus2/LiftedOperations.cpp | 271 ++++ packages/CLPBN/horus2/LiftedOperations.h | 27 + packages/CLPBN/horus2/LiftedSolver.h | 27 + packages/CLPBN/horus2/LiftedUtils.cpp | 131 ++ packages/CLPBN/horus2/LiftedUtils.h | 166 +++ packages/CLPBN/horus2/LiftedVe.cpp | 728 +++++++++++ packages/CLPBN/horus2/LiftedVe.h | 155 +++ packages/CLPBN/horus2/LiftedWCNF.cpp | 658 ++++++++++ packages/CLPBN/horus2/LiftedWCNF.h | 239 ++++ packages/CLPBN/horus2/Parfactor.cpp | 942 ++++++++++++++ packages/CLPBN/horus2/Parfactor.h | 125 ++ packages/CLPBN/horus2/ParfactorList.cpp | 638 ++++++++++ packages/CLPBN/horus2/ParfactorList.h | 121 ++ packages/CLPBN/horus2/ProbFormula.cpp | 140 +++ packages/CLPBN/horus2/ProbFormula.h | 114 ++ packages/CLPBN/horus2/TinySet.h | 264 ++++ packages/CLPBN/horus2/Util.cpp | 429 
+++++++ packages/CLPBN/horus2/Util.h | 422 +++++++ packages/CLPBN/horus2/Var.cpp | 102 ++ packages/CLPBN/horus2/Var.h | 108 ++ packages/CLPBN/horus2/VarElim.cpp | 217 ++++ packages/CLPBN/horus2/VarElim.h | 46 + packages/CLPBN/horus2/WeightedBp.cpp | 288 +++++ packages/CLPBN/horus2/WeightedBp.h | 61 + 53 files changed, 14305 insertions(+), 12 deletions(-) create mode 100644 packages/CLPBN/horus2/BayesBall.cpp create mode 100644 packages/CLPBN/horus2/BayesBall.h create mode 100644 packages/CLPBN/horus2/BayesBallGraph.cpp create mode 100644 packages/CLPBN/horus2/BayesBallGraph.h create mode 100644 packages/CLPBN/horus2/BeliefProp.cpp create mode 100644 packages/CLPBN/horus2/BeliefProp.h create mode 100644 packages/CLPBN/horus2/ConstraintTree.cpp create mode 100644 packages/CLPBN/horus2/ConstraintTree.h create mode 100644 packages/CLPBN/horus2/CountingBp.cpp create mode 100644 packages/CLPBN/horus2/CountingBp.h create mode 100644 packages/CLPBN/horus2/ElimGraph.cpp create mode 100644 packages/CLPBN/horus2/ElimGraph.h create mode 100644 packages/CLPBN/horus2/Factor.cpp create mode 100644 packages/CLPBN/horus2/Factor.h create mode 100644 packages/CLPBN/horus2/FactorGraph.cpp create mode 100644 packages/CLPBN/horus2/FactorGraph.h create mode 100644 packages/CLPBN/horus2/GroundSolver.cpp create mode 100644 packages/CLPBN/horus2/GroundSolver.h create mode 100644 packages/CLPBN/horus2/Histogram.cpp create mode 100644 packages/CLPBN/horus2/Histogram.h create mode 100644 packages/CLPBN/horus2/Horus.h create mode 100644 packages/CLPBN/horus2/HorusCli.cpp create mode 100644 packages/CLPBN/horus2/HorusYap.cpp create mode 100644 packages/CLPBN/horus2/Indexer.h create mode 100644 packages/CLPBN/horus2/LiftedBp.cpp create mode 100644 packages/CLPBN/horus2/LiftedBp.h create mode 100644 packages/CLPBN/horus2/LiftedKc.cpp create mode 100644 packages/CLPBN/horus2/LiftedKc.h create mode 100644 packages/CLPBN/horus2/LiftedOperations.cpp create mode 100644 packages/CLPBN/horus2/LiftedOperations.h create mode 100644 packages/CLPBN/horus2/LiftedSolver.h create mode 100644 packages/CLPBN/horus2/LiftedUtils.cpp create mode 100644 packages/CLPBN/horus2/LiftedUtils.h create mode 100644 packages/CLPBN/horus2/LiftedVe.cpp create mode 100644 packages/CLPBN/horus2/LiftedVe.h create mode 100644 packages/CLPBN/horus2/LiftedWCNF.cpp create mode 100644 packages/CLPBN/horus2/LiftedWCNF.h create mode 100644 packages/CLPBN/horus2/Parfactor.cpp create mode 100644 packages/CLPBN/horus2/Parfactor.h create mode 100644 packages/CLPBN/horus2/ParfactorList.cpp create mode 100644 packages/CLPBN/horus2/ParfactorList.h create mode 100644 packages/CLPBN/horus2/ProbFormula.cpp create mode 100644 packages/CLPBN/horus2/ProbFormula.h create mode 100644 packages/CLPBN/horus2/TinySet.h create mode 100644 packages/CLPBN/horus2/Util.cpp create mode 100644 packages/CLPBN/horus2/Util.h create mode 100644 packages/CLPBN/horus2/Var.cpp create mode 100644 packages/CLPBN/horus2/Var.h create mode 100644 packages/CLPBN/horus2/VarElim.cpp create mode 100644 packages/CLPBN/horus2/VarElim.h create mode 100644 packages/CLPBN/horus2/WeightedBp.cpp create mode 100644 packages/CLPBN/horus2/WeightedBp.h diff --git a/packages/CLPBN/horus/FactorGraph.cpp b/packages/CLPBN/horus/FactorGraph.cpp index ba31a9faa..8f5c446b5 100644 --- a/packages/CLPBN/horus/FactorGraph.cpp +++ b/packages/CLPBN/horus/FactorGraph.cpp @@ -32,6 +32,18 @@ FactorGraph::FactorGraph (const FactorGraph& fg) +FactorGraph::~FactorGraph (void) +{ + for (size_t i = 0; i < varNodes_.size(); i++) { + 
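+    // added note: the factor graph owns its variable and factor nodes,
+    // so the destructor frees them here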
delete varNodes_[i]; + } + for (size_t i = 0; i < facNodes_.size(); i++) { + delete facNodes_[i]; + } +} + + + void FactorGraph::readFromUaiFormat (const char* fileName) { @@ -167,18 +179,6 @@ FactorGraph::readFromLibDaiFormat (const char* fileName) -FactorGraph::~FactorGraph (void) -{ - for (size_t i = 0; i < varNodes_.size(); i++) { - delete varNodes_[i]; - } - for (size_t i = 0; i < facNodes_.size(); i++) { - delete facNodes_[i]; - } -} - - - void FactorGraph::addFactor (const Factor& factor) { diff --git a/packages/CLPBN/horus2/BayesBall.cpp b/packages/CLPBN/horus2/BayesBall.cpp new file mode 100644 index 000000000..0fac25056 --- /dev/null +++ b/packages/CLPBN/horus2/BayesBall.cpp @@ -0,0 +1,84 @@ +#include +#include + +#include +#include +#include + +#include "BayesBall.h" +#include "Util.h" + + +FactorGraph* +BayesBall::getMinimalFactorGraph (const VarIds& queryIds) +{ + assert (fg_.bayesianFactors()); + Scheduling scheduling; + for (size_t i = 0; i < queryIds.size(); i++) { + assert (dag_.getNode (queryIds[i])); + BBNode* n = dag_.getNode (queryIds[i]); + scheduling.push (ScheduleInfo (n, false, true)); + } + + while (!scheduling.empty()) { + ScheduleInfo& sch = scheduling.front(); + BBNode* n = sch.node; + n->setAsVisited(); + if (n->hasEvidence() == false && sch.visitedFromChild) { + if (n->isMarkedOnTop() == false) { + n->markOnTop(); + scheduleParents (n, scheduling); + } + if (n->isMarkedOnBottom() == false) { + n->markOnBottom(); + scheduleChilds (n, scheduling); + } + } + if (sch.visitedFromParent) { + if (n->hasEvidence() && n->isMarkedOnTop() == false) { + n->markOnTop(); + scheduleParents (n, scheduling); + } + if (n->hasEvidence() == false && n->isMarkedOnBottom() == false) { + n->markOnBottom(); + scheduleChilds (n, scheduling); + } + } + scheduling.pop(); + } + + FactorGraph* fg = new FactorGraph(); + constructGraph (fg); + return fg; +} + + + +void +BayesBall::constructGraph (FactorGraph* fg) const +{ + const FacNodes& facNodes = fg_.facNodes(); + for (size_t i = 0; i < facNodes.size(); i++) { + const BBNode* n = dag_.getNode ( + facNodes[i]->factor().argument (0)); + if (n->isMarkedOnTop()) { + fg->addFactor (facNodes[i]->factor()); + } else if (n->hasEvidence() && n->isVisited()) { + VarIds varIds = { facNodes[i]->factor().argument (0) }; + Ranges ranges = { facNodes[i]->factor().range (0) }; + Params params (ranges[0], LogAware::noEvidence()); + params[n->getEvidence()] = LogAware::withEvidence(); + fg->addFactor (Factor (varIds, ranges, params)); + } + } + const VarNodes& varNodes = fg_.varNodes(); + for (size_t i = 0; i < varNodes.size(); i++) { + if (varNodes[i]->hasEvidence()) { + VarNode* vn = fg->getVarNode (varNodes[i]->varId()); + if (vn) { + vn->setEvidence (varNodes[i]->getEvidence()); + } + } + } +} + diff --git a/packages/CLPBN/horus2/BayesBall.h b/packages/CLPBN/horus2/BayesBall.h new file mode 100644 index 000000000..4efbd2ed1 --- /dev/null +++ b/packages/CLPBN/horus2/BayesBall.h @@ -0,0 +1,85 @@ +#ifndef HORUS_BAYESBALL_H +#define HORUS_BAYESBALL_H + +#include +#include +#include +#include + +#include "FactorGraph.h" +#include "BayesBallGraph.h" +#include "Horus.h" + +using namespace std; + + +struct ScheduleInfo +{ + ScheduleInfo (BBNode* n, bool vfp, bool vfc) : + node(n), visitedFromParent(vfp), visitedFromChild(vfc) { } + + BBNode* node; + bool visitedFromParent; + bool visitedFromChild; +}; + + +typedef queue> Scheduling; + + +class BayesBall +{ + public: + BayesBall (FactorGraph& fg) + : fg_(fg) , dag_(fg.getStructure()) + { + dag_.clear(); + } 
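+
+    // Added note (not in the original patch): getMinimalFactorGraph()
+    // returns a new factor graph containing only the factors relevant to
+    // the marginals of the given query variables; the caller owns and
+    // must delete it. A minimal usage sketch (vid is a placeholder):
+    //
+    //   FactorGraph* mfg = BayesBall::getMinimalFactorGraph (fg, {vid});
+    //   // ... query *mfg with a ground solver ...
+    //   delete mfg;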
+ + FactorGraph* getMinimalFactorGraph (const VarIds&); + + static FactorGraph* getMinimalFactorGraph (FactorGraph& fg, VarIds vids) + { + BayesBall bb (fg); + return bb.getMinimalFactorGraph (vids); + } + + private: + + void constructGraph (FactorGraph* fg) const; + + void scheduleParents (const BBNode* n, Scheduling& sch) const; + + void scheduleChilds (const BBNode* n, Scheduling& sch) const; + + FactorGraph& fg_; + + BayesBallGraph& dag_; +}; + + + +inline void +BayesBall::scheduleParents (const BBNode* n, Scheduling& sch) const +{ + const vector& ps = n->parents(); + for (vector::const_iterator it = ps.begin(); + it != ps.end(); ++it) { + sch.push (ScheduleInfo (*it, false, true)); + } +} + + + +inline void +BayesBall::scheduleChilds (const BBNode* n, Scheduling& sch) const +{ + const vector& cs = n->childs(); + for (vector::const_iterator it = cs.begin(); + it != cs.end(); ++it) { + sch.push (ScheduleInfo (*it, true, false)); + } +} + +#endif // HORUS_BAYESBALL_H + diff --git a/packages/CLPBN/horus2/BayesBallGraph.cpp b/packages/CLPBN/horus2/BayesBallGraph.cpp new file mode 100644 index 000000000..36fcbb5ee --- /dev/null +++ b/packages/CLPBN/horus2/BayesBallGraph.cpp @@ -0,0 +1,106 @@ +#include +#include + +#include +#include +#include + +#include "BayesBallGraph.h" +#include "Util.h" + + +void +BayesBallGraph::addNode (BBNode* n) +{ + assert (Util::contains (varMap_, n->varId()) == false); + nodes_.push_back (n); + varMap_[n->varId()] = n; +} + + + +void +BayesBallGraph::addEdge (VarId vid1, VarId vid2) +{ + unordered_map::iterator it1; + unordered_map::iterator it2; + it1 = varMap_.find (vid1); + it2 = varMap_.find (vid2); + assert (it1 != varMap_.end()); + assert (it2 != varMap_.end()); + it1->second->addChild (it2->second); + it2->second->addParent (it1->second); +} + + + +const BBNode* +BayesBallGraph::getNode (VarId vid) const +{ + unordered_map::const_iterator it; + it = varMap_.find (vid); + return it != varMap_.end() ? it->second : 0; +} + + + +BBNode* +BayesBallGraph::getNode (VarId vid) +{ + unordered_map::const_iterator it; + it = varMap_.find (vid); + return it != varMap_.end() ? it->second : 0; +} + + + +void +BayesBallGraph::setIndexes (void) +{ + for (size_t i = 0; i < nodes_.size(); i++) { + nodes_[i]->setIndex (i); + } +} + + + +void +BayesBallGraph::clear (void) +{ + for (size_t i = 0; i < nodes_.size(); i++) { + nodes_[i]->clear(); + } +} + + + +void +BayesBallGraph::exportToGraphViz (const char* fileName) +{ + ofstream out (fileName); + if (!out.is_open()) { + cerr << "Error: couldn't open file '" << fileName << "'." 
; + return; + } + out << "digraph {" << endl; + out << "ranksep=1" << endl; + for (size_t i = 0; i < nodes_.size(); i++) { + out << nodes_[i]->varId() ; + out << " [" ; + out << "label=\"" << nodes_[i]->label() << "\"" ; + if (nodes_[i]->hasEvidence()) { + out << ",style=filled, fillcolor=yellow" ; + } + out << "]" << endl; + } + for (size_t i = 0; i < nodes_.size(); i++) { + const vector& childs = nodes_[i]->childs(); + for (size_t j = 0; j < childs.size(); j++) { + out << nodes_[i]->varId() << " -> " << childs[j]->varId(); + out << " [style=bold]" << endl ; + } + } + out << "}" << endl; + out.close(); +} + diff --git a/packages/CLPBN/horus2/BayesBallGraph.h b/packages/CLPBN/horus2/BayesBallGraph.h new file mode 100644 index 000000000..72a0f90d0 --- /dev/null +++ b/packages/CLPBN/horus2/BayesBallGraph.h @@ -0,0 +1,84 @@ +#ifndef HORUS_BAYESBALLGRAPH_H +#define HORUS_BAYESBALLGRAPH_H + +#include +#include +#include +#include + +#include "Var.h" +#include "Horus.h" + +using namespace std; + +class BBNode : public Var +{ + public: + BBNode (Var* v) : Var (v) , visited_(false), + markedOnTop_(false), markedOnBottom_(false) { } + + const vector& childs (void) const { return childs_; } + + vector& childs (void) { return childs_; } + + const vector& parents (void) const { return parents_; } + + vector& parents (void) { return parents_; } + + void addParent (BBNode* p) { parents_.push_back (p); } + + void addChild (BBNode* c) { childs_.push_back (c); } + + bool isVisited (void) const { return visited_; } + + void setAsVisited (void) { visited_ = true; } + + bool isMarkedOnTop (void) const { return markedOnTop_; } + + void markOnTop (void) { markedOnTop_ = true; } + + bool isMarkedOnBottom (void) const { return markedOnBottom_; } + + void markOnBottom (void) { markedOnBottom_ = true; } + + void clear (void) { visited_ = markedOnTop_ = markedOnBottom_ = false; } + + private: + bool visited_; + bool markedOnTop_; + bool markedOnBottom_; + + vector childs_; + vector parents_; +}; + + +class BayesBallGraph +{ + public: + BayesBallGraph (void) { } + + void addNode (BBNode* n); + + void addEdge (VarId vid1, VarId vid2); + + const BBNode* getNode (VarId vid) const; + + BBNode* getNode (VarId vid); + + bool empty (void) const { return nodes_.empty(); } + + void setIndexes (void); + + void clear (void); + + void exportToGraphViz (const char*); + + private: + vector nodes_; + + unordered_map varMap_; +}; + +#endif // HORUS_BAYESBALLGRAPH_H + diff --git a/packages/CLPBN/horus2/BeliefProp.cpp b/packages/CLPBN/horus2/BeliefProp.cpp new file mode 100644 index 000000000..d96384cfd --- /dev/null +++ b/packages/CLPBN/horus2/BeliefProp.cpp @@ -0,0 +1,471 @@ +#include +#include + +#include + +#include + +#include "BeliefProp.h" +#include "FactorGraph.h" +#include "Factor.h" +#include "Indexer.h" +#include "Horus.h" + + +BeliefProp::BeliefProp (const FactorGraph& fg) : GroundSolver (fg) +{ + runned_ = false; +} + + + +BeliefProp::~BeliefProp (void) +{ + for (size_t i = 0; i < varsI_.size(); i++) { + delete varsI_[i]; + } + for (size_t i = 0; i < facsI_.size(); i++) { + delete facsI_[i]; + } + for (size_t i = 0; i < links_.size(); i++) { + delete links_[i]; + } +} + + + +Params +BeliefProp::solveQuery (VarIds queryVids) +{ + assert (queryVids.empty() == false); + return queryVids.size() == 1 + ? 
getPosterioriOf (queryVids[0]) + : getJointDistributionOf (queryVids); +} + + + +void +BeliefProp::printSolverFlags (void) const +{ + stringstream ss; + ss << "belief propagation [" ; + ss << "schedule=" ; + typedef BpOptions::Schedule Sch; + switch (BpOptions::schedule) { + case Sch::SEQ_FIXED: ss << "seq_fixed"; break; + case Sch::SEQ_RANDOM: ss << "seq_random"; break; + case Sch::PARALLEL: ss << "parallel"; break; + case Sch::MAX_RESIDUAL: ss << "max_residual"; break; + } + ss << ",max_iter=" << Util::toString (BpOptions::maxIter); + ss << ",accuracy=" << Util::toString (BpOptions::accuracy); + ss << ",log_domain=" << Util::toString (Globals::logDomain); + ss << "]" ; + cout << ss.str() << endl; +} + + + +Params +BeliefProp::getPosterioriOf (VarId vid) +{ + if (runned_ == false) { + runSolver(); + } + assert (fg.getVarNode (vid)); + VarNode* var = fg.getVarNode (vid); + Params probs; + if (var->hasEvidence()) { + probs.resize (var->range(), LogAware::noEvidence()); + probs[var->getEvidence()] = LogAware::withEvidence(); + } else { + probs.resize (var->range(), LogAware::multIdenty()); + const BpLinks& links = ninf(var)->getLinks(); + if (Globals::logDomain) { + for (size_t i = 0; i < links.size(); i++) { + probs += links[i]->message(); + } + LogAware::normalize (probs); + Util::exp (probs); + } else { + for (size_t i = 0; i < links.size(); i++) { + probs *= links[i]->message(); + } + LogAware::normalize (probs); + } + } + return probs; +} + + + +Params +BeliefProp::getJointDistributionOf (const VarIds& jointVarIds) +{ + if (runned_ == false) { + runSolver(); + } + VarNode* vn = fg.getVarNode (jointVarIds[0]); + const FacNodes& facNodes = vn->neighbors(); + size_t idx = facNodes.size(); + for (size_t i = 0; i < facNodes.size(); i++) { + if (facNodes[i]->factor().contains (jointVarIds)) { + idx = i; + break; + } + } + if (idx == facNodes.size()) { + return getJointByConditioning (jointVarIds); + } + return getFactorJoint (facNodes[idx], jointVarIds); +} + + + +Params +BeliefProp::getFactorJoint ( + FacNode* fn, + const VarIds& jointVarIds) +{ + if (runned_ == false) { + runSolver(); + } + Factor res (fn->factor()); + const BpLinks& links = ninf(fn)->getLinks(); + for (size_t i = 0; i < links.size(); i++) { + Factor msg ({links[i]->varNode()->varId()}, + {links[i]->varNode()->range()}, + getVarToFactorMsg (links[i])); + res.multiply (msg); + } + res.sumOutAllExcept (jointVarIds); + res.reorderArguments (jointVarIds); + res.normalize(); + Params jointDist = res.params(); + if (Globals::logDomain) { + Util::exp (jointDist); + } + return jointDist; +} + + + +void +BeliefProp::runSolver (void) +{ + initializeSolver(); + nIters_ = 0; + while (!converged() && nIters_ < BpOptions::maxIter) { + nIters_ ++; + if (Globals::verbosity > 1) { + Util::printHeader (string ("Iteration ") + Util::toString (nIters_)); + } + switch (BpOptions::schedule) { + case BpOptions::Schedule::SEQ_RANDOM: + std::random_shuffle (links_.begin(), links_.end()); + // no break + case BpOptions::Schedule::SEQ_FIXED: + for (size_t i = 0; i < links_.size(); i++) { + calculateAndUpdateMessage (links_[i]); + } + break; + case BpOptions::Schedule::PARALLEL: + for (size_t i = 0; i < links_.size(); i++) { + calculateMessage (links_[i]); + } + for (size_t i = 0; i < links_.size(); i++) { + updateMessage(links_[i]); + } + break; + case BpOptions::Schedule::MAX_RESIDUAL: + maxResidualSchedule(); + break; + } + } + if (Globals::verbosity > 0) { + if (nIters_ < BpOptions::maxIter) { + cout << "Belief propagation converged in " ; + cout 
<< nIters_ << " iterations" << endl; + } else { + cout << "The maximum number of iterations was hit, terminating..." ; + cout << endl; + } + cout << endl; + } + runned_ = true; +} + + + +void +BeliefProp::createLinks (void) +{ + const FacNodes& facNodes = fg.facNodes(); + for (size_t i = 0; i < facNodes.size(); i++) { + const VarNodes& neighbors = facNodes[i]->neighbors(); + for (size_t j = 0; j < neighbors.size(); j++) { + links_.push_back (new BpLink (facNodes[i], neighbors[j])); + } + } +} + + + +void +BeliefProp::maxResidualSchedule (void) +{ + if (nIters_ == 1) { + for (size_t i = 0; i < links_.size(); i++) { + calculateMessage (links_[i]); + SortedOrder::iterator it = sortedOrder_.insert (links_[i]); + linkMap_.insert (make_pair (links_[i], it)); + } + return; + } + + for (size_t c = 0; c < links_.size(); c++) { + if (Globals::verbosity > 1) { + cout << "current residuals:" << endl; + for (SortedOrder::iterator it = sortedOrder_.begin(); + it != sortedOrder_.end(); ++it) { + cout << " " << setw (30) << left << (*it)->toString(); + cout << "residual = " << (*it)->residual() << endl; + } + } + + SortedOrder::iterator it = sortedOrder_.begin(); + BpLink* link = *it; + if (link->residual() < BpOptions::accuracy) { + return; + } + updateMessage (link); + link->clearResidual(); + sortedOrder_.erase (it); + linkMap_.find (link)->second = sortedOrder_.insert (link); + + // update the messages that depend on message source --> destin + const FacNodes& factorNeighbors = link->varNode()->neighbors(); + for (size_t i = 0; i < factorNeighbors.size(); i++) { + if (factorNeighbors[i] != link->facNode()) { + const BpLinks& links = ninf(factorNeighbors[i])->getLinks(); + for (size_t j = 0; j < links.size(); j++) { + if (links[j]->varNode() != link->varNode()) { + calculateMessage (links[j]); + BpLinkMap::iterator iter = linkMap_.find (links[j]); + sortedOrder_.erase (iter->second); + iter->second = sortedOrder_.insert (links[j]); + } + } + } + } + if (Globals::verbosity > 1) { + Util::printDashedLine(); + } + } +} + + + +void +BeliefProp::calcFactorToVarMsg (BpLink* link) +{ + FacNode* src = link->facNode(); + const VarNode* dst = link->varNode(); + const BpLinks& links = ninf(src)->getLinks(); + // calculate the product of messages that were sent + // to factor `src', except from var `dst' + unsigned reps = 1; + unsigned msgSize = Util::sizeExpected (src->factor().ranges()); + Params msgProduct (msgSize, LogAware::multIdenty()); + if (Globals::logDomain) { + for (size_t i = links.size(); i-- > 0; ) { + if (links[i]->varNode() != dst) { + if (Constants::SHOW_BP_CALCS) { + cout << " message from " << links[i]->varNode()->label(); + cout << ": " ; + } + Util::apply_n_times (msgProduct, getVarToFactorMsg (links[i]), + reps, std::plus()); + if (Constants::SHOW_BP_CALCS) { + cout << endl; + } + } + reps *= links[i]->varNode()->range(); + } + } else { + for (size_t i = links.size(); i-- > 0; ) { + if (links[i]->varNode() != dst) { + if (Constants::SHOW_BP_CALCS) { + cout << " message from " << links[i]->varNode()->label(); + cout << ": " ; + } + Util::apply_n_times (msgProduct, getVarToFactorMsg (links[i]), + reps, std::multiplies()); + if (Constants::SHOW_BP_CALCS) { + cout << endl; + } + } + reps *= links[i]->varNode()->range(); + } + } + Factor result (src->factor().arguments(), + src->factor().ranges(), msgProduct); + result.multiply (src->factor()); + if (Constants::SHOW_BP_CALCS) { + cout << " message product: " << msgProduct << endl; + cout << " original factor: " << src->factor().params() << endl; 
+ cout << " factor product: " << result.params() << endl; + } + result.sumOutAllExcept (dst->varId()); + if (Constants::SHOW_BP_CALCS) { + cout << " marginalized: " << result.params() << endl; + } + link->nextMessage() = result.params(); + LogAware::normalize (link->nextMessage()); + if (Constants::SHOW_BP_CALCS) { + cout << " curr msg: " << link->message() << endl; + cout << " next msg: " << link->nextMessage() << endl; + } +} + + + +Params +BeliefProp::getVarToFactorMsg (const BpLink* link) const +{ + const VarNode* src = link->varNode(); + Params msg; + if (src->hasEvidence()) { + msg.resize (src->range(), LogAware::noEvidence()); + msg[src->getEvidence()] = LogAware::withEvidence(); + } else { + msg.resize (src->range(), LogAware::one()); + } + if (Constants::SHOW_BP_CALCS) { + cout << msg; + } + BpLinks::const_iterator it; + const BpLinks& links = ninf (src)->getLinks(); + if (Globals::logDomain) { + for (it = links.begin(); it != links.end(); ++it) { + if (*it != link) { + msg += (*it)->message(); + } + if (Constants::SHOW_BP_CALCS) { + cout << " x " << (*it)->message(); + } + } + } else { + for (it = links.begin(); it != links.end(); ++it) { + if (*it != link) { + msg *= (*it)->message(); + } + if (Constants::SHOW_BP_CALCS) { + cout << " x " << (*it)->message(); + } + } + } + if (Constants::SHOW_BP_CALCS) { + cout << " = " << msg; + } + return msg; +} + + + +Params +BeliefProp::getJointByConditioning (const VarIds& jointVarIds) const +{ + return GroundSolver::getJointByConditioning ( + GroundSolverType::BP, fg, jointVarIds); +} + + + +void +BeliefProp::initializeSolver (void) +{ + const VarNodes& varNodes = fg.varNodes(); + varsI_.reserve (varNodes.size()); + for (size_t i = 0; i < varNodes.size(); i++) { + varsI_.push_back (new SPNodeInfo()); + } + const FacNodes& facNodes = fg.facNodes(); + facsI_.reserve (facNodes.size()); + for (size_t i = 0; i < facNodes.size(); i++) { + facsI_.push_back (new SPNodeInfo()); + } + createLinks(); + for (size_t i = 0; i < links_.size(); i++) { + FacNode* src = links_[i]->facNode(); + VarNode* dst = links_[i]->varNode(); + ninf (dst)->addBpLink (links_[i]); + ninf (src)->addBpLink (links_[i]); + } +} + + + +bool +BeliefProp::converged (void) +{ + if (links_.size() == 0) { + return true; + } + if (nIters_ == 0) { + return false; + } + if (Globals::verbosity > 2) { + cout << endl; + } + if (nIters_ == 1) { + if (Globals::verbosity > 1) { + cout << "no residuals" << endl << endl; + } + return false; + } + bool converged = true; + if (BpOptions::schedule == BpOptions::Schedule::MAX_RESIDUAL) { + double maxResidual = (*(sortedOrder_.begin()))->residual(); + if (maxResidual > BpOptions::accuracy) { + converged = false; + } else { + converged = true; + } + } else { + for (size_t i = 0; i < links_.size(); i++) { + double residual = links_[i]->residual(); + if (Globals::verbosity > 1) { + cout << links_[i]->toString() + " residual = " << residual << endl; + } + if (residual > BpOptions::accuracy) { + converged = false; + if (Globals::verbosity < 2) { + break; + } + } + } + if (Globals::verbosity > 1) { + cout << endl; + } + } + return converged; +} + + + +void +BeliefProp::printLinkInformation (void) const +{ + for (size_t i = 0; i < links_.size(); i++) { + BpLink* l = links_[i]; + cout << l->toString() << ":" << endl; + cout << " curr msg = " ; + cout << l->message() << endl; + cout << " next msg = " ; + cout << l->nextMessage() << endl; + cout << " residual = " << l->residual() << endl; + } +} + diff --git a/packages/CLPBN/horus2/BeliefProp.h 
b/packages/CLPBN/horus2/BeliefProp.h new file mode 100644 index 000000000..64a41d916 --- /dev/null +++ b/packages/CLPBN/horus2/BeliefProp.h @@ -0,0 +1,188 @@ +#ifndef HORUS_BELIEFPROP_H +#define HORUS_BELIEFPROP_H + +#include +#include +#include + +#include "GroundSolver.h" +#include "Factor.h" +#include "FactorGraph.h" +#include "Util.h" + +using namespace std; + + +class BpLink +{ + public: + BpLink (FacNode* fn, VarNode* vn) + { + fac_ = fn; + var_ = vn; + v1_.resize (vn->range(), LogAware::log (1.0 / vn->range())); + v2_.resize (vn->range(), LogAware::log (1.0 / vn->range())); + currMsg_ = &v1_; + nextMsg_ = &v2_; + residual_ = 0.0; + } + + virtual ~BpLink (void) { }; + + FacNode* facNode (void) const { return fac_; } + + VarNode* varNode (void) const { return var_; } + + const Params& message (void) const { return *currMsg_; } + + Params& nextMessage (void) { return *nextMsg_; } + + double residual (void) const { return residual_; } + + void clearResidual (void) { residual_ = 0.0; } + + void updateResidual (void) + { + residual_ = LogAware::getMaxNorm (v1_,v2_); + } + + virtual void updateMessage (void) + { + swap (currMsg_, nextMsg_); + } + + string toString (void) const + { + stringstream ss; + ss << fac_->getLabel(); + ss << " -- " ; + ss << var_->label(); + return ss.str(); + } + + protected: + FacNode* fac_; + VarNode* var_; + Params v1_; + Params v2_; + Params* currMsg_; + Params* nextMsg_; + double residual_; +}; + +typedef vector BpLinks; + + +class SPNodeInfo +{ + public: + void addBpLink (BpLink* link) { links_.push_back (link); } + const BpLinks& getLinks (void) { return links_; } + private: + BpLinks links_; +}; + + +class BeliefProp : public GroundSolver +{ + public: + BeliefProp (const FactorGraph&); + + virtual ~BeliefProp (void); + + Params solveQuery (VarIds); + + virtual void printSolverFlags (void) const; + + virtual Params getPosterioriOf (VarId); + + virtual Params getJointDistributionOf (const VarIds&); + + protected: + void runSolver (void); + + virtual void createLinks (void); + + virtual void maxResidualSchedule (void); + + virtual void calcFactorToVarMsg (BpLink*); + + virtual Params getVarToFactorMsg (const BpLink*) const; + + virtual Params getJointByConditioning (const VarIds&) const; + + public: + Params getFactorJoint (FacNode* fn, const VarIds&); + + protected: + SPNodeInfo* ninf (const VarNode* var) const + { + return varsI_[var->getIndex()]; + } + + SPNodeInfo* ninf (const FacNode* fac) const + { + return facsI_[fac->getIndex()]; + } + + void calculateAndUpdateMessage (BpLink* link, bool calcResidual = true) + { + if (Globals::verbosity > 2) { + cout << "calculating & updating " << link->toString() << endl; + } + calcFactorToVarMsg (link); + if (calcResidual) { + link->updateResidual(); + } + link->updateMessage(); + } + + void calculateMessage (BpLink* link, bool calcResidual = true) + { + if (Globals::verbosity > 2) { + cout << "calculating " << link->toString() << endl; + } + calcFactorToVarMsg (link); + if (calcResidual) { + link->updateResidual(); + } + } + + void updateMessage (BpLink* link) + { + link->updateMessage(); + if (Globals::verbosity > 2) { + cout << "updating " << link->toString() << endl; + } + } + + struct CompareResidual + { + inline bool operator() (const BpLink* link1, const BpLink* link2) + { + return link1->residual() > link2->residual(); + } + }; + + BpLinks links_; + unsigned nIters_; + vector varsI_; + vector facsI_; + bool runned_; + + typedef multiset SortedOrder; + SortedOrder sortedOrder_; + + typedef unordered_map 
BpLinkMap; + BpLinkMap linkMap_; + + private: + void initializeSolver (void); + + bool converged (void); + + virtual void printLinkInformation (void) const; +}; + +#endif // HORUS_BELIEFPROP_H + diff --git a/packages/CLPBN/horus2/ConstraintTree.cpp b/packages/CLPBN/horus2/ConstraintTree.cpp new file mode 100644 index 000000000..0546d0852 --- /dev/null +++ b/packages/CLPBN/horus2/ConstraintTree.cpp @@ -0,0 +1,1174 @@ +#include + +#include + +#include "ConstraintTree.h" +#include "Util.h" + + +void +CTNode::mergeSubtree (CTNode* n, bool updateLevels) +{ + if (updateLevels) { + updateChildLevels (n, level_ + 1); + } + CTChilds::iterator chIt = childs_.find (n); + if (chIt != childs_.end()) { + assert ((*chIt)->symbol() == n->symbol()); + const CTChilds& childsToAdd = n->childs(); + for (CTChilds::const_iterator it = childsToAdd.begin(); + it != childsToAdd.end(); ++ it) { + (*chIt)->mergeSubtree (*it, false); + } + delete n; + } else { + childs_.insert (n); + } +} + + + +void +CTNode::removeChild (CTNode* child) +{ + assert (childs_.contains (child)); + childs_.remove (child); +} + + + +void +CTNode::removeChilds (void) +{ + childs_.clear(); +} + + + +void +CTNode::removeAndDeleteChild (CTNode* child) +{ + removeChild (child); + CTNode::deleteSubtree (child); +} + + + +void +CTNode::removeAndDeleteAllChilds (void) +{ + for (CTChilds::const_iterator chIt = childs_.begin(); + chIt != childs_.end(); ++ chIt) { + deleteSubtree (*chIt); + } + childs_.clear(); +} + + + +SymbolSet +CTNode::childSymbols (void) const +{ + SymbolSet symbols; + for (CTChilds::const_iterator chIt = childs_.begin(); + chIt != childs_.end(); ++ chIt) { + symbols.insert ((*chIt)->symbol()); + } + return symbols; +} + + + +void +CTNode::updateChildLevels (CTNode* n, unsigned level) +{ + CTNodes stack; + stack.push_back (n); + n->setLevel (level); + while (stack.empty() == false) { + CTNode* node = stack.back(); + stack.pop_back(); + for (CTChilds::const_iterator chIt = node->childs().begin(); + chIt != node->childs().end(); ++ chIt) { + (*chIt)->setLevel (node->level() + 1); + } + stack.insert (stack.end(), node->childs().begin(), + node->childs().end()); + } +} + + + +CTNode* +CTNode::copySubtree (const CTNode* root1) +{ + if (root1->childs().empty()) { + return new CTNode (*root1); + } + CTNode* root2 = new CTNode (*root1); + typedef pair StackPair; + vector stack = { StackPair (root1, root2) }; + while (stack.empty() == false) { + const CTNode* n1 = stack.back().first; + CTNode* n2 = stack.back().second; + stack.pop_back(); + // cout << "n2 childs: " << n2->childs(); + // cout << "n1 childs: " << n1->childs(); + n2->childs().reserve (n1->nrChilds()); + stack.reserve (n1->nrChilds()); + for (CTChilds::const_iterator chIt = n1->childs().begin(); + chIt != n1->childs().end(); ++ chIt) { + CTNode* chCopy = new CTNode (**chIt); + n2->childs().insert_sorted (chCopy); + if ((*chIt)->nrChilds() != 0) { + stack.push_back (StackPair (*chIt, chCopy)); + } + } + } + return root2; +} + + + +void +CTNode::deleteSubtree (CTNode* n) +{ + assert (n); + const CTChilds& childs = n->childs(); + for (CTChilds::const_iterator chIt = childs.begin(); + chIt != childs.end(); ++ chIt) { + deleteSubtree (*chIt); + } + delete n; +} + + + +ostream& operator<< (ostream &out, const CTNode& n) +{ + out << "(" << n.level() << ") " ; + out << n.symbol(); + return out; +} + + + +ConstraintTree::ConstraintTree (unsigned nrLvs) +{ + for (unsigned i = 0; i < nrLvs; i++) { + logVars_.push_back (LogVar (i)); + } + root_ = new CTNode (0, 0); + logVarSet_ = 
LogVarSet (logVars_); +} + + + +ConstraintTree::ConstraintTree (const LogVars& logVars) +{ + root_ = new CTNode (0, 0); + logVars_ = logVars; + logVarSet_ = LogVarSet (logVars); +} + + + +ConstraintTree::ConstraintTree ( + const LogVars& logVars, + const Tuples& tuples) +{ + root_ = new CTNode (0, 0); + logVars_ = logVars; + logVarSet_ = LogVarSet (logVars); + for (size_t i = 0; i < tuples.size(); i++) { + addTuple (tuples[i]); + } +} + + + +ConstraintTree::ConstraintTree (vector> names) +{ + assert (names.empty() == false); + assert (names.front().empty() == false); + unsigned nrLvs = names[0].size(); + for (size_t i = 0; i < nrLvs; i++) { + logVars_.push_back (LogVar (i)); + } + root_ = new CTNode (0, 0); + logVarSet_ = LogVarSet (logVars_); + for (size_t i = 0; i < names.size(); i++) { + Tuple t; + for (size_t j = 0; j < names[i].size(); j++) { + assert (names[i].size() == nrLvs); + t.push_back (LiftedUtils::getSymbol (names[i][j])); + } + addTuple (t); + } +} + + + +ConstraintTree::ConstraintTree (const ConstraintTree& ct) +{ + *this = ct; +} + + + +ConstraintTree::~ConstraintTree (void) +{ + CTNode::deleteSubtree (root_); +} + + + +void +ConstraintTree::addTuple (const Tuple& tuple) +{ + CTNode* prevNode = root_; + for (size_t i = 0; i < tuple.size(); i++) { + CTChilds::const_iterator it = prevNode->findSymbol (tuple[i]); + if (it == prevNode->childs().end()) { + CTNode* newNode = new CTNode (tuple[i], i + 1); + prevNode->mergeSubtree (newNode, false); + prevNode = newNode; + } else { + prevNode = *it; + } + } +} + + + +bool +ConstraintTree::containsTuple (const Tuple& tuple) +{ + CTNode* prevNode = root_; + for (size_t i = 0; i < tuple.size(); i++) { + CTChilds::const_iterator it = prevNode->findSymbol (tuple[i]); + if (it == prevNode->childs().end()) { + return false; + } else { + prevNode = *it; + } + } + return true; +} + + + +void +ConstraintTree::moveToTop (const LogVars& lvs) +{ + for (size_t i = 0; i < lvs.size(); i++) { + size_t pos = Util::indexOf (logVars_, lvs[i]); + assert (pos != logVars_.size()); + for (size_t j = pos; j-- > i; ) { + swapLogVar (logVars_[j]); + } + } +} + + + +void +ConstraintTree::moveToBottom (const LogVars& lvs) +{ + for (size_t i = lvs.size(); i-- > 0; ) { + size_t pos = Util::indexOf (logVars_, lvs[i]); + assert (pos != logVars_.size()); + size_t stop = logVars_.size() - (lvs.size() - i - 1); + for (size_t j = pos; j < stop - 1; j++) { + swapLogVar (logVars_[j]); + } + } +} + + + +void +ConstraintTree::join (ConstraintTree* ct, bool oneTwoOne) +{ + if (logVarSet_.empty()) { + CTNode::deleteSubtree (root_); + root_ = CTNode::copySubtree (ct->root()); + logVars_ = ct->logVars(); + logVarSet_ = ct->logVarSet(); + return; + } + if (oneTwoOne) { + if (logVarSet_.contains (ct->logVarSet())) { + return; + } + if (ct->logVarSet().contains (logVarSet_)) { + CTNode::deleteSubtree (root_); + root_ = CTNode::copySubtree (ct->root()); + logVars_ = ct->logVars(); + logVarSet_ = ct->logVarSet(); + return; + } + } + LogVarSet intersect = logVarSet_ & ct->logVarSet_; + if (intersect.empty()) { + // cartesian product + appendOnBottom (root_, ct->root()->childs()); + Util::addToVector (logVars_, ct->logVars_); + logVarSet_ |= ct->logVarSet_; + } else { + moveToTop (intersect.elements()); + ct->moveToTop (intersect.elements()); + + Tuples tuples; + CTNodes appendNodes; + getTuples (ct->root(), Tuples(), intersect.size(), + tuples, appendNodes); + + CTNodes::const_iterator appendIt = appendNodes.begin(); + for (size_t i = 0; i < tuples.size(); ++ i, ++ appendIt) { + 
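// graft ct's subtree below this shared-prefix tuple onto the matching branch of this tree +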
bool tupleFounded = join (root_, tuples[i], 0, *appendIt); + if (oneTwoOne && tupleFounded == false) { + assert (false); + } + } + + LogVars newLvs (ct->logVars().begin() + intersect.size(), + ct->logVars().end()); + Util::addToVector (logVars_, newLvs); + logVarSet_ |= LogVarSet (newLvs); + } +} + + + +unsigned +ConstraintTree::getLevel (LogVar X) const +{ + unsigned level = Util::indexOf (logVars_, X); + level += 1; // root is in level 0, first logVar is in level 1 + return level; +} + + + +void +ConstraintTree::rename (LogVar X_old, LogVar X_new) +{ + assert (logVarSet_.contains (X_old)); + assert (logVarSet_.contains (X_new) == false); + logVarSet_ -= X_old; + logVarSet_ |= X_new; + for (size_t i = 0; i < logVars_.size(); i++) { + if (logVars_[i] == X_old) { + logVars_[i] = X_new; + return; + } + } + assert (false); +} + + + +void +ConstraintTree::applySubstitution (const Substitution& theta) +{ + for (size_t i = 0; i < logVars_.size(); i++) { + logVars_[i] = theta.newNameFor (logVars_[i]); + } + logVarSet_ = LogVarSet (logVars_); +} + + + +void +ConstraintTree::project (const LogVarSet& X) +{ + assert (logVarSet_.contains (X)); + remove ((logVarSet_ - X)); +} + + + +ConstraintTree +ConstraintTree::projectedCopy (const LogVarSet& X) +{ + ConstraintTree copy = *this; + copy.project (X); + return copy; +} + + + +void +ConstraintTree::remove (const LogVarSet& X) +{ + assert (logVarSet_.contains (X)); + if (X.empty()) { + return; + } + moveToBottom (X.elements()); + unsigned level = getLevel (X.front()) - 1; + CTNodes nodes = getNodesAtLevel (level); + for (CTNodes::const_iterator it = nodes.begin(); + it != nodes.end(); ++ it) { + (*it)->removeAndDeleteAllChilds(); + } + logVars_.resize (logVars_.size() - X.size()); + logVarSet_ -= X; +} + + + +bool +ConstraintTree::ConstraintTree::isSingleton (LogVar X) +{ + Symbol symb; + unsigned level = getLevel (X); + CTNodes stack; + stack.push_back (root_); + while (stack.empty() == false) { + CTNode* node = stack.back(); + stack.pop_back(); + if (node->level() == level) { + if (symb.valid()) { + if (node->symbol() != symb) { + return false; + } + } else { + symb = node->symbol(); + } + } else { + stack.insert (stack.end(), node->childs().begin(), + node->childs().end()); + } + } + return true; +} + + + +LogVarSet +ConstraintTree::singletons (void) +{ + LogVarSet singletons; + for (size_t i = 0; i < logVars_.size(); i++) { + if (isSingleton (logVars_[i])) { + singletons.insert (logVars_[i]); + } + } + return singletons; +} + + + +TupleSet +ConstraintTree::tupleSet (unsigned stopLevel) const +{ + assert (root_->isRoot()); + Tuples tuples; + if (stopLevel == 0) { + stopLevel = logVars_.size(); + } + getTuples (root_, Tuples(), stopLevel, tuples, CTNodes() = {}); + return TupleSet (tuples); +} + + + +TupleSet +ConstraintTree::tupleSet (const LogVars& originalLvs) +{ + LogVars uniqueLvs; + for (size_t i = 0; i < originalLvs.size(); i++) { + if (Util::contains (uniqueLvs, originalLvs[i]) == false) { + uniqueLvs.push_back (originalLvs[i]); + } + } + + Tuples tuples; + moveToTop (uniqueLvs); + unsigned stopLevel = uniqueLvs.size(); + getTuples (root_, Tuples(), stopLevel, tuples, CTNodes() = {}); + + if (originalLvs.size() != uniqueLvs.size()) { + vector indexes; + indexes.reserve (originalLvs.size()); + for (size_t i = 0; i < originalLvs.size(); i++) { + indexes.push_back (Util::indexOf (uniqueLvs, originalLvs[i])); + } + Tuples tuples2; + tuples2.reserve (tuples.size()); + for (size_t i = 0; i < tuples.size(); i++) { + Tuple t; + t.reserve 
(originalLvs.size()); + for (size_t j = 0; j < originalLvs.size(); j++) { + t.push_back (tuples[i][indexes[j]]); + } + tuples2.push_back (t); + } + return TupleSet (tuples2); + } + + return TupleSet (tuples); +} + + + +void +ConstraintTree::exportToGraphViz ( + const char* fileName, + bool showLogVars) const +{ + ofstream out (fileName); + if (!out.is_open()) { + cerr << "Error: couldn't open file '" << fileName << "'." ; + return; + } + out << "digraph {" << endl; + ConstraintTree copy (*this); + copy.moveToTop (copy.logVarSet_.elements()); + CTNodes nodes = getNodesBelow (copy.root_); + out << "\"" << copy.root_ << "\"" << " [label=\"R\"]" << endl; + for (CTNodes::const_iterator it = ++ nodes.begin(); + it != nodes.end(); ++ it) { + out << "\"" << *it << "\""; + out << " [label=\"" << **it << "\"]" ; + out << endl; + } + for (CTNodes::const_iterator it = nodes.begin(); + it != nodes.end(); ++ it) { + const CTChilds& childs = (*it)->childs(); + for (CTChilds::const_iterator chIt = childs.begin(); + chIt != childs.end(); ++ chIt) { + out << "\"" << *it << "\"" ; + out << " -> " ; + out << "\"" << *chIt << "\"" << endl ; + } + } + if (showLogVars) { + out << "Root [label=\"\", shape=plaintext]" << endl; + for (size_t i = 0; i < copy.logVars_.size(); i++) { + out << copy.logVars_[i] << " [label=" ; + out << copy.logVars_[i] << ", " ; + out << "shape=plaintext, fontsize=14]" << endl; + } + out << "Root -> " << copy.logVars_[0]; + out << " [style=invis]" << endl; + for (size_t i = 0; i < copy.logVars_.size() - 1; i++) { + out << copy.logVars_[i] << " -> " << copy.logVars_[i + 1]; + out << " [style=invis]" << endl; + } + } + out << "}" << endl; + out.close(); +} + + + +bool +ConstraintTree::isCountNormalized (const LogVarSet& Ys) +{ + assert (logVarSet_.contains (Ys)); + if (Ys.empty()) { + return true; + } + if (Ys.size() == logVars_.size()) { + assert (LogVarSet (logVars_) == LogVarSet (Ys)); + return true; + } + LogVarSet Zs = logVarSet_ - LogVarSet (Ys); + moveToTop (Zs.elements()); + CTNodes nodes = getNodesAtLevel (Zs.size()); + unsigned count = countTuples (*nodes.begin()); + for (CTNodes::const_iterator it = nodes.begin(); + it != nodes.end(); ++ it) { + if (countTuples (*it) != count) { + return false; + } + } + return true; +} + + + +unsigned +ConstraintTree::getConditionalCount (const LogVarSet& Ys) +{ + assert (isCountNormalized (Ys)); + if (Ys.empty()) { + return 1; + } + if (Ys.size() == logVars_.size()) { + assert (LogVarSet (Ys) == LogVarSet (logVars_)); + return countTuples (root_); + } + LogVarSet Zs = logVarSet_ - Ys; + moveToTop (Zs.elements()); + CTNode* n = root_; + unsigned l = 0; + while (l != Zs.size()) { + n = *(n->childs().begin()); + l ++; + } + return countTuples (n); +} + + + +TinySet +ConstraintTree::getConditionalCounts (const LogVarSet& Ys) +{ + TinySet counts; + assert (logVarSet_.contains (Ys)); + if (Ys.empty()) { + counts.insert (1); + } else if (Ys.size() == logVars_.size()) { + assert (LogVarSet (logVars_) == LogVarSet (Ys)); + counts.insert (countTuples (root_)); + } else { + LogVarSet Zs = logVarSet_ - LogVarSet (Ys); + moveToTop (Zs.elements()); + CTNodes nodes = getNodesAtLevel (Zs.size()); + for (CTNodes::const_iterator it = nodes.begin(); + it != nodes.end(); ++ it) { + counts.insert (countTuples (*it)); + } + } + return counts; +} + + + +bool +ConstraintTree::isCartesianProduct (const LogVarSet& Xs) +{ + assert (logVarSet_.contains (Xs)); + if (Xs.size() <= 1) { + return true; + } + moveToTop (Xs.elements()); + for (size_t i = 1; i < Xs.size(); i++) 
{ + CTNodes nodes = getNodesAtLevel (i); + for (size_t j = 1; j < nodes.size(); j++) { + if (nodes[j-1]->nrChilds() != nodes[ j ]->nrChilds()) { + return false; + } + if (nodes[j-1]->childSymbols() != nodes[ j ]->childSymbols()) { + return false; + } + } + } + return true; +} + + + +std::pair +ConstraintTree::split (const Tuple& tuple) +{ + // assumes that my log vars are already on top + LogVars lvs (logVars_.begin(), logVars_.begin() + tuple.size()); + ConstraintTree tempCt (logVars_, {tuple}); + return split (lvs, &tempCt, lvs); +} + + + +std::pair +ConstraintTree::split ( + const LogVars& lvs1, + ConstraintTree* ct, + const LogVars& lvs2) +{ + assert (lvs1.size() == lvs2.size()); + assert (lvs1.size() == LogVarSet (lvs1).size()); + assert (lvs2.size() == LogVarSet (lvs2).size()); + assert (logVarSet_.contains (lvs1)); + assert (ct->logVarSet().contains (lvs2)); + CTChilds commChilds, exclChilds; + unsigned stopLevel = lvs1.size(); + split (root_, ct->root(), commChilds, exclChilds, stopLevel); + ConstraintTree* commCt = new ConstraintTree (commChilds, logVars_); + ConstraintTree* exclCt = new ConstraintTree (exclChilds, logVars_); + // cout << commCt->tupleSet() << " + " ; + // cout << exclCt->tupleSet() << " = " ; + // cout << tupleSet() << endl; + assert ((commCt->tupleSet() | exclCt->tupleSet()) == tupleSet()); + assert ((exclCt->tupleSet (stopLevel) & ct->tupleSet (stopLevel)).empty()); + return {commCt, exclCt}; +} + + + +ConstraintTrees +ConstraintTree::countNormalize (const LogVarSet& Ys) +{ + assert (logVarSet_.contains (Ys)); + LogVarSet Zs = logVarSet_ - Ys; + if (Ys.empty() || Zs.empty()) { + return { new ConstraintTree (*this) }; + } + moveToTop (Zs.elements()); + ConstraintTrees cts; + unordered_map countMap; + unsigned stopLevel = getLevel (Zs.back()); + const CTChilds& childs = root_->childs(); + + for (CTChilds::const_iterator chIt = childs.begin(); + chIt != childs.end(); ++ chIt) { + const vector>& res = + countNormalize (*chIt, stopLevel); + for (size_t j = 0; j < res.size(); j++) { + unordered_map::iterator it + = countMap.find (res[j].second); + if (it == countMap.end()) { + ConstraintTree* newCt = new ConstraintTree (logVars_); + it = countMap.insert (make_pair (res[j].second, newCt)).first; + cts.push_back (newCt); + } + it->second->root_->mergeSubtree (res[j].first); + } + } + return cts; +} + + + +ConstraintTrees +ConstraintTree::jointCountNormalize ( + ConstraintTree* commCt, + ConstraintTree* exclCt, + LogVar X, + LogVar X_new1, + LogVar X_new2) +{ + unsigned N = getConditionalCount (X); + // cout << "My tuples: " << tupleSet() << endl; + // cout << "CommCt tuples: " << commCt->tupleSet() << endl; + // cout << "ExclCt tuples: " << exclCt->tupleSet() << endl; + // cout << "Counted Lv: " << X << endl; + // cout << "X_new1: " << X_new1 << endl; + // cout << "X_new2: " << X_new2 << endl; + // cout << "Original N: " << N << endl; + // cout << endl; + + ConstraintTrees normCts1 = commCt->countNormalize (X); + vector counts1 (normCts1.size()); + for (size_t i = 0; i < normCts1.size(); i++) { + counts1[i] = normCts1[i]->getConditionalCount (X); + // cout << "normCts1[" << i << "] #" << counts1[i] ; + // cout << " " << normCts1[i]->tupleSet() << endl; + } + + ConstraintTrees normCts2 = exclCt->countNormalize (X); + vector counts2 (normCts2.size()); + for (size_t i = 0; i < normCts2.size(); i++) { + counts2[i] = normCts2[i]->getConditionalCount (X); + // cout << "normCts2[" << i << "] #" << counts2[i] ; + // cout << " " << normCts2[i]->tupleSet() << endl; + } + // cout 
<< endl; + + ConstraintTree* excl1 = 0; + for (size_t i = 0; i < normCts1.size(); i++) { + if (counts1[i] == N) { + excl1 = normCts1[i]; + normCts1.erase (normCts1.begin() + i); + counts1.erase (counts1.begin() + i); + // cout << "joint-count(" << N << ",0)" << endl; + break; + } + } + + ConstraintTree* excl2 = 0; + for (size_t i = 0; i < normCts2.size(); i++) { + if (counts2[i] == N) { + excl2 = normCts2[i]; + normCts2.erase (normCts2.begin() + i); + counts2.erase (counts2.begin() + i); + // cout << "joint-count(0," << N << ")" << endl; + break; + } + } + + for (size_t i = 0; i < normCts1.size(); i++) { + unsigned j; + for (j = 0; counts1[i] + counts2[j] != N; j++) ; + // cout << "joint-count(" << counts1[i] ; + // cout << "," << counts2[j] << ")" << endl; + const CTChilds& childs = normCts2[j]->root_->childs(); + for (CTChilds::const_iterator chIt = childs.begin(); + chIt != childs.end(); ++ chIt) { + normCts1[i]->root_->mergeSubtree (CTNode::copySubtree (*chIt)); + } + delete normCts2[j]; + } + + ConstraintTrees cts = normCts1; + commCt->rename (X, X_new1); + exclCt->rename (X, X_new2); + for (size_t i = 0; i < cts.size(); i++) { + cts[i]->remove (X); + cts[i]->join (commCt); + cts[i]->join (exclCt); + } + + if (excl1 != 0) { + cts.push_back (excl1); + } + if (excl2 != 0) { + cts.push_back (excl2); + } + + return cts; +} + + + +LogVars +ConstraintTree::expand (LogVar X) +{ + moveToBottom ({X}); + assert (isCountNormalized (X)); + CTNodes nodes = getNodesAtLevel (logVars_.size() - 1); + unsigned nrSymbols = getConditionalCount (X); + for (CTNodes::const_iterator it = nodes.begin(); + it != nodes.end(); ++ it) { + Symbols symbols; + const CTChilds& childs = (*it)->childs(); + for (CTChilds::const_iterator chIt = childs.begin(); + chIt != childs.end(); ++ chIt) { + symbols.push_back ((*chIt)->symbol()); + } + (*it)->removeAndDeleteAllChilds(); + CTNode* prev = *it; + assert (symbols.size() == nrSymbols); + for (size_t j = 0; j < nrSymbols; j++) { + CTNode* newNode = new CTNode (symbols[j], (*it)->level() + j); + prev->mergeSubtree (newNode); + prev = newNode; + } + } + LogVars newLvs; + logVars_.pop_back(); + for (size_t i = 0; i < nrSymbols; i++) { + logVars_.push_back (LogVar (logVarSet_.back() + 1)); + newLvs.push_back (LogVar (logVarSet_.back() + 1)); + logVarSet_.insert (LogVar (logVarSet_.back() + 1)); + } + logVarSet_ -= X; + return newLvs; +} + + + +ConstraintTrees +ConstraintTree::ground (LogVar X) +{ + moveToTop ({X}); + ConstraintTrees cts; + const CTChilds& nodes = root_->childs(); + for (CTChilds::const_iterator it = nodes.begin(); + it != nodes.end(); ++ it) { + CTNode* copy = CTNode::copySubtree (*it); + copy->setSymbol ((*it)->symbol()); + ConstraintTree* newCt = new ConstraintTree (logVars_); + newCt->root()->mergeSubtree (copy); + cts.push_back (newCt); + } + return cts; +} + + + +void +ConstraintTree::cloneLogVar (LogVar X_1, LogVar X_2) +{ + moveToBottom ({X_1}); + CTNodes leafs = getNodesAtLevel (logVars_.size()); + for (size_t i = 0; i < leafs.size(); i++) { + leafs[i]->childs().insert_sorted ( + new CTNode (leafs[i]->symbol(), leafs[i]->level() + 1)); + } + logVars_.push_back (X_2); + logVarSet_.insert (X_2); +} + + + +ConstraintTree& +ConstraintTree::operator= (const ConstraintTree& ct) +{ + if (this != &ct) { + root_ = CTNode::copySubtree (ct.root_); + logVars_ = ct.logVars_; + logVarSet_ = ct.logVarSet_; + } + return *this; +} + + + +unsigned +ConstraintTree::countTuples (const CTNode* n) const +{ + if (n->isLeaf()) { + return 1; + } + unsigned sum = 0; + const 
CTChilds& childs = n->childs(); + for (CTChilds::const_iterator chIt = childs.begin(); + chIt != childs.end(); ++ chIt) { + sum += countTuples (*chIt); + } + return sum; +} + + + +CTNodes +ConstraintTree::getNodesBelow (CTNode* fromHere) const +{ + CTNodes nodes; + queue queue; + queue.push (fromHere); + while (queue.empty() == false) { + CTNode* node = queue.front(); + nodes.push_back (node); + for (CTChilds::const_iterator chIt = node->childs().begin(); + chIt != node->childs().end(); ++ chIt) { + queue.push (*chIt); + } + queue.pop(); + } + return nodes; +} + + + +CTNodes +ConstraintTree::getNodesAtLevel (unsigned level) const +{ + assert (level <= logVars_.size()); + if (level == 0) { + return { root_ }; + } + CTNodes stack; + CTNodes nodes; + stack.push_back (root_); + while (stack.empty() == false) { + CTNode* node = stack.back(); + stack.pop_back(); + if (node->level() + 1 == level) { + nodes.insert (nodes.end(), node->childs().begin(), + node->childs().end()); + } else { + stack.insert (stack.end(), node->childs().begin(), + node->childs().end()); + } + } + return nodes; +} + + + +unsigned +ConstraintTree::nrNodes (const CTNode* n) const +{ + unsigned nr = 0; + if (n->isLeaf() == false) { + for (CTChilds::const_iterator chIt = n->childs().begin(); + chIt != n->childs().end(); ++ chIt) { + nr += nrNodes (*chIt); + } + } + return nr; +} + + + +void +ConstraintTree::appendOnBottom (CTNode* n, const CTChilds& childs) +{ + if (childs.empty()) { + return; + } + CTNodes stack { n }; + while (stack.empty() == false) { + CTNode* node = stack.back(); + stack.pop_back(); + if (node->isLeaf()) { + for (CTChilds::const_iterator chIt = childs.begin(); + chIt != childs.end(); ++ chIt) { + node->mergeSubtree (CTNode::copySubtree (*chIt)); + } + } else { + stack.insert (stack.end(), node->childs().begin(), + node->childs().end()); + } + } +} + + + +void +ConstraintTree::swapLogVar (LogVar X) +{ + size_t pos = Util::indexOf (logVars_, X); + assert (pos != logVars_.size()); + const CTNodes& nodes = getNodesAtLevel (pos); + for (CTNodes::const_iterator nodeIt = nodes.begin(); + nodeIt != nodes.end(); ++ nodeIt) { + CTChilds childsCopy = (*nodeIt)->childs(); + (*nodeIt)->removeChilds(); + for (CTChilds::const_iterator ccIt = childsCopy.begin(); + ccIt != childsCopy.end(); ++ ccIt) { + const CTChilds& grandsons = (*ccIt)->childs(); + for (CTChilds::const_iterator gsIt = grandsons.begin(); + gsIt != grandsons.end(); ++ gsIt) { + CTNode* childCopy = new CTNode ( + (*ccIt)->symbol(), (*ccIt)->level() + 1, (*gsIt)->childs()); + (*gsIt)->removeChilds(); + (*gsIt)->childs().insert_sorted (childCopy); + (*gsIt)->setLevel ((*gsIt)->level() - 1); + (*nodeIt)->mergeSubtree ((*gsIt), false); + } + delete (*ccIt); + } + } + std::swap (logVars_[pos], logVars_[pos + 1]); +} + + + +bool +ConstraintTree::join ( + CTNode* currNode, + const Tuple& tuple, + size_t currIdx, + CTNode* appendNode) +{ + bool tupleFounded = false; + CTChilds::const_iterator it = currNode->findSymbol (tuple[currIdx]); + if (it != currNode->childs().end()) { + if (currIdx == tuple.size() - 1) { + appendOnBottom (*it, appendNode->childs()); + return true; + } else { + tupleFounded = join (*it, tuple, currIdx + 1, appendNode); + } + } + return tupleFounded; +} + + + +void +ConstraintTree::getTuples ( + CTNode* n, + Tuples currTuples, + unsigned stopLevel, + Tuples& tuplesCollected, + CTNodes& continuationNodes) const +{ + if (n->isRoot() == false) { + if (currTuples.size() == 0) { + currTuples.push_back ({ n->symbol()}); + } else { + for (size_t i 
= 0; i < currTuples.size(); i++) { + currTuples[i].push_back (n->symbol()); + } + } + if (n->level() == stopLevel) { + for (size_t i = 0; i < currTuples.size(); i++) { + tuplesCollected.push_back (currTuples[i]); + continuationNodes.push_back (n); + } + return; + } + } + const CTChilds& childs = n->childs(); + for (CTChilds::const_iterator chIt = childs.begin(); + chIt != childs.end(); ++ chIt) { + getTuples (*chIt, currTuples, stopLevel, tuplesCollected, + continuationNodes); + } +} + + + +unsigned +ConstraintTree::size (void) const +{ + return countTuples (root_); +} + + + +unsigned +ConstraintTree::nrSymbols (LogVar X) +{ + moveToTop ({X}); + return root_->childs().size(); +} + + + +vector> +ConstraintTree::countNormalize ( + const CTNode* n, + unsigned stopLevel) +{ + if (n->level() == stopLevel) { + return vector>() = { + make_pair (CTNode::copySubtree (n), countTuples (n)) + }; + } + vector> res; + const CTChilds& childs = n->childs(); + for (CTChilds::const_iterator chIt = childs.begin(); + chIt != childs.end(); ++ chIt) { + const vector>& lowerRes = + countNormalize (*chIt, stopLevel); + for (size_t j = 0; j < lowerRes.size(); j++) { + CTNode* newNode = new CTNode (*n); + newNode->mergeSubtree (lowerRes[j].first); + res.push_back (make_pair (newNode, lowerRes[j].second)); + } + } + return res; +} + + + +void +ConstraintTree::split ( + CTNode* n1, + CTNode* n2, + CTChilds& commChilds, + CTChilds& exclChilds, + unsigned stopLevel) +{ + CTChilds& childs1 = n1->childs(); + for (CTChilds::const_iterator chIt1 = childs1.begin(); + chIt1 != childs1.end(); ++ chIt1) { + CTChilds::iterator chIt2 = n2->findSymbol ((*chIt1)->symbol()); + if (chIt2 == n2->childs().end()) { + exclChilds.insert_sorted (CTNode::copySubtree (*chIt1)); + } else { + if ((*chIt1)->level() == stopLevel) { + commChilds.insert_sorted (CTNode::copySubtree (*chIt1)); + } else { + CTChilds lowerCommChilds, lowerExclChilds; + split (*chIt1, *chIt2, lowerCommChilds, lowerExclChilds, stopLevel); + if (lowerCommChilds.empty() == false) { + commChilds.insert_sorted (new CTNode (**chIt1, lowerCommChilds)); + } + if (lowerExclChilds.empty() == false) { + exclChilds.insert_sorted (new CTNode (**chIt1, lowerExclChilds)); + } + } + } + } +} + diff --git a/packages/CLPBN/horus2/ConstraintTree.h b/packages/CLPBN/horus2/ConstraintTree.h new file mode 100644 index 000000000..cccb070b4 --- /dev/null +++ b/packages/CLPBN/horus2/ConstraintTree.h @@ -0,0 +1,237 @@ +#ifndef HORUS_CONSTRAINTTREE_H +#define HORUS_CONSTRAINTTREE_H + +#include +#include + +#include +#include + +#include "TinySet.h" +#include "LiftedUtils.h" + +using namespace std; + + +class CTNode; +typedef vector CTNodes; + +class ConstraintTree; +typedef vector ConstraintTrees; + + +class CTNode +{ + public: + + struct CompareSymbol + { + bool operator() (const CTNode* n1, const CTNode* n2) const + { + return n1->symbol() < n2->symbol(); + } + }; + + private: + + typedef TinySet CTChilds_; + + public: + + CTNode (const CTNode& n, const CTChilds_& chs = CTChilds_()) + : symbol_(n.symbol()), childs_(chs), level_(n.level()) { } + + CTNode (Symbol s, unsigned l, const CTChilds_& chs = CTChilds_()) + : symbol_(s), childs_(chs), level_(l) { } + + unsigned level (void) const { return level_; } + + void setLevel (unsigned level) { level_ = level; } + + Symbol symbol (void) const { return symbol_; } + + void setSymbol (const Symbol s) { symbol_ = s; } + + public: + + CTChilds_& childs (void) { return childs_; } + + const CTChilds_& childs (void) const { return childs_; } + + size_t 
nrChilds (void) const { return childs_.size(); } + + bool isRoot (void) const { return level_ == 0; } + + bool isLeaf (void) const { return childs_.empty(); } + + CTChilds_::iterator findSymbol (Symbol symb) + { + CTNode tmp (symb, 0); + return childs_.find (&tmp); + } + + void mergeSubtree (CTNode*, bool = true); + + void removeChild (CTNode*); + + void removeChilds (void); + + void removeAndDeleteChild (CTNode*); + + void removeAndDeleteAllChilds (void); + + SymbolSet childSymbols (void) const; + + static CTNode* copySubtree (const CTNode*); + + static void deleteSubtree (CTNode*); + + private: + void updateChildLevels (CTNode*, unsigned); + + Symbol symbol_; + CTChilds_ childs_; + unsigned level_; +}; + +ostream& operator<< (ostream &out, const CTNode&); + + +typedef TinySet CTChilds; + + +class ConstraintTree +{ + public: + ConstraintTree (unsigned); + + ConstraintTree (const LogVars&); + + ConstraintTree (const LogVars&, const Tuples&); + + ConstraintTree (vector> names); + + ConstraintTree (const ConstraintTree&); + + ConstraintTree (const CTChilds& rootChilds, const LogVars& logVars) + : root_(new CTNode (0, 0, rootChilds)), + logVars_(logVars), + logVarSet_(logVars) { } + + ~ConstraintTree (void); + + CTNode* root (void) const { return root_; } + + bool empty (void) const { return root_->childs().empty(); } + + const LogVars& logVars (void) const + { + assert (LogVarSet (logVars_) == logVarSet_); + return logVars_; + } + + const LogVarSet& logVarSet (void) const + { + assert (LogVarSet (logVars_) == logVarSet_); + return logVarSet_; + } + + size_t nrLogVars (void) const + { + return logVars_.size(); + assert (LogVarSet (logVars_) == logVarSet_); + } + + void addTuple (const Tuple&); + + bool containsTuple (const Tuple&); + + void moveToTop (const LogVars&); + + void moveToBottom (const LogVars&); + + void join (ConstraintTree*, bool oneTwoOne = false); + + unsigned getLevel (LogVar) const; + + void rename (LogVar, LogVar); + + void applySubstitution (const Substitution&); + + void project (const LogVarSet&); + + ConstraintTree projectedCopy (const LogVarSet&); + + void remove (const LogVarSet&); + + bool isSingleton (LogVar); + + LogVarSet singletons (void); + + TupleSet tupleSet (unsigned = 0) const; + + TupleSet tupleSet (const LogVars&); + + unsigned size (void) const; + + unsigned nrSymbols (LogVar); + + void exportToGraphViz (const char*, bool = false) const; + + bool isCountNormalized (const LogVarSet&); + + unsigned getConditionalCount (const LogVarSet&); + + TinySet getConditionalCounts (const LogVarSet&); + + bool isCartesianProduct (const LogVarSet&); + + std::pair split (const Tuple&); + + std::pair split ( + const LogVars&, ConstraintTree*, const LogVars&); + + ConstraintTrees countNormalize (const LogVarSet&); + + ConstraintTrees jointCountNormalize ( + ConstraintTree*, ConstraintTree*, LogVar, LogVar, LogVar); + + LogVars expand (LogVar); + + ConstraintTrees ground (LogVar); + + void cloneLogVar (LogVar, LogVar); + + ConstraintTree& operator= (const ConstraintTree& ct); + + private: + unsigned countTuples (const CTNode*) const; + + CTNodes getNodesBelow (CTNode*) const; + + CTNodes getNodesAtLevel (unsigned) const; + + unsigned nrNodes (const CTNode* n) const; + + void appendOnBottom (CTNode* n1, const CTChilds&); + + void swapLogVar (LogVar); + + bool join (CTNode*, const Tuple&, size_t, CTNode*); + + void getTuples (CTNode*, Tuples, unsigned, Tuples&, CTNodes&) const; + + vector> countNormalize ( + const CTNode*, unsigned); + + static void split ( + CTNode*, 
CTNode*, CTChilds&, CTChilds&, unsigned); + + CTNode* root_; + LogVars logVars_; + LogVarSet logVarSet_; +}; + + +#endif // HORUS_CONSTRAINTTREE_H + diff --git a/packages/CLPBN/horus2/CountingBp.cpp b/packages/CLPBN/horus2/CountingBp.cpp new file mode 100644 index 000000000..d248c602c --- /dev/null +++ b/packages/CLPBN/horus2/CountingBp.cpp @@ -0,0 +1,424 @@ +#include "CountingBp.h" +#include "WeightedBp.h" + + +bool CountingBp::checkForIdenticalFactors = true; + + +CountingBp::CountingBp (const FactorGraph& fg) + : GroundSolver (fg), freeColor_(0) +{ + findIdenticalFactors(); + setInitialColors(); + createGroups(); + compressedFg_ = getCompressedFactorGraph(); + solver_ = new WeightedBp (*compressedFg_, getWeights()); +} + + + +CountingBp::~CountingBp (void) +{ + delete solver_; + delete compressedFg_; + for (size_t i = 0; i < varClusters_.size(); i++) { + delete varClusters_[i]; + } + for (size_t i = 0; i < facClusters_.size(); i++) { + delete facClusters_[i]; + } +} + + + +void +CountingBp::printSolverFlags (void) const +{ + stringstream ss; + ss << "counting bp [" ; + ss << "schedule=" ; + typedef BpOptions::Schedule Sch; + switch (BpOptions::schedule) { + case Sch::SEQ_FIXED: ss << "seq_fixed"; break; + case Sch::SEQ_RANDOM: ss << "seq_random"; break; + case Sch::PARALLEL: ss << "parallel"; break; + case Sch::MAX_RESIDUAL: ss << "max_residual"; break; + } + ss << ",max_iter=" << BpOptions::maxIter; + ss << ",accuracy=" << BpOptions::accuracy; + ss << ",log_domain=" << Util::toString (Globals::logDomain); + ss << ",chkif=" << + Util::toString (CountingBp::checkForIdenticalFactors); + ss << "]" ; + cout << ss.str() << endl; +} + + + +Params +CountingBp::solveQuery (VarIds queryVids) +{ + assert (queryVids.empty() == false); + Params res; + if (queryVids.size() == 1) { + res = solver_->getPosterioriOf (getRepresentative (queryVids[0])); + } else { + VarNode* vn = fg.getVarNode (queryVids[0]); + const FacNodes& facNodes = vn->neighbors(); + size_t idx = facNodes.size(); + for (size_t i = 0; i < facNodes.size(); i++) { + if (facNodes[i]->factor().contains (queryVids)) { + idx = i; + break; + } + cout << endl; + } + if (idx == facNodes.size()) { + res = GroundSolver::getJointByConditioning ( + GroundSolverType::CBP, fg, queryVids); + } else { + VarIds reprArgs; + for (size_t i = 0; i < queryVids.size(); i++) { + reprArgs.push_back (getRepresentative (queryVids[i])); + } + FacNode* reprFac = getRepresentative (facNodes[idx]); + assert (reprFac != 0); + res = solver_->getFactorJoint (reprFac, reprArgs); + } + } + return res; +} + + + +void +CountingBp::findIdenticalFactors() +{ + const FacNodes& facNodes = fg.facNodes(); + if (checkForIdenticalFactors == false || + facNodes.size() == 1) { + return; + } + for (size_t i = 0; i < facNodes.size(); i++) { + facNodes[i]->factor().setDistId (Util::maxUnsigned()); + } + unsigned groupCount = 1; + for (size_t i = 0; i < facNodes.size() - 1; i++) { + Factor& f1 = facNodes[i]->factor(); + if (f1.distId() != Util::maxUnsigned()) { + continue; + } + f1.setDistId (groupCount); + for (size_t j = i + 1; j < facNodes.size(); j++) { + Factor& f2 = facNodes[j]->factor(); + if (f2.distId() != Util::maxUnsigned()) { + continue; + } + if (f1.size() == f2.size() && + f1.ranges() == f2.ranges() && + f1.params() == f2.params()) { + f2.setDistId (groupCount); + } + } + groupCount ++; + } +} + + + +void +CountingBp::setInitialColors (void) +{ + varColors_.resize (fg.nrVarNodes()); + facColors_.resize (fg.nrFacNodes()); + // create the initial variable colors + 
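// variables share an initial color iff they have the same range and the same evidence state +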
VarColorMap colorMap; + const VarNodes& varNodes = fg.varNodes(); + for (size_t i = 0; i < varNodes.size(); i++) { + unsigned range = varNodes[i]->range(); + VarColorMap::iterator it = colorMap.find (range); + if (it == colorMap.end()) { + it = colorMap.insert (make_pair ( + range, Colors (range + 1, -1))).first; + } + unsigned idx = varNodes[i]->hasEvidence() + ? varNodes[i]->getEvidence() + : range; + Colors& stateColors = it->second; + if (stateColors[idx] == -1) { + stateColors[idx] = getNewColor(); + } + setColor (varNodes[i], stateColors[idx]); + } + const FacNodes& facNodes = fg.facNodes(); + // create the initial factor colors + DistColorMap distColors; + for (size_t i = 0; i < facNodes.size(); i++) { + unsigned distId = facNodes[i]->factor().distId(); + DistColorMap::iterator it = distColors.find (distId); + if (it == distColors.end()) { + it = distColors.insert (make_pair (distId, getNewColor())).first; + } + setColor (facNodes[i], it->second); + } +} + + + +void +CountingBp::createGroups (void) +{ + VarSignMap varGroups; + FacSignMap facGroups; + unsigned nIters = 0; + bool groupsHaveChanged = true; + const VarNodes& varNodes = fg.varNodes(); + const FacNodes& facNodes = fg.facNodes(); + + while (groupsHaveChanged || nIters == 1) { + nIters ++; + + // set a new color to the variables with the same signature + size_t prevVarGroupsSize = varGroups.size(); + varGroups.clear(); + for (size_t i = 0; i < varNodes.size(); i++) { + const VarSignature& signature = getSignature (varNodes[i]); + VarSignMap::iterator it = varGroups.find (signature); + if (it == varGroups.end()) { + it = varGroups.insert (make_pair (signature, VarNodes())).first; + } + it->second.push_back (varNodes[i]); + } + for (VarSignMap::iterator it = varGroups.begin(); + it != varGroups.end(); ++it) { + Color newColor = getNewColor(); + VarNodes& groupMembers = it->second; + for (size_t i = 0; i < groupMembers.size(); i++) { + setColor (groupMembers[i], newColor); + } + } + + size_t prevFactorGroupsSize = facGroups.size(); + facGroups.clear(); + // set a new color to the factors with the same signature + for (size_t i = 0; i < facNodes.size(); i++) { + const FacSignature& signature = getSignature (facNodes[i]); + FacSignMap::iterator it = facGroups.find (signature); + if (it == facGroups.end()) { + it = facGroups.insert (make_pair (signature, FacNodes())).first; + } + it->second.push_back (facNodes[i]); + } + for (FacSignMap::iterator it = facGroups.begin(); + it != facGroups.end(); ++it) { + Color newColor = getNewColor(); + FacNodes& groupMembers = it->second; + for (size_t i = 0; i < groupMembers.size(); i++) { + setColor (groupMembers[i], newColor); + } + } + + groupsHaveChanged = prevVarGroupsSize != varGroups.size() + || prevFactorGroupsSize != facGroups.size(); + } + // printGroups (varGroups, facGroups); + createClusters (varGroups, facGroups); +} + + + +void +CountingBp::createClusters ( + const VarSignMap& varGroups, + const FacSignMap& facGroups) +{ + varClusters_.reserve (varGroups.size()); + for (VarSignMap::const_iterator it = varGroups.begin(); + it != varGroups.end(); ++it) { + const VarNodes& groupVars = it->second; + VarCluster* vc = new VarCluster (groupVars); + for (size_t i = 0; i < groupVars.size(); i++) { + varClusterMap_.insert (make_pair (groupVars[i]->varId(), vc)); + } + varClusters_.push_back (vc); + } + + facClusters_.reserve (facGroups.size()); + for (FacSignMap::const_iterator it = facGroups.begin(); + it != facGroups.end(); ++it) { + FacNode* groupFactor = it->second[0]; + const 
VarNodes& neighs = groupFactor->neighbors(); + VarClusters varClusters; + varClusters.reserve (neighs.size()); + for (size_t i = 0; i < neighs.size(); i++) { + VarId vid = neighs[i]->varId(); + varClusters.push_back (varClusterMap_.find (vid)->second); + } + facClusters_.push_back (new FacCluster (it->second, varClusters)); + } +} + + + +VarSignature +CountingBp::getSignature (const VarNode* varNode) +{ + const FacNodes& neighs = varNode->neighbors(); + VarSignature sign; + sign.reserve (neighs.size() + 1); + for (size_t i = 0; i < neighs.size(); i++) { + sign.push_back (make_pair ( + getColor (neighs[i]), + neighs[i]->factor().indexOf (varNode->varId()))); + } + std::sort (sign.begin(), sign.end()); + sign.push_back (make_pair (getColor (varNode), 0)); + return sign; +} + + + +FacSignature +CountingBp::getSignature (const FacNode* facNode) +{ + const VarNodes& neighs = facNode->neighbors(); + FacSignature sign; + sign.reserve (neighs.size() + 1); + for (size_t i = 0; i < neighs.size(); i++) { + sign.push_back (getColor (neighs[i])); + } + sign.push_back (getColor (facNode)); + return sign; +} + + + +VarId +CountingBp::getRepresentative (VarId vid) +{ + assert (Util::contains (varClusterMap_, vid)); + VarCluster* vc = varClusterMap_.find (vid)->second; + return vc->representative()->varId(); +} + + + +FacNode* +CountingBp::getRepresentative (FacNode* fn) +{ + for (size_t i = 0; i < facClusters_.size(); i++) { + if (Util::contains (facClusters_[i]->members(), fn)) { + return facClusters_[i]->representative(); + } + } + return 0; +} + + + +FactorGraph* +CountingBp::getCompressedFactorGraph (void) +{ + FactorGraph* fg = new FactorGraph(); + for (size_t i = 0; i < varClusters_.size(); i++) { + VarNode* newVar = new VarNode (varClusters_[i]->first()); + varClusters_[i]->setRepresentative (newVar); + fg->addVarNode (newVar); + } + for (size_t i = 0; i < facClusters_.size(); i++) { + Vars vars; + const VarClusters& clusters = facClusters_[i]->varClusters(); + for (size_t j = 0; j < clusters.size(); j++) { + vars.push_back (clusters[j]->representative()); + } + const Factor& groundFac = facClusters_[i]->first()->factor(); + FacNode* fn = new FacNode (Factor ( + vars, groundFac.params(), groundFac.distId())); + facClusters_[i]->setRepresentative (fn); + fg->addFacNode (fn); + for (size_t j = 0; j < vars.size(); j++) { + fg->addEdge (static_cast (vars[j]), fn); + } + } + return fg; +} + + + +vector> +CountingBp::getWeights (void) const +{ + vector> weights; + weights.reserve (facClusters_.size()); + for (size_t i = 0; i < facClusters_.size(); i++) { + const VarClusters& neighs = facClusters_[i]->varClusters(); + weights.push_back ({ }); + weights.back().reserve (neighs.size()); + for (size_t j = 0; j < neighs.size(); j++) { + weights.back().push_back (getWeight ( + facClusters_[i], neighs[j], j)); + } + } + return weights; +} + + + +unsigned +CountingBp::getWeight ( + const FacCluster* fc, + const VarCluster* vc, + size_t index) const +{ + unsigned weight = 0; + VarId reprVid = vc->representative()->varId(); + VarNode* groundVar = fg.getVarNode (reprVid); + const FacNodes& neighs = groundVar->neighbors(); + for (size_t i = 0; i < neighs.size(); i++) { + FacNodes::const_iterator it; + it = std::find (fc->members().begin(), fc->members().end(), neighs[i]); + if (it != fc->members().end() && + (*it)->factor().indexOf (reprVid) == index) { + weight ++; + } + } + return weight; +} + + + +void +CountingBp::printGroups ( + const VarSignMap& varGroups, + const FacSignMap& facGroups) const +{ + unsigned 
count = 1; + cout << "variable groups:" << endl; + for (VarSignMap::const_iterator it = varGroups.begin(); + it != varGroups.end(); ++it) { + const VarNodes& groupMembers = it->second; + if (groupMembers.size() > 0) { + cout << count << ": " ; + for (size_t i = 0; i < groupMembers.size(); i++) { + cout << groupMembers[i]->label() << " " ; + } + count ++; + cout << endl; + } + } + count = 1; + cout << endl << "factor groups:" << endl; + for (FacSignMap::const_iterator it = facGroups.begin(); + it != facGroups.end(); ++it) { + const FacNodes& groupMembers = it->second; + if (groupMembers.size() > 0) { + cout << ++count << ": " ; + for (size_t i = 0; i < groupMembers.size(); i++) { + cout << groupMembers[i]->getLabel() << " " ; + } + count ++; + cout << endl; + } + } +} + diff --git a/packages/CLPBN/horus2/CountingBp.h b/packages/CLPBN/horus2/CountingBp.h new file mode 100644 index 000000000..2cbd2f995 --- /dev/null +++ b/packages/CLPBN/horus2/CountingBp.h @@ -0,0 +1,182 @@ +#ifndef HORUS_COUNTINGBP_H +#define HORUS_COUNTINGBP_H + +#include + +#include "GroundSolver.h" +#include "FactorGraph.h" +#include "Util.h" +#include "Horus.h" + +class VarCluster; +class FacCluster; +class WeightedBp; + +typedef long Color; +typedef vector Colors; +typedef vector> VarSignature; +typedef vector FacSignature; + +typedef unordered_map DistColorMap; +typedef unordered_map VarColorMap; + +typedef unordered_map VarSignMap; +typedef unordered_map FacSignMap; + +typedef unordered_map VarClusterMap; + +typedef vector VarClusters; +typedef vector FacClusters; + +template +inline size_t hash_combine (size_t seed, const T& v) +{ + return seed ^ (hash()(v) + 0x9e3779b9 + (seed << 6) + (seed >> 2)); +} + + +namespace std { + template struct hash> + { + size_t operator() (const std::pair& p) const + { + return hash_combine (std::hash()(p.first), p.second); + } + }; + + template struct hash> + { + size_t operator() (const std::vector& vec) const + { + size_t h = 0; + typename vector::const_iterator first = vec.begin(); + typename vector::const_iterator last = vec.end(); + for (; first != last; ++first) { + h = hash_combine (h, *first); + } + return h; + } + }; +} + + +class VarCluster +{ + public: + VarCluster (const VarNodes& vs) : members_(vs) { } + + const VarNode* first (void) const { return members_.front(); } + + const VarNodes& members (void) const { return members_; } + + VarNode* representative (void) const { return repr_; } + + void setRepresentative (VarNode* vn) { repr_ = vn; } + + private: + VarNodes members_; + VarNode* repr_; +}; + + +class FacCluster +{ + public: + FacCluster (const FacNodes& fcs, const VarClusters& vcs) + : members_(fcs), varClusters_(vcs) { } + + const FacNode* first (void) const { return members_.front(); } + + const FacNodes& members (void) const { return members_; } + + FacNode* representative (void) const { return repr_; } + + void setRepresentative (FacNode* fn) { repr_ = fn; } + + VarClusters& varClusters (void) { return varClusters_; } + + private: + FacNodes members_; + FacNode* repr_; + VarClusters varClusters_; +}; + + +class CountingBp : public GroundSolver +{ + public: + CountingBp (const FactorGraph& fg); + + ~CountingBp (void); + + void printSolverFlags (void) const; + + Params solveQuery (VarIds); + + static bool checkForIdenticalFactors; + + private: + Color getNewColor (void) + { + ++ freeColor_; + return freeColor_ - 1; + } + + Color getColor (const VarNode* vn) const + { + return varColors_[vn->getIndex()]; + } + + Color getColor (const FacNode* fn) const + { + 
return facColors_[fn->getIndex()]; + } + + void setColor (const VarNode* vn, Color c) + { + varColors_[vn->getIndex()] = c; + } + + void setColor (const FacNode* fn, Color c) + { + facColors_[fn->getIndex()] = c; + } + + void findIdenticalFactors (void); + + void setInitialColors (void); + + void createGroups (void); + + void createClusters (const VarSignMap&, const FacSignMap&); + + VarSignature getSignature (const VarNode*); + + FacSignature getSignature (const FacNode*); + + void printGroups (const VarSignMap&, const FacSignMap&) const; + + VarId getRepresentative (VarId vid); + + FacNode* getRepresentative (FacNode*); + + FactorGraph* getCompressedFactorGraph (void); + + vector> getWeights (void) const; + + unsigned getWeight (const FacCluster*, + const VarCluster*, size_t index) const; + + + Color freeColor_; + Colors varColors_; + Colors facColors_; + VarClusters varClusters_; + FacClusters facClusters_; + VarClusterMap varClusterMap_; + const FactorGraph* compressedFg_; + WeightedBp* solver_; +}; + +#endif // HORUS_COUNTINGBP_H + diff --git a/packages/CLPBN/horus2/ElimGraph.cpp b/packages/CLPBN/horus2/ElimGraph.cpp new file mode 100644 index 000000000..f617d8237 --- /dev/null +++ b/packages/CLPBN/horus2/ElimGraph.cpp @@ -0,0 +1,243 @@ +#include + +#include + +#include "ElimGraph.h" + +ElimHeuristic ElimGraph::elimHeuristic = MIN_NEIGHBORS; + + +ElimGraph::ElimGraph (const vector& factors) +{ + for (size_t i = 0; i < factors.size(); i++) { + if (factors[i] == 0) { // if contained just one var with evidence + continue; + } + const VarIds& vids = factors[i]->arguments(); + for (size_t j = 0; j < vids.size() - 1; j++) { + EgNode* n1 = getEgNode (vids[j]); + if (n1 == 0) { + n1 = new EgNode (vids[j], factors[i]->range (j)); + addNode (n1); + } + for (size_t k = j + 1; k < vids.size(); k++) { + EgNode* n2 = getEgNode (vids[k]); + if (n2 == 0) { + n2 = new EgNode (vids[k], factors[i]->range (k)); + addNode (n2); + } + if (neighbors (n1, n2) == false) { + addEdge (n1, n2); + } + } + } + if (vids.size() == 1) { + if (getEgNode (vids[0]) == 0) { + addNode (new EgNode (vids[0], factors[i]->range (0))); + } + } + } +} + + + +ElimGraph::~ElimGraph (void) +{ + for (size_t i = 0; i < nodes_.size(); i++) { + delete nodes_[i]; + } +} + + + +VarIds +ElimGraph::getEliminatingOrder (const VarIds& exclude) +{ + VarIds elimOrder; + unmarked_.reserve (nodes_.size()); + for (size_t i = 0; i < nodes_.size(); i++) { + if (Util::contains (exclude, nodes_[i]->varId()) == false) { + unmarked_.insert (nodes_[i]); + } + } + size_t nrVarsToEliminate = nodes_.size() - exclude.size(); + for (size_t i = 0; i < nrVarsToEliminate; i++) { + EgNode* node = getLowestCostNode(); + unmarked_.remove (node); + const EGNeighs& neighs = node->neighbors(); + for (size_t j = 0; j < neighs.size(); j++) { + neighs[j]->removeNeighbor (node); + } + elimOrder.push_back (node->varId()); + connectAllNeighbors (node); + } + return elimOrder; +} + + + +void +ElimGraph::print (void) const +{ + for (size_t i = 0; i < nodes_.size(); i++) { + cout << "node " << nodes_[i]->label() << " neighs:" ; + EGNeighs neighs = nodes_[i]->neighbors(); + for (size_t j = 0; j < neighs.size(); j++) { + cout << " " << neighs[j]->label(); + } + cout << endl; + } +} + + + +void +ElimGraph::exportToGraphViz ( + const char* fileName, + bool showNeighborless, + const VarIds& highlightVarIds) const +{ + ofstream out (fileName); + if (!out.is_open()) { + cerr << "Error: couldn't open file '" << fileName << "'." 
; + return; + } + out << "strict graph {" << endl; + for (size_t i = 0; i < nodes_.size(); i++) { + if (showNeighborless || nodes_[i]->neighbors().size() != 0) { + out << '"' << nodes_[i]->label() << '"' << endl; + } + } + for (size_t i = 0; i < highlightVarIds.size(); i++) { + EgNode* node =getEgNode (highlightVarIds[i]); + if (node) { + out << '"' << node->label() << '"' ; + out << " [shape=box3d]" << endl; + } else { + cerr << "Error: invalid variable id: " << highlightVarIds[i] << "." ; + cerr << endl; + exit (EXIT_FAILURE); + } + } + for (size_t i = 0; i < nodes_.size(); i++) { + EGNeighs neighs = nodes_[i]->neighbors(); + for (size_t j = 0; j < neighs.size(); j++) { + out << '"' << nodes_[i]->label() << '"' << " -- " ; + out << '"' << neighs[j]->label() << '"' << endl; + } + } + out << "}" << endl; + out.close(); +} + + + +VarIds +ElimGraph::getEliminationOrder ( + const Factors& factors, + VarIds excludedVids) +{ + if (elimHeuristic == ElimHeuristic::SEQUENTIAL) { + VarIds allVids; + Factors::const_iterator first = factors.begin(); + Factors::const_iterator end = factors.end(); + for (; first != end; ++first) { + Util::addToVector (allVids, (*first)->arguments()); + } + TinySet elimOrder (allVids); + elimOrder -= TinySet (excludedVids); + return elimOrder.elements(); + } + ElimGraph graph (factors); + return graph.getEliminatingOrder (excludedVids); +} + + + +void +ElimGraph::addNode (EgNode* n) +{ + nodes_.push_back (n); + n->setIndex (nodes_.size() - 1); + varMap_.insert (make_pair (n->varId(), n)); +} + + + +EgNode* +ElimGraph::getEgNode (VarId vid) const +{ + unordered_map::const_iterator it; + it = varMap_.find (vid); + return (it != varMap_.end()) ? it->second : 0; +} + + + +EgNode* +ElimGraph::getLowestCostNode (void) const +{ + EgNode* bestNode = 0; + unsigned minCost = std::numeric_limits::max(); + EGNeighs::const_iterator it; + switch (elimHeuristic) { + case MIN_NEIGHBORS: { + for (it = unmarked_.begin(); it != unmarked_.end(); ++ it) { + unsigned cost = getNeighborsCost (*it); + if (cost < minCost) { + bestNode = *it; + minCost = cost; + } + }} + break; + case MIN_WEIGHT: { + for (it = unmarked_.begin(); it != unmarked_.end(); ++ it) { + unsigned cost = getWeightCost (*it); + if (cost < minCost) { + bestNode = *it; + minCost = cost; + } + }} + break; + case MIN_FILL: { + for (it = unmarked_.begin(); it != unmarked_.end(); ++ it) { + unsigned cost = getFillCost (*it); + if (cost < minCost) { + bestNode = *it; + minCost = cost; + } + }} + break; + case WEIGHTED_MIN_FILL: { + for (it = unmarked_.begin(); it != unmarked_.end(); ++ it) { + unsigned cost = getWeightedFillCost (*it); + if (cost < minCost) { + bestNode = *it; + minCost = cost; + } + }} + break; + default: + assert (false); + } + assert (bestNode); + return bestNode; +} + + + +void +ElimGraph::connectAllNeighbors (const EgNode* n) +{ + const EGNeighs& neighs = n->neighbors(); + if (neighs.size() > 0) { + for (size_t i = 0; i < neighs.size() - 1; i++) { + for (size_t j = i + 1; j < neighs.size(); j++) { + if ( ! 
neighbors (neighs[i], neighs[j])) { + addEdge (neighs[i], neighs[j]); + } + } + } + } +} + diff --git a/packages/CLPBN/horus2/ElimGraph.h b/packages/CLPBN/horus2/ElimGraph.h new file mode 100644 index 000000000..8188b5ba6 --- /dev/null +++ b/packages/CLPBN/horus2/ElimGraph.h @@ -0,0 +1,139 @@ +#ifndef HORUS_ELIMGRAPH_H +#define HORUS_ELIMGRAPH_H + +#include "unordered_map" + +#include "FactorGraph.h" +#include "TinySet.h" +#include "Horus.h" + + +using namespace std; + +enum ElimHeuristic +{ + SEQUENTIAL, + MIN_NEIGHBORS, + MIN_WEIGHT, + MIN_FILL, + WEIGHTED_MIN_FILL +}; + + +class EgNode; + +typedef TinySet EGNeighs; + + +class EgNode : public Var +{ + public: + EgNode (VarId vid, unsigned range) : Var (vid, range) { } + + void addNeighbor (EgNode* n) { neighs_.insert (n); } + + void removeNeighbor (EgNode* n) { neighs_.remove (n); } + + bool isNeighbor (EgNode* n) const { return neighs_.contains (n); } + + const EGNeighs& neighbors (void) const { return neighs_; } + + private: + EGNeighs neighs_; +}; + + +class ElimGraph +{ + public: + ElimGraph (const Factors&); + + ~ElimGraph (void); + + VarIds getEliminatingOrder (const VarIds&); + + void print (void) const; + + void exportToGraphViz (const char*, bool = true, + const VarIds& = VarIds()) const; + + static VarIds getEliminationOrder (const Factors&, VarIds); + + static ElimHeuristic elimHeuristic; + + private: + + void addEdge (EgNode* n1, EgNode* n2) + { + assert (n1 != n2); + n1->addNeighbor (n2); + n2->addNeighbor (n1); + } + + unsigned getNeighborsCost (const EgNode* n) const + { + return n->neighbors().size(); + } + + unsigned getWeightCost (const EgNode* n) const + { + unsigned cost = 1; + const EGNeighs& neighs = n->neighbors(); + for (size_t i = 0; i < neighs.size(); i++) { + cost *= neighs[i]->range(); + } + return cost; + } + + unsigned getFillCost (const EgNode* n) const + { + unsigned cost = 0; + const EGNeighs& neighs = n->neighbors(); + if (neighs.size() > 0) { + for (size_t i = 0; i < neighs.size() - 1; i++) { + for (size_t j = i + 1; j < neighs.size(); j++) { + if ( ! neighbors (neighs[i], neighs[j])) { + cost ++; + } + } + } + } + return cost; + } + + unsigned getWeightedFillCost (const EgNode* n) const + { + unsigned cost = 0; + const EGNeighs& neighs = n->neighbors(); + if (neighs.size() > 0) { + for (size_t i = 0; i < neighs.size() - 1; i++) { + for (size_t j = i + 1; j < neighs.size(); j++) { + if ( ! 
neighbors (neighs[i], neighs[j])) { + cost += neighs[i]->range() * neighs[j]->range(); + } + } + } + } + return cost; + } + + bool neighbors (EgNode* n1, EgNode* n2) const + { + return n1->isNeighbor (n2); + } + + void addNode (EgNode*); + + EgNode* getEgNode (VarId) const; + + EgNode* getLowestCostNode (void) const; + + void connectAllNeighbors (const EgNode*); + + vector nodes_; + TinySet unmarked_; + unordered_map varMap_; +}; + +#endif // HORUS_ELIMGRAPH_H + diff --git a/packages/CLPBN/horus2/Factor.cpp b/packages/CLPBN/horus2/Factor.cpp new file mode 100644 index 000000000..9b8ad0be7 --- /dev/null +++ b/packages/CLPBN/horus2/Factor.cpp @@ -0,0 +1,237 @@ +#include +#include + +#include + +#include +#include + +#include "Factor.h" +#include "Indexer.h" + + +Factor::Factor (const Factor& g) +{ + clone (g); +} + + + +Factor::Factor ( + const VarIds& vids, + const Ranges& ranges, + const Params& params, + unsigned distId) +{ + args_ = vids; + ranges_ = ranges; + params_ = params; + distId_ = distId; + assert (params_.size() == Util::sizeExpected (ranges_)); +} + + + +Factor::Factor ( + const Vars& vars, + const Params& params, + unsigned distId) +{ + for (size_t i = 0; i < vars.size(); i++) { + args_.push_back (vars[i]->varId()); + ranges_.push_back (vars[i]->range()); + } + params_ = params; + distId_ = distId; + assert (params_.size() == Util::sizeExpected (ranges_)); +} + + + +void +Factor::sumOut (VarId vid) +{ + if (vid == args_.front() && ranges_.front() == 2) { + // optimization + sumOutFirstVariable(); + } else if (vid == args_.back() && ranges_.back() == 2) { + // optimization + sumOutLastVariable(); + } else { + assert (indexOf (vid) != args_.size()); + sumOutIndex (indexOf (vid)); + } +} + + + +void +Factor::sumOutAllExcept (VarId vid) +{ + assert (indexOf (vid) != args_.size()); + sumOutAllExceptIndex (indexOf (vid)); +} + + + +void +Factor::sumOutAllExcept (const VarIds& vids) +{ + vector mask (args_.size(), false); + for (unsigned i = 0; i < vids.size(); i++) { + assert (indexOf (vids[i]) != args_.size()); + mask[indexOf (vids[i])] = true; + } + sumOutArgs (mask); +} + + + +void +Factor::sumOutAllExceptIndex (size_t idx) +{ + assert (idx < args_.size()); + vector mask (args_.size(), false); + mask[idx] = true; + sumOutArgs (mask); +} + + +void +Factor::multiply (Factor& g) +{ + if (args_.size() == 0) { + clone (g); + return; + } + TFactor::multiply (g); +} + + + +string +Factor::getLabel (void) const +{ + stringstream ss; + ss << "f(" ; + for (size_t i = 0; i < args_.size(); i++) { + if (i != 0) ss << "," ; + ss << Var (args_[i], ranges_[i]).label(); + } + ss << ")" ; + return ss.str(); +} + + + +void +Factor::print (void) const +{ + Vars vars; + for (size_t i = 0; i < args_.size(); i++) { + vars.push_back (new Var (args_[i], ranges_[i])); + } + vector jointStrings = Util::getStateLines (vars); + for (size_t i = 0; i < params_.size(); i++) { + // cout << "[" << distId_ << "] " ; + cout << "f(" << jointStrings[i] << ")" ; + cout << " = " << params_[i] << endl; + } + cout << endl; + for (size_t i = 0; i < vars.size(); i++) { + delete vars[i]; + } +} + + + +void +Factor::sumOutFirstVariable (void) +{ + size_t sep = params_.size() / 2; + if (Globals::logDomain) { + std::transform ( + params_.begin(), params_.begin() + sep, + params_.begin() + sep, params_.begin(), + Util::logSum); + + } else { + std::transform ( + params_.begin(), params_.begin() + sep, + params_.begin() + sep, params_.begin(), + std::plus()); + } + params_.resize (sep); + args_.erase (args_.begin()); + 
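// keep ranges_ in sync: drop the range entry of the summed-out first argument +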
ranges_.erase (ranges_.begin()); +} + + + +void +Factor::sumOutLastVariable (void) +{ + Params::iterator first1 = params_.begin(); + Params::iterator first2 = params_.begin(); + Params::iterator last = params_.end(); + if (Globals::logDomain) { + while (first2 != last) { + // the arguments can be swaped, but that is ok + *first1++ = Util::logSum (*first2++, *first2++); + } + } else { + while (first2 != last) { + *first1++ = (*first2++) + (*first2++); + } + } + params_.resize (params_.size() / 2); + args_.pop_back(); + ranges_.pop_back(); +} + + + +void +Factor::sumOutArgs (const vector& mask) +{ + assert (mask.size() == args_.size()); + size_t new_size = 1; + Ranges oldRanges = ranges_; + args_.clear(); + ranges_.clear(); + for (unsigned i = 0; i < mask.size(); i++) { + if (mask[i]) { + new_size *= ranges_[i]; + args_.push_back (args_[i]); + ranges_.push_back (ranges_[i]); + } + } + Params newps (new_size, LogAware::addIdenty()); + Params::const_iterator first = params_.begin(); + Params::const_iterator last = params_.end(); + MapIndexer indexer (oldRanges, mask); + if (Globals::logDomain) { + while (first != last) { + newps[indexer] = Util::logSum (newps[indexer], *first++); + ++ indexer; + } + } else { + while (first != last) { + newps[indexer] += *first++; + ++ indexer; + } + } + params_ = newps; +} + + + +void +Factor::clone (const Factor& g) +{ + args_ = g.arguments(); + ranges_ = g.ranges(); + params_ = g.params(); + distId_ = g.distId(); +} + diff --git a/packages/CLPBN/horus2/Factor.h b/packages/CLPBN/horus2/Factor.h new file mode 100644 index 000000000..742f20f7a --- /dev/null +++ b/packages/CLPBN/horus2/Factor.h @@ -0,0 +1,294 @@ +#ifndef HORUS_FACTOR_H +#define HORUS_FACTOR_H + +#include + +#include "Var.h" +#include "Indexer.h" +#include "Util.h" + + +using namespace std; + + +template +class TFactor +{ + public: + const vector& arguments (void) const { return args_; } + + vector& arguments (void) { return args_; } + + const Ranges& ranges (void) const { return ranges_; } + + const Params& params (void) const { return params_; } + + Params& params (void) { return params_; } + + size_t nrArguments (void) const { return args_.size(); } + + size_t size (void) const { return params_.size(); } + + unsigned distId (void) const { return distId_; } + + void setDistId (unsigned id) { distId_ = id; } + + void normalize (void) { LogAware::normalize (params_); } + + void randomize (void) + { + for (size_t i = 0; i < params_.size(); ++i) { + params_[i] = (double) std::rand() / RAND_MAX; + } + } + + void setParams (const Params& newParams) + { + params_ = newParams; + assert (params_.size() == Util::sizeExpected (ranges_)); + } + + size_t indexOf (const T& t) const + { + return Util::indexOf (args_, t); + } + + const T& argument (size_t idx) const + { + assert (idx < args_.size()); + return args_[idx]; + } + + T& argument (size_t idx) + { + assert (idx < args_.size()); + return args_[idx]; + } + + unsigned range (size_t idx) const + { + assert (idx < ranges_.size()); + return ranges_[idx]; + } + + void multiply (TFactor& g) + { + if (args_ == g.arguments()) { + // optimization + Globals::logDomain + ? 
params_ += g.params() + : params_ *= g.params(); + return; + } + unsigned range_prod = 1; + bool share_arguments = false; + const vector& g_args = g.arguments(); + const Ranges& g_ranges = g.ranges(); + const Params& g_params = g.params(); + for (size_t i = 0; i < g_args.size(); i++) { + size_t idx = indexOf (g_args[i]); + if (idx == args_.size()) { + range_prod *= g_ranges[i]; + args_.push_back (g_args[i]); + ranges_.push_back (g_ranges[i]); + } else { + share_arguments = true; + } + } + if (share_arguments == false) { + // optimization + cartesianProduct (g_params.begin(), g_params.end()); + } else { + extend (range_prod); + Params::iterator it = params_.begin(); + MapIndexer indexer (args_, ranges_, g_args, g_ranges); + if (Globals::logDomain) { + for (; indexer.valid(); ++it, ++indexer) { + *it += g_params[indexer]; + } + } else { + for (; indexer.valid(); ++it, ++indexer) { + *it *= g_params[indexer]; + } + } + } + } + + void sumOutIndex (size_t idx) + { + assert (idx < args_.size()); + assert (args_.size() > 1); + size_t new_size = params_.size() / ranges_[idx]; + Params newps (new_size, LogAware::addIdenty()); + Params::const_iterator first = params_.begin(); + Params::const_iterator last = params_.end(); + MapIndexer indexer (ranges_, idx); + if (Globals::logDomain) { + for (; first != last; ++indexer) { + newps[indexer] = Util::logSum (newps[indexer], *first++); + } + } else { + for (; first != last; ++indexer) { + newps[indexer] += *first++; + } + } + params_ = newps; + args_.erase (args_.begin() + idx); + ranges_.erase (ranges_.begin() + idx); + } + + void absorveEvidence (const T& arg, unsigned obsIdx) + { + size_t idx = indexOf (arg); + assert (idx != args_.size()); + assert (obsIdx < ranges_[idx]); + Params newps; + newps.reserve (params_.size() / ranges_[idx]); + Indexer indexer (ranges_); + for (unsigned i = 0; i < obsIdx; ++i) { + indexer.incrementDimension (idx); + } + while (indexer.valid()) { + newps.push_back (params_[indexer]); + indexer.incrementExceptDimension (idx); + } + params_ = newps; + args_.erase (args_.begin() + idx); + ranges_.erase (ranges_.begin() + idx); + } + + void reorderArguments (const vector new_args) + { + assert (new_args.size() == args_.size()); + if (new_args == args_) { + return; // already on the desired order + } + Ranges new_ranges; + for (size_t i = 0; i < new_args.size(); i++) { + size_t idx = indexOf (new_args[i]); + assert (idx != args_.size()); + new_ranges.push_back (ranges_[idx]); + } + Params newps; + newps.reserve (params_.size()); + MapIndexer indexer (new_args, new_ranges, args_, ranges_); + for (; indexer.valid(); ++indexer) { + newps.push_back (params_[indexer]); + } + params_ = newps; + args_ = new_args; + ranges_ = new_ranges; + } + + bool contains (const T& arg) const + { + return Util::contains (args_, arg); + } + + bool contains (const vector& args) const + { + for (size_t i = 0; i < args.size(); i++) { + if (contains (args[i]) == false) { + return false; + } + } + return true; + } + + double& operator[] (size_t idx) + { + assert (idx < params_.size()); + return params_[idx]; + } + + + protected: + vector args_; + Ranges ranges_; + Params params_; + unsigned distId_; + + private: + void extend (unsigned range_prod) + { + Params backup = params_; + params_.clear(); + params_.reserve (backup.size() * range_prod); + Params::const_iterator first = backup.begin(); + Params::const_iterator last = backup.end(); + for (; first != last; ++first) { + for (unsigned reps = 0; reps < range_prod; ++reps) { + params_.push_back (*first); 
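// Illustrative sketch (hypothetical values): multiply() computes a
// point-wise product over the union of the two argument lists, using
// MapIndexer to align g's parameters with this factor's layout (in the log
// domain the products become sums).  For example:
//
//   Factor f ({0, 1}, {2, 2}, {0.1, 0.2, 0.3, 0.4});
//   Factor g ({0},    {2},    {0.5, 2.0});
//   f.multiply (g);
//   // f still ranges over (var 0, var 1) and
//   // f.params() == {0.1*0.5, 0.2*0.5, 0.3*2.0, 0.4*2.0}
//   //            == {0.05, 0.10, 0.60, 0.80}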
+ } + } + } + + void cartesianProduct ( + Params::const_iterator first2, + Params::const_iterator last2) + { + Params backup = params_; + params_.clear(); + params_.reserve (params_.size() * (last2 - first2)); + Params::const_iterator first1 = backup.begin(); + Params::const_iterator last1 = backup.end(); + Params::const_iterator tmp; + if (Globals::logDomain) { + for (; first1 != last1; ++first1) { + for (tmp = first2; tmp != last2; ++tmp) { + params_.push_back ((*first1) + (*tmp)); + } + } + } else { + for (; first1 != last1; ++first1) { + for (tmp = first2; tmp != last2; ++tmp) { + params_.push_back ((*first1) * (*tmp)); + } + } + } + } + +}; + + + +class Factor : public TFactor +{ + public: + Factor (void) { } + + Factor (const Factor&); + + Factor (const VarIds&, const Ranges&, const Params&, + unsigned = Util::maxUnsigned()); + + Factor (const Vars&, const Params&, + unsigned = Util::maxUnsigned()); + + void sumOut (VarId); + + void sumOutAllExcept (VarId); + + void sumOutAllExcept (const VarIds&); + + void sumOutAllExceptIndex (size_t idx); + + void multiply (Factor&); + + string getLabel (void) const; + + void print (void) const; + + private: + void sumOutFirstVariable (void); + + void sumOutLastVariable (void); + + void sumOutArgs (const vector& mask); + + void clone (const Factor& f); + +}; + +#endif // HORUS_FACTOR_H + diff --git a/packages/CLPBN/horus2/FactorGraph.cpp b/packages/CLPBN/horus2/FactorGraph.cpp new file mode 100644 index 000000000..ba31a9faa --- /dev/null +++ b/packages/CLPBN/horus2/FactorGraph.cpp @@ -0,0 +1,454 @@ +#include +#include +#include + +#include +#include +#include + +#include "FactorGraph.h" +#include "Factor.h" +#include "BayesBall.h" +#include "Util.h" + + +FactorGraph::FactorGraph (const FactorGraph& fg) +{ + const VarNodes& varNodes = fg.varNodes(); + for (size_t i = 0; i < varNodes.size(); i++) { + addVarNode (new VarNode (varNodes[i])); + } + const FacNodes& facNodes = fg.facNodes(); + for (size_t i = 0; i < facNodes.size(); i++) { + FacNode* facNode = new FacNode (facNodes[i]->factor()); + addFacNode (facNode); + const VarNodes& neighs = facNodes[i]->neighbors(); + for (size_t j = 0; j < neighs.size(); j++) { + addEdge (varNodes_[neighs[j]->getIndex()], facNode); + } + } + bayesFactors_ = fg.bayesianFactors(); +} + + + +void +FactorGraph::readFromUaiFormat (const char* fileName) +{ + std::ifstream is (fileName); + if (!is.is_open()) { + cerr << "Error: couldn't open file '" << fileName << "'." ; + exit (EXIT_FAILURE); + } + ignoreLines (is); + string line; + getline (is, line); + if (line != "MARKOV") { + cerr << "Error: the network must be a MARKOV network." << endl; + exit (EXIT_FAILURE); + } + // read the number of vars + ignoreLines (is); + unsigned nrVars; + is >> nrVars; + // read the range of each var + ignoreLines (is); + Ranges ranges (nrVars); + for (unsigned i = 0; i < nrVars; i++) { + is >> ranges[i]; + } + unsigned nrFactors; + unsigned nrArgs; + unsigned vid; + is >> nrFactors; + vector factorVarIds; + vector factorRanges; + for (unsigned i = 0; i < nrFactors; i++) { + ignoreLines (is); + is >> nrArgs; + factorVarIds.push_back ({ }); + factorRanges.push_back ({ }); + for (unsigned j = 0; j < nrArgs; j++) { + is >> vid; + if (vid >= ranges.size()) { + cerr << "Error: invalid variable identifier `" << vid << "'. " ; + cerr << "Identifiers must be between 0 and " << ranges.size() - 1 ; + cerr << "." 
<< endl; + exit (EXIT_FAILURE); + } + factorVarIds.back().push_back (vid); + factorRanges.back().push_back (ranges[vid]); + } + } + // read the parameters + unsigned nrParams; + for (unsigned i = 0; i < nrFactors; i++) { + ignoreLines (is); + is >> nrParams; + if (nrParams != Util::sizeExpected (factorRanges[i])) { + cerr << "Error: invalid number of parameters for factor nº " << i ; + cerr << ", " << Util::sizeExpected (factorRanges[i]); + cerr << " expected, " << nrParams << " given." << endl; + exit (EXIT_FAILURE); + } + Params params (nrParams); + for (unsigned j = 0; j < nrParams; j++) { + is >> params[j]; + } + if (Globals::logDomain) { + Util::log (params); + } + addFactor (Factor (factorVarIds[i], factorRanges[i], params)); + } + is.close(); +} + + + +void +FactorGraph::readFromLibDaiFormat (const char* fileName) +{ + std::ifstream is (fileName); + if (!is.is_open()) { + cerr << "Error: couldn't open file '" << fileName << "'." ; + exit (EXIT_FAILURE); + } + ignoreLines (is); + unsigned nrFactors; + unsigned nrArgs; + VarId vid; + is >> nrFactors; + for (unsigned i = 0; i < nrFactors; i++) { + ignoreLines (is); + // read the factor arguments + is >> nrArgs; + VarIds vids; + for (unsigned j = 0; j < nrArgs; j++) { + ignoreLines (is); + is >> vid; + vids.push_back (vid); + } + // read ranges + Ranges ranges (nrArgs); + for (unsigned j = 0; j < nrArgs; j++) { + ignoreLines (is); + is >> ranges[j]; + VarNode* var = getVarNode (vids[j]); + if (var != 0 && ranges[j] != var->range()) { + cerr << "Error: variable `" << vids[j] << "' appears in two or " ; + cerr << "more factors with a different range." << endl; + } + } + // read parameters + ignoreLines (is); + unsigned nNonzeros; + is >> nNonzeros; + Params params (Util::sizeExpected (ranges), 0); + for (unsigned j = 0; j < nNonzeros; j++) { + ignoreLines (is); + unsigned index; + is >> index; + ignoreLines (is); + double val; + is >> val; + params[index] = val; + } + if (Globals::logDomain) { + Util::log (params); + } + std::reverse (vids.begin(), vids.end()); + Factor f (vids, ranges, params); + std::reverse (vids.begin(), vids.end()); + f.reorderArguments (vids); + addFactor (f); + } + is.close(); +} + + + +FactorGraph::~FactorGraph (void) +{ + for (size_t i = 0; i < varNodes_.size(); i++) { + delete varNodes_[i]; + } + for (size_t i = 0; i < facNodes_.size(); i++) { + delete facNodes_[i]; + } +} + + + +void +FactorGraph::addFactor (const Factor& factor) +{ + FacNode* fn = new FacNode (factor); + addFacNode (fn); + const VarIds& vids = fn->factor().arguments(); + for (size_t i = 0; i < vids.size(); i++) { + VarMap::const_iterator it = varMap_.find (vids[i]); + if (it != varMap_.end()) { + addEdge (it->second, fn); + } else { + VarNode* vn = new VarNode (vids[i], fn->factor().range (i)); + addVarNode (vn); + addEdge (vn, fn); + } + } +} + + + +void +FactorGraph::addVarNode (VarNode* vn) +{ + varNodes_.push_back (vn); + vn->setIndex (varNodes_.size() - 1); + varMap_.insert (make_pair (vn->varId(), vn)); +} + + + +void +FactorGraph::addFacNode (FacNode* fn) +{ + facNodes_.push_back (fn); + fn->setIndex (facNodes_.size() - 1); +} + + + +void +FactorGraph::addEdge (VarNode* vn, FacNode* fn) +{ + vn->addNeighbor (fn); + fn->addNeighbor (vn); +} + + + +bool +FactorGraph::isTree (void) const +{ + return !containsCycle(); +} + + + +BayesBallGraph& +FactorGraph::getStructure (void) +{ + assert (bayesFactors_); + if (structure_.empty()) { + for (size_t i = 0; i < varNodes_.size(); i++) { + structure_.addNode (new BBNode (varNodes_[i])); + } + for 
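// Illustrative sketch (hypothetical file, matching what readFromUaiFormat()
// above expects): a single pairwise factor over two binary variables.
//
//   MARKOV
//   2
//   2 2
//   1
//   2 0 1
//
//   4
//    0.2 0.8 0.4 0.6
//
// Lines starting with '#' and blank lines are skipped by ignoreLines().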
(size_t i = 0; i < facNodes_.size(); i++) { + const VarIds& vids = facNodes_[i]->factor().arguments(); + for (size_t j = 1; j < vids.size(); j++) { + structure_.addEdge (vids[j], vids[0]); + } + } + } + return structure_; +} + + + +void +FactorGraph::print (void) const +{ + for (size_t i = 0; i < varNodes_.size(); i++) { + cout << "var id = " << varNodes_[i]->varId() << endl; + cout << "label = " << varNodes_[i]->label() << endl; + cout << "range = " << varNodes_[i]->range() << endl; + cout << "evidence = " << varNodes_[i]->getEvidence() << endl; + cout << "factors = " ; + for (size_t j = 0; j < varNodes_[i]->neighbors().size(); j++) { + cout << varNodes_[i]->neighbors()[j]->getLabel() << " " ; + } + cout << endl << endl; + } + for (size_t i = 0; i < facNodes_.size(); i++) { + facNodes_[i]->factor().print(); + } +} + + + +void +FactorGraph::exportToGraphViz (const char* fileName) const +{ + ofstream out (fileName); + if (!out.is_open()) { + cerr << "Error: couldn't open file '" << fileName << "'." ; + return; + } + out << "graph \"" << fileName << "\" {" << endl; + for (size_t i = 0; i < varNodes_.size(); i++) { + if (varNodes_[i]->hasEvidence()) { + out << '"' << varNodes_[i]->label() << '"' ; + out << " [style=filled, fillcolor=yellow]" << endl; + } + } + for (size_t i = 0; i < facNodes_.size(); i++) { + out << '"' << facNodes_[i]->getLabel() << '"' ; + out << " [label=\"" << facNodes_[i]->getLabel(); + out << "\"" << ", shape=box]" << endl; + } + for (size_t i = 0; i < facNodes_.size(); i++) { + const VarNodes& myVars = facNodes_[i]->neighbors(); + for (size_t j = 0; j < myVars.size(); j++) { + out << '"' << facNodes_[i]->getLabel() << '"' ; + out << " -- " ; + out << '"' << myVars[j]->label() << '"' << endl; + } + } + out << "}" << endl; + out.close(); +} + + + +void +FactorGraph::exportToUaiFormat (const char* fileName) const +{ + ofstream out (fileName); + if (!out.is_open()) { + cerr << "Error: couldn't open file '" << fileName << "'." ; + return; + } + out << "MARKOV" << endl; + out << varNodes_.size() << endl; + VarNodes sortedVns = varNodes_; + std::sort (sortedVns.begin(), sortedVns.end(), sortByVarId()); + for (size_t i = 0; i < sortedVns.size(); i++) { + out << ((i != 0) ? " " : "") << sortedVns[i]->range(); + } + out << endl << facNodes_.size() << endl; + for (size_t i = 0; i < facNodes_.size(); i++) { + VarIds args = facNodes_[i]->factor().arguments(); + out << args.size() << " " << Util::elementsToString (args) << endl; + } + out << endl; + for (size_t i = 0; i < facNodes_.size(); i++) { + Params params = facNodes_[i]->factor().params(); + if (Globals::logDomain) { + Util::exp (params); + } + out << params.size() << endl << " " ; + out << Util::elementsToString (params) << endl << endl; + } + out.close(); +} + + + +void +FactorGraph::exportToLibDaiFormat (const char* fileName) const +{ + ofstream out (fileName); + if (!out.is_open()) { + cerr << "Error: couldn't open file '" << fileName << "'." 
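// Illustrative sketch (hypothetical file): the kind of libDAI .fg input
// that readFromLibDaiFormat() accepts -- factor count, then, per factor:
// arity, variable ids, ranges, and the number of non-zero entries followed
// by index/value pairs.  Reader and writer reverse the argument order
// because libDAI lists values with the first variable changing fastest,
// whereas Horus factors store the last argument fastest.
//
//   1
//
//   2
//   0 1
//   2 2
//   4
//   0 0.9
//   1 0.1
//   2 0.1
//   3 0.9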
; + return; + } + out << facNodes_.size() << endl << endl; + for (size_t i = 0; i < facNodes_.size(); i++) { + Factor f (facNodes_[i]->factor()); + out << f.nrArguments() << endl; + out << Util::elementsToString (f.arguments()) << endl; + out << Util::elementsToString (f.ranges()) << endl; + VarIds args = f.arguments(); + std::reverse (args.begin(), args.end()); + f.reorderArguments (args); + if (Globals::logDomain) { + Util::exp (f.params()); + } + out << f.size() << endl; + for (size_t j = 0; j < f.size(); j++) { + out << j << " " << f[j] << endl; + } + out << endl; + } + out.close(); +} + + + +void +FactorGraph::ignoreLines (std::ifstream& is) const +{ + string ignoreStr; + while (is.peek() == '#' || is.peek() == '\n') { + getline (is, ignoreStr); + } +} + + + +bool +FactorGraph::containsCycle (void) const +{ + vector visitedVars (varNodes_.size(), false); + vector visitedFactors (facNodes_.size(), false); + for (size_t i = 0; i < varNodes_.size(); i++) { + int v = varNodes_[i]->getIndex(); + if (!visitedVars[v]) { + if (containsCycle (varNodes_[i], 0, visitedVars, visitedFactors)) { + return true; + } + } + } + return false; +} + + + +bool +FactorGraph::containsCycle ( + const VarNode* v, + const FacNode* p, + vector& visitedVars, + vector& visitedFactors) const +{ + visitedVars[v->getIndex()] = true; + const FacNodes& adjacencies = v->neighbors(); + for (size_t i = 0; i < adjacencies.size(); i++) { + int w = adjacencies[i]->getIndex(); + if (!visitedFactors[w]) { + if (containsCycle (adjacencies[i], v, visitedVars, visitedFactors)) { + return true; + } + } + else if (visitedFactors[w] && adjacencies[i] != p) { + return true; + } + } + return false; // no cycle detected in this component +} + + + +bool +FactorGraph::containsCycle ( + const FacNode* v, + const VarNode* p, + vector& visitedVars, + vector& visitedFactors) const +{ + visitedFactors[v->getIndex()] = true; + const VarNodes& adjacencies = v->neighbors(); + for (size_t i = 0; i < adjacencies.size(); i++) { + int w = adjacencies[i]->getIndex(); + if (!visitedVars[w]) { + if (containsCycle (adjacencies[i], v, visitedVars, visitedFactors)) { + return true; + } + } + else if (visitedVars[w] && adjacencies[i] != p) { + return true; + } + } + return false; // no cycle detected in this component +} + diff --git a/packages/CLPBN/horus2/FactorGraph.h b/packages/CLPBN/horus2/FactorGraph.h new file mode 100644 index 000000000..b2b03369d --- /dev/null +++ b/packages/CLPBN/horus2/FactorGraph.h @@ -0,0 +1,150 @@ +#ifndef HORUS_FACTORGRAPH_H +#define HORUS_FACTORGRAPH_H + +#include + +#include "Factor.h" +#include "BayesBallGraph.h" +#include "Horus.h" + +using namespace std; + + +class FacNode; + +class VarNode : public Var +{ + public: + VarNode (VarId varId, unsigned nrStates, + int evidence = Constants::NO_EVIDENCE) + : Var (varId, nrStates, evidence) { } + + VarNode (const Var* v) : Var (v) { } + + void addNeighbor (FacNode* fn) { neighs_.push_back (fn); } + + const FacNodes& neighbors (void) const { return neighs_; } + + private: + DISALLOW_COPY_AND_ASSIGN (VarNode); + + FacNodes neighs_; +}; + + + +class FacNode +{ + public: + FacNode (const Factor& f) : factor_(f), index_(-1) { } + + const Factor& factor (void) const { return factor_; } + + Factor& factor (void) { return factor_; } + + void addNeighbor (VarNode* vn) { neighs_.push_back (vn); } + + const VarNodes& neighbors (void) const { return neighs_; } + + size_t getIndex (void) const { return index_; } + + void setIndex (size_t index) { index_ = index; } + + string getLabel 
(void) { return factor_.getLabel(); } + + private: + DISALLOW_COPY_AND_ASSIGN (FacNode); + + VarNodes neighs_; + Factor factor_; + size_t index_; +}; + + + +class FactorGraph +{ + public: + FactorGraph (void) : bayesFactors_(false) { } + + FactorGraph (const FactorGraph&); + + ~FactorGraph (void); + + const VarNodes& varNodes (void) const { return varNodes_; } + + const FacNodes& facNodes (void) const { return facNodes_; } + + void setFactorsAsBayesian (void) { bayesFactors_ = true; } + + bool bayesianFactors (void) const { return bayesFactors_; } + + size_t nrVarNodes (void) const { return varNodes_.size(); } + + size_t nrFacNodes (void) const { return facNodes_.size(); } + + VarNode* getVarNode (VarId vid) const + { + VarMap::const_iterator it = varMap_.find (vid); + return it != varMap_.end() ? it->second : 0; + } + + void readFromUaiFormat (const char*); + + void readFromLibDaiFormat (const char*); + + void addFactor (const Factor& factor); + + void addVarNode (VarNode*); + + void addFacNode (FacNode*); + + void addEdge (VarNode*, FacNode*); + + bool isTree (void) const; + + BayesBallGraph& getStructure (void); + + void print (void) const; + + void exportToGraphViz (const char*) const; + + void exportToUaiFormat (const char*) const; + + void exportToLibDaiFormat (const char*) const; + + private: + // DISALLOW_COPY_AND_ASSIGN (FactorGraph); + + void ignoreLines (std::ifstream&) const; + + bool containsCycle (void) const; + + bool containsCycle (const VarNode*, const FacNode*, + vector&, vector&) const; + + bool containsCycle (const FacNode*, const VarNode*, + vector&, vector&) const; + + VarNodes varNodes_; + FacNodes facNodes_; + + BayesBallGraph structure_; + bool bayesFactors_; + + typedef unordered_map VarMap; + VarMap varMap_; +}; + + + +struct sortByVarId +{ + bool operator()(VarNode* vn1, VarNode* vn2) { + return vn1->varId() < vn2->varId(); + } +}; + + +#endif // HORUS_FACTORGRAPH_H + diff --git a/packages/CLPBN/horus2/GroundSolver.cpp b/packages/CLPBN/horus2/GroundSolver.cpp new file mode 100644 index 000000000..4cd3fdbd2 --- /dev/null +++ b/packages/CLPBN/horus2/GroundSolver.cpp @@ -0,0 +1,107 @@ +#include "GroundSolver.h" +#include "Util.h" +#include "BeliefProp.h" +#include "CountingBp.h" +#include "VarElim.h" + + +void +GroundSolver::printAnswer (const VarIds& vids) +{ + Vars unobservedVars; + VarIds unobservedVids; + for (size_t i = 0; i < vids.size(); i++) { + VarNode* vn = fg.getVarNode (vids[i]); + if (vn->hasEvidence() == false) { + unobservedVars.push_back (vn); + unobservedVids.push_back (vids[i]); + } + } + if (unobservedVids.empty() == false) { + Params res = solveQuery (unobservedVids); + vector stateLines = Util::getStateLines (unobservedVars); + for (size_t i = 0; i < res.size(); i++) { + cout << "P(" << stateLines[i] << ") = " ; + cout << std::setprecision (Constants::PRECISION) << res[i]; + cout << endl; + } + cout << endl; + } +} + + + +void +GroundSolver::printAllPosterioris (void) +{ + VarNodes vars = fg.varNodes(); + std::sort (vars.begin(), vars.end(), sortByVarId()); + for (size_t i = 0; i < vars.size(); i++) { + printAnswer ({vars[i]->varId()}); + } +} + + + +Params +GroundSolver::getJointByConditioning ( + GroundSolverType solverType, + FactorGraph fg, + const VarIds& jointVarIds) const +{ + VarNodes jointVars; + for (size_t i = 0; i < jointVarIds.size(); i++) { + assert (fg.getVarNode (jointVarIds[i])); + jointVars.push_back (fg.getVarNode (jointVarIds[i])); + } + + GroundSolver* solver = 0; + switch (solverType) { + case GroundSolverType::BP: solver 
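// Illustrative sketch: getJointByConditioning() builds a joint distribution
// incrementally via the chain rule,
//   P(X1, X2, ..., Xn) = P(X1) * P(X2 | X1) * ... * P(Xn | X1, ..., Xn-1),
// clamping each previously queried variable as evidence, re-running the
// chosen ground solver for every evidence combination, and scaling the new
// beliefs by the joint computed so far (prevBeliefs).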
= new BeliefProp (fg); break; + case GroundSolverType::CBP: solver = new CountingBp (fg); break; + case GroundSolverType::VE: solver = new VarElim (fg); break; + } + Params prevBeliefs = solver->solveQuery ({jointVarIds[0]}); + VarIds observedVids = {jointVars[0]->varId()}; + + for (size_t i = 1; i < jointVarIds.size(); i++) { + assert (jointVars[i]->hasEvidence() == false); + Params newBeliefs; + Vars observedVars; + Ranges observedRanges; + for (size_t j = 0; j < observedVids.size(); j++) { + observedVars.push_back (fg.getVarNode (observedVids[j])); + observedRanges.push_back (observedVars.back()->range()); + } + Indexer indexer (observedRanges, false); + while (indexer.valid()) { + for (size_t j = 0; j < observedVars.size(); j++) { + observedVars[j]->setEvidence (indexer[j]); + } + delete solver; + switch (solverType) { + case GroundSolverType::BP: solver = new BeliefProp (fg); break; + case GroundSolverType::CBP: solver = new CountingBp (fg); break; + case GroundSolverType::VE: solver = new VarElim (fg); break; + } + Params beliefs = solver->solveQuery ({jointVarIds[i]}); + for (size_t k = 0; k < beliefs.size(); k++) { + newBeliefs.push_back (beliefs[k]); + } + ++ indexer; + } + + int count = -1; + for (size_t j = 0; j < newBeliefs.size(); j++) { + if (j % jointVars[i]->range() == 0) { + count ++; + } + newBeliefs[j] *= prevBeliefs[count]; + } + prevBeliefs = newBeliefs; + observedVids.push_back (jointVars[i]->varId()); + } + delete solver; + return prevBeliefs; +} + diff --git a/packages/CLPBN/horus2/GroundSolver.h b/packages/CLPBN/horus2/GroundSolver.h new file mode 100644 index 000000000..18b81454b --- /dev/null +++ b/packages/CLPBN/horus2/GroundSolver.h @@ -0,0 +1,36 @@ +#ifndef HORUS_GROUNDSOLVER_H +#define HORUS_GROUNDSOLVER_H + +#include + +#include "FactorGraph.h" +#include "Var.h" +#include "Horus.h" + + +using namespace std; + +class GroundSolver +{ + public: + GroundSolver (const FactorGraph& factorGraph) : fg(factorGraph) { } + + virtual ~GroundSolver() { } // ensure that subclass destructor is called + + virtual Params solveQuery (VarIds queryVids) = 0; + + virtual void printSolverFlags (void) const = 0; + + void printAnswer (const VarIds& vids); + + void printAllPosterioris (void); + + Params getJointByConditioning (GroundSolverType, + FactorGraph, const VarIds& jointVarIds) const; + + protected: + const FactorGraph& fg; +}; + +#endif // HORUS_GROUNDSOLVER_H + diff --git a/packages/CLPBN/horus2/Histogram.cpp b/packages/CLPBN/horus2/Histogram.cpp new file mode 100644 index 000000000..a9e96cfdd --- /dev/null +++ b/packages/CLPBN/horus2/Histogram.cpp @@ -0,0 +1,146 @@ +#include + +#include +#include + +#include "Histogram.h" +#include "Util.h" + + +HistogramSet::HistogramSet (unsigned size, unsigned range) +{ + size_ = size; + hist_.resize (range, 0); + hist_[0] = size; +} + + + +void +HistogramSet::nextHistogram (void) +{ + for (size_t i = hist_.size() - 1; i-- > 0; ) { + if (hist_[i] > 0) { + hist_[i] --; + hist_[i + 1] = maxCount (i + 1); + clearAfter (i + 1); + break; + } + } + assert (std::accumulate (hist_.begin(), hist_.end(), 0) + == (int) size_); +} + + + +unsigned +HistogramSet::operator[] (size_t idx) const +{ + assert (idx < hist_.size()); + return hist_[idx]; +} + + + +unsigned +HistogramSet::nrHistograms (void) const +{ + return HistogramSet::nrHistograms (size_, hist_.size()); +} + + + +void +HistogramSet::reset (void) +{ + std::fill (hist_.begin() + 1, hist_.end(), 0); + hist_[0] = size_; +} + + + +vector +HistogramSet::getHistograms (unsigned N, unsigned R) 
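// Illustrative sketch (hypothetical numbers): a histogram distributes N
// indistinguishable balls over R bins, so there are C(N + R - 1, R - 1)
// of them.  For N = 2, R = 3: C(4, 2) = 6 histograms, generated in the
// order (2,0,0) (1,1,0) (1,0,1) (0,2,0) (0,1,1) (0,0,2).  getNumAssigns()
// weighs each histogram by the multinomial N! / (h1! * ... * hR!), e.g.
// (1,1,0) corresponds to 2! / (1! * 1! * 0!) = 2 distinct assignments.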
+{ + HistogramSet hs (N, R); + unsigned H = hs.nrHistograms(); + vector histograms; + histograms.reserve (H); + for (unsigned i = 0; i < H; i++) { + histograms.push_back (hs.hist_); + hs.nextHistogram(); + } + return histograms; +} + + + +unsigned +HistogramSet::nrHistograms (unsigned N, unsigned R) +{ + return Util::nrCombinations (N + R - 1, R - 1); +} + + + +size_t +HistogramSet::findIndex ( + const Histogram& h, + const vector& hists) +{ + vector::const_iterator it = std::lower_bound ( + hists.begin(), hists.end(), h, std::greater()); + assert (it != hists.end() && *it == h); + return std::distance (hists.begin(), it); +} + + + +vector +HistogramSet::getNumAssigns (unsigned N, unsigned R) +{ + HistogramSet hs (N, R); + double N_fac = Util::logFactorial (N); + unsigned H = hs.nrHistograms(); + vector numAssigns; + numAssigns.reserve (H); + for (unsigned h = 0; h < H; h++) { + double prod = 0.0; + for (unsigned r = 0; r < R; r++) { + prod += Util::logFactorial (hs[r]); + } + double res = N_fac - prod; + numAssigns.push_back (Globals::logDomain ? res : std::exp (res)); + hs.nextHistogram(); + } + return numAssigns; +} + + + +ostream& operator<< (ostream &os, const HistogramSet& hs) +{ + os << "#" << hs.hist_; + return os; +} + + + +unsigned +HistogramSet::maxCount (size_t idx) const +{ + unsigned sum = 0; + for (size_t i = 0; i < idx; i++) { + sum += hist_[i]; + } + return size_ - sum; +} + + + +void +HistogramSet::clearAfter (size_t idx) +{ + std::fill (hist_.begin() + idx + 1, hist_.end(), 0); +} + diff --git a/packages/CLPBN/horus2/Histogram.h b/packages/CLPBN/horus2/Histogram.h new file mode 100644 index 000000000..af0c4595e --- /dev/null +++ b/packages/CLPBN/horus2/Histogram.h @@ -0,0 +1,45 @@ +#ifndef HORUS_HISTOGRAM_H +#define HORUS_HISTOGRAM_H + +#include +#include + +using namespace std; + +typedef vector Histogram; + +class HistogramSet +{ + public: + HistogramSet (unsigned, unsigned); + + void nextHistogram (void); + + unsigned operator[] (size_t idx) const; + + unsigned nrHistograms (void) const; + + void reset (void); + + static vector getHistograms (unsigned ,unsigned); + + static unsigned nrHistograms (unsigned, unsigned); + + static size_t findIndex ( + const Histogram&, const vector&); + + static vector getNumAssigns (unsigned, unsigned); + + friend std::ostream& operator<< (ostream &os, const HistogramSet& hs); + + private: + unsigned maxCount (size_t) const; + + void clearAfter (size_t); + + unsigned size_; + Histogram hist_; +}; + +#endif // HORUS_HISTOGRAM_H + diff --git a/packages/CLPBN/horus2/Horus.h b/packages/CLPBN/horus2/Horus.h new file mode 100644 index 000000000..7e5f12c8e --- /dev/null +++ b/packages/CLPBN/horus2/Horus.h @@ -0,0 +1,87 @@ +#ifndef HORUS_HORUS_H +#define HORUS_HORUS_H + +#include + +#include + +#define DISALLOW_COPY_AND_ASSIGN(TypeName) \ + TypeName(const TypeName&); \ + void operator=(const TypeName&) + +using namespace std; + +class Var; +class Factor; +class VarNode; +class FacNode; + +typedef vector Params; +typedef unsigned VarId; +typedef vector VarIds; +typedef vector Vars; +typedef vector VarNodes; +typedef vector FacNodes; +typedef vector Factors; +typedef vector States; +typedef vector Ranges; +typedef unsigned long long ullong; + + +enum LiftedSolverType +{ + LVE, // generalized counting first-order variable elimination (GC-FOVE) + LBP, // lifted first-order belief propagation + LKC // lifted first-order knowledge compilation +}; + + +enum GroundSolverType +{ + VE, // variable elimination + BP, // belief propagation + CBP // counting 
belief propagation +}; + + +namespace Globals { + +extern bool logDomain; + +// level of debug information +extern unsigned verbosity; + +extern LiftedSolverType liftedSolver; +extern GroundSolverType groundSolver; + +}; + + +namespace Constants { + +// show message calculation for belief propagation +const bool SHOW_BP_CALCS = false; + +const int NO_EVIDENCE = -1; + +// number of digits to show when printing a parameter +const unsigned PRECISION = 6; + +}; + + +namespace BpOptions +{ + enum Schedule { + SEQ_FIXED, + SEQ_RANDOM, + PARALLEL, + MAX_RESIDUAL + }; + extern Schedule schedule; + extern double accuracy; + extern unsigned maxIter; +} + +#endif // HORUS_HORUS_H + diff --git a/packages/CLPBN/horus2/HorusCli.cpp b/packages/CLPBN/horus2/HorusCli.cpp new file mode 100644 index 000000000..520603052 --- /dev/null +++ b/packages/CLPBN/horus2/HorusCli.cpp @@ -0,0 +1,187 @@ +#include + +#include +#include + +#include "FactorGraph.h" +#include "VarElim.h" +#include "BeliefProp.h" +#include "CountingBp.h" + +using namespace std; + +int readHorusFlags (int, const char* []); +void readFactorGraph (FactorGraph&, const char*); +VarIds readQueryAndEvidence (FactorGraph&, int, const char* [], int); + +void runSolver (const FactorGraph&, const VarIds&); + +const string USAGE = "usage: ./hcli [HORUS_FLAG=VALUE] \ +MODEL_FILE [VARIABLE | OBSERVED_VARIABLE=EVIDENCE] ..." ; + + +int +main (int argc, const char* argv[]) +{ + if (argc <= 1) { + cerr << "Error: no probabilistic graphical model was given." << endl; + cerr << USAGE << endl; + exit (EXIT_FAILURE); + } + int idx = readHorusFlags (argc, argv); + FactorGraph fg; + readFactorGraph (fg, argv[idx]); + VarIds queryIds = readQueryAndEvidence (fg, argc, argv, idx + 1); + runSolver (fg, queryIds); + return 0; +} + + + +int +readHorusFlags (int argc, const char* argv[]) +{ + int i = 1; + for (; i < argc; i++) { + const string& arg = argv[i]; + size_t pos = arg.find ('='); + if (pos == std::string::npos) { + return i; + } + string leftArg = arg.substr (0, pos); + string rightArg = arg.substr (pos + 1); + if (leftArg.empty()) { + cerr << "Error: missing left argument." << endl; + cerr << USAGE << endl; + exit (EXIT_FAILURE); + } + if (rightArg.empty()) { + cerr << "Error: missing right argument." << endl; + cerr << USAGE << endl; + exit (EXIT_FAILURE); + } + Util::setHorusFlag (leftArg, rightArg); + } + return i + 1; +} + + + +void +readFactorGraph (FactorGraph& fg, const char* s) +{ + string fileName (s); + string extension = fileName.substr (fileName.find_last_of ('.') + 1); + if (extension == "uai") { + fg.readFromUaiFormat (fileName.c_str()); + } else if (extension == "fg") { + fg.readFromLibDaiFormat (fileName.c_str()); + } else { + cerr << "Error: the probabilistic graphical model must be " ; + cerr << "defined either in a UAI or libDAI file." << endl; + exit (EXIT_FAILURE); + } +} + + + +VarIds +readQueryAndEvidence ( + FactorGraph& fg, + int argc, + const char* argv[], + int start) +{ + VarIds queryIds; + for (int i = start; i < argc; i++) { + const string& arg = argv[i]; + if (arg.find ('=') == std::string::npos) { + if (Util::isInteger (arg) == false) { + cerr << "Error: `" << arg << "' " ; + cerr << "is not a variable id." ; + cerr << endl; + exit (EXIT_FAILURE); + } + VarId vid = Util::stringToUnsigned (arg); + VarNode* queryVar = fg.getVarNode (vid); + if (queryVar == false) { + cerr << "Error: unknow variable with id " ; + cerr << "`" << vid << "'." 
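// Illustrative sketch (hypothetical model file name): following the USAGE
// string above, querying variable 0 while observing variable 2 in state 1
// would be
//
//   ./hcli model.uai 0 2=1
//
// Any KEY=VALUE arguments placed before the model file are handed to
// Util::setHorusFlag(); keys such as "verbosity" or "accuracy" (seen in
// HorusYap.cpp) are assumed to be accepted here as well, e.g.
//
//   ./hcli verbosity=1 model.uai 0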
<< endl; + exit (EXIT_FAILURE); + } + queryIds.push_back (vid); + } else { + size_t pos = arg.find ('='); + string leftArg = arg.substr (0, pos); + string rightArg = arg.substr (pos + 1); + if (leftArg.empty()) { + cerr << "Error: missing left argument." << endl; + cerr << USAGE << endl; + exit (EXIT_FAILURE); + } + if (Util::isInteger (leftArg) == false) { + cerr << "Error: `" << leftArg << "' " ; + cerr << "is not a variable id." << endl ; + exit (EXIT_FAILURE); + } + VarId vid = Util::stringToUnsigned (leftArg); + VarNode* observedVar = fg.getVarNode (vid); + if (observedVar == false) { + cerr << "Error: unknow variable with id " ; + cerr << "`" << vid << "'." << endl; + exit (EXIT_FAILURE); + } + if (rightArg.empty()) { + cerr << "Error: missing right argument." << endl; + cerr << USAGE << endl; + exit (EXIT_FAILURE); + } + if (Util::isInteger (rightArg) == false) { + cerr << "Error: `" << rightArg << "' " ; + cerr << "is not a state index." << endl ; + exit (EXIT_FAILURE); + } + unsigned stateIdx = Util::stringToUnsigned (rightArg); + if (observedVar->isValidState (stateIdx) == false) { + cerr << "Error: `" << stateIdx << "' " ; + cerr << "is not a valid state index for variable with id " ; + cerr << "`" << vid << "'." << endl; + exit (EXIT_FAILURE); + } + observedVar->setEvidence (stateIdx); + } + } + return queryIds; +} + + + +void +runSolver (const FactorGraph& fg, const VarIds& queryIds) +{ + GroundSolver* solver = 0; + switch (Globals::groundSolver) { + case GroundSolverType::VE: + solver = new VarElim (fg); + break; + case GroundSolverType::BP: + solver = new BeliefProp (fg); + break; + case GroundSolverType::CBP: + solver = new CountingBp (fg); + break; + default: + assert (false); + } + if (Globals::verbosity > 0) { + solver->printSolverFlags(); + cout << endl; + } + if (queryIds.empty()) { + solver->printAllPosterioris(); + } else { + solver->printAnswer (queryIds); + } + delete solver; +} + diff --git a/packages/CLPBN/horus2/HorusYap.cpp b/packages/CLPBN/horus2/HorusYap.cpp new file mode 100644 index 000000000..3c566b73a --- /dev/null +++ b/packages/CLPBN/horus2/HorusYap.cpp @@ -0,0 +1,570 @@ +#include + +#include + +#include +#include + +#include + +#include "ParfactorList.h" +#include "FactorGraph.h" +#include "LiftedOperations.h" +#include "LiftedVe.h" +#include "VarElim.h" +#include "LiftedBp.h" +#include "CountingBp.h" +#include "BeliefProp.h" +#include "LiftedKc.h" +#include "ElimGraph.h" +#include "BayesBall.h" + + +using namespace std; + +typedef std::pair LiftedNetwork; + +Parfactor* readParfactor (YAP_Term); + +void readLiftedEvidence (YAP_Term, ObservedFormulas&); + +vector readUnsignedList (YAP_Term list); + +Params readParameters (YAP_Term); + +YAP_Term fillAnswersPrologList (vector& results); + + + +int +createLiftedNetwork (void) +{ + Parfactors parfactors; + YAP_Term parfactorList = YAP_ARG1; + while (parfactorList != YAP_TermNil()) { + YAP_Term pfTerm = YAP_HeadOfTerm (parfactorList); + parfactors.push_back (readParfactor (pfTerm)); + parfactorList = YAP_TailOfTerm (parfactorList); + } + + // LiftedUtils::printSymbolDictionary(); + if (Globals::verbosity > 2) { + Util::printHeader ("INITIAL PARFACTORS"); + for (size_t i = 0; i < parfactors.size(); i++) { + parfactors[i]->print(); + cout << endl; + } + } + + ParfactorList* pfList = new ParfactorList (parfactors); + + if (Globals::verbosity > 2) { + Util::printHeader ("SHATTERED PARFACTORS"); + pfList->print(); + } + + // read evidence + ObservedFormulas* obsFormulas = new ObservedFormulas(); + 
readLiftedEvidence (YAP_ARG2, *(obsFormulas)); + + LiftedNetwork* net = new LiftedNetwork (pfList, obsFormulas); + + YAP_Int p = (YAP_Int) (net); + return YAP_Unify (YAP_MkIntTerm (p), YAP_ARG3); +} + + + +int +createGroundNetwork (void) +{ + string factorsType ((char*) YAP_AtomName (YAP_AtomOfTerm (YAP_ARG1))); + FactorGraph* fg = new FactorGraph(); + if (factorsType == "bayes") { + fg->setFactorsAsBayesian(); + } + YAP_Term factorList = YAP_ARG2; + while (factorList != YAP_TermNil()) { + YAP_Term factor = YAP_HeadOfTerm (factorList); + // read the var ids + VarIds varIds = readUnsignedList (YAP_ArgOfTerm (1, factor)); + // read the ranges + Ranges ranges = readUnsignedList (YAP_ArgOfTerm (2, factor)); + // read the parameters + Params params = readParameters (YAP_ArgOfTerm (3, factor)); + // read dist id + unsigned distId = (unsigned) YAP_IntOfTerm (YAP_ArgOfTerm (4, factor)); + fg->addFactor (Factor (varIds, ranges, params, distId)); + factorList = YAP_TailOfTerm (factorList); + } + unsigned nrObservedVars = 0; + YAP_Term evidenceList = YAP_ARG3; + while (evidenceList != YAP_TermNil()) { + YAP_Term evTerm = YAP_HeadOfTerm (evidenceList); + unsigned vid = (unsigned) YAP_IntOfTerm ((YAP_ArgOfTerm (1, evTerm))); + unsigned ev = (unsigned) YAP_IntOfTerm ((YAP_ArgOfTerm (2, evTerm))); + assert (fg->getVarNode (vid)); + fg->getVarNode (vid)->setEvidence (ev); + evidenceList = YAP_TailOfTerm (evidenceList); + nrObservedVars ++; + } + if (Globals::verbosity > 0) { + cout << "factor graph contains " ; + cout << fg->nrVarNodes() << " variables " ; + cout << "(" << nrObservedVars << " observed) and " ; + cout << fg->nrFacNodes() << " factors " << endl; + } + YAP_Int p = (YAP_Int) (fg); + return YAP_Unify (YAP_MkIntTerm (p), YAP_ARG4); +} + + + +int +runLiftedSolver (void) +{ + LiftedNetwork* network = (LiftedNetwork*) YAP_IntOfTerm (YAP_ARG1); + ParfactorList pfListCopy (*network->first); + LiftedOperations::absorveEvidence (pfListCopy, *network->second); + + LiftedSolver* solver = 0; + switch (Globals::liftedSolver) { + case LiftedSolverType::LVE: solver = new LiftedVe (pfListCopy); break; + case LiftedSolverType::LBP: solver = new LiftedBp (pfListCopy); break; + case LiftedSolverType::LKC: solver = new LiftedKc (pfListCopy); break; + } + + if (Globals::verbosity > 0) { + solver->printSolverFlags(); + cout << endl; + } + + YAP_Term taskList = YAP_ARG2; + vector results; + while (taskList != YAP_TermNil()) { + Grounds queryVars; + YAP_Term jointList = YAP_HeadOfTerm (taskList); + while (jointList != YAP_TermNil()) { + YAP_Term ground = YAP_HeadOfTerm (jointList); + if (YAP_IsAtomTerm (ground)) { + string name ((char*) YAP_AtomName (YAP_AtomOfTerm (ground))); + queryVars.push_back (Ground (LiftedUtils::getSymbol (name))); + } else { + assert (YAP_IsApplTerm (ground)); + YAP_Functor yapFunctor = YAP_FunctorOfTerm (ground); + string name ((char*) (YAP_AtomName (YAP_NameOfFunctor (yapFunctor)))); + unsigned arity = (unsigned) YAP_ArityOfFunctor (yapFunctor); + Symbol functor = LiftedUtils::getSymbol (name); + Symbols args; + for (unsigned i = 1; i <= arity; i++) { + YAP_Term ti = YAP_ArgOfTerm (i, ground); + assert (YAP_IsAtomTerm (ti)); + string arg ((char *) YAP_AtomName (YAP_AtomOfTerm (ti))); + args.push_back (LiftedUtils::getSymbol (arg)); + } + queryVars.push_back (Ground (functor, args)); + } + jointList = YAP_TailOfTerm (jointList); + } + results.push_back (solver->solveQuery (queryVars)); + taskList = YAP_TailOfTerm (taskList); + } + + delete solver; + + return YAP_Unify 
(fillAnswersPrologList (results), YAP_ARG3); +} + + + +int +runGroundSolver (void) +{ + FactorGraph* fg = (FactorGraph*) YAP_IntOfTerm (YAP_ARG1); + + vector tasks; + YAP_Term taskList = YAP_ARG2; + while (taskList != YAP_TermNil()) { + tasks.push_back (readUnsignedList (YAP_HeadOfTerm (taskList))); + taskList = YAP_TailOfTerm (taskList); + } + + FactorGraph* mfg = fg; + if (fg->bayesianFactors()) { + std::set vids; + for (size_t i = 0; i < tasks.size(); i++) { + Util::addToSet (vids, tasks[i]); + } + mfg = BayesBall::getMinimalFactorGraph ( + *fg, VarIds (vids.begin(), vids.end())); + } + + GroundSolver* solver = 0; + CountingBp::checkForIdenticalFactors = false; + switch (Globals::groundSolver) { + case GroundSolverType::VE: solver = new VarElim (*mfg); break; + case GroundSolverType::BP: solver = new BeliefProp (*mfg); break; + case GroundSolverType::CBP: solver = new CountingBp (*mfg); break; + } + + if (Globals::verbosity > 0) { + solver->printSolverFlags(); + cout << endl; + } + + vector results; + results.reserve (tasks.size()); + for (size_t i = 0; i < tasks.size(); i++) { + results.push_back (solver->solveQuery (tasks[i])); + } + + delete solver; + if (fg->bayesianFactors()) { + delete mfg; + } + + return YAP_Unify (fillAnswersPrologList (results), YAP_ARG3); +} + + + +int +setParfactorsParams (void) +{ + LiftedNetwork* network = (LiftedNetwork*) YAP_IntOfTerm (YAP_ARG1); + ParfactorList* pfList = network->first; + YAP_Term distIdsList = YAP_ARG2; + YAP_Term paramsList = YAP_ARG3; + unordered_map paramsMap; + while (distIdsList != YAP_TermNil()) { + unsigned distId = (unsigned) YAP_IntOfTerm ( + YAP_HeadOfTerm (distIdsList)); + assert (Util::contains (paramsMap, distId) == false); + paramsMap[distId] = readParameters (YAP_HeadOfTerm (paramsList)); + distIdsList = YAP_TailOfTerm (distIdsList); + paramsList = YAP_TailOfTerm (paramsList); + } + ParfactorList::iterator it = pfList->begin(); + while (it != pfList->end()) { + assert (Util::contains (paramsMap, (*it)->distId())); + (*it)->setParams (paramsMap[(*it)->distId()]); + ++ it; + } + return TRUE; +} + + + +int +setFactorsParams (void) +{ + FactorGraph* fg = (FactorGraph*) YAP_IntOfTerm (YAP_ARG1); + YAP_Term distIdsList = YAP_ARG2; + YAP_Term paramsList = YAP_ARG3; + unordered_map paramsMap; + while (distIdsList != YAP_TermNil()) { + unsigned distId = (unsigned) YAP_IntOfTerm ( + YAP_HeadOfTerm (distIdsList)); + assert (Util::contains (paramsMap, distId) == false); + paramsMap[distId] = readParameters (YAP_HeadOfTerm (paramsList)); + distIdsList = YAP_TailOfTerm (distIdsList); + paramsList = YAP_TailOfTerm (paramsList); + } + const FacNodes& facNodes = fg->facNodes(); + for (size_t i = 0; i < facNodes.size(); i++) { + unsigned distId = facNodes[i]->factor().distId(); + assert (Util::contains (paramsMap, distId)); + facNodes[i]->factor().setParams (paramsMap[distId]); + } + return TRUE; +} + + + +int +setVarsInformation (void) +{ + Var::clearVarsInfo(); + vector labels; + YAP_Term labelsL = YAP_ARG1; + while (labelsL != YAP_TermNil()) { + YAP_Atom atom = YAP_AtomOfTerm (YAP_HeadOfTerm (labelsL)); + labels.push_back ((char*) YAP_AtomName (atom)); + labelsL = YAP_TailOfTerm (labelsL); + } + unsigned count = 0; + YAP_Term stateNamesL = YAP_ARG2; + while (stateNamesL != YAP_TermNil()) { + States states; + YAP_Term namesL = YAP_HeadOfTerm (stateNamesL); + while (namesL != YAP_TermNil()) { + YAP_Atom atom = YAP_AtomOfTerm (YAP_HeadOfTerm (namesL)); + states.push_back ((char*) YAP_AtomName (atom)); + namesL = YAP_TailOfTerm (namesL); + 
} + Var::addVarInfo (count, labels[count], states); + count ++; + stateNamesL = YAP_TailOfTerm (stateNamesL); + } + return TRUE; +} + + + +int +setHorusFlag (void) +{ + string key ((char*) YAP_AtomName (YAP_AtomOfTerm (YAP_ARG1))); + string value; + if (key == "verbosity") { + stringstream ss; + ss << (int) YAP_IntOfTerm (YAP_ARG2); + ss >> value; + } else if (key == "accuracy") { + stringstream ss; + ss << (float) YAP_FloatOfTerm (YAP_ARG2); + ss >> value; + } else if (key == "max_iter") { + stringstream ss; + ss << (int) YAP_IntOfTerm (YAP_ARG2); + ss >> value; + } else { + value = ((char*) YAP_AtomName (YAP_AtomOfTerm (YAP_ARG2))); + } + return Util::setHorusFlag (key, value); +} + + + +int +freeGroundNetwork (void) +{ + delete (FactorGraph*) YAP_IntOfTerm (YAP_ARG1); + return TRUE; +} + + + +int +freeLiftedNetwork (void) +{ + LiftedNetwork* network = (LiftedNetwork*) YAP_IntOfTerm (YAP_ARG1); + delete network->first; + delete network->second; + delete network; + return TRUE; +} + + + +Parfactor* +readParfactor (YAP_Term pfTerm) +{ + // read dist id + unsigned distId = YAP_IntOfTerm (YAP_ArgOfTerm (1, pfTerm)); + + // read the ranges + Ranges ranges; + YAP_Term rangeList = YAP_ArgOfTerm (3, pfTerm); + while (rangeList != YAP_TermNil()) { + unsigned range = (unsigned) YAP_IntOfTerm (YAP_HeadOfTerm (rangeList)); + ranges.push_back (range); + rangeList = YAP_TailOfTerm (rangeList); + } + + // read parametric random vars + ProbFormulas formulas; + unsigned count = 0; + unordered_map lvMap; + YAP_Term pvList = YAP_ArgOfTerm (2, pfTerm); + while (pvList != YAP_TermNil()) { + YAP_Term formulaTerm = YAP_HeadOfTerm (pvList); + if (YAP_IsAtomTerm (formulaTerm)) { + string name ((char*) YAP_AtomName (YAP_AtomOfTerm (formulaTerm))); + Symbol functor = LiftedUtils::getSymbol (name); + formulas.push_back (ProbFormula (functor, ranges[count])); + } else { + LogVars logVars; + YAP_Functor yapFunctor = YAP_FunctorOfTerm (formulaTerm); + string name ((char*) YAP_AtomName (YAP_NameOfFunctor (yapFunctor))); + Symbol functor = LiftedUtils::getSymbol (name); + unsigned arity = (unsigned) YAP_ArityOfFunctor (yapFunctor); + for (unsigned i = 1; i <= arity; i++) { + YAP_Term ti = YAP_ArgOfTerm (i, formulaTerm); + unordered_map::iterator it = lvMap.find (ti); + if (it != lvMap.end()) { + logVars.push_back (it->second); + } else { + unsigned newLv = lvMap.size(); + lvMap[ti] = newLv; + logVars.push_back (newLv); + } + } + formulas.push_back (ProbFormula (functor, logVars, ranges[count])); + } + count ++; + pvList = YAP_TailOfTerm (pvList); + } + + // read the parameters + const Params& params = readParameters (YAP_ArgOfTerm (4, pfTerm)); + + // read the constraint + Tuples tuples; + if (lvMap.size() >= 1) { + YAP_Term tupleList = YAP_ArgOfTerm (5, pfTerm); + while (tupleList != YAP_TermNil()) { + YAP_Term term = YAP_HeadOfTerm (tupleList); + assert (YAP_IsApplTerm (term)); + YAP_Functor yapFunctor = YAP_FunctorOfTerm (term); + unsigned arity = (unsigned) YAP_ArityOfFunctor (yapFunctor); + assert (lvMap.size() == arity); + Tuple tuple (arity); + for (unsigned i = 1; i <= arity; i++) { + YAP_Term ti = YAP_ArgOfTerm (i, term); + if (YAP_IsAtomTerm (ti) == false) { + cerr << "Error: the constraint contains free variables." 
<< endl; + exit (EXIT_FAILURE); + } + string name ((char*) YAP_AtomName (YAP_AtomOfTerm (ti))); + tuple[i - 1] = LiftedUtils::getSymbol (name); + } + tuples.push_back (tuple); + tupleList = YAP_TailOfTerm (tupleList); + } + } + return new Parfactor (formulas, params, tuples, distId); +} + + + +void +readLiftedEvidence ( + YAP_Term observedList, + ObservedFormulas& obsFormulas) +{ + while (observedList != YAP_TermNil()) { + YAP_Term pair = YAP_HeadOfTerm (observedList); + YAP_Term ground = YAP_ArgOfTerm (1, pair); + Symbol functor; + Symbols args; + if (YAP_IsAtomTerm (ground)) { + string name ((char*) YAP_AtomName (YAP_AtomOfTerm (ground))); + functor = LiftedUtils::getSymbol (name); + } else { + assert (YAP_IsApplTerm (ground)); + YAP_Functor yapFunctor = YAP_FunctorOfTerm (ground); + string name ((char*) (YAP_AtomName (YAP_NameOfFunctor (yapFunctor)))); + functor = LiftedUtils::getSymbol (name); + unsigned arity = (unsigned) YAP_ArityOfFunctor (yapFunctor); + for (unsigned i = 1; i <= arity; i++) { + YAP_Term ti = YAP_ArgOfTerm (i, ground); + assert (YAP_IsAtomTerm (ti)); + string arg ((char *) YAP_AtomName (YAP_AtomOfTerm (ti))); + args.push_back (LiftedUtils::getSymbol (arg)); + } + } + unsigned evidence = (unsigned) YAP_IntOfTerm (YAP_ArgOfTerm (2, pair)); + bool found = false; + for (size_t i = 0; i < obsFormulas.size(); i++) { + if (obsFormulas[i].functor() == functor && + obsFormulas[i].arity() == args.size() && + obsFormulas[i].evidence() == evidence) { + obsFormulas[i].addTuple (args); + found = true; + } + } + if (found == false) { + obsFormulas.push_back (ObservedFormula (functor, evidence, args)); + } + observedList = YAP_TailOfTerm (observedList); + } +} + + + +vector +readUnsignedList (YAP_Term list) +{ + vector vec; + while (list != YAP_TermNil()) { + vec.push_back ((unsigned) YAP_IntOfTerm (YAP_HeadOfTerm (list))); + list = YAP_TailOfTerm (list); + } + return vec; +} + + + +Params +readParameters (YAP_Term paramL) +{ + Params params; + assert (YAP_IsPairTerm (paramL)); + while (paramL != YAP_TermNil()) { + params.push_back ((double) YAP_FloatOfTerm (YAP_HeadOfTerm (paramL))); + paramL = YAP_TailOfTerm (paramL); + } + if (Globals::logDomain) { + Util::log (params); + } + return params; +} + + + +YAP_Term +fillAnswersPrologList (vector& results) +{ + YAP_Term list = YAP_TermNil(); + for (size_t i = results.size(); i-- > 0; ) { + const Params& beliefs = results[i]; + YAP_Term queryBeliefsL = YAP_TermNil(); + for (size_t j = beliefs.size(); j-- > 0; ) { + YAP_Int sl1 = YAP_InitSlot (list); + YAP_Term belief = YAP_MkFloatTerm (beliefs[j]); + queryBeliefsL = YAP_MkPairTerm (belief, queryBeliefsL); + list = YAP_GetFromSlot (sl1); + YAP_RecoverSlots (1); + } + list = YAP_MkPairTerm (queryBeliefsL, list); + } + return list; +} + + + +extern "C" void +init_predicates (void) +{ + YAP_UserCPredicate ("cpp_create_lifted_network", + createLiftedNetwork, 3); + + YAP_UserCPredicate ("cpp_create_ground_network", + createGroundNetwork, 4); + + YAP_UserCPredicate ("cpp_run_lifted_solver", + runLiftedSolver, 3); + + YAP_UserCPredicate ("cpp_run_ground_solver", + runGroundSolver, 3); + + YAP_UserCPredicate ("cpp_set_parfactors_params", + setParfactorsParams, 3); + + YAP_UserCPredicate ("cpp_set_factors_params", + setFactorsParams, 3); + + YAP_UserCPredicate ("cpp_set_vars_information", + setVarsInformation, 2); + + YAP_UserCPredicate ("cpp_set_horus_flag", + setHorusFlag, 2); + + YAP_UserCPredicate ("cpp_free_lifted_network", + freeLiftedNetwork, 1); + + YAP_UserCPredicate 
("cpp_free_ground_network", + freeGroundNetwork, 1); +} + diff --git a/packages/CLPBN/horus2/Indexer.h b/packages/CLPBN/horus2/Indexer.h new file mode 100644 index 000000000..db99cf1a7 --- /dev/null +++ b/packages/CLPBN/horus2/Indexer.h @@ -0,0 +1,258 @@ +#ifndef HORUS_INDEXER_H +#define HORUS_INDEXER_H + +#include +#include + +#include +#include + +#include "Util.h" + + +class Indexer +{ + public: + Indexer (const Ranges& ranges, bool calcOffsets = true) + : index_(0), indices_(ranges.size(), 0), ranges_(ranges), + size_(Util::sizeExpected (ranges)) + { + if (calcOffsets) { + calculateOffsets(); + } + } + + void increment (void) + { + for (size_t i = ranges_.size(); i-- > 0; ) { + indices_[i] ++; + if (indices_[i] != ranges_[i]) { + break; + } else { + indices_[i] = 0; + } + } + index_ ++; + } + + void incrementDimension (size_t dim) + { + assert (dim < ranges_.size()); + assert (ranges_.size() == offsets_.size()); + assert (indices_[dim] < ranges_[dim]); + indices_[dim] ++; + index_ += offsets_[dim]; + } + + void incrementExceptDimension (size_t dim) + { + assert (ranges_.size() == offsets_.size()); + for (size_t i = ranges_.size(); i-- > 0; ) { + if (i != dim) { + indices_[i] ++; + index_ += offsets_[i]; + if (indices_[i] != ranges_[i]) { + return; + } else { + indices_[i] = 0; + index_ -= offsets_[i] * ranges_[i]; + } + } + } + index_ = size_; + } + + Indexer& operator++ (void) + { + increment(); + return *this; + } + + operator size_t (void) const + { + return index_; + } + + unsigned operator[] (size_t dim) const + { + assert (valid()); + assert (dim < ranges_.size()); + return indices_[dim]; + } + + bool valid (void) const + { + return index_ < size_; + } + + void reset (void) + { + std::fill (indices_.begin(), indices_.end(), 0); + index_ = 0; + } + + void resetDimension (size_t dim) + { + indices_[dim] = 0; + index_ -= offsets_[dim] * ranges_[dim]; + } + + size_t size (void) const + { + return size_ ; + } + + friend std::ostream& operator<< (std::ostream&, const Indexer&); + + private: + void calculateOffsets (void) + { + size_t prod = 1; + offsets_.resize (ranges_.size()); + for (size_t i = ranges_.size(); i-- > 0; ) { + offsets_[i] = prod; + prod *= ranges_[i]; + } + } + + size_t index_; + Ranges indices_; + const Ranges& ranges_; + size_t size_; + vector offsets_; +}; + + + +inline std::ostream& +operator<< (std::ostream& os, const Indexer& indexer) +{ + os << "(" ; + os << std::setw (2) << std::setfill('0') << indexer.index_; + os << ") " ; + os << indexer.indices_; + return os; +} + + + +class MapIndexer +{ + public: + MapIndexer (const Ranges& ranges, const vector& mask) + : index_(0), indices_(ranges.size(), 0), ranges_(ranges), + valid_(true) + { + size_t prod = 1; + offsets_.resize (ranges.size(), 0); + for (size_t i = ranges.size(); i-- > 0; ) { + if (mask[i]) { + offsets_[i] = prod; + prod *= ranges[i]; + } + } + assert (ranges.size() == mask.size()); + } + + MapIndexer (const Ranges& ranges, size_t dim) + : index_(0), indices_(ranges.size(), 0), ranges_(ranges), + valid_(true) + { + size_t prod = 1; + offsets_.resize (ranges.size(), 0); + for (size_t i = ranges.size(); i-- > 0; ) { + if (i != dim) { + offsets_[i] = prod; + prod *= ranges[i]; + } + } + } + + template + MapIndexer ( + const vector& allArgs, + const Ranges& allRanges, + const vector& wantedArgs, + const Ranges& wantedRanges) + : index_(0), indices_(allArgs.size(), 0), ranges_(allRanges), + valid_(true) + { + size_t prod = 1; + vector offsets (wantedRanges.size()); + for (size_t i = wantedRanges.size(); 
i-- > 0; ) { + offsets[i] = prod; + prod *= wantedRanges[i]; + } + offsets_.reserve (allArgs.size()); + for (size_t i = 0; i < allArgs.size(); i++) { + size_t idx = Util::indexOf (wantedArgs, allArgs[i]); + offsets_.push_back (idx != wantedArgs.size() ? offsets[idx] : 0); + } + } + + MapIndexer& operator++ (void) + { + assert (valid_); + for (size_t i = ranges_.size(); i-- > 0; ) { + indices_[i] ++; + index_ += offsets_[i]; + if (indices_[i] != ranges_[i]) { + return *this; + } else { + indices_[i] = 0; + index_ -= offsets_[i] * ranges_[i]; + } + } + valid_ = false; + return *this; + } + + operator size_t (void) const + { + assert (valid()); + return index_; + } + + unsigned operator[] (size_t dim) const + { + assert (valid()); + assert (dim < ranges_.size()); + return indices_[dim]; + } + + bool valid (void) const + { + return valid_; + } + + void reset (void) + { + std::fill (indices_.begin(), indices_.end(), 0); + index_ = 0; + } + + friend std::ostream& operator<< (std::ostream&, const MapIndexer&); + + private: + size_t index_; + Ranges indices_; + const Ranges& ranges_; + bool valid_; + vector offsets_; +}; + + + +inline std::ostream& +operator<< (std::ostream &os, const MapIndexer& indexer) +{ + os << "(" ; + os << std::setw (2) << std::setfill('0') << indexer.index_; + os << ") " ; + os << indexer.indices_; + return os; +} + + +#endif // HORUS_INDEXER_H + diff --git a/packages/CLPBN/horus2/LiftedBp.cpp b/packages/CLPBN/horus2/LiftedBp.cpp new file mode 100644 index 000000000..d3f757704 --- /dev/null +++ b/packages/CLPBN/horus2/LiftedBp.cpp @@ -0,0 +1,234 @@ +#include "LiftedBp.h" +#include "WeightedBp.h" +#include "FactorGraph.h" +#include "LiftedOperations.h" + + +LiftedBp::LiftedBp (const ParfactorList& parfactorList) + : LiftedSolver (parfactorList) +{ + refineParfactors(); + createFactorGraph(); + solver_ = new WeightedBp (*fg_, getWeights()); +} + + + +LiftedBp::~LiftedBp (void) +{ + delete solver_; + delete fg_; +} + + + +Params +LiftedBp::solveQuery (const Grounds& query) +{ + assert (query.empty() == false); + Params res; + vector groups = getQueryGroups (query); + if (query.size() == 1) { + res = solver_->getPosterioriOf (groups[0]); + } else { + ParfactorList::iterator it = pfList_.begin(); + size_t idx = pfList_.size(); + size_t count = 0; + while (it != pfList_.end()) { + if ((*it)->containsGrounds (query)) { + idx = count; + break; + } + ++ it; + ++ count; + } + if (idx == pfList_.size()) { + res = getJointByConditioning (pfList_, query); + } else { + VarIds queryVids; + for (unsigned i = 0; i < groups.size(); i++) { + queryVids.push_back (groups[i]); + } + res = solver_->getFactorJoint (fg_->facNodes()[idx], queryVids); + } + } + return res; +} + + + +void +LiftedBp::printSolverFlags (void) const +{ + stringstream ss; + ss << "lifted bp [" ; + ss << "schedule=" ; + typedef BpOptions::Schedule Sch; + switch (BpOptions::schedule) { + case Sch::SEQ_FIXED: ss << "seq_fixed"; break; + case Sch::SEQ_RANDOM: ss << "seq_random"; break; + case Sch::PARALLEL: ss << "parallel"; break; + case Sch::MAX_RESIDUAL: ss << "max_residual"; break; + } + ss << ",max_iter=" << BpOptions::maxIter; + ss << ",accuracy=" << BpOptions::accuracy; + ss << ",log_domain=" << Util::toString (Globals::logDomain); + ss << "]" ; + cout << ss.str() << endl; +} + + + +void +LiftedBp::refineParfactors (void) +{ + pfList_ = parfactorList; + while (iterate() == false); + + if (Globals::verbosity > 2) { + Util::printHeader ("AFTER REFINEMENT"); + pfList_.print(); + } +} + + + +bool +LiftedBp::iterate (void) 
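// Illustrative sketch (hypothetical ranges): the Indexer / MapIndexer
// utilities from Indexer.h above drive most parameter loops in this
// package.  An Indexer over ranges {2, 3} enumerates the joint states in
// row-major order (last dimension fastest),
//   (0,0) (0,1) (0,2) (1,0) (1,1) (1,2)  ->  linear indices 0..5,
// while MapIndexer ({2, 3}, /*dim*/ 1) visits the same six states but maps
// each of them to the index of the remaining variable only,
//   0 0 0 1 1 1,
// which is exactly how sumOutIndex() accumulates a marginal.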
+{ + ParfactorList::iterator it = pfList_.begin(); + while (it != pfList_.end()) { + const ProbFormulas& args = (*it)->arguments(); + for (size_t i = 0; i < args.size(); i++) { + LogVarSet lvs = (*it)->logVarSet() - args[i].logVars(); + if ((*it)->constr()->isCountNormalized (lvs) == false) { + Parfactors pfs = LiftedOperations::countNormalize (*it, lvs); + it = pfList_.removeAndDelete (it); + pfList_.add (pfs); + return false; + } + } + ++ it; + } + return true; +} + + + +vector +LiftedBp::getQueryGroups (const Grounds& query) +{ + vector queryGroups; + for (unsigned i = 0; i < query.size(); i++) { + ParfactorList::const_iterator it = pfList_.begin(); + for (; it != pfList_.end(); ++it) { + if ((*it)->containsGround (query[i])) { + queryGroups.push_back ((*it)->findGroup (query[i])); + break; + } + } + } + assert (queryGroups.size() == query.size()); + return queryGroups; +} + + + +void +LiftedBp::createFactorGraph (void) +{ + fg_ = new FactorGraph(); + ParfactorList::const_iterator it = pfList_.begin(); + for (; it != pfList_.end(); ++it) { + vector groups = (*it)->getAllGroups(); + VarIds varIds; + for (size_t i = 0; i < groups.size(); i++) { + varIds.push_back (groups[i]); + } + fg_->addFactor (Factor (varIds, (*it)->ranges(), (*it)->params())); + } +} + + + +vector> +LiftedBp::getWeights (void) const +{ + vector> weights; + weights.reserve (pfList_.size()); + ParfactorList::const_iterator it = pfList_.begin(); + for (; it != pfList_.end(); ++it) { + const ProbFormulas& args = (*it)->arguments(); + weights.push_back ({ }); + weights.back().reserve (args.size()); + for (size_t i = 0; i < args.size(); i++) { + LogVarSet lvs = (*it)->logVarSet() - args[i].logVars(); + weights.back().push_back ((*it)->constr()->getConditionalCount (lvs)); + } + } + return weights; +} + + + +unsigned +LiftedBp::rangeOfGround (const Ground& gr) +{ + ParfactorList::iterator it = pfList_.begin(); + while (it != pfList_.end()) { + if ((*it)->containsGround (gr)) { + PrvGroup prvGroup = (*it)->findGroup (gr); + return (*it)->range ((*it)->indexOfGroup (prvGroup)); + } + ++ it; + } + return std::numeric_limits::max(); +} + + + +Params +LiftedBp::getJointByConditioning ( + const ParfactorList& pfList, + const Grounds& query) +{ + LiftedBp solver (pfList); + Params prevBeliefs = solver.solveQuery ({query[0]}); + Grounds obsGrounds = {query[0]}; + for (size_t i = 1; i < query.size(); i++) { + Params newBeliefs; + vector obsFs; + Ranges obsRanges; + for (size_t j = 0; j < obsGrounds.size(); j++) { + obsFs.push_back (ObservedFormula ( + obsGrounds[j].functor(), 0, obsGrounds[j].args())); + obsRanges.push_back (rangeOfGround (obsGrounds[j])); + } + Indexer indexer (obsRanges, false); + while (indexer.valid()) { + for (size_t j = 0; j < obsFs.size(); j++) { + obsFs[j].setEvidence (indexer[j]); + } + ParfactorList tempPfList (pfList); + LiftedOperations::absorveEvidence (tempPfList, obsFs); + LiftedBp solver (tempPfList); + Params beliefs = solver.solveQuery ({query[i]}); + for (size_t k = 0; k < beliefs.size(); k++) { + newBeliefs.push_back (beliefs[k]); + } + ++ indexer; + } + int count = -1; + unsigned range = rangeOfGround (query[i]); + for (size_t j = 0; j < newBeliefs.size(); j++) { + if (j % range == 0) { + count ++; + } + newBeliefs[j] *= prevBeliefs[count]; + } + prevBeliefs = newBeliefs; + obsGrounds.push_back (query[i]); + } + return prevBeliefs; +} + diff --git a/packages/CLPBN/horus2/LiftedBp.h b/packages/CLPBN/horus2/LiftedBp.h new file mode 100644 index 000000000..274503f29 --- /dev/null +++ 
b/packages/CLPBN/horus2/LiftedBp.h @@ -0,0 +1,43 @@ +#ifndef HORUS_LIFTEDBP_H +#define HORUS_LIFTEDBP_H + +#include "LiftedSolver.h" +#include "ParfactorList.h" + +class FactorGraph; +class WeightedBp; + +class LiftedBp : public LiftedSolver +{ + public: + LiftedBp (const ParfactorList& pfList); + + ~LiftedBp (void); + + Params solveQuery (const Grounds&); + + void printSolverFlags (void) const; + + private: + void refineParfactors (void); + + bool iterate (void); + + vector getQueryGroups (const Grounds&); + + void createFactorGraph (void); + + vector> getWeights (void) const; + + unsigned rangeOfGround (const Ground&); + + Params getJointByConditioning (const ParfactorList&, const Grounds&); + + ParfactorList pfList_; + WeightedBp* solver_; + FactorGraph* fg_; + +}; + +#endif // HORUS_LIFTEDBP_H + diff --git a/packages/CLPBN/horus2/LiftedKc.cpp b/packages/CLPBN/horus2/LiftedKc.cpp new file mode 100644 index 000000000..45848ab70 --- /dev/null +++ b/packages/CLPBN/horus2/LiftedKc.cpp @@ -0,0 +1,1309 @@ +#include + +#include "LiftedKc.h" +#include "LiftedOperations.h" +#include "Indexer.h" + + + +OrNode::~OrNode (void) +{ + delete leftBranch_; + delete rightBranch_; +} + + + +double +OrNode::weight (void) const +{ + double lw = leftBranch_->weight(); + double rw = rightBranch_->weight(); + return Globals::logDomain ? Util::logSum (lw, rw) : lw + rw; +} + + + +AndNode::~AndNode (void) +{ + delete leftBranch_; + delete rightBranch_; +} + + + +double +AndNode::weight (void) const +{ + double lw = leftBranch_->weight(); + double rw = rightBranch_->weight(); + return Globals::logDomain ? lw + rw : lw * rw; +} + + + +int SetOrNode::nrPos_ = -1; +int SetOrNode::nrNeg_ = -1; + + + +SetOrNode::~SetOrNode (void) +{ + delete follow_; +} + + + +double +SetOrNode::weight (void) const +{ + double weightSum = LogAware::addIdenty(); + for (unsigned i = 0; i < nrGroundings_ + 1; i++) { + nrPos_ = nrGroundings_ - i; + nrNeg_ = i; + if (Globals::logDomain) { + double nrCombs = Util::nrCombinations (nrGroundings_, i); + double w = follow_->weight(); + weightSum = Util::logSum (weightSum, std::log (nrCombs) + w); + } else { + double w = follow_->weight(); + weightSum += Util::nrCombinations (nrGroundings_, i) * w; + } + } + nrPos_ = -1; + nrNeg_ = -1; + return weightSum; +} + + + +SetAndNode::~SetAndNode (void) +{ + delete follow_; +} + + + +double +SetAndNode::weight (void) const +{ + return LogAware::pow (follow_->weight(), nrGroundings_); +} + + + +IncExcNode::~IncExcNode (void) +{ + delete plus1Branch_; + delete plus2Branch_; + delete minusBranch_; +} + + + +double +IncExcNode::weight (void) const +{ + double w = 0.0; + if (Globals::logDomain) { + w = Util::logSum (plus1Branch_->weight(), plus2Branch_->weight()); + w = std::log (std::exp (w) - std::exp (minusBranch_->weight())); + } else { + w = plus1Branch_->weight() + plus2Branch_->weight(); + w -= minusBranch_->weight(); + } + return w; +} + + + +LeafNode::~LeafNode (void) +{ + delete clause_; +} + + + +double +LeafNode::weight (void) const +{ + assert (clause_->isUnit()); + if (clause_->posCountedLogVars().empty() == false + || clause_->negCountedLogVars().empty() == false) { + if (SetOrNode::isSet() == false) { + // return a NaN if we have a SetOrNode + // ancester that is not set. This can only + // happen when calculating the weights + // for the edge labels in graphviz + return 0.0 / 0.0; + } + } + double weight = clause_->literals()[0].isPositive() + ? 
lwcnf_.posWeight (clause_->literals().front().lid()) + : lwcnf_.negWeight (clause_->literals().front().lid()); + LogVarSet lvs = clause_->constr().logVarSet(); + lvs -= clause_->ipgLogVars(); + lvs -= clause_->posCountedLogVars(); + lvs -= clause_->negCountedLogVars(); + unsigned nrGroundings = 1; + if (lvs.empty() == false) { + nrGroundings = clause_->constr().projectedCopy (lvs).size(); + } + if (clause_->posCountedLogVars().empty() == false) { + nrGroundings *= std::pow (SetOrNode::nrPositives(), + clause_->nrPosCountedLogVars()); + } + if (clause_->negCountedLogVars().empty() == false) { + nrGroundings *= std::pow (SetOrNode::nrNegatives(), + clause_->nrNegCountedLogVars()); + } + return LogAware::pow (weight, nrGroundings); +} + + + +SmoothNode::~SmoothNode (void) +{ + Clause::deleteClauses (clauses_); +} + + + +double +SmoothNode::weight (void) const +{ + Clauses cs = clauses(); + double totalWeight = LogAware::multIdenty(); + for (size_t i = 0; i < cs.size(); i++) { + double posWeight = lwcnf_.posWeight (cs[i]->literals()[0].lid()); + double negWeight = lwcnf_.negWeight (cs[i]->literals()[0].lid()); + LogVarSet lvs = cs[i]->constr().logVarSet(); + lvs -= cs[i]->ipgLogVars(); + lvs -= cs[i]->posCountedLogVars(); + lvs -= cs[i]->negCountedLogVars(); + unsigned nrGroundings = 1; + if (lvs.empty() == false) { + nrGroundings = cs[i]->constr().projectedCopy (lvs).size(); + } + if (cs[i]->posCountedLogVars().empty() == false) { + nrGroundings *= std::pow (SetOrNode::nrPositives(), + cs[i]->nrPosCountedLogVars()); + } + if (cs[i]->negCountedLogVars().empty() == false) { + nrGroundings *= std::pow (SetOrNode::nrNegatives(), + cs[i]->nrNegCountedLogVars()); + } + if (Globals::logDomain) { + totalWeight += Util::logSum (posWeight, negWeight) * nrGroundings; + } else { + totalWeight *= std::pow (posWeight + negWeight, nrGroundings); + } + } + return totalWeight; +} + + + +double +TrueNode::weight (void) const +{ + return LogAware::multIdenty(); +} + + + +double +CompilationFailedNode::weight (void) const +{ + // weighted model counting in compilation + // failed nodes should give NaN + return 0.0 / 0.0; +} + + + +LiftedCircuit::LiftedCircuit (const LiftedWCNF* lwcnf) + : lwcnf_(lwcnf) +{ + root_ = 0; + compilationSucceeded_ = true; + Clauses clauses = Clause::copyClauses (lwcnf->clauses()); + compile (&root_, clauses); + if (compilationSucceeded_) { + smoothCircuit (root_); + } + if (Globals::verbosity > 1) { + if (compilationSucceeded_) { + double wmc = LogAware::exp (getWeightedModelCount()); + cout << "Weighted model count = " << wmc << endl << endl; + } + cout << "Exporting circuit to graphviz (circuit.dot)..." ; + cout << endl << endl; + exportToGraphViz ("circuit.dot"); + } +} + + + +LiftedCircuit::~LiftedCircuit (void) +{ + delete root_; + unordered_map::iterator it; + it = originClausesMap_.begin(); + while (it != originClausesMap_.end()) { + Clause::deleteClauses (it->second); + ++ it; + } +} + + + +bool +LiftedCircuit::isCompilationSucceeded (void) const +{ + return compilationSucceeded_; +} + + + +double +LiftedCircuit::getWeightedModelCount (void) const +{ + assert (compilationSucceeded_); + return root_->weight(); +} + + + +void +LiftedCircuit::exportToGraphViz (const char* fileName) +{ + ofstream out (fileName); + if (!out.is_open()) { + cerr << "Error: couldn't open file '" << fileName << "'." 
; + return; + } + out << "digraph {" << endl; + out << "ranksep=1" << endl; + exportToGraphViz (root_, out); + out << "}" << endl; + out.close(); +} + + + +void +LiftedCircuit::compile ( + CircuitNode** follow, + Clauses& clauses) +{ + if (compilationSucceeded_ == false + && Globals::verbosity <= 1) { + return; + } + + if (clauses.empty()) { + *follow = new TrueNode(); + return; + } + + if (clauses.size() == 1 && clauses[0]->isUnit()) { + *follow = new LeafNode (clauses[0], *lwcnf_); + return; + } + + if (tryUnitPropagation (follow, clauses)) { + return; + } + + if (tryIndependence (follow, clauses)) { + return; + } + + if (tryShannonDecomp (follow, clauses)) { + return; + } + + if (tryInclusionExclusion (follow, clauses)) { + return; + } + + if (tryIndepPartialGrounding (follow, clauses)) { + return; + } + + if (tryAtomCounting (follow, clauses)) { + return; + } + + *follow = new CompilationFailedNode(); + if (Globals::verbosity > 1) { + originClausesMap_[*follow] = clauses; + explanationMap_[*follow] = "" ; + } + compilationSucceeded_ = false; +} + + + +bool +LiftedCircuit::tryUnitPropagation ( + CircuitNode** follow, + Clauses& clauses) +{ + if (Globals::verbosity > 1) { + backupClauses_ = Clause::copyClauses (clauses); + } + for (size_t i = 0; i < clauses.size(); i++) { + if (clauses[i]->isUnit()) { + Clauses propagClauses; + for (size_t j = 0; j < clauses.size(); j++) { + if (i != j) { + LiteralId lid = clauses[i]->literals()[0].lid(); + LogVarTypes types = clauses[i]->logVarTypes (0); + if (clauses[i]->literals()[0].isPositive()) { + if (clauses[j]->containsPositiveLiteral (lid, types) == false) { + clauses[j]->removeNegativeLiterals (lid, types); + if (clauses[j]->nrLiterals() > 0) { + propagClauses.push_back (clauses[j]); + } else { + delete clauses[j]; + } + } else { + delete clauses[j]; + } + } else if (clauses[i]->literals()[0].isNegative()) { + if (clauses[j]->containsNegativeLiteral (lid, types) == false) { + clauses[j]->removePositiveLiterals (lid, types); + if (clauses[j]->nrLiterals() > 0) { + propagClauses.push_back (clauses[j]); + } else { + delete clauses[j]; + } + } else { + delete clauses[j]; + } + } + } + } + + AndNode* andNode = new AndNode(); + if (Globals::verbosity > 1) { + originClausesMap_[andNode] = backupClauses_; + stringstream explanation; + explanation << " UP on " << clauses[i]->literals()[0]; + explanationMap_[andNode] = explanation.str(); + } + + Clauses unitClause = { clauses[i] }; + compile (andNode->leftBranch(), unitClause); + compile (andNode->rightBranch(), propagClauses); + (*follow) = andNode; + return true; + } + } + if (Globals::verbosity > 1) { + Clause::deleteClauses (backupClauses_); + } + return false; +} + + + +bool +LiftedCircuit::tryIndependence ( + CircuitNode** follow, + Clauses& clauses) +{ + if (clauses.size() == 1) { + return false; + } + if (Globals::verbosity > 1) { + backupClauses_ = Clause::copyClauses (clauses); + } + Clauses depClauses = { clauses[0] }; + Clauses indepClauses (clauses.begin() + 1, clauses.end()); + bool finish = false; + while (finish == false) { + finish = true; + for (size_t i = 0; i < indepClauses.size(); i++) { + if (independentClause (*indepClauses[i], depClauses) == false) { + depClauses.push_back (indepClauses[i]); + indepClauses.erase (indepClauses.begin() + i); + finish = false; + break; + } + } + } + if (indepClauses.empty() == false) { + AndNode* andNode = new AndNode (); + if (Globals::verbosity > 1) { + originClausesMap_[andNode] = backupClauses_; + explanationMap_[andNode] = " Independence" ; + } 
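+
+    // depClauses now holds every clause that (transitively) shares a
+    // dependency with clauses[0], and indepClauses holds the remaining,
+    // independent clauses (for instance, clauses built only from p(X) are
+    // independent of clauses built only from r(Y)). The weighted model
+    // count of the whole set is then the product of the counts of the two
+    // parts, so each part is compiled into one branch of the AND node
+    // (AndNode::weight() multiplies the branch weights, or adds them in
+    // the log domain).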
+ compile (andNode->leftBranch(), depClauses); + compile (andNode->rightBranch(), indepClauses); + (*follow) = andNode; + return true; + } + if (Globals::verbosity > 1) { + Clause::deleteClauses (backupClauses_); + } + return false; +} + + + +bool +LiftedCircuit::tryShannonDecomp ( + CircuitNode** follow, + Clauses& clauses) +{ + if (Globals::verbosity > 1) { + backupClauses_ = Clause::copyClauses (clauses); + } + for (size_t i = 0; i < clauses.size(); i++) { + const Literals& literals = clauses[i]->literals(); + for (size_t j = 0; j < literals.size(); j++) { + if (literals[j].isGround ( + clauses[i]->constr(), clauses[i]->ipgLogVars())) { + + Clause* c1 = lwcnf_->createClause (literals[j].lid()); + Clause* c2 = new Clause (*c1); + c2->literals().front().complement(); + + Clauses otherClauses = Clause::copyClauses (clauses); + clauses.push_back (c1); + otherClauses.push_back (c2); + + OrNode* orNode = new OrNode(); + if (Globals::verbosity > 1) { + originClausesMap_[orNode] = backupClauses_; + stringstream explanation; + explanation << " SD on " << literals[j]; + explanationMap_[orNode] = explanation.str(); + } + + compile (orNode->leftBranch(), clauses); + compile (orNode->rightBranch(), otherClauses); + (*follow) = orNode; + return true; + } + } + } + if (Globals::verbosity > 1) { + Clause::deleteClauses (backupClauses_); + } + return false; +} + + + +bool +LiftedCircuit::tryInclusionExclusion ( + CircuitNode** follow, + Clauses& clauses) +{ + if (Globals::verbosity > 1) { + backupClauses_ = Clause::copyClauses (clauses); + } + for (size_t i = 0; i < clauses.size(); i++) { + Literals depLits = { clauses[i]->literals().front() }; + Literals indepLits (clauses[i]->literals().begin() + 1, + clauses[i]->literals().end()); + bool finish = false; + while (finish == false) { + finish = true; + for (size_t j = 0; j < indepLits.size(); j++) { + if (independentLiteral (indepLits[j], depLits) == false) { + depLits.push_back (indepLits[j]); + indepLits.erase (indepLits.begin() + j); + finish = false; + break; + } + } + } + if (indepLits.empty() == false) { + LogVarSet lvs1; + for (size_t j = 0; j < depLits.size(); j++) { + lvs1 |= depLits[j].logVarSet(); + } + if (clauses[i]->constr().isCountNormalized (lvs1) == false) { + break; + } + LogVarSet lvs2; + for (size_t j = 0; j < indepLits.size(); j++) { + lvs2 |= indepLits[j].logVarSet(); + } + if (clauses[i]->constr().isCountNormalized (lvs2) == false) { + break; + } + Clause* c1 = new Clause (clauses[i]->constr().projectedCopy (lvs1)); + for (size_t j = 0; j < depLits.size(); j++) { + c1->addLiteral (depLits[j]); + } + Clause* c2 = new Clause (clauses[i]->constr().projectedCopy (lvs2)); + for (size_t j = 0; j < indepLits.size(); j++) { + c2->addLiteral (indepLits[j]); + } + + clauses.erase (clauses.begin() + i); + Clauses plus1Clauses = Clause::copyClauses (clauses); + Clauses plus2Clauses = Clause::copyClauses (clauses); + + plus1Clauses.push_back (c1); + plus2Clauses.push_back (c2); + clauses.push_back (c1); + clauses.push_back (c2); + + IncExcNode* ieNode = new IncExcNode(); + if (Globals::verbosity > 1) { + originClausesMap_[ieNode] = backupClauses_; + stringstream explanation; + explanation << " IncExc on clause nº " << i + 1; + explanationMap_[ieNode] = explanation.str(); + } + compile (ieNode->plus1Branch(), plus1Clauses); + compile (ieNode->plus2Branch(), plus2Clauses); + compile (ieNode->minusBranch(), clauses); + *follow = ieNode; + return true; + } + } + if (Globals::verbosity > 1) { + Clause::deleteClauses (backupClauses_); + } + return 
false; +} + + + +bool +LiftedCircuit::tryIndepPartialGrounding ( + CircuitNode** follow, + Clauses& clauses) +{ + // assumes that all literals have logical variables + // else, shannon decomp was possible + if (Globals::verbosity > 1) { + backupClauses_ = Clause::copyClauses (clauses); + } + LogVars rootLogVars; + LogVarSet lvs = clauses[0]->ipgCandidates(); + for (size_t i = 0; i < lvs.size(); i++) { + rootLogVars.clear(); + rootLogVars.push_back (lvs[i]); + ConstraintTree ct = clauses[0]->constr().projectedCopy ({lvs[i]}); + if (tryIndepPartialGroundingAux (clauses, ct, rootLogVars)) { + for (size_t j = 0; j < clauses.size(); j++) { + clauses[j]->addIpgLogVar (rootLogVars[j]); + } + SetAndNode* setAndNode = new SetAndNode (ct.size()); + if (Globals::verbosity > 1) { + originClausesMap_[setAndNode] = backupClauses_; + explanationMap_[setAndNode] = " IPG" ; + } + *follow = setAndNode; + compile (setAndNode->follow(), clauses); + return true; + } + } + if (Globals::verbosity > 1) { + Clause::deleteClauses (backupClauses_); + } + return false; +} + + + +bool +LiftedCircuit::tryIndepPartialGroundingAux ( + Clauses& clauses, + ConstraintTree& ct, + LogVars& rootLogVars) +{ + for (size_t i = 1; i < clauses.size(); i++) { + LogVarSet lvs = clauses[i]->ipgCandidates(); + for (size_t j = 0; j < lvs.size(); j++) { + ConstraintTree ct2 = clauses[i]->constr().projectedCopy ({lvs[j]}); + if (ct.tupleSet() == ct2.tupleSet()) { + rootLogVars.push_back (lvs[j]); + break; + } + } + if (rootLogVars.size() != i + 1) { + return false; + } + } + // verifies if the IPG logical vars appear in the same positions + unordered_map positions; + for (size_t i = 0; i < clauses.size(); i++) { + const Literals& literals = clauses[i]->literals(); + for (size_t j = 0; j < literals.size(); j++) { + size_t idx = literals[j].indexOfLogVar (rootLogVars[i]); + assert (idx != literals[j].nrLogVars()); + unordered_map::iterator it; + it = positions.find (literals[j].lid()); + if (it != positions.end()) { + if (it->second != idx) { + return false; + } + } else { + positions[literals[j].lid()] = idx; + } + } + } + return true; +} + + + +bool +LiftedCircuit::tryAtomCounting ( + CircuitNode** follow, + Clauses& clauses) +{ + for (size_t i = 0; i < clauses.size(); i++) { + if (clauses[i]->nrPosCountedLogVars() > 0 + || clauses[i]->nrNegCountedLogVars() > 0) { + // only allow one atom counting node per branch + return false; + } + } + if (Globals::verbosity > 1) { + backupClauses_ = Clause::copyClauses (clauses); + } + for (size_t i = 0; i < clauses.size(); i++) { + Literals literals = clauses[i]->literals(); + for (size_t j = 0; j < literals.size(); j++) { + if (literals[j].nrLogVars() == 1 + && ! clauses[i]->isIpgLogVar (literals[j].logVars().front()) + && ! 
clauses[i]->isCountedLogVar (literals[j].logVars().front())) { + unsigned nrGroundings = clauses[i]->constr().projectedCopy ( + literals[j].logVars()).size(); + SetOrNode* setOrNode = new SetOrNode (nrGroundings); + if (Globals::verbosity > 1) { + originClausesMap_[setOrNode] = backupClauses_; + explanationMap_[setOrNode] = " AC" ; + } + Clause* c1 = new Clause ( + clauses[i]->constr().projectedCopy (literals[j].logVars())); + Clause* c2 = new Clause ( + clauses[i]->constr().projectedCopy (literals[j].logVars())); + c1->addLiteral (literals[j]); + c2->addLiteralComplemented (literals[j]); + c1->addPosCountedLogVar (literals[j].logVars().front()); + c2->addNegCountedLogVar (literals[j].logVars().front()); + clauses.push_back (c1); + clauses.push_back (c2); + shatterCountedLogVars (clauses); + compile (setOrNode->follow(), clauses); + *follow = setOrNode; + return true; + } + } + } + if (Globals::verbosity > 1) { + Clause::deleteClauses (backupClauses_); + } + return false; +} + + + +void +LiftedCircuit::shatterCountedLogVars (Clauses& clauses) +{ + while (shatterCountedLogVarsAux (clauses)) ; +} + + + +bool +LiftedCircuit::shatterCountedLogVarsAux (Clauses& clauses) +{ + for (size_t i = 0; i < clauses.size() - 1; i++) { + for (size_t j = i + 1; j < clauses.size(); j++) { + bool splitedSome = shatterCountedLogVarsAux (clauses, i, j); + if (splitedSome) { + return true; + } + } + } + return false; +} + + + +bool +LiftedCircuit::shatterCountedLogVarsAux ( + Clauses& clauses, + size_t idx1, + size_t idx2) +{ + Literals lits1 = clauses[idx1]->literals(); + Literals lits2 = clauses[idx2]->literals(); + for (size_t i = 0; i < lits1.size(); i++) { + for (size_t j = 0; j < lits2.size(); j++) { + if (lits1[i].lid() == lits2[j].lid()) { + LogVars lvs1 = lits1[i].logVars(); + LogVars lvs2 = lits2[j].logVars(); + for (size_t k = 0; k < lvs1.size(); k++) { + if (clauses[idx1]->isCountedLogVar (lvs1[k]) + && clauses[idx2]->isCountedLogVar (lvs2[k]) == false) { + clauses.push_back (new Clause (*clauses[idx2])); + clauses[idx2]->addPosCountedLogVar (lvs2[k]); + clauses.back()->addNegCountedLogVar (lvs2[k]); + return true; + } + if (clauses[idx2]->isCountedLogVar (lvs2[k]) + && clauses[idx1]->isCountedLogVar (lvs1[k]) == false) { + clauses.push_back (new Clause (*clauses[idx1])); + clauses[idx1]->addPosCountedLogVar (lvs1[k]); + clauses.back()->addNegCountedLogVar (lvs1[k]); + return true; + } + } + } + } + } + return false; +} + + + +bool +LiftedCircuit::independentClause ( + Clause& clause, + Clauses& otherClauses) const +{ + for (size_t i = 0; i < otherClauses.size(); i++) { + if (Clause::independentClauses (clause, *otherClauses[i]) == false) { + return false; + } + } + return true; +} + + + +bool +LiftedCircuit::independentLiteral ( + const Literal& lit, + const Literals& otherLits) const +{ + for (size_t i = 0; i < otherLits.size(); i++) { + if (lit.lid() == otherLits[i].lid() + || (lit.logVarSet() & otherLits[i].logVarSet()).empty() == false) { + return false; + } + } + return true; +} + + + +LitLvTypesSet +LiftedCircuit::smoothCircuit (CircuitNode* node) +{ + assert (node != 0); + LitLvTypesSet propagLits; + + switch (getCircuitNodeType (node)) { + + case CircuitNodeType::OR_NODE: { + OrNode* casted = dynamic_cast(node); + LitLvTypesSet lids1 = smoothCircuit (*casted->leftBranch()); + LitLvTypesSet lids2 = smoothCircuit (*casted->rightBranch()); + LitLvTypesSet missingLeft = lids2 - lids1; + LitLvTypesSet missingRight = lids1 - lids2; + createSmoothNode (missingLeft, casted->leftBranch()); + 
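+      // Smoothing: both branches of an OR node must mention the same set of
+      // (literal, domain-type) pairs, otherwise the two branch counts would
+      // range over different sets of atoms. Whatever occurs on one side only
+      // is injected into the other side as a tautological clause l ∨ ¬l
+      // (see SmoothNode::weight(), which contributes a factor of
+      // posWeight + negWeight per grounding). The call above fixes the left
+      // branch; the symmetric call below fixes the right branch.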
createSmoothNode (missingRight, casted->rightBranch()); + propagLits |= lids1; + propagLits |= lids2; + break; + } + + case CircuitNodeType::AND_NODE: { + AndNode* casted = dynamic_cast(node); + LitLvTypesSet lids1 = smoothCircuit (*casted->leftBranch()); + LitLvTypesSet lids2 = smoothCircuit (*casted->rightBranch()); + propagLits |= lids1; + propagLits |= lids2; + break; + } + + case CircuitNodeType::SET_OR_NODE: { + SetOrNode* casted = dynamic_cast(node); + propagLits = smoothCircuit (*casted->follow()); + TinySet> litSet; + for (size_t i = 0; i < propagLits.size(); i++) { + litSet.insert (make_pair (propagLits[i].lid(), + propagLits[i].logVarTypes().size())); + } + LitLvTypesSet missingLids; + for (size_t i = 0; i < litSet.size(); i++) { + vector allTypes = getAllPossibleTypes (litSet[i].second); + for (size_t j = 0; j < allTypes.size(); j++) { + bool typeFound = false; + for (size_t k = 0; k < propagLits.size(); k++) { + if (litSet[i].first == propagLits[k].lid() + && containsTypes (propagLits[k].logVarTypes(), allTypes[j])) { + typeFound = true; + break; + } + } + if (typeFound == false) { + missingLids.insert (LitLvTypes (litSet[i].first, allTypes[j])); + } + } + } + createSmoothNode (missingLids, casted->follow()); + // setAllFullLogVars() can cause repeated elements in + // the set. Fix this by reconstructing the set again + LitLvTypesSet copy = propagLits; + propagLits.clear(); + for (size_t i = 0; i < copy.size(); i++) { + copy[i].setAllFullLogVars(); + propagLits.insert (copy[i]); + } + break; + } + + case CircuitNodeType::SET_AND_NODE: { + SetAndNode* casted = dynamic_cast(node); + propagLits = smoothCircuit (*casted->follow()); + break; + } + + case CircuitNodeType::INC_EXC_NODE: { + IncExcNode* casted = dynamic_cast(node); + LitLvTypesSet lids1 = smoothCircuit (*casted->plus1Branch()); + LitLvTypesSet lids2 = smoothCircuit (*casted->plus2Branch()); + LitLvTypesSet missingPlus1 = lids2 - lids1; + LitLvTypesSet missingPlus2 = lids1 - lids2; + createSmoothNode (missingPlus1, casted->plus1Branch()); + createSmoothNode (missingPlus2, casted->plus2Branch()); + propagLits |= lids1; + propagLits |= lids2; + break; + } + + case CircuitNodeType::LEAF_NODE: { + LeafNode* casted = dynamic_cast(node); + propagLits.insert (LitLvTypes ( + casted->clause()->literals()[0].lid(), + casted->clause()->logVarTypes(0))); + } + + default: + break; + } + + return propagLits; +} + + + +void +LiftedCircuit::createSmoothNode ( + const LitLvTypesSet& missingLits, + CircuitNode** prev) +{ + if (missingLits.empty() == false) { + if (Globals::verbosity > 1) { + unordered_map::iterator it; + it = originClausesMap_.find (*prev); + if (it != originClausesMap_.end()) { + backupClauses_ = it->second; + } else { + backupClauses_ = Clause::copyClauses ( + {((dynamic_cast(*prev))->clause())}); + } + } + Clauses clauses; + for (size_t i = 0; i < missingLits.size(); i++) { + LiteralId lid = missingLits[i].lid(); + const LogVarTypes& types = missingLits[i].logVarTypes(); + Clause* c = lwcnf_->createClause (lid); + for (size_t j = 0; j < types.size(); j++) { + LogVar X = c->literals().front().logVars()[j]; + if (types[j] == LogVarType::POS_LV) { + c->addPosCountedLogVar (X); + } else if (types[j] == LogVarType::NEG_LV) { + c->addNegCountedLogVar (X); + } + } + c->addLiteralComplemented (c->literals()[0]); + clauses.push_back (c); + } + SmoothNode* smoothNode = new SmoothNode (clauses, *lwcnf_); + *prev = new AndNode (smoothNode, *prev); + if (Globals::verbosity > 1) { + originClausesMap_[*prev] = backupClauses_; + 
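+      // The clauses and the label stored here are only used by
+      // exportToGraphViz(), so the smoothing step that created this
+      // AND node can be inspected in the exported circuit.dot.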
explanationMap_[*prev] = " Smoothing" ; + } + } +} + + + +vector +LiftedCircuit::getAllPossibleTypes (unsigned nrLogVars) const +{ + if (nrLogVars == 0) { + return {}; + } + if (nrLogVars == 1) { + return {{LogVarType::POS_LV},{LogVarType::NEG_LV}}; + } + vector res; + Ranges ranges (nrLogVars, 2); + Indexer indexer (ranges); + while (indexer.valid()) { + LogVarTypes types; + for (size_t i = 0; i < nrLogVars; i++) { + if (indexer[i] == 0) { + types.push_back (LogVarType::POS_LV); + } else { + types.push_back (LogVarType::NEG_LV); + } + } + res.push_back (types); + ++ indexer; + } + return res; +} + + + +bool +LiftedCircuit::containsTypes ( + const LogVarTypes& typesA, + const LogVarTypes& typesB) const +{ + for (size_t i = 0; i < typesA.size(); i++) { + if (typesA[i] == LogVarType::FULL_LV) { + + } else if (typesA[i] == LogVarType::POS_LV + && typesB[i] == LogVarType::POS_LV) { + + } else if (typesA[i] == LogVarType::NEG_LV + && typesB[i] == LogVarType::NEG_LV) { + + } else { + return false; + } + } + return true; +} + + + +CircuitNodeType +LiftedCircuit::getCircuitNodeType (const CircuitNode* node) const +{ + CircuitNodeType type; + if (dynamic_cast(node) != 0) { + type = CircuitNodeType::OR_NODE; + } else if (dynamic_cast(node) != 0) { + type = CircuitNodeType::AND_NODE; + } else if (dynamic_cast(node) != 0) { + type = CircuitNodeType::SET_OR_NODE; + } else if (dynamic_cast(node) != 0) { + type = CircuitNodeType::SET_AND_NODE; + } else if (dynamic_cast(node) != 0) { + type = CircuitNodeType::INC_EXC_NODE; + } else if (dynamic_cast(node) != 0) { + type = CircuitNodeType::LEAF_NODE; + } else if (dynamic_cast(node) != 0) { + type = CircuitNodeType::SMOOTH_NODE; + } else if (dynamic_cast(node) != 0) { + type = CircuitNodeType::TRUE_NODE; + } else if (dynamic_cast(node) != 0) { + type = CircuitNodeType::COMPILATION_FAILED_NODE; + } else { + assert (false); + } + return type; +} + + + +void +LiftedCircuit::exportToGraphViz (CircuitNode* node, ofstream& os) +{ + assert (node != 0); + + static unsigned nrAuxNodes = 0; + stringstream ss; + ss << "n" << nrAuxNodes; + string auxNode = ss.str(); + nrAuxNodes ++; + string opStyle = "shape=circle,width=0.7,margin=\"0.0,0.0\"," ; + + switch (getCircuitNodeType (node)) { + + case OR_NODE: { + OrNode* casted = dynamic_cast(node); + printClauses (casted, os); + + os << auxNode << " [" << opStyle << "label=\"∨\"]" << endl; + os << escapeNode (node) << " -> " << auxNode; + os << " [label=\"" << getExplanationString (node) << "\"]" ; + os << endl; + + os << auxNode << " -> " ; + os << escapeNode (*casted->leftBranch()); + os << " [label=\" " << (*casted->leftBranch())->weight() << "\"]" ; + os << endl; + + os << auxNode << " -> " ; + os << escapeNode (*casted->rightBranch()); + os << " [label=\" " << (*casted->rightBranch())->weight() << "\"]" ; + os << endl; + + exportToGraphViz (*casted->leftBranch(), os); + exportToGraphViz (*casted->rightBranch(), os); + break; + } + + case AND_NODE: { + AndNode* casted = dynamic_cast(node); + printClauses (casted, os); + + os << auxNode << " [" << opStyle << "label=\"∧\"]" << endl; + os << escapeNode (node) << " -> " << auxNode; + os << " [label=\"" << getExplanationString (node) << "\"]" ; + os << endl; + + os << auxNode << " -> " ; + os << escapeNode (*casted->leftBranch()); + os << " [label=\" " << (*casted->leftBranch())->weight() << "\"]" ; + os << endl; + + os << auxNode << " -> " ; + os << escapeNode (*casted->rightBranch()) << endl; + os << " [label=\" " << (*casted->rightBranch())->weight() << "\"]" ; + os << 
endl; + + exportToGraphViz (*casted->leftBranch(), os); + exportToGraphViz (*casted->rightBranch(), os); + break; + } + + case SET_OR_NODE: { + SetOrNode* casted = dynamic_cast(node); + printClauses (casted, os); + + os << auxNode << " [" << opStyle << "label=\"∨(X)\"]" << endl; + os << escapeNode (node) << " -> " << auxNode; + os << " [label=\"" << getExplanationString (node) << "\"]" ; + os << endl; + + os << auxNode << " -> " ; + os << escapeNode (*casted->follow()); + os << " [label=\" " << (*casted->follow())->weight() << "\"]" ; + os << endl; + + exportToGraphViz (*casted->follow(), os); + break; + } + + case SET_AND_NODE: { + SetAndNode* casted = dynamic_cast(node); + printClauses (casted, os); + + os << auxNode << " [" << opStyle << "label=\"∧(X)\"]" << endl; + os << escapeNode (node) << " -> " << auxNode; + os << " [label=\"" << getExplanationString (node) << "\"]" ; + os << endl; + + os << auxNode << " -> " ; + os << escapeNode (*casted->follow()); + os << " [label=\" " << (*casted->follow())->weight() << "\"]" ; + os << endl; + + exportToGraphViz (*casted->follow(), os); + break; + } + + case INC_EXC_NODE: { + IncExcNode* casted = dynamic_cast(node); + printClauses (casted, os); + + os << auxNode << " [" << opStyle << "label=\"+ - +\"]" ; + os << endl; + os << escapeNode (node) << " -> " << auxNode; + os << " [label=\"" << getExplanationString (node) << "\"]" ; + os << endl; + + os << auxNode << " -> " ; + os << escapeNode (*casted->plus1Branch()); + os << " [label=\" " << (*casted->plus1Branch())->weight() << "\"]" ; + os << endl; + + os << auxNode << " -> " ; + os << escapeNode (*casted->minusBranch()) << endl; + os << " [label=\" " << (*casted->minusBranch())->weight() << "\"]" ; + os << endl; + + os << auxNode << " -> " ; + os << escapeNode (*casted->plus2Branch()); + os << " [label=\" " << (*casted->plus2Branch())->weight() << "\"]" ; + os << endl; + + exportToGraphViz (*casted->plus1Branch(), os); + exportToGraphViz (*casted->plus2Branch(), os); + exportToGraphViz (*casted->minusBranch(), os); + break; + } + + case LEAF_NODE: { + printClauses (node, os, "style=filled,fillcolor=palegreen,"); + break; + } + + case SMOOTH_NODE: { + printClauses (node, os, "style=filled,fillcolor=lightblue,"); + break; + } + + case TRUE_NODE: { + os << escapeNode (node); + os << " [shape=box,label=\"⊤\"]" ; + os << endl; + break; + } + + case COMPILATION_FAILED_NODE: { + printClauses (node, os, "style=filled,fillcolor=salmon,"); + break; + } + + default: + assert (false); + } +} + + + +string +LiftedCircuit::escapeNode (const CircuitNode* node) const +{ + stringstream ss; + ss << "\"" << node << "\"" ; + return ss.str(); +} + + + +string +LiftedCircuit::getExplanationString (CircuitNode* node) +{ + return Util::contains (explanationMap_, node) + ? 
explanationMap_[node] + : "" ; +} + + + +void +LiftedCircuit::printClauses ( + CircuitNode* node, + ofstream& os, + string extraOptions) +{ + Clauses clauses; + if (Util::contains (originClausesMap_, node)) { + clauses = originClausesMap_[node]; + } else if (getCircuitNodeType (node) == CircuitNodeType::LEAF_NODE) { + clauses = { (dynamic_cast(node))->clause() } ; + } else if (getCircuitNodeType (node) == CircuitNodeType::SMOOTH_NODE) { + clauses = (dynamic_cast(node))->clauses(); + } + assert (clauses.empty() == false); + os << escapeNode (node); + os << " [shape=box," << extraOptions << "label=\"" ; + for (size_t i = 0; i < clauses.size(); i++) { + if (i != 0) os << "\\n" ; + os << *clauses[i]; + } + os << "\"]" ; + os << endl; +} + + + +LiftedKc::~LiftedKc (void) +{ + delete lwcnf_; + delete circuit_; +} + + + +Params +LiftedKc::solveQuery (const Grounds& query) +{ + pfList_ = parfactorList; + LiftedOperations::shatterAgainstQuery (pfList_, query); + LiftedOperations::runWeakBayesBall (pfList_, query); + lwcnf_ = new LiftedWCNF (pfList_); + circuit_ = new LiftedCircuit (lwcnf_); + if (circuit_->isCompilationSucceeded() == false) { + cerr << "Error: the circuit compilation has failed." << endl; + exit (EXIT_FAILURE); + } + vector groups; + Ranges ranges; + for (size_t i = 0; i < query.size(); i++) { + ParfactorList::const_iterator it = pfList_.begin(); + while (it != pfList_.end()) { + size_t idx = (*it)->indexOfGround (query[i]); + if (idx != (*it)->nrArguments()) { + groups.push_back ((*it)->argument (idx).group()); + ranges.push_back ((*it)->range (idx)); + break; + } + ++ it; + } + } + assert (groups.size() == query.size()); + Params params; + Indexer indexer (ranges); + while (indexer.valid()) { + for (size_t i = 0; i < groups.size(); i++) { + vector litIds = lwcnf_->prvGroupLiterals (groups[i]); + for (size_t j = 0; j < litIds.size(); j++) { + if (indexer[i] == j) { + lwcnf_->addWeight (litIds[j], LogAware::one(), + LogAware::one()); + } else { + lwcnf_->addWeight (litIds[j], LogAware::zero(), + LogAware::one()); + } + } + } + params.push_back (circuit_->getWeightedModelCount()); + ++ indexer; + } + LogAware::normalize (params); + if (Globals::logDomain) { + Util::exp (params); + } + return params; +} + + + +void +LiftedKc::printSolverFlags (void) const +{ + stringstream ss; + ss << "lifted kc [" ; + ss << "log_domain=" << Util::toString (Globals::logDomain); + ss << "]" ; + cout << ss.str() << endl; +} + diff --git a/packages/CLPBN/horus2/LiftedKc.h b/packages/CLPBN/horus2/LiftedKc.h new file mode 100644 index 000000000..a4cd2dbeb --- /dev/null +++ b/packages/CLPBN/horus2/LiftedKc.h @@ -0,0 +1,300 @@ +#ifndef HORUS_LIFTEDKC_H +#define HORUS_LIFTEDKC_H + + +#include "LiftedWCNF.h" +#include "LiftedSolver.h" +#include "ParfactorList.h" + + +enum CircuitNodeType { + OR_NODE, + AND_NODE, + SET_OR_NODE, + SET_AND_NODE, + INC_EXC_NODE, + LEAF_NODE, + SMOOTH_NODE, + TRUE_NODE, + COMPILATION_FAILED_NODE +}; + + + +class CircuitNode +{ + public: + CircuitNode (void) { } + + virtual ~CircuitNode (void) { } + + virtual double weight (void) const = 0; +}; + + + +class OrNode : public CircuitNode +{ + public: + OrNode (void) : CircuitNode(), leftBranch_(0), rightBranch_(0) { } + + ~OrNode (void); + + CircuitNode** leftBranch (void) { return &leftBranch_; } + CircuitNode** rightBranch (void) { return &rightBranch_; } + + double weight (void) const; + + private: + CircuitNode* leftBranch_; + CircuitNode* rightBranch_; +}; + + + +class AndNode : public CircuitNode +{ + public: + AndNode (void) : 
CircuitNode(), leftBranch_(0), rightBranch_(0) { } + + AndNode (CircuitNode* leftBranch, CircuitNode* rightBranch) + : CircuitNode(), leftBranch_(leftBranch), rightBranch_(rightBranch) { } + + ~AndNode (void); + + CircuitNode** leftBranch (void) { return &leftBranch_; } + CircuitNode** rightBranch (void) { return &rightBranch_; } + + double weight (void) const; + + private: + CircuitNode* leftBranch_; + CircuitNode* rightBranch_; +}; + + + +class SetOrNode : public CircuitNode +{ + public: + SetOrNode (unsigned nrGroundings) + : CircuitNode(), follow_(0), nrGroundings_(nrGroundings) { } + + ~SetOrNode (void); + + CircuitNode** follow (void) { return &follow_; } + + static unsigned nrPositives (void) { return nrPos_; } + + static unsigned nrNegatives (void) { return nrNeg_; } + + static bool isSet (void) { return nrPos_ >= 0; } + + double weight (void) const; + + private: + CircuitNode* follow_; + unsigned nrGroundings_; + static int nrPos_; + static int nrNeg_; +}; + + + +class SetAndNode : public CircuitNode +{ + public: + SetAndNode (unsigned nrGroundings) + : CircuitNode(), follow_(0), nrGroundings_(nrGroundings) { } + + ~SetAndNode (void); + + CircuitNode** follow (void) { return &follow_; } + + double weight (void) const; + + private: + CircuitNode* follow_; + unsigned nrGroundings_; +}; + + + +class IncExcNode : public CircuitNode +{ + public: + IncExcNode (void) + : CircuitNode(), plus1Branch_(0), plus2Branch_(0), minusBranch_(0) { } + + ~IncExcNode (void); + + CircuitNode** plus1Branch (void) { return &plus1Branch_; } + CircuitNode** plus2Branch (void) { return &plus2Branch_; } + CircuitNode** minusBranch (void) { return &minusBranch_; } + + double weight (void) const; + + private: + CircuitNode* plus1Branch_; + CircuitNode* plus2Branch_; + CircuitNode* minusBranch_; +}; + + + +class LeafNode : public CircuitNode +{ + public: + LeafNode (Clause* clause, const LiftedWCNF& lwcnf) + : CircuitNode(), clause_(clause), lwcnf_(lwcnf) { } + + ~LeafNode (void); + + const Clause* clause (void) const { return clause_; } + + Clause* clause (void) { return clause_; } + + double weight (void) const; + + private: + Clause* clause_; + const LiftedWCNF& lwcnf_; +}; + + + +class SmoothNode : public CircuitNode +{ + public: + SmoothNode (const Clauses& clauses, const LiftedWCNF& lwcnf) + : CircuitNode(), clauses_(clauses), lwcnf_(lwcnf) { } + + ~SmoothNode (void); + + const Clauses& clauses (void) const { return clauses_; } + + Clauses clauses (void) { return clauses_; } + + double weight (void) const; + + private: + Clauses clauses_; + const LiftedWCNF& lwcnf_; +}; + + + +class TrueNode : public CircuitNode +{ + public: + TrueNode (void) : CircuitNode() { } + + double weight (void) const; +}; + + + +class CompilationFailedNode : public CircuitNode +{ + public: + CompilationFailedNode (void) : CircuitNode() { } + + double weight (void) const; +}; + + + +class LiftedCircuit +{ + public: + LiftedCircuit (const LiftedWCNF* lwcnf); + + ~LiftedCircuit (void); + + bool isCompilationSucceeded (void) const; + + double getWeightedModelCount (void) const; + + void exportToGraphViz (const char*); + + private: + + void compile (CircuitNode** follow, Clauses& clauses); + + bool tryUnitPropagation (CircuitNode** follow, Clauses& clauses); + + bool tryIndependence (CircuitNode** follow, Clauses& clauses); + + bool tryShannonDecomp (CircuitNode** follow, Clauses& clauses); + + bool tryInclusionExclusion (CircuitNode** follow, Clauses& clauses); + + bool tryIndepPartialGrounding (CircuitNode** follow, Clauses& 
clauses); + + bool tryIndepPartialGroundingAux (Clauses& clauses, ConstraintTree& ct, + LogVars& rootLogVars); + + bool tryAtomCounting (CircuitNode** follow, Clauses& clauses); + + void shatterCountedLogVars (Clauses& clauses); + + bool shatterCountedLogVarsAux (Clauses& clauses); + + bool shatterCountedLogVarsAux (Clauses& clauses, size_t idx1, size_t idx2); + + bool independentClause (Clause& clause, Clauses& otherClauses) const; + + bool independentLiteral (const Literal& lit, + const Literals& otherLits) const; + + LitLvTypesSet smoothCircuit (CircuitNode* node); + + void createSmoothNode (const LitLvTypesSet& lids, + CircuitNode** prev); + + vector getAllPossibleTypes (unsigned nrLogVars) const; + + bool containsTypes (const LogVarTypes& typesA, + const LogVarTypes& typesB) const; + + CircuitNodeType getCircuitNodeType (const CircuitNode* node) const; + + void exportToGraphViz (CircuitNode* node, ofstream&); + + void printClauses (CircuitNode* node, ofstream&, + string extraOptions = ""); + + string escapeNode (const CircuitNode* node) const; + + string getExplanationString (CircuitNode* node); + + CircuitNode* root_; + const LiftedWCNF* lwcnf_; + bool compilationSucceeded_; + Clauses backupClauses_; + unordered_map originClausesMap_; + unordered_map explanationMap_; +}; + + + +class LiftedKc : public LiftedSolver +{ + public: + LiftedKc (const ParfactorList& pfList) + : LiftedSolver(pfList) { } + + ~LiftedKc (void); + + Params solveQuery (const Grounds&); + + void printSolverFlags (void) const; + + private: + LiftedWCNF* lwcnf_; + LiftedCircuit* circuit_; + ParfactorList pfList_; +}; + +#endif // HORUS_LIFTEDKC_H + diff --git a/packages/CLPBN/horus2/LiftedOperations.cpp b/packages/CLPBN/horus2/LiftedOperations.cpp new file mode 100644 index 000000000..e0da2dd3b --- /dev/null +++ b/packages/CLPBN/horus2/LiftedOperations.cpp @@ -0,0 +1,271 @@ +#include "LiftedOperations.h" + + +void +LiftedOperations::shatterAgainstQuery ( + ParfactorList& pfList, + const Grounds& query) +{ + for (size_t i = 0; i < query.size(); i++) { + if (query[i].isAtom()) { + continue; + } + bool found = false; + Parfactors newPfs; + ParfactorList::iterator it = pfList.begin(); + while (it != pfList.end()) { + if ((*it)->containsGround (query[i])) { + found = true; + std::pair split; + LogVars queryLvs ( + (*it)->constr()->logVars().begin(), + (*it)->constr()->logVars().begin() + query[i].arity()); + split = (*it)->constr()->split (query[i].args()); + ConstraintTree* commCt = split.first; + ConstraintTree* exclCt = split.second; + newPfs.push_back (new Parfactor (*it, commCt)); + if (exclCt->empty() == false) { + newPfs.push_back (new Parfactor (*it, exclCt)); + } else { + delete exclCt; + } + it = pfList.removeAndDelete (it); + } else { + ++ it; + } + } + if (found == false) { + cerr << "Error: could not find a parfactor with ground " ; + cerr << "`" << query[i] << "'." 
<< endl; + exit (EXIT_FAILURE); + } + pfList.add (newPfs); + } + if (Globals::verbosity > 2) { + Util::printAsteriskLine(); + cout << "SHATTERED AGAINST THE QUERY" << endl; + for (size_t i = 0; i < query.size(); i++) { + cout << " -> " << query[i] << endl; + } + Util::printAsteriskLine(); + pfList.print(); + } +} + + + +void +LiftedOperations::runWeakBayesBall ( + ParfactorList& pfList, + const Grounds& query) +{ + queue todo; // groups to process + set done; // processed or in queue + for (size_t i = 0; i < query.size(); i++) { + ParfactorList::iterator it = pfList.begin(); + while (it != pfList.end()) { + PrvGroup group = (*it)->findGroup (query[i]); + if (group != numeric_limits::max()) { + todo.push (group); + done.insert (group); + break; + } + ++ it; + } + } + + set requiredPfs; + while (todo.empty() == false) { + PrvGroup group = todo.front(); + ParfactorList::iterator it = pfList.begin(); + while (it != pfList.end()) { + if (Util::contains (requiredPfs, *it) == false && + (*it)->containsGroup (group)) { + vector groups = (*it)->getAllGroups(); + for (size_t i = 0; i < groups.size(); i++) { + if (Util::contains (done, groups[i]) == false) { + todo.push (groups[i]); + done.insert (groups[i]); + } + } + requiredPfs.insert (*it); + } + ++ it; + } + todo.pop(); + } + + ParfactorList::iterator it = pfList.begin(); + bool foundNotRequired = false; + while (it != pfList.end()) { + if (Util::contains (requiredPfs, *it) == false) { + if (Globals::verbosity > 2) { + if (foundNotRequired == false) { + Util::printHeader ("PARFACTORS TO DISCARD"); + foundNotRequired = true; + } + (*it)->print(); + } + it = pfList.removeAndDelete (it); + } else { + ++ it; + } + } +} + + + +void +LiftedOperations::absorveEvidence ( + ParfactorList& pfList, + ObservedFormulas& obsFormulas) +{ + for (size_t i = 0; i < obsFormulas.size(); i++) { + Parfactors newPfs; + ParfactorList::iterator it = pfList.begin(); + while (it != pfList.end()) { + Parfactor* pf = *it; + it = pfList.remove (it); + Parfactors absorvedPfs = absorve (obsFormulas[i], pf); + if (absorvedPfs.empty() == false) { + if (absorvedPfs.size() == 1 && absorvedPfs[0] == 0) { + // just remove pf; + } else { + Util::addToVector (newPfs, absorvedPfs); + } + delete pf; + } else { + it = pfList.insertShattered (it, pf); + ++ it; + } + } + pfList.add (newPfs); + } + if (Globals::verbosity > 2 && obsFormulas.empty() == false) { + Util::printAsteriskLine(); + cout << "AFTER EVIDENCE ABSORVED" << endl; + for (size_t i = 0; i < obsFormulas.size(); i++) { + cout << " -> " << obsFormulas[i] << endl; + } + Util::printAsteriskLine(); + pfList.print(); + } +} + + + +Parfactors +LiftedOperations::countNormalize ( + Parfactor* g, + const LogVarSet& set) +{ + Parfactors normPfs; + if (set.empty()) { + normPfs.push_back (new Parfactor (*g)); + } else { + ConstraintTrees normCts = g->constr()->countNormalize (set); + for (size_t i = 0; i < normCts.size(); i++) { + normPfs.push_back (new Parfactor (g, normCts[i])); + } + } + return normPfs; +} + + + +Parfactor +LiftedOperations::calcGroundMultiplication (Parfactor pf) +{ + LogVarSet lvs = pf.constr()->logVarSet(); + lvs -= pf.constr()->singletons(); + Parfactors newPfs = {new Parfactor (pf)}; + for (size_t i = 0; i < lvs.size(); i++) { + Parfactors pfs = newPfs; + newPfs.clear(); + for (size_t j = 0; j < pfs.size(); j++) { + bool countedLv = pfs[j]->countedLogVars().contains (lvs[i]); + if (countedLv) { + pfs[j]->fullExpand (lvs[i]); + newPfs.push_back (pfs[j]); + } else { + ConstraintTrees cts = pfs[j]->constr()->ground 
(lvs[i]); + for (size_t k = 0; k < cts.size(); k++) { + newPfs.push_back (new Parfactor (pfs[j], cts[k])); + } + delete pfs[j]; + } + } + } + ParfactorList pfList (newPfs); + Parfactors groundShatteredPfs (pfList.begin(),pfList.end()); + for (size_t i = 1; i < groundShatteredPfs.size(); i++) { + groundShatteredPfs[0]->multiply (*groundShatteredPfs[i]); + } + return Parfactor (*groundShatteredPfs[0]); +} + + + +Parfactors +LiftedOperations::absorve ( + ObservedFormula& obsFormula, + Parfactor* g) +{ + Parfactors absorvedPfs; + const ProbFormulas& formulas = g->arguments(); + for (size_t i = 0; i < formulas.size(); i++) { + if (obsFormula.functor() == formulas[i].functor() && + obsFormula.arity() == formulas[i].arity()) { + + if (obsFormula.isAtom()) { + if (formulas.size() > 1) { + g->absorveEvidence (formulas[i], obsFormula.evidence()); + } else { + // hack to erase parfactor g + absorvedPfs.push_back (0); + } + break; + } + + g->constr()->moveToTop (formulas[i].logVars()); + std::pair res; + res = g->constr()->split ( + formulas[i].logVars(), + &(obsFormula.constr()), + obsFormula.constr().logVars()); + ConstraintTree* commCt = res.first; + ConstraintTree* exclCt = res.second; + + if (commCt->empty() == false) { + if (formulas.size() > 1) { + LogVarSet excl = g->exclusiveLogVars (i); + Parfactor tempPf (g, commCt); + Parfactors countNormPfs = LiftedOperations::countNormalize ( + &tempPf, excl); + for (size_t j = 0; j < countNormPfs.size(); j++) { + countNormPfs[j]->absorveEvidence ( + formulas[i], obsFormula.evidence()); + absorvedPfs.push_back (countNormPfs[j]); + } + } else { + delete commCt; + } + if (exclCt->empty() == false) { + absorvedPfs.push_back (new Parfactor (g, exclCt)); + } else { + delete exclCt; + } + if (absorvedPfs.empty()) { + // hack to erase parfactor g + absorvedPfs.push_back (0); + } + break; + } else { + delete commCt; + delete exclCt; + } + } + } + return absorvedPfs; +} + diff --git a/packages/CLPBN/horus2/LiftedOperations.h b/packages/CLPBN/horus2/LiftedOperations.h new file mode 100644 index 000000000..fc25363d3 --- /dev/null +++ b/packages/CLPBN/horus2/LiftedOperations.h @@ -0,0 +1,27 @@ +#ifndef HORUS_LIFTEDOPERATIONS_H +#define HORUS_LIFTEDOPERATIONS_H + +#include "ParfactorList.h" + +class LiftedOperations +{ + public: + static void shatterAgainstQuery ( + ParfactorList& pfList, const Grounds& query); + + static void runWeakBayesBall ( + ParfactorList& pfList, const Grounds&); + + static void absorveEvidence ( + ParfactorList& pfList, ObservedFormulas& obsFormulas); + + static Parfactors countNormalize (Parfactor*, const LogVarSet&); + + static Parfactor calcGroundMultiplication (Parfactor pf); + + private: + static Parfactors absorve (ObservedFormula&, Parfactor*); +}; + +#endif // HORUS_LIFTEDOPERATIONS_H + diff --git a/packages/CLPBN/horus2/LiftedSolver.h b/packages/CLPBN/horus2/LiftedSolver.h new file mode 100644 index 000000000..5429fc5b3 --- /dev/null +++ b/packages/CLPBN/horus2/LiftedSolver.h @@ -0,0 +1,27 @@ +#ifndef HORUS_LIFTEDSOLVER_H +#define HORUS_LIFTEDSOLVER_H + +#include "ParfactorList.h" +#include "Horus.h" + + +using namespace std; + +class LiftedSolver +{ + public: + LiftedSolver (const ParfactorList& pfList) + : parfactorList(pfList) { } + + virtual ~LiftedSolver() { } // ensure that subclass destructor is called + + virtual Params solveQuery (const Grounds& query) = 0; + + virtual void printSolverFlags (void) const = 0; + + protected: + const ParfactorList& parfactorList; +}; + +#endif // HORUS_LIFTEDSOLVER_H + diff --git 
a/packages/CLPBN/horus2/LiftedUtils.cpp b/packages/CLPBN/horus2/LiftedUtils.cpp new file mode 100644 index 000000000..9ad750f90 --- /dev/null +++ b/packages/CLPBN/horus2/LiftedUtils.cpp @@ -0,0 +1,131 @@ +#include + +#include +#include +#include + +#include "LiftedUtils.h" +#include "ConstraintTree.h" + + +namespace LiftedUtils { + + +unordered_map symbolDict; + + +Symbol +getSymbol (const string& symbolName) +{ + unordered_map::iterator it + = symbolDict.find (symbolName); + if (it != symbolDict.end()) { + return it->second; + } else { + symbolDict[symbolName] = symbolDict.size() - 1; + return symbolDict.size() - 1; + } +} + + + +void +printSymbolDictionary (void) +{ + unordered_map::const_iterator it + = symbolDict.begin(); + while (it != symbolDict.end()) { + cout << it->first << " -> " << it->second << endl; + ++ it; + } +} + +} + + + +ostream& operator<< (ostream &os, const Symbol& s) +{ + unordered_map::const_iterator it + = LiftedUtils::symbolDict.begin(); + while (it != LiftedUtils::symbolDict.end() && it->second != s) { + ++ it; + } + assert (it != LiftedUtils::symbolDict.end()); + os << it->first; + return os; +} + + + +ostream& operator<< (ostream &os, const LogVar& X) +{ + const string labels[] = { + "A", "B", "C", "D", "E", "F", + "G", "H", "I", "J", "K", "M" }; + (X >= 12) ? os << "X_" << X.id_ : os << labels[X]; + return os; +} + + + +ostream& operator<< (ostream &os, const Tuple& t) +{ + os << "(" ; + for (size_t i = 0; i < t.size(); i++) { + os << ((i != 0) ? "," : "") << t[i]; + } + os << ")" ; + return os; +} + + + +ostream& operator<< (ostream &os, const Ground& gr) +{ + os << gr.functor(); + os << "(" ; + for (size_t i = 0; i < gr.args().size(); i++) { + if (i != 0) os << ", " ; + os << gr.args()[i]; + } + os << ")" ; + return os; +} + + + +LogVars +Substitution::getDiscardedLogVars (void) const +{ + LogVars discardedLvs; + set doneLvs; + unordered_map::const_iterator it; + it = subs_.begin(); + while (it != subs_.end()) { + if (Util::contains (doneLvs, it->second)) { + discardedLvs.push_back (it->first); + } else { + doneLvs.insert (it->second); + } + ++ it; + } + return discardedLvs; +} + + + +ostream& operator<< (ostream &os, const Substitution& theta) +{ + unordered_map::const_iterator it; + os << "[" ; + it = theta.subs_.begin(); + while (it != theta.subs_.end()) { + if (it != theta.subs_.begin()) os << ", " ; + os << it->first << "->" << it->second ; + ++ it; + } + os << "]" ; + return os; +} + diff --git a/packages/CLPBN/horus2/LiftedUtils.h b/packages/CLPBN/horus2/LiftedUtils.h new file mode 100644 index 000000000..1f563eaf7 --- /dev/null +++ b/packages/CLPBN/horus2/LiftedUtils.h @@ -0,0 +1,166 @@ +#ifndef HORUS_LIFTEDUTILS_H +#define HORUS_LIFTEDUTILS_H + +#include +#include +#include +#include + + +#include "TinySet.h" +#include "Util.h" + + +using namespace std; + + +class Symbol +{ + public: + Symbol (void) : id_(Util::maxUnsigned()) { } + + Symbol (unsigned id) : id_(id) { } + + operator unsigned (void) const { return id_; } + + bool valid (void) const { return id_ != Util::maxUnsigned(); } + + static Symbol invalid (void) { return Symbol(); } + + friend ostream& operator<< (ostream &os, const Symbol& s); + + private: + unsigned id_; +}; + + +class LogVar +{ + public: + LogVar (void) : id_(Util::maxUnsigned()) { } + + LogVar (unsigned id) : id_(id) { } + + operator unsigned (void) const { return id_; } + + LogVar& operator++ (void) + { + assert (valid()); + id_ ++; + return *this; + } + + bool valid (void) const + { + return id_ != Util::maxUnsigned(); + } 
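+
+    // Usage sketch (illustrative, not taken from the sources):
+    //   Symbol s1 = LiftedUtils::getSymbol ("p1");
+    //   Symbol s2 = LiftedUtils::getSymbol ("p1");   // interned: s2 == s1
+    //   LogVar X (0), Y (12);
+    //   cout << X << " " << Y << endl;               // prints "A X_12"
+    // Both types convert implicitly to unsigned, which is what the TinySet
+    // typedefs and the std::hash specializations below rely on.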
+ + friend ostream& operator<< (ostream &os, const LogVar& X); + + private: + unsigned id_; +}; + + +namespace std { +template <> struct hash { + size_t operator() (const Symbol& s) const { + return std::hash() (s); + }}; + +template <> struct hash { + size_t operator() (const LogVar& X) const { + return std::hash() (X); + }}; +}; + + +typedef vector Symbols; +typedef vector Tuple; +typedef vector Tuples; +typedef vector LogVars; +typedef TinySet SymbolSet; +typedef TinySet LogVarSet; +typedef TinySet TupleSet; + + +ostream& operator<< (ostream &os, const Tuple& t); + + +namespace LiftedUtils { +Symbol getSymbol (const string&); +void printSymbolDictionary (void); +} + + + +class Ground +{ + public: + Ground (Symbol f) : functor_(f) { } + + Ground (Symbol f, const Symbols& args) : functor_(f), args_(args) { } + + Symbol functor (void) const { return functor_; } + + Symbols args (void) const { return args_; } + + size_t arity (void) const { return args_.size(); } + + bool isAtom (void) const { return args_.size() == 0; } + + friend ostream& operator<< (ostream &os, const Ground& gr); + + private: + Symbol functor_; + Symbols args_; +}; + +typedef vector Grounds; + + + +class Substitution +{ + public: + void add (LogVar X_old, LogVar X_new) + { + assert (Util::contains (subs_, X_old) == false); + subs_.insert (make_pair (X_old, X_new)); + } + + void rename (LogVar X_old, LogVar X_new) + { + assert (Util::contains (subs_, X_old)); + subs_.find (X_old)->second = X_new; + } + + LogVar newNameFor (LogVar X) const + { + unordered_map::const_iterator it; + it = subs_.find (X); + if (it != subs_.end()) { + return subs_.find (X)->second; + } + return X; + } + + bool containsReplacementFor (LogVar X) const + { + return Util::contains (subs_, X); + } + + size_t nrReplacements (void) const { return subs_.size(); } + + LogVars getDiscardedLogVars (void) const; + + friend ostream& operator<< (ostream &os, const Substitution& theta); + + private: + unordered_map subs_; + +}; + + +#endif // HORUS_LIFTEDUTILS_H + diff --git a/packages/CLPBN/horus2/LiftedVe.cpp b/packages/CLPBN/horus2/LiftedVe.cpp new file mode 100644 index 000000000..141006c46 --- /dev/null +++ b/packages/CLPBN/horus2/LiftedVe.cpp @@ -0,0 +1,728 @@ +#include +#include + +#include "LiftedVe.h" +#include "LiftedOperations.h" +#include "Histogram.h" +#include "Util.h" + + +vector +LiftedOperator::getValidOps ( + ParfactorList& pfList, + const Grounds& query) +{ + vector validOps; + vector multOps; + + multOps = ProductOperator::getValidOps (pfList); + validOps.insert (validOps.end(), multOps.begin(), multOps.end()); + + if (Globals::verbosity > 1 || multOps.empty()) { + vector sumOutOps; + vector countOps; + vector groundOps; + sumOutOps = SumOutOperator::getValidOps (pfList, query); + countOps = CountingOperator::getValidOps (pfList); + groundOps = GroundOperator::getValidOps (pfList); + validOps.insert (validOps.end(), sumOutOps.begin(), sumOutOps.end()); + validOps.insert (validOps.end(), countOps.begin(), countOps.end()); + validOps.insert (validOps.end(), groundOps.begin(), groundOps.end()); + } + + return validOps; +} + + + +void +LiftedOperator::printValidOps ( + ParfactorList& pfList, + const Grounds& query) +{ + vector validOps; + validOps = LiftedOperator::getValidOps (pfList, query); + for (size_t i = 0; i < validOps.size(); i++) { + cout << "-> " << validOps[i]->toString(); + delete validOps[i]; + } +} + + + +vector +LiftedOperator::getParfactorsWithGroup ( + ParfactorList& pfList, PrvGroup group) +{ + vector iters; + 
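+  // Collect an iterator for every parfactor that still contains this group.
+  // SumOutOperator::apply() multiplies all of them into a single parfactor
+  // before summing the group out, and SumOutOperator::toString() uses the
+  // first one to recover the formula and its tuple set.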
ParfactorList::iterator pflIt = pfList.begin(); + while (pflIt != pfList.end()) { + if ((*pflIt)->containsGroup (group)) { + iters.push_back (pflIt); + } + ++ pflIt; + } + return iters; +} + + + +double +ProductOperator::getLogCost (void) +{ + return std::log (0.0); +} + + + +void +ProductOperator::apply (void) +{ + Parfactor* g1 = *g1_; + Parfactor* g2 = *g2_; + g1->multiply (*g2); + pfList_.remove (g1_); + pfList_.removeAndDelete (g2_); + pfList_.addShattered (g1); +} + + + +vector +ProductOperator::getValidOps (ParfactorList& pfList) +{ + vector validOps; + ParfactorList::iterator it1 = pfList.begin(); + ParfactorList::iterator penultimate = -- pfList.end(); + set pfs; + while (it1 != penultimate) { + if (Util::contains (pfs, *it1)) { + ++ it1; + continue; + } + ParfactorList::iterator it2 = it1; + ++ it2; + while (it2 != pfList.end()) { + if (Util::contains (pfs, *it2)) { + ++ it2; + continue; + } else { + if (validOp (*it1, *it2)) { + pfs.insert (*it1); + pfs.insert (*it2); + validOps.push_back (new ProductOperator ( + it1, it2, pfList)); + if (Globals::verbosity < 2) { + return validOps; + } + break; + } + } + ++ it2; + } + ++ it1; + } + return validOps; +} + + + +string +ProductOperator::toString (void) +{ + stringstream ss; + ss << "just multiplicate " ; + ss << (*g1_)->getAllGroups(); + ss << " x " ; + ss << (*g2_)->getAllGroups(); + ss << " [cost=" << std::exp (getLogCost()) << "]" << endl; + return ss.str(); +} + + + +bool +ProductOperator::validOp (Parfactor* g1, Parfactor* g2) +{ + TinySet g1_gs (g1->getAllGroups()); + TinySet g2_gs (g2->getAllGroups()); + if (g1_gs.contains (g2_gs) || g2_gs.contains (g1_gs)) { + TinySet intersect = g1_gs & g2_gs; + for (size_t i = 0; i < intersect.size(); i++) { + if (g1->nrFormulasWithGroup (intersect[i]) != 1 || + g2->nrFormulasWithGroup (intersect[i]) != 1) { + return false; + } + size_t idx1 = g1->indexOfGroup (intersect[i]); + size_t idx2 = g2->indexOfGroup (intersect[i]); + if (g1->range (idx1) != g2->range (idx2)) { + return false; + } + } + return Parfactor::canMultiply (g1, g2); + } + return false; +} + + + +double +SumOutOperator::getLogCost (void) +{ + TinySet groupSet; + ParfactorList::const_iterator pfIter = pfList_.begin(); + unsigned nrProdFactors = 0; + while (pfIter != pfList_.end()) { + if ((*pfIter)->containsGroup (group_)) { + vector groups = (*pfIter)->getAllGroups(); + groupSet |= TinySet (groups); + ++ nrProdFactors; + } + ++ pfIter; + } + if (nrProdFactors == 1) { + // best possible case + return std::log (0.0); + } + double cost = 1.0; + for (size_t i = 0; i < groupSet.size(); i++) { + pfIter = pfList_.begin(); + while (pfIter != pfList_.end()) { + if ((*pfIter)->containsGroup (groupSet[i])) { + size_t idx = (*pfIter)->indexOfGroup (groupSet[i]); + cost *= (*pfIter)->range (idx); + break; + } + ++ pfIter; + } + } + return std::log (cost); +} + + + +void +SumOutOperator::apply (void) +{ + vector iters; + iters = getParfactorsWithGroup (pfList_, group_); + Parfactor* product = *(iters[0]); + pfList_.remove (iters[0]); + for (size_t i = 1; i < iters.size(); i++) { + product->multiply (**(iters[i])); + pfList_.removeAndDelete (iters[i]); + } + if (product->nrArguments() == 1) { + delete product; + return; + } + size_t fIdx = product->indexOfGroup (group_); + LogVarSet excl = product->exclusiveLogVars (fIdx); + if (product->constr()->isCountNormalized (excl)) { + product->sumOutIndex (fIdx); + pfList_.addShattered (product); + } else { + Parfactors pfs = LiftedOperations::countNormalize (product, excl); + for (size_t i = 
0; i < pfs.size(); i++) { + pfs[i]->sumOutIndex (fIdx); + pfList_.add (pfs[i]); + } + delete product; + } +} + + + +vector +SumOutOperator::getValidOps ( + ParfactorList& pfList, + const Grounds& query) +{ + vector validOps; + set allGroups; + ParfactorList::const_iterator it = pfList.begin(); + while (it != pfList.end()) { + const ProbFormulas& formulas = (*it)->arguments(); + for (size_t i = 0; i < formulas.size(); i++) { + allGroups.insert (formulas[i].group()); + } + ++ it; + } + set::const_iterator groupIt = allGroups.begin(); + while (groupIt != allGroups.end()) { + if (validOp (*groupIt, pfList, query)) { + validOps.push_back (new SumOutOperator (*groupIt, pfList)); + } + ++ groupIt; + } + return validOps; +} + + + +string +SumOutOperator::toString (void) +{ + stringstream ss; + vector pfIters; + pfIters = getParfactorsWithGroup (pfList_, group_); + size_t idx = (*pfIters[0])->indexOfGroup (group_); + ProbFormula f = (*pfIters[0])->argument (idx); + TupleSet tupleSet = (*pfIters[0])->constr()->tupleSet (f.logVars()); + ss << "sum out " << f.functor() << "/" << f.arity(); + ss << "|" << tupleSet << " (group " << group_ << ")"; + ss << " [cost=" << std::exp (getLogCost()) << "]" << endl; + return ss.str(); +} + + + +bool +SumOutOperator::validOp ( + PrvGroup group, + ParfactorList& pfList, + const Grounds& query) +{ + vector pfIters; + pfIters = getParfactorsWithGroup (pfList, group); + if (isToEliminate (*pfIters[0], group, query) == false) { + return false; + } + int range = -1; + for (size_t i = 0; i < pfIters.size(); i++) { + if ((*pfIters[i])->nrFormulasWithGroup (group) > 1) { + return false; + } + size_t fIdx = (*pfIters[i])->indexOfGroup (group); + if ((*pfIters[i])->argument (fIdx).contains ( + (*pfIters[i])->elimLogVars()) == false) { + return false; + } + if (range == -1) { + range = (*pfIters[i])->range (fIdx); + } else if ((int)(*pfIters[i])->range (fIdx) != range) { + return false; + } + } + return true; +} + + + +bool +SumOutOperator::isToEliminate ( + Parfactor* g, + PrvGroup group, + const Grounds& query) +{ + size_t fIdx = g->indexOfGroup (group); + const ProbFormula& formula = g->argument (fIdx); + bool toElim = true; + for (size_t i = 0; i < query.size(); i++) { + if (formula.functor() == query[i].functor() && + formula.arity() == query[i].arity()) { + g->constr()->moveToTop (formula.logVars()); + if (g->constr()->containsTuple (query[i].args())) { + toElim = false; + break; + } + } + } + return toElim; +} + + + +double +CountingOperator::getLogCost (void) +{ + double cost = 0.0; + size_t fIdx = (*pfIter_)->indexOfLogVar (X_); + unsigned range = (*pfIter_)->range (fIdx); + unsigned size = (*pfIter_)->size() / range; + TinySet counts; + counts = (*pfIter_)->constr()->getConditionalCounts (X_); + for (size_t i = 0; i < counts.size(); i++) { + cost += size * HistogramSet::nrHistograms (counts[i], range); + } + PrvGroup group = (*pfIter_)->argument (fIdx).group(); + size_t lvIndex = Util::indexOf ( + (*pfIter_)->argument (fIdx).logVars(), X_); + assert (lvIndex != (*pfIter_)->argument (fIdx).logVars().size()); + ParfactorList::iterator pfIter = pfList_.begin(); + while (pfIter != pfList_.end()) { + if (pfIter != pfIter_) { + size_t fIdx2 = (*pfIter)->indexOfGroup (group); + if (fIdx2 != (*pfIter)->nrArguments()) { + LogVar Y = ((*pfIter)->argument (fIdx2).logVars()[lvIndex]); + if ((*pfIter)->canCountConvert (Y) == false) { + // the real cost should be the cost of grounding Y + cost *= 10.0; + } + } + } + ++ pfIter; + } + return std::log (cost); +} + + + +void 
+CountingOperator::apply (void) +{ + if ((*pfIter_)->constr()->isCountNormalized (X_)) { + (*pfIter_)->countConvert (X_); + } else { + Parfactor* pf = *pfIter_; + pfList_.remove (pfIter_); + Parfactors pfs = LiftedOperations::countNormalize (pf, X_); + for (size_t i = 0; i < pfs.size(); i++) { + unsigned condCount = pfs[i]->constr()->getConditionalCount (X_); + bool cartProduct = pfs[i]->constr()->isCartesianProduct ( + pfs[i]->countedLogVars() | X_); + if (condCount > 1 && cartProduct) { + pfs[i]->countConvert (X_); + } + pfList_.add (pfs[i]); + } + delete pf; + } +} + + + +vector +CountingOperator::getValidOps (ParfactorList& pfList) +{ + vector validOps; + ParfactorList::iterator it = pfList.begin(); + while (it != pfList.end()) { + LogVarSet candidates = (*it)->uncountedLogVars(); + for (size_t i = 0; i < candidates.size(); i++) { + if (validOp (*it, candidates[i])) { + validOps.push_back (new CountingOperator ( + it, candidates[i], pfList)); + } else { + } + } + ++ it; + } + return validOps; +} + + + +string +CountingOperator::toString (void) +{ + stringstream ss; + ss << "count convert " << X_ << " in " ; + ss << (*pfIter_)->getLabel(); + ss << " [cost=" << std::exp (getLogCost()) << "]" << endl; + Parfactors pfs = LiftedOperations::countNormalize (*pfIter_, X_); + if ((*pfIter_)->constr()->isCountNormalized (X_) == false) { + for (size_t i = 0; i < pfs.size(); i++) { + ss << " º " << pfs[i]->getLabel() << endl; + } + } + for (size_t i = 0; i < pfs.size(); i++) { + delete pfs[i]; + } + return ss.str(); +} + + + +bool +CountingOperator::validOp (Parfactor* g, LogVar X) +{ + if (g->nrFormulas (X) != 1) { + return false; + } + size_t fIdx = g->indexOfLogVar (X); + if (g->argument (fIdx).isCounting()) { + return false; + } + bool countNormalized = g->constr()->isCountNormalized (X); + if (countNormalized) { + return g->canCountConvert (X); + } + return true; +} + + + +double +GroundOperator::getLogCost (void) +{ + vector> affectedFormulas; + affectedFormulas = getAffectedFormulas(); + // cout << "affected formulas: " ; + // for (size_t i = 0; i < affectedFormulas.size(); i++) { + // cout << affectedFormulas[i].first << ":" ; + // cout << affectedFormulas[i].second << " " ; + // } + // cout << "cost =" ; + double totalCost = std::log (0.0); + ParfactorList::iterator pflIt = pfList_.begin(); + while (pflIt != pfList_.end()) { + Parfactor* pf = *pflIt; + double reps = 0.0; + double pfSize = std::log (pf->size()); + bool willBeAffected = false; + LogVarSet lvsToGround; + for (size_t i = 0; i < affectedFormulas.size(); i++) { + size_t fIdx = pf->indexOfGroup (affectedFormulas[i].first); + if (fIdx != pf->nrArguments()) { + ProbFormula f = pf->argument (fIdx); + LogVar X = f.logVars()[affectedFormulas[i].second]; + bool isCountingLv = pf->countedLogVars().contains (X); + if (isCountingLv) { + unsigned nrHists = pf->range (fIdx); + unsigned nrSymbols = pf->constr()->getConditionalCount (X); + unsigned range = pf->argument (fIdx).range(); + double power = std::log (range) * nrSymbols; + pfSize = (pfSize - std::log (nrHists)) + power; + } else { + if (lvsToGround.contains (X) == false) { + reps += std::log (pf->constr()->nrSymbols (X)); + lvsToGround.insert (X); + } + } + willBeAffected = true; + } + } + if (willBeAffected) { + // cout << " + " << std::exp (reps) << "x" << std::exp (pfSize); + double pfCost = reps + pfSize; + totalCost = Util::logSum (totalCost, pfCost); + } + ++ pflIt; + } + // cout << endl; + return totalCost + 3; +} + + + +void +GroundOperator::apply (void) +{ + 
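+  // Ground the lvIndex_-th logical variable of group_: a counted log var
+  // is fully expanded in place; otherwise the constraint tree is split on
+  // that variable and one parfactor is added per resulting constraint.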
ParfactorList::iterator pfIter; + pfIter = getParfactorsWithGroup (pfList_, group_).front(); + Parfactor* pf = *pfIter; + size_t idx = pf->indexOfGroup (group_); + ProbFormula f = pf->argument (idx); + LogVar X = f.logVars()[lvIndex_]; + bool countedLv = pf->countedLogVars().contains (X); + pfList_.remove (pfIter); + if (countedLv) { + pf->fullExpand (X); + pfList_.add (pf); + } else { + ConstraintTrees cts = pf->constr()->ground (X); + for (size_t i = 0; i < cts.size(); i++) { + pfList_.add (new Parfactor (pf, cts[i])); + } + delete pf; + } + ParfactorList::iterator pflIt = pfList_.begin(); + while (pflIt != pfList_.end()) { + (*pflIt)->simplifyGrounds(); + ++ pflIt; + } +} + + + +vector +GroundOperator::getValidOps (ParfactorList& pfList) +{ + vector validOps; + set allGroups; + ParfactorList::const_iterator it = pfList.begin(); + while (it != pfList.end()) { + const ProbFormulas& formulas = (*it)->arguments(); + for (size_t i = 0; i < formulas.size(); i++) { + if (Util::contains (allGroups, formulas[i].group()) == false) { + const LogVars& lvs = formulas[i].logVars(); + for (size_t j = 0; j < lvs.size(); j++) { + if ((*it)->constr()->isSingleton (lvs[j]) == false) { + validOps.push_back (new GroundOperator ( + formulas[i].group(), j, pfList)); + } + } + allGroups.insert (formulas[i].group()); + } + } + ++ it; + } + return validOps; +} + + + +string +GroundOperator::toString (void) +{ + stringstream ss; + vector pfIters; + pfIters = getParfactorsWithGroup (pfList_, group_); + Parfactor* pf = *(getParfactorsWithGroup (pfList_, group_).front()); + size_t idx = pf->indexOfGroup (group_); + ProbFormula f = pf->argument (idx); + LogVar lv = f.logVars()[lvIndex_]; + TupleSet tupleSet = pf->constr()->tupleSet ({lv}); + string pos = "th"; + if (lvIndex_ == 0) { + pos = "st" ; + } else if (lvIndex_ == 1) { + pos = "nd" ; + } else if (lvIndex_ == 2) { + pos = "rd" ; + } + ss << "grounding " << lvIndex_ + 1 << pos << " log var in " ; + ss << f.functor() << "/" << f.arity(); + ss << "|" << tupleSet << " (group " << group_ << ")"; + ss << " [cost=" << std::exp (getLogCost()) << "]" << endl; + return ss.str(); +} + + + +vector> +GroundOperator::getAffectedFormulas (void) +{ + vector> affectedFormulas; + affectedFormulas.push_back (make_pair (group_, lvIndex_)); + queue> q; + q.push (make_pair (group_, lvIndex_)); + while (q.empty() == false) { + pair front = q.front(); + ParfactorList::iterator pflIt = pfList_.begin(); + while (pflIt != pfList_.end()) { + size_t idx = (*pflIt)->indexOfGroup (front.first); + if (idx != (*pflIt)->nrArguments()) { + ProbFormula f = (*pflIt)->argument (idx); + LogVar X = f.logVars()[front.second]; + const ProbFormulas& fs = (*pflIt)->arguments(); + for (size_t i = 0; i < fs.size(); i++) { + if (i != idx && fs[i].contains (X)) { + pair pair = make_pair ( + fs[i].group(), fs[i].indexOf (X)); + if (Util::contains (affectedFormulas, pair) == false) { + q.push (pair); + affectedFormulas.push_back (pair); + } + } + } + } + ++ pflIt; + } + q.pop(); + } + return affectedFormulas; +} + + + +Params +LiftedVe::solveQuery (const Grounds& query) +{ + assert (query.empty() == false); + pfList_ = parfactorList; + runSolver (query); + (*pfList_.begin())->normalize(); + Params params = (*pfList_.begin())->params(); + if (Globals::logDomain) { + Util::exp (params); + } + return params; +} + + + +void +LiftedVe::printSolverFlags (void) const +{ + stringstream ss; + ss << "lve [" ; + ss << "log_domain=" << Util::toString (Globals::logDomain); + ss << "]" ; + cout << ss.str() << endl; +} + + 
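+// Main loop of lifted variable elimination (defined below): shatter the
+// parfactor list against the query, prune it with weak Bayes-ball, then
+// repeatedly pick the valid lifted operation with the lowest log cost
+// (product, sum-out, counting conversion or grounding) and apply it until
+// none is left; any remaining parfactors are finally multiplied together.
+// In outline:
+//
+//   while ((op = getBestOperation (query)) != 0) {
+//     op->apply();
+//     delete op;
+//   }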
+ +void +LiftedVe::runSolver (const Grounds& query) +{ + largestCost_ = std::log (0); + LiftedOperations::shatterAgainstQuery (pfList_, query); + LiftedOperations::runWeakBayesBall (pfList_, query); + while (true) { + if (Globals::verbosity > 2) { + Util::printDashedLine(); + pfList_.print(); + if (Globals::verbosity > 3) { + LiftedOperator::printValidOps (pfList_, query); + } + } + LiftedOperator* op = getBestOperation (query); + if (op == 0) { + break; + } + if (Globals::verbosity > 1) { + cout << "best operation: " << op->toString(); + if (Globals::verbosity > 2) { + cout << endl; + } + } + op->apply(); + delete op; + } + assert (pfList_.size() > 0); + if (pfList_.size() > 1) { + ParfactorList::iterator pfIter = pfList_.begin(); + ++ pfIter; + while (pfIter != pfList_.end()) { + (*pfList_.begin())->multiply (**pfIter); + ++ pfIter; + } + } + if (Globals::verbosity > 0) { + cout << "largest cost = " << std::exp (largestCost_) << endl; + cout << endl; + } + (*pfList_.begin())->simplifyGrounds(); + (*pfList_.begin())->reorderAccordingGrounds (query); +} + + + +LiftedOperator* +LiftedVe::getBestOperation (const Grounds& query) +{ + double bestCost = 0.0; + LiftedOperator* bestOp = 0; + vector validOps; + validOps = LiftedOperator::getValidOps (pfList_, query); + for (size_t i = 0; i < validOps.size(); i++) { + double cost = validOps[i]->getLogCost(); + if ((bestOp == 0) || (cost < bestCost)) { + bestOp = validOps[i]; + bestCost = cost; + } + } + if (bestCost > largestCost_) { + largestCost_ = bestCost; + } + for (size_t i = 0; i < validOps.size(); i++) { + if (validOps[i] != bestOp) { + delete validOps[i]; + } + } + return bestOp; +} + diff --git a/packages/CLPBN/horus2/LiftedVe.h b/packages/CLPBN/horus2/LiftedVe.h new file mode 100644 index 000000000..7d9974294 --- /dev/null +++ b/packages/CLPBN/horus2/LiftedVe.h @@ -0,0 +1,155 @@ +#ifndef HORUS_LIFTEDVE_H +#define HORUS_LIFTEDVE_H + +#include "LiftedSolver.h" +#include "ParfactorList.h" + + +class LiftedOperator +{ + public: + virtual ~LiftedOperator (void) { } + + virtual double getLogCost (void) = 0; + + virtual void apply (void) = 0; + + virtual string toString (void) = 0; + + static vector getValidOps ( + ParfactorList&, const Grounds&); + + static void printValidOps (ParfactorList&, const Grounds&); + + static vector getParfactorsWithGroup ( + ParfactorList&, PrvGroup group); +}; + + + +class ProductOperator : public LiftedOperator +{ + public: + ProductOperator ( + ParfactorList::iterator g1, ParfactorList::iterator g2, + ParfactorList& pfList) : g1_(g1), g2_(g2), pfList_(pfList) { } + + double getLogCost (void); + + void apply (void); + + static vector getValidOps (ParfactorList&); + + string toString (void); + + private: + static bool validOp (Parfactor*, Parfactor*); + + ParfactorList::iterator g1_; + ParfactorList::iterator g2_; + ParfactorList& pfList_; +}; + + + +class SumOutOperator : public LiftedOperator +{ + public: + SumOutOperator (PrvGroup group, ParfactorList& pfList) + : group_(group), pfList_(pfList) { } + + double getLogCost (void); + + void apply (void); + + static vector getValidOps ( + ParfactorList&, const Grounds&); + + string toString (void); + + private: + static bool validOp (PrvGroup, ParfactorList&, const Grounds&); + + static bool isToEliminate (Parfactor*, PrvGroup, const Grounds&); + + PrvGroup group_; + ParfactorList& pfList_; +}; + + + +class CountingOperator : public LiftedOperator +{ + public: + CountingOperator ( + ParfactorList::iterator pfIter, + LogVar X, + ParfactorList& pfList) + : 
pfIter_(pfIter), X_(X), pfList_(pfList) { } + + double getLogCost (void); + + void apply (void); + + static vector getValidOps (ParfactorList&); + + string toString (void); + + private: + static bool validOp (Parfactor*, LogVar); + + ParfactorList::iterator pfIter_; + LogVar X_; + ParfactorList& pfList_; +}; + + + +class GroundOperator : public LiftedOperator +{ + public: + GroundOperator ( + PrvGroup group, + unsigned lvIndex, + ParfactorList& pfList) + : group_(group), lvIndex_(lvIndex), pfList_(pfList) { } + + double getLogCost (void); + + void apply (void); + + static vector getValidOps (ParfactorList&); + + string toString (void); + + private: + vector> getAffectedFormulas (void); + + PrvGroup group_; + unsigned lvIndex_; + ParfactorList& pfList_; +}; + + + +class LiftedVe : public LiftedSolver +{ + public: + LiftedVe (const ParfactorList& pfList) + : LiftedSolver(pfList) { } + + Params solveQuery (const Grounds&); + + void printSolverFlags (void) const; + + private: + void runSolver (const Grounds&); + + LiftedOperator* getBestOperation (const Grounds&); + + ParfactorList pfList_; + double largestCost_; +}; + +#endif // HORUS_LIFTEDVE_H + diff --git a/packages/CLPBN/horus2/LiftedWCNF.cpp b/packages/CLPBN/horus2/LiftedWCNF.cpp new file mode 100644 index 000000000..ba7097dbf --- /dev/null +++ b/packages/CLPBN/horus2/LiftedWCNF.cpp @@ -0,0 +1,658 @@ +#include "LiftedWCNF.h" +#include "ConstraintTree.h" +#include "Indexer.h" + + + +bool +Literal::isGround (ConstraintTree constr, LogVarSet ipgLogVars) const +{ + if (logVars_.size() == 0) { + return true; + } + LogVarSet lvs (logVars_); + lvs -= ipgLogVars; + return constr.singletons().contains (lvs); +} + + + +size_t +Literal::indexOfLogVar (LogVar X) const +{ + return Util::indexOf (logVars_, X); +} + + + +string +Literal::toString ( + LogVarSet ipgLogVars, + LogVarSet posCountedLvs, + LogVarSet negCountedLvs) const +{ + stringstream ss; + negated_ ? ss << "¬" : ss << "" ; + ss << "λ" ; + ss << lid_ ; + if (logVars_.empty() == false) { + ss << "(" ; + for (size_t i = 0; i < logVars_.size(); i++) { + if (i != 0) ss << ","; + if (posCountedLvs.contains (logVars_[i])) { + ss << "+" << logVars_[i]; + } else if (negCountedLvs.contains (logVars_[i])) { + ss << "-" << logVars_[i]; + } else if (ipgLogVars.contains (logVars_[i])) { + LogVar X = logVars_[i]; + const string labels[] = { + "a", "b", "c", "d", "e", "f", + "g", "h", "i", "j", "k", "m" }; + (X >= 12) ? 
ss << "x_" << X : ss << labels[X]; + } else { + ss << logVars_[i]; + } + } + ss << ")" ; + } + return ss.str(); +} + + + +std::ostream& +operator<< (ostream &os, const Literal& lit) +{ + os << lit.toString(); + return os; +} + + + +void +Clause::addLiteralComplemented (const Literal& lit) +{ + assert (constr_.logVarSet().contains (lit.logVars())); + literals_.push_back (lit); + literals_.back().complement(); +} + + + +bool +Clause::containsLiteral (LiteralId lid) const +{ + for (size_t i = 0; i < literals_.size(); i++) { + if (literals_[i].lid() == lid) { + return true; + } + } + return false; +} + + + +bool +Clause::containsPositiveLiteral ( + LiteralId lid, + const LogVarTypes& types) const +{ + for (size_t i = 0; i < literals_.size(); i++) { + if (literals_[i].lid() == lid + && literals_[i].isPositive() + && logVarTypes (i) == types) { + return true; + } + } + return false; +} + + + +bool +Clause::containsNegativeLiteral ( + LiteralId lid, + const LogVarTypes& types) const +{ + for (size_t i = 0; i < literals_.size(); i++) { + if (literals_[i].lid() == lid + && literals_[i].isNegative() + && logVarTypes (i) == types) { + return true; + } + } + return false; +} + + + +void +Clause::removeLiterals (LiteralId lid) +{ + size_t i = 0; + while (i != literals_.size()) { + if (literals_[i].lid() == lid) { + removeLiteral (i); + } else { + i ++; + } + } +} + + + +void +Clause::removePositiveLiterals ( + LiteralId lid, + const LogVarTypes& types) +{ + size_t i = 0; + while (i != literals_.size()) { + if (literals_[i].lid() == lid + && literals_[i].isPositive() + && logVarTypes (i) == types) { + removeLiteral (i); + } else { + i ++; + } + } +} + + + +void +Clause::removeNegativeLiterals ( + LiteralId lid, + const LogVarTypes& types) +{ + size_t i = 0; + while (i != literals_.size()) { + if (literals_[i].lid() == lid + && literals_[i].isNegative() + && logVarTypes (i) == types) { + removeLiteral (i); + } else { + i ++; + } + } +} + + + +bool +Clause::isCountedLogVar (LogVar X) const +{ + assert (constr_.logVarSet().contains (X)); + return posCountedLvs_.contains (X) + || negCountedLvs_.contains (X); +} + + + +bool +Clause::isPositiveCountedLogVar (LogVar X) const +{ + assert (constr_.logVarSet().contains (X)); + return posCountedLvs_.contains (X); +} + + + +bool +Clause::isNegativeCountedLogVar (LogVar X) const +{ + assert (constr_.logVarSet().contains (X)); + return negCountedLvs_.contains (X); +} + + + +bool +Clause::isIpgLogVar (LogVar X) const +{ + assert (constr_.logVarSet().contains (X)); + return ipgLvs_.contains (X); +} + + + +TinySet +Clause::lidSet (void) const +{ + TinySet lidSet; + for (size_t i = 0; i < literals_.size(); i++) { + lidSet.insert (literals_[i].lid()); + } + return lidSet; +} + + + +LogVarSet +Clause::ipgCandidates (void) const +{ + LogVarSet candidates; + LogVarSet allLvs = constr_.logVarSet(); + allLvs -= ipgLvs_; + allLvs -= posCountedLvs_; + allLvs -= negCountedLvs_; + for (size_t i = 0; i < allLvs.size(); i++) { + bool valid = true; + for (size_t j = 0; j < literals_.size(); j++) { + if (Util::contains (literals_[j].logVars(), allLvs[i]) == false) { + valid = false; + break; + } + } + if (valid) { + candidates.insert (allLvs[i]); + } + } + return candidates; +} + + + +LogVarTypes +Clause::logVarTypes (size_t litIdx) const +{ + LogVarTypes types; + const LogVars& lvs = literals_[litIdx].logVars(); + for (size_t i = 0; i < lvs.size(); i++) { + if (posCountedLvs_.contains (lvs[i])) { + types.push_back (LogVarType::POS_LV); + } else if (negCountedLvs_.contains (lvs[i])) { 
+ types.push_back (LogVarType::NEG_LV); + } else { + types.push_back (LogVarType::FULL_LV); + } + } + return types; +} + + + +void +Clause::removeLiteral (size_t litIdx) +{ + LogVarSet lvsToRemove = literals_[litIdx].logVarSet() + - getLogVarSetExcluding (litIdx); + ipgLvs_ -= lvsToRemove; + posCountedLvs_ -= lvsToRemove; + negCountedLvs_ -= lvsToRemove; + constr_.remove (lvsToRemove); + literals_.erase (literals_.begin() + litIdx); +} + + + +bool +Clause::independentClauses (Clause& c1, Clause& c2) +{ + const Literals& lits1 = c1.literals(); + const Literals& lits2 = c2.literals(); + for (size_t i = 0; i < lits1.size(); i++) { + for (size_t j = 0; j < lits2.size(); j++) { + if (lits1[i].lid() == lits2[j].lid() + && c1.logVarTypes (i) == c2.logVarTypes (j)) { + return false; + } + } + } + return true; +} + + + +Clauses +Clause::copyClauses (const Clauses& clauses) +{ + Clauses copy; + copy.reserve (clauses.size()); + for (size_t i = 0; i < clauses.size(); i++) { + copy.push_back (new Clause (*clauses[i])); + } + return copy; +} + + + +void +Clause::printClauses (const Clauses& clauses) +{ + for (size_t i = 0; i < clauses.size(); i++) { + cout << *clauses[i] << endl; + } +} + + + +void +Clause::deleteClauses (Clauses& clauses) +{ + for (size_t i = 0; i < clauses.size(); i++) { + delete clauses[i]; + } +} + + + +std::ostream& +operator<< (ostream &os, const Clause& clause) +{ + for (unsigned i = 0; i < clause.literals_.size(); i++) { + if (i != 0) os << " v " ; + os << clause.literals_[i].toString (clause.ipgLvs_, + clause.posCountedLvs_, clause.negCountedLvs_); + } + if (clause.constr_.empty() == false) { + ConstraintTree copy (clause.constr_); + copy.moveToTop (copy.logVarSet().elements()); + os << " | " << copy.tupleSet(); + } + return os; +} + + + +LogVarSet +Clause::getLogVarSetExcluding (size_t idx) const +{ + LogVarSet lvs; + for (size_t i = 0; i < literals_.size(); i++) { + if (i != idx) { + lvs |= literals_[i].logVars(); + } + } + return lvs; +} + + + +std::ostream& +operator<< (std::ostream &os, const LitLvTypes& lit) +{ + os << lit.lid_ << "<" ; + for (size_t i = 0; i < lit.lvTypes_.size(); i++) { + switch (lit.lvTypes_[i]) { + case LogVarType::FULL_LV: os << "F" ; break; + case LogVarType::POS_LV: os << "P" ; break; + case LogVarType::NEG_LV: os << "N" ; break; + } + } + os << ">" ; + return os; +} + + + +LiftedWCNF::LiftedWCNF (const ParfactorList& pfList) + : freeLiteralId_(0), pfList_(pfList) +{ + addIndicatorClauses (pfList); + addParameterClauses (pfList); + + /* + // INCLUSION-EXCLUSION TEST + clauses_.clear(); + vector> names = { + {"a1","b1"},{"a2","b2"} + }; + Clause* c1 = new Clause (names); + c1->addLiteral (Literal (0, LogVars() = {0})); + c1->addLiteral (Literal (1, LogVars() = {1})); + clauses_.push_back(c1); + */ + + /* + // INDEPENDENT PARTIAL GROUND TEST + clauses_.clear(); + vector> names = { + {"a1","b1"},{"a2","b2"} + }; + Clause* c1 = new Clause (names); + c1->addLiteral (Literal (0, LogVars() = {0,1})); + c1->addLiteral (Literal (1, LogVars() = {0,1})); + clauses_.push_back(c1); + Clause* c2 = new Clause (names); + c2->addLiteral (Literal (2, LogVars() = {0})); + c2->addLiteral (Literal (1, LogVars() = {0,1})); + clauses_.push_back(c2); + */ + + /* + // ATOM-COUNTING TEST + clauses_.clear(); + vector> names = { + {"p1","p1"},{"p1","p2"},{"p1","p3"}, + {"p2","p1"},{"p2","p2"},{"p2","p3"}, + {"p3","p1"},{"p3","p2"},{"p3","p3"} + }; + Clause* c1 = new Clause (names); + c1->addLiteral (Literal (0, LogVars() = {0})); + c1->addLiteralComplemented (Literal (1, 
{0,1})); + clauses_.push_back(c1); + Clause* c2 = new Clause (names); + c2->addLiteral (Literal (0, LogVars()={0})); + c2->addLiteralComplemented (Literal (1, {1,0})); + clauses_.push_back(c2); + */ + + if (Globals::verbosity > 1) { + cout << "FORMULA INDICATORS:" << endl; + printFormulaIndicators(); + cout << endl; + cout << "WEIGHTED INDICATORS:" << endl; + printWeights(); + cout << endl; + cout << "CLAUSES:" << endl; + printClauses(); + cout << endl; + } +} + + + +LiftedWCNF::~LiftedWCNF (void) +{ + Clause::deleteClauses (clauses_); +} + + + +void +LiftedWCNF::addWeight (LiteralId lid, double posW, double negW) +{ + weights_[lid] = make_pair (posW, negW); +} + + + +double +LiftedWCNF::posWeight (LiteralId lid) const +{ + unordered_map>::const_iterator it; + it = weights_.find (lid); + return it != weights_.end() ? it->second.first : LogAware::one(); +} + + + +double +LiftedWCNF::negWeight (LiteralId lid) const +{ + unordered_map>::const_iterator it; + it = weights_.find (lid); + return it != weights_.end() ? it->second.second : LogAware::one(); +} + + + +vector +LiftedWCNF::prvGroupLiterals (PrvGroup prvGroup) +{ + assert (Util::contains (map_, prvGroup)); + return map_[prvGroup]; +} + + + +Clause* +LiftedWCNF::createClause (LiteralId lid) const +{ + for (size_t i = 0; i < clauses_.size(); i++) { + const Literals& literals = clauses_[i]->literals(); + for (size_t j = 0; j < literals.size(); j++) { + if (literals[j].lid() == lid) { + ConstraintTree ct = clauses_[i]->constr().projectedCopy ( + literals[j].logVars()); + Clause* c = new Clause (ct); + c->addLiteral (literals[j]); + return c; + } + } + } + return 0; +} + + + +LiteralId +LiftedWCNF::getLiteralId (PrvGroup prvGroup, unsigned range) +{ + assert (Util::contains (map_, prvGroup)); + return map_[prvGroup][range]; +} + + + +void +LiftedWCNF::addIndicatorClauses (const ParfactorList& pfList) +{ + ParfactorList::const_iterator it = pfList.begin(); + while (it != pfList.end()) { + const ProbFormulas& formulas = (*it)->arguments(); + for (size_t i = 0; i < formulas.size(); i++) { + if (Util::contains (map_, formulas[i].group()) == false) { + ConstraintTree tempConstr = (*it)->constr()->projectedCopy( + formulas[i].logVars()); + Clause* clause = new Clause (tempConstr); + vector lids; + for (size_t j = 0; j < formulas[i].range(); j++) { + clause->addLiteral (Literal (freeLiteralId_, formulas[i].logVars())); + lids.push_back (freeLiteralId_); + freeLiteralId_ ++; + } + clauses_.push_back (clause); + for (size_t j = 0; j < formulas[i].range() - 1; j++) { + for (size_t k = j + 1; k < formulas[i].range(); k++) { + ConstraintTree tempConstr2 = (*it)->constr()->projectedCopy ( + formulas[i].logVars()); + Clause* clause2 = new Clause (tempConstr2); + clause2->addLiteralComplemented (Literal (clause->literals()[j])); + clause2->addLiteralComplemented (Literal (clause->literals()[k])); + clauses_.push_back (clause2); + } + } + map_[formulas[i].group()] = lids; + } + } + ++ it; + } +} + + + +void +LiftedWCNF::addParameterClauses (const ParfactorList& pfList) +{ + ParfactorList::const_iterator it = pfList.begin(); + while (it != pfList.end()) { + Indexer indexer ((*it)->ranges()); + vector groups = (*it)->getAllGroups(); + while (indexer.valid()) { + LiteralId paramVarLid = freeLiteralId_; + // λu1 ∧ ... ∧ λun ∧ λxi <=> θxi|u1,...,un + // + // ¬λu1 ... 
¬λun v θxi|u1,...,un -> clause1 + // ¬θxi|u1,...,un v λu1 -> tempClause + // ¬θxi|u1,...,un v λu2 -> tempClause + double posWeight = (**it)[indexer]; + addWeight (paramVarLid, posWeight, LogAware::one()); + + Clause* clause1 = new Clause (*(*it)->constr()); + + for (unsigned i = 0; i < groups.size(); i++) { + LiteralId lid = getLiteralId (groups[i], indexer[i]); + + clause1->addLiteralComplemented ( + Literal (lid, (*it)->argument(i).logVars())); + + ConstraintTree ct = *(*it)->constr(); + Clause* tempClause = new Clause (ct); + tempClause->addLiteralComplemented (Literal ( + paramVarLid, (*it)->constr()->logVars())); + tempClause->addLiteral (Literal (lid, (*it)->argument(i).logVars())); + clauses_.push_back (tempClause); + } + clause1->addLiteral (Literal (paramVarLid, (*it)->constr()->logVars())); + clauses_.push_back (clause1); + freeLiteralId_ ++; + ++ indexer; + } + ++ it; + } +} + + + +void +LiftedWCNF::printFormulaIndicators (void) const +{ + if (map_.empty()) { + return; + } + set allGroups; + ParfactorList::const_iterator it = pfList_.begin(); + while (it != pfList_.end()) { + const ProbFormulas& formulas = (*it)->arguments(); + for (size_t i = 0; i < formulas.size(); i++) { + if (Util::contains (allGroups, formulas[i].group()) == false) { + allGroups.insert (formulas[i].group()); + cout << formulas[i] << " | " ; + ConstraintTree tempCt = (*it)->constr()->projectedCopy ( + formulas[i].logVars()); + cout << tempCt.tupleSet(); + cout << " indicators => " ; + vector indicators = + (map_.find (formulas[i].group()))->second; + cout << indicators << endl; + } + } + ++ it; + } +} + + + +void +LiftedWCNF::printWeights (void) const +{ + unordered_map>::const_iterator it; + it = weights_.begin(); + while (it != weights_.end()) { + cout << "λ" << it->first << " weights: " ; + cout << it->second.first << " " << it->second.second; + cout << endl; + ++ it; + } +} + + + +void +LiftedWCNF::printClauses (void) const +{ + Clause::printClauses (clauses_); +} + diff --git a/packages/CLPBN/horus2/LiftedWCNF.h b/packages/CLPBN/horus2/LiftedWCNF.h new file mode 100644 index 000000000..e0f901b7c --- /dev/null +++ b/packages/CLPBN/horus2/LiftedWCNF.h @@ -0,0 +1,239 @@ +#ifndef HORUS_LIFTEDWCNF_H +#define HORUS_LIFTEDWCNF_H + +#include "ParfactorList.h" + +using namespace std; + +typedef long LiteralId; + +class ConstraintTree; + + +enum LogVarType +{ + FULL_LV, + POS_LV, + NEG_LV +}; + +typedef vector LogVarTypes; + + + +class Literal +{ + public: + Literal (LiteralId lid, const LogVars& lvs) : + lid_(lid), logVars_(lvs), negated_(false) { } + + Literal (const Literal& lit, bool negated) : + lid_(lit.lid_), logVars_(lit.logVars_), negated_(negated) { } + + LiteralId lid (void) const { return lid_; } + + LogVars logVars (void) const { return logVars_; } + + size_t nrLogVars (void) const { return logVars_.size(); } + + LogVarSet logVarSet (void) const { return LogVarSet (logVars_); } + + void complement (void) { negated_ = !negated_; } + + bool isPositive (void) const { return negated_ == false; } + + bool isNegative (void) const { return negated_; } + + bool isGround (ConstraintTree constr, LogVarSet ipgLogVars) const; + + size_t indexOfLogVar (LogVar X) const; + + string toString (LogVarSet ipgLogVars = LogVarSet(), + LogVarSet posCountedLvs = LogVarSet(), + LogVarSet negCountedLvs = LogVarSet()) const; + + friend std::ostream& operator<< (std::ostream &os, const Literal& lit); + + private: + LiteralId lid_; + LogVars logVars_; + bool negated_; +}; + +typedef vector Literals; + + + +class Clause +{ + 
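+  // A clause of the lifted WCNF: a disjunction of parameterized literals
+  // together with a constraint tree over their logical variables, plus
+  // bookkeeping for ipg (independent-partial-ground) log vars and
+  // positively/negatively counted log vars.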
public: + Clause (const ConstraintTree& ct = ConstraintTree({})) : constr_(ct) { } + + Clause (vector> names) : constr_(ConstraintTree (names)) { } + + void addLiteral (const Literal& l) { literals_.push_back (l); } + + const Literals& literals (void) const { return literals_; } + + Literals& literals (void) { return literals_; } + + size_t nrLiterals (void) const { return literals_.size(); } + + const ConstraintTree& constr (void) const { return constr_; } + + ConstraintTree constr (void) { return constr_; } + + bool isUnit (void) const { return literals_.size() == 1; } + + LogVarSet ipgLogVars (void) const { return ipgLvs_; } + + void addIpgLogVar (LogVar X) { ipgLvs_.insert (X); } + + void addPosCountedLogVar (LogVar X) { posCountedLvs_.insert (X); } + + void addNegCountedLogVar (LogVar X) { negCountedLvs_.insert (X); } + + LogVarSet posCountedLogVars (void) const { return posCountedLvs_; } + + LogVarSet negCountedLogVars (void) const { return negCountedLvs_; } + + unsigned nrPosCountedLogVars (void) const { return posCountedLvs_.size(); } + + unsigned nrNegCountedLogVars (void) const { return negCountedLvs_.size(); } + + void addLiteralComplemented (const Literal& lit); + + bool containsLiteral (LiteralId lid) const; + + bool containsPositiveLiteral (LiteralId lid, const LogVarTypes&) const; + + bool containsNegativeLiteral (LiteralId lid, const LogVarTypes&) const; + + void removeLiterals (LiteralId lid); + + void removePositiveLiterals (LiteralId lid, const LogVarTypes&); + + void removeNegativeLiterals (LiteralId lid, const LogVarTypes&); + + bool isCountedLogVar (LogVar X) const; + + bool isPositiveCountedLogVar (LogVar X) const; + + bool isNegativeCountedLogVar (LogVar X) const; + + bool isIpgLogVar (LogVar X) const; + + TinySet lidSet (void) const; + + LogVarSet ipgCandidates (void) const; + + LogVarTypes logVarTypes (size_t litIdx) const; + + void removeLiteral (size_t litIdx); + + static bool independentClauses (Clause& c1, Clause& c2); + + static vector copyClauses (const vector& clauses); + + static void printClauses (const vector& clauses); + + static void deleteClauses (vector& clauses); + + friend std::ostream& operator<< (ostream &os, const Clause& clause); + + private: + LogVarSet getLogVarSetExcluding (size_t idx) const; + + Literals literals_; + LogVarSet ipgLvs_; + LogVarSet posCountedLvs_; + LogVarSet negCountedLvs_; + ConstraintTree constr_; +}; + +typedef vector Clauses; + + + +class LitLvTypes +{ + public: + struct CompareLitLvTypes + { + bool operator() ( + const LitLvTypes& types1, + const LitLvTypes& types2) const + { + if (types1.lid_ < types2.lid_) { + return true; + } + if (types1.lid_ == types2.lid_) { + return types1.lvTypes_ < types2.lvTypes_; + } + return false; + } + }; + + LitLvTypes (LiteralId lid, const LogVarTypes& lvTypes) : + lid_(lid), lvTypes_(lvTypes) { } + + LiteralId lid (void) const { return lid_; } + + const LogVarTypes& logVarTypes (void) const { return lvTypes_; } + + void setAllFullLogVars (void) { + std::fill (lvTypes_.begin(), lvTypes_.end(), LogVarType::FULL_LV); } + + friend std::ostream& operator<< (std::ostream &os, const LitLvTypes& lit); + + private: + LiteralId lid_; + LogVarTypes lvTypes_; +}; + +typedef TinySet LitLvTypesSet; + + + +class LiftedWCNF +{ + public: + LiftedWCNF (const ParfactorList& pfList); + + ~LiftedWCNF (void); + + const Clauses& clauses (void) const { return clauses_; } + + void addWeight (LiteralId lid, double posW, double negW); + + double posWeight (LiteralId lid) const; + + double negWeight (LiteralId 
lid) const; + + vector prvGroupLiterals (PrvGroup prvGroup); + + Clause* createClause (LiteralId lid) const; + + void printFormulaIndicators (void) const; + + void printWeights (void) const; + + void printClauses (void) const; + + private: + + LiteralId getLiteralId (PrvGroup prvGroup, unsigned range); + + void addIndicatorClauses (const ParfactorList& pfList); + + void addParameterClauses (const ParfactorList& pfList); + + Clauses clauses_; + LiteralId freeLiteralId_; + const ParfactorList& pfList_; + unordered_map> map_; + unordered_map> weights_; +}; + +#endif // HORUS_LIFTEDWCNF_H + diff --git a/packages/CLPBN/horus2/Parfactor.cpp b/packages/CLPBN/horus2/Parfactor.cpp new file mode 100644 index 000000000..ef2301b7b --- /dev/null +++ b/packages/CLPBN/horus2/Parfactor.cpp @@ -0,0 +1,942 @@ + +#include "Parfactor.h" +#include "Histogram.h" +#include "Indexer.h" +#include "Util.h" +#include "Horus.h" + + +Parfactor::Parfactor ( + const ProbFormulas& formulas, + const Params& params, + const Tuples& tuples, + unsigned distId) +{ + args_ = formulas; + params_ = params; + distId_ = distId; + + LogVars logVars; + for (size_t i = 0; i < args_.size(); i++) { + ranges_.push_back (args_[i].range()); + const LogVars& lvs = args_[i].logVars(); + for (size_t j = 0; j < lvs.size(); j++) { + if (Util::contains (logVars, lvs[j]) == false) { + logVars.push_back (lvs[j]); + } + } + } + LogVar newLv = logVars.size(); + constr_ = new ConstraintTree (logVars, tuples); + // Change formulas like f(X,X), X in {(p1),(p2),...} + // to be like f(X,Y), (X,Y) in {(p1,p1),(p2,p2),...}. + // This will simplify shattering on the constraint tree. + for (size_t i = 0; i < args_.size(); i++) { + LogVarSet lvSet; + LogVars& lvs = args_[i].logVars(); + for (size_t j = 0; j < lvs.size(); j++) { + if (lvSet.contains (lvs[j]) == false) { + lvSet |= lvs[j]; + } else { + constr_->cloneLogVar (lvs[j], newLv); + lvs[j] = newLv; + ++ newLv; + } + } + } + assert (params_.size() == Util::sizeExpected (ranges_)); +} + + + +Parfactor::Parfactor (const Parfactor* g, const Tuple& tuple) +{ + args_ = g->arguments(); + params_ = g->params(); + ranges_ = g->ranges(); + distId_ = g->distId(); + constr_ = new ConstraintTree (g->logVars(), {tuple}); + assert (params_.size() == Util::sizeExpected (ranges_)); +} + + + +Parfactor::Parfactor (const Parfactor* g, ConstraintTree* constr) +{ + args_ = g->arguments(); + params_ = g->params(); + ranges_ = g->ranges(); + distId_ = g->distId(); + constr_ = constr; + assert (params_.size() == Util::sizeExpected (ranges_)); +} + + + +Parfactor::Parfactor (const Parfactor& g) +{ + args_ = g.arguments(); + params_ = g.params(); + ranges_ = g.ranges(); + distId_ = g.distId(); + constr_ = new ConstraintTree (*g.constr()); + assert (params_.size() == Util::sizeExpected (ranges_)); +} + + + +Parfactor::~Parfactor (void) +{ + delete constr_; +} + + + +LogVarSet +Parfactor::countedLogVars (void) const +{ + LogVarSet set; + for (size_t i = 0; i < args_.size(); i++) { + if (args_[i].isCounting()) { + set.insert (args_[i].countedLogVar()); + } + } + return set; +} + + + +LogVarSet +Parfactor::uncountedLogVars (void) const +{ + return constr_->logVarSet() - countedLogVars(); +} + + + +LogVarSet +Parfactor::elimLogVars (void) const +{ + LogVarSet requiredToElim = constr_->logVarSet(); + requiredToElim -= constr_->singletons(); + requiredToElim -= countedLogVars(); + return requiredToElim; +} + + + +LogVarSet +Parfactor::exclusiveLogVars (size_t fIdx) const +{ + assert (fIdx < args_.size()); + LogVarSet remaining; + for 
(size_t i = 0; i < args_.size(); i++) { + if (i != fIdx) { + remaining |= args_[i].logVarSet(); + } + } + return args_[fIdx].logVarSet() - remaining; +} + + + +void +Parfactor::sumOutIndex (size_t fIdx) +{ + assert (fIdx < args_.size()); + assert (args_[fIdx].contains (elimLogVars())); + + if (args_[fIdx].isCounting()) { + unsigned N = constr_->getConditionalCount ( + args_[fIdx].countedLogVar()); + unsigned R = args_[fIdx].range(); + vector numAssigns = HistogramSet::getNumAssigns (N, R); + Indexer indexer (ranges_, fIdx); + while (indexer.valid()) { + if (Globals::logDomain) { + params_[indexer] += numAssigns[ indexer[fIdx] ]; + } else { + params_[indexer] *= numAssigns[ indexer[fIdx] ]; + } + ++ indexer; + } + } + + LogVarSet excl = exclusiveLogVars (fIdx); + unsigned exp; + if (args_[fIdx].isCounting()) { + // counting log vars were already raised on counting conversion + exp = constr_->getConditionalCount (excl - args_[fIdx].countedLogVar()); + } else { + exp = constr_->getConditionalCount (excl); + } + constr_->remove (excl); + + TFactor::sumOutIndex (fIdx); + LogAware::pow (params_, exp); +} + + + +void +Parfactor::multiply (Parfactor& g) +{ + alignAndExponentiate (this, &g); + TFactor::multiply (g); + constr_->join (g.constr(), true); + simplifyGrounds(); + assert (constr_->isCartesianProduct (countedLogVars())); +} + + + +bool +Parfactor::canCountConvert (LogVar X) +{ + if (nrFormulas (X) != 1) { + return false; + } + size_t fIdx = indexOfLogVar (X); + if (args_[fIdx].isCounting()) { + return false; + } + if (constr_->isCountNormalized (X) == false) { + return false; + } + if (constr_->getConditionalCount (X) == 1) { + return false; + } + if (constr_->isCartesianProduct (countedLogVars() | X) == false) { + return false; + } + return true; +} + + + +void +Parfactor::countConvert (LogVar X) +{ + size_t fIdx = indexOfLogVar (X); + assert (constr_->isCountNormalized (X)); + assert (constr_->getConditionalCount (X) > 1); + assert (canCountConvert (X)); + + unsigned N = constr_->getConditionalCount (X); + unsigned R = ranges_[fIdx]; + unsigned H = HistogramSet::nrHistograms (N, R); + vector histograms = HistogramSet::getHistograms (N, R); + + Indexer indexer (ranges_); + vector sumout (params_.size() / R); + unsigned count = 0; + while (indexer.valid()) { + sumout[count].reserve (R); + for (unsigned r = 0; r < R; r++) { + sumout[count].push_back (params_[indexer]); + indexer.incrementDimension (fIdx); + } + count ++; + indexer.resetDimension (fIdx); + indexer.incrementExceptDimension (fIdx); + } + + params_.clear(); + params_.reserve (sumout.size() * H); + + ranges_[fIdx] = H; + MapIndexer mapIndexer (ranges_, fIdx); + while (mapIndexer.valid()) { + double prod = LogAware::multIdenty(); + size_t i = mapIndexer; + unsigned h = mapIndexer[fIdx]; + for (unsigned r = 0; r < R; r++) { + if (Globals::logDomain) { + prod += LogAware::pow (sumout[i][r], histograms[h][r]); + } else { + prod *= LogAware::pow (sumout[i][r], histograms[h][r]); + } + } + params_.push_back (prod); + ++ mapIndexer; + } + args_[fIdx].setCountedLogVar (X); + simplifyCountingFormulas (fIdx); +} + + + +void +Parfactor::expand (LogVar X, LogVar X_new1, LogVar X_new2) +{ + size_t fIdx = indexOfLogVar (X); + assert (fIdx != args_.size()); + assert (args_[fIdx].isCounting()); + + unsigned N1 = constr_->getConditionalCount (X_new1); + unsigned N2 = constr_->getConditionalCount (X_new2); + unsigned N = N1 + N2; + unsigned R = args_[fIdx].range(); + unsigned H1 = HistogramSet::nrHistograms (N1, R); + unsigned H2 = 
HistogramSet::nrHistograms (N2, R); + + vector histograms = HistogramSet::getHistograms (N, R); + vector histograms1 = HistogramSet::getHistograms (N1, R); + vector histograms2 = HistogramSet::getHistograms (N2, R); + + vector sumIndexes; + sumIndexes.reserve (H1 * H2); + for (unsigned i = 0; i < H1; i++) { + for (unsigned j = 0; j < H2; j++) { + Histogram hist = histograms1[i]; + hist += histograms2[j]; + sumIndexes.push_back (HistogramSet::findIndex (hist, histograms)); + } + } + + expandPotential (fIdx, H1 * H2, sumIndexes); + + args_.insert (args_.begin() + fIdx + 1, args_[fIdx]); + args_[fIdx].rename (X, X_new1); + args_[fIdx + 1].rename (X, X_new2); + if (H1 == 2) { + args_[fIdx].clearCountedLogVar(); + } + if (H2 == 2) { + args_[fIdx + 1].clearCountedLogVar(); + } + ranges_.insert (ranges_.begin() + fIdx + 1, H2); + ranges_[fIdx] = H1; +} + + + +void +Parfactor::fullExpand (LogVar X) +{ + size_t fIdx = indexOfLogVar (X); + assert (fIdx != args_.size()); + assert (args_[fIdx].isCounting()); + + unsigned N = constr_->getConditionalCount (X); + unsigned R = args_[fIdx].range(); + vector originHists = HistogramSet::getHistograms (N, R); + vector expandHists = HistogramSet::getHistograms (1, R); + assert (ranges_[fIdx] == originHists.size()); + vector sumIndexes; + sumIndexes.reserve (N * R); + + Ranges expandRanges (N, R); + Indexer indexer (expandRanges); + while (indexer.valid()) { + vector hist (R, 0); + for (unsigned n = 0; n < N; n++) { + hist += expandHists[indexer[n]]; + } + sumIndexes.push_back (HistogramSet::findIndex (hist, originHists)); + ++ indexer; + } + + expandPotential (fIdx, std::pow (R, N), sumIndexes); + + ProbFormula f = args_[fIdx]; + args_.erase (args_.begin() + fIdx); + ranges_.erase (ranges_.begin() + fIdx); + LogVars newLvs = constr_->expand (X); + assert (newLvs.size() == N); + for (unsigned i = 0 ; i < N; i++) { + ProbFormula newFormula (f.functor(), f.logVars(), f.range()); + newFormula.rename (X, newLvs[i]); + args_.insert (args_.begin() + fIdx + i, newFormula); + ranges_.insert (ranges_.begin() + fIdx + i, R); + } +} + + + +void +Parfactor::reorderAccordingGrounds (const Grounds& grounds) +{ + ProbFormulas newFormulas; + for (size_t i = 0; i < grounds.size(); i++) { + for (size_t j = 0; j < args_.size(); j++) { + if (grounds[i].functor() == args_[j].functor() && + grounds[i].arity() == args_[j].arity()) { + constr_->moveToTop (args_[j].logVars()); + if (constr_->containsTuple (grounds[i].args())) { + newFormulas.push_back (args_[j]); + break; + } + } + } + assert (newFormulas.size() == i + 1); + } + reorderArguments (newFormulas); +} + + + +void +Parfactor::absorveEvidence (const ProbFormula& formula, unsigned evidence) +{ + size_t fIdx = indexOf (formula); + assert (fIdx != args_.size()); + LogVarSet excl = exclusiveLogVars (fIdx); + assert (args_[fIdx].isCounting() == false); + assert (constr_->isCountNormalized (excl)); + LogAware::pow (params_, constr_->getConditionalCount (excl)); + TFactor::absorveEvidence (formula, evidence); + constr_->remove (excl); +} + + + +void +Parfactor::setNewGroups (void) +{ + for (size_t i = 0; i < args_.size(); i++) { + args_[i].setGroup (ProbFormula::getNewGroup()); + } +} + + + +void +Parfactor::applySubstitution (const Substitution& theta) +{ + for (size_t i = 0; i < args_.size(); i++) { + LogVars& lvs = args_[i].logVars(); + for (size_t j = 0; j < lvs.size(); j++) { + lvs[j] = theta.newNameFor (lvs[j]); + } + if (args_[i].isCounting()) { + LogVar clv = args_[i].countedLogVar(); + args_[i].setCountedLogVar 
(theta.newNameFor (clv)); + } + } + constr_->applySubstitution (theta); +} + + + +size_t +Parfactor::indexOfGround (const Ground& ground) const +{ + size_t idx = args_.size(); + for (size_t i = 0; i < args_.size(); i++) { + if (args_[i].functor() == ground.functor() && + args_[i].arity() == ground.arity()) { + constr_->moveToTop (args_[i].logVars()); + if (constr_->containsTuple (ground.args())) { + idx = i; + break; + } + } + } + return idx; +} + + + +PrvGroup +Parfactor::findGroup (const Ground& ground) const +{ + size_t idx = indexOfGround (ground); + return idx == args_.size() + ? numeric_limits::max() + : args_[idx].group(); +} + + + +bool +Parfactor::containsGround (const Ground& ground) const +{ + return findGroup (ground) != numeric_limits::max(); +} + + + +bool +Parfactor::containsGrounds (const Grounds& grounds) const +{ + Tuple tuple; + LogVars tupleLvs; + for (size_t i = 0; i < grounds.size(); i++) { + size_t idx = indexOfGround (grounds[i]); + if (idx == args_.size()) { + return false; + } + LogVars lvs = args_[idx].logVars(); + for (size_t j = 0; j < lvs.size(); j++) { + if (Util::contains (tupleLvs, lvs[j]) == false) { + tuple.push_back (grounds[i].args()[j]); + tupleLvs.push_back (lvs[j]); + } + } + } + constr_->moveToTop (tupleLvs); + return constr_->containsTuple (tuple); +} + + + +bool +Parfactor::containsGroup (PrvGroup group) const +{ + for (size_t i = 0; i < args_.size(); i++) { + if (args_[i].group() == group) { + return true; + } + } + return false; +} + + + +bool +Parfactor::containsGroups (vector groups) const +{ + for (size_t i = 0; i < groups.size(); i++) { + if (containsGroup (groups[i]) == false) { + return false; + } + } + return true; +} + + + +unsigned +Parfactor::nrFormulas (LogVar X) const +{ + unsigned count = 0; + for (size_t i = 0; i < args_.size(); i++) { + if (args_[i].contains (X)) { + count ++; + } + } + return count; +} + + + +int +Parfactor::indexOfLogVar (LogVar X) const +{ + size_t idx = args_.size(); + assert (nrFormulas (X) == 1); + for (size_t i = 0; i < args_.size(); i++) { + if (args_[i].contains (X)) { + idx = i; + break; + } + } + return idx; +} + + + +int +Parfactor::indexOfGroup (PrvGroup group) const +{ + size_t pos = args_.size(); + for (size_t i = 0; i < args_.size(); i++) { + if (args_[i].group() == group) { + pos = i; + break; + } + } + return pos; +} + + + +unsigned +Parfactor::nrFormulasWithGroup (PrvGroup group) const +{ + unsigned count = 0; + for (size_t i = 0; i < args_.size(); i++) { + if (args_[i].group() == group) { + count ++; + } + } + return count; +} + + + +vector +Parfactor::getAllGroups (void) const +{ + vector groups (args_.size()); + for (size_t i = 0; i < args_.size(); i++) { + groups[i] = args_[i].group(); + } + return groups; +} + + + +string +Parfactor::getLabel (void) const +{ + stringstream ss; + ss << "phi(" ; + for (size_t i = 0; i < args_.size(); i++) { + if (i != 0) ss << "," ; + ss << args_[i]; + } + ss << ")" ; + ConstraintTree copy (*constr_); + copy.moveToTop (copy.logVarSet().elements()); + ss << "|" << copy.tupleSet(); + return ss.str(); +} + + + +void +Parfactor::print (bool printParams) const +{ + cout << "Formulas: " ; + for (size_t i = 0; i < args_.size(); i++) { + if (i != 0) cout << ", " ; + cout << args_[i]; + } + cout << endl; + if (args_[0].group() != Util::maxUnsigned()) { + vector groups; + for (size_t i = 0; i < args_.size(); i++) { + groups.push_back (string ("g") + Util::toString (args_[i].group())); + } + cout << "Groups: " << groups << endl; + } + cout << "LogVars: " << 
constr_->logVarSet() << endl; + cout << "Ranges: " << ranges_ << endl; + if (printParams == false) { + cout << "Params: " ; + if (params_.size() <= 32) { + cout.precision(10); + cout << params_ << endl; + } else { + cout << "|" << params_.size() << "|" << endl; + } + } + ConstraintTree copy (*constr_); + copy.moveToTop (copy.logVarSet().elements()); + cout << "Tuples: " << copy.tupleSet() << endl; + if (printParams) { + printParameters(); + } +} + + + +void +Parfactor::printParameters (void) const +{ + vector jointStrings; + Indexer indexer (ranges_); + while (indexer.valid()) { + stringstream ss; + for (size_t i = 0; i < args_.size(); i++) { + if (i != 0) ss << ", " ; + if (args_[i].isCounting()) { + unsigned N = constr_->getConditionalCount ( + args_[i].countedLogVar()); + HistogramSet hs (N, args_[i].range()); + unsigned c = 0; + while (c < indexer[i]) { + hs.nextHistogram(); + c ++; + } + ss << hs; + } else { + ss << indexer[i]; + } + } + jointStrings.push_back (ss.str()); + ++ indexer; + } + for (size_t i = 0; i < params_.size(); i++) { + cout << "f(" << jointStrings[i] << ")" ; + cout << " = " << params_[i] << endl; + } +} + + + +void +Parfactor::printProjections (void) const +{ + ConstraintTree copy (*constr_); + + LogVarSet Xs = copy.logVarSet(); + for (size_t i = 0; i < Xs.size(); i++) { + cout << "-> projection of " << Xs[i] << ": " ; + cout << copy.tupleSet ({Xs[i]}) << endl; + } +} + + + +void +Parfactor::expandPotential ( + size_t fIdx, + unsigned newRange, + const vector& sumIndexes) +{ + ullong newSize = (params_.size() / ranges_[fIdx]) * newRange; + if (newSize > params_.max_size()) { + cerr << "Error: an overflow occurred when performing expansion." ; + cerr << endl; + exit (EXIT_FAILURE); + } + + Params backup = params_; + params_.clear(); + params_.reserve (newSize); + + size_t prod = 1; + vector offsets (ranges_.size()); + for (size_t i = ranges_.size(); i-- > 0; ) { + offsets[i] = prod; + prod *= ranges_[i]; + } + + size_t index = 0; + ranges_[fIdx] = newRange; + vector indices (ranges_.size(), 0); + for (size_t k = 0; k < newSize; k++) { + assert (index < backup.size()); + params_.push_back (backup[index]); + for (size_t i = ranges_.size(); i-- > 0; ) { + indices[i] ++; + if (i == fIdx) { + if (indices[i] != ranges_[i]) { + int diff = sumIndexes[indices[i]] - sumIndexes[indices[i] - 1]; + index += diff * offsets[i]; + break; + } else { + // last index contains the old range minus 1 + index -= sumIndexes.back() * offsets[i]; + indices[i] = 0; + } + } else { + if (indices[i] != ranges_[i]) { + index += offsets[i]; + break; + } else { + index -= (ranges_[i] - 1) * offsets[i]; + indices[i] = 0; + } + } + } + } +} + + + +void +Parfactor::simplifyCountingFormulas (size_t fIdx) +{ + // check if we can simplify the parfactor + for (size_t i = 0; i < args_.size(); i++) { + if (i != fIdx && + args_[i].isCounting() && + args_[i].group() == args_[fIdx].group()) { + // if they only differ in the name of the counting log var + if ((args_[i].logVarSet() - args_[i].countedLogVar()) == + (args_[fIdx].logVarSet()) - args_[fIdx].countedLogVar() && + ranges_[i] == ranges_[fIdx]) { + simplifyParfactor (fIdx, i); + break; + } + } + } +} + + + +void +Parfactor::simplifyGrounds (void) +{ + if (args_.size() == 1) { + return; + } + LogVarSet singletons = constr_->singletons(); + for (long i = 0; i < (long)args_.size() - 1; i++) { + for (size_t j = i + 1; j < args_.size(); j++) { + if (args_[i].group() == args_[j].group() && + singletons.contains (args_[i].logVarSet()) && + singletons.contains 
(args_[j].logVarSet())) { + simplifyParfactor (i, j); + i --; + break; + } + } + } +} + + + +bool +Parfactor::canMultiply (Parfactor* g1, Parfactor* g2) +{ + std::pair res = getAlignLogVars (g1, g2); + LogVarSet Xs_1 (res.first); + LogVarSet Xs_2 (res.second); + LogVarSet Y_1 = g1->logVarSet() - Xs_1; + LogVarSet Y_2 = g2->logVarSet() - Xs_2; + Y_1 -= g1->countedLogVars(); + Y_2 -= g2->countedLogVars(); + return g1->constr()->isCountNormalized (Y_1) && + g2->constr()->isCountNormalized (Y_2); +} + + + +void +Parfactor::simplifyParfactor (size_t fIdx1, size_t fIdx2) +{ + Params backup = params_; + params_.clear(); + Indexer indexer (ranges_); + while (indexer.valid()) { + if (indexer[fIdx1] == indexer[fIdx2]) { + params_.push_back (backup[indexer]); + } + ++ indexer; + } + for (size_t i = 0; i < args_[fIdx2].logVars().size(); i++) { + if (nrFormulas (args_[fIdx2].logVars()[i]) == 1) { + constr_->remove ({ args_[fIdx2].logVars()[i] }); + } + } + args_.erase (args_.begin() + fIdx2); + ranges_.erase (ranges_.begin() + fIdx2); +} + + + +std::pair +Parfactor::getAlignLogVars (Parfactor* g1, Parfactor* g2) +{ + g1->simplifyGrounds(); + g2->simplifyGrounds(); + LogVars Xs_1, Xs_2; + TinySet matchedI; + TinySet matchedJ; + ProbFormulas& formulas1 = g1->arguments(); + ProbFormulas& formulas2 = g2->arguments(); + for (size_t i = 0; i < formulas1.size(); i++) { + for (size_t j = 0; j < formulas2.size(); j++) { + if (formulas1[i].group() == formulas2[j].group() && + g1->range (i) == g2->range (j) && + matchedI.contains (i) == false && + matchedJ.contains (j) == false) { + Util::addToVector (Xs_1, formulas1[i].logVars()); + Util::addToVector (Xs_2, formulas2[j].logVars()); + matchedI.insert (i); + matchedJ.insert (j); + } + } + } + return make_pair (Xs_1, Xs_2); +} + + + +void +Parfactor::alignAndExponentiate (Parfactor* g1, Parfactor* g2) +{ + alignLogicalVars (g1, g2); + LogVarSet comm = g1->logVarSet() & g2->logVarSet(); + LogVarSet Y_1 = g1->logVarSet() - comm; + LogVarSet Y_2 = g2->logVarSet() - comm; + Y_1 -= g1->countedLogVars(); + Y_2 -= g2->countedLogVars(); + assert (g1->constr()->isCountNormalized (Y_1)); + assert (g2->constr()->isCountNormalized (Y_2)); + unsigned condCount1 = g1->constr()->getConditionalCount (Y_1); + unsigned condCount2 = g2->constr()->getConditionalCount (Y_2); + LogAware::pow (g1->params(), 1.0 / condCount2); + LogAware::pow (g2->params(), 1.0 / condCount1); +} + + + +void +Parfactor::alignLogicalVars (Parfactor* g1, Parfactor* g2) +{ + std::pair res = getAlignLogVars (g1, g2); + const LogVars& alignLvs1 = res.first; + const LogVars& alignLvs2 = res.second; + // cout << "ALIGNING :::::::::::::::::" << endl; + // g1->print(); + // cout << "AND" << endl; + // g2->print(); + // cout << "-> align lvs1 = " << alignLvs1 << endl; + // cout << "-> align lvs2 = " << alignLvs2 << endl; + LogVar freeLogVar (0); + Substitution theta1, theta2; + for (size_t i = 0; i < alignLvs1.size(); i++) { + bool b1 = theta1.containsReplacementFor (alignLvs1[i]); + bool b2 = theta2.containsReplacementFor (alignLvs2[i]); + if (b1 == false && b2 == false) { + theta1.add (alignLvs1[i], freeLogVar); + theta2.add (alignLvs2[i], freeLogVar); + ++ freeLogVar; + } else if (b1 == false && b2) { + theta1.add (alignLvs1[i], theta2.newNameFor (alignLvs2[i])); + } else if (b1 && b2 == false) { + theta2.add (alignLvs2[i], theta1.newNameFor (alignLvs1[i])); + } + } + + const LogVarSet& allLvs1 = g1->logVarSet(); + for (size_t i = 0; i < allLvs1.size(); i++) { + if (theta1.containsReplacementFor (allLvs1[i]) == 
false) { + theta1.add (allLvs1[i], freeLogVar); + ++ freeLogVar; + } + } + const LogVarSet& allLvs2 = g2->logVarSet(); + for (size_t i = 0; i < allLvs2.size(); i++) { + if (theta2.containsReplacementFor (allLvs2[i]) == false) { + theta2.add (allLvs2[i], freeLogVar); + ++ freeLogVar; + } + } + + // handle this type of situation: + // g1 = p(X), q(X) ; X in {(p1),(p2)} + // g2 = p(X), q(Y) ; (X,Y) in {(p1,p2),(p2,p1)} + LogVars discardedLvs1 = theta1.getDiscardedLogVars(); + for (size_t i = 0; i < discardedLvs1.size(); i++) { + if (g1->constr()->isSingleton (discardedLvs1[i]) && + g1->nrFormulas (discardedLvs1[i]) == 1) { + g1->constr()->remove (discardedLvs1[i]); + } else { + LogVar X_new = ++ g1->constr()->logVarSet().back(); + theta1.rename (discardedLvs1[i], X_new); + } + } + LogVars discardedLvs2 = theta2.getDiscardedLogVars(); + for (size_t i = 0; i < discardedLvs2.size(); i++) { + if (g2->constr()->isSingleton (discardedLvs2[i]) && + g2->nrFormulas (discardedLvs2[i]) == 1) { + g2->constr()->remove (discardedLvs2[i]); + } else { + LogVar X_new = ++ g2->constr()->logVarSet().back(); + theta2.rename (discardedLvs2[i], X_new); + } + } + + // cout << "theta1: " << theta1 << endl; + // cout << "theta2: " << theta2 << endl; + g1->applySubstitution (theta1); + g2->applySubstitution (theta2); +} + diff --git a/packages/CLPBN/horus2/Parfactor.h b/packages/CLPBN/horus2/Parfactor.h new file mode 100644 index 000000000..1c65c2ea0 --- /dev/null +++ b/packages/CLPBN/horus2/Parfactor.h @@ -0,0 +1,125 @@ +#ifndef HORUS_PARFACTOR_H +#define HORUS_PARFACTOR_H + +#include +#include + +#include "ProbFormula.h" +#include "ConstraintTree.h" +#include "LiftedUtils.h" +#include "Horus.h" + +#include "Factor.h" + +class Parfactor : public TFactor +{ + public: + Parfactor ( + const ProbFormulas&, + const Params&, + const Tuples&, + unsigned distId); + + Parfactor (const Parfactor*, const Tuple&); + + Parfactor (const Parfactor*, ConstraintTree*); + + Parfactor (const Parfactor&); + + ~Parfactor (void); + + ConstraintTree* constr (void) { return constr_; } + + const ConstraintTree* constr (void) const { return constr_; } + + const LogVars& logVars (void) const { return constr_->logVars(); } + + const LogVarSet& logVarSet (void) const { return constr_->logVarSet(); } + + LogVarSet countedLogVars (void) const; + + LogVarSet uncountedLogVars (void) const; + + LogVarSet elimLogVars (void) const; + + LogVarSet exclusiveLogVars (size_t fIdx) const; + + void sumOutIndex (size_t fIdx); + + void multiply (Parfactor&); + + bool canCountConvert (LogVar X); + + void countConvert (LogVar); + + void expand (LogVar, LogVar, LogVar); + + void fullExpand (LogVar); + + void reorderAccordingGrounds (const Grounds&); + + void absorveEvidence (const ProbFormula&, unsigned); + + void setNewGroups (void); + + void applySubstitution (const Substitution&); + + size_t indexOfGround (const Ground&) const; + + PrvGroup findGroup (const Ground&) const; + + bool containsGround (const Ground&) const; + + bool containsGrounds (const Grounds&) const; + + bool containsGroup (PrvGroup) const; + + bool containsGroups (vector) const; + + unsigned nrFormulas (LogVar) const; + + int indexOfLogVar (LogVar) const; + + int indexOfGroup (PrvGroup) const; + + unsigned nrFormulasWithGroup (PrvGroup) const; + + vector getAllGroups (void) const; + + void print (bool = false) const; + + void printParameters (void) const; + + void printProjections (void) const; + + string getLabel (void) const; + + void simplifyGrounds (void); + + static bool canMultiply 
(Parfactor*, Parfactor*); + + private: + + void simplifyCountingFormulas (size_t fIdx); + + void simplifyParfactor (size_t fIdx1, size_t fIdx2); + + static std::pair getAlignLogVars ( + Parfactor* g1, Parfactor* g2); + + void expandPotential (size_t fIdx, unsigned newRange, + const vector& sumIndexes); + + static void alignAndExponentiate (Parfactor*, Parfactor*); + + static void alignLogicalVars (Parfactor*, Parfactor*); + + ConstraintTree* constr_; + +}; + + +typedef vector Parfactors; + +#endif // HORUS_PARFACTOR_H + diff --git a/packages/CLPBN/horus2/ParfactorList.cpp b/packages/CLPBN/horus2/ParfactorList.cpp new file mode 100644 index 000000000..1de1ccc7d --- /dev/null +++ b/packages/CLPBN/horus2/ParfactorList.cpp @@ -0,0 +1,638 @@ +#include + +#include "ParfactorList.h" + + +ParfactorList::ParfactorList (const ParfactorList& pfList) +{ + ParfactorList::const_iterator it = pfList.begin(); + while (it != pfList.end()) { + addShattered (new Parfactor (**it)); + ++ it; + } +} + + + +ParfactorList::ParfactorList (const Parfactors& pfs) +{ + add (pfs); +} + + + +ParfactorList::~ParfactorList (void) +{ + ParfactorList::const_iterator it = pfList_.begin(); + while (it != pfList_.end()) { + delete *it; + ++ it; + } +} + + + +void +ParfactorList::add (Parfactor* pf) +{ + pf->setNewGroups(); + addToShatteredList (pf); +} + + + +void +ParfactorList::add (const Parfactors& pfs) +{ + for (size_t i = 0; i < pfs.size(); i++) { + pfs[i]->setNewGroups(); + addToShatteredList (pfs[i]); + } +} + + + +void +ParfactorList::addShattered (Parfactor* pf) +{ + assert (isAllShattered()); + pfList_.push_back (pf); + assert (isAllShattered()); +} + + + +list::iterator +ParfactorList::insertShattered ( + list::iterator it, + Parfactor* pf) +{ + return pfList_.insert (it, pf); + assert (isAllShattered()); +} + + + +list::iterator +ParfactorList::remove (list::iterator it) +{ + return pfList_.erase (it); +} + + + +list::iterator +ParfactorList::removeAndDelete (list::iterator it) +{ + delete *it; + return pfList_.erase (it); +} + + + +bool +ParfactorList::isAllShattered (void) const +{ + if (pfList_.size() <= 1) { + return true; + } + vector pfs (pfList_.begin(), pfList_.end()); + for (size_t i = 0; i < pfs.size(); i++) { + assert (isShattered (pfs[i])); + } + for (size_t i = 0; i < pfs.size() - 1; i++) { + for (size_t j = i + 1; j < pfs.size(); j++) { + if (isShattered (pfs[i], pfs[j]) == false) { + return false; + } + } + } + return true; +} + + + +void +ParfactorList::print (void) const +{ + Parfactors pfVec (pfList_.begin(), pfList_.end()); + std::sort (pfVec.begin(), pfVec.end(), sortByParams()); + for (size_t i = 0; i < pfVec.size(); i++) { + pfVec[i]->print(); + cout << endl; + } +} + + + +ParfactorList& +ParfactorList::operator= (const ParfactorList& pfList) +{ + if (this != &pfList) { + ParfactorList::const_iterator it0 = pfList_.begin(); + while (it0 != pfList_.end()) { + delete *it0; + ++ it0; + } + pfList_.clear(); + ParfactorList::const_iterator it = pfList.begin(); + while (it != pfList.end()) { + addShattered (new Parfactor (**it)); + ++ it; + } + } + return *this; +} + + + +bool +ParfactorList::isShattered (const Parfactor* g) const +{ + const ProbFormulas& formulas = g->arguments(); + if (formulas.size() < 2) { + return true; + } + ConstraintTree ct (*g->constr()); + for (size_t i = 0; i < formulas.size() - 1; i++) { + for (size_t j = i + 1; j < formulas.size(); j++) { + if (formulas[i].group() == formulas[j].group()) { + if (identical ( + formulas[i], *(g->constr()), + formulas[j], *(g->constr())) 
== false) { + g->print(); + cout << "-> not identical on positions " ; + cout << i << " and " << j << endl; + return false; + } + } else { + if (disjoint ( + formulas[i], *(g->constr()), + formulas[j], *(g->constr())) == false) { + g->print(); + cout << "-> not disjoint on positions " ; + cout << i << " and " << j << endl; + return false; + } + } + } + } + return true; +} + + + +bool +ParfactorList::isShattered ( + const Parfactor* g1, + const Parfactor* g2) const +{ + assert (g1 != g2); + const ProbFormulas& fms1 = g1->arguments(); + const ProbFormulas& fms2 = g2->arguments(); + + for (size_t i = 0; i < fms1.size(); i++) { + for (size_t j = 0; j < fms2.size(); j++) { + if (fms1[i].group() == fms2[j].group()) { + if (identical ( + fms1[i], *(g1->constr()), + fms2[j], *(g2->constr())) == false) { + g1->print(); + cout << "^" << endl; + g2->print(); + cout << "-> not identical on group " << fms1[i].group() << endl; + return false; + } + } else { + if (disjoint ( + fms1[i], *(g1->constr()), + fms2[j], *(g2->constr())) == false) { + g1->print(); + cout << "^" << endl; + g2->print(); + cout << "-> not disjoint on groups " << fms1[i].group(); + cout << " and " << fms2[j].group() << endl; + return false; + } + } + } + } + return true; +} + + + +void +ParfactorList::addToShatteredList (Parfactor* g) +{ + queue residuals; + residuals.push (g); + while (residuals.empty() == false) { + Parfactor* pf = residuals.front(); + bool pfSplitted = false; + list::iterator pfIter; + pfIter = pfList_.begin(); + while (pfIter != pfList_.end()) { + std::pair shattRes; + shattRes = shatter (*pfIter, pf); + if (shattRes.first.empty() == false) { + pfIter = removeAndDelete (pfIter); + Util::addToQueue (residuals, shattRes.first); + } else { + ++ pfIter; + } + if (shattRes.second.empty() == false) { + delete pf; + Util::addToQueue (residuals, shattRes.second); + pfSplitted = true; + break; + } + } + residuals.pop(); + if (pfSplitted == false) { + Parfactors res = shatterAgainstMySelf (pf); + if (res.empty()) { + addShattered (pf); + } else { + Util::addToQueue (residuals, res); + } + } + } + assert (isAllShattered()); +} + + + +Parfactors +ParfactorList::shatterAgainstMySelf (Parfactor* g) +{ + Parfactors pfs; + queue residuals; + residuals.push (g); + bool shattered = true; + while (residuals.empty() == false) { + Parfactor* pf = residuals.front(); + Parfactors res = shatterAgainstMySelf2 (pf); + if (res.empty()) { + assert (isShattered (pf)); + if (shattered) { + return { }; + } + pfs.push_back (pf); + } else { + shattered = false; + for (size_t i = 0; i < res.size(); i++) { + assert (res[i]->constr()->empty() == false); + residuals.push (res[i]); + } + delete pf; + } + residuals.pop(); + } + return pfs; +} + + + +Parfactors +ParfactorList::shatterAgainstMySelf2 (Parfactor* g) +{ + // slip a parfactor with overlapping formulas: + // e.g. 
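// addToShatteredList above is a worklist algorithm: residual
// parfactors wait in a queue and are shattered again until nothing
// splits any further.  Below is a minimal standalone sketch of that
// fixpoint loop, with `Item` and `shatterStep` as illustrative
// placeholders (not Horus types); shatterStep returns the split
// pieces, or an empty vector when the item is already shattered.
#include <cstddef>
#include <queue>
#include <vector>

template <typename Item, typename Step>
static std::vector<Item> runToFixpoint (Item start, Step shatterStep)
{
  std::vector<Item> done;
  std::queue<Item> work;
  work.push (start);
  while ( ! work.empty()) {
    Item current = work.front();
    work.pop();
    std::vector<Item> residuals = shatterStep (current);
    if (residuals.empty()) {
      done.push_back (current);        // already shattered: keep it
    } else {
      for (std::size_t i = 0; i < residuals.size(); i++) {
        work.push (residuals[i]);      // re-queue the pieces for another pass
      }
    }
  }
  return done;
}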
{s(X),s(Y)}, with (X,Y) in {(p1,p2),(p1,p3),(p4,p1)} + const ProbFormulas& formulas = g->arguments(); + for (size_t i = 0; i < formulas.size() - 1; i++) { + for (size_t j = i + 1; j < formulas.size(); j++) { + if (formulas[i].sameSkeletonAs (formulas[j])) { + Parfactors res = shatterAgainstMySelf (g, i, j); + if (res.empty() == false) { + return res; + } + } + } + } + return Parfactors(); +} + + + +Parfactors +ParfactorList::shatterAgainstMySelf ( + Parfactor* g, + size_t fIdx1, + size_t fIdx2) +{ + /* + Util::printDashedLine(); + cout << "-> SHATTERING" << endl; + g->print(); + cout << "-> ON: " << g->argument (fIdx1) << "|" ; + cout << g->constr()->tupleSet (g->argument (fIdx1).logVars()) << endl; + cout << "-> ON: " << g->argument (fIdx2) << "|" ; + cout << g->constr()->tupleSet (g->argument (fIdx2).logVars()) << endl; + Util::printDashedLine(); + */ + ProbFormula& f1 = g->argument (fIdx1); + ProbFormula& f2 = g->argument (fIdx2); + if (f1.isAtom()) { + cerr << "Error: a ground occurs twice in the same parfactor." << endl; + cerr << endl; + exit (EXIT_FAILURE); + } + assert (g->constr()->empty() == false); + ConstraintTree ctCopy (*g->constr()); + if (f1.group() == f2.group()) { + assert (identical (f1, *(g->constr()), f2, ctCopy)); + return { }; + } + + g->constr()->moveToTop (f1.logVars()); + ctCopy.moveToTop (f2.logVars()); + + std::pair split1 = + g->constr()->split (f1.logVars(), &ctCopy, f2.logVars()); + ConstraintTree* commCt1 = split1.first; + ConstraintTree* exclCt1 = split1.second; + + if (commCt1->empty()) { + // disjoint + delete commCt1; + delete exclCt1; + return { }; + } + + PrvGroup newGroup = ProbFormula::getNewGroup(); + Parfactors res1 = shatter (g, fIdx1, commCt1, exclCt1, newGroup); + if (res1.empty()) { + res1.push_back (g); + } + + Parfactors res; + ctCopy.moveToTop (f1.logVars()); + for (size_t i = 0; i < res1.size(); i++) { + res1[i]->constr()->moveToTop (f2.logVars()); + std::pair split2; + split2 = res1[i]->constr()->split (f2.logVars(), &ctCopy, f1.logVars()); + ConstraintTree* commCt2 = split2.first; + ConstraintTree* exclCt2 = split2.second; + if (commCt2->empty()) { + if (res1[i] != g) { + res.push_back (res1[i]); + } + delete commCt2; + delete exclCt2; + continue; + } + newGroup = ProbFormula::getNewGroup(); + Parfactors res2 = shatter (res1[i], fIdx2, commCt2, exclCt2, newGroup); + if (res2.empty()) { + if (res1[i] != g) { + res.push_back (res1[i]); + } + } else { + Util::addToVector (res, res2); + for (size_t j = 0; j < res2.size(); j++) { + } + if (res1[i] != g) { + delete res1[i]; + } + } + } + + if (res.empty()) { + g->argument (fIdx2).setGroup (g->argument (fIdx1).group()); + updateGroups (f2.group(), f1.group()); + } + return res; +} + + + +std::pair +ParfactorList::shatter (Parfactor* g1, Parfactor* g2) +{ + ProbFormulas& formulas1 = g1->arguments(); + ProbFormulas& formulas2 = g2->arguments(); + assert (g1 != 0 && g2 != 0 && g1 != g2); + for (size_t i = 0; i < formulas1.size(); i++) { + for (size_t j = 0; j < formulas2.size(); j++) { + if (formulas1[i].sameSkeletonAs (formulas2[j])) { + std::pair res; + res = shatter (i, g1, j, g2); + if (res.first.empty() == false || + res.second.empty() == false) { + return res; + } + } + } + } + return make_pair (Parfactors(), Parfactors()); +} + + + +std::pair +ParfactorList::shatter ( + size_t fIdx1, Parfactor* g1, + size_t fIdx2, Parfactor* g2) +{ + ProbFormula& f1 = g1->argument (fIdx1); + ProbFormula& f2 = g2->argument (fIdx2); + /* + Util::printDashedLine(); + cout << "-> SHATTERING" << endl; + 
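// The split performed by ConstraintTree::split above separates the
// tuples two formula occurrences have in common from the tuples only
// the first one covers.  The sketch below reproduces the same set
// semantics on plain sorted vectors of tuples; it is an illustration
// only, the real code operates on constraint trees.
#include <algorithm>
#include <iterator>
#include <string>
#include <utility>
#include <vector>

typedef std::vector<std::string> Tup;

// `a` and `b` must be sorted; returns (a intersect b, a minus b).
static std::pair<std::vector<Tup>, std::vector<Tup> >
splitTuples (const std::vector<Tup>& a, const std::vector<Tup>& b)
{
  std::vector<Tup> comm, excl;
  std::set_intersection (a.begin(), a.end(), b.begin(), b.end(),
                         std::back_inserter (comm));
  std::set_difference (a.begin(), a.end(), b.begin(), b.end(),
                       std::back_inserter (excl));
  return std::make_pair (comm, excl);
}
// With a = {(p1),(p2),(p3)} and b = {(p2),(p3),(p4)} the common part is
// {(p2),(p3)} and the part exclusive to `a` is {(p1)}.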
g1->print(); + cout << "-> WITH" << endl; + g2->print(); + cout << "-> ON: " << f1 << "|" ; + cout << g1->constr()->tupleSet (f1.logVars()) << endl; + cout << "-> ON: " << f2 << "|" ; + cout << g2->constr()->tupleSet (f2.logVars()) << endl; + Util::printDashedLine(); + */ + if (f1.isAtom()) { + f2.setGroup (f1.group()); + updateGroups (f2.group(), f1.group()); + return { }; + } + assert (g1->constr()->empty() == false); + assert (g2->constr()->empty() == false); + if (f1.group() == f2.group()) { + assert (identical (f1, *(g1->constr()), f2, *(g2->constr()))); + return { }; + } + + g1->constr()->moveToTop (f1.logVars()); + g2->constr()->moveToTop (f2.logVars()); + + std::pair split1 = + g1->constr()->split (f1.logVars(), g2->constr(), f2.logVars()); + ConstraintTree* commCt1 = split1.first; + ConstraintTree* exclCt1 = split1.second; + + if (commCt1->empty()) { + // disjoint + delete commCt1; + delete exclCt1; + return { }; + } + + std::pair split2 = + g2->constr()->split (f2.logVars(), g1->constr(), f1.logVars()); + ConstraintTree* commCt2 = split2.first; + ConstraintTree* exclCt2 = split2.second; + + assert (commCt1->tupleSet (f1.logVars()) == + commCt2->tupleSet (f2.logVars())); + + // stringstream ss1; ss1 << "" << count << "_A.dot" ; + // stringstream ss2; ss2 << "" << count << "_B.dot" ; + // stringstream ss3; ss3 << "" << count << "_A_comm.dot" ; + // stringstream ss4; ss4 << "" << count << "_A_excl.dot" ; + // stringstream ss5; ss5 << "" << count << "_B_comm.dot" ; + // stringstream ss6; ss6 << "" << count << "_B_excl.dot" ; + // g1->constr()->exportToGraphViz (ss1.str().c_str(), true); + // g2->constr()->exportToGraphViz (ss2.str().c_str(), true); + // commCt1->exportToGraphViz (ss3.str().c_str(), true); + // exclCt1->exportToGraphViz (ss4.str().c_str(), true); + // commCt2->exportToGraphViz (ss5.str().c_str(), true); + // exclCt2->exportToGraphViz (ss6.str().c_str(), true); + + if (exclCt1->empty() && exclCt2->empty()) { + // identical + f2.setGroup (f1.group()); + updateGroups (f2.group(), f1.group()); + delete commCt1; + delete exclCt1; + delete commCt2; + delete exclCt2; + return { }; + } + + PrvGroup group; + if (exclCt1->empty()) { + group = f1.group(); + } else if (exclCt2->empty()) { + group = f2.group(); + } else { + group = ProbFormula::getNewGroup(); + } + Parfactors res1 = shatter (g1, fIdx1, commCt1, exclCt1, group); + Parfactors res2 = shatter (g2, fIdx2, commCt2, exclCt2, group); + return make_pair (res1, res2); +} + + + +Parfactors +ParfactorList::shatter ( + Parfactor* g, + size_t fIdx, + ConstraintTree* commCt, + ConstraintTree* exclCt, + PrvGroup commGroup) +{ + ProbFormula& f = g->argument (fIdx); + if (exclCt->empty()) { + delete commCt; + delete exclCt; + f.setGroup (commGroup); + return { }; + } + + Parfactors result; + if (f.isCounting()) { + LogVar X_new1 = g->constr()->logVarSet().back() + 1; + LogVar X_new2 = g->constr()->logVarSet().back() + 2; + ConstraintTrees cts = g->constr()->jointCountNormalize ( + commCt, exclCt, f.countedLogVar(), X_new1, X_new2); + for (size_t i = 0; i < cts.size(); i++) { + Parfactor* newPf = new Parfactor (g, cts[i]); + if (cts[i]->nrLogVars() == g->constr()->nrLogVars() + 1) { + newPf->expand (f.countedLogVar(), X_new1, X_new2); + assert (g->constr()->getConditionalCount (f.countedLogVar()) == + cts[i]->getConditionalCount (X_new1) + + cts[i]->getConditionalCount (X_new2)); + } else { + assert (g->constr()->getConditionalCount (f.countedLogVar()) == + cts[i]->getConditionalCount (f.countedLogVar())); + } + 
newPf->setNewGroups(); + result.push_back (newPf); + } + delete commCt; + delete exclCt; + } else { + Parfactor* newPf = new Parfactor (g, commCt); + newPf->setNewGroups(); + newPf->argument (fIdx).setGroup (commGroup); + result.push_back (newPf); + newPf = new Parfactor (g, exclCt); + newPf->setNewGroups(); + result.push_back (newPf); + } + return result; +} + + + +void +ParfactorList::updateGroups (PrvGroup oldGroup, PrvGroup newGroup) +{ + for (ParfactorList::iterator it = pfList_.begin(); + it != pfList_.end(); ++it) { + ProbFormulas& formulas = (*it)->arguments(); + for (size_t i = 0; i < formulas.size(); i++) { + if (formulas[i].group() == oldGroup) { + formulas[i].setGroup (newGroup); + } + } + } +} + + + +bool +ParfactorList::proper ( + const ProbFormula& f1, ConstraintTree ct1, + const ProbFormula& f2, ConstraintTree ct2) const +{ + return disjoint (f1, ct1, f2, ct2) + || identical (f1, ct1, f2, ct2); +} + + + +bool +ParfactorList::identical ( + const ProbFormula& f1, ConstraintTree ct1, + const ProbFormula& f2, ConstraintTree ct2) const +{ + if (f1.sameSkeletonAs (f2) == false) { + return false; + } + if (f1.isAtom()) { + return true; + } + TupleSet ts1 = ct1.tupleSet (f1.logVars()); + TupleSet ts2 = ct2.tupleSet (f2.logVars()); + return ts1 == ts2; +} + + + +bool +ParfactorList::disjoint ( + const ProbFormula& f1, ConstraintTree ct1, + const ProbFormula& f2, ConstraintTree ct2) const +{ + if (f1.sameSkeletonAs (f2) == false) { + return true; + } + if (f1.isAtom()) { + return false; + } + TupleSet ts1 = ct1.tupleSet (f1.logVars()); + TupleSet ts2 = ct2.tupleSet (f2.logVars()); + return (ts1 & ts2).empty(); +} + diff --git a/packages/CLPBN/horus2/ParfactorList.h b/packages/CLPBN/horus2/ParfactorList.h new file mode 100644 index 000000000..1c6404dcb --- /dev/null +++ b/packages/CLPBN/horus2/ParfactorList.h @@ -0,0 +1,121 @@ +#ifndef HORUS_PARFACTORLIST_H +#define HORUS_PARFACTORLIST_H + +#include +#include + +#include "Parfactor.h" +#include "ProbFormula.h" + + +using namespace std; + + +class ParfactorList +{ + public: + ParfactorList (void) { } + + ParfactorList (const ParfactorList&); + + ParfactorList (const Parfactors&); + + ~ParfactorList (void); + + const list& parfactors (void) const { return pfList_; } + + void clear (void) { pfList_.clear(); } + + size_t size (void) const { return pfList_.size(); } + + typedef std::list::iterator iterator; + + iterator begin (void) { return pfList_.begin(); } + + iterator end (void) { return pfList_.end(); } + + typedef std::list::const_iterator const_iterator; + + const_iterator begin (void) const { return pfList_.begin(); } + + const_iterator end (void) const { return pfList_.end(); } + + void add (Parfactor* pf); + + void add (const Parfactors& pfs); + + void addShattered (Parfactor* pf); + + list::iterator insertShattered ( + list::iterator, Parfactor*); + + list::iterator remove (list::iterator); + + list::iterator removeAndDelete (list::iterator); + + bool isAllShattered (void) const; + + void print (void) const; + + ParfactorList& operator= (const ParfactorList& pfList); + + private: + bool isShattered (const Parfactor*) const; + + bool isShattered (const Parfactor*, const Parfactor*) const; + + void addToShatteredList (Parfactor*); + + Parfactors shatterAgainstMySelf (Parfactor* g); + + Parfactors shatterAgainstMySelf2 (Parfactor* g); + + Parfactors shatterAgainstMySelf ( + Parfactor* g, size_t fIdx1, size_t fIdx2); + + std::pair shatter ( + Parfactor*, Parfactor*); + + std::pair shatter ( + size_t, Parfactor*, size_t, 
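// ParfactorList::identical and ParfactorList::disjoint above compare
// the tuple sets that two formula occurrences project onto: identical
// means exactly the same ground tuples, disjoint means no tuple in
// common.  A standalone sketch of the two predicates, with std::set
// standing in for TupleSet (illustration only):
#include <set>
#include <string>
#include <vector>

typedef std::set<std::vector<std::string> > TupSet;

static bool identicalTuples (const TupSet& ts1, const TupSet& ts2)
{
  return ts1 == ts2;                 // exactly the same ground tuples
}

static bool disjointTuples (const TupSet& ts1, const TupSet& ts2)
{
  for (TupSet::const_iterator it = ts1.begin(); it != ts1.end(); ++it) {
    if (ts2.count (*it) != 0) {
      return false;                  // a shared tuple: not disjoint
    }
  }
  return true;
}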
Parfactor*); + + Parfactors shatter ( + Parfactor*, + size_t, + ConstraintTree*, + ConstraintTree*, + PrvGroup); + + void updateGroups (PrvGroup group1, PrvGroup group2); + + bool proper ( + const ProbFormula&, ConstraintTree, + const ProbFormula&, ConstraintTree) const; + + bool identical ( + const ProbFormula&, ConstraintTree, + const ProbFormula&, ConstraintTree) const; + + bool disjoint ( + const ProbFormula&, ConstraintTree, + const ProbFormula&, ConstraintTree) const; + + struct sortByParams + { + inline bool operator() (const Parfactor* pf1, const Parfactor* pf2) + { + if (pf1->params().size() < pf2->params().size()) { + return true; + } else if (pf1->params().size() == pf2->params().size() && + pf1->params() < pf2->params()) { + return true; + } + return false; + } + }; + + list pfList_; +}; + +#endif // HORUS_PARFACTORLIST_H + diff --git a/packages/CLPBN/horus2/ProbFormula.cpp b/packages/CLPBN/horus2/ProbFormula.cpp new file mode 100644 index 000000000..fa2d26d05 --- /dev/null +++ b/packages/CLPBN/horus2/ProbFormula.cpp @@ -0,0 +1,140 @@ +#include "ProbFormula.h" + + +PrvGroup ProbFormula::freeGroup_ = 0; + + + +bool +ProbFormula::sameSkeletonAs (const ProbFormula& f) const +{ + return functor_ == f.functor() && logVars_.size() == f.arity(); +} + + + +bool +ProbFormula::contains (LogVar lv) const +{ + return Util::contains (logVars_, lv); +} + + + +bool +ProbFormula::contains (LogVarSet s) const +{ + return LogVarSet (logVars_).contains (s); +} + + + +size_t +ProbFormula::indexOf (LogVar X) const +{ + return Util::indexOf (logVars_, X); +} + + + +bool +ProbFormula::isAtom (void) const +{ + return logVars_.size() == 0; +} + + + +bool +ProbFormula::isCounting (void) const +{ + return countedLogVar_.valid(); +} + + + +LogVar +ProbFormula::countedLogVar (void) const +{ + assert (isCounting()); + return countedLogVar_; +} + + + +void +ProbFormula::setCountedLogVar (LogVar lv) +{ + countedLogVar_ = lv; +} + + + +void +ProbFormula::clearCountedLogVar (void) +{ + countedLogVar_ = LogVar(); +} + + + +void +ProbFormula::rename (LogVar oldName, LogVar newName) +{ + for (size_t i = 0; i < logVars_.size(); i++) { + if (logVars_[i] == oldName) { + logVars_[i] = newName; + } + } + if (isCounting() && countedLogVar_ == oldName) { + countedLogVar_ = newName; + } +} + + +bool operator== (const ProbFormula& f1, const ProbFormula& f2) +{ + return f1.group_ == f2.group_ && + f1.logVars_ == f2.logVars_; +} + + + +std::ostream& operator<< (ostream &os, const ProbFormula& f) +{ + os << f.functor_; + if (f.isAtom() == false) { + os << "(" ; + for (size_t i = 0; i < f.logVars_.size(); i++) { + if (i != 0) os << ","; + if (f.isCounting() && f.logVars_[i] == f.countedLogVar_) { + os << "#" ; + } + os << f.logVars_[i]; + } + os << ")" ; + } + os << "::" << f.range_; + return os; +} + + + +PrvGroup +ProbFormula::getNewGroup (void) +{ + freeGroup_ ++; + assert (freeGroup_ != numeric_limits::max()); + return freeGroup_; +} + + + +ostream& operator<< (ostream &os, const ObservedFormula& of) +{ + os << of.functor_ << "/" << of.arity_; + os << "|" << of.constr_.tupleSet(); + os << " [evidence=" << of.evidence_ << "]"; + return os; +} + diff --git a/packages/CLPBN/horus2/ProbFormula.h b/packages/CLPBN/horus2/ProbFormula.h new file mode 100644 index 000000000..63086266a --- /dev/null +++ b/packages/CLPBN/horus2/ProbFormula.h @@ -0,0 +1,114 @@ +#ifndef HORUS_PROBFORMULA_H +#define HORUS_PROBFORMULA_H + +#include + +#include "ConstraintTree.h" +#include "LiftedUtils.h" +#include "Horus.h" + +typedef unsigned long 
PrvGroup; + +class ProbFormula +{ + public: + ProbFormula (Symbol f, const LogVars& lvs, unsigned range) + : functor_(f), logVars_(lvs), range_(range), + countedLogVar_(), group_(numeric_limits::max()) { } + + ProbFormula (Symbol f, unsigned r) + : functor_(f), range_(r), group_(numeric_limits::max()) { } + + Symbol functor (void) const { return functor_; } + + unsigned arity (void) const { return logVars_.size(); } + + unsigned range (void) const { return range_; } + + LogVars& logVars (void) { return logVars_; } + + const LogVars& logVars (void) const { return logVars_; } + + LogVarSet logVarSet (void) const { return LogVarSet (logVars_); } + + PrvGroup group (void) const { return group_; } + + void setGroup (PrvGroup g) { group_ = g; } + + bool sameSkeletonAs (const ProbFormula&) const; + + bool contains (LogVar) const; + + bool contains (LogVarSet) const; + + size_t indexOf (LogVar) const; + + bool isAtom (void) const; + + bool isCounting (void) const; + + LogVar countedLogVar (void) const; + + void setCountedLogVar (LogVar); + + void clearCountedLogVar (void); + + void rename (LogVar, LogVar); + + static PrvGroup getNewGroup (void); + + friend std::ostream& operator<< (ostream &os, const ProbFormula& f); + + friend bool operator== (const ProbFormula& f1, const ProbFormula& f2); + + private: + Symbol functor_; + LogVars logVars_; + unsigned range_; + LogVar countedLogVar_; + PrvGroup group_; + static PrvGroup freeGroup_; +}; + +typedef vector ProbFormulas; + + +class ObservedFormula +{ + public: + ObservedFormula (Symbol f, unsigned a, unsigned ev) + : functor_(f), arity_(a), evidence_(ev), constr_(a) { } + + ObservedFormula (Symbol f, unsigned ev, const Tuple& tuple) + : functor_(f), arity_(tuple.size()), evidence_(ev), constr_(arity_) + { + constr_.addTuple (tuple); + } + + Symbol functor (void) const { return functor_; } + + unsigned arity (void) const { return arity_; } + + unsigned evidence (void) const { return evidence_; } + + void setEvidence (unsigned ev) { evidence_ = ev; } + + ConstraintTree& constr (void) { return constr_; } + + bool isAtom (void) const { return arity_ == 0; } + + void addTuple (const Tuple& tuple) { constr_.addTuple (tuple); } + + friend ostream& operator<< (ostream &os, const ObservedFormula& of); + + private: + Symbol functor_; + unsigned arity_; + unsigned evidence_; + ConstraintTree constr_; +}; + +typedef vector ObservedFormulas; + +#endif // HORUS_PROBFORMULA_H + diff --git a/packages/CLPBN/horus2/TinySet.h b/packages/CLPBN/horus2/TinySet.h new file mode 100644 index 000000000..4b3c4bd83 --- /dev/null +++ b/packages/CLPBN/horus2/TinySet.h @@ -0,0 +1,264 @@ +#ifndef HORUS_TINYSET_H +#define HORUS_TINYSET_H + +#include +#include + +using namespace std; + + +template > +class TinySet +{ + public: + + typedef typename vector::iterator iterator; + typedef typename vector::const_iterator const_iterator; + + TinySet (const TinySet& s) + : vec_(s.vec_), cmp_(s.cmp_) { } + + TinySet (const Compare& cmp = Compare()) + : vec_(), cmp_(cmp) { } + + TinySet (const T& t, const Compare& cmp = Compare()) + : vec_(1, t), cmp_(cmp) { } + + TinySet (const vector& elements, const Compare& cmp = Compare()) + : vec_(elements), cmp_(cmp) + { + std::sort (begin(), end(), cmp_); + iterator it = unique_cmp (begin(), end()); + vec_.resize (it - begin()); + } + + iterator insert (const T& t) + { + iterator it = std::lower_bound (begin(), end(), t, cmp_); + if (it == end() || cmp_(t, *it)) { + vec_.insert (it, t); + } + return it; + } + + void insert_sorted (const T& t) + { + 
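// TinySet keeps its elements in a sorted, duplicate-free vector:
// insertion goes through std::lower_bound and membership through
// std::binary_search, as the surrounding member functions show.  A
// minimal standalone sketch of that idiom on a plain vector<int>,
// assuming only the standard library:
#include <algorithm>
#include <vector>

static void sortedInsert (std::vector<int>& v, int x)
{
  std::vector<int>::iterator it = std::lower_bound (v.begin(), v.end(), x);
  if (it == v.end() || *it != x) {
    v.insert (it, x);                // keeps the vector sorted and unique
  }
}

static bool sortedContains (const std::vector<int>& v, int x)
{
  return std::binary_search (v.begin(), v.end(), x);
}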
vec_.push_back (t); + assert (consistent()); + } + + void remove (const T& t) + { + iterator it = std::lower_bound (begin(), end(), t, cmp_); + if (it != end()) { + vec_.erase (it); + } + } + + const_iterator find (const T& t) const + { + const_iterator it = std::lower_bound (begin(), end(), t, cmp_); + return it == end() || cmp_(t, *it) ? end() : it; + } + + iterator find (const T& t) + { + iterator it = std::lower_bound (begin(), end(), t, cmp_); + return it == end() || cmp_(t, *it) ? end() : it; + } + + /* set union */ + TinySet operator| (const TinySet& s) const + { + TinySet res; + std::set_union ( + vec_.begin(), vec_.end(), + s.vec_.begin(), s.vec_.end(), + std::back_inserter (res.vec_), + cmp_); + return res; + } + + /* set intersection */ + TinySet operator& (const TinySet& s) const + { + TinySet res; + std::set_intersection ( + vec_.begin(), vec_.end(), + s.vec_.begin(), s.vec_.end(), + std::back_inserter (res.vec_), + cmp_); + return res; + } + + /* set difference */ + TinySet operator- (const TinySet& s) const + { + TinySet res; + std::set_difference ( + vec_.begin(), vec_.end(), + s.vec_.begin(), s.vec_.end(), + std::back_inserter (res.vec_), + cmp_); + return res; + } + + TinySet& operator|= (const TinySet& s) + { + return *this = (*this | s); + } + + TinySet& operator&= (const TinySet& s) + { + return *this = (*this & s); + } + + TinySet& operator-= (const TinySet& s) + { + return *this = (*this - s); + } + + bool contains (const T& t) const + { + return std::binary_search ( + vec_.begin(), vec_.end(), t, cmp_); + } + + bool contains (const TinySet& s) const + { + return std::includes ( + vec_.begin(), + vec_.end(), + s.vec_.begin(), + s.vec_.end(), + cmp_); + } + + bool in (const TinySet& s) const + { + return std::includes ( + s.vec_.begin(), + s.vec_.end(), + vec_.begin(), + vec_.end(), + cmp_); + } + + bool intersects (const TinySet& s) const + { + return (*this & s).size() > 0; + } + + const T& operator[] (typename vector::size_type i) const + { + return vec_[i]; + } + + T& operator[] (typename vector::size_type i) + { + return vec_[i]; + } + + T front (void) const + { + return vec_.front(); + } + + T& front (void) + { + return vec_.front(); + } + + T back (void) const + { + return vec_.back(); + } + + T& back (void) + { + return vec_.back(); + } + + const vector& elements (void) const + { + return vec_; + } + + bool empty (void) const + { + return size() == 0; + } + + typename vector::size_type size (void) const + { + return vec_.size(); + } + + void clear (void) + { + vec_.clear(); + } + + void reserve (typename vector::size_type size) + { + vec_.reserve (size); + } + + iterator begin (void) { return vec_.begin(); } + iterator end (void) { return vec_.end(); } + const_iterator begin (void) const { return vec_.begin(); } + const_iterator end (void) const { return vec_.end(); } + + friend bool operator== (const TinySet& s1, const TinySet& s2) + { + return s1.vec_ == s2.vec_; + } + + friend bool operator!= (const TinySet& s1, const TinySet& s2) + { + return ! (s1.vec_ == s2.vec_); + } + + friend std::ostream& operator << (std::ostream& out, const TinySet& s) + { + out << "{" ; + typename vector::size_type i; + for (i = 0; i < s.size(); i++) { + out << ((i != 0) ? 
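// A short usage sketch for the set algebra defined above (union `|`,
// intersection `&`, difference `-`, membership `contains`, subset test
// `in`); the element values are arbitrary and the function itself is
// illustrative only, not part of the class.
#include <cassert>
#include <vector>

static void tinySetExample (void)
{
  TinySet<int> a (std::vector<int>{ 1, 3, 5 });
  TinySet<int> b (std::vector<int>{ 3, 4 });
  assert ((a | b).size() == 4);       // union:        {1,3,4,5}
  assert ((a & b).size() == 1);       // intersection: {3}
  assert ((a - b).size() == 2);       // difference:   {1,5}
  assert (a.contains (5));
  assert (b.in (a) == false);         // b is not a subset of a
}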
"," : "") << s.vec_[i]; + } + out << "}" ; + return out; + } + + private: + iterator unique_cmp (iterator first, iterator last) + { + if (first == last) { + return last; + } + iterator result = first; + while (++first != last) { + if (cmp_(*result, *first)) { + *(++result) = *first; + } + } + return ++result; + } + + bool consistent (void) const + { + typename vector::size_type i; + for (i = 0; i < vec_.size() - 1; i++) { + if ( ! cmp_(vec_[i], vec_[i + 1])) { + return false; + } + } + return true; + } + + vector vec_; + Compare cmp_; +}; + +#endif // HORUS_TINYSET_H + diff --git a/packages/CLPBN/horus2/Util.cpp b/packages/CLPBN/horus2/Util.cpp new file mode 100644 index 000000000..0f3ce6544 --- /dev/null +++ b/packages/CLPBN/horus2/Util.cpp @@ -0,0 +1,429 @@ +#include + +#include +#include + +#include "Util.h" +#include "Indexer.h" +#include "ElimGraph.h" + + +namespace Globals { +bool logDomain = false; + +unsigned verbosity = 0; + +LiftedSolverType liftedSolver = LiftedSolverType::LVE; + +GroundSolverType groundSolver = GroundSolverType::VE; + +}; + + + +namespace BpOptions { +Schedule schedule = BpOptions::Schedule::SEQ_FIXED; +//Schedule schedule = BpOptions::Schedule::SEQ_RANDOM; +//Schedule schedule = BpOptions::Schedule::PARALLEL; +//Schedule schedule = BpOptions::Schedule::MAX_RESIDUAL; +double accuracy = 0.0001; +unsigned maxIter = 1000; +} + + + +namespace Util { + + +template <> std::string +toString (const bool& b) +{ + std::stringstream ss; + ss << std::boolalpha << b; + return ss.str(); +} + + + +unsigned +stringToUnsigned (string str) +{ + int val; + stringstream ss; + ss << str; + ss >> val; + if (val < 0) { + cerr << "Error: the number readed is negative." << endl; + exit (EXIT_FAILURE); + } + return static_cast (val); +} + + + +double +stringToDouble (string str) +{ + double val; + stringstream ss; + ss << str; + ss >> val; + return val; +} + + + +double +factorial (unsigned num) +{ + double result = 1.0; + for (unsigned i = 1; i <= num; i++) { + result *= i; + } + return result; +} + + + +double +logFactorial (unsigned num) +{ + double result = 0.0; + if (num < 150) { + result = std::log (factorial (num)); + } else { + for (unsigned i = 1; i <= num; i++) { + result += std::log (i); + } + } + return result; +} + + + +unsigned +nrCombinations (unsigned n, unsigned k) +{ + assert (n >= k); + int diff = n - k; + unsigned result = 0; + if (n < 150) { + unsigned prod = 1; + for (int i = n; i > diff; i--) { + prod *= i; + } + result = prod / factorial (k); + } else { + double prod = 0.0; + for (int i = n; i > diff; i--) { + prod += std::log (i); + } + prod -= logFactorial (k); + result = static_cast (std::exp (prod)); + } + return result; +} + + + +size_t +sizeExpected (const Ranges& ranges) +{ + return std::accumulate (ranges.begin(), + ranges.end(), 1, multiplies()); +} + + + +unsigned +nrDigits (int num) +{ + unsigned count = 1; + while (num >= 10) { + num /= 10; + count ++; + } + return count; +} + + + +bool +isInteger (const string& s) +{ + stringstream ss1 (s); + stringstream ss2; + int integer; + ss1 >> integer; + ss2 << integer; + return (ss1.str() == ss2.str()); +} + + + +string +parametersToString (const Params& v, unsigned precision) +{ + stringstream ss; + ss.precision (precision); + ss << "[" ; + for (size_t i = 0; i < v.size(); i++) { + if (i != 0) ss << ", " ; + ss << v[i]; + } + ss << "]" ; + return ss.str(); +} + + + +vector +getStateLines (const Vars& vars) +{ + Ranges ranges; + for (size_t i = 0; i < vars.size(); i++) { + ranges.push_back (vars[i]->range()); 
+ } + Indexer indexer (ranges); + vector jointStrings; + while (indexer.valid()) { + stringstream ss; + for (size_t i = 0; i < vars.size(); i++) { + if (i != 0) ss << ", " ; + ss << vars[i]->label() << "=" ; + ss << vars[i]->states()[(indexer[i])]; + } + jointStrings.push_back (ss.str()); + ++ indexer; + } + return jointStrings; +} + + + +bool +setHorusFlag (string key, string value) +{ + bool returnVal = true; + if (key == "verbosity") { + stringstream ss; + ss << value; + ss >> Globals::verbosity; + } else if (key == "lifted_solver") { + if ( value == "lve") { + Globals::liftedSolver = LiftedSolverType::LVE; + } else if (value == "lbp") { + Globals::liftedSolver = LiftedSolverType::LBP; + } else if (value == "lkc") { + Globals::liftedSolver = LiftedSolverType::LKC; + } else { + cerr << "warning: invalid value `" << value << "' " ; + cerr << "for `" << key << "'" << endl; + returnVal = false; + } + } else if (key == "ground_solver") { + if ( value == "ve") { + Globals::groundSolver = GroundSolverType::VE; + } else if (value == "bp") { + Globals::groundSolver = GroundSolverType::BP; + } else if (value == "cbp") { + Globals::groundSolver = GroundSolverType::CBP; + } else { + cerr << "warning: invalid value `" << value << "' " ; + cerr << "for `" << key << "'" << endl; + returnVal = false; + } + } else if (key == "elim_heuristic") { + if ( value == "sequential") { + ElimGraph::elimHeuristic = ElimHeuristic::SEQUENTIAL; + } else if (value == "min_neighbors") { + ElimGraph::elimHeuristic = ElimHeuristic::MIN_NEIGHBORS; + } else if (value == "min_weight") { + ElimGraph::elimHeuristic = ElimHeuristic::MIN_WEIGHT; + } else if (value == "min_fill") { + ElimGraph::elimHeuristic = ElimHeuristic::MIN_FILL; + } else if (value == "weighted_min_fill") { + ElimGraph::elimHeuristic = ElimHeuristic::WEIGHTED_MIN_FILL; + } else { + cerr << "warning: invalid value `" << value << "' " ; + cerr << "for `" << key << "'" << endl; + returnVal = false; + } + } else if (key == "schedule") { + if ( value == "seq_fixed") { + BpOptions::schedule = BpOptions::Schedule::SEQ_FIXED; + } else if (value == "seq_random") { + BpOptions::schedule = BpOptions::Schedule::SEQ_RANDOM; + } else if (value == "parallel") { + BpOptions::schedule = BpOptions::Schedule::PARALLEL; + } else if (value == "max_residual") { + BpOptions::schedule = BpOptions::Schedule::MAX_RESIDUAL; + } else { + cerr << "warning: invalid value `" << value << "' " ; + cerr << "for `" << key << "'" << endl; + returnVal = false; + } + } else if (key == "accuracy") { + stringstream ss; + ss << value; + ss >> BpOptions::accuracy; + } else if (key == "max_iter") { + stringstream ss; + ss << value; + ss >> BpOptions::maxIter; + } else if (key == "use_logarithms") { + if ( value == "true") { + Globals::logDomain = true; + } else if (value == "false") { + Globals::logDomain = false; + } else { + cerr << "warning: invalid value `" << value << "' " ; + cerr << "for `" << key << "'" << endl; + returnVal = false; + } + } else { + cerr << "warning: invalid key `" << key << "'" << endl; + returnVal = false; + } + return returnVal; +} + + + +void +printHeader (string header, std::ostream& os) +{ + printAsteriskLine (os); + os << header << endl; + printAsteriskLine (os); +} + + + +void +printSubHeader (string header, std::ostream& os) +{ + printDashedLine (os); + os << header << endl; + printDashedLine (os); +} + + + +void +printAsteriskLine (std::ostream& os) +{ + os << "********************************" ; + os << "********************************" ; + os << endl; +} + + + 
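// A short usage sketch for setHorusFlag as defined above: each call
// returns false (and prints a warning) when the key or the value is
// not recognised.  The particular settings chosen here are only an
// example.
static void configureSolversExample (void)
{
  Util::setHorusFlag ("lifted_solver",  "lve");   // lve, lbp or lkc
  Util::setHorusFlag ("ground_solver",  "ve");    // ve, bp or cbp
  Util::setHorusFlag ("use_logarithms", "true");  // work in the log domain
  bool ok = Util::setHorusFlag ("verbosity", "1");
  (void) ok;                                      // false would mean a bad key or value
}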
+void +printDashedLine (std::ostream& os) +{ + os << "--------------------------------" ; + os << "--------------------------------" ; + os << endl; +} + + +} + + + +namespace LogAware { + +void +normalize (Params& v) +{ + if (Globals::logDomain) { + double sum = std::accumulate (v.begin(), v.end(), + LogAware::addIdenty(), Util::logSum); + assert (sum != -numeric_limits::infinity()); + v -= sum; + } else { + double sum = std::accumulate (v.begin(), v.end(), 0.0); + assert (sum != 0.0); + v /= sum; + } +} + + + +double +getL1Distance (const Params& v1, const Params& v2) +{ + assert (v1.size() == v2.size()); + double dist = 0.0; + if (Globals::logDomain) { + dist = std::inner_product (v1.begin(), v1.end(), v2.begin(), 0.0, + std::plus(), FuncObject::abs_diff_exp()); + } else { + dist = std::inner_product (v1.begin(), v1.end(), v2.begin(), 0.0, + std::plus(), FuncObject::abs_diff()); + } + return dist; +} + + + +double +getMaxNorm (const Params& v1, const Params& v2) +{ + assert (v1.size() == v2.size()); + double max = 0.0; + if (Globals::logDomain) { + max = std::inner_product (v1.begin(), v1.end(), v2.begin(), 0.0, + FuncObject::max(), FuncObject::abs_diff_exp()); + } else { + max = std::inner_product (v1.begin(), v1.end(), v2.begin(), 0.0, + FuncObject::max(), FuncObject::abs_diff()); + } + return max; +} + + + +double +pow (double base, unsigned iexp) +{ + return Globals::logDomain + ? base * iexp + : std::pow (base, iexp); +} + + + +double +pow (double base, double exp) +{ + // `expoent' should not be in log domain + return Globals::logDomain + ? base * exp + : std::pow (base, exp); +} + + + +void +pow (Params& v, unsigned iexp) +{ + if (iexp == 1) { + return; + } + Globals::logDomain ? v *= iexp : v ^= (int)iexp; +} + + + +void +pow (Params& v, double exp) +{ + // `expoent' should not be in log domain + Globals::logDomain ? 
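// The LogAware helpers above rely on two identities: normalizing in
// the log domain subtracts log(sum_i exp v_i), and raising to a power
// multiplies by the exponent.  A standalone check of both on plain
// doubles (illustration only, not LogAware itself):
#include <cassert>
#include <cmath>

static void logDomainIdentities (void)
{
  double p = 0.25, q = 0.75, k = 3.0;
  // pow: log(p^k) == k * log(p)
  assert (std::fabs (std::log (std::pow (p, k)) - k * std::log (p)) < 1e-12);
  // normalize: log(p / (p + q)) == log(p) - log(p + q)
  assert (std::fabs (std::log (p / (p + q))
                     - (std::log (p) - std::log (p + q))) < 1e-12);
}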
v *= exp : v ^= exp; +} + +} + diff --git a/packages/CLPBN/horus2/Util.h b/packages/CLPBN/horus2/Util.h new file mode 100644 index 000000000..38a088714 --- /dev/null +++ b/packages/CLPBN/horus2/Util.h @@ -0,0 +1,422 @@ +#ifndef HORUS_UTIL_H +#define HORUS_UTIL_H + +#include +#include +#include + +#include +#include +#include +#include +#include + +#include +#include + +#include "Horus.h" + +using namespace std; + + +namespace { +const double NEG_INF = -numeric_limits::infinity(); +}; + + +namespace Util { + +template void addToVector (vector&, const vector&); + +template void addToSet (set&, const vector&); + +template void addToQueue (queue&, const vector&); + +template bool contains (const vector&, const T&); + +template bool contains (const set&, const T&); + +template bool contains ( + const unordered_map&, const K&); + +template size_t indexOf (const vector&, const T&); + +template +void apply_n_times (Params& v1, const Params& v2, unsigned repetitions, Operation); + +template void log (vector&); + +template void exp (vector&); + +template string elementsToString ( + const vector& v, string sep = " "); + +template std::string toString (const T&); + +template <> std::string toString (const bool&); + +double logSum (double, double); + +unsigned maxUnsigned (void); + +unsigned stringToUnsigned (string); + +double stringToDouble (string); + +double factorial (unsigned); + +double logFactorial (unsigned); + +unsigned nrCombinations (unsigned, unsigned); + +size_t sizeExpected (const Ranges&); + +unsigned nrDigits (int); + +bool isInteger (const string&); + +string parametersToString (const Params&, unsigned = Constants::PRECISION); + +vector getStateLines (const Vars&); + +bool setHorusFlag (string key, string value); + +void printHeader (string, std::ostream& os = std::cout); + +void printSubHeader (string, std::ostream& os = std::cout); + +void printAsteriskLine (std::ostream& os = std::cout); + +void printDashedLine (std::ostream& os = std::cout); + +}; + + + +template void +Util::addToVector (vector& v, const vector& elements) +{ + v.insert (v.end(), elements.begin(), elements.end()); +} + + + +template void +Util::addToSet (set& s, const vector& elements) +{ + s.insert (elements.begin(), elements.end()); +} + + + +template void +Util::addToQueue (queue& q, const vector& elements) +{ + for (size_t i = 0; i < elements.size(); i++) { + q.push (elements[i]); + } +} + + + +template bool +Util::contains (const vector& v, const T& e) +{ + return std::find (v.begin(), v.end(), e) != v.end(); +} + + + +template bool +Util::contains (const set& s, const T& e) +{ + return s.find (e) != s.end(); +} + + + +template bool +Util::contains (const unordered_map& m, const K& k) +{ + return m.find (k) != m.end(); +} + + + +template size_t +Util::indexOf (const vector& v, const T& e) +{ + return std::distance (v.begin(), + std::find (v.begin(), v.end(), e)); +} + + + +template void +Util::apply_n_times (Params& v1, const Params& v2, unsigned repetitions, + Operation unary_op) +{ + Params::iterator first = v1.begin(); + Params::const_iterator last = v1.end(); + Params::const_iterator first2 = v2.begin(); + Params::const_iterator last2 = v2.end(); + while (first != last) { + for (first2 = v2.begin(); first2 != last2; ++first2) { + std::transform (first, first + repetitions, first, + std::bind1st (unary_op, *first2)); + first += repetitions; + } + } +} + + + +template void +Util::log (vector& v) +{ + std::transform (v.begin(), v.end(), v.begin(), ::log); +} + + + +template void +Util::exp (vector& v) +{ + 
std::transform (v.begin(), v.end(), v.begin(), ::exp); +} + + + +template string +Util::elementsToString (const vector& v, string sep) +{ + stringstream ss; + for (size_t i = 0; i < v.size(); i++) { + ss << ((i != 0) ? sep : "") << v[i]; + } + return ss.str(); +} + + + +template std::string +Util::toString (const T& t) +{ + std::stringstream ss; + ss << t; + return ss.str(); +} + + + +inline double +Util::logSum (double x, double y) +{ + // std::log (std::exp (x) + std::exp (y)) can overflow! + assert (std::isnan (x) == false); + assert (std::isnan (y) == false); + if (x == NEG_INF) { + return y; + } + if (y == NEG_INF) { + return x; + } + // if one value is much smaller than the other, + // keep the larger value + const double tol = 460.517; // log (1e200) + if (x < y - tol) { + return y; + } + if (y < x - tol) { + return x; + } + assert (std::isnan (x - y) == false); + const double exp_diff = std::exp (x - y); + if (std::isfinite (exp_diff) == false) { + // difference is too large + return x > y ? x : y; + } + // otherwise return the sum + return y + std::log (static_cast(1.0) + exp_diff); +} + + + +inline unsigned +Util::maxUnsigned (void) +{ + return numeric_limits::max(); +} + + + +namespace LogAware { + +inline double one() { return Globals::logDomain ? 0.0 : 1.0; } +inline double zero() { return Globals::logDomain ? NEG_INF : 0.0; } +inline double addIdenty() { return Globals::logDomain ? NEG_INF : 0.0; } +inline double multIdenty() { return Globals::logDomain ? 0.0 : 1.0; } +inline double withEvidence() { return Globals::logDomain ? 0.0 : 1.0; } +inline double noEvidence() { return Globals::logDomain ? NEG_INF : 0.0; } +inline double log (double v) { return Globals::logDomain ? ::log (v) : v; } +inline double exp (double v) { return Globals::logDomain ? 
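// Util::logSum above is the usual log-sum-exp trick: factoring out the
// larger argument keeps the intermediate exponential finite even when
// x and y are far beyond the range of double.  A standalone sketch and
// a small demonstration (illustration only, not the Horus routine):
#include <algorithm>
#include <cassert>
#include <cmath>

static double logSumSketch (double x, double y)
{
  double hi = std::max (x, y);
  double lo = std::min (x, y);
  return hi + std::log1p (std::exp (lo - hi));   // log(exp(x) + exp(y))
}

static void logSumDemo (void)
{
  // computing log(exp(1000) + exp(999)) directly would overflow;
  // the shifted form stays finite (about 1000.313)
  double r = logSumSketch (1000.0, 999.0);
  assert (r > 1000.0 && r < 1001.0);
}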
::exp (v) : v; } + +void normalize (Params&); + +double getL1Distance (const Params&, const Params&); + +double getMaxNorm (const Params&, const Params&); + +double pow (double, unsigned); + +double pow (double, double); + +void pow (Params&, unsigned); + +void pow (Params&, double); + +}; + + + +template +void operator+=(std::vector& v, double val) +{ + std::transform (v.begin(), v.end(), v.begin(), + std::bind2nd (plus(), val)); +} + + + +template +void operator-=(std::vector& v, double val) +{ + std::transform (v.begin(), v.end(), v.begin(), + std::bind2nd (minus(), val)); +} + + + +template +void operator*=(std::vector& v, double val) +{ + std::transform (v.begin(), v.end(), v.begin(), + std::bind2nd (multiplies(), val)); +} + + + +template +void operator/=(std::vector& v, double val) +{ + std::transform (v.begin(), v.end(), v.begin(), + std::bind2nd (divides(), val)); +} + + + +template +void operator+=(std::vector& a, const std::vector& b) +{ + assert (a.size() == b.size()); + std::transform (a.begin(), a.end(), b.begin(), a.begin(), + plus()); +} + + + +template +void operator-=(std::vector& a, const std::vector& b) +{ + assert (a.size() == b.size()); + std::transform (a.begin(), a.end(), b.begin(), a.begin(), + minus()); +} + + + +template +void operator*=(std::vector& a, const std::vector& b) +{ + assert (a.size() == b.size()); + std::transform (a.begin(), a.end(), b.begin(), a.begin(), + multiplies()); +} + + + +template +void operator/=(std::vector& a, const std::vector& b) +{ + assert (a.size() == b.size()); + std::transform (a.begin(), a.end(), b.begin(), a.begin(), + divides()); +} + + + +template +void operator^=(std::vector& v, double exp) +{ + std::transform (v.begin(), v.end(), v.begin(), + std::bind2nd (ptr_fun (std::pow), exp)); +} + + + +template +void operator^=(std::vector& v, int iexp) +{ + std::transform (v.begin(), v.end(), v.begin(), + std::bind2nd (ptr_fun (std::pow), iexp)); +} + + + +template +std::ostream& operator << (std::ostream& os, const vector& v) +{ + os << "[" ; + os << Util::elementsToString (v, ", "); + os << "]" ; + return os; +} + + +namespace FuncObject { + +template +struct max : public std::binary_function +{ + T operator() (const T& x, const T& y) const + { + return x < y ? 
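// A short usage sketch for the element-wise vector operators defined
// above, applied to Params, the alias this patch uses for vectors of
// (double) factor parameters; the numbers are arbitrary.
#include <cassert>
#include <cmath>

static void paramsOperatorsDemo (void)
{
  Params v (3, 2.0);    // {2, 2, 2}
  v += 1.0;             // {3, 3, 3}
  v *= 2.0;             // {6, 6, 6}
  Params w (3, 0.5);
  v *= w;               // element-wise product: {3, 3, 3}
  v ^= 2.0;             // element-wise power:   {9, 9, 9}
  assert (std::fabs (v[0] - 9.0) < 1e-9);
}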
y : x; + } +}; + + + +template +struct abs_diff : public std::binary_function +{ + T operator() (const T& x, const T& y) const + { + return std::abs (x - y); + } +}; + + + +template +struct abs_diff_exp : public std::binary_function +{ + T operator() (const T& x, const T& y) const + { + return std::abs (std::exp (x) - std::exp (y)); + } +}; + +} + +#endif // HORUS_UTIL_H + diff --git a/packages/CLPBN/horus2/Var.cpp b/packages/CLPBN/horus2/Var.cpp new file mode 100644 index 000000000..44ab6b1e4 --- /dev/null +++ b/packages/CLPBN/horus2/Var.cpp @@ -0,0 +1,102 @@ +#include +#include + +#include "Var.h" + +using namespace std; + + +unordered_map Var::varsInfo_; + + +Var::Var (const Var* v) +{ + varId_ = v->varId(); + range_ = v->range(); + evidence_ = v->getEvidence(); + index_ = std::numeric_limits::max(); +} + + + +Var::Var (VarId varId, unsigned range, int evidence) +{ + assert (range != 0); + assert (evidence < (int) range); + varId_ = varId; + range_ = range; + evidence_ = evidence; + index_ = std::numeric_limits::max(); +} + + + +bool +Var::isValidState (int stateIndex) +{ + return stateIndex >= 0 && stateIndex < (int) range_; +} + + + +bool +Var::isValidState (const string& stateName) +{ + States states = Var::getVarInfo (varId_).states; + return Util::contains (states, stateName); +} + + + +void +Var::setEvidence (int ev) +{ + assert (ev < (int) range_); + evidence_ = ev; +} + + + +void +Var::setEvidence (const string& ev) +{ + States states = Var::getVarInfo (varId_).states; + for (size_t i = 0; i < states.size(); i++) { + if (states[i] == ev) { + evidence_ = i; + return; + } + } + assert (false); +} + + + +string +Var::label (void) const +{ + if (Var::varsHaveInfo()) { + return Var::getVarInfo (varId_).label; + } + stringstream ss; + ss << "x" << varId_; + return ss.str(); +} + + + +States +Var::states (void) const +{ + if (Var::varsHaveInfo()) { + return Var::getVarInfo (varId_).states; + } + States states; + for (unsigned i = 0; i < range_; i++) { + stringstream ss; + ss << i ; + states.push_back (ss.str()); + } + return states; +} + diff --git a/packages/CLPBN/horus2/Var.h b/packages/CLPBN/horus2/Var.h new file mode 100644 index 000000000..8ab580c3a --- /dev/null +++ b/packages/CLPBN/horus2/Var.h @@ -0,0 +1,108 @@ +#ifndef HORUS_VAR_H +#define HORUS_VAR_H + +#include + +#include + +#include "Util.h" +#include "Horus.h" + + +using namespace std; + + +struct VarInfo +{ + VarInfo (string l, const States& sts) : label(l), states(sts) { } + string label; + States states; +}; + + + +class Var +{ + public: + Var (const Var*); + + Var (VarId, unsigned, int = Constants::NO_EVIDENCE); + + virtual ~Var (void) { }; + + VarId varId (void) const { return varId_; } + + unsigned range (void) const { return range_; } + + int getEvidence (void) const { return evidence_; } + + size_t getIndex (void) const { return index_; } + + void setIndex (size_t idx) { index_ = idx; } + + bool hasEvidence (void) const + { + return evidence_ != Constants::NO_EVIDENCE; + } + + operator size_t (void) const { return index_; } + + bool operator== (const Var& var) const + { + assert (!(varId_ == var.varId() && range_ != var.range())); + return varId_ == var.varId(); + } + + bool operator!= (const Var& var) const + { + assert (!(varId_ == var.varId() && range_ != var.range())); + return varId_ != var.varId(); + } + + bool isValidState (int); + + bool isValidState (const string&); + + void setEvidence (int); + + void setEvidence (const string&); + + string label (void) const; + + States states (void) const; + + static 
void addVarInfo ( + VarId vid, string label, const States& states) + { + assert (Util::contains (varsInfo_, vid) == false); + varsInfo_.insert (make_pair (vid, VarInfo (label, states))); + } + + static VarInfo getVarInfo (VarId vid) + { + assert (Util::contains (varsInfo_, vid)); + return varsInfo_.find (vid)->second; + } + + static bool varsHaveInfo (void) + { + return varsInfo_.size() != 0; + } + + static void clearVarsInfo (void) + { + varsInfo_.clear(); + } + + private: + VarId varId_; + unsigned range_; + int evidence_; + size_t index_; + + static unordered_map varsInfo_; + +}; + +#endif // HORUS_VAR_H + diff --git a/packages/CLPBN/horus2/VarElim.cpp b/packages/CLPBN/horus2/VarElim.cpp new file mode 100644 index 000000000..fb4eecf50 --- /dev/null +++ b/packages/CLPBN/horus2/VarElim.cpp @@ -0,0 +1,217 @@ +#include + +#include "VarElim.h" +#include "ElimGraph.h" +#include "Factor.h" +#include "Util.h" + + +VarElim::~VarElim (void) +{ + delete factorList_.back(); +} + + + +Params +VarElim::solveQuery (VarIds queryVids) +{ + if (Globals::verbosity > 1) { + cout << "Solving query on " ; + for (size_t i = 0; i < queryVids.size(); i++) { + if (i != 0) cout << ", " ; + cout << fg.getVarNode (queryVids[i])->label(); + } + cout << endl; + } + factorList_.clear(); + varFactors_.clear(); + elimOrder_.clear(); + createFactorList(); + absorveEvidence(); + findEliminationOrder (queryVids); + processFactorList (queryVids); + Params params = factorList_.back()->params(); + if (Globals::logDomain) { + Util::exp (params); + } + return params; +} + + + +void +VarElim::printSolverFlags (void) const +{ + stringstream ss; + ss << "variable elimination [" ; + ss << "elim_heuristic=" ; + ElimHeuristic eh = ElimGraph::elimHeuristic; + switch (eh) { + case SEQUENTIAL: ss << "sequential"; break; + case MIN_NEIGHBORS: ss << "min_neighbors"; break; + case MIN_WEIGHT: ss << "min_weight"; break; + case MIN_FILL: ss << "min_fill"; break; + case WEIGHTED_MIN_FILL: ss << "weighted_min_fill"; break; + } + ss << ",log_domain=" << Util::toString (Globals::logDomain); + ss << "]" ; + cout << ss.str() << endl; +} + + + +void +VarElim::createFactorList (void) +{ + const FacNodes& facNodes = fg.facNodes(); + factorList_.reserve (facNodes.size() * 2); + for (size_t i = 0; i < facNodes.size(); i++) { + factorList_.push_back (new Factor (facNodes[i]->factor())); + const VarNodes& neighs = facNodes[i]->neighbors(); + for (size_t j = 0; j < neighs.size(); j++) { + unordered_map>::iterator it + = varFactors_.find (neighs[j]->varId()); + if (it == varFactors_.end()) { + it = varFactors_.insert (make_pair ( + neighs[j]->varId(), vector())).first; + } + it->second.push_back (i); + } + } +} + + + +void +VarElim::absorveEvidence (void) +{ + if (Globals::verbosity > 2) { + Util::printDashedLine(); + cout << "(initial factor list)" << endl; + printActiveFactors(); + } + const VarNodes& varNodes = fg.varNodes(); + for (size_t i = 0; i < varNodes.size(); i++) { + if (varNodes[i]->hasEvidence()) { + if (Globals::verbosity > 1) { + cout << "-> aborving evidence on "; + cout << varNodes[i]->label() << " = " ; + cout << varNodes[i]->getEvidence() << endl; + } + const vector& idxs = + varFactors_.find (varNodes[i]->varId())->second; + for (size_t j = 0; j < idxs.size(); j++) { + Factor* factor = factorList_[idxs[j]]; + if (factor->nrArguments() == 1) { + factorList_[idxs[j]] = 0; + } else { + factorList_[idxs[j]]->absorveEvidence ( + varNodes[i]->varId(), varNodes[i]->getEvidence()); + } + } + } + } +} + + + +void +VarElim::findEliminationOrder 
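// A short usage sketch for the VarElim interface above: build the
// solver on an existing factor graph and ask for the marginal of one
// variable.  How the FactorGraph is constructed is outside this file,
// so `fg` and `queryVar` here are placeholders, not Horus code.
#include "VarElim.h"

static Params queryMarginal (const FactorGraph& fg, VarId queryVar)
{
  VarElim solver (fg);
  solver.printSolverFlags();          // optional: show the active settings
  VarIds query;
  query.push_back (queryVar);
  return solver.solveQuery (query);   // normalized marginal of queryVar
}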
(const VarIds& vids) +{ + elimOrder_ = ElimGraph::getEliminationOrder (factorList_, vids); +} + + + +void +VarElim::processFactorList (const VarIds& vids) +{ + totalFactorSize_ = 0; + largestFactorSize_ = 0; + for (size_t i = 0; i < elimOrder_.size(); i++) { + if (Globals::verbosity >= 2) { + if (Globals::verbosity >= 3) { + Util::printDashedLine(); + printActiveFactors(); + } + cout << "-> summing out " ; + cout << fg.getVarNode (elimOrder_[i])->label() << endl; + } + eliminate (elimOrder_[i]); + } + + Factor* finalFactor = new Factor(); + for (size_t i = 0; i < factorList_.size(); i++) { + if (factorList_[i]) { + finalFactor->multiply (*factorList_[i]); + delete factorList_[i]; + factorList_[i] = 0; + } + } + + VarIds unobservedVids; + for (size_t i = 0; i < vids.size(); i++) { + if (fg.getVarNode (vids[i])->hasEvidence() == false) { + unobservedVids.push_back (vids[i]); + } + } + + finalFactor->reorderArguments (unobservedVids); + finalFactor->normalize(); + factorList_.push_back (finalFactor); + if (Globals::verbosity > 0) { + cout << "total factor size: " << totalFactorSize_ << endl; + cout << "largest factor size: " << largestFactorSize_ << endl; + cout << endl; + } +} + + + +void +VarElim::eliminate (VarId elimVar) +{ + Factor* result = 0; + vector& idxs = varFactors_.find (elimVar)->second; + for (size_t i = 0; i < idxs.size(); i++) { + size_t idx = idxs[i]; + if (factorList_[idx]) { + if (result == 0) { + result = new Factor (*factorList_[idx]); + } else { + result->multiply (*factorList_[idx]); + } + delete factorList_[idx]; + factorList_[idx] = 0; + } + } + totalFactorSize_ += result->size(); + if (result->size() > largestFactorSize_) { + largestFactorSize_ = result->size(); + } + if (result != 0 && result->nrArguments() != 1) { + result->sumOut (elimVar); + factorList_.push_back (result); + const VarIds& resultVarIds = result->arguments(); + for (size_t i = 0; i < resultVarIds.size(); i++) { + vector& idxs = + varFactors_.find (resultVarIds[i])->second; + idxs.push_back (factorList_.size() - 1); + } + } +} + + + +void +VarElim::printActiveFactors (void) +{ + for (size_t i = 0; i < factorList_.size(); i++) { + if (factorList_[i] != 0) { + cout << factorList_[i]->getLabel() << " " ; + cout << factorList_[i]->params() << endl; + } + } +} + diff --git a/packages/CLPBN/horus2/VarElim.h b/packages/CLPBN/horus2/VarElim.h new file mode 100644 index 000000000..fe1327fc0 --- /dev/null +++ b/packages/CLPBN/horus2/VarElim.h @@ -0,0 +1,46 @@ +#ifndef HORUS_VARELIM_H +#define HORUS_VARELIM_H + +#include "unordered_map" + +#include "GroundSolver.h" +#include "FactorGraph.h" +#include "Horus.h" + + +using namespace std; + + +class VarElim : public GroundSolver +{ + public: + VarElim (const FactorGraph& fg) : GroundSolver (fg) { } + + ~VarElim (void); + + Params solveQuery (VarIds); + + void printSolverFlags (void) const; + + private: + void createFactorList (void); + + void absorveEvidence (void); + + void findEliminationOrder (const VarIds&); + + void processFactorList (const VarIds&); + + void eliminate (VarId); + + void printActiveFactors (void); + + Factors factorList_; + VarIds elimOrder_; + unsigned largestFactorSize_; + unsigned totalFactorSize_; + unordered_map> varFactors_; +}; + +#endif // HORUS_VARELIM_H + diff --git a/packages/CLPBN/horus2/WeightedBp.cpp b/packages/CLPBN/horus2/WeightedBp.cpp new file mode 100644 index 000000000..d8a32a246 --- /dev/null +++ b/packages/CLPBN/horus2/WeightedBp.cpp @@ -0,0 +1,288 @@ +#include "WeightedBp.h" + + +WeightedBp::~WeightedBp (void) +{ + for 
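// Standalone sketch of a single elimination step, mirroring
// VarElim::eliminate above: collect every live factor that mentions
// the variable, multiply them together, then sum the variable out and
// put the product back in the pool.  The Factor operations (copy,
// multiply, sumOut) are the ones used elsewhere in this patch; the
// bookkeeping (varFactors_, factor sizes) is simplified away here.
#include <cstddef>
#include <vector>
#include "Factor.h"
#include "Util.h"

static Factor* eliminateOne (std::vector<Factor*>& factors, VarId X)
{
  Factor* product = 0;
  for (std::size_t i = 0; i < factors.size(); i++) {
    if (factors[i] && Util::contains (factors[i]->arguments(), X)) {
      if (product == 0) {
        product = new Factor (*factors[i]);   // first factor over X
      } else {
        product->multiply (*factors[i]);      // fold in the rest
      }
      delete factors[i];
      factors[i] = 0;                         // mark the slot as consumed
    }
  }
  if (product != 0) {
    product->sumOut (X);                      // marginalize X away
    factors.push_back (product);              // result stays in the pool
  }
  return product;
}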
(size_t i = 0; i < links_.size(); i++) { + delete links_[i]; + } + links_.clear(); +} + + + +Params +WeightedBp::getPosterioriOf (VarId vid) +{ + if (runned_ == false) { + runSolver(); + } + VarNode* var = fg.getVarNode (vid); + assert (var != 0); + Params probs; + if (var->hasEvidence()) { + probs.resize (var->range(), LogAware::noEvidence()); + probs[var->getEvidence()] = LogAware::withEvidence(); + } else { + probs.resize (var->range(), LogAware::multIdenty()); + const BpLinks& links = ninf(var)->getLinks(); + if (Globals::logDomain) { + for (size_t i = 0; i < links.size(); i++) { + WeightedLink* l = static_cast (links[i]); + probs += l->powMessage(); + } + LogAware::normalize (probs); + Util::exp (probs); + } else { + for (size_t i = 0; i < links.size(); i++) { + WeightedLink* l = static_cast (links[i]); + probs *= l->powMessage(); + } + LogAware::normalize (probs); + } + } + return probs; +} + + + +void +WeightedBp::createLinks (void) +{ + if (Globals::verbosity > 0) { + cout << "compressed factor graph contains " ; + cout << fg.nrVarNodes() << " variables and " ; + cout << fg.nrFacNodes() << " factors " << endl; + cout << endl; + } + const FacNodes& facNodes = fg.facNodes(); + for (size_t i = 0; i < facNodes.size(); i++) { + const VarNodes& neighs = facNodes[i]->neighbors(); + for (size_t j = 0; j < neighs.size(); j++) { + if (Globals::verbosity > 1) { + cout << "creating link " ; + cout << facNodes[i]->getLabel(); + cout << " -- " ; + cout << neighs[j]->label(); + cout << " idx=" << j << ", weight=" << weights_[i][j] << endl; + } + links_.push_back (new WeightedLink ( + facNodes[i], neighs[j], j, weights_[i][j])); + } + } + if (Globals::verbosity > 1) { + cout << endl; + } +} + + + +void +WeightedBp::maxResidualSchedule (void) +{ + if (nIters_ == 1) { + for (size_t i = 0; i < links_.size(); i++) { + calculateMessage (links_[i]); + SortedOrder::iterator it = sortedOrder_.insert (links_[i]); + linkMap_.insert (make_pair (links_[i], it)); + if (Globals::verbosity >= 1) { + cout << "calculating " << links_[i]->toString() << endl; + } + } + return; + } + + for (size_t c = 0; c < links_.size(); c++) { + if (Globals::verbosity > 1) { + cout << endl << "current residuals:" << endl; + for (SortedOrder::iterator it = sortedOrder_.begin(); + it != sortedOrder_.end(); ++it) { + cout << " " << setw (30) << left << (*it)->toString(); + cout << "residual = " << (*it)->residual() << endl; + } + } + + SortedOrder::iterator it = sortedOrder_.begin(); + BpLink* link = *it; + if (Globals::verbosity >= 1) { + cout << "updating " << (*sortedOrder_.begin())->toString() << endl; + } + if (link->residual() < BpOptions::accuracy) { + return; + } + link->updateMessage(); + link->clearResidual(); + sortedOrder_.erase (it); + linkMap_.find (link)->second = sortedOrder_.insert (link); + + // update the messages that depend on message source --> destin + const FacNodes& factorNeighbors = link->varNode()->neighbors(); + for (size_t i = 0; i < factorNeighbors.size(); i++) { + const BpLinks& links = ninf(factorNeighbors[i])->getLinks(); + for (size_t j = 0; j < links.size(); j++) { + if (links[j]->varNode() != link->varNode()) { + if (Globals::verbosity > 1) { + cout << " calculating " << links[j]->toString() << endl; + } + calculateMessage (links[j]); + BpLinkMap::iterator iter = linkMap_.find (links[j]); + sortedOrder_.erase (iter->second); + iter->second = sortedOrder_.insert (links[j]); + } + } + } + // in counting bp, the message that a variable X sends to + // to a factor F depends on the message that F sent 
to the X + const BpLinks& links = ninf(link->facNode())->getLinks(); + for (size_t i = 0; i < links.size(); i++) { + if (links[i]->varNode() != link->varNode()) { + if (Globals::verbosity > 1) { + cout << " calculating " << links[i]->toString() << endl; + } + calculateMessage (links[i]); + BpLinkMap::iterator iter = linkMap_.find (links[i]); + sortedOrder_.erase (iter->second); + iter->second = sortedOrder_.insert (links[i]); + } + } + } +} + + + +void +WeightedBp::calcFactorToVarMsg (BpLink* _link) +{ + WeightedLink* link = static_cast (_link); + FacNode* src = link->facNode(); + const VarNode* dst = link->varNode(); + const BpLinks& links = ninf(src)->getLinks(); + // calculate the product of messages that were sent + // to factor `src', except from var `dst' + unsigned reps = 1; + unsigned msgSize = Util::sizeExpected (src->factor().ranges()); + Params msgProduct (msgSize, LogAware::multIdenty()); + if (Globals::logDomain) { + for (size_t i = links.size(); i-- > 0; ) { + const WeightedLink* l = static_cast (links[i]); + if ( ! (l->varNode() == dst && l->index() == link->index())) { + if (Constants::SHOW_BP_CALCS) { + cout << " message from " << links[i]->varNode()->label(); + cout << ": " ; + } + Util::apply_n_times (msgProduct, getVarToFactorMsg (links[i]), + reps, std::plus()); + if (Constants::SHOW_BP_CALCS) { + cout << endl; + } + } + reps *= links[i]->varNode()->range(); + } + } else { + for (size_t i = links.size(); i-- > 0; ) { + const WeightedLink* l = static_cast (links[i]); + if ( ! (l->varNode() == dst && l->index() == link->index())) { + if (Constants::SHOW_BP_CALCS) { + cout << " message from " << links[i]->varNode()->label(); + cout << ": " ; + } + Util::apply_n_times (msgProduct, getVarToFactorMsg (links[i]), + reps, std::multiplies()); + if (Constants::SHOW_BP_CALCS) { + cout << endl; + } + } + reps *= links[i]->varNode()->range(); + } + } + Factor result (src->factor().arguments(), + src->factor().ranges(), msgProduct); + assert (msgProduct.size() == src->factor().size()); + if (Globals::logDomain) { + result.params() += src->factor().params(); + } else { + result.params() *= src->factor().params(); + } + if (Constants::SHOW_BP_CALCS) { + cout << " message product: " << msgProduct << endl; + cout << " original factor: " << src->factor().params() << endl; + cout << " factor product: " << result.params() << endl; + } + result.sumOutAllExceptIndex (link->index()); + if (Constants::SHOW_BP_CALCS) { + cout << " marginalized: " << result.params() << endl; + } + link->nextMessage() = result.params(); + LogAware::normalize (link->nextMessage()); + if (Constants::SHOW_BP_CALCS) { + cout << " curr msg: " << link->message() << endl; + cout << " next msg: " << link->nextMessage() << endl; + } +} + + + +Params +WeightedBp::getVarToFactorMsg (const BpLink* _link) const +{ + const WeightedLink* link = static_cast (_link); + const VarNode* src = link->varNode(); + const FacNode* dst = link->facNode(); + Params msg; + if (src->hasEvidence()) { + msg.resize (src->range(), LogAware::noEvidence()); + double value = link->message()[src->getEvidence()]; + if (Constants::SHOW_BP_CALCS) { + msg[src->getEvidence()] = value; + cout << msg << "^" << link->weight() << "-1" ; + } + msg[src->getEvidence()] = LogAware::pow (value, link->weight() - 1); + } else { + msg = link->message(); + if (Constants::SHOW_BP_CALCS) { + cout << msg << "^" << link->weight() << "-1" ; + } + LogAware::pow (msg, link->weight() - 1); + } + const BpLinks& links = ninf(src)->getLinks(); + if (Globals::logDomain) { + for 
(size_t i = 0; i < links.size(); i++) { + WeightedLink* l = static_cast (links[i]); + if ( ! (l->facNode() == dst && l->index() == link->index())) { + msg += l->powMessage(); + } + } + } else { + for (size_t i = 0; i < links.size(); i++) { + WeightedLink* l = static_cast (links[i]); + if ( ! (l->facNode() == dst && l->index() == link->index())) { + msg *= l->powMessage(); + if (Constants::SHOW_BP_CALCS) { + cout << " x " << l->nextMessage() << "^" << link->weight(); + } + } + } + } + if (Constants::SHOW_BP_CALCS) { + cout << " = " << msg; + } + return msg; +} + + + +void +WeightedBp::printLinkInformation (void) const +{ + for (size_t i = 0; i < links_.size(); i++) { + WeightedLink* l = static_cast (links_[i]); + cout << l->toString() << ":" << endl; + cout << " curr msg = " << l->message() << endl; + cout << " next msg = " << l->nextMessage() << endl; + cout << " pow msg = " << l->powMessage() << endl; + cout << " index = " << l->index() << endl; + cout << " weight = " << l->weight() << endl; + cout << " residual = " << l->residual() << endl; + } +} + diff --git a/packages/CLPBN/horus2/WeightedBp.h b/packages/CLPBN/horus2/WeightedBp.h new file mode 100644 index 000000000..7794fd509 --- /dev/null +++ b/packages/CLPBN/horus2/WeightedBp.h @@ -0,0 +1,61 @@ +#ifndef HORUS_WEIGHTEDBP_H +#define HORUS_WEIGHTEDBP_H + +#include "BeliefProp.h" + +class WeightedLink : public BpLink +{ + public: + WeightedLink (FacNode* fn, VarNode* vn, size_t idx, unsigned weight) + : BpLink (fn, vn), index_(idx), weight_(weight), + pwdMsg_(vn->range(), LogAware::one()) { } + + size_t index (void) const { return index_; } + + unsigned weight (void) const { return weight_; } + + const Params& powMessage (void) const { return pwdMsg_; } + + void updateMessage (void) + { + pwdMsg_ = *nextMsg_; + swap (currMsg_, nextMsg_); + LogAware::pow (pwdMsg_, weight_); + } + + private: + size_t index_; + unsigned weight_; + Params pwdMsg_; +}; + + + +class WeightedBp : public BeliefProp +{ + public: + WeightedBp (const FactorGraph& fg, + const vector>& weights) + : BeliefProp (fg), weights_(weights) { } + + ~WeightedBp (void); + + Params getPosterioriOf (VarId); + + private: + + void createLinks (void); + + void maxResidualSchedule (void); + + void calcFactorToVarMsg (BpLink*); + + Params getVarToFactorMsg (const BpLink*) const; + + void printLinkInformation (void) const; + + vector> weights_; +}; + +#endif // HORUS_WEIGHTEDBP_H + From 01d8f8e178cc28196dd2e39cdc13f8af8dc1f742 Mon Sep 17 00:00:00 2001 From: Tiago Gomes Date: Thu, 20 Dec 2012 23:19:10 +0000 Subject: [PATCH 40/89] More whitespace cleanups --- packages/CLPBN/clpbn.yap | 26 +++++++------- packages/CLPBN/clpbn/aggregates.yap | 8 ++--- packages/CLPBN/clpbn/bdd.yap | 38 ++++++++++---------- packages/CLPBN/clpbn/bnt.yap | 8 ++--- packages/CLPBN/clpbn/discrete_utils.yap | 2 +- packages/CLPBN/clpbn/display.yap | 2 +- packages/CLPBN/clpbn/dists.yap | 2 +- packages/CLPBN/clpbn/evidence.yap | 6 ++-- packages/CLPBN/clpbn/gibbs.yap | 14 ++++---- packages/CLPBN/clpbn/graphviz.yap | 2 +- packages/CLPBN/clpbn/horus.yap | 2 +- packages/CLPBN/clpbn/horus_ground.yap | 2 +- packages/CLPBN/clpbn/jt.yap | 4 +-- packages/CLPBN/clpbn/matrix_cpt_utils.yap | 8 ++--- packages/CLPBN/clpbn/numbers.yap | 4 +-- packages/CLPBN/clpbn/pgrammar.yap | 2 +- packages/CLPBN/clpbn/table.yap | 10 +++--- packages/CLPBN/clpbn/utils.yap | 4 +-- packages/CLPBN/clpbn/ve.yap | 22 ++++++------ packages/CLPBN/clpbn/vmap.yap | 2 +- packages/CLPBN/examples/School/parlearn.yap | 2 +- 
packages/CLPBN/examples/School/parschema.pfl | 2 +- packages/CLPBN/examples/city.pfl | 8 ++--- packages/CLPBN/horus/BayesBall.h | 9 +++-- packages/CLPBN/horus/BayesBallGraph.h | 6 ++-- packages/CLPBN/horus/BeliefProp.cpp | 6 ++-- packages/CLPBN/horus/BeliefProp.h | 4 +-- packages/CLPBN/horus/ConstraintTree.cpp | 24 ++++++------- packages/CLPBN/horus/CountingBp.cpp | 5 ++- packages/CLPBN/horus/ElimGraph.cpp | 6 ++-- packages/CLPBN/horus/Factor.h | 4 +-- packages/CLPBN/horus/FactorGraph.cpp | 6 ++-- packages/CLPBN/horus/FactorGraph.h | 2 +- packages/CLPBN/horus/Histogram.cpp | 6 ++-- packages/CLPBN/horus/HorusCli.cpp | 2 +- packages/CLPBN/horus/HorusYap.cpp | 12 +++---- packages/CLPBN/horus/Indexer.h | 2 +- packages/CLPBN/horus/LiftedKc.cpp | 2 +- packages/CLPBN/horus/LiftedOperations.cpp | 4 +-- packages/CLPBN/horus/LiftedOperations.h | 2 +- packages/CLPBN/horus/LiftedUtils.cpp | 2 +- packages/CLPBN/horus/LiftedUtils.h | 4 +-- packages/CLPBN/horus/LiftedVe.cpp | 6 ++-- packages/CLPBN/horus/LiftedVe.h | 4 +-- packages/CLPBN/horus/LiftedWCNF.cpp | 12 +++---- packages/CLPBN/horus/Parfactor.cpp | 20 +++++------ packages/CLPBN/horus/Parfactor.h | 2 +- packages/CLPBN/horus/ParfactorList.cpp | 12 +++---- packages/CLPBN/horus/ProbFormula.cpp | 7 ++-- packages/CLPBN/horus/ProbFormula.h | 8 ++--- packages/CLPBN/horus/TinySet.h | 8 ++--- packages/CLPBN/horus/Util.cpp | 4 +-- packages/CLPBN/horus/Util.h | 4 +-- packages/CLPBN/horus/Var.cpp | 6 ++-- packages/CLPBN/horus/VarElim.cpp | 2 +- packages/CLPBN/horus/WeightedBp.cpp | 4 +-- packages/CLPBN/horus/WeightedBp.h | 4 +-- packages/CLPBN/learning/aleph_params.yap | 10 +++--- packages/CLPBN/learning/em.yap | 12 +++---- packages/CLPBN/learning/mle.yap | 2 +- packages/CLPBN/pfl.yap | 4 +-- 61 files changed, 209 insertions(+), 210 deletions(-) diff --git a/packages/CLPBN/clpbn.yap b/packages/CLPBN/clpbn.yap index 54a87a96a..605e9dd74 100644 --- a/packages/CLPBN/clpbn.yap +++ b/packages/CLPBN/clpbn.yap @@ -221,14 +221,14 @@ set_em_solver(Solver) :- % % we use a mutable variable to avoid unnecessary trailing. % -store_var(El) :- - nb_current(clpbn_qvars, Mutable), +store_var(El) :- + nb_current(clpbn_qvars, Mutable), nonvar(Mutable), !, get_mutable(Tail, Mutable), update_mutable(El.Tail, Mutable). -store_var(El) :- +store_var(El) :- init_clpbn_vars(El). - + init_clpbn_vars(El) :- create_mutable(El, Mutable), b_setval(clpbn_qvars, Mutable). @@ -278,7 +278,7 @@ project_attributes(GVars0, _AVars0) :- b_setval(clpbn_query_variables, f(GVars0,Evidence)), simplify_query(GVars0, GVars), ( - GKeys = [] + GKeys = [] -> GVars0 = [V|_], clpbn_display:put_atts(V, [posterior([],[],[],[])]) @@ -378,7 +378,7 @@ call_ground_solver(cbp, GVars, GoalKeys, Keys, Factors, Evidence) :- !, call_ground_solver(Solver, GVars, _GoalKeys, Keys, Factors, Evidence) :- % fall back to traditional solver b_hash_new(Hash0), - foldl(gvar_in_hash, GVars, Hash0, HashI), + foldl(gvar_in_hash, GVars, Hash0, HashI), foldl(key_to_var, Keys, AllVars, HashI, Hash1), foldl(evidence_to_v, Evidence, _EVars, Hash1, Hash), %writeln(Keys:AllVars), @@ -432,7 +432,7 @@ write_out(Solver, _, _, _) :- % gvar_in_hash(V, Hash0, Hash) :- get_atts(V, [key(K)]), - b_hash_insert(Hash0, K, V, Hash). + b_hash_insert(Hash0, K, V, Hash). key_to_var(K, V, Hash0, Hash0) :- b_hash_lookup(K, V, Hash0), !. @@ -496,12 +496,12 @@ process_vars([V|Vs], [K|Ks]) :- process_var(V, K), process_vars(Vs, Ks). -process_var(V, K) :- get_atts(V, [key(K)]), !. +process_var(V, K) :- get_atts(V, [key(K)]), !. 
% oops: this variable has no attributes. process_var(V, _) :- throw(error(instantiation_error,clpbn(attribute_goal(V)))). % -% unify a CLPBN variable with something. +% unify a CLPBN variable with something. % verify_attributes(Var, T, Goal) :- get_atts(Var, [key(Key),dist(Dist,Parents)]), !, @@ -641,7 +641,7 @@ clpbn_init_solver(pcg, LVs, Vs0, VarsWithUnboundKeys, State) :- % % LVs is the list of lists of variables to marginalise % Vs is the full graph -% Ps are the probabilities on LVs. +% Ps are the probabilities on LVs. % clpbn_run_solver(LVs, LPs, State) :- solver(Solver), @@ -708,7 +708,7 @@ pfl_run_solver(LVs, LPs, State) :- pfl_run_solver(LVs, LPs, State, ve) :- !, run_ve_ground_solver(LVs, LPs, State). - + pfl_run_solver(LVs, LPs, State, hve) :- !, run_horus_ground_solver(LVs, LPs, State). @@ -732,7 +732,7 @@ add_keys(Key1+V1,_Key2,Key1+V1). probability(Goal, Prob) :- findall(Prob, do_probability(Goal, [], Prob), [Prob]). - + conditional_probability(Goal, ListOfGoals, Prob) :- \+ ground(Goal), throw(error(ground(Goal),conditional_probability(Goal, ListOfGoals, Prob))). @@ -766,7 +766,7 @@ evidence_to_var(Goal, C, VItem, V) :- Goal =.. [L|Args], variabilise_last(Args, C, NArgs, V), VItem =.. [L|NArgs]. - + variabilise_last([Arg], Arg, [V], V). variabilise_last([Arg1,Arg2|Args], Arg, Arg1.NArgs, V) :- variabilise_last(Arg2.Args, Arg, NArgs, V). diff --git a/packages/CLPBN/clpbn/aggregates.yap b/packages/CLPBN/clpbn/aggregates.yap index 71e08795b..90c1ef51e 100644 --- a/packages/CLPBN/clpbn/aggregates.yap +++ b/packages/CLPBN/clpbn/aggregates.yap @@ -96,11 +96,11 @@ find_ev(_Evs, Key, RemKeys, [Key|RemKeys], Ev, Ev). % +final CPT % - New Parents % + - list of new keys -% +% avg_table(Vars, OVars, Domain, Key, TotEvidence, Softness, Vars, Vs, Vs, Id) :- length(Domain, SDomain), int_power(Vars, SDomain, 1, TabSize), - TabSize =< 256, + TabSize =< 256, /* case gmp is not there !! */ TabSize > 0, !, average_cpt(Vars, OVars, Domain, TotEvidence, Softness, CPT), @@ -170,7 +170,7 @@ cpt_min([_|Vars], Key, Els0, CPT, Vs, NewVs) :- build_avg_table(Vars, OVars, Domain, _, TotEvidence, Softness, CPT, Vars, Vs, Vs) :- length(Domain, SDomain), int_power(Vars, SDomain, 1, TabSize), - TabSize =< 256, + TabSize =< 256, /* case gmp is not there !! */ TabSize > 0, !, average_cpt(Vars, OVars, Domain, TotEvidence, Softness, CPT). @@ -304,7 +304,7 @@ get_ds_lengths([],[]). get_ds_lengths([V|Vs],[Sz|Lengs]) :- get_vdist_size(V, Sz), get_ds_lengths(Vs,Lengs). - + fill_in_average(Lengs, N, Base, MCPT) :- generate(Lengs, Case), average(Case, N, Base, Val), diff --git a/packages/CLPBN/clpbn/bdd.yap b/packages/CLPBN/clpbn/bdd.yap index a7bc3abd7..b0335eb08 100644 --- a/packages/CLPBN/clpbn/bdd.yap +++ b/packages/CLPBN/clpbn/bdd.yap @@ -9,12 +9,12 @@ V = v(Va, Vb, Vc) The generic formula is -V <- X, Y +V <- X, Y Va <- P*X1*Y1 + Q*X2*Y2 + ... - + **************************************************/ :- module(clpbn_bdd, @@ -80,8 +80,8 @@ bdds(bdd). % % QVars: all query variables? -% -% +% +% init_bdd_ground_solver(QueryKeys, AllKeys, Factors, Evidence, bdd(QueryKeys, AllKeys, Factors, Evidence)). % @@ -161,7 +161,7 @@ sort_keys(AllFs, AllVars, Leaves) :- dgraph_top_sort(Graph, AllVars). add_node(f([K|Parents],_,_,_), Graph0, Graph) :- - dgraph_add_vertex(Graph0, K, Graph1), + dgraph_add_vertex(Graph0, K, Graph1), foldl(add_edge(K), Parents, Graph1, Graph). add_edge(K, K0, Graph0, Graph) :- @@ -176,7 +176,7 @@ sort_vars(AllVars0, AllVars, Leaves) :- build_graph([], Graph, Graph). 
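For reference, the predicates touched just above (add_node/3, add_parents/4, build_graph/3 and the dgraph_top_sort/2 call in sort_keys/3 and sort_vars/3) only build a parent-to-child dependency graph and ask for an order in which every variable comes after its parents. A minimal standalone sketch of that ordering step, using Kahn's algorithm; all identifiers below are illustrative and not taken from the CLP(BN) sources:

#include <cstddef>
#include <iostream>
#include <queue>
#include <utility>
#include <vector>

// Returns a topological order of nodes 0..n-1 for edges parent -> child,
// or an empty vector if the graph contains a cycle.
std::vector<size_t> topologicalOrder (
    size_t n, const std::vector<std::pair<size_t,size_t>>& edges)
{
  std::vector<std::vector<size_t>> children (n);
  std::vector<size_t> indegree (n, 0);
  for (size_t i = 0; i < edges.size(); i++) {
    children[edges[i].first].push_back (edges[i].second);
    indegree[edges[i].second] ++;
  }
  std::queue<size_t> ready;               // nodes whose parents are all placed
  for (size_t i = 0; i < n; i++) {
    if (indegree[i] == 0) ready.push (i);
  }
  std::vector<size_t> order;
  while (ready.empty() == false) {
    size_t v = ready.front();
    ready.pop();
    order.push_back (v);
    for (size_t j = 0; j < children[v].size(); j++) {
      if (--indegree[children[v][j]] == 0) ready.push (children[v][j]);
    }
  }
  if (order.size() != n) order.clear();   // cycle: no valid order exists
  return order;
}

int main (void)
{
  // 0 -> 1, 0 -> 2, 1 -> 3, 2 -> 3: a small diamond-shaped network
  std::vector<std::pair<size_t,size_t>> edges = {{0,1}, {0,2}, {1,3}, {2,3}};
  std::vector<size_t> order = topologicalOrder (4, edges);
  for (size_t i = 0; i < order.size(); i++) {
    std::cout << order[i] << " ";         // prints: 0 1 2 3
  }
  std::cout << std::endl;
  return 0;
}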
build_graph([V|AllVars0], Graph0, Graph) :- clpbn:get_atts(V, [dist(_DistId, Parents)]), !, - dgraph_add_vertex(Graph0, V, Graph1), + dgraph_add_vertex(Graph0, V, Graph1), add_parents(Parents, V, Graph1, GraphI), build_graph(AllVars0, GraphI, Graph). build_graph(_V.AllVars0, Graph0, Graph) :- @@ -184,7 +184,7 @@ build_graph(_V.AllVars0, Graph0, Graph) :- add_parents([], _V, Graph, Graph). add_parents([V0|Parents], V, Graph0, GraphF) :- - dgraph_add_edge(Graph0, V0, V, GraphI), + dgraph_add_edge(Graph0, V0, V, GraphI), add_parents(Parents, V, GraphI, GraphF). get_keys_info([], _, _, _, Vs, Vs, Ps, Ps, _, _) --> []. @@ -378,7 +378,7 @@ avg_tree([Vals|PVars], P, Max, Im, IM, Size, O, H0, HF) :- MaxI is Max-(Size-1), avg_exp(Vals, PVars, 0, P, MaxI, Size, Im, IM, HI, HF, Exp), simplify_exp(Exp, Simp). - + avg_exp([], _, _, _P, _Max, _Size, _Im, _IM, H, H, 0). avg_exp([Val|Vals], PVars, I0, P0, Max, Size, Im, IM, HI, HF, O) :- (Vals = [] -> O=O1 ; O = Val*O1+not(Val)*O2 ), @@ -434,7 +434,7 @@ bup_avg(V, Size, Domain, Parents0, Vs, Vs2, Lvs, Outs, DIST) :- bin_sums(Vs, Sums, F) :- vs_to_sums(Vs, Sums0), bin_sums(Sums0, Sums, F, []). - + vs_to_sums([], []). vs_to_sums([V|Vs], [Sum|Sums0]) :- Sum =.. [sum|V], @@ -536,7 +536,7 @@ sum_all([_V|Vs], Pos, I, Max0, Sums, List) :- gen_arg(J, Sums, Max, S0) :- gen_arg(0, Max, J, Sums, S0). - + gen_arg(Max, Max, J, Sums, S0) :- !, I is Max+1, arg(I, Sums, A), @@ -647,19 +647,19 @@ copy(N, [], [], Ms, Parms0, Parms, ParmVars) :-!, copy(N, Ms, NewMs, NewMs, Parms0, Parms, ParmVars). copy(N, D.Ds, ND.NDs, New, El.Parms0, NEl.Parms, V.ParmVars) :- N1 is N-1, - (El == 0.0 -> + (El == 0.0 -> NEl = 0, V = NEl, ND = D - ;El == 1.0 -> + ;El == 1.0 -> NEl = 1, V = NEl, ND = 0.0 - ;El == 0 -> + ;El == 0 -> NEl = 0, V = NEl, ND = D - ;El =:= 1 -> + ;El =:= 1 -> NEl = 1, V = NEl, ND = 0.0, @@ -818,7 +818,7 @@ get_key_evidence(V, Evs, _, Tree, Ev, F0, F, Leaves, Finals) :- %% deterministic(V, DistId), %% !, %% one_list(Ev), -%% eval_outs(F0). +%% eval_outs(F0). %% no evidence !!! get_key_evidence(V, _, _, Tree, _Values, F0, F1, Leaves, Finals) :- insert_output(Leaves, V, Finals, Tree, Outs, SendOut), @@ -836,14 +836,14 @@ get_evidence(V, _Tree, Ev, F0, [], _Leaves, _Finals) :- ( Name = 'AVG' ; Name = 'MAX' ; Name = 'MIN' ), !, one_list(Ev), - eval_outs(F0). + eval_outs(F0). %% no evidence !!! get_evidence(V, Tree, _Values, F0, F1, Leaves, Finals) :- insert_output(Leaves, V, Finals, Tree, Outs, SendOut), get_outs(F0, F1, SendOut, Outs). zero_pos(_, _Pos, []). -zero_pos(Pos, Pos, [1|Values]) :- !, +zero_pos(Pos, Pos, [1|Values]) :- !, I is Pos+1, zero_pos(I, Pos, Values). zero_pos(I0, Pos, [0|Values]) :- @@ -855,7 +855,7 @@ one_list(1.Ev) :- one_list(Ev). % -% insert a node with the disj of all alternatives, this is only done if node ends up to be in the output +% insert a node with the disj of all alternatives, this is only done if node ends up to be in the output % insert_output([], _V, [], _Out, _Outs, []). insert_output(V._Leaves, V0, [Top|_], Top, Outs, [Top = Outs]) :- V == V0, !. 
@@ -1057,7 +1057,7 @@ generate_exclusions([V0|SeenVs], V) --> build_cnf(CNF, IVs, Indics, AllParms, AllParmValues, Val) :- %(numbervars(CNF,1,_), writeln(cnf_to_ddnnf(CNF, Vars, IVs, [], F)), fail ; true ), - cnf_to_ddnnf(CNF, AllParms, F), + cnf_to_ddnnf(CNF, AllParms, F), AllParms = AllParmValues, IVs = Indics, term_variables(CNF, Extra), diff --git a/packages/CLPBN/clpbn/bnt.yap b/packages/CLPBN/clpbn/bnt.yap index 9fd0d8d65..c09786b20 100644 --- a/packages/CLPBN/clpbn/bnt.yap +++ b/packages/CLPBN/clpbn/bnt.yap @@ -101,7 +101,7 @@ do_bnt(QueryVars, AllVars, AllDiffs) :- add_evidence(SortedVertices, Size, NumberedVertices), marginalize(QueryVars, SortedVertices, NumberedVertices, Ps), clpbn_bind_vals(QueryVars, Ps, AllDiffs). - + create_bnt_graph(AllVars, Representatives) :- create_bnt_graph(AllVars, Representatives, _, _, _). @@ -320,7 +320,7 @@ get_sizes_and_ids([V|Parents],[Id-V|Ids]) :- extract_vars([], L, L). extract_vars([_-V|NIds], NParents, Vs) :- extract_vars(NIds, [V|NParents], Vs). - + mkcpt(BayesNet, I, Tab) :- (BayesNet.'CPD'({I})) <-- tabular_CPD(BayesNet,I,Tab). @@ -336,7 +336,7 @@ create_class_vector([], [], [],[]). create_class_vector([V|Graph], [I|Is], [Id|Classes], [Id-v(V,I,Parents)|Sets]) :- clpbn:get_atts(V, [dist(Id,Parents)]), create_class_vector(Graph, Is,Classes,Sets). - + representatives([],[]). representatives([Class-Rep|Reps1],[Class-Rep|Reps]) :- nonrepresentatives(Reps1, Class, Reps2), @@ -390,7 +390,7 @@ mk_evidence([V|L], [I|Is], [ar(1,I,EvVal1)|LN]) :- mk_evidence(L, Is, LN). mk_evidence([_|L], [_|Is], LN) :- mk_evidence(L, Is, LN). - + evidence_val(Ev,Val,[Ev|_],Val) :- !. evidence_val(Ev,I0,[_|Domain],Val) :- I1 is I0+1, diff --git a/packages/CLPBN/clpbn/discrete_utils.yap b/packages/CLPBN/clpbn/discrete_utils.yap index ebe8ab376..8ba385f4e 100644 --- a/packages/CLPBN/clpbn/discrete_utils.yap +++ b/packages/CLPBN/clpbn/discrete_utils.yap @@ -24,7 +24,7 @@ propagate_evidence(V, Evs) :- clpbn:get_atts(V, [evidence(Ev),dist(Id,_)]), !, get_dist_domain(Id, Out), generate_szs_with_evidence(Out,Ev,0,Evs,Found), - (var(Found) -> + (var(Found) -> clpbn:get_atts(V, [key(K)]), throw(clpbn(evidence_does_not_match,K,Ev,[Out])) ; diff --git a/packages/CLPBN/clpbn/display.yap b/packages/CLPBN/clpbn/display.yap index 5d6afb6ea..7ecba7880 100644 --- a/packages/CLPBN/clpbn/display.yap +++ b/packages/CLPBN/clpbn/display.yap @@ -78,7 +78,7 @@ clpbn_bind_vals([Vs|MoreVs],[Ps|MorePs],AllDiffs) :- clpbn_bind_vals2([],_,_) :- !. % simple case, we want a distribution on a single variable. -clpbn_bind_vals2([V],Ps,AllDiffs) :- +clpbn_bind_vals2([V],Ps,AllDiffs) :- use_parfactors(on), !, clpbn:get_atts(V, [key(K)]), pfl:skolem(K,Vals), diff --git a/packages/CLPBN/clpbn/dists.yap b/packages/CLPBN/clpbn/dists.yap index be4b63b20..ce796e524 100644 --- a/packages/CLPBN/clpbn/dists.yap +++ b/packages/CLPBN/clpbn/dists.yap @@ -90,7 +90,7 @@ where Id is the id, dna for [a,c,g,t] rna for [a,c,g,u] reals - + ********************************************/ diff --git a/packages/CLPBN/clpbn/evidence.yap b/packages/CLPBN/clpbn/evidence.yap index 8fd4ee9bc..4f6982a13 100644 --- a/packages/CLPBN/clpbn/evidence.yap +++ b/packages/CLPBN/clpbn/evidence.yap @@ -31,7 +31,7 @@ :- dynamic node/3, edge/2, evidence/2. % -% new evidence storage algorithm. The idea is that instead of +% new evidence storage algorithm. The idea is that instead of % redoing all the evidence every time we query the network, we shall % keep a precompiled version around. 
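The comment above states the intent of evidence.yap: compile the evidence once and reuse it across queries instead of rebuilding it every time. A tiny sketch of that cache-on-first-use pattern, assuming a hypothetical compileEvidence step; the real code keeps the precompiled network in the Prolog database rather than in a C++ map:

#include <cstdio>
#include <map>
#include <string>
#include <utility>

// Hypothetical expensive step; stands in for re-running the evidence goals.
std::string compileEvidence (const std::string& evidence)
{
  std::printf ("compiling %s ...\n", evidence.c_str());
  return "compiled(" + evidence + ")";
}

// Returns the precompiled form, compiling it only on the first request.
const std::string& cachedEvidence (const std::string& evidence)
{
  static std::map<std::string,std::string> cache;
  std::map<std::string,std::string>::iterator it = cache.find (evidence);
  if (it == cache.end()) {
    it = cache.insert (std::make_pair (evidence, compileEvidence (evidence))).first;
  }
  return it->second;
}

int main (void)
{
  cachedEvidence ("descn(p2,fits)");   // compiled on the first query
  cachedEvidence ("descn(p2,fits)");   // later queries reuse the stored result
  return 0;
}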
% @@ -51,9 +51,9 @@ compute_evidence(_,PreviousSolver) :- set_clpbn_flag(solver, PreviousSolver). get_clpbn_vars(G, Vars) :- -% attributes:all_attvars(Vars0), +% attributes:all_attvars(Vars0), once(G), - attributes:all_attvars(Vars). + attributes:all_attvars(Vars). evidence_error(Ball,PreviousSolver) :- set_clpbn_flag(solver,PreviousSolver), diff --git a/packages/CLPBN/clpbn/gibbs.yap b/packages/CLPBN/clpbn/gibbs.yap index 3349f4d29..ce45313f7 100644 --- a/packages/CLPBN/clpbn/gibbs.yap +++ b/packages/CLPBN/clpbn/gibbs.yap @@ -137,7 +137,7 @@ graph_representation([V|Vs], Graph, I0, Keys, [I-IParents|TGraph]) :- graph_representation(Vs, Graph, I, Keys, TGraph). write_pars([]). -write_pars([V|Parents]) :- +write_pars([V|Parents]) :- clpbn:get_atts(V, [key(K),dist(I,_)]),write(K:I),nl, write_pars(Parents). @@ -149,7 +149,7 @@ get_sizes([V|Parents], [Sz|Szs]) :- parent_indices([], _, []). parent_indices([V|Parents], Keys, [I|IParents]) :- - rb_lookup(V, I, Keys), + rb_lookup(V, I, Keys), parent_indices(Parents, Keys, IParents). @@ -174,7 +174,7 @@ propagate2parents([V|NewParents], Table, Variables, Graph, Keys) :- propagate2parents(NewParents,Table, Variables, Graph, Keys). add2graph(V, Vals, Table, IParents, Graph, Keys) :- - rb_lookup(V, Index, Keys), + rb_lookup(V, Index, Keys), (var(Vals) -> true ; length(Vals,Sz)), arg(Index, Graph, var(V,Index,_,Vals,Sz,VarSlot,_,_,_)), member(tabular(Table,Index,IParents), VarSlot), !. @@ -239,7 +239,7 @@ mult_list([Sz|Sizes],Mult0,Mult) :- MultI is Sz*Mult0, mult_list(Sizes,MultI,Mult). -% compile node as set of facts, faster execution +% compile node as set of facts, faster execution compile_var(TotSize,I,_Vals,Sz,CPTs,Parents,_Sizes,Graph) :- TotSize < 1024*64, TotSize > 0, !, multiply_all(I,Parents,CPTs,Sz,Graph). @@ -367,8 +367,8 @@ generate_est_mults([], [], _, [], 1). generate_est_mults([V|Vs], [I|Is], Graph, [M0|Mults], M) :- arg(V,Graph,var(_,I,_,_,Sz,_,_,_,_)), generate_est_mults(Vs, Is, Graph, Mults, M0), - M is M0*Sz. - + M is M0*Sz. + gen_e0(0,[]) :- !. gen_e0(Sz,[0|E0L]) :- Sz1 is Sz-1, @@ -531,7 +531,7 @@ add_up_mes(Counts,[me(_,_,Cs)|Chains], Add) :- sum_lists(Counts, Cs, NCounts), add_up_mes(NCounts, Chains, Add). -sum_lists([],[],[]). +sum_lists([],[],[]). sum_lists([Count|Counts], [C|Cs], [NC|NCounts]) :- NC is Count+C, sum_lists(Counts, Cs, NCounts). diff --git a/packages/CLPBN/clpbn/graphviz.yap b/packages/CLPBN/clpbn/graphviz.yap index 4a75e4022..2d0a10cda 100644 --- a/packages/CLPBN/clpbn/graphviz.yap +++ b/packages/CLPBN/clpbn/graphviz.yap @@ -50,7 +50,7 @@ output_parents1(Stream,[V|L]) :- put_code(Stream, 0' ), %' output_parents1(Stream,L). -output_v(V,Stream) :- +output_v(V,Stream) :- clpbn:get_atts(V,[key(Key)]), output_key(Stream,Key). 
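graphviz.yap above walks the network and writes one node per variable plus its parent links so the result can be rendered with dot; the C++ side does the analogous job in FactorGraph::exportToGraphViz further down in this patch. A self-contained sketch of the idea for a small factor graph in DOT syntax; the struct, file name and data below are made up for illustration, and the exact format clpbn emits differs:

#include <cstddef>
#include <fstream>
#include <string>
#include <vector>

struct SimpleFactor {
  std::string label;                 // e.g. "f1"
  std::vector<std::string> vars;     // variables the factor is connected to
};

// Writes an undirected factor graph in GraphViz DOT format:
// factors as boxes, one edge per (factor, variable) pair.
void exportToDot (const std::vector<SimpleFactor>& factors, const char* fileName)
{
  std::ofstream out (fileName);
  out << "graph factorGraph {" << std::endl;
  for (size_t i = 0; i < factors.size(); i++) {
    out << "  \"" << factors[i].label << "\" [shape=box]" << std::endl;
    for (size_t j = 0; j < factors[i].vars.size(); j++) {
      out << "  \"" << factors[i].label << "\" -- \"";
      out << factors[i].vars[j] << "\"" << std::endl;
    }
  }
  out << "}" << std::endl;
}

int main (void)
{
  std::vector<SimpleFactor> factors = {
    { "f1", { "cloudy", "rain" } },
    { "f2", { "rain", "wet_grass" } }
  };
  exportToDot (factors, "factor_graph.dot");  // render with: dot -Tpng factor_graph.dot
  return 0;
}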
diff --git a/packages/CLPBN/clpbn/horus.yap b/packages/CLPBN/clpbn/horus.yap index 5c4c7b688..344f11d86 100644 --- a/packages/CLPBN/clpbn/horus.yap +++ b/packages/CLPBN/clpbn/horus.yap @@ -1,7 +1,7 @@ /******************************************************* Horus Interface - + ********************************************************/ :- module(clpbn_horus, diff --git a/packages/CLPBN/clpbn/horus_ground.yap b/packages/CLPBN/clpbn/horus_ground.yap index f696bccc0..b4b3f8d25 100644 --- a/packages/CLPBN/clpbn/horus_ground.yap +++ b/packages/CLPBN/clpbn/horus_ground.yap @@ -4,7 +4,7 @@ - Variable Elimination - Belief Propagation - Counting Belief Propagation - + ********************************************************/ :- module(clpbn_horus_ground, diff --git a/packages/CLPBN/clpbn/jt.yap b/packages/CLPBN/clpbn/jt.yap index 7eb3c191f..d46f13430 100644 --- a/packages/CLPBN/clpbn/jt.yap +++ b/packages/CLPBN/clpbn/jt.yap @@ -98,7 +98,7 @@ jt(LLVs,Vs0,AllDiffs) :- init_jt_solver(LLVs, Vs0, _, State) :- - check_for_agg_vars(Vs0, Vs1), + check_for_agg_vars(Vs0, Vs1), init_influences(Vs1, G, RG), maplist(init_jt_solver_for_question(G, RG), LLVs, State). @@ -163,7 +163,7 @@ initial_graph(_,Parents, CPTs) :- % from the very beginning. dgraph_transpose(V1, V2), dgraph_to_ugraph(V2, Parents). - + problem_graph([], []). problem_graph([V|BNet], GraphF) :- diff --git a/packages/CLPBN/clpbn/matrix_cpt_utils.yap b/packages/CLPBN/clpbn/matrix_cpt_utils.yap index c12d5b6e6..a3d7d228f 100644 --- a/packages/CLPBN/clpbn/matrix_cpt_utils.yap +++ b/packages/CLPBN/clpbn/matrix_cpt_utils.yap @@ -87,7 +87,7 @@ evidence(V, Pos) :- clpbn:get_atts(V, [evidence(Pos)]). vnth([V1|Deps], N, V, N, Deps) :- - V == V1, !. + V == V1, !. vnth([V1|Deps], N0, V, N, [V1|NDeps]) :- N1 is N0+1, vnth(Deps, N1, V, N, NDeps). @@ -207,9 +207,9 @@ generate_map([V|DimsNew], [V0|Dims0], [0|Map]) :- V == V0, !, generate_map(DimsNew, Dims0, Map). generate_map([V|DimsNew], Dims0, [Sz|Map]) :- clpbn:get_atts(V, [dist(Id,_)]), - clpbn_dist:get_dist_domain_size(Id, Sz), + clpbn_dist:get_dist_domain_size(Id, Sz), generate_map(DimsNew, Dims0, Map). - + unit_CPT(V,CPT) :- clpbn:get_atts(V, [dist(Id,_)]), clpbn_dist:get_dist_domain_size(Id, Sz), @@ -287,7 +287,7 @@ uniform_CPT(Dims, M) :- normalise_possibly_deterministic_CPT(M1, M). normalise_CPT_on_lines(MAT0, MAT2, L1) :- - matrix_agg_cols(MAT0, +, MAT1), + matrix_agg_cols(MAT0, +, MAT1), matrix_sum(MAT1, SUM), matrix_op_to_all(MAT1, /, SUM, MAT2), matrix:matrix_to_list(MAT2,L1). diff --git a/packages/CLPBN/clpbn/numbers.yap b/packages/CLPBN/clpbn/numbers.yap index 88c65c915..15aad1e1f 100644 --- a/packages/CLPBN/clpbn/numbers.yap +++ b/packages/CLPBN/clpbn/numbers.yap @@ -1,7 +1,7 @@ :- module(clpbn_numbers, - [keys_to_numbers/7, - keys_to_numbers/9, + [keys_to_numbers/7, + keys_to_numbers/9, lists_of_keys_to_ids/6 ]). diff --git a/packages/CLPBN/clpbn/pgrammar.yap b/packages/CLPBN/clpbn/pgrammar.yap index f4739cb84..4c6829595 100644 --- a/packages/CLPBN/clpbn/pgrammar.yap +++ b/packages/CLPBN/clpbn/pgrammar.yap @@ -176,7 +176,7 @@ get_internal(S, InternalS, Arg) :- extract_probability(p(Id,Goals), P) :- id(Id,_,P0,_), - LogP0 is log(P0), + LogP0 is log(P0), extract_logprobability(Goals, LogP0, LogP), P is exp(LogP). 
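extract_probability/2 just above accumulates log-probabilities and only exponentiates at the end, and the same concern shows up throughout the Horus code as the Globals::logDomain switch: multiplying many probabilities directly underflows double precision, while adding their logarithms does not. A small self-contained illustration with made-up numbers:

#include <cmath>
#include <cstddef>
#include <cstdio>
#include <vector>

int main (void)
{
  std::vector<double> probs (500, 0.01);   // e.g. 500 rule or message probabilities

  double direct = 1.0;
  double logSum = 0.0;
  for (size_t i = 0; i < probs.size(); i++) {
    direct *= probs[i];             // 0.01^500 = 1e-1000: underflows to 0
    logSum += std::log (probs[i]);  // 500 * log(0.01) ~ -2302.6: representable
  }

  std::printf ("direct product = %g\n", direct);   // prints 0
  std::printf ("sum of logs    = %g\n", logSum);   // prints -2302.59
  return 0;
}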
diff --git a/packages/CLPBN/clpbn/table.yap b/packages/CLPBN/clpbn/table.yap index 748a2757d..155db3941 100644 --- a/packages/CLPBN/clpbn/table.yap +++ b/packages/CLPBN/clpbn/table.yap @@ -37,11 +37,11 @@ clpbn_tabled_clause_ref(:.?,?), clpbn_tabled_retract(:), clpbn_tabled_abolish(:), - clpbn_tabled_asserta(:), - clpbn_tabled_assertz(:), - clpbn_tabled_asserta(:,-), - clpbn_tabled_assertz(:,-), - clpbn_tabled_number_of_clauses(:,-), + clpbn_tabled_asserta(:), + clpbn_tabled_assertz(:), + clpbn_tabled_asserta(:,-), + clpbn_tabled_assertz(:,-), + clpbn_tabled_number_of_clauses(:,-), clpbn_is_tabled(:). :- use_module(library(terms), diff --git a/packages/CLPBN/clpbn/utils.yap b/packages/CLPBN/clpbn/utils.yap index 5bce1c943..d8179a69e 100644 --- a/packages/CLPBN/clpbn/utils.yap +++ b/packages/CLPBN/clpbn/utils.yap @@ -66,7 +66,7 @@ merge_same_key([K1-V1,K2-V2|Vs], SortedAVars, Ks, UnifiableVars) :- attributes:fast_unify_attributed(V1,V2), merge_same_key([K1-V1|Vs], SortedAVars, Ks, UnifiableVars). merge_same_key([K1-V1,K2-V2|Vs], [V1|SortedAVars], Ks, [K1|UnifiableVars]) :- - (in_keys(K1, Ks) ; \+ \+ K1 == K2), !, + (in_keys(K1, Ks) ; \+ \+ K1 == K2), !, add_to_keys(K1, Ks, NKs), merge_same_key([K2-V2|Vs], SortedAVars, NKs, UnifiableVars). merge_same_key([K-V|Vs], [V|SortedAVars], Ks, UnifiableVars) :- @@ -74,7 +74,7 @@ merge_same_key([K-V|Vs], [V|SortedAVars], Ks, UnifiableVars) :- merge_same_key(Vs, SortedAVars, NKs, UnifiableVars). in_keys(K1,[K|_]) :- \+ \+ K1 = K, !. -in_keys(K1,[_|Ks]) :- +in_keys(K1,[_|Ks]) :- in_keys(K1,Ks). add_to_keys(K1, Ks, Ks) :- ground(K1), !. diff --git a/packages/CLPBN/clpbn/ve.yap b/packages/CLPBN/clpbn/ve.yap index b2e8d9ea4..21f0d931a 100644 --- a/packages/CLPBN/clpbn/ve.yap +++ b/packages/CLPBN/clpbn/ve.yap @@ -11,7 +11,7 @@ all tables they connect to; multiply their size order by size - + *********************************/ :- module(clpbn_ve, @@ -134,7 +134,7 @@ evtotree(K=V,Ev0,Ev) :- factor_to_graph( f(Nodes, Sizes, _Pars0, Id), Factors0, Factors, Edges0, Edges, I0, I) :- I is I0+1, pfl:get_pfl_parameters(Id, Pars0), - init_CPT(Pars0, Sizes, CPT0), + init_CPT(Pars0, Sizes, CPT0), reorder_CPT(Nodes, CPT0, FIPs, CPT, _), F = f(I0, FIPs, CPT), rb_insert(Factors0, I0, F, Factors), @@ -196,7 +196,7 @@ id_to_factor(VMap, V-I, IF0, IF, Fs0, Fs, Evs0, Evs) :- get_dist_params(D, Pars0), get_dist_domain_size(D, DS), maplist(parent_to_id(VMap), Ps, Sizes, IPs), - init_CPT(Pars0, [DS|Sizes], CPT0), + init_CPT(Pars0, [DS|Sizes], CPT0), reorder_CPT([I|IPs], CPT0, FIPs, CPT, _), rb_insert(Fs0, IF0, f(IF0, FIPs, CPT), Fs), IF is IF0+1. 
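The ve.yap hunks below build one factor per variable, f(Id, VarIds, CPT), which is exactly what the elimination loop consumes: to remove a variable, multiply every factor that mentions it and sum the variable out of the product (the same idea as VarElim::eliminate in the C++ files added earlier in this series). A hand-worked instance for a two-variable chain A -> B with binary domains and made-up numbers:

#include <cstdio>

int main (void)
{
  double pA[2]      = { 0.6, 0.4 };      // P(A)
  double pBgA[2][2] = { { 0.9, 0.2 },    // P(B=0 | A=0), P(B=0 | A=1)
                        { 0.1, 0.8 } };  // P(B=1 | A=0), P(B=1 | A=1)

  // Eliminate A: multiply the factors that mention A, then sum A out.
  double pB[2] = { 0.0, 0.0 };
  for (int b = 0; b < 2; b++) {
    for (int a = 0; a < 2; a++) {
      pB[b] += pBgA[b][a] * pA[a];       // product followed by sum-out
    }
  }

  std::printf ("P(B=0) = %.2f\n", pB[0]);   // 0.9*0.6 + 0.2*0.4 = 0.62
  std::printf ("P(B=1) = %.2f\n", pB[1]);   // 0.1*0.6 + 0.8*0.4 = 0.38
  return 0;
}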
@@ -261,7 +261,7 @@ solve([_|LQVs], FIds, Bigraph, Ev, LPs) :- do_solve(IQVs, IVs, bigraph(OldVs, IF, _Fs), Ev, Ps) :- % get only what is relevant to query, - project_to_query_related(IVs, OldVs, SVs, Fs1), + project_to_query_related(IVs, OldVs, SVs, Fs1), % and also prune using evidence rb_visit(Ev, EvL), foldl2(clean_v_ev, EvL, Fs1, Fs2, SVs, EVs), @@ -300,9 +300,9 @@ run_ve_solver(_, LLPs, state(LQVs, LVs, _VMap, Bigraph, Ev)) :- % solve_ve([IQVs|_], [IVs|_], bigraph(OldVs, IF, _Fs), Ev, Ps) :- % get only what is relevant to query, - project_to_query_related(IVs, OldVs, SVs, Fs1), + project_to_query_related(IVs, OldVs, SVs, Fs1), % and also prune using evidence - foldl2(clean_v_ev, Ev, Fs1, Fs2, SVs, EVs), + foldl2(clean_v_ev, Ev, Fs1, Fs2, SVs, EVs), % eliminate eliminate(IQVs, digraph(EVs, IF, Fs2), Dist), % writeln(m:Dist),matrix:matrix_to_list(Dist,LD),writeln(LD), @@ -319,7 +319,7 @@ solve_ve([_|MoreLVs], [_|MoreLVis], Digraph, Ev, Ps) :- project_to_query_related(IVs0, OldVs, NVs, NFs) :- sort(IVs0, IVs), rb_new(Vs0), - foldl(cp_to_vs, IVs, Vs0, AuxVs), + foldl(cp_to_vs, IVs, Vs0, AuxVs), rb_new(NFs0), foldl(simplify_graph_node(OldVs, AuxVs), IVs, VFs, NFs0, NFs), list_to_rbtree(VFs, NVs). @@ -343,14 +343,14 @@ simplify_graph_node(OldVs, NVs, V, V-RemFs, NFs0, NFs) :- % % Two cases: first time factor comes up: all its vars must be in subgraph % second case: second time it comes up, it must be already in graph -% -% args: +Factor F, +current V (int), +rbtree with all Vs, +% +% args: +Factor F, +current V (int), +rbtree with all Vs, % -Factors in new Graph, +factors in current graph, -rbtree of factors % % check_factor(V, NVs, F, NFs0, NFs, RemFs, NewRemFs) :- F = f(IF, [V|More], _), !, - ( + ( checklist(check_v(NVs), More) -> rb_insert(NFs0, IF, F, NFs), @@ -361,7 +361,7 @@ check_factor(V, NVs, F, NFs0, NFs, RemFs, NewRemFs) :- ). check_factor(_V, _NVs, F, NFs, NFs, RemFs, NewRemFs) :- F = f(Id, _, _), - ( + ( rb_lookup(Id, F, NFs) -> NewRemFs = [F|RemFs] diff --git a/packages/CLPBN/clpbn/vmap.yap b/packages/CLPBN/clpbn/vmap.yap index 98e3df357..79fd7f566 100644 --- a/packages/CLPBN/clpbn/vmap.yap +++ b/packages/CLPBN/clpbn/vmap.yap @@ -12,7 +12,7 @@ :- use_module(library(maplist)). % -% vmap: map V->I +% vmap: map V->I % contiguous Vs to contiguous integers % init_vmap(vmap(0,Empty)) :- diff --git a/packages/CLPBN/examples/School/parlearn.yap b/packages/CLPBN/examples/School/parlearn.yap index e722d8334..f089fb848 100644 --- a/packages/CLPBN/examples/School/parlearn.yap +++ b/packages/CLPBN/examples/School/parlearn.yap @@ -41,4 +41,4 @@ write_cpts([CPT|CPTs]) :- matrix_to_list(CPT,L), format('CPT=~w~n',[L]), write_cpts(CPTs). - + diff --git a/packages/CLPBN/examples/School/parschema.pfl b/packages/CLPBN/examples/School/parschema.pfl index 3d708936a..b799c60d7 100644 --- a/packages/CLPBN/examples/School/parschema.pfl +++ b/packages/CLPBN/examples/School/parschema.pfl @@ -55,7 +55,7 @@ professor_popularity(P,A) :- pop(P,A). course_difficulty(P,A) :- diff(P,A). student_intelligence(P,A) :- int(P,A). - + course_rating(C,X) :- rat(C,X). registration_grade(R,A) :- diff --git a/packages/CLPBN/examples/city.pfl b/packages/CLPBN/examples/city.pfl index c891ae163..37e0eeb22 100644 --- a/packages/CLPBN/examples/city.pfl +++ b/packages/CLPBN/examples/city.pfl @@ -75,17 +75,17 @@ hair_color_table( /* high low */ /* dark */ [ 0.05, 0.1, /* bright */ 0.95, 0.9 ]). - + car_color_table( /* dark bright */ /* dark */ [ 0.9, 0.2, /* bright */ 0.1, 0.8 ]). 
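Each *_table/1 fact in city.pfl stores its CPT as a flat list; reading the comments, the layout is one row per value of the child and one column per value of the parent, so car_color_table above says P(car=dark | hair=dark) = 0.9 and P(car=bright | hair=bright) = 0.8. A small helper that indexes a table laid out that way; the function and names are illustrative, not part of PFL:

#include <cassert>
#include <cstdio>
#include <vector>

// Entry P(child = childIdx | parent configuration = parentIdx) of a CPT stored
// row by row, with nrParentConfigs columns per row.
double cptEntry (const std::vector<double>& flat, unsigned nrParentConfigs,
                 unsigned childIdx, unsigned parentIdx)
{
  assert (childIdx * nrParentConfigs + parentIdx < flat.size());
  return flat[childIdx * nrParentConfigs + parentIdx];
}

int main (void)
{
  //                               hair=dark  hair=bright
  std::vector<double> carColor = {    0.9,       0.2,       // car = dark
                                      0.1,       0.8   };   // car = bright
  std::printf ("P(car=bright | hair=dark) = %.1f\n",
               cptEntry (carColor, 2, 1, 0));               // prints 0.1
  return 0;
}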
- + height_table( /* male female */ /* tall */ [ 0.6, 0.4, /* short */ 0.4, 0.6 ]). - + shoe_size_table( /* tall short */ /* big */ [ 0.9, 0.1, @@ -99,7 +99,7 @@ descn_table( /* car_color(P), hair_color(P), height(P), guilty(P) */ /* fits */ [ 0.99, 0.5, 0.23, 0.88, 0.41, 0.3, 0.76, 0.87, /* fits */ 0.44, 0.43, 0.29, 0.72, 0.23, 0.91, 0.95, 0.92, -/* dont_fit */ 0.01, 0.5, 0.77, 0.12, 0.59, 0.7, 0.24, 0.13, +/* dont_fit */ 0.01, 0.5, 0.77, 0.12, 0.59, 0.7, 0.24, 0.13, /* dont_fit */ 0.56, 0.57, 0.71, 0.28, 0.77, 0.09, 0.05, 0.08 ]). witness_table( diff --git a/packages/CLPBN/horus/BayesBall.h b/packages/CLPBN/horus/BayesBall.h index 4efbd2ed1..2057b6f01 100644 --- a/packages/CLPBN/horus/BayesBall.h +++ b/packages/CLPBN/horus/BayesBall.h @@ -4,7 +4,6 @@ #include #include #include -#include #include "FactorGraph.h" #include "BayesBallGraph.h" @@ -15,8 +14,8 @@ using namespace std; struct ScheduleInfo { - ScheduleInfo (BBNode* n, bool vfp, bool vfc) : - node(n), visitedFromParent(vfp), visitedFromChild(vfc) { } + ScheduleInfo (BBNode* n, bool vfp, bool vfc) + : node(n), visitedFromParent(vfp), visitedFromChild(vfc) { } BBNode* node; bool visitedFromParent; @@ -30,7 +29,7 @@ typedef queue> Scheduling; class BayesBall { public: - BayesBall (FactorGraph& fg) + BayesBall (FactorGraph& fg) : fg_(fg) , dag_(fg.getStructure()) { dag_.clear(); @@ -63,7 +62,7 @@ inline void BayesBall::scheduleParents (const BBNode* n, Scheduling& sch) const { const vector& ps = n->parents(); - for (vector::const_iterator it = ps.begin(); + for (vector::const_iterator it = ps.begin(); it != ps.end(); ++it) { sch.push (ScheduleInfo (*it, false, true)); } diff --git a/packages/CLPBN/horus/BayesBallGraph.h b/packages/CLPBN/horus/BayesBallGraph.h index 72a0f90d0..68cd9effe 100644 --- a/packages/CLPBN/horus/BayesBallGraph.h +++ b/packages/CLPBN/horus/BayesBallGraph.h @@ -30,15 +30,15 @@ class BBNode : public Var void addChild (BBNode* c) { childs_.push_back (c); } bool isVisited (void) const { return visited_; } - + void setAsVisited (void) { visited_ = true; } bool isMarkedOnTop (void) const { return markedOnTop_; } - + void markOnTop (void) { markedOnTop_ = true; } bool isMarkedOnBottom (void) const { return markedOnBottom_; } - + void markOnBottom (void) { markedOnBottom_ = true; } void clear (void) { visited_ = markedOnTop_ = markedOnBottom_ = false; } diff --git a/packages/CLPBN/horus/BeliefProp.cpp b/packages/CLPBN/horus/BeliefProp.cpp index d96384cfd..f56752fe4 100644 --- a/packages/CLPBN/horus/BeliefProp.cpp +++ b/packages/CLPBN/horus/BeliefProp.cpp @@ -146,7 +146,7 @@ BeliefProp::getFactorJoint ( if (Globals::logDomain) { Util::exp (jointDist); } - return jointDist; + return jointDist; } @@ -185,7 +185,7 @@ BeliefProp::runSolver (void) } if (Globals::verbosity > 0) { if (nIters_ < BpOptions::maxIter) { - cout << "Belief propagation converged in " ; + cout << "Belief propagation converged in " ; cout << nIters_ << " iterations" << endl; } else { cout << "The maximum number of iterations was hit, terminating..." 
; @@ -459,7 +459,7 @@ void BeliefProp::printLinkInformation (void) const { for (size_t i = 0; i < links_.size(); i++) { - BpLink* l = links_[i]; + BpLink* l = links_[i]; cout << l->toString() << ":" << endl; cout << " curr msg = " ; cout << l->message() << endl; diff --git a/packages/CLPBN/horus/BeliefProp.h b/packages/CLPBN/horus/BeliefProp.h index 64a41d916..87364355c 100644 --- a/packages/CLPBN/horus/BeliefProp.h +++ b/packages/CLPBN/horus/BeliefProp.h @@ -17,7 +17,7 @@ class BpLink { public: BpLink (FacNode* fn, VarNode* vn) - { + { fac_ = fn; var_ = vn; v1_.resize (vn->range(), LogAware::log (1.0 / vn->range())); @@ -46,7 +46,7 @@ class BpLink residual_ = LogAware::getMaxNorm (v1_,v2_); } - virtual void updateMessage (void) + virtual void updateMessage (void) { swap (currMsg_, nextMsg_); } diff --git a/packages/CLPBN/horus/ConstraintTree.cpp b/packages/CLPBN/horus/ConstraintTree.cpp index 0546d0852..3a9fe7b5e 100644 --- a/packages/CLPBN/horus/ConstraintTree.cpp +++ b/packages/CLPBN/horus/ConstraintTree.cpp @@ -190,7 +190,7 @@ ConstraintTree::ConstraintTree ( ConstraintTree::ConstraintTree (vector> names) { assert (names.empty() == false); - assert (names.front().empty() == false); + assert (names.front().empty() == false); unsigned nrLvs = names[0].size(); for (size_t i = 0; i < nrLvs; i++) { logVars_.push_back (LogVar (i)); @@ -201,7 +201,7 @@ ConstraintTree::ConstraintTree (vector> names) Tuple t; for (size_t j = 0; j < names[i].size(); j++) { assert (names[i].size() == nrLvs); - t.push_back (LiftedUtils::getSymbol (names[i][j])); + t.push_back (LiftedUtils::getSymbol (names[i][j])); } addTuple (t); } @@ -266,7 +266,7 @@ ConstraintTree::moveToTop (const LogVars& lvs) assert (pos != logVars_.size()); for (size_t j = pos; j-- > i; ) { swapLogVar (logVars_[j]); - } + } } } @@ -318,7 +318,7 @@ ConstraintTree::join (ConstraintTree* ct, bool oneTwoOne) } else { moveToTop (intersect.elements()); ct->moveToTop (intersect.elements()); - + Tuples tuples; CTNodes appendNodes; getTuples (ct->root(), Tuples(), intersect.size(), @@ -455,7 +455,7 @@ ConstraintTree::singletons (void) if (isSingleton (logVars_[i])) { singletons.insert (logVars_[i]); } - } + } return singletons; } @@ -585,13 +585,13 @@ ConstraintTree::isCountNormalized (const LogVarSet& Ys) if (countTuples (*it) != count) { return false; } - } + } return true; } -unsigned +unsigned ConstraintTree::getConditionalCount (const LogVarSet& Ys) { assert (isCountNormalized (Ys)); @@ -792,7 +792,7 @@ ConstraintTree::jointCountNormalize ( } for (size_t i = 0; i < normCts1.size(); i++) { - unsigned j; + unsigned j; for (j = 0; counts1[i] + counts2[j] != N; j++) ; // cout << "joint-count(" << counts1[i] ; // cout << "," << counts2[j] << ")" << endl; @@ -947,7 +947,7 @@ ConstraintTree::getNodesBelow (CTNode* fromHere) const CTNodes -ConstraintTree::getNodesAtLevel (unsigned level) const +ConstraintTree::getNodesAtLevel (unsigned level) const { assert (level <= logVars_.size()); if (level == 0) { @@ -1057,7 +1057,7 @@ ConstraintTree::join ( } else { tupleFounded = join (*it, tuple, currIdx + 1, appendNode); } - } + } return tupleFounded; } @@ -1065,7 +1065,7 @@ ConstraintTree::join ( void ConstraintTree::getTuples ( - CTNode* n, + CTNode* n, Tuples currTuples, unsigned stopLevel, Tuples& tuplesCollected, @@ -1147,7 +1147,7 @@ ConstraintTree::split ( CTNode* n2, CTChilds& commChilds, CTChilds& exclChilds, - unsigned stopLevel) + unsigned stopLevel) { CTChilds& childs1 = n1->childs(); for (CTChilds::const_iterator chIt1 = childs1.begin(); diff 
--git a/packages/CLPBN/horus/CountingBp.cpp b/packages/CLPBN/horus/CountingBp.cpp index d248c602c..b86d22f9f 100644 --- a/packages/CLPBN/horus/CountingBp.cpp +++ b/packages/CLPBN/horus/CountingBp.cpp @@ -47,8 +47,7 @@ CountingBp::printSolverFlags (void) const ss << ",max_iter=" << BpOptions::maxIter; ss << ",accuracy=" << BpOptions::accuracy; ss << ",log_domain=" << Util::toString (Globals::logDomain); - ss << ",chkif=" << - Util::toString (CountingBp::checkForIdenticalFactors); + ss << ",chkif=" << Util::toString (CountingBp::checkForIdenticalFactors); ss << "]" ; cout << ss.str() << endl; } @@ -139,7 +138,7 @@ CountingBp::setInitialColors (void) VarColorMap::iterator it = colorMap.find (range); if (it == colorMap.end()) { it = colorMap.insert (make_pair ( - range, Colors (range + 1, -1))).first; + range, Colors (range + 1, -1))).first; } unsigned idx = varNodes[i]->hasEvidence() ? varNodes[i]->getEvidence() diff --git a/packages/CLPBN/horus/ElimGraph.cpp b/packages/CLPBN/horus/ElimGraph.cpp index f617d8237..1942bfb85 100644 --- a/packages/CLPBN/horus/ElimGraph.cpp +++ b/packages/CLPBN/horus/ElimGraph.cpp @@ -28,7 +28,7 @@ ElimGraph::ElimGraph (const vector& factors) } if (neighbors (n1, n2) == false) { addEdge (n1, n2); - } + } } } if (vids.size() == 1) { @@ -86,7 +86,7 @@ ElimGraph::print (void) const cout << " " << neighs[j]->label(); } cout << endl; - } + } } @@ -142,7 +142,7 @@ ElimGraph::getEliminationOrder ( Factors::const_iterator first = factors.begin(); Factors::const_iterator end = factors.end(); for (; first != end; ++first) { - Util::addToVector (allVids, (*first)->arguments()); + Util::addToVector (allVids, (*first)->arguments()); } TinySet elimOrder (allVids); elimOrder -= TinySet (excludedVids); diff --git a/packages/CLPBN/horus/Factor.h b/packages/CLPBN/horus/Factor.h index 742f20f7a..dd004ee24 100644 --- a/packages/CLPBN/horus/Factor.h +++ b/packages/CLPBN/horus/Factor.h @@ -143,7 +143,7 @@ class TFactor assert (idx != args_.size()); assert (obsIdx < ranges_[idx]); Params newps; - newps.reserve (params_.size() / ranges_[idx]); + newps.reserve (params_.size() / ranges_[idx]); Indexer indexer (ranges_); for (unsigned i = 0; i < obsIdx; ++i) { indexer.incrementDimension (idx); @@ -285,7 +285,7 @@ class Factor : public TFactor void sumOutLastVariable (void); void sumOutArgs (const vector& mask); - + void clone (const Factor& f); }; diff --git a/packages/CLPBN/horus/FactorGraph.cpp b/packages/CLPBN/horus/FactorGraph.cpp index 8f5c446b5..df9dd7941 100644 --- a/packages/CLPBN/horus/FactorGraph.cpp +++ b/packages/CLPBN/horus/FactorGraph.cpp @@ -171,7 +171,7 @@ FactorGraph::readFromLibDaiFormat (const char* fileName) std::reverse (vids.begin(), vids.end()); Factor f (vids, ranges, params); std::reverse (vids.begin(), vids.end()); - f.reorderArguments (vids); + f.reorderArguments (vids); addFactor (f); } is.close(); @@ -188,7 +188,7 @@ FactorGraph::addFactor (const Factor& factor) for (size_t i = 0; i < vids.size(); i++) { VarMap::const_iterator it = varMap_.find (vids[i]); if (it != varMap_.end()) { - addEdge (it->second, fn); + addEdge (it->second, fn); } else { VarNode* vn = new VarNode (vids[i], fn->factor().range (i)); addVarNode (vn); @@ -293,7 +293,7 @@ FactorGraph::exportToGraphViz (const char* fileName) const } for (size_t i = 0; i < facNodes_.size(); i++) { out << '"' << facNodes_[i]->getLabel() << '"' ; - out << " [label=\"" << facNodes_[i]->getLabel(); + out << " [label=\"" << facNodes_[i]->getLabel(); out << "\"" << ", shape=box]" << endl; } for (size_t i = 0; i < 
facNodes_.size(); i++) { diff --git a/packages/CLPBN/horus/FactorGraph.h b/packages/CLPBN/horus/FactorGraph.h index b2b03369d..960f799cc 100644 --- a/packages/CLPBN/horus/FactorGraph.h +++ b/packages/CLPBN/horus/FactorGraph.h @@ -76,7 +76,7 @@ class FactorGraph const FacNodes& facNodes (void) const { return facNodes_; } void setFactorsAsBayesian (void) { bayesFactors_ = true; } - + bool bayesianFactors (void) const { return bayesFactors_; } size_t nrVarNodes (void) const { return varNodes_.size(); } diff --git a/packages/CLPBN/horus/Histogram.cpp b/packages/CLPBN/horus/Histogram.cpp index a9e96cfdd..d5cf729e9 100644 --- a/packages/CLPBN/horus/Histogram.cpp +++ b/packages/CLPBN/horus/Histogram.cpp @@ -59,10 +59,10 @@ HistogramSet::reset (void) -vector +vector HistogramSet::getHistograms (unsigned N, unsigned R) { - HistogramSet hs (N, R); + HistogramSet hs (N, R); unsigned H = hs.nrHistograms(); vector histograms; histograms.reserve (H); @@ -135,7 +135,7 @@ HistogramSet::maxCount (size_t idx) const } return size_ - sum; } - + void diff --git a/packages/CLPBN/horus/HorusCli.cpp b/packages/CLPBN/horus/HorusCli.cpp index 520603052..0997e4655 100644 --- a/packages/CLPBN/horus/HorusCli.cpp +++ b/packages/CLPBN/horus/HorusCli.cpp @@ -77,7 +77,7 @@ readFactorGraph (FactorGraph& fg, const char* s) } else if (extension == "fg") { fg.readFromLibDaiFormat (fileName.c_str()); } else { - cerr << "Error: the probabilistic graphical model must be " ; + cerr << "Error: the probabilistic graphical model must be " ; cerr << "defined either in a UAI or libDAI file." << endl; exit (EXIT_FAILURE); } diff --git a/packages/CLPBN/horus/HorusYap.cpp b/packages/CLPBN/horus/HorusYap.cpp index 3c566b73a..77e900bb0 100644 --- a/packages/CLPBN/horus/HorusYap.cpp +++ b/packages/CLPBN/horus/HorusYap.cpp @@ -57,7 +57,7 @@ createLiftedNetwork (void) } ParfactorList* pfList = new ParfactorList (parfactors); - + if (Globals::verbosity > 2) { Util::printHeader ("SHATTERED PARFACTORS"); pfList->print(); @@ -91,7 +91,7 @@ createGroundNetwork (void) // read the ranges Ranges ranges = readUnsignedList (YAP_ArgOfTerm (2, factor)); // read the parameters - Params params = readParameters (YAP_ArgOfTerm (3, factor)); + Params params = readParameters (YAP_ArgOfTerm (3, factor)); // read dist id unsigned distId = (unsigned) YAP_IntOfTerm (YAP_ArgOfTerm (4, factor)); fg->addFactor (Factor (varIds, ranges, params, distId)); @@ -126,7 +126,7 @@ runLiftedSolver (void) LiftedNetwork* network = (LiftedNetwork*) YAP_IntOfTerm (YAP_ARG1); ParfactorList pfListCopy (*network->first); LiftedOperations::absorveEvidence (pfListCopy, *network->second); - + LiftedSolver* solver = 0; switch (Globals::liftedSolver) { case LiftedSolverType::LVE: solver = new LiftedVe (pfListCopy); break; @@ -181,7 +181,7 @@ int runGroundSolver (void) { FactorGraph* fg = (FactorGraph*) YAP_IntOfTerm (YAP_ARG1); - + vector tasks; YAP_Term taskList = YAP_ARG2; while (taskList != YAP_TermNil()) { @@ -407,7 +407,7 @@ readParfactor (YAP_Term pfTerm) } // read the parameters - const Params& params = readParameters (YAP_ArgOfTerm (4, pfTerm)); + const Params& params = readParameters (YAP_ArgOfTerm (4, pfTerm)); // read the constraint Tuples tuples; @@ -478,7 +478,7 @@ readLiftedEvidence ( obsFormulas.push_back (ObservedFormula (functor, evidence, args)); } observedList = YAP_TailOfTerm (observedList); - } + } } diff --git a/packages/CLPBN/horus/Indexer.h b/packages/CLPBN/horus/Indexer.h index db99cf1a7..cb8135866 100644 --- a/packages/CLPBN/horus/Indexer.h +++ 
b/packages/CLPBN/horus/Indexer.h @@ -167,7 +167,7 @@ class MapIndexer } } } - + template MapIndexer ( const vector& allArgs, diff --git a/packages/CLPBN/horus/LiftedKc.cpp b/packages/CLPBN/horus/LiftedKc.cpp index 45848ab70..46f42d5ec 100644 --- a/packages/CLPBN/horus/LiftedKc.cpp +++ b/packages/CLPBN/horus/LiftedKc.cpp @@ -128,7 +128,7 @@ double LeafNode::weight (void) const { assert (clause_->isUnit()); - if (clause_->posCountedLogVars().empty() == false + if (clause_->posCountedLogVars().empty() == false || clause_->negCountedLogVars().empty() == false) { if (SetOrNode::isSet() == false) { // return a NaN if we have a SetOrNode diff --git a/packages/CLPBN/horus/LiftedOperations.cpp b/packages/CLPBN/horus/LiftedOperations.cpp index e0da2dd3b..986a22c03 100644 --- a/packages/CLPBN/horus/LiftedOperations.cpp +++ b/packages/CLPBN/horus/LiftedOperations.cpp @@ -60,7 +60,7 @@ LiftedOperations::runWeakBayesBall ( const Grounds& query) { queue todo; // groups to process - set done; // processed or in queue + set done; // processed or in queue for (size_t i = 0; i < query.size(); i++) { ParfactorList::iterator it = pfList.begin(); while (it != pfList.end()) { @@ -225,7 +225,7 @@ LiftedOperations::absorve ( absorvedPfs.push_back (0); } break; - } + } g->constr()->moveToTop (formulas[i].logVars()); std::pair res; diff --git a/packages/CLPBN/horus/LiftedOperations.h b/packages/CLPBN/horus/LiftedOperations.h index fc25363d3..1f4b53d3a 100644 --- a/packages/CLPBN/horus/LiftedOperations.h +++ b/packages/CLPBN/horus/LiftedOperations.h @@ -10,7 +10,7 @@ class LiftedOperations ParfactorList& pfList, const Grounds& query); static void runWeakBayesBall ( - ParfactorList& pfList, const Grounds&); + ParfactorList& pfList, const Grounds&); static void absorveEvidence ( ParfactorList& pfList, ObservedFormulas& obsFormulas); diff --git a/packages/CLPBN/horus/LiftedUtils.cpp b/packages/CLPBN/horus/LiftedUtils.cpp index 9ad750f90..0233a8554 100644 --- a/packages/CLPBN/horus/LiftedUtils.cpp +++ b/packages/CLPBN/horus/LiftedUtils.cpp @@ -61,7 +61,7 @@ ostream& operator<< (ostream &os, const Symbol& s) ostream& operator<< (ostream &os, const LogVar& X) { const string labels[] = { - "A", "B", "C", "D", "E", "F", + "A", "B", "C", "D", "E", "F", "G", "H", "I", "J", "K", "M" }; (X >= 12) ? 
os << "X_" << X.id_ : os << labels[X]; return os; diff --git a/packages/CLPBN/horus/LiftedUtils.h b/packages/CLPBN/horus/LiftedUtils.h index 1f563eaf7..de0782f1c 100644 --- a/packages/CLPBN/horus/LiftedUtils.h +++ b/packages/CLPBN/horus/LiftedUtils.h @@ -51,7 +51,7 @@ class LogVar } bool valid (void) const - { + { return id_ != Util::maxUnsigned(); } @@ -145,7 +145,7 @@ class Substitution return X; } - bool containsReplacementFor (LogVar X) const + bool containsReplacementFor (LogVar X) const { return Util::contains (subs_, X); } diff --git a/packages/CLPBN/horus/LiftedVe.cpp b/packages/CLPBN/horus/LiftedVe.cpp index 141006c46..bcce3e100 100644 --- a/packages/CLPBN/horus/LiftedVe.cpp +++ b/packages/CLPBN/horus/LiftedVe.cpp @@ -133,7 +133,7 @@ ProductOperator::toString (void) stringstream ss; ss << "just multiplicate " ; ss << (*g1_)->getAllGroups(); - ss << " x " ; + ss << " x " ; ss << (*g2_)->getAllGroups(); ss << " [cost=" << std::exp (getLogCost()) << "]" << endl; return ss.str(); @@ -155,7 +155,7 @@ ProductOperator::validOp (Parfactor* g1, Parfactor* g2) } size_t idx1 = g1->indexOfGroup (intersect[i]); size_t idx2 = g2->indexOfGroup (intersect[i]); - if (g1->range (idx1) != g2->range (idx2)) { + if (g1->range (idx1) != g2->range (idx2)) { return false; } } @@ -713,7 +713,7 @@ LiftedVe::getBestOperation (const Grounds& query) if ((bestOp == 0) || (cost < bestCost)) { bestOp = validOps[i]; bestCost = cost; - } + } } if (bestCost > largestCost_) { largestCost_ = bestCost; diff --git a/packages/CLPBN/horus/LiftedVe.h b/packages/CLPBN/horus/LiftedVe.h index 7d9974294..b747d9da3 100644 --- a/packages/CLPBN/horus/LiftedVe.h +++ b/packages/CLPBN/horus/LiftedVe.h @@ -9,7 +9,7 @@ class LiftedOperator { public: virtual ~LiftedOperator (void) { } - + virtual double getLogCost (void) = 0; virtual void apply (void) = 0; @@ -55,7 +55,7 @@ class ProductOperator : public LiftedOperator class SumOutOperator : public LiftedOperator { public: - SumOutOperator (PrvGroup group, ParfactorList& pfList) + SumOutOperator (PrvGroup group, ParfactorList& pfList) : group_(group), pfList_(pfList) { } double getLogCost (void); diff --git a/packages/CLPBN/horus/LiftedWCNF.cpp b/packages/CLPBN/horus/LiftedWCNF.cpp index ba7097dbf..a75741fa8 100644 --- a/packages/CLPBN/horus/LiftedWCNF.cpp +++ b/packages/CLPBN/horus/LiftedWCNF.cpp @@ -195,7 +195,7 @@ Clause::isPositiveCountedLogVar (LogVar X) const assert (constr_.logVarSet().contains (X)); return posCountedLvs_.contains (X); } - + bool @@ -235,7 +235,7 @@ Clause::ipgCandidates (void) const LogVarSet allLvs = constr_.logVarSet(); allLvs -= ipgLvs_; allLvs -= posCountedLvs_; - allLvs -= negCountedLvs_; + allLvs -= negCountedLvs_; for (size_t i = 0; i < allLvs.size(); i++) { bool valid = true; for (size_t j = 0; j < literals_.size(); j++) { @@ -262,7 +262,7 @@ Clause::logVarTypes (size_t litIdx) const if (posCountedLvs_.contains (lvs[i])) { types.push_back (LogVarType::POS_LV); } else if (negCountedLvs_.contains (lvs[i])) { - types.push_back (LogVarType::NEG_LV); + types.push_back (LogVarType::NEG_LV); } else { types.push_back (LogVarType::FULL_LV); } @@ -391,7 +391,7 @@ LiftedWCNF::LiftedWCNF (const ParfactorList& pfList) { addIndicatorClauses (pfList); addParameterClauses (pfList); - + /* // INCLUSION-EXCLUSION TEST clauses_.clear(); @@ -579,7 +579,7 @@ LiftedWCNF::addParameterClauses (const ParfactorList& pfList) // ¬θxi|u1,...,un v λu2 -> tempClause double posWeight = (**it)[indexer]; addWeight (paramVarLid, posWeight, LogAware::one()); - + Clause* clause1 = new 
Clause (*(*it)->constr()); for (unsigned i = 0; i < groups.size(); i++) { @@ -593,7 +593,7 @@ LiftedWCNF::addParameterClauses (const ParfactorList& pfList) tempClause->addLiteralComplemented (Literal ( paramVarLid, (*it)->constr()->logVars())); tempClause->addLiteral (Literal (lid, (*it)->argument(i).logVars())); - clauses_.push_back (tempClause); + clauses_.push_back (tempClause); } clause1->addLiteral (Literal (paramVarLid, (*it)->constr()->logVars())); clauses_.push_back (clause1); diff --git a/packages/CLPBN/horus/Parfactor.cpp b/packages/CLPBN/horus/Parfactor.cpp index ef2301b7b..fb5518d1b 100644 --- a/packages/CLPBN/horus/Parfactor.cpp +++ b/packages/CLPBN/horus/Parfactor.cpp @@ -8,7 +8,7 @@ Parfactor::Parfactor ( const ProbFormulas& formulas, - const Params& params, + const Params& params, const Tuples& tuples, unsigned distId) { @@ -221,7 +221,7 @@ Parfactor::countConvert (LogVar X) assert (constr_->isCountNormalized (X)); assert (constr_->getConditionalCount (X) > 1); assert (canCountConvert (X)); - + unsigned N = constr_->getConditionalCount (X); unsigned R = ranges_[fIdx]; unsigned H = HistogramSet::nrHistograms (N, R); @@ -336,7 +336,7 @@ Parfactor::fullExpand (LogVar X) sumIndexes.push_back (HistogramSet::findIndex (hist, originHists)); ++ indexer; } - + expandPotential (fIdx, std::pow (R, N), sumIndexes); ProbFormula f = args_[fIdx]; @@ -360,7 +360,7 @@ Parfactor::reorderAccordingGrounds (const Grounds& grounds) ProbFormulas newFormulas; for (size_t i = 0; i < grounds.size(); i++) { for (size_t j = 0; j < args_.size(); j++) { - if (grounds[i].functor() == args_[j].functor() && + if (grounds[i].functor() == args_[j].functor() && grounds[i].arity() == args_[j].arity()) { constr_->moveToTop (args_[j].logVars()); if (constr_->containsTuple (grounds[i].args())) { @@ -424,7 +424,7 @@ Parfactor::indexOfGround (const Ground& ground) const { size_t idx = args_.size(); for (size_t i = 0; i < args_.size(); i++) { - if (args_[i].functor() == ground.functor() && + if (args_[i].functor() == ground.functor() && args_[i].arity() == ground.arity()) { constr_->moveToTop (args_[i].logVars()); if (constr_->containsTuple (ground.args())) { @@ -806,7 +806,7 @@ Parfactor::simplifyParfactor (size_t fIdx1, size_t fIdx2) while (indexer.valid()) { if (indexer[fIdx1] == indexer[fIdx2]) { params_.push_back (backup[indexer]); - } + } ++ indexer; } for (size_t i = 0; i < args_[fIdx2].logVars().size(); i++) { @@ -829,7 +829,7 @@ Parfactor::getAlignLogVars (Parfactor* g1, Parfactor* g2) TinySet matchedI; TinySet matchedJ; ProbFormulas& formulas1 = g1->arguments(); - ProbFormulas& formulas2 = g2->arguments(); + ProbFormulas& formulas2 = g2->arguments(); for (size_t i = 0; i < formulas1.size(); i++) { for (size_t j = 0; j < formulas2.size(); j++) { if (formulas1[i].group() == formulas2[j].group() && @@ -882,7 +882,7 @@ Parfactor::alignLogicalVars (Parfactor* g1, Parfactor* g2) LogVar freeLogVar (0); Substitution theta1, theta2; for (size_t i = 0; i < alignLvs1.size(); i++) { - bool b1 = theta1.containsReplacementFor (alignLvs1[i]); + bool b1 = theta1.containsReplacementFor (alignLvs1[i]); bool b2 = theta2.containsReplacementFor (alignLvs2[i]); if (b1 == false && b2 == false) { theta1.add (alignLvs1[i], freeLogVar); @@ -911,11 +911,11 @@ Parfactor::alignLogicalVars (Parfactor* g1, Parfactor* g2) } // handle this type of situation: - // g1 = p(X), q(X) ; X in {(p1),(p2)} + // g1 = p(X), q(X) ; X in {(p1),(p2)} // g2 = p(X), q(Y) ; (X,Y) in {(p1,p2),(p2,p1)} LogVars discardedLvs1 = theta1.getDiscardedLogVars(); 
for (size_t i = 0; i < discardedLvs1.size(); i++) { - if (g1->constr()->isSingleton (discardedLvs1[i]) && + if (g1->constr()->isSingleton (discardedLvs1[i]) && g1->nrFormulas (discardedLvs1[i]) == 1) { g1->constr()->remove (discardedLvs1[i]); } else { diff --git a/packages/CLPBN/horus/Parfactor.h b/packages/CLPBN/horus/Parfactor.h index 1c65c2ea0..2f4b45cd7 100644 --- a/packages/CLPBN/horus/Parfactor.h +++ b/packages/CLPBN/horus/Parfactor.h @@ -115,7 +115,7 @@ class Parfactor : public TFactor static void alignLogicalVars (Parfactor*, Parfactor*); ConstraintTree* constr_; - + }; diff --git a/packages/CLPBN/horus/ParfactorList.cpp b/packages/CLPBN/horus/ParfactorList.cpp index 1de1ccc7d..2962f144e 100644 --- a/packages/CLPBN/horus/ParfactorList.cpp +++ b/packages/CLPBN/horus/ParfactorList.cpp @@ -9,7 +9,7 @@ ParfactorList::ParfactorList (const ParfactorList& pfList) while (it != pfList.end()) { addShattered (new Parfactor (**it)); ++ it; - } + } } @@ -74,7 +74,7 @@ ParfactorList::insertShattered ( list::iterator -ParfactorList::remove (list::iterator it) +ParfactorList::remove (list::iterator it) { return pfList_.erase (it); } @@ -418,7 +418,7 @@ ParfactorList::shatter (Parfactor* g1, Parfactor* g2) if (formulas1[i].sameSkeletonAs (formulas2[j])) { std::pair res; res = shatter (i, g1, j, g2); - if (res.first.empty() == false || + if (res.first.empty() == false || res.second.empty() == false) { return res; } @@ -470,7 +470,7 @@ ParfactorList::shatter ( ConstraintTree* exclCt1 = split1.second; if (commCt1->empty()) { - // disjoint + // disjoint delete commCt1; delete exclCt1; return { }; @@ -549,11 +549,11 @@ ParfactorList::shatter ( Parfactor* newPf = new Parfactor (g, cts[i]); if (cts[i]->nrLogVars() == g->constr()->nrLogVars() + 1) { newPf->expand (f.countedLogVar(), X_new1, X_new2); - assert (g->constr()->getConditionalCount (f.countedLogVar()) == + assert (g->constr()->getConditionalCount (f.countedLogVar()) == cts[i]->getConditionalCount (X_new1) + cts[i]->getConditionalCount (X_new2)); } else { - assert (g->constr()->getConditionalCount (f.countedLogVar()) == + assert (g->constr()->getConditionalCount (f.countedLogVar()) == cts[i]->getConditionalCount (f.countedLogVar())); } newPf->setNewGroups(); diff --git a/packages/CLPBN/horus/ProbFormula.cpp b/packages/CLPBN/horus/ProbFormula.cpp index fa2d26d05..081cccf95 100644 --- a/packages/CLPBN/horus/ProbFormula.cpp +++ b/packages/CLPBN/horus/ProbFormula.cpp @@ -61,7 +61,7 @@ ProbFormula::countedLogVar (void) const } - + void ProbFormula::setCountedLogVar (LogVar lv) { @@ -92,9 +92,10 @@ ProbFormula::rename (LogVar oldName, LogVar newName) } + bool operator== (const ProbFormula& f1, const ProbFormula& f2) -{ - return f1.group_ == f2.group_ && +{ + return f1.group_ == f2.group_ && f1.logVars_ == f2.logVars_; } diff --git a/packages/CLPBN/horus/ProbFormula.h b/packages/CLPBN/horus/ProbFormula.h index 63086266a..52bc7d4a0 100644 --- a/packages/CLPBN/horus/ProbFormula.h +++ b/packages/CLPBN/horus/ProbFormula.h @@ -12,11 +12,11 @@ typedef unsigned long PrvGroup; class ProbFormula { public: - ProbFormula (Symbol f, const LogVars& lvs, unsigned range) + ProbFormula (Symbol f, const LogVars& lvs, unsigned range) : functor_(f), logVars_(lvs), range_(range), countedLogVar_(), group_(numeric_limits::max()) { } - ProbFormula (Symbol f, unsigned r) + ProbFormula (Symbol f, unsigned r) : functor_(f), range_(r), group_(numeric_limits::max()) { } Symbol functor (void) const { return functor_; } @@ -76,10 +76,10 @@ typedef vector ProbFormulas; class 
ObservedFormula { public: - ObservedFormula (Symbol f, unsigned a, unsigned ev) + ObservedFormula (Symbol f, unsigned a, unsigned ev) : functor_(f), arity_(a), evidence_(ev), constr_(a) { } - ObservedFormula (Symbol f, unsigned ev, const Tuple& tuple) + ObservedFormula (Symbol f, unsigned ev, const Tuple& tuple) : functor_(f), arity_(tuple.size()), evidence_(ev), constr_(arity_) { constr_.addTuple (tuple); diff --git a/packages/CLPBN/horus/TinySet.h b/packages/CLPBN/horus/TinySet.h index 4b3c4bd83..f7ff6e083 100644 --- a/packages/CLPBN/horus/TinySet.h +++ b/packages/CLPBN/horus/TinySet.h @@ -21,7 +21,7 @@ class TinySet TinySet (const Compare& cmp = Compare()) : vec_(), cmp_(cmp) { } - TinySet (const T& t, const Compare& cmp = Compare()) + TinySet (const T& t, const Compare& cmp = Compare()) : vec_(1, t), cmp_(cmp) { } TinySet (const vector& elements, const Compare& cmp = Compare()) @@ -153,12 +153,12 @@ class TinySet { return vec_[i]; } - + T& operator[] (typename vector::size_type i) { return vec_[i]; } - + T front (void) const { return vec_.front(); @@ -219,7 +219,7 @@ class TinySet return ! (s1.vec_ == s2.vec_); } - friend std::ostream& operator << (std::ostream& out, const TinySet& s) + friend std::ostream& operator<< (std::ostream& out, const TinySet& s) { out << "{" ; typename vector::size_type i; diff --git a/packages/CLPBN/horus/Util.cpp b/packages/CLPBN/horus/Util.cpp index 0f3ce6544..6afd56f43 100644 --- a/packages/CLPBN/horus/Util.cpp +++ b/packages/CLPBN/horus/Util.cpp @@ -140,7 +140,7 @@ nrDigits (int num) { unsigned count = 1; while (num >= 10) { - num /= 10; + num /= 10; count ++; } return count; @@ -166,7 +166,7 @@ parametersToString (const Params& v, unsigned precision) { stringstream ss; ss.precision (precision); - ss << "[" ; + ss << "[" ; for (size_t i = 0; i < v.size(); i++) { if (i != 0) ss << ", " ; ss << v[i]; diff --git a/packages/CLPBN/horus/Util.h b/packages/CLPBN/horus/Util.h index 38a088714..1a4bfa441 100644 --- a/packages/CLPBN/horus/Util.h +++ b/packages/CLPBN/horus/Util.h @@ -373,8 +373,8 @@ void operator^=(std::vector& v, int iexp) -template -std::ostream& operator << (std::ostream& os, const vector& v) +template +std::ostream& operator<< (std::ostream& os, const vector& v) { os << "[" ; os << Util::elementsToString (v, ", "); diff --git a/packages/CLPBN/horus/Var.cpp b/packages/CLPBN/horus/Var.cpp index 44ab6b1e4..80fc0abe6 100644 --- a/packages/CLPBN/horus/Var.cpp +++ b/packages/CLPBN/horus/Var.cpp @@ -49,7 +49,7 @@ Var::isValidState (const string& stateName) void -Var::setEvidence (int ev) +Var::setEvidence (int ev) { assert (ev < (int) range_); evidence_ = ev; @@ -58,8 +58,8 @@ Var::setEvidence (int ev) void -Var::setEvidence (const string& ev) -{ +Var::setEvidence (const string& ev) +{ States states = Var::getVarInfo (varId_).states; for (size_t i = 0; i < states.size(); i++) { if (states[i] == ev) { diff --git a/packages/CLPBN/horus/VarElim.cpp b/packages/CLPBN/horus/VarElim.cpp index fb4eecf50..54ee18d20 100644 --- a/packages/CLPBN/horus/VarElim.cpp +++ b/packages/CLPBN/horus/VarElim.cpp @@ -70,7 +70,7 @@ VarElim::createFactorList (void) factorList_.push_back (new Factor (facNodes[i]->factor())); const VarNodes& neighs = facNodes[i]->neighbors(); for (size_t j = 0; j < neighs.size(); j++) { - unordered_map>::iterator it + unordered_map>::iterator it = varFactors_.find (neighs[j]->varId()); if (it == varFactors_.end()) { it = varFactors_.insert (make_pair ( diff --git a/packages/CLPBN/horus/WeightedBp.cpp b/packages/CLPBN/horus/WeightedBp.cpp index 
d8a32a246..8416c4592 100644 --- a/packages/CLPBN/horus/WeightedBp.cpp +++ b/packages/CLPBN/horus/WeightedBp.cpp @@ -132,7 +132,7 @@ WeightedBp::maxResidualSchedule (void) } } // in counting bp, the message that a variable X sends to - // to a factor F depends on the message that F sent to the X + // to a factor F depends on the message that F sent to the X const BpLinks& links = ninf(link->facNode())->getLinks(); for (size_t i = 0; i < links.size(); i++) { if (links[i]->varNode() != link->varNode()) { @@ -258,7 +258,7 @@ WeightedBp::getVarToFactorMsg (const BpLink* _link) const if ( ! (l->facNode() == dst && l->index() == link->index())) { msg *= l->powMessage(); if (Constants::SHOW_BP_CALCS) { - cout << " x " << l->nextMessage() << "^" << link->weight(); + cout << " x " << l->nextMessage() << "^" << link->weight(); } } } diff --git a/packages/CLPBN/horus/WeightedBp.h b/packages/CLPBN/horus/WeightedBp.h index 7794fd509..1e79fd4db 100644 --- a/packages/CLPBN/horus/WeightedBp.h +++ b/packages/CLPBN/horus/WeightedBp.h @@ -6,7 +6,7 @@ class WeightedLink : public BpLink { public: - WeightedLink (FacNode* fn, VarNode* vn, size_t idx, unsigned weight) + WeightedLink (FacNode* fn, VarNode* vn, size_t idx, unsigned weight) : BpLink (fn, vn), index_(idx), weight_(weight), pwdMsg_(vn->range(), LogAware::one()) { } @@ -16,7 +16,7 @@ class WeightedLink : public BpLink const Params& powMessage (void) const { return pwdMsg_; } - void updateMessage (void) + void updateMessage (void) { pwdMsg_ = *nextMsg_; swap (currMsg_, nextMsg_); diff --git a/packages/CLPBN/learning/aleph_params.yap b/packages/CLPBN/learning/aleph_params.yap index cb6070eb4..5fcf9be9d 100644 --- a/packages/CLPBN/learning/aleph_params.yap +++ b/packages/CLPBN/learning/aleph_params.yap @@ -50,7 +50,7 @@ % % Tell Aleph not to use default solver during saturation % -% all work will be done by EM +% all work will be done by EM %:- set_clpbn_flag(solver,none). % @@ -123,7 +123,7 @@ add_new_clause(_,(H :- B),_,_) :- asserta(user:(H :- IB)) ), user:setting(verbosity,V), - ( V >= 1 -> + ( V >= 1 -> user:p_message('CLP(BN) Theory'), functor(H,N,Ar), listing(user:N/Ar) ; @@ -138,7 +138,7 @@ update_tabled_theory(H) :- clpbn_tabled_assertz((user:(H:-NB))), fail. update_tabled_theory(_). - + update_theory(H) :- clause(user:H,B,Ref), add_correct_cpt(B,NB), @@ -161,7 +161,7 @@ correct_tab(p(Vs,_,Ps),K,p(Vs,TDist,Ps)) :- get_dist_key(Id, K), get_dist_params(Id, TDist). -% user-defined cost function, Aleph knows about this (and only about this). +% user-defined cost function, Aleph knows about this (and only about this). user:cost((H :- B),Inf,Score) :- domain(H, K, V, D), check_info(Inf), @@ -261,7 +261,7 @@ rewrite_body((A,B), (user:NA,NB), [V|Vs], [D|Ds], Tail) :- rewrite_body(B, NB, Vs, Ds, Tail). rewrite_body((A,B), (user:A,NB), Vs, Ds, Tail) :- !, rewrite_body(B,NB, Vs, Ds, Tail). -rewrite_body(A,(user:NA,Tail), [V], [D], Tail) :- +rewrite_body(A,(user:NA,Tail), [V], [D], Tail) :- rewrite_goal(A, V, D, NA), !. rewrite_body(A, (user:A,Tail), [], [], Tail). diff --git a/packages/CLPBN/learning/em.yap b/packages/CLPBN/learning/em.yap index 8abe4e92d..7ace0b9b3 100644 --- a/packages/CLPBN/learning/em.yap +++ b/packages/CLPBN/learning/em.yap @@ -33,7 +33,7 @@ [generate_network/5, f/3 ]). - + :- use_module(library('clpbn/utils'), [check_for_hidden_vars/3, sort_vars_by_key/3 @@ -46,7 +46,7 @@ compute_likelihood/3, soften_sample/2 ]). 
- + :- use_module(library(bhash), [b_hash_new/1, b_hash_lookup/3, @@ -198,7 +198,7 @@ ltables([Id-T|Tables], [Key-LTable|FTables]) :- generate_dists(Factors, EList, AllDists, AllInfo, MargVars) :- - b_hash_new(Ev0), + b_hash_new(Ev0), foldl(elist_to_hash, EList, Ev0, Ev), maplist(process_factor(Ev), Factors, Dists0), sort(Dists0, Dists1), @@ -220,7 +220,7 @@ fetch_evidence(_Ev, K, Ns, NonEvs, [K|NonEvs]) :- domain_to_number(_, I0, I0, I) :- I is I0+1. - + % collect the different dists we are going to learn next. different_dists(AllVars, AllDists, AllInfo, MargVars) :- @@ -232,9 +232,9 @@ different_dists(AllVars, AllDists, AllInfo, MargVars) :- % % V -> to Id defining V. We get: % the random variables that are parents -% the cases that can happen, eg if we have A <- B, C +% the cases that can happen, eg if we have A <- B, C % A and B are boolean w/o evidence, and C is f, the cases could be -% [0,0,1], [0,1,1], [1,0,0], [1,1,0], +% [0,0,1], [0,1,1], [1,0,0], [1,1,0], % Hiddens will be C % all_dists([], _, []). diff --git a/packages/CLPBN/learning/mle.yap b/packages/CLPBN/learning/mle.yap index 14080fe69..4456db86e 100644 --- a/packages/CLPBN/learning/mle.yap +++ b/packages/CLPBN/learning/mle.yap @@ -57,7 +57,7 @@ mk_sample(AllVars, SortedSample) :- msort(Sample, SortedSample). % -% assumes we have full data, meaning evidence for every variable +% assumes we have full data, meaning evidence for every variable % add2sample([], []). add2sample([V|Vs],[val(Id,[Ev|EParents])|Vals]) :- diff --git a/packages/CLPBN/pfl.yap b/packages/CLPBN/pfl.yap index 5272aa8f5..4c2a0efc6 100644 --- a/packages/CLPBN/pfl.yap +++ b/packages/CLPBN/pfl.yap @@ -50,7 +50,7 @@ append/3, member/2 ]). - + :- dynamic factor/6, skolem_in/2, skolem/2, preprocess/3, evidence/2, id/1. user:term_expansion( bayes((Formula ; Phi ; Constraints)), pfl:factor(bayes,Id,FList,FV,Phi,Constraints)) :- @@ -178,7 +178,7 @@ add_evidence(Sk,Var) :- clpbn:put_atts(_V,[key(Sk),evidence(E)]). -%% get_pfl_cpt(Id, Keys, Ev, NewKeys, Out) :- +%% get_pfl_cpt(Id, Keys, Ev, NewKeys, Out) :- %% factor(_Type,Id,[Key|_],_FV,avg,_Constraints), !, %% Keys = [Key|Parents], %% writeln(Key:Parents), From 4b901d26d78ca21d123e7a1d8adfb9ff07b6c2be Mon Sep 17 00:00:00 2001 From: Tiago Gomes Date: Thu, 20 Dec 2012 23:34:53 +0000 Subject: [PATCH 41/89] Improve error messages --- packages/CLPBN/clpbn.yap | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/CLPBN/clpbn.yap b/packages/CLPBN/clpbn.yap index 605e9dd74..8e994330a 100644 --- a/packages/CLPBN/clpbn.yap +++ b/packages/CLPBN/clpbn.yap @@ -423,7 +423,7 @@ write_out(bnt, GVars, AVars, DiffVars) :- !, do_bnt(GVars, AVars, DiffVars). write_out(Solver, _, _, _) :- - format("Error: solver `~w' is unknown", [Solver]), + format("Error: solver '~w' is unknown.", [Solver]), fail. % @@ -675,7 +675,7 @@ pfl_init_solver(QueryKeys, AllKeys, Factors, Evidence, State) :- (ground_solver(Solver) -> true ; - format("Error: `~w' is an unknow solver.", [Solver]), fail + format("Error: solver '~w' is unknown.", [Solver]), fail ), pfl_init_solver(QueryKeys, AllKeys, Factors, Evidence, State, Solver). @@ -698,7 +698,7 @@ pfl_init_solver(QueryKeys, AllKeys, Factors, Evidence, State, cbp) :- !, init_horus_ground_solver(QueryKeys, AllKeys, Factors, Evidence, State). pfl_init_solver(_, _, _, _, _, Solver) :- - format("Error: solver `~w' cannot be used for learning.", [Solver]), + format("Error: solver '~w' can't be used for learning.", [Solver]), fail. 
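Note on the error-message hunks above: every patched clause follows the same report-and-fail idiom -- an unrecognised solver name is reported with format/2 and the clause then fails, so the calling goal fails cleanly instead of aborting. Below is a minimal, self-contained sketch of that idiom; check_solver/1 and known_solver/1 are hypothetical helpers written only for illustration (they are not predicates touched by this patch), and bnt/cbp are solver names taken from the hunks above.

% Illustration only: the report-and-fail idiom used by the patched clauses.
% check_solver/1 and known_solver/1 are hypothetical, not part of clpbn.yap.
known_solver(bnt).
known_solver(cbp).

check_solver(Solver) :-
    known_solver(Solver), !.
check_solver(Solver) :-
    format("Error: solver '~w' is unknown.", [Solver]),
    fail.

For example, check_solver(foo) prints the error and then fails, which mirrors what the patched pfl_init_solver/6 clause does when asked to use a solver it does not support.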
From d8c5725b2e5b5d41a3ecd51c1486d3dfe2aa8476 Mon Sep 17 00:00:00 2001 From: Tiago Gomes Date: Fri, 21 Dec 2012 15:20:28 +0000 Subject: [PATCH 42/89] This shouldn't be on the repository --- packages/CLPBN/horus2/BayesBall.cpp | 84 -- packages/CLPBN/horus2/BayesBall.h | 85 -- packages/CLPBN/horus2/BayesBallGraph.cpp | 106 -- packages/CLPBN/horus2/BayesBallGraph.h | 84 -- packages/CLPBN/horus2/BeliefProp.cpp | 471 ------- packages/CLPBN/horus2/BeliefProp.h | 188 --- packages/CLPBN/horus2/ConstraintTree.cpp | 1174 ------------------ packages/CLPBN/horus2/ConstraintTree.h | 237 ---- packages/CLPBN/horus2/CountingBp.cpp | 424 ------- packages/CLPBN/horus2/CountingBp.h | 182 --- packages/CLPBN/horus2/ElimGraph.cpp | 243 ---- packages/CLPBN/horus2/ElimGraph.h | 139 --- packages/CLPBN/horus2/Factor.cpp | 237 ---- packages/CLPBN/horus2/Factor.h | 294 ----- packages/CLPBN/horus2/FactorGraph.cpp | 454 ------- packages/CLPBN/horus2/FactorGraph.h | 150 --- packages/CLPBN/horus2/GroundSolver.cpp | 107 -- packages/CLPBN/horus2/GroundSolver.h | 36 - packages/CLPBN/horus2/Histogram.cpp | 146 --- packages/CLPBN/horus2/Histogram.h | 45 - packages/CLPBN/horus2/Horus.h | 87 -- packages/CLPBN/horus2/HorusCli.cpp | 187 --- packages/CLPBN/horus2/HorusYap.cpp | 570 --------- packages/CLPBN/horus2/Indexer.h | 258 ---- packages/CLPBN/horus2/LiftedBp.cpp | 234 ---- packages/CLPBN/horus2/LiftedBp.h | 43 - packages/CLPBN/horus2/LiftedKc.cpp | 1309 -------------------- packages/CLPBN/horus2/LiftedKc.h | 300 ----- packages/CLPBN/horus2/LiftedOperations.cpp | 271 ---- packages/CLPBN/horus2/LiftedOperations.h | 27 - packages/CLPBN/horus2/LiftedSolver.h | 27 - packages/CLPBN/horus2/LiftedUtils.cpp | 131 -- packages/CLPBN/horus2/LiftedUtils.h | 166 --- packages/CLPBN/horus2/LiftedVe.cpp | 728 ----------- packages/CLPBN/horus2/LiftedVe.h | 155 --- packages/CLPBN/horus2/LiftedWCNF.cpp | 658 ---------- packages/CLPBN/horus2/LiftedWCNF.h | 239 ---- packages/CLPBN/horus2/Parfactor.cpp | 942 -------------- packages/CLPBN/horus2/Parfactor.h | 125 -- packages/CLPBN/horus2/ParfactorList.cpp | 638 ---------- packages/CLPBN/horus2/ParfactorList.h | 121 -- packages/CLPBN/horus2/ProbFormula.cpp | 140 --- packages/CLPBN/horus2/ProbFormula.h | 114 -- packages/CLPBN/horus2/TinySet.h | 264 ---- packages/CLPBN/horus2/Util.cpp | 429 ------- packages/CLPBN/horus2/Util.h | 422 ------- packages/CLPBN/horus2/Var.cpp | 102 -- packages/CLPBN/horus2/Var.h | 108 -- packages/CLPBN/horus2/VarElim.cpp | 217 ---- packages/CLPBN/horus2/VarElim.h | 46 - packages/CLPBN/horus2/WeightedBp.cpp | 288 ----- packages/CLPBN/horus2/WeightedBp.h | 61 - 52 files changed, 14293 deletions(-) delete mode 100644 packages/CLPBN/horus2/BayesBall.cpp delete mode 100644 packages/CLPBN/horus2/BayesBall.h delete mode 100644 packages/CLPBN/horus2/BayesBallGraph.cpp delete mode 100644 packages/CLPBN/horus2/BayesBallGraph.h delete mode 100644 packages/CLPBN/horus2/BeliefProp.cpp delete mode 100644 packages/CLPBN/horus2/BeliefProp.h delete mode 100644 packages/CLPBN/horus2/ConstraintTree.cpp delete mode 100644 packages/CLPBN/horus2/ConstraintTree.h delete mode 100644 packages/CLPBN/horus2/CountingBp.cpp delete mode 100644 packages/CLPBN/horus2/CountingBp.h delete mode 100644 packages/CLPBN/horus2/ElimGraph.cpp delete mode 100644 packages/CLPBN/horus2/ElimGraph.h delete mode 100644 packages/CLPBN/horus2/Factor.cpp delete mode 100644 packages/CLPBN/horus2/Factor.h delete mode 100644 packages/CLPBN/horus2/FactorGraph.cpp delete mode 100644 packages/CLPBN/horus2/FactorGraph.h delete 
mode 100644 packages/CLPBN/horus2/GroundSolver.cpp delete mode 100644 packages/CLPBN/horus2/GroundSolver.h delete mode 100644 packages/CLPBN/horus2/Histogram.cpp delete mode 100644 packages/CLPBN/horus2/Histogram.h delete mode 100644 packages/CLPBN/horus2/Horus.h delete mode 100644 packages/CLPBN/horus2/HorusCli.cpp delete mode 100644 packages/CLPBN/horus2/HorusYap.cpp delete mode 100644 packages/CLPBN/horus2/Indexer.h delete mode 100644 packages/CLPBN/horus2/LiftedBp.cpp delete mode 100644 packages/CLPBN/horus2/LiftedBp.h delete mode 100644 packages/CLPBN/horus2/LiftedKc.cpp delete mode 100644 packages/CLPBN/horus2/LiftedKc.h delete mode 100644 packages/CLPBN/horus2/LiftedOperations.cpp delete mode 100644 packages/CLPBN/horus2/LiftedOperations.h delete mode 100644 packages/CLPBN/horus2/LiftedSolver.h delete mode 100644 packages/CLPBN/horus2/LiftedUtils.cpp delete mode 100644 packages/CLPBN/horus2/LiftedUtils.h delete mode 100644 packages/CLPBN/horus2/LiftedVe.cpp delete mode 100644 packages/CLPBN/horus2/LiftedVe.h delete mode 100644 packages/CLPBN/horus2/LiftedWCNF.cpp delete mode 100644 packages/CLPBN/horus2/LiftedWCNF.h delete mode 100644 packages/CLPBN/horus2/Parfactor.cpp delete mode 100644 packages/CLPBN/horus2/Parfactor.h delete mode 100644 packages/CLPBN/horus2/ParfactorList.cpp delete mode 100644 packages/CLPBN/horus2/ParfactorList.h delete mode 100644 packages/CLPBN/horus2/ProbFormula.cpp delete mode 100644 packages/CLPBN/horus2/ProbFormula.h delete mode 100644 packages/CLPBN/horus2/TinySet.h delete mode 100644 packages/CLPBN/horus2/Util.cpp delete mode 100644 packages/CLPBN/horus2/Util.h delete mode 100644 packages/CLPBN/horus2/Var.cpp delete mode 100644 packages/CLPBN/horus2/Var.h delete mode 100644 packages/CLPBN/horus2/VarElim.cpp delete mode 100644 packages/CLPBN/horus2/VarElim.h delete mode 100644 packages/CLPBN/horus2/WeightedBp.cpp delete mode 100644 packages/CLPBN/horus2/WeightedBp.h diff --git a/packages/CLPBN/horus2/BayesBall.cpp b/packages/CLPBN/horus2/BayesBall.cpp deleted file mode 100644 index 0fac25056..000000000 --- a/packages/CLPBN/horus2/BayesBall.cpp +++ /dev/null @@ -1,84 +0,0 @@ -#include -#include - -#include -#include -#include - -#include "BayesBall.h" -#include "Util.h" - - -FactorGraph* -BayesBall::getMinimalFactorGraph (const VarIds& queryIds) -{ - assert (fg_.bayesianFactors()); - Scheduling scheduling; - for (size_t i = 0; i < queryIds.size(); i++) { - assert (dag_.getNode (queryIds[i])); - BBNode* n = dag_.getNode (queryIds[i]); - scheduling.push (ScheduleInfo (n, false, true)); - } - - while (!scheduling.empty()) { - ScheduleInfo& sch = scheduling.front(); - BBNode* n = sch.node; - n->setAsVisited(); - if (n->hasEvidence() == false && sch.visitedFromChild) { - if (n->isMarkedOnTop() == false) { - n->markOnTop(); - scheduleParents (n, scheduling); - } - if (n->isMarkedOnBottom() == false) { - n->markOnBottom(); - scheduleChilds (n, scheduling); - } - } - if (sch.visitedFromParent) { - if (n->hasEvidence() && n->isMarkedOnTop() == false) { - n->markOnTop(); - scheduleParents (n, scheduling); - } - if (n->hasEvidence() == false && n->isMarkedOnBottom() == false) { - n->markOnBottom(); - scheduleChilds (n, scheduling); - } - } - scheduling.pop(); - } - - FactorGraph* fg = new FactorGraph(); - constructGraph (fg); - return fg; -} - - - -void -BayesBall::constructGraph (FactorGraph* fg) const -{ - const FacNodes& facNodes = fg_.facNodes(); - for (size_t i = 0; i < facNodes.size(); i++) { - const BBNode* n = dag_.getNode ( - facNodes[i]->factor().argument 
(0)); - if (n->isMarkedOnTop()) { - fg->addFactor (facNodes[i]->factor()); - } else if (n->hasEvidence() && n->isVisited()) { - VarIds varIds = { facNodes[i]->factor().argument (0) }; - Ranges ranges = { facNodes[i]->factor().range (0) }; - Params params (ranges[0], LogAware::noEvidence()); - params[n->getEvidence()] = LogAware::withEvidence(); - fg->addFactor (Factor (varIds, ranges, params)); - } - } - const VarNodes& varNodes = fg_.varNodes(); - for (size_t i = 0; i < varNodes.size(); i++) { - if (varNodes[i]->hasEvidence()) { - VarNode* vn = fg->getVarNode (varNodes[i]->varId()); - if (vn) { - vn->setEvidence (varNodes[i]->getEvidence()); - } - } - } -} - diff --git a/packages/CLPBN/horus2/BayesBall.h b/packages/CLPBN/horus2/BayesBall.h deleted file mode 100644 index 4efbd2ed1..000000000 --- a/packages/CLPBN/horus2/BayesBall.h +++ /dev/null @@ -1,85 +0,0 @@ -#ifndef HORUS_BAYESBALL_H -#define HORUS_BAYESBALL_H - -#include -#include -#include -#include - -#include "FactorGraph.h" -#include "BayesBallGraph.h" -#include "Horus.h" - -using namespace std; - - -struct ScheduleInfo -{ - ScheduleInfo (BBNode* n, bool vfp, bool vfc) : - node(n), visitedFromParent(vfp), visitedFromChild(vfc) { } - - BBNode* node; - bool visitedFromParent; - bool visitedFromChild; -}; - - -typedef queue> Scheduling; - - -class BayesBall -{ - public: - BayesBall (FactorGraph& fg) - : fg_(fg) , dag_(fg.getStructure()) - { - dag_.clear(); - } - - FactorGraph* getMinimalFactorGraph (const VarIds&); - - static FactorGraph* getMinimalFactorGraph (FactorGraph& fg, VarIds vids) - { - BayesBall bb (fg); - return bb.getMinimalFactorGraph (vids); - } - - private: - - void constructGraph (FactorGraph* fg) const; - - void scheduleParents (const BBNode* n, Scheduling& sch) const; - - void scheduleChilds (const BBNode* n, Scheduling& sch) const; - - FactorGraph& fg_; - - BayesBallGraph& dag_; -}; - - - -inline void -BayesBall::scheduleParents (const BBNode* n, Scheduling& sch) const -{ - const vector& ps = n->parents(); - for (vector::const_iterator it = ps.begin(); - it != ps.end(); ++it) { - sch.push (ScheduleInfo (*it, false, true)); - } -} - - - -inline void -BayesBall::scheduleChilds (const BBNode* n, Scheduling& sch) const -{ - const vector& cs = n->childs(); - for (vector::const_iterator it = cs.begin(); - it != cs.end(); ++it) { - sch.push (ScheduleInfo (*it, true, false)); - } -} - -#endif // HORUS_BAYESBALL_H - diff --git a/packages/CLPBN/horus2/BayesBallGraph.cpp b/packages/CLPBN/horus2/BayesBallGraph.cpp deleted file mode 100644 index 36fcbb5ee..000000000 --- a/packages/CLPBN/horus2/BayesBallGraph.cpp +++ /dev/null @@ -1,106 +0,0 @@ -#include -#include - -#include -#include -#include - -#include "BayesBallGraph.h" -#include "Util.h" - - -void -BayesBallGraph::addNode (BBNode* n) -{ - assert (Util::contains (varMap_, n->varId()) == false); - nodes_.push_back (n); - varMap_[n->varId()] = n; -} - - - -void -BayesBallGraph::addEdge (VarId vid1, VarId vid2) -{ - unordered_map::iterator it1; - unordered_map::iterator it2; - it1 = varMap_.find (vid1); - it2 = varMap_.find (vid2); - assert (it1 != varMap_.end()); - assert (it2 != varMap_.end()); - it1->second->addChild (it2->second); - it2->second->addParent (it1->second); -} - - - -const BBNode* -BayesBallGraph::getNode (VarId vid) const -{ - unordered_map::const_iterator it; - it = varMap_.find (vid); - return it != varMap_.end() ? 
it->second : 0; -} - - - -BBNode* -BayesBallGraph::getNode (VarId vid) -{ - unordered_map::const_iterator it; - it = varMap_.find (vid); - return it != varMap_.end() ? it->second : 0; -} - - - -void -BayesBallGraph::setIndexes (void) -{ - for (size_t i = 0; i < nodes_.size(); i++) { - nodes_[i]->setIndex (i); - } -} - - - -void -BayesBallGraph::clear (void) -{ - for (size_t i = 0; i < nodes_.size(); i++) { - nodes_[i]->clear(); - } -} - - - -void -BayesBallGraph::exportToGraphViz (const char* fileName) -{ - ofstream out (fileName); - if (!out.is_open()) { - cerr << "Error: couldn't open file '" << fileName << "'." ; - return; - } - out << "digraph {" << endl; - out << "ranksep=1" << endl; - for (size_t i = 0; i < nodes_.size(); i++) { - out << nodes_[i]->varId() ; - out << " [" ; - out << "label=\"" << nodes_[i]->label() << "\"" ; - if (nodes_[i]->hasEvidence()) { - out << ",style=filled, fillcolor=yellow" ; - } - out << "]" << endl; - } - for (size_t i = 0; i < nodes_.size(); i++) { - const vector& childs = nodes_[i]->childs(); - for (size_t j = 0; j < childs.size(); j++) { - out << nodes_[i]->varId() << " -> " << childs[j]->varId(); - out << " [style=bold]" << endl ; - } - } - out << "}" << endl; - out.close(); -} - diff --git a/packages/CLPBN/horus2/BayesBallGraph.h b/packages/CLPBN/horus2/BayesBallGraph.h deleted file mode 100644 index 72a0f90d0..000000000 --- a/packages/CLPBN/horus2/BayesBallGraph.h +++ /dev/null @@ -1,84 +0,0 @@ -#ifndef HORUS_BAYESBALLGRAPH_H -#define HORUS_BAYESBALLGRAPH_H - -#include -#include -#include -#include - -#include "Var.h" -#include "Horus.h" - -using namespace std; - -class BBNode : public Var -{ - public: - BBNode (Var* v) : Var (v) , visited_(false), - markedOnTop_(false), markedOnBottom_(false) { } - - const vector& childs (void) const { return childs_; } - - vector& childs (void) { return childs_; } - - const vector& parents (void) const { return parents_; } - - vector& parents (void) { return parents_; } - - void addParent (BBNode* p) { parents_.push_back (p); } - - void addChild (BBNode* c) { childs_.push_back (c); } - - bool isVisited (void) const { return visited_; } - - void setAsVisited (void) { visited_ = true; } - - bool isMarkedOnTop (void) const { return markedOnTop_; } - - void markOnTop (void) { markedOnTop_ = true; } - - bool isMarkedOnBottom (void) const { return markedOnBottom_; } - - void markOnBottom (void) { markedOnBottom_ = true; } - - void clear (void) { visited_ = markedOnTop_ = markedOnBottom_ = false; } - - private: - bool visited_; - bool markedOnTop_; - bool markedOnBottom_; - - vector childs_; - vector parents_; -}; - - -class BayesBallGraph -{ - public: - BayesBallGraph (void) { } - - void addNode (BBNode* n); - - void addEdge (VarId vid1, VarId vid2); - - const BBNode* getNode (VarId vid) const; - - BBNode* getNode (VarId vid); - - bool empty (void) const { return nodes_.empty(); } - - void setIndexes (void); - - void clear (void); - - void exportToGraphViz (const char*); - - private: - vector nodes_; - - unordered_map varMap_; -}; - -#endif // HORUS_BAYESBALLGRAPH_H - diff --git a/packages/CLPBN/horus2/BeliefProp.cpp b/packages/CLPBN/horus2/BeliefProp.cpp deleted file mode 100644 index d96384cfd..000000000 --- a/packages/CLPBN/horus2/BeliefProp.cpp +++ /dev/null @@ -1,471 +0,0 @@ -#include -#include - -#include - -#include - -#include "BeliefProp.h" -#include "FactorGraph.h" -#include "Factor.h" -#include "Indexer.h" -#include "Horus.h" - - -BeliefProp::BeliefProp (const FactorGraph& fg) : GroundSolver (fg) -{ - runned_ 
= false; -} - - - -BeliefProp::~BeliefProp (void) -{ - for (size_t i = 0; i < varsI_.size(); i++) { - delete varsI_[i]; - } - for (size_t i = 0; i < facsI_.size(); i++) { - delete facsI_[i]; - } - for (size_t i = 0; i < links_.size(); i++) { - delete links_[i]; - } -} - - - -Params -BeliefProp::solveQuery (VarIds queryVids) -{ - assert (queryVids.empty() == false); - return queryVids.size() == 1 - ? getPosterioriOf (queryVids[0]) - : getJointDistributionOf (queryVids); -} - - - -void -BeliefProp::printSolverFlags (void) const -{ - stringstream ss; - ss << "belief propagation [" ; - ss << "schedule=" ; - typedef BpOptions::Schedule Sch; - switch (BpOptions::schedule) { - case Sch::SEQ_FIXED: ss << "seq_fixed"; break; - case Sch::SEQ_RANDOM: ss << "seq_random"; break; - case Sch::PARALLEL: ss << "parallel"; break; - case Sch::MAX_RESIDUAL: ss << "max_residual"; break; - } - ss << ",max_iter=" << Util::toString (BpOptions::maxIter); - ss << ",accuracy=" << Util::toString (BpOptions::accuracy); - ss << ",log_domain=" << Util::toString (Globals::logDomain); - ss << "]" ; - cout << ss.str() << endl; -} - - - -Params -BeliefProp::getPosterioriOf (VarId vid) -{ - if (runned_ == false) { - runSolver(); - } - assert (fg.getVarNode (vid)); - VarNode* var = fg.getVarNode (vid); - Params probs; - if (var->hasEvidence()) { - probs.resize (var->range(), LogAware::noEvidence()); - probs[var->getEvidence()] = LogAware::withEvidence(); - } else { - probs.resize (var->range(), LogAware::multIdenty()); - const BpLinks& links = ninf(var)->getLinks(); - if (Globals::logDomain) { - for (size_t i = 0; i < links.size(); i++) { - probs += links[i]->message(); - } - LogAware::normalize (probs); - Util::exp (probs); - } else { - for (size_t i = 0; i < links.size(); i++) { - probs *= links[i]->message(); - } - LogAware::normalize (probs); - } - } - return probs; -} - - - -Params -BeliefProp::getJointDistributionOf (const VarIds& jointVarIds) -{ - if (runned_ == false) { - runSolver(); - } - VarNode* vn = fg.getVarNode (jointVarIds[0]); - const FacNodes& facNodes = vn->neighbors(); - size_t idx = facNodes.size(); - for (size_t i = 0; i < facNodes.size(); i++) { - if (facNodes[i]->factor().contains (jointVarIds)) { - idx = i; - break; - } - } - if (idx == facNodes.size()) { - return getJointByConditioning (jointVarIds); - } - return getFactorJoint (facNodes[idx], jointVarIds); -} - - - -Params -BeliefProp::getFactorJoint ( - FacNode* fn, - const VarIds& jointVarIds) -{ - if (runned_ == false) { - runSolver(); - } - Factor res (fn->factor()); - const BpLinks& links = ninf(fn)->getLinks(); - for (size_t i = 0; i < links.size(); i++) { - Factor msg ({links[i]->varNode()->varId()}, - {links[i]->varNode()->range()}, - getVarToFactorMsg (links[i])); - res.multiply (msg); - } - res.sumOutAllExcept (jointVarIds); - res.reorderArguments (jointVarIds); - res.normalize(); - Params jointDist = res.params(); - if (Globals::logDomain) { - Util::exp (jointDist); - } - return jointDist; -} - - - -void -BeliefProp::runSolver (void) -{ - initializeSolver(); - nIters_ = 0; - while (!converged() && nIters_ < BpOptions::maxIter) { - nIters_ ++; - if (Globals::verbosity > 1) { - Util::printHeader (string ("Iteration ") + Util::toString (nIters_)); - } - switch (BpOptions::schedule) { - case BpOptions::Schedule::SEQ_RANDOM: - std::random_shuffle (links_.begin(), links_.end()); - // no break - case BpOptions::Schedule::SEQ_FIXED: - for (size_t i = 0; i < links_.size(); i++) { - calculateAndUpdateMessage (links_[i]); - } - break; - case 
BpOptions::Schedule::PARALLEL: - for (size_t i = 0; i < links_.size(); i++) { - calculateMessage (links_[i]); - } - for (size_t i = 0; i < links_.size(); i++) { - updateMessage(links_[i]); - } - break; - case BpOptions::Schedule::MAX_RESIDUAL: - maxResidualSchedule(); - break; - } - } - if (Globals::verbosity > 0) { - if (nIters_ < BpOptions::maxIter) { - cout << "Belief propagation converged in " ; - cout << nIters_ << " iterations" << endl; - } else { - cout << "The maximum number of iterations was hit, terminating..." ; - cout << endl; - } - cout << endl; - } - runned_ = true; -} - - - -void -BeliefProp::createLinks (void) -{ - const FacNodes& facNodes = fg.facNodes(); - for (size_t i = 0; i < facNodes.size(); i++) { - const VarNodes& neighbors = facNodes[i]->neighbors(); - for (size_t j = 0; j < neighbors.size(); j++) { - links_.push_back (new BpLink (facNodes[i], neighbors[j])); - } - } -} - - - -void -BeliefProp::maxResidualSchedule (void) -{ - if (nIters_ == 1) { - for (size_t i = 0; i < links_.size(); i++) { - calculateMessage (links_[i]); - SortedOrder::iterator it = sortedOrder_.insert (links_[i]); - linkMap_.insert (make_pair (links_[i], it)); - } - return; - } - - for (size_t c = 0; c < links_.size(); c++) { - if (Globals::verbosity > 1) { - cout << "current residuals:" << endl; - for (SortedOrder::iterator it = sortedOrder_.begin(); - it != sortedOrder_.end(); ++it) { - cout << " " << setw (30) << left << (*it)->toString(); - cout << "residual = " << (*it)->residual() << endl; - } - } - - SortedOrder::iterator it = sortedOrder_.begin(); - BpLink* link = *it; - if (link->residual() < BpOptions::accuracy) { - return; - } - updateMessage (link); - link->clearResidual(); - sortedOrder_.erase (it); - linkMap_.find (link)->second = sortedOrder_.insert (link); - - // update the messages that depend on message source --> destin - const FacNodes& factorNeighbors = link->varNode()->neighbors(); - for (size_t i = 0; i < factorNeighbors.size(); i++) { - if (factorNeighbors[i] != link->facNode()) { - const BpLinks& links = ninf(factorNeighbors[i])->getLinks(); - for (size_t j = 0; j < links.size(); j++) { - if (links[j]->varNode() != link->varNode()) { - calculateMessage (links[j]); - BpLinkMap::iterator iter = linkMap_.find (links[j]); - sortedOrder_.erase (iter->second); - iter->second = sortedOrder_.insert (links[j]); - } - } - } - } - if (Globals::verbosity > 1) { - Util::printDashedLine(); - } - } -} - - - -void -BeliefProp::calcFactorToVarMsg (BpLink* link) -{ - FacNode* src = link->facNode(); - const VarNode* dst = link->varNode(); - const BpLinks& links = ninf(src)->getLinks(); - // calculate the product of messages that were sent - // to factor `src', except from var `dst' - unsigned reps = 1; - unsigned msgSize = Util::sizeExpected (src->factor().ranges()); - Params msgProduct (msgSize, LogAware::multIdenty()); - if (Globals::logDomain) { - for (size_t i = links.size(); i-- > 0; ) { - if (links[i]->varNode() != dst) { - if (Constants::SHOW_BP_CALCS) { - cout << " message from " << links[i]->varNode()->label(); - cout << ": " ; - } - Util::apply_n_times (msgProduct, getVarToFactorMsg (links[i]), - reps, std::plus()); - if (Constants::SHOW_BP_CALCS) { - cout << endl; - } - } - reps *= links[i]->varNode()->range(); - } - } else { - for (size_t i = links.size(); i-- > 0; ) { - if (links[i]->varNode() != dst) { - if (Constants::SHOW_BP_CALCS) { - cout << " message from " << links[i]->varNode()->label(); - cout << ": " ; - } - Util::apply_n_times (msgProduct, getVarToFactorMsg 
(links[i]), - reps, std::multiplies()); - if (Constants::SHOW_BP_CALCS) { - cout << endl; - } - } - reps *= links[i]->varNode()->range(); - } - } - Factor result (src->factor().arguments(), - src->factor().ranges(), msgProduct); - result.multiply (src->factor()); - if (Constants::SHOW_BP_CALCS) { - cout << " message product: " << msgProduct << endl; - cout << " original factor: " << src->factor().params() << endl; - cout << " factor product: " << result.params() << endl; - } - result.sumOutAllExcept (dst->varId()); - if (Constants::SHOW_BP_CALCS) { - cout << " marginalized: " << result.params() << endl; - } - link->nextMessage() = result.params(); - LogAware::normalize (link->nextMessage()); - if (Constants::SHOW_BP_CALCS) { - cout << " curr msg: " << link->message() << endl; - cout << " next msg: " << link->nextMessage() << endl; - } -} - - - -Params -BeliefProp::getVarToFactorMsg (const BpLink* link) const -{ - const VarNode* src = link->varNode(); - Params msg; - if (src->hasEvidence()) { - msg.resize (src->range(), LogAware::noEvidence()); - msg[src->getEvidence()] = LogAware::withEvidence(); - } else { - msg.resize (src->range(), LogAware::one()); - } - if (Constants::SHOW_BP_CALCS) { - cout << msg; - } - BpLinks::const_iterator it; - const BpLinks& links = ninf (src)->getLinks(); - if (Globals::logDomain) { - for (it = links.begin(); it != links.end(); ++it) { - if (*it != link) { - msg += (*it)->message(); - } - if (Constants::SHOW_BP_CALCS) { - cout << " x " << (*it)->message(); - } - } - } else { - for (it = links.begin(); it != links.end(); ++it) { - if (*it != link) { - msg *= (*it)->message(); - } - if (Constants::SHOW_BP_CALCS) { - cout << " x " << (*it)->message(); - } - } - } - if (Constants::SHOW_BP_CALCS) { - cout << " = " << msg; - } - return msg; -} - - - -Params -BeliefProp::getJointByConditioning (const VarIds& jointVarIds) const -{ - return GroundSolver::getJointByConditioning ( - GroundSolverType::BP, fg, jointVarIds); -} - - - -void -BeliefProp::initializeSolver (void) -{ - const VarNodes& varNodes = fg.varNodes(); - varsI_.reserve (varNodes.size()); - for (size_t i = 0; i < varNodes.size(); i++) { - varsI_.push_back (new SPNodeInfo()); - } - const FacNodes& facNodes = fg.facNodes(); - facsI_.reserve (facNodes.size()); - for (size_t i = 0; i < facNodes.size(); i++) { - facsI_.push_back (new SPNodeInfo()); - } - createLinks(); - for (size_t i = 0; i < links_.size(); i++) { - FacNode* src = links_[i]->facNode(); - VarNode* dst = links_[i]->varNode(); - ninf (dst)->addBpLink (links_[i]); - ninf (src)->addBpLink (links_[i]); - } -} - - - -bool -BeliefProp::converged (void) -{ - if (links_.size() == 0) { - return true; - } - if (nIters_ == 0) { - return false; - } - if (Globals::verbosity > 2) { - cout << endl; - } - if (nIters_ == 1) { - if (Globals::verbosity > 1) { - cout << "no residuals" << endl << endl; - } - return false; - } - bool converged = true; - if (BpOptions::schedule == BpOptions::Schedule::MAX_RESIDUAL) { - double maxResidual = (*(sortedOrder_.begin()))->residual(); - if (maxResidual > BpOptions::accuracy) { - converged = false; - } else { - converged = true; - } - } else { - for (size_t i = 0; i < links_.size(); i++) { - double residual = links_[i]->residual(); - if (Globals::verbosity > 1) { - cout << links_[i]->toString() + " residual = " << residual << endl; - } - if (residual > BpOptions::accuracy) { - converged = false; - if (Globals::verbosity < 2) { - break; - } - } - } - if (Globals::verbosity > 1) { - cout << endl; - } - } - return converged; 
-} - - - -void -BeliefProp::printLinkInformation (void) const -{ - for (size_t i = 0; i < links_.size(); i++) { - BpLink* l = links_[i]; - cout << l->toString() << ":" << endl; - cout << " curr msg = " ; - cout << l->message() << endl; - cout << " next msg = " ; - cout << l->nextMessage() << endl; - cout << " residual = " << l->residual() << endl; - } -} - diff --git a/packages/CLPBN/horus2/BeliefProp.h b/packages/CLPBN/horus2/BeliefProp.h deleted file mode 100644 index 64a41d916..000000000 --- a/packages/CLPBN/horus2/BeliefProp.h +++ /dev/null @@ -1,188 +0,0 @@ -#ifndef HORUS_BELIEFPROP_H -#define HORUS_BELIEFPROP_H - -#include -#include -#include - -#include "GroundSolver.h" -#include "Factor.h" -#include "FactorGraph.h" -#include "Util.h" - -using namespace std; - - -class BpLink -{ - public: - BpLink (FacNode* fn, VarNode* vn) - { - fac_ = fn; - var_ = vn; - v1_.resize (vn->range(), LogAware::log (1.0 / vn->range())); - v2_.resize (vn->range(), LogAware::log (1.0 / vn->range())); - currMsg_ = &v1_; - nextMsg_ = &v2_; - residual_ = 0.0; - } - - virtual ~BpLink (void) { }; - - FacNode* facNode (void) const { return fac_; } - - VarNode* varNode (void) const { return var_; } - - const Params& message (void) const { return *currMsg_; } - - Params& nextMessage (void) { return *nextMsg_; } - - double residual (void) const { return residual_; } - - void clearResidual (void) { residual_ = 0.0; } - - void updateResidual (void) - { - residual_ = LogAware::getMaxNorm (v1_,v2_); - } - - virtual void updateMessage (void) - { - swap (currMsg_, nextMsg_); - } - - string toString (void) const - { - stringstream ss; - ss << fac_->getLabel(); - ss << " -- " ; - ss << var_->label(); - return ss.str(); - } - - protected: - FacNode* fac_; - VarNode* var_; - Params v1_; - Params v2_; - Params* currMsg_; - Params* nextMsg_; - double residual_; -}; - -typedef vector BpLinks; - - -class SPNodeInfo -{ - public: - void addBpLink (BpLink* link) { links_.push_back (link); } - const BpLinks& getLinks (void) { return links_; } - private: - BpLinks links_; -}; - - -class BeliefProp : public GroundSolver -{ - public: - BeliefProp (const FactorGraph&); - - virtual ~BeliefProp (void); - - Params solveQuery (VarIds); - - virtual void printSolverFlags (void) const; - - virtual Params getPosterioriOf (VarId); - - virtual Params getJointDistributionOf (const VarIds&); - - protected: - void runSolver (void); - - virtual void createLinks (void); - - virtual void maxResidualSchedule (void); - - virtual void calcFactorToVarMsg (BpLink*); - - virtual Params getVarToFactorMsg (const BpLink*) const; - - virtual Params getJointByConditioning (const VarIds&) const; - - public: - Params getFactorJoint (FacNode* fn, const VarIds&); - - protected: - SPNodeInfo* ninf (const VarNode* var) const - { - return varsI_[var->getIndex()]; - } - - SPNodeInfo* ninf (const FacNode* fac) const - { - return facsI_[fac->getIndex()]; - } - - void calculateAndUpdateMessage (BpLink* link, bool calcResidual = true) - { - if (Globals::verbosity > 2) { - cout << "calculating & updating " << link->toString() << endl; - } - calcFactorToVarMsg (link); - if (calcResidual) { - link->updateResidual(); - } - link->updateMessage(); - } - - void calculateMessage (BpLink* link, bool calcResidual = true) - { - if (Globals::verbosity > 2) { - cout << "calculating " << link->toString() << endl; - } - calcFactorToVarMsg (link); - if (calcResidual) { - link->updateResidual(); - } - } - - void updateMessage (BpLink* link) - { - link->updateMessage(); - if 
(Globals::verbosity > 2) { - cout << "updating " << link->toString() << endl; - } - } - - struct CompareResidual - { - inline bool operator() (const BpLink* link1, const BpLink* link2) - { - return link1->residual() > link2->residual(); - } - }; - - BpLinks links_; - unsigned nIters_; - vector varsI_; - vector facsI_; - bool runned_; - - typedef multiset SortedOrder; - SortedOrder sortedOrder_; - - typedef unordered_map BpLinkMap; - BpLinkMap linkMap_; - - private: - void initializeSolver (void); - - bool converged (void); - - virtual void printLinkInformation (void) const; -}; - -#endif // HORUS_BELIEFPROP_H - diff --git a/packages/CLPBN/horus2/ConstraintTree.cpp b/packages/CLPBN/horus2/ConstraintTree.cpp deleted file mode 100644 index 0546d0852..000000000 --- a/packages/CLPBN/horus2/ConstraintTree.cpp +++ /dev/null @@ -1,1174 +0,0 @@ -#include - -#include - -#include "ConstraintTree.h" -#include "Util.h" - - -void -CTNode::mergeSubtree (CTNode* n, bool updateLevels) -{ - if (updateLevels) { - updateChildLevels (n, level_ + 1); - } - CTChilds::iterator chIt = childs_.find (n); - if (chIt != childs_.end()) { - assert ((*chIt)->symbol() == n->symbol()); - const CTChilds& childsToAdd = n->childs(); - for (CTChilds::const_iterator it = childsToAdd.begin(); - it != childsToAdd.end(); ++ it) { - (*chIt)->mergeSubtree (*it, false); - } - delete n; - } else { - childs_.insert (n); - } -} - - - -void -CTNode::removeChild (CTNode* child) -{ - assert (childs_.contains (child)); - childs_.remove (child); -} - - - -void -CTNode::removeChilds (void) -{ - childs_.clear(); -} - - - -void -CTNode::removeAndDeleteChild (CTNode* child) -{ - removeChild (child); - CTNode::deleteSubtree (child); -} - - - -void -CTNode::removeAndDeleteAllChilds (void) -{ - for (CTChilds::const_iterator chIt = childs_.begin(); - chIt != childs_.end(); ++ chIt) { - deleteSubtree (*chIt); - } - childs_.clear(); -} - - - -SymbolSet -CTNode::childSymbols (void) const -{ - SymbolSet symbols; - for (CTChilds::const_iterator chIt = childs_.begin(); - chIt != childs_.end(); ++ chIt) { - symbols.insert ((*chIt)->symbol()); - } - return symbols; -} - - - -void -CTNode::updateChildLevels (CTNode* n, unsigned level) -{ - CTNodes stack; - stack.push_back (n); - n->setLevel (level); - while (stack.empty() == false) { - CTNode* node = stack.back(); - stack.pop_back(); - for (CTChilds::const_iterator chIt = node->childs().begin(); - chIt != node->childs().end(); ++ chIt) { - (*chIt)->setLevel (node->level() + 1); - } - stack.insert (stack.end(), node->childs().begin(), - node->childs().end()); - } -} - - - -CTNode* -CTNode::copySubtree (const CTNode* root1) -{ - if (root1->childs().empty()) { - return new CTNode (*root1); - } - CTNode* root2 = new CTNode (*root1); - typedef pair StackPair; - vector stack = { StackPair (root1, root2) }; - while (stack.empty() == false) { - const CTNode* n1 = stack.back().first; - CTNode* n2 = stack.back().second; - stack.pop_back(); - // cout << "n2 childs: " << n2->childs(); - // cout << "n1 childs: " << n1->childs(); - n2->childs().reserve (n1->nrChilds()); - stack.reserve (n1->nrChilds()); - for (CTChilds::const_iterator chIt = n1->childs().begin(); - chIt != n1->childs().end(); ++ chIt) { - CTNode* chCopy = new CTNode (**chIt); - n2->childs().insert_sorted (chCopy); - if ((*chIt)->nrChilds() != 0) { - stack.push_back (StackPair (*chIt, chCopy)); - } - } - } - return root2; -} - - - -void -CTNode::deleteSubtree (CTNode* n) -{ - assert (n); - const CTChilds& childs = n->childs(); - for 
(CTChilds::const_iterator chIt = childs.begin(); - chIt != childs.end(); ++ chIt) { - deleteSubtree (*chIt); - } - delete n; -} - - - -ostream& operator<< (ostream &out, const CTNode& n) -{ - out << "(" << n.level() << ") " ; - out << n.symbol(); - return out; -} - - - -ConstraintTree::ConstraintTree (unsigned nrLvs) -{ - for (unsigned i = 0; i < nrLvs; i++) { - logVars_.push_back (LogVar (i)); - } - root_ = new CTNode (0, 0); - logVarSet_ = LogVarSet (logVars_); -} - - - -ConstraintTree::ConstraintTree (const LogVars& logVars) -{ - root_ = new CTNode (0, 0); - logVars_ = logVars; - logVarSet_ = LogVarSet (logVars); -} - - - -ConstraintTree::ConstraintTree ( - const LogVars& logVars, - const Tuples& tuples) -{ - root_ = new CTNode (0, 0); - logVars_ = logVars; - logVarSet_ = LogVarSet (logVars); - for (size_t i = 0; i < tuples.size(); i++) { - addTuple (tuples[i]); - } -} - - - -ConstraintTree::ConstraintTree (vector> names) -{ - assert (names.empty() == false); - assert (names.front().empty() == false); - unsigned nrLvs = names[0].size(); - for (size_t i = 0; i < nrLvs; i++) { - logVars_.push_back (LogVar (i)); - } - root_ = new CTNode (0, 0); - logVarSet_ = LogVarSet (logVars_); - for (size_t i = 0; i < names.size(); i++) { - Tuple t; - for (size_t j = 0; j < names[i].size(); j++) { - assert (names[i].size() == nrLvs); - t.push_back (LiftedUtils::getSymbol (names[i][j])); - } - addTuple (t); - } -} - - - -ConstraintTree::ConstraintTree (const ConstraintTree& ct) -{ - *this = ct; -} - - - -ConstraintTree::~ConstraintTree (void) -{ - CTNode::deleteSubtree (root_); -} - - - -void -ConstraintTree::addTuple (const Tuple& tuple) -{ - CTNode* prevNode = root_; - for (size_t i = 0; i < tuple.size(); i++) { - CTChilds::const_iterator it = prevNode->findSymbol (tuple[i]); - if (it == prevNode->childs().end()) { - CTNode* newNode = new CTNode (tuple[i], i + 1); - prevNode->mergeSubtree (newNode, false); - prevNode = newNode; - } else { - prevNode = *it; - } - } -} - - - -bool -ConstraintTree::containsTuple (const Tuple& tuple) -{ - CTNode* prevNode = root_; - for (size_t i = 0; i < tuple.size(); i++) { - CTChilds::const_iterator it = prevNode->findSymbol (tuple[i]); - if (it == prevNode->childs().end()) { - return false; - } else { - prevNode = *it; - } - } - return true; -} - - - -void -ConstraintTree::moveToTop (const LogVars& lvs) -{ - for (size_t i = 0; i < lvs.size(); i++) { - size_t pos = Util::indexOf (logVars_, lvs[i]); - assert (pos != logVars_.size()); - for (size_t j = pos; j-- > i; ) { - swapLogVar (logVars_[j]); - } - } -} - - - -void -ConstraintTree::moveToBottom (const LogVars& lvs) -{ - for (size_t i = lvs.size(); i-- > 0; ) { - size_t pos = Util::indexOf (logVars_, lvs[i]); - assert (pos != logVars_.size()); - size_t stop = logVars_.size() - (lvs.size() - i - 1); - for (size_t j = pos; j < stop - 1; j++) { - swapLogVar (logVars_[j]); - } - } -} - - - -void -ConstraintTree::join (ConstraintTree* ct, bool oneTwoOne) -{ - if (logVarSet_.empty()) { - CTNode::deleteSubtree (root_); - root_ = CTNode::copySubtree (ct->root()); - logVars_ = ct->logVars(); - logVarSet_ = ct->logVarSet(); - return; - } - if (oneTwoOne) { - if (logVarSet_.contains (ct->logVarSet())) { - return; - } - if (ct->logVarSet().contains (logVarSet_)) { - CTNode::deleteSubtree (root_); - root_ = CTNode::copySubtree (ct->root()); - logVars_ = ct->logVars(); - logVarSet_ = ct->logVarSet(); - return; - } - } - LogVarSet intersect = logVarSet_ & ct->logVarSet_; - if (intersect.empty()) { - // cartesian product - 
appendOnBottom (root_, ct->root()->childs()); - Util::addToVector (logVars_, ct->logVars_); - logVarSet_ |= ct->logVarSet_; - } else { - moveToTop (intersect.elements()); - ct->moveToTop (intersect.elements()); - - Tuples tuples; - CTNodes appendNodes; - getTuples (ct->root(), Tuples(), intersect.size(), - tuples, appendNodes); - - CTNodes::const_iterator appendIt = appendNodes.begin(); - for (size_t i = 0; i < tuples.size(); ++ i, ++ appendIt) { - bool tupleFounded = join (root_, tuples[i], 0, *appendIt); - if (oneTwoOne && tupleFounded == false) { - assert (false); - } - } - - LogVars newLvs (ct->logVars().begin() + intersect.size(), - ct->logVars().end()); - Util::addToVector (logVars_, newLvs); - logVarSet_ |= LogVarSet (newLvs); - } -} - - - -unsigned -ConstraintTree::getLevel (LogVar X) const -{ - unsigned level = Util::indexOf (logVars_, X); - level += 1; // root is in level 0, first logVar is in level 1 - return level; -} - - - -void -ConstraintTree::rename (LogVar X_old, LogVar X_new) -{ - assert (logVarSet_.contains (X_old)); - assert (logVarSet_.contains (X_new) == false); - logVarSet_ -= X_old; - logVarSet_ |= X_new; - for (size_t i = 0; i < logVars_.size(); i++) { - if (logVars_[i] == X_old) { - logVars_[i] = X_new; - return; - } - } - assert (false); -} - - - -void -ConstraintTree::applySubstitution (const Substitution& theta) -{ - for (size_t i = 0; i < logVars_.size(); i++) { - logVars_[i] = theta.newNameFor (logVars_[i]); - } - logVarSet_ = LogVarSet (logVars_); -} - - - -void -ConstraintTree::project (const LogVarSet& X) -{ - assert (logVarSet_.contains (X)); - remove ((logVarSet_ - X)); -} - - - -ConstraintTree -ConstraintTree::projectedCopy (const LogVarSet& X) -{ - ConstraintTree copy = *this; - copy.project (X); - return copy; -} - - - -void -ConstraintTree::remove (const LogVarSet& X) -{ - assert (logVarSet_.contains (X)); - if (X.empty()) { - return; - } - moveToBottom (X.elements()); - unsigned level = getLevel (X.front()) - 1; - CTNodes nodes = getNodesAtLevel (level); - for (CTNodes::const_iterator it = nodes.begin(); - it != nodes.end(); ++ it) { - (*it)->removeAndDeleteAllChilds(); - } - logVars_.resize (logVars_.size() - X.size()); - logVarSet_ -= X; -} - - - -bool -ConstraintTree::ConstraintTree::isSingleton (LogVar X) -{ - Symbol symb; - unsigned level = getLevel (X); - CTNodes stack; - stack.push_back (root_); - while (stack.empty() == false) { - CTNode* node = stack.back(); - stack.pop_back(); - if (node->level() == level) { - if (symb.valid()) { - if (node->symbol() != symb) { - return false; - } - } else { - symb = node->symbol(); - } - } else { - stack.insert (stack.end(), node->childs().begin(), - node->childs().end()); - } - } - return true; -} - - - -LogVarSet -ConstraintTree::singletons (void) -{ - LogVarSet singletons; - for (size_t i = 0; i < logVars_.size(); i++) { - if (isSingleton (logVars_[i])) { - singletons.insert (logVars_[i]); - } - } - return singletons; -} - - - -TupleSet -ConstraintTree::tupleSet (unsigned stopLevel) const -{ - assert (root_->isRoot()); - Tuples tuples; - if (stopLevel == 0) { - stopLevel = logVars_.size(); - } - getTuples (root_, Tuples(), stopLevel, tuples, CTNodes() = {}); - return TupleSet (tuples); -} - - - -TupleSet -ConstraintTree::tupleSet (const LogVars& originalLvs) -{ - LogVars uniqueLvs; - for (size_t i = 0; i < originalLvs.size(); i++) { - if (Util::contains (uniqueLvs, originalLvs[i]) == false) { - uniqueLvs.push_back (originalLvs[i]); - } - } - - Tuples tuples; - moveToTop (uniqueLvs); - unsigned 
stopLevel = uniqueLvs.size(); - getTuples (root_, Tuples(), stopLevel, tuples, CTNodes() = {}); - - if (originalLvs.size() != uniqueLvs.size()) { - vector indexes; - indexes.reserve (originalLvs.size()); - for (size_t i = 0; i < originalLvs.size(); i++) { - indexes.push_back (Util::indexOf (uniqueLvs, originalLvs[i])); - } - Tuples tuples2; - tuples2.reserve (tuples.size()); - for (size_t i = 0; i < tuples.size(); i++) { - Tuple t; - t.reserve (originalLvs.size()); - for (size_t j = 0; j < originalLvs.size(); j++) { - t.push_back (tuples[i][indexes[j]]); - } - tuples2.push_back (t); - } - return TupleSet (tuples2); - } - - return TupleSet (tuples); -} - - - -void -ConstraintTree::exportToGraphViz ( - const char* fileName, - bool showLogVars) const -{ - ofstream out (fileName); - if (!out.is_open()) { - cerr << "Error: couldn't open file '" << fileName << "'." ; - return; - } - out << "digraph {" << endl; - ConstraintTree copy (*this); - copy.moveToTop (copy.logVarSet_.elements()); - CTNodes nodes = getNodesBelow (copy.root_); - out << "\"" << copy.root_ << "\"" << " [label=\"R\"]" << endl; - for (CTNodes::const_iterator it = ++ nodes.begin(); - it != nodes.end(); ++ it) { - out << "\"" << *it << "\""; - out << " [label=\"" << **it << "\"]" ; - out << endl; - } - for (CTNodes::const_iterator it = nodes.begin(); - it != nodes.end(); ++ it) { - const CTChilds& childs = (*it)->childs(); - for (CTChilds::const_iterator chIt = childs.begin(); - chIt != childs.end(); ++ chIt) { - out << "\"" << *it << "\"" ; - out << " -> " ; - out << "\"" << *chIt << "\"" << endl ; - } - } - if (showLogVars) { - out << "Root [label=\"\", shape=plaintext]" << endl; - for (size_t i = 0; i < copy.logVars_.size(); i++) { - out << copy.logVars_[i] << " [label=" ; - out << copy.logVars_[i] << ", " ; - out << "shape=plaintext, fontsize=14]" << endl; - } - out << "Root -> " << copy.logVars_[0]; - out << " [style=invis]" << endl; - for (size_t i = 0; i < copy.logVars_.size() - 1; i++) { - out << copy.logVars_[i] << " -> " << copy.logVars_[i + 1]; - out << " [style=invis]" << endl; - } - } - out << "}" << endl; - out.close(); -} - - - -bool -ConstraintTree::isCountNormalized (const LogVarSet& Ys) -{ - assert (logVarSet_.contains (Ys)); - if (Ys.empty()) { - return true; - } - if (Ys.size() == logVars_.size()) { - assert (LogVarSet (logVars_) == LogVarSet (Ys)); - return true; - } - LogVarSet Zs = logVarSet_ - LogVarSet (Ys); - moveToTop (Zs.elements()); - CTNodes nodes = getNodesAtLevel (Zs.size()); - unsigned count = countTuples (*nodes.begin()); - for (CTNodes::const_iterator it = nodes.begin(); - it != nodes.end(); ++ it) { - if (countTuples (*it) != count) { - return false; - } - } - return true; -} - - - -unsigned -ConstraintTree::getConditionalCount (const LogVarSet& Ys) -{ - assert (isCountNormalized (Ys)); - if (Ys.empty()) { - return 1; - } - if (Ys.size() == logVars_.size()) { - assert (LogVarSet (Ys) == LogVarSet (logVars_)); - return countTuples (root_); - } - LogVarSet Zs = logVarSet_ - Ys; - moveToTop (Zs.elements()); - CTNode* n = root_; - unsigned l = 0; - while (l != Zs.size()) { - n = *(n->childs().begin()); - l ++; - } - return countTuples (n); -} - - - -TinySet -ConstraintTree::getConditionalCounts (const LogVarSet& Ys) -{ - TinySet counts; - assert (logVarSet_.contains (Ys)); - if (Ys.empty()) { - counts.insert (1); - } else if (Ys.size() == logVars_.size()) { - assert (LogVarSet (logVars_) == LogVarSet (Ys)); - counts.insert (countTuples (root_)); - } else { - LogVarSet Zs = logVarSet_ - LogVarSet 
(Ys); - moveToTop (Zs.elements()); - CTNodes nodes = getNodesAtLevel (Zs.size()); - for (CTNodes::const_iterator it = nodes.begin(); - it != nodes.end(); ++ it) { - counts.insert (countTuples (*it)); - } - } - return counts; -} - - - -bool -ConstraintTree::isCartesianProduct (const LogVarSet& Xs) -{ - assert (logVarSet_.contains (Xs)); - if (Xs.size() <= 1) { - return true; - } - moveToTop (Xs.elements()); - for (size_t i = 1; i < Xs.size(); i++) { - CTNodes nodes = getNodesAtLevel (i); - for (size_t j = 1; j < nodes.size(); j++) { - if (nodes[j-1]->nrChilds() != nodes[ j ]->nrChilds()) { - return false; - } - if (nodes[j-1]->childSymbols() != nodes[ j ]->childSymbols()) { - return false; - } - } - } - return true; -} - - - -std::pair -ConstraintTree::split (const Tuple& tuple) -{ - // assumes that my log vars are already on top - LogVars lvs (logVars_.begin(), logVars_.begin() + tuple.size()); - ConstraintTree tempCt (logVars_, {tuple}); - return split (lvs, &tempCt, lvs); -} - - - -std::pair -ConstraintTree::split ( - const LogVars& lvs1, - ConstraintTree* ct, - const LogVars& lvs2) -{ - assert (lvs1.size() == lvs2.size()); - assert (lvs1.size() == LogVarSet (lvs1).size()); - assert (lvs2.size() == LogVarSet (lvs2).size()); - assert (logVarSet_.contains (lvs1)); - assert (ct->logVarSet().contains (lvs2)); - CTChilds commChilds, exclChilds; - unsigned stopLevel = lvs1.size(); - split (root_, ct->root(), commChilds, exclChilds, stopLevel); - ConstraintTree* commCt = new ConstraintTree (commChilds, logVars_); - ConstraintTree* exclCt = new ConstraintTree (exclChilds, logVars_); - // cout << commCt->tupleSet() << " + " ; - // cout << exclCt->tupleSet() << " = " ; - // cout << tupleSet() << endl; - assert ((commCt->tupleSet() | exclCt->tupleSet()) == tupleSet()); - assert ((exclCt->tupleSet (stopLevel) & ct->tupleSet (stopLevel)).empty()); - return {commCt, exclCt}; -} - - - -ConstraintTrees -ConstraintTree::countNormalize (const LogVarSet& Ys) -{ - assert (logVarSet_.contains (Ys)); - LogVarSet Zs = logVarSet_ - Ys; - if (Ys.empty() || Zs.empty()) { - return { new ConstraintTree (*this) }; - } - moveToTop (Zs.elements()); - ConstraintTrees cts; - unordered_map countMap; - unsigned stopLevel = getLevel (Zs.back()); - const CTChilds& childs = root_->childs(); - - for (CTChilds::const_iterator chIt = childs.begin(); - chIt != childs.end(); ++ chIt) { - const vector>& res = - countNormalize (*chIt, stopLevel); - for (size_t j = 0; j < res.size(); j++) { - unordered_map::iterator it - = countMap.find (res[j].second); - if (it == countMap.end()) { - ConstraintTree* newCt = new ConstraintTree (logVars_); - it = countMap.insert (make_pair (res[j].second, newCt)).first; - cts.push_back (newCt); - } - it->second->root_->mergeSubtree (res[j].first); - } - } - return cts; -} - - - -ConstraintTrees -ConstraintTree::jointCountNormalize ( - ConstraintTree* commCt, - ConstraintTree* exclCt, - LogVar X, - LogVar X_new1, - LogVar X_new2) -{ - unsigned N = getConditionalCount (X); - // cout << "My tuples: " << tupleSet() << endl; - // cout << "CommCt tuples: " << commCt->tupleSet() << endl; - // cout << "ExclCt tuples: " << exclCt->tupleSet() << endl; - // cout << "Counted Lv: " << X << endl; - // cout << "X_new1: " << X_new1 << endl; - // cout << "X_new2: " << X_new2 << endl; - // cout << "Original N: " << N << endl; - // cout << endl; - - ConstraintTrees normCts1 = commCt->countNormalize (X); - vector counts1 (normCts1.size()); - for (size_t i = 0; i < normCts1.size(); i++) { - counts1[i] = 
normCts1[i]->getConditionalCount (X); - // cout << "normCts1[" << i << "] #" << counts1[i] ; - // cout << " " << normCts1[i]->tupleSet() << endl; - } - - ConstraintTrees normCts2 = exclCt->countNormalize (X); - vector counts2 (normCts2.size()); - for (size_t i = 0; i < normCts2.size(); i++) { - counts2[i] = normCts2[i]->getConditionalCount (X); - // cout << "normCts2[" << i << "] #" << counts2[i] ; - // cout << " " << normCts2[i]->tupleSet() << endl; - } - // cout << endl; - - ConstraintTree* excl1 = 0; - for (size_t i = 0; i < normCts1.size(); i++) { - if (counts1[i] == N) { - excl1 = normCts1[i]; - normCts1.erase (normCts1.begin() + i); - counts1.erase (counts1.begin() + i); - // cout << "joint-count(" << N << ",0)" << endl; - break; - } - } - - ConstraintTree* excl2 = 0; - for (size_t i = 0; i < normCts2.size(); i++) { - if (counts2[i] == N) { - excl2 = normCts2[i]; - normCts2.erase (normCts2.begin() + i); - counts2.erase (counts2.begin() + i); - // cout << "joint-count(0," << N << ")" << endl; - break; - } - } - - for (size_t i = 0; i < normCts1.size(); i++) { - unsigned j; - for (j = 0; counts1[i] + counts2[j] != N; j++) ; - // cout << "joint-count(" << counts1[i] ; - // cout << "," << counts2[j] << ")" << endl; - const CTChilds& childs = normCts2[j]->root_->childs(); - for (CTChilds::const_iterator chIt = childs.begin(); - chIt != childs.end(); ++ chIt) { - normCts1[i]->root_->mergeSubtree (CTNode::copySubtree (*chIt)); - } - delete normCts2[j]; - } - - ConstraintTrees cts = normCts1; - commCt->rename (X, X_new1); - exclCt->rename (X, X_new2); - for (size_t i = 0; i < cts.size(); i++) { - cts[i]->remove (X); - cts[i]->join (commCt); - cts[i]->join (exclCt); - } - - if (excl1 != 0) { - cts.push_back (excl1); - } - if (excl2 != 0) { - cts.push_back (excl2); - } - - return cts; -} - - - -LogVars -ConstraintTree::expand (LogVar X) -{ - moveToBottom ({X}); - assert (isCountNormalized (X)); - CTNodes nodes = getNodesAtLevel (logVars_.size() - 1); - unsigned nrSymbols = getConditionalCount (X); - for (CTNodes::const_iterator it = nodes.begin(); - it != nodes.end(); ++ it) { - Symbols symbols; - const CTChilds& childs = (*it)->childs(); - for (CTChilds::const_iterator chIt = childs.begin(); - chIt != childs.end(); ++ chIt) { - symbols.push_back ((*chIt)->symbol()); - } - (*it)->removeAndDeleteAllChilds(); - CTNode* prev = *it; - assert (symbols.size() == nrSymbols); - for (size_t j = 0; j < nrSymbols; j++) { - CTNode* newNode = new CTNode (symbols[j], (*it)->level() + j); - prev->mergeSubtree (newNode); - prev = newNode; - } - } - LogVars newLvs; - logVars_.pop_back(); - for (size_t i = 0; i < nrSymbols; i++) { - logVars_.push_back (LogVar (logVarSet_.back() + 1)); - newLvs.push_back (LogVar (logVarSet_.back() + 1)); - logVarSet_.insert (LogVar (logVarSet_.back() + 1)); - } - logVarSet_ -= X; - return newLvs; -} - - - -ConstraintTrees -ConstraintTree::ground (LogVar X) -{ - moveToTop ({X}); - ConstraintTrees cts; - const CTChilds& nodes = root_->childs(); - for (CTChilds::const_iterator it = nodes.begin(); - it != nodes.end(); ++ it) { - CTNode* copy = CTNode::copySubtree (*it); - copy->setSymbol ((*it)->symbol()); - ConstraintTree* newCt = new ConstraintTree (logVars_); - newCt->root()->mergeSubtree (copy); - cts.push_back (newCt); - } - return cts; -} - - - -void -ConstraintTree::cloneLogVar (LogVar X_1, LogVar X_2) -{ - moveToBottom ({X_1}); - CTNodes leafs = getNodesAtLevel (logVars_.size()); - for (size_t i = 0; i < leafs.size(); i++) { - leafs[i]->childs().insert_sorted ( - new CTNode 
(leafs[i]->symbol(), leafs[i]->level() + 1)); - } - logVars_.push_back (X_2); - logVarSet_.insert (X_2); -} - - - -ConstraintTree& -ConstraintTree::operator= (const ConstraintTree& ct) -{ - if (this != &ct) { - root_ = CTNode::copySubtree (ct.root_); - logVars_ = ct.logVars_; - logVarSet_ = ct.logVarSet_; - } - return *this; -} - - - -unsigned -ConstraintTree::countTuples (const CTNode* n) const -{ - if (n->isLeaf()) { - return 1; - } - unsigned sum = 0; - const CTChilds& childs = n->childs(); - for (CTChilds::const_iterator chIt = childs.begin(); - chIt != childs.end(); ++ chIt) { - sum += countTuples (*chIt); - } - return sum; -} - - - -CTNodes -ConstraintTree::getNodesBelow (CTNode* fromHere) const -{ - CTNodes nodes; - queue queue; - queue.push (fromHere); - while (queue.empty() == false) { - CTNode* node = queue.front(); - nodes.push_back (node); - for (CTChilds::const_iterator chIt = node->childs().begin(); - chIt != node->childs().end(); ++ chIt) { - queue.push (*chIt); - } - queue.pop(); - } - return nodes; -} - - - -CTNodes -ConstraintTree::getNodesAtLevel (unsigned level) const -{ - assert (level <= logVars_.size()); - if (level == 0) { - return { root_ }; - } - CTNodes stack; - CTNodes nodes; - stack.push_back (root_); - while (stack.empty() == false) { - CTNode* node = stack.back(); - stack.pop_back(); - if (node->level() + 1 == level) { - nodes.insert (nodes.end(), node->childs().begin(), - node->childs().end()); - } else { - stack.insert (stack.end(), node->childs().begin(), - node->childs().end()); - } - } - return nodes; -} - - - -unsigned -ConstraintTree::nrNodes (const CTNode* n) const -{ - unsigned nr = 0; - if (n->isLeaf() == false) { - for (CTChilds::const_iterator chIt = n->childs().begin(); - chIt != n->childs().end(); ++ chIt) { - nr += nrNodes (*chIt); - } - } - return nr; -} - - - -void -ConstraintTree::appendOnBottom (CTNode* n, const CTChilds& childs) -{ - if (childs.empty()) { - return; - } - CTNodes stack { n }; - while (stack.empty() == false) { - CTNode* node = stack.back(); - stack.pop_back(); - if (node->isLeaf()) { - for (CTChilds::const_iterator chIt = childs.begin(); - chIt != childs.end(); ++ chIt) { - node->mergeSubtree (CTNode::copySubtree (*chIt)); - } - } else { - stack.insert (stack.end(), node->childs().begin(), - node->childs().end()); - } - } -} - - - -void -ConstraintTree::swapLogVar (LogVar X) -{ - size_t pos = Util::indexOf (logVars_, X); - assert (pos != logVars_.size()); - const CTNodes& nodes = getNodesAtLevel (pos); - for (CTNodes::const_iterator nodeIt = nodes.begin(); - nodeIt != nodes.end(); ++ nodeIt) { - CTChilds childsCopy = (*nodeIt)->childs(); - (*nodeIt)->removeChilds(); - for (CTChilds::const_iterator ccIt = childsCopy.begin(); - ccIt != childsCopy.end(); ++ ccIt) { - const CTChilds& grandsons = (*ccIt)->childs(); - for (CTChilds::const_iterator gsIt = grandsons.begin(); - gsIt != grandsons.end(); ++ gsIt) { - CTNode* childCopy = new CTNode ( - (*ccIt)->symbol(), (*ccIt)->level() + 1, (*gsIt)->childs()); - (*gsIt)->removeChilds(); - (*gsIt)->childs().insert_sorted (childCopy); - (*gsIt)->setLevel ((*gsIt)->level() - 1); - (*nodeIt)->mergeSubtree ((*gsIt), false); - } - delete (*ccIt); - } - } - std::swap (logVars_[pos], logVars_[pos + 1]); -} - - - -bool -ConstraintTree::join ( - CTNode* currNode, - const Tuple& tuple, - size_t currIdx, - CTNode* appendNode) -{ - bool tupleFounded = false; - CTChilds::const_iterator it = currNode->findSymbol (tuple[currIdx]); - if (it != currNode->childs().end()) { - if (currIdx == tuple.size() 
- 1) { - appendOnBottom (*it, appendNode->childs()); - return true; - } else { - tupleFounded = join (*it, tuple, currIdx + 1, appendNode); - } - } - return tupleFounded; -} - - - -void -ConstraintTree::getTuples ( - CTNode* n, - Tuples currTuples, - unsigned stopLevel, - Tuples& tuplesCollected, - CTNodes& continuationNodes) const -{ - if (n->isRoot() == false) { - if (currTuples.size() == 0) { - currTuples.push_back ({ n->symbol()}); - } else { - for (size_t i = 0; i < currTuples.size(); i++) { - currTuples[i].push_back (n->symbol()); - } - } - if (n->level() == stopLevel) { - for (size_t i = 0; i < currTuples.size(); i++) { - tuplesCollected.push_back (currTuples[i]); - continuationNodes.push_back (n); - } - return; - } - } - const CTChilds& childs = n->childs(); - for (CTChilds::const_iterator chIt = childs.begin(); - chIt != childs.end(); ++ chIt) { - getTuples (*chIt, currTuples, stopLevel, tuplesCollected, - continuationNodes); - } -} - - - -unsigned -ConstraintTree::size (void) const -{ - return countTuples (root_); -} - - - -unsigned -ConstraintTree::nrSymbols (LogVar X) -{ - moveToTop ({X}); - return root_->childs().size(); -} - - - -vector> -ConstraintTree::countNormalize ( - const CTNode* n, - unsigned stopLevel) -{ - if (n->level() == stopLevel) { - return vector>() = { - make_pair (CTNode::copySubtree (n), countTuples (n)) - }; - } - vector> res; - const CTChilds& childs = n->childs(); - for (CTChilds::const_iterator chIt = childs.begin(); - chIt != childs.end(); ++ chIt) { - const vector>& lowerRes = - countNormalize (*chIt, stopLevel); - for (size_t j = 0; j < lowerRes.size(); j++) { - CTNode* newNode = new CTNode (*n); - newNode->mergeSubtree (lowerRes[j].first); - res.push_back (make_pair (newNode, lowerRes[j].second)); - } - } - return res; -} - - - -void -ConstraintTree::split ( - CTNode* n1, - CTNode* n2, - CTChilds& commChilds, - CTChilds& exclChilds, - unsigned stopLevel) -{ - CTChilds& childs1 = n1->childs(); - for (CTChilds::const_iterator chIt1 = childs1.begin(); - chIt1 != childs1.end(); ++ chIt1) { - CTChilds::iterator chIt2 = n2->findSymbol ((*chIt1)->symbol()); - if (chIt2 == n2->childs().end()) { - exclChilds.insert_sorted (CTNode::copySubtree (*chIt1)); - } else { - if ((*chIt1)->level() == stopLevel) { - commChilds.insert_sorted (CTNode::copySubtree (*chIt1)); - } else { - CTChilds lowerCommChilds, lowerExclChilds; - split (*chIt1, *chIt2, lowerCommChilds, lowerExclChilds, stopLevel); - if (lowerCommChilds.empty() == false) { - commChilds.insert_sorted (new CTNode (**chIt1, lowerCommChilds)); - } - if (lowerExclChilds.empty() == false) { - exclChilds.insert_sorted (new CTNode (**chIt1, lowerExclChilds)); - } - } - } - } -} - diff --git a/packages/CLPBN/horus2/ConstraintTree.h b/packages/CLPBN/horus2/ConstraintTree.h deleted file mode 100644 index cccb070b4..000000000 --- a/packages/CLPBN/horus2/ConstraintTree.h +++ /dev/null @@ -1,237 +0,0 @@ -#ifndef HORUS_CONSTRAINTTREE_H -#define HORUS_CONSTRAINTTREE_H - -#include -#include - -#include -#include - -#include "TinySet.h" -#include "LiftedUtils.h" - -using namespace std; - - -class CTNode; -typedef vector CTNodes; - -class ConstraintTree; -typedef vector ConstraintTrees; - - -class CTNode -{ - public: - - struct CompareSymbol - { - bool operator() (const CTNode* n1, const CTNode* n2) const - { - return n1->symbol() < n2->symbol(); - } - }; - - private: - - typedef TinySet CTChilds_; - - public: - - CTNode (const CTNode& n, const CTChilds_& chs = CTChilds_()) - : symbol_(n.symbol()), childs_(chs), 
level_(n.level()) { } - - CTNode (Symbol s, unsigned l, const CTChilds_& chs = CTChilds_()) - : symbol_(s), childs_(chs), level_(l) { } - - unsigned level (void) const { return level_; } - - void setLevel (unsigned level) { level_ = level; } - - Symbol symbol (void) const { return symbol_; } - - void setSymbol (const Symbol s) { symbol_ = s; } - - public: - - CTChilds_& childs (void) { return childs_; } - - const CTChilds_& childs (void) const { return childs_; } - - size_t nrChilds (void) const { return childs_.size(); } - - bool isRoot (void) const { return level_ == 0; } - - bool isLeaf (void) const { return childs_.empty(); } - - CTChilds_::iterator findSymbol (Symbol symb) - { - CTNode tmp (symb, 0); - return childs_.find (&tmp); - } - - void mergeSubtree (CTNode*, bool = true); - - void removeChild (CTNode*); - - void removeChilds (void); - - void removeAndDeleteChild (CTNode*); - - void removeAndDeleteAllChilds (void); - - SymbolSet childSymbols (void) const; - - static CTNode* copySubtree (const CTNode*); - - static void deleteSubtree (CTNode*); - - private: - void updateChildLevels (CTNode*, unsigned); - - Symbol symbol_; - CTChilds_ childs_; - unsigned level_; -}; - -ostream& operator<< (ostream &out, const CTNode&); - - -typedef TinySet CTChilds; - - -class ConstraintTree -{ - public: - ConstraintTree (unsigned); - - ConstraintTree (const LogVars&); - - ConstraintTree (const LogVars&, const Tuples&); - - ConstraintTree (vector> names); - - ConstraintTree (const ConstraintTree&); - - ConstraintTree (const CTChilds& rootChilds, const LogVars& logVars) - : root_(new CTNode (0, 0, rootChilds)), - logVars_(logVars), - logVarSet_(logVars) { } - - ~ConstraintTree (void); - - CTNode* root (void) const { return root_; } - - bool empty (void) const { return root_->childs().empty(); } - - const LogVars& logVars (void) const - { - assert (LogVarSet (logVars_) == logVarSet_); - return logVars_; - } - - const LogVarSet& logVarSet (void) const - { - assert (LogVarSet (logVars_) == logVarSet_); - return logVarSet_; - } - - size_t nrLogVars (void) const - { - return logVars_.size(); - assert (LogVarSet (logVars_) == logVarSet_); - } - - void addTuple (const Tuple&); - - bool containsTuple (const Tuple&); - - void moveToTop (const LogVars&); - - void moveToBottom (const LogVars&); - - void join (ConstraintTree*, bool oneTwoOne = false); - - unsigned getLevel (LogVar) const; - - void rename (LogVar, LogVar); - - void applySubstitution (const Substitution&); - - void project (const LogVarSet&); - - ConstraintTree projectedCopy (const LogVarSet&); - - void remove (const LogVarSet&); - - bool isSingleton (LogVar); - - LogVarSet singletons (void); - - TupleSet tupleSet (unsigned = 0) const; - - TupleSet tupleSet (const LogVars&); - - unsigned size (void) const; - - unsigned nrSymbols (LogVar); - - void exportToGraphViz (const char*, bool = false) const; - - bool isCountNormalized (const LogVarSet&); - - unsigned getConditionalCount (const LogVarSet&); - - TinySet getConditionalCounts (const LogVarSet&); - - bool isCartesianProduct (const LogVarSet&); - - std::pair split (const Tuple&); - - std::pair split ( - const LogVars&, ConstraintTree*, const LogVars&); - - ConstraintTrees countNormalize (const LogVarSet&); - - ConstraintTrees jointCountNormalize ( - ConstraintTree*, ConstraintTree*, LogVar, LogVar, LogVar); - - LogVars expand (LogVar); - - ConstraintTrees ground (LogVar); - - void cloneLogVar (LogVar, LogVar); - - ConstraintTree& operator= (const ConstraintTree& ct); - - private: - unsigned 
countTuples (const CTNode*) const; - - CTNodes getNodesBelow (CTNode*) const; - - CTNodes getNodesAtLevel (unsigned) const; - - unsigned nrNodes (const CTNode* n) const; - - void appendOnBottom (CTNode* n1, const CTChilds&); - - void swapLogVar (LogVar); - - bool join (CTNode*, const Tuple&, size_t, CTNode*); - - void getTuples (CTNode*, Tuples, unsigned, Tuples&, CTNodes&) const; - - vector> countNormalize ( - const CTNode*, unsigned); - - static void split ( - CTNode*, CTNode*, CTChilds&, CTChilds&, unsigned); - - CTNode* root_; - LogVars logVars_; - LogVarSet logVarSet_; -}; - - -#endif // HORUS_CONSTRAINTTREE_H - diff --git a/packages/CLPBN/horus2/CountingBp.cpp b/packages/CLPBN/horus2/CountingBp.cpp deleted file mode 100644 index d248c602c..000000000 --- a/packages/CLPBN/horus2/CountingBp.cpp +++ /dev/null @@ -1,424 +0,0 @@ -#include "CountingBp.h" -#include "WeightedBp.h" - - -bool CountingBp::checkForIdenticalFactors = true; - - -CountingBp::CountingBp (const FactorGraph& fg) - : GroundSolver (fg), freeColor_(0) -{ - findIdenticalFactors(); - setInitialColors(); - createGroups(); - compressedFg_ = getCompressedFactorGraph(); - solver_ = new WeightedBp (*compressedFg_, getWeights()); -} - - - -CountingBp::~CountingBp (void) -{ - delete solver_; - delete compressedFg_; - for (size_t i = 0; i < varClusters_.size(); i++) { - delete varClusters_[i]; - } - for (size_t i = 0; i < facClusters_.size(); i++) { - delete facClusters_[i]; - } -} - - - -void -CountingBp::printSolverFlags (void) const -{ - stringstream ss; - ss << "counting bp [" ; - ss << "schedule=" ; - typedef BpOptions::Schedule Sch; - switch (BpOptions::schedule) { - case Sch::SEQ_FIXED: ss << "seq_fixed"; break; - case Sch::SEQ_RANDOM: ss << "seq_random"; break; - case Sch::PARALLEL: ss << "parallel"; break; - case Sch::MAX_RESIDUAL: ss << "max_residual"; break; - } - ss << ",max_iter=" << BpOptions::maxIter; - ss << ",accuracy=" << BpOptions::accuracy; - ss << ",log_domain=" << Util::toString (Globals::logDomain); - ss << ",chkif=" << - Util::toString (CountingBp::checkForIdenticalFactors); - ss << "]" ; - cout << ss.str() << endl; -} - - - -Params -CountingBp::solveQuery (VarIds queryVids) -{ - assert (queryVids.empty() == false); - Params res; - if (queryVids.size() == 1) { - res = solver_->getPosterioriOf (getRepresentative (queryVids[0])); - } else { - VarNode* vn = fg.getVarNode (queryVids[0]); - const FacNodes& facNodes = vn->neighbors(); - size_t idx = facNodes.size(); - for (size_t i = 0; i < facNodes.size(); i++) { - if (facNodes[i]->factor().contains (queryVids)) { - idx = i; - break; - } - cout << endl; - } - if (idx == facNodes.size()) { - res = GroundSolver::getJointByConditioning ( - GroundSolverType::CBP, fg, queryVids); - } else { - VarIds reprArgs; - for (size_t i = 0; i < queryVids.size(); i++) { - reprArgs.push_back (getRepresentative (queryVids[i])); - } - FacNode* reprFac = getRepresentative (facNodes[idx]); - assert (reprFac != 0); - res = solver_->getFactorJoint (reprFac, reprArgs); - } - } - return res; -} - - - -void -CountingBp::findIdenticalFactors() -{ - const FacNodes& facNodes = fg.facNodes(); - if (checkForIdenticalFactors == false || - facNodes.size() == 1) { - return; - } - for (size_t i = 0; i < facNodes.size(); i++) { - facNodes[i]->factor().setDistId (Util::maxUnsigned()); - } - unsigned groupCount = 1; - for (size_t i = 0; i < facNodes.size() - 1; i++) { - Factor& f1 = facNodes[i]->factor(); - if (f1.distId() != Util::maxUnsigned()) { - continue; - } - f1.setDistId (groupCount); - for 
(size_t j = i + 1; j < facNodes.size(); j++) { - Factor& f2 = facNodes[j]->factor(); - if (f2.distId() != Util::maxUnsigned()) { - continue; - } - if (f1.size() == f2.size() && - f1.ranges() == f2.ranges() && - f1.params() == f2.params()) { - f2.setDistId (groupCount); - } - } - groupCount ++; - } -} - - - -void -CountingBp::setInitialColors (void) -{ - varColors_.resize (fg.nrVarNodes()); - facColors_.resize (fg.nrFacNodes()); - // create the initial variable colors - VarColorMap colorMap; - const VarNodes& varNodes = fg.varNodes(); - for (size_t i = 0; i < varNodes.size(); i++) { - unsigned range = varNodes[i]->range(); - VarColorMap::iterator it = colorMap.find (range); - if (it == colorMap.end()) { - it = colorMap.insert (make_pair ( - range, Colors (range + 1, -1))).first; - } - unsigned idx = varNodes[i]->hasEvidence() - ? varNodes[i]->getEvidence() - : range; - Colors& stateColors = it->second; - if (stateColors[idx] == -1) { - stateColors[idx] = getNewColor(); - } - setColor (varNodes[i], stateColors[idx]); - } - const FacNodes& facNodes = fg.facNodes(); - // create the initial factor colors - DistColorMap distColors; - for (size_t i = 0; i < facNodes.size(); i++) { - unsigned distId = facNodes[i]->factor().distId(); - DistColorMap::iterator it = distColors.find (distId); - if (it == distColors.end()) { - it = distColors.insert (make_pair (distId, getNewColor())).first; - } - setColor (facNodes[i], it->second); - } -} - - - -void -CountingBp::createGroups (void) -{ - VarSignMap varGroups; - FacSignMap facGroups; - unsigned nIters = 0; - bool groupsHaveChanged = true; - const VarNodes& varNodes = fg.varNodes(); - const FacNodes& facNodes = fg.facNodes(); - - while (groupsHaveChanged || nIters == 1) { - nIters ++; - - // set a new color to the variables with the same signature - size_t prevVarGroupsSize = varGroups.size(); - varGroups.clear(); - for (size_t i = 0; i < varNodes.size(); i++) { - const VarSignature& signature = getSignature (varNodes[i]); - VarSignMap::iterator it = varGroups.find (signature); - if (it == varGroups.end()) { - it = varGroups.insert (make_pair (signature, VarNodes())).first; - } - it->second.push_back (varNodes[i]); - } - for (VarSignMap::iterator it = varGroups.begin(); - it != varGroups.end(); ++it) { - Color newColor = getNewColor(); - VarNodes& groupMembers = it->second; - for (size_t i = 0; i < groupMembers.size(); i++) { - setColor (groupMembers[i], newColor); - } - } - - size_t prevFactorGroupsSize = facGroups.size(); - facGroups.clear(); - // set a new color to the factors with the same signature - for (size_t i = 0; i < facNodes.size(); i++) { - const FacSignature& signature = getSignature (facNodes[i]); - FacSignMap::iterator it = facGroups.find (signature); - if (it == facGroups.end()) { - it = facGroups.insert (make_pair (signature, FacNodes())).first; - } - it->second.push_back (facNodes[i]); - } - for (FacSignMap::iterator it = facGroups.begin(); - it != facGroups.end(); ++it) { - Color newColor = getNewColor(); - FacNodes& groupMembers = it->second; - for (size_t i = 0; i < groupMembers.size(); i++) { - setColor (groupMembers[i], newColor); - } - } - - groupsHaveChanged = prevVarGroupsSize != varGroups.size() - || prevFactorGroupsSize != facGroups.size(); - } - // printGroups (varGroups, facGroups); - createClusters (varGroups, facGroups); -} - - - -void -CountingBp::createClusters ( - const VarSignMap& varGroups, - const FacSignMap& facGroups) -{ - varClusters_.reserve (varGroups.size()); - for (VarSignMap::const_iterator it = 
varGroups.begin(); - it != varGroups.end(); ++it) { - const VarNodes& groupVars = it->second; - VarCluster* vc = new VarCluster (groupVars); - for (size_t i = 0; i < groupVars.size(); i++) { - varClusterMap_.insert (make_pair (groupVars[i]->varId(), vc)); - } - varClusters_.push_back (vc); - } - - facClusters_.reserve (facGroups.size()); - for (FacSignMap::const_iterator it = facGroups.begin(); - it != facGroups.end(); ++it) { - FacNode* groupFactor = it->second[0]; - const VarNodes& neighs = groupFactor->neighbors(); - VarClusters varClusters; - varClusters.reserve (neighs.size()); - for (size_t i = 0; i < neighs.size(); i++) { - VarId vid = neighs[i]->varId(); - varClusters.push_back (varClusterMap_.find (vid)->second); - } - facClusters_.push_back (new FacCluster (it->second, varClusters)); - } -} - - - -VarSignature -CountingBp::getSignature (const VarNode* varNode) -{ - const FacNodes& neighs = varNode->neighbors(); - VarSignature sign; - sign.reserve (neighs.size() + 1); - for (size_t i = 0; i < neighs.size(); i++) { - sign.push_back (make_pair ( - getColor (neighs[i]), - neighs[i]->factor().indexOf (varNode->varId()))); - } - std::sort (sign.begin(), sign.end()); - sign.push_back (make_pair (getColor (varNode), 0)); - return sign; -} - - - -FacSignature -CountingBp::getSignature (const FacNode* facNode) -{ - const VarNodes& neighs = facNode->neighbors(); - FacSignature sign; - sign.reserve (neighs.size() + 1); - for (size_t i = 0; i < neighs.size(); i++) { - sign.push_back (getColor (neighs[i])); - } - sign.push_back (getColor (facNode)); - return sign; -} - - - -VarId -CountingBp::getRepresentative (VarId vid) -{ - assert (Util::contains (varClusterMap_, vid)); - VarCluster* vc = varClusterMap_.find (vid)->second; - return vc->representative()->varId(); -} - - - -FacNode* -CountingBp::getRepresentative (FacNode* fn) -{ - for (size_t i = 0; i < facClusters_.size(); i++) { - if (Util::contains (facClusters_[i]->members(), fn)) { - return facClusters_[i]->representative(); - } - } - return 0; -} - - - -FactorGraph* -CountingBp::getCompressedFactorGraph (void) -{ - FactorGraph* fg = new FactorGraph(); - for (size_t i = 0; i < varClusters_.size(); i++) { - VarNode* newVar = new VarNode (varClusters_[i]->first()); - varClusters_[i]->setRepresentative (newVar); - fg->addVarNode (newVar); - } - for (size_t i = 0; i < facClusters_.size(); i++) { - Vars vars; - const VarClusters& clusters = facClusters_[i]->varClusters(); - for (size_t j = 0; j < clusters.size(); j++) { - vars.push_back (clusters[j]->representative()); - } - const Factor& groundFac = facClusters_[i]->first()->factor(); - FacNode* fn = new FacNode (Factor ( - vars, groundFac.params(), groundFac.distId())); - facClusters_[i]->setRepresentative (fn); - fg->addFacNode (fn); - for (size_t j = 0; j < vars.size(); j++) { - fg->addEdge (static_cast (vars[j]), fn); - } - } - return fg; -} - - - -vector> -CountingBp::getWeights (void) const -{ - vector> weights; - weights.reserve (facClusters_.size()); - for (size_t i = 0; i < facClusters_.size(); i++) { - const VarClusters& neighs = facClusters_[i]->varClusters(); - weights.push_back ({ }); - weights.back().reserve (neighs.size()); - for (size_t j = 0; j < neighs.size(); j++) { - weights.back().push_back (getWeight ( - facClusters_[i], neighs[j], j)); - } - } - return weights; -} - - - -unsigned -CountingBp::getWeight ( - const FacCluster* fc, - const VarCluster* vc, - size_t index) const -{ - unsigned weight = 0; - VarId reprVid = vc->representative()->varId(); - VarNode* groundVar 
= fg.getVarNode (reprVid); - const FacNodes& neighs = groundVar->neighbors(); - for (size_t i = 0; i < neighs.size(); i++) { - FacNodes::const_iterator it; - it = std::find (fc->members().begin(), fc->members().end(), neighs[i]); - if (it != fc->members().end() && - (*it)->factor().indexOf (reprVid) == index) { - weight ++; - } - } - return weight; -} - - - -void -CountingBp::printGroups ( - const VarSignMap& varGroups, - const FacSignMap& facGroups) const -{ - unsigned count = 1; - cout << "variable groups:" << endl; - for (VarSignMap::const_iterator it = varGroups.begin(); - it != varGroups.end(); ++it) { - const VarNodes& groupMembers = it->second; - if (groupMembers.size() > 0) { - cout << count << ": " ; - for (size_t i = 0; i < groupMembers.size(); i++) { - cout << groupMembers[i]->label() << " " ; - } - count ++; - cout << endl; - } - } - count = 1; - cout << endl << "factor groups:" << endl; - for (FacSignMap::const_iterator it = facGroups.begin(); - it != facGroups.end(); ++it) { - const FacNodes& groupMembers = it->second; - if (groupMembers.size() > 0) { - cout << ++count << ": " ; - for (size_t i = 0; i < groupMembers.size(); i++) { - cout << groupMembers[i]->getLabel() << " " ; - } - count ++; - cout << endl; - } - } -} - diff --git a/packages/CLPBN/horus2/CountingBp.h b/packages/CLPBN/horus2/CountingBp.h deleted file mode 100644 index 2cbd2f995..000000000 --- a/packages/CLPBN/horus2/CountingBp.h +++ /dev/null @@ -1,182 +0,0 @@ -#ifndef HORUS_COUNTINGBP_H -#define HORUS_COUNTINGBP_H - -#include - -#include "GroundSolver.h" -#include "FactorGraph.h" -#include "Util.h" -#include "Horus.h" - -class VarCluster; -class FacCluster; -class WeightedBp; - -typedef long Color; -typedef vector Colors; -typedef vector> VarSignature; -typedef vector FacSignature; - -typedef unordered_map DistColorMap; -typedef unordered_map VarColorMap; - -typedef unordered_map VarSignMap; -typedef unordered_map FacSignMap; - -typedef unordered_map VarClusterMap; - -typedef vector VarClusters; -typedef vector FacClusters; - -template -inline size_t hash_combine (size_t seed, const T& v) -{ - return seed ^ (hash()(v) + 0x9e3779b9 + (seed << 6) + (seed >> 2)); -} - - -namespace std { - template struct hash> - { - size_t operator() (const std::pair& p) const - { - return hash_combine (std::hash()(p.first), p.second); - } - }; - - template struct hash> - { - size_t operator() (const std::vector& vec) const - { - size_t h = 0; - typename vector::const_iterator first = vec.begin(); - typename vector::const_iterator last = vec.end(); - for (; first != last; ++first) { - h = hash_combine (h, *first); - } - return h; - } - }; -} - - -class VarCluster -{ - public: - VarCluster (const VarNodes& vs) : members_(vs) { } - - const VarNode* first (void) const { return members_.front(); } - - const VarNodes& members (void) const { return members_; } - - VarNode* representative (void) const { return repr_; } - - void setRepresentative (VarNode* vn) { repr_ = vn; } - - private: - VarNodes members_; - VarNode* repr_; -}; - - -class FacCluster -{ - public: - FacCluster (const FacNodes& fcs, const VarClusters& vcs) - : members_(fcs), varClusters_(vcs) { } - - const FacNode* first (void) const { return members_.front(); } - - const FacNodes& members (void) const { return members_; } - - FacNode* representative (void) const { return repr_; } - - void setRepresentative (FacNode* fn) { repr_ = fn; } - - VarClusters& varClusters (void) { return varClusters_; } - - private: - FacNodes members_; - FacNode* repr_; - VarClusters 
varClusters_; -}; - - -class CountingBp : public GroundSolver -{ - public: - CountingBp (const FactorGraph& fg); - - ~CountingBp (void); - - void printSolverFlags (void) const; - - Params solveQuery (VarIds); - - static bool checkForIdenticalFactors; - - private: - Color getNewColor (void) - { - ++ freeColor_; - return freeColor_ - 1; - } - - Color getColor (const VarNode* vn) const - { - return varColors_[vn->getIndex()]; - } - - Color getColor (const FacNode* fn) const - { - return facColors_[fn->getIndex()]; - } - - void setColor (const VarNode* vn, Color c) - { - varColors_[vn->getIndex()] = c; - } - - void setColor (const FacNode* fn, Color c) - { - facColors_[fn->getIndex()] = c; - } - - void findIdenticalFactors (void); - - void setInitialColors (void); - - void createGroups (void); - - void createClusters (const VarSignMap&, const FacSignMap&); - - VarSignature getSignature (const VarNode*); - - FacSignature getSignature (const FacNode*); - - void printGroups (const VarSignMap&, const FacSignMap&) const; - - VarId getRepresentative (VarId vid); - - FacNode* getRepresentative (FacNode*); - - FactorGraph* getCompressedFactorGraph (void); - - vector> getWeights (void) const; - - unsigned getWeight (const FacCluster*, - const VarCluster*, size_t index) const; - - - Color freeColor_; - Colors varColors_; - Colors facColors_; - VarClusters varClusters_; - FacClusters facClusters_; - VarClusterMap varClusterMap_; - const FactorGraph* compressedFg_; - WeightedBp* solver_; -}; - -#endif // HORUS_COUNTINGBP_H - diff --git a/packages/CLPBN/horus2/ElimGraph.cpp b/packages/CLPBN/horus2/ElimGraph.cpp deleted file mode 100644 index f617d8237..000000000 --- a/packages/CLPBN/horus2/ElimGraph.cpp +++ /dev/null @@ -1,243 +0,0 @@ -#include - -#include - -#include "ElimGraph.h" - -ElimHeuristic ElimGraph::elimHeuristic = MIN_NEIGHBORS; - - -ElimGraph::ElimGraph (const vector& factors) -{ - for (size_t i = 0; i < factors.size(); i++) { - if (factors[i] == 0) { // if contained just one var with evidence - continue; - } - const VarIds& vids = factors[i]->arguments(); - for (size_t j = 0; j < vids.size() - 1; j++) { - EgNode* n1 = getEgNode (vids[j]); - if (n1 == 0) { - n1 = new EgNode (vids[j], factors[i]->range (j)); - addNode (n1); - } - for (size_t k = j + 1; k < vids.size(); k++) { - EgNode* n2 = getEgNode (vids[k]); - if (n2 == 0) { - n2 = new EgNode (vids[k], factors[i]->range (k)); - addNode (n2); - } - if (neighbors (n1, n2) == false) { - addEdge (n1, n2); - } - } - } - if (vids.size() == 1) { - if (getEgNode (vids[0]) == 0) { - addNode (new EgNode (vids[0], factors[i]->range (0))); - } - } - } -} - - - -ElimGraph::~ElimGraph (void) -{ - for (size_t i = 0; i < nodes_.size(); i++) { - delete nodes_[i]; - } -} - - - -VarIds -ElimGraph::getEliminatingOrder (const VarIds& exclude) -{ - VarIds elimOrder; - unmarked_.reserve (nodes_.size()); - for (size_t i = 0; i < nodes_.size(); i++) { - if (Util::contains (exclude, nodes_[i]->varId()) == false) { - unmarked_.insert (nodes_[i]); - } - } - size_t nrVarsToEliminate = nodes_.size() - exclude.size(); - for (size_t i = 0; i < nrVarsToEliminate; i++) { - EgNode* node = getLowestCostNode(); - unmarked_.remove (node); - const EGNeighs& neighs = node->neighbors(); - for (size_t j = 0; j < neighs.size(); j++) { - neighs[j]->removeNeighbor (node); - } - elimOrder.push_back (node->varId()); - connectAllNeighbors (node); - } - return elimOrder; -} - - - -void -ElimGraph::print (void) const -{ - for (size_t i = 0; i < nodes_.size(); i++) { - cout << "node " << 
nodes_[i]->label() << " neighs:" ; - EGNeighs neighs = nodes_[i]->neighbors(); - for (size_t j = 0; j < neighs.size(); j++) { - cout << " " << neighs[j]->label(); - } - cout << endl; - } -} - - - -void -ElimGraph::exportToGraphViz ( - const char* fileName, - bool showNeighborless, - const VarIds& highlightVarIds) const -{ - ofstream out (fileName); - if (!out.is_open()) { - cerr << "Error: couldn't open file '" << fileName << "'." ; - return; - } - out << "strict graph {" << endl; - for (size_t i = 0; i < nodes_.size(); i++) { - if (showNeighborless || nodes_[i]->neighbors().size() != 0) { - out << '"' << nodes_[i]->label() << '"' << endl; - } - } - for (size_t i = 0; i < highlightVarIds.size(); i++) { - EgNode* node =getEgNode (highlightVarIds[i]); - if (node) { - out << '"' << node->label() << '"' ; - out << " [shape=box3d]" << endl; - } else { - cerr << "Error: invalid variable id: " << highlightVarIds[i] << "." ; - cerr << endl; - exit (EXIT_FAILURE); - } - } - for (size_t i = 0; i < nodes_.size(); i++) { - EGNeighs neighs = nodes_[i]->neighbors(); - for (size_t j = 0; j < neighs.size(); j++) { - out << '"' << nodes_[i]->label() << '"' << " -- " ; - out << '"' << neighs[j]->label() << '"' << endl; - } - } - out << "}" << endl; - out.close(); -} - - - -VarIds -ElimGraph::getEliminationOrder ( - const Factors& factors, - VarIds excludedVids) -{ - if (elimHeuristic == ElimHeuristic::SEQUENTIAL) { - VarIds allVids; - Factors::const_iterator first = factors.begin(); - Factors::const_iterator end = factors.end(); - for (; first != end; ++first) { - Util::addToVector (allVids, (*first)->arguments()); - } - TinySet elimOrder (allVids); - elimOrder -= TinySet (excludedVids); - return elimOrder.elements(); - } - ElimGraph graph (factors); - return graph.getEliminatingOrder (excludedVids); -} - - - -void -ElimGraph::addNode (EgNode* n) -{ - nodes_.push_back (n); - n->setIndex (nodes_.size() - 1); - varMap_.insert (make_pair (n->varId(), n)); -} - - - -EgNode* -ElimGraph::getEgNode (VarId vid) const -{ - unordered_map::const_iterator it; - it = varMap_.find (vid); - return (it != varMap_.end()) ? it->second : 0; -} - - - -EgNode* -ElimGraph::getLowestCostNode (void) const -{ - EgNode* bestNode = 0; - unsigned minCost = std::numeric_limits::max(); - EGNeighs::const_iterator it; - switch (elimHeuristic) { - case MIN_NEIGHBORS: { - for (it = unmarked_.begin(); it != unmarked_.end(); ++ it) { - unsigned cost = getNeighborsCost (*it); - if (cost < minCost) { - bestNode = *it; - minCost = cost; - } - }} - break; - case MIN_WEIGHT: { - for (it = unmarked_.begin(); it != unmarked_.end(); ++ it) { - unsigned cost = getWeightCost (*it); - if (cost < minCost) { - bestNode = *it; - minCost = cost; - } - }} - break; - case MIN_FILL: { - for (it = unmarked_.begin(); it != unmarked_.end(); ++ it) { - unsigned cost = getFillCost (*it); - if (cost < minCost) { - bestNode = *it; - minCost = cost; - } - }} - break; - case WEIGHTED_MIN_FILL: { - for (it = unmarked_.begin(); it != unmarked_.end(); ++ it) { - unsigned cost = getWeightedFillCost (*it); - if (cost < minCost) { - bestNode = *it; - minCost = cost; - } - }} - break; - default: - assert (false); - } - assert (bestNode); - return bestNode; -} - - - -void -ElimGraph::connectAllNeighbors (const EgNode* n) -{ - const EGNeighs& neighs = n->neighbors(); - if (neighs.size() > 0) { - for (size_t i = 0; i < neighs.size() - 1; i++) { - for (size_t j = i + 1; j < neighs.size(); j++) { - if ( ! 
neighbors (neighs[i], neighs[j])) { - addEdge (neighs[i], neighs[j]); - } - } - } - } -} - diff --git a/packages/CLPBN/horus2/ElimGraph.h b/packages/CLPBN/horus2/ElimGraph.h deleted file mode 100644 index 8188b5ba6..000000000 --- a/packages/CLPBN/horus2/ElimGraph.h +++ /dev/null @@ -1,139 +0,0 @@ -#ifndef HORUS_ELIMGRAPH_H -#define HORUS_ELIMGRAPH_H - -#include "unordered_map" - -#include "FactorGraph.h" -#include "TinySet.h" -#include "Horus.h" - - -using namespace std; - -enum ElimHeuristic -{ - SEQUENTIAL, - MIN_NEIGHBORS, - MIN_WEIGHT, - MIN_FILL, - WEIGHTED_MIN_FILL -}; - - -class EgNode; - -typedef TinySet EGNeighs; - - -class EgNode : public Var -{ - public: - EgNode (VarId vid, unsigned range) : Var (vid, range) { } - - void addNeighbor (EgNode* n) { neighs_.insert (n); } - - void removeNeighbor (EgNode* n) { neighs_.remove (n); } - - bool isNeighbor (EgNode* n) const { return neighs_.contains (n); } - - const EGNeighs& neighbors (void) const { return neighs_; } - - private: - EGNeighs neighs_; -}; - - -class ElimGraph -{ - public: - ElimGraph (const Factors&); - - ~ElimGraph (void); - - VarIds getEliminatingOrder (const VarIds&); - - void print (void) const; - - void exportToGraphViz (const char*, bool = true, - const VarIds& = VarIds()) const; - - static VarIds getEliminationOrder (const Factors&, VarIds); - - static ElimHeuristic elimHeuristic; - - private: - - void addEdge (EgNode* n1, EgNode* n2) - { - assert (n1 != n2); - n1->addNeighbor (n2); - n2->addNeighbor (n1); - } - - unsigned getNeighborsCost (const EgNode* n) const - { - return n->neighbors().size(); - } - - unsigned getWeightCost (const EgNode* n) const - { - unsigned cost = 1; - const EGNeighs& neighs = n->neighbors(); - for (size_t i = 0; i < neighs.size(); i++) { - cost *= neighs[i]->range(); - } - return cost; - } - - unsigned getFillCost (const EgNode* n) const - { - unsigned cost = 0; - const EGNeighs& neighs = n->neighbors(); - if (neighs.size() > 0) { - for (size_t i = 0; i < neighs.size() - 1; i++) { - for (size_t j = i + 1; j < neighs.size(); j++) { - if ( ! neighbors (neighs[i], neighs[j])) { - cost ++; - } - } - } - } - return cost; - } - - unsigned getWeightedFillCost (const EgNode* n) const - { - unsigned cost = 0; - const EGNeighs& neighs = n->neighbors(); - if (neighs.size() > 0) { - for (size_t i = 0; i < neighs.size() - 1; i++) { - for (size_t j = i + 1; j < neighs.size(); j++) { - if ( ! 
neighbors (neighs[i], neighs[j])) { - cost += neighs[i]->range() * neighs[j]->range(); - } - } - } - } - return cost; - } - - bool neighbors (EgNode* n1, EgNode* n2) const - { - return n1->isNeighbor (n2); - } - - void addNode (EgNode*); - - EgNode* getEgNode (VarId) const; - - EgNode* getLowestCostNode (void) const; - - void connectAllNeighbors (const EgNode*); - - vector nodes_; - TinySet unmarked_; - unordered_map varMap_; -}; - -#endif // HORUS_ELIMGRAPH_H - diff --git a/packages/CLPBN/horus2/Factor.cpp b/packages/CLPBN/horus2/Factor.cpp deleted file mode 100644 index 9b8ad0be7..000000000 --- a/packages/CLPBN/horus2/Factor.cpp +++ /dev/null @@ -1,237 +0,0 @@ -#include -#include - -#include - -#include -#include - -#include "Factor.h" -#include "Indexer.h" - - -Factor::Factor (const Factor& g) -{ - clone (g); -} - - - -Factor::Factor ( - const VarIds& vids, - const Ranges& ranges, - const Params& params, - unsigned distId) -{ - args_ = vids; - ranges_ = ranges; - params_ = params; - distId_ = distId; - assert (params_.size() == Util::sizeExpected (ranges_)); -} - - - -Factor::Factor ( - const Vars& vars, - const Params& params, - unsigned distId) -{ - for (size_t i = 0; i < vars.size(); i++) { - args_.push_back (vars[i]->varId()); - ranges_.push_back (vars[i]->range()); - } - params_ = params; - distId_ = distId; - assert (params_.size() == Util::sizeExpected (ranges_)); -} - - - -void -Factor::sumOut (VarId vid) -{ - if (vid == args_.front() && ranges_.front() == 2) { - // optimization - sumOutFirstVariable(); - } else if (vid == args_.back() && ranges_.back() == 2) { - // optimization - sumOutLastVariable(); - } else { - assert (indexOf (vid) != args_.size()); - sumOutIndex (indexOf (vid)); - } -} - - - -void -Factor::sumOutAllExcept (VarId vid) -{ - assert (indexOf (vid) != args_.size()); - sumOutAllExceptIndex (indexOf (vid)); -} - - - -void -Factor::sumOutAllExcept (const VarIds& vids) -{ - vector mask (args_.size(), false); - for (unsigned i = 0; i < vids.size(); i++) { - assert (indexOf (vids[i]) != args_.size()); - mask[indexOf (vids[i])] = true; - } - sumOutArgs (mask); -} - - - -void -Factor::sumOutAllExceptIndex (size_t idx) -{ - assert (idx < args_.size()); - vector mask (args_.size(), false); - mask[idx] = true; - sumOutArgs (mask); -} - - -void -Factor::multiply (Factor& g) -{ - if (args_.size() == 0) { - clone (g); - return; - } - TFactor::multiply (g); -} - - - -string -Factor::getLabel (void) const -{ - stringstream ss; - ss << "f(" ; - for (size_t i = 0; i < args_.size(); i++) { - if (i != 0) ss << "," ; - ss << Var (args_[i], ranges_[i]).label(); - } - ss << ")" ; - return ss.str(); -} - - - -void -Factor::print (void) const -{ - Vars vars; - for (size_t i = 0; i < args_.size(); i++) { - vars.push_back (new Var (args_[i], ranges_[i])); - } - vector jointStrings = Util::getStateLines (vars); - for (size_t i = 0; i < params_.size(); i++) { - // cout << "[" << distId_ << "] " ; - cout << "f(" << jointStrings[i] << ")" ; - cout << " = " << params_[i] << endl; - } - cout << endl; - for (size_t i = 0; i < vars.size(); i++) { - delete vars[i]; - } -} - - - -void -Factor::sumOutFirstVariable (void) -{ - size_t sep = params_.size() / 2; - if (Globals::logDomain) { - std::transform ( - params_.begin(), params_.begin() + sep, - params_.begin() + sep, params_.begin(), - Util::logSum); - - } else { - std::transform ( - params_.begin(), params_.begin() + sep, - params_.begin() + sep, params_.begin(), - std::plus()); - } - params_.resize (sep); - args_.erase (args_.begin()); - 
ranges_.erase (ranges_.begin()); -} - - - -void -Factor::sumOutLastVariable (void) -{ - Params::iterator first1 = params_.begin(); - Params::iterator first2 = params_.begin(); - Params::iterator last = params_.end(); - if (Globals::logDomain) { - while (first2 != last) { - // the arguments can be swaped, but that is ok - *first1++ = Util::logSum (*first2++, *first2++); - } - } else { - while (first2 != last) { - *first1++ = (*first2++) + (*first2++); - } - } - params_.resize (params_.size() / 2); - args_.pop_back(); - ranges_.pop_back(); -} - - - -void -Factor::sumOutArgs (const vector& mask) -{ - assert (mask.size() == args_.size()); - size_t new_size = 1; - Ranges oldRanges = ranges_; - args_.clear(); - ranges_.clear(); - for (unsigned i = 0; i < mask.size(); i++) { - if (mask[i]) { - new_size *= ranges_[i]; - args_.push_back (args_[i]); - ranges_.push_back (ranges_[i]); - } - } - Params newps (new_size, LogAware::addIdenty()); - Params::const_iterator first = params_.begin(); - Params::const_iterator last = params_.end(); - MapIndexer indexer (oldRanges, mask); - if (Globals::logDomain) { - while (first != last) { - newps[indexer] = Util::logSum (newps[indexer], *first++); - ++ indexer; - } - } else { - while (first != last) { - newps[indexer] += *first++; - ++ indexer; - } - } - params_ = newps; -} - - - -void -Factor::clone (const Factor& g) -{ - args_ = g.arguments(); - ranges_ = g.ranges(); - params_ = g.params(); - distId_ = g.distId(); -} - diff --git a/packages/CLPBN/horus2/Factor.h b/packages/CLPBN/horus2/Factor.h deleted file mode 100644 index 742f20f7a..000000000 --- a/packages/CLPBN/horus2/Factor.h +++ /dev/null @@ -1,294 +0,0 @@ -#ifndef HORUS_FACTOR_H -#define HORUS_FACTOR_H - -#include - -#include "Var.h" -#include "Indexer.h" -#include "Util.h" - - -using namespace std; - - -template -class TFactor -{ - public: - const vector& arguments (void) const { return args_; } - - vector& arguments (void) { return args_; } - - const Ranges& ranges (void) const { return ranges_; } - - const Params& params (void) const { return params_; } - - Params& params (void) { return params_; } - - size_t nrArguments (void) const { return args_.size(); } - - size_t size (void) const { return params_.size(); } - - unsigned distId (void) const { return distId_; } - - void setDistId (unsigned id) { distId_ = id; } - - void normalize (void) { LogAware::normalize (params_); } - - void randomize (void) - { - for (size_t i = 0; i < params_.size(); ++i) { - params_[i] = (double) std::rand() / RAND_MAX; - } - } - - void setParams (const Params& newParams) - { - params_ = newParams; - assert (params_.size() == Util::sizeExpected (ranges_)); - } - - size_t indexOf (const T& t) const - { - return Util::indexOf (args_, t); - } - - const T& argument (size_t idx) const - { - assert (idx < args_.size()); - return args_[idx]; - } - - T& argument (size_t idx) - { - assert (idx < args_.size()); - return args_[idx]; - } - - unsigned range (size_t idx) const - { - assert (idx < ranges_.size()); - return ranges_[idx]; - } - - void multiply (TFactor& g) - { - if (args_ == g.arguments()) { - // optimization - Globals::logDomain - ? 
params_ += g.params() - : params_ *= g.params(); - return; - } - unsigned range_prod = 1; - bool share_arguments = false; - const vector& g_args = g.arguments(); - const Ranges& g_ranges = g.ranges(); - const Params& g_params = g.params(); - for (size_t i = 0; i < g_args.size(); i++) { - size_t idx = indexOf (g_args[i]); - if (idx == args_.size()) { - range_prod *= g_ranges[i]; - args_.push_back (g_args[i]); - ranges_.push_back (g_ranges[i]); - } else { - share_arguments = true; - } - } - if (share_arguments == false) { - // optimization - cartesianProduct (g_params.begin(), g_params.end()); - } else { - extend (range_prod); - Params::iterator it = params_.begin(); - MapIndexer indexer (args_, ranges_, g_args, g_ranges); - if (Globals::logDomain) { - for (; indexer.valid(); ++it, ++indexer) { - *it += g_params[indexer]; - } - } else { - for (; indexer.valid(); ++it, ++indexer) { - *it *= g_params[indexer]; - } - } - } - } - - void sumOutIndex (size_t idx) - { - assert (idx < args_.size()); - assert (args_.size() > 1); - size_t new_size = params_.size() / ranges_[idx]; - Params newps (new_size, LogAware::addIdenty()); - Params::const_iterator first = params_.begin(); - Params::const_iterator last = params_.end(); - MapIndexer indexer (ranges_, idx); - if (Globals::logDomain) { - for (; first != last; ++indexer) { - newps[indexer] = Util::logSum (newps[indexer], *first++); - } - } else { - for (; first != last; ++indexer) { - newps[indexer] += *first++; - } - } - params_ = newps; - args_.erase (args_.begin() + idx); - ranges_.erase (ranges_.begin() + idx); - } - - void absorveEvidence (const T& arg, unsigned obsIdx) - { - size_t idx = indexOf (arg); - assert (idx != args_.size()); - assert (obsIdx < ranges_[idx]); - Params newps; - newps.reserve (params_.size() / ranges_[idx]); - Indexer indexer (ranges_); - for (unsigned i = 0; i < obsIdx; ++i) { - indexer.incrementDimension (idx); - } - while (indexer.valid()) { - newps.push_back (params_[indexer]); - indexer.incrementExceptDimension (idx); - } - params_ = newps; - args_.erase (args_.begin() + idx); - ranges_.erase (ranges_.begin() + idx); - } - - void reorderArguments (const vector new_args) - { - assert (new_args.size() == args_.size()); - if (new_args == args_) { - return; // already on the desired order - } - Ranges new_ranges; - for (size_t i = 0; i < new_args.size(); i++) { - size_t idx = indexOf (new_args[i]); - assert (idx != args_.size()); - new_ranges.push_back (ranges_[idx]); - } - Params newps; - newps.reserve (params_.size()); - MapIndexer indexer (new_args, new_ranges, args_, ranges_); - for (; indexer.valid(); ++indexer) { - newps.push_back (params_[indexer]); - } - params_ = newps; - args_ = new_args; - ranges_ = new_ranges; - } - - bool contains (const T& arg) const - { - return Util::contains (args_, arg); - } - - bool contains (const vector& args) const - { - for (size_t i = 0; i < args.size(); i++) { - if (contains (args[i]) == false) { - return false; - } - } - return true; - } - - double& operator[] (size_t idx) - { - assert (idx < params_.size()); - return params_[idx]; - } - - - protected: - vector args_; - Ranges ranges_; - Params params_; - unsigned distId_; - - private: - void extend (unsigned range_prod) - { - Params backup = params_; - params_.clear(); - params_.reserve (backup.size() * range_prod); - Params::const_iterator first = backup.begin(); - Params::const_iterator last = backup.end(); - for (; first != last; ++first) { - for (unsigned reps = 0; reps < range_prod; ++reps) { - params_.push_back (*first); 
- } - } - } - - void cartesianProduct ( - Params::const_iterator first2, - Params::const_iterator last2) - { - Params backup = params_; - params_.clear(); - params_.reserve (params_.size() * (last2 - first2)); - Params::const_iterator first1 = backup.begin(); - Params::const_iterator last1 = backup.end(); - Params::const_iterator tmp; - if (Globals::logDomain) { - for (; first1 != last1; ++first1) { - for (tmp = first2; tmp != last2; ++tmp) { - params_.push_back ((*first1) + (*tmp)); - } - } - } else { - for (; first1 != last1; ++first1) { - for (tmp = first2; tmp != last2; ++tmp) { - params_.push_back ((*first1) * (*tmp)); - } - } - } - } - -}; - - - -class Factor : public TFactor -{ - public: - Factor (void) { } - - Factor (const Factor&); - - Factor (const VarIds&, const Ranges&, const Params&, - unsigned = Util::maxUnsigned()); - - Factor (const Vars&, const Params&, - unsigned = Util::maxUnsigned()); - - void sumOut (VarId); - - void sumOutAllExcept (VarId); - - void sumOutAllExcept (const VarIds&); - - void sumOutAllExceptIndex (size_t idx); - - void multiply (Factor&); - - string getLabel (void) const; - - void print (void) const; - - private: - void sumOutFirstVariable (void); - - void sumOutLastVariable (void); - - void sumOutArgs (const vector& mask); - - void clone (const Factor& f); - -}; - -#endif // HORUS_FACTOR_H - diff --git a/packages/CLPBN/horus2/FactorGraph.cpp b/packages/CLPBN/horus2/FactorGraph.cpp deleted file mode 100644 index ba31a9faa..000000000 --- a/packages/CLPBN/horus2/FactorGraph.cpp +++ /dev/null @@ -1,454 +0,0 @@ -#include -#include -#include - -#include -#include -#include - -#include "FactorGraph.h" -#include "Factor.h" -#include "BayesBall.h" -#include "Util.h" - - -FactorGraph::FactorGraph (const FactorGraph& fg) -{ - const VarNodes& varNodes = fg.varNodes(); - for (size_t i = 0; i < varNodes.size(); i++) { - addVarNode (new VarNode (varNodes[i])); - } - const FacNodes& facNodes = fg.facNodes(); - for (size_t i = 0; i < facNodes.size(); i++) { - FacNode* facNode = new FacNode (facNodes[i]->factor()); - addFacNode (facNode); - const VarNodes& neighs = facNodes[i]->neighbors(); - for (size_t j = 0; j < neighs.size(); j++) { - addEdge (varNodes_[neighs[j]->getIndex()], facNode); - } - } - bayesFactors_ = fg.bayesianFactors(); -} - - - -void -FactorGraph::readFromUaiFormat (const char* fileName) -{ - std::ifstream is (fileName); - if (!is.is_open()) { - cerr << "Error: couldn't open file '" << fileName << "'." ; - exit (EXIT_FAILURE); - } - ignoreLines (is); - string line; - getline (is, line); - if (line != "MARKOV") { - cerr << "Error: the network must be a MARKOV network." << endl; - exit (EXIT_FAILURE); - } - // read the number of vars - ignoreLines (is); - unsigned nrVars; - is >> nrVars; - // read the range of each var - ignoreLines (is); - Ranges ranges (nrVars); - for (unsigned i = 0; i < nrVars; i++) { - is >> ranges[i]; - } - unsigned nrFactors; - unsigned nrArgs; - unsigned vid; - is >> nrFactors; - vector factorVarIds; - vector factorRanges; - for (unsigned i = 0; i < nrFactors; i++) { - ignoreLines (is); - is >> nrArgs; - factorVarIds.push_back ({ }); - factorRanges.push_back ({ }); - for (unsigned j = 0; j < nrArgs; j++) { - is >> vid; - if (vid >= ranges.size()) { - cerr << "Error: invalid variable identifier `" << vid << "'. " ; - cerr << "Identifiers must be between 0 and " << ranges.size() - 1 ; - cerr << "." 
<< endl; - exit (EXIT_FAILURE); - } - factorVarIds.back().push_back (vid); - factorRanges.back().push_back (ranges[vid]); - } - } - // read the parameters - unsigned nrParams; - for (unsigned i = 0; i < nrFactors; i++) { - ignoreLines (is); - is >> nrParams; - if (nrParams != Util::sizeExpected (factorRanges[i])) { - cerr << "Error: invalid number of parameters for factor nº " << i ; - cerr << ", " << Util::sizeExpected (factorRanges[i]); - cerr << " expected, " << nrParams << " given." << endl; - exit (EXIT_FAILURE); - } - Params params (nrParams); - for (unsigned j = 0; j < nrParams; j++) { - is >> params[j]; - } - if (Globals::logDomain) { - Util::log (params); - } - addFactor (Factor (factorVarIds[i], factorRanges[i], params)); - } - is.close(); -} - - - -void -FactorGraph::readFromLibDaiFormat (const char* fileName) -{ - std::ifstream is (fileName); - if (!is.is_open()) { - cerr << "Error: couldn't open file '" << fileName << "'." ; - exit (EXIT_FAILURE); - } - ignoreLines (is); - unsigned nrFactors; - unsigned nrArgs; - VarId vid; - is >> nrFactors; - for (unsigned i = 0; i < nrFactors; i++) { - ignoreLines (is); - // read the factor arguments - is >> nrArgs; - VarIds vids; - for (unsigned j = 0; j < nrArgs; j++) { - ignoreLines (is); - is >> vid; - vids.push_back (vid); - } - // read ranges - Ranges ranges (nrArgs); - for (unsigned j = 0; j < nrArgs; j++) { - ignoreLines (is); - is >> ranges[j]; - VarNode* var = getVarNode (vids[j]); - if (var != 0 && ranges[j] != var->range()) { - cerr << "Error: variable `" << vids[j] << "' appears in two or " ; - cerr << "more factors with a different range." << endl; - } - } - // read parameters - ignoreLines (is); - unsigned nNonzeros; - is >> nNonzeros; - Params params (Util::sizeExpected (ranges), 0); - for (unsigned j = 0; j < nNonzeros; j++) { - ignoreLines (is); - unsigned index; - is >> index; - ignoreLines (is); - double val; - is >> val; - params[index] = val; - } - if (Globals::logDomain) { - Util::log (params); - } - std::reverse (vids.begin(), vids.end()); - Factor f (vids, ranges, params); - std::reverse (vids.begin(), vids.end()); - f.reorderArguments (vids); - addFactor (f); - } - is.close(); -} - - - -FactorGraph::~FactorGraph (void) -{ - for (size_t i = 0; i < varNodes_.size(); i++) { - delete varNodes_[i]; - } - for (size_t i = 0; i < facNodes_.size(); i++) { - delete facNodes_[i]; - } -} - - - -void -FactorGraph::addFactor (const Factor& factor) -{ - FacNode* fn = new FacNode (factor); - addFacNode (fn); - const VarIds& vids = fn->factor().arguments(); - for (size_t i = 0; i < vids.size(); i++) { - VarMap::const_iterator it = varMap_.find (vids[i]); - if (it != varMap_.end()) { - addEdge (it->second, fn); - } else { - VarNode* vn = new VarNode (vids[i], fn->factor().range (i)); - addVarNode (vn); - addEdge (vn, fn); - } - } -} - - - -void -FactorGraph::addVarNode (VarNode* vn) -{ - varNodes_.push_back (vn); - vn->setIndex (varNodes_.size() - 1); - varMap_.insert (make_pair (vn->varId(), vn)); -} - - - -void -FactorGraph::addFacNode (FacNode* fn) -{ - facNodes_.push_back (fn); - fn->setIndex (facNodes_.size() - 1); -} - - - -void -FactorGraph::addEdge (VarNode* vn, FacNode* fn) -{ - vn->addNeighbor (fn); - fn->addNeighbor (vn); -} - - - -bool -FactorGraph::isTree (void) const -{ - return !containsCycle(); -} - - - -BayesBallGraph& -FactorGraph::getStructure (void) -{ - assert (bayesFactors_); - if (structure_.empty()) { - for (size_t i = 0; i < varNodes_.size(); i++) { - structure_.addNode (new BBNode (varNodes_[i])); - } - for 
(size_t i = 0; i < facNodes_.size(); i++) { - const VarIds& vids = facNodes_[i]->factor().arguments(); - for (size_t j = 1; j < vids.size(); j++) { - structure_.addEdge (vids[j], vids[0]); - } - } - } - return structure_; -} - - - -void -FactorGraph::print (void) const -{ - for (size_t i = 0; i < varNodes_.size(); i++) { - cout << "var id = " << varNodes_[i]->varId() << endl; - cout << "label = " << varNodes_[i]->label() << endl; - cout << "range = " << varNodes_[i]->range() << endl; - cout << "evidence = " << varNodes_[i]->getEvidence() << endl; - cout << "factors = " ; - for (size_t j = 0; j < varNodes_[i]->neighbors().size(); j++) { - cout << varNodes_[i]->neighbors()[j]->getLabel() << " " ; - } - cout << endl << endl; - } - for (size_t i = 0; i < facNodes_.size(); i++) { - facNodes_[i]->factor().print(); - } -} - - - -void -FactorGraph::exportToGraphViz (const char* fileName) const -{ - ofstream out (fileName); - if (!out.is_open()) { - cerr << "Error: couldn't open file '" << fileName << "'." ; - return; - } - out << "graph \"" << fileName << "\" {" << endl; - for (size_t i = 0; i < varNodes_.size(); i++) { - if (varNodes_[i]->hasEvidence()) { - out << '"' << varNodes_[i]->label() << '"' ; - out << " [style=filled, fillcolor=yellow]" << endl; - } - } - for (size_t i = 0; i < facNodes_.size(); i++) { - out << '"' << facNodes_[i]->getLabel() << '"' ; - out << " [label=\"" << facNodes_[i]->getLabel(); - out << "\"" << ", shape=box]" << endl; - } - for (size_t i = 0; i < facNodes_.size(); i++) { - const VarNodes& myVars = facNodes_[i]->neighbors(); - for (size_t j = 0; j < myVars.size(); j++) { - out << '"' << facNodes_[i]->getLabel() << '"' ; - out << " -- " ; - out << '"' << myVars[j]->label() << '"' << endl; - } - } - out << "}" << endl; - out.close(); -} - - - -void -FactorGraph::exportToUaiFormat (const char* fileName) const -{ - ofstream out (fileName); - if (!out.is_open()) { - cerr << "Error: couldn't open file '" << fileName << "'." ; - return; - } - out << "MARKOV" << endl; - out << varNodes_.size() << endl; - VarNodes sortedVns = varNodes_; - std::sort (sortedVns.begin(), sortedVns.end(), sortByVarId()); - for (size_t i = 0; i < sortedVns.size(); i++) { - out << ((i != 0) ? " " : "") << sortedVns[i]->range(); - } - out << endl << facNodes_.size() << endl; - for (size_t i = 0; i < facNodes_.size(); i++) { - VarIds args = facNodes_[i]->factor().arguments(); - out << args.size() << " " << Util::elementsToString (args) << endl; - } - out << endl; - for (size_t i = 0; i < facNodes_.size(); i++) { - Params params = facNodes_[i]->factor().params(); - if (Globals::logDomain) { - Util::exp (params); - } - out << params.size() << endl << " " ; - out << Util::elementsToString (params) << endl << endl; - } - out.close(); -} - - - -void -FactorGraph::exportToLibDaiFormat (const char* fileName) const -{ - ofstream out (fileName); - if (!out.is_open()) { - cerr << "Error: couldn't open file '" << fileName << "'." 
; - return; - } - out << facNodes_.size() << endl << endl; - for (size_t i = 0; i < facNodes_.size(); i++) { - Factor f (facNodes_[i]->factor()); - out << f.nrArguments() << endl; - out << Util::elementsToString (f.arguments()) << endl; - out << Util::elementsToString (f.ranges()) << endl; - VarIds args = f.arguments(); - std::reverse (args.begin(), args.end()); - f.reorderArguments (args); - if (Globals::logDomain) { - Util::exp (f.params()); - } - out << f.size() << endl; - for (size_t j = 0; j < f.size(); j++) { - out << j << " " << f[j] << endl; - } - out << endl; - } - out.close(); -} - - - -void -FactorGraph::ignoreLines (std::ifstream& is) const -{ - string ignoreStr; - while (is.peek() == '#' || is.peek() == '\n') { - getline (is, ignoreStr); - } -} - - - -bool -FactorGraph::containsCycle (void) const -{ - vector visitedVars (varNodes_.size(), false); - vector visitedFactors (facNodes_.size(), false); - for (size_t i = 0; i < varNodes_.size(); i++) { - int v = varNodes_[i]->getIndex(); - if (!visitedVars[v]) { - if (containsCycle (varNodes_[i], 0, visitedVars, visitedFactors)) { - return true; - } - } - } - return false; -} - - - -bool -FactorGraph::containsCycle ( - const VarNode* v, - const FacNode* p, - vector& visitedVars, - vector& visitedFactors) const -{ - visitedVars[v->getIndex()] = true; - const FacNodes& adjacencies = v->neighbors(); - for (size_t i = 0; i < adjacencies.size(); i++) { - int w = adjacencies[i]->getIndex(); - if (!visitedFactors[w]) { - if (containsCycle (adjacencies[i], v, visitedVars, visitedFactors)) { - return true; - } - } - else if (visitedFactors[w] && adjacencies[i] != p) { - return true; - } - } - return false; // no cycle detected in this component -} - - - -bool -FactorGraph::containsCycle ( - const FacNode* v, - const VarNode* p, - vector& visitedVars, - vector& visitedFactors) const -{ - visitedFactors[v->getIndex()] = true; - const VarNodes& adjacencies = v->neighbors(); - for (size_t i = 0; i < adjacencies.size(); i++) { - int w = adjacencies[i]->getIndex(); - if (!visitedVars[w]) { - if (containsCycle (adjacencies[i], v, visitedVars, visitedFactors)) { - return true; - } - } - else if (visitedVars[w] && adjacencies[i] != p) { - return true; - } - } - return false; // no cycle detected in this component -} - diff --git a/packages/CLPBN/horus2/FactorGraph.h b/packages/CLPBN/horus2/FactorGraph.h deleted file mode 100644 index b2b03369d..000000000 --- a/packages/CLPBN/horus2/FactorGraph.h +++ /dev/null @@ -1,150 +0,0 @@ -#ifndef HORUS_FACTORGRAPH_H -#define HORUS_FACTORGRAPH_H - -#include - -#include "Factor.h" -#include "BayesBallGraph.h" -#include "Horus.h" - -using namespace std; - - -class FacNode; - -class VarNode : public Var -{ - public: - VarNode (VarId varId, unsigned nrStates, - int evidence = Constants::NO_EVIDENCE) - : Var (varId, nrStates, evidence) { } - - VarNode (const Var* v) : Var (v) { } - - void addNeighbor (FacNode* fn) { neighs_.push_back (fn); } - - const FacNodes& neighbors (void) const { return neighs_; } - - private: - DISALLOW_COPY_AND_ASSIGN (VarNode); - - FacNodes neighs_; -}; - - - -class FacNode -{ - public: - FacNode (const Factor& f) : factor_(f), index_(-1) { } - - const Factor& factor (void) const { return factor_; } - - Factor& factor (void) { return factor_; } - - void addNeighbor (VarNode* vn) { neighs_.push_back (vn); } - - const VarNodes& neighbors (void) const { return neighs_; } - - size_t getIndex (void) const { return index_; } - - void setIndex (size_t index) { index_ = index; } - - string getLabel 
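FactorGraph::isTree above reduces to the two containsCycle overloads, which run a depth-first search alternating between variable and factor nodes and report a cycle whenever they reach an already-visited node other than the one they came from. A minimal standalone sketch of the same test, with variables and factors simply numbered together as nodes of one undirected graph (hasCycle and the example chain are illustrative, not part of the package):

// Undirected DFS cycle test: a back edge to any node other than the
// immediate parent means the graph is not a tree.
#include <cstdio>
#include <vector>

static bool hasCycle (int u, int parent,
    const std::vector<std::vector<int>>& adj, std::vector<bool>& seen)
{
  seen[u] = true;
  for (int w : adj[u]) {
    if (!seen[w]) {
      if (hasCycle (w, u, adj, seen)) return true;
    } else if (w != parent) {
      return true;                // back edge: cycle found
    }
  }
  return false;
}

int main (void) {
  // nodes 0,2,4 are variables, 1,3 are factors: x0 - f0 - x1 - f1 - x2
  std::vector<std::vector<int>> adj = {{1}, {0, 2}, {1, 3}, {2, 4}, {3}};
  std::vector<bool> seen (adj.size(), false);
  bool cycle = false;
  for (size_t u = 0; u < adj.size(); u++) {
    if (!seen[u] && hasCycle (u, -1, adj, seen)) cycle = true;
  }
  std::printf ("is tree: %s\n", cycle ? "no" : "yes");
  return 0;
}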
(void) { return factor_.getLabel(); } - - private: - DISALLOW_COPY_AND_ASSIGN (FacNode); - - VarNodes neighs_; - Factor factor_; - size_t index_; -}; - - - -class FactorGraph -{ - public: - FactorGraph (void) : bayesFactors_(false) { } - - FactorGraph (const FactorGraph&); - - ~FactorGraph (void); - - const VarNodes& varNodes (void) const { return varNodes_; } - - const FacNodes& facNodes (void) const { return facNodes_; } - - void setFactorsAsBayesian (void) { bayesFactors_ = true; } - - bool bayesianFactors (void) const { return bayesFactors_; } - - size_t nrVarNodes (void) const { return varNodes_.size(); } - - size_t nrFacNodes (void) const { return facNodes_.size(); } - - VarNode* getVarNode (VarId vid) const - { - VarMap::const_iterator it = varMap_.find (vid); - return it != varMap_.end() ? it->second : 0; - } - - void readFromUaiFormat (const char*); - - void readFromLibDaiFormat (const char*); - - void addFactor (const Factor& factor); - - void addVarNode (VarNode*); - - void addFacNode (FacNode*); - - void addEdge (VarNode*, FacNode*); - - bool isTree (void) const; - - BayesBallGraph& getStructure (void); - - void print (void) const; - - void exportToGraphViz (const char*) const; - - void exportToUaiFormat (const char*) const; - - void exportToLibDaiFormat (const char*) const; - - private: - // DISALLOW_COPY_AND_ASSIGN (FactorGraph); - - void ignoreLines (std::ifstream&) const; - - bool containsCycle (void) const; - - bool containsCycle (const VarNode*, const FacNode*, - vector&, vector&) const; - - bool containsCycle (const FacNode*, const VarNode*, - vector&, vector&) const; - - VarNodes varNodes_; - FacNodes facNodes_; - - BayesBallGraph structure_; - bool bayesFactors_; - - typedef unordered_map VarMap; - VarMap varMap_; -}; - - - -struct sortByVarId -{ - bool operator()(VarNode* vn1, VarNode* vn2) { - return vn1->varId() < vn2->varId(); - } -}; - - -#endif // HORUS_FACTORGRAPH_H - diff --git a/packages/CLPBN/horus2/GroundSolver.cpp b/packages/CLPBN/horus2/GroundSolver.cpp deleted file mode 100644 index 4cd3fdbd2..000000000 --- a/packages/CLPBN/horus2/GroundSolver.cpp +++ /dev/null @@ -1,107 +0,0 @@ -#include "GroundSolver.h" -#include "Util.h" -#include "BeliefProp.h" -#include "CountingBp.h" -#include "VarElim.h" - - -void -GroundSolver::printAnswer (const VarIds& vids) -{ - Vars unobservedVars; - VarIds unobservedVids; - for (size_t i = 0; i < vids.size(); i++) { - VarNode* vn = fg.getVarNode (vids[i]); - if (vn->hasEvidence() == false) { - unobservedVars.push_back (vn); - unobservedVids.push_back (vids[i]); - } - } - if (unobservedVids.empty() == false) { - Params res = solveQuery (unobservedVids); - vector stateLines = Util::getStateLines (unobservedVars); - for (size_t i = 0; i < res.size(); i++) { - cout << "P(" << stateLines[i] << ") = " ; - cout << std::setprecision (Constants::PRECISION) << res[i]; - cout << endl; - } - cout << endl; - } -} - - - -void -GroundSolver::printAllPosterioris (void) -{ - VarNodes vars = fg.varNodes(); - std::sort (vars.begin(), vars.end(), sortByVarId()); - for (size_t i = 0; i < vars.size(); i++) { - printAnswer ({vars[i]->varId()}); - } -} - - - -Params -GroundSolver::getJointByConditioning ( - GroundSolverType solverType, - FactorGraph fg, - const VarIds& jointVarIds) const -{ - VarNodes jointVars; - for (size_t i = 0; i < jointVarIds.size(); i++) { - assert (fg.getVarNode (jointVarIds[i])); - jointVars.push_back (fg.getVarNode (jointVarIds[i])); - } - - GroundSolver* solver = 0; - switch (solverType) { - case GroundSolverType::BP: 
solver = new BeliefProp (fg); break; - case GroundSolverType::CBP: solver = new CountingBp (fg); break; - case GroundSolverType::VE: solver = new VarElim (fg); break; - } - Params prevBeliefs = solver->solveQuery ({jointVarIds[0]}); - VarIds observedVids = {jointVars[0]->varId()}; - - for (size_t i = 1; i < jointVarIds.size(); i++) { - assert (jointVars[i]->hasEvidence() == false); - Params newBeliefs; - Vars observedVars; - Ranges observedRanges; - for (size_t j = 0; j < observedVids.size(); j++) { - observedVars.push_back (fg.getVarNode (observedVids[j])); - observedRanges.push_back (observedVars.back()->range()); - } - Indexer indexer (observedRanges, false); - while (indexer.valid()) { - for (size_t j = 0; j < observedVars.size(); j++) { - observedVars[j]->setEvidence (indexer[j]); - } - delete solver; - switch (solverType) { - case GroundSolverType::BP: solver = new BeliefProp (fg); break; - case GroundSolverType::CBP: solver = new CountingBp (fg); break; - case GroundSolverType::VE: solver = new VarElim (fg); break; - } - Params beliefs = solver->solveQuery ({jointVarIds[i]}); - for (size_t k = 0; k < beliefs.size(); k++) { - newBeliefs.push_back (beliefs[k]); - } - ++ indexer; - } - - int count = -1; - for (size_t j = 0; j < newBeliefs.size(); j++) { - if (j % jointVars[i]->range() == 0) { - count ++; - } - newBeliefs[j] *= prevBeliefs[count]; - } - prevBeliefs = newBeliefs; - observedVids.push_back (jointVars[i]->varId()); - } - delete solver; - return prevBeliefs; -} - diff --git a/packages/CLPBN/horus2/GroundSolver.h b/packages/CLPBN/horus2/GroundSolver.h deleted file mode 100644 index 18b81454b..000000000 --- a/packages/CLPBN/horus2/GroundSolver.h +++ /dev/null @@ -1,36 +0,0 @@ -#ifndef HORUS_GROUNDSOLVER_H -#define HORUS_GROUNDSOLVER_H - -#include - -#include "FactorGraph.h" -#include "Var.h" -#include "Horus.h" - - -using namespace std; - -class GroundSolver -{ - public: - GroundSolver (const FactorGraph& factorGraph) : fg(factorGraph) { } - - virtual ~GroundSolver() { } // ensure that subclass destructor is called - - virtual Params solveQuery (VarIds queryVids) = 0; - - virtual void printSolverFlags (void) const = 0; - - void printAnswer (const VarIds& vids); - - void printAllPosterioris (void); - - Params getJointByConditioning (GroundSolverType, - FactorGraph, const VarIds& jointVarIds) const; - - protected: - const FactorGraph& fg; -}; - -#endif // HORUS_GROUNDSOLVER_H - diff --git a/packages/CLPBN/horus2/Histogram.cpp b/packages/CLPBN/horus2/Histogram.cpp deleted file mode 100644 index a9e96cfdd..000000000 --- a/packages/CLPBN/horus2/Histogram.cpp +++ /dev/null @@ -1,146 +0,0 @@ -#include - -#include -#include - -#include "Histogram.h" -#include "Util.h" - - -HistogramSet::HistogramSet (unsigned size, unsigned range) -{ - size_ = size; - hist_.resize (range, 0); - hist_[0] = size; -} - - - -void -HistogramSet::nextHistogram (void) -{ - for (size_t i = hist_.size() - 1; i-- > 0; ) { - if (hist_[i] > 0) { - hist_[i] --; - hist_[i + 1] = maxCount (i + 1); - clearAfter (i + 1); - break; - } - } - assert (std::accumulate (hist_.begin(), hist_.end(), 0) - == (int) size_); -} - - - -unsigned -HistogramSet::operator[] (size_t idx) const -{ - assert (idx < hist_.size()); - return hist_[idx]; -} - - - -unsigned -HistogramSet::nrHistograms (void) const -{ - return HistogramSet::nrHistograms (size_, hist_.size()); -} - - - -void -HistogramSet::reset (void) -{ - std::fill (hist_.begin() + 1, hist_.end(), 0); - hist_[0] = size_; -} - - - -vector -HistogramSet::getHistograms (unsigned 
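GroundSolver::getJointByConditioning above assembles a joint distribution with the chain rule: the beliefs computed so far play the role of P(A), each evidence setting of the already-processed variables yields one block of conditional beliefs, and the two are multiplied block by block. A tiny numeric sketch of that multiplication, with hard-coded distributions instead of a solver:

// Chain rule on two binary variables: P(A,B) = P(B | A) * P(A).
#include <cstdio>
#include <vector>

int main (void) {
  std::vector<double> pA = { 0.6, 0.4 };               // P(A)
  std::vector<std::vector<double>> pBgivenA = {        // P(B | A = a)
    { 0.7, 0.3 },                                      // a = 0
    { 0.2, 0.8 }                                       // a = 1
  };
  std::vector<double> joint;                           // entries ordered as (A,B)
  for (size_t a = 0; a < pA.size(); a++) {
    for (size_t b = 0; b < pBgivenA[a].size(); b++) {
      joint.push_back (pBgivenA[a][b] * pA[a]);
    }
  }
  for (size_t i = 0; i < joint.size(); i++) {
    std::printf ("P(A=%zu,B=%zu) = %.2f\n", i / 2, i % 2, joint[i]);
  }
  return 0;                                            // the four entries sum to 1
}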
N, unsigned R) -{ - HistogramSet hs (N, R); - unsigned H = hs.nrHistograms(); - vector histograms; - histograms.reserve (H); - for (unsigned i = 0; i < H; i++) { - histograms.push_back (hs.hist_); - hs.nextHistogram(); - } - return histograms; -} - - - -unsigned -HistogramSet::nrHistograms (unsigned N, unsigned R) -{ - return Util::nrCombinations (N + R - 1, R - 1); -} - - - -size_t -HistogramSet::findIndex ( - const Histogram& h, - const vector& hists) -{ - vector::const_iterator it = std::lower_bound ( - hists.begin(), hists.end(), h, std::greater()); - assert (it != hists.end() && *it == h); - return std::distance (hists.begin(), it); -} - - - -vector -HistogramSet::getNumAssigns (unsigned N, unsigned R) -{ - HistogramSet hs (N, R); - double N_fac = Util::logFactorial (N); - unsigned H = hs.nrHistograms(); - vector numAssigns; - numAssigns.reserve (H); - for (unsigned h = 0; h < H; h++) { - double prod = 0.0; - for (unsigned r = 0; r < R; r++) { - prod += Util::logFactorial (hs[r]); - } - double res = N_fac - prod; - numAssigns.push_back (Globals::logDomain ? res : std::exp (res)); - hs.nextHistogram(); - } - return numAssigns; -} - - - -ostream& operator<< (ostream &os, const HistogramSet& hs) -{ - os << "#" << hs.hist_; - return os; -} - - - -unsigned -HistogramSet::maxCount (size_t idx) const -{ - unsigned sum = 0; - for (size_t i = 0; i < idx; i++) { - sum += hist_[i]; - } - return size_ - sum; -} - - - -void -HistogramSet::clearAfter (size_t idx) -{ - std::fill (hist_.begin() + idx + 1, hist_.end(), 0); -} - diff --git a/packages/CLPBN/horus2/Histogram.h b/packages/CLPBN/horus2/Histogram.h deleted file mode 100644 index af0c4595e..000000000 --- a/packages/CLPBN/horus2/Histogram.h +++ /dev/null @@ -1,45 +0,0 @@ -#ifndef HORUS_HISTOGRAM_H -#define HORUS_HISTOGRAM_H - -#include -#include - -using namespace std; - -typedef vector Histogram; - -class HistogramSet -{ - public: - HistogramSet (unsigned, unsigned); - - void nextHistogram (void); - - unsigned operator[] (size_t idx) const; - - unsigned nrHistograms (void) const; - - void reset (void); - - static vector getHistograms (unsigned ,unsigned); - - static unsigned nrHistograms (unsigned, unsigned); - - static size_t findIndex ( - const Histogram&, const vector&); - - static vector getNumAssigns (unsigned, unsigned); - - friend std::ostream& operator<< (ostream &os, const HistogramSet& hs); - - private: - unsigned maxCount (size_t) const; - - void clearAfter (size_t); - - unsigned size_; - Histogram hist_; -}; - -#endif // HORUS_HISTOGRAM_H - diff --git a/packages/CLPBN/horus2/Horus.h b/packages/CLPBN/horus2/Horus.h deleted file mode 100644 index 7e5f12c8e..000000000 --- a/packages/CLPBN/horus2/Horus.h +++ /dev/null @@ -1,87 +0,0 @@ -#ifndef HORUS_HORUS_H -#define HORUS_HORUS_H - -#include - -#include - -#define DISALLOW_COPY_AND_ASSIGN(TypeName) \ - TypeName(const TypeName&); \ - void operator=(const TypeName&) - -using namespace std; - -class Var; -class Factor; -class VarNode; -class FacNode; - -typedef vector Params; -typedef unsigned VarId; -typedef vector VarIds; -typedef vector Vars; -typedef vector VarNodes; -typedef vector FacNodes; -typedef vector Factors; -typedef vector States; -typedef vector Ranges; -typedef unsigned long long ullong; - - -enum LiftedSolverType -{ - LVE, // generalized counting first-order variable elimination (GC-FOVE) - LBP, // lifted first-order belief propagation - LKC // lifted first-order knowledge compilation -}; - - -enum GroundSolverType -{ - VE, // variable elimination - BP, // belief 
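HistogramSet above enumerates the counting histograms used by the lifted solvers: the number of histograms of N interchangeable elements over R states is C(N+R-1, R-1), and a histogram (h_1, ..., h_R) stands for N! / (h_1! ... h_R!) concrete assignments (getNumAssigns computes this in logs). A small self-contained check of both counts (nChooseK and factorial are illustrative helpers, not the package's Util functions):

#include <cstdio>
#include <vector>

static unsigned long long nChooseK (unsigned n, unsigned k) {
  unsigned long long res = 1;
  for (unsigned i = 1; i <= k; i++) {
    res = res * (n - k + i) / i;   // exact: each intermediate value is an integer
  }
  return res;
}

static unsigned long long factorial (unsigned n) {
  unsigned long long res = 1;
  for (unsigned i = 2; i <= n; i++) res *= i;
  return res;
}

int main (void) {
  unsigned N = 3, R = 2;
  std::printf ("nr histograms = %llu\n", nChooseK (N + R - 1, R - 1));  // 4
  std::vector<unsigned> hist = { 2, 1 };  // two elements in state 0, one in state 1
  unsigned long long assigns = factorial (N);
  for (unsigned h : hist) assigns /= factorial (h);
  std::printf ("assignments for (2,1) = %llu\n", assigns);              // 3
  return 0;
}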
propagation - CBP // counting belief propagation -}; - - -namespace Globals { - -extern bool logDomain; - -// level of debug information -extern unsigned verbosity; - -extern LiftedSolverType liftedSolver; -extern GroundSolverType groundSolver; - -}; - - -namespace Constants { - -// show message calculation for belief propagation -const bool SHOW_BP_CALCS = false; - -const int NO_EVIDENCE = -1; - -// number of digits to show when printing a parameter -const unsigned PRECISION = 6; - -}; - - -namespace BpOptions -{ - enum Schedule { - SEQ_FIXED, - SEQ_RANDOM, - PARALLEL, - MAX_RESIDUAL - }; - extern Schedule schedule; - extern double accuracy; - extern unsigned maxIter; -} - -#endif // HORUS_HORUS_H - diff --git a/packages/CLPBN/horus2/HorusCli.cpp b/packages/CLPBN/horus2/HorusCli.cpp deleted file mode 100644 index 520603052..000000000 --- a/packages/CLPBN/horus2/HorusCli.cpp +++ /dev/null @@ -1,187 +0,0 @@ -#include - -#include -#include - -#include "FactorGraph.h" -#include "VarElim.h" -#include "BeliefProp.h" -#include "CountingBp.h" - -using namespace std; - -int readHorusFlags (int, const char* []); -void readFactorGraph (FactorGraph&, const char*); -VarIds readQueryAndEvidence (FactorGraph&, int, const char* [], int); - -void runSolver (const FactorGraph&, const VarIds&); - -const string USAGE = "usage: ./hcli [HORUS_FLAG=VALUE] \ -MODEL_FILE [VARIABLE | OBSERVED_VARIABLE=EVIDENCE] ..." ; - - -int -main (int argc, const char* argv[]) -{ - if (argc <= 1) { - cerr << "Error: no probabilistic graphical model was given." << endl; - cerr << USAGE << endl; - exit (EXIT_FAILURE); - } - int idx = readHorusFlags (argc, argv); - FactorGraph fg; - readFactorGraph (fg, argv[idx]); - VarIds queryIds = readQueryAndEvidence (fg, argc, argv, idx + 1); - runSolver (fg, queryIds); - return 0; -} - - - -int -readHorusFlags (int argc, const char* argv[]) -{ - int i = 1; - for (; i < argc; i++) { - const string& arg = argv[i]; - size_t pos = arg.find ('='); - if (pos == std::string::npos) { - return i; - } - string leftArg = arg.substr (0, pos); - string rightArg = arg.substr (pos + 1); - if (leftArg.empty()) { - cerr << "Error: missing left argument." << endl; - cerr << USAGE << endl; - exit (EXIT_FAILURE); - } - if (rightArg.empty()) { - cerr << "Error: missing right argument." << endl; - cerr << USAGE << endl; - exit (EXIT_FAILURE); - } - Util::setHorusFlag (leftArg, rightArg); - } - return i + 1; -} - - - -void -readFactorGraph (FactorGraph& fg, const char* s) -{ - string fileName (s); - string extension = fileName.substr (fileName.find_last_of ('.') + 1); - if (extension == "uai") { - fg.readFromUaiFormat (fileName.c_str()); - } else if (extension == "fg") { - fg.readFromLibDaiFormat (fileName.c_str()); - } else { - cerr << "Error: the probabilistic graphical model must be " ; - cerr << "defined either in a UAI or libDAI file." << endl; - exit (EXIT_FAILURE); - } -} - - - -VarIds -readQueryAndEvidence ( - FactorGraph& fg, - int argc, - const char* argv[], - int start) -{ - VarIds queryIds; - for (int i = start; i < argc; i++) { - const string& arg = argv[i]; - if (arg.find ('=') == std::string::npos) { - if (Util::isInteger (arg) == false) { - cerr << "Error: `" << arg << "' " ; - cerr << "is not a variable id." ; - cerr << endl; - exit (EXIT_FAILURE); - } - VarId vid = Util::stringToUnsigned (arg); - VarNode* queryVar = fg.getVarNode (vid); - if (queryVar == false) { - cerr << "Error: unknow variable with id " ; - cerr << "`" << vid << "'." 
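readHorusFlags above treats every leading command-line argument of the form FLAG=VALUE as a solver option and stops at the first argument without an '='; that argument is taken as the model file, and the remaining ones as query variables or VAR=STATE evidence. A minimal standalone sketch of the splitting step, using the verbosity flag handled elsewhere in the package as the example value:

#include <cstdio>
#include <string>

int main (void) {
  std::string arg = "verbosity=1";            // an illustrative argument
  size_t pos = arg.find ('=');
  if (pos == std::string::npos) {
    std::printf ("'%s' is not a flag\n", arg.c_str());
  } else {
    std::string key   = arg.substr (0, pos);
    std::string value = arg.substr (pos + 1);
    std::printf ("flag '%s' set to '%s'\n", key.c_str(), value.c_str());
  }
  return 0;
}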
<< endl; - exit (EXIT_FAILURE); - } - queryIds.push_back (vid); - } else { - size_t pos = arg.find ('='); - string leftArg = arg.substr (0, pos); - string rightArg = arg.substr (pos + 1); - if (leftArg.empty()) { - cerr << "Error: missing left argument." << endl; - cerr << USAGE << endl; - exit (EXIT_FAILURE); - } - if (Util::isInteger (leftArg) == false) { - cerr << "Error: `" << leftArg << "' " ; - cerr << "is not a variable id." << endl ; - exit (EXIT_FAILURE); - } - VarId vid = Util::stringToUnsigned (leftArg); - VarNode* observedVar = fg.getVarNode (vid); - if (observedVar == false) { - cerr << "Error: unknow variable with id " ; - cerr << "`" << vid << "'." << endl; - exit (EXIT_FAILURE); - } - if (rightArg.empty()) { - cerr << "Error: missing right argument." << endl; - cerr << USAGE << endl; - exit (EXIT_FAILURE); - } - if (Util::isInteger (rightArg) == false) { - cerr << "Error: `" << rightArg << "' " ; - cerr << "is not a state index." << endl ; - exit (EXIT_FAILURE); - } - unsigned stateIdx = Util::stringToUnsigned (rightArg); - if (observedVar->isValidState (stateIdx) == false) { - cerr << "Error: `" << stateIdx << "' " ; - cerr << "is not a valid state index for variable with id " ; - cerr << "`" << vid << "'." << endl; - exit (EXIT_FAILURE); - } - observedVar->setEvidence (stateIdx); - } - } - return queryIds; -} - - - -void -runSolver (const FactorGraph& fg, const VarIds& queryIds) -{ - GroundSolver* solver = 0; - switch (Globals::groundSolver) { - case GroundSolverType::VE: - solver = new VarElim (fg); - break; - case GroundSolverType::BP: - solver = new BeliefProp (fg); - break; - case GroundSolverType::CBP: - solver = new CountingBp (fg); - break; - default: - assert (false); - } - if (Globals::verbosity > 0) { - solver->printSolverFlags(); - cout << endl; - } - if (queryIds.empty()) { - solver->printAllPosterioris(); - } else { - solver->printAnswer (queryIds); - } - delete solver; -} - diff --git a/packages/CLPBN/horus2/HorusYap.cpp b/packages/CLPBN/horus2/HorusYap.cpp deleted file mode 100644 index 3c566b73a..000000000 --- a/packages/CLPBN/horus2/HorusYap.cpp +++ /dev/null @@ -1,570 +0,0 @@ -#include - -#include - -#include -#include - -#include - -#include "ParfactorList.h" -#include "FactorGraph.h" -#include "LiftedOperations.h" -#include "LiftedVe.h" -#include "VarElim.h" -#include "LiftedBp.h" -#include "CountingBp.h" -#include "BeliefProp.h" -#include "LiftedKc.h" -#include "ElimGraph.h" -#include "BayesBall.h" - - -using namespace std; - -typedef std::pair LiftedNetwork; - -Parfactor* readParfactor (YAP_Term); - -void readLiftedEvidence (YAP_Term, ObservedFormulas&); - -vector readUnsignedList (YAP_Term list); - -Params readParameters (YAP_Term); - -YAP_Term fillAnswersPrologList (vector& results); - - - -int -createLiftedNetwork (void) -{ - Parfactors parfactors; - YAP_Term parfactorList = YAP_ARG1; - while (parfactorList != YAP_TermNil()) { - YAP_Term pfTerm = YAP_HeadOfTerm (parfactorList); - parfactors.push_back (readParfactor (pfTerm)); - parfactorList = YAP_TailOfTerm (parfactorList); - } - - // LiftedUtils::printSymbolDictionary(); - if (Globals::verbosity > 2) { - Util::printHeader ("INITIAL PARFACTORS"); - for (size_t i = 0; i < parfactors.size(); i++) { - parfactors[i]->print(); - cout << endl; - } - } - - ParfactorList* pfList = new ParfactorList (parfactors); - - if (Globals::verbosity > 2) { - Util::printHeader ("SHATTERED PARFACTORS"); - pfList->print(); - } - - // read evidence - ObservedFormulas* obsFormulas = new ObservedFormulas(); - 
readLiftedEvidence (YAP_ARG2, *(obsFormulas)); - - LiftedNetwork* net = new LiftedNetwork (pfList, obsFormulas); - - YAP_Int p = (YAP_Int) (net); - return YAP_Unify (YAP_MkIntTerm (p), YAP_ARG3); -} - - - -int -createGroundNetwork (void) -{ - string factorsType ((char*) YAP_AtomName (YAP_AtomOfTerm (YAP_ARG1))); - FactorGraph* fg = new FactorGraph(); - if (factorsType == "bayes") { - fg->setFactorsAsBayesian(); - } - YAP_Term factorList = YAP_ARG2; - while (factorList != YAP_TermNil()) { - YAP_Term factor = YAP_HeadOfTerm (factorList); - // read the var ids - VarIds varIds = readUnsignedList (YAP_ArgOfTerm (1, factor)); - // read the ranges - Ranges ranges = readUnsignedList (YAP_ArgOfTerm (2, factor)); - // read the parameters - Params params = readParameters (YAP_ArgOfTerm (3, factor)); - // read dist id - unsigned distId = (unsigned) YAP_IntOfTerm (YAP_ArgOfTerm (4, factor)); - fg->addFactor (Factor (varIds, ranges, params, distId)); - factorList = YAP_TailOfTerm (factorList); - } - unsigned nrObservedVars = 0; - YAP_Term evidenceList = YAP_ARG3; - while (evidenceList != YAP_TermNil()) { - YAP_Term evTerm = YAP_HeadOfTerm (evidenceList); - unsigned vid = (unsigned) YAP_IntOfTerm ((YAP_ArgOfTerm (1, evTerm))); - unsigned ev = (unsigned) YAP_IntOfTerm ((YAP_ArgOfTerm (2, evTerm))); - assert (fg->getVarNode (vid)); - fg->getVarNode (vid)->setEvidence (ev); - evidenceList = YAP_TailOfTerm (evidenceList); - nrObservedVars ++; - } - if (Globals::verbosity > 0) { - cout << "factor graph contains " ; - cout << fg->nrVarNodes() << " variables " ; - cout << "(" << nrObservedVars << " observed) and " ; - cout << fg->nrFacNodes() << " factors " << endl; - } - YAP_Int p = (YAP_Int) (fg); - return YAP_Unify (YAP_MkIntTerm (p), YAP_ARG4); -} - - - -int -runLiftedSolver (void) -{ - LiftedNetwork* network = (LiftedNetwork*) YAP_IntOfTerm (YAP_ARG1); - ParfactorList pfListCopy (*network->first); - LiftedOperations::absorveEvidence (pfListCopy, *network->second); - - LiftedSolver* solver = 0; - switch (Globals::liftedSolver) { - case LiftedSolverType::LVE: solver = new LiftedVe (pfListCopy); break; - case LiftedSolverType::LBP: solver = new LiftedBp (pfListCopy); break; - case LiftedSolverType::LKC: solver = new LiftedKc (pfListCopy); break; - } - - if (Globals::verbosity > 0) { - solver->printSolverFlags(); - cout << endl; - } - - YAP_Term taskList = YAP_ARG2; - vector results; - while (taskList != YAP_TermNil()) { - Grounds queryVars; - YAP_Term jointList = YAP_HeadOfTerm (taskList); - while (jointList != YAP_TermNil()) { - YAP_Term ground = YAP_HeadOfTerm (jointList); - if (YAP_IsAtomTerm (ground)) { - string name ((char*) YAP_AtomName (YAP_AtomOfTerm (ground))); - queryVars.push_back (Ground (LiftedUtils::getSymbol (name))); - } else { - assert (YAP_IsApplTerm (ground)); - YAP_Functor yapFunctor = YAP_FunctorOfTerm (ground); - string name ((char*) (YAP_AtomName (YAP_NameOfFunctor (yapFunctor)))); - unsigned arity = (unsigned) YAP_ArityOfFunctor (yapFunctor); - Symbol functor = LiftedUtils::getSymbol (name); - Symbols args; - for (unsigned i = 1; i <= arity; i++) { - YAP_Term ti = YAP_ArgOfTerm (i, ground); - assert (YAP_IsAtomTerm (ti)); - string arg ((char *) YAP_AtomName (YAP_AtomOfTerm (ti))); - args.push_back (LiftedUtils::getSymbol (arg)); - } - queryVars.push_back (Ground (functor, args)); - } - jointList = YAP_TailOfTerm (jointList); - } - results.push_back (solver->solveQuery (queryVars)); - taskList = YAP_TailOfTerm (taskList); - } - - delete solver; - - return YAP_Unify 
(fillAnswersPrologList (results), YAP_ARG3); -} - - - -int -runGroundSolver (void) -{ - FactorGraph* fg = (FactorGraph*) YAP_IntOfTerm (YAP_ARG1); - - vector tasks; - YAP_Term taskList = YAP_ARG2; - while (taskList != YAP_TermNil()) { - tasks.push_back (readUnsignedList (YAP_HeadOfTerm (taskList))); - taskList = YAP_TailOfTerm (taskList); - } - - FactorGraph* mfg = fg; - if (fg->bayesianFactors()) { - std::set vids; - for (size_t i = 0; i < tasks.size(); i++) { - Util::addToSet (vids, tasks[i]); - } - mfg = BayesBall::getMinimalFactorGraph ( - *fg, VarIds (vids.begin(), vids.end())); - } - - GroundSolver* solver = 0; - CountingBp::checkForIdenticalFactors = false; - switch (Globals::groundSolver) { - case GroundSolverType::VE: solver = new VarElim (*mfg); break; - case GroundSolverType::BP: solver = new BeliefProp (*mfg); break; - case GroundSolverType::CBP: solver = new CountingBp (*mfg); break; - } - - if (Globals::verbosity > 0) { - solver->printSolverFlags(); - cout << endl; - } - - vector results; - results.reserve (tasks.size()); - for (size_t i = 0; i < tasks.size(); i++) { - results.push_back (solver->solveQuery (tasks[i])); - } - - delete solver; - if (fg->bayesianFactors()) { - delete mfg; - } - - return YAP_Unify (fillAnswersPrologList (results), YAP_ARG3); -} - - - -int -setParfactorsParams (void) -{ - LiftedNetwork* network = (LiftedNetwork*) YAP_IntOfTerm (YAP_ARG1); - ParfactorList* pfList = network->first; - YAP_Term distIdsList = YAP_ARG2; - YAP_Term paramsList = YAP_ARG3; - unordered_map paramsMap; - while (distIdsList != YAP_TermNil()) { - unsigned distId = (unsigned) YAP_IntOfTerm ( - YAP_HeadOfTerm (distIdsList)); - assert (Util::contains (paramsMap, distId) == false); - paramsMap[distId] = readParameters (YAP_HeadOfTerm (paramsList)); - distIdsList = YAP_TailOfTerm (distIdsList); - paramsList = YAP_TailOfTerm (paramsList); - } - ParfactorList::iterator it = pfList->begin(); - while (it != pfList->end()) { - assert (Util::contains (paramsMap, (*it)->distId())); - (*it)->setParams (paramsMap[(*it)->distId()]); - ++ it; - } - return TRUE; -} - - - -int -setFactorsParams (void) -{ - FactorGraph* fg = (FactorGraph*) YAP_IntOfTerm (YAP_ARG1); - YAP_Term distIdsList = YAP_ARG2; - YAP_Term paramsList = YAP_ARG3; - unordered_map paramsMap; - while (distIdsList != YAP_TermNil()) { - unsigned distId = (unsigned) YAP_IntOfTerm ( - YAP_HeadOfTerm (distIdsList)); - assert (Util::contains (paramsMap, distId) == false); - paramsMap[distId] = readParameters (YAP_HeadOfTerm (paramsList)); - distIdsList = YAP_TailOfTerm (distIdsList); - paramsList = YAP_TailOfTerm (paramsList); - } - const FacNodes& facNodes = fg->facNodes(); - for (size_t i = 0; i < facNodes.size(); i++) { - unsigned distId = facNodes[i]->factor().distId(); - assert (Util::contains (paramsMap, distId)); - facNodes[i]->factor().setParams (paramsMap[distId]); - } - return TRUE; -} - - - -int -setVarsInformation (void) -{ - Var::clearVarsInfo(); - vector labels; - YAP_Term labelsL = YAP_ARG1; - while (labelsL != YAP_TermNil()) { - YAP_Atom atom = YAP_AtomOfTerm (YAP_HeadOfTerm (labelsL)); - labels.push_back ((char*) YAP_AtomName (atom)); - labelsL = YAP_TailOfTerm (labelsL); - } - unsigned count = 0; - YAP_Term stateNamesL = YAP_ARG2; - while (stateNamesL != YAP_TermNil()) { - States states; - YAP_Term namesL = YAP_HeadOfTerm (stateNamesL); - while (namesL != YAP_TermNil()) { - YAP_Atom atom = YAP_AtomOfTerm (YAP_HeadOfTerm (namesL)); - states.push_back ((char*) YAP_AtomName (atom)); - namesL = YAP_TailOfTerm (namesL); - 
} - Var::addVarInfo (count, labels[count], states); - count ++; - stateNamesL = YAP_TailOfTerm (stateNamesL); - } - return TRUE; -} - - - -int -setHorusFlag (void) -{ - string key ((char*) YAP_AtomName (YAP_AtomOfTerm (YAP_ARG1))); - string value; - if (key == "verbosity") { - stringstream ss; - ss << (int) YAP_IntOfTerm (YAP_ARG2); - ss >> value; - } else if (key == "accuracy") { - stringstream ss; - ss << (float) YAP_FloatOfTerm (YAP_ARG2); - ss >> value; - } else if (key == "max_iter") { - stringstream ss; - ss << (int) YAP_IntOfTerm (YAP_ARG2); - ss >> value; - } else { - value = ((char*) YAP_AtomName (YAP_AtomOfTerm (YAP_ARG2))); - } - return Util::setHorusFlag (key, value); -} - - - -int -freeGroundNetwork (void) -{ - delete (FactorGraph*) YAP_IntOfTerm (YAP_ARG1); - return TRUE; -} - - - -int -freeLiftedNetwork (void) -{ - LiftedNetwork* network = (LiftedNetwork*) YAP_IntOfTerm (YAP_ARG1); - delete network->first; - delete network->second; - delete network; - return TRUE; -} - - - -Parfactor* -readParfactor (YAP_Term pfTerm) -{ - // read dist id - unsigned distId = YAP_IntOfTerm (YAP_ArgOfTerm (1, pfTerm)); - - // read the ranges - Ranges ranges; - YAP_Term rangeList = YAP_ArgOfTerm (3, pfTerm); - while (rangeList != YAP_TermNil()) { - unsigned range = (unsigned) YAP_IntOfTerm (YAP_HeadOfTerm (rangeList)); - ranges.push_back (range); - rangeList = YAP_TailOfTerm (rangeList); - } - - // read parametric random vars - ProbFormulas formulas; - unsigned count = 0; - unordered_map lvMap; - YAP_Term pvList = YAP_ArgOfTerm (2, pfTerm); - while (pvList != YAP_TermNil()) { - YAP_Term formulaTerm = YAP_HeadOfTerm (pvList); - if (YAP_IsAtomTerm (formulaTerm)) { - string name ((char*) YAP_AtomName (YAP_AtomOfTerm (formulaTerm))); - Symbol functor = LiftedUtils::getSymbol (name); - formulas.push_back (ProbFormula (functor, ranges[count])); - } else { - LogVars logVars; - YAP_Functor yapFunctor = YAP_FunctorOfTerm (formulaTerm); - string name ((char*) YAP_AtomName (YAP_NameOfFunctor (yapFunctor))); - Symbol functor = LiftedUtils::getSymbol (name); - unsigned arity = (unsigned) YAP_ArityOfFunctor (yapFunctor); - for (unsigned i = 1; i <= arity; i++) { - YAP_Term ti = YAP_ArgOfTerm (i, formulaTerm); - unordered_map::iterator it = lvMap.find (ti); - if (it != lvMap.end()) { - logVars.push_back (it->second); - } else { - unsigned newLv = lvMap.size(); - lvMap[ti] = newLv; - logVars.push_back (newLv); - } - } - formulas.push_back (ProbFormula (functor, logVars, ranges[count])); - } - count ++; - pvList = YAP_TailOfTerm (pvList); - } - - // read the parameters - const Params& params = readParameters (YAP_ArgOfTerm (4, pfTerm)); - - // read the constraint - Tuples tuples; - if (lvMap.size() >= 1) { - YAP_Term tupleList = YAP_ArgOfTerm (5, pfTerm); - while (tupleList != YAP_TermNil()) { - YAP_Term term = YAP_HeadOfTerm (tupleList); - assert (YAP_IsApplTerm (term)); - YAP_Functor yapFunctor = YAP_FunctorOfTerm (term); - unsigned arity = (unsigned) YAP_ArityOfFunctor (yapFunctor); - assert (lvMap.size() == arity); - Tuple tuple (arity); - for (unsigned i = 1; i <= arity; i++) { - YAP_Term ti = YAP_ArgOfTerm (i, term); - if (YAP_IsAtomTerm (ti) == false) { - cerr << "Error: the constraint contains free variables." 
<< endl; - exit (EXIT_FAILURE); - } - string name ((char*) YAP_AtomName (YAP_AtomOfTerm (ti))); - tuple[i - 1] = LiftedUtils::getSymbol (name); - } - tuples.push_back (tuple); - tupleList = YAP_TailOfTerm (tupleList); - } - } - return new Parfactor (formulas, params, tuples, distId); -} - - - -void -readLiftedEvidence ( - YAP_Term observedList, - ObservedFormulas& obsFormulas) -{ - while (observedList != YAP_TermNil()) { - YAP_Term pair = YAP_HeadOfTerm (observedList); - YAP_Term ground = YAP_ArgOfTerm (1, pair); - Symbol functor; - Symbols args; - if (YAP_IsAtomTerm (ground)) { - string name ((char*) YAP_AtomName (YAP_AtomOfTerm (ground))); - functor = LiftedUtils::getSymbol (name); - } else { - assert (YAP_IsApplTerm (ground)); - YAP_Functor yapFunctor = YAP_FunctorOfTerm (ground); - string name ((char*) (YAP_AtomName (YAP_NameOfFunctor (yapFunctor)))); - functor = LiftedUtils::getSymbol (name); - unsigned arity = (unsigned) YAP_ArityOfFunctor (yapFunctor); - for (unsigned i = 1; i <= arity; i++) { - YAP_Term ti = YAP_ArgOfTerm (i, ground); - assert (YAP_IsAtomTerm (ti)); - string arg ((char *) YAP_AtomName (YAP_AtomOfTerm (ti))); - args.push_back (LiftedUtils::getSymbol (arg)); - } - } - unsigned evidence = (unsigned) YAP_IntOfTerm (YAP_ArgOfTerm (2, pair)); - bool found = false; - for (size_t i = 0; i < obsFormulas.size(); i++) { - if (obsFormulas[i].functor() == functor && - obsFormulas[i].arity() == args.size() && - obsFormulas[i].evidence() == evidence) { - obsFormulas[i].addTuple (args); - found = true; - } - } - if (found == false) { - obsFormulas.push_back (ObservedFormula (functor, evidence, args)); - } - observedList = YAP_TailOfTerm (observedList); - } -} - - - -vector -readUnsignedList (YAP_Term list) -{ - vector vec; - while (list != YAP_TermNil()) { - vec.push_back ((unsigned) YAP_IntOfTerm (YAP_HeadOfTerm (list))); - list = YAP_TailOfTerm (list); - } - return vec; -} - - - -Params -readParameters (YAP_Term paramL) -{ - Params params; - assert (YAP_IsPairTerm (paramL)); - while (paramL != YAP_TermNil()) { - params.push_back ((double) YAP_FloatOfTerm (YAP_HeadOfTerm (paramL))); - paramL = YAP_TailOfTerm (paramL); - } - if (Globals::logDomain) { - Util::log (params); - } - return params; -} - - - -YAP_Term -fillAnswersPrologList (vector& results) -{ - YAP_Term list = YAP_TermNil(); - for (size_t i = results.size(); i-- > 0; ) { - const Params& beliefs = results[i]; - YAP_Term queryBeliefsL = YAP_TermNil(); - for (size_t j = beliefs.size(); j-- > 0; ) { - YAP_Int sl1 = YAP_InitSlot (list); - YAP_Term belief = YAP_MkFloatTerm (beliefs[j]); - queryBeliefsL = YAP_MkPairTerm (belief, queryBeliefsL); - list = YAP_GetFromSlot (sl1); - YAP_RecoverSlots (1); - } - list = YAP_MkPairTerm (queryBeliefsL, list); - } - return list; -} - - - -extern "C" void -init_predicates (void) -{ - YAP_UserCPredicate ("cpp_create_lifted_network", - createLiftedNetwork, 3); - - YAP_UserCPredicate ("cpp_create_ground_network", - createGroundNetwork, 4); - - YAP_UserCPredicate ("cpp_run_lifted_solver", - runLiftedSolver, 3); - - YAP_UserCPredicate ("cpp_run_ground_solver", - runGroundSolver, 3); - - YAP_UserCPredicate ("cpp_set_parfactors_params", - setParfactorsParams, 3); - - YAP_UserCPredicate ("cpp_set_factors_params", - setFactorsParams, 3); - - YAP_UserCPredicate ("cpp_set_vars_information", - setVarsInformation, 2); - - YAP_UserCPredicate ("cpp_set_horus_flag", - setHorusFlag, 2); - - YAP_UserCPredicate ("cpp_free_lifted_network", - freeLiftedNetwork, 1); - - YAP_UserCPredicate 
("cpp_free_ground_network", - freeGroundNetwork, 1); -} - diff --git a/packages/CLPBN/horus2/Indexer.h b/packages/CLPBN/horus2/Indexer.h deleted file mode 100644 index db99cf1a7..000000000 --- a/packages/CLPBN/horus2/Indexer.h +++ /dev/null @@ -1,258 +0,0 @@ -#ifndef HORUS_INDEXER_H -#define HORUS_INDEXER_H - -#include -#include - -#include -#include - -#include "Util.h" - - -class Indexer -{ - public: - Indexer (const Ranges& ranges, bool calcOffsets = true) - : index_(0), indices_(ranges.size(), 0), ranges_(ranges), - size_(Util::sizeExpected (ranges)) - { - if (calcOffsets) { - calculateOffsets(); - } - } - - void increment (void) - { - for (size_t i = ranges_.size(); i-- > 0; ) { - indices_[i] ++; - if (indices_[i] != ranges_[i]) { - break; - } else { - indices_[i] = 0; - } - } - index_ ++; - } - - void incrementDimension (size_t dim) - { - assert (dim < ranges_.size()); - assert (ranges_.size() == offsets_.size()); - assert (indices_[dim] < ranges_[dim]); - indices_[dim] ++; - index_ += offsets_[dim]; - } - - void incrementExceptDimension (size_t dim) - { - assert (ranges_.size() == offsets_.size()); - for (size_t i = ranges_.size(); i-- > 0; ) { - if (i != dim) { - indices_[i] ++; - index_ += offsets_[i]; - if (indices_[i] != ranges_[i]) { - return; - } else { - indices_[i] = 0; - index_ -= offsets_[i] * ranges_[i]; - } - } - } - index_ = size_; - } - - Indexer& operator++ (void) - { - increment(); - return *this; - } - - operator size_t (void) const - { - return index_; - } - - unsigned operator[] (size_t dim) const - { - assert (valid()); - assert (dim < ranges_.size()); - return indices_[dim]; - } - - bool valid (void) const - { - return index_ < size_; - } - - void reset (void) - { - std::fill (indices_.begin(), indices_.end(), 0); - index_ = 0; - } - - void resetDimension (size_t dim) - { - indices_[dim] = 0; - index_ -= offsets_[dim] * ranges_[dim]; - } - - size_t size (void) const - { - return size_ ; - } - - friend std::ostream& operator<< (std::ostream&, const Indexer&); - - private: - void calculateOffsets (void) - { - size_t prod = 1; - offsets_.resize (ranges_.size()); - for (size_t i = ranges_.size(); i-- > 0; ) { - offsets_[i] = prod; - prod *= ranges_[i]; - } - } - - size_t index_; - Ranges indices_; - const Ranges& ranges_; - size_t size_; - vector offsets_; -}; - - - -inline std::ostream& -operator<< (std::ostream& os, const Indexer& indexer) -{ - os << "(" ; - os << std::setw (2) << std::setfill('0') << indexer.index_; - os << ") " ; - os << indexer.indices_; - return os; -} - - - -class MapIndexer -{ - public: - MapIndexer (const Ranges& ranges, const vector& mask) - : index_(0), indices_(ranges.size(), 0), ranges_(ranges), - valid_(true) - { - size_t prod = 1; - offsets_.resize (ranges.size(), 0); - for (size_t i = ranges.size(); i-- > 0; ) { - if (mask[i]) { - offsets_[i] = prod; - prod *= ranges[i]; - } - } - assert (ranges.size() == mask.size()); - } - - MapIndexer (const Ranges& ranges, size_t dim) - : index_(0), indices_(ranges.size(), 0), ranges_(ranges), - valid_(true) - { - size_t prod = 1; - offsets_.resize (ranges.size(), 0); - for (size_t i = ranges.size(); i-- > 0; ) { - if (i != dim) { - offsets_[i] = prod; - prod *= ranges[i]; - } - } - } - - template - MapIndexer ( - const vector& allArgs, - const Ranges& allRanges, - const vector& wantedArgs, - const Ranges& wantedRanges) - : index_(0), indices_(allArgs.size(), 0), ranges_(allRanges), - valid_(true) - { - size_t prod = 1; - vector offsets (wantedRanges.size()); - for (size_t i = 
wantedRanges.size(); i-- > 0; ) { - offsets[i] = prod; - prod *= wantedRanges[i]; - } - offsets_.reserve (allArgs.size()); - for (size_t i = 0; i < allArgs.size(); i++) { - size_t idx = Util::indexOf (wantedArgs, allArgs[i]); - offsets_.push_back (idx != wantedArgs.size() ? offsets[idx] : 0); - } - } - - MapIndexer& operator++ (void) - { - assert (valid_); - for (size_t i = ranges_.size(); i-- > 0; ) { - indices_[i] ++; - index_ += offsets_[i]; - if (indices_[i] != ranges_[i]) { - return *this; - } else { - indices_[i] = 0; - index_ -= offsets_[i] * ranges_[i]; - } - } - valid_ = false; - return *this; - } - - operator size_t (void) const - { - assert (valid()); - return index_; - } - - unsigned operator[] (size_t dim) const - { - assert (valid()); - assert (dim < ranges_.size()); - return indices_[dim]; - } - - bool valid (void) const - { - return valid_; - } - - void reset (void) - { - std::fill (indices_.begin(), indices_.end(), 0); - index_ = 0; - } - - friend std::ostream& operator<< (std::ostream&, const MapIndexer&); - - private: - size_t index_; - Ranges indices_; - const Ranges& ranges_; - bool valid_; - vector offsets_; -}; - - - -inline std::ostream& -operator<< (std::ostream &os, const MapIndexer& indexer) -{ - os << "(" ; - os << std::setw (2) << std::setfill('0') << indexer.index_; - os << ") " ; - os << indexer.indices_; - return os; -} - - -#endif // HORUS_INDEXER_H - diff --git a/packages/CLPBN/horus2/LiftedBp.cpp b/packages/CLPBN/horus2/LiftedBp.cpp deleted file mode 100644 index d3f757704..000000000 --- a/packages/CLPBN/horus2/LiftedBp.cpp +++ /dev/null @@ -1,234 +0,0 @@ -#include "LiftedBp.h" -#include "WeightedBp.h" -#include "FactorGraph.h" -#include "LiftedOperations.h" - - -LiftedBp::LiftedBp (const ParfactorList& parfactorList) - : LiftedSolver (parfactorList) -{ - refineParfactors(); - createFactorGraph(); - solver_ = new WeightedBp (*fg_, getWeights()); -} - - - -LiftedBp::~LiftedBp (void) -{ - delete solver_; - delete fg_; -} - - - -Params -LiftedBp::solveQuery (const Grounds& query) -{ - assert (query.empty() == false); - Params res; - vector groups = getQueryGroups (query); - if (query.size() == 1) { - res = solver_->getPosterioriOf (groups[0]); - } else { - ParfactorList::iterator it = pfList_.begin(); - size_t idx = pfList_.size(); - size_t count = 0; - while (it != pfList_.end()) { - if ((*it)->containsGrounds (query)) { - idx = count; - break; - } - ++ it; - ++ count; - } - if (idx == pfList_.size()) { - res = getJointByConditioning (pfList_, query); - } else { - VarIds queryVids; - for (unsigned i = 0; i < groups.size(); i++) { - queryVids.push_back (groups[i]); - } - res = solver_->getFactorJoint (fg_->facNodes()[idx], queryVids); - } - } - return res; -} - - - -void -LiftedBp::printSolverFlags (void) const -{ - stringstream ss; - ss << "lifted bp [" ; - ss << "schedule=" ; - typedef BpOptions::Schedule Sch; - switch (BpOptions::schedule) { - case Sch::SEQ_FIXED: ss << "seq_fixed"; break; - case Sch::SEQ_RANDOM: ss << "seq_random"; break; - case Sch::PARALLEL: ss << "parallel"; break; - case Sch::MAX_RESIDUAL: ss << "max_residual"; break; - } - ss << ",max_iter=" << BpOptions::maxIter; - ss << ",accuracy=" << BpOptions::accuracy; - ss << ",log_domain=" << Util::toString (Globals::logDomain); - ss << "]" ; - cout << ss.str() << endl; -} - - - -void -LiftedBp::refineParfactors (void) -{ - pfList_ = parfactorList; - while (iterate() == false); - - if (Globals::verbosity > 2) { - Util::printHeader ("AFTER REFINEMENT"); - pfList_.print(); - } -} - - - -bool 
-LiftedBp::iterate (void) -{ - ParfactorList::iterator it = pfList_.begin(); - while (it != pfList_.end()) { - const ProbFormulas& args = (*it)->arguments(); - for (size_t i = 0; i < args.size(); i++) { - LogVarSet lvs = (*it)->logVarSet() - args[i].logVars(); - if ((*it)->constr()->isCountNormalized (lvs) == false) { - Parfactors pfs = LiftedOperations::countNormalize (*it, lvs); - it = pfList_.removeAndDelete (it); - pfList_.add (pfs); - return false; - } - } - ++ it; - } - return true; -} - - - -vector -LiftedBp::getQueryGroups (const Grounds& query) -{ - vector queryGroups; - for (unsigned i = 0; i < query.size(); i++) { - ParfactorList::const_iterator it = pfList_.begin(); - for (; it != pfList_.end(); ++it) { - if ((*it)->containsGround (query[i])) { - queryGroups.push_back ((*it)->findGroup (query[i])); - break; - } - } - } - assert (queryGroups.size() == query.size()); - return queryGroups; -} - - - -void -LiftedBp::createFactorGraph (void) -{ - fg_ = new FactorGraph(); - ParfactorList::const_iterator it = pfList_.begin(); - for (; it != pfList_.end(); ++it) { - vector groups = (*it)->getAllGroups(); - VarIds varIds; - for (size_t i = 0; i < groups.size(); i++) { - varIds.push_back (groups[i]); - } - fg_->addFactor (Factor (varIds, (*it)->ranges(), (*it)->params())); - } -} - - - -vector> -LiftedBp::getWeights (void) const -{ - vector> weights; - weights.reserve (pfList_.size()); - ParfactorList::const_iterator it = pfList_.begin(); - for (; it != pfList_.end(); ++it) { - const ProbFormulas& args = (*it)->arguments(); - weights.push_back ({ }); - weights.back().reserve (args.size()); - for (size_t i = 0; i < args.size(); i++) { - LogVarSet lvs = (*it)->logVarSet() - args[i].logVars(); - weights.back().push_back ((*it)->constr()->getConditionalCount (lvs)); - } - } - return weights; -} - - - -unsigned -LiftedBp::rangeOfGround (const Ground& gr) -{ - ParfactorList::iterator it = pfList_.begin(); - while (it != pfList_.end()) { - if ((*it)->containsGround (gr)) { - PrvGroup prvGroup = (*it)->findGroup (gr); - return (*it)->range ((*it)->indexOfGroup (prvGroup)); - } - ++ it; - } - return std::numeric_limits::max(); -} - - - -Params -LiftedBp::getJointByConditioning ( - const ParfactorList& pfList, - const Grounds& query) -{ - LiftedBp solver (pfList); - Params prevBeliefs = solver.solveQuery ({query[0]}); - Grounds obsGrounds = {query[0]}; - for (size_t i = 1; i < query.size(); i++) { - Params newBeliefs; - vector obsFs; - Ranges obsRanges; - for (size_t j = 0; j < obsGrounds.size(); j++) { - obsFs.push_back (ObservedFormula ( - obsGrounds[j].functor(), 0, obsGrounds[j].args())); - obsRanges.push_back (rangeOfGround (obsGrounds[j])); - } - Indexer indexer (obsRanges, false); - while (indexer.valid()) { - for (size_t j = 0; j < obsFs.size(); j++) { - obsFs[j].setEvidence (indexer[j]); - } - ParfactorList tempPfList (pfList); - LiftedOperations::absorveEvidence (tempPfList, obsFs); - LiftedBp solver (tempPfList); - Params beliefs = solver.solveQuery ({query[i]}); - for (size_t k = 0; k < beliefs.size(); k++) { - newBeliefs.push_back (beliefs[k]); - } - ++ indexer; - } - int count = -1; - unsigned range = rangeOfGround (query[i]); - for (size_t j = 0; j < newBeliefs.size(); j++) { - if (j % range == 0) { - count ++; - } - newBeliefs[j] *= prevBeliefs[count]; - } - prevBeliefs = newBeliefs; - obsGrounds.push_back (query[i]); - } - return prevBeliefs; -} - diff --git a/packages/CLPBN/horus2/LiftedBp.h b/packages/CLPBN/horus2/LiftedBp.h deleted file mode 100644 index 274503f29..000000000 
--- a/packages/CLPBN/horus2/LiftedBp.h +++ /dev/null @@ -1,43 +0,0 @@ -#ifndef HORUS_LIFTEDBP_H -#define HORUS_LIFTEDBP_H - -#include "LiftedSolver.h" -#include "ParfactorList.h" - -class FactorGraph; -class WeightedBp; - -class LiftedBp : public LiftedSolver -{ - public: - LiftedBp (const ParfactorList& pfList); - - ~LiftedBp (void); - - Params solveQuery (const Grounds&); - - void printSolverFlags (void) const; - - private: - void refineParfactors (void); - - bool iterate (void); - - vector getQueryGroups (const Grounds&); - - void createFactorGraph (void); - - vector> getWeights (void) const; - - unsigned rangeOfGround (const Ground&); - - Params getJointByConditioning (const ParfactorList&, const Grounds&); - - ParfactorList pfList_; - WeightedBp* solver_; - FactorGraph* fg_; - -}; - -#endif // HORUS_LIFTEDBP_H - diff --git a/packages/CLPBN/horus2/LiftedKc.cpp b/packages/CLPBN/horus2/LiftedKc.cpp deleted file mode 100644 index 45848ab70..000000000 --- a/packages/CLPBN/horus2/LiftedKc.cpp +++ /dev/null @@ -1,1309 +0,0 @@ -#include - -#include "LiftedKc.h" -#include "LiftedOperations.h" -#include "Indexer.h" - - - -OrNode::~OrNode (void) -{ - delete leftBranch_; - delete rightBranch_; -} - - - -double -OrNode::weight (void) const -{ - double lw = leftBranch_->weight(); - double rw = rightBranch_->weight(); - return Globals::logDomain ? Util::logSum (lw, rw) : lw + rw; -} - - - -AndNode::~AndNode (void) -{ - delete leftBranch_; - delete rightBranch_; -} - - - -double -AndNode::weight (void) const -{ - double lw = leftBranch_->weight(); - double rw = rightBranch_->weight(); - return Globals::logDomain ? lw + rw : lw * rw; -} - - - -int SetOrNode::nrPos_ = -1; -int SetOrNode::nrNeg_ = -1; - - - -SetOrNode::~SetOrNode (void) -{ - delete follow_; -} - - - -double -SetOrNode::weight (void) const -{ - double weightSum = LogAware::addIdenty(); - for (unsigned i = 0; i < nrGroundings_ + 1; i++) { - nrPos_ = nrGroundings_ - i; - nrNeg_ = i; - if (Globals::logDomain) { - double nrCombs = Util::nrCombinations (nrGroundings_, i); - double w = follow_->weight(); - weightSum = Util::logSum (weightSum, std::log (nrCombs) + w); - } else { - double w = follow_->weight(); - weightSum += Util::nrCombinations (nrGroundings_, i) * w; - } - } - nrPos_ = -1; - nrNeg_ = -1; - return weightSum; -} - - - -SetAndNode::~SetAndNode (void) -{ - delete follow_; -} - - - -double -SetAndNode::weight (void) const -{ - return LogAware::pow (follow_->weight(), nrGroundings_); -} - - - -IncExcNode::~IncExcNode (void) -{ - delete plus1Branch_; - delete plus2Branch_; - delete minusBranch_; -} - - - -double -IncExcNode::weight (void) const -{ - double w = 0.0; - if (Globals::logDomain) { - w = Util::logSum (plus1Branch_->weight(), plus2Branch_->weight()); - w = std::log (std::exp (w) - std::exp (minusBranch_->weight())); - } else { - w = plus1Branch_->weight() + plus2Branch_->weight(); - w -= minusBranch_->weight(); - } - return w; -} - - - -LeafNode::~LeafNode (void) -{ - delete clause_; -} - - - -double -LeafNode::weight (void) const -{ - assert (clause_->isUnit()); - if (clause_->posCountedLogVars().empty() == false - || clause_->negCountedLogVars().empty() == false) { - if (SetOrNode::isSet() == false) { - // return a NaN if we have a SetOrNode - // ancester that is not set. This can only - // happen when calculating the weights - // for the edge labels in graphviz - return 0.0 / 0.0; - } - } - double weight = clause_->literals()[0].isPositive() - ? 
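The circuit nodes defined at the top of LiftedKc.cpp combine the weights of their children in two ways: an or-node adds them and an and-node multiplies them, which in the log domain become log-sum-exp and plain addition. A small numeric sketch showing that the two domains agree (logSum here is a local stand-in for the package's Util::logSum):

#include <cmath>
#include <cstdio>

static double logSum (double a, double b) {
  // numerically stable log(exp(a) + exp(b))
  double m = a > b ? a : b;
  return m + std::log (std::exp (a - m) + std::exp (b - m));
}

int main (void) {
  double lw = 0.3, rw = 0.2;                        // weights of two branches
  std::printf ("or  = %.6f\n", lw + rw);            // normal domain: 0.5
  std::printf ("and = %.6f\n", lw * rw);            // normal domain: 0.06
  double llw = std::log (lw), lrw = std::log (rw);  // the same in the log domain
  std::printf ("or  (log) = %.6f\n", std::exp (logSum (llw, lrw)));  // 0.5
  std::printf ("and (log) = %.6f\n", std::exp (llw + lrw));          // 0.06
  return 0;
}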
lwcnf_.posWeight (clause_->literals().front().lid()) - : lwcnf_.negWeight (clause_->literals().front().lid()); - LogVarSet lvs = clause_->constr().logVarSet(); - lvs -= clause_->ipgLogVars(); - lvs -= clause_->posCountedLogVars(); - lvs -= clause_->negCountedLogVars(); - unsigned nrGroundings = 1; - if (lvs.empty() == false) { - nrGroundings = clause_->constr().projectedCopy (lvs).size(); - } - if (clause_->posCountedLogVars().empty() == false) { - nrGroundings *= std::pow (SetOrNode::nrPositives(), - clause_->nrPosCountedLogVars()); - } - if (clause_->negCountedLogVars().empty() == false) { - nrGroundings *= std::pow (SetOrNode::nrNegatives(), - clause_->nrNegCountedLogVars()); - } - return LogAware::pow (weight, nrGroundings); -} - - - -SmoothNode::~SmoothNode (void) -{ - Clause::deleteClauses (clauses_); -} - - - -double -SmoothNode::weight (void) const -{ - Clauses cs = clauses(); - double totalWeight = LogAware::multIdenty(); - for (size_t i = 0; i < cs.size(); i++) { - double posWeight = lwcnf_.posWeight (cs[i]->literals()[0].lid()); - double negWeight = lwcnf_.negWeight (cs[i]->literals()[0].lid()); - LogVarSet lvs = cs[i]->constr().logVarSet(); - lvs -= cs[i]->ipgLogVars(); - lvs -= cs[i]->posCountedLogVars(); - lvs -= cs[i]->negCountedLogVars(); - unsigned nrGroundings = 1; - if (lvs.empty() == false) { - nrGroundings = cs[i]->constr().projectedCopy (lvs).size(); - } - if (cs[i]->posCountedLogVars().empty() == false) { - nrGroundings *= std::pow (SetOrNode::nrPositives(), - cs[i]->nrPosCountedLogVars()); - } - if (cs[i]->negCountedLogVars().empty() == false) { - nrGroundings *= std::pow (SetOrNode::nrNegatives(), - cs[i]->nrNegCountedLogVars()); - } - if (Globals::logDomain) { - totalWeight += Util::logSum (posWeight, negWeight) * nrGroundings; - } else { - totalWeight *= std::pow (posWeight + negWeight, nrGroundings); - } - } - return totalWeight; -} - - - -double -TrueNode::weight (void) const -{ - return LogAware::multIdenty(); -} - - - -double -CompilationFailedNode::weight (void) const -{ - // weighted model counting in compilation - // failed nodes should give NaN - return 0.0 / 0.0; -} - - - -LiftedCircuit::LiftedCircuit (const LiftedWCNF* lwcnf) - : lwcnf_(lwcnf) -{ - root_ = 0; - compilationSucceeded_ = true; - Clauses clauses = Clause::copyClauses (lwcnf->clauses()); - compile (&root_, clauses); - if (compilationSucceeded_) { - smoothCircuit (root_); - } - if (Globals::verbosity > 1) { - if (compilationSucceeded_) { - double wmc = LogAware::exp (getWeightedModelCount()); - cout << "Weighted model count = " << wmc << endl << endl; - } - cout << "Exporting circuit to graphviz (circuit.dot)..." ; - cout << endl << endl; - exportToGraphViz ("circuit.dot"); - } -} - - - -LiftedCircuit::~LiftedCircuit (void) -{ - delete root_; - unordered_map::iterator it; - it = originClausesMap_.begin(); - while (it != originClausesMap_.end()) { - Clause::deleteClauses (it->second); - ++ it; - } -} - - - -bool -LiftedCircuit::isCompilationSucceeded (void) const -{ - return compilationSucceeded_; -} - - - -double -LiftedCircuit::getWeightedModelCount (void) const -{ - assert (compilationSucceeded_); - return root_->weight(); -} - - - -void -LiftedCircuit::exportToGraphViz (const char* fileName) -{ - ofstream out (fileName); - if (!out.is_open()) { - cerr << "Error: couldn't open file '" << fileName << "'." 
; - return; - } - out << "digraph {" << endl; - out << "ranksep=1" << endl; - exportToGraphViz (root_, out); - out << "}" << endl; - out.close(); -} - - - -void -LiftedCircuit::compile ( - CircuitNode** follow, - Clauses& clauses) -{ - if (compilationSucceeded_ == false - && Globals::verbosity <= 1) { - return; - } - - if (clauses.empty()) { - *follow = new TrueNode(); - return; - } - - if (clauses.size() == 1 && clauses[0]->isUnit()) { - *follow = new LeafNode (clauses[0], *lwcnf_); - return; - } - - if (tryUnitPropagation (follow, clauses)) { - return; - } - - if (tryIndependence (follow, clauses)) { - return; - } - - if (tryShannonDecomp (follow, clauses)) { - return; - } - - if (tryInclusionExclusion (follow, clauses)) { - return; - } - - if (tryIndepPartialGrounding (follow, clauses)) { - return; - } - - if (tryAtomCounting (follow, clauses)) { - return; - } - - *follow = new CompilationFailedNode(); - if (Globals::verbosity > 1) { - originClausesMap_[*follow] = clauses; - explanationMap_[*follow] = "" ; - } - compilationSucceeded_ = false; -} - - - -bool -LiftedCircuit::tryUnitPropagation ( - CircuitNode** follow, - Clauses& clauses) -{ - if (Globals::verbosity > 1) { - backupClauses_ = Clause::copyClauses (clauses); - } - for (size_t i = 0; i < clauses.size(); i++) { - if (clauses[i]->isUnit()) { - Clauses propagClauses; - for (size_t j = 0; j < clauses.size(); j++) { - if (i != j) { - LiteralId lid = clauses[i]->literals()[0].lid(); - LogVarTypes types = clauses[i]->logVarTypes (0); - if (clauses[i]->literals()[0].isPositive()) { - if (clauses[j]->containsPositiveLiteral (lid, types) == false) { - clauses[j]->removeNegativeLiterals (lid, types); - if (clauses[j]->nrLiterals() > 0) { - propagClauses.push_back (clauses[j]); - } else { - delete clauses[j]; - } - } else { - delete clauses[j]; - } - } else if (clauses[i]->literals()[0].isNegative()) { - if (clauses[j]->containsNegativeLiteral (lid, types) == false) { - clauses[j]->removePositiveLiterals (lid, types); - if (clauses[j]->nrLiterals() > 0) { - propagClauses.push_back (clauses[j]); - } else { - delete clauses[j]; - } - } else { - delete clauses[j]; - } - } - } - } - - AndNode* andNode = new AndNode(); - if (Globals::verbosity > 1) { - originClausesMap_[andNode] = backupClauses_; - stringstream explanation; - explanation << " UP on " << clauses[i]->literals()[0]; - explanationMap_[andNode] = explanation.str(); - } - - Clauses unitClause = { clauses[i] }; - compile (andNode->leftBranch(), unitClause); - compile (andNode->rightBranch(), propagClauses); - (*follow) = andNode; - return true; - } - } - if (Globals::verbosity > 1) { - Clause::deleteClauses (backupClauses_); - } - return false; -} - - - -bool -LiftedCircuit::tryIndependence ( - CircuitNode** follow, - Clauses& clauses) -{ - if (clauses.size() == 1) { - return false; - } - if (Globals::verbosity > 1) { - backupClauses_ = Clause::copyClauses (clauses); - } - Clauses depClauses = { clauses[0] }; - Clauses indepClauses (clauses.begin() + 1, clauses.end()); - bool finish = false; - while (finish == false) { - finish = true; - for (size_t i = 0; i < indepClauses.size(); i++) { - if (independentClause (*indepClauses[i], depClauses) == false) { - depClauses.push_back (indepClauses[i]); - indepClauses.erase (indepClauses.begin() + i); - finish = false; - break; - } - } - } - if (indepClauses.empty() == false) { - AndNode* andNode = new AndNode (); - if (Globals::verbosity > 1) { - originClausesMap_[andNode] = backupClauses_; - explanationMap_[andNode] = " Independence" ; - } 
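tryUnitPropagation above applies the usual unit-propagation rule, lifted to clauses with logical variables: a unit clause asserts its literal, every clause containing that literal is satisfied and dropped, and the complementary literal is removed from the remaining clauses. A tiny propositional sketch of the same rule, ignoring logical variables:

// Literals are ints, a negative value is a negated atom.
// Clauses: (x1), (~x1 v x2), (x1 v x3); the unit clause is (x1).
#include <cstdio>
#include <set>
#include <vector>

int main (void) {
  std::vector<std::set<int>> clauses = { {1}, {-1, 2}, {1, 3} };
  int unit = *clauses[0].begin();                 // the unit literal x1
  std::vector<std::set<int>> result = { clauses[0] };
  for (size_t i = 1; i < clauses.size(); i++) {
    if (clauses[i].count (unit)) continue;        // clause satisfied: drop it
    clauses[i].erase (-unit);                     // remove the complementary literal
    if (!clauses[i].empty()) result.push_back (clauses[i]);
  }
  std::printf ("%zu clauses remain\n", result.size());  // 2: (x1) and (x2)
  return 0;
}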
- compile (andNode->leftBranch(), depClauses); - compile (andNode->rightBranch(), indepClauses); - (*follow) = andNode; - return true; - } - if (Globals::verbosity > 1) { - Clause::deleteClauses (backupClauses_); - } - return false; -} - - - -bool -LiftedCircuit::tryShannonDecomp ( - CircuitNode** follow, - Clauses& clauses) -{ - if (Globals::verbosity > 1) { - backupClauses_ = Clause::copyClauses (clauses); - } - for (size_t i = 0; i < clauses.size(); i++) { - const Literals& literals = clauses[i]->literals(); - for (size_t j = 0; j < literals.size(); j++) { - if (literals[j].isGround ( - clauses[i]->constr(), clauses[i]->ipgLogVars())) { - - Clause* c1 = lwcnf_->createClause (literals[j].lid()); - Clause* c2 = new Clause (*c1); - c2->literals().front().complement(); - - Clauses otherClauses = Clause::copyClauses (clauses); - clauses.push_back (c1); - otherClauses.push_back (c2); - - OrNode* orNode = new OrNode(); - if (Globals::verbosity > 1) { - originClausesMap_[orNode] = backupClauses_; - stringstream explanation; - explanation << " SD on " << literals[j]; - explanationMap_[orNode] = explanation.str(); - } - - compile (orNode->leftBranch(), clauses); - compile (orNode->rightBranch(), otherClauses); - (*follow) = orNode; - return true; - } - } - } - if (Globals::verbosity > 1) { - Clause::deleteClauses (backupClauses_); - } - return false; -} - - - -bool -LiftedCircuit::tryInclusionExclusion ( - CircuitNode** follow, - Clauses& clauses) -{ - if (Globals::verbosity > 1) { - backupClauses_ = Clause::copyClauses (clauses); - } - for (size_t i = 0; i < clauses.size(); i++) { - Literals depLits = { clauses[i]->literals().front() }; - Literals indepLits (clauses[i]->literals().begin() + 1, - clauses[i]->literals().end()); - bool finish = false; - while (finish == false) { - finish = true; - for (size_t j = 0; j < indepLits.size(); j++) { - if (independentLiteral (indepLits[j], depLits) == false) { - depLits.push_back (indepLits[j]); - indepLits.erase (indepLits.begin() + j); - finish = false; - break; - } - } - } - if (indepLits.empty() == false) { - LogVarSet lvs1; - for (size_t j = 0; j < depLits.size(); j++) { - lvs1 |= depLits[j].logVarSet(); - } - if (clauses[i]->constr().isCountNormalized (lvs1) == false) { - break; - } - LogVarSet lvs2; - for (size_t j = 0; j < indepLits.size(); j++) { - lvs2 |= indepLits[j].logVarSet(); - } - if (clauses[i]->constr().isCountNormalized (lvs2) == false) { - break; - } - Clause* c1 = new Clause (clauses[i]->constr().projectedCopy (lvs1)); - for (size_t j = 0; j < depLits.size(); j++) { - c1->addLiteral (depLits[j]); - } - Clause* c2 = new Clause (clauses[i]->constr().projectedCopy (lvs2)); - for (size_t j = 0; j < indepLits.size(); j++) { - c2->addLiteral (indepLits[j]); - } - - clauses.erase (clauses.begin() + i); - Clauses plus1Clauses = Clause::copyClauses (clauses); - Clauses plus2Clauses = Clause::copyClauses (clauses); - - plus1Clauses.push_back (c1); - plus2Clauses.push_back (c2); - clauses.push_back (c1); - clauses.push_back (c2); - - IncExcNode* ieNode = new IncExcNode(); - if (Globals::verbosity > 1) { - originClausesMap_[ieNode] = backupClauses_; - stringstream explanation; - explanation << " IncExc on clause nº " << i + 1; - explanationMap_[ieNode] = explanation.str(); - } - compile (ieNode->plus1Branch(), plus1Clauses); - compile (ieNode->plus2Branch(), plus2Clauses); - compile (ieNode->minusBranch(), clauses); - *follow = ieNode; - return true; - } - } - if (Globals::verbosity > 1) { - Clause::deleteClauses (backupClauses_); - } - return 
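tryInclusionExclusion above splits a clause whose literals fall into two count-normalized, independent groups and combines the three sub-circuits as plus1 + plus2 - minus, the inclusion-exclusion identity for weighted model counts. The identity can be checked on the smallest possible case, counting the models of (a v b) over two boolean variables:

#include <cstdio>

int main (void) {
  int modelsA  = 2;   // a must be true, b is free
  int modelsB  = 2;   // b must be true, a is free
  int modelsAB = 1;   // both must be true
  int modelsAorB = modelsA + modelsB - modelsAB;
  std::printf ("#models(a v b) = %d\n", modelsAorB);   // 3 of the 4 assignments
  return 0;
}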
false; -} - - - -bool -LiftedCircuit::tryIndepPartialGrounding ( - CircuitNode** follow, - Clauses& clauses) -{ - // assumes that all literals have logical variables - // else, shannon decomp was possible - if (Globals::verbosity > 1) { - backupClauses_ = Clause::copyClauses (clauses); - } - LogVars rootLogVars; - LogVarSet lvs = clauses[0]->ipgCandidates(); - for (size_t i = 0; i < lvs.size(); i++) { - rootLogVars.clear(); - rootLogVars.push_back (lvs[i]); - ConstraintTree ct = clauses[0]->constr().projectedCopy ({lvs[i]}); - if (tryIndepPartialGroundingAux (clauses, ct, rootLogVars)) { - for (size_t j = 0; j < clauses.size(); j++) { - clauses[j]->addIpgLogVar (rootLogVars[j]); - } - SetAndNode* setAndNode = new SetAndNode (ct.size()); - if (Globals::verbosity > 1) { - originClausesMap_[setAndNode] = backupClauses_; - explanationMap_[setAndNode] = " IPG" ; - } - *follow = setAndNode; - compile (setAndNode->follow(), clauses); - return true; - } - } - if (Globals::verbosity > 1) { - Clause::deleteClauses (backupClauses_); - } - return false; -} - - - -bool -LiftedCircuit::tryIndepPartialGroundingAux ( - Clauses& clauses, - ConstraintTree& ct, - LogVars& rootLogVars) -{ - for (size_t i = 1; i < clauses.size(); i++) { - LogVarSet lvs = clauses[i]->ipgCandidates(); - for (size_t j = 0; j < lvs.size(); j++) { - ConstraintTree ct2 = clauses[i]->constr().projectedCopy ({lvs[j]}); - if (ct.tupleSet() == ct2.tupleSet()) { - rootLogVars.push_back (lvs[j]); - break; - } - } - if (rootLogVars.size() != i + 1) { - return false; - } - } - // verifies if the IPG logical vars appear in the same positions - unordered_map positions; - for (size_t i = 0; i < clauses.size(); i++) { - const Literals& literals = clauses[i]->literals(); - for (size_t j = 0; j < literals.size(); j++) { - size_t idx = literals[j].indexOfLogVar (rootLogVars[i]); - assert (idx != literals[j].nrLogVars()); - unordered_map::iterator it; - it = positions.find (literals[j].lid()); - if (it != positions.end()) { - if (it->second != idx) { - return false; - } - } else { - positions[literals[j].lid()] = idx; - } - } - } - return true; -} - - - -bool -LiftedCircuit::tryAtomCounting ( - CircuitNode** follow, - Clauses& clauses) -{ - for (size_t i = 0; i < clauses.size(); i++) { - if (clauses[i]->nrPosCountedLogVars() > 0 - || clauses[i]->nrNegCountedLogVars() > 0) { - // only allow one atom counting node per branch - return false; - } - } - if (Globals::verbosity > 1) { - backupClauses_ = Clause::copyClauses (clauses); - } - for (size_t i = 0; i < clauses.size(); i++) { - Literals literals = clauses[i]->literals(); - for (size_t j = 0; j < literals.size(); j++) { - if (literals[j].nrLogVars() == 1 - && ! clauses[i]->isIpgLogVar (literals[j].logVars().front()) - && ! 
clauses[i]->isCountedLogVar (literals[j].logVars().front())) { - unsigned nrGroundings = clauses[i]->constr().projectedCopy ( - literals[j].logVars()).size(); - SetOrNode* setOrNode = new SetOrNode (nrGroundings); - if (Globals::verbosity > 1) { - originClausesMap_[setOrNode] = backupClauses_; - explanationMap_[setOrNode] = " AC" ; - } - Clause* c1 = new Clause ( - clauses[i]->constr().projectedCopy (literals[j].logVars())); - Clause* c2 = new Clause ( - clauses[i]->constr().projectedCopy (literals[j].logVars())); - c1->addLiteral (literals[j]); - c2->addLiteralComplemented (literals[j]); - c1->addPosCountedLogVar (literals[j].logVars().front()); - c2->addNegCountedLogVar (literals[j].logVars().front()); - clauses.push_back (c1); - clauses.push_back (c2); - shatterCountedLogVars (clauses); - compile (setOrNode->follow(), clauses); - *follow = setOrNode; - return true; - } - } - } - if (Globals::verbosity > 1) { - Clause::deleteClauses (backupClauses_); - } - return false; -} - - - -void -LiftedCircuit::shatterCountedLogVars (Clauses& clauses) -{ - while (shatterCountedLogVarsAux (clauses)) ; -} - - - -bool -LiftedCircuit::shatterCountedLogVarsAux (Clauses& clauses) -{ - for (size_t i = 0; i < clauses.size() - 1; i++) { - for (size_t j = i + 1; j < clauses.size(); j++) { - bool splitedSome = shatterCountedLogVarsAux (clauses, i, j); - if (splitedSome) { - return true; - } - } - } - return false; -} - - - -bool -LiftedCircuit::shatterCountedLogVarsAux ( - Clauses& clauses, - size_t idx1, - size_t idx2) -{ - Literals lits1 = clauses[idx1]->literals(); - Literals lits2 = clauses[idx2]->literals(); - for (size_t i = 0; i < lits1.size(); i++) { - for (size_t j = 0; j < lits2.size(); j++) { - if (lits1[i].lid() == lits2[j].lid()) { - LogVars lvs1 = lits1[i].logVars(); - LogVars lvs2 = lits2[j].logVars(); - for (size_t k = 0; k < lvs1.size(); k++) { - if (clauses[idx1]->isCountedLogVar (lvs1[k]) - && clauses[idx2]->isCountedLogVar (lvs2[k]) == false) { - clauses.push_back (new Clause (*clauses[idx2])); - clauses[idx2]->addPosCountedLogVar (lvs2[k]); - clauses.back()->addNegCountedLogVar (lvs2[k]); - return true; - } - if (clauses[idx2]->isCountedLogVar (lvs2[k]) - && clauses[idx1]->isCountedLogVar (lvs1[k]) == false) { - clauses.push_back (new Clause (*clauses[idx1])); - clauses[idx1]->addPosCountedLogVar (lvs1[k]); - clauses.back()->addNegCountedLogVar (lvs1[k]); - return true; - } - } - } - } - } - return false; -} - - - -bool -LiftedCircuit::independentClause ( - Clause& clause, - Clauses& otherClauses) const -{ - for (size_t i = 0; i < otherClauses.size(); i++) { - if (Clause::independentClauses (clause, *otherClauses[i]) == false) { - return false; - } - } - return true; -} - - - -bool -LiftedCircuit::independentLiteral ( - const Literal& lit, - const Literals& otherLits) const -{ - for (size_t i = 0; i < otherLits.size(); i++) { - if (lit.lid() == otherLits[i].lid() - || (lit.logVarSet() & otherLits[i].logVarSet()).empty() == false) { - return false; - } - } - return true; -} - - - -LitLvTypesSet -LiftedCircuit::smoothCircuit (CircuitNode* node) -{ - assert (node != 0); - LitLvTypesSet propagLits; - - switch (getCircuitNodeType (node)) { - - case CircuitNodeType::OR_NODE: { - OrNode* casted = dynamic_cast(node); - LitLvTypesSet lids1 = smoothCircuit (*casted->leftBranch()); - LitLvTypesSet lids2 = smoothCircuit (*casted->rightBranch()); - LitLvTypesSet missingLeft = lids2 - lids1; - LitLvTypesSet missingRight = lids1 - lids2; - createSmoothNode (missingLeft, casted->leftBranch()); - 
createSmoothNode (missingRight, casted->rightBranch());
-      propagLits |= lids1;
-      propagLits |= lids2;
-      break;
-    }
-
-    case CircuitNodeType::AND_NODE: {
-      AndNode* casted = dynamic_cast<AndNode*>(node);
-      LitLvTypesSet lids1 = smoothCircuit (*casted->leftBranch());
-      LitLvTypesSet lids2 = smoothCircuit (*casted->rightBranch());
-      propagLits |= lids1;
-      propagLits |= lids2;
-      break;
-    }
-
-    case CircuitNodeType::SET_OR_NODE: {
-      SetOrNode* casted = dynamic_cast<SetOrNode*>(node);
-      propagLits = smoothCircuit (*casted->follow());
-      TinySet<pair<LiteralId,unsigned>> litSet;
-      for (size_t i = 0; i < propagLits.size(); i++) {
-        litSet.insert (make_pair (propagLits[i].lid(),
-            propagLits[i].logVarTypes().size()));
-      }
-      LitLvTypesSet missingLids;
-      for (size_t i = 0; i < litSet.size(); i++) {
-        vector<LogVarTypes> allTypes = getAllPossibleTypes (litSet[i].second);
-        for (size_t j = 0; j < allTypes.size(); j++) {
-          bool typeFound = false;
-          for (size_t k = 0; k < propagLits.size(); k++) {
-            if (litSet[i].first == propagLits[k].lid()
-                && containsTypes (propagLits[k].logVarTypes(), allTypes[j])) {
-              typeFound = true;
-              break;
-            }
-          }
-          if (typeFound == false) {
-            missingLids.insert (LitLvTypes (litSet[i].first, allTypes[j]));
-          }
-        }
-      }
-      createSmoothNode (missingLids, casted->follow());
-      // setAllFullLogVars() can cause repeated elements in
-      // the set. Fix this by reconstructing the set again
-      LitLvTypesSet copy = propagLits;
-      propagLits.clear();
-      for (size_t i = 0; i < copy.size(); i++) {
-        copy[i].setAllFullLogVars();
-        propagLits.insert (copy[i]);
-      }
-      break;
-    }
-
-    case CircuitNodeType::SET_AND_NODE: {
-      SetAndNode* casted = dynamic_cast<SetAndNode*>(node);
-      propagLits = smoothCircuit (*casted->follow());
-      break;
-    }
-
-    case CircuitNodeType::INC_EXC_NODE: {
-      IncExcNode* casted = dynamic_cast<IncExcNode*>(node);
-      LitLvTypesSet lids1 = smoothCircuit (*casted->plus1Branch());
-      LitLvTypesSet lids2 = smoothCircuit (*casted->plus2Branch());
-      LitLvTypesSet missingPlus1 = lids2 - lids1;
-      LitLvTypesSet missingPlus2 = lids1 - lids2;
-      createSmoothNode (missingPlus1, casted->plus1Branch());
-      createSmoothNode (missingPlus2, casted->plus2Branch());
-      propagLits |= lids1;
-      propagLits |= lids2;
-      break;
-    }
-
-    case CircuitNodeType::LEAF_NODE: {
-      LeafNode* casted = dynamic_cast<LeafNode*>(node);
-      propagLits.insert (LitLvTypes (
-          casted->clause()->literals()[0].lid(),
-          casted->clause()->logVarTypes(0)));
-    }
-
-    default:
-      break;
-  }
-
-  return propagLits;
-}
-
-
-
-void
-LiftedCircuit::createSmoothNode (
-    const LitLvTypesSet& missingLits,
-    CircuitNode** prev)
-{
-  if (missingLits.empty() == false) {
-    if (Globals::verbosity > 1) {
-      unordered_map<CircuitNode*, Clauses>::iterator it;
-      it = originClausesMap_.find (*prev);
-      if (it != originClausesMap_.end()) {
-        backupClauses_ = it->second;
-      } else {
-        backupClauses_ = Clause::copyClauses (
-            {((dynamic_cast<LeafNode*>(*prev))->clause())});
-      }
-    }
-    Clauses clauses;
-    for (size_t i = 0; i < missingLits.size(); i++) {
-      LiteralId lid = missingLits[i].lid();
-      const LogVarTypes& types = missingLits[i].logVarTypes();
-      Clause* c = lwcnf_->createClause (lid);
-      for (size_t j = 0; j < types.size(); j++) {
-        LogVar X = c->literals().front().logVars()[j];
-        if (types[j] == LogVarType::POS_LV) {
-          c->addPosCountedLogVar (X);
-        } else if (types[j] == LogVarType::NEG_LV) {
-          c->addNegCountedLogVar (X);
-        }
-      }
-      c->addLiteralComplemented (c->literals()[0]);
-      clauses.push_back (c);
-    }
-    SmoothNode* smoothNode = new SmoothNode (clauses, *lwcnf_);
-    *prev = new AndNode (smoothNode, *prev);
-    if (Globals::verbosity > 1) {
-      originClausesMap_[*prev] = backupClauses_;
-
explanationMap_[*prev] = " Smoothing" ; - } - } -} - - - -vector -LiftedCircuit::getAllPossibleTypes (unsigned nrLogVars) const -{ - if (nrLogVars == 0) { - return {}; - } - if (nrLogVars == 1) { - return {{LogVarType::POS_LV},{LogVarType::NEG_LV}}; - } - vector res; - Ranges ranges (nrLogVars, 2); - Indexer indexer (ranges); - while (indexer.valid()) { - LogVarTypes types; - for (size_t i = 0; i < nrLogVars; i++) { - if (indexer[i] == 0) { - types.push_back (LogVarType::POS_LV); - } else { - types.push_back (LogVarType::NEG_LV); - } - } - res.push_back (types); - ++ indexer; - } - return res; -} - - - -bool -LiftedCircuit::containsTypes ( - const LogVarTypes& typesA, - const LogVarTypes& typesB) const -{ - for (size_t i = 0; i < typesA.size(); i++) { - if (typesA[i] == LogVarType::FULL_LV) { - - } else if (typesA[i] == LogVarType::POS_LV - && typesB[i] == LogVarType::POS_LV) { - - } else if (typesA[i] == LogVarType::NEG_LV - && typesB[i] == LogVarType::NEG_LV) { - - } else { - return false; - } - } - return true; -} - - - -CircuitNodeType -LiftedCircuit::getCircuitNodeType (const CircuitNode* node) const -{ - CircuitNodeType type; - if (dynamic_cast(node) != 0) { - type = CircuitNodeType::OR_NODE; - } else if (dynamic_cast(node) != 0) { - type = CircuitNodeType::AND_NODE; - } else if (dynamic_cast(node) != 0) { - type = CircuitNodeType::SET_OR_NODE; - } else if (dynamic_cast(node) != 0) { - type = CircuitNodeType::SET_AND_NODE; - } else if (dynamic_cast(node) != 0) { - type = CircuitNodeType::INC_EXC_NODE; - } else if (dynamic_cast(node) != 0) { - type = CircuitNodeType::LEAF_NODE; - } else if (dynamic_cast(node) != 0) { - type = CircuitNodeType::SMOOTH_NODE; - } else if (dynamic_cast(node) != 0) { - type = CircuitNodeType::TRUE_NODE; - } else if (dynamic_cast(node) != 0) { - type = CircuitNodeType::COMPILATION_FAILED_NODE; - } else { - assert (false); - } - return type; -} - - - -void -LiftedCircuit::exportToGraphViz (CircuitNode* node, ofstream& os) -{ - assert (node != 0); - - static unsigned nrAuxNodes = 0; - stringstream ss; - ss << "n" << nrAuxNodes; - string auxNode = ss.str(); - nrAuxNodes ++; - string opStyle = "shape=circle,width=0.7,margin=\"0.0,0.0\"," ; - - switch (getCircuitNodeType (node)) { - - case OR_NODE: { - OrNode* casted = dynamic_cast(node); - printClauses (casted, os); - - os << auxNode << " [" << opStyle << "label=\"∨\"]" << endl; - os << escapeNode (node) << " -> " << auxNode; - os << " [label=\"" << getExplanationString (node) << "\"]" ; - os << endl; - - os << auxNode << " -> " ; - os << escapeNode (*casted->leftBranch()); - os << " [label=\" " << (*casted->leftBranch())->weight() << "\"]" ; - os << endl; - - os << auxNode << " -> " ; - os << escapeNode (*casted->rightBranch()); - os << " [label=\" " << (*casted->rightBranch())->weight() << "\"]" ; - os << endl; - - exportToGraphViz (*casted->leftBranch(), os); - exportToGraphViz (*casted->rightBranch(), os); - break; - } - - case AND_NODE: { - AndNode* casted = dynamic_cast(node); - printClauses (casted, os); - - os << auxNode << " [" << opStyle << "label=\"∧\"]" << endl; - os << escapeNode (node) << " -> " << auxNode; - os << " [label=\"" << getExplanationString (node) << "\"]" ; - os << endl; - - os << auxNode << " -> " ; - os << escapeNode (*casted->leftBranch()); - os << " [label=\" " << (*casted->leftBranch())->weight() << "\"]" ; - os << endl; - - os << auxNode << " -> " ; - os << escapeNode (*casted->rightBranch()) << endl; - os << " [label=\" " << (*casted->rightBranch())->weight() << "\"]" ; - os << 
endl; - - exportToGraphViz (*casted->leftBranch(), os); - exportToGraphViz (*casted->rightBranch(), os); - break; - } - - case SET_OR_NODE: { - SetOrNode* casted = dynamic_cast(node); - printClauses (casted, os); - - os << auxNode << " [" << opStyle << "label=\"∨(X)\"]" << endl; - os << escapeNode (node) << " -> " << auxNode; - os << " [label=\"" << getExplanationString (node) << "\"]" ; - os << endl; - - os << auxNode << " -> " ; - os << escapeNode (*casted->follow()); - os << " [label=\" " << (*casted->follow())->weight() << "\"]" ; - os << endl; - - exportToGraphViz (*casted->follow(), os); - break; - } - - case SET_AND_NODE: { - SetAndNode* casted = dynamic_cast(node); - printClauses (casted, os); - - os << auxNode << " [" << opStyle << "label=\"∧(X)\"]" << endl; - os << escapeNode (node) << " -> " << auxNode; - os << " [label=\"" << getExplanationString (node) << "\"]" ; - os << endl; - - os << auxNode << " -> " ; - os << escapeNode (*casted->follow()); - os << " [label=\" " << (*casted->follow())->weight() << "\"]" ; - os << endl; - - exportToGraphViz (*casted->follow(), os); - break; - } - - case INC_EXC_NODE: { - IncExcNode* casted = dynamic_cast(node); - printClauses (casted, os); - - os << auxNode << " [" << opStyle << "label=\"+ - +\"]" ; - os << endl; - os << escapeNode (node) << " -> " << auxNode; - os << " [label=\"" << getExplanationString (node) << "\"]" ; - os << endl; - - os << auxNode << " -> " ; - os << escapeNode (*casted->plus1Branch()); - os << " [label=\" " << (*casted->plus1Branch())->weight() << "\"]" ; - os << endl; - - os << auxNode << " -> " ; - os << escapeNode (*casted->minusBranch()) << endl; - os << " [label=\" " << (*casted->minusBranch())->weight() << "\"]" ; - os << endl; - - os << auxNode << " -> " ; - os << escapeNode (*casted->plus2Branch()); - os << " [label=\" " << (*casted->plus2Branch())->weight() << "\"]" ; - os << endl; - - exportToGraphViz (*casted->plus1Branch(), os); - exportToGraphViz (*casted->plus2Branch(), os); - exportToGraphViz (*casted->minusBranch(), os); - break; - } - - case LEAF_NODE: { - printClauses (node, os, "style=filled,fillcolor=palegreen,"); - break; - } - - case SMOOTH_NODE: { - printClauses (node, os, "style=filled,fillcolor=lightblue,"); - break; - } - - case TRUE_NODE: { - os << escapeNode (node); - os << " [shape=box,label=\"⊤\"]" ; - os << endl; - break; - } - - case COMPILATION_FAILED_NODE: { - printClauses (node, os, "style=filled,fillcolor=salmon,"); - break; - } - - default: - assert (false); - } -} - - - -string -LiftedCircuit::escapeNode (const CircuitNode* node) const -{ - stringstream ss; - ss << "\"" << node << "\"" ; - return ss.str(); -} - - - -string -LiftedCircuit::getExplanationString (CircuitNode* node) -{ - return Util::contains (explanationMap_, node) - ? 
explanationMap_[node] - : "" ; -} - - - -void -LiftedCircuit::printClauses ( - CircuitNode* node, - ofstream& os, - string extraOptions) -{ - Clauses clauses; - if (Util::contains (originClausesMap_, node)) { - clauses = originClausesMap_[node]; - } else if (getCircuitNodeType (node) == CircuitNodeType::LEAF_NODE) { - clauses = { (dynamic_cast(node))->clause() } ; - } else if (getCircuitNodeType (node) == CircuitNodeType::SMOOTH_NODE) { - clauses = (dynamic_cast(node))->clauses(); - } - assert (clauses.empty() == false); - os << escapeNode (node); - os << " [shape=box," << extraOptions << "label=\"" ; - for (size_t i = 0; i < clauses.size(); i++) { - if (i != 0) os << "\\n" ; - os << *clauses[i]; - } - os << "\"]" ; - os << endl; -} - - - -LiftedKc::~LiftedKc (void) -{ - delete lwcnf_; - delete circuit_; -} - - - -Params -LiftedKc::solveQuery (const Grounds& query) -{ - pfList_ = parfactorList; - LiftedOperations::shatterAgainstQuery (pfList_, query); - LiftedOperations::runWeakBayesBall (pfList_, query); - lwcnf_ = new LiftedWCNF (pfList_); - circuit_ = new LiftedCircuit (lwcnf_); - if (circuit_->isCompilationSucceeded() == false) { - cerr << "Error: the circuit compilation has failed." << endl; - exit (EXIT_FAILURE); - } - vector groups; - Ranges ranges; - for (size_t i = 0; i < query.size(); i++) { - ParfactorList::const_iterator it = pfList_.begin(); - while (it != pfList_.end()) { - size_t idx = (*it)->indexOfGround (query[i]); - if (idx != (*it)->nrArguments()) { - groups.push_back ((*it)->argument (idx).group()); - ranges.push_back ((*it)->range (idx)); - break; - } - ++ it; - } - } - assert (groups.size() == query.size()); - Params params; - Indexer indexer (ranges); - while (indexer.valid()) { - for (size_t i = 0; i < groups.size(); i++) { - vector litIds = lwcnf_->prvGroupLiterals (groups[i]); - for (size_t j = 0; j < litIds.size(); j++) { - if (indexer[i] == j) { - lwcnf_->addWeight (litIds[j], LogAware::one(), - LogAware::one()); - } else { - lwcnf_->addWeight (litIds[j], LogAware::zero(), - LogAware::one()); - } - } - } - params.push_back (circuit_->getWeightedModelCount()); - ++ indexer; - } - LogAware::normalize (params); - if (Globals::logDomain) { - Util::exp (params); - } - return params; -} - - - -void -LiftedKc::printSolverFlags (void) const -{ - stringstream ss; - ss << "lifted kc [" ; - ss << "log_domain=" << Util::toString (Globals::logDomain); - ss << "]" ; - cout << ss.str() << endl; -} - diff --git a/packages/CLPBN/horus2/LiftedKc.h b/packages/CLPBN/horus2/LiftedKc.h deleted file mode 100644 index a4cd2dbeb..000000000 --- a/packages/CLPBN/horus2/LiftedKc.h +++ /dev/null @@ -1,300 +0,0 @@ -#ifndef HORUS_LIFTEDKC_H -#define HORUS_LIFTEDKC_H - - -#include "LiftedWCNF.h" -#include "LiftedSolver.h" -#include "ParfactorList.h" - - -enum CircuitNodeType { - OR_NODE, - AND_NODE, - SET_OR_NODE, - SET_AND_NODE, - INC_EXC_NODE, - LEAF_NODE, - SMOOTH_NODE, - TRUE_NODE, - COMPILATION_FAILED_NODE -}; - - - -class CircuitNode -{ - public: - CircuitNode (void) { } - - virtual ~CircuitNode (void) { } - - virtual double weight (void) const = 0; -}; - - - -class OrNode : public CircuitNode -{ - public: - OrNode (void) : CircuitNode(), leftBranch_(0), rightBranch_(0) { } - - ~OrNode (void); - - CircuitNode** leftBranch (void) { return &leftBranch_; } - CircuitNode** rightBranch (void) { return &rightBranch_; } - - double weight (void) const; - - private: - CircuitNode* leftBranch_; - CircuitNode* rightBranch_; -}; - - - -class AndNode : public CircuitNode -{ - public: - AndNode 
(void) : CircuitNode(), leftBranch_(0), rightBranch_(0) { } - - AndNode (CircuitNode* leftBranch, CircuitNode* rightBranch) - : CircuitNode(), leftBranch_(leftBranch), rightBranch_(rightBranch) { } - - ~AndNode (void); - - CircuitNode** leftBranch (void) { return &leftBranch_; } - CircuitNode** rightBranch (void) { return &rightBranch_; } - - double weight (void) const; - - private: - CircuitNode* leftBranch_; - CircuitNode* rightBranch_; -}; - - - -class SetOrNode : public CircuitNode -{ - public: - SetOrNode (unsigned nrGroundings) - : CircuitNode(), follow_(0), nrGroundings_(nrGroundings) { } - - ~SetOrNode (void); - - CircuitNode** follow (void) { return &follow_; } - - static unsigned nrPositives (void) { return nrPos_; } - - static unsigned nrNegatives (void) { return nrNeg_; } - - static bool isSet (void) { return nrPos_ >= 0; } - - double weight (void) const; - - private: - CircuitNode* follow_; - unsigned nrGroundings_; - static int nrPos_; - static int nrNeg_; -}; - - - -class SetAndNode : public CircuitNode -{ - public: - SetAndNode (unsigned nrGroundings) - : CircuitNode(), follow_(0), nrGroundings_(nrGroundings) { } - - ~SetAndNode (void); - - CircuitNode** follow (void) { return &follow_; } - - double weight (void) const; - - private: - CircuitNode* follow_; - unsigned nrGroundings_; -}; - - - -class IncExcNode : public CircuitNode -{ - public: - IncExcNode (void) - : CircuitNode(), plus1Branch_(0), plus2Branch_(0), minusBranch_(0) { } - - ~IncExcNode (void); - - CircuitNode** plus1Branch (void) { return &plus1Branch_; } - CircuitNode** plus2Branch (void) { return &plus2Branch_; } - CircuitNode** minusBranch (void) { return &minusBranch_; } - - double weight (void) const; - - private: - CircuitNode* plus1Branch_; - CircuitNode* plus2Branch_; - CircuitNode* minusBranch_; -}; - - - -class LeafNode : public CircuitNode -{ - public: - LeafNode (Clause* clause, const LiftedWCNF& lwcnf) - : CircuitNode(), clause_(clause), lwcnf_(lwcnf) { } - - ~LeafNode (void); - - const Clause* clause (void) const { return clause_; } - - Clause* clause (void) { return clause_; } - - double weight (void) const; - - private: - Clause* clause_; - const LiftedWCNF& lwcnf_; -}; - - - -class SmoothNode : public CircuitNode -{ - public: - SmoothNode (const Clauses& clauses, const LiftedWCNF& lwcnf) - : CircuitNode(), clauses_(clauses), lwcnf_(lwcnf) { } - - ~SmoothNode (void); - - const Clauses& clauses (void) const { return clauses_; } - - Clauses clauses (void) { return clauses_; } - - double weight (void) const; - - private: - Clauses clauses_; - const LiftedWCNF& lwcnf_; -}; - - - -class TrueNode : public CircuitNode -{ - public: - TrueNode (void) : CircuitNode() { } - - double weight (void) const; -}; - - - -class CompilationFailedNode : public CircuitNode -{ - public: - CompilationFailedNode (void) : CircuitNode() { } - - double weight (void) const; -}; - - - -class LiftedCircuit -{ - public: - LiftedCircuit (const LiftedWCNF* lwcnf); - - ~LiftedCircuit (void); - - bool isCompilationSucceeded (void) const; - - double getWeightedModelCount (void) const; - - void exportToGraphViz (const char*); - - private: - - void compile (CircuitNode** follow, Clauses& clauses); - - bool tryUnitPropagation (CircuitNode** follow, Clauses& clauses); - - bool tryIndependence (CircuitNode** follow, Clauses& clauses); - - bool tryShannonDecomp (CircuitNode** follow, Clauses& clauses); - - bool tryInclusionExclusion (CircuitNode** follow, Clauses& clauses); - - bool tryIndepPartialGrounding (CircuitNode** follow, 
Clauses& clauses); - - bool tryIndepPartialGroundingAux (Clauses& clauses, ConstraintTree& ct, - LogVars& rootLogVars); - - bool tryAtomCounting (CircuitNode** follow, Clauses& clauses); - - void shatterCountedLogVars (Clauses& clauses); - - bool shatterCountedLogVarsAux (Clauses& clauses); - - bool shatterCountedLogVarsAux (Clauses& clauses, size_t idx1, size_t idx2); - - bool independentClause (Clause& clause, Clauses& otherClauses) const; - - bool independentLiteral (const Literal& lit, - const Literals& otherLits) const; - - LitLvTypesSet smoothCircuit (CircuitNode* node); - - void createSmoothNode (const LitLvTypesSet& lids, - CircuitNode** prev); - - vector getAllPossibleTypes (unsigned nrLogVars) const; - - bool containsTypes (const LogVarTypes& typesA, - const LogVarTypes& typesB) const; - - CircuitNodeType getCircuitNodeType (const CircuitNode* node) const; - - void exportToGraphViz (CircuitNode* node, ofstream&); - - void printClauses (CircuitNode* node, ofstream&, - string extraOptions = ""); - - string escapeNode (const CircuitNode* node) const; - - string getExplanationString (CircuitNode* node); - - CircuitNode* root_; - const LiftedWCNF* lwcnf_; - bool compilationSucceeded_; - Clauses backupClauses_; - unordered_map originClausesMap_; - unordered_map explanationMap_; -}; - - - -class LiftedKc : public LiftedSolver -{ - public: - LiftedKc (const ParfactorList& pfList) - : LiftedSolver(pfList) { } - - ~LiftedKc (void); - - Params solveQuery (const Grounds&); - - void printSolverFlags (void) const; - - private: - LiftedWCNF* lwcnf_; - LiftedCircuit* circuit_; - ParfactorList pfList_; -}; - -#endif // HORUS_LIFTEDKC_H - diff --git a/packages/CLPBN/horus2/LiftedOperations.cpp b/packages/CLPBN/horus2/LiftedOperations.cpp deleted file mode 100644 index e0da2dd3b..000000000 --- a/packages/CLPBN/horus2/LiftedOperations.cpp +++ /dev/null @@ -1,271 +0,0 @@ -#include "LiftedOperations.h" - - -void -LiftedOperations::shatterAgainstQuery ( - ParfactorList& pfList, - const Grounds& query) -{ - for (size_t i = 0; i < query.size(); i++) { - if (query[i].isAtom()) { - continue; - } - bool found = false; - Parfactors newPfs; - ParfactorList::iterator it = pfList.begin(); - while (it != pfList.end()) { - if ((*it)->containsGround (query[i])) { - found = true; - std::pair split; - LogVars queryLvs ( - (*it)->constr()->logVars().begin(), - (*it)->constr()->logVars().begin() + query[i].arity()); - split = (*it)->constr()->split (query[i].args()); - ConstraintTree* commCt = split.first; - ConstraintTree* exclCt = split.second; - newPfs.push_back (new Parfactor (*it, commCt)); - if (exclCt->empty() == false) { - newPfs.push_back (new Parfactor (*it, exclCt)); - } else { - delete exclCt; - } - it = pfList.removeAndDelete (it); - } else { - ++ it; - } - } - if (found == false) { - cerr << "Error: could not find a parfactor with ground " ; - cerr << "`" << query[i] << "'." 
<< endl; - exit (EXIT_FAILURE); - } - pfList.add (newPfs); - } - if (Globals::verbosity > 2) { - Util::printAsteriskLine(); - cout << "SHATTERED AGAINST THE QUERY" << endl; - for (size_t i = 0; i < query.size(); i++) { - cout << " -> " << query[i] << endl; - } - Util::printAsteriskLine(); - pfList.print(); - } -} - - - -void -LiftedOperations::runWeakBayesBall ( - ParfactorList& pfList, - const Grounds& query) -{ - queue todo; // groups to process - set done; // processed or in queue - for (size_t i = 0; i < query.size(); i++) { - ParfactorList::iterator it = pfList.begin(); - while (it != pfList.end()) { - PrvGroup group = (*it)->findGroup (query[i]); - if (group != numeric_limits::max()) { - todo.push (group); - done.insert (group); - break; - } - ++ it; - } - } - - set requiredPfs; - while (todo.empty() == false) { - PrvGroup group = todo.front(); - ParfactorList::iterator it = pfList.begin(); - while (it != pfList.end()) { - if (Util::contains (requiredPfs, *it) == false && - (*it)->containsGroup (group)) { - vector groups = (*it)->getAllGroups(); - for (size_t i = 0; i < groups.size(); i++) { - if (Util::contains (done, groups[i]) == false) { - todo.push (groups[i]); - done.insert (groups[i]); - } - } - requiredPfs.insert (*it); - } - ++ it; - } - todo.pop(); - } - - ParfactorList::iterator it = pfList.begin(); - bool foundNotRequired = false; - while (it != pfList.end()) { - if (Util::contains (requiredPfs, *it) == false) { - if (Globals::verbosity > 2) { - if (foundNotRequired == false) { - Util::printHeader ("PARFACTORS TO DISCARD"); - foundNotRequired = true; - } - (*it)->print(); - } - it = pfList.removeAndDelete (it); - } else { - ++ it; - } - } -} - - - -void -LiftedOperations::absorveEvidence ( - ParfactorList& pfList, - ObservedFormulas& obsFormulas) -{ - for (size_t i = 0; i < obsFormulas.size(); i++) { - Parfactors newPfs; - ParfactorList::iterator it = pfList.begin(); - while (it != pfList.end()) { - Parfactor* pf = *it; - it = pfList.remove (it); - Parfactors absorvedPfs = absorve (obsFormulas[i], pf); - if (absorvedPfs.empty() == false) { - if (absorvedPfs.size() == 1 && absorvedPfs[0] == 0) { - // just remove pf; - } else { - Util::addToVector (newPfs, absorvedPfs); - } - delete pf; - } else { - it = pfList.insertShattered (it, pf); - ++ it; - } - } - pfList.add (newPfs); - } - if (Globals::verbosity > 2 && obsFormulas.empty() == false) { - Util::printAsteriskLine(); - cout << "AFTER EVIDENCE ABSORVED" << endl; - for (size_t i = 0; i < obsFormulas.size(); i++) { - cout << " -> " << obsFormulas[i] << endl; - } - Util::printAsteriskLine(); - pfList.print(); - } -} - - - -Parfactors -LiftedOperations::countNormalize ( - Parfactor* g, - const LogVarSet& set) -{ - Parfactors normPfs; - if (set.empty()) { - normPfs.push_back (new Parfactor (*g)); - } else { - ConstraintTrees normCts = g->constr()->countNormalize (set); - for (size_t i = 0; i < normCts.size(); i++) { - normPfs.push_back (new Parfactor (g, normCts[i])); - } - } - return normPfs; -} - - - -Parfactor -LiftedOperations::calcGroundMultiplication (Parfactor pf) -{ - LogVarSet lvs = pf.constr()->logVarSet(); - lvs -= pf.constr()->singletons(); - Parfactors newPfs = {new Parfactor (pf)}; - for (size_t i = 0; i < lvs.size(); i++) { - Parfactors pfs = newPfs; - newPfs.clear(); - for (size_t j = 0; j < pfs.size(); j++) { - bool countedLv = pfs[j]->countedLogVars().contains (lvs[i]); - if (countedLv) { - pfs[j]->fullExpand (lvs[i]); - newPfs.push_back (pfs[j]); - } else { - ConstraintTrees cts = pfs[j]->constr()->ground 
(lvs[i]); - for (size_t k = 0; k < cts.size(); k++) { - newPfs.push_back (new Parfactor (pfs[j], cts[k])); - } - delete pfs[j]; - } - } - } - ParfactorList pfList (newPfs); - Parfactors groundShatteredPfs (pfList.begin(),pfList.end()); - for (size_t i = 1; i < groundShatteredPfs.size(); i++) { - groundShatteredPfs[0]->multiply (*groundShatteredPfs[i]); - } - return Parfactor (*groundShatteredPfs[0]); -} - - - -Parfactors -LiftedOperations::absorve ( - ObservedFormula& obsFormula, - Parfactor* g) -{ - Parfactors absorvedPfs; - const ProbFormulas& formulas = g->arguments(); - for (size_t i = 0; i < formulas.size(); i++) { - if (obsFormula.functor() == formulas[i].functor() && - obsFormula.arity() == formulas[i].arity()) { - - if (obsFormula.isAtom()) { - if (formulas.size() > 1) { - g->absorveEvidence (formulas[i], obsFormula.evidence()); - } else { - // hack to erase parfactor g - absorvedPfs.push_back (0); - } - break; - } - - g->constr()->moveToTop (formulas[i].logVars()); - std::pair res; - res = g->constr()->split ( - formulas[i].logVars(), - &(obsFormula.constr()), - obsFormula.constr().logVars()); - ConstraintTree* commCt = res.first; - ConstraintTree* exclCt = res.second; - - if (commCt->empty() == false) { - if (formulas.size() > 1) { - LogVarSet excl = g->exclusiveLogVars (i); - Parfactor tempPf (g, commCt); - Parfactors countNormPfs = LiftedOperations::countNormalize ( - &tempPf, excl); - for (size_t j = 0; j < countNormPfs.size(); j++) { - countNormPfs[j]->absorveEvidence ( - formulas[i], obsFormula.evidence()); - absorvedPfs.push_back (countNormPfs[j]); - } - } else { - delete commCt; - } - if (exclCt->empty() == false) { - absorvedPfs.push_back (new Parfactor (g, exclCt)); - } else { - delete exclCt; - } - if (absorvedPfs.empty()) { - // hack to erase parfactor g - absorvedPfs.push_back (0); - } - break; - } else { - delete commCt; - delete exclCt; - } - } - } - return absorvedPfs; -} - diff --git a/packages/CLPBN/horus2/LiftedOperations.h b/packages/CLPBN/horus2/LiftedOperations.h deleted file mode 100644 index fc25363d3..000000000 --- a/packages/CLPBN/horus2/LiftedOperations.h +++ /dev/null @@ -1,27 +0,0 @@ -#ifndef HORUS_LIFTEDOPERATIONS_H -#define HORUS_LIFTEDOPERATIONS_H - -#include "ParfactorList.h" - -class LiftedOperations -{ - public: - static void shatterAgainstQuery ( - ParfactorList& pfList, const Grounds& query); - - static void runWeakBayesBall ( - ParfactorList& pfList, const Grounds&); - - static void absorveEvidence ( - ParfactorList& pfList, ObservedFormulas& obsFormulas); - - static Parfactors countNormalize (Parfactor*, const LogVarSet&); - - static Parfactor calcGroundMultiplication (Parfactor pf); - - private: - static Parfactors absorve (ObservedFormula&, Parfactor*); -}; - -#endif // HORUS_LIFTEDOPERATIONS_H - diff --git a/packages/CLPBN/horus2/LiftedSolver.h b/packages/CLPBN/horus2/LiftedSolver.h deleted file mode 100644 index 5429fc5b3..000000000 --- a/packages/CLPBN/horus2/LiftedSolver.h +++ /dev/null @@ -1,27 +0,0 @@ -#ifndef HORUS_LIFTEDSOLVER_H -#define HORUS_LIFTEDSOLVER_H - -#include "ParfactorList.h" -#include "Horus.h" - - -using namespace std; - -class LiftedSolver -{ - public: - LiftedSolver (const ParfactorList& pfList) - : parfactorList(pfList) { } - - virtual ~LiftedSolver() { } // ensure that subclass destructor is called - - virtual Params solveQuery (const Grounds& query) = 0; - - virtual void printSolverFlags (void) const = 0; - - protected: - const ParfactorList& parfactorList; -}; - -#endif // HORUS_LIFTEDSOLVER_H - diff --git 
a/packages/CLPBN/horus2/LiftedUtils.cpp b/packages/CLPBN/horus2/LiftedUtils.cpp deleted file mode 100644 index 9ad750f90..000000000 --- a/packages/CLPBN/horus2/LiftedUtils.cpp +++ /dev/null @@ -1,131 +0,0 @@ -#include - -#include -#include -#include - -#include "LiftedUtils.h" -#include "ConstraintTree.h" - - -namespace LiftedUtils { - - -unordered_map symbolDict; - - -Symbol -getSymbol (const string& symbolName) -{ - unordered_map::iterator it - = symbolDict.find (symbolName); - if (it != symbolDict.end()) { - return it->second; - } else { - symbolDict[symbolName] = symbolDict.size() - 1; - return symbolDict.size() - 1; - } -} - - - -void -printSymbolDictionary (void) -{ - unordered_map::const_iterator it - = symbolDict.begin(); - while (it != symbolDict.end()) { - cout << it->first << " -> " << it->second << endl; - ++ it; - } -} - -} - - - -ostream& operator<< (ostream &os, const Symbol& s) -{ - unordered_map::const_iterator it - = LiftedUtils::symbolDict.begin(); - while (it != LiftedUtils::symbolDict.end() && it->second != s) { - ++ it; - } - assert (it != LiftedUtils::symbolDict.end()); - os << it->first; - return os; -} - - - -ostream& operator<< (ostream &os, const LogVar& X) -{ - const string labels[] = { - "A", "B", "C", "D", "E", "F", - "G", "H", "I", "J", "K", "M" }; - (X >= 12) ? os << "X_" << X.id_ : os << labels[X]; - return os; -} - - - -ostream& operator<< (ostream &os, const Tuple& t) -{ - os << "(" ; - for (size_t i = 0; i < t.size(); i++) { - os << ((i != 0) ? "," : "") << t[i]; - } - os << ")" ; - return os; -} - - - -ostream& operator<< (ostream &os, const Ground& gr) -{ - os << gr.functor(); - os << "(" ; - for (size_t i = 0; i < gr.args().size(); i++) { - if (i != 0) os << ", " ; - os << gr.args()[i]; - } - os << ")" ; - return os; -} - - - -LogVars -Substitution::getDiscardedLogVars (void) const -{ - LogVars discardedLvs; - set doneLvs; - unordered_map::const_iterator it; - it = subs_.begin(); - while (it != subs_.end()) { - if (Util::contains (doneLvs, it->second)) { - discardedLvs.push_back (it->first); - } else { - doneLvs.insert (it->second); - } - ++ it; - } - return discardedLvs; -} - - - -ostream& operator<< (ostream &os, const Substitution& theta) -{ - unordered_map::const_iterator it; - os << "[" ; - it = theta.subs_.begin(); - while (it != theta.subs_.end()) { - if (it != theta.subs_.begin()) os << ", " ; - os << it->first << "->" << it->second ; - ++ it; - } - os << "]" ; - return os; -} - diff --git a/packages/CLPBN/horus2/LiftedUtils.h b/packages/CLPBN/horus2/LiftedUtils.h deleted file mode 100644 index 1f563eaf7..000000000 --- a/packages/CLPBN/horus2/LiftedUtils.h +++ /dev/null @@ -1,166 +0,0 @@ -#ifndef HORUS_LIFTEDUTILS_H -#define HORUS_LIFTEDUTILS_H - -#include -#include -#include -#include - - -#include "TinySet.h" -#include "Util.h" - - -using namespace std; - - -class Symbol -{ - public: - Symbol (void) : id_(Util::maxUnsigned()) { } - - Symbol (unsigned id) : id_(id) { } - - operator unsigned (void) const { return id_; } - - bool valid (void) const { return id_ != Util::maxUnsigned(); } - - static Symbol invalid (void) { return Symbol(); } - - friend ostream& operator<< (ostream &os, const Symbol& s); - - private: - unsigned id_; -}; - - -class LogVar -{ - public: - LogVar (void) : id_(Util::maxUnsigned()) { } - - LogVar (unsigned id) : id_(id) { } - - operator unsigned (void) const { return id_; } - - LogVar& operator++ (void) - { - assert (valid()); - id_ ++; - return *this; - } - - bool valid (void) const - { - return id_ != 
Util::maxUnsigned(); - } - - friend ostream& operator<< (ostream &os, const LogVar& X); - - private: - unsigned id_; -}; - - -namespace std { -template <> struct hash { - size_t operator() (const Symbol& s) const { - return std::hash() (s); - }}; - -template <> struct hash { - size_t operator() (const LogVar& X) const { - return std::hash() (X); - }}; -}; - - -typedef vector Symbols; -typedef vector Tuple; -typedef vector Tuples; -typedef vector LogVars; -typedef TinySet SymbolSet; -typedef TinySet LogVarSet; -typedef TinySet TupleSet; - - -ostream& operator<< (ostream &os, const Tuple& t); - - -namespace LiftedUtils { -Symbol getSymbol (const string&); -void printSymbolDictionary (void); -} - - - -class Ground -{ - public: - Ground (Symbol f) : functor_(f) { } - - Ground (Symbol f, const Symbols& args) : functor_(f), args_(args) { } - - Symbol functor (void) const { return functor_; } - - Symbols args (void) const { return args_; } - - size_t arity (void) const { return args_.size(); } - - bool isAtom (void) const { return args_.size() == 0; } - - friend ostream& operator<< (ostream &os, const Ground& gr); - - private: - Symbol functor_; - Symbols args_; -}; - -typedef vector Grounds; - - - -class Substitution -{ - public: - void add (LogVar X_old, LogVar X_new) - { - assert (Util::contains (subs_, X_old) == false); - subs_.insert (make_pair (X_old, X_new)); - } - - void rename (LogVar X_old, LogVar X_new) - { - assert (Util::contains (subs_, X_old)); - subs_.find (X_old)->second = X_new; - } - - LogVar newNameFor (LogVar X) const - { - unordered_map::const_iterator it; - it = subs_.find (X); - if (it != subs_.end()) { - return subs_.find (X)->second; - } - return X; - } - - bool containsReplacementFor (LogVar X) const - { - return Util::contains (subs_, X); - } - - size_t nrReplacements (void) const { return subs_.size(); } - - LogVars getDiscardedLogVars (void) const; - - friend ostream& operator<< (ostream &os, const Substitution& theta); - - private: - unordered_map subs_; - -}; - - -#endif // HORUS_LIFTEDUTILS_H - diff --git a/packages/CLPBN/horus2/LiftedVe.cpp b/packages/CLPBN/horus2/LiftedVe.cpp deleted file mode 100644 index 141006c46..000000000 --- a/packages/CLPBN/horus2/LiftedVe.cpp +++ /dev/null @@ -1,728 +0,0 @@ -#include -#include - -#include "LiftedVe.h" -#include "LiftedOperations.h" -#include "Histogram.h" -#include "Util.h" - - -vector -LiftedOperator::getValidOps ( - ParfactorList& pfList, - const Grounds& query) -{ - vector validOps; - vector multOps; - - multOps = ProductOperator::getValidOps (pfList); - validOps.insert (validOps.end(), multOps.begin(), multOps.end()); - - if (Globals::verbosity > 1 || multOps.empty()) { - vector sumOutOps; - vector countOps; - vector groundOps; - sumOutOps = SumOutOperator::getValidOps (pfList, query); - countOps = CountingOperator::getValidOps (pfList); - groundOps = GroundOperator::getValidOps (pfList); - validOps.insert (validOps.end(), sumOutOps.begin(), sumOutOps.end()); - validOps.insert (validOps.end(), countOps.begin(), countOps.end()); - validOps.insert (validOps.end(), groundOps.begin(), groundOps.end()); - } - - return validOps; -} - - - -void -LiftedOperator::printValidOps ( - ParfactorList& pfList, - const Grounds& query) -{ - vector validOps; - validOps = LiftedOperator::getValidOps (pfList, query); - for (size_t i = 0; i < validOps.size(); i++) { - cout << "-> " << validOps[i]->toString(); - delete validOps[i]; - } -} - - - -vector -LiftedOperator::getParfactorsWithGroup ( - ParfactorList& pfList, PrvGroup group) -{ - 
vector iters; - ParfactorList::iterator pflIt = pfList.begin(); - while (pflIt != pfList.end()) { - if ((*pflIt)->containsGroup (group)) { - iters.push_back (pflIt); - } - ++ pflIt; - } - return iters; -} - - - -double -ProductOperator::getLogCost (void) -{ - return std::log (0.0); -} - - - -void -ProductOperator::apply (void) -{ - Parfactor* g1 = *g1_; - Parfactor* g2 = *g2_; - g1->multiply (*g2); - pfList_.remove (g1_); - pfList_.removeAndDelete (g2_); - pfList_.addShattered (g1); -} - - - -vector -ProductOperator::getValidOps (ParfactorList& pfList) -{ - vector validOps; - ParfactorList::iterator it1 = pfList.begin(); - ParfactorList::iterator penultimate = -- pfList.end(); - set pfs; - while (it1 != penultimate) { - if (Util::contains (pfs, *it1)) { - ++ it1; - continue; - } - ParfactorList::iterator it2 = it1; - ++ it2; - while (it2 != pfList.end()) { - if (Util::contains (pfs, *it2)) { - ++ it2; - continue; - } else { - if (validOp (*it1, *it2)) { - pfs.insert (*it1); - pfs.insert (*it2); - validOps.push_back (new ProductOperator ( - it1, it2, pfList)); - if (Globals::verbosity < 2) { - return validOps; - } - break; - } - } - ++ it2; - } - ++ it1; - } - return validOps; -} - - - -string -ProductOperator::toString (void) -{ - stringstream ss; - ss << "just multiplicate " ; - ss << (*g1_)->getAllGroups(); - ss << " x " ; - ss << (*g2_)->getAllGroups(); - ss << " [cost=" << std::exp (getLogCost()) << "]" << endl; - return ss.str(); -} - - - -bool -ProductOperator::validOp (Parfactor* g1, Parfactor* g2) -{ - TinySet g1_gs (g1->getAllGroups()); - TinySet g2_gs (g2->getAllGroups()); - if (g1_gs.contains (g2_gs) || g2_gs.contains (g1_gs)) { - TinySet intersect = g1_gs & g2_gs; - for (size_t i = 0; i < intersect.size(); i++) { - if (g1->nrFormulasWithGroup (intersect[i]) != 1 || - g2->nrFormulasWithGroup (intersect[i]) != 1) { - return false; - } - size_t idx1 = g1->indexOfGroup (intersect[i]); - size_t idx2 = g2->indexOfGroup (intersect[i]); - if (g1->range (idx1) != g2->range (idx2)) { - return false; - } - } - return Parfactor::canMultiply (g1, g2); - } - return false; -} - - - -double -SumOutOperator::getLogCost (void) -{ - TinySet groupSet; - ParfactorList::const_iterator pfIter = pfList_.begin(); - unsigned nrProdFactors = 0; - while (pfIter != pfList_.end()) { - if ((*pfIter)->containsGroup (group_)) { - vector groups = (*pfIter)->getAllGroups(); - groupSet |= TinySet (groups); - ++ nrProdFactors; - } - ++ pfIter; - } - if (nrProdFactors == 1) { - // best possible case - return std::log (0.0); - } - double cost = 1.0; - for (size_t i = 0; i < groupSet.size(); i++) { - pfIter = pfList_.begin(); - while (pfIter != pfList_.end()) { - if ((*pfIter)->containsGroup (groupSet[i])) { - size_t idx = (*pfIter)->indexOfGroup (groupSet[i]); - cost *= (*pfIter)->range (idx); - break; - } - ++ pfIter; - } - } - return std::log (cost); -} - - - -void -SumOutOperator::apply (void) -{ - vector iters; - iters = getParfactorsWithGroup (pfList_, group_); - Parfactor* product = *(iters[0]); - pfList_.remove (iters[0]); - for (size_t i = 1; i < iters.size(); i++) { - product->multiply (**(iters[i])); - pfList_.removeAndDelete (iters[i]); - } - if (product->nrArguments() == 1) { - delete product; - return; - } - size_t fIdx = product->indexOfGroup (group_); - LogVarSet excl = product->exclusiveLogVars (fIdx); - if (product->constr()->isCountNormalized (excl)) { - product->sumOutIndex (fIdx); - pfList_.addShattered (product); - } else { - Parfactors pfs = LiftedOperations::countNormalize (product, excl); - 
for (size_t i = 0; i < pfs.size(); i++) { - pfs[i]->sumOutIndex (fIdx); - pfList_.add (pfs[i]); - } - delete product; - } -} - - - -vector -SumOutOperator::getValidOps ( - ParfactorList& pfList, - const Grounds& query) -{ - vector validOps; - set allGroups; - ParfactorList::const_iterator it = pfList.begin(); - while (it != pfList.end()) { - const ProbFormulas& formulas = (*it)->arguments(); - for (size_t i = 0; i < formulas.size(); i++) { - allGroups.insert (formulas[i].group()); - } - ++ it; - } - set::const_iterator groupIt = allGroups.begin(); - while (groupIt != allGroups.end()) { - if (validOp (*groupIt, pfList, query)) { - validOps.push_back (new SumOutOperator (*groupIt, pfList)); - } - ++ groupIt; - } - return validOps; -} - - - -string -SumOutOperator::toString (void) -{ - stringstream ss; - vector pfIters; - pfIters = getParfactorsWithGroup (pfList_, group_); - size_t idx = (*pfIters[0])->indexOfGroup (group_); - ProbFormula f = (*pfIters[0])->argument (idx); - TupleSet tupleSet = (*pfIters[0])->constr()->tupleSet (f.logVars()); - ss << "sum out " << f.functor() << "/" << f.arity(); - ss << "|" << tupleSet << " (group " << group_ << ")"; - ss << " [cost=" << std::exp (getLogCost()) << "]" << endl; - return ss.str(); -} - - - -bool -SumOutOperator::validOp ( - PrvGroup group, - ParfactorList& pfList, - const Grounds& query) -{ - vector pfIters; - pfIters = getParfactorsWithGroup (pfList, group); - if (isToEliminate (*pfIters[0], group, query) == false) { - return false; - } - int range = -1; - for (size_t i = 0; i < pfIters.size(); i++) { - if ((*pfIters[i])->nrFormulasWithGroup (group) > 1) { - return false; - } - size_t fIdx = (*pfIters[i])->indexOfGroup (group); - if ((*pfIters[i])->argument (fIdx).contains ( - (*pfIters[i])->elimLogVars()) == false) { - return false; - } - if (range == -1) { - range = (*pfIters[i])->range (fIdx); - } else if ((int)(*pfIters[i])->range (fIdx) != range) { - return false; - } - } - return true; -} - - - -bool -SumOutOperator::isToEliminate ( - Parfactor* g, - PrvGroup group, - const Grounds& query) -{ - size_t fIdx = g->indexOfGroup (group); - const ProbFormula& formula = g->argument (fIdx); - bool toElim = true; - for (size_t i = 0; i < query.size(); i++) { - if (formula.functor() == query[i].functor() && - formula.arity() == query[i].arity()) { - g->constr()->moveToTop (formula.logVars()); - if (g->constr()->containsTuple (query[i].args())) { - toElim = false; - break; - } - } - } - return toElim; -} - - - -double -CountingOperator::getLogCost (void) -{ - double cost = 0.0; - size_t fIdx = (*pfIter_)->indexOfLogVar (X_); - unsigned range = (*pfIter_)->range (fIdx); - unsigned size = (*pfIter_)->size() / range; - TinySet counts; - counts = (*pfIter_)->constr()->getConditionalCounts (X_); - for (size_t i = 0; i < counts.size(); i++) { - cost += size * HistogramSet::nrHistograms (counts[i], range); - } - PrvGroup group = (*pfIter_)->argument (fIdx).group(); - size_t lvIndex = Util::indexOf ( - (*pfIter_)->argument (fIdx).logVars(), X_); - assert (lvIndex != (*pfIter_)->argument (fIdx).logVars().size()); - ParfactorList::iterator pfIter = pfList_.begin(); - while (pfIter != pfList_.end()) { - if (pfIter != pfIter_) { - size_t fIdx2 = (*pfIter)->indexOfGroup (group); - if (fIdx2 != (*pfIter)->nrArguments()) { - LogVar Y = ((*pfIter)->argument (fIdx2).logVars()[lvIndex]); - if ((*pfIter)->canCountConvert (Y) == false) { - // the real cost should be the cost of grounding Y - cost *= 10.0; - } - } - } - ++ pfIter; - } - return std::log (cost); -} - - 
- -void -CountingOperator::apply (void) -{ - if ((*pfIter_)->constr()->isCountNormalized (X_)) { - (*pfIter_)->countConvert (X_); - } else { - Parfactor* pf = *pfIter_; - pfList_.remove (pfIter_); - Parfactors pfs = LiftedOperations::countNormalize (pf, X_); - for (size_t i = 0; i < pfs.size(); i++) { - unsigned condCount = pfs[i]->constr()->getConditionalCount (X_); - bool cartProduct = pfs[i]->constr()->isCartesianProduct ( - pfs[i]->countedLogVars() | X_); - if (condCount > 1 && cartProduct) { - pfs[i]->countConvert (X_); - } - pfList_.add (pfs[i]); - } - delete pf; - } -} - - - -vector -CountingOperator::getValidOps (ParfactorList& pfList) -{ - vector validOps; - ParfactorList::iterator it = pfList.begin(); - while (it != pfList.end()) { - LogVarSet candidates = (*it)->uncountedLogVars(); - for (size_t i = 0; i < candidates.size(); i++) { - if (validOp (*it, candidates[i])) { - validOps.push_back (new CountingOperator ( - it, candidates[i], pfList)); - } else { - } - } - ++ it; - } - return validOps; -} - - - -string -CountingOperator::toString (void) -{ - stringstream ss; - ss << "count convert " << X_ << " in " ; - ss << (*pfIter_)->getLabel(); - ss << " [cost=" << std::exp (getLogCost()) << "]" << endl; - Parfactors pfs = LiftedOperations::countNormalize (*pfIter_, X_); - if ((*pfIter_)->constr()->isCountNormalized (X_) == false) { - for (size_t i = 0; i < pfs.size(); i++) { - ss << " º " << pfs[i]->getLabel() << endl; - } - } - for (size_t i = 0; i < pfs.size(); i++) { - delete pfs[i]; - } - return ss.str(); -} - - - -bool -CountingOperator::validOp (Parfactor* g, LogVar X) -{ - if (g->nrFormulas (X) != 1) { - return false; - } - size_t fIdx = g->indexOfLogVar (X); - if (g->argument (fIdx).isCounting()) { - return false; - } - bool countNormalized = g->constr()->isCountNormalized (X); - if (countNormalized) { - return g->canCountConvert (X); - } - return true; -} - - - -double -GroundOperator::getLogCost (void) -{ - vector> affectedFormulas; - affectedFormulas = getAffectedFormulas(); - // cout << "affected formulas: " ; - // for (size_t i = 0; i < affectedFormulas.size(); i++) { - // cout << affectedFormulas[i].first << ":" ; - // cout << affectedFormulas[i].second << " " ; - // } - // cout << "cost =" ; - double totalCost = std::log (0.0); - ParfactorList::iterator pflIt = pfList_.begin(); - while (pflIt != pfList_.end()) { - Parfactor* pf = *pflIt; - double reps = 0.0; - double pfSize = std::log (pf->size()); - bool willBeAffected = false; - LogVarSet lvsToGround; - for (size_t i = 0; i < affectedFormulas.size(); i++) { - size_t fIdx = pf->indexOfGroup (affectedFormulas[i].first); - if (fIdx != pf->nrArguments()) { - ProbFormula f = pf->argument (fIdx); - LogVar X = f.logVars()[affectedFormulas[i].second]; - bool isCountingLv = pf->countedLogVars().contains (X); - if (isCountingLv) { - unsigned nrHists = pf->range (fIdx); - unsigned nrSymbols = pf->constr()->getConditionalCount (X); - unsigned range = pf->argument (fIdx).range(); - double power = std::log (range) * nrSymbols; - pfSize = (pfSize - std::log (nrHists)) + power; - } else { - if (lvsToGround.contains (X) == false) { - reps += std::log (pf->constr()->nrSymbols (X)); - lvsToGround.insert (X); - } - } - willBeAffected = true; - } - } - if (willBeAffected) { - // cout << " + " << std::exp (reps) << "x" << std::exp (pfSize); - double pfCost = reps + pfSize; - totalCost = Util::logSum (totalCost, pfCost); - } - ++ pflIt; - } - // cout << endl; - return totalCost + 3; -} - - - -void -GroundOperator::apply (void) -{ - 
ParfactorList::iterator pfIter;
-  pfIter = getParfactorsWithGroup (pfList_, group_).front();
-  Parfactor* pf = *pfIter;
-  size_t idx = pf->indexOfGroup (group_);
-  ProbFormula f = pf->argument (idx);
-  LogVar X = f.logVars()[lvIndex_];
-  bool countedLv = pf->countedLogVars().contains (X);
-  pfList_.remove (pfIter);
-  if (countedLv) {
-    pf->fullExpand (X);
-    pfList_.add (pf);
-  } else {
-    ConstraintTrees cts = pf->constr()->ground (X);
-    for (size_t i = 0; i < cts.size(); i++) {
-      pfList_.add (new Parfactor (pf, cts[i]));
-    }
-    delete pf;
-  }
-  ParfactorList::iterator pflIt = pfList_.begin();
-  while (pflIt != pfList_.end()) {
-    (*pflIt)->simplifyGrounds();
-    ++ pflIt;
-  }
-}
-
-
-
-vector<GroundOperator*>
-GroundOperator::getValidOps (ParfactorList& pfList)
-{
-  vector<GroundOperator*> validOps;
-  set<PrvGroup> allGroups;
-  ParfactorList::const_iterator it = pfList.begin();
-  while (it != pfList.end()) {
-    const ProbFormulas& formulas = (*it)->arguments();
-    for (size_t i = 0; i < formulas.size(); i++) {
-      if (Util::contains (allGroups, formulas[i].group()) == false) {
-        const LogVars& lvs = formulas[i].logVars();
-        for (size_t j = 0; j < lvs.size(); j++) {
-          if ((*it)->constr()->isSingleton (lvs[j]) == false) {
-            validOps.push_back (new GroundOperator (
-                formulas[i].group(), j, pfList));
-          }
-        }
-        allGroups.insert (formulas[i].group());
-      }
-    }
-    ++ it;
-  }
-  return validOps;
-}
-
-
-
-string
-GroundOperator::toString (void)
-{
-  stringstream ss;
-  vector<ParfactorList::iterator> pfIters;
-  pfIters = getParfactorsWithGroup (pfList_, group_);
-  Parfactor* pf = *(getParfactorsWithGroup (pfList_, group_).front());
-  size_t idx = pf->indexOfGroup (group_);
-  ProbFormula f = pf->argument (idx);
-  LogVar lv = f.logVars()[lvIndex_];
-  TupleSet tupleSet = pf->constr()->tupleSet ({lv});
-  string pos = "th";
-  if (lvIndex_ == 0) {
-    pos = "st" ;
-  } else if (lvIndex_ == 1) {
-    pos = "nd" ;
-  } else if (lvIndex_ == 2) {
-    pos = "rd" ;
-  }
-  ss << "grounding " << lvIndex_ + 1 << pos << " log var in " ;
-  ss << f.functor() << "/" << f.arity();
-  ss << "|" << tupleSet << " (group " << group_ << ")";
-  ss << " [cost=" << std::exp (getLogCost()) << "]" << endl;
-  return ss.str();
-}
-
-
-
-vector<pair<PrvGroup, unsigned>>
-GroundOperator::getAffectedFormulas (void)
-{
-  vector<pair<PrvGroup, unsigned>> affectedFormulas;
-  affectedFormulas.push_back (make_pair (group_, lvIndex_));
-  queue<pair<PrvGroup, unsigned>> q;
-  q.push (make_pair (group_, lvIndex_));
-  while (q.empty() == false) {
-    pair<PrvGroup, unsigned> front = q.front();
-    ParfactorList::iterator pflIt = pfList_.begin();
-    while (pflIt != pfList_.end()) {
-      size_t idx = (*pflIt)->indexOfGroup (front.first);
-      if (idx != (*pflIt)->nrArguments()) {
-        ProbFormula f = (*pflIt)->argument (idx);
-        LogVar X = f.logVars()[front.second];
-        const ProbFormulas& fs = (*pflIt)->arguments();
-        for (size_t i = 0; i < fs.size(); i++) {
-          if (i != idx && fs[i].contains (X)) {
-            pair<PrvGroup, unsigned> pair = make_pair (
-                fs[i].group(), fs[i].indexOf (X));
-            if (Util::contains (affectedFormulas, pair) == false) {
-              q.push (pair);
-              affectedFormulas.push_back (pair);
-            }
-          }
-        }
-      }
-      ++ pflIt;
-    }
-    q.pop();
-  }
-  return affectedFormulas;
-}
-
-
-
-Params
-LiftedVe::solveQuery (const Grounds& query)
-{
-  assert (query.empty() == false);
-  pfList_ = parfactorList;
-  runSolver (query);
-  (*pfList_.begin())->normalize();
-  Params params = (*pfList_.begin())->params();
-  if (Globals::logDomain) {
-    Util::exp (params);
-  }
-  return params;
-}
-
-
-
-void
-LiftedVe::printSolverFlags (void) const
-{
-  stringstream ss;
-  ss << "lve [" ;
-  ss << "log_domain=" << Util::toString (Globals::logDomain);
-  ss << "]" ;
-  cout << ss.str() << endl;
-}
-
-
-
-void
-LiftedVe::runSolver (const Grounds& query)
-{
-  largestCost_ = std::log (0);
-  LiftedOperations::shatterAgainstQuery (pfList_, query);
-  LiftedOperations::runWeakBayesBall (pfList_, query);
-  while (true) {
-    if (Globals::verbosity > 2) {
-      Util::printDashedLine();
-      pfList_.print();
-      if (Globals::verbosity > 3) {
-        LiftedOperator::printValidOps (pfList_, query);
-      }
-    }
-    LiftedOperator* op = getBestOperation (query);
-    if (op == 0) {
-      break;
-    }
-    if (Globals::verbosity > 1) {
-      cout << "best operation: " << op->toString();
-      if (Globals::verbosity > 2) {
-        cout << endl;
-      }
-    }
-    op->apply();
-    delete op;
-  }
-  assert (pfList_.size() > 0);
-  if (pfList_.size() > 1) {
-    ParfactorList::iterator pfIter = pfList_.begin();
-    ++ pfIter;
-    while (pfIter != pfList_.end()) {
-      (*pfList_.begin())->multiply (**pfIter);
-      ++ pfIter;
-    }
-  }
-  if (Globals::verbosity > 0) {
-    cout << "largest cost = " << std::exp (largestCost_) << endl;
-    cout << endl;
-  }
-  (*pfList_.begin())->simplifyGrounds();
-  (*pfList_.begin())->reorderAccordingGrounds (query);
-}
-
-
-
-LiftedOperator*
-LiftedVe::getBestOperation (const Grounds& query)
-{
-  double bestCost = 0.0;
-  LiftedOperator* bestOp = 0;
-  vector<LiftedOperator*> validOps;
-  validOps = LiftedOperator::getValidOps (pfList_, query);
-  for (size_t i = 0; i < validOps.size(); i++) {
-    double cost = validOps[i]->getLogCost();
-    if ((bestOp == 0) || (cost < bestCost)) {
-      bestOp = validOps[i];
-      bestCost = cost;
-    }
-  }
-  if (bestCost > largestCost_) {
-    largestCost_ = bestCost;
-  }
-  for (size_t i = 0; i < validOps.size(); i++) {
-    if (validOps[i] != bestOp) {
-      delete validOps[i];
-    }
-  }
-  return bestOp;
-}
-
diff --git a/packages/CLPBN/horus2/LiftedVe.h b/packages/CLPBN/horus2/LiftedVe.h
deleted file mode 100644
index 7d9974294..000000000
--- a/packages/CLPBN/horus2/LiftedVe.h
+++ /dev/null
@@ -1,155 +0,0 @@
-#ifndef HORUS_LIFTEDVE_H
-#define HORUS_LIFTEDVE_H
-
-#include "LiftedSolver.h"
-#include "ParfactorList.h"
-
-
-class LiftedOperator
-{
-  public:
-    virtual ~LiftedOperator (void) { }
-
-    virtual double getLogCost (void) = 0;
-
-    virtual void apply (void) = 0;
-
-    virtual string toString (void) = 0;
-
-    static vector<LiftedOperator*> getValidOps (
-        ParfactorList&, const Grounds&);
-
-    static void printValidOps (ParfactorList&, const Grounds&);
-
-    static vector<ParfactorList::iterator> getParfactorsWithGroup (
-        ParfactorList&, PrvGroup group);
-};
-
-
-
-class ProductOperator : public LiftedOperator
-{
-  public:
-    ProductOperator (
-        ParfactorList::iterator g1, ParfactorList::iterator g2,
-        ParfactorList& pfList) : g1_(g1), g2_(g2), pfList_(pfList) { }
-
-    double getLogCost (void);
-
-    void apply (void);
-
-    static vector<ProductOperator*> getValidOps (ParfactorList&);
-
-    string toString (void);
-
-  private:
-    static bool validOp (Parfactor*, Parfactor*);
-
-    ParfactorList::iterator g1_;
-    ParfactorList::iterator g2_;
-    ParfactorList& pfList_;
-};
-
-
-
-class SumOutOperator : public LiftedOperator
-{
-  public:
-    SumOutOperator (PrvGroup group, ParfactorList& pfList)
-        : group_(group), pfList_(pfList) { }
-
-    double getLogCost (void);
-
-    void apply (void);
-
-    static vector<SumOutOperator*> getValidOps (
-        ParfactorList&, const Grounds&);
-
-    string toString (void);
-
-  private:
-    static bool validOp (PrvGroup, ParfactorList&, const Grounds&);
-
-    static bool isToEliminate (Parfactor*, PrvGroup, const Grounds&);
-
-    PrvGroup group_;
-    ParfactorList& pfList_;
-};
-
-
-
-class CountingOperator : public LiftedOperator
-{
-  public:
-    CountingOperator (
-        ParfactorList::iterator pfIter,
-        LogVar X,
-        ParfactorList& pfList)
-        : pfIter_(pfIter), X_(X), pfList_(pfList) { }
-
-    double getLogCost (void);
-
-    void apply (void);
-
-    static vector<CountingOperator*> getValidOps (ParfactorList&);
-
-    string toString (void);
-
-  private:
-    static bool validOp (Parfactor*, LogVar);
-
-    ParfactorList::iterator pfIter_;
-    LogVar X_;
-    ParfactorList& pfList_;
-};
-
-
-
-class GroundOperator : public LiftedOperator
-{
-  public:
-    GroundOperator (
-        PrvGroup group,
-        unsigned lvIndex,
-        ParfactorList& pfList)
-        : group_(group), lvIndex_(lvIndex), pfList_(pfList) { }
-
-    double getLogCost (void);
-
-    void apply (void);
-
-    static vector<GroundOperator*> getValidOps (ParfactorList&);
-
-    string toString (void);
-
-  private:
-    vector<pair<PrvGroup, unsigned>> getAffectedFormulas (void);
-
-    PrvGroup group_;
-    unsigned lvIndex_;
-    ParfactorList& pfList_;
-};
-
-
-
-class LiftedVe : public LiftedSolver
-{
-  public:
-    LiftedVe (const ParfactorList& pfList)
-        : LiftedSolver(pfList) { }
-
-    Params solveQuery (const Grounds&);
-
-    void printSolverFlags (void) const;
-
-  private:
-    void runSolver (const Grounds&);
-
-    LiftedOperator* getBestOperation (const Grounds&);
-
-    ParfactorList pfList_;
-    double largestCost_;
-};
-
-#endif // HORUS_LIFTEDVE_H
-
diff --git a/packages/CLPBN/horus2/LiftedWCNF.cpp b/packages/CLPBN/horus2/LiftedWCNF.cpp
deleted file mode 100644
index ba7097dbf..000000000
--- a/packages/CLPBN/horus2/LiftedWCNF.cpp
+++ /dev/null
@@ -1,658 +0,0 @@
-#include "LiftedWCNF.h"
-#include "ConstraintTree.h"
-#include "Indexer.h"
-
-
-
-bool
-Literal::isGround (ConstraintTree constr, LogVarSet ipgLogVars) const
-{
-  if (logVars_.size() == 0) {
-    return true;
-  }
-  LogVarSet lvs (logVars_);
-  lvs -= ipgLogVars;
-  return constr.singletons().contains (lvs);
-}
-
-
-
-size_t
-Literal::indexOfLogVar (LogVar X) const
-{
-  return Util::indexOf (logVars_, X);
-}
-
-
-
-string
-Literal::toString (
-    LogVarSet ipgLogVars,
-    LogVarSet posCountedLvs,
-    LogVarSet negCountedLvs) const
-{
-  stringstream ss;
-  negated_ ? ss << "¬" : ss << "" ;
-  ss << "λ" ;
-  ss << lid_ ;
-  if (logVars_.empty() == false) {
-    ss << "(" ;
-    for (size_t i = 0; i < logVars_.size(); i++) {
-      if (i != 0) ss << ",";
-      if (posCountedLvs.contains (logVars_[i])) {
-        ss << "+" << logVars_[i];
-      } else if (negCountedLvs.contains (logVars_[i])) {
-        ss << "-" << logVars_[i];
-      } else if (ipgLogVars.contains (logVars_[i])) {
-        LogVar X = logVars_[i];
-        const string labels[] = {
-            "a", "b", "c", "d", "e", "f",
-            "g", "h", "i", "j", "k", "m" };
-        (X >= 12) ?
ss << "x_" << X : ss << labels[X]; - } else { - ss << logVars_[i]; - } - } - ss << ")" ; - } - return ss.str(); -} - - - -std::ostream& -operator<< (ostream &os, const Literal& lit) -{ - os << lit.toString(); - return os; -} - - - -void -Clause::addLiteralComplemented (const Literal& lit) -{ - assert (constr_.logVarSet().contains (lit.logVars())); - literals_.push_back (lit); - literals_.back().complement(); -} - - - -bool -Clause::containsLiteral (LiteralId lid) const -{ - for (size_t i = 0; i < literals_.size(); i++) { - if (literals_[i].lid() == lid) { - return true; - } - } - return false; -} - - - -bool -Clause::containsPositiveLiteral ( - LiteralId lid, - const LogVarTypes& types) const -{ - for (size_t i = 0; i < literals_.size(); i++) { - if (literals_[i].lid() == lid - && literals_[i].isPositive() - && logVarTypes (i) == types) { - return true; - } - } - return false; -} - - - -bool -Clause::containsNegativeLiteral ( - LiteralId lid, - const LogVarTypes& types) const -{ - for (size_t i = 0; i < literals_.size(); i++) { - if (literals_[i].lid() == lid - && literals_[i].isNegative() - && logVarTypes (i) == types) { - return true; - } - } - return false; -} - - - -void -Clause::removeLiterals (LiteralId lid) -{ - size_t i = 0; - while (i != literals_.size()) { - if (literals_[i].lid() == lid) { - removeLiteral (i); - } else { - i ++; - } - } -} - - - -void -Clause::removePositiveLiterals ( - LiteralId lid, - const LogVarTypes& types) -{ - size_t i = 0; - while (i != literals_.size()) { - if (literals_[i].lid() == lid - && literals_[i].isPositive() - && logVarTypes (i) == types) { - removeLiteral (i); - } else { - i ++; - } - } -} - - - -void -Clause::removeNegativeLiterals ( - LiteralId lid, - const LogVarTypes& types) -{ - size_t i = 0; - while (i != literals_.size()) { - if (literals_[i].lid() == lid - && literals_[i].isNegative() - && logVarTypes (i) == types) { - removeLiteral (i); - } else { - i ++; - } - } -} - - - -bool -Clause::isCountedLogVar (LogVar X) const -{ - assert (constr_.logVarSet().contains (X)); - return posCountedLvs_.contains (X) - || negCountedLvs_.contains (X); -} - - - -bool -Clause::isPositiveCountedLogVar (LogVar X) const -{ - assert (constr_.logVarSet().contains (X)); - return posCountedLvs_.contains (X); -} - - - -bool -Clause::isNegativeCountedLogVar (LogVar X) const -{ - assert (constr_.logVarSet().contains (X)); - return negCountedLvs_.contains (X); -} - - - -bool -Clause::isIpgLogVar (LogVar X) const -{ - assert (constr_.logVarSet().contains (X)); - return ipgLvs_.contains (X); -} - - - -TinySet -Clause::lidSet (void) const -{ - TinySet lidSet; - for (size_t i = 0; i < literals_.size(); i++) { - lidSet.insert (literals_[i].lid()); - } - return lidSet; -} - - - -LogVarSet -Clause::ipgCandidates (void) const -{ - LogVarSet candidates; - LogVarSet allLvs = constr_.logVarSet(); - allLvs -= ipgLvs_; - allLvs -= posCountedLvs_; - allLvs -= negCountedLvs_; - for (size_t i = 0; i < allLvs.size(); i++) { - bool valid = true; - for (size_t j = 0; j < literals_.size(); j++) { - if (Util::contains (literals_[j].logVars(), allLvs[i]) == false) { - valid = false; - break; - } - } - if (valid) { - candidates.insert (allLvs[i]); - } - } - return candidates; -} - - - -LogVarTypes -Clause::logVarTypes (size_t litIdx) const -{ - LogVarTypes types; - const LogVars& lvs = literals_[litIdx].logVars(); - for (size_t i = 0; i < lvs.size(); i++) { - if (posCountedLvs_.contains (lvs[i])) { - types.push_back (LogVarType::POS_LV); - } else if (negCountedLvs_.contains (lvs[i])) { 
- types.push_back (LogVarType::NEG_LV); - } else { - types.push_back (LogVarType::FULL_LV); - } - } - return types; -} - - - -void -Clause::removeLiteral (size_t litIdx) -{ - LogVarSet lvsToRemove = literals_[litIdx].logVarSet() - - getLogVarSetExcluding (litIdx); - ipgLvs_ -= lvsToRemove; - posCountedLvs_ -= lvsToRemove; - negCountedLvs_ -= lvsToRemove; - constr_.remove (lvsToRemove); - literals_.erase (literals_.begin() + litIdx); -} - - - -bool -Clause::independentClauses (Clause& c1, Clause& c2) -{ - const Literals& lits1 = c1.literals(); - const Literals& lits2 = c2.literals(); - for (size_t i = 0; i < lits1.size(); i++) { - for (size_t j = 0; j < lits2.size(); j++) { - if (lits1[i].lid() == lits2[j].lid() - && c1.logVarTypes (i) == c2.logVarTypes (j)) { - return false; - } - } - } - return true; -} - - - -Clauses -Clause::copyClauses (const Clauses& clauses) -{ - Clauses copy; - copy.reserve (clauses.size()); - for (size_t i = 0; i < clauses.size(); i++) { - copy.push_back (new Clause (*clauses[i])); - } - return copy; -} - - - -void -Clause::printClauses (const Clauses& clauses) -{ - for (size_t i = 0; i < clauses.size(); i++) { - cout << *clauses[i] << endl; - } -} - - - -void -Clause::deleteClauses (Clauses& clauses) -{ - for (size_t i = 0; i < clauses.size(); i++) { - delete clauses[i]; - } -} - - - -std::ostream& -operator<< (ostream &os, const Clause& clause) -{ - for (unsigned i = 0; i < clause.literals_.size(); i++) { - if (i != 0) os << " v " ; - os << clause.literals_[i].toString (clause.ipgLvs_, - clause.posCountedLvs_, clause.negCountedLvs_); - } - if (clause.constr_.empty() == false) { - ConstraintTree copy (clause.constr_); - copy.moveToTop (copy.logVarSet().elements()); - os << " | " << copy.tupleSet(); - } - return os; -} - - - -LogVarSet -Clause::getLogVarSetExcluding (size_t idx) const -{ - LogVarSet lvs; - for (size_t i = 0; i < literals_.size(); i++) { - if (i != idx) { - lvs |= literals_[i].logVars(); - } - } - return lvs; -} - - - -std::ostream& -operator<< (std::ostream &os, const LitLvTypes& lit) -{ - os << lit.lid_ << "<" ; - for (size_t i = 0; i < lit.lvTypes_.size(); i++) { - switch (lit.lvTypes_[i]) { - case LogVarType::FULL_LV: os << "F" ; break; - case LogVarType::POS_LV: os << "P" ; break; - case LogVarType::NEG_LV: os << "N" ; break; - } - } - os << ">" ; - return os; -} - - - -LiftedWCNF::LiftedWCNF (const ParfactorList& pfList) - : freeLiteralId_(0), pfList_(pfList) -{ - addIndicatorClauses (pfList); - addParameterClauses (pfList); - - /* - // INCLUSION-EXCLUSION TEST - clauses_.clear(); - vector> names = { - {"a1","b1"},{"a2","b2"} - }; - Clause* c1 = new Clause (names); - c1->addLiteral (Literal (0, LogVars() = {0})); - c1->addLiteral (Literal (1, LogVars() = {1})); - clauses_.push_back(c1); - */ - - /* - // INDEPENDENT PARTIAL GROUND TEST - clauses_.clear(); - vector> names = { - {"a1","b1"},{"a2","b2"} - }; - Clause* c1 = new Clause (names); - c1->addLiteral (Literal (0, LogVars() = {0,1})); - c1->addLiteral (Literal (1, LogVars() = {0,1})); - clauses_.push_back(c1); - Clause* c2 = new Clause (names); - c2->addLiteral (Literal (2, LogVars() = {0})); - c2->addLiteral (Literal (1, LogVars() = {0,1})); - clauses_.push_back(c2); - */ - - /* - // ATOM-COUNTING TEST - clauses_.clear(); - vector> names = { - {"p1","p1"},{"p1","p2"},{"p1","p3"}, - {"p2","p1"},{"p2","p2"},{"p2","p3"}, - {"p3","p1"},{"p3","p2"},{"p3","p3"} - }; - Clause* c1 = new Clause (names); - c1->addLiteral (Literal (0, LogVars() = {0})); - c1->addLiteralComplemented (Literal (1, 
{0,1})); - clauses_.push_back(c1); - Clause* c2 = new Clause (names); - c2->addLiteral (Literal (0, LogVars()={0})); - c2->addLiteralComplemented (Literal (1, {1,0})); - clauses_.push_back(c2); - */ - - if (Globals::verbosity > 1) { - cout << "FORMULA INDICATORS:" << endl; - printFormulaIndicators(); - cout << endl; - cout << "WEIGHTED INDICATORS:" << endl; - printWeights(); - cout << endl; - cout << "CLAUSES:" << endl; - printClauses(); - cout << endl; - } -} - - - -LiftedWCNF::~LiftedWCNF (void) -{ - Clause::deleteClauses (clauses_); -} - - - -void -LiftedWCNF::addWeight (LiteralId lid, double posW, double negW) -{ - weights_[lid] = make_pair (posW, negW); -} - - - -double -LiftedWCNF::posWeight (LiteralId lid) const -{ - unordered_map>::const_iterator it; - it = weights_.find (lid); - return it != weights_.end() ? it->second.first : LogAware::one(); -} - - - -double -LiftedWCNF::negWeight (LiteralId lid) const -{ - unordered_map>::const_iterator it; - it = weights_.find (lid); - return it != weights_.end() ? it->second.second : LogAware::one(); -} - - - -vector -LiftedWCNF::prvGroupLiterals (PrvGroup prvGroup) -{ - assert (Util::contains (map_, prvGroup)); - return map_[prvGroup]; -} - - - -Clause* -LiftedWCNF::createClause (LiteralId lid) const -{ - for (size_t i = 0; i < clauses_.size(); i++) { - const Literals& literals = clauses_[i]->literals(); - for (size_t j = 0; j < literals.size(); j++) { - if (literals[j].lid() == lid) { - ConstraintTree ct = clauses_[i]->constr().projectedCopy ( - literals[j].logVars()); - Clause* c = new Clause (ct); - c->addLiteral (literals[j]); - return c; - } - } - } - return 0; -} - - - -LiteralId -LiftedWCNF::getLiteralId (PrvGroup prvGroup, unsigned range) -{ - assert (Util::contains (map_, prvGroup)); - return map_[prvGroup][range]; -} - - - -void -LiftedWCNF::addIndicatorClauses (const ParfactorList& pfList) -{ - ParfactorList::const_iterator it = pfList.begin(); - while (it != pfList.end()) { - const ProbFormulas& formulas = (*it)->arguments(); - for (size_t i = 0; i < formulas.size(); i++) { - if (Util::contains (map_, formulas[i].group()) == false) { - ConstraintTree tempConstr = (*it)->constr()->projectedCopy( - formulas[i].logVars()); - Clause* clause = new Clause (tempConstr); - vector lids; - for (size_t j = 0; j < formulas[i].range(); j++) { - clause->addLiteral (Literal (freeLiteralId_, formulas[i].logVars())); - lids.push_back (freeLiteralId_); - freeLiteralId_ ++; - } - clauses_.push_back (clause); - for (size_t j = 0; j < formulas[i].range() - 1; j++) { - for (size_t k = j + 1; k < formulas[i].range(); k++) { - ConstraintTree tempConstr2 = (*it)->constr()->projectedCopy ( - formulas[i].logVars()); - Clause* clause2 = new Clause (tempConstr2); - clause2->addLiteralComplemented (Literal (clause->literals()[j])); - clause2->addLiteralComplemented (Literal (clause->literals()[k])); - clauses_.push_back (clause2); - } - } - map_[formulas[i].group()] = lids; - } - } - ++ it; - } -} - - - -void -LiftedWCNF::addParameterClauses (const ParfactorList& pfList) -{ - ParfactorList::const_iterator it = pfList.begin(); - while (it != pfList.end()) { - Indexer indexer ((*it)->ranges()); - vector groups = (*it)->getAllGroups(); - while (indexer.valid()) { - LiteralId paramVarLid = freeLiteralId_; - // λu1 ∧ ... ∧ λun ∧ λxi <=> θxi|u1,...,un - // - // ¬λu1 ... 
¬λun v θxi|u1,...,un -> clause1 - // ¬θxi|u1,...,un v λu1 -> tempClause - // ¬θxi|u1,...,un v λu2 -> tempClause - double posWeight = (**it)[indexer]; - addWeight (paramVarLid, posWeight, LogAware::one()); - - Clause* clause1 = new Clause (*(*it)->constr()); - - for (unsigned i = 0; i < groups.size(); i++) { - LiteralId lid = getLiteralId (groups[i], indexer[i]); - - clause1->addLiteralComplemented ( - Literal (lid, (*it)->argument(i).logVars())); - - ConstraintTree ct = *(*it)->constr(); - Clause* tempClause = new Clause (ct); - tempClause->addLiteralComplemented (Literal ( - paramVarLid, (*it)->constr()->logVars())); - tempClause->addLiteral (Literal (lid, (*it)->argument(i).logVars())); - clauses_.push_back (tempClause); - } - clause1->addLiteral (Literal (paramVarLid, (*it)->constr()->logVars())); - clauses_.push_back (clause1); - freeLiteralId_ ++; - ++ indexer; - } - ++ it; - } -} - - - -void -LiftedWCNF::printFormulaIndicators (void) const -{ - if (map_.empty()) { - return; - } - set allGroups; - ParfactorList::const_iterator it = pfList_.begin(); - while (it != pfList_.end()) { - const ProbFormulas& formulas = (*it)->arguments(); - for (size_t i = 0; i < formulas.size(); i++) { - if (Util::contains (allGroups, formulas[i].group()) == false) { - allGroups.insert (formulas[i].group()); - cout << formulas[i] << " | " ; - ConstraintTree tempCt = (*it)->constr()->projectedCopy ( - formulas[i].logVars()); - cout << tempCt.tupleSet(); - cout << " indicators => " ; - vector indicators = - (map_.find (formulas[i].group()))->second; - cout << indicators << endl; - } - } - ++ it; - } -} - - - -void -LiftedWCNF::printWeights (void) const -{ - unordered_map>::const_iterator it; - it = weights_.begin(); - while (it != weights_.end()) { - cout << "λ" << it->first << " weights: " ; - cout << it->second.first << " " << it->second.second; - cout << endl; - ++ it; - } -} - - - -void -LiftedWCNF::printClauses (void) const -{ - Clause::printClauses (clauses_); -} - diff --git a/packages/CLPBN/horus2/LiftedWCNF.h b/packages/CLPBN/horus2/LiftedWCNF.h deleted file mode 100644 index e0f901b7c..000000000 --- a/packages/CLPBN/horus2/LiftedWCNF.h +++ /dev/null @@ -1,239 +0,0 @@ -#ifndef HORUS_LIFTEDWCNF_H -#define HORUS_LIFTEDWCNF_H - -#include "ParfactorList.h" - -using namespace std; - -typedef long LiteralId; - -class ConstraintTree; - - -enum LogVarType -{ - FULL_LV, - POS_LV, - NEG_LV -}; - -typedef vector LogVarTypes; - - - -class Literal -{ - public: - Literal (LiteralId lid, const LogVars& lvs) : - lid_(lid), logVars_(lvs), negated_(false) { } - - Literal (const Literal& lit, bool negated) : - lid_(lit.lid_), logVars_(lit.logVars_), negated_(negated) { } - - LiteralId lid (void) const { return lid_; } - - LogVars logVars (void) const { return logVars_; } - - size_t nrLogVars (void) const { return logVars_.size(); } - - LogVarSet logVarSet (void) const { return LogVarSet (logVars_); } - - void complement (void) { negated_ = !negated_; } - - bool isPositive (void) const { return negated_ == false; } - - bool isNegative (void) const { return negated_; } - - bool isGround (ConstraintTree constr, LogVarSet ipgLogVars) const; - - size_t indexOfLogVar (LogVar X) const; - - string toString (LogVarSet ipgLogVars = LogVarSet(), - LogVarSet posCountedLvs = LogVarSet(), - LogVarSet negCountedLvs = LogVarSet()) const; - - friend std::ostream& operator<< (std::ostream &os, const Literal& lit); - - private: - LiteralId lid_; - LogVars logVars_; - bool negated_; -}; - -typedef vector Literals; - - - -class Clause -{ - 
public: - Clause (const ConstraintTree& ct = ConstraintTree({})) : constr_(ct) { } - - Clause (vector> names) : constr_(ConstraintTree (names)) { } - - void addLiteral (const Literal& l) { literals_.push_back (l); } - - const Literals& literals (void) const { return literals_; } - - Literals& literals (void) { return literals_; } - - size_t nrLiterals (void) const { return literals_.size(); } - - const ConstraintTree& constr (void) const { return constr_; } - - ConstraintTree constr (void) { return constr_; } - - bool isUnit (void) const { return literals_.size() == 1; } - - LogVarSet ipgLogVars (void) const { return ipgLvs_; } - - void addIpgLogVar (LogVar X) { ipgLvs_.insert (X); } - - void addPosCountedLogVar (LogVar X) { posCountedLvs_.insert (X); } - - void addNegCountedLogVar (LogVar X) { negCountedLvs_.insert (X); } - - LogVarSet posCountedLogVars (void) const { return posCountedLvs_; } - - LogVarSet negCountedLogVars (void) const { return negCountedLvs_; } - - unsigned nrPosCountedLogVars (void) const { return posCountedLvs_.size(); } - - unsigned nrNegCountedLogVars (void) const { return negCountedLvs_.size(); } - - void addLiteralComplemented (const Literal& lit); - - bool containsLiteral (LiteralId lid) const; - - bool containsPositiveLiteral (LiteralId lid, const LogVarTypes&) const; - - bool containsNegativeLiteral (LiteralId lid, const LogVarTypes&) const; - - void removeLiterals (LiteralId lid); - - void removePositiveLiterals (LiteralId lid, const LogVarTypes&); - - void removeNegativeLiterals (LiteralId lid, const LogVarTypes&); - - bool isCountedLogVar (LogVar X) const; - - bool isPositiveCountedLogVar (LogVar X) const; - - bool isNegativeCountedLogVar (LogVar X) const; - - bool isIpgLogVar (LogVar X) const; - - TinySet lidSet (void) const; - - LogVarSet ipgCandidates (void) const; - - LogVarTypes logVarTypes (size_t litIdx) const; - - void removeLiteral (size_t litIdx); - - static bool independentClauses (Clause& c1, Clause& c2); - - static vector copyClauses (const vector& clauses); - - static void printClauses (const vector& clauses); - - static void deleteClauses (vector& clauses); - - friend std::ostream& operator<< (ostream &os, const Clause& clause); - - private: - LogVarSet getLogVarSetExcluding (size_t idx) const; - - Literals literals_; - LogVarSet ipgLvs_; - LogVarSet posCountedLvs_; - LogVarSet negCountedLvs_; - ConstraintTree constr_; -}; - -typedef vector Clauses; - - - -class LitLvTypes -{ - public: - struct CompareLitLvTypes - { - bool operator() ( - const LitLvTypes& types1, - const LitLvTypes& types2) const - { - if (types1.lid_ < types2.lid_) { - return true; - } - if (types1.lid_ == types2.lid_) { - return types1.lvTypes_ < types2.lvTypes_; - } - return false; - } - }; - - LitLvTypes (LiteralId lid, const LogVarTypes& lvTypes) : - lid_(lid), lvTypes_(lvTypes) { } - - LiteralId lid (void) const { return lid_; } - - const LogVarTypes& logVarTypes (void) const { return lvTypes_; } - - void setAllFullLogVars (void) { - std::fill (lvTypes_.begin(), lvTypes_.end(), LogVarType::FULL_LV); } - - friend std::ostream& operator<< (std::ostream &os, const LitLvTypes& lit); - - private: - LiteralId lid_; - LogVarTypes lvTypes_; -}; - -typedef TinySet LitLvTypesSet; - - - -class LiftedWCNF -{ - public: - LiftedWCNF (const ParfactorList& pfList); - - ~LiftedWCNF (void); - - const Clauses& clauses (void) const { return clauses_; } - - void addWeight (LiteralId lid, double posW, double negW); - - double posWeight (LiteralId lid) const; - - double negWeight (LiteralId 
lid) const; - - vector prvGroupLiterals (PrvGroup prvGroup); - - Clause* createClause (LiteralId lid) const; - - void printFormulaIndicators (void) const; - - void printWeights (void) const; - - void printClauses (void) const; - - private: - - LiteralId getLiteralId (PrvGroup prvGroup, unsigned range); - - void addIndicatorClauses (const ParfactorList& pfList); - - void addParameterClauses (const ParfactorList& pfList); - - Clauses clauses_; - LiteralId freeLiteralId_; - const ParfactorList& pfList_; - unordered_map> map_; - unordered_map> weights_; -}; - -#endif // HORUS_LIFTEDWCNF_H - diff --git a/packages/CLPBN/horus2/Parfactor.cpp b/packages/CLPBN/horus2/Parfactor.cpp deleted file mode 100644 index ef2301b7b..000000000 --- a/packages/CLPBN/horus2/Parfactor.cpp +++ /dev/null @@ -1,942 +0,0 @@ - -#include "Parfactor.h" -#include "Histogram.h" -#include "Indexer.h" -#include "Util.h" -#include "Horus.h" - - -Parfactor::Parfactor ( - const ProbFormulas& formulas, - const Params& params, - const Tuples& tuples, - unsigned distId) -{ - args_ = formulas; - params_ = params; - distId_ = distId; - - LogVars logVars; - for (size_t i = 0; i < args_.size(); i++) { - ranges_.push_back (args_[i].range()); - const LogVars& lvs = args_[i].logVars(); - for (size_t j = 0; j < lvs.size(); j++) { - if (Util::contains (logVars, lvs[j]) == false) { - logVars.push_back (lvs[j]); - } - } - } - LogVar newLv = logVars.size(); - constr_ = new ConstraintTree (logVars, tuples); - // Change formulas like f(X,X), X in {(p1),(p2),...} - // to be like f(X,Y), (X,Y) in {(p1,p1),(p2,p2),...}. - // This will simplify shattering on the constraint tree. - for (size_t i = 0; i < args_.size(); i++) { - LogVarSet lvSet; - LogVars& lvs = args_[i].logVars(); - for (size_t j = 0; j < lvs.size(); j++) { - if (lvSet.contains (lvs[j]) == false) { - lvSet |= lvs[j]; - } else { - constr_->cloneLogVar (lvs[j], newLv); - lvs[j] = newLv; - ++ newLv; - } - } - } - assert (params_.size() == Util::sizeExpected (ranges_)); -} - - - -Parfactor::Parfactor (const Parfactor* g, const Tuple& tuple) -{ - args_ = g->arguments(); - params_ = g->params(); - ranges_ = g->ranges(); - distId_ = g->distId(); - constr_ = new ConstraintTree (g->logVars(), {tuple}); - assert (params_.size() == Util::sizeExpected (ranges_)); -} - - - -Parfactor::Parfactor (const Parfactor* g, ConstraintTree* constr) -{ - args_ = g->arguments(); - params_ = g->params(); - ranges_ = g->ranges(); - distId_ = g->distId(); - constr_ = constr; - assert (params_.size() == Util::sizeExpected (ranges_)); -} - - - -Parfactor::Parfactor (const Parfactor& g) -{ - args_ = g.arguments(); - params_ = g.params(); - ranges_ = g.ranges(); - distId_ = g.distId(); - constr_ = new ConstraintTree (*g.constr()); - assert (params_.size() == Util::sizeExpected (ranges_)); -} - - - -Parfactor::~Parfactor (void) -{ - delete constr_; -} - - - -LogVarSet -Parfactor::countedLogVars (void) const -{ - LogVarSet set; - for (size_t i = 0; i < args_.size(); i++) { - if (args_[i].isCounting()) { - set.insert (args_[i].countedLogVar()); - } - } - return set; -} - - - -LogVarSet -Parfactor::uncountedLogVars (void) const -{ - return constr_->logVarSet() - countedLogVars(); -} - - - -LogVarSet -Parfactor::elimLogVars (void) const -{ - LogVarSet requiredToElim = constr_->logVarSet(); - requiredToElim -= constr_->singletons(); - requiredToElim -= countedLogVars(); - return requiredToElim; -} - - - -LogVarSet -Parfactor::exclusiveLogVars (size_t fIdx) const -{ - assert (fIdx < args_.size()); - LogVarSet remaining; - 
for (size_t i = 0; i < args_.size(); i++) { - if (i != fIdx) { - remaining |= args_[i].logVarSet(); - } - } - return args_[fIdx].logVarSet() - remaining; -} - - - -void -Parfactor::sumOutIndex (size_t fIdx) -{ - assert (fIdx < args_.size()); - assert (args_[fIdx].contains (elimLogVars())); - - if (args_[fIdx].isCounting()) { - unsigned N = constr_->getConditionalCount ( - args_[fIdx].countedLogVar()); - unsigned R = args_[fIdx].range(); - vector numAssigns = HistogramSet::getNumAssigns (N, R); - Indexer indexer (ranges_, fIdx); - while (indexer.valid()) { - if (Globals::logDomain) { - params_[indexer] += numAssigns[ indexer[fIdx] ]; - } else { - params_[indexer] *= numAssigns[ indexer[fIdx] ]; - } - ++ indexer; - } - } - - LogVarSet excl = exclusiveLogVars (fIdx); - unsigned exp; - if (args_[fIdx].isCounting()) { - // counting log vars were already raised on counting conversion - exp = constr_->getConditionalCount (excl - args_[fIdx].countedLogVar()); - } else { - exp = constr_->getConditionalCount (excl); - } - constr_->remove (excl); - - TFactor::sumOutIndex (fIdx); - LogAware::pow (params_, exp); -} - - - -void -Parfactor::multiply (Parfactor& g) -{ - alignAndExponentiate (this, &g); - TFactor::multiply (g); - constr_->join (g.constr(), true); - simplifyGrounds(); - assert (constr_->isCartesianProduct (countedLogVars())); -} - - - -bool -Parfactor::canCountConvert (LogVar X) -{ - if (nrFormulas (X) != 1) { - return false; - } - size_t fIdx = indexOfLogVar (X); - if (args_[fIdx].isCounting()) { - return false; - } - if (constr_->isCountNormalized (X) == false) { - return false; - } - if (constr_->getConditionalCount (X) == 1) { - return false; - } - if (constr_->isCartesianProduct (countedLogVars() | X) == false) { - return false; - } - return true; -} - - - -void -Parfactor::countConvert (LogVar X) -{ - size_t fIdx = indexOfLogVar (X); - assert (constr_->isCountNormalized (X)); - assert (constr_->getConditionalCount (X) > 1); - assert (canCountConvert (X)); - - unsigned N = constr_->getConditionalCount (X); - unsigned R = ranges_[fIdx]; - unsigned H = HistogramSet::nrHistograms (N, R); - vector histograms = HistogramSet::getHistograms (N, R); - - Indexer indexer (ranges_); - vector sumout (params_.size() / R); - unsigned count = 0; - while (indexer.valid()) { - sumout[count].reserve (R); - for (unsigned r = 0; r < R; r++) { - sumout[count].push_back (params_[indexer]); - indexer.incrementDimension (fIdx); - } - count ++; - indexer.resetDimension (fIdx); - indexer.incrementExceptDimension (fIdx); - } - - params_.clear(); - params_.reserve (sumout.size() * H); - - ranges_[fIdx] = H; - MapIndexer mapIndexer (ranges_, fIdx); - while (mapIndexer.valid()) { - double prod = LogAware::multIdenty(); - size_t i = mapIndexer; - unsigned h = mapIndexer[fIdx]; - for (unsigned r = 0; r < R; r++) { - if (Globals::logDomain) { - prod += LogAware::pow (sumout[i][r], histograms[h][r]); - } else { - prod *= LogAware::pow (sumout[i][r], histograms[h][r]); - } - } - params_.push_back (prod); - ++ mapIndexer; - } - args_[fIdx].setCountedLogVar (X); - simplifyCountingFormulas (fIdx); -} - - - -void -Parfactor::expand (LogVar X, LogVar X_new1, LogVar X_new2) -{ - size_t fIdx = indexOfLogVar (X); - assert (fIdx != args_.size()); - assert (args_[fIdx].isCounting()); - - unsigned N1 = constr_->getConditionalCount (X_new1); - unsigned N2 = constr_->getConditionalCount (X_new2); - unsigned N = N1 + N2; - unsigned R = args_[fIdx].range(); - unsigned H1 = HistogramSet::nrHistograms (N1, R); - unsigned H2 = 
HistogramSet::nrHistograms (N2, R); - - vector histograms = HistogramSet::getHistograms (N, R); - vector histograms1 = HistogramSet::getHistograms (N1, R); - vector histograms2 = HistogramSet::getHistograms (N2, R); - - vector sumIndexes; - sumIndexes.reserve (H1 * H2); - for (unsigned i = 0; i < H1; i++) { - for (unsigned j = 0; j < H2; j++) { - Histogram hist = histograms1[i]; - hist += histograms2[j]; - sumIndexes.push_back (HistogramSet::findIndex (hist, histograms)); - } - } - - expandPotential (fIdx, H1 * H2, sumIndexes); - - args_.insert (args_.begin() + fIdx + 1, args_[fIdx]); - args_[fIdx].rename (X, X_new1); - args_[fIdx + 1].rename (X, X_new2); - if (H1 == 2) { - args_[fIdx].clearCountedLogVar(); - } - if (H2 == 2) { - args_[fIdx + 1].clearCountedLogVar(); - } - ranges_.insert (ranges_.begin() + fIdx + 1, H2); - ranges_[fIdx] = H1; -} - - - -void -Parfactor::fullExpand (LogVar X) -{ - size_t fIdx = indexOfLogVar (X); - assert (fIdx != args_.size()); - assert (args_[fIdx].isCounting()); - - unsigned N = constr_->getConditionalCount (X); - unsigned R = args_[fIdx].range(); - vector originHists = HistogramSet::getHistograms (N, R); - vector expandHists = HistogramSet::getHistograms (1, R); - assert (ranges_[fIdx] == originHists.size()); - vector sumIndexes; - sumIndexes.reserve (N * R); - - Ranges expandRanges (N, R); - Indexer indexer (expandRanges); - while (indexer.valid()) { - vector hist (R, 0); - for (unsigned n = 0; n < N; n++) { - hist += expandHists[indexer[n]]; - } - sumIndexes.push_back (HistogramSet::findIndex (hist, originHists)); - ++ indexer; - } - - expandPotential (fIdx, std::pow (R, N), sumIndexes); - - ProbFormula f = args_[fIdx]; - args_.erase (args_.begin() + fIdx); - ranges_.erase (ranges_.begin() + fIdx); - LogVars newLvs = constr_->expand (X); - assert (newLvs.size() == N); - for (unsigned i = 0 ; i < N; i++) { - ProbFormula newFormula (f.functor(), f.logVars(), f.range()); - newFormula.rename (X, newLvs[i]); - args_.insert (args_.begin() + fIdx + i, newFormula); - ranges_.insert (ranges_.begin() + fIdx + i, R); - } -} - - - -void -Parfactor::reorderAccordingGrounds (const Grounds& grounds) -{ - ProbFormulas newFormulas; - for (size_t i = 0; i < grounds.size(); i++) { - for (size_t j = 0; j < args_.size(); j++) { - if (grounds[i].functor() == args_[j].functor() && - grounds[i].arity() == args_[j].arity()) { - constr_->moveToTop (args_[j].logVars()); - if (constr_->containsTuple (grounds[i].args())) { - newFormulas.push_back (args_[j]); - break; - } - } - } - assert (newFormulas.size() == i + 1); - } - reorderArguments (newFormulas); -} - - - -void -Parfactor::absorveEvidence (const ProbFormula& formula, unsigned evidence) -{ - size_t fIdx = indexOf (formula); - assert (fIdx != args_.size()); - LogVarSet excl = exclusiveLogVars (fIdx); - assert (args_[fIdx].isCounting() == false); - assert (constr_->isCountNormalized (excl)); - LogAware::pow (params_, constr_->getConditionalCount (excl)); - TFactor::absorveEvidence (formula, evidence); - constr_->remove (excl); -} - - - -void -Parfactor::setNewGroups (void) -{ - for (size_t i = 0; i < args_.size(); i++) { - args_[i].setGroup (ProbFormula::getNewGroup()); - } -} - - - -void -Parfactor::applySubstitution (const Substitution& theta) -{ - for (size_t i = 0; i < args_.size(); i++) { - LogVars& lvs = args_[i].logVars(); - for (size_t j = 0; j < lvs.size(); j++) { - lvs[j] = theta.newNameFor (lvs[j]); - } - if (args_[i].isCounting()) { - LogVar clv = args_[i].countedLogVar(); - args_[i].setCountedLogVar 
(theta.newNameFor (clv)); - } - } - constr_->applySubstitution (theta); -} - - - -size_t -Parfactor::indexOfGround (const Ground& ground) const -{ - size_t idx = args_.size(); - for (size_t i = 0; i < args_.size(); i++) { - if (args_[i].functor() == ground.functor() && - args_[i].arity() == ground.arity()) { - constr_->moveToTop (args_[i].logVars()); - if (constr_->containsTuple (ground.args())) { - idx = i; - break; - } - } - } - return idx; -} - - - -PrvGroup -Parfactor::findGroup (const Ground& ground) const -{ - size_t idx = indexOfGround (ground); - return idx == args_.size() - ? numeric_limits::max() - : args_[idx].group(); -} - - - -bool -Parfactor::containsGround (const Ground& ground) const -{ - return findGroup (ground) != numeric_limits::max(); -} - - - -bool -Parfactor::containsGrounds (const Grounds& grounds) const -{ - Tuple tuple; - LogVars tupleLvs; - for (size_t i = 0; i < grounds.size(); i++) { - size_t idx = indexOfGround (grounds[i]); - if (idx == args_.size()) { - return false; - } - LogVars lvs = args_[idx].logVars(); - for (size_t j = 0; j < lvs.size(); j++) { - if (Util::contains (tupleLvs, lvs[j]) == false) { - tuple.push_back (grounds[i].args()[j]); - tupleLvs.push_back (lvs[j]); - } - } - } - constr_->moveToTop (tupleLvs); - return constr_->containsTuple (tuple); -} - - - -bool -Parfactor::containsGroup (PrvGroup group) const -{ - for (size_t i = 0; i < args_.size(); i++) { - if (args_[i].group() == group) { - return true; - } - } - return false; -} - - - -bool -Parfactor::containsGroups (vector groups) const -{ - for (size_t i = 0; i < groups.size(); i++) { - if (containsGroup (groups[i]) == false) { - return false; - } - } - return true; -} - - - -unsigned -Parfactor::nrFormulas (LogVar X) const -{ - unsigned count = 0; - for (size_t i = 0; i < args_.size(); i++) { - if (args_[i].contains (X)) { - count ++; - } - } - return count; -} - - - -int -Parfactor::indexOfLogVar (LogVar X) const -{ - size_t idx = args_.size(); - assert (nrFormulas (X) == 1); - for (size_t i = 0; i < args_.size(); i++) { - if (args_[i].contains (X)) { - idx = i; - break; - } - } - return idx; -} - - - -int -Parfactor::indexOfGroup (PrvGroup group) const -{ - size_t pos = args_.size(); - for (size_t i = 0; i < args_.size(); i++) { - if (args_[i].group() == group) { - pos = i; - break; - } - } - return pos; -} - - - -unsigned -Parfactor::nrFormulasWithGroup (PrvGroup group) const -{ - unsigned count = 0; - for (size_t i = 0; i < args_.size(); i++) { - if (args_[i].group() == group) { - count ++; - } - } - return count; -} - - - -vector -Parfactor::getAllGroups (void) const -{ - vector groups (args_.size()); - for (size_t i = 0; i < args_.size(); i++) { - groups[i] = args_[i].group(); - } - return groups; -} - - - -string -Parfactor::getLabel (void) const -{ - stringstream ss; - ss << "phi(" ; - for (size_t i = 0; i < args_.size(); i++) { - if (i != 0) ss << "," ; - ss << args_[i]; - } - ss << ")" ; - ConstraintTree copy (*constr_); - copy.moveToTop (copy.logVarSet().elements()); - ss << "|" << copy.tupleSet(); - return ss.str(); -} - - - -void -Parfactor::print (bool printParams) const -{ - cout << "Formulas: " ; - for (size_t i = 0; i < args_.size(); i++) { - if (i != 0) cout << ", " ; - cout << args_[i]; - } - cout << endl; - if (args_[0].group() != Util::maxUnsigned()) { - vector groups; - for (size_t i = 0; i < args_.size(); i++) { - groups.push_back (string ("g") + Util::toString (args_[i].group())); - } - cout << "Groups: " << groups << endl; - } - cout << "LogVars: " << 
constr_->logVarSet() << endl; - cout << "Ranges: " << ranges_ << endl; - if (printParams == false) { - cout << "Params: " ; - if (params_.size() <= 32) { - cout.precision(10); - cout << params_ << endl; - } else { - cout << "|" << params_.size() << "|" << endl; - } - } - ConstraintTree copy (*constr_); - copy.moveToTop (copy.logVarSet().elements()); - cout << "Tuples: " << copy.tupleSet() << endl; - if (printParams) { - printParameters(); - } -} - - - -void -Parfactor::printParameters (void) const -{ - vector jointStrings; - Indexer indexer (ranges_); - while (indexer.valid()) { - stringstream ss; - for (size_t i = 0; i < args_.size(); i++) { - if (i != 0) ss << ", " ; - if (args_[i].isCounting()) { - unsigned N = constr_->getConditionalCount ( - args_[i].countedLogVar()); - HistogramSet hs (N, args_[i].range()); - unsigned c = 0; - while (c < indexer[i]) { - hs.nextHistogram(); - c ++; - } - ss << hs; - } else { - ss << indexer[i]; - } - } - jointStrings.push_back (ss.str()); - ++ indexer; - } - for (size_t i = 0; i < params_.size(); i++) { - cout << "f(" << jointStrings[i] << ")" ; - cout << " = " << params_[i] << endl; - } -} - - - -void -Parfactor::printProjections (void) const -{ - ConstraintTree copy (*constr_); - - LogVarSet Xs = copy.logVarSet(); - for (size_t i = 0; i < Xs.size(); i++) { - cout << "-> projection of " << Xs[i] << ": " ; - cout << copy.tupleSet ({Xs[i]}) << endl; - } -} - - - -void -Parfactor::expandPotential ( - size_t fIdx, - unsigned newRange, - const vector& sumIndexes) -{ - ullong newSize = (params_.size() / ranges_[fIdx]) * newRange; - if (newSize > params_.max_size()) { - cerr << "Error: an overflow occurred when performing expansion." ; - cerr << endl; - exit (EXIT_FAILURE); - } - - Params backup = params_; - params_.clear(); - params_.reserve (newSize); - - size_t prod = 1; - vector offsets (ranges_.size()); - for (size_t i = ranges_.size(); i-- > 0; ) { - offsets[i] = prod; - prod *= ranges_[i]; - } - - size_t index = 0; - ranges_[fIdx] = newRange; - vector indices (ranges_.size(), 0); - for (size_t k = 0; k < newSize; k++) { - assert (index < backup.size()); - params_.push_back (backup[index]); - for (size_t i = ranges_.size(); i-- > 0; ) { - indices[i] ++; - if (i == fIdx) { - if (indices[i] != ranges_[i]) { - int diff = sumIndexes[indices[i]] - sumIndexes[indices[i] - 1]; - index += diff * offsets[i]; - break; - } else { - // last index contains the old range minus 1 - index -= sumIndexes.back() * offsets[i]; - indices[i] = 0; - } - } else { - if (indices[i] != ranges_[i]) { - index += offsets[i]; - break; - } else { - index -= (ranges_[i] - 1) * offsets[i]; - indices[i] = 0; - } - } - } - } -} - - - -void -Parfactor::simplifyCountingFormulas (size_t fIdx) -{ - // check if we can simplify the parfactor - for (size_t i = 0; i < args_.size(); i++) { - if (i != fIdx && - args_[i].isCounting() && - args_[i].group() == args_[fIdx].group()) { - // if they only differ in the name of the counting log var - if ((args_[i].logVarSet() - args_[i].countedLogVar()) == - (args_[fIdx].logVarSet()) - args_[fIdx].countedLogVar() && - ranges_[i] == ranges_[fIdx]) { - simplifyParfactor (fIdx, i); - break; - } - } - } -} - - - -void -Parfactor::simplifyGrounds (void) -{ - if (args_.size() == 1) { - return; - } - LogVarSet singletons = constr_->singletons(); - for (long i = 0; i < (long)args_.size() - 1; i++) { - for (size_t j = i + 1; j < args_.size(); j++) { - if (args_[i].group() == args_[j].group() && - singletons.contains (args_[i].logVarSet()) && - singletons.contains 
(args_[j].logVarSet())) { - simplifyParfactor (i, j); - i --; - break; - } - } - } -} - - - -bool -Parfactor::canMultiply (Parfactor* g1, Parfactor* g2) -{ - std::pair res = getAlignLogVars (g1, g2); - LogVarSet Xs_1 (res.first); - LogVarSet Xs_2 (res.second); - LogVarSet Y_1 = g1->logVarSet() - Xs_1; - LogVarSet Y_2 = g2->logVarSet() - Xs_2; - Y_1 -= g1->countedLogVars(); - Y_2 -= g2->countedLogVars(); - return g1->constr()->isCountNormalized (Y_1) && - g2->constr()->isCountNormalized (Y_2); -} - - - -void -Parfactor::simplifyParfactor (size_t fIdx1, size_t fIdx2) -{ - Params backup = params_; - params_.clear(); - Indexer indexer (ranges_); - while (indexer.valid()) { - if (indexer[fIdx1] == indexer[fIdx2]) { - params_.push_back (backup[indexer]); - } - ++ indexer; - } - for (size_t i = 0; i < args_[fIdx2].logVars().size(); i++) { - if (nrFormulas (args_[fIdx2].logVars()[i]) == 1) { - constr_->remove ({ args_[fIdx2].logVars()[i] }); - } - } - args_.erase (args_.begin() + fIdx2); - ranges_.erase (ranges_.begin() + fIdx2); -} - - - -std::pair -Parfactor::getAlignLogVars (Parfactor* g1, Parfactor* g2) -{ - g1->simplifyGrounds(); - g2->simplifyGrounds(); - LogVars Xs_1, Xs_2; - TinySet matchedI; - TinySet matchedJ; - ProbFormulas& formulas1 = g1->arguments(); - ProbFormulas& formulas2 = g2->arguments(); - for (size_t i = 0; i < formulas1.size(); i++) { - for (size_t j = 0; j < formulas2.size(); j++) { - if (formulas1[i].group() == formulas2[j].group() && - g1->range (i) == g2->range (j) && - matchedI.contains (i) == false && - matchedJ.contains (j) == false) { - Util::addToVector (Xs_1, formulas1[i].logVars()); - Util::addToVector (Xs_2, formulas2[j].logVars()); - matchedI.insert (i); - matchedJ.insert (j); - } - } - } - return make_pair (Xs_1, Xs_2); -} - - - -void -Parfactor::alignAndExponentiate (Parfactor* g1, Parfactor* g2) -{ - alignLogicalVars (g1, g2); - LogVarSet comm = g1->logVarSet() & g2->logVarSet(); - LogVarSet Y_1 = g1->logVarSet() - comm; - LogVarSet Y_2 = g2->logVarSet() - comm; - Y_1 -= g1->countedLogVars(); - Y_2 -= g2->countedLogVars(); - assert (g1->constr()->isCountNormalized (Y_1)); - assert (g2->constr()->isCountNormalized (Y_2)); - unsigned condCount1 = g1->constr()->getConditionalCount (Y_1); - unsigned condCount2 = g2->constr()->getConditionalCount (Y_2); - LogAware::pow (g1->params(), 1.0 / condCount2); - LogAware::pow (g2->params(), 1.0 / condCount1); -} - - - -void -Parfactor::alignLogicalVars (Parfactor* g1, Parfactor* g2) -{ - std::pair res = getAlignLogVars (g1, g2); - const LogVars& alignLvs1 = res.first; - const LogVars& alignLvs2 = res.second; - // cout << "ALIGNING :::::::::::::::::" << endl; - // g1->print(); - // cout << "AND" << endl; - // g2->print(); - // cout << "-> align lvs1 = " << alignLvs1 << endl; - // cout << "-> align lvs2 = " << alignLvs2 << endl; - LogVar freeLogVar (0); - Substitution theta1, theta2; - for (size_t i = 0; i < alignLvs1.size(); i++) { - bool b1 = theta1.containsReplacementFor (alignLvs1[i]); - bool b2 = theta2.containsReplacementFor (alignLvs2[i]); - if (b1 == false && b2 == false) { - theta1.add (alignLvs1[i], freeLogVar); - theta2.add (alignLvs2[i], freeLogVar); - ++ freeLogVar; - } else if (b1 == false && b2) { - theta1.add (alignLvs1[i], theta2.newNameFor (alignLvs2[i])); - } else if (b1 && b2 == false) { - theta2.add (alignLvs2[i], theta1.newNameFor (alignLvs1[i])); - } - } - - const LogVarSet& allLvs1 = g1->logVarSet(); - for (size_t i = 0; i < allLvs1.size(); i++) { - if (theta1.containsReplacementFor (allLvs1[i]) == 
false) { - theta1.add (allLvs1[i], freeLogVar); - ++ freeLogVar; - } - } - const LogVarSet& allLvs2 = g2->logVarSet(); - for (size_t i = 0; i < allLvs2.size(); i++) { - if (theta2.containsReplacementFor (allLvs2[i]) == false) { - theta2.add (allLvs2[i], freeLogVar); - ++ freeLogVar; - } - } - - // handle this type of situation: - // g1 = p(X), q(X) ; X in {(p1),(p2)} - // g2 = p(X), q(Y) ; (X,Y) in {(p1,p2),(p2,p1)} - LogVars discardedLvs1 = theta1.getDiscardedLogVars(); - for (size_t i = 0; i < discardedLvs1.size(); i++) { - if (g1->constr()->isSingleton (discardedLvs1[i]) && - g1->nrFormulas (discardedLvs1[i]) == 1) { - g1->constr()->remove (discardedLvs1[i]); - } else { - LogVar X_new = ++ g1->constr()->logVarSet().back(); - theta1.rename (discardedLvs1[i], X_new); - } - } - LogVars discardedLvs2 = theta2.getDiscardedLogVars(); - for (size_t i = 0; i < discardedLvs2.size(); i++) { - if (g2->constr()->isSingleton (discardedLvs2[i]) && - g2->nrFormulas (discardedLvs2[i]) == 1) { - g2->constr()->remove (discardedLvs2[i]); - } else { - LogVar X_new = ++ g2->constr()->logVarSet().back(); - theta2.rename (discardedLvs2[i], X_new); - } - } - - // cout << "theta1: " << theta1 << endl; - // cout << "theta2: " << theta2 << endl; - g1->applySubstitution (theta1); - g2->applySubstitution (theta2); -} - diff --git a/packages/CLPBN/horus2/Parfactor.h b/packages/CLPBN/horus2/Parfactor.h deleted file mode 100644 index 1c65c2ea0..000000000 --- a/packages/CLPBN/horus2/Parfactor.h +++ /dev/null @@ -1,125 +0,0 @@ -#ifndef HORUS_PARFACTOR_H -#define HORUS_PARFACTOR_H - -#include -#include - -#include "ProbFormula.h" -#include "ConstraintTree.h" -#include "LiftedUtils.h" -#include "Horus.h" - -#include "Factor.h" - -class Parfactor : public TFactor -{ - public: - Parfactor ( - const ProbFormulas&, - const Params&, - const Tuples&, - unsigned distId); - - Parfactor (const Parfactor*, const Tuple&); - - Parfactor (const Parfactor*, ConstraintTree*); - - Parfactor (const Parfactor&); - - ~Parfactor (void); - - ConstraintTree* constr (void) { return constr_; } - - const ConstraintTree* constr (void) const { return constr_; } - - const LogVars& logVars (void) const { return constr_->logVars(); } - - const LogVarSet& logVarSet (void) const { return constr_->logVarSet(); } - - LogVarSet countedLogVars (void) const; - - LogVarSet uncountedLogVars (void) const; - - LogVarSet elimLogVars (void) const; - - LogVarSet exclusiveLogVars (size_t fIdx) const; - - void sumOutIndex (size_t fIdx); - - void multiply (Parfactor&); - - bool canCountConvert (LogVar X); - - void countConvert (LogVar); - - void expand (LogVar, LogVar, LogVar); - - void fullExpand (LogVar); - - void reorderAccordingGrounds (const Grounds&); - - void absorveEvidence (const ProbFormula&, unsigned); - - void setNewGroups (void); - - void applySubstitution (const Substitution&); - - size_t indexOfGround (const Ground&) const; - - PrvGroup findGroup (const Ground&) const; - - bool containsGround (const Ground&) const; - - bool containsGrounds (const Grounds&) const; - - bool containsGroup (PrvGroup) const; - - bool containsGroups (vector) const; - - unsigned nrFormulas (LogVar) const; - - int indexOfLogVar (LogVar) const; - - int indexOfGroup (PrvGroup) const; - - unsigned nrFormulasWithGroup (PrvGroup) const; - - vector getAllGroups (void) const; - - void print (bool = false) const; - - void printParameters (void) const; - - void printProjections (void) const; - - string getLabel (void) const; - - void simplifyGrounds (void); - - static bool canMultiply 
(Parfactor*, Parfactor*); - - private: - - void simplifyCountingFormulas (size_t fIdx); - - void simplifyParfactor (size_t fIdx1, size_t fIdx2); - - static std::pair getAlignLogVars ( - Parfactor* g1, Parfactor* g2); - - void expandPotential (size_t fIdx, unsigned newRange, - const vector& sumIndexes); - - static void alignAndExponentiate (Parfactor*, Parfactor*); - - static void alignLogicalVars (Parfactor*, Parfactor*); - - ConstraintTree* constr_; - -}; - - -typedef vector Parfactors; - -#endif // HORUS_PARFACTOR_H - diff --git a/packages/CLPBN/horus2/ParfactorList.cpp b/packages/CLPBN/horus2/ParfactorList.cpp deleted file mode 100644 index 1de1ccc7d..000000000 --- a/packages/CLPBN/horus2/ParfactorList.cpp +++ /dev/null @@ -1,638 +0,0 @@ -#include - -#include "ParfactorList.h" - - -ParfactorList::ParfactorList (const ParfactorList& pfList) -{ - ParfactorList::const_iterator it = pfList.begin(); - while (it != pfList.end()) { - addShattered (new Parfactor (**it)); - ++ it; - } -} - - - -ParfactorList::ParfactorList (const Parfactors& pfs) -{ - add (pfs); -} - - - -ParfactorList::~ParfactorList (void) -{ - ParfactorList::const_iterator it = pfList_.begin(); - while (it != pfList_.end()) { - delete *it; - ++ it; - } -} - - - -void -ParfactorList::add (Parfactor* pf) -{ - pf->setNewGroups(); - addToShatteredList (pf); -} - - - -void -ParfactorList::add (const Parfactors& pfs) -{ - for (size_t i = 0; i < pfs.size(); i++) { - pfs[i]->setNewGroups(); - addToShatteredList (pfs[i]); - } -} - - - -void -ParfactorList::addShattered (Parfactor* pf) -{ - assert (isAllShattered()); - pfList_.push_back (pf); - assert (isAllShattered()); -} - - - -list::iterator -ParfactorList::insertShattered ( - list::iterator it, - Parfactor* pf) -{ - return pfList_.insert (it, pf); - assert (isAllShattered()); -} - - - -list::iterator -ParfactorList::remove (list::iterator it) -{ - return pfList_.erase (it); -} - - - -list::iterator -ParfactorList::removeAndDelete (list::iterator it) -{ - delete *it; - return pfList_.erase (it); -} - - - -bool -ParfactorList::isAllShattered (void) const -{ - if (pfList_.size() <= 1) { - return true; - } - vector pfs (pfList_.begin(), pfList_.end()); - for (size_t i = 0; i < pfs.size(); i++) { - assert (isShattered (pfs[i])); - } - for (size_t i = 0; i < pfs.size() - 1; i++) { - for (size_t j = i + 1; j < pfs.size(); j++) { - if (isShattered (pfs[i], pfs[j]) == false) { - return false; - } - } - } - return true; -} - - - -void -ParfactorList::print (void) const -{ - Parfactors pfVec (pfList_.begin(), pfList_.end()); - std::sort (pfVec.begin(), pfVec.end(), sortByParams()); - for (size_t i = 0; i < pfVec.size(); i++) { - pfVec[i]->print(); - cout << endl; - } -} - - - -ParfactorList& -ParfactorList::operator= (const ParfactorList& pfList) -{ - if (this != &pfList) { - ParfactorList::const_iterator it0 = pfList_.begin(); - while (it0 != pfList_.end()) { - delete *it0; - ++ it0; - } - pfList_.clear(); - ParfactorList::const_iterator it = pfList.begin(); - while (it != pfList.end()) { - addShattered (new Parfactor (**it)); - ++ it; - } - } - return *this; -} - - - -bool -ParfactorList::isShattered (const Parfactor* g) const -{ - const ProbFormulas& formulas = g->arguments(); - if (formulas.size() < 2) { - return true; - } - ConstraintTree ct (*g->constr()); - for (size_t i = 0; i < formulas.size() - 1; i++) { - for (size_t j = i + 1; j < formulas.size(); j++) { - if (formulas[i].group() == formulas[j].group()) { - if (identical ( - formulas[i], *(g->constr()), - formulas[j], 
*(g->constr())) == false) { - g->print(); - cout << "-> not identical on positions " ; - cout << i << " and " << j << endl; - return false; - } - } else { - if (disjoint ( - formulas[i], *(g->constr()), - formulas[j], *(g->constr())) == false) { - g->print(); - cout << "-> not disjoint on positions " ; - cout << i << " and " << j << endl; - return false; - } - } - } - } - return true; -} - - - -bool -ParfactorList::isShattered ( - const Parfactor* g1, - const Parfactor* g2) const -{ - assert (g1 != g2); - const ProbFormulas& fms1 = g1->arguments(); - const ProbFormulas& fms2 = g2->arguments(); - - for (size_t i = 0; i < fms1.size(); i++) { - for (size_t j = 0; j < fms2.size(); j++) { - if (fms1[i].group() == fms2[j].group()) { - if (identical ( - fms1[i], *(g1->constr()), - fms2[j], *(g2->constr())) == false) { - g1->print(); - cout << "^" << endl; - g2->print(); - cout << "-> not identical on group " << fms1[i].group() << endl; - return false; - } - } else { - if (disjoint ( - fms1[i], *(g1->constr()), - fms2[j], *(g2->constr())) == false) { - g1->print(); - cout << "^" << endl; - g2->print(); - cout << "-> not disjoint on groups " << fms1[i].group(); - cout << " and " << fms2[j].group() << endl; - return false; - } - } - } - } - return true; -} - - - -void -ParfactorList::addToShatteredList (Parfactor* g) -{ - queue residuals; - residuals.push (g); - while (residuals.empty() == false) { - Parfactor* pf = residuals.front(); - bool pfSplitted = false; - list::iterator pfIter; - pfIter = pfList_.begin(); - while (pfIter != pfList_.end()) { - std::pair shattRes; - shattRes = shatter (*pfIter, pf); - if (shattRes.first.empty() == false) { - pfIter = removeAndDelete (pfIter); - Util::addToQueue (residuals, shattRes.first); - } else { - ++ pfIter; - } - if (shattRes.second.empty() == false) { - delete pf; - Util::addToQueue (residuals, shattRes.second); - pfSplitted = true; - break; - } - } - residuals.pop(); - if (pfSplitted == false) { - Parfactors res = shatterAgainstMySelf (pf); - if (res.empty()) { - addShattered (pf); - } else { - Util::addToQueue (residuals, res); - } - } - } - assert (isAllShattered()); -} - - - -Parfactors -ParfactorList::shatterAgainstMySelf (Parfactor* g) -{ - Parfactors pfs; - queue residuals; - residuals.push (g); - bool shattered = true; - while (residuals.empty() == false) { - Parfactor* pf = residuals.front(); - Parfactors res = shatterAgainstMySelf2 (pf); - if (res.empty()) { - assert (isShattered (pf)); - if (shattered) { - return { }; - } - pfs.push_back (pf); - } else { - shattered = false; - for (size_t i = 0; i < res.size(); i++) { - assert (res[i]->constr()->empty() == false); - residuals.push (res[i]); - } - delete pf; - } - residuals.pop(); - } - return pfs; -} - - - -Parfactors -ParfactorList::shatterAgainstMySelf2 (Parfactor* g) -{ - // slip a parfactor with overlapping formulas: - // e.g. 
{s(X),s(Y)}, with (X,Y) in {(p1,p2),(p1,p3),(p4,p1)} - const ProbFormulas& formulas = g->arguments(); - for (size_t i = 0; i < formulas.size() - 1; i++) { - for (size_t j = i + 1; j < formulas.size(); j++) { - if (formulas[i].sameSkeletonAs (formulas[j])) { - Parfactors res = shatterAgainstMySelf (g, i, j); - if (res.empty() == false) { - return res; - } - } - } - } - return Parfactors(); -} - - - -Parfactors -ParfactorList::shatterAgainstMySelf ( - Parfactor* g, - size_t fIdx1, - size_t fIdx2) -{ - /* - Util::printDashedLine(); - cout << "-> SHATTERING" << endl; - g->print(); - cout << "-> ON: " << g->argument (fIdx1) << "|" ; - cout << g->constr()->tupleSet (g->argument (fIdx1).logVars()) << endl; - cout << "-> ON: " << g->argument (fIdx2) << "|" ; - cout << g->constr()->tupleSet (g->argument (fIdx2).logVars()) << endl; - Util::printDashedLine(); - */ - ProbFormula& f1 = g->argument (fIdx1); - ProbFormula& f2 = g->argument (fIdx2); - if (f1.isAtom()) { - cerr << "Error: a ground occurs twice in the same parfactor." << endl; - cerr << endl; - exit (EXIT_FAILURE); - } - assert (g->constr()->empty() == false); - ConstraintTree ctCopy (*g->constr()); - if (f1.group() == f2.group()) { - assert (identical (f1, *(g->constr()), f2, ctCopy)); - return { }; - } - - g->constr()->moveToTop (f1.logVars()); - ctCopy.moveToTop (f2.logVars()); - - std::pair split1 = - g->constr()->split (f1.logVars(), &ctCopy, f2.logVars()); - ConstraintTree* commCt1 = split1.first; - ConstraintTree* exclCt1 = split1.second; - - if (commCt1->empty()) { - // disjoint - delete commCt1; - delete exclCt1; - return { }; - } - - PrvGroup newGroup = ProbFormula::getNewGroup(); - Parfactors res1 = shatter (g, fIdx1, commCt1, exclCt1, newGroup); - if (res1.empty()) { - res1.push_back (g); - } - - Parfactors res; - ctCopy.moveToTop (f1.logVars()); - for (size_t i = 0; i < res1.size(); i++) { - res1[i]->constr()->moveToTop (f2.logVars()); - std::pair split2; - split2 = res1[i]->constr()->split (f2.logVars(), &ctCopy, f1.logVars()); - ConstraintTree* commCt2 = split2.first; - ConstraintTree* exclCt2 = split2.second; - if (commCt2->empty()) { - if (res1[i] != g) { - res.push_back (res1[i]); - } - delete commCt2; - delete exclCt2; - continue; - } - newGroup = ProbFormula::getNewGroup(); - Parfactors res2 = shatter (res1[i], fIdx2, commCt2, exclCt2, newGroup); - if (res2.empty()) { - if (res1[i] != g) { - res.push_back (res1[i]); - } - } else { - Util::addToVector (res, res2); - for (size_t j = 0; j < res2.size(); j++) { - } - if (res1[i] != g) { - delete res1[i]; - } - } - } - - if (res.empty()) { - g->argument (fIdx2).setGroup (g->argument (fIdx1).group()); - updateGroups (f2.group(), f1.group()); - } - return res; -} - - - -std::pair -ParfactorList::shatter (Parfactor* g1, Parfactor* g2) -{ - ProbFormulas& formulas1 = g1->arguments(); - ProbFormulas& formulas2 = g2->arguments(); - assert (g1 != 0 && g2 != 0 && g1 != g2); - for (size_t i = 0; i < formulas1.size(); i++) { - for (size_t j = 0; j < formulas2.size(); j++) { - if (formulas1[i].sameSkeletonAs (formulas2[j])) { - std::pair res; - res = shatter (i, g1, j, g2); - if (res.first.empty() == false || - res.second.empty() == false) { - return res; - } - } - } - } - return make_pair (Parfactors(), Parfactors()); -} - - - -std::pair -ParfactorList::shatter ( - size_t fIdx1, Parfactor* g1, - size_t fIdx2, Parfactor* g2) -{ - ProbFormula& f1 = g1->argument (fIdx1); - ProbFormula& f2 = g2->argument (fIdx2); - /* - Util::printDashedLine(); - cout << "-> SHATTERING" << endl; - 
g1->print(); - cout << "-> WITH" << endl; - g2->print(); - cout << "-> ON: " << f1 << "|" ; - cout << g1->constr()->tupleSet (f1.logVars()) << endl; - cout << "-> ON: " << f2 << "|" ; - cout << g2->constr()->tupleSet (f2.logVars()) << endl; - Util::printDashedLine(); - */ - if (f1.isAtom()) { - f2.setGroup (f1.group()); - updateGroups (f2.group(), f1.group()); - return { }; - } - assert (g1->constr()->empty() == false); - assert (g2->constr()->empty() == false); - if (f1.group() == f2.group()) { - assert (identical (f1, *(g1->constr()), f2, *(g2->constr()))); - return { }; - } - - g1->constr()->moveToTop (f1.logVars()); - g2->constr()->moveToTop (f2.logVars()); - - std::pair split1 = - g1->constr()->split (f1.logVars(), g2->constr(), f2.logVars()); - ConstraintTree* commCt1 = split1.first; - ConstraintTree* exclCt1 = split1.second; - - if (commCt1->empty()) { - // disjoint - delete commCt1; - delete exclCt1; - return { }; - } - - std::pair split2 = - g2->constr()->split (f2.logVars(), g1->constr(), f1.logVars()); - ConstraintTree* commCt2 = split2.first; - ConstraintTree* exclCt2 = split2.second; - - assert (commCt1->tupleSet (f1.logVars()) == - commCt2->tupleSet (f2.logVars())); - - // stringstream ss1; ss1 << "" << count << "_A.dot" ; - // stringstream ss2; ss2 << "" << count << "_B.dot" ; - // stringstream ss3; ss3 << "" << count << "_A_comm.dot" ; - // stringstream ss4; ss4 << "" << count << "_A_excl.dot" ; - // stringstream ss5; ss5 << "" << count << "_B_comm.dot" ; - // stringstream ss6; ss6 << "" << count << "_B_excl.dot" ; - // g1->constr()->exportToGraphViz (ss1.str().c_str(), true); - // g2->constr()->exportToGraphViz (ss2.str().c_str(), true); - // commCt1->exportToGraphViz (ss3.str().c_str(), true); - // exclCt1->exportToGraphViz (ss4.str().c_str(), true); - // commCt2->exportToGraphViz (ss5.str().c_str(), true); - // exclCt2->exportToGraphViz (ss6.str().c_str(), true); - - if (exclCt1->empty() && exclCt2->empty()) { - // identical - f2.setGroup (f1.group()); - updateGroups (f2.group(), f1.group()); - delete commCt1; - delete exclCt1; - delete commCt2; - delete exclCt2; - return { }; - } - - PrvGroup group; - if (exclCt1->empty()) { - group = f1.group(); - } else if (exclCt2->empty()) { - group = f2.group(); - } else { - group = ProbFormula::getNewGroup(); - } - Parfactors res1 = shatter (g1, fIdx1, commCt1, exclCt1, group); - Parfactors res2 = shatter (g2, fIdx2, commCt2, exclCt2, group); - return make_pair (res1, res2); -} - - - -Parfactors -ParfactorList::shatter ( - Parfactor* g, - size_t fIdx, - ConstraintTree* commCt, - ConstraintTree* exclCt, - PrvGroup commGroup) -{ - ProbFormula& f = g->argument (fIdx); - if (exclCt->empty()) { - delete commCt; - delete exclCt; - f.setGroup (commGroup); - return { }; - } - - Parfactors result; - if (f.isCounting()) { - LogVar X_new1 = g->constr()->logVarSet().back() + 1; - LogVar X_new2 = g->constr()->logVarSet().back() + 2; - ConstraintTrees cts = g->constr()->jointCountNormalize ( - commCt, exclCt, f.countedLogVar(), X_new1, X_new2); - for (size_t i = 0; i < cts.size(); i++) { - Parfactor* newPf = new Parfactor (g, cts[i]); - if (cts[i]->nrLogVars() == g->constr()->nrLogVars() + 1) { - newPf->expand (f.countedLogVar(), X_new1, X_new2); - assert (g->constr()->getConditionalCount (f.countedLogVar()) == - cts[i]->getConditionalCount (X_new1) + - cts[i]->getConditionalCount (X_new2)); - } else { - assert (g->constr()->getConditionalCount (f.countedLogVar()) == - cts[i]->getConditionalCount (f.countedLogVar())); - } - 
newPf->setNewGroups(); - result.push_back (newPf); - } - delete commCt; - delete exclCt; - } else { - Parfactor* newPf = new Parfactor (g, commCt); - newPf->setNewGroups(); - newPf->argument (fIdx).setGroup (commGroup); - result.push_back (newPf); - newPf = new Parfactor (g, exclCt); - newPf->setNewGroups(); - result.push_back (newPf); - } - return result; -} - - - -void -ParfactorList::updateGroups (PrvGroup oldGroup, PrvGroup newGroup) -{ - for (ParfactorList::iterator it = pfList_.begin(); - it != pfList_.end(); ++it) { - ProbFormulas& formulas = (*it)->arguments(); - for (size_t i = 0; i < formulas.size(); i++) { - if (formulas[i].group() == oldGroup) { - formulas[i].setGroup (newGroup); - } - } - } -} - - - -bool -ParfactorList::proper ( - const ProbFormula& f1, ConstraintTree ct1, - const ProbFormula& f2, ConstraintTree ct2) const -{ - return disjoint (f1, ct1, f2, ct2) - || identical (f1, ct1, f2, ct2); -} - - - -bool -ParfactorList::identical ( - const ProbFormula& f1, ConstraintTree ct1, - const ProbFormula& f2, ConstraintTree ct2) const -{ - if (f1.sameSkeletonAs (f2) == false) { - return false; - } - if (f1.isAtom()) { - return true; - } - TupleSet ts1 = ct1.tupleSet (f1.logVars()); - TupleSet ts2 = ct2.tupleSet (f2.logVars()); - return ts1 == ts2; -} - - - -bool -ParfactorList::disjoint ( - const ProbFormula& f1, ConstraintTree ct1, - const ProbFormula& f2, ConstraintTree ct2) const -{ - if (f1.sameSkeletonAs (f2) == false) { - return true; - } - if (f1.isAtom()) { - return false; - } - TupleSet ts1 = ct1.tupleSet (f1.logVars()); - TupleSet ts2 = ct2.tupleSet (f2.logVars()); - return (ts1 & ts2).empty(); -} - diff --git a/packages/CLPBN/horus2/ParfactorList.h b/packages/CLPBN/horus2/ParfactorList.h deleted file mode 100644 index 1c6404dcb..000000000 --- a/packages/CLPBN/horus2/ParfactorList.h +++ /dev/null @@ -1,121 +0,0 @@ -#ifndef HORUS_PARFACTORLIST_H -#define HORUS_PARFACTORLIST_H - -#include -#include - -#include "Parfactor.h" -#include "ProbFormula.h" - - -using namespace std; - - -class ParfactorList -{ - public: - ParfactorList (void) { } - - ParfactorList (const ParfactorList&); - - ParfactorList (const Parfactors&); - - ~ParfactorList (void); - - const list& parfactors (void) const { return pfList_; } - - void clear (void) { pfList_.clear(); } - - size_t size (void) const { return pfList_.size(); } - - typedef std::list::iterator iterator; - - iterator begin (void) { return pfList_.begin(); } - - iterator end (void) { return pfList_.end(); } - - typedef std::list::const_iterator const_iterator; - - const_iterator begin (void) const { return pfList_.begin(); } - - const_iterator end (void) const { return pfList_.end(); } - - void add (Parfactor* pf); - - void add (const Parfactors& pfs); - - void addShattered (Parfactor* pf); - - list::iterator insertShattered ( - list::iterator, Parfactor*); - - list::iterator remove (list::iterator); - - list::iterator removeAndDelete (list::iterator); - - bool isAllShattered (void) const; - - void print (void) const; - - ParfactorList& operator= (const ParfactorList& pfList); - - private: - bool isShattered (const Parfactor*) const; - - bool isShattered (const Parfactor*, const Parfactor*) const; - - void addToShatteredList (Parfactor*); - - Parfactors shatterAgainstMySelf (Parfactor* g); - - Parfactors shatterAgainstMySelf2 (Parfactor* g); - - Parfactors shatterAgainstMySelf ( - Parfactor* g, size_t fIdx1, size_t fIdx2); - - std::pair shatter ( - Parfactor*, Parfactor*); - - std::pair shatter ( - size_t, Parfactor*, size_t, 
Parfactor*); - - Parfactors shatter ( - Parfactor*, - size_t, - ConstraintTree*, - ConstraintTree*, - PrvGroup); - - void updateGroups (PrvGroup group1, PrvGroup group2); - - bool proper ( - const ProbFormula&, ConstraintTree, - const ProbFormula&, ConstraintTree) const; - - bool identical ( - const ProbFormula&, ConstraintTree, - const ProbFormula&, ConstraintTree) const; - - bool disjoint ( - const ProbFormula&, ConstraintTree, - const ProbFormula&, ConstraintTree) const; - - struct sortByParams - { - inline bool operator() (const Parfactor* pf1, const Parfactor* pf2) - { - if (pf1->params().size() < pf2->params().size()) { - return true; - } else if (pf1->params().size() == pf2->params().size() && - pf1->params() < pf2->params()) { - return true; - } - return false; - } - }; - - list pfList_; -}; - -#endif // HORUS_PARFACTORLIST_H - diff --git a/packages/CLPBN/horus2/ProbFormula.cpp b/packages/CLPBN/horus2/ProbFormula.cpp deleted file mode 100644 index fa2d26d05..000000000 --- a/packages/CLPBN/horus2/ProbFormula.cpp +++ /dev/null @@ -1,140 +0,0 @@ -#include "ProbFormula.h" - - -PrvGroup ProbFormula::freeGroup_ = 0; - - - -bool -ProbFormula::sameSkeletonAs (const ProbFormula& f) const -{ - return functor_ == f.functor() && logVars_.size() == f.arity(); -} - - - -bool -ProbFormula::contains (LogVar lv) const -{ - return Util::contains (logVars_, lv); -} - - - -bool -ProbFormula::contains (LogVarSet s) const -{ - return LogVarSet (logVars_).contains (s); -} - - - -size_t -ProbFormula::indexOf (LogVar X) const -{ - return Util::indexOf (logVars_, X); -} - - - -bool -ProbFormula::isAtom (void) const -{ - return logVars_.size() == 0; -} - - - -bool -ProbFormula::isCounting (void) const -{ - return countedLogVar_.valid(); -} - - - -LogVar -ProbFormula::countedLogVar (void) const -{ - assert (isCounting()); - return countedLogVar_; -} - - - -void -ProbFormula::setCountedLogVar (LogVar lv) -{ - countedLogVar_ = lv; -} - - - -void -ProbFormula::clearCountedLogVar (void) -{ - countedLogVar_ = LogVar(); -} - - - -void -ProbFormula::rename (LogVar oldName, LogVar newName) -{ - for (size_t i = 0; i < logVars_.size(); i++) { - if (logVars_[i] == oldName) { - logVars_[i] = newName; - } - } - if (isCounting() && countedLogVar_ == oldName) { - countedLogVar_ = newName; - } -} - - -bool operator== (const ProbFormula& f1, const ProbFormula& f2) -{ - return f1.group_ == f2.group_ && - f1.logVars_ == f2.logVars_; -} - - - -std::ostream& operator<< (ostream &os, const ProbFormula& f) -{ - os << f.functor_; - if (f.isAtom() == false) { - os << "(" ; - for (size_t i = 0; i < f.logVars_.size(); i++) { - if (i != 0) os << ","; - if (f.isCounting() && f.logVars_[i] == f.countedLogVar_) { - os << "#" ; - } - os << f.logVars_[i]; - } - os << ")" ; - } - os << "::" << f.range_; - return os; -} - - - -PrvGroup -ProbFormula::getNewGroup (void) -{ - freeGroup_ ++; - assert (freeGroup_ != numeric_limits::max()); - return freeGroup_; -} - - - -ostream& operator<< (ostream &os, const ObservedFormula& of) -{ - os << of.functor_ << "/" << of.arity_; - os << "|" << of.constr_.tupleSet(); - os << " [evidence=" << of.evidence_ << "]"; - return os; -} - diff --git a/packages/CLPBN/horus2/ProbFormula.h b/packages/CLPBN/horus2/ProbFormula.h deleted file mode 100644 index 63086266a..000000000 --- a/packages/CLPBN/horus2/ProbFormula.h +++ /dev/null @@ -1,114 +0,0 @@ -#ifndef HORUS_PROBFORMULA_H -#define HORUS_PROBFORMULA_H - -#include - -#include "ConstraintTree.h" -#include "LiftedUtils.h" -#include "Horus.h" - -typedef unsigned 
long PrvGroup; - -class ProbFormula -{ - public: - ProbFormula (Symbol f, const LogVars& lvs, unsigned range) - : functor_(f), logVars_(lvs), range_(range), - countedLogVar_(), group_(numeric_limits::max()) { } - - ProbFormula (Symbol f, unsigned r) - : functor_(f), range_(r), group_(numeric_limits::max()) { } - - Symbol functor (void) const { return functor_; } - - unsigned arity (void) const { return logVars_.size(); } - - unsigned range (void) const { return range_; } - - LogVars& logVars (void) { return logVars_; } - - const LogVars& logVars (void) const { return logVars_; } - - LogVarSet logVarSet (void) const { return LogVarSet (logVars_); } - - PrvGroup group (void) const { return group_; } - - void setGroup (PrvGroup g) { group_ = g; } - - bool sameSkeletonAs (const ProbFormula&) const; - - bool contains (LogVar) const; - - bool contains (LogVarSet) const; - - size_t indexOf (LogVar) const; - - bool isAtom (void) const; - - bool isCounting (void) const; - - LogVar countedLogVar (void) const; - - void setCountedLogVar (LogVar); - - void clearCountedLogVar (void); - - void rename (LogVar, LogVar); - - static PrvGroup getNewGroup (void); - - friend std::ostream& operator<< (ostream &os, const ProbFormula& f); - - friend bool operator== (const ProbFormula& f1, const ProbFormula& f2); - - private: - Symbol functor_; - LogVars logVars_; - unsigned range_; - LogVar countedLogVar_; - PrvGroup group_; - static PrvGroup freeGroup_; -}; - -typedef vector ProbFormulas; - - -class ObservedFormula -{ - public: - ObservedFormula (Symbol f, unsigned a, unsigned ev) - : functor_(f), arity_(a), evidence_(ev), constr_(a) { } - - ObservedFormula (Symbol f, unsigned ev, const Tuple& tuple) - : functor_(f), arity_(tuple.size()), evidence_(ev), constr_(arity_) - { - constr_.addTuple (tuple); - } - - Symbol functor (void) const { return functor_; } - - unsigned arity (void) const { return arity_; } - - unsigned evidence (void) const { return evidence_; } - - void setEvidence (unsigned ev) { evidence_ = ev; } - - ConstraintTree& constr (void) { return constr_; } - - bool isAtom (void) const { return arity_ == 0; } - - void addTuple (const Tuple& tuple) { constr_.addTuple (tuple); } - - friend ostream& operator<< (ostream &os, const ObservedFormula& of); - - private: - Symbol functor_; - unsigned arity_; - unsigned evidence_; - ConstraintTree constr_; -}; - -typedef vector ObservedFormulas; - -#endif // HORUS_PROBFORMULA_H - diff --git a/packages/CLPBN/horus2/TinySet.h b/packages/CLPBN/horus2/TinySet.h deleted file mode 100644 index 4b3c4bd83..000000000 --- a/packages/CLPBN/horus2/TinySet.h +++ /dev/null @@ -1,264 +0,0 @@ -#ifndef HORUS_TINYSET_H -#define HORUS_TINYSET_H - -#include -#include - -using namespace std; - - -template > -class TinySet -{ - public: - - typedef typename vector::iterator iterator; - typedef typename vector::const_iterator const_iterator; - - TinySet (const TinySet& s) - : vec_(s.vec_), cmp_(s.cmp_) { } - - TinySet (const Compare& cmp = Compare()) - : vec_(), cmp_(cmp) { } - - TinySet (const T& t, const Compare& cmp = Compare()) - : vec_(1, t), cmp_(cmp) { } - - TinySet (const vector& elements, const Compare& cmp = Compare()) - : vec_(elements), cmp_(cmp) - { - std::sort (begin(), end(), cmp_); - iterator it = unique_cmp (begin(), end()); - vec_.resize (it - begin()); - } - - iterator insert (const T& t) - { - iterator it = std::lower_bound (begin(), end(), t, cmp_); - if (it == end() || cmp_(t, *it)) { - vec_.insert (it, t); - } - return it; - } - - void insert_sorted (const T& t) 
- { - vec_.push_back (t); - assert (consistent()); - } - - void remove (const T& t) - { - iterator it = std::lower_bound (begin(), end(), t, cmp_); - if (it != end()) { - vec_.erase (it); - } - } - - const_iterator find (const T& t) const - { - const_iterator it = std::lower_bound (begin(), end(), t, cmp_); - return it == end() || cmp_(t, *it) ? end() : it; - } - - iterator find (const T& t) - { - iterator it = std::lower_bound (begin(), end(), t, cmp_); - return it == end() || cmp_(t, *it) ? end() : it; - } - - /* set union */ - TinySet operator| (const TinySet& s) const - { - TinySet res; - std::set_union ( - vec_.begin(), vec_.end(), - s.vec_.begin(), s.vec_.end(), - std::back_inserter (res.vec_), - cmp_); - return res; - } - - /* set intersection */ - TinySet operator& (const TinySet& s) const - { - TinySet res; - std::set_intersection ( - vec_.begin(), vec_.end(), - s.vec_.begin(), s.vec_.end(), - std::back_inserter (res.vec_), - cmp_); - return res; - } - - /* set difference */ - TinySet operator- (const TinySet& s) const - { - TinySet res; - std::set_difference ( - vec_.begin(), vec_.end(), - s.vec_.begin(), s.vec_.end(), - std::back_inserter (res.vec_), - cmp_); - return res; - } - - TinySet& operator|= (const TinySet& s) - { - return *this = (*this | s); - } - - TinySet& operator&= (const TinySet& s) - { - return *this = (*this & s); - } - - TinySet& operator-= (const TinySet& s) - { - return *this = (*this - s); - } - - bool contains (const T& t) const - { - return std::binary_search ( - vec_.begin(), vec_.end(), t, cmp_); - } - - bool contains (const TinySet& s) const - { - return std::includes ( - vec_.begin(), - vec_.end(), - s.vec_.begin(), - s.vec_.end(), - cmp_); - } - - bool in (const TinySet& s) const - { - return std::includes ( - s.vec_.begin(), - s.vec_.end(), - vec_.begin(), - vec_.end(), - cmp_); - } - - bool intersects (const TinySet& s) const - { - return (*this & s).size() > 0; - } - - const T& operator[] (typename vector::size_type i) const - { - return vec_[i]; - } - - T& operator[] (typename vector::size_type i) - { - return vec_[i]; - } - - T front (void) const - { - return vec_.front(); - } - - T& front (void) - { - return vec_.front(); - } - - T back (void) const - { - return vec_.back(); - } - - T& back (void) - { - return vec_.back(); - } - - const vector& elements (void) const - { - return vec_; - } - - bool empty (void) const - { - return size() == 0; - } - - typename vector::size_type size (void) const - { - return vec_.size(); - } - - void clear (void) - { - vec_.clear(); - } - - void reserve (typename vector::size_type size) - { - vec_.reserve (size); - } - - iterator begin (void) { return vec_.begin(); } - iterator end (void) { return vec_.end(); } - const_iterator begin (void) const { return vec_.begin(); } - const_iterator end (void) const { return vec_.end(); } - - friend bool operator== (const TinySet& s1, const TinySet& s2) - { - return s1.vec_ == s2.vec_; - } - - friend bool operator!= (const TinySet& s1, const TinySet& s2) - { - return ! (s1.vec_ == s2.vec_); - } - - friend std::ostream& operator << (std::ostream& out, const TinySet& s) - { - out << "{" ; - typename vector::size_type i; - for (i = 0; i < s.size(); i++) { - out << ((i != 0) ? 
"," : "") << s.vec_[i]; - } - out << "}" ; - return out; - } - - private: - iterator unique_cmp (iterator first, iterator last) - { - if (first == last) { - return last; - } - iterator result = first; - while (++first != last) { - if (cmp_(*result, *first)) { - *(++result) = *first; - } - } - return ++result; - } - - bool consistent (void) const - { - typename vector::size_type i; - for (i = 0; i < vec_.size() - 1; i++) { - if ( ! cmp_(vec_[i], vec_[i + 1])) { - return false; - } - } - return true; - } - - vector vec_; - Compare cmp_; -}; - -#endif // HORUS_TINYSET_H - diff --git a/packages/CLPBN/horus2/Util.cpp b/packages/CLPBN/horus2/Util.cpp deleted file mode 100644 index 0f3ce6544..000000000 --- a/packages/CLPBN/horus2/Util.cpp +++ /dev/null @@ -1,429 +0,0 @@ -#include - -#include -#include - -#include "Util.h" -#include "Indexer.h" -#include "ElimGraph.h" - - -namespace Globals { -bool logDomain = false; - -unsigned verbosity = 0; - -LiftedSolverType liftedSolver = LiftedSolverType::LVE; - -GroundSolverType groundSolver = GroundSolverType::VE; - -}; - - - -namespace BpOptions { -Schedule schedule = BpOptions::Schedule::SEQ_FIXED; -//Schedule schedule = BpOptions::Schedule::SEQ_RANDOM; -//Schedule schedule = BpOptions::Schedule::PARALLEL; -//Schedule schedule = BpOptions::Schedule::MAX_RESIDUAL; -double accuracy = 0.0001; -unsigned maxIter = 1000; -} - - - -namespace Util { - - -template <> std::string -toString (const bool& b) -{ - std::stringstream ss; - ss << std::boolalpha << b; - return ss.str(); -} - - - -unsigned -stringToUnsigned (string str) -{ - int val; - stringstream ss; - ss << str; - ss >> val; - if (val < 0) { - cerr << "Error: the number readed is negative." << endl; - exit (EXIT_FAILURE); - } - return static_cast (val); -} - - - -double -stringToDouble (string str) -{ - double val; - stringstream ss; - ss << str; - ss >> val; - return val; -} - - - -double -factorial (unsigned num) -{ - double result = 1.0; - for (unsigned i = 1; i <= num; i++) { - result *= i; - } - return result; -} - - - -double -logFactorial (unsigned num) -{ - double result = 0.0; - if (num < 150) { - result = std::log (factorial (num)); - } else { - for (unsigned i = 1; i <= num; i++) { - result += std::log (i); - } - } - return result; -} - - - -unsigned -nrCombinations (unsigned n, unsigned k) -{ - assert (n >= k); - int diff = n - k; - unsigned result = 0; - if (n < 150) { - unsigned prod = 1; - for (int i = n; i > diff; i--) { - prod *= i; - } - result = prod / factorial (k); - } else { - double prod = 0.0; - for (int i = n; i > diff; i--) { - prod += std::log (i); - } - prod -= logFactorial (k); - result = static_cast (std::exp (prod)); - } - return result; -} - - - -size_t -sizeExpected (const Ranges& ranges) -{ - return std::accumulate (ranges.begin(), - ranges.end(), 1, multiplies()); -} - - - -unsigned -nrDigits (int num) -{ - unsigned count = 1; - while (num >= 10) { - num /= 10; - count ++; - } - return count; -} - - - -bool -isInteger (const string& s) -{ - stringstream ss1 (s); - stringstream ss2; - int integer; - ss1 >> integer; - ss2 << integer; - return (ss1.str() == ss2.str()); -} - - - -string -parametersToString (const Params& v, unsigned precision) -{ - stringstream ss; - ss.precision (precision); - ss << "[" ; - for (size_t i = 0; i < v.size(); i++) { - if (i != 0) ss << ", " ; - ss << v[i]; - } - ss << "]" ; - return ss.str(); -} - - - -vector -getStateLines (const Vars& vars) -{ - Ranges ranges; - for (size_t i = 0; i < vars.size(); i++) { - ranges.push_back 
(vars[i]->range()); - } - Indexer indexer (ranges); - vector jointStrings; - while (indexer.valid()) { - stringstream ss; - for (size_t i = 0; i < vars.size(); i++) { - if (i != 0) ss << ", " ; - ss << vars[i]->label() << "=" ; - ss << vars[i]->states()[(indexer[i])]; - } - jointStrings.push_back (ss.str()); - ++ indexer; - } - return jointStrings; -} - - - -bool -setHorusFlag (string key, string value) -{ - bool returnVal = true; - if (key == "verbosity") { - stringstream ss; - ss << value; - ss >> Globals::verbosity; - } else if (key == "lifted_solver") { - if ( value == "lve") { - Globals::liftedSolver = LiftedSolverType::LVE; - } else if (value == "lbp") { - Globals::liftedSolver = LiftedSolverType::LBP; - } else if (value == "lkc") { - Globals::liftedSolver = LiftedSolverType::LKC; - } else { - cerr << "warning: invalid value `" << value << "' " ; - cerr << "for `" << key << "'" << endl; - returnVal = false; - } - } else if (key == "ground_solver") { - if ( value == "ve") { - Globals::groundSolver = GroundSolverType::VE; - } else if (value == "bp") { - Globals::groundSolver = GroundSolverType::BP; - } else if (value == "cbp") { - Globals::groundSolver = GroundSolverType::CBP; - } else { - cerr << "warning: invalid value `" << value << "' " ; - cerr << "for `" << key << "'" << endl; - returnVal = false; - } - } else if (key == "elim_heuristic") { - if ( value == "sequential") { - ElimGraph::elimHeuristic = ElimHeuristic::SEQUENTIAL; - } else if (value == "min_neighbors") { - ElimGraph::elimHeuristic = ElimHeuristic::MIN_NEIGHBORS; - } else if (value == "min_weight") { - ElimGraph::elimHeuristic = ElimHeuristic::MIN_WEIGHT; - } else if (value == "min_fill") { - ElimGraph::elimHeuristic = ElimHeuristic::MIN_FILL; - } else if (value == "weighted_min_fill") { - ElimGraph::elimHeuristic = ElimHeuristic::WEIGHTED_MIN_FILL; - } else { - cerr << "warning: invalid value `" << value << "' " ; - cerr << "for `" << key << "'" << endl; - returnVal = false; - } - } else if (key == "schedule") { - if ( value == "seq_fixed") { - BpOptions::schedule = BpOptions::Schedule::SEQ_FIXED; - } else if (value == "seq_random") { - BpOptions::schedule = BpOptions::Schedule::SEQ_RANDOM; - } else if (value == "parallel") { - BpOptions::schedule = BpOptions::Schedule::PARALLEL; - } else if (value == "max_residual") { - BpOptions::schedule = BpOptions::Schedule::MAX_RESIDUAL; - } else { - cerr << "warning: invalid value `" << value << "' " ; - cerr << "for `" << key << "'" << endl; - returnVal = false; - } - } else if (key == "accuracy") { - stringstream ss; - ss << value; - ss >> BpOptions::accuracy; - } else if (key == "max_iter") { - stringstream ss; - ss << value; - ss >> BpOptions::maxIter; - } else if (key == "use_logarithms") { - if ( value == "true") { - Globals::logDomain = true; - } else if (value == "false") { - Globals::logDomain = false; - } else { - cerr << "warning: invalid value `" << value << "' " ; - cerr << "for `" << key << "'" << endl; - returnVal = false; - } - } else { - cerr << "warning: invalid key `" << key << "'" << endl; - returnVal = false; - } - return returnVal; -} - - - -void -printHeader (string header, std::ostream& os) -{ - printAsteriskLine (os); - os << header << endl; - printAsteriskLine (os); -} - - - -void -printSubHeader (string header, std::ostream& os) -{ - printDashedLine (os); - os << header << endl; - printDashedLine (os); -} - - - -void -printAsteriskLine (std::ostream& os) -{ - os << "********************************" ; - os << "********************************" ; - os 
<< endl; -} - - - -void -printDashedLine (std::ostream& os) -{ - os << "--------------------------------" ; - os << "--------------------------------" ; - os << endl; -} - - -} - - - -namespace LogAware { - -void -normalize (Params& v) -{ - if (Globals::logDomain) { - double sum = std::accumulate (v.begin(), v.end(), - LogAware::addIdenty(), Util::logSum); - assert (sum != -numeric_limits::infinity()); - v -= sum; - } else { - double sum = std::accumulate (v.begin(), v.end(), 0.0); - assert (sum != 0.0); - v /= sum; - } -} - - - -double -getL1Distance (const Params& v1, const Params& v2) -{ - assert (v1.size() == v2.size()); - double dist = 0.0; - if (Globals::logDomain) { - dist = std::inner_product (v1.begin(), v1.end(), v2.begin(), 0.0, - std::plus(), FuncObject::abs_diff_exp()); - } else { - dist = std::inner_product (v1.begin(), v1.end(), v2.begin(), 0.0, - std::plus(), FuncObject::abs_diff()); - } - return dist; -} - - - -double -getMaxNorm (const Params& v1, const Params& v2) -{ - assert (v1.size() == v2.size()); - double max = 0.0; - if (Globals::logDomain) { - max = std::inner_product (v1.begin(), v1.end(), v2.begin(), 0.0, - FuncObject::max(), FuncObject::abs_diff_exp()); - } else { - max = std::inner_product (v1.begin(), v1.end(), v2.begin(), 0.0, - FuncObject::max(), FuncObject::abs_diff()); - } - return max; -} - - - -double -pow (double base, unsigned iexp) -{ - return Globals::logDomain - ? base * iexp - : std::pow (base, iexp); -} - - - -double -pow (double base, double exp) -{ - // `expoent' should not be in log domain - return Globals::logDomain - ? base * exp - : std::pow (base, exp); -} - - - -void -pow (Params& v, unsigned iexp) -{ - if (iexp == 1) { - return; - } - Globals::logDomain ? v *= iexp : v ^= (int)iexp; -} - - - -void -pow (Params& v, double exp) -{ - // `expoent' should not be in log domain - Globals::logDomain ? 
v *= exp : v ^= exp; -} - -} - diff --git a/packages/CLPBN/horus2/Util.h b/packages/CLPBN/horus2/Util.h deleted file mode 100644 index 38a088714..000000000 --- a/packages/CLPBN/horus2/Util.h +++ /dev/null @@ -1,422 +0,0 @@ -#ifndef HORUS_UTIL_H -#define HORUS_UTIL_H - -#include -#include -#include - -#include -#include -#include -#include -#include - -#include -#include - -#include "Horus.h" - -using namespace std; - - -namespace { -const double NEG_INF = -numeric_limits::infinity(); -}; - - -namespace Util { - -template void addToVector (vector&, const vector&); - -template void addToSet (set&, const vector&); - -template void addToQueue (queue&, const vector&); - -template bool contains (const vector&, const T&); - -template bool contains (const set&, const T&); - -template bool contains ( - const unordered_map&, const K&); - -template size_t indexOf (const vector&, const T&); - -template -void apply_n_times (Params& v1, const Params& v2, unsigned repetitions, Operation); - -template void log (vector&); - -template void exp (vector&); - -template string elementsToString ( - const vector& v, string sep = " "); - -template std::string toString (const T&); - -template <> std::string toString (const bool&); - -double logSum (double, double); - -unsigned maxUnsigned (void); - -unsigned stringToUnsigned (string); - -double stringToDouble (string); - -double factorial (unsigned); - -double logFactorial (unsigned); - -unsigned nrCombinations (unsigned, unsigned); - -size_t sizeExpected (const Ranges&); - -unsigned nrDigits (int); - -bool isInteger (const string&); - -string parametersToString (const Params&, unsigned = Constants::PRECISION); - -vector getStateLines (const Vars&); - -bool setHorusFlag (string key, string value); - -void printHeader (string, std::ostream& os = std::cout); - -void printSubHeader (string, std::ostream& os = std::cout); - -void printAsteriskLine (std::ostream& os = std::cout); - -void printDashedLine (std::ostream& os = std::cout); - -}; - - - -template void -Util::addToVector (vector& v, const vector& elements) -{ - v.insert (v.end(), elements.begin(), elements.end()); -} - - - -template void -Util::addToSet (set& s, const vector& elements) -{ - s.insert (elements.begin(), elements.end()); -} - - - -template void -Util::addToQueue (queue& q, const vector& elements) -{ - for (size_t i = 0; i < elements.size(); i++) { - q.push (elements[i]); - } -} - - - -template bool -Util::contains (const vector& v, const T& e) -{ - return std::find (v.begin(), v.end(), e) != v.end(); -} - - - -template bool -Util::contains (const set& s, const T& e) -{ - return s.find (e) != s.end(); -} - - - -template bool -Util::contains (const unordered_map& m, const K& k) -{ - return m.find (k) != m.end(); -} - - - -template size_t -Util::indexOf (const vector& v, const T& e) -{ - return std::distance (v.begin(), - std::find (v.begin(), v.end(), e)); -} - - - -template void -Util::apply_n_times (Params& v1, const Params& v2, unsigned repetitions, - Operation unary_op) -{ - Params::iterator first = v1.begin(); - Params::const_iterator last = v1.end(); - Params::const_iterator first2 = v2.begin(); - Params::const_iterator last2 = v2.end(); - while (first != last) { - for (first2 = v2.begin(); first2 != last2; ++first2) { - std::transform (first, first + repetitions, first, - std::bind1st (unary_op, *first2)); - first += repetitions; - } - } -} - - - -template void -Util::log (vector& v) -{ - std::transform (v.begin(), v.end(), v.begin(), ::log); -} - - - -template void -Util::exp (vector& v) -{ 
- std::transform (v.begin(), v.end(), v.begin(), ::exp); -} - - - -template string -Util::elementsToString (const vector& v, string sep) -{ - stringstream ss; - for (size_t i = 0; i < v.size(); i++) { - ss << ((i != 0) ? sep : "") << v[i]; - } - return ss.str(); -} - - - -template std::string -Util::toString (const T& t) -{ - std::stringstream ss; - ss << t; - return ss.str(); -} - - - -inline double -Util::logSum (double x, double y) -{ - // std::log (std::exp (x) + std::exp (y)) can overflow! - assert (std::isnan (x) == false); - assert (std::isnan (y) == false); - if (x == NEG_INF) { - return y; - } - if (y == NEG_INF) { - return x; - } - // if one value is much smaller than the other, - // keep the larger value - const double tol = 460.517; // log (1e200) - if (x < y - tol) { - return y; - } - if (y < x - tol) { - return x; - } - assert (std::isnan (x - y) == false); - const double exp_diff = std::exp (x - y); - if (std::isfinite (exp_diff) == false) { - // difference is too large - return x > y ? x : y; - } - // otherwise return the sum - return y + std::log (static_cast(1.0) + exp_diff); -} - - - -inline unsigned -Util::maxUnsigned (void) -{ - return numeric_limits::max(); -} - - - -namespace LogAware { - -inline double one() { return Globals::logDomain ? 0.0 : 1.0; } -inline double zero() { return Globals::logDomain ? NEG_INF : 0.0; } -inline double addIdenty() { return Globals::logDomain ? NEG_INF : 0.0; } -inline double multIdenty() { return Globals::logDomain ? 0.0 : 1.0; } -inline double withEvidence() { return Globals::logDomain ? 0.0 : 1.0; } -inline double noEvidence() { return Globals::logDomain ? NEG_INF : 0.0; } -inline double log (double v) { return Globals::logDomain ? ::log (v) : v; } -inline double exp (double v) { return Globals::logDomain ? 
::exp (v) : v; } - -void normalize (Params&); - -double getL1Distance (const Params&, const Params&); - -double getMaxNorm (const Params&, const Params&); - -double pow (double, unsigned); - -double pow (double, double); - -void pow (Params&, unsigned); - -void pow (Params&, double); - -}; - - - -template -void operator+=(std::vector& v, double val) -{ - std::transform (v.begin(), v.end(), v.begin(), - std::bind2nd (plus(), val)); -} - - - -template -void operator-=(std::vector& v, double val) -{ - std::transform (v.begin(), v.end(), v.begin(), - std::bind2nd (minus(), val)); -} - - - -template -void operator*=(std::vector& v, double val) -{ - std::transform (v.begin(), v.end(), v.begin(), - std::bind2nd (multiplies(), val)); -} - - - -template -void operator/=(std::vector& v, double val) -{ - std::transform (v.begin(), v.end(), v.begin(), - std::bind2nd (divides(), val)); -} - - - -template -void operator+=(std::vector& a, const std::vector& b) -{ - assert (a.size() == b.size()); - std::transform (a.begin(), a.end(), b.begin(), a.begin(), - plus()); -} - - - -template -void operator-=(std::vector& a, const std::vector& b) -{ - assert (a.size() == b.size()); - std::transform (a.begin(), a.end(), b.begin(), a.begin(), - minus()); -} - - - -template -void operator*=(std::vector& a, const std::vector& b) -{ - assert (a.size() == b.size()); - std::transform (a.begin(), a.end(), b.begin(), a.begin(), - multiplies()); -} - - - -template -void operator/=(std::vector& a, const std::vector& b) -{ - assert (a.size() == b.size()); - std::transform (a.begin(), a.end(), b.begin(), a.begin(), - divides()); -} - - - -template -void operator^=(std::vector& v, double exp) -{ - std::transform (v.begin(), v.end(), v.begin(), - std::bind2nd (ptr_fun (std::pow), exp)); -} - - - -template -void operator^=(std::vector& v, int iexp) -{ - std::transform (v.begin(), v.end(), v.begin(), - std::bind2nd (ptr_fun (std::pow), iexp)); -} - - - -template -std::ostream& operator << (std::ostream& os, const vector& v) -{ - os << "[" ; - os << Util::elementsToString (v, ", "); - os << "]" ; - return os; -} - - -namespace FuncObject { - -template -struct max : public std::binary_function -{ - T operator() (const T& x, const T& y) const - { - return x < y ? 
y : x; - } -}; - - - -template -struct abs_diff : public std::binary_function -{ - T operator() (const T& x, const T& y) const - { - return std::abs (x - y); - } -}; - - - -template -struct abs_diff_exp : public std::binary_function -{ - T operator() (const T& x, const T& y) const - { - return std::abs (std::exp (x) - std::exp (y)); - } -}; - -} - -#endif // HORUS_UTIL_H - diff --git a/packages/CLPBN/horus2/Var.cpp b/packages/CLPBN/horus2/Var.cpp deleted file mode 100644 index 44ab6b1e4..000000000 --- a/packages/CLPBN/horus2/Var.cpp +++ /dev/null @@ -1,102 +0,0 @@ -#include -#include - -#include "Var.h" - -using namespace std; - - -unordered_map Var::varsInfo_; - - -Var::Var (const Var* v) -{ - varId_ = v->varId(); - range_ = v->range(); - evidence_ = v->getEvidence(); - index_ = std::numeric_limits::max(); -} - - - -Var::Var (VarId varId, unsigned range, int evidence) -{ - assert (range != 0); - assert (evidence < (int) range); - varId_ = varId; - range_ = range; - evidence_ = evidence; - index_ = std::numeric_limits::max(); -} - - - -bool -Var::isValidState (int stateIndex) -{ - return stateIndex >= 0 && stateIndex < (int) range_; -} - - - -bool -Var::isValidState (const string& stateName) -{ - States states = Var::getVarInfo (varId_).states; - return Util::contains (states, stateName); -} - - - -void -Var::setEvidence (int ev) -{ - assert (ev < (int) range_); - evidence_ = ev; -} - - - -void -Var::setEvidence (const string& ev) -{ - States states = Var::getVarInfo (varId_).states; - for (size_t i = 0; i < states.size(); i++) { - if (states[i] == ev) { - evidence_ = i; - return; - } - } - assert (false); -} - - - -string -Var::label (void) const -{ - if (Var::varsHaveInfo()) { - return Var::getVarInfo (varId_).label; - } - stringstream ss; - ss << "x" << varId_; - return ss.str(); -} - - - -States -Var::states (void) const -{ - if (Var::varsHaveInfo()) { - return Var::getVarInfo (varId_).states; - } - States states; - for (unsigned i = 0; i < range_; i++) { - stringstream ss; - ss << i ; - states.push_back (ss.str()); - } - return states; -} - diff --git a/packages/CLPBN/horus2/Var.h b/packages/CLPBN/horus2/Var.h deleted file mode 100644 index 8ab580c3a..000000000 --- a/packages/CLPBN/horus2/Var.h +++ /dev/null @@ -1,108 +0,0 @@ -#ifndef HORUS_VAR_H -#define HORUS_VAR_H - -#include - -#include - -#include "Util.h" -#include "Horus.h" - - -using namespace std; - - -struct VarInfo -{ - VarInfo (string l, const States& sts) : label(l), states(sts) { } - string label; - States states; -}; - - - -class Var -{ - public: - Var (const Var*); - - Var (VarId, unsigned, int = Constants::NO_EVIDENCE); - - virtual ~Var (void) { }; - - VarId varId (void) const { return varId_; } - - unsigned range (void) const { return range_; } - - int getEvidence (void) const { return evidence_; } - - size_t getIndex (void) const { return index_; } - - void setIndex (size_t idx) { index_ = idx; } - - bool hasEvidence (void) const - { - return evidence_ != Constants::NO_EVIDENCE; - } - - operator size_t (void) const { return index_; } - - bool operator== (const Var& var) const - { - assert (!(varId_ == var.varId() && range_ != var.range())); - return varId_ == var.varId(); - } - - bool operator!= (const Var& var) const - { - assert (!(varId_ == var.varId() && range_ != var.range())); - return varId_ != var.varId(); - } - - bool isValidState (int); - - bool isValidState (const string&); - - void setEvidence (int); - - void setEvidence (const string&); - - string label (void) const; - - States states (void) const; - - 
static void addVarInfo ( - VarId vid, string label, const States& states) - { - assert (Util::contains (varsInfo_, vid) == false); - varsInfo_.insert (make_pair (vid, VarInfo (label, states))); - } - - static VarInfo getVarInfo (VarId vid) - { - assert (Util::contains (varsInfo_, vid)); - return varsInfo_.find (vid)->second; - } - - static bool varsHaveInfo (void) - { - return varsInfo_.size() != 0; - } - - static void clearVarsInfo (void) - { - varsInfo_.clear(); - } - - private: - VarId varId_; - unsigned range_; - int evidence_; - size_t index_; - - static unordered_map varsInfo_; - -}; - -#endif // HORUS_VAR_H - diff --git a/packages/CLPBN/horus2/VarElim.cpp b/packages/CLPBN/horus2/VarElim.cpp deleted file mode 100644 index fb4eecf50..000000000 --- a/packages/CLPBN/horus2/VarElim.cpp +++ /dev/null @@ -1,217 +0,0 @@ -#include - -#include "VarElim.h" -#include "ElimGraph.h" -#include "Factor.h" -#include "Util.h" - - -VarElim::~VarElim (void) -{ - delete factorList_.back(); -} - - - -Params -VarElim::solveQuery (VarIds queryVids) -{ - if (Globals::verbosity > 1) { - cout << "Solving query on " ; - for (size_t i = 0; i < queryVids.size(); i++) { - if (i != 0) cout << ", " ; - cout << fg.getVarNode (queryVids[i])->label(); - } - cout << endl; - } - factorList_.clear(); - varFactors_.clear(); - elimOrder_.clear(); - createFactorList(); - absorveEvidence(); - findEliminationOrder (queryVids); - processFactorList (queryVids); - Params params = factorList_.back()->params(); - if (Globals::logDomain) { - Util::exp (params); - } - return params; -} - - - -void -VarElim::printSolverFlags (void) const -{ - stringstream ss; - ss << "variable elimination [" ; - ss << "elim_heuristic=" ; - ElimHeuristic eh = ElimGraph::elimHeuristic; - switch (eh) { - case SEQUENTIAL: ss << "sequential"; break; - case MIN_NEIGHBORS: ss << "min_neighbors"; break; - case MIN_WEIGHT: ss << "min_weight"; break; - case MIN_FILL: ss << "min_fill"; break; - case WEIGHTED_MIN_FILL: ss << "weighted_min_fill"; break; - } - ss << ",log_domain=" << Util::toString (Globals::logDomain); - ss << "]" ; - cout << ss.str() << endl; -} - - - -void -VarElim::createFactorList (void) -{ - const FacNodes& facNodes = fg.facNodes(); - factorList_.reserve (facNodes.size() * 2); - for (size_t i = 0; i < facNodes.size(); i++) { - factorList_.push_back (new Factor (facNodes[i]->factor())); - const VarNodes& neighs = facNodes[i]->neighbors(); - for (size_t j = 0; j < neighs.size(); j++) { - unordered_map>::iterator it - = varFactors_.find (neighs[j]->varId()); - if (it == varFactors_.end()) { - it = varFactors_.insert (make_pair ( - neighs[j]->varId(), vector())).first; - } - it->second.push_back (i); - } - } -} - - - -void -VarElim::absorveEvidence (void) -{ - if (Globals::verbosity > 2) { - Util::printDashedLine(); - cout << "(initial factor list)" << endl; - printActiveFactors(); - } - const VarNodes& varNodes = fg.varNodes(); - for (size_t i = 0; i < varNodes.size(); i++) { - if (varNodes[i]->hasEvidence()) { - if (Globals::verbosity > 1) { - cout << "-> aborving evidence on "; - cout << varNodes[i]->label() << " = " ; - cout << varNodes[i]->getEvidence() << endl; - } - const vector& idxs = - varFactors_.find (varNodes[i]->varId())->second; - for (size_t j = 0; j < idxs.size(); j++) { - Factor* factor = factorList_[idxs[j]]; - if (factor->nrArguments() == 1) { - factorList_[idxs[j]] = 0; - } else { - factorList_[idxs[j]]->absorveEvidence ( - varNodes[i]->varId(), varNodes[i]->getEvidence()); - } - } - } - } -} - - - -void 
-VarElim::findEliminationOrder (const VarIds& vids) -{ - elimOrder_ = ElimGraph::getEliminationOrder (factorList_, vids); -} - - - -void -VarElim::processFactorList (const VarIds& vids) -{ - totalFactorSize_ = 0; - largestFactorSize_ = 0; - for (size_t i = 0; i < elimOrder_.size(); i++) { - if (Globals::verbosity >= 2) { - if (Globals::verbosity >= 3) { - Util::printDashedLine(); - printActiveFactors(); - } - cout << "-> summing out " ; - cout << fg.getVarNode (elimOrder_[i])->label() << endl; - } - eliminate (elimOrder_[i]); - } - - Factor* finalFactor = new Factor(); - for (size_t i = 0; i < factorList_.size(); i++) { - if (factorList_[i]) { - finalFactor->multiply (*factorList_[i]); - delete factorList_[i]; - factorList_[i] = 0; - } - } - - VarIds unobservedVids; - for (size_t i = 0; i < vids.size(); i++) { - if (fg.getVarNode (vids[i])->hasEvidence() == false) { - unobservedVids.push_back (vids[i]); - } - } - - finalFactor->reorderArguments (unobservedVids); - finalFactor->normalize(); - factorList_.push_back (finalFactor); - if (Globals::verbosity > 0) { - cout << "total factor size: " << totalFactorSize_ << endl; - cout << "largest factor size: " << largestFactorSize_ << endl; - cout << endl; - } -} - - - -void -VarElim::eliminate (VarId elimVar) -{ - Factor* result = 0; - vector& idxs = varFactors_.find (elimVar)->second; - for (size_t i = 0; i < idxs.size(); i++) { - size_t idx = idxs[i]; - if (factorList_[idx]) { - if (result == 0) { - result = new Factor (*factorList_[idx]); - } else { - result->multiply (*factorList_[idx]); - } - delete factorList_[idx]; - factorList_[idx] = 0; - } - } - totalFactorSize_ += result->size(); - if (result->size() > largestFactorSize_) { - largestFactorSize_ = result->size(); - } - if (result != 0 && result->nrArguments() != 1) { - result->sumOut (elimVar); - factorList_.push_back (result); - const VarIds& resultVarIds = result->arguments(); - for (size_t i = 0; i < resultVarIds.size(); i++) { - vector& idxs = - varFactors_.find (resultVarIds[i])->second; - idxs.push_back (factorList_.size() - 1); - } - } -} - - - -void -VarElim::printActiveFactors (void) -{ - for (size_t i = 0; i < factorList_.size(); i++) { - if (factorList_[i] != 0) { - cout << factorList_[i]->getLabel() << " " ; - cout << factorList_[i]->params() << endl; - } - } -} - diff --git a/packages/CLPBN/horus2/VarElim.h b/packages/CLPBN/horus2/VarElim.h deleted file mode 100644 index fe1327fc0..000000000 --- a/packages/CLPBN/horus2/VarElim.h +++ /dev/null @@ -1,46 +0,0 @@ -#ifndef HORUS_VARELIM_H -#define HORUS_VARELIM_H - -#include "unordered_map" - -#include "GroundSolver.h" -#include "FactorGraph.h" -#include "Horus.h" - - -using namespace std; - - -class VarElim : public GroundSolver -{ - public: - VarElim (const FactorGraph& fg) : GroundSolver (fg) { } - - ~VarElim (void); - - Params solveQuery (VarIds); - - void printSolverFlags (void) const; - - private: - void createFactorList (void); - - void absorveEvidence (void); - - void findEliminationOrder (const VarIds&); - - void processFactorList (const VarIds&); - - void eliminate (VarId); - - void printActiveFactors (void); - - Factors factorList_; - VarIds elimOrder_; - unsigned largestFactorSize_; - unsigned totalFactorSize_; - unordered_map> varFactors_; -}; - -#endif // HORUS_VARELIM_H - diff --git a/packages/CLPBN/horus2/WeightedBp.cpp b/packages/CLPBN/horus2/WeightedBp.cpp deleted file mode 100644 index d8a32a246..000000000 --- a/packages/CLPBN/horus2/WeightedBp.cpp +++ /dev/null @@ -1,288 +0,0 @@ -#include "WeightedBp.h" - - 
-WeightedBp::~WeightedBp (void) -{ - for (size_t i = 0; i < links_.size(); i++) { - delete links_[i]; - } - links_.clear(); -} - - - -Params -WeightedBp::getPosterioriOf (VarId vid) -{ - if (runned_ == false) { - runSolver(); - } - VarNode* var = fg.getVarNode (vid); - assert (var != 0); - Params probs; - if (var->hasEvidence()) { - probs.resize (var->range(), LogAware::noEvidence()); - probs[var->getEvidence()] = LogAware::withEvidence(); - } else { - probs.resize (var->range(), LogAware::multIdenty()); - const BpLinks& links = ninf(var)->getLinks(); - if (Globals::logDomain) { - for (size_t i = 0; i < links.size(); i++) { - WeightedLink* l = static_cast (links[i]); - probs += l->powMessage(); - } - LogAware::normalize (probs); - Util::exp (probs); - } else { - for (size_t i = 0; i < links.size(); i++) { - WeightedLink* l = static_cast (links[i]); - probs *= l->powMessage(); - } - LogAware::normalize (probs); - } - } - return probs; -} - - - -void -WeightedBp::createLinks (void) -{ - if (Globals::verbosity > 0) { - cout << "compressed factor graph contains " ; - cout << fg.nrVarNodes() << " variables and " ; - cout << fg.nrFacNodes() << " factors " << endl; - cout << endl; - } - const FacNodes& facNodes = fg.facNodes(); - for (size_t i = 0; i < facNodes.size(); i++) { - const VarNodes& neighs = facNodes[i]->neighbors(); - for (size_t j = 0; j < neighs.size(); j++) { - if (Globals::verbosity > 1) { - cout << "creating link " ; - cout << facNodes[i]->getLabel(); - cout << " -- " ; - cout << neighs[j]->label(); - cout << " idx=" << j << ", weight=" << weights_[i][j] << endl; - } - links_.push_back (new WeightedLink ( - facNodes[i], neighs[j], j, weights_[i][j])); - } - } - if (Globals::verbosity > 1) { - cout << endl; - } -} - - - -void -WeightedBp::maxResidualSchedule (void) -{ - if (nIters_ == 1) { - for (size_t i = 0; i < links_.size(); i++) { - calculateMessage (links_[i]); - SortedOrder::iterator it = sortedOrder_.insert (links_[i]); - linkMap_.insert (make_pair (links_[i], it)); - if (Globals::verbosity >= 1) { - cout << "calculating " << links_[i]->toString() << endl; - } - } - return; - } - - for (size_t c = 0; c < links_.size(); c++) { - if (Globals::verbosity > 1) { - cout << endl << "current residuals:" << endl; - for (SortedOrder::iterator it = sortedOrder_.begin(); - it != sortedOrder_.end(); ++it) { - cout << " " << setw (30) << left << (*it)->toString(); - cout << "residual = " << (*it)->residual() << endl; - } - } - - SortedOrder::iterator it = sortedOrder_.begin(); - BpLink* link = *it; - if (Globals::verbosity >= 1) { - cout << "updating " << (*sortedOrder_.begin())->toString() << endl; - } - if (link->residual() < BpOptions::accuracy) { - return; - } - link->updateMessage(); - link->clearResidual(); - sortedOrder_.erase (it); - linkMap_.find (link)->second = sortedOrder_.insert (link); - - // update the messages that depend on message source --> destin - const FacNodes& factorNeighbors = link->varNode()->neighbors(); - for (size_t i = 0; i < factorNeighbors.size(); i++) { - const BpLinks& links = ninf(factorNeighbors[i])->getLinks(); - for (size_t j = 0; j < links.size(); j++) { - if (links[j]->varNode() != link->varNode()) { - if (Globals::verbosity > 1) { - cout << " calculating " << links[j]->toString() << endl; - } - calculateMessage (links[j]); - BpLinkMap::iterator iter = linkMap_.find (links[j]); - sortedOrder_.erase (iter->second); - iter->second = sortedOrder_.insert (links[j]); - } - } - } - // in counting bp, the message that a variable X sends to - // to a 
factor F depends on the message that F sent to the X - const BpLinks& links = ninf(link->facNode())->getLinks(); - for (size_t i = 0; i < links.size(); i++) { - if (links[i]->varNode() != link->varNode()) { - if (Globals::verbosity > 1) { - cout << " calculating " << links[i]->toString() << endl; - } - calculateMessage (links[i]); - BpLinkMap::iterator iter = linkMap_.find (links[i]); - sortedOrder_.erase (iter->second); - iter->second = sortedOrder_.insert (links[i]); - } - } - } -} - - - -void -WeightedBp::calcFactorToVarMsg (BpLink* _link) -{ - WeightedLink* link = static_cast (_link); - FacNode* src = link->facNode(); - const VarNode* dst = link->varNode(); - const BpLinks& links = ninf(src)->getLinks(); - // calculate the product of messages that were sent - // to factor `src', except from var `dst' - unsigned reps = 1; - unsigned msgSize = Util::sizeExpected (src->factor().ranges()); - Params msgProduct (msgSize, LogAware::multIdenty()); - if (Globals::logDomain) { - for (size_t i = links.size(); i-- > 0; ) { - const WeightedLink* l = static_cast (links[i]); - if ( ! (l->varNode() == dst && l->index() == link->index())) { - if (Constants::SHOW_BP_CALCS) { - cout << " message from " << links[i]->varNode()->label(); - cout << ": " ; - } - Util::apply_n_times (msgProduct, getVarToFactorMsg (links[i]), - reps, std::plus()); - if (Constants::SHOW_BP_CALCS) { - cout << endl; - } - } - reps *= links[i]->varNode()->range(); - } - } else { - for (size_t i = links.size(); i-- > 0; ) { - const WeightedLink* l = static_cast (links[i]); - if ( ! (l->varNode() == dst && l->index() == link->index())) { - if (Constants::SHOW_BP_CALCS) { - cout << " message from " << links[i]->varNode()->label(); - cout << ": " ; - } - Util::apply_n_times (msgProduct, getVarToFactorMsg (links[i]), - reps, std::multiplies()); - if (Constants::SHOW_BP_CALCS) { - cout << endl; - } - } - reps *= links[i]->varNode()->range(); - } - } - Factor result (src->factor().arguments(), - src->factor().ranges(), msgProduct); - assert (msgProduct.size() == src->factor().size()); - if (Globals::logDomain) { - result.params() += src->factor().params(); - } else { - result.params() *= src->factor().params(); - } - if (Constants::SHOW_BP_CALCS) { - cout << " message product: " << msgProduct << endl; - cout << " original factor: " << src->factor().params() << endl; - cout << " factor product: " << result.params() << endl; - } - result.sumOutAllExceptIndex (link->index()); - if (Constants::SHOW_BP_CALCS) { - cout << " marginalized: " << result.params() << endl; - } - link->nextMessage() = result.params(); - LogAware::normalize (link->nextMessage()); - if (Constants::SHOW_BP_CALCS) { - cout << " curr msg: " << link->message() << endl; - cout << " next msg: " << link->nextMessage() << endl; - } -} - - - -Params -WeightedBp::getVarToFactorMsg (const BpLink* _link) const -{ - const WeightedLink* link = static_cast (_link); - const VarNode* src = link->varNode(); - const FacNode* dst = link->facNode(); - Params msg; - if (src->hasEvidence()) { - msg.resize (src->range(), LogAware::noEvidence()); - double value = link->message()[src->getEvidence()]; - if (Constants::SHOW_BP_CALCS) { - msg[src->getEvidence()] = value; - cout << msg << "^" << link->weight() << "-1" ; - } - msg[src->getEvidence()] = LogAware::pow (value, link->weight() - 1); - } else { - msg = link->message(); - if (Constants::SHOW_BP_CALCS) { - cout << msg << "^" << link->weight() << "-1" ; - } - LogAware::pow (msg, link->weight() - 1); - } - const BpLinks& links = 
ninf(src)->getLinks();
-  if (Globals::logDomain) {
-    for (size_t i = 0; i < links.size(); i++) {
-      WeightedLink* l = static_cast (links[i]);
-      if ( ! (l->facNode() == dst && l->index() == link->index())) {
-        msg += l->powMessage();
-      }
-    }
-  } else {
-    for (size_t i = 0; i < links.size(); i++) {
-      WeightedLink* l = static_cast (links[i]);
-      if ( ! (l->facNode() == dst && l->index() == link->index())) {
-        msg *= l->powMessage();
-        if (Constants::SHOW_BP_CALCS) {
-          cout << " x " << l->nextMessage() << "^" << link->weight();
-        }
-      }
-    }
-  }
-  if (Constants::SHOW_BP_CALCS) {
-    cout << " = " << msg;
-  }
-  return msg;
-}
-
-
-
-void
-WeightedBp::printLinkInformation (void) const
-{
-  for (size_t i = 0; i < links_.size(); i++) {
-    WeightedLink* l = static_cast (links_[i]);
-    cout << l->toString() << ":" << endl;
-    cout << "    curr msg = " << l->message() << endl;
-    cout << "    next msg = " << l->nextMessage() << endl;
-    cout << "    pow msg  = " << l->powMessage() << endl;
-    cout << "    index    = " << l->index() << endl;
-    cout << "    weight   = " << l->weight() << endl;
-    cout << "    residual = " << l->residual() << endl;
-  }
-}
-
diff --git a/packages/CLPBN/horus2/WeightedBp.h b/packages/CLPBN/horus2/WeightedBp.h
deleted file mode 100644
index 7794fd509..000000000
--- a/packages/CLPBN/horus2/WeightedBp.h
+++ /dev/null
@@ -1,61 +0,0 @@
-#ifndef HORUS_WEIGHTEDBP_H
-#define HORUS_WEIGHTEDBP_H
-
-#include "BeliefProp.h"
-
-class WeightedLink : public BpLink
-{
-  public:
-    WeightedLink (FacNode* fn, VarNode* vn, size_t idx, unsigned weight)
-        : BpLink (fn, vn), index_(idx), weight_(weight),
-          pwdMsg_(vn->range(), LogAware::one()) { }
-
-    size_t index (void) const { return index_; }
-
-    unsigned weight (void) const { return weight_; }
-
-    const Params& powMessage (void) const { return pwdMsg_; }
-
-    void updateMessage (void)
-    {
-      pwdMsg_ = *nextMsg_;
-      swap (currMsg_, nextMsg_);
-      LogAware::pow (pwdMsg_, weight_);
-    }
-
-  private:
-    size_t index_;
-    unsigned weight_;
-    Params pwdMsg_;
-};
-
-
-
-class WeightedBp : public BeliefProp
-{
-  public:
-    WeightedBp (const FactorGraph& fg,
-        const vector>& weights)
-        : BeliefProp (fg), weights_(weights) { }
-
-   ~WeightedBp (void);
-
-    Params getPosterioriOf (VarId);
-
-  private:
-
-    void createLinks (void);
-
-    void maxResidualSchedule (void);
-
-    void calcFactorToVarMsg (BpLink*);
-
-    Params getVarToFactorMsg (const BpLink*) const;
-
-    void printLinkInformation (void) const;
-
-    vector> weights_;
-};
-
-#endif // HORUS_WEIGHTEDBP_H
-

From 188f3594963edddd9b3e5e9437234958803c0f96 Mon Sep 17 00:00:00 2001
From: Tiago Gomes
Date: Wed, 26 Dec 2012 21:48:05 +0000
Subject: [PATCH 43/89] Fix a warning about an uninitialized var

---
 packages/CLPBN/horus/LiftedKc.cpp | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/packages/CLPBN/horus/LiftedKc.cpp b/packages/CLPBN/horus/LiftedKc.cpp
index 46f42d5ec..4dbd9e9ca 100644
--- a/packages/CLPBN/horus/LiftedKc.cpp
+++ b/packages/CLPBN/horus/LiftedKc.cpp
@@ -1003,7 +1003,7 @@ LiftedCircuit::containsTypes (
 CircuitNodeType
 LiftedCircuit::getCircuitNodeType (const CircuitNode* node) const
 {
-  CircuitNodeType type;
+  CircuitNodeType type = CircuitNodeType::OR_NODE;
   if (dynamic_cast(node) != 0) {
     type = CircuitNodeType::OR_NODE;
   } else if (dynamic_cast(node) != 0) {

From 8bdcb65907c5e96c9bf43c9832d572b679338eb2 Mon Sep 17 00:00:00 2001
From: Tiago Gomes
Date: Wed, 26 Dec 2012 22:55:48 +0000
Subject: [PATCH 44/89] Improve variable elimination

---
 packages/CLPBN/horus/VarElim.cpp | 124 +++++++++++++------------------
packages/CLPBN/horus/VarElim.h | 15 ++-- 2 files changed, 59 insertions(+), 80 deletions(-) diff --git a/packages/CLPBN/horus/VarElim.cpp b/packages/CLPBN/horus/VarElim.cpp index 54ee18d20..d31f6ce51 100644 --- a/packages/CLPBN/horus/VarElim.cpp +++ b/packages/CLPBN/horus/VarElim.cpp @@ -6,13 +6,6 @@ #include "Util.h" -VarElim::~VarElim (void) -{ - delete factorList_.back(); -} - - - Params VarElim::solveQuery (VarIds queryVids) { @@ -24,14 +17,13 @@ VarElim::solveQuery (VarIds queryVids) } cout << endl; } + totalFactorSize_ = 0; + largestFactorSize_ = 0; factorList_.clear(); - varFactors_.clear(); - elimOrder_.clear(); + varMap_.clear(); createFactorList(); absorveEvidence(); - findEliminationOrder (queryVids); - processFactorList (queryVids); - Params params = factorList_.back()->params(); + Params params = processFactorList (queryVids); if (Globals::logDomain) { Util::exp (params); } @@ -68,15 +60,15 @@ VarElim::createFactorList (void) factorList_.reserve (facNodes.size() * 2); for (size_t i = 0; i < facNodes.size(); i++) { factorList_.push_back (new Factor (facNodes[i]->factor())); - const VarNodes& neighs = facNodes[i]->neighbors(); - for (size_t j = 0; j < neighs.size(); j++) { - unordered_map>::iterator it - = varFactors_.find (neighs[j]->varId()); - if (it == varFactors_.end()) { - it = varFactors_.insert (make_pair ( - neighs[j]->varId(), vector())).first; + const VarIds& args = facNodes[i]->factor().arguments(); + for (size_t j = 0; j < args.size(); j++) { + unordered_map>::iterator it; + it = varMap_.find (args[j]); + if (it != varMap_.end()) { + it->second.push_back (i); + } else { + varMap_[args[j]] = { i }; } - it->second.push_back (i); } } } @@ -99,15 +91,15 @@ VarElim::absorveEvidence (void) cout << varNodes[i]->label() << " = " ; cout << varNodes[i]->getEvidence() << endl; } - const vector& idxs = - varFactors_.find (varNodes[i]->varId())->second; - for (size_t j = 0; j < idxs.size(); j++) { - Factor* factor = factorList_[idxs[j]]; - if (factor->nrArguments() == 1) { - factorList_[idxs[j]] = 0; - } else { - factorList_[idxs[j]]->absorveEvidence ( + const vector& indices = varMap_[varNodes[i]->varId()]; + for (size_t j = 0; j < indices.size(); j++) { + size_t idx = indices[j]; + if (factorList_[idx]->nrArguments() > 1) { + factorList_[idx]->absorveEvidence ( varNodes[i]->varId(), varNodes[i]->getEvidence()); + } else { + delete factorList_[idx]; + factorList_[idx] = 0; } } } @@ -116,72 +108,60 @@ VarElim::absorveEvidence (void) -void -VarElim::findEliminationOrder (const VarIds& vids) +Params +VarElim::processFactorList (const VarIds& queryVids) { - elimOrder_ = ElimGraph::getEliminationOrder (factorList_, vids); -} - - - -void -VarElim::processFactorList (const VarIds& vids) -{ - totalFactorSize_ = 0; - largestFactorSize_ = 0; - for (size_t i = 0; i < elimOrder_.size(); i++) { + VarIds elimOrder = ElimGraph::getEliminationOrder ( + factorList_, queryVids); + for (size_t i = 0; i < elimOrder.size(); i++) { if (Globals::verbosity >= 2) { if (Globals::verbosity >= 3) { Util::printDashedLine(); printActiveFactors(); } cout << "-> summing out " ; - cout << fg.getVarNode (elimOrder_[i])->label() << endl; + cout << fg.getVarNode (elimOrder[i])->label() << endl; } - eliminate (elimOrder_[i]); + eliminate (elimOrder[i]); } - Factor* finalFactor = new Factor(); + Factor result; for (size_t i = 0; i < factorList_.size(); i++) { if (factorList_[i]) { - finalFactor->multiply (*factorList_[i]); + result.multiply (*factorList_[i]); delete factorList_[i]; factorList_[i] = 0; } } VarIds 
unobservedVids; - for (size_t i = 0; i < vids.size(); i++) { - if (fg.getVarNode (vids[i])->hasEvidence() == false) { - unobservedVids.push_back (vids[i]); + for (size_t i = 0; i < queryVids.size(); i++) { + if (fg.getVarNode (queryVids[i])->hasEvidence() == false) { + unobservedVids.push_back (queryVids[i]); } } - finalFactor->reorderArguments (unobservedVids); - finalFactor->normalize(); - factorList_.push_back (finalFactor); + result.reorderArguments (unobservedVids); + result.normalize(); if (Globals::verbosity > 0) { cout << "total factor size: " << totalFactorSize_ << endl; cout << "largest factor size: " << largestFactorSize_ << endl; cout << endl; } + return result.params(); } void -VarElim::eliminate (VarId elimVar) +VarElim::eliminate (VarId vid) { - Factor* result = 0; - vector& idxs = varFactors_.find (elimVar)->second; - for (size_t i = 0; i < idxs.size(); i++) { - size_t idx = idxs[i]; + Factor* result = new Factor(); + const vector& indices = varMap_[vid]; + for (size_t i = 0; i < indices.size(); i++) { + size_t idx = indices[i]; if (factorList_[idx]) { - if (result == 0) { - result = new Factor (*factorList_[idx]); - } else { - result->multiply (*factorList_[idx]); - } + result->multiply (*factorList_[idx]); delete factorList_[idx]; factorList_[idx] = 0; } @@ -190,15 +170,16 @@ VarElim::eliminate (VarId elimVar) if (result->size() > largestFactorSize_) { largestFactorSize_ = result->size(); } - if (result != 0 && result->nrArguments() != 1) { - result->sumOut (elimVar); - factorList_.push_back (result); - const VarIds& resultVarIds = result->arguments(); - for (size_t i = 0; i < resultVarIds.size(); i++) { - vector& idxs = - varFactors_.find (resultVarIds[i])->second; - idxs.push_back (factorList_.size() - 1); + if (result->nrArguments() > 1) { + result->sumOut (vid); + const VarIds& args = result->arguments(); + for (size_t i = 0; i < args.size(); i++) { + vector& indices2 = varMap_[args[i]]; + indices2.push_back (factorList_.size()); } + factorList_.push_back (result); + } else { + delete result; } } @@ -208,9 +189,10 @@ void VarElim::printActiveFactors (void) { for (size_t i = 0; i < factorList_.size(); i++) { - if (factorList_[i] != 0) { + if (factorList_[i]) { cout << factorList_[i]->getLabel() << " " ; - cout << factorList_[i]->params() << endl; + cout << factorList_[i]->params(); + cout << endl; } } } diff --git a/packages/CLPBN/horus/VarElim.h b/packages/CLPBN/horus/VarElim.h index fe1327fc0..96906bb00 100644 --- a/packages/CLPBN/horus/VarElim.h +++ b/packages/CLPBN/horus/VarElim.h @@ -16,7 +16,7 @@ class VarElim : public GroundSolver public: VarElim (const FactorGraph& fg) : GroundSolver (fg) { } - ~VarElim (void); + ~VarElim (void) { } Params solveQuery (VarIds); @@ -27,19 +27,16 @@ class VarElim : public GroundSolver void absorveEvidence (void); - void findEliminationOrder (const VarIds&); - - void processFactorList (const VarIds&); + Params processFactorList (const VarIds&); void eliminate (VarId); void printActiveFactors (void); - Factors factorList_; - VarIds elimOrder_; - unsigned largestFactorSize_; - unsigned totalFactorSize_; - unordered_map> varFactors_; + Factors factorList_; + unsigned largestFactorSize_; + unsigned totalFactorSize_; + unordered_map> varMap_; }; #endif // HORUS_VARELIM_H From cbea630fbffc7d00c7609932a3681579f3bb90e8 Mon Sep 17 00:00:00 2001 From: Tiago Gomes Date: Thu, 27 Dec 2012 12:54:58 +0000 Subject: [PATCH 45/89] Cosmetic fixes --- packages/CLPBN/horus/BayesBall.cpp | 6 --- packages/CLPBN/horus/BayesBallGraph.cpp | 2 +- 
packages/CLPBN/horus/BayesBallGraph.h | 6 +-- packages/CLPBN/horus/BeliefProp.cpp | 5 +-- packages/CLPBN/horus/BeliefProp.h | 4 +- packages/CLPBN/horus/ConstraintTree.cpp | 8 ++-- packages/CLPBN/horus/CountingBp.cpp | 2 +- packages/CLPBN/horus/CountingBp.h | 1 - packages/CLPBN/horus/ElimGraph.cpp | 53 ++++++++++------------- packages/CLPBN/horus/ElimGraph.h | 1 - packages/CLPBN/horus/Factor.cpp | 8 ++-- packages/CLPBN/horus/Factor.h | 1 - packages/CLPBN/horus/FactorGraph.cpp | 10 ++--- packages/CLPBN/horus/FactorGraph.h | 1 - packages/CLPBN/horus/GroundSolver.cpp | 4 +- packages/CLPBN/horus/GroundSolver.h | 5 +-- packages/CLPBN/horus/Histogram.h | 1 + packages/CLPBN/horus/Horus.h | 2 - packages/CLPBN/horus/LiftedBp.cpp | 4 +- packages/CLPBN/horus/LiftedKc.cpp | 23 +++++----- packages/CLPBN/horus/LiftedKc.h | 3 +- packages/CLPBN/horus/LiftedOperations.cpp | 4 +- packages/CLPBN/horus/LiftedUtils.cpp | 1 - packages/CLPBN/horus/LiftedUtils.h | 5 +-- packages/CLPBN/horus/LiftedVe.cpp | 3 +- packages/CLPBN/horus/LiftedWCNF.cpp | 3 +- packages/CLPBN/horus/LiftedWCNF.h | 9 ++-- packages/CLPBN/horus/Parfactor.cpp | 5 +-- packages/CLPBN/horus/Parfactor.h | 5 +-- packages/CLPBN/horus/ParfactorList.cpp | 4 +- packages/CLPBN/horus/ParfactorList.h | 3 +- packages/CLPBN/horus/ProbFormula.cpp | 4 +- packages/CLPBN/horus/ProbFormula.h | 5 ++- packages/CLPBN/horus/TinySet.h | 5 ++- packages/CLPBN/horus/Util.cpp | 5 +-- packages/CLPBN/horus/Util.h | 14 +++--- packages/CLPBN/horus/Var.cpp | 7 +-- packages/CLPBN/horus/Var.h | 10 ++--- packages/CLPBN/horus/WeightedBp.cpp | 2 +- 39 files changed, 106 insertions(+), 138 deletions(-) diff --git a/packages/CLPBN/horus/BayesBall.cpp b/packages/CLPBN/horus/BayesBall.cpp index 0fac25056..da0c73ff5 100644 --- a/packages/CLPBN/horus/BayesBall.cpp +++ b/packages/CLPBN/horus/BayesBall.cpp @@ -1,12 +1,6 @@ -#include #include -#include -#include -#include - #include "BayesBall.h" -#include "Util.h" FactorGraph* diff --git a/packages/CLPBN/horus/BayesBallGraph.cpp b/packages/CLPBN/horus/BayesBallGraph.cpp index 36fcbb5ee..60db22bfe 100644 --- a/packages/CLPBN/horus/BayesBallGraph.cpp +++ b/packages/CLPBN/horus/BayesBallGraph.cpp @@ -2,8 +2,8 @@ #include #include -#include #include +#include #include "BayesBallGraph.h" #include "Util.h" diff --git a/packages/CLPBN/horus/BayesBallGraph.h b/packages/CLPBN/horus/BayesBallGraph.h index 68cd9effe..eb44f0ae8 100644 --- a/packages/CLPBN/horus/BayesBallGraph.h +++ b/packages/CLPBN/horus/BayesBallGraph.h @@ -2,9 +2,7 @@ #define HORUS_BAYESBALLGRAPH_H #include -#include -#include -#include +#include #include "Var.h" #include "Horus.h" @@ -14,7 +12,7 @@ using namespace std; class BBNode : public Var { public: - BBNode (Var* v) : Var (v) , visited_(false), + BBNode (Var* v) : Var (v), visited_(false), markedOnTop_(false), markedOnBottom_(false) { } const vector& childs (void) const { return childs_; } diff --git a/packages/CLPBN/horus/BeliefProp.cpp b/packages/CLPBN/horus/BeliefProp.cpp index f56752fe4..64195c91b 100644 --- a/packages/CLPBN/horus/BeliefProp.cpp +++ b/packages/CLPBN/horus/BeliefProp.cpp @@ -1,13 +1,10 @@ #include -#include #include #include #include "BeliefProp.h" -#include "FactorGraph.h" -#include "Factor.h" #include "Indexer.h" #include "Horus.h" @@ -410,7 +407,7 @@ BeliefProp::initializeSolver (void) bool BeliefProp::converged (void) { - if (links_.size() == 0) { + if (links_.empty()) { return true; } if (nIters_ == 0) { diff --git a/packages/CLPBN/horus/BeliefProp.h b/packages/CLPBN/horus/BeliefProp.h index 
87364355c..cfdf98cbb 100644 --- a/packages/CLPBN/horus/BeliefProp.h +++ b/packages/CLPBN/horus/BeliefProp.h @@ -3,12 +3,12 @@ #include #include + #include #include "GroundSolver.h" -#include "Factor.h" #include "FactorGraph.h" -#include "Util.h" + using namespace std; diff --git a/packages/CLPBN/horus/ConstraintTree.cpp b/packages/CLPBN/horus/ConstraintTree.cpp index 3a9fe7b5e..599d28f37 100644 --- a/packages/CLPBN/horus/ConstraintTree.cpp +++ b/packages/CLPBN/horus/ConstraintTree.cpp @@ -120,7 +120,7 @@ CTNode::copySubtree (const CTNode* root1) chIt != n1->childs().end(); ++ chIt) { CTNode* chCopy = new CTNode (**chIt); n2->childs().insert_sorted (chCopy); - if ((*chIt)->nrChilds() != 0) { + if ((*chIt)->nrChilds() > 0) { stack.push_back (StackPair (*chIt, chCopy)); } } @@ -813,10 +813,10 @@ ConstraintTree::jointCountNormalize ( cts[i]->join (exclCt); } - if (excl1 != 0) { + if (excl1) { cts.push_back (excl1); } - if (excl2 != 0) { + if (excl2) { cts.push_back (excl2); } @@ -1072,7 +1072,7 @@ ConstraintTree::getTuples ( CTNodes& continuationNodes) const { if (n->isRoot() == false) { - if (currTuples.size() == 0) { + if (currTuples.empty()) { currTuples.push_back ({ n->symbol()}); } else { for (size_t i = 0; i < currTuples.size(); i++) { diff --git a/packages/CLPBN/horus/CountingBp.cpp b/packages/CLPBN/horus/CountingBp.cpp index b86d22f9f..a0836332f 100644 --- a/packages/CLPBN/horus/CountingBp.cpp +++ b/packages/CLPBN/horus/CountingBp.cpp @@ -81,7 +81,7 @@ CountingBp::solveQuery (VarIds queryVids) reprArgs.push_back (getRepresentative (queryVids[i])); } FacNode* reprFac = getRepresentative (facNodes[idx]); - assert (reprFac != 0); + assert (reprFac); res = solver_->getFactorJoint (reprFac, reprArgs); } } diff --git a/packages/CLPBN/horus/CountingBp.h b/packages/CLPBN/horus/CountingBp.h index 2cbd2f995..c6487cdd0 100644 --- a/packages/CLPBN/horus/CountingBp.h +++ b/packages/CLPBN/horus/CountingBp.h @@ -5,7 +5,6 @@ #include "GroundSolver.h" #include "FactorGraph.h" -#include "Util.h" #include "Horus.h" class VarCluster; diff --git a/packages/CLPBN/horus/ElimGraph.cpp b/packages/CLPBN/horus/ElimGraph.cpp index 1942bfb85..0292c775f 100644 --- a/packages/CLPBN/horus/ElimGraph.cpp +++ b/packages/CLPBN/horus/ElimGraph.cpp @@ -1,5 +1,3 @@ -#include - #include #include "ElimGraph.h" @@ -10,30 +8,27 @@ ElimHeuristic ElimGraph::elimHeuristic = MIN_NEIGHBORS; ElimGraph::ElimGraph (const vector& factors) { for (size_t i = 0; i < factors.size(); i++) { - if (factors[i] == 0) { // if contained just one var with evidence - continue; - } - const VarIds& vids = factors[i]->arguments(); - for (size_t j = 0; j < vids.size() - 1; j++) { - EgNode* n1 = getEgNode (vids[j]); - if (n1 == 0) { - n1 = new EgNode (vids[j], factors[i]->range (j)); - addNode (n1); - } - for (size_t k = j + 1; k < vids.size(); k++) { - EgNode* n2 = getEgNode (vids[k]); - if (n2 == 0) { - n2 = new EgNode (vids[k], factors[i]->range (k)); - addNode (n2); + if (factors[i]) { + const VarIds& args = factors[i]->arguments(); + for (size_t j = 0; j < args.size() - 1; j++) { + EgNode* n1 = getEgNode (args[j]); + if (!n1) { + n1 = new EgNode (args[j], factors[i]->range (j)); + addNode (n1); } - if (neighbors (n1, n2) == false) { - addEdge (n1, n2); + for (size_t k = j + 1; k < args.size(); k++) { + EgNode* n2 = getEgNode (args[k]); + if (!n2) { + n2 = new EgNode (args[k], factors[i]->range (k)); + addNode (n2); + } + if (!neighbors (n1, n2)) { + addEdge (n1, n2); + } } } - } - if (vids.size() == 1) { - if (getEgNode (vids[0]) == 0) { - addNode 
(new EgNode (vids[0], factors[i]->range (0))); + if (args.size() == 1 && !getEgNode (args[0])) { + addNode (new EgNode (args[0], factors[i]->range (0))); } } } @@ -51,16 +46,16 @@ ElimGraph::~ElimGraph (void) VarIds -ElimGraph::getEliminatingOrder (const VarIds& exclude) +ElimGraph::getEliminatingOrder (const VarIds& excludedVids) { VarIds elimOrder; unmarked_.reserve (nodes_.size()); for (size_t i = 0; i < nodes_.size(); i++) { - if (Util::contains (exclude, nodes_[i]->varId()) == false) { + if (Util::contains (excludedVids, nodes_[i]->varId()) == false) { unmarked_.insert (nodes_[i]); } } - size_t nrVarsToEliminate = nodes_.size() - exclude.size(); + size_t nrVarsToEliminate = nodes_.size() - excludedVids.size(); for (size_t i = 0; i < nrVarsToEliminate; i++) { EgNode* node = getLowestCostNode(); unmarked_.remove (node); @@ -104,7 +99,7 @@ ElimGraph::exportToGraphViz ( } out << "strict graph {" << endl; for (size_t i = 0; i < nodes_.size(); i++) { - if (showNeighborless || nodes_[i]->neighbors().size() != 0) { + if (showNeighborless || nodes_[i]->neighbors().empty() == false) { out << '"' << nodes_[i]->label() << '"' << endl; } } @@ -178,7 +173,7 @@ EgNode* ElimGraph::getLowestCostNode (void) const { EgNode* bestNode = 0; - unsigned minCost = std::numeric_limits::max(); + unsigned minCost = Util::maxUnsigned(); EGNeighs::const_iterator it; switch (elimHeuristic) { case MIN_NEIGHBORS: { @@ -233,7 +228,7 @@ ElimGraph::connectAllNeighbors (const EgNode* n) if (neighs.size() > 0) { for (size_t i = 0; i < neighs.size() - 1; i++) { for (size_t j = i + 1; j < neighs.size(); j++) { - if ( ! neighbors (neighs[i], neighs[j])) { + if (!neighbors (neighs[i], neighs[j])) { addEdge (neighs[i], neighs[j]); } } diff --git a/packages/CLPBN/horus/ElimGraph.h b/packages/CLPBN/horus/ElimGraph.h index 8188b5ba6..575258829 100644 --- a/packages/CLPBN/horus/ElimGraph.h +++ b/packages/CLPBN/horus/ElimGraph.h @@ -7,7 +7,6 @@ #include "TinySet.h" #include "Horus.h" - using namespace std; enum ElimHeuristic diff --git a/packages/CLPBN/horus/Factor.cpp b/packages/CLPBN/horus/Factor.cpp index 9b8ad0be7..d0acade1f 100644 --- a/packages/CLPBN/horus/Factor.cpp +++ b/packages/CLPBN/horus/Factor.cpp @@ -7,7 +7,7 @@ #include #include "Factor.h" -#include "Indexer.h" +#include "Var.h" Factor::Factor (const Factor& g) @@ -100,11 +100,11 @@ Factor::sumOutAllExceptIndex (size_t idx) void Factor::multiply (Factor& g) { - if (args_.size() == 0) { + if (args_.empty()) { clone (g); - return; + } else { + TFactor::multiply (g); } - TFactor::multiply (g); } diff --git a/packages/CLPBN/horus/Factor.h b/packages/CLPBN/horus/Factor.h index dd004ee24..415a3d1df 100644 --- a/packages/CLPBN/horus/Factor.h +++ b/packages/CLPBN/horus/Factor.h @@ -3,7 +3,6 @@ #include -#include "Var.h" #include "Indexer.h" #include "Util.h" diff --git a/packages/CLPBN/horus/FactorGraph.cpp b/packages/CLPBN/horus/FactorGraph.cpp index df9dd7941..85925abf6 100644 --- a/packages/CLPBN/horus/FactorGraph.cpp +++ b/packages/CLPBN/horus/FactorGraph.cpp @@ -1,13 +1,13 @@ -#include -#include #include +#include +#include + #include -#include #include +#include #include "FactorGraph.h" -#include "Factor.h" #include "BayesBall.h" #include "Util.h" @@ -146,7 +146,7 @@ FactorGraph::readFromLibDaiFormat (const char* fileName) ignoreLines (is); is >> ranges[j]; VarNode* var = getVarNode (vids[j]); - if (var != 0 && ranges[j] != var->range()) { + if (var && ranges[j] != var->range()) { cerr << "Error: variable `" << vids[j] << "' appears in two or " ; cerr << "more factors 
with a different range." << endl; } diff --git a/packages/CLPBN/horus/FactorGraph.h b/packages/CLPBN/horus/FactorGraph.h index 960f799cc..3417acf38 100644 --- a/packages/CLPBN/horus/FactorGraph.h +++ b/packages/CLPBN/horus/FactorGraph.h @@ -9,7 +9,6 @@ using namespace std; - class FacNode; class VarNode : public Var diff --git a/packages/CLPBN/horus/GroundSolver.cpp b/packages/CLPBN/horus/GroundSolver.cpp index 4cd3fdbd2..b683c8233 100644 --- a/packages/CLPBN/horus/GroundSolver.cpp +++ b/packages/CLPBN/horus/GroundSolver.cpp @@ -1,8 +1,8 @@ #include "GroundSolver.h" -#include "Util.h" #include "BeliefProp.h" #include "CountingBp.h" #include "VarElim.h" +#include "Util.h" void @@ -47,7 +47,7 @@ Params GroundSolver::getJointByConditioning ( GroundSolverType solverType, FactorGraph fg, - const VarIds& jointVarIds) const + const VarIds& jointVarIds) { VarNodes jointVars; for (size_t i = 0; i < jointVarIds.size(); i++) { diff --git a/packages/CLPBN/horus/GroundSolver.h b/packages/CLPBN/horus/GroundSolver.h index 18b81454b..ec6bf60c7 100644 --- a/packages/CLPBN/horus/GroundSolver.h +++ b/packages/CLPBN/horus/GroundSolver.h @@ -4,7 +4,6 @@ #include #include "FactorGraph.h" -#include "Var.h" #include "Horus.h" @@ -25,8 +24,8 @@ class GroundSolver void printAllPosterioris (void); - Params getJointByConditioning (GroundSolverType, - FactorGraph, const VarIds& jointVarIds) const; + static Params getJointByConditioning (GroundSolverType, + FactorGraph, const VarIds& jointVarIds); protected: const FactorGraph& fg; diff --git a/packages/CLPBN/horus/Histogram.h b/packages/CLPBN/horus/Histogram.h index af0c4595e..c9bdd78e1 100644 --- a/packages/CLPBN/horus/Histogram.h +++ b/packages/CLPBN/horus/Histogram.h @@ -2,6 +2,7 @@ #define HORUS_HISTOGRAM_H #include + #include using namespace std; diff --git a/packages/CLPBN/horus/Horus.h b/packages/CLPBN/horus/Horus.h index 7e5f12c8e..960e7bb6a 100644 --- a/packages/CLPBN/horus/Horus.h +++ b/packages/CLPBN/horus/Horus.h @@ -1,8 +1,6 @@ #ifndef HORUS_HORUS_H #define HORUS_HORUS_H -#include - #include #define DISALLOW_COPY_AND_ASSIGN(TypeName) \ diff --git a/packages/CLPBN/horus/LiftedBp.cpp b/packages/CLPBN/horus/LiftedBp.cpp index d3f757704..bdf761e4f 100644 --- a/packages/CLPBN/horus/LiftedBp.cpp +++ b/packages/CLPBN/horus/LiftedBp.cpp @@ -1,7 +1,7 @@ #include "LiftedBp.h" +#include "LiftedOperations.h" #include "WeightedBp.h" #include "FactorGraph.h" -#include "LiftedOperations.h" LiftedBp::LiftedBp (const ParfactorList& parfactorList) @@ -182,7 +182,7 @@ LiftedBp::rangeOfGround (const Ground& gr) } ++ it; } - return std::numeric_limits::max(); + return Util::maxUnsigned(); } diff --git a/packages/CLPBN/horus/LiftedKc.cpp b/packages/CLPBN/horus/LiftedKc.cpp index 4dbd9e9ca..c366c282d 100644 --- a/packages/CLPBN/horus/LiftedKc.cpp +++ b/packages/CLPBN/horus/LiftedKc.cpp @@ -5,7 +5,6 @@ #include "Indexer.h" - OrNode::~OrNode (void) { delete leftBranch_; @@ -806,7 +805,7 @@ LiftedCircuit::independentLiteral ( LitLvTypesSet LiftedCircuit::smoothCircuit (CircuitNode* node) { - assert (node != 0); + assert (node); LitLvTypesSet propagLits; switch (getCircuitNodeType (node)) { @@ -1004,23 +1003,23 @@ CircuitNodeType LiftedCircuit::getCircuitNodeType (const CircuitNode* node) const { CircuitNodeType type = CircuitNodeType::OR_NODE; - if (dynamic_cast(node) != 0) { + if (dynamic_cast(node)) { type = CircuitNodeType::OR_NODE; - } else if (dynamic_cast(node) != 0) { + } else if (dynamic_cast(node)) { type = CircuitNodeType::AND_NODE; - } else if (dynamic_cast(node) != 0) { 
+ } else if (dynamic_cast<const SetOrNode*>(node)) { type = CircuitNodeType::SET_OR_NODE; - } else if (dynamic_cast<const SetAndNode*>(node) != 0) { + } else if (dynamic_cast<const SetAndNode*>(node)) { type = CircuitNodeType::SET_AND_NODE; - } else if (dynamic_cast<const IncExcNode*>(node) != 0) { + } else if (dynamic_cast<const IncExcNode*>(node)) { type = CircuitNodeType::INC_EXC_NODE; - } else if (dynamic_cast<const LeafNode*>(node) != 0) { + } else if (dynamic_cast<const LeafNode*>(node)) { type = CircuitNodeType::LEAF_NODE; - } else if (dynamic_cast<const SmoothNode*>(node) != 0) { + } else if (dynamic_cast<const SmoothNode*>(node)) { type = CircuitNodeType::SMOOTH_NODE; - } else if (dynamic_cast<const TrueNode*>(node) != 0) { + } else if (dynamic_cast<const TrueNode*>(node)) { type = CircuitNodeType::TRUE_NODE; - } else if (dynamic_cast<const CompilationFailedNode*>(node) != 0) { + } else if (dynamic_cast<const CompilationFailedNode*>(node)) { type = CircuitNodeType::COMPILATION_FAILED_NODE; } else { assert (false); @@ -1033,7 +1032,7 @@ LiftedCircuit::getCircuitNodeType (const CircuitNode* node) const void LiftedCircuit::exportToGraphViz (CircuitNode* node, ofstream& os) { - assert (node != 0); + assert (node); static unsigned nrAuxNodes = 0; stringstream ss; diff --git a/packages/CLPBN/horus/LiftedKc.h b/packages/CLPBN/horus/LiftedKc.h index a4cd2dbeb..6dc4440ea 100644 --- a/packages/CLPBN/horus/LiftedKc.h +++ b/packages/CLPBN/horus/LiftedKc.h @@ -1,9 +1,8 @@ #ifndef HORUS_LIFTEDKC_H #define HORUS_LIFTEDKC_H - -#include "LiftedWCNF.h" #include "LiftedSolver.h" +#include "LiftedWCNF.h" #include "ParfactorList.h" diff --git a/packages/CLPBN/horus/LiftedOperations.cpp b/packages/CLPBN/horus/LiftedOperations.cpp index 986a22c03..6ccc41b3b 100644 --- a/packages/CLPBN/horus/LiftedOperations.cpp +++ b/packages/CLPBN/horus/LiftedOperations.cpp @@ -65,7 +65,7 @@ LiftedOperations::runWeakBayesBall ( ParfactorList::iterator it = pfList.begin(); while (it != pfList.end()) { PrvGroup group = (*it)->findGroup (query[i]); - if (group != numeric_limits<PrvGroup>::max()) { + if (group != std::numeric_limits<PrvGroup>::max()) { todo.push (group); done.insert (group); break; @@ -128,7 +128,7 @@ LiftedOperations::absorveEvidence ( it = pfList.remove (it); Parfactors absorvedPfs = absorve (obsFormulas[i], pf); if (absorvedPfs.empty() == false) { - if (absorvedPfs.size() == 1 && absorvedPfs[0] == 0) { + if (absorvedPfs.size() == 1 && !absorvedPfs[0]) { // just remove pf; } else { Util::addToVector (newPfs, absorvedPfs); diff --git a/packages/CLPBN/horus/LiftedUtils.cpp b/packages/CLPBN/horus/LiftedUtils.cpp index 0233a8554..b85990ec1 100644 --- a/packages/CLPBN/horus/LiftedUtils.cpp +++ b/packages/CLPBN/horus/LiftedUtils.cpp @@ -1,6 +1,5 @@ #include -#include #include #include diff --git a/packages/CLPBN/horus/LiftedUtils.h b/packages/CLPBN/horus/LiftedUtils.h index de0782f1c..70e0d1071 100644 --- a/packages/CLPBN/horus/LiftedUtils.h +++ b/packages/CLPBN/horus/LiftedUtils.h @@ -1,12 +1,11 @@ #ifndef HORUS_LIFTEDUTILS_H #define HORUS_LIFTEDUTILS_H -#include #include + #include #include - #include "TinySet.h" #include "Util.h" @@ -107,7 +106,7 @@ class Ground size_t arity (void) const { return args_.size(); } - bool isAtom (void) const { return args_.size() == 0; } + bool isAtom (void) const { return args_.empty(); } friend ostream& operator<< (ostream &os, const Ground& gr); diff --git a/packages/CLPBN/horus/LiftedVe.cpp b/packages/CLPBN/horus/LiftedVe.cpp index bcce3e100..5c4c7464e 100644 --- a/packages/CLPBN/horus/LiftedVe.cpp +++ b/packages/CLPBN/horus/LiftedVe.cpp @@ -1,4 +1,5 @@ #include + #include #include "LiftedVe.h" @@ -710,7 +711,7 @@ LiftedVe::getBestOperation (const Grounds& query) validOps = LiftedOperator::getValidOps (pfList_, query); for (size_t i = 0; i < 
validOps.size(); i++) { double cost = validOps[i]->getLogCost(); - if ((bestOp == 0) || (cost < bestCost)) { + if (!bestOp || cost < bestCost) { bestOp = validOps[i]; bestCost = cost; } diff --git a/packages/CLPBN/horus/LiftedWCNF.cpp b/packages/CLPBN/horus/LiftedWCNF.cpp index a75741fa8..682dddd6d 100644 --- a/packages/CLPBN/horus/LiftedWCNF.cpp +++ b/packages/CLPBN/horus/LiftedWCNF.cpp @@ -3,11 +3,10 @@ #include "Indexer.h" - bool Literal::isGround (ConstraintTree constr, LogVarSet ipgLogVars) const { - if (logVars_.size() == 0) { + if (logVars_.empty()) { return true; } LogVarSet lvs (logVars_); diff --git a/packages/CLPBN/horus/LiftedWCNF.h b/packages/CLPBN/horus/LiftedWCNF.h index e0f901b7c..2aed7a6cf 100644 --- a/packages/CLPBN/horus/LiftedWCNF.h +++ b/packages/CLPBN/horus/LiftedWCNF.h @@ -1,15 +1,14 @@ #ifndef HORUS_LIFTEDWCNF_H #define HORUS_LIFTEDWCNF_H +#include + #include "ParfactorList.h" using namespace std; -typedef long LiteralId; - class ConstraintTree; - enum LogVarType { FULL_LV, @@ -17,8 +16,8 @@ enum LogVarType NEG_LV }; -typedef vector LogVarTypes; - +typedef long LiteralId; +typedef vector LogVarTypes; class Literal diff --git a/packages/CLPBN/horus/Parfactor.cpp b/packages/CLPBN/horus/Parfactor.cpp index fb5518d1b..bc326801b 100644 --- a/packages/CLPBN/horus/Parfactor.cpp +++ b/packages/CLPBN/horus/Parfactor.cpp @@ -1,4 +1,3 @@ - #include "Parfactor.h" #include "Histogram.h" #include "Indexer.h" @@ -443,7 +442,7 @@ Parfactor::findGroup (const Ground& ground) const { size_t idx = indexOfGround (ground); return idx == args_.size() - ? numeric_limits::max() + ? std::numeric_limits::max() : args_[idx].group(); } @@ -452,7 +451,7 @@ Parfactor::findGroup (const Ground& ground) const bool Parfactor::containsGround (const Ground& ground) const { - return findGroup (ground) != numeric_limits::max(); + return findGroup (ground) != std::numeric_limits::max(); } diff --git a/packages/CLPBN/horus/Parfactor.h b/packages/CLPBN/horus/Parfactor.h index 2f4b45cd7..6cd04b23c 100644 --- a/packages/CLPBN/horus/Parfactor.h +++ b/packages/CLPBN/horus/Parfactor.h @@ -1,15 +1,12 @@ #ifndef HORUS_PARFACTOR_H #define HORUS_PARFACTOR_H -#include -#include - +#include "Factor.h" #include "ProbFormula.h" #include "ConstraintTree.h" #include "LiftedUtils.h" #include "Horus.h" -#include "Factor.h" class Parfactor : public TFactor { diff --git a/packages/CLPBN/horus/ParfactorList.cpp b/packages/CLPBN/horus/ParfactorList.cpp index 2962f144e..3481696de 100644 --- a/packages/CLPBN/horus/ParfactorList.cpp +++ b/packages/CLPBN/horus/ParfactorList.cpp @@ -1,5 +1,7 @@ #include +#include + #include "ParfactorList.h" @@ -412,7 +414,7 @@ ParfactorList::shatter (Parfactor* g1, Parfactor* g2) { ProbFormulas& formulas1 = g1->arguments(); ProbFormulas& formulas2 = g2->arguments(); - assert (g1 != 0 && g2 != 0 && g1 != g2); + assert (g1 && g2 && g1 != g2); for (size_t i = 0; i < formulas1.size(); i++) { for (size_t j = 0; j < formulas2.size(); j++) { if (formulas1[i].sameSkeletonAs (formulas2[j])) { diff --git a/packages/CLPBN/horus/ParfactorList.h b/packages/CLPBN/horus/ParfactorList.h index 1c6404dcb..377299952 100644 --- a/packages/CLPBN/horus/ParfactorList.h +++ b/packages/CLPBN/horus/ParfactorList.h @@ -2,7 +2,6 @@ #define HORUS_PARFACTORLIST_H #include -#include #include "Parfactor.h" #include "ProbFormula.h" @@ -11,6 +10,8 @@ using namespace std; +class Parfactor; + class ParfactorList { public: diff --git a/packages/CLPBN/horus/ProbFormula.cpp b/packages/CLPBN/horus/ProbFormula.cpp index 
081cccf95..67473734c 100644 --- a/packages/CLPBN/horus/ProbFormula.cpp +++ b/packages/CLPBN/horus/ProbFormula.cpp @@ -40,7 +40,7 @@ ProbFormula::indexOf (LogVar X) const bool ProbFormula::isAtom (void) const { - return logVars_.size() == 0; + return logVars_.empty(); } @@ -125,7 +125,7 @@ PrvGroup ProbFormula::getNewGroup (void) { freeGroup_ ++; - assert (freeGroup_ != numeric_limits::max()); + assert (freeGroup_ != std::numeric_limits::max()); return freeGroup_; } diff --git a/packages/CLPBN/horus/ProbFormula.h b/packages/CLPBN/horus/ProbFormula.h index 52bc7d4a0..48824b5db 100644 --- a/packages/CLPBN/horus/ProbFormula.h +++ b/packages/CLPBN/horus/ProbFormula.h @@ -14,10 +14,11 @@ class ProbFormula public: ProbFormula (Symbol f, const LogVars& lvs, unsigned range) : functor_(f), logVars_(lvs), range_(range), - countedLogVar_(), group_(numeric_limits::max()) { } + countedLogVar_(), group_(std::numeric_limits::max()) { } ProbFormula (Symbol f, unsigned r) - : functor_(f), range_(r), group_(numeric_limits::max()) { } + : functor_(f), range_(r), + group_(std::numeric_limits::max()) { } Symbol functor (void) const { return functor_; } diff --git a/packages/CLPBN/horus/TinySet.h b/packages/CLPBN/horus/TinySet.h index f7ff6e083..f307e4530 100644 --- a/packages/CLPBN/horus/TinySet.h +++ b/packages/CLPBN/horus/TinySet.h @@ -1,9 +1,10 @@ #ifndef HORUS_TINYSET_H #define HORUS_TINYSET_H -#include #include +#include + using namespace std; @@ -186,7 +187,7 @@ class TinySet bool empty (void) const { - return size() == 0; + return vec_.empty(); } typename vector::size_type size (void) const diff --git a/packages/CLPBN/horus/Util.cpp b/packages/CLPBN/horus/Util.cpp index 6afd56f43..4258908d0 100644 --- a/packages/CLPBN/horus/Util.cpp +++ b/packages/CLPBN/horus/Util.cpp @@ -1,6 +1,3 @@ -#include - -#include #include #include "Util.h" @@ -341,7 +338,7 @@ normalize (Params& v) if (Globals::logDomain) { double sum = std::accumulate (v.begin(), v.end(), LogAware::addIdenty(), Util::logSum); - assert (sum != -numeric_limits::infinity()); + assert (sum != -std::numeric_limits::infinity()); v -= sum; } else { double sum = std::accumulate (v.begin(), v.end(), 0.0); diff --git a/packages/CLPBN/horus/Util.h b/packages/CLPBN/horus/Util.h index 1a4bfa441..f73651013 100644 --- a/packages/CLPBN/horus/Util.h +++ b/packages/CLPBN/horus/Util.h @@ -3,16 +3,17 @@ #include #include -#include #include +#include + #include -#include #include +#include #include -#include #include +#include #include "Horus.h" @@ -20,7 +21,7 @@ using namespace std; namespace { -const double NEG_INF = -numeric_limits::infinity(); +const double NEG_INF = -std::numeric_limits::infinity(); }; @@ -42,7 +43,8 @@ template bool contains ( template size_t indexOf (const vector&, const T&); template -void apply_n_times (Params& v1, const Params& v2, unsigned repetitions, Operation); +void apply_n_times (Params& v1, const Params& v2, + unsigned repetitions, Operation); template void log (vector&); @@ -245,7 +247,7 @@ Util::logSum (double x, double y) inline unsigned Util::maxUnsigned (void) { - return numeric_limits::max(); + return std::numeric_limits::max(); } diff --git a/packages/CLPBN/horus/Var.cpp b/packages/CLPBN/horus/Var.cpp index 80fc0abe6..f84257585 100644 --- a/packages/CLPBN/horus/Var.cpp +++ b/packages/CLPBN/horus/Var.cpp @@ -1,10 +1,7 @@ -#include #include #include "Var.h" -using namespace std; - unordered_map Var::varsInfo_; @@ -14,7 +11,7 @@ Var::Var (const Var* v) varId_ = v->varId(); range_ = v->range(); evidence_ = v->getEvidence(); - 
index_ = std::numeric_limits::max(); + index_ = Util::maxUnsigned(); } @@ -26,7 +23,7 @@ Var::Var (VarId varId, unsigned range, int evidence) varId_ = varId; range_ = range; evidence_ = evidence; - index_ = std::numeric_limits::max(); + index_ = Util::maxUnsigned(); } diff --git a/packages/CLPBN/horus/Var.h b/packages/CLPBN/horus/Var.h index 8ab580c3a..d1b3461ac 100644 --- a/packages/CLPBN/horus/Var.h +++ b/packages/CLPBN/horus/Var.h @@ -3,8 +3,6 @@ #include -#include - #include "Util.h" #include "Horus.h" @@ -14,7 +12,8 @@ using namespace std; struct VarInfo { - VarInfo (string l, const States& sts) : label(l), states(sts) { } + VarInfo (string l, const States& sts) + : label(l), states(sts) { } string label; States states; }; @@ -55,8 +54,7 @@ class Var bool operator!= (const Var& var) const { - assert (!(varId_ == var.varId() && range_ != var.range())); - return varId_ != var.varId(); + return !(*this == var); } bool isValidState (int); @@ -86,7 +84,7 @@ class Var static bool varsHaveInfo (void) { - return varsInfo_.size() != 0; + return varsInfo_.empty() == false; } static void clearVarsInfo (void) diff --git a/packages/CLPBN/horus/WeightedBp.cpp b/packages/CLPBN/horus/WeightedBp.cpp index 8416c4592..9f6fca8df 100644 --- a/packages/CLPBN/horus/WeightedBp.cpp +++ b/packages/CLPBN/horus/WeightedBp.cpp @@ -18,7 +18,7 @@ WeightedBp::getPosterioriOf (VarId vid) runSolver(); } VarNode* var = fg.getVarNode (vid); - assert (var != 0); + assert (var); Params probs; if (var->hasEvidence()) { probs.resize (var->range(), LogAware::noEvidence()); From de0a118ae5301f3c4406488af05b6d5e75eb6975 Mon Sep 17 00:00:00 2001 From: Tiago Gomes Date: Thu, 27 Dec 2012 15:00:30 +0000 Subject: [PATCH 46/89] Move belief propagation stuff out from Horus.h --- packages/CLPBN/horus/BeliefProp.cpp | 42 ++++++++++++++++------------- packages/CLPBN/horus/BeliefProp.h | 12 +++++++++ packages/CLPBN/horus/CountingBp.cpp | 15 +++++------ packages/CLPBN/horus/Horus.h | 14 ---------- packages/CLPBN/horus/LiftedBp.cpp | 15 +++++------ packages/CLPBN/horus/Util.cpp | 24 +++++------------ packages/CLPBN/horus/WeightedBp.cpp | 2 +- 7 files changed, 57 insertions(+), 67 deletions(-) diff --git a/packages/CLPBN/horus/BeliefProp.cpp b/packages/CLPBN/horus/BeliefProp.cpp index 64195c91b..e8d5244ad 100644 --- a/packages/CLPBN/horus/BeliefProp.cpp +++ b/packages/CLPBN/horus/BeliefProp.cpp @@ -9,6 +9,11 @@ #include "Horus.h" +MsgSchedule BeliefProp::schedule = MsgSchedule::SEQ_FIXED; +double BeliefProp::accuracy = 0.0001; +unsigned BeliefProp::maxIter = 1000; + + BeliefProp::BeliefProp (const FactorGraph& fg) : GroundSolver (fg) { runned_ = false; @@ -48,15 +53,14 @@ BeliefProp::printSolverFlags (void) const stringstream ss; ss << "belief propagation [" ; ss << "schedule=" ; - typedef BpOptions::Schedule Sch; - switch (BpOptions::schedule) { - case Sch::SEQ_FIXED: ss << "seq_fixed"; break; - case Sch::SEQ_RANDOM: ss << "seq_random"; break; - case Sch::PARALLEL: ss << "parallel"; break; - case Sch::MAX_RESIDUAL: ss << "max_residual"; break; + switch (schedule) { + case MsgSchedule::SEQ_FIXED: ss << "seq_fixed"; break; + case MsgSchedule::SEQ_RANDOM: ss << "seq_random"; break; + case MsgSchedule::PARALLEL: ss << "parallel"; break; + case MsgSchedule::MAX_RESIDUAL: ss << "max_residual"; break; } - ss << ",max_iter=" << Util::toString (BpOptions::maxIter); - ss << ",accuracy=" << Util::toString (BpOptions::accuracy); + ss << ",max_iter=" << Util::toString (maxIter); + ss << ",accuracy=" << Util::toString (accuracy); ss << ",log_domain=" << 
Util::toString (Globals::logDomain); ss << "]" ; cout << ss.str() << endl; @@ -153,21 +157,21 @@ BeliefProp::runSolver (void) { initializeSolver(); nIters_ = 0; - while (!converged() && nIters_ < BpOptions::maxIter) { + while (!converged() && nIters_ < maxIter) { nIters_ ++; if (Globals::verbosity > 1) { Util::printHeader (string ("Iteration ") + Util::toString (nIters_)); } - switch (BpOptions::schedule) { - case BpOptions::Schedule::SEQ_RANDOM: + switch (schedule) { + case MsgSchedule::SEQ_RANDOM: std::random_shuffle (links_.begin(), links_.end()); // no break - case BpOptions::Schedule::SEQ_FIXED: + case MsgSchedule::SEQ_FIXED: for (size_t i = 0; i < links_.size(); i++) { calculateAndUpdateMessage (links_[i]); } break; - case BpOptions::Schedule::PARALLEL: + case MsgSchedule::PARALLEL: for (size_t i = 0; i < links_.size(); i++) { calculateMessage (links_[i]); } @@ -175,13 +179,13 @@ BeliefProp::runSolver (void) updateMessage(links_[i]); } break; - case BpOptions::Schedule::MAX_RESIDUAL: + case MsgSchedule::MAX_RESIDUAL: maxResidualSchedule(); break; } } if (Globals::verbosity > 0) { - if (nIters_ < BpOptions::maxIter) { + if (nIters_ < maxIter) { cout << "Belief propagation converged in " ; cout << nIters_ << " iterations" << endl; } else { @@ -233,7 +237,7 @@ BeliefProp::maxResidualSchedule (void) SortedOrder::iterator it = sortedOrder_.begin(); BpLink* link = *it; - if (link->residual() < BpOptions::accuracy) { + if (link->residual() < accuracy) { return; } updateMessage (link); @@ -423,9 +427,9 @@ BeliefProp::converged (void) return false; } bool converged = true; - if (BpOptions::schedule == BpOptions::Schedule::MAX_RESIDUAL) { + if (schedule == MsgSchedule::MAX_RESIDUAL) { double maxResidual = (*(sortedOrder_.begin()))->residual(); - if (maxResidual > BpOptions::accuracy) { + if (maxResidual > accuracy) { converged = false; } else { converged = true; @@ -436,7 +440,7 @@ BeliefProp::converged (void) if (Globals::verbosity > 1) { cout << links_[i]->toString() + " residual = " << residual << endl; } - if (residual > BpOptions::accuracy) { + if (residual > accuracy) { converged = false; if (Globals::verbosity < 2) { break; diff --git a/packages/CLPBN/horus/BeliefProp.h b/packages/CLPBN/horus/BeliefProp.h index cfdf98cbb..a7f9a3961 100644 --- a/packages/CLPBN/horus/BeliefProp.h +++ b/packages/CLPBN/horus/BeliefProp.h @@ -13,6 +13,14 @@ using namespace std; +enum MsgSchedule { + SEQ_FIXED, + SEQ_RANDOM, + PARALLEL, + MAX_RESIDUAL +}; + + class BpLink { public: @@ -98,6 +106,10 @@ class BeliefProp : public GroundSolver virtual Params getJointDistributionOf (const VarIds&); + static MsgSchedule schedule; + static double accuracy; + static unsigned maxIter; + protected: void runSolver (void); diff --git a/packages/CLPBN/horus/CountingBp.cpp b/packages/CLPBN/horus/CountingBp.cpp index a0836332f..876104f2a 100644 --- a/packages/CLPBN/horus/CountingBp.cpp +++ b/packages/CLPBN/horus/CountingBp.cpp @@ -37,15 +37,14 @@ CountingBp::printSolverFlags (void) const stringstream ss; ss << "counting bp [" ; ss << "schedule=" ; - typedef BpOptions::Schedule Sch; - switch (BpOptions::schedule) { - case Sch::SEQ_FIXED: ss << "seq_fixed"; break; - case Sch::SEQ_RANDOM: ss << "seq_random"; break; - case Sch::PARALLEL: ss << "parallel"; break; - case Sch::MAX_RESIDUAL: ss << "max_residual"; break; + switch (WeightedBp::schedule) { + case MsgSchedule::SEQ_FIXED: ss << "seq_fixed"; break; + case MsgSchedule::SEQ_RANDOM: ss << "seq_random"; break; + case MsgSchedule::PARALLEL: ss << "parallel"; break; + case 
MsgSchedule::MAX_RESIDUAL: ss << "max_residual"; break; } - ss << ",max_iter=" << BpOptions::maxIter; - ss << ",accuracy=" << BpOptions::accuracy; + ss << ",max_iter=" << WeightedBp::maxIter; + ss << ",accuracy=" << WeightedBp::accuracy; ss << ",log_domain=" << Util::toString (Globals::logDomain); ss << ",chkif=" << Util::toString (CountingBp::checkForIdenticalFactors); ss << "]" ; diff --git a/packages/CLPBN/horus/Horus.h b/packages/CLPBN/horus/Horus.h index 960e7bb6a..17141d63e 100644 --- a/packages/CLPBN/horus/Horus.h +++ b/packages/CLPBN/horus/Horus.h @@ -67,19 +67,5 @@ const unsigned PRECISION = 6; }; - -namespace BpOptions -{ - enum Schedule { - SEQ_FIXED, - SEQ_RANDOM, - PARALLEL, - MAX_RESIDUAL - }; - extern Schedule schedule; - extern double accuracy; - extern unsigned maxIter; -} - #endif // HORUS_HORUS_H diff --git a/packages/CLPBN/horus/LiftedBp.cpp b/packages/CLPBN/horus/LiftedBp.cpp index bdf761e4f..7cfb49c23 100644 --- a/packages/CLPBN/horus/LiftedBp.cpp +++ b/packages/CLPBN/horus/LiftedBp.cpp @@ -63,15 +63,14 @@ LiftedBp::printSolverFlags (void) const stringstream ss; ss << "lifted bp [" ; ss << "schedule=" ; - typedef BpOptions::Schedule Sch; - switch (BpOptions::schedule) { - case Sch::SEQ_FIXED: ss << "seq_fixed"; break; - case Sch::SEQ_RANDOM: ss << "seq_random"; break; - case Sch::PARALLEL: ss << "parallel"; break; - case Sch::MAX_RESIDUAL: ss << "max_residual"; break; + switch (WeightedBp::schedule) { + case MsgSchedule::SEQ_FIXED: ss << "seq_fixed"; break; + case MsgSchedule::SEQ_RANDOM: ss << "seq_random"; break; + case MsgSchedule::PARALLEL: ss << "parallel"; break; + case MsgSchedule::MAX_RESIDUAL: ss << "max_residual"; break; } - ss << ",max_iter=" << BpOptions::maxIter; - ss << ",accuracy=" << BpOptions::accuracy; + ss << ",max_iter=" << WeightedBp::maxIter; + ss << ",accuracy=" << WeightedBp::accuracy; ss << ",log_domain=" << Util::toString (Globals::logDomain); ss << "]" ; cout << ss.str() << endl; diff --git a/packages/CLPBN/horus/Util.cpp b/packages/CLPBN/horus/Util.cpp index 4258908d0..810be63c8 100644 --- a/packages/CLPBN/horus/Util.cpp +++ b/packages/CLPBN/horus/Util.cpp @@ -3,6 +3,7 @@ #include "Util.h" #include "Indexer.h" #include "ElimGraph.h" +#include "BeliefProp.h" namespace Globals { @@ -18,17 +19,6 @@ GroundSolverType groundSolver = GroundSolverType::VE; -namespace BpOptions { -Schedule schedule = BpOptions::Schedule::SEQ_FIXED; -//Schedule schedule = BpOptions::Schedule::SEQ_RANDOM; -//Schedule schedule = BpOptions::Schedule::PARALLEL; -//Schedule schedule = BpOptions::Schedule::MAX_RESIDUAL; -double accuracy = 0.0001; -unsigned maxIter = 1000; -} - - - namespace Util { @@ -248,13 +238,13 @@ setHorusFlag (string key, string value) } } else if (key == "schedule") { if ( value == "seq_fixed") { - BpOptions::schedule = BpOptions::Schedule::SEQ_FIXED; + BeliefProp::schedule = MsgSchedule::SEQ_FIXED; } else if (value == "seq_random") { - BpOptions::schedule = BpOptions::Schedule::SEQ_RANDOM; + BeliefProp::schedule = MsgSchedule::SEQ_RANDOM; } else if (value == "parallel") { - BpOptions::schedule = BpOptions::Schedule::PARALLEL; + BeliefProp::schedule = MsgSchedule::PARALLEL; } else if (value == "max_residual") { - BpOptions::schedule = BpOptions::Schedule::MAX_RESIDUAL; + BeliefProp::schedule = MsgSchedule::MAX_RESIDUAL; } else { cerr << "warning: invalid value `" << value << "' " ; cerr << "for `" << key << "'" << endl; @@ -263,11 +253,11 @@ setHorusFlag (string key, string value) } else if (key == "accuracy") { stringstream ss; ss << value; - ss >> 
BpOptions::accuracy; + ss >> BeliefProp::accuracy; } else if (key == "max_iter") { stringstream ss; ss << value; - ss >> BpOptions::maxIter; + ss >> BeliefProp::maxIter; } else if (key == "use_logarithms") { if ( value == "true") { Globals::logDomain = true; diff --git a/packages/CLPBN/horus/WeightedBp.cpp b/packages/CLPBN/horus/WeightedBp.cpp index 9f6fca8df..28a31bb60 100644 --- a/packages/CLPBN/horus/WeightedBp.cpp +++ b/packages/CLPBN/horus/WeightedBp.cpp @@ -107,7 +107,7 @@ WeightedBp::maxResidualSchedule (void) if (Globals::verbosity >= 1) { cout << "updating " << (*sortedOrder_.begin())->toString() << endl; } - if (link->residual() < BpOptions::accuracy) { + if (link->residual() < accuracy) { return; } link->updateMessage(); From b996436b2446f42b657a37ad94a4908c740c5520 Mon Sep 17 00:00:00 2001 From: Tiago Gomes Date: Thu, 27 Dec 2012 15:05:40 +0000 Subject: [PATCH 47/89] Reorganize --- packages/CLPBN/horus/BeliefProp.h | 30 ++++++++++++++---------------- 1 file changed, 14 insertions(+), 16 deletions(-) diff --git a/packages/CLPBN/horus/BeliefProp.h b/packages/CLPBN/horus/BeliefProp.h index a7f9a3961..beaf73b1e 100644 --- a/packages/CLPBN/horus/BeliefProp.h +++ b/packages/CLPBN/horus/BeliefProp.h @@ -106,26 +106,12 @@ class BeliefProp : public GroundSolver virtual Params getJointDistributionOf (const VarIds&); + Params getFactorJoint (FacNode* fn, const VarIds&); + static MsgSchedule schedule; static double accuracy; static unsigned maxIter; - protected: - void runSolver (void); - - virtual void createLinks (void); - - virtual void maxResidualSchedule (void); - - virtual void calcFactorToVarMsg (BpLink*); - - virtual Params getVarToFactorMsg (const BpLink*) const; - - virtual Params getJointByConditioning (const VarIds&) const; - - public: - Params getFactorJoint (FacNode* fn, const VarIds&); - protected: SPNodeInfo* ninf (const VarNode* var) const { @@ -176,6 +162,18 @@ class BeliefProp : public GroundSolver } }; + void runSolver (void); + + virtual void createLinks (void); + + virtual void maxResidualSchedule (void); + + virtual void calcFactorToVarMsg (BpLink*); + + virtual Params getVarToFactorMsg (const BpLink*) const; + + virtual Params getJointByConditioning (const VarIds&) const; + BpLinks links_; unsigned nIters_; vector varsI_; From 7b7f663ac61e8d2f498832b755c635110def688a Mon Sep 17 00:00:00 2001 From: Tiago Gomes Date: Thu, 27 Dec 2012 15:44:40 +0000 Subject: [PATCH 48/89] Don't use public members for solver flags --- packages/CLPBN/horus/BeliefProp.cpp | 26 +++++++++++++------------- packages/CLPBN/horus/BeliefProp.h | 18 +++++++++++++++--- packages/CLPBN/horus/CountingBp.cpp | 6 +++--- packages/CLPBN/horus/ElimGraph.cpp | 6 +++--- packages/CLPBN/horus/ElimGraph.h | 6 +++++- packages/CLPBN/horus/LiftedBp.cpp | 6 +++--- packages/CLPBN/horus/Util.cpp | 26 +++++++++++++++----------- packages/CLPBN/horus/VarElim.cpp | 13 ++++++------- packages/CLPBN/horus/WeightedBp.cpp | 2 +- 9 files changed, 64 insertions(+), 45 deletions(-) diff --git a/packages/CLPBN/horus/BeliefProp.cpp b/packages/CLPBN/horus/BeliefProp.cpp index e8d5244ad..bf8f30a79 100644 --- a/packages/CLPBN/horus/BeliefProp.cpp +++ b/packages/CLPBN/horus/BeliefProp.cpp @@ -9,9 +9,9 @@ #include "Horus.h" -MsgSchedule BeliefProp::schedule = MsgSchedule::SEQ_FIXED; -double BeliefProp::accuracy = 0.0001; -unsigned BeliefProp::maxIter = 1000; +double BeliefProp::accuracy_ = 0.0001; +unsigned BeliefProp::maxIter_ = 1000; +MsgSchedule BeliefProp::schedule_ = MsgSchedule::SEQ_FIXED; BeliefProp::BeliefProp (const 
FactorGraph& fg) : GroundSolver (fg) @@ -53,14 +53,14 @@ BeliefProp::printSolverFlags (void) const stringstream ss; ss << "belief propagation [" ; ss << "schedule=" ; - switch (schedule) { + switch (schedule_) { case MsgSchedule::SEQ_FIXED: ss << "seq_fixed"; break; case MsgSchedule::SEQ_RANDOM: ss << "seq_random"; break; case MsgSchedule::PARALLEL: ss << "parallel"; break; case MsgSchedule::MAX_RESIDUAL: ss << "max_residual"; break; } - ss << ",max_iter=" << Util::toString (maxIter); - ss << ",accuracy=" << Util::toString (accuracy); + ss << ",max_iter=" << Util::toString (maxIter_); + ss << ",accuracy=" << Util::toString (accuracy_); ss << ",log_domain=" << Util::toString (Globals::logDomain); ss << "]" ; cout << ss.str() << endl; @@ -157,12 +157,12 @@ BeliefProp::runSolver (void) { initializeSolver(); nIters_ = 0; - while (!converged() && nIters_ < maxIter) { + while (!converged() && nIters_ < maxIter_) { nIters_ ++; if (Globals::verbosity > 1) { Util::printHeader (string ("Iteration ") + Util::toString (nIters_)); } - switch (schedule) { + switch (schedule_) { case MsgSchedule::SEQ_RANDOM: std::random_shuffle (links_.begin(), links_.end()); // no break @@ -185,7 +185,7 @@ BeliefProp::runSolver (void) } } if (Globals::verbosity > 0) { - if (nIters_ < maxIter) { + if (nIters_ < maxIter_) { cout << "Belief propagation converged in " ; cout << nIters_ << " iterations" << endl; } else { @@ -237,7 +237,7 @@ BeliefProp::maxResidualSchedule (void) SortedOrder::iterator it = sortedOrder_.begin(); BpLink* link = *it; - if (link->residual() < accuracy) { + if (link->residual() < accuracy_) { return; } updateMessage (link); @@ -427,9 +427,9 @@ BeliefProp::converged (void) return false; } bool converged = true; - if (schedule == MsgSchedule::MAX_RESIDUAL) { + if (schedule_ == MsgSchedule::MAX_RESIDUAL) { double maxResidual = (*(sortedOrder_.begin()))->residual(); - if (maxResidual > accuracy) { + if (maxResidual > accuracy_) { converged = false; } else { converged = true; @@ -440,7 +440,7 @@ BeliefProp::converged (void) if (Globals::verbosity > 1) { cout << links_[i]->toString() + " residual = " << residual << endl; } - if (residual > accuracy) { + if (residual > accuracy_) { converged = false; if (Globals::verbosity < 2) { break; diff --git a/packages/CLPBN/horus/BeliefProp.h b/packages/CLPBN/horus/BeliefProp.h index beaf73b1e..6399c65e7 100644 --- a/packages/CLPBN/horus/BeliefProp.h +++ b/packages/CLPBN/horus/BeliefProp.h @@ -108,9 +108,17 @@ class BeliefProp : public GroundSolver Params getFactorJoint (FacNode* fn, const VarIds&); - static MsgSchedule schedule; - static double accuracy; - static unsigned maxIter; + static double accuracy (void) { return accuracy_; } + + static void setAccuracy (double acc) { accuracy_ = acc; } + + static unsigned maxIterations (void) { return maxIter_; } + + static void setMaxIterations (unsigned mi) { maxIter_ = mi; } + + static MsgSchedule msgSchedule (void) { return schedule_; } + + static void setMsgSchedule (MsgSchedule sch) { schedule_ = sch; } protected: SPNodeInfo* ninf (const VarNode* var) const @@ -186,6 +194,10 @@ class BeliefProp : public GroundSolver typedef unordered_map BpLinkMap; BpLinkMap linkMap_; + static double accuracy_; + static unsigned maxIter_; + static MsgSchedule schedule_; + private: void initializeSolver (void); diff --git a/packages/CLPBN/horus/CountingBp.cpp b/packages/CLPBN/horus/CountingBp.cpp index 876104f2a..006bf99fd 100644 --- a/packages/CLPBN/horus/CountingBp.cpp +++ b/packages/CLPBN/horus/CountingBp.cpp @@ -37,14 +37,14 @@ 
CountingBp::printSolverFlags (void) const stringstream ss; ss << "counting bp [" ; ss << "schedule=" ; - switch (WeightedBp::schedule) { + switch (WeightedBp::msgSchedule()) { case MsgSchedule::SEQ_FIXED: ss << "seq_fixed"; break; case MsgSchedule::SEQ_RANDOM: ss << "seq_random"; break; case MsgSchedule::PARALLEL: ss << "parallel"; break; case MsgSchedule::MAX_RESIDUAL: ss << "max_residual"; break; } - ss << ",max_iter=" << WeightedBp::maxIter; - ss << ",accuracy=" << WeightedBp::accuracy; + ss << ",max_iter=" << WeightedBp::maxIterations(); + ss << ",accuracy=" << WeightedBp::accuracy(); ss << ",log_domain=" << Util::toString (Globals::logDomain); ss << ",chkif=" << Util::toString (CountingBp::checkForIdenticalFactors); ss << "]" ; diff --git a/packages/CLPBN/horus/ElimGraph.cpp b/packages/CLPBN/horus/ElimGraph.cpp index 0292c775f..3a808a8c2 100644 --- a/packages/CLPBN/horus/ElimGraph.cpp +++ b/packages/CLPBN/horus/ElimGraph.cpp @@ -2,7 +2,7 @@ #include "ElimGraph.h" -ElimHeuristic ElimGraph::elimHeuristic = MIN_NEIGHBORS; +ElimHeuristic ElimGraph::elimHeuristic_ = MIN_NEIGHBORS; ElimGraph::ElimGraph (const vector& factors) @@ -132,7 +132,7 @@ ElimGraph::getEliminationOrder ( const Factors& factors, VarIds excludedVids) { - if (elimHeuristic == ElimHeuristic::SEQUENTIAL) { + if (elimHeuristic_ == ElimHeuristic::SEQUENTIAL) { VarIds allVids; Factors::const_iterator first = factors.begin(); Factors::const_iterator end = factors.end(); @@ -175,7 +175,7 @@ ElimGraph::getLowestCostNode (void) const EgNode* bestNode = 0; unsigned minCost = Util::maxUnsigned(); EGNeighs::const_iterator it; - switch (elimHeuristic) { + switch (elimHeuristic_) { case MIN_NEIGHBORS: { for (it = unmarked_.begin(); it != unmarked_.end(); ++ it) { unsigned cost = getNeighborsCost (*it); diff --git a/packages/CLPBN/horus/ElimGraph.h b/packages/CLPBN/horus/ElimGraph.h index 575258829..881f59759 100644 --- a/packages/CLPBN/horus/ElimGraph.h +++ b/packages/CLPBN/horus/ElimGraph.h @@ -58,7 +58,9 @@ class ElimGraph static VarIds getEliminationOrder (const Factors&, VarIds); - static ElimHeuristic elimHeuristic; + static ElimHeuristic elimHeuristic (void) { return elimHeuristic_; } + + static void setElimHeuristic (ElimHeuristic eh) { elimHeuristic_ = eh; } private: @@ -132,6 +134,8 @@ class ElimGraph vector nodes_; TinySet unmarked_; unordered_map varMap_; + + static ElimHeuristic elimHeuristic_; }; #endif // HORUS_ELIMGRAPH_H diff --git a/packages/CLPBN/horus/LiftedBp.cpp b/packages/CLPBN/horus/LiftedBp.cpp index 7cfb49c23..18f056f8a 100644 --- a/packages/CLPBN/horus/LiftedBp.cpp +++ b/packages/CLPBN/horus/LiftedBp.cpp @@ -63,14 +63,14 @@ LiftedBp::printSolverFlags (void) const stringstream ss; ss << "lifted bp [" ; ss << "schedule=" ; - switch (WeightedBp::schedule) { + switch (WeightedBp::msgSchedule()) { case MsgSchedule::SEQ_FIXED: ss << "seq_fixed"; break; case MsgSchedule::SEQ_RANDOM: ss << "seq_random"; break; case MsgSchedule::PARALLEL: ss << "parallel"; break; case MsgSchedule::MAX_RESIDUAL: ss << "max_residual"; break; } - ss << ",max_iter=" << WeightedBp::maxIter; - ss << ",accuracy=" << WeightedBp::accuracy; + ss << ",max_iter=" << WeightedBp::maxIterations(); + ss << ",accuracy=" << WeightedBp::accuracy(); ss << ",log_domain=" << Util::toString (Globals::logDomain); ss << "]" ; cout << ss.str() << endl; diff --git a/packages/CLPBN/horus/Util.cpp b/packages/CLPBN/horus/Util.cpp index 810be63c8..9fad10705 100644 --- a/packages/CLPBN/horus/Util.cpp +++ b/packages/CLPBN/horus/Util.cpp @@ -222,15 +222,15 @@ 
setHorusFlag (string key, string value) } } else if (key == "elim_heuristic") { if ( value == "sequential") { - ElimGraph::elimHeuristic = ElimHeuristic::SEQUENTIAL; + ElimGraph::setElimHeuristic (ElimHeuristic::SEQUENTIAL); } else if (value == "min_neighbors") { - ElimGraph::elimHeuristic = ElimHeuristic::MIN_NEIGHBORS; + ElimGraph::setElimHeuristic (ElimHeuristic::MIN_NEIGHBORS); } else if (value == "min_weight") { - ElimGraph::elimHeuristic = ElimHeuristic::MIN_WEIGHT; + ElimGraph::setElimHeuristic (ElimHeuristic::MIN_WEIGHT); } else if (value == "min_fill") { - ElimGraph::elimHeuristic = ElimHeuristic::MIN_FILL; + ElimGraph::setElimHeuristic (ElimHeuristic::MIN_FILL); } else if (value == "weighted_min_fill") { - ElimGraph::elimHeuristic = ElimHeuristic::WEIGHTED_MIN_FILL; + ElimGraph::setElimHeuristic (ElimHeuristic::WEIGHTED_MIN_FILL); } else { cerr << "warning: invalid value `" << value << "' " ; cerr << "for `" << key << "'" << endl; @@ -238,13 +238,13 @@ setHorusFlag (string key, string value) } } else if (key == "schedule") { if ( value == "seq_fixed") { - BeliefProp::schedule = MsgSchedule::SEQ_FIXED; + BeliefProp::setMsgSchedule (MsgSchedule::SEQ_FIXED); } else if (value == "seq_random") { - BeliefProp::schedule = MsgSchedule::SEQ_RANDOM; + BeliefProp::setMsgSchedule (MsgSchedule::SEQ_RANDOM); } else if (value == "parallel") { - BeliefProp::schedule = MsgSchedule::PARALLEL; + BeliefProp::setMsgSchedule (MsgSchedule::PARALLEL); } else if (value == "max_residual") { - BeliefProp::schedule = MsgSchedule::MAX_RESIDUAL; + BeliefProp::setMsgSchedule (MsgSchedule::MAX_RESIDUAL); } else { cerr << "warning: invalid value `" << value << "' " ; cerr << "for `" << key << "'" << endl; @@ -252,12 +252,16 @@ setHorusFlag (string key, string value) } } else if (key == "accuracy") { stringstream ss; + double acc; ss << value; - ss >> BeliefProp::accuracy; + ss >> acc; + BeliefProp::setAccuracy (acc); } else if (key == "max_iter") { stringstream ss; + unsigned mi; ss << value; - ss >> BeliefProp::maxIter; + ss >> mi; + BeliefProp::setMaxIterations (mi); } else if (key == "use_logarithms") { if ( value == "true") { Globals::logDomain = true; diff --git a/packages/CLPBN/horus/VarElim.cpp b/packages/CLPBN/horus/VarElim.cpp index d31f6ce51..e1b11edf8 100644 --- a/packages/CLPBN/horus/VarElim.cpp +++ b/packages/CLPBN/horus/VarElim.cpp @@ -38,13 +38,12 @@ VarElim::printSolverFlags (void) const stringstream ss; ss << "variable elimination [" ; ss << "elim_heuristic=" ; - ElimHeuristic eh = ElimGraph::elimHeuristic; - switch (eh) { - case SEQUENTIAL: ss << "sequential"; break; - case MIN_NEIGHBORS: ss << "min_neighbors"; break; - case MIN_WEIGHT: ss << "min_weight"; break; - case MIN_FILL: ss << "min_fill"; break; - case WEIGHTED_MIN_FILL: ss << "weighted_min_fill"; break; + switch (ElimGraph::elimHeuristic()) { + case ElimHeuristic::SEQUENTIAL: ss << "sequential"; break; + case ElimHeuristic::MIN_NEIGHBORS: ss << "min_neighbors"; break; + case ElimHeuristic::MIN_WEIGHT: ss << "min_weight"; break; + case ElimHeuristic::MIN_FILL: ss << "min_fill"; break; + case ElimHeuristic::WEIGHTED_MIN_FILL: ss << "weighted_min_fill"; break; } ss << ",log_domain=" << Util::toString (Globals::logDomain); ss << "]" ; diff --git a/packages/CLPBN/horus/WeightedBp.cpp b/packages/CLPBN/horus/WeightedBp.cpp index 28a31bb60..269891f78 100644 --- a/packages/CLPBN/horus/WeightedBp.cpp +++ b/packages/CLPBN/horus/WeightedBp.cpp @@ -107,7 +107,7 @@ WeightedBp::maxResidualSchedule (void) if (Globals::verbosity >= 1) { cout << 
"updating " << (*sortedOrder_.begin())->toString() << endl; } - if (link->residual() < accuracy) { + if (link->residual() < accuracy_) { return; } link->updateMessage(); From 485c3e34b2d1e044936bbe103f4de1b469ab916a Mon Sep 17 00:00:00 2001 From: Tiago Gomes Date: Thu, 27 Dec 2012 22:25:45 +0000 Subject: [PATCH 49/89] Use more DISALLOW_COPY_AND_ASSIGN --- packages/CLPBN/horus/BeliefProp.h | 7 +++++++ packages/CLPBN/horus/ConstraintTree.h | 13 +++++-------- packages/CLPBN/horus/CountingBp.h | 7 ++++++- packages/CLPBN/horus/ElimGraph.h | 3 ++- packages/CLPBN/horus/Factor.h | 1 + packages/CLPBN/horus/FactorGraph.h | 16 ++++++++-------- packages/CLPBN/horus/GroundSolver.h | 2 ++ packages/CLPBN/horus/Histogram.h | 4 ++++ packages/CLPBN/horus/Horus.h | 10 ++++++++-- packages/CLPBN/horus/Indexer.h | 4 ++++ packages/CLPBN/horus/LiftedBp.h | 2 ++ packages/CLPBN/horus/LiftedKc.h | 5 ++++- packages/CLPBN/horus/LiftedOperations.h | 2 ++ packages/CLPBN/horus/LiftedSolver.h | 3 +++ packages/CLPBN/horus/LiftedUtils.h | 1 - packages/CLPBN/horus/LiftedVe.h | 15 ++++++++++++++- packages/CLPBN/horus/LiftedWCNF.h | 5 ++++- packages/CLPBN/horus/Parfactor.h | 3 +-- packages/CLPBN/horus/VarElim.h | 2 ++ packages/CLPBN/horus/WeightedBp.h | 5 ++++- 20 files changed, 83 insertions(+), 27 deletions(-) diff --git a/packages/CLPBN/horus/BeliefProp.h b/packages/CLPBN/horus/BeliefProp.h index 6399c65e7..68d47ac5c 100644 --- a/packages/CLPBN/horus/BeliefProp.h +++ b/packages/CLPBN/horus/BeliefProp.h @@ -76,6 +76,9 @@ class BpLink Params* currMsg_; Params* nextMsg_; double residual_; + + private: + DISALLOW_COPY_AND_ASSIGN (BpLink); }; typedef vector BpLinks; @@ -84,10 +87,12 @@ typedef vector BpLinks; class SPNodeInfo { public: + SPNodeInfo (void) { } void addBpLink (BpLink* link) { links_.push_back (link); } const BpLinks& getLinks (void) { return links_; } private: BpLinks links_; + DISALLOW_COPY_AND_ASSIGN (SPNodeInfo); }; @@ -204,6 +209,8 @@ class BeliefProp : public GroundSolver bool converged (void); virtual void printLinkInformation (void) const; + + DISALLOW_COPY_AND_ASSIGN (BeliefProp); }; #endif // HORUS_BELIEFPROP_H diff --git a/packages/CLPBN/horus/ConstraintTree.h b/packages/CLPBN/horus/ConstraintTree.h index cccb070b4..2c0c09464 100644 --- a/packages/CLPBN/horus/ConstraintTree.h +++ b/packages/CLPBN/horus/ConstraintTree.h @@ -23,7 +23,6 @@ typedef vector ConstraintTrees; class CTNode { public: - struct CompareSymbol { bool operator() (const CTNode* n1, const CTNode* n2) const @@ -33,11 +32,9 @@ class CTNode }; private: - typedef TinySet CTChilds_; public: - CTNode (const CTNode& n, const CTChilds_& chs = CTChilds_()) : symbol_(n.symbol()), childs_(chs), level_(n.level()) { } @@ -52,8 +49,6 @@ class CTNode void setSymbol (const Symbol s) { symbol_ = s; } - public: - CTChilds_& childs (void) { return childs_; } const CTChilds_& childs (void) const { return childs_; } @@ -89,9 +84,11 @@ class CTNode private: void updateChildLevels (CTNode*, unsigned); - Symbol symbol_; - CTChilds_ childs_; - unsigned level_; + Symbol symbol_; + CTChilds_ childs_; + unsigned level_; + + DISALLOW_ASSIGN (CTNode); }; ostream& operator<< (ostream &out, const CTNode&); diff --git a/packages/CLPBN/horus/CountingBp.h b/packages/CLPBN/horus/CountingBp.h index c6487cdd0..4f674e687 100644 --- a/packages/CLPBN/horus/CountingBp.h +++ b/packages/CLPBN/horus/CountingBp.h @@ -75,6 +75,8 @@ class VarCluster private: VarNodes members_; VarNode* repr_; + + DISALLOW_COPY_AND_ASSIGN (VarCluster); }; @@ -98,6 +100,8 @@ class FacCluster FacNodes 
members_; FacNode* repr_; VarClusters varClusters_; + + DISALLOW_COPY_AND_ASSIGN (FacCluster); }; @@ -166,7 +170,6 @@ class CountingBp : public GroundSolver unsigned getWeight (const FacCluster*, const VarCluster*, size_t index) const; - Color freeColor_; Colors varColors_; Colors facColors_; @@ -175,6 +178,8 @@ class CountingBp : public GroundSolver VarClusterMap varClusterMap_; const FactorGraph* compressedFg_; WeightedBp* solver_; + + DISALLOW_COPY_AND_ASSIGN (CountingBp); }; #endif // HORUS_COUNTINGBP_H diff --git a/packages/CLPBN/horus/ElimGraph.h b/packages/CLPBN/horus/ElimGraph.h index 881f59759..a636d316d 100644 --- a/packages/CLPBN/horus/ElimGraph.h +++ b/packages/CLPBN/horus/ElimGraph.h @@ -63,7 +63,6 @@ class ElimGraph static void setElimHeuristic (ElimHeuristic eh) { elimHeuristic_ = eh; } private: - void addEdge (EgNode* n1, EgNode* n2) { assert (n1 != n2); @@ -136,6 +135,8 @@ class ElimGraph unordered_map varMap_; static ElimHeuristic elimHeuristic_; + + DISALLOW_COPY_AND_ASSIGN (ElimGraph); }; #endif // HORUS_ELIMGRAPH_H diff --git a/packages/CLPBN/horus/Factor.h b/packages/CLPBN/horus/Factor.h index 415a3d1df..ea11d1137 100644 --- a/packages/CLPBN/horus/Factor.h +++ b/packages/CLPBN/horus/Factor.h @@ -287,6 +287,7 @@ class Factor : public TFactor void clone (const Factor& f); + DISALLOW_ASSIGN (Factor); }; #endif // HORUS_FACTOR_H diff --git a/packages/CLPBN/horus/FactorGraph.h b/packages/CLPBN/horus/FactorGraph.h index 3417acf38..a235d8d26 100644 --- a/packages/CLPBN/horus/FactorGraph.h +++ b/packages/CLPBN/horus/FactorGraph.h @@ -25,9 +25,9 @@ class VarNode : public Var const FacNodes& neighbors (void) const { return neighs_; } private: - DISALLOW_COPY_AND_ASSIGN (VarNode); - FacNodes neighs_; + + DISALLOW_COPY_AND_ASSIGN (VarNode); }; @@ -52,11 +52,11 @@ class FacNode string getLabel (void) { return factor_.getLabel(); } private: - DISALLOW_COPY_AND_ASSIGN (FacNode); - VarNodes neighs_; Factor factor_; size_t index_; + + DISALLOW_COPY_AND_ASSIGN (FacNode); }; @@ -113,8 +113,6 @@ class FactorGraph void exportToLibDaiFormat (const char*) const; private: - // DISALLOW_COPY_AND_ASSIGN (FactorGraph); - void ignoreLines (std::ifstream&) const; bool containsCycle (void) const; @@ -128,11 +126,13 @@ class FactorGraph VarNodes varNodes_; FacNodes facNodes_; - BayesBallGraph structure_; - bool bayesFactors_; + BayesBallGraph structure_; + bool bayesFactors_; typedef unordered_map VarMap; VarMap varMap_; + + DISALLOW_ASSIGN (FactorGraph); }; diff --git a/packages/CLPBN/horus/GroundSolver.h b/packages/CLPBN/horus/GroundSolver.h index ec6bf60c7..eac28b045 100644 --- a/packages/CLPBN/horus/GroundSolver.h +++ b/packages/CLPBN/horus/GroundSolver.h @@ -29,6 +29,8 @@ class GroundSolver protected: const FactorGraph& fg; + + DISALLOW_COPY_AND_ASSIGN (GroundSolver); }; #endif // HORUS_GROUNDSOLVER_H diff --git a/packages/CLPBN/horus/Histogram.h b/packages/CLPBN/horus/Histogram.h index c9bdd78e1..9b65f9f02 100644 --- a/packages/CLPBN/horus/Histogram.h +++ b/packages/CLPBN/horus/Histogram.h @@ -5,6 +5,8 @@ #include +#include "Horus.h" + using namespace std; typedef vector Histogram; @@ -40,6 +42,8 @@ class HistogramSet unsigned size_; Histogram hist_; + + DISALLOW_COPY_AND_ASSIGN (HistogramSet); }; #endif // HORUS_HISTOGRAM_H diff --git a/packages/CLPBN/horus/Horus.h b/packages/CLPBN/horus/Horus.h index 17141d63e..46d619b9d 100644 --- a/packages/CLPBN/horus/Horus.h +++ b/packages/CLPBN/horus/Horus.h @@ -1,12 +1,18 @@ #ifndef HORUS_HORUS_H #define HORUS_HORUS_H -#include - #define 
DISALLOW_COPY_AND_ASSIGN(TypeName) \ TypeName(const TypeName&); \ void operator=(const TypeName&) +#define DISALLOW_COPY(TypeName) \ + TypeName(const TypeName&) + +#define DISALLOW_ASSIGN(TypeName) \ + void operator=(const TypeName&) + +#include + using namespace std; class Var; diff --git a/packages/CLPBN/horus/Indexer.h b/packages/CLPBN/horus/Indexer.h index cb8135866..a4141ebed 100644 --- a/packages/CLPBN/horus/Indexer.h +++ b/packages/CLPBN/horus/Indexer.h @@ -120,6 +120,8 @@ class Indexer const Ranges& ranges_; size_t size_; vector offsets_; + + DISALLOW_COPY_AND_ASSIGN (Indexer); }; @@ -239,6 +241,8 @@ class MapIndexer const Ranges& ranges_; bool valid_; vector offsets_; + + DISALLOW_COPY_AND_ASSIGN (MapIndexer); }; diff --git a/packages/CLPBN/horus/LiftedBp.h b/packages/CLPBN/horus/LiftedBp.h index 274503f29..01807ddfb 100644 --- a/packages/CLPBN/horus/LiftedBp.h +++ b/packages/CLPBN/horus/LiftedBp.h @@ -37,6 +37,8 @@ class LiftedBp : public LiftedSolver WeightedBp* solver_; FactorGraph* fg_; + DISALLOW_COPY_AND_ASSIGN (LiftedBp); + }; #endif // HORUS_LIFTEDBP_H diff --git a/packages/CLPBN/horus/LiftedKc.h b/packages/CLPBN/horus/LiftedKc.h index 6dc4440ea..949787f01 100644 --- a/packages/CLPBN/horus/LiftedKc.h +++ b/packages/CLPBN/horus/LiftedKc.h @@ -217,7 +217,6 @@ class LiftedCircuit void exportToGraphViz (const char*); private: - void compile (CircuitNode** follow, Clauses& clauses); bool tryUnitPropagation (CircuitNode** follow, Clauses& clauses); @@ -273,6 +272,8 @@ class LiftedCircuit Clauses backupClauses_; unordered_map originClausesMap_; unordered_map explanationMap_; + + DISALLOW_COPY_AND_ASSIGN (LiftedCircuit); }; @@ -293,6 +294,8 @@ class LiftedKc : public LiftedSolver LiftedWCNF* lwcnf_; LiftedCircuit* circuit_; ParfactorList pfList_; + + DISALLOW_COPY_AND_ASSIGN (LiftedKc); }; #endif // HORUS_LIFTEDKC_H diff --git a/packages/CLPBN/horus/LiftedOperations.h b/packages/CLPBN/horus/LiftedOperations.h index 1f4b53d3a..d17bb5359 100644 --- a/packages/CLPBN/horus/LiftedOperations.h +++ b/packages/CLPBN/horus/LiftedOperations.h @@ -21,6 +21,8 @@ class LiftedOperations private: static Parfactors absorve (ObservedFormula&, Parfactor*); + + DISALLOW_COPY_AND_ASSIGN (LiftedOperations); }; #endif // HORUS_LIFTEDOPERATIONS_H diff --git a/packages/CLPBN/horus/LiftedSolver.h b/packages/CLPBN/horus/LiftedSolver.h index 5429fc5b3..afdfe1461 100644 --- a/packages/CLPBN/horus/LiftedSolver.h +++ b/packages/CLPBN/horus/LiftedSolver.h @@ -21,6 +21,9 @@ class LiftedSolver protected: const ParfactorList& parfactorList; + + private: + DISALLOW_COPY_AND_ASSIGN (LiftedSolver); }; #endif // HORUS_LIFTEDSOLVER_H diff --git a/packages/CLPBN/horus/LiftedUtils.h b/packages/CLPBN/horus/LiftedUtils.h index 70e0d1071..66d5c6e07 100644 --- a/packages/CLPBN/horus/LiftedUtils.h +++ b/packages/CLPBN/horus/LiftedUtils.h @@ -160,6 +160,5 @@ class Substitution }; - #endif // HORUS_LIFTEDUTILS_H diff --git a/packages/CLPBN/horus/LiftedVe.h b/packages/CLPBN/horus/LiftedVe.h index b747d9da3..d66f42e7f 100644 --- a/packages/CLPBN/horus/LiftedVe.h +++ b/packages/CLPBN/horus/LiftedVe.h @@ -23,6 +23,9 @@ class LiftedOperator static vector getParfactorsWithGroup ( ParfactorList&, PrvGroup group); + + private: + DISALLOW_ASSIGN (LiftedOperator); }; @@ -48,6 +51,8 @@ class ProductOperator : public LiftedOperator ParfactorList::iterator g1_; ParfactorList::iterator g2_; ParfactorList& pfList_; + + DISALLOW_COPY_AND_ASSIGN (ProductOperator); }; @@ -74,6 +79,8 @@ class SumOutOperator : public LiftedOperator PrvGroup 
group_; ParfactorList& pfList_; + + DISALLOW_COPY_AND_ASSIGN (SumOutOperator); }; @@ -101,6 +108,8 @@ class CountingOperator : public LiftedOperator ParfactorList::iterator pfIter_; LogVar X_; ParfactorList& pfList_; + + DISALLOW_COPY_AND_ASSIGN (CountingOperator); }; @@ -123,11 +132,13 @@ class GroundOperator : public LiftedOperator string toString (void); private: - vector> getAffectedFormulas (void); + vector> getAffectedFormulas (void); PrvGroup group_; unsigned lvIndex_; ParfactorList& pfList_; + + DISALLOW_COPY_AND_ASSIGN (GroundOperator); }; @@ -149,6 +160,8 @@ class LiftedVe : public LiftedSolver ParfactorList pfList_; double largestCost_; + + DISALLOW_COPY_AND_ASSIGN (LiftedVe); }; #endif // HORUS_LIFTEDVE_H diff --git a/packages/CLPBN/horus/LiftedWCNF.h b/packages/CLPBN/horus/LiftedWCNF.h index 2aed7a6cf..619d2c5a5 100644 --- a/packages/CLPBN/horus/LiftedWCNF.h +++ b/packages/CLPBN/horus/LiftedWCNF.h @@ -148,6 +148,8 @@ class Clause LogVarSet posCountedLvs_; LogVarSet negCountedLvs_; ConstraintTree constr_; + + DISALLOW_ASSIGN (Clause); }; typedef vector Clauses; @@ -220,7 +222,6 @@ class LiftedWCNF void printClauses (void) const; private: - LiteralId getLiteralId (PrvGroup prvGroup, unsigned range); void addIndicatorClauses (const ParfactorList& pfList); @@ -232,6 +233,8 @@ class LiftedWCNF const ParfactorList& pfList_; unordered_map> map_; unordered_map> weights_; + + DISALLOW_COPY_AND_ASSIGN (LiftedWCNF); }; #endif // HORUS_LIFTEDWCNF_H diff --git a/packages/CLPBN/horus/Parfactor.h b/packages/CLPBN/horus/Parfactor.h index 6cd04b23c..f21fc5f69 100644 --- a/packages/CLPBN/horus/Parfactor.h +++ b/packages/CLPBN/horus/Parfactor.h @@ -96,7 +96,6 @@ class Parfactor : public TFactor static bool canMultiply (Parfactor*, Parfactor*); private: - void simplifyCountingFormulas (size_t fIdx); void simplifyParfactor (size_t fIdx1, size_t fIdx2); @@ -113,9 +112,9 @@ class Parfactor : public TFactor ConstraintTree* constr_; + DISALLOW_ASSIGN (Parfactor); }; - typedef vector Parfactors; #endif // HORUS_PARFACTOR_H diff --git a/packages/CLPBN/horus/VarElim.h b/packages/CLPBN/horus/VarElim.h index 96906bb00..da05e51a1 100644 --- a/packages/CLPBN/horus/VarElim.h +++ b/packages/CLPBN/horus/VarElim.h @@ -37,6 +37,8 @@ class VarElim : public GroundSolver unsigned largestFactorSize_; unsigned totalFactorSize_; unordered_map> varMap_; + + DISALLOW_COPY_AND_ASSIGN (VarElim); }; #endif // HORUS_VARELIM_H diff --git a/packages/CLPBN/horus/WeightedBp.h b/packages/CLPBN/horus/WeightedBp.h index 1e79fd4db..9bb40fe95 100644 --- a/packages/CLPBN/horus/WeightedBp.h +++ b/packages/CLPBN/horus/WeightedBp.h @@ -24,6 +24,8 @@ class WeightedLink : public BpLink } private: + DISALLOW_COPY_AND_ASSIGN (WeightedLink); + size_t index_; unsigned weight_; Params pwdMsg_; @@ -43,7 +45,6 @@ class WeightedBp : public BeliefProp Params getPosterioriOf (VarId); private: - void createLinks (void); void maxResidualSchedule (void); @@ -55,6 +56,8 @@ class WeightedBp : public BeliefProp void printLinkInformation (void) const; vector> weights_; + + DISALLOW_COPY_AND_ASSIGN (WeightedBp); }; #endif // HORUS_WEIGHTEDBP_H From c96496720bdac4cea0243f4074314cef2b839fc5 Mon Sep 17 00:00:00 2001 From: Tiago Gomes Date: Thu, 27 Dec 2012 22:28:19 +0000 Subject: [PATCH 50/89] Trivial --- packages/CLPBN/horus/GroundSolver.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/CLPBN/horus/GroundSolver.cpp b/packages/CLPBN/horus/GroundSolver.cpp index b683c8233..1916315bb 100644 --- a/packages/CLPBN/horus/GroundSolver.cpp 
+++ b/packages/CLPBN/horus/GroundSolver.cpp @@ -1,7 +1,7 @@ #include "GroundSolver.h" +#include "VarElim.h" #include "BeliefProp.h" #include "CountingBp.h" -#include "VarElim.h" #include "Util.h" From d36b63ece30e5a45e4a5364ee9610ef06227dccf Mon Sep 17 00:00:00 2001 From: Tiago Gomes Date: Thu, 27 Dec 2012 22:29:20 +0000 Subject: [PATCH 51/89] Trivial --- packages/CLPBN/horus/Histogram.h | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/CLPBN/horus/Histogram.h b/packages/CLPBN/horus/Histogram.h index 9b65f9f02..d60c2d22f 100644 --- a/packages/CLPBN/horus/Histogram.h +++ b/packages/CLPBN/horus/Histogram.h @@ -24,7 +24,7 @@ class HistogramSet void reset (void); - static vector getHistograms (unsigned ,unsigned); + static vector getHistograms (unsigned, unsigned); static unsigned nrHistograms (unsigned, unsigned); From 7d9af75c352d6cddbc7678fa166dc9bc9c306a2c Mon Sep 17 00:00:00 2001 From: Tiago Gomes Date: Thu, 27 Dec 2012 23:21:32 +0000 Subject: [PATCH 52/89] Improve solver flags --- packages/CLPBN/clpbn/horus.yap | 26 ++++++++++++------- packages/CLPBN/horus/BeliefProp.cpp | 2 +- packages/CLPBN/horus/CountingBp.cpp | 9 +++---- packages/CLPBN/horus/CountingBp.h | 4 ++- packages/CLPBN/horus/HorusYap.cpp | 6 ++--- packages/CLPBN/horus/LiftedBp.cpp | 2 +- packages/CLPBN/horus/Util.cpp | 40 ++++++++++++++--------------- 7 files changed, 49 insertions(+), 40 deletions(-) diff --git a/packages/CLPBN/clpbn/horus.yap b/packages/CLPBN/clpbn/horus.yap index 344f11d86..976d481b9 100644 --- a/packages/CLPBN/clpbn/horus.yap +++ b/packages/CLPBN/clpbn/horus.yap @@ -34,15 +34,23 @@ warning :- set_horus_flag(K,V) :- cpp_set_horus_flag(K,V). -:- cpp_set_horus_flag(schedule, seq_fixed). -%:- cpp_set_horus_flag(schedule, seq_random). -%:- cpp_set_horus_flag(schedule, parallel). -%:- cpp_set_horus_flag(schedule, max_residual). - -:- cpp_set_horus_flag(accuracy, 0.0001). - -:- cpp_set_horus_flag(max_iter, 1000). +:- cpp_set_horus_flag(verbosity, 0). :- cpp_set_horus_flag(use_logarithms, false). -% :- cpp_set_horus_flag(use_logarithms, true). +%:- cpp_set_horus_flag(use_logarithms, true). + +%:- cpp_set_horus_flag(hve_elim_heuristic, sequential). +%:- cpp_set_horus_flag(hve_elim_heuristic, min_neighbors). +%:- cpp_set_horus_flag(hve_elim_heuristic, min_weight). +%:- cpp_set_horus_flag(hve_elim_heuristic, min_fill). +:- cpp_set_horus_flag(hve_elim_heuristic, weighted_min_fill). + +:- cpp_set_horus_flag(bp_msg_schedule, seq_fixed). +%:- cpp_set_horus_flag(bp_msg_schedule, seq_random). +%:- cpp_set_horus_flag(bp_msg_schedule, parallel). +%:- cpp_set_horus_flag(bp_msg_schedule, max_residual). + +:- cpp_set_horus_flag(bp_accuracy, 0.0001). + +:- cpp_set_horus_flag(bp_max_iter, 1000). 
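User programs reach these keys through the set_horus_flag/2 wrapper shown at the top of this hunk, not through cpp_set_horus_flag/2 directly. Below is a minimal sketch of a PFL program overriding the renamed keys; the solver and the particular values are illustrative only, and note that set_horus_flag/2 is only re-exported through library(pfl) by a later patch in this series.

    :- use_module(library(pfl)).

    :- set_solver(bp).

    % belief-propagation flags (formerly schedule, accuracy and max_iter)
    :- set_horus_flag(bp_msg_schedule, max_residual).
    :- set_horus_flag(bp_accuracy, 0.00001).
    :- set_horus_flag(bp_max_iter, 500).

    % variable-elimination heuristic (formerly elim_heuristic)
    :- set_horus_flag(hve_elim_heuristic, min_fill).
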
diff --git a/packages/CLPBN/horus/BeliefProp.cpp b/packages/CLPBN/horus/BeliefProp.cpp index bf8f30a79..d009cd7a9 100644 --- a/packages/CLPBN/horus/BeliefProp.cpp +++ b/packages/CLPBN/horus/BeliefProp.cpp @@ -52,7 +52,7 @@ BeliefProp::printSolverFlags (void) const { stringstream ss; ss << "belief propagation [" ; - ss << "schedule=" ; + ss << "msg_schedule=" ; switch (schedule_) { case MsgSchedule::SEQ_FIXED: ss << "seq_fixed"; break; case MsgSchedule::SEQ_RANDOM: ss << "seq_random"; break; diff --git a/packages/CLPBN/horus/CountingBp.cpp b/packages/CLPBN/horus/CountingBp.cpp index 006bf99fd..39b47eab3 100644 --- a/packages/CLPBN/horus/CountingBp.cpp +++ b/packages/CLPBN/horus/CountingBp.cpp @@ -2,7 +2,7 @@ #include "WeightedBp.h" -bool CountingBp::checkForIdenticalFactors = true; +bool CountingBp::fif_ = true; CountingBp::CountingBp (const FactorGraph& fg) @@ -36,7 +36,7 @@ CountingBp::printSolverFlags (void) const { stringstream ss; ss << "counting bp [" ; - ss << "schedule=" ; + ss << "msg_schedule=" ; switch (WeightedBp::msgSchedule()) { case MsgSchedule::SEQ_FIXED: ss << "seq_fixed"; break; case MsgSchedule::SEQ_RANDOM: ss << "seq_random"; break; @@ -46,7 +46,7 @@ CountingBp::printSolverFlags (void) const ss << ",max_iter=" << WeightedBp::maxIterations(); ss << ",accuracy=" << WeightedBp::accuracy(); ss << ",log_domain=" << Util::toString (Globals::logDomain); - ss << ",chkif=" << Util::toString (CountingBp::checkForIdenticalFactors); + ss << ",fif=" << Util::toString (CountingBp::fif_); ss << "]" ; cout << ss.str() << endl; } @@ -93,8 +93,7 @@ void CountingBp::findIdenticalFactors() { const FacNodes& facNodes = fg.facNodes(); - if (checkForIdenticalFactors == false || - facNodes.size() == 1) { + if (fif_ == false || facNodes.size() == 1) { return; } for (size_t i = 0; i < facNodes.size(); i++) { diff --git a/packages/CLPBN/horus/CountingBp.h b/packages/CLPBN/horus/CountingBp.h index 4f674e687..605fa8b22 100644 --- a/packages/CLPBN/horus/CountingBp.h +++ b/packages/CLPBN/horus/CountingBp.h @@ -116,7 +116,7 @@ class CountingBp : public GroundSolver Params solveQuery (VarIds); - static bool checkForIdenticalFactors; + static void setFindIdenticalFactorsFlag (bool fif) { fif_ = fif; } private: Color getNewColor (void) @@ -179,6 +179,8 @@ class CountingBp : public GroundSolver const FactorGraph* compressedFg_; WeightedBp* solver_; + static bool fif_; + DISALLOW_COPY_AND_ASSIGN (CountingBp); }; diff --git a/packages/CLPBN/horus/HorusYap.cpp b/packages/CLPBN/horus/HorusYap.cpp index 77e900bb0..dbd210412 100644 --- a/packages/CLPBN/horus/HorusYap.cpp +++ b/packages/CLPBN/horus/HorusYap.cpp @@ -200,7 +200,7 @@ runGroundSolver (void) } GroundSolver* solver = 0; - CountingBp::checkForIdenticalFactors = false; + CountingBp::setFindIdenticalFactorsFlag (false); switch (Globals::groundSolver) { case GroundSolverType::VE: solver = new VarElim (*mfg); break; case GroundSolverType::BP: solver = new BeliefProp (*mfg); break; @@ -320,11 +320,11 @@ setHorusFlag (void) stringstream ss; ss << (int) YAP_IntOfTerm (YAP_ARG2); ss >> value; - } else if (key == "accuracy") { + } else if (key == "bp_accuracy") { stringstream ss; ss << (float) YAP_FloatOfTerm (YAP_ARG2); ss >> value; - } else if (key == "max_iter") { + } else if (key == "bp_max_iter") { stringstream ss; ss << (int) YAP_IntOfTerm (YAP_ARG2); ss >> value; diff --git a/packages/CLPBN/horus/LiftedBp.cpp b/packages/CLPBN/horus/LiftedBp.cpp index 18f056f8a..b748cc9e1 100644 --- a/packages/CLPBN/horus/LiftedBp.cpp +++ 
b/packages/CLPBN/horus/LiftedBp.cpp @@ -62,7 +62,7 @@ LiftedBp::printSolverFlags (void) const { stringstream ss; ss << "lifted bp [" ; - ss << "schedule=" ; + ss << "msg_schedule=" ; switch (WeightedBp::msgSchedule()) { case MsgSchedule::SEQ_FIXED: ss << "seq_fixed"; break; case MsgSchedule::SEQ_RANDOM: ss << "seq_random"; break; diff --git a/packages/CLPBN/horus/Util.cpp b/packages/CLPBN/horus/Util.cpp index 9fad10705..4d682a1b0 100644 --- a/packages/CLPBN/horus/Util.cpp +++ b/packages/CLPBN/horus/Util.cpp @@ -192,11 +192,7 @@ bool setHorusFlag (string key, string value) { bool returnVal = true; - if (key == "verbosity") { - stringstream ss; - ss << value; - ss >> Globals::verbosity; - } else if (key == "lifted_solver") { + if ( key == "lifted_solver") { if ( value == "lve") { Globals::liftedSolver = LiftedSolverType::LVE; } else if (value == "lbp") { @@ -209,7 +205,7 @@ setHorusFlag (string key, string value) returnVal = false; } } else if (key == "ground_solver") { - if ( value == "ve") { + if ( value == "ve" || value == "hve") { Globals::groundSolver = GroundSolverType::VE; } else if (value == "bp") { Globals::groundSolver = GroundSolverType::BP; @@ -220,7 +216,21 @@ setHorusFlag (string key, string value) cerr << "for `" << key << "'" << endl; returnVal = false; } - } else if (key == "elim_heuristic") { + } else if (key == "verbosity") { + stringstream ss; + ss << value; + ss >> Globals::verbosity; + } else if (key == "use_logarithms") { + if ( value == "true") { + Globals::logDomain = true; + } else if (value == "false") { + Globals::logDomain = false; + } else { + cerr << "warning: invalid value `" << value << "' " ; + cerr << "for `" << key << "'" << endl; + returnVal = false; + } + } else if (key == "ve_elim_heuristic" || key == "hve_elim_heuristic") { if ( value == "sequential") { ElimGraph::setElimHeuristic (ElimHeuristic::SEQUENTIAL); } else if (value == "min_neighbors") { @@ -236,7 +246,7 @@ setHorusFlag (string key, string value) cerr << "for `" << key << "'" << endl; returnVal = false; } - } else if (key == "schedule") { + } else if (key == "bp_msg_schedule") { if ( value == "seq_fixed") { BeliefProp::setMsgSchedule (MsgSchedule::SEQ_FIXED); } else if (value == "seq_random") { @@ -250,28 +260,18 @@ setHorusFlag (string key, string value) cerr << "for `" << key << "'" << endl; returnVal = false; } - } else if (key == "accuracy") { + } else if (key == "bp_accuracy") { stringstream ss; double acc; ss << value; ss >> acc; BeliefProp::setAccuracy (acc); - } else if (key == "max_iter") { + } else if (key == "bp_max_iter") { stringstream ss; unsigned mi; ss << value; ss >> mi; BeliefProp::setMaxIterations (mi); - } else if (key == "use_logarithms") { - if ( value == "true") { - Globals::logDomain = true; - } else if (value == "false") { - Globals::logDomain = false; - } else { - cerr << "warning: invalid value `" << value << "' " ; - cerr << "for `" << key << "'" << endl; - returnVal = false; - } } else { cerr << "warning: invalid key `" << key << "'" << endl; returnVal = false; From 3363019c933aa79c22ea1bb4728f890f8a9c6cf9 Mon Sep 17 00:00:00 2001 From: Tiago Gomes Date: Fri, 28 Dec 2012 01:35:59 +0000 Subject: [PATCH 53/89] Remove unnecessary methods --- packages/CLPBN/horus/Var.cpp | 30 +++--------------------------- packages/CLPBN/horus/Var.h | 4 ---- 2 files changed, 3 insertions(+), 31 deletions(-) diff --git a/packages/CLPBN/horus/Var.cpp b/packages/CLPBN/horus/Var.cpp index f84257585..99540718a 100644 --- a/packages/CLPBN/horus/Var.cpp +++ b/packages/CLPBN/horus/Var.cpp @@ 
-36,35 +36,11 @@ Var::isValidState (int stateIndex) -bool -Var::isValidState (const string& stateName) -{ - States states = Var::getVarInfo (varId_).states; - return Util::contains (states, stateName); -} - - - void -Var::setEvidence (int ev) +Var::setEvidence (int evidence) { - assert (ev < (int) range_); - evidence_ = ev; -} - - - -void -Var::setEvidence (const string& ev) -{ - States states = Var::getVarInfo (varId_).states; - for (size_t i = 0; i < states.size(); i++) { - if (states[i] == ev) { - evidence_ = i; - return; - } - } - assert (false); + assert (evidence < (int) range_); + evidence_ = evidence; } diff --git a/packages/CLPBN/horus/Var.h b/packages/CLPBN/horus/Var.h index d1b3461ac..3ae6eeed8 100644 --- a/packages/CLPBN/horus/Var.h +++ b/packages/CLPBN/horus/Var.h @@ -59,12 +59,8 @@ class Var bool isValidState (int); - bool isValidState (const string&); - void setEvidence (int); - void setEvidence (const string&); - string label (void) const; States states (void) const; From d293c5e5a9ee6a3c09db1ff39f5a944f496d0b19 Mon Sep 17 00:00:00 2001 From: Tiago Gomes Date: Fri, 28 Dec 2012 15:40:32 +0000 Subject: [PATCH 54/89] Trivial --- packages/CLPBN/horus/Horus.h | 23 +++++++++++------------ 1 file changed, 11 insertions(+), 12 deletions(-) diff --git a/packages/CLPBN/horus/Horus.h b/packages/CLPBN/horus/Horus.h index 46d619b9d..045ca42f2 100644 --- a/packages/CLPBN/horus/Horus.h +++ b/packages/CLPBN/horus/Horus.h @@ -12,24 +12,23 @@ void operator=(const TypeName&) #include - -using namespace std; +#include class Var; class Factor; class VarNode; class FacNode; -typedef vector Params; -typedef unsigned VarId; -typedef vector VarIds; -typedef vector Vars; -typedef vector VarNodes; -typedef vector FacNodes; -typedef vector Factors; -typedef vector States; -typedef vector Ranges; -typedef unsigned long long ullong; +typedef std::vector Params; +typedef unsigned VarId; +typedef std::vector VarIds; +typedef std::vector Vars; +typedef std::vector VarNodes; +typedef std::vector FacNodes; +typedef std::vector Factors; +typedef std::vector States; +typedef std::vector Ranges; +typedef unsigned long long ullong; enum LiftedSolverType From ea5dbe2bca8c98cd88ad7022e185cf1b7ec8a168 Mon Sep 17 00:00:00 2001 From: Tiago Gomes Date: Thu, 3 Jan 2013 18:26:12 +0000 Subject: [PATCH 55/89] Trivial --- packages/CLPBN/examples/sprinkler.pfl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/CLPBN/examples/sprinkler.pfl b/packages/CLPBN/examples/sprinkler.pfl index daceb3786..5d6e2fce3 100644 --- a/packages/CLPBN/examples/sprinkler.pfl +++ b/packages/CLPBN/examples/sprinkler.pfl @@ -25,7 +25,7 @@ cloudy_table( sprinkler_table( [ 0.5, 0.9, - 0.5, 0.1 ]). + 0.5, 0.1 ]). rain_table( [ 0.8, 0.2, From d4d11cd4b422344db45616cf965a9ec522b46d5b Mon Sep 17 00:00:00 2001 From: Tiago Gomes Date: Thu, 3 Jan 2013 18:48:15 +0000 Subject: [PATCH 56/89] Fix sprinkler probability order --- packages/CLPBN/examples/sprinkler.pfl | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/packages/CLPBN/examples/sprinkler.pfl b/packages/CLPBN/examples/sprinkler.pfl index 5d6e2fce3..a69c2158e 100644 --- a/packages/CLPBN/examples/sprinkler.pfl +++ b/packages/CLPBN/examples/sprinkler.pfl @@ -24,16 +24,16 @@ cloudy_table( 0.5 ]). sprinkler_table( - [ 0.5, 0.9, - 0.5, 0.1 ]). + [ 0.1, 0.5, + 0.9, 0.5 ]). rain_table( [ 0.8, 0.2, 0.2, 0.8 ]). wet_grass_table( - [ 1.0, 0.1, 0.1, 0.01, - 0.0, 0.9, 0.9, 0.99 ]). + [ 0.99, 0.9, 0.9, 0.0, + 0.01, 0.1, 0.1, 1.0 ]). % ?- wet_grass(X). 
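This reordering makes the tables follow the parameter convention that the PFL manual added later in this series spells out: domain combinations are enumerated in ascending order, with the first term of the parfactor (for a bayes parfactor, the child variable) as the most significant. For the parfactor bayes sprinkler, cloudy the corrected table is therefore read as:

    sprinkler_table(
        [ 0.1, 0.5,     % P(sprinkler=t | cloudy=t), P(sprinkler=t | cloudy=f)
          0.9, 0.5 ]).  % P(sprinkler=f | cloudy=t), P(sprinkler=f | cloudy=f)

which matches the conditional probability table for Sprinkler in the manual's sprinkler figure. The wet_grass_table entries are read the same way, with wet_grass as the most significant term, followed by sprinkler and then rain.
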
From 3347f0e676e366b249ed2effbf8c60115876ea54 Mon Sep 17 00:00:00 2001 From: Tiago Gomes Date: Thu, 3 Jan 2013 23:44:30 +0000 Subject: [PATCH 57/89] Rename two examples --- .../CLPBN/examples/{social_domain1.pfl => social_network1.pfl} | 0 .../CLPBN/examples/{social_domain2.pfl => social_network2.pfl} | 0 2 files changed, 0 insertions(+), 0 deletions(-) rename packages/CLPBN/examples/{social_domain1.pfl => social_network1.pfl} (100%) rename packages/CLPBN/examples/{social_domain2.pfl => social_network2.pfl} (100%) diff --git a/packages/CLPBN/examples/social_domain1.pfl b/packages/CLPBN/examples/social_network1.pfl similarity index 100% rename from packages/CLPBN/examples/social_domain1.pfl rename to packages/CLPBN/examples/social_network1.pfl diff --git a/packages/CLPBN/examples/social_domain2.pfl b/packages/CLPBN/examples/social_network2.pfl similarity index 100% rename from packages/CLPBN/examples/social_domain2.pfl rename to packages/CLPBN/examples/social_network2.pfl From f2682374fb246bb46eb27c7f15a520c5dae3993c Mon Sep 17 00:00:00 2001 From: Tiago Gomes Date: Fri, 4 Jan 2013 15:41:39 +0000 Subject: [PATCH 58/89] Forgot to update the Makefile in the previous commit --- packages/CLPBN/Makefile.in | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/CLPBN/Makefile.in b/packages/CLPBN/Makefile.in index 602565138..f8f1ed929 100644 --- a/packages/CLPBN/Makefile.in +++ b/packages/CLPBN/Makefile.in @@ -106,8 +106,8 @@ CLPBN_EXAMPLES= \ $(CLPBN_EXDIR)/cg.yap \ $(CLPBN_EXDIR)/city.pfl \ $(CLPBN_EXDIR)/comp_workshops.pfl \ - $(CLPBN_EXDIR)/social_domain1.pfl \ - $(CLPBN_EXDIR)/social_domain2.pfl \ + $(CLPBN_EXDIR)/social_network1.pfl \ + $(CLPBN_EXDIR)/social_network2.pfl \ $(CLPBN_EXDIR)/sprinkler.pfl \ $(CLPBN_EXDIR)/workshop_attrs.pfl From c5a390460d645311e71a41c60dd3d1d640991b62 Mon Sep 17 00:00:00 2001 From: Tiago Gomes Date: Fri, 4 Jan 2013 19:48:31 +0000 Subject: [PATCH 59/89] Chanse use_logarithms default to true. Reasoning: - In small networks, the use of logarithms is not noticed in terms of performance. - In large networks, the chance to have a division by 0 is too high when not using logarithms. --- packages/CLPBN/clpbn/horus.yap | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/CLPBN/clpbn/horus.yap b/packages/CLPBN/clpbn/horus.yap index 976d481b9..b43d51ded 100644 --- a/packages/CLPBN/clpbn/horus.yap +++ b/packages/CLPBN/clpbn/horus.yap @@ -36,8 +36,8 @@ set_horus_flag(K,V) :- cpp_set_horus_flag(K,V). :- cpp_set_horus_flag(verbosity, 0). -:- cpp_set_horus_flag(use_logarithms, false). -%:- cpp_set_horus_flag(use_logarithms, true). +%:- cpp_set_horus_flag(use_logarithms, false). +:- cpp_set_horus_flag(use_logarithms, true). %:- cpp_set_horus_flag(hve_elim_heuristic, sequential). %:- cpp_set_horus_flag(hve_elim_heuristic, min_neighbors). 
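The reasoning above can be made a little more concrete: in a large network a solver multiplies together many probabilities smaller than one, and in double precision that product can underflow to exactly zero, after which normalization divides by a zero sum (presumably the "division by 0" mentioned in the commit message). Computing in the log domain avoids this because the product becomes a sum,

$$\log \prod_i \phi_i = \sum_i \log \phi_i ,$$

and a sum of moderately large negative numbers stays representable.
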
From f55ccdef331b59a5487a1130d9a2b76e85c02a36 Mon Sep 17 00:00:00 2001 From: Tiago Gomes Date: Sat, 5 Jan 2013 00:49:39 +0000 Subject: [PATCH 60/89] Add a manual for PFL written in Latex --- packages/CLPBN/Makefile.in | 10 ++ packages/CLPBN/README.txt | 111 -------------- packages/CLPBN/pfl.tex | 289 +++++++++++++++++++++++++++++++++++++ 3 files changed, 299 insertions(+), 111 deletions(-) delete mode 100644 packages/CLPBN/README.txt create mode 100644 packages/CLPBN/pfl.tex diff --git a/packages/CLPBN/Makefile.in b/packages/CLPBN/Makefile.in index f8f1ed929..7b563b427 100644 --- a/packages/CLPBN/Makefile.in +++ b/packages/CLPBN/Makefile.in @@ -28,6 +28,9 @@ INSTALL=@INSTALL@ INSTALL_DATA=@INSTALL_DATA@ INSTALL_PROGRAM=@INSTALL_PROGRAM@ srcdir=@srcdir@ +PDFLATEX=pdflatex + +PFL_MANUAL = $(srcdir)/pfl CLPBN_TOP= $(srcdir)/clpbn.yap \ $(srcdir)/pfl.yap @@ -127,3 +130,10 @@ install: $(CLBN_TOP) $(CLBN_PROGRAMS) $(CLPBN_PROGRAMS) for h in $(CLPBN_HMMER_EXAMPLES); do $(INSTALL_DATA) $$h $(DESTDIR)$(EXDIR)/HMMer; done for h in $(CLPBN_LEARNING_EXAMPLES); do $(INSTALL_DATA) $$h $(DESTDIR)$(EXDIR)/learning; done +docs: $(MANUAL) + $(PDFLATEX) $(PFL_MANUAL) + $(PDFLATEX) $(PFL_MANUAL) + +install_docs: docs + $(INSTALL_DATA) pfl.pdf $(DESTDIR)$(EXDIR) + diff --git a/packages/CLPBN/README.txt b/packages/CLPBN/README.txt deleted file mode 100644 index 76231b268..000000000 --- a/packages/CLPBN/README.txt +++ /dev/null @@ -1,111 +0,0 @@ -Prolog Factor Language (PFL) - -Prolog Factor Language (PFL) is a extension of the Prolog language that -allows a natural representation of this first-order probabilistic models -(either directed or undirected). PFL is also capable of solving probabilistic -queries on this models through the implementation of several inference -techniques: variable elimination, belief propagation, lifted variable -elimination and lifted belief propagation. - -Language -------------------------------------------------------------------------------- -A graphical model in PFL is represented using parfactors. A PFL parfactor -has the following four components: - -Type ; Formulas ; Phi ; Constraint . - -- Type refers the type of the network over which the parfactor is defined. -It can be bayes for directed networks, or markov for undirected ones. -- Formulas is a sequence of Prolog terms that define sets of random variables -under the constraint. -- Phi is either a list of parameters or a call to a Prolog goal that will -unify its last argument with a list of parameters. -- Constraint is a list (possible empty) of Prolog goals that will impose -bindings on the logical variables that appear in the formulas. - -The "examples" directory contains some popular graphical models described -using PFL. - -Querying -------------------------------------------------------------------------------- -Now we show how to use PFL to solve probabilistic queries. We will -use the burlgary alarm network as an example. First, we load the model: - -$ yap -l examples/burglary-alarm.yap - -Now let's suppose that we want to estimate the probability of a earthquake -ocurred given that mary called. We can do it with the following query: - -?- earthquake(X), mary_calls(t). - -Suppose now that we want the joint distribution for john_calls and -mary_calls. We can obtain this with the following query: - -?- john_calls(X), mary_calls(Y). - - -Inference Options -------------------------------------------------------------------------------- -PFL supports both ground and lifted inference. 
The inference algorithm -can be chosen using the set_solver/1 predicate. The following algorithms -are supported: -- lve: generalized counting first-order variable elimination (GC-FOVE) -- hve: (ground) variable elimination -- lbp: lifted first-order belief propagation -- cbp: counting belief propagation -- bp: (ground) belief propagation -- lkc: lifted first-order knowledge compilation - -For example, if we want to use ground variable elimination to solve some -query, we need to call first the following goal: - -?- set_solver(hve). - -It is possible to tweak several parameters of PFL through the -set_horus_flag/2 predicate. The first argument is a key that -identifies the parameter that we desire to tweak, while the second -is some possible value for this key. - -The verbosity key controls the level of log information that will be -printed by the corresponding solver. Its possible values are positive -integers. The bigger the number, more log information will be printed. -For example, to view some basic log information we need to call the -following goal: - -?- set_horus_flag(verbosity, 1). - -The use_logarithms key controls whether the calculations performed -during inference should be done in the log domain or not. Its values -can be true or false. By default is false. - -There are also keys specific to the inference algorithm. For example, -elim_heuristic key controls the elimination heuristic that will be -used by ground variable elimination. The following heuristics are -supported: -- sequential -- min_neighbors -- min_weight -- min_fill -- weighted_min_fill - -An explanation of this heuristics can be found in Probabilistic Graphical -Models by Daphne Koller. - -The schedule, accuracy and max_iter keys are specific for inference -algorithms based on message passing, namely lbp, cbp and bp. -The key schedule can be used to specify the order in which the messages -are sent in belief propagation. The possible values are: -- seq_fixed: at each iteration, all messages are sent in the same order -- seq_random: at each iteration, the messages are sent with a random order -- parallel: at each iteration, the messages are all calculated using the -values of the previous iteration. -- max_residual: the next message to be sent is the one with maximum residual, -(Residual Belief Propagation:Informed Scheduling for Asynchronous Message -Passing) - -The max_iter key sets the maximum number of iterations. One iteration -consists in sending all possible messages. The accuracy key indicate -when we should stop sending messages. If the largest difference between -a message sent in the current iteration and one message sent in the previous -iteration is less that accuracy value given, we terminate belief propagation. 
- diff --git a/packages/CLPBN/pfl.tex b/packages/CLPBN/pfl.tex new file mode 100644 index 000000000..5da5399e8 --- /dev/null +++ b/packages/CLPBN/pfl.tex @@ -0,0 +1,289 @@ +\documentclass{article} + +\usepackage{tikz} +\usepackage{setspace} +\usepackage{fancyvrb} +\usetikzlibrary{arrows,shapes,positioning} + +\begin{document} + +\DefineVerbatimEnvironment{pflcodeve}{Verbatim} {xleftmargin=3.0em,fontsize=\small} + +\newenvironment{pflcode} + {\VerbatimEnvironment \setstretch{0.8} \begin{pflcodeve}} + {\end{pflcodeve} } + +\newcommand{\true} {\mathtt{t}} +\newcommand{\false} {\mathtt{f}} +\newcommand{\tableline} {\noalign{\hrule height 0.8pt}} + +\tikzstyle{nodestyle} = [draw, thick, circle, minimum size=0.9cm] +\tikzstyle{bnedgestyle} = [-triangle 45,thick] + +\setlength{\parskip}{\baselineskip} + +\title{The Prolog Factor Language (PFL)} +\author{} +\date{} + +\maketitle + +%------------------------------------------------------------------------------ +%------------------------------------------------------------------------------ +%------------------------------------------------------------------------------ +%------------------------------------------------------------------------------ +\section{Introduction} +The Prolog Factor Language (PFL) is a extension of the Prolog language that allows a natural representation of this first-order probabilistic models (either directed or undirected). PFL is also capable of solving probabilistic queries on this models through the implementation of several inference techniques: variable elimination, belief propagation, lifted variable elimination and lifted belief propagation. + + + +%------------------------------------------------------------------------------ +%------------------------------------------------------------------------------ +%------------------------------------------------------------------------------ +%------------------------------------------------------------------------------ +\section{Language} +A first-order probabilistic graphical model is described using parametric factors, or just parfactors. The PFL syntax for a parfactor is + +$$Type~~F~~;~~Phi~~;~~C.$$ + +, where +\begin{itemize} +\item $Type$ refers the type of network over which the parfactor is defined. It can be \texttt{bayes} for directed networks, or \texttt{markov} for undirected ones. + +\item $F$ is a comma-separated sequence of Prolog terms that will define sets of random variables under the constraint $C$. If $Type$ is \texttt{bayes}, the first term defines the node while the others defines its parents. + +\item $Phi$ is either a Prolog list of potential values or a Prolog goal that unifies with one. If $Type$ is \texttt{bayes}, this will correspond to the conditional probability table. + +\item $C$ is a (possibly empty) list of Prolog goals that will instantiate the logical variables that appear in $F$, that is, the successful substitutions for the goals in $C$ will be the valid values for the logical variables. This allows the constraint to be any relation (set of tuples) over the logical variables. +\end{itemize} + + +\begin{figure}[t!] 
+\begin{center} +\begin{tikzpicture}[>=latex',line join=bevel,transform shape,scale=0.8] + +\node (cloudy) at (50bp, 122bp) [nodestyle,ellipse,inner sep=0pt,minimum width=2.7cm] {$Cloudy$}; +\node (sprinker) at ( 0bp, 66bp) [nodestyle,ellipse,inner sep=0pt,minimum width=2.7cm] {$Sprinker$}; +\node (rain) at (100bp, 66bp) [nodestyle,ellipse,inner sep=0pt,minimum width=2.7cm] {$Rain$}; +\node (wetgrass) at (50bp, 10bp) [nodestyle,ellipse,inner sep=0pt,minimum width=2.7cm] {$WetGrass$}; +\draw [bnedgestyle] (cloudy) -- (sprinker); +\draw [bnedgestyle] (cloudy) -- (rain); +\draw [bnedgestyle] (sprinker) -- (wetgrass); +\draw [bnedgestyle] (rain) -- (wetgrass); + +\node [above=0.4cm of cloudy,inner sep=0pt] { +\begin{tabular}[b]{lc} + $C$ & $P(C)$ \\ \tableline + $\true$ & 0.5 \\ + $\false$ & 0.5 \\ +\end{tabular} +}; + +\node [left=0.4cm of sprinker,inner sep=0pt] { +\begin{tabular}{lcc} + $S$ & $C$ & $P(S|C)$ \\ \tableline + $\true$ & $\true$ & 0.1 \\ + $\true$ & $\false$ & 0.5 \\ + $\false$ & $\true$ & 0.9 \\ + $\false$ & $\false$ & 0.5 \\ +\end{tabular} +}; + +\node [right=0.4cm of rain,inner sep=0pt] { +\begin{tabular}{llc} + $R$ & $C$ & $P(R|C)$ \\ \tableline + $\true$ & $\true$ & 0.8 \\ + $\true$ & $\false$ & 0.2 \\ + $\false$ & $\true$ & 0.2 \\ + $\false$ & $\false$ & 0.8 \\ +\end{tabular} +}; + +\node [below=0.4cm of wetgrass,inner sep=0pt] { +\begin{tabular}{llll} + $W$ & $S$ & $R$ & $P(W|S,R)$ \\ \tableline + $\true$ & $\true$ & $\true$ & \hspace{1em} 0.99 \\ + $\true$ & $\true$ & $\false$ & \hspace{1em} 0.9 \\ + $\true$ & $\false$ & $\true$ & \hspace{1em} 0.9 \\ + $\true$ & $\false$ & $\false$ & \hspace{1em} 0.0 \\ + $\false$ & $\true$ & $\true$ & \hspace{1em} 0.01 \\ + $\false$ & $\true$ & $\false$ & \hspace{1em} 0.1 \\ + $\false$ & $\false$ & $\true$ & \hspace{1em} 0.1 \\ + $\false$ & $\false$ & $\false$ & \hspace{1em} 1.0 \\ +\end{tabular} +}; + +\end{tikzpicture} +\caption{The sprinkler network.} +\label{fig:sprinkler-bn} +\end{center} +\end{figure} + +To better explain the language, below we show the PFL representation for network found in Figure~\ref{fig:sprinkler-bn}. + +\begin{pflcode} +:- use_module(library(pfl)). + +bayes cloudy ; cloudy_table ; []. + +bayes sprinkler, cloudy ; sprinkler_table ; []. + +bayes rain, cloudy ; rain_table ; []. + +bayes wet_grass, sprinkler, rain ; wet_grass_table ; []. + +cloudy_table( + [ 0.5, + 0.5 ]). + +sprinkler_table( + [ 0.1, 0.5, + 0.9, 0.5 ]). + +rain_table( + [ 0.8, 0.2, + 0.2, 0.8 ]). + +wet_grass_table( + [ 0.99, 0.9, 0.9, 0.0, + 0.01, 0.1, 0.1, 1.0 ]). +\end{pflcode} + +Note that this network is fully grounded, as the constraints are all empty. Next we present the PFL representation for a well-known markov logic network - the social network model. The weighted formulas of this model are shown below. + +\begin{pflcode} +1.5 : Smokes(x) => Cancer(x) +1.1 : Smokes(x) ^ Friends(x,y) => Smokes(y) +\end{pflcode} + +We can represent this model using PFL with the following code. + +\begin{pflcode} +:- use_module(library(pfl)). + +person(anna). +person(bob). + +markov smokes(X), cancer(X) ; + [4.482, 4.482, 1.0, 4.482] ; + [person(X)]. + +markov friends(X,Y), smokes(X), smokes(Y) ; + [3.004, 3.004, 3.004, 3.004, 3.004, 1.0, 1.0, 3.004] ; + [person(X), person(Y)]. +\end{pflcode} +%markov smokes(X) ; [1.0, 4.055]; [person(X)]. +%markov cancer(X) ; [1.0, 9.974]; [person(X)]. +%markov friends(X,Y) ; [1.0, 99.484] ; [person(X), person(Y)]. + +Notice that we defined the world to be consisted of two persons, \texttt{anne} and \texttt{bob}. 
We can easily add as many persons as we want by inserting in the program a fact like \texttt{person @ 10.}~. This would create ten persons named \texttt{p1}, \texttt{p2}, \dots, \texttt{p10}. + +Unlike other fist-order probabilistic languages, in PFL the logical variables that appear in the terms are not directly typed, and they will be only constrained by the goals that appear in the constraint of the parfactor. This allows the logical variables to be constrained by any relation (set of tuples), and not by pairwise (in)equalities. For instance, the next example defines a ground network with three factors, each over the random variables \texttt{p(a,b)}, \texttt{p(b,d)} and \texttt{p(d,e)}. + +\begin{pflcode} +constraint(a,b). +constraint(b,d). +constraint(d,e). + +markov p(A,B); some_table; [constraint(A,B)]. +\end{pflcode} + +We can easily add static evidence to PFL programs by inserting a fact with the same functor and arguments as the random variable, plus one extra argument with the observed state or value. For instance, suppose that we now that \texttt{anna} and \texttt{bob} are friends. We can add this knowledge to the program with the following fact: \texttt{friends(anna,bob,t).}~. + +One last note for the domain of the random variables. By default all terms will generate boolean (\texttt{t}/\texttt{f}) random variables. It is possible to chose a different domain by appending a list of the possible values or states to the term. Next we present a self-explanatory example of how this can be done. + +\begin{pflcode} +bayes professor_ability::[high, medium, low] ; [0.5, 0.4, 0.1]. +\end{pflcode} + +More examples can be found in the CLPBN examples directory, which defaults to ``share/doc/Yap/packages/examples/CLPBN'' from the base directory where the YAP Prolog system was installed. + +%------------------------------------------------------------------------------ +%------------------------------------------------------------------------------ +%------------------------------------------------------------------------------ +%------------------------------------------------------------------------------ +\section{Querying} +In this section we demonstrate how to use PFL to solve probabilistic queries. We will use the sprinkler network as an example. + +Assuming that the current directory is where the examples are located, first we load the model: + +\texttt{\$ yap -l sprinker.pfl} + +Let's suppose that we want to estimate the marginal probability for the $WetGrass$ random variable. We can do it calling the following goal: + +\texttt{?- wet\_grass(X).} + +The output of the goal will show the marginal probability for each $WetGrass$ possible state or value, that is, \texttt{t} and \texttt{f}. Notice that in PFL a random variable is identified by a term with the same functor and arguments plus one extra argument. + +Let's now suppose that we want to estimate the probability for the same random variable, but this time we have evidence that it had rained the day before. We can estimate this probability without resorting to static evidence with: + +\texttt{?- wet\_grass(X), rain(t).} + +PFL also supports calculating joint probability distributions. 
For instance, we can obtain the joint probability for $Sprinkler$ and $Rain$ with: + +\texttt{?- sprinkler(X), rain(Y).} + + +%------------------------------------------------------------------------------ +%------------------------------------------------------------------------------ +%------------------------------------------------------------------------------ +%------------------------------------------------------------------------------ +\section{Inference Options} +PFL supports both ground and lifted inference methods. The inference algorithm can be chosen by calling \texttt{set\_solver/1}. The following are supported: +\begin{itemize} + \item \texttt{ve}, variable elimination (written in Prolog) + \item \texttt{hve}, variable elimination (written in C++) + \item \texttt{jt}, junction tree + \item \texttt{bdd}, binary decision diagrams + \item \texttt{bp}, belief propagation + \item \texttt{cbp}, counting belief propagation + \item \texttt{gibbs}, gibbs sampling + \item \texttt{lve}, generalized counting first-order variable elimination (GC-FOVE) + \item \texttt{lkc}, lifted first-order knowledge compilation + \item \texttt{lbp}, lifted first-order belief propagation +\end{itemize} + +For instance, if we want to use belief propagation to solve some probabilistic query, we need to call first: + +\texttt{?- set\_solver(bp).} + +It is possible to tweak some parameters of PFL through \texttt{set\_horus\_flag/2} predicate. The first argument is a key that identifies the parameter that we desire to tweak, while the second is some possible value for this key. + +The \texttt{verbosity} key controls the level of debugging information that will be printed. Its possible values are positive integers. The higher the number, the more information that will be shown. For example, to view some basic debugging information we call: + +\texttt{?- set\_horus\_flag(verbosity, 1).} + +This key defaults to 0 (no debugging information) and only \texttt{hve}, \texttt{bp}, \texttt{cbp}, \texttt{lve}, \texttt{lkc} and \texttt{lbp} solvers have support for this key. + +The \texttt{use\_logarithms} key controls whether the calculations performed during inference should be done in a logarithm domain or not. Its values can be \texttt{true} or \texttt{false}. By default is \texttt{true} and only affects \texttt{hve}, \texttt{bp}, \texttt{cbp}, \texttt{lve}, \texttt{lkc} and \texttt{lbp} solvers. The remaining solvers always do their calculations in a logarithm domain. + +There are keys specific only to some algorithm. The key \texttt{elim\_heuristic} key allows to chose which elimination heuristic will be used by the \texttt{hve} solver (but not \texttt{ve}). The following are supported: +\begin{itemize} + \item \texttt{sequential} + \item \texttt{min\_neighbors} + \item \texttt{min\_weight} + \item \texttt{min\_fill} + \item \texttt{weighted\_min\_fill} +\end{itemize} + +It defaults to \texttt{weighted\_min\_fill}. An explanation of each of these heuristics can be found in Daphne Koller's book \textit{Probabilistic Graphical Models}. + +The \texttt{bp\_msg\_schedule}, \texttt{bp\_accuracy} and \texttt{bp\_max\_iter} keys are specific for message passing based algorithms, namely \texttt{bp}, \texttt{cbp} and \texttt{lbp}. + +The \texttt{bp\_max\_iter} key establishes a maximum number of iterations. One iteration consists in sending all possible messages. It defaults to 1000. + +The \texttt{bp\_accuracy} key indicates when the message passing should cease. 
Be the residual of one message the difference (according some metric) between the one sent in the current iteration and the one sent in the previous. If the highest residual is lesser than the given value, the message passing is stopped and the probabilities are calculated using the last messages that were sent. This key defaults to 0.0001. + +The key \texttt{bp\_msg\_schedule} controls the message sending order. Its possible values are: +\begin{itemize} + \item \texttt{seq\_fixed}, at each iteration, all messages are sent with the same order. + + \item \texttt{seq\_random}, at each iteration, all messages are sent with a random order. + + \item \texttt{parallel}, at each iteration, all messages are calculated using only the values of the previous iteration. + + \item \texttt{max\_residual}, the next message to be sent is the one with maximum residual (as explained in the paper \textit{Residual Belief Propagation: Informed Scheduling for Asynchronous Message Passing}). +\end{itemize} +It defaults to \texttt{seq\_fixed}. + +\end{document} From f0768e76f283d1a35aec0a8ce84d4e09fc6f637a Mon Sep 17 00:00:00 2001 From: Tiago Gomes Date: Sat, 5 Jan 2013 01:02:19 +0000 Subject: [PATCH 61/89] Improve Makefile --- packages/CLPBN/Makefile.in | 38 +++++++++++++++++++++----------------- 1 file changed, 21 insertions(+), 17 deletions(-) diff --git a/packages/CLPBN/Makefile.in b/packages/CLPBN/Makefile.in index 7b563b427..2ad9616c2 100644 --- a/packages/CLPBN/Makefile.in +++ b/packages/CLPBN/Makefile.in @@ -32,15 +32,16 @@ PDFLATEX=pdflatex PFL_MANUAL = $(srcdir)/pfl -CLPBN_TOP= $(srcdir)/clpbn.yap \ - $(srcdir)/pfl.yap - CLPBN_SRCDIR = $(srcdir)/clpbn CLPBN_LEARNING_SRCDIR = $(srcdir)/learning CLPBN_EXDIR = $(srcdir)/examples +CLPBN_TOP= \ + $(srcdir)/clpbn.yap \ + $(srcdir)/pfl.yap + CLPBN_PROGRAMS= \ $(CLPBN_SRCDIR)/aggregates.yap \ $(CLPBN_SRCDIR)/bdd.yap \ @@ -77,12 +78,24 @@ CLPBN_LEARNING_PROGRAMS= \ $(CLPBN_LEARNING_SRCDIR)/learn_utils.yap \ $(CLPBN_LEARNING_SRCDIR)/mle.yap +CLPBN_EXAMPLES= \ + $(CLPBN_EXDIR)/burglary-alarm.fg \ + $(CLPBN_EXDIR)/burglary-alarm.pfl \ + $(CLPBN_EXDIR)/burglary-alarm.uai \ + $(CLPBN_EXDIR)/cg.yap \ + $(CLPBN_EXDIR)/city.pfl \ + $(CLPBN_EXDIR)/comp_workshops.pfl \ + $(CLPBN_EXDIR)/social_network1.pfl \ + $(CLPBN_EXDIR)/social_network2.pfl \ + $(CLPBN_EXDIR)/sprinkler.pfl \ + $(CLPBN_EXDIR)/workshop_attrs.pfl + CLPBN_SCHOOL_EXAMPLES= \ $(CLPBN_EXDIR)/School/README \ $(CLPBN_EXDIR)/School/evidence_128.yap \ $(CLPBN_EXDIR)/School/parschema.pfl \ $(CLPBN_EXDIR)/School/school_128.yap \ - $(CLPBN_EXDIR)/School/school32.yap \ + $(CLPBN_EXDIR)/School/school_32.yap \ $(CLPBN_EXDIR)/School/school_64.yap \ $(CLPBN_EXDIR)/School/tables.yap @@ -102,20 +115,8 @@ CLPBN_LEARNING_EXAMPLES= \ $(CLPBN_EXDIR)/learning/sprinkler_params.yap \ $(CLPBN_EXDIR)/learning/train.yap -CLPBN_EXAMPLES= \ - $(CLPBN_EXDIR)/burglary-alarm.fg \ - $(CLPBN_EXDIR)/burglary-alarm.pfl \ - $(CLPBN_EXDIR)/burglary-alarm.uai \ - $(CLPBN_EXDIR)/cg.yap \ - $(CLPBN_EXDIR)/city.pfl \ - $(CLPBN_EXDIR)/comp_workshops.pfl \ - $(CLPBN_EXDIR)/social_network1.pfl \ - $(CLPBN_EXDIR)/social_network2.pfl \ - $(CLPBN_EXDIR)/sprinkler.pfl \ - $(CLPBN_EXDIR)/workshop_attrs.pfl - -install: $(CLBN_TOP) $(CLBN_PROGRAMS) $(CLPBN_PROGRAMS) +install: $(CLBN_TOP) $(CLBN_PROGRAMS) $(CLPBN_LEARNING_PROGRAMS) $(CLPBN_SCHOOL_EXAMPLES) $(CLPBN_HMMER_EXAMPLES) $(CLPBN_LEARNING_EXAMPLES) mkdir -p $(DESTDIR)$(SHAREDIR)/clpbn mkdir -p $(DESTDIR)$(SHAREDIR)/clpbn/learning mkdir -p $(DESTDIR)$(EXDIR) @@ -130,9 +131,12 @@ install: $(CLBN_TOP) 
$(CLBN_PROGRAMS) $(CLPBN_PROGRAMS) for h in $(CLPBN_HMMER_EXAMPLES); do $(INSTALL_DATA) $$h $(DESTDIR)$(EXDIR)/HMMer; done for h in $(CLPBN_LEARNING_EXAMPLES); do $(INSTALL_DATA) $$h $(DESTDIR)$(EXDIR)/learning; done + docs: $(MANUAL) $(PDFLATEX) $(PFL_MANUAL) $(PDFLATEX) $(PFL_MANUAL) + rm pfl.aux pfl.log + install_docs: docs $(INSTALL_DATA) pfl.pdf $(DESTDIR)$(EXDIR) From ec2d9e09675834f3a71e79937f2562db8cab07fd Mon Sep 17 00:00:00 2001 From: Tiago Gomes Date: Sat, 5 Jan 2013 12:04:43 +0000 Subject: [PATCH 62/89] Trivial --- packages/CLPBN/horus/BeliefProp.h | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/CLPBN/horus/BeliefProp.h b/packages/CLPBN/horus/BeliefProp.h index 68d47ac5c..5fad0c496 100644 --- a/packages/CLPBN/horus/BeliefProp.h +++ b/packages/CLPBN/horus/BeliefProp.h @@ -51,7 +51,7 @@ class BpLink void updateResidual (void) { - residual_ = LogAware::getMaxNorm (v1_,v2_); + residual_ = LogAware::getMaxNorm (v1_, v2_); } virtual void updateMessage (void) From 96f4e7ea1bcf22293b98ecdff02cf98fae3c86f8 Mon Sep 17 00:00:00 2001 From: Tiago Gomes Date: Mon, 7 Jan 2013 14:04:44 +0000 Subject: [PATCH 63/89] Fix typo --- packages/CLPBN/clpbn/horus.yap | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/CLPBN/clpbn/horus.yap b/packages/CLPBN/clpbn/horus.yap index b43d51ded..03d5662ff 100644 --- a/packages/CLPBN/clpbn/horus.yap +++ b/packages/CLPBN/clpbn/horus.yap @@ -5,7 +5,7 @@ ********************************************************/ :- module(clpbn_horus, - [set_horus_flag/1, + [set_horus_flag/2, cpp_create_lifted_network/3, cpp_create_ground_network/4, cpp_set_parfactors_params/3, From 91dbd60ad4aaa21c6641b398a7a5b09462fbf2e1 Mon Sep 17 00:00:00 2001 From: Tiago Gomes Date: Mon, 7 Jan 2013 14:05:56 +0000 Subject: [PATCH 64/89] Allow calling set_horus_flag/2 without loading clpbn_horus --- packages/CLPBN/pfl.yap | 3 +++ 1 file changed, 3 insertions(+) diff --git a/packages/CLPBN/pfl.yap b/packages/CLPBN/pfl.yap index 4c2a0efc6..b36c74950 100644 --- a/packages/CLPBN/pfl.yap +++ b/packages/CLPBN/pfl.yap @@ -32,6 +32,9 @@ :- reexport(library(clpbn/aggregates), [avg_factors/5]). +:- reexport('clpbn/horus', + [set_horus_flag/2]). + :- ( % if clp(bn) has done loading, we're top-level predicate_property(set_pfl_flag(_,_), imported_from(clpbn)) -> From bafd7320a51e022de388fb0c77370ad7ed41e790 Mon Sep 17 00:00:00 2001 From: Tiago Gomes Date: Mon, 7 Jan 2013 14:59:51 +0000 Subject: [PATCH 65/89] Rework a bit the examples --- packages/CLPBN/examples/city.pfl | 69 +++++++++++---------- packages/CLPBN/examples/comp_workshops.pfl | 47 +++++++------- packages/CLPBN/examples/social_network1.pfl | 32 ++++++---- packages/CLPBN/examples/social_network2.pfl | 32 ++++++---- packages/CLPBN/examples/workshop_attrs.pfl | 23 ++++--- 5 files changed, 115 insertions(+), 88 deletions(-) diff --git a/packages/CLPBN/examples/city.pfl b/packages/CLPBN/examples/city.pfl index 37e0eeb22..d79e824a8 100644 --- a/packages/CLPBN/examples/city.pfl +++ b/packages/CLPBN/examples/city.pfl @@ -1,3 +1,8 @@ +/* + Model from the paper "First-order + probabilistic inference" +*/ + :- use_module(library(pfl)). :- set_solver(hve). @@ -11,14 +16,14 @@ %:- set_solver(lkc). %:- set_solver(lbp). -:- multifile people/2. +:- multifile person/2. :- multifile ev/1. -people(joe,nyc). -people(p2, nyc). -people(p3, nyc). -people(p4, nyc). -people(p5, nyc). +person(joe,nyc). +person(p2, nyc). +person(p3, nyc). +person(p4, nyc). +person(p5, nyc). ev(descn(p2, fits)). ev(descn(p3, fits)). 
@@ -26,41 +31,41 @@ ev(descn(p4, fits)). ev(descn(p5, fits)). bayes city_conservativeness(C)::[high,low] ; - cons_table ; - [people(_,C)]. + cons_table ; + [person(_,C)]. bayes gender(P)::[male,female] ; - gender_table ; - [people(P,_)]. + gender_table ; + [person(P,_)]. bayes hair_color(P)::[dark,bright], city_conservativeness(C) ; - hair_color_table ; - [people(P,C)]. + hair_color_table ; + [person(P,C)]. bayes car_color(P)::[dark,bright], hair_color(P) ; - car_color_table ; - [people(P,_)]. + car_color_table ; + [person(P,_)]. bayes height(P)::[tall,short], gender(P) ; - height_table ; - [people(P,_)]. + height_table ; + [person(P,_)]. bayes shoe_size(P)::[big,small], height(P) ; - shoe_size_table ; - [people(P,_)]. + shoe_size_table ; + [person(P,_)]. bayes guilty(P)::[y,n] ; - guilty_table ; - [people(P,_)]. + guilty_table ; + [person(P,_)]. bayes descn(P)::[fits,dont_fit], car_color(P), - hair_color(P), height(P), guilty(P) ; - descn_table ; - [people(P,_)]. + hair_color(P), height(P), guilty(P) ; + descn_table ; + [person(P,_)]. bayes witness(C), descn(Joe), descn(P2) ; - witness_table ; - [people(_,C), Joe=joe, P2=p2]. + witness_table ; + [person(_,C), Joe=joe, P2=p2]. cons_table( @@ -109,20 +114,20 @@ witness_table( runall(G, Wrapper) :- - findall(G, Wrapper, L), - execute_all(L). + findall(G, Wrapper, L), + execute_all(L). execute_all([]). execute_all(G.L) :- - call(G), - execute_all(L). + call(G), + execute_all(L). is_joe_guilty(Guilty) :- - witness(nyc, t), - runall(X, ev(X)), - guilty(joe, Guilty). + witness(nyc, t), + runall(X, ev(X)), + guilty(joe, Guilty). % ?- is_joe_guilty(Guilty). diff --git a/packages/CLPBN/examples/comp_workshops.pfl b/packages/CLPBN/examples/comp_workshops.pfl index b14a0fba2..1b020035b 100644 --- a/packages/CLPBN/examples/comp_workshops.pfl +++ b/packages/CLPBN/examples/comp_workshops.pfl @@ -1,3 +1,8 @@ +/* + Model from the paper "Lifted Probabilistic + Inference with Counting Formulas" +*/ + :- use_module(library(pfl)). :- set_solver(hve). @@ -10,31 +15,31 @@ %:- set_solver(lkc). %:- set_solver(lbp). -:- multifile c/2. +:- multifile reg/2. -c(p1,w1). -c(p1,w2). -c(p1,w3). -c(p2,w1). -c(p2,w2). -c(p2,w3). -c(p3,w1). -c(p3,w2). -c(p3,w3). -c(p4,w1). -c(p4,w2). -c(p4,w3). -c(p5,w1). -c(p5,w2). -c(p5,w3). +reg(p1,w1). +reg(p1,w2). +reg(p1,w3). +reg(p2,w1). +reg(p2,w2). +reg(p2,w3). +reg(p3,w1). +reg(p3,w2). +reg(p3,w3). +reg(p4,w1). +reg(p4,w2). +reg(p4,w3). +reg(p5,w1). +reg(p5,w2). +reg(p5,w3). markov attends(P), hot(W) ; - [0.2, 0.8, 0.8, 0.8] ; - [c(P,W)]. + [0.2, 0.8, 0.8, 0.8] ; + [reg(P,W)]. markov attends(P), series ; - [0.501, 0.499, 0.499, 0.499] ; - [c(P,_)]. + [0.501, 0.499, 0.499, 0.499] ; + [reg(P,_)]. -?- series(X). +% ?- series(X). diff --git a/packages/CLPBN/examples/social_network1.pfl b/packages/CLPBN/examples/social_network1.pfl index 8330ebe84..e8f976e3a 100644 --- a/packages/CLPBN/examples/social_network1.pfl +++ b/packages/CLPBN/examples/social_network1.pfl @@ -1,3 +1,8 @@ +/* + Model from the paper "Lifted First-Order + Belief Propagation" +*/ + :- use_module(library(pfl)). :- set_solver(hve). @@ -11,28 +16,29 @@ %:- set_solver(lkc). %:- set_solver(lbp). -:- multifile people/1. +:- multifile person/1. -people @ 5. +person @ 5. -people(X,Y) :- - people(X), - people(Y), - X \== Y. +person(X,Y) :- + person(X), + person(Y) +% ,X \== Y + . -markov smokes(X) ; [1.0, 4.0552]; [people(X)]. +markov smokes(X) ; [1.0, 4.0552]; [person(X)]. -markov cancer(X) ; [1.0, 9.9742]; [people(X)]. +markov cancer(X) ; [1.0, 9.9742]; [person(X)]. 
-markov friends(X,Y) ; [1.0, 99.48432] ; [people(X,Y)]. +markov friends(X,Y) ; [1.0, 99.48432] ; [person(X,Y)]. markov smokes(X), cancer(X) ; - [4.48169, 4.48169, 1.0, 4.48169] ; - [people(X)]. + [4.48169, 4.48169, 1.0, 4.48169] ; + [person(X)]. markov friends(X,Y), smokes(X), smokes(Y) ; - [3.004166, 3.004166, 3.004166, 3.004166, 3.004166, 1.0, 1.0, 3.004166] ; - [people(X,Y)]. + [3.004166, 3.004166, 3.004166, 3.004166, 3.004166, 1.0, 1.0, 3.004166] ; + [person(X,Y)]. % ?- friends(p1,p2,X). diff --git a/packages/CLPBN/examples/social_network2.pfl b/packages/CLPBN/examples/social_network2.pfl index b030fc0a0..8dc823da4 100644 --- a/packages/CLPBN/examples/social_network2.pfl +++ b/packages/CLPBN/examples/social_network2.pfl @@ -1,3 +1,8 @@ +/* + Model from the paper "Lifted Inference Seen + from the Other Side: The Tractable Features" +*/ + :- use_module(library(pfl)). :- set_solver(hve). @@ -11,28 +16,29 @@ %:- set_solver(lkc). %:- set_solver(lbp). -:- multifile people/1. +:- multifile person/1. -people @ 5. +person @ 5. -people(X,Y) :- - people(X), - people(Y). -% X \== Y. +person(X,Y) :- + person(X), + person(Y) +% ,X \== Y + . -markov smokes(X) ; [1.0, 4.0552]; [people(X)]. +markov smokes(X) ; [1.0, 4.0552]; [person(X)]. -markov asthma(X) ; [1.0, 9.9742] ; [people(X)]. +markov asthma(X) ; [1.0, 9.9742] ; [person(X)]. -markov friends(X,Y) ; [1.0, 99.48432] ; [people(X,Y)]. +markov friends(X,Y) ; [1.0, 99.48432] ; [person(X,Y)]. markov asthma(X), smokes(X) ; - [4.48169, 4.48169, 1.0, 4.48169] ; - [people(X)]. + [4.48169, 4.48169, 1.0, 4.48169] ; + [person(X)]. markov asthma(X), friends(X,Y), smokes(Y) ; - [3.004166, 3.004166, 3.004166, 3.004166, 3.004166, 1.0, 1.0, 3.004166] ; - [people(X,Y)]. + [3.004166, 3.004166, 3.004166, 3.004166, 3.004166, 1.0, 1.0, 3.004166] ; + [person(X,Y)]. % ?- smokes(p1,t), smokes(p2,t), friends(p1,p2,X). diff --git a/packages/CLPBN/examples/workshop_attrs.pfl b/packages/CLPBN/examples/workshop_attrs.pfl index c5e9d08f1..248529980 100644 --- a/packages/CLPBN/examples/workshop_attrs.pfl +++ b/packages/CLPBN/examples/workshop_attrs.pfl @@ -1,3 +1,8 @@ +/* + Model from the paper "Lifted Probabilistic + Inference with Counting Formulas" +*/ + :- use_module(library(pfl)). :- set_solver(hve). @@ -11,23 +16,23 @@ %:- set_solver(lkc). %:- set_solver(lbp). -:- multifile people/1. +:- multifile person/1. -people @ 5. +person @ 5. -markov attends(P), attr1 ; [0.7, 0.3, 0.3, 0.3] ; [people(P)]. +markov attends(P), attr1 ; [0.7, 0.3, 0.3, 0.3] ; [person(P)]. -markov attends(P), attr2 ; [0.7, 0.3, 0.3, 0.3] ; [people(P)]. +markov attends(P), attr2 ; [0.7, 0.3, 0.3, 0.3] ; [person(P)]. -markov attends(P), attr3 ; [0.7, 0.3, 0.3, 0.3] ; [people(P)]. +markov attends(P), attr3 ; [0.7, 0.3, 0.3, 0.3] ; [person(P)]. -markov attends(P), attr4 ; [0.7, 0.3, 0.3, 0.3] ; [people(P)]. +markov attends(P), attr4 ; [0.7, 0.3, 0.3, 0.3] ; [person(P)]. -markov attends(P), attr5 ; [0.7, 0.3, 0.3, 0.3] ; [people(P)]. +markov attends(P), attr5 ; [0.7, 0.3, 0.3, 0.3] ; [person(P)]. -markov attends(P), attr6 ; [0.7, 0.3, 0.3, 0.3] ; [people(P)]. +markov attends(P), attr6 ; [0.7, 0.3, 0.3, 0.3] ; [person(P)]. -markov attends(P), series ; [0.501, 0.499, 0.499, 0.499] ; [people(P)]. +markov attends(P), series ; [0.501, 0.499, 0.499, 0.499] ; [person(P)]. % ?- series(X). 
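A note on the potentials used in social_network1.pfl and social_network2.pfl (and on the 4.482 and 3.004 values quoted in the PFL manual): they appear to be exponentiated Markov logic weights, following the usual translation in which a ground formula of weight $w$ contributes a potential of $e^{w}$ to each assignment that satisfies it and $1.0$ to the rest. This is an interpretation of the numbers, not something the files state, but the values line up with the weights 1.5 and 1.1 quoted in the manual plus unit clauses of weight 1.4, 2.3 and 4.6:

$$e^{1.4} \approx 4.0552, \qquad e^{2.3} \approx 9.9742, \qquad e^{4.6} \approx 99.48432, \qquad e^{1.5} \approx 4.48169, \qquad e^{1.1} \approx 3.004166.$$
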
From 82a4cc508b25560ec7cd84cee569677ae5405c69 Mon Sep 17 00:00:00 2001 From: Tiago Gomes Date: Mon, 7 Jan 2013 22:01:49 +0000 Subject: [PATCH 66/89] Change burglary-alarm.uai to be bayes instead of markov --- packages/CLPBN/examples/burglary-alarm.uai | 23 +++++++++++----------- 1 file changed, 12 insertions(+), 11 deletions(-) diff --git a/packages/CLPBN/examples/burglary-alarm.uai b/packages/CLPBN/examples/burglary-alarm.uai index 4e950cd9f..111f4178a 100644 --- a/packages/CLPBN/examples/burglary-alarm.uai +++ b/packages/CLPBN/examples/burglary-alarm.uai @@ -1,12 +1,13 @@ -MARKOV +BAYES + 5 2 2 2 2 2 5 1 0 1 1 -3 2 0 1 -2 3 2 -2 4 2 +3 0 1 2 +2 2 3 +2 2 4 2 .001 .999 @@ -15,14 +16,14 @@ MARKOV .002 .998 8 - .95 .94 .29 .001 - .05 .06 .71 .999 + .95 .05 .94 .06 + .29 .71 .001 .999 4 - .9 .05 - .1 .95 + .9 .1 + .05 .95 4 - .7 .01 - .3 .99 - + .7 .3 + .01 .99 + From ba32ebc5f5a4319a4d22ef9649b2f46f2a59bb63 Mon Sep 17 00:00:00 2001 From: Tiago Gomes Date: Mon, 7 Jan 2013 22:17:05 +0000 Subject: [PATCH 67/89] Add support for bayesian networks defined in an UAI file format --- packages/CLPBN/horus/FactorGraph.cpp | 47 ++++++++++++++++++++-------- 1 file changed, 34 insertions(+), 13 deletions(-) diff --git a/packages/CLPBN/horus/FactorGraph.cpp b/packages/CLPBN/horus/FactorGraph.cpp index 85925abf6..3d781628a 100644 --- a/packages/CLPBN/horus/FactorGraph.cpp +++ b/packages/CLPBN/horus/FactorGraph.cpp @@ -55,8 +55,12 @@ FactorGraph::readFromUaiFormat (const char* fileName) ignoreLines (is); string line; getline (is, line); - if (line != "MARKOV") { - cerr << "Error: the network must be a MARKOV network." << endl; + if (line == "BAYES") { + bayesFactors_ = true; + } else if (line == "MARKOV") { + bayesFactors_ = false; + } else { + cerr << "Error: the type of network is missing." << endl; exit (EXIT_FAILURE); } // read the number of vars @@ -73,13 +77,13 @@ FactorGraph::readFromUaiFormat (const char* fileName) unsigned nrArgs; unsigned vid; is >> nrFactors; - vector factorVarIds; - vector factorRanges; + vector allVarIds; + vector allRanges; for (unsigned i = 0; i < nrFactors; i++) { ignoreLines (is); is >> nrArgs; - factorVarIds.push_back ({ }); - factorRanges.push_back ({ }); + allVarIds.push_back ({ }); + allRanges.push_back ({ }); for (unsigned j = 0; j < nrArgs; j++) { is >> vid; if (vid >= ranges.size()) { @@ -88,8 +92,8 @@ FactorGraph::readFromUaiFormat (const char* fileName) cerr << "." << endl; exit (EXIT_FAILURE); } - factorVarIds.back().push_back (vid); - factorRanges.back().push_back (ranges[vid]); + allVarIds.back().push_back (vid); + allRanges.back().push_back (ranges[vid]); } } // read the parameters @@ -97,9 +101,9 @@ FactorGraph::readFromUaiFormat (const char* fileName) for (unsigned i = 0; i < nrFactors; i++) { ignoreLines (is); is >> nrParams; - if (nrParams != Util::sizeExpected (factorRanges[i])) { + if (nrParams != Util::sizeExpected (allRanges[i])) { cerr << "Error: invalid number of parameters for factor nº " << i ; - cerr << ", " << Util::sizeExpected (factorRanges[i]); + cerr << ", " << Util::sizeExpected (allRanges[i]); cerr << " expected, " << nrParams << " given." 
<< endl; exit (EXIT_FAILURE); } @@ -110,7 +114,14 @@ FactorGraph::readFromUaiFormat (const char* fileName) if (Globals::logDomain) { Util::log (params); } - addFactor (Factor (factorVarIds[i], factorRanges[i], params)); + Factor f (allVarIds[i], allRanges[i], params); + if (bayesFactors_ && allVarIds[i].size() > 1) { + // In this format the child is the last variable, + // move it to be the first + std::swap (allVarIds[i].front(), allVarIds[i].back()); + f.reorderArguments (allVarIds[i]); + } + addFactor (f); } is.close(); } @@ -318,7 +329,8 @@ FactorGraph::exportToUaiFormat (const char* fileName) const cerr << "Error: couldn't open file '" << fileName << "'." ; return; } - out << "MARKOV" << endl; + out << (bayesFactors_ ? "BAYES" : "MARKOV") ; + out << endl << endl; out << varNodes_.size() << endl; VarNodes sortedVns = varNodes_; std::sort (sortedVns.begin(), sortedVns.end(), sortByVarId()); @@ -328,11 +340,20 @@ FactorGraph::exportToUaiFormat (const char* fileName) const out << endl << facNodes_.size() << endl; for (size_t i = 0; i < facNodes_.size(); i++) { VarIds args = facNodes_[i]->factor().arguments(); + if (bayesFactors_) { + std::swap (args.front(), args.back()); + } out << args.size() << " " << Util::elementsToString (args) << endl; } out << endl; for (size_t i = 0; i < facNodes_.size(); i++) { - Params params = facNodes_[i]->factor().params(); + Factor f = facNodes_[i]->factor(); + if (bayesFactors_) { + VarIds args = f.arguments(); + std::swap (args.front(), args.back()); + f.reorderArguments (args); + } + Params params = f.params(); if (Globals::logDomain) { Util::exp (params); } From 9bb43360a21d2af929cb9c738ac2eb63c21465ac Mon Sep 17 00:00:00 2001 From: Tiago Gomes Date: Mon, 7 Jan 2013 22:18:50 +0000 Subject: [PATCH 68/89] Rearrange the probabilities to do a clean diff against the exported model --- packages/CLPBN/examples/burglary-alarm.uai | 13 +++++-------- 1 file changed, 5 insertions(+), 8 deletions(-) diff --git a/packages/CLPBN/examples/burglary-alarm.uai b/packages/CLPBN/examples/burglary-alarm.uai index 111f4178a..c584a98aa 100644 --- a/packages/CLPBN/examples/burglary-alarm.uai +++ b/packages/CLPBN/examples/burglary-alarm.uai @@ -10,20 +10,17 @@ BAYES 2 2 4 2 - .001 .999 + 0.001 0.999 2 - .002 .998 + 0.002 0.998 8 - .95 .05 .94 .06 - .29 .71 .001 .999 + 0.95 0.05 0.94 0.06 0.29 0.71 0.001 0.999 4 - .9 .1 - .05 .95 + 0.9 0.1 0.05 0.95 4 - .7 .3 - .01 .99 + 0.7 0.3 0.01 0.99 From f598881cc5aab0a4e554ec7a215dbcaa1d25f443 Mon Sep 17 00:00:00 2001 From: Tiago Gomes Date: Mon, 7 Jan 2013 22:20:53 +0000 Subject: [PATCH 69/89] Explain better the potential value order --- packages/CLPBN/pfl.tex | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/CLPBN/pfl.tex b/packages/CLPBN/pfl.tex index 5da5399e8..26caf6a38 100644 --- a/packages/CLPBN/pfl.tex +++ b/packages/CLPBN/pfl.tex @@ -52,7 +52,7 @@ $$Type~~F~~;~~Phi~~;~~C.$$ \item $F$ is a comma-separated sequence of Prolog terms that will define sets of random variables under the constraint $C$. If $Type$ is \texttt{bayes}, the first term defines the node while the others defines its parents. -\item $Phi$ is either a Prolog list of potential values or a Prolog goal that unifies with one. If $Type$ is \texttt{bayes}, this will correspond to the conditional probability table. +\item $Phi$ is either a Prolog list of potential values or a Prolog goal that unifies with one. If $Type$ is \texttt{bayes}, this will correspond to the conditional probability table. 
Domain combinations are implicitly assumed in ascending order, with the first term being the 'most significant' (e.g. $\mathtt{x_0y_0}$, $\mathtt{x_0y_1}$, $\mathtt{x_0y_2}$, $\mathtt{x_1y_0}$, $\mathtt{x_1y_1}$, $\mathtt{x_1y_2}$). \item $C$ is a (possibly empty) list of Prolog goals that will instantiate the logical variables that appear in $F$, that is, the successful substitutions for the goals in $C$ will be the valid values for the logical variables. This allows the constraint to be any relation (set of tuples) over the logical variables. \end{itemize} From c92b7c3e7e5884d326e32be35a904072538b125b Mon Sep 17 00:00:00 2001 From: Tiago Gomes Date: Tue, 8 Jan 2013 00:33:01 +0000 Subject: [PATCH 70/89] Install hcli to the bin directory --- packages/CLPBN/horus/Makefile.in | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/packages/CLPBN/horus/Makefile.in b/packages/CLPBN/horus/Makefile.in index 24e7d0b87..cbf9d700e 100644 --- a/packages/CLPBN/horus/Makefile.in +++ b/packages/CLPBN/horus/Makefile.in @@ -43,6 +43,7 @@ SO=@SO@ #4.1VPATH=@srcdir@:@srcdir@/OPTYap CWD=$(PWD) +HCLI = $(srcdir)/hcli HEADERS = \ $(srcdir)/BayesBall.h \ @@ -156,15 +157,16 @@ all: $(SOBJS) hcli hcli: $(HCLI_OBJS) - $(CXX) -o hcli $(HCLI_OBJS) + $(CXX) -o $(HCLI) $(HCLI_OBJS) install: all $(INSTALL_PROGRAM) $(SOBJS) $(DESTDIR)$(YAPLIBDIR) + $(INSTALL_PROGRAM) $(HCLI) $(DESTDIR)$(BINDIR) clean: - rm -f *.o *~ $(OBJS) $(SOBJS) *.BAK hcli + rm -f *.o *~ $(OBJS) $(SOBJS) $(HCLI) *.BAK erase_dots: From 182429252ef210ed9435a14c7bd361bc8fcdb2b0 Mon Sep 17 00:00:00 2001 From: Tiago Gomes Date: Tue, 8 Jan 2013 14:47:21 +0000 Subject: [PATCH 71/89] Update the usage string for hcli --- packages/CLPBN/horus/HorusCli.cpp | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/CLPBN/horus/HorusCli.cpp b/packages/CLPBN/horus/HorusCli.cpp index 0997e4655..9a0fb7a3a 100644 --- a/packages/CLPBN/horus/HorusCli.cpp +++ b/packages/CLPBN/horus/HorusCli.cpp @@ -16,8 +16,8 @@ VarIds readQueryAndEvidence (FactorGraph&, int, const char* [], int); void runSolver (const FactorGraph&, const VarIds&); -const string USAGE = "usage: ./hcli [HORUS_FLAG=VALUE] \ -MODEL_FILE [VARIABLE | OBSERVED_VARIABLE=EVIDENCE] ..." ; +const string USAGE = "usage: ./hcli \ +[=]... [|=]..." ; int From 6375d05a93dcd5a09fce9ef824e02b4a09f412d5 Mon Sep 17 00:00:00 2001 From: Tiago Gomes Date: Tue, 8 Jan 2013 15:31:52 +0000 Subject: [PATCH 72/89] PFL manual: add section for horus cli --- packages/CLPBN/pfl.tex | 40 +++++++++++++++++++++++++++++++++++++++- 1 file changed, 39 insertions(+), 1 deletion(-) diff --git a/packages/CLPBN/pfl.tex b/packages/CLPBN/pfl.tex index 26caf6a38..27ed6275f 100644 --- a/packages/CLPBN/pfl.tex +++ b/packages/CLPBN/pfl.tex @@ -1,8 +1,9 @@ \documentclass{article} -\usepackage{tikz} +\usepackage{hyperref} \usepackage{setspace} \usepackage{fancyvrb} +\usepackage{tikz} \usetikzlibrary{arrows,shapes,positioning} \begin{document} @@ -286,4 +287,41 @@ The key \texttt{bp\_msg\_schedule} controls the message sending order. Its possi \end{itemize} It defaults to \texttt{seq\_fixed}. 
+\section{Horus Command Line} +This package also includes an utility to perform inference over probabilistic graphical models described in other formats, namely the \href{http://cs.ru.nl/~jorism/libDAI/doc/fileformats.html}{libDAI file format}, and the \href{http://graphmod.ics.uci.edu/uai08/FileFormat}{UAI08 file format} + +This utility is called \texttt{hcli} and can be found inside binary directory used for the YAP installation. Its usage is: + +\begin{verbatim} +./hcli [=]... [|=]... +\end{verbatim} + +Let's assume that the working directory is where \texttt{hcli} is installed. We can perform inference in any supported model by passing the file name where the model is defined as argument. Next, we show the command for loading a model described in an UAI file format. + +\begin{verbatim} +./hcli $EXAMPLES_DIR$/burglary-alarm.uai +\end{verbatim} + +With this command, the program will load the model and print the marginal probabilities for all random variables defined in the model. We can view only the marginal probability for some variable with a identifier $X$, if we pass $X$ as an extra argument following the file name. For instance, the following command will show only the marginal probability for the variable with identifier $0$. + +\begin{verbatim} +./hcli $EXAMPLES_DIR$/burglary-alarm.uai 0 +\end{verbatim} + +If we give more than one variable identifier as argument, the program will show the joint probability for all variables given. + +Evidence can be given as pairs with a variable identifier and its observed state (index), separated by a '=`. For instance, we can introduce knowledge that some variable with identifier $0$ has evidence on its second state as follows. + +\begin{verbatim} +./hcli $EXAMPLES_DIR$/burglary-alarm.uai 0=1 +\end{verbatim} + +By default, all probability tasks are resolved with the \texttt{hve} solver. It is possible to choose another solver using the \texttt{ground\_solver} key as follows. Note that only \texttt{hve}, \texttt{bp} and \texttt{cbp} can be used in \texttt{hcli}. + +\begin{verbatim} +./hcli ground_solver=bp ../examples/burglary-alarm.uai +\end{verbatim} + +The options that are available with the \texttt{set\_horus\_flag/2} predicate can be used in \texttt{hcli} too. The syntax to use are pairs $Key=Value$ before the model's file name. 
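As a minimal sketch of such a call (assuming only keys already documented in this manual, and reusing the examples-directory placeholder from above), several pairs can be given at once, all of them before the file name:

\begin{verbatim}
./hcli verbosity=1 bp_msg_schedule=max_residual ground_solver=bp \
    $EXAMPLES_DIR$/burglary-alarm.uai 0=1
\end{verbatim}

Here belief propagation is selected, its message schedule is changed, basic debugging output is requested, and evidence is placed on the second state of variable $0$.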
+ \end{document} From 75b652b0c9f7b13d6565be345c5e61589d73f4f8 Mon Sep 17 00:00:00 2001 From: Tiago Gomes Date: Tue, 8 Jan 2013 15:33:55 +0000 Subject: [PATCH 73/89] PFL manual: improve title page and reword a sentence --- packages/CLPBN/pfl.tex | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/packages/CLPBN/pfl.tex b/packages/CLPBN/pfl.tex index 27ed6275f..7e210efb2 100644 --- a/packages/CLPBN/pfl.tex +++ b/packages/CLPBN/pfl.tex @@ -23,11 +23,17 @@ \setlength{\parskip}{\baselineskip} -\title{The Prolog Factor Language (PFL)} -\author{} +\title{\Huge\textbf{Prolog Factor Language (PFL) Manual}} +\author{Tiago Gomes, V\'{i}tor Santos Costa} \date{} \maketitle +\thispagestyle{empty} +\vspace{5cm} +\begin{center} + \large Last revision: January 8, 2013 +\end{center} +\newpage %------------------------------------------------------------------------------ %------------------------------------------------------------------------------ @@ -38,6 +44,7 @@ The Prolog Factor Language (PFL) is a extension of the Prolog language that allo + %------------------------------------------------------------------------------ %------------------------------------------------------------------------------ %------------------------------------------------------------------------------ @@ -120,7 +127,7 @@ $$Type~~F~~;~~Phi~~;~~C.$$ \end{center} \end{figure} -To better explain the language, below we show the PFL representation for network found in Figure~\ref{fig:sprinkler-bn}. +Towards a better understanding of the language, next we show the PFL representation for network found in Figure~\ref{fig:sprinkler-bn}. \begin{pflcode} :- use_module(library(pfl)). From 4220069d909a205f48d9868ee29762a031013050 Mon Sep 17 00:00:00 2001 From: Tiago Gomes Date: Tue, 8 Jan 2013 15:34:56 +0000 Subject: [PATCH 74/89] PFL manual: use the Unix end-of-line marker --- packages/CLPBN/pfl.tex | 668 ++++++++++++++++++++--------------------- 1 file changed, 334 insertions(+), 334 deletions(-) diff --git a/packages/CLPBN/pfl.tex b/packages/CLPBN/pfl.tex index 7e210efb2..df0d9db19 100644 --- a/packages/CLPBN/pfl.tex +++ b/packages/CLPBN/pfl.tex @@ -1,334 +1,334 @@ -\documentclass{article} - -\usepackage{hyperref} -\usepackage{setspace} -\usepackage{fancyvrb} -\usepackage{tikz} -\usetikzlibrary{arrows,shapes,positioning} - -\begin{document} - -\DefineVerbatimEnvironment{pflcodeve}{Verbatim} {xleftmargin=3.0em,fontsize=\small} - -\newenvironment{pflcode} - {\VerbatimEnvironment \setstretch{0.8} \begin{pflcodeve}} - {\end{pflcodeve} } - -\newcommand{\true} {\mathtt{t}} -\newcommand{\false} {\mathtt{f}} -\newcommand{\tableline} {\noalign{\hrule height 0.8pt}} - -\tikzstyle{nodestyle} = [draw, thick, circle, minimum size=0.9cm] -\tikzstyle{bnedgestyle} = [-triangle 45,thick] - -\setlength{\parskip}{\baselineskip} - -\title{\Huge\textbf{Prolog Factor Language (PFL) Manual}} -\author{Tiago Gomes, V\'{i}tor Santos Costa} -\date{} - -\maketitle -\thispagestyle{empty} -\vspace{5cm} -\begin{center} - \large Last revision: January 8, 2013 -\end{center} -\newpage - -%------------------------------------------------------------------------------ -%------------------------------------------------------------------------------ -%------------------------------------------------------------------------------ -%------------------------------------------------------------------------------ -\section{Introduction} -The Prolog Factor Language (PFL) is a extension of the Prolog language that allows a natural representation of 
this first-order probabilistic models (either directed or undirected). PFL is also capable of solving probabilistic queries on this models through the implementation of several inference techniques: variable elimination, belief propagation, lifted variable elimination and lifted belief propagation. - - - - -%------------------------------------------------------------------------------ -%------------------------------------------------------------------------------ -%------------------------------------------------------------------------------ -%------------------------------------------------------------------------------ -\section{Language} -A first-order probabilistic graphical model is described using parametric factors, or just parfactors. The PFL syntax for a parfactor is - -$$Type~~F~~;~~Phi~~;~~C.$$ - -, where -\begin{itemize} -\item $Type$ refers the type of network over which the parfactor is defined. It can be \texttt{bayes} for directed networks, or \texttt{markov} for undirected ones. - -\item $F$ is a comma-separated sequence of Prolog terms that will define sets of random variables under the constraint $C$. If $Type$ is \texttt{bayes}, the first term defines the node while the others defines its parents. - -\item $Phi$ is either a Prolog list of potential values or a Prolog goal that unifies with one. If $Type$ is \texttt{bayes}, this will correspond to the conditional probability table. Domain combinations are implicitly assumed in ascending order, with the first term being the 'most significant' (e.g. $\mathtt{x_0y_0}$, $\mathtt{x_0y_1}$, $\mathtt{x_0y_2}$, $\mathtt{x_1y_0}$, $\mathtt{x_1y_1}$, $\mathtt{x_1y_2}$). - -\item $C$ is a (possibly empty) list of Prolog goals that will instantiate the logical variables that appear in $F$, that is, the successful substitutions for the goals in $C$ will be the valid values for the logical variables. This allows the constraint to be any relation (set of tuples) over the logical variables. -\end{itemize} - - -\begin{figure}[t!] 
-\begin{center} -\begin{tikzpicture}[>=latex',line join=bevel,transform shape,scale=0.8] - -\node (cloudy) at (50bp, 122bp) [nodestyle,ellipse,inner sep=0pt,minimum width=2.7cm] {$Cloudy$}; -\node (sprinker) at ( 0bp, 66bp) [nodestyle,ellipse,inner sep=0pt,minimum width=2.7cm] {$Sprinker$}; -\node (rain) at (100bp, 66bp) [nodestyle,ellipse,inner sep=0pt,minimum width=2.7cm] {$Rain$}; -\node (wetgrass) at (50bp, 10bp) [nodestyle,ellipse,inner sep=0pt,minimum width=2.7cm] {$WetGrass$}; -\draw [bnedgestyle] (cloudy) -- (sprinker); -\draw [bnedgestyle] (cloudy) -- (rain); -\draw [bnedgestyle] (sprinker) -- (wetgrass); -\draw [bnedgestyle] (rain) -- (wetgrass); - -\node [above=0.4cm of cloudy,inner sep=0pt] { -\begin{tabular}[b]{lc} - $C$ & $P(C)$ \\ \tableline - $\true$ & 0.5 \\ - $\false$ & 0.5 \\ -\end{tabular} -}; - -\node [left=0.4cm of sprinker,inner sep=0pt] { -\begin{tabular}{lcc} - $S$ & $C$ & $P(S|C)$ \\ \tableline - $\true$ & $\true$ & 0.1 \\ - $\true$ & $\false$ & 0.5 \\ - $\false$ & $\true$ & 0.9 \\ - $\false$ & $\false$ & 0.5 \\ -\end{tabular} -}; - -\node [right=0.4cm of rain,inner sep=0pt] { -\begin{tabular}{llc} - $R$ & $C$ & $P(R|C)$ \\ \tableline - $\true$ & $\true$ & 0.8 \\ - $\true$ & $\false$ & 0.2 \\ - $\false$ & $\true$ & 0.2 \\ - $\false$ & $\false$ & 0.8 \\ -\end{tabular} -}; - -\node [below=0.4cm of wetgrass,inner sep=0pt] { -\begin{tabular}{llll} - $W$ & $S$ & $R$ & $P(W|S,R)$ \\ \tableline - $\true$ & $\true$ & $\true$ & \hspace{1em} 0.99 \\ - $\true$ & $\true$ & $\false$ & \hspace{1em} 0.9 \\ - $\true$ & $\false$ & $\true$ & \hspace{1em} 0.9 \\ - $\true$ & $\false$ & $\false$ & \hspace{1em} 0.0 \\ - $\false$ & $\true$ & $\true$ & \hspace{1em} 0.01 \\ - $\false$ & $\true$ & $\false$ & \hspace{1em} 0.1 \\ - $\false$ & $\false$ & $\true$ & \hspace{1em} 0.1 \\ - $\false$ & $\false$ & $\false$ & \hspace{1em} 1.0 \\ -\end{tabular} -}; - -\end{tikzpicture} -\caption{The sprinkler network.} -\label{fig:sprinkler-bn} -\end{center} -\end{figure} - -Towards a better understanding of the language, next we show the PFL representation for network found in Figure~\ref{fig:sprinkler-bn}. - -\begin{pflcode} -:- use_module(library(pfl)). - -bayes cloudy ; cloudy_table ; []. - -bayes sprinkler, cloudy ; sprinkler_table ; []. - -bayes rain, cloudy ; rain_table ; []. - -bayes wet_grass, sprinkler, rain ; wet_grass_table ; []. - -cloudy_table( - [ 0.5, - 0.5 ]). - -sprinkler_table( - [ 0.1, 0.5, - 0.9, 0.5 ]). - -rain_table( - [ 0.8, 0.2, - 0.2, 0.8 ]). - -wet_grass_table( - [ 0.99, 0.9, 0.9, 0.0, - 0.01, 0.1, 0.1, 1.0 ]). -\end{pflcode} - -Note that this network is fully grounded, as the constraints are all empty. Next we present the PFL representation for a well-known markov logic network - the social network model. The weighted formulas of this model are shown below. - -\begin{pflcode} -1.5 : Smokes(x) => Cancer(x) -1.1 : Smokes(x) ^ Friends(x,y) => Smokes(y) -\end{pflcode} - -We can represent this model using PFL with the following code. - -\begin{pflcode} -:- use_module(library(pfl)). - -person(anna). -person(bob). - -markov smokes(X), cancer(X) ; - [4.482, 4.482, 1.0, 4.482] ; - [person(X)]. - -markov friends(X,Y), smokes(X), smokes(Y) ; - [3.004, 3.004, 3.004, 3.004, 3.004, 1.0, 1.0, 3.004] ; - [person(X), person(Y)]. -\end{pflcode} -%markov smokes(X) ; [1.0, 4.055]; [person(X)]. -%markov cancer(X) ; [1.0, 9.974]; [person(X)]. -%markov friends(X,Y) ; [1.0, 99.484] ; [person(X), person(Y)]. 
- -Notice that we defined the world to be consisted of two persons, \texttt{anne} and \texttt{bob}. We can easily add as many persons as we want by inserting in the program a fact like \texttt{person @ 10.}~. This would create ten persons named \texttt{p1}, \texttt{p2}, \dots, \texttt{p10}. - -Unlike other fist-order probabilistic languages, in PFL the logical variables that appear in the terms are not directly typed, and they will be only constrained by the goals that appear in the constraint of the parfactor. This allows the logical variables to be constrained by any relation (set of tuples), and not by pairwise (in)equalities. For instance, the next example defines a ground network with three factors, each over the random variables \texttt{p(a,b)}, \texttt{p(b,d)} and \texttt{p(d,e)}. - -\begin{pflcode} -constraint(a,b). -constraint(b,d). -constraint(d,e). - -markov p(A,B); some_table; [constraint(A,B)]. -\end{pflcode} - -We can easily add static evidence to PFL programs by inserting a fact with the same functor and arguments as the random variable, plus one extra argument with the observed state or value. For instance, suppose that we now that \texttt{anna} and \texttt{bob} are friends. We can add this knowledge to the program with the following fact: \texttt{friends(anna,bob,t).}~. - -One last note for the domain of the random variables. By default all terms will generate boolean (\texttt{t}/\texttt{f}) random variables. It is possible to chose a different domain by appending a list of the possible values or states to the term. Next we present a self-explanatory example of how this can be done. - -\begin{pflcode} -bayes professor_ability::[high, medium, low] ; [0.5, 0.4, 0.1]. -\end{pflcode} - -More examples can be found in the CLPBN examples directory, which defaults to ``share/doc/Yap/packages/examples/CLPBN'' from the base directory where the YAP Prolog system was installed. - -%------------------------------------------------------------------------------ -%------------------------------------------------------------------------------ -%------------------------------------------------------------------------------ -%------------------------------------------------------------------------------ -\section{Querying} -In this section we demonstrate how to use PFL to solve probabilistic queries. We will use the sprinkler network as an example. - -Assuming that the current directory is where the examples are located, first we load the model: - -\texttt{\$ yap -l sprinker.pfl} - -Let's suppose that we want to estimate the marginal probability for the $WetGrass$ random variable. We can do it calling the following goal: - -\texttt{?- wet\_grass(X).} - -The output of the goal will show the marginal probability for each $WetGrass$ possible state or value, that is, \texttt{t} and \texttt{f}. Notice that in PFL a random variable is identified by a term with the same functor and arguments plus one extra argument. - -Let's now suppose that we want to estimate the probability for the same random variable, but this time we have evidence that it had rained the day before. We can estimate this probability without resorting to static evidence with: - -\texttt{?- wet\_grass(X), rain(t).} - -PFL also supports calculating joint probability distributions. 
For instance, we can obtain the joint probability for $Sprinkler$ and $Rain$ with: - -\texttt{?- sprinkler(X), rain(Y).} - - -%------------------------------------------------------------------------------ -%------------------------------------------------------------------------------ -%------------------------------------------------------------------------------ -%------------------------------------------------------------------------------ -\section{Inference Options} -PFL supports both ground and lifted inference methods. The inference algorithm can be chosen by calling \texttt{set\_solver/1}. The following are supported: -\begin{itemize} - \item \texttt{ve}, variable elimination (written in Prolog) - \item \texttt{hve}, variable elimination (written in C++) - \item \texttt{jt}, junction tree - \item \texttt{bdd}, binary decision diagrams - \item \texttt{bp}, belief propagation - \item \texttt{cbp}, counting belief propagation - \item \texttt{gibbs}, gibbs sampling - \item \texttt{lve}, generalized counting first-order variable elimination (GC-FOVE) - \item \texttt{lkc}, lifted first-order knowledge compilation - \item \texttt{lbp}, lifted first-order belief propagation -\end{itemize} - -For instance, if we want to use belief propagation to solve some probabilistic query, we need to call first: - -\texttt{?- set\_solver(bp).} - -It is possible to tweak some parameters of PFL through \texttt{set\_horus\_flag/2} predicate. The first argument is a key that identifies the parameter that we desire to tweak, while the second is some possible value for this key. - -The \texttt{verbosity} key controls the level of debugging information that will be printed. Its possible values are positive integers. The higher the number, the more information that will be shown. For example, to view some basic debugging information we call: - -\texttt{?- set\_horus\_flag(verbosity, 1).} - -This key defaults to 0 (no debugging information) and only \texttt{hve}, \texttt{bp}, \texttt{cbp}, \texttt{lve}, \texttt{lkc} and \texttt{lbp} solvers have support for this key. - -The \texttt{use\_logarithms} key controls whether the calculations performed during inference should be done in a logarithm domain or not. Its values can be \texttt{true} or \texttt{false}. By default is \texttt{true} and only affects \texttt{hve}, \texttt{bp}, \texttt{cbp}, \texttt{lve}, \texttt{lkc} and \texttt{lbp} solvers. The remaining solvers always do their calculations in a logarithm domain. - -There are keys specific only to some algorithm. The key \texttt{elim\_heuristic} key allows to chose which elimination heuristic will be used by the \texttt{hve} solver (but not \texttt{ve}). The following are supported: -\begin{itemize} - \item \texttt{sequential} - \item \texttt{min\_neighbors} - \item \texttt{min\_weight} - \item \texttt{min\_fill} - \item \texttt{weighted\_min\_fill} -\end{itemize} - -It defaults to \texttt{weighted\_min\_fill}. An explanation of each of these heuristics can be found in Daphne Koller's book \textit{Probabilistic Graphical Models}. - -The \texttt{bp\_msg\_schedule}, \texttt{bp\_accuracy} and \texttt{bp\_max\_iter} keys are specific for message passing based algorithms, namely \texttt{bp}, \texttt{cbp} and \texttt{lbp}. - -The \texttt{bp\_max\_iter} key establishes a maximum number of iterations. One iteration consists in sending all possible messages. It defaults to 1000. - -The \texttt{bp\_accuracy} key indicates when the message passing should cease. 
Be the residual of one message the difference (according some metric) between the one sent in the current iteration and the one sent in the previous. If the highest residual is lesser than the given value, the message passing is stopped and the probabilities are calculated using the last messages that were sent. This key defaults to 0.0001. - -The key \texttt{bp\_msg\_schedule} controls the message sending order. Its possible values are: -\begin{itemize} - \item \texttt{seq\_fixed}, at each iteration, all messages are sent with the same order. - - \item \texttt{seq\_random}, at each iteration, all messages are sent with a random order. - - \item \texttt{parallel}, at each iteration, all messages are calculated using only the values of the previous iteration. - - \item \texttt{max\_residual}, the next message to be sent is the one with maximum residual (as explained in the paper \textit{Residual Belief Propagation: Informed Scheduling for Asynchronous Message Passing}). -\end{itemize} -It defaults to \texttt{seq\_fixed}. - -\section{Horus Command Line} -This package also includes an utility to perform inference over probabilistic graphical models described in other formats, namely the \href{http://cs.ru.nl/~jorism/libDAI/doc/fileformats.html}{libDAI file format}, and the \href{http://graphmod.ics.uci.edu/uai08/FileFormat}{UAI08 file format} - -This utility is called \texttt{hcli} and can be found inside binary directory used for the YAP installation. Its usage is: - -\begin{verbatim} -./hcli [=]... [|=]... -\end{verbatim} - -Let's assume that the working directory is where \texttt{hcli} is installed. We can perform inference in any supported model by passing the file name where the model is defined as argument. Next, we show the command for loading a model described in an UAI file format. - -\begin{verbatim} -./hcli $EXAMPLES_DIR$/burglary-alarm.uai -\end{verbatim} - -With this command, the program will load the model and print the marginal probabilities for all random variables defined in the model. We can view only the marginal probability for some variable with a identifier $X$, if we pass $X$ as an extra argument following the file name. For instance, the following command will show only the marginal probability for the variable with identifier $0$. - -\begin{verbatim} -./hcli $EXAMPLES_DIR$/burglary-alarm.uai 0 -\end{verbatim} - -If we give more than one variable identifier as argument, the program will show the joint probability for all variables given. - -Evidence can be given as pairs with a variable identifier and its observed state (index), separated by a '=`. For instance, we can introduce knowledge that some variable with identifier $0$ has evidence on its second state as follows. - -\begin{verbatim} -./hcli $EXAMPLES_DIR$/burglary-alarm.uai 0=1 -\end{verbatim} - -By default, all probability tasks are resolved with the \texttt{hve} solver. It is possible to choose another solver using the \texttt{ground\_solver} key as follows. Note that only \texttt{hve}, \texttt{bp} and \texttt{cbp} can be used in \texttt{hcli}. - -\begin{verbatim} -./hcli ground_solver=bp ../examples/burglary-alarm.uai -\end{verbatim} - -The options that are available with the \texttt{set\_horus\_flag/2} predicate can be used in \texttt{hcli} too. The syntax to use are pairs $Key=Value$ before the model's file name. 
- -\end{document} +\documentclass{article} + +\usepackage{hyperref} +\usepackage{setspace} +\usepackage{fancyvrb} +\usepackage{tikz} +\usetikzlibrary{arrows,shapes,positioning} + +\begin{document} + +\DefineVerbatimEnvironment{pflcodeve}{Verbatim} {xleftmargin=3.0em,fontsize=\small} + +\newenvironment{pflcode} + {\VerbatimEnvironment \setstretch{0.8} \begin{pflcodeve}} + {\end{pflcodeve} } + +\newcommand{\true} {\mathtt{t}} +\newcommand{\false} {\mathtt{f}} +\newcommand{\tableline} {\noalign{\hrule height 0.8pt}} + +\tikzstyle{nodestyle} = [draw, thick, circle, minimum size=0.9cm] +\tikzstyle{bnedgestyle} = [-triangle 45,thick] + +\setlength{\parskip}{\baselineskip} + +\title{\Huge\textbf{Prolog Factor Language (PFL) Manual}} +\author{Tiago Gomes, V\'{i}tor Santos Costa} +\date{} + +\maketitle +\thispagestyle{empty} +\vspace{5cm} +\begin{center} + \large Last revision: January 8, 2013 +\end{center} +\newpage + +%------------------------------------------------------------------------------ +%------------------------------------------------------------------------------ +%------------------------------------------------------------------------------ +%------------------------------------------------------------------------------ +\section{Introduction} +The Prolog Factor Language (PFL) is a extension of the Prolog language that allows a natural representation of this first-order probabilistic models (either directed or undirected). PFL is also capable of solving probabilistic queries on this models through the implementation of several inference techniques: variable elimination, belief propagation, lifted variable elimination and lifted belief propagation. + + + + +%------------------------------------------------------------------------------ +%------------------------------------------------------------------------------ +%------------------------------------------------------------------------------ +%------------------------------------------------------------------------------ +\section{Language} +A first-order probabilistic graphical model is described using parametric factors, or just parfactors. The PFL syntax for a parfactor is + +$$Type~~F~~;~~Phi~~;~~C.$$ + +, where +\begin{itemize} +\item $Type$ refers the type of network over which the parfactor is defined. It can be \texttt{bayes} for directed networks, or \texttt{markov} for undirected ones. + +\item $F$ is a comma-separated sequence of Prolog terms that will define sets of random variables under the constraint $C$. If $Type$ is \texttt{bayes}, the first term defines the node while the others defines its parents. + +\item $Phi$ is either a Prolog list of potential values or a Prolog goal that unifies with one. If $Type$ is \texttt{bayes}, this will correspond to the conditional probability table. Domain combinations are implicitly assumed in ascending order, with the first term being the 'most significant' (e.g. $\mathtt{x_0y_0}$, $\mathtt{x_0y_1}$, $\mathtt{x_0y_2}$, $\mathtt{x_1y_0}$, $\mathtt{x_1y_1}$, $\mathtt{x_1y_2}$). + +\item $C$ is a (possibly empty) list of Prolog goals that will instantiate the logical variables that appear in $F$, that is, the successful substitutions for the goals in $C$ will be the valid values for the logical variables. This allows the constraint to be any relation (set of tuples) over the logical variables. +\end{itemize} + + +\begin{figure}[t!] 
+\begin{center} +\begin{tikzpicture}[>=latex',line join=bevel,transform shape,scale=0.8] + +\node (cloudy) at (50bp, 122bp) [nodestyle,ellipse,inner sep=0pt,minimum width=2.7cm] {$Cloudy$}; +\node (sprinker) at ( 0bp, 66bp) [nodestyle,ellipse,inner sep=0pt,minimum width=2.7cm] {$Sprinker$}; +\node (rain) at (100bp, 66bp) [nodestyle,ellipse,inner sep=0pt,minimum width=2.7cm] {$Rain$}; +\node (wetgrass) at (50bp, 10bp) [nodestyle,ellipse,inner sep=0pt,minimum width=2.7cm] {$WetGrass$}; +\draw [bnedgestyle] (cloudy) -- (sprinker); +\draw [bnedgestyle] (cloudy) -- (rain); +\draw [bnedgestyle] (sprinker) -- (wetgrass); +\draw [bnedgestyle] (rain) -- (wetgrass); + +\node [above=0.4cm of cloudy,inner sep=0pt] { +\begin{tabular}[b]{lc} + $C$ & $P(C)$ \\ \tableline + $\true$ & 0.5 \\ + $\false$ & 0.5 \\ +\end{tabular} +}; + +\node [left=0.4cm of sprinker,inner sep=0pt] { +\begin{tabular}{lcc} + $S$ & $C$ & $P(S|C)$ \\ \tableline + $\true$ & $\true$ & 0.1 \\ + $\true$ & $\false$ & 0.5 \\ + $\false$ & $\true$ & 0.9 \\ + $\false$ & $\false$ & 0.5 \\ +\end{tabular} +}; + +\node [right=0.4cm of rain,inner sep=0pt] { +\begin{tabular}{llc} + $R$ & $C$ & $P(R|C)$ \\ \tableline + $\true$ & $\true$ & 0.8 \\ + $\true$ & $\false$ & 0.2 \\ + $\false$ & $\true$ & 0.2 \\ + $\false$ & $\false$ & 0.8 \\ +\end{tabular} +}; + +\node [below=0.4cm of wetgrass,inner sep=0pt] { +\begin{tabular}{llll} + $W$ & $S$ & $R$ & $P(W|S,R)$ \\ \tableline + $\true$ & $\true$ & $\true$ & \hspace{1em} 0.99 \\ + $\true$ & $\true$ & $\false$ & \hspace{1em} 0.9 \\ + $\true$ & $\false$ & $\true$ & \hspace{1em} 0.9 \\ + $\true$ & $\false$ & $\false$ & \hspace{1em} 0.0 \\ + $\false$ & $\true$ & $\true$ & \hspace{1em} 0.01 \\ + $\false$ & $\true$ & $\false$ & \hspace{1em} 0.1 \\ + $\false$ & $\false$ & $\true$ & \hspace{1em} 0.1 \\ + $\false$ & $\false$ & $\false$ & \hspace{1em} 1.0 \\ +\end{tabular} +}; + +\end{tikzpicture} +\caption{The sprinkler network.} +\label{fig:sprinkler-bn} +\end{center} +\end{figure} + +Towards a better understanding of the language, next we show the PFL representation for network found in Figure~\ref{fig:sprinkler-bn}. + +\begin{pflcode} +:- use_module(library(pfl)). + +bayes cloudy ; cloudy_table ; []. + +bayes sprinkler, cloudy ; sprinkler_table ; []. + +bayes rain, cloudy ; rain_table ; []. + +bayes wet_grass, sprinkler, rain ; wet_grass_table ; []. + +cloudy_table( + [ 0.5, + 0.5 ]). + +sprinkler_table( + [ 0.1, 0.5, + 0.9, 0.5 ]). + +rain_table( + [ 0.8, 0.2, + 0.2, 0.8 ]). + +wet_grass_table( + [ 0.99, 0.9, 0.9, 0.0, + 0.01, 0.1, 0.1, 1.0 ]). +\end{pflcode} + +Note that this network is fully grounded, as the constraints are all empty. Next we present the PFL representation for a well-known markov logic network - the social network model. The weighted formulas of this model are shown below. + +\begin{pflcode} +1.5 : Smokes(x) => Cancer(x) +1.1 : Smokes(x) ^ Friends(x,y) => Smokes(y) +\end{pflcode} + +We can represent this model using PFL with the following code. + +\begin{pflcode} +:- use_module(library(pfl)). + +person(anna). +person(bob). + +markov smokes(X), cancer(X) ; + [4.482, 4.482, 1.0, 4.482] ; + [person(X)]. + +markov friends(X,Y), smokes(X), smokes(Y) ; + [3.004, 3.004, 3.004, 3.004, 3.004, 1.0, 1.0, 3.004] ; + [person(X), person(Y)]. +\end{pflcode} +%markov smokes(X) ; [1.0, 4.055]; [person(X)]. +%markov cancer(X) ; [1.0, 9.974]; [person(X)]. +%markov friends(X,Y) ; [1.0, 99.484] ; [person(X), person(Y)]. 
+ +Notice that we defined the world to be consisted of two persons, \texttt{anne} and \texttt{bob}. We can easily add as many persons as we want by inserting in the program a fact like \texttt{person @ 10.}~. This would create ten persons named \texttt{p1}, \texttt{p2}, \dots, \texttt{p10}. + +Unlike other fist-order probabilistic languages, in PFL the logical variables that appear in the terms are not directly typed, and they will be only constrained by the goals that appear in the constraint of the parfactor. This allows the logical variables to be constrained by any relation (set of tuples), and not by pairwise (in)equalities. For instance, the next example defines a ground network with three factors, each over the random variables \texttt{p(a,b)}, \texttt{p(b,d)} and \texttt{p(d,e)}. + +\begin{pflcode} +constraint(a,b). +constraint(b,d). +constraint(d,e). + +markov p(A,B); some_table; [constraint(A,B)]. +\end{pflcode} + +We can easily add static evidence to PFL programs by inserting a fact with the same functor and arguments as the random variable, plus one extra argument with the observed state or value. For instance, suppose that we now that \texttt{anna} and \texttt{bob} are friends. We can add this knowledge to the program with the following fact: \texttt{friends(anna,bob,t).}~. + +One last note for the domain of the random variables. By default all terms will generate boolean (\texttt{t}/\texttt{f}) random variables. It is possible to chose a different domain by appending a list of the possible values or states to the term. Next we present a self-explanatory example of how this can be done. + +\begin{pflcode} +bayes professor_ability::[high, medium, low] ; [0.5, 0.4, 0.1]. +\end{pflcode} + +More examples can be found in the CLPBN examples directory, which defaults to ``share/doc/Yap/packages/examples/CLPBN'' from the base directory where the YAP Prolog system was installed. + +%------------------------------------------------------------------------------ +%------------------------------------------------------------------------------ +%------------------------------------------------------------------------------ +%------------------------------------------------------------------------------ +\section{Querying} +In this section we demonstrate how to use PFL to solve probabilistic queries. We will use the sprinkler network as an example. + +Assuming that the current directory is where the examples are located, first we load the model: + +\texttt{\$ yap -l sprinker.pfl} + +Let's suppose that we want to estimate the marginal probability for the $WetGrass$ random variable. We can do it calling the following goal: + +\texttt{?- wet\_grass(X).} + +The output of the goal will show the marginal probability for each $WetGrass$ possible state or value, that is, \texttt{t} and \texttt{f}. Notice that in PFL a random variable is identified by a term with the same functor and arguments plus one extra argument. + +Let's now suppose that we want to estimate the probability for the same random variable, but this time we have evidence that it had rained the day before. We can estimate this probability without resorting to static evidence with: + +\texttt{?- wet\_grass(X), rain(t).} + +PFL also supports calculating joint probability distributions. 
For instance, we can obtain the joint probability for $Sprinkler$ and $Rain$ with: + +\texttt{?- sprinkler(X), rain(Y).} + + +%------------------------------------------------------------------------------ +%------------------------------------------------------------------------------ +%------------------------------------------------------------------------------ +%------------------------------------------------------------------------------ +\section{Inference Options} +PFL supports both ground and lifted inference methods. The inference algorithm can be chosen by calling \texttt{set\_solver/1}. The following are supported: +\begin{itemize} + \item \texttt{ve}, variable elimination (written in Prolog) + \item \texttt{hve}, variable elimination (written in C++) + \item \texttt{jt}, junction tree + \item \texttt{bdd}, binary decision diagrams + \item \texttt{bp}, belief propagation + \item \texttt{cbp}, counting belief propagation + \item \texttt{gibbs}, gibbs sampling + \item \texttt{lve}, generalized counting first-order variable elimination (GC-FOVE) + \item \texttt{lkc}, lifted first-order knowledge compilation + \item \texttt{lbp}, lifted first-order belief propagation +\end{itemize} + +For instance, if we want to use belief propagation to solve some probabilistic query, we need to call first: + +\texttt{?- set\_solver(bp).} + +It is possible to tweak some parameters of PFL through \texttt{set\_horus\_flag/2} predicate. The first argument is a key that identifies the parameter that we desire to tweak, while the second is some possible value for this key. + +The \texttt{verbosity} key controls the level of debugging information that will be printed. Its possible values are positive integers. The higher the number, the more information that will be shown. For example, to view some basic debugging information we call: + +\texttt{?- set\_horus\_flag(verbosity, 1).} + +This key defaults to 0 (no debugging information) and only \texttt{hve}, \texttt{bp}, \texttt{cbp}, \texttt{lve}, \texttt{lkc} and \texttt{lbp} solvers have support for this key. + +The \texttt{use\_logarithms} key controls whether the calculations performed during inference should be done in a logarithm domain or not. Its values can be \texttt{true} or \texttt{false}. By default is \texttt{true} and only affects \texttt{hve}, \texttt{bp}, \texttt{cbp}, \texttt{lve}, \texttt{lkc} and \texttt{lbp} solvers. The remaining solvers always do their calculations in a logarithm domain. + +There are keys specific only to some algorithm. The key \texttt{elim\_heuristic} key allows to chose which elimination heuristic will be used by the \texttt{hve} solver (but not \texttt{ve}). The following are supported: +\begin{itemize} + \item \texttt{sequential} + \item \texttt{min\_neighbors} + \item \texttt{min\_weight} + \item \texttt{min\_fill} + \item \texttt{weighted\_min\_fill} +\end{itemize} + +It defaults to \texttt{weighted\_min\_fill}. An explanation of each of these heuristics can be found in Daphne Koller's book \textit{Probabilistic Graphical Models}. + +The \texttt{bp\_msg\_schedule}, \texttt{bp\_accuracy} and \texttt{bp\_max\_iter} keys are specific for message passing based algorithms, namely \texttt{bp}, \texttt{cbp} and \texttt{lbp}. + +The \texttt{bp\_max\_iter} key establishes a maximum number of iterations. One iteration consists in sending all possible messages. It defaults to 1000. + +The \texttt{bp\_accuracy} key indicates when the message passing should cease. 
Be the residual of one message the difference (according some metric) between the one sent in the current iteration and the one sent in the previous. If the highest residual is lesser than the given value, the message passing is stopped and the probabilities are calculated using the last messages that were sent. This key defaults to 0.0001. + +The key \texttt{bp\_msg\_schedule} controls the message sending order. Its possible values are: +\begin{itemize} + \item \texttt{seq\_fixed}, at each iteration, all messages are sent with the same order. + + \item \texttt{seq\_random}, at each iteration, all messages are sent with a random order. + + \item \texttt{parallel}, at each iteration, all messages are calculated using only the values of the previous iteration. + + \item \texttt{max\_residual}, the next message to be sent is the one with maximum residual (as explained in the paper \textit{Residual Belief Propagation: Informed Scheduling for Asynchronous Message Passing}). +\end{itemize} +It defaults to \texttt{seq\_fixed}. + +\section{Horus Command Line} +This package also includes an utility to perform inference over probabilistic graphical models described in other formats, namely the \href{http://cs.ru.nl/~jorism/libDAI/doc/fileformats.html}{libDAI file format}, and the \href{http://graphmod.ics.uci.edu/uai08/FileFormat}{UAI08 file format} + +This utility is called \texttt{hcli} and can be found inside binary directory used for the YAP installation. Its usage is: + +\begin{verbatim} +./hcli [=]... [|=]... +\end{verbatim} + +Let's assume that the working directory is where \texttt{hcli} is installed. We can perform inference in any supported model by passing the file name where the model is defined as argument. Next, we show the command for loading a model described in an UAI file format. + +\begin{verbatim} +./hcli $EXAMPLES_DIR$/burglary-alarm.uai +\end{verbatim} + +With this command, the program will load the model and print the marginal probabilities for all random variables defined in the model. We can view only the marginal probability for some variable with a identifier $X$, if we pass $X$ as an extra argument following the file name. For instance, the following command will show only the marginal probability for the variable with identifier $0$. + +\begin{verbatim} +./hcli $EXAMPLES_DIR$/burglary-alarm.uai 0 +\end{verbatim} + +If we give more than one variable identifier as argument, the program will show the joint probability for all variables given. + +Evidence can be given as pairs with a variable identifier and its observed state (index), separated by a '=`. For instance, we can introduce knowledge that some variable with identifier $0$ has evidence on its second state as follows. + +\begin{verbatim} +./hcli $EXAMPLES_DIR$/burglary-alarm.uai 0=1 +\end{verbatim} + +By default, all probability tasks are resolved with the \texttt{hve} solver. It is possible to choose another solver using the \texttt{ground\_solver} key as follows. Note that only \texttt{hve}, \texttt{bp} and \texttt{cbp} can be used in \texttt{hcli}. + +\begin{verbatim} +./hcli ground_solver=bp ../examples/burglary-alarm.uai +\end{verbatim} + +The options that are available with the \texttt{set\_horus\_flag/2} predicate can be used in \texttt{hcli} too. The syntax to use are pairs $Key=Value$ before the model's file name. 
+ +\end{document} From da0514a7793b074d4951a16d1b5d4dec5a583d4f Mon Sep 17 00:00:00 2001 From: Tiago Gomes Date: Tue, 8 Jan 2013 17:01:03 +0000 Subject: [PATCH 75/89] Add support for more infernce keys. Add support for export_libdai, export_uai, export_gv and print_fg. Document these keys. --- packages/CLPBN/horus/FactorGraph.cpp | 80 +++++++++++++++------------- packages/CLPBN/horus/FactorGraph.h | 33 +++++++++++- packages/CLPBN/horus/HorusCli.cpp | 17 ++++++ packages/CLPBN/horus/HorusYap.cpp | 15 +++++- packages/CLPBN/horus/Util.cpp | 41 ++++++++++++++ packages/CLPBN/pfl.tex | 4 +- 6 files changed, 148 insertions(+), 42 deletions(-) diff --git a/packages/CLPBN/horus/FactorGraph.cpp b/packages/CLPBN/horus/FactorGraph.cpp index 3d781628a..1f4c614b3 100644 --- a/packages/CLPBN/horus/FactorGraph.cpp +++ b/packages/CLPBN/horus/FactorGraph.cpp @@ -12,6 +12,12 @@ #include "Util.h" +bool FactorGraph::exportLd_ = false; +bool FactorGraph::exportUai_ = false; +bool FactorGraph::exportGv_ = false; +bool FactorGraph::printFg_ = false; + + FactorGraph::FactorGraph (const FactorGraph& fg) { const VarNodes& varNodes = fg.varNodes(); @@ -288,41 +294,38 @@ FactorGraph::print (void) const void -FactorGraph::exportToGraphViz (const char* fileName) const +FactorGraph::exportToLibDai (const char* fileName) const { ofstream out (fileName); if (!out.is_open()) { cerr << "Error: couldn't open file '" << fileName << "'." ; return; } - out << "graph \"" << fileName << "\" {" << endl; - for (size_t i = 0; i < varNodes_.size(); i++) { - if (varNodes_[i]->hasEvidence()) { - out << '"' << varNodes_[i]->label() << '"' ; - out << " [style=filled, fillcolor=yellow]" << endl; - } - } + out << facNodes_.size() << endl << endl; for (size_t i = 0; i < facNodes_.size(); i++) { - out << '"' << facNodes_[i]->getLabel() << '"' ; - out << " [label=\"" << facNodes_[i]->getLabel(); - out << "\"" << ", shape=box]" << endl; - } - for (size_t i = 0; i < facNodes_.size(); i++) { - const VarNodes& myVars = facNodes_[i]->neighbors(); - for (size_t j = 0; j < myVars.size(); j++) { - out << '"' << facNodes_[i]->getLabel() << '"' ; - out << " -- " ; - out << '"' << myVars[j]->label() << '"' << endl; + Factor f (facNodes_[i]->factor()); + out << f.nrArguments() << endl; + out << Util::elementsToString (f.arguments()) << endl; + out << Util::elementsToString (f.ranges()) << endl; + VarIds args = f.arguments(); + std::reverse (args.begin(), args.end()); + f.reorderArguments (args); + if (Globals::logDomain) { + Util::exp (f.params()); } + out << f.size() << endl; + for (size_t j = 0; j < f.size(); j++) { + out << j << " " << f[j] << endl; + } + out << endl; } - out << "}" << endl; out.close(); } void -FactorGraph::exportToUaiFormat (const char* fileName) const +FactorGraph::exportToUai (const char* fileName) const { ofstream out (fileName); if (!out.is_open()) { @@ -366,31 +369,34 @@ FactorGraph::exportToUaiFormat (const char* fileName) const void -FactorGraph::exportToLibDaiFormat (const char* fileName) const +FactorGraph::exportToGraphViz (const char* fileName) const { ofstream out (fileName); if (!out.is_open()) { cerr << "Error: couldn't open file '" << fileName << "'." 
; return; } - out << facNodes_.size() << endl << endl; - for (size_t i = 0; i < facNodes_.size(); i++) { - Factor f (facNodes_[i]->factor()); - out << f.nrArguments() << endl; - out << Util::elementsToString (f.arguments()) << endl; - out << Util::elementsToString (f.ranges()) << endl; - VarIds args = f.arguments(); - std::reverse (args.begin(), args.end()); - f.reorderArguments (args); - if (Globals::logDomain) { - Util::exp (f.params()); + out << "graph \"" << fileName << "\" {" << endl; + for (size_t i = 0; i < varNodes_.size(); i++) { + if (varNodes_[i]->hasEvidence()) { + out << '"' << varNodes_[i]->label() << '"' ; + out << " [style=filled, fillcolor=yellow]" << endl; } - out << f.size() << endl; - for (size_t j = 0; j < f.size(); j++) { - out << j << " " << f[j] << endl; - } - out << endl; } + for (size_t i = 0; i < facNodes_.size(); i++) { + out << '"' << facNodes_[i]->getLabel() << '"' ; + out << " [label=\"" << facNodes_[i]->getLabel(); + out << "\"" << ", shape=box]" << endl; + } + for (size_t i = 0; i < facNodes_.size(); i++) { + const VarNodes& myVars = facNodes_[i]->neighbors(); + for (size_t j = 0; j < myVars.size(); j++) { + out << '"' << facNodes_[i]->getLabel() << '"' ; + out << " -- " ; + out << '"' << myVars[j]->label() << '"' << endl; + } + } + out << "}" << endl; out.close(); } diff --git a/packages/CLPBN/horus/FactorGraph.h b/packages/CLPBN/horus/FactorGraph.h index a235d8d26..e1cc9277c 100644 --- a/packages/CLPBN/horus/FactorGraph.h +++ b/packages/CLPBN/horus/FactorGraph.h @@ -106,11 +106,35 @@ class FactorGraph void print (void) const; + void exportToLibDai (const char*) const; + + void exportToUai (const char*) const; + void exportToGraphViz (const char*) const; - void exportToUaiFormat (const char*) const; + static bool exportToLibDai (void) { return exportLd_; } - void exportToLibDaiFormat (const char*) const; + static bool exportToUai (void) { return exportUai_; } + + static bool exportGraphViz (void) { return exportGv_; } + + static bool printFactorGraph (void) { return printFg_; } + + static void enableExportToLibDai (void) { exportLd_ = true; } + + static void disableExportToLibDai (void) { exportLd_ = false; } + + static void enableExportToUai (void) { exportUai_ = true; } + + static void disableExportToUai (void) { exportUai_ = false; } + + static void enableExportToGraphViz (void) { exportGv_ = true; } + + static void disableExportToGraphViz (void) { exportGv_ = false; } + + static void enablePrintFactorGraph (void) { printFg_ = true; } + + static void disablePrintFactorGraph (void) { printFg_ = false; } private: void ignoreLines (std::ifstream&) const; @@ -132,6 +156,11 @@ class FactorGraph typedef unordered_map VarMap; VarMap varMap_; + static bool exportLd_; + static bool exportUai_; + static bool exportGv_; + static bool printFg_; + DISALLOW_ASSIGN (FactorGraph); }; diff --git a/packages/CLPBN/horus/HorusCli.cpp b/packages/CLPBN/horus/HorusCli.cpp index 9a0fb7a3a..4ff83899e 100644 --- a/packages/CLPBN/horus/HorusCli.cpp +++ b/packages/CLPBN/horus/HorusCli.cpp @@ -32,6 +32,23 @@ main (int argc, const char* argv[]) FactorGraph fg; readFactorGraph (fg, argv[idx]); VarIds queryIds = readQueryAndEvidence (fg, argc, argv, idx + 1); + if (FactorGraph::exportToLibDai()) { + fg.exportToLibDai ("model.fg"); + } + if (FactorGraph::exportToUai()) { + fg.exportToUai ("model.uai"); + } + if (FactorGraph::exportGraphViz()) { + fg.exportToGraphViz ("model.dot"); + } + if (FactorGraph::printFactorGraph()) { + fg.print(); + } + if (Globals::verbosity > 0) { + cout << 
"factor graph contains " ; + cout << fg.nrVarNodes() << " variables and " ; + cout << fg.nrFacNodes() << " factors " << endl; + } runSolver (fg, queryIds); return 0; } diff --git a/packages/CLPBN/horus/HorusYap.cpp b/packages/CLPBN/horus/HorusYap.cpp index dbd210412..63a2b69a5 100644 --- a/packages/CLPBN/horus/HorusYap.cpp +++ b/packages/CLPBN/horus/HorusYap.cpp @@ -108,10 +108,21 @@ createGroundNetwork (void) evidenceList = YAP_TailOfTerm (evidenceList); nrObservedVars ++; } + if (FactorGraph::exportToLibDai()) { + fg->exportToLibDai ("model.fg"); + } + if (FactorGraph::exportToUai()) { + fg->exportToUai ("model.uai"); + } + if (FactorGraph::exportGraphViz()) { + fg->exportToGraphViz ("model.dot"); + } + if (FactorGraph::printFactorGraph()) { + fg->print(); + } if (Globals::verbosity > 0) { cout << "factor graph contains " ; - cout << fg->nrVarNodes() << " variables " ; - cout << "(" << nrObservedVars << " observed) and " ; + cout << fg->nrVarNodes() << " variables and " ; cout << fg->nrFacNodes() << " factors " << endl; } YAP_Int p = (YAP_Int) (fg); diff --git a/packages/CLPBN/horus/Util.cpp b/packages/CLPBN/horus/Util.cpp index 4d682a1b0..7ec1aac81 100644 --- a/packages/CLPBN/horus/Util.cpp +++ b/packages/CLPBN/horus/Util.cpp @@ -272,6 +272,47 @@ setHorusFlag (string key, string value) ss << value; ss >> mi; BeliefProp::setMaxIterations (mi); + } else if (key == "export_libdai") { + if ( value == "true") { + FactorGraph::enableExportToLibDai(); + } else if (value == "false") { + FactorGraph::disableExportToLibDai(); + Globals::logDomain = false; + } else { + cerr << "warning: invalid value `" << value << "' " ; + cerr << "for `" << key << "'" << endl; + returnVal = false; + } + } else if (key == "export_uai") { + if ( value == "true") { + FactorGraph::enableExportToUai(); + } else if (value == "false") { + FactorGraph::disableExportToUai(); + } else { + cerr << "warning: invalid value `" << value << "' " ; + cerr << "for `" << key << "'" << endl; + returnVal = false; + } + } else if (key == "export_graphviz") { + if ( value == "true") { + FactorGraph::enableExportToGraphViz(); + } else if (value == "false") { + FactorGraph::disableExportToGraphViz(); + } else { + cerr << "warning: invalid value `" << value << "' " ; + cerr << "for `" << key << "'" << endl; + returnVal = false; + } + } else if (key == "print_fg") { + if ( value == "true") { + FactorGraph::enablePrintFactorGraph(); + } else if (value == "false") { + FactorGraph::disablePrintFactorGraph(); + } else { + cerr << "warning: invalid value `" << value << "' " ; + cerr << "for `" << key << "'" << endl; + returnVal = false; + } } else { cerr << "warning: invalid key `" << key << "'" << endl; returnVal = false; diff --git a/packages/CLPBN/pfl.tex b/packages/CLPBN/pfl.tex index df0d9db19..ed73ed8de 100644 --- a/packages/CLPBN/pfl.tex +++ b/packages/CLPBN/pfl.tex @@ -265,7 +265,7 @@ This key defaults to 0 (no debugging information) and only \texttt{hve}, \texttt The \texttt{use\_logarithms} key controls whether the calculations performed during inference should be done in a logarithm domain or not. Its values can be \texttt{true} or \texttt{false}. By default is \texttt{true} and only affects \texttt{hve}, \texttt{bp}, \texttt{cbp}, \texttt{lve}, \texttt{lkc} and \texttt{lbp} solvers. The remaining solvers always do their calculations in a logarithm domain. -There are keys specific only to some algorithm. 
The key \texttt{elim\_heuristic} key allows to chose which elimination heuristic will be used by the \texttt{hve} solver (but not \texttt{ve}). The following are supported: +There are keys specific only to some algorithms. The key \texttt{elim\_heuristic} key allows to chose which elimination heuristic will be used by the \texttt{hve} solver (but not \texttt{ve}). The following are supported: \begin{itemize} \item \texttt{sequential} \item \texttt{min\_neighbors} @@ -294,6 +294,8 @@ The key \texttt{bp\_msg\_schedule} controls the message sending order. Its possi \end{itemize} It defaults to \texttt{seq\_fixed}. +The \texttt{export\_libdai} and \texttt{export\_uai} keys can be used to export the current model respectively to \href{http://cs.ru.nl/~jorism/libDAI/doc/fileformats.html}{libDAI}, and \href{http://graphmod.ics.uci.edu/uai08/FileFormat}{UAI08} formats. With the \texttt{export\_graphviz} key it is possible to save the factor graph into a format that can be read by \href{http://www.graphviz.org/}{Graphviz}. The \texttt{print\_fg} key allows to print all factors before perform inference. All these four keys accept \texttt{true} and \texttt{false} as their values and only produce effect in \texttt{hve}, \texttt{bp}, and \texttt{cbp} solvers. + \section{Horus Command Line} This package also includes an utility to perform inference over probabilistic graphical models described in other formats, namely the \href{http://cs.ru.nl/~jorism/libDAI/doc/fileformats.html}{libDAI file format}, and the \href{http://graphmod.ics.uci.edu/uai08/FileFormat}{UAI08 file format} From 1b9c4e96e7beacd87cbe738f8dd816eaa4634111 Mon Sep 17 00:00:00 2001 From: Tiago Gomes Date: Tue, 8 Jan 2013 17:06:40 +0000 Subject: [PATCH 76/89] Prefix with bp_ some bp flags --- packages/CLPBN/horus/BeliefProp.cpp | 6 +++--- packages/CLPBN/horus/CountingBp.cpp | 6 +++--- packages/CLPBN/horus/LiftedBp.cpp | 6 +++--- 3 files changed, 9 insertions(+), 9 deletions(-) diff --git a/packages/CLPBN/horus/BeliefProp.cpp b/packages/CLPBN/horus/BeliefProp.cpp index d009cd7a9..5ec3aafd5 100644 --- a/packages/CLPBN/horus/BeliefProp.cpp +++ b/packages/CLPBN/horus/BeliefProp.cpp @@ -52,15 +52,15 @@ BeliefProp::printSolverFlags (void) const { stringstream ss; ss << "belief propagation [" ; - ss << "msg_schedule=" ; + ss << "bp_msg_schedule=" ; switch (schedule_) { case MsgSchedule::SEQ_FIXED: ss << "seq_fixed"; break; case MsgSchedule::SEQ_RANDOM: ss << "seq_random"; break; case MsgSchedule::PARALLEL: ss << "parallel"; break; case MsgSchedule::MAX_RESIDUAL: ss << "max_residual"; break; } - ss << ",max_iter=" << Util::toString (maxIter_); - ss << ",accuracy=" << Util::toString (accuracy_); + ss << ",bp_max_iter=" << Util::toString (maxIter_); + ss << ",bp_accuracy=" << Util::toString (accuracy_); ss << ",log_domain=" << Util::toString (Globals::logDomain); ss << "]" ; cout << ss.str() << endl; diff --git a/packages/CLPBN/horus/CountingBp.cpp b/packages/CLPBN/horus/CountingBp.cpp index 39b47eab3..4dc1b249e 100644 --- a/packages/CLPBN/horus/CountingBp.cpp +++ b/packages/CLPBN/horus/CountingBp.cpp @@ -36,15 +36,15 @@ CountingBp::printSolverFlags (void) const { stringstream ss; ss << "counting bp [" ; - ss << "msg_schedule=" ; + ss << "bp_msg_schedule=" ; switch (WeightedBp::msgSchedule()) { case MsgSchedule::SEQ_FIXED: ss << "seq_fixed"; break; case MsgSchedule::SEQ_RANDOM: ss << "seq_random"; break; case MsgSchedule::PARALLEL: ss << "parallel"; break; case MsgSchedule::MAX_RESIDUAL: ss << "max_residual"; break; } - ss << ",max_iter=" << 
WeightedBp::maxIterations(); - ss << ",accuracy=" << WeightedBp::accuracy(); + ss << ",bp_max_iter=" << WeightedBp::maxIterations(); + ss << ",bp_accuracy=" << WeightedBp::accuracy(); ss << ",log_domain=" << Util::toString (Globals::logDomain); ss << ",fif=" << Util::toString (CountingBp::fif_); ss << "]" ; diff --git a/packages/CLPBN/horus/LiftedBp.cpp b/packages/CLPBN/horus/LiftedBp.cpp index b748cc9e1..b85e87cd5 100644 --- a/packages/CLPBN/horus/LiftedBp.cpp +++ b/packages/CLPBN/horus/LiftedBp.cpp @@ -62,15 +62,15 @@ LiftedBp::printSolverFlags (void) const { stringstream ss; ss << "lifted bp [" ; - ss << "msg_schedule=" ; + ss << "bp_msg_schedule=" ; switch (WeightedBp::msgSchedule()) { case MsgSchedule::SEQ_FIXED: ss << "seq_fixed"; break; case MsgSchedule::SEQ_RANDOM: ss << "seq_random"; break; case MsgSchedule::PARALLEL: ss << "parallel"; break; case MsgSchedule::MAX_RESIDUAL: ss << "max_residual"; break; } - ss << ",max_iter=" << WeightedBp::maxIterations(); - ss << ",accuracy=" << WeightedBp::accuracy(); + ss << ",bp_max_iter=" << WeightedBp::maxIterations(); + ss << ",bp_accuracy=" << WeightedBp::accuracy(); ss << ",log_domain=" << Util::toString (Globals::logDomain); ss << "]" ; cout << ss.str() << endl; From f94032d06cc0abc69e99ac6b0eb3aaa49c7deb00 Mon Sep 17 00:00:00 2001 From: Tiago Gomes Date: Tue, 8 Jan 2013 19:39:03 +0000 Subject: [PATCH 77/89] PFL manual: add an installation section --- packages/CLPBN/pfl.tex | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/packages/CLPBN/pfl.tex b/packages/CLPBN/pfl.tex index ed73ed8de..cb43ecf26 100644 --- a/packages/CLPBN/pfl.tex +++ b/packages/CLPBN/pfl.tex @@ -43,7 +43,19 @@ The Prolog Factor Language (PFL) is a extension of the Prolog language that allows a natural representation of this first-order probabilistic models (either directed or undirected). PFL is also capable of solving probabilistic queries on this models through the implementation of several inference techniques: variable elimination, belief propagation, lifted variable elimination and lifted belief propagation. +\section{Installation} +PFL is included with the \href{http://www.dcc.fc.up.pt/~vsc/Yap/}{YAP} Prolog system. However, there isn't yet a stable release of YAP that includes PFL. So it is required to install a development version of YAP. To to this, you will need to have installed the Git version control system. The commands to do a default installation of YAP in the user's home in a Unix-based environment are shown next. +\begin{enumerate} + \setlength\itemindent{-0.01cm} + \item \texttt{\$ cd \$HOME} + \item \texttt{\$ git clone git://yap.git.sourceforge.net/gitroot/yap/yap-6.3} + \item \texttt{\$ cd yap-6.3/} + \item \texttt{\$ ./configure --enable-clpbn-bp --prefix=\$HOME} + \item \texttt{\$ make depend \& make install} +\end{enumerate} + +In case you want to install YAP somewhere else or with different settings, please consult the YAP documentation. From now on, we will assume that the directory \$HOME/bin (where the binary can be found) is in your \$PATH environment variable. 
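In a Bourne-style shell this assumption can be made to hold for the current session with a line such as the one below (a sketch only; adapt it to your own shell and startup files):

\begin{verbatim}
export PATH=$HOME/bin:$PATH
\end{verbatim}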
 %------------------------------------------------------------------------------
 %------------------------------------------------------------------------------

From 9c8fd804dfcf2694a4e5725cf48877900ea90572 Mon Sep 17 00:00:00 2001
From: Tiago Gomes
Date: Tue, 8 Jan 2013 19:39:39 +0000
Subject: [PATCH 78/89] PFL manual: add a further information section

---
 packages/CLPBN/pfl.tex | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/packages/CLPBN/pfl.tex b/packages/CLPBN/pfl.tex
index cb43ecf26..b76caafe9 100644
--- a/packages/CLPBN/pfl.tex
+++ b/packages/CLPBN/pfl.tex
@@ -345,4 +345,7 @@ By default, all probability tasks are resolved with the \texttt{hve} solver. It
 
 The options that are available with the \texttt{set\_horus\_flag/2} predicate can be used in \texttt{hcli} too. The syntax to use are pairs $Key=Value$ before the model's file name.
 
+\section{Further Information}
+Please check the paper \textit{Evaluating Inference Algorithms for the Prolog Factor Language} for further information.
+
 \end{document}

From dbd35a20a2fc5d36b27b0d43cf1c9125c0a345dc Mon Sep 17 00:00:00 2001
From: Tiago Gomes
Date: Tue, 8 Jan 2013 19:40:20 +0000
Subject: [PATCH 79/89] PFL manual: improve introduction section

---
 packages/CLPBN/pfl.tex | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/packages/CLPBN/pfl.tex b/packages/CLPBN/pfl.tex
index b76caafe9..54ce6fa28 100644
--- a/packages/CLPBN/pfl.tex
+++ b/packages/CLPBN/pfl.tex
@@ -40,8 +40,9 @@
 %------------------------------------------------------------------------------
 %------------------------------------------------------------------------------
 \section{Introduction}
-The Prolog Factor Language (PFL) is a extension of the Prolog language that allows a natural representation of this first-order probabilistic models (either directed or undirected). PFL is also capable of solving probabilistic queries on this models through the implementation of several inference techniques: variable elimination, belief propagation, lifted variable elimination and lifted belief propagation.
+The Prolog Factor Language (PFL) is a language that extends Prolog by providing a syntax for describing first-order probabilistic graphical models. These models can be either directed (Bayesian networks) or undirected (Markov networks). It replaces the older language known as CLP($\mathcal{BN}$).
 
+The package also includes implementations of a set of well-known inference algorithms for solving probabilistic queries on these models. Both ground and lifted inference methods are supported.
 
 \section{Installation}
 PFL is included with the \href{http://www.dcc.fc.up.pt/~vsc/Yap/}{YAP} Prolog system. However, there isn't yet a stable release of YAP that includes PFL. So it is required to install a development version of YAP. To to this, you will need to have installed the Git version control system. The commands to do a default installation of YAP in the user's home in a Unix-based environment are shown next.
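To make the description above concrete, a model can also be loaded and queried non-interactively by piping goals into YAP, in the same style as the benchmark scripts refactored later in this series. In the sketch below the file name sprinkler.pfl, the hve solver choice and the wet_grass(X) query are assumptions drawn from the examples discussed in this manual; the exact file name in your examples directory may differ.

    # Sketch: load a PFL model, select a ground solver and ask for a marginal.
    yap << EOF
    ['sprinkler.pfl'].
    set_solver(hve).
    wet_grass(X).
    EOF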
From dfd7360291ff73c896c11eca5d77677ea2f4c218 Mon Sep 17 00:00:00 2001 From: Tiago Gomes Date: Tue, 8 Jan 2013 20:47:12 +0000 Subject: [PATCH 80/89] Allow the use of solver as an alias of ground_solver --- packages/CLPBN/horus/Util.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/CLPBN/horus/Util.cpp b/packages/CLPBN/horus/Util.cpp index 7ec1aac81..cf96e44ff 100644 --- a/packages/CLPBN/horus/Util.cpp +++ b/packages/CLPBN/horus/Util.cpp @@ -204,7 +204,7 @@ setHorusFlag (string key, string value) cerr << "for `" << key << "'" << endl; returnVal = false; } - } else if (key == "ground_solver") { + } else if (key == "ground_solver" || key == "solver") { if ( value == "ve" || value == "hve") { Globals::groundSolver = GroundSolverType::VE; } else if (value == "bp") { From 2738d8330204d155f7d532b8a87ad79806ae8937 Mon Sep 17 00:00:00 2001 From: Tiago Gomes Date: Tue, 8 Jan 2013 20:48:46 +0000 Subject: [PATCH 81/89] Remove some alias for hve --- packages/CLPBN/horus/Util.cpp | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/CLPBN/horus/Util.cpp b/packages/CLPBN/horus/Util.cpp index cf96e44ff..8927d3d43 100644 --- a/packages/CLPBN/horus/Util.cpp +++ b/packages/CLPBN/horus/Util.cpp @@ -205,7 +205,7 @@ setHorusFlag (string key, string value) returnVal = false; } } else if (key == "ground_solver" || key == "solver") { - if ( value == "ve" || value == "hve") { + if ( value == "hve") { Globals::groundSolver = GroundSolverType::VE; } else if (value == "bp") { Globals::groundSolver = GroundSolverType::BP; @@ -230,7 +230,7 @@ setHorusFlag (string key, string value) cerr << "for `" << key << "'" << endl; returnVal = false; } - } else if (key == "ve_elim_heuristic" || key == "hve_elim_heuristic") { + } else if (key == "hve_elim_heuristic") { if ( value == "sequential") { ElimGraph::setElimHeuristic (ElimHeuristic::SEQUENTIAL); } else if (value == "min_neighbors") { From dc536fabc2a1a5b690bdbf83e4bc9ee4e9da7cfb Mon Sep 17 00:00:00 2001 From: Tiago Gomes Date: Tue, 8 Jan 2013 21:13:58 +0000 Subject: [PATCH 82/89] Beautify setHorusFlag --- packages/CLPBN/horus/Util.cpp | 149 ++++++++++++++-------------------- 1 file changed, 60 insertions(+), 89 deletions(-) diff --git a/packages/CLPBN/horus/Util.cpp b/packages/CLPBN/horus/Util.cpp index 8927d3d43..ca681b9d7 100644 --- a/packages/CLPBN/horus/Util.cpp +++ b/packages/CLPBN/horus/Util.cpp @@ -188,133 +188,104 @@ getStateLines (const Vars& vars) +bool invalidValue (string key, string value) +{ + cerr << "Warning: invalid value `" << value << "' " ; + cerr << "for `" << key << "'" ; + cerr << endl; + return false; +} + + + bool setHorusFlag (string key, string value) { bool returnVal = true; - if ( key == "lifted_solver") { - if ( value == "lve") { - Globals::liftedSolver = LiftedSolverType::LVE; - } else if (value == "lbp") { - Globals::liftedSolver = LiftedSolverType::LBP; - } else if (value == "lkc") { - Globals::liftedSolver = LiftedSolverType::LKC; - } else { - cerr << "warning: invalid value `" << value << "' " ; - cerr << "for `" << key << "'" << endl; - returnVal = false; - } + if (key == "lifted_solver") { + if (value == "lve") Globals::liftedSolver = LiftedSolverType::LVE; + else if (value == "lbp") Globals::liftedSolver = LiftedSolverType::LBP; + else if (value == "lkc") Globals::liftedSolver = LiftedSolverType::LKC; + else returnVal = invalidValue (key, value); + } else if (key == "ground_solver" || key == "solver") { - if ( value == "hve") { - Globals::groundSolver = GroundSolverType::VE; - } else 
if (value == "bp") { - Globals::groundSolver = GroundSolverType::BP; - } else if (value == "cbp") { - Globals::groundSolver = GroundSolverType::CBP; - } else { - cerr << "warning: invalid value `" << value << "' " ; - cerr << "for `" << key << "'" << endl; - returnVal = false; - } + if (value == "hve") Globals::groundSolver = GroundSolverType::VE; + else if (value == "bp") Globals::groundSolver = GroundSolverType::BP; + else if (value == "cbp") Globals::groundSolver = GroundSolverType::CBP; + else returnVal = invalidValue (key, value); + } else if (key == "verbosity") { stringstream ss; ss << value; ss >> Globals::verbosity; + } else if (key == "use_logarithms") { - if ( value == "true") { - Globals::logDomain = true; - } else if (value == "false") { - Globals::logDomain = false; - } else { - cerr << "warning: invalid value `" << value << "' " ; - cerr << "for `" << key << "'" << endl; - returnVal = false; - } + if (value == "true") Globals::logDomain = true; + else if (value == "false") Globals::logDomain = false; + else returnVal = invalidValue (key, value); + } else if (key == "hve_elim_heuristic") { - if ( value == "sequential") { + if (value == "sequential") ElimGraph::setElimHeuristic (ElimHeuristic::SEQUENTIAL); - } else if (value == "min_neighbors") { + else if (value == "min_neighbors") ElimGraph::setElimHeuristic (ElimHeuristic::MIN_NEIGHBORS); - } else if (value == "min_weight") { + else if (value == "min_weight") ElimGraph::setElimHeuristic (ElimHeuristic::MIN_WEIGHT); - } else if (value == "min_fill") { + else if (value == "min_fill") ElimGraph::setElimHeuristic (ElimHeuristic::MIN_FILL); - } else if (value == "weighted_min_fill") { + else if (value == "weighted_min_fill") ElimGraph::setElimHeuristic (ElimHeuristic::WEIGHTED_MIN_FILL); - } else { - cerr << "warning: invalid value `" << value << "' " ; - cerr << "for `" << key << "'" << endl; - returnVal = false; - } + else + returnVal = invalidValue (key, value); + } else if (key == "bp_msg_schedule") { - if ( value == "seq_fixed") { + if (value == "seq_fixed") BeliefProp::setMsgSchedule (MsgSchedule::SEQ_FIXED); - } else if (value == "seq_random") { + else if (value == "seq_random") BeliefProp::setMsgSchedule (MsgSchedule::SEQ_RANDOM); - } else if (value == "parallel") { + else if (value == "parallel") BeliefProp::setMsgSchedule (MsgSchedule::PARALLEL); - } else if (value == "max_residual") { + else if (value == "max_residual") BeliefProp::setMsgSchedule (MsgSchedule::MAX_RESIDUAL); - } else { - cerr << "warning: invalid value `" << value << "' " ; - cerr << "for `" << key << "'" << endl; - returnVal = false; - } + else + returnVal = invalidValue (key, value); + } else if (key == "bp_accuracy") { stringstream ss; double acc; ss << value; ss >> acc; BeliefProp::setAccuracy (acc); + } else if (key == "bp_max_iter") { stringstream ss; unsigned mi; ss << value; ss >> mi; BeliefProp::setMaxIterations (mi); + } else if (key == "export_libdai") { - if ( value == "true") { - FactorGraph::enableExportToLibDai(); - } else if (value == "false") { - FactorGraph::disableExportToLibDai(); - Globals::logDomain = false; - } else { - cerr << "warning: invalid value `" << value << "' " ; - cerr << "for `" << key << "'" << endl; - returnVal = false; - } + if (value == "true") FactorGraph::enableExportToLibDai(); + else if (value == "false") FactorGraph::disableExportToLibDai(); + else returnVal = invalidValue (key, value); + } else if (key == "export_uai") { - if ( value == "true") { - FactorGraph::enableExportToUai(); - } else if (value == 
"false") { - FactorGraph::disableExportToUai(); - } else { - cerr << "warning: invalid value `" << value << "' " ; - cerr << "for `" << key << "'" << endl; - returnVal = false; - } + if (value == "true") FactorGraph::enableExportToUai(); + else if (value == "false") FactorGraph::disableExportToUai(); + else returnVal = invalidValue (key, value); + } else if (key == "export_graphviz") { - if ( value == "true") { - FactorGraph::enableExportToGraphViz(); - } else if (value == "false") { - FactorGraph::disableExportToGraphViz(); - } else { - cerr << "warning: invalid value `" << value << "' " ; - cerr << "for `" << key << "'" << endl; - returnVal = false; - } + if (value == "true") FactorGraph::enableExportToGraphViz(); + else if (value == "false") FactorGraph::disableExportToGraphViz(); + else returnVal = invalidValue (key, value); + } else if (key == "print_fg") { - if ( value == "true") { - FactorGraph::enablePrintFactorGraph(); - } else if (value == "false") { - FactorGraph::disablePrintFactorGraph(); - } else { - cerr << "warning: invalid value `" << value << "' " ; - cerr << "for `" << key << "'" << endl; - returnVal = false; - } + if (value == "true") FactorGraph::enablePrintFactorGraph(); + else if (value == "false") FactorGraph::disablePrintFactorGraph(); + else returnVal = invalidValue (key, value); + } else { - cerr << "warning: invalid key `" << key << "'" << endl; + cerr << "Warning: invalid key `" << key << "'" << endl; returnVal = false; } return returnVal; From ea054468bc6f52c7a6ec176954089590376a300c Mon Sep 17 00:00:00 2001 From: Tiago Gomes Date: Tue, 8 Jan 2013 21:17:56 +0000 Subject: [PATCH 83/89] Improve hcli usage string --- packages/CLPBN/horus/HorusCli.cpp | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/CLPBN/horus/HorusCli.cpp b/packages/CLPBN/horus/HorusCli.cpp index 4ff83899e..82e995921 100644 --- a/packages/CLPBN/horus/HorusCli.cpp +++ b/packages/CLPBN/horus/HorusCli.cpp @@ -16,8 +16,8 @@ VarIds readQueryAndEvidence (FactorGraph&, int, const char* [], int); void runSolver (const FactorGraph&, const VarIds&); -const string USAGE = "usage: ./hcli \ -[=]... [|=]..." ; +const string USAGE = "usage: ./hcli [solver=hve|bp|cbp] \ +[=]... [|=]... " ; int From 6755e50525f852b8182d4f1d2e4d77ece0cd2be1 Mon Sep 17 00:00:00 2001 From: Tiago Gomes Date: Tue, 8 Jan 2013 22:00:31 +0000 Subject: [PATCH 84/89] PFL manual: small improvements --- packages/CLPBN/pfl.tex | 44 +++++++++++++++++++----------------------- 1 file changed, 20 insertions(+), 24 deletions(-) diff --git a/packages/CLPBN/pfl.tex b/packages/CLPBN/pfl.tex index 54ce6fa28..d6cf4ad75 100644 --- a/packages/CLPBN/pfl.tex +++ b/packages/CLPBN/pfl.tex @@ -16,6 +16,7 @@ \newcommand{\true} {\mathtt{t}} \newcommand{\false} {\mathtt{f}} +\newcommand{\pathsep} { $\triangleright$ } \newcommand{\tableline} {\noalign{\hrule height 0.8pt}} \tikzstyle{nodestyle} = [draw, thick, circle, minimum size=0.9cm] @@ -217,7 +218,7 @@ One last note for the domain of the random variables. By default all terms will bayes professor_ability::[high, medium, low] ; [0.5, 0.4, 0.1]. \end{pflcode} -More examples can be found in the CLPBN examples directory, which defaults to ``share/doc/Yap/packages/examples/CLPBN'' from the base directory where the YAP Prolog system was installed. +More probabilistic models defined using PFL can be found in the examples directory, which defaults to \texttt{\$HOME\pathsep share\pathsep doc\pathsep Yap\pathsep packages\pathsep examples\pathsep CLPBN}. 
%------------------------------------------------------------------------------ %------------------------------------------------------------------------------ @@ -226,7 +227,7 @@ More examples can be found in the CLPBN examples directory, which defaults to `` \section{Querying} In this section we demonstrate how to use PFL to solve probabilistic queries. We will use the sprinkler network as an example. -Assuming that the current directory is where the examples are located, first we load the model: +Assuming that the current directory is the one where the examples are located, first we load the model as follows. \texttt{\$ yap -l sprinker.pfl} @@ -236,7 +237,7 @@ Let's suppose that we want to estimate the marginal probability for the $WetGras The output of the goal will show the marginal probability for each $WetGrass$ possible state or value, that is, \texttt{t} and \texttt{f}. Notice that in PFL a random variable is identified by a term with the same functor and arguments plus one extra argument. -Let's now suppose that we want to estimate the probability for the same random variable, but this time we have evidence that it had rained the day before. We can estimate this probability without resorting to static evidence with: +Now let's suppose that we want to estimate the probability for the same random variable, but this time we have evidence that it had rained the day before. We can estimate this probability without resorting to static evidence with: \texttt{?- wet\_grass(X), rain(t).} @@ -310,41 +311,36 @@ It defaults to \texttt{seq\_fixed}. The \texttt{export\_libdai} and \texttt{export\_uai} keys can be used to export the current model respectively to \href{http://cs.ru.nl/~jorism/libDAI/doc/fileformats.html}{libDAI}, and \href{http://graphmod.ics.uci.edu/uai08/FileFormat}{UAI08} formats. With the \texttt{export\_graphviz} key it is possible to save the factor graph into a format that can be read by \href{http://www.graphviz.org/}{Graphviz}. The \texttt{print\_fg} key allows to print all factors before perform inference. All these four keys accept \texttt{true} and \texttt{false} as their values and only produce effect in \texttt{hve}, \texttt{bp}, and \texttt{cbp} solvers. \section{Horus Command Line} -This package also includes an utility to perform inference over probabilistic graphical models described in other formats, namely the \href{http://cs.ru.nl/~jorism/libDAI/doc/fileformats.html}{libDAI file format}, and the \href{http://graphmod.ics.uci.edu/uai08/FileFormat}{UAI08 file format} +This package also includes an external interface to YAP for perform inference over probabilistic graphical models described in formats other than PFL. Currently two are support, the \href{http://cs.ru.nl/~jorism/libDAI/doc/fileformats.html}{libDAI file format}, and the \href{http://graphmod.ics.uci.edu/uai08/FileFormat}{UAI08 file format}. -This utility is called \texttt{hcli} and can be found inside binary directory used for the YAP installation. Its usage is: +This utility is called \texttt{hcli} and its usage is as follows. \begin{verbatim} -./hcli [=]... [|=]... + $ ./hcli [solver=hve|bp|cbp] [=]... + [|=]... \end{verbatim} -Let's assume that the working directory is where \texttt{hcli} is installed. We can perform inference in any supported model by passing the file name where the model is defined as argument. Next, we show the command for loading a model described in an UAI file format. 
+Let's assume that the current directory is the one where the examples are, which defaults to \texttt{\$HOME\pathsep share\pathsep doc\pathsep Yap\pathsep packages\pathsep examples\pathsep CLPBN}. We can perform inference in any supported model by passing the file name where the model is defined as argument. Next, we show how to load a model using the \texttt{hcli} utility. -\begin{verbatim} -./hcli $EXAMPLES_DIR$/burglary-alarm.uai -\end{verbatim} +\texttt{\$ ./hcli burglary-alarm.uai} -With this command, the program will load the model and print the marginal probabilities for all random variables defined in the model. We can view only the marginal probability for some variable with a identifier $X$, if we pass $X$ as an extra argument following the file name. For instance, the following command will show only the marginal probability for the variable with identifier $0$. +With the above command, the program will load the model and print the marginal probabilities for all defined random variables. We can view only the marginal probability for some variable with a identifier $X$, if we pass $X$ as an extra argument following the file name. For instance, the following command will output only the marginal probability for the variable with identifier $0$. -\begin{verbatim} -./hcli $EXAMPLES_DIR$/burglary-alarm.uai 0 -\end{verbatim} +\texttt{\$ ./hcli burglary-alarm.uai 0} -If we give more than one variable identifier as argument, the program will show the joint probability for all variables given. +If we give more than one variable identifier as argument, the program will output the joint probability for all variables given. -Evidence can be given as pairs with a variable identifier and its observed state (index), separated by a '=`. For instance, we can introduce knowledge that some variable with identifier $0$ has evidence on its second state as follows. +Evidence can be given as a pair containing a variable identifier and its observed state (index), separated by a '=`. For instance, we can introduce knowledge that some variable with identifier $0$ has evidence on its second state as follows. -\begin{verbatim} -./hcli $EXAMPLES_DIR$/burglary-alarm.uai 0=1 -\end{verbatim} +\texttt{\$ ./hcli burglary-alarm.uai 0=1} -By default, all probability tasks are resolved with the \texttt{hve} solver. It is possible to choose another solver using the \texttt{ground\_solver} key as follows. Note that only \texttt{hve}, \texttt{bp} and \texttt{cbp} can be used in \texttt{hcli}. +By default, all probability tasks are resolved using the \texttt{hve} solver. It is possible to choose another solver using the \texttt{solver} key as follows. -\begin{verbatim} -./hcli ground_solver=bp ../examples/burglary-alarm.uai -\end{verbatim} +\texttt{\$ ./hcli solver=bp burglary-alarm.uai} -The options that are available with the \texttt{set\_horus\_flag/2} predicate can be used in \texttt{hcli} too. The syntax to use are pairs $Key=Value$ before the model's file name. +Notice that only the \texttt{hve}, \texttt{bp} and \texttt{cbp} solvers can be used with \texttt{hcli}. + +The options that are available with the \texttt{set\_horus\_flag/2} predicate can be used in \texttt{hcli} too. The syntax to use are pairs \texttt{=} before the model's file name. \section{Further Information} Please check the paper \textit{Evaluating Inference Algorithms for the Prolog Factor Language} for further information. 
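Because the set_horus_flag/2 keys are also accepted on the command line, solver selection, a message schedule, evidence and a query variable can be combined in a single hcli call. In the sketch below the variable identifiers 0 and 2 are assumed to exist in the burglary-alarm model, and max_residual is just one of the documented bp_msg_schedule values.

    # Sketch: bp solver with max_residual scheduling, evidence "variable 0 in its
    # second state", and the marginal of variable 2 as the query.
    ./hcli solver=bp bp_msg_schedule=max_residual burglary-alarm.uai 0=1 2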
From 59ae52623c3c1b8c979b2818d14643245c90d9f0 Mon Sep 17 00:00:00 2001 From: Tiago Gomes Date: Tue, 8 Jan 2013 22:51:44 +0000 Subject: [PATCH 85/89] PFL manual: several improvements --- packages/CLPBN/pfl.tex | 57 ++++++++++++++++++++++++++++++++++++++---- 1 file changed, 52 insertions(+), 5 deletions(-) diff --git a/packages/CLPBN/pfl.tex b/packages/CLPBN/pfl.tex index d6cf4ad75..52ac251ab 100644 --- a/packages/CLPBN/pfl.tex +++ b/packages/CLPBN/pfl.tex @@ -25,7 +25,12 @@ \setlength{\parskip}{\baselineskip} \title{\Huge\textbf{Prolog Factor Language (PFL) Manual}} -\author{Tiago Gomes, V\'{i}tor Santos Costa} + +\author{Tiago Gomes\\\texttt{tiago.avv@gmail.com} \and V\'{i}tor Santos Costa\\\texttt{vsc@fc.up.pt}\\\\ +CRACS \& INESC TEC, Faculty of Sciences, University of Porto +} + + \date{} \maketitle @@ -36,6 +41,8 @@ \end{center} \newpage + + %------------------------------------------------------------------------------ %------------------------------------------------------------------------------ %------------------------------------------------------------------------------ @@ -45,6 +52,12 @@ The Prolog Factor Language (PFL) is a language that extends Prolog for providing The package also includes implementations for a set of well-known inference algorithms for solving probabilistic queries on these models. Both ground and lifted inference methods are support. + + +%------------------------------------------------------------------------------ +%------------------------------------------------------------------------------ +%------------------------------------------------------------------------------ +%------------------------------------------------------------------------------ \section{Installation} PFL is included with the \href{http://www.dcc.fc.up.pt/~vsc/Yap/}{YAP} Prolog system. However, there isn't yet a stable release of YAP that includes PFL. So it is required to install a development version of YAP. To to this, you will need to have installed the Git version control system. The commands to do a default installation of YAP in the user's home in a Unix-based environment are shown next. @@ -57,7 +70,12 @@ PFL is included with the \href{http://www.dcc.fc.up.pt/~vsc/Yap/}{YAP} Prolog sy \item \texttt{\$ make depend \& make install} \end{enumerate} -In case you want to install YAP somewhere else or with different settings, please consult the YAP documentation. From now on, we will assume that the directory \$HOME/bin (where the binary can be found) is in your \$PATH environment variable. +In case you want to install YAP somewhere else or with different settings, please consult the YAP documentation. From now on, we will assume that the directory \texttt{\$HOME\pathsep bin} (where the binary can be found) is in your \texttt{\$PATH} environment variable. + +\label{examples-directory} +Once in a while, we will refer to the PFL examples directory. In a default installation, this directory will be located at \texttt{\$HOME\pathsep share\pathsep doc\pathsep Yap\pathsep packages\pathsep examples\pathsep CLPBN}. + + %------------------------------------------------------------------------------ %------------------------------------------------------------------------------ @@ -141,7 +159,7 @@ $$Type~~F~~;~~Phi~~;~~C.$$ \end{center} \end{figure} -Towards a better understanding of the language, next we show the PFL representation for network found in Figure~\ref{fig:sprinkler-bn}. 
+Towards a better understanding of the language, next we show the PFL representation for the network found in Figure~\ref{fig:sprinkler-bn}. \begin{pflcode} :- use_module(library(pfl)). @@ -218,7 +236,9 @@ One last note for the domain of the random variables. By default all terms will bayes professor_ability::[high, medium, low] ; [0.5, 0.4, 0.1]. \end{pflcode} -More probabilistic models defined using PFL can be found in the examples directory, which defaults to \texttt{\$HOME\pathsep share\pathsep doc\pathsep Yap\pathsep packages\pathsep examples\pathsep CLPBN}. +More probabilistic models defined using PFL can be found in the examples directory. + + %------------------------------------------------------------------------------ %------------------------------------------------------------------------------ @@ -246,6 +266,19 @@ PFL also supports calculating joint probability distributions. For instance, we \texttt{?- sprinkler(X), rain(Y).} + +%------------------------------------------------------------------------------ +%------------------------------------------------------------------------------ +%------------------------------------------------------------------------------ +%------------------------------------------------------------------------------ +\section{Parameter Learning} +PFL is capable to learn the parameters for bayesian networks, through an implementation of the expectation-maximization algorithm. + +Inside the \texttt{learning} directory from the examples directory, one can find some examples of how learning works in PFL. + +We can define the solver that will be used for the inference part during parameter learning with the \texttt{set\_em\_solver/1} predicate (defaults to \texttt{hve}). At the moment, only the following solvers support parameter learning: \texttt{ve}, \texttt{hve}, \texttt{bdd}, \texttt{bp} and \texttt{cbp}. + + %------------------------------------------------------------------------------ %------------------------------------------------------------------------------ %------------------------------------------------------------------------------ @@ -310,6 +343,12 @@ It defaults to \texttt{seq\_fixed}. The \texttt{export\_libdai} and \texttt{export\_uai} keys can be used to export the current model respectively to \href{http://cs.ru.nl/~jorism/libDAI/doc/fileformats.html}{libDAI}, and \href{http://graphmod.ics.uci.edu/uai08/FileFormat}{UAI08} formats. With the \texttt{export\_graphviz} key it is possible to save the factor graph into a format that can be read by \href{http://www.graphviz.org/}{Graphviz}. The \texttt{print\_fg} key allows to print all factors before perform inference. All these four keys accept \texttt{true} and \texttt{false} as their values and only produce effect in \texttt{hve}, \texttt{bp}, and \texttt{cbp} solvers. + + +%------------------------------------------------------------------------------ +%------------------------------------------------------------------------------ +%------------------------------------------------------------------------------ +%------------------------------------------------------------------------------ \section{Horus Command Line} This package also includes an external interface to YAP for perform inference over probabilistic graphical models described in formats other than PFL. Currently two are support, the \href{http://cs.ru.nl/~jorism/libDAI/doc/fileformats.html}{libDAI file format}, and the \href{http://graphmod.ics.uci.edu/uai08/FileFormat}{UAI08 file format}. 
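To make the parameter learning workflow concrete, the sketch below chains the predicates mentioned above, mirroring the School benchmark script that appears later in this patch series. The network and evidence file names (school_32 and missing5) and their relative paths are assumptions borrowed from that script; point them at wherever the learning examples live on your installation.

    # Sketch: EM parameter learning, with hve used for the inference steps.
    yap << EOF
    use_module(library(pfl)).
    use_module(library(clpbn/learning/em)).
    ['../../examples/School/school_32'].
    [missing5].
    set_em_solver(hve).
    graph(L), em(L, 0.01, 10, _, Lik), format("log-likelihood: ~3f~n", [Lik]).
    EOF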
@@ -320,7 +359,7 @@ This utility is called \texttt{hcli} and its usage is as follows. [|=]... \end{verbatim} -Let's assume that the current directory is the one where the examples are, which defaults to \texttt{\$HOME\pathsep share\pathsep doc\pathsep Yap\pathsep packages\pathsep examples\pathsep CLPBN}. We can perform inference in any supported model by passing the file name where the model is defined as argument. Next, we show how to load a model using the \texttt{hcli} utility. +Let's assume that the current directory is the one where the examples are located. We can perform inference in any supported model by passing the file name where the model is defined as argument. Next, we show how to load a model using the \texttt{hcli} utility. \texttt{\$ ./hcli burglary-alarm.uai} @@ -342,7 +381,15 @@ Notice that only the \texttt{hve}, \texttt{bp} and \texttt{cbp} solvers can be u The options that are available with the \texttt{set\_horus\_flag/2} predicate can be used in \texttt{hcli} too. The syntax to use are pairs \texttt{=} before the model's file name. + + +%------------------------------------------------------------------------------ +%------------------------------------------------------------------------------ +%------------------------------------------------------------------------------ +%------------------------------------------------------------------------------ \section{Further Information} Please check the paper \textit{Evaluating Inference Algorithms for the Prolog Factor Language} for further information. +Any question don't hesitate to contact us! + \end{document} From 24271157cee53301674bf8214b68caad65ce0b48 Mon Sep 17 00:00:00 2001 From: Tiago Gomes Date: Tue, 8 Jan 2013 22:55:27 +0000 Subject: [PATCH 86/89] Clean unecessary latex documents --- packages/CLPBN/Makefile.in | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/CLPBN/Makefile.in b/packages/CLPBN/Makefile.in index 2ad9616c2..53f45c1b6 100644 --- a/packages/CLPBN/Makefile.in +++ b/packages/CLPBN/Makefile.in @@ -135,7 +135,7 @@ install: $(CLBN_TOP) $(CLBN_PROGRAMS) $(CLPBN_LEARNING_PROGRAMS) $(CLPBN_SCHOOL_ docs: $(MANUAL) $(PDFLATEX) $(PFL_MANUAL) $(PDFLATEX) $(PFL_MANUAL) - rm pfl.aux pfl.log + rm pfl.aux pfl.bbl pfl.blg pfl.log pfl.out install_docs: docs From 7fabfcf4c97f579c6173ef97a33a9f79c34d8ddc Mon Sep 17 00:00:00 2001 From: "tagomes@fc.up.pt" Date: Wed, 9 Jan 2013 11:43:40 +0000 Subject: [PATCH 87/89] Trivial --- packages/CLPBN/Makefile.in | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/CLPBN/Makefile.in b/packages/CLPBN/Makefile.in index 53f45c1b6..c43612cd1 100644 --- a/packages/CLPBN/Makefile.in +++ b/packages/CLPBN/Makefile.in @@ -135,7 +135,7 @@ install: $(CLBN_TOP) $(CLBN_PROGRAMS) $(CLPBN_LEARNING_PROGRAMS) $(CLPBN_SCHOOL_ docs: $(MANUAL) $(PDFLATEX) $(PFL_MANUAL) $(PDFLATEX) $(PFL_MANUAL) - rm pfl.aux pfl.bbl pfl.blg pfl.log pfl.out + rm -f pfl.aux pfl.bbl pfl.blg pfl.log pfl.out install_docs: docs From 7f452d23978bdbdf043789d5c5e9aecb7b36e9cb Mon Sep 17 00:00:00 2001 From: tacgomes Date: Wed, 9 Jan 2013 15:25:14 +0000 Subject: [PATCH 88/89] Refactor the benchmarks --- packages/CLPBN/benchmarks/benchs.sh | 38 +++--- packages/CLPBN/benchmarks/city/bp_tests.sh | 2 +- packages/CLPBN/benchmarks/city/cbp_tests.sh | 2 +- packages/CLPBN/benchmarks/city/city.sh | 2 +- packages/CLPBN/benchmarks/city/gen_city.sh | 2 +- packages/CLPBN/benchmarks/city/hve_tests.sh | 2 +- packages/CLPBN/benchmarks/city/lbp_tests.sh | 2 +- 
.../benchmarks/comp_workshops/bp_tests.sh | 2 +- .../benchmarks/comp_workshops/cbp_tests.sh | 2 +- .../CLPBN/benchmarks/comp_workshops/cw.sh | 2 +- .../comp_workshops/gen_workshops.sh | 2 +- .../benchmarks/comp_workshops/hve_tests.sh | 2 +- .../benchmarks/comp_workshops/lbp_tests.sh | 2 +- packages/CLPBN/benchmarks/run_all.sh | 2 +- .../benchmarks/school/run_school_tests.sh | 129 +++++++----------- .../CLPBN/benchmarks/smokers_evidence/sm.sh | 8 -- .../{smokers => social_network2}/bp_tests.sh | 4 +- .../{smokers => social_network2}/cbp_tests.sh | 4 +- .../gen_people.sh | 2 +- .../{smokers => social_network2}/hve_tests.sh | 10 +- .../{smokers => social_network2}/lbp_tests.sh | 4 +- .../{smokers => social_network2}/lve_tests.sh | 2 +- .../{smokers/sm.sh => social_network2/sn2.sh} | 4 +- .../bp_tests.sh | 4 +- .../cbp_tests.sh | 4 +- .../gen_people.sh | 2 +- .../hve_tests.sh | 10 +- .../lbp_tests.sh | 0 .../lve_tests.sh | 2 +- .../social_network2_evidence/sn2ev.sh | 8 ++ .../benchmarks/workshop_attrs/bp_tests.sh | 2 +- .../benchmarks/workshop_attrs/cbp_tests.sh | 2 +- .../benchmarks/workshop_attrs/gen_attrs.sh | 6 +- .../benchmarks/workshop_attrs/hve_tests.sh | 2 +- .../benchmarks/workshop_attrs/lbp_tests.sh | 2 +- .../CLPBN/benchmarks/workshop_attrs/wa.sh | 2 +- 36 files changed, 125 insertions(+), 152 deletions(-) delete mode 100755 packages/CLPBN/benchmarks/smokers_evidence/sm.sh rename packages/CLPBN/benchmarks/{smokers => social_network2}/bp_tests.sh (86%) rename packages/CLPBN/benchmarks/{smokers => social_network2}/cbp_tests.sh (86%) rename packages/CLPBN/benchmarks/{smokers => social_network2}/gen_people.sh (90%) rename packages/CLPBN/benchmarks/{smokers => social_network2}/hve_tests.sh (64%) rename packages/CLPBN/benchmarks/{smokers => social_network2}/lbp_tests.sh (86%) rename packages/CLPBN/benchmarks/{smokers => social_network2}/lve_tests.sh (97%) rename packages/CLPBN/benchmarks/{smokers/sm.sh => social_network2/sn2.sh} (59%) rename packages/CLPBN/benchmarks/{smokers_evidence => social_network2_evidence}/bp_tests.sh (88%) rename packages/CLPBN/benchmarks/{smokers_evidence => social_network2_evidence}/cbp_tests.sh (88%) rename packages/CLPBN/benchmarks/{smokers_evidence => social_network2_evidence}/gen_people.sh (96%) rename packages/CLPBN/benchmarks/{smokers_evidence => social_network2_evidence}/hve_tests.sh (68%) rename packages/CLPBN/benchmarks/{smokers_evidence => social_network2_evidence}/lbp_tests.sh (100%) rename packages/CLPBN/benchmarks/{smokers_evidence => social_network2_evidence}/lve_tests.sh (97%) create mode 100755 packages/CLPBN/benchmarks/social_network2_evidence/sn2ev.sh diff --git a/packages/CLPBN/benchmarks/benchs.sh b/packages/CLPBN/benchmarks/benchs.sh index f050c0584..7556b7e5c 100755 --- a/packages/CLPBN/benchmarks/benchs.sh +++ b/packages/CLPBN/benchmarks/benchs.sh @@ -1,5 +1,4 @@ - function prepare_new_run { YAP=~/bin/$SHORTNAME-$SOLVER @@ -17,32 +16,33 @@ function prepare_new_run function run_solver { - constraint=$1 + echo $LOG_FILE + CONSTRAINT=$1 solver_flag=true if [ -n "$2" ]; then if [ $SOLVER = hve ]; then - solver_flag=clpbn_horus:set_horus_flag\(elim_heuristic,$2\) + SOLVER_FLAG=set_horus_flag\(hve_elim_heuristic,$2\) elif [ $SOLVER = bp ]; then - solver_flag=clpbn_horus:set_horus_flag\(schedule,$2\) + SOLVER_FLAG=set_horus_flag\(bp_msg_schedule,$2\) elif [ $SOLVER = cbp ]; then - solver_flag=clpbn_horus:set_horus_flag\(schedule,$2\) + SOLVER_FLAG=set_horus_flag\(bp_msg_schedule,$2\) elif [ $SOLVER = lbp ]; then - 
solver_flag=clpbn_horus:set_horus_flag\(schedule,$2\) + SOLVER_FLAG=set_horus_flag\(bp_msg_schedule,$2\) else echo "unknow flag $2" fi fi /usr/bin/time -o $LOG_FILE -a -f "%U\t%S\t%e\t%M" \ - $YAP << EOF >> $LOG_FILE &>> ignore.$LOG_FILE + $YAP << EOF >> ignore.$LOG_FILE 2>> ignore.$LOG_FILE nogc. [$NETWORK]. -[$constraint]. -clpbn_horus:set_solver($SOLVER). -clpbn_horus:set_horus_flag(use_logarithms, true). -clpbn_horus:set_horus_flag(verbosity, 1). -$solver_flag. +[$CONSTRAINT]. +set_solver($SOLVER). +set_horus_flag(verbosity, 1). +set_horus_flag(use_logarithms, true). +$SOLVER_FLAG. $QUERY. -open("$LOG_FILE", 'append', S), format(S, '$constraint ~15+ ', []), close(S). +open("$LOG_FILE", 'append', S), format(S, "$CONSTRAINT ~15+ ", []), close(S). EOF } @@ -52,12 +52,16 @@ function clear_log_files { rm -f *~ rm -f ../*~ - rm -f school/*.log school/*~ - rm -f ../school/*.log ../school/*~ - rm -f city/*.log city/*~ - rm -f ../city/*.log ../city/*~ rm -f workshop_attrs/*.log workshop_attrs/*~ rm -f ../workshop_attrs/*.log ../workshop_attrs/*~ + rm -f comp_workshops/*.log comp_workshops/*~ + rm -f ../comp_workshops/*.log ../comp_workshops/*~ + rm -f city/*.log city/*~ + rm -f ../city/*.log ../city/*~ + rm -f social_network2/*.log social_network2/*~ + rm -f ../social_network2/*.log ../social_network2/*~ + rm -f social_network2_evidence/*.log social_network2_evidence/*~ + rm -f ../social_network2_evidence/*.log ../social_network2_evidence/*~ echo all done! } diff --git a/packages/CLPBN/benchmarks/city/bp_tests.sh b/packages/CLPBN/benchmarks/city/bp_tests.sh index cfd74d818..cd7b82d50 100755 --- a/packages/CLPBN/benchmarks/city/bp_tests.sh +++ b/packages/CLPBN/benchmarks/city/bp_tests.sh @@ -33,5 +33,5 @@ function run_all_graphs } prepare_new_run -run_all_graphs "bp(shedule=seq_fixed) " seq_fixed +run_all_graphs "bp(bp_msg_shedule=seq_fixed) " seq_fixed diff --git a/packages/CLPBN/benchmarks/city/cbp_tests.sh b/packages/CLPBN/benchmarks/city/cbp_tests.sh index dd9868ae2..3fb8c1693 100755 --- a/packages/CLPBN/benchmarks/city/cbp_tests.sh +++ b/packages/CLPBN/benchmarks/city/cbp_tests.sh @@ -32,5 +32,5 @@ function run_all_graphs } prepare_new_run -run_all_graphs "cbp(shedule=seq_fixed) " seq_fixed +run_all_graphs "cbp(bp_msg_shedule=seq_fixed) " seq_fixed diff --git a/packages/CLPBN/benchmarks/city/city.sh b/packages/CLPBN/benchmarks/city/city.sh index 27fa93935..4ad08d871 100755 --- a/packages/CLPBN/benchmarks/city/city.sh +++ b/packages/CLPBN/benchmarks/city/city.sh @@ -1,6 +1,6 @@ #!/bin/bash -NETWORK="'../../examples/city'" +NETWORK="'../../examples/city.pfl'" SHORTNAME="city" QUERY="is_joe_guilty(X)" diff --git a/packages/CLPBN/benchmarks/city/gen_city.sh b/packages/CLPBN/benchmarks/city/gen_city.sh index 7c95f4ed3..fdc094cbc 100755 --- a/packages/CLPBN/benchmarks/city/gen_city.sh +++ b/packages/CLPBN/benchmarks/city/gen_city.sh @@ -19,7 +19,7 @@ main :- generate_people(S, N, Counting) :- Counting > N, !. generate_people(S, N, Counting) :- - format(S, 'people(p~w, nyc).~n', [Counting]), + format(S, 'person(p~w, nyc).~n', [Counting]), Counting1 is Counting + 1, generate_people(S, N, Counting1). 
diff --git a/packages/CLPBN/benchmarks/city/hve_tests.sh b/packages/CLPBN/benchmarks/city/hve_tests.sh index 37de284ab..3f486977c 100755 --- a/packages/CLPBN/benchmarks/city/hve_tests.sh +++ b/packages/CLPBN/benchmarks/city/hve_tests.sh @@ -33,5 +33,5 @@ function run_all_graphs } prepare_new_run -run_all_graphs "hve(elim_heuristic=min_neighbors) " min_neighbors +run_all_graphs "hve(hve_elim_heuristic=min_neighbors) " min_neighbors diff --git a/packages/CLPBN/benchmarks/city/lbp_tests.sh b/packages/CLPBN/benchmarks/city/lbp_tests.sh index ff7b4ff27..c56946539 100755 --- a/packages/CLPBN/benchmarks/city/lbp_tests.sh +++ b/packages/CLPBN/benchmarks/city/lbp_tests.sh @@ -32,5 +32,5 @@ function run_all_graphs } prepare_new_run -run_all_graphs "lbp(shedule=seq_fixed) " seq_fixed +run_all_graphs "lbp(bp_msg_shedule=seq_fixed) " seq_fixed diff --git a/packages/CLPBN/benchmarks/comp_workshops/bp_tests.sh b/packages/CLPBN/benchmarks/comp_workshops/bp_tests.sh index 6120edf5d..fce092fc8 100755 --- a/packages/CLPBN/benchmarks/comp_workshops/bp_tests.sh +++ b/packages/CLPBN/benchmarks/comp_workshops/bp_tests.sh @@ -27,5 +27,5 @@ function run_all_graphs } prepare_new_run -run_all_graphs "bp(shedule=seq_fixed) " seq_fixed +run_all_graphs "bp(bp_msg_shedule=seq_fixed) " seq_fixed diff --git a/packages/CLPBN/benchmarks/comp_workshops/cbp_tests.sh b/packages/CLPBN/benchmarks/comp_workshops/cbp_tests.sh index 26b5fc5ad..2f4c9abe4 100755 --- a/packages/CLPBN/benchmarks/comp_workshops/cbp_tests.sh +++ b/packages/CLPBN/benchmarks/comp_workshops/cbp_tests.sh @@ -26,5 +26,5 @@ function run_all_graphs } prepare_new_run -run_all_graphs "cbp(shedule=seq_fixed) " seq_fixed +run_all_graphs "cbp(bp_msg_shedule=seq_fixed) " seq_fixed diff --git a/packages/CLPBN/benchmarks/comp_workshops/cw.sh b/packages/CLPBN/benchmarks/comp_workshops/cw.sh index cf5c8215b..574feb7b1 100755 --- a/packages/CLPBN/benchmarks/comp_workshops/cw.sh +++ b/packages/CLPBN/benchmarks/comp_workshops/cw.sh @@ -1,6 +1,6 @@ #!/bin/bash -NETWORK="'../../examples/comp_workshops'" +NETWORK="'../../examples/comp_workshops.pfl'" SHORTNAME="cw" QUERY="series(X)" diff --git a/packages/CLPBN/benchmarks/comp_workshops/gen_workshops.sh b/packages/CLPBN/benchmarks/comp_workshops/gen_workshops.sh index 1b2baaeeb..d032112a9 100755 --- a/packages/CLPBN/benchmarks/comp_workshops/gen_workshops.sh +++ b/packages/CLPBN/benchmarks/comp_workshops/gen_workshops.sh @@ -29,7 +29,7 @@ gen(S, NP, NW, Count) :- gen_workshops(_, _, NW, Count) :- Count > NW, !. gen_workshops(S, P, NW, Count) :- - format(S, 'c(p~w,w~w).~n', [P,Count]), + format(S, 'reg(p~w,w~w).~n', [P,Count]), Count1 is Count + 1, gen_workshops(S, P, NW, Count1). 
diff --git a/packages/CLPBN/benchmarks/comp_workshops/hve_tests.sh b/packages/CLPBN/benchmarks/comp_workshops/hve_tests.sh index 3fa724e96..c713bfd05 100755 --- a/packages/CLPBN/benchmarks/comp_workshops/hve_tests.sh +++ b/packages/CLPBN/benchmarks/comp_workshops/hve_tests.sh @@ -26,5 +26,5 @@ function run_all_graphs } prepare_new_run -run_all_graphs "hve(elim_heuristic=min_neighbors) " min_neighbors +run_all_graphs "hve(hve_elim_heuristic=min_neighbors) " min_neighbors diff --git a/packages/CLPBN/benchmarks/comp_workshops/lbp_tests.sh b/packages/CLPBN/benchmarks/comp_workshops/lbp_tests.sh index 8f850c4bd..706b84e35 100755 --- a/packages/CLPBN/benchmarks/comp_workshops/lbp_tests.sh +++ b/packages/CLPBN/benchmarks/comp_workshops/lbp_tests.sh @@ -26,5 +26,5 @@ function run_all_graphs } prepare_new_run -run_all_graphs "lbp(shedule=seq_fixed) " seq_fixed +run_all_graphs "lbp(bp_msg_shedule=seq_fixed) " seq_fixed diff --git a/packages/CLPBN/benchmarks/run_all.sh b/packages/CLPBN/benchmarks/run_all.sh index 4962fd7e5..9123bfa76 100755 --- a/packages/CLPBN/benchmarks/run_all.sh +++ b/packages/CLPBN/benchmarks/run_all.sh @@ -24,7 +24,7 @@ source lbp_tests.sh source cbp_tests.sh cd .. -cd smokers +cd social_network2 source hve_tests.sh source bp_tests.sh source lve_tests.sh diff --git a/packages/CLPBN/benchmarks/school/run_school_tests.sh b/packages/CLPBN/benchmarks/school/run_school_tests.sh index 716c03a34..3cc56eb07 100755 --- a/packages/CLPBN/benchmarks/school/run_school_tests.sh +++ b/packages/CLPBN/benchmarks/school/run_school_tests.sh @@ -1,95 +1,64 @@ #!/bin/bash -#cp ~/bin/yap ~/bin/school_all -#YAP=~/bin/school_all -YAP=~/bin/yap +source ../benchs.sh -#OUT_FILE_NAME=results`date "+ %H:%M:%S %d-%m-%Y"`.log -OUT_FILE_NAME=results.log -rm -f $OUT_FILE_NAME -rm -f ignore.$OUT_FILE_NAME +SHORTNAME="school" +SOLVER="school" -# yap -g "['../../../../examples/School/sch32'], [missing5], use_module(library(clpbn/learning/em)), graph(L), clpbn:set_clpbn_flag(em_solver,bp), clpbn_horus:set_horus_flag(inf_alg, bp), statistics(runtime, _), em(L,0.01,10,_,Lik), statistics(runtime, [T,_])." -function run_solver +function learn_params { -if [ $2 = bp ] -then - if [ $4 = ve ] - then - extra_flag1=clpbn_horus:set_horus_flag\(inf_alg,$4\) - extra_flag2=clpbn_horus:set_horus_flag\(elim_heuristic,$5\) - else - extra_flag1=clpbn_horus:set_horus_flag\(inf_alg,$4\) - extra_flag2=clpbn_horus:set_horus_flag\(schedule,$5\) - fi -else - extra_flag1=true - extra_flag2=true -fi -/usr/bin/time -o "$OUT_FILE_NAME" -a -f "real:%E\tuser:%U\tsys:%S" $YAP << EOF &>> "ignore.$OUT_FILE_NAME" -:- [pos:train]. -:- ['../../../../examples/School/sch32']. -:- use_module(library(clpbn/learning/em)). -:- use_module(library(clpbn/bp)). -[$1]. + NETWORK="'./../../examples/School/school_32'" + CONSTRAINT=$2 + SOLVER=$1 + echo $NETWORK + /usr/bin/time -o $LOG_FILE -a -f "%U\t%S\t%e\t%M" \ + $YAP << EOF >> ignore.$LOG_FILE 2>> ignore.$LOG_FILE +use_module(library(pfl)). +use_module(library(clpbn/learning/em)). +[$NETWORK]. +[$CONSTRAINT]. +set_em_solver($SOLVER). graph(L), - clpbn:set_clpbn_flag(em_solver,$2), - $extra_flag1, $extra_flag2, - em(L,0.01,10,_,Lik), - open("$OUT_FILE_NAME", 'append',S), - format(S, '$3: ~11+ Lik = ~3f, ',[Lik]), - close(S). +% em(L, 0.01, 10, _, Lik), + open("$LOG_FILE", 'append', S), + format(S, "$CONSTRAINT: ~15+ Lik = ~3f\t", [Lik]), + close(S). 
EOF } -function run_all_graphs -{ - echo "************************************************************************" >> "$OUT_FILE_NAME" - echo "results for solver $2" >> "$OUT_FILE_NAME" - echo "************************************************************************" >> "$OUT_FILE_NAME" - run_solver missing5 $1 missing5 $3 $4 $5 - run_solver missing10 $1 missing10 $3 $4 $5 - #run_solver missing20 $1 missing20 $3 $4 $5 - #run_solver missing30 $1 missing30 $3 $4 $5 - #run_solver missing40 $1 missing40 $3 $4 $5 - #run_solver missing50 $1 missing50 $3 $4 $5 -} +prepare_new_run - -#run_all_graphs bp "hve(min_neighbors) " ve min_neighbors -#run_all_graphs bp "bp(seq_fixed) " bp seq_fixed -#run_all_graphs bp "cbp(seq_fixed) " cbp seq_fixed -exit +write_header hve +learn_params hve missing5 +learn_params hve missing10 +learn_params hve missing20 +#learn_params hve missing30 +#learn_params hve missing40 +#learn_params hve missing50 +write_header ve +learn_params ve missing5 +learn_params ve missing10 +learn_params ve missing20 +#learn_params ve missing30 +#learn_params ve missing40 +#learn_params hve missing50 -run_all_graphs bp "hve(min_neighbors) " ve min_neighbors -run_all_graphs bp "hve(min_weight) " ve min_weight -run_all_graphs bp "hve(min_fill) " ve min_fill -run_all_graphs bp "hve(w_min_fill) " ve weighted_min_fill -run_all_graphs bp "bp(seq_fixed) " bp seq_fixed -run_all_graphs bp "bp(max_residual) " bp max_residual -run_all_graphs bp "cbp(seq_fixed) " cbp seq_fixed -run_all_graphs bp "cbp(max_residual) " cbp max_residual -run_all_graphs gibbs "gibbs " -echo "************************************************************************" >> "$OUT_FILE_NAME" -echo "results for solver ve" >> "$OUT_FILE_NAME" -echo "************************************************************************" >> "$OUT_FILE_NAME" -run_solver missing5 ve missing5 $3 $4 $5 -run_solver missing10 ve missing10 $3 $4 $5 -run_solver missing20 ve missing20 $3 $4 $5 -run_solver missing30 ve missing30 $3 $4 $5 -run_solver missing40 ve missing40 $3 $4 $5 -#run_solver missing50 ve missing50 $3 $4 $5 #+24h! -echo "************************************************************************" >> "$OUT_FILE_NAME" -echo "results for solver jt" >> "$OUT_FILE_NAME" -echo "************************************************************************" >> "$OUT_FILE_NAME" -run_solver missing5 jt missing5 $3 $4 $5 -run_solver missing10 jt missing10 $3 $4 $5 -run_solver missing20 jt missing20 $3 $4 $5 -#run_solver missing30 jt missing30 $3 $4 $5 #+24h! -#run_solver missing40 jt missing40 $3 $4 $5 #+24h! -#run_solver missing50 jt missing50 $3 $4 $5 #+24h! 
-exit +write_header bp +learn_params bp missing5 +learn_params bp missing10 +learn_params bp missing20 +#learn_params bp missing30 +#learn_params bp missing40 +#learn_params bp missing50 + +write_header cbp +learn_params cbp missing5 +learn_params cbp missing10 +learn_params cbp missing20 +#learn_params cbp missing30 +#learn_params cbp missing40 +#learn_params cbp missing50 diff --git a/packages/CLPBN/benchmarks/smokers_evidence/sm.sh b/packages/CLPBN/benchmarks/smokers_evidence/sm.sh deleted file mode 100755 index 59f6d9d39..000000000 --- a/packages/CLPBN/benchmarks/smokers_evidence/sm.sh +++ /dev/null @@ -1,8 +0,0 @@ -#!/bin/bash - -NETWORK="'../../examples/social_domain2'" -SHORTNAME="sm" -QUERY="query(X)" - -POP=500 - diff --git a/packages/CLPBN/benchmarks/smokers/bp_tests.sh b/packages/CLPBN/benchmarks/social_network2/bp_tests.sh similarity index 86% rename from packages/CLPBN/benchmarks/smokers/bp_tests.sh rename to packages/CLPBN/benchmarks/social_network2/bp_tests.sh index 65891401e..2af91cbde 100755 --- a/packages/CLPBN/benchmarks/smokers/bp_tests.sh +++ b/packages/CLPBN/benchmarks/social_network2/bp_tests.sh @@ -1,6 +1,6 @@ #!/bin/bash -source sm.sh +source sn2.sh source ../benchs.sh SOLVER="bp" @@ -26,5 +26,5 @@ function run_all_graphs } prepare_new_run -run_all_graphs "bp(shedule=seq_fixed) " seq_fixed +run_all_graphs "bp(bp_msg_shedule=seq_fixed) " seq_fixed diff --git a/packages/CLPBN/benchmarks/smokers/cbp_tests.sh b/packages/CLPBN/benchmarks/social_network2/cbp_tests.sh similarity index 86% rename from packages/CLPBN/benchmarks/smokers/cbp_tests.sh rename to packages/CLPBN/benchmarks/social_network2/cbp_tests.sh index 169a90bf6..828a27fad 100755 --- a/packages/CLPBN/benchmarks/smokers/cbp_tests.sh +++ b/packages/CLPBN/benchmarks/social_network2/cbp_tests.sh @@ -1,6 +1,6 @@ #!/bin/bash -source sm.sh +source sn2.sh source ../benchs.sh SOLVER="cbp" @@ -26,5 +26,5 @@ function run_all_graphs } prepare_new_run -run_all_graphs "cbp(shedule=seq_fixed) " seq_fixed +run_all_graphs "cbp(bp_msg_shedule=seq_fixed) " seq_fixed diff --git a/packages/CLPBN/benchmarks/smokers/gen_people.sh b/packages/CLPBN/benchmarks/social_network2/gen_people.sh similarity index 90% rename from packages/CLPBN/benchmarks/smokers/gen_people.sh rename to packages/CLPBN/benchmarks/social_network2/gen_people.sh index b8eebbfa1..f11c64811 100755 --- a/packages/CLPBN/benchmarks/smokers/gen_people.sh +++ b/packages/CLPBN/benchmarks/social_network2/gen_people.sh @@ -17,7 +17,7 @@ main :- generate_people(S, N, Counting) :- Counting > N, !. generate_people(S, N, Counting) :- - format(S, 'people(p~w).~n', [Counting]), + format(S, 'person(p~w).~n', [Counting]), Counting1 is Counting + 1, generate_people(S, N, Counting1). 
diff --git a/packages/CLPBN/benchmarks/smokers/hve_tests.sh b/packages/CLPBN/benchmarks/social_network2/hve_tests.sh similarity index 64% rename from packages/CLPBN/benchmarks/smokers/hve_tests.sh rename to packages/CLPBN/benchmarks/social_network2/hve_tests.sh index 14e7c6daf..b5f7d3045 100755 --- a/packages/CLPBN/benchmarks/smokers/hve_tests.sh +++ b/packages/CLPBN/benchmarks/social_network2/hve_tests.sh @@ -1,6 +1,6 @@ #!/bin/bash -source sm.sh +source sn2.sh source ../benchs.sh SOLVER="hve" @@ -26,8 +26,8 @@ function run_all_graphs } prepare_new_run -run_all_graphs "hve(elim_heuristic=min_neighbors) " min_neighbors -#run_all_graphs "hve(elim_heuristic=min_weight) " min_weight -#run_all_graphs "hve(elim_heuristic=min_fill) " min_fill -#run_all_graphs "hve(elim_heuristic=weighted_min_fill) " weighted_min_fill +run_all_graphs "hve(hve_elim_heuristic=min_neighbors) " min_neighbors +#run_all_graphs "hve(hve_elim_heuristic=min_weight) " min_weight +#run_all_graphs "hve(hve_elim_heuristic=min_fill) " min_fill +#run_all_graphs "hve(hve_elim_heuristic=weighted_min_fill) " weighted_min_fill diff --git a/packages/CLPBN/benchmarks/smokers/lbp_tests.sh b/packages/CLPBN/benchmarks/social_network2/lbp_tests.sh similarity index 86% rename from packages/CLPBN/benchmarks/smokers/lbp_tests.sh rename to packages/CLPBN/benchmarks/social_network2/lbp_tests.sh index de040bf03..696f3e88b 100755 --- a/packages/CLPBN/benchmarks/smokers/lbp_tests.sh +++ b/packages/CLPBN/benchmarks/social_network2/lbp_tests.sh @@ -1,6 +1,6 @@ #!/bin/bash -source sm.sh +source sn2.sh source ../benchs.sh SOLVER="lbp" @@ -26,5 +26,5 @@ function run_all_graphs } prepare_new_run -run_all_graphs "lbp(shedule=seq_fixed) " seq_fixed +run_all_graphs "lbp(bp_msg_shedule=seq_fixed) " seq_fixed diff --git a/packages/CLPBN/benchmarks/smokers/lve_tests.sh b/packages/CLPBN/benchmarks/social_network2/lve_tests.sh similarity index 97% rename from packages/CLPBN/benchmarks/smokers/lve_tests.sh rename to packages/CLPBN/benchmarks/social_network2/lve_tests.sh index e7e08c253..fddd84569 100755 --- a/packages/CLPBN/benchmarks/smokers/lve_tests.sh +++ b/packages/CLPBN/benchmarks/social_network2/lve_tests.sh @@ -1,6 +1,6 @@ #!/bin/bash -source sm.sh +source sn2.sh source ../benchs.sh SOLVER="lve" diff --git a/packages/CLPBN/benchmarks/smokers/sm.sh b/packages/CLPBN/benchmarks/social_network2/sn2.sh similarity index 59% rename from packages/CLPBN/benchmarks/smokers/sm.sh rename to packages/CLPBN/benchmarks/social_network2/sn2.sh index 0981e92d9..b29c3a30d 100755 --- a/packages/CLPBN/benchmarks/smokers/sm.sh +++ b/packages/CLPBN/benchmarks/social_network2/sn2.sh @@ -1,7 +1,7 @@ #!/bin/bash -NETWORK="'../../examples/social_domain2'" -SHORTNAME="sm" +NETWORK="'../../examples/social_network2.pfl'" +SHORTNAME="sn2" #QUERY="smokes(p1,t), smokes(p2,t), friends(p1,p2,X)" QUERY="friends(p1,p2,X)" diff --git a/packages/CLPBN/benchmarks/smokers_evidence/bp_tests.sh b/packages/CLPBN/benchmarks/social_network2_evidence/bp_tests.sh similarity index 88% rename from packages/CLPBN/benchmarks/smokers_evidence/bp_tests.sh rename to packages/CLPBN/benchmarks/social_network2_evidence/bp_tests.sh index 89480a5ce..09c8a3386 100755 --- a/packages/CLPBN/benchmarks/smokers_evidence/bp_tests.sh +++ b/packages/CLPBN/benchmarks/social_network2_evidence/bp_tests.sh @@ -1,6 +1,6 @@ #!/bin/bash -source sm.sh +source sn2ev.sh source ../benchs.sh SOLVER="bp" @@ -30,5 +30,5 @@ function run_all_graphs } prepare_new_run -run_all_graphs "bp(shedule=seq_fixed) " seq_fixed +run_all_graphs 
"bp(bp_msg_shedule=seq_fixed) " seq_fixed diff --git a/packages/CLPBN/benchmarks/smokers_evidence/cbp_tests.sh b/packages/CLPBN/benchmarks/social_network2_evidence/cbp_tests.sh similarity index 88% rename from packages/CLPBN/benchmarks/smokers_evidence/cbp_tests.sh rename to packages/CLPBN/benchmarks/social_network2_evidence/cbp_tests.sh index f8975e415..953ff7afd 100755 --- a/packages/CLPBN/benchmarks/smokers_evidence/cbp_tests.sh +++ b/packages/CLPBN/benchmarks/social_network2_evidence/cbp_tests.sh @@ -1,6 +1,6 @@ #!/bin/bash -source sm.sh +source sn2ev.sh source ../benchs.sh SOLVER="cbp" @@ -30,5 +30,5 @@ function run_all_graphs } prepare_new_run -run_all_graphs "cbp(shedule=seq_fixed) " seq_fixed +run_all_graphs "cbp(bp_msg_shedule=seq_fixed) " seq_fixed diff --git a/packages/CLPBN/benchmarks/smokers_evidence/gen_people.sh b/packages/CLPBN/benchmarks/social_network2_evidence/gen_people.sh similarity index 96% rename from packages/CLPBN/benchmarks/smokers_evidence/gen_people.sh rename to packages/CLPBN/benchmarks/social_network2_evidence/gen_people.sh index 1a72cee03..3106baaa9 100755 --- a/packages/CLPBN/benchmarks/smokers_evidence/gen_people.sh +++ b/packages/CLPBN/benchmarks/social_network2_evidence/gen_people.sh @@ -26,7 +26,7 @@ main :- generate_people(S, N, Counting) :- Counting > N, !. generate_people(S, N, Counting) :- - format(S, 'people(p~w).~n', [Counting]), + format(S, 'person(p~w).~n', [Counting]), Counting1 is Counting + 1, generate_people(S, N, Counting1). diff --git a/packages/CLPBN/benchmarks/smokers_evidence/hve_tests.sh b/packages/CLPBN/benchmarks/social_network2_evidence/hve_tests.sh similarity index 68% rename from packages/CLPBN/benchmarks/smokers_evidence/hve_tests.sh rename to packages/CLPBN/benchmarks/social_network2_evidence/hve_tests.sh index 37cf1f9ab..67de12a15 100755 --- a/packages/CLPBN/benchmarks/smokers_evidence/hve_tests.sh +++ b/packages/CLPBN/benchmarks/social_network2_evidence/hve_tests.sh @@ -1,6 +1,6 @@ #!/bin/bash -source sm.sh +source sn2ev.sh source ../benchs.sh SOLVER="hve" @@ -30,8 +30,8 @@ function run_all_graphs } prepare_new_run -run_all_graphs "hve(elim_heuristic=min_neighbors) " min_neighbors -#run_all_graphs "hve(elim_heuristic=min_weight) " min_weight -#run_all_graphs "hve(elim_heuristic=min_fill) " min_fill -#run_all_graphs "hve(elim_heuristic=weighted_min_fill) " weighted_min_fill +run_all_graphs "hve(hve_elim_heuristic=min_neighbors) " min_neighbors +#run_all_graphs "hve(hve_elim_heuristic=min_weight) " min_weight +#run_all_graphs "hve(hve_elim_heuristic=min_fill) " min_fill +#run_all_graphs "hve(hve_elim_heuristic=weighted_min_fill) " weighted_min_fill diff --git a/packages/CLPBN/benchmarks/smokers_evidence/lbp_tests.sh b/packages/CLPBN/benchmarks/social_network2_evidence/lbp_tests.sh similarity index 100% rename from packages/CLPBN/benchmarks/smokers_evidence/lbp_tests.sh rename to packages/CLPBN/benchmarks/social_network2_evidence/lbp_tests.sh diff --git a/packages/CLPBN/benchmarks/smokers_evidence/lve_tests.sh b/packages/CLPBN/benchmarks/social_network2_evidence/lve_tests.sh similarity index 97% rename from packages/CLPBN/benchmarks/smokers_evidence/lve_tests.sh rename to packages/CLPBN/benchmarks/social_network2_evidence/lve_tests.sh index 5f32c8c6d..2f61cce14 100755 --- a/packages/CLPBN/benchmarks/smokers_evidence/lve_tests.sh +++ b/packages/CLPBN/benchmarks/social_network2_evidence/lve_tests.sh @@ -1,6 +1,6 @@ #!/bin/bash -source sm.sh +source sn2ev.sh source ../benchs.sh SOLVER="lve" diff --git 
a/packages/CLPBN/benchmarks/social_network2_evidence/sn2ev.sh b/packages/CLPBN/benchmarks/social_network2_evidence/sn2ev.sh new file mode 100755 index 000000000..92c2057d9 --- /dev/null +++ b/packages/CLPBN/benchmarks/social_network2_evidence/sn2ev.sh @@ -0,0 +1,8 @@ +#!/bin/bash + +NETWORK="'../../examples/social_network2.pfl'" +SHORTNAME="sn2ev" +QUERY="query(X)" + +POP=500 + diff --git a/packages/CLPBN/benchmarks/workshop_attrs/bp_tests.sh b/packages/CLPBN/benchmarks/workshop_attrs/bp_tests.sh index a52024adf..4b80d2441 100755 --- a/packages/CLPBN/benchmarks/workshop_attrs/bp_tests.sh +++ b/packages/CLPBN/benchmarks/workshop_attrs/bp_tests.sh @@ -33,5 +33,5 @@ function run_all_graphs } prepare_new_run -run_all_graphs "bp(shedule=seq_fixed) " seq_fixed +run_all_graphs "bp(bp_msg_shedule=seq_fixed) " seq_fixed diff --git a/packages/CLPBN/benchmarks/workshop_attrs/cbp_tests.sh b/packages/CLPBN/benchmarks/workshop_attrs/cbp_tests.sh index 7b649584f..728eef96a 100755 --- a/packages/CLPBN/benchmarks/workshop_attrs/cbp_tests.sh +++ b/packages/CLPBN/benchmarks/workshop_attrs/cbp_tests.sh @@ -32,5 +32,5 @@ function run_all_graphs } prepare_new_run -run_all_graphs "cbp(shedule=seq_fixed) " seq_fixed +run_all_graphs "cbp(bp_msg_shedule=seq_fixed) " seq_fixed diff --git a/packages/CLPBN/benchmarks/workshop_attrs/gen_attrs.sh b/packages/CLPBN/benchmarks/workshop_attrs/gen_attrs.sh index f3e7a5212..cdeb4dc7b 100755 --- a/packages/CLPBN/benchmarks/workshop_attrs/gen_attrs.sh +++ b/packages/CLPBN/benchmarks/workshop_attrs/gen_attrs.sh @@ -23,7 +23,7 @@ main :- generate_people(S, N, Counting) :- Counting > N, !. generate_people(S, N, Counting) :- - format(S, 'people(p~w).~n', [Counting]), + format(S, 'person(p~w).~n', [Counting]), Counting1 is Counting + 1, generate_people(S, N, Counting1). @@ -31,9 +31,9 @@ generate_people(S, N, Counting) :- generate_attrs(S, N, Counting) :- Counting > N, !. generate_attrs(S, N, Counting) :- - %format(S, 'people(p~w).~n', [Counting]), + %format(S, 'person(p~w).~n', [Counting]), format(S, 'markov attends(P)::[t,f], attr~w::[t,f]', [Counting]), - format(S, '; [0.7, 0.3, 0.3, 0.3] ; [people(P)].~n',[]), + format(S, '; [0.7, 0.3, 0.3, 0.3] ; [person(P)].~n',[]), Counting1 is Counting + 1, generate_attrs(S, N, Counting1). 
diff --git a/packages/CLPBN/benchmarks/workshop_attrs/hve_tests.sh b/packages/CLPBN/benchmarks/workshop_attrs/hve_tests.sh index aaabfe08e..07dadb79e 100755 --- a/packages/CLPBN/benchmarks/workshop_attrs/hve_tests.sh +++ b/packages/CLPBN/benchmarks/workshop_attrs/hve_tests.sh @@ -32,5 +32,5 @@ function run_all_graphs } prepare_new_run -run_all_graphs "hve(elim_heuristic=min_neighbors) " min_neighbors +run_all_graphs "hve(hve_elim_heuristic=min_neighbors) " min_neighbors diff --git a/packages/CLPBN/benchmarks/workshop_attrs/lbp_tests.sh b/packages/CLPBN/benchmarks/workshop_attrs/lbp_tests.sh index cf5f696ee..b0dd10e29 100755 --- a/packages/CLPBN/benchmarks/workshop_attrs/lbp_tests.sh +++ b/packages/CLPBN/benchmarks/workshop_attrs/lbp_tests.sh @@ -32,5 +32,5 @@ function run_all_graphs } prepare_new_run -run_all_graphs "lbp(shedule=seq_fixed) " seq_fixed +run_all_graphs "lbp(bp_msg_shedule=seq_fixed) " seq_fixed diff --git a/packages/CLPBN/benchmarks/workshop_attrs/wa.sh b/packages/CLPBN/benchmarks/workshop_attrs/wa.sh index 2f9cc94a2..2f4946ebd 100755 --- a/packages/CLPBN/benchmarks/workshop_attrs/wa.sh +++ b/packages/CLPBN/benchmarks/workshop_attrs/wa.sh @@ -1,6 +1,6 @@ #!/bin/bash -NETWORK="'../../examples/workshop_attrs'" +NETWORK="'../../examples/workshop_attrs.pfl'" SHORTNAME="wa" QUERY="series(X)" From 2feac46e54ba9051a31ccc92043589cd9a055ec2 Mon Sep 17 00:00:00 2001 From: tacgomes Date: Wed, 9 Jan 2013 16:45:01 +0000 Subject: [PATCH 89/89] PFL manual: several improvements --- packages/CLPBN/pfl.tex | 95 ++++++++++++++++++++++-------------------- 1 file changed, 49 insertions(+), 46 deletions(-) diff --git a/packages/CLPBN/pfl.tex b/packages/CLPBN/pfl.tex index 52ac251ab..b0e102990 100644 --- a/packages/CLPBN/pfl.tex +++ b/packages/CLPBN/pfl.tex @@ -59,7 +59,7 @@ The package also includes implementations for a set of well-known inference algo %------------------------------------------------------------------------------ %------------------------------------------------------------------------------ \section{Installation} -PFL is included with the \href{http://www.dcc.fc.up.pt/~vsc/Yap/}{YAP} Prolog system. However, there isn't yet a stable release of YAP that includes PFL. So it is required to install a development version of YAP. To to this, you will need to have installed the Git version control system. The commands to do a default installation of YAP in the user's home in a Unix-based environment are shown next. +PFL is included with the \href{http://www.dcc.fc.up.pt/~vsc/Yap/}{YAP} Prolog system. However, there isn't yet a stable release of YAP that includes PFL. So you will need to install a development version of YAP. To to so, you must have installed the \href{http://git-scm.com/}{Git} version control system. The commands to perform a default installation of YAP in your home directory in a Unix-based environment are shown next. \begin{enumerate} \setlength\itemindent{-0.01cm} @@ -70,7 +70,7 @@ PFL is included with the \href{http://www.dcc.fc.up.pt/~vsc/Yap/}{YAP} Prolog sy \item \texttt{\$ make depend \& make install} \end{enumerate} -In case you want to install YAP somewhere else or with different settings, please consult the YAP documentation. From now on, we will assume that the directory \texttt{\$HOME\pathsep bin} (where the binary can be found) is in your \texttt{\$PATH} environment variable. +In case you want to install YAP somewhere else or with different settings, please consult the YAP documentation. 
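For example, assuming YAP's standard autoconf-style \texttt{configure} script, an installation under a different prefix would look something like this (illustrative only; see the YAP documentation for the supported options):

\texttt{\$ ./configure --prefix=\$HOME/yap-devel}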
From now on, we will assume that the directory \texttt{\$HOME\pathsep bin} (where the binary is) is in your \texttt{\$PATH} environment variable. \label{examples-directory} Once in a while, we will refer to the PFL examples directory. In a default installation, this directory will be located at \texttt{\$HOME\pathsep share\pathsep doc\pathsep Yap\pathsep packages\pathsep examples\pathsep CLPBN}. @@ -82,7 +82,7 @@ Once in a while, we will refer to the PFL examples directory. In a default insta %------------------------------------------------------------------------------ %------------------------------------------------------------------------------ \section{Language} -A first-order probabilistic graphical model is described using parametric factors, or just parfactors. The PFL syntax for a parfactor is +A first-order probabilistic graphical model is described using parametric factors, commonly known as parfactors. The PFL syntax for a parfactor is $$Type~~F~~;~~Phi~~;~~C.$$ @@ -90,11 +90,11 @@ $$Type~~F~~;~~Phi~~;~~C.$$ \begin{itemize} \item $Type$ refers the type of network over which the parfactor is defined. It can be \texttt{bayes} for directed networks, or \texttt{markov} for undirected ones. -\item $F$ is a comma-separated sequence of Prolog terms that will define sets of random variables under the constraint $C$. If $Type$ is \texttt{bayes}, the first term defines the node while the others defines its parents. +\item $F$ is a comma-separated sequence of Prolog terms that will define sets of random variables under the constraint $C$. If $Type$ is \texttt{bayes}, the first term defines the node while the remaining terms define its parents. -\item $Phi$ is either a Prolog list of potential values or a Prolog goal that unifies with one. If $Type$ is \texttt{bayes}, this will correspond to the conditional probability table. Domain combinations are implicitly assumed in ascending order, with the first term being the 'most significant' (e.g. $\mathtt{x_0y_0}$, $\mathtt{x_0y_1}$, $\mathtt{x_0y_2}$, $\mathtt{x_1y_0}$, $\mathtt{x_1y_1}$, $\mathtt{x_1y_2}$). +\item $Phi$ is either a Prolog list of potential values or a Prolog goal that unifies with one. Notice that if $Type$ is \texttt{bayes}, this will correspond to the conditional probability table. Domain combinations are implicitly assumed in ascending order, with the first term being the 'most significant' (e.g. $\mathtt{x_0y_0}$, $\mathtt{x_0y_1}$, $\mathtt{x_0y_2}$, $\mathtt{x_1y_0}$, $\mathtt{x_1y_1}$, $\mathtt{x_1y_2}$). -\item $C$ is a (possibly empty) list of Prolog goals that will instantiate the logical variables that appear in $F$, that is, the successful substitutions for the goals in $C$ will be the valid values for the logical variables. This allows the constraint to be any relation (set of tuples) over the logical variables. +\item $C$ is a (possibly empty) list of Prolog goals that will instantiate the logical variables that appear in $F$, that is, the successful substitutions for the goals in $C$ will be the valid values for the logical variables. This allows the constraint to be defined as any relation (set of tuples) over the logical variables. \end{itemize} @@ -189,14 +189,16 @@ wet_grass_table( 0.01, 0.1, 0.1, 1.0 ]). \end{pflcode} -Note that this network is fully grounded, as the constraints are all empty. Next we present the PFL representation for a well-known markov logic network - the social network model. The weighted formulas of this model are shown below. 
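Before moving to that model, it is worth making the ordering convention above concrete. Assuming the wet grass factor is declared as \texttt{bayes wet\_grass, sprinkler, rain ; wet\_grass\_table ; [].} (as in the sprinkler example), the eight values of \texttt{wet\_grass\_table} are read with \texttt{wet\_grass} as the most significant term: the first four entries give the probability of \texttt{wet\_grass=t} for the parent combinations (\texttt{sprinkler},\texttt{rain}) = (t,t), (t,f), (f,t), (f,f), and the last four give the probability of \texttt{wet\_grass=f} for the same combinations. For instance, the final entry, \texttt{1.0}, is the probability of \texttt{wet\_grass=f} given \texttt{sprinkler=f} and \texttt{rain=f}.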
+We started by loading the PFL library, then we have defined one factor for each node, and finally we have specified the probabilities for each conditional probability table. + +Notice that this network is fully grounded, as all constraints are empty. Next we present the PFL representation for a well-known markov logic network - the social network model. For convenience, the two main weighted formulas of this model are shown below. \begin{pflcode} 1.5 : Smokes(x) => Cancer(x) 1.1 : Smokes(x) ^ Friends(x,y) => Smokes(y) \end{pflcode} -We can represent this model using PFL with the following code. +Next, we show the PFL representation for this model. \begin{pflcode} :- use_module(library(pfl)). @@ -216,9 +218,9 @@ markov friends(X,Y), smokes(X), smokes(Y) ; %markov cancer(X) ; [1.0, 9.974]; [person(X)]. %markov friends(X,Y) ; [1.0, 99.484] ; [person(X), person(Y)]. -Notice that we defined the world to be consisted of two persons, \texttt{anne} and \texttt{bob}. We can easily add as many persons as we want by inserting in the program a fact like \texttt{person @ 10.}~. This would create ten persons named \texttt{p1}, \texttt{p2}, \dots, \texttt{p10}. +Notice that we have defined the world to be consisted of only two persons, \texttt{anna} and \texttt{bob}. We can easily add as many persons as we want by inserting in the program a fact like \texttt{person @ 10.}~. This would automatically create ten persons named \texttt{p1}, \texttt{p2}, \dots, \texttt{p10}. -Unlike other fist-order probabilistic languages, in PFL the logical variables that appear in the terms are not directly typed, and they will be only constrained by the goals that appear in the constraint of the parfactor. This allows the logical variables to be constrained by any relation (set of tuples), and not by pairwise (in)equalities. For instance, the next example defines a ground network with three factors, each over the random variables \texttt{p(a,b)}, \texttt{p(b,d)} and \texttt{p(d,e)}. +Unlike other fist-order probabilistic languages, in PFL the logical variables that appear in the terms are not directly typed, and they will be only constrained by the goals that appears in the constraint of the parfactor. This allows the logical variables to be constrained to any relation (set of tuples), and not only pairwise (in)equalities. For instance, the next example defines a network with three ground factors, each defined respectively over the random variables \texttt{p(a,b)}, \texttt{p(b,d)} and \texttt{p(d,e)}. \begin{pflcode} constraint(a,b). @@ -228,9 +230,9 @@ constraint(d,e). markov p(A,B); some_table; [constraint(A,B)]. \end{pflcode} -We can easily add static evidence to PFL programs by inserting a fact with the same functor and arguments as the random variable, plus one extra argument with the observed state or value. For instance, suppose that we now that \texttt{anna} and \texttt{bob} are friends. We can add this knowledge to the program with the following fact: \texttt{friends(anna,bob,t).}~. +We can easily add static evidence to PFL programs by inserting a fact with the same functor and arguments as the random variable, plus one extra argument with the observed state or value. For instance, suppose that we know that \texttt{anna} and \texttt{bob} are friends. We can add this knowledge to the program with the following fact: \texttt{friends(anna,bob,t).}~. -One last note for the domain of the random variables. By default all terms will generate boolean (\texttt{t}/\texttt{f}) random variables. 
It is possible to chose a different domain by appending a list of the possible values or states to the term. Next we present a self-explanatory example of how this can be done. +One last note for the domain of the random variables. By default, all terms instantiate boolean (\texttt{t}/\texttt{f}) random variables. It is possible to choose a different domain for a term by appending a list of its possible values or states. Next we present a self-explanatory example of how this can be done. \begin{pflcode} bayes professor_ability::[high, medium, low] ; [0.5, 0.4, 0.1]. @@ -245,19 +247,19 @@ More probabilistic models defined using PFL can be found in the examples directo %------------------------------------------------------------------------------ %------------------------------------------------------------------------------ \section{Querying} -In this section we demonstrate how to use PFL to solve probabilistic queries. We will use the sprinkler network as an example. +In this section we demonstrate how to use PFL to solve probabilistic queries. We will use the sprinkler network as example. -Assuming that the current directory is the one where the examples are located, first we load the model as follows. +Assuming that the current directory is the one where the examples are located, first we load the model with the following command. \texttt{\$ yap -l sprinker.pfl} -Let's suppose that we want to estimate the marginal probability for the $WetGrass$ random variable. We can do it calling the following goal: +Let's suppose that we want to estimate the marginal probability for the $WetGrass$ random variable. To do so, we call the following goal. \texttt{?- wet\_grass(X).} -The output of the goal will show the marginal probability for each $WetGrass$ possible state or value, that is, \texttt{t} and \texttt{f}. Notice that in PFL a random variable is identified by a term with the same functor and arguments plus one extra argument. +The output of this goal will show the marginal probability for each $WetGrass$ possible state or value, that is, \texttt{t} and \texttt{f}. Notice that in PFL a random variable is identified by a term with the same functor and arguments plus one extra argument. -Now let's suppose that we want to estimate the probability for the same random variable, but this time we have evidence that it had rained the day before. We can estimate this probability without resorting to static evidence with: +Now let's suppose that we want to estimate the probability for the same random variable, but this time we have evidence that it had rained in the day before. We can estimate this probability without resorting to static evidence with: \texttt{?- wet\_grass(X), rain(t).} @@ -267,18 +269,6 @@ PFL also supports calculating joint probability distributions. For instance, we -%------------------------------------------------------------------------------ -%------------------------------------------------------------------------------ -%------------------------------------------------------------------------------ -%------------------------------------------------------------------------------ -\section{Parameter Learning} -PFL is capable to learn the parameters for bayesian networks, through an implementation of the expectation-maximization algorithm. - -Inside the \texttt{learning} directory from the examples directory, one can find some examples of how learning works in PFL. 
- -We can define the solver that will be used for the inference part during parameter learning with the \texttt{set\_em\_solver/1} predicate (defaults to \texttt{hve}). At the moment, only the following solvers support parameter learning: \texttt{ve}, \texttt{hve}, \texttt{bdd}, \texttt{bp} and \texttt{cbp}. - - %------------------------------------------------------------------------------ %------------------------------------------------------------------------------ %------------------------------------------------------------------------------ @@ -302,36 +292,36 @@ For instance, if we want to use belief propagation to solve some probabilistic q \texttt{?- set\_solver(bp).} -It is possible to tweak some parameters of PFL through \texttt{set\_horus\_flag/2} predicate. The first argument is a key that identifies the parameter that we desire to tweak, while the second is some possible value for this key. +It is possible to tweak some parameters of PFL through \texttt{set\_horus\_flag/2} predicate. The first argument is a key that identifies the parameter that we want to tweak. The second argument is some possible value for this key. -The \texttt{verbosity} key controls the level of debugging information that will be printed. Its possible values are positive integers. The higher the number, the more information that will be shown. For example, to view some basic debugging information we call: +The \texttt{verbosity} key controls the level of debugging information that will be printed. Its possible values are positive integers. The higher the number, the more information that will be shown. For instance, we can view some basic debugging information by calling the following goal. \texttt{?- set\_horus\_flag(verbosity, 1).} -This key defaults to 0 (no debugging information) and only \texttt{hve}, \texttt{bp}, \texttt{cbp}, \texttt{lve}, \texttt{lkc} and \texttt{lbp} solvers have support for this key. +This key defaults to 0 (no debugging) and only \texttt{hve}, \texttt{bp}, \texttt{cbp}, \texttt{lve}, \texttt{lkc} and \texttt{lbp} solvers have support for this key. -The \texttt{use\_logarithms} key controls whether the calculations performed during inference should be done in a logarithm domain or not. Its values can be \texttt{true} or \texttt{false}. By default is \texttt{true} and only affects \texttt{hve}, \texttt{bp}, \texttt{cbp}, \texttt{lve}, \texttt{lkc} and \texttt{lbp} solvers. The remaining solvers always do their calculations in a logarithm domain. +The \texttt{use\_logarithms} key controls whether the calculations performed during inference should be done in a logarithm domain or not. Its values can be \texttt{true} (default) or \texttt{false}. This key only affects \texttt{hve}, \texttt{bp}, \texttt{cbp}, \texttt{lve}, \texttt{lkc} and \texttt{lbp} solvers. The remaining solvers always perform their calculations in a logarithm domain. -There are keys specific only to some algorithms. The key \texttt{elim\_heuristic} key allows to chose which elimination heuristic will be used by the \texttt{hve} solver (but not \texttt{ve}). The following are supported: +There are keys specific to some algorithms. The \texttt{hve\_elim\_heuristic} key allows to choose which elimination heuristic will be used by the \texttt{hve} solver (but not \texttt{ve}). 
The following are supported: \begin{itemize} \item \texttt{sequential} \item \texttt{min\_neighbors} \item \texttt{min\_weight} \item \texttt{min\_fill} - \item \texttt{weighted\_min\_fill} + \item \texttt{weighted\_min\_fill} (default) \end{itemize} -It defaults to \texttt{weighted\_min\_fill}. An explanation of each of these heuristics can be found in Daphne Koller's book \textit{Probabilistic Graphical Models}. +An explanation for each of these heuristics can be found in Daphne Koller's book \textit{Probabilistic Graphical Models}. The \texttt{bp\_msg\_schedule}, \texttt{bp\_accuracy} and \texttt{bp\_max\_iter} keys are specific for message passing based algorithms, namely \texttt{bp}, \texttt{cbp} and \texttt{lbp}. -The \texttt{bp\_max\_iter} key establishes a maximum number of iterations. One iteration consists in sending all possible messages. It defaults to 1000. +The \texttt{bp\_max\_iter} key establishes a maximum number of iterations. One iteration consists in sending all possible messages. It defaults to \texttt{1000}. -The \texttt{bp\_accuracy} key indicates when the message passing should cease. Be the residual of one message the difference (according some metric) between the one sent in the current iteration and the one sent in the previous. If the highest residual is lesser than the given value, the message passing is stopped and the probabilities are calculated using the last messages that were sent. This key defaults to 0.0001. +The \texttt{bp\_accuracy} key allows to control when the message passing should cease. Be the residual of one message the difference (according some metric) between the one sent in the current iteration and the one sent in the previous. If the highest residual is lesser than the given value, the message passing is stopped and the probabilities are calculated using the last messages that were sent. This key defaults to \texttt{0.0001}. -The key \texttt{bp\_msg\_schedule} controls the message sending order. Its possible values are: +The key \texttt{bp\_msg\_schedule} allows to control the message sending order. Its possible values are: \begin{itemize} - \item \texttt{seq\_fixed}, at each iteration, all messages are sent with the same order. + \item \texttt{seq\_fixed} (default), at each iteration, all messages are sent with the same order. \item \texttt{seq\_random}, at each iteration, all messages are sent with a random order. @@ -339,9 +329,22 @@ The key \texttt{bp\_msg\_schedule} controls the message sending order. Its possi \item \texttt{max\_residual}, the next message to be sent is the one with maximum residual (as explained in the paper \textit{Residual Belief Propagation: Informed Scheduling for Asynchronous Message Passing}). \end{itemize} -It defaults to \texttt{seq\_fixed}. -The \texttt{export\_libdai} and \texttt{export\_uai} keys can be used to export the current model respectively to \href{http://cs.ru.nl/~jorism/libDAI/doc/fileformats.html}{libDAI}, and \href{http://graphmod.ics.uci.edu/uai08/FileFormat}{UAI08} formats. With the \texttt{export\_graphviz} key it is possible to save the factor graph into a format that can be read by \href{http://www.graphviz.org/}{Graphviz}. The \texttt{print\_fg} key allows to print all factors before perform inference. All these four keys accept \texttt{true} and \texttt{false} as their values and only produce effect in \texttt{hve}, \texttt{bp}, and \texttt{cbp} solvers. 
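As a concrete illustration, the message-passing keys described above are set in the same way as \texttt{verbosity}; the particular values below are only examples.

\texttt{?- set\_horus\_flag(bp\_msg\_schedule, max\_residual).}

\texttt{?- set\_horus\_flag(bp\_accuracy, 0.00001).}

\texttt{?- set\_horus\_flag(bp\_max\_iter, 500).}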
+The \texttt{export\_libdai} and \texttt{export\_uai} keys can be used to export the current model respectively to \href{http://cs.ru.nl/~jorism/libDAI/doc/fileformats.html}{libDAI}, and \href{http://graphmod.ics.uci.edu/uai08/FileFormat}{UAI08} formats. With the \texttt{export\_graphviz} key it is possible to export the factor graph structure into a format that can be parsed by \href{http://www.graphviz.org/}{Graphviz}. The \texttt{print\_fg} key allows to print a textual representation of the factor graph. All these four keys accept \texttt{true} and \texttt{false} as their values and only have effect in \texttt{hve}, \texttt{bp}, and \texttt{cbp} solvers. + + + +%------------------------------------------------------------------------------ +%------------------------------------------------------------------------------ +%------------------------------------------------------------------------------ +%------------------------------------------------------------------------------ +\section{Parameter Learning} +PFL is capable to learn the parameters for bayesian networks, through an implementation of the expectation-maximization algorithm. + +Inside the \texttt{learning} directory from the examples directory, one can find some examples of how learning works in PFL. + +It is possible to choose the solver that will be used for the inference part during parameter learning with the \texttt{set\_em\_solver/1} predicate (defaults to \texttt{hve}). At the moment, only the following solvers support parameter learning: \texttt{ve}, \texttt{hve}, \texttt{bdd}, \texttt{bp} and \texttt{cbp}. + @@ -350,7 +353,7 @@ The \texttt{export\_libdai} and \texttt{export\_uai} keys can be used to export %------------------------------------------------------------------------------ %------------------------------------------------------------------------------ \section{Horus Command Line} -This package also includes an external interface to YAP for perform inference over probabilistic graphical models described in formats other than PFL. Currently two are support, the \href{http://cs.ru.nl/~jorism/libDAI/doc/fileformats.html}{libDAI file format}, and the \href{http://graphmod.ics.uci.edu/uai08/FileFormat}{UAI08 file format}. +This package also includes an external command for perform inference over probabilistic graphical models described in formats other than PFL. Currently two are support, the \href{http://cs.ru.nl/~jorism/libDAI/doc/fileformats.html}{libDAI file format}, and the \href{http://graphmod.ics.uci.edu/uai08/FileFormat}{UAI08 file format}. This utility is called \texttt{hcli} and its usage is as follows. @@ -367,7 +370,7 @@ With the above command, the program will load the model and print the marginal p \texttt{\$ ./hcli burglary-alarm.uai 0} -If we give more than one variable identifier as argument, the program will output the joint probability for all variables given. +If we give more than one variable identifier as argument, the program will output the joint probability for all the passed variables. Evidence can be given as a pair containing a variable identifier and its observed state (index), separated by a '=`. For instance, we can introduce knowledge that some variable with identifier $0$ has evidence on its second state as follows. @@ -379,7 +382,7 @@ By default, all probability tasks are resolved using the \texttt{hve} solver. It Notice that only the \texttt{hve}, \texttt{bp} and \texttt{cbp} solvers can be used with \texttt{hcli}. 
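As described next, the options accepted by \texttt{set\_horus\_flag/2} can also be passed directly to \texttt{hcli}. For instance, reusing the \texttt{verbosity} key from above, the previous query can be run with some debugging output as follows (an illustrative sketch):

\texttt{\$ ./hcli verbosity=1 burglary-alarm.uai 0}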
-The options that are available with the \texttt{set\_horus\_flag/2} predicate can be used in \texttt{hcli} too. The syntax to use are pairs \texttt{=} before the model's file name.
+The options that are available with the \texttt{set\_horus\_flag/2} predicate can be used in \texttt{hcli} too. The syntax is a \texttt{key=value} pair given before the model's file name.

@@ -390,6 +393,6 @@ The options that are available with the \texttt{set\_horus\_flag/2} predicate ca
 \section{Further Information}
 Please check the paper \textit{Evaluating Inference Algorithms for the Prolog Factor Language} for further information.
 
-Any question don't hesitate to contact us!
+Any questions? Don't hesitate to contact us!
 
 \end{document}