From 902dafa906ffc0abcac79f7ffe3ac955b038d1bb Mon Sep 17 00:00:00 2001
From: Vítor Santos Costa
Date: Wed, 26 Nov 2008 09:56:55 +0000
Subject: [PATCH] more support for learning

---
 CLPBN/clpbn.yap                 | 2 +-
 CLPBN/clpbn/dists.yap           | 1 -
 CLPBN/clpbn/table.yap           | 4 ++--
 CLPBN/learning/aleph_params.yap | 8 +++++---
 CLPBN/learning/em.yap           | 4 ++--
 CLPBN/learning/learn_utils.yap  | 8 ++++----
 6 files changed, 14 insertions(+), 13 deletions(-)

diff --git a/CLPBN/clpbn.yap b/CLPBN/clpbn.yap
index 2e4b75547..c4f507f2d 100644
--- a/CLPBN/clpbn.yap
+++ b/CLPBN/clpbn.yap
@@ -91,7 +91,7 @@ em_solver(vel).
 %output(gviz(user_error)).
 output(no).
 suppress_attribute_display(false).
-parameter_softening(laplace).
+parameter_softening(m_estimate(10)).
 
 clpbn_flag(Flag,Option) :-
 	clpbn_flag(Flag, Option, Option).
diff --git a/CLPBN/clpbn/dists.yap b/CLPBN/clpbn/dists.yap
index eac6e3a79..4028c93ba 100644
--- a/CLPBN/clpbn/dists.yap
+++ b/CLPBN/clpbn/dists.yap
@@ -12,7 +12,6 @@
 	get_dist_matrix/5,
 	get_possibly_deterministic_dist_matrix/5,
 	get_dist_domain/2,
-	get_dist_params/2,
 	get_dist_domain_size/2,
 	get_dist_params/2,
 	get_dist_key/2,
diff --git a/CLPBN/clpbn/table.yap b/CLPBN/clpbn/table.yap
index 5a25ef0ac..e265058b2 100644
--- a/CLPBN/clpbn/table.yap
+++ b/CLPBN/clpbn/table.yap
@@ -100,10 +100,10 @@ clpbn_tabled_clause(Head, Body) :-
 	clpbn_tabled_clause(Head, M, Body).
 
 clpbn_tabled_clause(M:Head, _, Body) :- !,
-	clpbn_table(Head, M, Body).
+	clpbn_tabled_clause(Head, M, Body).
 clpbn_tabled_clause(Head, M, Body) :-
 	clpbn_table(Head, M, THead),
-	clause(THead, Body).
+	clause(M:THead, Body).
 
 
 clpbn_tabled_assertz(M:Clause) :- !,
diff --git a/CLPBN/learning/aleph_params.yap b/CLPBN/learning/aleph_params.yap
index 9d0fa3fea..c27844e22 100644
--- a/CLPBN/learning/aleph_params.yap
+++ b/CLPBN/learning/aleph_params.yap
@@ -65,7 +65,7 @@
 
 :- user:set(record_testclause_hook, clpbn_aleph:do_nothing).
 
-%:- user:set(newbest_hook, clpbn_aleph:store_theory).
+:- user:set(newbest_hook, clpbn_aleph:store_theory).
 
 disable_solver(_) :-
 	clpbn_flag(solver, Old, none),
@@ -109,15 +109,17 @@ store_theory(_).
 
 add_correct_cpt((G,B),(G,NB)) :- !,
 	add_correct_cpt(B,NB).
+add_correct_cpt((clpbn:{V = K with Tab }), ({V = K with NTab})) :-
+	correct_tab(Tab,K,NTab).
 add_correct_cpt(({V = K with Tab }), ({V = K with NTab})) :-
 	correct_tab(Tab,K,NTab).
 
 correct_tab(p(Vs,_),K,p(Vs,TDist)) :-
 	get_dist_key(Id, K),
-	get_dist_parms(Id, TDist).
+	get_dist_params(Id, TDist).
 correct_tab(p(Vs,_,Ps),K,p(Vs,TDist,Ps)) :-
 	get_dist_key(Id, K),
-	get_dist_parms(Id, TDist).
+	get_dist_params(Id, TDist).
 
 store_cl(Cl) :-
 	recordz(best_theory, Cl, _).
diff --git a/CLPBN/learning/em.yap b/CLPBN/learning/em.yap
index 7dd762b2f..e72998959 100644
--- a/CLPBN/learning/em.yap
+++ b/CLPBN/learning/em.yap
@@ -90,7 +90,7 @@ init_em(Items, state( AllDists, AllDistInstances, MargVars, SolverVars)) :-
 em_loop(Its, Likelihood0, State, MaxError, MaxIts, LikelihoodF, FTables) :-
 	estimate(State, LPs),
 	maximise(State, Tables, LPs, Likelihood),
-	writeln(Likelihood:Its:Likelihood0:Tables),
+%	writeln(Likelihood:Its:Likelihood0:Tables),
 	(
 	    (
 	     abs((Likelihood - Likelihood0)/Likelihood) < MaxError
@@ -205,7 +205,7 @@ compute_parameters([Id-Samples|Dists], [Id-NewTable|Tables], MDistTable, Lik0,
 	empty_dist(Id, Table0),
 	add_samples(Samples, Table0, MDistTable),
 	soften_sample(Table0, SoftenedTable),
-	matrix:matrix_sum(Table0,TotM),writeln(Id-TotM),
+	matrix:matrix_sum(Table0,TotM),
 	normalise_counts(SoftenedTable, NewTable),
 	compute_likelihood(Table0, NewTable, DeltaLik),
 	dist_new_table(Id, NewTable),
diff --git a/CLPBN/learning/learn_utils.yap b/CLPBN/learning/learn_utils.yap
index 839729049..c5c45a368 100644
--- a/CLPBN/learning/learn_utils.yap
+++ b/CLPBN/learning/learn_utils.yap
@@ -68,15 +68,15 @@ soften_sample(T0,T) :-
 
 soften_sample(no,T,T).
 soften_sample(m_estimate(M), T0, T) :-
-	matrix_agg_cols(T0,+,Cols),matrix:matrix_to_list(Cols), writeln(Cols),
+	matrix_agg_cols(T0,+,Cols),
 	matrix_op_to_all(Cols, *, M, R),
-	matrix_op_to_cols(T0,+,R,T).
+	matrix_op_to_cols(T0,R,+,T).
 soften_sample(auto_m, T0,T) :-
-	matrix_agg_cols(T0,+,Cols),matrix:matrix_to_list(Cols), writeln(Cols),
+	matrix_agg_cols(T0,+,Cols),
 	matrix_sum(Cols,TotM),
 	M is sqrt(TotM),
 	matrix_op_to_all(Cols, *, M, R),
-	matrix_op_to_cols(T0,+,R,T).
+	matrix_op_to_cols(T0,R,+,T).
 soften_sample(laplace,T0,T) :-
 	matrix_op_to_all(T0, +, 1, T).
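
Note (illustration only, not part of the patch): the default changes from
parameter_softening(laplace) to parameter_softening(m_estimate(10)), so
soften_sample/3 in learn_utils.yap now applies m-estimate smoothing to the EM
counts. The sketch below shows the textbook m-estimate on a plain list of
counts with a uniform prior, so that zero counts no longer yield zero
probabilities. It is a minimal, stand-alone example: the predicate names
(m_estimate_soften/3, soften_each/5, sum_counts/2) are hypothetical, and it
does not use the YAP matrix library that the real soften_sample/3 relies on.

% m_estimate_soften(+M, +Counts, -Probs)
% Probs[i] = (Counts[i] + M*Prior) / (Total + M), with a uniform Prior = 1/N.
m_estimate_soften(M, Counts, Probs) :-
	length(Counts, N),
	N > 0,
	sum_counts(Counts, Total),
	Prior is 1 / N,
	soften_each(Counts, M, Prior, Total, Probs).

% soften_each(+Counts, +M, +Prior, +Total, -Probs): apply the estimate to each count.
soften_each([], _, _, _, []).
soften_each([C|Cs], M, Prior, Total, [P|Ps]) :-
	P is (C + M * Prior) / (Total + M),
	soften_each(Cs, M, Prior, Total, Ps).

% sum_counts(+Counts, -Total): plain recursive sum, to stay library-free.
sum_counts([], 0).
sum_counts([C|Cs], Total) :-
	sum_counts(Cs, Total0),
	Total is Total0 + C.

% Example: with M = 10 (the new default), counts [8,2,0] give roughly
% [0.57, 0.27, 0.17] instead of the [0.8, 0.2, 0.0] of raw normalisation.
% ?- m_estimate_soften(10, [8,2,0], Probs).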