From 9e3a7682200df726511b313ff64cf2afff168d28 Mon Sep 17 00:00:00 2001
From: Vitor Santos Costa
Date: Tue, 9 Oct 2018 13:47:27 +0100
Subject: [PATCH] singles

---
 library/maputils.yap               |   2 +-
 packages/ProbLog/problog/ptree.yap |   2 +-
 packages/ProbLog/problog_lbfgs.yap | 156 +++++++++++++++--------------
 packages/yap-lbfgs/lbfgs.pl        |   2 +-
 packages/yap-lbfgs/yap_lbfgs.c     |   6 +-
 5 files changed, 89 insertions(+), 79 deletions(-)

diff --git a/library/maputils.yap b/library/maputils.yap
index b3775ac7f..1846e27a2 100644
--- a/library/maputils.yap
+++ b/library/maputils.yap
@@ -77,7 +77,7 @@ aux_args([Arg|Args], MVars, [Arg|PArgs], PVars, [Arg|ProtoArgs]) :-
 aux_args([Arg|Args], [Arg|MVars], [PVar|PArgs], [PVar|PVars], ['_'|ProtoArgs]) :-
     aux_args(Args, MVars, PArgs, PVars, ProtoArgs).
 
-pred_name(Macro, Arity, _ , Name) :-
+pred_name(Macro, Arity, P , Name) :-
     prolog_load_context(file, FullFileName),
     file_base_name( FullFileName, File ),
     prolog_load_context(term_position, Pos),
diff --git a/packages/ProbLog/problog/ptree.yap b/packages/ProbLog/problog/ptree.yap
index 4fddabef7..bb64f5800 100644
--- a/packages/ProbLog/problog/ptree.yap
+++ b/packages/ProbLog/problog/ptree.yap
@@ -263,7 +263,7 @@
 % this is a test to determine whether YAP provides the needed trie library
 :- initialization(
-    ( predicate_property(trie_disable_hash, imported_from(M)) ->
+    ( predicate_property(trie_disable_hash, imported_from(_M)) ->
         trie_disable_hash
       ; print_message(warning,'The predicate tries:trie_disable_hash/0 does not exist. Please update trie library.')
     )
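The two hunks above show what the subject line "singles" means: this patch is a cleanup pass over singleton-variable warnings. YAP warns when a named variable occurs exactly once in a clause, because a one-off name is usually a typo; renaming it with a leading underscore (M becomes _M) keeps the name for the reader while telling the compiler the variable is intentionally unused. The maputils.yap hunk goes the opposite way (_ becomes P), which is only meaningful if P is now referenced further down in pred_name/4, outside the context lines quoted here. A minimal sketch of the warning and the fix, using made-up predicates for illustration only:

% Consulting this clause makes YAP print a singleton warning for Y:
% the variable is named but never used again in the clause.
singles_demo(X, Y) :-
    atom(X).

% The underscore prefix documents that the argument is deliberately
% ignored, and the warning goes away.
singles_demo_fixed(X, _Y) :-
    atom(X).
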
diff --git a/packages/ProbLog/problog_lbfgs.yap b/packages/ProbLog/problog_lbfgs.yap
index 9bc5c0f7d..c3556c207 100644
--- a/packages/ProbLog/problog_lbfgs.yap
+++ b/packages/ProbLog/problog_lbfgs.yap
@@ -649,64 +649,6 @@ init_one_query(QueryID,Query,Type) :-
         recordz(QueryID,bdd(Dir, Grad, MapList),_)
     ).
 
-qprobability(bdd(Dir, Tree, MapList), Slope, Prob) :-
-/* query_probability(21,6.775948e-01). */
-    run_sp(Tree, Slope, 1.0, Prob0),
-    (Dir == 1 -> Prob0 = Prob ; Prob is 1.0-Prob0).
-
-
-qgradient(bdd(Dir, Tree, MapList), Slope, I, Grad) :-
-    member(I-_, MapList),
-    run_grad(Tree, I, Slope, 0.0, Grad0),
-    ( Dir = 1 -> Grad = Grad0 ; Grad is -Grad0).
-
-% writeln(grad(QueryID:I:Grad)),
-% assert(query_gradient_intern(QueryID,I,p,Grad)),
-% fail.
-%gradient(QueryID, g, Slope) :-
-%    gradient(QueryID, l, Slope).
-
-maplist_to_hash([], H0, H0).
-maplist_to_hash([I-V|MapList], H0, Hash) :-
-    rb_insert(H0, V, I, H1),
-    maplist_to_hash(MapList, H1, Hash).
-
-tree_to_grad([], _, Grad, Grad).
-tree_to_grad([Node|Tree], H, Grad0, Grad) :-
-    node_to_gradient_node(Node, H, GNode),
-    tree_to_grad(Tree, H, [GNode|Grad0], Grad).
-
-node_to_gradient_node(pp(P-G,X,L,R), H, gnodep(P,G,X,Id,PL,GL,PR,GR)) :-
-    rb_lookup(X,Id,H),
-    (L == 1 -> GL=0, PL=1 ; L == 0 -> GL = 0, PL=0 ; L = PL-GL),
-    (R == 1 -> GR=0, PR=1 ; R == 0 -> GR = 0, PR=0 ; R = PR-GR).
-node_to_gradient_node(pn(P-G,X,L,R), H, gnoden(P,G,X,Id,PL,GL,PR,GR)) :-
-    rb_lookup(X,Id,H),
-    (L == 1 -> GL=0, PL=1 ; L == 0 -> GL = 0, PL=0 ; L = PL-GL),
-    (R == 1 -> GR=0, PR=1 ; R == 0 -> GR = 0, PR=0 ; R = PR-GR).
-
-run_sp([], _, P0, P0).
-run_sp(gnodep(P,_G, EP, _Id, PL, _GL, PR, _GR).Tree, Slope, _, PF) :-
-    P is EP*PL+ (1.0-EP)*PR,
-    run_sp(Tree, Slope, P, PF).
-run_sp(gnoden(P,_G, EP, _Id, PL, _GL, PR, _GR).Tree, Slope, _, PF) :-
-    P is EP*PL + (1.0-EP)*(1.0 - PR),
-    run_sp(Tree, Slope, P, PF).
-
-run_grad([], _I, _, G0, G0).
-run_grad([gnodep(P,G, EP, Id, PL, GL, PR, GR)|Tree], I, Slope, _, GF) :-
-    P is EP*PL+ (1.0-EP)*PR,
-    G0 is EP*GL + (1.0-EP)*GR,
-    % don' t forget the -X
-    ( I == Id -> G is G0+(PL-PR)* EP*(1-EP)*Slope ; G = G0 ),
-    run_grad(Tree, I, Slope, G, GF).
-run_grad([gnoden(P,G, EP, Id, PL, GL, PR, GR)|Tree], I, Slope, _, GF) :-
-    P is EP*PL + (1.0-EP)*(1.0 - PR),
-    G0 is EP*GL - (1.0 - EP) * GR,
-    ( I == Id -> G is G0+(PL+PR-1)*EP*(1-EP)*Slope ; G = G0 ),
-    run_grad(Tree, I, Slope, G, GF).
-
-
@@ -901,19 +843,6 @@ set_tunable(I, GroundTruth,P) :-
     format('fact(~d, ~q, ~4f, ~4f).~n',[I,Source,GroundTruth,Pr]),
     set_fact_probability(I,Pr).
 
-prob2log(X,Slope,FactID) :-
-    get_fact_probability(FactID, V0),
-    inv_sigmoid(V0, Slope, V).
-
-log2prob(X,Slope,FactID) :-
-    V0 <== X[FactID],
-    sigmoid(V0, Slope, V).
-
-bind_maplist([], Slope, X).
-bind_maplist([Node-Theta|MapList], Slope, X) :-
-    Theta <== X[Node],
-    bind_maplist(MapList, Slope, X).
-
 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
 % start calculate gradient
 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
@@ -933,10 +862,13 @@ user:evaluate(LLH_Training_Queries, X,Grad,N,_,_) :-
     forall(tunable_fact(FactID,GroundTruth),
            (Z<==X[FactID],W<==Grad[FactID],writeln(FactID:(W->Z)))).
 
 compute_grad(N, X, Grad, Probs, Slope, Handle, LL) :-
-    user:example(QueryID,Query,QueryProb,Type),
-    BDD = bdd(Dir, GradTree, MapList),
+    user:example(QueryID,_Query,QueryProb,_),
+    recorded(QueryID,BDD,_),
+    BDD = bdd(_Dir, _GradTree, MapList),
     bind_maplist(MapList, Slope, Probs),
+%writeln( qprobability(BDD,Slope,BDDProb) ),
     qprobability(BDD,Slope,BDDProb),
+%writeln( gradientpair(BDD,Slope,BDDProb, QueryProb, Grad) ),
     gradientpair(BDD,Slope,BDDProb, QueryProb, Grad),
     LL is (((BDDProb)-(QueryProb))**2).
 
@@ -944,11 +876,85 @@ gradientpair(BDD,Slope,BDDProb, QueryProb, Grad) :-
     qgradient(BDD, Slope, FactID, GradValue),
 % writeln(FactID),
     G0 <== Grad[FactID],
-writeln( GN is G0-GradValue*(QueryProb-BDDProb)),
-GN is G0-GradValue*(QueryProb-BDDProb),
-    writeln(FactID:(G0->GN)),
+%writeln( GN is G0-GradValue*(QueryProb-BDDProb)),
+    GN is G0-GradValue*(QueryProb-BDDProb),
+    %writeln(FactID:(G0->GN)),
     Grad[FactID] <== GN.
 gradientpair(_BDD,_Slope,_BDDProb,_QueryProb,_Grad).    % fallback arity matches the gradientpair/5 call above
 
+qprobability(bdd(Dir, Tree, _MapList), Slope, Prob) :-
+/* query_probability(21,6.775948e-01). */
+    run_sp(Tree, Slope, 1.0, Prob0),
+    (Dir == 1 -> Prob0 = Prob ; Prob is 1.0-Prob0).
+
+
+qgradient(bdd(Dir, Tree, MapList), Slope, I, Grad) :-
+    member(I-_, MapList),
+    run_grad(Tree, I, Slope, 0.0, Grad0),
+    ( Dir = 1 -> Grad = Grad0 ; Grad is -Grad0).
+
+% writeln(grad(QueryID:I:Grad)),
+% assert(query_gradient_intern(QueryID,I,p,Grad)),
+% fail.
+%gradient(QueryID, g, Slope) :-
+%    gradient(QueryID, l, Slope).
+
+maplist_to_hash([], H0, H0).
+maplist_to_hash([I-V|MapList], H0, Hash) :-
+    rb_insert(H0, V, I, H1),
+    maplist_to_hash(MapList, H1, Hash).
+
+tree_to_grad([], _, Grad, Grad).
+tree_to_grad([Node|Tree], H, Grad0, Grad) :-
+    node_to_gradient_node(Node, H, GNode),
+    tree_to_grad(Tree, H, [GNode|Grad0], Grad).
+
+node_to_gradient_node(pp(P-G,X,L,R), H, gnodep(P,G,X,Id,PL,GL,PR,GR)) :-
+    rb_lookup(X,Id,H),
+    (L == 1 -> GL=0, PL=1 ; L == 0 -> GL = 0, PL=0 ; L = PL-GL),
+    (R == 1 -> GR=0, PR=1 ; R == 0 -> GR = 0, PR=0 ; R = PR-GR).
+node_to_gradient_node(pn(P-G,X,L,R), H, gnoden(P,G,X,Id,PL,GL,PR,GR)) :-
+    rb_lookup(X,Id,H),
+    (L == 1 -> GL=0, PL=1 ; L == 0 -> GL = 0, PL=0 ; L = PL-GL),
+    (R == 1 -> GR=0, PR=1 ; R == 0 -> GR = 0, PR=0 ; R = PR-GR).
+
+run_sp([], _, P0, P0).
+run_sp([gnodep(P,_G, EP, _Id, PL, _GL, PR, _GR)|Tree], Slope, _, PF) :-
+    P is EP*PL + (1.0-EP)*PR,
+    run_sp(Tree, Slope, P, PF).
+run_sp([gnoden(P,_G, EP, _Id, PL, _GL, PR, _GR)|Tree], Slope, _, PF) :-
+    P is EP*PL + (1.0-EP)*(1.0 - PR),
+    run_sp(Tree, Slope, P, PF).
+
+run_grad([], _I, _, G0, G0).
+run_grad([gnodep(P,G, EP, Id, PL, GL, PR, GR)|Tree], I, Slope, _, GF) :-
+    P is EP*PL + (1.0-EP)*PR,
+    G0 is EP*GL + (1.0-EP)*GR,
+    % don't forget the -X
+    ( I == Id -> G is G0+(PL-PR)* EP*(1-EP)*Slope ; G = G0 ),
+    run_grad(Tree, I, Slope, G, GF).
+run_grad([gnoden(P,G, EP, Id, PL, GL, PR, GR)|Tree], I, Slope, _, GF) :-
+    P is EP*PL + (1.0-EP)*(1.0 - PR),
+    G0 is EP*GL - (1.0 - EP) * GR,
+    ( I == Id -> G is G0+(PL+PR-1)*EP*(1-EP)*Slope ; G = G0 ),
+    run_grad(Tree, I, Slope, G, GF).
+
+
+prob2log(_X,Slope,FactID,V) :-
+    get_fact_probability(FactID, V0),
+    inv_sigmoid(V0, Slope, V).
+
+log2prob(X,Slope,FactID,V) :-
+    V0 <== X[FactID],
+    sigmoid(V0, Slope, V).
+
+bind_maplist([], _Slope, _X).
+bind_maplist([Node-Pr|MapList], Slope, X) :-
+    V0 <== X[Node],
+    sigmoid(V0, Slope, Pr),    % bind the map entry so the BDD sees the current probability
+    bind_maplist(MapList, Slope, X).
+
 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
 % stop calculate gradient
 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
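Beyond moving the BDD-evaluation predicates below their callers, the problog_lbfgs.yap changes fix three real bugs: compute_grad/7 now fetches each query's BDD from the internal database with recorded/3 (before, BDD was a fresh unbound term), prob2log and log2prob gain the output argument V they had been computing and then discarding, and bind_maplist/3 now binds each Node-Pr pair of the BDD map, so run_sp/4 and run_grad/5 operate on concrete probabilities instead of unbound variables. The idea behind the map is that L-BFGS optimizes one unconstrained weight per tunable fact, and the weight is squashed through a slope-scaled sigmoid to give the fact's probability. A self-contained sketch of that reparameterization; sigmoid/3 and inv_sigmoid/3 are the names used above, but the bodies here are my assumption of the standard logistic form, not a quote of ProbLog's definitions:

% sigmoid(W, Slope, P): map an unconstrained weight W to a probability in (0,1).
sigmoid(W, Slope, P) :-
    P is 1.0 / (1.0 + exp(-W * Slope)).

% inv_sigmoid(P, Slope, W): recover the weight from the probability.
inv_sigmoid(P, Slope, W) :-
    W is -log(1.0 / P - 1.0) / Slope.

% The two are inverses of each other, e.g.:
% ?- sigmoid(0.3, 1.0, P), inv_sigmoid(P, 1.0, W).
% P = 0.574442..., W = 0.3 (up to rounding)
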
diff --git a/packages/yap-lbfgs/lbfgs.pl b/packages/yap-lbfgs/lbfgs.pl
index f6f3ea834..fe46b3661 100644
--- a/packages/yap-lbfgs/lbfgs.pl
+++ b/packages/yap-lbfgs/lbfgs.pl
@@ -179,7 +179,7 @@ lbfgs_initialize(N,X,U,t(N,X,U,Params)) :-
 Clean up the memory.
 
 */
-lbfgs_finalize(t(N,X,U,Params)) :-
+lbfgs_finalize(t(_N,X,_U,Params)) :-
     lbfgs_release(X) ,
     lbfgs_release_parameters(Params) .
diff --git a/packages/yap-lbfgs/yap_lbfgs.c b/packages/yap-lbfgs/yap_lbfgs.c
index 79cc14ab3..565b47da2 100644
--- a/packages/yap-lbfgs/yap_lbfgs.c
+++ b/packages/yap-lbfgs/yap_lbfgs.c
@@ -66,7 +66,11 @@ static lbfgsfloatval_t evaluate(void *instance, const lbfgsfloatval_t *x,
     // Goal did not succeed
     return FALSE;
   }
-  rc = YAP_FloatOfTerm(YAP_GetFromSlot(sl));
+  YAP_Term o;
+  if (YAP_IsIntTerm((o = YAP_GetFromSlot(sl))))
+    rc = YAP_IntOfTerm(o);
+  else
+    rc = YAP_FloatOfTerm(o);
   YAP_RecoverSlots(1, sl);
   return rc;
 }
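The yap_lbfgs.c hunk hardens the C-to-Prolog bridge (the lbfgs.pl hunk is just another singleton rename). The user-supplied evaluate goal binds the objective value in its first argument, and nothing prevents a Prolog program from binding it to an integer; the old code called YAP_FloatOfTerm unconditionally, which is only correct when the term actually is a float. The new code tests the term type first and converts with YAP_IntOfTerm when needed. A sketch of a callback that produces exactly this situation, modelled on the user:evaluate(FX, X, G, N, Step, User) shape that problog_lbfgs.yap registers above; the one-dimensional objective is hypothetical:

% Minimize (X[0] - 3)^2, but report a hard integer 0 once we are close
% enough: an integer objective term the patched evaluate() now accepts.
user:evaluate(FX, X, G, _N, _Step, _User) :-
    V0 <== X[0],
    (   abs(V0 - 3.0) < 1.0e-8
    ->  FX = 0                     % integer term reaches yap_lbfgs.c
    ;   FX is (V0 - 3.0) ** 2      % float term, as before
    ),
    G0 is 2 * (V0 - 3.0),          % gradient of the squared error
    G[0] <== G0.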