Vitor Santos Costa 2018-10-09 13:47:27 +01:00
parent 48e398576f
commit 9e3a768220
5 changed files with 89 additions and 79 deletions

View File

@@ -77,7 +77,7 @@ aux_args([Arg|Args], MVars, [Arg|PArgs], PVars, [Arg|ProtoArgs]) :-
 aux_args([Arg|Args], [Arg|MVars], [PVar|PArgs], [PVar|PVars], ['_'|ProtoArgs]) :-
 	aux_args(Args, MVars, PArgs, PVars, ProtoArgs).
-pred_name(Macro, Arity, _ , Name) :-
+pred_name(Macro, Arity, P , Name) :-
 	prolog_load_context(file, FullFileName),
 	file_base_name( FullFileName, File ),
 	prolog_load_context(term_position, Pos),
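Note: the change gives pred_name/4 access to its third argument (P), presumably consumed further down the clause, outside this hunk. The prolog_load_context/2 calls are what tie generated predicate names to a source location. A minimal sketch of that idea, using a hypothetical helper aux_name/1; stream_position_data/3 is the SWI-compatible accessor for the line number:

    % Sketch (aux_name/1 is invented for illustration): name an
    % auxiliary predicate after the file and line being expanded.
    aux_name(Name) :-
        prolog_load_context(file, FullFileName),
        file_base_name(FullFileName, File),
        prolog_load_context(term_position, Pos),
        stream_position_data(line_count, Pos, Line),
        format(atom(Name), '~w_~d_aux', [File, Line]).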

View File

@@ -263,7 +263,7 @@
 % this is a test to determine whether YAP provides the needed trie library
 :- initialization(
-	( predicate_property(trie_disable_hash, imported_from(M)) ->
+	( predicate_property(trie_disable_hash, imported_from(_M)) ->
 	    trie_disable_hash
 	; print_message(warning,'The predicate tries:trie_disable_hash/0 does not exist. Please update trie library.')
 	)
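Note: renaming M to _M only silences a singleton-variable warning, but the guarded initialization it sits in is a pattern worth keeping in mind: probe for an optional predicate with predicate_property/2 and degrade gracefully when it is missing. A self-contained sketch of the same guard, with fancy_trie_option/0 as a made-up stand-in for trie_disable_hash/0:

    % Sketch: enable an optional feature only when the host provides it.
    % fancy_trie_option/0 is a hypothetical placeholder.
    :- initialization(
           (   predicate_property(fancy_trie_option, imported_from(_M))
           ->  fancy_trie_option
           ;   print_message(warning, format('fancy_trie_option/0 not available', []))
           )
       ).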

View File

@@ -649,64 +649,6 @@ init_one_query(QueryID,Query,Type) :-
 	    recordz(QueryID,bdd(Dir, Grad, MapList),_)
 	).
-
-qprobability(bdd(Dir, Tree, MapList), Slope, Prob) :-
-	/* query_probability(21,6.775948e-01). */
-	run_sp(Tree, Slope, 1.0, Prob0),
-	(Dir == 1 -> Prob0 = Prob ; Prob is 1.0-Prob0).
-
-qgradient(bdd(Dir, Tree, MapList), Slope, I, Grad) :-
-	member(I-_, MapList),
-	run_grad(Tree, I, Slope, 0.0, Grad0),
-	( Dir = 1 -> Grad = Grad0 ; Grad is -Grad0).
-%	writeln(grad(QueryID:I:Grad)),
-%	assert(query_gradient_intern(QueryID,I,p,Grad)),
-%	fail.
-%gradient(QueryID, g, Slope) :-
-%	gradient(QueryID, l, Slope).
-
-maplist_to_hash([], H0, H0).
-maplist_to_hash([I-V|MapList], H0, Hash) :-
-	rb_insert(H0, V, I, H1),
-	maplist_to_hash(MapList, H1, Hash).
-
-tree_to_grad([], _, Grad, Grad).
-tree_to_grad([Node|Tree], H, Grad0, Grad) :-
-	node_to_gradient_node(Node, H, GNode),
-	tree_to_grad(Tree, H, [GNode|Grad0], Grad).
-
-node_to_gradient_node(pp(P-G,X,L,R), H, gnodep(P,G,X,Id,PL,GL,PR,GR)) :-
-	rb_lookup(X,Id,H),
-	(L == 1 -> GL=0, PL=1 ; L == 0 -> GL = 0, PL=0 ; L = PL-GL),
-	(R == 1 -> GR=0, PR=1 ; R == 0 -> GR = 0, PR=0 ; R = PR-GR).
-node_to_gradient_node(pn(P-G,X,L,R), H, gnoden(P,G,X,Id,PL,GL,PR,GR)) :-
-	rb_lookup(X,Id,H),
-	(L == 1 -> GL=0, PL=1 ; L == 0 -> GL = 0, PL=0 ; L = PL-GL),
-	(R == 1 -> GR=0, PR=1 ; R == 0 -> GR = 0, PR=0 ; R = PR-GR).
-
-run_sp([], _, P0, P0).
-run_sp(gnodep(P,_G, EP, _Id, PL, _GL, PR, _GR).Tree, Slope, _, PF) :-
-	P is EP*PL+ (1.0-EP)*PR,
-	run_sp(Tree, Slope, P, PF).
-run_sp(gnoden(P,_G, EP, _Id, PL, _GL, PR, _GR).Tree, Slope, _, PF) :-
-	P is EP*PL + (1.0-EP)*(1.0 - PR),
-	run_sp(Tree, Slope, P, PF).
-
-run_grad([], _I, _, G0, G0).
-run_grad([gnodep(P,G, EP, Id, PL, GL, PR, GR)|Tree], I, Slope, _, GF) :-
-	P is EP*PL+ (1.0-EP)*PR,
-	G0 is EP*GL + (1.0-EP)*GR,
-	% don't forget the -X
-	( I == Id -> G is G0+(PL-PR)* EP*(1-EP)*Slope ; G = G0 ),
-	run_grad(Tree, I, Slope, G, GF).
-run_grad([gnoden(P,G, EP, Id, PL, GL, PR, GR)|Tree], I, Slope, _, GF) :-
-	P is EP*PL + (1.0-EP)*(1.0 - PR),
-	G0 is EP*GL - (1.0 - EP) * GR,
-	( I == Id -> G is G0+(PL+PR-1)*EP*(1-EP)*Slope ; G = G0 ),
-	run_grad(Tree, I, Slope, G, GF).
@@ -901,19 +843,6 @@ set_tunable(I, GroundTruth,P) :-
 	format('fact(~d, ~q, ~4f, ~4f).~n',[I,Source,GroundTruth,Pr]),
 	set_fact_probability(I,Pr).
-
-prob2log(X,Slope,FactID) :-
-	get_fact_probability(FactID, V0),
-	inv_sigmoid(V0, Slope, V).
-
-log2prob(X,Slope,FactID) :-
-	V0 <== X[FactID],
-	sigmoid(V0, Slope, V).
-
-bind_maplist([], Slope, X).
-bind_maplist([Node-Theta|MapList], Slope, X) :-
-	Theta <== X[Node],
-	bind_maplist(MapList, Slope, X).
 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
 % start calculate gradient
 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
@@ -933,10 +862,13 @@ user:evaluate(LLH_Training_Queries, X,Grad,N,_,_) :-
 	forall(tunable_fact(FactID,GroundTruth), (Z<==X[FactID],W<==Grad[FactID],writeln(FactID:(W->Z)))).
 compute_grad(N, X, Grad, Probs, Slope, Handle, LL) :-
-	user:example(QueryID,Query,QueryProb,Type),
-	BDD = bdd(Dir, GradTree, MapList),
+	user:example(QueryID,_Query,QueryProb,_),
+	recorded(QueryID,BDD,_),
+	BDD = bdd(_Dir, _GradTree, MapList),
 	bind_maplist(MapList, Slope, Probs),
+	%writeln( qprobability(BDD,Slope,BDDProb) ),
 	qprobability(BDD,Slope,BDDProb),
+	%writeln( gradientpair(BDD,Slope,BDDProb, QueryProb, Grad) ),
 	gradientpair(BDD,Slope,BDDProb, QueryProb, Grad),
 	LL is (((BDDProb)-(QueryProb))**2).
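Note: the per-query loss here is the squared error between the probability read off the BDD and the training label, so the enclosing user:evaluate/6 is presumably feeding L-BFGS a sum-of-squares objective. A short sketch of that accumulation, with bdd_prob/2 and training_label/2 as hypothetical accessors standing in for the recorded BDD and user:example/4:

    % Sketch: total objective = sum of per-query squared errors.
    % bdd_prob/2 and training_label/2 are hypothetical accessors.
    total_loss(QueryIDs, Loss) :-
        foldl(add_query_loss, QueryIDs, 0.0, Loss).

    add_query_loss(QueryID, Acc0, Acc) :-
        bdd_prob(QueryID, P),
        training_label(QueryID, Q),
        Acc is Acc0 + (P-Q)**2.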
@@ -944,11 +876,85 @@ gradientpair(BDD,Slope,BDDProb, QueryProb, Grad) :-
 	qgradient(BDD, Slope, FactID, GradValue),
 	% writeln(FactID),
 	G0 <== Grad[FactID],
-	writeln( GN is G0-GradValue*(QueryProb-BDDProb)),
-	writeln(FactID:(G0->GN)),
+	GN is G0-GradValue*(QueryProb-BDDProb),
+	%writeln( GN is G0-GradValue*(QueryProb-BDDProb)),
+	%writeln(FactID:(G0->GN)),
 	Grad[FactID] <== GN.
 gradientpair(_BDD,_Slope,_BDDProb, _Grad).
+qprobability(bdd(Dir, Tree, MapList), Slope, Prob) :-
+	/* query_probability(21,6.775948e-01). */
+	run_sp(Tree, Slope, 1.0, Prob0),
+	(Dir == 1 -> Prob0 = Prob ; Prob is 1.0-Prob0).
+
+qgradient(bdd(Dir, Tree, MapList), Slope, I, Grad) :-
+	member(I-_, MapList),
+	run_grad(Tree, I, Slope, 0.0, Grad0),
+	( Dir = 1 -> Grad = Grad0 ; Grad is -Grad0).
+%	writeln(grad(QueryID:I:Grad)),
+%	assert(query_gradient_intern(QueryID,I,p,Grad)),
+%	fail.
+%gradient(QueryID, g, Slope) :-
+%	gradient(QueryID, l, Slope).
+
+maplist_to_hash([], H0, H0).
+maplist_to_hash([I-V|MapList], H0, Hash) :-
+	rb_insert(H0, V, I, H1),
+	maplist_to_hash(MapList, H1, Hash).
+
+tree_to_grad([], _, Grad, Grad).
+tree_to_grad([Node|Tree], H, Grad0, Grad) :-
+	node_to_gradient_node(Node, H, GNode),
+	tree_to_grad(Tree, H, [GNode|Grad0], Grad).
+
+node_to_gradient_node(pp(P-G,X,L,R), H, gnodep(P,G,X,Id,PL,GL,PR,GR)) :-
+	rb_lookup(X,Id,H),
+	(L == 1 -> GL=0, PL=1 ; L == 0 -> GL = 0, PL=0 ; L = PL-GL),
+	(R == 1 -> GR=0, PR=1 ; R == 0 -> GR = 0, PR=0 ; R = PR-GR).
+node_to_gradient_node(pn(P-G,X,L,R), H, gnoden(P,G,X,Id,PL,GL,PR,GR)) :-
+	rb_lookup(X,Id,H),
+	(L == 1 -> GL=0, PL=1 ; L == 0 -> GL = 0, PL=0 ; L = PL-GL),
+	(R == 1 -> GR=0, PR=1 ; R == 0 -> GR = 0, PR=0 ; R = PR-GR).
+
+run_sp([], _, P0, P0).
+run_sp(gnodep(P,_G, EP, _Id, PL, _GL, PR, _GR).Tree, Slope, _, PF) :-
+	P is EP*PL+ (1.0-EP)*PR,
+	run_sp(Tree, Slope, P, PF).
+run_sp(gnoden(P,_G, EP, _Id, PL, _GL, PR, _GR).Tree, Slope, _, PF) :-
+	P is EP*PL + (1.0-EP)*(1.0 - PR),
+	run_sp(Tree, Slope, P, PF).
+
+run_grad([], _I, _, G0, G0).
+run_grad([gnodep(P,G, EP, Id, PL, GL, PR, GR)|Tree], I, Slope, _, GF) :-
+	P is EP*PL+ (1.0-EP)*PR,
+	G0 is EP*GL + (1.0-EP)*GR,
+	% don't forget the -X
+	( I == Id -> G is G0+(PL-PR)* EP*(1-EP)*Slope ; G = G0 ),
+	run_grad(Tree, I, Slope, G, GF).
+run_grad([gnoden(P,G, EP, Id, PL, GL, PR, GR)|Tree], I, Slope, _, GF) :-
+	P is EP*PL + (1.0-EP)*(1.0 - PR),
+	G0 is EP*GL - (1.0 - EP) * GR,
+	( I == Id -> G is G0+(PL+PR-1)*EP*(1-EP)*Slope ; G = G0 ),
+	run_grad(Tree, I, Slope, G, GF).
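Note: run_sp/4 and run_grad/5 make a single pass over the list of gradient nodes, mixing each node's child values with weight EP; for the fact being differentiated, the extra term EP*(1-EP)*Slope is exactly the derivative of the slope-scaled sigmoid mapping weights to probabilities. A worked micro-case against the clauses above, for one positive node over fact 0 with EP = 0.3, a true 1-child (PL=1, GL=0) and a false 0-child (PR=0, GR=0); the expected answers are worked out by hand:

    ?- run_sp([gnodep(_P,_G,0.3,0,1,0,0,0)], 1.0, 1.0, Prob).
    Prob = 0.3.       % EP*PL + (1-EP)*PR = 0.3*1 + 0.7*0

    ?- run_grad([gnodep(_P,_G,0.3,0,1,0,0,0)], 0, 1.0, 0.0, Grad).
    Grad = 0.21.      % (PL-PR)*EP*(1-EP)*Slope = 1*0.3*0.7*1.0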
+prob2log(X,Slope,FactID,V) :-
+	get_fact_probability(FactID, V0),
+	inv_sigmoid(V0, Slope, V).
+
+log2prob(X,Slope,FactID,V) :-
+	V0 <== X[FactID],
+	sigmoid(V0, Slope, V).
+
+bind_maplist([], Slope, X).
+bind_maplist([Node-Pr|MapList], Slope, X) :-
+	V0 <== X[Node],
+	sigmoid(V0, Slope, V),
+	bind_maplist(MapList, Slope, X).
 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
 % stop calculate gradient
 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
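Note: prob2log/4 and log2prob/4 (now with an explicit output argument V, repairing the missing-argument versions deleted above) convert between the unconstrained vector X that L-BFGS optimises and fact probabilities. Their helpers are not part of this diff; a plausible pair of definitions, consistent with the EP*(1-EP)*Slope gradient factor, is the slope-scaled logistic and its inverse:

    % Assumed definitions (not in this diff): slope-scaled logistic
    % and its inverse, so that sigmoid and inv_sigmoid round-trip.
    sigmoid(X, Slope, P) :-
        P is 1.0 / (1.0 + exp(-X * Slope)).
    inv_sigmoid(P, Slope, X) :-
        X is -log(1.0 / P - 1.0) / Slope.

Also worth flagging: bind_maplist/3 as committed computes V from X[Node] but never stores it in the Node-Pr pair, so the map entries are not actually refreshed in this revision.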

View File

@@ -179,7 +179,7 @@ lbfgs_initialize(N,X,U,t(N,X,U,Params)) :-
 Clean up the memory.
 */
-lbfgs_finalize(t(N,X,U,Params)) :-
+lbfgs_finalize(t(_N,X,_U,Params)) :-
 	lbfgs_release(X) ,
 	lbfgs_release_parameters(Params) .
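Note: lbfgs_finalize/1 is the teardown half of the solver lifecycle, releasing the parameter vector and the liblbfgs parameter block packed into the t(N,X,U,Params) state by lbfgs_initialize/4. A rough usage sketch; lbfgs_run/2 is assumed here and its exact signature may differ from the library's:

    % Sketch: always release solver state, even if the run throws.
    % lbfgs_run/2 is an assumption, not verified against the library.
    optimise(N, BestF) :-
        lbfgs_initialize(N, _X, 0, Solver),
        catch(lbfgs_run(Solver, BestF), Error,
              ( lbfgs_finalize(Solver), throw(Error) )),
        lbfgs_finalize(Solver).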

View File

@@ -66,7 +66,11 @@ static lbfgsfloatval_t evaluate(void *instance, const lbfgsfloatval_t *x,
     // Goal did not succeed
     return FALSE;
   }
-  rc = YAP_FloatOfTerm(YAP_GetFromSlot(sl));
+  YAP_Term o;
+  if (YAP_IsIntTerm((o = YAP_GetFromSlot(sl))))
+    rc = YAP_IntOfTerm(o);
+  else
+    rc = YAP_FloatOfTerm(o);
   YAP_RecoverSlots(1, sl);
   return rc;
 }
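Note: the C-side change covers the case where the Prolog evaluate goal binds the objective to an integer term. YAP_FloatOfTerm is only meaningful on float terms, so an integral cost previously yielded garbage; the new branch converts with YAP_IntOfTerm instead. Integer costs arise naturally on the Prolog side, for example in a hypothetical callback like:

    % Hypothetical evaluate/6: with no matching examples, sum_list/2
    % returns the integer 0 rather than 0.0; the C fix handles both.
    user:evaluate(FX, _X, _Grad, _N, _Step, _User) :-
        findall(L, example_loss(L), Ls),
        sum_list(Ls, FX).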