Vitor Santos Costa 2019-03-17 23:01:48 +00:00
parent d10589d60f
commit e8d9e71a4e
4 changed files with 78 additions and 101 deletions

View File

@@ -654,40 +654,44 @@ Unify _NElems_ with the type of the elements in _Matrix_.
 :- use_module(library(mapargs)).
 :- use_module(library(lists)).
 
-( X <== '[]'(Dims0, array) of V ) :-
-	var(V), !,
-	foldl( norm_dim, Dims0, Dims, Bases, 1, Size ),
-	length( L, Size ),
-	X <== matrix( L, [dim=Dims,base=Bases] ).
-( X <== '[]'(Dims0, array) of ints ) :- !,
-	foldl( norm_dim, Dims0, Dims, Bases, 1, _Size ),
-	matrix_new( ints , Dims, _, X ),
-	matrix_base(X, Bases).
-( X <== '[]'(Dims0, array) of floats ) :-
-	atom(X), !,
+( X <== '[]'(Dims0, array) of T ) :-
+	var(X),
+	( T== ints -> true ; T== floats),
+	!,
+	foldl( norm_dim, Dims0, Dims, Bases, 1, _Size ),
+	matrix_new( T , Dims, _, X ),
+	matrix_base(X, Bases).
+( X <== '[]'(Dims0, array) of T ) :-
+	atom(X),
+	( T== ints -> true ; T== floats),
+	!,
 	foldl( norm_dim, Dims0, _Dims, _Bases, 1, Size ),
 	static_array( X, Size, [float] ).
-( X <== '[]'(Dims0, array) of floats ) :- !,
-	foldl( norm_dim, Dims0, Dims, Bases, 1, _Size ),
-	matrix_new( floats , Dims,_, X ),
-	matrix_base(X, Bases).
-( X <== '[]'(Dims0, array) of (I:J) ) :- !,
+( X <== '[]'(Dims0, array) of (I:J) ) :-
+	var(X),
+	integer(I),
+	integer(J),
+	!,
 	foldl( norm_dim, Dims0, Dims, Bases, 1, Size ),
 	matrix_seq(I, J, Dims, X),
 	matrixn_size(X, Size),
 	matrix_base(X, Bases).
 ( X <== '[]'(Dims0, array) of L ) :-
-	length( L, Size ), !,
+	is_list(L),
+	!,
+	length( L, Size ), !,
 	foldl( norm_dim, Dims0, Dims, Bases, 1, Size ),
 	X <== matrix( L, [dim=Dims,base=Bases] ).
-( X <== '[]'(Dims0, array) of Pattern ) :- !,
+( X <== '[]'(Dims0, array) of Pattern ) :-
 	array_extension(Pattern, Goal),
+	!,
 	foldl( norm_dim, Dims0, Dims, Bases, 1, Size ),
 	call(Goal, Pattern, Dims, Size, L),
 	X <== matrix( L, [dim=Dims,base=Bases] ).
 
 ( LHS <== RHS ) :-
 	rhs(RHS, R),
 	set_lhs( LHS, R).
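For context (not part of this commit): the clauses above drive the Var <== array[Dims] of Type surface syntax of library(matrix), the same form used elsewhere in this commit (e.g. ExProbs <== array[TrainingExampleCount] of floats). A minimal usage sketch; the dimensions and fill values are made up, and the multi-dimensional form assumes Dims may list several sizes:

	?- X <== array[10] of floats.          % fresh variable + ints/floats: allocated via matrix_new/4
	?- Y <== array[2,3] of ints.           % assumed multi-dimensional form, integer elements
	?- Z <== array[5] of (1:5).            % filled with the integer sequence 1..5 via matrix_seq/4
	?- W <== array[2,3] of [a,b,c,d,e,f].  % is_list/1 clause: the list contents are copied into the matrix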

View File

@@ -63,11 +63,19 @@ maplist_to_hash([I-V|MapList], H0, Hash) :-
 	rb_insert(H0, V, I, H1),
 	maplist_to_hash(MapList, H1, Hash).
 
-bind_maplist([]).
-bind_maplist([Node-Theta|MapList]) :-
-	get_prob(Node, ProbFact),
-	inv_sigmoid(ProbFact, Theta),
-	bind_maplist(MapList).
+prob2log(_X,Slope,FactID,V) :-
+	get_fact_probability(FactID, V0),
+	inv_sigmoid(V0, Slope, V).
+
+log2prob(X,Slope,FactID,V) :-
+	V0 <== X[FactID],
+	sigmoid(V0, Slope, V).
+
+bind_maplist([], _Slope, _X).
+bind_maplist([Node-Pr|MapList], Slope, X) :-
+	Pr <== X[Node],
+	bind_maplist(MapList, Slope, X).
 
 tree_to_grad([], _, Grad, Grad).
 tree_to_grad([Node|Tree], H, Grad0, Grad) :-
@@ -95,15 +103,15 @@ gradient(QueryID, g, Slope) :-
 gradient(QueryID, g, Slope) :-
 	gradient(QueryID, l, Slope).
 
-query_probability( DBDD, Slope, Prob) :-
+query_probability( DBDD, Slope, X, Prob) :-
 	DBDD = bdd(Dir, Tree, MapList),
-	bind_maplist(MapList),
+	bind_maplist(MapList, Slope, X),
 	run_sp(Tree, Slope, 1.0, Prob0),
 	(Dir == 1 -> Prob0 = Prob ; Prob is 1.0-Prob0).
 
-query_gradients(bdd(Dir, Tree, MapList),Slope,I,Grad) :-
-	bind_maplist(MapList),
+query_gradients(bdd(Dir, Tree, MapList),Slope,X,I,Grad) :-
+	bind_maplist(MapList, Slope, X),
 	member(I-_, MapList),
 	run_grad(Tree, I, Slope, 0.0, Grad0),
 	( Dir = 1 -> Grad = Grad0 ; Grad is -Grad0).
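For context (not part of this diff): query_probability/4 and query_gradients/5 now receive the optimiser's parameter vector X explicitly, and bind_maplist/3 instantiates each Node-Pr pair straight from X[Node] instead of reading the fact table. A hedged sketch of a caller; the predicate name example_eval/4 is invented for illustration:

	example_eval(BDD, X, Prob, Grads) :-
		problog_flag(sigmoid_slope, Slope),
		query_probability(BDD, Slope, X, Prob),
		findall(I-G, query_gradients(BDD, Slope, X, I, G), Grads).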

View File

@@ -26,9 +26,6 @@ graph2bdd(Query,1,bdd(D,T,Vs)) :-
 	bdd_new(TrieList, C),
 	bdd_tree(C, BDD),
 	BDD = bdd(D,T,_Vs0).
-	BDD = bdd(D,T,_Vs0),
-	writeln(BDD).
 
 :- set_problog_flag(init_method,(Q,N,Bdd,user:graph2bdd(Q,N,Bdd))).

View File

@@ -797,8 +797,9 @@ gradient_descent :-
 	lbfgs_run(Solver,_BestF),
 	lbfgs_finalize(Solver).
 
-set_fact(FactID, Slope, X ) :-
-	get_fact_probability(FactID,Pr),
+set_fact(FactID, Slope, P ) :-
+	X <== P[FactID],
+	sigmoid(X, Slope, Pr),
 	(Pr > 0.99
 	->
 	NPr = 0.99
@@ -806,9 +807,8 @@ set_fact(FactID, Slope, X ) :-
 	Pr < 0.01
 	->
 	NPr = 0.01 ;
 	Pr = NPr ),
-	inv_sigmoid(NPr, Slope, XZ),
-	X[FactID] <== XZ.
+	set_fact_probability(FactID, NPr).
 
 set_tunable(I,Slope,P) :-
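For context (not part of this diff): set_fact/3 now maps the raw LBFGS parameter P[FactID] to a probability and stores the clamped value with set_fact_probability/2, rather than writing an inverse-sigmoid value back into the array. A minimal sketch of the clamping step, assuming sigmoid(X, Slope, P) computes P = 1/(1 + exp(-Slope*X)); the helper name clamp_prob/2 is invented for illustration:

	% X =  5.0, Slope = 1.0 gives P ~ 0.993, stored as 0.99
	% X = -5.0, Slope = 1.0 gives P ~ 0.007, stored as 0.01
	clamp_prob(P, 0.99) :- P > 0.99, !.
	clamp_prob(P, 0.01) :- P < 0.01, !.
	clamp_prob(P, P).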
@@ -823,49 +823,33 @@ set_tunable(I,Slope,P) :-
 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
 user:evaluate(LLH_Training_Queries, X,Grad,N,_,_) :-
 	%Handle = user_error,
-	example_count(TrainingExampleCount),
-	ExProbs <== array[TrainingExampleCount] of floats,
-	LLs <== array[N] of floats,
-	Probs <== array[N] of floats,
-	problog_flag(sigmoid_slope,Slope),
-	N1 is N-1,
-	forall(between(0,N1,I),
-		(Grad[I] <== 0.0, S <== X[I], sigmoid(S,Slope, P), Probs[I] <== P)
-	),
-	forall(
-		recorded(QueryID,BDD,_),
-		compute_probability(BDD,Slope,QueryID,ExProbs)
-	),
-	forall(
-		user:example(QueryID,_Query,QueryProb),
-		compute_gradient(QueryID, QueryProb,Grad, Probs, Slope,LLs)
-	),
-	trace,
-	LLH_Training_Queries <== sum(LLs).
-
-compute_probability( BDD, Slope, Id, Probs) :-
-	query_probability( BDD, Slope, Prob),
-	Probs[Id] <== Prob.
-
-compute_gradient(QueryID,QueryProb, Grad, Probs, ExProbs, Slope, LLs) :-
-	recorded(QueryID,BDD,_),
-	BDDProb <== ExProbs[QueryID],
-	forall(
-		query_gradients(BDD,Slope,I,GradValue),
-		gradient_pair(BDDProb, QueryProb, Grad, GradValue, I, Probs)
-	),
-	LL is (BDDProb-QueryProb)*(BDDProb-QueryProb),
-	writeln(LL),
-	LLs[QueryID] <== LL.
+	go( X,Grad, LLs),
+	sum_list( LLs, LLH_Training_Queries).
+
+go( X,Grad, LLs) :-
+	problog_flag(sigmoid_slope,Slope),
+	findall(
+		LL,
+		compute_gradient(Grad, X, Slope,LL),
+		LLs
+	).
+
+compute_gradient( Grad, X, Slope, LL) :-
+	user:example(QueryID,_Query,QueryProb),
+	recorded(QueryID,BDD,_),
+	query_probability( BDD, Slope, X, BDDProb),
+	LL is (BDDProb-QueryProb)*(BDDProb-QueryProb),
+	forall(
+		query_gradients(BDD,Slope,X,I,GradValue),
+		gradient_pair(BDDProb, QueryProb, Grad, GradValue, Slope, X, I)
+	).
 
-gradient_pair(BDDProb, QueryProb, Grad, GradValue, I, Probs) :-
+gradient_pair(BDDProb, QueryProb, Grad, GradValue, Slope, X, I) :-
 	G0 <== Grad[I],
-	Prob <== Probs[I],
+	log2prob(X,Slope,I,Prob),
 	GN is G0-GradValue*2*Prob*(1-Prob)*(QueryProb-BDDProb),
-	writeln(G0),
 	Grad[I] <== GN.
 
 wrap( X, Grad, GradCount) :-
 	tunable_fact(FactID,GroundTruth),
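For context (not part of this diff): compute_gradient/4 and gradient_pair/7 apply the chain rule for the per-query squared error LL = (BDDProb-QueryProb)^2. GradValue is the derivative of the BDD probability with respect to fact I's probability, and Prob*(1-Prob) comes from differentiating the sigmoid reparameterisation read back via log2prob/4 (any slope factor is assumed to be folded into GradValue). A sketch of the term accumulated into Grad[I]; grad_term/5 is an invented helper name:

	grad_term(BDDProb, QueryProb, GradValue, Prob, Term) :-
		Term is -GradValue*2*Prob*(1-Prob)*(QueryProb-BDDProb).

	% e.g. grad_term(0.8, 0.3, 0.5, 0.6, T) gives T = 0.12: the gradient grows when the
	% BDD over-estimates the target probability, pushing X[I] down during minimisation.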
@@ -878,22 +862,6 @@ wrap( X, Grad, GradCount) :-
 	fail.
 wrap( _X, _Grad, _GradCount).
 
-prob2log(_X,Slope,FactID,V) :-
-	get_fact_probability(FactID, V0),
-	inv_sigmoid(V0, Slope, V).
-
-log2prob(X,Slope,FactID,V) :-
-	V0 <== X[FactID],
-	sigmoid(V0, Slope, V).
-
-bind_maplist([], _Slope, _X).
-bind_maplist([Node-Pr|MapList], Slope, X) :-
-	Pr <== X[Node],
-	bind_maplist(MapList, Slope, X).
-
 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
 % stop calculate gradient
 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
@@ -901,7 +869,7 @@ user:progress(FX,_X,_G, _X_Norm,_G_Norm,_Step,_N,_Iteration,_Ls,-1) :-
 	FX < 0, !,
 	format('stopped on bad FX=~4f~n',[FX]).
 user:progress(FX,X,_G,X_Norm,G_Norm,Step,_N,_Iteration,Ls,0) :-
-	roblog_flag(sigmoid_slope,Slope),
+	problog_flag(sigmoid_slope,Slope),
 	forall(tunable_fact(FactID,_GroundTruth), set_tunable(FactID,Slope,X)),
 	current_iteration(CurrentIteration),
 	retractall(current_iteration(_)),