Vitor Santos Costa 2019-03-17 23:01:48 +00:00
parent d10589d60f
commit e8d9e71a4e
4 changed files with 78 additions and 101 deletions

View File

@@ -654,40 +654,44 @@ Unify _NElems_ with the type of the elements in _Matrix_.
:- use_module(library(mapargs)).
:- use_module(library(lists)).
( X <== '[]'(Dims0, array) of V ) :-
var(V), !,
foldl( norm_dim, Dims0, Dims, Bases, 1, Size ),
length( L, Size ),
X <== matrix( L, [dim=Dims,base=Bases] ).
-( X <== '[]'(Dims0, array) of ints ) :- !,
-    foldl( norm_dim, Dims0, Dims, Bases, 1, _Size ),
-    matrix_new( ints , Dims, _, X ),
-    matrix_base(X, Bases).
-( X <== '[]'(Dims0, array) of floats ) :-
-    atom(X), !,
-    foldl( norm_dim, Dims0, _Dims, _Bases, 1, Size ),
-    static_array( X, Size, [float] ).
-( X <== '[]'(Dims0, array) of floats ) :- !,
-    foldl( norm_dim, Dims0, Dims, Bases, 1, _Size ),
-    matrix_new( floats , Dims,_, X ),
-    matrix_base(X, Bases).
-( X <== '[]'(Dims0, array) of (I:J) ) :- !,
-    foldl( norm_dim, Dims0, Dims, Bases, 1, Size ),
-    matrix_seq(I, J, Dims, X),
-    matrixn_size(X, Size),
-    matrix_base(X, Bases).
+( X <== '[]'(Dims0, array) of T ) :-
+    var(X),
+    ( T== ints -> true ; T== floats),
+    !,
+    foldl( norm_dim, Dims0, Dims, Bases, 1, _Size ),
+    matrix_new( T , Dims, _, X ),
+    matrix_base(X, Bases).
+( X <== '[]'(Dims0, array) of T ) :-
+    atom(X),
+    ( T== ints -> true ; T== floats),
+    !,
+    foldl( norm_dim, Dims0, _Dims, _Bases, 1, Size ),
+    static_array( X, Size, [float] ).
+( X <== '[]'(Dims0, array) of (I:J) ) :-
+    var(X),
+    integer(I),
+    integer(J),
+    !,
+    foldl( norm_dim, Dims0, Dims, Bases, 1, Size ),
+    matrix_seq(I, J, Dims, X),
+    matrixn_size(X, Size),
+    matrix_base(X, Bases).
( X <== '[]'(Dims0, array) of L ) :-
-    length( L, Size ), !,
+    is_list(L),
+    !,
+    length( L, Size ),
foldl( norm_dim, Dims0, Dims, Bases, 1, Size ),
X <== matrix( L, [dim=Dims,base=Bases] ).
-( X <== '[]'(Dims0, array) of Pattern ) :- !,
-    array_extension(Pattern, Goal),
-    foldl( norm_dim, Dims0, Dims, Bases, 1, Size ),
-    call(Goal, Pattern, Dims, Size, L),
-    X <== matrix( L, [dim=Dims,base=Bases] ).
+( X <== '[]'(Dims0, array) of Pattern ) :-
+    array_extension(Pattern, Goal),
+    !,
+    foldl( norm_dim, Dims0, Dims, Bases, 1, Size ),
+    call(Goal, Pattern, Dims, Size, L),
+    X <== matrix( L, [dim=Dims,base=Bases] ).
( LHS <== RHS ) :-
rhs(RHS, R),
set_lhs( LHS, R).
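Note on this hunk: the separate ints/floats clauses collapse into two guarded "of T" clauses (one for an unbound X, one for a named static array), the I:J range clause gains integer/1 guards, and the list clause now commits only after is_list/1 succeeds. A minimal usage sketch of the rewritten construction, assuming library(matrix) is loaded; the goals below are illustrative and not part of the commit:

    % Illustrative only, not part of the commit.
    :- use_module(library(matrix)).

    demo :-
        A <== array[3,4] of floats,     % unbound A: matrix_new/4 clause
        B <== array[2,2] of (1:4),      % integer range: matrix_seq/4 clause
        C <== array[5] of [a,b,c,d,e],  % explicit list: is_list/1 clause
        writeln(A-B-C).

norm_dim/5 is folded over the requested dimensions to yield per-dimension sizes and bases plus the total element count. Its definition is not part of this diff; a plausible sketch, stated only as an assumption so the clauses above are self-contained:

    % Assumed shape of norm_dim/5 (not from this commit): a plain integer
    % dimension N keeps base 0; a Base..Top range has size Top-Base+1. The
    % accumulator multiplies per-dimension sizes into the element count.
    norm_dim(N, N, 0, Size0, Size) :-
        integer(N), !,
        Size is Size0 * N.
    norm_dim(Base..Top, Dim, Base, Size0, Size) :-
        Dim is Top - Base + 1,
        Size is Size0 * Dim.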

View File

@@ -63,11 +63,19 @@ maplist_to_hash([I-V|MapList], H0, Hash) :-
rb_insert(H0, V, I, H1),
maplist_to_hash(MapList, H1, Hash).
-bind_maplist([]).
-bind_maplist([Node-Theta|MapList]) :-
-    get_prob(Node, ProbFact),
-    inv_sigmoid(ProbFact, Theta),
-    bind_maplist(MapList).
+prob2log(_X,Slope,FactID,V) :-
+    get_fact_probability(FactID, V0),
+    inv_sigmoid(V0, Slope, V).
+log2prob(X,Slope,FactID,V) :-
+    V0 <== X[FactID],
+    sigmoid(V0, Slope, V).
+bind_maplist([], _Slope, _X).
+bind_maplist([Node-Pr|MapList], Slope, X) :-
+    Pr <== X[Node],
+    bind_maplist(MapList, Slope, X).
tree_to_grad([], _, Grad, Grad).
tree_to_grad([Node|Tree], H, Grad0, Grad) :-
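Note: the new bind_maplist/3 reads each parameter directly from the solver's weight vector X instead of recomputing it from the stored fact probability, as the deleted bind_maplist/1 did; prob2log/4 and log2prob/4 convert between probability space and weight space (the same three predicates are removed from the learner file later in this commit). They presuppose a slope-scaled logistic transform; a minimal sketch of that pair, assuming it matches the definitions ProbLog uses elsewhere:

    % Assumed logistic transform between weight and probability space
    % (a sketch, not from this commit).
    sigmoid(X, Slope, P) :-
        P is 1.0 / (1.0 + exp(-X * Slope)).

    % Inverse: recover the weight that maps to probability P.
    inv_sigmoid(P, Slope, X) :-
        X is -log(1.0 / P - 1.0) / Slope.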
@@ -95,15 +103,15 @@ gradient(QueryID, g, Slope) :-
gradient(QueryID, g, Slope) :-
gradient(QueryID, l, Slope).
-query_probability( DBDD, Slope, Prob) :-
+query_probability( DBDD, Slope, X, Prob) :-
DBDD = bdd(Dir, Tree, MapList),
-    bind_maplist(MapList),
+    bind_maplist(MapList, Slope, X),
run_sp(Tree, Slope, 1.0, Prob0),
(Dir == 1 -> Prob0 = Prob ; Prob is 1.0-Prob0).
-query_gradients(bdd(Dir, Tree, MapList),Slope,I,Grad) :-
-    bind_maplist(MapList),
+query_gradients(bdd(Dir, Tree, MapList),Slope,X,I,Grad) :-
+    bind_maplist(MapList, Slope, X),
member(I-_, MapList),
run_grad(Tree, I, Slope, 0.0, Grad0),
( Dir = 1 -> Grad = Grad0 ; Grad is -Grad0).
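Note: both entry points now thread the weight vector X through to bind_maplist/3, so probabilities and gradients are evaluated against the solver's current weights. A hypothetical call sequence follows; the helper name report/3 and the recorded-BDD convention are assumptions mirroring the learner code later in this commit:

    % Hypothetical: print a query's probability and per-fact gradients.
    report(QueryID, Slope, X) :-
        recorded(QueryID, BDD, _),
        query_probability(BDD, Slope, X, Prob),
        format('P(query ~w) = ~4f~n', [QueryID, Prob]),
        forall(query_gradients(BDD, Slope, X, I, G),
               format('  dP/dx_~w = ~4f~n', [I, G])).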

View File

@@ -26,9 +26,6 @@ graph2bdd(Query,1,bdd(D,T,Vs)) :-
bdd_new(TrieList, C),
bdd_tree(C, BDD),
-    BDD = bdd(D,T,_Vs0),
-    writeln(BDD).
+    BDD = bdd(D,T,_Vs0).
:- set_problog_flag(init_method,(Q,N,Bdd,user:graph2bdd(Q,N,Bdd))).

View File

@@ -797,8 +797,9 @@ gradient_descent :-
lbfgs_run(Solver,_BestF),
lbfgs_finalize(Solver).
-set_fact(FactID, Slope, X ) :-
-    get_fact_probability(FactID,Pr),
+set_fact(FactID, Slope, P ) :-
+    X <== P[FactID],
+    sigmoid(X, Slope, Pr),
(Pr > 0.99
->
NPr = 0.99
@@ -806,9 +807,8 @@ set_fact(FactID, Slope, X ) :-
Pr < 0.01
->
NPr = 0.01 ;
Pr = NPr ),
-    inv_sigmoid(NPr, Slope, XZ),
-    X[FactID] <== XZ.
+    set_fact_probability(FactID, NPr).
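Note: set_fact/3 now derives the probability from the current weight vector (sigmoid/3 over P[FactID]) and clamps it to [0.01, 0.99] before storing it with set_fact_probability/2, instead of re-encoding the clamped value into the weights via inv_sigmoid/3. For example, with Slope = 1.0 a weight of 2.0 maps to Pr ≈ 0.881, which is stored unchanged, while a weight of 6.0 maps to Pr ≈ 0.9975, which the guard caps at 0.99. A standalone restatement of the clamp, for illustration only:

    % Restates the if-then-else guard above (illustrative, not in the diff).
    clamp_prob(P0, P) :- P0 > 0.99, !, P = 0.99.
    clamp_prob(P0, P) :- P0 < 0.01, !, P = 0.01.
    clamp_prob(P, P).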
set_tunable(I,Slope,P) :-
@@ -823,49 +823,33 @@ set_tunable(I,Slope,P) :-
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
user:evaluate(LLH_Training_Queries, X,Grad,N,_,_) :-
%Handle = user_error,
-    example_count(TrainingExampleCount),
-    ExProbs <== array[TrainingExampleCount] of floats,
-    LLs <== array[N] of floats,
-    Probs <== array[N] of floats,
+    go( X,Grad, LLs),
+    sum_list( LLs, LLH_Training_Queries).
+go( X,Grad, LLs) :-
problog_flag(sigmoid_slope,Slope),
N1 is N-1,
forall(between(0,N1,I),
(Grad[I] <== 0.0, S <== X[I], sigmoid(S,Slope, P), Probs[I] <== P)
),
-    forall(
+    findall(
+        LL,
+        compute_gradient(Grad, X, Slope,LL),
+        LLs
+    ).
+compute_gradient( Grad, X, Slope, LL) :-
user:example(QueryID,_Query,QueryProb),
recorded(QueryID,BDD,_),
-        compute_probability(BDD,Slope,QueryID,ExProbs)
-    ),
-    forall(
-        user:example(QueryID,_Query,QueryProb),
-        compute_gradient(QueryID, QueryProb,Grad, Probs, Slope,LLs)
-    ),
-    trace,
-    LLH_Training_Queries <== sum(LLs).
-compute_probability( BDD, Slope, Id, Probs) :-
-    query_probability( BDD, Slope, Prob),
-    Probs[Id] <== Prob.
-compute_gradient(QueryID,QueryProb, Grad, Probs, ExProbs, Slope, LLs) :-
-    recorded(QueryID,BDD,_),
-    BDDProb <== ExProbs[QueryID],
-    forall(
-        query_gradients(BDD,Slope,I,GradValue),
-        gradient_pair(BDDProb, QueryProb, Grad, GradValue, I, Probs)
-    ),
+    query_probability( BDD, Slope, X, BDDProb),
LL is (BDDProb-QueryProb)*(BDDProb-QueryProb),
-    writeln(LL),
-    LLs[QueryID] <== LL.
+    forall(
+        query_gradients(BDD,Slope,X,I,GradValue),
+        gradient_pair(BDDProb, QueryProb, Grad, GradValue, Slope, X, I)
+    ).
-gradient_pair(BDDProb, QueryProb, Grad, GradValue, I, Probs) :-
+gradient_pair(BDDProb, QueryProb, Grad, GradValue, Slope, X, I) :-
G0 <== Grad[I],
-    Prob <== Probs[I],
+    log2prob(X,Slope,I,Prob),
GN is G0-GradValue*2*Prob*(1-Prob)*(QueryProb-BDDProb),
-    writeln(G0),
Grad[I] <== GN.
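Note: the rewritten compute_gradient/4 and gradient_pair/7 implement the derivative of the squared loss LL = (BDDProb - QueryProb)^2 through the sigmoid reparametrisation: for each fact I the accumulated term is 2*(BDDProb - QueryProb) * GradValue * Prob*(1-Prob), where GradValue is the BDD-derived dBDDProb/dp_I and Prob*(1-Prob) is the logistic derivative (without the slope factor). A standalone restatement of the accumulation step, for illustration:

    % What gradient_pair/7 computes for one fact/query pair.
    gradient_step(G0, GradValue, Prob, QueryProb, BDDProb, GN) :-
        GN is G0 - GradValue * 2 * Prob * (1 - Prob) * (QueryProb - BDDProb).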
wrap( X, Grad, GradCount) :-
tunable_fact(FactID,GroundTruth),
@@ -878,22 +862,6 @@ wrap( X, Grad, GradCount) :-
fail.
wrap( _X, _Grad, _GradCount).
-prob2log(_X,Slope,FactID,V) :-
-    get_fact_probability(FactID, V0),
-    inv_sigmoid(V0, Slope, V).
-log2prob(X,Slope,FactID,V) :-
-    V0 <== X[FactID],
-    sigmoid(V0, Slope, V).
-bind_maplist([], _Slope, _X).
-bind_maplist([Node-Pr|MapList], Slope, X) :-
-    Pr <== X[Node],
-    bind_maplist(MapList, Slope, X).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% stop calculate gradient
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
@@ -901,7 +869,7 @@ user:progress(FX,_X,_G, _X_Norm,_G_Norm,_Step,_N,_Iteration,_Ls,-1) :-
FX < 0, !,
format('stopped on bad FX=~4f~n',[FX]).
user:progress(FX,X,_G,X_Norm,G_Norm,Step,_N,_Iteration,Ls,0) :-
-    roblog_flag(sigmoid_slope,Slope),
+    problog_flag(sigmoid_slope,Slope),
forall(tunable_fact(FactID,_GroundTruth), set_tunable(FactID,Slope,X)),
current_iteration(CurrentIteration),
retractall(current_iteration(_)),
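Note: user:progress/10 is the callback LBFGS invokes once per iteration; judging by the two clauses above, the last argument is the continue/stop flag (0 to keep iterating, -1 to abort, as in the bad-FX clause). A minimal handler in the same style, assuming that convention:

    % Minimal progress handler (sketch, assuming 0 = continue):
    % log the objective and step size, then keep iterating.
    user:progress(FX,_X,_G,_X_Norm,_G_Norm,Step,_N,Iteration,_Ls,0) :-
        format('iteration ~d: f(X) = ~4f, step = ~4f~n', [Iteration, FX, Step]).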