This commit is contained in:
Vitor Santos Costa 2019-03-22 15:41:14 +00:00
parent 25afc1477c
commit 953667db21
5 changed files with 96 additions and 89 deletions

View File

@ -17,17 +17,19 @@
:- use_module('../problog_lbfgs').
%:- set_problog_flag(init_method,(Query,K,Bdd,problog:problog_exact_lbdd(Query,Bdd))).
%% :- if(true).
:- use_module('kbgraph').
%:- if(true).
:- use_module('kbgraph').
%%%%
% background knowledge
%%%%
% definition of acyclic path using list of visited nodes
/*:- else.
%:- else.
/*
:- set_problog_flag(init_method,(Query,K,Bdd,problog:problog_exact_lbdd(Query,Bdd))).
path(X,Y) :- path(X,Y,[X],_).
@ -46,7 +48,7 @@ edge(X,Y) :- dir_edge(X,Y).
absent(_,[]).
absent(X,[Y|Z]):-X \= Y, absent(X,Z).
:- endif.
%:- endif.
*/
%%%%
% probabilistic facts

View File

@ -238,7 +238,7 @@
:- dynamic(learning_initialized/0).
:- dynamic(current_iteration/1).
:- dynamic(example_count/1).
%:- dynamic(query_probability_intern/2).
:- dynamic(query_probability_intern/2).
%:- dynamic(query_gradient_intern/4).
:- dynamic(last_mse/1).
:- dynamic(query_is_similar/2).
@ -372,7 +372,7 @@ reset_learning :-
retractall(values_correct),
retractall(current_iteration(_)),
retractall(example_count(_)),
% retractall(query_probability_intern(_,_)),%
retractall(query_probability_intern(_,_)),
% retractall(query_gradient_intern(_,_,_,_)),
retractall(last_mse(_)),
retractall(query_is_similar(_,_)),
@ -610,7 +610,7 @@ init_one_query(QueryID,Query,_Type) :-
%=
%========================================================================
query_probability(QueryID,Prob) :-
Prob <== qp[QueryID].
query_probability_intern(QueryID,Prob).
%========================================================================
%=
@ -663,7 +663,6 @@ mse_trainingset :-
update_values,
findall(t(LogCurrentProb,SquaredError),
(user:example(QueryID,Query,TrueQueryProb,_Type),
% once(update_query(QueryID,'+',probability)),
query_probability(QueryID,CurrentProb),
format(Handle,'ex(~q,training,~q,~q,~10f,~10f).~n',[Iteration,QueryID,Query,TrueQueryProb,CurrentProb]),
@ -781,11 +780,22 @@ inv_sigmoid(T,Slope,InvSig) :-
%= probabilities of the examples have to be recalculated
%========================================================================
:- dynamic index/2.
% Stub: saving the previous probability vector is currently a no-op.
% NOTE(review): declared as a bare fact with no body — presumably a
% placeholder left during the LBFGS refactoring; confirm no caller
% expects it to actually persist the old probabilities.
save_old_probabilities.
% mkindex/0: build a dense, 0-based index over all tunable facts.
% Clears any previous index/2 entries, collects every tunable FactID,
% numbers them consecutively via foldl/4 over mkindex/3, and records
% the total number of tunables in count_tunables/1.
% NOTE(review): assumes count_tunables/1 is retracted elsewhere before
% re-running, otherwise stale counts accumulate — confirm.
mkindex :-
retractall(index(_,_)),
findall(FactID,tunable_fact(FactID,_GroundTruth),L),
foldl(mkindex, L, 0, Count),
assert(count_tunables(Count)).
% mkindex(+Key, +I, -I1)
% foldl/4 worker: record that tunable fact Key occupies position I in the
% parameter vector, and step the running counter to I1 = I + 1.
%
% BUG FIX: the original called assert/2 — assert(index(Key,I), I1).
% In YAP, assert/2's second argument is unified with a fresh database
% reference; since I1 had already been bound to an integer by
% `I1 is I+1`, that unification failed, so mkindex/3 failed and no
% index/2 entry was ever stored (which in turn made the foldl/4 in
% mkindex/0 fail). assert/1 is the intended call.
mkindex(Key,I,I1) :-
    I1 is I+1,
    assert(index(Key,I)).
% vsc: avoid silly search
gradient_descent :-
mkindex,
problog_flag(sigmoid_slope,Slope),
% current_iteration(Iteration),
findall(FactID,tunable_fact(FactID,_GroundTruth),L),
@ -798,7 +808,8 @@ gradient_descent :-
lbfgs_finalize(Solver).
set_fact(FactID, Slope, P ) :-
X <== P[FactID],
index(FactID, I),
X <== P[I],
sigmoid(X, Slope, Pr),
(Pr > 0.99
->
@ -824,7 +835,15 @@ set_tunable(I,Slope,P) :-
user:evaluate(LLH_Training_Queries, X,Grad,N,_,_) :-
%Handle = user_error,
go( X,Grad, LLs),
sum_list( LLs, LLH_Training_Queries).
sum_list( LLs, LLH_Training_Queries),
writeln(LLH_Training_Queries).
% Failure-driven loop intended to push the learned parameters back into
% the tunable facts.
% NOTE(review): this looks like an unfinished stub —
%   * X, GroundTruth, ID and Prob are all singletons: ID is never bound
%     to FactID and Prob is never computed from X, so
%     set_fact_probability/2 is called with two fresh variables;
%   * the first clause has arity 1 while the terminating clause below
%     has arity 0, so update_tunables(X) has no success clause and
%     update_tunables/0 never runs the loop.
% Presumably the body should bind ID = FactID and derive Prob from the
% parameter vector X — confirm against the LBFGS caller before use.
update_tunables(X) :-
tunable_fact(FactID,GroundTruth),
set_fact_probability(ID,Prob),
fail.
update_tunables.
go( X,Grad, LLs) :-
problog_flag(sigmoid_slope,Slope),
@ -832,6 +851,9 @@ go( X,Grad, LLs) :-
LL,
compute_gradient(Grad, X, Slope,LL),
LLs
),
forall(tunable_fact(FactID,_GroundTruth),
set_fact( FactID, Slope, X)
).
@ -840,6 +862,8 @@ compute_gradient( Grad, X, Slope, LL) :-
recorded(QueryID,BDD,_),
query_probability( BDD, Slope, X, BDDProb),
LL is (BDDProb-QueryProb)*(BDDProb-QueryProb),
retractall( query_probability_intern( QueryID, _) ),
assert( query_probability_intern( QueryID,BDDProb )),
forall(
query_gradients(BDD,Slope,X,I,GradValue),
gradient_pair(BDDProb, QueryProb, Grad, GradValue, Slope, X, I)
@ -848,7 +872,8 @@ compute_gradient( Grad, X, Slope, LL) :-
gradient_pair(BDDProb, QueryProb, Grad, GradValue, Slope, X, I) :-
G0 <== Grad[I],
log2prob(X,Slope,I,Prob),
GN is G0-GradValue*2*Prob*(1-Prob)*(QueryProb-BDDProb),
%writeln(Prob=BDDProb),
GN is G0+GradValue*BDDProb*(1-BDDProb)*2*(QueryProb-BDDProb),
Grad[I] <== GN.
wrap( X, Grad, GradCount) :-
@ -865,16 +890,13 @@ wrap( _X, _Grad, _GradCount).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% stop calculate gradient
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
user:progress(FX,_X,_G, _X_Norm,_G_Norm,_Step,_N,_Iteration,_Ls,-1) :-
user:progress(FX,_X,_G, _X_Norm,_G_Norm,_Step,_N,CurrentIteration,_Ls,-1) :-
FX < 0, !,
format('stopped on bad FX=~4f~n',[FX]).
user:progress(FX,X,_G,X_Norm,G_Norm,Step,_N,_Iteration,Ls,0) :-
user:progress(FX,X,_G,X_Norm,G_Norm,Step,_N, CurrentIteration,Ls,0) :-
assertz(current_iteration(Iteration)),
problog_flag(sigmoid_slope,Slope),
forall(tunable_fact(FactID,_GroundTruth), set_tunable(FactID,Slope,X)),
current_iteration(CurrentIteration),
retractall(current_iteration(_)),
NextIteration is CurrentIteration+1,
assertz(current_iteration(NextIteration)),
logger_set_variable(mse_trainingset, FX),
save_model,
X0 <== X[0], sigmoid(X0,Slope,P0),

View File

@ -462,18 +462,7 @@ do_learning_intern(Iterations,Epsilon) :-
logger_stop_timer(duration),
logger_write_data,
RemainingIterations is Iterations-1,
(
MSE_Diff>Epsilon
->
do_learning_intern(RemainingIterations,Epsilon);
true
).
logger_write_data.
%========================================================================

View File

@ -734,7 +734,9 @@ db_files(Fs) :-
% format( 'I=~w~n', [Verbosity=UserFile] ),
% export to process
b_setval('$lf_status', TOpts),
'$reset_if'(OldIfLevel),
'__NB_getval__'('$if_level', OldIfLevel, OldIfLevel=0),
nb_setval('$if_level',0),
writeln(ln(OldIfLevel)),
% take care with [a:f], a is the ContextModule
'$current_module'(SourceModule, ContextModule),
'$lf_opt'(consult, TOpts, Reconsult0),
@ -768,7 +770,6 @@ db_files(Fs) :-
true
),
'$loop'(Stream,Reconsult),
'$lf_opt'(imports, TOpts, Imports),
'$import_to_current_module'(File, ContextModule, Imports, _, TOpts),
'$current_module'(Mod, SourceModule),
@ -785,14 +786,14 @@ db_files(Fs) :-
;
true
),
writeln(out(OldIfLevel)),
nb_setval('$if_level',OldIfLevel),
set_stream( OldStream, alias(loop_stream) ),
set_prolog_flag(generate_debug_info, GenerateDebug),
'$comp_mode'(_CompMode, OldCompMode),
working_directory(_,OldD),
% surely, we were in run mode or we would not have included the file!
nb_setval('$if_skip_mode',run),
% back to include mode!
nb_setval('$if_level',OldIfLevel),
'$lf_opt'('$use_module', TOpts, UseModule),
'$bind_module'(Mod, UseModule),
'$reexport'( TOpts, ParentF, Reexport, Imports, File ),
@ -812,17 +813,6 @@ db_files(Fs) :-
'$qsave_file_'( File, UserF, F ).
'$q_do_save_file'(_File, _, _TOpts ).
'$reset_if'(OldIfLevel) :-
'__NB_getval__'('$if_level', OldIfLevel, fail), !,
nb_setval('$if_level',0).
'$reset_if'(0) :-
nb_setval('$if_level',0).
'$get_if'(Level0) :-
'__NB_getval__'('$if_level', Level, fail), !,
Level0 = Level.
'$get_if'(0).
'$bind_module'(_, load_files).
'$bind_module'(Mod, use_module(Mod)).
@ -1561,8 +1551,9 @@ If an error occurs, the error is printed and processing proceeds as if
%
'$if'(_,top) :- !, fail.
'$if'(_Goal,_) :-
'$get_if'(Level0),
'__NB_getval__'('$if_level',Level0,Level=0),
Level is Level0 + 1,
writeln(Level),
nb_setval('$if_level',Level),
( '__NB_getval__'('$endif', OldEndif, fail) -> true ; OldEndif=top),
( '__NB_getval__'('$if_skip_mode', Mode, fail) -> true ; Mode = run ),
@ -1574,12 +1565,12 @@ If an error occurs, the error is printed and processing proceeds as if
nb_setval('$endif',endif(Level, OldEndif, skip)).
% we are in non skip mode, check....
'$if'(Goal,_) :-
('$if_call'(Goal)
(
'$if_call'(Goal)
->
% we will execute this branch, and later enter skip
'__NB_getval__'('$endif', elif(Level,OldEndif,Mode), fail),
nb_setval('$endif',endif(Level,OldEndif,Mode))
;
% we are now in skip, but can start an elif.
nb_setval('$if_skip_mode',skip)
@ -1592,7 +1583,8 @@ Start `else' branch.
*/
'$else'(top) :- !, fail.
'$else'(_) :-
'$get_if'(0), !,
'__NB_getval__'('$if_level',0,true),
!,
'$do_error'(context_error(no_if),(:- else)).
% we have done an if, so just skip
'$else'(_) :-
@ -1600,7 +1592,7 @@ Start `else' branch.
nb_setval('$if_skip_mode',skip).
% we can try the elif
'$else'(_) :-
'$get_if'(Level),
'__NB_getval__'('$if_level',Level,Level=0),
nb_getval('$endif',elif(Level,OldEndif,Mode)),
nb_setval('$endif',endif(Level,OldEndif,Mode)),
nb_setval('$if_skip_mode',run).
@ -1614,7 +1606,8 @@ no test succeeds the else branch is processed.
*/
'$elif'(_,top) :- !, fail.
'$elif'(Goal,_) :-
'$get_if'(0),
'__NB_getval__'('$if_level',0,true),
!,
'$do_error'(context_error(no_if),(:- elif(Goal))).
% we have done an if, so just skip
'$elif'(_,_) :-
@ -1622,7 +1615,7 @@ no test succeeds the else branch is processed.
nb_setval('$if_skip_mode',skip).
% we can try the elif
'$elif'(Goal,_) :-
'$get_if'(Level),
'__NB_getval__'('$if_level',Level,fail),
'__NB_getval__'('$endif',elif(Level,OldEndif,Mode),fail),
('$if_call'(Goal)
->
@ -1642,11 +1635,12 @@ End of conditional compilation.
'$endif'(top) :- !, fail.
'$endif'(_) :-
% unmmatched endif.
'$get_if'(0),
'__NB_getval__'('$if_level',0,true),
!,
'$do_error'(context_error(no_if),(:- endif)).
'$endif'(_) :-
% back to where you belong.
'$get_if'(Level),
'__NB_getval__'('$if_level',Level,Level=0),
nb_getval('$endif',Endif),
Level0 is Level-1,
nb_setval('$if_level',Level0),

View File

@ -82,7 +82,7 @@ live :-
% stop at spy-points if debugging is on.
nb_setval('$debug_run',off),
nb_setval('$debug_jump',off),
nb_setval('$debug_status', state(zip, 0, stop), fail),
nb_setval('$debug_status', state(zip, 0, stop)),
'$command'(Command,Varnames,Pos,top),
current_prolog_flag(break_level, BreakLevel),
(