Vítor Santos Costa 2018-10-10 09:39:04 +01:00
parent 9e3a768220
commit 32266a8b98
6 changed files with 83 additions and 49 deletions

View File

@@ -1056,6 +1056,9 @@ X_API void YAP_Init(YAP_init_args *yap_init) {
   init_globals(yap_init);
   start_modules();
+  if (yap_init->QuietMode) {
+    setVerbosity(TermSilent);
+  }
   if (yap_init->install && Yap_OUTPUT_STARTUP) {
     setAtomicGlobalPrologFlag(RESOURCE_DATABASE_FLAG,
                               MkAtomTerm(Yap_LookupAtom(Yap_INPUT_STARTUP)));
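Note: the new QuietMode branch only affects start-up verbosity; an embedding application requests it through YAP_init_args and YAP_Init then calls setVerbosity(TermSilent). A minimal sketch of the equivalent request from the Prolog side, assuming YAP honours the usual `verbose` flag with the value `silent` (that flag name is not part of this patch):

    % hedged sketch: silence informational boot messages
    % (assumes the standard `verbose` Prolog flag and its `silent` value)
    :- set_prolog_flag(verbose, silent).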

View File

@@ -30,10 +30,11 @@ START_LOCAL_FLAGS
 /** + `autoload`: set the system to look for undefined procedures */
 YAP_FLAG(AUTOLOAD_FLAG, "autoload", true, booleanFlag, "false", NULL),
 /** + `read-only flag, that tells if Prolog is in an inner top-level */
 YAP_FLAG(BREAK_LEVEL_FLAG, "break_level", true, nat, "0", NULL),
-YAP_FLAG(CALL_COUNTING_FLAG, "call_counting", true, booleanFlag, "true",
-         NULL), /** + `call_counting`
+/** + `call_counting`
     Predicates compiled with this flag set maintain a counter
     on the numbers of proceduree calls and of retries. These counters
@@ -51,18 +52,29 @@ YAP_FLAG(AUTOLOAD_FLAG, "autoload", true, booleanFlag, "false", NULL),
     If `on` `fileerrors` is `on`, if `off` (default)
     `fileerrors` is disabled.
 */
+YAP_FLAG(CALL_COUNTING_FLAG, "call_counting", true, booleanFlag, "true",
+         NULL),
+/** + support for coding systens, YAP relies on UTF-8 internally.
+*/
 YAP_FLAG(ENCODING_FLAG, "encoding", true, isatom, "utf-8", getenc),
-YAP_FLAG(FILEERRORS_FLAG, "fileerrors", true, booleanFlag, "true",
+/** + what to do if opening a file fails.
+*/
+YAP_FLAG(FILEERRORS_FLAG, "fileerrors", true, booleanFlag, "true",
          NULL), /** + `fileerrors`
     If `on` `fileerrors` is `on`, if `off` (default)
     `fileerrors` is disabled.
 */
-YAP_FLAG(LANGUAGE_MODE_FLAG, "language_mode", true, isatom, "yap",
-         NULL), /** + `language_mode`
-    wweter native mode or trying to emulate a different Prolog.
+/** + `language_mode`
+    wweter native mode or trying to emulate a different
+    Prolog.
 */
+YAP_FLAG(LANGUAGE_MODE_FLAG, "language_mode", true, isatom, "yap",
+         NULL),
 YAP_FLAG(STACK_DUMP_ON_ERROR_FLAG, "stack_dump_on_error", true, booleanFlag,
          "true", NULL), /** + `stack_dump_on_error `

View File

@@ -2613,7 +2613,7 @@ compute_bounds(LP, UP, Status) :-
 % same as problog_threshold/5, but lower bound only (no stopped derivations stored)
 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-problog_low(Goal/Cond, Threshold, _, _) :-
+problog_low(Goal/Cond, Threshold, Status, P) :-
 	!,
 	problog_low((Cond,Goal), Threshold, P1, Status),
 	problog_low( Cond, Threshold, P2, Status),

View File

@@ -563,7 +563,7 @@ empty_bdd_directory.
 set_default_gradient_method :-
 	problog_flag(continuous_facts, true),
 	!,
-	problog_flag(init_method,OldMethod),
+%	problog_flag(init_method,OldMethod),
 	format_learning(2,'Theory uses continuous facts.~nWill use problog_exact/3 as initalization method.~2n',[]),
 	set_problog_flag(init_method,(Query,Probability,BDDFile,ProbFile,problog_exact_save(Query,Probability,_Status,BDDFile,ProbFile))).
 set_default_gradient_method :-
@@ -592,7 +592,7 @@ bdd_input_file(Filename) :-
 	problog_flag(output_directory,Dir),
 	concat_path_with_filename(Dir,'input.txt',Filename).
-init_one_query(QueryID,Query,Type) :-
+init_one_query(QueryID,Query,_Type) :-
 %	format_learning(3,' ~q example ~q: ~q~n',[Type,QueryID,Query]),
 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
@@ -784,7 +784,7 @@ mse_testset :-
 	logger_set_variable(mse_min_testset,MinError),
 	logger_set_variable(mse_max_testset,MaxError),
 	logger_set_variable(llh_test_queries,LLH_Test_Queries),
-	format_learning(2,' (~8f)~n',[M]).
+	format_learning(2,' (~8f)~n',[MSE]).
 %========================================================================
@@ -819,8 +819,9 @@ save_old_probabilities :-
 % vsc: avoid silly search
 gradient_descent :-
-	current_iteration(Iteration),
-	create_training_predictions_file_name(Iteration,File_Name),
+	problog_flag(sigmoid_slope,Slope),
+%	current_iteration(Iteration),
+%	create_training_predictions_file_name(Iteration,File_Name),
 	Handle = user_error,
 	format(Handle,"%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%~n",[]),
 	format(Handle,"% Iteration, train/test, QueryID, Query, GroundTruth, Prediction %~n",[]),
@@ -829,70 +830,89 @@ gradient_descent :-
 	findall(FactID,tunable_fact(FactID,GroundTruth),L), length(L,N),
 %	leash(0),trace,
 	lbfgs_initialize(N,X,0,Solver),
-	N1 is N-1,
-	forall(tunable_fact(FactID,GroundTruth), (X[FactID] <== 0.5)),
+	forall(tunable_fact(FactID,GroundTruth),
+	       (XZ is 0.5, X[FactID] <== XZ,set_fact_probability(FactID,XZ))),
 	problog_flag(sigmoid_slope,Slope),
+	%%% lbfgs_set_parameter(min_step, 2e-40, Solver),
 	lbfgs_run(Solver,BestF),
 	format('~2nOptimization done~nWe found a minimum ~4f.~n',[BestF]),
-	forall(tunable_fact(FactID,GroundTruth), set_tunable(FactID,GroundTruth,X)),
+	forall(tunable_fact(FactID,GroundTruth), set_tunable(FactID,X)),
 	lbfgs_finalize(Solver).
 
-set_tunable(I, GroundTruth,P) :-
+set_tunable(I,P) :-
 	Pr <== P[I],
-	get_fact(I,Source),
-	format('fact(~d, ~q, ~4f, ~4f).~n',[I,Source,GroundTruth,Pr]),
 	set_fact_probability(I,Pr).
 
 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
 % start calculate gradient
 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
 user:evaluate(LLH_Training_Queries, X,Grad,N,_,_) :-
-	Handle = user_error,
+	%Handle = user_error,
+	GradCount <== array[N] of ints,
 	problog_flag(sigmoid_slope,Slope),
 	Probs = X,
 	N1 is N-1,
 	forall(between(0,N1,I),
 	       (Grad[I] <== 0.0) %, sigmoid(X[I],Slope,Probs[I]) )
 	      ),
 	findall(LL,
-		compute_grad(N, X, Grad, Probs, Slope, Handle, LL),
+		compute_grad(Grad, GradCount, Probs, Slope, LL),
 		LLs
 	       ),
-	sum_list(LLs,LLH_Training_Queries),
-	forall(tunable_fact(FactID,GroundTruth), (Z<==X[FactID],W<==Grad[FactID],writeln(FactID:(W->Z)))).
+	sum_list(LLs,LLH_Training_Queries).
+	%wrap(X, Grad, GradCount).
 
-compute_grad(N, X, Grad, Probs, Slope, Handle, LL) :-
+compute_grad(Grad, GradCount, Probs, Slope, LL) :-
 	user:example(QueryID,_Query,QueryProb,_),
 	recorded(QueryID,BDD,_),
 	BDD = bdd(_Dir, _GradTree, MapList),
-	bind_maplist(MapList, Slope, Probs),
-	%writeln( qprobability(BDD,Slope,BDDProb) ),
+	MapList = [_|_],
+	bind_maplist(MapList, Slope, Probs),
+	%writeln( MapList ),
 	qprobability(BDD,Slope,BDDProb),
-	%writeln( gradientpair(BDD,Slope,BDDProb, QueryProb, Grad) ),
-	gradientpair(BDD,Slope,BDDProb, QueryProb, Grad),
-	LL is (((BDDProb)-(QueryProb))**2).
+	LL is (((BDDProb)-(QueryProb))**2),
+	%writeln( qprobability(BDD,Slope,BDDProb) ),
+	forall(
+	       member(I-_, MapList),
+	       gradientpair(I, BDD,Slope,BDDProb, QueryProb, Grad, GradCount)
+	      ).
 
-gradientpair(BDD,Slope,BDDProb, QueryProb, Grad) :-
-	qgradient(BDD, Slope, FactID, GradValue),
+gradientpair(I, BDD,Slope,BDDProb, QueryProb, Grad, GradCount) :-
+	qgradient(I, BDD, Slope, FactID, GradValue),
 	% writeln(FactID),
 	G0 <== Grad[FactID],
 	%writeln( GN is G0-GradValue*(QueryProb-BDDProb)),
-	GN is G0-GradValue*(QueryProb-BDDProb),
+	GN is G0-GradValue*2*(QueryProb-BDDProb),
 	%writeln(FactID:(G0->GN)),
+	GC <== GradCount[FactID],
+	GC1 is GC+1,
+	GradCount[FactID] <== GC1,
 	Grad[FactID] <== GN.
-gradientpair(_BDD,_Slope,_BDDProb, _Grad).
 
-qprobability(bdd(Dir, Tree, MapList), Slope, Prob) :-
+qprobability(bdd(Dir, Tree, _MapList), Slope, Prob) :-
 	/* query_probability(21,6.775948e-01). */
 	run_sp(Tree, Slope, 1.0, Prob0),
 	(Dir == 1 -> Prob0 = Prob ; Prob is 1.0-Prob0).
 
-qgradient(bdd(Dir, Tree, MapList), Slope, I, Grad) :-
-	member(I-_, MapList),
+qgradient(I, bdd(Dir, Tree, _MapList), Slope, I, Grad) :-
 	run_grad(Tree, I, Slope, 0.0, Grad0),
 	( Dir = 1 -> Grad = Grad0 ; Grad is -Grad0).
 
+wrap( X, Grad, GradCount) :-
+	tunable_fact(FactID,GroundTruth),
+	Z<==X[FactID],
+	W<==Grad[FactID],
+	WC<==GradCount[FactID],
+	WC > 0,
+	format('ex(~d, ~q, ~4f, ~4f).~n',[FactID,GroundTruth,Z,W]),
+	% Grad[FactID] <== WN,
+	fail.
+wrap( _X, _Grad, _GradCount).
+
 % writeln(grad(QueryID:I:Grad)),
 % assert(query_gradient_intern(QueryID,I,p,Grad)),
 % fail.
@@ -931,17 +951,17 @@ run_grad([gnodep(P,G, EP, Id, PL, GL, PR, GR)|Tree], I, Slope, _, GF) :-
 	P is EP*PL+ (1.0-EP)*PR,
 	G0 is EP*GL + (1.0-EP)*GR,
 	% don' t forget the -X
-	( I == Id -> G is G0+(PL-PR)* EP*(1-EP)*Slope ; G = G0 ),
+	( I == Id -> G is PL-PR ; G = G0 ),
 	run_grad(Tree, I, Slope, G, GF).
 run_grad([gnoden(P,G, EP, Id, PL, GL, PR, GR)|Tree], I, Slope, _, GF) :-
 	P is EP*PL + (1.0-EP)*(1.0 - PR),
 	G0 is EP*GL - (1.0 - EP) * GR,
-	( I == Id -> G is G0+(PL+PR-1)*EP*(1-EP)*Slope ; G = G0 ),
+	( I == Id -> G is PL-(1.0-PR) ; G = G0 ),
 	run_grad(Tree, I, Slope, G, GF).
 
-prob2log(X,Slope,FactID,V) :-
+prob2log(_X,Slope,FactID,V) :-
 	get_fact_probability(FactID, V0),
 	inv_sigmoid(V0, Slope, V).
@@ -949,17 +969,16 @@ log2prob(X,Slope,FactID,V) :-
 	V0 <== X[FactID],
 	sigmoid(V0, Slope, V).
 
-bind_maplist([], Slope, X).
+bind_maplist([], _Slope, _X).
 bind_maplist([Node-Pr|MapList], Slope, X) :-
-	V0 <== X[Node],
-	sigmoid(V0, Slope, V),
+	Pr <== X[Node],
 	bind_maplist(MapList, Slope, X).
 
 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
 % stop calculate gradient
 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
 user:progress(FX,X,_G,X_Norm,G_Norm,Step,_N,Iteration,Ls,0) :-
-	problog_flag(sigmoid_slope,Slope),
+%	problog_flag(sigmoid_slope,Slope),
 	X0 <== X[0],
 	X1 <== X[1],
 	format('~d. Iteration : (x0,x1)=(~4f,~4f) f(X)=~4f |X|=~4f |X\'|=~4f Step=~4f Ls=~4f~n',[Iteration,X0 ,X1,FX,X_Norm,G_Norm,Step,Ls]).
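Note on the change to gradientpair/7 above: the factor of 2 that now multiplies GradValue is the full derivative of the squared error that compute_grad/5 returns as LL. With LL = (BDDProb - QueryProb)^2 and GradValue the BDD derivative of BDDProb with respect to parameter theta_i as produced by qgradient/5, the chain rule gives

    \frac{\partial LL}{\partial \theta_i}
      = 2\,(\mathrm{BDDProb} - \mathrm{QueryProb})\,\frac{\partial\,\mathrm{BDDProb}}{\partial \theta_i}
      = -2\,(\mathrm{QueryProb} - \mathrm{BDDProb})\cdot\mathrm{GradValue},

which is exactly the increment accumulated by GN is G0-GradValue*2*(QueryProb-BDDProb).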

View File

@@ -1335,7 +1335,7 @@ lineSearch(Final_X,Final_Value) :-
 	line_search_evaluate_point(InitLeft,Value_InitLeft),
-	Parameters=ls(A,B,InitLeft,InitRight,Value_A,Value_B,Value_InitLeft,Value_InitRight,1), i
+	Parameters=ls(A,B,InitLeft,InitRight,Value_A,Value_B,Value_InitLeft,Value_InitRight,1),
 	%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
 	%%%% BEGIN BACK TRACKING

View File

@@ -357,11 +357,11 @@ available it tries reconsulting the source file.
 */
 qload_module(Mod) :-
-	( current_prolog_flag(verbose_load, false)
+	( current_prolog_flag(verbose_load, true)
 	->
-	  Verbosity = silent
-	;
 	  Verbosity = informational
+	;
+	  current_prolog_flag(verbose_load, Verbosity)
 	),
 	StartMsg = loading_module,
 	EndMsg = module_loaded,
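Note: with the corrected test, qload_module/1 emits its loading_module/module_loaded messages at the informational level only when verbose_load is explicitly true, and otherwise reuses whatever value the flag holds as the message kind. A minimal usage sketch, assuming verbose_load is writable via set_prolog_flag/2 (the module name is a placeholder, not part of the patch):

    % hedged sketch: load a pre-compiled module with progress reported
    ?- set_prolog_flag(verbose_load, true),
       qload_module(my_precompiled_module).   % emits informational load messages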