Newest ProbLog version

Theofrastos Mantadelis 2010-12-16 14:30:50 +01:00
parent 9a45897308
commit a442d888de
24 changed files with 739 additions and 757 deletions

View File

@@ -707,6 +707,7 @@ bdd_optimization(N,EV,Decisions,Status) :-
 %(problog_flag(verbose,true) -> Debug = ' -d';Debug = ''), % messes up result parsing
 atomic_concat([ProblogBDD, Param, ' -l ',BDDFile,' -i ',BDDParFile,' -u ',UtilFile,' -m s',LocalPar,Forest,' -t ', BDDTime,' > ', ResultFile],Command),
 statistics(walltime,_),
+% format(user,'$ ~w~n',[Command]),
 shell(Command,Return),
 (Return =\= 0 ->
     Status = timeout

View File

@@ -2,8 +2,8 @@
 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
 %
-% $Date: 2010-11-23 11:47:48 +0100 (Tue, 23 Nov 2010) $
-% $Revision: 5027 $
+% $Date: 2010-12-15 11:12:48 +0100 (Wed, 15 Dec 2010) $
+% $Revision: 5138 $
 %
 % This file is part of ProbLog
 % http://dtai.cs.kuleuven.be/problog
@@ -350,16 +350,13 @@
 :- use_module('problog/mc_DNF_sampling').
 :- use_module('problog/timer').
 :- use_module('problog/utils').
-:- catch(use_module('problog/ad_converter'),_,true).
+:- use_module('problog/ad_converter').
 :- catch(use_module('problog/variable_elimination'),_,true).
 % op attaching probabilities to facts
 :- op( 550, yfx, :: ).
 :- op( 550, fx, ?:: ).
-% for annotated disjunctions
-% :- op(1149, yfx, <-- ).
 %%%%%%%%%%%%%%%%%%%%%%%%
 % control predicates on various levels
 %%%%%%%%%%%%%%%%%%%%%%%%
@@ -509,7 +506,7 @@ problog_dir(PD):- problog_path(PD).
 %%%%%%%%%%%%%%%%%%%%%%%%
 init_global_params :-
-    %grow_atom_table(1000000),
+    grow_atom_table(1000000), % this will reserve us some memory, there are cases where you might need more
 %%%%%%%%%%%%
 % working directory: all the temporary and output files will be located there
@@ -591,9 +588,9 @@ generate_atoms(N, A):-
 % dynamic predicate problog_predicate(Name,Arity) keeps track of predicates that already have wrapper clause
 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-% converts annotated disjunctions - if loaded
-term_expansion_intern(A, B, C):-
-    catch(term_expansion_intern_ad(A, B, C), _, false).
+% converts annotated disjunctions
+term_expansion_intern(A, Module, C):-
+    term_expansion_intern_ad(A, Module, C).
 % converts ?:: prefix to ? :: infix, as handled by other clause
 term_expansion_intern((Annotation::Fact), Module, ExpandedClause) :-
@@ -667,13 +664,12 @@ term_expansion_intern(Head :: Goal,Module,problog:ProbFact) :-
      )
     ),
-    copy_term(((X,Distribution) :: Goal), ((X2,Distribution2) :: Goal2)),
     % bind_the_variable
-    X2=Distribution2,
+    X=Distribution,
     % find position in term
-    Goal2=..[Name|Args],
-    once(nth1(Pos,Args,Distribution2)),
+    Goal=..[Name|Args],
+    once(nth1(Pos,Args,Distribution)),
     length(Args,Arity),
     atomic_concat([problogcontinuous_,Name],ProblogName),
@@ -1165,31 +1161,20 @@ set_continuous_fact_parameters(ID,Parameters) :-
 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-% writing those facts with learnable parameters to File
+% writing all probabilistic and continuous facts to Filename
 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
 export_facts(Filename) :-
     open(Filename,'write',Handle),
-    %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-    ( % go over all probabilistic facts
-      P::Goal,
-      format(Handle,'~w :: ~q.~n',[P,Goal]),
-      fail; % go to next prob. fact
-      true
-    ),
-    %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-    %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-    ( % go over all continuous facts
-      continuous_fact(ID),
-      get_continuous_fact_parameters(ID,Param),
-      format(Handle,'~q. % ~q~n',[Param,ID]),
-      fail; % go to next cont. fact
-      true
+    forall(P::Goal,
+           format(Handle,'~10f :: ~q.~n',[P,Goal])),
+    forall(continuous_fact(ID),
+           (
+             get_continuous_fact_parameters(ID,Param),
+             format(Handle,'~q. % ~q~n',[Param,ID])
+           )
     ),
-    %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
     close(Handle).
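With the ~10f directive, the rewritten export_facts/1 writes probabilities in fixed-point notation. For illustration only (edge/2 is a made-up example fact, not part of ProbLog), a fact 0.3::edge(1,2) would be exported as:

    0.3000000000 :: edge(1,2).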
@@ -1946,13 +1931,11 @@ disjoin_hybrid_proofs([GroundID|T]) :-
     intervals_partition(Intervals,Partition),
     % go over all proofs where this fact occurs
+    forall(hybrid_proof(ProofID,ID,GroundID,Interval),
     (
-      hybrid_proof(ProofID,ID,GroundID,Interval),
       intervals_disjoin(Interval,Partition,PInterval),
-      assertz(hybrid_proof_disjoint(ProofID,ID,GroundID,PInterval)),
-      fail; % go to next proof
-      true
+      assertz(hybrid_proof_disjoint(ProofID,ID,GroundID,PInterval))
+    )
     ),
     disjoin_hybrid_proofs(T).
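Both rewrites above replace the classic failure-driven loop with forall/2. A minimal sketch of the pattern (standard Prolog; print_all/1 is only illustrative and member/2 comes from library(lists)):

    % old style:  ( Cond, Action, fail ; true )   % always succeeds
    % new style:  forall(Cond, Action)            % i.e. \+ (Cond, \+ Action); fails if Action ever fails
    print_all(List) :-
        forall(member(X, List), format('~q~n', [X])).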
@@ -1996,6 +1979,7 @@ problog_low(_, _, LP, Status) :-
     timer_stop(sld_time,SLD_Time),
     problog_var_set(sld_time, SLD_Time),
     nb_getval(problog_completed_proofs, Trie_Completed_Proofs),
+    %print_nested_ptree(Trie_Completed_Proofs),
     eval_dnf(Trie_Completed_Proofs, LP, Status),
     (problog_flag(verbose, true)->
         problog_statistics

View File

@@ -2,8 +2,8 @@
 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
 %
-% $Date: 2010-10-13 17:09:47 +0200 (Wed, 13 Oct 2010) $
-% $Revision: 4915 $
+% $Date: 2010-12-13 18:15:14 +0100 (Mon, 13 Dec 2010) $
+% $Revision: 5125 $
 %
 % This file is part of ProbLog
 % http://dtai.cs.kuleuven.be/problog
@@ -214,51 +214,64 @@
 ]).
 % general yap modules
-:- use_module(library(lists),[reverse/2]).
+:- use_module(library(lists),[reverse/2,member/2,append/3]).
+:- use_module(flags).
 :- style_check(all).
 :- yap_flag(unknown,error).
+:- discontiguous user:ad_intern/2.
 :- op( 550, yfx, :: ).
 % for annotated disjunctions
 :- op(1149, yfx, <-- ).
+:- initialization(problog_define_flag(show_ad_compilation,problog_flag_validate_boolean,'show compiled code for ADs',false,annotated_disjunctions)).
+:- initialization(problog_define_flag(ad_cpl_semantics,problog_flag_validate_boolean,'use CP-logics semantics for ADs',true,annotated_disjunctions)).
 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
 %
 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
 term_expansion_intern_ad( (Head<--Body),Module,ad_intern((Head<--Body),ID)) :-
-    proper_tunable_annotated_disjunction(Head),
-    !,
-    compile_tunable_annotated_disjunction(Head,Body,Facts,Bodies,ID),
-    assert_all_ad_facts(Facts,Module),
-    assert_all_ad_bodies(Bodies,Module).
-term_expansion_intern_ad( (Head<--Body),Module,ad_intern((Head<--Body),ID)) :-
+    problog_flag(ad_cpl_semantics,AD_CPL_Semantics),
+    (
+      proper_tunable_annotated_disjunction(Head)
+    ->
+      compile_tunable_annotated_disjunction(Head,Body,Facts,Bodies,ID,AD_CPL_Semantics);
+      (
        proper_annotated_disjunction(Head),
-    !,
-    compile_annotated_disjunction(Head,Body,Facts,Bodies,ID),
-    assert_all_ad_facts(Facts,Module),
-    assert_all_ad_bodies(Bodies,Module).
+        compile_annotated_disjunction(Head,Body,Facts,Bodies,ID,AD_CPL_Semantics)
+      )
+    ),
+    forall(member(F,Facts),(once(problog:term_expansion_intern(F,Module,Atom)),
+                            assertz(problog:Atom))),
+    forall(member(B,Bodies),assertz(Module:B)),
+    problog_flag(show_ad_compilation,Show_AD_compilation),
+    (
+      Show_AD_compilation==true
+    ->
+      (
+        format('Compiling the annotated disjunction~n ~q~ninto the following code~n',[(Head<--Body)]),
+        format('================================================~n',[]),
+        forall(member(F,Facts),format(' ~q.~n',[F])),
+        format(' - - - - - - - - - - - - - - - - - - - - - - ~n',[]),
+        forall(member(B,Bodies),format(' ~q.~n',[B])),
+        format('================================================~2n',[])
+      );
+      true
+    ).
 term_expansion_intern_ad( (Head<--Body),_,_) :-
     format_to_chars('Error at compiling the annotated disjunction ~q<--Body.',[Head,Body],Error),
     print_message(error,Error),
     fail.
-%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-%
-%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-assert_all_ad_facts([],_).
-assert_all_ad_facts([F|T],Module) :-
-    once(problog:term_expansion_intern(F,Module,Atom)),
-    assertz(problog:Atom),
-    assert_all_ad_facts(T,Module).
-assert_all_ad_bodies([],_).
-assert_all_ad_bodies([B|T],Module) :-
-    assertz(Module:B),
-    assert_all_ad_bodies(T,Module).
 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
 %
@@ -303,22 +316,36 @@ proper_tunable_annotated_disjunction((X;Y)) :-
 %
 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-compile_tunable_annotated_disjunction(Head,Body,Facts2,Bodies2,Extra_ID) :-
+compile_tunable_annotated_disjunction(Head,Body,Facts2,Bodies2,Extra_ID,AD_CPL_Semantics) :-
     get_next_unique_id(Extra_ID),
-    convert_a_tunable(Head,Extra_ID,[],Facts),
-    convert_b(Head,Body,_NewBody,Extra_ID,[],Bodies),
+    (
+      AD_CPL_Semantics==true
+    ->
+      term_variables(Body,Body_Vars);
+      Body_Vars=[]
+    ),
+    convert_a_tunable(Head,Extra_ID,[],Facts,Body_Vars),
+    convert_b(Head,Body,_NewBody,Extra_ID,[],Bodies,Body_Vars),
     reverse(Facts,Facts2),
     reverse(Bodies,Bodies2).
 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
 %
 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-compile_annotated_disjunction(Head,Body,Facts2,Bodies2,Extra_ID) :-
+compile_annotated_disjunction(Head,Body,Facts2,Bodies2,Extra_ID,AD_CPL_Semantics) :-
     get_next_unique_id(Extra_ID),
-    convert_a(Head,0.0,_Acc,Extra_ID,[],Facts),
-    convert_b(Head,Body,_NewBody,Extra_ID,[],Bodies),
+    (
+      AD_CPL_Semantics==true
+    ->
+      term_variables(Body,Body_Vars);
+      Body_Vars=[]
+    ),
+    convert_a(Head,0.0,_Acc,Extra_ID,[],Facts,Body_Vars),
+    convert_b(Head,Body,_NewBody,Extra_ID,[],Bodies,Body_Vars),
     reverse(Facts,Facts2),
     reverse(Bodies,Bodies2).
@@ -326,16 +353,17 @@ compile_annotated_disjunction(Head,Body,Facts2,Bodies2,Extra_ID) :-
 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
 %
 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-convert_a((X;Y),OldAcc,Acc,Extra_ID,OldFacts,Facts) :-
-    convert_a(X,OldAcc,NewAcc,Extra_ID,OldFacts,NewFacts),
-    convert_a(Y,NewAcc,Acc,Extra_ID,NewFacts,Facts).
-convert_a(P::Atom,OldAcc,NewAcc,Extra_ID,OldFacts,[P1::ProbFact|OldFacts]) :-
+convert_a((X;Y),OldAcc,Acc,Extra_ID,OldFacts,Facts,Body_Vars) :-
+    convert_a(X,OldAcc,NewAcc,Extra_ID,OldFacts,NewFacts,Body_Vars),
+    convert_a(Y,NewAcc,Acc,Extra_ID,NewFacts,Facts,Body_Vars).
+convert_a(P::Atom,OldAcc,NewAcc,Extra_ID,OldFacts,[P1::ProbFact|OldFacts],Body_Vars) :-
     Atom =.. [Functor|AllArguments],
+    append(AllArguments,Body_Vars,NewAllArguments),
     length(AllArguments,Arity),
     atomic_concat([mvs_fact_,Functor,'_',Arity,'_',Extra_ID],NewAtom),
-    ProbFact =.. [NewAtom|AllArguments],
+    ProbFact =.. [NewAtom|NewAllArguments],
     (
      P=:=0
    ->
@@ -351,29 +379,31 @@ convert_a(P::Atom,OldAcc,NewAcc,Extra_ID,OldFacts,[P1::ProbFact|OldFacts]) :-
 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
 %
 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-convert_a_tunable((X;Y),Extra_ID,OldFacts,Facts) :-
-    convert_a_tunable(X,Extra_ID,OldFacts,NewFacts),
-    convert_a_tunable(Y,Extra_ID,NewFacts,Facts).
-convert_a_tunable(P::Atom,Extra_ID,OldFacts,[P::ProbFact|OldFacts]) :-
+convert_a_tunable((X;Y),Extra_ID,OldFacts,Facts,Body_Vars) :-
+    convert_a_tunable(X,Extra_ID,OldFacts,NewFacts,Body_Vars),
+    convert_a_tunable(Y,Extra_ID,NewFacts,Facts,Body_Vars).
+convert_a_tunable(P::Atom,Extra_ID,OldFacts,[P::ProbFact|OldFacts],Body_Vars) :-
     Atom =.. [Functor|AllArguments],
+    append(AllArguments,Body_Vars,NewAllArguments),
     length(AllArguments,Arity),
     atomic_concat([mvs_fact_,Functor,'_',Arity,'_',Extra_ID],NewAtom),
-    ProbFact =.. [NewAtom|AllArguments].
+    ProbFact =.. [NewAtom|NewAllArguments].
 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
 %
 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-convert_b((X;Y),OldBody,Body,ExtraID,OldBodies,Bodies) :-
-    convert_b(X,OldBody,NewBody,ExtraID,OldBodies,NewBodies),
-    convert_b(Y,NewBody,Body,ExtraID,NewBodies,Bodies).
-convert_b(_::Atom,OldBody,NewBody,Extra_ID,OldBodies,[(Atom:-ThisBody)|OldBodies]) :-
+convert_b((X;Y),OldBody,Body,ExtraID,OldBodies,Bodies,Body_Vars) :-
+    convert_b(X,OldBody,NewBody,ExtraID,OldBodies,NewBodies,Body_Vars),
+    convert_b(Y,NewBody,Body,ExtraID,NewBodies,Bodies,Body_Vars).
+convert_b(_::Atom,OldBody,NewBody,Extra_ID,OldBodies,[(Atom:-ThisBody)|OldBodies],Body_Vars) :-
     Atom =.. [Functor|AllArguments],
+    append(AllArguments,Body_Vars,NewAllArguments),
     length(AllArguments,Arity),
     atomic_concat([mvs_fact_,Functor,'_',Arity,'_',Extra_ID],NewFunctor),
-    ProbFact =.. [NewFunctor|AllArguments],
+    ProbFact =.. [NewFunctor|NewAllArguments],
     tuple_append(OldBody,ProbFact,ThisBody),
     tuple_append(OldBody,problog_not(ProbFact),NewBody).
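For illustration only (the mvs_fact_* counter and the variable layout below are assumed, not taken from an actual run): with ad_cpl_semantics enabled, a tunable annotated disjunction such as

    t(_)::a(X) ; t(_)::b(X) <-- c(X,Y).

is compiled into probabilistic facts whose arguments are extended with the body variables, plus one body clause per disjunct, roughly along these lines:

    t(_)::mvs_fact_a_1_1(X,X,Y).
    t(_)::mvs_fact_b_1_1(X,X,Y).
    a(X) :- c(X,Y), mvs_fact_a_1_1(X,X,Y).
    b(X) :- c(X,Y), problog_not(mvs_fact_a_1_1(X,X,Y)), mvs_fact_b_1_1(X,X,Y).

Setting the new show_ad_compilation flag to true prints the generated facts and bodies at load time.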

View File

@@ -2,8 +2,8 @@
 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
 %
-% $Date: 2010-09-28 21:04:43 +0200 (Tue, 28 Sep 2010) $
-% $Revision: 4838 $
+% $Date: 2010-12-02 15:20:15 +0100 (Thu, 02 Dec 2010) $
+% $Revision: 5043 $
 %
 % This file is part of ProbLog
 % http://dtai.cs.kuleuven.be/problog

View File

@@ -2,8 +2,8 @@
 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
 %
-% $Date: 2010-09-28 21:04:43 +0200 (Tue, 28 Sep 2010) $
-% $Revision: 4838 $
+% $Date: 2010-12-02 15:20:15 +0100 (Thu, 02 Dec 2010) $
+% $Revision: 5043 $
 %
 % This file is part of ProbLog
 % http://dtai.cs.kuleuven.be/problog

View File

@@ -2,8 +2,8 @@
 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
 %
-% $Date: 2010-09-28 21:04:43 +0200 (Tue, 28 Sep 2010) $
-% $Revision: 4838 $
+% $Date: 2010-12-02 15:20:15 +0100 (Thu, 02 Dec 2010) $
+% $Revision: 5043 $
 %
 % This file is part of ProbLog
 % http://dtai.cs.kuleuven.be/problog

View File

@@ -2,8 +2,8 @@
 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
 %
-% $Date: 2010-09-28 21:04:43 +0200 (Tue, 28 Sep 2010) $
-% $Revision: 4838 $
+% $Date: 2010-12-02 15:20:15 +0100 (Thu, 02 Dec 2010) $
+% $Revision: 5043 $
 %
 % This file is part of ProbLog
 % http://dtai.cs.kuleuven.be/problog

View File

@@ -2,8 +2,8 @@
 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
 %
-% $Date: 2010-09-28 21:04:43 +0200 (Tue, 28 Sep 2010) $
-% $Revision: 4838 $
+% $Date: 2010-12-02 15:20:15 +0100 (Thu, 02 Dec 2010) $
+% $Revision: 5043 $
 %
 % This file is part of ProbLog
 % http://dtai.cs.kuleuven.be/problog

View File

@@ -2,8 +2,8 @@
 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
 %
-% $Date: 2010-10-11 14:14:11 +0200 (Mon, 11 Oct 2010) $
-% $Revision: 4892 $
+% $Date: 2010-12-02 15:20:15 +0100 (Thu, 02 Dec 2010) $
+% $Revision: 5043 $
 %
 % This file is part of ProbLog
 % http://dtai.cs.kuleuven.be/problog

View File

@@ -2,8 +2,8 @@
 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
 %
-% $Date: 2010-09-28 21:04:43 +0200 (Tue, 28 Sep 2010) $
-% $Revision: 4838 $
+% $Date: 2010-12-02 15:20:15 +0100 (Thu, 02 Dec 2010) $
+% $Revision: 5043 $
 %
 % This file is part of ProbLog
 % http://dtai.cs.kuleuven.be/problog

View File

@@ -2,8 +2,8 @@
 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
 %
-% $Date: 2010-11-03 19:13:53 +0100 (Wed, 03 Nov 2010) $
-% $Revision: 4986 $
+% $Date: 2010-12-16 13:33:43 +0100 (Thu, 16 Dec 2010) $
+% $Revision: 5156 $
 %
 % This file is part of ProbLog
 % http://dtai.cs.kuleuven.be/problog
@@ -211,9 +211,25 @@
 :- module(nestedtries, [nested_trie_to_depth_breadth_trie/4]).
-:- use_module(library(ordsets), [list_to_ord_set/2, ord_subset/2]). % this two might be better to do a custom fast implementation
-:- use_module(library(lists), [memberchk/2, delete/3]).
-:- use_module(library(tries), [trie_to_depth_breadth_trie/6, trie_get_depth_breadth_reduction_entry/1, trie_dup/2, trie_close/1, trie_open/1, trie_replace_nested_trie/3, trie_remove_entry/1, trie_get_entry/2, trie_put_entry/3, trie_traverse/2]).
+:- use_module(library(ordsets), [list_to_ord_set/2,
+                                 ord_subset/2,
+                                 ord_union/3,
+                                 ord_intersection/3]).
+:- use_module(library(lists), [append/3,
+                               memberchk/2,
+                               delete/3]).
+:- use_module(library(tries), [trie_to_depth_breadth_trie/6,
+                               trie_get_depth_breadth_reduction_entry/1,
+                               trie_dup/2,
+                               trie_close/1,
+                               trie_open/1,
+                               trie_replace_nested_trie/3,
+                               trie_remove_entry/1,
+                               trie_get_entry/2,
+                               trie_put_entry/3,
+                               trie_traverse/2,
+                               trie_traverse_mode/1,
+                               trie_usage/4]).
 :- use_module(flags, [problog_define_flag/5, problog_flag/2]).
@@ -221,18 +237,21 @@
 :- yap_flag(unknown,error).
 :- initialization((
-% problog_define_flag(subset_check, problog_flag_validate_boolean, 'perform subset check in nested tries', true, nested_tries),
-  problog_define_flag(loop_refine_ancs, problog_flag_validate_boolean, 'refine ancestors if no loop exists', true, nested_tries)
-% problog_define_flag(trie_preprocess, problog_flag_validate_boolean, 'perform a preprocess step to nested tries', false, nested_tries),
-% problog_define_flag(refine_anclst, problog_flag_validate_boolean, 'refine the ancestor list with their childs', false, nested_tries),
-% problog_define_flag(anclst_represent,problog_flag_validate_in_list([list, integer]), 'represent the ancestor list', list, nested_tries)
+  problog_define_flag(subset_check, problog_flag_validate_boolean, 'perform subset check in nested tries', true, nested_tries),
+  problog_define_flag(loop_refine_ancs, problog_flag_validate_boolean, 'refine ancestors if no loop exists', true, nested_tries),
+  problog_define_flag(trie_preprocess, problog_flag_validate_boolean, 'perform a preprocess step to nested tries', false, nested_tries),
+  problog_define_flag(refine_anclst, problog_flag_validate_boolean, 'refine the ancestor list with their childs', false, nested_tries),
+  problog_define_flag(anclst_represent,problog_flag_validate_in_list([list, integer]), 'represent the ancestor list', list, nested_tries)
 )).
-trie_replace_entry(_Trie, Entry, _E, false):-
-    !, trie_remove_entry(Entry).
+trie_replace_entry(_Trie, Entry, E, false):-
+    trie_get_entry(Entry, Proof),
+    memberchk(E, Proof), !,
+    trie_remove_entry(Entry).
 trie_replace_entry(Trie, Entry, E, true):-
-    !, trie_get_entry(Entry, Proof),
+    trie_get_entry(Entry, Proof),
+    memberchk(E, Proof), !,
     delete(Proof, E, NewProof),
     (NewProof == [] ->
         trie_delete(Trie),
@@ -275,6 +294,12 @@ is_label(Label, ID):-
     Label = not(NestedLabel),
     is_label(NestedLabel, ID).
+simplify(not(false), true):- !.
+simplify(not(true), false):- !.
+simplify(not(not(A)), B):-
+    !, simplify(A, B).
+simplify(A, A).
 % Ancestor related stuff
 initialise_ancestors(0):-
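The new simplify/2 helper strips double negation and constant negations from labels; for example:

    ?- simplify(not(not(t(3))), L).
    L = t(3).
    ?- simplify(not(true), L).
    L = false.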
@@ -289,6 +314,13 @@ add_to_ancestors(ID, Ancestors, NewAncestors):-
     is_list(Ancestors),
     list_to_ord_set([ID|Ancestors], NewAncestors).
+ancestors_union(Ancestors1, Ancestors2, NewAncestors):-
+    integer(Ancestors1), !,
+    NewAncestors is Ancestors1 \/ Ancestors2.
+ancestors_union(Ancestors1, Ancestors2, NewAncestors):-
+    is_list(Ancestors1),
+    ord_union(Ancestors1, Ancestors2, NewAncestors).
 ancestor_subset_check(SubAncestors, Ancestors):-
     integer(SubAncestors), !,
     SubAncestors is Ancestors /\ SubAncestors.
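Ancestor sets are kept either as integer bit masks or as ordered lists (cf. the anclst_represent flag), and ancestors_union/3 handles both representations. Illustrative queries:

    ?- ancestors_union(5, 3, A).          % bit-mask representation: 0b101 \/ 0b011
    A = 7.
    ?- ancestors_union([1,3], [2,3], A).  % ordered-list representation
    A = [1,2,3].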
@@ -302,17 +334,61 @@ ancestor_loop_refine(Loop, Ancestors, []):-
     var(Loop), is_list(Ancestors), !.
 ancestor_loop_refine(true, Ancestors, Ancestors).
+ancestor_child_refine(true, Ancestors, Childs, NewAncestors):-
+    integer(Ancestors), !,
+    NewAncestors is Ancestors /\ Childs.
+ancestor_child_refine(true, Ancestors, Childs, NewAncestors):-
+    is_list(Ancestors), !,
+    ord_intersection(Ancestors, Childs, NewAncestors).
+ancestor_child_refine(false, Ancestors, _, Ancestors).
 % Cycle check related stuff
 % missing synonym check
 cycle_check(ID, Ancestors):-
+    get_negated_synonym_id(ID, SynID),
+    cycle_check_intern(SynID, Ancestors).
+cycle_check_intern(ID, Ancestors):-
     integer(Ancestors), !,
     Bit is 1 << (ID - 1),
     Bit is Bit /\ Ancestors.
-cycle_check(ID, Ancestors):-
+cycle_check_intern(ID, Ancestors):-
     is_list(Ancestors),
     memberchk(ID, Ancestors).
+get_negated_synonym_id(ID, ID).
+get_negated_synonym_id(ID, NegID):-
+    tabling:has_synonyms,
+    recorded(problog_table, store(Pred, ID, _, _, _), _),
+    Pred =.. [Name0|Args],
+    atomic_concat(problog_, Name1, Name0),
+    atomic_concat(Name, '_original', Name1),
+    get_negated_name(Name, NotName1),
+    atomic_concat([problog_, NotName1, '_original'], NotName),
+    NegPred =.. [NotName|Args],
+    recorded(problog_table, store(NegPred, NegID, _, _, _), _).
+get_negated_name(Name, NotName1):-
+    recorded(problog_table_synonyms, negated(Name, NotName1), _), !.
+get_negated_name(Name, NotName1):-
+    recorded(problog_table_synonyms, negated(NotName1, Name), _).
+trie_dup_reverse(Trie, DupTrie):-
+    trie_open(DupTrie),
+    trie_traverse_mode(backward),
+    trie_dup_rev(Trie, DupTrie),
+    trie_traverse_mode(forward).
+trie_dup_rev(Trie, DupTrie):-
+    \+ trie_usage(Trie, 0, 0, 0),
+    trie_traverse(Trie, Entry),
+    trie_get_entry(Entry, Term),
+    trie_put_entry(DupTrie, Term, _),
+    fail.
+trie_dup_rev(_, _).
 preprocess(Index, DepthBreadthTrie, OptimizationLevel, StartCount, FinalEndCount):-
     problog:problog_chktabled(Index, Trie), !,
     trie_dup(Trie, CopyTrie),
@@ -332,7 +408,7 @@ make_nested_trie_base_cases(Trie, t(ID), DepthBreadthTrie, OptimizationLevel, St
     make_nested_trie_base_cases(Trie, t(ID), DepthBreadthTrie, OptimizationLevel, EndCount, FinalEndCount, NewAncestors)
   ;
     FinalEndCount = EndCount,
-    get_set_trie(ID, Label, Ancestors)
+    set_trie(ID, Label, Ancestors)
   ).
 nested_trie_to_depth_breadth_trie(Trie, DepthBreadthTrie, FinalLabel, OptimizationLevel):-
@@ -344,80 +420,69 @@ nested_trie_to_depth_breadth_trie(Trie, DepthBreadthTrie, FinalLabel, Optimizati
     StartCount = 0
   ),
   initialise_ancestors(Ancestors),
+  % initialise_ancestors(Childs),
   (problog_flag(loop_refine_ancs, true) ->
-    trie_2_dbtrie_init(Trie, DepthBreadthTrie, OptimizationLevel, StartCount, _, Ancestors, FinalLabel, _)
+    trie_2_dbtrie_init(Trie, DepthBreadthTrie, OptimizationLevel, StartCount, _, Ancestors, FinalLabel, _, _Childs)
   ;
-    trie_2_dbtrie_init(Trie, DepthBreadthTrie, OptimizationLevel, StartCount, _, Ancestors, FinalLabel, true)
+    trie_2_dbtrie_init(Trie, DepthBreadthTrie, OptimizationLevel, StartCount, _, Ancestors, FinalLabel, true, _Childs)
   ),
   eraseall(problog_trie_table).
-trie_2_dbtrie_init(ID, DepthBreadthTrie, OptimizationLevel, StartCount, EndCount, Ancestors, Label, ContainLoop):-
+trie_2_dbtrie_init(ID, DepthBreadthTrie, OptimizationLevel, StartCount, EndCount, Ancestors, Label, ContainLoop, FinalChilds):-
+  initialise_ancestors(Childs),
   get_trie_pointer(ID, Trie),
-  trie_dup(Trie, CopyTrie),
-  trie_2_dbtrie_intern(CopyTrie, DepthBreadthTrie, OptimizationLevel, StartCount, EndCount, Ancestors, Label, ContainLoop),
+  trie_dup_reverse(Trie, CopyTrie),
+  trie_2_dbtrie_intern(CopyTrie, DepthBreadthTrie, OptimizationLevel, StartCount, EndCount, Ancestors, Label, ContainLoop, Childs, FinalChilds),
  trie_close(CopyTrie).
-trie_2_dbtrie_intern(Trie, DepthBreadthTrie, OptimizationLevel, StartCount, FinalEndCount, Ancestors, TrieLabel, ContainLoop):-
+trie_2_dbtrie_intern(Trie, DepthBreadthTrie, OptimizationLevel, StartCount, FinalEndCount, Ancestors, TrieLabel, ContainLoop, Childs, FinalChilds):-
  trie_to_depth_breadth_trie(Trie, DepthBreadthTrie, Label, OptimizationLevel, StartCount, EndCount),
-  (is_trie(Label, ID) -> % Label might have issues with negation
+  (is_trie(Label, ID) ->
+    problog_flag(refine_anclst, ChildRefineAncestors),
    trie_get_depth_breadth_reduction_entry(NestedEntry),
-    % check if Trie introduces a loop
    (cycle_check(ID, Ancestors) ->
      ContainLoop = true,
      NewLabel = false,
+      NewEndCount = EndCount,
+      initialise_ancestors(GrandChilds)
+    ; get_trie(ID, NewLabel, Ancestors) ->
+      GrandChilds = Ancestors,
      NewEndCount = EndCount
    ;
-      % check if Trie is resolved and extract it
-      (get_set_trie(ID, NewLabel, Ancestors) ->
-        NewEndCount = EndCount
-      ;
-        % calculate the nested trie
-        add_to_ancestors(ID, Ancestors, NewAncestors), % to be able to support 2 representations
-        trie_2_dbtrie_init(ID, DepthBreadthTrie, OptimizationLevel, EndCount, NewEndCount, NewAncestors, NewLabel, NewContainLoop),
-        ancestor_loop_refine(NewContainLoop, Ancestors, RefinedAncestors),
-        get_set_trie(ID, NewLabel, RefinedAncestors),
+      add_to_ancestors(ID, Ancestors, NewAncestors),
+      trie_2_dbtrie_init(ID, DepthBreadthTrie, OptimizationLevel, EndCount, NewEndCount, NewAncestors, DerefLabel, NewContainLoop, GrandChilds),
+      ancestor_loop_refine(NewContainLoop, Ancestors, RefinedAncestors1),
+      ancestor_child_refine(ChildRefineAncestors, RefinedAncestors1, GrandChilds, RefinedAncestors),
+      simplify(DerefLabel, NewLabel),
+      set_trie(ID, NewLabel, RefinedAncestors),
       ContainLoop = NewContainLoop
-      )
    ),
-    trie_replace_entry(Trie, NestedEntry, t(ID), NewLabel), % should be careful to verify that it works also with not(t(ID))
-    trie_2_dbtrie_intern(Trie, DepthBreadthTrie, OptimizationLevel, NewEndCount, FinalEndCount, Ancestors, TrieLabel, ContainLoop)
+    trie_replace_entry(Trie, NestedEntry, t(ID), NewLabel),
+    (ChildRefineAncestors ->
+      add_to_ancestors(ID, Childs, NewChilds1),
+      ancestors_union(NewChilds1, GrandChilds, NewChilds)
+    ;
+      NewChilds = Childs
+    ),
+    trie_2_dbtrie_intern(Trie, DepthBreadthTrie, OptimizationLevel, NewEndCount, FinalEndCount, Ancestors, TrieLabel, ContainLoop, NewChilds, FinalChilds)
  ;
-    % else we can terminate and return
    FinalEndCount = EndCount,
-    TrieLabel = Label
+    TrieLabel = Label,
+    FinalChilds = Childs
  ).
 % predicate to check/remember resolved tries
-% no refiment of ancestor list included
 get_trie_pointer(ID, Trie):-
    problog:problog_chktabled(ID, Trie), !.
 get_trie_pointer(Trie, Trie).
-get_set_trie(Trie, Label, Ancestors):-
+get_trie(Trie, Label, Ancestors):-
+    problog_flag(subset_check, true), !,
     recorded(problog_trie_table, store(Trie, StoredAncestors, Label), _),
-    (problog_flag(subset_check, true) ->
-      ancestor_subset_check(StoredAncestors, Ancestors)
-    ;
-      StoredAncestors == Ancestors
-    ), !.
-get_set_trie(Trie, Label, Ancestors):-
-    ground(Label),
+    ancestor_subset_check(StoredAncestors, Ancestors).
+get_trie(Trie, Label, Ancestors):-
+    recorded(problog_trie_table, store(Trie, StoredAncestors, Label), _),
+    StoredAncestors == Ancestors.
+set_trie(Trie, Label, Ancestors):-
     recordz(problog_trie_table, store(Trie, Ancestors, Label), _).
-% chk_negated([H|T], ID):-
-%   simplify(H, not(t(ID))), !.
-% chk_negated([_|T], ID):-
-%   chk_negated(T, ID).
-/*
-chk_negated([], ID, ID).
-chk_negated([H|T], ID, not(ID)):-
-  simplify(H, not(t(ID))), !.
-chk_negated([H|T], ID, ID):-
-  simplify(H, t(ID)), !.
-chk_negated([_|T], ID, FID):-
-  chk_negated(T, ID, FID).*/

View File

@@ -2,8 +2,8 @@
 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
 %
-% $Date: 2010-11-29 10:58:04 +0100 (Mon, 29 Nov 2010) $
-% $Revision: 5029 $
+% $Date: 2010-12-02 15:20:15 +0100 (Thu, 02 Dec 2010) $
+% $Revision: 5043 $
 %
 % This file is part of ProbLog
 % http://dtai.cs.kuleuven.be/problog

View File

@@ -2,8 +2,8 @@
 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
 %
-% $Date: 2010-10-06 12:56:13 +0200 (Wed, 06 Oct 2010) $
-% $Revision: 4877 $
+% $Date: 2010-12-02 15:20:15 +0100 (Thu, 02 Dec 2010) $
+% $Revision: 5043 $
 %
 % This file is part of ProbLog
 % http://dtai.cs.kuleuven.be/problog

View File

@@ -2,8 +2,8 @@
 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
 %
-% $Date: 2010-09-28 21:04:43 +0200 (Tue, 28 Sep 2010) $
-% $Revision: 4838 $
+% $Date: 2010-12-02 15:20:15 +0100 (Thu, 02 Dec 2010) $
+% $Revision: 5043 $
 %
 % This file is part of ProbLog
 % http://dtai.cs.kuleuven.be/problog

View File

@@ -2,8 +2,8 @@
 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
 %
-% $Date: 2010-12-02 14:35:05 +0100 (Thu, 02 Dec 2010) $
-% $Revision: 5041 $
+% $Date: 2010-12-16 13:33:43 +0100 (Thu, 16 Dec 2010) $
+% $Revision: 5156 $
 %
 % This file is part of ProbLog
 % http://dtai.cs.kuleuven.be/problog
@@ -282,12 +282,7 @@
     problog_define_flag(use_naive_trie, problog_flag_validate_boolean, 'use the naive algorithm to generate bdd scripts', false),
     problog_define_flag(use_old_trie, problog_flag_validate_boolean, 'use the old trie 2 trie transformation no nested', true),
     problog_define_flag(use_dec_trie, problog_flag_validate_boolean, 'use the decomposition method', false),
-    problog_define_flag(subset_check, problog_flag_validate_boolean, 'perform subset check in nested tries', true),
-    problog_define_flag(deref_terms, problog_flag_validate_boolean, 'deref BDD terms after last use', false),
-    problog_define_flag(trie_preprocess, problog_flag_validate_boolean, 'perform a preprocess step to nested tries', false),
-    problog_define_flag(refine_anclst, problog_flag_validate_boolean, 'refine the ancestor list with their childs', false),
-    problog_define_flag(anclst_represent,problog_flag_validate_in_list([list, integer]), 'represent the ancestor list', list)
+    problog_define_flag(deref_terms, problog_flag_validate_boolean, 'deref BDD terms after last use', false)
 )).
@@ -1252,6 +1247,8 @@ add_to_vars(V):-
 variables_in_dbtrie(Trie, []):-
     empty_ptree(Trie), !.
+variables_in_dbtrie(Trie, []):-
+    trie_check_entry(Trie, [true], _R), !.
 variables_in_dbtrie(Trie, L):-
     all(V, variable_in_dbtrie(Trie,V), L).
@@ -1339,10 +1336,11 @@ is_state(false).
 nested_trie_to_bdd_trie(A, B, OutputFile, OptimizationLevel, FileParam):-
 % trie_nested_to_depth_breadth_trie(A, B, LL, OptimizationLevel, problog:problog_chktabled),
     nested_trie_to_depth_breadth_trie(A, B, LL, OptimizationLevel),
-    (is_label(LL) ->
+    simplify(LL, FLL),
+    (is_label(FLL) ->
         retractall(deref(_,_)),
         (problog_flag(deref_terms, true) ->
-            asserta(deref(LL,no)),
+            asserta(deref(FLL,no)),
             mark_for_deref(B),
             V = 3
         ;
@@ -1358,7 +1356,7 @@ nested_trie_to_bdd_trie(A, B, OutputFile, OptimizationLevel, FileParam):-
         write('@BDD'), write(V), nl,
         write(VarCNT), nl,
         write(0), nl,
-        (LL = not(NegL)->
+        (FLL = not(NegL)->
             atomic_concat('L', NegStep, NegL),
             number_atom(NegStepN, NegStep),
             InterStep is NegStepN + 1,
@@ -1368,19 +1366,18 @@ nested_trie_to_bdd_trie(A, B, OutputFile, OptimizationLevel, FileParam):-
             write(FL), write(' = ~'), write(NegL), nl,
             write(FL), nl
         ;
-            atomic_concat('L', InterStep, LL),
+            atomic_concat('L', InterStep, FLL),
             write(InterStep), nl,
-            trie_write(B, LL),
-            write(LL), nl
+            trie_write(B, FLL),
+            write(FLL), nl
         ),
         told
     ;
-        (is_state(LL) ->
+        (is_state(FLL) ->
            Edges = []
        ;
-           Edges = [LL]
+           Edges = [FLL]
        ),
-        writeln(Edges),
        tell(FileParam),
        simplify_list(Edges, SEdges),
        bdd_vars_script(SEdges),
@@ -1390,12 +1387,11 @@ nested_trie_to_bdd_trie(A, B, OutputFile, OptimizationLevel, FileParam):-
        write(1), nl,
        write(0), nl,
        write(1), nl,
-        (LL = not(_) ->
+        (FLL = not(_) ->
            write('L1 = ~')
        ;
            write('L1 = ')
        ),
-        simplify(LL, FLL),
        get_var_name(FLL, NLL),
        write(NLL),nl,
        write('L1'), nl,
@@ -1830,6 +1826,7 @@ ptree_decomposition(Trie, BDDFileName, VarFileName) :-
     tmpnam(TmpFile1),
     nb_setval(next_inter_step, 1),
     variables_in_dbtrie(Trie, T),
     length(T, VarCnt),
     tell(VarFileName),
     bdd_vars_script(T),
@@ -1860,6 +1857,9 @@ get_next_inter_step(I):-
 decompose_trie(Trie, _, false):-
     empty_ptree(Trie), !.
+decompose_trie(Trie, _, 'TRUE'):-
+    trie_check_entry(Trie, [true], _R),!.
 decompose_trie(Trie, [H|[]], Var):-
     trie_usage(Trie, 1, _, _),
     get_var_name(H, VarA),
@@ -1871,9 +1871,6 @@ decompose_trie(Trie, [H|[]], Var):-
     ),
     !.
-decompose_trie(Trie, _, 'TRUE'):-
-    trie_check_entry(Trie, [true], _R),!.
 decompose_trie(Trie, [H|_T], L3):-
     trie_open(TrieWith),
     trie_open(TrieWithNeg),

View File

@@ -2,8 +2,8 @@
 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
 %
-% $Date: 2010-09-28 21:04:43 +0200 (Tue, 28 Sep 2010) $
-% $Revision: 4838 $
+% $Date: 2010-12-02 15:20:15 +0100 (Thu, 02 Dec 2010) $
+% $Revision: 5043 $
 %
 % This file is part of ProbLog
 % http://dtai.cs.kuleuven.be/problog

View File

@@ -2,8 +2,8 @@
 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
 %
-% $Date: 2010-11-09 15:09:33 +0100 (Tue, 09 Nov 2010) $
-% $Revision: 4992 $
+% $Date: 2010-12-13 16:29:18 +0100 (Mon, 13 Dec 2010) $
+% $Revision: 5122 $
 %
 % This file is part of ProbLog
 % http://dtai.cs.kuleuven.be/problog
@@ -298,12 +298,6 @@ problog_table_next_index(Index):-
     NIndex is Index + 1,
     nb_setval(problog_tabling_next_index, NIndex).
-makeargs(0, []):-!.
-makeargs(N, [_Arg|L]):-
-    N > 0,
-    NN is N - 1,
-    makeargs(NN, L).
 problog_table(M:P) :- !,
     problog_table(P, M).
 problog_table(P) :-
@@ -316,12 +310,12 @@ problog_table((P1, P2), M) :-
     problog_table(P1, M),
     problog_table(P2, M).
 problog_table(Name/Arity, Module) :-
-    makeargs(Arity, Args),
+    length(Args,Arity),
     Head =.. [Name|Args],
     \+ predicate_property(Module:Head, dynamic), !,
     throw(error('problog_table: Problog tabling currently requires the predicate to be declared dynamic and compiles it to static.')).
 problog_table(Name/Arity, Module) :-
-    makeargs(Arity, Args),
+    length(Args,Arity),
     Head =.. [Name|Args],
     atom_concat(['problog_', Name, '_original'], OriginalName),
     atom_concat(['problog_', Name, '_mctabled'], MCName),
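The removed makeargs/2 helper is redundant because length/2 with an unbound list argument already builds a list of N fresh variables:

    ?- length(Args, 3).
    Args = [_A,_B,_C].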
@@ -413,8 +407,8 @@ problog_table(Name/Arity, Module) :-
     erase(Ref),
     (empty_ptree(HashTrie) ->
         recordz(problog_table, store(OriginalPred, Hash, HashTrie, SuspTrie, fail), _NRef),
-        delete_ptree(SuspTrie),
-        fail % no justification exists
+        delete_ptree(SuspTrie) %,
+        %fail % no justification exists
     ;
         recordz(problog_table, store(OriginalPred, Hash, HashTrie, SuspTrie, true), _NRef),
         merge_ptree(HashTrie, SuspTrie),

View File

@@ -2,8 +2,8 @@
 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
 %
-% $Date: 2010-10-15 17:09:55 +0200 (Fri, 15 Oct 2010) $
-% $Revision: 4939 $
+% $Date: 2010-12-02 15:20:15 +0100 (Thu, 02 Dec 2010) $
+% $Revision: 5043 $
 %
 % This file is part of ProbLog
 % http://dtai.cs.kuleuven.be/problog

View File

@@ -2,8 +2,8 @@
 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
 %
-% $Date: 2010-12-02 14:35:05 +0100 (Thu, 02 Dec 2010) $
-% $Revision: -1 $
+% $Date: 2010-12-15 15:52:58 +0100 (Wed, 15 Dec 2010) $
+% $Revision: 5144 $
 %
 % This file is part of ProbLog
 % http://dtai.cs.kuleuven.be/problog
@@ -211,20 +211,21 @@
     slice_n/4,
     sorted_overlap_test/2,
     prefix_bdd_file_with_header/4,
-    split_list/3]).
+    split_list/3,
+    succeeds_n_times/2,
+    sum_forall/3]).
 :- use_module(library(system), [delete_file/1]).
-% load swi library, at some point vitor will make swi_expand_file_name/2 a built-in
-:- load_foreign_files([libplstream], [], initIO).
 % load our own modules
 :- use_module(os).
+:- meta_predicate succeeds_n_times(0,?), sum_forall(?,:,?).
 %========================================================================
-%=
-%=
+%= deletes File, if it doesn't exists, it will succeed silently
+%= delete_file_silently(+File)
 %========================================================================
 delete_file_silently(File) :-
@@ -232,9 +233,10 @@ delete_file_silently(File) :-
     !.
 delete_file_silently(_).
 %========================================================================
-%=
-%=
+%= delete all the files in the list silently
+%= delete_files_silently(+List)
 %========================================================================
 delete_files_silently([]).
@@ -242,23 +244,25 @@ delete_files_silently([H|T]) :-
     delete_file_silently(H),
     delete_files_silently(T).
 %========================================================================
+%= delete all the files matching a certain pattern silently
+%= i.e. delete_file_pattern_silently('~/a_path/b_path/','*.txt')
 %=
-%=
+%= delete_file_pattern_silently(+Path,+Pattern)
 %========================================================================
 delete_file_pattern_silently(Path,Pattern) :-
     concat_path_with_filename(Path,Pattern,AbsolutePattern),
-    swi_expand_file_name(AbsolutePattern,Files),
+    expand_file_name(AbsolutePattern,Files),
     delete_files_silently(Files).
 %========================================================================
 %= Split a list into the first n elements and the tail
-%= +List +Integer -Prefix -Residuum
+%= slice_n(+List, +Integer, -Prefix, -Residuum)
 %========================================================================
 slice_n([],_,[],[]) :-
     !.
 slice_n([H|T],N,[H|T2],T3) :-
@@ -268,9 +272,11 @@ slice_n([H|T],N,[H|T2],T3) :-
     slice_n(T,N2,T2,T3).
 slice_n(L,0,[],L).
 %========================================================================
 %= succeeds if the variable V appears exactly once in the term T
 %========================================================================
 variable_in_term_exactly_once(T,V) :-
     term_variables(T,Vars),
     var_memberchk_once(Vars,V).
@@ -287,6 +293,7 @@ var_memberchk_none([H|T],V) :-
     var_memberchk_none(T,V).
 var_memberchk_none([],_).
 %========================================================================
 %= sorted_overlap_test(+L1,+L2)
 %= L1 and L2 are ground sorted lists
@@ -314,7 +321,7 @@ sorted_overlap_test([_|T1],[H2|T2]) :-
 prefix_bdd_file_with_header(BDD_File_Name,VarCount,IntermediateSteps,TmpFile) :-
     open(BDD_File_Name,write,H),
     % this is the header of the BDD script for problogbdd
-    format(H, '@BDD1~n~q~n0~n~q~n',[VarCount,IntermediateSteps]),
+    format(H, '@BDD1~n~w~n0~n~w~n',[VarCount,IntermediateSteps]),
     % append the content of the file TmpFile
     open(TmpFile,read,H2),
@@ -333,12 +340,15 @@ prefix_bdd_file_with_header(BDD_File_Name,VarCount,IntermediateSteps,TmpFile) :-
 %========================================================================
+%= Split the list L in the two lists L1 and L2 such that
+%= append(L1,L2,L) holds.
 %=
+%= if length of L is even, then L1 and L2 will have the same length
+%= if length of L is odd, then L1 will be one element longer than L2
 %=
-%=
+%= split_list(+L,-L1,-L2)
 %========================================================================
 split_list([],[],[]).
 split_list([H|T],L1,L2) :-
     length([H|T],Len),
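A usage example matching the documented behaviour (with an odd-length list the extra element stays in the first half):

    ?- split_list([a,b,c,d,e], L1, L2).
    L1 = [a,b,c],
    L2 = [d,e].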
@@ -350,3 +360,34 @@ split_list_intern(N,[H|T],[H|T1],L) :-
     N>0,
     N2 is N-1,
     split_list_intern(N2,T,T1,L).
+%========================================================================
+%= Counts how often Goal succeeds
+%= (taken from the YAP manual)
+%========================================================================
+succeeds_n_times(Goal, Times) :-
+    Counter = counter(0),
+    (   Goal,
+        arg(1, Counter, N0),
+        N is N0 + 1,
+        nb_setarg(1, Counter, N),
+        fail
+    ;   arg(1, Counter, Times)
+    ).
+%========================================================================
+%=
+%=
+%========================================================================
+sum_forall(X,Goal, Sum) :-
+    Temp = sum(0),
+    (   Goal,
+        arg(1, Temp, Sum0),
+        Sum is Sum0+X,
+        nb_setarg(1, Temp, Sum),
+        fail
+    ;   arg(1, Temp, Sum)
+    ).
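Usage examples for the two new helpers (member/2 from library(lists)):

    ?- succeeds_n_times(member(_, [a,b,c]), N).
    N = 3.
    ?- sum_forall(X, member(X, [1,2,3]), Sum).
    Sum = 6.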

View File

@@ -2,8 +2,8 @@
 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
 %
-% $Date: 2010-12-02 14:35:05 +0100 (Thu, 02 Dec 2010) $
-% $Revision: 5041 $
+% $Date: 2010-12-14 20:30:07 +0100 (Tue, 14 Dec 2010) $
+% $Revision: 5134 $
 %
 % This file is part of ProbLog
 % http://dtai.cs.kuleuven.be/problog
@@ -209,7 +209,10 @@
     create_known_values_file_name/2,
     create_bdd_file_name/3,
     create_bdd_input_file_name/2,
-    create_bdd_output_file_name/4]).
+    create_bdd_output_file_name/4,
+    create_factprobs_file_name/2,
+    create_test_predictions_file_name/2,
+    create_training_predictions_file_name/2]).
@@ -249,10 +252,7 @@ empty_output_directory :-
     delete_file_pattern_silently(Path,'values_*'),
     delete_file_pattern_silently(Path,'factprobs_*'),
     delete_file_pattern_silently(Path,'input_*'),
-    delete_file_pattern_silently(Path,'trainpredictions_*'),
-    delete_file_pattern_silently(Path,'testpredictions_*'),
     delete_file_pattern_silently(Path,'predictions_*').
 empty_output_directory :-
     throw(error(problog_flag_does_not_exist(output_directory))).
@@ -312,3 +312,46 @@ create_bdd_input_file_name(Iteration,Absolute_File_Name) :-
     concat_path_with_filename(Path,File_Name,Absolute_File_Name).
 create_bdd_input_file_name(_,_) :-
     throw(error(problog_flag_does_not_exist(output_directory))).
+%========================================================================
+%=
+%=
+%========================================================================
+create_factprobs_file_name(Iteration,Absolute_File_Name) :-
+    problog_flag(output_directory,Path),
+    !,
+    atomic_concat(['factprobs_',Iteration,'.pl'],File_Name),
+    concat_path_with_filename(Path,File_Name,Absolute_File_Name).
+create_factprobs_file_name(_,_) :-
+    throw(error(problog_flag_does_not_exist(output_directory))).
+%========================================================================
+%=
+%=
+%========================================================================
+create_test_predictions_file_name(Iteration,Absolute_File_Name) :-
+    problog_flag(output_directory,Path),
+    !,
+    atomic_concat(['predictions_test_',Iteration,'.pl'],File_Name),
+    concat_path_with_filename(Path,File_Name,Absolute_File_Name).
+create_test_predictions_file_name(_,_) :-
+    throw(error(problog_flag_does_not_exist(output_directory))).
+%========================================================================
+%=
+%=
+%========================================================================
+create_training_predictions_file_name(Iteration,Absolute_File_Name) :-
+    problog_flag(output_directory,Path),
+    !,
+    atomic_concat(['predictions_training_',Iteration,'.pl'],File_Name),
+    concat_path_with_filename(Path,File_Name,Absolute_File_Name).
+create_training_predictions_file_name(_,_) :-
+    throw(error(problog_flag_does_not_exist(output_directory))).
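For illustration, assuming problog_flag(output_directory, '/tmp/output') and that concat_path_with_filename/3 joins path and file with a '/', iteration 7 would yield names such as:

    ?- create_test_predictions_file_name(7, F).
    F = '/tmp/output/predictions_test_7.pl'.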

View File

@@ -2,8 +2,8 @@
 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
 %
-% $Date: 2010-09-28 21:04:43 +0200 (Tue, 28 Sep 2010) $
-% $Revision: 4838 $
+% $Date: 2010-12-02 15:20:15 +0100 (Thu, 02 Dec 2010) $
+% $Revision: 5043 $
 %
 % This file is part of ProbLog
 % http://dtai.cs.kuleuven.be/problog

View File

@@ -2,8 +2,8 @@
 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
 %
-% $Date: 2010-09-28 21:04:43 +0200 (Tue, 28 Sep 2010) $
-% $Revision: 4838 $
+% $Date: 2010-12-02 15:20:15 +0100 (Thu, 02 Dec 2010) $
+% $Revision: 5043 $
 %
 % This file is part of ProbLog
 % http://dtai.cs.kuleuven.be/problog

View File

@@ -2,8 +2,8 @@
 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
 %
-% $Date: 2010-10-05 18:15:57 +0200 (Tue, 05 Oct 2010) $
-% $Revision: 4876 $
+% $Date: 2010-12-02 15:20:15 +0100 (Thu, 02 Dec 2010) $
+% $Revision: 5043 $
 %
 % This file is part of ProbLog
 % http://dtai.cs.kuleuven.be/problog

View File

@@ -2,8 +2,8 @@
 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
 %
-% $Date: 2010-12-02 14:35:05 +0100 (Thu, 02 Dec 2010) $
-% $Revision: 5041 $
+% $Date: 2010-12-15 15:05:44 +0100 (Wed, 15 Dec 2010) $
+% $Revision: 5142 $
 %
 % This file is part of ProbLog
 % http://dtai.cs.kuleuven.be/problog
@@ -216,7 +216,7 @@
 % load modules from the YAP library
 :- use_module(library(lists), [max_list/2, min_list/2, sum_list/2]).
-:- use_module(library(system), [delete_file/1, file_exists/1, shell/2]).
+:- use_module(library(system), [file_exists/1, shell/2]).
 % load our own modules
 :- use_module(problog).
@@ -226,6 +226,7 @@
 :- use_module('problog/print_learning').
 :- use_module('problog/utils_learning').
 :- use_module('problog/utils').
+:- use_module('problog/tabling').
 % used to indicate the state of the system
 :- dynamic(values_correct/0).
@ -256,76 +257,14 @@ user:test_example(A,B,C,=) :-
%======================================================================== %========================================================================
%= store the facts with the learned probabilities to a file %= store the facts with the learned probabilities to a file
%= if F is a variable, a filename based on the current iteration is used
%=
%======================================================================== %========================================================================
save_model:- save_model:-
current_iteration(Iteration), current_iteration(Iteration),
atomic_concat(['factprobs_',Iteration,'.pl'],Filename), create_factprobs_file_name(Iteration,Filename),
problog_flag(output_directory,Dir), export_facts(Filename).
concat_path_with_filename(Dir,Filename,Filename2),
export_facts(Filename2).
%========================================================================
%= store the current success probabilities for training and test examples
%=
%========================================================================
save_predictions:-
current_iteration(Iteration),
atomic_concat(['predictions_',Iteration,'.pl'],Filename),
problog_flag(output_directory,Dir),
concat_path_with_filename(Dir,Filename,Filename2),
open(Filename2,'append',Handle),
format(Handle,"%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%\n",[]),
format(Handle,"% Iteration, train/test, QueryID, Query, GroundTruth, Prediction %\n",[]),
format(Handle,"%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%\n",[]),
!,
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% start save prediction test examples
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
( % go over all test examples
current_predicate(user:test_example/4),
user:test_example(Query_ID,Query,TrueQueryProb,_),
query_probability(Query_ID,LearnedQueryProb),
format(Handle,'ex(~q,test,~q,~q,~10f,~10f).\n',
[Iteration,Query_ID,Query,TrueQueryProb,LearnedQueryProb]),
fail; % go to next test example
true
),
!,
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% stop save prediction test examples
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% start save prediction training examples
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
( % go over all training examples
current_predicate(user:example/4),
user:example(Query_ID,Query,TrueQueryProb,_),
query_probability(Query_ID,LearnedQueryProb),
format(Handle,'ex(~q,train,~q,~q,~10f,~10f).\n',
[Iteration,Query_ID,Query,TrueQueryProb,LearnedQueryProb]),
fail; % go to next training example
true
),
!,
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% stop save prediction training examples
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
format(Handle,'~3n',[]),
close(Handle).
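The predicate deleted above is a classical failure-driven loop: generate an example, perform the side effect, fail back for the next one, and finish with "; true". Most of this commit replaces that idiom with forall/2, which backtracks over the generator internally and always succeeds. A minimal sketch of the same test-set dump in that style (hypothetical helper name dump_test_predictions/2; it assumes query_probability/2 as used above, and the body must succeed for every example, since forall/2, unlike the fail-loop, fails otherwise):

    dump_test_predictions(Handle, Iteration) :-
            forall(user:test_example(Query_ID, Query, TrueQueryProb, _),
                   ( query_probability(Query_ID, LearnedQueryProb),
                     format(Handle, 'ex(~q,test,~q,~q,~10f,~10f).~n',
                            [Iteration, Query_ID, Query, TrueQueryProb, LearnedQueryProb])
                   )).

Note also that the current_predicate/1 guards vanish throughout the patch, which presumes user:example/4 and user:test_example/4 are always defined (e.g. declared dynamic) rather than possibly absent.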
%======================================================================== %========================================================================
@ -339,7 +278,7 @@ check_examples :-
% Check example IDs % Check example IDs
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
( (
(current_predicate(user:example/4),user:example(ID,_,_,_), \+ atomic(ID)) (user:example(ID,_,_,_), \+ atomic(ID))
-> ->
( (
format(user_error,'The example id of training example ~q ',[ID]), format(user_error,'The example id of training example ~q ',[ID]),
@ -349,7 +288,7 @@ check_examples :-
), ),
( (
(current_predicate(user:test_example/4),user:test_example(ID,_,_,_), \+ atomic(ID)) (user:test_example(ID,_,_,_), \+ atomic(ID))
-> ->
( (
format(user_error,'The example id of test example ~q ',[ID]), format(user_error,'The example id of test example ~q ',[ID]),
@ -362,7 +301,7 @@ check_examples :-
% Check example probabilities % Check example probabilities
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
( (
(current_predicate(user:example/4),user:example(ID,_,P,_), (\+ number(P); P>1 ; P<0)) (user:example(ID,_,P,_), (\+ number(P); P>1 ; P<0))
-> ->
( (
format(user_error,'The training example ~q does not have a valid probability value (~q).~n',[ID,P]), format(user_error,'The training example ~q does not have a valid probability value (~q).~n',[ID,P]),
@ -371,7 +310,7 @@ check_examples :-
), ),
( (
(current_predicate(user:test_example/4),user:test_example(ID,_,P,_), (\+ number(P); P>1 ; P<0)) (user:test_example(ID,_,P,_), (\+ number(P); P>1 ; P<0))
-> ->
( (
format(user_error,'The test example ~q does not have a valid probability value (~q).~n',[ID,P]), format(user_error,'The test example ~q does not have a valid probability value (~q).~n',[ID,P]),
@ -387,22 +326,18 @@ check_examples :-
( (
( (
( (
current_predicate(user:example/4),
user:example(ID,QueryA,_,_), user:example(ID,QueryA,_,_),
user:example(ID,QueryB,_,_), user:example(ID,QueryB,_,_),
QueryA \= QueryB QueryA \= QueryB
) ; ) ;
( (
current_predicate(user:test_example/4),
user:test_example(ID,QueryA,_,_), user:test_example(ID,QueryA,_,_),
user:test_example(ID,QueryB,_,_), user:test_example(ID,QueryB,_,_),
QueryA \= QueryB QueryA \= QueryB
); );
( (
current_predicate(user:example/4),
current_predicate(user:test_example/4),
user:example(ID,QueryA,_,_), user:example(ID,QueryA,_,_),
user:test_example(ID,QueryB,_,_), user:test_example(ID,QueryB,_,_),
QueryA \= QueryB QueryA \= QueryB
@ -419,9 +354,7 @@ check_examples :-
%======================================================================== %========================================================================
reset_learning :- reset_learning :-
retractall(current_iteration(_)),
retractall(learning_initialized), retractall(learning_initialized),
retractall(values_correct), retractall(values_correct),
retractall(current_iteration(_)), retractall(current_iteration(_)),
retractall(example_count(_)), retractall(example_count(_)),
@ -474,7 +407,6 @@ do_learning_intern(Iterations,Epsilon) :-
logger_set_variable(iteration,CurrentIteration), logger_set_variable(iteration,CurrentIteration),
logger_start_timer(duration), logger_start_timer(duration),
mse_testset, mse_testset,
ground_truth_difference, ground_truth_difference,
gradient_descent, gradient_descent,
@ -484,10 +416,7 @@ do_learning_intern(Iterations,Epsilon) :-
( (
( Log_Frequency>0, 0 =:= CurrentIteration mod Log_Frequency) ( Log_Frequency>0, 0 =:= CurrentIteration mod Log_Frequency)
-> ->
( once(save_model);
once(save_predictions),
once(save_model)
);
true true
), ),
@ -583,55 +512,29 @@ init_learning :-
) )
-> ->
( (
format('Theory uses continuous facts.~nWill use problog_exact/3 as initialization method.~2n',[]), format_learning(2,'Theory uses continuous facts.~nWill use problog_exact/3 as initialization method.~2n',[]),
set_problog_flag(init_method,(Query,Probability,BDDFile,ProbFile,problog_exact_save(Query,Probability,_Status,BDDFile,ProbFile)))
);
true
),
(
problog_tabled(_)
->
(
format_learning(2,'Theory uses tabling.~nWill use problog_exact/3 as initialization method.~2n',[]),
set_problog_flag(init_method,(Query,Probability,BDDFile,ProbFile,problog_exact_save(Query,Probability,_Status,BDDFile,ProbFile))) set_problog_flag(init_method,(Query,Probability,BDDFile,ProbFile,problog_exact_save(Query,Probability,_Status,BDDFile,ProbFile)))
); );
true true
), ),
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% succeeds_n_times(user:example(_,_,_,_),TestExampleCount),
% start count test examples
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
bb_put(test_examples,0),
( % go over all test examples
current_predicate(user:test_example/4),
user:test_example(_,_,_,_),
bb_get(test_examples, OldCounter),
NewCounter is OldCounter+1,
bb_put(test_examples,NewCounter),
fail; % go to next test example
true
),
bb_delete(test_examples,TestExampleCount),
format_learning(3,'~q test examples~n',[TestExampleCount]), format_learning(3,'~q test examples~n',[TestExampleCount]),
!,
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% stop count test examples
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% succeeds_n_times(user:example(_,_,_,_),TrainingExampleCount),
% start count training examples
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
bb_put(training_examples,0),
( % go over all training examples
current_predicate(user:example/4),
user:example(_,_,_,_),
bb_get(training_examples, OldCounter),
NewCounter is OldCounter+1,
bb_put(training_examples,NewCounter),
fail; %go to next training example
true
),
bb_delete(training_examples,TrainingExampleCount),
assertz(example_count(TrainingExampleCount)), assertz(example_count(TrainingExampleCount)),
format_learning(3,'~q training examples~n',[TrainingExampleCount]), format_learning(3,'~q training examples~n',[TrainingExampleCount]),
!,
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% stop count training examples
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
@ -647,8 +550,17 @@ init_learning :-
( (
problog_flag(alpha,auto) problog_flag(alpha,auto)
-> ->
auto_alpha; (
true (user:example(_,_,P,_),P<1,P>0)
->
set_problog_flag(alpha,1.0);
(
succeed_n_times((user:example(_,_,P,=),P=:=1.0),Pos_Count),
succeed_n_times((user:example(_,_,P,=),P=:=0.0),Neg_Count),
Alpha is Pos_Count/Neg_Count,
set_problog_flag(alpha,Alpha)
)
)
), ),
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
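The counting helper introduced here (spelled succeeds_n_times/2 in this hunk and succeed_n_times/2 in the alpha branch) is presumably exported by 'problog/utils'. A self-contained sketch of such a predicate, built on the same blackboard primitives the old counting loops used (sketch only, the real utility may differ):

    succeeds_n_times(Goal, Times) :-
            bb_put(succeeds_n_times_counter, 0),
            forall(Goal,
                   ( bb_get(succeeds_n_times_counter, N0),
                     N is N0 + 1,
                     bb_put(succeeds_n_times_counter, N) )),
            bb_delete(succeeds_n_times_counter, Times).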
@ -675,33 +587,16 @@ init_learning :-
init_queries :- init_queries :-
format_learning(2,'Build BDDs for examples~n',[]), format_learning(2,'Build BDDs for examples~n',[]),
( % go over all test examples forall(user:test_example(ID,Query,_Prob,_),init_one_query(ID,Query,test)),
current_predicate(user:test_example/4), forall(user:example(ID,Query,_Prob,_),init_one_query(ID,Query,training)).
user:test_example(ID,Query,Prob,_),
format_learning(3,' test example ~q: ~q~n',[ID,Query]),
flush_output(user),
init_one_query(ID,Query,test),
fail; % go to next test example
true
),
( % go over all training examples
current_predicate(user:example/4),
user:example(ID,Query,Prob,_),
format_learning(3,' training example ~q: ~q~n',[ID,Query]),
flush_output(user),
init_one_query(ID,Query,training),
fail; %go to next training example
true
).
bdd_input_file(Filename) :- bdd_input_file(Filename) :-
problog_flag(output_directory,Dir), problog_flag(output_directory,Dir),
concat_path_with_filename(Dir,'input.txt',Filename). concat_path_with_filename(Dir,'input.txt',Filename).
init_one_query(QueryID,Query,Type) :- init_one_query(QueryID,Query,Type) :-
format_learning(3,' ~q example ~q: ~q~n',[Type,QueryID,Query]),
bdd_input_file(Probabilities_File), bdd_input_file(Probabilities_File),
problog_flag(bdd_directory,Query_Directory), problog_flag(bdd_directory,Query_Directory),
@ -718,7 +613,7 @@ init_one_query(QueryID,Query,Type) :-
( (
problog_flag(init_method,(Query,_Prob,Filename,Probabilities_File,Call)), problog_flag(init_method,(Query,_Prob,Filename,Probabilities_File,Call)),
once(Call), once(Call),
delete_file(Probabilities_File) delete_file_silently(Probabilities_File)
) )
), ),
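In the same spirit, delete_file/1 calls are replaced by delete_file_silently/1 here and below, matching the removal of delete_file/1 from the library(system) import list at the top of the file. The wrapper is presumably defined in one of the ProbLog utility modules; a minimal sketch of the assumed behaviour (ignore a missing or undeletable file instead of failing or raising):

    delete_file_silently(File) :-
            ( catch(delete_file(File), _, fail) -> true ; true ).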
@ -774,27 +669,19 @@ update_values :-
open(Probabilities_File,'write',Handle), open(Probabilities_File,'write',Handle),
( % go over all probabilistic facts forall(get_fact_probability(ID,Prob),
get_fact_probability(ID,Prob), (
inv_sigmoid(Prob,Value), inv_sigmoid(Prob,Value),
( (
non_ground_fact(ID) non_ground_fact(ID)
-> ->
format(Handle,'@x~q_*~n~10f~n',[ID,Value]); format(Handle,'@x~q_*~n~10f~n',[ID,Value]);
format(Handle,'@x~q~n~10f~n',[ID,Value]) format(Handle,'@x~q~n~10f~n',[ID,Value])
), )
)),
fail; % go to next probabilistic fact forall(get_continuous_fact_parameters(ID,gaussian(Mu,Sigma)),
true format(Handle,'@x~q_*~n0~n0~n~10f;~10f~n',[ID,Mu,Sigma])),
),
( % go over all continuous facts
get_continuous_fact_parameters(ID,gaussian(Mu,Sigma)),
format(Handle,'@x~q_*~n0~n0~n~10f;~10f~n',[ID,Mu,Sigma]),
fail; % go to next continuous fact
true
),
close(Handle), close(Handle),
!, !,
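For orientation, the parameter file written here has one block per fact: a variable line @x<ID> (with a trailing _* for non-ground facts) followed by the tunable parameter in the inverse-sigmoid domain, and for continuous facts two zero lines plus a mu;sigma pair. With made-up IDs, a ground fact of probability 0.2 and a standard-normal continuous fact would come out roughly as follows, assuming the plain logistic sigmoid so that inv_sigmoid(0.2) = ln(0.2/0.8):

    @x7
    -1.3862943611
    @x9_*
    0
    0
    0.0000000000;1.0000000000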
@ -885,11 +772,10 @@ update_query(QueryID,Symbol,What_To_Update) :-
) )
), ),
delete_file(Values_Filename), delete_file_silently(Values_Filename),
format_learning(4,'~w',[Symbol]) format_learning(4,'~w',[Symbol])
) )
), ).
flush_output(user).
%======================================================================== %========================================================================
@ -998,51 +884,53 @@ ground_truth_difference :-
%======================================================================== %========================================================================
mse_trainingset_only_for_linesearch(MSE) :- mse_trainingset_only_for_linesearch(MSE) :-
(
current_predicate(user:example/4)
->
(
update_values, update_values,
findall(SquaredError,
(user:example(QueryID,_Query,QueryProb,Type), example_count(Example_Count),
bb_put(error_train_line_search,0.0),
forall(user:example(QueryID,_Query,QueryProb,Type),
(
once(update_query(QueryID,'.',probability)), once(update_query(QueryID,'.',probability)),
query_probability(QueryID,CurrentProb), query_probability(QueryID,CurrentProb),
once(update_query_cleanup(QueryID)), once(update_query_cleanup(QueryID)),
( (
(Type == '='; (Type == '<', CurrentProb>QueryProb); (Type=='>',CurrentProb<QueryProb)) (Type == '='; (Type == '<', CurrentProb>QueryProb); (Type=='>',CurrentProb<QueryProb))
-> ->
SquaredError is (CurrentProb-QueryProb)**2; (
SquaredError = 0.0 bb_get(error_train_line_search,Old_Error),
New_Error is Old_Error + (CurrentProb-QueryProb)**2,
bb_put(error_train_line_search,New_Error)
);true
)
) )
), ),
bb_delete(error_train_line_search,Error),
AllSquaredErrors), MSE is Error/Example_Count,
format_learning(3,' (~8f)~n',[MSE]),
length(AllSquaredErrors,Length),
sum_list(AllSquaredErrors,SumAllSquaredErrors),
MSE is SumAllSquaredErrors/Length,
format_learning(3,' (~8f)~n',[MSE])
); true
),
retractall(values_correct). retractall(values_correct).
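Reading off the rewritten accumulation, the training-set error used during line search is a mean squared error over the N training examples in which an example contributes only when its constraint is active ('=' always, '<' only if the prediction overshoots the target, '>' only if it undershoots):

    MSE = (1/N) * sum_i e_i,  with  e_i = (CurrentProb_i - QueryProb_i)^2 if active, and e_i = 0 otherwise.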
mse_testset :- mse_testset :-
( current_iteration(Iteration),
(current_predicate(user:test_example/4),user:test_example(_,_,_,_)) create_test_predictions_file_name(Iteration,File_Name),
-> open(File_Name,'write',Handle),
( format(Handle,"%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%~n",[]),
format(Handle,"% Iteration, train/test, QueryID, Query, GroundTruth, Prediction %~n",[]),
format(Handle,"%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%~n",[]),
format_learning(2,'MSE_Test ',[]), format_learning(2,'MSE_Test ',[]),
update_values, update_values,
bb_put(llh_test_queries,0.0), bb_put(llh_test_queries,0.0),
findall(SquaredError, findall(SquaredError,
(user:test_example(QueryID,_Query,QueryProb,Type), (user:test_example(QueryID,Query,TrueQueryProb,Type),
once(update_query(QueryID,'+',probability)), once(update_query(QueryID,'+',probability)),
query_probability(QueryID,CurrentProb), query_probability(QueryID,CurrentProb),
format(Handle,'ex(~q,test,~q,~q,~10f,~10f).~n',[Iteration,QueryID,Query,TrueQueryProb,CurrentProb]),
once(update_query_cleanup(QueryID)), once(update_query_cleanup(QueryID)),
( (
(Type == '='; (Type == '<', CurrentProb>QueryProb); (Type=='>',CurrentProb<QueryProb)) (Type == '='; (Type == '<', CurrentProb>QueryProb); (Type=='>',CurrentProb<QueryProb))
-> ->
SquaredError is (CurrentProb-QueryProb)**2; SquaredError is (CurrentProb-TrueQueryProb)**2;
SquaredError = 0.0 SquaredError = 0.0
), ),
bb_get(llh_test_queries,Old_LLH_Test_Queries), bb_get(llh_test_queries,Old_LLH_Test_Queries),
@ -1051,20 +939,31 @@ mse_testset :-
), ),
AllSquaredErrors), AllSquaredErrors),
close(Handle),
bb_delete(llh_test_queries,LLH_Test_Queries),
length(AllSquaredErrors,Length), length(AllSquaredErrors,Length),
(
Length>0
->
(
sum_list(AllSquaredErrors,SumAllSquaredErrors), sum_list(AllSquaredErrors,SumAllSquaredErrors),
min_list(AllSquaredErrors,MinError), min_list(AllSquaredErrors,MinError),
max_list(AllSquaredErrors,MaxError), max_list(AllSquaredErrors,MaxError),
MSE is SumAllSquaredErrors/Length, MSE is SumAllSquaredErrors/Length
bb_delete(llh_test_queries,LLH_Test_Queries), );(
MSE=0.0,
MinError=0.0,
MaxError=0.0
)
),
logger_set_variable(mse_testset,MSE), logger_set_variable(mse_testset,MSE),
logger_set_variable(mse_min_testset,MinError), logger_set_variable(mse_min_testset,MinError),
logger_set_variable(mse_max_testset,MaxError), logger_set_variable(mse_max_testset,MaxError),
logger_set_variable(llh_test_queries,LLH_Test_Queries), logger_set_variable(llh_test_queries,LLH_Test_Queries),
format_learning(2,' (~8f)~n',[MSE]) format_learning(2,' (~8f)~n',[MSE]).
); true
).
%======================================================================== %========================================================================
%= Calculates the sigmoid function and its inverse %= Calculates the sigmoid function and its inverse
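For reference, a minimal sketch of the two mappings this header refers to, assuming the plain logistic function with unit slope (the actual predicates may thread a slope or other flag through, so the names here are hypothetical):

    my_sigmoid(T, Sig) :-             % sig = 1 / (1 + e^(-t))
            Sig is 1 / (1 + exp(-T)).
    my_inv_sigmoid(Sig, T) :-         % t = ln(sig / (1 - sig))
            T is log(Sig / (1 - Sig)).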
@ -1097,13 +996,10 @@ inv_sigmoid(T,InvSig) :-
%======================================================================== %========================================================================
save_old_probabilities :- save_old_probabilities :-
( % go over all tunable facts forall(tunable_fact(FactID,_),
tunable_fact(FactID,_),
( (
continuous_fact(FactID) continuous_fact(FactID)
-> ->
( (
get_continuous_fact_parameters(FactID,gaussian(OldMu,OldSigma)), get_continuous_fact_parameters(FactID,gaussian(OldMu,OldSigma)),
atomic_concat(['old_mu_',FactID],Key), atomic_concat(['old_mu_',FactID],Key),
@ -1116,17 +1012,13 @@ save_old_probabilities :-
atomic_concat(['old_prob_',FactID],Key), atomic_concat(['old_prob_',FactID],Key),
bb_put(Key,OldProbability) bb_put(Key,OldProbability)
) )
), )
fail; % go to next tunable fact
true
). ).
forget_old_probabilities :- forget_old_probabilities :-
( % go over all tunable facts forall(tunable_fact(FactID,_),
tunable_fact(FactID,_),
( (
continuous_fact(FactID) continuous_fact(FactID)
-> ->
@ -1146,15 +1038,11 @@ forget_old_probabilities :-
bb_delete(Key,_), bb_delete(Key,_),
bb_delete(Key2,_) bb_delete(Key2,_)
) )
), )
fail; % go to next tunable fact
true
). ).
add_gradient(Learning_Rate) :- add_gradient(Learning_Rate) :-
( % go over all tunable facts forall(tunable_fact(FactID,_),
tunable_fact(FactID,_),
( (
continuous_fact(FactID) continuous_fact(FactID)
-> ->
@ -1189,15 +1077,19 @@ add_gradient(Learning_Rate) :-
Prob_Secure is min(0.999999999,max(0.000000001,NewProbability)), Prob_Secure is min(0.999999999,max(0.000000001,NewProbability)),
set_fact_probability(FactID,Prob_Secure) set_fact_probability(FactID,Prob_Secure)
) )
), )
fail; % go to next tunable fact
true
), ),
retractall(values_correct). retractall(values_correct).
gradient_descent :- gradient_descent :-
current_iteration(Iteration),
create_training_predictions_file_name(Iteration,File_Name),
open(File_Name,'write',Handle),
format(Handle,"%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%~n",[]),
format(Handle,"% Iteration, train/test, QueryID, Query, GroundTruth, Prediction %~n",[]),
format(Handle,"%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%~n",[]),
format_learning(2,'Gradient ',[]), format_learning(2,'Gradient ',[]),
save_old_probabilities, save_old_probabilities,
@ -1206,9 +1098,7 @@ gradient_descent :-
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% start set gradient to zero % start set gradient to zero
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
( % go over all tunable facts forall(tunable_fact(FactID,_),
tunable_fact(FactID,_),
( (
continuous_fact(FactID) continuous_fact(FactID)
-> ->
@ -1223,16 +1113,11 @@ gradient_descent :-
atomic_concat(['grad_',FactID],Key), atomic_concat(['grad_',FactID],Key),
bb_put(Key,0.0) bb_put(Key,0.0)
) )
), )
fail; % go to next tunable fact
true
), ),
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% stop gradient to zero % stop gradient to zero
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
!,
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% start calculate gradient % start calculate gradient
@ -1246,11 +1131,11 @@ gradient_descent :-
logger_set_variable(alpha,Alpha), logger_set_variable(alpha,Alpha),
example_count(Example_Count), example_count(Example_Count),
( % go over all training examples forall(user:example(QueryID,Query,QueryProb,Type),
current_predicate(user:example/4), (
user:example(QueryID,_Query,QueryProb,Type),
once(update_query(QueryID,'.',all)), once(update_query(QueryID,'.',all)),
query_probability(QueryID,BDDProb), query_probability(QueryID,BDDProb),
format(Handle,'ex(~q,train,~q,~q,~10f,~10f).~n',[Iteration,QueryID,Query,QueryProb,BDDProb]),
( (
QueryProb=:=0.0 QueryProb=:=0.0
-> ->
@ -1293,7 +1178,6 @@ gradient_descent :-
( (
continuous_fact(FactID) continuous_fact(FactID)
-> ->
( (
atomic_concat(['grad_mu_',FactID],Key), atomic_concat(['grad_mu_',FactID],Key),
atomic_concat(['grad_sigma_',FactID],Key2), atomic_concat(['grad_sigma_',FactID],Key2),
@ -1335,15 +1219,15 @@ gradient_descent :-
true true
), ),
once(update_query_cleanup(QueryID)), once(update_query_cleanup(QueryID))
fail; % go to next training example )),
true
),
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% stop calculate gradient % stop calculate gradient
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
!, !,
close(Handle),
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% start statistics on gradient % start statistics on gradient
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
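As far as it can be reconstructed from the surrounding hunks, the quantity accumulated per tunable fact j is the derivative of the (alpha-weighted) squared error with each probability reparametrised as x_j = sigmoid(a_j); by the chain rule a training example i then contributes roughly

    2 * (BDDProb_i - QueryProb_i) * dBDDProb_i/dx_j * sigmoid(a_j) * (1 - sigmoid(a_j)),

scaled by alpha for examples with target probability 0. This is a sketch of the intended computation, not a transcript of the exact implementation.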
@ -1431,24 +1315,15 @@ lineSearch(Final_X,Final_Value) :-
% init values % init values
Acc is Tol * (B-A), Acc is Tol * (B-A),
InitRight is A + Tau*(B-A), InitRight is A + Tau*(B-A),
InitLeft is A + B - InitRight, InitLeft is B - Tau*(B-A),
line_search_evaluate_point(A,Value_A), line_search_evaluate_point(A,Value_A),
line_search_evaluate_point(B,Value_B), line_search_evaluate_point(B,Value_B),
line_search_evaluate_point(InitRight,Value_InitRight), line_search_evaluate_point(InitRight,Value_InitRight),
line_search_evaluate_point(InitLeft,Value_InitLeft), line_search_evaluate_point(InitLeft,Value_InitLeft),
bb_put(line_search_a,A),
bb_put(line_search_b,B),
bb_put(line_search_left,InitLeft),
bb_put(line_search_right,InitRight),
bb_put(line_search_value_a,Value_A), Parameters=ls(A,B,InitLeft,InitRight,Value_A,Value_B,Value_InitLeft,Value_InitRight,1),
bb_put(line_search_value_b,Value_B),
bb_put(line_search_value_left,Value_InitLeft),
bb_put(line_search_value_right,Value_InitRight),
bb_put(line_search_iteration,1),
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%% BEGIN BACK TRACKING %%%% BEGIN BACK TRACKING
@ -1456,16 +1331,7 @@ lineSearch(Final_X,Final_Value) :-
( (
repeat, repeat,
bb_get(line_search_iteration,Iteration), Parameters=ls(Ak,Bk,Left,Right,Fl,Fr,FLeft,FRight,Iteration),
bb_get(line_search_a,Ak),
bb_get(line_search_b,Bk),
bb_get(line_search_left,Left),
bb_get(line_search_right,Right),
bb_get(line_search_value_a,Fl),
bb_get(line_search_value_b,Fr),
bb_get(line_search_value_left,FLeft),
bb_get(line_search_value_right,FRight),
( (
% check for infinity, if there is, go to the left % check for infinity, if there is, go to the left
@ -1476,40 +1342,40 @@ lineSearch(Final_X,Final_Value) :-
FlNew=FLeft, FlNew=FLeft,
LeftNew=Right, LeftNew=Right,
FLeftNew=FRight, FLeftNew=FRight,
RightNew is AkNew + Bk - LeftNew, RightNew is Left + Bk - Right,
line_search_evaluate_point(RightNew,FRightNew), line_search_evaluate_point(RightNew,FRightNew),
BkNew=Bk, BkNew=Bk,
FrNew=Fr FrNew=Fr,
Interval_Size is Bk-Left
); );
( (
BkNew=Right, BkNew=Right,
FrNew=FRight, FrNew=FRight,
RightNew=Left, RightNew=Left,
FRightNew=FLeft, FRightNew=FLeft,
LeftNew is Ak + BkNew - RightNew, LeftNew is Ak + Right - Left,
line_search_evaluate_point(LeftNew,FLeftNew), line_search_evaluate_point(LeftNew,FLeftNew),
AkNew=Ak, AkNew=Ak,
FlNew=Fl FlNew=Fl,
Interval_Size is Right-Ak
) )
), ),
Next_Iteration is Iteration + 1, Next_Iteration is Iteration + 1,
bb_put(line_search_iteration,Next_Iteration), nb_setarg(9,Parameters,Next_Iteration),
nb_setarg(1,Parameters,AkNew),
bb_put(line_search_a,AkNew), nb_setarg(2,Parameters,BkNew),
bb_put(line_search_b,BkNew), nb_setarg(3,Parameters,LeftNew),
bb_put(line_search_left,LeftNew), nb_setarg(4,Parameters,RightNew),
bb_put(line_search_right,RightNew), nb_setarg(5,Parameters,FlNew),
nb_setarg(6,Parameters,FrNew),
bb_put(line_search_value_a,FlNew), nb_setarg(7,Parameters,FLeftNew),
bb_put(line_search_value_b,FrNew), nb_setarg(8,Parameters,FRightNew),
bb_put(line_search_value_left,FLeftNew),
bb_put(line_search_value_right,FRightNew),
% is the search interval smaller than the tolerance level? % is the search interval smaller than the tolerance level?
BkNew-AkNew<Acc, Interval_Size<Acc,
% apparently it is, so get me out of here and % apparently it is, so get me out of here and
% cut away the choice point from repeat % cut away the choice point from repeat
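The nine blackboard keys of the old loop are folded into a single ls/9 term that is updated in place with nb_setarg/3; because the term is created before the repeat/1 choice point, the destructive updates survive backtracking into the loop. A minimal self-contained sketch of that pattern (a plain counter, nothing ProbLog-specific):

    count_with_nb_setarg(Limit, Final) :-
            State = counter(0),
            repeat,
              State = counter(N),      % re-reads the destructively updated argument
              N1 is N + 1,
              nb_setarg(1, State, N1),
              N1 >= Limit,             % termination test, like Interval_Size<Acc above
            !,                         % cut away the repeat choice point
            arg(1, State, Final).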
@ -1519,16 +1385,7 @@ lineSearch(Final_X,Final_Value) :-
%%%% END BACK TRACKING %%%% END BACK TRACKING
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% clean up the blackboard mess
bb_delete(line_search_iteration,_),
bb_delete(line_search_a,_),
bb_delete(line_search_b,_),
bb_delete(line_search_left,_),
bb_delete(line_search_right,_),
bb_delete(line_search_value_a,_),
bb_delete(line_search_value_b,_),
bb_delete(line_search_value_left,_),
bb_delete(line_search_value_right,_),
% it doesn't harm to check also the value in the middle % it doesn't harm to check also the value in the middle
% of the current search interval % of the current search interval
@ -1611,36 +1468,6 @@ my_5_min(V1,V2,V3,V4,V5,F1,F2,F3,F4,F5,VMin,FMin) :-
). ).
%========================================================================
%= set the alpha parameter to the value
%= # positive training examples / # negative training examples
%=
%= training example is positive if P(e)=1
%= training example is negative if P(e)=0
%=
%= if there are training examples with 0<P<1, set alpha=1.0
%========================================================================
auto_alpha :-
\+ current_predicate(user:example/4),
!,
set_problog_flag(alpha,1.0).
auto_alpha :-
user:example(_,_,P,_),
P<1,
P>0,
!,
set_problog_flag(alpha,1.0).
auto_alpha :-
findall(1,(user:example(_,_,P,=),P=:=1.0),Pos),
findall(0,(user:example(_,_,P,=),P=:=0.0),Neg),
length(Pos,NP),
length(Neg,NN),
Alpha is NP/NN,
set_problog_flag(alpha,Alpha).
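A worked instance of the rule documented in this deleted header (now inlined in init_learning/0): with, say, 40 training examples of probability 1 and 10 of probability 0, alpha is set to 40/10 = 4.0; as soon as a single training example with 0 < P < 1 exists, alpha falls back to 1.0.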
%======================================================================== %========================================================================
%= initialize the logger module and set the flags for learning %= initialize the logger module and set the flags for learning