Fixing minor portability issues

parent 8ceca88564
commit 5d1aa5844a
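The hunks below apply the same handful of substitutions across the ProbLog sources: `use_module/1` instead of `ensure_loaded/1`, the ISO `assertz/1` instead of the `assert/1` alias, `\+/1` instead of `not/1`, `atom_codes/2` instead of `name/2`, and `memberchk/2` instead of `once(member(...))`. As a quick orientation, here is a minimal sketch contrasting the two styles; `my_fact/1` and `demo/0` are hypothetical names used only for illustration and do not occur in the diff.

```prolog
% Minimal sketch of the portable idioms this commit standardizes on.
% my_fact/1 and demo/0 are made-up example predicates.

:- use_module(library(lists)).   % instead of ensure_loaded(library(lists)):
                                 % imports the module's interface explicitly

:- dynamic my_fact/1.

demo :-
    assertz(my_fact(a)),         % ISO assertz/1 instead of the assert/1 alias
    ( \+ my_fact(b) ->           % ISO \+/1 instead of not/1
        true
    ;
        true
    ),
    atom_codes(a_17, Codes),     % atom_codes/2 instead of the older name/2
    memberchk(0'_, Codes).       % memberchk/2 instead of once(member(X, L))
```

The few remaining changes follow the same spirit: `meta_predicate` declarations spell out goal arguments explicitly, and the grounding-ID atoms are taken apart with `atom_codes/2` rather than `name/2`.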
@@ -310,7 +310,7 @@


% general yap modules
-:- ensure_loaded(library(system)).
+:- use_module(library(system)).

:- problog_define_flag(optimization, problog_flag_validate_atom, 'optimization algorithm [local/global]', global, dtproblog).
:- problog_define_flag(forest_type, problog_flag_validate_atom, 'type of BDD forest [dependent/independent]', dependent, dtproblog).
@@ -413,7 +413,7 @@ set_strategy([Term|R]) :-
set_ground_strategy(ID2,LogProb)
;
copy_term(Decision, Decision2),
-assert(non_ground_strategy(Decision2,LogProb))
+assertz(non_ground_strategy(Decision2,LogProb))
),
set_strategy(R).
@@ -302,26 +302,26 @@
:- set_prolog_flag(to_chars_mode,quintus).

% general yap modules
-:- ensure_loaded(library(charsio)).
-:- ensure_loaded(library(lists)).
-:- ensure_loaded(library(terms)).
-:- ensure_loaded(library(random)).
-:- ensure_loaded(library(system)).
-:- ensure_loaded(library(rbtrees)).
-:- ensure_loaded(library(ordsets)).
+:- use_module(library(charsio)).
+:- use_module(library(lists)).
+:- use_module(library(terms)).
+:- use_module(library(random)).
+:- use_module(library(system)).
+:- use_module(library(rbtrees)).
+:- use_module(library(ordsets)).

% problog related modules
-:- ensure_loaded('problog/variables').
-:- ensure_loaded('problog/extlists').
-:- ensure_loaded('problog/flags').
-:- ensure_loaded('problog/print').
-:- ensure_loaded('problog/os').
-:- ensure_loaded('problog/tptree').
-:- ensure_loaded('problog/tabling').
-:- ensure_loaded('problog/sampling').
-:- ensure_loaded('problog/intervals').
-:- ensure_loaded('problog/mc_DNF_sampling').
-:- catch(ensure_loaded('problog/variable_elimination'),_,true).
+:- use_module('problog/variables').
+:- use_module('problog/extlists').
+:- use_module('problog/flags').
+:- use_module('problog/print').
+:- use_module('problog/os').
+:- use_module('problog/tptree').
+:- use_module('problog/tabling').
+:- use_module('problog/sampling').
+:- use_module('problog/intervals').
+:- use_module('problog/mc_DNF_sampling').
+:- use_module('problog/variable_elimination').

% op attaching probabilities to facts
:- op( 550, yfx, :: ).
@@ -509,7 +509,7 @@ check_existance(FileName):-
problog_control(on,X) :-
call(X),!.
problog_control(on,X) :-
-assert(X).
+assertz(X).
problog_control(off,X) :-
retractall(X).
problog_control(check,X) :-
@@ -572,7 +572,7 @@ term_expansion_intern((Annotation :: Head :- Body), Module, problog:ExpandedClau
append(Args,[LProb],LongArgs),
probclause_id(ID),
ProbFactHead =.. [LongFunctor,ID|LongArgs],
-assert(decision_fact(ID,Head)),
+assertz(decision_fact(ID,Head)),
ExpandedClause = (ProbFactHead :-
user:Body,
(problog_control(check,internal_strategy) ->
@@ -581,14 +581,14 @@ term_expansion_intern((Annotation :: Head :- Body), Module, problog:ExpandedClau
LProb = '?'
)
),
-assert(dynamic_probability_fact(ID)),
-assert((dynamic_probability_fact_extract(HeadCopy,P_New) :-
+assertz(dynamic_probability_fact(ID)),
+assertz((dynamic_probability_fact_extract(HeadCopy,P_New) :-
dtproblog:strategy(ID,HeadCopy,P_New)
)),
(ground(Head) ->
true
;
-assert(non_ground_fact(ID))
+assertz(non_ground_fact(ID))
),
problog_predicate(Functor, Arity, LongFunctor, Module)
;
@@ -622,7 +622,7 @@ term_expansion_intern(P :: Goal,Module,problog:ProbFact) :-
(nonvar(P), P = t(TrueProb))
->
(
-assert(tunable_fact(ID,TrueProb)),
+assertz(tunable_fact(ID,TrueProb)),
LProb is log(random*0.9+0.05) % set unknown probability randomly in [0.05, 0.95]
);
(
@@ -644,8 +644,8 @@ term_expansion_intern(P :: Goal,Module,problog:ProbFact) :-
)
),
LProb=log(P),
-assert(dynamic_probability_fact(ID)),
-assert(dynamic_probability_fact_extract(Goal_Copy,P_Copy))
+assertz(dynamic_probability_fact(ID)),
+assertz(dynamic_probability_fact_extract(Goal_Copy,P_Copy))
)
)
),
@@ -653,7 +653,7 @@ term_expansion_intern(P :: Goal,Module,problog:ProbFact) :-
ground(Goal)
->
true;
-assert(non_ground_fact(ID))
+assertz(non_ground_fact(ID))
),
problog_predicate(Name, Arity, ProblogName,Module).

@@ -710,7 +710,7 @@ user:term_expansion(Goal, problog:ProbFact) :-
Sigma_Random is 0.4, % random*2+0.5,
nth(Pos,Args,_,KeepArgs),
nth(Pos,NewArgs,gaussian(Mu_Random,Sigma_Random),KeepArgs),
-assert(tunable_fact(ID,gaussian(Mu_Arg,Sigma_Arg)))
+assertz(tunable_fact(ID,gaussian(Mu_Arg,Sigma_Arg)))
)
),
ProbFact =.. [ProblogName,ID|NewArgs],
@@ -719,9 +719,9 @@ user:term_expansion(Goal, problog:ProbFact) :-
ground(Goal)
->
true;
-assert(non_ground_fact(ID))
+assertz(non_ground_fact(ID))
),
-assert(continuous_fact(ID)),
+assertz(continuous_fact(ID)),
problog_continuous_predicate(Name, Arity, Pos,ProblogName).


@@ -758,7 +758,7 @@ problog_continuous_predicate(Name, Arity, ContinuousArgumentPosition, ProblogNam
ProbFact =.. [ProblogName,ID|ProbArgs],
prolog_load_context(module,Mod),

-assert( (Mod:OriginalGoal :- ProbFact,
+assertz( (Mod:OriginalGoal :- ProbFact,
% continuous facts always get a grounding ID, even when they are actually ground
% this simplifies the BDD script generation
non_ground_fact_grounding_id(ProbFact,Ground_ID),
@@ -766,7 +766,7 @@ problog_continuous_predicate(Name, Arity, ContinuousArgumentPosition, ProblogNam
add_continuous_to_proof(ID,ID2)
)),

-assert(problog_continuous_predicate(Name, Arity,ContinuousArgumentPosition)),
+assertz(problog_continuous_predicate(Name, Arity,ContinuousArgumentPosition)),
ArityPlus1 is Arity+1,
dynamic(problog:ProblogName/ArityPlus1).

@@ -794,11 +794,11 @@ interval_merge((_ID,GroundID,_Type),Interval) :-



-problog_assert(P::Goal) :-
-problog_assert(user,P::Goal).
-problog_assert(Module, P::Goal) :-
+problog_assertz(P::Goal) :-
+problog_assertz(user,P::Goal).
+problog_assertz(Module, P::Goal) :-
term_expansion_intern(P::Goal,Module,problog:ProbFact),
-assert(problog:ProbFact).
+assertz(problog:ProbFact).

problog_retractall(Goal) :-
Goal =.. [F|Args],
@@ -817,18 +817,18 @@ problog_predicate(Name, Arity, ProblogName,Mod) :-
OriginalGoal =.. [_|Args],
append(Args,[Prob],L1),
ProbFact =.. [ProblogName,ID|L1],
-assert( (Mod:OriginalGoal :-
+assertz( (Mod:OriginalGoal :-
ProbFact,
grounding_id(ID,OriginalGoal,ID2),
prove_problog_fact(ID,ID2,Prob)
)),

-assert( (Mod:problog_not(OriginalGoal) :-
+assertz( (Mod:problog_not(OriginalGoal) :-
ProbFact,
grounding_id(ID,OriginalGoal,ID2),
prove_problog_fact_negated(ID,ID2,Prob)
)),
-assert(problog_predicate(Name, Arity)),
+assertz(problog_predicate(Name, Arity)),
ArityPlus2 is Arity+2,
dynamic(problog:ProblogName/ArityPlus2).

@@ -924,7 +924,7 @@ non_ground_fact_grounding_id(Goal,ID) :-
nb_getval(non_ground_fact_grounding_id_counter,ID),
ID2 is ID+1,
nb_setval(non_ground_fact_grounding_id_counter,ID2),
-assert(grounding_is_known(Goal,ID))
+assertz(grounding_is_known(Goal,ID))
)
).

@@ -996,7 +996,7 @@ prob_for_id(dummy,dummy,dummy).

get_fact_probability(A, Prob) :-
ground(A),
-not(number(A)),
+\+ number(A),
atom_codes(A, A_Codes),
once(append(Part1, [95|Part2], A_Codes)), % 95 = '_'
number_codes(ID, Part1), !,
@@ -1054,7 +1054,7 @@ set_fact_probability(ID,Prob) :-
NewLogProb is log(Prob),
nth(ProblogArity,NewProblogTermArgs,NewLogProb,KeepArgs),
NewProblogTerm =.. [ProblogName|NewProblogTermArgs],
-assert(NewProblogTerm).
+assertz(NewProblogTerm).

get_internal_fact(ID,ProblogTerm,ProblogName,ProblogArity) :-
problog_predicate(Name,Arity),
@@ -1089,7 +1089,7 @@ set_continuous_fact_parameters(ID,Parameters) :-
nth0(ContinuousPos,ProblogTermArgs,_,KeepArgs),
nth0(ContinuousPos,NewProblogTermArgs,Parameters,KeepArgs),
NewProblogTerm =.. [ProblogName|NewProblogTermArgs],
-assert(NewProblogTerm).
+assertz(NewProblogTerm).
@@ -1141,12 +1141,12 @@ get_fact(ID,OutsideTerm) :-
grounding_is_known(OutsideTerm,GID).

recover_grounding_id(Atom,ID) :-
-name(Atom,List),
+atom_codes(Atom,List),
reverse(List,Rev),
recover_number(Rev,NumRev),
reverse(NumRev,Num),
-name(ID,Num).
-recover_number([95|_],[]) :- !. % name('_',[95])
+atom_codes(ID,Num).
+recover_number([95|_],[]) :- !. % atom_codes('_',[95])
recover_number([A|B],[A|C]) :-
recover_number(B,C).

@@ -1280,9 +1280,9 @@ montecarlo_check(ComposedID) :-
fail.
% (c) for unknown groundings of non-ground facts: generate a new sample (decompose the ID first)
montecarlo_check(ID) :-
-name(ID,IDN),
+atom_codes(ID,IDN),
recover_number(IDN,FactIDName),
-name(FactID,FactIDName),
+atom_codes(FactID,FactIDName),
new_sample_nonground(ID,FactID).

% sampling from ground fact: set array value to 1 (in) or 2 (out)
@@ -1318,10 +1318,10 @@ new_sample_nonground(ComposedID,ID) :-
% fail.

split_grounding_id(Composed,Fact,Grounding) :-
-name(Composed,C),
+atom_codes(Composed,C),
split_g_id(C,F,G),
-name(Fact,F),
-name(Grounding,G).
+atom_codes(Fact,F),
+atom_codes(Grounding,G).
split_g_id([95|Grounding],[],Grounding) :- !.
split_g_id([A|B],[A|FactID],GroundingID) :-
split_g_id(B,FactID,GroundingID).
@@ -1779,7 +1779,7 @@ add_solution :-
Continuous=[];
(
Continuous=[continuous(ProofID)],
-assert(hybrid_proof(ProofID,Cont_IDs,AllIntervals))
+assertz(hybrid_proof(ProofID,Cont_IDs,AllIntervals))
)
)
)
@@ -1807,7 +1807,7 @@ collect_all_intervals([(ID,GroundID)|T],ProofID,[Interval|T2]) :-
Interval \= all, % we do not need to store continuous
% variables with domain [-oo,oo] (they have probability 1)
!,
-assert(hybrid_proof(ProofID,ID,GroundID,Interval)),
+assertz(hybrid_proof(ProofID,ID,GroundID,Interval)),
collect_all_intervals(T,ProofID,T2).
collect_all_intervals([_|T],ProofID,T2) :-
collect_all_intervals(T,ProofID,T2).
@@ -1864,7 +1864,7 @@ disjoin_hybrid_proofs([GroundID|T]) :-
(
hybrid_proof(ProofID,ID,GroundID,Interval),
intervals_disjoin(Interval,Partition,PInterval),
-assert(hybrid_proof_disjoint(ProofID,ID,GroundID,PInterval)),
+assertz(hybrid_proof_disjoint(ProofID,ID,GroundID,PInterval)),

fail; % go to next proof
true
@@ -1959,9 +1959,9 @@ init_problog_delta(Threshold,Delta) :-
nb_setval(problog_completed_proofs, Trie_Completed_Proofs),
init_ptree(Trie_Stopped_Proofs),
nb_setval(problog_stopped_proofs, Trie_Stopped_Proofs),
-assert(low(0,0.0)),
-assert(up(0,1.0)),
-assert(stopDiff(Delta)),
+assertz(low(0,0.0)),
+assertz(up(0,1.0)),
+assertz(stopDiff(Delta)),
init_problog(Threshold).

problog_delta_id(Goal, _) :-
@@ -2047,7 +2047,7 @@ eval_lower(N,P,Status) :-
eval_dnf(Trie_Completed_Proofs,P,Status),
(Status = ok ->
retract(low(_,_)),
-assert(low(N,P)),
+assertz(low(N,P)),
(problog_flag(verbose,true) -> format(user,'lower bound: ~6f~n',[P]);true),
flush_output(user)
;
@@ -2057,7 +2057,7 @@ eval_lower(N,P,Status) :-
eval_upper(0,P,ok) :-
retractall(up(_,_)),
low(N,P),
-assert(up(N,P)).
+assertz(up(N,P)).
% else merge proofs and stopped derivations to get upper bound
% in case of timeout or other problems, skip and use bound from last level
eval_upper(N,UpP,ok) :-
@@ -2070,7 +2070,7 @@ eval_upper(N,UpP,ok) :-
delete_ptree(Trie_All_Proofs),
(StatusUp = ok ->
retract(up(_,_)),
-assert(up(N,UpP))
+assertz(up(N,UpP))
;
(problog_flag(verbose,true) -> format(user,'~w - continue using old up~n',[StatusUp]);true),
flush_output(user),
@@ -2098,8 +2098,8 @@ problog_max(Goal, Prob, Facts) :-
init_problog_max(Threshold) :-
retractall(max_probability(_)),
retractall(max_proof(_)),
-assert(max_probability(-999999)),
-assert(max_proof(unprovable)),
+assertz(max_probability(-999999)),
+assertz(max_proof(unprovable)),
init_problog(Threshold).

update_max :-
@@ -2111,10 +2111,10 @@ update_max :-
b_getval(problog_current_proof, IDs),
open_end_close_end(IDs, R),
retractall(max_proof(_)),
-assert(max_proof(R)),
+assertz(max_proof(R)),
nb_setval(problog_threshold, CurrP),
retractall(max_probability(_)),
-assert(max_probability(CurrP))
+assertz(max_probability(CurrP))
).

problog_max_id(Goal, _Prob, _Clauses) :-
@@ -2195,7 +2195,7 @@ problog_real_kbest(Goal, K, Prob, Status) :-

init_problog_kbest(Threshold) :-
retractall(current_kbest(_,_,_)),
-assert(current_kbest(-999999,[],0)), %(log-threshold,proofs,num_proofs)
+assertz(current_kbest(-999999,[],0)), %(log-threshold,proofs,num_proofs)
init_ptree(Trie_Completed_Proofs),
nb_setval(problog_completed_proofs, Trie_Completed_Proofs),
init_problog(Threshold).
@@ -2236,7 +2236,7 @@ update_current_kbest(K,NewLogProb,Cl) :-
sorted_insert(NewLogProb-Cl,List,NewList),
NewLength is Length+1,
(NewLength < K ->
-assert(current_kbest(OldThres,NewList,NewLength))
+assertz(current_kbest(OldThres,NewList,NewLength))
;
(NewLength>K ->
First is NewLength-K+1,
@@ -2244,7 +2244,7 @@ update_current_kbest(K,NewLogProb,Cl) :-
; FinalList=NewList, FinalLength=NewLength),
FinalList=[NewThres-_|_],
nb_setval(problog_threshold,NewThres),
-assert(current_kbest(NewThres,FinalList,FinalLength))).
+assertz(current_kbest(NewThres,FinalList,FinalLength))).

sorted_insert(A,[],[A]).
sorted_insert(A-LA,[B1-LB1|B], [A-LA,B1-LB1|B] ) :-
@@ -2405,7 +2405,7 @@ montecarlo(Goal,Delta,K,SamplesSoFar,File,PositiveSoFar,InitialTime) :-
;
true
),
-assert(mc_prob(Prob))
+assertz(mc_prob(Prob))
;
montecarlo(Goal,Delta,K,SamplesNew,File,Next,InitialTime)
).
@@ -2427,7 +2427,7 @@ montecarlo(Goal,Delta,K,SamplesSoFar,File,PositiveSoFar,InitialTime) :-
% ;
% true
% ),
-% assert(mc_prob(Prob))
+% assertz(mc_prob(Prob))
% ;
% montecarlo(Goal,Delta,K,SamplesNew,File,Next,InitialTime)
% ).
@@ -2472,7 +2472,7 @@ problog_answers(Goal,File) :-
set_problog_flag(verbose,false),
retractall(answer(_)),
% this will not give the exact prob of Goal!
-problog_exact((Goal,ground(Goal),\+problog:answer(Goal),assert(problog:answer(Goal))),_,_),
+problog_exact((Goal,ground(Goal),\+problog:answer(Goal),assertz(problog:answer(Goal))),_,_),
open(File,write,_,[alias(answer)]),
eval_answers,
close(answer).
@@ -2530,13 +2530,13 @@ update_current_kbest_answers(_,NewLogProb,Goal) :-
!,
keysort(NewList,SortedList),%format(user_error,'updated variant of ~w~n',[Goal]),
retract(current_kbest(K,_,Len)),
-assert(current_kbest(K,SortedList,Len)).
+assertz(current_kbest(K,SortedList,Len)).
update_current_kbest_answers(K,NewLogProb,Goal) :-
retract(current_kbest(OldThres,List,Length)),
sorted_insert(NewLogProb-Goal,List,NewList),%format(user_error,'inserted new element ~w~n',[Goal]),
NewLength is Length+1,
(NewLength < K ->
-assert(current_kbest(OldThres,NewList,NewLength))
+assertz(current_kbest(OldThres,NewList,NewLength))
;
(NewLength>K ->
First is NewLength-K+1,
@@ -2544,7 +2544,7 @@ update_current_kbest_answers(K,NewLogProb,Goal) :-
; FinalList=NewList, FinalLength=NewLength),
FinalList=[NewThres-_|_],
nb_setval(problog_threshold,NewThres),
-assert(current_kbest(NewThres,FinalList,FinalLength))).
+assertz(current_kbest(NewThres,FinalList,FinalLength))).

% this fails if there is no variant -> go to second case above
update_prob_of_known_answer([OldLogP-OldGoal|List],Goal,NewLogProb,[MaxLogP-OldGoal|List]) :-
@@ -2861,7 +2861,7 @@ write_bdd_struct_script(Trie,BDDFile,Variables) :-
Levels = [ROptLevel]
),
% Removed forall here, because it hides 'Variables' from what comes afterwards
-once(member(OptLevel, Levels)),
+memberchk(OptLevel, Levels),
(
(problog_flag(use_db_trie, true) ->
tries:trie_db_opt_min_prefix(MinPrefix),
@@ -212,7 +212,7 @@
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

:- module(extlists, [open_end_memberchk/2, open_end_add/3, open_end_add_unique/3, open_end_close_end/2]).
-:- ensure_loaded(library(lists)).
+:- use_module(library(lists)).

open_end_memberchk(_A, []):-!, fail.
open_end_memberchk(A, L-E):-
@@ -435,14 +435,14 @@ flag_validate_directory(Value):-
flag_validate_directory(Value):-
atomic(Value),
% fixme : why not inform the user???
-catch((not(file_exists(Value)), make_directory(Value)), _, fail).
+catch((\+ file_exists(Value), make_directory(Value)), _, fail).

flag_validate_file.
flag_validate_file(Value):-
catch(file_exists(Value), _, fail), file_property(Value, type(regular)), !.
flag_validate_file(Value):-
atomic(Value),
-catch((not(file_exists(Value)), tell(Value)), _, fail),
+catch((\+ file_exists(Value), tell(Value)), _, fail),
told,
delete_file(Value).
@@ -276,7 +276,7 @@
hash_table_display/3,
problog_key_to_tuple/2]).

-:- ensure_loaded(library(lists)).
+:- use_module(library(lists)).
%
% General use predicates
%
@@ -705,7 +705,7 @@ hash_table_get_elements(RevArray, RevSize, RevSize, Tupples):-
hash_table_get_elements(_RevArray, RevSize, RevSize, []).

hash_table_get_chains(Array, Size, Chains):-
-((array_element(Array, Size, ChainID), not(ChainID == 0)) ->
+((array_element(Array, Size, ChainID), ChainID \== 0) ->
(integer(ChainID) ->
get_array_name(ChainID, ChainName)
;
@@ -206,9 +206,9 @@

:- module(mc_DNF_sampling, [problog_dnf_sampling/3]).

-:- ensure_loaded(library(lists)).
+:- use_module(library(lists)).

-:- ensure_loaded(variables).
+:- use_module(variables).
:- use_module(sampling, _, [problog_random/1,
problog_convergence_check/6]).

@@ -217,7 +217,7 @@

:- use_module(os, _, [convert_filename_to_working_path/2]).

-:- ensure_loaded(hash_table).
+:- use_module(hash_table).

:- problog_define_flag(search_method, problog_flag_validate_in_list([linear, binary]), 'search method for picking proof', binary, monte_carlo_sampling_dnf).
:- problog_define_flag(represent_world, problog_flag_validate_in_list([list, record, array, hash_table]), 'structure that represents sampled world', array, monte_carlo_sampling_dnf).
@@ -216,7 +216,7 @@


% load library modules
-:- ensure_loaded(library(system)).
+:- use_module(library(system)).

% load our own modules
:- use_module(gflags, _, [flag_get/2]).
@@ -225,7 +225,7 @@

set_problog_path(Path):-
retractall(problog_path(_)),
-assert(problog_path(Path)).
+assertz(problog_path(Path)).

convert_filename_to_working_path(File_Name, Path):-
flag_get(dir, Dir),
@@ -225,11 +225,11 @@
problog_help/0]).

% load library modules
-:- ensure_loaded(library(lists)).
+:- use_module(library(lists)).

% load our own modules
-:- ensure_loaded(flags).
-:- ensure_loaded(variables).
+:- use_module(flags).
+:- use_module(variables).


% size, line_char, line_char_bold
@@ -211,10 +211,10 @@


% load library modules
-:- ensure_loaded(library(system)).
+:- use_module(library(system)).

% load our own modules
-:- ensure_loaded(flags).
+:- use_module(flags).

:- initialization(problog_define_flag(verbosity_learning, problog_flag_validate_0to5,'How much output shall be given (0=nothing,5=all)',5, learning_general)).
@@ -212,7 +212,7 @@

:- use_module(os, _, [convert_filename_to_working_path/2]).

-:- ensure_loaded(library(random)).
+:- use_module(library(random)).

:- problog_define_flag(mc_batchsize, problog_flag_validate_posint, 'number of samples before update in montecarlo', 1000, monte_carlo_sampling).
@@ -233,7 +233,7 @@
problog_tabling_get_negated_from_id/2,
op(1150, fx, problog_table)]).

-:- ensure_loaded(library(lists)).
+:- use_module(library(lists)).

:- use_module(extlists, _, [open_end_memberchk/2,
open_end_add/3,
@@ -248,8 +248,8 @@
empty_ptree/1]).

:- op( 1150, fx, problog_table ).
-:- meta_predicate problog_table(:).
-:- meta_predicate problog_neg(:).
+:- meta_predicate(problog_table(0)).
+:- meta_predicate(problog_neg(0)).
:- dynamic problog_tabled/1, has_synonyms/0, problog_tabling_retain/1.
:- problog_define_flag(max_depth, problog_flag_validate_integer, 'maximum proof depth', -1).
:- problog_define_flag(retain_tables, problog_flag_validate_boolean, 'retain tables after query', false).
@@ -277,7 +277,7 @@ clear_tabling:-
clear_tabling.

retain_tabling:-
-forall(problog_chktabled(_, Trie), assert(problog_tabling_retain(Trie))).
+forall(problog_chktabled(_, Trie), assertz(problog_tabling_retain(Trie))).

clear_retained_tables:-
forall(problog_tabling_retain(Trie), delete_ptree(Trie)),
@@ -311,7 +311,7 @@ problog_table((P1, P2), M) :-
problog_table(Name/Arity, Module) :-
makeargs(Arity, Args),
Head =.. [Name|Args],
-not(predicate_property(Module:Head, dynamic)), !,
+\+ predicate_property(Module:Head, dynamic), !,
throw(error('problog_table: Problog tabling currently requires the predicate to be declared dynamic and compiles it to static.')).
problog_table(Name/Arity, Module) :-
makeargs(Arity, Args),
@@ -322,7 +322,7 @@ problog_table(Name/Arity, Module) :-

% Monte carlo tabling
table(Module:MCName/Arity),
-assert(problog_tabled(Module:Name/Arity)),
+assertz(problog_tabled(Module:Name/Arity)),

findall(_,(
OriginalPred =.. [OriginalName|Args],
@@ -334,7 +334,7 @@ problog_table(Name/Arity, Module) :-
OriginalPred =.. [OriginalName|Args],
MCPred =.. [MCName|Args],
ExactPred =.. [ExactName|Args],
-assert(Module:(
+assertz(Module:(
Head:-
(problog:problog_control(check, exact) ->
ExactPred
@@ -361,7 +361,7 @@ problog_table(Name/Arity, Module) :-
Finished
),
b_getval(problog_current_proof, IDs),
-not(open_end_memberchk(not(t(Hash)), IDs)),
+\+ open_end_memberchk(not(t(Hash)), IDs),
open_end_add_unique(t(Hash), IDs, NIDs),
b_setval(problog_current_proof, NIDs)
;
@@ -413,7 +413,7 @@ problog_table(Name/Arity, Module) :-
delete_ptree(SuspTrie)
),
b_setval(CurrentControlTrie, OCurTrie),
-not(open_end_memberchk(not(t(Hash)), OIDs)),
+\+ open_end_memberchk(not(t(Hash)), OIDs),
open_end_add_unique(t(Hash), OIDs, NOIDs),
b_setval(problog_current_proof, NOIDs)
)
@@ -435,8 +435,8 @@ problog_abolish_table(M:P/A):-
problog_neg(M:G):-
problog:problog_control(check, exact),
functor(G, Name, Arity),
-not(problog_tabled(M:Name/Arity)),
-not(problog:problog_predicate(Name, Arity)),
+\+ problog_tabled(M:Name/Arity),
+\+ problog:problog_predicate(Name, Arity),
throw(problog_neg_error('Error: goal must be dynamic and tabled', M:G)).
problog_neg(M:G):-
% exact inference
@@ -446,20 +446,20 @@ problog_neg(M:G):-
M:G,
b_getval(problog_current_proof, L),
open_end_close_end(L, [Trie]),
-not(open_end_memberchk(Trie, IDs)),
+\+ open_end_memberchk(Trie, IDs),
open_end_add_unique(not(Trie), IDs, NIDs),
b_setval(problog_current_proof, NIDs).
problog_neg(M:G):-
% monte carlo sampling
problog:problog_control(check, mc),
-not(M:G).
+\+ M:G.

% This predicate assigns a synonym for negation that means: NotName = problog_neg(Name)
problog_tabling_negated_synonym(Name, NotName):-
recorded(problog_table_synonyms, negated(Name, NotName), _), !.
problog_tabling_negated_synonym(Name, NotName):-
retractall(has_synonyms),
-assert(has_synonyms),
+assertz(has_synonyms),
recordz(problog_table_synonyms, negated(Name, NotName), _).

problog_tabling_get_negated_from_pred(Pred, Ref):-
@@ -225,7 +225,7 @@ timer_start(Name) :-
throw(timer_already_started(timer_start(Name)));

statistics(walltime,[StartTime,_]),
-assert(timer(Name,StartTime))
+assertz(timer(Name,StartTime))
).

timer_stop(Name,Duration) :-
@@ -244,7 +244,7 @@ timer_pause(Name) :-
->
statistics(walltime,[StopTime,_]),
Duration is StopTime-StartTime,
-assert(timer_paused(Name,Duration));
+assertz(timer_paused(Name,Duration));

throw(timer_not_started(timer_pause(Name)))
).
@@ -255,7 +255,7 @@ timer_pause(Name, Duration) :-
->
statistics(walltime,[StopTime,_]),
Duration is StopTime-StartTime,
-assert(timer_paused(Name,Duration));
+assertz(timer_paused(Name,Duration));

throw(timer_not_started(timer_pause(Name)))
).
@@ -266,7 +266,7 @@ timer_resume(Name):-
->
statistics(walltime,[ResumeTime,_]),
CorrectedStartTime is ResumeTime-Duration,
-assert(timer(Name,CorrectedStartTime));
+assertz(timer(Name,CorrectedStartTime));

throw(timer_not_paused(timer_resume(Name)))
).
@@ -245,13 +245,13 @@
]).

% load library modules
-:- ensure_loaded(library(tries)).
-:- ensure_loaded(library(lists)).
-:- ensure_loaded(library(system)).
-:- ensure_loaded(library(ordsets)).
+:- use_module(library(tries)).
+:- use_module(library(lists)).
+:- use_module(library(system)).
+:- use_module(library(ordsets)).

% load our own modules
-:- ensure_loaded(flags).
+:- use_module(flags).

% switch on all tests to reduce bug searching time
:- style_check(all).
@@ -435,7 +435,7 @@ bdd_struct_ptree_script(Trie, FileBDD, Variables) :-
edges_ptree(Trie, Variables),
name_vars(Variables), % expected by output_compressed_script/1?
length(Variables, VarCount),
-assert(c_num(1)),
+assertz(c_num(1)),
bdd_pt(Trie, CT),
c_num(NN),
IntermediateSteps is NN - 1,
@@ -643,7 +643,7 @@ bdd_ptree_script(Trie, FileBDD, FileParam) :-

told,
length(Edges, VarCount),
-assert(c_num(1)),
+assertz(c_num(1)),
bdd_pt(Trie, CT),
c_num(NN),
IntermediateSteps is NN - 1,
@@ -736,12 +736,12 @@ bdd_pt(Trie, false) :-
empty_ptree(Trie),
!,
retractall(c_num(_)),
-assert(c_num(2)).
+assertz(c_num(2)).
bdd_pt(Trie, true) :-
trie_check_entry(Trie, [true], _),
!,
retractall(c_num(_)),
-assert(c_num(2)).
+assertz(c_num(2)).

% general case: transform trie to nested tree structure for compression
bdd_pt(Trie, CT) :-
@@ -977,7 +977,7 @@ format_compression_script([A, B|C]) :-
get_next_name(Name) :-
retract(c_num(N)),
NN is N + 1,
-assert(c_num(NN)),
+assertz(c_num(NN)),
atomic_concat('L', N, Name).

% create BDD-var as fact id prefixed by x
@@ -1030,7 +1030,7 @@ print_nested_ptree(Trie, Level, Space):-
spacy_print(begin(t(Trie)), Level, Space),
fail.
print_nested_ptree(Trie, Level, Space):-
-assert(nested_ptree_printed(Trie)),
+assertz(nested_ptree_printed(Trie)),
trie_path(Trie, Path),
NewLevel is Level + 1,
spacy_print(Path, NewLevel, Space),
@@ -1117,7 +1117,7 @@ generate_BDD_from_trie(Trie, TrieInter, Stream):-
get_next_intermediate_step(TrieInter),
write_bdd_line(OrLineTerms, TrieInter, '+', Stream)
),
-assert(generated_trie(Trie, TrieInter)).
+assertz(generated_trie(Trie, TrieInter)).

write_bdd_line([], _LineInter, _Operator, _Stream):-!.
write_bdd_line(LineTerms, LineInter, Operator, Stream):-
@@ -1172,13 +1172,13 @@ bddvars_to_script([H|T], Stream):-
bddvars_to_script(T, Stream).

get_next_intermediate_step('L1'):-
-not(clause(next_intermediate_step(_), _)), !,
-assert(next_intermediate_step(2)).
+\+ clause(next_intermediate_step(_), _), !,
+assertz(next_intermediate_step(2)).
get_next_intermediate_step(Inter):-
next_intermediate_step(InterStep),
retract(next_intermediate_step(InterStep)),
NextInterStep is InterStep + 1,
-assert(next_intermediate_step(NextInterStep)),
+assertz(next_intermediate_step(NextInterStep)),
atomic_concat(['L', InterStep], Inter).

make_bdd_var('true', 'TRUE'):-!.
@@ -1201,9 +1201,9 @@ add_to_vars(V):-
clause(get_used_vars(Vars, Cnt), true), !,
retract(get_used_vars(Vars, Cnt)),
NewCnt is Cnt + 1,
-assert(get_used_vars([V|Vars], NewCnt)).
+assertz(get_used_vars([V|Vars], NewCnt)).
add_to_vars(V):-
-assert(get_used_vars([V], 1)).
+assertz(get_used_vars([V], 1)).


%%%%%%%%%%%%%%% depth breadth builtin support %%%%%%%%%%%%%%%%%
@@ -1232,14 +1232,14 @@ variable_in_dbtrie(Trie, V):-

get_next_variable(V, depth(L, _S)):-
member(V, L),
-not(islabel(V)).
+\+ islabel(V).
get_next_variable(V, breadth(L, _S)):-
member(V, L),
-not(islabel(V)).
+\+ islabel(V).
get_next_variable(V, L):-
member(V, L),
-not(islabel(V)),
-not(isnestedtrie(V)).
+\+ islabel(V),
+\+ isnestedtrie(V).

get_variable(not(V), R):-
!, get_variable(V, R).
@@ -1408,7 +1408,7 @@ preprocess(_, _, _, FinalEndCount, FinalEndCount).

make_nested_trie_base_cases(Trie, t(ID), DepthBreadthTrie, OptimizationLevel, StartCount, FinalEndCount, Ancestors):-
trie_to_depth_breadth_trie(Trie, DepthBreadthTrie, Label, OptimizationLevel, StartCount, EndCount),
-(not(Label = t(_)) ->
+(Label \= t(_) ->
FinalEndCount = EndCount,
problog:problog_chktabled(ID, RTrie),!,
get_set_trie_from_id(t(ID), Label, RTrie, Ancestors, _, Ancestors)
@@ -1439,7 +1439,7 @@ trie_nested_to_db_trie(Trie, DepthBreadthTrie, FinalLabel, OptimizationLevel, St

nested_trie_to_db_trie(Trie, DepthBreadthTrie, FinalLabel, OptimizationLevel, StartCount, FinalEndCount, Module:GetTriePredicate, Ancestors, ContainsLoop, Childs, ChildsAcc):-
trie_to_depth_breadth_trie(Trie, DepthBreadthTrie, Label, OptimizationLevel, StartCount, EndCount),
-(not(Label = t(_)) ->
+(Label \= t(_) ->
(var(ContainsLoop) ->
ContainsLoop = false
;
@@ -1933,7 +1933,7 @@ dwriteln(A):-

non_false([], []):-!.
non_false([H|T], [H|NT]):-
-not(H == false),
+H \== false,
non_false(T, NT).
non_false([H|T], NT):-
H == false,
@@ -1945,11 +1945,11 @@ one_true(_, _, 'TRUE'):-!.

all_false(false,false,false).
one_non_false(L, false, false, L):-
-not(L == false), !.
+L \== false, !.
one_non_false(false, L, false, L):-
-not(L == false), !.
+L \== false, !.
one_non_false(false, false, L, L):-
-not(L == false), !.
+L \== false, !.

trie_seperate(Trie, Var, TrieWith, TrieWithNeg, TrieWithOut):-
trie_traverse(Trie, R),
@@ -1999,7 +1999,7 @@ mark_deref(DB_Trie):-
traverse_ptree(DB_Trie, DB_Term),
(DB_Term = depth(List, Inter); DB_Term = breadth(List, Inter)),
member(L, List),
-((islabel(L), not(deref(L, _))) ->
+((islabel(L), \+ deref(L, _)) ->
asserta(deref(L, Inter))
;
true
@@ -214,11 +214,11 @@


% load library modules
-:- ensure_loaded(library(lists)).
-:- ensure_loaded(library(system)).
+:- use_module(library(lists)).
+:- use_module(library(system)).

% load our own modules
-:- ensure_loaded(os).
+:- use_module(os).

%========================================================================
%=
@@ -206,8 +206,8 @@

:- module(variable_elimination, [trie_check_for_and_cluster/1, trie_replace_and_cluster/2, clean_up/0, variable_elimination_stats/3]).

-:- ensure_loaded(library(lists)).
-:- ensure_loaded(library(tries)).
+:- use_module(library(lists)).
+:- use_module(library(tries)).

:- use_module('flags', _, [problog_define_flag/5]).

@@ -373,7 +373,7 @@ last_cluster_element(L, Cluster, R):-

nocluster([], _).
nocluster([H|T], L):-
-not(memberchk(H, L)),
+\+ memberchk(H, L),
nocluster(T, L).

eliminate_list([], L, L).
@@ -418,8 +418,8 @@ make_prob_fact(L, P, ID):-
(clause(problog:problog_predicate(var_elimination, 1), true) ->
true
;
-assert(problog:problog_predicate(var_elimination, 1))
+assertz(problog:problog_predicate(var_elimination, 1))
),
-assert(problog:problog_var_elimination(ID, L, P))
+assertz(problog:problog_var_elimination(ID, L, P))
).
@@ -376,7 +376,8 @@ problog_var_timer_timeout(Variable):-
%%% This is possible for future use %%%

:- use_module(library(timeout)).
-:- meta_predicate problog_var_time_out(:,_,_,_), problog_time_out(:,_,_,_).
+:- meta_predicate(problog_var_time_out(0, *, *, *)).
+:- meta_predicate(problog_time_out(0, *, *, *)).
%
% Problems with nesting, use with care
% always succeeds returns Success = true/fail, Time = Msec taken/timeout
@@ -222,17 +222,17 @@
:- yap_flag(unknown,error).

% load modules from the YAP library
-:- ensure_loaded(library(lists)).
-:- ensure_loaded(library(random)).
-:- ensure_loaded(library(system)).
+:- use_module(library(lists)).
+:- use_module(library(random)).
+:- use_module(library(system)).

% load our own modules
-:- ensure_loaded(problog).
-:- ensure_loaded('problog/logger').
-:- ensure_loaded('problog/flags').
-:- ensure_loaded('problog/os').
-:- ensure_loaded('problog/print_learning').
-:- ensure_loaded('problog/utils_learning').
+:- use_module(problog).
+:- use_module('problog/logger').
+:- use_module('problog/flags').
+:- use_module('problog/os').
+:- use_module('problog/print_learning').
+:- use_module('problog/utils_learning').

% used to indicate the state of the system
:- dynamic values_correct/0.
@@ -452,7 +452,7 @@ do_learning_intern(Iterations,Epsilon) :-
retractall(current_iteration(_)),
!,
NextIteration is CurrentIteration+1,
-assert(current_iteration(NextIteration)),
+assertz(current_iteration(NextIteration)),
EndIteration is CurrentIteration+Iterations-1,

format_learning(1,'~nIteration ~d of ~d~n',[CurrentIteration,EndIteration]),
@@ -484,12 +484,12 @@ do_learning_intern(Iterations,Epsilon) :-
(
retractall(last_mse(_)),
logger_get_variable(mse_trainingset,Current_MSE),
-assert(last_mse(Current_MSE)),
+assertz(last_mse(Current_MSE)),
!,
MSE_Diff is abs(Last_MSE-Current_MSE)
); (
logger_get_variable(mse_trainingset,Current_MSE),
-assert(last_mse(Current_MSE)),
+assertz(last_mse(Current_MSE)),
MSE_Diff is Epsilon+1
)
),
@@ -608,7 +608,7 @@ init_learning :-
true
),
bb_delete(training_examples,TrainingExampleCount),
-assert(example_count(TrainingExampleCount)),
+assertz(example_count(TrainingExampleCount)),
format_learning(3,'~q training examples~n',[TrainingExampleCount]),
!,
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
@@ -641,8 +641,8 @@ init_learning :-
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% done
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-assert(current_iteration(0)),
-assert(learning_initialized),
+assertz(current_iteration(0)),
+assertz(learning_initialized),

format_learning(1,'~n',[]).

@@ -733,10 +733,10 @@ init_one_query(QueryID,Query,Type) :-
query_md5(OtherQueryID,Query_MD5,Type)
->
(
-assert(query_is_similar(QueryID,OtherQueryID)),
+assertz(query_is_similar(QueryID,OtherQueryID)),
format_learning(3, '~q is similar to ~q~2n', [QueryID,OtherQueryID])
);
-assert(query_md5(QueryID,Query_MD5,Type))
+assertz(query_md5(QueryID,Query_MD5,Type))
)
);

@@ -808,7 +808,7 @@ update_values :-
% stop write current probabilities to file
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

-assert(values_correct).
+assertz(values_correct).


@@ -917,14 +917,14 @@ my_load_intern(end_of_file,_,_) :-
!.
my_load_intern(query_probability(QueryID,Prob),Handle,QueryID) :-
!,
-assert(query_probability_intern(QueryID,Prob)),
+assertz(query_probability_intern(QueryID,Prob)),
read(Handle,X),
my_load_intern(X,Handle,QueryID).
my_load_intern(query_gradient(QueryID,XFactID,Type,Value),Handle,QueryID) :-
!,
atomic_concat(x,StringFactID,XFactID),
atom_number(StringFactID,FactID),
-assert(query_gradient_intern(QueryID,FactID,Type,Value)),
+assertz(query_gradient_intern(QueryID,FactID,Type,Value)),
read(Handle,X),
my_load_intern(X,Handle,QueryID).
my_load_intern(X,Handle,QueryID) :-
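One change above that is more than a mechanical rename is the switch from `:- meta_predicate problog_table(:).` to `:- meta_predicate(problog_table(0)).`: the `0` specifier declares that the argument is a goal the predicate will call directly, while `:` only marks it as module-sensitive and `*` marks an ordinary argument, which tends to port better across YAP and SWI-Prolog. A minimal sketch of that declaration style, using a made-up `twice/1` wrapper that is not part of ProbLog:

```prolog
:- module(demo_meta, [twice/1]).

% The 0 specifier tells the system that the first argument is called
% as a goal, so it gets wrapped with the caller's module at compile time.
:- meta_predicate(twice(0)).

% twice(G): run the goal G two times in the module of the caller.
twice(G) :-
    call(G),
    call(G).
```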