Vitor Santos Costa 2019-03-15 12:38:09 +00:00
parent e3c399af48
commit 044329d115
12 changed files with 250 additions and 344 deletions

View File

@ -1135,7 +1135,15 @@ static Int create_static_array(USES_REGS1) {
props = array_of_terms;
if (args[CREATE_ARRAY_NB_TERM].used)
props = array_of_nb_terms;
/* if (args[CREATE_ARRAY_MATRIX].used) {
tprops = args[CREATE_ARRAY_TYPE].tvalue;
if (tprops == TermTrue) {
in_matrix = true;
size += sizeof(MP_INT)/sizeof(CELL);
}
}
*/
StaticArrayEntry *pp;
if (IsVarTerm(t)) {
Yap_Error(INSTANTIATION_ERROR, t, "create static array");

View File

@ -1463,7 +1463,7 @@ static int not_was_reconsulted(PredEntry *p, Term t, int mode) {
// p->src.OwnerFile = Yap_ConsultingFile(PASS_REGS1);
}
LOCAL_LastAssertedPred = p;
ret>urn TRUE; /* careful */
return TRUE; /* careful */
}
static yamop *addcl_permission_error(const char *file, const char *function,
@ -1750,7 +1750,7 @@ bool Yap_addclause(Term t, yamop *cp, Term tmode, Term mod, Term *t4ref)
PELOCK(20, p);
/* we are redefining a prolog module predicate */
if (Yap_constPred(p)) {
addcl_permission_error(__FILE__, __FUNCTION__, __LINE__, tf,
addcl_permission_error(__FILE__, __FUNCTION__, __LINE__, p,
FALSE);
UNLOCKPE(30, p);
return false;
@ -2189,7 +2189,7 @@ static Int p_purge_clauses(USES_REGS1) { /* '$purge_clauses'(+Func) */
PELOCK(21, pred);
if (pred->PredFlags & StandardPredFlag) {
UNLOCKPE(33, pred);
Yap_Error(PERMISSION_ERROR_MODIFY_STATIC_PROCEDURE, Yap_PredicateIndicator(CurrentModule, t), "assert/1");
Yap_Error(PERMISSION_ERROR_MODIFY_STATIC_PROCEDURE, Yap_TermToIndicator(CurrentModule, t), "assert/1");
return (FALSE);
}
purge_clauses(pred);
@ -4085,11 +4085,7 @@ static Int
| TabledPredFlag
#endif /* TABLING */
)) {
<<<<<<< HEAD
Yap_Error(PERMISSION_ERROR_MODIFY_STATIC_PROCEDURE, Yap_PredicateIndicator(CurrentModule, t),
=======
Yap_Error(PERMISSION_ERROR_MODIFY_STATIC_PROCEDURE, Yap_PredicateToIndicator(ap),
>>>>>>> ab56074bb1a1f428c5c0c2a1781e00b02bb58f03
"dbload_get_space/4");
return FALSE;
}

View File

@ -159,48 +159,6 @@ Term Yap_ExecuteCallMetaCall(Term g, Term mod) {
return Yap_MkApplTerm(PredMetaCall->FunctorOfPred, 4, ts);
}
PredEntry *Yap_get_pred(Term t, Term tmod, const char *pname) {
Term t0 = t;
restart:
if (IsVarTerm(t)) {
Yap_ThrowError(INSTANTIATION_ERROR, t0, pname);
return NULL;
} else if (IsAtomTerm(t)) {
PredEntry *ap = RepPredProp(Yap_GetPredPropByAtom(AtomOfTerm(t), tmod));
return ap;
} else if (IsIntegerTerm(t) && tmod == IDB_MODULE) {
return Yap_FindLUIntKey(IntegerOfTerm(t));
} else if (IsPairTerm(t)) {
t = Yap_MkApplTerm(FunctorCsult, 1, &t);
goto restart;
} else if (IsApplTerm(t)) {
Functor fun = FunctorOfTerm(t);
if (IsExtensionFunctor(fun)) {
Yap_ThrowError(TYPE_ERROR_CALLABLE, t, pname);
return NULL;
}
if (fun == FunctorModule) {
Term tmod = ArgOfTerm(1, t);
if (IsVarTerm(tmod)) {
Yap_ThrowError(INSTANTIATION_ERROR, t0, pname);
return NULL;
}
if (!IsAtomTerm(tmod)) {
Yap_ThrowError(TYPE_ERROR_ATOM, t0, pname);
return NULL;
}
t = ArgOfTerm(2, t);
goto restart;
}
PredEntry *ap = RepPredProp(Yap_GetPredPropByFunc(fun, tmod));
return ap;
} else {
Yap_ThrowError(TYPE_ERROR_CALLABLE, t0, pname);
}
return NULL;
}
Term Yap_TermToIndicator(Term t, Term mod) {
CACHE_REGS
// generate predicate indicator in this case

View File

@ -470,7 +470,6 @@
LogUpdClause *lcl = PREG->y_u.OtILl.d;
UInt timestamp = IntegerOfTerm(((CELL *)(B_YREG+1))[ap->ArityOfPE]);
fprintf(stderr,"- %p/%p %lu/%lu %lu-%lu\n",PREG,ap,timestamp,ap->TimeStampOfPred,PREG->y_u.OtILl.d->ClTimeStart,PREG->y_u.OtILl.d->ClTimeEnd);
#if defined(YAPOR) || defined(THREADS)
if (PP != ap) {
if (PP) UNLOCKPE(16,PP);

View File

@ -88,7 +88,14 @@ typedef struct non_single_struct_t {
struct non_single_struct_t *to_visit0=NULL, *to_visit,* to_visit_max;\
CELL *InitialH = HR;\
tr_fr_ptr TR0 = TR;\
reset:\
if (TR > (tr_fr_ptr)LOCAL_TrailTop - 256) { \
/* Trail overflow */\
goto trail_overflow;\
}\
if (HR + 1024 > ASP) { \
goto global_overflow;\
}\
reset:\
to_visit0 = Realloc(to_visit0,auxsz); \
pt0 = pt0_; pt0_end = pt0_end_; \
to_visit = to_visit0, \
@ -189,7 +196,7 @@ aux_overflow : { \
goto reset; }
#define def_trail_overflow() \
trail_overflow : { \
trail_overflow: { \
while (to_visit > to_visit0) { \
to_visit--; \
CELL *ptd0 = to_visit->ptd0; \
@ -234,6 +241,12 @@ if (IS_VISIT_MARKER) { \
return true; \
}
#define def_overflow() \
def_aux_overflow(); \
def_global_overflow(); \
def_trail_overflow()
#define CYC_APPL \
if (IS_VISIT_MARKER) { \
while (to_visit > to_visit0) { \
@ -254,7 +267,7 @@ static Term cyclic_complex_term(CELL *pt0_, CELL *pt0_end_ USES_REGS) {
return false;
def_aux_overflow();
def_overflow();
}
bool Yap_IsCyclicTerm(Term t USES_REGS) {
@ -300,6 +313,10 @@ static int cycles_in_complex_term( CELL *pt0_, CELL *pt0_end_ USES_REGS) {
struct non_single_struct_t *to_visit0=NULL, *to_visit, *to_visit_max;
CELL *InitialH = HR;
tr_fr_ptr TR0 = TR;
if (TR > (tr_fr_ptr)LOCAL_TrailTop - 256) { \
/* Trail overflow */\
goto trail_overflow;\
}\
reset:
pt0 = pt0_, pt0_end = pt0_end_;
@ -341,6 +358,9 @@ static int cycles_in_complex_term( CELL *pt0_, CELL *pt0_end_ USES_REGS) {
to_visit->ptf = ptf;
to_visit++;
ptf = HR;
if (HR + 1024 > ASP) { \
goto global_overflow;\
}\
HR += 2;
*ptd0 = VISIT_MARKER;
pt0 = ptd0;
@ -406,7 +426,7 @@ pop_text_stack(lvl);
return rc;
def_aux_overflow();
def_overflow();
}
@ -466,7 +486,7 @@ static bool ground_complex_term(CELL * pt0_, CELL * pt0_end_ USES_REGS) {
return true;
def_aux_overflow();
def_overflow();
}
bool Yap_IsGroundTerm(Term t) {
@ -523,7 +543,7 @@ if (to_visit > to_visit0) {
pop_text_stack(lvl);
return false;
def_aux_overflow();
def_overflow();
}
static Int var_in_term(
@ -610,11 +630,8 @@ static Term vars_in_complex_term(CELL *pt0_, CELL *pt0_end_ ,
} else {
return (inp);
}
def_trail_overflow();
def_overflow();
def_aux_overflow();
def_global_overflow();
}
/**
@ -774,9 +791,7 @@ static Term attvars_in_complex_term(
/*fprintf(stderr,"<%ld at %s\n", d0, __FUNCTION__)*/;
return output;
def_aux_overflow();
def_global_overflow();
def_trail_overflow();
def_overflow();
}
/** @pred term_attvars(+ _Term_,- _AttVars_)
@ -809,7 +824,6 @@ static Term new_vars_in_complex_term(
Int n=0;
CELL output = TermNil;
{
tr_fr_ptr myTR0 = TR;
int lvl = push_text_stack();
while (!IsVarTerm(inp) && IsPairTerm(inp)) {
Term t = HeadOfTerm(inp);
@ -830,8 +844,8 @@ static Term new_vars_in_complex_term(
}
WALK_COMPLEX_TERM();
output = MkPairTerm((CELL)ptd0, output);
TrailTerm(TR++) = *ptd0;
*ptd0 = TermFoundVar;
TrailTerm(TR++) = *ptd0;
*ptd0 = TermFoundVar;
if ((tr_fr_ptr)LOCAL_TrailTop - TR < 1024) {
goto trail_overflow;
}
@ -846,11 +860,7 @@ pop_text_stack(lvl);
return output;
def_aux_overflow();
def_trail_overflow();
def_global_overflow();
def_overflow();
}
/** @pred new_variables_in_term(+_CurrentVariables_, ? _Term_, -_Variables_)
@ -896,7 +906,6 @@ static Term vars_within_complex_term(
CELL output = AbsPair(HR);
while (!IsVarTerm(inp) && IsPairTerm(inp)) {
tr_fr_ptr myTR0;
Term t = HeadOfTerm(inp);
if (IsVarTerm(t)) {
CELL *ptr = VarOfTerm(t);
@ -923,9 +932,8 @@ static Term vars_within_complex_term(
return TermNil;
}
def_aux_overflow();
def_overflow();
def_global_overflow();
}
/** @pred variables_within_term(+_CurrentVariables_, ? _Term_, -_Variables_)
@ -961,7 +969,7 @@ static Int free_variables_in_term(
Term bounds = TermNil;
t = t0 = Deref(ARG1);
Int delta = 0;
while (!IsVarTerm(t) && IsApplTerm(t)) {
Functor f = FunctorOfTerm(t);
if (f == FunctorHat) {
@ -1027,8 +1035,7 @@ static Term non_singletons_in_complex_term(CELL * pt0_,
return ARG2;
}
def_aux_overflow();
def_trail_overflow();
def_overflow();
}
static Int p_non_singletons_in_term(
@ -1095,9 +1102,8 @@ static Int numbervars_in_complex_term(CELL * pt0_, CELL * pt0_end_, Int numbv,
pop_text_stack(lvl);
return numbv;
def_aux_overflow();
def_overflow();
def_global_overflow();
}
Int Yap_NumberVars(Term inp, Int numbv,
@ -1173,7 +1179,7 @@ static int max_numbered_var(CELL * pt0_, CELL * pt0_end_,
pop_text_stack(lvl);
return 0;
def_aux_overflow();
def_overflow();
}
static Int MaxNumberedVar(Term inp, UInt arity PASS_REGS) {

View File

Unify _NElems_ with the number of elements in _Matrix_.
X <== matrix( L, [dim=Dims,base=Bases] ).
( X <== '[]'(Dims0, array) of ints ) :- !,
foldl( norm_dim, Dims0, Dims, Bases, 1, _Size ),
matrix_new( ints , Dims, X ),
matrix_new( ints , Dims, _, X ),
matrix_base(X, Bases).
( X <== '[]'(Dims0, array) of floats ) :-
atom(X), !,
foldl( norm_dim, Dims0, _Dims, _Bases, 1, Size ),
static_array( X, Size, [float] ).
( X <== '[]'(Dims0, array) of floats ) :- !,
foldl( norm_dim, Dims0, Dims, Bases, 1, _Size ),
matrix_new( floats , Dims, X ),
matrix_new( floats , Dims,_, X ),
matrix_base(X, Bases).
( X <== '[]'(Dims0, static.array) of floats ) :-
atom(X), !,
foldl( norm_dim, Dims0, Dims, Bases, 1, _Size ),
static_array( Size, floats, X ).
( X <== '[]'(Dims0, array) of (I:J) ) :- !,
foldl( norm_dim, Dims0, Dims, Bases, 1, Size ),
matrix_seq(I, J, Dims, X),
@ -817,6 +817,10 @@ rhs(S, NS) :-
set_lhs(V, R) :- var(V), !, V = R.
set_lhs(V, R) :- number(V), !, V = R.
set_lhs(V, R) :- atom(V), !,
static_array_properties(V, N, _),
N1 is N-1,
foreach(I in 0..N1, V[I] <== R[I]).
set_lhs('[]'([Args], floats(RHS)), Val) :-
!,
integer(RHS),
@ -973,25 +977,6 @@ mtimes(I1, I2, V) :-
% three types of matrix: integers, floats and general terms.
%
matrix_new(terms.terms,Dims, '$matrix'(Dims, NDims, Size, Offsets, Matrix) ) :-
length(Dims,NDims),
foldl(size, Dims, 1, Size),
maplist(zero, Dims, Offsets),
functor( Matrix, c, Size).
matrix_new(opaque.ints,Dims,Matrix) :-
length(Dims,NDims),
new_ints_matrix_set(NDims, Dims, 0, Matrix).
matrix_new(opaque.floats,Dims,Matrix) :-
length(Dims,NDims),
new_floats_matrix_set(NDims, Dims, 0.0, Matrix).
matrix_new(array.Type(Size), Dims, Data, '$array'(Id) ) :-
length(Dims,NDims),
foldl(size, Dims, 1, Size),
maplist(zero, Dims, Offsets),
functor( Matrix, c, Size),
new_array(Size,Type,Dims,Data),
matrix_new(terms, Dims, Data, '$matrix'(Dims, NDims, Size, Offsets, Matrix) ) :-
length(Dims,NDims),
foldl(size, Dims, 1, Size),
@ -1058,7 +1043,7 @@ add_index_prefix( [L|Els0] , H ) --> [[H|L]],
add_index_prefix( Els0 , H ).
matrix_set_range( Mat, Pos, Els) :-
matrix_set( Mat, Pos, Els) :-
slice(Pos, Keys),
maplist( matrix_set(Mat), Keys, Els).
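The matrix.yap hunks above move callers from matrix_new/3 to a four-argument matrix_new(Type, Dims, Data, Matrix), whose third argument carries optional initial contents. A minimal usage sketch, assuming Data is a flat list of elements (as the C copy routines in the next file suggest) and that leaving it unbound skips initialisation:

% hypothetical calls illustrating the new convention
?- matrix_new(ints, [3,3], _, M).                    % uninitialised 3x3
?- matrix_new(floats, [2,2], [0.1,0.2,0.3,0.4], M).  % with initial data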

View File

@ -320,13 +320,15 @@ static YAP_Bool new_ints_matrix(void) {
int ndims = YAP_IntOfTerm(YAP_ARG1);
YAP_Term tl = YAP_ARG2, out;
int dims[MAX_DIMS];
YAP_Term data;
if (!scan_dims(ndims, tl, dims))
return FALSE;
out = new_int_matrix(ndims, dims, NULL);
if (out == YAP_TermNil())
return FALSE;
if (!cp_int_matrix(YAP_ARG3, out))
data = YAP_ARG3;
if (!YAP_IsVarTerm(data) && !cp_int_matrix(data, out))
return FALSE;
return YAP_Unify(YAP_ARG4, out);
}
@ -351,14 +353,15 @@ static YAP_Bool new_ints_matrix_set(void) {
static YAP_Bool new_floats_matrix(void) {
int ndims = YAP_IntOfTerm(YAP_ARG1);
YAP_Term tl = YAP_ARG2, out;
YAP_Term tl = YAP_ARG2, out, data;
int dims[MAX_DIMS];
if (!scan_dims(ndims, tl, dims))
return FALSE;
out = new_float_matrix(ndims, dims, NULL);
if (out == YAP_TermNil())
return FALSE;
if (!cp_float_matrix(YAP_ARG3, out))
data = YAP_ARG3;
if (!YAP_IsVarTerm(data) && !cp_float_matrix(data, out))
return FALSE;
return YAP_Unify(YAP_ARG4, out);
}
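With this change the underlying C predicates tolerate an unbound data argument: cp_int_matrix/cp_float_matrix run only when YAP_ARG3 is instantiated. A hedged Prolog-level sketch, with the argument order (NDims, Dims, Data, Matrix) read off the C code above:

?- new_ints_matrix(2, [2,3], _, M).              % fresh, uninitialised 2x3 matrix
?- new_ints_matrix(2, [2,3], [1,2,3,4,5,6], M).  % copy the given elements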

View File

@ -0,0 +1,146 @@
%========================================================================
%=
%=
%=
%========================================================================
/**
* @file problog/lbdd.yap
* support routines for BDD evaluation.
*
*/
%========================================================================
%= Updates all values of query_probability/2 and query_gradient/4
%= should always be called before these predicates are accessed
%= if the old values are still valid, nothing happens
%========================================================================
update_values :-
values_correct,
!.
update_values :-
\+ values_correct,
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% delete old values
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
retractall(query_probability_intern(_,_)),
retractall(query_gradient_intern(_,_,_,_)),
assertz(values_correct).
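Per the header comment, update_values/0 is meant to run before the cached predicates are read; once values_correct holds it is a no-op. A minimal calling sketch, assuming a public wrapper query_probability/2 over the cached facts (the wrapper name is an assumption, not shown in this diff):

query_probability(QueryID, Prob) :-
	update_values,                       % refresh caches if stale
	query_probability_intern(QueryID, Prob).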
update_query_cleanup(QueryID) :-
(
(query_is_similar(QueryID,_) ; query_is_similar(_,QueryID))
->
% either this query is similar to another or vice versa,
% therefore we don't delete anything
true;
retractall(query_gradient_intern(QueryID,_,_,_))
).
update_query(QueryID,Symbol,What_To_Update) :-
(
query_is_similar(QueryID,_)
->
% we don't have to evaluate the BDD
format_learning(4,'#',[]);
(
problog_flag(sigmoid_slope,Slope),
((What_To_Update=all;query_is_similar(_,QueryID)) -> Method='g' ; Method='l'),
gradient(QueryID, Method, Slope),
format_learning(4,'~w',[Symbol])
)
).
maplist_to_hash([], H0, H0).
maplist_to_hash([I-V|MapList], H0, Hash) :-
rb_insert(H0, V, I, H1),
maplist_to_hash(MapList, H1, Hash).
bind_maplist([]).
bind_maplist([Node-Theta|MapList]) :-
get_prob(Node, ProbFact),
inv_sigmoid(ProbFact, Theta),
bind_maplist(MapList).
tree_to_grad([], _, Grad, Grad).
tree_to_grad([Node|Tree], H, Grad0, Grad) :-
node_to_gradient_node(Node, H, GNode),
tree_to_grad(Tree, H, [GNode|Grad0], Grad).
%get_prob(Node, Prob) :-
% query_probability(Node,Prob), !.
get_prob(Node, Prob) :-
get_fact_probability(Node,Prob).
gradient(QueryID, l, Slope) :-
probability( QueryID, Slope, Prob),
assert(query_probability_intern(QueryID,Prob)),
fail.
gradient(_QueryID, l, _).
/* query_probability(21,6.775948e-01). */
gradient(QueryID, g, Slope) :-
recorded(QueryID, BDD, _),
query_gradients(BDD,Slope,I,Grad),
% writeln(grad(QueryID:I:Grad)),
assert(query_gradient_intern(QueryID,I,p,Grad)),
fail.
gradient(QueryID, g, Slope) :-
gradient(QueryID, l, Slope).
query_probability( DBDD, Slope, Prob) :-
DBDD = bdd(Dir, Tree, MapList),
bind_maplist(MapList),
run_sp(Tree, Slope, 1.0, Prob0),
(Dir == 1 -> Prob0 = Prob ; Prob is 1.0-Prob0).
query_gradients(bdd(Dir, Tree, MapList),Slope,I,Grad) :-
bind_maplist(MapList),
member(I-_, MapList),
run_grad(Tree, I, Slope, 0.0, Grad0),
( Dir = 1 -> Grad = Grad0 ; Grad is -Grad0).
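In formula form (an assumed reading of run_sp/4, with s the sigmoid_slope flag): a node over variable x with child probabilities P_L and P_R contributes

  P = \sigma(s x)\,P_L + (1 - \sigma(s x))\,P_R, \qquad \sigma(z) = \frac{1}{1 + e^{-z}},

negated (gnoden) nodes take the right branch as 1 - P_R, and Dir other than 1 flips the final result to 1 - P.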
node_to_gradient_node(pp(P-G,X,L,R), H, gnodep(P,G,X,Id,PL,GL,PR,GR)) :-
rb_lookup(X,Id,H),
(L == 1 -> GL=0, PL=1 ; L == 0 -> GL = 0, PL=0 ; L = PL-GL),
(R == 1 -> GR=0, PR=1 ; R == 0 -> GR = 0, PR=0 ; R = PR-GR).
node_to_gradient_node(pn(P-G,X,L,R), H, gnoden(P,G,X,Id,PL,GL,PR,GR)) :-
rb_lookup(X,Id,H),
(L == 1 -> GL=0, PL=1 ; L == 0 -> GL = 0, PL=0 ; L = PL-GL),
(R == 1 -> GR=0, PR=1 ; R == 0 -> GR = 0, PR=0 ; R = PR-GR).
run_sp([], _, P0, P0).
run_sp(gnodep(P,_G, X, _Id, PL, _GL, PR, _GR).Tree, Slope, _, PF) :-
EP = 1.0 / (1.0 + exp(-X * Slope) ),
P is EP*PL+ (1.0-EP)*PR,
run_sp(Tree, Slope, P, PF).
run_sp(gnoden(P,_G, X, _Id, PL, _GL, PR, _GR).Tree, Slope, _, PF) :-
EP is 1.0 / (1.0 + exp(-X * Slope) ),
P is EP*PL + (1.0-EP)*(1.0 - PR),
run_sp(Tree, Slope, P, PF).
run_grad([], _I, _, G0, G0).
run_grad([gnodep(P,G, X, Id, PL, GL, PR, GR)|Tree], I, Slope, _, GF) :-
EP is 1.0/(1.0 + exp(-X * Slope)),
P is EP*PL+ (1.0-EP)*PR,
G0 is EP*GL + (1.0-EP)*GR,
% don't forget the -X
( I == Id -> G is G0+(PL-PR)* EP*(1-EP)*Slope ; G = G0 ),
run_grad(Tree, I, Slope, G, GF).
run_grad([gnoden(P,G, X, Id, PL, GL, PR, GR)|Tree], I, Slope, _, GF) :-
EP is 1.0 / (1.0 + exp(-X * Slope) ),
P is EP*PL + (1.0-EP)*(1.0 - PR),
G0 is EP*GL - (1.0 - EP) * GR,
( I == Id -> G is G0+(PL+PR-1)*EP*(1-EP)*Slope ; G = G0 ),
run_grad(Tree, I, Slope, G, GF).
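run_grad/5 threads the partial derivative with respect to the logit \theta_I of fact I through the same traversal; the correction term at the node testing I follows from \sigma'(z) = \sigma(z)(1 - \sigma(z)) (a sketch of the identity the code encodes, not a verified derivation):

  \frac{\partial P}{\partial \theta_I} = \sigma G_L + (1 - \sigma) G_R + (P_L - P_R)\,\sigma(1 - \sigma)\,s,

with (P_L + P_R - 1) in place of (P_L - P_R) on gnoden nodes, matching the two clauses above.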

View File

@ -25,8 +25,7 @@ graph2bdd(Query,1,bdd(D,T,Vs)) :-
graph(X,Y, TrieList, Vs),
bdd_new(TrieList, C),
bdd_tree(C, BDD),
BDD = bdd(D,T,_Vs0),
writeln(BDD).
BDD = bdd(D,T,_Vs0).
:- set_problog_flag(init_method,(Q,N,Bdd,user:graph2bdd(Q,N,Bdd))).

View File

@ -421,6 +421,7 @@ do_learning_intern(Iterations,Epsilon) :-
logger_start_timer(duration),
% mse_testset,
% ground_truth_difference,
%leash(0),trace,
gradient_descent,
once(save_model),
@ -486,8 +487,8 @@ init_learning :-
succeeds_n_times(user:example(_,_,_,_),TrainingExampleCount),
assertz(example_count(TrainingExampleCount)),
format_learning(3,'~q training examples~n',[TrainingExampleCount]),
current_probs <== array[TrainingExampleCount ] of floats,
current_lls <== array[TrainingExampleCount ] of floats,
%current_probs <== array[TrainingExampleCount ] of floats,
%current_lls <== array[TrainingExampleCount ] of floats,
forall(tunable_fact(FactID,_GroundTruth),
set_fact_probability(FactID,0.5)
),
@ -507,18 +508,6 @@ init_learning :-
format_learning(1,'~n',[]).
%========================================================================
%= Updates all values of query_probability/2 and query_gradient/4
%= should always be called before these predicates are accessed
%= if the old values are still valid, nothing happens
%========================================================================
update_values :-
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% delete old values
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
qp <== current_probs.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% Check, if continuous facts are used.
% if yes, switch to problog_exact
@ -586,7 +575,7 @@ init_one_query(QueryID,Query,_Type) :-
problog_flag(init_method,(Query,N,Bdd,user:graph2bdd(Query,N,Bdd))),
!,
b_setval(problog_required_keep_ground_ids,false),
(QueryID mod 100 =:= 0 -> writeln(QueryID) ; true),
(QueryID mod 100 =:= 0 ->writeln(QueryID) ; true),
Bdd = bdd(Dir, Tree,MapList),
user:graph2bdd(Query,N,Bdd),
rb_new(H0),
@ -792,8 +781,7 @@ inv_sigmoid(T,Slope,InvSig) :-
%= probabilities of the examples have to be recalculated
%========================================================================
save_old_probabilities :-
old_prob <== p.
save_old_probabilities.
% vsc: avoid silly search
@ -828,59 +816,56 @@ set_tunable(I,Slope,P) :-
sigmoid(X,Slope,Pr),
set_fact_probability(I,Pr).
:- include(problog/lbdd).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% start calculate gradient
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
user:evaluate(LLH_Training_Queries, X,Grad,N,_,_) :-
%Handle = user_error,
LLs = current_lls,
Probs = current_probs,
%Handle = user_error,
example_count(TrainingExampleCount),
ExProbs <== array[TrainingExampleCount] of floats,
LLs <== array[N] of floats,
Probs <== array[N] of floats,
problog_flag(sigmoid_slope,Slope),
N1 is N-1,
forall(between(0,N1,I),
(Grad[I] <== 0.0, S <== X[I], sigmoid(S,Slope, P), Probs[I] <== P)
),
writeln(e0),
leash(0),trace,
forall(
user:example(QueryID,_Query,QueryProb),
compute_grad(QueryID, QueryProb,Grad, Probs, Slope,LLs)
recorded(QueryID,BDD,_),
compute_probability(BDD,Slope,QueryID,ExProbs)
),
writeln(Grad),
forall(
user:example(QueryID,_Query,QueryProb),
compute_gradient(QueryID, QueryProb,Grad, Probs, Slope,LLs)
),
trace,
LLH_Training_Queries <== sum(LLs).
compute_probability( BDD, Slope, Id, Probs) :-
query_probability( BDD, Slope, Prob),
Probs[Id] <== Prob.
compute_grad(QueryID,QueryProb, Grad, Probs, Slope, LLs) :-
recorded(QueryID,BDD,_),
BDD = bdd(_Dir, _GradTree, MapList),
bind_maplist(MapList, Slope, Probs),
qprobability(BDD,Slope,BDDProb),
compute_gradient(QueryID,QueryProb, Grad, Probs, ExProbs, Slope, LLs) :-
recorded(QueryID,BDD,_),
BDDProb <== ExProbs[QueryID],
forall(
query_gradients(BDD,Slope,I,GradValue),
gradient_pair(BDDProb, QueryProb, Grad, GradValue, I, Probs)
),
LL is (BDDProb-QueryProb)*(BDDProb-QueryProb),
LLs[QueryID] <== LL,
forall(
member(I-_,MapList),
gradientpair(Slope,BDDProb, QueryProb,Grad,Probs,BDD,I)
),
writeln(LL).
writeln(LL),
LLs[QueryID] <== LL.
gradientpair(Slope,BDDProb, QueryProb, Grad, Probs,BDD,I) :-
qgradient(I, BDD, Slope, FactID, GradValue),
G0 <== Grad[FactID],
Prob <== Probs[FactID],
gradient_pair(BDDProb, QueryProb, Grad, GradValue, I, Probs) :-
G0 <== Grad[I],
Prob <== Probs[I],
GN is G0-GradValue*2*Prob*(1-Prob)*(QueryProb-BDDProb),
Grad[FactID] <== GN.
qprobability(bdd(Dir, Tree, _MapList), Slope, Prob) :-
/* query_probability(21,6.775948e-01). */
run_sp(Tree, Slope, 1, Prob0),
(Dir == 1 -> Prob0 = Prob ; Prob is 1.0-Prob0).
qgradient(I, bdd(Dir,Tree,_), Slope, I, Grad) :-
run_grad(Tree, I, Slope, 1.0, 0.0, Grad0),
( Dir = 1 -> Grad = Grad0 ; Grad is -Grad0).
writeln(G0),
Grad[I] <== GN.
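gradient_pair/6 accumulates the chain rule for the squared error through the sigmoid parametrisation p = \sigma(\theta) (a hedged reading that matches the GN line above up to sign convention):

  \frac{\partial}{\partial \theta}\,(P_{bdd} - P_{query})^2 = 2\,(P_{bdd} - P_{query})\,\frac{\partial P_{bdd}}{\partial p}\,p\,(1 - p).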
wrap( X, Grad, GradCount) :-
tunable_fact(FactID,GroundTruth),
@ -894,52 +879,6 @@ wrap( X, Grad, GradCount) :-
wrap( _X, _Grad, _GradCount).
% writeln(grad(QueryID:I:Grad)),
% assert(query_gradient_intern(QueryID,I,p,Grad)),
% fail.
%gradient(QueryID, g, Slope) :-
% gradient(QueryID, l, Slope).
maplist_to_hash([], H0, H0).
maplist_to_hash([I-V|MapList], H0, Hash) :-
rb_insert(H0, V, I, H1),
maplist_to_hash(MapList, H1, Hash).
tree_to_grad([], _, Grad, Grad).
tree_to_grad([Node|Tree], H, Grad0, Grad) :-
node_to_gradient_node(Node, H, GNode),
tree_to_grad(Tree, H, [GNode|Grad0], Grad).
node_to_gradient_node(pp(P-G,X,L,R), H, gnodep(P,G,X,Id,PL,GL,PR,GR)) :-
rb_lookup(X,Id,H),
(L == 1 -> GL=0, PL=1 ; L == 0 -> GL = 0, PL=0 ; L = PL-GL),
(R == 1 -> GR=0, PR=1 ; R == 0 -> GR = 0, PR=0 ; R = PR-GR).
node_to_gradient_node(pn(P-G,X,L,R), H, gnoden(P,G,X,Id,PL,GL,PR,GR)) :-
rb_lookup(X,Id,H),
(L == 1 -> GL=0, PL=1 ; L == 0 -> GL = 0, PL=0 ; L = PL-GL),
(R == 1 -> GR=0, PR=1 ; R == 0 -> GR = 0, PR=0 ; R = PR-GR).
run_sp([], _, P0, P0).
run_sp(gnodep(P,_G, EP, _Id, PL, _GL, PR, _GR).Tree, Slope, PL, PF) :-
P is EP*PL+ (1.0-EP)*PR,
run_sp(Tree, Slope, P, PF).
run_sp(gnoden(P,_G, EP, _Id, PL, _GL, PR, _GR).Tree, Slope, PL, PF) :-
P is EP*PL + (1.0-EP)*(1.0 - PR),
run_sp(Tree, Slope, P, PF).
run_grad([], _I, _, _, G0, G0).
run_grad([gnodep(P,G, EP, Id, PL, GL, PR, GR)|Tree], I, Slope, PL, GL, GF) :-
P is EP*PL+ (1.0-EP)*PR,
G0 is EP*GL + (1.0-EP)*GR,
% don't forget the -X
( I == Id -> G is PL-PR ; G = G0 ),
run_grad(Tree, I, Slope, P, G, GF).
run_grad([gnoden(P,G, EP, Id, PL, GL, PR, GR)|Tree], I, Slope, PL, GL, GF) :-
P is EP*PL + (1.0-EP)*(1.0 - PR),
G0 is EP*GL - (1.0 - EP) * GR,
( I == Id -> G is PL-(1.0-PR) ; G = G0 ),
run_grad(Tree, I, Slope, P, G, GF).
prob2log(_X,Slope,FactID,V) :-
@ -1023,4 +962,3 @@ init_logger :-
:- initialization(init_flags).
:- initialization(init_logger).

View File

@ -664,138 +664,6 @@ init_one_query(_QueryID,_Query,_Type) :-
%========================================================================
%= Updates all values of query_probability/2 and query_gradient/4
%= should be called always before these predicates are accessed
%= if the old values are still valid, nothing happens
%========================================================================
update_values :-
values_correct,
!.
update_values :-
\+ values_correct,
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% delete old values
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
retractall(query_probability_intern(_,_)),
retractall(query_gradient_intern(_,_,_,_)),
assertz(values_correct).
%========================================================================
%=
%=
%=
%========================================================================
update_query_cleanup(QueryID) :-
(
(query_is_similar(QueryID,_) ; query_is_similar(_,QueryID))
->
% either this query is similar to another or vice versa,
% therefore we don't delete anything
true;
retractall(query_gradient_intern(QueryID,_,_,_))
).
update_query(QueryID,Symbol,What_To_Update) :-
(
query_is_similar(QueryID,_)
->
% we don't have to evaluate the BDD
format_learning(4,'#',[]);
(
problog_flag(sigmoid_slope,Slope),
((What_To_Update=all;query_is_similar(_,QueryID)) -> Method='g' ; Method='l'),
gradient(QueryID, Method, Slope),
format_learning(4,'~w',[Symbol])
)
).
bind_maplist([]).
bind_maplist([Node-Theta|MapList]) :-
get_prob(Node, ProbFact),
inv_sigmoid(ProbFact, Theta),
bind_maplist(MapList).
%get_prob(Node, Prob) :-
% query_probability(Node,Prob), !.
get_prob(Node, Prob) :-
get_fact_probability(Node,Prob).
gradient(QueryID, l, Slope) :-
/* query_probability(21,6.775948e-01). */
recorded(QueryID, bdd(Dir, Tree, MapList), _),
bind_maplist(MapList),
run_sp(Tree, Slope, 1.0, Prob0),
(Dir == 1 -> Prob0 = Prob ; Prob is 1.0-Prob0),
assert(query_probability_intern(QueryID,Prob)),
fail.
gradient(_QueryID, l, _).
gradient(QueryID, g, Slope) :-
recorded(QueryID, bdd(Dir, Tree, MapList), _),
bind_maplist(MapList),
member(I-_, MapList),
run_grad(Tree, I, Slope, 0.0, Grad0),
( Dir = 1 -> Grad = Grad0 ; Grad is -Grad0),
% writeln(grad(QueryID:I:Grad)),
assert(query_gradient_intern(QueryID,I,p,Grad)),
fail.
gradient(QueryID, g, Slope) :-
gradient(QueryID, l, Slope).
maplist_to_hash([], H0, H0).
maplist_to_hash([I-V|MapList], H0, Hash) :-
rb_insert(H0, V, I, H1),
maplist_to_hash(MapList, H1, Hash).
tree_to_grad([], _, Grad, Grad).
tree_to_grad([Node|Tree], H, Grad0, Grad) :-
node_to_gradient_node(Node, H, GNode),
tree_to_grad(Tree, H, [GNode|Grad0], Grad).
node_to_gradient_node(pp(P-G,X,L,R), H, gnodep(P,G,X,Id,PL,GL,PR,GR)) :-
rb_lookup(X,Id,H),
(L == 1 -> GL=0, PL=1 ; L == 0 -> GL = 0, PL=0 ; L = PL-GL),
(R == 1 -> GR=0, PR=1 ; R == 0 -> GR = 0, PR=0 ; R = PR-GR).
node_to_gradient_node(pn(P-G,X,L,R), H, gnoden(P,G,X,Id,PL,GL,PR,GR)) :-
rb_lookup(X,Id,H),
(L == 1 -> GL=0, PL=1 ; L == 0 -> GL = 0, PL=0 ; L = PL-GL),
(R == 1 -> GR=0, PR=1 ; R == 0 -> GR = 0, PR=0 ; R = PR-GR).
run_sp([], _, P0, P0).
run_sp(gnodep(P,_G, X, _Id, PL, _GL, PR, _GR).Tree, Slope, _, PF) :-
EP = 1.0 / (1.0 + exp(-X * Slope) ),
P is EP*PL+ (1.0-EP)*PR,
run_sp(Tree, Slope, P, PF).
run_sp(gnoden(P,_G, X, _Id, PL, _GL, PR, _GR).Tree, Slope, _, PF) :-
EP is 1.0 / (1.0 + exp(-X * Slope) ),
P is EP*PL + (1.0-EP)*(1.0 - PR),
run_sp(Tree, Slope, P, PF).
run_grad([], _I, _, G0, G0).
run_grad([gnodep(P,G, X, Id, PL, GL, PR, GR)|Tree], I, Slope, _, GF) :-
EP is 1.0/(1.0 + exp(-X * Slope)),
P is EP*PL+ (1.0-EP)*PR,
G0 is EP*GL + (1.0-EP)*GR,
% don't forget the -X
( I == Id -> G is G0+(PL-PR)* EP*(1-EP)*Slope ; G = G0 ),
run_grad(Tree, I, Slope, G, GF).
run_grad([gnoden(P,G, X, Id, PL, GL, PR, GR)|Tree], I, Slope, _, GF) :-
EP is 1.0 / (1.0 + exp(-X * Slope) ),
P is EP*PL + (1.0-EP)*(1.0 - PR),
G0 is EP*GL - (1.0 - EP) * GR,
( I == Id -> G is G0+(PL+PR-1)*EP*(1-EP)*Slope ; G = G0 ),
run_grad(Tree, I, Slope, G, GF).
%========================================================================
%= This predicate reads probability and gradient values from the file

View File

@ -64,4 +64,4 @@ constrain(Q, I, Space, R, J, J1) :-
Sum is I-J,
Diff is J-I,
Space += linear([1,-1], [Q,R], 'IRT_NQ', Diff),
Space += linear([1,-1], [Q,R], 'IRT_NQ', Sum).
Space += linear([1,-1], [Q,R], 'IRT_NQ', Sum).
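A reading of the two 'IRT_NQ' (integer disequality) posts above, assuming Q and R are the column variables of the queens in rows I and J: since Diff = J - I and Sum = I - J, together they exclude shared diagonals,

  Q - R ≠ J - I   and   Q - R ≠ I - J.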