OSX portability updates

start of support for commons prolog initiative
Vitor Santos Costa 2009-02-16 12:25:03 +00:00
parent 9c9444bece
commit a000af113b
177 changed files with 110 additions and 63460 deletions

View File

@ -11775,16 +11775,22 @@ Yap_absmi(int inp)
Op(add_int_c, ssn);
{
int off = PREG->u.ssn.s0;
if (Yap_isint[PREG->u.ssn.s1]) {
Yap_Ints[off] = Yap_Ints[PREG->u.ssn.s1]+PREG->u.ssn.n;
Yap_isint[off] = TRUE;
if (add_overflow(Yap_Ints[off],Yap_Ints[PREG->u.ssn.s1],PREG->u.ssn.n)) {
Int c2 = PREG->u.ssn.n;
int isint = Yap_isint[PREG->u.ssn.s1];
Yap_isint[off] = isint;
if (isint) {
Int c1 = Yap_Ints[PREG->u.ssn.s1];
Int sum = c1+c2;
if (add_overflow(sum,c1,c2)) {
PREG = ARITH_EXCEPTION;
GONext();
}
Yap_Ints[off] = sum;
} else {
Yap_Floats[off] = Yap_Floats[PREG->u.ssn.s1]+PREG->u.ssn.n;
Yap_isint[off] = FALSE;
Yap_Floats[off] = Yap_Floats[PREG->u.ssn.s1]+c2;
}
}
PREG = NEXTOP(PREG, ssn);
@ -12671,10 +12677,13 @@ Yap_absmi(int inp)
Op(put_fi_var_y, syl);
BEGD(d0);
if (Yap_isint[PREG->u.syl.s]) {
d0 = MkIntegerTerm(Yap_Ints[PREG->u.syl.s]);
} else {
d0 = MkFloatTerm(Yap_Floats[PREG->u.syl.s]);
{
COUNT nid = PREG->u.syl.s;
if (Yap_isint[nid]) {
d0 = MkIntegerTerm(Yap_Ints[nid]);
} else {
d0 = MkFloatTerm(Yap_Floats[nid]);
}
}
YREG[PREG->u.syl.y] = d0;
PREG = PREG->u.syl.l;

View File

@ -524,47 +524,6 @@ eval1(Int fi, Term t) {
}
}
case op_truncate:
{
Float dbl;
switch (ETypeOfTerm(t)) {
case long_int_e:
if (yap_flags[LANGUAGE_MODE_FLAG] == 1) { /* iso */
Yap_Error(TYPE_ERROR_FLOAT, t, "X is truncate(%f)", IntegerOfTerm(t));
P = (yamop *)FAILCODE;
RERROR();
} else {
RFLOAT(IntegerOfTerm(t));
}
case double_e:
dbl = FloatOfTerm(t);
break;
case big_int_e:
#ifdef USE_GMP
if (yap_flags[LANGUAGE_MODE_FLAG] == 1) { /* iso */
process_iso_error(Yap_BigIntOfTerm(t), t, "truncate");
RERROR();
} else {
dbl = mpz_get_d(Yap_BigIntOfTerm(t));
}
break;
#endif
case db_ref_e:
RERROR();
}
if (dbl >= 0 ) {
if (yap_flags[LANGUAGE_MODE_FLAG] == 1) { /* iso */
RBIG_FL(floor(dbl));
} else {
RFLOAT(floor(dbl));
}
} else {
if (yap_flags[LANGUAGE_MODE_FLAG] == 1) { /* iso */
RBIG_FL(ceil(dbl));
} else {
RFLOAT(ceil(dbl));
}
}
}
case op_integer:
{
Float dbl;

View File

@ -186,15 +186,25 @@ char *libdir = NULL;
void
Yap_InitSysPath(void) {
int len;
#if _MSC_VER || defined(__MINGW32__)
int dir_done = FALSE;
int commons_done = FALSE;
{
char *dir;
if ((dir = Yap_RegistryGetString("library"))) {
Yap_PutValue(AtomSystemLibraryDir,
MkAtomTerm(Yap_LookupAtom(dir)));
return;
dir_done = TRUE;
}
if ((dir = Yap_RegistryGetString("prolog_commons"))) {
Yap_PutValue(AtomPrologCommonsDir,
MkAtomTerm(Yap_LookupAtom(dir)));
commons_done = TRUE;
}
}
if (dir_done && commons_done)
return;
#endif
strncpy(Yap_FileNameBuf, SHARE_DIR, YAP_FILENAME_MAX);
#if _MSC_VER || defined(__MINGW32__)
@ -243,9 +253,24 @@ Yap_InitSysPath(void) {
#else
strncat(Yap_FileNameBuf,"/", YAP_FILENAME_MAX);
#endif
len = strlen(Yap_FileNameBuf);
strncat(Yap_FileNameBuf, "Yap", YAP_FILENAME_MAX);
Yap_PutValue(AtomSystemLibraryDir,
MkAtomTerm(Yap_LookupAtom(Yap_FileNameBuf)));
#if _MSC_VER || defined(__MINGW32__)
if (!dir_done)
#endif
{
Yap_PutValue(AtomSystemLibraryDir,
MkAtomTerm(Yap_LookupAtom(Yap_FileNameBuf)));
}
#if _MSC_VER || defined(__MINGW32__)
if (!commons_done)
#endif
{
Yap_FileNameBuf[len] = '\0';
strncat(Yap_FileNameBuf, "PrologCommons", YAP_FILENAME_MAX);
Yap_PutValue(AtomPrologCommonsDir,
MkAtomTerm(Yap_LookupAtom(Yap_FileNameBuf)));
}
}
static Int

View File

@ -1,85 +0,0 @@
#
# default base directory for YAP installation
#
ROOTDIR = @prefix@
#
# where the binary should be
#
BINDIR = $(ROOTDIR)/bin
#
# where YAP should look for libraries
#
LIBDIR=$(ROOTDIR)/lib/Yap
#
# where YAP should look for architecture-independent Prolog libraries
#
SHAREDIR=$(ROOTDIR)/share/Yap
#
#
# You shouldn't need to change what follows.
#
INSTALL=@INSTALL@
INSTALL_DATA=@INSTALL_DATA@
INSTALL_PROGRAM=@INSTALL_PROGRAM@
srcdir=@srcdir@
CLPBN_TOP= $(srcdir)/clpbn.yap
CLPBN_SRCDIR = $(srcdir)/clpbn
CLPBN_LEARNING_SRCDIR = $(srcdir)/learning
CLPBN_EXDIR = $(srcdir)/clpbn/examples
CLPBN_PROGRAMS= \
$(CLPBN_SRCDIR)/aggregates.yap \
$(CLPBN_SRCDIR)/bnt.yap \
$(CLPBN_SRCDIR)/connected.yap \
$(CLPBN_SRCDIR)/discrete_utils.yap \
$(CLPBN_SRCDIR)/display.yap \
$(CLPBN_SRCDIR)/dists.yap \
$(CLPBN_SRCDIR)/evidence.yap \
$(CLPBN_SRCDIR)/gibbs.yap \
$(CLPBN_SRCDIR)/graphs.yap \
$(CLPBN_SRCDIR)/graphviz.yap \
$(CLPBN_SRCDIR)/hmm.yap \
$(CLPBN_SRCDIR)/jt.yap \
$(CLPBN_SRCDIR)/matrix_cpt_utils.yap \
$(CLPBN_SRCDIR)/table.yap \
$(CLPBN_SRCDIR)/topsort.yap \
$(CLPBN_SRCDIR)/utils.yap \
$(CLPBN_SRCDIR)/vel.yap \
$(CLPBN_SRCDIR)/viterbi.yap \
$(CLPBN_SRCDIR)/xbif.yap
CLPBN_LEARNING_PROGRAMS= \
$(CLPBN_LEARNING_SRCDIR)/aleph_parms.yap \
$(CLPBN_LEARNING_SRCDIR)/bnt_parms.yap \
$(CLPBN_LEARNING_SRCDIR)/em.yap \
$(CLPBN_LEARNING_SRCDIR)/learn_utils.yap \
$(CLPBN_LEARNING_SRCDIR)/mle.yap
CLPBN_SCHOOL_EXAMPLES= \
$(CLPBN_EXDIR)/School/README \
$(CLPBN_EXDIR)/School/evidence_128.yap \
$(CLPBN_EXDIR)/School/schema.yap \
$(CLPBN_EXDIR)/School/school_128.yap \
$(CLPBN_EXDIR)/School/school_32.yap \
$(CLPBN_EXDIR)/School/school_64.yap \
$(CLPBN_EXDIR)/School/tables.yap
CLPBN_EXAMPLES= \
$(CLPBN_EXDIR)/cg.yap \
$(CLPBN_EXDIR)/sprinkler.yap
install: $(CLPBN_TOP) $(CLPBN_PROGRAMS)
mkdir -p $(DESTDIR)$(SHAREDIR)/clpbn
mkdir -p $(DESTDIR)$(SHAREDIR)/clpbn/learning
mkdir -p $(DESTDIR)$(SHAREDIR)/clpbn/examples/School
for h in $(CLPBN_TOP); do $(INSTALL_DATA) $$h $(DESTDIR)$(SHAREDIR); done
for h in $(CLPBN_PROGRAMS); do $(INSTALL_DATA) $$h $(DESTDIR)$(SHAREDIR)/clpbn; done
for h in $(CLPBN_LEARNING_PROGRAMS); do $(INSTALL_DATA) $$h $(DESTDIR)$(SHAREDIR)/clpbn/learning; done
for h in $(CLPBN_EXAMPLES); do $(INSTALL_DATA) $$h $(DESTDIR)$(SHAREDIR)/clpbn/examples; done
for h in $(CLPBN_SCHOOL_EXAMPLES); do $(INSTALL_DATA) $$h $(DESTDIR)$(SHAREDIR)/clpbn/examples/School; done

View File

@ -1,397 +0,0 @@
:- module(clpbn, [{}/1,
clpbn_flag/2,
set_clpbn_flag/2,
clpbn_flag/3,
clpbn_key/2,
clpbn_init_solver/4,
clpbn_run_solver/3,
clpbn_init_solver/5,
clpbn_run_solver/4]).
:- use_module(library(atts)).
:- use_module(library(lists)).
:- use_module(library(terms)).
:- op( 500, xfy, with).
%
% avoid the overhead of using goal_expansion/2.
%
:- multifile
user:term_expansion/2.
:- dynamic
user:term_expansion/2.
:- attribute key/1, dist/2, evidence/1, starter/0.
:- use_module('clpbn/vel',
[vel/3,
check_if_vel_done/1,
init_vel_solver/4,
run_vel_solver/3
]).
:- use_module('clpbn/jt',
[jt/3,
init_jt_solver/4,
run_jt_solver/3
]).
:- use_module('clpbn/bnt',
[do_bnt/3,
check_if_bnt_done/1
]).
:- use_module('clpbn/gibbs',
[gibbs/3,
check_if_gibbs_done/1,
init_gibbs_solver/4,
run_gibbs_solver/3
]).
:- use_module('clpbn/graphs',
[
clpbn2graph/1
]).
:- use_module('clpbn/dists',
[
dist/4,
get_dist/4,
get_evidence_position/3,
get_evidence_from_position/3
]).
:- use_module('clpbn/evidence',
[
store_evidence/1,
incorporate_evidence/2,
check_stored_evidence/2,
add_evidence/2
]).
:- use_module('clpbn/utils',
[
sort_vars_by_key/3
]).
:- use_module('clpbn/graphviz',
[clpbn2gviz/4]).
:- dynamic solver/1,output/1,use/1,suppress_attribute_display/1, parameter_softening/1, em_solver/1.
solver(vel).
em_solver(vel).
%output(xbif(user_error)).
%output(gviz(user_error)).
output(no).
suppress_attribute_display(false).
parameter_softening(m_estimate(10)).
clpbn_flag(Flag,Option) :-
clpbn_flag(Flag, Option, Option).
set_clpbn_flag(Flag,Option) :-
clpbn_flag(Flag, _, Option).
clpbn_flag(output,Before,After) :-
retract(output(Before)),
assert(output(After)).
clpbn_flag(solver,Before,After) :-
retract(solver(Before)),
assert(solver(After)).
clpbn_flag(em_solver,Before,After) :-
retract(em_solver(Before)),
assert(em_solver(After)).
clpbn_flag(bnt_solver,Before,After) :-
retract(bnt:bnt_solver(Before)),
assert(bnt:bnt_solver(After)).
clpbn_flag(bnt_path,Before,After) :-
retract(bnt:bnt_path(Before)),
assert(bnt:bnt_path(After)).
clpbn_flag(bnt_model,Before,After) :-
retract(bnt:bnt_model(Before)),
assert(bnt:bnt_model(After)).
clpbn_flag(suppress_attribute_display,Before,After) :-
retract(suppress_attribute_display(Before)),
assert(suppress_attribute_display(After)).
clpbn_flag(parameter_softening,Before,After) :-
retract(parameter_softening(Before)),
assert(parameter_softening(After)).
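%
% A minimal usage sketch (not part of the original file): flags are read
% with clpbn_flag/2 and changed with set_clpbn_flag/2, e.g.
%
%   ?- clpbn_flag(solver, S).
%   S = vel
%   ?- set_clpbn_flag(solver, jt).
%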
{_} :-
solver(none), !.
{Var = Key with Dist} :-
put_atts(El,[key(Key),dist(DistInfo,Parents)]),
dist(Dist, DistInfo, Key, Parents),
add_evidence(Var,Key,DistInfo,El).
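%
% Illustrative only (this example is not in the original file): a network
% node is introduced through the with/2 syntax above, e.g. a boolean node
% with no parents and one that depends on it:
%
%   ?- { X = rain(0) with p([t,f], [0.7,0.3]) }.
%   ?- { Y = wet(0) with p([t,f], [0.9,0.2,0.1,0.8], [X]) }.
%
% Here rain(0) and wet(0) are made-up keys and the probability lists are
% hypothetical.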
check_constraint(Constraint, _, _, Constraint) :- var(Constraint), !.
check_constraint((A->D), _, _, (A->D)) :- var(A), !.
check_constraint((([A|B].L)->D), Vars, NVars, (([A|B].NL)->D)) :- !,
check_cpt_input_vars(L, Vars, NVars, NL).
check_constraint(Dist, _, _, Dist).
check_cpt_input_vars([], _, _, []).
check_cpt_input_vars([V|L], Vars, NVars, [NV|NL]) :-
replace_var(Vars, V, NVars, NV),
check_cpt_input_vars(L, Vars, NVars, NL).
replace_var([], V, [], V).
replace_var([V|_], V0, [NV|_], NV) :- V == V0, !.
replace_var([_|Vars], V, [_|NVars], NV) :-
replace_var(Vars, V, NVars, NV).
add_evidence(V,Key,Distinfo,NV) :-
nonvar(V), !,
get_evidence_position(V, Distinfo, Pos),
check_stored_evidence(Key, Pos),
clpbn:put_atts(NV,evidence(Pos)).
add_evidence(V,K,_,V) :-
add_evidence(K,V).
clpbn_marginalise(V, Dist) :-
attributes:all_attvars(AVars),
project_attributes([V], AVars),
vel:get_atts(V, posterior(_,_,Dist,_)).
%
% called by top-level
% or by call_residue/2
%
project_attributes(GVars, AVars) :-
suppress_attribute_display(false),
AVars = [_|_],
solver(Solver),
( GVars = [_|_] ; Solver = graphs), !,
clpbn_vars(AVars, DiffVars, AllVars),
get_clpbn_vars(GVars,CLPBNGVars0),
simplify_query_vars(CLPBNGVars0, CLPBNGVars),
(output(xbif(XBifStream)) -> clpbn2xbif(XBifStream,vel,AllVars) ; true),
(output(gviz(XBifStream)) -> clpbn2gviz(XBifStream,sort,AllVars,GVars) ; true),
(
Solver = graphs
->
write_out(Solver, [[]], AllVars, DiffVars)
;
write_out(Solver, [CLPBNGVars], AllVars, DiffVars)
).
project_attributes(_, _).
clpbn_vars(AVars, DiffVars, AllVars) :-
sort_vars_by_key(AVars,SortedAVars,DiffVars),
incorporate_evidence(SortedAVars, AllVars).
get_clpbn_vars([],[]).
get_clpbn_vars([V|GVars],[V|CLPBNGVars]) :-
get_atts(V, [key(_)]), !,
get_clpbn_vars(GVars,CLPBNGVars).
get_clpbn_vars([_|GVars],CLPBNGVars) :-
get_clpbn_vars(GVars,CLPBNGVars).
simplify_query_vars(LVs0, LVs) :-
sort(LVs0,LVs1),
get_rid_of_ev_vars(LVs1,LVs).
%
% some variables might already have evidence in the database.
%
get_rid_of_ev_vars([],[]).
get_rid_of_ev_vars([V|LVs0],LVs) :-
clpbn:get_atts(V, [dist(Id,_),evidence(Pos)]), !,
get_evidence_from_position(Ev, Id, Pos),
clpbn_display:put_atts(V, [posterior([],Ev,[],[])]), !,
get_rid_of_ev_vars(LVs0,LVs).
get_rid_of_ev_vars([V|LVs0],[V|LVs]) :-
get_rid_of_ev_vars(LVs0,LVs).
% do nothing if we don't have query variables to compute.
write_out(graphs, _, AVars, _) :-
clpbn2graph(AVars).
write_out(vel, GVars, AVars, DiffVars) :-
vel(GVars, AVars, DiffVars).
write_out(jt, GVars, AVars, DiffVars) :-
jt(GVars, AVars, DiffVars).
write_out(gibbs, GVars, AVars, DiffVars) :-
gibbs(GVars, AVars, DiffVars).
write_out(bnt, GVars, AVars, DiffVars) :-
do_bnt(GVars, AVars, DiffVars).
get_bnode(Var, Goal) :-
get_atts(Var, [key(Key),dist(Dist,Parents)]),
get_dist(Dist,_,Domain,CPT),
(Parents = [] -> X = tab(Domain,CPT) ; X = tab(Domain,CPT,Parents)),
dist_goal(X, Key, Goal0),
include_evidence(Var, Goal0, Key, Goali),
include_starter(Var, Goali, Key, Goal).
include_evidence(Var, Goal0, Key, ((Key:-Ev),Goal0)) :-
get_atts(Var, [evidence(Ev)]), !.
include_evidence(_, Goal0, _, Goal0).
include_starter(Var, Goal0, Key, ((:-Key),Goal0)) :-
get_atts(Var, [starter]), !.
include_starter(_, Goal0, _, Goal0).
dist_goal(Dist, Key, (Key=NDist)) :-
term_variables(Dist, DVars),
process_vars(DVars, DKeys),
my_copy_term(Dist,DVars, NDist,DKeys).
my_copy_term(V, DVars, Key, DKeys) :-
find_var(DVars, V, Key, DKeys).
my_copy_term(A, _, A, _) :- atomic(A), !.
my_copy_term(T, Vs, NT, Ks) :-
T =.. [Na|As],
my_copy_terms(As, Vs, NAs, Ks),
NT =.. [Na|NAs].
my_copy_terms([], _, [], _).
my_copy_terms([A|As], Vs, [NA|NAs], Ks) :-
my_copy_term(A, Vs, NA, Ks),
my_copy_terms(As, Vs, NAs, Ks).
find_var([V1|_], V, Key, [Key|_]) :- V1 == V, !.
find_var([_|DVars], V, Key, [_|DKeys]) :-
find_var(DVars, V, Key, DKeys).
process_vars([], []).
process_vars([V|Vs], [K|Ks]) :-
process_var(V, K),
process_vars(Vs, Ks).
process_var(V, K) :- get_atts(V, [key(K)]), !.
% oops: this variable has no attributes.
process_var(V, _) :- throw(error(instantiation_error,clpbn(attribute_goal(V)))).
%
% unify a CLPBN variable with something.
%
verify_attributes(Var, T, Goals) :-
get_atts(Var, [key(Key),dist(Dist,Parents)]), !,
/* oops, someone trying to bind a clpbn constrained variable */
Goals = [],
bind_clpbn(T, Var, Key, Dist, Parents).
verify_attributes(_, _, []).
bind_clpbn(T, Var, _, _, _) :- nonvar(T),
!, ( add_evidence(Var,T) -> true ; writeln(T:Var), fail ).
bind_clpbn(T, Var, Key, Dist, Parents) :- var(T),
get_atts(T, [key(Key1),dist(Dist1,Parents1)]), !,
bind_clpbns(Key, Dist, Parents, Key1, Dist1, Parents1),
(
get_atts(T, [evidence(Ev1)]) ->
bind_evidence_from_extra_var(Ev1,Var)
;
get_atts(Var, [evidence(Ev)]) ->
bind_evidence_from_extra_var(Ev,T)
;
true).
bind_clpbn(_, Var, _, _, _, _) :-
use(bnt),
check_if_bnt_done(Var), !.
bind_clpbn(_, Var, _, _, _, _) :-
use(vel),
check_if_vel_done(Var), !.
bind_clpbn(_, Var, _, _, _, _) :-
use(jt),
check_if_vel_done(Var), !.
bind_clpbn(T, Var, Key0, _, _, _) :-
get_atts(Var, [key(Key)]), !,
(
Key = Key0 -> true
;
add_evidence(Var,T)
).
fresh_attvar(Var, NVar) :-
get_atts(Var, LAtts),
put_atts(NVar, LAtts).
% I will now allow two CLPBN variables to be bound together.
%bind_clpbns(Key, Dist, Parents, Key, Dist, Parents).
bind_clpbns(Key, Dist, Parents, Key1, Dist1, Parents1) :-
Key == Key1, !,
get_dist(Dist,Type,Domain,Table),
get_dist(Dist1,Type1,Domain1,Table1),
( Dist == Dist1,
same_parents(Parents,Parents1)
->
true
;
throw(error(domain_error(bayesian_domain),bind_clpbns(var(Dist, Key, Type, Domain, Table, Parents),var(Dist1, Key1, Type1, Domain1, Table1, Parents1))))
).
bind_clpbns(Key, _, _, _, Key1, _, _, _) :-
Key\=Key1, !, fail.
bind_clpbns(_, _, _, _, _, _, _, _) :-
format(user_error, 'unification of two bayesian vars not supported~n', []).
same_parents([],[]).
same_parents([P|Parents],[P1|Parents1]) :-
same_node(P,P1),
same_parents(Parents,Parents1).
same_node(P,P1) :- P == P1, !.
same_node(P,P1) :-
get_atts( P,[key(K)]),
get_atts(P1,[key(K)]),
P = P1.
bind_evidence_from_extra_var(Ev1,Var) :-
get_atts(Var, [evidence(Ev0)]), !,
Ev0 = Ev1.
bind_evidence_from_extra_var(Ev1,Var) :-
put_atts(Var, [evidence(Ev1)]).
user:term_expansion((A :- {}), ( :- true )) :- !, % evidence
prolog_load_context(module, M),
store_evidence(M:A).
clpbn_key(Var,Key) :-
get_atts(Var, [key(Key)]).
%
% This is a routine to start a solver, called by the learning procedures (e.g., EM).
% LVs is a list of lists of variables one is interested in eventually marginalising out.
% Vs0 gives the original graph.
% AllDiffs gives variables that are not fully constrained, i.e., we don't fully know
% the key. In this case, we assume different instances will be bound to different
% values at the end of the day.
%
clpbn_init_solver(LVs, Vs0, VarsWithUnboundKeys, State) :-
solver(Solver),
clpbn_init_solver(Solver, LVs, Vs0, VarsWithUnboundKeys, State).
clpbn_init_solver(gibbs, LVs, Vs0, VarsWithUnboundKeys, State) :-
init_gibbs_solver(LVs, Vs0, VarsWithUnboundKeys, State).
clpbn_init_solver(vel, LVs, Vs0, VarsWithUnboundKeys, State) :-
init_vel_solver(LVs, Vs0, VarsWithUnboundKeys, State).
clpbn_init_solver(jt, LVs, Vs0, VarsWithUnboundKeys, State) :-
init_jt_solver(LVs, Vs0, VarsWithUnboundKeys, State).
%
% LVs is the list of lists of variables to marginalise
% Vs is the full graph
% Ps are the probabilities on LVs.
%
%
clpbn_run_solver(LVs, LPs, State) :-
solver(Solver),
clpbn_run_solver(Solver, LVs, LPs, State).
clpbn_run_solver(gibbs, LVs, LPs, State) :-
run_gibbs_solver(LVs, LPs, State).
clpbn_run_solver(vel, LVs, LPs, State) :-
run_vel_solver(LVs, LPs, State).
clpbn_run_solver(jt, LVs, LPs, State) :-
run_jt_solver(LVs, LPs, State).
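%
% A rough usage sketch for the two-step interface above (illustrative, not
% from the original file): given a list Vs of CLP(BN) variables and a list
% of lists QVs of query variables,
%
%   ...,
%   clpbn_init_solver(QVs, Vs, AllDiffs, State),
%   clpbn_run_solver(QVs, Probs, State),
%   ...
%
% where the actual solver is picked from the solver/1 flag.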

View File

@ -1,283 +0,0 @@
%
% generate explicit CPTs
%
:- module(clpbn_aggregates, [
check_for_agg_vars/2,
cpt_average/6,
cpt_average/7,
cpt_max/6,
cpt_min/6
]).
:- use_module(library(clpbn), [{}/1]).
:- use_module(library(lists),
[last/2,
sumlist/2,
sum_list/3,
max_list/2,
min_list/2,
nth0/3
]).
:- use_module(library(matrix),
[matrix_new/3,
matrix_to_list/2,
matrix_set/3]).
:- use_module(library('clpbn/dists'),
[
dist/4,
get_dist_domain_size/2]).
:- use_module(library('clpbn/matrix_cpt_utils'),
[normalise_CPT_on_lines/3]).
check_for_agg_vars([], []).
check_for_agg_vars([V|Vs0], [V|Vs1]) :-
clpbn:get_atts(V, [key(K), dist(Id,Parents)]), !,
simplify_dist(Id, V, K, Parents, Vs0, Vs00),
check_for_agg_vars(Vs00, Vs1).
check_for_agg_vars([_|Vs0], Vs1) :-
check_for_agg_vars(Vs0, Vs1).
% transform aggregate distribution into tree
simplify_dist(avg(Domain), V, Key, Parents, Vs0, VsF) :- !,
cpt_average([V|Parents], Key, Domain, NewDist, Vs0, VsF),
dist(NewDist, Id, Key, ParentsF),
clpbn:put_atts(V, [dist(Id,ParentsF)]).
simplify_dist(_, _, _, _, Vs0, Vs0).
cpt_average(AllVars, Key, Els0, Tab, Vs, NewVs) :-
cpt_average(AllVars, Key, Els0, 1.0, Tab, Vs, NewVs).
% support variables with evidence from domain. This should make everyone's life easier.
cpt_average([Ev|Vars], Key, Els0, Softness, p(Els0, CPT, NewParents), Vs, NewVs) :-
find_evidence(Vars, 0, TotEvidence, RVars),
build_avg_table(RVars, Vars, Els0, Key, TotEvidence, Softness, MAT0, NewParents0, Vs, IVs),
include_qevidence(Ev, MAT0, MAT, NewParents0, NewParents, Vs, IVs, NewVs),
matrix_to_list(MAT, CPT), writeln(NewParents: Vs: NewVs: CPT).
% find all fixed kids; this significantly simplifies the function.
find_evidence([], TotEvidence, TotEvidence, []).
find_evidence([V|Vars], TotEvidence0, TotEvidence, RVars) :-
clpbn:get_atts(V,[evidence(Ev)]), !,
TotEvidenceI is TotEvidence0+Ev,
find_evidence(Vars, TotEvidenceI, TotEvidence, RVars).
find_evidence([V|Vars], TotEvidence0, TotEvidence, [V|RVars]) :-
find_evidence(Vars, TotEvidence0, TotEvidence, RVars).
cpt_max([_|Vars], Key, Els0, CPT, Vs, NewVs) :-
build_max_table(Vars, Els0, Els0, Key, 1.0, CPT, Vs, NewVs).
cpt_min([_|Vars], Key, Els0, CPT, Vs, NewVs) :-
build_min_table(Vars, Els0, Els0, Key, 1.0, CPT, Vs, NewVs).
build_avg_table(Vars, OVars, Domain, _, TotEvidence, Softness, CPT, Vars, Vs, Vs) :-
length(Domain, SDomain),
int_power(Vars, SDomain, 1, TabSize),
TabSize =< 256,
/* case gmp is not there !! */
TabSize > 0, !,
average_cpt(Vars, OVars, Domain, TotEvidence, Softness, CPT).
build_avg_table(Vars, OVars, Domain, Key, TotEvidence, Softness, CPT, [V1,V2], Vs, [V1,V2|NewVs]) :-
length(Vars,L),
LL1 is L//2,
LL2 is L-LL1,
list_split(LL1, Vars, L1, L2),
Min = 0,
length(Domain,Max1), Max is Max1-1,
build_intermediate_table(LL1, sum(Min,Max), L1, V1, Key, 1.0, 0, I1, Vs, Vs1),
build_intermediate_table(LL2, sum(Min,Max), L2, V2, Key, 1.0, I1, _, Vs1, NewVs),
average_cpt([V1,V2], OVars, Domain, TotEvidence, Softness, CPT).
build_max_table(Vars, Domain, Softness, p(Domain, CPT, Vars), Vs, Vs) :-
length(Domain, SDomain),
int_power(Vars, SDomain, 1, TabSize),
TabSize =< 16,
/* case gmp is not there !! */
TabSize > 0, !,
max_cpt(Vars, Domain, Softness, CPT).
build_max_table(Vars, Domain, Softness, p(Domain, CPT, [V1,V2]), Vs, [V1,V2|NewVs]) :-
length(Vars,L),
LL1 is L//2,
LL2 is L-LL1,
list_split(LL1, Vars, L1, L2),
build_intermediate_table(LL1, max(Domain,CPT), L1, V1, Key, 1.0, 0, I1, Vs, Vs1),
build_intermediate_table(LL2, max(Domain,CPT), L2, V2, Key, 1.0, I1, _, Vs1, NewVs),
max_cpt([V1,V2], Domain, Softness, CPT).
build_min_table(Vars, Domain, Softness, p(Domain, CPT, Vars), Vs, Vs) :-
length(Domain, SDomain),
int_power(Vars, SDomain, 1, TabSize),
TabSize =< 16,
/* case gmp is not there !! */
TabSize > 0, !,
min_cpt(Vars, Domain, Softness, CPT).
build_min_table(Vars, Domain, Softness, p(Domain, CPT, [V1,V2]), Vs, [V1,V2|NewVs]) :-
length(Vars,L),
LL1 is L//2,
LL2 is L-LL1,
list_split(LL1, Vars, L1, L2),
build_intermediate_table(LL1, min(Domain,CPT), L1, V1, Key, 1.0, 0, I1, Vs, Vs1),
build_intermediate_table(LL2, min(Domain,CPT), L2, V2, Key, 1.0, I1, _, Vs1, NewVs),
min_cpt([V1,V2], Domain, Softness, CPT).
int_power([], _, TabSize, TabSize).
int_power([_|L], X, I0, TabSize) :-
I is I0*X,
int_power(L, X, I, TabSize).
build_intermediate_table(1,_,[V],V, _, _, I, I, Vs, Vs) :- !.
build_intermediate_table(2, Op, [V1,V2], V, Key, Softness, I0, If, Vs, Vs) :- !,
If is I0+1,
generate_tmp_random(Op, 2, [V1,V2], V, Key, Softness, I0).
build_intermediate_table(N, Op, L, V, Key, Softness, I0, If, Vs, [V1,V2|NewVs]) :-
LL1 is N//2,
LL2 is N-LL1,
list_split(LL1, L, L1, L2),
I1 is I0+1,
build_intermediate_table(LL1, Op, L1, V1, Key, Softness, I1, I2, Vs, Vs1),
build_intermediate_table(LL2, Op, L2, V2, Key, Softness, I2, If, Vs1, NewVs),
generate_tmp_random(Op, N, [V1,V2], V, Key, Softness, I0).
% averages are transformed into sums.
generate_tmp_random(sum(Min,Max), N, [V1,V2], V, Key, Softness, I) :-
Lower is Min*N,
Upper is Max*N,
generate_list(Lower, Upper, Nbs),
sum_cpt([V1,V2], Nbs, Softness, CPT),
% write(sum(Nbs, CPT, [V1,V2])),nl, % debugging
{ V = 'AVG'(I,Key) with p(Nbs,CPT,[V1,V2]) }.
generate_tmp_random(max(Domain,CPT), _, [V1,V2], V, Key, _Softness, I) :-
{ V = 'MAX'(I,Key) with p(Domain,CPT,[V1,V2]) }.
generate_tmp_random(min(Domain,CPT), _, [V1,V2], V, Key, _Softness, I) :-
{ V = 'MIN'(I,Key) with p(Domain,CPT,[V1,V2]) }.
generate_list(M, M, [M]) :- !.
generate_list(I, M, [I|Nbs]) :-
I1 is I+1,
generate_list(I1, M, Nbs).
list_split(0, L, [], L) :- !.
list_split(I, [H|L], [H|L1], L2) :-
I1 is I-1,
list_split(I1, L, L1, L2).
%
% if we have evidence, we need to check if we are always consistent, never consistent, or can be consistent
%
include_qevidence(V, MAT0, MAT, NewParents0, NewParents, Vs, IVs, NewVs) :-
clpbn:get_atts(V,[evidence(Ev)]), !,
normalise_CPT_on_lines(MAT0, MAT1, L1),
check_consistency(L1, Ev, MAT0, MAT1, L1, MAT, NewParents0, NewParents, Vs, IVs, NewVs).
include_qevidence(_, MAT, MAT, NewParents, NewParents, _, Vs, Vs).
check_consistency(L1, Ev, MAT0, MAT1, L1, MAT, NewParents0, NewParents, Vs, IVs, NewVs) :-
sumlist(L1, Tot),
nth0(Ev, L1, Val),
(Val == Tot ->
writeln(Ev:L1:Val:1),
MAT1 = MAT,
NewParents = [],
Vs = NewVs
;
Val == 0.0 ->
writeln(Ev:L1:Val:2),
throw(error(domain_error(incompatible_evidence),evidence(Ev)))
;
writeln(Ev:L1:Val:3),
MAT0 = MAT,
NewParents = NewParents0,
IVs = NewVs
).
%
% generate actual table, instead of trusting the solver
%
average_cpt(Vs, OVars, Vals, Base, _, MCPT) :-
get_ds_lengths(Vs,Lengs),
length(OVars, N),
length(Vals, SVals),
matrix_new(floats,[SVals|Lengs],MCPT),
fill_in_average(Lengs,N,Base,MCPT).
get_ds_lengths([],[]).
get_ds_lengths([V|Vs],[Sz|Lengs]) :-
get_vdist_size(V, Sz),
get_ds_lengths(Vs,Lengs).
fill_in_average(Lengs, N, Base, MCPT) :-
generate(Lengs, Case),
average(Case, N, Base, Val),
matrix_set(MCPT,[Val|Case],1.0),
fail.
fill_in_average(_,_,_,_).
generate([], []).
generate([N|Lengs], [C|Case]) :-
from(0,N,C),
generate(Lengs, Case).
from(I,_,I).
from(I1,M,J) :-
I is I1+1,
I < M,
from(I,M,J).
average(Case, N, Base, Val) :-
sum_list(Case, Base, Tot),
Val is integer(round(Tot/N)).
sum_cpt(Vs,Vals,_,CPT) :-
get_ds_lengths(Vs,Lengs),
length(Vals,SVals),
matrix_new(floats,[SVals|Lengs],MCPT),
fill_in_sum(Lengs,MCPT),
matrix_to_list(MCPT,CPT).
fill_in_sum(Lengs,MCPT) :-
generate(Lengs, Case),
sumlist(Case, Val),
matrix_set(MCPT,[Val|Case],1.0),
fail.
fill_in_sum(_,_).
max_cpt(Vs,Vals,_,CPT) :-
get_ds_lengths(Vs,Lengs),
length(Vals,SVals),
matrix_new(floats,[SVals|Lengs],MCPT),
fill_in_max(Lengs,MCPT),
matrix_to_list(MCPT,CPT).
fill_in_max(Lengs,MCPT) :-
generate(Lengs, Case),
max_list(Case, Val),
matrix_set(MCPT,[Val|Case],1.0),
fail.
fill_in_max(_,_).
min_cpt(Vs,Vals,_,CPT) :-
get_ds_lengths(Vs,Lengs),
length(Vals,SVals),
matrix_new(floats,[SVals|Lengs],MCPT),
fill_in_min(Lengs,MCPT),
matrix_to_list(MCPT,CPT).
fill_in_min(Lengs,MCPT) :-
generate(Lengs, Case),
min_list(Case, Val),
matrix_set(MCPT,[Val|Case],1.0),
fail.
fill_in_min(_,_).
get_vdist_size(V, Sz) :-
clpbn:get_atts(V, [dist(Dist,_)]),
get_dist_domain_size(Dist, Sz).

View File

@ -1,425 +0,0 @@
:- module(bnt, [do_bnt/3,
create_bnt_graph/2,
check_if_bnt_done/1]).
:- use_module(library('clpbn/display'), [
clpbn_bind_vals/3]).
:- use_module(library('clpbn/dists'), [
get_dist_domain_size/2,
get_dist_domain/2,
get_dist_params/2
]).
:- use_module(library('clpbn/discrete_utils'), [
reorder_CPT/5]).
:- use_module(library(matlab), [start_matlab/1,
close_matlab/0,
matlab_on/0,
matlab_eval_string/1,
matlab_eval_string/2,
matlab_matrix/4,
matlab_vector/2,
matlab_sequence/3,
matlab_initialized_cells/4,
matlab_get_variable/2,
matlab_call/2
]).
:- use_module(library(dgraphs), [dgraph_new/1,
dgraph_add_vertices/3,
dgraph_add_edges/3,
dgraph_top_sort/2,
dgraph_vertices/2,
dgraph_edges/2
]).
:- use_module(library(lists), [append/3,
member/2,nth/3]).
:- use_module(library(ordsets), [
ord_insert/3]).
:- yap_flag(write_strings,on).
% syntactic sugar for matlab_call.
:- op(800,yfx,<--).
G <-- Y :-
matlab_call(Y,G).
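% For example (illustrative), the goal
%   engine <-- jtree_inf_engine(bnet)
% is just matlab_call(jtree_inf_engine(bnet), engine), i.e. roughly
% "engine = jtree_inf_engine(bnet)" on the MATLAB side.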
:- attribute bnt_id/1.
:- dynamic bnt/1.
:- dynamic bnt_solver/1, bnt_path/1, bnt_model/1.
% belprop
bnt_solver(jtree).
% likelihood_weighting
bnt_path("$HOME/Yap/CLPBN/FullBNT-1.0.4/BNT").
%
% What BNT are we using:
% a propositional one
% a tied parameter one.
%
%bnt_model(propositional).
bnt_model(tied).
%bnt_model(dbn).
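% The model can be changed at run time through the main clpbn module, e.g.
% (illustrative): ?- clpbn:set_clpbn_flag(bnt_model, propositional).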
/*****************************************
BNT uses:
bnet
dag
discrete_nodes: which nodes are discrete (all of them, for now),
node_sizes
engine
evidence
marg
*****************************************/
check_if_bnt_done(Var) :-
get_atts(Var, [map(_)]).
do_bnt([], _, _) :- !.
do_bnt(QueryVars, AllVars, AllDiffs) :-
create_bnt_graph(AllVars, _, SortedVertices, NumberedVertices, Size),
set_inference,
add_evidence(SortedVertices, Size, NumberedVertices),
marginalize(QueryVars, SortedVertices, NumberedVertices, Ps),
clpbn_bind_vals(QueryVars, Ps, AllDiffs).
create_bnt_graph(AllVars, Representatives) :-
create_bnt_graph(AllVars, Representatives, _, _, _).
create_bnt_graph(AllVars, Representatives, SortedVertices, NumberedVertices, Size) :-
init_matlab,
sort_nodes(AllVars, SortedVertices),
number_graph(SortedVertices, NumberedVertices, 0, Size),
bnt_model(ModelType),
init_bnet(ModelType, SortedVertices, NumberedVertices, Size, Representatives).
% make sure MATLAB works.
init_matlab :-
bnt(on), !.
init_matlab :-
start_matlab,
bnt_path(Path),
append("cd ",Path,Command),
% atom_concat('cd ', Path, Command),
matlab_eval_string(Command),
matlab_eval_string('addpath(genpathKPM(pwd))',_),
assert(bnt(on)).
start_matlab :-
matlab_on, !.
start_matlab :-
start_matlab('matlab -nojvm -nosplash').
sort_nodes(AllVars, SortedVertices) :-
bnt_model(tied), !,
extract_tied(AllVars, SortedVertices).
sort_nodes(AllVars, SortedVertices) :-
bnt_model(propositional), !,
extract_graph(AllVars, Graph),
dgraph_top_sort(Graph, SortedVertices).
extract_tied(AllVars, SortedVars) :-
extract_kvars(AllVars,KVars),
keysort(KVars,SVars),
split_tied_vars(SVars,TVars, Vertices),
tied_graph(TVars,TGraph,Vertices),
dgraph_top_sort(TGraph, Sort),
distribute_tied_variables(Sort, TVars, 1, SortedVars).
extract_kvars([],[]).
extract_kvars([V|AllVars],[N-i(V,Parents)|KVars]) :-
clpbn:get_atts(V, [dist(N,Parents)]),
extract_kvars(AllVars,KVars).
split_tied_vars([],[],[]).
split_tied_vars([N-i(V,Par)|More],[N-g(Vs,Ns,Es)|TVars],[N|LNs]) :-
get_pars(Par,N,V,NPs,[],Es0,Es),
get_tied(More,N,Vs,[V],Ns,NPs,Es,Es0,SVars),
split_tied_vars(SVars,TVars,LNs).
get_pars([],_,_,NPs,NPs,Es,Es).
get_pars([V|Par],N,V0,NPs,NPs0,Es,Es0) :-
clpbn:get_atts(V, [dist(N,_)]), !,
get_pars(Par,N,V0,NPs,NPs0,Es,[V-V0|Es0]).
get_pars([V|Par],N,V0,NPs,NPs0,Es,Es0) :-
clpbn:get_atts(V, [dist(M,_)]),
ord_insert(NPs0,M,NPsI),
get_pars(Par,N,V0,NPs,NPsI,Es,Es0).
get_tied([N-i(V,Par)|More],N,Vs,Vs0,Ns,NPs,Es,Es0,SVars) :- !,
get_pars(Par,N,V,NPsI,NPs,EsI,Es0),
get_tied(More,N,Vs,[V|Vs0],Ns,NPsI,Es,EsI,SVars).
get_tied(More,_,Vs,Vs,Ns,Ns,Es,Es,More).
tied_graph(TVars,Graph,Vertices) :-
dgraph_new(Graph0),
dgraph_add_vertices(Graph0, Vertices, Graph1),
get_tied_edges(TVars,Edges),
dgraph_add_edges(Graph1, Edges, Graph).
get_tied_edges([],[]).
get_tied_edges([N-g(_,Vs,_)|TGraph],Edges) :-
add_tied(Vs,N,Edges,Edges0),
get_tied_edges(TGraph,Edges0).
add_tied([],_,Edges,Edges).
add_tied([N1|Vs],N,[N1-N|Edges],Edges0) :-
add_tied(Vs,N,Edges,Edges0).
distribute_tied_variables([], _, _, []).
distribute_tied_variables([N|Sort], TVars, I0, SortedVars) :-
member(N-g(Vs,_,_),TVars),
distribute_tied(Vs,I0,In,SortedVars,SortedVars0),
distribute_tied_variables(Sort, TVars, In, SortedVars0).
distribute_tied([],I,I,Vs,Vs).
distribute_tied([V|Vs],I0,In,[V|NVs],NVs0) :-
I is I0+1,
put_atts(V, [bnt_id(I0)]),
% clpbn:get_atts(V,[key(K)]),
distribute_tied(Vs,I,In,NVs,NVs0).
extract_graph(AllVars, Graph) :-
dgraph_new(Graph0),
dgraph_add_vertices(Graph0, AllVars, Graph1),
get_edges(AllVars,Edges),
dgraph_add_edges(Graph1, Edges, Graph).
get_edges([],[]).
get_edges([V|AllVars],Edges) :-
clpbn:get_atts(V, [dist(_,Parents)]),
add_parent_child(Parents,V,Edges,Edges0),
get_edges(AllVars,Edges0).
add_parent_child([],_,Edges,Edges).
add_parent_child([P|Parents],V,[P-V|Edges],Edges0) :-
add_parent_child(Parents,V,Edges,Edges0).
number_graph([], [], I, I).
number_graph([V|SortedGraph], [I|Is], I0, IF) :-
I is I0+1,
put_atts(V, [bnt_id(I)]),
% clpbn:get_atts(V,[key(K)]),
% write(I:K),nl,
number_graph(SortedGraph, Is, I, IF).
init_bnet(propositional, SortedGraph, NumberedGraph, Size, []) :-
build_dag(SortedGraph, Size),
init_discrete_nodes(SortedGraph, Size),
bnet <-- mk_bnet(dag, node_sizes, \discrete, discrete_nodes),
dump_cpts(SortedGraph, NumberedGraph).
init_bnet(tied, SortedGraph, NumberedGraph, Size, Representatives) :-
build_dag(SortedGraph, Size),
init_discrete_nodes(SortedGraph, Size),
dump_tied_cpts(SortedGraph, NumberedGraph, Representatives).
build_dag(SortedVertices, Size) :-
get_numbered_edges(SortedVertices, Edges),
mkdag(Size, Edges).
get_numbered_edges([], []).
get_numbered_edges([V|SortedVertices], Edges) :-
clpbn:get_atts(V, [dist(_,Ps)]),
v2number(V,N),
add_numbered_edges(Ps, N, Edges, Edges0),
get_numbered_edges(SortedVertices, Edges0).
add_numbered_edges([], _, Edges, Edges).
add_numbered_edges([P|Ps], N, [PN-N|Edges], Edges0) :-
v2number(P,PN),
add_numbered_edges(Ps, N, Edges, Edges0).
v2number(V,N) :-
get_atts(V,[bnt_id(N)]).
init_discrete_nodes(SortedGraph, Size) :-
matlab_sequence(1,Size,discrete_nodes),
mksizes(SortedGraph, Size).
mkdag(N,Els) :-
Tot is N*N,
functor(Dag,dag,Tot),
add_els(Els,N,Dag),
Dag=..[_|L],
addzeros(L),
matlab_matrix(N,N,L,dag).
add_els([],_,_).
add_els([X-Y|Els],N,Dag) :-
Pos is (X-1)*N+Y,
arg(Pos,Dag,1),
add_els(Els,N,Dag).
addzeros([]).
addzeros([0|L]) :- !,
addzeros(L).
addzeros([1|L]) :-
addzeros(L).
mksizes(SortedVertices, Size) :-
get_szs(SortedVertices,Sizes),
matlab_matrix(1,Size,Sizes,node_sizes).
get_szs([],[]).
get_szs([V|SortedVertices],[LD|Sizes]) :-
clpbn:get_atts(V, [dist(Id,_)]),
get_dist_domain_size(Id,LD),
get_szs(SortedVertices,Sizes).
dump_cpts([], []).
dump_cpts([V|SortedGraph], [I|Is]) :-
clpbn:get_atts(V, [dist(Id,Parents)]),
get_dist_params(Id,CPT),
reorder_cpt(CPT,V,Parents,Tab),
mkcpt(bnet,I,Tab),
dump_cpts(SortedGraph, Is).
%
% This is complicated: BNT and CLP(BN) use different variable orders.
%
reorder_cpt(CPT,_, [], CPT) :- !.
reorder_cpt(CPT,V,Parents,Tab) :-
% get BNT label
get_sizes_and_ids(Parents,Ids),
% sort to BNT
keysort(Ids,NIds),
% get vars in order
extract_vars(NIds, [], NParents),
% do the actual work
reorder_CPT([V|Parents],CPT,[V|NParents],STab,_),
STab=..[_|Tab].
get_sizes_and_ids([],[]).
get_sizes_and_ids([V|Parents],[Id-V|Ids]) :-
get_atts(V, [bnt_id(Id)]),
get_sizes_and_ids(Parents,Ids).
extract_vars([], L, L).
extract_vars([_-V|NIds], NParents, Vs) :-
extract_vars(NIds, [V|NParents], Vs).
mkcpt(BayesNet, I, Tab) :-
(BayesNet.'CPD'({I})) <-- tabular_CPD(BayesNet,I,Tab).
dump_tied_cpts(Graph, Is, Reps) :-
create_class_vector(Graph, Is, Classes, Reps0),
matlab_vector(Classes, eclass),
keysort(Reps0,Reps1),
representatives(Reps1,Reps),
bnet <-- mk_bnet(dag, node_sizes, \discrete, discrete_nodes, \equiv_class, eclass),
dump_tied_cpts(Reps).
create_class_vector([], [], [],[]).
create_class_vector([V|Graph], [I|Is], [Id|Classes], [Id-v(V,I,Parents)|Sets]) :-
clpbn:get_atts(V, [dist(Id,Parents)]),
create_class_vector(Graph, Is,Classes,Sets).
representatives([],[]).
representatives([Class-Rep|Reps1],[Class-Rep|Reps]) :-
nonrepresentatives(Reps1, Class, Reps2),
representatives(Reps2,Reps).
nonrepresentatives([Class-_|Reps1], Class, Reps2) :- !,
nonrepresentatives(Reps1, Class, Reps2).
nonrepresentatives(Reps, _, Reps).
dump_tied_cpts([]).
dump_tied_cpts([Class-v(V,Id,Parents)|SortedGraph]) :-
get_dist_params(Class,CPT),
reorder_cpt(CPT,V,Parents,NCPT),
mktiedcpt(bnet,Id,Class,NCPT),
dump_tied_cpts(SortedGraph).
mktiedcpt(BayesNet, V, Class, Tab) :-
(BayesNet.'CPD'({Class})) <-- tabular_CPD(BayesNet,V,Tab).
set_inference :-
bnt_solver(Solver),
init_solver(Solver).
init_solver(jtree) :-
engine <-- jtree_inf_engine(bnet).
init_solver(belprop) :-
engine <-- belprop_inf_engine(bnet).
init_solver(likelihood_weighting) :-
engine <-- likelihood_weighting_inf_engine(bnet).
init_solver(enumerative) :-
engine <-- enumerative_inf_engine(bnet).
init_solver(gibbs) :-
engine <-- gibbs_sampling_inf_engine(bnet).
init_solver(global_joint) :-
engine <-- global_joint_inf_engine(bnet).
init_solver(pearl) :-
engine <-- pearl_inf_engine(bnet).
init_solver(var_elim) :-
engine <-- var_elim_inf_engine(bnet).
add_evidence(Graph, Size, Is) :-
mk_evidence(Graph, Is, LN),
matlab_initialized_cells( 1, Size, LN, evidence),
[engine_ev, loglik] <-- enter_evidence(engine, evidence).
mk_evidence([], [], []).
mk_evidence([V|L], [I|Is], [ar(1,I,EvVal1)|LN]) :-
clpbn:get_atts(V, [evidence(EvVal)]), !,
EvVal1 is EvVal +1,
mk_evidence(L, Is, LN).
mk_evidence([_|L], [_|Is], LN) :-
mk_evidence(L, Is, LN).
evidence_val(Ev,Val,[Ev|_],Val) :- !.
evidence_val(Ev,I0,[_|Domain],Val) :-
I1 is I0+1,
evidence_val(Ev,I1,Domain,Val).
marginalize([[V]], _SortedVars, _NumberedVars, Ps) :- !,
v2number(V,Pos),
marg <-- marginal_nodes(engine_ev, Pos),
matlab_get_variable( marg.'T', Ps).
marginalize([Vs], SortedVars, NumberedVars,Ps) :-
bnt_solver(jtree),!,
matlab_get_variable(loglik, Den),
clpbn_display:get_all_combs(Vs, Vals),
mk_evidence(SortedVars, NumberedVars, Ev),
length(SortedVars,L),
cycle_values(Den, Ev, Vs, L, Vals, Ps).
cycle_values(_D, _Ev, _Vs, _Size, [], []).
cycle_values(Den,Ev,Vs,Size,[H|T],[HP|TP]):-
mk_evidence_query(Vs, H, EvQuery),
append(EvQuery,Ev,Instantiation),
matlab_initialized_cells( 1, Size, Instantiation, instantiation),
[engine_ev, logll] <-- enter_evidence(engine, instantiation),
matlab_get_variable(logll, Num),
HP is exp(Num-Den),
cycle_values(Den,Ev,Vs,Size,T,TP).
mk_evidence_query([], [], []).
mk_evidence_query([V|L], [H|T], [ar(1,Pos,El)|LN]) :-
v2number(V,Pos),
clpbn:get_atts(V, [dist(Id,_)]),
get_dist_domain(Id,D),
nth(El,D,H),
mk_evidence_query(L, T, LN).

View File

@ -1,172 +0,0 @@
:- module(clpbn_connected,
[clpbn_subgraphs/2,
influences/4,
init_influences/3,
influences/5]).
:- use_module(library(dgraphs),
[dgraph_new/1,
dgraph_add_edges/3,
dgraph_add_vertex/3,
dgraph_neighbors/3,
dgraph_edge/3]).
:- use_module(library(rbtrees),
[rb_new/1,
rb_insert/4,
rb_lookup/3]).
:- attribute component/1.
% search for connected components, that is, where we know that A influences B or B influences A.
clpbn_subgraphs(Vs, Gs) :-
mark_components(Vs, Components),
keysort(Components, Ordered),
same_key(Ordered, Gs).
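% For instance (a sketch, not from the original file): if Vs = [A,B,C] and C
% shares no path of influence with A and B, clpbn_subgraphs(Vs, Gs) yields,
% up to ordering, Gs = [[A,B],[C]].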
% ignore variables with evidence,
% the others mark the MB.
mark_components([], []).
mark_components([V|Vs], Components) :-
clpbn:get_atts(V, [evidence(_),dist(_,Parents)]), !,
merge_parents(Parents, _),
mark_components(Vs, Components).
mark_components([V|Vs], [Mark-V|Components]) :-
mark_var(V, Mark),
mark_components(Vs, Components).
mark_var(V, Mark) :-
get_atts(V, [component(Mark)]), !,
clpbn:get_atts(V, [dist(_,Parents)]), !,
merge_parents(Parents, Mark).
mark_var(V, Mark) :-
clpbn:get_atts(V, [dist(_,Parents)]), !,
put_atts(V,[component(Mark)]),
merge_parents(Parents, Mark).
merge_parents([], _).
merge_parents([V|Parents], Mark) :-
clpbn:get_atts(V,[evidence(_)]), !,
merge_parents(Parents, Mark).
merge_parents([V|Parents], Mark) :-
get_atts(V,[component(Mark)]), !,
merge_parents(Parents, Mark).
merge_parents([V|Parents], Mark) :-
put_atts(V,[component(Mark)]),
merge_parents(Parents, Mark).
same_key([],[]).
same_key([K-El|More],[[El|Els]|Gs]) :-
same_keys(More, K, Els, Rest),
same_key(Rest,Gs).
same_keys([], _, [], []).
same_keys([K1-El|More], K, [El|Els], Rest) :-
K == K1, !,
same_keys(More, K, Els, Rest).
same_keys(Rest, _, [], Rest).
influences_more([], _, _, Is, Is, Evs, Evs, V2, V2).
influences_more([V|LV], G, RG, Is0, Is, Evs0, Evs, GV0, GV2) :-
rb_lookup(V, _, GV0), !,
influences_more(LV, G, RG, Is0, Is, Evs0, Evs, GV0, GV2).
influences_more([V|LV], G, RG, Is0, Is, Evs0, Evs, GV0, GV3) :-
rb_insert(GV0, V, _, GV1),
follow_dgraph(V, G, RG, [V|Is0], Is1, [V|Evs0], Evs1, GV1, GV2),
influences_more(LV, G, RG, Is1, Is, Evs1, Evs, GV2, GV3).
% search for the set of variables that influence V
influences(Vs, LV, Is, Evs) :-
init_influences(Vs, G, RG),
influences(LV, Is, Evs, G, RG).
init_influences(Vs, G, RG) :-
dgraph_new(G0),
dgraph_new(RG0),
to_dgraph(Vs, G0, G, RG0, RG).
influences([], [], [], _, _).
influences([V|LV], Is, Evs, G, RG) :-
rb_new(V0),
rb_insert(V0, V, _, V1),
follow_dgraph(V, G, RG, [V], Is1, [V], Evs1, V1, V2),
influences_more(LV, G, RG, Is1, Is, Evs1, Evs, V2, _).
to_dgraph([], G, G, RG, RG).
to_dgraph([V|Vs], G0, G, RG0, RG) :-
clpbn:get_atts(V, [evidence(_),dist(_,Parents)]), !,
build_edges(Parents, V, Edges, REdges),
dgraph_add_edges(G0,[V-e|Edges],G1),
dgraph_add_edges(RG0,REdges,RG1),
to_dgraph(Vs, G1, G, RG1, RG).
to_dgraph([V|Vs], G0, G, RG0, RG) :-
clpbn:get_atts(V, [dist(_,Parents)]),
build_edges(Parents, V, Edges, REdges),
dgraph_add_vertex(G0,V,G1),
dgraph_add_edges(G1, Edges, G2),
dgraph_add_vertex(RG0,V,RG1),
dgraph_add_edges(RG1, REdges, RG2),
to_dgraph(Vs, G2, G, RG2, RG).
build_edges([], _, [], []).
build_edges([P|Parents], V, [P-V|Edges], [V-P|REdges]) :-
build_edges(Parents, V, Edges, REdges).
follow_dgraph(V, G, RG, Is0, IsF, Evs0, EvsF, Visited0, Visited) :-
dgraph_neighbors(V, RG, Parents),
add_parents(Parents, G, RG, Is0, IsI, Evs0, EvsI, Visited0, Visited1),
dgraph_neighbors(V, G, Kids),
add_kids(Kids, G, RG, IsI, IsF, EvsI, EvsF, Visited1, Visited).
add_parents([], _, _, Is, Is, Evs, Evs, Visited, Visited).
% been here already, can safely ignore.
add_parents([V|Vs], G, RG, Is0, IsF, Evs0, EvsF, Visited0, VisitedF) :-
rb_lookup(V, _, Visited0), !,
add_parents(Vs, G, RG, Is0, IsF, Evs0, EvsF, Visited0, VisitedF).
% evidence node,
% just say that we visited it
add_parents([V|Vs], G, RG, Is0, IsF, Evs0, EvsF, Visited0, VisitedF) :-
dgraph_edge(V,e,G), !, % has evidence
rb_insert(Visited0, V, _, VisitedI),
add_parents(Vs, G, RG, Is0, IsF, [V|Evs0], EvsF, VisitedI, VisitedF).
% non-evidence node,
% we will need to find its parents.
add_parents([V|Vs], G, RG, Is0, IsF, Evs0, EvsF, Visited0, VisitedF) :-
rb_insert(Visited0, V, _, VisitedI),
follow_dgraph(V, G, RG, [V|Is0], IsI, [V|Evs0], EvsI, VisitedI, VisitedII),
add_parents(Vs, G, RG, IsI, IsF, EvsI, EvsF, VisitedII, VisitedF).
add_kids([], _, _, Is, Is, Evs, Evs, Visited, Visited).
add_kids([V|Vs], G, RG, Is0, IsF, Evs0, EvsF, Visited0, VisitedF) :-
dgraph_edge(V,e,G), % has evidence
% we will go there even if it was visited
( rb_insert(Visited0, V, _, Visited1) ->
true
;
% we've been there, but were we there as a father or as a kid?
not_in(Evs0, V),
Visited1 = Visited0
),
!,
dgraph_neighbors(V, RG, Parents),
add_parents(Parents, G, RG, Is0, Is1, [V|Evs0], EvsI, Visited1, VisitedI),
(Is1 = Is0 ->
% ignore whatever we did with this node,
% it didn't lead anywhere (all parents have evidence).
add_kids(Vs, G, RG, Is0, IsF, [V|Evs0], EvsF, Visited1, VisitedF)
;
% insert parents
add_kids(Vs, G, RG, Is1, IsF, EvsI, EvsF, VisitedI, VisitedF)
).
add_kids([_|Vs], G, RG, Is0, IsF, Evs0, EvsF, Visited0, VisitedF) :-
add_kids(Vs, G, RG, Is0, IsF, Evs0, EvsF, Visited0, VisitedF).
not_in([V1|_], V) :- V1 == V, !, fail.
not_in([_|Evs0], V) :-
not_in(Evs0, V).

View File

@ -1,146 +0,0 @@
:- module(discrete_utils, [project_from_CPT/3,
reorder_CPT/5,
get_dist_size/2]).
:- use_module(dists, [get_dist_domain_size/2,
get_dist_domain/2]).
%
% remove columns from a table
%
project_from_CPT(V,tab(Table,Deps,Szs),tab(NewTable,NDeps,NSzs)) :-
propagate_evidence(V,Evs),
functor(Table,_,Max),
find_projection_factor(Deps, V, NDeps, Szs, NSzs, F, Sz),
OLoop is Max//(Sz*F),
project_outer_loop(0,OLoop,F,Sz,Table,Evs,NTabl),
NewTable =.. [t|NTabl].
propagate_evidence(V, Evs) :-
clpbn:get_atts(V, [evidence(Ev),dist(Id,_)]), !,
get_dist_domain(Id, Out),
generate_szs_with_evidence(Out,Ev,0,Evs,Found),
(var(Found) ->
clpbn:get_atts(V, [key(K)]),
throw(clpbn(evidence_does_not_match,K,Ev,[Out]))
;
true
).
propagate_evidence(_, _).
generate_szs_with_evidence([],_,_,[],_).
generate_szs_with_evidence([_|Out],Ev,Ev,[ok|Evs],found) :- !,
I is Ev+1,
generate_szs_with_evidence(Out,Ev,I,Evs,found).
generate_szs_with_evidence([_|Out],Ev,I0,[not_ok|Evs],Found) :-
I is I0+1,
generate_szs_with_evidence(Out,Ev,I,Evs,Found).
find_projection_factor([V|Deps], V1, Deps, [Sz|Szs], Szs, F, Sz) :-
V == V1, !,
mult(Szs, 1, F).
find_projection_factor([V|Deps], V1, [V|NDeps], [Sz|Szs], [Sz|NSzs], F, NSz) :-
find_projection_factor(Deps, V1, NDeps, Szs, NSzs, F, NSz).
mult([], F, F).
mult([Sz|Szs], Sz0, F) :-
SzI is Sz0*Sz,
mult(Szs, SzI, F).
project_outer_loop(OLoop,OLoop,_,_,_,_,[]) :- !.
project_outer_loop(I,OLoop,F,Sz,Table,Evs,NTabl) :-
Base is I*Sz*F,
project_mid_loop(0,F,Base,Sz,Table,Evs,NTabl,NTabl0),
I1 is I+1,
project_outer_loop(I1,OLoop,F,Sz,Table,Evs,NTabl0).
project_mid_loop(F,F,_,_,_,_,NTabl,NTabl) :- !.
project_mid_loop(I,F,Base,Sz,Table,Evs,[Ent|NTablF],NTabl0) :-
I1 is I+1,
NBase is I+Base,
project_inner_loop(0,Sz,Evs,NBase,F,Table,0.0,Ent),
project_mid_loop(I1,F,Base,Sz,Table,Evs,NTablF,NTabl0).
project_inner_loop(Sz,Sz,[],_,_,_,Ent,Ent) :- !.
project_inner_loop(I,Sz,[ok|Evs],NBase,F,Table,Ent0,Ent) :- !,
I1 is I+1,
Pos is NBase+I*F+1,
arg(Pos,Table,E1),
Ent1 is E1+Ent0,
project_inner_loop(I1,Sz,Evs,NBase,F,Table,Ent1,Ent).
project_inner_loop(I,Sz,[_|Evs],NBase,F,Table,Ent0,Ent) :- !,
I1 is I+1,
project_inner_loop(I1,Sz,Evs,NBase,F,Table,Ent0,Ent).
%
% Given a set of variables Vs0 and a discrete CPT T0,
% reorder according to keysort if Vs is unbound, or according to Vs
% resulting in CPT
% TF. Sizes of variables in Vs are given as Sizes.
%
reorder_CPT(Vs0, T0, Vs, TF, Sizes) :-
var(Vs), !,
get_sizes(Vs0, Szs),
numb_vars(Vs0, Szs, _, VPs0, VLs0),
keysort(VLs0, VLs),
compute_new_factors(VLs, _, Vs, Sizes),
get_factors(VLs0,Fs),
length(T0,L),
functor(TF,t,L),
copy_to_new_array(T0, 0, VPs0, Fs, TF).
reorder_CPT(Vs0, T0, Vs, TF, Sizes) :-
get_sizes(Vs0, Szs),
numb_vars(Vs0, Szs, _, VPs0, VLs0),
sort_according_to_parent(Vs, VLs0, VLs),
compute_new_factors(VLs, _, Vs, Sizes),
get_factors(VLs0,Fs),
length(T0,L),
functor(TF,t,L),
copy_to_new_array(T0, 0, VPs0, Fs, TF).
numb_vars([], [], 1, [], []).
numb_vars([V|Vs], [L|Ls], A0, [Ai|VPs], [V-(L,_)|VLs]) :-
numb_vars(Vs, Ls, Ai, VPs, VLs),
A0 is Ai*L.
sort_according_to_parent([],[], []).
sort_according_to_parent([V|Vs],VLs0, [Arg|VLs]) :-
fetch_var(V,VLs0,VLsI,Arg),
sort_according_to_parent(Vs,VLsI, VLs).
fetch_var(V,[V0-(L,A)|VLs],VLs,V0-(L,A)) :- V == V0, !.
fetch_var(V,[A|VLs0],[A|VLsI],Arg) :-
fetch_var(V,VLs0,VLsI,Arg).
compute_new_factors([], 1, [], []).
compute_new_factors([V-(L,F)|VLs], NF, [V|Vs], [L|Szs]) :-
compute_new_factors(VLs, F, Vs, Szs),
NF is F*L.
get_factors([],[]).
get_factors([_-(_,F)|VLs0],[F|Fs]) :-
get_factors(VLs0,Fs).
copy_to_new_array([], _, _, _, _).
copy_to_new_array([P|Ps], I, F0s, Fs, S) :-
convert_factor(F0s, Fs, I, N),
I1 is I+1,
N1 is N+1,
arg(N1,S,P),
copy_to_new_array(Ps, I1, F0s, Fs, S).
convert_factor([], [], _, 0).
convert_factor([F0|F0s], [F|Fs], I, OUT) :-
X is I//F0,
NI is I mod F0,
NEXT is F*X,
convert_factor(F0s, Fs, NI, OUT1),
OUT is OUT1+NEXT.
get_sizes([], []).
get_sizes([V|Deps], [Sz|Sizes]) :-
clpbn:get_atts(V, [dist(Id,_)]),
get_dist_domain_size(Id,Sz),
get_sizes(Deps, Sizes).

View File

@ -1,71 +0,0 @@
:- module(clpbn_display, [
clpbn_bind_vals/3]).
:- use_module(library(lists),
[
member/2
]).
:- use_module(dists, [get_dist_domain/2]).
:- attribute posterior/4.
%
% what is actually output
%
attribute_goal(V, G) :-
clpbn:suppress_attribute_display(false),
get_atts(V, [posterior(Vs,Vals,Ps,AllDiffs)]),
massage_out(Vs, Vals, Ps, G, AllDiffs, V).
massage_out([], Ev, _, V=Ev, _, V) :- !.
massage_out(Vs, [D], [P], p(CEqs)=P, AllDiffs, _) :- !,
gen_eqs(Vs,D,Eqs),
add_alldiffs(AllDiffs,Eqs,CEqs).
massage_out(Vs, [D|Ds], [P|Ps], (p(CEqs)=P,G) , AllDiffs, V) :-
gen_eqs(Vs,D,Eqs),
add_alldiffs(AllDiffs,Eqs,CEqs),
massage_out(Vs, Ds, Ps, G, AllDiffs, V).
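% For a single boolean query variable X with no alldiff constraints this
% yields answers of the form (illustrative): p(X=t)=0.7,p(X=f)=0.3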
gen_eqs([V], [D], (V=D)) :- !.
gen_eqs([V], D, (V=D)) :- !.
gen_eqs([V|Vs], [D|Ds], ((V=D),Eqs)) :-
gen_eqs(Vs,Ds,Eqs).
add_alldiffs([],Eqs,Eqs).
add_alldiffs(AllDiffs,Eqs,(Eqs/alldiff(AllDiffs))).
clpbn_bind_vals([],[],_).
clpbn_bind_vals([Vs|MoreVs],[Ps|MorePs],AllDiffs) :-
clpbn_bind_vals2(Vs, Ps, AllDiffs),
clpbn_bind_vals(MoreVs,MorePs,AllDiffs).
clpbn_bind_vals2([],_,_) :- !.
% simple case, we want a distribution on a single variable.
%bind_vals([V],Ps) :- !,
% clpbn:get_atts(V, [dist(Vals,_,_)]),
% put_atts(V, posterior([V], Vals, Ps)).
% complex case, we want a joint distribution, do it on a leader.
% should split on cliques ?
clpbn_bind_vals2(Vs,Ps,AllDiffs) :-
get_all_combs(Vs, Vals),
Vs = [V|_],
put_atts(V, posterior(Vs, Vals, Ps, AllDiffs)).
get_all_combs(Vs, Vals) :-
get_all_doms(Vs,Ds),
findall(L,ms(Ds,L),Vals).
get_all_doms([], []).
get_all_doms([V|Vs], [D|Ds]) :-
clpbn:get_atts(V, [dist(Id,_)]),
get_dist_domain(Id,D),
get_all_doms(Vs, Ds).
ms([], []).
ms([H|L], [El|Els]) :-
member(El,H),
ms(L, Els).

View File

@ -1,316 +0,0 @@
%
% routines to manipulate distributions
%
:- module(clpbn_dist,
[
dist/1,
dist/4,
dists/1,
dist_new_table/2,
get_dist/4,
get_dist_matrix/5,
get_possibly_deterministic_dist_matrix/5,
get_dist_domain/2,
get_dist_domain_size/2,
get_dist_params/2,
get_dist_key/2,
get_evidence_position/3,
get_evidence_from_position/3,
dist_to_term/2,
empty_dist/2,
all_dist_ids/1,
randomise_all_dists/0,
randomise_dist/1,
uniformise_all_dists/0,
uniformise_dist/1,
reset_all_dists/0
]).
:- use_module(library(lists),[is_list/1,nth0/3]).
:- use_module(library(matrix),
[matrix_new/4,
matrix_new/3,
matrix_to_list/2,
matrix_to_logs/1]).
:- use_module(library('clpbn/matrix_cpt_utils'),
[random_CPT/2,
uniform_CPT/2]).
/*
:- mode dist(+, -).
:- mode get_dist(+, -, -, -).
:- mode get_dist_params(+, -).
:- mode get_dist_domain_size(+, -).
:- mode get_dist_domain(+, -).
:- mode get_dist_nparams(+, -).
:- mode dist(?).
:- mode dist_to_term(+,-).
*/
/*******************************************
store stuff in a DB of the form:
db(Id, Key, CPT, Type, Domain, CPTSize, DSize)
where Id is the id,
Key is a skeleton of the key (main functor only),
CPTSize is the table size or -1,
DSize is the domain size,
Type is
tab for tabular
avg for average
max for maximum
min for minimum
trans for HMMs
continuous
Domain is
a list of values
bool for [t,f]
aminoacids for [a,c,d,e,f,g,h,i,k,l,m,n,p,q,r,s,t,v,w,y]
dna for [a,c,g,t]
rna for [a,c,g,u]
reals
********************************************/
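/*
   For illustration only (not part of the original file): a boolean tabular
   distribution with one boolean parent could be stored as

      db(3, rain(_), [0.9,0.2,0.1,0.8], tab, [t,f], 4, 2)

   where 3 is a made-up Id, 4 the CPT size and 2 the domain size.
*/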
:- dynamic id/1.
id(1).
new_id(Id) :-
retract(id(Id)),
Id1 is Id+1,
assert(id(Id1)).
reset_id :-
retract(id(_)),
assert(id(1)).
dists(X) :- id(X1), X is X1-1.
dist(V, Id, Key, Parents) :-
dist_unbound(V, Culprit), !,
when(Culprit, dist(V, Id, Key, Parents)).
dist(V, Id, Key, Parents) :-
var(Key), !,
when(Key, dist(V, Id, Key, Parents)).
dist(avg(Domain, Parents), avg(Domain), _, Parents).
dist(max(Domain, Parents), max(Domain), _, Parents).
dist(min(Domain, Parents), min(Domain), _, Parents).
dist(p(Type, CPT), Id, Key, FParents) :-
copy_structure(Key, Key0),
distribution(Type, CPT, Id, Key0, [], FParents).
dist(p(Type, CPT, Parents), Id, Key, FParents) :-
copy_structure(Key, Key0),
distribution(Type, CPT, Id, Key0, Parents, FParents).
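%
% Example call (a sketch, not from the original file): registering a new
% tabular distribution for a made-up key rain(0) with no parents,
%
%   ?- dist(p([t,f], [0.3,0.7]), Id, rain(0), Parents).
%   Parents = []
%
% returns a fresh numeric Id recorded under clpbn_dist_db.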
dist_unbound(V, ground(V)) :-
var(V), !.
dist_unbound(p(Type,_), ground(Type)) :-
\+ ground(Type), !.
dist_unbound(p(_,CPT), ground(CPT)) :-
\+ ground(CPT).
dist_unbound(p(Type,_,_), ground(Type)) :-
\+ ground(Type), !.
dist_unbound(p(_,CPT,_), ground(CPT)) :-
\+ ground(CPT).
distribution(bool, trans(CPT), Id, Key, Parents, FParents) :-
is_list(CPT), !,
compress_hmm_table(CPT, Parents, Tab, FParents),
add_dist([t,f], trans, Tab, Parents, Key, Id).
distribution(bool, CPT, Id, Key, Parents, Parents) :-
is_list(CPT), !,
add_dist([t,f], tab, CPT, Parents, Key, Id).
distribution(aminoacids, trans(CPT), Id, Key, Parents, FParents) :-
is_list(CPT), !,
compress_hmm_table(CPT, Parents, Tab, FParents),
add_dist([a,c,d,e,f,g,h,i,k,l,m,n,p,q,r,s,t,v,w,y], trans, Tab, FParents, Key, Id).
distribution(aminoacids, CPT, Id, Key, Parents, Parents) :-
is_list(CPT), !,
add_dist([a,c,d,e,f,g,h,i,k,l,m,n,p,q,r,s,t,v,w,y], tab, CPT, Parents, Key, Id).
distribution(dna, trans(CPT), Id, Key, Parents, FParents) :-
is_list(CPT), !,
compress_hmm_table(CPT, Parents, Tab, FParents),
add_dist([a,c,g,t], trans, Tab, FParents, Key, Id).
distribution(dna, CPT, Id, Key, Parents, Parents) :-
is_list(CPT), !,
add_dist([a,c,g,t], tab, CPT, Parents, Key, Id).
distribution(rna, trans(CPT), Id, Key, Parents, FParents) :-
is_list(CPT), !,
compress_hmm_table(CPT, Parents, Tab, FParents),
add_dist([a,c,g,u], trans, Tab, FParents, Key, Id).
distribution(rna, CPT, Id, Key, Parents, Parents) :-
is_list(CPT), !,
add_dist([a,c,g,u], tab, CPT, Parents, Key, Id).
distribution(Domain, trans(CPT), Id, Key, Parents, FParents) :-
is_list(Domain),
is_list(CPT), !,
compress_hmm_table(CPT, Parents, Tab, FParents),
add_dist(Domain, trans, Tab, FParents, Key, Id).
distribution(Domain, CPT, Id, Key, Parents, Parents) :-
is_list(Domain),
is_list(CPT), !,
add_dist(Domain, tab, CPT, Parents, Key, Id).
add_dist(Domain, Type, CPT, _, Key, Id) :-
recorded(clpbn_dist_db, db(Id, Key, CPT, Type, Domain, _, _), _), !.
add_dist(Domain, Type, CPT, Parents, Key, Id) :-
length(CPT, CPTSize),
length(Domain, DSize),
new_id(Id),
record_parent_sizes(Parents, Id, PSizes, [DSize|PSizes]),
recordz(clpbn_dist_db,db(Id, Key, CPT, Type, Domain, CPTSize, DSize),_).
record_parent_sizes([], Id, [], DSizes) :-
recordz(clpbn_dist_psizes,db(Id, DSizes),_).
record_parent_sizes([P|Parents], Id, [Size|Sizes], DSizes) :-
clpbn:get_atts(P,dist(Dist,_)), !,
get_dist_domain_size(Dist, Size),
record_parent_sizes(Parents, Id, Sizes, DSizes).
record_parent_sizes([_|_], _, _, _).
%
% Often, * is used to code empty in HMMs.
%
compress_hmm_table([], [], [], []).
compress_hmm_table([*|L],[_|Parents],NL,NParents) :- !,
compress_hmm_table(L,Parents,NL,NParents).
compress_hmm_table([Prob|L],[P|Parents],[Prob|NL],[P|NParents]) :-
compress_hmm_table(L,Parents,NL,NParents).
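% For instance (illustrative only), the starred entry and its parent are
% dropped together:
%
%   ?- compress_hmm_table([0.3,*,0.7], [A,B,C], Tab, Ps).
%   Tab = [0.3,0.7], Ps = [A,C]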
dist(Id) :-
recorded(clpbn_dist_db, db(Id, _, _, _, _, _, _), _).
get_dist(Id, Type, Domain, Tab) :-
recorded(clpbn_dist_db, db(Id, _, Tab, Type, Domain, _, _), _).
get_dist_matrix(Id, Parents, Type, Domain, Mat) :-
recorded(clpbn_dist_db, db(Id, _, Tab, Type, Domain, _, DomainSize), _),
get_dsizes(Parents, Sizes, []),
matrix_new(floats, [DomainSize|Sizes], Tab, Mat),
matrix_to_logs(Mat).
get_possibly_deterministic_dist_matrix(Id, Parents, Type, Domain, Mat) :-
recorded(clpbn_dist_db, db(Id, _, Tab, Type, Domain, _, DomainSize), _),
get_dsizes(Parents, Sizes, []),
matrix_new(floats, [DomainSize|Sizes], Tab, Mat).
get_dsizes([], Sizes, Sizes).
get_dsizes([P|Parents], [Sz|Sizes], Sizes0) :-
clpbn:get_atts(P,dist(Dist,_)),
get_dist_domain_size(Dist, Sz),
get_dsizes(Parents, Sizes, Sizes0).
get_dist_params(Id, Parms) :-
recorded(clpbn_dist_db, db(Id, _, Parms, _, _, _, _), _).
get_dist_domain_size(avg(D,_), DSize) :- !,
length(D, DSize).
get_dist_domain_size(Id, DSize) :-
recorded(clpbn_dist_db, db(Id, _, _, _, _, _, DSize), _).
get_dist_domain(Id, Domain) :-
recorded(clpbn_dist_db, db(Id, _, _, _, Domain, _, _), _).
get_dist_key(Id, Key) :-
recorded(clpbn_dist_db, db(Id, Key, _, _, _, _, _), _).
get_dist_nparams(Id, NParms) :-
recorded(clpbn_dist_db, db(Id, _, _, _, _, NParms, _), _).
get_evidence_position(El, avg(Domain), Pos) :- !,
nth0(Pos, Domain, El), !.
get_evidence_position(El, Id, Pos) :-
recorded(clpbn_dist_db, db(Id, _, _, _, Domain, _, _), _),
nth0(Pos, Domain, El), !.
get_evidence_position(El, Id, Pos) :-
recorded(clpbn_dist_db, db(Id, _, _, _, _, _, _), _), !,
throw(error(domain_error(evidence,Id),get_evidence_position(El, Id, Pos))).
get_evidence_position(El, Id, Pos) :-
throw(error(domain_error(no_distribution,Id),get_evidence_position(El, Id, Pos))).
get_evidence_from_position(El, Id, Pos) :-
recorded(clpbn_dist_db, db(Id, _, _, _, Domain, _, _), _),
nth0(Pos, Domain, El), !.
get_evidence_from_position(El, Id, Pos) :-
recorded(clpbn_dist_db, db(Id, _, _, _, _, _, _), _), !,
throw(error(domain_error(evidence,Id),get_evidence_from_position(El, Id, Pos))).
get_evidence_from_position(El, Id, Pos) :-
throw(error(domain_error(no_distribution,Id),get_evidence_from_position(El, Id, Pos))).
dist_to_term(_Id,_Term).
empty_dist(Dist, TAB) :-
recorded(clpbn_dist_psizes,db(Dist, DSizes),_), !,
matrix_new(floats, DSizes, TAB).
empty_dist(Dist, TAB) :-
throw(error(domain_error(no_distribution,Dist),empty_dist(Dist,TAB))).
dist_new_table(Id, NewMat) :-
matrix_to_list(NewMat, List),
recorded(clpbn_dist_db, db(Id, Key, _, A, B, C, D), R),
erase(R),
recorda(clpbn_dist_db, db(Id, Key, List, A, B, C, D), _),
fail.
dist_new_table(_, _).
copy_structure(V, V) :- var(V), !.
copy_structure(V, _) :- primitive(V), !.
copy_structure(Key, Key0) :-
Key =.. [A|LKey],
copy_Lstructure(LKey, LKey0),
Key0 =.. [A|LKey0].
copy_Lstructure([], []).
copy_Lstructure([H|LKey], [NH|LKey0]) :-
copy_structure(H, NH),
copy_Lstructure(LKey, LKey0).
randomise_all_dists :-
randomise_dist(_),
fail.
randomise_all_dists.
randomise_dist(Dist) :-
recorded(clpbn_dist_psizes, db(Dist,DSizes), _),
random_CPT(DSizes, NewCPT),
dist_new_table(Dist, NewCPT).
uniformise_all_dists :-
uniformise_dist(_),
fail.
uniformise_all_dists.
uniformise_dist(Dist) :-
recorded(clpbn_dist_psizes, db(Dist,DSizes), _),
uniform_CPT(DSizes, NewCPT),
dist_new_table(Dist, NewCPT).
reset_all_dists :-
recorded(clpbn_dist_psizes, _, R),
erase(R),
fail.
reset_all_dists :-
recorded(clpbn_dist_db, _, R),
erase(R),
fail.
reset_all_dists :-
reset_id,
fail.
reset_all_dists.

View File

@ -1,136 +0,0 @@
%
%
%
%
:- module(clpbn_evidence,
[
store_evidence/1,
incorporate_evidence/2,
check_stored_evidence/2,
add_evidence/2,
put_evidence/2
]).
:- use_module(library(clpbn), [
{}/1,
clpbn_flag/3,
set_clpbn_flag/2
]).
:- use_module(library('clpbn/dists'), [
get_evidence_position/3
]).
:- use_module(library(rbtrees), [
rb_new/1,
rb_lookup/3,
rb_insert/4
]).
:- meta_predicate store_evidence(:).
:- dynamic node/4, edge/2, evidence/2.
%
% new evidence storage algorithm. The idea is that instead of
% redoing all the evidence every time we query the network, we shall
% keep a precompiled version around.
%
% the format is as follows:
% evidence_store:parent(Key,ParentList,[EvidenceChildren])
%
%
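%
% Illustrative sketch (not from the original file): after a call such as
%
%   ?- store_evidence(professor_ability(p0,h)).
%
% the reachable fragment of the network is cached as node/4 and edge/2
% facts, plus an evidence/2 fact recording the position of h in the
% node's domain; professor_ability/2 here is a hypothetical network goal.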
store_evidence(G) :-
clpbn_flag(solver,PreviousSolver, graphs),
compute_evidence(G, PreviousSolver).
compute_evidence(G, PreviousSolver) :-
catch(call_residue(G, Vars), Ball, evidence_error(Ball,PreviousSolver)), !,
store_graph(Vars),
set_clpbn_flag(solver, PreviousSolver).
compute_evidence(_,PreviousSolver) :-
set_clpbn_flag(solver, PreviousSolver).
evidence_error(Ball,PreviousSolver) :-
set_clpbn_flag(solver,PreviousSolver),
throw(Ball).
store_graph([]).
store_graph([_-node(K,Dom,CPT,TVs,Ev)|Vars]) :-
\+ node(K,_,_,_), !,
assert(node(K,Dom,CPT,TVs)),
( nonvar(Ev) -> assert(evidence(K,Ev)) ; true),
add_links(TVs,K),
store_graph(Vars).
store_graph([_|Vars]) :-
store_graph(Vars).
add_links([],_).
add_links([K0|TVs],K) :-
assert(edge(K,K0)),
add_links(TVs,K).
incorporate_evidence(Vs,AllVs) :-
rb_new(Cache0),
create_open_list(Vs, OL, FL, Cache0, CacheI),
do_variables(OL, FL, CacheI),
extract_vars(OL, AllVs).
create_open_list([], L, L, C, C).
create_open_list([V|Vs], [K-V|OL], FL, C0, CF) :-
clpbn:get_atts(V,[key(K)]),
add_evidence(K, V),
rb_insert(C0, K, V, CI),
create_open_list(Vs, OL, FL, CI, CF).
do_variables([], [], _) :- !.
do_variables([K-V|Vs], Vf, C0) :-
check_for_evidence(K, V, Vf, Vff, C0, Ci),
do_variables(Vs, Vff, Ci).
create_new_variable(K, V, Vf0, Vff, C0, Cf) :-
node(K,Dom, CPT, TVs),
{ V = K with p(Dom, CPT, NTVs) },
add_evidence(K, V),
add_variables(TVs, NTVs, Vf0, Vff, C0, Cf).
add_variables([], [], Vf, Vf, C, C).
add_variables([K|TVs], [V|NTVs], Vf0, Vff, C0, Cf) :-
rb_lookup(K, V, C0), !,
add_variables(TVs, NTVs, Vf0, Vff, C0, Cf).
add_variables([K|TVs], [V|NTVs], [K-V|Vf0], Vff, C0, Cf) :-
rb_insert(C0, K, V, C1),
create_new_variable(K, V, Vf0, Vf1, C1, C2),
add_variables(TVs, NTVs, Vf1, Vff, C2, Cf).
extract_vars([], []).
extract_vars([_-V|Cache], [V|AllVs]) :-
extract_vars(Cache, AllVs).
%make sure that we are
check_stored_evidence(K, Ev) :-
evidence(K, Ev0), !, Ev0 = Ev.
check_stored_evidence(_, _).
add_evidence(K, V) :-
evidence(K, Ev), !,
clpbn:put_atts(V, [evidence(Ev)]).
add_evidence(_, _).
check_for_evidence(_, V, Vf, Vf, C, C) :-
clpbn:get_atts(V, [evidence(_)]), !.
check_for_evidence(K, _, Vf0, Vff, C0, Ci) :-
findall(Rt,edge(Rt,K),Rts),
add_variables(Rts, _, Vf0, Vff, C0, Ci).
put_evidence(K, V) :-
clpbn:get_atts(V, [dist(Id,_)]),
get_evidence_position(K, Id, Ev),
clpbn:put_atts(V, [evidence(Ev)]).

View File

@@ -1,53 +0,0 @@
This is a version of the school database, based on the PRM School example.
There are five main files:
school_128.yap: a school with 128 professors, 256 courses and 4096 students.
school_64.yap: a medium-sized school
school_32.yap: a small school
schema.yap: the schema
tables: the CPTs
Example queries you can try:
professor_ability(p0,X).
professor_popularity(p0,X).
professor_ability(p0,X), professor_popularity(p0,h).
professor_ability(p0,h), professor_popularity(p0,X).
registration_grade(r0,X).
registration_grade(r0,X), registration_course(r0,C), course_difficulty(C,h).
registration_grade(r0,X), registration_course(r0,C), course_difficulty(C,h), registration_student(r0,S), student_intelligence(S,h).
registration_grade(r0,X), registration_course(r0,C), course_difficulty(C,l), registration_student(r0,S), student_intelligence(S,h).
registration_satisfaction(r0,X).
registration_satisfaction(r0,X), registration_student(r0,S), student_intelligence(S,h).
registration_satisfaction(r0,X), registration_grade(r0,a).
registration_satisfaction(r0,X), registration_grade(r0,d).
registration_satisfaction(r0,h), registration_grade(r0,X).
course_rating(c0,X).
course_rating(c0,h), course_difficulty(c0,X).
course_difficulty(c0,X).
student_ranking(s0,X).
student_ranking(s0,X), student_intelligence(s0,h).

View File

@@ -1,17 +0,0 @@
:- [school_128].
professor_popularity(p0,h) :- {}.
professor_popularity(p3,h) :- {}.
professor_popularity(p5,l) :- {}.
professor_popularity(p45,h) :- {}.
professor_popularity(p15,m) :- {}.
course_rating(c0, h) :- {}.
course_rating(c1, m) :- {}.
course_rating(c2, l) :- {}.
course_rating(c3, h) :- {}.
course_rating(c4, m) :- {}.
course_rating(c5, l) :- {}.
course_rating(c62, m) :- {}.

View File

@@ -1,44 +0,0 @@
:- [pos:sample32].
:- ['~/Yap/work/CLPBN/clpbn/examples/School/school_32'].
% These libraries provide the same functionality.
:- [library('clpbn/learning/mle')].
%:- [library('clpbn/learning/bnt_parms')].
:- [library(matrix)].
main :-
findall(X,goal(X),L),
learn_parameters(L,CPTs),
write_cpts(CPTs).
goal(professor_ability(P,V)) :-
pos:professor_ability(P,V),
p(pa, M), random < M.
goal(professor_popularity(P,V)) :-
pos:professor_popularity(P,V),
p(pp, M), random < M.
goal(registration_grade(P,V)) :-
pos:registration_grade(P,V),
p(rg, M), random < M.
goal(student_intelligence(P,V)) :-
pos:student_intelligence(P,V),
p(si, M), random < M.
goal(course_difficulty(P,V)) :-
pos:course_difficulty(P,V),
p(cd, M), random < M.
goal(registration_satisfaction(P,V)) :-
pos:registration_satisfaction(P,V),
p(rs, M), random < M.
% sampling parameter
p(_, 1.0).
write_cpts([]).
write_cpts([CPT|CPTs]) :-
matrix_to_list(CPT,L),
format('CPT=~w~n',[L]),
write_cpts(CPTs).

File diff suppressed because it is too large.

View File

@@ -1,71 +0,0 @@
/* base file for school database. Supposed to be called from school_*.yap */
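% A minimal usage sketch (the professor name p0 is hypothetical and must be
% declared by the school_*.yap file that loads this schema):
%
%   ?- professor_ability(p0, Abi), professor_popularity(p0, Pop).
%
% Abi and Pop come back as CLP(BN) attributed variables ranging over [h,m,l].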
professor_key(Key) :-
professor(Key).
professor_ability(Key,Abi) :-
abi_table(Key, AbiDist),
{ Abi = ability(Key) with p([h,m,l], AbiDist) }.
professor_popularity(Key, Pop) :-
professor_ability(Key, Abi),
pop_table(Key,PopTable),
{ Pop = popularity(Key) with
p([h,m,l], PopTable,[Abi]) }.
registration_key(Key) :-
registration(Key, _, _).
registration_course(Key, CKey) :-
registration(Key, CKey, _).
registration_student(Key, SKey) :-
registration(Key, _, SKey).
registration_grade(Key, Grade) :-
registration(Key, CKey, SKey),
course_difficulty(CKey, Dif),
student_intelligence(SKey, Int),
grade_table(Int, Dif, Table),
{ Grade = grade(Key) with Table }.
% registration_satisfaction(r0, h) :- {}.
registration_satisfaction(Key, Sat) :-
registration_course(Key, CKey),
course_professor(CKey, PKey),
professor_ability(PKey, Abi),
registration_grade(Key, Grade),
satisfaction_table(Abi, Grade, Table),
{ Sat = satisfaction(Key) with Table }.
course_key(Key) :-
course(Key,_).
course_professor(Key, PKey) :-
course(Key, PKey).
course_rating(CKey, Rat) :-
setof(Sat, RKey^(registration_course(RKey,CKey), registration_satisfaction(RKey,Sat)), Sats),
{ Rat = rating(CKey) with avg([h,m,l],Sats) }.
course_difficulty(Key, Dif) :-
dif_table(Key, Dist),
{ Dif = difficulty(Key) with p([h,m,l], Dist) }.
student_key(Key) :-
student(Key).
student_intelligence(Key, Int) :-
int_table(Key, IDist, Domain),
{ Int = intelligence(Key) with p(Domain, IDist) }.
student_ranking(Key, Rank) :-
setof(Grade, CKey^(registration_student(CKey,Key),
registration_grade(CKey, Grade)), Grades),
{ Rank = ranking(Key) with avg([a,b,c,d],Grades) }.
:- ensure_loaded(tables).

File diff suppressed because it is too large.

File diff suppressed because it is too large.

File diff suppressed because it is too large.

View File

@@ -1,45 +0,0 @@
int_table(_, [0.5,
0.4,
0.1],[h, m, l]).
grade_table(I, D,
/* h h h m h l m h m m m l l h l m l l */
p([a,b,c,d],
[ 0.2, 0.7, 0.85, 0.1, 0.2, 0.5, 0.01, 0.05,0.1 ,
0.6, 0.25, 0.12, 0.3, 0.6,0.35,0.04, 0.15, 0.4 ,
0.15,0.04, 0.02, 0.4,0.15,0.12, 0.5, 0.6, 0.4,
0.05,0.01, 0.01, 0.2,0.05,0.03, 0.45, 0.2, 0.1 ], [I,D])).
/*
A: professor's ability;
G: student's grade (for the course registration).
*/
satisfaction_table(A, G,
/* h a h b h c h d m a m b m c m d l a l b l c l d */
p([h,m,l],
/*h*/ [0.98, 0.9,0.8 , 0.6, 0.9, 0.4, 0.2, 0.01, 0.5, 0.2, 0.01, 0.01,
/*m*/ 0.01, 0.09,0.15, 0.3,0.05, 0.4, 0.3, 0.04,0.35, 0.3, 0.09, 0.01 ,
/*l*/ 0.01, 0.01,0.05, 0.1,0.05, 0.2, 0.5, 0.95,0.15, 0.5, 0.9, 0.98], [A,G])).
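% As a worked reading of the matrix above: with a highly able professor
% (A = h) and an 'a' grade (G = a), the first column gives the satisfaction
% distribution h:0.98, m:0.01, l:0.01.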
% The idea is quite simple:
% hs = h -> r = ( 0.9, 0.1, 0)
% hs = m -> r = ( 0.2, 0.6, 0.2)
% hs = l -> r = ( 0, 0.1, 0.9)
%
% add them all up and divide by the number of elements in the table!
%
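% For instance, with two satisfied registrations, one h and one m, the rule
% above gives ((0.9,0.1,0) + (0.2,0.6,0.2)) / 2 = (0.55,0.35,0.1) as the
% rating distribution (illustrative numbers, taken from the sketch above).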
rating_prob_table([0.9,0.05,0.01,
0.09,0.9,0.09,
0.01,0.05,0.9]).
abi_table( _, [0.50, 0.40, 0.10]).
pop_table(_, [0.9, 0.2, 0.01,
0.09, 0.6, 0.09,
0.01, 0.2, 0.9]).
dif_table( _, [0.25, 0.50, 0.25]).

View File

@@ -1,35 +0,0 @@
%
% adapted from Hendrik Blockeel's ILP04 paper.
%
:- use_module(library(clpbn)).
cg(X,1,C):-
father(Y,X),
cg(Y,1,C1),cg(Y,2,C2),
parent_cpt(cg(X,1), C1, C2, C).
cg(X,2,C):-
mother(Y,X),
cg(Y,1,C1),cg(Y,2,C2),
parent_cpt(cg(X,2), C1, C2, C).
cg(f,X,C) :-
prior_cpt(cg(f,X),C).
cg(m,X,C) :-
prior_cpt(cg(m,X),C).
prior_cpt(CKEY, C) :-
{ C = CKEY with p([p,w], [0.5,0.5])}.
parent_cpt(CKEY, C1, C2, C) :-
{ C = CKEY with p([p,w], [ 1,0.5,0.5,0.0,
0.0,0.5,0.5, 1],[C1,C2])}.
father(f,s).
mother(m,s).
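% A hedged usage sketch: asking for one of the child's gene copies, e.g.
%
%   ?- cg(s,1,C).
%
% builds the network for both parental copies and returns C as a CLP(BN)
% variable over [p,w].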

View File

@@ -1,31 +0,0 @@
:- ensure_loaded(library(clpbn)).
wet_grass(W) :-
sprinkler(S),
rain(R),
{ W = wet with p([f,t],
([1.0,0.1,0.1,0.01,
0.0,0.9,0.9,0.99]),
[S,R])
}.
sprinkler(P) :-
cloudy(C),
{ P = sprinkler with p([f,t],
[0.5,0.9,
0.5,0.1],
[C])
}.
rain(R) :-
cloudy(C),
{ R = rain with p([f,t], [0.8,0.2,
0.2,0.8],
[C]) }.
cloudy(C) :-
{ C = cloudy with p([f,t],[0.5,0.5],[]) }.
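% A minimal usage sketch: querying the top node builds the whole network,
%
%   ?- wet_grass(W).
%
% and W is returned as a CLP(BN) variable over [f,t], to be handled by the
% currently selected solver.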

View File

@@ -1,546 +0,0 @@
%
% each variable is represented by a node in a binary tree.
% each node contains:
% key,
% current_value
% Markov Blanket
%
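%
% Concretely, in the code below each node ends up stored as an argument of a
% large graph/N term, roughly of the form
%   var(Var, Index, CurrentPos, Vals, Size, CPTs, Parents, Table, ParentIndices)
% (these argument names are descriptive labels only, not part of the source).
%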
:- module(clpbn_gibbs,
[gibbs/3,
check_if_gibbs_done/1,
init_gibbs_solver/4,
run_gibbs_solver/3]).
:- use_module(library(rbtrees),
[rb_new/1,
rb_insert/4,
rb_lookup/3]).
:- use_module(library(lists),
[member/2,
append/3,
delete/3,
max_list/2,
sum_list/2]).
:- use_module(library(ordsets),
[ord_subtract/3]).
:- use_module(library('clpbn/matrix_cpt_utils'), [
project_from_CPT/3,
reorder_CPT/5,
multiply_possibly_deterministic_factors/3,
column_from_possibly_deterministic_CPT/3,
normalise_possibly_deterministic_CPT/2,
list_from_CPT/2]).
:- use_module(library('clpbn/utils'), [
check_for_hidden_vars/3]).
:- use_module(library('clpbn/dists'), [
get_possibly_deterministic_dist_matrix/5,
get_dist_domain_size/2]).
:- use_module(library('clpbn/topsort'), [
topsort/2]).
:- use_module(library('clpbn/display'), [
clpbn_bind_vals/3]).
:- use_module(library('clpbn/connected'),
[
influences/4
]).
:- dynamic gibbs_params/3.
:- dynamic explicit/1.
% arguments:
%
% list of output variables
% list of attributed variables
%
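% A hedged calling sketch (variable names are illustrative): LVs is a list of
% lists of query variables and Vs0 the attributed variables of the network,
% so a single marginal might be asked for as
%
%   ?- ..., gibbs([[X]], Vs0, AllDiffs).
%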
gibbs(LVs,Vs0,AllDiffs) :-
init_gibbs_solver(LVs, Vs0, AllDiffs, Vs),
run_gibbs_solver(LVs, LPs, Vs),
clpbn_bind_vals(LVs,LPs,AllDiffs),
clean_up.
init_gibbs_solver(GoalVs, Vs0, _, Vs) :-
clean_up,
term_variables(GoalVs, LVs),
check_for_hidden_vars(Vs0, Vs0, Vs1),
influences(Vs1, LVs, _, Vs2),
sort(Vs2,Vs).
run_gibbs_solver(LVs, LPs, Vs) :-
initialise(Vs, Graph, LVs, OutputVars, VarOrder),
process(VarOrder, Graph, OutputVars, Estimates),
sum_up_all(Estimates, LPs),
clean_up.
initialise(LVs, Graph, GVs, OutputVars, VarOrder) :-
init_keys(Keys0),
gen_keys(LVs, 0, VLen, Keys0, Keys),
functor(Graph,graph,VLen),
graph_representation(LVs, Graph, 0, Keys, TGraph),
compile_graph(Graph),
topsort(TGraph, VarOrder),
%writeln(TGraph:VarOrder),
% show_sorted(VarOrder, Graph),
add_all_output_vars(GVs, Keys, OutputVars).
init_keys(Keys0) :-
rb_new(Keys0).
gen_keys([], I, I, Keys, Keys).
gen_keys([V|Vs], I0, If, Keys0, Keys) :-
clpbn:get_atts(V,[evidence(_)]), !,
gen_keys(Vs, I0, If, Keys0, Keys).
gen_keys([V|Vs], I0, If, Keys0, Keys) :-
I is I0+1,
rb_insert(Keys0,V,I,KeysI),
gen_keys(Vs, I, If, KeysI, Keys).
graph_representation([],_,_,_,[]).
graph_representation([V|Vs], Graph, I0, Keys, TGraph) :-
clpbn:get_atts(V,[evidence(_)]), !,
clpbn:get_atts(V, [dist(Id,Parents)]),
get_possibly_deterministic_dist_matrix(Id, Parents, _, Vals, Table),
get_sizes(Parents, Szs),
length(Vals,Sz),
project_evidence_out([V|Parents],[V|Parents],Table,[Sz|Szs],Variables,NewTable),
% all variables are parents
propagate2parents(Variables, NewTable, Variables, Graph, Keys),
graph_representation(Vs, Graph, I0, Keys, TGraph).
graph_representation([V|Vs], Graph, I0, Keys, [I-IParents|TGraph]) :-
I is I0+1,
clpbn:get_atts(V, [dist(Id,Parents)]),
get_possibly_deterministic_dist_matrix(Id, Parents, _, Vals, Table),
get_sizes(Parents, Szs),
length(Vals,Sz),
project_evidence_out([V|Parents],[V|Parents],Table,[Sz|Szs],Variables,NewTable),
Variables = [V|NewParents],
sort_according_to_indices(NewParents,Keys,SortedNVs,SortedIndices),
reorder_CPT(Variables,NewTable,[V|SortedNVs],NewTable2,_),
add2graph(V, Vals, NewTable2, SortedIndices, Graph, Keys),
propagate2parents(NewParents, NewTable, Variables, Graph,Keys),
parent_indices(NewParents, Keys, IVariables0),
sort(IVariables0, IParents),
arg(I, Graph, var(_,_,_,_,_,_,_,NewTable2,SortedIndices)),
graph_representation(Vs, Graph, I, Keys, TGraph).
write_pars([]).
write_pars([V|Parents]) :-
clpbn:get_atts(V, [key(K),dist(I,_)]),write(K:I),nl,
write_pars(Parents).
get_sizes([], []).
get_sizes([V|Parents], [Sz|Szs]) :-
clpbn:get_atts(V, [dist(Id,_)]),
get_dist_domain_size(Id, Sz),
get_sizes(Parents, Szs).
parent_indices([], _, []).
parent_indices([V|Parents], Keys, [I|IParents]) :-
rb_lookup(V, I, Keys),
parent_indices(Parents, Keys, IParents).
%
% first, remove nodes that have evidence from tables.
%
project_evidence_out([],Deps,Table,_,Deps,Table).
project_evidence_out([V|Parents],Deps,Table,Szs,NewDeps,NewTable) :-
clpbn:get_atts(V,[evidence(_)]), !,
project_from_CPT(V,tab(Table,Deps,Szs),tab(ITable,IDeps,ISzs)),
project_evidence_out(Parents,IDeps,ITable,ISzs,NewDeps,NewTable).
project_evidence_out([_Par|Parents],Deps,Table,Szs,NewDeps,NewTable) :-
project_evidence_out(Parents,Deps,Table,Szs,NewDeps,NewTable).
propagate2parents([], _, _, _, _).
propagate2parents([V|NewParents], Table, Variables, Graph, Keys) :-
delete(Variables,V,NVs),
sort_according_to_indices(NVs,Keys,SortedNVs,SortedIndices),
reorder_CPT(Variables,Table,[V|SortedNVs],NewTable,_),
add2graph(V, _, NewTable, SortedIndices, Graph, Keys),
propagate2parents(NewParents,Table, Variables, Graph, Keys).
add2graph(V, Vals, Table, IParents, Graph, Keys) :-
rb_lookup(V, Index, Keys),
(var(Vals) -> true ; length(Vals,Sz)),
arg(Index, Graph, var(V,Index,_,Vals,Sz,VarSlot,_,_,_)),
member(tabular(Table,Index,IParents), VarSlot), !.
sort_according_to_indices(NVs,Keys,SortedNVs,SortedIndices) :-
vars2indices(NVs,Keys,ToSort),
keysort(ToSort, Sorted),
split_parents(Sorted, SortedNVs,SortedIndices).
split_parents([], [], []).
split_parents([I-V|Sorted], [V|SortedNVs],[I|SortedIndices]) :-
split_parents(Sorted, SortedNVs, SortedIndices).
vars2indices([],_,[]).
vars2indices([V|Parents],Keys,[I-V|IParents]) :-
rb_lookup(V, I, Keys),
vars2indices(Parents,Keys,IParents).
%
% This is the really cool bit.
%
compile_graph(Graph) :-
Graph =.. [_|VarsInfo],
compile_vars(VarsInfo,Graph).
compile_vars([],_).
compile_vars([var(_,I,_,Vals,Sz,VarSlot,Parents,_,_)|VarsInfo],Graph)
:-
compile_var(I,Vals,Sz,VarSlot,Parents,Graph),
compile_vars(VarsInfo,Graph).
compile_var(I,Vals,Sz,VarSlot,Parents,Graph) :-
fetch_all_parents(VarSlot,Graph,[],Parents,[],Sizes),
mult_list(Sizes,1,TotSize),
compile_var(TotSize,I,Vals,Sz,VarSlot,Parents,Sizes,Graph).
fetch_all_parents([],_,Parents,Parents,Sizes,Sizes) :- !.
fetch_all_parents([tabular(_,_,Ps)|CPTs],Graph,Parents0,ParentsF,Sizes0,SizesF) :-
merge_these_parents(Ps,Graph,Parents0,ParentsI,Sizes0,SizesI),
fetch_all_parents(CPTs,Graph,ParentsI,ParentsF,SizesI,SizesF).
merge_these_parents([],_,Parents,Parents,Sizes,Sizes).
merge_these_parents([I|Ps],Graph,Parents0,ParentsF,Sizes0,SizesF) :-
member(I,Parents0), !,
merge_these_parents(Ps,Graph,Parents0,ParentsF,Sizes0,SizesF).
merge_these_parents([I|Ps],Graph,Parents0,ParentsF,Sizes0,SizesF) :-
arg(I,Graph,var(_,I,_,Vals,_,_,_,_,_)),
length(Vals, Sz),
add_parent(Parents0,I,ParentsI,Sizes0,Sz,SizesI),
merge_these_parents(Ps,Graph,ParentsI,ParentsF,SizesI,SizesF).
add_parent([],I,[I],[],Sz,[Sz]).
add_parent([P|Parents0],I,[I,P|Parents0],Sizes0,Sz,[Sz|Sizes0]) :-
P > I, !.
add_parent([P|Parents0],I,[P|ParentsI],[S|Sizes0],Sz,[S|SizesI]) :-
add_parent(Parents0,I,ParentsI,Sizes0,Sz,SizesI).
mult_list([],Mult,Mult).
mult_list([Sz|Sizes],Mult0,Mult) :-
MultI is Sz*Mult0,
mult_list(Sizes,MultI,Mult).
% compile node as set of facts, faster execution
compile_var(TotSize,I,_Vals,Sz,CPTs,Parents,_Sizes,Graph) :-
TotSize < 1024*64, TotSize > 0, !,
multiply_all(I,Parents,CPTs,Sz,Graph).
% do it dynamically
compile_var(_,_,_,_,_,_,_,_).
multiply_all(I,Parents,CPTs,Sz,Graph) :-
markov_blanket_instance(Parents,Graph,Values),
(
multiply_all(CPTs,Graph,Probs)
->
store_mblanket(I,Values,Probs)
;
throw(error(domain_error(bayesian_domain),gibbs_cpt(I,Parents,Values,Sz)))
),
fail.
multiply_all(I,_,_,_,_) :-
assert(explicit(I)).
% note: what matters is how this predicate instantiates the temp
% slot in the graph!
markov_blanket_instance([],_,[]).
markov_blanket_instance([I|Parents],Graph,[Pos|Values]) :-
arg(I,Graph,var(_,I,Pos,Vals,_,_,_,_,_)),
fetch_val(Vals,0,Pos),
markov_blanket_instance(Parents,Graph,Values).
% backtrack through every value in domain
%
fetch_val([_|_],Pos,Pos).
fetch_val([_|Vals],I0,Pos) :-
I is I0+1,
fetch_val(Vals,I,Pos).
multiply_all([tabular(Table,_,Parents)|CPTs],Graph,Probs) :-
fetch_parents(Parents, Graph, Vals),
column_from_possibly_deterministic_CPT(Table,Vals,Probs0),
multiply_more(CPTs,Graph,Probs0,Probs).
fetch_parents([], _, []).
fetch_parents([P|Parents], Graph, [Val|Vals]) :-
arg(P,Graph,var(_,_,Val,_,_,_,_,_,_)),
fetch_parents(Parents, Graph, Vals).
multiply_more([],_,Probs0,LProbs) :-
normalise_possibly_deterministic_CPT(Probs0, Probs),
list_from_CPT(Probs, LProbs0),
accumulate_up_list(LProbs0, 0.0, LProbs).
multiply_more([tabular(Table,_,Parents)|CPTs],Graph,Probs0,Probs) :-
fetch_parents(Parents, Graph, Vals),
column_from_possibly_deterministic_CPT(Table, Vals, P0),
multiply_possibly_deterministic_factors(Probs0, P0, ProbsI),
multiply_more(CPTs,Graph,ProbsI,Probs).
accumulate_up_list([], _, []).
accumulate_up_list([P|LProbs], P0, [P1|L]) :-
P1 is P0+P,
accumulate_up_list(LProbs, P1, L).
store_mblanket(I,Values,Probs) :-
recordz(mblanket,m(I,Values,Probs),_).
add_all_output_vars([], _, []).
add_all_output_vars([Vs|LVs], Keys, [Is|OutputVars]) :-
add_output_vars(Vs, Keys, Is),
add_all_output_vars(LVs, Keys, OutputVars).
add_output_vars([], _, []).
add_output_vars([V|LVs], Keys, [I|OutputVars]) :-
rb_lookup(V, I, Keys),
add_output_vars(LVs, Keys, OutputVars).
process(VarOrder, Graph, OutputVars, Estimates) :-
gibbs_params(NChains,BurnIn,NSamples),
functor(Graph,_,Len),
init_chains(NChains,VarOrder,Len,Graph,Chains0),
init_estimates(NChains,OutputVars,Graph,Est0),
process_chains(BurnIn,VarOrder,BurnedIn,Chains0,Graph,Len,Est0,_),
process_chains(NSamples,VarOrder,_,BurnedIn,Graph,Len,Est0,Estimates).
%
% I use a uniform distribution to generate the initial sample.
%
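% For example, a node with three values (Sz = 3) gets its initial Pos drawn
% uniformly from {0,1,2} by the Pos is integer(random*Sz) goal below.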
init_chains(0,_,_,_,[]) :- !.
init_chains(I,VarOrder,Len,Graph,[Chain|Chains]) :-
init_chain(VarOrder,Len,Graph,Chain),
I1 is I-1,
init_chains(I1,VarOrder,Len,Graph,Chains).
init_chain(VarOrder,Len,Graph,Chain) :-
functor(Chain,sample,Len),
gen_sample(VarOrder,Graph,Chain).
gen_sample([],_,_) :- !.
gen_sample([I|Vs],Graph,Chain) :-
arg(I,Graph,var(_,I,_,_,Sz,_,_,_,_)),
Pos is integer(random*Sz),
arg(I,Chain,Pos),
gen_sample(Vs,Graph,Chain).
init_estimates(0,_,_,[]) :- !.
init_estimates(NChains,OutputVars,Graph,[Est|Est0]) :-
NChainsI is NChains-1,
init_estimate_all_outvs(OutputVars,Graph,Est),
init_estimates(NChainsI,OutputVars,Graph,Est0).
init_estimate_all_outvs([],_,[]).
init_estimate_all_outvs([Vs|OutputVars],Graph,[E|Est]) :-
init_estimate(Vs, Graph, E),
init_estimate_all_outvs(OutputVars,Graph,Est).
init_estimate([],_,[]).
init_estimate([V],Graph,[I|E0L]) :- !,
arg(V,Graph,var(_,I,_,_,Sz,_,_,_,_)),
gen_e0(Sz,E0L).
init_estimate(Vs,Graph,me(Is,Mults,Es)) :-
generate_est_mults(Vs, Is, Graph, Mults, Sz),
gen_e0(Sz,Es).
generate_est_mults([], [], _, [], 1).
generate_est_mults([V|Vs], [I|Is], Graph, [M0|Mults], M) :-
arg(V,Graph,var(_,I,_,_,Sz,_,_,_,_)),
generate_est_mults(Vs, Is, Graph, Mults, M0),
M is M0*Sz.
gen_e0(0,[]) :- !.
gen_e0(Sz,[0|E0L]) :-
Sz1 is Sz-1,
gen_e0(Sz1,E0L).
process_chains(0,_,F,F,_,_,Est,Est) :- !.
process_chains(ToDo,VarOrder,End,Start,Graph,Len,Est0,Estf) :-
%format('ToDo = ~d~n',[ToDo]),
process_chains(Start,VarOrder,Int,Graph,Len,Est0,Esti),
% (ToDo mod 100 =:= 1 -> statistics,cvt2problist(Esti, Probs), Int =[S|_], format('did ~d: ~w~n ~w~n',[ToDo,Probs,S]) ; true),
ToDo1 is ToDo-1,
process_chains(ToDo1,VarOrder,End,Int,Graph,Len,Esti,Estf).
process_chains([], _, [], _, _,[],[]).
process_chains([Sample0|Samples0], VarOrder, [Sample|Samples], Graph, SampLen,[E0|E0s],[Ef|Efs]) :-
functor(Sample,sample,SampLen),
do_sample(VarOrder,Sample,Sample0,Graph),
% format('Sample = ~w~n',[Sample]),
update_estimates(E0,Sample,Ef),
process_chains(Samples0, VarOrder, Samples, Graph, SampLen,E0s,Efs).
do_sample([],_,_,_).
do_sample([I|VarOrder],Sample,Sample0,Graph) :-
do_var(I,Sample,Sample0,Graph),
do_sample(VarOrder,Sample,Sample0,Graph).
do_var(I,Sample,Sample0,Graph) :-
( explicit(I) ->
arg(I,Graph,var(_,_,_,_,_,_,Parents,_,_)),
fetch_parents(Parents,I,Sample,Sample0,Args),
recorded(mblanket,m(I,Args,Vals),_)
;
arg(I,Graph,var(_,_,_,_,_,CPTs,Parents,_,_)),
fetch_parents(Parents,I,Sample,Sample0,Bindings),
multiply_all_in_context(Parents,Bindings,CPTs,Graph,Vals)
),
X is random,
pick_new_value(Vals,X,0,Val),
arg(I,Sample,Val).
multiply_all_in_context(Parents,Args,CPTs,Graph,Vals) :-
set_pos(Parents,Args,Graph),
multiply_all(CPTs,Graph,Vals),
assert(mall(Vals)), fail.
multiply_all_in_context(_,_,_,_,Vals) :-
retract(mall(Vals)).
set_pos([],[],_).
set_pos([I|Is],[Pos|Args],Graph) :-
arg(I,Graph,var(_,I,Pos,_,_,_,_,_,_)),
set_pos(Is,Args,Graph).
fetch_parents([],_,_,_,[]).
fetch_parents([P|Parents],I,Sample,Sample0,[VP|Args]) :-
arg(P,Sample,VP),
nonvar(VP), !,
fetch_parents(Parents,I,Sample,Sample0,Args).
fetch_parents([P|Parents],I,Sample,Sample0,[VP|Args]) :-
arg(P,Sample0,VP),
fetch_parents(Parents,I,Sample,Sample0,Args).
pick_new_value([V|Vals],X,I0,Val) :-
( X < V ->
Val = I0
;
I is I0+1,
pick_new_value(Vals,X,I,Val)
).
update_estimates([],_,[]).
update_estimates([Est|E0],Sample,[NEst|Ef]) :-
update_estimate(Est,Sample,NEst),
update_estimates(E0,Sample,Ef).
update_estimate([I|E],Sample,[I|NE]) :-
arg(I,Sample,V),
update_estimate_for_var(V,E,NE).
update_estimate(me(Is,Mult,E),Sample,me(Is,Mult,NE)) :-
get_estimate_pos(Is, Sample, Mult, 0, V),
update_estimate_for_var(V,E,NE).
get_estimate_pos([], _, [], V, V).
get_estimate_pos([I|Is], Sample, [M|Mult], V0, V) :-
arg(I,Sample,VV),
VI is VV*M+V0,
get_estimate_pos(Is, Sample, Mult, VI, V).
update_estimate_for_var(V0,[X|T],[X1|NT]) :-
( V0 == 0 ->
X1 is X+1,
NT = T
;
V1 is V0-1,
X1 = X,
update_estimate_for_var(V1,T,NT)
).
check_if_gibbs_done(Var) :-
get_atts(Var, [dist(_)]), !.
clean_up :-
eraseall(mblanket),
fail.
clean_up :-
retractall(explicit(_)),
fail.
clean_up.
gibbs_params(5,1000,10000).
cvt2problist([], []).
cvt2problist([[[_|E]]|Est0], [Ps|Probs]) :-
sum_all(E,0,Sum),
do_probs(E,Sum,Ps),
cvt2problist(Est0, Probs) .
sum_all([],Sum,Sum).
sum_all([E|Es],S0,Sum) :-
SI is S0+E,
sum_all(Es,SI,Sum).
do_probs([],_,[]).
do_probs([E|Es],Sum,[P|Ps]) :-
P is E/Sum,
do_probs(Es,Sum,Ps).
show_sorted([], _) :- nl.
show_sorted([I|VarOrder], Graph) :-
arg(I,Graph,var(V,I,_,_,_,_,_,_,_)),
clpbn:get_atts(V,[key(K)]),
format('~w ',[K]),
show_sorted(VarOrder, Graph).
sum_up_all([[]|_], []).
sum_up_all([[C|MoreC]|Chains], [Dist|Dists]) :-
extract_sums(Chains, CurrentChains, LeftChains),
sum_up([C|CurrentChains], Dist),
sum_up_all([MoreC|LeftChains], Dists).
extract_sums([], [], []).
extract_sums([[C|Chains]|MoreChains], [C|CurrentChains], [Chains|LeftChains]) :-
extract_sums(MoreChains, CurrentChains, LeftChains).
sum_up([[_|Counts]|Chains], Dist) :-
add_up(Counts,Chains, Add),
normalise(Add, Dist).
sum_up([me(_,_,Counts)|Chains], Dist) :-
add_up_mes(Counts,Chains, Add),
normalise(Add, Dist).
add_up(Counts,[],Counts).
add_up(Counts,[[_|Cs]|Chains], Add) :-
sum_lists(Counts, Cs, NCounts),
add_up(NCounts, Chains, Add).
add_up_mes(Counts,[],Counts).
add_up_mes(Counts,[me(_,_,Cs)|Chains], Add) :-
sum_lists(Counts, Cs, NCounts),
add_up_mes(NCounts, Chains, Add).
sum_lists([],[],[]).
sum_lists([Count|Counts], [C|Cs], [NC|NCounts]) :-
NC is Count+C,
sum_lists(Counts, Cs, NCounts).
normalise(Add, Dist) :-
sum_list(Add, Sum),
divide_list(Add, Sum, Dist).
divide_list([], _, []).
divide_list([C|Add], Sum, [P|Dist]) :-
P is C/Sum,
divide_list(Add, Sum, Dist).

View File

@@ -1,43 +0,0 @@
%
% Just output a graph with all the variables.
%
:- module(clpbn2graph, [clpbn2graph/1]).
:- use_module(library('clpbn/utils'), [
check_for_hidden_vars/3]).
:- use_module(library('clpbn/dists'), [
get_dist/4]).
:- attribute node/0.
clpbn2graph(Vs) :-
check_for_hidden_vars(Vs, Vs, NVs),
clpbn2graph2(NVs).
clpbn2graph2([]).
clpbn2graph2([V|Vs]) :-
put_atts(V,[node]),
clpbn2graph2(Vs).
%
% what is actually output
%
attribute_goal(V, node(K,Dom,CPT,TVs,Ev)) :-
get_atts(V, [node]),
clpbn:get_atts(V, [key(K),dist(Id,Vs)]),
get_dist(Id,_,Dom,CPT),
translate_vars(Vs,TVs),
( clpbn:get_atts(V, [evidence(Ev)]) -> true ; true).
translate_vars([],[]).
translate_vars([V|Vs],[K|Ks]) :-
clpbn:get_atts(V, [key(K)]),
translate_vars(Vs,Ks).

View File

@@ -1,71 +0,0 @@
:- module(clpbn_gviz, [clpbn2gviz/4]).
clpbn2gviz(Stream, Name, Network, Output) :-
format(Stream, 'digraph ~w {
graph [ rankdir="LR" ];~n',[Name]),
output_vars(Stream, Network),
info_ouput(Stream, Output),
format(Stream, '}~n',[]).
output_vars(_, []).
output_vars(Stream, [V|Vs]) :-
output_var(Stream, V),
output_vars(Stream, Vs).
output_var(Stream, V) :-
clpbn:get_atts(V,[key(Key),evidence(_)]),
output_key(Stream,Key),
format(Stream, ' [ shape=box, style=filled, fillcolor=red, fontsize=18.0 ]~n',[]),
fail.
output_var(Stream, V) :-
clpbn:get_atts(V,[key(Key),dist(_,Parents)]),
Parents = [_|_], !,
format(Stream, ' ',[]),
output_parents(Stream, Parents),
format(Stream, ' -> ',[]),
output_key(Stream,Key),
nl(Stream).
output_var(_, _).
info_ouput(_, []).
info_ouput(Stream, [V|Output]) :-
clpbn:get_atts(V,[key(Key)]),
output_key(Stream,Key),
format(Stream, ' [ shape=box, style=filled, fillcolor=green, fontsize=18.0 ]~n',[]),
info_ouput(Stream, Output).
output_parents(Stream, [V]) :- !,
clpbn:get_atts(V,[key(Key)]),
output_key(Stream,Key).
output_parents(Stream, L) :-
format(Stream,'{ ',[]),
output_parents1(Stream,L),
format(Stream,'}',[]).
output_parents1(_,[]).
output_parents1(Stream,[V|L]) :-
clpbn:get_atts(V,[key(Key)]),
output_key(Stream,Key),
put_code(Stream, 0' ),
output_parents1(Stream,L).
output_key(Stream, Key) :-
output_key(Stream, 0, Key).
output_key(Stream, _, Key) :-
primitive(Key), !,
write(Stream, Key).
output_key(Stream, I0, Key) :-
Key =.. [Name|Args],
write(Stream, Name),
I is I0+1,
output_key_args(Stream, I, Args).
output_key_args(_, _, []).
output_key_args(Stream, I, [Arg|Args]) :-
format(Stream, '~*c', [I,0'_]),
output_key(Stream, I, Arg),
output_key_args(Stream, I, Args).

View File

@@ -1,83 +0,0 @@
:- module(hmm, [init_hmm/0,
hmm_state/1,
emission/1]).
:- ensure_loaded(library(clpbn)).
:- use_module(library(lists),
[nth/3]).
:- use_module(library(nbhash),
[nb_hash_new/2,
nb_hash_lookup/3,
nb_hash_insert/3
]).
:- ensure_loaded(library(tries)).
:- meta_predicate hmm_state(:).
:- dynamic hmm_tabled/1.
:- attribute emission/1.
:- ensure_loaded(library('clpbn/viterbi')).
init_hmm :-
% retractall(hmm_tabled(_)).
% eraseall(hmm_tabled).
% nb_hash_new(hmm_table, 1000000).
trie_open(Trie), nb_setval(trie,Trie).
hmm_state(Mod:A) :- !, hmm_state(A,Mod).
hmm_state(A) :- prolog_flag(typein_module,Mod), hmm_state(A,Mod).
hmm_state(Mod:N/A,_) :- !,
hmm_state(N/A,Mod).
hmm_state((A,B),Mod) :- !,
hmm_state(A,Mod),
hmm_state(B,Mod).
hmm_state(N/A,Mod) :-
atom_codes(N,[TC|_]),
atom_codes(T,[TC]),
build_args(A,LArgs,KArgs,First,Last),
Key =.. [T|KArgs],
Head =.. [N|LArgs],
asserta_static( (Mod:Head :-
( First > 2 ->
Last = Key, !
;
nb_getval(trie, Trie), trie_check_entry(Trie, Key, _)
->
% leave work for solver!
%
Last = Key, !
;
% first time we saw this entry
nb_getval(trie, Trie), trie_put_entry(Trie, Key, _),
fail
)
)
).
build_args(4,[A,B,C,D],[A,B,C],A,D).
build_args(3, [A,B,C], [A,B],A,C).
build_args(2, [A,B], [A],A,B).
emission(V) :-
put_atts(V,[emission(Prob)]).
cvt_vals(aminoacids,[a, c, d, e, f, g, h, i, k, l, m, n, p, q, r, s, t, v, w, y]).
cvt_vals(bool,[t,f]).
cvt_vals(dna,[a,c,g,t]).
cvt_vals(rna,[a,c,g,u]).
cvt_vals([A|B],[A|B]).
% first, try standard representation
find_probs(Logs,Nth,Log) :-
arg(Nth,Logs,Log).

View File

@@ -1,528 +0,0 @@
:- module(jt, [jt/3,
init_jt_solver/4,
run_jt_solver/3]).
:- use_module(library(dgraphs),
[dgraph_new/1,
dgraph_add_edges/3,
dgraph_add_vertex/3,
dgraph_add_vertices/3,
dgraph_edges/2,
dgraph_vertices/2,
dgraph_transpose/2,
dgraph_to_ugraph/2,
ugraph_to_dgraph/2,
dgraph_neighbors/3
]).
:- use_module(library(undgraphs),
[undgraph_new/1,
undgraph_add_edge/4,
undgraph_add_edges/3,
undgraph_del_vertex/3,
undgraph_del_vertices/3,
undgraph_vertices/2,
undgraph_edges/2,
undgraph_neighbors/3,
undgraph_edge/3,
dgraph_to_undgraph/2
]).
:- use_module(library(wundgraphs),
[wundgraph_new/1,
wundgraph_max_tree/3,
wundgraph_add_edges/3,
wundgraph_add_vertices/3,
wundgraph_to_undgraph/2
]).
:- use_module(library(rbtrees),
[rb_new/1,
rb_insert/4,
rb_lookup/3]).
:- use_module(library(ordsets),
[ord_subset/2,
ord_insert/3,
ord_intersection/3,
ord_del_element/3,
ord_memberchk/2]).
:- use_module(library(lists),
[reverse/2]).
:- use_module(library('clpbn/aggregates'),
[check_for_agg_vars/2]).
:- use_module(library('clpbn/dists'),
[get_dist_domain_size/2,
get_dist_domain/2,
get_dist_matrix/5]).
:- use_module(library('clpbn/matrix_cpt_utils'),
[project_from_CPT/3,
reorder_CPT/5,
unit_CPT/2,
multiply_CPTs/4,
divide_CPTs/3,
normalise_CPT/2,
expand_CPT/4,
get_CPT_sizes/2,
reset_CPT_that_disagrees/5,
sum_out_from_CPT/4,
list_from_CPT/2]).
:- use_module(library('clpbn/display'), [
clpbn_bind_vals/3]).
:- use_module(library('clpbn/connected'),
[
init_influences/3,
influences/5
]).
jt([[]],_,_) :- !.
jt(LLVs,Vs0,AllDiffs) :-
init_jt_solver(LLVs, Vs0, AllDiffs, State),
run_jt_solver(LLVs, LLPs, State),
clpbn_bind_vals(LLVs,LLPs,AllDiffs).
init_jt_solver(LLVs, Vs0, _, State) :-
check_for_agg_vars(Vs0, Vs1),
init_influences(Vs1, G, RG),
init_jt_solver_for_questions(LLVs, G, RG, State).
init_jt_solver_for_questions([], _, _, []).
init_jt_solver_for_questions([LLVs|MoreLLVs], G, RG, [state(JTree, Evidence)|State]) :-
influences(LLVs, _, NVs0, G, RG),
sort(NVs0, NVs),
get_graph(NVs, BayesNet, CPTs, Evidence),
build_jt(BayesNet, CPTs, JTree),
init_jt_solver_for_questions(MoreLLVs, G, RG, State).
run_jt_solver([], [], []).
run_jt_solver([LVs|MoreLVs], [LPs|MorePs], [state(JTree, Evidence)|MoreState]) :-
% JTree is a dgraph
% now our tree has cpts
fill_with_cpts(JTree, NewTree),
% write_tree(NewTree,0),
propagate_evidence(Evidence, NewTree, EvTree),
message_passing(EvTree, MTree),
get_margin(MTree, LVs, LPs),
run_jt_solver(MoreLVs, MorePs, MoreState).
get_graph(LVs, BayesNet, CPTs, Evidence) :-
run_vars(LVs, Edges, Vertices, CPTs, Evidence),
dgraph_new(V0),
dgraph_add_edges(V0, Edges, V1),
dgraph_add_vertices(V1, Vertices, V2),
dgraph_to_ugraph(V2, BayesNet).
run_vars([], [], [], [], []).
run_vars([V|LVs], Edges, [V|Vs], [CPTVars-dist([V|Parents],Id)|CPTs], Ev) :-
clpbn:get_atts(V, [dist(Id,Parents)]),
add_evidence_from_vars(V, Ev, Ev0),
sort([V|Parents],CPTVars),
add_edges(Parents, V, Edges, Edges0),
run_vars(LVs, Edges0, Vs, CPTs, Ev0).
add_evidence_from_vars(V, [e(V,P)|Evs], Evs) :-
clpbn:get_atts(V, [evidence(P)]), !.
add_evidence_from_vars(_, Evs, Evs).
find_nth0([Id|_], Id, P, P) :- !.
find_nth0([_|D], Id, P0, P) :-
P1 is P0+1,
find_nth0(D, Id, P1, P).
add_edges([], _, Edges, Edges).
add_edges([P|Parents], V, [V-P|Edges], Edges0) :-
add_edges(Parents, V, Edges, Edges0).
build_jt(BayesNet, CPTs, Tree) :-
init_undgraph(BayesNet, Moral0),
moralised(BayesNet, Moral0, Markov),
undgraph_vertices(Markov, Vertices),
triangulate(Vertices, Markov, Markov, _, Cliques0),
cliques(Cliques0, EndCliques),
wundgraph_max_tree(EndCliques, J0Tree, _),
root(J0Tree, JTree),
populate(CPTs, JTree, Tree).
initial_graph(_,Parents, CPTs) :-
test_graph(0, Graph0, CPTs),
dgraph_new(V0),
dgraph_add_edges(V0, Graph0, V1),
% OK, this is a bit silly, I could have written the transposed graph
% from the very beginning.
dgraph_transpose(V1, V2),
dgraph_to_ugraph(V2, Parents).
problem_graph([], []).
problem_graph([V|BNet], GraphF) :-
clpbn:get_atts(V, [dist(_,_,Parents)]),
add_parents(Parents, V, Graph0, GraphF),
problem_graph(BNet, Graph0).
add_parents([], _, Graph, Graph).
add_parents([P|Parents], V, Graph0, [P-V|GraphF]) :-
add_parents(Parents, V, Graph0, GraphF).
% From David Page's lectures
test_graph(0,
[1-3,2-3,2-4,5-4,5-7,10-7,10-9,11-9,3-6,4-6,7-8,9-8,6-12,8-12],
[[1]-a,
[2]-b,
[1,2,3]-c,
[2,4,5]-d,
[5]-e,
[3,4,6]-f,
[5,7,10]-g,
[7,8,9]-h,
[9]-i,
[10]-j,
[11]-k,
[6,8,12]-l
]).
test_graph(1,[a-b,a-c,b-d,c-e,d-f,e-f],
[]).
init_undgraph(Parents, UndGraph) :-
ugraph_to_dgraph(Parents, DGraph),
dgraph_to_undgraph(DGraph, UndGraph).
get_par_keys([], []).
get_par_keys([P|Parents],[K|KPars]) :-
clpbn:get_atts(P, [key(K)]),
get_par_keys(Parents,KPars).
moralised([],Moral,Moral).
moralised([_-KPars|Ks],Moral0,MoralF) :-
add_moral_edges(KPars, Moral0, MoralI),
moralised(Ks,MoralI,MoralF).
add_moral_edges([], Moral, Moral).
add_moral_edges([_], Moral, Moral).
add_moral_edges([K1,K2|KPars], Moral0, MoralF) :-
undgraph_add_edge(Moral0, K1, K2, MoralI),
add_moral_edges([K1|KPars], MoralI, MoralJ),
add_moral_edges([K2|KPars],MoralJ,MoralF).
triangulate([], _, Triangulated, Triangulated, []) :- !.
triangulate(Vertices, S0, T0, Tf, Cliques) :-
choose(Vertices, S0, +inf, [], -1, BestVertex, _, Cliques0, Cliques, Edges),
ord_del_element(Vertices, BestVertex, NextVertices),
undgraph_add_edges(T0, Edges, T1),
undgraph_del_vertex(S0, BestVertex, Si),
undgraph_add_edges(Si, Edges, Si2),
triangulate(NextVertices, Si2, T1, Tf, Cliques0).
choose([], _, _, NewEdges, Best, Best, Clique, Cliques0, [Clique|Cliques0], NewEdges).
choose([V|Vertices], Graph, Score0, _, _, Best, _, Cliques0, Cliques, EdgesF) :-
undgraph_neighbors(V, Graph, Neighbors),
ord_insert(Neighbors, V, PossibleClique),
new_edges(Neighbors, Graph, NewEdges),
(
% simplicial edge
NewEdges == []
->
!,
Best = V,
NewEdges = EdgesF,
length(PossibleClique,L),
Cliques = [L-PossibleClique|Cliques0]
;
% cliquelength(PossibleClique,1,CL),
length(PossibleClique,CL),
CL < Score0, !,
choose(Vertices,Graph,CL,NewEdges, V, Best, CL-PossibleClique, Cliques0,Cliques,EdgesF)
).
choose([_|Vertices], Graph, Score0, Edges0, BestSoFar, Best, Clique, Cliques0, Cliques, EdgesF) :-
choose(Vertices,Graph,Score0,Edges0, BestSoFar, Best, Clique, Cliques0,Cliques,EdgesF).
new_edges([], _, []).
new_edges([N|Neighbors], Graph, NewEdgesF) :-
new_edges(Neighbors,N,Graph,NewEdges0, NewEdgesF),
new_edges(Neighbors, Graph, NewEdges0).
new_edges([],_,_,NewEdges, NewEdges).
new_edges([N1|Neighbors],N,Graph,NewEdges0, NewEdgesF) :-
undgraph_edge(N, N1, Graph), !,
new_edges(Neighbors,N,Graph,NewEdges0, NewEdgesF).
new_edges([N1|Neighbors],N,Graph,NewEdges0, [N-N1|NewEdgesF]) :-
new_edges(Neighbors,N,Graph,NewEdges0, NewEdgesF).
cliquelength([],CL,CL).
cliquelength([V|Vs],CL0,CL) :-
clpbn:get_atts(V, [dist(Id,_)]),
get_dist_domain_size(Id, Sz),
CL1 is CL0*Sz,
cliquelength(Vs,CL1,CL).
%
% This is simple stuff, I just have to remove cliques that
% are subsets of the others.
%
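% For example, from candidate cliques [a,b], [a,b,c] and [c,d] only [a,b,c]
% and [c,d] are kept, since [a,b] is a subset of [a,b,c]; the survivors become
% the vertices of the weighted clique graph built below.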
cliques(CliqueList, CliquesF) :-
wundgraph_new(Cliques0),
% first step, order by size,
keysort(CliqueList,Sort),
reverse(Sort, Rev),
get_links(Rev, [], Vertices, [], Edges),
wundgraph_add_vertices(Cliques0, Vertices, CliquesI),
wundgraph_add_edges(CliquesI, Edges, CliquesF).
% stupid quadratic algorithm, needs to be improved.
get_links([], Vertices, Vertices, Edges, Edges).
get_links([Sz-Clique|Cliques], SoFar, Vertices, Edges0, Edges) :-
add_clique_edges(SoFar, Clique, Sz, Edges0, EdgesI), !,
get_links(Cliques, [Clique|SoFar], Vertices, EdgesI, Edges).
get_links([_|Cliques], SoFar, Vertices, Edges0, Edges) :-
get_links(Cliques, SoFar, Vertices, Edges0, Edges).
add_clique_edges([], _, _, Edges, Edges).
add_clique_edges([Clique1|Cliques], Clique, Sz, Edges0, EdgesF) :-
ord_intersection(Clique1, Clique, Int),
Int \== Clique,
(
Int = [] ->
add_clique_edges(Cliques, Clique, Sz, Edges0, EdgesF)
;
% we connect
length(Int, LSz),
add_clique_edges(Cliques, Clique, Sz, [Clique-(Clique1-LSz)|Edges0], EdgesF)
).
root(WTree, JTree) :-
wundgraph_to_undgraph(WTree, Tree),
remove_leaves(Tree, SmallerTree),
undgraph_vertices(SmallerTree, InnerVs),
pick_root(InnerVs, Root),
rb_new(M0),
build_tree(Root, M0, Tree, JTree, _).
remove_leaves(Tree, SmallerTree) :-
undgraph_vertices(Tree, Vertices),
Vertices = [_,_,_|_],
get_leaves(Vertices, Tree, Leaves),
Leaves = [_|_], !,
undgraph_del_vertices(Tree, Leaves, NTree),
remove_leaves(NTree, SmallerTree).
remove_leaves(Tree, Tree).
get_leaves([], _, []).
get_leaves([V|Vertices], Tree, [V|Leaves]) :-
undgraph_neighbors(V, Tree, [_]), !,
get_leaves(Vertices, Tree, Leaves).
get_leaves([_|Vertices], Tree, Leaves) :-
get_leaves(Vertices, Tree, Leaves).
pick_root([V|_],V).
direct_edges([], _, [], []) :- !.
direct_edges([], NewVs, RemEdges, Directed) :-
direct_edges(RemEdges, NewVs, [], Directed).
direct_edges([V1-V2|Edges], NewVs0, RemEdges, [V1-V2|Directed]) :-
ord_memberchk(V1, NewVs0), !,
ord_insert(NewVs0, V2, NewVs),
direct_edges(Edges, NewVs, RemEdges, Directed).
direct_edges([V1-V2|Edges], NewVs0, RemEdges, [V2-V1|Directed]) :-
ord_memberchk(V2, NewVs0), !,
ord_insert(NewVs0, V1, NewVs),
direct_edges(Edges, NewVs, RemEdges, Directed).
direct_edges([Edge|Edges], NewVs, RemEdges, Directed) :-
direct_edges(Edges, NewVs, [Edge|RemEdges], Directed).
populate(CPTs, JTree, NewJTree) :-
keysort(CPTs, KCPTs),
populate_cliques(JTree, KCPTs, NewJTree, []).
populate_cliques(tree(Clique,Kids), CPTs, tree(Clique-MyCPTs,NewKids), RemCPTs) :-
get_cpts(CPTs, Clique, MyCPTs, MoreCPTs),
populate_trees_with_cliques(Kids, MoreCPTs, NewKids, RemCPTs).
populate_trees_with_cliques([], MoreCPTs, [], MoreCPTs).
populate_trees_with_cliques([Node|Kids], MoreCPTs, [NewNode|NewKids], RemCPts) :-
populate_cliques(Node, MoreCPTs, NewNode, ExtraCPTs),
populate_trees_with_cliques(Kids, ExtraCPTs, NewKids, RemCPts).
get_cpts([], _, [], []).
get_cpts([CPT|CPts], [], [], [CPT|CPts]) :- !.
get_cpts([[I|MCPT]-Info|CPTs], [J|Clique], MyCPTs, MoreCPTs) :-
compare(C,I,J),
( C == < ->
% our CPT cannot be a part of the clique.
MoreCPTs = [[I|MCPT]-Info|LeftoverCPTs],
get_cpts(CPTs, [J|Clique], MyCPTs, LeftoverCPTs)
;
C == = ->
% the keys match, so the CPT may belong to this clique.
get_cpt(MCPT, Clique, I, Info, MyCPTs, MyCPTs0, MoreCPTs, MoreCPTs0),
get_cpts(CPTs, [J|Clique], MyCPTs0, MoreCPTs0)
;
% the first element in our CPT may not be in a clique
get_cpts([[I|MCPT]-Info|CPTs], Clique, MyCPTs, MoreCPTs)
).
get_cpt(MCPT, Clique, I, Info, [[I|MCPT]-Info|MyCPTs], MyCPTs, MoreCPTs, MoreCPTs) :-
ord_subset(MCPT, Clique), !.
get_cpt(MCPT, _, I, Info, MyCPTs, MyCPTs, [[I|MCPT]-Info|MoreCPTs], MoreCPTs).
translate_edges([], [], []).
translate_edges([E1-E2|Edges], [(E1-A)-(E2-B)|NEdges], [E1-A,E2-B|Vs]) :-
translate_edges(Edges, NEdges, Vs).
match_vs(_,[]).
match_vs([K-A|Cls],[K1-B|KVs]) :-
compare(C, K, K1),
(C == = ->
A = B,
match_vs([K-A|Cls], KVs)
;
C = < ->
match_vs(Cls,[K1-B|KVs])
;
match_vs([K-A|Cls],KVs)
).
fill_with_cpts(tree(Clique-Dists,Leafs), tree(Clique-NewDists,NewLeafs)) :-
compile_cpts(Dists, Clique, NewDists),
fill_tree_with_cpts(Leafs, NewLeafs).
fill_tree_with_cpts([], []).
fill_tree_with_cpts([L|Leafs], [NL|NewLeafs]) :-
fill_with_cpts(L, NL),
fill_tree_with_cpts(Leafs, NewLeafs).
transform([], []).
transform([Clique-Dists|Nodes],[Clique-NewDist|NewNodes]) :-
compile_cpts(Dists, Clique, NewDist),
transform(Nodes, NewNodes).
compile_cpts([Vs-dist(OVs,Id)|Dists], Clique, TAB) :-
OVs = [_|Ps], !,
get_dist_matrix(Id, Ps, _, _, TAB0),
reorder_CPT(OVs, TAB0, Vs, TAB1, Sz1),
multiply_dists(Dists,Vs,TAB1,Sz1,Vars2,ITAB),
expand_CPT(ITAB,Vars2,Clique,TAB).
compile_cpts([], [V|Clique], TAB) :-
unit_CPT(V, CPT0),
expand_CPT(CPT0, [V], [V|Clique], TAB).
multiply_dists([],Vs,TAB,_,Vs,TAB).
multiply_dists([Vs-dist(OVs,Id)|Dists],MVs,TAB2,Sz2,FVars,FTAB) :-
OVs = [_|Ps],
get_dist_matrix(Id, Ps, _, _, TAB0),
reorder_CPT(OVs, TAB0, Vs, TAB1, Sz1),
multiply_CPTs(tab(TAB1,Vs,Sz1),tab(TAB2,MVs,Sz2),tab(TAB3,NVs,Sz),_),
multiply_dists(Dists,NVs,TAB3,Sz,FVars,FTAB).
build_tree(Root, Leafs, WTree, tree(Root,Leaves), NewLeafs) :-
rb_insert(Leafs, Root, [], Leafs0),
undgraph_neighbors(Root, WTree, Children),
build_trees(Children, Leafs0, WTree, Leaves, NewLeafs).
build_trees( [], Leafs, _, [], Leafs).
build_trees([V|Children], Leafs, WTree, NLeaves, NewLeafs) :-
% back pointer
rb_lookup(V, _, Leafs), !,
build_trees(Children, Leafs, WTree, NLeaves, NewLeafs).
build_trees([V|Children], Leafs, WTree, [VT|NLeaves], NewLeafs) :-
build_tree(V, Leafs, WTree, VT, Leafs1),
build_trees(Children, Leafs1, WTree, NLeaves, NewLeafs).
propagate_evidence([], NewTree, NewTree).
propagate_evidence([e(V,P)|Evs], Tree0, NewTree) :-
add_evidence_to_matrix(Tree0, V, P, Tree1), !,
propagate_evidence(Evs, Tree1, NewTree).
add_evidence_to_matrix(tree(Clique-Dist,Kids), V, P, tree(Clique-NDist,Kids)) :-
ord_memberchk(V, Clique), !,
reset_CPT_that_disagrees(Dist, Clique, V, P, NDist).
add_evidence_to_matrix(tree(C,Kids), V, P, tree(C,NKids)) :-
add_evidence_to_kids(Kids, V, P, NKids).
add_evidence_to_kids([K|Kids], V, P, [NK|Kids]) :-
add_evidence_to_matrix(K, V, P, NK), !.
add_evidence_to_kids([K|Kids], V, P, [K|NNKids]) :-
add_evidence_to_kids(Kids, V, P, NNKids).
message_passing(tree(Clique-Dist,Kids), tree(Clique-NDist,NKids)) :-
get_CPT_sizes(Dist, Sizes),
upward(Kids, Clique, tab(Dist, Clique, Sizes), IKids, ITab, 1),
ITab = tab(NDist, _, _),
nb_setval(cnt,0),
downward(IKids, Clique, ITab, NKids).
upward([], _, Dist, [], Dist, _).
upward([tree(Clique1-Dist1,DistKids)|Kids], Clique, Tab, [tree(Clique1-(NewDist1,EDist1),NDistKids)|NKids], NewTab, Lev) :-
get_CPT_sizes(Dist1, Sizes1),
Lev1 is Lev+1,
upward(DistKids, Clique1, tab(Dist1,Clique1,Sizes1), NDistKids, NewTab1, Lev1),
NewTab1 = tab(NewDist1,_,_),
ord_intersection(Clique1, Clique, Int),
sum_out_from_CPT(Int, NewDist1, Clique1, Tab1),
multiply_CPTs(Tab, Tab1, ITab, EDist1),
upward(Kids, Clique, ITab, NKids, NewTab, Lev).
downward([], _, _, []).
downward([tree(Clique1-(Dist1,Msg1),DistKids)|Kids], Clique, Tab, [tree(Clique1-NDist1,NDistKids)|NKids]) :-
get_CPT_sizes(Dist1, Sizes1),
ord_intersection(Clique1, Clique, Int),
Tab = tab(Dist,_,_),
divide_CPTs(Dist, Msg1, Div),
sum_out_from_CPT(Int, Div, Clique, STab),
multiply_CPTs(STab, tab(Dist1, Clique1, Sizes1), NewTab, _),
NewTab = tab(NDist1,_,_),
downward(DistKids, Clique1, NewTab, NDistKids),
downward(Kids, Clique, Tab, NKids).
get_margin(NewTree, LVs0, LPs) :-
sort(LVs0, LVs),
find_clique(NewTree, LVs, Clique, Dist),
sum_out_from_CPT(LVs, Dist, Clique, tab(TAB,_,_)),
reorder_CPT(LVs, TAB, LVs0, NTAB, _),
normalise_CPT(NTAB, Ps),
list_from_CPT(Ps, LPs).
find_clique(tree(Clique-Dist,_), LVs, Clique, Dist) :-
ord_subset(LVs, Clique), !.
find_clique(tree(_,Kids), LVs, Clique, Dist) :-
find_clique_from_kids(Kids, LVs, Clique, Dist).
find_clique_from_kids([K|_], LVs, Clique, Dist) :-
find_clique(K, LVs, Clique, Dist), !.
find_clique_from_kids([_|Kids], LVs, Clique, Dist) :-
find_clique_from_kids(Kids, LVs, Clique, Dist).
write_tree(tree(Clique-(Dist,_),Leaves), I0) :- !,
matrix:matrix_to_list(Dist,L),
format('~*c ~w:~w~n',[I0,0' ,Clique,L]),
I is I0+2,
write_subtree(Leaves, I).
write_tree(tree(Clique-Dist,Leaves), I0) :-
matrix:matrix_to_list(Dist,L),
format('~*c ~w:~w~n',[I0,0' ,Clique, L]),
I is I0+2,
write_subtree(Leaves, I).
write_subtree([], _).
write_subtree([Tree|Leaves], I) :-
write_tree(Tree, I),
write_subtree(Leaves, I).

View File

@@ -1,267 +0,0 @@
:- module(clpbn_matrix_utils,
[init_CPT/2,
project_from_CPT/3,
reorder_CPT/5,
get_CPT_sizes/2,
normalise_CPT/2,
multiply_CPTs/4,
divide_CPTs/3,
expand_CPT/4,
reset_CPT_that_disagrees/5,
unit_CPT/2,
sum_out_from_CPT/4,
list_from_CPT/2,
multiply_factors/3,
normalise_possibly_deterministic_CPT/2,
column_from_possibly_deterministic_CPT/3,
multiply_possibly_deterministic_factors/3,
random_CPT/2,
uniform_CPT/2,
uniform_CPT_as_list/2,
normalise_CPT_on_lines/3]).
:- use_module(dists,
[get_dist_domain_size/2,
get_dist_domain/2]).
:- use_module(library(matrix),
[matrix_new/4,
matrix_new_set/4,
matrix_select/4,
matrix_dims/2,
matrix_size/2,
matrix_shuffle/3,
matrix_expand/3,
matrix_op/4,
matrix_dims/2,
matrix_sum/2,
matrix_sum_logs_out/3,
matrix_sum_logs_out_several/3,
matrix_op_to_all/4,
matrix_to_exps2/1,
matrix_to_logs/1,
matrix_set_all_that_disagree/5,
matrix_to_list/2,
matrix_agg_lines/3,
matrix_agg_cols/3,
matrix_op_to_lines/4,
matrix_column/3]).
init_CPT(List, Sizes, TAB) :-
matrix_new(floats, Sizes, List, TAB),
matrix_to_logs(TAB).
init_possibly_deterministic_CPT(List, Sizes, TAB) :-
matrix_new(floats, Sizes, List, TAB).
project_from_CPT(V,tab(Table,Deps,_),tab(NewTable,NDeps,NSzs)) :-
evidence(V,Pos), !,
vnth(Deps, 0, V, N, NDeps),
matrix_select(Table, N, Pos, NewTable),
matrix_dims(NewTable, NSzs).
project_from_CPT(V,tab(Table,Deps,_),tab(NewTable,NDeps,NSzs)) :-
vnth(Deps, 0, V, N, NDeps),
matrix_sum_logs_out(Table, N, NewTable),
matrix_dims(NewTable, NSzs).
evidence(V, Pos) :-
clpbn:get_atts(V, [evidence(Pos)]).
vnth([V1|Deps], N, V, N, Deps) :-
V == V1, !.
vnth([V1|Deps], N0, V, N, [V1|NDeps]) :-
N1 is N0+1,
vnth(Deps, N1, V, N, NDeps).
reorder_CPT(Vs0,T0,Vs,TF,Sizes) :-
var(Vs), !,
order_vec(Vs0,Vs,Map),
(
Vs == Vs0
->
TF = T0
;
matrix_shuffle(T0,Map,TF)
),
matrix_dims(TF, Sizes).
reorder_CPT(Vs0,T0,Vs,TF,Sizes) :-
mapping(Vs0,Vs,Map),
(
Vs == Vs0
->
TF = T0
;
matrix_shuffle(T0,Map,TF)
),
matrix_dims(TF, Sizes).
order_vec(Vs0,Vs,Map) :-
add_indices(Vs0,0,Is),
keysort(Is,NIs),
get_els(NIs, Vs, Map).
add_indices([],_,[]).
add_indices([V|Vs0],I0,[V-I0|Is]) :-
I is I0+1,
add_indices(Vs0,I,Is).
get_els([], [], []).
get_els([V-I|NIs], [V|Vs], [I|Map]) :-
get_els(NIs, Vs, Map).
mapping(Vs0,Vs,Map) :-
add_indices(Vs0,0,I1s),
add_indices( Vs,I2s),
keysort(I1s,Ks),
keysort(I2s,Ks),
split_map(I2s, Map).
add_indices([],[]).
add_indices([V|Vs0],[V-_|I1s]) :-
add_indices(Vs0,I1s).
split_map([], []).
split_map([_-M|Is], [M|Map]) :-
split_map(Is, Map).
divide_CPTs(Tab1, Tab2, OT) :-
matrix_op(Tab1,Tab2,-,OT).
multiply_CPTs(tab(Tab1, Deps1, Sz1), tab(Tab2, Deps2, Sz2), tab(OT, NDeps, NSz), NTab2) :-
expand_tabs(Deps1, Sz1, Deps2, Sz2, Map1, Map2, NDeps),
matrix_expand_compact(Tab1, Map1, NTab1),
matrix_expand_compact(Tab2, Map2, NTab2),
matrix_op(NTab1,NTab2,+,OT),
matrix_dims(OT,NSz).
expand_tabs([], [], [], [], [], [], []).
expand_tabs([V1|Deps1], [S1|Sz1], [], [], [0|Map1], [S1|Map2], [V1|NDeps]) :-
expand_tabs(Deps1, Sz1, [], [], Map1, Map2, NDeps).
expand_tabs([], [], [V2|Deps2], [S2|Sz2], [S2|Map1], [0|Map2], [V2|NDeps]) :-
expand_tabs([], [], Deps2, Sz2, Map1, Map2, NDeps).
expand_tabs([V1|Deps1], [S1|Sz1], [V2|Deps2], [S2|Sz2], Map1, Map2, NDeps) :-
compare(C,V1,V2),
(C == = ->
NDeps = [V1|MDeps],
Map1 = [0|M1],
Map2 = [0|M2],
NDeps = [V1|MDeps],
expand_tabs(Deps1, Sz1, Deps2, Sz2, M1, M2, MDeps)
;
C == < ->
NDeps = [V1|MDeps],
Map1 = [0|M1],
Map2 = [S1|M2],
NDeps = [V1|MDeps],
expand_tabs(Deps1, Sz1, [V2|Deps2], [S2|Sz2], M1, M2, MDeps)
;
NDeps = [V2|MDeps],
Map1 = [S2|M1],
Map2 = [0|M2],
NDeps = [V2|MDeps],
expand_tabs([V1|Deps1], [S1|Sz1], Deps2, Sz2, M1, M2, MDeps)
).
normalise_CPT(MAT,NMAT) :-
matrix_to_exps2(MAT),
matrix_sum(MAT, Sum),
matrix_op_to_all(MAT, /, Sum, NMAT).
list_from_CPT(MAT, List) :-
matrix_to_list(MAT, List).
expand_CPT(MAT0, Dims0, DimsNew, MAT) :-
generate_map(DimsNew, Dims0, Map),
matrix_expand(MAT0, Map, MAT).
generate_map([], [], []).
generate_map([V|DimsNew], [V0|Dims0], [0|Map]) :- V == V0, !,
generate_map(DimsNew, Dims0, Map).
generate_map([V|DimsNew], Dims0, [Sz|Map]) :-
clpbn:get_atts(V, [dist(Id,_)]),
get_dist_domain_size(Id, Sz),
generate_map(DimsNew, Dims0, Map).
unit_CPT(V,CPT) :-
clpbn:get_atts(V, [dist(Id,_)]),
get_dist_domain_size(Id, Sz),
matrix_new_set(floats,[Sz],1.0,CPT).
reset_CPT_that_disagrees(CPT, Vars, V, Pos, NCPT) :-
vnth(Vars, 0, V, Dim, _),
matrix_set_all_that_disagree(CPT, Dim, Pos, -inf, NCPT).
sum_out_from_CPT(Vs,Table,Deps,tab(NewTable,Vs,Sz)) :-
conversion_matrix(Vs, Deps, Conv),
matrix_sum_logs_out_several(Table, Conv, NewTable),
matrix_dims(NewTable, Sz).
conversion_matrix([], [], []).
conversion_matrix([], [_|Deps], [1|Conv]) :-
conversion_matrix([], Deps, Conv).
conversion_matrix([V|Vs], [V1|Deps], [0|Conv]) :- V==V1, !,
conversion_matrix(Vs, Deps, Conv).
conversion_matrix([V|Vs], [_|Deps], [1|Conv]) :-
conversion_matrix([V|Vs], Deps, Conv).
get_CPT_sizes(CPT, Sizes) :-
matrix_dims(CPT, Sizes).
matrix_expand_compact(M0,Zeros,M0) :-
zero_map(Zeros), !.
matrix_expand_compact(M0,Map,M) :-
matrix_expand(M0, Map, M).
zero_map([]).
zero_map([0|Zeros]) :-
zero_map(Zeros).
col_from_CPT(CPT, Parents, Column) :-
matrix_col(CPT, Parents, Column),
matrix_to_logs(Column).
column_from_possibly_deterministic_CPT(CPT, Parents, Column) :-
matrix_column(CPT, Parents, Column).
multiply_factors(F1, F2, F) :-
matrix_op(F1,F2,+,F).
multiply_possibly_deterministic_factors(F1, F2, F) :-
matrix_op(F1,F2,*,F).
normalise_possibly_deterministic_CPT(MAT,NMAT) :-
matrix_agg_lines(MAT, +, Sum),
matrix_op_to_lines(MAT, Sum, /, NMAT).
random_CPT(Dims, M) :-
mult_all(Dims,1,Size),
generate_random_entries(Size, Randoms),
matrix_new(floats, Dims, Randoms, M1),
normalise_possibly_deterministic_CPT(M1, M).
mult_all([],Size,Size).
mult_all([D|Dims],Size0,Size) :-
Size1 is Size0*D,
mult_all(Dims,Size1,Size).
generate_random_entries(0, []) :- !.
generate_random_entries(Size, [R|Randoms]) :-
R is random,
Size1 is Size-1,
generate_random_entries(Size1, Randoms).
uniform_CPT_as_list(Dims, L) :-
uniform_CPT(Dims, M),
matrix_to_list(M, L).
uniform_CPT(Dims, M) :-
matrix_new_set(floats,Dims,1.0,M1),
normalise_possibly_deterministic_CPT(M1, M).
normalise_CPT_on_lines(MAT0, MAT2, L1) :-
matrix_agg_cols(MAT0, +, MAT1),
matrix_sum(MAT1, SUM),
matrix_op_to_all(MAT1, /, SUM, MAT2),
matrix:matrix_to_list(MAT2,L1).

View File

@@ -1,223 +0,0 @@
/*
Deterministically table a predicate of the form
K -> RV
where the K are the first N-1 arguments.
Note that this does not include support for backtracking.
*/
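% A hedged usage sketch (the predicate name weather/2 is made up): declaring
%
%   :- clpbn_table(weather/2).
%
% tables weather(Day, RV) on its first argument, so repeated calls with the
% same Day return the very same random variable RV instead of building a new
% one.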
:- module(clpbn_table,
[clpbn_table/1,
clpbn_tabled_clause/2,
clpbn_tabled_abolish/1,
clpbn_tabled_asserta/1,
clpbn_tabled_assertz/1,
clpbn_tabled_asserta/2,
clpbn_tabled_assertz/2,
clpbn_tabled_dynamic/1,
clpbn_tabled_number_of_clauses/2,
clpbn_reset_tables/0,
clpbn_reset_tables/1,
clpbn_is_tabled/1
]).
:- use_module(library(bhash),
[b_hash_new/2,
b_hash_lookup/3,
b_hash_insert/3]).
:- meta_predicate clpbn_table(:),
clpbn_tabled_clause(:,?),
clpbn_tabled_abolish(:),
clpbn_tabled_asserta(:),
clpbn_tabled_assertz(:),
clpbn_tabled_asserta(:,-),
clpbn_tabled_assertz(:,-),
clpbn_tabled_number_of_clauses(:,-),
clpbn_is_tabled(:).
:- dynamic clpbn_table/3.
:- initialization(init).
init :-
clpbn_reset_tables.
clpbn_reset_tables :-
clpbn_reset_tables(1024).
clpbn_reset_tables(Sz) :-
b_hash_new(Tab, Sz),
nb_setval(clpbn_tables, Tab).
clpbn_table(M:X) :- !,
clpbn_table(X,M).
clpbn_table(X) :-
prolog_load_context(module, M),
clpbn_table(X,M).
clpbn_table(M:X,_) :- !,
clpbn_table(X,M).
clpbn_table((P1,P2),M) :- !,
clpbn_table(P1,M),
clpbn_table(P2,M).
clpbn_table(F/N,M) :-
functor(S,F,N),
S =.. L0,
take_tail(L0, V, L1, V1, L2),
Key =.. L1,
atom_concat(F, '___tabled', NF),
L2 = [_|Args],
S1 =.. [NF|Args],
L0 = [_|OArgs],
S2 =.. [NF|OArgs],
asserta(clpbn_table(S, M, S2)),
assert((M:S :- nb_getval(clpbn_tables, Tab), ( b_hash_lookup(Key, V1, Tab) -> true ; M:S1, b_hash_insert(Tab, Key, V1) ; true), ( nonvar(V) -> clpbn_evidence:put_evidence(V, V1) ; V = V1 ), ! ) ).
take_tail([V], V, [], V1, [V1]) :- !.
take_tail([A|L0], V, [A|L1], V1, [A|L2]) :-
take_tail(L0, V, L1, V1, L2).
user:term_expansion((P :- Gs), NC) :-
clpbn_table(P, M, NP),
prolog_load_context(module, M), !,
assert(M:(NP :- Gs)),
NC = (:-true).
in_table(K, V) :-
nb_getval(clpbn_tables, Tab),
b_hash_lookup(K, V, Tab).
store_in_table(K, V) :-
nb_getval(clpbn_tables, Tab),
b_hash_insert(Tab, K, V).
clpbn_tabled_clause(M:Head, Body) :- !,
clpbn_tabled_clause(Head, M, Body).
clpbn_tabled_clause(Head, Body) :-
prolog_load_context(module, M),
clpbn_tabled_clause(Head, M, Body).
clpbn_tabled_clause(M:Head, _, Body) :- !,
clpbn_tabled_clause(Head, M, Body).
clpbn_tabled_clause(Head, M, Body) :-
clpbn_table(Head, M, THead),
clause(M:THead, Body).
clpbn_tabled_assertz(M:Clause) :- !,
clpbn_tabled_assertz2(Clause, M).
clpbn_tabled_assertz(Clause) :-
prolog_load_context(module, M),
clpbn_tabled_assertz2(Clause, M).
clpbn_tabled_assertz2(M:Clause, _) :- !,
clpbn_tabled_assertz2(Clause, M).
clpbn_tabled_assertz2((Head:-Body), M) :- !,
clpbn_table(Head, M, THead),
assertz(M:(THead :- Body)).
clpbn_tabled_assertz2(Head, M) :-
clpbn_table(Head, M, THead),
assertz(THead).
clpbn_tabled_assertz(M:Clause, Ref) :- !,
clpbn_tabled_assertz2(Clause, M, Ref).
clpbn_tabled_assertz(Clause, Ref) :-
prolog_load_context(module, M),
clpbn_tabled_assertz2(Clause, M, Ref).
clpbn_tabled_assertz2(M:Clause, _, Ref) :- !,
clpbn_tabled_assertz2(Clause, M, Ref).
clpbn_tabled_assertz2((Head:-Body), M, Ref) :- !,
clpbn_table(Head, M, THead),
assertz(M:(THead :- Body), Ref).
clpbn_tabled_assertz2(Head, M, Ref) :-
clpbn_table(Head, M, THead),
assertz(THead, Ref).
clpbn_tabled_asserta(M:Clause) :- !,
clpbn_tabled_asserta2(Clause, M).
clpbn_tabled_asserta(Clause) :-
prolog_load_context(module, M),
clpbn_tabled_asserta2(Clause, M).
clpbn_tabled_asserta2(M:Clause, _) :- !,
clpbn_tabled_asserta2(Clause, M).
clpbn_tabled_asserta2((Head:-Body), M) :- !,
clpbn_table(Head, M, THead),
asserta(M:(THead :- Body)).
clpbn_tabled_asserta2(Head, M) :-
clpbn_table(Head, M, THead),
asserta(THead).
clpbn_tabled_asserta(M:Clause, Ref) :- !,
clpbn_tabled_asserta2(Clause, M, Ref).
clpbn_tabled_asserta(Clause, Ref) :-
prolog_load_context(module, M),
clpbn_tabled_asserta2(Clause, M, Ref).
clpbn_tabled_asserta2(M:Clause, _, Ref) :- !,
clpbn_tabled_asserta2(Clause, M, Ref).
clpbn_tabled_asserta2((Head:-Body), M, Ref) :- !,
clpbn_table(Head, M, THead),
asserta(M:(THead :- Body), Ref).
clpbn_tabled_asserta2(Head, M, Ref) :-
clpbn_table(Head, M, THead),
asserta(THead, Ref).
clpbn_tabled_abolish(M:Clause) :- !,
clpbn_tabled_abolish(Clause, M).
clpbn_tabled_abolish(Clause) :-
prolog_load_context(module, M),
clpbn_tabled_abolish(Clause, M).
clpbn_tabled_abolish(M:Clause, _) :- !,
clpbn_tabled_abolish(Clause, M).
clpbn_tabled_abolish(N/A, M) :-
functor(Head, N, A),
clpbn_table(Head, M, THead),
functor(THead, TN, A),
abolish(M:TN/A).
clpbn_tabled_dynamic(M:Clause) :- !,
clpbn_tabled_dynamic(Clause, M).
clpbn_tabled_dynamic(Clause) :-
prolog_load_context(module, M),
clpbn_tabled_dynamic(Clause, M).
clpbn_tabled_dynamic(M:Clause, _) :- !,
clpbn_tabled_dynamic(Clause, M).
clpbn_tabled_dynamic(N/A, M) :-
functor(Head, N, A),
clpbn_table(Head, M, THead),
functor(THead, TN, A),
dynamic(M:TN/A).
clpbn_tabled_number_of_clauses(M:Clause, N) :- !,
clpbn_tabled_number_of_clauses(Clause, M, N).
clpbn_tabled_number_of_clauses(Clause, N) :-
prolog_load_context(module, M),
clpbn_tabled_number_of_clauses(Clause, M, N).
clpbn_tabled_number_of_clauses(M:Clause, _, N) :- !,
clpbn_tabled_number_of_clauses(Clause, M, N).
clpbn_tabled_number_of_clauses(Head, M, N) :-
clpbn_table(Head, M, THead),
predicate_property(M:THead,number_of_clauses(N)).
clpbn_is_tabled(M:Clause) :- !,
clpbn_is_tabled(Clause, M).
clpbn_is_tabled(Clause) :-
prolog_load_context(module, M),
clpbn_is_tabled(Clause, M).
clpbn_is_tabled(M:Clause, _) :- !,
clpbn_is_tabled(Clause, M).
clpbn_is_tabled(Head, M) :-
clpbn_table(Head, M, _).

View File

@@ -1,34 +0,0 @@
:- module(topsort, [topsort/2]).
:- use_module(library(dgraphs),
[dgraph_new/1,
dgraph_add_edges/3,
dgraph_add_vertices/3,
dgraph_top_sort/2]).
/* simple implementation of a topological sorting algorithm */
/* graph is as Node-[Parents] */
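% A small worked example (atom names are illustrative): parents must come
% before their children in the output, so
%
%   ?- topsort([a-[], b-[a], c-[a,b]], Sorted).
%
% is expected to bind Sorted = [a,b,c].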
topsort(Graph0, Sorted) :-
mkedge_list(Graph0, EdgeList, []),
mkvertices_list(Graph0, VList, []),
dgraph_new(DGraph0),
dgraph_add_vertices(DGraph0, VList, DGraph1),
dgraph_add_edges(DGraph1, EdgeList, DGraph2),
dgraph_top_sort(DGraph2, Sorted).
mkvertices_list([]) --> [].
mkvertices_list([V-_|More]) --> [V],
mkvertices_list(More).
mkedge_list([]) --> [].
mkedge_list([V-Parents|More]) -->
add_edges(Parents, V),
mkedge_list(More).
add_edges([], _V) --> [].
add_edges([P|Parents], V) --> [P-V],
add_edges(Parents, V).

View File

@@ -1,116 +0,0 @@
:- module(clpbn_utils, [
clpbn_not_var_member/2,
clpbn_var_member/2,
check_for_hidden_vars/3,
sort_vars_by_key/3,
sort_vars_by_key_and_parents/3]).
%
% It may happen that variables from a previous query are still around
% and get used in the next evaluation, so we cannot trust the list of *new*
% variables.
%
check_for_hidden_vars([], _, []).
check_for_hidden_vars([V|Vs], AllVs0, [V|NVs]) :-
check_for_extra_variables(V,AllVs0, AllVs, Vs, IVs),
check_for_hidden_vars(IVs, AllVs, NVs).
check_for_extra_variables(V,AllVs0, AllVs, Vs, IVs) :-
var(V),
clpbn:get_atts(V, [dist(_,[V1|LV])]), !,
add_old_variables([V1|LV], AllVs0, AllVs, Vs, IVs).
check_for_extra_variables(_,AllVs, AllVs, Vs, Vs).
add_old_variables([], AllVs, AllVs, Vs, Vs).
add_old_variables([V1|LV], AllVs0, AllVs, Vs, IVs) :-
clpbn_not_var_member(AllVs0, V1), !,
add_old_variables(LV, [V1|AllVs0], AllVs, [V1|Vs], IVs).
add_old_variables([_|LV], AllVs0, AllVs, Vs, IVs) :-
add_old_variables(LV, AllVs0, AllVs, Vs, IVs).
clpbn_var_member([V1|_], V) :- V1 == V, !.
clpbn_var_member([_|Vs], V) :-
clpbn_var_member(Vs, V).
clpbn_not_var_member([], _).
clpbn_not_var_member([V1|Vs], V) :- V1 \== V,
clpbn_not_var_member(Vs, V).
sort_vars_by_key(AVars, SortedAVars, UnifiableVars) :-
get_keys(AVars, KeysVars),
msort(KeysVars, KVars),
merge_same_key(KVars, SortedAVars, [], UnifiableVars).
get_keys([], []).
get_keys([V|AVars], [K-V|KeysVars]) :-
clpbn:get_atts(V, [key(K)]), !,
get_keys(AVars, KeysVars).
get_keys([_|AVars], KeysVars) :- % may be non-CLPBN vars.
get_keys(AVars, KeysVars).
merge_same_key([], [], _, []).
merge_same_key([K1-V1,K2-V2|Vs], SortedAVars, Ks, UnifiableVars) :-
K1 == K2, !,
(clpbn:get_atts(V1, [evidence(E)])
->
clpbn:put_atts(V2, [evidence(E)])
;
clpbn:get_atts(V2, [evidence(E)])
->
clpbn:put_atts(V1, [evidence(E)])
;
true
),
% V1 = V2,
attributes:fast_unify_attributed(V1,V2),
merge_same_key([K1-V1|Vs], SortedAVars, Ks, UnifiableVars).
merge_same_key([K1-V1,K2-V2|Vs], [V1|SortedAVars], Ks, [K1|UnifiableVars]) :-
(in_keys(K1, Ks) ; \+ \+ K1 == K2), !,
add_to_keys(K1, Ks, NKs),
merge_same_key([K2-V2|Vs], SortedAVars, NKs, UnifiableVars).
merge_same_key([K-V|Vs], [V|SortedAVars], Ks, UnifiableVars) :-
add_to_keys(K, Ks, NKs),
merge_same_key(Vs, SortedAVars, NKs, UnifiableVars).
in_keys(K1,[K|_]) :- \+ \+ K1 = K, !.
in_keys(K1,[_|Ks]) :-
in_keys(K1,Ks).
add_to_keys(K1, Ks, Ks) :- ground(K1), !.
add_to_keys(K1, Ks, [K1|Ks]).
sort_vars_by_key_and_parents(AVars, SortedAVars, UnifiableVars) :-
get_keys_and_parents(AVars, KeysVars),
keysort(KeysVars, KVars),
merge_same_key(KVars, SortedAVars, [], UnifiableVars).
get_keys_and_parents([], []).
get_keys_and_parents([V|AVars], [K-V|KeysVarsF]) :-
clpbn:get_atts(V, [key(K),dist(Id,Parents)]), !,
add_parents(Parents,V,Id,KeysVarsF,KeysVars0),
get_keys_and_parents(AVars, KeysVars0).
get_keys_and_parents([_|AVars], KeysVars) :- % may be non-CLPBN vars.
get_keys_and_parents(AVars, KeysVars).
add_parents(Parents,_,_,KeyVars,KeyVars) :-
all_vars(Parents), !.
add_parents(Parents,V,Id,KeyVarsF,KeyVars0) :-
transform_parents(Parents,NParents,KeyVarsF,KeyVars0),
clpbn:put_atts(V, [dist(Id,NParents)]).
all_vars([]).
all_vars([P|Parents]) :-
var(P),
all_vars(Parents).
transform_parents([],[],KeyVars,KeyVars).
transform_parents([P|Parents0],[P|NParents],KeyVarsF,KeyVars0) :-
var(P), !,
transform_parents(Parents0,NParents,KeyVarsF,KeyVars0).
transform_parents([P|Parents0],[V|NParents],[P-V|KeyVarsF],KeyVars0) :-
transform_parents(Parents0,NParents,KeyVarsF,KeyVars0).


@@ -1,274 +0,0 @@
/***********************************
Variable Elimination in Prolog
How to do it
Three steps:
build the graph:
- for all variables, find out
all tables they connect to;
multiply their size
order by size
*********************************/
:- module(clpbn_vel, [vel/3,
check_if_vel_done/1,
init_vel_solver/4,
run_vel_solver/3]).
:- attribute size/1, all_diffs/1.
:- use_module(library(ordsets),
[ord_union/3,
ord_member/2]).
:- use_module(library('clpbn/xbif'), [clpbn2xbif/3]).
:- use_module(library('clpbn/graphviz'), [clpbn2gviz/4]).
:- use_module(library('clpbn/dists'),
[
dist/4,
get_dist_domain_size/2,
get_dist_matrix/5]).
:- use_module(library('clpbn/utils'), [
clpbn_not_var_member/2]).
:- use_module(library('clpbn/display'), [
clpbn_bind_vals/3]).
:- use_module(library('clpbn/connected'),
[
init_influences/3,
influences/5
]).
:- use_module(library('clpbn/matrix_cpt_utils'),
[project_from_CPT/3,
reorder_CPT/5,
multiply_CPTs/4,
normalise_CPT/2,
sum_out_from_CPT/4,
list_from_CPT/2]).
:- use_module(library(lists),
[
append/3
]).
:- use_module(library('clpbn/aggregates'),
[check_for_agg_vars/2]).
check_if_vel_done(Var) :-
get_atts(Var, [size(_)]), !.
%
% implementation of the well known variable elimination algorithm
%
vel([[]],_,_) :- !.
vel([LVs],Vs0,AllDiffs) :-
init_vel_solver([LVs], Vs0, AllDiffs, State),
% variable elimination proper
run_vel_solver([LVs], [LPs], State),
% bind Probs back to variables so that they can be output.
clpbn_bind_vals([LVs],[LPs],AllDiffs).
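% A hedged sketch of driving the solver in separate steps, as done inside
% vel/3 above (QueryVars, AllVars and AllDiffs are illustrative names for a
% list of query variables, the attributed variables in the store, and the
% all_diffs constraints):
%
% ?- init_vel_solver([QueryVars], AllVars, AllDiffs, State),
%    run_vel_solver([QueryVars], [Probs], State),
%    clpbn_bind_vals([QueryVars], [Probs], AllDiffs).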
init_vel_solver(Qs, Vs0, _, LVis) :-
check_for_agg_vars(Vs0, Vs1),
% LVi will have a list of CLPBN variables
% Tables0 will have the full data on each variable
init_influences(Vs1, G, RG),
init_vel_solver_for_questions(Qs, G, RG, _, LVis).
init_vel_solver_for_questions([], _, _, [], []).
init_vel_solver_for_questions([Vs|MVs], G, RG, [NVs|MNVs0], [NVs|LVis]) :-
influences(Vs, _, NVs0, G, RG),
sort(NVs0, NVs),
%clpbn_gviz:clpbn2gviz(user_error, test, NVs, Vs),
init_vel_solver_for_questions(MVs, G, RG, MNVs0, LVis).
% use a findall to recover space without needing GC
run_vel_solver(LVs, LPs, LNVs) :-
findall(Ps, solve_vel(LVs, LNVs, Ps), LPs).
solve_vel([LVs|_], [NVs0|_], Ps) :-
length(NVs0, L), (L > 64 -> clpbn_gviz:clpbn2gviz(user_error,sort,NVs0,LVs) ; true ),
find_all_clpbn_vars(NVs0, NVs0, LV0, LVi, Tables0),
sort(LV0, LV),
% construct the graph
find_all_table_deps(Tables0, LV),
process(LVi, LVs, tab(Dist,_,_)),
% move from potentials back to probabilities
normalise_CPT(Dist,MPs),
list_from_CPT(MPs, Ps).
solve_vel([_|MoreLVs], [_|MoreLVis], Ps) :-
solve_vel(MoreLVs, MoreLVis, Ps).
keys([],[]).
keys([V|NVs0],[K:E|Ks]) :-
clpbn:get_atts(V,[key(K),evidence(E)]), !,
keys(NVs0,Ks).
keys([V|NVs0],[K|Ks]) :-
clpbn:get_atts(V,[key(K)]),
keys(NVs0,Ks).
%
% just get a list of variables plus associated tables
%
find_all_clpbn_vars([], _, [], [], []) :- !.
find_all_clpbn_vars([V|Vs], NVs0, [Var|LV], ProcessedVars, [table(I,Table,Parents,Sizes)|Tables]) :-
var_with_deps(V, NVs0, Table, Parents, Sizes, Ev, Vals), !,
% variables with evidence should not be processed.
(var(Ev) ->
Var = var(V,I,Sz,Vals,Parents,Ev,_,_),
vel_get_dist_size(V,Sz),
ProcessedVars = [Var|ProcessedVars0]
;
ProcessedVars = ProcessedVars0
),
find_all_clpbn_vars(Vs, NVs0, LV, ProcessedVars0, Tables).
var_with_deps(V, NVs0, Table, Deps, Sizes, Ev, Vals) :-
clpbn:get_atts(V, [dist(Id,Parents)]),
get_dist_matrix(Id,Parents,_,Vals,TAB0),
(
clpbn:get_atts(V, [evidence(Ev)])
->
true
;
true
), !,
% set CPT in canonical form
reorder_CPT([V|Parents],TAB0,Deps0,TAB1,Sizes1),
% remove evidence.
simplify_evidence(Deps0, NVs0, TAB1, Deps0, Sizes1, Table, Deps, Sizes).
find_all_table_deps(Tables0, LV) :-
find_dep_graph(Tables0, DepGraph0),
sort(DepGraph0, DepGraph),
add_table_deps_to_variables(LV, DepGraph).
find_dep_graph([], []) :- !.
find_dep_graph([table(I,Tab,Deps,Sizes)|Tables], DepGraph) :-
add_table_deps(Deps, I, Deps, Tab, Sizes, DepGraph0, DepGraph),
find_dep_graph(Tables, DepGraph0).
add_table_deps([], _, _, _, _, DepGraph, DepGraph).
add_table_deps([V|Deps], I, Deps0, Table, Sizes, DepGraph0, [V-tab(Table,Deps0,Sizes)|DepGraph]) :-
add_table_deps(Deps, I, Deps0, Table, Sizes, DepGraph0, DepGraph).
add_table_deps_to_variables([], []).
add_table_deps_to_variables([var(V,_,_,_,_,_,Deps,K)|LV], DepGraph) :-
steal_deps_for_variable(DepGraph, V, NDepGraph, Deps),
compute_size(Deps,[],K),
% ( clpbn:get_atts(V,[key(Key)]) -> format('~w:~w~n',[Key,K]) ; true),
add_table_deps_to_variables(LV, NDepGraph).
steal_deps_for_variable([V-Info|DepGraph], V0, NDepGraph, [Info|Deps]) :-
V == V0, !,
steal_deps_for_variable(DepGraph, V0, NDepGraph, Deps).
steal_deps_for_variable(DepGraph, _, DepGraph, []).
compute_size([],Vs,K) :-
% use sizes now
% length(Vs,K).
multiply_sizes(Vs,1,K).
compute_size([tab(_,Vs,_)|Tabs],Vs0,K) :-
ord_union(Vs,Vs0,VsI),
compute_size(Tabs,VsI,K).
multiply_sizes([],K,K).
multiply_sizes([V|Vs],K0,K) :-
vel_get_dist_size(V, Sz),
KI is K0*Sz,
multiply_sizes(Vs,KI,K).
process(LV0, InputVs, Out) :-
find_best(LV0, V0, -1, V, WorkTables, LVI, InputVs),
V \== V0, !,
% format('1 ~w: ~w~n',[V,WorkTables]),
multiply_tables(WorkTables, tab(Tab0,Deps0,_)),
reorder_CPT(Deps0,Tab0,Deps,Tab,Sizes),
Table = tab(Tab,Deps,Sizes),
% format('2 ~w: ~w~n',[V,Table]),
project_from_CPT(V,Table,NewTable),
% format('3 ~w: ~w~n',[V,NewTable]),
include(LVI,NewTable,V,LV2),
process(LV2, InputVs, Out).
process(LV0, _, Out) :-
fetch_tables(LV0, WorkTables),
multiply_tables(WorkTables, Out).
find_best([], V, _TF, V, _, [], _).
%:-
% clpbn:get_atts(V,[key(K)]), writeln(chosen:K:_TF).
% root_with_single_child
%find_best([var(V,I,_,_,[],Ev,[Dep],K)|LV], _, _, V, [Dep], LVF, Inputs) :- !.
find_best([var(V,I,Sz,Vals,Parents,Ev,Deps,K)|LV], _, Threshold, VF, NWorktables, LVF, Inputs) :-
( K < Threshold ; Threshold < 0),
clpbn_not_var_member(Inputs, V), !,
find_best(LV, V, K, VF, WorkTables,LV0, Inputs),
(V == VF ->
LVF = LV0, Deps = NWorktables
;
LVF = [var(V,I,Sz,Vals,Parents,Ev,Deps,K)|LV0], WorkTables = NWorktables
).
find_best([V|LV], V0, Threshold, VF, WorkTables, [V|LVF], Inputs) :-
find_best(LV, V0, Threshold, VF, WorkTables, LVF, Inputs).
multiply_tables([Table], Table) :- !. %, Table = tab(T,D,S),matrix:matrix_to_list(T,L),writeln(D:S:L).
multiply_tables([TAB1, TAB2| Tables], Out) :-
%TAB1 = tab(T,_,_),matrix:matrix_to_list(T,L),writeln(doing:L),
multiply_CPTs(TAB1, TAB2, TAB, _),
multiply_tables([TAB| Tables], Out).
simplify_evidence([], _, Table, Deps, Sizes, Table, Deps, Sizes).
simplify_evidence([V|VDeps], NVs0, Table0, Deps0, Sizes0, Table, Deps, Sizes) :-
clpbn:get_atts(V, [evidence(_)]), !,
project_from_CPT(V,tab(Table0,Deps0,Sizes0),tab(NewTable,Deps1,Sizes1)),
simplify_evidence(VDeps, NVs0, NewTable, Deps1, Sizes1, Table, Deps, Sizes).
simplify_evidence([V|VDeps], NVs0, Table0, Deps0, Sizes0, Table, Deps, Sizes) :-
ord_member(V, NVs0), !,
simplify_evidence(VDeps, NVs0, Table0, Deps0, Sizes0, Table, Deps, Sizes).
simplify_evidence([V|VDeps], NVs0, Table0, Deps0, Sizes0, Table, Deps, Sizes) :-
project_from_CPT(V,tab(Table0,Deps0,Sizes0),tab(NewTable,Deps1,Sizes1)),
simplify_evidence(VDeps, NVs0, NewTable, Deps1, Sizes1, Table, Deps, Sizes).
fetch_tables([], []).
fetch_tables([var(_,_,_,_,_,_,Deps,_)|LV0], Tables) :-
append(Deps,Tables0,Tables),
fetch_tables(LV0, Tables0).
include([],_,_,[]).
include([var(V,P,VSz,D,Parents,Ev,Tabs,Est)|LV],tab(T,Vs,Sz),V1,[var(V,P,VSz,D,Parents,Ev,Tabs,Est)|NLV]) :-
clpbn_not_var_member(Vs,V), !,
include(LV,tab(T,Vs,Sz),V1,NLV).
include([var(V,P,VSz,D,Parents,Ev,Tabs,_)|LV],Table,NV,[var(V,P,VSz,D,Parents,Ev,NTabs,NEst)|NLV]) :-
update_tables(Tabs,NTabs,Table,NV),
compute_size(NTabs, [], NEst),
include(LV,Table,NV,NLV).
update_tables([],[Table],Table,_).
update_tables([tab(Tab0,Vs,Sz)|Tabs],[tab(Tab0,Vs,Sz)|NTabs],Table,V) :-
clpbn_not_var_member(Vs,V), !,
update_tables(Tabs,NTabs,Table,V).
update_tables([_|Tabs],NTabs,Table,V) :-
update_tables(Tabs,NTabs,Table,V).
vel_get_dist_size(V,Sz) :-
get_atts(V, [size(Sz)]), !.
vel_get_dist_size(V,Sz) :-
clpbn:get_atts(V,dist(Id,_)), !,
get_dist_domain_size(Id,Sz),
put_atts(V, [size(Sz)]).


@@ -1,235 +0,0 @@
%:- style_check(all).
:- module(viterbi, [viterbi/4]).
:- use_module(library(lists),
[nth/3,
member/2]).
:- use_module(library(assoc)).
:- use_module(library(dgraphs)).
:- use_module(library(matrix)).
:- use_module(library(clpbn), []).
:- ensure_loaded(library('clpbn/hmm')).
:- use_module(library('clpbn/dists'), [
get_dist_params/2]).
:- meta_predicate viterbi(:,:,+,-).
viterbi(Start,End,String,Trace) :-
init_hmm,
Start,
mk_graph(NOfNodes, Map, ViterbiCode),
compile_trace(String, Emissions),
get_id(Start, Map, SI),
get_id(End, Map, EI),
% add a random symbol in front (for the c/1 state).
compiled_viterbi(NOfNodes, SI, ViterbiCode, Emissions, Dump, L),
backtrace(Dump, EI, Map, L, Trace).
state_from_goal(_:Start,S) :-
state_from_goal(Start,S).
state_from_goal(Start,S) :-
functor(Start, N, Ar),
% get rid of position and random var
NAr is Ar-2,
functor(S, N, NAr).
mk_graph(NOfNodes, Map, ViterbiCode) :-
attributes:all_attvars(Vars0),
empty_assoc(KeyMap0),
get_graph(Vars0, Nodes, Edges, KeyMap0, KeyMap),
dgraph_new(G0),
dgraph_add_vertices(G0, Nodes, G1),
dgraph_add_edges(G1, Edges, G2),
dgraph_top_sort(G2, SortedNodes),
compile_viterbi(SortedNodes, KeyMap, NOfNodes, Map, ViterbiCode).
get_graph([V|Vs], [NKey|Keys], EdgesF, KeyMap0, KeyMap) :-
clpbn:get_atts(V,[key(Key), dist(Id,Parents)]),
( Key =.. [N,2|More] ; Key = s(0), N=s, More=[] ), !,
NKey =.. [N|More],
fetch_edges(Parents, NKey, EdgesF, Edges0, PKeys),
get_emission(V, Key, EmissionProb),
put_assoc(NKey,KeyMap0,nodeinfo(_,Id,EmissionProb,PKeys),KeyMapI),
get_graph(Vs, Keys, Edges0, KeyMapI, KeyMap).
get_graph([_|Vs], Keys, Edges, KeyMap0, KeyMap) :-
get_graph(Vs, Keys, Edges, KeyMap0, KeyMap).
get_graph([], [], [], KeyMap, KeyMap).
get_emission(V, Key, EmissionProbs) :-
hmm:get_atts(V,[emission(_)]), !,
user:emission_cpt(Key, EmissionProbs).
get_emission(_, _, []).
fetch_edges([V|Parents], Key0, EdgesF, Edges0, [Slice-AKey|PKeys]) :-
var(V), !,
clpbn:get_atts(V,[key(Key)]),
abstract_key(Key, AKey, Slice),
(
Slice < 3
->
EdgesF = [Key0-AKey|EdgesI]
;
EdgesF = EdgesI
),
fetch_edges(Parents, Key0, EdgesI, Edges0, PKeys).
fetch_edges([Key|Parents], Key0, EdgesF, Edges0, [Slice-AKey|PKeys]) :-
abstract_key(Key, AKey, Slice),
(
Slice < 3
->
EdgesF = [Key0-AKey|EdgesI]
;
EdgesF = EdgesI
),
fetch_edges(Parents, Key0, EdgesI, Edges0, PKeys).
fetch_edges([], _, Edges, Edges, []).
abstract_key(Key, NKey, Slice) :-
Key =.. [N,Slice|More],
NKey =.. [N|More].
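% For example (illustrative key), abstract_key/3 simply strips the time slice
% from a key term:
%
% ?- viterbi:abstract_key(e(2,x), NKey, Slice).
% NKey = e(x), Slice = 2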
compile_viterbi(Keys, KeyMap, Nodes, Map, ViterbiCode) :-
enum_keys(Keys, KeyMap, 0, Nodes, Map),
compile_keys(Keys, KeyMap, ViterbiCode).
% just enumerate keys
enum_keys([], _, I, I, []).
enum_keys([Key|Keys], KeyMap, I0, Nodes, [I0-Key|Map]) :-
get_assoc(Key,KeyMap,nodeinfo(I0,_,_,_)),
I is I0+1,
enum_keys(Keys, KeyMap, I, Nodes, Map).
compile_keys([Key|Keys], KeyMap, ViterbiCodeF) :-
get_assoc(Key,KeyMap,nodeinfo(IKey,Id,Emission,PKeys)),
compile_emission(Emission,IKey,ViterbiCodeF,ViterbiCodeI),
get_dist_params(Id,Probs),
compile_propagation(PKeys,Probs,IKey,KeyMap,ViterbiCodeI,ViterbiCode0),
compile_keys(Keys, KeyMap, ViterbiCode0).
compile_keys([], _, []).
% add a random symbol to the end.
compile_emission([],_) --> !, [].
compile_emission(EmissionTerm,IKey) --> [emit(IKey,EmissionTerm)].
compile_propagation([],[],_,_) --> [].
compile_propagation([0-PKey|Ps], [Prob|Probs], IKey, KeyMap) -->
[prop_same(IKey,Parent,Prob)],
{ get_assoc(PKey,KeyMap,nodeinfo(Parent,_,_,_)) },
compile_propagation(Ps, Probs, IKey, KeyMap).
compile_propagation([2-PKey|Ps], [Prob|Probs], IKey, KeyMap) -->
[prop_same(IKey,Parent,Prob)],
{ get_assoc(PKey,KeyMap,nodeinfo(Parent,_,_,_)) },
compile_propagation(Ps, Probs, IKey, KeyMap).
compile_propagation([3-PKey|Ps], [Prob|Probs], IKey, KeyMap) -->
[prop_next(IKey,Parent,Prob)],
{ get_assoc(PKey,KeyMap,nodeinfo(Parent,_,_,_)) },
compile_propagation(Ps, Probs, IKey, KeyMap).
get_id(_:S, Map, SI) :- !,
get_id(S, Map, SI).
get_id(S, Map, SI) :-
functor(S,N,A),
A2 is A-2,
functor(S2,N,A2),
once(member(SI-S2,Map)).
compile_trace(Trace, Emissions) :-
user:hmm_domain(Domain),
(atom(Domain) ->
hmm:cvt_vals(Domain, Vals)
;
Vals = Domain
),
compile_trace(Trace, Vals, Emissions).
compile_trace([], _, []).
compile_trace([El|Trace], Vals, [N|Emissions]) :-
once(nth(N, Vals, El)),
compile_trace(Trace, Vals, Emissions).
compiled_viterbi(Nodes, S, Commands, Input, Trace, L) :-
length(Input,L),
prolog_flag(min_tagged_integer, Min),
matrix_new_set(ints,[Nodes], Min, Current),
matrix_new_set(ints,[Nodes], Min, Next),
L1 is L+1,
matrix_new(ints,[L1,Nodes], Trace),
matrix_set(Current, [S], 0),
run_commands(Input, Commands, 0, Current, Next, Trace, Min).
run_commands([], _, _, _, _, _, _).
run_commands([E|Input], Commands, I, Current, Next, Trace, Min) :-
run_code(Commands, E, I, Current, Next, Trace),
matrix_get(Current, [32], M10),
matrix_get(Current, [34], C),
matrix_set_all(Current,Min),
I1 is I+1,
run_commands(Input, Commands, I1, Next, Current, Trace, Min).
run_code([], _, _, _, _, Trace).
run_code([Inst|Input], E, I, Current, Next, Trace) :-
run_inst(Inst, E, I, Current, Next, Trace) ,
run_code(Input, E, I, Current, Next, Trace).
run_inst(emit(Id,T), E, _SP, Current, _, Trace) :-
arg(E,T,P),
matrix_add(Current, [Id], P).
run_inst(prop_same(I,P,Prob), _, SP, Current, _, Trace) :-
matrix_get(Current, [I], PI),
NP is PI+Prob,
matrix_get(Current, [P], P0),
(NP > P0 ->
matrix_set(Current, [P], NP),
matrix_set(Trace, [SP,P], I)
;
true
).
run_inst(prop_next(I,P,Prob), _, SP, Current, Next, Trace) :-
matrix_get(Current, [I], PI),
NP is PI+Prob,
matrix_get(Next, [P], P0),
(NP > P0 ->
matrix_set(Next, [P], NP),
SP1 is SP+1,
IN is -I,
matrix_set(Trace, [SP1,P], IN)
;
true
).
backtrace(Dump, EI, Map, L, Trace) :-
L1 is L-1,
Pos = [L1,EI],
matrix_get(Dump,Pos,Next),
trace(L1,Next,Dump,Map,[],Trace).
trace(0,0,_,_,Trace,Trace) :- !.
trace(L1,Next,Dump,Map,Trace0,Trace) :-
(Next < 0 ->
NL is L1-1,
P is -Next
;
NL = L1,
P = Next
),
once(member(P-AKey,Map)),
AKey=..[N|Args],
Key=..[N,NL|Args],
matrix_get(Dump,[NL,P],New),
trace(NL,New,Dump,Map,[Key|Trace0],Trace).


@@ -1,119 +0,0 @@
%
% XMLBIF support for CLP(BN)
%
:- module(xbif, [clpbn2xbif/3]).
:- use_module(library('clpbn/dists'), [
get_dist_domain/2]).
clpbn2xbif(Stream, Name, Network) :-
format(Stream, '<?xml version="1.0" encoding="US-ASCII"?>
<!--
Bayesian network in XMLBIF v0.3 (BayesNet Interchange Format)
Produced by CLP(BN)
-->
<!-- DTD for the XMLBIF 0.3 format -->
<!DOCTYPE BIF [
<!ELEMENT BIF ( NETWORK )*>
<!ATTLIST BIF VERSION CDATA #REQUIRED>
<!ELEMENT NETWORK ( NAME, ( PROPERTY | VARIABLE | DEFINITION )* )>
<!ELEMENT NAME (#PCDATA)>
<!ELEMENT VARIABLE ( NAME, ( OUTCOME | PROPERTY )* ) >
<!ATTLIST VARIABLE TYPE (nature|decision|utility) "nature">
<!ELEMENT OUTCOME (#PCDATA)>
<!ELEMENT DEFINITION ( FOR | GIVEN | TABLE | PROPERTY )* >
<!ELEMENT FOR (#PCDATA)>
<!ELEMENT GIVEN (#PCDATA)>
<!ELEMENT TABLE (#PCDATA)>
<!ELEMENT PROPERTY (#PCDATA)>
]>
<BIF VERSION="0.3">
<NETWORK>
<NAME>~w</NAME>
<!-- Variables -->',[Name]),
output_vars(Stream, Network),
output_dists(Stream, Network),
format(Stream, '</NETWORK>
</BIF>
',[]).
output_vars(_, []).
output_vars(Stream, [V|Vs]) :-
output_var(Stream, V),
output_vars(Stream, Vs).
output_var(Stream, V) :-
clpbn:get_atts(V,[key(Key),dist(Id,_)]),
get_dist_domain(Id, Domain),
format(Stream, '<VARIABLE TYPE="nature">
<NAME>',[]),
output_key(Stream,Key),
format(Stream, '</NAME>~n',[]),
output_domain(Stream, Domain),
format(Stream, '</VARIABLE>~n~n',[]).
output_domain(_, []).
output_domain(Stream, [El|Domain]) :-
format(Stream, ' <OUTCOME>~q</OUTCOME>~n',[El]),
output_domain(Stream, Domain).
output_dists(_, []).
output_dists(Stream, [V|Network]) :-
output_dist(Stream, V),
output_dists(Stream, Network).
output_dist(Stream, V) :-
clpbn:get_atts(V,[key(Key),dist(_,CPT,Parents)]),
format(Stream, '<DEFINITION>
<FOR>',[]),
output_key(Stream, Key),
format(Stream, '</FOR>~n',[]),
output_parents(Stream,Parents),
output_cpt(Stream,CPT),
format(Stream, '</DEFINITION>~n~n',[]).
output_parents(_,[]).
output_parents(Stream,[P1|Ps]) :-
clpbn:get_atts(P1,[key(Key)]),
format(Stream, '<GIVEN>',[]),
output_key(Stream,Key),
format(Stream, '</GIVEN>~n',[]),
output_parents(Stream,Ps).
output_cpt(Stream,CPT) :-
format(Stream, ' <TABLE> ', []),
output_els(Stream, CPT),
format(Stream, '</TABLE>~n', []).
output_els(_, []).
output_els(Stream, [El|Els]) :-
format(Stream,'~f ',[El]),
output_els(Stream, Els).
output_key(Stream, Key) :-
output_key(Stream, 0, Key).
output_key(Stream, _, Key) :-
primitive(Key), !,
write(Stream, Key).
output_key(Stream, I0, Key) :-
Key =.. [Name|Args],
write(Stream, Name),
I is I0+1,
output_key_args(Stream, I, Args).
output_key_args(_, _, []).
output_key_args(Stream, I, [Arg|Args]) :-
format(Stream, '~*c', [I,0'_]),
output_key(Stream, I, Arg),
output_key_args(Stream, I, Args).
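% As an illustration, a compound key such as f(a,b) is written as f_a_b; the
% separator grows by one underscore per nesting level:
%
% ?- xbif:output_key(user_output, f(a,b)).
% f_a_b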


@@ -1,306 +0,0 @@
%
% Interface the Aleph ILP system to CLP(BN)
%
% Relies on the Aleph cost function.
% It assumes Aleph work as usual, but some variables are of type random.
%
:- module(clpbn_aleph,
[init_clpbn_cost/0,
random_type/2]).
:- dynamic rt/2, inited/1.
:- use_module(library('clpbn'),
[{}/1,
clpbn_flag/2,
clpbn_flag/3,
set_clpbn_flag/2]).
:- use_module(library('clpbn/learning/em')).
:- use_module(library('clpbn/matrix_cpt_utils'),
[uniform_CPT_as_list/2]).
:- use_module(library('clpbn/dists'),
[reset_all_dists/0,
get_dist_key/2,
get_dist_params/2
]).
:- use_module(library('clpbn/table'),
[clpbn_tabled_abolish/1,
clpbn_tabled_asserta/1,
clpbn_tabled_asserta/2,
clpbn_tabled_assertz/1,
clpbn_tabled_clause/2,
clpbn_tabled_number_of_clauses/2,
clpbn_is_tabled/1,
clpbn_tabled_dynamic/1]).
%
% Tell Aleph not to use default solver during saturation
%
% all work will be done by EM
:- set_clpbn_flag(solver,none).
%
% This is the Aleph interface
% examples are stored as example(Id, Type, Example)
% CPT domains are stored as random_type(KeySkeleton, ListOfValues).
%
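% A hypothetical illustration of the two conventions above (all names are made
% up). The type passed to random_type/2 must match the Aleph type annotating
% the last argument of the head mode declaration:
%
% :- random_type(grade, [a, b, c, d]).
% example(1, pos, registration_grade(r1, a)).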
:- use_module(library(lists),[append/3]).
:- multifile user:cost/3.
% handle uninstantiated examples as hidden variables.
:- user:set(skolem_examples, false).
% avoid doing CLP(BN) stuff except at start
:- user:set(sat_start_hook, clpbn_aleph:disable_solver).
:- user:set(sat_stop_hook, clpbn_aleph:enable_solver).
:- user:set(reduce_start_hook, clpbn_aleph:disable_solver).
:- user:set(reduce_stop_hook, clpbn_aleph:enable_solver).
:- user:set(record_testclause_hook, clpbn_aleph:do_nothing).
:- user:set(newbest_hook, clpbn_aleph:store_theory).
disable_solver(_) :-
clpbn_flag(solver, Old, none),
nb_setval(old_clpbn_solver, Old).
disable_solver(_,_) :-
clpbn_flag(solver, Old, none),
nb_setval(old_clpbn_solver, Old).
enable_solver :-
nb_getval(old_clpbn_solver, Old),
set_clpbn_flag(solver, Old).
enable_solver(_,_) :-
nb_getval(old_clpbn_solver, Old),
set_clpbn_flag(solver, Old).
do_nothing(_).
% backup current best theory in DB.
store_theory(_,_,_) :-
eraseall(best_theory),
fail.
store_theory(_,(H:-_),_) :-
clpbn_is_tabled(user:H), !,
store_tabled_theory(H).
store_theory(_,(H:-_),_) :-
store_theory(H).
store_tabled_theory(H) :-
clpbn_tabled_clause(user:H,B),
add_correct_cpt(B,NB),
store_cl((H:-NB)),
fail.
store_tabled_theory(_).
store_theory(H) :-
clause(user:H,B),
add_correct_cpt(B,NB),
store_cl((H:-NB)),
fail.
store_theory(_).
add_correct_cpt((G,B),(G,NB)) :- !,
add_correct_cpt(B,NB).
add_correct_cpt((clpbn:{V = K with Tab }), ({V = K with NTab})) :-
correct_tab(Tab,K,NTab).
add_correct_cpt(({V = K with Tab }), ({V = K with NTab})) :-
correct_tab(Tab,K,NTab).
correct_tab(p(Vs,_),K,p(Vs,TDist)) :-
get_dist_key(Id, K),
get_dist_params(Id, TDist).
correct_tab(p(Vs,_,Ps),K,p(Vs,TDist,Ps)) :-
get_dist_key(Id, K),
get_dist_params(Id, TDist).
store_cl(Cl) :-
recordz(best_theory, Cl, _).
:- user:set(best_clause_hook, clpbn_aleph:add_new_clause).
add_new_clause(_,(_ :- true),_,_) :- !.
add_new_clause(_,(H :- B),_,_) :-
user:db_usage,
user:db_dynamic,
domain(H, K, V, D),
rewrite_body(B, IB, Vs, _, ( !, { V = K with p(D, CPTList, Vs) })),
% need to remember which CPT we want
get_dist_key(Id, K),
get_dist_params(Id, CPTList),
(
clpbn_is_tabled(user:H)
->
clpbn_tabled_asserta(user:(H :- IB))
;
asserta(user:(H :- IB))
),
	user:setting(verbosity,Verbosity),
	( Verbosity >= 1 ->
user:p_message('CLP(BN) Theory'),
functor(H,N,Ar), listing(user:N/Ar)
;
true
).
% user-defined cost function, Aleph knows about this (and only about this).
user:cost((H :- B),Inf,Score) :- !,
domain(H, K, V, D),
check_info(Inf),
rewrite_body(B, IB, Vs, Ds, ( !, { V = K with p(D, CPTList, Vs) })),
uniform_cpt([D|Ds], CPTList),
(
clpbn_is_tabled(user:H)
->
clpbn_tabled_asserta(user:(H :- IB), R)
;
asserta(user:(H :- IB), R)
),
(
cpt_score(Score0)
->
erase(R),
Score is -Score0
;
% illegal clause, just get out of here.
erase(R),
fail
).
user:cost(H,_Inf,Score) :- !,
init_clpbn_cost(H, Score0),
Score is -Score0.
% this is here so that Aleph will actually compute coverage. Aleph computes
% coverage only if cost actually checks Inf.
check_info(_).
init_clpbn_cost(_, Score) :-
inited(Score), !.
init_clpbn_cost(H, Score) :-
functor(H,N,A),
% get rid of Aleph crap
(
clpbn_is_tabled(user:H)
->
clpbn_tabled_abolish(user:N/A),
clpbn_tabled_dynamic(user:N/A)
;
abolish(user:N/A),
% make it easy to add and remove clauses.
dynamic(user:N/A)
),
domain(H, K, V, D),
uniform_cpt([D], CPTList),
% This will be the default clause, called when the other rules fail.
(
clpbn_is_tabled(user:H)
->
clpbn_tabled_assertz(user:(H :- !, { V = K with p(D, CPTList) }))
;
assert(user:(H :- !, { V = K with p(D, CPTList) }))
),
cpt_score(Score),
assert(inited(Score)).
% receives H, and generates a key K, a random variable RV, and a domain D.
domain(H, K, RV, D) :-
functor(H,Name,Arity),
functor(Pred,Name,Arity),
(
recorded(aleph,modeh(_,Pred),_)
->
true
;
user:'$aleph_global'(modeh,modeh(_,Pred))
),
arg(Arity,Pred,+RType),
rt(RType,D), !,
key_from_head(H,K,RV).
domain(H, K, V, D) :-
current_predicate(_,user:domain(_)),
key_from_head(H,K,V),
user:domain(K,D).
key_from_head(H,K,V) :-
H =.. [Name|Args],
(
clpbn_is_tabled(user:H)
->
clpbn_tabled_number_of_clauses(user:H,NClauses)
;
predicate_property(user:H,number_of_clauses(NClauses))
),
atomic_concat(Name,NClauses,NName),
append(H0L,[V],Args),
K =.. [NName|H0L].
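% For instance (purely illustrative), if user:grade/3 currently has 2 clauses,
% then key_from_head(grade(s1,c1,G), K, V) gives K = grade2(s1,c1) and V = G:
% the key functor is the predicate name tagged with the current clause count,
% and the random variable is the last argument of the head.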
% transforms the body into something that is going to be called
% receives G0, and generates a list of goals, a list of variables, and a list of domains.
% receives also a Tail with the constraint to append at the end.
rewrite_body((A,B), (user:NA,NB), [V|Vs], [D|Ds], Tail) :-
rewrite_goal(A, V, D, NA), !,
rewrite_body(B, NB, Vs, Ds, Tail).
rewrite_body((A,B), (user:A,NB), Vs, Ds, Tail) :-
rewrite_body(B,NB, Vs, Ds, Tail).
rewrite_body(A,(user:NA,Tail), [V], [D], Tail) :-
rewrite_goal(A, V, D, NA), !.
rewrite_body(A, (user:A,Tail), [], [], Tail).
% so they need not be rewritten.
rewrite_goal(A,V,D,NA) :-
functor(A,Name,Arity),
functor(Pred,Name,Arity),
(
recorded(aleph,modeb(_,Pred),_)
->
true
;
user:'$aleph_global'(modeb,modeb(_,Pred))
),
arg(Arity,Pred,-RType),
rt(RType,D), !,
A =.. [Name|Args],
replace_last_var(Args,V,NArgs),
NA =.. [Name|NArgs].
replace_last_var([_],V,[V]) :- !.
replace_last_var([A|Args],V,[A|NArgs]) :-
replace_last_var(Args,V,NArgs).
%
% This is the key
%
cpt_score(Lik) :-
findall(user:Ex, user:example(_,pos,Ex), Exs),
clpbn_flag(solver, Solver),
clpbn_flag(em_solver, EMSolver),
set_clpbn_flag(solver, EMSolver),
reset_all_dists,
em(Exs, 0.01, 10, _Tables, Lik),
set_clpbn_flag(solver, Solver).
complete_clpbn_cost(_AlephClause).
random_type(A,B) :-
assert(rt(A,B)).
uniform_cpt(Ds, CPTList) :-
lengths(Ds, Ls),
uniform_CPT_as_list(Ls, CPTList).
lengths([], []).
lengths([D|Ds], [L|Ls]) :-
length(D, L),
lengths(Ds, Ls).


@@ -1,121 +0,0 @@
%
% Learn parameters using the BNT toolkit
%
:- yap_flag(unknown,error).
:- style_check(all).
:- module(bnt_parameters, [learn_parameters/2]).
:- use_module(library('clpbn'), [
clpbn_flag/3]).
:- use_module(library('clpbn/bnt'), [
create_bnt_graph/2]).
:- use_module(library('clpbn/display'), [
clpbn_bind_vals/3]).
:- use_module(library('clpbn/dists'), [
get_dist_domain/2
]).
:- use_module(library(matlab), [matlab_initialized_cells/4,
matlab_call/2,
matlab_get_variable/2
]).
:- dynamic bnt_em_max_iter/1.
bnt_em_max_iter(10).
% syntactic sugar for matlab_call.
:- op(800,yfx,<--).
G <-- Y :-
matlab_call(Y,G).
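% So, for example, the call used in bnt_learn_parameters/2 below,
%
%   engine <-- jtree_inf_engine(bnet)
%
% is just shorthand for matlab_call(jtree_inf_engine(bnet), engine).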
learn_parameters(Items, Tables) :-
run_all(Items),
clpbn_flag(solver, OldSolver, bnt),
clpbn_flag(bnt_model, Old, tied),
attributes:all_attvars(AVars),
% sort and incorporate evidence
clpbn_vars(AVars, AllVars),
length(AllVars,NVars),
create_bnt_graph(AllVars, Reps),
mk_sample(AllVars,NVars,EvVars),
bnt_learn_parameters(NVars,EvVars),
get_parameters(Reps, Tables),
clpbn_flag(solver, bnt, OldSolver),
clpbn_flag(bnt_model, tied, Old).
run_all([]).
run_all([G|Gs]) :-
call(user:G),
run_all(Gs).
clpbn_vars(Vs,BVars) :-
get_clpbn_vars(Vs,CVs),
keysort(CVs,KVs),
merge_vars(KVs,BVars).
get_clpbn_vars([],[]).
get_clpbn_vars([V|GVars],[K-V|CLPBNGVars]) :-
clpbn:get_atts(V, [key(K)]), !,
get_clpbn_vars(GVars,CLPBNGVars).
get_clpbn_vars([_|GVars],CLPBNGVars) :-
get_clpbn_vars(GVars,CLPBNGVars).
merge_vars([],[]).
merge_vars([K-V|KVs],[V|BVars]) :-
get_var_has_same_key(KVs,K,V,KVs0),
merge_vars(KVs0,BVars).
get_var_has_same_key([K-V|KVs],K,V,KVs0) :- !,
get_var_has_same_key(KVs,K,V,KVs0).
get_var_has_same_key(KVs,_,_,KVs).
mk_sample(AllVars,NVars, LL) :-
add2sample(AllVars, LN),
length(LN,LL),
matlab_initialized_cells( NVars, 1, LN, sample).
add2sample([], []).
add2sample([V|Vs],[val(VId,1,Val)|Vals]) :-
clpbn:get_atts(V, [evidence(Ev),dist(Id,_)]), !,
bnt:get_atts(V,[bnt_id(VId)]),
get_dist_domain(Id, Domain),
evidence_val(Ev,1,Domain,Val),
add2sample(Vs, Vals).
add2sample([_V|Vs],Vals) :-
add2sample(Vs, Vals).
evidence_val(Ev,Val,[Ev|_],Val) :- !.
evidence_val(Ev,I0,[_|Domain],Val) :-
I1 is I0+1,
evidence_val(Ev,I1,Domain,Val).
bnt_learn_parameters(_,_) :-
engine <-- jtree_inf_engine(bnet),
% engine <-- var_elim_inf_engine(bnet),
% engine <-- gibbs_sampling_inf_engine(bnet),
% engine <-- belprop_inf_engine(bnet),
% engine <-- pearl_inf_engine(bnet),
bnt_em_max_iter(MaxIters),
[new_bnet, trace] <-- learn_params_em(engine, sample, MaxIters).
get_parameters([],[]).
get_parameters([Rep-v(_,_,_)|Reps],[CPT|CPTs]) :-
get_new_table(Rep,CPT),
get_parameters(Reps,CPTs).
get_new_table(Rep,CPT) :-
s <-- struct(new_bnet.'CPD'({Rep})),
matlab_get_variable( s.'CPT', CPT).


@@ -1,229 +0,0 @@
%
% The world famous EM algorithm, in a nutshell
%
:- module(clpbn_em, [em/5]).
:- use_module(library(lists),
[append/3]).
:- use_module(library(clpbn),
[clpbn_init_solver/5,
clpbn_run_solver/4,
clpbn_flag/2]).
:- use_module(library('clpbn/dists'),
[get_dist_domain_size/2,
empty_dist/2,
dist_new_table/2,
get_dist_key/2,
randomise_all_dists/0,
uniformise_all_dists/0]).
:- use_module(library('clpbn/connected'),
[clpbn_subgraphs/2]).
:- use_module(library('clpbn/learning/learn_utils'),
[run_all/1,
clpbn_vars/2,
normalise_counts/2,
compute_likelihood/3,
soften_sample/2]).
:- use_module(library(lists),
[member/2]).
:- use_module(library(matrix),
[matrix_add/3,
matrix_to_list/2]).
:- use_module(library(rbtrees),
[rb_new/1,
rb_insert/4,
rb_lookup/3]).
:- use_module(library('clpbn/utils'),
[
check_for_hidden_vars/3,
sort_vars_by_key/3]).
:- meta_predicate em(:,+,+,-,-), init_em(:,-).
em(Items, MaxError, MaxIts, Tables, Likelihood) :-
catch(init_em(Items, State),Error,handle_em(Error)),
em_loop(0, 0.0, State, MaxError, MaxIts, Likelihood, Tables),
assert(em_found(Tables, Likelihood)),
fail.
% get rid of new random variables the easy way :)
em(_, _, _, Tables, Likelihood) :-
retract(em_found(Tables, Likelihood)).
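% A minimal usage sketch, mirroring the School learning example shipped with
% CLP(BN) (goal/1 is an illustrative user predicate that enumerates the goals
% setting up the evidence):
%
% ?- findall(G, goal(G), Goals),
%    em(Goals, 0.01, 10, Tables, Likelihood).
%
% The assert/fail/retract pattern above is just a cheap way to undo all the
% bindings created while running the goals, keeping only the learned tables
% and the likelihood.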
handle_em(error(repeated_parents)) :-
assert(em_found(_, -inf)),
fail.
% This gets you an initial configuration. If there is a lot of evidence
% tables may be filled in close to optimal, otherwise they may be
% close to uniform.
% It also gets you a run for the random variables.
% The state collects all the info we need for the EM algorithm:
% it includes the list of variables without evidence,
% the list of distributions for which we want to compute parameters,
% and more detailed info on distributions, namely a list of all instances of each distribution.
init_em(Items, state( AllDists, AllDistInstances, MargVars, SolverVars)) :-
run_all(Items),
% randomise_all_dists,
uniformise_all_dists,
attributes:all_attvars(AllVars0),
sort_vars_by_key(AllVars0,AllVars,[]),
% remove variables that do not have to do with this query.
% check_for_hidden_vars(AllVars1, AllVars1, AllVars),
different_dists(AllVars, AllDists, AllDistInstances, MargVars),
clpbn_flag(em_solver, Solver),
clpbn_init_solver(Solver, MargVars, AllVars, _, SolverVars).
% loop for as long as you want.
em_loop(Its, Likelihood0, State, MaxError, MaxIts, LikelihoodF, FTables) :-
estimate(State, LPs),
maximise(State, Tables, LPs, Likelihood),
% writeln(Likelihood:Its:Likelihood0:Tables),
(
(
abs((Likelihood - Likelihood0)/Likelihood) < MaxError
;
Its == MaxIts
)
->
ltables(Tables, FTables),
LikelihoodF = Likelihood
;
Its1 is Its+1,
em_loop(Its1, Likelihood, State, MaxError, MaxIts, LikelihoodF, FTables)
).
ltables([], []).
ltables([Id-T|Tables], [Key-LTable|FTables]) :-
matrix_to_list(T,LTable),
get_dist_key(Id, Key),
ltables(Tables, FTables).
% collect the different dists we are going to learn next.
different_dists(AllVars, AllDists, AllInfo, MargVars) :-
all_dists(AllVars, Dists0),
sort(Dists0, Dists1),
group(Dists1, AllDists, AllInfo, MargVars0, []),
sort(MargVars0, MargVars).
all_dists([], []).
all_dists([V|AllVars], [i(Id, [V|Parents], Cases, Hiddens)|Dists]) :-
clpbn:get_atts(V, [dist(Id,Parents)]),
sort([V|Parents], Sorted),
length(Sorted, LengSorted),
length(Parents, LengParents),
(
LengParents+1 =:= LengSorted
->
true
;
throw(error(repeated_parents))
),
generate_hidden_cases([V|Parents], CompactCases, Hiddens),
uncompact_cases(CompactCases, Cases),
all_dists(AllVars, Dists).
generate_hidden_cases([], [], []).
generate_hidden_cases([V|Parents], [P|Cases], Hiddens) :-
clpbn:get_atts(V, [evidence(P)]), !,
generate_hidden_cases(Parents, Cases, Hiddens).
generate_hidden_cases([V|Parents], [Cases|MoreCases], [V|Hiddens]) :-
clpbn:get_atts(V, [dist(Id,_)]),
get_dist_domain_size(Id, Sz),
gen_cases(0, Sz, Cases),
generate_hidden_cases(Parents, MoreCases, Hiddens).
gen_cases(Sz, Sz, []) :- !.
gen_cases(I, Sz, [I|Cases]) :-
I1 is I+1,
gen_cases(I1, Sz, Cases).
uncompact_cases(CompactCases, Cases) :-
findall(Case, is_case(CompactCases, Case), Cases).
is_case([], []).
is_case([A|CompactCases], [A|Case]) :-
integer(A), !,
is_case(CompactCases, Case).
is_case([L|CompactCases], [C|Case]) :-
member(C, L),
is_case(CompactCases, Case).
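% For example, for [V,P] where P has evidence 1 and V is hidden with a domain
% of size 2, generate_hidden_cases/3 yields CompactCases = [[0,1],1] and
% Hiddens = [V], and uncompact_cases/2 expands this into
% Cases = [[0,1],[1,1]].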
group([], [], []) --> [].
group([i(Id,Ps,Cs,[])|Dists1], [Id|Ids], [Id-[i(Id,Ps,Cs,[])|Extra]|AllInfo]) --> !,
same_id(Dists1, Id, Extra, Rest),
group(Rest, Ids, AllInfo).
group([i(Id,Ps,Cs,Hs)|Dists1], [Id|Ids], [Id-[i(Id,Ps,Cs,Hs)|Extra]|AllInfo]) -->
[Hs],
same_id(Dists1, Id, Extra, Rest),
group(Rest, Ids, AllInfo).
same_id([i(Id,Vs,Cases,[])|Dists1], Id, [i(Id, Vs, Cases, [])|Extra], Rest) --> !,
same_id(Dists1, Id, Extra, Rest).
same_id([i(Id,Vs,Cases,Hs)|Dists1], Id, [i(Id, Vs, Cases, Hs)|Extra], Rest) --> !,
[Hs],
same_id(Dists1, Id, Extra, Rest).
same_id(Dists, _, [], Dists) --> [].
compact_mvars([], []).
compact_mvars([X1,X2|MargVars], CMVars) :- X1 == X2, !,
compact_mvars([X2|MargVars], CMVars).
compact_mvars([X|MargVars], [X|CMVars]) :- !,
compact_mvars(MargVars, CMVars).
estimate(state(_, _, Margs, SolverState), LPs) :-
clpbn_flag(em_solver, Solver),
clpbn_run_solver(Solver, Margs, LPs, SolverState).
maximise(state(_,DistInstances,MargVars,_), Tables, LPs, Likelihood) :-
rb_new(MDistTable0),
create_mdist_table(MargVars, LPs, MDistTable0, MDistTable),
compute_parameters(DistInstances, Tables, MDistTable, 0.0, Likelihood, LPs:MargVars).
create_mdist_table([],[],MDistTable,MDistTable).
create_mdist_table([Vs|MargVars],[Ps|LPs],MDistTable0,MDistTable) :-
rb_insert(MDistTable0, Vs, Ps, MDistTableI),
create_mdist_table(MargVars, LPs, MDistTableI ,MDistTable).
compute_parameters([], [], _, Lik, Lik, _).
compute_parameters([Id-Samples|Dists], [Id-NewTable|Tables], MDistTable, Lik0, Lik, LPs:MargVars) :-
empty_dist(Id, Table0),
add_samples(Samples, Table0, MDistTable),
soften_sample(Table0, SoftenedTable),
matrix:matrix_sum(Table0,TotM),
normalise_counts(SoftenedTable, NewTable),
compute_likelihood(Table0, NewTable, DeltaLik),
dist_new_table(Id, NewTable),
NewLik is Lik0+DeltaLik,
compute_parameters(Dists, Tables, MDistTable, NewLik, Lik, LPs:MargVars).
add_samples([], _, _).
add_samples([i(_,_,[Case],[])|Samples], Table, MDistTable) :- !,
matrix_add(Table,Case,1.0),
add_samples(Samples, Table, MDistTable).
add_samples([i(_,_,Cases,Hiddens)|Samples], Table, MDistTable) :-
rb_lookup(Hiddens, Ps, MDistTable),
run_sample(Cases, Ps, Table),
add_samples(Samples, Table, MDistTable).
run_sample([], [], _).
run_sample([C|Cases], [P|Ps], Table) :-
matrix_add(Table, C, P),
run_sample(Cases, Ps, Table).


@@ -1,46 +0,0 @@
% learn distribution for school database.
% we do not consider the aggregates yet.
:- [pos:train].
:- ['~/Yap/work/CLPBN/clpbn/examples/School/school_32'].
:- ['~/Yap/work/CLPBN/learning/em'].
main :-
findall(X,goal(X),L),
em(L,0.01,10,CPTs,Lik),
writeln(Lik:CPTs).
%
% change to 0.05, 0.1, 0.2 to make things simpler/harder
%
missing(0.3).
% miss 30% of the examples.
goal(professor_ability(P,V)) :-
pos:professor_ability(P,V1),
missing(X),
( random > X -> V = V1 ; true).
% miss 10% of the examples.
goal(professor_popularity(P,V)) :-
pos:professor_popularity(P,V1),
missing(X),
( random > X -> V = V1 ; true).
goal(registration_grade(P,V)) :-
pos:registration_grade(P,V1),
missing(X),
( random > X -> V = V1 ; true).
goal(student_intelligence(P,V)) :-
pos:student_intelligence(P,V1),
missing(X),
( random > X -> V = V1 ; true).
goal(course_difficulty(P,V)) :-
pos:course_difficulty(P,V1),
missing(X),
( random > X -> V = V1 ; true).
goal(registration_satisfaction(P,V)) :-
pos:registration_satisfaction(P,V1),
missing(X),
( random > X -> V = V1 ; true).

File diff suppressed because it is too large


@@ -1,100 +0,0 @@
%
% Utilities for learning
%
:- module(clpbn_learn_utils, [run_all/1,
clpbn_vars/2,
normalise_counts/2,
compute_likelihood/3,
soften_sample/2,
soften_sample/3]).
:- use_module(library(clpbn),
[clpbn_flag/2]).
:- use_module(library('clpbn/table'),
[clpbn_reset_tables/0]).
:- use_module(library(matrix),
[matrix_agg_lines/3,
matrix_op_to_lines/4,
matrix_agg_cols/3,
matrix_op_to_cols/4,
matrix_to_logs/2,
matrix_op/4,
matrix_sum/2,
matrix_to_list/2,
matrix_op_to_all/4]).
:- meta_predicate run_all(:).
run_all([]).
run_all([G|Gs]) :-
call(G),
run_all(Gs).
run_all(M:Gs) :-
clpbn_reset_tables,
run_all(Gs,M).
run_all([],_).
run_all([G|Gs],M) :-
( call(M:G) -> true ; writeln(bad:M:G), break),
run_all(Gs,M).
clpbn_vars(Vs,BVars) :-
get_clpbn_vars(Vs,CVs),
keysort(CVs,KVs),
merge_vars(KVs,BVars).
get_clpbn_vars([],[]).
get_clpbn_vars([V|GVars],[K-V|CLPBNGVars]) :-
clpbn:get_atts(V, [key(K)]), !,
get_clpbn_vars(GVars,CLPBNGVars).
get_clpbn_vars([_|GVars],CLPBNGVars) :-
get_clpbn_vars(GVars,CLPBNGVars).
merge_vars([],[]).
merge_vars([K-V|KVs],[V|BVars]) :-
get_var_has_same_key(KVs,K,V,KVs0),
merge_vars(KVs0,BVars).
get_var_has_same_key([K-V|KVs],K,V,KVs0) :- !,
get_var_has_same_key(KVs,K,V,KVs0).
get_var_has_same_key(KVs,_,_,KVs).
soften_sample(T0,T) :-
clpbn_flag(parameter_softening, Soften),
soften_sample(Soften, T0, T).
soften_sample(no,T,T).
soften_sample(m_estimate(M), T0, T) :-
matrix_agg_cols(T0,+,Cols),
matrix_op_to_all(Cols, *, M, R),
matrix_op_to_cols(T0,R,+,T).
soften_sample(auto_m, T0,T) :-
matrix_agg_cols(T0,+,Cols),
matrix_sum(Cols,TotM),
M is sqrt(TotM),
matrix_op_to_all(Cols, *, M, R),
matrix_op_to_cols(T0,R,+,T).
soften_sample(laplace,T0,T) :-
matrix_op_to_all(T0, +, 1, T).
normalise_counts(MAT,NMAT) :-
matrix_agg_lines(MAT, +, Sum),
matrix_op_to_lines(MAT, Sum, /, NMAT).
compute_likelihood(Table0, NewTable, DeltaLik) :-
matrix_to_logs(NewTable, Logs),
matrix_to_list(Table0,L1),
matrix_to_list(Logs,L2),
sum_prods(L1,L2,0,DeltaLik).
sum_prods([],[],DeltaLik,DeltaLik).
sum_prods([0.0|L1],[_|L2],DeltaLik0,DeltaLik) :- !,
sum_prods(L1,L2,DeltaLik0,DeltaLik).
sum_prods([Count|L1],[Log|L2],DeltaLik0,DeltaLik) :- !,
DeltaLik1 is DeltaLik0+Count*Log,
sum_prods(L1,L2,DeltaLik1,DeltaLik).
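% As a small worked example (hypothetical numbers): for counts [2.0,0.0,1.0]
% and new parameters [0.5,0.25,0.25], the contribution is
% 2*log(0.5) + 1*log(0.25); the 0.0 count is skipped by the dedicated
% sum_prods/4 clause above.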


@@ -1,113 +0,0 @@
%
% Maximum likelihood estimator and friends.
%
%
% This assumes we have a single big example.
%
:- module(clpbn_mle, [learn_parameters/2,
learn_parameters/3,
parameters_from_evidence/3]).
:- use_module(library('clpbn')).
:- use_module(library('clpbn/learning/learn_utils'),
[run_all/1,
clpbn_vars/2,
normalise_counts/2,
soften_table/2,
normalise_counts/2]).
:- use_module(library('clpbn/dists'),
[empty_dist/2,
dist_new_table/2]).
:- use_module(library(matrix),
[matrix_inc/2]).
learn_parameters(Items, Tables) :-
learn_parameters(Items, Tables, []).
%
% full evidence learning
%
learn_parameters(Items, Tables, Extras) :-
run_all(Items),
attributes:all_attvars(AVars),
% sort and incorporate evidence
clpbn_vars(AVars, AllVars),
mk_sample(AllVars, Sample),
compute_tables(Extras, Sample, Tables).
parameters_from_evidence(AllVars, Tables, Extras) :-
mk_sample_from_evidence(AllVars, Sample),
compute_tables(Extras, Sample, Tables).
mk_sample_from_evidence(AllVars, SortedSample) :-
add_evidence2sample(AllVars, Sample),
msort(Sample, SortedSample).
mk_sample(AllVars, SortedSample) :-
add2sample(AllVars, Sample),
msort(Sample, SortedSample).
%
% assumes we have full data, meaning evidence for every variable
%
add2sample([], []).
add2sample([V|Vs],[val(Id,[Ev|EParents])|Vals]) :-
clpbn:get_atts(V, [evidence(Ev),dist(Id,Parents)]),
get_eparents(Parents, EParents),
add2sample(Vs, Vals).
get_eparents([P|Parents], [E|EParents]) :-
clpbn:get_atts(P, [evidence(E)]),
get_eparents(Parents, EParents).
get_eparents([], []).
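% For instance (illustrative values), a variable with evidence 2, distribution
% id 7 and two parents whose evidence values are 0 and 1 contributes the
% sample term val(7, [2,0,1]).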
%
% assumes we ignore variables without evidence or without evidence
% on a parent!
%
add_evidence2sample([], []).
add_evidence2sample([V|Vs],[val(Id,[Ev|EParents])|Vals]) :-
clpbn:get_atts(V, [evidence(Ev),dist(Id,Parents)]),
get_eveparents(Parents, EParents), !,
add_evidence2sample(Vs, Vals).
add_evidence2sample([_|Vs],Vals) :-
add_evidence2sample(Vs, Vals).
get_eveparents([P|Parents], [E|EParents]) :-
clpbn:get_atts(P, [evidence(E)]),
get_eparents(Parents, EParents).
get_eveparents([], []).
compute_tables(Parameters, Sample, NewTables) :-
estimator(Sample, Tables),
add_priors(Parameters, Tables, NewTables).
estimator([], []).
estimator([val(Id,Sample)|Samples], [NewDist|Tables]) :-
empty_dist(Id, NewTable),
id_samples(Id, Samples, IdSamples, MoreSamples),
mle([Sample|IdSamples], NewTable),
soften_table(NewTable, SoftenedTable),
normalise_counts(SoftenedTable, NewDist),
% replace matrix in distribution
dist_new_table(Id, NewDist),
estimator(MoreSamples, Tables).
id_samples(_, [], [], []).
id_samples(Id, [val(Id,Sample)|Samples], [Sample|IdSamples], MoreSamples) :- !,
id_samples(Id, Samples, IdSamples, MoreSamples).
id_samples(_, Samples, [], Samples).
mle([Sample|IdSamples], Table) :-
matrix_inc(Table, Sample),
mle(IdSamples, Table).
mle([], _).


@@ -179,6 +179,7 @@
AtomOutOfStackError = Yap_LookupAtom("out_of_stack_error");
AtomOutOfTrailError = Yap_LookupAtom("out_of_trail_error");
AtomOutput = Yap_LookupAtom("output");
AtomPrologCommonsDir = Yap_LookupAtom("prolog_commons_directory");
AtomPastEndOfStream = Yap_LookupAtom("past_end_of_stream");
AtomPermissionError = Yap_LookupAtom("permission_error");
AtomPi = Yap_LookupAtom("pi");


@@ -181,6 +181,7 @@
AtomOutOfStackError = AtomAdjust(AtomOutOfStackError);
AtomOutOfTrailError = AtomAdjust(AtomOutOfTrailError);
AtomOutput = AtomAdjust(AtomOutput);
AtomPrologCommonsDir = AtomAdjust(AtomPrologCommonsDir);
AtomPastEndOfStream = AtomAdjust(AtomPastEndOfStream);
AtomPermissionError = AtomAdjust(AtomPermissionError);
AtomPi = AtomAdjust(AtomPi);


@@ -364,6 +364,8 @@
#define AtomOutOfTrailError Yap_heap_regs->AtomOutOfTrailError_
Atom AtomOutput_;
#define AtomOutput Yap_heap_regs->AtomOutput_
Atom AtomPrologCommonsDir_;
#define AtomPrologCommonsDir Yap_heap_regs->AtomPrologCommonsDir_
Atom AtomPastEndOfStream_;
#define AtomPastEndOfStream Yap_heap_regs->AtomPastEndOfStream_
Atom AtomPermissionError_;


@@ -85,7 +85,7 @@ typedef struct canonical_dir * CanonicalDir; /* pl-os.c */
typedef struct on_halt * OnHalt; /* pl-os.c */
/* The GD global variable */
struct {
extern struct {
int io_initialised;
cleanup_status cleaning; /* Inside PL_cleanup() */
@@ -237,7 +237,7 @@ typedef struct PL_local_data {
#define features (LD->feature.mask)
PL_local_data_t lds;
extern PL_local_data_t lds;
#define exception_term (LD->exception.term)
@@ -265,20 +265,20 @@ PL_local_data_t lds;
#define TRY(goal) if ((goal) == FALSE) fail
atom_t source_file_name; /** source name of the current file that we are
extern atom_t source_file_name; /** source name of the current file that we are
consulting */
int source_line_no; /** guess.... */
extern int source_line_no; /** guess.... */
IOSTREAM * Suser_input;
IOSTREAM * Suser_output;
IOSTREAM * Suser_error;
IOSTREAM * Scurin; /* see/tell */
IOSTREAM * Scurout;
IOSTREAM * Sprotocol; /* protocolling */
extern IOSTREAM * Suser_input;
extern IOSTREAM * Suser_output;
extern IOSTREAM * Suser_error;
extern IOSTREAM * Scurin; /* see/tell */
extern IOSTREAM * Scurout;
extern IOSTREAM * Sprotocol; /* protocolling */
int fileerrors;
extern int fileerrors;
int ttymode;
extern int ttymode;
#define CHARESCAPE_FEATURE 0x00001 /* handle \ in atoms */
#define GC_FEATURE 0x00002 /* do GC */


@@ -2882,7 +2882,7 @@ findExecutable(const char *av0, char *buffer)
#endif /*__WINDOWS__*/
#ifdef __unix__
#if defined(__unix__) || defined(__APPLE__)
static char *
okToExec(const char *s)
{ struct stat stbuff;


@@ -249,6 +249,7 @@ extern bool SameFile(const char *f1, const char *f2);
extern bool RenameFile(const char *old, const char *new);
extern bool ChDir(const char *path);
extern bool MarkExecutable(const char *name);
extern char *PrologPath(const char *ospath, char *path, size_t len);

@@ -1 +0,0 @@
Subproject commit 6f4f1a9f182a6ea62de4683ea9f5923ccf20257f


@@ -1,159 +0,0 @@
#ifndef __MYDDAS_H__
#define __MYDDAS_H__
#include "config.h"
#include <stdio.h>
#ifdef MYDDAS_ODBC
#include <sql.h>
#endif
#ifdef MYDDAS_MYSQL
#include <mysql/mysql.h>
#endif
/* MYDDAS TYPES */
/* sizeof(MyddasPointer) is equal to the size of an integer on the given architecture */
/* sizeof(MyddasInt32) = 4 (Always) */
/* sizeof(MyddasUInt32) = 4 (Always) */
#if SIZEOF_INT_P==4
# if SIZEOF_INT==4
/* */ typedef int MyddasInt;
/* */ typedef unsigned int MyddasUInt;
/* */ typedef unsigned int MyddasPointer;
/* */ typedef int MyddasInt32;
/* */ typedef unsigned int MyddasUInt32;
# elif SIZEOF_LONG_INT==4
/* */ typedef long int MyddasInt;
/* */ typedef unsigned long int MyddasUInt;
/* */ typedef unsigned long int MyddasPointer;
/* */ typedef long int MyddasInt32;
/* */ typedef unsigned long int MyddasUInt32;
# else
# error MYDDAS requires integer types of the same size as a pointer
# endif
# if SIZEOF_SHORT_INT==2
/* */ typedef short int MyddasSInt;
/* */ typedef unsigned short int MyddasUSInt;
# else
# error MYDDAS requires integer types half the size of a pointer
# endif
# if SIZEOF_LONG_INT==8
/* */ typedef long int MyddasLInt;
/* */ typedef unsigned long int MyddasULInt;
# elif SIZEOF_LONG_LONG_INT==8
/* */ typedef long long int MyddasLInt;
/* */ typedef unsigned long long int MyddasULInt;
# else
# error MYDDAS requires integer types double the size of a pointer
# endif
#elif SIZEOF_INT_P==8
# if SIZEOF_INT==8
/* */ typedef int MyddasInt;
/* */ typedef unsigned int MyddasUInt;
/* */ typedef int MyddasLInt;
/* */ typedef unsigned int MyddasULInt;
/* */ typedef unsigned int MyddasPointer;
# elif SIZEOF_LONG_INT==8
/* */ typedef long int MyddasInt;
/* */ typedef unsigned long int MyddasUInt;
/* */ typedef int MyddasLInt;
/* */ typedef unsigned int MyddasULInt;
/* */ typedef unsigned long int MyddasPointer;
# elif SIZEOF_LONG_LONG_INT==8
/* */ typedef long long int MyddasInt;
/* */ typedef unsigned long long int MyddasUInt;
/* */ typedef int MyddasLInt;
/* */ typedef unsigned int MyddasULInt;
/* */ typedef unsigned long long int MyddasPointer;
# else
# error MYDDAS requires integer types of the same size as a pointer
# endif
# if SIZEOF_SHORT_INT==4
/* */ typedef short int MyddasSInt;
/* */ typedef unsigned short int MyddasUSInt;
/* */ typedef short int MyddasInt32;
/* */ typedef unsigned short int MyddasUInt32;
# elif SIZEOF_INT==4
/* */ typedef int MyddasSInt;
/* */ typedef unsigned int MyddasUSInt;
/* */ typedef int MyddasInt32;
/* */ typedef unsigned int MyddasUInt32;
# else
# error MYDDAS requires integer types half the size of a pointer
# endif
#else
# error MYDDAS requires pointers of size 4 or 8
#endif
/* Move this to myddas_statistics.h? */
#ifdef MYDDAS_STATS
#include <time.h>
#include <sys/time.h>
#endif
typedef struct myddas_global *MYDDAS_GLOBAL;
typedef struct myddas_util_query *MYDDAS_UTIL_QUERY;
typedef struct myddas_list_connection *MYDDAS_UTIL_CONNECTION;
typedef struct myddas_list_preds *MYDDAS_UTIL_PREDICATE;
#ifdef MYDDAS_STATS
typedef struct myddas_stats_time_struct *MYDDAS_STATS_TIME;
typedef struct myddas_global_stats *MYDDAS_GLOBAL_STATS;
typedef struct myddas_stats_struct *MYDDAS_STATS_STRUCT;
#endif
#ifdef DEBUG
#define MYDDAS_MALLOC(POINTER,TYPE) \
{ \
POINTER = (TYPE *) malloc(sizeof(TYPE)); \
Yap_REGS.MYDDAS_GLOBAL_POINTER->memory_allocated+=sizeof(TYPE); \
/*printf ("MALLOC %p %s %d\n",POINTER,__FILE__,__LINE__);*/ \
Yap_REGS.MYDDAS_GLOBAL_POINTER->malloc_called++; \
}
#else
#define MYDDAS_MALLOC(POINTER,TYPE) \
{ \
POINTER = (TYPE *) malloc(sizeof(TYPE)); \
}
#endif
#ifdef DEBUG
#define MYDDAS_FREE(POINTER,TYPE) \
{ \
Yap_REGS.MYDDAS_GLOBAL_POINTER->memory_freed+=sizeof(TYPE); \
Yap_REGS.MYDDAS_GLOBAL_POINTER->free_called++; \
/*printf ("FREE %p %s %d\n",POINTER,__FILE__,__LINE__);*/ \
free(POINTER); \
}
#else
#define MYDDAS_FREE(POINTER,TYPE) \
{ \
free(POINTER); \
}
#endif
#ifdef DEBUG
#define MYDDAS_MEMORY_MALLOC_NR(NUMBER) \
NUMBER = Yap_REGS.MYDDAS_GLOBAL_POINTER->malloc_called;
#define MYDDAS_MEMORY_MALLOC_SIZE(NUMBER) \
NUMBER = Yap_REGS.MYDDAS_GLOBAL_POINTER->memory_allocated;
#define MYDDAS_MEMORY_FREE_NR(NUMBER) \
NUMBER = Yap_REGS.MYDDAS_GLOBAL_POINTER->free_called;
#define MYDDAS_MEMORY_FREE_SIZE(NUMBER) \
NUMBER = Yap_REGS.MYDDAS_GLOBAL_POINTER->memory_freed;
#endif
#endif /*__MYDDAS_H__*/


@@ -1,112 +0,0 @@
#if defined MYDDAS_ODBC || defined MYDDAS_MYSQL
#include "Yap.h"
#include <stdio.h>
#include <stdlib.h>
#include "myddas.h"
#include "myddas_structs.h"
#ifdef MYDDAS_STATS
#include "myddas_statistics.h"
#endif
MYDDAS_GLOBAL
myddas_init_initialize_myddas(void){
MYDDAS_GLOBAL global = NULL;
/* We cannot call MYDDAS_MALLOC here because the global
   register isn't yet initialized */
global = (MYDDAS_GLOBAL) malloc (sizeof(struct myddas_global));
#ifdef DEBUG
printf ("MALLOC %p %s %d\n",global,__FILE__,__LINE__);
#endif
global->myddas_top_connections = NULL;
#ifdef MYDDAS_TOP_LEVEL
global->myddas_top_level_connection = NULL;
#endif
#ifdef MYDDAS_STATS
global->myddas_statistics = (MYDDAS_GLOBAL_STATS) malloc (sizeof(struct myddas_global_stats));
#ifdef DEBUG
printf ("MALLOC %p %s %d\n",global->myddas_statistics,__FILE__,__LINE__);
#endif
global->myddas_statistics->stats = NULL;
#endif
#ifdef DEBUG
/* We first malloc for this struct and the stats struct */
#ifdef MYDDAS_STATS
global->malloc_called = 2;
global->memory_allocated = sizeof(struct myddas_global) + sizeof(struct myddas_global_stats);
#else
global->malloc_called = 1;
global->memory_allocated = sizeof(struct myddas_global);
#endif /* MYDDAS_STATS */
global->free_called = 0;
global->memory_freed = 0;
#endif
return global;
}
/* Inserts the new node on the front of the list */
MYDDAS_UTIL_CONNECTION
myddas_init_initialize_connection(void *conn,void *enviromment,
MYDDAS_UTIL_CONNECTION next){
MYDDAS_UTIL_CONNECTION new = NULL;
MYDDAS_MALLOC(new,struct myddas_list_connection);
if (new == NULL)
{
return NULL;
}
new->predicates=NULL;
new->connection=conn;
new->odbc_enviromment=enviromment;
/* It saves n queries, doing at once n+1 queries */
new->total_number_queries=0; //Default
new->actual_number_queries=0;
new->queries = NULL;
/* List integrity */
new->next=next;
new->previous=NULL;
/* If there's already at least one node
on the list */
if (next != NULL)
next->previous=new;
#ifdef MYDDAS_STATS
new->stats = NULL;
new->stats = myddas_stats_initialize_connection_stats();
#endif
return new;
}
MYDDAS_UTIL_PREDICATE
myddas_init_initialize_predicate(char *pred_name, int pred_arity,
char *pred_module, MYDDAS_UTIL_PREDICATE next){
MYDDAS_UTIL_PREDICATE new = NULL;
MYDDAS_MALLOC(new,struct myddas_list_preds);
if (new == NULL)
{
return NULL;
}
new->pred_name=pred_name;
new->pred_arity=pred_arity;
new->pred_module=pred_module;
/* List integrity */
new->next=next;
new->previous=NULL;
/* If there's already at least one node
on the list */
if (next != NULL)
next->previous=new;
return new;
}
#endif


@@ -1,725 +0,0 @@
/*************************************************************************
* *
* YAP Prolog *
* *
* Yap Prolog was developed at NCCUP - Universidade do Porto *
* *
* Copyright L.Damas, V.S.Costa and Universidade do Porto 1985-1997 *
* *
**************************************************************************
* *
* File: myddas_mysql.c *
* Last rev: 22/03/05 *
* mods: *
* comments: Predicates for communicating with a mysql database system *
* *
*************************************************************************/
#if defined MYDDAS_MYSQL
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <mysql/mysql.h>
#include "Yap.h"
#include "Yatom.h"
#include "cut_c.h"
#include "myddas.h"
#ifdef MYDDAS_STATS
#include "myddas_structs.h"
#include "myddas_statistics.h"
#endif
#include "myddas_wkb2prolog.h"
#define IS_SQL_INT(FIELD) FIELD == FIELD_TYPE_INT24 || \
FIELD == FIELD_TYPE_LONG || \
FIELD == FIELD_TYPE_LONGLONG || \
FIELD == FIELD_TYPE_SHORT || \
FIELD == FIELD_TYPE_TINY
#define IS_SQL_FLOAT(FIELD) FIELD == FIELD_TYPE_DECIMAL || \
FIELD == FIELD_TYPE_DOUBLE || \
FIELD == FIELD_TYPE_FLOAT
#define IS_SQL_GEOMETRY(FIELD) FIELD == FIELD_TYPE_GEOMETRY
static Int null_id = 0;
STATIC_PROTO(Int c_db_my_connect,(void));
STATIC_PROTO(Int c_db_my_disconnect,(void));
STATIC_PROTO(Int c_db_my_number_of_fields,(void));
STATIC_PROTO(Int c_db_my_get_attributes_types,(void));
STATIC_PROTO(Int c_db_my_query,(void));
STATIC_PROTO(Int c_db_my_table_write,(void));
STATIC_PROTO(Int c_db_my_row,(void));
STATIC_PROTO(Int c_db_my_row_cut,(void));
STATIC_PROTO(Int c_db_my_get_fields_properties,(void));
STATIC_PROTO(Int c_db_my_get_next_result_set,(void));
STATIC_PROTO(Int c_db_my_get_database,(void));
STATIC_PROTO(Int c_db_my_change_database,(void));
void Yap_InitMYDDAS_MySQLPreds(void)
{
/* db_connect: Host x User x Passwd x Database x Port x Socket x Connection */
Yap_InitCPred("c_db_my_connect", 7, c_db_my_connect, 0);
/* db_number_of_fields: Relation x Connection x NumberOfFields */
Yap_InitCPred("c_db_my_number_of_fields",3, c_db_my_number_of_fields, 0);
/* db_get_attributes_types: Relation x TypesList */
Yap_InitCPred("c_db_my_get_attributes_types", 3, c_db_my_get_attributes_types, 0);
/* db_query: SQLQuery x ResultSet x Connection x Mode x Arity */
Yap_InitCPred("c_db_my_query", 5, c_db_my_query, 0);
/* db_disconnect: Connection */
Yap_InitCPred("c_db_my_disconnect", 1,c_db_my_disconnect, 0);
/* db_table_write: Result Set */
Yap_InitCPred("c_db_my_table_write", 1, c_db_my_table_write, 0);
/* db_get_fields_properties: PredName x Connnection x PropertiesList*/
Yap_InitCPred("c_db_my_get_fields_properties",3,c_db_my_get_fields_properties,0);
Yap_InitCPred("c_db_my_get_next_result_set",2,c_db_my_get_next_result_set,0);
/* c_db_my_get_database: Connnection x DataBaseName */
Yap_InitCPred("c_db_my_get_database",2,c_db_my_get_database,0);
/* c_db_my_change_database: Connnection x DataBaseName */
Yap_InitCPred("c_db_my_change_database",2,c_db_my_change_database,0);
}
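/* A hedged sketch of how the Prolog side might call the connect predicate
   directly (values are illustrative; the socket argument may be left unbound,
   in which case the default socket is used):

     ?- c_db_my_connect(localhost, myuser, mypass, mydb, 0, _Socket, Conn).

   On success, Conn is unified with the integer-encoded MYSQL connection
   handle, which is also registered in the MYDDAS connection list. */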
void Yap_InitBackMYDDAS_MySQLPreds(void)
{
/* db_row: ResultSet x Arity x ListOfArgs */
Yap_InitCPredBackCut("c_db_my_row", 3, sizeof(Int),
c_db_my_row,
c_db_my_row,
c_db_my_row_cut, 0);
}
static Int
c_db_my_connect(void) {
Term arg_host = Deref(ARG1);
Term arg_user = Deref(ARG2);
Term arg_passwd = Deref(ARG3);
Term arg_database = Deref(ARG4);
Term arg_port = Deref(ARG5);
Term arg_socket = Deref(ARG6);
Term arg_conn = Deref(ARG7);
MYSQL *conn;
MYDDAS_UTIL_CONNECTION new = NULL;
char *host = AtomName(AtomOfTerm(arg_host));
char *user = AtomName(AtomOfTerm(arg_user));
char *passwd = AtomName(AtomOfTerm(arg_passwd));
char *database = AtomName(AtomOfTerm(arg_database));
Int port = IntegerOfTerm(arg_port);
char *socket;
if (IsNonVarTerm(arg_socket))
socket = AtomName(AtomOfTerm(arg_socket));
else
socket = NULL;
conn = mysql_init(NULL);
if (conn == NULL) {
#ifdef DEBUG
printf("ERROR: ** c_db_my_connect ** error on init\n");
#endif
return FALSE;
}
if (mysql_real_connect(conn, host, user, passwd, database, port, socket, CLIENT_MULTI_STATEMENTS) == NULL) {
#ifdef DEBUG
printf("ERROR: ** c_db_my_connect ** error on connect\n");
#endif
return FALSE;
}
if (!Yap_unify(arg_conn, MkIntegerTerm((Int)conn)))
return FALSE;
else
{
/* Create a new node in the list of connections */
new = myddas_util_add_connection(conn,NULL);
if (new == NULL){
#ifdef DEBUG
printf("ERROR: ** c_db_my_connect ** Error allocating memory\n");
#endif
return FALSE;
}
return TRUE;
}
}
/* db_query: SQLQuery x ResultSet x Connection */
static Int
c_db_my_query(void) {
Term arg_sql_query = Deref(ARG1);
Term arg_result_set = Deref(ARG2);
Term arg_conn = Deref(ARG3);
Term arg_mode = Deref(ARG4);
Term arg_arity = Deref(ARG5);
char *sql = AtomName(AtomOfTerm(arg_sql_query));
char *mode = AtomName(AtomOfTerm(arg_mode));
MYSQL *conn = (MYSQL *) (IntegerOfTerm(arg_conn));
MYSQL_RES *res_set;
MyddasInt length=strlen(sql);
#ifdef MYDDAS_STATS
MYDDAS_UTIL_CONNECTION node = myddas_util_search_connection(conn);
MyddasULInt count = 0;
/* Count the number of querys made to the server */
MyddasULInt number_querys;
MYDDAS_STATS_CON_GET_NUMBER_QUERIES_MADE(node,number_querys);
MYDDAS_STATS_CON_SET_NUMBER_QUERIES_MADE(node,++number_querys);
MYDDAS_STATS_CON_GET_NUMBER_QUERIES_MADE_COUNT(node,count);
MYDDAS_STATS_CON_SET_NUMBER_QUERIES_MADE_COUNT(node,++count);
/* Measure time spent by the MySQL Server
processing the SQL Query */
MYDDAS_STATS_TIME start,end,total_time,diff;
start = myddas_stats_walltime();
#endif
/* Send query to server and process it */
if (mysql_real_query(conn, sql, length) != 0)
{
#ifdef DEBUG
printf("ERROR: **c_db_my_query** Error on query! %s\n",sql);
#endif
return FALSE;
}
#ifdef MYDDAS_STATS
/* Measure time spent by the MySQL Server
processing the SQL Query */
end = myddas_stats_walltime();
MYDDAS_STATS_INITIALIZE_TIME_STRUCT(diff,time_copy);
myddas_stats_subtract_time(diff,end,start);
diff = myddas_stats_time_copy_to_final(diff);
MYDDAS_FREE(end,struct myddas_stats_time_struct);
MYDDAS_FREE(start,struct myddas_stats_time_struct);
MYDDAS_STATS_CON_GET_TOTAL_TIME_DBSERVER(node,total_time);
/* Automatically updates the MYDDAS_STRUCTURE */
myddas_stats_add_time(total_time,diff,total_time);
MYDDAS_STATS_CON_GET_TOTAL_TIME_DBSERVER_COUNT(node,count);
MYDDAS_STATS_CON_SET_TOTAL_TIME_DBSERVER_COUNT(node,++count);
MYDDAS_STATS_TIME time = NULL;
MYDDAS_STATS_CON_GET_LAST_TIME_DBSERVER(node,time);
myddas_stats_move_time(diff,time);
MYDDAS_STATS_CON_GET_LAST_TIME_DBSERVER_COUNT(node,count);
MYDDAS_STATS_CON_SET_LAST_TIME_DBSERVER_COUNT(node,++count);
#endif
/* store the tuples on the client side */
if (strcmp(mode,"store_result")!=0) // any mode other than store_result: stream rows with mysql_use_result
res_set = mysql_use_result(conn);
else{
#ifdef MYDDAS_STATS
/* Measure time spent by the MySQL Server
transferring the result of the last query
back to the client */
start = myddas_stats_walltime();
#endif
res_set = mysql_store_result(conn);
#ifdef MYDDAS_STATS
/* Measure time spent by the MySQL Server
transferring the result of the last query
back to the client */
end = myddas_stats_walltime();
MYDDAS_STATS_INITIALIZE_TIME_STRUCT(diff,time_copy);
myddas_stats_subtract_time(diff,end,start);
diff = myddas_stats_time_copy_to_final(diff);
MYDDAS_FREE(end,struct myddas_stats_time_struct);
MYDDAS_FREE(start,struct myddas_stats_time_struct);
MYDDAS_STATS_CON_GET_TOTAL_TIME_TRANSFERING(node,total_time);
/* Automatically updates the MYDDAS_STRUCTURE */
myddas_stats_add_time(total_time,diff,total_time);
MYDDAS_STATS_CON_GET_TOTAL_TIME_TRANSFERING_COUNT(node,count);
MYDDAS_STATS_CON_SET_TOTAL_TIME_TRANSFERING_COUNT(node,++count);
time = NULL;
MYDDAS_STATS_CON_GET_LAST_TIME_TRANSFERING(node,time);
MYDDAS_STATS_CON_GET_LAST_TIME_TRANSFERING_COUNT(node,count);
MYDDAS_STATS_CON_SET_LAST_TIME_TRANSFERING_COUNT(node,++count);
myddas_stats_move_time(diff,time);
/* Measure the number of Rows returned from the server */
if (res_set != NULL)
{
/* With an INSERT statement, mysql_(use or store)_result()
returns a NULL pointer*/
/* This only works if we use mysql_store_result */
MyddasUInt numberRows = mysql_num_rows(res_set);
MyddasUInt rows;
MYDDAS_STATS_CON_GET_TOTAL_ROWS(node,rows);
numberRows = numberRows + rows;
MYDDAS_STATS_CON_SET_TOTAL_ROWS(node,numberRows);
MYDDAS_STATS_CON_GET_TOTAL_ROWS_COUNT(node,count);
MYDDAS_STATS_CON_SET_TOTAL_ROWS_COUNT(node,++count);
/* Calculate the amount of data sent by the server */
MyddasUInt total,number_fields = mysql_num_fields(res_set);
MYSQL_ROW row;
MyddasULInt i;
total=0;
while ((row = mysql_fetch_row(res_set)) != NULL){
mysql_field_seek(res_set,0);
for(i=0;i<number_fields;i++){
if (row[i] != NULL)
total = total + strlen(row[i]);
}
}
MYDDAS_STATS_CON_SET_LAST_BYTES_TRANSFERING_FROM_DBSERVER(node,total);
MYDDAS_STATS_CON_GET_LAST_BYTES_TRANSFERING_FROM_DBSERVER_COUNT(node,count);
MYDDAS_STATS_CON_SET_LAST_BYTES_TRANSFERING_FROM_DBSERVER_COUNT(node,++count);
MyddasUInt bytes = 0;
MYDDAS_STATS_CON_GET_TOTAL_BYTES_TRANSFERING_FROM_DBSERVER(node,bytes);
total = total + bytes;
MYDDAS_STATS_CON_SET_TOTAL_BYTES_TRANSFERING_FROM_DBSERVER(node,total);
MYDDAS_STATS_CON_GET_TOTAL_BYTES_TRANSFERING_FROM_DBSERVER_COUNT(node,count);
MYDDAS_STATS_CON_SET_TOTAL_BYTES_TRANSFERING_FROM_DBSERVER_COUNT(node,++count);
mysql_data_seek(res_set,0);
}
#endif
}
if (res_set == NULL)
{
//INSERT statements don't return any res_set
if (mysql_field_count(conn) == 0)
return TRUE;
#ifdef DEBUG
printf("Empty Query!\n");
#endif
return FALSE;
}
if (!Yap_unify(arg_arity, MkIntegerTerm(mysql_num_fields(res_set)))){
return FALSE;
}
if (!Yap_unify(arg_result_set, MkIntegerTerm((Int) res_set)))
{
mysql_free_result(res_set);
return FALSE;
}
else
{
return TRUE;
}
}
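/* The Mode argument selects how rows reach the client: any atom other than
   store_result streams rows on demand through mysql_use_result, while
   store_result fetches the whole result set at once with mysql_store_result
   (which is what makes mysql_num_rows and the MYDDAS_STATS row/byte counting
   above possible).  An illustrative, assumed goal:
     c_db_my_query('SELECT * FROM t', ResultSet, Con, store_result, Arity) */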
static Int
c_db_my_number_of_fields(void) {
Term arg_relation = Deref(ARG1);
Term arg_conn = Deref(ARG2);
Term arg_fields = Deref(ARG3);
char *relation = AtomName(AtomOfTerm(arg_relation));
MYSQL *conn = (MYSQL *) (IntegerOfTerm(arg_conn));
char sql[256];
MYSQL_RES *res_set;
sprintf(sql,"DESCRIBE `%s`",relation);
/* execute the SQL query */
if (mysql_query(conn, sql) != 0)
{
#ifdef DEBUG
printf("ERROR: **c_db_my_number_of_fields** Error on the query! %s\n",sql);
#endif
return FALSE;
}
/* store the tuples on the client side */
if ((res_set = mysql_store_result(conn)) == NULL)
{
#ifdef DEBUG
printf("ERROR: **c_db_my_number_of_fields** Error storing the query! %s\n",sql);
#endif
return FALSE;
}
if (!Yap_unify(arg_fields, MkIntegerTerm(mysql_num_rows(res_set)))){
mysql_free_result(res_set);
return FALSE;
}
mysql_free_result(res_set);
return TRUE;
}
/* db_get_attributes_types: RelName x Connection -> TypesList */
static Int
c_db_my_get_attributes_types(void) {
Term arg_relation = Deref(ARG1);
Term arg_conn = Deref(ARG2);
Term arg_types_list = Deref(ARG3);
char *relation = AtomName(AtomOfTerm(arg_relation));
MYSQL *conn = (MYSQL *) IntegerOfTerm(arg_conn);
char sql[256];
MYSQL_RES *res_set;
MYSQL_ROW row;
Term head, list;
sprintf(sql,"DESCRIBE `%s`",relation);
Int length = strlen(sql);
/* execute the SQL query */
if (mysql_real_query(conn, sql, length) != 0)
{
#ifdef DEBUG
printf("Error on the query! %s\n",sql);
#endif
return FALSE;
}
/* store the tuples on the client side */
if ((res_set = mysql_store_result(conn)) == NULL)
{
#ifdef DEBUG
printf("Empty query!\n");
#endif
return FALSE;
}
list = arg_types_list;
while ((row = mysql_fetch_row(res_set)) != NULL)
{
head = HeadOfTerm(list);
Yap_unify(head, MkAtomTerm(Yap_LookupAtom(row[0])));
list = TailOfTerm(list);
head = HeadOfTerm(list);
list = TailOfTerm(list);
if (strncmp(row[1], "smallint",8) == 0 || strncmp(row[1],"int",3) == 0 ||
strncmp(row[1], "mediumint",9) == 0 || strncmp(row[1], "tinyint",7) == 0 ||
strncmp(row[1], "bigint",6) == 0 || strcmp(row[1], "year") == 0)
Yap_unify(head, MkAtomTerm(Yap_LookupAtom("integer")));
else if (strcmp(row[1], "float") == 0 || strncmp(row[1], "double",6) == 0
|| strcmp(row[1], "real") == 0)
Yap_unify(head, MkAtomTerm(Yap_LookupAtom("real")));
else Yap_unify(head, MkAtomTerm(Yap_LookupAtom("string")));
}
mysql_free_result(res_set);
return TRUE;
}
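/* The TypesList argument is consumed pairwise: for each field one element is
   unified with the field name and the next with one of the atoms integer,
   real or string, so the caller is assumed to supply a list of
   2 * NumberOfFields fresh variables (presumably sized with
   c_db_my_number_of_fields):
     [Name1, Type1, Name2, Type2, ...] */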
/* db_disconnect */
static Int
c_db_my_disconnect(void) {
Term arg_conn = Deref(ARG1);
MYSQL *conn = (MYSQL *) IntegerOfTerm(arg_conn);
if ((myddas_util_search_connection(conn)) != NULL)
{
myddas_util_delete_connection(conn);
mysql_close(conn);
return TRUE;
}
else
{
return FALSE;
}
}
/* db_table_write: Result Set */
static Int
c_db_my_table_write(void) {
Term arg_res_set = Deref(ARG1);
MYSQL_RES *res_set = (MYSQL_RES *) IntegerOfTerm(arg_res_set);
myddas_util_table_write(res_set);
mysql_free_result(res_set);
return TRUE;
}
static Int
c_db_my_row_cut(void) {
MYSQL_RES *mysql_res=NULL;
mysql_res = (MYSQL_RES *) IntegerOfTerm(EXTRA_CBACK_CUT_ARG(Term,1));
mysql_free_result(mysql_res);
return TRUE;
}
/* db_row: ResultSet x Arity x ListOfArgs */
static Int
c_db_my_row(void) {
#ifdef MYDDAS_STATS
/* Measure time used by the */
/* c_db_my_row function */
MYDDAS_STATS_TIME start,end,total_time,diff;
MyddasULInt count = 0;
start = myddas_stats_walltime();
#endif
Term arg_result_set = Deref(ARG1);
Term arg_arity = Deref(ARG2);
Term arg_list_args = Deref(ARG3);
MYSQL_RES *res_set = (MYSQL_RES *) IntegerOfTerm(arg_result_set);
EXTRA_CBACK_ARG(3,1)=(CELL) MkIntegerTerm((Int)res_set);
MYSQL_ROW row;
MYSQL_FIELD *field;
Term head, list, null_atom[1];
Int i, arity;
arity = IntegerOfTerm(arg_arity);
while(TRUE)
{
if ((row = mysql_fetch_row(res_set)) != NULL)
{
mysql_field_seek(res_set,0);
list = arg_list_args;
for (i = 0; i < arity; i++)
{
/* Data type conversions are done here */
field = mysql_fetch_field(res_set);
head = HeadOfTerm(list);
list = TailOfTerm(list);
if (row[i] == NULL)
{
null_atom[0] = MkIntegerTerm(null_id++);
if (!Yap_unify(head, Yap_MkApplTerm(Yap_MkFunctor(Yap_LookupAtom("null"),1),1,null_atom)))
continue;
}
else
{
if (IS_SQL_INT(field->type))
{
if (!Yap_unify(head, MkIntegerTerm(atoi(row[i]))))
continue;
}
else if (IS_SQL_FLOAT(field->type))
{
if (!Yap_unify(head, MkFloatTerm(atof(row[i]))))
continue;
}
else if (IS_SQL_GEOMETRY(field->type))
{
if (!Yap_unify(head, wkb2prolog(row[i])))
continue;
}
else
{
if (!Yap_unify(head, MkAtomTerm(Yap_LookupAtom(row[i]))))
continue;
}
}
}
#ifdef MYDDAS_STATS
end = myddas_stats_walltime();
MYDDAS_STATS_INITIALIZE_TIME_STRUCT(diff,time_copy);
myddas_stats_subtract_time(diff,end,start);
diff = myddas_stats_time_copy_to_final(diff);
MYDDAS_FREE(end,struct myddas_stats_time_struct);
MYDDAS_FREE(start,struct myddas_stats_time_struct);
MYDDAS_STATS_GET_DB_ROW_FUNCTION(total_time);
myddas_stats_add_time(total_time,diff,total_time);
MYDDAS_STATS_GET_DB_ROW_FUNCTION_COUNT(count);
MYDDAS_STATS_SET_DB_ROW_FUNCTION_COUNT(++count);
MYDDAS_FREE(diff,struct myddas_stats_time_struct);
#endif /* MYDDAS_STATS */
return TRUE;
}
else
{
mysql_free_result(res_set);
#ifdef MYDDAS_STATS
end = myddas_stats_walltime();
MYDDAS_STATS_INITIALIZE_TIME_STRUCT(diff,time_copy);
myddas_stats_subtract_time(diff,end,start);
diff = myddas_stats_time_copy_to_final(diff);
MYDDAS_FREE(end,struct myddas_stats_time_struct);
MYDDAS_FREE(start,struct myddas_stats_time_struct);
MYDDAS_STATS_GET_DB_ROW_FUNCTION(total_time);
myddas_stats_add_time(total_time,diff,total_time);
MYDDAS_STATS_GET_DB_ROW_FUNCTION_COUNT(count);
MYDDAS_STATS_SET_DB_ROW_FUNCTION_COUNT(++count);
MYDDAS_FREE(diff,struct myddas_stats_time_struct);
#endif /* MYDDAS_STATS */
cut_fail(); /* This macro already does a return FALSE */
}
}
}
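/* NULL handling in c_db_my_row: a NULL column is not mapped to a fixed atom
   but to a term null(K), with K taken from the null_id counter, so two NULLs
   never unify with each other.  Non-NULL columns are converted with atoi for
   integer fields, atof for float fields, wkb2prolog for geometry columns,
   and are returned as atoms otherwise. */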
static Int
c_db_my_get_fields_properties(void) {
Term nome_relacao = Deref(ARG1);
Term arg_conn = Deref(ARG2);
Term fields_properties_list = Deref(ARG3);
Term head, list;
char *relacao = AtomName(AtomOfTerm(nome_relacao));
char sql[256];
Int num_fields,i;
MYSQL_FIELD *fields;
MYSQL_RES *res_set;
MYSQL *conn = (MYSQL *) (IntegerOfTerm(arg_conn));
/* LIMIT 0 -> We only need the meta information about the fields
to know their properties, we don't need the results of the
query*/
sprintf (sql,"SELECT * FROM `%s` LIMIT 0",relacao);
Int length=strlen(sql);
/* execute the SQL query */
if (mysql_real_query(conn, sql, length) != 0)
{
#ifdef DEBUG
printf("Error on the query! %s\n",sql);
#endif
return FALSE;
}
Functor functor = Yap_MkFunctor(Yap_LookupAtom("property"),4);
Term properties[4];
/* store the tuples on the client side */
/* we do not need the results themselves, only the res_set */
/* so that we can get the information through mysql_fetch_fields */
res_set = mysql_store_result(conn);
num_fields = mysql_num_fields(res_set);
fields = mysql_fetch_fields(res_set);
list = fields_properties_list;
for (i=0;i<num_fields;i++)
{
head = HeadOfTerm(list);
properties[0] = MkAtomTerm(Yap_LookupAtom(fields[i].name));
if (fields[i].flags & NOT_NULL_FLAG)
properties[1] = MkIntegerTerm(1); //Can't be NULL
else
properties[1] = MkIntegerTerm(0);
if (fields[i].flags & PRI_KEY_FLAG)
properties[2] = MkIntegerTerm(1); //It's a primary key
else
properties[2] = MkIntegerTerm(0);
if (fields[i].flags & AUTO_INCREMENT_FLAG)
properties[3] = MkIntegerTerm(1); //It's an auto_increment field
else
properties[3] = MkIntegerTerm(0);
list = TailOfTerm(list);
if (!Yap_unify(head, Yap_MkApplTerm(functor,4,properties))){
return FALSE;
}
}
mysql_free_result(res_set);
return TRUE;
}
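/* Each element of PropertiesList is unified with a term
     property(FieldName, NotNull, PrimaryKey, AutoIncrement)
   whose last three arguments are 1/0 flags derived from the MySQL field
   flags NOT_NULL_FLAG, PRI_KEY_FLAG and AUTO_INCREMENT_FLAG; the caller is
   assumed to pass one list element per field of the relation. */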
/* c_db_my_get_next_result_set: Connection * NextResSet */
static Int
c_db_my_get_next_result_set(void) {
Term arg_conn = Deref(ARG1);
Term arg_next_res_set = Deref(ARG2);
MYSQL *conn = (MYSQL *) (IntegerOfTerm(arg_conn));
MYSQL_RES *res_set=NULL;
if (mysql_next_result(conn) == 0){
res_set = mysql_store_result(conn);
Yap_unify(arg_next_res_set, MkIntegerTerm((Int) res_set));
}
return TRUE;
}
static Int
c_db_my_get_database(void) {
Term arg_con = Deref(ARG1);
Term arg_database = Deref(ARG2);
MYSQL *con = (MYSQL *) (IntegerOfTerm(arg_con));
if (!Yap_unify(arg_database,MkAtomTerm(Yap_LookupAtom(con->db))))
return FALSE;
return TRUE;
}
static Int
c_db_my_change_database(void) {
Term arg_con = Deref(ARG1);
Term arg_database = Deref(ARG2);
MYSQL *con = (MYSQL *) (IntegerOfTerm(arg_con));
char *database = AtomName(AtomOfTerm(arg_database));
if (mysql_select_db(con,database)!=0)
return FALSE;
return TRUE;
}
#endif /* MYDDAS_MYSQL */

View File

@ -1,745 +0,0 @@
/*************************************************************************
* *
* YAP Prolog *
* *
* Yap Prolog was developed at NCCUP - Universidade do Porto *
* *
* Copyright L.Damas, V.S.Costa and Universidade do Porto 1985-1997 *
* *
**************************************************************************
* *
* File: myddas_odbc.c *
* Last rev: 22/03/05 *
* mods: *
* comments: Predicates for communicating with ODBC drivers *
* *
*************************************************************************/
#if defined MYDDAS_ODBC && defined CUT_C
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include "Yap.h"
#include "Yatom.h"
#include "myddas.h"
#include "cut_c.h"
#include <sql.h>
#include <sqlucode.h>
static Int null_id = 0;
STATIC_PROTO(Int c_db_odbc_connect,(void));
STATIC_PROTO(Int c_db_odbc_disconnect,(void));
STATIC_PROTO(Int c_db_odbc_number_of_fields,(void));
STATIC_PROTO(Int c_db_odbc_get_attributes_types,(void));
STATIC_PROTO(Int c_db_odbc_query,(void));
STATIC_PROTO(Int c_db_odbc_row,(void));
STATIC_PROTO(Int c_db_odbc_row_cut,(void));
STATIC_PROTO(Int c_db_odbc_get_fields_properties,(void));
STATIC_PROTO(Int c_db_odbc_number_of_fields_in_query,(void));
#define SQLALLOCHANDLE(A,B,C,print) \
{ \
SQLRETURN retcode; \
retcode = SQLAllocHandle(A,B,C); \
if (retcode != SQL_SUCCESS && retcode != SQL_SUCCESS_WITH_INFO) \
{ \
printf("error in SQLAllocHandle %s\n",print); \
return FALSE; \
} \
}
#define SQLSETENVATTR(A,B,C,D,print) \
{ \
SQLRETURN retcode; \
retcode = SQLSetEnvAttr(A,B,C,D); \
if (retcode != SQL_SUCCESS && retcode != SQL_SUCCESS_WITH_INFO) \
{ \
printf("error in SQLSetEnvAttr %s\n",print); \
return FALSE; \
} \
}
#define SQLCONNECT(A,B,C,D,E,F,G,print) \
{ \
SQLRETURN retcode; \
retcode = SQLConnect(A,B,C,D,E,F,G); \
if (retcode != SQL_SUCCESS && retcode != SQL_SUCCESS_WITH_INFO) \
{ \
printf("error in SQLConnect %s\n",print); \
return FALSE; \
} \
}
#define SQLEXECDIRECT(A,B,C,print) \
{ \
SQLRETURN retcode; \
retcode = SQLExecDirect(A,B,C); \
if (retcode != SQL_SUCCESS && retcode != SQL_SUCCESS_WITH_INFO) \
{ \
printf("error in SQLExecDirect %s\n",print); \
return FALSE; \
} \
}
#define SQLDESCRIBECOL(A,B,C,D,E,F,G,H,I,print) \
{ \
SQLRETURN retcode; \
retcode = SQLDescribeCol(A,B,C,D,E,F,G,H,I); \
if (retcode != SQL_SUCCESS && retcode != SQL_SUCCESS_WITH_INFO) \
{ \
printf("error in SQLDescribeCol %s\n",print); \
return FALSE; \
} \
}
#define SQLSETCONNECTATTR(A,B,C,D,print) \
{ \
SQLRETURN retcode; \
retcode = SQLSetConnectAttr(A,B,C,D); \
if (retcode != SQL_SUCCESS && retcode != SQL_SUCCESS_WITH_INFO) \
{ \
printf("error in SQLSetConnectAttr %s\n",print); \
return FALSE; \
} \
}
#define SQLBINDCOL(A,B,C,D,E,F,print) \
{ \
SQLRETURN retcode; \
retcode = SQLBindCol(A,B,C,D,E,F); \
if (retcode != SQL_SUCCESS && retcode != SQL_SUCCESS_WITH_INFO) \
{ \
printf("error in SQLBindCol %s\n",print); \
return FALSE; \
} \
}
#define SQLFREESTMT(A,B,print) \
{ \
SQLRETURN retcode; \
retcode = SQLFreeStmt(A,B); \
if (retcode != SQL_SUCCESS && retcode != SQL_SUCCESS_WITH_INFO) \
{ \
printf("error in SQLFreeStmt %s\n",print); \
return FALSE; \
} \
}
#define SQLNUMRESULTCOLS(A,B,print) \
{ \
SQLRETURN retcode; \
retcode = SQLNumResultCols(A,B); \
if (retcode != SQL_SUCCESS && retcode != SQL_SUCCESS_WITH_INFO) \
{ \
printf("error in SQLNumResultCols %s\n",print); \
return FALSE; \
} \
}
#define SQLCLOSECURSOR(A,print) \
{ \
SQLRETURN retcode; \
retcode = SQLCloseCursor(A); \
if (retcode != SQL_SUCCESS && retcode != SQL_SUCCESS_WITH_INFO) \
{ \
printf("error in SQLCloseCursor %s\n",print); \
return FALSE; \
} \
}
/* this macro is not used in db_odbc_row */
#define SQLFETCH(A,print) \
{ \
SQLRETURN retcode; \
retcode = SQLFetch(A); \
if (retcode == SQL_NO_DATA) \
break; \
if (retcode != SQL_SUCCESS && retcode != SQL_SUCCESS_WITH_INFO) \
{ \
printf("error in SQLFetch %s\n",print); \
return FALSE; \
} \
}
#define SQLGETDATA(A,B,C,D,E,F,print) \
{ \
SQLRETURN retcode; \
retcode = SQLGetData(A,B,C,D,E,F); \
if (retcode != SQL_SUCCESS && retcode != SQL_SUCCESS_WITH_INFO) \
{ \
printf("error in SQLGetData %s\n",print); \
return FALSE; \
} \
}
#define SQLDISCONNECT(A,print) \
{ \
SQLRETURN retcode; \
retcode = SQLDisconnect(A); \
if (retcode != SQL_SUCCESS && retcode != SQL_SUCCESS_WITH_INFO) \
{ \
printf("error in SQLDisconnect %s\n",print); \
return FALSE; \
} \
}
#define SQLFREEHANDLE(A,B,print) \
{ \
SQLRETURN retcode; \
retcode = SQLFreeHandle(A,B); \
if (retcode != SQL_SUCCESS && retcode != SQL_SUCCESS_WITH_INFO) \
{ \
printf("error in SQLFreeHandle %s\n",print); \
return FALSE; \
} \
}
#define SQLPRIMARYKEYS(A,B,C,D,E,F,G,print) \
{ \
SQLRETURN retcode; \
retcode = SQLPrimaryKeys(A,B,C,D,E,F,G); \
if (retcode != SQL_SUCCESS && retcode != SQL_SUCCESS_WITH_INFO) \
{ \
printf("error in SQLPrimaryKeys %s\n",print); \
return FALSE; \
} \
}
#define SQLGETTYPEINFO(A,B,print) \
{ \
SQLRETURN retcode; \
retcode = SQLGetTypeInfo(A,B); \
if (retcode != SQL_SUCCESS && retcode != SQL_SUCCESS_WITH_INFO) \
{ \
printf("error in SQLGetTypeInfo %s\n",print); \
return FALSE; \
} \
}
#define SQLCOLATTRIBUTE(A,B,C,D,E,F,G,print) \
{ \
SQLRETURN retcode; \
retcode = SQLColAttribute(A,B,C,D,E,F,G); \
if (retcode != SQL_SUCCESS && retcode != SQL_SUCCESS_WITH_INFO) \
{ \
printf("error in SQLColAttribute %s\n",print); \
return FALSE; \
} \
}
/* Check the data type */
#define IS_SQL_INT(FIELD) ((FIELD) == SQL_DECIMAL || \
                           (FIELD) == SQL_NUMERIC || \
                           (FIELD) == SQL_SMALLINT || \
                           (FIELD) == SQL_INTEGER || \
                           (FIELD) == SQL_TINYINT || \
                           (FIELD) == SQL_BIGINT)
#define IS_SQL_FLOAT(FIELD) ((FIELD) == SQL_FLOAT || \
                             (FIELD) == SQL_DOUBLE || \
                             (FIELD) == SQL_REAL)
static Int
c_db_odbc_connect(void) {
Term arg_driver = Deref(ARG1);
Term arg_user = Deref(ARG2);
Term arg_passwd = Deref(ARG3);
Term arg_conn = Deref(ARG4);
MYDDAS_UTIL_CONNECTION new = NULL;
char *driver = AtomName(AtomOfTerm(arg_driver));
char *user = AtomName(AtomOfTerm(arg_user));
char *passwd = AtomName(AtomOfTerm(arg_passwd));
SQLHENV henv;
SQLHDBC hdbc;
/*Allocate environment handle */
SQLALLOCHANDLE(SQL_HANDLE_ENV, SQL_NULL_HANDLE, &henv, "connect");
/* Set the ODBC version environment attribute */
SQLSETENVATTR(henv, SQL_ATTR_ODBC_VERSION, (SQLPOINTER)SQL_OV_ODBC3, 0, "connect");
/* Allocate connection handle */
SQLALLOCHANDLE(SQL_HANDLE_DBC, henv, &hdbc, "connect");
/* Set login timeout to 6 seconds. */
SQLSETCONNECTATTR(hdbc, SQL_LOGIN_TIMEOUT,(SQLPOINTER) 6, 0, "connect");
/* Connect to data source */
SQLCONNECT(hdbc,
(SQLCHAR*) driver, SQL_NTS,
(SQLCHAR*) user, SQL_NTS,
(SQLCHAR*) passwd, SQL_NTS, "connect");
if (!Yap_unify(arg_conn, MkIntegerTerm((Int)(hdbc))))
return FALSE;
else
{
/* Create a new node in the list of connections */
//new = add_connection(&TOP,hdbc,henv);
new = myddas_util_add_connection(hdbc,henv);
if (new == NULL){
printf("Error allocating memory for the connection list\n");
return FALSE;
}
return TRUE;
}
}
/* db_query: SQLQuery x ResultSet x Arity x BindList x Connection */
static Int
c_db_odbc_query(void) {
Term arg_sql_query = Deref(ARG1);
Term arg_result_set = Deref(ARG2);
Term arg_arity = Deref(ARG3);
Term arg_bind_list = Deref(ARG4);
Term arg_conn = Deref(ARG5);
SQLCHAR *sql = AtomName(AtomOfTerm(arg_sql_query));
SQLHDBC hdbc =(SQLHDBC) (IntegerOfTerm(arg_conn));
SQLHSTMT hstmt;
SQLSMALLINT type;
/* Allocate a handle for the query */
SQLALLOCHANDLE(SQL_HANDLE_STMT, hdbc, &hstmt, "db_query");
/* Executes the query*/
SQLEXECDIRECT(hstmt,sql,SQL_NTS, "db_query");
Int arity;
Int i;
if (IsNonVarTerm(arg_arity)){
arity = IntegerOfTerm(arg_arity);
char *bind_space=NULL;
//const Int functor_arity=3;
const Short functor_arity=3;
Functor functor = Yap_MkFunctor(Yap_LookupAtom("bind"),functor_arity);
Term properties[functor_arity];
Term head,list=arg_bind_list;
SQLUINTEGER ColumnSizePtr;
SQLINTEGER *data_info=NULL;
for (i=1;i<=arity;i++)
{
head = HeadOfTerm(list);
list = TailOfTerm(list);
SQLDESCRIBECOL(hstmt,i,NULL,0,NULL,&type,&ColumnSizePtr,NULL,NULL,"db_query");
/* +1 because of '\0' */
bind_space = malloc(sizeof(char)*(ColumnSizePtr+1));
data_info = malloc(sizeof(SQLINTEGER));
SQLBINDCOL(hstmt,i,SQL_C_CHAR,bind_space,(ColumnSizePtr+1),data_info,"db_query");
properties[0] = MkIntegerTerm((Int)bind_space);
properties[2] = MkIntegerTerm((Int)data_info);
if (IS_SQL_INT(type))
properties[1]=MkAtomTerm(Yap_LookupAtom("integer"));
else if (IS_SQL_FLOAT(type))
properties[1]=MkAtomTerm(Yap_LookupAtom("real"));
else
properties[1]=MkAtomTerm(Yap_LookupAtom("string"));
Yap_unify(head,Yap_MkApplTerm(functor,functor_arity,properties));
continue;
}
}
if (!Yap_unify(arg_result_set, MkIntegerTerm((Int) hstmt)))
{
SQLCLOSECURSOR(hstmt,"db_query");
SQLFREESTMT(hstmt,SQL_CLOSE,"db_query");
return FALSE;
}
return TRUE;
}
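/* Unlike the MySQL interface, the ODBC interface binds output buffers at
   query time: for every column a term bind(BufferPtr, Type, DataInfoPtr) is
   unified into BindList, where BufferPtr and DataInfoPtr are the addresses
   of malloc'd buffers stored as integer terms and Type is one of the atoms
   integer, real or string.  c_db_odbc_row later reads these terms back to
   decode each fetched value; freeing the buffers is assumed to be handled
   elsewhere, since this file never releases them. */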
static Int
c_db_odbc_number_of_fields(void) {
Term arg_relation = Deref(ARG1);
Term arg_conn = Deref(ARG2);
Term arg_fields = Deref(ARG3);
char *relation = AtomName(AtomOfTerm(arg_relation));
SQLHDBC hdbc =(SQLHDBC) (IntegerOfTerm(arg_conn));
SQLHSTMT hstmt;
char sql[256];
SQLSMALLINT number_fields;
sprintf(sql,"DESCRIBE %s",relation);
SQLALLOCHANDLE(SQL_HANDLE_STMT, hdbc, &hstmt, "db_number_of_fields");
SQLEXECDIRECT(hstmt,sql,SQL_NTS, "db_number_of_fields");
/* Compute the number of fields */
number_fields=0;
while(TRUE) {
SQLFETCH(hstmt,"db_number_of_fields");
number_fields++;
}
SQLCLOSECURSOR(hstmt,"db_number_of_fields");
SQLFREESTMT(hstmt,SQL_CLOSE,"db_number_of_fields");
if (!Yap_unify(arg_fields, MkIntegerTerm(number_fields)))
return FALSE;
return TRUE;
}
/* db_get_attributes_types: RelName x Connection -> TypesList */
static Int
c_db_odbc_get_attributes_types(void) {
Term arg_relation = Deref(ARG1);
Term arg_conn = Deref(ARG2);
Term arg_types_list = Deref(ARG3);
char *relation = AtomName(AtomOfTerm(arg_relation));
SQLHDBC hdbc =(SQLHDBC) (IntegerOfTerm(arg_conn));
SQLHSTMT hstmt;
char sql[256];
Term head, list;
list = arg_types_list;
sprintf(sql,"DESCRIBE %s",relation);
SQLALLOCHANDLE(SQL_HANDLE_STMT, hdbc, &hstmt, "db_get_attributes_types");
SQLEXECDIRECT(hstmt,sql,SQL_NTS, "db_get_attributes_types");
while (TRUE)
{
SQLFETCH(hstmt, "db_get_attributes_types");
/* TODO: do this in a way that lets us compute how much space the field
name will take, so that we can allocate memory dynamically */
sql[0]='\0';
SQLGETDATA(hstmt, 1, SQL_C_CHAR, sql, 256, NULL, "db_get_attributes_types");
head = HeadOfTerm(list);
Yap_unify(head, MkAtomTerm(Yap_LookupAtom(sql)));
list = TailOfTerm(list);
head = HeadOfTerm(list);
list = TailOfTerm(list);
sql[0]='\0';
SQLGETDATA(hstmt, 2, SQL_C_CHAR, sql, 256, NULL, "db_get_attributes_types");
if (strncmp(sql, "smallint",8) == 0 || strncmp(sql,"int",3) == 0 ||
strncmp(sql, "mediumint",9) == 0 || strncmp(sql, "tinyint",7) == 0 ||
strncmp(sql, "bigint",6) == 0 || strcmp(sql, "year") == 0)
Yap_unify(head, MkAtomTerm(Yap_LookupAtom("integer")));
else
if (strcmp(sql, "float") == 0 || strncmp(sql, "double",6) == 0
|| strcmp(sql, "real") == 0)
Yap_unify(head, MkAtomTerm(Yap_LookupAtom("real")));
else
Yap_unify(head, MkAtomTerm(Yap_LookupAtom("string")));
}
SQLCLOSECURSOR(hstmt,"db_get_attributes_types");
SQLFREESTMT(hstmt,SQL_CLOSE, "db_get_attributes_types");
return TRUE;
}
/* db_disconnect */
static Int
c_db_odbc_disconnect(void) {
Term arg_conn = Deref(ARG1);
SQLHDBC conn = (SQLHDBC) (IntegerOfTerm(arg_conn));
SQLHENV henv = myddas_util_get_odbc_enviromment(conn);
if ((myddas_util_search_connection(conn)) != NULL)
{
myddas_util_delete_connection(conn);
/* More information about this process on
msdn.microsoft.com*/
SQLDISCONNECT(conn,"db_disconnect");
SQLFREEHANDLE(SQL_HANDLE_DBC,conn,"db_disconnect");
SQLFREEHANDLE(SQL_HANDLE_ENV,henv,"db_disconnect");
return TRUE;
}
else
return FALSE;
}
static Int
c_db_odbc_row_cut(void) {
SQLHSTMT hstmt = (SQLHSTMT) IntegerOfTerm(EXTRA_CBACK_CUT_ARG(Term,1));
SQLCLOSECURSOR(hstmt,"db_row_cut");
SQLFREESTMT(hstmt,SQL_CLOSE,"db_row_cut");
return TRUE;
}
/* db_row: ResultSet x BindList x ListOfArgs -> */
static Int
c_db_odbc_row(void) {
Term arg_result_set = Deref(ARG1);
Term arg_bind_list = Deref(ARG2);
Term arg_list_args = Deref(ARG3);
SQLHSTMT hstmt = (SQLHSTMT) IntegerOfTerm(arg_result_set);
/* EXTRA_CBACK_ARG(arity, slot where the value is stored) */
EXTRA_CBACK_ARG(3,1)=(CELL) MkIntegerTerm((Int)hstmt);
Term head, list, null_atom[1];
Term head_bind, list_bind;
SQLRETURN retcode = SQLFetch(hstmt);
if (retcode == SQL_NO_DATA)
{
SQLCLOSECURSOR(hstmt,"db_row");
SQLFREESTMT(hstmt,SQL_CLOSE,"db_row");
cut_fail();
return FALSE;
}
if (retcode != SQL_SUCCESS && retcode != SQL_SUCCESS_WITH_INFO)
{
printf("error in SQLFetch (number of fields)\n");
return FALSE;
}
char *bind_value=NULL;
Term type;
list = arg_list_args;
list_bind = arg_bind_list;
SQLINTEGER *data_info=NULL;
while (IsPairTerm(list_bind))
{
head = HeadOfTerm(list);
list = TailOfTerm(list);
head_bind = HeadOfTerm(list_bind);
list_bind = TailOfTerm(list_bind);
bind_value = (char *)IntegerOfTerm(ArgOfTerm(1,head_bind));
type = ArgOfTerm(2,head_bind);
data_info = (SQLINTEGER *)IntegerOfTerm(ArgOfTerm(3,head_bind));
if ((*data_info) == SQL_NULL_DATA){
null_atom[0] = MkIntegerTerm(null_id++);
if (!Yap_unify(head, Yap_MkApplTerm(Yap_MkFunctor(Yap_LookupAtom("null"),1),1,null_atom)))
continue;
}
else
{
if (!strcmp(AtomName(AtomOfTerm(type)),"integer"))
{
if (!Yap_unify(head, MkIntegerTerm(atoi(bind_value))))
continue;
}
else if (!strcmp(AtomName(AtomOfTerm(type)),"real"))
{
if (!Yap_unify(head, MkFloatTerm(atof(bind_value))))
continue;
}
else if (!strcmp(AtomName(AtomOfTerm(type)),"string"))
{
if (!Yap_unify(head, MkAtomTerm(Yap_LookupAtom(bind_value))))
continue;
}
}
}
return TRUE;
}
/* TODO: change this function so that it does not run the query, since
the db_sql_select predicate would otherwise run the same query twice */
static Int
c_db_odbc_number_of_fields_in_query(void) {
Term arg_query = Deref(ARG1);
Term arg_conn = Deref(ARG2);
Term arg_fields = Deref(ARG3);
char *sql = AtomName(AtomOfTerm(arg_query));
SQLHDBC hdbc =(SQLHDBC) (IntegerOfTerm(arg_conn));
SQLHSTMT hstmt;
SQLSMALLINT number_cols=0;
SQLALLOCHANDLE(SQL_HANDLE_STMT, hdbc, &hstmt,
"db_number_of_fields_in_query");
SQLEXECDIRECT(hstmt,sql,SQL_NTS,
"db_number_of_fields_in_query");
SQLNUMRESULTCOLS(hstmt,&number_cols,
"db_number_of_fields_in_query");
if (!Yap_unify(arg_fields, MkIntegerTerm(number_cols))){
return FALSE;
}
SQLCLOSECURSOR(hstmt,"db_number_of_fields_in_query");
SQLFREESTMT(hstmt,SQL_CLOSE, "db_number_of_fields_in_query");
return TRUE;
}
static Int
c_db_odbc_get_fields_properties(void) {
Term nome_relacao = Deref(ARG1);
Term arg_conn = Deref(ARG2);
Term fields_properties_list = Deref(ARG3);
Term head, list;
char *relacao = AtomName(AtomOfTerm(nome_relacao));
char sql[256];
char name[200];
Int i;
SQLSMALLINT num_fields=0;
SQLSMALLINT NullablePtr=0;
SQLSMALLINT AutoIncrementPointer=0;
SQLHSTMT hstmt,hstmt2;
SQLHDBC hdbc =(SQLHDBC) (IntegerOfTerm(arg_conn));
/* LIMIT 0 -> We don't need the results of the query,
only the information about the fields of the relation*/
sprintf (sql,"SELECT * FROM `%s` LIMIT 0",relacao);
/* Allocate a handle for the query */
SQLALLOCHANDLE(SQL_HANDLE_STMT, hdbc, &hstmt, "db_get_fields_properties");
/* Executes the query*/
SQLEXECDIRECT(hstmt,sql,SQL_NTS, "db_get_fields_properties");
Functor functor = Yap_MkFunctor(Yap_LookupAtom("property"),4);
Term properties[4];
SQLNUMRESULTCOLS(hstmt,&num_fields,
"db_get_fields_properties");
list = fields_properties_list;
SQLSMALLINT bind_prim_key;
// because rows in ODBC start at 1 :)
Short *null=calloc(1+num_fields,sizeof(Short)); /* zero-initialised: only primary-key columns are set to 1 below */
SQLALLOCHANDLE(SQL_HANDLE_STMT, hdbc, &hstmt2, "db_get_fields_properties");
/* Executes the query*/
SQLPRIMARYKEYS(hstmt2,NULL,0,NULL,0,relacao,SQL_NTS, "db_get_fields_properties");
/* Associate the bind value with the 5th column */
SQLBINDCOL(hstmt2,5,SQL_C_SSHORT,&bind_prim_key,sizeof(SQLSMALLINT),NULL,
"db_get_fields_properties");
while(1)
{
SQLFETCH(hstmt2,"db_get_fields_properties");
null[bind_prim_key]=1;
}
SQLCLOSECURSOR(hstmt2,"db_get_fields_properties");
SQLFREESTMT(hstmt2,SQL_CLOSE,"db_get_fields_properties");
for (i=1;i<=num_fields;i++)
{
head = HeadOfTerm(list);
name[0]='\0';
SQLDESCRIBECOL(hstmt,i,name,200,NULL,NULL,NULL,NULL,&NullablePtr,
"db_get_fields_properties");
SQLCOLATTRIBUTE(hstmt,i,SQL_DESC_AUTO_UNIQUE_VALUE,NULL,0,NULL,&AutoIncrementPointer,
"db_get_fields_properties");
properties[0] = MkAtomTerm(Yap_LookupAtom(name));
if (NullablePtr & SQL_NULLABLE)
properties[1] = MkIntegerTerm(1); //Can't be NULL
else
properties[1] = MkIntegerTerm(0);
if (null[i] == 1)
properties[2] = MkIntegerTerm(1); //It's a primary key
else
properties[2] = MkIntegerTerm(0);
if (AutoIncrementPointer & SQL_TRUE)
properties[3] = MkIntegerTerm(1); //It's an auto_increment field
else
properties[3] = MkIntegerTerm(0);
list = TailOfTerm(list);
if (!Yap_unify(head, Yap_MkApplTerm(functor,4,properties))){
return FALSE;
}
}
SQLCLOSECURSOR(hstmt,"db_get_fields_properties");
SQLFREESTMT(hstmt,SQL_CLOSE,"db_get_fields_properties");
return TRUE;
}
void Yap_InitMYDDAS_ODBCPreds(void)
{
/* db_connect: Driver x User x Passwd x Connection */
Yap_InitCPred("c_db_odbc_connect", 4, c_db_odbc_connect, 0);
/* db_number_of_fields: Relation x Connection x NumberOfFields */
Yap_InitCPred("c_db_odbc_number_of_fields",3, c_db_odbc_number_of_fields, 0);
/* db_number_of_fields_in_query: SQLQuery x Connection x NumberOfFields */
Yap_InitCPred("c_db_odbc_number_of_fields_in_query",3, c_db_odbc_number_of_fields_in_query, 0);
/* db_get_attributes_types: Relation x TypesList */
Yap_InitCPred("c_db_odbc_get_attributes_types", 3, c_db_odbc_get_attributes_types, 0);
/* db_query: SQLQuery x ResultSet x Arity x BindList x Connection */
Yap_InitCPred("c_db_odbc_query", 5, c_db_odbc_query, 0);
/* db_disconnect: Connection */
Yap_InitCPred("c_db_odbc_disconnect", 1,c_db_odbc_disconnect, 0);
/* db_get_fields_properties: PredName x Connection x PropertiesList */
Yap_InitCPred("c_db_odbc_get_fields_properties",3,c_db_odbc_get_fields_properties,0);
}
void Yap_InitBackMYDDAS_ODBCPreds(void)
{
/* db_row: ResultSet x BindList x ListOfArgs */
Yap_InitCPredBackCut("c_db_odbc_row", 3, sizeof(Int),
c_db_odbc_row,
c_db_odbc_row,
c_db_odbc_row_cut, 0);
}
#endif /*MYDDAS_ODBC*/

View File

@ -1,700 +0,0 @@
/*************************************************************************
* *
* YAP Prolog *
* *
* Yap Prolog was developed at NCCUP - Universidade do Porto *
* *
* Copyright L.Damas, V.S.Costa and Universidade do Porto 1985-1997 *
* *
**************************************************************************
* *
* File: myddas_shared.c *
* Last rev: 22/03/05 *
* mods: *
* comments: Predicates for maintaining MYDDAS *
* *
*************************************************************************/
#if defined MYDDAS_MYSQL || defined MYDDAS_ODBC
#include "Yap.h"
#include "Yatom.h"
#include "cut_c.h"
#include "myddas.h"
#include <stdlib.h>
#include "myddas_structs.h"
#ifdef MYDDAS_STATS
#include "myddas_statistics.h"
#endif
STATIC_PROTO(Int c_db_initialize_myddas,(void));
STATIC_PROTO(Int c_db_connection_type,(void));
STATIC_PROTO(Int c_db_add_preds,(void));
STATIC_PROTO(Int c_db_preds_conn_start ,(void));
STATIC_PROTO(Int c_db_preds_conn_continue ,(void));
STATIC_PROTO(Int c_db_connection_start ,(void));
STATIC_PROTO(Int c_db_connection_continue ,(void));
STATIC_PROTO(Int c_db_check_if_exists_pred,(void));
STATIC_PROTO(Int c_db_delete_predicate,(void));
STATIC_PROTO(Int c_db_multi_queries_number,(void));
#ifdef MYDDAS_STATS
STATIC_PROTO(Int c_db_stats,(void));
STATIC_PROTO(Int c_db_stats_walltime,(void));
STATIC_PROTO(Int c_db_stats_translate,(void));
STATIC_PROTO(Int c_db_stats_time,(void));
#endif
#ifdef DEBUG
STATIC_PROTO(Int c_db_check,(void));
#endif
void Yap_InitMYDDAS_SharedPreds(void)
{
/* c_db_initialize_myddas */
Yap_InitCPred("c_db_initialize_myddas",0,c_db_initialize_myddas, 0);
/* c_db_connection_type: Connection x Type */
Yap_InitCPred("c_db_connection_type",2,c_db_connection_type, 0);
/* db_add_preds : PredName * Arity * Module * Connection */
Yap_InitCPred("c_db_add_preds",4,c_db_add_preds, 0);
/* c_db_check_if_exists_pred : PredName * Arity * Module */
Yap_InitCPred("c_db_check_if_exists_pred",3,c_db_check_if_exists_pred, 0);
/* c_db_delete_pred : Module * PredName * Arity */
Yap_InitCPred("c_db_delete_predicate",3,c_db_delete_predicate, 0);
/* c_db_multi_queries_number : Connection * Number */
Yap_InitCPred("c_db_multi_queries_number",2,c_db_multi_queries_number, 0);
#ifdef MYDDAS_STATS
/* c_db_stats: Connection * Stats */
Yap_InitCPred("c_db_stats",2, c_db_stats, 0);
/* c_db_stats_walltime */
Yap_InitCPred("c_db_stats_walltime",1, c_db_stats_walltime, 0);
/* c_db_stats_translate */
Yap_InitCPred("c_db_stats_translate",2,c_db_stats_translate, 0);
/* c_db_stats_time */
Yap_InitCPred("c_db_stats_time",2,c_db_stats_time, 0);
#endif
#ifdef DEBUG
Yap_InitCPred("c_db_check",0, c_db_check, 0);
#endif
}
void Yap_InitBackMYDDAS_SharedPreds(void)
{
/* Gives all the predicates associated to a given connection */
Yap_InitCPredBack("c_db_preds_conn", 4, sizeof(Int),
c_db_preds_conn_start,
c_db_preds_conn_continue, 0);
/* Gives all the connections stored on the MYDDAS Structure*/
Yap_InitCPredBack("c_db_connection", 1, sizeof(Int),
c_db_connection_start,
c_db_connection_continue, 0);
}
/* Initialize all of the MYDDAS global structures */
static Int
c_db_initialize_myddas(void){
Yap_REGS.MYDDAS_GLOBAL_POINTER = myddas_init_initialize_myddas();
#ifdef MYDDAS_STATS
Yap_REGS.MYDDAS_GLOBAL_POINTER = myddas_stats_initialize_global_stats(Yap_REGS.MYDDAS_GLOBAL_POINTER);
#endif /* MYDDAS_STATS */
return TRUE;
}
/* Gives the type of a given connection,
in other words, type will be mysql or odbc.
NOTE: in order to use this predicate, the connection must already have been established */
/* c_db_connection_type: +Connection * ?Type */
static Int
c_db_connection_type (void){
Term arg_con = Deref(ARG1);
Term arg_type = Deref(ARG2);
Int *con = (Int *) IntegerOfTerm(arg_con);
Int type = myddas_util_connection_type(con);
if (type == 1) /* MYSQL Connection */
Yap_unify(arg_type, MkAtomTerm(Yap_LookupAtom("mysql")));
else if (type ==2) /* ODBC Connection */
Yap_unify(arg_type, MkAtomTerm(Yap_LookupAtom("odbc")));
else /* Not a valid connection*/
return FALSE;
return TRUE;
}
/* db_add_preds: PredName * Arity * Module * Connection*/
static Int
c_db_add_preds (void){
Term arg_nome = Deref(ARG1);
Term arg_aridade = Deref(ARG2);
Term arg_module = Deref(ARG3);
Term arg_conn = Deref(ARG4);
/* PredEntry *pe; */
/* pe = RepPredProp(PredPropByFunc(FunctorOfTerm(arg_pred),arg_module)); */
char *nome = AtomName(AtomOfTerm(arg_nome));
char *module = AtomName(AtomOfTerm(arg_module));
Int aridade = IntegerOfTerm(arg_aridade);
Int *conn = (Int *) IntegerOfTerm(arg_conn);
if (myddas_util_add_predicate(nome,aridade,module,conn) == NULL)
{
#ifdef DEBUG
printf ("ERROR : Could not add Predicate: Line: %d File: %s\n",__LINE__,__FILE__);
#endif
return FALSE;
}
return TRUE;
}
static Int
c_db_check_if_exists_pred (void){
Term arg_nome = Deref(ARG1);
Term arg_aridade = Deref(ARG2);
Term arg_module = Deref(ARG3);
char *nome = AtomName(AtomOfTerm(arg_nome));
char *module = AtomName(AtomOfTerm(arg_module));
Int aridade = IntegerOfTerm(arg_aridade);
if (myddas_util_search_predicate(nome,aridade,module) == NULL)
return FALSE;
else
return TRUE;
}
static Int
c_db_delete_predicate(void){
Term arg_module = Deref(ARG1);
Term arg_name = Deref(ARG2);
Term arg_arity = Deref(ARG3);
char *module = AtomName(AtomOfTerm(arg_module));
char *name = AtomName(AtomOfTerm(arg_name));
Int arity = IntegerOfTerm(arg_arity);
MYDDAS_UTIL_PREDICATE predicate =
myddas_util_search_predicate(name,arity,module);
if (predicate == NULL)
return FALSE;
myddas_util_delete_predicate(predicate);
return TRUE;
}
static Int
c_db_multi_queries_number(void){
Term arg_conn = Deref(ARG1);
Term arg_number = Deref(ARG2);
Int *conn = (Int *) IntegerOfTerm(arg_conn);
MYDDAS_UTIL_CONNECTION node =
myddas_util_search_connection(conn);
if (node == NULL)
return FALSE;
if (IsVarTerm(arg_number)){
Yap_unify(arg_number,MkIntegerTerm(((Int)myddas_util_get_total_multi_queries_number(node))+1));
}
else {
Int number = IntegerOfTerm(arg_number);
number--;
myddas_util_set_total_multi_queries_number(node,number);
}
return TRUE;
}
static Int
c_db_connection_start(void){
MYDDAS_UTIL_CONNECTION node =
Yap_REGS.MYDDAS_GLOBAL_POINTER->myddas_top_connections;
EXTRA_CBACK_ARG(1,1)=(CELL) MkIntegerTerm((Int)node);
return (c_db_connection_continue());
}
static Int
c_db_connection_continue(void){
Term arg_conn = Deref(ARG1);
MYDDAS_UTIL_CONNECTION node;
node = (MYDDAS_UTIL_CONNECTION) IntegerOfTerm(EXTRA_CBACK_ARG(1,1));
/* There are no connections */
if (node == NULL)
{
cut_fail();
return FALSE;
}
Yap_unify(arg_conn, MkIntegerTerm((Int)(node->connection)));
EXTRA_CBACK_ARG(1,1)=(CELL) MkIntegerTerm((Int)(node->next));
return TRUE;
}
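/* c_db_connection/1 enumerates the open connections by walking the global
   myddas_top_connections list: the current node is parked as an integer
   term in the extra backtrack slot (EXTRA_CBACK_ARG(1,1)), each retry
   yields node->connection and advances to node->next, and a NULL node ends
   the enumeration with cut_fail(). */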
/* db_preds_conn : Connection(+) * Pred_name(-) * Pred_arity */
static Int
c_db_preds_conn_start (void){
Term arg_conn = Deref(ARG1);
Int *conn = (Int *) IntegerOfTerm(arg_conn);
MYDDAS_UTIL_CONNECTION node =
myddas_util_search_connection(conn);
/* In case the connection has already been deleted */
if (node == NULL)
{
cut_fail();
return FALSE;
}
void *pointer = myddas_util_get_list_pred(node);
EXTRA_CBACK_ARG(4,1)=(CELL) MkIntegerTerm((Int)pointer);
return (c_db_preds_conn_continue());
}
/* db_preds_conn : Connection(+) * Pred_name(-) * Pred_arity*/
static Int
c_db_preds_conn_continue (void){
Term module = Deref(ARG2);
Term name = Deref(ARG3);
Term arity = Deref(ARG4);
void *pointer;
pointer = (void *) IntegerOfTerm(EXTRA_CBACK_ARG(4,1));
if (pointer != NULL)
{
EXTRA_CBACK_ARG(4,1)=(CELL) MkIntegerTerm((Int)myddas_util_get_pred_next(pointer));
if (!Yap_unify(module, MkAtomTerm(Yap_LookupAtom(myddas_util_get_pred_module(pointer))))){
return FALSE;
}
if (!Yap_unify(name,MkAtomTerm(Yap_LookupAtom(myddas_util_get_pred_name(pointer))))){
return FALSE;
}
if (!Yap_unify(arity, MkIntegerTerm((Int)myddas_util_get_pred_arity(pointer)))){
return FALSE;
}
return TRUE;
}
else
{
cut_fail();
return FALSE;
}
}
#ifdef DEBUG
static Int
c_db_check(void){
check_int();
return TRUE;
}
#endif /*DEBUG*/
#ifdef MYDDAS_STATS
static Int
c_db_stats_walltime(void){
Term arg_time = Deref(ARG1);
#ifdef DEBUG
if (IsVarTerm(arg_time)){
#endif
Yap_unify(arg_time,MkIntegerTerm((Int)myddas_stats_walltime()));
return TRUE;
#ifdef DEBUG
}
else{
printf ("ERROR: c_db_stats_walltime expected an unbound variable\n");
return FALSE;
}
#endif
}
static Int
c_db_stats_translate(void){
Term arg_start = Deref(ARG1);
Term arg_end = Deref(ARG2);
MYDDAS_STATS_TIME start;
MYDDAS_STATS_TIME end;
MYDDAS_STATS_TIME total_time,diff;
#ifdef DEBUG
//Both args must be instantiated
if (IsNonVarTerm(arg_start) && IsNonVarTerm(arg_end)){
#endif
start = (MYDDAS_STATS_TIME) IntegerOfTerm(arg_start);
end = (MYDDAS_STATS_TIME) IntegerOfTerm(arg_end);
MYDDAS_STATS_GET_TRANSLATE(total_time);
MYDDAS_STATS_INITIALIZE_TIME_STRUCT(diff,time_copy);
myddas_stats_subtract_time(diff,end,start);
diff = myddas_stats_time_copy_to_final(diff);
myddas_stats_add_time(total_time,diff,total_time);
MyddasULInt count;
MYDDAS_STATS_GET_TRANSLATE_COUNT(count);
MYDDAS_STATS_SET_TRANSLATE_COUNT(++count);
MYDDAS_FREE(diff,struct myddas_stats_time_struct);
MYDDAS_FREE(start, struct myddas_stats_time_struct);
MYDDAS_FREE(end, struct myddas_stats_time_struct);
return TRUE;
#ifdef DEBUG
}
else{
printf ("ERROR: c_db_stats_translate got a variable\n");
return FALSE;
}
#endif
}
static Int
c_db_stats_time(void){
Term arg_reference = Deref(ARG1);
Term arg_time = Deref(ARG2);
Term final_term;
MYDDAS_STATS_STRUCT struc = (MYDDAS_STATS_STRUCT)IntegerOfTerm(arg_reference);
Functor functor_count = Yap_MkFunctor(Yap_LookupAtom("count"),1);
Term count_number[1];
Functor unit;
Term number[1];
switch(struc->type){
case integer:
{
Functor functor = Yap_MkFunctor(Yap_LookupAtom("myddas_integer"),2);
Term integer_number[2]; /* slot 0: number/1 term, slot 1: count/1 term */
MyddasULInt integer;
unit = Yap_MkFunctor(Yap_LookupAtom("number"),1);
integer = struc->u.integer.integer;
number[0] = MkIntegerTerm(integer);
integer_number[0] = Yap_MkApplTerm(unit,1,number);;
count_number[0] = MkIntegerTerm(struc->count);
integer_number[1] = Yap_MkApplTerm(functor_count,1,count_number);
final_term = Yap_MkApplTerm(functor,2,integer_number);
break;
}
case time_str:
{
MYDDAS_STATS_TIME time = struc->u.time_str.time_str;
Functor functor = Yap_MkFunctor(Yap_LookupAtom("myddas_time"),6);
Term time_numbers[6];
MyddasUInt time_number;
unit = Yap_MkFunctor(Yap_LookupAtom("hours"),1);
time_number = MYDDAS_STATS_TIME_HOURS(time);
number[0] = MkIntegerTerm(time_number);
time_numbers[0] = Yap_MkApplTerm(unit,1,number);;
unit = Yap_MkFunctor(Yap_LookupAtom("minutes"),1);
time_number = MYDDAS_STATS_TIME_MINUTES(time);
number[0] = MkIntegerTerm(time_number);
time_numbers[1] = Yap_MkApplTerm(unit,1,number);;
unit = Yap_MkFunctor(Yap_LookupAtom("seconds"),1);
time_number = MYDDAS_STATS_TIME_SECONDS(time);
number[0] = MkIntegerTerm(time_number);
time_numbers[2] = Yap_MkApplTerm(unit,1,number);;
unit = Yap_MkFunctor(Yap_LookupAtom("miliseconds"),1);
time_number = MYDDAS_STATS_TIME_MILISECONDS(time);
number[0] = MkIntegerTerm(time_number);
time_numbers[3] = Yap_MkApplTerm(unit,1,number);;
unit = Yap_MkFunctor(Yap_LookupAtom("microseconds"),1);
time_number = MYDDAS_STATS_TIME_MICROSECONDS(time);
number[0] = MkIntegerTerm(time_number);
time_numbers[4] = Yap_MkApplTerm(unit,1,number);;
count_number[0] = MkIntegerTerm(struc->count);
time_numbers[5] = Yap_MkApplTerm(functor_count,1,count_number);
final_term = Yap_MkApplTerm(functor,6,time_numbers);
break;
}
default:
#ifdef DEBUG
printf ("ERROR: c_db_stats_time unknown option\n");
#endif
return FALSE;
break;
}
if (!Yap_unify(arg_time,final_term )){
return FALSE;
}
return TRUE;
}
//Returns the stats of this module in a list
static Int
c_db_stats(void) {
Term arg_conn = Deref(ARG1);
Term arg_list = Deref(ARG2);
MyddasPointer *conn = (MyddasPointer *) (IntegerOfTerm(arg_conn));
// TODO
if (get_myddas_top() == 0 ){ /* We want all the statistics */
return FALSE;
}
MYDDAS_STATS_STRUCT str;
MYDDAS_UTIL_CONNECTION
node = myddas_util_search_connection(conn);
Term head, list;
list = arg_list;
#ifdef DEBUG
MYDDAS_STATS_TIME time = NULL;
#endif
//[Index 1] -> Total Number of Rows by connection
//Total number of Rows returned by the server
//WARNING: only works with store_result
head = HeadOfTerm(list);
list = TailOfTerm(list);
str = myddas_stats_get_stat(node->stats,5);
Yap_unify(head, MkIntegerTerm((MyddasInt)str));
#ifdef DEBUG
MyddasUInt number = 0;
MYDDAS_STATS_CON_GET_TOTAL_ROWS(node,number);
printf ("Total Number of Rows returned from the Server\n");
printf ("%lu\n\n",(unsigned long)number);
#endif
//[Index 2] -> Total of Time Spent by the DB Server
// processing all the SQL Queries
head = HeadOfTerm(list);
list = TailOfTerm(list);
str = myddas_stats_get_stat(node->stats,1);
Yap_unify(head, MkIntegerTerm((MyddasInt)str));
#ifdef DEBUG
MYDDAS_STATS_CON_GET_TOTAL_TIME_DBSERVER(node,time);
printf ("Reference to time spent by the Server, on all the SQL Queries\n");
MYDDAS_STATS_PRINT_TIME_STRUCT(time);
printf ("\n\n");
#endif
//[Index 3] -> Total of Time Spent by the DB Server
// processing the last SQL Query
head = HeadOfTerm(list);
list = TailOfTerm(list);
str = myddas_stats_get_stat(node->stats,2);
Yap_unify(head, MkIntegerTerm((MyddasInt)str));
#ifdef DEBUG
MYDDAS_STATS_CON_GET_LAST_TIME_DBSERVER(node,time);
printf ("Reference to time Spent by the Server, on the last SQL Query\n");
MYDDAS_STATS_PRINT_TIME_STRUCT(time);
printf ("\n\n");
#endif
//[Index 4] -> Total of Time Spent by the DB Server
// transferring all the results of the SQL Queries
head = HeadOfTerm(list);
list = TailOfTerm(list);
str = myddas_stats_get_stat(node->stats,3);
Yap_unify(head, MkIntegerTerm((MyddasInt)str));
#ifdef DEBUG
MYDDAS_STATS_CON_GET_TOTAL_TIME_TRANSFERING(node,time);
printf ("Reference to time spent by the Server, transferring all the results of the SQL Queries\n");
MYDDAS_STATS_PRINT_TIME_STRUCT(time);
printf ("\n\n");
#endif
//[Index 5] -> Total of Time Spent by the DB Server
// transferring the result of the last SQL Query
head = HeadOfTerm(list);
list = TailOfTerm(list);
str = myddas_stats_get_stat(node->stats,4);
Yap_unify(head, MkIntegerTerm((MyddasInt)str));
#ifdef DEBUG
MYDDAS_STATS_CON_GET_LAST_TIME_TRANSFERING(node,time);
printf ("Reference to time spent by the Server, transferring the result of the last SQL Query\n");
MYDDAS_STATS_PRINT_TIME_STRUCT(time);
printf ("\n\n");
#endif
//[Index 6] -> Total of Time Spent by the
// db_row_function
head = HeadOfTerm(list);
list = TailOfTerm(list);
str = myddas_stats_get_stat(Yap_REGS.MYDDAS_GLOBAL_POINTER->myddas_statistics->stats,1);
Yap_unify(head, MkIntegerTerm((MyddasInt)str));
#ifdef DEBUG
MYDDAS_STATS_GET_DB_ROW_FUNCTION(time);
printf ("Reference to time Spent by the db_row_function\n");
MYDDAS_STATS_PRINT_TIME_STRUCT(time);
printf ("\n\n");
#endif
//[Index 7] -> Total of Bytes Transferred by the
// DB Server on all SQL Queries
head = HeadOfTerm(list);
list = TailOfTerm(list);
str = myddas_stats_get_stat(node->stats,6);
Yap_unify(head, MkIntegerTerm((MyddasPointer)str));
#ifdef DEBUG
MYDDAS_STATS_CON_GET_TOTAL_BYTES_TRANSFERING_FROM_DBSERVER(node,number);
printf ("Bytes transferred by the DB Server from all queries\n");
printf ("%llu\n\n",(MyddasULInt)number);
#endif
//[Index 8] -> Total of Bytes Transferred by the
// DB Server on the last SQL Query
head = HeadOfTerm(list);
list = TailOfTerm(list);
str = myddas_stats_get_stat(node->stats,7);
Yap_unify(head, MkIntegerTerm((MyddasPointer)str));
#ifdef DEBUG
MYDDAS_STATS_CON_GET_LAST_BYTES_TRANSFERING_FROM_DBSERVER(node,number);
printf ("Bytes transferred by the DB Server on the last query\n");
printf ("%llu\n\n",(MyddasULInt)number);
#endif
//[Index 9] -> Number of queries made to the DB server
head = HeadOfTerm(list);
list = TailOfTerm(list);
str = myddas_stats_get_stat(node->stats,8);
Yap_unify(head, MkIntegerTerm((MyddasPointer)str));
#ifdef DEBUG
MYDDAS_STATS_CON_GET_NUMBER_QUERIES_MADE(node,number);
printf ("Number of queries made to the server\n");
printf ("%llu\n\n",(MyddasULInt)number);
#endif
//[Index 10] -> Total of Time Spent by the
// translate predicate
head = HeadOfTerm(list);
list = TailOfTerm(list);
str = myddas_stats_get_stat(Yap_REGS.MYDDAS_GLOBAL_POINTER->myddas_statistics->stats,2);
Yap_unify(head, MkIntegerTerm((Int)str));
#ifdef DEBUG
MYDDAS_STATS_GET_TRANSLATE(time);
printf ("Reference to time Spent by the translate predicate\n");
MYDDAS_STATS_PRINT_TIME_STRUCT(time);
printf ("\n\n");
#endif
/* Memory management */
#ifdef DEBUG
MyddasULInt nr;
MYDDAS_MEMORY_MALLOC_NR(nr);
printf ("Number of times malloc was called in MYDDAS: %lu \n",nr);
MYDDAS_MEMORY_FREE_NR(nr);
printf ("Number of times free was called in MYDDAS : %lu \n",nr);
MYDDAS_MEMORY_MALLOC_SIZE(nr);
printf ("Total memory allocated in MYDDAS: %lu \n",nr);
MYDDAS_MEMORY_FREE_SIZE(nr);
printf ("Total memory freed in MYDDAS : %lu \n",nr);
#endif
return TRUE;
}
#endif /* MYDDAS_STATS */
/* Function to delete all the temporary tables */
/* from the mysql server */
void Yap_MYDDAS_delete_all_myddas_structs(void)
{
/* DO NOT FORGET TO DO THIS FOR DB_CLOSE AS WELL */
MYDDAS_GLOBAL global =
Yap_REGS.MYDDAS_GLOBAL_POINTER;
/* In case that the MYDDAS module isn't loaded */
if (global == NULL)
return;
MYDDAS_UTIL_CONNECTION connections =
global->myddas_top_connections;
/* Delete all connections */
for(;connections!=NULL;connections=connections->next)
myddas_util_delete_connection(connections->connection);
#ifdef MYDDAS_STATS
myddas_stats_delete_stats_list(global->myddas_statistics->stats);
MYDDAS_FREE(global->myddas_statistics,struct myddas_global_stats);
#endif
MYDDAS_FREE(global,struct myddas_global);
#ifdef DEBUG
MyddasULInt nr;
MYDDAS_MEMORY_MALLOC_NR(nr);
printf ("Number of times malloc was called in MYDDAS: %lu \n",nr);
MYDDAS_MEMORY_FREE_NR(nr);
printf ("Number of times free was called in MYDDAS : %lu \n",nr);
MYDDAS_MEMORY_MALLOC_SIZE(nr);
printf ("Total memory allocated in MYDDAS: %lu \n",nr);
MYDDAS_MEMORY_FREE_SIZE(nr);
printf ("Total memory freed in MYDDAS : %lu \n",nr);
#endif
}