Merge branch 'master' of ssh://yap.git.sourceforge.net/gitroot/yap/yap-6.3
Conflicts: packages/jpl
commit bc8c9a423a
@@ -3256,21 +3256,20 @@ cont_current_predicate_for_atom( USES_REGS1 )
	  }
	  p = RepPredProp(p->NextOfPE);
	}
	READ_UNLOCK(PredHashRWLock);
      }
    }
    READ_UNLOCK(pp->FRWLock);
  } else if (pp->KindOfPE == PEProp) {
    PredEntry *pe = RepPredProp(pf);
    READ_LOCK(pp->FRWLock);
    PELOCK(31,pe);
    if (pe->ModuleOfPred == mod ||
        pe->ModuleOfPred == 0) {
      /* we found the predicate */
      EXTRA_CBACK_ARG(3,1) = MkIntegerTerm((Int)(pp->NextOfPE));
      READ_UNLOCK(pp->FRWLock);
      UNLOCKPE(31,pe);
      return Yap_unify(ARG3,MkIntTerm(0));
    }
    READ_UNLOCK(pp->FRWLock);
    UNLOCKPE(31,pe);
  }
  pf = pp->NextOfPE;
}
@@ -3,6 +3,7 @@

#ifdef __GNUC__
#define INLINE_ONLY __attribute__((gnu_inline,always_inline))
//#define INLINE_ONLY
#else
#define INLINE_ONLY
#endif
@@ -122,7 +122,7 @@
:- dynamic solver/1,output/1,use/1,suppress_attribute_display/1, parameter_softening/1, em_solver/1, use_parfactors/1.

solver(ve).
em_solver(ve).
em_solver(bp).

:- meta_predicate probability(:,-), conditional_probability(:,:,-).
@@ -61,7 +61,7 @@ ground_all_keys([], _).
ground_all_keys([V|GVars], AllKeys) :-
    clpbn:get_atts(V,[key(Key)]),
    \+ ground(Key), !,
    wroteln(g:Key),
    writeln(g:Key),
    member(Key, AllKeys),
    ground_all_keys(GVars, AllKeys).
ground_all_keys([_V|GVars], AllKeys) :-
@@ -52,25 +52,40 @@


call_horus_ground_solver(QueryVars, QueryKeys, AllKeys, Factors, Evidence, Output) :-
    b_hash_new(Hash0),
    keys_to_ids(AllKeys, 0, Hash0, Hash),
    get_factors_type(Factors, Type),
    evidence_to_ids(Evidence, Hash, EvidenceIds),
    factors_to_ids(Factors, Hash, FactorIds),
    %writeln(type:Type), writeln(''),
    %writeln(allKeys:AllKeys), writeln(''),
    %sort(AllKeys,SKeys),writeln(allKeys:SKeys), writeln(''),
    %writeln(factors:Factors), writeln(''),
    %writeln(factorIds:FactorIds), writeln(''),
    %writeln(evidence:Evidence), writeln(''),
    %writeln(evidenceIds:EvidenceIds), writeln(''),
    cpp_create_ground_network(Type, FactorIds, EvidenceIds, Network),
    %get_vars_information(AllKeys, StatesNames),
    %terms_to_atoms(AllKeys, KeysAtoms),
    %cpp_set_vars_information(KeysAtoms, StatesNames),
    run_solver(ground(Network,Hash), QueryKeys, Solutions),
    clpbn_bind_vals([QueryVars], Solutions, Output),
    cpp_free_ground_network(Network).
    call_horus_ground_solver_for_probabilities(QueryKeys, AllKeys, Factors, Evidence, Solutions),
    clpbn_bind_vals([QueryVars], Solutions, Output).

call_horus_ground_solver_for_probabilities(QueryKeys, _AllKeys, Factors, Evidence, Solutions) :-
    attributes:all_attvars(AVars),
    keys(AVars, AllKeys),
    b_hash_new(Hash0),
    keys_to_ids(AllKeys, 0, Hash0, Hash),
    get_factors_type(Factors, Type),
    evidence_to_ids(Evidence, Hash, EvidenceIds),
    factors_to_ids(Factors, Hash, FactorIds),
    writeln(queryKeys:QueryKeys), writeln(''),
    writeln(type:Type), writeln(''),
    writeln(allKeys:AllKeys), writeln(''),
    sort(AllKeys,SKeys),writeln(allSortedKeys:SKeys), writeln(''),
    keys_to_ids(SKeys, 0, Hash0, Hash),
    writeln(factors:Factors), writeln(''),
    writeln(factorIds:FactorIds), writeln(''),
    writeln(evidence:Evidence), writeln(''),
    writeln(evidenceIds:EvidenceIds), writeln(''),
    cpp_create_ground_network(Type, FactorIds, EvidenceIds, Network),
    get_vars_information(AllKeys, StatesNames),
    terms_to_atoms(AllKeys, KeysAtoms),
    cpp_set_vars_information(KeysAtoms, StatesNames),
    run_solver(ground(Network,Hash), QueryKeys, Solutions),
    cpp_free_ground_network(Network).


keys([], []).
keys([V|AVars], [K|AllKeys]) :-
    clpbn:get_atts(V,[key(K)]), !,
    keys(AVars, AllKeys).
keys([_V|AVars], AllKeys) :-
    keys(AVars, AllKeys).


run_solver(ground(Network,Hash), QueryKeys, Solutions) :-
@@ -94,9 +109,13 @@ get_factors_type([f(markov, _, _, _)|_], markov) :- ! .


list_of_keys_to_ids([], _, []).
list_of_keys_to_ids([List|Extra], Hash, [IdList|More]) :-
    List = [_|_], !,
    list_of_keys_to_ids(List, Hash, IdList),
    list_of_keys_to_ids(Extra, Hash, More).
list_of_keys_to_ids([Key|QueryKeys], Hash, [Id|QueryIds]) :-
    b_hash_lookup(Key, Id, Hash),
    list_of_keys_to_ids(QueryKeys, Hash, QueryIds).
    b_hash_lookup(Key, Id, Hash),
    list_of_keys_to_ids(QueryKeys, Hash, QueryIds).


factors_to_ids([], _, []).
@@ -134,10 +153,33 @@ terms_to_atoms(K.Ks, Atom.As) :-
finalize_horus_ground_solver(bp(Network, _)) :-
    cpp_free_ground_network(Network).

%
% QVars: all query variables?
%
%
init_horus_ground_solver(QueryVars, _AllVars, _, horus(GKeys, Keys, Factors, Evidence)) :-
    trace,
    generate_networks(QueryVars, GKeys, [], Keys, [], Factors, [], Evidence),
    writeln(qvs:QueryVars),
    writeln(Keys), !.

init_horus_ground_solver(_, _AllVars0, _, bp(_BayesNet, _DistIds)) :- !.
%
% as you add query vars the network grows
% until you reach the last variable.
%
generate_networks([QVars|QueryVars], [GK|GKeys], _K0, K, _F0, F, _E0, E) :-
    clpbn:generate_network(QVars, GK, KI, FI, EI),
    generate_networks(QueryVars, GKeys, KI, K, FI, F, EI, E).
generate_networks([], [], K, K, F, F, E, E).

run_horus_ground_solver(_QueryVars, _Solutions, bp(_Network, _DistIds)) :- !.
%
% just call horus solver.
%
run_horus_ground_solver(_QueryVars, Solutions, horus(GKeys, Keys, Factors, Evidence) ) :- !,
    writeln(sols:Solutions),
    writeln(state:_State),
    trace,
    call_horus_ground_solver_for_probabilities(GKeys, Keys, Factors, Evidence, Solutions).

%bp([[]],_,_) :- !.
%bp([QueryVars], AllVars, Output) :-
@@ -2,7 +2,7 @@

Interface to Horus Lifted Solvers. Used by:
    - Lifted Variable Elimination

    - Lifted First-Order Belief Propagation
********************************************************/

:- module(clpbn_horus_lifted,
@@ -81,7 +81,6 @@ ve([LVs],Vs0,AllDiffs) :-
init_ve_solver(Qs, Vs0, _, LVis) :-
    check_for_agg_vars(Vs0, Vs1),
    % LVi will have a list of CLPBN variables
    % Tables0 will have the full data on each variable
    init_influences(Vs1, G, RG),
    init_ve_solver_for_questions(Qs, G, RG, _, LVis).
56	packages/CLPBN/examples/learning/prof_params.pfl	Normal file
@@ -0,0 +1,56 @@
% learn distribution for school database.

:- use_module(library(pfl)).

:- use_module(library(clpbn/learning/em)).

bayes abi(K)::[h,m,l] ; abi_table ; [professor(K)].

bayes pop(K)::[h,m,l], abi(K) ; pop_table ; [professor(K)].

abi_table([0.3,0.3,0.4]).

pop_table([0.3,0.3,0.4,0.3,0.3,0.4,0.3,0.3,0.4]).

goal_list([/*abi(p0,h),
    abi(p1,m),
    abi(p2,m),
    abi(p3,m),
    abi(p4,l),*/
    pop(p5,h),
    abi(p5,_),
    abi(p6,_),
    pop(p7,_)]).

professor(p1).
professor(p2).
professor(p3).
professor(p4).
professor(p5).
professor(p6).
professor(p7).
professor(p8).

%:- clpbn:set_clpbn_flag(em_solver,gibbs).
%:- clpbn:set_clpbn_flag(em_solver,jt).
:- clpbn:set_clpbn_flag(em_solver,ve).
%:- clpbn:set_clpbn_flag(em_solver,bp).

timed_main :-
    statistics(runtime, _),
    main(Lik),
    statistics(runtime, [T,_]),
    format('Took ~d msec and Lik ~3f~n',[T,Lik]).

main(Lik) :-
    goal_list(L),
%   run_queries(L),
    em(L,0.01,10,_,Lik).

run_queries([]).
run_queries(Q.L) :-
    call(Q),
    run_queries(L).
File diff suppressed because one or more lines are too long
@@ -236,7 +236,7 @@ CountingBp::createClusters (
    const VarNodes& groupVars = it->second;
    VarCluster* vc = new VarCluster (groupVars);
    for (size_t i = 0; i < groupVars.size(); i++) {
      vid2VarCluster_.insert (make_pair (groupVars[i]->varId(), vc));
      varClusterMap_.insert (make_pair (groupVars[i]->varId(), vc));
    }
    varClusters_.push_back (vc);
  }
@@ -250,7 +250,7 @@ CountingBp::createClusters (
    varClusters.reserve (neighs.size());
    for (size_t i = 0; i < neighs.size(); i++) {
      VarId vid = neighs[i]->varId();
      varClusters.push_back (vid2VarCluster_.find (vid)->second);
      varClusters.push_back (varClusterMap_.find (vid)->second);
    }
    facClusters_.push_back (new FacCluster (it->second, varClusters));
  }
@@ -294,8 +294,8 @@ CountingBp::getSignature (const FacNode* facNode)
VarId
CountingBp::getRepresentative (VarId vid)
{
  assert (Util::contains (vid2VarCluster_, vid));
  VarCluster* vc = vid2VarCluster_.find (vid)->second;
  assert (Util::contains (varClusterMap_, vid));
  VarCluster* vc = varClusterMap_.find (vid)->second;
  return vc->representative()->varId();
}
@@ -10,8 +10,6 @@

class VarCluster;
class FacCluster;
class VarSignHash;
class FacSignHash;
class WeightedBp;

typedef long Color;
@@ -22,40 +20,44 @@ typedef vector<Color> FacSignature;
typedef unordered_map<unsigned, Color> DistColorMap;
typedef unordered_map<unsigned, Colors> VarColorMap;

typedef unordered_map<VarSignature, VarNodes, VarSignHash> VarSignMap;
typedef unordered_map<FacSignature, FacNodes, FacSignHash> FacSignMap;
typedef unordered_map<VarSignature, VarNodes> VarSignMap;
typedef unordered_map<FacSignature, FacNodes> FacSignMap;

typedef unordered_map<VarId, VarCluster*> VarClusterMap;

typedef vector<VarCluster*> VarClusters;
typedef vector<FacCluster*> FacClusters;

typedef unordered_map<VarId, VarCluster*> VarId2VarCluster;


struct VarSignHash
template <class T>
inline size_t hash_combine (size_t seed, const T& v)
{
  size_t operator() (const VarSignature &sig) const
  {
    size_t val = hash<size_t>()(sig.size());
    for (size_t i = 0; i < sig.size(); i++) {
      val ^= hash<size_t>()(sig[i].first);
      val ^= hash<size_t>()(sig[i].second);
    }
    return val;
  }
};
  return seed ^ (hash<T>()(v) + 0x9e3779b9 + (seed << 6) + (seed >> 2));
}


struct FacSignHash
{
  size_t operator() (const FacSignature &sig) const
namespace std {
template <typename T1, typename T2> struct hash<std::pair<T1,T2>>
  {
    size_t val = hash<size_t>()(sig.size());
    for (size_t i = 0; i < sig.size(); i++) {
      val ^= hash<size_t>()(sig[i]);
    size_t operator() (const std::pair<T1,T2>& p) const
    {
      return hash_combine (std::hash<T1>()(p.first), p.second);
    }
    return val;
  }
};
};

template <typename T> struct hash<std::vector<T>>
{
  size_t operator() (const std::vector<T>& vec) const
  {
    size_t h = 0;
    typename vector<T>::const_iterator first = vec.begin();
    typename vector<T>::const_iterator last = vec.end();
    for (; first != last; ++first) {
      h = hash_combine (h, *first);
    }
    return h;
  }
};
}


class VarCluster
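The hunk above replaces the hand-written VarSignHash/FacSignHash functors with a boost-style hash_combine helper plus std::hash specializations for std::pair and std::vector, which is why VarSignMap and FacSignMap can drop their explicit hasher arguments. The stand-alone sketch below reproduces that idiom outside Horus; the Color/VarSignature typedefs and main() here are illustrative stand-ins, not the real headers, and specializing std::hash for standard-library types is technically non-portable even though it compiles with common toolchains.

#include <cstddef>
#include <functional>
#include <iostream>
#include <unordered_map>
#include <utility>
#include <vector>

// Boost-style mixing step, same formula as in the patch above.
template <class T>
inline size_t hash_combine (size_t seed, const T& v)
{
  return seed ^ (std::hash<T>()(v) + 0x9e3779b9 + (seed << 6) + (seed >> 2));
}

namespace std {
// Hash a pair by combining the hashes of its two fields.
template <typename T1, typename T2> struct hash<std::pair<T1,T2>>
{
  size_t operator() (const std::pair<T1,T2>& p) const
  {
    return hash_combine (std::hash<T1>()(p.first), p.second);
  }
};

// Hash a vector by folding hash_combine over its elements.
template <typename T> struct hash<std::vector<T>>
{
  size_t operator() (const std::vector<T>& vec) const
  {
    size_t h = 0;
    for (typename std::vector<T>::const_iterator it = vec.begin();
         it != vec.end(); ++it) {
      h = hash_combine (h, *it);
    }
    return h;
  }
};
}

// Illustrative stand-ins for the Horus typedefs, not the real headers.
typedef long Color;
typedef std::vector<std::pair<unsigned, Color> > VarSignature;

int main ()
{
  // No explicit hasher argument is needed any more, which is why the
  // VarSignMap/FacSignMap typedefs in the patch can drop VarSignHash.
  std::unordered_map<VarSignature, int> varSignMap;
  VarSignature sig;
  sig.push_back (std::make_pair (0u, 1L));
  varSignMap[sig] = 42;
  std::cout << varSignMap[sig] << std::endl;
  return 0;
}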
@@ -72,8 +74,8 @@ class VarCluster
  void setRepresentative (VarNode* vn) { repr_ = vn; }

  private:
    VarNodes members_;
    VarNode* repr_;
    VarNodes members_;
    VarNode* repr_;
};
@@ -86,17 +88,17 @@ class FacCluster
  const FacNode* first (void) const { return members_.front(); }

  const FacNodes& members (void) const { return members_; }

  VarClusters& varClusters (void) { return varClusters_; }


  FacNode* representative (void) const { return repr_; }

  void setRepresentative (FacNode* fn) { repr_ = fn; }

  VarClusters& varClusters (void) { return varClusters_; }

  private:
    FacNodes members_;
    VarClusters varClusters_;
    FacNode* repr_;
    VarClusters varClusters_;
};
@@ -171,9 +173,9 @@ class CountingBp : public Solver
    Colors facColors_;
    VarClusters varClusters_;
    FacClusters facClusters_;
    VarId2VarCluster vid2VarCluster_;
    VarClusterMap varClusterMap_;
    const FactorGraph* compressedFg_;
    WeightedBp* solver_;
    WeightedBp* solver_;
};

#endif // HORUS_COUNTINGBP_H
@@ -184,26 +184,43 @@ ElimGraph::getLowestCostNode (void) const
{
  EgNode* bestNode = 0;
  unsigned minCost = std::numeric_limits<unsigned>::max();
  unsigned cost = 0;
  EGNeighs::const_iterator it;
  switch (elimHeuristic) {
    case MIN_NEIGHBORS: {
      for (it = unmarked_.begin(); it != unmarked_.end(); ++ it) {
        cost = getNeighborsCost (*it);
        unsigned cost = getNeighborsCost (*it);
        if (cost < minCost) {
          bestNode = *it;
          minCost = cost;
        }
      }}
      break;
    case MIN_WEIGHT:
      //cost = getWeightCost (unmarked_[i]);
    case MIN_WEIGHT: {
      for (it = unmarked_.begin(); it != unmarked_.end(); ++ it) {
        unsigned cost = getWeightCost (*it);
        if (cost < minCost) {
          bestNode = *it;
          minCost = cost;
        }
      }}
      break;
    case MIN_FILL:
      //cost = getFillCost (unmarked_[i]);
    case MIN_FILL: {
      for (it = unmarked_.begin(); it != unmarked_.end(); ++ it) {
        unsigned cost = getFillCost (*it);
        if (cost < minCost) {
          bestNode = *it;
          minCost = cost;
        }
      }}
      break;
    case WEIGHTED_MIN_FILL:
      //cost = getWeightedFillCost (unmarked_[i]);
    case WEIGHTED_MIN_FILL: {
      for (it = unmarked_.begin(); it != unmarked_.end(); ++ it) {
        unsigned cost = getWeightedFillCost (*it);
        if (cost < minCost) {
          bestNode = *it;
          minCost = cost;
        }
      }}
      break;
    default:
      assert (false);
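The hunk above fills in the MIN_WEIGHT, MIN_FILL and WEIGHTED_MIN_FILL cases with loops that are identical except for the cost call. As a design note, those four scans could be collapsed into one helper parameterized by a cost function; the sketch below is only an illustration with simplified stand-ins for EgNode and EGNeighs, not the actual ElimGraph code.

#include <functional>
#include <iostream>
#include <limits>
#include <vector>

// Illustrative stand-ins for the elimination-graph types; the real EgNode
// and EGNeighs in ElimGraph.h carry much more state than this.
struct EgNode { unsigned neighbors; unsigned weight; };
typedef std::vector<EgNode*> EGNeighs;

enum ElimHeuristic { MIN_NEIGHBORS, MIN_WEIGHT };

// Single scan over the unmarked nodes, parameterized by a cost function,
// instead of one near-identical loop per heuristic as in the hunk above.
static EgNode*
lowestCostNode (const EGNeighs& unmarked,
                const std::function<unsigned (const EgNode*)>& cost)
{
  EgNode* bestNode = 0;
  unsigned minCost = std::numeric_limits<unsigned>::max();
  for (EGNeighs::const_iterator it = unmarked.begin();
       it != unmarked.end(); ++it) {
    unsigned c = cost (*it);
    if (c < minCost) {
      bestNode = *it;
      minCost  = c;
    }
  }
  return bestNode;
}

int main ()
{
  EgNode a = { 3, 10 };
  EgNode b = { 1, 20 };
  EGNeighs unmarked;
  unmarked.push_back (&a);
  unmarked.push_back (&b);

  // Select the cost function once, then reuse the generic scan.
  ElimHeuristic h = MIN_NEIGHBORS;
  std::function<unsigned (const EgNode*)> cost;
  if (h == MIN_NEIGHBORS) {
    cost = [](const EgNode* n) { return n->neighbors; };
  } else {
    cost = [](const EgNode* n) { return n->weight; };
  }

  EgNode* best = lowestCostNode (unmarked, cost);
  std::cout << (best == &b ? "picked b (fewer neighbors)" : "picked a")
            << std::endl;
  return 0;
}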
@@ -130,9 +130,9 @@ class ElimGraph

  void connectAllNeighbors (const EgNode*);

    vector<EgNode*> nodes_;
    TinySet<EgNode*> unmarked_;
    unordered_map<VarId, EgNode*> varMap_;
    vector<EgNode*> nodes_;
    TinySet<EgNode*> unmarked_;
    unordered_map<VarId, EgNode*> varMap_;
};

#endif // HORUS_ELIMGRAPH_H
@@ -1,5 +1,3 @@
- Find a way to decrease the time required to find an
  elimination order for variable elimination
- Consider using hashs instead of vectors of colors to calculate the groups in
  counting bp
@@ -97,7 +97,7 @@ init_em(Items, state( AllDists, AllDistInstances, MargVars, SolverVars)) :-
em_loop(Its, Likelihood0, State, MaxError, MaxIts, LikelihoodF, FTables) :-
    estimate(State, LPs),
    maximise(State, Tables, LPs, Likelihood),
%   writeln(Likelihood:Its:Likelihood0:Tables),
    writeln(iteration:Its:Likelihood:Its:Likelihood0:Tables),
    (
      (
        abs((Likelihood - Likelihood0)/Likelihood) < MaxError
@@ -166,6 +166,14 @@ find_variables([K|PKeys], AllVars0, [Parent|Parents]) :-
    find_variable(K, AllVars0, Parent),
    find_variables(PKeys, AllVars0, Parents).

%
% in clp(bn) the whole network is constructed when you evaluate EM. In
% pfl, we want to delay execution until as late as possible.
% we just create a new variable and hope for the best.
%
%
find_variable(K, [], Parent) :-
    clpbn:put_atts(Parent, [key(K)]).
find_variable(K, [Parent|_AllVars0], Parent) :-
    clpbn:get_atts(Parent, [key(K0)]), K0 =@= K, !.
find_variable(K, [_|AllVars0], Parent) :-