diff --git a/H/Regs.h b/H/Regs.h index b5bff80e7..58b9d76c4 100644 --- a/H/Regs.h +++ b/H/Regs.h @@ -108,7 +108,7 @@ typedef struct regstore_t yamop *CP_; /* 28 continuation program counter */ CELL *ENV_; /* 1 current environment */ #ifdef CUT_C - cut_c_str_ptr CUT_C_TOP; + struct cut_c_str *CUT_C_TOP; #endif #if defined CUT_C && (defined MYDDAS_ODBC || defined MYDDAS_MYSQL) MYDDAS_GLOBAL MYDDAS_GLOBAL_POINTER; diff --git a/Makefile.in b/Makefile.in index 0a4a8a523..d999ed46b 100755 --- a/Makefile.in +++ b/Makefile.in @@ -50,7 +50,7 @@ DOCSDIR=$(SHAREDIR)/doc/Yap # # check also optimisation options in INSTALL file. # -YAP_EXTRAS=-DCUT_C=1 @YAP_EXTRAS@ +YAP_EXTRAS=@YAP_EXTRAS@ YAPSTARTUP=startup.yss diff --git a/configure b/configure index d9e036b7c..509f2e2a8 100755 --- a/configure +++ b/configure @@ -1,9 +1,11 @@ #! /bin/sh # Guess values for system-dependent variables and create Makefiles. -# Generated by GNU Autoconf 2.69. +# Generated by GNU Autoconf 2.68. # # -# Copyright (C) 1992-1996, 1998-2012 Free Software Foundation, Inc. +# Copyright (C) 1992, 1993, 1994, 1995, 1996, 1998, 1999, 2000, 2001, +# 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 Free Software +# Foundation, Inc. # # # This configure script is free software; the Free Software Foundation @@ -132,31 +134,6 @@ export LANGUAGE # CDPATH. (unset CDPATH) >/dev/null 2>&1 && unset CDPATH -# Use a proper internal environment variable to ensure we don't fall - # into an infinite loop, continuously re-executing ourselves. - if test x"${_as_can_reexec}" != xno && test "x$CONFIG_SHELL" != x; then - _as_can_reexec=no; export _as_can_reexec; - # We cannot yet assume a decent shell, so we have to provide a -# neutralization value for shells without unset; and this also -# works around shells that cannot unset nonexistent variables. -# Preserve -v and -x to the replacement shell. -BASH_ENV=/dev/null -ENV=/dev/null -(unset BASH_ENV) >/dev/null 2>&1 && unset BASH_ENV ENV -case $- in # (((( - *v*x* | *x*v* ) as_opts=-vx ;; - *v* ) as_opts=-v ;; - *x* ) as_opts=-x ;; - * ) as_opts= ;; -esac -exec $CONFIG_SHELL $as_opts "$as_myself" ${1+"$@"} -# Admittedly, this is quite paranoid, since all the known shells bail -# out after a failed `exec'. -$as_echo "$0: could not re-execute with $CONFIG_SHELL" >&2 -as_fn_exit 255 - fi - # We don't want this to propagate to other subprocesses. - { _as_can_reexec=; unset _as_can_reexec;} if test "x$CONFIG_SHELL" = x; then as_bourne_compatible="if test -n \"\${ZSH_VERSION+set}\" && (emulate sh) >/dev/null 2>&1; then : emulate sh @@ -190,8 +167,7 @@ if ( set x; as_fn_ret_success y && test x = \"\$1\" ); then : else exitcode=1; echo positional parameters were not saved. fi -test x\$exitcode = x0 || exit 1 -test -x / || exit 1" +test x\$exitcode = x0 || exit 1" as_suggested=" as_lineno_1=";as_suggested=$as_suggested$LINENO;as_suggested=$as_suggested" as_lineno_1a=\$LINENO as_lineno_2=";as_suggested=$as_suggested$LINENO;as_suggested=$as_suggested" as_lineno_2a=\$LINENO eval 'test \"x\$as_lineno_1'\$as_run'\" != \"x\$as_lineno_2'\$as_run'\" && @@ -236,25 +212,21 @@ IFS=$as_save_IFS if test "x$CONFIG_SHELL" != x; then : - export CONFIG_SHELL - # We cannot yet assume a decent shell, so we have to provide a -# neutralization value for shells without unset; and this also -# works around shells that cannot unset nonexistent variables. -# Preserve -v and -x to the replacement shell. 
-BASH_ENV=/dev/null -ENV=/dev/null -(unset BASH_ENV) >/dev/null 2>&1 && unset BASH_ENV ENV -case $- in # (((( - *v*x* | *x*v* ) as_opts=-vx ;; - *v* ) as_opts=-v ;; - *x* ) as_opts=-x ;; - * ) as_opts= ;; -esac -exec $CONFIG_SHELL $as_opts "$as_myself" ${1+"$@"} -# Admittedly, this is quite paranoid, since all the known shells bail -# out after a failed `exec'. -$as_echo "$0: could not re-execute with $CONFIG_SHELL" >&2 -exit 255 + # We cannot yet assume a decent shell, so we have to provide a + # neutralization value for shells without unset; and this also + # works around shells that cannot unset nonexistent variables. + # Preserve -v and -x to the replacement shell. + BASH_ENV=/dev/null + ENV=/dev/null + (unset BASH_ENV) >/dev/null 2>&1 && unset BASH_ENV ENV + export CONFIG_SHELL + case $- in # (((( + *v*x* | *x*v* ) as_opts=-vx ;; + *v* ) as_opts=-v ;; + *x* ) as_opts=-x ;; + * ) as_opts= ;; + esac + exec "$CONFIG_SHELL" $as_opts "$as_myself" ${1+"$@"} fi if test x$as_have_required = xno; then : @@ -356,14 +328,6 @@ $as_echo X"$as_dir" | } # as_fn_mkdir_p - -# as_fn_executable_p FILE -# ----------------------- -# Test if FILE is an executable regular file. -as_fn_executable_p () -{ - test -f "$1" && test -x "$1" -} # as_fn_executable_p # as_fn_append VAR VALUE # ---------------------- # Append the text in VALUE to the end of the definition contained in VAR. Take @@ -485,10 +449,6 @@ as_cr_alnum=$as_cr_Letters$as_cr_digits chmod +x "$as_me.lineno" || { $as_echo "$as_me: error: cannot create $as_me.lineno; rerun with a POSIX shell" >&2; as_fn_exit 1; } - # If we had to re-execute with $CONFIG_SHELL, we're ensured to have - # already done that, so ensure we don't try to do so again and fall - # in an infinite loop. This has already happened in practice. - _as_can_reexec=no; export _as_can_reexec # Don't try to exec as it changes $[0], causing all sort of problems # (the dirname of $[0] is not the place where we might find the # original and so on. Autoconf is especially sensitive to this). @@ -523,16 +483,16 @@ if (echo >conf$$.file) 2>/dev/null; then # ... but there are two gotchas: # 1) On MSYS, both `ln -s file dir' and `ln file dir' fail. # 2) DJGPP < 2.04 has no symlinks; `ln -s' creates a wrapper executable. - # In both cases, we have to default to `cp -pR'. + # In both cases, we have to default to `cp -p'. ln -s conf$$.file conf$$.dir 2>/dev/null && test ! -f conf$$.exe || - as_ln_s='cp -pR' + as_ln_s='cp -p' elif ln conf$$.file conf$$ 2>/dev/null; then as_ln_s=ln else - as_ln_s='cp -pR' + as_ln_s='cp -p' fi else - as_ln_s='cp -pR' + as_ln_s='cp -p' fi rm -f conf$$ conf$$.exe conf$$.dir/conf$$.file conf$$.file rmdir conf$$.dir 2>/dev/null @@ -544,8 +504,28 @@ else as_mkdir_p=false fi -as_test_x='test -x' -as_executable_p=as_fn_executable_p +if test -x / >/dev/null 2>&1; then + as_test_x='test -x' +else + if ls -dL / >/dev/null 2>&1; then + as_ls_L_option=L + else + as_ls_L_option= + fi + as_test_x=' + eval sh -c '\'' + if test -d "$1"; then + test -d "$1/."; + else + case $1 in #( + -*)set "./$1";; + esac; + case `ls -ld'$as_ls_L_option' "$1" 2>/dev/null` in #(( + ???[sx]*):;;*)false;;esac;fi + '\'' sh + ' +fi +as_executable_p=$as_test_x # Sed expression to map a string onto a valid CPP name. 
as_tr_cpp="eval sed 'y%*$as_cr_letters%P$as_cr_LETTERS%;s%[^_$as_cr_alnum]%_%g'" @@ -1335,6 +1315,8 @@ target=$target_alias if test "x$host_alias" != x; then if test "x$build_alias" = x; then cross_compiling=maybe + $as_echo "$as_me: WARNING: if you wanted to set the --build type, don't use --host. + If a cross compiler is detected then cross compile mode will be used" >&2 elif test "x$build_alias" != "x$host_alias"; then cross_compiling=yes fi @@ -1625,9 +1607,9 @@ test -n "$ac_init_help" && exit $ac_status if $ac_init_version; then cat <<\_ACEOF configure -generated by GNU Autoconf 2.69 +generated by GNU Autoconf 2.68 -Copyright (C) 2012 Free Software Foundation, Inc. +Copyright (C) 2010 Free Software Foundation, Inc. This configure script is free software; the Free Software Foundation gives unlimited permission to copy, distribute and modify it. _ACEOF @@ -1938,7 +1920,7 @@ $as_echo "$ac_try_echo"; } >&5 test ! -s conftest.err } && test -s conftest$ac_exeext && { test "$cross_compiling" = yes || - test -x conftest$ac_exeext + $as_test_x conftest$ac_exeext }; then : ac_retval=0 else @@ -2041,8 +2023,7 @@ int main () { static int test_array [1 - 2 * !(($2) >= 0)]; -test_array [0] = 0; -return test_array [0]; +test_array [0] = 0 ; return 0; @@ -2058,8 +2039,7 @@ int main () { static int test_array [1 - 2 * !(($2) <= $ac_mid)]; -test_array [0] = 0; -return test_array [0]; +test_array [0] = 0 ; return 0; @@ -2085,8 +2065,7 @@ int main () { static int test_array [1 - 2 * !(($2) < 0)]; -test_array [0] = 0; -return test_array [0]; +test_array [0] = 0 ; return 0; @@ -2102,8 +2081,7 @@ int main () { static int test_array [1 - 2 * !(($2) >= $ac_mid)]; -test_array [0] = 0; -return test_array [0]; +test_array [0] = 0 ; return 0; @@ -2137,8 +2115,7 @@ int main () { static int test_array [1 - 2 * !(($2) <= $ac_mid)]; -test_array [0] = 0; -return test_array [0]; +test_array [0] = 0 ; return 0; @@ -2322,7 +2299,7 @@ This file contains any messages produced by compilers while running configure, to aid debugging if configure makes a mistake. It was created by $as_me, which was -generated by GNU Autoconf 2.69. Invocation command line was +generated by GNU Autoconf 2.68. Invocation command line was $ $0 $@ @@ -2698,7 +2675,7 @@ do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do - if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ac_cv_prog_CC="${ac_tool_prefix}gcc" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 @@ -2738,7 +2715,7 @@ do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do - if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ac_cv_prog_ac_ct_CC="gcc" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 @@ -2791,7 +2768,7 @@ do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do - if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ac_cv_prog_CC="${ac_tool_prefix}cc" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 @@ -2832,7 +2809,7 @@ do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for ac_exec_ext in '' $ac_executable_extensions; do - if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then if test "$as_dir/$ac_word$ac_exec_ext" = "/usr/ucb/cc"; then ac_prog_rejected=yes continue @@ -2890,7 +2867,7 @@ do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do - if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ac_cv_prog_CC="$ac_tool_prefix$ac_prog" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 @@ -2934,7 +2911,7 @@ do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do - if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ac_cv_prog_ac_ct_CC="$ac_prog" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 @@ -3380,7 +3357,8 @@ cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include #include -struct stat; +#include +#include /* Most of the following tests are stolen from RCS 5.7's src/conf.sh. */ struct buf { int x; }; FILE * (*rcsopen) (struct buf *, struct stat *, int); @@ -3493,7 +3471,7 @@ do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do - if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ac_cv_prog_CXX="$ac_tool_prefix$ac_prog" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 @@ -3537,7 +3515,7 @@ do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do - if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ac_cv_prog_ac_ct_CXX="$ac_prog" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 @@ -3740,7 +3718,7 @@ do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do - if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ac_cv_prog_AWK="$ac_prog" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 @@ -4090,7 +4068,7 @@ do for ac_prog in grep ggrep; do for ac_exec_ext in '' $ac_executable_extensions; do ac_path_GREP="$as_dir/$ac_prog$ac_exec_ext" - as_fn_executable_p "$ac_path_GREP" || continue + { test -f "$ac_path_GREP" && $as_test_x "$ac_path_GREP"; } || continue # Check for GNU ac_path_GREP and select it if it is found. # Check for GNU $ac_path_GREP case `"$ac_path_GREP" --version 2>&1` in @@ -4156,7 +4134,7 @@ do for ac_prog in egrep; do for ac_exec_ext in '' $ac_executable_extensions; do ac_path_EGREP="$as_dir/$ac_prog$ac_exec_ext" - as_fn_executable_p "$ac_path_EGREP" || continue + { test -f "$ac_path_EGREP" && $as_test_x "$ac_path_EGREP"; } || continue # Check for GNU ac_path_EGREP and select it if it is found. # Check for GNU $ac_path_EGREP case `"$ac_path_EGREP" --version 2>&1` in @@ -5278,7 +5256,7 @@ case $as_dir/ in #(( # by default. 
for ac_prog in ginstall scoinst install; do for ac_exec_ext in '' $ac_executable_extensions; do - if as_fn_executable_p "$as_dir/$ac_prog$ac_exec_ext"; then + if { test -f "$as_dir/$ac_prog$ac_exec_ext" && $as_test_x "$as_dir/$ac_prog$ac_exec_ext"; }; then if test $ac_prog = install && grep dspmsg "$as_dir/$ac_prog$ac_exec_ext" >/dev/null 2>&1; then # AIX install. It has an incompatible calling convention. @@ -5351,7 +5329,7 @@ do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do - if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ac_cv_prog_RANLIB="${ac_tool_prefix}ranlib" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 @@ -5391,7 +5369,7 @@ do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do - if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ac_cv_prog_ac_ct_RANLIB="ranlib" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 @@ -5443,7 +5421,7 @@ do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do - if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ac_cv_prog_INDENT="${ac_tool_prefix}indent" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 @@ -5483,7 +5461,7 @@ do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do - if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ac_cv_prog_ac_ct_INDENT="indent" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 @@ -5535,7 +5513,7 @@ do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do - if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ac_cv_prog_AR="${ac_tool_prefix}ar" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 @@ -5575,7 +5553,7 @@ do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do - if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ac_cv_prog_ac_ct_AR="ar" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 @@ -5627,7 +5605,7 @@ do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do - if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ac_cv_prog_MPI_CC="${ac_tool_prefix}mpicc" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 @@ -5667,7 +5645,7 @@ do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for ac_exec_ext in '' $ac_executable_extensions; do - if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ac_cv_prog_ac_ct_MPI_CC="mpicc" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 @@ -5721,7 +5699,7 @@ do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do - if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ac_cv_path_INSTALL_INFO="$as_dir/$ac_word$ac_exec_ext" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 @@ -5762,7 +5740,7 @@ do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do - if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ac_cv_path_SHELL="$as_dir/$ac_word$ac_exec_ext" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 @@ -6752,7 +6730,7 @@ do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do - if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ac_cv_prog_REXE="$ac_prog" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 @@ -6873,7 +6851,7 @@ do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do - if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ac_cv_prog_PYTHON="$ac_prog" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 @@ -8459,7 +8437,7 @@ fi if test "$coroutining" = "yes" then - YAP_EXTRAS="$YAP_EXTRAS -DCOROUTINING=1" + YAP_EXTRAS="$YAP_EXTRAS -DCOROUTINING=1 -DCUT_C=1" fi if test "$rationaltrees" = "yes" @@ -11854,16 +11832,16 @@ if (echo >conf$$.file) 2>/dev/null; then # ... but there are two gotchas: # 1) On MSYS, both `ln -s file dir' and `ln file dir' fail. # 2) DJGPP < 2.04 has no symlinks; `ln -s' creates a wrapper executable. - # In both cases, we have to default to `cp -pR'. + # In both cases, we have to default to `cp -p'. ln -s conf$$.file conf$$.dir 2>/dev/null && test ! -f conf$$.exe || - as_ln_s='cp -pR' + as_ln_s='cp -p' elif ln conf$$.file conf$$ 2>/dev/null; then as_ln_s=ln else - as_ln_s='cp -pR' + as_ln_s='cp -p' fi else - as_ln_s='cp -pR' + as_ln_s='cp -p' fi rm -f conf$$ conf$$.exe conf$$.dir/conf$$.file conf$$.file rmdir conf$$.dir 2>/dev/null @@ -11923,16 +11901,28 @@ else as_mkdir_p=false fi - -# as_fn_executable_p FILE -# ----------------------- -# Test if FILE is an executable regular file. 
-as_fn_executable_p () -{ - test -f "$1" && test -x "$1" -} # as_fn_executable_p -as_test_x='test -x' -as_executable_p=as_fn_executable_p +if test -x / >/dev/null 2>&1; then + as_test_x='test -x' +else + if ls -dL / >/dev/null 2>&1; then + as_ls_L_option=L + else + as_ls_L_option= + fi + as_test_x=' + eval sh -c '\'' + if test -d "$1"; then + test -d "$1/."; + else + case $1 in #( + -*)set "./$1";; + esac; + case `ls -ld'$as_ls_L_option' "$1" 2>/dev/null` in #(( + ???[sx]*):;;*)false;;esac;fi + '\'' sh + ' +fi +as_executable_p=$as_test_x # Sed expression to map a string onto a valid CPP name. as_tr_cpp="eval sed 'y%*$as_cr_letters%P$as_cr_LETTERS%;s%[^_$as_cr_alnum]%_%g'" @@ -11954,7 +11944,7 @@ cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 # values after options handling. ac_log=" This file was extended by $as_me, which was -generated by GNU Autoconf 2.69. Invocation command line was +generated by GNU Autoconf 2.68. Invocation command line was CONFIG_FILES = $CONFIG_FILES CONFIG_HEADERS = $CONFIG_HEADERS @@ -12016,10 +12006,10 @@ cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 ac_cs_config="`$as_echo "$ac_configure_args" | sed 's/^ //; s/[\\""\`\$]/\\\\&/g'`" ac_cs_version="\\ config.status -configured by $0, generated by GNU Autoconf 2.69, +configured by $0, generated by GNU Autoconf 2.68, with options \\"\$ac_cs_config\\" -Copyright (C) 2012 Free Software Foundation, Inc. +Copyright (C) 2010 Free Software Foundation, Inc. This config.status script is free software; the Free Software Foundation gives unlimited permission to copy, distribute and modify it." @@ -12109,7 +12099,7 @@ fi _ACEOF cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 if \$ac_cs_recheck; then - set X $SHELL '$0' $ac_configure_args \$ac_configure_extra_args --no-create --no-recursion + set X '$SHELL' '$0' $ac_configure_args \$ac_configure_extra_args --no-create --no-recursion shift \$as_echo "running CONFIG_SHELL=$SHELL \$*" >&6 CONFIG_SHELL='$SHELL' diff --git a/configure.in b/configure.in index e7ecdf500..db505b1ef 100755 --- a/configure.in +++ b/configure.in @@ -1487,7 +1487,7 @@ fi if test "$coroutining" = "yes" then - YAP_EXTRAS="$YAP_EXTRAS -DCOROUTINING=1" + YAP_EXTRAS="$YAP_EXTRAS -DCOROUTINING=1 -DCUT_C=1" fi if test "$rationaltrees" = "yes" diff --git a/library/dialect/bprolog.yap b/library/dialect/bprolog.yap index c2cdcc067..d94ce2509 100644 --- a/library/dialect/bprolog.yap +++ b/library/dialect/bprolog.yap @@ -81,10 +81,10 @@ getclauses1(File, Prog, _Opts) :- '$bpe_process_pred'([], _F, N, Mode, _Delay, _Tabled, []) --> { '$init_mode'(N, Mode) }. -'$bpe_process_pred'(Call.Prog0, F,N,Modes,Delay,Tabled, Cls0) --> +'$bpe_process_pred'([Call|Prog0], F,N, Modes, Delay, Tabled, Cls0) --> { '$get_pred'(Call, F, N, Modes, Delay, Tabled, Cls0, ClsI) }, !, - '$bpe_process_pred'(Prog0, F,N,Modes,Delay,Tabled, ClsI). -'$bpe_process_pred'(Call.Prog0, F,N,Modes,Delay,Tabled, Cls0) --> + '$bpe_process_pred'(Prog0, F, N, Modes, Delay, Tabled, ClsI). +'$bpe_process_pred'([Call|Prog0], F, N, Modes, Delay, Tabled, Cls0) --> [ Call ], '$bpe_process_pred'(Prog0, F,N,Modes,Delay,Tabled, Cls0). @@ -97,21 +97,22 @@ getclauses1(File, Prog, _Opts) :- '$get_pred'((P :- Q), F, N, _Modes, _Delay, _Tabled) --> { functor(P, F, N), ! }, [(P:-Q)]. -'$get_pred'((:- mode Q), F, N, _Modes, _Delay, _Tabled) --> +'$get_pred'((:- mode Q), F, N, Modes, _Delay, _Tabled) --> { functor(Q, F, N), !, Q =.. [_|Modes0], - '$bpe_cvt_modes'(Modes0,Modes,[]) + '$bpe_cvt_modes'(Modes0, Modes, []) }, []. 
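A recurring change in library/dialect/bprolog.yap, visible in the hunks above and continued below, is rewriting clause heads written with B-Prolog's infix dot, such as '$bpe_process_pred'(Call.Prog0, ...), into standard list syntax, '$bpe_process_pred'([Call|Prog0], ...). Both spellings denote the same cons cell '.'(Call, Prog0), but the dotted form only parses when the reader treats a.b as a list constructor, while [Head|Tail] is ordinary ISO syntax. A minimal sketch of the equivalence (assert_all/1 is an illustrative name, not part of the patch):

    % The recursion scheme used throughout these hunks, in plain list syntax.
    % With dot-as-cons enabled the second clause could also be written with a
    % head of the form assert_all(Cl.Clauses); the patch standardises on [Cl|Clauses].
    assert_all([]).
    assert_all([Cl|Clauses]) :-
        assert_static(Cl),      % same call as '$assert_clauses'/1 uses in this file
        assert_all(Clauses).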
%'$get_pred'((:- table _), F, N, Modes, Delay, Tabled) --> % { functor(Q, F, N), !, Q =.. [_|Modes] }, % []. -'$get_pred'((:- _), _F, _N, _Modes, _Delay, _Tabled) --> !, { fail }. +'$get_pred'((:- Q), '$damon_load', 0, _Modes, _Delay, _Tabled) --> + [ ('$damon_load' :- '$query'( Q ) )]. '$get_pred'((P), F, N, _Modes, _Delay, _Tabled) --> { functor(P, F, N), ! }, [(P)]. -'$bpe_cvt_modes'(Mode.Modes0) --> [NewMode], +'$bpe_cvt_modes'([Mode|Modes0]) --> [NewMode], { '$bpe_cvt_mode'(Mode, NewMode) }, '$bpe_cvt_modes'(Modes0). '$bpe_cvt_modes'([]) --> []. @@ -127,20 +128,40 @@ preprocess_cl(Cl, Cl, _, _, _, _). phase_1_process(Prog, Prog). -compileProgToFile(_,_File,[]). -compileProgToFile(_,File,pred(F,N,_,_,Tabled,Clauses).Prog2) :- +compileProgToFile(_, _File, []). +compileProgToFile(_, File, [Pred|Prog2]) :- + consult_pred(Pred), + compileProgToFile(_, File, Prog2). + +consult_preds([], L) :- !, + consult_preds(L). +consult_preds(L0, L) :- + writeln(consult_preds(L0,L)). + +consult_preds([]). +consult_preds([P|L]) :- + consult_pred(P), + consult_preds(L). + +consult_pred(pred(F,N,_Mode,_Delay,Tabled,Clauses)) :- (nonvar(Tabled) -> table(F/N) ; true), functor(S,F,N), assert(b_IS_CONSULTED_c(S)), - '$assert_clauses'(Clauses), - compileProgToFile(_,File,Prog2). + abolish(F/N), + '$assert_clauses'(Clauses). + +add_pred(Name, Arity, _Mode, _Delay, Tabled, Clauses) :- + '$assert_clauses'(Clauses). '$assert_clauses'([]). -'$assert_clauses'(Cl.Clauses) :- +'$assert_clauses'([Cl|Clauses]) :- assert_static(Cl), '$assert_clauses'(Clauses). -'$myload'(_F). +'$myload'(_F) :- + '$damon_load'. + +'$query'(G) :- call(G). initialize_table :- abolish_all_tables. diff --git a/packages/CLPBN/Makefile.in b/packages/CLPBN/Makefile.in index 602565138..53f45c1b6 100644 --- a/packages/CLPBN/Makefile.in +++ b/packages/CLPBN/Makefile.in @@ -28,9 +28,9 @@ INSTALL=@INSTALL@ INSTALL_DATA=@INSTALL_DATA@ INSTALL_PROGRAM=@INSTALL_PROGRAM@ srcdir=@srcdir@ +PDFLATEX=pdflatex -CLPBN_TOP= $(srcdir)/clpbn.yap \ - $(srcdir)/pfl.yap +PFL_MANUAL = $(srcdir)/pfl CLPBN_SRCDIR = $(srcdir)/clpbn @@ -38,6 +38,10 @@ CLPBN_LEARNING_SRCDIR = $(srcdir)/learning CLPBN_EXDIR = $(srcdir)/examples +CLPBN_TOP= \ + $(srcdir)/clpbn.yap \ + $(srcdir)/pfl.yap + CLPBN_PROGRAMS= \ $(CLPBN_SRCDIR)/aggregates.yap \ $(CLPBN_SRCDIR)/bdd.yap \ @@ -74,12 +78,24 @@ CLPBN_LEARNING_PROGRAMS= \ $(CLPBN_LEARNING_SRCDIR)/learn_utils.yap \ $(CLPBN_LEARNING_SRCDIR)/mle.yap +CLPBN_EXAMPLES= \ + $(CLPBN_EXDIR)/burglary-alarm.fg \ + $(CLPBN_EXDIR)/burglary-alarm.pfl \ + $(CLPBN_EXDIR)/burglary-alarm.uai \ + $(CLPBN_EXDIR)/cg.yap \ + $(CLPBN_EXDIR)/city.pfl \ + $(CLPBN_EXDIR)/comp_workshops.pfl \ + $(CLPBN_EXDIR)/social_network1.pfl \ + $(CLPBN_EXDIR)/social_network2.pfl \ + $(CLPBN_EXDIR)/sprinkler.pfl \ + $(CLPBN_EXDIR)/workshop_attrs.pfl + CLPBN_SCHOOL_EXAMPLES= \ $(CLPBN_EXDIR)/School/README \ $(CLPBN_EXDIR)/School/evidence_128.yap \ $(CLPBN_EXDIR)/School/parschema.pfl \ $(CLPBN_EXDIR)/School/school_128.yap \ - $(CLPBN_EXDIR)/School/school32.yap \ + $(CLPBN_EXDIR)/School/school_32.yap \ $(CLPBN_EXDIR)/School/school_64.yap \ $(CLPBN_EXDIR)/School/tables.yap @@ -99,20 +115,8 @@ CLPBN_LEARNING_EXAMPLES= \ $(CLPBN_EXDIR)/learning/sprinkler_params.yap \ $(CLPBN_EXDIR)/learning/train.yap -CLPBN_EXAMPLES= \ - $(CLPBN_EXDIR)/burglary-alarm.fg \ - $(CLPBN_EXDIR)/burglary-alarm.pfl \ - $(CLPBN_EXDIR)/burglary-alarm.uai \ - $(CLPBN_EXDIR)/cg.yap \ - $(CLPBN_EXDIR)/city.pfl \ - $(CLPBN_EXDIR)/comp_workshops.pfl \ - $(CLPBN_EXDIR)/social_domain1.pfl \ - 
$(CLPBN_EXDIR)/social_domain2.pfl \ - $(CLPBN_EXDIR)/sprinkler.pfl \ - $(CLPBN_EXDIR)/workshop_attrs.pfl - -install: $(CLBN_TOP) $(CLBN_PROGRAMS) $(CLPBN_PROGRAMS) +install: $(CLBN_TOP) $(CLBN_PROGRAMS) $(CLPBN_LEARNING_PROGRAMS) $(CLPBN_SCHOOL_EXAMPLES) $(CLPBN_HMMER_EXAMPLES) $(CLPBN_LEARNING_EXAMPLES) mkdir -p $(DESTDIR)$(SHAREDIR)/clpbn mkdir -p $(DESTDIR)$(SHAREDIR)/clpbn/learning mkdir -p $(DESTDIR)$(EXDIR) @@ -127,3 +131,13 @@ install: $(CLBN_TOP) $(CLBN_PROGRAMS) $(CLPBN_PROGRAMS) for h in $(CLPBN_HMMER_EXAMPLES); do $(INSTALL_DATA) $$h $(DESTDIR)$(EXDIR)/HMMer; done for h in $(CLPBN_LEARNING_EXAMPLES); do $(INSTALL_DATA) $$h $(DESTDIR)$(EXDIR)/learning; done + +docs: $(MANUAL) + $(PDFLATEX) $(PFL_MANUAL) + $(PDFLATEX) $(PFL_MANUAL) + rm pfl.aux pfl.bbl pfl.blg pfl.log pfl.out + + +install_docs: docs + $(INSTALL_DATA) pfl.pdf $(DESTDIR)$(EXDIR) + diff --git a/packages/CLPBN/README.txt b/packages/CLPBN/README.txt deleted file mode 100644 index 76231b268..000000000 --- a/packages/CLPBN/README.txt +++ /dev/null @@ -1,111 +0,0 @@ -Prolog Factor Language (PFL) - -Prolog Factor Language (PFL) is a extension of the Prolog language that -allows a natural representation of this first-order probabilistic models -(either directed or undirected). PFL is also capable of solving probabilistic -queries on this models through the implementation of several inference -techniques: variable elimination, belief propagation, lifted variable -elimination and lifted belief propagation. - -Language -------------------------------------------------------------------------------- -A graphical model in PFL is represented using parfactors. A PFL parfactor -has the following four components: - -Type ; Formulas ; Phi ; Constraint . - -- Type refers the type of the network over which the parfactor is defined. -It can be bayes for directed networks, or markov for undirected ones. -- Formulas is a sequence of Prolog terms that define sets of random variables -under the constraint. -- Phi is either a list of parameters or a call to a Prolog goal that will -unify its last argument with a list of parameters. -- Constraint is a list (possible empty) of Prolog goals that will impose -bindings on the logical variables that appear in the formulas. - -The "examples" directory contains some popular graphical models described -using PFL. - -Querying -------------------------------------------------------------------------------- -Now we show how to use PFL to solve probabilistic queries. We will -use the burlgary alarm network as an example. First, we load the model: - -$ yap -l examples/burglary-alarm.yap - -Now let's suppose that we want to estimate the probability of a earthquake -ocurred given that mary called. We can do it with the following query: - -?- earthquake(X), mary_calls(t). - -Suppose now that we want the joint distribution for john_calls and -mary_calls. We can obtain this with the following query: - -?- john_calls(X), mary_calls(Y). - - -Inference Options -------------------------------------------------------------------------------- -PFL supports both ground and lifted inference. The inference algorithm -can be chosen using the set_solver/1 predicate. 
The following algorithms -are supported: -- lve: generalized counting first-order variable elimination (GC-FOVE) -- hve: (ground) variable elimination -- lbp: lifted first-order belief propagation -- cbp: counting belief propagation -- bp: (ground) belief propagation -- lkc: lifted first-order knowledge compilation - -For example, if we want to use ground variable elimination to solve some -query, we need to call first the following goal: - -?- set_solver(hve). - -It is possible to tweak several parameters of PFL through the -set_horus_flag/2 predicate. The first argument is a key that -identifies the parameter that we desire to tweak, while the second -is some possible value for this key. - -The verbosity key controls the level of log information that will be -printed by the corresponding solver. Its possible values are positive -integers. The bigger the number, more log information will be printed. -For example, to view some basic log information we need to call the -following goal: - -?- set_horus_flag(verbosity, 1). - -The use_logarithms key controls whether the calculations performed -during inference should be done in the log domain or not. Its values -can be true or false. By default is false. - -There are also keys specific to the inference algorithm. For example, -elim_heuristic key controls the elimination heuristic that will be -used by ground variable elimination. The following heuristics are -supported: -- sequential -- min_neighbors -- min_weight -- min_fill -- weighted_min_fill - -An explanation of this heuristics can be found in Probabilistic Graphical -Models by Daphne Koller. - -The schedule, accuracy and max_iter keys are specific for inference -algorithms based on message passing, namely lbp, cbp and bp. -The key schedule can be used to specify the order in which the messages -are sent in belief propagation. The possible values are: -- seq_fixed: at each iteration, all messages are sent in the same order -- seq_random: at each iteration, the messages are sent with a random order -- parallel: at each iteration, the messages are all calculated using the -values of the previous iteration. -- max_residual: the next message to be sent is the one with maximum residual, -(Residual Belief Propagation:Informed Scheduling for Asynchronous Message -Passing) - -The max_iter key sets the maximum number of iterations. One iteration -consists in sending all possible messages. The accuracy key indicate -when we should stop sending messages. If the largest difference between -a message sent in the current iteration and one message sent in the previous -iteration is less that accuracy value given, we terminate belief propagation. - diff --git a/packages/CLPBN/clpbn.yap b/packages/CLPBN/clpbn.yap index e7b8350ee..8e994330a 100644 --- a/packages/CLPBN/clpbn.yap +++ b/packages/CLPBN/clpbn.yap @@ -1,210 +1,242 @@ -:- module(clpbn, [{}/1, - clpbn_flag/2, - set_clpbn_flag/2, - clpbn_flag/3, - clpbn_key/2, - clpbn_init_solver/4, - clpbn_run_solver/3, - pfl_init_solver/6, - pfl_run_solver/4, - clpbn_finalize_solver/1, - clpbn_init_solver/5, - clpbn_run_solver/4, - clpbn_init_graph/1, - probability/2, - conditional_probability/3, - use_parfactors/1, - op( 500, xfy, with)]). 
+:- module(clpbn, + [{}/1, + clpbn_flag/2, + set_clpbn_flag/2, + set_solver/1, + set_em_solver/1, + clpbn_flag/3, + clpbn_key/2, + clpbn_init_graph/1, + clpbn_init_solver/4, + clpbn_run_solver/3, + pfl_init_solver/5, + pfl_run_solver/3, + pfl_end_solver/1, + probability/2, + conditional_probability/3, + use_parfactors/1, + op(500, xfy, with) + ]). :- use_module(library(atts)). + :- use_module(library(bhash)). + :- use_module(library(lists)). + :- use_module(library(terms)). + :- use_module(library(maplist)). +:- attribute key/1, dist/2, evidence/1. + +:- use_module('clpbn/ve', + [ve/3, + check_if_ve_done/1, + init_ve_solver/4, + run_ve_solver/3, + init_ve_ground_solver/5, + run_ve_ground_solver/3, + call_ve_ground_solver/6 + ]). + +:- use_module('clpbn/jt', + [jt/3, + init_jt_solver/4, + run_jt_solver/3 + ]). + +:- use_module('clpbn/bdd', + [bdd/3, + init_bdd_solver/4, + run_bdd_solver/3, + init_bdd_ground_solver/5, + run_bdd_ground_solver/3, + call_bdd_ground_solver/6 + ]). + +:- use_module('clpbn/gibbs', + [gibbs/3, + check_if_gibbs_done/1, + init_gibbs_solver/4, + run_gibbs_solver/3 + ]). + +:- use_module('clpbn/pgrammar', + [pcg_init_graph/0, + init_pcg_solver/4, + run_pcg_solver/3 + ]). + +:- use_module('clpbn/horus_ground', + [call_horus_ground_solver/6, + check_if_horus_ground_solver_done/1, + init_horus_ground_solver/5, + run_horus_ground_solver/3, + end_horus_ground_solver/1 + ]). + +:- use_module('clpbn/horus_lifted', + [call_horus_lifted_solver/3, + check_if_horus_lifted_solver_done/1, + init_horus_lifted_solver/4, + run_horus_lifted_solver/3, + end_horus_lifted_solver/1 + ]). + +%% :- use_module('clpbn/bnt', +%% [do_bnt/3, +%% check_if_bnt_done/1 +%% ]). + +:- use_module('clpbn/dists', + [dist/4, + get_dist/4, + get_evidence_position/3, + get_evidence_from_position/3, + additive_dists/6 + ]). + +:- use_module('clpbn/evidence', + [store_evidence/1, + add_stored_evidence/2, + incorporate_evidence/2, + check_stored_evidence/2, + put_evidence/2 + ]). + +:- use_module('clpbn/ground_factors', + [generate_network/5]). + +:- use_module('clpbn/utils', + [sort_vars_by_key/3]). + +:- use_module('clpbn/graphs', + [clpbn2graph/1]). + +:- use_module('clpbn/graphviz', + [clpbn2gviz/4]). + % % avoid the overhead of using goal_expansion/2. % -:- multifile - user:term_expansion/2. +:- multifile user:term_expansion/2. + +:- dynamic user:term_expansion/2. :- dynamic - user:term_expansion/2. - -:- attribute key/1, dist/2, evidence/1. - - -:- use_module('clpbn/ve', - [ve/3, - check_if_ve_done/1, - init_ve_solver/4, - run_ve_solver/3, - init_ve_ground_solver/5, - run_ve_ground_solver/3, - call_ve_ground_solver/6 - ]). - -:- use_module('clpbn/horus_ground', - [call_horus_ground_solver/6, - check_if_horus_ground_solver_done/1, - init_horus_ground_solver/5, - run_horus_ground_solver/4, - finalize_horus_ground_solver/1 - ]). - -:- use_module('clpbn/horus_lifted', - [call_horus_lifted_solver/3, - check_if_horus_lifted_solver_done/1, - init_horus_lifted_solver/4, - run_horus_lifted_solver/3, - finalize_horus_lifted_solver/1 - ]). - -:- use_module('clpbn/jt', - [jt/3, - init_jt_solver/4, - run_jt_solver/3 - ]). - -:- use_module('clpbn/bdd', - [bdd/3, - init_bdd_solver/4, - run_bdd_solver/3, - init_bdd_ground_solver/5, - run_bdd_ground_solver/3, - call_bdd_ground_solver/6 - ]). - -%% :- use_module('clpbn/bnt', -%% [do_bnt/3, -%% check_if_bnt_done/1 -%% ]). - -:- use_module('clpbn/gibbs', - [gibbs/3, - check_if_gibbs_done/1, - init_gibbs_solver/4, - run_gibbs_solver/3 - ]). 
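The rewritten export list above makes solver selection part of the public clpbn interface: set_solver/1 and set_em_solver/1 are now exported, the PFL learning entry points change arity (pfl_init_solver/6 and pfl_run_solver/4 become pfl_init_solver/5 and pfl_run_solver/3), and clpbn_finalize_solver/1 is replaced by pfl_end_solver/1. A short usage sketch, assuming the burglary-alarm example model from the examples directory has been loaded (the query itself is the one from the old README):

    % Choose inference backends, then ask a conditional query at the toplevel.
    ?- set_solver(hve).                  % Horus ground variable elimination
    ?- set_em_solver(bdd).               % solver used by the EM learner
    ?- earthquake(X), mary_calls(t).     % posterior on earthquake given mary_calls = t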
- -:- use_module('clpbn/pgrammar', - [init_pcg_solver/4, - run_pcg_solver/3, - pcg_init_graph/0 - ]). - -:- use_module('clpbn/graphs', - [ - clpbn2graph/1 - ]). - -:- use_module('clpbn/dists', - [ - dist/4, - get_dist/4, - get_evidence_position/3, - get_evidence_from_position/3, - additive_dists/6 - ]). - -:- use_module('clpbn/evidence', - [ - store_evidence/1, - add_stored_evidence/2, - incorporate_evidence/2, - check_stored_evidence/2, - put_evidence/2 - ]). - -:- use_module('clpbn/utils', - [ - sort_vars_by_key/3 - ]). - -:- use_module('clpbn/graphviz', - [clpbn2gviz/4]). - -:- use_module(clpbn/ground_factors, - [generate_network/5]). - - -:- dynamic solver/1,output/1,use/1,suppress_attribute_display/1, parameter_softening/1, em_solver/1, use_parfactors/1. - -solver(ve). -em_solver(bp). + solver/1, + em_solver/1, + suppress_attribute_display/1, + parameter_softening/1, + use_parfactors/1, + output/1, + use/1. :- meta_predicate probability(:,-), conditional_probability(:,:,-). -%output(xbif(user_error)). -%output(gviz(user_error)). -output(no). + +solver(hve). +em_solver(hve). suppress_attribute_display(false). parameter_softening(m_estimate(10)). use_parfactors(off). +output(no). +%output(xbif(user_error)). +%output(gviz(user_error)). -clpbn_flag(Flag,Option) :- +ground_solver(ve). +ground_solver(hve). +ground_solver(jt). +ground_solver(bdd). +ground_solver(bp). +ground_solver(cbp). +ground_solver(gibbs). + +lifted_solver(lve). +lifted_solver(lkc). +lifted_solver(lbp). + + +clpbn_flag(Flag, Option) :- clpbn_flag(Flag, Option, Option). set_clpbn_flag(Flag,Option) :- clpbn_flag(Flag, _, Option). -clpbn_flag(output,Before,After) :- - retract(output(Before)), - assert(output(After)). clpbn_flag(solver,Before,After) :- retract(solver(Before)), assert(solver(After)). + clpbn_flag(em_solver,Before,After) :- retract(em_solver(Before)), assert(em_solver(After)). + clpbn_flag(bnt_solver,Before,After) :- retract(bnt:bnt_solver(Before)), assert(bnt:bnt_solver(After)). + clpbn_flag(bnt_path,Before,After) :- retract(bnt:bnt_path(Before)), assert(bnt:bnt_path(After)). + clpbn_flag(bnt_model,Before,After) :- retract(bnt:bnt_model(Before)), assert(bnt:bnt_model(After)). + clpbn_flag(suppress_attribute_display,Before,After) :- retract(suppress_attribute_display(Before)), assert(suppress_attribute_display(After)). + clpbn_flag(parameter_softening,Before,After) :- retract(parameter_softening(Before)), assert(parameter_softening(After)). + clpbn_flag(use_factors,Before,After) :- retract(use_parfactors(Before)), assert(use_parfactors(After)). +clpbn_flag(output,Before,After) :- + retract(output(Before)), + assert(output(After)). + +set_solver(Solver) :- + set_clpbn_flag(solver,Solver). + +set_em_solver(Solver) :- + set_clpbn_flag(em_solver,Solver). + {_} :- solver(none), !. -{Var = Key with Dist} :- +{ Var = Key with Dist } :- put_atts(El,[key(Key),dist(DistInfo,Parents)]), dist(Dist, DistInfo, Key, Parents), add_evidence(Var,Key,DistInfo,El) % ,writeln({Var = Key with Dist}) -. + . % % make sure a query variable is reachable by the garbage collector. % % we use a mutable variable to avoid unnecessary trailing. % -store_var(El) :- - nb_current(clpbn_qvars, Mutable), +store_var(El) :- + nb_current(clpbn_qvars, Mutable), nonvar(Mutable), !, get_mutable(Tail, Mutable), update_mutable(El.Tail, Mutable). -store_var(El) :- - init_clpbn_vars(El). - +store_var(El) :- + init_clpbn_vars(El). + init_clpbn_vars(El) :- create_mutable(El, Mutable), b_setval(clpbn_qvars, Mutable). 
-check_constraint(Constraint, _, _, Constraint) :- var(Constraint), !. -check_constraint((A->D), _, _, (A->D)) :- var(A), !. +check_constraint(Constraint, _, _, Constraint) :- + var(Constraint), !. +check_constraint((A->D), _, _, (A->D)) :- + var(A), !. check_constraint((([A|B].L)->D), Vars, NVars, (([A|B].NL)->D)) :- !, check_cpt_input_vars(L, Vars, NVars, NL). check_constraint(Dist, _, _, Dist). @@ -240,17 +272,19 @@ clpbn_marginalise(V, Dist) :- % project_attributes(GVars0, _AVars0) :- use_parfactors(on), - clpbn_flag(solver, Solver), Solver \= fove, !, + clpbn_flag(solver, Solver), + ground_solver(Solver), generate_network(GVars0, GKeys, Keys, Factors, Evidence), b_setval(clpbn_query_variables, f(GVars0,Evidence)), simplify_query(GVars0, GVars), - ( GKeys = [] - -> + ( + GKeys = [] + -> GVars0 = [V|_], clpbn_display:put_atts(V, [posterior([],[],[],[])]) ; call_ground_solver(Solver, GVars, GKeys, Keys, Factors, Evidence) - ). + ). project_attributes(GVars, AVars) :- suppress_attribute_display(false), AVars = [_|_], @@ -264,11 +298,11 @@ project_attributes(GVars, AVars) :- (output(xbif(XBifStream)) -> clpbn2xbif(XBifStream,ve,AllVars) ; true), (output(gviz(XBifStream)) -> clpbn2gviz(XBifStream,sort,AllVars,GVars) ; true), ( - Solver = graphs + Solver = graphs -> - write_out(Solver, [[]], AllVars, DiffVars) + write_out(Solver, [[]], AllVars, DiffVars) ; - write_out(Solver, [CLPBNGVars], AllVars, DiffVars) + write_out(Solver, [CLPBNGVars], AllVars, DiffVars) ). project_attributes(_, _). @@ -322,37 +356,29 @@ get_rid_of_ev_vars([V|LVs0],[V|LVs]) :- get_rid_of_ev_vars(LVs0,LVs). -% do nothing if we don't have query variables to compute. -write_out(_, [], _, _) :- !. -write_out(graphs, _, AVars, _) :- - clpbn2graph(AVars). -write_out(ve, GVars, AVars, DiffVars) :- - ve(GVars, AVars, DiffVars). -write_out(jt, GVars, AVars, DiffVars) :- - jt(GVars, AVars, DiffVars). -write_out(bdd, GVars, AVars, DiffVars) :- - bdd(GVars, AVars, DiffVars). -write_out(bp, _GVars, _AVars, _DiffVars) :- - writeln('interface not supported any longer'). - %bp(GVars, AVars, DiffVars). -write_out(gibbs, GVars, AVars, DiffVars) :- - gibbs(GVars, AVars, DiffVars). -write_out(bnt, GVars, AVars, DiffVars) :- - do_bnt(GVars, AVars, DiffVars). -write_out(fove, GVars, AVars, DiffVars) :- - call_horus_lifted_solver(GVars, AVars, DiffVars). - -% call a solver with keys, not actual variables -call_ground_solver(bp, GVars, GoalKeys, Keys, Factors, Evidence) :- !, - call_horus_ground_solver(GVars, GoalKeys, Keys, Factors, Evidence, _Answ). -call_ground_solver(bdd, GVars, GoalKeys, Keys, Factors, Evidence) :- !, - call_bdd_ground_solver(GVars, GoalKeys, Keys, Factors, Evidence, _Answ). +% Call a solver with keys, not actual variables call_ground_solver(ve, GVars, GoalKeys, Keys, Factors, Evidence) :- !, call_ve_ground_solver(GVars, GoalKeys, Keys, Factors, Evidence, _Answ). + +call_ground_solver(hve, GVars, GoalKeys, Keys, Factors, Evidence) :- !, + clpbn_horus:set_horus_flag(ground_solver, ve), + call_horus_ground_solver(GVars, GoalKeys, Keys, Factors, Evidence, _Answ). + +call_ground_solver(bdd, GVars, GoalKeys, Keys, Factors, Evidence) :- !, + call_bdd_ground_solver(GVars, GoalKeys, Keys, Factors, Evidence, _Answ). + +call_ground_solver(bp, GVars, GoalKeys, Keys, Factors, Evidence) :- !, + clpbn_horus:set_horus_flag(ground_solver, bp), + call_horus_ground_solver(GVars, GoalKeys, Keys, Factors, Evidence, _Answ). 
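The new call_ground_solver/6 clauses follow one pattern: backends implemented inside the Horus library (hve, bp and, just below, cbp) first set the Horus ground_solver flag and then delegate to the shared call_horus_ground_solver/6 entry point, while the native Prolog backends (ve, bdd) call their own entry points directly. A condensed, hypothetical rendering of that dispatch shape (solve/5 is not a predicate in the patch):

    % One clause per backend; Horus-backed ones configure the C++ side first.
    solve(ve,  QVars, Keys, Factors, Ev) :-
        call_ve_ground_solver(QVars, _GoalKeys, Keys, Factors, Ev, _Answ).
    solve(hve, QVars, Keys, Factors, Ev) :-
        clpbn_horus:set_horus_flag(ground_solver, ve),
        call_horus_ground_solver(QVars, _GoalKeys, Keys, Factors, Ev, _Answ).
    solve(bp,  QVars, Keys, Factors, Ev) :-
        clpbn_horus:set_horus_flag(ground_solver, bp),
        call_horus_ground_solver(QVars, _GoalKeys, Keys, Factors, Ev, _Answ).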
+ +call_ground_solver(cbp, GVars, GoalKeys, Keys, Factors, Evidence) :- !, + clpbn_horus:set_horus_flag(ground_solver, cbp), + call_horus_ground_solver(GVars, GoalKeys, Keys, Factors, Evidence, _Answ). + call_ground_solver(Solver, GVars, _GoalKeys, Keys, Factors, Evidence) :- - % traditional solver + % fall back to traditional solver b_hash_new(Hash0), - foldl(gvar_in_hash, GVars, Hash0, HashI), + foldl(gvar_in_hash, GVars, Hash0, HashI), foldl(key_to_var, Keys, AllVars, HashI, Hash1), foldl(evidence_to_v, Evidence, _EVars, Hash1, Hash), %writeln(Keys:AllVars), @@ -362,13 +388,51 @@ call_ground_solver(Solver, GVars, _GoalKeys, Keys, Factors, Evidence) :- write_out(Solver, [GVars], AllVars, _), assert(use_parfactors(on)). + +% do nothing if we don't have query variables to compute. +write_out(_, [], _, _) :- !. + +write_out(graphs, _, AVars, _) :- !, + clpbn2graph(AVars). + +write_out(ve, GVars, AVars, DiffVars) :- !, + ve(GVars, AVars, DiffVars). + +write_out(jt, GVars, AVars, DiffVars) :- !, + jt(GVars, AVars, DiffVars). + +write_out(bdd, GVars, AVars, DiffVars) :- !, + bdd(GVars, AVars, DiffVars). + +write_out(gibbs, GVars, AVars, DiffVars) :- !, + gibbs(GVars, AVars, DiffVars). + +write_out(lve, GVars, AVars, DiffVars) :- !, + clpbn_horus:set_horus_flag(lifted_solver, lve), + call_horus_lifted_solver(GVars, AVars, DiffVars). + +write_out(lkc, GVars, AVars, DiffVars) :- !, + clpbn_horus:set_horus_flag(lifted_solver, lkc), + call_horus_lifted_solver(GVars, AVars, DiffVars). + +write_out(lbp, GVars, AVars, DiffVars) :- !, + clpbn_horus:set_horus_flag(lifted_solver, lbp), + call_horus_lifted_solver(GVars, AVars, DiffVars). + +write_out(bnt, GVars, AVars, DiffVars) :- !, + do_bnt(GVars, AVars, DiffVars). + +write_out(Solver, _, _, _) :- + format("Error: solver '~w' is unknown.", [Solver]), + fail. + % % convert a PFL network (without constraints) % into CLP(BN) for evaluation % gvar_in_hash(V, Hash0, Hash) :- get_atts(V, [key(K)]), - b_hash_insert(Hash0, K, V, Hash). + b_hash_insert(Hash0, K, V, Hash). key_to_var(K, V, Hash0, Hash0) :- b_hash_lookup(K, V, Hash0), !. @@ -429,15 +493,15 @@ find_var([_|DVars], V, Key, [_|DKeys]) :- process_vars([], []). process_vars([V|Vs], [K|Ks]) :- - process_var(V, K), + process_var(V, K), process_vars(Vs, Ks). -process_var(V, K) :- get_atts(V, [key(K)]), !. +process_var(V, K) :- get_atts(V, [key(K)]), !. % oops: this variable has no attributes. process_var(V, _) :- throw(error(instantiation_error,clpbn(attribute_goal(V)))). % -% unify a CLPBN variable with something. +% unify a CLPBN variable with something. % verify_attributes(Var, T, Goal) :- get_atts(Var, [key(Key),dist(Dist,Parents)]), !, @@ -452,28 +516,25 @@ bind_clpbn(T, Var, _, _, _, do_not_bind_variable([put_evidence(T,Var)])) :- bind_clpbn(T, Var, Key, Dist, Parents, []) :- var(T), get_atts(T, [key(Key1),dist(Dist1,Parents1)]), ( - bind_clpbns(Key, Dist, Parents, Key1, Dist1, Parents1) + bind_clpbns(Key, Dist, Parents, Key1, Dist1, Parents1) -> - ( - get_atts(T, [evidence(Ev1)]) -> - bind_evidence_from_extra_var(Ev1,Var) - ; - get_atts(Var, [evidence(Ev)]) -> - bind_evidence_from_extra_var(Ev,T) - ; - true - ) + ( + get_atts(T, [evidence(Ev1)]) -> + bind_evidence_from_extra_var(Ev1,Var) + ; + get_atts(Var, [evidence(Ev)]) -> + bind_evidence_from_extra_var(Ev,T) + ; + true + ) ; - fail + fail ). -bind_clpbn(_, Var, _, _, _, _, []) :- - use(bnt), - check_if_bnt_done(Var), !. bind_clpbn(_, Var, _, _, _, _, []) :- use(ve), check_if_ve_done(Var), !. 
bind_clpbn(_, Var, _, _, _, _, []) :- - use(bp), + use(hve), check_if_horus_ground_solver_done(Var), !. bind_clpbn(_, Var, _, _, _, _, []) :- use(jt), @@ -481,12 +542,21 @@ bind_clpbn(_, Var, _, _, _, _, []) :- bind_clpbn(_, Var, _, _, _, _, []) :- use(bdd), check_if_bdd_done(Var), !. +bind_clpbn(_, Var, _, _, _, _, []) :- + use(bp), + check_if_horus_ground_solver_done(Var), !. +bind_clpbn(_, Var, _, _, _, _, []) :- + use(cbp), + check_if_horus_ground_solver_done(Var), !. +bind_clpbn(_, Var, _, _, _, _, []) :- + use(bnt), + check_if_bnt_done(Var), !. bind_clpbn(T, Var, Key0, _, _, _, []) :- get_atts(Var, [key(Key)]), !, ( Key = Key0 -> true ; - % let us not loose whatever we had. + % let us not loose whatever we had. put_evidence(T,Var) ). @@ -495,8 +565,8 @@ fresh_attvar(Var, NVar) :- put_atts(NVar, LAtts). % I will now allow two CLPBN variables to be bound together. -%bind_clpbns(Key, Dist, Parents, Key, Dist, Parents). -bind_clpbns(Key, Dist, Parents, Key1, Dist1, Parents1) :- +% bind_clpbns(Key, Dist, Parents, Key, Dist, Parents). +bind_clpbns(Key, Dist, Parents, Key1, Dist1, Parents1) :- Key == Key1, !, get_dist(Dist,_Type,_Domain,_Table), get_dist(Dist1,_Type1,_Domain1,_Table1), @@ -525,13 +595,22 @@ bind_evidence_from_extra_var(Ev1,Var) :- bind_evidence_from_extra_var(Ev1,Var) :- put_atts(Var, [evidence(Ev1)]). -user:term_expansion((A :- {}), ( :- true )) :- !, % evidence +user:term_expansion((A :- {}), ( :- true )) :- !, % evidence prolog_load_context(module, M), store_evidence(M:A). clpbn_key(Var,Key) :- get_atts(Var, [key(Key)]). + +% +% only useful for probabilistic context free grammars +% +clpbn_init_graph(pcg) :- !, + pcg_init_graph. +clpbn_init_graph(_). + + % % This is a routine to start a solver, called by the learning procedures (ie, em). % LVs is a list of lists of variables one is interested in eventually marginalising out @@ -544,94 +623,116 @@ clpbn_init_solver(LVs, Vs0, VarsWithUnboundKeys, State) :- solver(Solver), clpbn_init_solver(Solver, LVs, Vs0, VarsWithUnboundKeys, State). -clpbn_init_solver(gibbs, LVs, Vs0, VarsWithUnboundKeys, State) :- - init_gibbs_solver(LVs, Vs0, VarsWithUnboundKeys, State). clpbn_init_solver(ve, LVs, Vs0, VarsWithUnboundKeys, State) :- init_ve_solver(LVs, Vs0, VarsWithUnboundKeys, State). -clpbn_init_solver(bp, LVs, Vs0, VarsWithUnboundKeys, State) :- - init_horus_ground_solver(LVs, Vs0, VarsWithUnboundKeys, State). + clpbn_init_solver(jt, LVs, Vs0, VarsWithUnboundKeys, State) :- init_jt_solver(LVs, Vs0, VarsWithUnboundKeys, State). + clpbn_init_solver(bdd, LVs, Vs0, VarsWithUnboundKeys, State) :- init_bdd_solver(LVs, Vs0, VarsWithUnboundKeys, State). + +clpbn_init_solver(gibbs, LVs, Vs0, VarsWithUnboundKeys, State) :- + init_gibbs_solver(LVs, Vs0, VarsWithUnboundKeys, State). + clpbn_init_solver(pcg, LVs, Vs0, VarsWithUnboundKeys, State) :- init_pcg_solver(LVs, Vs0, VarsWithUnboundKeys, State). -% -% This is a routine to start a solver, called by the learning procedures (ie, em). -% LVs is a list of lists of variables one is interested in eventually marginalising out -% Vs0 gives the original graph -% AllDiffs gives variables that are not fully constrainted, ie, we don't fully know -% the key. In this case, we assume different instances will be bound to different -% values at the end of the day. -% -pfl_init_solver(QueryKeys, AllKeys, Factors, Evidence, VE, bdd) :- - init_bdd_ground_solver(QueryKeys, AllKeys, Factors, Evidence, VE). 
-pfl_init_solver(QueryKeys, AllKeys, Factors, Evidence, VE, ve) :- - init_ve_ground_solver(QueryKeys, AllKeys, Factors, Evidence, VE). -pfl_init_solver(QueryKeys, AllKeys, Factors, Evidence, VE, bp) :- - init_horus_ground_solver(QueryKeys, AllKeys, Factors, Evidence, VE). -pfl_init_solver(QueryKeys, AllKeys, Factors, Evidence, VE, hve) :- - init_horus_ground_solver(QueryKeys, AllKeys, Factors, Evidence, VE). - - % % LVs is the list of lists of variables to marginalise % Vs is the full graph % Ps are the probabilities on LVs. -% % clpbn_run_solver(LVs, LPs, State) :- solver(Solver), clpbn_run_solver(Solver, LVs, LPs, State). -clpbn_run_solver(gibbs, LVs, LPs, State) :- - run_gibbs_solver(LVs, LPs, State). - clpbn_run_solver(ve, LVs, LPs, State) :- run_ve_solver(LVs, LPs, State). -clpbn_run_solver(bp, LVs, LPs, State) :- - run_horus_ground_solver(LVs, LPs, State). - clpbn_run_solver(jt, LVs, LPs, State) :- run_jt_solver(LVs, LPs, State). clpbn_run_solver(bdd, LVs, LPs, State) :- run_bdd_solver(LVs, LPs, State). +clpbn_run_solver(gibbs, LVs, LPs, State) :- + run_gibbs_solver(LVs, LPs, State). + clpbn_run_solver(pcg, LVs, LPs, State) :- run_pcg_solver(LVs, LPs, State). -pfl_run_solver(LVs, LPs, State, ve) :- +% +% This is a routine to start a solver, called by the learning procedures (ie, em). +% +pfl_init_solver(QueryKeys, AllKeys, Factors, Evidence, State) :- + em_solver(Solver), + (lifted_solver(Solver) -> + format("Error: you cannot use a lifted solver for learning.", [Solver]), fail + ; + true + ), + (ground_solver(Solver) -> + true + ; + format("Error: solver '~w' is unknown.", [Solver]), fail + ), + pfl_init_solver(QueryKeys, AllKeys, Factors, Evidence, State, Solver). + +pfl_init_solver(QueryKeys, AllKeys, Factors, Evidence, State, ve) :- !, + init_ve_ground_solver(QueryKeys, AllKeys, Factors, Evidence, State). + +pfl_init_solver(QueryKeys, AllKeys, Factors, Evidence, State, hve) :- !, + clpbn_horus:set_horus_flag(ground_solver, ve), + init_horus_ground_solver(QueryKeys, AllKeys, Factors, Evidence, State). + +pfl_init_solver(QueryKeys, AllKeys, Factors, Evidence, State, bdd) :- !, + init_bdd_ground_solver(QueryKeys, AllKeys, Factors, Evidence, State). + +pfl_init_solver(QueryKeys, AllKeys, Factors, Evidence, State, bp) :- !, + clpbn_horus:set_horus_flag(ground_solver, bp), + init_horus_ground_solver(QueryKeys, AllKeys, Factors, Evidence, State). + +pfl_init_solver(QueryKeys, AllKeys, Factors, Evidence, State, cbp) :- !, + clpbn_horus:set_horus_flag(ground_solver, cbp), + init_horus_ground_solver(QueryKeys, AllKeys, Factors, Evidence, State). + +pfl_init_solver(_, _, _, _, _, Solver) :- + format("Error: solver '~w' can't be used for learning.", [Solver]), + fail. + + +pfl_run_solver(LVs, LPs, State) :- + em_solver(Solver), + pfl_run_solver(LVs, LPs, State, Solver). + +pfl_run_solver(LVs, LPs, State, ve) :- !, run_ve_ground_solver(LVs, LPs, State). -pfl_run_solver(LVs, LPs, State, bdd) :- + +pfl_run_solver(LVs, LPs, State, hve) :- !, + run_horus_ground_solver(LVs, LPs, State). + +pfl_run_solver(LVs, LPs, State, bdd) :- !, run_bdd_ground_solver(LVs, LPs, State). -pfl_run_solver(LVs, LPs, State, bp) :- - run_horus_ground_solver(LVs, LPs, State, bp). -pfl_run_solver(LVs, LPs, State, hve) :- - run_horus_ground_solver(LVs, LPs, State, hve). + +pfl_run_solver(LVs, LPs, State, bp) :- !, + run_horus_ground_solver(LVs, LPs, State). + +pfl_run_solver(LVs, LPs, State, cbp) :- !, + run_horus_ground_solver(LVs, LPs, State). 
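With this change the EM learner no longer passes a solver name around: pfl_init_solver/5 and pfl_run_solver/3 read the em_solver flag themselves, reject lifted solvers (which cannot be used for learning), and then dispatch on the solver atom to an arity-extended internal clause. A hedged sketch of the same guard-then-dispatch idiom (init_learning/5 and /6 are illustrative names only):

    % Validate the configured EM solver, then hand off to a per-solver clause.
    init_learning(QueryKeys, AllKeys, Factors, Evidence, State) :-
        em_solver(Solver),
        (   lifted_solver(Solver)
        ->  format("Error: you cannot use a lifted solver for learning.~n", []),
            fail
        ;   ground_solver(Solver)
        ->  init_learning(QueryKeys, AllKeys, Factors, Evidence, State, Solver)
        ;   format("Error: solver '~w' is unknown.~n", [Solver]),
            fail
        ).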
+ +pfl_end_solver(State) :- + (em_solver(hve) ; em_solver(bp) ; em_solver(cbp)), + end_horus_ground_solver(State). +pfl_end_solver(_State). add_keys(Key1+V1,_Key2,Key1+V1). -% -% only useful for probabilistic context free grammars -% -clpbn_init_graph(pcg) :- !, - pcg_init_graph. -clpbn_init_graph(_). - -clpbn_finalize_solver(State) :- - solver(bp), !, - functor(State, _, Last), - arg(Last, State, Info), - finalize_horus_ground_solver(Info). -clpbn_finalize_solver(_State). probability(Goal, Prob) :- findall(Prob, do_probability(Goal, [], Prob), [Prob]). - + conditional_probability(Goal, ListOfGoals, Prob) :- \+ ground(Goal), throw(error(ground(Goal),conditional_probability(Goal, ListOfGoals, Prob))). @@ -665,26 +766,26 @@ evidence_to_var(Goal, C, VItem, V) :- Goal =.. [L|Args], variabilise_last(Args, C, NArgs, V), VItem =.. [L|NArgs]. - + variabilise_last([Arg], Arg, [V], V). variabilise_last([Arg1,Arg2|Args], Arg, Arg1.NArgs, V) :- variabilise_last(Arg2.Args, Arg, NArgs, V). match_probability(VPs, Goal, C, V, Prob) :- - match_probabilities(VPs, Goal, C, V, Prob). + match_probabilities(VPs, Goal, C, V, Prob). match_probabilities([p(V0=C)=Prob|_], _, C, V, Prob) :- - V0 == V, - !. + V0 == V, + !. match_probabilities([_|Probs], G, C, V, Prob) :- - match_probabilities(Probs, G, C, V, Prob). + match_probabilities(Probs, G, C, V, Prob). goal_to_key(_:Goal, Skolem) :- - goal_to_key(Goal, Skolem). + goal_to_key(Goal, Skolem). goal_to_key(Goal, Skolem) :- - functor(Goal, Na, Ar), - Ar1 is Ar-1, - functor(Skolem, Na, Ar1). + functor(Goal, Na, Ar), + Ar1 is Ar-1, + functor(Skolem, Na, Ar1). :- use_parfactors(on) -> true ; assert(use_parfactors(off)). diff --git a/packages/CLPBN/clpbn/aggregates.yap b/packages/CLPBN/clpbn/aggregates.yap index 20394516b..90c1ef51e 100644 --- a/packages/CLPBN/clpbn/aggregates.yap +++ b/packages/CLPBN/clpbn/aggregates.yap @@ -1,42 +1,45 @@ - % +% % generate explicit CPTs % -:- module(clpbn_aggregates, [ - check_for_agg_vars/2, - cpt_average/6, - cpt_average/7, - cpt_max/6, - cpt_min/6, - avg_factors/5 - ]). +:- module(clpbn_aggregates, + [check_for_agg_vars/2, + cpt_average/6, + cpt_average/7, + cpt_max/6, + cpt_min/6, + avg_factors/5 + ]). -:- use_module(library(clpbn), [{}/1]). +:- use_module(library(clpbn), + [{}/1]). :- use_module(library(lists), - [last/2, - sumlist/2, - sum_list/3, - max_list/2, - min_list/2, - nth0/3 - ]). + [last/2, + sumlist/2, + sum_list/3, + max_list/2, + min_list/2, + nth0/3 + ]). :- use_module(library(matrix), - [matrix_new/3, - matrix_to_list/2, - matrix_set/3]). + [matrix_new/3, + matrix_to_list/2, + matrix_set/3 + ]). :- use_module(library(clpbn/dists), - [ - add_dist/6, - get_dist_domain_size/2]). + [add_dist/6, + get_dist_domain_size/2 + ]). :- use_module(library(clpbn/matrix_cpt_utils), - [normalise_CPT_on_lines/3]). + [normalise_CPT_on_lines/3]). :- use_module(library(pfl), - [skolem/2, - add_ground_factor/5]). + [skolem/2, + add_ground_factor/5 + ]). :- use_module(library(bhash)). @@ -60,9 +63,9 @@ simplify_dist(_, _, _, _, Vs0, Vs0). % avg_factors(Key, Parents, _Smoothing, NewParents, Id) :- - % we keep ev as a list - skolem(Key, Domain), - avg_table(Parents, Parents, Domain, Key, 0, 1.0, NewParents, [], _ExtraSkolems, Id). + % we keep ev as a list + skolem(Key, Domain), + avg_table(Parents, Parents, Domain, Key, 0, 1.0, NewParents, [], _ExtraSkolems, Id). 
% there are 4 cases: % no evidence on top node @@ -70,17 +73,17 @@ avg_factors(Key, Parents, _Smoothing, NewParents, Id) :- % evidence on top node *entailed* by values of parents (so there is no real connection) % evidence incompatible with parents query_evidence(Key, EvHash, MAT0, MAT, NewParents0, NewParents, Vs, IVs, NewVs) :- - b_hash_lookup(Key, Ev, EvHash), !, - normalise_CPT_on_lines(MAT0, MAT1, L1), - check_consistency(L1, Ev, MAT0, MAT1, L1, MAT, NewParents0, NewParents, Vs, IVs, NewVs). + b_hash_lookup(Key, Ev, EvHash), !, + normalise_CPT_on_lines(MAT0, MAT1, L1), + check_consistency(L1, Ev, MAT0, MAT1, L1, MAT, NewParents0, NewParents, Vs, IVs, NewVs). query_evidence(_, _, MAT, MAT, NewParents, NewParents, _, Vs, Vs). hash_ev(K=V, Es0, Es) :- - b_hash_insert(Es0, K, V, Es). + b_hash_insert(Es0, K, V, Es). find_ev(Ev, Key, RemKeys, RemKeys, Ev0, EvF) :- - b_hash_lookup(Key, V, Ev), !, - EvF is Ev0+V. + b_hash_lookup(Key, V, Ev), !, + EvF is Ev0+V. find_ev(_Evs, Key, RemKeys, [Key|RemKeys], Ev, Ev). @@ -93,11 +96,11 @@ find_ev(_Evs, Key, RemKeys, [Key|RemKeys], Ev, Ev). % +final CPT % - New Parents % + - list of new keys -% +% avg_table(Vars, OVars, Domain, Key, TotEvidence, Softness, Vars, Vs, Vs, Id) :- length(Domain, SDomain), int_power(Vars, SDomain, 1, TabSize), - TabSize =< 256, + TabSize =< 256, /* case gmp is not there !! */ TabSize > 0, !, average_cpt(Vars, OVars, Domain, TotEvidence, Softness, CPT), @@ -115,7 +118,7 @@ avg_table(Vars, OVars, Domain, Key, TotEvidence, Softness, [V1,V2], Vs, [V1,V2|N average_cpt([V1,V2], OVars, Domain, TotEvidence, Softness, CPT), matrix_to_list(CPT, Mat), add_ground_factor(bayes, Domain, [Key,V1,V2], Mat, Id). - + intermediate_table(1,_,[V],V, _, _, I, I, Vs, Vs) :- !. intermediate_table(2, Op, [V1,V2], V, Key, Softness, I0, If, Vs, Vs) :- !, If is I0+1, @@ -167,7 +170,7 @@ cpt_min([_|Vars], Key, Els0, CPT, Vs, NewVs) :- build_avg_table(Vars, OVars, Domain, _, TotEvidence, Softness, CPT, Vars, Vs, Vs) :- length(Domain, SDomain), int_power(Vars, SDomain, 1, TabSize), - TabSize =< 256, + TabSize =< 256, /* case gmp is not there !! */ TabSize > 0, !, average_cpt(Vars, OVars, Domain, TotEvidence, Softness, CPT). @@ -181,11 +184,11 @@ build_avg_table(Vars, OVars, Domain, Key, TotEvidence, Softness, CPT, [V1,V2], V build_intermediate_table(LL1, sum(Min,Max), L1, V1, Key, 1.0, 0, I1, Vs, Vs1), build_intermediate_table(LL2, sum(Min,Max), L2, V2, Key, 1.0, I1, _, Vs1, NewVs), average_cpt([V1,V2], OVars, Domain, TotEvidence, Softness, CPT). - + build_max_table(Vars, Domain, Softness, p(Domain, CPT, Vars), Vs, Vs) :- length(Domain, SDomain), int_power(Vars, SDomain, 1, TabSize), - TabSize =< 16, + TabSize =< 16, /* case gmp is not there !! */ TabSize > 0, !, max_cpt(Vars, Domain, Softness, CPT). @@ -197,11 +200,11 @@ build_max_table(Vars, Domain, Softness, p(Domain, CPT, [V1,V2]), Vs, [V1,V2|NewV build_intermediate_table(LL1, max(Domain,CPT), L1, V1, Key, 1.0, 0, I1, Vs, Vs1), build_intermediate_table(LL2, max(Domain,CPT), L2, V2, Key, 1.0, I1, _, Vs1, NewVs), max_cpt([V1,V2], Domain, Softness, CPT). - + build_min_table(Vars, Domain, Softness, p(Domain, CPT, Vars), Vs, Vs) :- length(Domain, SDomain), int_power(Vars, SDomain, 1, TabSize), - TabSize =< 16, + TabSize =< 16, /* case gmp is not there !! */ TabSize > 0, !, min_cpt(Vars, Domain, Softness, CPT). 
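A worked instance of the size guards above (the /* case gmp is not there !! */ comments flag why the cut-offs are kept this small): int_power/4 raises the domain size to the number of parents, so for a 3-valued domain

    ?- int_power([_,_,_,_,_], 3, 1, TabSize).
    TabSize = 243.     % 3^5 =< 256: one explicit average CPT is built

    ?- int_power([_,_,_,_,_,_], 3, 1, TabSize).
    TabSize = 729.     % 3^6 > 256: fall back to intermediate sum nodes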
@@ -213,7 +216,7 @@ build_min_table(Vars, Domain, Softness, p(Domain, CPT, [V1,V2]), Vs, [V1,V2|NewV build_intermediate_table(LL1, min(Domain,CPT), L1, V1, Key, 1.0, 0, I1, Vs, Vs1), build_intermediate_table(LL2, min(Domain,CPT), L2, V2, Key, 1.0, I1, _, Vs1, NewVs), min_cpt([V1,V2], Domain, Softness, CPT). - + int_power([], _, TabSize, TabSize). int_power([_|L], X, I0, TabSize) :- I is I0*X, @@ -270,19 +273,21 @@ include_qevidence(_, MAT, MAT, NewParents, NewParents, _, Vs, Vs). check_consistency(L1, Ev, MAT0, MAT1, L1, MAT, NewParents0, NewParents, Vs, IVs, NewVs) :- sumlist(L1, Tot), nth0(Ev, L1, Val), - (Val == Tot -> - MAT1 = MAT, - NewParents = [], - Vs = NewVs + ( + Val == Tot + -> + MAT1 = MAT, + NewParents = [], + Vs = NewVs ; - Val == 0.0 -> + Val == 0.0 -> throw(error(domain_error(incompatible_evidence),evidence(Ev))) - ; + ; MAT0 = MAT, NewParents = NewParents0, IVs = NewVs ). - + % % generate actual table, instead of trusting the solver @@ -299,7 +304,7 @@ get_ds_lengths([],[]). get_ds_lengths([V|Vs],[Sz|Lengs]) :- get_vdist_size(V, Sz), get_ds_lengths(Vs,Lengs). - + fill_in_average(Lengs, N, Base, MCPT) :- generate(Lengs, Case), average(Case, N, Base, Val), @@ -369,10 +374,10 @@ fill_in_min(_,_). get_vdist_size(V, Sz) :- - var(V), !, + var(V), !, clpbn:get_atts(V, [dist(Dist,_)]), get_dist_domain_size(Dist, Sz). get_vdist_size(V, Sz) :- - skolem(V, Dom), + skolem(V, Dom), length(Dom, Sz). diff --git a/packages/CLPBN/clpbn/bdd.yap b/packages/CLPBN/clpbn/bdd.yap index 4c45084fc..b0335eb08 100644 --- a/packages/CLPBN/clpbn/bdd.yap +++ b/packages/CLPBN/clpbn/bdd.yap @@ -9,41 +9,41 @@ V = v(Va, Vb, Vc) The generic formula is -V <- X, Y +V <- X, Y Va <- P*X1*Y1 + Q*X2*Y2 + ... - + **************************************************/ :- module(clpbn_bdd, - [bdd/3, - set_solver_parameter/2, - init_bdd_solver/4, - init_bdd_ground_solver/5, - run_bdd_solver/3, - run_bdd_ground_solver/3, - finalize_bdd_solver/1, - check_if_bdd_done/1, - call_bdd_ground_solver/6 - ]). + [bdd/3, + set_solver_parameter/2, + init_bdd_solver/4, + init_bdd_ground_solver/5, + run_bdd_solver/3, + run_bdd_ground_solver/3, + finalize_bdd_solver/1, + check_if_bdd_done/1, + call_bdd_ground_solver/6 + ]). :- use_module(library('clpbn/dists'), - [dist/4, - get_dist_domain/2, - get_dist_domain_size/2, - get_dist_all_sizes/2, - get_dist_params/2 - ]). + [dist/4, + get_dist_domain/2, + get_dist_domain_size/2, + get_dist_all_sizes/2, + get_dist_params/2 + ]). :- use_module(library('clpbn/display'), - [clpbn_bind_vals/3]). + [clpbn_bind_vals/3]). :- use_module(library('clpbn/aggregates'), - [check_for_agg_vars/2]). + [check_for_agg_vars/2]). :- use_module(library(atts)). @@ -80,8 +80,8 @@ bdds(bdd). % % QVars: all query variables? -% -% +% +% init_bdd_ground_solver(QueryKeys, AllKeys, Factors, Evidence, bdd(QueryKeys, AllKeys, Factors, Evidence)). % @@ -93,37 +93,37 @@ run_bdd_ground_solver(_QueryVars, Solutions, bdd(GKeys, Keys, Factors, Evidence) check_if_bdd_done(_Var). call_bdd_ground_solver(QueryVars, QueryKeys, AllKeys, Factors, Evidence, Output) :- - call_bdd_ground_solver_for_probabilities([QueryKeys], AllKeys, Factors, Evidence, Solutions), - clpbn_bind_vals([QueryVars], Solutions, Output). + call_bdd_ground_solver_for_probabilities([QueryKeys], AllKeys, Factors, Evidence, Solutions), + clpbn_bind_vals([QueryVars], Solutions, Output). 
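Note that the new ground-solver API above is lazy: init_bdd_ground_solver/5 merely packs its arguments into a bdd/4 state term, and the BDD itself is only constructed when the solver is run (presumably through call_bdd_ground_solver_for_probabilities/5, as call_bdd_ground_solver/6 does). A minimal sketch, with variable names illustrative:

    ?- init_bdd_ground_solver(QKeys, AllKeys, Factors, Ev, State),
       run_bdd_ground_solver([QKeys], LPs, State).
    % after the first goal, State is simply bdd(QKeys, AllKeys, Factors, Ev)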
call_bdd_ground_solver_for_probabilities(QueryKeys, AllKeys, Factors, Evidence, Solutions) :- - keys_to_numbers(AllKeys, Factors, Evidence, Hash4, Id4, FactorIds, EvidenceIds), - init_bdd(FactorIds, EvidenceIds, Hash4, Id4, BDD), - run_solver(QueryKeys, Solutions, BDD). + keys_to_numbers(AllKeys, Factors, Evidence, Hash4, Id4, FactorIds, EvidenceIds), + init_bdd(FactorIds, EvidenceIds, Hash4, Id4, BDD), + run_solver(QueryKeys, Solutions, BDD). init_bdd(FactorIds, EvidenceIds, Hash, Id, bdd(Term, Leaves, Tops, Hash, Id)) :- - sort_keys(FactorIds, AllVars, Leaves), - rb_new(OrderVs0), - foldl2(order_key, AllVars, 0, _, OrderVs0, OrderVs), - rb_new(Vars0), - rb_new(Pars0), - rb_new(Ev0), - foldl(evtotree,EvidenceIds,Ev0,Ev), - rb_new(Fs0), - foldl(ftotree,FactorIds,Fs0,Fs), - init_tops(Leaves,Tops), - get_keys_info(AllVars, Ev, Fs, OrderVs, Vars0, _Vars, Pars0, _Pars, Leaves, Tops, Term, []). + sort_keys(FactorIds, AllVars, Leaves), + rb_new(OrderVs0), + foldl2(order_key, AllVars, 0, _, OrderVs0, OrderVs), + rb_new(Vars0), + rb_new(Pars0), + rb_new(Ev0), + foldl(evtotree,EvidenceIds,Ev0,Ev), + rb_new(Fs0), + foldl(ftotree,FactorIds,Fs0,Fs), + init_tops(Leaves,Tops), + get_keys_info(AllVars, Ev, Fs, OrderVs, Vars0, _Vars, Pars0, _Pars, Leaves, Tops, Term, []). order_key( Id, I0, I, OrderVs0, OrderVs) :- - I is I0+1, - rb_insert(OrderVs0, Id, I0, OrderVs). + I is I0+1, + rb_insert(OrderVs0, Id, I0, OrderVs). evtotree(K=V,Ev0,Ev) :- - rb_insert(Ev0, K, V, Ev). + rb_insert(Ev0, K, V, Ev). ftotree(F, Fs0, Fs) :- - F = f([K|_Parents],_,_,_), - rb_insert(Fs0, K, F, Fs). + F = f([K|_Parents],_,_,_), + rb_insert(Fs0, K, F, Fs). bdd([[]],_,_) :- !. bdd([QueryVars], AllVars, AllDiffs) :- @@ -155,59 +155,59 @@ init_tops([_|Leaves],[_|Tops]) :- init_tops(Leaves,Tops). sort_keys(AllFs, AllVars, Leaves) :- - dgraph_new(Graph0), - foldl(add_node, AllFs, Graph0, Graph), - dgraph_leaves(Graph, Leaves), - dgraph_top_sort(Graph, AllVars). + dgraph_new(Graph0), + foldl(add_node, AllFs, Graph0, Graph), + dgraph_leaves(Graph, Leaves), + dgraph_top_sort(Graph, AllVars). add_node(f([K|Parents],_,_,_), Graph0, Graph) :- - dgraph_add_vertex(Graph0, K, Graph1), - foldl(add_edge(K), Parents, Graph1, Graph). - + dgraph_add_vertex(Graph0, K, Graph1), + foldl(add_edge(K), Parents, Graph1, Graph). + add_edge(K, K0, Graph0, Graph) :- - dgraph_add_edge(Graph0, K0, K, Graph). + dgraph_add_edge(Graph0, K0, K, Graph). sort_vars(AllVars0, AllVars, Leaves) :- - dgraph_new(Graph0), - build_graph(AllVars0, Graph0, Graph), - dgraph_leaves(Graph, Leaves), - dgraph_top_sort(Graph, AllVars). + dgraph_new(Graph0), + build_graph(AllVars0, Graph0, Graph), + dgraph_leaves(Graph, Leaves), + dgraph_top_sort(Graph, AllVars). build_graph([], Graph, Graph). build_graph([V|AllVars0], Graph0, Graph) :- - clpbn:get_atts(V, [dist(_DistId, Parents)]), !, - dgraph_add_vertex(Graph0, V, Graph1), - add_parents(Parents, V, Graph1, GraphI), - build_graph(AllVars0, GraphI, Graph). + clpbn:get_atts(V, [dist(_DistId, Parents)]), !, + dgraph_add_vertex(Graph0, V, Graph1), + add_parents(Parents, V, Graph1, GraphI), + build_graph(AllVars0, GraphI, Graph). build_graph(_V.AllVars0, Graph0, Graph) :- - build_graph(AllVars0, Graph0, Graph). + build_graph(AllVars0, Graph0, Graph). add_parents([], _V, Graph, Graph). add_parents([V0|Parents], V, Graph0, GraphF) :- - dgraph_add_edge(Graph0, V0, V, GraphI), - add_parents(Parents, V, GraphI, GraphF). + dgraph_add_edge(Graph0, V0, V, GraphI), + add_parents(Parents, V, GraphI, GraphF). 
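A small example of the graph construction above, with made-up keys a and b: the factor list [f([a],_,_,_), f([b,a],_,_,_)] yields a single edge from parent a to child b, so the topological order starts at the roots and the leaves are the childless keys.

    ?- sort_keys([f([a],_,_,_), f([b,a],_,_,_)], AllVars, Leaves).
    AllVars = [a,b],
    Leaves = [b].

(dgraph_top_sort/2 may return any valid topological order; [a,b] is the only one here.)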
get_keys_info([], _, _, _, Vs, Vs, Ps, Ps, _, _) --> []. get_keys_info([V|MoreVs], Evs, Fs, OrderVs, Vs, VsF, Ps, PsF, Lvs, Outs) --> - { rb_lookup(V, F, Fs) }, !, - { F = f([V|Parents], _, _, DistId) }, + { rb_lookup(V, F, Fs) }, !, + { F = f([V|Parents], _, _, DistId) }, %{writeln(v:DistId:Parents)}, - [DIST], - { get_key_info(V, F, Fs, Evs, OrderVs, DistId, Parents, Vs, Vs2, Ps, Ps1, Lvs, Outs, DIST) }, - get_keys_info(MoreVs, Evs, Fs, OrderVs, Vs2, VsF, Ps1, PsF, Lvs, Outs). + [DIST], + { get_key_info(V, F, Fs, Evs, OrderVs, DistId, Parents, Vs, Vs2, Ps, Ps1, Lvs, Outs, DIST) }, + get_keys_info(MoreVs, Evs, Fs, OrderVs, Vs2, VsF, Ps1, PsF, Lvs, Outs). get_key_info(V, F, Fs, Evs, OrderVs, DistId, Parents0, Vs, Vs2, Ps, Ps1, Lvs, Outs, DIST) :- - reorder_keys(Parents0, OrderVs, Parents, Map), - check_key_p(DistId, F, Map, Parms, _ParmVars, Ps, Ps1), - unbound_parms(Parms, ParmVars), - F = f(_,[Size|_],_,_), - check_key(V, Size, DIST, Vs, Vs1), - DIST = info(V, Tree, Ev, Values, Formula, ParmVars, Parms), - % get a list of form [[P00,P01], [P10,P11], [P20,P21]] - foldl(get_key_parent(Fs), Parents, PVars, Vs1, Vs2), - cross_product(Values, Ev, PVars, ParmVars, Formula0), + reorder_keys(Parents0, OrderVs, Parents, Map), + check_key_p(DistId, F, Map, Parms, _ParmVars, Ps, Ps1), + unbound_parms(Parms, ParmVars), + F = f(_,[Size|_],_,_), + check_key(V, Size, DIST, Vs, Vs1), + DIST = info(V, Tree, Ev, Values, Formula, ParmVars, Parms), + % get a list of form [[P00,P01], [P10,P11], [P20,P21]] + foldl(get_key_parent(Fs), Parents, PVars, Vs1, Vs2), + cross_product(Values, Ev, PVars, ParmVars, Formula0), % (numbervars(Formula0,0,_),writeln(formula0:Ev:Formula0), fail ; true), - get_key_evidence(V, Evs, DistId, Tree, Ev, Formula0, Formula, Lvs, Outs). + get_key_evidence(V, Evs, DistId, Tree, Ev, Formula0, Formula, Lvs, Outs). % (numbervars(Formula,0,_),writeln(formula:Formula), fail ; true). get_vars_info([], Vs, Vs, Ps, Ps, _, _) --> []. @@ -215,7 +215,7 @@ get_vars_info([V|MoreVs], Vs, VsF, Ps, PsF, Lvs, Outs) --> { clpbn:get_atts(V, [dist(DistId, Parents)]) }, !, %{writeln(v:DistId:Parents)}, [DIST], - { get_var_info(V, DistId, Parents, Vs, Vs2, Ps, Ps1, Lvs, Outs, DIST) }, + { get_var_info(V, DistId, Parents, Vs, Vs2, Ps, Ps1, Lvs, Outs, DIST) }, get_vars_info(MoreVs, Vs2, VsF, Ps1, PsF, Lvs, Outs). get_vars_info([_|MoreVs], Vs0, VsF, Ps0, PsF, VarsInfo, Lvs, Outs) :- get_vars_info(MoreVs, Vs0, VsF, Ps0, PsF, VarsInfo, Lvs, Outs). @@ -298,17 +298,17 @@ generate_3tree(OUT, [[P0,P1,P2]], I00, I10, I20, IR0, N0, N1, N2, R, Exp, _ExpF) IR is IR0-1, ( satisf(I00+1, I10, I20, IR, N0, N1, N2, R, Exp) -> L0 = [P0|L1] - ; + ; L0 = L1 ), ( satisf(I00, I10+1, I20, IR, N0, N1, N2, R, Exp) -> L1 = [P1|L2] - ; + ; L1 = L2 ), ( satisf(I00, I10, I20+1, IR, N0, N1, N2, R, Exp) -> L2 = [P2] - ; + ; L2 = [] ), to_disj(L0, OUT). @@ -316,23 +316,23 @@ generate_3tree(OUT, [[P0,P1,P2]|Ps], I00, I10, I20, IR0, N0, N1, N2, R, Exp, Exp IR is IR0-1, ( satisf(I00+1, I10, I20, IR, N0, N1, N2, R, Exp) -> I0 is I00+1, generate_3tree(O0, Ps, I0, I10, I20, IR, N0, N1, N2, R, Exp, ExpF) - -> + -> L0 = [P0*O0|L1] - ; + ; L0 = L1 ), ( satisf(I00, I10+1, I20, IR0, N0, N1, N2, R, Exp) -> I1 is I10+1, generate_3tree(O1, Ps, I00, I1, I20, IR, N0, N1, N2, R, Exp, ExpF) - -> + -> L1 = [P1*O1|L2] - ; + ; L1 = L2 ), ( satisf(I00, I10, I20+1, IR0, N0, N1, N2, R, Exp) -> I2 is I20+1, generate_3tree(O2, Ps, I00, I10, I2, IR, N0, N1, N2, R, Exp, ExpF) - -> + -> L2 = [P2*O2] - ; + ; L2 = [] ), to_disj(L0, OUT). 
@@ -378,18 +378,18 @@ avg_tree([Vals|PVars], P, Max, Im, IM, Size, O, H0, HF) :- MaxI is Max-(Size-1), avg_exp(Vals, PVars, 0, P, MaxI, Size, Im, IM, HI, HF, Exp), simplify_exp(Exp, Simp). - + avg_exp([], _, _, _P, _Max, _Size, _Im, _IM, H, H, 0). avg_exp([Val|Vals], PVars, I0, P0, Max, Size, Im, IM, HI, HF, O) :- (Vals = [] -> O=O1 ; O = Val*O1+not(Val)*O2 ), Im1 is max(0, Im-I0), IM1 is IM-I0, - ( IM1 < 0 -> O1 = 0, H2 = HI; /* we have exceed maximum */ - Im1 > Max -> O1 = 0, H2 = HI; /* we cannot make to minimum */ - Im1 = 0, IM1 > Max -> O1 = 1, H2 = HI; /* we cannot exceed maximum */ + ( IM1 < 0 -> O1 = 0, H2 = HI ; /* we have exceed maximum */ + Im1 > Max -> O1 = 0, H2 = HI ; /* we cannot make to minimum */ + Im1 = 0, IM1 > Max -> O1 = 1, H2 = HI ; /* we cannot exceed maximum */ P is P0+1, avg_tree(PVars, P, Max, Im1, IM1, Size, O1, HI, H2) - ), + ), I is I0+1, avg_exp(Vals, PVars, I, P0, Max, Size, Im, IM, H2, HF, O2). @@ -434,14 +434,14 @@ bup_avg(V, Size, Domain, Parents0, Vs, Vs2, Lvs, Outs, DIST) :- bin_sums(Vs, Sums, F) :- vs_to_sums(Vs, Sums0), bin_sums(Sums0, Sums, F, []). - + vs_to_sums([], []). vs_to_sums([V|Vs], [Sum|Sums0]) :- - Sum =.. [sum|V], - vs_to_sums(Vs, Sums0). + Sum =.. [sum|V], + vs_to_sums(Vs, Sums0). bin_sums([Sum], Sum) --> !. -bin_sums(LSums, Sum) --> +bin_sums(LSums, Sum) --> { halve(LSums, Sums1, Sums2) }, bin_sums(Sums1, Sum1), bin_sums(Sums2, Sum2), @@ -458,14 +458,14 @@ head(Take, [H|L], [H|Sums1], Sum2) :- head(Take1, L, Sums1, Sum2). sum(Sum1, Sum2, Sum) --> - { functor(Sum1, _, M1), - functor(Sum2, _, M2), - Max is M1+M2-2, - Max1 is Max+1, - Max0 is M2-1, - functor(Sum, sum, Max1), - Sum1 =.. [_|PVals] }, - expand_sums(PVals, 0, Max0, Max1, M2, Sum2, Sum). + { functor(Sum1, _, M1), + functor(Sum2, _, M2), + Max is M1+M2-2, + Max1 is Max+1, + Max0 is M2-1, + functor(Sum, sum, Max1), + Sum1 =.. [_|PVals] }, + expand_sums(PVals, 0, Max0, Max1, M2, Sum2, Sum). % % bottom up step by step @@ -509,12 +509,12 @@ expand_sums(Parents, I0, Max0, Max, Size, Sums, Prot, NewSums, [O=SUM*1|F], F0) arg(I, NewSums, O), sum_all(Parents, 0, I0, Max0, Sums, List), to_disj(List, SUM), - expand_sums(Parents, I, Max0, Max, Size, Sums, Prot, NewSums, F, F0). + expand_sums(Parents, I, Max0, Max, Size, Sums, Prot, NewSums, F, F0). expand_sums(Parents, I0, Max0, Max, Size, Sums, Prot, NewSums, F, F0) :- I is I0+1, arg(I, Sums, O), arg(I, NewSums, O), - expand_sums(Parents, I, Max0, Max, Size, Sums, Prot, NewSums, F, F0). + expand_sums(Parents, I, Max0, Max, Size, Sums, Prot, NewSums, F, F0). % %inner loop: find all parents that contribute to A_ji, @@ -536,14 +536,14 @@ sum_all([_V|Vs], Pos, I, Max0, Sums, List) :- gen_arg(J, Sums, Max, S0) :- gen_arg(0, Max, J, Sums, S0). - + gen_arg(Max, Max, J, Sums, S0) :- !, - I is Max+1, - arg(I, Sums, A), + I is Max+1, + arg(I, Sums, A), ( Max = J -> S0 = A ; S0 = not(A)). gen_arg(I0, Max, J, Sums, S) :- - I is I0+1, - arg(I, Sums, A), + I is I0+1, + arg(I, Sums, A), ( I0 = J -> S = A*S0 ; S = not(A)*S0), gen_arg(I, Max, J, Sums, S0). @@ -647,19 +647,19 @@ copy(N, [], [], Ms, Parms0, Parms, ParmVars) :-!, copy(N, Ms, NewMs, NewMs, Parms0, Parms, ParmVars). 
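The bottom-up combination above works pairwise on per-parent indicator terms. A small worked case, assuming two binary parents: vs_to_sums/2 produces the terms sum(A0,A1) and sum(B0,B1); in the sum//3 clause M1 = M2 = 2, so Max is 2 and a fresh sum/3 skeleton is built, its three arguments standing for the possible totals 0, 1 and 2, which expand_sums/7 then fills with the corresponding disjunctions.

    % sketch: sum(sum(A0,A1), sum(B0,B1), Sum) builds Sum = sum(S0,S1,S2),
    % emitting one formula Si = <disjunction of ways to reach total i> * 1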
copy(N, D.Ds, ND.NDs, New, El.Parms0, NEl.Parms, V.ParmVars) :- N1 is N-1, - (El == 0.0 -> + (El == 0.0 -> NEl = 0, V = NEl, ND = D - ;El == 1.0 -> + ;El == 1.0 -> NEl = 1, V = NEl, ND = 0.0 - ;El == 0 -> + ;El == 0 -> NEl = 0, V = NEl, ND = D - ;El =:= 1 -> + ;El =:= 1 -> NEl = 1, V = NEl, ND = 0.0, @@ -692,9 +692,9 @@ get_parents(V.Parents, Values.PVars, Vs0, Vs) :- get_parents(Parents, PVars, Vs1, Vs). get_key_parent(Fs, V, Values, Vs0, Vs) :- - INFO = info(V, _Parent, _Ev, Values, _, _, _), - rb_lookup(V, f(_, [Size|_], _, _), Fs), - check_key(V, Size, INFO, Vs0, Vs). + INFO = info(V, _Parent, _Ev, Values, _, _, _), + rb_lookup(V, f(_, [Size|_], _, _), Fs), + check_key(V, Size, INFO, Vs0, Vs). check_key(V, _, INFO, Vs, Vs) :- rb_lookup(V, INFO, Vs), !. @@ -809,20 +809,20 @@ skim_for_theta([[P|Other]|More], not(P)*Ps, [Other|Left], New ) :- skim_for_theta(More, Ps, Left, New ). get_key_evidence(V, Evs, _, Tree, Ev, F0, F, Leaves, Finals) :- - rb_lookup(V, Pos, Evs), !, - zero_pos(0, Pos, Ev), - insert_output(Leaves, V, Finals, Tree, Outs, SendOut), - get_outs(F0, F, SendOut, Outs). + rb_lookup(V, Pos, Evs), !, + zero_pos(0, Pos, Ev), + insert_output(Leaves, V, Finals, Tree, Outs, SendOut), + get_outs(F0, F, SendOut, Outs). % hidden deterministic node, can be removed. %% get_key_evidence(V, _, DistId, _Tree, Ev, F0, [], _Leaves, _Finals) :- -%% deterministic(V, DistId), +%% deterministic(V, DistId), %% !, %% one_list(Ev), -%% eval_outs(F0). +%% eval_outs(F0). %% no evidence !!! get_key_evidence(V, _, _, Tree, _Values, F0, F1, Leaves, Finals) :- - insert_output(Leaves, V, Finals, Tree, Outs, SendOut), - get_outs(F0, F1, SendOut, Outs). + insert_output(Leaves, V, Finals, Tree, Outs, SendOut), + get_outs(F0, F1, SendOut, Outs). get_evidence(V, Tree, Ev, F0, F, Leaves, Finals) :- clpbn:get_atts(V, [evidence(Pos)]), !, @@ -836,17 +836,17 @@ get_evidence(V, _Tree, Ev, F0, [], _Leaves, _Finals) :- ( Name = 'AVG' ; Name = 'MAX' ; Name = 'MIN' ), !, one_list(Ev), - eval_outs(F0). + eval_outs(F0). %% no evidence !!! get_evidence(V, Tree, _Values, F0, F1, Leaves, Finals) :- insert_output(Leaves, V, Finals, Tree, Outs, SendOut), get_outs(F0, F1, SendOut, Outs). zero_pos(_, _Pos, []). -zero_pos(Pos, Pos, [1|Values]) :- !, +zero_pos(Pos, Pos, [1|Values]) :- !, I is Pos+1, zero_pos(I, Pos, Values). -zero_pos(I0, Pos, [0|Values]) :- +zero_pos(I0, Pos, [0|Values]) :- I is I0+1, zero_pos(I, Pos, Values). @@ -855,7 +855,7 @@ one_list(1.Ev) :- one_list(Ev). % -% insert a node with the disj of all alternatives, this is only done if node ends up to be in the output +% insert a node with the disj of all alternatives, this is only done if node ends up to be in the output % insert_output([], _V, [], _Out, _Outs, []). insert_output(V._Leaves, V0, [Top|_], Top, Outs, [Top = Outs]) :- V == V0, !. @@ -863,7 +863,7 @@ insert_output(_.Leaves, V, _.Finals, Top, Outs, SendOut) :- insert_output(Leaves, V, Finals, Top, Outs, SendOut). -get_outs([V=F], [V=NF|End], End, V) :- !, +get_outs([V=F], [V=NF|End], End, V) :- !, % writeln(f0:F), simplify_exp(F,NF). get_outs([(V=F)|Outs], [(V=NF)|NOuts], End, (F0 + V)) :- @@ -878,11 +878,11 @@ eval_outs([(V=F)|Outs]) :- eval_outs(Outs). run_solver(Qs, LLPs, bdd(Term, Leaves, Nodes, Hash, Id)) :- - lists_of_keys_to_ids(Qs, QIds, Hash, _, Id, _), - findall(LPs, - (member(Q, QIds), - run_bdd_solver([Q],LPs,bdd(Term,Leaves,Nodes))), - LLPs). + lists_of_keys_to_ids(Qs, QIds, Hash, _, Id, _), + findall(LPs, + (member(Q, QIds), + run_bdd_solver([Q],LPs,bdd(Term,Leaves,Nodes))), + LLPs). 
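The evidence handling above replaces a variable's indicator list by a 0/1 vector with a single 1 at the observed position. For example, for a 4-valued domain with evidence at position 2:

    ?- zero_pos(0, 2, [E0,E1,E2,E3]).
    E0 = E1 = E3 = 0,
    E2 = 1.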
run_bdd_solver([Vs], LPs, bdd(Term, _Leaves, Nodes)) :- build_out_node(Nodes, Node), @@ -988,7 +988,7 @@ all_cnfs([info(_V, Tree, Ev, Values, Formula, ParmVars, Parms)|Term], BindsF, IV v_in(V, [V0|_]) :- V == V0, !. v_in(V, [_|Vs]) :- - v_in(V, Vs). + v_in(V, Vs). all_indicators(Values) --> { values_to_disj(Values, Disj) }, @@ -1017,7 +1017,7 @@ parameters([(V0=Disj*_I0)|Formula], Tree) --> parameters(Formula, Tree). % transform V0<- A*B+C*(D+not(E)) -% [V0+not(A)+not(B),V0+not(C)+not(D),V0+not(C)+E] +% [V0+not(A)+not(B),V0+not(C)+not(D),V0+not(C)+E] conj(Disj, V0) --> { conj2(Disj, [[V0]], LVs) }, to_disjs(LVs). @@ -1057,11 +1057,10 @@ generate_exclusions([V0|SeenVs], V) --> build_cnf(CNF, IVs, Indics, AllParms, AllParmValues, Val) :- %(numbervars(CNF,1,_), writeln(cnf_to_ddnnf(CNF, Vars, IVs, [], F)), fail ; true ), - cnf_to_ddnnf(CNF, AllParms, F), + cnf_to_ddnnf(CNF, AllParms, F), AllParms = AllParmValues, IVs = Indics, term_variables(CNF, Extra), set_to_ones(Extra), ddnnf_is(F, Val). - diff --git a/packages/CLPBN/clpbn/bnt.yap b/packages/CLPBN/clpbn/bnt.yap index f510abacd..c09786b20 100644 --- a/packages/CLPBN/clpbn/bnt.yap +++ b/packages/CLPBN/clpbn/bnt.yap @@ -1,45 +1,51 @@ -:- module(bnt, [do_bnt/3, - create_bnt_graph/2, - check_if_bnt_done/1]). +:- module(bnt, + [do_bnt/3, + create_bnt_graph/2, + check_if_bnt_done/1 + ]). -:- use_module(library('clpbn/display'), [ - clpbn_bind_vals/3]). +:- use_module(library('clpbn/display'), + [clpbn_bind_vals/3]). -:- use_module(library('clpbn/dists'), [ - get_dist_domain_size/2, - get_dist_domain/2, - get_dist_params/2 - ]). +:- use_module(library('clpbn/dists'), + [get_dist_domain_size/2, + get_dist_domain/2, + get_dist_params/2 + ]). -:- use_module(library('clpbn/discrete_utils'), [ - reorder_CPT/5]). +:- use_module(library('clpbn/discrete_utils'), + [reorder_CPT/5]). -:- use_module(library(matlab), [start_matlab/1, - close_matlab/0, - matlab_on/0, - matlab_eval_string/1, - matlab_eval_string/2, - matlab_matrix/4, - matlab_vector/2, - matlab_sequence/3, - matlab_initialized_cells/4, - matlab_get_variable/2, - matlab_call/2 - ]). +:- use_module(library(matlab), + [start_matlab/1, + close_matlab/0, + matlab_on/0, + matlab_eval_string/1, + matlab_eval_string/2, + matlab_matrix/4, + matlab_vector/2, + matlab_sequence/3, + matlab_initialized_cells/4, + matlab_get_variable/2, + matlab_call/2 + ]). -:- use_module(library(dgraphs), [dgraph_new/1, - dgraph_add_vertices/3, - dgraph_add_edges/3, - dgraph_top_sort/2, - dgraph_vertices/2, - dgraph_edges/2 - ]). +:- use_module(library(dgraphs), + [dgraph_new/1, + dgraph_add_vertices/3, + dgraph_add_edges/3, + dgraph_top_sort/2, + dgraph_vertices/2, + dgraph_edges/2 + ]). -:- use_module(library(lists), [append/3, - member/2,nth/3]). +:- use_module(library(lists), + [append/3, + member/2,nth/3 + ]). -:- use_module(library(ordsets), [ - ord_insert/3]). +:- use_module(library(ordsets), + [ord_insert/3]). :- yap_flag(write_strings,on). @@ -95,7 +101,7 @@ do_bnt(QueryVars, AllVars, AllDiffs) :- add_evidence(SortedVertices, Size, NumberedVertices), marginalize(QueryVars, SortedVertices, NumberedVertices, Ps), clpbn_bind_vals(QueryVars, Ps, AllDiffs). - + create_bnt_graph(AllVars, Representatives) :- create_bnt_graph(AllVars, Representatives, _, _, _). @@ -148,7 +154,7 @@ extract_kvars([V|AllVars],[N-i(V,Parents)|KVars]) :- extract_kvars(AllVars,KVars). split_tied_vars([],[],[]). 
-split_tied_vars([N-i(V,Par)|More],[N-g(Vs,Ns,Es)|TVars],[N|LNs]) :- +split_tied_vars([N-i(V,Par)|More],[N-g(Vs,Ns,Es)|TVars],[N|LNs]) :- get_pars(Par,N,V,NPs,[],Es0,Es), get_tied(More,N,Vs,[V],Ns,NPs,Es,Es0,SVars), split_tied_vars(SVars,TVars,LNs). @@ -200,7 +206,7 @@ extract_graph(AllVars, Graph) :- dgraph_add_vertices(Graph0, AllVars, Graph1), get_edges(AllVars,Edges), dgraph_add_edges(Graph1, Edges, Graph). - + get_edges([],[]). get_edges([V|AllVars],Edges) :- clpbn:get_atts(V, [dist(_,Parents)]), @@ -218,13 +224,13 @@ number_graph([V|SortedGraph], [I|Is], I0, IF) :- % clpbn:get_atts(V,[key(K)]), % write(I:K),nl, number_graph(SortedGraph, Is, I, IF). - + init_bnet(propositional, SortedGraph, NumberedGraph, Size, []) :- build_dag(SortedGraph, Size), init_discrete_nodes(SortedGraph, Size), bnet <-- mk_bnet(dag, node_sizes, \discrete, discrete_nodes), dump_cpts(SortedGraph, NumberedGraph). - + init_bnet(tied, SortedGraph, NumberedGraph, Size, Representatives) :- build_dag(SortedGraph, Size), init_discrete_nodes(SortedGraph, Size), @@ -314,7 +320,7 @@ get_sizes_and_ids([V|Parents],[Id-V|Ids]) :- extract_vars([], L, L). extract_vars([_-V|NIds], NParents, Vs) :- extract_vars(NIds, [V|NParents], Vs). - + mkcpt(BayesNet, I, Tab) :- (BayesNet.'CPD'({I})) <-- tabular_CPD(BayesNet,I,Tab). @@ -330,7 +336,7 @@ create_class_vector([], [], [],[]). create_class_vector([V|Graph], [I|Is], [Id|Classes], [Id-v(V,I,Parents)|Sets]) :- clpbn:get_atts(V, [dist(Id,Parents)]), create_class_vector(Graph, Is,Classes,Sets). - + representatives([],[]). representatives([Class-Rep|Reps1],[Class-Rep|Reps]) :- nonrepresentatives(Reps1, Class, Reps2), @@ -376,7 +382,7 @@ add_evidence(Graph, Size, Is) :- mk_evidence(Graph, Is, LN), matlab_initialized_cells( 1, Size, LN, evidence), [engine_ev, loglik] <-- enter_evidence(engine, evidence). - + mk_evidence([], [], []). mk_evidence([V|L], [I|Is], [ar(1,I,EvVal1)|LN]) :- clpbn:get_atts(V, [evidence(EvVal)]), !, @@ -384,7 +390,7 @@ mk_evidence([V|L], [I|Is], [ar(1,I,EvVal1)|LN]) :- mk_evidence(L, Is, LN). mk_evidence([_|L], [_|Is], LN) :- mk_evidence(L, Is, LN). - + evidence_val(Ev,Val,[Ev|_],Val) :- !. evidence_val(Ev,I0,[_|Domain],Val) :- I1 is I0+1, @@ -403,7 +409,7 @@ marginalize([Vs], SortedVars, NumberedVars,Ps) :- length(SortedVars,L), cycle_values(Den, Ev, Vs, L, Vals, Ps). -cycle_values(_D, _Ev, _Vs, _Size, [], []). +cycle_values(_D, _Ev, _Vs, _Size, [], []). cycle_values(Den,Ev,Vs,Size,[H|T],[HP|TP]):- mk_evidence_query(Vs, H, EvQuery), @@ -421,5 +427,4 @@ mk_evidence_query([V|L], [H|T], [ar(1,Pos,El)|LN]) :- get_dist_domain(Id,D), nth(El,D,H), mk_evidence_query(L, T, LN). - diff --git a/packages/CLPBN/clpbn/connected.yap b/packages/CLPBN/clpbn/connected.yap index 450c61bd9..e71d90bee 100644 --- a/packages/CLPBN/clpbn/connected.yap +++ b/packages/CLPBN/clpbn/connected.yap @@ -1,26 +1,28 @@ :- module(clpbn_connected, - [influences/3, - factor_influences/4, - init_influences/3, - influences/4] - ). + [influences/3, + factor_influences/4, + init_influences/3, + influences/4 + ]). :- use_module(library(maplist)). :- use_module(library(dgraphs), - [dgraph_new/1, - dgraph_add_edges/3, - dgraph_add_vertex/3, - dgraph_neighbors/3, - dgraph_edge/3, - dgraph_transpose/2]). + [dgraph_new/1, + dgraph_add_edges/3, + dgraph_add_vertex/3, + dgraph_neighbors/3, + dgraph_edge/3, + dgraph_transpose/2 + ]). :- use_module(library(rbtrees), - [rb_new/1, - rb_lookup/3, - rb_insert/4, - rb_visit/2]). + [rb_new/1, + rb_lookup/3, + rb_insert/4, + rb_visit/2 + ]). 
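A hedged usage sketch of the entry point defined just below: factor_influences/4 runs a Bayes-ball style pass over the factor graph (the throw_below/throw_above predicates that follow) and returns only the keys that can influence the query given the evidence, so the solvers need not ground the whole network. The key name is illustrative.

    ?- factor_influences(Factors, [grade(c1,s1)], Evidence, RelevantKeys).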
factor_influences(Vs, QVars, Ev, LV) :- init_factor_influences(Vs, G, RG), @@ -59,13 +61,13 @@ build_edges([P|Parents], V, [P-V|Edges]) :- % search for the set of variables that influence V influences(Vs, G, RG, Vars) :- - influences(Vs, [], G, RG, Vars). + influences(Vs, [], G, RG, Vars). % search for the set of variables that influence V influences(Vs, Evs, G, RG, Vars) :- - rb_new(Visited0), - foldl(influence(Evs, G, RG), Vs, Visited0, Visited), - all_top(Visited, Evs, Vars). + rb_new(Visited0), + foldl(influence(Evs, G, RG), Vs, Visited0, Visited), + all_top(Visited, Evs, Vars). influence(_, _G, _RG, V, Vs, Vs) :- rb_lookup(V, [T|B], Vs), T == t, B == b, !. @@ -89,76 +91,78 @@ process_new_variable(V, Evs, G, RG, Vs0, Vs2) :- % visited throw_below(Evs, G, RG, Child, Vs0, Vs1) :- rb_lookup(Child, [_|B], Vs0), !, - ( - B == b -> + ( + B == b + -> Vs0 = Vs1 % been there before - ; + ; B = b, % mark it - handle_ball_from_above(Child, Evs, G, RG, Vs0, Vs1) - ). + handle_ball_from_above(Child, Evs, G, RG, Vs0, Vs1) + ). throw_below(Evs, G, RG, Child, Vs0, Vs2) :- rb_insert(Vs0, Child, [_|b], Vs1), handle_ball_from_above(Child, Evs, G, RG, Vs1, Vs2). % share this with parents, if we have evidence handle_ball_from_above(V, Evs, G, RG, Vs0, Vs1) :- - var(V), - clpbn:get_atts(V,[evidence(_)]), !, - dgraph_neighbors(V, RG, Parents), - foldl(throw_above(Evs, G, RG), Parents, Vs0, Vs1). + var(V), + clpbn:get_atts(V,[evidence(_)]), !, + dgraph_neighbors(V, RG, Parents), + foldl(throw_above(Evs, G, RG), Parents, Vs0, Vs1). handle_ball_from_above(V, Evs, G, RG, Vs0, Vs1) :- - nonvar(V), - rb_lookup(V,_,Evs), !, - dgraph_neighbors(V, RG, Parents), - foldl(throw_above(Evs, G, RG), Parents, Vs0, Vs1). + nonvar(V), + rb_lookup(V,_,Evs), !, + dgraph_neighbors(V, RG, Parents), + foldl(throw_above(Evs, G, RG), Parents, Vs0, Vs1). % propagate to kids, if we do not handle_ball_from_above(V, Evs, G, RG, Vs0, Vs1) :- - dgraph_neighbors(V, G, Children), - foldl(throw_below(Evs, G, RG), Children, Vs0, Vs1). - + dgraph_neighbors(V, G, Children), + foldl(throw_below(Evs, G, RG), Children, Vs0, Vs1). + % visited throw_above(Evs, G, RG, Parent, Vs0, Vs1) :- rb_lookup(Parent, [T|_], Vs0), !, - ( - T == t -> + ( + T == t + -> Vs1 = Vs0 % been there before - ; + ; T = t, % mark it - handle_ball_from_below(Parent, Evs, G, RG, Vs0, Vs1) - ). + handle_ball_from_below(Parent, Evs, G, RG, Vs0, Vs1) + ). throw_above(Evs, G, RG, Parent, Vs0, Vs2) :- rb_insert(Vs0, Parent, [t|_], Vs1), handle_ball_from_below(Parent, Evs, G, RG, Vs1, Vs2). % share this with parents, if we have evidence handle_ball_from_below(V, _Evs, _, _, Vs, Vs) :- - var(V), - clpbn:get_atts(V,[evidence(_)]), !. + var(V), + clpbn:get_atts(V,[evidence(_)]), !. handle_ball_from_below(V, Evs, _, _, Vs, Vs) :- - nonvar(V), - rb_lookup(V, _, Evs), !. + nonvar(V), + rb_lookup(V, _, Evs), !. % propagate to kids, if we do not handle_ball_from_below(V, Evs, G, RG, Vs0, Vs1) :- - dgraph_neighbors(V, RG, Parents), - propagate_ball_from_below(Parents, Evs, V, G, RG, Vs0, Vs1). + dgraph_neighbors(V, RG, Parents), + propagate_ball_from_below(Parents, Evs, V, G, RG, Vs0, Vs1). propagate_ball_from_below([], Evs, V, G, RG, Vs0, Vs1) :- !, - dgraph_neighbors(V, G, Children), - foldl(throw_below(Evs, G, RG), Children, Vs0, Vs1). + dgraph_neighbors(V, G, Children), + foldl(throw_below(Evs, G, RG), Children, Vs0, Vs1). propagate_ball_from_below(Parents, Evs, _V, G, RG, Vs0, Vs1) :- - foldl(throw_above(Evs, G, RG), Parents, Vs0, Vs1). 
+ foldl(throw_above(Evs, G, RG), Parents, Vs0, Vs1). all_top(T, Evs, Vs) :- - rb_visit(T, Pairs), - foldl( get_top(Evs), Pairs, [], Vs). + rb_visit(T, Pairs), + foldl( get_top(Evs), Pairs, [], Vs). get_top(_EVs, V-[T|_], Vs, [V|Vs]) :- - T == t, !. + T == t, !. get_top(_EVs, V-_, Vs, [V|Vs]) :- - var(V), - clpbn:get_atts(V,[evidence(_)]), !. + var(V), + clpbn:get_atts(V,[evidence(_)]), !. get_top(EVs, V-_, Vs, [V|Vs]) :- - nonvar(V), - rb_lookup(V, _, EVs), !. + nonvar(V), + rb_lookup(V, _, EVs), !. get_top(_, _, Vs, Vs). diff --git a/packages/CLPBN/clpbn/discrete_utils.yap b/packages/CLPBN/clpbn/discrete_utils.yap index f8f3c9906..8ba385f4e 100644 --- a/packages/CLPBN/clpbn/discrete_utils.yap +++ b/packages/CLPBN/clpbn/discrete_utils.yap @@ -1,10 +1,14 @@ -:- module(discrete_utils, [project_from_CPT/3, - reorder_CPT/5, - get_dist_size/2]). +:- module(discrete_utils, + [project_from_CPT/3, + reorder_CPT/5, + get_dist_size/2 + ]). -:- use_module(library(clpbn/dists), [get_dist_domain_size/2, - get_dist_domain/2]). +:- use_module(library(clpbn/dists), + [get_dist_domain_size/2, + get_dist_domain/2 + ]). % % remove columns from a table % @@ -20,11 +24,11 @@ propagate_evidence(V, Evs) :- clpbn:get_atts(V, [evidence(Ev),dist(Id,_)]), !, get_dist_domain(Id, Out), generate_szs_with_evidence(Out,Ev,0,Evs,Found), - (var(Found) -> - clpbn:get_atts(V, [key(K)]), - throw(clpbn(evidence_does_not_match,K,Ev,[Out])) + (var(Found) -> + clpbn:get_atts(V, [key(K)]), + throw(clpbn(evidence_does_not_match,K,Ev,[Out])) ; - true + true ). propagate_evidence(_, _). @@ -143,4 +147,3 @@ get_sizes([V|Deps], [Sz|Sizes]) :- get_dist_domain_size(Id,Sz), get_sizes(Deps, Sizes). - diff --git a/packages/CLPBN/clpbn/display.yap b/packages/CLPBN/clpbn/display.yap index b8c9575c6..7ecba7880 100644 --- a/packages/CLPBN/clpbn/display.yap +++ b/packages/CLPBN/clpbn/display.yap @@ -1,17 +1,20 @@ -:- module(clpbn_display, [ - clpbn_bind_vals/3]). + +:- module(clpbn_display, + [clpbn_bind_vals/3]). :- use_module(library(lists), - [ - member/2 - ]). + [member/2]). -:- use_module(library(clpbn/dists), [get_dist_domain/2]). +:- use_module(library(clpbn/dists), + [get_dist_domain/2]). -:- use_module(library(clpbn), [use_parfactors/1]). +:- use_module(library(clpbn), + [use_parfactors/1]). :- use_module(library(maplist)). +:- use_module(library(atts)). + :- attribute posterior/4. @@ -75,7 +78,7 @@ clpbn_bind_vals([Vs|MoreVs],[Ps|MorePs],AllDiffs) :- clpbn_bind_vals2([],_,_) :- !. % simple case, we want a distribution on a single variable. -clpbn_bind_vals2([V],Ps,AllDiffs) :- +clpbn_bind_vals2([V],Ps,AllDiffs) :- use_parfactors(on), !, clpbn:get_atts(V, [key(K)]), pfl:skolem(K,Vals), diff --git a/packages/CLPBN/clpbn/dists.yap b/packages/CLPBN/clpbn/dists.yap index 8f71e6fae..ce796e524 100644 --- a/packages/CLPBN/clpbn/dists.yap +++ b/packages/CLPBN/clpbn/dists.yap @@ -3,47 +3,51 @@ % :- module(clpbn_dist, - [ - dist/1, - dist/4, - dists/1, - dist_new_table/2, - get_dist/4, - get_dist_matrix/5, - get_possibly_deterministic_dist_matrix/5, - get_dist_domain/2, - get_dist_domain_size/2, - get_dist_params/2, - get_dist_key/2, - get_dist_all_sizes/2, - get_evidence_position/3, - get_evidence_from_position/3, - dist_to_term/2, - empty_dist/2, - all_dist_ids/1, - randomise_all_dists/0, - randomise_dist/1, - uniformise_all_dists/0, - uniformise_dist/1, - reset_all_dists/0, - add_dist/6, - additive_dists/6 - ]). 
+ [dist/1, + dist/4, + dists/1, + dist_new_table/2, + get_dist/4, + get_dist_matrix/5, + get_possibly_deterministic_dist_matrix/5, + get_dist_domain/2, + get_dist_domain_size/2, + get_dist_params/2, + get_dist_key/2, + get_dist_all_sizes/2, + get_evidence_position/3, + get_evidence_from_position/3, + dist_to_term/2, + empty_dist/2, + all_dist_ids/1, + randomise_all_dists/0, + randomise_dist/1, + uniformise_all_dists/0, + uniformise_dist/1, + reset_all_dists/0, + add_dist/6, + additive_dists/6 + ]). -:- use_module(library(lists),[nth0/3,append/3]). +:- use_module(library(lists), + [nth0/3, + append/3 + ]). :- use_module(library(clpbn), - [use_parfactors/1]). + [use_parfactors/1]). :- use_module(library(matrix), - [matrix_new/4, - matrix_new/3, - matrix_to_list/2, - matrix_to_logs/1]). + [matrix_new/4, + matrix_new/3, + matrix_to_list/2, + matrix_to_logs/1 + ]). :- use_module(library(clpbn/matrix_cpt_utils), - [random_CPT/2, - uniform_CPT/2]). + [random_CPT/2, + uniform_CPT/2 + ]). /* :- mode dist(+, -). @@ -86,7 +90,7 @@ where Id is the id, dna for [a,c,g,t] rna for [a,c,g,u] reals - + ********************************************/ @@ -365,3 +369,4 @@ reset_all_dists. additive_dists(ip(Domain,Tabs1), ip(Domain,Tabs2), Parents1, Parents2, ip(Domain,Tabs), Parents) :- append(Tabs1, Tabs2, Tabs), append(Parents1, Parents2, Parents). + diff --git a/packages/CLPBN/clpbn/evidence.yap b/packages/CLPBN/clpbn/evidence.yap index 62b221858..4f6982a13 100644 --- a/packages/CLPBN/clpbn/evidence.yap +++ b/packages/CLPBN/clpbn/evidence.yap @@ -4,36 +4,34 @@ % :- module(clpbn_evidence, - [ - store_evidence/1, - incorporate_evidence/2, - check_stored_evidence/2, - add_stored_evidence/2, - put_evidence/2 - ]). + [store_evidence/1, + incorporate_evidence/2, + check_stored_evidence/2, + add_stored_evidence/2, + put_evidence/2 + ]). -:- use_module(library(clpbn), [ - {}/1, - clpbn_flag/3, - set_clpbn_flag/2 - ]). +:- use_module(library(clpbn), + [{}/1, + clpbn_flag/3, + set_clpbn_flag/2 + ]). -:- use_module(library('clpbn/dists'), [ - get_dist/4 - ]). +:- use_module(library('clpbn/dists'), + [get_dist/4]). -:- use_module(library(rbtrees), [ - rb_new/1, - rb_lookup/3, - rb_insert/4 - ]). +:- use_module(library(rbtrees), + [rb_new/1, + rb_lookup/3, + rb_insert/4 + ]). :- meta_predicate store_evidence(:). :- dynamic node/3, edge/2, evidence/2. % -% new evidence storage algorithm. The idea is that instead of +% new evidence storage algorithm. The idea is that instead of % redoing all the evidence every time we query the network, we shall % keep a precompiled version around. % @@ -53,9 +51,9 @@ compute_evidence(_,PreviousSolver) :- set_clpbn_flag(solver, PreviousSolver). get_clpbn_vars(G, Vars) :- -% attributes:all_attvars(Vars0), +% attributes:all_attvars(Vars0), once(G), - attributes:all_attvars(Vars). + attributes:all_attvars(Vars). evidence_error(Ball,PreviousSolver) :- set_clpbn_flag(solver,PreviousSolver), @@ -63,7 +61,7 @@ evidence_error(Ball,PreviousSolver) :- store_graph([]). store_graph([V|Vars]) :- - clpbn:get_atts(V,[key(K),dist(Id,Vs)]), + clpbn:get_atts(V,[key(K),dist(Id,Vs)]), \+ node(K, Id, _), !, translate_vars(Vs,TVs), assert(node(K,Id,TVs)), @@ -86,7 +84,6 @@ add_links([K0|TVs],K) :- assert(edge(K,K0)), add_links(TVs,K). 
- incorporate_evidence(Vs,AllVs) :- rb_new(Cache0), create_open_list(Vs, OL, FL, Cache0, CacheI), diff --git a/packages/CLPBN/clpbn/gibbs.yap b/packages/CLPBN/clpbn/gibbs.yap index 2a499b302..ce45313f7 100644 --- a/packages/CLPBN/clpbn/gibbs.yap +++ b/packages/CLPBN/clpbn/gibbs.yap @@ -8,51 +8,54 @@ % :- module(clpbn_gibbs, - [gibbs/3, - check_if_gibbs_done/1, - init_gibbs_solver/4, - run_gibbs_solver/3]). + [gibbs/3, + check_if_gibbs_done/1, + init_gibbs_solver/4, + run_gibbs_solver/3 + ]). :- use_module(library(rbtrees), - [rb_new/1, - rb_insert/4, - rb_lookup/3]). + [rb_new/1, + rb_insert/4, + rb_lookup/3 + ]). :- use_module(library(lists), - [member/2, - append/3, - delete/3, - max_list/2, - sum_list/2]). + [member/2, + append/3, + delete/3, + max_list/2, + sum_list/2 + ]). :- use_module(library(ordsets), - [ord_subtract/3]). + [ord_subtract/3]). -:- use_module(library('clpbn/matrix_cpt_utils'), [ - project_from_CPT/3, - reorder_CPT/5, - multiply_possibly_deterministic_factors/3, - column_from_possibly_deterministic_CPT/3, - normalise_possibly_deterministic_CPT/2, - list_from_CPT/2]). +:- use_module(library('clpbn/matrix_cpt_utils'), + [project_from_CPT/3, + reorder_CPT/5, + multiply_possibly_deterministic_factors/3, + column_from_possibly_deterministic_CPT/3, + normalise_possibly_deterministic_CPT/2, + list_from_CPT/2 + ]). -:- use_module(library('clpbn/utils'), [ - check_for_hidden_vars/3]). +:- use_module(library('clpbn/utils'), + [check_for_hidden_vars/3]). -:- use_module(library('clpbn/dists'), [ - get_possibly_deterministic_dist_matrix/5, - get_dist_domain_size/2]). +:- use_module(library('clpbn/dists'), + [get_possibly_deterministic_dist_matrix/5, + get_dist_domain_size/2 + ]). -:- use_module(library('clpbn/topsort'), [ - topsort/2]). +:- use_module(library('clpbn/topsort'), + [topsort/2]). -:- use_module(library('clpbn/display'), [ - clpbn_bind_vals/3]). +:- use_module(library('clpbn/display'), + [clpbn_bind_vals/3]). :- use_module(library('clpbn/connected'), - [ - influences/3 - ]). + [influences/3]). :- dynamic gibbs_params/3. @@ -134,7 +137,7 @@ graph_representation([V|Vs], Graph, I0, Keys, [I-IParents|TGraph]) :- graph_representation(Vs, Graph, I, Keys, TGraph). write_pars([]). -write_pars([V|Parents]) :- +write_pars([V|Parents]) :- clpbn:get_atts(V, [key(K),dist(I,_)]),write(K:I),nl, write_pars(Parents). @@ -146,7 +149,7 @@ get_sizes([V|Parents], [Sz|Szs]) :- parent_indices([], _, []). parent_indices([V|Parents], Keys, [I|IParents]) :- - rb_lookup(V, I, Keys), + rb_lookup(V, I, Keys), parent_indices(Parents, Keys, IParents). @@ -171,7 +174,7 @@ propagate2parents([V|NewParents], Table, Variables, Graph, Keys) :- propagate2parents(NewParents,Table, Variables, Graph, Keys). add2graph(V, Vals, Table, IParents, Graph, Keys) :- - rb_lookup(V, Index, Keys), + rb_lookup(V, Index, Keys), (var(Vals) -> true ; length(Vals,Sz)), arg(Index, Graph, var(V,Index,_,Vals,Sz,VarSlot,_,_,_)), member(tabular(Table,Index,IParents), VarSlot), !. @@ -236,7 +239,7 @@ mult_list([Sz|Sizes],Mult0,Mult) :- MultI is Sz*Mult0, mult_list(Sizes,MultI,Mult). -% compile node as set of facts, faster execution +% compile node as set of facts, faster execution compile_var(TotSize,I,_Vals,Sz,CPTs,Parents,_Sizes,Graph) :- TotSize < 1024*64, TotSize > 0, !, multiply_all(I,Parents,CPTs,Sz,Graph). @@ -246,11 +249,11 @@ compile_var(_,_,_,_,_,_,_,_). 
multiply_all(I,Parents,CPTs,Sz,Graph) :- markov_blanket_instance(Parents,Graph,Values), ( - multiply_all(CPTs,Graph,Probs) + multiply_all(CPTs,Graph,Probs) -> - store_mblanket(I,Values,Probs) + store_mblanket(I,Values,Probs) ; - throw(error(domain_error(bayesian_domain),gibbs_cpt(I,Parents,Values,Sz))) + throw(error(domain_error(bayesian_domain),gibbs_cpt(I,Parents,Values,Sz))) ), fail. multiply_all(I,_,_,_,_) :- @@ -280,7 +283,7 @@ fetch_parents([], _, []). fetch_parents([P|Parents], Graph, [Val|Vals]) :- arg(P,Graph,var(_,_,Val,_,_,_,_,_,_)), fetch_parents(Parents, Graph, Vals). - + multiply_more([],_,Probs0,LProbs) :- normalise_possibly_deterministic_CPT(Probs0, Probs), list_from_CPT(Probs, LProbs0), @@ -296,7 +299,7 @@ accumulate_up_list([P|LProbs], P0, [P1|L]) :- P1 is P0+P, accumulate_up_list(LProbs, P1, L). - + store_mblanket(I,Values,Probs) :- recordz(mblanket,m(I,Values,Probs),_). @@ -364,8 +367,8 @@ generate_est_mults([], [], _, [], 1). generate_est_mults([V|Vs], [I|Is], Graph, [M0|Mults], M) :- arg(V,Graph,var(_,I,_,_,Sz,_,_,_,_)), generate_est_mults(Vs, Is, Graph, Mults, M0), - M is M0*Sz. - + M is M0*Sz. + gen_e0(0,[]) :- !. gen_e0(Sz,[0|E0L]) :- Sz1 is Sz-1, @@ -455,7 +458,7 @@ get_estimate_pos([I|Is], Sample, [M|Mult], V0, V) :- get_estimate_pos(Is, Sample, Mult, VI, V). update_estimate_for_var(V0,[X|T],[X1|NT]) :- - ( V0 == 0 -> + (V0 == 0 -> X1 is X+1, NT = T ; @@ -496,7 +499,7 @@ do_probs([E|Es],Sum,[P|Ps]) :- show_sorted([], _) :- nl. show_sorted([I|VarOrder], Graph) :- - arg(I,Graph,var(V,I,_,_,_,_,_,_,_)), + arg(I,Graph,var(V,I,_,_,_,_,_,_,_)), clpbn:get_atts(V,[key(K)]), format('~w ',[K]), show_sorted(VarOrder, Graph). @@ -528,7 +531,7 @@ add_up_mes(Counts,[me(_,_,Cs)|Chains], Add) :- sum_lists(Counts, Cs, NCounts), add_up_mes(NCounts, Chains, Add). -sum_lists([],[],[]). +sum_lists([],[],[]). sum_lists([Count|Counts], [C|Cs], [NC|NCounts]) :- NC is Count+C, sum_lists(Counts, Cs, NCounts). @@ -542,5 +545,3 @@ divide_list([C|Add], Sum, [P|Dist]) :- P is C/Sum, divide_list(Add, Sum, Dist). - - diff --git a/packages/CLPBN/clpbn/graphs.yap b/packages/CLPBN/clpbn/graphs.yap index e481161fb..c13afcb7b 100644 --- a/packages/CLPBN/clpbn/graphs.yap +++ b/packages/CLPBN/clpbn/graphs.yap @@ -3,13 +3,14 @@ % Just output a graph with all the variables. % -:- module(clpbn2graph, [clpbn2graph/1]). +:- module(clpbn2graph, + [clpbn2graph/1]). -:- use_module(library('clpbn/utils'), [ - check_for_hidden_vars/3]). +:- use_module(library('clpbn/utils'), + [check_for_hidden_vars/3]). -:- use_module(library('clpbn/dists'), [ - get_dist/4]). +:- use_module(library('clpbn/dists'), + [get_dist/4]). :- attribute node/0. @@ -37,7 +38,3 @@ translate_vars([V|Vs],[K|Ks]) :- clpbn:get_atts(V, [key(K)]), translate_vars(Vs,Ks). - - - - diff --git a/packages/CLPBN/clpbn/graphviz.yap b/packages/CLPBN/clpbn/graphviz.yap index 825fae3a1..2d0a10cda 100644 --- a/packages/CLPBN/clpbn/graphviz.yap +++ b/packages/CLPBN/clpbn/graphviz.yap @@ -1,4 +1,6 @@ -:- module(clpbn_gviz, [clpbn2gviz/4]). + +:- module(clpbn_gviz, + [clpbn2gviz/4]). clpbn2gviz(Stream, Name, Network, Output) :- format(Stream, 'digraph ~w { @@ -48,7 +50,7 @@ output_parents1(Stream,[V|L]) :- put_code(Stream, 0' ), %' output_parents1(Stream,L). -output_v(V,Stream) :- +output_v(V,Stream) :- clpbn:get_atts(V,[key(Key)]), output_key(Stream,Key). 
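Back in gibbs.yap, the compile_var/8 guard (TotSize < 1024*64) caps the precompiled Markov-blanket tables stored by store_mblanket/3 at 65536 entries. For instance, a node whose blanket assignments multiply out to 2^16 already fails the strict test and is left to the catch-all clause, so its conditional distribution is computed on the fly during sampling.

    % 1024 * 64 =:= 65536, and 2^16 =:= 65536, so such a blanket is not cached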
diff --git a/packages/CLPBN/clpbn/ground_factors.yap b/packages/CLPBN/clpbn/ground_factors.yap index d2732f238..50d7a113d 100644 --- a/packages/CLPBN/clpbn/ground_factors.yap +++ b/packages/CLPBN/clpbn/ground_factors.yap @@ -1,40 +1,34 @@ -%parfactor( -% [ability(P),grade(C,S), satisfaction(C,S,P)], -% \phi = [....], -% [P,C,S], -% [P \in [p1,p2,p4], C \in [c1,c3], S \in [s2,s3]]). -% [S \= s2]) +:- module(pfl_ground_factors, + [generate_network/5, + f/3 + ]). +:- use_module(library(bhash), + [b_hash_new/1, + b_hash_lookup/3, + b_hash_insert/4, + b_hash_to_list/2 + ]). -:- module(pfl_ground_factors, [ - generate_network/5, - f/3 - ]). - -:- use_module(library(bhash), [ - b_hash_new/1, - b_hash_lookup/3, - b_hash_insert/4, - b_hash_to_list/2]). - -:- use_module(library(lists), [ - delete/3, - nth0/3, - member/2]). +:- use_module(library(lists), + [member/2]). :- use_module(library(maplist)). -:- use_module(library(pfl), [ - factor/6, - defined_in_factor/2, - skolem/2]). +:- use_module(library(atts)). -:- use_module(library(clpbn/aggregates), [ - avg_factors/5]). +:- use_module(library(pfl), + [factor/6, + defined_in_factor/2, + skolem/2 + ]). -:- use_module(library(clpbn/dists), [ - dist/4]). +:- use_module(library(clpbn/aggregates), + [avg_factors/5]). + +:- use_module(library(clpbn/dists), + [dist/4]). :- dynamic currently_defined/1, queue/1, f/4. @@ -48,7 +42,7 @@ generate_network(QueryVars, QueryKeys, Keys, Factors, EList) :- b_hash_new(Evidence0), foldl(include_evidence,AVars, Evidence0, Evidence1), static_evidence(Evidence1, Evidence), - b_hash_to_list(Evidence, EList0), + b_hash_to_list(Evidence, EList0), maplist(pair_to_evidence,EList0, EList), maplist(queue_evidence, EList), foldl(run_through_query(Evidence), QueryVars, [], QueryKeys), @@ -59,20 +53,20 @@ generate_network(QueryVars, QueryKeys, Keys, Factors, EList) :- % clean global stateq % init_global_search :- - retractall(queue(_)), - retractall(currently_defined(_)), - retractall(f(_,_,_)). + retractall(queue(_)), + retractall(currently_defined(_)), + retractall(f(_,_,_)). pair_to_evidence(K-E, K=E). include_evidence(V, Evidence0, Evidence) :- clpbn:get_atts(V,[key(K),evidence(E)]), !, ( - b_hash_lookup(K, E1, Evidence0) + b_hash_lookup(K, E1, Evidence0) -> - (E \= E1 -> throw(clpbn:incompatible_evidence(K,E,E1)) ; Evidence = Evidence0) + (E \= E1 -> throw(clpbn:incompatible_evidence(K,E,E1)) ; Evidence = Evidence0) ; - b_hash_insert(Evidence0, K, E, Evidence) + b_hash_insert(Evidence0, K, E, Evidence) ). include_evidence(_, Evidence, Evidence). @@ -82,16 +76,16 @@ static_evidence(Evidence0, Evidence) :- include_static_evidence(K=E, Evidence0, Evidence) :- ( - b_hash_lookup(K, E1, Evidence0) + b_hash_lookup(K, E1, Evidence0) -> - (E \= E1 -> throw(incompatible_evidence(K,E,E1)) ; Evidence = Evidence0) + (E \= E1 -> throw(incompatible_evidence(K,E,E1)) ; Evidence = Evidence0) ; - b_hash_insert(Evidence0, K, E, Evidence) + b_hash_insert(Evidence0, K, E, Evidence) ). queue_evidence(K=_) :- - queue_in(K). + queue_in(K). run_through_query(Evidence, V, QueryKeys, QueryKeys) :- clpbn:get_atts(V,[key(K)]), @@ -122,11 +116,11 @@ propagate. do_propagate(K) :- %writeln(-K), \+ currently_defined(K), - ( ground(K) -> assert(currently_defined(K)) ; true), + ( ground(K) -> assert(currently_defined(K)) ; true), ( defined_in_factor(K, ParFactor), add_factor(ParFactor, Ks) - *-> + *-> true ; throw(error(no_defining_factor(K))) @@ -136,28 +130,29 @@ do_propagate(K) :- queue_in(K1), fail. do_propagate(_K) :- - propagate. + propagate. 
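A hedged sketch of the grounding machinery in this file: generate_network/5 gathers evidence from the attributed variables, queues the query and evidence keys, and the propagate/0 loop above closes that set under defined_in_factor/2, asserting one f/3 record per ground factor (the result lists are presumably collected from those records). The query variable G below is illustrative.

    ?- generate_network([G], QueryKeys, AllKeys, Factors, EvidenceList).
    % QueryKeys: ground keys for the query variables
    % AllKeys/Factors/EvidenceList: what the ground solvers above receive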
add_factor(factor(Type, Id, Ks, _, _Phi, Constraints), NKs) :- -% writeln(+Ks), - ( Ks = [K,Els], var(Els) - -> - % aggregate factor +% writeln(+Ks), + ( + Ks = [K,Els], var(Els) + -> + % aggregate factor once(run(Constraints)), avg_factors(K, Els, 0.0, NewKeys, NewId), NKs = [K|NewKeys] - ; + ; run(Constraints), NKs = Ks, Id = NewId ), ( - f(Type, NewId, NKs) - -> + f(Type, NewId, NKs) + -> true - ; + ; assert(f(Type, NewId, NKs)) - ). + ). run([Goal|Goals]) :- call(user:Goal), diff --git a/packages/CLPBN/clpbn/hmm.yap b/packages/CLPBN/clpbn/hmm.yap index 9cea42f0e..623955160 100644 --- a/packages/CLPBN/clpbn/hmm.yap +++ b/packages/CLPBN/clpbn/hmm.yap @@ -1,19 +1,20 @@ - -:- module(hmm, [init_hmm/0, - hmm_state/1, - emission/1]). +:- module(hmm, + [init_hmm/0, + hmm_state/1, + emission/1 + ]). :- ensure_loaded(library(clpbn)). :- use_module(library(lists), - [nth/3]). + [nth/3]). :- use_module(library(nbhash), - [nb_hash_new/2, - nb_hash_lookup/3, - nb_hash_insert/3 - ]). + [nb_hash_new/2, + nb_hash_lookup/3, + nb_hash_insert/3 + ]). :- ensure_loaded(library(tries)). @@ -46,22 +47,19 @@ hmm_state(N/A,Mod) :- Key =.. [T|KArgs], Head =.. [N|LArgs], asserta_static( (Mod:Head :- - ( First > 2 -> - Last = Key, ! - ; - nb_getval(trie, Trie), trie_check_entry(Trie, Key, _) - -> - % leave work for solver! - % - Last = Key, ! - ; - % first time we saw this entry - nb_getval(trie, Trie), trie_put_entry(Trie, Key, _), - fail - ) - ) - ). - + (First > 2 -> + Last = Key, ! + ; + nb_getval(trie, Trie), trie_check_entry(Trie, Key, _) -> + % leave work for solver! + Last = Key, ! + ; + % first time we saw this entry + nb_getval(trie, Trie), trie_put_entry(Trie, Key, _), + fail + ) + )). + build_args(4,[A,B,C,D],[A,B,C],A,D). build_args(3, [A,B,C], [A,B],A,C). build_args(2, [A,B], [A],A,B). @@ -79,5 +77,3 @@ cvt_vals([A|B],[A|B]). find_probs(Logs,Nth,Log) :- arg(Nth,Logs,Log). - - diff --git a/packages/CLPBN/clpbn/horus.yap b/packages/CLPBN/clpbn/horus.yap index 6bed62fa2..03d5662ff 100644 --- a/packages/CLPBN/clpbn/horus.yap +++ b/packages/CLPBN/clpbn/horus.yap @@ -1,65 +1,56 @@ /******************************************************* Horus Interface - + ********************************************************/ :- module(clpbn_horus, - [set_solver/1, - set_horus_flag/1, - cpp_create_lifted_network/3, - cpp_create_ground_network/4, - cpp_set_parfactors_params/2, - cpp_set_factors_params/2, - cpp_run_lifted_solver/3, - cpp_run_ground_solver/3, - cpp_set_vars_information/2, - cpp_set_horus_flag/2, - cpp_free_lifted_network/1, - cpp_free_ground_network/1 - ]). - -:- use_module(library(clpbn), - [set_clpbn_flag/2]). - - -patch_things_up :- - assert_static(clpbn_horus:cpp_set_horus_flag(_,_)). - - -warning :- - format(user_error,"Horus library not installed: cannot use bp, fove~n.",[]). + [set_horus_flag/2, + cpp_create_lifted_network/3, + cpp_create_ground_network/4, + cpp_set_parfactors_params/3, + cpp_set_factors_params/3, + cpp_run_lifted_solver/3, + cpp_run_ground_solver/3, + cpp_set_vars_information/2, + cpp_set_horus_flag/2, + cpp_free_lifted_network/1, + cpp_free_ground_network/1 + ]). :- catch(load_foreign_files([horus], [], init_predicates), _, patch_things_up) - -> true ; warning. + -> true ; warning. -set_solver(ve) :- !, set_clpbn_flag(solver,ve). -set_solver(bdd) :- !, set_clpbn_flag(solver,bdd). -set_solver(jt) :- !, set_clpbn_flag(solver,jt). -set_solver(gibbs) :- !, set_clpbn_flag(solver,gibbs). -set_solver(lve) :- !, set_clpbn_flag(solver,fove), set_horus_flag(lifted_solver, lve). 
-set_solver(lbp) :- !, set_clpbn_flag(solver,fove), set_horus_flag(lifted_solver, lbp). -set_solver(lkc) :- !, set_clpbn_flag(solver,fove), set_horus_flag(lifted_solver, lkc). -set_solver(hve) :- !, set_clpbn_flag(solver,bp), set_horus_flag(ground_solver, ve). -set_solver(bp) :- !, set_clpbn_flag(solver,bp), set_horus_flag(ground_solver, bp). -set_solver(cbp) :- !, set_clpbn_flag(solver,bp), set_horus_flag(ground_solver, cbp). -set_solver(S) :- throw(error('unknown solver ', S)). +patch_things_up :- + assert_static(clpbn_horus:cpp_set_horus_flag(_,_)). + + +warning :- + format(user_error,"Horus library not installed: cannot use hve, bp, cbp, lve, lkc and lbp~n.",[]). set_horus_flag(K,V) :- cpp_set_horus_flag(K,V). -:- cpp_set_horus_flag(schedule, seq_fixed). -%:- cpp_set_horus_flag(schedule, seq_random). -%:- cpp_set_horus_flag(schedule, parallel). -%:- cpp_set_horus_flag(schedule, max_residual). +:- cpp_set_horus_flag(verbosity, 0). -:- cpp_set_horus_flag(accuracy, 0.0001). +%:- cpp_set_horus_flag(use_logarithms, false). +:- cpp_set_horus_flag(use_logarithms, true). -:- cpp_set_horus_flag(max_iter, 1000). +%:- cpp_set_horus_flag(hve_elim_heuristic, sequential). +%:- cpp_set_horus_flag(hve_elim_heuristic, min_neighbors). +%:- cpp_set_horus_flag(hve_elim_heuristic, min_weight). +%:- cpp_set_horus_flag(hve_elim_heuristic, min_fill). +:- cpp_set_horus_flag(hve_elim_heuristic, weighted_min_fill). -:- cpp_set_horus_flag(use_logarithms, false). -% :- cpp_set_horus_flag(use_logarithms, true). +:- cpp_set_horus_flag(bp_msg_schedule, seq_fixed). +%:- cpp_set_horus_flag(bp_msg_schedule, seq_random). +%:- cpp_set_horus_flag(bp_msg_schedule, parallel). +%:- cpp_set_horus_flag(bp_msg_schedule, max_residual). + +:- cpp_set_horus_flag(bp_accuracy, 0.0001). + +:- cpp_set_horus_flag(bp_max_iter, 1000). diff --git a/packages/CLPBN/clpbn/horus_ground.yap b/packages/CLPBN/clpbn/horus_ground.yap index 993fea5df..b4b3f8d25 100644 --- a/packages/CLPBN/clpbn/horus_ground.yap +++ b/packages/CLPBN/clpbn/horus_ground.yap @@ -1,102 +1,89 @@ /******************************************************* Interface to Horus Ground Solvers. Used by: - - Variable Elimination - - Belief Propagation - - Counting Belief Propagation - + - Variable Elimination + - Belief Propagation + - Counting Belief Propagation + ********************************************************/ :- module(clpbn_horus_ground, - [call_horus_ground_solver/6, - check_if_horus_ground_solver_done/1, - init_horus_ground_solver/5, - run_horus_ground_solver/4, - finalize_horus_ground_solver/1 - ]). + [call_horus_ground_solver/6, + check_if_horus_ground_solver_done/1, + init_horus_ground_solver/5, + run_horus_ground_solver/3, + end_horus_ground_solver/1 + ]). :- use_module(horus, - [cpp_create_ground_network/4, - cpp_set_factors_params/2, - cpp_run_ground_solver/3, - cpp_set_vars_information/2, - cpp_free_ground_network/1, - set_solver/1 - ]). + [cpp_create_ground_network/4, + cpp_set_factors_params/3, + cpp_run_ground_solver/3, + cpp_free_ground_network/1, + cpp_set_vars_information/2 + ]). -:- use_module(library('clpbn/dists'), - [dist/4, - get_dist_domain/2, - get_dist_domain_size/2, - get_dist_params/2 - ]). +:- use_module(library('clpbn/numbers'), + [lists_of_keys_to_ids/6, + keys_to_numbers/7 + ]). :- use_module(library('clpbn/display'), - [clpbn_bind_vals/3]). - -:- use_module(library(clpbn/numbers)). - -:- use_module(library(charsio), - [term_to_atom/2]). + [clpbn_bind_vals/3]). :- use_module(library(pfl), - [skolem/2]). 
+ [get_pfl_parameters/2, + skolem/2 + ]). + +:- use_module(library(charsio), + [term_to_atom/2]). :- use_module(library(maplist)). -:- use_module(library(lists)). -:- use_module(library(atts)). - -:- use_module(library(bhash)). +call_horus_ground_solver(QueryVars, QueryKeys, AllKeys, Factors, Evidence, + Output) :- + init_horus_ground_solver(QueryKeys, AllKeys, Factors, Evidence, State), + run_horus_ground_solver([QueryKeys], Solutions, State), + clpbn_bind_vals([QueryVars], Solutions, Output), + end_horus_ground_solver(State). -call_horus_ground_solver(QueryVars, QueryKeys, AllKeys, Factors, Evidence, Output) :- - init_horus_ground_solver(QueryKeys, AllKeys, Factors, Evidence, State), - run_solver(State, [QueryKeys], Solutions), - clpbn_bind_vals([QueryVars], Solutions, Output), - finalize_horus_ground_solver(State). +init_horus_ground_solver(QueryKeys, AllKeys, Factors, Evidence, + state(Network,Hash,Id,DistIds)) :- + factors_type(Factors, Type), + keys_to_numbers(AllKeys, Factors, Evidence, Hash, Id, FacIds, EvIds), + %writeln(network:(type=Type, factors=FacIds, evidence=EvIds)), nl, + cpp_create_ground_network(Type, FacIds, EvIds, Network), + %maplist(term_to_atom, AllKeys, VarNames), + %maplist(get_domain, AllKeys, Domains), + %cpp_set_vars_information(VarNames, Domains), + maplist(get_dist_id, FacIds, DistIds0), + sort(DistIds0, DistIds). -init_horus_ground_solver(QueryKeys, AllKeys, Factors, Evidence, state(Network,Hash4,Id4)) :- - get_factors_type(Factors, Type), - keys_to_numbers(AllKeys, Factors, Evidence, Hash4, Id4, FactorIds, EvidenceIds), - cpp_create_ground_network(Type, FactorIds, EvidenceIds, Network), - %writeln(network:(Type, FactorIds, EvidenceIds, Network)), writeln(''), - maplist(get_var_information, AllKeys, StatesNames), - maplist(term_to_atom, AllKeys, KeysAtoms), - cpp_set_vars_information(KeysAtoms, StatesNames). +run_horus_ground_solver(QueryKeys, Solutions, + state(Network,Hash,Id, DistIds)) :- + lists_of_keys_to_ids(QueryKeys, QueryIds, Hash, _, Id, _), + %maplist(get_pfl_parameters, DistIds, DistParams), + %cpp_set_factors_params(Network, DistIds, DistParams), + cpp_run_ground_solver(Network, QueryIds, Solutions). -run_horus_ground_solver(_QueryVars, Solutions, horus(GKeys, Keys, Factors, Evidence), Solver) :- - set_solver(Solver), - call_horus_ground_solver_for_probabilities(GKeys, Keys, Factors, Evidence, Solutions). +end_horus_ground_solver(state(Network,_Hash,_Id, _DistIds)) :- + cpp_free_ground_network(Network). -% TODO this is not beeing called! -finalize_horus_ground_solver(state(Network,_Hash,_Id)) :- - cpp_free_ground_network(Network). +factors_type([f(bayes, _, _)|_], bayes) :- ! . +factors_type([f(markov, _, _)|_], markov) :- ! . -run_solver(state(Network,Hash,Id), QueryKeys, Solutions) :- - %get_dists_parameters(DistIds, DistsParams), - %cpp_set_factors_params(Network, DistsParams), - lists_of_keys_to_ids(QueryKeys, QueryIds, Hash, _, Id, _), - cpp_run_ground_solver(Network, QueryIds, Solutions). +get_dist_id(f(_, _, _, DistId), DistId). -get_factors_type([f(bayes, _, _)|_], bayes) :- ! . -get_factors_type([f(markov, _, _)|_], markov) :- ! . - - -get_var_information(_:Key, Domain) :- !, - skolem(Key, Domain). -get_var_information(Key, Domain) :- - skolem(Key, Domain). - - -%get_dists_parameters([],[]). -%get_dists_parameters([Id|Ids], [dist(Id, Params)|DistsInfo]) :- -% get_dist_params(Id, Params), -% get_dists_parameters(Ids, DistsInfo). +get_domain(_:Key, Domain) :- !, + skolem(Key, Domain). +get_domain(Key, Domain) :- + skolem(Key, Domain). 
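The defaults installed by the directives in horus.yap above can be overridden at run time with set_horus_flag/2, which simply forwards to cpp_set_horus_flag/2. For example, to tighten belief propagation and switch its message schedule (the values are illustrative; the flag names are the ones set above):

    :- set_horus_flag(bp_accuracy, 0.00001).
    :- set_horus_flag(bp_msg_schedule, max_residual).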
diff --git a/packages/CLPBN/clpbn/horus_lifted.yap b/packages/CLPBN/clpbn/horus_lifted.yap index 3de7d0481..0dd13152f 100644 --- a/packages/CLPBN/clpbn/horus_lifted.yap +++ b/packages/CLPBN/clpbn/horus_lifted.yap @@ -1,148 +1,115 @@ /******************************************************* Interface to Horus Lifted Solvers. Used by: - - Generalized Counting First-Order Variable Elimination (GC-FOVE) - - Lifted First-Order Belief Propagation - - Lifted First-Order Knowledge Compilation + - Generalized Counting First-Order Variable Elimination (GC-FOVE) + - Lifted First-Order Belief Propagation + - Lifted First-Order Knowledge Compilation ********************************************************/ :- module(clpbn_horus_lifted, - [call_horus_lifted_solver/3, - check_if_horus_lifted_solver_done/1, - init_horus_lifted_solver/4, - run_horus_lifted_solver/3, - finalize_horus_lifted_solver/1 - ]). + [call_horus_lifted_solver/3, + check_if_horus_lifted_solver_done/1, + init_horus_lifted_solver/4, + run_horus_lifted_solver/3, + end_horus_lifted_solver/1 + ]). :- use_module(horus, - [cpp_create_lifted_network/3, - cpp_set_parfactors_params/2, - cpp_run_lifted_solver/3, - cpp_free_lifted_network/1 - ]). + [cpp_create_lifted_network/3, + cpp_set_parfactors_params/3, + cpp_run_lifted_solver/3, + cpp_free_lifted_network/1 + ]). :- use_module(library('clpbn/display'), - [clpbn_bind_vals/3]). - -:- use_module(library('clpbn/dists'), - [get_dist_params/2]). + [clpbn_bind_vals/3]). :- use_module(library(pfl), - [factor/6, - skolem/2, - get_pfl_parameters/2 - ]). + [factor/6, + skolem/2, + get_pfl_parameters/2 + ]). + +:- use_module(library(maplist)). call_horus_lifted_solver(QueryVars, AllVars, Output) :- - init_horus_lifted_solver(_, AllVars, _, State), - run_horus_lifted_solver(QueryVars, Solutions, State), - clpbn_bind_vals(QueryVars, Solutions, Output), - finalize_horus_lifted_solver(State). + init_horus_lifted_solver(_, AllVars, _, State), + run_horus_lifted_solver(QueryVars, Solutions, State), + clpbn_bind_vals(QueryVars, Solutions, Output), + end_horus_lifted_solver(State). -init_horus_lifted_solver(_, AllVars, _, state(ParfactorList, DistIds)) :- - get_parfactors(Parfactors), - get_dist_ids(Parfactors, DistIds0), - sort(DistIds0, DistIds), - get_observed_vars(AllVars, ObservedVars), - %writeln(parfactors:Parfactors:'\n'), - %writeln(evidence:ObservedVars:'\n'), - cpp_create_lifted_network(Parfactors, ObservedVars, ParfactorList). +init_horus_lifted_solver(_, AllVars, _, state(Network, DistIds)) :- + get_parfactors(Parfactors), + get_observed_keys(AllVars, ObservedKeys), + %writeln(network:(parfactors=Parfactors, evidence=ObservedKeys)), nl, + cpp_create_lifted_network(Parfactors, ObservedKeys, Network), + maplist(get_dist_id, Parfactors, DistIds0), + sort(DistIds0, DistIds). -run_horus_lifted_solver(QueryVars, Solutions, state(ParfactorList, DistIds)) :- - get_query_keys(QueryVars, QueryKeys), - get_dists_parameters(DistIds, DistsParams), - %writeln(dists:DistsParams), writeln(''), - cpp_set_parfactors_params(ParfactorList, DistsParams), - cpp_run_lifted_solver(ParfactorList, QueryKeys, Solutions). +run_horus_lifted_solver(QueryVars, Solutions, state(Network, DistIds)) :- + maplist(get_query_keys, QueryVars, QueryKeys), + %maplist(get_pfl_parameters, DistIds,DistsParams), + %cpp_set_parfactors_params(Network, DistIds, DistsParams), + cpp_run_lifted_solver(Network, QueryKeys, Solutions). -finalize_horus_lifted_solver(state(ParfactorList, _)) :- - cpp_free_lifted_network(ParfactorList). 
- +end_horus_lifted_solver(state(Network, _)) :- + cpp_free_lifted_network(Network). +% +% Enumerate all parfactors and enumerate their domain as tuples. +% :- table get_parfactors/1. -% -% enumerate all parfactors and enumerate their domain as tuples. -% -% output is list of pf( -% Id: an unique number -% Ks: a list of keys, also known as the pf formula [a(X),b(Y),c(X,Y)] -% Vs: the list of free variables [X,Y] -% Phi: the table following usual CLP(BN) convention -% Tuples: ground bindings for variables in Vs, of the form [fv(x,y)] -% get_parfactors(Factors) :- - findall(F, is_factor(F), Factors). + findall(F, is_factor(F), Factors). is_factor(pf(Id, Ks, Rs, Phi, Tuples)) :- - factor(_Type, Id, Ks, Vs, Table, Constraints), - get_ranges(Ks,Rs), - Table \= avg, - gen_table(Table, Phi), - all_tuples(Constraints, Vs, Tuples). + factor(_Type, Id, Ks, Vs, Table, Constraints), + maplist(get_range, Ks, Rs), + Table \= avg, + gen_table(Table, Phi), + all_tuples(Constraints, Vs, Tuples). -get_ranges([],[]). -get_ranges(K.Ks, Range.Rs) :- !, - skolem(K,Domain), - length(Domain,Range), - get_ranges(Ks, Rs). +get_range(K, Range) :- + skolem(K, Domain), + length(Domain, Range). gen_table(Table, Phi) :- - ( is_list(Table) - -> - Phi = Table - ; - call(user:Table, Phi) - ). + ( is_list(Table) -> Phi = Table ; call(user:Table, Phi) ). all_tuples(Constraints, Tuple, Tuples) :- - setof(Tuple, Constraints^run(Constraints), Tuples). + setof(Tuple, Constraints^run(Constraints), Tuples). run([]). run(Goal.Constraints) :- - user:Goal, - run(Constraints). + user:Goal, + run(Constraints). -get_dist_ids([], []). -get_dist_ids(pf(Id, _, _, _, _).Parfactors, Id.DistIds) :- - get_dist_ids(Parfactors, DistIds). +get_dist_id(pf(DistId, _, _, _, _), DistId). -get_observed_vars([], []). -get_observed_vars(V.AllAttVars, [K:E|ObservedVars]) :- - clpbn:get_atts(V,[key(K)]), - ( clpbn:get_atts(V,[evidence(E)]) ; pfl:evidence(K,E) ), !, - get_observed_vars(AllAttVars, ObservedVars). -get_observed_vars(V.AllAttVars, ObservedVars) :- - clpbn:get_atts(V,[key(_K)]), !, - get_observed_vars(AllAttVars, ObservedVars). +get_observed_keys([], []). +get_observed_keys(V.AllAttVars, [K:E|ObservedKeys]) :- + clpbn:get_atts(V,[key(K)]), + ( clpbn:get_atts(V,[evidence(E)]) ; pfl:evidence(K,E) ), !, + get_observed_keys(AllAttVars, ObservedKeys). +get_observed_keys(_V.AllAttVars, ObservedKeys) :- + get_observed_keys(AllAttVars, ObservedKeys). get_query_keys([], []). -get_query_keys(E1.L1, E2.L2) :- - get_query_keys_2(E1,E2), - get_query_keys(L1, L2). - - -get_query_keys_2([], []). -get_query_keys_2(V.AttVars, [RV|RVs]) :- - clpbn:get_atts(V,[key(RV)]), !, - get_query_keys_2(AttVars, RVs). - - -get_dists_parameters([], []). -get_dists_parameters([Id|Ids], [dist(Id, Params)|DistsInfo]) :- - get_pfl_parameters(Id, Params), - get_dists_parameters(Ids, DistsInfo). +get_query_keys(V.AttVars, K.Ks) :- + clpbn:get_atts(V,[key(K)]), !, + get_query_keys(AttVars, Ks). diff --git a/packages/CLPBN/clpbn/jt.yap b/packages/CLPBN/clpbn/jt.yap index 9a9aa9027..d46f13430 100644 --- a/packages/CLPBN/clpbn/jt.yap +++ b/packages/CLPBN/clpbn/jt.yap @@ -1,89 +1,93 @@ -:- module(jt, [jt/3, - init_jt_solver/4, - run_jt_solver/3]). - +:- module(jt, + [jt/3, + init_jt_solver/4, + run_jt_solver/3 + ]). :- use_module(library(dgraphs), - [dgraph_new/1, - dgraph_add_edges/3, - dgraph_add_vertex/3, - dgraph_add_vertices/3, - dgraph_edges/2, - dgraph_vertices/2, - dgraph_transpose/2, - dgraph_to_ugraph/2, - ugraph_to_dgraph/2, - dgraph_neighbors/3 - ]). 
+ [dgraph_new/1, + dgraph_add_edges/3, + dgraph_add_vertex/3, + dgraph_add_vertices/3, + dgraph_edges/2, + dgraph_vertices/2, + dgraph_transpose/2, + dgraph_to_ugraph/2, + ugraph_to_dgraph/2, + dgraph_neighbors/3 + ]). :- use_module(library(undgraphs), - [undgraph_new/1, - undgraph_add_edge/4, - undgraph_add_edges/3, - undgraph_del_vertex/3, - undgraph_del_vertices/3, - undgraph_vertices/2, - undgraph_edges/2, - undgraph_neighbors/3, - undgraph_edge/3, - dgraph_to_undgraph/2 - ]). + [undgraph_new/1, + undgraph_add_edge/4, + undgraph_add_edges/3, + undgraph_del_vertex/3, + undgraph_del_vertices/3, + undgraph_vertices/2, + undgraph_edges/2, + undgraph_neighbors/3, + undgraph_edge/3, + dgraph_to_undgraph/2 + ]). :- use_module(library(wundgraphs), - [wundgraph_new/1, - wundgraph_max_tree/3, - wundgraph_add_edges/3, - wundgraph_add_vertices/3, - wundgraph_to_undgraph/2 - ]). + [wundgraph_new/1, + wundgraph_max_tree/3, + wundgraph_add_edges/3, + wundgraph_add_vertices/3, + wundgraph_to_undgraph/2 + ]). :- use_module(library(rbtrees), - [rb_new/1, - rb_insert/4, - rb_lookup/3]). + [rb_new/1, + rb_insert/4, + rb_lookup/3 + ]). :- use_module(library(ordsets), - [ord_subset/2, - ord_insert/3, - ord_intersection/3, - ord_del_element/3, - ord_memberchk/2]). + [ord_subset/2, + ord_insert/3, + ord_intersection/3, + ord_del_element/3, + ord_memberchk/2 + ]). :- use_module(library(lists), - [reverse/2]). + [reverse/2]). :- use_module(library(maplist)). :- use_module(library('clpbn/aggregates'), - [check_for_agg_vars/2]). + [check_for_agg_vars/2]). :- use_module(library('clpbn/dists'), - [get_dist_domain_size/2, - get_dist_domain/2, - get_dist_matrix/5]). + [get_dist_domain_size/2, + get_dist_domain/2, + get_dist_matrix/5 + ]). :- use_module(library('clpbn/matrix_cpt_utils'), - [project_from_CPT/3, - reorder_CPT/5, - unit_CPT/2, - multiply_CPTs/4, - divide_CPTs/3, - normalise_CPT/2, - expand_CPT/4, - get_CPT_sizes/2, - reset_CPT_that_disagrees/5, - sum_out_from_CPT/4, - list_from_CPT/2]). + [project_from_CPT/3, + reorder_CPT/5, + unit_CPT/2, + multiply_CPTs/4, + divide_CPTs/3, + normalise_CPT/2, + expand_CPT/4, + get_CPT_sizes/2, + reset_CPT_that_disagrees/5, + sum_out_from_CPT/4, + list_from_CPT/2 + ]). -:- use_module(library('clpbn/display'), [ - clpbn_bind_vals/3]). +:- use_module(library('clpbn/display'), + [clpbn_bind_vals/3]). :- use_module(library('clpbn/connected'), - [ - init_influences/3, - influences/4 - ]). + [init_influences/3, + influences/4 + ]). jt([[]],_,_) :- !. @@ -94,7 +98,7 @@ jt(LLVs,Vs0,AllDiffs) :- init_jt_solver(LLVs, Vs0, _, State) :- - check_for_agg_vars(Vs0, Vs1), + check_for_agg_vars(Vs0, Vs1), init_influences(Vs1, G, RG), maplist(init_jt_solver_for_question(G, RG), LLVs, State). @@ -131,7 +135,7 @@ run_vars([V|LVs], Edges, [V|Vs], [CPTVars-dist([V|Parents],Id)|CPTs], Ev) :- add_evidence_from_vars(V, [e(V,P)|Evs], Evs) :- clpbn:get_atts(V, [evidence(P)]), !. add_evidence_from_vars(_, Evs, Evs). - + find_nth0([Id|_], Id, P, P) :- !. find_nth0([_|D], Id, P0, P) :- P1 is P0+1, @@ -159,7 +163,7 @@ initial_graph(_,Parents, CPTs) :- % from the very beginning. dgraph_transpose(V1, V2), dgraph_to_ugraph(V2, Parents). - + problem_graph([], []). problem_graph([V|BNet], GraphF) :- @@ -171,7 +175,7 @@ add_parents([], _, Graph, Graph). add_parents([P|Parents], V, Graph0, [P-V|GraphF]) :- add_parents(Parents, V, Graph0, GraphF). 
- + % From David Page's lectures test_graph(0, [1-3,2-3,2-4,5-4,5-7,10-7,10-9,11-9,3-6,4-6,7-8,9-8,6-12,8-12], @@ -228,19 +232,19 @@ choose([V|Vertices], Graph, Score0, _, _, Best, _, Cliques0, Cliques, EdgesF) :- ord_insert(Neighbors, V, PossibleClique), new_edges(Neighbors, Graph, NewEdges), ( - % simplicial edge - NewEdges == [] + % simplicial edge + NewEdges == [] -> - !, - Best = V, - NewEdges = EdgesF, - length(PossibleClique,L), - Cliques = [L-PossibleClique|Cliques0] + !, + Best = V, + NewEdges = EdgesF, + length(PossibleClique,L), + Cliques = [L-PossibleClique|Cliques0] ; -% cliquelength(PossibleClique,1,CL), - length(PossibleClique,CL), - CL < Score0, !, - choose(Vertices,Graph,CL,NewEdges, V, Best, CL-PossibleClique, Cliques0,Cliques,EdgesF) +% cliquelength(PossibleClique,1,CL), + length(PossibleClique,CL), + CL < Score0, !, + choose(Vertices,Graph,CL,NewEdges, V, Best, CL-PossibleClique, Cliques0,Cliques,EdgesF) ). choose([_|Vertices], Graph, Score0, Edges0, BestSoFar, Best, Clique, Cliques0, Cliques, EdgesF) :- choose(Vertices,Graph,Score0,Edges0, BestSoFar, Best, Clique, Cliques0,Cliques,EdgesF). @@ -285,18 +289,17 @@ get_links([Sz-Clique|Cliques], SoFar, Vertices, Edges0, Edges) :- get_links(Cliques, [Clique|SoFar], Vertices, EdgesI, Edges). get_links([_|Cliques], SoFar, Vertices, Edges0, Edges) :- get_links(Cliques, SoFar, Vertices, Edges0, Edges). - + add_clique_edges([], _, _, Edges, Edges). add_clique_edges([Clique1|Cliques], Clique, Sz, Edges0, EdgesF) :- ord_intersection(Clique1, Clique, Int), Int \== Clique, - ( - Int = [] -> - add_clique_edges(Cliques, Clique, Sz, Edges0, EdgesF) + (Int = [] -> + add_clique_edges(Cliques, Clique, Sz, Edges0, EdgesF) ; - % we connect - length(Int, LSz), - add_clique_edges(Cliques, Clique, Sz, [Clique-(Clique1-LSz)|Edges0], EdgesF) + % we connect + length(Int, LSz), + add_clique_edges(Cliques, Clique, Sz, [Clique-(Clique1-LSz)|Edges0], EdgesF) ). root(WTree, JTree) :- @@ -358,25 +361,25 @@ get_cpts([], _, [], []). get_cpts([CPT|CPts], [], [], [CPT|CPts]) :- !. get_cpts([[I|MCPT]-Info|CPTs], [J|Clique], MyCPTs, MoreCPTs) :- compare(C,I,J), - ( C == < -> + (C == < -> % our CPT cannot be a part of the clique. MoreCPTs = [[I|MCPT]-Info|LeftoverCPTs], get_cpts(CPTs, [J|Clique], MyCPTs, LeftoverCPTs) ; - C == = -> - % our CPT cannot be a part of the clique. - get_cpt(MCPT, Clique, I, Info, MyCPTs, MyCPTs0, MoreCPTs, MoreCPTs0), - get_cpts(CPTs, [J|Clique], MyCPTs0, MoreCPTs0) - ; - % the first element in our CPT may not be in a clique - get_cpts([[I|MCPT]-Info|CPTs], Clique, MyCPTs, MoreCPTs) + C == = -> + % our CPT cannot be a part of the clique. + get_cpt(MCPT, Clique, I, Info, MyCPTs, MyCPTs0, MoreCPTs, MoreCPTs0), + get_cpts(CPTs, [J|Clique], MyCPTs0, MoreCPTs0) + ; + % the first element in our CPT may not be in a clique + get_cpts([[I|MCPT]-Info|CPTs], Clique, MyCPTs, MoreCPTs) ). get_cpt(MCPT, Clique, I, Info, [[I|MCPT]-Info|MyCPTs], MyCPTs, MoreCPTs, MoreCPTs) :- ord_subset(MCPT, Clique), !. get_cpt(MCPT, _, I, Info, MyCPTs, MyCPTs, [[I|MCPT]-Info|MoreCPTs], MoreCPTs). - + translate_edges([], [], []). translate_edges([E1-E2|Edges], [(E1-A)-(E2-B)|NEdges], [E1-A,E2-B|Vs]) :- translate_edges(Edges, NEdges, Vs). @@ -385,13 +388,13 @@ match_vs(_,[]). match_vs([K-A|Cls],[K1-B|KVs]) :- compare(C, K, K1), (C == = -> - A = B, - match_vs([K-A|Cls], KVs) + A = B, + match_vs([K-A|Cls], KVs) ; - C = < -> - match_vs(Cls,[K1-B|KVs]) + C = < -> + match_vs(Cls,[K1-B|KVs]) ; - match_vs([K-A|Cls],KVs) + match_vs([K-A|Cls],KVs) ). 
fill_with_cpts(tree(Clique-Dists,Leafs), tree(Clique-NewDists,NewLeafs)) :- diff --git a/packages/CLPBN/clpbn/matrix_cpt_utils.yap b/packages/CLPBN/clpbn/matrix_cpt_utils.yap index da69fad2b..a3d7d228f 100644 --- a/packages/CLPBN/clpbn/matrix_cpt_utils.yap +++ b/packages/CLPBN/clpbn/matrix_cpt_utils.yap @@ -1,51 +1,54 @@ + :- module(clpbn_matrix_utils, - [init_CPT/3, - project_from_CPT/3, - sum_out_from_CPT/5, - project_from_CPT/6, - reorder_CPT/5, - get_CPT_sizes/2, - normalise_CPT/2, - multiply_CPTs/4, - multiply_CPTs/6, - divide_CPTs/3, - expand_CPT/4, - reset_CPT_that_disagrees/5, - unit_CPT/2, - sum_out_from_CPT/4, - list_from_CPT/2, - multiply_factors/3, - normalise_possibly_deterministic_CPT/2, - column_from_possibly_deterministic_CPT/3, - multiply_possibly_deterministic_factors/3, - random_CPT/2, - uniform_CPT/2, - uniform_CPT_as_list/2, - normalise_CPT_on_lines/3]). + [init_CPT/3, + project_from_CPT/3, + sum_out_from_CPT/5, + project_from_CPT/6, + reorder_CPT/5, + get_CPT_sizes/2, + normalise_CPT/2, + multiply_CPTs/4, + multiply_CPTs/6, + divide_CPTs/3, + expand_CPT/4, + reset_CPT_that_disagrees/5, + unit_CPT/2, + sum_out_from_CPT/4, + list_from_CPT/2, + multiply_factors/3, + normalise_possibly_deterministic_CPT/2, + column_from_possibly_deterministic_CPT/3, + multiply_possibly_deterministic_factors/3, + random_CPT/2, + uniform_CPT/2, + uniform_CPT_as_list/2, + normalise_CPT_on_lines/3 + ]). :- use_module(library(matrix), - [matrix_new/4, - matrix_new_set/4, - matrix_select/4, - matrix_dims/2, - matrix_size/2, - matrix_shuffle/3, - matrix_expand/3, - matrix_op/4, - matrix_dims/2, - matrix_sum/2, - matrix_sum_logs_out/3, - matrix_sum_out/3, - matrix_sum_logs_out_several/3, - matrix_op_to_all/4, - matrix_to_exps2/1, - matrix_to_logs/1, - matrix_set_all_that_disagree/5, - matrix_to_list/2, - matrix_agg_lines/3, - matrix_agg_cols/3, - matrix_op_to_lines/4, - matrix_column/3]). + [matrix_new/4, + matrix_new_set/4, + matrix_select/4, + matrix_dims/2, + matrix_size/2, + matrix_shuffle/3, + matrix_expand/3, + matrix_op/4, + matrix_dims/2, + matrix_sum/2, + matrix_sum_logs_out/3, + matrix_sum_out/3, + matrix_sum_logs_out_several/3, + matrix_op_to_all/4, + matrix_to_exps2/1, + matrix_to_logs/1, + matrix_set_all_that_disagree/5, + matrix_to_list/2, + matrix_agg_lines/3, + matrix_agg_cols/3, + matrix_op_to_lines/4, + matrix_column/3 + ]). init_CPT(List, Sizes, TAB) :- matrix_new(floats, Sizes, List, TAB), @@ -84,7 +87,7 @@ evidence(V, Pos) :- clpbn:get_atts(V, [evidence(Pos)]). vnth([V1|Deps], N, V, N, Deps) :- - V == V1, !. + V == V1, !. vnth([V1|Deps], N0, V, N, [V1|NDeps]) :- N1 is N0+1, vnth(Deps, N1, V, N, NDeps). @@ -93,21 +96,21 @@ reorder_CPT(Vs0,T0,Vs,TF,Sizes) :- var(Vs), !, order_vec(Vs0,Vs,Map), ( - Vs == Vs0 + Vs == Vs0 -> - TF = T0 + TF = T0 ; - matrix_shuffle(T0,Map,TF) + matrix_shuffle(T0,Map,TF) ), matrix_dims(TF, Sizes). reorder_CPT(Vs0,T0,Vs,TF,Sizes) :- mapping(Vs0,Vs,Map), ( - Vs == Vs0 + Vs == Vs0 -> - TF = T0 + TF = T0 ; - matrix_shuffle(T0,Map,TF) + matrix_shuffle(T0,Map,TF) ), matrix_dims(TF, Sizes). @@ -124,7 +127,7 @@ add_indices([V|Vs0],I0,[V-I0|Is]) :- get_els([], [], []). get_els([V-I|NIs], [V|Vs], [I|Map]) :- get_els(NIs, Vs, Map). 
- + mapping(Vs0,Vs,Map) :- add_indices(Vs0,0,I1s), add_indices( Vs,I2s), @@ -167,26 +170,26 @@ expand_tabs([], [], [V2|Deps2], [S2|Sz2], [S2|Map1], [0|Map2], [V2|NDeps]) :- expand_tabs([V1|Deps1], [S1|Sz1], [V2|Deps2], [S2|Sz2], Map1, Map2, NDeps) :- compare(C,V1,V2), (C == = -> - NDeps = [V1|MDeps], - Map1 = [0|M1], - Map2 = [0|M2], - NDeps = [V1|MDeps], - expand_tabs(Deps1, Sz1, Deps2, Sz2, M1, M2, MDeps) + NDeps = [V1|MDeps], + Map1 = [0|M1], + Map2 = [0|M2], + NDeps = [V1|MDeps], + expand_tabs(Deps1, Sz1, Deps2, Sz2, M1, M2, MDeps) ; - C == < -> - NDeps = [V1|MDeps], - Map1 = [0|M1], - Map2 = [S1|M2], - NDeps = [V1|MDeps], - expand_tabs(Deps1, Sz1, [V2|Deps2], [S2|Sz2], M1, M2, MDeps) - ; - NDeps = [V2|MDeps], - Map1 = [S2|M1], - Map2 = [0|M2], - NDeps = [V2|MDeps], - expand_tabs([V1|Deps1], [S1|Sz1], Deps2, Sz2, M1, M2, MDeps) + C == < -> + NDeps = [V1|MDeps], + Map1 = [0|M1], + Map2 = [S1|M2], + NDeps = [V1|MDeps], + expand_tabs(Deps1, Sz1, [V2|Deps2], [S2|Sz2], M1, M2, MDeps) + ; + NDeps = [V2|MDeps], + Map1 = [S2|M1], + Map2 = [0|M2], + NDeps = [V2|MDeps], + expand_tabs([V1|Deps1], [S1|Sz1], Deps2, Sz2, M1, M2, MDeps) ). - + normalise_CPT(MAT,NMAT) :- matrix_to_exps2(MAT), matrix_sum(MAT, Sum), @@ -204,9 +207,9 @@ generate_map([V|DimsNew], [V0|Dims0], [0|Map]) :- V == V0, !, generate_map(DimsNew, Dims0, Map). generate_map([V|DimsNew], Dims0, [Sz|Map]) :- clpbn:get_atts(V, [dist(Id,_)]), - clpbn_dist:get_dist_domain_size(Id, Sz), + clpbn_dist:get_dist_domain_size(Id, Sz), generate_map(DimsNew, Dims0, Map). - + unit_CPT(V,CPT) :- clpbn:get_atts(V, [dist(Id,_)]), clpbn_dist:get_dist_domain_size(Id, Sz), @@ -284,7 +287,7 @@ uniform_CPT(Dims, M) :- normalise_possibly_deterministic_CPT(M1, M). normalise_CPT_on_lines(MAT0, MAT2, L1) :- - matrix_agg_cols(MAT0, +, MAT1), + matrix_agg_cols(MAT0, +, MAT1), matrix_sum(MAT1, SUM), matrix_op_to_all(MAT1, /, SUM, MAT2), matrix:matrix_to_list(MAT2,L1). diff --git a/packages/CLPBN/clpbn/numbers.yap b/packages/CLPBN/clpbn/numbers.yap index f7805a397..15aad1e1f 100644 --- a/packages/CLPBN/clpbn/numbers.yap +++ b/packages/CLPBN/clpbn/numbers.yap @@ -1,17 +1,17 @@ :- module(clpbn_numbers, - [ - keys_to_numbers/7, - keys_to_numbers/9, - lists_of_keys_to_ids/6 - ]). + [keys_to_numbers/7, + keys_to_numbers/9, + lists_of_keys_to_ids/6 + ]). :- use_module(library(bhash)). :- use_module(library(maplist)). + :- use_module(library(pfl), - [skolem/2, - get_pfl_cpt/5 - ]). + [skolem/2, + get_pfl_cpt/5 + ]). % % convert key representation into numeric representation @@ -30,16 +30,16 @@ keys_to_numbers(AllKeys, Factors, Evidence, Hash0, Hash4, Id0, Id4, FactorIds, E foldl2(key_to_id, SKeys, _, Hash3, Hash4, Id3, Id4). lists_of_keys_to_ids(QueryKeys, QueryIds, Hash0, Hash, Id0, Id) :- - foldl2(list_of_keys_to_ids, QueryKeys, QueryIds, Hash0, Hash, Id0, Id). + foldl2(list_of_keys_to_ids, QueryKeys, QueryIds, Hash0, Hash, Id0, Id). list_of_keys_to_ids(List, IdList, Hash0, Hash, I0, I) :- foldl2(key_to_id, List, IdList, Hash0, Hash, I0, I). key_to_id(Key, Id, Hash0, Hash0, I0, I0) :- - b_hash_lookup(Key, Id, Hash0), !. + b_hash_lookup(Key, Id, Hash0), !. key_to_id(Key, I0, Hash0, Hash, I0, I) :- - b_hash_insert(Hash0, Key, I0, Hash), - I is I0+1. + b_hash_insert(Hash0, Key, I0, Hash), + I is I0+1. factor_to_id(Ev, f(_, DistId, Keys), f(Ids, Ranges, CPT, DistId), Hash0, Hash, I0, I) :- get_pfl_cpt(DistId, Keys, Ev, NKeys, CPT), @@ -60,4 +60,3 @@ evidence_to_id(Key=Ev, I0=Ev, Hash0, Hash, I0, I) :- b_hash_insert(Hash0, Key, I0, Hash), I is I0+1. 
- diff --git a/packages/CLPBN/clpbn/pgrammar.yap b/packages/CLPBN/clpbn/pgrammar.yap index 6466253e4..4c6829595 100644 --- a/packages/CLPBN/clpbn/pgrammar.yap +++ b/packages/CLPBN/clpbn/pgrammar.yap @@ -2,30 +2,29 @@ :- style_check(all). -:- module(clpbn_pgrammar,[grammar_to_atts/1, - grammar_prob/2, - grammar_mle/2, - init_pcg_solver/4, - run_pcg_solver/3, - pcg_init_graph/0]). +:- module(clpbn_pgrammar, + [grammar_to_atts/1, + grammar_prob/2, + grammar_mle/2, + init_pcg_solver/4, + run_pcg_solver/3, + pcg_init_graph/0 + ]). :- load_files([library(clpbn)], - [ if(not_loaded), - silent(true) - ]). + [if(not_loaded), silent(true)]). :- use_module([library(lists)], - [ sum_list/2 - ]). + [sum_list/2]). :- use_module([library(matrix)], - [ matrix_new/3, - matrix_add/3, - matrix_get/3, - matrix_op/4, - matrix_op_to_all/4, - matrix_set_all/2 - ]). + [matrix_new/3, + matrix_add/3, + matrix_get/3, + matrix_op/4, + matrix_op_to_all/4, + matrix_set_all/2 + ]). :- op(600, xfy,'::'). @@ -71,9 +70,9 @@ grammar_mle(S,_,P) :- nb_getval(best,p(P,S)), P > 0.0. user:term_expansion((P::H --> B), Goal) :- - functor(H,A0,_), - % a-->b to a(p(K,P,C,[Cs])) --> b(Cs) - convert_to_internal(H, B, IH, IB, Id), + functor(H,A0,_), + % a-->b to a(p(K,P,C,[Cs])) --> b(Cs) + convert_to_internal(H, B, IH, IB, Id), expand_term((IH --> IB),(NH :- NB)), prolog_load_context(module, Mod), functor(NH,N,A), @@ -99,8 +98,8 @@ add_to_predicate(M:EH1,M:EH,M:H0,NH,NB,Key,Choice,P,Id,(EH1:-NB)) :- % now ensure_tabled works. ensure_tabled(M,H0,EH), assert_static(M:(EH :- - clpbn_pgrammar:p_rule(M,EH,Key,Choice), - M:EH1)), + clpbn_pgrammar:p_rule(M,EH,Key,Choice), + M:EH1)), Choice = 1, new_id(Key,P,Choice,Id), assert_static(M:ptab(EH,Choice,P)), @@ -140,18 +139,18 @@ convert_body_to_internal({A}, {A}) --> !. convert_body_to_internal(B, IB) --> [V], { - B =.. [Na|Args], - build_internal(Na,NaInternal), - IB =.. [NaInternal,V|Args] + B =.. [Na|Args], + build_internal(Na,NaInternal), + IB =.. [NaInternal,V|Args] }. new_id(Key,P,Choice,Id) :- ( - predicate_property(id(_,_,_,_),number_of_clauses(Id)) + predicate_property(id(_,_,_,_),number_of_clauses(Id)) -> - true + true ; - Id = 0 + Id = 0 ), assert(id(Id,Key,P,Choice)). @@ -177,7 +176,7 @@ get_internal(S, InternalS, Arg) :- extract_probability(p(Id,Goals), P) :- id(Id,_,P0,_), - LogP0 is log(P0), + LogP0 is log(P0), extract_logprobability(Goals, LogP0, LogP), P is exp(LogP). @@ -211,11 +210,11 @@ path_choices(InternalS, Proof) :- new_id(Id) :- (nb_getval(grammar_id,Id) -> - I1 is Id+1, - nb_setval(grammar_id,I1) + I1 is Id+1, + nb_setval(grammar_id,I1) ; - nb_setval(grammar_id,1), - Id = 0 + nb_setval(grammar_id,1), + Id = 0 ). find_dom(K, Vs, Ps) :- diff --git a/packages/CLPBN/clpbn/table.yap b/packages/CLPBN/clpbn/table.yap index 9153c6ec9..155db3941 100644 --- a/packages/CLPBN/clpbn/table.yap +++ b/packages/CLPBN/clpbn/table.yap @@ -8,49 +8,49 @@ */ :- module(clpbn_table, - [clpbn_table/1, - clpbn_tableallargs/1, - clpbn_table_nondet/1, - clpbn_tabled_clause/2, - clpbn_tabled_clause_ref/3, - clpbn_tabled_retract/2, - clpbn_tabled_abolish/1, - clpbn_tabled_asserta/1, - clpbn_tabled_assertz/1, - clpbn_tabled_asserta/2, - clpbn_tabled_assertz/2, - clpbn_tabled_dynamic/1, - clpbn_tabled_number_of_clauses/2, - clpbn_reset_tables/0, - clpbn_reset_tables/1, - clpbn_is_tabled/1 - ]). 
+ [clpbn_table/1, + clpbn_tableallargs/1, + clpbn_table_nondet/1, + clpbn_tabled_clause/2, + clpbn_tabled_clause_ref/3, + clpbn_tabled_retract/2, + clpbn_tabled_abolish/1, + clpbn_tabled_asserta/1, + clpbn_tabled_assertz/1, + clpbn_tabled_asserta/2, + clpbn_tabled_assertz/2, + clpbn_tabled_dynamic/1, + clpbn_tabled_number_of_clauses/2, + clpbn_reset_tables/0, + clpbn_reset_tables/1, + clpbn_is_tabled/1 + ]). :- use_module(library(bhash), - [b_hash_new/4, - b_hash_lookup/3, - b_hash_insert/4]). + [b_hash_new/4, + b_hash_lookup/3, + b_hash_insert/4 + ]). :- meta_predicate clpbn_table(:), clpbn_tabled_clause(:.?), clpbn_tabled_clause_ref(:.?,?), clpbn_tabled_retract(:), clpbn_tabled_abolish(:), - clpbn_tabled_asserta(:), - clpbn_tabled_assertz(:), - clpbn_tabled_asserta(:,-), - clpbn_tabled_assertz(:,-), - clpbn_tabled_number_of_clauses(:,-), + clpbn_tabled_asserta(:), + clpbn_tabled_assertz(:), + clpbn_tabled_asserta(:,-), + clpbn_tabled_assertz(:,-), + clpbn_tabled_number_of_clauses(:,-), clpbn_is_tabled(:). -:- use_module(library(terms), [ - instantiated_term_hash/4, - variant/2 - ]). +:- use_module(library(terms), + [instantiated_term_hash/4, + variant/2 + ]). -:- use_module(evidence, [ - put_evidence/2 - ]). +:- use_module(evidence, + [put_evidence/2]). :- dynamic clpbn_table/3. @@ -108,30 +108,28 @@ clpbn_table(F/N,M) :- L0 = [_|Args0], IGoal =.. [NF|Args0], asserta(clpbn_table(S, M, IGoal)), - assert( - (M:S :- - !, -% write(S: ' ' ), - b_getval(clpbn_tables, Tab), - % V2 is unbound. - ( b_hash_lookup(Key, V2, Tab) -> -% (attvar(V2) -> writeln(ok:A0:V2) ; writeln(error(V2:should_be_attvar(S)))), - ( var(A0) -> A0 = V2 ; put_evidence(A0, V2) ) - ; -% writeln(new), - b_hash_insert(Tab, Key, V2, NewTab), - b_setval(clpbn_tables,NewTab), - once(M:Goal), !, - % enter evidence after binding. - ( var(A0) -> A0 = V2 ; put_evidence(A0, V2) ) - ; - clpbn:clpbn_flag(solver,none) -> - true - ; - throw(error(tabled_clpbn_predicate_should_never_fail,S)) - ) - ) - ). + assert((M:S :- + !, +% write(S: ' ' ), + b_getval(clpbn_tables, Tab), + % V2 is unbound. + (b_hash_lookup(Key, V2, Tab) -> +% (attvar(V2) -> writeln(ok:A0:V2) ; writeln(error(V2:should_be_attvar(S)))), + (var(A0) -> A0 = V2 ; put_evidence(A0, V2)) + ; +% writeln(new), + b_hash_insert(Tab, Key, V2, NewTab), + b_setval(clpbn_tables,NewTab), + once(M:Goal), !, + % enter evidence after binding. + (var(A0) -> A0 = V2 ; put_evidence(A0, V2)) + ; + clpbn:clpbn_flag(solver,none) -> + true + ; + throw(error(tabled_clpbn_predicate_should_never_fail,S)) + ) + )). take_tail([V], V, [], V1, [V1]) :- !. take_tail([A|L0], V, [A|L1], V1, [A|L2]) :- @@ -154,19 +152,17 @@ clpbn_tableallargs(F/N,M) :- atom_concat(F, '___tabled', NF), NKey =.. [NF|Args], asserta(clpbn_table(Key, M, NKey)), - assert( - (M:Key :- - !, - b_getval(clpbn_tables, Tab), - ( b_hash_lookup(Key, Out, Tab) -> - true - ; - b_hash_insert(Tab, Key, Out, NewTab), - b_setval(clpbn_tables, NewTab), - once(M:NKey) - ) - ) - ). + assert((M:Key :- + !, + b_getval(clpbn_tables, Tab), + (b_hash_lookup(Key, Out, Tab) -> + true + ; + b_hash_insert(Tab, Key, Out, NewTab), + b_setval(clpbn_tables, NewTab), + once(M:NKey) + ) + )). clpbn_table_nondet(M:X) :- !, clpbn_table_nondet(X,M). @@ -185,18 +181,17 @@ clpbn_table_nondet(F/N,M) :- atom_concat(F, '___tabled', NF), NKey =.. 
[NF|Args], asserta(clpbn_table(Key, M, NKey)), - assert( - (M:Key :- % writeln(in:Key), - b_getval(clpbn_tables, Tab), - ( b_hash_lookup(Key, Out, Tab) -> - fail - ; - b_hash_insert(Tab, Key, Out, NewTab), - b_setval(clpbn_tables, NewTab), - M:NKey - ) - ) - ). + assert((M:Key :- + % writeln(in:Key), + b_getval(clpbn_tables, Tab), + (b_hash_lookup(Key, Out, Tab) -> + fail + ; + b_hash_insert(Tab, Key, Out, NewTab), + b_setval(clpbn_tables, NewTab), + M:NKey + ) + )). user:term_expansion((P :- Gs), NC) :- clpbn_table(P, M, NP), @@ -364,4 +359,3 @@ clpbn_is_tabled(M:Clause, _) :- !, clpbn_is_tabled(Head, M) :- clpbn_table(Head, M, _). - diff --git a/packages/CLPBN/clpbn/topsort.yap b/packages/CLPBN/clpbn/topsort.yap index 9c40f9ad9..7beb31e05 100644 --- a/packages/CLPBN/clpbn/topsort.yap +++ b/packages/CLPBN/clpbn/topsort.yap @@ -1,11 +1,13 @@ -:- module(topsort, [topsort/2]). +:- module(topsort, + [topsort/2]). :- use_module(library(dgraphs), - [dgraph_new/1, - dgraph_add_edges/3, - dgraph_add_vertices/3, - dgraph_top_sort/2]). + [dgraph_new/1, + dgraph_add_edges/3, + dgraph_add_vertices/3, + dgraph_top_sort/2 + ]). /* simple implementation of a topological sorting algorithm */ /* graph is as Node-[Parents] */ @@ -31,4 +33,3 @@ add_edges([], _V) --> []. add_edges([P|Parents], V) --> [P-V], add_edges(Parents, V). - diff --git a/packages/CLPBN/clpbn/utils.yap b/packages/CLPBN/clpbn/utils.yap index 67f7cfe15..d8179a69e 100644 --- a/packages/CLPBN/clpbn/utils.yap +++ b/packages/CLPBN/clpbn/utils.yap @@ -1,9 +1,11 @@ -:- module(clpbn_utils, [ - clpbn_not_var_member/2, - clpbn_var_member/2, - check_for_hidden_vars/3, - sort_vars_by_key/3, - sort_vars_by_key_and_parents/3]). + +:- module(clpbn_utils, + [clpbn_not_var_member/2, + clpbn_var_member/2, + check_for_hidden_vars/3, + sort_vars_by_key/3, + sort_vars_by_key_and_parents/3 + ]). % % It may happen that variables from a previous query may still be around. @@ -52,21 +54,19 @@ get_keys([_|AVars], KeysVars) :- % may be non-CLPBN vars. merge_same_key([], [], _, []). merge_same_key([K1-V1,K2-V2|Vs], SortedAVars, Ks, UnifiableVars) :- K1 == K2, !, - (clpbn:get_atts(V1, [evidence(E)]) - -> - clpbn:put_atts(V2, [evidence(E)]) + (clpbn:get_atts(V1, [evidence(E)]) -> + clpbn:put_atts(V2, [evidence(E)]) ; - clpbn:get_atts(V2, [evidence(E)]) - -> + clpbn:get_atts(V2, [evidence(E)]) -> clpbn:put_atts(V1, [evidence(E)]) - ; - true + ; + true ), % V1 = V2, attributes:fast_unify_attributed(V1,V2), merge_same_key([K1-V1|Vs], SortedAVars, Ks, UnifiableVars). merge_same_key([K1-V1,K2-V2|Vs], [V1|SortedAVars], Ks, [K1|UnifiableVars]) :- - (in_keys(K1, Ks) ; \+ \+ K1 == K2), !, + (in_keys(K1, Ks) ; \+ \+ K1 == K2), !, add_to_keys(K1, Ks, NKs), merge_same_key([K2-V2|Vs], SortedAVars, NKs, UnifiableVars). merge_same_key([K-V|Vs], [V|SortedAVars], Ks, UnifiableVars) :- @@ -74,9 +74,9 @@ merge_same_key([K-V|Vs], [V|SortedAVars], Ks, UnifiableVars) :- merge_same_key(Vs, SortedAVars, NKs, UnifiableVars). in_keys(K1,[K|_]) :- \+ \+ K1 = K, !. -in_keys(K1,[_|Ks]) :- +in_keys(K1,[_|Ks]) :- in_keys(K1,Ks). - + add_to_keys(K1, Ks, Ks) :- ground(K1), !. add_to_keys(K1, Ks, [K1|Ks]). @@ -102,7 +102,7 @@ add_parents(Parents,V,Id,KeyVarsF,KeyVars0) :- all_vars([]). all_vars([P|Parents]) :- - var(P), + var(P), all_vars(Parents). @@ -113,4 +113,3 @@ transform_parents([P|Parents0],[P|NParents],KeyVarsF,KeyVars0) :- transform_parents([P|Parents0],[V|NParents],[P-V|KeyVarsF],KeyVars0) :- transform_parents(Parents0,NParents,KeyVarsF,KeyVars0). 
- diff --git a/packages/CLPBN/clpbn/ve.yap b/packages/CLPBN/clpbn/ve.yap index b19bf020c..21f0d931a 100644 --- a/packages/CLPBN/clpbn/ve.yap +++ b/packages/CLPBN/clpbn/ve.yap @@ -11,58 +11,61 @@ all tables they connect to; multiply their size order by size - + *********************************/ -:- module(clpbn_ve, [ve/3, - check_if_ve_done/1, - init_ve_solver/4, - run_ve_solver/3, - init_ve_ground_solver/5, - run_ve_ground_solver/3, - call_ve_ground_solver/6]). +:- module(clpbn_ve, + [ve/3, + check_if_ve_done/1, + init_ve_solver/4, + run_ve_solver/3, + init_ve_ground_solver/5, + run_ve_ground_solver/3, + call_ve_ground_solver/6 + ]). -:- attribute size/1, all_diffs/1. +:- use_module(library(atts)). :- use_module(library(ordsets), - [ord_union/3, - ord_member/2]). + [ord_union/3, + ord_member/2 + ]). -:- use_module(library('clpbn/xbif'), [clpbn2xbif/3]). +:- use_module(library('clpbn/xbif'), + [clpbn2xbif/3]). -:- use_module(library('clpbn/graphviz'), [clpbn2gviz/4]). +:- use_module(library('clpbn/graphviz'), + [clpbn2gviz/4]). :- use_module(library('clpbn/dists'), - [ - dist/4, - get_dist_domain_size/2, - get_dist_params/2, - get_dist_domain_size/2, - get_dist_matrix/5]). + [dist/4, + get_dist_domain_size/2, + get_dist_params/2, + get_dist_domain_size/2, + get_dist_matrix/5 + ]). -:- use_module(library('clpbn/utils'), [ - clpbn_not_var_member/2]). +:- use_module(library('clpbn/utils'), + [clpbn_not_var_member/2]). -:- use_module(library('clpbn/display'), [ - clpbn_bind_vals/3]). +:- use_module(library('clpbn/display'), + [clpbn_bind_vals/3]). :- use_module(library('clpbn/connected'), - [ - init_influences/3, - influences/4, - factor_influences/4 - ]). + [init_influences/3, + influences/4, + factor_influences/4 + ]). :- use_module(library(clpbn/matrix_cpt_utils)). :- use_module(library(clpbn/numbers)). :- use_module(library(lists), - [ - member/2, - append/3, - delete/3 - ]). + [member/2, + append/3, + delete/3 + ]). :- use_module(library(maplist)). @@ -71,7 +74,9 @@ :- use_module(library(clpbn/vmap)). :- use_module(library('clpbn/aggregates'), - [check_for_agg_vars/2]). + [check_for_agg_vars/2]). + +:- attribute size/1, all_diffs/1. % % uses a bipartite graph where bigraph(Vs, NFs, Fs) @@ -88,23 +93,23 @@ check_if_ve_done(Var) :- % new PFL like interface... % call_ve_ground_solver(QueryVars, QueryKeys, AllKeys, Factors, Evidence, Output) :- - call_ve_ground_solver_for_probabilities([QueryKeys], AllKeys, Factors, Evidence, Solutions), - clpbn_bind_vals([QueryVars], Solutions, Output). + call_ve_ground_solver_for_probabilities([QueryKeys], AllKeys, Factors, Evidence, Solutions), + clpbn_bind_vals([QueryVars], Solutions, Output). call_ve_ground_solver_for_probabilities(QueryKeys, AllKeys, Factors, Evidence, Solutions) :- - init_ve_ground_solver(QueryKeys, AllKeys, Factors, Evidence, VE), - run_ve_ground_solver(QueryKeys, Solutions, VE). + init_ve_ground_solver(QueryKeys, AllKeys, Factors, Evidence, VE), + run_ve_ground_solver(QueryKeys, Solutions, VE). simulate_ve_ground_solver(_QueryVars, QueryKeys, AllKeys, Factors, Evidence, Output) :- - simulate_ve_ground_solver_for_probabilities([QueryKeys], AllKeys, Factors, Evidence, Output). + simulate_ve_ground_solver_for_probabilities([QueryKeys], AllKeys, Factors, Evidence, Output). simulate_ve_ground_solver_for_probabilities(QueryKeys, AllKeys, Factors, Evidence, Solutions) :- - init_ve_ground_solver(QueryKeys, AllKeys, Factors, Evidence, VE), - simulate_solver(QueryKeys, Solutions, VE). 
+ init_ve_ground_solver(QueryKeys, AllKeys, Factors, Evidence, VE), + simulate_solver(QueryKeys, Solutions, VE). init_ve_ground_solver(_QueryKeys, AllKeys, Factors, Evidence, VE) :- - keys_to_numbers(AllKeys, Factors, Evidence, Hash4, Id4, FactorIds, EvidenceIds), - init_ve(FactorIds, EvidenceIds, Hash4, Id4, VE). + keys_to_numbers(AllKeys, Factors, Evidence, Hash4, Id4, FactorIds, EvidenceIds), + init_ve(FactorIds, EvidenceIds, Hash4, Id4, VE). % @@ -112,11 +117,11 @@ init_ve_ground_solver(_QueryKeys, AllKeys, Factors, Evidence, VE) :- % ve([[]],_,_) :- !. ve(LLVs,Vs0,AllDiffs) :- - init_ve_solver(LLVs, Vs0, AllDiffs, State), - % variable elimination proper - run_ve_solver(LLVs, LLPs, State), - % bind Probs back to variables so that they can be output. - clpbn_bind_vals(LLVs,LLPs,AllDiffs). + init_ve_solver(LLVs, Vs0, AllDiffs, State), + % variable elimination proper + run_ve_solver(LLVs, LLPs, State), + % bind Probs back to variables so that they can be output. + clpbn_bind_vals(LLVs,LLPs,AllDiffs). init_ve(FactorIds, EvidenceIds, Hash, Id, ve(FactorIds, Hash, Id, Ev)) :- @@ -129,7 +134,7 @@ evtotree(K=V,Ev0,Ev) :- factor_to_graph( f(Nodes, Sizes, _Pars0, Id), Factors0, Factors, Edges0, Edges, I0, I) :- I is I0+1, pfl:get_pfl_parameters(Id, Pars0), - init_CPT(Pars0, Sizes, CPT0), + init_CPT(Pars0, Sizes, CPT0), reorder_CPT(Nodes, CPT0, FIPs, CPT, _), F = f(I0, FIPs, CPT), rb_insert(Factors0, I0, F, Factors), @@ -172,7 +177,7 @@ vars_to_bigraph(VMap, bigraph(VInfo, IF, Fs), Evs) :- id_to_factor(VMap, V-I, IF0, IF, Fs0, Fs, Evs0, Evs) :- % process evidence for variable - clpbn:get_atts(V, [evidence(E), dist(_,Ps)]), + clpbn:get_atts(V, [evidence(E), dist(_,Ps)]), checklist(noparent_of_interest(VMap), Ps), !, % I don't need to get a factor here Evs = [I=E|Evs0], @@ -181,17 +186,17 @@ id_to_factor(VMap, V-I, IF0, IF, Fs0, Fs, Evs0, Evs) :- id_to_factor(VMap, V-I, IF0, IF, Fs0, Fs, Evs0, Evs) :- % process distribution/factors ( - clpbn:get_atts(V, [evidence(E)]) - -> - Evs = [I=E|Evs0] + clpbn:get_atts(V, [evidence(E)]) + -> + Evs = [I=E|Evs0] ; - Evs = Evs0 - ), + Evs = Evs0 + ), clpbn:get_atts(V, [dist(D, Ps)]), get_dist_params(D, Pars0), get_dist_domain_size(D, DS), maplist(parent_to_id(VMap), Ps, Sizes, IPs), - init_CPT(Pars0, [DS|Sizes], CPT0), + init_CPT(Pars0, [DS|Sizes], CPT0), reorder_CPT([I|IPs], CPT0, FIPs, CPT, _), rb_insert(Fs0, IF0, f(IF0, FIPs, CPT), Fs), IF is IF0+1. @@ -239,29 +244,29 @@ collect_factors(SFVs, _Fs, _V, [], SFVs). % solve each query independently % use a findall to recover space without needing for GC run_ve_ground_solver(LQVs, LLPs, ve(FactorIds, Hash, Id, Ev)) :- - rb_new(Fs0), - foldl3(factor_to_graph, FactorIds, Fs0, Fs, [], FVs, 0, IF), - sort(FVs, SFVs), - rb_new(VInfo0), - add_vs(SFVs, Fs, VInfo0, VInfo), - BG = bigraph(VInfo, IF, Fs), - lists_of_keys_to_ids(LQVs, LQIds, Hash, _, Id, _), - findall(LPs, solve(LQIds, FactorIds, BG, Ev, LPs), LLPs). + rb_new(Fs0), + foldl3(factor_to_graph, FactorIds, Fs0, Fs, [], FVs, 0, IF), + sort(FVs, SFVs), + rb_new(VInfo0), + add_vs(SFVs, Fs, VInfo0, VInfo), + BG = bigraph(VInfo, IF, Fs), + lists_of_keys_to_ids(LQVs, LQIds, Hash, _, Id, _), + findall(LPs, solve(LQIds, FactorIds, BG, Ev, LPs), LLPs). solve([QVs|_], FIds, Bigraph, Evs, LPs) :- - factor_influences(FIds, QVs, Evs, LVs), - do_solve(QVs, LVs, Bigraph, Evs, LPs). + factor_influences(FIds, QVs, Evs, LVs), + do_solve(QVs, LVs, Bigraph, Evs, LPs). solve([_|LQVs], FIds, Bigraph, Ev, LPs) :- - solve(LQVs, FIds, Bigraph, Ev, LPs). 
+ solve(LQVs, FIds, Bigraph, Ev, LPs). do_solve(IQVs, IVs, bigraph(OldVs, IF, _Fs), Ev, Ps) :- - % get only what is relevant to query, - project_to_query_related(IVs, OldVs, SVs, Fs1), - % and also prune using evidence - rb_visit(Ev, EvL), - foldl2(clean_v_ev, EvL, Fs1, Fs2, SVs, EVs), - % eliminate - eliminate(IQVs, digraph(EVs, IF, Fs2), Dist), + % get only what is relevant to query, + project_to_query_related(IVs, OldVs, SVs, Fs1), + % and also prune using evidence + rb_visit(Ev, EvL), + foldl2(clean_v_ev, EvL, Fs1, Fs2, SVs, EVs), + % eliminate + eliminate(IQVs, digraph(EVs, IF, Fs2), Dist), % writeln(m:Dist),matrix:matrix_to_list(Dist,LD),writeln(LD), %exps(LD,LDE),writeln(LDE), % move from potentials back to probabilities @@ -269,18 +274,18 @@ do_solve(IQVs, IVs, bigraph(OldVs, IF, _Fs), Ev, Ps) :- list_from_CPT(MPs, Ps). simulate_solver(LQVs, Choices, ve(FIds, Hash, Id, BG, Evs)) :- - lists_of_keys_to_ids(LQVs, [QVs], Hash, _, Id, _), - factor_influences(FIds, QVs, Evs, LVs), - do_simulate(QVs, LVs, BG, Evs, Choices). + lists_of_keys_to_ids(LQVs, [QVs], Hash, _, Id, _), + factor_influences(FIds, QVs, Evs, LVs), + do_simulate(QVs, LVs, BG, Evs, Choices). do_simulate(IQVs, IVs, bigraph(OldVs, IF, _Fs), Ev, Choices) :- - % get only what is relevant to query, - project_to_query_related(IVs, OldVs, SVs, Fs1), - % and also prune using evidence - rb_visit(Ev, EvL), - foldl2(clean_v_ev, EvL, Fs1, Fs2, SVs, EVs), - % eliminate - simulate_eiminate(IQVs, digraph(EVs, IF, Fs2), Choices). + % get only what is relevant to query, + project_to_query_related(IVs, OldVs, SVs, Fs1), + % and also prune using evidence + rb_visit(Ev, EvL), + foldl2(clean_v_ev, EvL, Fs1, Fs2, SVs, EVs), + % eliminate + simulate_eiminate(IQVs, digraph(EVs, IF, Fs2), Choices). % solve each query independently % use a findall to recover space without needing for GC @@ -295,9 +300,9 @@ run_ve_solver(_, LLPs, state(LQVs, LVs, _VMap, Bigraph, Ev)) :- % solve_ve([IQVs|_], [IVs|_], bigraph(OldVs, IF, _Fs), Ev, Ps) :- % get only what is relevant to query, - project_to_query_related(IVs, OldVs, SVs, Fs1), + project_to_query_related(IVs, OldVs, SVs, Fs1), % and also prune using evidence - foldl2(clean_v_ev, Ev, Fs1, Fs2, SVs, EVs), + foldl2(clean_v_ev, Ev, Fs1, Fs2, SVs, EVs), % eliminate eliminate(IQVs, digraph(EVs, IF, Fs2), Dist), % writeln(m:Dist),matrix:matrix_to_list(Dist,LD),writeln(LD), @@ -314,7 +319,7 @@ solve_ve([_|MoreLVs], [_|MoreLVis], Digraph, Ev, Ps) :- project_to_query_related(IVs0, OldVs, NVs, NFs) :- sort(IVs0, IVs), rb_new(Vs0), - foldl(cp_to_vs, IVs, Vs0, AuxVs), + foldl(cp_to_vs, IVs, Vs0, AuxVs), rb_new(NFs0), foldl(simplify_graph_node(OldVs, AuxVs), IVs, VFs, NFs0, NFs), list_to_rbtree(VFs, NVs). @@ -338,31 +343,31 @@ simplify_graph_node(OldVs, NVs, V, V-RemFs, NFs0, NFs) :- % % Two cases: first time factor comes up: all its vars must be in subgraph % second case: second time it comes up, it must be already in graph -% -% args: +Factor F, +current V (int), +rbtree with all Vs, +% +% args: +Factor F, +current V (int), +rbtree with all Vs, % -Factors in new Graph, +factors in current graph, -rbtree of factors % % check_factor(V, NVs, F, NFs0, NFs, RemFs, NewRemFs) :- F = f(IF, [V|More], _), !, - ( + ( checklist(check_v(NVs), More) -> rb_insert(NFs0, IF, F, NFs), NewRemFs = [F|RemFs] - ; + ; NFs0 = NFs, NewRemFs = RemFs - ). + ). check_factor(_V, _NVs, F, NFs, NFs, RemFs, NewRemFs) :- F = f(Id, _, _), - ( + ( rb_lookup(Id, F, NFs) -> NewRemFs = [F|RemFs] - ; + ; NewRemFs = RemFs - ). + ). 
check_v(NVs, V) :- rb_lookup(V, _, NVs). @@ -425,15 +430,15 @@ best_var(QVs, I, _Node, Info, Info) :- !. % pick the variable with less factors best_var(_Qs, I, Node, i(ValSoFar,_,_), i(NewVal,I,Node)) :- - foldl(szfac,Node,1,NewVal), + foldl(szfac,Node,1,NewVal), %length(Node, NewVal), NewVal < ValSoFar, !. best_var(_, _I, _Node, Info, Info). szfac(f(_,Vs,_), I0, I) :- - length(Vs,L), - I is I0*L. + length(Vs,L), + I is I0*L. % delete one factor, need to also touch all variables del_fac(f(I,FVs,_), Fs0, Fs, Vs0, Vs) :- @@ -472,4 +477,3 @@ multiply([F0|Fs], Vs, T) :- multiply_factor(f(_,Vs1,T1), f(_,Vs0,T0), f(_,Vs,T)) :- multiply_CPTs(T1, Vs1, T0, Vs0, T, Vs). - diff --git a/packages/CLPBN/clpbn/viterbi.yap b/packages/CLPBN/clpbn/viterbi.yap index 1dc1037f5..0d496d63b 100644 --- a/packages/CLPBN/clpbn/viterbi.yap +++ b/packages/CLPBN/clpbn/viterbi.yap @@ -1,11 +1,13 @@ %:- style_check(all). -:- module(viterbi, [viterbi/4]). +:- module(viterbi, + [viterbi/4]). :- use_module(library(lists), - [nth/3, - member/2]). + [nth/3, + member/2 + ]). :- use_module(library(assoc)). @@ -17,8 +19,8 @@ :- ensure_loaded(library('clpbn/hmm')). -:- use_module(library('clpbn/dists'), [ - get_dist_params/2]). +:- use_module(library('clpbn/dists'), + [get_dist_params/2]). :- meta_predicate viterbi(:,:,+,-). @@ -75,21 +77,21 @@ fetch_edges([V|Parents], Key0, EdgesF, Edges0, [Slice-AKey|PKeys]) :- clpbn:get_atts(V,[key(Key)]), abstract_key(Key, AKey, Slice), ( - Slice < 3 + Slice < 3 -> - EdgesF = [Key0-AKey|EdgesI] + EdgesF = [Key0-AKey|EdgesI] ; - EdgesF = EdgesI + EdgesF = EdgesI ), fetch_edges(Parents, Key0, EdgesI, Edges0, PKeys). fetch_edges([Key|Parents], Key0, EdgesF, Edges0, [Slice-AKey|PKeys]) :- abstract_key(Key, AKey, Slice), ( - Slice < 3 + Slice < 3 -> - EdgesF = [Key0-AKey|EdgesI] + EdgesF = [Key0-AKey|EdgesI] ; - EdgesF = EdgesI + EdgesF = EdgesI ), fetch_edges(Parents, Key0, EdgesI, Edges0, PKeys). fetch_edges([], _, Edges, Edges, []). @@ -122,20 +124,20 @@ compile_keys([], _, []). % add a random symbol to the end. compile_emission([],_) --> !, []. compile_emission(EmissionTerm,IKey) --> [emit(IKey,EmissionTerm)]. - + compile_propagation([],[],_,_) --> []. compile_propagation([0-PKey|Ps], [Prob|Probs], IKey, KeyMap) --> - [prop_same(IKey,Parent,Prob)], - { get_assoc(PKey,KeyMap,nodeinfo(Parent,_,_,_)) }, - compile_propagation(Ps, Probs, IKey, KeyMap). + [prop_same(IKey,Parent,Prob)], + { get_assoc(PKey,KeyMap,nodeinfo(Parent,_,_,_)) }, + compile_propagation(Ps, Probs, IKey, KeyMap). compile_propagation([2-PKey|Ps], [Prob|Probs], IKey, KeyMap) --> - [prop_same(IKey,Parent,Prob)], - { get_assoc(PKey,KeyMap,nodeinfo(Parent,_,_,_)) }, - compile_propagation(Ps, Probs, IKey, KeyMap). + [prop_same(IKey,Parent,Prob)], + { get_assoc(PKey,KeyMap,nodeinfo(Parent,_,_,_)) }, + compile_propagation(Ps, Probs, IKey, KeyMap). compile_propagation([3-PKey|Ps], [Prob|Probs], IKey, KeyMap) --> - [prop_next(IKey,Parent,Prob)], - { get_assoc(PKey,KeyMap,nodeinfo(Parent,_,_,_)) }, - compile_propagation(Ps, Probs, IKey, KeyMap). + [prop_next(IKey,Parent,Prob)], + { get_assoc(PKey,KeyMap,nodeinfo(Parent,_,_,_)) }, + compile_propagation(Ps, Probs, IKey, KeyMap). get_id(_:S, Map, SI) :- !, get_id(S, Map, SI). @@ -148,9 +150,9 @@ get_id(S, Map, SI) :- compile_trace(Trace, Emissions) :- user:hmm_domain(Domain), (atom(Domain) -> - hmm:cvt_vals(Domain, Vals) + hmm:cvt_vals(Domain, Vals) ; - Vals = Domain + Vals = Domain ), compile_trace(Trace, Vals, Emissions). 
@@ -192,22 +194,22 @@ run_inst(prop_same(I,P,Prob), _, SP, Current, _, Trace) :- NP is PI+Prob, matrix_get(Current, [P], P0), (NP > P0 -> - matrix_set(Current, [P], NP), - matrix_set(Trace, [SP,P], I) + matrix_set(Current, [P], NP), + matrix_set(Trace, [SP,P], I) ; - true + true ). run_inst(prop_next(I,P,Prob), _, SP, Current, Next, Trace) :- matrix_get(Current, [I], PI), NP is PI+Prob, matrix_get(Next, [P], P0), (NP > P0 -> - matrix_set(Next, [P], NP), - SP1 is SP+1, - IN is -I, - matrix_set(Trace, [SP1,P], IN) + matrix_set(Next, [P], NP), + SP1 is SP+1, + IN is -I, + matrix_set(Trace, [SP1,P], IN) ; - true + true ). backtrace(Dump, EI, Map, L, Trace) :- @@ -219,11 +221,11 @@ backtrace(Dump, EI, Map, L, Trace) :- trace(0,0,_,_,Trace,Trace) :- !. trace(L1,Next,Dump,Map,Trace0,Trace) :- (Next < 0 -> - NL is L1-1, - P is -Next + NL is L1-1, + P is -Next ; - NL = L1, - P = Next + NL = L1, + P = Next ), once(member(P-AKey,Map)), AKey=..[N|Args], @@ -231,5 +233,3 @@ trace(L1,Next,Dump,Map,Trace0,Trace) :- matrix_get(Dump,[NL,P],New), trace(NL,New,Dump,Map,[Key|Trace0],Trace). - - diff --git a/packages/CLPBN/clpbn/vmap.yap b/packages/CLPBN/clpbn/vmap.yap index ac0fb83df..79fd7f566 100644 --- a/packages/CLPBN/clpbn/vmap.yap +++ b/packages/CLPBN/clpbn/vmap.yap @@ -1,23 +1,22 @@ :- module(clpbn_vmap, - [ - init_vmap/1, % init_vmap(-Vmap) - add_to_vmap/4, % add_to_vmap(+V,-I,+VMap0,VMapF) - get_from_vmap/3, % add_to_vmap(+V,-I,+VMap0) - vars_to_numbers/4, % vars_to_numbers(+Vs,-Is,+VMap0,VMapF) - lvars_to_numbers/4, % lvars_to_numbers(+LVs,-LIs,+VMap0,VMapF) - vmap_to_list/2 - ]). + [init_vmap/1, % init_vmap(-Vmap) + add_to_vmap/4, % add_to_vmap(+V,-I,+VMap0,VMapF) + get_from_vmap/3, % add_to_vmap(+V,-I,+VMap0) + vars_to_numbers/4, % vars_to_numbers(+Vs,-Is,+VMap0,VMapF) + lvars_to_numbers/4, % lvars_to_numbers(+LVs,-LIs,+VMap0,VMapF) + vmap_to_list/2 + ]). :- use_module(library(rbtrees)). :- use_module(library(maplist)). % -% vmap: map V->I +% vmap: map V->I % contiguous Vs to contiguous integers % init_vmap(vmap(0,Empty)) :- - rb_new(Empty). + rb_new(Empty). get_from_vmap(V, I, VMap0) :- VMap0 = vmap(_I,Map0), @@ -39,6 +38,3 @@ lvars_to_numbers(LVs, LIs, VMap0, VMap) :- vmap_to_list(vmap(_,Map), L) :- rb_visit(Map, L). - - - diff --git a/packages/CLPBN/clpbn/xbif.yap b/packages/CLPBN/clpbn/xbif.yap index df010c6bf..657e350ba 100644 --- a/packages/CLPBN/clpbn/xbif.yap +++ b/packages/CLPBN/clpbn/xbif.yap @@ -2,10 +2,11 @@ % XMLBIF support for CLP(BN) % -:- module(xbif, [clpbn2xbif/3]). +:- module(xbif, + [clpbn2xbif/3]). -:- use_module(library('clpbn/dists'), [ - get_dist_domain/2]). +:- use_module(library('clpbn/dists'), + [get_dist_domain/2]). clpbn2xbif(Stream, Name, Network) :- format(Stream, ' diff --git a/packages/CLPBN/examples/HMMer/fasta.yap b/packages/CLPBN/examples/HMMer/fasta.yap index 9a3e7450a..d4bf53085 100644 --- a/packages/CLPBN/examples/HMMer/fasta.yap +++ b/packages/CLPBN/examples/HMMer/fasta.yap @@ -5,7 +5,10 @@ % support for a single sequence. % -:- module(fasta, [fa2atoms/2,fa2atoms/3]). +:- module(fasta, + [fa2atoms/2, + fa2atoms/3 + ]). fa2atoms(F, L) :- fa2atoms(F, L, []). @@ -25,8 +28,8 @@ read_chars(10,S) --> !, read_chars(C,S) --> [AC], { - cvt_c(C,AC), - get0(S,MC) + cvt_c(C,AC), + get0(S,MC) }, read_chars(MC, S). @@ -44,4 +47,3 @@ skip_header(_,S) :- skip_header(C,S). 
- diff --git a/packages/CLPBN/examples/HMMer/globin.yap b/packages/CLPBN/examples/HMMer/globin.yap index 06c896048..704ca40b3 100644 --- a/packages/CLPBN/examples/HMMer/globin.yap +++ b/packages/CLPBN/examples/HMMer/globin.yap @@ -32,9 +32,9 @@ g_f_cpt(-8455,1.0,0.00284964910984409). %Null state emission CPT. nule_cpt( - e(595,-1558,85,338,-294,453,-1158,197,249,902,-1085,-142,-21,-313,45,531,201,384,-1998,-644), - 0.05, - e(0.0755236292781413,0.0169810785568618,0.0530343870684108,0.0632001549226403,0.0407818746669505,0.0684441906545919,0.0224066674892351,0.0573156092864189,0.0594191552528466,0.093432734688318,0.023569613397956,0.0453130969133667,0.0492774668469685,0.0402483068810561,0.051584158965068,0.0722465198961763,0.0574747424017338,0.0652477473844479,0.0125173406963917,0.0319968103461077)). + e(595,-1558,85,338,-294,453,-1158,197,249,902,-1085,-142,-21,-313,45,531,201,384,-1998,-644), + 0.05, + e(0.0755236292781413,0.0169810785568618,0.0530343870684108,0.0632001549226403,0.0407818746669505,0.0684441906545919,0.0224066674892351,0.0573156092864189,0.0594191552528466,0.093432734688318,0.023569613397956,0.0453130969133667,0.0492774668469685,0.0402483068810561,0.051584158965068,0.0722465198961763,0.0574747424017338,0.0652477473844479,0.0125173406963917,0.0319968103461077)). %Reaching first D. b_d_cpt(-110,-3765,-110). diff --git a/packages/CLPBN/examples/HMMer/scan.yap b/packages/CLPBN/examples/HMMer/scan.yap index 33948a57c..a874f2ed7 100644 --- a/packages/CLPBN/examples/HMMer/scan.yap +++ b/packages/CLPBN/examples/HMMer/scan.yap @@ -14,7 +14,7 @@ stop(S,W,Info) :- gen_program(W, Info). stop(_,_,_) :- format(user_error,"Bad HMM~n", []). - + parse_model(S,Info) :- get_line(S, Line, Info), % format('~s~n',[Line]), @@ -45,7 +45,7 @@ match_field(hmmer(_,_,_,Alph,_,_,_,_),_) --> "ALPH", !, % aminos or bases match_field(_,_) --> "RF", !, scanner_skip. match_field(_,_) --> "CS", !, scanner_skip. match_field(hmmer(_,_,_,_,_,_,_,MAP),_) --> "MAP", !, - scanner_skip_blanks, + scanner_skip_blanks, to_lower(Codes), { map_code(Codes,MAP) }. match_field(_,_) --> "COM", !, scanner_skip. @@ -76,11 +76,11 @@ match_field(_,_) --> "EVD", !, match_field(Info,S) --> "HMM", !, scanner_skip, { - get_line(S,_,Info), - Info = hmmer(_,_,NOfStates,Alph,_,_,model(BD,NBD,Transitions),MAP), - nof_symbols(Alph,N), - scan_model(S,NOfStates,N,BD,NBD,Transitions,MAP,Info), - throw(done(Info)) + get_line(S,_,Info), + Info = hmmer(_,_,NOfStates,Alph,_,_,model(BD,NBD,Transitions),MAP), + nof_symbols(Alph,N), + scan_model(S,NOfStates,N,BD,NBD,Transitions,MAP,Info), + throw(done(Info)) }. scan_model(S,NOfStates,N,BD,NBD,Transitions,MAP,Info) :- @@ -95,7 +95,7 @@ scan_states(NOfStates, N, Stream, MAP, [t(E,I,S)|Transitions], Info) :- scan_states(NOfStates1, N, Stream, NMAP, Transitions, Info). scan_state(Stream, E,I,MAP,s(MM,MI,MD,IM,II,DM,DD,BM,ME), N, NMAP, Info) :- - get_line(Stream, ELine, Info), + get_line(Stream, ELine, Info), get_line(Stream, ILine, Info), get_line(Stream, SLine, Info), % format('~s~n~s~n~s~n',[ELine,ILine,SLine]), @@ -265,7 +265,7 @@ gen_model(W, model(BD,NBD,States),PsCPT) :- format(W, '~n%Reaching first D.~n',[]), format(W, 'b_d_cpt(~w,~w,~w).~n',[BD,NBD,BDCPT]), gen_states(W, States,1,PsCPT). - + gen_states(_, [],_,_). gen_states(W, [State|States],StateNo,PsCPT) :- gen_state(W, State,StateNo,PsCPT), @@ -327,4 +327,3 @@ max_index([_|L],I0,Max0,MaxIndex0,Max,MaxIndex) :- I is I0+1, max_index(L,I,Max0,MaxIndex0,Max,MaxIndex). 
- diff --git a/packages/CLPBN/examples/HMMer/score.yap b/packages/CLPBN/examples/HMMer/score.yap index db7693b3b..ef79b6624 100644 --- a/packages/CLPBN/examples/HMMer/score.yap +++ b/packages/CLPBN/examples/HMMer/score.yap @@ -6,13 +6,12 @@ :- ensure_loaded(library('clpbn/viterbi')). :- use_module(fasta, - [fa2atoms/3]). + [fa2atoms/3]). :- use_module(library(lists), - [ - nth/3, - append/3 - ]). + [nth/3, + append/3 + ]). :- [plan7]. diff --git a/packages/CLPBN/examples/School/parlearn.yap b/packages/CLPBN/examples/School/parlearn.yap index e722d8334..f089fb848 100644 --- a/packages/CLPBN/examples/School/parlearn.yap +++ b/packages/CLPBN/examples/School/parlearn.yap @@ -41,4 +41,4 @@ write_cpts([CPT|CPTs]) :- matrix_to_list(CPT,L), format('CPT=~w~n',[L]), write_cpts(CPTs). - + diff --git a/packages/CLPBN/examples/School/parschema.pfl b/packages/CLPBN/examples/School/parschema.pfl index 3d708936a..b799c60d7 100644 --- a/packages/CLPBN/examples/School/parschema.pfl +++ b/packages/CLPBN/examples/School/parschema.pfl @@ -55,7 +55,7 @@ professor_popularity(P,A) :- pop(P,A). course_difficulty(P,A) :- diff(P,A). student_intelligence(P,A) :- int(P,A). - + course_rating(C,X) :- rat(C,X). registration_grade(R,A) :- diff --git a/packages/CLPBN/examples/School/school_128.yap b/packages/CLPBN/examples/School/school_128.yap index ecbf398cb..3a59cd9d6 100644 --- a/packages/CLPBN/examples/School/school_128.yap +++ b/packages/CLPBN/examples/School/school_128.yap @@ -18,9 +18,6 @@ total_students(4096). :- ensure_loaded('parschema.pfl'). -:- set_solver(hve). - - professor(p0). professor(p1). professor(p2). diff --git a/packages/CLPBN/examples/School/school_32.yap b/packages/CLPBN/examples/School/school_32.yap index 48fcbcabc..7c993ef24 100644 --- a/packages/CLPBN/examples/School/school_32.yap +++ b/packages/CLPBN/examples/School/school_32.yap @@ -18,9 +18,6 @@ total_students(256). :- ensure_loaded('parschema.pfl'). -:- set_solver(hve). - - professor(p0). professor(p1). professor(p2). diff --git a/packages/CLPBN/examples/School/school_64.yap b/packages/CLPBN/examples/School/school_64.yap index 7564cc1b9..d72ecf7e8 100644 --- a/packages/CLPBN/examples/School/school_64.yap +++ b/packages/CLPBN/examples/School/school_64.yap @@ -18,9 +18,6 @@ total_students(1024). :- ensure_loaded('parschema.pfl'). -:- set_solver(hve). - - professor(p0). professor(p1). professor(p2). diff --git a/packages/CLPBN/examples/burglary-alarm.uai b/packages/CLPBN/examples/burglary-alarm.uai index 4e950cd9f..c584a98aa 100644 --- a/packages/CLPBN/examples/burglary-alarm.uai +++ b/packages/CLPBN/examples/burglary-alarm.uai @@ -1,28 +1,26 @@ -MARKOV +BAYES + 5 2 2 2 2 2 5 1 0 1 1 -3 2 0 1 -2 3 2 -2 4 2 +3 0 1 2 +2 2 3 +2 2 4 2 - .001 .999 + 0.001 0.999 2 - .002 .998 + 0.002 0.998 8 - .95 .94 .29 .001 - .05 .06 .71 .999 + 0.95 0.05 0.94 0.06 0.29 0.71 0.001 0.999 4 - .9 .05 - .1 .95 + 0.9 0.1 0.05 0.95 4 - .7 .01 - .3 .99 - + 0.7 0.3 0.01 0.99 + diff --git a/packages/CLPBN/examples/cg.yap b/packages/CLPBN/examples/cg.yap deleted file mode 100644 index 04423248a..000000000 --- a/packages/CLPBN/examples/cg.yap +++ /dev/null @@ -1,35 +0,0 @@ - -% -% adapted from Hendrik Blockeel's ILP04 paper. -% - -:- use_module(library(clpbn)). - -cg(X,1,C):- - father(Y,X), - cg(Y,1,C1),cg(Y,2,C2), - parent_cpt(cg(X,1), C1, C2, C). - -cg(X,2,C):- - mother(Y,X), - cg(Y,1,C1),cg(Y,2,C2), - parent_cpt(cg(X,2), C1, C2, C). - - - -cg(f,X,C) :- - prior_cpt(cg(f,X),C). - -cg(m,X,C) :- - prior_cpt(cg(m,X),C). 
- - -prior_cpt(CKEY, C) :- - { C = CKEY with p([p,w], [0.5,0.5])}. - -parent_cpt(CKEY, C1, C2, C) :- - { C = CKEY with p([p,w], [ 1,0.5,0.5,0.0, - 0.0,0.5,0.5, 1],[C1,C2])}. - -father(f,s). -mother(m,s). diff --git a/packages/CLPBN/examples/city.pfl b/packages/CLPBN/examples/city.pfl index c891ae163..d79e824a8 100644 --- a/packages/CLPBN/examples/city.pfl +++ b/packages/CLPBN/examples/city.pfl @@ -1,3 +1,8 @@ +/* + Model from the paper "First-order + probabilistic inference" +*/ + :- use_module(library(pfl)). :- set_solver(hve). @@ -11,14 +16,14 @@ %:- set_solver(lkc). %:- set_solver(lbp). -:- multifile people/2. +:- multifile person/2. :- multifile ev/1. -people(joe,nyc). -people(p2, nyc). -people(p3, nyc). -people(p4, nyc). -people(p5, nyc). +person(joe,nyc). +person(p2, nyc). +person(p3, nyc). +person(p4, nyc). +person(p5, nyc). ev(descn(p2, fits)). ev(descn(p3, fits)). @@ -26,41 +31,41 @@ ev(descn(p4, fits)). ev(descn(p5, fits)). bayes city_conservativeness(C)::[high,low] ; - cons_table ; - [people(_,C)]. + cons_table ; + [person(_,C)]. bayes gender(P)::[male,female] ; - gender_table ; - [people(P,_)]. + gender_table ; + [person(P,_)]. bayes hair_color(P)::[dark,bright], city_conservativeness(C) ; - hair_color_table ; - [people(P,C)]. + hair_color_table ; + [person(P,C)]. bayes car_color(P)::[dark,bright], hair_color(P) ; - car_color_table ; - [people(P,_)]. + car_color_table ; + [person(P,_)]. bayes height(P)::[tall,short], gender(P) ; - height_table ; - [people(P,_)]. + height_table ; + [person(P,_)]. bayes shoe_size(P)::[big,small], height(P) ; - shoe_size_table ; - [people(P,_)]. + shoe_size_table ; + [person(P,_)]. bayes guilty(P)::[y,n] ; - guilty_table ; - [people(P,_)]. + guilty_table ; + [person(P,_)]. bayes descn(P)::[fits,dont_fit], car_color(P), - hair_color(P), height(P), guilty(P) ; - descn_table ; - [people(P,_)]. + hair_color(P), height(P), guilty(P) ; + descn_table ; + [person(P,_)]. bayes witness(C), descn(Joe), descn(P2) ; - witness_table ; - [people(_,C), Joe=joe, P2=p2]. + witness_table ; + [person(_,C), Joe=joe, P2=p2]. cons_table( @@ -75,17 +80,17 @@ hair_color_table( /* high low */ /* dark */ [ 0.05, 0.1, /* bright */ 0.95, 0.9 ]). - + car_color_table( /* dark bright */ /* dark */ [ 0.9, 0.2, /* bright */ 0.1, 0.8 ]). - + height_table( /* male female */ /* tall */ [ 0.6, 0.4, /* short */ 0.4, 0.6 ]). - + shoe_size_table( /* tall short */ /* big */ [ 0.9, 0.1, @@ -99,7 +104,7 @@ descn_table( /* car_color(P), hair_color(P), height(P), guilty(P) */ /* fits */ [ 0.99, 0.5, 0.23, 0.88, 0.41, 0.3, 0.76, 0.87, /* fits */ 0.44, 0.43, 0.29, 0.72, 0.23, 0.91, 0.95, 0.92, -/* dont_fit */ 0.01, 0.5, 0.77, 0.12, 0.59, 0.7, 0.24, 0.13, +/* dont_fit */ 0.01, 0.5, 0.77, 0.12, 0.59, 0.7, 0.24, 0.13, /* dont_fit */ 0.56, 0.57, 0.71, 0.28, 0.77, 0.09, 0.05, 0.08 ]). witness_table( @@ -109,20 +114,20 @@ witness_table( runall(G, Wrapper) :- - findall(G, Wrapper, L), - execute_all(L). + findall(G, Wrapper, L), + execute_all(L). execute_all([]). execute_all(G.L) :- - call(G), - execute_all(L). + call(G), + execute_all(L). is_joe_guilty(Guilty) :- - witness(nyc, t), - runall(X, ev(X)), - guilty(joe, Guilty). + witness(nyc, t), + runall(X, ev(X)), + guilty(joe, Guilty). % ?- is_joe_guilty(Guilty). 
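The city example keeps the same query pattern after the people/2 to person/2 rename. A hedged usage sketch of an interactive session follows; the exact output shape depends on the solver selected by the set_solver/1 directive at the top of the file:

    % ?- [city].
    % ?- is_joe_guilty(Guilty).
    %
    % which, given the ev/1 facts above, is equivalent to posting the
    % evidence by hand:
    % ?- witness(nyc, t), descn(p2, fits), descn(p3, fits),
    %    descn(p4, fits), descn(p5, fits), guilty(joe, Guilty).
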
diff --git a/packages/CLPBN/examples/comp_workshops.pfl b/packages/CLPBN/examples/comp_workshops.pfl index b14a0fba2..1b020035b 100644 --- a/packages/CLPBN/examples/comp_workshops.pfl +++ b/packages/CLPBN/examples/comp_workshops.pfl @@ -1,3 +1,8 @@ +/* + Model from the paper "Lifted Probabilistic + Inference with Counting Formulas" +*/ + :- use_module(library(pfl)). :- set_solver(hve). @@ -10,31 +15,31 @@ %:- set_solver(lkc). %:- set_solver(lbp). -:- multifile c/2. +:- multifile reg/2. -c(p1,w1). -c(p1,w2). -c(p1,w3). -c(p2,w1). -c(p2,w2). -c(p2,w3). -c(p3,w1). -c(p3,w2). -c(p3,w3). -c(p4,w1). -c(p4,w2). -c(p4,w3). -c(p5,w1). -c(p5,w2). -c(p5,w3). +reg(p1,w1). +reg(p1,w2). +reg(p1,w3). +reg(p2,w1). +reg(p2,w2). +reg(p2,w3). +reg(p3,w1). +reg(p3,w2). +reg(p3,w3). +reg(p4,w1). +reg(p4,w2). +reg(p4,w3). +reg(p5,w1). +reg(p5,w2). +reg(p5,w3). markov attends(P), hot(W) ; - [0.2, 0.8, 0.8, 0.8] ; - [c(P,W)]. + [0.2, 0.8, 0.8, 0.8] ; + [reg(P,W)]. markov attends(P), series ; - [0.501, 0.499, 0.499, 0.499] ; - [c(P,_)]. + [0.501, 0.499, 0.499, 0.499] ; + [reg(P,_)]. -?- series(X). +% ?- series(X). diff --git a/packages/CLPBN/examples/learning/debug_school.yap b/packages/CLPBN/examples/learning/debug_school.yap index 3ee14fa96..907efc802 100644 --- a/packages/CLPBN/examples/learning/debug_school.yap +++ b/packages/CLPBN/examples/learning/debug_school.yap @@ -2,16 +2,17 @@ /* We do not consider aggregates yet. */ +:- use_module(library(clpbn/learning/em)). + :- [pos:train]. :- ['../../examples/School/parschema.pfl']. -:- use_module(library(clpbn/learning/em)). - -%:- clpbn:set_clpbn_flag(em_solver,gibbs). -%:- clpbn:set_clpbn_flag(em_solver,jt). - :- clpbn:set_clpbn_flag(em_solver,ve). -%:- clpbn:set_clpbn_flag(em_solver,bp). +:- set_em_solver(ve). +%:- set_em_solver(hve). +%:- set_em_solver(bdd). +%:- set_em_solver(bp). +%:- set_em_solver(cbp). debug_school :- graph(L), diff --git a/packages/CLPBN/examples/learning/prof_params.pfl b/packages/CLPBN/examples/learning/prof_params.pfl index d43e5b18d..32df5081b 100644 --- a/packages/CLPBN/examples/learning/prof_params.pfl +++ b/packages/CLPBN/examples/learning/prof_params.pfl @@ -4,12 +4,11 @@ :- use_module(library(clpbn/learning/em)). -%:- clpbn:set_clpbn_flag(em_solver,gibbs). -%:- clpbn:set_clpbn_flag(em_solver,jt). -%:- clpbn:set_clpbn_flag(em_solver,hve). -:- clpbn:set_clpbn_flag(em_solver,ve). -%:- clpbn:set_clpbn_flag(em_solver,bp). -%:- clpbn:set_clpbn_flag(em_solver,bdd). +:- set_em_solver(ve). +%:- set_em_solver(hve). +%:- set_em_solver(bdd). +%:- set_em_solver(bp). +%:- set_em_solver(cbp). professor(p0). professor(p1). diff --git a/packages/CLPBN/examples/learning/school_params.yap b/packages/CLPBN/examples/learning/school_params.yap index 61c535b5f..fd93edb27 100644 --- a/packages/CLPBN/examples/learning/school_params.yap +++ b/packages/CLPBN/examples/learning/school_params.yap @@ -2,16 +2,17 @@ /* We do not consider aggregates yet. */ +:- use_module(library(clpbn/learning/em)). + :- [pos:train]. :- ['../../examples/School/school_32']. -:- use_module(library(clpbn/learning/em)). - -%:- clpbn:set_clpbn_flag(em_solver,gibbs). -%:- clpbn:set_clpbn_flag(em_solver,jt). -% :- clpbn:set_clpbn_flag(em_solver,ve). -:- clpbn:set_clpbn_flag(em_solver,bp). +:- set_em_solver(ve). +%:- set_em_solver(hve). +%:- set_em_solver(bdd). +%:- set_em_solver(bp). +%:- set_em_solver(cbp). 
timed_main :- statistics(runtime, _), diff --git a/packages/CLPBN/examples/learning/sprinkler_params.yap b/packages/CLPBN/examples/learning/sprinkler_params.yap index 05e3ae3c9..730f7fd5c 100644 --- a/packages/CLPBN/examples/learning/sprinkler_params.yap +++ b/packages/CLPBN/examples/learning/sprinkler_params.yap @@ -4,12 +4,11 @@ :- use_module(library(clpbn/learning/em)). -%:- set_pfl_flag(em_solver,gibbs). -%:- set_pfl_flag(em_solver,jt). -%:- set_pfl_flag(em_solver,hve). -%:- set_pfl_flag(em_solver,bp). -%:- set_pfl_flag(em_solver,ve). -:- set_pfl_flag(em_solver,bdd). +:- set_em_solver(ve). +%:- set_em_solver(hve). +%:- set_em_solver(bdd). +%:- set_em_solver(bp). +%:- set_em_solver(cbp). :- dynamic id/1. diff --git a/packages/CLPBN/examples/social_domain1.pfl b/packages/CLPBN/examples/social_domain1.pfl deleted file mode 100644 index 8330ebe84..000000000 --- a/packages/CLPBN/examples/social_domain1.pfl +++ /dev/null @@ -1,38 +0,0 @@ -:- use_module(library(pfl)). - -:- set_solver(hve). -%:- set_solver(ve). -%:- set_solver(jt). -%:- set_solver(bdd). -%:- set_solver(bp). -%:- set_solver(cbp). -%:- set_solver(gibbs). -%:- set_solver(lve). -%:- set_solver(lkc). -%:- set_solver(lbp). - -:- multifile people/1. - -people @ 5. - -people(X,Y) :- - people(X), - people(Y), - X \== Y. - -markov smokes(X) ; [1.0, 4.0552]; [people(X)]. - -markov cancer(X) ; [1.0, 9.9742]; [people(X)]. - -markov friends(X,Y) ; [1.0, 99.48432] ; [people(X,Y)]. - -markov smokes(X), cancer(X) ; - [4.48169, 4.48169, 1.0, 4.48169] ; - [people(X)]. - -markov friends(X,Y), smokes(X), smokes(Y) ; - [3.004166, 3.004166, 3.004166, 3.004166, 3.004166, 1.0, 1.0, 3.004166] ; - [people(X,Y)]. - -% ?- friends(p1,p2,X). - diff --git a/packages/CLPBN/examples/social_domain2.pfl b/packages/CLPBN/examples/social_domain2.pfl deleted file mode 100644 index b030fc0a0..000000000 --- a/packages/CLPBN/examples/social_domain2.pfl +++ /dev/null @@ -1,38 +0,0 @@ -:- use_module(library(pfl)). - -:- set_solver(hve). -%:- set_solver(ve). -%:- set_solver(jt). -%:- set_solver(bdd). -%:- set_solver(bp). -%:- set_solver(cbp). -%:- set_solver(gibbs). -%:- set_solver(lve). -%:- set_solver(lkc). -%:- set_solver(lbp). - -:- multifile people/1. - -people @ 5. - -people(X,Y) :- - people(X), - people(Y). -% X \== Y. - -markov smokes(X) ; [1.0, 4.0552]; [people(X)]. - -markov asthma(X) ; [1.0, 9.9742] ; [people(X)]. - -markov friends(X,Y) ; [1.0, 99.48432] ; [people(X,Y)]. - -markov asthma(X), smokes(X) ; - [4.48169, 4.48169, 1.0, 4.48169] ; - [people(X)]. - -markov asthma(X), friends(X,Y), smokes(Y) ; - [3.004166, 3.004166, 3.004166, 3.004166, 3.004166, 1.0, 1.0, 3.004166] ; - [people(X,Y)]. - -% ?- smokes(p1,t), smokes(p2,t), friends(p1,p2,X). - diff --git a/packages/CLPBN/examples/social_network1.pfl b/packages/CLPBN/examples/social_network1.pfl new file mode 100644 index 000000000..e8f976e3a --- /dev/null +++ b/packages/CLPBN/examples/social_network1.pfl @@ -0,0 +1,44 @@ +/* + Model from the paper "Lifted First-Order + Belief Propagation" +*/ + +:- use_module(library(pfl)). + +:- set_solver(hve). +%:- set_solver(ve). +%:- set_solver(jt). +%:- set_solver(bdd). +%:- set_solver(bp). +%:- set_solver(cbp). +%:- set_solver(gibbs). +%:- set_solver(lve). +%:- set_solver(lkc). +%:- set_solver(lbp). + +:- multifile person/1. + +person @ 5. + +person(X,Y) :- + person(X), + person(Y) +% ,X \== Y + . + +markov smokes(X) ; [1.0, 4.0552]; [person(X)]. + +markov cancer(X) ; [1.0, 9.9742]; [person(X)]. + +markov friends(X,Y) ; [1.0, 99.48432] ; [person(X,Y)]. 
+ +markov smokes(X), cancer(X) ; + [4.48169, 4.48169, 1.0, 4.48169] ; + [person(X)]. + +markov friends(X,Y), smokes(X), smokes(Y) ; + [3.004166, 3.004166, 3.004166, 3.004166, 3.004166, 1.0, 1.0, 3.004166] ; + [person(X,Y)]. + +% ?- friends(p1,p2,X). + diff --git a/packages/CLPBN/examples/social_network2.pfl b/packages/CLPBN/examples/social_network2.pfl new file mode 100644 index 000000000..8dc823da4 --- /dev/null +++ b/packages/CLPBN/examples/social_network2.pfl @@ -0,0 +1,44 @@ +/* + Model from the paper "Lifted Inference Seen + from the Other Side: The Tractable Features" +*/ + +:- use_module(library(pfl)). + +:- set_solver(hve). +%:- set_solver(ve). +%:- set_solver(jt). +%:- set_solver(bdd). +%:- set_solver(bp). +%:- set_solver(cbp). +%:- set_solver(gibbs). +%:- set_solver(lve). +%:- set_solver(lkc). +%:- set_solver(lbp). + +:- multifile person/1. + +person @ 5. + +person(X,Y) :- + person(X), + person(Y) +% ,X \== Y + . + +markov smokes(X) ; [1.0, 4.0552]; [person(X)]. + +markov asthma(X) ; [1.0, 9.9742] ; [person(X)]. + +markov friends(X,Y) ; [1.0, 99.48432] ; [person(X,Y)]. + +markov asthma(X), smokes(X) ; + [4.48169, 4.48169, 1.0, 4.48169] ; + [person(X)]. + +markov asthma(X), friends(X,Y), smokes(Y) ; + [3.004166, 3.004166, 3.004166, 3.004166, 3.004166, 1.0, 1.0, 3.004166] ; + [person(X,Y)]. + +% ?- smokes(p1,t), smokes(p2,t), friends(p1,p2,X). + diff --git a/packages/CLPBN/examples/sprinkler.pfl b/packages/CLPBN/examples/sprinkler.pfl index daceb3786..a69c2158e 100644 --- a/packages/CLPBN/examples/sprinkler.pfl +++ b/packages/CLPBN/examples/sprinkler.pfl @@ -24,16 +24,16 @@ cloudy_table( 0.5 ]). sprinkler_table( - [ 0.5, 0.9, - 0.5, 0.1 ]). + [ 0.1, 0.5, + 0.9, 0.5 ]). rain_table( [ 0.8, 0.2, 0.2, 0.8 ]). wet_grass_table( - [ 1.0, 0.1, 0.1, 0.01, - 0.0, 0.9, 0.9, 0.99 ]). + [ 0.99, 0.9, 0.9, 0.0, + 0.01, 0.1, 0.1, 1.0 ]). % ?- wet_grass(X). diff --git a/packages/CLPBN/examples/workshop_attrs.pfl b/packages/CLPBN/examples/workshop_attrs.pfl index c5e9d08f1..248529980 100644 --- a/packages/CLPBN/examples/workshop_attrs.pfl +++ b/packages/CLPBN/examples/workshop_attrs.pfl @@ -1,3 +1,8 @@ +/* + Model from the paper "Lifted Probabilistic + Inference with Counting Formulas" +*/ + :- use_module(library(pfl)). :- set_solver(hve). @@ -11,23 +16,23 @@ %:- set_solver(lkc). %:- set_solver(lbp). -:- multifile people/1. +:- multifile person/1. -people @ 5. +person @ 5. -markov attends(P), attr1 ; [0.7, 0.3, 0.3, 0.3] ; [people(P)]. +markov attends(P), attr1 ; [0.7, 0.3, 0.3, 0.3] ; [person(P)]. -markov attends(P), attr2 ; [0.7, 0.3, 0.3, 0.3] ; [people(P)]. +markov attends(P), attr2 ; [0.7, 0.3, 0.3, 0.3] ; [person(P)]. -markov attends(P), attr3 ; [0.7, 0.3, 0.3, 0.3] ; [people(P)]. +markov attends(P), attr3 ; [0.7, 0.3, 0.3, 0.3] ; [person(P)]. -markov attends(P), attr4 ; [0.7, 0.3, 0.3, 0.3] ; [people(P)]. +markov attends(P), attr4 ; [0.7, 0.3, 0.3, 0.3] ; [person(P)]. -markov attends(P), attr5 ; [0.7, 0.3, 0.3, 0.3] ; [people(P)]. +markov attends(P), attr5 ; [0.7, 0.3, 0.3, 0.3] ; [person(P)]. -markov attends(P), attr6 ; [0.7, 0.3, 0.3, 0.3] ; [people(P)]. +markov attends(P), attr6 ; [0.7, 0.3, 0.3, 0.3] ; [person(P)]. -markov attends(P), series ; [0.501, 0.499, 0.499, 0.499] ; [people(P)]. +markov attends(P), series ; [0.501, 0.499, 0.499, 0.499] ; [person(P)]. % ?- series(X). 
diff --git a/packages/CLPBN/horus/BayesBall.cpp b/packages/CLPBN/horus/BayesBall.cpp index 0fac25056..da0c73ff5 100644 --- a/packages/CLPBN/horus/BayesBall.cpp +++ b/packages/CLPBN/horus/BayesBall.cpp @@ -1,12 +1,6 @@ -#include #include -#include -#include -#include - #include "BayesBall.h" -#include "Util.h" FactorGraph* diff --git a/packages/CLPBN/horus/BayesBall.h b/packages/CLPBN/horus/BayesBall.h index 4efbd2ed1..2057b6f01 100644 --- a/packages/CLPBN/horus/BayesBall.h +++ b/packages/CLPBN/horus/BayesBall.h @@ -4,7 +4,6 @@ #include #include #include -#include #include "FactorGraph.h" #include "BayesBallGraph.h" @@ -15,8 +14,8 @@ using namespace std; struct ScheduleInfo { - ScheduleInfo (BBNode* n, bool vfp, bool vfc) : - node(n), visitedFromParent(vfp), visitedFromChild(vfc) { } + ScheduleInfo (BBNode* n, bool vfp, bool vfc) + : node(n), visitedFromParent(vfp), visitedFromChild(vfc) { } BBNode* node; bool visitedFromParent; @@ -30,7 +29,7 @@ typedef queue> Scheduling; class BayesBall { public: - BayesBall (FactorGraph& fg) + BayesBall (FactorGraph& fg) : fg_(fg) , dag_(fg.getStructure()) { dag_.clear(); @@ -63,7 +62,7 @@ inline void BayesBall::scheduleParents (const BBNode* n, Scheduling& sch) const { const vector& ps = n->parents(); - for (vector::const_iterator it = ps.begin(); + for (vector::const_iterator it = ps.begin(); it != ps.end(); ++it) { sch.push (ScheduleInfo (*it, false, true)); } diff --git a/packages/CLPBN/horus/BayesBallGraph.cpp b/packages/CLPBN/horus/BayesBallGraph.cpp index 626d940d9..60db22bfe 100644 --- a/packages/CLPBN/horus/BayesBallGraph.cpp +++ b/packages/CLPBN/horus/BayesBallGraph.cpp @@ -2,8 +2,8 @@ #include #include -#include #include +#include #include "BayesBallGraph.h" #include "Util.h" @@ -79,9 +79,8 @@ BayesBallGraph::exportToGraphViz (const char* fileName) { ofstream out (fileName); if (!out.is_open()) { - cerr << "error: cannot open file to write at " ; - cerr << "BayesBallGraph::exportToDotFile()" << endl; - abort(); + cerr << "Error: couldn't open file '" << fileName << "'." 
; + return; } out << "digraph {" << endl; out << "ranksep=1" << endl; diff --git a/packages/CLPBN/horus/BayesBallGraph.h b/packages/CLPBN/horus/BayesBallGraph.h index 8359b598f..eb44f0ae8 100644 --- a/packages/CLPBN/horus/BayesBallGraph.h +++ b/packages/CLPBN/horus/BayesBallGraph.h @@ -2,9 +2,7 @@ #define HORUS_BAYESBALLGRAPH_H #include -#include -#include -#include +#include #include "Var.h" #include "Horus.h" @@ -14,7 +12,7 @@ using namespace std; class BBNode : public Var { public: - BBNode (Var* v) : Var (v) , visited_(false), + BBNode (Var* v) : Var (v), visited_(false), markedOnTop_(false), markedOnBottom_(false) { } const vector& childs (void) const { return childs_; } @@ -30,15 +28,15 @@ class BBNode : public Var void addChild (BBNode* c) { childs_.push_back (c); } bool isVisited (void) const { return visited_; } - + void setAsVisited (void) { visited_ = true; } bool isMarkedOnTop (void) const { return markedOnTop_; } - + void markOnTop (void) { markedOnTop_ = true; } bool isMarkedOnBottom (void) const { return markedOnBottom_; } - + void markOnBottom (void) { markedOnBottom_ = true; } void clear (void) { visited_ = markedOnTop_ = markedOnBottom_ = false; } @@ -63,7 +61,7 @@ class BayesBallGraph void addEdge (VarId vid1, VarId vid2); const BBNode* getNode (VarId vid) const; - + BBNode* getNode (VarId vid); bool empty (void) const { return nodes_.empty(); } diff --git a/packages/CLPBN/horus/BeliefProp.cpp b/packages/CLPBN/horus/BeliefProp.cpp index d96384cfd..5ec3aafd5 100644 --- a/packages/CLPBN/horus/BeliefProp.cpp +++ b/packages/CLPBN/horus/BeliefProp.cpp @@ -1,17 +1,19 @@ #include -#include #include #include #include "BeliefProp.h" -#include "FactorGraph.h" -#include "Factor.h" #include "Indexer.h" #include "Horus.h" +double BeliefProp::accuracy_ = 0.0001; +unsigned BeliefProp::maxIter_ = 1000; +MsgSchedule BeliefProp::schedule_ = MsgSchedule::SEQ_FIXED; + + BeliefProp::BeliefProp (const FactorGraph& fg) : GroundSolver (fg) { runned_ = false; @@ -50,16 +52,15 @@ BeliefProp::printSolverFlags (void) const { stringstream ss; ss << "belief propagation [" ; - ss << "schedule=" ; - typedef BpOptions::Schedule Sch; - switch (BpOptions::schedule) { - case Sch::SEQ_FIXED: ss << "seq_fixed"; break; - case Sch::SEQ_RANDOM: ss << "seq_random"; break; - case Sch::PARALLEL: ss << "parallel"; break; - case Sch::MAX_RESIDUAL: ss << "max_residual"; break; + ss << "bp_msg_schedule=" ; + switch (schedule_) { + case MsgSchedule::SEQ_FIXED: ss << "seq_fixed"; break; + case MsgSchedule::SEQ_RANDOM: ss << "seq_random"; break; + case MsgSchedule::PARALLEL: ss << "parallel"; break; + case MsgSchedule::MAX_RESIDUAL: ss << "max_residual"; break; } - ss << ",max_iter=" << Util::toString (BpOptions::maxIter); - ss << ",accuracy=" << Util::toString (BpOptions::accuracy); + ss << ",bp_max_iter=" << Util::toString (maxIter_); + ss << ",bp_accuracy=" << Util::toString (accuracy_); ss << ",log_domain=" << Util::toString (Globals::logDomain); ss << "]" ; cout << ss.str() << endl; @@ -146,7 +147,7 @@ BeliefProp::getFactorJoint ( if (Globals::logDomain) { Util::exp (jointDist); } - return jointDist; + return jointDist; } @@ -156,21 +157,21 @@ BeliefProp::runSolver (void) { initializeSolver(); nIters_ = 0; - while (!converged() && nIters_ < BpOptions::maxIter) { + while (!converged() && nIters_ < maxIter_) { nIters_ ++; if (Globals::verbosity > 1) { Util::printHeader (string ("Iteration ") + Util::toString (nIters_)); } - switch (BpOptions::schedule) { - case BpOptions::Schedule::SEQ_RANDOM: + switch (schedule_) { + 
case MsgSchedule::SEQ_RANDOM: std::random_shuffle (links_.begin(), links_.end()); // no break - case BpOptions::Schedule::SEQ_FIXED: + case MsgSchedule::SEQ_FIXED: for (size_t i = 0; i < links_.size(); i++) { calculateAndUpdateMessage (links_[i]); } break; - case BpOptions::Schedule::PARALLEL: + case MsgSchedule::PARALLEL: for (size_t i = 0; i < links_.size(); i++) { calculateMessage (links_[i]); } @@ -178,14 +179,14 @@ BeliefProp::runSolver (void) updateMessage(links_[i]); } break; - case BpOptions::Schedule::MAX_RESIDUAL: + case MsgSchedule::MAX_RESIDUAL: maxResidualSchedule(); break; } } if (Globals::verbosity > 0) { - if (nIters_ < BpOptions::maxIter) { - cout << "Belief propagation converged in " ; + if (nIters_ < maxIter_) { + cout << "Belief propagation converged in " ; cout << nIters_ << " iterations" << endl; } else { cout << "The maximum number of iterations was hit, terminating..." ; @@ -236,7 +237,7 @@ BeliefProp::maxResidualSchedule (void) SortedOrder::iterator it = sortedOrder_.begin(); BpLink* link = *it; - if (link->residual() < BpOptions::accuracy) { + if (link->residual() < accuracy_) { return; } updateMessage (link); @@ -410,7 +411,7 @@ BeliefProp::initializeSolver (void) bool BeliefProp::converged (void) { - if (links_.size() == 0) { + if (links_.empty()) { return true; } if (nIters_ == 0) { @@ -426,9 +427,9 @@ BeliefProp::converged (void) return false; } bool converged = true; - if (BpOptions::schedule == BpOptions::Schedule::MAX_RESIDUAL) { + if (schedule_ == MsgSchedule::MAX_RESIDUAL) { double maxResidual = (*(sortedOrder_.begin()))->residual(); - if (maxResidual > BpOptions::accuracy) { + if (maxResidual > accuracy_) { converged = false; } else { converged = true; @@ -439,7 +440,7 @@ BeliefProp::converged (void) if (Globals::verbosity > 1) { cout << links_[i]->toString() + " residual = " << residual << endl; } - if (residual > BpOptions::accuracy) { + if (residual > accuracy_) { converged = false; if (Globals::verbosity < 2) { break; @@ -459,7 +460,7 @@ void BeliefProp::printLinkInformation (void) const { for (size_t i = 0; i < links_.size(); i++) { - BpLink* l = links_[i]; + BpLink* l = links_[i]; cout << l->toString() << ":" << endl; cout << " curr msg = " ; cout << l->message() << endl; diff --git a/packages/CLPBN/horus/BeliefProp.h b/packages/CLPBN/horus/BeliefProp.h index 6c1d5c46b..5fad0c496 100644 --- a/packages/CLPBN/horus/BeliefProp.h +++ b/packages/CLPBN/horus/BeliefProp.h @@ -3,21 +3,29 @@ #include #include + #include #include "GroundSolver.h" -#include "Factor.h" #include "FactorGraph.h" -#include "Util.h" + using namespace std; +enum MsgSchedule { + SEQ_FIXED, + SEQ_RANDOM, + PARALLEL, + MAX_RESIDUAL +}; + + class BpLink { public: BpLink (FacNode* fn, VarNode* vn) - { + { fac_ = fn; var_ = vn; v1_.resize (vn->range(), LogAware::log (1.0 / vn->range())); @@ -43,10 +51,10 @@ class BpLink void updateResidual (void) { - residual_ = LogAware::getMaxNorm (v1_,v2_); + residual_ = LogAware::getMaxNorm (v1_, v2_); } - virtual void updateMessage (void) + virtual void updateMessage (void) { swap (currMsg_, nextMsg_); } @@ -59,7 +67,7 @@ class BpLink ss << var_->label(); return ss.str(); } - + protected: FacNode* fac_; VarNode* var_; @@ -68,6 +76,9 @@ class BpLink Params* currMsg_; Params* nextMsg_; double residual_; + + private: + DISALLOW_COPY_AND_ASSIGN (BpLink); }; typedef vector BpLinks; @@ -76,10 +87,12 @@ typedef vector BpLinks; class SPNodeInfo { public: + SPNodeInfo (void) { } void addBpLink (BpLink* link) { links_.push_back (link); } const BpLinks& 
getLinks (void) { return links_; } private: BpLinks links_; + DISALLOW_COPY_AND_ASSIGN (SPNodeInfo); }; @@ -97,23 +110,21 @@ class BeliefProp : public GroundSolver virtual Params getPosterioriOf (VarId); virtual Params getJointDistributionOf (const VarIds&); - - protected: - void runSolver (void); - virtual void createLinks (void); - - virtual void maxResidualSchedule (void); - - virtual void calcFactorToVarMsg (BpLink*); - - virtual Params getVarToFactorMsg (const BpLink*) const; - - virtual Params getJointByConditioning (const VarIds&) const; - - public: Params getFactorJoint (FacNode* fn, const VarIds&); + static double accuracy (void) { return accuracy_; } + + static void setAccuracy (double acc) { accuracy_ = acc; } + + static unsigned maxIterations (void) { return maxIter_; } + + static void setMaxIterations (unsigned mi) { maxIter_ = mi; } + + static MsgSchedule msgSchedule (void) { return schedule_; } + + static void setMsgSchedule (MsgSchedule sch) { schedule_ = sch; } + protected: SPNodeInfo* ninf (const VarNode* var) const { @@ -164,6 +175,18 @@ class BeliefProp : public GroundSolver } }; + void runSolver (void); + + virtual void createLinks (void); + + virtual void maxResidualSchedule (void); + + virtual void calcFactorToVarMsg (BpLink*); + + virtual Params getVarToFactorMsg (const BpLink*) const; + + virtual Params getJointByConditioning (const VarIds&) const; + BpLinks links_; unsigned nIters_; vector varsI_; @@ -176,12 +199,18 @@ class BeliefProp : public GroundSolver typedef unordered_map BpLinkMap; BpLinkMap linkMap_; + static double accuracy_; + static unsigned maxIter_; + static MsgSchedule schedule_; + private: void initializeSolver (void); bool converged (void); virtual void printLinkInformation (void) const; + + DISALLOW_COPY_AND_ASSIGN (BeliefProp); }; #endif // HORUS_BELIEFPROP_H diff --git a/packages/CLPBN/horus/ConstraintTree.cpp b/packages/CLPBN/horus/ConstraintTree.cpp index bfabc982c..599d28f37 100644 --- a/packages/CLPBN/horus/ConstraintTree.cpp +++ b/packages/CLPBN/horus/ConstraintTree.cpp @@ -120,7 +120,7 @@ CTNode::copySubtree (const CTNode* root1) chIt != n1->childs().end(); ++ chIt) { CTNode* chCopy = new CTNode (**chIt); n2->childs().insert_sorted (chCopy); - if ((*chIt)->nrChilds() != 0) { + if ((*chIt)->nrChilds() > 0) { stack.push_back (StackPair (*chIt, chCopy)); } } @@ -190,7 +190,7 @@ ConstraintTree::ConstraintTree ( ConstraintTree::ConstraintTree (vector> names) { assert (names.empty() == false); - assert (names.front().empty() == false); + assert (names.front().empty() == false); unsigned nrLvs = names[0].size(); for (size_t i = 0; i < nrLvs; i++) { logVars_.push_back (LogVar (i)); @@ -201,7 +201,7 @@ ConstraintTree::ConstraintTree (vector> names) Tuple t; for (size_t j = 0; j < names[i].size(); j++) { assert (names[i].size() == nrLvs); - t.push_back (LiftedUtils::getSymbol (names[i][j])); + t.push_back (LiftedUtils::getSymbol (names[i][j])); } addTuple (t); } @@ -266,7 +266,7 @@ ConstraintTree::moveToTop (const LogVars& lvs) assert (pos != logVars_.size()); for (size_t j = pos; j-- > i; ) { swapLogVar (logVars_[j]); - } + } } } @@ -318,7 +318,7 @@ ConstraintTree::join (ConstraintTree* ct, bool oneTwoOne) } else { moveToTop (intersect.elements()); ct->moveToTop (intersect.elements()); - + Tuples tuples; CTNodes appendNodes; getTuples (ct->root(), Tuples(), intersect.size(), @@ -455,7 +455,7 @@ ConstraintTree::singletons (void) if (isSingleton (logVars_[i])) { singletons.insert (logVars_[i]); } - } + } return singletons; } @@ -521,13 +521,12 @@ 
ConstraintTree::exportToGraphViz ( { ofstream out (fileName); if (!out.is_open()) { - cerr << "error: cannot open file to write at " ; - cerr << "ConstraintTree::exportToDotFile()" << endl; - abort(); + cerr << "Error: couldn't open file '" << fileName << "'." ; + return; } out << "digraph {" << endl; ConstraintTree copy (*this); - // copy.moveToTop (copy.logVarSet_.elements()); + copy.moveToTop (copy.logVarSet_.elements()); CTNodes nodes = getNodesBelow (copy.root_); out << "\"" << copy.root_ << "\"" << " [label=\"R\"]" << endl; for (CTNodes::const_iterator it = ++ nodes.begin(); @@ -586,13 +585,13 @@ ConstraintTree::isCountNormalized (const LogVarSet& Ys) if (countTuples (*it) != count) { return false; } - } + } return true; } -unsigned +unsigned ConstraintTree::getConditionalCount (const LogVarSet& Ys) { assert (isCountNormalized (Ys)); @@ -793,7 +792,7 @@ ConstraintTree::jointCountNormalize ( } for (size_t i = 0; i < normCts1.size(); i++) { - unsigned j; + unsigned j; for (j = 0; counts1[i] + counts2[j] != N; j++) ; // cout << "joint-count(" << counts1[i] ; // cout << "," << counts2[j] << ")" << endl; @@ -814,10 +813,10 @@ ConstraintTree::jointCountNormalize ( cts[i]->join (exclCt); } - if (excl1 != 0) { + if (excl1) { cts.push_back (excl1); } - if (excl2 != 0) { + if (excl2) { cts.push_back (excl2); } @@ -883,7 +882,7 @@ ConstraintTree::ground (LogVar X) void -ConstraintTree::copyLogVar (LogVar X_1, LogVar X_2) +ConstraintTree::cloneLogVar (LogVar X_1, LogVar X_2) { moveToBottom ({X_1}); CTNodes leafs = getNodesAtLevel (logVars_.size()); @@ -948,7 +947,7 @@ ConstraintTree::getNodesBelow (CTNode* fromHere) const CTNodes -ConstraintTree::getNodesAtLevel (unsigned level) const +ConstraintTree::getNodesAtLevel (unsigned level) const { assert (level <= logVars_.size()); if (level == 0) { @@ -1058,7 +1057,7 @@ ConstraintTree::join ( } else { tupleFounded = join (*it, tuple, currIdx + 1, appendNode); } - } + } return tupleFounded; } @@ -1066,14 +1065,14 @@ ConstraintTree::join ( void ConstraintTree::getTuples ( - CTNode* n, + CTNode* n, Tuples currTuples, unsigned stopLevel, Tuples& tuplesCollected, CTNodes& continuationNodes) const { if (n->isRoot() == false) { - if (currTuples.size() == 0) { + if (currTuples.empty()) { currTuples.push_back ({ n->symbol()}); } else { for (size_t i = 0; i < currTuples.size(); i++) { @@ -1148,7 +1147,7 @@ ConstraintTree::split ( CTNode* n2, CTChilds& commChilds, CTChilds& exclChilds, - unsigned stopLevel) + unsigned stopLevel) { CTChilds& childs1 = n1->childs(); for (CTChilds::const_iterator chIt1 = childs1.begin(); diff --git a/packages/CLPBN/horus/ConstraintTree.h b/packages/CLPBN/horus/ConstraintTree.h index 0b48c3650..2c0c09464 100644 --- a/packages/CLPBN/horus/ConstraintTree.h +++ b/packages/CLPBN/horus/ConstraintTree.h @@ -23,7 +23,6 @@ typedef vector ConstraintTrees; class CTNode { public: - struct CompareSymbol { bool operator() (const CTNode* n1, const CTNode* n2) const @@ -33,11 +32,9 @@ class CTNode }; private: - typedef TinySet CTChilds_; public: - CTNode (const CTNode& n, const CTChilds_& chs = CTChilds_()) : symbol_(n.symbol()), childs_(chs), level_(n.level()) { } @@ -52,12 +49,10 @@ class CTNode void setSymbol (const Symbol s) { symbol_ = s; } - public: - CTChilds_& childs (void) { return childs_; } const CTChilds_& childs (void) const { return childs_; } - + size_t nrChilds (void) const { return childs_.size(); } bool isRoot (void) const { return level_ == 0; } @@ -89,9 +84,11 @@ class CTNode private: void updateChildLevels (CTNode*, 
unsigned); - Symbol symbol_; - CTChilds_ childs_; - unsigned level_; + Symbol symbol_; + CTChilds_ childs_; + unsigned level_; + + DISALLOW_ASSIGN (CTNode); }; ostream& operator<< (ostream &out, const CTNode&); @@ -108,7 +105,7 @@ class ConstraintTree ConstraintTree (const LogVars&); ConstraintTree (const LogVars&, const Tuples&); - + ConstraintTree (vector> names); ConstraintTree (const ConstraintTree&); @@ -121,7 +118,7 @@ class ConstraintTree ~ConstraintTree (void); CTNode* root (void) const { return root_; } - + bool empty (void) const { return root_->childs().empty(); } const LogVars& logVars (void) const @@ -135,17 +132,17 @@ class ConstraintTree assert (LogVarSet (logVars_) == logVarSet_); return logVarSet_; } - + size_t nrLogVars (void) const { return logVars_.size(); assert (LogVarSet (logVars_) == logVarSet_); } - + void addTuple (const Tuple&); - + bool containsTuple (const Tuple&); - + void moveToTop (const LogVars&); void moveToBottom (const LogVars&); @@ -159,7 +156,7 @@ class ConstraintTree void applySubstitution (const Substitution&); void project (const LogVarSet&); - + ConstraintTree projectedCopy (const LogVarSet&); void remove (const LogVarSet&); @@ -200,10 +197,10 @@ class ConstraintTree ConstraintTrees ground (LogVar); - void copyLogVar (LogVar,LogVar); - + void cloneLogVar (LogVar, LogVar); + ConstraintTree& operator= (const ConstraintTree& ct); - + private: unsigned countTuples (const CTNode*) const; diff --git a/packages/CLPBN/horus/CountingBp.cpp b/packages/CLPBN/horus/CountingBp.cpp index d248c602c..4dc1b249e 100644 --- a/packages/CLPBN/horus/CountingBp.cpp +++ b/packages/CLPBN/horus/CountingBp.cpp @@ -2,7 +2,7 @@ #include "WeightedBp.h" -bool CountingBp::checkForIdenticalFactors = true; +bool CountingBp::fif_ = true; CountingBp::CountingBp (const FactorGraph& fg) @@ -36,19 +36,17 @@ CountingBp::printSolverFlags (void) const { stringstream ss; ss << "counting bp [" ; - ss << "schedule=" ; - typedef BpOptions::Schedule Sch; - switch (BpOptions::schedule) { - case Sch::SEQ_FIXED: ss << "seq_fixed"; break; - case Sch::SEQ_RANDOM: ss << "seq_random"; break; - case Sch::PARALLEL: ss << "parallel"; break; - case Sch::MAX_RESIDUAL: ss << "max_residual"; break; + ss << "bp_msg_schedule=" ; + switch (WeightedBp::msgSchedule()) { + case MsgSchedule::SEQ_FIXED: ss << "seq_fixed"; break; + case MsgSchedule::SEQ_RANDOM: ss << "seq_random"; break; + case MsgSchedule::PARALLEL: ss << "parallel"; break; + case MsgSchedule::MAX_RESIDUAL: ss << "max_residual"; break; } - ss << ",max_iter=" << BpOptions::maxIter; - ss << ",accuracy=" << BpOptions::accuracy; + ss << ",bp_max_iter=" << WeightedBp::maxIterations(); + ss << ",bp_accuracy=" << WeightedBp::accuracy(); ss << ",log_domain=" << Util::toString (Globals::logDomain); - ss << ",chkif=" << - Util::toString (CountingBp::checkForIdenticalFactors); + ss << ",fif=" << Util::toString (CountingBp::fif_); ss << "]" ; cout << ss.str() << endl; } @@ -82,7 +80,7 @@ CountingBp::solveQuery (VarIds queryVids) reprArgs.push_back (getRepresentative (queryVids[i])); } FacNode* reprFac = getRepresentative (facNodes[idx]); - assert (reprFac != 0); + assert (reprFac); res = solver_->getFactorJoint (reprFac, reprArgs); } } @@ -95,8 +93,7 @@ void CountingBp::findIdenticalFactors() { const FacNodes& facNodes = fg.facNodes(); - if (checkForIdenticalFactors == false || - facNodes.size() == 1) { + if (fif_ == false || facNodes.size() == 1) { return; } for (size_t i = 0; i < facNodes.size(); i++) { @@ -139,7 +136,7 @@ CountingBp::setInitialColors (void) 
VarColorMap::iterator it = colorMap.find (range); if (it == colorMap.end()) { it = colorMap.insert (make_pair ( - range, Colors (range + 1, -1))).first; + range, Colors (range + 1, -1))).first; } unsigned idx = varNodes[i]->hasEvidence() ? varNodes[i]->getEvidence() diff --git a/packages/CLPBN/horus/CountingBp.h b/packages/CLPBN/horus/CountingBp.h index a553e9307..605fa8b22 100644 --- a/packages/CLPBN/horus/CountingBp.h +++ b/packages/CLPBN/horus/CountingBp.h @@ -5,7 +5,6 @@ #include "GroundSolver.h" #include "FactorGraph.h" -#include "Util.h" #include "Horus.h" class VarCluster; @@ -76,6 +75,8 @@ class VarCluster private: VarNodes members_; VarNode* repr_; + + DISALLOW_COPY_AND_ASSIGN (VarCluster); }; @@ -88,17 +89,19 @@ class FacCluster const FacNode* first (void) const { return members_.front(); } const FacNodes& members (void) const { return members_; } - + FacNode* representative (void) const { return repr_; } void setRepresentative (FacNode* fn) { repr_ = fn; } VarClusters& varClusters (void) { return varClusters_; } - + private: FacNodes members_; FacNode* repr_; VarClusters varClusters_; + + DISALLOW_COPY_AND_ASSIGN (FacCluster); }; @@ -112,9 +115,9 @@ class CountingBp : public GroundSolver void printSolverFlags (void) const; Params solveQuery (VarIds); - - static bool checkForIdenticalFactors; - + + static void setFindIdenticalFactorsFlag (bool fif) { fif_ = fif; } + private: Color getNewColor (void) { @@ -167,7 +170,6 @@ class CountingBp : public GroundSolver unsigned getWeight (const FacCluster*, const VarCluster*, size_t index) const; - Color freeColor_; Colors varColors_; Colors facColors_; @@ -176,6 +178,10 @@ class CountingBp : public GroundSolver VarClusterMap varClusterMap_; const FactorGraph* compressedFg_; WeightedBp* solver_; + + static bool fif_; + + DISALLOW_COPY_AND_ASSIGN (CountingBp); }; #endif // HORUS_COUNTINGBP_H diff --git a/packages/CLPBN/horus/ElimGraph.cpp b/packages/CLPBN/horus/ElimGraph.cpp index 50870d1b6..3a808a8c2 100644 --- a/packages/CLPBN/horus/ElimGraph.cpp +++ b/packages/CLPBN/horus/ElimGraph.cpp @@ -1,39 +1,34 @@ -#include - #include #include "ElimGraph.h" -ElimHeuristic ElimGraph::elimHeuristic = MIN_NEIGHBORS; +ElimHeuristic ElimGraph::elimHeuristic_ = MIN_NEIGHBORS; ElimGraph::ElimGraph (const vector& factors) { for (size_t i = 0; i < factors.size(); i++) { - if (factors[i] == 0) { // if contained just one var with evidence - continue; - } - const VarIds& vids = factors[i]->arguments(); - for (size_t j = 0; j < vids.size() - 1; j++) { - EgNode* n1 = getEgNode (vids[j]); - if (n1 == 0) { - n1 = new EgNode (vids[j], factors[i]->range (j)); - addNode (n1); - } - for (size_t k = j + 1; k < vids.size(); k++) { - EgNode* n2 = getEgNode (vids[k]); - if (n2 == 0) { - n2 = new EgNode (vids[k], factors[i]->range (k)); - addNode (n2); + if (factors[i]) { + const VarIds& args = factors[i]->arguments(); + for (size_t j = 0; j < args.size() - 1; j++) { + EgNode* n1 = getEgNode (args[j]); + if (!n1) { + n1 = new EgNode (args[j], factors[i]->range (j)); + addNode (n1); + } + for (size_t k = j + 1; k < args.size(); k++) { + EgNode* n2 = getEgNode (args[k]); + if (!n2) { + n2 = new EgNode (args[k], factors[i]->range (k)); + addNode (n2); + } + if (!neighbors (n1, n2)) { + addEdge (n1, n2); + } } - if (neighbors (n1, n2) == false) { - addEdge (n1, n2); - } } - } - if (vids.size() == 1) { - if (getEgNode (vids[0]) == 0) { - addNode (new EgNode (vids[0], factors[i]->range (0))); + if (args.size() == 1 && !getEgNode (args[0])) { + addNode (new EgNode (args[0], 
factors[i]->range (0))); } } } @@ -44,23 +39,23 @@ ElimGraph::ElimGraph (const vector& factors) ElimGraph::~ElimGraph (void) { for (size_t i = 0; i < nodes_.size(); i++) { - delete nodes_[i]; + delete nodes_[i]; } } VarIds -ElimGraph::getEliminatingOrder (const VarIds& exclude) +ElimGraph::getEliminatingOrder (const VarIds& excludedVids) { VarIds elimOrder; unmarked_.reserve (nodes_.size()); for (size_t i = 0; i < nodes_.size(); i++) { - if (Util::contains (exclude, nodes_[i]->varId()) == false) { + if (Util::contains (excludedVids, nodes_[i]->varId()) == false) { unmarked_.insert (nodes_[i]); } } - size_t nrVarsToEliminate = nodes_.size() - exclude.size(); + size_t nrVarsToEliminate = nodes_.size() - excludedVids.size(); for (size_t i = 0; i < nrVarsToEliminate; i++) { EgNode* node = getLowestCostNode(); unmarked_.remove (node); @@ -86,7 +81,7 @@ ElimGraph::print (void) const cout << " " << neighs[j]->label(); } cout << endl; - } + } } @@ -99,30 +94,26 @@ ElimGraph::exportToGraphViz ( { ofstream out (fileName); if (!out.is_open()) { - cerr << "error: cannot open file to write at " ; - cerr << "Markov::exportToDotFile()" << endl; - abort(); + cerr << "Error: couldn't open file '" << fileName << "'." ; + return; } - out << "strict graph {" << endl; - for (size_t i = 0; i < nodes_.size(); i++) { - if (showNeighborless || nodes_[i]->neighbors().size() != 0) { + if (showNeighborless || nodes_[i]->neighbors().empty() == false) { out << '"' << nodes_[i]->label() << '"' << endl; } } - for (size_t i = 0; i < highlightVarIds.size(); i++) { EgNode* node =getEgNode (highlightVarIds[i]); if (node) { out << '"' << node->label() << '"' ; out << " [shape=box3d]" << endl; } else { - cout << "error: invalid variable id: " << highlightVarIds[i] << endl; - abort(); + cerr << "Error: invalid variable id: " << highlightVarIds[i] << "." ; + cerr << endl; + exit (EXIT_FAILURE); } } - for (size_t i = 0; i < nodes_.size(); i++) { EGNeighs neighs = nodes_[i]->neighbors(); for (size_t j = 0; j < neighs.size(); j++) { @@ -130,7 +121,6 @@ ElimGraph::exportToGraphViz ( out << '"' << neighs[j]->label() << '"' << endl; } } - out << "}" << endl; out.close(); } @@ -142,12 +132,12 @@ ElimGraph::getEliminationOrder ( const Factors& factors, VarIds excludedVids) { - if (elimHeuristic == ElimHeuristic::SEQUENTIAL) { + if (elimHeuristic_ == ElimHeuristic::SEQUENTIAL) { VarIds allVids; Factors::const_iterator first = factors.begin(); Factors::const_iterator end = factors.end(); for (; first != end; ++first) { - Util::addToVector (allVids, (*first)->arguments()); + Util::addToVector (allVids, (*first)->arguments()); } TinySet elimOrder (allVids); elimOrder -= TinySet (excludedVids); @@ -183,9 +173,9 @@ EgNode* ElimGraph::getLowestCostNode (void) const { EgNode* bestNode = 0; - unsigned minCost = std::numeric_limits::max(); + unsigned minCost = Util::maxUnsigned(); EGNeighs::const_iterator it; - switch (elimHeuristic) { + switch (elimHeuristic_) { case MIN_NEIGHBORS: { for (it = unmarked_.begin(); it != unmarked_.end(); ++ it) { unsigned cost = getNeighborsCost (*it); @@ -238,7 +228,7 @@ ElimGraph::connectAllNeighbors (const EgNode* n) if (neighs.size() > 0) { for (size_t i = 0; i < neighs.size() - 1; i++) { for (size_t j = i + 1; j < neighs.size(); j++) { - if ( ! 
neighbors (neighs[i], neighs[j])) { + if (!neighbors (neighs[i], neighs[j])) { addEdge (neighs[i], neighs[j]); } } diff --git a/packages/CLPBN/horus/ElimGraph.h b/packages/CLPBN/horus/ElimGraph.h index 2f4d60d15..a636d316d 100644 --- a/packages/CLPBN/horus/ElimGraph.h +++ b/packages/CLPBN/horus/ElimGraph.h @@ -7,10 +7,9 @@ #include "TinySet.h" #include "Horus.h" - using namespace std; -enum ElimHeuristic +enum ElimHeuristic { SEQUENTIAL, MIN_NEIGHBORS, @@ -49,7 +48,7 @@ class ElimGraph ElimGraph (const Factors&); ~ElimGraph (void); - + VarIds getEliminatingOrder (const VarIds&); void print (void) const; @@ -59,10 +58,11 @@ class ElimGraph static VarIds getEliminationOrder (const Factors&, VarIds); - static ElimHeuristic elimHeuristic; + static ElimHeuristic elimHeuristic (void) { return elimHeuristic_; } + + static void setElimHeuristic (ElimHeuristic eh) { elimHeuristic_ = eh; } private: - void addEdge (EgNode* n1, EgNode* n2) { assert (n1 != n2); @@ -133,6 +133,10 @@ class ElimGraph vector nodes_; TinySet unmarked_; unordered_map varMap_; + + static ElimHeuristic elimHeuristic_; + + DISALLOW_COPY_AND_ASSIGN (ElimGraph); }; #endif // HORUS_ELIMGRAPH_H diff --git a/packages/CLPBN/horus/Factor.cpp b/packages/CLPBN/horus/Factor.cpp index 9b8ad0be7..d0acade1f 100644 --- a/packages/CLPBN/horus/Factor.cpp +++ b/packages/CLPBN/horus/Factor.cpp @@ -7,7 +7,7 @@ #include #include "Factor.h" -#include "Indexer.h" +#include "Var.h" Factor::Factor (const Factor& g) @@ -100,11 +100,11 @@ Factor::sumOutAllExceptIndex (size_t idx) void Factor::multiply (Factor& g) { - if (args_.size() == 0) { + if (args_.empty()) { clone (g); - return; + } else { + TFactor::multiply (g); } - TFactor::multiply (g); } diff --git a/packages/CLPBN/horus/Factor.h b/packages/CLPBN/horus/Factor.h index ca330e4c3..ea11d1137 100644 --- a/packages/CLPBN/horus/Factor.h +++ b/packages/CLPBN/horus/Factor.h @@ -3,7 +3,6 @@ #include -#include "Var.h" #include "Indexer.h" #include "Util.h" @@ -34,7 +33,7 @@ class TFactor void setDistId (unsigned id) { distId_ = id; } void normalize (void) { LogAware::normalize (params_); } - + void randomize (void) { for (size_t i = 0; i < params_.size(); ++i) { @@ -143,7 +142,7 @@ class TFactor assert (idx != args_.size()); assert (obsIdx < ranges_[idx]); Params newps; - newps.reserve (params_.size() / ranges_[idx]); + newps.reserve (params_.size() / ranges_[idx]); Indexer indexer (ranges_); for (unsigned i = 0; i < obsIdx; ++i) { indexer.incrementDimension (idx); @@ -207,7 +206,7 @@ class TFactor Ranges ranges_; Params params_; unsigned distId_; - + private: void extend (unsigned range_prod) { @@ -285,9 +284,10 @@ class Factor : public TFactor void sumOutLastVariable (void); void sumOutArgs (const vector& mask); - + void clone (const Factor& f); + DISALLOW_ASSIGN (Factor); }; #endif // HORUS_FACTOR_H diff --git a/packages/CLPBN/horus/FactorGraph.cpp b/packages/CLPBN/horus/FactorGraph.cpp index 417499c4d..1f4c614b3 100644 --- a/packages/CLPBN/horus/FactorGraph.cpp +++ b/packages/CLPBN/horus/FactorGraph.cpp @@ -1,17 +1,23 @@ -#include -#include #include +#include +#include + #include -#include #include +#include #include "FactorGraph.h" -#include "Factor.h" #include "BayesBall.h" #include "Util.h" +bool FactorGraph::exportLd_ = false; +bool FactorGraph::exportUai_ = false; +bool FactorGraph::exportGv_ = false; +bool FactorGraph::printFg_ = false; + + FactorGraph::FactorGraph (const FactorGraph& fg) { const VarNodes& varNodes = fg.varNodes(); @@ -32,20 +38,36 @@ FactorGraph::FactorGraph (const 
FactorGraph& fg) +FactorGraph::~FactorGraph (void) +{ + for (size_t i = 0; i < varNodes_.size(); i++) { + delete varNodes_[i]; + } + for (size_t i = 0; i < facNodes_.size(); i++) { + delete facNodes_[i]; + } +} + + + void FactorGraph::readFromUaiFormat (const char* fileName) { std::ifstream is (fileName); if (!is.is_open()) { - cerr << "error: cannot read from file " << fileName << endl; - abort(); + cerr << "Error: couldn't open file '" << fileName << "'." ; + exit (EXIT_FAILURE); } ignoreLines (is); string line; getline (is, line); - if (line != "MARKOV") { - cerr << "error: the network must be a MARKOV network " << endl; - abort(); + if (line == "BAYES") { + bayesFactors_ = true; + } else if (line == "MARKOV") { + bayesFactors_ = false; + } else { + cerr << "Error: the type of network is missing." << endl; + exit (EXIT_FAILURE); } // read the number of vars ignoreLines (is); @@ -61,23 +83,23 @@ FactorGraph::readFromUaiFormat (const char* fileName) unsigned nrArgs; unsigned vid; is >> nrFactors; - vector factorVarIds; - vector factorRanges; + vector allVarIds; + vector allRanges; for (unsigned i = 0; i < nrFactors; i++) { ignoreLines (is); is >> nrArgs; - factorVarIds.push_back ({ }); - factorRanges.push_back ({ }); + allVarIds.push_back ({ }); + allRanges.push_back ({ }); for (unsigned j = 0; j < nrArgs; j++) { is >> vid; if (vid >= ranges.size()) { - cerr << "error: invalid variable identifier `" << vid << "'" << endl; - cerr << "identifiers must be between 0 and " << ranges.size() - 1 ; - cerr << endl; - abort(); + cerr << "Error: invalid variable identifier `" << vid << "'. " ; + cerr << "Identifiers must be between 0 and " << ranges.size() - 1 ; + cerr << "." << endl; + exit (EXIT_FAILURE); } - factorVarIds.back().push_back (vid); - factorRanges.back().push_back (ranges[vid]); + allVarIds.back().push_back (vid); + allRanges.back().push_back (ranges[vid]); } } // read the parameters @@ -85,11 +107,11 @@ FactorGraph::readFromUaiFormat (const char* fileName) for (unsigned i = 0; i < nrFactors; i++) { ignoreLines (is); is >> nrParams; - if (nrParams != Util::sizeExpected (factorRanges[i])) { - cerr << "error: invalid number of parameters for factor nº " << i ; - cerr << ", expected: " << Util::sizeExpected (factorRanges[i]); - cerr << ", given: " << nrParams << endl; - abort(); + if (nrParams != Util::sizeExpected (allRanges[i])) { + cerr << "Error: invalid number of parameters for factor nº " << i ; + cerr << ", " << Util::sizeExpected (allRanges[i]); + cerr << " expected, " << nrParams << " given." << endl; + exit (EXIT_FAILURE); } Params params (nrParams); for (unsigned j = 0; j < nrParams; j++) { @@ -98,7 +120,14 @@ FactorGraph::readFromUaiFormat (const char* fileName) if (Globals::logDomain) { Util::log (params); } - addFactor (Factor (factorVarIds[i], factorRanges[i], params)); + Factor f (allVarIds[i], allRanges[i], params); + if (bayesFactors_ && allVarIds[i].size() > 1) { + // In this format the child is the last variable, + // move it to be the first + std::swap (allVarIds[i].front(), allVarIds[i].back()); + f.reorderArguments (allVarIds[i]); + } + addFactor (f); } is.close(); } @@ -110,8 +139,8 @@ FactorGraph::readFromLibDaiFormat (const char* fileName) { std::ifstream is (fileName); if (!is.is_open()) { - cerr << "error: cannot read from file " << fileName << endl; - abort(); + cerr << "Error: couldn't open file '" << fileName << "'." 
; + exit (EXIT_FAILURE); } ignoreLines (is); unsigned nrFactors; @@ -134,9 +163,9 @@ FactorGraph::readFromLibDaiFormat (const char* fileName) ignoreLines (is); is >> ranges[j]; VarNode* var = getVarNode (vids[j]); - if (var != 0 && ranges[j] != var->range()) { - cerr << "error: variable `" << vids[j] << "' appears in two or " ; - cerr << "more factors with a different range" << endl; + if (var && ranges[j] != var->range()) { + cerr << "Error: variable `" << vids[j] << "' appears in two or " ; + cerr << "more factors with a different range." << endl; } } // read parameters @@ -159,7 +188,7 @@ FactorGraph::readFromLibDaiFormat (const char* fileName) std::reverse (vids.begin(), vids.end()); Factor f (vids, ranges, params); std::reverse (vids.begin(), vids.end()); - f.reorderArguments (vids); + f.reorderArguments (vids); addFactor (f); } is.close(); @@ -167,18 +196,6 @@ FactorGraph::readFromLibDaiFormat (const char* fileName) -FactorGraph::~FactorGraph (void) -{ - for (size_t i = 0; i < varNodes_.size(); i++) { - delete varNodes_[i]; - } - for (size_t i = 0; i < facNodes_.size(); i++) { - delete facNodes_[i]; - } -} - - - void FactorGraph::addFactor (const Factor& factor) { @@ -188,7 +205,7 @@ FactorGraph::addFactor (const Factor& factor) for (size_t i = 0; i < vids.size(); i++) { VarMap::const_iterator it = varMap_.find (vids[i]); if (it != varMap_.end()) { - addEdge (it->second, fn); + addEdge (it->second, fn); } else { VarNode* vn = new VarNode (vids[i], fn->factor().range (i)); addVarNode (vn); @@ -277,81 +294,12 @@ FactorGraph::print (void) const void -FactorGraph::exportToGraphViz (const char* fileName) const +FactorGraph::exportToLibDai (const char* fileName) const { ofstream out (fileName); if (!out.is_open()) { - cerr << "error: cannot open file to write at " ; - cerr << "FactorGraph::exportToDotFile()" << endl; - abort(); - } - out << "graph \"" << fileName << "\" {" << endl; - for (size_t i = 0; i < varNodes_.size(); i++) { - if (varNodes_[i]->hasEvidence()) { - out << '"' << varNodes_[i]->label() << '"' ; - out << " [style=filled, fillcolor=yellow]" << endl; - } - } - for (size_t i = 0; i < facNodes_.size(); i++) { - out << '"' << facNodes_[i]->getLabel() << '"' ; - out << " [label=\"" << facNodes_[i]->getLabel(); - out << "\"" << ", shape=box]" << endl; - } - for (size_t i = 0; i < facNodes_.size(); i++) { - const VarNodes& myVars = facNodes_[i]->neighbors(); - for (size_t j = 0; j < myVars.size(); j++) { - out << '"' << facNodes_[i]->getLabel() << '"' ; - out << " -- " ; - out << '"' << myVars[j]->label() << '"' << endl; - } - } - out << "}" << endl; - out.close(); -} - - - -void -FactorGraph::exportToUaiFormat (const char* fileName) const -{ - ofstream out (fileName); - if (!out.is_open()) { - cerr << "error: cannot open file " << fileName << endl; - abort(); - } - out << "MARKOV" << endl; - out << varNodes_.size() << endl; - VarNodes sortedVns = varNodes_; - std::sort (sortedVns.begin(), sortedVns.end(), sortByVarId()); - for (size_t i = 0; i < sortedVns.size(); i++) { - out << ((i != 0) ? 
" " : "") << sortedVns[i]->range(); - } - out << endl << facNodes_.size() << endl; - for (size_t i = 0; i < facNodes_.size(); i++) { - VarIds args = facNodes_[i]->factor().arguments(); - out << args.size() << " " << Util::elementsToString (args) << endl; - } - out << endl; - for (size_t i = 0; i < facNodes_.size(); i++) { - Params params = facNodes_[i]->factor().params(); - if (Globals::logDomain) { - Util::exp (params); - } - out << params.size() << endl << " " ; - out << Util::elementsToString (params) << endl << endl; - } - out.close(); -} - - - -void -FactorGraph::exportToLibDaiFormat (const char* fileName) const -{ - ofstream out (fileName); - if (!out.is_open()) { - cerr << "error: cannot open file " << fileName << endl; - abort(); + cerr << "Error: couldn't open file '" << fileName << "'." ; + return; } out << facNodes_.size() << endl << endl; for (size_t i = 0; i < facNodes_.size(); i++) { @@ -376,6 +324,84 @@ FactorGraph::exportToLibDaiFormat (const char* fileName) const +void +FactorGraph::exportToUai (const char* fileName) const +{ + ofstream out (fileName); + if (!out.is_open()) { + cerr << "Error: couldn't open file '" << fileName << "'." ; + return; + } + out << (bayesFactors_ ? "BAYES" : "MARKOV") ; + out << endl << endl; + out << varNodes_.size() << endl; + VarNodes sortedVns = varNodes_; + std::sort (sortedVns.begin(), sortedVns.end(), sortByVarId()); + for (size_t i = 0; i < sortedVns.size(); i++) { + out << ((i != 0) ? " " : "") << sortedVns[i]->range(); + } + out << endl << facNodes_.size() << endl; + for (size_t i = 0; i < facNodes_.size(); i++) { + VarIds args = facNodes_[i]->factor().arguments(); + if (bayesFactors_) { + std::swap (args.front(), args.back()); + } + out << args.size() << " " << Util::elementsToString (args) << endl; + } + out << endl; + for (size_t i = 0; i < facNodes_.size(); i++) { + Factor f = facNodes_[i]->factor(); + if (bayesFactors_) { + VarIds args = f.arguments(); + std::swap (args.front(), args.back()); + f.reorderArguments (args); + } + Params params = f.params(); + if (Globals::logDomain) { + Util::exp (params); + } + out << params.size() << endl << " " ; + out << Util::elementsToString (params) << endl << endl; + } + out.close(); +} + + + +void +FactorGraph::exportToGraphViz (const char* fileName) const +{ + ofstream out (fileName); + if (!out.is_open()) { + cerr << "Error: couldn't open file '" << fileName << "'." 
; + return; + } + out << "graph \"" << fileName << "\" {" << endl; + for (size_t i = 0; i < varNodes_.size(); i++) { + if (varNodes_[i]->hasEvidence()) { + out << '"' << varNodes_[i]->label() << '"' ; + out << " [style=filled, fillcolor=yellow]" << endl; + } + } + for (size_t i = 0; i < facNodes_.size(); i++) { + out << '"' << facNodes_[i]->getLabel() << '"' ; + out << " [label=\"" << facNodes_[i]->getLabel(); + out << "\"" << ", shape=box]" << endl; + } + for (size_t i = 0; i < facNodes_.size(); i++) { + const VarNodes& myVars = facNodes_[i]->neighbors(); + for (size_t j = 0; j < myVars.size(); j++) { + out << '"' << facNodes_[i]->getLabel() << '"' ; + out << " -- " ; + out << '"' << myVars[j]->label() << '"' << endl; + } + } + out << "}" << endl; + out.close(); +} + + + void FactorGraph::ignoreLines (std::ifstream& is) const { diff --git a/packages/CLPBN/horus/FactorGraph.h b/packages/CLPBN/horus/FactorGraph.h index c2ed01046..e1cc9277c 100644 --- a/packages/CLPBN/horus/FactorGraph.h +++ b/packages/CLPBN/horus/FactorGraph.h @@ -9,14 +9,13 @@ using namespace std; - class FacNode; class VarNode : public Var { public: - VarNode (VarId varId, unsigned nrStates, - int evidence = Constants::NO_EVIDENCE) + VarNode (VarId varId, unsigned nrStates, + int evidence = Constants::NO_EVIDENCE) : Var (varId, nrStates, evidence) { } VarNode (const Var* v) : Var (v) { } @@ -26,9 +25,9 @@ class VarNode : public Var const FacNodes& neighbors (void) const { return neighs_; } private: - DISALLOW_COPY_AND_ASSIGN (VarNode); - FacNodes neighs_; + + DISALLOW_COPY_AND_ASSIGN (VarNode); }; @@ -53,11 +52,11 @@ class FacNode string getLabel (void) { return factor_.getLabel(); } private: - DISALLOW_COPY_AND_ASSIGN (FacNode); - VarNodes neighs_; Factor factor_; size_t index_; + + DISALLOW_COPY_AND_ASSIGN (FacNode); }; @@ -76,8 +75,8 @@ class FactorGraph const FacNodes& facNodes (void) const { return facNodes_; } void setFactorsAsBayesian (void) { bayesFactors_ = true; } - - bool bayesianFactors (void) const { return bayesFactors_ ; } + + bool bayesianFactors (void) const { return bayesFactors_; } size_t nrVarNodes (void) const { return varNodes_.size(); } @@ -107,15 +106,37 @@ class FactorGraph void print (void) const; + void exportToLibDai (const char*) const; + + void exportToUai (const char*) const; + void exportToGraphViz (const char*) const; - void exportToUaiFormat (const char*) const; + static bool exportToLibDai (void) { return exportLd_; } + + static bool exportToUai (void) { return exportUai_; } + + static bool exportGraphViz (void) { return exportGv_; } + + static bool printFactorGraph (void) { return printFg_; } + + static void enableExportToLibDai (void) { exportLd_ = true; } + + static void disableExportToLibDai (void) { exportLd_ = false; } + + static void enableExportToUai (void) { exportUai_ = true; } + + static void disableExportToUai (void) { exportUai_ = false; } + + static void enableExportToGraphViz (void) { exportGv_ = true; } + + static void disableExportToGraphViz (void) { exportGv_ = false; } + + static void enablePrintFactorGraph (void) { printFg_ = true; } + + static void disablePrintFactorGraph (void) { printFg_ = false; } - void exportToLibDaiFormat (const char*) const; - private: - // DISALLOW_COPY_AND_ASSIGN (FactorGraph); - void ignoreLines (std::ifstream&) const; bool containsCycle (void) const; @@ -129,18 +150,25 @@ class FactorGraph VarNodes varNodes_; FacNodes facNodes_; - BayesBallGraph structure_; - bool bayesFactors_; + BayesBallGraph structure_; + bool bayesFactors_; typedef 
unordered_map VarMap; VarMap varMap_; + + static bool exportLd_; + static bool exportUai_; + static bool exportGv_; + static bool printFg_; + + DISALLOW_ASSIGN (FactorGraph); }; struct sortByVarId -{ - bool operator()(VarNode* vn1, VarNode* vn2) { +{ + bool operator()(VarNode* vn1, VarNode* vn2) { return vn1->varId() < vn2->varId(); } }; diff --git a/packages/CLPBN/horus/GroundSolver.cpp b/packages/CLPBN/horus/GroundSolver.cpp index 4cd3fdbd2..1916315bb 100644 --- a/packages/CLPBN/horus/GroundSolver.cpp +++ b/packages/CLPBN/horus/GroundSolver.cpp @@ -1,8 +1,8 @@ #include "GroundSolver.h" -#include "Util.h" +#include "VarElim.h" #include "BeliefProp.h" #include "CountingBp.h" -#include "VarElim.h" +#include "Util.h" void @@ -47,7 +47,7 @@ Params GroundSolver::getJointByConditioning ( GroundSolverType solverType, FactorGraph fg, - const VarIds& jointVarIds) const + const VarIds& jointVarIds) { VarNodes jointVars; for (size_t i = 0; i < jointVarIds.size(); i++) { diff --git a/packages/CLPBN/horus/GroundSolver.h b/packages/CLPBN/horus/GroundSolver.h index 3e2959605..eac28b045 100644 --- a/packages/CLPBN/horus/GroundSolver.h +++ b/packages/CLPBN/horus/GroundSolver.h @@ -4,7 +4,6 @@ #include #include "FactorGraph.h" -#include "Var.h" #include "Horus.h" @@ -25,11 +24,13 @@ class GroundSolver void printAllPosterioris (void); - Params getJointByConditioning (GroundSolverType, - FactorGraph, const VarIds& jointVarIds) const; - + static Params getJointByConditioning (GroundSolverType, + FactorGraph, const VarIds& jointVarIds); + protected: const FactorGraph& fg; + + DISALLOW_COPY_AND_ASSIGN (GroundSolver); }; #endif // HORUS_GROUNDSOLVER_H diff --git a/packages/CLPBN/horus/Histogram.cpp b/packages/CLPBN/horus/Histogram.cpp index a9e96cfdd..d5cf729e9 100644 --- a/packages/CLPBN/horus/Histogram.cpp +++ b/packages/CLPBN/horus/Histogram.cpp @@ -59,10 +59,10 @@ HistogramSet::reset (void) -vector +vector HistogramSet::getHistograms (unsigned N, unsigned R) { - HistogramSet hs (N, R); + HistogramSet hs (N, R); unsigned H = hs.nrHistograms(); vector histograms; histograms.reserve (H); @@ -135,7 +135,7 @@ HistogramSet::maxCount (size_t idx) const } return size_ - sum; } - + void diff --git a/packages/CLPBN/horus/Histogram.h b/packages/CLPBN/horus/Histogram.h index 6e0f93411..d60c2d22f 100644 --- a/packages/CLPBN/horus/Histogram.h +++ b/packages/CLPBN/horus/Histogram.h @@ -2,8 +2,11 @@ #define HORUS_HISTOGRAM_H #include + #include +#include "Horus.h" + using namespace std; typedef vector Histogram; @@ -12,17 +15,17 @@ class HistogramSet { public: HistogramSet (unsigned, unsigned); - + void nextHistogram (void); unsigned operator[] (size_t idx) const; - + unsigned nrHistograms (void) const; void reset (void); - static vector getHistograms (unsigned ,unsigned); - + static vector getHistograms (unsigned, unsigned); + static unsigned nrHistograms (unsigned, unsigned); static size_t findIndex ( @@ -31,14 +34,16 @@ class HistogramSet static vector getNumAssigns (unsigned, unsigned); friend std::ostream& operator<< (ostream &os, const HistogramSet& hs); - + private: unsigned maxCount (size_t) const; void clearAfter (size_t); - unsigned size_; - Histogram hist_; + unsigned size_; + Histogram hist_; + + DISALLOW_COPY_AND_ASSIGN (HistogramSet); }; #endif // HORUS_HISTOGRAM_H diff --git a/packages/CLPBN/horus/Horus.h b/packages/CLPBN/horus/Horus.h index 7e5f12c8e..045ca42f2 100644 --- a/packages/CLPBN/horus/Horus.h +++ b/packages/CLPBN/horus/Horus.h @@ -1,31 +1,34 @@ #ifndef HORUS_HORUS_H #define HORUS_HORUS_H -#include 
- -#include - #define DISALLOW_COPY_AND_ASSIGN(TypeName) \ TypeName(const TypeName&); \ void operator=(const TypeName&) -using namespace std; +#define DISALLOW_COPY(TypeName) \ + TypeName(const TypeName&) + +#define DISALLOW_ASSIGN(TypeName) \ + void operator=(const TypeName&) + +#include +#include class Var; class Factor; class VarNode; class FacNode; -typedef vector Params; -typedef unsigned VarId; -typedef vector VarIds; -typedef vector Vars; -typedef vector VarNodes; -typedef vector FacNodes; -typedef vector Factors; -typedef vector States; -typedef vector Ranges; -typedef unsigned long long ullong; +typedef std::vector Params; +typedef unsigned VarId; +typedef std::vector VarIds; +typedef std::vector Vars; +typedef std::vector VarNodes; +typedef std::vector FacNodes; +typedef std::vector Factors; +typedef std::vector States; +typedef std::vector Ranges; +typedef unsigned long long ullong; enum LiftedSolverType @@ -69,19 +72,5 @@ const unsigned PRECISION = 6; }; - -namespace BpOptions -{ - enum Schedule { - SEQ_FIXED, - SEQ_RANDOM, - PARALLEL, - MAX_RESIDUAL - }; - extern Schedule schedule; - extern double accuracy; - extern unsigned maxIter; -} - #endif // HORUS_HORUS_H diff --git a/packages/CLPBN/horus/HorusCli.cpp b/packages/CLPBN/horus/HorusCli.cpp index 639b91739..82e995921 100644 --- a/packages/CLPBN/horus/HorusCli.cpp +++ b/packages/CLPBN/horus/HorusCli.cpp @@ -16,22 +16,39 @@ VarIds readQueryAndEvidence (FactorGraph&, int, const char* [], int); void runSolver (const FactorGraph&, const VarIds&); -const string USAGE = "usage: ./hcli [HORUS_FLAG=VALUE] \ -NETWORK_FILE [VARIABLE | OBSERVED_VARIABLE=EVIDENCE] ..." ; +const string USAGE = "usage: ./hcli [solver=hve|bp|cbp] \ +[=]... [|=]... " ; int main (int argc, const char* argv[]) { if (argc <= 1) { - cerr << "error: no graphical model specified" << endl; + cerr << "Error: no probabilistic graphical model was given." << endl; cerr << USAGE << endl; - exit (0); + exit (EXIT_FAILURE); } int idx = readHorusFlags (argc, argv); FactorGraph fg; readFactorGraph (fg, argv[idx]); VarIds queryIds = readQueryAndEvidence (fg, argc, argv, idx + 1); + if (FactorGraph::exportToLibDai()) { + fg.exportToLibDai ("model.fg"); + } + if (FactorGraph::exportToUai()) { + fg.exportToUai ("model.uai"); + } + if (FactorGraph::exportGraphViz()) { + fg.exportToGraphViz ("model.dot"); + } + if (FactorGraph::printFactorGraph()) { + fg.print(); + } + if (Globals::verbosity > 0) { + cout << "factor graph contains " ; + cout << fg.nrVarNodes() << " variables and " ; + cout << fg.nrFacNodes() << " factors " << endl; + } runSolver (fg, queryIds); return 0; } @@ -51,14 +68,14 @@ readHorusFlags (int argc, const char* argv[]) string leftArg = arg.substr (0, pos); string rightArg = arg.substr (pos + 1); if (leftArg.empty()) { - cerr << "error: missing left argument" << endl; + cerr << "Error: missing left argument." << endl; cerr << USAGE << endl; - exit (0); + exit (EXIT_FAILURE); } if (rightArg.empty()) { - cerr << "error: missing right argument" << endl; + cerr << "Error: missing right argument." 
<< endl; cerr << USAGE << endl; - exit (0); + exit (EXIT_FAILURE); } Util::setHorusFlag (leftArg, rightArg); } @@ -77,9 +94,9 @@ readFactorGraph (FactorGraph& fg, const char* s) } else if (extension == "fg") { fg.readFromLibDaiFormat (fileName.c_str()); } else { - cerr << "error: the graphical model must be defined either " ; - cerr << "in a UAI or libDAI file" << endl; - exit (0); + cerr << "Error: the probabilistic graphical model must be " ; + cerr << "defined either in a UAI or libDAI file." << endl; + exit (EXIT_FAILURE); } } @@ -97,17 +114,17 @@ readQueryAndEvidence ( const string& arg = argv[i]; if (arg.find ('=') == std::string::npos) { if (Util::isInteger (arg) == false) { - cerr << "error: `" << arg << "' " ; - cerr << "is not a variable id" ; + cerr << "Error: `" << arg << "' " ; + cerr << "is not a variable id." ; cerr << endl; - exit (0); + exit (EXIT_FAILURE); } VarId vid = Util::stringToUnsigned (arg); VarNode* queryVar = fg.getVarNode (vid); if (queryVar == false) { - cerr << "error: unknow variable with id " ; - cerr << "`" << vid << "'" << endl; - exit (0); + cerr << "Error: unknow variable with id " ; + cerr << "`" << vid << "'." << endl; + exit (EXIT_FAILURE); } queryIds.push_back (vid); } else { @@ -115,39 +132,38 @@ readQueryAndEvidence ( string leftArg = arg.substr (0, pos); string rightArg = arg.substr (pos + 1); if (leftArg.empty()) { - cerr << "error: missing left argument" << endl; + cerr << "Error: missing left argument." << endl; cerr << USAGE << endl; - exit (0); + exit (EXIT_FAILURE); } if (Util::isInteger (leftArg) == false) { - cerr << "error: `" << leftArg << "' " ; - cerr << "is not a variable id" << endl ; - exit (0); - continue; + cerr << "Error: `" << leftArg << "' " ; + cerr << "is not a variable id." << endl ; + exit (EXIT_FAILURE); } VarId vid = Util::stringToUnsigned (leftArg); VarNode* observedVar = fg.getVarNode (vid); if (observedVar == false) { - cerr << "error: unknow variable with id " ; - cerr << "`" << vid << "'" << endl; - exit (0); + cerr << "Error: unknow variable with id " ; + cerr << "`" << vid << "'." << endl; + exit (EXIT_FAILURE); } if (rightArg.empty()) { - cerr << "error: missing right argument" << endl; + cerr << "Error: missing right argument." << endl; cerr << USAGE << endl; - exit (0); + exit (EXIT_FAILURE); } if (Util::isInteger (rightArg) == false) { - cerr << "error: `" << rightArg << "' " ; - cerr << "is not a state index" << endl ; - exit (0); + cerr << "Error: `" << rightArg << "' " ; + cerr << "is not a state index." << endl ; + exit (EXIT_FAILURE); } unsigned stateIdx = Util::stringToUnsigned (rightArg); if (observedVar->isValidState (stateIdx) == false) { - cerr << "error: `" << stateIdx << "' " ; + cerr << "Error: `" << stateIdx << "' " ; cerr << "is not a valid state index for variable with id " ; - cerr << "`" << vid << "'" << endl; - exit (0); + cerr << "`" << vid << "'." 
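readQueryAndEvidence() above treats a bare number as a query variable id and VAR=STATE as evidence, rejecting anything else. A stand-alone sketch of that validation, with a plain digit check standing in for Util::isInteger(); none of the names below are the Horus API:

#include <cctype>
#include <cstdlib>
#include <iostream>
#include <string>

static bool
isUnsignedInteger (const std::string& s)
{
  if (s.empty()) {
    return false;
  }
  for (size_t i = 0; i < s.size(); i++) {
    if (isdigit (static_cast<unsigned char> (s[i])) == 0) {
      return false;
    }
  }
  return true;
}

int main (int argc, const char* argv[])
{
  for (int i = 1; i < argc; i++) {
    const std::string arg = argv[i];
    size_t pos = arg.find ('=');
    if (pos == std::string::npos) {
      // plain argument: must be a variable id to query
      if (isUnsignedInteger (arg) == false) {
        std::cerr << "Error: `" << arg << "' is not a variable id." << std::endl;
        return EXIT_FAILURE;
      }
      std::cout << "query variable " << arg << std::endl;
    } else {
      // OBSERVED_VAR=STATE_INDEX: both halves must be present and numeric
      std::string leftArg  = arg.substr (0, pos);
      std::string rightArg = arg.substr (pos + 1);
      if (isUnsignedInteger (leftArg) == false
          || isUnsignedInteger (rightArg) == false) {
        std::cerr << "Error: expected OBSERVED_VAR=STATE_INDEX, got `"
                  << arg << "'." << std::endl;
        return EXIT_FAILURE;
      }
      std::cout << "evidence: variable " << leftArg
                << " observed in state " << rightArg << std::endl;
    }
  }
  return 0;
}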
<< endl; + exit (EXIT_FAILURE); } observedVar->setEvidence (stateIdx); } diff --git a/packages/CLPBN/horus/HorusYap.cpp b/packages/CLPBN/horus/HorusYap.cpp index 586d5a170..63a2b69a5 100644 --- a/packages/CLPBN/horus/HorusYap.cpp +++ b/packages/CLPBN/horus/HorusYap.cpp @@ -57,7 +57,7 @@ createLiftedNetwork (void) } ParfactorList* pfList = new ParfactorList (parfactors); - + if (Globals::verbosity > 2) { Util::printHeader ("SHATTERED PARFACTORS"); pfList->print(); @@ -91,7 +91,7 @@ createGroundNetwork (void) // read the ranges Ranges ranges = readUnsignedList (YAP_ArgOfTerm (2, factor)); // read the parameters - Params params = readParameters (YAP_ArgOfTerm (3, factor)); + Params params = readParameters (YAP_ArgOfTerm (3, factor)); // read dist id unsigned distId = (unsigned) YAP_IntOfTerm (YAP_ArgOfTerm (4, factor)); fg->addFactor (Factor (varIds, ranges, params, distId)); @@ -108,10 +108,21 @@ createGroundNetwork (void) evidenceList = YAP_TailOfTerm (evidenceList); nrObservedVars ++; } + if (FactorGraph::exportToLibDai()) { + fg->exportToLibDai ("model.fg"); + } + if (FactorGraph::exportToUai()) { + fg->exportToUai ("model.uai"); + } + if (FactorGraph::exportGraphViz()) { + fg->exportToGraphViz ("model.dot"); + } + if (FactorGraph::printFactorGraph()) { + fg->print(); + } if (Globals::verbosity > 0) { cout << "factor graph contains " ; - cout << fg->nrVarNodes() << " variables " ; - cout << "(" << nrObservedVars << " observed) and " ; + cout << fg->nrVarNodes() << " variables and " ; cout << fg->nrFacNodes() << " factors " << endl; } YAP_Int p = (YAP_Int) (fg); @@ -126,19 +137,19 @@ runLiftedSolver (void) LiftedNetwork* network = (LiftedNetwork*) YAP_IntOfTerm (YAP_ARG1); ParfactorList pfListCopy (*network->first); LiftedOperations::absorveEvidence (pfListCopy, *network->second); - + LiftedSolver* solver = 0; switch (Globals::liftedSolver) { case LiftedSolverType::LVE: solver = new LiftedVe (pfListCopy); break; case LiftedSolverType::LBP: solver = new LiftedBp (pfListCopy); break; case LiftedSolverType::LKC: solver = new LiftedKc (pfListCopy); break; } - + if (Globals::verbosity > 0) { solver->printSolverFlags(); cout << endl; } - + YAP_Term taskList = YAP_ARG2; vector results; while (taskList != YAP_TermNil()) { @@ -181,7 +192,7 @@ int runGroundSolver (void) { FactorGraph* fg = (FactorGraph*) YAP_IntOfTerm (YAP_ARG1); - + vector tasks; YAP_Term taskList = YAP_ARG2; while (taskList != YAP_TermNil()) { @@ -200,7 +211,7 @@ runGroundSolver (void) } GroundSolver* solver = 0; - CountingBp::checkForIdenticalFactors = false; + CountingBp::setFindIdenticalFactorsFlag (false); switch (Globals::groundSolver) { case GroundSolverType::VE: solver = new VarElim (*mfg); break; case GroundSolverType::BP: solver = new BeliefProp (*mfg); break; @@ -233,19 +244,21 @@ setParfactorsParams (void) { LiftedNetwork* network = (LiftedNetwork*) YAP_IntOfTerm (YAP_ARG1); ParfactorList* pfList = network->first; - YAP_Term distList = YAP_ARG2; + YAP_Term distIdsList = YAP_ARG2; + YAP_Term paramsList = YAP_ARG3; unordered_map paramsMap; - while (distList != YAP_TermNil()) { - YAP_Term dist = YAP_HeadOfTerm (distList); - unsigned distId = (unsigned) YAP_IntOfTerm (YAP_ArgOfTerm (1, dist)); + while (distIdsList != YAP_TermNil()) { + unsigned distId = (unsigned) YAP_IntOfTerm ( + YAP_HeadOfTerm (distIdsList)); assert (Util::contains (paramsMap, distId) == false); - paramsMap[distId] = readParameters (YAP_ArgOfTerm (2, dist)); - distList = YAP_TailOfTerm (distList); + paramsMap[distId] = readParameters (YAP_HeadOfTerm 
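runGroundSolver() above instantiates the concrete solver behind a GroundSolver pointer from the Globals::groundSolver flag (runLiftedSolver does the same for the lifted solvers). A compact sketch of that selection pattern; the three classes below are stand-ins, not the Horus implementations:

#include <iostream>

enum class SolverType { VE, BP, CBP };

class Solver
{
  public:
    virtual ~Solver (void) { }
    virtual const char* name (void) const = 0;
};

class VarElim    : public Solver { public: const char* name (void) const { return "hve"; } };
class BeliefProp : public Solver { public: const char* name (void) const { return "bp";  } };
class CountingBp : public Solver { public: const char* name (void) const { return "cbp"; } };

static Solver*
makeSolver (SolverType type)
{
  Solver* solver = 0;
  switch (type) {
    case SolverType::VE:  solver = new VarElim();    break;
    case SolverType::BP:  solver = new BeliefProp(); break;
    case SolverType::CBP: solver = new CountingBp(); break;
  }
  return solver;
}

int main (void)
{
  Solver* solver = makeSolver (SolverType::CBP);
  std::cout << "selected solver: " << solver->name() << std::endl;
  delete solver;
  return 0;
}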
(paramsList)); + distIdsList = YAP_TailOfTerm (distIdsList); + paramsList = YAP_TailOfTerm (paramsList); } ParfactorList::iterator it = pfList->begin(); while (it != pfList->end()) { assert (Util::contains (paramsMap, (*it)->distId())); - // (*it)->setParams (paramsMap[(*it)->distId()]); + (*it)->setParams (paramsMap[(*it)->distId()]); ++ it; } return TRUE; @@ -256,16 +269,17 @@ setParfactorsParams (void) int setFactorsParams (void) { - return TRUE; // TODO FactorGraph* fg = (FactorGraph*) YAP_IntOfTerm (YAP_ARG1); - YAP_Term distList = YAP_ARG2; + YAP_Term distIdsList = YAP_ARG2; + YAP_Term paramsList = YAP_ARG3; unordered_map paramsMap; - while (distList != YAP_TermNil()) { - YAP_Term dist = YAP_HeadOfTerm (distList); - unsigned distId = (unsigned) YAP_IntOfTerm (YAP_ArgOfTerm (1, dist)); + while (distIdsList != YAP_TermNil()) { + unsigned distId = (unsigned) YAP_IntOfTerm ( + YAP_HeadOfTerm (distIdsList)); assert (Util::contains (paramsMap, distId) == false); - paramsMap[distId] = readParameters (YAP_ArgOfTerm (2, dist)); - distList = YAP_TailOfTerm (distList); + paramsMap[distId] = readParameters (YAP_HeadOfTerm (paramsList)); + distIdsList = YAP_TailOfTerm (distIdsList); + paramsList = YAP_TailOfTerm (paramsList); } const FacNodes& facNodes = fg->facNodes(); for (size_t i = 0; i < facNodes.size(); i++) { @@ -317,11 +331,11 @@ setHorusFlag (void) stringstream ss; ss << (int) YAP_IntOfTerm (YAP_ARG2); ss >> value; - } else if (key == "accuracy") { + } else if (key == "bp_accuracy") { stringstream ss; ss << (float) YAP_FloatOfTerm (YAP_ARG2); ss >> value; - } else if (key == "max_iter") { + } else if (key == "bp_max_iter") { stringstream ss; ss << (int) YAP_IntOfTerm (YAP_ARG2); ss >> value; @@ -404,7 +418,7 @@ readParfactor (YAP_Term pfTerm) } // read the parameters - const Params& params = readParameters (YAP_ArgOfTerm (4, pfTerm)); + const Params& params = readParameters (YAP_ArgOfTerm (4, pfTerm)); // read the constraint Tuples tuples; @@ -420,8 +434,8 @@ readParfactor (YAP_Term pfTerm) for (unsigned i = 1; i <= arity; i++) { YAP_Term ti = YAP_ArgOfTerm (i, term); if (YAP_IsAtomTerm (ti) == false) { - cerr << "error: constraint has free variables" << endl; - abort(); + cerr << "Error: the constraint contains free variables." 
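setParfactorsParams() and setFactorsParams() above now receive the distribution ids and the parameter lists as two parallel Prolog lists (the predicates become arity 3) and zip them into a map keyed by distribution id. A sketch of that zip with plain vectors standing in for the YAP term lists; Params is assumed here to be a vector of doubles:

#include <cassert>
#include <iostream>
#include <unordered_map>
#include <vector>

typedef std::vector<double> Params;

int main (void)
{
  // two parallel inputs: distIds[i] owns params[i]
  std::vector<unsigned> distIds = { 0, 1 };
  std::vector<Params>   params  = { { 0.2, 0.8 }, { 0.5, 0.5 } };
  assert (distIds.size() == params.size());

  std::unordered_map<unsigned, Params> paramsMap;
  for (size_t i = 0; i < distIds.size(); i++) {
    assert (paramsMap.count (distIds[i]) == 0);   // each dist id appears once
    paramsMap[distIds[i]] = params[i];
  }

  std::cout << "dist 1, first parameter = " << paramsMap[1][0] << std::endl;
  return 0;
}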
<< endl; + exit (EXIT_FAILURE); } string name ((char*) YAP_AtomName (YAP_AtomOfTerm (ti))); tuple[i - 1] = LiftedUtils::getSymbol (name); @@ -475,7 +489,7 @@ readLiftedEvidence ( obsFormulas.push_back (ObservedFormula (functor, evidence, args)); } observedList = YAP_TailOfTerm (observedList); - } + } } @@ -534,15 +548,34 @@ fillAnswersPrologList (vector& results) extern "C" void init_predicates (void) { - YAP_UserCPredicate ("cpp_create_lifted_network", createLiftedNetwork, 3); - YAP_UserCPredicate ("cpp_create_ground_network", createGroundNetwork, 4); - YAP_UserCPredicate ("cpp_run_lifted_solver", runLiftedSolver, 3); - YAP_UserCPredicate ("cpp_run_ground_solver", runGroundSolver, 3); - YAP_UserCPredicate ("cpp_set_parfactors_params", setParfactorsParams, 2); - YAP_UserCPredicate ("cpp_cpp_set_factors_params", setFactorsParams, 2); - YAP_UserCPredicate ("cpp_set_vars_information", setVarsInformation, 2); - YAP_UserCPredicate ("cpp_set_horus_flag", setHorusFlag, 2); - YAP_UserCPredicate ("cpp_free_lifted_network", freeLiftedNetwork, 1); - YAP_UserCPredicate ("cpp_free_ground_network", freeGroundNetwork, 1); + YAP_UserCPredicate ("cpp_create_lifted_network", + createLiftedNetwork, 3); + + YAP_UserCPredicate ("cpp_create_ground_network", + createGroundNetwork, 4); + + YAP_UserCPredicate ("cpp_run_lifted_solver", + runLiftedSolver, 3); + + YAP_UserCPredicate ("cpp_run_ground_solver", + runGroundSolver, 3); + + YAP_UserCPredicate ("cpp_set_parfactors_params", + setParfactorsParams, 3); + + YAP_UserCPredicate ("cpp_set_factors_params", + setFactorsParams, 3); + + YAP_UserCPredicate ("cpp_set_vars_information", + setVarsInformation, 2); + + YAP_UserCPredicate ("cpp_set_horus_flag", + setHorusFlag, 2); + + YAP_UserCPredicate ("cpp_free_lifted_network", + freeLiftedNetwork, 1); + + YAP_UserCPredicate ("cpp_free_ground_network", + freeGroundNetwork, 1); } diff --git a/packages/CLPBN/horus/Indexer.h b/packages/CLPBN/horus/Indexer.h index db99cf1a7..a4141ebed 100644 --- a/packages/CLPBN/horus/Indexer.h +++ b/packages/CLPBN/horus/Indexer.h @@ -120,6 +120,8 @@ class Indexer const Ranges& ranges_; size_t size_; vector offsets_; + + DISALLOW_COPY_AND_ASSIGN (Indexer); }; @@ -167,7 +169,7 @@ class MapIndexer } } } - + template MapIndexer ( const vector& allArgs, @@ -239,6 +241,8 @@ class MapIndexer const Ranges& ranges_; bool valid_; vector offsets_; + + DISALLOW_COPY_AND_ASSIGN (MapIndexer); }; diff --git a/packages/CLPBN/horus/LiftedBp.cpp b/packages/CLPBN/horus/LiftedBp.cpp index 66e82a8c4..b85e87cd5 100644 --- a/packages/CLPBN/horus/LiftedBp.cpp +++ b/packages/CLPBN/horus/LiftedBp.cpp @@ -1,7 +1,7 @@ #include "LiftedBp.h" +#include "LiftedOperations.h" #include "WeightedBp.h" #include "FactorGraph.h" -#include "LiftedOperations.h" LiftedBp::LiftedBp (const ParfactorList& parfactorList) @@ -62,16 +62,15 @@ LiftedBp::printSolverFlags (void) const { stringstream ss; ss << "lifted bp [" ; - ss << "schedule=" ; - typedef BpOptions::Schedule Sch; - switch (BpOptions::schedule) { - case Sch::SEQ_FIXED: ss << "seq_fixed"; break; - case Sch::SEQ_RANDOM: ss << "seq_random"; break; - case Sch::PARALLEL: ss << "parallel"; break; - case Sch::MAX_RESIDUAL: ss << "max_residual"; break; + ss << "bp_msg_schedule=" ; + switch (WeightedBp::msgSchedule()) { + case MsgSchedule::SEQ_FIXED: ss << "seq_fixed"; break; + case MsgSchedule::SEQ_RANDOM: ss << "seq_random"; break; + case MsgSchedule::PARALLEL: ss << "parallel"; break; + case MsgSchedule::MAX_RESIDUAL: ss << "max_residual"; break; } - ss << ",max_iter=" << 
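printSolverFlags() above now reports the message schedule through the MsgSchedule enumeration exposed by WeightedBp, and the related flags were renamed bp_accuracy and bp_max_iter. A small sketch of the enum-to-string mapping; whether MsgSchedule is a scoped enum in Horus is assumed here, and scheduleName is illustrative:

#include <iostream>
#include <string>

enum class MsgSchedule { SEQ_FIXED, SEQ_RANDOM, PARALLEL, MAX_RESIDUAL };

static std::string
scheduleName (MsgSchedule schedule)
{
  switch (schedule) {
    case MsgSchedule::SEQ_FIXED:    return "seq_fixed";
    case MsgSchedule::SEQ_RANDOM:   return "seq_random";
    case MsgSchedule::PARALLEL:     return "parallel";
    case MsgSchedule::MAX_RESIDUAL: return "max_residual";
  }
  return "unknown";
}

int main (void)
{
  std::cout << "bp_msg_schedule=" << scheduleName (MsgSchedule::MAX_RESIDUAL)
            << std::endl;
  return 0;
}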
BpOptions::maxIter; - ss << ",accuracy=" << BpOptions::accuracy; + ss << ",bp_max_iter=" << WeightedBp::maxIterations(); + ss << ",bp_accuracy=" << WeightedBp::accuracy(); ss << ",log_domain=" << Util::toString (Globals::logDomain); ss << "]" ; cout << ss.str() << endl; @@ -182,10 +181,10 @@ LiftedBp::rangeOfGround (const Ground& gr) } ++ it; } - return std::numeric_limits::max(); + return Util::maxUnsigned(); } - + Params LiftedBp::getJointByConditioning ( diff --git a/packages/CLPBN/horus/LiftedBp.h b/packages/CLPBN/horus/LiftedBp.h index cb6e9f3a4..01807ddfb 100644 --- a/packages/CLPBN/horus/LiftedBp.h +++ b/packages/CLPBN/horus/LiftedBp.h @@ -28,7 +28,7 @@ class LiftedBp : public LiftedSolver void createFactorGraph (void); vector> getWeights (void) const; - + unsigned rangeOfGround (const Ground&); Params getJointByConditioning (const ParfactorList&, const Grounds&); @@ -37,6 +37,8 @@ class LiftedBp : public LiftedSolver WeightedBp* solver_; FactorGraph* fg_; + DISALLOW_COPY_AND_ASSIGN (LiftedBp); + }; #endif // HORUS_LIFTEDBP_H diff --git a/packages/CLPBN/horus/LiftedCircuit.cpp b/packages/CLPBN/horus/LiftedCircuit.cpp deleted file mode 100644 index 41ea4f2ae..000000000 --- a/packages/CLPBN/horus/LiftedCircuit.cpp +++ /dev/null @@ -1,1149 +0,0 @@ -#include - -#include "LiftedCircuit.h" - - -double -OrNode::weight (void) const -{ - double lw = leftBranch_->weight(); - double rw = rightBranch_->weight(); - return Globals::logDomain ? Util::logSum (lw, rw) : lw + rw; -} - - - -double -AndNode::weight (void) const -{ - double lw = leftBranch_->weight(); - double rw = rightBranch_->weight(); - return Globals::logDomain ? lw + rw : lw * rw; -} - - - -int SetOrNode::nrPos_ = -1; -int SetOrNode::nrNeg_ = -1; - - - -double -SetOrNode::weight (void) const -{ - double weightSum = LogAware::addIdenty(); - for (unsigned i = 0; i < nrGroundings_ + 1; i++) { - nrPos_ = nrGroundings_ - i; - nrNeg_ = i; - if (Globals::logDomain) { - double nrCombs = Util::nrCombinations (nrGroundings_, i); - double w = follow_->weight(); - weightSum = Util::logSum (weightSum, std::log (nrCombs) + w); - } else { - double w = follow_->weight(); - weightSum += Util::nrCombinations (nrGroundings_, i) * w; - } - } - nrPos_ = -1; - nrNeg_ = -1; - return weightSum; -} - - - -double -SetAndNode::weight (void) const -{ - return LogAware::pow (follow_->weight(), nrGroundings_); -} - - - -double -IncExcNode::weight (void) const -{ - double w = 0.0; - if (Globals::logDomain) { - w = Util::logSum (plus1Branch_->weight(), plus2Branch_->weight()); - w = std::log (std::exp (w) - std::exp (minusBranch_->weight())); - } else { - w = plus1Branch_->weight() + plus2Branch_->weight(); - w -= minusBranch_->weight(); - } - return w; -} - - - -double -LeafNode::weight (void) const -{ - assert (clause_->isUnit()); - if (clause_->posCountedLogVars().empty() == false - || clause_->negCountedLogVars().empty() == false) { - if (SetOrNode::isSet() == false) { - // return a NaN if we have a SetOrNode - // ancester that is not set. This can only - // happen when calculating the weights - // for the edge labels in graphviz - return 0.0 / 0.0; - } - } - double weight = clause_->literals()[0].isPositive() - ? 
lwcnf_.posWeight (clause_->literals().front().lid()) - : lwcnf_.negWeight (clause_->literals().front().lid()); - LogVarSet lvs = clause_->constr().logVarSet(); - lvs -= clause_->ipgLogVars(); - lvs -= clause_->posCountedLogVars(); - lvs -= clause_->negCountedLogVars(); - unsigned nrGroundings = 1; - if (lvs.empty() == false) { - nrGroundings = clause_->constr().projectedCopy (lvs).size(); - } - if (clause_->posCountedLogVars().empty() == false) { - nrGroundings *= std::pow (SetOrNode::nrPositives(), - clause_->nrPosCountedLogVars()); - } - if (clause_->negCountedLogVars().empty() == false) { - nrGroundings *= std::pow (SetOrNode::nrNegatives(), - clause_->nrNegCountedLogVars()); - } - return LogAware::pow (weight, nrGroundings); -} - - - -double -SmoothNode::weight (void) const -{ - Clauses cs = clauses(); - double totalWeight = LogAware::multIdenty(); - for (size_t i = 0; i < cs.size(); i++) { - double posWeight = lwcnf_.posWeight (cs[i]->literals()[0].lid()); - double negWeight = lwcnf_.negWeight (cs[i]->literals()[0].lid()); - LogVarSet lvs = cs[i]->constr().logVarSet(); - lvs -= cs[i]->ipgLogVars(); - lvs -= cs[i]->posCountedLogVars(); - lvs -= cs[i]->negCountedLogVars(); - unsigned nrGroundings = 1; - if (lvs.empty() == false) { - nrGroundings = cs[i]->constr().projectedCopy (lvs).size(); - } - if (cs[i]->posCountedLogVars().empty() == false) { - nrGroundings *= std::pow (SetOrNode::nrPositives(), - cs[i]->nrPosCountedLogVars()); - } - if (cs[i]->negCountedLogVars().empty() == false) { - nrGroundings *= std::pow (SetOrNode::nrNegatives(), - cs[i]->nrNegCountedLogVars()); - } - if (Globals::logDomain) { - totalWeight += Util::logSum (posWeight, negWeight) * nrGroundings; - } else { - totalWeight *= std::pow (posWeight + negWeight, nrGroundings); - } - } - return totalWeight; -} - - - -double -TrueNode::weight (void) const -{ - return LogAware::multIdenty(); -} - - - -double -CompilationFailedNode::weight (void) const -{ - // weighted model counting in compilation - // failed nodes should give NaN - return 0.0 / 0.0; -} - - - -LiftedCircuit::LiftedCircuit (const LiftedWCNF* lwcnf) - : lwcnf_(lwcnf) -{ - root_ = 0; - compilationSucceeded_ = true; - Clauses clauses = Clause::copyClauses (lwcnf->clauses()); - compile (&root_, clauses); - if (compilationSucceeded_) { - smoothCircuit (root_); - } - if (Globals::verbosity > 1) { - if (compilationSucceeded_) { - double wmc = LogAware::exp (getWeightedModelCount()); - cout << "Weighted model count = " << wmc << endl << endl; - } - cout << "Exporting circuit to graphviz (circuit.dot)..." 
; - cout << endl << endl; - exportToGraphViz ("circuit.dot"); - } -} - - - -bool -LiftedCircuit::isCompilationSucceeded (void) const -{ - return compilationSucceeded_; -} - - - -double -LiftedCircuit::getWeightedModelCount (void) const -{ - assert (compilationSucceeded_); - return root_->weight(); -} - - - -void -LiftedCircuit::exportToGraphViz (const char* fileName) -{ - ofstream out (fileName); - if (!out.is_open()) { - cerr << "error: cannot open file to write at " ; - cerr << "BayesBallGraph::exportToDotFile()" << endl; - abort(); - } - out << "digraph {" << endl; - out << "ranksep=1" << endl; - exportToGraphViz (root_, out); - out << "}" << endl; - out.close(); -} - - - -void -LiftedCircuit::compile ( - CircuitNode** follow, - Clauses& clauses) -{ - if (compilationSucceeded_ == false - && Globals::verbosity <= 1) { - return; - } - - if (clauses.empty()) { - *follow = new TrueNode(); - return; - } - - if (clauses.size() == 1 && clauses[0]->isUnit()) { - *follow = new LeafNode (clauses[0], *lwcnf_); - return; - } - - if (tryUnitPropagation (follow, clauses)) { - return; - } - - if (tryIndependence (follow, clauses)) { - return; - } - - if (tryShannonDecomp (follow, clauses)) { - return; - } - - if (tryInclusionExclusion (follow, clauses)) { - return; - } - - if (tryIndepPartialGrounding (follow, clauses)) { - return; - } - - if (tryAtomCounting (follow, clauses)) { - return; - } - - *follow = new CompilationFailedNode(); - if (Globals::verbosity > 1) { - originClausesMap_[*follow] = clauses; - explanationMap_[*follow] = "" ; - } - compilationSucceeded_ = false; -} - - - -bool -LiftedCircuit::tryUnitPropagation ( - CircuitNode** follow, - Clauses& clauses) -{ - if (Globals::verbosity > 1) { - backupClauses_ = Clause::copyClauses (clauses); - } - for (size_t i = 0; i < clauses.size(); i++) { - if (clauses[i]->isUnit()) { - Clauses propagClauses; - for (size_t j = 0; j < clauses.size(); j++) { - if (i != j) { - LiteralId lid = clauses[i]->literals()[0].lid(); - LogVarTypes types = clauses[i]->logVarTypes (0); - if (clauses[i]->literals()[0].isPositive()) { - if (clauses[j]->containsPositiveLiteral (lid, types) == false) { - clauses[j]->removeNegativeLiterals (lid, types); - if (clauses[j]->nrLiterals() > 0) { - propagClauses.push_back (clauses[j]); - } else { - delete clauses[j]; - } - } else { - delete clauses[j]; - } - } else if (clauses[i]->literals()[0].isNegative()) { - if (clauses[j]->containsNegativeLiteral (lid, types) == false) { - clauses[j]->removePositiveLiterals (lid, types); - if (clauses[j]->nrLiterals() > 0) { - propagClauses.push_back (clauses[j]); - } else { - delete clauses[j]; - } - } else { - delete clauses[j]; - } - } - } - } - - AndNode* andNode = new AndNode(); - if (Globals::verbosity > 1) { - originClausesMap_[andNode] = backupClauses_; - stringstream explanation; - explanation << " UP on " << clauses[i]->literals()[0]; - explanationMap_[andNode] = explanation.str(); - } - - Clauses unitClause = { clauses[i] }; - compile (andNode->leftBranch(), unitClause); - compile (andNode->rightBranch(), propagClauses); - (*follow) = andNode; - return true; - } - } - return false; -} - - - -bool -LiftedCircuit::tryIndependence ( - CircuitNode** follow, - Clauses& clauses) -{ - if (clauses.size() == 1) { - return false; - } - if (Globals::verbosity > 1) { - backupClauses_ = Clause::copyClauses (clauses); - } - Clauses depClauses = { clauses[0] }; - Clauses indepClauses (clauses.begin() + 1, clauses.end()); - bool finish = false; - while (finish == false) { - finish = true; - 
for (size_t i = 0; i < indepClauses.size(); i++) { - if (independentClause (*indepClauses[i], depClauses) == false) { - depClauses.push_back (indepClauses[i]); - indepClauses.erase (indepClauses.begin() + i); - finish = false; - break; - } - } - } - if (indepClauses.empty() == false) { - AndNode* andNode = new AndNode (); - if (Globals::verbosity > 1) { - originClausesMap_[andNode] = backupClauses_; - explanationMap_[andNode] = " Independence" ; - } - compile (andNode->leftBranch(), depClauses); - compile (andNode->rightBranch(), indepClauses); - (*follow) = andNode; - return true; - } - return false; -} - - - -bool -LiftedCircuit::tryShannonDecomp ( - CircuitNode** follow, - Clauses& clauses) -{ - if (Globals::verbosity > 1) { - backupClauses_ = Clause::copyClauses (clauses); - } - for (size_t i = 0; i < clauses.size(); i++) { - const Literals& literals = clauses[i]->literals(); - for (size_t j = 0; j < literals.size(); j++) { - if (literals[j].isGround ( - clauses[i]->constr(), clauses[i]->ipgLogVars())) { - - Clause* c1 = lwcnf_->createClause (literals[j].lid()); - Clause* c2 = new Clause (*c1); - c2->literals().front().complement(); - - Clauses otherClauses = Clause::copyClauses (clauses); - clauses.push_back (c1); - otherClauses.push_back (c2); - - OrNode* orNode = new OrNode(); - if (Globals::verbosity > 1) { - originClausesMap_[orNode] = backupClauses_; - stringstream explanation; - explanation << " SD on " << literals[j]; - explanationMap_[orNode] = explanation.str(); - } - - compile (orNode->leftBranch(), clauses); - compile (orNode->rightBranch(), otherClauses); - (*follow) = orNode; - return true; - } - } - } - return false; -} - - - -bool -LiftedCircuit::tryInclusionExclusion ( - CircuitNode** follow, - Clauses& clauses) -{ - if (Globals::verbosity > 1) { - backupClauses_ = Clause::copyClauses (clauses); - } - for (size_t i = 0; i < clauses.size(); i++) { - Literals depLits = { clauses[i]->literals().front() }; - Literals indepLits (clauses[i]->literals().begin() + 1, - clauses[i]->literals().end()); - bool finish = false; - while (finish == false) { - finish = true; - for (size_t j = 0; j < indepLits.size(); j++) { - if (independentLiteral (indepLits[j], depLits) == false) { - depLits.push_back (indepLits[j]); - indepLits.erase (indepLits.begin() + j); - finish = false; - break; - } - } - } - if (indepLits.empty() == false) { - LogVarSet lvs1; - for (size_t j = 0; j < depLits.size(); j++) { - lvs1 |= depLits[j].logVarSet(); - } - if (clauses[i]->constr().isCountNormalized (lvs1) == false) { - break; - } - LogVarSet lvs2; - for (size_t j = 0; j < indepLits.size(); j++) { - lvs2 |= indepLits[j].logVarSet(); - } - if (clauses[i]->constr().isCountNormalized (lvs2) == false) { - break; - } - Clause* c1 = new Clause (clauses[i]->constr().projectedCopy (lvs1)); - for (size_t j = 0; j < depLits.size(); j++) { - c1->addLiteral (depLits[j]); - } - Clause* c2 = new Clause (clauses[i]->constr().projectedCopy (lvs2)); - for (size_t j = 0; j < indepLits.size(); j++) { - c2->addLiteral (indepLits[j]); - } - - clauses.erase (clauses.begin() + i); - Clauses plus1Clauses = Clause::copyClauses (clauses); - Clauses plus2Clauses = Clause::copyClauses (clauses); - - plus1Clauses.push_back (c1); - plus2Clauses.push_back (c2); - clauses.push_back (c1); - clauses.push_back (c2); - - IncExcNode* ieNode = new IncExcNode(); - if (Globals::verbosity > 1) { - originClausesMap_[ieNode] = backupClauses_; - stringstream explanation; - explanation << " IncExc on clause nº " << i + 1; - explanationMap_[ieNode] 
= explanation.str(); - } - compile (ieNode->plus1Branch(), plus1Clauses); - compile (ieNode->plus2Branch(), plus2Clauses); - compile (ieNode->minusBranch(), clauses); - *follow = ieNode; - return true; - } - } - return false; -} - - - -bool -LiftedCircuit::tryIndepPartialGrounding ( - CircuitNode** follow, - Clauses& clauses) -{ - // assumes that all literals have logical variables - // else, shannon decomp was possible - if (Globals::verbosity > 1) { - backupClauses_ = Clause::copyClauses (clauses); - } - LogVars rootLogVars; - LogVarSet lvs = clauses[0]->ipgCandidates(); - for (size_t i = 0; i < lvs.size(); i++) { - rootLogVars.clear(); - rootLogVars.push_back (lvs[i]); - ConstraintTree ct = clauses[0]->constr().projectedCopy ({lvs[i]}); - if (tryIndepPartialGroundingAux (clauses, ct, rootLogVars)) { - for (size_t j = 0; j < clauses.size(); j++) { - clauses[j]->addIpgLogVar (rootLogVars[j]); - } - SetAndNode* setAndNode = new SetAndNode (ct.size()); - if (Globals::verbosity > 1) { - originClausesMap_[setAndNode] = backupClauses_; - explanationMap_[setAndNode] = " IPG" ; - } - *follow = setAndNode; - compile (setAndNode->follow(), clauses); - return true; - } - } - return false; -} - - - -bool -LiftedCircuit::tryIndepPartialGroundingAux ( - Clauses& clauses, - ConstraintTree& ct, - LogVars& rootLogVars) -{ - for (size_t i = 1; i < clauses.size(); i++) { - LogVarSet lvs = clauses[i]->ipgCandidates(); - for (size_t j = 0; j < lvs.size(); j++) { - ConstraintTree ct2 = clauses[i]->constr().projectedCopy ({lvs[j]}); - if (ct.tupleSet() == ct2.tupleSet()) { - rootLogVars.push_back (lvs[j]); - break; - } - } - if (rootLogVars.size() != i + 1) { - return false; - } - } - // verifies if the IPG logical vars appear in the same positions - unordered_map positions; - for (size_t i = 0; i < clauses.size(); i++) { - const Literals& literals = clauses[i]->literals(); - for (size_t j = 0; j < literals.size(); j++) { - size_t idx = literals[j].indexOfLogVar (rootLogVars[i]); - assert (idx != literals[j].nrLogVars()); - unordered_map::iterator it; - it = positions.find (literals[j].lid()); - if (it != positions.end()) { - if (it->second != idx) { - return false; - } - } else { - positions[literals[j].lid()] = idx; - } - } - } - return true; -} - - - -bool -LiftedCircuit::tryAtomCounting ( - CircuitNode** follow, - Clauses& clauses) -{ - for (size_t i = 0; i < clauses.size(); i++) { - if (clauses[i]->nrPosCountedLogVars() > 0 - || clauses[i]->nrNegCountedLogVars() > 0) { - // only allow one atom counting node per branch - return false; - } - } - if (Globals::verbosity > 1) { - backupClauses_ = Clause::copyClauses (clauses); - } - for (size_t i = 0; i < clauses.size(); i++) { - Literals literals = clauses[i]->literals(); - for (size_t j = 0; j < literals.size(); j++) { - if (literals[j].nrLogVars() == 1 - && ! clauses[i]->isIpgLogVar (literals[j].logVars().front()) - && ! 
clauses[i]->isCountedLogVar (literals[j].logVars().front())) { - unsigned nrGroundings = clauses[i]->constr().projectedCopy ( - literals[j].logVars()).size(); - SetOrNode* setOrNode = new SetOrNode (nrGroundings); - if (Globals::verbosity > 1) { - originClausesMap_[setOrNode] = backupClauses_; - explanationMap_[setOrNode] = " AC" ; - } - Clause* c1 = new Clause ( - clauses[i]->constr().projectedCopy (literals[j].logVars())); - Clause* c2 = new Clause ( - clauses[i]->constr().projectedCopy (literals[j].logVars())); - c1->addLiteral (literals[j]); - c2->addLiteralComplemented (literals[j]); - c1->addPosCountedLogVar (literals[j].logVars().front()); - c2->addNegCountedLogVar (literals[j].logVars().front()); - clauses.push_back (c1); - clauses.push_back (c2); - shatterCountedLogVars (clauses); - compile (setOrNode->follow(), clauses); - *follow = setOrNode; - return true; - } - } - } - return false; -} - - - -void -LiftedCircuit::shatterCountedLogVars (Clauses& clauses) -{ - while (shatterCountedLogVarsAux (clauses)) ; -} - - - -bool -LiftedCircuit::shatterCountedLogVarsAux (Clauses& clauses) -{ - for (size_t i = 0; i < clauses.size() - 1; i++) { - for (size_t j = i + 1; j < clauses.size(); j++) { - bool splitedSome = shatterCountedLogVarsAux (clauses, i, j); - if (splitedSome) { - return true; - } - } - } - return false; -} - - - -bool -LiftedCircuit::shatterCountedLogVarsAux ( - Clauses& clauses, - size_t idx1, - size_t idx2) -{ - Literals lits1 = clauses[idx1]->literals(); - Literals lits2 = clauses[idx2]->literals(); - for (size_t i = 0; i < lits1.size(); i++) { - for (size_t j = 0; j < lits2.size(); j++) { - if (lits1[i].lid() == lits2[j].lid()) { - LogVars lvs1 = lits1[i].logVars(); - LogVars lvs2 = lits2[j].logVars(); - for (size_t k = 0; k < lvs1.size(); k++) { - if (clauses[idx1]->isCountedLogVar (lvs1[k]) - && clauses[idx2]->isCountedLogVar (lvs2[k]) == false) { - clauses.push_back (new Clause (*clauses[idx2])); - clauses[idx2]->addPosCountedLogVar (lvs2[k]); - clauses.back()->addNegCountedLogVar (lvs2[k]); - return true; - } - if (clauses[idx2]->isCountedLogVar (lvs2[k]) - && clauses[idx1]->isCountedLogVar (lvs1[k]) == false) { - clauses.push_back (new Clause (*clauses[idx1])); - clauses[idx1]->addPosCountedLogVar (lvs1[k]); - clauses.back()->addNegCountedLogVar (lvs1[k]); - return true; - } - } - } - } - } - return false; -} - - - -bool -LiftedCircuit::independentClause ( - Clause& clause, - Clauses& otherClauses) const -{ - for (size_t i = 0; i < otherClauses.size(); i++) { - if (Clause::independentClauses (clause, *otherClauses[i]) == false) { - return false; - } - } - return true; -} - - - -bool -LiftedCircuit::independentLiteral ( - const Literal& lit, - const Literals& otherLits) const -{ - for (size_t i = 0; i < otherLits.size(); i++) { - if (lit.lid() == otherLits[i].lid() - || (lit.logVarSet() & otherLits[i].logVarSet()).empty() == false) { - return false; - } - } - return true; -} - - - -LitLvTypesSet -LiftedCircuit::smoothCircuit (CircuitNode* node) -{ - assert (node != 0); - LitLvTypesSet propagLits; - - switch (getCircuitNodeType (node)) { - - case CircuitNodeType::OR_NODE: { - OrNode* casted = dynamic_cast(node); - LitLvTypesSet lids1 = smoothCircuit (*casted->leftBranch()); - LitLvTypesSet lids2 = smoothCircuit (*casted->rightBranch()); - LitLvTypesSet missingLeft = lids2 - lids1; - LitLvTypesSet missingRight = lids1 - lids2; - createSmoothNode (missingLeft, casted->leftBranch()); - createSmoothNode (missingRight, casted->rightBranch()); - propagLits |= lids1; - 
propagLits |= lids2; - break; - } - - case CircuitNodeType::AND_NODE: { - AndNode* casted = dynamic_cast(node); - LitLvTypesSet lids1 = smoothCircuit (*casted->leftBranch()); - LitLvTypesSet lids2 = smoothCircuit (*casted->rightBranch()); - propagLits |= lids1; - propagLits |= lids2; - break; - } - - case CircuitNodeType::SET_OR_NODE: { - SetOrNode* casted = dynamic_cast(node); - propagLits = smoothCircuit (*casted->follow()); - TinySet> litSet; - for (size_t i = 0; i < propagLits.size(); i++) { - litSet.insert (make_pair (propagLits[i].lid(), - propagLits[i].logVarTypes().size())); - } - LitLvTypesSet missingLids; - for (size_t i = 0; i < litSet.size(); i++) { - vector allTypes = getAllPossibleTypes (litSet[i].second); - for (size_t j = 0; j < allTypes.size(); j++) { - bool typeFound = false; - for (size_t k = 0; k < propagLits.size(); k++) { - if (litSet[i].first == propagLits[k].lid() - && containsTypes (propagLits[k].logVarTypes(), allTypes[j])) { - typeFound = true; - break; - } - } - if (typeFound == false) { - missingLids.insert (LitLvTypes (litSet[i].first, allTypes[j])); - } - } - } - createSmoothNode (missingLids, casted->follow()); - // setAllFullLogVars() can cause repeated elements in - // the set. Fix this by reconstructing the set again - LitLvTypesSet copy = propagLits; - propagLits.clear(); - for (size_t i = 0; i < copy.size(); i++) { - copy[i].setAllFullLogVars(); - propagLits.insert (copy[i]); - } - break; - } - - case CircuitNodeType::SET_AND_NODE: { - SetAndNode* casted = dynamic_cast(node); - propagLits = smoothCircuit (*casted->follow()); - break; - } - - case CircuitNodeType::INC_EXC_NODE: { - IncExcNode* casted = dynamic_cast(node); - LitLvTypesSet lids1 = smoothCircuit (*casted->plus1Branch()); - LitLvTypesSet lids2 = smoothCircuit (*casted->plus2Branch()); - LitLvTypesSet missingPlus1 = lids2 - lids1; - LitLvTypesSet missingPlus2 = lids1 - lids2; - createSmoothNode (missingPlus1, casted->plus1Branch()); - createSmoothNode (missingPlus2, casted->plus2Branch()); - propagLits |= lids1; - propagLits |= lids2; - break; - } - - case CircuitNodeType::LEAF_NODE: { - LeafNode* casted = dynamic_cast(node); - propagLits.insert (LitLvTypes ( - casted->clause()->literals()[0].lid(), - casted->clause()->logVarTypes(0))); - } - - default: - break; - } - - return propagLits; -} - - - -void -LiftedCircuit::createSmoothNode ( - const LitLvTypesSet& missingLits, - CircuitNode** prev) -{ - if (missingLits.empty() == false) { - if (Globals::verbosity > 1) { - unordered_map::iterator it; - it = originClausesMap_.find (*prev); - if (it != originClausesMap_.end()) { - backupClauses_ = it->second; - } else { - backupClauses_ = Clause::copyClauses ( - {((dynamic_cast(*prev))->clause())}); - } - } - Clauses clauses; - for (size_t i = 0; i < missingLits.size(); i++) { - LiteralId lid = missingLits[i].lid(); - const LogVarTypes& types = missingLits[i].logVarTypes(); - Clause* c = lwcnf_->createClause (lid); - for (size_t j = 0; j < types.size(); j++) { - LogVar X = c->literals().front().logVars()[j]; - if (types[j] == LogVarType::POS_LV) { - c->addPosCountedLogVar (X); - } else if (types[j] == LogVarType::NEG_LV) { - c->addNegCountedLogVar (X); - } - } - c->addLiteralComplemented (c->literals()[0]); - clauses.push_back (c); - } - SmoothNode* smoothNode = new SmoothNode (clauses, *lwcnf_); - *prev = new AndNode (smoothNode, *prev); - if (Globals::verbosity > 1) { - originClausesMap_[*prev] = backupClauses_; - explanationMap_[*prev] = " Smoothing" ; - } - } -} - - - -vector 
-LiftedCircuit::getAllPossibleTypes (unsigned nrLogVars) const -{ - if (nrLogVars == 0) { - return {}; - } - if (nrLogVars == 1) { - return {{LogVarType::POS_LV},{LogVarType::NEG_LV}}; - } - vector res; - Ranges ranges (nrLogVars, 2); - Indexer indexer (ranges); - while (indexer.valid()) { - LogVarTypes types; - for (size_t i = 0; i < nrLogVars; i++) { - if (indexer[i] == 0) { - types.push_back (LogVarType::POS_LV); - } else { - types.push_back (LogVarType::NEG_LV); - } - } - res.push_back (types); - ++ indexer; - } - return res; -} - - - -bool -LiftedCircuit::containsTypes ( - const LogVarTypes& typesA, - const LogVarTypes& typesB) const -{ - for (size_t i = 0; i < typesA.size(); i++) { - if (typesA[i] == LogVarType::FULL_LV) { - - } else if (typesA[i] == LogVarType::POS_LV - && typesB[i] == LogVarType::POS_LV) { - - } else if (typesA[i] == LogVarType::NEG_LV - && typesB[i] == LogVarType::NEG_LV) { - - } else { - return false; - } - } - return true; -} - - - -CircuitNodeType -LiftedCircuit::getCircuitNodeType (const CircuitNode* node) const -{ - CircuitNodeType type; - if (dynamic_cast(node) != 0) { - type = CircuitNodeType::OR_NODE; - } else if (dynamic_cast(node) != 0) { - type = CircuitNodeType::AND_NODE; - } else if (dynamic_cast(node) != 0) { - type = CircuitNodeType::SET_OR_NODE; - } else if (dynamic_cast(node) != 0) { - type = CircuitNodeType::SET_AND_NODE; - } else if (dynamic_cast(node) != 0) { - type = CircuitNodeType::INC_EXC_NODE; - } else if (dynamic_cast(node) != 0) { - type = CircuitNodeType::LEAF_NODE; - } else if (dynamic_cast(node) != 0) { - type = CircuitNodeType::SMOOTH_NODE; - } else if (dynamic_cast(node) != 0) { - type = CircuitNodeType::TRUE_NODE; - } else if (dynamic_cast(node) != 0) { - type = CircuitNodeType::COMPILATION_FAILED_NODE; - } else { - assert (false); - } - return type; -} - - - -void -LiftedCircuit::exportToGraphViz (CircuitNode* node, ofstream& os) -{ - assert (node != 0); - - static unsigned nrAuxNodes = 0; - stringstream ss; - ss << "n" << nrAuxNodes; - string auxNode = ss.str(); - nrAuxNodes ++; - string opStyle = "shape=circle,width=0.7,margin=\"0.0,0.0\"," ; - - switch (getCircuitNodeType (node)) { - - case OR_NODE: { - OrNode* casted = dynamic_cast(node); - printClauses (casted, os); - - os << auxNode << " [" << opStyle << "label=\"∨\"]" << endl; - os << escapeNode (node) << " -> " << auxNode; - os << " [label=\"" << getExplanationString (node) << "\"]" ; - os << endl; - - os << auxNode << " -> " ; - os << escapeNode (*casted->leftBranch()); - os << " [label=\" " << (*casted->leftBranch())->weight() << "\"]" ; - os << endl; - - os << auxNode << " -> " ; - os << escapeNode (*casted->rightBranch()); - os << " [label=\" " << (*casted->rightBranch())->weight() << "\"]" ; - os << endl; - - exportToGraphViz (*casted->leftBranch(), os); - exportToGraphViz (*casted->rightBranch(), os); - break; - } - - case AND_NODE: { - AndNode* casted = dynamic_cast(node); - printClauses (casted, os); - - os << auxNode << " [" << opStyle << "label=\"∧\"]" << endl; - os << escapeNode (node) << " -> " << auxNode; - os << " [label=\"" << getExplanationString (node) << "\"]" ; - os << endl; - - os << auxNode << " -> " ; - os << escapeNode (*casted->leftBranch()); - os << " [label=\" " << (*casted->leftBranch())->weight() << "\"]" ; - os << endl; - - os << auxNode << " -> " ; - os << escapeNode (*casted->rightBranch()) << endl; - os << " [label=\" " << (*casted->rightBranch())->weight() << "\"]" ; - os << endl; - - exportToGraphViz (*casted->leftBranch(), os); - 
exportToGraphViz (*casted->rightBranch(), os); - break; - } - - case SET_OR_NODE: { - SetOrNode* casted = dynamic_cast(node); - printClauses (casted, os); - - os << auxNode << " [" << opStyle << "label=\"∨(X)\"]" << endl; - os << escapeNode (node) << " -> " << auxNode; - os << " [label=\"" << getExplanationString (node) << "\"]" ; - os << endl; - - os << auxNode << " -> " ; - os << escapeNode (*casted->follow()); - os << " [label=\" " << (*casted->follow())->weight() << "\"]" ; - os << endl; - - exportToGraphViz (*casted->follow(), os); - break; - } - - case SET_AND_NODE: { - SetAndNode* casted = dynamic_cast(node); - printClauses (casted, os); - - os << auxNode << " [" << opStyle << "label=\"∧(X)\"]" << endl; - os << escapeNode (node) << " -> " << auxNode; - os << " [label=\"" << getExplanationString (node) << "\"]" ; - os << endl; - - os << auxNode << " -> " ; - os << escapeNode (*casted->follow()); - os << " [label=\" " << (*casted->follow())->weight() << "\"]" ; - os << endl; - - exportToGraphViz (*casted->follow(), os); - break; - } - - case INC_EXC_NODE: { - IncExcNode* casted = dynamic_cast(node); - printClauses (casted, os); - - os << auxNode << " [" << opStyle << "label=\"+ - +\"]" ; - os << endl; - os << escapeNode (node) << " -> " << auxNode; - os << " [label=\"" << getExplanationString (node) << "\"]" ; - os << endl; - - os << auxNode << " -> " ; - os << escapeNode (*casted->plus1Branch()); - os << " [label=\" " << (*casted->plus1Branch())->weight() << "\"]" ; - os << endl; - - os << auxNode << " -> " ; - os << escapeNode (*casted->minusBranch()) << endl; - os << " [label=\" " << (*casted->minusBranch())->weight() << "\"]" ; - os << endl; - - os << auxNode << " -> " ; - os << escapeNode (*casted->plus2Branch()); - os << " [label=\" " << (*casted->plus2Branch())->weight() << "\"]" ; - os << endl; - - exportToGraphViz (*casted->plus1Branch(), os); - exportToGraphViz (*casted->plus2Branch(), os); - exportToGraphViz (*casted->minusBranch(), os); - break; - } - - case LEAF_NODE: { - printClauses (node, os, "style=filled,fillcolor=palegreen,"); - break; - } - - case SMOOTH_NODE: { - printClauses (node, os, "style=filled,fillcolor=lightblue,"); - break; - } - - case TRUE_NODE: { - os << escapeNode (node); - os << " [shape=box,label=\"⊤\"]" ; - os << endl; - break; - } - - case COMPILATION_FAILED_NODE: { - printClauses (node, os, "style=filled,fillcolor=salmon,"); - break; - } - - default: - assert (false); - } -} - - - -string -LiftedCircuit::escapeNode (const CircuitNode* node) const -{ - stringstream ss; - ss << "\"" << node << "\"" ; - return ss.str(); -} - - - -string -LiftedCircuit::getExplanationString (CircuitNode* node) -{ - return Util::contains (explanationMap_, node) - ? 
explanationMap_[node] - : "" ; -} - - - -void -LiftedCircuit::printClauses ( - CircuitNode* node, - ofstream& os, - string extraOptions) -{ - Clauses clauses; - if (Util::contains (originClausesMap_, node)) { - clauses = originClausesMap_[node]; - } else if (getCircuitNodeType (node) == CircuitNodeType::LEAF_NODE) { - clauses = { (dynamic_cast(node))->clause() } ; - } else if (getCircuitNodeType (node) == CircuitNodeType::SMOOTH_NODE) { - clauses = (dynamic_cast(node))->clauses(); - } - assert (clauses.empty() == false); - os << escapeNode (node); - os << " [shape=box," << extraOptions << "label=\"" ; - for (size_t i = 0; i < clauses.size(); i++) { - if (i != 0) os << "\\n" ; - os << *clauses[i]; - } - os << "\"]" ; - os << endl; -} - diff --git a/packages/CLPBN/horus/LiftedCircuit.h b/packages/CLPBN/horus/LiftedCircuit.h deleted file mode 100644 index 3acdf7285..000000000 --- a/packages/CLPBN/horus/LiftedCircuit.h +++ /dev/null @@ -1,262 +0,0 @@ -#ifndef HORUS_LIFTEDCIRCUIT_H -#define HORUS_LIFTEDCIRCUIT_H - -#include - -#include "LiftedWCNF.h" - - -enum CircuitNodeType { - OR_NODE, - AND_NODE, - SET_OR_NODE, - SET_AND_NODE, - INC_EXC_NODE, - LEAF_NODE, - SMOOTH_NODE, - TRUE_NODE, - COMPILATION_FAILED_NODE -}; - - - -class CircuitNode -{ - public: - CircuitNode (void) { } - - virtual double weight (void) const = 0; -}; - - - -class OrNode : public CircuitNode -{ - public: - OrNode (void) : CircuitNode(), leftBranch_(0), rightBranch_(0) { } - - CircuitNode** leftBranch (void) { return &leftBranch_; } - CircuitNode** rightBranch (void) { return &rightBranch_; } - - double weight (void) const; - - private: - CircuitNode* leftBranch_; - CircuitNode* rightBranch_; -}; - - - -class AndNode : public CircuitNode -{ - public: - AndNode (void) : CircuitNode(), leftBranch_(0), rightBranch_(0) { } - - AndNode (CircuitNode* leftBranch, CircuitNode* rightBranch) - : CircuitNode(), leftBranch_(leftBranch), rightBranch_(rightBranch) { } - - CircuitNode** leftBranch (void) { return &leftBranch_; } - CircuitNode** rightBranch (void) { return &rightBranch_; } - - double weight (void) const; - - private: - CircuitNode* leftBranch_; - CircuitNode* rightBranch_; -}; - - - -class SetOrNode : public CircuitNode -{ - public: - SetOrNode (unsigned nrGroundings) - : CircuitNode(), follow_(0), nrGroundings_(nrGroundings) { } - - CircuitNode** follow (void) { return &follow_; } - - static unsigned nrPositives (void) { return nrPos_; } - - static unsigned nrNegatives (void) { return nrNeg_; } - - static bool isSet (void) { return nrPos_ >= 0; } - - double weight (void) const; - - private: - CircuitNode* follow_; - unsigned nrGroundings_; - static int nrPos_; - static int nrNeg_; -}; - - - -class SetAndNode : public CircuitNode -{ - public: - SetAndNode (unsigned nrGroundings) - : CircuitNode(), follow_(0), nrGroundings_(nrGroundings) { } - - CircuitNode** follow (void) { return &follow_; } - - double weight (void) const; - - private: - CircuitNode* follow_; - unsigned nrGroundings_; -}; - - - -class IncExcNode : public CircuitNode -{ - public: - IncExcNode (void) - : CircuitNode(), plus1Branch_(0), plus2Branch_(0), minusBranch_(0) { } - - CircuitNode** plus1Branch (void) { return &plus1Branch_; } - CircuitNode** plus2Branch (void) { return &plus2Branch_; } - CircuitNode** minusBranch (void) { return &minusBranch_; } - - double weight (void) const; - - private: - CircuitNode* plus1Branch_; - CircuitNode* plus2Branch_; - CircuitNode* minusBranch_; -}; - - - -class LeafNode : public CircuitNode -{ - public: - LeafNode 
(Clause* clause, const LiftedWCNF& lwcnf) - : CircuitNode(), clause_(clause), lwcnf_(lwcnf) { } - - const Clause* clause (void) const { return clause_; } - - Clause* clause (void) { return clause_; } - - double weight (void) const; - - private: - Clause* clause_; - const LiftedWCNF& lwcnf_; -}; - - - -class SmoothNode : public CircuitNode -{ - public: - SmoothNode (const Clauses& clauses, const LiftedWCNF& lwcnf) - : CircuitNode(), clauses_(clauses), lwcnf_(lwcnf) { } - - const Clauses& clauses (void) const { return clauses_; } - - Clauses clauses (void) { return clauses_; } - - double weight (void) const; - - private: - Clauses clauses_; - const LiftedWCNF& lwcnf_; -}; - - - -class TrueNode : public CircuitNode -{ - public: - TrueNode (void) : CircuitNode() { } - - double weight (void) const; -}; - - - -class CompilationFailedNode : public CircuitNode -{ - public: - CompilationFailedNode (void) : CircuitNode() { } - - double weight (void) const; -}; - - - -class LiftedCircuit -{ - public: - LiftedCircuit (const LiftedWCNF* lwcnf); - - bool isCompilationSucceeded (void) const; - - double getWeightedModelCount (void) const; - - void exportToGraphViz (const char*); - - private: - - void compile (CircuitNode** follow, Clauses& clauses); - - bool tryUnitPropagation (CircuitNode** follow, Clauses& clauses); - - bool tryIndependence (CircuitNode** follow, Clauses& clauses); - - bool tryShannonDecomp (CircuitNode** follow, Clauses& clauses); - - bool tryInclusionExclusion (CircuitNode** follow, Clauses& clauses); - - bool tryIndepPartialGrounding (CircuitNode** follow, Clauses& clauses); - - bool tryIndepPartialGroundingAux (Clauses& clauses, ConstraintTree& ct, - LogVars& rootLogVars); - - bool tryAtomCounting (CircuitNode** follow, Clauses& clauses); - - void shatterCountedLogVars (Clauses& clauses); - - bool shatterCountedLogVarsAux (Clauses& clauses); - - bool shatterCountedLogVarsAux (Clauses& clauses, size_t idx1, size_t idx2); - - bool independentClause (Clause& clause, Clauses& otherClauses) const; - - bool independentLiteral (const Literal& lit, - const Literals& otherLits) const; - - LitLvTypesSet smoothCircuit (CircuitNode* node); - - void createSmoothNode (const LitLvTypesSet& lids, - CircuitNode** prev); - - vector getAllPossibleTypes (unsigned nrLogVars) const; - - bool containsTypes (const LogVarTypes& typesA, - const LogVarTypes& typesB) const; - - CircuitNodeType getCircuitNodeType (const CircuitNode* node) const; - - void exportToGraphViz (CircuitNode* node, ofstream&); - - void printClauses (CircuitNode* node, ofstream&, - string extraOptions = ""); - - string escapeNode (const CircuitNode* node) const; - - string getExplanationString (CircuitNode* node); - - CircuitNode* root_; - const LiftedWCNF* lwcnf_; - - Clauses backupClauses_; - unordered_map originClausesMap_; - unordered_map explanationMap_; - bool compilationSucceeded_; -}; - -#endif // HORUS_LIFTEDCIRCUIT_H - diff --git a/packages/CLPBN/horus/LiftedKc.cpp b/packages/CLPBN/horus/LiftedKc.cpp index d9560f6df..c366c282d 100644 --- a/packages/CLPBN/horus/LiftedKc.cpp +++ b/packages/CLPBN/horus/LiftedKc.cpp @@ -1,10 +1,1239 @@ +#include + #include "LiftedKc.h" -#include "LiftedWCNF.h" -#include "LiftedCircuit.h" #include "LiftedOperations.h" #include "Indexer.h" +OrNode::~OrNode (void) +{ + delete leftBranch_; + delete rightBranch_; +} + + + +double +OrNode::weight (void) const +{ + double lw = leftBranch_->weight(); + double rw = rightBranch_->weight(); + return Globals::logDomain ? 
Util::logSum (lw, rw) : lw + rw; +} + + + +AndNode::~AndNode (void) +{ + delete leftBranch_; + delete rightBranch_; +} + + + +double +AndNode::weight (void) const +{ + double lw = leftBranch_->weight(); + double rw = rightBranch_->weight(); + return Globals::logDomain ? lw + rw : lw * rw; +} + + + +int SetOrNode::nrPos_ = -1; +int SetOrNode::nrNeg_ = -1; + + + +SetOrNode::~SetOrNode (void) +{ + delete follow_; +} + + + +double +SetOrNode::weight (void) const +{ + double weightSum = LogAware::addIdenty(); + for (unsigned i = 0; i < nrGroundings_ + 1; i++) { + nrPos_ = nrGroundings_ - i; + nrNeg_ = i; + if (Globals::logDomain) { + double nrCombs = Util::nrCombinations (nrGroundings_, i); + double w = follow_->weight(); + weightSum = Util::logSum (weightSum, std::log (nrCombs) + w); + } else { + double w = follow_->weight(); + weightSum += Util::nrCombinations (nrGroundings_, i) * w; + } + } + nrPos_ = -1; + nrNeg_ = -1; + return weightSum; +} + + + +SetAndNode::~SetAndNode (void) +{ + delete follow_; +} + + + +double +SetAndNode::weight (void) const +{ + return LogAware::pow (follow_->weight(), nrGroundings_); +} + + + +IncExcNode::~IncExcNode (void) +{ + delete plus1Branch_; + delete plus2Branch_; + delete minusBranch_; +} + + + +double +IncExcNode::weight (void) const +{ + double w = 0.0; + if (Globals::logDomain) { + w = Util::logSum (plus1Branch_->weight(), plus2Branch_->weight()); + w = std::log (std::exp (w) - std::exp (minusBranch_->weight())); + } else { + w = plus1Branch_->weight() + plus2Branch_->weight(); + w -= minusBranch_->weight(); + } + return w; +} + + + +LeafNode::~LeafNode (void) +{ + delete clause_; +} + + + +double +LeafNode::weight (void) const +{ + assert (clause_->isUnit()); + if (clause_->posCountedLogVars().empty() == false + || clause_->negCountedLogVars().empty() == false) { + if (SetOrNode::isSet() == false) { + // return a NaN if we have a SetOrNode + // ancester that is not set. This can only + // happen when calculating the weights + // for the edge labels in graphviz + return 0.0 / 0.0; + } + } + double weight = clause_->literals()[0].isPositive() + ? 
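OrNode::weight() and AndNode::weight() above switch between linear and log-space arithmetic on Globals::logDomain. A self-contained sketch of the log-sum-exp that Util::logSum is assumed to implement, and of why products become plain additions in the log domain:

#include <algorithm>
#include <cmath>
#include <iostream>

// log(exp(x) + exp(y)) computed without overflowing exp()
static double
logSum (double x, double y)
{
  double m = std::max (x, y);
  return m + std::log (std::exp (x - m) + std::exp (y - m));
}

int main (void)
{
  double lw = std::log (0.3);
  double rw = std::log (0.2);

  double orWeight  = logSum (lw, rw);   // disjunction: weights add
  double andWeight = lw + rw;           // conjunction: weights multiply

  std::cout << std::exp (orWeight)  << std::endl;   // 0.5
  std::cout << std::exp (andWeight) << std::endl;   // 0.06
  return 0;
}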
lwcnf_.posWeight (clause_->literals().front().lid()) + : lwcnf_.negWeight (clause_->literals().front().lid()); + LogVarSet lvs = clause_->constr().logVarSet(); + lvs -= clause_->ipgLogVars(); + lvs -= clause_->posCountedLogVars(); + lvs -= clause_->negCountedLogVars(); + unsigned nrGroundings = 1; + if (lvs.empty() == false) { + nrGroundings = clause_->constr().projectedCopy (lvs).size(); + } + if (clause_->posCountedLogVars().empty() == false) { + nrGroundings *= std::pow (SetOrNode::nrPositives(), + clause_->nrPosCountedLogVars()); + } + if (clause_->negCountedLogVars().empty() == false) { + nrGroundings *= std::pow (SetOrNode::nrNegatives(), + clause_->nrNegCountedLogVars()); + } + return LogAware::pow (weight, nrGroundings); +} + + + +SmoothNode::~SmoothNode (void) +{ + Clause::deleteClauses (clauses_); +} + + + +double +SmoothNode::weight (void) const +{ + Clauses cs = clauses(); + double totalWeight = LogAware::multIdenty(); + for (size_t i = 0; i < cs.size(); i++) { + double posWeight = lwcnf_.posWeight (cs[i]->literals()[0].lid()); + double negWeight = lwcnf_.negWeight (cs[i]->literals()[0].lid()); + LogVarSet lvs = cs[i]->constr().logVarSet(); + lvs -= cs[i]->ipgLogVars(); + lvs -= cs[i]->posCountedLogVars(); + lvs -= cs[i]->negCountedLogVars(); + unsigned nrGroundings = 1; + if (lvs.empty() == false) { + nrGroundings = cs[i]->constr().projectedCopy (lvs).size(); + } + if (cs[i]->posCountedLogVars().empty() == false) { + nrGroundings *= std::pow (SetOrNode::nrPositives(), + cs[i]->nrPosCountedLogVars()); + } + if (cs[i]->negCountedLogVars().empty() == false) { + nrGroundings *= std::pow (SetOrNode::nrNegatives(), + cs[i]->nrNegCountedLogVars()); + } + if (Globals::logDomain) { + totalWeight += Util::logSum (posWeight, negWeight) * nrGroundings; + } else { + totalWeight *= std::pow (posWeight + negWeight, nrGroundings); + } + } + return totalWeight; +} + + + +double +TrueNode::weight (void) const +{ + return LogAware::multIdenty(); +} + + + +double +CompilationFailedNode::weight (void) const +{ + // weighted model counting in compilation + // failed nodes should give NaN + return 0.0 / 0.0; +} + + + +LiftedCircuit::LiftedCircuit (const LiftedWCNF* lwcnf) + : lwcnf_(lwcnf) +{ + root_ = 0; + compilationSucceeded_ = true; + Clauses clauses = Clause::copyClauses (lwcnf->clauses()); + compile (&root_, clauses); + if (compilationSucceeded_) { + smoothCircuit (root_); + } + if (Globals::verbosity > 1) { + if (compilationSucceeded_) { + double wmc = LogAware::exp (getWeightedModelCount()); + cout << "Weighted model count = " << wmc << endl << endl; + } + cout << "Exporting circuit to graphviz (circuit.dot)..." ; + cout << endl << endl; + exportToGraphViz ("circuit.dot"); + } +} + + + +LiftedCircuit::~LiftedCircuit (void) +{ + delete root_; + unordered_map::iterator it; + it = originClausesMap_.begin(); + while (it != originClausesMap_.end()) { + Clause::deleteClauses (it->second); + ++ it; + } +} + + + +bool +LiftedCircuit::isCompilationSucceeded (void) const +{ + return compilationSucceeded_; +} + + + +double +LiftedCircuit::getWeightedModelCount (void) const +{ + assert (compilationSucceeded_); + return root_->weight(); +} + + + +void +LiftedCircuit::exportToGraphViz (const char* fileName) +{ + ofstream out (fileName); + if (!out.is_open()) { + cerr << "Error: couldn't open file '" << fileName << "'." 
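The destructors introduced above (each inner node deleting its branches, plus ~LiftedCircuit deleting the root and the backed-up clauses) mean a single delete of the root releases the whole circuit. A toy sketch of that ownership scheme; the Node, Leaf and And classes below are illustrative, not the Horus circuit nodes:

#include <iostream>

class Node
{
  public:
    virtual ~Node (void) { }
    virtual double weight (void) const = 0;
};

class Leaf : public Node
{
  public:
    explicit Leaf (double w) : w_(w) { }
    double weight (void) const { return w_; }
  private:
    double w_;
};

class And : public Node
{
  public:
    And (Node* leftBranch, Node* rightBranch)
        : leftBranch_(leftBranch), rightBranch_(rightBranch) { }

    // owns both branches: deleting the root frees the whole tree
    ~And (void) { delete leftBranch_; delete rightBranch_; }

    double weight (void) const
    {
      return leftBranch_->weight() * rightBranch_->weight();
    }

  private:
    Node* leftBranch_;
    Node* rightBranch_;
};

int main (void)
{
  Node* root = new And (new Leaf (0.3), new Leaf (0.2));
  std::cout << root->weight() << std::endl;   // 0.06
  delete root;                                // recursively releases all nodes
  return 0;
}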
; + return; + } + out << "digraph {" << endl; + out << "ranksep=1" << endl; + exportToGraphViz (root_, out); + out << "}" << endl; + out.close(); +} + + + +void +LiftedCircuit::compile ( + CircuitNode** follow, + Clauses& clauses) +{ + if (compilationSucceeded_ == false + && Globals::verbosity <= 1) { + return; + } + + if (clauses.empty()) { + *follow = new TrueNode(); + return; + } + + if (clauses.size() == 1 && clauses[0]->isUnit()) { + *follow = new LeafNode (clauses[0], *lwcnf_); + return; + } + + if (tryUnitPropagation (follow, clauses)) { + return; + } + + if (tryIndependence (follow, clauses)) { + return; + } + + if (tryShannonDecomp (follow, clauses)) { + return; + } + + if (tryInclusionExclusion (follow, clauses)) { + return; + } + + if (tryIndepPartialGrounding (follow, clauses)) { + return; + } + + if (tryAtomCounting (follow, clauses)) { + return; + } + + *follow = new CompilationFailedNode(); + if (Globals::verbosity > 1) { + originClausesMap_[*follow] = clauses; + explanationMap_[*follow] = "" ; + } + compilationSucceeded_ = false; +} + + + +bool +LiftedCircuit::tryUnitPropagation ( + CircuitNode** follow, + Clauses& clauses) +{ + if (Globals::verbosity > 1) { + backupClauses_ = Clause::copyClauses (clauses); + } + for (size_t i = 0; i < clauses.size(); i++) { + if (clauses[i]->isUnit()) { + Clauses propagClauses; + for (size_t j = 0; j < clauses.size(); j++) { + if (i != j) { + LiteralId lid = clauses[i]->literals()[0].lid(); + LogVarTypes types = clauses[i]->logVarTypes (0); + if (clauses[i]->literals()[0].isPositive()) { + if (clauses[j]->containsPositiveLiteral (lid, types) == false) { + clauses[j]->removeNegativeLiterals (lid, types); + if (clauses[j]->nrLiterals() > 0) { + propagClauses.push_back (clauses[j]); + } else { + delete clauses[j]; + } + } else { + delete clauses[j]; + } + } else if (clauses[i]->literals()[0].isNegative()) { + if (clauses[j]->containsNegativeLiteral (lid, types) == false) { + clauses[j]->removePositiveLiterals (lid, types); + if (clauses[j]->nrLiterals() > 0) { + propagClauses.push_back (clauses[j]); + } else { + delete clauses[j]; + } + } else { + delete clauses[j]; + } + } + } + } + + AndNode* andNode = new AndNode(); + if (Globals::verbosity > 1) { + originClausesMap_[andNode] = backupClauses_; + stringstream explanation; + explanation << " UP on " << clauses[i]->literals()[0]; + explanationMap_[andNode] = explanation.str(); + } + + Clauses unitClause = { clauses[i] }; + compile (andNode->leftBranch(), unitClause); + compile (andNode->rightBranch(), propagClauses); + (*follow) = andNode; + return true; + } + } + if (Globals::verbosity > 1) { + Clause::deleteClauses (backupClauses_); + } + return false; +} + + + +bool +LiftedCircuit::tryIndependence ( + CircuitNode** follow, + Clauses& clauses) +{ + if (clauses.size() == 1) { + return false; + } + if (Globals::verbosity > 1) { + backupClauses_ = Clause::copyClauses (clauses); + } + Clauses depClauses = { clauses[0] }; + Clauses indepClauses (clauses.begin() + 1, clauses.end()); + bool finish = false; + while (finish == false) { + finish = true; + for (size_t i = 0; i < indepClauses.size(); i++) { + if (independentClause (*indepClauses[i], depClauses) == false) { + depClauses.push_back (indepClauses[i]); + indepClauses.erase (indepClauses.begin() + i); + finish = false; + break; + } + } + } + if (indepClauses.empty() == false) { + AndNode* andNode = new AndNode (); + if (Globals::verbosity > 1) { + originClausesMap_[andNode] = backupClauses_; + explanationMap_[andNode] = " Independence" ; + } 
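tryUnitPropagation() above (and the rules that follow) now call Clause::deleteClauses (backupClauses_) before returning false, so the debugging copies taken at verbosity > 1 no longer leak when a rule does not apply. A reduced sketch of that pattern; Clause and tryRule below are stubs, not the Horus code:

#include <iostream>
#include <vector>

struct Clause { int id; };

typedef std::vector<Clause*> Clauses;

static void
deleteClauses (Clauses& clauses)
{
  for (size_t i = 0; i < clauses.size(); i++) {
    delete clauses[i];
  }
  clauses.clear();
}

// Returns true when the (fake) rule applies; the copies kept for debugging
// are released on the failure path as well, mirroring the added cleanup.
static bool
tryRule (bool verbose, bool applicable)
{
  Clauses backupClauses;
  if (verbose) {
    backupClauses.push_back (new Clause());   // copy kept only to explain the step
  }
  if (applicable) {
    deleteClauses (backupClauses);   // in Horus, ownership moves to a map instead
    return true;
  }
  deleteClauses (backupClauses);     // the cleanup added on the failure path
  return false;
}

int main (void)
{
  std::cout << tryRule (true, false) << std::endl;   // prints 0, and nothing leaks
  return 0;
}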
+ compile (andNode->leftBranch(), depClauses); + compile (andNode->rightBranch(), indepClauses); + (*follow) = andNode; + return true; + } + if (Globals::verbosity > 1) { + Clause::deleteClauses (backupClauses_); + } + return false; +} + + + +bool +LiftedCircuit::tryShannonDecomp ( + CircuitNode** follow, + Clauses& clauses) +{ + if (Globals::verbosity > 1) { + backupClauses_ = Clause::copyClauses (clauses); + } + for (size_t i = 0; i < clauses.size(); i++) { + const Literals& literals = clauses[i]->literals(); + for (size_t j = 0; j < literals.size(); j++) { + if (literals[j].isGround ( + clauses[i]->constr(), clauses[i]->ipgLogVars())) { + + Clause* c1 = lwcnf_->createClause (literals[j].lid()); + Clause* c2 = new Clause (*c1); + c2->literals().front().complement(); + + Clauses otherClauses = Clause::copyClauses (clauses); + clauses.push_back (c1); + otherClauses.push_back (c2); + + OrNode* orNode = new OrNode(); + if (Globals::verbosity > 1) { + originClausesMap_[orNode] = backupClauses_; + stringstream explanation; + explanation << " SD on " << literals[j]; + explanationMap_[orNode] = explanation.str(); + } + + compile (orNode->leftBranch(), clauses); + compile (orNode->rightBranch(), otherClauses); + (*follow) = orNode; + return true; + } + } + } + if (Globals::verbosity > 1) { + Clause::deleteClauses (backupClauses_); + } + return false; +} + + + +bool +LiftedCircuit::tryInclusionExclusion ( + CircuitNode** follow, + Clauses& clauses) +{ + if (Globals::verbosity > 1) { + backupClauses_ = Clause::copyClauses (clauses); + } + for (size_t i = 0; i < clauses.size(); i++) { + Literals depLits = { clauses[i]->literals().front() }; + Literals indepLits (clauses[i]->literals().begin() + 1, + clauses[i]->literals().end()); + bool finish = false; + while (finish == false) { + finish = true; + for (size_t j = 0; j < indepLits.size(); j++) { + if (independentLiteral (indepLits[j], depLits) == false) { + depLits.push_back (indepLits[j]); + indepLits.erase (indepLits.begin() + j); + finish = false; + break; + } + } + } + if (indepLits.empty() == false) { + LogVarSet lvs1; + for (size_t j = 0; j < depLits.size(); j++) { + lvs1 |= depLits[j].logVarSet(); + } + if (clauses[i]->constr().isCountNormalized (lvs1) == false) { + break; + } + LogVarSet lvs2; + for (size_t j = 0; j < indepLits.size(); j++) { + lvs2 |= indepLits[j].logVarSet(); + } + if (clauses[i]->constr().isCountNormalized (lvs2) == false) { + break; + } + Clause* c1 = new Clause (clauses[i]->constr().projectedCopy (lvs1)); + for (size_t j = 0; j < depLits.size(); j++) { + c1->addLiteral (depLits[j]); + } + Clause* c2 = new Clause (clauses[i]->constr().projectedCopy (lvs2)); + for (size_t j = 0; j < indepLits.size(); j++) { + c2->addLiteral (indepLits[j]); + } + + clauses.erase (clauses.begin() + i); + Clauses plus1Clauses = Clause::copyClauses (clauses); + Clauses plus2Clauses = Clause::copyClauses (clauses); + + plus1Clauses.push_back (c1); + plus2Clauses.push_back (c2); + clauses.push_back (c1); + clauses.push_back (c2); + + IncExcNode* ieNode = new IncExcNode(); + if (Globals::verbosity > 1) { + originClausesMap_[ieNode] = backupClauses_; + stringstream explanation; + explanation << " IncExc on clause nº " << i + 1; + explanationMap_[ieNode] = explanation.str(); + } + compile (ieNode->plus1Branch(), plus1Clauses); + compile (ieNode->plus2Branch(), plus2Clauses); + compile (ieNode->minusBranch(), clauses); + *follow = ieNode; + return true; + } + } + if (Globals::verbosity > 1) { + Clause::deleteClauses (backupClauses_); + } + return 
false; +} + + + +bool +LiftedCircuit::tryIndepPartialGrounding ( + CircuitNode** follow, + Clauses& clauses) +{ + // assumes that all literals have logical variables + // else, shannon decomp was possible + if (Globals::verbosity > 1) { + backupClauses_ = Clause::copyClauses (clauses); + } + LogVars rootLogVars; + LogVarSet lvs = clauses[0]->ipgCandidates(); + for (size_t i = 0; i < lvs.size(); i++) { + rootLogVars.clear(); + rootLogVars.push_back (lvs[i]); + ConstraintTree ct = clauses[0]->constr().projectedCopy ({lvs[i]}); + if (tryIndepPartialGroundingAux (clauses, ct, rootLogVars)) { + for (size_t j = 0; j < clauses.size(); j++) { + clauses[j]->addIpgLogVar (rootLogVars[j]); + } + SetAndNode* setAndNode = new SetAndNode (ct.size()); + if (Globals::verbosity > 1) { + originClausesMap_[setAndNode] = backupClauses_; + explanationMap_[setAndNode] = " IPG" ; + } + *follow = setAndNode; + compile (setAndNode->follow(), clauses); + return true; + } + } + if (Globals::verbosity > 1) { + Clause::deleteClauses (backupClauses_); + } + return false; +} + + + +bool +LiftedCircuit::tryIndepPartialGroundingAux ( + Clauses& clauses, + ConstraintTree& ct, + LogVars& rootLogVars) +{ + for (size_t i = 1; i < clauses.size(); i++) { + LogVarSet lvs = clauses[i]->ipgCandidates(); + for (size_t j = 0; j < lvs.size(); j++) { + ConstraintTree ct2 = clauses[i]->constr().projectedCopy ({lvs[j]}); + if (ct.tupleSet() == ct2.tupleSet()) { + rootLogVars.push_back (lvs[j]); + break; + } + } + if (rootLogVars.size() != i + 1) { + return false; + } + } + // verifies if the IPG logical vars appear in the same positions + unordered_map positions; + for (size_t i = 0; i < clauses.size(); i++) { + const Literals& literals = clauses[i]->literals(); + for (size_t j = 0; j < literals.size(); j++) { + size_t idx = literals[j].indexOfLogVar (rootLogVars[i]); + assert (idx != literals[j].nrLogVars()); + unordered_map::iterator it; + it = positions.find (literals[j].lid()); + if (it != positions.end()) { + if (it->second != idx) { + return false; + } + } else { + positions[literals[j].lid()] = idx; + } + } + } + return true; +} + + + +bool +LiftedCircuit::tryAtomCounting ( + CircuitNode** follow, + Clauses& clauses) +{ + for (size_t i = 0; i < clauses.size(); i++) { + if (clauses[i]->nrPosCountedLogVars() > 0 + || clauses[i]->nrNegCountedLogVars() > 0) { + // only allow one atom counting node per branch + return false; + } + } + if (Globals::verbosity > 1) { + backupClauses_ = Clause::copyClauses (clauses); + } + for (size_t i = 0; i < clauses.size(); i++) { + Literals literals = clauses[i]->literals(); + for (size_t j = 0; j < literals.size(); j++) { + if (literals[j].nrLogVars() == 1 + && ! clauses[i]->isIpgLogVar (literals[j].logVars().front()) + && ! 
clauses[i]->isCountedLogVar (literals[j].logVars().front())) { + unsigned nrGroundings = clauses[i]->constr().projectedCopy ( + literals[j].logVars()).size(); + SetOrNode* setOrNode = new SetOrNode (nrGroundings); + if (Globals::verbosity > 1) { + originClausesMap_[setOrNode] = backupClauses_; + explanationMap_[setOrNode] = " AC" ; + } + Clause* c1 = new Clause ( + clauses[i]->constr().projectedCopy (literals[j].logVars())); + Clause* c2 = new Clause ( + clauses[i]->constr().projectedCopy (literals[j].logVars())); + c1->addLiteral (literals[j]); + c2->addLiteralComplemented (literals[j]); + c1->addPosCountedLogVar (literals[j].logVars().front()); + c2->addNegCountedLogVar (literals[j].logVars().front()); + clauses.push_back (c1); + clauses.push_back (c2); + shatterCountedLogVars (clauses); + compile (setOrNode->follow(), clauses); + *follow = setOrNode; + return true; + } + } + } + if (Globals::verbosity > 1) { + Clause::deleteClauses (backupClauses_); + } + return false; +} + + + +void +LiftedCircuit::shatterCountedLogVars (Clauses& clauses) +{ + while (shatterCountedLogVarsAux (clauses)) ; +} + + + +bool +LiftedCircuit::shatterCountedLogVarsAux (Clauses& clauses) +{ + for (size_t i = 0; i < clauses.size() - 1; i++) { + for (size_t j = i + 1; j < clauses.size(); j++) { + bool splitedSome = shatterCountedLogVarsAux (clauses, i, j); + if (splitedSome) { + return true; + } + } + } + return false; +} + + + +bool +LiftedCircuit::shatterCountedLogVarsAux ( + Clauses& clauses, + size_t idx1, + size_t idx2) +{ + Literals lits1 = clauses[idx1]->literals(); + Literals lits2 = clauses[idx2]->literals(); + for (size_t i = 0; i < lits1.size(); i++) { + for (size_t j = 0; j < lits2.size(); j++) { + if (lits1[i].lid() == lits2[j].lid()) { + LogVars lvs1 = lits1[i].logVars(); + LogVars lvs2 = lits2[j].logVars(); + for (size_t k = 0; k < lvs1.size(); k++) { + if (clauses[idx1]->isCountedLogVar (lvs1[k]) + && clauses[idx2]->isCountedLogVar (lvs2[k]) == false) { + clauses.push_back (new Clause (*clauses[idx2])); + clauses[idx2]->addPosCountedLogVar (lvs2[k]); + clauses.back()->addNegCountedLogVar (lvs2[k]); + return true; + } + if (clauses[idx2]->isCountedLogVar (lvs2[k]) + && clauses[idx1]->isCountedLogVar (lvs1[k]) == false) { + clauses.push_back (new Clause (*clauses[idx1])); + clauses[idx1]->addPosCountedLogVar (lvs1[k]); + clauses.back()->addNegCountedLogVar (lvs1[k]); + return true; + } + } + } + } + } + return false; +} + + + +bool +LiftedCircuit::independentClause ( + Clause& clause, + Clauses& otherClauses) const +{ + for (size_t i = 0; i < otherClauses.size(); i++) { + if (Clause::independentClauses (clause, *otherClauses[i]) == false) { + return false; + } + } + return true; +} + + + +bool +LiftedCircuit::independentLiteral ( + const Literal& lit, + const Literals& otherLits) const +{ + for (size_t i = 0; i < otherLits.size(); i++) { + if (lit.lid() == otherLits[i].lid() + || (lit.logVarSet() & otherLits[i].logVarSet()).empty() == false) { + return false; + } + } + return true; +} + + + +LitLvTypesSet +LiftedCircuit::smoothCircuit (CircuitNode* node) +{ + assert (node); + LitLvTypesSet propagLits; + + switch (getCircuitNodeType (node)) { + + case CircuitNodeType::OR_NODE: { + OrNode* casted = dynamic_cast(node); + LitLvTypesSet lids1 = smoothCircuit (*casted->leftBranch()); + LitLvTypesSet lids2 = smoothCircuit (*casted->rightBranch()); + LitLvTypesSet missingLeft = lids2 - lids1; + LitLvTypesSet missingRight = lids1 - lids2; + createSmoothNode (missingLeft, casted->leftBranch()); + 
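The two "missing" sets computed just above are plain set differences between the literal/type pairs reachable from each OR branch, so smoothing only injects into a branch the atoms that the other branch already mentions. A small standalone sketch, separate from the patch, with bare integer literal ids standing in for LitLvTypesSet elements:

// Sketch only: the symmetric "missing on each side" computation.
#include <algorithm>
#include <iostream>
#include <iterator>
#include <set>
#include <vector>

int
main (void)
{
  std::set<int> lids1 = { 1, 2, 3 };   // literals reachable from the left branch
  std::set<int> lids2 = { 2, 3, 4 };   // literals reachable from the right branch

  std::vector<int> missingLeft;
  std::vector<int> missingRight;
  std::set_difference (lids2.begin(), lids2.end(), lids1.begin(), lids1.end(),
      std::back_inserter (missingLeft));    // lids2 - lids1 = { 4 }
  std::set_difference (lids1.begin(), lids1.end(), lids2.begin(), lids2.end(),
      std::back_inserter (missingRight));   // lids1 - lids2 = { 1 }

  std::cout << missingLeft.size()  << " literal(s) to smooth into the left branch, "
            << missingRight.size() << " into the right branch" << std::endl;
  return 0;
}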
createSmoothNode (missingRight, casted->rightBranch()); + propagLits |= lids1; + propagLits |= lids2; + break; + } + + case CircuitNodeType::AND_NODE: { + AndNode* casted = dynamic_cast(node); + LitLvTypesSet lids1 = smoothCircuit (*casted->leftBranch()); + LitLvTypesSet lids2 = smoothCircuit (*casted->rightBranch()); + propagLits |= lids1; + propagLits |= lids2; + break; + } + + case CircuitNodeType::SET_OR_NODE: { + SetOrNode* casted = dynamic_cast(node); + propagLits = smoothCircuit (*casted->follow()); + TinySet> litSet; + for (size_t i = 0; i < propagLits.size(); i++) { + litSet.insert (make_pair (propagLits[i].lid(), + propagLits[i].logVarTypes().size())); + } + LitLvTypesSet missingLids; + for (size_t i = 0; i < litSet.size(); i++) { + vector allTypes = getAllPossibleTypes (litSet[i].second); + for (size_t j = 0; j < allTypes.size(); j++) { + bool typeFound = false; + for (size_t k = 0; k < propagLits.size(); k++) { + if (litSet[i].first == propagLits[k].lid() + && containsTypes (propagLits[k].logVarTypes(), allTypes[j])) { + typeFound = true; + break; + } + } + if (typeFound == false) { + missingLids.insert (LitLvTypes (litSet[i].first, allTypes[j])); + } + } + } + createSmoothNode (missingLids, casted->follow()); + // setAllFullLogVars() can cause repeated elements in + // the set. Fix this by reconstructing the set again + LitLvTypesSet copy = propagLits; + propagLits.clear(); + for (size_t i = 0; i < copy.size(); i++) { + copy[i].setAllFullLogVars(); + propagLits.insert (copy[i]); + } + break; + } + + case CircuitNodeType::SET_AND_NODE: { + SetAndNode* casted = dynamic_cast(node); + propagLits = smoothCircuit (*casted->follow()); + break; + } + + case CircuitNodeType::INC_EXC_NODE: { + IncExcNode* casted = dynamic_cast(node); + LitLvTypesSet lids1 = smoothCircuit (*casted->plus1Branch()); + LitLvTypesSet lids2 = smoothCircuit (*casted->plus2Branch()); + LitLvTypesSet missingPlus1 = lids2 - lids1; + LitLvTypesSet missingPlus2 = lids1 - lids2; + createSmoothNode (missingPlus1, casted->plus1Branch()); + createSmoothNode (missingPlus2, casted->plus2Branch()); + propagLits |= lids1; + propagLits |= lids2; + break; + } + + case CircuitNodeType::LEAF_NODE: { + LeafNode* casted = dynamic_cast(node); + propagLits.insert (LitLvTypes ( + casted->clause()->literals()[0].lid(), + casted->clause()->logVarTypes(0))); + } + + default: + break; + } + + return propagLits; +} + + + +void +LiftedCircuit::createSmoothNode ( + const LitLvTypesSet& missingLits, + CircuitNode** prev) +{ + if (missingLits.empty() == false) { + if (Globals::verbosity > 1) { + unordered_map::iterator it; + it = originClausesMap_.find (*prev); + if (it != originClausesMap_.end()) { + backupClauses_ = it->second; + } else { + backupClauses_ = Clause::copyClauses ( + {((dynamic_cast(*prev))->clause())}); + } + } + Clauses clauses; + for (size_t i = 0; i < missingLits.size(); i++) { + LiteralId lid = missingLits[i].lid(); + const LogVarTypes& types = missingLits[i].logVarTypes(); + Clause* c = lwcnf_->createClause (lid); + for (size_t j = 0; j < types.size(); j++) { + LogVar X = c->literals().front().logVars()[j]; + if (types[j] == LogVarType::POS_LV) { + c->addPosCountedLogVar (X); + } else if (types[j] == LogVarType::NEG_LV) { + c->addNegCountedLogVar (X); + } + } + c->addLiteralComplemented (c->literals()[0]); + clauses.push_back (c); + } + SmoothNode* smoothNode = new SmoothNode (clauses, *lwcnf_); + *prev = new AndNode (smoothNode, *prev); + if (Globals::verbosity > 1) { + originClausesMap_[*prev] = backupClauses_; + 
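Each clause assembled just above pairs a missing literal with its complement, i.e. a tautology, so under the usual weighted-model-counting reading it is assumed to scale the branch weight by (positive weight + negative weight) once per grounding while leaving the set of models untouched. A tiny standalone sketch of that factor, separate from the patch, using made-up weights and a made-up domain size:

// Sketch only: weight contribution of one smoothing clause.
#include <cmath>
#include <iostream>

int
main (void)
{
  double posWeight = 0.3;      // hypothetical weight of the positive literal
  double negWeight = 1.0;      // hypothetical weight of the negative literal
  unsigned nrGroundings = 5;   // hypothetical number of groundings of the atom

  // "l v ~l" is satisfied either way, so every grounding multiplies the
  // weighted model count by (posWeight + negWeight).
  double factor = std::pow (posWeight + negWeight, nrGroundings);
  std::cout << "smoothing factor = " << factor << std::endl;
  return 0;
}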
explanationMap_[*prev] = " Smoothing" ; + } + } +} + + + +vector +LiftedCircuit::getAllPossibleTypes (unsigned nrLogVars) const +{ + if (nrLogVars == 0) { + return {}; + } + if (nrLogVars == 1) { + return {{LogVarType::POS_LV},{LogVarType::NEG_LV}}; + } + vector res; + Ranges ranges (nrLogVars, 2); + Indexer indexer (ranges); + while (indexer.valid()) { + LogVarTypes types; + for (size_t i = 0; i < nrLogVars; i++) { + if (indexer[i] == 0) { + types.push_back (LogVarType::POS_LV); + } else { + types.push_back (LogVarType::NEG_LV); + } + } + res.push_back (types); + ++ indexer; + } + return res; +} + + + +bool +LiftedCircuit::containsTypes ( + const LogVarTypes& typesA, + const LogVarTypes& typesB) const +{ + for (size_t i = 0; i < typesA.size(); i++) { + if (typesA[i] == LogVarType::FULL_LV) { + + } else if (typesA[i] == LogVarType::POS_LV + && typesB[i] == LogVarType::POS_LV) { + + } else if (typesA[i] == LogVarType::NEG_LV + && typesB[i] == LogVarType::NEG_LV) { + + } else { + return false; + } + } + return true; +} + + + +CircuitNodeType +LiftedCircuit::getCircuitNodeType (const CircuitNode* node) const +{ + CircuitNodeType type = CircuitNodeType::OR_NODE; + if (dynamic_cast(node)) { + type = CircuitNodeType::OR_NODE; + } else if (dynamic_cast(node)) { + type = CircuitNodeType::AND_NODE; + } else if (dynamic_cast(node)) { + type = CircuitNodeType::SET_OR_NODE; + } else if (dynamic_cast(node)) { + type = CircuitNodeType::SET_AND_NODE; + } else if (dynamic_cast(node)) { + type = CircuitNodeType::INC_EXC_NODE; + } else if (dynamic_cast(node)) { + type = CircuitNodeType::LEAF_NODE; + } else if (dynamic_cast(node)) { + type = CircuitNodeType::SMOOTH_NODE; + } else if (dynamic_cast(node)) { + type = CircuitNodeType::TRUE_NODE; + } else if (dynamic_cast(node)) { + type = CircuitNodeType::COMPILATION_FAILED_NODE; + } else { + assert (false); + } + return type; +} + + + +void +LiftedCircuit::exportToGraphViz (CircuitNode* node, ofstream& os) +{ + assert (node); + + static unsigned nrAuxNodes = 0; + stringstream ss; + ss << "n" << nrAuxNodes; + string auxNode = ss.str(); + nrAuxNodes ++; + string opStyle = "shape=circle,width=0.7,margin=\"0.0,0.0\"," ; + + switch (getCircuitNodeType (node)) { + + case OR_NODE: { + OrNode* casted = dynamic_cast(node); + printClauses (casted, os); + + os << auxNode << " [" << opStyle << "label=\"∨\"]" << endl; + os << escapeNode (node) << " -> " << auxNode; + os << " [label=\"" << getExplanationString (node) << "\"]" ; + os << endl; + + os << auxNode << " -> " ; + os << escapeNode (*casted->leftBranch()); + os << " [label=\" " << (*casted->leftBranch())->weight() << "\"]" ; + os << endl; + + os << auxNode << " -> " ; + os << escapeNode (*casted->rightBranch()); + os << " [label=\" " << (*casted->rightBranch())->weight() << "\"]" ; + os << endl; + + exportToGraphViz (*casted->leftBranch(), os); + exportToGraphViz (*casted->rightBranch(), os); + break; + } + + case AND_NODE: { + AndNode* casted = dynamic_cast(node); + printClauses (casted, os); + + os << auxNode << " [" << opStyle << "label=\"∧\"]" << endl; + os << escapeNode (node) << " -> " << auxNode; + os << " [label=\"" << getExplanationString (node) << "\"]" ; + os << endl; + + os << auxNode << " -> " ; + os << escapeNode (*casted->leftBranch()); + os << " [label=\" " << (*casted->leftBranch())->weight() << "\"]" ; + os << endl; + + os << auxNode << " -> " ; + os << escapeNode (*casted->rightBranch()) << endl; + os << " [label=\" " << (*casted->rightBranch())->weight() << "\"]" ; + os << endl; + + 
exportToGraphViz (*casted->leftBranch(), os); + exportToGraphViz (*casted->rightBranch(), os); + break; + } + + case SET_OR_NODE: { + SetOrNode* casted = dynamic_cast(node); + printClauses (casted, os); + + os << auxNode << " [" << opStyle << "label=\"∨(X)\"]" << endl; + os << escapeNode (node) << " -> " << auxNode; + os << " [label=\"" << getExplanationString (node) << "\"]" ; + os << endl; + + os << auxNode << " -> " ; + os << escapeNode (*casted->follow()); + os << " [label=\" " << (*casted->follow())->weight() << "\"]" ; + os << endl; + + exportToGraphViz (*casted->follow(), os); + break; + } + + case SET_AND_NODE: { + SetAndNode* casted = dynamic_cast(node); + printClauses (casted, os); + + os << auxNode << " [" << opStyle << "label=\"∧(X)\"]" << endl; + os << escapeNode (node) << " -> " << auxNode; + os << " [label=\"" << getExplanationString (node) << "\"]" ; + os << endl; + + os << auxNode << " -> " ; + os << escapeNode (*casted->follow()); + os << " [label=\" " << (*casted->follow())->weight() << "\"]" ; + os << endl; + + exportToGraphViz (*casted->follow(), os); + break; + } + + case INC_EXC_NODE: { + IncExcNode* casted = dynamic_cast(node); + printClauses (casted, os); + + os << auxNode << " [" << opStyle << "label=\"+ - +\"]" ; + os << endl; + os << escapeNode (node) << " -> " << auxNode; + os << " [label=\"" << getExplanationString (node) << "\"]" ; + os << endl; + + os << auxNode << " -> " ; + os << escapeNode (*casted->plus1Branch()); + os << " [label=\" " << (*casted->plus1Branch())->weight() << "\"]" ; + os << endl; + + os << auxNode << " -> " ; + os << escapeNode (*casted->minusBranch()) << endl; + os << " [label=\" " << (*casted->minusBranch())->weight() << "\"]" ; + os << endl; + + os << auxNode << " -> " ; + os << escapeNode (*casted->plus2Branch()); + os << " [label=\" " << (*casted->plus2Branch())->weight() << "\"]" ; + os << endl; + + exportToGraphViz (*casted->plus1Branch(), os); + exportToGraphViz (*casted->plus2Branch(), os); + exportToGraphViz (*casted->minusBranch(), os); + break; + } + + case LEAF_NODE: { + printClauses (node, os, "style=filled,fillcolor=palegreen,"); + break; + } + + case SMOOTH_NODE: { + printClauses (node, os, "style=filled,fillcolor=lightblue,"); + break; + } + + case TRUE_NODE: { + os << escapeNode (node); + os << " [shape=box,label=\"⊤\"]" ; + os << endl; + break; + } + + case COMPILATION_FAILED_NODE: { + printClauses (node, os, "style=filled,fillcolor=salmon,"); + break; + } + + default: + assert (false); + } +} + + + +string +LiftedCircuit::escapeNode (const CircuitNode* node) const +{ + stringstream ss; + ss << "\"" << node << "\"" ; + return ss.str(); +} + + + +string +LiftedCircuit::getExplanationString (CircuitNode* node) +{ + return Util::contains (explanationMap_, node) + ? 
explanationMap_[node] + : "" ; +} + + + +void +LiftedCircuit::printClauses ( + CircuitNode* node, + ofstream& os, + string extraOptions) +{ + Clauses clauses; + if (Util::contains (originClausesMap_, node)) { + clauses = originClausesMap_[node]; + } else if (getCircuitNodeType (node) == CircuitNodeType::LEAF_NODE) { + clauses = { (dynamic_cast(node))->clause() } ; + } else if (getCircuitNodeType (node) == CircuitNodeType::SMOOTH_NODE) { + clauses = (dynamic_cast(node))->clauses(); + } + assert (clauses.empty() == false); + os << escapeNode (node); + os << " [shape=box," << extraOptions << "label=\"" ; + for (size_t i = 0; i < clauses.size(); i++) { + if (i != 0) os << "\\n" ; + os << *clauses[i]; + } + os << "\"]" ; + os << endl; +} + + + LiftedKc::~LiftedKc (void) { delete lwcnf_; @@ -22,8 +1251,8 @@ LiftedKc::solveQuery (const Grounds& query) lwcnf_ = new LiftedWCNF (pfList_); circuit_ = new LiftedCircuit (lwcnf_); if (circuit_->isCompilationSucceeded() == false) { - cerr << "error: compilation failed" << endl; - abort(); + cerr << "Error: the circuit compilation has failed." << endl; + exit (EXIT_FAILURE); } vector groups; Ranges ranges; diff --git a/packages/CLPBN/horus/LiftedKc.h b/packages/CLPBN/horus/LiftedKc.h index cba6499e1..949787f01 100644 --- a/packages/CLPBN/horus/LiftedKc.h +++ b/packages/CLPBN/horus/LiftedKc.h @@ -2,10 +2,280 @@ #define HORUS_LIFTEDKC_H #include "LiftedSolver.h" +#include "LiftedWCNF.h" #include "ParfactorList.h" -class LiftedWCNF; -class LiftedCircuit; + +enum CircuitNodeType { + OR_NODE, + AND_NODE, + SET_OR_NODE, + SET_AND_NODE, + INC_EXC_NODE, + LEAF_NODE, + SMOOTH_NODE, + TRUE_NODE, + COMPILATION_FAILED_NODE +}; + + + +class CircuitNode +{ + public: + CircuitNode (void) { } + + virtual ~CircuitNode (void) { } + + virtual double weight (void) const = 0; +}; + + + +class OrNode : public CircuitNode +{ + public: + OrNode (void) : CircuitNode(), leftBranch_(0), rightBranch_(0) { } + + ~OrNode (void); + + CircuitNode** leftBranch (void) { return &leftBranch_; } + CircuitNode** rightBranch (void) { return &rightBranch_; } + + double weight (void) const; + + private: + CircuitNode* leftBranch_; + CircuitNode* rightBranch_; +}; + + + +class AndNode : public CircuitNode +{ + public: + AndNode (void) : CircuitNode(), leftBranch_(0), rightBranch_(0) { } + + AndNode (CircuitNode* leftBranch, CircuitNode* rightBranch) + : CircuitNode(), leftBranch_(leftBranch), rightBranch_(rightBranch) { } + + ~AndNode (void); + + CircuitNode** leftBranch (void) { return &leftBranch_; } + CircuitNode** rightBranch (void) { return &rightBranch_; } + + double weight (void) const; + + private: + CircuitNode* leftBranch_; + CircuitNode* rightBranch_; +}; + + + +class SetOrNode : public CircuitNode +{ + public: + SetOrNode (unsigned nrGroundings) + : CircuitNode(), follow_(0), nrGroundings_(nrGroundings) { } + + ~SetOrNode (void); + + CircuitNode** follow (void) { return &follow_; } + + static unsigned nrPositives (void) { return nrPos_; } + + static unsigned nrNegatives (void) { return nrNeg_; } + + static bool isSet (void) { return nrPos_ >= 0; } + + double weight (void) const; + + private: + CircuitNode* follow_; + unsigned nrGroundings_; + static int nrPos_; + static int nrNeg_; +}; + + + +class SetAndNode : public CircuitNode +{ + public: + SetAndNode (unsigned nrGroundings) + : CircuitNode(), follow_(0), nrGroundings_(nrGroundings) { } + + ~SetAndNode (void); + + CircuitNode** follow (void) { return &follow_; } + + double weight (void) const; + + private: + CircuitNode* follow_; 
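For intuition about the node types declared in this header: their weight() members are defined in LiftedKc.cpp and presumably combine child weights roughly as sketched below, where an OR adds, an AND multiplies, and a set-AND over n interchangeable groundings raises its child's weight to the n-th power. This is a rough reading, separate from the patch, not the actual implementation, which may for instance work in log space:

// Sketch only: assumed weight combination rules, for intuition.
#include <cmath>
#include <iostream>

static double orWeight     (double l, double r)       { return l + r; }
static double andWeight    (double l, double r)       { return l * r; }
static double setAndWeight (double child, unsigned n) { return std::pow (child, n); }

int
main (void)
{
  // e.g. three interchangeable groundings of two independent sub-circuits
  std::cout << setAndWeight (andWeight (0.5, 2.0), 3) << std::endl;   // prints 1
  return 0;
}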
+ unsigned nrGroundings_; +}; + + + +class IncExcNode : public CircuitNode +{ + public: + IncExcNode (void) + : CircuitNode(), plus1Branch_(0), plus2Branch_(0), minusBranch_(0) { } + + ~IncExcNode (void); + + CircuitNode** plus1Branch (void) { return &plus1Branch_; } + CircuitNode** plus2Branch (void) { return &plus2Branch_; } + CircuitNode** minusBranch (void) { return &minusBranch_; } + + double weight (void) const; + + private: + CircuitNode* plus1Branch_; + CircuitNode* plus2Branch_; + CircuitNode* minusBranch_; +}; + + + +class LeafNode : public CircuitNode +{ + public: + LeafNode (Clause* clause, const LiftedWCNF& lwcnf) + : CircuitNode(), clause_(clause), lwcnf_(lwcnf) { } + + ~LeafNode (void); + + const Clause* clause (void) const { return clause_; } + + Clause* clause (void) { return clause_; } + + double weight (void) const; + + private: + Clause* clause_; + const LiftedWCNF& lwcnf_; +}; + + + +class SmoothNode : public CircuitNode +{ + public: + SmoothNode (const Clauses& clauses, const LiftedWCNF& lwcnf) + : CircuitNode(), clauses_(clauses), lwcnf_(lwcnf) { } + + ~SmoothNode (void); + + const Clauses& clauses (void) const { return clauses_; } + + Clauses clauses (void) { return clauses_; } + + double weight (void) const; + + private: + Clauses clauses_; + const LiftedWCNF& lwcnf_; +}; + + + +class TrueNode : public CircuitNode +{ + public: + TrueNode (void) : CircuitNode() { } + + double weight (void) const; +}; + + + +class CompilationFailedNode : public CircuitNode +{ + public: + CompilationFailedNode (void) : CircuitNode() { } + + double weight (void) const; +}; + + + +class LiftedCircuit +{ + public: + LiftedCircuit (const LiftedWCNF* lwcnf); + + ~LiftedCircuit (void); + + bool isCompilationSucceeded (void) const; + + double getWeightedModelCount (void) const; + + void exportToGraphViz (const char*); + + private: + void compile (CircuitNode** follow, Clauses& clauses); + + bool tryUnitPropagation (CircuitNode** follow, Clauses& clauses); + + bool tryIndependence (CircuitNode** follow, Clauses& clauses); + + bool tryShannonDecomp (CircuitNode** follow, Clauses& clauses); + + bool tryInclusionExclusion (CircuitNode** follow, Clauses& clauses); + + bool tryIndepPartialGrounding (CircuitNode** follow, Clauses& clauses); + + bool tryIndepPartialGroundingAux (Clauses& clauses, ConstraintTree& ct, + LogVars& rootLogVars); + + bool tryAtomCounting (CircuitNode** follow, Clauses& clauses); + + void shatterCountedLogVars (Clauses& clauses); + + bool shatterCountedLogVarsAux (Clauses& clauses); + + bool shatterCountedLogVarsAux (Clauses& clauses, size_t idx1, size_t idx2); + + bool independentClause (Clause& clause, Clauses& otherClauses) const; + + bool independentLiteral (const Literal& lit, + const Literals& otherLits) const; + + LitLvTypesSet smoothCircuit (CircuitNode* node); + + void createSmoothNode (const LitLvTypesSet& lids, + CircuitNode** prev); + + vector getAllPossibleTypes (unsigned nrLogVars) const; + + bool containsTypes (const LogVarTypes& typesA, + const LogVarTypes& typesB) const; + + CircuitNodeType getCircuitNodeType (const CircuitNode* node) const; + + void exportToGraphViz (CircuitNode* node, ofstream&); + + void printClauses (CircuitNode* node, ofstream&, + string extraOptions = ""); + + string escapeNode (const CircuitNode* node) const; + + string getExplanationString (CircuitNode* node); + + CircuitNode* root_; + const LiftedWCNF* lwcnf_; + bool compilationSucceeded_; + Clauses backupClauses_; + unordered_map originClausesMap_; + unordered_map 
explanationMap_; + + DISALLOW_COPY_AND_ASSIGN (LiftedCircuit); +}; + class LiftedKc : public LiftedSolver @@ -24,6 +294,8 @@ class LiftedKc : public LiftedSolver LiftedWCNF* lwcnf_; LiftedCircuit* circuit_; ParfactorList pfList_; + + DISALLOW_COPY_AND_ASSIGN (LiftedKc); }; #endif // HORUS_LIFTEDKC_H diff --git a/packages/CLPBN/horus/LiftedOperations.cpp b/packages/CLPBN/horus/LiftedOperations.cpp index 03cafdee7..6ccc41b3b 100644 --- a/packages/CLPBN/horus/LiftedOperations.cpp +++ b/packages/CLPBN/horus/LiftedOperations.cpp @@ -35,9 +35,9 @@ LiftedOperations::shatterAgainstQuery ( } } if (found == false) { - cerr << "error: could not find a parfactor with ground " ; - cerr << "`" << query[i] << "'" << endl; - exit (0); + cerr << "Error: could not find a parfactor with ground " ; + cerr << "`" << query[i] << "'." << endl; + exit (EXIT_FAILURE); } pfList.add (newPfs); } @@ -60,12 +60,12 @@ LiftedOperations::runWeakBayesBall ( const Grounds& query) { queue todo; // groups to process - set done; // processed or in queue + set done; // processed or in queue for (size_t i = 0; i < query.size(); i++) { ParfactorList::iterator it = pfList.begin(); while (it != pfList.end()) { PrvGroup group = (*it)->findGroup (query[i]); - if (group != numeric_limits::max()) { + if (group != std::numeric_limits::max()) { todo.push (group); done.insert (group); break; @@ -128,7 +128,7 @@ LiftedOperations::absorveEvidence ( it = pfList.remove (it); Parfactors absorvedPfs = absorve (obsFormulas[i], pf); if (absorvedPfs.empty() == false) { - if (absorvedPfs.size() == 1 && absorvedPfs[0] == 0) { + if (absorvedPfs.size() == 1 && !absorvedPfs[0]) { // just remove pf; } else { Util::addToVector (newPfs, absorvedPfs); @@ -225,7 +225,7 @@ LiftedOperations::absorve ( absorvedPfs.push_back (0); } break; - } + } g->constr()->moveToTop (formulas[i].logVars()); std::pair res; diff --git a/packages/CLPBN/horus/LiftedOperations.h b/packages/CLPBN/horus/LiftedOperations.h index 1e21f317c..d17bb5359 100644 --- a/packages/CLPBN/horus/LiftedOperations.h +++ b/packages/CLPBN/horus/LiftedOperations.h @@ -8,19 +8,22 @@ class LiftedOperations public: static void shatterAgainstQuery ( ParfactorList& pfList, const Grounds& query); - + static void runWeakBayesBall ( - ParfactorList& pfList, const Grounds&); - + ParfactorList& pfList, const Grounds&); + static void absorveEvidence ( ParfactorList& pfList, ObservedFormulas& obsFormulas); - + static Parfactors countNormalize (Parfactor*, const LogVarSet&); static Parfactor calcGroundMultiplication (Parfactor pf); private: - static Parfactors absorve (ObservedFormula&, Parfactor*); + static Parfactors absorve (ObservedFormula&, Parfactor*); + + DISALLOW_COPY_AND_ASSIGN (LiftedOperations); }; #endif // HORUS_LIFTEDOPERATIONS_H + diff --git a/packages/CLPBN/horus/LiftedSolver.h b/packages/CLPBN/horus/LiftedSolver.h index 5429fc5b3..afdfe1461 100644 --- a/packages/CLPBN/horus/LiftedSolver.h +++ b/packages/CLPBN/horus/LiftedSolver.h @@ -21,6 +21,9 @@ class LiftedSolver protected: const ParfactorList& parfactorList; + + private: + DISALLOW_COPY_AND_ASSIGN (LiftedSolver); }; #endif // HORUS_LIFTEDSOLVER_H diff --git a/packages/CLPBN/horus/LiftedUtils.cpp b/packages/CLPBN/horus/LiftedUtils.cpp index 9ad750f90..b85990ec1 100644 --- a/packages/CLPBN/horus/LiftedUtils.cpp +++ b/packages/CLPBN/horus/LiftedUtils.cpp @@ -1,6 +1,5 @@ #include -#include #include #include @@ -61,7 +60,7 @@ ostream& operator<< (ostream &os, const Symbol& s) ostream& operator<< (ostream &os, const LogVar& X) { const string 
labels[] = { - "A", "B", "C", "D", "E", "F", + "A", "B", "C", "D", "E", "F", "G", "H", "I", "J", "K", "M" }; (X >= 12) ? os << "X_" << X.id_ : os << labels[X]; return os; diff --git a/packages/CLPBN/horus/LiftedUtils.h b/packages/CLPBN/horus/LiftedUtils.h index 458afdf7d..66d5c6e07 100644 --- a/packages/CLPBN/horus/LiftedUtils.h +++ b/packages/CLPBN/horus/LiftedUtils.h @@ -1,12 +1,11 @@ #ifndef HORUS_LIFTEDUTILS_H #define HORUS_LIFTEDUTILS_H -#include #include + #include #include - #include "TinySet.h" #include "Util.h" @@ -51,7 +50,7 @@ class LogVar } bool valid (void) const - { + { return id_ != Util::maxUnsigned(); } @@ -107,7 +106,7 @@ class Ground size_t arity (void) const { return args_.size(); } - bool isAtom (void) const { return args_.size() == 0; } + bool isAtom (void) const { return args_.empty(); } friend ostream& operator<< (ostream &os, const Ground& gr); @@ -145,11 +144,11 @@ class Substitution return X; } - bool containsReplacementFor (LogVar X) const + bool containsReplacementFor (LogVar X) const { return Util::contains (subs_, X); } - + size_t nrReplacements (void) const { return subs_.size(); } LogVars getDiscardedLogVars (void) const; @@ -161,6 +160,5 @@ class Substitution }; - #endif // HORUS_LIFTEDUTILS_H diff --git a/packages/CLPBN/horus/LiftedVe.cpp b/packages/CLPBN/horus/LiftedVe.cpp index 141006c46..5c4c7464e 100644 --- a/packages/CLPBN/horus/LiftedVe.cpp +++ b/packages/CLPBN/horus/LiftedVe.cpp @@ -1,4 +1,5 @@ #include + #include #include "LiftedVe.h" @@ -133,7 +134,7 @@ ProductOperator::toString (void) stringstream ss; ss << "just multiplicate " ; ss << (*g1_)->getAllGroups(); - ss << " x " ; + ss << " x " ; ss << (*g2_)->getAllGroups(); ss << " [cost=" << std::exp (getLogCost()) << "]" << endl; return ss.str(); @@ -155,7 +156,7 @@ ProductOperator::validOp (Parfactor* g1, Parfactor* g2) } size_t idx1 = g1->indexOfGroup (intersect[i]); size_t idx2 = g2->indexOfGroup (intersect[i]); - if (g1->range (idx1) != g2->range (idx2)) { + if (g1->range (idx1) != g2->range (idx2)) { return false; } } @@ -710,10 +711,10 @@ LiftedVe::getBestOperation (const Grounds& query) validOps = LiftedOperator::getValidOps (pfList_, query); for (size_t i = 0; i < validOps.size(); i++) { double cost = validOps[i]->getLogCost(); - if ((bestOp == 0) || (cost < bestCost)) { + if (!bestOp || cost < bestCost) { bestOp = validOps[i]; bestCost = cost; - } + } } if (bestCost > largestCost_) { largestCost_ = bestCost; diff --git a/packages/CLPBN/horus/LiftedVe.h b/packages/CLPBN/horus/LiftedVe.h index 7d9974294..d66f42e7f 100644 --- a/packages/CLPBN/horus/LiftedVe.h +++ b/packages/CLPBN/horus/LiftedVe.h @@ -9,7 +9,7 @@ class LiftedOperator { public: virtual ~LiftedOperator (void) { } - + virtual double getLogCost (void) = 0; virtual void apply (void) = 0; @@ -23,6 +23,9 @@ class LiftedOperator static vector getParfactorsWithGroup ( ParfactorList&, PrvGroup group); + + private: + DISALLOW_ASSIGN (LiftedOperator); }; @@ -48,6 +51,8 @@ class ProductOperator : public LiftedOperator ParfactorList::iterator g1_; ParfactorList::iterator g2_; ParfactorList& pfList_; + + DISALLOW_COPY_AND_ASSIGN (ProductOperator); }; @@ -55,7 +60,7 @@ class ProductOperator : public LiftedOperator class SumOutOperator : public LiftedOperator { public: - SumOutOperator (PrvGroup group, ParfactorList& pfList) + SumOutOperator (PrvGroup group, ParfactorList& pfList) : group_(group), pfList_(pfList) { } double getLogCost (void); @@ -74,6 +79,8 @@ class SumOutOperator : public LiftedOperator PrvGroup group_; ParfactorList& 
pfList_; + + DISALLOW_COPY_AND_ASSIGN (SumOutOperator); }; @@ -101,6 +108,8 @@ class CountingOperator : public LiftedOperator ParfactorList::iterator pfIter_; LogVar X_; ParfactorList& pfList_; + + DISALLOW_COPY_AND_ASSIGN (CountingOperator); }; @@ -123,11 +132,13 @@ class GroundOperator : public LiftedOperator string toString (void); private: - vector> getAffectedFormulas (void); + vector> getAffectedFormulas (void); PrvGroup group_; unsigned lvIndex_; ParfactorList& pfList_; + + DISALLOW_COPY_AND_ASSIGN (GroundOperator); }; @@ -149,6 +160,8 @@ class LiftedVe : public LiftedSolver ParfactorList pfList_; double largestCost_; + + DISALLOW_COPY_AND_ASSIGN (LiftedVe); }; #endif // HORUS_LIFTEDVE_H diff --git a/packages/CLPBN/horus/LiftedWCNF.cpp b/packages/CLPBN/horus/LiftedWCNF.cpp index b2272b6d6..682dddd6d 100644 --- a/packages/CLPBN/horus/LiftedWCNF.cpp +++ b/packages/CLPBN/horus/LiftedWCNF.cpp @@ -3,11 +3,10 @@ #include "Indexer.h" - bool Literal::isGround (ConstraintTree constr, LogVarSet ipgLogVars) const { - if (logVars_.size() == 0) { + if (logVars_.empty()) { return true; } LogVarSet lvs (logVars_); @@ -108,7 +107,7 @@ Clause::containsPositiveLiteral ( } - + bool Clause::containsNegativeLiteral ( LiteralId lid, @@ -195,7 +194,7 @@ Clause::isPositiveCountedLogVar (LogVar X) const assert (constr_.logVarSet().contains (X)); return posCountedLvs_.contains (X); } - + bool @@ -235,7 +234,7 @@ Clause::ipgCandidates (void) const LogVarSet allLvs = constr_.logVarSet(); allLvs -= ipgLvs_; allLvs -= posCountedLvs_; - allLvs -= negCountedLvs_; + allLvs -= negCountedLvs_; for (size_t i = 0; i < allLvs.size(); i++) { bool valid = true; for (size_t j = 0; j < literals_.size(); j++) { @@ -262,7 +261,7 @@ Clause::logVarTypes (size_t litIdx) const if (posCountedLvs_.contains (lvs[i])) { types.push_back (LogVarType::POS_LV); } else if (negCountedLvs_.contains (lvs[i])) { - types.push_back (LogVarType::NEG_LV); + types.push_back (LogVarType::NEG_LV); } else { types.push_back (LogVarType::FULL_LV); } @@ -327,6 +326,16 @@ Clause::printClauses (const Clauses& clauses) +void +Clause::deleteClauses (Clauses& clauses) +{ + for (size_t i = 0; i < clauses.size(); i++) { + delete clauses[i]; + } +} + + + std::ostream& operator<< (ostream &os, const Clause& clause) { @@ -381,7 +390,7 @@ LiftedWCNF::LiftedWCNF (const ParfactorList& pfList) { addIndicatorClauses (pfList); addParameterClauses (pfList); - + /* // INCLUSION-EXCLUSION TEST clauses_.clear(); @@ -427,7 +436,7 @@ LiftedWCNF::LiftedWCNF (const ParfactorList& pfList) c2->addLiteralComplemented (Literal (1, {1,0})); clauses_.push_back(c2); */ - + if (Globals::verbosity > 1) { cout << "FORMULA INDICATORS:" << endl; printFormulaIndicators(); @@ -445,7 +454,7 @@ LiftedWCNF::LiftedWCNF (const ParfactorList& pfList) LiftedWCNF::~LiftedWCNF (void) { - + Clause::deleteClauses (clauses_); } @@ -569,7 +578,7 @@ LiftedWCNF::addParameterClauses (const ParfactorList& pfList) // ¬θxi|u1,...,un v λu2 -> tempClause double posWeight = (**it)[indexer]; addWeight (paramVarLid, posWeight, LogAware::one()); - + Clause* clause1 = new Clause (*(*it)->constr()); for (unsigned i = 0; i < groups.size(); i++) { @@ -583,7 +592,7 @@ LiftedWCNF::addParameterClauses (const ParfactorList& pfList) tempClause->addLiteralComplemented (Literal ( paramVarLid, (*it)->constr()->logVars())); tempClause->addLiteral (Literal (lid, (*it)->argument(i).logVars())); - clauses_.push_back (tempClause); + clauses_.push_back (tempClause); } clause1->addLiteral (Literal (paramVarLid, 
(*it)->constr()->logVars())); clauses_.push_back (clause1); @@ -631,7 +640,7 @@ LiftedWCNF::printWeights (void) const unordered_map>::const_iterator it; it = weights_.begin(); while (it != weights_.end()) { - cout << "λ" << it->first << " weights: " ; + cout << "λ" << it->first << " weights: " ; cout << it->second.first << " " << it->second.second; cout << endl; ++ it; diff --git a/packages/CLPBN/horus/LiftedWCNF.h b/packages/CLPBN/horus/LiftedWCNF.h index 92f07d192..619d2c5a5 100644 --- a/packages/CLPBN/horus/LiftedWCNF.h +++ b/packages/CLPBN/horus/LiftedWCNF.h @@ -1,15 +1,14 @@ #ifndef HORUS_LIFTEDWCNF_H #define HORUS_LIFTEDWCNF_H +#include + #include "ParfactorList.h" using namespace std; -typedef long LiteralId; - class ConstraintTree; - enum LogVarType { FULL_LV, @@ -17,8 +16,8 @@ enum LogVarType NEG_LV }; -typedef vector LogVarTypes; - +typedef long LiteralId; +typedef vector LogVarTypes; class Literal @@ -33,19 +32,19 @@ class Literal LiteralId lid (void) const { return lid_; } LogVars logVars (void) const { return logVars_; } - - size_t nrLogVars (void) const { return logVars_.size(); } + + size_t nrLogVars (void) const { return logVars_.size(); } LogVarSet logVarSet (void) const { return LogVarSet (logVars_); } - + void complement (void) { negated_ = !negated_; } bool isPositive (void) const { return negated_ == false; } bool isNegative (void) const { return negated_; } - + bool isGround (ConstraintTree constr, LogVarSet ipgLogVars) const; - + size_t indexOfLogVar (LogVar X) const; string toString (LogVarSet ipgLogVars = LogVarSet(), @@ -74,9 +73,9 @@ class Clause void addLiteral (const Literal& l) { literals_.push_back (l); } const Literals& literals (void) const { return literals_; } - + Literals& literals (void) { return literals_; } - + size_t nrLiterals (void) const { return literals_.size(); } const ConstraintTree& constr (void) const { return constr_; } @@ -100,7 +99,7 @@ class Clause unsigned nrPosCountedLogVars (void) const { return posCountedLvs_.size(); } unsigned nrNegCountedLogVars (void) const { return negCountedLvs_.size(); } - + void addLiteralComplemented (const Literal& lit); bool containsLiteral (LiteralId lid) const; @@ -119,8 +118,8 @@ class Clause bool isPositiveCountedLogVar (LogVar X) const; - bool isNegativeCountedLogVar (LogVar X) const; - + bool isNegativeCountedLogVar (LogVar X) const; + bool isIpgLogVar (LogVar X) const; TinySet lidSet (void) const; @@ -130,13 +129,15 @@ class Clause LogVarTypes logVarTypes (size_t litIdx) const; void removeLiteral (size_t litIdx); - + static bool independentClauses (Clause& c1, Clause& c2); - - static vector copyClauses (const vector& clauses); + + static vector copyClauses (const vector& clauses); static void printClauses (const vector& clauses); - + + static void deleteClauses (vector& clauses); + friend std::ostream& operator<< (ostream &os, const Clause& clause); private: @@ -147,6 +148,8 @@ class Clause LogVarSet posCountedLvs_; LogVarSet negCountedLvs_; ConstraintTree constr_; + + DISALLOW_ASSIGN (Clause); }; typedef vector Clauses; @@ -171,14 +174,14 @@ class LitLvTypes return false; } }; - + LitLvTypes (LiteralId lid, const LogVarTypes& lvTypes) : lid_(lid), lvTypes_(lvTypes) { } - + LiteralId lid (void) const { return lid_; } - + const LogVarTypes& logVarTypes (void) const { return lvTypes_; } - + void setAllFullLogVars (void) { std::fill (lvTypes_.begin(), lvTypes_.end(), LogVarType::FULL_LV); } @@ -219,7 +222,6 @@ class LiftedWCNF void printClauses (void) const; private: - LiteralId getLiteralId (PrvGroup 
prvGroup, unsigned range); void addIndicatorClauses (const ParfactorList& pfList); @@ -228,9 +230,11 @@ class LiftedWCNF Clauses clauses_; LiteralId freeLiteralId_; - const ParfactorList& pfList_; + const ParfactorList& pfList_; unordered_map> map_; unordered_map> weights_; + + DISALLOW_COPY_AND_ASSIGN (LiftedWCNF); }; #endif // HORUS_LIFTEDWCNF_H diff --git a/packages/CLPBN/horus/Makefile.in b/packages/CLPBN/horus/Makefile.in index 59936c776..cbf9d700e 100644 --- a/packages/CLPBN/horus/Makefile.in +++ b/packages/CLPBN/horus/Makefile.in @@ -23,10 +23,10 @@ CC=@CC@ CXX=@CXX@ # normal -#CXXFLAGS= -std=c++0x @SHLIB_CXXFLAGS@ $(YAP_EXTRAS) $(DEFS) -D_YAP_NOT_INSTALLED_=1 -I$(srcdir) -I../../.. -I$(srcdir)/../../../include @CPPFLAGS@ -DNDEBUG +CXXFLAGS= -std=c++0x @SHLIB_CXXFLAGS@ $(YAP_EXTRAS) $(DEFS) -D_YAP_NOT_INSTALLED_=1 -I$(srcdir) -I../../.. -I$(srcdir)/../../../include @CPPFLAGS@ -DNDEBUG # debug -CXXFLAGS= -std=c++0x @SHLIB_CXXFLAGS@ $(YAP_EXTRAS) $(DEFS) -D_YAP_NOT_INSTALLED_=1 -I$(srcdir) -I../../.. -I$(srcdir)/../../../include @CPPFLAGS@ -g -O0 -Wextra +#CXXFLAGS= -std=c++0x @SHLIB_CXXFLAGS@ $(YAP_EXTRAS) $(DEFS) -D_YAP_NOT_INSTALLED_=1 -I$(srcdir) -I../../.. -I$(srcdir)/../../../include @CPPFLAGS@ -g -O0 -Wextra # @@ -43,6 +43,7 @@ SO=@SO@ #4.1VPATH=@srcdir@:@srcdir@/OPTYap CWD=$(PWD) +HCLI = $(srcdir)/hcli HEADERS = \ $(srcdir)/BayesBall.h \ @@ -57,13 +58,12 @@ HEADERS = \ $(srcdir)/Horus.h \ $(srcdir)/Indexer.h \ $(srcdir)/LiftedBp.h \ - $(srcdir)/LiftedCircuit.h \ - $(srcdir)/LiftedKc.h \ - $(srcdir)/LiftedOperations.h \ - $(srcdir)/LiftedSolver.h \ + $(srcdir)/LiftedKc.h \ + $(srcdir)/LiftedOperations.h \ + $(srcdir)/LiftedSolver.h \ $(srcdir)/LiftedUtils.h \ $(srcdir)/LiftedVe.h \ - $(srcdir)/LiftedWCNF.h \ + $(srcdir)/LiftedWCNF.h \ $(srcdir)/Parfactor.h \ $(srcdir)/ParfactorList.h \ $(srcdir)/ProbFormula.h \ @@ -73,7 +73,7 @@ HEADERS = \ $(srcdir)/Var.h \ $(srcdir)/VarElim.h \ $(srcdir)/WeightedBp.h - + CPP_SOURCES = \ $(srcdir)/BayesBall.cpp \ $(srcdir)/BayesBallGraph.cpp \ @@ -87,12 +87,11 @@ CPP_SOURCES = \ $(srcdir)/HorusCli.cpp \ $(srcdir)/HorusYap.cpp \ $(srcdir)/LiftedBp.cpp \ - $(srcdir)/LiftedCircuit.cpp \ - $(srcdir)/LiftedKc.cpp \ - $(srcdir)/LiftedOperations.cpp \ + $(srcdir)/LiftedKc.cpp \ + $(srcdir)/LiftedOperations.cpp \ $(srcdir)/LiftedUtils.cpp \ $(srcdir)/LiftedVe.cpp \ - $(srcdir)/LiftedWCNF.cpp \ + $(srcdir)/LiftedWCNF.cpp \ $(srcdir)/Parfactor.cpp \ $(srcdir)/ParfactorList.cpp \ $(srcdir)/ProbFormula.cpp \ @@ -114,12 +113,11 @@ OBJS = \ Histogram.o \ HorusYap.o \ LiftedBp.o \ - LiftedCircuit.o \ - LiftedKc.o \ - LiftedOperations.o \ + LiftedKc.o \ + LiftedOperations.o \ LiftedUtils.o \ LiftedVe.o \ - LiftedWCNF.o \ + LiftedWCNF.o \ ProbFormula.o \ Parfactor.o \ ParfactorList.o \ @@ -159,15 +157,16 @@ all: $(SOBJS) hcli hcli: $(HCLI_OBJS) - $(CXX) -o hcli $(HCLI_OBJS) + $(CXX) -o $(HCLI) $(HCLI_OBJS) install: all $(INSTALL_PROGRAM) $(SOBJS) $(DESTDIR)$(YAPLIBDIR) + $(INSTALL_PROGRAM) $(HCLI) $(DESTDIR)$(BINDIR) clean: - rm -f *.o *~ $(OBJS) $(SOBJS) *.BAK hcli + rm -f *.o *~ $(OBJS) $(SOBJS) $(HCLI) *.BAK erase_dots: @@ -178,8 +177,8 @@ depend: $(HEADERS) $(CPP_SOURCES) -@if test "$(GCC)" = yes; then\ $(CC) -std=c++0x -MM -MG $(CFLAGS) -I$(srcdir) -I$(srcdir)/../../../../include -I$(srcdir)/../../../../H $(CPP_SOURCES) >> Makefile;\ else\ - makedepend -f - -- $(CFLAGS) -I$(srcdir)/../../../../H -I$(srcdir)/../../../../include -- $(CPP_SOURCES) |\ - sed 's|.*/\([^:]*\):|\1:|' >> Makefile ;\ + makedepend -f - -- $(CFLAGS) -I$(srcdir)/../../../../H 
-I$(srcdir)/../../../../include -- $(CPP_SOURCES) |\ + sed 's|.*/\([^:]*\):|\1:|' >> Makefile ;\ fi # DO NOT DELETE THIS LINE -- make depend depends on it. diff --git a/packages/CLPBN/horus/Parfactor.cpp b/packages/CLPBN/horus/Parfactor.cpp index 6eaa32e72..bc326801b 100644 --- a/packages/CLPBN/horus/Parfactor.cpp +++ b/packages/CLPBN/horus/Parfactor.cpp @@ -1,4 +1,3 @@ - #include "Parfactor.h" #include "Histogram.h" #include "Indexer.h" @@ -8,7 +7,7 @@ Parfactor::Parfactor ( const ProbFormulas& formulas, - const Params& params, + const Params& params, const Tuples& tuples, unsigned distId) { @@ -26,7 +25,24 @@ Parfactor::Parfactor ( } } } + LogVar newLv = logVars.size(); constr_ = new ConstraintTree (logVars, tuples); + // Change formulas like f(X,X), X in {(p1),(p2),...} + // to be like f(X,Y), (X,Y) in {(p1,p1),(p2,p2),...}. + // This will simplify shattering on the constraint tree. + for (size_t i = 0; i < args_.size(); i++) { + LogVarSet lvSet; + LogVars& lvs = args_[i].logVars(); + for (size_t j = 0; j < lvs.size(); j++) { + if (lvSet.contains (lvs[j]) == false) { + lvSet |= lvs[j]; + } else { + constr_->cloneLogVar (lvs[j], newLv); + lvs[j] = newLv; + ++ newLv; + } + } + } assert (params_.size() == Util::sizeExpected (ranges_)); } @@ -204,7 +220,7 @@ Parfactor::countConvert (LogVar X) assert (constr_->isCountNormalized (X)); assert (constr_->getConditionalCount (X) > 1); assert (canCountConvert (X)); - + unsigned N = constr_->getConditionalCount (X); unsigned R = ranges_[fIdx]; unsigned H = HistogramSet::nrHistograms (N, R); @@ -319,7 +335,7 @@ Parfactor::fullExpand (LogVar X) sumIndexes.push_back (HistogramSet::findIndex (hist, originHists)); ++ indexer; } - + expandPotential (fIdx, std::pow (R, N), sumIndexes); ProbFormula f = args_[fIdx]; @@ -343,7 +359,7 @@ Parfactor::reorderAccordingGrounds (const Grounds& grounds) ProbFormulas newFormulas; for (size_t i = 0; i < grounds.size(); i++) { for (size_t j = 0; j < args_.size(); j++) { - if (grounds[i].functor() == args_[j].functor() && + if (grounds[i].functor() == args_[j].functor() && grounds[i].arity() == args_[j].arity()) { constr_->moveToTop (args_[j].logVars()); if (constr_->containsTuple (grounds[i].args())) { @@ -407,7 +423,7 @@ Parfactor::indexOfGround (const Ground& ground) const { size_t idx = args_.size(); for (size_t i = 0; i < args_.size(); i++) { - if (args_[i].functor() == ground.functor() && + if (args_[i].functor() == ground.functor() && args_[i].arity() == ground.arity()) { constr_->moveToTop (args_[i].logVars()); if (constr_->containsTuple (ground.args())) { @@ -426,7 +442,7 @@ Parfactor::findGroup (const Ground& ground) const { size_t idx = indexOfGround (ground); return idx == args_.size() - ? numeric_limits::max() + ? std::numeric_limits::max() : args_[idx].group(); } @@ -435,7 +451,7 @@ Parfactor::findGroup (const Ground& ground) const bool Parfactor::containsGround (const Ground& ground) const { - return findGroup (ground) != numeric_limits::max(); + return findGroup (ground) != std::numeric_limits::max(); } @@ -672,9 +688,9 @@ Parfactor::expandPotential ( { ullong newSize = (params_.size() / ranges_[fIdx]) * newRange; if (newSize > params_.max_size()) { - cerr << "error: an overflow occurred when performing expansion" ; + cerr << "Error: an overflow occurred when performing expansion." 
; cerr << endl; - abort(); + exit (EXIT_FAILURE); } Params backup = params_; @@ -789,7 +805,7 @@ Parfactor::simplifyParfactor (size_t fIdx1, size_t fIdx2) while (indexer.valid()) { if (indexer[fIdx1] == indexer[fIdx2]) { params_.push_back (backup[indexer]); - } + } ++ indexer; } for (size_t i = 0; i < args_[fIdx2].logVars().size(); i++) { @@ -812,7 +828,7 @@ Parfactor::getAlignLogVars (Parfactor* g1, Parfactor* g2) TinySet matchedI; TinySet matchedJ; ProbFormulas& formulas1 = g1->arguments(); - ProbFormulas& formulas2 = g2->arguments(); + ProbFormulas& formulas2 = g2->arguments(); for (size_t i = 0; i < formulas1.size(); i++) { for (size_t j = 0; j < formulas2.size(); j++) { if (formulas1[i].group() == formulas2[j].group() && @@ -865,7 +881,7 @@ Parfactor::alignLogicalVars (Parfactor* g1, Parfactor* g2) LogVar freeLogVar (0); Substitution theta1, theta2; for (size_t i = 0; i < alignLvs1.size(); i++) { - bool b1 = theta1.containsReplacementFor (alignLvs1[i]); + bool b1 = theta1.containsReplacementFor (alignLvs1[i]); bool b2 = theta2.containsReplacementFor (alignLvs2[i]); if (b1 == false && b2 == false) { theta1.add (alignLvs1[i], freeLogVar); @@ -894,11 +910,11 @@ Parfactor::alignLogicalVars (Parfactor* g1, Parfactor* g2) } // handle this type of situation: - // g1 = p(X), q(X) ; X in {(p1),(p2)} + // g1 = p(X), q(X) ; X in {(p1),(p2)} // g2 = p(X), q(Y) ; (X,Y) in {(p1,p2),(p2,p1)} LogVars discardedLvs1 = theta1.getDiscardedLogVars(); for (size_t i = 0; i < discardedLvs1.size(); i++) { - if (g1->constr()->isSingleton (discardedLvs1[i]) && + if (g1->constr()->isSingleton (discardedLvs1[i]) && g1->nrFormulas (discardedLvs1[i]) == 1) { g1->constr()->remove (discardedLvs1[i]); } else { diff --git a/packages/CLPBN/horus/Parfactor.h b/packages/CLPBN/horus/Parfactor.h index 5f6aec550..f21fc5f69 100644 --- a/packages/CLPBN/horus/Parfactor.h +++ b/packages/CLPBN/horus/Parfactor.h @@ -1,15 +1,12 @@ #ifndef HORUS_PARFACTOR_H #define HORUS_PARFACTOR_H -#include -#include - +#include "Factor.h" #include "ProbFormula.h" #include "ConstraintTree.h" #include "LiftedUtils.h" #include "Horus.h" -#include "Factor.h" class Parfactor : public TFactor { @@ -33,21 +30,21 @@ class Parfactor : public TFactor const ConstraintTree* constr (void) const { return constr_; } const LogVars& logVars (void) const { return constr_->logVars(); } - + const LogVarSet& logVarSet (void) const { return constr_->logVarSet(); } LogVarSet countedLogVars (void) const; LogVarSet uncountedLogVars (void) const; - + LogVarSet elimLogVars (void) const; - + LogVarSet exclusiveLogVars (size_t fIdx) const; - + void sumOutIndex (size_t fIdx); void multiply (Parfactor&); - + bool canCountConvert (LogVar X); void countConvert (LogVar); @@ -75,7 +72,7 @@ class Parfactor : public TFactor bool containsGroup (PrvGroup) const; bool containsGroups (vector) const; - + unsigned nrFormulas (LogVar) const; int indexOfLogVar (LogVar) const; @@ -99,7 +96,6 @@ class Parfactor : public TFactor static bool canMultiply (Parfactor*, Parfactor*); private: - void simplifyCountingFormulas (size_t fIdx); void simplifyParfactor (size_t fIdx1, size_t fIdx2); @@ -113,11 +109,11 @@ class Parfactor : public TFactor static void alignAndExponentiate (Parfactor*, Parfactor*); static void alignLogicalVars (Parfactor*, Parfactor*); - - ConstraintTree* constr_; - -}; + ConstraintTree* constr_; + + DISALLOW_ASSIGN (Parfactor); +}; typedef vector Parfactors; diff --git a/packages/CLPBN/horus/ParfactorList.cpp b/packages/CLPBN/horus/ParfactorList.cpp index b4496a683..3481696de 
100644 --- a/packages/CLPBN/horus/ParfactorList.cpp +++ b/packages/CLPBN/horus/ParfactorList.cpp @@ -1,5 +1,7 @@ #include +#include + #include "ParfactorList.h" @@ -9,7 +11,7 @@ ParfactorList::ParfactorList (const ParfactorList& pfList) while (it != pfList.end()) { addShattered (new Parfactor (**it)); ++ it; - } + } } @@ -74,7 +76,7 @@ ParfactorList::insertShattered ( list::iterator -ParfactorList::remove (list::iterator it) +ParfactorList::remove (list::iterator it) { return pfList_.erase (it); } @@ -221,7 +223,7 @@ ParfactorList::isShattered ( } - + void ParfactorList::addToShatteredList (Parfactor* g) { @@ -334,9 +336,9 @@ ParfactorList::shatterAgainstMySelf ( ProbFormula& f1 = g->argument (fIdx1); ProbFormula& f2 = g->argument (fIdx2); if (f1.isAtom()) { - cerr << "error: a ground occurs twice in a parfactor" << endl; + cerr << "Error: a ground occurs twice in the same parfactor." << endl; cerr << endl; - abort(); + exit (EXIT_FAILURE); } assert (g->constr()->empty() == false); ConstraintTree ctCopy (*g->constr()); @@ -412,13 +414,13 @@ ParfactorList::shatter (Parfactor* g1, Parfactor* g2) { ProbFormulas& formulas1 = g1->arguments(); ProbFormulas& formulas2 = g2->arguments(); - assert (g1 != 0 && g2 != 0 && g1 != g2); + assert (g1 && g2 && g1 != g2); for (size_t i = 0; i < formulas1.size(); i++) { for (size_t j = 0; j < formulas2.size(); j++) { if (formulas1[i].sameSkeletonAs (formulas2[j])) { std::pair res; res = shatter (i, g1, j, g2); - if (res.first.empty() == false || + if (res.first.empty() == false || res.second.empty() == false) { return res; } @@ -470,7 +472,7 @@ ParfactorList::shatter ( ConstraintTree* exclCt1 = split1.second; if (commCt1->empty()) { - // disjoint + // disjoint delete commCt1; delete exclCt1; return { }; @@ -481,7 +483,7 @@ ParfactorList::shatter ( ConstraintTree* commCt2 = split2.first; ConstraintTree* exclCt2 = split2.second; - assert (commCt1->tupleSet (f1.logVars()) == + assert (commCt1->tupleSet (f1.logVars()) == commCt2->tupleSet (f2.logVars())); // stringstream ss1; ss1 << "" << count << "_A.dot" ; @@ -549,11 +551,11 @@ ParfactorList::shatter ( Parfactor* newPf = new Parfactor (g, cts[i]); if (cts[i]->nrLogVars() == g->constr()->nrLogVars() + 1) { newPf->expand (f.countedLogVar(), X_new1, X_new2); - assert (g->constr()->getConditionalCount (f.countedLogVar()) == + assert (g->constr()->getConditionalCount (f.countedLogVar()) == cts[i]->getConditionalCount (X_new1) + cts[i]->getConditionalCount (X_new2)); } else { - assert (g->constr()->getConditionalCount (f.countedLogVar()) == + assert (g->constr()->getConditionalCount (f.countedLogVar()) == cts[i]->getConditionalCount (f.countedLogVar())); } newPf->setNewGroups(); diff --git a/packages/CLPBN/horus/ParfactorList.h b/packages/CLPBN/horus/ParfactorList.h index 48008b253..377299952 100644 --- a/packages/CLPBN/horus/ParfactorList.h +++ b/packages/CLPBN/horus/ParfactorList.h @@ -2,7 +2,6 @@ #define HORUS_PARFACTORLIST_H #include -#include #include "Parfactor.h" #include "ProbFormula.h" @@ -11,6 +10,8 @@ using namespace std; +class Parfactor; + class ParfactorList { public: @@ -56,11 +57,10 @@ class ParfactorList bool isAllShattered (void) const; void print (void) const; - + ParfactorList& operator= (const ParfactorList& pfList); private: - bool isShattered (const Parfactor*) const; bool isShattered (const Parfactor*, const Parfactor*) const; @@ -73,7 +73,7 @@ class ParfactorList Parfactors shatterAgainstMySelf ( Parfactor* g, size_t fIdx1, size_t fIdx2); - + std::pair shatter ( Parfactor*, Parfactor*); diff 
--git a/packages/CLPBN/horus/ProbFormula.cpp b/packages/CLPBN/horus/ProbFormula.cpp index fa2d26d05..67473734c 100644 --- a/packages/CLPBN/horus/ProbFormula.cpp +++ b/packages/CLPBN/horus/ProbFormula.cpp @@ -40,7 +40,7 @@ ProbFormula::indexOf (LogVar X) const bool ProbFormula::isAtom (void) const { - return logVars_.size() == 0; + return logVars_.empty(); } @@ -61,7 +61,7 @@ ProbFormula::countedLogVar (void) const } - + void ProbFormula::setCountedLogVar (LogVar lv) { @@ -92,9 +92,10 @@ ProbFormula::rename (LogVar oldName, LogVar newName) } + bool operator== (const ProbFormula& f1, const ProbFormula& f2) -{ - return f1.group_ == f2.group_ && +{ + return f1.group_ == f2.group_ && f1.logVars_ == f2.logVars_; } @@ -124,7 +125,7 @@ PrvGroup ProbFormula::getNewGroup (void) { freeGroup_ ++; - assert (freeGroup_ != numeric_limits::max()); + assert (freeGroup_ != std::numeric_limits::max()); return freeGroup_; } diff --git a/packages/CLPBN/horus/ProbFormula.h b/packages/CLPBN/horus/ProbFormula.h index 61b016288..48824b5db 100644 --- a/packages/CLPBN/horus/ProbFormula.h +++ b/packages/CLPBN/horus/ProbFormula.h @@ -12,12 +12,13 @@ typedef unsigned long PrvGroup; class ProbFormula { public: - ProbFormula (Symbol f, const LogVars& lvs, unsigned range) + ProbFormula (Symbol f, const LogVars& lvs, unsigned range) : functor_(f), logVars_(lvs), range_(range), - countedLogVar_(), group_(numeric_limits::max()) { } + countedLogVar_(), group_(std::numeric_limits::max()) { } - ProbFormula (Symbol f, unsigned r) - : functor_(f), range_(r), group_(numeric_limits::max()) { } + ProbFormula (Symbol f, unsigned r) + : functor_(f), range_(r), + group_(std::numeric_limits::max()) { } Symbol functor (void) const { return functor_; } @@ -30,11 +31,11 @@ class ProbFormula const LogVars& logVars (void) const { return logVars_; } LogVarSet logVarSet (void) const { return LogVarSet (logVars_); } - + PrvGroup group (void) const { return group_; } void setGroup (PrvGroup g) { group_ = g; } - + bool sameSkeletonAs (const ProbFormula&) const; bool contains (LogVar) const; @@ -48,20 +49,20 @@ class ProbFormula bool isCounting (void) const; LogVar countedLogVar (void) const; - + void setCountedLogVar (LogVar); void clearCountedLogVar (void); - + void rename (LogVar, LogVar); - + static PrvGroup getNewGroup (void); friend std::ostream& operator<< (ostream &os, const ProbFormula& f); friend bool operator== (const ProbFormula& f1, const ProbFormula& f2); - private: + private: Symbol functor_; LogVars logVars_; unsigned range_; @@ -76,10 +77,10 @@ typedef vector ProbFormulas; class ObservedFormula { public: - ObservedFormula (Symbol f, unsigned a, unsigned ev) + ObservedFormula (Symbol f, unsigned a, unsigned ev) : functor_(f), arity_(a), evidence_(ev), constr_(a) { } - ObservedFormula (Symbol f, unsigned ev, const Tuple& tuple) + ObservedFormula (Symbol f, unsigned ev, const Tuple& tuple) : functor_(f), arity_(tuple.size()), evidence_(ev), constr_(arity_) { constr_.addTuple (tuple); diff --git a/packages/CLPBN/horus/TODO b/packages/CLPBN/horus/TODO deleted file mode 100644 index 360fa65ca..000000000 --- a/packages/CLPBN/horus/TODO +++ /dev/null @@ -1,2 +0,0 @@ -- Handle formulas like f(X,X) - diff --git a/packages/CLPBN/horus/TinySet.h b/packages/CLPBN/horus/TinySet.h index ed810bcde..f307e4530 100644 --- a/packages/CLPBN/horus/TinySet.h +++ b/packages/CLPBN/horus/TinySet.h @@ -1,9 +1,10 @@ #ifndef HORUS_TINYSET_H #define HORUS_TINYSET_H -#include #include +#include + using namespace std; @@ -21,7 +22,7 @@ class TinySet TinySet 
(const Compare& cmp = Compare()) : vec_(), cmp_(cmp) { } - TinySet (const T& t, const Compare& cmp = Compare()) + TinySet (const T& t, const Compare& cmp = Compare()) : vec_(1, t), cmp_(cmp) { } TinySet (const vector& elements, const Compare& cmp = Compare()) @@ -153,12 +154,12 @@ class TinySet { return vec_[i]; } - + T& operator[] (typename vector::size_type i) { return vec_[i]; } - + T front (void) const { return vec_.front(); @@ -186,7 +187,7 @@ class TinySet bool empty (void) const { - return size() == 0; + return vec_.empty(); } typename vector::size_type size (void) const @@ -219,13 +220,13 @@ class TinySet return ! (s1.vec_ == s2.vec_); } - friend std::ostream& operator << (std::ostream& out, const TinySet& s) + friend std::ostream& operator<< (std::ostream& out, const TinySet& s) { out << "{" ; typename vector::size_type i; for (i = 0; i < s.size(); i++) { out << ((i != 0) ? "," : "") << s.vec_[i]; - } + } out << "}" ; return out; } diff --git a/packages/CLPBN/horus/Util.cpp b/packages/CLPBN/horus/Util.cpp index d3dbd588d..ca681b9d7 100644 --- a/packages/CLPBN/horus/Util.cpp +++ b/packages/CLPBN/horus/Util.cpp @@ -1,11 +1,9 @@ -#include - -#include #include #include "Util.h" #include "Indexer.h" #include "ElimGraph.h" +#include "BeliefProp.h" namespace Globals { @@ -21,17 +19,6 @@ GroundSolverType groundSolver = GroundSolverType::VE; -namespace BpOptions { -Schedule schedule = BpOptions::Schedule::SEQ_FIXED; -//Schedule schedule = BpOptions::Schedule::SEQ_RANDOM; -//Schedule schedule = BpOptions::Schedule::PARALLEL; -//Schedule schedule = BpOptions::Schedule::MAX_RESIDUAL; -double accuracy = 0.0001; -unsigned maxIter = 1000; -} - - - namespace Util { @@ -53,8 +40,8 @@ stringToUnsigned (string str) ss << str; ss >> val; if (val < 0) { - cerr << "error: the readed number is negative" << endl; - abort(); + cerr << "Error: the number readed is negative." 
<< endl; + exit (EXIT_FAILURE); } return static_cast (val); } @@ -140,7 +127,7 @@ nrDigits (int num) { unsigned count = 1; while (num >= 10) { - num /= 10; + num /= 10; count ++; } return count; @@ -166,7 +153,7 @@ parametersToString (const Params& v, unsigned precision) { stringstream ss; ss.precision (precision); - ss << "[" ; + ss << "[" ; for (size_t i = 0; i < v.size(); i++) { if (i != 0) ss << ", " ; ss << v[i]; @@ -201,88 +188,104 @@ getStateLines (const Vars& vars) +bool invalidValue (string key, string value) +{ + cerr << "Warning: invalid value `" << value << "' " ; + cerr << "for `" << key << "'" ; + cerr << endl; + return false; +} + + + bool setHorusFlag (string key, string value) { bool returnVal = true; - if (key == "verbosity") { + if (key == "lifted_solver") { + if (value == "lve") Globals::liftedSolver = LiftedSolverType::LVE; + else if (value == "lbp") Globals::liftedSolver = LiftedSolverType::LBP; + else if (value == "lkc") Globals::liftedSolver = LiftedSolverType::LKC; + else returnVal = invalidValue (key, value); + + } else if (key == "ground_solver" || key == "solver") { + if (value == "hve") Globals::groundSolver = GroundSolverType::VE; + else if (value == "bp") Globals::groundSolver = GroundSolverType::BP; + else if (value == "cbp") Globals::groundSolver = GroundSolverType::CBP; + else returnVal = invalidValue (key, value); + + } else if (key == "verbosity") { stringstream ss; ss << value; ss >> Globals::verbosity; - } else if (key == "lifted_solver") { - if ( value == "lve") { - Globals::liftedSolver = LiftedSolverType::LVE; - } else if (value == "lbp") { - Globals::liftedSolver = LiftedSolverType::LBP; - } else if (value == "lkc") { - Globals::liftedSolver = LiftedSolverType::LKC; - } else { - cerr << "warning: invalid value `" << value << "' " ; - cerr << "for `" << key << "'" << endl; - returnVal = false; - } - } else if (key == "ground_solver") { - if ( value == "ve") { - Globals::groundSolver = GroundSolverType::VE; - } else if (value == "bp") { - Globals::groundSolver = GroundSolverType::BP; - } else if (value == "cbp") { - Globals::groundSolver = GroundSolverType::CBP; - } else { - cerr << "warning: invalid value `" << value << "' " ; - cerr << "for `" << key << "'" << endl; - returnVal = false; - } - } else if (key == "elim_heuristic") { - if ( value == "sequential") { - ElimGraph::elimHeuristic = ElimHeuristic::SEQUENTIAL; - } else if (value == "min_neighbors") { - ElimGraph::elimHeuristic = ElimHeuristic::MIN_NEIGHBORS; - } else if (value == "min_weight") { - ElimGraph::elimHeuristic = ElimHeuristic::MIN_WEIGHT; - } else if (value == "min_fill") { - ElimGraph::elimHeuristic = ElimHeuristic::MIN_FILL; - } else if (value == "weighted_min_fill") { - ElimGraph::elimHeuristic = ElimHeuristic::WEIGHTED_MIN_FILL; - } else { - cerr << "warning: invalid value `" << value << "' " ; - cerr << "for `" << key << "'" << endl; - returnVal = false; - } - } else if (key == "schedule") { - if ( value == "seq_fixed") { - BpOptions::schedule = BpOptions::Schedule::SEQ_FIXED; - } else if (value == "seq_random") { - BpOptions::schedule = BpOptions::Schedule::SEQ_RANDOM; - } else if (value == "parallel") { - BpOptions::schedule = BpOptions::Schedule::PARALLEL; - } else if (value == "max_residual") { - BpOptions::schedule = BpOptions::Schedule::MAX_RESIDUAL; - } else { - cerr << "warning: invalid value `" << value << "' " ; - cerr << "for `" << key << "'" << endl; - returnVal = false; - } - } else if (key == "accuracy") { - stringstream ss; - ss << value; - ss >> 
BpOptions::accuracy; - } else if (key == "max_iter") { - stringstream ss; - ss << value; - ss >> BpOptions::maxIter; + } else if (key == "use_logarithms") { - if ( value == "true") { - Globals::logDomain = true; - } else if (value == "false") { - Globals::logDomain = false; - } else { - cerr << "warning: invalid value `" << value << "' " ; - cerr << "for `" << key << "'" << endl; - returnVal = false; - } + if (value == "true") Globals::logDomain = true; + else if (value == "false") Globals::logDomain = false; + else returnVal = invalidValue (key, value); + + } else if (key == "hve_elim_heuristic") { + if (value == "sequential") + ElimGraph::setElimHeuristic (ElimHeuristic::SEQUENTIAL); + else if (value == "min_neighbors") + ElimGraph::setElimHeuristic (ElimHeuristic::MIN_NEIGHBORS); + else if (value == "min_weight") + ElimGraph::setElimHeuristic (ElimHeuristic::MIN_WEIGHT); + else if (value == "min_fill") + ElimGraph::setElimHeuristic (ElimHeuristic::MIN_FILL); + else if (value == "weighted_min_fill") + ElimGraph::setElimHeuristic (ElimHeuristic::WEIGHTED_MIN_FILL); + else + returnVal = invalidValue (key, value); + + } else if (key == "bp_msg_schedule") { + if (value == "seq_fixed") + BeliefProp::setMsgSchedule (MsgSchedule::SEQ_FIXED); + else if (value == "seq_random") + BeliefProp::setMsgSchedule (MsgSchedule::SEQ_RANDOM); + else if (value == "parallel") + BeliefProp::setMsgSchedule (MsgSchedule::PARALLEL); + else if (value == "max_residual") + BeliefProp::setMsgSchedule (MsgSchedule::MAX_RESIDUAL); + else + returnVal = invalidValue (key, value); + + } else if (key == "bp_accuracy") { + stringstream ss; + double acc; + ss << value; + ss >> acc; + BeliefProp::setAccuracy (acc); + + } else if (key == "bp_max_iter") { + stringstream ss; + unsigned mi; + ss << value; + ss >> mi; + BeliefProp::setMaxIterations (mi); + + } else if (key == "export_libdai") { + if (value == "true") FactorGraph::enableExportToLibDai(); + else if (value == "false") FactorGraph::disableExportToLibDai(); + else returnVal = invalidValue (key, value); + + } else if (key == "export_uai") { + if (value == "true") FactorGraph::enableExportToUai(); + else if (value == "false") FactorGraph::disableExportToUai(); + else returnVal = invalidValue (key, value); + + } else if (key == "export_graphviz") { + if (value == "true") FactorGraph::enableExportToGraphViz(); + else if (value == "false") FactorGraph::disableExportToGraphViz(); + else returnVal = invalidValue (key, value); + + } else if (key == "print_fg") { + if (value == "true") FactorGraph::enablePrintFactorGraph(); + else if (value == "false") FactorGraph::disablePrintFactorGraph(); + else returnVal = invalidValue (key, value); + } else { - cerr << "warning: invalid key `" << key << "'" << endl; + cerr << "Warning: invalid key `" << key << "'" << endl; returnVal = false; } return returnVal; @@ -341,7 +344,7 @@ normalize (Params& v) if (Globals::logDomain) { double sum = std::accumulate (v.begin(), v.end(), LogAware::addIdenty(), Util::logSum); - assert (sum != -numeric_limits::infinity()); + assert (sum != -std::numeric_limits::infinity()); v -= sum; } else { double sum = std::accumulate (v.begin(), v.end(), 0.0); diff --git a/packages/CLPBN/horus/Util.h b/packages/CLPBN/horus/Util.h index 38a088714..f73651013 100644 --- a/packages/CLPBN/horus/Util.h +++ b/packages/CLPBN/horus/Util.h @@ -3,16 +3,17 @@ #include #include -#include #include +#include + #include -#include #include +#include #include -#include #include +#include #include "Horus.h" @@ -20,7 +21,7 @@ using 
namespace std; namespace { -const double NEG_INF = -numeric_limits::infinity(); +const double NEG_INF = -std::numeric_limits::infinity(); }; @@ -42,7 +43,8 @@ template bool contains ( template size_t indexOf (const vector&, const T&); template -void apply_n_times (Params& v1, const Params& v2, unsigned repetitions, Operation); +void apply_n_times (Params& v1, const Params& v2, + unsigned repetitions, Operation); template void log (vector&); @@ -245,7 +247,7 @@ Util::logSum (double x, double y) inline unsigned Util::maxUnsigned (void) { - return numeric_limits::max(); + return std::numeric_limits::max(); } @@ -373,8 +375,8 @@ void operator^=(std::vector& v, int iexp) -template -std::ostream& operator << (std::ostream& os, const vector& v) +template +std::ostream& operator<< (std::ostream& os, const vector& v) { os << "[" ; os << Util::elementsToString (v, ", "); diff --git a/packages/CLPBN/horus/Var.cpp b/packages/CLPBN/horus/Var.cpp index 44ab6b1e4..99540718a 100644 --- a/packages/CLPBN/horus/Var.cpp +++ b/packages/CLPBN/horus/Var.cpp @@ -1,10 +1,7 @@ -#include #include #include "Var.h" -using namespace std; - unordered_map Var::varsInfo_; @@ -14,7 +11,7 @@ Var::Var (const Var* v) varId_ = v->varId(); range_ = v->range(); evidence_ = v->getEvidence(); - index_ = std::numeric_limits::max(); + index_ = Util::maxUnsigned(); } @@ -26,7 +23,7 @@ Var::Var (VarId varId, unsigned range, int evidence) varId_ = varId; range_ = range; evidence_ = evidence; - index_ = std::numeric_limits::max(); + index_ = Util::maxUnsigned(); } @@ -39,35 +36,11 @@ Var::isValidState (int stateIndex) -bool -Var::isValidState (const string& stateName) -{ - States states = Var::getVarInfo (varId_).states; - return Util::contains (states, stateName); -} - - - void -Var::setEvidence (int ev) +Var::setEvidence (int evidence) { - assert (ev < (int) range_); - evidence_ = ev; -} - - - -void -Var::setEvidence (const string& ev) -{ - States states = Var::getVarInfo (varId_).states; - for (size_t i = 0; i < states.size(); i++) { - if (states[i] == ev) { - evidence_ = i; - return; - } - } - assert (false); + assert (evidence < (int) range_); + evidence_ = evidence; } diff --git a/packages/CLPBN/horus/Var.h b/packages/CLPBN/horus/Var.h index 8ab580c3a..3ae6eeed8 100644 --- a/packages/CLPBN/horus/Var.h +++ b/packages/CLPBN/horus/Var.h @@ -3,8 +3,6 @@ #include -#include - #include "Util.h" #include "Horus.h" @@ -14,7 +12,8 @@ using namespace std; struct VarInfo { - VarInfo (string l, const States& sts) : label(l), states(sts) { } + VarInfo (string l, const States& sts) + : label(l), states(sts) { } string label; States states; }; @@ -55,18 +54,13 @@ class Var bool operator!= (const Var& var) const { - assert (!(varId_ == var.varId() && range_ != var.range())); - return varId_ != var.varId(); + return !(*this == var); } bool isValidState (int); - bool isValidState (const string&); - void setEvidence (int); - void setEvidence (const string&); - string label (void) const; States states (void) const; @@ -86,7 +80,7 @@ class Var static bool varsHaveInfo (void) { - return varsInfo_.size() != 0; + return varsInfo_.empty() == false; } static void clearVarsInfo (void) diff --git a/packages/CLPBN/horus/VarElim.cpp b/packages/CLPBN/horus/VarElim.cpp index b2c4dc4ec..e1b11edf8 100644 --- a/packages/CLPBN/horus/VarElim.cpp +++ b/packages/CLPBN/horus/VarElim.cpp @@ -6,13 +6,6 @@ #include "Util.h" -VarElim::~VarElim (void) -{ - delete factorList_.back(); -} - - - Params VarElim::solveQuery (VarIds queryVids) { @@ -24,14 +17,13 @@ 
VarElim::solveQuery (VarIds queryVids) } cout << endl; } + totalFactorSize_ = 0; + largestFactorSize_ = 0; factorList_.clear(); - varFactors_.clear(); - elimOrder_.clear(); + varMap_.clear(); createFactorList(); absorveEvidence(); - findEliminationOrder (queryVids); - processFactorList (queryVids); - Params params = factorList_.back()->params(); + Params params = processFactorList (queryVids); if (Globals::logDomain) { Util::exp (params); } @@ -46,13 +38,12 @@ VarElim::printSolverFlags (void) const stringstream ss; ss << "variable elimination [" ; ss << "elim_heuristic=" ; - ElimHeuristic eh = ElimGraph::elimHeuristic; - switch (eh) { - case SEQUENTIAL: ss << "sequential"; break; - case MIN_NEIGHBORS: ss << "min_neighbors"; break; - case MIN_WEIGHT: ss << "min_weight"; break; - case MIN_FILL: ss << "min_fill"; break; - case WEIGHTED_MIN_FILL: ss << "weighted_min_fill"; break; + switch (ElimGraph::elimHeuristic()) { + case ElimHeuristic::SEQUENTIAL: ss << "sequential"; break; + case ElimHeuristic::MIN_NEIGHBORS: ss << "min_neighbors"; break; + case ElimHeuristic::MIN_WEIGHT: ss << "min_weight"; break; + case ElimHeuristic::MIN_FILL: ss << "min_fill"; break; + case ElimHeuristic::WEIGHTED_MIN_FILL: ss << "weighted_min_fill"; break; } ss << ",log_domain=" << Util::toString (Globals::logDomain); ss << "]" ; @@ -68,18 +59,18 @@ VarElim::createFactorList (void) factorList_.reserve (facNodes.size() * 2); for (size_t i = 0; i < facNodes.size(); i++) { factorList_.push_back (new Factor (facNodes[i]->factor())); - const VarNodes& neighs = facNodes[i]->neighbors(); - for (size_t j = 0; j < neighs.size(); j++) { - unordered_map>::iterator it - = varFactors_.find (neighs[j]->varId()); - if (it == varFactors_.end()) { - it = varFactors_.insert (make_pair ( - neighs[j]->varId(), vector())).first; + const VarIds& args = facNodes[i]->factor().arguments(); + for (size_t j = 0; j < args.size(); j++) { + unordered_map>::iterator it; + it = varMap_.find (args[j]); + if (it != varMap_.end()) { + it->second.push_back (i); + } else { + varMap_[args[j]] = { i }; } - it->second.push_back (i); } } -} +} @@ -99,15 +90,15 @@ VarElim::absorveEvidence (void) cout << varNodes[i]->label() << " = " ; cout << varNodes[i]->getEvidence() << endl; } - const vector& idxs = - varFactors_.find (varNodes[i]->varId())->second; - for (size_t j = 0; j < idxs.size(); j++) { - Factor* factor = factorList_[idxs[j]]; - if (factor->nrArguments() == 1) { - factorList_[idxs[j]] = 0; - } else { - factorList_[idxs[j]]->absorveEvidence ( + const vector& indices = varMap_[varNodes[i]->varId()]; + for (size_t j = 0; j < indices.size(); j++) { + size_t idx = indices[j]; + if (factorList_[idx]->nrArguments() > 1) { + factorList_[idx]->absorveEvidence ( varNodes[i]->varId(), varNodes[i]->getEvidence()); + } else { + delete factorList_[idx]; + factorList_[idx] = 0; } } } @@ -116,72 +107,60 @@ VarElim::absorveEvidence (void) -void -VarElim::findEliminationOrder (const VarIds& vids) +Params +VarElim::processFactorList (const VarIds& queryVids) { - elimOrder_ = ElimGraph::getEliminationOrder (factorList_, vids); -} - - - -void -VarElim::processFactorList (const VarIds& vids) -{ - totalFactorSize_ = 0; - largestFactorSize_ = 0; - for (size_t i = 0; i < elimOrder_.size(); i++) { + VarIds elimOrder = ElimGraph::getEliminationOrder ( + factorList_, queryVids); + for (size_t i = 0; i < elimOrder.size(); i++) { if (Globals::verbosity >= 2) { if (Globals::verbosity >= 3) { Util::printDashedLine(); printActiveFactors(); } cout << "-> summing out " ; - cout << 
fg.getVarNode (elimOrder_[i])->label() << endl; + cout << fg.getVarNode (elimOrder[i])->label() << endl; } - eliminate (elimOrder_[i]); + eliminate (elimOrder[i]); } - Factor* finalFactor = new Factor(); + Factor result; for (size_t i = 0; i < factorList_.size(); i++) { if (factorList_[i]) { - finalFactor->multiply (*factorList_[i]); + result.multiply (*factorList_[i]); delete factorList_[i]; factorList_[i] = 0; } } VarIds unobservedVids; - for (size_t i = 0; i < vids.size(); i++) { - if (fg.getVarNode (vids[i])->hasEvidence() == false) { - unobservedVids.push_back (vids[i]); + for (size_t i = 0; i < queryVids.size(); i++) { + if (fg.getVarNode (queryVids[i])->hasEvidence() == false) { + unobservedVids.push_back (queryVids[i]); } } - finalFactor->reorderArguments (unobservedVids); - finalFactor->normalize(); - factorList_.push_back (finalFactor); + result.reorderArguments (unobservedVids); + result.normalize(); if (Globals::verbosity > 0) { cout << "total factor size: " << totalFactorSize_ << endl; cout << "largest factor size: " << largestFactorSize_ << endl; cout << endl; } + return result.params(); } void -VarElim::eliminate (VarId elimVar) +VarElim::eliminate (VarId vid) { - Factor* result = 0; - vector& idxs = varFactors_.find (elimVar)->second; - for (size_t i = 0; i < idxs.size(); i++) { - size_t idx = idxs[i]; + Factor* result = new Factor(); + const vector& indices = varMap_[vid]; + for (size_t i = 0; i < indices.size(); i++) { + size_t idx = indices[i]; if (factorList_[idx]) { - if (result == 0) { - result = new Factor (*factorList_[idx]); - } else { - result->multiply (*factorList_[idx]); - } + result->multiply (*factorList_[idx]); delete factorList_[idx]; factorList_[idx] = 0; } @@ -190,15 +169,16 @@ VarElim::eliminate (VarId elimVar) if (result->size() > largestFactorSize_) { largestFactorSize_ = result->size(); } - if (result != 0 && result->nrArguments() != 1) { - result->sumOut (elimVar); - factorList_.push_back (result); - const VarIds& resultVarIds = result->arguments(); - for (size_t i = 0; i < resultVarIds.size(); i++) { - vector& idxs = - varFactors_.find (resultVarIds[i])->second; - idxs.push_back (factorList_.size() - 1); + if (result->nrArguments() > 1) { + result->sumOut (vid); + const VarIds& args = result->arguments(); + for (size_t i = 0; i < args.size(); i++) { + vector& indices2 = varMap_[args[i]]; + indices2.push_back (factorList_.size()); } + factorList_.push_back (result); + } else { + delete result; } } @@ -208,9 +188,10 @@ void VarElim::printActiveFactors (void) { for (size_t i = 0; i < factorList_.size(); i++) { - if (factorList_[i] != 0) { + if (factorList_[i]) { cout << factorList_[i]->getLabel() << " " ; - cout << factorList_[i]->params() << endl; + cout << factorList_[i]->params(); + cout << endl; } } } diff --git a/packages/CLPBN/horus/VarElim.h b/packages/CLPBN/horus/VarElim.h index fe1327fc0..da05e51a1 100644 --- a/packages/CLPBN/horus/VarElim.h +++ b/packages/CLPBN/horus/VarElim.h @@ -16,7 +16,7 @@ class VarElim : public GroundSolver public: VarElim (const FactorGraph& fg) : GroundSolver (fg) { } - ~VarElim (void); + ~VarElim (void) { } Params solveQuery (VarIds); @@ -27,19 +27,18 @@ class VarElim : public GroundSolver void absorveEvidence (void); - void findEliminationOrder (const VarIds&); - - void processFactorList (const VarIds&); + Params processFactorList (const VarIds&); void eliminate (VarId); void printActiveFactors (void); - Factors factorList_; - VarIds elimOrder_; - unsigned largestFactorSize_; - unsigned totalFactorSize_; - 
unordered_map> varFactors_; + Factors factorList_; + unsigned largestFactorSize_; + unsigned totalFactorSize_; + unordered_map> varMap_; + + DISALLOW_COPY_AND_ASSIGN (VarElim); }; #endif // HORUS_VARELIM_H diff --git a/packages/CLPBN/horus/WeightedBp.cpp b/packages/CLPBN/horus/WeightedBp.cpp index d8a32a246..269891f78 100644 --- a/packages/CLPBN/horus/WeightedBp.cpp +++ b/packages/CLPBN/horus/WeightedBp.cpp @@ -18,7 +18,7 @@ WeightedBp::getPosterioriOf (VarId vid) runSolver(); } VarNode* var = fg.getVarNode (vid); - assert (var != 0); + assert (var); Params probs; if (var->hasEvidence()) { probs.resize (var->range(), LogAware::noEvidence()); @@ -107,7 +107,7 @@ WeightedBp::maxResidualSchedule (void) if (Globals::verbosity >= 1) { cout << "updating " << (*sortedOrder_.begin())->toString() << endl; } - if (link->residual() < BpOptions::accuracy) { + if (link->residual() < accuracy_) { return; } link->updateMessage(); @@ -132,7 +132,7 @@ WeightedBp::maxResidualSchedule (void) } } // in counting bp, the message that a variable X sends to - // to a factor F depends on the message that F sent to the X + // to a factor F depends on the message that F sent to the X const BpLinks& links = ninf(link->facNode())->getLinks(); for (size_t i = 0; i < links.size(); i++) { if (links[i]->varNode() != link->varNode()) { @@ -258,7 +258,7 @@ WeightedBp::getVarToFactorMsg (const BpLink* _link) const if ( ! (l->facNode() == dst && l->index() == link->index())) { msg *= l->powMessage(); if (Constants::SHOW_BP_CALCS) { - cout << " x " << l->nextMessage() << "^" << link->weight(); + cout << " x " << l->nextMessage() << "^" << link->weight(); } } } diff --git a/packages/CLPBN/horus/WeightedBp.h b/packages/CLPBN/horus/WeightedBp.h index 844011ab5..9bb40fe95 100644 --- a/packages/CLPBN/horus/WeightedBp.h +++ b/packages/CLPBN/horus/WeightedBp.h @@ -6,7 +6,7 @@ class WeightedLink : public BpLink { public: - WeightedLink (FacNode* fn, VarNode* vn, size_t idx, unsigned weight) + WeightedLink (FacNode* fn, VarNode* vn, size_t idx, unsigned weight) : BpLink (fn, vn), index_(idx), weight_(weight), pwdMsg_(vn->range(), LogAware::one()) { } @@ -16,14 +16,16 @@ class WeightedLink : public BpLink const Params& powMessage (void) const { return pwdMsg_; } - void updateMessage (void) + void updateMessage (void) { pwdMsg_ = *nextMsg_; swap (currMsg_, nextMsg_); LogAware::pow (pwdMsg_, weight_); } - + private: + DISALLOW_COPY_AND_ASSIGN (WeightedLink); + size_t index_; unsigned weight_; Params pwdMsg_; @@ -33,17 +35,16 @@ class WeightedLink : public BpLink class WeightedBp : public BeliefProp { - public: + public: WeightedBp (const FactorGraph& fg, const vector>& weights) : BeliefProp (fg), weights_(weights) { } ~WeightedBp (void); - + Params getPosterioriOf (VarId); private: - void createLinks (void); void maxResidualSchedule (void); @@ -53,8 +54,10 @@ class WeightedBp : public BeliefProp Params getVarToFactorMsg (const BpLink*) const; void printLinkInformation (void) const; - + vector> weights_; + + DISALLOW_COPY_AND_ASSIGN (WeightedBp); }; #endif // HORUS_WEIGHTEDBP_H diff --git a/packages/CLPBN/learning/aleph_params.yap b/packages/CLPBN/learning/aleph_params.yap index 8162c8bf7..5fcf9be9d 100644 --- a/packages/CLPBN/learning/aleph_params.yap +++ b/packages/CLPBN/learning/aleph_params.yap @@ -10,44 +10,47 @@ % but some variables are of special type random. % :- module(clpbn_aleph, - [init_clpbn_cost/0, - random_type/2]). + [init_clpbn_cost/0, + random_type/2 + ]). :- dynamic rt/2, inited/1. 
:- use_module(library('clpbn'), - [{}/1, - clpbn_flag/2, - clpbn_flag/3, - set_clpbn_flag/2]). + [{}/1, + clpbn_flag/2, + clpbn_flag/3, + set_clpbn_flag/2 + ]). :- use_module(library('clpbn/learning/em')). :- use_module(library('clpbn/matrix_cpt_utils'), - [uniform_CPT_as_list/2]). + [uniform_CPT_as_list/2]). :- use_module(library('clpbn/dists'), - [reset_all_dists/0, - get_dist_key/2, - get_dist_params/2 - ]). + [reset_all_dists/0, + get_dist_key/2, + get_dist_params/2 + ]). :- use_module(library('clpbn/table'), - [clpbn_tabled_abolish/1, - clpbn_tabled_asserta/1, - clpbn_tabled_asserta/2, - clpbn_tabled_assertz/1, - clpbn_tabled_clause/2, - clpbn_tabled_clause_ref/3, - clpbn_tabled_number_of_clauses/2, - clpbn_is_tabled/1, - clpbn_reset_tables/0, - clpbn_tabled_dynamic/1]). + [clpbn_tabled_abolish/1, + clpbn_tabled_asserta/1, + clpbn_tabled_asserta/2, + clpbn_tabled_assertz/1, + clpbn_tabled_clause/2, + clpbn_tabled_clause_ref/3, + clpbn_tabled_number_of_clauses/2, + clpbn_is_tabled/1, + clpbn_reset_tables/0, + clpbn_tabled_dynamic/1 + ]). % % Tell Aleph not to use default solver during saturation % -% all work will be done by EM +% all work will be done by EM %:- set_clpbn_flag(solver,none). % @@ -94,11 +97,11 @@ enable_solver :- add_new_clause(_,(H :- _),_,_) :- ( clpbn_is_tabled(user:H) - -> + -> update_tabled_theory(H) - ; + ; update_theory(H) - ), + ), fail. % step 2: add clause add_new_clause(_,(_ :- true),_,_) :- !. @@ -113,18 +116,18 @@ add_new_clause(_,(H :- B),_,_) :- get_dist_key(Id, K), get_dist_params(Id, CPTList), ( - clpbn_is_tabled(user:H) + clpbn_is_tabled(user:H) -> - clpbn_tabled_asserta(user:(H :- IB)) + clpbn_tabled_asserta(user:(H :- IB)) ; - asserta(user:(H :- IB)) + asserta(user:(H :- IB)) ), user:setting(verbosity,V), - ( V >= 1 -> - user:p_message('CLP(BN) Theory'), - functor(H,N,Ar), listing(user:N/Ar) + ( V >= 1 -> + user:p_message('CLP(BN) Theory'), + functor(H,N,Ar), listing(user:N/Ar) ; - true + true ). @@ -135,7 +138,7 @@ update_tabled_theory(H) :- clpbn_tabled_assertz((user:(H:-NB))), fail. update_tabled_theory(_). - + update_theory(H) :- clause(user:H,B,Ref), add_correct_cpt(B,NB), @@ -158,29 +161,29 @@ correct_tab(p(Vs,_,Ps),K,p(Vs,TDist,Ps)) :- get_dist_key(Id, K), get_dist_params(Id, TDist). -% user-defined cost function, Aleph knows about this (and only about this). +% user-defined cost function, Aleph knows about this (and only about this). user:cost((H :- B),Inf,Score) :- domain(H, K, V, D), check_info(Inf), rewrite_body(B, IB, Vs, Ds, ( !, { V = K with p(D, CPTList, Vs) })), uniform_cpt([D|Ds], CPTList), ( - clpbn_is_tabled(user:H) + clpbn_is_tabled(user:H) -> - clpbn_reset_tables, - clpbn_tabled_asserta(user:(H :- IB), R) + clpbn_reset_tables, + clpbn_tabled_asserta(user:(H :- IB), R) ; - asserta(user:(H :- IB), R) + asserta(user:(H :- IB), R) ), ( - cpt_score(Score0) + cpt_score(Score0) -> - erase(R), - Score is -Score0 - ; - % illegal clause, just get out of here. - erase(R), - fail + erase(R), + Score is -Score0 + ; + % illegal clause, just get out of here. + erase(R), + fail ). user:cost(H,_Inf,Score) :- !, init_clpbn_cost(H, Score0), @@ -196,38 +199,38 @@ init_clpbn_cost(H, Score) :- functor(H,N,A), % get rid of Aleph crap ( - clpbn_is_tabled(user:H) + clpbn_is_tabled(user:H) -> - clpbn_tabled_abolish(user:N/A), - clpbn_tabled_dynamic(user:N/A) + clpbn_tabled_abolish(user:N/A), + clpbn_tabled_dynamic(user:N/A) ; - abolish(user:N/A), - % make it easy to add and remove clauses. 
- dynamic(user:N/A) + abolish(user:N/A), + % make it easy to add and remove clauses. + dynamic(user:N/A) ), domain(H, K, V, D), uniform_cpt([D], CPTList), % This will be the default cause, called when the other rules fail. ( - clpbn_is_tabled(user:H) + clpbn_is_tabled(user:H) -> - clpbn_tabled_assertz(user:(H :- !, { V = K with p(D, CPTList) })) + clpbn_tabled_assertz(user:(H :- !, { V = K with p(D, CPTList) })) ; - assert(user:(H :- !, { V = K with p(D, CPTList) })) - ), + assert(user:(H :- !, { V = K with p(D, CPTList) })) + ), cpt_score(Score), assert(inited(Score)). -% receives H, and generates a key K, a random variable RV, and a domain D. +% receives H, and generates a key K, a random variable RV, and a domain D. domain(H, K, RV, D) :- functor(H,Name,Arity), functor(Pred,Name,Arity), ( - recorded(aleph,modeh(_,Pred),_) - -> - true + recorded(aleph,modeh(_,Pred),_) + -> + true ; - user:'$aleph_global'(modeh,modeh(_,Pred)) + user:'$aleph_global'(modeh,modeh(_,Pred)) ), arg(Arity,Pred,+RType), rt(RType,D), !, @@ -240,11 +243,11 @@ domain(H, K, V, D) :- key_from_head(H,K,V) :- H =.. [Name|Args], ( - clpbn_is_tabled(user:H) + clpbn_is_tabled(user:H) -> - clpbn_tabled_number_of_clauses(user:H,NClauses) + clpbn_tabled_number_of_clauses(user:H,NClauses) ; - predicate_property(user:H,number_of_clauses(NClauses)) + predicate_property(user:H,number_of_clauses(NClauses)) ), atomic_concat(Name,NClauses,NName), append(H0L,[V],Args), @@ -258,7 +261,7 @@ rewrite_body((A,B), (user:NA,NB), [V|Vs], [D|Ds], Tail) :- rewrite_body(B, NB, Vs, Ds, Tail). rewrite_body((A,B), (user:A,NB), Vs, Ds, Tail) :- !, rewrite_body(B,NB, Vs, Ds, Tail). -rewrite_body(A,(user:NA,Tail), [V], [D], Tail) :- +rewrite_body(A,(user:NA,Tail), [V], [D], Tail) :- rewrite_goal(A, V, D, NA), !. rewrite_body(A, (user:A,Tail), [], [], Tail). @@ -267,11 +270,11 @@ rewrite_goal(A,V,D,NA) :- functor(A,Name,Arity), functor(Pred,Name,Arity), ( - recorded(aleph,modeb(_,Pred),_) - -> - true + recorded(aleph,modeb(_,Pred),_) + -> + true ; - user:'$aleph_global'(modeb,modeb(_,Pred)) + user:'$aleph_global'(modeb,modeb(_,Pred)) ), arg(Arity,Pred,-RType), rt(RType,D), !, @@ -288,7 +291,7 @@ replace_last_var([A|Args],V,[A|NArgs]) :- % This is the key % cpt_score(Lik) :- - findall(user:Ex, user:example(_,pos,Ex), Exs), + findall(user:Ex, user:example(_,pos,Ex), Exs), clpbn_flag(solver, Solver), clpbn_flag(em_solver, EMSolver), set_clpbn_flag(solver, EMSolver), diff --git a/packages/CLPBN/learning/bnt_parms.yap b/packages/CLPBN/learning/bnt_parms.yap index d3e8d9734..55be9d0ce 100644 --- a/packages/CLPBN/learning/bnt_parms.yap +++ b/packages/CLPBN/learning/bnt_parms.yap @@ -8,23 +8,23 @@ :- module(bnt_parameters, [learn_parameters/2]). -:- use_module(library('clpbn'), [ - clpbn_flag/3]). +:- use_module(library('clpbn'), + [clpbn_flag/3]). -:- use_module(library('clpbn/bnt'), [ - create_bnt_graph/2]). +:- use_module(library('clpbn/bnt'), + [create_bnt_graph/2]). -:- use_module(library('clpbn/display'), [ - clpbn_bind_vals/3]). +:- use_module(library('clpbn/display'), + [clpbn_bind_vals/3]). -:- use_module(library('clpbn/dists'), [ - get_dist_domain/2 - ]). +:- use_module(library('clpbn/dists'), + [get_dist_domain/2]). -:- use_module(library(matlab), [matlab_initialized_cells/4, - matlab_call/2, - matlab_get_variable/2 - ]). +:- use_module(library(matlab), + [matlab_initialized_cells/4, + matlab_call/2, + matlab_get_variable/2 + ]). :- dynamic bnt_em_max_iter/1. bnt_em_max_iter(10). 
@@ -61,7 +61,7 @@ clpbn_vars(Vs,BVars) :- get_clpbn_vars(Vs,CVs), keysort(CVs,KVs), merge_vars(KVs,BVars). - + get_clpbn_vars([],[]). get_clpbn_vars([V|GVars],[K-V|CLPBNGVars]) :- clpbn:get_atts(V, [key(K)]), !, @@ -73,8 +73,8 @@ merge_vars([],[]). merge_vars([K-V|KVs],[V|BVars]) :- get_var_has_same_key(KVs,K,V,KVs0), merge_vars(KVs0,BVars). - -get_var_has_same_key([K-V|KVs],K,V,KVs0) :- !, + +get_var_has_same_key([K-V|KVs],K,V,KVs0) :- !, get_var_has_same_key(KVs,K,V,KVs0). get_var_has_same_key(KVs,_,_,KVs). @@ -84,7 +84,7 @@ mk_sample(AllVars,NVars, LL) :- length(LN,LL), matlab_initialized_cells( NVars, 1, LN, sample). -add2sample([], []). +add2sample([], []). add2sample([V|Vs],[val(VId,1,Val)|Vals]) :- clpbn:get_atts(V, [evidence(Ev),dist(Id,_)]), !, bnt:get_atts(V,[bnt_id(VId)]), @@ -113,9 +113,9 @@ get_parameters([],[]). get_parameters([Rep-v(_,_,_)|Reps],[CPT|CPTs]) :- get_new_table(Rep,CPT), get_parameters(Reps,CPTs). - + get_new_table(Rep,CPT) :- s <-- struct(new_bnet.'CPD'({Rep})), matlab_get_variable( s.'CPT', CPT). - - + + diff --git a/packages/CLPBN/learning/em.yap b/packages/CLPBN/learning/em.yap index 335612442..7ace0b9b3 100644 --- a/packages/CLPBN/learning/em.yap +++ b/packages/CLPBN/learning/em.yap @@ -4,74 +4,78 @@ :- module(clpbn_em, [em/5]). -:- use_module(library(lists), - [append/3, - delete/3]). - :- reexport(library(clpbn), - [ - clpbn_flag/2, - clpbn_flag/3]). + [clpbn_flag/2, + clpbn_flag/3 + ]). :- use_module(library(clpbn), - [clpbn_init_graph/1, - clpbn_init_solver/5, - clpbn_run_solver/4, - pfl_init_solver/6, - pfl_run_solver/4, - clpbn_finalize_solver/1, - conditional_probability/3, - clpbn_flag/2]). + [clpbn_init_graph/1, + clpbn_init_solver/4, + clpbn_run_solver/3, + pfl_init_solver/5, + pfl_run_solver/3, + pfl_end_solver/1, + conditional_probability/3, + clpbn_flag/2 + ]). :- use_module(library('clpbn/dists'), - [get_dist_domain_size/2, - empty_dist/2, - dist_new_table/2, - get_dist_key/2, - randomise_all_dists/0, - uniformise_all_dists/0]). + [get_dist_domain_size/2, + empty_dist/2, + dist_new_table/2, + get_dist_key/2, + randomise_all_dists/0, + uniformise_all_dists/0 + ]). -:- use_module(library(clpbn/ground_factors), - [generate_network/5, - f/3]). +:- use_module(library('clpbn/ground_factors'), + [generate_network/5, + f/3 + ]). -:- use_module(library(bhash), [ - b_hash_new/1, - b_hash_lookup/3, - b_hash_insert/4]). +:- use_module(library('clpbn/utils'), + [check_for_hidden_vars/3, + sort_vars_by_key/3 + ]). :- use_module(library('clpbn/learning/learn_utils'), - [run_all/1, - clpbn_vars/2, - normalise_counts/2, - compute_likelihood/3, - soften_sample/2]). + [run_all/1, + clpbn_vars/2, + normalise_counts/2, + compute_likelihood/3, + soften_sample/2 + ]). + +:- use_module(library(bhash), + [b_hash_new/1, + b_hash_lookup/3, + b_hash_insert/4 + ]). + +:- use_module(library(matrix), + [matrix_add/3, + matrix_to_list/2 + ]). :- use_module(library(lists), - [member/2]). + [member/2]). + +:- use_module(library(rbtrees), + [rb_new/1, + rb_insert/4, + rb_lookup/3 + ]). :- use_module(library(maplist)). -:- use_module(library(matrix), - [matrix_add/3, - matrix_to_list/2]). - -:- use_module(library(rbtrees), - [rb_new/1, - rb_insert/4, - rb_lookup/3]). - -:- use_module(library('clpbn/utils'), - [ - check_for_hidden_vars/3, - sort_vars_by_key/3]). :- meta_predicate em(:,+,+,-,-), init_em(:,-). 
em(Items, MaxError, MaxIts, Tables, Likelihood) :- catch(init_em(Items, State),Error,handle_em(Error)), em_loop(0, 0.0, State, MaxError, MaxIts, Likelihood, Tables), - clpbn_finalize_solver(State), + end_em(State), assert(em_found(Tables, Likelihood)), fail. % get rid of new random variables the easy way :) @@ -81,9 +85,15 @@ em(_, _, _, Tables, Likelihood) :- handle_em(error(repeated_parents)) :- !, assert(em_found(_, -inf)), - fail. + fail. handle_em(Error) :- - throw(Error). + throw(Error). + + +end_em(state(_AllDists, _AllDistInstances, _MargKeys, SolverState)) :- + clpbn:use_parfactors(on), !, + pfl_end_solver(SolverState). +end_em(_). % This gets you an initial configuration. If there is a lot of evidence % tables may be filled in close to optimal, otherwise they may be @@ -101,17 +111,17 @@ init_em(Items, State) :- % randomise_all_dists, % set initial values for distributions uniformise_all_dists, - setup_em_network(Items, Solver, State). + setup_em_network(Items, State). -setup_em_network(Items, Solver, state( AllDists, AllDistInstances, MargKeys, SolverState)) :- +setup_em_network(Items, state(AllDists, AllDistInstances, MargKeys, SolverState)) :- clpbn:use_parfactors(on), !, % get all variables to marginalise run_examples(Items, Keys, Factors, EList), % get the EM CPT connections info from the factors generate_dists(Factors, EList, AllDists, AllDistInstances, MargKeys), % setup solver, if necessary - pfl_init_solver(MargKeys, Keys, Factors, EList, SolverState, Solver). -setup_em_network(Items, Solver, state( AllDists, AllDistInstances, MargVars, SolverVars)) :- + pfl_init_solver(MargKeys, Keys, Factors, EList, SolverState). +setup_em_network(Items, state(AllDists, AllDistInstances, MargVars, SolverState)) :- % create the ground network call_run_all(Items), % get all variables to marginalise @@ -121,35 +131,34 @@ setup_em_network(Items, Solver, state( AllDists, AllDistInstances, MargVars, Sol % remove variables that do not have to do with this query. different_dists(AllVars, AllDists, AllDistInstances, MargVars), % setup solver by doing parameter independent work. - clpbn_init_solver(Solver, MargVars, AllVars, _, SolverVars). + clpbn_init_solver(MargVars, AllVars, _, SolverState). run_examples(user:Exs, Keys, Factors, EList) :- - Exs = [_:_|_], !, - findall(ex(EKs, EFs, EEs), run_example(Exs, EKs, EFs, EEs), - VExs), - foldl4(join_example, VExs, [], Keys, [], Factors, [], EList, 0, _). + Exs = [_:_|_], !, + findall(ex(EKs, EFs, EEs), run_example(Exs, EKs, EFs, EEs), VExs), + foldl4(join_example, VExs, [], Keys, [], Factors, [], EList, 0, _). run_examples(Items, Keys, Factors, EList) :- - run_ex(Items, Keys, Factors, EList). + run_ex(Items, Keys, Factors, EList). join_example( ex(EKs, EFs, EEs), Keys0, Keys, Factors0, Factors, EList0, EList, I0, I) :- - I is I0+1, - foldl(process_key(I0), EKs, Keys0, Keys), - foldl(process_factor(I0), EFs, Factors0, Factors), - foldl(process_ev(I0), EEs, EList0, EList). + I is I0+1, + foldl(process_key(I0), EKs, Keys0, Keys), + foldl(process_factor(I0), EFs, Factors0, Factors), + foldl(process_ev(I0), EEs, EList0, EList). process_key(I0, K, Keys0, [I0:K|Keys0]). process_factor(I0, f(Type, Id, Keys), Keys0, [f(Type, Id, NKeys)|Keys0]) :- - maplist(update_key(I0), Keys, NKeys). + maplist(update_key(I0), Keys, NKeys). update_key(I0, K, I0:K). process_ev(I0, K=V, Es0, [(I0:K)=V|Es0]). run_example([_:Items|_], Keys, Factors, EList) :- - run_ex(user:Items, Keys, Factors, EList). + run_ex(user:Items, Keys, Factors, EList). 
run_example([_|LItems], Keys, Factors, EList) :- - run_example(LItems, Keys, Factors, EList). + run_example(LItems, Keys, Factors, EList). run_ex(Items, Keys, Factors, EList) :- % create the ground network @@ -166,19 +175,19 @@ em_loop(Its, Likelihood0, State, MaxError, MaxIts, LikelihoodF, FTables) :- estimate(State, LPs), maximise(State, Tables, LPs, Likelihood), ltables(Tables, F0Tables), - writeln(iteration:Its:Likelihood:Its:Likelihood0:F0Tables), + %writeln(iteration:Its:Likelihood:Its:Likelihood0:F0Tables), ( - ( - abs((Likelihood - Likelihood0)/Likelihood) < MaxError - ; - Its == MaxIts - ) + ( + abs((Likelihood - Likelihood0)/Likelihood) < MaxError + ; + Its == MaxIts + ) -> - ltables(Tables, FTables), - LikelihoodF = Likelihood + ltables(Tables, FTables), + LikelihoodF = Likelihood ; - Its1 is Its+1, - em_loop(Its1, Likelihood, State, MaxError, MaxIts, LikelihoodF, FTables) + Its1 is Its+1, + em_loop(Its1, Likelihood, State, MaxError, MaxIts, LikelihoodF, FTables) ). ltables([], []). @@ -188,13 +197,13 @@ ltables([Id-T|Tables], [Key-LTable|FTables]) :- ltables(Tables, FTables). -generate_dists(Factors, EList, AllDists, AllInfo, MargVars) :- - b_hash_new(Ev0), - foldl(elist_to_hash, EList, Ev0, Ev), - maplist(process_factor(Ev), Factors, Dists0), - sort(Dists0, Dists1), - group(Dists1, AllDists, AllInfo, MargVars0, []), - sort(MargVars0, MargVars). +generate_dists(Factors, EList, AllDists, AllInfo, MargVars) :- + b_hash_new(Ev0), + foldl(elist_to_hash, EList, Ev0, Ev), + maplist(process_factor(Ev), Factors, Dists0), + sort(Dists0, Dists1), + group(Dists1, AllDists, AllInfo, MargVars0, []), + sort(MargVars0, MargVars). elist_to_hash(K=V, Ev0, Ev) :- b_hash_insert(Ev0, K, V, Ev). @@ -211,7 +220,7 @@ fetch_evidence(_Ev, K, Ns, NonEvs, [K|NonEvs]) :- domain_to_number(_, I0, I0, I) :- I is I0+1. - + % collect the different dists we are going to learn next. different_dists(AllVars, AllDists, AllInfo, MargVars) :- @@ -223,24 +232,24 @@ different_dists(AllVars, AllDists, AllInfo, MargVars) :- % % V -> to Id defining V. We get: % the random variables that are parents -% the cases that can happen, eg if we have A <- B, C +% the cases that can happen, eg if we have A <- B, C % A and B are boolean w/o evidence, and C is f, the cases could be -% [0,0,1], [0,1,1], [1,0,0], [1,1,0], +% [0,0,1], [0,1,1], [1,0,0], [1,1,0], % Hiddens will be C % all_dists([], _, []). all_dists([V|AllVars], AllVars0, [i(Id, [V|Parents], Cases, Hiddens)|Dists]) :- % V is an instance of Id clpbn:get_atts(V, [dist(Id,Parents)]), - sort([V|Parents], Sorted), + sort([V|Parents], Sorted), length(Sorted, LengSorted), - length(Parents, LengParents), + length(Parents, LengParents), ( - LengParents+1 =:= LengSorted - -> - true + LengParents+1 =:= LengSorted + -> + true ; - throw(error(repeated_parents)) + throw(error(repeated_parents)) ), generate_hidden_cases([V|Parents], CompactCases, Hiddens), uncompact_cases(CompactCases, Cases), @@ -297,11 +306,9 @@ compact_mvars([X|MargVars], [X|CMVars]) :- !, estimate(state(_, _, Margs, SolverState), LPs) :- clpbn:use_parfactors(on), !, - clpbn_flag(em_solver, Solver), - pfl_run_solver(Margs, LPs, SolverState, Solver). + pfl_run_solver(Margs, LPs, SolverState). estimate(state(_, _, Margs, SolverState), LPs) :- - clpbn_flag(em_solver, Solver), - clpbn_run_solver(Solver, Margs, LPs, SolverState). + clpbn_run_solver(Margs, LPs, SolverState). 
maximise(state(_,DistInstances,MargVars,_), Tables, LPs, Likelihood) :- rb_new(MDistTable0), @@ -312,7 +319,7 @@ create_mdist_table(Vs, Ps, MDistTable0, MDistTable) :- rb_insert(MDistTable0, Vs, Ps, MDistTable). compute_parameters([], [], _, Lik, Lik, _). -compute_parameters([Id-Samples|Dists], [Id-NewTable|Tables], MDistTable, Lik0, Lik, LPs:MargVars) :- +compute_parameters([Id-Samples|Dists], [Id-NewTable|Tables], MDistTable, Lik0, Lik, LPs:MargVars) :- empty_dist(Id, Table0), add_samples(Samples, Table0, MDistTable), %matrix_to_list(Table0,Mat), lists:sumlist(Mat, Sum), format(user_error, 'FINAL ~d ~w ~w~n', [Id,Sum,Mat]), @@ -322,7 +329,7 @@ compute_parameters([Id-Samples|Dists], [Id-NewTable|Tables], MDistTable, Lik0, compute_likelihood(Table0, NewTable, DeltaLik), dist_new_table(Id, NewTable), NewLik is Lik0+DeltaLik, - compute_parameters(Dists, Tables, MDistTable, NewLik, Lik, LPs:MargVars). + compute_parameters(Dists, Tables, MDistTable, NewLik, Lik, LPs:MargVars). add_samples([], _, _). add_samples([i(_,_,[Case],[])|Samples], Table, MDistTable) :- !, diff --git a/packages/CLPBN/learning/learn_utils.yap b/packages/CLPBN/learning/learn_utils.yap index fee6cf5df..463b9030a 100644 --- a/packages/CLPBN/learning/learn_utils.yap +++ b/packages/CLPBN/learning/learn_utils.yap @@ -2,29 +2,31 @@ % Utilities for learning % -:- module(clpbn_learn_utils, [run_all/1, - clpbn_vars/2, - normalise_counts/2, - compute_likelihood/3, - soften_sample/2, - soften_sample/3]). +:- module(clpbn_learn_utils, + [run_all/1, + clpbn_vars/2, + normalise_counts/2, + compute_likelihood/3, + soften_sample/2, + soften_sample/3 + ]). :- use_module(library(clpbn), - [clpbn_flag/2]). + [clpbn_flag/2]). :- use_module(library('clpbn/table'), - [clpbn_reset_tables/0]). + [clpbn_reset_tables/0]). :- use_module(library(matrix), - [matrix_agg_lines/3, - matrix_op_to_lines/4, - matrix_agg_cols/3, - matrix_op_to_cols/4, - matrix_to_logs/2, - matrix_op/4, - matrix_sum/2, - matrix_to_list/2, - matrix_op_to_all/4]). + [matrix_agg_lines/3, + matrix_op_to_lines/4, + matrix_agg_cols/3, + matrix_op_to_cols/4, + matrix_to_logs/2, + matrix_op/4, + matrix_sum/2, + matrix_to_list/2, + matrix_op_to_all/4]). :- meta_predicate run_all(:). @@ -47,7 +49,7 @@ clpbn_vars(Vs,BVars) :- get_clpbn_vars(Vs,CVs), keysort(CVs,KVs), merge_vars(KVs,BVars). - + get_clpbn_vars([],[]). get_clpbn_vars([V|GVars],[K-V|CLPBNGVars]) :- clpbn:get_atts(V, [key(K)]), !, @@ -59,7 +61,7 @@ merge_vars([],[]). merge_vars([K-V|KVs],[V|BVars]) :- get_var_has_same_key(KVs,K,V,KVs0), merge_vars(KVs0,BVars). - + get_var_has_same_key([K-V|KVs],K,V,KVs0) :- !, get_var_has_same_key(KVs,K,V,KVs0). get_var_has_same_key(KVs,_,_,KVs). diff --git a/packages/CLPBN/learning/mle.yap b/packages/CLPBN/learning/mle.yap index ce6cd0132..4456db86e 100644 --- a/packages/CLPBN/learning/mle.yap +++ b/packages/CLPBN/learning/mle.yap @@ -5,25 +5,29 @@ % This assumes we have a single big example. % -:- module(clpbn_mle, [learn_parameters/2, - learn_parameters/3, - parameters_from_evidence/3]). +:- module(clpbn_mle, + [learn_parameters/2, + learn_parameters/3, + parameters_from_evidence/3 + ]). :- use_module(library('clpbn')). - + :- use_module(library('clpbn/learning/learn_utils'), - [run_all/1, - clpbn_vars/2, - normalise_counts/2, - soften_table/2, - normalise_counts/2]). + [run_all/1, + clpbn_vars/2, + normalise_counts/2, + soften_table/2, + normalise_counts/2 + ]). :- use_module(library('clpbn/dists'), - [empty_dist/2, - dist_new_table/2]). + [empty_dist/2, + dist_new_table/2 + ]). 
:- use_module(library(matrix), - [matrix_inc/2]). + [matrix_inc/2]). learn_parameters(Items, Tables) :- @@ -53,7 +57,7 @@ mk_sample(AllVars, SortedSample) :- msort(Sample, SortedSample). % -% assumes we have full data, meaning evidence for every variable +% assumes we have full data, meaning evidence for every variable % add2sample([], []). add2sample([V|Vs],[val(Id,[Ev|EParents])|Vals]) :- diff --git a/packages/CLPBN/pfl.tex b/packages/CLPBN/pfl.tex new file mode 100644 index 000000000..52ac251ab --- /dev/null +++ b/packages/CLPBN/pfl.tex @@ -0,0 +1,395 @@ +\documentclass{article} + +\usepackage{hyperref} +\usepackage{setspace} +\usepackage{fancyvrb} +\usepackage{tikz} +\usetikzlibrary{arrows,shapes,positioning} + +\begin{document} + +\DefineVerbatimEnvironment{pflcodeve}{Verbatim} {xleftmargin=3.0em,fontsize=\small} + +\newenvironment{pflcode} + {\VerbatimEnvironment \setstretch{0.8} \begin{pflcodeve}} + {\end{pflcodeve} } + +\newcommand{\true} {\mathtt{t}} +\newcommand{\false} {\mathtt{f}} +\newcommand{\pathsep} { $\triangleright$ } +\newcommand{\tableline} {\noalign{\hrule height 0.8pt}} + +\tikzstyle{nodestyle} = [draw, thick, circle, minimum size=0.9cm] +\tikzstyle{bnedgestyle} = [-triangle 45,thick] + +\setlength{\parskip}{\baselineskip} + +\title{\Huge\textbf{Prolog Factor Language (PFL) Manual}} + +\author{Tiago Gomes\\\texttt{tiago.avv@gmail.com} \and V\'{i}tor Santos Costa\\\texttt{vsc@fc.up.pt}\\\\ +CRACS \& INESC TEC, Faculty of Sciences, University of Porto +} + + +\date{} + +\maketitle +\thispagestyle{empty} +\vspace{5cm} +\begin{center} + \large Last revision: January 8, 2013 +\end{center} +\newpage + + + +%------------------------------------------------------------------------------ +%------------------------------------------------------------------------------ +%------------------------------------------------------------------------------ +%------------------------------------------------------------------------------ +\section{Introduction} +The Prolog Factor Language (PFL) is a language that extends Prolog by providing a syntax to describe first-order probabilistic graphical models. These models can be either directed (bayesian networks) or undirected (markov networks). This language replaces the old one known as CLP($\mathcal{BN}$). + +The package also includes implementations for a set of well-known inference algorithms for solving probabilistic queries on these models. Both ground and lifted inference methods are supported. + + + +%------------------------------------------------------------------------------ +%------------------------------------------------------------------------------ +%------------------------------------------------------------------------------ +%------------------------------------------------------------------------------ +\section{Installation} +PFL is included with the \href{http://www.dcc.fc.up.pt/~vsc/Yap/}{YAP} Prolog system. However, there isn't yet a stable release of YAP that includes PFL. So it is required to install a development version of YAP. To do this, you will need to have the Git version control system installed. The commands to do a default installation of YAP in the user's home in a Unix-based environment are shown next.
+ +\begin{enumerate} + \setlength\itemindent{-0.01cm} + \item \texttt{\$ cd \$HOME} + \item \texttt{\$ git clone git://yap.git.sourceforge.net/gitroot/yap/yap-6.3} + \item \texttt{\$ cd yap-6.3/} + \item \texttt{\$ ./configure --enable-clpbn-bp --prefix=\$HOME} + \item \texttt{\$ make depend \& make install} +\end{enumerate} + +In case you want to install YAP somewhere else or with different settings, please consult the YAP documentation. From now on, we will assume that the directory \texttt{\$HOME\pathsep bin} (where the binary can be found) is in your \texttt{\$PATH} environment variable. + +\label{examples-directory} +Once in a while, we will refer to the PFL examples directory. In a default installation, this directory will be located at \texttt{\$HOME\pathsep share\pathsep doc\pathsep Yap\pathsep packages\pathsep examples\pathsep CLPBN}. + + + +%------------------------------------------------------------------------------ +%------------------------------------------------------------------------------ +%------------------------------------------------------------------------------ +%------------------------------------------------------------------------------ +\section{Language} +A first-order probabilistic graphical model is described using parametric factors, or just parfactors. The PFL syntax for a parfactor is + +$$Type~~F~~;~~Phi~~;~~C.$$ + +where +\begin{itemize} +\item $Type$ refers to the type of network over which the parfactor is defined. It can be \texttt{bayes} for directed networks, or \texttt{markov} for undirected ones. + +\item $F$ is a comma-separated sequence of Prolog terms that will define sets of random variables under the constraint $C$. If $Type$ is \texttt{bayes}, the first term defines the node while the others define its parents. + +\item $Phi$ is either a Prolog list of potential values or a Prolog goal that unifies with one. If $Type$ is \texttt{bayes}, this will correspond to the conditional probability table. Domain combinations are implicitly assumed in ascending order, with the first term being the 'most significant' (e.g. $\mathtt{x_0y_0}$, $\mathtt{x_0y_1}$, $\mathtt{x_0y_2}$, $\mathtt{x_1y_0}$, $\mathtt{x_1y_1}$, $\mathtt{x_1y_2}$). + +\item $C$ is a (possibly empty) list of Prolog goals that will instantiate the logical variables that appear in $F$, that is, the successful substitutions for the goals in $C$ will be the valid values for the logical variables. This allows the constraint to be any relation (set of tuples) over the logical variables. +\end{itemize} + + +\begin{figure}[t!] 
+\begin{center} +\begin{tikzpicture}[>=latex',line join=bevel,transform shape,scale=0.8] + +\node (cloudy) at (50bp, 122bp) [nodestyle,ellipse,inner sep=0pt,minimum width=2.7cm] {$Cloudy$}; +\node (sprinker) at ( 0bp, 66bp) [nodestyle,ellipse,inner sep=0pt,minimum width=2.7cm] {$Sprinker$}; +\node (rain) at (100bp, 66bp) [nodestyle,ellipse,inner sep=0pt,minimum width=2.7cm] {$Rain$}; +\node (wetgrass) at (50bp, 10bp) [nodestyle,ellipse,inner sep=0pt,minimum width=2.7cm] {$WetGrass$}; +\draw [bnedgestyle] (cloudy) -- (sprinker); +\draw [bnedgestyle] (cloudy) -- (rain); +\draw [bnedgestyle] (sprinker) -- (wetgrass); +\draw [bnedgestyle] (rain) -- (wetgrass); + +\node [above=0.4cm of cloudy,inner sep=0pt] { +\begin{tabular}[b]{lc} + $C$ & $P(C)$ \\ \tableline + $\true$ & 0.5 \\ + $\false$ & 0.5 \\ +\end{tabular} +}; + +\node [left=0.4cm of sprinker,inner sep=0pt] { +\begin{tabular}{lcc} + $S$ & $C$ & $P(S|C)$ \\ \tableline + $\true$ & $\true$ & 0.1 \\ + $\true$ & $\false$ & 0.5 \\ + $\false$ & $\true$ & 0.9 \\ + $\false$ & $\false$ & 0.5 \\ +\end{tabular} +}; + +\node [right=0.4cm of rain,inner sep=0pt] { +\begin{tabular}{llc} + $R$ & $C$ & $P(R|C)$ \\ \tableline + $\true$ & $\true$ & 0.8 \\ + $\true$ & $\false$ & 0.2 \\ + $\false$ & $\true$ & 0.2 \\ + $\false$ & $\false$ & 0.8 \\ +\end{tabular} +}; + +\node [below=0.4cm of wetgrass,inner sep=0pt] { +\begin{tabular}{llll} + $W$ & $S$ & $R$ & $P(W|S,R)$ \\ \tableline + $\true$ & $\true$ & $\true$ & \hspace{1em} 0.99 \\ + $\true$ & $\true$ & $\false$ & \hspace{1em} 0.9 \\ + $\true$ & $\false$ & $\true$ & \hspace{1em} 0.9 \\ + $\true$ & $\false$ & $\false$ & \hspace{1em} 0.0 \\ + $\false$ & $\true$ & $\true$ & \hspace{1em} 0.01 \\ + $\false$ & $\true$ & $\false$ & \hspace{1em} 0.1 \\ + $\false$ & $\false$ & $\true$ & \hspace{1em} 0.1 \\ + $\false$ & $\false$ & $\false$ & \hspace{1em} 1.0 \\ +\end{tabular} +}; + +\end{tikzpicture} +\caption{The sprinkler network.} +\label{fig:sprinkler-bn} +\end{center} +\end{figure} + +Towards a better understanding of the language, next we show the PFL representation for the network found in Figure~\ref{fig:sprinkler-bn}. + +\begin{pflcode} +:- use_module(library(pfl)). + +bayes cloudy ; cloudy_table ; []. + +bayes sprinkler, cloudy ; sprinkler_table ; []. + +bayes rain, cloudy ; rain_table ; []. + +bayes wet_grass, sprinkler, rain ; wet_grass_table ; []. + +cloudy_table( + [ 0.5, + 0.5 ]). + +sprinkler_table( + [ 0.1, 0.5, + 0.9, 0.5 ]). + +rain_table( + [ 0.8, 0.2, + 0.2, 0.8 ]). + +wet_grass_table( + [ 0.99, 0.9, 0.9, 0.0, + 0.01, 0.1, 0.1, 1.0 ]). +\end{pflcode} + +Note that this network is fully grounded, as the constraints are all empty. Next we present the PFL representation for a well-known markov logic network - the social network model. The weighted formulas of this model are shown below. + +\begin{pflcode} +1.5 : Smokes(x) => Cancer(x) +1.1 : Smokes(x) ^ Friends(x,y) => Smokes(y) +\end{pflcode} + +We can represent this model using PFL with the following code. + +\begin{pflcode} +:- use_module(library(pfl)). + +person(anna). +person(bob). + +markov smokes(X), cancer(X) ; + [4.482, 4.482, 1.0, 4.482] ; + [person(X)]. + +markov friends(X,Y), smokes(X), smokes(Y) ; + [3.004, 3.004, 3.004, 3.004, 3.004, 1.0, 1.0, 3.004] ; + [person(X), person(Y)]. +\end{pflcode} +%markov smokes(X) ; [1.0, 4.055]; [person(X)]. +%markov cancer(X) ; [1.0, 9.974]; [person(X)]. +%markov friends(X,Y) ; [1.0, 99.484] ; [person(X), person(Y)]. 
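+ +The potential values in these factors come directly from the formula weights: the entries different from $1.0$ are the exponentials of the corresponding weights, while the $1.0$ entries mark the assignments that do not pick up the weight. As a quick check of the numbers used above: + +$$e^{1.5} \approx 4.482 \qquad\qquad e^{1.1} \approx 3.004$$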
+ +Notice that we defined the world to consist of two persons, \texttt{anna} and \texttt{bob}. We can easily add as many persons as we want by inserting in the program a fact like \texttt{person @ 10.}~. This would create ten persons named \texttt{p1}, \texttt{p2}, \dots, \texttt{p10}. + +Unlike other first-order probabilistic languages, in PFL the logical variables that appear in the terms are not directly typed, and they are only constrained by the goals that appear in the constraint of the parfactor. This allows the logical variables to be constrained by any relation (set of tuples), and not by pairwise (in)equalities. For instance, the next example defines a ground network with three factors, each over the random variables \texttt{p(a,b)}, \texttt{p(b,d)} and \texttt{p(d,e)}. + +\begin{pflcode} +constraint(a,b). +constraint(b,d). +constraint(d,e). + +markov p(A,B); some_table; [constraint(A,B)]. +\end{pflcode} + +We can easily add static evidence to PFL programs by inserting a fact with the same functor and arguments as the random variable, plus one extra argument with the observed state or value. For instance, suppose that we know that \texttt{anna} and \texttt{bob} are friends. We can add this knowledge to the program with the following fact: \texttt{friends(anna,bob,t).}~. + +One last note on the domain of the random variables. By default all terms will generate boolean (\texttt{t}/\texttt{f}) random variables. It is possible to choose a different domain by appending a list of the possible values or states to the term. Next we present a self-explanatory example of how this can be done. + +\begin{pflcode} +bayes professor_ability::[high, medium, low] ; [0.5, 0.4, 0.1]. +\end{pflcode} + +More probabilistic models defined using PFL can be found in the examples directory. + + + +%------------------------------------------------------------------------------ +%------------------------------------------------------------------------------ +%------------------------------------------------------------------------------ +%------------------------------------------------------------------------------ +\section{Querying} +In this section we demonstrate how to use PFL to solve probabilistic queries. We will use the sprinkler network as an example. + +Assuming that the current directory is the one where the examples are located, first we load the model as follows. + +\texttt{\$ yap -l sprinker.pfl} + +Let's suppose that we want to estimate the marginal probability for the $WetGrass$ random variable. We can do so by calling the following goal: + +\texttt{?- wet\_grass(X).} + +The output of the goal will show the marginal probability for each possible state or value of $WetGrass$, that is, \texttt{t} and \texttt{f}. Notice that in PFL a random variable is identified by a term with the same functor and arguments plus one extra argument. + +Now let's suppose that we want to estimate the probability for the same random variable, but this time we have evidence that it had rained the day before. We can estimate this probability without resorting to static evidence with: + +\texttt{?- wet\_grass(X), rain(t).} + +PFL also supports calculating joint probability distributions. 
For instance, we can obtain the joint probability for $Sprinkler$ and $Rain$ with: + +\texttt{?- sprinkler(X), rain(Y).} + + + +%------------------------------------------------------------------------------ +%------------------------------------------------------------------------------ +%------------------------------------------------------------------------------ +%------------------------------------------------------------------------------ +\section{Parameter Learning} +PFL is capable of learning the parameters of bayesian networks, through an implementation of the expectation-maximization algorithm. + +Inside the \texttt{learning} directory of the examples directory, one can find some examples of how learning works in PFL. + +We can define the solver that will be used for the inference part during parameter learning with the \texttt{set\_em\_solver/1} predicate (defaults to \texttt{hve}). At the moment, only the following solvers support parameter learning: \texttt{ve}, \texttt{hve}, \texttt{bdd}, \texttt{bp} and \texttt{cbp}. + + +%------------------------------------------------------------------------------ +%------------------------------------------------------------------------------ +%------------------------------------------------------------------------------ +%------------------------------------------------------------------------------ +\section{Inference Options} +PFL supports both ground and lifted inference methods. The inference algorithm can be chosen by calling \texttt{set\_solver/1}. The following are supported: +\begin{itemize} + \item \texttt{ve}, variable elimination (written in Prolog) + \item \texttt{hve}, variable elimination (written in C++) + \item \texttt{jt}, junction tree + \item \texttt{bdd}, binary decision diagrams + \item \texttt{bp}, belief propagation + \item \texttt{cbp}, counting belief propagation + \item \texttt{gibbs}, gibbs sampling + \item \texttt{lve}, generalized counting first-order variable elimination (GC-FOVE) + \item \texttt{lkc}, lifted first-order knowledge compilation + \item \texttt{lbp}, lifted first-order belief propagation +\end{itemize} + +For instance, if we want to use belief propagation to solve some probabilistic query, we need to call first: + +\texttt{?- set\_solver(bp).} + +It is possible to tweak some parameters of PFL through the \texttt{set\_horus\_flag/2} predicate. The first argument is a key that identifies the parameter that we want to tweak, while the second is some possible value for this key. + +The \texttt{verbosity} key controls the level of debugging information that will be printed. Its possible values are positive integers. The higher the number, the more information will be shown. For example, to view some basic debugging information we call: + +\texttt{?- set\_horus\_flag(verbosity, 1).} + +This key defaults to 0 (no debugging information) and only the \texttt{hve}, \texttt{bp}, \texttt{cbp}, \texttt{lve}, \texttt{lkc} and \texttt{lbp} solvers support it. + +The \texttt{use\_logarithms} key controls whether the calculations performed during inference should be done in a logarithm domain or not. Its values can be \texttt{true} or \texttt{false}. By default it is \texttt{true} and it only affects the \texttt{hve}, \texttt{bp}, \texttt{cbp}, \texttt{lve}, \texttt{lkc} and \texttt{lbp} solvers. The remaining solvers always do their calculations in a logarithm domain.
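+ +For example, assuming the sprinkler network from the previous sections has been loaded, the calls described above can be combined in a single session before running a query (the values shown are just the examples and defaults given in this section): + +\begin{pflcode} +?- set_solver(bp). +?- set_horus_flag(verbosity, 1). +?- set_horus_flag(use_logarithms, true). +?- wet_grass(X). +\end{pflcode}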
+
+There are keys that are specific to only some algorithms. The \texttt{elim\_heuristic} key allows one to choose which elimination heuristic will be used by the \texttt{hve} solver (but not by \texttt{ve}). The following are supported:
+\begin{itemize}
+  \item \texttt{sequential}
+  \item \texttt{min\_neighbors}
+  \item \texttt{min\_weight}
+  \item \texttt{min\_fill}
+  \item \texttt{weighted\_min\_fill}
+\end{itemize}
+
+It defaults to \texttt{weighted\_min\_fill}. An explanation of each of these heuristics can be found in Daphne Koller's book \textit{Probabilistic Graphical Models}.
+
+The \texttt{bp\_msg\_schedule}, \texttt{bp\_accuracy} and \texttt{bp\_max\_iter} keys are specific to the message-passing based algorithms, namely \texttt{bp}, \texttt{cbp} and \texttt{lbp}.
+
+The \texttt{bp\_max\_iter} key establishes the maximum number of iterations. One iteration consists of sending all possible messages. It defaults to 1000.
+
+The \texttt{bp\_accuracy} key indicates when the message passing should stop. The residual of a message is the difference (according to some metric) between the message sent in the current iteration and the one sent in the previous iteration. If the highest residual is smaller than the given value, the message passing is stopped and the probabilities are calculated using the last messages that were sent. This key defaults to 0.0001.
+
+The \texttt{bp\_msg\_schedule} key controls the message sending order. Its possible values are:
+\begin{itemize}
+  \item \texttt{seq\_fixed}, at each iteration all messages are sent in the same order.
+
+  \item \texttt{seq\_random}, at each iteration all messages are sent in a random order.
+
+  \item \texttt{parallel}, at each iteration all messages are calculated using only the values of the previous iteration.
+
+  \item \texttt{max\_residual}, the next message to be sent is the one with the maximum residual (as explained in the paper \textit{Residual Belief Propagation: Informed Scheduling for Asynchronous Message Passing}).
+\end{itemize}
+It defaults to \texttt{seq\_fixed}.
+
+The \texttt{export\_libdai} and \texttt{export\_uai} keys can be used to export the current model to the \href{http://cs.ru.nl/~jorism/libDAI/doc/fileformats.html}{libDAI} and \href{http://graphmod.ics.uci.edu/uai08/FileFormat}{UAI08} formats, respectively. With the \texttt{export\_graphviz} key it is possible to save the factor graph in a format that can be read by \href{http://www.graphviz.org/}{Graphviz}. The \texttt{print\_fg} key allows printing all the factors before performing inference. All four keys accept \texttt{true} and \texttt{false} as their values and only take effect with the \texttt{hve}, \texttt{bp}, and \texttt{cbp} solvers.
+
+
+
+%------------------------------------------------------------------------------
+%------------------------------------------------------------------------------
+%------------------------------------------------------------------------------
+%------------------------------------------------------------------------------
+\section{Horus Command Line}
+This package also includes an external interface to YAP for performing inference over probabilistic graphical models described in formats other than PFL. Currently two formats are supported: the \href{http://cs.ru.nl/~jorism/libDAI/doc/fileformats.html}{libDAI file format} and the \href{http://graphmod.ics.uci.edu/uai08/FileFormat}{UAI08 file format}.
+
+This utility is called \texttt{hcli} and its usage is as follows, where the bracketed placeholders stand for user-supplied values:
+
+\begin{verbatim}
+  $ ./hcli [solver=hve|bp|cbp] [<key>=<value>]...
+           <model_file> [<var>|<var>=<evidence>]...
+\end{verbatim}
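+
+Putting the pieces together, a full invocation could look like the following (each part of the syntax is explained below; the concrete solver, flag and evidence values are only an illustration):
+
+\begin{verbatim}
+  $ ./hcli solver=bp verbosity=1 burglary-alarm.uai 0=1
+\end{verbatim}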
+
+Let's assume that the current directory is the one where the examples are located. We can perform inference in any supported model by passing, as an argument, the name of the file where the model is defined. Next, we show how to load a model using the \texttt{hcli} utility.
+
+\texttt{\$ ./hcli burglary-alarm.uai}
+
+With the above command, the program will load the model and print the marginal probabilities for all defined random variables. We can view only the marginal probability of some variable with identifier $X$ by passing $X$ as an extra argument after the file name. For instance, the following command will output only the marginal probability of the variable with identifier $0$.
+
+\texttt{\$ ./hcli burglary-alarm.uai 0}
+
+If we pass more than one variable identifier as argument, the program will output the joint probability of all the given variables.
+
+Evidence can be given as a pair containing a variable identifier and its observed state (index), separated by a '='. For instance, we can state that the variable with identifier $0$ was observed in its second state as follows.
+
+\texttt{\$ ./hcli burglary-alarm.uai 0=1}
+
+By default, all probability tasks are solved using the \texttt{hve} solver. It is possible to choose another solver using the \texttt{solver} key as follows.
+
+\texttt{\$ ./hcli solver=bp burglary-alarm.uai}
+
+Notice that only the \texttt{hve}, \texttt{bp} and \texttt{cbp} solvers can be used with \texttt{hcli}.
+
+The options that are available with the \texttt{set\_horus\_flag/2} predicate can also be used in \texttt{hcli}. The syntax is to pass \texttt{<key>=<value>} pairs before the model's file name.
+
+
+
+%------------------------------------------------------------------------------
+%------------------------------------------------------------------------------
+%------------------------------------------------------------------------------
+%------------------------------------------------------------------------------
+\section{Further Information}
+Please check the paper \textit{Evaluating Inference Algorithms for the Prolog Factor Language} for further information.
+
+If you have any questions, please don't hesitate to contact us!
+
+\end{document}
diff --git a/packages/CLPBN/pfl.yap b/packages/CLPBN/pfl.yap index 3da42d0c7..b36c74950 100644 --- a/packages/CLPBN/pfl.yap +++ b/packages/CLPBN/pfl.yap @@ -1,54 +1,58 @@ % -% This module defines PFL, the prolog factor language. +% This module defines PFL, the Prolog Factor Language. % % -:- module(pfl, [ -	op(550,yfx,@), -	op(550,yfx,::), -	op(1150,fx,bayes), -	op(1150,fx,markov), -	factor/6, -	skolem/2, -	defined_in_factor/2, -	get_pfl_cpt/5, % given id and keys, return new keys and cpt -	get_pfl_parameters/2, % given id return par factor parameter -	new_pfl_parameters/2, % given id set new parameters -	get_first_pvariable/2, % given id get firt pvar (useful in bayesian) -	get_factor_pvariable/2, % given id get any pvar -	add_ground_factor/5 %add a new bayesian variable (for now) +:- module(pfl, +		[op(550,yfx,@), +		 op(550,yfx,::), +		 op(1150,fx,bayes), +		 op(1150,fx,markov), +		 factor/6, +		 skolem/2, +		 defined_in_factor/2, +		 get_pfl_cpt/5, % given id and keys, return new keys and cpt +		 get_pfl_parameters/2, % given id return par factor parameter +		 new_pfl_parameters/2, % given id set new parameters +		 get_first_pvariable/2, % given id get firt pvar (useful in bayesian) +		 get_factor_pvariable/2, % given id get any pvar +		 add_ground_factor/5 %add a new bayesian variable (for now) ]). 
:- reexport(library(clpbn), - [clpbn_flag/2 as pfl_flag, - set_clpbn_flag/2 as set_pfl_flag, - conditional_probability/3, - pfl_init_solver/6, - pfl_run_solver/4]). - -:- reexport(library(clpbn/horus), - [set_solver/1]). + [clpbn_flag/2 as pfl_flag, + set_clpbn_flag/2 as set_pfl_flag, + set_solver/1, + set_em_solver/1, + conditional_probability/3, + pfl_init_solver/5, + pfl_run_solver/3 + ]). :- reexport(library(clpbn/aggregates), - [avg_factors/5]). + [avg_factors/5]). +:- reexport('clpbn/horus', + [set_horus_flag/2]). :- ( % if clp(bn) has done loading, we're top-level predicate_property(set_pfl_flag(_,_), imported_from(clpbn)) - -> + -> % we're using factor language % set appropriate flag set_pfl_flag(use_factors,on) - ; + ; % we're within clp(bn), no need to do anything true - ). + ). +:- use_module(library(atts)). :- use_module(library(lists), - [nth0/3, - append/3, - member/2]). + [nth0/3, + append/3, + member/2 + ]). :- dynamic factor/6, skolem_in/2, skolem/2, preprocess/3, evidence/2, id/1. @@ -131,19 +135,19 @@ process_args(Arg1, Id, I0, I ) --> process_arg(Sk::D, Id, _I) --> !, { - new_skolem(Sk,D), - assert(skolem_in(Sk, Id)) - }, + new_skolem(Sk,D), + assert(skolem_in(Sk, Id)) + }, [Sk]. process_arg(Sk, Id, _I) --> !, { - % if :: been used before for this skolem - % just keep on using it, - % otherwise, assume it is t,f - ( \+ \+ skolem(Sk,_D) -> true ; new_skolem(Sk,[t,f]) ), - assert(skolem_in(Sk, Id)) - }, + % if :: been used before for this skolem + % just keep on using it, + % otherwise, assume it is t,f + ( \+ \+ skolem(Sk,_D) -> true ; new_skolem(Sk,[t,f]) ), + assert(skolem_in(Sk, Id)) + }, [Sk]. new_skolem(Sk,D) :- @@ -163,11 +167,10 @@ interface_predicate(Sk) :- assert(preprocess(ESk, Sk, Var)), % transform from PFL to CLP(BN) call assert_static((user:ESk :- - evidence(Sk,Ev) -> Ev = Var; - var(Var) -> insert_atts(Var,Sk) ; - add_evidence(Sk,Var) - ) - ). + evidence(Sk,Ev) -> Ev = Var; + var(Var) -> insert_atts(Var,Sk) ; + add_evidence(Sk,Var) + )). insert_atts(Var,Sk) :- clpbn:put_atts(Var,[key(Sk)]). @@ -178,13 +181,13 @@ add_evidence(Sk,Var) :- clpbn:put_atts(_V,[key(Sk),evidence(E)]). -%% get_pfl_cpt(Id, Keys, Ev, NewKeys, Out) :- +%% get_pfl_cpt(Id, Keys, Ev, NewKeys, Out) :- %% factor(_Type,Id,[Key|_],_FV,avg,_Constraints), !, %% Keys = [Key|Parents], %% writeln(Key:Parents), %% avg_factors(Key, Parents, 0.0, Ev, NewKeys, Out). get_pfl_cpt(Id, Keys, _, Keys, Out) :- - get_pfl_parameters(Id,Out). + get_pfl_parameters(Id,Out). get_pfl_parameters(Id,Out) :- factor(_Type,Id,_FList,_FV,Phi,_Constraints), @@ -206,7 +209,7 @@ get_sizes(Key.FList, Sz.DSizes) :- skolem(Key, Domain), length(Domain, Sz), get_sizes(FList, DSizes). - + % only makes sense for bayesian networks get_first_pvariable(Id,Var) :- factor(_Type, Id,Var._FList,_FV,_Phi,_Constraints). diff --git a/packages/prism/src/prolog/up/expl.pl b/packages/prism/src/prolog/up/expl.pl index 7c054c1dc..79cbd7b7c 100644 --- a/packages/prism/src/prolog/up/expl.pl +++ b/packages/prism/src/prolog/up/expl.pl @@ -106,7 +106,7 @@ $pp_expl_one_goal(failure) :- !, $pp_expl_failure. $pp_expl_one_goal(Goal) :- $pp_is_dummy_goal(Goal),!, - call(Goal). + ( call(Goal), fail ; true ). $pp_expl_one_goal(Goal) :- % FIXME: handling non-tabled probabilistic predicate is future work $pp_require_tabled_probabilistic_atom(Goal,$msg(0006),$pp_expl_one_goal/1),