Merge branch 'master' of yap.dcc.fc.up.pt:yap-6
commit 48cc87e882

C/agc.c (1 line changed)
@@ -187,6 +187,7 @@ AtomAdjust(Atom a)
#define HoldEntryAdjust(P) (P)
#define CodeCharPAdjust(P) (P)
#define CodeVoidPAdjust(P) (P)
#define HaltHookAdjust(P) (P)

#define recompute_mask(dbr)
@@ -1032,6 +1032,7 @@ static InitBinEntry InitBinTab[] = {
{"#", op_xor},
{"><", op_xor},
{"xor", op_xor},
{"atan", op_atan2},
{"atan2", op_atan2},
/* C-Prolog exponentiation */
{"^", op_power},
@@ -505,6 +505,7 @@ X_API Term STD_PROTO(YAP_TermNil,(void));
X_API int STD_PROTO(YAP_AtomGetHold,(Atom));
X_API int STD_PROTO(YAP_AtomReleaseHold,(Atom));
X_API Agc_hook STD_PROTO(YAP_AGCRegisterHook,(Agc_hook));
X_API int STD_PROTO(YAP_HaltRegisterHook,(HaltHookFunc, void *));
X_API char *STD_PROTO(YAP_cwd,(void));
X_API Term STD_PROTO(YAP_OpenList,(int));
X_API Term STD_PROTO(YAP_ExtendList,(Term, Term));
@@ -2957,6 +2958,12 @@ YAP_AGCRegisterHook(Agc_hook hook)
return old;
}

X_API int
YAP_HaltRegisterHook(HaltHookFunc hook, void * closure)
{
return Yap_HaltRegisterHook(hook, closure);
}

X_API char *
YAP_cwd(void)
{
C/init.c (37 lines changed)
@@ -1333,6 +1333,33 @@ Yap_InitWorkspace(UInt Heap, UInt Stack, UInt Trail, UInt Atts, UInt max_table_s
}
}

int
Yap_HaltRegisterHook (HaltHookFunc f, void * env)
{
struct halt_hook *h;

if (!(h = (struct halt_hook *)Yap_AllocCodeSpace(sizeof(struct halt_hook))))
return FALSE;
h->environment = env;
h->hook = f;
LOCK(BGL);
h->next = Yap_HaltHooks;
Yap_HaltHooks = h;
UNLOCK(BGL);
return TRUE;
}

static void
run_halt_hooks(int code)
{
struct halt_hook *hooke = Yap_HaltHooks;

while (hooke) {
hooke->hook(code, hooke->environment);
hooke = hooke->next;
}
}

void
Yap_exit (int value)
{
@@ -1340,15 +1367,17 @@ Yap_exit (int value)
unmap_memory();
#endif /* YAPOR */

if (! (Yap_PrologMode & BootMode) ) {
#ifdef LOW_PROF
remove("PROFPREDS");
remove("PROFILING");
remove("PROFPREDS");
remove("PROFILING");
#endif
#if defined MYDDAS_MYSQL || defined MYDDAS_ODBC
Yap_MYDDAS_delete_all_myddas_structs();
Yap_MYDDAS_delete_all_myddas_structs();
#endif
if (! (Yap_PrologMode & BootMode) )
run_halt_hooks(value);
Yap_ShutdownLoadForeign();
}
exit(value);
}
@@ -6462,6 +6462,9 @@ p_file_base_name (void)
char *c = RepAtom(at)->StrOfAE;
Int i = strlen(c);
while (i && !Yap_dir_separator((int)c[--i]));
if (Yap_dir_separator((int)c[i])) {
i++;
}
return Yap_unify(ARG2, MkAtomTerm(Yap_LookupAtom(c+i)));
}
}
H/YapHeap.h (10 lines changed)
@@ -62,6 +62,16 @@ typedef struct gc_ma_hash_entry_struct {
struct gc_ma_hash_entry_struct *next;
} gc_ma_hash_entry;

typedef void (*HaltHookFunc)(int, void *);

typedef struct halt_hook {
void * environment;
HaltHookFunc hook;
struct halt_hook *next;
} halt_hook_entry;

int STD_PROTO(Yap_HaltRegisterHook,(HaltHookFunc, void *));

typedef struct atom_hash_entry {
#if defined(YAPOR) || defined(THREADS)
rwlock_t AERWLock;
@@ -263,6 +263,8 @@

#define Stream Yap_heap_regs->yap_streams

#define Yap_HaltHooks Yap_heap_regs->yap_halt_hook

#define NOfFileAliases Yap_heap_regs->n_of_file_aliases
#define SzOfFileAliases Yap_heap_regs->sz_of_file_aliases
#define FileAliases Yap_heap_regs->file_aliases
@@ -263,6 +263,8 @@

struct stream_desc *yap_streams;

struct halt_hook *yap_halt_hook;

UInt n_of_file_aliases;
UInt sz_of_file_aliases;
struct AliasDescS *file_aliases;
@@ -263,6 +263,8 @@

Yap_heap_regs->yap_streams = NULL;

Yap_heap_regs->yap_halt_hook = NULL;

Yap_heap_regs->n_of_file_aliases = 0;
Yap_heap_regs->sz_of_file_aliases = 0;
Yap_heap_regs->file_aliases = NULL;
H/rheap.h (12 lines changed)
@@ -899,6 +899,18 @@ RestoreDBErasedIList(void)
}
}

static void
RestoreHaltHooks(void)
{
struct halt_hook *hooke = Yap_HaltHooks = HaltHookAdjust(Yap_HaltHooks);

while (hooke) {
hooke->next = HaltHookAdjust(hooke->next);
hooke = hooke->next;
}
}


static void
RestoreStreams(void)
{
@@ -263,6 +263,8 @@

RestoreStreams();

RestoreHaltHooks();


RestoreAliases();
H/sshift.h (10 lines changed)
@@ -610,7 +610,15 @@ CodeVoidPAdjust (void * addr)
return addr + HDiff;
}

inline EXTERN struct halt_hook *HaltHookAdjust (struct halt_hook *);

inline EXTERN struct halt_hook *
HaltHookAdjust (struct halt_hook * addr)
{
if (!addr)
return NULL;
return (struct halt_hook *) (CharP (addr) + HDiff);
}

inline EXTERN BlockHeader *BlockAdjust (BlockHeader *);

@@ -620,8 +628,6 @@ BlockAdjust (BlockHeader * addr)
return (BlockHeader *) ((BlockHeader *) (CharP (addr) + HDiff));
}


inline EXTERN yamop *PtoOpAdjust (yamop *);

inline EXTERN yamop *
configure.in (139 lines changed)
@ -99,13 +99,16 @@ AC_ARG_ENABLE(cygwin,
|
||||
[ --enable-cygwin use cygwin library in WIN32 ],
|
||||
cygwin="$enableval", cygwin=no)
|
||||
AC_ARG_ENABLE(dynamic_loading,
|
||||
[ --enable-dynamic-loading compile Yap as a DLL ],
|
||||
[ --enable-dynamic-loading compile YAP as a DLL ],
|
||||
dynamic_loading="$enableval", dynamic_loading=no)
|
||||
AC_ARG_ENABLE(static_compilation,
|
||||
[ --enable-static-compilation compile YAP statically ],
|
||||
static_compilation="$enableval", static_compilation=no)
|
||||
AC_ARG_ENABLE(use-malloc,
|
||||
[ --enable-use-malloc use malloc to allocate memory ],
|
||||
use_malloc="$enableval", use_malloc=no)
|
||||
AC_ARG_ENABLE(condor,
|
||||
[ --enable-condor allow Yap to be used from condor ],
|
||||
[ --enable-condor allow YAP to be used from condor ],
|
||||
use_condor="$enableval", use_condor=no)
|
||||
AC_ARG_ENABLE(chr,
|
||||
[ --enable-chr install chr library ],
|
||||
@ -126,7 +129,7 @@ AC_ARG_ENABLE(dlcompat,
|
||||
use_dlcompat="$enableval", use_dlcompat=no)
|
||||
|
||||
AC_ARG_ENABLE(cplint,
|
||||
[ --enable-cplint[=DIR] enable the cplint library using the CUDD software in DIR],
|
||||
[ --enable-cplint[=DIR] enable the cplint library using the glu library in DIR/lib],
|
||||
yap_cv_cplint=$enableval, yap_cv_cplint=no)
|
||||
|
||||
AC_ARG_WITH(gmp,
|
||||
@ -138,7 +141,7 @@ AC_ARG_WITH(gmp,
|
||||
else
|
||||
yap_cv_gmp=$with_gmp
|
||||
LDFLAGS="$LDFLAGS -L${yap_cv_gmp}/lib"
|
||||
LIBS="$LIBS -L${yap_cv_gmp}/lib"
|
||||
LIBS="$LIBS -L${yap_cv_gmp}/lib"
|
||||
CPPFLAGS="$CPPFLAGS -I${yap_cv_gmp}/include"
|
||||
CFLAGS="$CFLAGS -I${yap_cv_gmp}/include"
|
||||
fi,
|
||||
@ -172,7 +175,7 @@ AC_ARG_WITH(cudd,
|
||||
AC_ARG_ENABLE(myddas,
|
||||
[ --enable-myddas[[=DIR]] enable the MYDDAS library],
|
||||
if test "$enableval" = yes; then
|
||||
yap_cv_myddas=/usr/
|
||||
yap_cv_myddas=/usr
|
||||
elif test "$enableval" = no; then
|
||||
yap_cv_myddas=no
|
||||
else
|
||||
@ -180,7 +183,7 @@ AC_ARG_ENABLE(myddas,
|
||||
LDFLAGS="$LDFLAGS -L${yap_cv_myddas}/lib "
|
||||
CPPFLAGS="$CPPFLAGS -I${yap_cv_myddas}/include "
|
||||
fi,
|
||||
[yap_cv_myddas=no])
|
||||
[yap_cv_myddas=/usr])
|
||||
|
||||
AC_ARG_ENABLE(myddas-stats,
|
||||
[ --enable-myddas-stats enable the MYDDAS library statistics support],
|
||||
@ -405,6 +408,24 @@ else
|
||||
USE_CUDD=""
|
||||
fi
|
||||
|
||||
dnl condor universe does not like dynamic linking on Linux, DEC, and HP-UX platforms.
|
||||
if test "$use_condor" = yes
|
||||
then
|
||||
static_compilation="yes"
|
||||
use_malloc="yes"
|
||||
CC="condor_compile $CC"
|
||||
CXX="condor_compile $CXX"
|
||||
dnl no readline with condor.
|
||||
yap_cv_readline="no"
|
||||
AC_DEFINE(SUPPORT_CONDOR, 1)
|
||||
dnl and it disables minisat
|
||||
yap_cv_minisat=no
|
||||
STATIC_MODE=""
|
||||
else
|
||||
static_compilation="no"
|
||||
STATIC_MODE="# "
|
||||
fi
|
||||
|
||||
if test "$yap_cv_minisat" = no
|
||||
then
|
||||
USE_MINISAT="@# "
|
||||
@ -415,19 +436,6 @@ else
|
||||
USE_MINISAT=""
|
||||
fi
|
||||
|
||||
dnl condor does not like dynamic linking on Linux, DEC, and HP-UX platforms.
|
||||
if test "$use_condor" = yes
|
||||
then
|
||||
use_malloc="yes"
|
||||
CC="condor_compile $CC"
|
||||
dnl no readline with condor.
|
||||
yap_cv_readline="no"
|
||||
AC_DEFINE(SUPPORT_CONDOR, 1)
|
||||
STATIC_MODE=""
|
||||
else
|
||||
STATIC_MODE="# "
|
||||
fi
|
||||
|
||||
dnl Compilation Flags
|
||||
if test "$GCC" = "yes"
|
||||
then
|
||||
@ -561,11 +569,11 @@ then
|
||||
fi
|
||||
AC_CHECK_LIB(psapi,main)
|
||||
yap_cv_readline=no
|
||||
if test "$target_win64" = yes
|
||||
CLIB_NETLIBS="-lws2_32 -lpsapi -lgdi32"
|
||||
then
|
||||
CLIB_NETLIBS="-lwsock32 -lpsapi -lgdi32"
|
||||
fi
|
||||
if test "$target_win64" = yes
|
||||
CLIB_NETLIBS="-lws2_32 -lpsapi -lgdi32"
|
||||
then
|
||||
CLIB_NETLIBS="-lwsock32 -lpsapi -lgdi32"
|
||||
fi
|
||||
if test "$prefix" = "NONE"
|
||||
then
|
||||
if test "$target_win64" = yes
|
||||
@ -595,12 +603,20 @@ else
|
||||
,
|
||||
have_nsl=no)
|
||||
AC_CHECK_LIB(crypt,main)
|
||||
AC_CHECK_LIB(nss_files,main)
|
||||
AC_CHECK_LIB(nss_dns,main)
|
||||
AC_CHECK_LIB(resolv,main)
|
||||
fi
|
||||
|
||||
if test "$yap_cv_readline" != "no"
|
||||
then
|
||||
AC_CHECK_LIB(termcap,tgetent)
|
||||
AC_CHECK_LIB(ncurses,main)
|
||||
case "$target_os" in
|
||||
*darwin*)
|
||||
;;
|
||||
*)
|
||||
AC_CHECK_LIB(ncurses,main)
|
||||
esac
|
||||
AC_CHECK_LIB(readline,readline)
|
||||
fi
|
||||
if test "$yap_cv_gmp" != "no"
|
||||
@ -613,6 +629,16 @@ dnl then
|
||||
dnl AC_CHECK_LIB(cudd,Cudd_Init)
|
||||
dnl fi
|
||||
|
||||
dnl check if we have zlib
|
||||
AC_CHECK_LIB(z, zlibVersion,
|
||||
ZLIBS="-lz"
|
||||
ZLIB=yes,
|
||||
ZLIB=no)
|
||||
if test $ZLIB = yes
|
||||
then
|
||||
LIBS="$LIBS -lz"
|
||||
fi
|
||||
|
||||
if test "$yap_cv_myddas" != "no"
|
||||
then
|
||||
dnl check for mysql
|
||||
@ -630,7 +656,7 @@ then
|
||||
if test "$yap_cv_mysql" = yes
|
||||
then
|
||||
YAP_EXTRAS="$YAP_EXTRAS -DMYDDAS_MYSQL"
|
||||
LIBS="$LIBS -L${yap_cv_myddas}/lib/mysql -lmysqlclient -lz -lcrypt -lnsl -lm -lc -lnss_files -lnss_dns -lresolv "
|
||||
LIBS="$LIBS -L${yap_cv_myddas}/lib/mysql -lmysqlclient"
|
||||
fi
|
||||
|
||||
dnl check for odbc
|
||||
@ -652,19 +678,6 @@ then
|
||||
LIBS="$LIBS -lodbc "
|
||||
fi
|
||||
|
||||
if test "$yap_cv_mysql" = no -a "$yap_cv_odbc" = no
|
||||
then
|
||||
echo "-------------------------------"
|
||||
echo "--"
|
||||
echo "--"
|
||||
echo "--"
|
||||
echo "-- There\'s no devel libraries for MySQL or ODBC"
|
||||
echo "--"
|
||||
echo "--"
|
||||
echo "--"
|
||||
echo "-------------------------------"
|
||||
exit
|
||||
fi
|
||||
fi
|
||||
|
||||
if test "$myddasstats" = "yes"
|
||||
@ -853,7 +866,7 @@ dnl now this is where things get nasty.
|
||||
dnl trying to get the libraries
|
||||
case "$target_os" in
|
||||
*linux*)
|
||||
if test "$use_condor" = "no"
|
||||
if test "$static_compilation" = "no"
|
||||
then
|
||||
AC_CHECK_LIB(dl,dlopen,
|
||||
have_dl=yes
|
||||
@ -884,22 +897,22 @@ case "$target_os" in
|
||||
SHLIB_CXXFLAGS="-shared -fPIC $CXXFLAGS"
|
||||
INSTALL_DLLS=""
|
||||
fi
|
||||
LDFLAGS="$LDFLAGS -L\$(abs_top_builddir) -Wl,-R,\$(DESTDIR)\$(LIBDIR) -Wl,-R,\$(DESTDIR)\$(YAPLIBDIR)"
|
||||
DYNYAPLIB=libYap."$SO"
|
||||
fi
|
||||
if test "$have_nsl" = yes
|
||||
then
|
||||
LIBS="$LIBS -lnsl"
|
||||
fi
|
||||
LDFLAGS="$LDFLAGS -L\$(abs_top_builddir) -Wl,-R,\$(LIBDIR) -Wl,-R,\$(YAPLIBDIR)"
|
||||
DYNYAPLIB=libYap."$SO"
|
||||
if test "$dynamic_loading" = "yes"
|
||||
then
|
||||
YAPLIB_LD="\$(CC) -shared"
|
||||
EXTRA_LIBS_FOR_DLLS="$EXTRA_LIBS_FOR_DLLS -L\$(abs_top_builddir) -lYap -Wl,-R,\$(YAPLIBDIR) -Wl,-R,\$(LIBDIR)"
|
||||
EXTRA_LIBS_FOR_DLLS="$EXTRA_LIBS_FOR_DLLS -L\$(abs_top_builddir) -lYap -Wl,-R,\$(DESTDIR)\$(YAPLIBDIR) -Wl,-R,\$(DESTDIR)\$(LIBDIR)"
|
||||
INSTALL_ENV="LD_LIBRARY_PATH=\$(DESTDIR)\$(LIBDIR): YAPSHAREDIR=\$(DESTDIR)\$(SHAREDIR) YAPLIBDIR=\$(DESTDIR)\$(YAPLIBDIR)"
|
||||
PRE_INSTALL_ENV="LD_LIBRARY_PATH=\$(abs_top_builddir)"
|
||||
else
|
||||
YAPLIB_LD="\$(CC)"
|
||||
EXTRA_LIBS_FOR_DLLS="$EXTRA_LIBS_FOR_DLLS -Wl,-R,\$(YAPLIBDIR) -Wl,-R,\$(LIBDIR)"
|
||||
EXTRA_LIBS_FOR_DLLS="$EXTRA_LIBS_FOR_DLLS -Wl,-R,\$(DESTDIR)\$(YAPLIBDIR) -Wl,-R,\$(DESTDIR)\$(LIBDIR)"
|
||||
INSTALL_ENV="YAPSHAREDIR=\$(DESTDIR)\$(SHAREDIR) YAPLIBDIR=\$(DESTDIR)\$(YAPLIBDIR)"
|
||||
PRE_INSTALL_ENV=""
|
||||
fi
|
||||
@ -913,9 +926,9 @@ case "$target_os" in
|
||||
fi
|
||||
SO="o"
|
||||
INSTALL_DLLS=""
|
||||
LDFLAGS="$LDFLAGS -L\$(abs_top_builddir) -Wl,-R,\$(LIBDIR) -Wl,-R,\$(YAPLIBDIR)"
|
||||
LDFLAGS="$LDFLAGS -L\$(abs_top_builddir) -Wl,-R,\$(DESTDIR)\$(LIBDIR) -Wl,-R,\$(DESTDIR)\$(YAPLIBDIR)"
|
||||
DYNYAPLIB=libYap."$SO"
|
||||
EXTRA_LIBS_FOR_DLLS="$EXTRA_LIBS_FOR_DLLS -L\$(abs_top_builddir) -lYap -Wl,-R,\$(YAPLIBDIR) -Wl,-R,\$(LIBDIR)"
|
||||
EXTRA_LIBS_FOR_DLLS="$EXTRA_LIBS_FOR_DLLS -L\$(abs_top_builddir) -lYap -Wl,-R,\$(DESTDIR)\$(YAPLIBDIR) -Wl,-R,\$(DESTDIR)\$(LIBDIR)"
|
||||
PRE_INSTALL_ENV="LD_LIBRARY_PATH=\$(abs_top_builddir)"
|
||||
INSTALL_ENV="LD_LIBRARY_PATH=\$(DESTDIR)\$(LIBDIR): YAPSHAREDIR=\$(DESTDIR)\$(SHAREDIR) YAPLIBDIR=\$(DESTDIR)\$(YAPLIBDIR)"
|
||||
;;
|
||||
@ -952,9 +965,9 @@ case "$target_os" in
|
||||
then
|
||||
AC_DEFINE(MPI_AVOID_REALLOC,1)
|
||||
fi
|
||||
LDFLAGS="$LDFLAGS -L\$(abs_top_builddir) -Wl,-R,\$(LIBDIR) -Wl,-R,\$(YAPLIBDIR)"
|
||||
LDFLAGS="$LDFLAGS -L\$(abs_top_builddir) -Wl,-R,\$(DESTDIR)\$(LIBDIR) -Wl,-R,\$(DESTDIR)\$(YAPLIBDIR)"
|
||||
DYNYAPLIB=libYap."$SO"
|
||||
EXTRA_LIBS_FOR_DLLS="$EXTRA_LIBS_FOR_DLLS -L\$(abs_top_builddir) -lYap -Wl,-R,\$(YAPLIBDIR) -Wl,-R,\$(LIBDIR)"
|
||||
EXTRA_LIBS_FOR_DLLS="$EXTRA_LIBS_FOR_DLLS -L\$(abs_top_builddir) -lYap -Wl,-R,\$(DESTDIR)\$(YAPLIBDIR) -Wl,-R,\$(DESTDIR)\$(LIBDIR)"
|
||||
PRE_INSTALL_ENV="LD_LIBRARY_PATH=\$(abs_top_builddir)"
|
||||
INSTALL_ENV="LD_LIBRARY_PATH=\$(DESTDIR)\$(LIBDIR): YAPSHAREDIR=\$(DESTDIR)\$(SHAREDIR) YAPLIBDIR=\$(DESTDIR)\$(YAPLIBDIR)"
|
||||
;;
|
||||
@ -965,9 +978,9 @@ case "$target_os" in
|
||||
# SO="a"
|
||||
#SHLIB_LD="\$(srcdir)/../../ldAix /bin/ld -bhalt:4 -bM:SRE -bE:lib.exp -H512 -T512 -bnoentry"
|
||||
#INSTALL_DLLS=""
|
||||
LDFLAGS="$LDFLAGS -L\$(abs_top_builddir) -Wl,-R,\$(LIBDIR) -Wl,-R,\$(YAPLIBDIR)"
|
||||
LDFLAGS="$LDFLAGS -L\$(abs_top_builddir) -Wl,-R,\$(DESTDIR)\$(LIBDIR) -Wl,-R,\$(DESTDIR)\$(YAPLIBDIR)"
|
||||
DYNYAPLIB=libYap."$SO"
|
||||
EXTRA_LIBS_FOR_DLLS="$EXTRA_LIBS_FOR_DLLS -L\$(abs_top_builddir) -lYap -Wl,-R,\$(YAPLIBDIR) -Wl,-R,\$(LIBDIR)"
|
||||
EXTRA_LIBS_FOR_DLLS="$EXTRA_LIBS_FOR_DLLS -L\$(abs_top_builddir) -lYap -Wl,-R,\$(DESTDIR)\$(YAPLIBDIR) -Wl,-R,\$(DESTDIR)\$(LIBDIR)"
|
||||
PRE_INSTALL_ENV="LD_LIBRARY_PATH=\$(abs_top_builddir)"
|
||||
INSTALL_ENV="LD_LIBRARY_PATH=\$(DESTDIR)\$(LIBDIR): YAPSHAREDIR=\$(DESTDIR)\$(SHAREDIR) YAPLIBDIR=\$(DESTDIR)\$(YAPLIBDIR)"
|
||||
;;
|
||||
@ -983,9 +996,9 @@ dnl Linux has both elf and a.out, in this case we found elf
|
||||
SHLIB_LD="ld -shared -expect_unresolved '*'"
|
||||
DO_SECOND_LD=""
|
||||
fi
|
||||
LDFLAGS="$LDFLAGS -L\$(abs_top_builddir) -Wl,-R,\$(LIBDIR) -Wl,-R,\$(YAPLIBDIR)"
|
||||
LDFLAGS="$LDFLAGS -L\$(abs_top_builddir) -Wl,-R,\$(DESTDIR)\$(LIBDIR) -Wl,-R,\$(DESTDIR)\$(YAPLIBDIR)"
|
||||
DYNYAPLIB=libYap."$SO"
|
||||
EXTRA_LIBS_FOR_DLLS="$EXTRA_LIBS_FOR_DLLS -L\$(abs_top_builddir) -lYap -Wl,-R,\$(YAPLIBDIR) -Wl,-R,\$(LIBDIR)"
|
||||
EXTRA_LIBS_FOR_DLLS="$EXTRA_LIBS_FOR_DLLS -L\$(abs_top_builddir) -lYap -Wl,-R,\$(DESTDIR)\$(YAPLIBDIR) -Wl,-R,\$(DESTDIR)\$(LIBDIR)"
|
||||
PRE_INSTALL_ENV="LD_LIBRARY_PATH=\$(abs_top_builddir)"
|
||||
INSTALL_ENV="LD_LIBRARY_PATH=\$(DESTDIR)\$(LIBDIR): YAPSHAREDIR=\$(DESTDIR)\$(SHAREDIR) YAPLIBDIR=\$(DESTDIR)\$(YAPLIBDIR)"
|
||||
;;
|
||||
@ -997,9 +1010,9 @@ dnl Linux has both elf and a.out, in this case we found elf
|
||||
SHLIB_LD="ld -n32 -shared -rdata_shared"
|
||||
SHLIB_CXX_LD="$SHLIB_LD"
|
||||
INSTALL_DLLS=""
|
||||
LDFLAGS="$LDFLAGS -L\$(abs_top_builddir) -Wl,-R,\$(LIBDIR) -Wl,-R,\$(YAPLIBDIR)"
|
||||
LDFLAGS="$LDFLAGS -L\$(abs_top_builddir) -Wl,-R,\$(DESTDIR)\$(LIBDIR) -Wl,-R,\$(DESTDIR)\$(YAPLIBDIR)"
|
||||
DYNYAPLIB=libYap."$SO"
|
||||
EXTRA_LIBS_FOR_DLLS="$EXTRA_LIBS_FOR_DLLS -L\$(abs_top_builddir) -lYap -Wl,-R,\$(YAPLIBDIR) -Wl,-R,\$(LIBDIR)"
|
||||
EXTRA_LIBS_FOR_DLLS="$EXTRA_LIBS_FOR_DLLS -L\$(abs_top_builddir) -lYap -Wl,-R,\$(DESTDIR)\$(YAPLIBDIR) -Wl,-R,\$(DESTDIR)\$(LIBDIR)"
|
||||
PRE_INSTALL_ENV="LD_LIBRARY_PATH=\$(abs_top_builddir)"
|
||||
INSTALL_ENV="LD_LIBRARY_PATH=\$(DESTDIR)\$(LIBDIR): YAPSHAREDIR=\$(DESTDIR)\$(SHAREDIR) YAPLIBDIR=\$(DESTDIR)\$(YAPLIBDIR)"
|
||||
;;
|
||||
@ -1021,12 +1034,12 @@ dnl Linux has both elf and a.out, in this case we found elf
|
||||
then
|
||||
SHLIB_LD="$CC -dynamiclib"
|
||||
SHLIB_CXX_LD="$CXX -dynamiclib"
|
||||
EXTRA_LIBS_FOR_DLLS="$EXTRA_LIBS_FOR_DLLS -L\$(abs_top_builddir) -lYap -Wl,-install_name,\$(YAPLIBDIR)/\$@"
|
||||
EXTRA_LIBS_FOR_DLLS="$EXTRA_LIBS_FOR_DLLS -L\$(abs_top_builddir) -lYap -Wl,-install_name,\$(DESTDIR)\$(YAPLIBDIR)/\$@"
|
||||
INSTALL_ENV="DYLD_LIBRARY_PATH=\$(DESTDIR)\$(LIBDIR): YAPSHAREDIR=\$(DESTDIR)\$(SHAREDIR) YAPLIBDIR=\$(DESTDIR)\$(YAPLIBDIR)"
|
||||
else
|
||||
SHLIB_LD="$CC -dynamiclib -Wl,-undefined,dynamic_lookup"
|
||||
SHLIB_CXX_LD="$CXX -dynamiclib -Wl,-undefined,dynamic_lookup"
|
||||
EXTRA_LIBS_FOR_DLLS="$EXTRA_LIBS_FOR_DLLS -Wl,-install_name,\$(YAPLIBDIR)/\$@"
|
||||
EXTRA_LIBS_FOR_DLLS="$EXTRA_LIBS_FOR_DLLS -Wl,-install_name,\$(DESTDIR)\$(YAPLIBDIR)/\$@"
|
||||
INSTALL_ENV="YAPSHAREDIR=\$(DESTDIR)\$(SHAREDIR) YAPLIBDIR=\$(DESTDIR)\$(YAPLIBDIR)"
|
||||
fi
|
||||
if test "$ac_cv_prog_gcc" = "yes"
|
||||
@ -1058,9 +1071,9 @@ dnl Linux has both elf and a.out, in this case we found elf
|
||||
SO="so"
|
||||
INSTALL_DLLS=""
|
||||
fi
|
||||
LDFLAGS="$LDFLAGS -L\$(abs_top_builddir) -Wl,-R,\$(LIBDIR)"
|
||||
LDFLAGS="$LDFLAGS -L\$(abs_top_builddir) -Wl,-R,\$(DESTDIR)\$(LIBDIR)"
|
||||
DYNYAPLIB=libYap."$SO"
|
||||
EXTRA_LIBS_FOR_DLLS="$EXTRA_LIBS_FOR_DLLS -L\$(abs_top_builddir) -Wl,-R,\$(YAPLIBDIR)"
|
||||
EXTRA_LIBS_FOR_DLLS="$EXTRA_LIBS_FOR_DLLS -L\$(abs_top_builddir) -Wl,-R,\$(DESTDIR)\$(YAPLIBDIR)"
|
||||
PRE_INSTALL_ENV="LD_LIBRARY_PATH=\$(abs_top_builddir)"
|
||||
INSTALL_ENV="LD_LIBRARY_PATH=\$(DESTDIR)\$(LIBDIR): YAPSHAREDIR=\$(DESTDIR)\$(SHAREDIR) YAPLIBDIR=\$(DESTDIR)\$(YAPLIBDIR)"
|
||||
;;
|
||||
@ -1074,16 +1087,16 @@ dnl Linux has both elf and a.out, in this case we found elf
|
||||
DO_SECOND_LD=""
|
||||
SO="so"
|
||||
INSTALL_DLLS=""
|
||||
LDFLAGS="$LDFLAGS -L\$(abs_top_builddir) -Wl,-R,\$(LIBDIR) -Wl,-R,\$(YAPLIBDIR)"
|
||||
LDFLAGS="$LDFLAGS -L\$(abs_top_builddir) -Wl,-R,\$(DESTDIR)\$(LIBDIR) -Wl,-R,\$(DESTDIR)\$(YAPLIBDIR)"
|
||||
DYNYAPLIB=libYap."$SO"
|
||||
if test "$dynamic_loading" = "yes"
|
||||
then
|
||||
YAPLIB_LD="\$(CC)"
|
||||
EXTRA_LIBS_FOR_DLLS="$EXTRA_LIBS_FOR_DLLS -L\$(abs_top_builddir) -lYap -Wl,-R,\$(YAPLIBDIR) -Wl,-R,\$(LIBDIR)"
|
||||
EXTRA_LIBS_FOR_DLLS="$EXTRA_LIBS_FOR_DLLS -L\$(abs_top_builddir) -lYap -Wl,-R,\$(DESTDIR)\$(YAPLIBDIR) -Wl,-R,\$(DESTDIR)\$(LIBDIR)"
|
||||
PRE_INSTALL_ENV="LD_LIBRARY_PATH=\$(abs_top_builddir)"
|
||||
INSTALL_ENV="LD_LIBRARY_PATH=\$(DESTDIR)\$(LIBDIR): YAPSHAREDIR=\$(DESTDIR)\$(SHAREDIR) YAPLIBDIR=\$(DESTDIR)\$(YAPLIBDIR)"
|
||||
else
|
||||
EXTRA_LIBS_FOR_DLLS="$EXTRA_LIBS_FOR_DLLS -Wl,-R,\$(YAPLIBDIR) -Wl,-R,\$(LIBDIR)"
|
||||
EXTRA_LIBS_FOR_DLLS="$EXTRA_LIBS_FOR_DLLS -Wl,-R,\$(DESTDIR)\$(YAPLIBDIR) -Wl,-R,\$(DESTDIR)\$(LIBDIR)"
|
||||
PRE_INSTALL_ENV=""
|
||||
YAPLIB_LD="\$(CC) -shared"
|
||||
INSTALL_ENV="YAPSHAREDIR=\$(DESTDIR)\$(SHAREDIR) YAPLIBDIR=\$(DESTDIR)\$(YAPLIBDIR)"
|
||||
@ -1132,9 +1145,9 @@ dnl Linux has both elf and a.out, in this case we found elf
|
||||
then
|
||||
LIBS="$LIBS -lnsl"
|
||||
fi
|
||||
LDFLAGS="$LDFLAGS -L\$(abs_top_builddir) -Wl,-R,\$(LIBDIR) -Wl,-R,\$(YAPLIBDIR)"
|
||||
LDFLAGS="$LDFLAGS -L\$(abs_top_builddir) -Wl,-R,\$(DESTDIR)\$(LIBDIR) -Wl,-R,\$(DESTDIR)\$(YAPLIBDIR)"
|
||||
DYNYAPLIB=libYap."$SO"
|
||||
EXTRA_LIBS_FOR_DLLS="$EXTRA_LIBS_FOR_DLLS -L\$(abs_top_builddir) -lYap -Wl,-R,\$(YAPLIBDIR) -Wl,-R,\$(LIBDIR)"
|
||||
EXTRA_LIBS_FOR_DLLS="$EXTRA_LIBS_FOR_DLLS -L\$(abs_top_builddir) -lYap -Wl,-R,\$(DESTDIR)\$(YAPLIBDIR) -Wl,-R,\$(DESTDIR)\$(LIBDIR)"
|
||||
PRE_INSTALL_ENV="LD_LIBRARY_PATH=\$(abs_top_builddir)"
|
||||
INSTALL_ENV="LD_LIBRARY_PATH=\$(DESTDIR)\$(LIBDIR): YAPSHAREDIR=\$(DESTDIR)\$(SHAREDIR) YAPLIBDIR=\$(DESTDIR)\$(YAPLIBDIR)"
|
||||
;;
|
||||
|
docs/yap.tex (95 lines changed)
@ -1384,7 +1384,7 @@ anonymous variables.
|
||||
|
||||
Punctuation tokens consist of one of the following characters:
|
||||
@example
|
||||
@center ( ) , [ ] @{ @} |
|
||||
( ) , [ ] @{ @} |
|
||||
@end example
|
||||
|
||||
These characters are used to group terms.
|
||||
@ -1701,7 +1701,7 @@ if they have not been loaded before, does nothing otherwise.
|
||||
|
||||
@var{F} must be a list containing the names of the files to load.
|
||||
|
||||
@item make [ISO]
|
||||
@item make
|
||||
@findex make/0
|
||||
@snindex make/0
|
||||
@cnindex make/0
|
||||
@ -2719,7 +2719,7 @@ between 0 and 10.
|
||||
@cnindex call_with_args/n
|
||||
Meta-call where @var{Name} is the name of the procedure to be called and
|
||||
the @var{Ai} are the arguments. The number of arguments varies between 0
|
||||
and 10.
|
||||
and 10. New code should use @code{call/N} for better portability.
|
||||
|
||||
If @var{Name} is a complex term, then @code{call_with_args/n} behaves as
|
||||
@code{call/n}:
|
||||
@ -3179,7 +3179,7 @@ Checks whether @var{T} is a database reference.
|
||||
@cnindex float/1
|
||||
Checks whether @var{T} is a floating point number.
|
||||
|
||||
@item rational(@var{T}) [ISO]
|
||||
@item rational(@var{T})
|
||||
@findex rational/1
|
||||
@syindex rational/1
|
||||
@cyindex rational/1
|
||||
@ -3346,7 +3346,7 @@ Also refer to @code{copy_term/2}.
|
||||
True when @var{List} is a proper list. That is, @var{List}
|
||||
is bound to the empty list (nil) or a term with functor '.' and arity 2.
|
||||
|
||||
@item ?@var{Term1} =@= ?@var{Term2}
|
||||
@item ?@var{Term1} =@@= ?@var{Term2}
|
||||
@findex =@=/2
|
||||
@syindex =@=/2
|
||||
@cnindex =@=/2
|
||||
@ -3412,15 +3412,7 @@ will return:
|
||||
The predicate holds when at least one of the arguments is ground
|
||||
(otherwise, an error message will be displayed). The argument @var{A} must
|
||||
be unifiable with an atom, and the argument @var{L} with the list of the
|
||||
ASCII codes for the characters of the external representation of @var{A}.
|
||||
|
||||
The ISO-Prolog standard dictates that @code{atom_chars/2} should unify
|
||||
the second argument with a list of one-char atoms, and not the character
|
||||
codes. For compatibility with previous versions of YAP, and
|
||||
with other Prolog implementations, YAP unifies the second
|
||||
argument with the character codes, as in @code{atom_codes/2}. Use the
|
||||
@code{set_prolog_flag(to_chars_mode,iso)} to obtain ISO standard
|
||||
compatibility.
|
||||
characters of @var{A}.
|
||||
|
||||
@item atom_codes(?@var{A},?@var{L}) [ISO]
|
||||
@findex atom_codes/2
|
||||
@ -3498,7 +3490,7 @@ concatenated are the representation for @var{A12}.
|
||||
If @var{A1} and @var{A2} are unbound, the built-in will find all the atoms
|
||||
that concatenated give @var{A12}.
|
||||
|
||||
@item number_chars(?@var{I},?@var{L})
|
||||
@item number_chars(?@var{I},?@var{L}) [ISO]
|
||||
@findex number_chars/2
|
||||
@saindex number_chars/2
|
||||
@cnindex number_chars/2
|
||||
@ -3506,15 +3498,7 @@ that concatenated give @var{A12}.
|
||||
The predicate holds when at least one of the arguments is ground
|
||||
(otherwise, an error message will be displayed). The argument @var{I} must
|
||||
be unifiable with a number, and the argument @var{L} with the list of the
|
||||
ASCII codes for the characters of the external representation of @var{I}.
|
||||
|
||||
The ISO-Prolog standard dictates that @code{number_chars/2} should unify
|
||||
the second argument with a list of one-char atoms, and not the character
|
||||
codes. For compatibility with previous versions of YAP, and
|
||||
with other Prolog implementations, YAP unifies the second
|
||||
argument with the character codes, as in @code{number_codes/2}. Use the
|
||||
@code{set_prolog_flag(to_chars_mode,iso)} to obtain ISO standard
|
||||
compatibility.
|
||||
characters of the external representation of @var{I}.
|
||||
|
||||
@item number_codes(?@var{A},?@var{L}) [ISO]
|
||||
@findex number_codes/2
|
||||
@ -4084,6 +4068,8 @@ Integer bitwise conjunction.
|
||||
Integer bitwise disjunction.
|
||||
|
||||
@item @var{X} # @var{Y}
|
||||
@item @var{X} >< @var{Y}
|
||||
@item xor(@var{X} , @var{Y})
|
||||
Integer bitwise exclusive disjunction.
|
||||
|
||||
@item @var{X} << @var{Y}
|
||||
@ -4348,23 +4334,23 @@ these options:
|
||||
|
||||
@table @code
|
||||
|
||||
@item type(+@var{T})
|
||||
@item type(+@var{T}) [ISO]
|
||||
Specify whether the stream is a @code{text} stream (default), or a
|
||||
@code{binary} stream.
|
||||
|
||||
@item reposition(+@var{Bool})
|
||||
@item reposition(+@var{Bool}) [ISO]
|
||||
Specify whether it is possible to reposition the stream (@code{true}), or
|
||||
not (@code{false}). By default, YAP enables repositioning for all
|
||||
files, except terminal files and sockets.
|
||||
|
||||
@item eof_action(+@var{Action})
|
||||
@item eof_action(+@var{Action}) [ISO]
|
||||
Specify the action to take if attempting to input characters from a
|
||||
stream where we have previously found an @code{end_of_file}. The possible
|
||||
actions are @code{error}, that raises an error, @code{reset}, that tries to
|
||||
reset the stream and is used for @code{tty} type files, and @code{eof_code},
|
||||
which generates a new @code{end_of_file} (default for non-tty files).
|
||||
|
||||
@item alias(+@var{Name})
|
||||
@item alias(+@var{Name}) [ISO]
|
||||
Specify an alias to the stream. The alias @t{Name} must be an atom. The
|
||||
alias can be used instead of the stream descriptor for every operation
|
||||
concerning the stream.
|
||||
@ -4467,7 +4453,9 @@ Defines extensions. Current mapping: @code{txt} implies @code{['']},
|
||||
@code{['.so', '']}, @code{qlf} implies @code{['.qlf', '']} and
|
||||
@code{directory} implies @code{['']}. The file-type @code{source}
|
||||
is an alias for @code{prolog} for compatibility to SICStus Prolog.
|
||||
See also @code{prolog_file_type/2}.
|
||||
See also @code{prolog_file_type/2}. Notice also that this predicate only
|
||||
returns non-directories, unless the option @code{file_type(directory)} is
|
||||
specified, or unless @code{access(none)}.
|
||||
|
||||
@item file_errors(@code{fail}/@code{error})
|
||||
If @code{error} (default), throw an @code{existence_error} exception
|
||||
@ -4547,14 +4535,14 @@ Send out all data in the output buffer of the current output stream.
|
||||
@cnindex flush_output/1
|
||||
Send all data in the output buffer for stream @var{S}.
|
||||
|
||||
@item set_input(+@var{S})
|
||||
@item set_input(+@var{S}) [ISO]
|
||||
@findex set_input/1
|
||||
@syindex set_input/1
|
||||
@cnindex set_input/1
|
||||
Set stream @var{S} as the current input stream. Predicates like @code{read/1}
|
||||
and @code{get/1} will start using stream @var{S}.
|
||||
|
||||
@item set_output(+@var{S})
|
||||
@item set_output(+@var{S}) [ISO]
|
||||
@findex set_output/1
|
||||
@syindex set_output/1
|
||||
@cnindex set_output/1
|
||||
@ -4917,16 +4905,16 @@ Displays term @var{T} on the current output stream, according to the
|
||||
following options:
|
||||
|
||||
@table @code
|
||||
@item quoted(+@var{Bool})
|
||||
@item quoted(+@var{Bool}) [ISO]
|
||||
If @code{true}, quote atoms if this would be necessary for the atom to
|
||||
be recognized as an atom by YAP's parser. The default value is
|
||||
@code{false}.
|
||||
|
||||
@item ignore_ops(+@var{Bool})
|
||||
@item ignore_ops(+@var{Bool}) [ISO]
|
||||
If @code{true}, ignore operator declarations when writing the term. The
|
||||
default value is @code{false}.
|
||||
|
||||
@item numbervars(+@var{Bool})
|
||||
@item numbervars(+@var{Bool}) [ISO]
|
||||
If @code{true}, output terms of the form
|
||||
@code{'$VAR'(N)}, where @var{N} is an integer, as a sequence of capital
|
||||
letters. The default value is @code{false}.
|
||||
@ -5536,7 +5524,7 @@ input stream.
|
||||
@cnindex tab/2
|
||||
The same as @code{tab/1}, but using stream @var{S}.
|
||||
|
||||
@item nl(+@var{S})
|
||||
@item nl(+@var{S}) [ISO]
|
||||
@findex nl/1
|
||||
@syindex nl/1
|
||||
@cnindex nl/1
|
||||
@ -5910,8 +5898,8 @@ predicates:
|
||||
@findex assert/1
|
||||
@saindex assert/1
|
||||
@caindex assert/1
|
||||
Adds clause @var{C} to the program. If the predicate is undefined,
|
||||
declare it as dynamic.
|
||||
Same as @code{assertz/1}. Adds clause @var{C} to the program. If the predicate is undefined,
|
||||
declare it as dynamic. New code should use @code{assertz/1} for better portability.
|
||||
|
||||
Most Prolog systems only allow asserting clauses for dynamic
|
||||
predicates. This is also as specified in the ISO standard. YAP allows
|
||||
@ -6101,7 +6089,7 @@ name is the atom @var{A}.
|
||||
Defines the relation: @var{P} is a built-in predicate whose name
|
||||
is the atom @var{A}.
|
||||
|
||||
@item predicate_property(@var{P},@var{Prop})
|
||||
@item predicate_property(@var{P},@var{Prop}) [ISO]
|
||||
@findex predicate_property/2
|
||||
@saindex predicate_property/2
|
||||
@cnindex predicate_property/2
|
||||
@ -8174,7 +8162,8 @@ this version of YAP either defines @code{__unix__} or @code{unix}.
|
||||
@item unknown [ISO]
|
||||
@findex unknown (yap_flag/2 option)
|
||||
@*
|
||||
Corresponds to calling the @code{unknown/2} built-in.
|
||||
Corresponds to calling the @code{unknown/2} built-in. Possible values
|
||||
are @code{error}, @code{fail}, and @code{warning}.
|
||||
|
||||
@item update_semantics
|
||||
@findex update_semantics (yap_flag/2 option)
|
||||
@ -9515,16 +9504,6 @@ matrices are multi-dimensional and compact. In contrast to static
|
||||
arrays, these arrays are allocated in the stack. Matrices are available
|
||||
by loading the library @code{library(matrix)}.
|
||||
|
||||
Accessing the matlab dynamic libraries can be complicated. In Linux
|
||||
machines, to use this interface, you may have to set the environment
|
||||
variable @t{LD_LIBRARY_PATH}. Next, follows an example using bash in a
|
||||
64-bit Linux PC:
|
||||
@example
|
||||
export LD_LIBRARY_PATH=''$MATLAB_HOME"/sys/os/glnxa64:''$MATLAB_HOME"/bin/glnxa64:''$LD_LIBRARY_PATH"
|
||||
@end example
|
||||
where @code{MATLAB_HOME} is the directory where matlab is installed
|
||||
at. Please replace @code{ax64} for @code{x86} on a 32-bit PC.
|
||||
|
||||
Notice that the functionality in this library is only partial. Please
|
||||
contact the YAP maintainers if you require extra functionality.
|
||||
|
||||
@ -9829,6 +9808,16 @@ actually use it, you need to install YAP calling @code{configure} with
|
||||
the @code{--with-matlab=DIR} option, and you need to call
|
||||
@code{use_module(library(lists))} command.
|
||||
|
||||
Accessing the matlab dynamic libraries can be complicated. In Linux
|
||||
machines, to use this interface, you may have to set the environment
|
||||
variable @t{LD_LIBRARY_PATH}. Next, follows an example using bash in a
|
||||
64-bit Linux PC:
|
||||
@example
|
||||
export LD_LIBRARY_PATH=''$MATLAB_HOME"/sys/os/glnxa64:''$MATLAB_HOME"/bin/glnxa64:''$LD_LIBRARY_PATH"
|
||||
@end example
|
||||
where @code{MATLAB_HOME} is the directory where matlab is installed
|
||||
at. Please replace @code{ax64} for @code{x86} on a 32-bit PC.
|
||||
|
||||
@table @code
|
||||
|
||||
@item start_matlab(+@var{Options})
|
||||
@@ -16691,6 +16680,14 @@ only two boolean flags are accepted: @code{YAPC_ENABLE_GC} and
@code{YAPC_ENABLE_AGC}. The first enables/disables the standard garbage
collector, the second does the same for the atom garbage collector.

@item @code{int} YAP_HaltRegisterHook(@code{YAP_halt_hook f, void *closure})
@findex YAP_HaltRegisterHook (C-Interface function)

Register the function @var{f} to be called if YAP is halted. The
function is called with two arguments: the exit code of the process (@code{0}
if this cannot be determined on your operating system) and the closure
argument @var{closure}.
@c See also @code{at_halt/1}.
@end table
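As an illustration of the interface documented above, here is a minimal sketch of how an embedding C program might register a halt hook. The hook body, the closure value, and the include name are assumptions for the example, not part of the commit:

#include <stdio.h>
#include "YapInterface.h"   /* assumed name of YAP's installed C-interface header */

/* Hypothetical hook: record in a log file that YAP is going down. */
static void close_log(int exit_code, void *closure)
{
  FILE *log = (FILE *)closure;
  fprintf(log, "YAP halting with exit code %d\n", exit_code);
  fclose(log);
}

/* Call once YAP has been initialised by the embedding program. */
static int install_halt_hook(void)
{
  FILE *log = fopen("run.log", "a");   /* hypothetical closure value */
  if (!log)
    return 0;
  /* Returns TRUE on success, FALSE if the hook entry could not be allocated. */
  return YAP_HaltRegisterHook(close_log, log);
}

The most recently registered hook runs first: Yap_HaltRegisterHook pushes new entries onto the front of the Yap_HaltHooks list, which run_halt_hooks() walks from the head inside Yap_exit().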
@@ -470,6 +470,9 @@ extern X_API int PROTO(YAP_AtomReleaseHold,(YAP_Atom));
/* void YAP_AtomReleaseHold(YAP_Atom) */
extern X_API YAP_agc_hook PROTO(YAP_AGCRegisterHook,(YAP_agc_hook));

/* int YAP_HaltRegisterHook(YAP_halt_hook, void *) */
extern X_API int PROTO(YAP_HaltRegisterHook,(YAP_halt_hook, void *));

/* char *YAP_cwd(void) */
extern X_API char * PROTO(YAP_cwd,(void));
@@ -176,6 +176,8 @@ typedef struct {

typedef int (*YAP_agc_hook)(void *_Atom);

typedef void (*YAP_halt_hook)(int exit_code, void *closure);

/********* execution mode ***********************/

typedef enum
@ -114,7 +114,7 @@ install_myddas: $(MYDDAS_PROGRAMS)
|
||||
done; \
|
||||
for p in $(DESTDIR)$(SHAREDIR)/Yap/myddas*.ypp; \
|
||||
do name=`basename $$p .ypp`; \
|
||||
cpp $(YAP_EXTRAS) -P -E -w $$p -o $(DESTDIR)$(SHAREDIR)/Yap/$$name.yap; \
|
||||
cpp $(YAP_EXTRAS) -P -E -w $$p > $(DESTDIR)$(SHAREDIR)/Yap/$$name.yap; \
|
||||
rm -fr $$p; \
|
||||
done; \
|
||||
fi
|
||||
|
@ -45,8 +45,7 @@
|
||||
wdgraph_del_vertex/3,
|
||||
wdgraph_edges/2,
|
||||
wdgraph_neighbours/3,
|
||||
wdgraph_wneighbours/3,
|
||||
wdgraph_symmetric_closure/2
|
||||
wdgraph_wneighbours/3
|
||||
]).
|
||||
|
||||
:- use_module(library(rbtrees),
|
||||
|
@@ -3249,6 +3249,7 @@ X_API void (*PL_signal(int sig, void (*func)(int)))(int)

X_API void PL_on_halt(void (*f)(int, void *), void *closure)
{
Yap_HaltRegisterHook((HaltHookFunc)f,closure);
}

void Yap_swi_install(void);
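For comparison, a short sketch of the SWI-compatibility route wired up in the hunk above: foreign code written against PL_on_halt() now ends up on the same halt-hook list. The cleanup function and its argument are hypothetical:

#include <stdlib.h>
#include "SWI-Prolog.h"   /* assumed: SWI-compatible foreign-interface header shipped with YAP */

/* Hypothetical cleanup handler. */
static void release_buffer(int exit_code, void *closure)
{
  (void)exit_code;          /* exit code not needed here */
  free(closure);
}

static void setup_cleanup(void *buffer)
{
  /* After this commit, PL_on_halt() forwards to Yap_HaltRegisterHook(). */
  PL_on_halt(release_buffer, buffer);
}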
@ -295,6 +295,9 @@ struct operator_entry *op_list OpList =NULL OpListAdjust
|
||||
/* stream array */
|
||||
struct stream_desc *yap_streams Stream =NULL RestoreStreams()
|
||||
|
||||
/* halt hooks */
|
||||
struct halt_hook *yap_halt_hook Yap_HaltHooks =NULL RestoreHaltHooks()
|
||||
|
||||
/* stream aliases */
|
||||
UInt n_of_file_aliases NOfFileAliases =0 void
|
||||
UInt sz_of_file_aliases SzOfFileAliases =0 void
|
||||
|
@ -48,6 +48,8 @@ PROBLOG_PROGRAMS= \
|
||||
$(srcdir)/problog/timer.yap \
|
||||
$(srcdir)/problog/tptree.yap \
|
||||
$(srcdir)/problog/variable_elimination.yap \
|
||||
$(srcdir)/problog/print_learning.yap \
|
||||
$(srcdir)/problog/utils_learning.yap \
|
||||
$(srcdir)/problog/variables.yap
|
||||
|
||||
PROBLOG_EXAMPLES = \
|
||||
|
@ -310,16 +310,18 @@
|
||||
|
||||
|
||||
% general yap modules
|
||||
:- ensure_loaded(library(system)).
|
||||
:- use_module(library(system), [delete_file/2, shell/2]).
|
||||
|
||||
:- problog_define_flag(optimization, problog_flag_validate_atom, 'optimization algorithm [local/global]', global, dtproblog).
|
||||
:- problog_define_flag(forest_type, problog_flag_validate_atom, 'type of BDD forest [dependent/independent]', dependent, dtproblog).
|
||||
:- initialization((
|
||||
problog_define_flag(optimization, problog_flag_validate_atom, 'optimization algorithm [local/global]', global, dtproblog),
|
||||
problog_define_flag(forest_type, problog_flag_validate_atom, 'type of BDD forest [dependent/independent]', dependent, dtproblog)
|
||||
)).
|
||||
|
||||
init_dtproblog :-
|
||||
problog_control(off,find_decisions),
|
||||
problog_control(off,internal_strategy).
|
||||
problog_control(off,find_decisions),
|
||||
problog_control(off,internal_strategy).
|
||||
|
||||
:- init_dtproblog.
|
||||
:- initialization(init_dtproblog).
|
||||
|
||||
:- op( 550, yfx, :: ).
|
||||
|
||||
@ -359,7 +361,7 @@ get_ground_strategy(_,never).
|
||||
% Internal strategy representation
|
||||
% for NON-GROUND strategies
|
||||
% e.g. 1 :: market(guy) for ? :: market(P)
|
||||
:- dynamic non_ground_strategy/2.
|
||||
:- dynamic(non_ground_strategy/2).
|
||||
|
||||
% Get Strategy
|
||||
strategy(_,_,_) :-
|
||||
@ -413,7 +415,7 @@ set_strategy([Term|R]) :-
|
||||
set_ground_strategy(ID2,LogProb)
|
||||
;
|
||||
copy_term(Decision, Decision2),
|
||||
assert(non_ground_strategy(Decision2,LogProb))
|
||||
assertz(non_ground_strategy(Decision2,LogProb))
|
||||
),
|
||||
set_strategy(R).
|
||||
|
||||
|
@ -230,7 +230,7 @@
|
||||
problog_kbest_save/6,
|
||||
problog_max/3,
|
||||
problog_exact/3,
|
||||
problog_exact_save/5,
|
||||
problog_exact_save/5,
|
||||
problog_montecarlo/3,
|
||||
problog_dnf_sampling/3,
|
||||
problog_answers/2,
|
||||
@ -296,32 +296,31 @@
|
||||
above/2]).
|
||||
|
||||
:- style_check(all).
|
||||
|
||||
:- yap_flag(unknown,error).
|
||||
|
||||
:- set_prolog_flag(to_chars_mode,quintus).
|
||||
|
||||
% general yap modules
|
||||
:- ensure_loaded(library(charsio)).
|
||||
:- ensure_loaded(library(lists)).
|
||||
:- ensure_loaded(library(terms)).
|
||||
:- ensure_loaded(library(random)).
|
||||
:- ensure_loaded(library(system)).
|
||||
:- ensure_loaded(library(rbtrees)).
|
||||
:- ensure_loaded(library(ordsets)).
|
||||
:- use_module(library(charsio)).
|
||||
:- use_module(library(lists)).
|
||||
:- use_module(library(terms)).
|
||||
:- use_module(library(random)). % PM doesn't seem to be used!
|
||||
:- use_module(library(system)).
|
||||
:- use_module(library(rbtrees)). % PM doesn't seem to be used!
|
||||
:- use_module(library(ordsets), [list_to_ord_set/2, ord_insert/3, ord_union/3]).
|
||||
|
||||
% problog related modules
|
||||
:- ensure_loaded('problog/variables').
|
||||
:- ensure_loaded('problog/extlists').
|
||||
:- ensure_loaded('problog/flags').
|
||||
:- ensure_loaded('problog/print').
|
||||
:- ensure_loaded('problog/os').
|
||||
:- ensure_loaded('problog/tptree').
|
||||
:- ensure_loaded('problog/tabling').
|
||||
:- ensure_loaded('problog/sampling').
|
||||
:- ensure_loaded('problog/intervals').
|
||||
:- ensure_loaded('problog/mc_DNF_sampling').
|
||||
:- catch(ensure_loaded('problog/variable_elimination'),_,true).
|
||||
:- use_module('problog/variables').
|
||||
:- use_module('problog/extlists').
|
||||
:- use_module('problog/flags').
|
||||
:- use_module('problog/print').
|
||||
:- use_module('problog/os').
|
||||
:- use_module('problog/tptree').
|
||||
:- use_module('problog/tabling').
|
||||
:- use_module('problog/sampling').
|
||||
:- use_module('problog/intervals').
|
||||
:- use_module('problog/mc_DNF_sampling').
|
||||
:- use_module('problog/variable_elimination').
|
||||
|
||||
% op attaching probabilities to facts
|
||||
:- op( 550, yfx, :: ).
|
||||
@ -333,56 +332,56 @@
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
|
||||
% global over all inference methods, internal use only
|
||||
:- dynamic problog_predicate/2.
|
||||
:- dynamic problog_continuous_predicate/3.
|
||||
:- dynamic(problog_predicate/2).
|
||||
:- dynamic(problog_continuous_predicate/3).
|
||||
% global over all inference methods, exported
|
||||
:- dynamic tunable_fact/2.
|
||||
:- dynamic non_ground_fact/1.
|
||||
:- dynamic continuous_fact/1.
|
||||
%:- dynamic problog_dir/1.
|
||||
:- dynamic(tunable_fact/2).
|
||||
:- dynamic(non_ground_fact/1).
|
||||
:- dynamic(continuous_fact/1).
|
||||
%:- dynamic(problog_dir/1).
|
||||
% global, manipulated via problog_control/2
|
||||
:- dynamic up/0.
|
||||
:- dynamic limit/0.
|
||||
:- dynamic mc/0.
|
||||
:- dynamic remember/0.
|
||||
:- dynamic exact/0. % Theo tabling
|
||||
:- dynamic find_decisions/0.
|
||||
:- dynamic internal_strategy/0.
|
||||
:- dynamic(up/0).
|
||||
:- dynamic(limit/0).
|
||||
:- dynamic(mc/0).
|
||||
:- dynamic(remember/0).
|
||||
:- dynamic(exact/0). % Theo tabling
|
||||
:- dynamic(find_decisions/0).
|
||||
:- dynamic(internal_strategy/0).
|
||||
% local to problog_delta
|
||||
:- dynamic low/2.
|
||||
:- dynamic up/2.
|
||||
:- dynamic stopDiff/1.
|
||||
:- dynamic(low/2).
|
||||
:- dynamic(up/2).
|
||||
:- dynamic(stopDiff/1).
|
||||
% local to problog_kbest
|
||||
:- dynamic current_kbest/3.
|
||||
:- dynamic(current_kbest/3).
|
||||
% local to problog_max
|
||||
:- dynamic max_probability/1.
|
||||
:- dynamic max_proof/1.
|
||||
:- dynamic(max_probability/1).
|
||||
:- dynamic(max_proof/1).
|
||||
% local to problog_montecarlo
|
||||
:- dynamic mc_prob/1.
|
||||
:- dynamic(mc_prob/1).
|
||||
% local to problog_answers
|
||||
:- dynamic answer/1.
|
||||
:- dynamic(answer/1).
|
||||
% to keep track of the groundings for non-ground facts
|
||||
:- dynamic grounding_is_known/2.
|
||||
:- dynamic(grounding_is_known/2).
|
||||
|
||||
% for decisions
|
||||
:- dynamic decision_fact/2.
|
||||
:- dynamic(decision_fact/2).
|
||||
|
||||
% for fact where the proabability is a variable
|
||||
:- dynamic dynamic_probability_fact/1.
|
||||
:- dynamic dynamic_probability_fact_extract/2.
|
||||
:- dynamic(dynamic_probability_fact/1).
|
||||
:- dynamic(dynamic_probability_fact_extract/2).
|
||||
|
||||
% for storing continuous parts of proofs (Hybrid ProbLog)
|
||||
:- dynamic hybrid_proof/4.
|
||||
:- dynamic hybrid_proof_disjoint/4.
|
||||
:- dynamic(hybrid_proof/3, hybrid_proof/4).
|
||||
:- dynamic(hybrid_proof_disjoint/4).
|
||||
|
||||
% ProbLog files declare prob. facts as P::G
|
||||
% and this module provides the predicate X::Y to iterate over them
|
||||
:- multifile '::'/2.
|
||||
:- multifile('::'/2).
|
||||
|
||||
|
||||
% directory where problogbdd executable is located
|
||||
% automatically set during loading -- assumes it is in same place as this file (problog.yap)
|
||||
:- getcwd(PD), set_problog_path(PD).
|
||||
:- initialization((getcwd(PD), set_problog_path(PD))).
|
||||
|
||||
|
||||
|
||||
@ -395,22 +394,28 @@
|
||||
% - factor used to decrease threshold for next level, NewMin=Factor*OldMin (saved also in log-space)
|
||||
%%%%%%%%%%%%
|
||||
|
||||
:- problog_define_flag(first_threshold, problog_flag_validate_indomain_0_1_open, 'starting threshold iterative deepening', 0.1, inference).
|
||||
:- problog_define_flag(last_threshold, problog_flag_validate_indomain_0_1_open, 'stopping threshold iterative deepening', 1e-30, inference, flags:last_threshold_handler).
|
||||
:- problog_define_flag(id_stepsize, problog_flag_validate_indomain_0_1_close, 'threshold shrinking factor iterative deepening', 0.5, inference, flags:id_stepsize_handler).
|
||||
:- initialization((
|
||||
problog_define_flag(first_threshold, problog_flag_validate_indomain_0_1_open, 'starting threshold iterative deepening', 0.1, inference),
|
||||
problog_define_flag(last_threshold, problog_flag_validate_indomain_0_1_open, 'stopping threshold iterative deepening', 1e-30, inference, flags:last_threshold_handler),
|
||||
problog_define_flag(id_stepsize, problog_flag_validate_indomain_0_1_close, 'threshold shrinking factor iterative deepening', 0.5, inference, flags:id_stepsize_handler)
|
||||
)).
|
||||
|
||||
%%%%%%%%%%%%
|
||||
% prune check stops derivations if they use a superset of facts already known to form a proof
|
||||
% (very) costly test, can be switched on/off here (This is obsolete as it is not included in implementation)
|
||||
%%%%%%%%%%%%
|
||||
|
||||
:- problog_define_flag(prunecheck, problog_flag_validate_switch, 'stop derivations including all facts of known proof', off, inference).
|
||||
:- initialization(
|
||||
problog_define_flag(prunecheck, problog_flag_validate_switch, 'stop derivations including all facts of known proof', off, inference)
|
||||
).
|
||||
|
||||
%%%%%%%%%%%%
|
||||
% max number of calls to probabilistic facts per derivation (to ensure termination)
|
||||
%%%%%%%%%%%%
|
||||
|
||||
:- problog_define_flag(maxsteps, problog_flag_validate_posint, 'max. number of prob. steps per derivation', 1000, inference).
|
||||
:- initialization(
|
||||
problog_define_flag(maxsteps, problog_flag_validate_posint, 'max. number of prob. steps per derivation', 1000, inference)
|
||||
).
|
||||
|
||||
%%%%%%%%%%%%
|
||||
% BDD timeout in seconds, used as option in BDD tool
|
||||
@ -422,12 +427,13 @@
|
||||
% located in the directory given by problog_flag dir
|
||||
%%%%%%%%%%%%
|
||||
|
||||
%:- problog_define_flag(bdd_path, problog_flag_validate_directory, 'problogbdd directory', '.',bdd).
|
||||
:- problog_define_flag(bdd_time, problog_flag_validate_posint, 'BDD computation timeout in seconds', 60, bdd).
|
||||
:- problog_define_flag(save_bdd, problog_flag_validate_boolean, 'save BDD files for (last) lower bound', false, bdd).
|
||||
:- problog_define_flag(dynamic_reorder, problog_flag_validate_boolean, 'use dynamic re-ordering for BDD', true, bdd).
|
||||
:- problog_define_flag(bdd_static_order, problog_flag_validate_boolean, 'use a static order', false, bdd).
|
||||
|
||||
:- initialization((
|
||||
% problog_define_flag(bdd_path, problog_flag_validate_directory, 'problogbdd directory', '.',bdd),
|
||||
problog_define_flag(bdd_time, problog_flag_validate_posint, 'BDD computation timeout in seconds', 60, bdd),
|
||||
problog_define_flag(save_bdd, problog_flag_validate_boolean, 'save BDD files for (last) lower bound', false, bdd),
|
||||
problog_define_flag(dynamic_reorder, problog_flag_validate_boolean, 'use dynamic re-ordering for BDD', true, bdd),
|
||||
problog_define_flag(bdd_static_order, problog_flag_validate_boolean, 'use a static order', false, bdd)
|
||||
)).
|
||||
|
||||
%%%%%%%%%%%%
|
||||
% determine whether ProbLog outputs information (number of proofs, intermediate results, ...)
|
||||
@ -435,27 +441,34 @@
|
||||
% default is false now, as dtproblog will flood the user with verbosity
|
||||
%%%%%%%%%%%%
|
||||
|
||||
:- problog_define_flag(verbose, problog_flag_validate_boolean, 'output intermediate information', false,output).
|
||||
:- initialization(
|
||||
problog_define_flag(verbose, problog_flag_validate_boolean, 'output intermediate information', false,output)
|
||||
).
|
||||
|
||||
%%%%%%%%%%%%
|
||||
% determine whether ProbLog outputs proofs when adding to trie
|
||||
% default is false
|
||||
%%%%%%%%%%%%
|
||||
|
||||
:- problog_define_flag(show_proofs, problog_flag_validate_boolean, 'output proofs', false,output).
|
||||
:- initialization(
|
||||
problog_define_flag(show_proofs, problog_flag_validate_boolean, 'output proofs', false,output)
|
||||
).
|
||||
|
||||
%%%%%%%%%%%%
|
||||
% Trie dump parameter for saving a file with the trie structure in the directory by problog_flag dir
|
||||
%%%%%%%%%%%%
|
||||
|
||||
:- problog_define_flag(triedump, problog_flag_validate_boolean, 'generate file: trie_file containing the trie structure', false,output).
|
||||
:- initialization(
|
||||
problog_define_flag(triedump, problog_flag_validate_boolean, 'generate file: trie_file containing the trie structure', false,output)
|
||||
).
|
||||
|
||||
%%%%%%%%%%%%
|
||||
% Default inference method
|
||||
%%%%%%%%%%%%
|
||||
|
||||
:- problog_define_flag(inference, problog_flag_validate_dummy, 'default inference method', exact, inference).
|
||||
|
||||
:- initialization(
|
||||
problog_define_flag(inference, problog_flag_validate_dummy, 'default inference method', exact, inference)
|
||||
).
|
||||
|
||||
problog_dir(PD):- problog_path(PD).
|
||||
|
||||
@ -465,15 +478,25 @@ problog_dir(PD):- problog_path(PD).
|
||||
|
||||
init_global_params :-
|
||||
%grow_atom_table(1000000),
|
||||
getcwd(Work),
|
||||
concat_path_with_filename(Work, output, WorkDir),
|
||||
|
||||
%%%%%%%%%%%%
|
||||
% working directory: all the temporary and output files will be located there
|
||||
% it assumes a subdirectory of the current working dir
|
||||
% on initialization, the current dir is the one where the user's file is located
|
||||
% should be changed to use temporary folder structure of operating system
|
||||
%%%%%%%%%%%%
|
||||
problog_define_flag(dir, problog_flag_validate_directory, 'directory for files', WorkDir, output),
|
||||
tmpnam(TempFolder),
|
||||
atomic_concat([TempFolder, '_problog'], TempProblogFolder),
|
||||
problog_define_flag(dir, problog_flag_validate_directory, 'directory for files', TempProblogFolder, output),
|
||||
problog_define_flag(bdd_par_file, problog_flag_validate_file, 'file for BDD variable parameters', example_bdd_probs, bdd, flags:working_file_handler),
|
||||
problog_define_flag(bdd_result, problog_flag_validate_file, 'file to store result calculated from BDD', example_bdd_res, bdd, flags:working_file_handler),
|
||||
problog_define_flag(bdd_file, problog_flag_validate_file, 'file for BDD script', example_bdd, bdd, flags:bdd_file_handler),
|
||||
problog_define_flag(static_order_file, problog_flag_validate_file, 'file for BDD static order', example_bdd_order, bdd, flags:working_file_handler),
|
||||
%%%%%%%%%%%%
|
||||
% montecarlo: recalculate current approximation after N samples
|
||||
% montecarlo: write log to this file
|
||||
%%%%%%%%%%%%
|
||||
problog_define_flag(mc_logfile, problog_flag_validate_file, 'logfile for montecarlo', 'log.txt', mcmc, flags:working_file_handler),
|
||||
check_existance('problogbdd').
|
||||
|
||||
check_existance(FileName):-
|
||||
@ -487,18 +510,6 @@ check_existance(FileName):-
|
||||
% parameter initialization to be called after returning to user's directory:
|
||||
:- initialization(init_global_params).
|
||||
|
||||
:- problog_define_flag(bdd_par_file, problog_flag_validate_file, 'file for BDD variable parameters', example_bdd_probs, bdd).
|
||||
:- problog_define_flag(bdd_result, problog_flag_validate_file, 'file to store result calculated from BDD', example_bdd_res, bdd).
|
||||
:- problog_define_flag(bdd_file, problog_flag_validate_file, 'file for BDD script', example_bdd, bdd, flags:bdd_file_handler).
|
||||
:- problog_define_flag(static_order_file, problog_flag_validate_file, 'file for BDD static order', example_bdd_order, bdd).
|
||||
|
||||
%%%%%%%%%%%%
|
||||
% montecarlo: recalculate current approximation after N samples
|
||||
% montecarlo: write log to this file
|
||||
%%%%%%%%%%%%
|
||||
|
||||
:- problog_define_flag(mc_logfile, problog_flag_validate_file, 'logfile for montecarlo', 'log.txt', mcmc).
|
||||
|
||||
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
% internal control flags
|
||||
@ -511,7 +522,7 @@ check_existance(FileName):-
|
||||
problog_control(on,X) :-
|
||||
call(X),!.
|
||||
problog_control(on,X) :-
|
||||
assert(X).
|
||||
assertz(X).
|
||||
problog_control(off,X) :-
|
||||
retractall(X).
|
||||
problog_control(check,X) :-
|
||||
@ -524,7 +535,7 @@ reset_control :-
|
||||
% problog_control(off,exact),
|
||||
problog_control(off,remember).
|
||||
|
||||
:- reset_control.
|
||||
:- initialization(reset_control).
|
||||
|
||||
grow_atom_table(N):-
|
||||
generate_atoms(N, 0),
|
||||
@ -569,12 +580,12 @@ term_expansion_intern((Annotation :: Head :- Body), Module, problog:ExpandedClau
|
||||
% It's a decision with a body
|
||||
copy_term((Head,Body),(HeadCopy,_BodyCopy)),
|
||||
functor(Head, Functor, Arity),
|
||||
atomic_concat([problog_,Functor],LongFunctor),
|
||||
atom_concat(problog_, Functor, LongFunctor),
|
||||
Head =.. [Functor|Args],
|
||||
append(Args,[LProb],LongArgs),
|
||||
probclause_id(ID),
|
||||
ProbFactHead =.. [LongFunctor,ID|LongArgs],
|
||||
assert(decision_fact(ID,Head)),
|
||||
assertz(decision_fact(ID,Head)),
|
||||
ExpandedClause = (ProbFactHead :-
|
||||
user:Body,
|
||||
(problog_control(check,internal_strategy) ->
|
||||
@ -583,14 +594,14 @@ term_expansion_intern((Annotation :: Head :- Body), Module, problog:ExpandedClau
|
||||
LProb = '?'
|
||||
)
|
||||
),
|
||||
assert(dynamic_probability_fact(ID)),
|
||||
assert((dynamic_probability_fact_extract(HeadCopy,P_New) :-
|
||||
assertz(dynamic_probability_fact(ID)),
|
||||
assertz((dynamic_probability_fact_extract(HeadCopy,P_New) :-
|
||||
dtproblog:strategy(ID,HeadCopy,P_New)
|
||||
)),
|
||||
(ground(Head) ->
|
||||
true
|
||||
;
|
||||
assert(non_ground_fact(ID))
|
||||
assertz(non_ground_fact(ID))
|
||||
),
|
||||
problog_predicate(Functor, Arity, LongFunctor, Module)
|
||||
;
|
||||
@ -615,7 +626,7 @@ user:term_expansion(P::Goal,Goal) :-
|
||||
term_expansion_intern(P :: Goal,Module,problog:ProbFact) :-
|
||||
copy_term((P,Goal),(P_Copy,Goal_Copy)),
|
||||
functor(Goal, Name, Arity),
|
||||
atomic_concat([problog_,Name],ProblogName),
|
||||
atom_concat(problog_, Name, ProblogName),
|
||||
Goal =.. [Name|Args],
|
||||
append(Args,[LProb],L1),
|
||||
probclause_id(ID),
|
||||
@ -624,7 +635,7 @@ term_expansion_intern(P :: Goal,Module,problog:ProbFact) :-
|
||||
(nonvar(P), P = t(TrueProb))
|
||||
->
|
||||
(
|
||||
assert(tunable_fact(ID,TrueProb)),
|
||||
assertz(tunable_fact(ID,TrueProb)),
|
||||
LProb is log(random*0.9+0.05) % set unknown probability randomly in [0.05, 0.95]
|
||||
);
|
||||
(
|
||||
@ -646,8 +657,8 @@ term_expansion_intern(P :: Goal,Module,problog:ProbFact) :-
|
||||
)
|
||||
),
|
||||
LProb=log(P),
|
||||
assert(dynamic_probability_fact(ID)),
|
||||
assert(dynamic_probability_fact_extract(Goal_Copy,P_Copy))
|
||||
assertz(dynamic_probability_fact(ID)),
|
||||
assertz(dynamic_probability_fact_extract(Goal_Copy,P_Copy))
|
||||
)
|
||||
)
|
||||
),
|
||||
@ -655,7 +666,7 @@ term_expansion_intern(P :: Goal,Module,problog:ProbFact) :-
|
||||
ground(Goal)
|
||||
->
|
||||
true;
|
||||
assert(non_ground_fact(ID))
|
||||
assertz(non_ground_fact(ID))
|
||||
),
|
||||
problog_predicate(Name, Arity, ProblogName,Module).
|
||||
|
||||
@ -676,6 +687,7 @@ is_valid_gaussian(X) :-
|
||||
throw(invalid_gaussian(X))
|
||||
).
|
||||
|
||||
:- multifile(user:term_expansion/1).
|
||||
|
||||
user:term_expansion(Goal, problog:ProbFact) :-
|
||||
compound(Goal),
|
||||
@ -696,7 +708,7 @@ user:term_expansion(Goal, problog:ProbFact) :-
|
||||
),
|
||||
|
||||
functor(Goal, Name, Arity),
|
||||
atomic_concat([problogcontinuous_,Name],ProblogName),
|
||||
atom_concat(problogcontinuous_, Name, ProblogName),
|
||||
probclause_id(ID),
|
||||
|
||||
GaussianArg=gaussian(Mu_Arg,Sigma_Arg),
|
||||
@ -711,7 +723,7 @@ user:term_expansion(Goal, problog:ProbFact) :-
|
||||
Sigma_Random is 0.4, % random*2+0.5,
|
||||
nth(Pos,Args,_,KeepArgs),
|
||||
nth(Pos,NewArgs,gaussian(Mu_Random,Sigma_Random),KeepArgs),
|
||||
assert(tunable_fact(ID,gaussian(Mu_Arg,Sigma_Arg)))
|
||||
assertz(tunable_fact(ID,gaussian(Mu_Arg,Sigma_Arg)))
|
||||
)
|
||||
),
|
||||
ProbFact =.. [ProblogName,ID|NewArgs],
|
||||
@ -720,9 +732,9 @@ user:term_expansion(Goal, problog:ProbFact) :-
|
||||
ground(Goal)
|
||||
->
|
||||
true;
|
||||
assert(non_ground_fact(ID))
|
||||
assertz(non_ground_fact(ID))
|
||||
),
|
||||
assert(continuous_fact(ID)),
|
||||
assertz(continuous_fact(ID)),
|
||||
problog_continuous_predicate(Name, Arity, Pos,ProblogName).
|
||||
|
||||
|
||||
@ -759,7 +771,7 @@ problog_continuous_predicate(Name, Arity, ContinuousArgumentPosition, ProblogNam
|
||||
ProbFact =.. [ProblogName,ID|ProbArgs],
|
||||
prolog_load_context(module,Mod),
|
||||
|
||||
assert( (Mod:OriginalGoal :- ProbFact,
|
||||
assertz( (Mod:OriginalGoal :- ProbFact,
|
||||
% continuous facts always get a grounding ID, even when they are actually ground
|
||||
% this simplifies the BDD script generation
|
||||
non_ground_fact_grounding_id(ProbFact,Ground_ID),
|
||||
@ -767,7 +779,7 @@ problog_continuous_predicate(Name, Arity, ContinuousArgumentPosition, ProblogNam
|
||||
add_continuous_to_proof(ID,ID2)
|
||||
)),
|
||||
|
||||
assert(problog_continuous_predicate(Name, Arity,ContinuousArgumentPosition)),
|
||||
assertz(problog_continuous_predicate(Name, Arity,ContinuousArgumentPosition)),
|
||||
ArityPlus1 is Arity+1,
|
||||
dynamic(problog:ProblogName/ArityPlus1).
|
||||
|
||||
@ -795,16 +807,16 @@ interval_merge((_ID,GroundID,_Type),Interval) :-
|
||||
|
||||
|
||||
|
||||
problog_assert(P::Goal) :-
|
||||
problog_assert(user,P::Goal).
|
||||
problog_assert(Module, P::Goal) :-
|
||||
problog_assertz(P::Goal) :-
|
||||
problog_assertz(user,P::Goal).
|
||||
problog_assertz(Module, P::Goal) :-
|
||||
term_expansion_intern(P::Goal,Module,problog:ProbFact),
|
||||
assert(problog:ProbFact).
|
||||
assertz(problog:ProbFact).
|
||||
|
||||
problog_retractall(Goal) :-
|
||||
Goal =.. [F|Args],
|
||||
append([_ID|Args],[_Prob],Args2),
|
||||
atomic_concat(['problog_',F],F2),
|
||||
atom_concat('problog_', F, F2),
|
||||
ProbLogGoal=..[F2|Args2],
|
||||
retractall(problog:ProbLogGoal).
|
||||
|
||||
@ -818,18 +830,18 @@ problog_predicate(Name, Arity, ProblogName,Mod) :-
|
||||
OriginalGoal =.. [_|Args],
|
||||
append(Args,[Prob],L1),
|
||||
ProbFact =.. [ProblogName,ID|L1],
|
||||
assert( (Mod:OriginalGoal :-
|
||||
assertz( (Mod:OriginalGoal :-
|
||||
ProbFact,
|
||||
grounding_id(ID,OriginalGoal,ID2),
|
||||
prove_problog_fact(ID,ID2,Prob)
|
||||
)),
|
||||
|
||||
assert( (Mod:problog_not(OriginalGoal) :-
|
||||
assertz( (Mod:problog_not(OriginalGoal) :-
|
||||
ProbFact,
|
||||
grounding_id(ID,OriginalGoal,ID2),
|
||||
prove_problog_fact_negated(ID,ID2,Prob)
|
||||
)),
|
||||
assert(problog_predicate(Name, Arity)),
|
||||
assertz(problog_predicate(Name, Arity)),
|
||||
ArityPlus2 is Arity+2,
|
||||
dynamic(problog:ProblogName/ArityPlus2).
|
||||
|
||||
@ -899,7 +911,8 @@ prove_problog_fact_negated(ClauseID,GroundID,Prob) :-
).

% generate next global identifier
:- nb_setval(probclause_counter,0).
:- initialization(nb_setval(probclause_counter,0)).

probclause_id(ID) :-
nb_getval(probclause_counter,ID), !,
C1 is ID+1,
@ -925,7 +938,7 @@ non_ground_fact_grounding_id(Goal,ID) :-
nb_getval(non_ground_fact_grounding_id_counter,ID),
ID2 is ID+1,
nb_setval(non_ground_fact_grounding_id_counter,ID2),
assert(grounding_is_known(Goal,ID))
assertz(grounding_is_known(Goal,ID))
)
).

@ -951,7 +964,7 @@ probabilistic_fact(P2,Goal,ID) :-
|
||||
->
|
||||
(
|
||||
Goal =.. [F|Args],
|
||||
atomic_concat('problog_',F,F2),
|
||||
atom_concat('problog_', F, F2),
|
||||
append([ID|Args],[P],Args2),
|
||||
Goal2 =..[F2|Args2],
|
||||
length(Args2,N),
|
||||
@ -997,7 +1010,7 @@ prob_for_id(dummy,dummy,dummy).

get_fact_probability(A, Prob) :-
ground(A),
not(number(A)),
\+ number(A),
atom_codes(A, A_Codes),
once(append(Part1, [95|Part2], A_Codes)), % 95 = '_'
number_codes(ID, Part1), !,
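The same hunk replaces not/1 with the ISO negation-as-failure operator \+/1; the two behave identically here, but not/1 is only kept for backward compatibility. Illustrative sketch, not part of the patch:

% succeeds for any non-numeric term, e.g. not_a_number(foo)
not_a_number(X) :- \+ number(X).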
@ -1055,11 +1068,11 @@ set_fact_probability(ID,Prob) :-
|
||||
NewLogProb is log(Prob),
|
||||
nth(ProblogArity,NewProblogTermArgs,NewLogProb,KeepArgs),
|
||||
NewProblogTerm =.. [ProblogName|NewProblogTermArgs],
|
||||
assert(NewProblogTerm).
|
||||
assertz(NewProblogTerm).
|
||||
|
||||
get_internal_fact(ID,ProblogTerm,ProblogName,ProblogArity) :-
|
||||
problog_predicate(Name,Arity),
|
||||
atomic_concat([problog_,Name],ProblogName),
|
||||
atom_concat(problog_, Name, ProblogName),
|
||||
ProblogArity is Arity+2,
|
||||
functor(ProblogTerm,ProblogName,ProblogArity),
|
||||
arg(1,ProblogTerm,ID),
|
||||
@ -1077,7 +1090,7 @@ get_continuous_fact_parameters(ID,Parameters) :-
|
||||
|
||||
get_internal_continuous_fact(ID,ProblogTerm,ProblogName,ProblogArity,ContinuousPos) :-
|
||||
problog_continuous_predicate(Name,Arity,ContinuousPos),
|
||||
atomic_concat([problogcontinuous_,Name],ProblogName),
|
||||
atom_concat(problogcontinuous_, Name, ProblogName),
|
||||
ProblogArity is Arity+1,
|
||||
functor(ProblogTerm,ProblogName,ProblogArity),
|
||||
arg(1,ProblogTerm,ID),
|
||||
@ -1090,7 +1103,7 @@ set_continuous_fact_parameters(ID,Parameters) :-
|
||||
nth0(ContinuousPos,ProblogTermArgs,_,KeepArgs),
|
||||
nth0(ContinuousPos,NewProblogTermArgs,Parameters,KeepArgs),
|
||||
NewProblogTerm =.. [ProblogName|NewProblogTermArgs],
|
||||
assert(NewProblogTerm).
|
||||
assertz(NewProblogTerm).
|
||||
|
||||
|
||||
|
||||
@ -1134,7 +1147,7 @@ get_fact(ID,OutsideTerm) :-
ProblogTerm =.. [_Functor,ID|Args],
atomic_concat('problog_',OutsideFunctor,ProblogName),
Last is ProblogArity-1,
nth(Last,Args,_LogProb,OutsideArgs),
nth(Last,Args,_LogProb,OutsideArgs), % PM avoid nth/3; use nth0/3 or nth1/3 instead
OutsideTerm =.. [OutsideFunctor|OutsideArgs].
% ID of instance of non-ground fact: get fact from grounding table
get_fact(ID,OutsideTerm) :-
@ -1142,12 +1155,12 @@ get_fact(ID,OutsideTerm) :-
grounding_is_known(OutsideTerm,GID).

recover_grounding_id(Atom,ID) :-
name(Atom,List),
atom_codes(Atom,List),
reverse(List,Rev),
recover_number(Rev,NumRev),
reverse(NumRev,Num),
name(ID,Num).
recover_number([95|_],[]) :- !. % name('_',[95])
atom_codes(ID,Num).
recover_number([95|_],[]) :- !. % atom_codes('_',[95])
recover_number([A|B],[A|C]) :-
recover_number(B,C).

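On the % PM remark above: nth0/3 and nth1/3 from library(lists) make the indexing base explicit, whereas nth/3 and nth/4 are non-portable. Hedged example, not part of the patch:

?- nth1(2, [a,b,c], X).   % X = b  (1-based)
?- nth0(2, [a,b,c], X).   % X = c  (0-based)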
@ -1281,9 +1294,9 @@ montecarlo_check(ComposedID) :-
fail.
% (c) for unknown groundings of non-ground facts: generate a new sample (decompose the ID first)
montecarlo_check(ID) :-
name(ID,IDN),
atom_codes(ID,IDN),
recover_number(IDN,FactIDName),
name(FactID,FactIDName),
atom_codes(FactID,FactIDName),
new_sample_nonground(ID,FactID).

% sampling from ground fact: set array value to 1 (in) or 2 (out)
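The name/2 -> atom_codes/2 rewrites in this and the previous hunk trade guessing for predictability: name/2 decides by itself whether to build a number or an atom from a code list, while atom_codes/2 and number_codes/2 state the intended type. Illustrative queries (with double_quotes left at its default, codes), not part of the patch:

?- name(X, "42").          % X = 42   (a number)
?- atom_codes(X, "42").    % X = '42' (an atom)
?- number_codes(X, "42").  % X = 42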
@ -1319,10 +1332,10 @@ new_sample_nonground(ComposedID,ID) :-
|
||||
% fail.
|
||||
|
||||
split_grounding_id(Composed,Fact,Grounding) :-
|
||||
name(Composed,C),
|
||||
atom_codes(Composed,C),
|
||||
split_g_id(C,F,G),
|
||||
name(Fact,F),
|
||||
name(Grounding,G).
|
||||
atom_codes(Fact,F),
|
||||
atom_codes(Grounding,G).
|
||||
split_g_id([95|Grounding],[],Grounding) :- !.
|
||||
split_g_id([A|B],[A|FactID],GroundingID) :-
|
||||
split_g_id(B,FactID,GroundingID).
|
||||
@ -1407,19 +1420,21 @@ put_module(Goal,Module,Module:Goal).
% if remember is on, input files for problogbdd will be saved
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

:- problog_var_define(sld_time, times, time, messages('SLD resolution', ':', ' ms')).
:- problog_var_define(bdd_script_time, times, time, messages('Generating BDD script', ':', ' ms')).
:- problog_var_define(bdd_generation_time, times, time, messages('Constructing BDD', ':', ' ms')).
:- problog_var_define(trie_statistics, memory, untyped, messages('Trie usage', ':', '')).
:- problog_var_define(probability, result, number, messages('Probabilty', ' = ', '')).
:- problog_var_define(bdd_script_time(Method), times, time, messages('Generating BDD script '(Method), ':', ' ms')).
:- problog_var_define(bdd_generation_time(Method), times, time, messages('Constructing BDD '(Method), ':', ' ms')).
:- problog_var_define(probability(Method), result, number, messages('Probabilty '(Method), ' = ', '')).
:- problog_var_define(trie_statistics(Method), memory, untyped, messages('Trie usage '(Method), ':', '')).
:- problog_var_define(dbtrie_statistics(Method), memory, untyped, messages('Depth Breadth Trie usage '(Method), ':', '')).
:- problog_var_define(db_trie_opts_performed(Method), memory, untyped, messages('Optimisations performed '(Method), ':', '')).
:- problog_var_define(variable_elimination_time, times, time, messages('Variable Elimination', ':', ' ms')).
:- problog_var_define(variable_elimination_stats, memory, untyped, messages('Variable Elimination', ':', '')).
:- initialization((
problog_var_define(sld_time, times, time, messages('SLD resolution', ':', ' ms')),
problog_var_define(bdd_script_time, times, time, messages('Generating BDD script', ':', ' ms')),
problog_var_define(bdd_generation_time, times, time, messages('Constructing BDD', ':', ' ms')),
problog_var_define(trie_statistics, memory, untyped, messages('Trie usage', ':', '')),
problog_var_define(probability, result, number, messages('Probabilty', ' = ', '')),
problog_var_define(bdd_script_time(Method), times, time, messages('Generating BDD script '(Method), ':', ' ms')),
problog_var_define(bdd_generation_time(Method), times, time, messages('Constructing BDD '(Method), ':', ' ms')),
problog_var_define(probability(Method), result, number, messages('Probabilty '(Method), ' = ', '')),
problog_var_define(trie_statistics(Method), memory, untyped, messages('Trie usage '(Method), ':', '')),
problog_var_define(dbtrie_statistics(Method), memory, untyped, messages('Depth Breadth Trie usage '(Method), ':', '')),
problog_var_define(db_trie_opts_performed(Method), memory, untyped, messages('Optimisations performed '(Method), ':', '')),
problog_var_define(variable_elimination_time, times, time, messages('Variable Elimination', ':', ' ms')),
problog_var_define(variable_elimination_stats, memory, untyped, messages('Variable Elimination', ':', ''))
)).

problog_statistics(Stat, Result):-
problog_var_defined(Stat),
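The directives above are now wrapped in a single initialization/1 goal, so they run once after the file has been loaded rather than while it is being compiled; the same pattern appears elsewhere in this patch (probclause_counter, the logger, the flag definitions). A minimal hedged sketch of the idiom (my_counter and my_results are just example keys):

% runs once, after the enclosing file and its dependencies are loaded
:- initialization((
    nb_setval(my_counter, 0),
    nb_setval(my_results, [])
)).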
@ -1780,7 +1795,7 @@ add_solution :-
|
||||
Continuous=[];
|
||||
(
|
||||
Continuous=[continuous(ProofID)],
|
||||
assert(hybrid_proof(ProofID,Cont_IDs,AllIntervals))
|
||||
assertz(hybrid_proof(ProofID,Cont_IDs,AllIntervals))
|
||||
)
|
||||
)
|
||||
)
|
||||
@ -1808,7 +1823,7 @@ collect_all_intervals([(ID,GroundID)|T],ProofID,[Interval|T2]) :-
|
||||
Interval \= all, % we do not need to store continuous
|
||||
% variables with domain [-oo,oo] (they have probability 1)
|
||||
!,
|
||||
assert(hybrid_proof(ProofID,ID,GroundID,Interval)),
|
||||
assertz(hybrid_proof(ProofID,ID,GroundID,Interval)),
|
||||
collect_all_intervals(T,ProofID,T2).
|
||||
collect_all_intervals([_|T],ProofID,T2) :-
|
||||
collect_all_intervals(T,ProofID,T2).
|
||||
@ -1865,7 +1880,7 @@ disjoin_hybrid_proofs([GroundID|T]) :-
|
||||
(
|
||||
hybrid_proof(ProofID,ID,GroundID,Interval),
|
||||
intervals_disjoin(Interval,Partition,PInterval),
|
||||
assert(hybrid_proof_disjoint(ProofID,ID,GroundID,PInterval)),
|
||||
assertz(hybrid_proof_disjoint(ProofID,ID,GroundID,PInterval)),
|
||||
|
||||
fail; % go to next proof
|
||||
true
|
||||
@ -1960,9 +1975,9 @@ init_problog_delta(Threshold,Delta) :-
|
||||
nb_setval(problog_completed_proofs, Trie_Completed_Proofs),
|
||||
init_ptree(Trie_Stopped_Proofs),
|
||||
nb_setval(problog_stopped_proofs, Trie_Stopped_Proofs),
|
||||
assert(low(0,0.0)),
|
||||
assert(up(0,1.0)),
|
||||
assert(stopDiff(Delta)),
|
||||
assertz(low(0,0.0)),
|
||||
assertz(up(0,1.0)),
|
||||
assertz(stopDiff(Delta)),
|
||||
init_problog(Threshold).
|
||||
|
||||
problog_delta_id(Goal, _) :-
|
||||
@ -2048,7 +2063,7 @@ eval_lower(N,P,Status) :-
|
||||
eval_dnf(Trie_Completed_Proofs,P,Status),
|
||||
(Status = ok ->
|
||||
retract(low(_,_)),
|
||||
assert(low(N,P)),
|
||||
assertz(low(N,P)),
|
||||
(problog_flag(verbose,true) -> format(user,'lower bound: ~6f~n',[P]);true),
|
||||
flush_output(user)
|
||||
;
|
||||
@ -2058,7 +2073,7 @@ eval_lower(N,P,Status) :-
|
||||
eval_upper(0,P,ok) :-
|
||||
retractall(up(_,_)),
|
||||
low(N,P),
|
||||
assert(up(N,P)).
|
||||
assertz(up(N,P)).
|
||||
% else merge proofs and stopped derivations to get upper bound
|
||||
% in case of timeout or other problems, skip and use bound from last level
|
||||
eval_upper(N,UpP,ok) :-
|
||||
@ -2071,7 +2086,7 @@ eval_upper(N,UpP,ok) :-
|
||||
delete_ptree(Trie_All_Proofs),
|
||||
(StatusUp = ok ->
|
||||
retract(up(_,_)),
|
||||
assert(up(N,UpP))
|
||||
assertz(up(N,UpP))
|
||||
;
|
||||
(problog_flag(verbose,true) -> format(user,'~w - continue using old up~n',[StatusUp]);true),
|
||||
flush_output(user),
|
||||
@ -2099,8 +2114,8 @@ problog_max(Goal, Prob, Facts) :-
|
||||
init_problog_max(Threshold) :-
|
||||
retractall(max_probability(_)),
|
||||
retractall(max_proof(_)),
|
||||
assert(max_probability(-999999)),
|
||||
assert(max_proof(unprovable)),
|
||||
assertz(max_probability(-999999)),
|
||||
assertz(max_proof(unprovable)),
|
||||
init_problog(Threshold).
|
||||
|
||||
update_max :-
|
||||
@ -2112,10 +2127,10 @@ update_max :-
|
||||
b_getval(problog_current_proof, IDs),
|
||||
open_end_close_end(IDs, R),
|
||||
retractall(max_proof(_)),
|
||||
assert(max_proof(R)),
|
||||
assertz(max_proof(R)),
|
||||
nb_setval(problog_threshold, CurrP),
|
||||
retractall(max_probability(_)),
|
||||
assert(max_probability(CurrP))
|
||||
assertz(max_probability(CurrP))
|
||||
).
|
||||
|
||||
problog_max_id(Goal, _Prob, _Clauses) :-
|
||||
@ -2196,7 +2211,7 @@ problog_real_kbest(Goal, K, Prob, Status) :-
|
||||
|
||||
init_problog_kbest(Threshold) :-
|
||||
retractall(current_kbest(_,_,_)),
|
||||
assert(current_kbest(-999999,[],0)), %(log-threshold,proofs,num_proofs)
|
||||
assertz(current_kbest(-999999,[],0)), %(log-threshold,proofs,num_proofs)
|
||||
init_ptree(Trie_Completed_Proofs),
|
||||
nb_setval(problog_completed_proofs, Trie_Completed_Proofs),
|
||||
init_problog(Threshold).
|
||||
@ -2237,7 +2252,7 @@ update_current_kbest(K,NewLogProb,Cl) :-
|
||||
sorted_insert(NewLogProb-Cl,List,NewList),
|
||||
NewLength is Length+1,
|
||||
(NewLength < K ->
|
||||
assert(current_kbest(OldThres,NewList,NewLength))
|
||||
assertz(current_kbest(OldThres,NewList,NewLength))
|
||||
;
|
||||
(NewLength>K ->
|
||||
First is NewLength-K+1,
|
||||
@ -2245,7 +2260,7 @@ update_current_kbest(K,NewLogProb,Cl) :-
|
||||
; FinalList=NewList, FinalLength=NewLength),
|
||||
FinalList=[NewThres-_|_],
|
||||
nb_setval(problog_threshold,NewThres),
|
||||
assert(current_kbest(NewThres,FinalList,FinalLength))).
|
||||
assertz(current_kbest(NewThres,FinalList,FinalLength))).
|
||||
|
||||
sorted_insert(A,[],[A]).
|
||||
sorted_insert(A-LA,[B1-LB1|B], [A-LA,B1-LB1|B] ) :-
|
||||
@ -2406,7 +2421,7 @@ montecarlo(Goal,Delta,K,SamplesSoFar,File,PositiveSoFar,InitialTime) :-
|
||||
;
|
||||
true
|
||||
),
|
||||
assert(mc_prob(Prob))
|
||||
assertz(mc_prob(Prob))
|
||||
;
|
||||
montecarlo(Goal,Delta,K,SamplesNew,File,Next,InitialTime)
|
||||
).
|
||||
@ -2428,7 +2443,7 @@ montecarlo(Goal,Delta,K,SamplesSoFar,File,PositiveSoFar,InitialTime) :-
|
||||
% ;
|
||||
% true
|
||||
% ),
|
||||
% assert(mc_prob(Prob))
|
||||
% assertz(mc_prob(Prob))
|
||||
% ;
|
||||
% montecarlo(Goal,Delta,K,SamplesNew,File,Next,InitialTime)
|
||||
% ).
|
||||
@ -2473,7 +2488,7 @@ problog_answers(Goal,File) :-
|
||||
set_problog_flag(verbose,false),
|
||||
retractall(answer(_)),
|
||||
% this will not give the exact prob of Goal!
|
||||
problog_exact((Goal,ground(Goal),\+problog:answer(Goal),assert(problog:answer(Goal))),_,_),
|
||||
problog_exact((Goal,ground(Goal),\+problog:answer(Goal),assertz(problog:answer(Goal))),_,_),
|
||||
open(File,write,_,[alias(answer)]),
|
||||
eval_answers,
|
||||
close(answer).
|
||||
@ -2531,13 +2546,13 @@ update_current_kbest_answers(_,NewLogProb,Goal) :-
|
||||
!,
|
||||
keysort(NewList,SortedList),%format(user_error,'updated variant of ~w~n',[Goal]),
|
||||
retract(current_kbest(K,_,Len)),
|
||||
assert(current_kbest(K,SortedList,Len)).
|
||||
assertz(current_kbest(K,SortedList,Len)).
|
||||
update_current_kbest_answers(K,NewLogProb,Goal) :-
|
||||
retract(current_kbest(OldThres,List,Length)),
|
||||
sorted_insert(NewLogProb-Goal,List,NewList),%format(user_error,'inserted new element ~w~n',[Goal]),
|
||||
NewLength is Length+1,
|
||||
(NewLength < K ->
|
||||
assert(current_kbest(OldThres,NewList,NewLength))
|
||||
assertz(current_kbest(OldThres,NewList,NewLength))
|
||||
;
|
||||
(NewLength>K ->
|
||||
First is NewLength-K+1,
|
||||
@ -2545,7 +2560,7 @@ update_current_kbest_answers(K,NewLogProb,Goal) :-
|
||||
; FinalList=NewList, FinalLength=NewLength),
|
||||
FinalList=[NewThres-_|_],
|
||||
nb_setval(problog_threshold,NewThres),
|
||||
assert(current_kbest(NewThres,FinalList,FinalLength))).
|
||||
assertz(current_kbest(NewThres,FinalList,FinalLength))).
|
||||
|
||||
% this fails if there is no variant -> go to second case above
|
||||
update_prob_of_known_answer([OldLogP-OldGoal|List],Goal,NewLogProb,[MaxLogP-OldGoal|List]) :-
|
||||
@ -2683,7 +2698,7 @@ build_trie(Goal, Trie) :-
throw(error('Flag settings not supported by build_trie/2.'))
).

build_trie_supported :- problog_flag(inference,exact).
build_trie_supported :- problog_flag(inference,exact). % PM this can easily be written to avoid creating choice-points
build_trie_supported :- problog_flag(inference,low(_)).
build_trie_supported :- problog_flag(inference,atleast-_-best).
build_trie_supported :- problog_flag(inference,_-best).
@ -2862,7 +2877,7 @@ write_bdd_struct_script(Trie,BDDFile,Variables) :-
Levels = [ROptLevel]
),
% Removed forall here, because it hides 'Variables' from what comes afterwards
once(member(OptLevel, Levels)),
memberchk(OptLevel, Levels),
(
(problog_flag(use_db_trie, true) ->
tries:trie_db_opt_min_prefix(MinPrefix),
@ -2976,7 +2991,7 @@ write_global_bdd_file_line(I,Max) :-
).

write_global_bdd_file_query(I,Max) :-
(I=Max ->
(I=Max -> % PM shouldn't this be instead I =:= Max ?
format("L~q~n",[I])
;
format("L~q,",[I]),
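On the % PM question above: =/2 is unification and =:=/2 is arithmetic comparison. For the integer counters used here both succeed on identical values, but =:=/2 states the numeric intent and also succeeds when the two sides are different representations of the same value. Illustrative queries, not part of the patch:

?- 1 = 1.0.     % fails: the terms differ
?- 1 =:= 1.0.   % succeeds: the values are equal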
@ -3011,8 +3026,8 @@ bdd_par_file(BDDParFile) :-

require(Feature) :-
atom(Feature),
atomic_concat(['problog_required_',Feature],Feature_Required),
atomic_concat([Feature_Required,'_',depth],Feature_Depth),
atom_concat('problog_required_', Feature, Feature_Required),
atom_concat(Feature_Required, '_depth', Feature_Depth),
(required(Feature) ->
b_getval(Feature_Depth,Depth),
Depth1 is Depth+1,
@ -3025,8 +3040,8 @@ require(Feature) :-

unrequire(Feature) :-
atom(Feature),
atomic_concat(['problog_required_',Feature],Feature_Required),
atomic_concat([Feature_Required,'_',depth],Feature_Depth),
atom_concat('problog_required_', Feature, Feature_Required),
atom_concat(Feature_Required, '_depth', Feature_Depth),
b_getval(Feature_Depth,Depth),
(Depth=1 ->
nb_delete(Feature_Required),
@ -3039,7 +3054,7 @@ unrequire(Feature) :-

required(Feature) :-
atom(Feature),
atomic_concat(['problog_required_',Feature],Feature_Required),
atom_concat('problog_required_', Feature, Feature_Required),
catch(b_getval(Feature_Required,Val),error(existence_error(variable,Feature_Required),_),fail),
Val == required.
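The atomic_concat -> atom_concat rewrites above swap a list-based concatenation helper for the ISO builtin atom_concat/3, which joins exactly two atoms. Illustrative query, not part of the patch (edge is just an example feature name):

?- atom_concat(problog_required_, edge, F).
% F = problog_required_edge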
|
||||
|
@ -212,7 +212,8 @@
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
|
||||
:- module(extlists, [open_end_memberchk/2, open_end_add/3, open_end_add_unique/3, open_end_close_end/2]).
|
||||
:- ensure_loaded(library(lists)).
|
||||
|
||||
:- use_module(library(lists), [memberchk/2]).
|
||||
|
||||
open_end_memberchk(_A, []):-!, fail.
|
||||
open_end_memberchk(A, L-E):-
|
||||
|
@ -204,19 +204,20 @@
%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

:-module(flags, [problog_define_flag/4,
problog_define_flag/5,
problog_define_flag/6,
problog_defined_flag/5,
problog_defined_flag_group/1,
set_problog_flag/2,
reset_problog_flags/0,
problog_flag/2]).
:-module(flags, [
problog_define_flag/4,
problog_define_flag/5,
problog_define_flag/6,
problog_defined_flag/5,
problog_defined_flag_group/1,
set_problog_flag/2,
reset_problog_flags/0,
problog_flag/2]).


:-ensure_loaded(gflags).
:-ensure_loaded(os).
:-ensure_loaded(logger).
:- use_module(gflags).
:- use_module(os).
:- use_module(logger).

problog_define_flag(Flag, Type, Description, DefaultValue):-
flag_define(Flag, Type, DefaultValue, Description).
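The ensure_loaded -> use_module switch above (repeated in the other ProbLog modules touched by this patch) turns plain file loading into real module imports. With an explicit import list only the named predicates become visible, which documents the dependency and avoids accidental name clashes. Hedged sketch, not part of the patch:

% only memberchk/2 is imported; the rest of library(lists) stays hidden
:- use_module(library(lists), [memberchk/2]).

flag_known(F, Fs) :- memberchk(F, Fs).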
@ -241,37 +242,39 @@ problog_flag(Flag, Value):-
|
||||
|
||||
reset_problog_flags:- flags_reset.
|
||||
|
||||
:- flag_add_validation_syntactic_sugar(problog_flag_validate_dummy, flag_validate_dummy).
|
||||
:- flag_add_validation_syntactic_sugar(problog_flag_validate_atom, flag_validate_atom).
|
||||
:- flag_add_validation_syntactic_sugar(problog_flag_validate_atomic, flag_validate_atomic).
|
||||
:- flag_add_validation_syntactic_sugar(problog_flag_validate_number, flag_validate_number).
|
||||
:- flag_add_validation_syntactic_sugar(problog_flag_validate_integer, flag_validate_integer).
|
||||
:- flag_add_validation_syntactic_sugar(problog_flag_validate_directory, flag_validate_directory).
|
||||
:- flag_add_validation_syntactic_sugar(problog_flag_validate_file, flag_validate_file).
|
||||
:- flag_add_validation_syntactic_sugar(problog_flag_validate_in_list(L), flag_validate_in_list(L)).
|
||||
:- flag_add_validation_syntactic_sugar(problog_flag_validate_in_interval(I, Type), flag_validate_in_interval(I, Type)).
|
||||
:- flag_add_validation_syntactic_sugar(problog_flag_validate_in_interval_closed([L, U]), flag_validate_in_interval([L, U], number)).
|
||||
:- flag_add_validation_syntactic_sugar(problog_flag_validate_in_interval_open([L, U]), flag_validate_in_interval((L, U), number)).
|
||||
:- flag_add_validation_syntactic_sugar(problog_flag_validate_in_interval_left_open([L, U]), flag_validate_in_interval((L, [U]), number)).
|
||||
:- flag_add_validation_syntactic_sugar(problog_flag_validate_in_interval_right_open([L, U]), flag_validate_in_interval(([L], U), number)).
|
||||
:- flag_add_validation_syntactic_sugar(problog_flag_validate_integer_in_interval_closed([L, U]), flag_validate_in_interval([L, U], integer)).
|
||||
:- flag_add_validation_syntactic_sugar(problog_flag_validate_integer_in_interval_open([L, U]), flag_validate_in_interval((L, U), integer)).
|
||||
:- flag_add_validation_syntactic_sugar(problog_flag_validate_integer_in_interval_left_open([L, U]), flag_validate_in_interval((L, [U]), integer)).
|
||||
:- flag_add_validation_syntactic_sugar(problog_flag_validate_integer_in_interval_right_open([L, U]), flag_validate_in_interval(([L], U), integer)).
|
||||
:- flag_add_validation_syntactic_sugar(problog_flag_validate_float_in_interval_closed([L, U]), flag_validate_in_interval([L, U], float)).
|
||||
:- flag_add_validation_syntactic_sugar(problog_flag_validate_float_in_interval_open([L, U]), flag_validate_in_interval((L, U), float)).
|
||||
:- flag_add_validation_syntactic_sugar(problog_flag_validate_float_in_interval_left_open([L, U]), flag_validate_in_interval((L, [U]), float)).
|
||||
:- flag_add_validation_syntactic_sugar(problog_flag_validate_float_in_interval_right_open([L, U]), flag_validate_in_interval(([L], U), float)).
|
||||
:- flag_add_validation_syntactic_sugar(problog_flag_validate_posnumber, flag_validate_in_interval((0, [+inf]), number)).
|
||||
:- flag_add_validation_syntactic_sugar(problog_flag_validate_posint, flag_validate_in_interval((0, +inf), integer)).
|
||||
:- flag_add_validation_syntactic_sugar(problog_flag_validate_nonegint, flag_validate_in_interval(([0], +inf), integer)).
|
||||
:- flag_add_validation_syntactic_sugar(problog_flag_validate_boolean, flag_validate_in_list([true, false])).
|
||||
:- flag_add_validation_syntactic_sugar(problog_flag_validate_switch, flag_validate_in_list([on, off])).
|
||||
:- flag_add_validation_syntactic_sugar(problog_flag_validate_method, flag_validate_in_list([max, delta, exact, montecarlo, low, kbest])).
|
||||
:- flag_add_validation_syntactic_sugar(problog_flag_validate_aggregate, flag_validate_in_list([sum, prod, soft_prod])).
|
||||
:- flag_add_validation_syntactic_sugar(problog_flag_validate_indomain_0_1_open, flag_validate_in_interval((0, 1), number)).
|
||||
:- flag_add_validation_syntactic_sugar(problog_flag_validate_indomain_0_1_close, flag_validate_in_interval([0, 1], number)).
|
||||
:- flag_add_validation_syntactic_sugar(problog_flag_validate_0to5, flag_validate_in_interval([0, 5], integer)).
|
||||
:- initialization((
|
||||
flag_add_validation_syntactic_sugar(problog_flag_validate_dummy, flag_validate_dummy),
|
||||
flag_add_validation_syntactic_sugar(problog_flag_validate_atom, flag_validate_atom),
|
||||
flag_add_validation_syntactic_sugar(problog_flag_validate_atomic, flag_validate_atomic),
|
||||
flag_add_validation_syntactic_sugar(problog_flag_validate_number, flag_validate_number),
|
||||
flag_add_validation_syntactic_sugar(problog_flag_validate_integer, flag_validate_integer),
|
||||
flag_add_validation_syntactic_sugar(problog_flag_validate_directory, flag_validate_directory),
|
||||
flag_add_validation_syntactic_sugar(problog_flag_validate_file, flag_validate_file),
|
||||
flag_add_validation_syntactic_sugar(problog_flag_validate_in_list(L), flag_validate_in_list(L)),
|
||||
flag_add_validation_syntactic_sugar(problog_flag_validate_in_interval(I, Type), flag_validate_in_interval(I, Type)),
|
||||
flag_add_validation_syntactic_sugar(problog_flag_validate_in_interval_closed([L, U]), flag_validate_in_interval([L, U], number)),
|
||||
flag_add_validation_syntactic_sugar(problog_flag_validate_in_interval_open([L, U]), flag_validate_in_interval((L, U), number)),
|
||||
flag_add_validation_syntactic_sugar(problog_flag_validate_in_interval_left_open([L, U]), flag_validate_in_interval((L, [U]), number)),
|
||||
flag_add_validation_syntactic_sugar(problog_flag_validate_in_interval_right_open([L, U]), flag_validate_in_interval(([L], U), number)),
|
||||
flag_add_validation_syntactic_sugar(problog_flag_validate_integer_in_interval_closed([L, U]), flag_validate_in_interval([L, U], integer)),
|
||||
flag_add_validation_syntactic_sugar(problog_flag_validate_integer_in_interval_open([L, U]), flag_validate_in_interval((L, U), integer)),
|
||||
flag_add_validation_syntactic_sugar(problog_flag_validate_integer_in_interval_left_open([L, U]), flag_validate_in_interval((L, [U]), integer)),
|
||||
flag_add_validation_syntactic_sugar(problog_flag_validate_integer_in_interval_right_open([L, U]), flag_validate_in_interval(([L], U), integer)),
|
||||
flag_add_validation_syntactic_sugar(problog_flag_validate_float_in_interval_closed([L, U]), flag_validate_in_interval([L, U], float)),
|
||||
flag_add_validation_syntactic_sugar(problog_flag_validate_float_in_interval_open([L, U]), flag_validate_in_interval((L, U), float)),
|
||||
flag_add_validation_syntactic_sugar(problog_flag_validate_float_in_interval_left_open([L, U]), flag_validate_in_interval((L, [U]), float)),
|
||||
flag_add_validation_syntactic_sugar(problog_flag_validate_float_in_interval_right_open([L, U]), flag_validate_in_interval(([L], U), float)),
|
||||
flag_add_validation_syntactic_sugar(problog_flag_validate_posnumber, flag_validate_in_interval((0, [+inf]), number)),
|
||||
flag_add_validation_syntactic_sugar(problog_flag_validate_posint, flag_validate_in_interval((0, +inf), integer)),
|
||||
flag_add_validation_syntactic_sugar(problog_flag_validate_nonegint, flag_validate_in_interval(([0], +inf), integer)),
|
||||
flag_add_validation_syntactic_sugar(problog_flag_validate_boolean, flag_validate_in_list([true, false])),
|
||||
flag_add_validation_syntactic_sugar(problog_flag_validate_switch, flag_validate_in_list([on, off])),
|
||||
flag_add_validation_syntactic_sugar(problog_flag_validate_method, flag_validate_in_list([max, delta, exact, montecarlo, low, kbest])),
|
||||
flag_add_validation_syntactic_sugar(problog_flag_validate_aggregate, flag_validate_in_list([sum, prod, soft_prod])),
|
||||
flag_add_validation_syntactic_sugar(problog_flag_validate_indomain_0_1_open, flag_validate_in_interval((0, 1), number)),
|
||||
flag_add_validation_syntactic_sugar(problog_flag_validate_indomain_0_1_close, flag_validate_in_interval([0, 1], number)),
|
||||
flag_add_validation_syntactic_sugar(problog_flag_validate_0to5, flag_validate_in_interval([0, 5], integer))
|
||||
)).
|
||||
|
||||
last_threshold_handler(message, '').
|
||||
last_threshold_handler(validating, _Value).
|
||||
@ -289,6 +292,14 @@ id_stepsize_handler(stored, Value):-
|
||||
|
||||
bdd_file_handler(message, '').
|
||||
bdd_file_handler(validating, _Value).
|
||||
bdd_file_handler(validate, Value):-
|
||||
convert_filename_to_working_path(Value, Path),
|
||||
catch(file_exists(Path), _, fail), file_property(Path, type(regular)), !.
|
||||
bdd_file_handler(validate, Value):-
|
||||
convert_filename_to_working_path(Value, Path),
|
||||
catch((\+ file_exists(Path), tell(Path)), _, fail),
|
||||
told,
|
||||
delete_file(Path).
|
||||
bdd_file_handler(validated, _Value).
|
||||
bdd_file_handler(stored, Value):-
|
||||
atomic_concat(Value, '_probs', ParValue),
|
||||
@ -296,6 +307,19 @@ bdd_file_handler(stored, Value):-
|
||||
atomic_concat(Value, '_res', ResValue),
|
||||
flag_set(bdd_result, ResValue).
|
||||
|
||||
working_file_handler(message, '').
|
||||
working_file_handler(validating, _Value).
|
||||
working_file_handler(validate, Value):-
|
||||
convert_filename_to_working_path(Value, Path),
|
||||
catch(file_exists(Path), _, fail), file_property(Path, type(regular)), !.
|
||||
working_file_handler(validate, Value):-
|
||||
convert_filename_to_working_path(Value, Path),
|
||||
catch((\+ file_exists(Path), tell(Path)), _, fail),
|
||||
told,
|
||||
delete_file(Path).
|
||||
working_file_handler(validated, _Value).
|
||||
working_file_handler(stored, _Value).
|
||||
|
||||
auto_handler(message, 'auto non-zero').
|
||||
auto_handler(validating, Value) :-
|
||||
number(Value),
|
||||
|
@ -245,8 +245,8 @@
flag_get/2,
flags_reset/0]).

:-ensure_loaded(library(lists)).
:-ensure_loaded(library(system)). % for file operations
:- use_module(library(lists), [append/3, memberchk/2, reverse/2]).
:- use_module(library(system), [delete_file/1, file_exists/1, file_property/2, make_directory/1]). % for file operations

flag_define(Flag, Type, DefaultValue, Message):-
flag_define(Flag, general, Type, DefaultValue, flags:true, Message).
@ -259,10 +259,10 @@ flag_define(Flag, Group, Type, DefaultValue, Handler, Message):-
throw(duplicate_flag_definition(flag_define(Flag, Group, Type, DefaultValue, Handler, Message))).

flag_define(Flag, Group, Type, DefaultValue, Handler, Message):-
(catch(call(Type), _, fail)->
(catch(Type, _, fail)->
fail
;
\+ (flag_validation_syntactic_sugar(Type, SyntacticSugar), catch(call(SyntacticSugar), _, fail)),
\+ (flag_validation_syntactic_sugar(Type, SyntacticSugar), catch(SyntacticSugar, _, fail)),
throw(unknown_flag_type(flag_define(Flag, Group, Type, DefaultValue, Handler, Message)))
).
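The catch(call(Type), _, fail) -> catch(Type, _, fail) simplification above (also applied in flag_validate below) drops a redundant call/1: a complete goal held in a variable can be meta-called directly, and call/N is only needed when extra arguments are appended at run time. Illustrative sketch, not part of the patch:

% both lines behave the same for a callable goal G
safe(G) :- catch(G, _Error, fail).
add_args_demo(X) :- call(between(1), 10, X).   % same as between(1, 10, X)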
|
||||
@ -371,13 +371,13 @@ flag_validate(_Flag, Value, Type, M:Handler):-
|
||||
Type =.. LType,
|
||||
append(LType, [Value], LGoal),
|
||||
G =.. LGoal,
|
||||
catch((call(M:GoalValidating), call(G)), _, fail), !.
|
||||
catch((M:GoalValidating, G), _, fail), !.
|
||||
flag_validate(_Flag, Value, Type, _M:Handler):-
|
||||
Handler == true,
|
||||
Type =.. LType,
|
||||
append(LType, [Value], LGoal),
|
||||
G =.. LGoal,
|
||||
catch(call(G), _, fail), !.
|
||||
catch(G, _, fail), !.
|
||||
|
||||
flag_validate(_Flag, Value, SyntacticSugar, M:Handler):-
|
||||
Handler \= true,
|
||||
@ -386,14 +386,14 @@ flag_validate(_Flag, Value, SyntacticSugar, M:Handler):-
|
||||
Type =.. LType,
|
||||
append(LType, [Value], LGoal),
|
||||
G =.. LGoal,
|
||||
catch((call(M:GoalValidating), call(G)), _, fail), !.
|
||||
catch((M:GoalValidating, G), _, fail), !.
|
||||
flag_validate(_Flag, Value, SyntacticSugar, _M:Handler):-
|
||||
Handler == true,
|
||||
flag_validation_syntactic_sugar(SyntacticSugar, Type),
|
||||
Type =.. LType,
|
||||
append(LType, [Value], LGoal),
|
||||
G =.. LGoal,
|
||||
catch(call(G), _, fail), !.
|
||||
catch(G, _, fail), !.
|
||||
flag_validate(Flag, Value, Type, Handler):-
|
||||
(var(Value) ->
|
||||
Value = 'free variable'
|
||||
@ -435,14 +435,14 @@ flag_validate_directory(Value):-
|
||||
flag_validate_directory(Value):-
|
||||
atomic(Value),
|
||||
% fixme : why not inform the user???
|
||||
catch((not(file_exists(Value)), make_directory(Value)), _, fail).
|
||||
catch((\+ file_exists(Value), make_directory(Value)), _, fail).
|
||||
|
||||
flag_validate_file.
|
||||
flag_validate_file(Value):-
|
||||
catch(file_exists(Value), _, fail), file_property(Value, type(regular)), !.
|
||||
flag_validate_file(Value):-
|
||||
atomic(Value),
|
||||
catch((not(file_exists(Value)), tell(Value)), _, fail),
|
||||
catch((\+ file_exists(Value), tell(Value)), _, fail),
|
||||
told,
|
||||
delete_file(Value).
|
||||
|
||||
|
@ -276,7 +276,7 @@
hash_table_display/3,
problog_key_to_tuple/2]).

:- ensure_loaded(library(lists)).
:- use_module(library(lists), [member/2]).
%
% General use predicates
%
@ -299,7 +299,7 @@ get_digits(Num, Digits, Acc):-
%
% Simple Counters
%
:- bb_put(array_count, 1).
:- initialization(bb_put(array_count, 1)).

get_next_array(ID, Name):-
bb_get(array_count, ID),
@ -705,7 +705,7 @@ hash_table_get_elements(RevArray, RevSize, RevSize, Tupples):-
hash_table_get_elements(_RevArray, RevSize, RevSize, []).

hash_table_get_chains(Array, Size, Chains):-
((array_element(Array, Size, ChainID), not(ChainID == 0)) ->
((array_element(Array, Size, ChainID), ChainID \== 0) ->
(integer(ChainID) ->
get_array_name(ChainID, ChainName)
;
|
@ -212,7 +212,8 @@

:- style_check(all).
:- yap_flag(unknown,error).
:- use_module(library(lists)).

:- use_module(library(lists), [member/2, reverse/2, select/3]).

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% intervals_merge(+Interval1,+Interval2,-ResultingInterval)
|
@ -225,10 +225,11 @@
:- yap_flag(unknown,error).
:- style_check(single_var).

:- bb_put(logger_filename,'out.dat').
:- bb_put(logger_delimiter,';').
:- bb_put(logger_variables,[]).

:- initialization((
bb_put(logger_filename,'out.dat'),
bb_put(logger_delimiter,';'),
bb_put(logger_variables,[])
)).

%========================================================================
%= Defines a new variable, possible types are: int, float and time
|
@ -206,9 +206,9 @@
|
||||
|
||||
:- module(mc_DNF_sampling, [problog_dnf_sampling/3]).
|
||||
|
||||
:- ensure_loaded(library(lists)).
|
||||
:- use_module(library(lists), [memberchk/2]).
|
||||
|
||||
:- ensure_loaded(variables).
|
||||
:- use_module(variables).
|
||||
:- use_module(sampling, _, [problog_random/1,
|
||||
problog_convergence_check/6]).
|
||||
|
||||
@ -217,14 +217,16 @@
|
||||
|
||||
:- use_module(os, _, [convert_filename_to_working_path/2]).
|
||||
|
||||
:- ensure_loaded(hash_table).
|
||||
:- use_module(hash_table).
|
||||
|
||||
:- problog_define_flag(search_method, problog_flag_validate_in_list([linear, binary]), 'search method for picking proof', binary, monte_carlo_sampling_dnf).
|
||||
:- problog_define_flag(represent_world, problog_flag_validate_in_list([list, record, array, hash_table]), 'structure that represents sampled world', array, monte_carlo_sampling_dnf).
|
||||
:- initialization((
|
||||
problog_define_flag(search_method, problog_flag_validate_in_list([linear, binary]), 'search method for picking proof', binary, monte_carlo_sampling_dnf),
|
||||
problog_define_flag(represent_world, problog_flag_validate_in_list([list, record, array, hash_table]), 'structure that represents sampled world', array, monte_carlo_sampling_dnf),
|
||||
|
||||
:- problog_var_define(dnf_sampling_time, times, time, messages('DNF Sampling', ':', ' ms')).
|
||||
:- problog_var_define(probability_lower, result, untyped, messages('Lower probability bound', ' = ', '')).
|
||||
:- problog_var_define(probability_upper, result, untyped, messages('Upper probability bound', ' = ', '')).
|
||||
problog_var_define(dnf_sampling_time, times, time, messages('DNF Sampling', ':', ' ms')),
|
||||
problog_var_define(probability_lower, result, untyped, messages('Lower probability bound', ' = ', '')),
|
||||
problog_var_define(probability_upper, result, untyped, messages('Upper probability bound', ' = ', ''))
|
||||
)).
|
||||
|
||||
% problog_independed(T, P):-
|
||||
% tries:trie_traverse_first(T, FirstRef), !,
|
||||
|
@ -2,8 +2,8 @@
|
||||
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
%
|
||||
% $Date: 2010-08-24 15:23:06 +0200 (Tue, 24 Aug 2010) $
% $Revision: 4672 $
% $Date: 2010-09-24 15:54:45 +0200 (Fri, 24 Sep 2010) $
% $Revision: 4822 $
%
% This file is part of ProbLog
% http://dtai.cs.kuleuven.be/problog
@ -212,25 +212,24 @@
convert_filename_to_working_path/2,
convert_filename_to_problog_path/2,
concat_path_with_filename/3,
empty_bdd_directory/1,
empty_output_directory/1,
calc_md5/2]).


% load library modules
:- ensure_loaded(library(system)).
:- use_module(library(system), [exec/3, file_exists/1]).

% load our own modules
:- ensure_loaded(flags).
:- use_module(gflags, _, [flag_get/2]).

:- dynamic [problog_dir/1, problog_working_path/1].
:- dynamic(problog_path/1).
:- dynamic(problog_working_path/1).

set_problog_path(Path):-
retractall(problog_path(_)),
assert(problog_path(Path)).
assertz(problog_path(Path)).

convert_filename_to_working_path(File_Name, Path):-
problog_flag(dir, Dir),
flag_get(dir, Dir),
concat_path_with_filename(Dir, File_Name, Path).

convert_filename_to_problog_path(File_Name, Path):-
@ -253,32 +252,6 @@ concat_path_with_filename(Path, File_Name, Result):-
|
||||
|
||||
atomic_concat([Path_Absolute, Path_Seperator, File_Name], Result).
|
||||
|
||||
%========================================================================
|
||||
%= store the current succes probabilities for training and test examples
|
||||
%=
|
||||
%========================================================================
|
||||
|
||||
empty_bdd_directory(Path) :-
|
||||
ground(Path),
|
||||
|
||||
concat_path_with_filename(Path,'query_*',Files),
|
||||
atomic_concat(['rm -f ',Files],Command),
|
||||
(shell(Command) -> true; true).
|
||||
%========================================================================
|
||||
%= store the current succes probabilities for training and test examples
|
||||
%=
|
||||
%========================================================================
|
||||
|
||||
empty_output_directory(Path) :-
|
||||
ground(Path),
|
||||
|
||||
concat_path_with_filename(Path,'log.dat',F1),
|
||||
concat_path_with_filename(Path,'factprobs_*.pl',F2),
|
||||
concat_path_with_filename(Path,'predictions_*.pl',F3),
|
||||
|
||||
atomic_concat(['rm -f "', F1, '" "', F2, '" "', F3, '"'],Command),
|
||||
(shell(Command) -> true; true).
|
||||
|
||||
%========================================================================
|
||||
%= Calculate the MD5 checksum of +Filename by calling md5sum
|
||||
%= in case m5sum is not installed, try md5, otherwise fail
|
||||
@ -307,7 +280,7 @@ calc_md5_intern(Filename,Command,MD5) :-
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
( % read 32 Bytes from stdout of process
|
||||
repeat,
|
||||
get0(S,C),
|
||||
get_code(S,C),
|
||||
|
||||
(
|
||||
C== -1
|
||||
|
@ -225,11 +225,11 @@
|
||||
problog_help/0]).
|
||||
|
||||
% load library modules
|
||||
:- ensure_loaded(library(lists)).
|
||||
:- use_module(library(lists), [member/2]).
|
||||
|
||||
% load our own modules
|
||||
:- ensure_loaded(flags).
|
||||
:- ensure_loaded(variables).
|
||||
:- use_module(flags).
|
||||
:- use_module(variables).
|
||||
|
||||
|
||||
% size, line_char, line_char_bold
|
||||
|
232
packages/ProbLog/problog/print_learning.yap
Normal file
232
packages/ProbLog/problog/print_learning.yap
Normal file
@ -0,0 +1,232 @@
|
||||
%%% -*- Mode: Prolog; -*-
|
||||
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
%
|
||||
% $Date: 2010-09-23 11:13:10 +0200 (Thu, 23 Sep 2010) $
|
||||
% $Revision: 4804 $
|
||||
%
|
||||
% This file is part of ProbLog
|
||||
% http://dtai.cs.kuleuven.be/problog
|
||||
%
|
||||
% ProbLog was developed at Katholieke Universiteit Leuven
|
||||
%
|
||||
% Copyright 2008, 2009, 2010
|
||||
% Katholieke Universiteit Leuven
|
||||
%
|
||||
% Main authors of this file:
|
||||
% Bernd Gutmann
|
||||
%
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
%
|
||||
% Artistic License 2.0
|
||||
%
|
||||
% Copyright (c) 2000-2006, The Perl Foundation.
|
||||
%
|
||||
% Everyone is permitted to copy and distribute verbatim copies of this
|
||||
% license document, but changing it is not allowed. Preamble
|
||||
%
|
||||
% This license establishes the terms under which a given free software
|
||||
% Package may be copied, modified, distributed, and/or
|
||||
% redistributed. The intent is that the Copyright Holder maintains some
|
||||
% artistic control over the development of that Package while still
|
||||
% keeping the Package available as open source and free software.
|
||||
%
|
||||
% You are always permitted to make arrangements wholly outside of this
|
||||
% license directly with the Copyright Holder of a given Package. If the
|
||||
% terms of this license do not permit the full use that you propose to
|
||||
% make of the Package, you should contact the Copyright Holder and seek
|
||||
% a different licensing arrangement. Definitions
|
||||
%
|
||||
% "Copyright Holder" means the individual(s) or organization(s) named in
|
||||
% the copyright notice for the entire Package.
|
||||
%
|
||||
% "Contributor" means any party that has contributed code or other
|
||||
% material to the Package, in accordance with the Copyright Holder's
|
||||
% procedures.
|
||||
%
|
||||
% "You" and "your" means any person who would like to copy, distribute,
|
||||
% or modify the Package.
|
||||
%
|
||||
% "Package" means the collection of files distributed by the Copyright
|
||||
% Holder, and derivatives of that collection and/or of those files. A
|
||||
% given Package may consist of either the Standard Version, or a
|
||||
% Modified Version.
|
||||
%
|
||||
% "Distribute" means providing a copy of the Package or making it
|
||||
% accessible to anyone else, or in the case of a company or
|
||||
% organization, to others outside of your company or organization.
|
||||
%
|
||||
% "Distributor Fee" means any fee that you charge for Distributing this
|
||||
% Package or providing support for this Package to another party. It
|
||||
% does not mean licensing fees.
|
||||
%
|
||||
% "Standard Version" refers to the Package if it has not been modified,
|
||||
% or has been modified only in ways explicitly requested by the
|
||||
% Copyright Holder.
|
||||
%
|
||||
% "Modified Version" means the Package, if it has been changed, and such
|
||||
% changes were not explicitly requested by the Copyright Holder.
|
||||
%
|
||||
% "Original License" means this Artistic License as Distributed with the
|
||||
% Standard Version of the Package, in its current version or as it may
|
||||
% be modified by The Perl Foundation in the future.
|
||||
%
|
||||
% "Source" form means the source code, documentation source, and
|
||||
% configuration files for the Package.
|
||||
%
|
||||
% "Compiled" form means the compiled bytecode, object code, binary, or
|
||||
% any other form resulting from mechanical transformation or translation
|
||||
% of the Source form.
|
||||
%
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
%
|
||||
% Permission for Use and Modification Without Distribution
|
||||
%
|
||||
% (1) You are permitted to use the Standard Version and create and use
|
||||
% Modified Versions for any purpose without restriction, provided that
|
||||
% you do not Distribute the Modified Version.
|
||||
%
|
||||
% Permissions for Redistribution of the Standard Version
|
||||
%
|
||||
% (2) You may Distribute verbatim copies of the Source form of the
|
||||
% Standard Version of this Package in any medium without restriction,
|
||||
% either gratis or for a Distributor Fee, provided that you duplicate
|
||||
% all of the original copyright notices and associated disclaimers. At
|
||||
% your discretion, such verbatim copies may or may not include a
|
||||
% Compiled form of the Package.
|
||||
%
|
||||
% (3) You may apply any bug fixes, portability changes, and other
|
||||
% modifications made available from the Copyright Holder. The resulting
|
||||
% Package will still be considered the Standard Version, and as such
|
||||
% will be subject to the Original License.
|
||||
%
|
||||
% Distribution of Modified Versions of the Package as Source
|
||||
%
|
||||
% (4) You may Distribute your Modified Version as Source (either gratis
|
||||
% or for a Distributor Fee, and with or without a Compiled form of the
|
||||
% Modified Version) provided that you clearly document how it differs
|
||||
% from the Standard Version, including, but not limited to, documenting
|
||||
% any non-standard features, executables, or modules, and provided that
|
||||
% you do at least ONE of the following:
|
||||
%
|
||||
% (a) make the Modified Version available to the Copyright Holder of the
|
||||
% Standard Version, under the Original License, so that the Copyright
|
||||
% Holder may include your modifications in the Standard Version. (b)
|
||||
% ensure that installation of your Modified Version does not prevent the
|
||||
% user installing or running the Standard Version. In addition, the
|
||||
% modified Version must bear a name that is different from the name of
|
||||
% the Standard Version. (c) allow anyone who receives a copy of the
|
||||
% Modified Version to make the Source form of the Modified Version
|
||||
% available to others under (i) the Original License or (ii) a license
|
||||
% that permits the licensee to freely copy, modify and redistribute the
|
||||
% Modified Version using the same licensing terms that apply to the copy
|
||||
% that the licensee received, and requires that the Source form of the
|
||||
% Modified Version, and of any works derived from it, be made freely
|
||||
% available in that license fees are prohibited but Distributor Fees are
|
||||
% allowed.
|
||||
%
|
||||
% Distribution of Compiled Forms of the Standard Version or
|
||||
% Modified Versions without the Source
|
||||
%
|
||||
% (5) You may Distribute Compiled forms of the Standard Version without
|
||||
% the Source, provided that you include complete instructions on how to
|
||||
% get the Source of the Standard Version. Such instructions must be
|
||||
% valid at the time of your distribution. If these instructions, at any
|
||||
% time while you are carrying out such distribution, become invalid, you
|
||||
% must provide new instructions on demand or cease further
|
||||
% distribution. If you provide valid instructions or cease distribution
|
||||
% within thirty days after you become aware that the instructions are
|
||||
% invalid, then you do not forfeit any of your rights under this
|
||||
% license.
|
||||
%
|
||||
% (6) You may Distribute a Modified Version in Compiled form without the
|
||||
% Source, provided that you comply with Section 4 with respect to the
|
||||
% Source of the Modified Version.
|
||||
%
|
||||
% Aggregating or Linking the Package
|
||||
%
|
||||
% (7) You may aggregate the Package (either the Standard Version or
|
||||
% Modified Version) with other packages and Distribute the resulting
|
||||
% aggregation provided that you do not charge a licensing fee for the
|
||||
% Package. Distributor Fees are permitted, and licensing fees for other
|
||||
% components in the aggregation are permitted. The terms of this license
|
||||
% apply to the use and Distribution of the Standard or Modified Versions
|
||||
% as included in the aggregation.
|
||||
%
|
||||
% (8) You are permitted to link Modified and Standard Versions with
|
||||
% other works, to embed the Package in a larger work of your own, or to
|
||||
% build stand-alone binary or bytecode versions of applications that
|
||||
% include the Package, and Distribute the result without restriction,
|
||||
% provided the result does not expose a direct interface to the Package.
|
||||
%
|
||||
% Items That are Not Considered Part of a Modified Version
|
||||
%
|
||||
% (9) Works (including, but not limited to, modules and scripts) that
|
||||
% merely extend or make use of the Package, do not, by themselves, cause
|
||||
% the Package to be a Modified Version. In addition, such works are not
|
||||
% considered parts of the Package itself, and are not subject to the
|
||||
% terms of this license.
|
||||
%
|
||||
% General Provisions
|
||||
%
|
||||
% (10) Any use, modification, and distribution of the Standard or
|
||||
% Modified Versions is governed by this Artistic License. By using,
|
||||
% modifying or distributing the Package, you accept this license. Do not
|
||||
% use, modify, or distribute the Package, if you do not accept this
|
||||
% license.
|
||||
%
|
||||
% (11) If your Modified Version has been derived from a Modified Version
|
||||
% made by someone other than you, you are nevertheless required to
|
||||
% ensure that your Modified Version complies with the requirements of
|
||||
% this license.
|
||||
%
|
||||
% (12) This license does not grant you the right to use any trademark,
|
||||
% service mark, tradename, or logo of the Copyright Holder.
|
||||
%
|
||||
% (13) This license includes the non-exclusive, worldwide,
|
||||
% free-of-charge patent license to make, have made, use, offer to sell,
|
||||
% sell, import and otherwise transfer the Package with respect to any
|
||||
% patent claims licensable by the Copyright Holder that are necessarily
|
||||
% infringed by the Package. If you institute patent litigation
|
||||
% (including a cross-claim or counterclaim) against any party alleging
|
||||
% that the Package constitutes direct or contributory patent
|
||||
% infringement, then this Artistic License to you shall terminate on the
|
||||
% date that such litigation is filed.
|
||||
%
|
||||
% (14) Disclaimer of Warranty: THE PACKAGE IS PROVIDED BY THE COPYRIGHT
|
||||
% HOLDER AND CONTRIBUTORS "AS IS' AND WITHOUT ANY EXPRESS OR IMPLIED
|
||||
% WARRANTIES. THE IMPLIED WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
|
||||
% PARTICULAR PURPOSE, OR NON-INFRINGEMENT ARE DISCLAIMED TO THE EXTENT
|
||||
% PERMITTED BY YOUR LOCAL LAW. UNLESS REQUIRED BY LAW, NO COPYRIGHT
|
||||
% HOLDER OR CONTRIBUTOR WILL BE LIABLE FOR ANY DIRECT, INDIRECT,
|
||||
% INCIDENTAL, OR CONSEQUENTIAL DAMAGES ARISING IN ANY WAY OUT OF THE USE
|
||||
% OF THE PACKAGE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
%
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
|
||||
%%%%%%%%
|
||||
% Collected OS depended instructions
|
||||
%%%%%%%%
|
||||
:- module(print_learning, [format_learning/3]).
|
||||
|
||||
|
||||
% load our own modules
|
||||
:- use_module(flags).
|
||||
|
||||
:- initialization(problog_define_flag(verbosity_learning, problog_flag_validate_0to5,'How much output shall be given (0=nothing,5=all)',5, learning_general)).
|
||||
|
||||
|
||||
%========================================================================
|
||||
%=
|
||||
%=
|
||||
%=
|
||||
%========================================================================
|
||||
|
||||
format_learning(Level,String,Arguments) :-
|
||||
problog_flag(verbosity_learning,V_Level),
|
||||
V_Level >= Level,
|
||||
!,
|
||||
format(String,Arguments),
|
||||
flush_output(user).
|
||||
format_learning(_,_,_) :-
|
||||
true.
|
@ -212,14 +212,14 @@
|
||||
|
||||
:- use_module(os, _, [convert_filename_to_working_path/2]).
|
||||
|
||||
:- ensure_loaded(library(random)).
|
||||
:- use_module(library(random)).
|
||||
|
||||
:- problog_define_flag(mc_batchsize, problog_flag_validate_posint, 'number of samples before update in montecarlo', 1000, monte_carlo_sampling).
|
||||
|
||||
:- problog_define_flag(min_mc_samples, problog_flag_validate_nonegint, 'minimum number of samples before to converge', 0, monte_carlo_sampling).
|
||||
:- problog_define_flag(max_mc_samples, problog_flag_validate_nonegint, 'maximum number of samples waiting to converge', 1000000, monte_carlo_sampling).
|
||||
|
||||
:- problog_define_flag(randomizer, problog_flag_validate_in_list([repeatable, nonrepeatable]), 'whether the random numbers are repeatable or not', repeatable, monte_carlo_sampling).
|
||||
:- initialization((
|
||||
problog_define_flag(mc_batchsize, problog_flag_validate_posint, 'number of samples before update in montecarlo', 1000, monte_carlo_sampling),
|
||||
problog_define_flag(min_mc_samples, problog_flag_validate_nonegint, 'minimum number of samples before to converge', 0, monte_carlo_sampling),
|
||||
problog_define_flag(max_mc_samples, problog_flag_validate_nonegint, 'maximum number of samples waiting to converge', 1000000, monte_carlo_sampling),
|
||||
problog_define_flag(randomizer, problog_flag_validate_in_list([repeatable, nonrepeatable]), 'whether the random numbers are repeatable or not', repeatable, monte_carlo_sampling)
|
||||
)).
|
||||
|
||||
problog_convergence_check(Time, P, SamplesSoFar, Delta, Epsilon, Converged):-
|
||||
Epsilon is 2.0 * sqrt(P * abs(1.0 - P) / SamplesSoFar),
|
||||
|
@ -233,7 +233,7 @@
|
||||
problog_tabling_get_negated_from_id/2,
|
||||
op(1150, fx, problog_table)]).
|
||||
|
||||
:- ensure_loaded(library(lists)).
|
||||
:- use_module(library(lists), [memberchk/2]).
|
||||
|
||||
:- use_module(extlists, _, [open_end_memberchk/2,
|
||||
open_end_add/3,
|
||||
@ -248,11 +248,18 @@
|
||||
empty_ptree/1]).
|
||||
|
||||
:- op( 1150, fx, problog_table ).
:- meta_predicate problog_table(:).
:- meta_predicate problog_neg(:).
:- dynamic problog_tabled/1, has_synonyms/0, problog_tabling_retain/1.
:- problog_define_flag(max_depth, problog_flag_validate_integer, 'maximum proof depth', -1).
:- problog_define_flag(retain_tables, problog_flag_validate_boolean, 'retain tables after query', false).

:- meta_predicate(problog_table(0)).
:- meta_predicate(problog_neg(0)).

:- dynamic(problog_tabled/1).
:- dynamic(has_synonyms/0).
:- dynamic(problog_tabling_retain/1).

:- initialization((
problog_define_flag(max_depth, problog_flag_validate_integer, 'maximum proof depth', -1),
problog_define_flag(retain_tables, problog_flag_validate_boolean, 'retain tables after query', false)
)).

init_tabling :-
nb_setval(problog_current_depth, 0),
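In the rewritten declarations above the meta-argument specifier changes from ':' to 0: ':' only marks the argument as module-sensitive, while 0 additionally says the argument is meta-called as a goal, so module qualification and goal expansion are applied correctly. Hedged sketch, not part of the patch:

:- meta_predicate(twice(0)).
twice(G) :- call(G), call(G).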
@ -277,7 +284,7 @@ clear_tabling:-
|
||||
clear_tabling.
|
||||
|
||||
retain_tabling:-
|
||||
forall(problog_chktabled(_, Trie), assert(problog_tabling_retain(Trie))).
|
||||
forall(problog_chktabled(_, Trie), assertz(problog_tabling_retain(Trie))).
|
||||
|
||||
clear_retained_tables:-
|
||||
forall(problog_tabling_retain(Trie), delete_ptree(Trie)),
|
||||
@ -311,7 +318,7 @@ problog_table((P1, P2), M) :-
|
||||
problog_table(Name/Arity, Module) :-
|
||||
makeargs(Arity, Args),
|
||||
Head =.. [Name|Args],
|
||||
not(predicate_property(Module:Head, dynamic)), !,
|
||||
\+ predicate_property(Module:Head, dynamic), !,
|
||||
throw(error('problog_table: Problog tabling currently requires the predicate to be declared dynamic and compiles it to static.')).
|
||||
problog_table(Name/Arity, Module) :-
|
||||
makeargs(Arity, Args),
|
||||
@ -322,7 +329,7 @@ problog_table(Name/Arity, Module) :-
|
||||
|
||||
% Monte carlo tabling
|
||||
table(Module:MCName/Arity),
|
||||
assert(problog_tabled(Module:Name/Arity)),
|
||||
assertz(problog_tabled(Module:Name/Arity)),
|
||||
|
||||
findall(_,(
|
||||
OriginalPred =.. [OriginalName|Args],
|
||||
@ -334,7 +341,7 @@ problog_table(Name/Arity, Module) :-
|
||||
OriginalPred =.. [OriginalName|Args],
|
||||
MCPred =.. [MCName|Args],
|
||||
ExactPred =.. [ExactName|Args],
|
||||
assert(Module:(
|
||||
assertz(Module:(
|
||||
Head:-
|
||||
(problog:problog_control(check, exact) ->
|
||||
ExactPred
|
||||
@ -361,7 +368,7 @@ problog_table(Name/Arity, Module) :-
|
||||
Finished
|
||||
),
|
||||
b_getval(problog_current_proof, IDs),
|
||||
not(open_end_memberchk(not(t(Hash)), IDs)),
|
||||
\+ open_end_memberchk(not(t(Hash)), IDs),
|
||||
open_end_add_unique(t(Hash), IDs, NIDs),
|
||||
b_setval(problog_current_proof, NIDs)
|
||||
;
|
||||
@ -413,7 +420,7 @@ problog_table(Name/Arity, Module) :-
|
||||
delete_ptree(SuspTrie)
|
||||
),
|
||||
b_setval(CurrentControlTrie, OCurTrie),
|
||||
not(open_end_memberchk(not(t(Hash)), OIDs)),
|
||||
\+ open_end_memberchk(not(t(Hash)), OIDs),
|
||||
open_end_add_unique(t(Hash), OIDs, NOIDs),
|
||||
b_setval(problog_current_proof, NOIDs)
|
||||
)
|
||||
@ -435,8 +442,8 @@ problog_abolish_table(M:P/A):-
|
||||
problog_neg(M:G):-
|
||||
problog:problog_control(check, exact),
|
||||
functor(G, Name, Arity),
|
||||
not(problog_tabled(M:Name/Arity)),
|
||||
not(problog:problog_predicate(Name, Arity)),
|
||||
\+ problog_tabled(M:Name/Arity),
|
||||
\+ problog:problog_predicate(Name, Arity),
|
||||
throw(problog_neg_error('Error: goal must be dynamic and tabled', M:G)).
|
||||
problog_neg(M:G):-
|
||||
% exact inference
|
||||
@ -446,20 +453,20 @@ problog_neg(M:G):-
|
||||
M:G,
|
||||
b_getval(problog_current_proof, L),
|
||||
open_end_close_end(L, [Trie]),
|
||||
not(open_end_memberchk(Trie, IDs)),
|
||||
\+ open_end_memberchk(Trie, IDs),
|
||||
open_end_add_unique(not(Trie), IDs, NIDs),
|
||||
b_setval(problog_current_proof, NIDs).
|
||||
problog_neg(M:G):-
|
||||
% monte carlo sampling
|
||||
problog:problog_control(check, mc),
|
||||
not(M:G).
|
||||
\+ M:G.
|
||||
|
||||
% This predicate assigns a synonym for negation that means: NotName = problog_neg(Name)
problog_tabling_negated_synonym(Name, NotName):-
recorded(problog_table_synonyms, negated(Name, NotName), _), !.
problog_tabling_negated_synonym(Name, NotName):-
retractall(has_synonyms),
assert(has_synonyms),
assertz(has_synonyms),
recordz(problog_table_synonyms, negated(Name, NotName), _).

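% Illustration only (predicate names are hypothetical, not from this diff):
%
%   ?- problog_tabling_negated_synonym(path, no_path).
%
% records negated(path, no_path) under problog_table_synonyms, marking no_path
% as the negated synonym of path, and asserts has_synonyms exactly once so
% later code knows such synonym pairs exist.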
problog_tabling_get_negated_from_pred(Pred, Ref):-
|
||||
|
@ -214,8 +214,8 @@
|
||||
:- yap_flag(unknown,error).
|
||||
:- style_check(single_var).
|
||||
|
||||
:- dynamic timer/2.
|
||||
:- dynamic timer_paused/2.
|
||||
:- dynamic(timer/2).
|
||||
:- dynamic(timer_paused/2).
|
||||
|
||||
|
||||
timer_start(Name) :-
|
||||
@ -225,7 +225,7 @@ timer_start(Name) :-
|
||||
throw(timer_already_started(timer_start(Name)));
|
||||
|
||||
statistics(walltime,[StartTime,_]),
|
||||
assert(timer(Name,StartTime))
|
||||
assertz(timer(Name,StartTime))
|
||||
).
|
||||
|
||||
timer_stop(Name,Duration) :-
|
||||
@ -244,7 +244,7 @@ timer_pause(Name) :-
|
||||
->
|
||||
statistics(walltime,[StopTime,_]),
|
||||
Duration is StopTime-StartTime,
|
||||
assert(timer_paused(Name,Duration));
|
||||
assertz(timer_paused(Name,Duration));
|
||||
|
||||
throw(timer_not_started(timer_pause(Name)))
|
||||
).
|
||||
@ -255,7 +255,7 @@ timer_pause(Name, Duration) :-
|
||||
->
|
||||
statistics(walltime,[StopTime,_]),
|
||||
Duration is StopTime-StartTime,
|
||||
assert(timer_paused(Name,Duration));
|
||||
assertz(timer_paused(Name,Duration));
|
||||
|
||||
throw(timer_not_started(timer_pause(Name)))
|
||||
).
|
||||
@ -266,7 +266,7 @@ timer_resume(Name):-
->
statistics(walltime,[ResumeTime,_]),
CorrectedStartTime is ResumeTime-Duration,
assert(timer(Name,CorrectedStartTime));
assertz(timer(Name,CorrectedStartTime));

throw(timer_not_paused(timer_resume(Name)))
).
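% Usage sketch for the timer API touched above (walltimes come from
% statistics(walltime,...), i.e. milliseconds; some_goal/0 is hypothetical):
%
%   ?- timer_start(bdd_build),
%      some_goal,
%      timer_pause(bdd_build),      % elapsed time saved in timer_paused/2
%      timer_resume(bdd_build),     % restarts with a corrected start time
%      some_goal,
%      timer_stop(bdd_build, Duration).
%
% As the hunks show, timer_start/1 throws timer_already_started/1 if the name
% is in use, timer_pause/1 throws timer_not_started/1, and timer_resume/1
% throws timer_not_paused/1 when misused.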
|
@ -245,13 +245,13 @@
|
||||
]).
|
||||
|
||||
% load library modules
|
||||
:- ensure_loaded(library(tries)).
|
||||
:- ensure_loaded(library(lists)).
|
||||
:- ensure_loaded(library(system)).
|
||||
:- ensure_loaded(library(ordsets)).
|
||||
:- use_module(library(tries)).
|
||||
:- use_module(library(lists), [append/3, member/2, memberchk/2]).
|
||||
:- use_module(library(system), [delete_file/1, shell/1]).
|
||||
:- use_module(library(ordsets), [ord_intersection/3, ord_union/3]).
|
||||
|
||||
% load our own modules
|
||||
:- ensure_loaded(flags).
|
||||
:- use_module(flags).
|
||||
|
||||
% switch on all tests to reduce bug searching time
|
||||
:- style_check(all).
|
||||
@ -259,28 +259,32 @@
|
||||
|
||||
|
||||
% this is a test to determine whether YAP provides the needed trie library
|
||||
:- current_predicate(tries:trie_disable_hash/0)
|
||||
->
|
||||
trie_disable_hash;
|
||||
print_message(warning,'The predicate tries:trie_disable_hash/0 does not exist. Please update trie library.').
|
||||
:- initialization(
|
||||
( predicate_property(trie_disable_hash, imported_from(tries)) ->
|
||||
trie_disable_hash
|
||||
; print_message(warning,'The predicate tries:trie_disable_hash/0 does not exist. Please update trie library.')
|
||||
)
|
||||
).
|
||||
|
||||
%%%%%%%%%%%%%%%%%%%%%%%
|
||||
% Define module flags
|
||||
%%%%%%%%%%%%%%%%%%%%%%%
|
||||
|
||||
:- problog_define_flag(use_db_trie, problog_flag_validate_boolean, 'use the builtin trie 2 trie transformation', false).
|
||||
:- problog_define_flag(db_trie_opt_lvl, problog_flag_validate_integer, 'optimization level for the trie 2 trie transformation', 0).
|
||||
:- problog_define_flag(compare_opt_lvl, problog_flag_validate_boolean, 'comparison mode for optimization level', false).
|
||||
:- problog_define_flag(db_min_prefix, problog_flag_validate_integer, 'minimum size of prefix for dbtrie to optimize', 2).
|
||||
:- problog_define_flag(use_naive_trie, problog_flag_validate_boolean, 'use the naive algorithm to generate bdd scripts', false).
|
||||
:- problog_define_flag(use_old_trie, problog_flag_validate_boolean, 'use the old trie 2 trie transformation no nested', true).
|
||||
:- problog_define_flag(use_dec_trie, problog_flag_validate_boolean, 'use the decomposition method', false).
|
||||
:- problog_define_flag(subset_check, problog_flag_validate_boolean, 'perform subset check in nested tries', true).
|
||||
:- problog_define_flag(deref_terms, problog_flag_validate_boolean, 'deref BDD terms after last use', false).
|
||||
|
||||
:- problog_define_flag(trie_preprocess, problog_flag_validate_boolean, 'perform a preprocess step to nested tries', false).
|
||||
:- problog_define_flag(refine_anclst, problog_flag_validate_boolean, 'refine the ancestor list with their childs', false).
|
||||
:- problog_define_flag(anclst_represent,problog_flag_validate_in_list([list, integer]), 'represent the ancestor list', list).
|
||||
:- initialization((
|
||||
problog_define_flag(use_db_trie, problog_flag_validate_boolean, 'use the builtin trie 2 trie transformation', false),
|
||||
problog_define_flag(db_trie_opt_lvl, problog_flag_validate_integer, 'optimization level for the trie 2 trie transformation', 0),
|
||||
problog_define_flag(compare_opt_lvl, problog_flag_validate_boolean, 'comparison mode for optimization level', false),
|
||||
problog_define_flag(db_min_prefix, problog_flag_validate_integer, 'minimum size of prefix for dbtrie to optimize', 2),
|
||||
problog_define_flag(use_naive_trie, problog_flag_validate_boolean, 'use the naive algorithm to generate bdd scripts', false),
|
||||
problog_define_flag(use_old_trie, problog_flag_validate_boolean, 'use the old trie 2 trie transformation no nested', true),
|
||||
problog_define_flag(use_dec_trie, problog_flag_validate_boolean, 'use the decomposition method', false),
|
||||
problog_define_flag(subset_check, problog_flag_validate_boolean, 'perform subset check in nested tries', true),
|
||||
problog_define_flag(deref_terms, problog_flag_validate_boolean, 'deref BDD terms after last use', false),
|
||||
|
||||
problog_define_flag(trie_preprocess, problog_flag_validate_boolean, 'perform a preprocess step to nested tries', false),
|
||||
problog_define_flag(refine_anclst, problog_flag_validate_boolean, 'refine the ancestor list with their childs', false),
|
||||
problog_define_flag(anclst_represent,problog_flag_validate_in_list([list, integer]), 'represent the ancestor list', list)
|
||||
)).
|
||||
|
||||
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
@ -419,6 +423,7 @@ merge_ptree(T1, T2, T3) :-
|
||||
% - ptree_decomposition -> ptree_decomposition_struct
|
||||
% - bdd_ptree_script -> bdd_struct_ptree_script
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
:- dynamic(c_num/1).
|
||||
|
||||
bdd_struct_ptree(Trie, FileBDD, Variables) :-
|
||||
bdd_struct_ptree_script(Trie, FileBDD, Variables),
|
||||
@ -434,7 +439,7 @@ bdd_struct_ptree_script(Trie, FileBDD, Variables) :-
|
||||
edges_ptree(Trie, Variables),
|
||||
name_vars(Variables), % expected by output_compressed_script/1?
|
||||
length(Variables, VarCount),
|
||||
assert(c_num(1)),
|
||||
assertz(c_num(1)),
|
||||
bdd_pt(Trie, CT),
|
||||
c_num(NN),
|
||||
IntermediateSteps is NN - 1,
|
||||
@ -524,7 +529,7 @@ trie_to_bdd_struct_trie(A, B, OutputFile, OptimizationLevel, Variables) :-
|
||||
|
||||
nested_trie_to_bdd_struct_trie(A, B, OutputFile, OptimizationLevel, Variables):-
|
||||
trie_nested_to_depth_breadth_trie(A, B, LL, OptimizationLevel, problog:problog_chktabled),
|
||||
(islabel(LL) ->
|
||||
(is_label(LL) ->
|
||||
retractall(deref(_,_)),
|
||||
(problog_flag(deref_terms, true) ->
|
||||
asserta(deref(LL,no)),
|
||||
@ -580,7 +585,7 @@ ptree_decomposition_struct(Trie, BDDFileName, Variables) :-
|
||||
length(Variables, VarCnt),
|
||||
tell(TmpFile1),
|
||||
decompose_trie(Trie, Variables, L),
|
||||
(islabel(L)->
|
||||
(is_label(L)->
|
||||
atomic_concat('L', LCnt, L),
|
||||
write(L),nl
|
||||
;
|
||||
@ -642,7 +647,7 @@ bdd_ptree_script(Trie, FileBDD, FileParam) :-
|
||||
|
||||
told,
|
||||
length(Edges, VarCount),
|
||||
assert(c_num(1)),
|
||||
assertz(c_num(1)),
|
||||
bdd_pt(Trie, CT),
|
||||
c_num(NN),
|
||||
IntermediateSteps is NN - 1,
|
||||
@ -734,13 +739,13 @@ bdd_vars_script_intern2(A) :-
|
||||
bdd_pt(Trie, false) :-
|
||||
empty_ptree(Trie),
|
||||
!,
|
||||
once(retractall(c_num(_))),
|
||||
once(assert(c_num(2))).
|
||||
retractall(c_num(_)),
|
||||
assertz(c_num(2)).
|
||||
bdd_pt(Trie, true) :-
|
||||
trie_check_entry(Trie, [true], _),
|
||||
!,
|
||||
once(retractall(c_num(_))),
|
||||
once(assert(c_num(2))).
|
||||
retractall(c_num(_)),
|
||||
assertz(c_num(2)).
|
||||
|
||||
% general case: transform trie to nested tree structure for compression
|
||||
bdd_pt(Trie, CT) :-
|
||||
@ -976,7 +981,7 @@ format_compression_script([A, B|C]) :-
|
||||
get_next_name(Name) :-
|
||||
retract(c_num(N)),
|
||||
NN is N + 1,
|
||||
assert(c_num(NN)),
|
||||
assertz(c_num(NN)),
|
||||
atomic_concat('L', N, Name).
|
||||
|
||||
% create BDD-var as fact id prefixed by x
|
||||
@ -1017,7 +1022,7 @@ statistics_ptree:-
|
||||
write('--------------------------------'),nl.
|
||||
|
||||
|
||||
:- dynamic nested_ptree_printed/1.
|
||||
:- dynamic(nested_ptree_printed/1).
|
||||
|
||||
print_nested_ptree(Trie):-
|
||||
retractall(nested_ptree_printed(_)),
|
||||
@ -1029,7 +1034,7 @@ print_nested_ptree(Trie, Level, Space):-
|
||||
spacy_print(begin(t(Trie)), Level, Space),
|
||||
fail.
|
||||
print_nested_ptree(Trie, Level, Space):-
|
||||
assert(nested_ptree_printed(Trie)),
|
||||
assertz(nested_ptree_printed(Trie)),
|
||||
trie_path(Trie, Path),
|
||||
NewLevel is Level + 1,
|
||||
spacy_print(Path, NewLevel, Space),
|
||||
@ -1051,9 +1056,9 @@ spacy_print(Msg, Level, Space):-
|
||||
|
||||
% Theo Naive method works with Nested Trie to BDD Script
|
||||
|
||||
:-dynamic(get_used_vars/2).
|
||||
:-dynamic(generated_trie/2).
|
||||
:-dynamic(next_intermediate_step/1).
|
||||
:- dynamic(get_used_vars/2).
|
||||
:- dynamic(generated_trie/2).
|
||||
:- dynamic(next_intermediate_step/1).
|
||||
|
||||
%
|
||||
% This needs to be modified
|
||||
@ -1116,7 +1121,7 @@ generate_BDD_from_trie(Trie, TrieInter, Stream):-
|
||||
get_next_intermediate_step(TrieInter),
|
||||
write_bdd_line(OrLineTerms, TrieInter, '+', Stream)
|
||||
),
|
||||
assert(generated_trie(Trie, TrieInter)).
|
||||
assertz(generated_trie(Trie, TrieInter)).
|
||||
|
||||
write_bdd_line([], _LineInter, _Operator, _Stream):-!.
|
||||
write_bdd_line(LineTerms, LineInter, Operator, Stream):-
|
||||
@ -1171,13 +1176,13 @@ bddvars_to_script([H|T], Stream):-
|
||||
bddvars_to_script(T, Stream).
|
||||
|
||||
get_next_intermediate_step('L1'):-
|
||||
not(clause(next_intermediate_step(_), _)), !,
|
||||
assert(next_intermediate_step(2)).
|
||||
\+ clause(next_intermediate_step(_), _), !,
|
||||
assertz(next_intermediate_step(2)).
|
||||
get_next_intermediate_step(Inter):-
|
||||
next_intermediate_step(InterStep),
|
||||
retract(next_intermediate_step(InterStep)),
|
||||
NextInterStep is InterStep + 1,
|
||||
assert(next_intermediate_step(NextInterStep)),
|
||||
assertz(next_intermediate_step(NextInterStep)),
|
||||
atomic_concat(['L', InterStep], Inter).
|
||||
|
||||
make_bdd_var('true', 'TRUE'):-!.
|
||||
@ -1200,9 +1205,9 @@ add_to_vars(V):-
|
||||
clause(get_used_vars(Vars, Cnt), true), !,
|
||||
retract(get_used_vars(Vars, Cnt)),
|
||||
NewCnt is Cnt + 1,
|
||||
assert(get_used_vars([V|Vars], NewCnt)).
|
||||
assertz(get_used_vars([V|Vars], NewCnt)).
|
||||
add_to_vars(V):-
|
||||
assert(get_used_vars([V], 1)).
|
||||
assertz(get_used_vars([V], 1)).
|
||||
|
||||
|
||||
%%%%%%%%%%%%%%% depth breadth builtin support %%%%%%%%%%%%%%%%%
|
||||
@ -1231,14 +1236,14 @@ variable_in_dbtrie(Trie, V):-
|
||||
|
||||
get_next_variable(V, depth(L, _S)):-
|
||||
member(V, L),
|
||||
not(islabel(V)).
|
||||
\+ is_label(V).
|
||||
get_next_variable(V, breadth(L, _S)):-
|
||||
member(V, L),
|
||||
not(islabel(V)).
|
||||
\+ is_label(V).
|
||||
get_next_variable(V, L):-
|
||||
member(V, L),
|
||||
not(islabel(V)),
|
||||
not(isnestedtrie(V)).
|
||||
\+ is_label(V),
|
||||
\+ isnestedtrie(V).
|
||||
|
||||
get_variable(not(V), R):-
|
||||
!, get_variable(V, R).
|
||||
@ -1253,7 +1258,7 @@ get_variable(R, R).
|
||||
|
||||
trie_to_bdd_trie(A, B, OutputFile, OptimizationLevel, FileParam):-
|
||||
trie_to_depth_breadth_trie(A, B, LL, OptimizationLevel),
|
||||
(islabel(LL) ->
|
||||
(is_label(LL) ->
|
||||
atomic_concat('L', InterStep, LL),
|
||||
retractall(deref(_,_)),
|
||||
(problog_flag(deref_terms, true) ->
|
||||
@ -1306,7 +1311,7 @@ is_state(false).
|
||||
|
||||
nested_trie_to_bdd_trie(A, B, OutputFile, OptimizationLevel, FileParam):-
|
||||
trie_nested_to_depth_breadth_trie(A, B, LL, OptimizationLevel, problog:problog_chktabled),
|
||||
(islabel(LL) ->
|
||||
(is_label(LL) ->
|
||||
retractall(deref(_,_)),
|
||||
(problog_flag(deref_terms, true) ->
|
||||
asserta(deref(LL,no)),
|
||||
@ -1407,7 +1412,7 @@ preprocess(_, _, _, FinalEndCount, FinalEndCount).
|
||||
|
||||
make_nested_trie_base_cases(Trie, t(ID), DepthBreadthTrie, OptimizationLevel, StartCount, FinalEndCount, Ancestors):-
|
||||
trie_to_depth_breadth_trie(Trie, DepthBreadthTrie, Label, OptimizationLevel, StartCount, EndCount),
|
||||
(not(Label = t(_)) ->
|
||||
(Label \= t(_) ->
|
||||
FinalEndCount = EndCount,
|
||||
problog:problog_chktabled(ID, RTrie),!,
|
||||
get_set_trie_from_id(t(ID), Label, RTrie, Ancestors, _, Ancestors)
|
||||
@ -1438,7 +1443,7 @@ trie_nested_to_db_trie(Trie, DepthBreadthTrie, FinalLabel, OptimizationLevel, St
|
||||
|
||||
nested_trie_to_db_trie(Trie, DepthBreadthTrie, FinalLabel, OptimizationLevel, StartCount, FinalEndCount, Module:GetTriePredicate, Ancestors, ContainsLoop, Childs, ChildsAcc):-
|
||||
trie_to_depth_breadth_trie(Trie, DepthBreadthTrie, Label, OptimizationLevel, StartCount, EndCount),
|
||||
(not(Label = t(_)) ->
|
||||
(Label \= t(_) ->
|
||||
(var(ContainsLoop) ->
|
||||
ContainsLoop = false
|
||||
;
|
||||
@ -1721,14 +1726,14 @@ bddlineformat([not(H)|T], O):-
|
||||
write('~'), !,
|
||||
bddlineformat([H|T], O).
|
||||
bddlineformat([H], _O):-
|
||||
(islabel(H) ->
|
||||
(is_label(H) ->
|
||||
Var = H
|
||||
;
|
||||
get_var_name(H, Var)
|
||||
),
|
||||
write(Var), nl, !.
|
||||
bddlineformat([H|T], O):-
|
||||
(islabel(H) ->
|
||||
(is_label(H) ->
|
||||
Var = H
|
||||
;
|
||||
get_var_name(H, Var)
|
||||
@ -1741,7 +1746,7 @@ bddlineformat([not(H)], O):-
|
||||
!, write('~'),
|
||||
bddlineformat([H], O).
|
||||
bddlineformat([H], _O):-!,
|
||||
(islabel(H) ->
|
||||
(is_label(H) ->
|
||||
VarName = H
|
||||
;
|
||||
get_var_name(H, VarName)
|
||||
@ -1751,7 +1756,7 @@ bddlineformat([not(H)|T], O):-
|
||||
!, write('~'),
|
||||
bddlineformat([H|T], O).
|
||||
bddlineformat([H|T], O):-
|
||||
(islabel(H) ->
|
||||
(is_label(H) ->
|
||||
VarName = H
|
||||
;
|
||||
get_var_name(H, VarName)
|
||||
@ -1760,16 +1765,16 @@ bddlineformat([H|T], O):-
|
||||
bddlineformat(T, O).*/
|
||||
|
||||
bddlineformat(T, L, O):-
|
||||
(islabel(L) ->
|
||||
(is_label(L) ->
|
||||
write(L), write(' = '),
|
||||
bddlineformat(T, O)
|
||||
;
|
||||
write(user_output,bdd_script_error([L,T,O])),nl(user_output)
|
||||
).
|
||||
|
||||
islabel(not(L)):-
!, islabel(L).
islabel(Label):-
is_label(not(L)):-
!, is_label(L).
is_label(Label):-
atom(Label),
atomic_concat('L', _, Label).
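% In this file a "label" is an atom starting with 'L' (L1, L2, ...), i.e. an
% intermediate step name produced by get_next_name/1, possibly wrapped in
% not/1. For illustration:
%
%   ?- is_label('L12').        % succeeds
%   ?- is_label(not('L3')).    % succeeds
%   ?- is_label(x5).           % fails: no 'L' prefix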
|
||||
@ -1779,7 +1784,7 @@ isnestedtrie(t(_T)).
|
||||
|
||||
seperate([], [], []).
|
||||
seperate([H|T], [H|Labels], Vars):-
|
||||
islabel(H), !,
|
||||
is_label(H), !,
|
||||
seperate(T, Labels, Vars).
|
||||
seperate([H|T], Labels, [H|Vars]):-
|
||||
seperate(T, Labels, Vars).
|
||||
@ -1796,7 +1801,7 @@ ptree_decomposition(Trie, BDDFileName, VarFileName) :-
|
||||
told,
|
||||
tell(TmpFile1),
|
||||
decompose_trie(Trie, T, L),
|
||||
(islabel(L)->
|
||||
(is_label(L)->
|
||||
atomic_concat('L', LCnt, L),
|
||||
write(L),nl
|
||||
;
|
||||
@ -1932,7 +1937,7 @@ dwriteln(A):-
|
||||
|
||||
non_false([], []):-!.
|
||||
non_false([H|T], [H|NT]):-
|
||||
not(H == false),
|
||||
H \== false,
|
||||
non_false(T, NT).
|
||||
non_false([H|T], NT):-
|
||||
H == false,
|
||||
@ -1944,11 +1949,11 @@ one_true(_, _, 'TRUE'):-!.
|
||||
|
||||
all_false(false,false,false).
|
||||
one_non_false(L, false, false, L):-
|
||||
not(L == false), !.
|
||||
L \== false, !.
|
||||
one_non_false(false, L, false, L):-
|
||||
not(L == false), !.
|
||||
L \== false, !.
|
||||
one_non_false(false, false, L, L):-
|
||||
not(L == false), !.
|
||||
L \== false, !.
|
||||
|
||||
trie_seperate(Trie, Var, TrieWith, TrieWithNeg, TrieWithOut):-
|
||||
trie_traverse(Trie, R),
|
||||
@ -1986,7 +1991,7 @@ ptree_db_trie_opt_performed(LVL1, LVL2, LVL3):-
|
||||
trie_get_depth_breadth_reduction_opt_level_count(2, LVL2),
|
||||
trie_get_depth_breadth_reduction_opt_level_count(3, LVL3).
|
||||
|
||||
:- dynamic deref/2.
|
||||
:- dynamic(deref/2).
|
||||
|
||||
mark_for_deref(DB_Trie):-
|
||||
traverse_ptree_mode(OLD),
|
||||
@ -1998,7 +2003,7 @@ mark_deref(DB_Trie):-
|
||||
traverse_ptree(DB_Trie, DB_Term),
|
||||
(DB_Term = depth(List, Inter); DB_Term = breadth(List, Inter)),
|
||||
member(L, List),
|
||||
((islabel(L), not(deref(L, _))) ->
|
||||
((is_label(L), \+ deref(L, _)) ->
|
||||
asserta(deref(L, Inter))
|
||||
;
|
||||
true
|
||||
|
302	packages/ProbLog/problog/utils_learning.yap	Normal file
@ -0,0 +1,302 @@
|
||||
%%% -*- Mode: Prolog; -*-
|
||||
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
%
|
||||
% $Date: 2010-09-24 15:54:45 +0200 (Fri, 24 Sep 2010) $
|
||||
% $Revision: 4822 $
|
||||
%
|
||||
% This file is part of ProbLog
|
||||
% http://dtai.cs.kuleuven.be/problog
|
||||
%
|
||||
% ProbLog was developed at Katholieke Universiteit Leuven
|
||||
%
|
||||
% Copyright 2008, 2009, 2010
|
||||
% Katholieke Universiteit Leuven
|
||||
%
|
||||
% Main authors of this file:
|
||||
% Bernd Gutmann
|
||||
%
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
%
|
||||
% Artistic License 2.0
|
||||
%
|
||||
% Copyright (c) 2000-2006, The Perl Foundation.
|
||||
%
|
||||
% Everyone is permitted to copy and distribute verbatim copies of this
|
||||
% license document, but changing it is not allowed. Preamble
|
||||
%
|
||||
% This license establishes the terms under which a given free software
|
||||
% Package may be copied, modified, distributed, and/or
|
||||
% redistributed. The intent is that the Copyright Holder maintains some
|
||||
% artistic control over the development of that Package while still
|
||||
% keeping the Package available as open source and free software.
|
||||
%
|
||||
% You are always permitted to make arrangements wholly outside of this
|
||||
% license directly with the Copyright Holder of a given Package. If the
|
||||
% terms of this license do not permit the full use that you propose to
|
||||
% make of the Package, you should contact the Copyright Holder and seek
|
||||
% a different licensing arrangement. Definitions
|
||||
%
|
||||
% "Copyright Holder" means the individual(s) or organization(s) named in
|
||||
% the copyright notice for the entire Package.
|
||||
%
|
||||
% "Contributor" means any party that has contributed code or other
|
||||
% material to the Package, in accordance with the Copyright Holder's
|
||||
% procedures.
|
||||
%
|
||||
% "You" and "your" means any person who would like to copy, distribute,
|
||||
% or modify the Package.
|
||||
%
|
||||
% "Package" means the collection of files distributed by the Copyright
|
||||
% Holder, and derivatives of that collection and/or of those files. A
|
||||
% given Package may consist of either the Standard Version, or a
|
||||
% Modified Version.
|
||||
%
|
||||
% "Distribute" means providing a copy of the Package or making it
|
||||
% accessible to anyone else, or in the case of a company or
|
||||
% organization, to others outside of your company or organization.
|
||||
%
|
||||
% "Distributor Fee" means any fee that you charge for Distributing this
|
||||
% Package or providing support for this Package to another party. It
|
||||
% does not mean licensing fees.
|
||||
%
|
||||
% "Standard Version" refers to the Package if it has not been modified,
|
||||
% or has been modified only in ways explicitly requested by the
|
||||
% Copyright Holder.
|
||||
%
|
||||
% "Modified Version" means the Package, if it has been changed, and such
|
||||
% changes were not explicitly requested by the Copyright Holder.
|
||||
%
|
||||
% "Original License" means this Artistic License as Distributed with the
|
||||
% Standard Version of the Package, in its current version or as it may
|
||||
% be modified by The Perl Foundation in the future.
|
||||
%
|
||||
% "Source" form means the source code, documentation source, and
|
||||
% configuration files for the Package.
|
||||
%
|
||||
% "Compiled" form means the compiled bytecode, object code, binary, or
|
||||
% any other form resulting from mechanical transformation or translation
|
||||
% of the Source form.
|
||||
%
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
%
|
||||
% Permission for Use and Modification Without Distribution
|
||||
%
|
||||
% (1) You are permitted to use the Standard Version and create and use
|
||||
% Modified Versions for any purpose without restriction, provided that
|
||||
% you do not Distribute the Modified Version.
|
||||
%
|
||||
% Permissions for Redistribution of the Standard Version
|
||||
%
|
||||
% (2) You may Distribute verbatim copies of the Source form of the
|
||||
% Standard Version of this Package in any medium without restriction,
|
||||
% either gratis or for a Distributor Fee, provided that you duplicate
|
||||
% all of the original copyright notices and associated disclaimers. At
|
||||
% your discretion, such verbatim copies may or may not include a
|
||||
% Compiled form of the Package.
|
||||
%
|
||||
% (3) You may apply any bug fixes, portability changes, and other
|
||||
% modifications made available from the Copyright Holder. The resulting
|
||||
% Package will still be considered the Standard Version, and as such
|
||||
% will be subject to the Original License.
|
||||
%
|
||||
% Distribution of Modified Versions of the Package as Source
|
||||
%
|
||||
% (4) You may Distribute your Modified Version as Source (either gratis
|
||||
% or for a Distributor Fee, and with or without a Compiled form of the
|
||||
% Modified Version) provided that you clearly document how it differs
|
||||
% from the Standard Version, including, but not limited to, documenting
|
||||
% any non-standard features, executables, or modules, and provided that
|
||||
% you do at least ONE of the following:
|
||||
%
|
||||
% (a) make the Modified Version available to the Copyright Holder of the
|
||||
% Standard Version, under the Original License, so that the Copyright
|
||||
% Holder may include your modifications in the Standard Version. (b)
|
||||
% ensure that installation of your Modified Version does not prevent the
|
||||
% user installing or running the Standard Version. In addition, the
|
||||
% modified Version must bear a name that is different from the name of
|
||||
% the Standard Version. (c) allow anyone who receives a copy of the
|
||||
% Modified Version to make the Source form of the Modified Version
|
||||
% available to others under (i) the Original License or (ii) a license
|
||||
% that permits the licensee to freely copy, modify and redistribute the
|
||||
% Modified Version using the same licensing terms that apply to the copy
|
||||
% that the licensee received, and requires that the Source form of the
|
||||
% Modified Version, and of any works derived from it, be made freely
|
||||
% available in that license fees are prohibited but Distributor Fees are
|
||||
% allowed.
|
||||
%
|
||||
% Distribution of Compiled Forms of the Standard Version or
|
||||
% Modified Versions without the Source
|
||||
%
|
||||
% (5) You may Distribute Compiled forms of the Standard Version without
|
||||
% the Source, provided that you include complete instructions on how to
|
||||
% get the Source of the Standard Version. Such instructions must be
|
||||
% valid at the time of your distribution. If these instructions, at any
|
||||
% time while you are carrying out such distribution, become invalid, you
|
||||
% must provide new instructions on demand or cease further
|
||||
% distribution. If you provide valid instructions or cease distribution
|
||||
% within thirty days after you become aware that the instructions are
|
||||
% invalid, then you do not forfeit any of your rights under this
|
||||
% license.
|
||||
%
|
||||
% (6) You may Distribute a Modified Version in Compiled form without the
|
||||
% Source, provided that you comply with Section 4 with respect to the
|
||||
% Source of the Modified Version.
|
||||
%
|
||||
% Aggregating or Linking the Package
|
||||
%
|
||||
% (7) You may aggregate the Package (either the Standard Version or
|
||||
% Modified Version) with other packages and Distribute the resulting
|
||||
% aggregation provided that you do not charge a licensing fee for the
|
||||
% Package. Distributor Fees are permitted, and licensing fees for other
|
||||
% components in the aggregation are permitted. The terms of this license
|
||||
% apply to the use and Distribution of the Standard or Modified Versions
|
||||
% as included in the aggregation.
|
||||
%
|
||||
% (8) You are permitted to link Modified and Standard Versions with
|
||||
% other works, to embed the Package in a larger work of your own, or to
|
||||
% build stand-alone binary or bytecode versions of applications that
|
||||
% include the Package, and Distribute the result without restriction,
|
||||
% provided the result does not expose a direct interface to the Package.
|
||||
%
|
||||
% Items That are Not Considered Part of a Modified Version
|
||||
%
|
||||
% (9) Works (including, but not limited to, modules and scripts) that
|
||||
% merely extend or make use of the Package, do not, by themselves, cause
|
||||
% the Package to be a Modified Version. In addition, such works are not
|
||||
% considered parts of the Package itself, and are not subject to the
|
||||
% terms of this license.
|
||||
%
|
||||
% General Provisions
|
||||
%
|
||||
% (10) Any use, modification, and distribution of the Standard or
|
||||
% Modified Versions is governed by this Artistic License. By using,
|
||||
% modifying or distributing the Package, you accept this license. Do not
|
||||
% use, modify, or distribute the Package, if you do not accept this
|
||||
% license.
|
||||
%
|
||||
% (11) If your Modified Version has been derived from a Modified Version
|
||||
% made by someone other than you, you are nevertheless required to
|
||||
% ensure that your Modified Version complies with the requirements of
|
||||
% this license.
|
||||
%
|
||||
% (12) This license does not grant you the right to use any trademark,
|
||||
% service mark, tradename, or logo of the Copyright Holder.
|
||||
%
|
||||
% (13) This license includes the non-exclusive, worldwide,
|
||||
% free-of-charge patent license to make, have made, use, offer to sell,
|
||||
% sell, import and otherwise transfer the Package with respect to any
|
||||
% patent claims licensable by the Copyright Holder that are necessarily
|
||||
% infringed by the Package. If you institute patent litigation
|
||||
% (including a cross-claim or counterclaim) against any party alleging
|
||||
% that the Package constitutes direct or contributory patent
|
||||
% infringement, then this Artistic License to you shall terminate on the
|
||||
% date that such litigation is filed.
|
||||
%
|
||||
% (14) Disclaimer of Warranty: THE PACKAGE IS PROVIDED BY THE COPYRIGHT
|
||||
% HOLDER AND CONTRIBUTORS "AS IS' AND WITHOUT ANY EXPRESS OR IMPLIED
|
||||
% WARRANTIES. THE IMPLIED WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
|
||||
% PARTICULAR PURPOSE, OR NON-INFRINGEMENT ARE DISCLAIMED TO THE EXTENT
|
||||
% PERMITTED BY YOUR LOCAL LAW. UNLESS REQUIRED BY LAW, NO COPYRIGHT
|
||||
% HOLDER OR CONTRIBUTOR WILL BE LIABLE FOR ANY DIRECT, INDIRECT,
|
||||
% INCIDENTAL, OR CONSEQUENTIAL DAMAGES ARISING IN ANY WAY OUT OF THE USE
|
||||
% OF THE PACKAGE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
%
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
|
||||
%%%%%%%%
|
||||
% Collected OS depended instructions
|
||||
%%%%%%%%
|
||||
:- module(utils_learning, [empty_bdd_directory/1,
|
||||
empty_output_directory/1,
|
||||
delete_file_silent/1
|
||||
]).
|
||||
|
||||
% switch on all tests to reduce bug searching time
|
||||
:- style_check(all).
|
||||
:- yap_flag(unknown,error).
|
||||
|
||||
% load library modules
|
||||
:- use_module(library(lists), [append/3, member/2]).
|
||||
:- use_module(library(system), [delete_file/1, directory_files/2, file_exists/1]).
|
||||
|
||||
% load our own modules
|
||||
:- use_module(os).
|
||||
|
||||
%========================================================================
|
||||
%=
|
||||
%=
|
||||
%========================================================================
|
||||
|
||||
empty_bdd_directory(Path) :-
|
||||
ground(Path),
|
||||
|
||||
atom_codes('query_', PF1), % 'query_*'
|
||||
|
||||
directory_files(Path,List),
|
||||
delete_files_with_matching_prefix(List,Path,[PF1]).
|
||||
|
||||
%========================================================================
|
||||
%=
|
||||
%=
|
||||
%========================================================================
|
||||
|
||||
empty_output_directory(Path) :-
|
||||
ground(Path),
|
||||
|
||||
concat_path_with_filename(Path,'log.dat',F1),
|
||||
concat_path_with_filename(Path,'out.dat',F2),
|
||||
|
||||
(
|
||||
file_exists(F1)
|
||||
->
|
||||
delete_file_silent(F1);
|
||||
true
|
||||
),
|
||||
|
||||
(
|
||||
file_exists(F2)
|
||||
->
|
||||
delete_file_silent(F2);
|
||||
true
|
||||
),
|
||||
|
||||
atom_codes('values_', PF1), % 'values_*_q_*.dat'
|
||||
atom_codes('factprobs_', PF2), % 'factprobs_*.pl'
|
||||
atom_codes('input_', PF3), % 'input_*.pl'
|
||||
atom_codes('trainpredictions_',PF4), % 'trainpredictions_*.pl'
|
||||
atom_codes('testpredictions_',PF5), % 'testpredictions_*.pl'
|
||||
atom_codes('predictions_',PF6), % 'predictions_*.pl'
|
||||
directory_files(Path,List),
|
||||
delete_files_with_matching_prefix(List,Path,[PF1,PF2,PF3,PF4,PF5,PF6]).
|
||||
|
||||
%========================================================================
|
||||
%=
|
||||
%=
|
||||
%========================================================================
|
||||
|
||||
delete_file_silent(File) :-
|
||||
delete_file(File),
|
||||
!.
|
||||
delete_file_silent(_).
|
||||
|
||||
%========================================================================
|
||||
%=
|
||||
%=
|
||||
%========================================================================
|
||||
|
||||
delete_files_with_matching_prefix([],_,_).
delete_files_with_matching_prefix([Name|T],Path,Prefixes) :-
atom_codes(Name,NameCode),

(
(member(Prefix,Prefixes), append(Prefix,_Suffix,NameCode))
->
(
concat_path_with_filename(Path,Name,F),
delete_file_silent(F)
);
true
),

delete_files_with_matching_prefix(T,Path,Prefixes).
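% The prefix test works on code lists: append(Prefix, _Suffix, NameCode)
% succeeds exactly when Prefix is a prefix of the file name. Illustration
% (the file name is hypothetical):
%
%   ?- atom_codes('query_42.bdd', Cs), atom_codes('query_', P),
%      append(P, _Rest, Cs).
%   % succeeds, so query_42.bdd would be removed by empty_bdd_directory/1.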
@ -204,18 +204,23 @@
|
||||
%
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
|
||||
:- module(variable_elimination, [trie_check_for_and_cluster/1, trie_replace_and_cluster/2, clean_up/0, variable_elimination_stats/3]).
|
||||
:- module(variable_elimination, [
|
||||
trie_check_for_and_cluster/1,
|
||||
trie_replace_and_cluster/2,
|
||||
clean_up/0,
|
||||
variable_elimination_stats/3
|
||||
]).
|
||||
|
||||
:- ensure_loaded(library(lists)).
|
||||
:- ensure_loaded(library(tries)).
|
||||
:- use_module(library(lists), [append/3, delete/3, memberchk/2, reverse/2]).
|
||||
:- use_module(library(tries)).
|
||||
|
||||
:- use_module('flags', _, [problog_define_flag/5]).
|
||||
|
||||
|
||||
:- nb_setval(prob_fact_count, 0).
|
||||
|
||||
:- problog_define_flag(variable_elimination, problog_flag_validate_boolean, 'enable variable elimination', false, variable_elimination).
|
||||
|
||||
:- initialization((
|
||||
nb_setval(prob_fact_count, 0),
|
||||
problog_define_flag(variable_elimination, problog_flag_validate_boolean, 'enable variable elimination', false, variable_elimination)
|
||||
)).
|
||||
|
||||
bit_encode(L, ON):-
|
||||
bit_encode(L, ON, 0).
|
||||
@ -373,7 +378,7 @@ last_cluster_element(L, Cluster, R):-
|
||||
|
||||
nocluster([], _).
|
||||
nocluster([H|T], L):-
|
||||
not(memberchk(H, L)),
|
||||
\+ memberchk(H, L),
|
||||
nocluster(T, L).
|
||||
|
||||
eliminate_list([], L, L).
|
||||
@ -386,7 +391,7 @@ replace([], _, _, []).
replace([H|T], H, NH, [NH|NT]):-
replace(T, H, NH, NT).
replace([H|T], R, NR, [H|NT]):-
\+ H == R,
H \== R,
replace(T, R, NR, NT).
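% Example of replace/4 as defined above: every occurrence of the second
% argument is substituted, all other elements are kept.
%
%   ?- replace([a, b, a, c], a, x, L).
%   L = [x, b, x, c]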
|
||||
clean_up:-
|
||||
@ -418,8 +423,8 @@ make_prob_fact(L, P, ID):-
|
||||
(clause(problog:problog_predicate(var_elimination, 1), true) ->
|
||||
true
|
||||
;
|
||||
assert(problog:problog_predicate(var_elimination, 1))
|
||||
assertz(problog:problog_predicate(var_elimination, 1))
|
||||
),
|
||||
assert(problog:problog_var_elimination(ID, L, P))
|
||||
assertz(problog:problog_var_elimination(ID, L, P))
|
||||
).
|
||||
|
||||
|
@ -376,7 +376,8 @@ problog_var_timer_timeout(Variable):-
|
||||
%%% This is possible for future use %%%
|
||||
|
||||
:- use_module(library(timeout)).
|
||||
:- meta_predicate problog_var_time_out(:,_,_,_), problog_time_out(:,_,_,_).
|
||||
:- meta_predicate(problog_var_time_out(0, *, *, *)).
|
||||
:- meta_predicate(problog_time_out(0, *, *, *)).
|
||||
%
|
||||
% Problems with nesting, use with care
|
||||
% always succeeds returns Success = true/fail, Time = Msec taken/timeout
|
||||
|
@ -2,8 +2,8 @@
|
||||
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
%
|
||||
% $Date: 2010-08-25 15:45:22 +0200 (Wed, 25 Aug 2010) $
|
||||
% $Revision: 4692 $
|
||||
% $Date: 2010-09-24 15:54:45 +0200 (Fri, 24 Sep 2010) $
|
||||
% $Revision: 4822 $
|
||||
%
|
||||
% This file is part of ProbLog
|
||||
% http://dtai.cs.kuleuven.be/problog
|
||||
@ -205,9 +205,10 @@
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
|
||||
|
||||
:- module(learning,[do_learning/1,
|
||||
do_learning/2,
|
||||
set_learning_flag/2,
|
||||
:- module(learning,[
|
||||
do_learning/1,
|
||||
do_learning/2,
|
||||
set_learning_flag/2,
|
||||
learning_flag/2,
|
||||
learning_flags/0,
|
||||
problog_help/0,
|
||||
@ -215,54 +216,47 @@
|
||||
problog_flag/2,
|
||||
problog_flags/0,
|
||||
auto_alpha/0
|
||||
]).
|
||||
]).
|
||||
|
||||
% switch on all the checks to reduce bug searching time
|
||||
:- style_check(all).
|
||||
:- yap_flag(unknown,error).
|
||||
|
||||
% load modules from the YAP library
|
||||
:- ensure_loaded(library(lists)).
|
||||
:- ensure_loaded(library(random)).
|
||||
:- ensure_loaded(library(system)).
|
||||
:- use_module(library(lists), [max_list/2, min_list/2, sum_list/2]).
|
||||
:- use_module(library(random)). % PM doesn't seem to be used!
|
||||
:- use_module(library(system), [delete_file/1, file_exists/1, shell/2]).
|
||||
|
||||
% load our own modules
|
||||
:- ensure_loaded(problog).
|
||||
:- ensure_loaded('problog/logger').
|
||||
:- ensure_loaded('problog/flags').
|
||||
:- ensure_loaded('problog/os').
|
||||
|
||||
:- use_module(problog).
|
||||
:- use_module('problog/logger').
|
||||
:- use_module('problog/flags').
|
||||
:- use_module('problog/os').
|
||||
:- use_module('problog/print_learning').
|
||||
:- use_module('problog/utils_learning').
|
||||
|
||||
% used to indicate the state of the system
|
||||
:- dynamic values_correct/0.
|
||||
:- dynamic learning_initialized/0.
|
||||
:- dynamic current_iteration/1.
|
||||
:- dynamic example_count/1.
|
||||
:- dynamic query_probability_intern/2.
|
||||
:- dynamic query_gradient_intern/4.
|
||||
:- dynamic last_mse/1.
|
||||
:- dynamic(values_correct/0).
|
||||
:- dynamic(learning_initialized/0).
|
||||
:- dynamic(current_iteration/1).
|
||||
:- dynamic(example_count/1).
|
||||
:- dynamic(query_probability_intern/2).
|
||||
:- dynamic(query_gradient_intern/4).
|
||||
:- dynamic(last_mse/1).
|
||||
|
||||
% used to identify queries which have identical proofs
|
||||
:- dynamic query_is_similar/2.
|
||||
:- dynamic query_md5/3.
|
||||
:- dynamic(query_is_similar/2).
|
||||
:- dynamic(query_md5/3).
|
||||
|
||||
:- assert_static(user:(example(A,B,C,=) :- current_predicate(example/3), example(A,B,C))).
|
||||
:- assert_static(user:(test_example(A,B,C,=) :- current_predicate(test_example/3), test_example(A,B,C))).
|
||||
:- multifile(user:example/4).
user:example(A,B,C,=) :-
current_predicate(user:example/3),
user:example(A,B,C).
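% With the bridge clause above, examples written in the older 3-argument form
% stay visible through the 4-argument interface. For instance, if the user
% file contains the (hypothetical) fact
%
%   example(1, heads(coin1), 0.6).
%
% then user:example(1, heads(coin1), 0.6, =) succeeds, the '=' in the fourth
% argument presumably marking an exact target probability.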
|
||||
%========================================================================
|
||||
%=
|
||||
%=
|
||||
%=
|
||||
%========================================================================
|
||||
|
||||
my_format(Level,String,Arguments) :-
problog_flag(learning_verbosity_level,V_Level),
(
V_Level >= Level
->
(format(String,Arguments),flush_output(user));
true
).
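% Note: my_format/3 above is the helper being retired in this hunk; later
% hunks switch its call sites to format_learning/3, presumably exported by the
% newly imported problog/print_learning module. Assuming it keeps the
% behaviour shown here, a call such as
%
%   format_learning(3, '~q training examples~n', [120]).
%
% prints nothing unless the learning_verbosity_level flag is at least 3.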
:- multifile(user:test_example/4).
|
||||
user:test_example(A,B,C,=) :-
|
||||
current_predicate(user:test_example/3),
|
||||
user:test_example(A,B,C).
|
||||
|
||||
|
||||
%========================================================================
|
||||
@ -459,10 +453,10 @@ do_learning_intern(Iterations,Epsilon) :-
|
||||
retractall(current_iteration(_)),
|
||||
!,
|
||||
NextIteration is CurrentIteration+1,
|
||||
assert(current_iteration(NextIteration)),
|
||||
assertz(current_iteration(NextIteration)),
|
||||
EndIteration is CurrentIteration+Iterations-1,
|
||||
|
||||
my_format(1,'~nIteration ~d of ~d~n',[CurrentIteration,EndIteration]),
|
||||
format_learning(1,'~nIteration ~d of ~d~n',[CurrentIteration,EndIteration]),
|
||||
logger_set_variable(iteration,CurrentIteration),
|
||||
|
||||
logger_start_timer(duration),
|
||||
@ -491,12 +485,12 @@ do_learning_intern(Iterations,Epsilon) :-
|
||||
(
|
||||
retractall(last_mse(_)),
|
||||
logger_get_variable(mse_trainingset,Current_MSE),
|
||||
assert(last_mse(Current_MSE)),
|
||||
assertz(last_mse(Current_MSE)),
|
||||
!,
|
||||
MSE_Diff is abs(Last_MSE-Current_MSE)
|
||||
); (
|
||||
logger_get_variable(mse_trainingset,Current_MSE),
|
||||
assert(last_mse(Current_MSE)),
|
||||
assertz(last_mse(Current_MSE)),
|
||||
MSE_Diff is Epsilon+1
|
||||
)
|
||||
),
|
||||
@ -543,7 +537,7 @@ init_learning :-
|
||||
|
||||
logger_write_header,
|
||||
|
||||
my_format(1,'Initializing everything~n',[]),
|
||||
format_learning(1,'Initializing everything~n',[]),
|
||||
empty_output_directory,
|
||||
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
@ -594,7 +588,7 @@ init_learning :-
|
||||
true
|
||||
),
|
||||
bb_delete(test_examples,TestExampleCount),
|
||||
my_format(3,'~q test examples~n',[TestExampleCount]),
|
||||
format_learning(3,'~q test examples~n',[TestExampleCount]),
|
||||
!,
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
% stop count test examples
|
||||
@ -615,8 +609,8 @@ init_learning :-
|
||||
true
|
||||
),
|
||||
bb_delete(training_examples,TrainingExampleCount),
|
||||
assert(example_count(TrainingExampleCount)),
|
||||
my_format(3,'~q training examples~n',[TrainingExampleCount]),
|
||||
assertz(example_count(TrainingExampleCount)),
|
||||
format_learning(3,'~q training examples~n',[TrainingExampleCount]),
|
||||
!,
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
% stop count training examples
|
||||
@ -643,16 +637,15 @@ init_learning :-
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
% build BDD script for every example
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
once(initialize_fact_probabilities),
|
||||
once(init_queries),
|
||||
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
% done
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
assert(current_iteration(0)),
|
||||
assert(learning_initialized),
|
||||
assertz(current_iteration(0)),
|
||||
assertz(learning_initialized),
|
||||
|
||||
my_format(1,'~n',[]).
|
||||
format_learning(1,'~n',[]).
|
||||
|
||||
|
||||
%========================================================================
|
||||
@ -681,11 +674,11 @@ empty_output_directory :-
|
||||
|
||||
|
||||
init_queries :-
|
||||
my_format(2,'Build BDDs for examples~n',[]),
|
||||
format_learning(2,'Build BDDs for examples~n',[]),
|
||||
( % go over all test examples
|
||||
current_predicate(user:test_example/4),
|
||||
user:test_example(ID,Query,Prob,_),
|
||||
my_format(3,' test example ~q: ~q~n',[ID,Query]),
|
||||
format_learning(3,' test example ~q: ~q~n',[ID,Query]),
|
||||
flush_output(user),
|
||||
init_one_query(ID,Query,test),
|
||||
|
||||
@ -695,7 +688,7 @@ init_queries :-
|
||||
( % go over all training examples
|
||||
current_predicate(user:example/4),
|
||||
user:example(ID,Query,Prob,_),
|
||||
my_format(3,' training example ~q: ~q~n',[ID,Query]),
|
||||
format_learning(3,' training example ~q: ~q~n',[ID,Query]),
|
||||
flush_output(user),
|
||||
init_one_query(ID,Query,training),
|
||||
|
||||
@ -721,10 +714,10 @@ init_one_query(QueryID,Query,Type) :-
|
||||
(
|
||||
file_exists(Filename)
|
||||
->
|
||||
my_format(3,' Reuse existing BDD ~q~n~n',[Filename]);
|
||||
format_learning(3,' Reuse existing BDD ~q~n~n',[Filename]);
|
||||
(
|
||||
problog_flag(init_method,(Query,_Prob,Filename,Probabilities_File,Call)),
|
||||
once(call(Call)),
|
||||
once(Call),
|
||||
delete_file(Probabilities_File)
|
||||
)
|
||||
),
|
||||
@ -741,39 +734,18 @@ init_one_query(QueryID,Query,Type) :-
|
||||
query_md5(OtherQueryID,Query_MD5,Type)
|
||||
->
|
||||
(
|
||||
assert(query_is_similar(QueryID,OtherQueryID)),
|
||||
my_format(3, '~q is similar to ~q~2n', [QueryID,OtherQueryID])
|
||||
assertz(query_is_similar(QueryID,OtherQueryID)),
|
||||
format_learning(3, '~q is similar to ~q~2n', [QueryID,OtherQueryID])
|
||||
);
|
||||
assert(query_md5(QueryID,Query_MD5,Type))
|
||||
assertz(query_md5(QueryID,Query_MD5,Type))
|
||||
)
|
||||
);
|
||||
|
||||
true
|
||||
).
|
||||
),!,
|
||||
garbage_collect.
|
||||
|
||||
|
||||
%========================================================================
|
||||
%= set all unknown fact probabilities to random values
|
||||
%=
|
||||
%=
|
||||
%========================================================================
|
||||
|
||||
initialize_fact_probabilities :-
|
||||
( % go over all tunable facts
|
||||
tunable_fact(FactID,_),
|
||||
problog_flag(probability_initializer,(FactID,Probability,Query)),
|
||||
once(call(Query)),
|
||||
set_fact_probability(FactID,Probability),
|
||||
|
||||
fail; % go to next tunable fact
|
||||
true
|
||||
).
|
||||
|
||||
random_probability(_FactID,Probability) :-
|
||||
% use probs around 0.5 to not confuse k-best search
|
||||
random(Random),
|
||||
Probability is 0.5+(Random-0.5)/100.
|
||||
|
||||
|
||||
|
||||
%========================================================================
|
||||
@ -791,8 +763,8 @@ update_values :-
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
% delete old values
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
once(retractall(query_probability_intern(_,_))),
|
||||
once(retractall(query_gradient_intern(_,_,_,_))),
|
||||
retractall(query_probability_intern(_,_)),
|
||||
retractall(query_gradient_intern(_,_,_,_)),
|
||||
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
% start write current probabilities to file
|
||||
@ -837,7 +809,7 @@ update_values :-
|
||||
% stop write current probabilities to file
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
|
||||
assert(values_correct).
|
||||
assertz(values_correct).
|
||||
|
||||
|
||||
|
||||
@ -854,7 +826,7 @@ update_query_cleanup(QueryID) :-
|
||||
% either this query is similar to another or vice versa,
|
||||
% therefore we don't delete anything
|
||||
true;
|
||||
once(retractall(query_gradient_intern(QueryID,_,_,_)))
|
||||
retractall(query_gradient_intern(QueryID,_,_,_))
|
||||
).
|
||||
|
||||
|
||||
@ -867,13 +839,13 @@ update_query(QueryID,Symbol,What_To_Update) :-
|
||||
query_is_similar(QueryID,_)
|
||||
->
|
||||
% we don't have to evaluate the BDD
|
||||
my_format(4,'#',[]);
|
||||
format_learning(4,'#',[]);
|
||||
(
|
||||
problog_flag(sigmoid_slope,Slope),
|
||||
problog_dir(PD),
|
||||
((What_To_Update=all;query_is_similar(_,QueryID)) -> Method='g' ; Method='l'),
|
||||
atomic_concat([PD,
|
||||
'/problogbdd',
|
||||
'/ProblogBDD',
|
||||
' -i "', Probabilities_File, '"',
|
||||
' -l "', Query_Directory,'/query_',QueryID, '"',
|
||||
' -m ', Method,
|
||||
@ -921,7 +893,7 @@ update_query(QueryID,Symbol,What_To_Update) :-
|
||||
),
|
||||
|
||||
delete_file(Values_Filename),
|
||||
my_format(4,'~w',[Symbol])
|
||||
format_learning(4,'~w',[Symbol])
|
||||
)
|
||||
),
|
||||
flush_output(user).
|
||||
@ -946,14 +918,14 @@ my_load_intern(end_of_file,_,_) :-
|
||||
!.
|
||||
my_load_intern(query_probability(QueryID,Prob),Handle,QueryID) :-
|
||||
!,
|
||||
assert(query_probability_intern(QueryID,Prob)),
|
||||
assertz(query_probability_intern(QueryID,Prob)),
|
||||
read(Handle,X),
|
||||
my_load_intern(X,Handle,QueryID).
|
||||
my_load_intern(query_gradient(QueryID,XFactID,Type,Value),Handle,QueryID) :-
|
||||
!,
|
||||
atomic_concat(x,StringFactID,XFactID),
|
||||
atom_number(StringFactID,FactID),
|
||||
assert(query_gradient_intern(QueryID,FactID,Type,Value)),
|
||||
assertz(query_gradient_intern(QueryID,FactID,Type,Value)),
|
||||
read(Handle,X),
|
||||
my_load_intern(X,Handle,QueryID).
|
||||
my_load_intern(X,Handle,QueryID) :-
|
||||
@ -1056,7 +1028,7 @@ mse_trainingset_only_for_linesearch(MSE) :-
|
||||
length(AllSquaredErrors,Length),
|
||||
sum_list(AllSquaredErrors,SumAllSquaredErrors),
|
||||
MSE is SumAllSquaredErrors/Length,
|
||||
my_format(3,' (~8f)~n',[MSE])
|
||||
format_learning(3,' (~8f)~n',[MSE])
|
||||
); true
|
||||
),
|
||||
retractall(values_correct).
|
||||
@ -1066,7 +1038,7 @@ mse_testset :-
|
||||
(current_predicate(user:test_example/4),user:test_example(_,_,_,_))
|
||||
->
|
||||
(
|
||||
my_format(2,'MSE_Test ',[]),
|
||||
format_learning(2,'MSE_Test ',[]),
|
||||
update_values,
|
||||
findall(SquaredError,
|
||||
(user:test_example(QueryID,_Query,QueryProb,Type),
|
||||
@ -1091,7 +1063,7 @@ mse_testset :-
|
||||
logger_set_variable(mse_testset,MSE),
|
||||
logger_set_variable(mse_min_testset,MinError),
|
||||
logger_set_variable(mse_max_testset,MaxError),
|
||||
my_format(2,' (~8f)~n',[MSE])
|
||||
format_learning(2,' (~8f)~n',[MSE])
|
||||
); true
|
||||
).
|
||||
|
||||
@ -1227,7 +1199,7 @@ add_gradient(Learning_Rate) :-
|
||||
|
||||
|
||||
gradient_descent :-
|
||||
my_format(2,'Gradient ',[]),
|
||||
format_learning(2,'Gradient ',[]),
|
||||
|
||||
save_old_probabilities,
|
||||
update_values,
|
||||
@ -1411,7 +1383,7 @@ gradient_descent :-
|
||||
logger_set_variable(mse_min_trainingset,MSE_Train_Min),
|
||||
logger_set_variable(mse_max_trainingset,MSE_Train_Max),
|
||||
|
||||
my_format(2,'~n',[]),
|
||||
format_learning(2,'~n',[]),
|
||||
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
% start add gradient to current probabilities
|
||||
@ -1422,7 +1394,7 @@ gradient_descent :-
|
||||
problog_flag(learning_rate,LearningRate);
|
||||
lineSearch(LearningRate,_)
|
||||
),
|
||||
my_format(3,'learning rate:~8f~n',[LearningRate]),
|
||||
format_learning(3,'learning rate:~8f~n',[LearningRate]),
|
||||
add_gradient(LearningRate),
|
||||
logger_set_variable(learning_rate,LearningRate),
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
@ -1438,7 +1410,7 @@ gradient_descent :-
|
||||
|
||||
line_search_evaluate_point(Learning_Rate,MSE) :-
|
||||
add_gradient(Learning_Rate),
|
||||
my_format(2,'Line search (h=~8f) ',[Learning_Rate]),
|
||||
format_learning(2,'Line search (h=~8f) ',[Learning_Rate]),
|
||||
mse_trainingset_only_for_linesearch(MSE).
|
||||
|
||||
|
||||
@ -1449,7 +1421,7 @@ lineSearch(Final_X,Final_Value) :-
|
||||
problog_flag(line_search_tau,Tau),
|
||||
problog_flag(line_search_interval,(A,B)),
|
||||
|
||||
my_format(3,'Line search in interval (~4f,~4f)~n',[A,B]),
|
||||
format_learning(3,'Line search in interval (~4f,~4f)~n',[A,B]),
|
||||
|
||||
% init values
|
||||
Acc is Tol * (B-A),
|
||||
@ -1677,11 +1649,9 @@ init_flags :-
|
||||
problog_define_flag(output_directory, problog_flag_validate_directory, 'directory for logfiles etc', Output_Folder,learning_general,flags:learning_output_dir_handler),
|
||||
problog_define_flag(log_frequency, problog_flag_validate_posint, 'log results every nth iteration', 1, learning_general),
|
||||
problog_define_flag(rebuild_bdds, problog_flag_validate_nonegint, 'rebuild BDDs every nth iteration', 0, learning_general),
|
||||
problog_define_flag(learning_verbosity_level, problog_flag_validate_0to5,'How much output shall be given (0=nothing,5=all)',5, learning_general),
|
||||
problog_define_flag(reuse_initialized_bdds,problog_flag_validate_boolean, 'Reuse BDDs from previous runs',false, learning_general),
|
||||
problog_define_flag(check_duplicate_bdds,problog_flag_validate_boolean,'Store intermediate results in hash table',true,learning_general),
|
||||
problog_define_flag(init_method,problog_flag_validate_dummy,'ProbLog predicate to search proofs',(Query,Probability,BDDFile,ProbFile,problog_kbest_save(Query,100,Probability,_Status,BDDFile,ProbFile)),learning_general,flags:learning_init_handler),
|
||||
problog_define_flag(probability_initializer,problog_flag_validate_dummy,'Predicate to initialize probabilities',(FactID,P,random_probability(FactID,P)),learning_general,flags:learning_prob_init_handler),
|
||||
problog_define_flag(alpha,problog_flag_validate_number,'weight of negative examples (auto=n_p/n_n)',auto,learning_general,flags:auto_handler),
|
||||
problog_define_flag(sigmoid_slope,problog_flag_validate_posnumber,'slope of sigmoid function',1.0,learning_general),
|
||||
|
||||
|
@ -834,12 +834,22 @@ absolute_file_name(File,Opts,TrueFileName) :-
|
||||
'$get_abs_file'(File,opts(_,D0,_,_,_,_,_),AbsFile) :-
|
||||
operating_system_support:true_file_name(File,D0,AbsFile).
|
||||
|
||||
'$search_in_path'(File,opts(Extensions,_,_,Access,_,_,_),F) :-
|
||||
'$add_extensions'(Extensions,File,F),
|
||||
access_file(F,Access).
|
||||
'$search_in_path'(File,opts(Extensions,_,Type,Access,_,_,_),F) :-
|
||||
'$add_extensions'(Extensions, File, F0),
|
||||
'$check_file'(F0, Type, Access, F).
|
||||
'$search_in_path'(File,opts(_,_,Type,Access,_,_,_),F) :-
|
||||
'$add_type_extensions'(Type,File,F),
|
||||
access_file(F,Access).
|
||||
'$add_type_extensions'(Type, File, F0),
|
||||
'$check_file'(F0, Type, Access, F).
|
||||
|
||||
'$check_file'(F, Type, none, F) :- !.
'$check_file'(F0, Type, Access, F0) :-
access_file(F0, Access),
(Type == directory
->
exists_directory(F0)
;
true
).
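% The extra '$check_file'/4 step means a candidate file is only accepted when
% it satisfies the requested access mode and, for the directory file type,
% really is a directory. An illustrative user-level call that goes through
% this search path:
%
%   ?- absolute_file_name(library(lists), [access(read)], F).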
|
||||
'$add_extensions'([Ext|_],File,F) :-
|
||||
'$mk_sure_true_ext'(Ext,NExt),
|
||||
|
@ -222,8 +222,8 @@ system_message(error(permission_error(modify,dynamic_procedure,_), Where)) -->
|
||||
[ 'PERMISSION ERROR- ~w: modifying a dynamic procedure' - [Where] ].
|
||||
system_message(error(permission_error(modify,flag,W), _)) -->
|
||||
[ 'PERMISSION ERROR- cannot modify flag ~w' - [W] ].
|
||||
system_message(error(permission_error(modify,operator,W), _)) -->
|
||||
[ 'PERMISSION ERROR- T cannot declare ~w an operator' - [W] ].
|
||||
system_message(error(permission_error(modify,operator,W), Q)) -->
|
||||
[ 'PERMISSION ERROR- ~w: cannot modify operator ~q' - [Q,W] ].
|
||||
system_message(error(permission_error(modify,dynamic_procedure,F), Where)) -->
|
||||
[ 'PERMISSION ERROR- ~w: modifying dynamic procedure ~w' - [Where,F] ].
|
||||
system_message(error(permission_error(modify,static_procedure,F), Where)) -->
|
||||
|
@ -210,6 +210,18 @@
|
||||
'$signal_def'(sig_alarm, true).
|
||||
|
||||
|
||||
'$signal'(sig_hup).
'$signal'(sig_usr1).
'$signal'(sig_usr2).
'$signal'(sig_pipe).
'$signal'(sig_alarm).
'$signal'(sig_vtalarm).

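% The '$signal'/1 table added here gives on_signal/3 something to enumerate
% when its first argument is unbound: the new clause below backtracks over
% these names and retries with each one bound. For illustration:
%
%   ?- '$signal'(S).
%   S = sig_hup ? ;
%   S = sig_usr1 ? ...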
on_signal(Signal,OldAction,NewAction) :-
|
||||
var(Signal), !,
|
||||
(nonvar(OldAction) -> throw(error(instantiation_error,on_signal/3)) ; true),
|
||||
'$signal'(Signal),
|
||||
on_signal(Signal, OldAction, NewAction).
|
||||
on_signal(Signal,OldAction,default) :-
|
||||
'$reset_signal'(Signal, OldAction).
|
||||
on_signal(Signal,OldAction,Action) :-
|
||||
@ -219,7 +231,7 @@ on_signal(Signal,OldAction,Action) :-
|
||||
Action = (_:Goal),
|
||||
var(Goal), !,
|
||||
'$check_signal'(Signal, OldAction),
|
||||
Action = OldAction.
|
||||
Goal = OldAction.
|
||||
on_signal(Signal,OldAction,Action) :-
|
||||
'$reset_signal'(Signal, OldAction),
|
||||
% 13211-2 speaks only about callable
|
||||
|
131	pl/utils.yap
@ -1,65 +1,80 @@
|
||||
/*************************************************************************
|
||||
* *
|
||||
* YAP Prolog *
|
||||
* *
|
||||
* Yap Prolog was developed at NCCUP - Universidade do Porto *
|
||||
* *
|
||||
* Copyright L.Damas, V.S.Costa and Universidade do Porto 1985-1997 *
|
||||
* *
|
||||
**************************************************************************
|
||||
* *
|
||||
* File: utils.yap *
|
||||
* Last rev: 8/2/88 *
|
||||
* mods: *
|
||||
* comments: Some utility predicates available in yap *
|
||||
* *
|
||||
*************************************************************************/
|
||||
/*************************************************************************
|
||||
* *
|
||||
* YAP Prolog *
|
||||
* *
|
||||
* Yap Prolog was developed at NCCUP - Universidade do Porto *
|
||||
* *
|
||||
* Copyright L.Damas, V.S.Costa and Universidade do Porto 1985-1997 *
|
||||
* *
|
||||
**************************************************************************
|
||||
* *
|
||||
* File: utils.yap *
|
||||
* Last rev: 8/2/88 *
|
||||
* mods: *
|
||||
* comments: Some utility predicates available in yap *
|
||||
* *
|
||||
*************************************************************************/
|
||||
|
||||
op(P,T,V) :-
|
||||
'$check_op'(P,T,V,op(P,T,V)),
|
||||
'$op'(P, T, V).
|
||||
op(P,T,V) :-
|
||||
'$check_op'(P,T,V,op(P,T,V)),
|
||||
'$op'(P, T, V).
|
||||
|
||||
'$check_op'(P,T,V,G) :-
|
||||
(
|
||||
var(P) ->
|
||||
'$do_error'(instantiation_error,G)
|
||||
;
|
||||
var(T) ->
|
||||
'$do_error'(instantiation_error,G)
|
||||
;
|
||||
var(V) ->
|
||||
'$do_error'(instantiation_error,G)
|
||||
;
|
||||
\+ integer(P) ->
|
||||
'$do_error'(type_error(integer,P),G)
|
||||
;
|
||||
\+ atom(T) ->
|
||||
'$do_error'(type_error(atom,T),G)
|
||||
;
|
||||
P < 0 ->
|
||||
'$do_error'(domain_error(operator_priority,P),G)
|
||||
;
|
||||
P > 1200 ->
|
||||
'$do_error'(domain_error(operator_priority,P),G)
|
||||
;
|
||||
\+ '$associativity'(T) ->
|
||||
'$do_error'(domain_error(operator_specifier,T),G)
|
||||
;
|
||||
'$check_op_name'(V,G)
|
||||
).
|
||||
'$check_op'(P,T,V,G) :-
|
||||
(
|
||||
var(P) ->
|
||||
'$do_error'(instantiation_error,G)
|
||||
;
|
||||
var(T) ->
|
||||
'$do_error'(instantiation_error,G)
|
||||
;
|
||||
var(V) ->
|
||||
'$do_error'(instantiation_error,G)
|
||||
;
|
||||
\+ integer(P) ->
|
||||
'$do_error'(type_error(integer,P),G)
|
||||
;
|
||||
\+ atom(T) ->
|
||||
'$do_error'(type_error(atom,T),G)
|
||||
;
|
||||
P < 0 ->
|
||||
'$do_error'(domain_error(operator_priority,P),G)
|
||||
;
|
||||
P > 1200 ->
|
||||
'$do_error'(domain_error(operator_priority,P),G)
|
||||
;
|
||||
\+ '$associativity'(T) ->
|
||||
'$do_error'(domain_error(operator_specifier,T),G)
|
||||
;
|
||||
'$check_op_name'(V,G)
|
||||
).
|
||||
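% Given the checks above, ill-formed operator declarations raise the ISO
% errors named in '$check_op'/4, for example:
%
%   ?- op(1300, xfx, foo).     % domain_error(operator_priority, 1300)
%   ?- op(700, zfz, foo).      % domain_error(operator_specifier, zfz)
%   ?- op(700, xfx, ',').      % permission_error(modify, operator, ',')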
|
||||
'$associativity'(xfx).
|
||||
'$associativity'(xfy).
|
||||
'$associativity'(yfx).
|
||||
'$associativity'(yfy).
|
||||
'$associativity'(xf).
|
||||
'$associativity'(yf).
|
||||
'$associativity'(fx).
|
||||
'$associativity'(fy).
|
||||
'$associativity'(xfx).
|
||||
'$associativity'(xfy).
|
||||
'$associativity'(yfx).
|
||||
'$associativity'(yfy).
|
||||
'$associativity'(xf).
|
||||
'$associativity'(yf).
|
||||
'$associativity'(fx).
|
||||
'$associativity'(fy).
|
||||
|
||||
'$check_op_name'(V,G) :-
|
||||
var(V), !,
|
||||
'$do_error'(instantiation_error,G).
|
||||
'$check_op_name'(V,G) :-
|
||||
var(V), !,
|
||||
'$do_error'(instantiation_error,G).
|
||||
'$check_op_name'(',',G) :- !,
|
||||
'$do_error'(permission_error(modify,operator,','),G).
|
||||
'$check_op_name'('[]',G) :- !,
|
||||
'$do_error'(permission_error(create,operator,'[]'),G).
|
||||
'$check_op_name'('{}',G) :- !,
|
||||
'$do_error'(permission_error(create,operator,'{}'),G).
|
||||
'$check_op_name'('|',G) :- !,
|
||||
G = op(P, T, _),
|
||||
(
|
||||
integer(P),
|
||||
P < 1001
|
||||
;
|
||||
Fix \== xfx, Fix \== xfy, Fix \== yfx, Fix \== yfy
|
||||
),
|
||||
'$do_error'(permission_error(create,operator,'|'),G).
|
||||
'$check_op_name'(V,_) :-
|
||||
atom(V), !.
|
||||
'$check_op_name'(M:A, G) :-
|
||||
|