Merge branch 'master' of yap.dcc.fc.up.pt:yap-6

Conflicts:
	configure.in

commit ba8c66f078

C/agc.c | 1
@@ -187,6 +187,7 @@ AtomAdjust(Atom a)
#define HoldEntryAdjust(P) (P)
#define CodeCharPAdjust(P) (P)
#define CodeVoidPAdjust(P) (P)
#define HaltHookAdjust(P) (P)

#define recompute_mask(dbr)

@@ -1031,6 +1031,7 @@ static InitBinEntry InitBinTab[] = {
  {"\\/", op_or},
  {"#", op_xor},
  {"><", op_xor},
  {"xor", op_xor},
  {"atan2", op_atan2},
  /* C-Prolog exponentiation */
  {"^", op_power},

@@ -505,6 +505,7 @@ X_API Term STD_PROTO(YAP_TermNil,(void));
X_API int STD_PROTO(YAP_AtomGetHold,(Atom));
X_API int STD_PROTO(YAP_AtomReleaseHold,(Atom));
X_API Agc_hook STD_PROTO(YAP_AGCRegisterHook,(Agc_hook));
X_API int STD_PROTO(YAP_HaltRegisterHook,(HaltHookFunc, void *));
X_API char *STD_PROTO(YAP_cwd,(void));
X_API Term STD_PROTO(YAP_OpenList,(int));
X_API Term STD_PROTO(YAP_ExtendList,(Term, Term));

@@ -2957,6 +2958,12 @@ YAP_AGCRegisterHook(Agc_hook hook)
  return old;
}

X_API int
YAP_HaltRegisterHook(HaltHookFunc hook, void * closure)
{
  return Yap_HaltRegisterHook(hook, closure);
}

X_API char *
YAP_cwd(void)
{
C/init.c | 38

@@ -407,7 +407,6 @@ static Opdef Ops[] = {
  {"\\/", yfx, 500},
  {"><", yfx, 500},
  {"#", yfx, 500},
  {"xor", yfx, 400},
  {"rdiv", yfx, 400},
  {"div", yfx, 400},
  {"*", yfx, 400},

@@ -1334,6 +1333,33 @@ Yap_InitWorkspace(UInt Heap, UInt Stack, UInt Trail, UInt Atts, UInt max_table_s
    }
  }

int
Yap_HaltRegisterHook (HaltHookFunc f, void * env)
{
  struct halt_hook *h;

  if (!(h = (struct halt_hook *)Yap_AllocCodeSpace(sizeof(struct halt_hook))))
    return FALSE;
  h->environment = env;
  h->hook = f;
  LOCK(BGL);
  h->next = Yap_HaltHooks;
  Yap_HaltHooks = h;
  UNLOCK(BGL);
  return TRUE;
}

static void
run_halt_hooks(int code)
{
  struct halt_hook *hooke = Yap_HaltHooks;

  while (hooke) {
    hooke->hook(code, hooke->environment);
    hooke = hooke->next;
  }
}

void
Yap_exit (int value)
{
@@ -1341,15 +1367,17 @@ Yap_exit (int value)
  unmap_memory();
#endif /* YAPOR */

  if (! (Yap_PrologMode & BootMode) ) {
#ifdef LOW_PROF
    remove("PROFPREDS");
    remove("PROFILING");
    remove("PROFPREDS");
    remove("PROFILING");
#endif
#if defined MYDDAS_MYSQL || defined MYDDAS_ODBC
    Yap_MYDDAS_delete_all_myddas_structs();
    Yap_MYDDAS_delete_all_myddas_structs();
#endif
    if (! (Yap_PrologMode & BootMode) )
      run_halt_hooks(value);
    Yap_ShutdownLoadForeign();
  }
  exit(value);
}

@@ -6462,6 +6462,9 @@ p_file_base_name (void)
  char *c = RepAtom(at)->StrOfAE;
  Int i = strlen(c);
  while (i && !Yap_dir_separator((int)c[--i]));
  if (Yap_dir_separator((int)c[i])) {
    i++;
  }
  return Yap_unify(ARG2, MkAtomTerm(Yap_LookupAtom(c+i)));
  }
}
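The hunks above introduce a small halt-hook registry: Yap_HaltRegisterHook()
links a (hook, closure) pair onto Yap_HaltHooks, and run_halt_hooks() walks
that list from Yap_exit() just before the process terminates. What follows is
a minimal sketch, not part of the commit, of how a client of the C interface
might use the new YAP_HaltRegisterHook() wrapper (its declaration appears
further down in this diff); the header name, close_log() and
register_log_hook() are illustrative assumptions.

    #include <stdio.h>
    #include "YapInterface.h"   /* YAP C-interface header, assumed name */

    /* Invoked by run_halt_hooks() from Yap_exit(): receives the exit code
       and the closure pointer that was passed at registration time. */
    static void close_log(int exit_code, void *closure)
    {
      FILE *log = (FILE *)closure;
      fprintf(log, "YAP halting with exit code %d\n", exit_code);
      fclose(log);
    }

    /* Returns TRUE on success, FALSE if the hook entry could not be
       allocated (see Yap_HaltRegisterHook() in C/init.c above). */
    int register_log_hook(FILE *log)
    {
      return YAP_HaltRegisterHook(close_log, (void *)log);
    }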
C/ypsocks.c | 60
@ -188,7 +188,6 @@ Yap_init_socks(char *host, long interface_port)
|
||||
struct sockaddr_in soadr;
|
||||
struct in_addr adr;
|
||||
struct hostent *he;
|
||||
struct linger ling; /* For making sockets linger. */
|
||||
|
||||
|
||||
#if USE_SOCKET
|
||||
@ -224,10 +223,22 @@ Yap_init_socks(char *host, long interface_port)
|
||||
return;
|
||||
}
|
||||
|
||||
#if ENABLE_SO_LINGER
|
||||
struct linger ling; /* disables socket lingering. */
|
||||
ling.l_onoff = 1;
|
||||
ling.l_linger = 0;
|
||||
setsockopt(s, SOL_SOCKET, SO_LINGER, (void *) &ling,
|
||||
sizeof(ling));
|
||||
if (setsockopt(s, SOL_SOCKET, SO_LINGER, (void *) &ling,
|
||||
sizeof(ling)) < 0) {
|
||||
#if HAVE_STRERROR
|
||||
Yap_Error(SYSTEM_ERROR, TermNil,
|
||||
"socket_connect/3 (setsockopt_linger: %s)", strerror(socket_errno));
|
||||
#else
|
||||
Yap_Error(SYSTEM_ERROR, TermNil,
|
||||
"socket_connect/3 (setsockopt_linger)");
|
||||
#endif
|
||||
return;
|
||||
}
|
||||
#endif
|
||||
|
||||
r = connect ( s, (struct sockaddr *) &soadr, sizeof(soadr));
|
||||
if (r<0) {
|
||||
@ -760,7 +771,6 @@ p_socket_connect(void)
|
||||
struct hostent *he;
|
||||
struct sockaddr_in saddr;
|
||||
unsigned short int port;
|
||||
struct linger ling; /* For making sockets linger. */
|
||||
|
||||
memset((void *)&saddr,(int) 0, sizeof(saddr));
|
||||
if (IsVarTerm(thost)) {
|
||||
@ -794,19 +804,41 @@ p_socket_connect(void)
|
||||
}
|
||||
saddr.sin_port = htons(port);
|
||||
saddr.sin_family = AF_INET;
|
||||
ling.l_onoff = 1;
|
||||
ling.l_linger = 0;
|
||||
if (setsockopt(fd, SOL_SOCKET, SO_LINGER, (void *) &ling,
|
||||
sizeof(ling)) < 0) {
|
||||
#if ENABLE_SO_LINGER
|
||||
{
|
||||
struct linger ling; /* For making sockets linger. */
|
||||
/* disabled: I see why no reason why we should throw things away by default!! */
|
||||
ling.l_onoff = 1;
|
||||
ling.l_linger = 0;
|
||||
if (setsockopt(fd, SOL_SOCKET, SO_LINGER, (void *) &ling,
|
||||
sizeof(ling)) < 0) {
|
||||
#if HAVE_STRERROR
|
||||
Yap_Error(SYSTEM_ERROR, TermNil,
|
||||
"socket_connect/3 (setsockopt_linger: %s)", strerror(socket_errno));
|
||||
Yap_Error(SYSTEM_ERROR, TermNil,
|
||||
"socket_connect/3 (setsockopt_linger: %s)", strerror(socket_errno));
|
||||
#else
|
||||
Yap_Error(SYSTEM_ERROR, TermNil,
|
||||
"socket_connect/3 (setsockopt_linger)");
|
||||
Yap_Error(SYSTEM_ERROR, TermNil,
|
||||
"socket_connect/3 (setsockopt_linger)");
|
||||
#endif
|
||||
return(FALSE);
|
||||
return FALSE;
|
||||
}
|
||||
}
|
||||
#endif
|
||||
|
||||
{
|
||||
int one = 1; /* code by David MW Powers */
|
||||
|
||||
if (setsockopt(fd, SOL_SOCKET, SO_BROADCAST, (void *)&one, sizeof(one))) {
|
||||
#if HAVE_STRERROR
|
||||
Yap_Error(SYSTEM_ERROR, TermNil,
|
||||
"socket_connect/3 (setsockopt_broadcast: %s)", strerror(socket_errno));
|
||||
#else
|
||||
Yap_Error(SYSTEM_ERROR, TermNil,
|
||||
"socket_connect/3 (setsockopt_broadcast)");
|
||||
#endif
|
||||
return FALSE;
|
||||
}
|
||||
}
|
||||
|
||||
flag = connect(fd,(struct sockaddr *)&saddr, sizeof(saddr));
|
||||
if(flag<0) {
|
||||
#if HAVE_STRERROR
|
||||
@ -816,7 +848,7 @@ p_socket_connect(void)
|
||||
Yap_Error(SYSTEM_ERROR, TermNil,
|
||||
"socket_connect/3 (connect)");
|
||||
#endif
|
||||
return(FALSE);
|
||||
return FALSE;
|
||||
}
|
||||
Yap_UpdateSocketStream(sno, client_socket, af_inet);
|
||||
} else
|
||||
|
H/YapHeap.h | 10

@@ -62,6 +62,16 @@ typedef struct gc_ma_hash_entry_struct {
  struct gc_ma_hash_entry_struct *next;
} gc_ma_hash_entry;

typedef void (*HaltHookFunc)(int, void *);

typedef struct halt_hook {
  void * environment;
  HaltHookFunc hook;
  struct halt_hook *next;
} halt_hook_entry;

int STD_PROTO(Yap_HaltRegisterHook,(HaltHookFunc, void *));

typedef struct atom_hash_entry {
#if defined(YAPOR) || defined(THREADS)
  rwlock_t AERWLock;

@@ -263,6 +263,8 @@
#define Stream Yap_heap_regs->yap_streams

#define Yap_HaltHooks Yap_heap_regs->yap_halt_hook

#define NOfFileAliases Yap_heap_regs->n_of_file_aliases
#define SzOfFileAliases Yap_heap_regs->sz_of_file_aliases
#define FileAliases Yap_heap_regs->file_aliases

@@ -263,6 +263,8 @@
  struct stream_desc *yap_streams;

  struct halt_hook *yap_halt_hook;

  UInt n_of_file_aliases;
  UInt sz_of_file_aliases;
  struct AliasDescS *file_aliases;

@@ -263,6 +263,8 @@
  Yap_heap_regs->yap_streams = NULL;

  Yap_heap_regs->yap_halt_hook = NULL;

  Yap_heap_regs->n_of_file_aliases = 0;
  Yap_heap_regs->sz_of_file_aliases = 0;
  Yap_heap_regs->file_aliases = NULL;
H/rheap.h | 12

@@ -899,6 +899,18 @@ RestoreDBErasedIList(void)
  }
}

static void
RestoreHaltHooks(void)
{
  struct halt_hook *hooke = Yap_HaltHooks = HaltHookAdjust(Yap_HaltHooks);

  while (hooke) {
    hooke->next = HaltHookAdjust(hooke->next);
    hooke = hooke->next;
  }
}

static void
RestoreStreams(void)
{

@@ -263,6 +263,8 @@
  RestoreStreams();

  RestoreHaltHooks();

  RestoreAliases();
H/sshift.h | 10

@@ -610,7 +610,15 @@ CodeVoidPAdjust (void * addr)
  return addr + HDiff;
}

inline EXTERN struct halt_hook *HaltHookAdjust (struct halt_hook *);

inline EXTERN struct halt_hook *
HaltHookAdjust (struct halt_hook * addr)
{
  if (!addr)
    return NULL;
  return (struct halt_hook *) (CharP (addr) + HDiff);
}

inline EXTERN BlockHeader *BlockAdjust (BlockHeader *);

@@ -620,8 +628,6 @@ BlockAdjust (BlockHeader * addr)
  return (BlockHeader *) ((BlockHeader *) (CharP (addr) + HDiff));
}

inline EXTERN yamop *PtoOpAdjust (yamop *);

inline EXTERN yamop *

@@ -577,7 +577,7 @@ install_unix: startup.yss libYap.a
	$(INSTALL) config.h $(DESTDIR)$(INCLUDEDIR)/config.h
	$(INSTALL) parms.h $(DESTDIR)$(INCLUDEDIR)/parms.h
	@ENABLE_CPLINT@ (cd packages/cplint; $(MAKE) install)
	@ENABLE_CPLINT@ (cd packages/cplint/simplecuddLPADs; $(MAKE) install)
	@ENABLE_CPLINT@ (cd packages/cplint/approx/simplecuddLPADs; $(MAKE) install)
	@USE_CUDD@ (cd packages/ProbLog/simplecudd; $(MAKE) install)
configure (vendored) | 46
@ -1435,7 +1435,7 @@ Optional Features:
|
||||
--enable-win64 compile YAP for win64
|
||||
--enable-april compile Yap to support April ILP system
|
||||
--enable-dlcompat use dlcompat library for dynamic loading on Mac OS X
|
||||
--enable-cplint=DIR enable the cplint library using the glu library in DIR/lib
|
||||
--enable-cplint=DIR enable the cplint library using the CUDD software in DIR
|
||||
--enable-myddas[=DIR] enable the MYDDAS library
|
||||
--enable-myddas-stats enable the MYDDAS library statistics support
|
||||
--enable-myddas-top-level enable the MYDDAS top-level support to MySQL
|
||||
@ -6820,17 +6820,17 @@ fi
|
||||
then
|
||||
LIBS="$LIBS -lnsl"
|
||||
fi
|
||||
LDFLAGS="$LDFLAGS -L\$(abs_top_builddir) -Wl,-R,\$(DESTDIR)\$(LIBDIR) -Wl,-R,\$(DESTDIR)\$(YAPLIBDIR)"
|
||||
LDFLAGS="$LDFLAGS -L\$(abs_top_builddir) -Wl,-R,\$(LIBDIR) -Wl,-R,\$(YAPLIBDIR)"
|
||||
DYNYAPLIB=libYap."$SO"
|
||||
if test "$dynamic_loading" = "yes"
|
||||
then
|
||||
YAPLIB_LD="\$(CC) -shared"
|
||||
EXTRA_LIBS_FOR_DLLS="$EXTRA_LIBS_FOR_DLLS -L\$(abs_top_builddir) -lYap -Wl,-R,\$(DESTDIR)\$(YAPLIBDIR) -Wl,-R,\$(DESTDIR)\$(LIBDIR)"
|
||||
EXTRA_LIBS_FOR_DLLS="$EXTRA_LIBS_FOR_DLLS -L\$(abs_top_builddir) -lYap -Wl,-R,\$(YAPLIBDIR) -Wl,-R,\$(LIBDIR)"
|
||||
INSTALL_ENV="LD_LIBRARY_PATH=\$(DESTDIR)\$(LIBDIR): YAPSHAREDIR=\$(DESTDIR)\$(SHAREDIR) YAPLIBDIR=\$(DESTDIR)\$(YAPLIBDIR)"
|
||||
PRE_INSTALL_ENV="LD_LIBRARY_PATH=\$(abs_top_builddir)"
|
||||
else
|
||||
YAPLIB_LD="\$(CC)"
|
||||
EXTRA_LIBS_FOR_DLLS="$EXTRA_LIBS_FOR_DLLS -Wl,-R,\$(DESTDIR)\$(YAPLIBDIR) -Wl,-R,\$(DESTDIR)\$(LIBDIR)"
|
||||
EXTRA_LIBS_FOR_DLLS="$EXTRA_LIBS_FOR_DLLS -Wl,-R,\$(YAPLIBDIR) -Wl,-R,\$(LIBDIR)"
|
||||
INSTALL_ENV="YAPSHAREDIR=\$(DESTDIR)\$(SHAREDIR) YAPLIBDIR=\$(DESTDIR)\$(YAPLIBDIR)"
|
||||
PRE_INSTALL_ENV=""
|
||||
fi
|
||||
@ -6844,9 +6844,9 @@ fi
|
||||
fi
|
||||
SO="o"
|
||||
INSTALL_DLLS=""
|
||||
LDFLAGS="$LDFLAGS -L\$(abs_top_builddir) -Wl,-R,\$(DESTDIR)\$(LIBDIR) -Wl,-R,\$(DESTDIR)\$(YAPLIBDIR)"
|
||||
LDFLAGS="$LDFLAGS -L\$(abs_top_builddir) -Wl,-R,\$(LIBDIR) -Wl,-R,\$(YAPLIBDIR)"
|
||||
DYNYAPLIB=libYap."$SO"
|
||||
EXTRA_LIBS_FOR_DLLS="$EXTRA_LIBS_FOR_DLLS -L\$(abs_top_builddir) -lYap -Wl,-R,\$(DESTDIR)\$(YAPLIBDIR) -Wl,-R,\$(DESTDIR)\$(LIBDIR)"
|
||||
EXTRA_LIBS_FOR_DLLS="$EXTRA_LIBS_FOR_DLLS -L\$(abs_top_builddir) -lYap -Wl,-R,\$(YAPLIBDIR) -Wl,-R,\$(LIBDIR)"
|
||||
PRE_INSTALL_ENV="LD_LIBRARY_PATH=\$(abs_top_builddir)"
|
||||
INSTALL_ENV="LD_LIBRARY_PATH=\$(DESTDIR)\$(LIBDIR): YAPSHAREDIR=\$(DESTDIR)\$(SHAREDIR) YAPLIBDIR=\$(DESTDIR)\$(YAPLIBDIR)"
|
||||
;;
|
||||
@ -6887,9 +6887,9 @@ fi
|
||||
$as_echo "#define MPI_AVOID_REALLOC 1" >>confdefs.h
|
||||
|
||||
fi
|
||||
LDFLAGS="$LDFLAGS -L\$(abs_top_builddir) -Wl,-R,\$(DESTDIR)\$(LIBDIR) -Wl,-R,\$(DESTDIR)\$(YAPLIBDIR)"
|
||||
LDFLAGS="$LDFLAGS -L\$(abs_top_builddir) -Wl,-R,\$(LIBDIR) -Wl,-R,\$(YAPLIBDIR)"
|
||||
DYNYAPLIB=libYap."$SO"
|
||||
EXTRA_LIBS_FOR_DLLS="$EXTRA_LIBS_FOR_DLLS -L\$(abs_top_builddir) -lYap -Wl,-R,\$(DESTDIR)\$(YAPLIBDIR) -Wl,-R,\$(DESTDIR)\$(LIBDIR)"
|
||||
EXTRA_LIBS_FOR_DLLS="$EXTRA_LIBS_FOR_DLLS -L\$(abs_top_builddir) -lYap -Wl,-R,\$(YAPLIBDIR) -Wl,-R,\$(LIBDIR)"
|
||||
PRE_INSTALL_ENV="LD_LIBRARY_PATH=\$(abs_top_builddir)"
|
||||
INSTALL_ENV="LD_LIBRARY_PATH=\$(DESTDIR)\$(LIBDIR): YAPSHAREDIR=\$(DESTDIR)\$(SHAREDIR) YAPLIBDIR=\$(DESTDIR)\$(YAPLIBDIR)"
|
||||
;;
|
||||
@ -6900,9 +6900,9 @@ fi
|
||||
# SO="a"
|
||||
#SHLIB_LD="\$(srcdir)/../../ldAix /bin/ld -bhalt:4 -bM:SRE -bE:lib.exp -H512 -T512 -bnoentry"
|
||||
#INSTALL_DLLS=""
|
||||
LDFLAGS="$LDFLAGS -L\$(abs_top_builddir) -Wl,-R,\$(DESTDIR)\$(LIBDIR) -Wl,-R,\$(DESTDIR)\$(YAPLIBDIR)"
|
||||
LDFLAGS="$LDFLAGS -L\$(abs_top_builddir) -Wl,-R,\$(LIBDIR) -Wl,-R,\$(YAPLIBDIR)"
|
||||
DYNYAPLIB=libYap."$SO"
|
||||
EXTRA_LIBS_FOR_DLLS="$EXTRA_LIBS_FOR_DLLS -L\$(abs_top_builddir) -lYap -Wl,-R,\$(DESTDIR)\$(YAPLIBDIR) -Wl,-R,\$(DESTDIR)\$(LIBDIR)"
|
||||
EXTRA_LIBS_FOR_DLLS="$EXTRA_LIBS_FOR_DLLS -L\$(abs_top_builddir) -lYap -Wl,-R,\$(YAPLIBDIR) -Wl,-R,\$(LIBDIR)"
|
||||
PRE_INSTALL_ENV="LD_LIBRARY_PATH=\$(abs_top_builddir)"
|
||||
INSTALL_ENV="LD_LIBRARY_PATH=\$(DESTDIR)\$(LIBDIR): YAPSHAREDIR=\$(DESTDIR)\$(SHAREDIR) YAPLIBDIR=\$(DESTDIR)\$(YAPLIBDIR)"
|
||||
;;
|
||||
@ -6956,9 +6956,9 @@ fi
|
||||
SHLIB_LD="ld -shared -expect_unresolved '*'"
|
||||
DO_SECOND_LD=""
|
||||
fi
|
||||
LDFLAGS="$LDFLAGS -L\$(abs_top_builddir) -Wl,-R,\$(DESTDIR)\$(LIBDIR) -Wl,-R,\$(DESTDIR)\$(YAPLIBDIR)"
|
||||
LDFLAGS="$LDFLAGS -L\$(abs_top_builddir) -Wl,-R,\$(LIBDIR) -Wl,-R,\$(YAPLIBDIR)"
|
||||
DYNYAPLIB=libYap."$SO"
|
||||
EXTRA_LIBS_FOR_DLLS="$EXTRA_LIBS_FOR_DLLS -L\$(abs_top_builddir) -lYap -Wl,-R,\$(DESTDIR)\$(YAPLIBDIR) -Wl,-R,\$(DESTDIR)\$(LIBDIR)"
|
||||
EXTRA_LIBS_FOR_DLLS="$EXTRA_LIBS_FOR_DLLS -L\$(abs_top_builddir) -lYap -Wl,-R,\$(YAPLIBDIR) -Wl,-R,\$(LIBDIR)"
|
||||
PRE_INSTALL_ENV="LD_LIBRARY_PATH=\$(abs_top_builddir)"
|
||||
INSTALL_ENV="LD_LIBRARY_PATH=\$(DESTDIR)\$(LIBDIR): YAPSHAREDIR=\$(DESTDIR)\$(SHAREDIR) YAPLIBDIR=\$(DESTDIR)\$(YAPLIBDIR)"
|
||||
;;
|
||||
@ -6970,9 +6970,9 @@ fi
|
||||
SHLIB_LD="ld -n32 -shared -rdata_shared"
|
||||
SHLIB_CXX_LD="$SHLIB_LD"
|
||||
INSTALL_DLLS=""
|
||||
LDFLAGS="$LDFLAGS -L\$(abs_top_builddir) -Wl,-R,\$(DESTDIR)\$(LIBDIR) -Wl,-R,\$(DESTDIR)\$(YAPLIBDIR)"
|
||||
LDFLAGS="$LDFLAGS -L\$(abs_top_builddir) -Wl,-R,\$(LIBDIR) -Wl,-R,\$(YAPLIBDIR)"
|
||||
DYNYAPLIB=libYap."$SO"
|
||||
EXTRA_LIBS_FOR_DLLS="$EXTRA_LIBS_FOR_DLLS -L\$(abs_top_builddir) -lYap -Wl,-R,\$(DESTDIR)\$(YAPLIBDIR) -Wl,-R,\$(DESTDIR)\$(LIBDIR)"
|
||||
EXTRA_LIBS_FOR_DLLS="$EXTRA_LIBS_FOR_DLLS -L\$(abs_top_builddir) -lYap -Wl,-R,\$(YAPLIBDIR) -Wl,-R,\$(LIBDIR)"
|
||||
PRE_INSTALL_ENV="LD_LIBRARY_PATH=\$(abs_top_builddir)"
|
||||
INSTALL_ENV="LD_LIBRARY_PATH=\$(DESTDIR)\$(LIBDIR): YAPSHAREDIR=\$(DESTDIR)\$(SHAREDIR) YAPLIBDIR=\$(DESTDIR)\$(YAPLIBDIR)"
|
||||
;;
|
||||
@ -7033,12 +7033,12 @@ fi
|
||||
then
|
||||
SHLIB_LD="$CC -dynamiclib"
|
||||
SHLIB_CXX_LD="$CXX -dynamiclib"
|
||||
EXTRA_LIBS_FOR_DLLS="$EXTRA_LIBS_FOR_DLLS -L\$(abs_top_builddir) -lYap -Wl,-install_name,\$(DESTDIR)\$(YAPLIBDIR)/\$@"
|
||||
EXTRA_LIBS_FOR_DLLS="$EXTRA_LIBS_FOR_DLLS -L\$(abs_top_builddir) -lYap -Wl,-install_name,\$(YAPLIBDIR)/\$@"
|
||||
INSTALL_ENV="DYLD_LIBRARY_PATH=\$(DESTDIR)\$(LIBDIR): YAPSHAREDIR=\$(DESTDIR)\$(SHAREDIR) YAPLIBDIR=\$(DESTDIR)\$(YAPLIBDIR)"
|
||||
else
|
||||
SHLIB_LD="$CC -dynamiclib -Wl,-undefined,dynamic_lookup"
|
||||
SHLIB_CXX_LD="$CXX -dynamiclib -Wl,-undefined,dynamic_lookup"
|
||||
EXTRA_LIBS_FOR_DLLS="$EXTRA_LIBS_FOR_DLLS -Wl,-install_name,\$(DESTDIR)\$(YAPLIBDIR)/\$@"
|
||||
EXTRA_LIBS_FOR_DLLS="$EXTRA_LIBS_FOR_DLLS -Wl,-install_name,\$(YAPLIBDIR)/\$@"
|
||||
INSTALL_ENV="YAPSHAREDIR=\$(DESTDIR)\$(SHAREDIR) YAPLIBDIR=\$(DESTDIR)\$(YAPLIBDIR)"
|
||||
fi
|
||||
if test "$ac_cv_c_compiler_gnu" = "yes"
|
||||
@ -7070,9 +7070,9 @@ fi
|
||||
SO="so"
|
||||
INSTALL_DLLS=""
|
||||
fi
|
||||
LDFLAGS="$LDFLAGS -L\$(abs_top_builddir) -Wl,-R,\$(DESTDIR)\$(LIBDIR)"
|
||||
LDFLAGS="$LDFLAGS -L\$(abs_top_builddir) -Wl,-R,\$(LIBDIR)"
|
||||
DYNYAPLIB=libYap."$SO"
|
||||
EXTRA_LIBS_FOR_DLLS="$EXTRA_LIBS_FOR_DLLS -L\$(abs_top_builddir) -Wl,-R,\$(DESTDIR)\$(YAPLIBDIR)"
|
||||
EXTRA_LIBS_FOR_DLLS="$EXTRA_LIBS_FOR_DLLS -L\$(abs_top_builddir) -Wl,-R,\$(YAPLIBDIR)"
|
||||
PRE_INSTALL_ENV="LD_LIBRARY_PATH=\$(abs_top_builddir)"
|
||||
INSTALL_ENV="LD_LIBRARY_PATH=\$(DESTDIR)\$(LIBDIR): YAPSHAREDIR=\$(DESTDIR)\$(SHAREDIR) YAPLIBDIR=\$(DESTDIR)\$(YAPLIBDIR)"
|
||||
;;
|
||||
@ -7086,16 +7086,16 @@ fi
|
||||
DO_SECOND_LD=""
|
||||
SO="so"
|
||||
INSTALL_DLLS=""
|
||||
LDFLAGS="$LDFLAGS -L\$(abs_top_builddir) -Wl,-R,\$(DESTDIR)\$(LIBDIR) -Wl,-R,\$(DESTDIR)\$(YAPLIBDIR)"
|
||||
LDFLAGS="$LDFLAGS -L\$(abs_top_builddir) -Wl,-R,\$(LIBDIR) -Wl,-R,\$(YAPLIBDIR)"
|
||||
DYNYAPLIB=libYap."$SO"
|
||||
if test "$dynamic_loading" = "yes"
|
||||
then
|
||||
YAPLIB_LD="\$(CC)"
|
||||
EXTRA_LIBS_FOR_DLLS="$EXTRA_LIBS_FOR_DLLS -L\$(abs_top_builddir) -lYap -Wl,-R,\$(DESTDIR)\$(YAPLIBDIR) -Wl,-R,\$(DESTDIR)\$(LIBDIR)"
|
||||
EXTRA_LIBS_FOR_DLLS="$EXTRA_LIBS_FOR_DLLS -L\$(abs_top_builddir) -lYap -Wl,-R,\$(YAPLIBDIR) -Wl,-R,\$(LIBDIR)"
|
||||
PRE_INSTALL_ENV="LD_LIBRARY_PATH=\$(abs_top_builddir)"
|
||||
INSTALL_ENV="LD_LIBRARY_PATH=\$(DESTDIR)\$(LIBDIR): YAPSHAREDIR=\$(DESTDIR)\$(SHAREDIR) YAPLIBDIR=\$(DESTDIR)\$(YAPLIBDIR)"
|
||||
else
|
||||
EXTRA_LIBS_FOR_DLLS="$EXTRA_LIBS_FOR_DLLS -Wl,-R,\$(DESTDIR)\$(YAPLIBDIR) -Wl,-R,\$(DESTDIR)\$(LIBDIR)"
|
||||
EXTRA_LIBS_FOR_DLLS="$EXTRA_LIBS_FOR_DLLS -Wl,-R,\$(YAPLIBDIR) -Wl,-R,\$(LIBDIR)"
|
||||
PRE_INSTALL_ENV=""
|
||||
YAPLIB_LD="\$(CC) -shared"
|
||||
INSTALL_ENV="YAPSHAREDIR=\$(DESTDIR)\$(SHAREDIR) YAPLIBDIR=\$(DESTDIR)\$(YAPLIBDIR)"
|
||||
@ -7183,9 +7183,9 @@ fi
|
||||
then
|
||||
LIBS="$LIBS -lnsl"
|
||||
fi
|
||||
LDFLAGS="$LDFLAGS -L\$(abs_top_builddir) -Wl,-R,\$(DESTDIR)\$(LIBDIR) -Wl,-R,\$(DESTDIR)\$(YAPLIBDIR)"
|
||||
LDFLAGS="$LDFLAGS -L\$(abs_top_builddir) -Wl,-R,\$(LIBDIR) -Wl,-R,\$(YAPLIBDIR)"
|
||||
DYNYAPLIB=libYap."$SO"
|
||||
EXTRA_LIBS_FOR_DLLS="$EXTRA_LIBS_FOR_DLLS -L\$(abs_top_builddir) -lYap -Wl,-R,\$(DESTDIR)\$(YAPLIBDIR) -Wl,-R,\$(DESTDIR)\$(LIBDIR)"
|
||||
EXTRA_LIBS_FOR_DLLS="$EXTRA_LIBS_FOR_DLLS -L\$(abs_top_builddir) -lYap -Wl,-R,\$(YAPLIBDIR) -Wl,-R,\$(LIBDIR)"
|
||||
PRE_INSTALL_ENV="LD_LIBRARY_PATH=\$(abs_top_builddir)"
|
||||
INSTALL_ENV="LD_LIBRARY_PATH=\$(DESTDIR)\$(LIBDIR): YAPSHAREDIR=\$(DESTDIR)\$(SHAREDIR) YAPLIBDIR=\$(DESTDIR)\$(YAPLIBDIR)"
|
||||
;;
docs/yap.tex | 794
@ -99,6 +99,7 @@ us to include his text in this document.
|
||||
* CLPR:: The CLP(R) System
|
||||
* CHR:: The CHR System
|
||||
* Logtalk:: The Logtalk Object-Oriented System
|
||||
* MYDDAS:: The YAP Database Interface
|
||||
* Threads:: Thread Library
|
||||
* Parallelism:: Running in Or-Parallel
|
||||
* Tabling:: Storing Intermediate Solutions of programs
|
||||
@ -1383,7 +1384,7 @@ anonymous variables.
|
||||
|
||||
Punctuation tokens consist of one of the following characters:
|
||||
@example
|
||||
@center ( ) , [ ] @{ @} |
|
||||
( ) , [ ] @{ @} |
|
||||
@end example
|
||||
|
||||
These characters are used to group terms.
|
||||
@ -3345,7 +3346,7 @@ Also refer to @code{copy_term/2}.
|
||||
True when @var{List} is a proper list. That is, @var{List}
|
||||
is bound to the empty list (nil) or a term with functor '.' and arity 2.
|
||||
|
||||
@item ?@var{Term1} =@= ?@var{Term2}
|
||||
@item ?@var{Term1} =@@= ?@var{Term2}
|
||||
@findex =@=/2
|
||||
@syindex =@=/2
|
||||
@cnindex =@=/2
|
||||
@ -4083,6 +4084,8 @@ Integer bitwise conjunction.
|
||||
Integer bitwise disjunction.
|
||||
|
||||
@item @var{X} # @var{Y}
|
||||
@item @var{X} >< @var{Y}
|
||||
@item xor(@var{X} , @var{Y})
|
||||
Integer bitwise exclusive disjunction.
|
||||
|
||||
@item @var{X} << @var{Y}
|
||||
@ -4466,7 +4469,9 @@ Defines extensions. Current mapping: @code{txt} implies @code{['']},
|
||||
@code{['.so', '']}, @code{qlf} implies @code{['.qlf', '']} and
|
||||
@code{directory} implies @code{['']}. The file-type @code{source}
|
||||
is an alias for @code{prolog} for compatibility to SICStus Prolog.
|
||||
See also @code{prolog_file_type/2}.
|
||||
See also @code{prolog_file_type/2}. Notice also that this predicate only
|
||||
returns non-directories, unless the option @code{file_type(directory)} is
|
||||
specified, or unless @code{access(none)}.
|
||||
|
||||
@item file_errors(@code{fail}/@code{error})
|
||||
If @code{error} (default), throw and @code{existence_error} exception
|
||||
@ -9514,16 +9519,6 @@ matrices are multi-dimensional and compact. In contrast to static
|
||||
arrays, these arrays are allocated in the stack. Matrices are available
|
||||
by loading the library @code{library(matrix)}.
|
||||
|
||||
Accessing the matlab dynamic libraries can be complicated. In Linux
|
||||
machines, to use this interface, you may have to set the environment
|
||||
variable @t{LD_LIBRARY_PATH}. Next, follows an example using bash in a
|
||||
64-bit Linux PC:
|
||||
@example
|
||||
export LD_LIBRARY_PATH=''$MATLAB_HOME"/sys/os/glnxa64:''$MATLAB_HOME"/bin/glnxa64:''$LD_LIBRARY_PATH"
|
||||
@end example
|
||||
where @code{MATLAB_HOME} is the directory where matlab is installed
|
||||
at. Please replace @code{ax64} with @code{x86} on a 32-bit PC.
|
||||
|
||||
Notice that the functionality in this library is only partial. Please
|
||||
contact the YAP maintainers if you require extra functionality.
|
||||
|
||||
@ -9828,6 +9823,16 @@ actually use it, you need to install YAP calling @code{configure} with
|
||||
the @code{--with-matlab=DIR} option, and you need to call
|
||||
@code{use_module(library(lists))} command.
|
||||
|
||||
Accessing the matlab dynamic libraries can be complicated. In Linux
|
||||
machines, to use this interface, you may have to set the environment
|
||||
variable @t{LD_LIBRARY_PATH}. Next, follows an example using bash in a
|
||||
64-bit Linux PC:
|
||||
@example
|
||||
export LD_LIBRARY_PATH=''$MATLAB_HOME"/sys/os/glnxa64:''$MATLAB_HOME"/bin/glnxa64:''$LD_LIBRARY_PATH"
|
||||
@end example
|
||||
where @code{MATLAB_HOME} is the directory where matlab is installed
|
||||
at. Please replace @code{ax64} with @code{x86} on a 32-bit PC.
|
||||
|
||||
@table @code
|
||||
|
||||
@item start_matlab(+@var{Options})
|
||||
@ -12888,6 +12893,7 @@ Extensions to Traditional Prolog
|
||||
* Attributed Variables:: Using attributed Variables
|
||||
* CLPR:: The CLP(R) System
|
||||
* Logtalk:: The Logtalk Object-Oriented system
|
||||
* MYDDAS:: The MYDDAS Database Interface package
|
||||
* Threads:: Thread Library
|
||||
* Parallelism:: Running in Or-Parallel
|
||||
* Tabling:: Storing Intermediate Solutions of programs
|
||||
@ -13701,7 +13707,7 @@ attributes from other known solvers/modules via the module prefix in
|
||||
|
||||
@include chr.tex
|
||||
|
||||
@node Logtalk, Threads, CHR, Extensions
|
||||
@node Logtalk, MYDDAS, CHR, Extensions
|
||||
@chapter Logtalk
|
||||
@cindex Logtalk
|
||||
|
||||
@ -13711,7 +13717,757 @@ systems or by using the @code{Logtalk - YAP} shortcut in the Logtalk
|
||||
program group in the Start Menu on Windows systems. For more information
|
||||
please see the URL @url{http://logtalk.org/}.
|
||||
|
||||
@node Threads, Parallelism, Logtalk, Extensions
|
||||
@node MYDDAS, Threads, Logtalk, Extensions
|
||||
@chapter MYDDAS
|
||||
@cindex MYDDAS
|
||||
|
||||
The MYDDAS database project was developed within a FCT project aiming at
|
||||
the development of a highly efficient deductive database system, based
|
||||
on the coupling of the MySQL relational database system with the Yap
|
||||
Prolog system. MYDDAS was later expanded to support the ODBC interface.
|
||||
|
||||
@menu
|
||||
Subnodes of MYDDAS
|
||||
* Requirements and Installation Guide::
|
||||
* MYDDAS Architecture::
|
||||
* Loading MYDDAS::
|
||||
* Connecting to and disconnecting from a Database Server::
|
||||
* Accessing a Relation::
|
||||
* View Level Interface ::
|
||||
* Accessing Tables in Data Sources Using SQL::
|
||||
* Insertion of Rows::
|
||||
* Types of Attributes::
|
||||
* Number of Fields::
|
||||
* Describing a Relation::
|
||||
* Enumerating Relations::
|
||||
* The MYDDAS MySQL Top Level::
|
||||
* Other MYDDAS Properties::
|
||||
@end menu
|
||||
|
||||
@node Requirements and Installation Guide, MYDDAS Architecture, , MYDDAS
|
||||
@section Requirements and Installation Guide
|
||||
|
||||
Next, we describe how to use YAP with the MYDDAS System. The
|
||||
use of this system depends entirely on the MySQL development libraries
|
||||
or the ODBC development libraries. At least one of these development
|
||||
libraries must be installed on the computer system, otherwise MYDDAS
|
||||
will not compile. The MySQL development libraries from MySQL 3.23 and
|
||||
above are known to work. We recommend using MySQL rather than ODBC,
|
||||
but it is possible to have both options installed at the same time,
without any problem. The MYDDAS system automatically
controls the two options. Currently, MYDDAS is known to compile without
|
||||
problems in Linux. The usage of this system on Windows has not been
|
||||
tested yet. MYDDAS must be enabled at configure time. This can be done
|
||||
with the following options:
|
||||
|
||||
@table @code
|
||||
|
||||
@item --enable-myddas
|
||||
This option will detect which development libraries are installed on the computer system, MySQL, ODBC or both, and will compile the Yap system with support for the libraries it detects;
|
||||
@item --enable-myddas-stats
|
||||
This option is only available in MySQL. It includes code to get
|
||||
statistics from the MYDDAS system;
|
||||
@item --enable-top-level
|
||||
This option is only available in MySQL. It enables the option to interact with the MySQL server in
|
||||
two different ways: as if we were on the MySQL Client Shell, and as if
|
||||
we were using Datalog.
|
||||
@end table
|
||||
|
||||
@node MYDDAS Architecture, Loading MYDDAS, Requirements and Installation Guide, MYDDAS
|
||||
@section MYDDAS Architecture
|
||||
|
||||
The system includes four main blocks that are put together through the
|
||||
MYDDAS interface: the Yap Prolog compiler, the MySQL database system, an
|
||||
ODBC layer and a Prolog to SQL compiler. Current effort is put on the
|
||||
MySQL interface rather than on the ODBC interface. If you want to use
|
||||
the full power of the MYDDAS interface we recommend you to use a MySQL
|
||||
database. Other databases, such as Oracle, PostGres or Microsoft SQL
|
||||
Server, can be interfaced through the ODBC layer, but with limited
|
||||
performance and features support.
|
||||
|
||||
The main structure of the MYDDAS interface is simple. Prolog queries
|
||||
involving database goals are translated to SQL using the Prolog to SQL
|
||||
compiler; then the SQL expression is sent to the database system, which
|
||||
returns the set of tuples satisfying the query; and finally those tuples
|
||||
are made available to the Prolog engine as terms. For recursive queries
|
||||
involving database goals, the YapTab tabling engine provides the
|
||||
necessary support for an efficient evaluation of such queries.
|
||||
|
||||
An important aspect of the MYDDAS interface is that for the programmer
|
||||
the use of predicates which are defined in database relations is
|
||||
completely transparent. An example of this transparent support is the
|
||||
Prolog cut operator, which has exactly the same behaviour from
|
||||
predicates defined in the Prolog program source code, or from predicates
|
||||
defined in database as relations.
|
||||
|
||||
@node Loading MYDDAS, Connecting to and disconnecting from a Database Server, MYDDAS Architecture, MYDDAS
|
||||
@section Loading MYDDAS
|
||||
|
||||
Begin by starting YAP and loading the library
|
||||
@code{use_module(library(myddas))}. This library already includes the
|
||||
Prolog to SQL Compiler described in [2] and [1]. In MYDDAS this compiler
|
||||
has been extended to support further constructs which allow a more
|
||||
efficient SQL translation.
|
||||
|
||||
@node Connecting to and disconnecting from a Database Server, Accessing a Relation, Loading MYDDAS, MYDDAS
|
||||
@section Connecting to and disconnecting from a Database Server
|
||||
|
||||
|
||||
@table @code
|
||||
@item db open(+,+,+,+,+).
|
||||
@findex db_open/5
|
||||
@snindex db_open/5
|
||||
@cnindex db_open/5
|
||||
|
||||
@item db open(+,+,+,+).
|
||||
@findex db_open/4
|
||||
@snindex db_open/4
|
||||
@cnindex db_open/4
|
||||
|
||||
@item db close(+).
|
||||
@findex db_close/1
|
||||
@snindex db_close/1
|
||||
@cnindex db_close/1
|
||||
|
||||
@item db_close.
|
||||
|
||||
@end table
|
||||
|
||||
Assuming the MySQL server is running and we have an account, we can
|
||||
login to MySQL by invoking @code{db_open/5} as one of the following:
|
||||
@example
|
||||
?- db_open(mysql,Connection,Host/Database,User,Password).
|
||||
?- db_open(mysql,Connection,Host/Database/Port,User,Password).
|
||||
?- db_open(mysql,Connection,Host/Database/UnixSocket,User,Password).
|
||||
?- db_open(mysql,Connection,Host/Database/Port/UnixSocket,User,Password).
|
||||
|
||||
@end example
|
||||
If the login is successful, there will be a response of @code{yes}. For
|
||||
instance:
|
||||
@example
|
||||
?- db_open(mysql,con1,localhost/guest_db,guest,'').
|
||||
@end example
|
||||
uses the MySQL native interface, selected by the first argument, to open
|
||||
a connection identified by the @code{con1} atom, to an instance of a
|
||||
MySQL server running on host @code{localhost}, using database @code{guest_db}
and user @code{guest} with an empty password. To disconnect from the @code{con1}
|
||||
connection we use:
|
||||
@example
|
||||
?- db_close(con1).
|
||||
@end example
|
||||
Alternatively, we can use @code{db_open/4} and @code{db_close/0,} without an argument
|
||||
to identify the connection. In this case the default connection is used,
|
||||
with atom @code{myddas}. Thus using
|
||||
@example
|
||||
?- db_open(mysql,localhost/guest_db,guest,'').
|
||||
?- db_close.
|
||||
@end example
|
||||
or
|
||||
@example
|
||||
?- db_open(mysql,myddas,localhost/guest_db,guest,'').
|
||||
?- db_close(myddas).
|
||||
@end example
|
||||
is exactly the same.
|
||||
|
||||
MYDDAS also supports ODBC. To connect to a database using an ODBC driver
|
||||
you must have configured on your system a ODBC DSN. If so, the @code{db_open/4}
|
||||
and @code{db_open/5} have the following mode:
|
||||
@example
|
||||
?- db_open(odbc,Connection,ODBC_DSN,User,Password).
|
||||
?- db_open(odbc,ODBC_DSN,User,Password).
|
||||
@end example
|
||||
|
||||
For instance, if you do @code{db_open(odbc,odbc_dsn,guest,'')}. it will connect
|
||||
to a database, through ODBC, using the definitions on the @code{odbc_dsn} DSN
|
||||
configured on the system. The user will be the user @code{guest} with no
|
||||
password.
|
||||
|
||||
@node Accessing a Relation, View Level Interface , Connecting to and disconnecting from a Database Server, MYDDAS
|
||||
@section Accessing a Relation
|
||||
|
||||
@table @code
|
||||
@item db_import(+Conn,+RelationName,+PredName).
|
||||
@findex db_import/3
|
||||
@snindex db_import/3
|
||||
@cnindex db_import/3
|
||||
|
||||
@item db_import(+RelationName,+PredName).
|
||||
@findex db_import/2
|
||||
@snindex db_import/2
|
||||
@cnindex db_import/2
|
||||
@end table
|
||||
|
||||
Assuming you have access permission for the relation you wish to import,
|
||||
you can use @code{db_import/3} or @code{db_import/2} as:
|
||||
@example
|
||||
?- db_import(Conn,RelationName,PredName).
|
||||
?- db_import(RelationName,PredName).
|
||||
@end example
|
||||
where @var{RelationName}, is the name of
|
||||
relation we wish to access, @var{PredName} is the name of the predicate we
|
||||
wish to use to access the relation from YAP. @var{Conn}, is the connection
|
||||
identifier, which again can be dropped so that the default myddas connection
|
||||
is used. For instance, if we want to access the relation phonebook,
|
||||
using the predicate @code{phonebook/3} we write:
|
||||
@example
|
||||
?- db_import(con1,phonebook,phonebook).
|
||||
yes
|
||||
?- phonebook(Letter,Name,Number).
|
||||
Letter = 'D',
|
||||
Name = 'John Doe',
|
||||
Number = 123456789 ?
|
||||
yes
|
||||
@end example
|
||||
Backtracking can then be used to retrieve the next row
|
||||
of the relation phonebook. Records with particular field values may be
|
||||
selected in the same way as in Prolog. (In particular, no mode
|
||||
specification for database predicates is required). For instance:
|
||||
@example
|
||||
?- phonebook(Letter,'John Doe',Letter).
|
||||
Letter = 'D',
|
||||
Number = 123456789 ?
|
||||
yes
|
||||
@end example
|
||||
generates the query @example
|
||||
SELECT A.Letter , 'John Doe' , A.Number
|
||||
FROM 'phonebook' A
|
||||
WHERE A.Name = 'John Doe';
|
||||
@end example
|
||||
|
||||
@node View Level Interface, Accessing Tables in Data Sources Using SQL, Accessing a Relation, MYDDAS
|
||||
@section View Level Interface
|
||||
|
||||
@table @code
|
||||
@item db view(+,+,+).
|
||||
@findex db_view/3
|
||||
@snindex db_view/3
|
||||
@cnindex db_view/3
|
||||
|
||||
@item db view(+,+).
|
||||
@findex db_view/2
|
||||
@snindex db_view/2
|
||||
@cnindex db_view/2
|
||||
@end table
|
||||
If we import a database relation, such as an edge relation representing the edges of a directed graph, through
|
||||
@example
|
||||
?- db_import('Edge',edge).
|
||||
yes
|
||||
@end example
|
||||
and we then write a query to retrieve all the direct cycles in the
|
||||
graph, such as
|
||||
@example
|
||||
?- edge(A,B), edge(B,A).
|
||||
A = 10,
|
||||
B = 20 ?
|
||||
@end example
|
||||
this is clearly inefficient [3], because of relation-level
|
||||
access. Relation-level access means that a separate SQL query will be
|
||||
generated for every goal in the body of the clause. For the second
|
||||
@code{edge/2} goal, a SQL query is generated using the variable bindings that
|
||||
result from the first @code{edge/2} goal execution. If the second
|
||||
@code{edge/2} goal
|
||||
fails, or if alternative solutions are demanded, backtracking access the
|
||||
next tuple for the first @code{edge/2} goal and another SQL query will be
|
||||
generated for the second @code{edge/2} goal. The generation of this large
|
||||
number of queries and the communication overhead with the database
|
||||
system for each of them, makes the relation-level approach inefficient.
|
||||
To solve this problem the view level interface can be used for the
|
||||
definition of rules whose bodies includes only imported database
|
||||
predicates. One can use the view level interface through the predicates
|
||||
@code{db_view/3} and @code{db_view/2}:
|
||||
@example
|
||||
?- db_view(Conn,PredName(Arg_1,...,Arg_n),DbGoal).
|
||||
?- db_view(PredName(Arg_1,...,Arg_n),DbGoal).
|
||||
@end example
|
||||
All arguments are standard Prolog terms. @var{Arg1} through @var{Argn}
|
||||
define the attributes to be retrieved from the database, while
|
||||
@var{DbGoal} defines the selection restrictions and join
|
||||
conditions. @var{Conn} is the connection identifier, which again can be
|
||||
dropped. Calling predicate @code{PredName/n} will retrieve database
|
||||
tuples using a single SQL query generated for the @var{DbGoal}. We next show
|
||||
an example of a view definition for the direct cycles discussed
|
||||
above. Assuming the declaration:
|
||||
@example
|
||||
?- db_import('Edge',edge).
|
||||
yes
|
||||
@end example
|
||||
we
|
||||
write:@example
|
||||
?- db_view(direct_cycle(A,B),(edge(A,B), edge(B,A))).
|
||||
yes
|
||||
?- direct_cycle(A,B).
|
||||
A = 10,
|
||||
B = 20 ?
|
||||
@end example
|
||||
This call generates the SQL
|
||||
statement: @example
|
||||
SELECT A.attr1 , A.attr2
|
||||
FROM Edge A , Edge B
|
||||
WHERE B.attr1 = A.attr2 AND B.attr2 = A.attr1;
|
||||
@end example
|
||||
|
||||
Backtracking, as in relational level interface, can be used to retrieve the next row of the view.
|
||||
The view interface also supports aggregate function predicates such as
|
||||
@code{sum}, @code{avg}, @code{count}, @code{min} and @code{max}. For
|
||||
instance:
|
||||
@example
|
||||
?- db_view(count(X),(X is count(B, B^edge(10,B)))).
|
||||
@end example
|
||||
generates the query :
|
||||
@example
|
||||
SELECT COUNT(A.attr2)
|
||||
FROM Edge A WHERE A.attr1 = 10;
|
||||
@end example
|
||||
|
||||
To know how to use db @code{view/3}, please refer to Draxler's Prolog to
|
||||
SQL Compiler Manual.
|
||||
|
||||
@node Accessing Tables in Data Sources Using SQL, Insertion of Rows, View Level Interface , MYDDAS
|
||||
@section Accessing Tables in Data Sources Using SQL
|
||||
|
||||
@table @code
|
||||
@item db_sql(+,+,?).
|
||||
@findex db_sql/3
|
||||
@snindex db_sql/3
|
||||
@cnindex db_sql/3
|
||||
|
||||
@item db_sql(+,?).
|
||||
@findex db_sql/2
|
||||
@snindex db_sql/2
|
||||
@cnindex db_sql/2
|
||||
@end table
|
||||
|
||||
It is also possible to explicitly send a SQL query to the database server using
|
||||
@example
|
||||
?- db_sql(Conn,SQL,List).
|
||||
?- db_sql(SQL,List).
|
||||
@end example
|
||||
where @var{SQL} is an arbitrary SQL expression, and @var{List} is a list
|
||||
holding the first tuple of result set returned by the server. The result
|
||||
set can also be navigated through backtracking.
|
||||
|
||||
Example:
|
||||
@example
|
||||
?- db_sql('SELECT * FROM phonebook',LA).
|
||||
LA = ['D','John Doe',123456789] ?
|
||||
@end example
|
||||
|
||||
@node Insertion of Rows, Types of Attributes, Accessing Tables in Data Sources Using SQL, MYDDAS
|
||||
@section Insertion of Rows
|
||||
|
||||
@table @code
|
||||
@item db_assert(+,+).
|
||||
@findex db_assert/2
|
||||
@snindex db_assert/2
|
||||
@cnindex db_assert/2
|
||||
|
||||
@item db_assert(+).
|
||||
@findex db_assert/1
|
||||
@snindex db_assert/1
|
||||
@cnindex db_assert/1
|
||||
|
||||
@end table
|
||||
|
||||
Assuming you have imported the related base table using
|
||||
@code{db_import/2} or @code{db_import/3}, you can insert to that table
|
||||
by using @code{db_assert/2} predicate any given fact.
|
||||
@example
|
||||
?- db_assert(Conn,Fact).
|
||||
?- db_assert(Fact).
|
||||
@end example
|
||||
The second argument must be declared with all of its arguments bound to
|
||||
constants. For example assuming @code{helloWorld} is imported through
|
||||
@code{db_import/2}:
|
||||
@example
|
||||
?- db_import('Hello World',helloWorld).
|
||||
yes
|
||||
?- db_assert(helloWorld('A' ,'Ana',31)).
|
||||
yes
|
||||
@end example
|
||||
This, would generate the following query
|
||||
@example
|
||||
INSERT INTO helloWorld
|
||||
VALUES ('A','Ana',31)
|
||||
@end example
|
||||
which would insert into the helloWorld, the following row:
|
||||
@code{A,Ana,31}. If we want to insert @code{NULL} values into the
|
||||
relation, we call @code{db_assert/2} with an uninstantiated variable in
the database imported predicate. For example, the following query on
|
||||
the YAP-prolog system:
|
||||
|
||||
@example
|
||||
?- db_assert(helloWorld('A',NULL,31)).
|
||||
yes
|
||||
@end example
|
||||
|
||||
Would insert the row: @code{A,null value,31} into the relation
|
||||
@code{Hello World}, assuming that the second column allows null values.
|
||||
|
||||
@table @code
|
||||
@item db insert(+,+,+).
|
||||
@findex db_insert/3
|
||||
@snindex db_insert/3
|
||||
@cnindex db_insert/3
|
||||
|
||||
@item db insert(+,+).
|
||||
@findex db_insert/2
|
||||
@snindex db_insert/2
|
||||
@cnindex db_insert/2
|
||||
@end table
|
||||
|
||||
This predicate would create a new database predicate, which will insert
|
||||
any given tuple into the database.
|
||||
@example
|
||||
?- db_insert(Conn,RelationName,PredName).
|
||||
?- db_insert(RelationName,PredName).
|
||||
@end example
|
||||
This would create a new predicate with name @var{PredName}, that will
|
||||
insert tuples into the relation @var{RelationName}. @var{Conn} is the connection
|
||||
identifier. For example, if we wanted to insert the new tuple
|
||||
@code{('A',null,31)} into the relation @code{Hello World}, we do:
|
||||
@example
|
||||
?- db_insert('Hello World',helloWorldInsert).
|
||||
yes
|
||||
?- helloWorldInsert('A',NULL,31).
|
||||
yes
|
||||
@end example
|
||||
|
||||
@node Types of Attributes, Number of Fields, Insertion of Rows, MYDDAS
|
||||
@section Types of Attributes
|
||||
|
||||
|
||||
@table @code
|
||||
@item db_get_attributes_types(+,+,?).
|
||||
@findex db_get_attributes_types/3
|
||||
@snindex db_get_attributes_types/3
|
||||
@cnindex db_get_attributes_types/3
|
||||
|
||||
@item db_get_attributes_types(+,?).
|
||||
@findex db_get_attributes_types/2
|
||||
@snindex db_get_attributes_types/2
|
||||
@cnindex db_get_attributes_types/2
|
||||
|
||||
@end table
|
||||
|
||||
The prototype for this predicate is the following:
|
||||
@example
|
||||
?- db_get_attributes_types(Conn,RelationName,ListOfFields).
|
||||
?- db_get_attributes_types(RelationName,ListOfFields).
|
||||
@end example
|
||||
|
||||
You can use the
|
||||
predicate @code{db_get_attributes_types/2} or @code{db_get_attributes_types/3}, to
|
||||
know what are the names and attributes types of the fields of a given
|
||||
relation. For example:
|
||||
@example
|
||||
?- db_get_attributes_types(myddas,'Hello World',LA).
|
||||
LA = ['Number',integer,'Name',string,'Letter',string] ?
|
||||
yes
|
||||
@end example
|
||||
where @t{Hello World} is the name of the relation and @t{myddas} is the
|
||||
connection identifier.
|
||||
|
||||
@node Number of Fields, Describing a Relation, Types of Attributes, MYDDAS
|
||||
@section Number of Fields
|
||||
|
||||
@table @code
|
||||
@item db_number_of_fields(+,?).
|
||||
@findex db_number_of_fields/2
|
||||
@snindex db_number_of_fields/2
|
||||
@cnindex db_number_of_fields/2
|
||||
|
||||
@item db_number_of_fields(+,+,?).
|
||||
@findex db_number_of_fields/3
|
||||
@snindex db_number_of_fields/3
|
||||
@cnindex db_number_of_fields/3
|
||||
@end table
|
||||
|
||||
The prototype for this
|
||||
predicate is the following:
|
||||
@example
|
||||
?- db_number_of_fields(Conn,RelationName,Arity).
|
||||
?- db_number_of_fields(RelationName,Arity).
|
||||
@end example
|
||||
You can use the predicate @code{db_number_of_fields/2} or
|
||||
@code{db_number_of_fields/3} to know the arity of a given
|
||||
relation. Example:
|
||||
@example
|
||||
?- db_number_of_fields(myddas,'Hello World',Arity).
|
||||
Arity = 3 ?
|
||||
yes
|
||||
@end example
|
||||
where @code{Hello World} is the name of the
|
||||
relation and @code{myddas} is the connection identifier.
|
||||
|
||||
@node Describing a Relation, Enumerating Relations, Number of Fields, MYDDAS
|
||||
@section Describing a Relation
|
||||
|
||||
@table @code
|
||||
@item db_datalog_describe(+,+).
|
||||
@findex db_datalog_describe/2
|
||||
@snindex db_datalog_describe/2
|
||||
@cnindex db_datalog_describe/2
|
||||
|
||||
@item db_datalog_describe(+).
|
||||
@findex db_datalog_describe/1
|
||||
@snindex db_datalog_describe/1
|
||||
@cnindex db_datalog_describe/1
|
||||
@end table
|
||||
|
||||
|
||||
The @code{db_datalog_describe/2} predicate does not really return any
|
||||
value. It simply prints to the screen the result of the MySQL describe
|
||||
command, the same way as @code{DESCRIBE} in the MySQL prompt would.
|
||||
@example
|
||||
?- db_datalog_describe(myddas,'Hello World').
|
||||
+----------+----------+------+-----+---------+-------+
|
||||
| Field | Type | Null | Key | Default | Extra |
|
||||
+----------+----------+------+-----+---------+-------+
|
||||
+ Number | int(11) | YES | | NULL | |
|
||||
+ Name | char(10) | YES | | NULL | |
|
||||
+ Letter | char(1) | YES | | NULL | |
|
||||
+----------+----------+------+-----+---------+-------+
|
||||
yes
|
||||
@end example
|
||||
|
||||
@table @code
|
||||
@item db_describe(+,+).
|
||||
@findex db_describe/2
|
||||
@snindex db_describe/2
|
||||
@cnindex db_describe/2
|
||||
|
||||
@item db_describe(+).
|
||||
@findex db_describe/1
|
||||
@snindex db_describe/1
|
||||
@cnindex db_describe/1
|
||||
|
||||
@end table
|
||||
|
||||
The @code{db_describe/3} predicate does the same action as
|
||||
@code{db_datalog_describe/2} predicate but with one major
|
||||
difference. The results are returned by backtracking. For example, the
|
||||
last query:
|
||||
@example
|
||||
?- db_describe(myddas,'Hello World',Term).
|
||||
Term = tableInfo('Number',int(11),'YES','',null(0),'') ? ;
|
||||
Term = tableInfo('Name',char(10),'YES','',null(1),'') ? ;
|
||||
Term = tableInfo('Letter',char(1),'YES','',null(2),'') ? ;
|
||||
no
|
||||
@end example
|
||||
|
||||
@node Enumerating Relations, The MYDDAS MySQL Top Level, Describing a Relation, MYDDAS
|
||||
@section Enumerating Relations
|
||||
|
||||
@table @code
|
||||
@item db_datalog_show_tables(+).
|
||||
@item db_datalog_show_tables
|
||||
@end table
|
||||
|
||||
|
||||
If we need to know what relations exists in a given MySQL Schema, we can use
|
||||
the @code{db_datalog_show_tables/1} predicate. As @t{db_datalog_describe/2},
|
||||
it does not returns any value, but instead prints to the screen the result of the
|
||||
@code{SHOW TABLES} command, the same way as it would be in the MySQL prompt.
|
||||
@example
|
||||
?- db_datalog_show_tables(myddas).
|
||||
+-----------------+
|
||||
| Tables_in_guest |
|
||||
+-----------------+
|
||||
| Hello World |
|
||||
+-----------------+
|
||||
yes
|
||||
@end example
|
||||
|
||||
@table @code
|
||||
@item db_show_tables(+, ?).
|
||||
@findex db_show_tables/2
|
||||
@snindex db_show_tables/2
|
||||
@cnindex db_show_tables/2
|
||||
|
||||
@item db_show_tables(?)
|
||||
@findex db_show_tables/1
|
||||
@snindex db_show_tables/1
|
||||
@cnindex db_show_tables/1
|
||||
|
||||
@end table
|
||||
|
||||
The @code{db_show_tables/2} predicate does the same action as
|
||||
@code{db_show_tables/1} predicate but with one major difference. The
|
||||
results are returned by backtracking. For example, given the last query:
|
||||
@example
|
||||
?- db_show_tables(myddas,Table).
|
||||
Table = table('Hello World') ? ;
|
||||
no
|
||||
@end example
|
||||
|
||||
@node The MYDDAS MySQL Top Level, Other MYDDAS Properties, Enumerating Relations, MYDDAS
|
||||
@section The MYDDAS MySQL Top Level
|
||||
|
||||
|
||||
@table @code
|
||||
@item db_top_level(+,+,+,+,+).
|
||||
@findex db_top_level/5
|
||||
@snindex db_top_level/5
|
||||
@cnindex db_top_level/5
|
||||
|
||||
@item db_top_level(+,+,+,+).
|
||||
@findex db_top_level/4
|
||||
@snindex db_top_level/4
|
||||
@cnindex db_top_level/4
|
||||
|
||||
@end table
|
||||
|
||||
Through MYDDAS it is also possible to access the MySQL Database Server in
the same way as with the mysql client. In this mode, it is possible to query the
SQL server by just using the standard SQL language. This mode is exactly the
same as the standard mysql client. We can use this
mode by invoking @code{db_top_level/5} as one of the following:
|
||||
@example
|
||||
?- db_top_level(mysql,Connection,Host/Database,User,Password).
|
||||
?- db_top_level(mysql,Connection,Host/Database/Port,User,Password).
|
||||
?- db_top_level(mysql,Connection,Host/Database/UnixSocket,User,Password).
|
||||
?- db_top_level(mysql,Connection,Host/Database/Port/UnixSocket,User,Password).
|
||||
@end example
|
||||
|
||||
Usage is similar to that described for the @code{db_open/5} predicate
discussed above. If the login is successful, the prompt of
the mysql client will be used automatically. For example:
|
||||
@example
|
||||
?- db_top_level(mysql,con1,localhost/guest_db,guest,'').
|
||||
@end example
|
||||
opens a
|
||||
connection identified by the @code{con1} atom, to an instance of a MySQL server
|
||||
running on host @code{localhost}, using database @code{guest_db} and user @code{guest} with
an empty password. After this it is possible to use MYDDAS as the mysql
|
||||
client.
|
||||
@example
|
||||
?- db_top_level(mysql,con1,localhost/guest_db,guest,'').
|
||||
Reading table information for completion of table and column names
|
||||
You can turn off this feature to get a quicker startup with -A
|
||||
|
||||
Welcome to the MySQL monitor.
|
||||
Commands end with ; or \g.
|
||||
|
||||
Your MySQL connection id is 4468 to server version: 4.0.20
|
||||
Type 'help;' or '\h' for help.
|
||||
Type '\c' to clear the buffer.
|
||||
mysql> exit
|
||||
Bye
|
||||
yes
|
||||
?-
|
||||
@end example
|
||||
|
||||
@node Other MYDDAS Properties, , The MYDDAS MySQL Top Level , MYDDAS
|
||||
@section Other MYDDAS Properties
|
||||
|
||||
|
||||
@table @code
|
||||
@item db_verbose(+).
|
||||
@item db_top_level(+,+,+,+).
|
||||
@end table
|
||||
|
||||
When we ask a question to YAP, using a predicate asserted by
|
||||
@code{db_import/3}, or by @code{db_view/3}, this will generate a SQL
|
||||
@code{QUERY}. If we want to see that query, we must do this at a given
|
||||
point in our session on YAP.
|
||||
@example
|
||||
?- db_verbose(1).
|
||||
yes
|
||||
?-
|
||||
@end example
|
||||
If we want to
|
||||
disable this feature, we must call the @code{db_verbose/1} predicate with the value 0.
|
||||
|
||||
@table @code
|
||||
@item db_module(?).
|
||||
@findex db_module/1
|
||||
@snindex db_module/1
|
||||
@cnindex db_module/1
|
||||
|
||||
@end table
|
||||
|
||||
When we create a new database predicate, by using @code{db_import/3},
|
||||
@code{db_view/3} or @code{db_insert/3}, that predicate will be asserted
|
||||
by default on the @code{user} module. If we want to change this value, we can
|
||||
use the @code{db_module/1} predicate to do so.
|
||||
@example
|
||||
?- db_module(lists).
|
||||
yes
|
||||
?-
|
||||
@end example
|
||||
By executing this predicate, all of the predicates asserted by the
|
||||
predicates enumerated earlier will be created in the @code{lists} module.
If we want to put back the default value, we can manually set the
value back to @code{user}. Example:
|
||||
@example
|
||||
?- db_module(user).
|
||||
yes
|
||||
?-
|
||||
@end example
|
||||
|
||||
We can also see in what module the predicates are being asserted by doing:
|
||||
@example
|
||||
?- db_module(X).
|
||||
X=user
|
||||
yes
|
||||
?-
|
||||
@end example
|
||||
|
||||
@table @code
|
||||
@item db_my_result_set(?).
|
||||
@findex db_my_result_set/1
|
||||
@snindex db_my_result_set/1
|
||||
@cnindex db_my_result_set/1
|
||||
|
||||
@end table
|
||||
|
||||
|
||||
The MySQL C API permits two modes for transferring the data generated by
|
||||
a query to the client, in our case YAP. The first mode, and the default
|
||||
mode used by the MYDDAS-MySQL, is to store the result. This mode copies all the
|
||||
information generated to the client side.@example
|
||||
?- db_my_result_set(X).
|
||||
X=store_result
|
||||
yes
|
||||
@end example
|
||||
|
||||
|
||||
The other mode that we can use is @code{use_result}. This one uses the result
set created directly from the server. If we want to use this mode, we
|
||||
simply do
|
||||
@example
|
||||
?- db_my_result_set(use_result).
|
||||
yes
|
||||
@end example
|
||||
After this command, all
|
||||
of the database predicates will use @code{use_result} by default. We can change
|
||||
this by doing again @code{db_my_result_set(store_result)}.
|
||||
|
||||
@table @code
|
||||
@item db_my_sql_mode(+Conn,?SQL_Mode).
|
||||
@findex db_my_sql_mode/2
|
||||
@snindex db_my_sql_mode/2
|
||||
@cnindex db_my_sql_mode/2
|
||||
|
||||
@item db_my_sql_mode(?SQL_Mode).
|
||||
@findex db_my_sql_mode/1
|
||||
@snindex db_my_sql_mode/1
|
||||
@cnindex db_my_sql_mode/1
|
||||
|
||||
@end table
|
||||
|
||||
The MySQL server allows the user to change the SQL mode. This can be
|
||||
very useful for debugging purposes. For example, if we want the MySQL server
not to ignore INSERT statement warnings but, instead of taking
action, to report an error, we could use the following SQL mode.
|
||||
@example
|
||||
?- db_my_sql_mode(traditional).
yes
|
||||
@end example
|
||||
You can see the available SQL Modes at the MySQL homepage at
|
||||
@url{http://www.mysql.org}.
|
||||
|
||||
@node Threads, Parallelism, MYDDAS, Extensions
|
||||
@chapter Threads
|
||||
|
||||
YAP implements a SWI-Prolog compatible multithreading
|
||||
@ -15939,6 +16695,14 @@ only two boolean flags are accepted: @code{YAPC_ENABLE_GC} and
|
||||
@code{YAPC_ENABLE_AGC}. The first enables/disables the standard garbage
|
||||
collector, the second does the same for the atom garbage collector.`
|
||||
|
||||
@item @code{int} YAP_HaltRegisterHook(@code{YAP_halt_hook f, void *closure})
|
||||
@findex YAP_HaltRegisterHook (C-Interface function)
|
||||
|
||||
Register the function @var{f} to be called if YAP is halted. The
|
||||
function is called with two arguments: the exit code of the process (@code{0}
|
||||
if this cannot be determined on your operating system) and the closure
|
||||
argument @var{closure}.
|
||||
@c See also @code{at_halt/1}.
|
||||
@end table
|
@ -470,6 +470,9 @@ extern X_API int PROTO(YAP_AtomReleaseHold,(YAP_Atom));
|
||||
/* void YAP_AtomReleaseHold(YAP_Atom) */
|
||||
extern X_API YAP_agc_hook PROTO(YAP_AGCRegisterHook,(YAP_agc_hook));
|
||||
|
||||
/* void YAP_AtomReleaseHold(YAP_Atom) */
|
||||
extern X_API int PROTO(YAP_HaltRegisterHook,(YAP_halt_hook, void *));
|
||||
|
||||
/* char *YAP_cwd(void) */
|
||||
extern X_API char * PROTO(YAP_cwd,(void));
|
||||
|
||||
|
@ -176,6 +176,8 @@ typedef struct {
|
||||
|
||||
typedef int (*YAP_agc_hook)(void *_Atom);
|
||||
|
||||
typedef void (*YAP_halt_hook)(int exit_code, void *closure);
|
||||
|
||||
/********* execution mode ***********************/
|
||||
|
||||
typedef enum
|
||||
|
@ -620,7 +620,7 @@ static int p_itrie_loadFromStream(void) {
|
||||
/* check args */
|
||||
if (!YAP_IsVarTerm(arg_itrie))
|
||||
return FALSE;
|
||||
if (!(file = (FILE*) Yap_FileDescriptorFromStream(arg_stream)))
|
||||
if (!(file = (FILE*) YAP_FileDescriptorFromStream(arg_stream)))
|
||||
return FALSE;
|
||||
|
||||
/* load itrie */
|
||||
|
@ -45,8 +45,7 @@
|
||||
wdgraph_del_vertex/3,
|
||||
wdgraph_edges/2,
|
||||
wdgraph_neighbours/3,
|
||||
wdgraph_wneighbours/3,
|
||||
wdgraph_symmetric_closure/2
|
||||
wdgraph_wneighbours/3
|
||||
]).
|
||||
|
||||
:- use_module(library(rbtrees),
|
||||
|
@ -3249,6 +3249,7 @@ X_API void (*PL_signal(int sig, void (*func)(int)))(int)
|
||||
|
||||
X_API void PL_on_halt(void (*f)(int, void *), void *closure)
|
||||
{
|
||||
Yap_HaltRegisterHook((HaltHookFunc)f,closure);
|
||||
}
|
||||
|
||||
void Yap_swi_install(void);
|
||||
|
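The hunk above also wires the SWI-Prolog emulation layer into the same
registry: PL_on_halt() now simply forwards its function pointer and closure
to Yap_HaltRegisterHook(). Below is a hedged sketch of foreign code using it,
matching the signature shown in the hunk; the header name and the cleanup()
routine are assumptions for illustration, not part of the commit.

    #include <stdio.h>
    #include <SWI-Prolog.h>   /* SWI-compatible interface header, assumed */

    /* Runs at halt time with the exit code and the registered closure. */
    static void cleanup(int exit_code, void *closure)
    {
      fprintf(stderr, "halting (%d), releasing resource %p\n",
              exit_code, closure);
    }

    void install_cleanup(void *resource)
    {
      PL_on_halt(cleanup, resource);
    }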
@ -295,6 +295,9 @@ struct operator_entry *op_list OpList =NULL OpListAdjust
|
||||
/* stream array */
|
||||
struct stream_desc *yap_streams Stream =NULL RestoreStreams()
|
||||
|
||||
/* halt hooks */
|
||||
struct halt_hook *yap_halt_hook Yap_HaltHooks =NULL RestoreHaltHooks()
|
||||
|
||||
/* stream aliases */
|
||||
UInt n_of_file_aliases NOfFileAliases =0 void
|
||||
UInt sz_of_file_aliases SzOfFileAliases =0 void
|
||||
|
@ -48,6 +48,8 @@ PROBLOG_PROGRAMS= \
|
||||
$(srcdir)/problog/timer.yap \
|
||||
$(srcdir)/problog/tptree.yap \
|
||||
$(srcdir)/problog/variable_elimination.yap \
|
||||
$(srcdir)/problog/print_learning.yap \
|
||||
$(srcdir)/problog/utils_learning.yap \
|
||||
$(srcdir)/problog/variables.yap
|
||||
|
||||
PROBLOG_EXAMPLES = \
|
||||
|
@ -310,16 +310,18 @@


% general yap modules
:- ensure_loaded(library(system)).
:- use_module(library(system), [delete_file/2, shell/2]).

:- problog_define_flag(optimization, problog_flag_validate_atom, 'optimization algorithm [local/global]', global, dtproblog).
:- problog_define_flag(forest_type, problog_flag_validate_atom, 'type of BDD forest [dependent/independent]', dependent, dtproblog).
:- initialization((
problog_define_flag(optimization, problog_flag_validate_atom, 'optimization algorithm [local/global]', global, dtproblog),
problog_define_flag(forest_type, problog_flag_validate_atom, 'type of BDD forest [dependent/independent]', dependent, dtproblog)
)).

init_dtproblog :-
problog_control(off,find_decisions),
problog_control(off,internal_strategy).
problog_control(off,find_decisions),
problog_control(off,internal_strategy).

:- init_dtproblog.
:- initialization(init_dtproblog).

:- op( 550, yfx, :: ).

@ -359,7 +361,7 @@ get_ground_strategy(_,never).
% Internal strategy representation
% for NON-GROUND strategies
% e.g. 1 :: market(guy) for ? :: market(P)
:- dynamic non_ground_strategy/2.
:- dynamic(non_ground_strategy/2).

% Get Strategy
strategy(_,_,_) :-
@ -413,7 +415,7 @@ set_strategy([Term|R]) :-
set_ground_strategy(ID2,LogProb)
;
copy_term(Decision, Decision2),
assert(non_ground_strategy(Decision2,LogProb))
assertz(non_ground_strategy(Decision2,LogProb))
),
set_strategy(R).

@ -230,7 +230,7 @@
|
||||
problog_kbest_save/6,
|
||||
problog_max/3,
|
||||
problog_exact/3,
|
||||
problog_exact_save/5,
|
||||
problog_exact_save/5,
|
||||
problog_montecarlo/3,
|
||||
problog_dnf_sampling/3,
|
||||
problog_answers/2,
|
||||
@ -296,32 +296,31 @@
|
||||
above/2]).
|
||||
|
||||
:- style_check(all).
|
||||
|
||||
:- yap_flag(unknown,error).
|
||||
|
||||
:- set_prolog_flag(to_chars_mode,quintus).
|
||||
|
||||
% general yap modules
|
||||
:- ensure_loaded(library(charsio)).
|
||||
:- ensure_loaded(library(lists)).
|
||||
:- ensure_loaded(library(terms)).
|
||||
:- ensure_loaded(library(random)).
|
||||
:- ensure_loaded(library(system)).
|
||||
:- ensure_loaded(library(rbtrees)).
|
||||
:- ensure_loaded(library(ordsets)).
|
||||
:- use_module(library(charsio)).
|
||||
:- use_module(library(lists)).
|
||||
:- use_module(library(terms)).
|
||||
:- use_module(library(random)). % PM doesn't seem to be used!
|
||||
:- use_module(library(system)).
|
||||
:- use_module(library(rbtrees)). % PM doesn't seem to be used!
|
||||
:- use_module(library(ordsets), [list_to_ord_set/2, ord_insert/3, ord_union/3]).
|
||||
|
||||
% problog related modules
|
||||
:- ensure_loaded('problog/variables').
|
||||
:- ensure_loaded('problog/extlists').
|
||||
:- ensure_loaded('problog/flags').
|
||||
:- ensure_loaded('problog/print').
|
||||
:- ensure_loaded('problog/os').
|
||||
:- ensure_loaded('problog/tptree').
|
||||
:- ensure_loaded('problog/tabling').
|
||||
:- ensure_loaded('problog/sampling').
|
||||
:- ensure_loaded('problog/intervals').
|
||||
:- ensure_loaded('problog/mc_DNF_sampling').
|
||||
:- catch(ensure_loaded('problog/variable_elimination'),_,true).
|
||||
:- use_module('problog/variables').
|
||||
:- use_module('problog/extlists').
|
||||
:- use_module('problog/flags').
|
||||
:- use_module('problog/print').
|
||||
:- use_module('problog/os').
|
||||
:- use_module('problog/tptree').
|
||||
:- use_module('problog/tabling').
|
||||
:- use_module('problog/sampling').
|
||||
:- use_module('problog/intervals').
|
||||
:- use_module('problog/mc_DNF_sampling').
|
||||
:- use_module('problog/variable_elimination').
|
||||
|
||||
% op attaching probabilities to facts
|
||||
:- op( 550, yfx, :: ).
|
||||
@ -333,56 +332,56 @@
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
|
||||
% global over all inference methods, internal use only
|
||||
:- dynamic problog_predicate/2.
|
||||
:- dynamic problog_continuous_predicate/3.
|
||||
:- dynamic(problog_predicate/2).
|
||||
:- dynamic(problog_continuous_predicate/3).
|
||||
% global over all inference methods, exported
|
||||
:- dynamic tunable_fact/2.
|
||||
:- dynamic non_ground_fact/1.
|
||||
:- dynamic continuous_fact/1.
|
||||
%:- dynamic problog_dir/1.
|
||||
:- dynamic(tunable_fact/2).
|
||||
:- dynamic(non_ground_fact/1).
|
||||
:- dynamic(continuous_fact/1).
|
||||
%:- dynamic(problog_dir/1).
|
||||
% global, manipulated via problog_control/2
|
||||
:- dynamic up/0.
|
||||
:- dynamic limit/0.
|
||||
:- dynamic mc/0.
|
||||
:- dynamic remember/0.
|
||||
:- dynamic exact/0. % Theo tabling
|
||||
:- dynamic find_decisions/0.
|
||||
:- dynamic internal_strategy/0.
|
||||
:- dynamic(up/0).
|
||||
:- dynamic(limit/0).
|
||||
:- dynamic(mc/0).
|
||||
:- dynamic(remember/0).
|
||||
:- dynamic(exact/0). % Theo tabling
|
||||
:- dynamic(find_decisions/0).
|
||||
:- dynamic(internal_strategy/0).
|
||||
% local to problog_delta
|
||||
:- dynamic low/2.
|
||||
:- dynamic up/2.
|
||||
:- dynamic stopDiff/1.
|
||||
:- dynamic(low/2).
|
||||
:- dynamic(up/2).
|
||||
:- dynamic(stopDiff/1).
|
||||
% local to problog_kbest
|
||||
:- dynamic current_kbest/3.
|
||||
:- dynamic(current_kbest/3).
|
||||
% local to problog_max
|
||||
:- dynamic max_probability/1.
|
||||
:- dynamic max_proof/1.
|
||||
:- dynamic(max_probability/1).
|
||||
:- dynamic(max_proof/1).
|
||||
% local to problog_montecarlo
|
||||
:- dynamic mc_prob/1.
|
||||
:- dynamic(mc_prob/1).
|
||||
% local to problog_answers
|
||||
:- dynamic answer/1.
|
||||
:- dynamic(answer/1).
|
||||
% to keep track of the groundings for non-ground facts
|
||||
:- dynamic grounding_is_known/2.
|
||||
:- dynamic(grounding_is_known/2).
|
||||
|
||||
% for decisions
|
||||
:- dynamic decision_fact/2.
|
||||
:- dynamic(decision_fact/2).
|
||||
|
||||
% for fact where the proabability is a variable
|
||||
:- dynamic dynamic_probability_fact/1.
|
||||
:- dynamic dynamic_probability_fact_extract/2.
|
||||
:- dynamic(dynamic_probability_fact/1).
|
||||
:- dynamic(dynamic_probability_fact_extract/2).
|
||||
|
||||
% for storing continuous parts of proofs (Hybrid ProbLog)
|
||||
:- dynamic hybrid_proof/4.
|
||||
:- dynamic hybrid_proof_disjoint/4.
|
||||
:- dynamic(hybrid_proof/3, hybrid_proof/4).
|
||||
:- dynamic(hybrid_proof_disjoint/4).
|
||||
|
||||
% ProbLog files declare prob. facts as P::G
|
||||
% and this module provides the predicate X::Y to iterate over them
|
||||
:- multifile '::'/2.
|
||||
:- multifile('::'/2).
|
||||
|
||||
|
||||
% directory where problogbdd executable is located
|
||||
% automatically set during loading -- assumes it is in same place as this file (problog.yap)
|
||||
:- getcwd(PD), set_problog_path(PD).
|
||||
:- initialization((getcwd(PD), set_problog_path(PD))).
|
||||
|
||||
|
||||
|
||||
@ -395,29 +394,28 @@
|
||||
% - factor used to decrease threshold for next level, NewMin=Factor*OldMin (saved also in log-space)
|
||||
%%%%%%%%%%%%
|
||||
|
||||
:- problog_define_flag(first_threshold, problog_flag_validate_indomain_0_1_open, 'starting threshold iterative deepening', 0.1, inference).
|
||||
:- problog_define_flag(last_threshold, problog_flag_validate_indomain_0_1_open, 'stopping threshold iterative deepening', 1e-30, inference, flags:last_threshold_handler).
|
||||
:- problog_define_flag(id_stepsize, problog_flag_validate_indomain_0_1_close, 'threshold shrinking factor iterative deepening', 0.5, inference, flags:id_stepsize_handler).
|
||||
:- initialization((
|
||||
problog_define_flag(first_threshold, problog_flag_validate_indomain_0_1_open, 'starting threshold iterative deepening', 0.1, inference),
|
||||
problog_define_flag(last_threshold, problog_flag_validate_indomain_0_1_open, 'stopping threshold iterative deepening', 1e-30, inference, flags:last_threshold_handler),
|
||||
problog_define_flag(id_stepsize, problog_flag_validate_indomain_0_1_close, 'threshold shrinking factor iterative deepening', 0.5, inference, flags:id_stepsize_handler)
|
||||
)).
|
||||
|
||||
%%%%%%%%%%%%
|
||||
% prune check stops derivations if they use a superset of facts already known to form a proof
|
||||
% (very) costly test, can be switched on/off here (This is obsolete as it is not included in implementation)
|
||||
%%%%%%%%%%%%
|
||||
|
||||
:- problog_define_flag(prunecheck, problog_flag_validate_switch, 'stop derivations including all facts of known proof', off, inference).
|
||||
:- initialization(
|
||||
problog_define_flag(prunecheck, problog_flag_validate_switch, 'stop derivations including all facts of known proof', off, inference)
|
||||
).
|
||||
|
||||
%%%%%%%%%%%%
|
||||
% max number of calls to probabilistic facts per derivation (to ensure termination)
|
||||
%%%%%%%%%%%%
|
||||
|
||||
:- problog_define_flag(maxsteps, problog_flag_validate_posint, 'max. number of prob. steps per derivation', 1000, inference).
|
||||
|
||||
%%%%%%%%%%%%
|
||||
% montecarlo: recalculate current approximation after N samples
|
||||
% montecarlo: write log to this file
|
||||
%%%%%%%%%%%%
|
||||
|
||||
:- problog_define_flag(mc_logfile, problog_flag_validate_file, 'logfile for montecarlo', 'log.txt', mcmc).
|
||||
:- initialization(
|
||||
problog_define_flag(maxsteps, problog_flag_validate_posint, 'max. number of prob. steps per derivation', 1000, inference)
|
||||
).
|
||||
|
||||
%%%%%%%%%%%%
|
||||
% BDD timeout in seconds, used as option in BDD tool
|
||||
@ -429,16 +427,13 @@
|
||||
% located in the directory given by problog_flag dir
|
||||
%%%%%%%%%%%%
|
||||
|
||||
%:- problog_define_flag(bdd_path, problog_flag_validate_directory, 'problogbdd directory', '.',bdd).
|
||||
:- problog_define_flag(bdd_time, problog_flag_validate_posint, 'BDD computation timeout in seconds', 60, bdd).
|
||||
:- problog_define_flag(bdd_par_file, problog_flag_validate_file, 'file for BDD variable parameters', example_bdd_probs, bdd).
|
||||
:- problog_define_flag(bdd_result, problog_flag_validate_file, 'file to store result calculated from BDD', example_bdd_res, bdd).
|
||||
:- problog_define_flag(bdd_file, problog_flag_validate_file, 'file for BDD script', example_bdd, bdd, flags:bdd_file_handler).
|
||||
:- problog_define_flag(save_bdd, problog_flag_validate_boolean, 'save BDD files for (last) lower bound', false, bdd).
|
||||
:- problog_define_flag(dynamic_reorder, problog_flag_validate_boolean, 'use dynamic re-ordering for BDD', true, bdd).
|
||||
:- problog_define_flag(bdd_static_order, problog_flag_validate_boolean, 'use a static order', false, bdd).
|
||||
:- problog_define_flag(static_order_file, problog_flag_validate_file, 'file for BDD static order', example_bdd_order, bdd).
|
||||
|
||||
:- initialization((
|
||||
% problog_define_flag(bdd_path, problog_flag_validate_directory, 'problogbdd directory', '.',bdd),
|
||||
problog_define_flag(bdd_time, problog_flag_validate_posint, 'BDD computation timeout in seconds', 60, bdd),
|
||||
problog_define_flag(save_bdd, problog_flag_validate_boolean, 'save BDD files for (last) lower bound', false, bdd),
|
||||
problog_define_flag(dynamic_reorder, problog_flag_validate_boolean, 'use dynamic re-ordering for BDD', true, bdd),
|
||||
problog_define_flag(bdd_static_order, problog_flag_validate_boolean, 'use a static order', false, bdd)
|
||||
)).
|
||||
|
||||
%%%%%%%%%%%%
|
||||
% determine whether ProbLog outputs information (number of proofs, intermediate results, ...)
|
||||
@ -446,27 +441,34 @@
|
||||
% default is false now, as dtproblog will flood the user with verbosity
|
||||
%%%%%%%%%%%%
|
||||
|
||||
:- problog_define_flag(verbose, problog_flag_validate_boolean, 'output intermediate information', false,output).
|
||||
:- initialization(
|
||||
problog_define_flag(verbose, problog_flag_validate_boolean, 'output intermediate information', false,output)
|
||||
).
|
||||
|
||||
%%%%%%%%%%%%
|
||||
% determine whether ProbLog outputs proofs when adding to trie
|
||||
% default is false
|
||||
%%%%%%%%%%%%
|
||||
|
||||
:- problog_define_flag(show_proofs, problog_flag_validate_boolean, 'output proofs', false,output).
|
||||
:- initialization(
|
||||
problog_define_flag(show_proofs, problog_flag_validate_boolean, 'output proofs', false,output)
|
||||
).
|
||||
|
||||
%%%%%%%%%%%%
|
||||
% Trie dump parameter for saving a file with the trie structure in the directory by problog_flag dir
|
||||
%%%%%%%%%%%%
|
||||
|
||||
:- problog_define_flag(triedump, problog_flag_validate_boolean, 'generate file: trie_file containing the trie structure', false,output).
|
||||
:- initialization(
|
||||
problog_define_flag(triedump, problog_flag_validate_boolean, 'generate file: trie_file containing the trie structure', false,output)
|
||||
).
|
||||
|
||||
%%%%%%%%%%%%
|
||||
% Default inference method
|
||||
%%%%%%%%%%%%
|
||||
|
||||
:- problog_define_flag(inference, problog_flag_validate_dummy, 'default inference method', exact, inference).
|
||||
|
||||
:- initialization(
|
||||
problog_define_flag(inference, problog_flag_validate_dummy, 'default inference method', exact, inference)
|
||||
).
|
||||
|
||||
problog_dir(PD):- problog_path(PD).
|
||||
|
||||
@ -476,14 +478,25 @@ problog_dir(PD):- problog_path(PD).
|
||||
|
||||
init_global_params :-
|
||||
%grow_atom_table(1000000),
|
||||
getcwd(Work),
|
||||
concat_path_with_filename(Work, output, WorkDir),
|
||||
|
||||
%%%%%%%%%%%%
|
||||
% working directory: all the temporary and output files will be located there
|
||||
% it assumes a subdirectory of the current working dir
|
||||
% on initialization, the current dir is the one where the user's file is located
|
||||
% should be changed to use temporary folder structure of operating system
|
||||
%%%%%%%%%%%%
|
||||
problog_define_flag(dir, problog_flag_validate_directory, 'directory for files', WorkDir, output),
|
||||
tmpnam(TempFolder),
|
||||
atomic_concat([TempFolder, '_problog'], TempProblogFolder),
|
||||
problog_define_flag(dir, problog_flag_validate_directory, 'directory for files', TempProblogFolder, output),
|
||||
problog_define_flag(bdd_par_file, problog_flag_validate_file, 'file for BDD variable parameters', example_bdd_probs, bdd, flags:working_file_handler),
|
||||
problog_define_flag(bdd_result, problog_flag_validate_file, 'file to store result calculated from BDD', example_bdd_res, bdd, flags:working_file_handler),
|
||||
problog_define_flag(bdd_file, problog_flag_validate_file, 'file for BDD script', example_bdd, bdd, flags:bdd_file_handler),
|
||||
problog_define_flag(static_order_file, problog_flag_validate_file, 'file for BDD static order', example_bdd_order, bdd, flags:working_file_handler),
|
||||
%%%%%%%%%%%%
|
||||
% montecarlo: recalculate current approximation after N samples
|
||||
% montecarlo: write log to this file
|
||||
%%%%%%%%%%%%
|
||||
problog_define_flag(mc_logfile, problog_flag_validate_file, 'logfile for montecarlo', 'log.txt', mcmc, flags:working_file_handler),
|
||||
check_existance('problogbdd').
|
||||
|
||||
check_existance(FileName):-
|
||||
@ -497,6 +510,7 @@ check_existance(FileName):-
|
||||
% parameter initialization to be called after returning to user's directory:
|
||||
:- initialization(init_global_params).
|
||||
|
||||
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
% internal control flags
|
||||
% if on
|
||||
@ -508,7 +522,7 @@ check_existance(FileName):-
|
||||
problog_control(on,X) :-
|
||||
call(X),!.
|
||||
problog_control(on,X) :-
|
||||
assert(X).
|
||||
assertz(X).
|
||||
problog_control(off,X) :-
|
||||
retractall(X).
|
||||
problog_control(check,X) :-
|
||||
@ -521,7 +535,7 @@ reset_control :-
|
||||
% problog_control(off,exact),
|
||||
problog_control(off,remember).
|
||||
|
||||
:- reset_control.
|
||||
:- initialization(reset_control).
|
||||
|
||||
grow_atom_table(N):-
|
||||
generate_atoms(N, 0),
|
||||
@ -566,12 +580,12 @@ term_expansion_intern((Annotation :: Head :- Body), Module, problog:ExpandedClau
|
||||
% It's a decision with a body
|
||||
copy_term((Head,Body),(HeadCopy,_BodyCopy)),
|
||||
functor(Head, Functor, Arity),
|
||||
atomic_concat([problog_,Functor],LongFunctor),
|
||||
atom_concat(problog_, Functor, LongFunctor),
|
||||
Head =.. [Functor|Args],
|
||||
append(Args,[LProb],LongArgs),
|
||||
probclause_id(ID),
|
||||
ProbFactHead =.. [LongFunctor,ID|LongArgs],
|
||||
assert(decision_fact(ID,Head)),
|
||||
assertz(decision_fact(ID,Head)),
|
||||
ExpandedClause = (ProbFactHead :-
|
||||
user:Body,
|
||||
(problog_control(check,internal_strategy) ->
|
||||
@ -580,14 +594,14 @@ term_expansion_intern((Annotation :: Head :- Body), Module, problog:ExpandedClau
|
||||
LProb = '?'
|
||||
)
|
||||
),
|
||||
assert(dynamic_probability_fact(ID)),
|
||||
assert((dynamic_probability_fact_extract(HeadCopy,P_New) :-
|
||||
assertz(dynamic_probability_fact(ID)),
|
||||
assertz((dynamic_probability_fact_extract(HeadCopy,P_New) :-
|
||||
dtproblog:strategy(ID,HeadCopy,P_New)
|
||||
)),
|
||||
(ground(Head) ->
|
||||
true
|
||||
;
|
||||
assert(non_ground_fact(ID))
|
||||
assertz(non_ground_fact(ID))
|
||||
),
|
||||
problog_predicate(Functor, Arity, LongFunctor, Module)
|
||||
;
|
||||
@ -612,7 +626,7 @@ user:term_expansion(P::Goal,Goal) :-
|
||||
term_expansion_intern(P :: Goal,Module,problog:ProbFact) :-
|
||||
copy_term((P,Goal),(P_Copy,Goal_Copy)),
|
||||
functor(Goal, Name, Arity),
|
||||
atomic_concat([problog_,Name],ProblogName),
|
||||
atom_concat(problog_, Name, ProblogName),
|
||||
Goal =.. [Name|Args],
|
||||
append(Args,[LProb],L1),
|
||||
probclause_id(ID),
|
||||
@ -621,7 +635,7 @@ term_expansion_intern(P :: Goal,Module,problog:ProbFact) :-
|
||||
(nonvar(P), P = t(TrueProb))
|
||||
->
|
||||
(
|
||||
assert(tunable_fact(ID,TrueProb)),
|
||||
assertz(tunable_fact(ID,TrueProb)),
|
||||
LProb is log(random*0.9+0.05) % set unknown probability randomly in [0.05, 0.95]
|
||||
);
|
||||
(
|
||||
@ -643,8 +657,8 @@ term_expansion_intern(P :: Goal,Module,problog:ProbFact) :-
|
||||
)
|
||||
),
|
||||
LProb=log(P),
|
||||
assert(dynamic_probability_fact(ID)),
|
||||
assert(dynamic_probability_fact_extract(Goal_Copy,P_Copy))
|
||||
assertz(dynamic_probability_fact(ID)),
|
||||
assertz(dynamic_probability_fact_extract(Goal_Copy,P_Copy))
|
||||
)
|
||||
)
|
||||
),
|
||||
@ -652,7 +666,7 @@ term_expansion_intern(P :: Goal,Module,problog:ProbFact) :-
|
||||
ground(Goal)
|
||||
->
|
||||
true;
|
||||
assert(non_ground_fact(ID))
|
||||
assertz(non_ground_fact(ID))
|
||||
),
|
||||
problog_predicate(Name, Arity, ProblogName,Module).
|
||||
|
||||
@ -673,6 +687,7 @@ is_valid_gaussian(X) :-
|
||||
throw(invalid_gaussian(X))
|
||||
).
|
||||
|
||||
:- multifile(user:term_expansion/1).
|
||||
|
||||
user:term_expansion(Goal, problog:ProbFact) :-
|
||||
compound(Goal),
|
||||
@ -693,7 +708,7 @@ user:term_expansion(Goal, problog:ProbFact) :-
|
||||
),
|
||||
|
||||
functor(Goal, Name, Arity),
|
||||
atomic_concat([problogcontinuous_,Name],ProblogName),
|
||||
atom_concat(problogcontinuous_, Name, ProblogName),
|
||||
probclause_id(ID),
|
||||
|
||||
GaussianArg=gaussian(Mu_Arg,Sigma_Arg),
|
||||
@ -708,7 +723,7 @@ user:term_expansion(Goal, problog:ProbFact) :-
|
||||
Sigma_Random is 0.4, % random*2+0.5,
|
||||
nth(Pos,Args,_,KeepArgs),
|
||||
nth(Pos,NewArgs,gaussian(Mu_Random,Sigma_Random),KeepArgs),
|
||||
assert(tunable_fact(ID,gaussian(Mu_Arg,Sigma_Arg)))
|
||||
assertz(tunable_fact(ID,gaussian(Mu_Arg,Sigma_Arg)))
|
||||
)
|
||||
),
|
||||
ProbFact =.. [ProblogName,ID|NewArgs],
|
||||
@ -717,9 +732,9 @@ user:term_expansion(Goal, problog:ProbFact) :-
|
||||
ground(Goal)
|
||||
->
|
||||
true;
|
||||
assert(non_ground_fact(ID))
|
||||
assertz(non_ground_fact(ID))
|
||||
),
|
||||
assert(continuous_fact(ID)),
|
||||
assertz(continuous_fact(ID)),
|
||||
problog_continuous_predicate(Name, Arity, Pos,ProblogName).
|
||||
|
||||
|
||||
@ -756,7 +771,7 @@ problog_continuous_predicate(Name, Arity, ContinuousArgumentPosition, ProblogNam
|
||||
ProbFact =.. [ProblogName,ID|ProbArgs],
|
||||
prolog_load_context(module,Mod),
|
||||
|
||||
assert( (Mod:OriginalGoal :- ProbFact,
|
||||
assertz( (Mod:OriginalGoal :- ProbFact,
|
||||
% continuous facts always get a grounding ID, even when they are actually ground
|
||||
% this simplifies the BDD script generation
|
||||
non_ground_fact_grounding_id(ProbFact,Ground_ID),
|
||||
@ -764,7 +779,7 @@ problog_continuous_predicate(Name, Arity, ContinuousArgumentPosition, ProblogNam
|
||||
add_continuous_to_proof(ID,ID2)
|
||||
)),
|
||||
|
||||
assert(problog_continuous_predicate(Name, Arity,ContinuousArgumentPosition)),
|
||||
assertz(problog_continuous_predicate(Name, Arity,ContinuousArgumentPosition)),
|
||||
ArityPlus1 is Arity+1,
|
||||
dynamic(problog:ProblogName/ArityPlus1).
|
||||
|
||||
@ -792,16 +807,16 @@ interval_merge((_ID,GroundID,_Type),Interval) :-
|
||||
|
||||
|
||||
|
||||
problog_assert(P::Goal) :-
|
||||
problog_assert(user,P::Goal).
|
||||
problog_assert(Module, P::Goal) :-
|
||||
problog_assertz(P::Goal) :-
|
||||
problog_assertz(user,P::Goal).
|
||||
problog_assertz(Module, P::Goal) :-
|
||||
term_expansion_intern(P::Goal,Module,problog:ProbFact),
|
||||
assert(problog:ProbFact).
|
||||
assertz(problog:ProbFact).
|
||||
|
||||
problog_retractall(Goal) :-
|
||||
Goal =.. [F|Args],
|
||||
append([_ID|Args],[_Prob],Args2),
|
||||
atomic_concat(['problog_',F],F2),
|
||||
atom_concat('problog_', F, F2),
|
||||
ProbLogGoal=..[F2|Args2],
|
||||
retractall(problog:ProbLogGoal).
|
||||
|
||||
@ -815,18 +830,18 @@ problog_predicate(Name, Arity, ProblogName,Mod) :-
|
||||
OriginalGoal =.. [_|Args],
|
||||
append(Args,[Prob],L1),
|
||||
ProbFact =.. [ProblogName,ID|L1],
|
||||
assert( (Mod:OriginalGoal :-
|
||||
assertz( (Mod:OriginalGoal :-
|
||||
ProbFact,
|
||||
grounding_id(ID,OriginalGoal,ID2),
|
||||
prove_problog_fact(ID,ID2,Prob)
|
||||
)),
|
||||
|
||||
assert( (Mod:problog_not(OriginalGoal) :-
|
||||
assertz( (Mod:problog_not(OriginalGoal) :-
|
||||
ProbFact,
|
||||
grounding_id(ID,OriginalGoal,ID2),
|
||||
prove_problog_fact_negated(ID,ID2,Prob)
|
||||
)),
|
||||
assert(problog_predicate(Name, Arity)),
|
||||
assertz(problog_predicate(Name, Arity)),
|
||||
ArityPlus2 is Arity+2,
|
||||
dynamic(problog:ProblogName/ArityPlus2).
|
||||
|
||||
@ -896,7 +911,8 @@ prove_problog_fact_negated(ClauseID,GroundID,Prob) :-
|
||||
).
|
||||
|
||||
% generate next global identifier
|
||||
:- nb_setval(probclause_counter,0).
|
||||
:- initialization(nb_setval(probclause_counter,0)).
|
||||
|
||||
probclause_id(ID) :-
|
||||
nb_getval(probclause_counter,ID), !,
|
||||
C1 is ID+1,
|
||||
@ -922,7 +938,7 @@ non_ground_fact_grounding_id(Goal,ID) :-
|
||||
nb_getval(non_ground_fact_grounding_id_counter,ID),
|
||||
ID2 is ID+1,
|
||||
nb_setval(non_ground_fact_grounding_id_counter,ID2),
|
||||
assert(grounding_is_known(Goal,ID))
|
||||
assertz(grounding_is_known(Goal,ID))
|
||||
)
|
||||
).
|
||||
|
||||
@ -948,7 +964,7 @@ probabilistic_fact(P2,Goal,ID) :-
|
||||
->
|
||||
(
|
||||
Goal =.. [F|Args],
|
||||
atomic_concat('problog_',F,F2),
|
||||
atom_concat('problog_', F, F2),
|
||||
append([ID|Args],[P],Args2),
|
||||
Goal2 =..[F2|Args2],
|
||||
length(Args2,N),
|
||||
@ -994,7 +1010,7 @@ prob_for_id(dummy,dummy,dummy).
|
||||
|
||||
get_fact_probability(A, Prob) :-
|
||||
ground(A),
|
||||
not(number(A)),
|
||||
\+ number(A),
|
||||
atom_codes(A, A_Codes),
|
||||
once(append(Part1, [95|Part2], A_Codes)), % 95 = '_'
|
||||
number_codes(ID, Part1), !,
|
||||
@ -1052,11 +1068,11 @@ set_fact_probability(ID,Prob) :-
|
||||
NewLogProb is log(Prob),
|
||||
nth(ProblogArity,NewProblogTermArgs,NewLogProb,KeepArgs),
|
||||
NewProblogTerm =.. [ProblogName|NewProblogTermArgs],
|
||||
assert(NewProblogTerm).
|
||||
assertz(NewProblogTerm).
|
||||
|
||||
get_internal_fact(ID,ProblogTerm,ProblogName,ProblogArity) :-
|
||||
problog_predicate(Name,Arity),
|
||||
atomic_concat([problog_,Name],ProblogName),
|
||||
atom_concat(problog_, Name, ProblogName),
|
||||
ProblogArity is Arity+2,
|
||||
functor(ProblogTerm,ProblogName,ProblogArity),
|
||||
arg(1,ProblogTerm,ID),
|
||||
@ -1074,7 +1090,7 @@ get_continuous_fact_parameters(ID,Parameters) :-
|
||||
|
||||
get_internal_continuous_fact(ID,ProblogTerm,ProblogName,ProblogArity,ContinuousPos) :-
|
||||
problog_continuous_predicate(Name,Arity,ContinuousPos),
|
||||
atomic_concat([problogcontinuous_,Name],ProblogName),
|
||||
atom_concat(problogcontinuous_, Name, ProblogName),
|
||||
ProblogArity is Arity+1,
|
||||
functor(ProblogTerm,ProblogName,ProblogArity),
|
||||
arg(1,ProblogTerm,ID),
|
||||
@ -1087,7 +1103,7 @@ set_continuous_fact_parameters(ID,Parameters) :-
|
||||
nth0(ContinuousPos,ProblogTermArgs,_,KeepArgs),
|
||||
nth0(ContinuousPos,NewProblogTermArgs,Parameters,KeepArgs),
|
||||
NewProblogTerm =.. [ProblogName|NewProblogTermArgs],
|
||||
assert(NewProblogTerm).
|
||||
assertz(NewProblogTerm).
|
||||
|
||||
|
||||
|
||||
@ -1131,7 +1147,7 @@ get_fact(ID,OutsideTerm) :-
|
||||
ProblogTerm =.. [_Functor,ID|Args],
|
||||
atomic_concat('problog_',OutsideFunctor,ProblogName),
|
||||
Last is ProblogArity-1,
|
||||
nth(Last,Args,_LogProb,OutsideArgs),
|
||||
nth(Last,Args,_LogProb,OutsideArgs), % PM avoid nth/3; use nth0/3 or nth1/3 instead
|
||||
OutsideTerm =.. [OutsideFunctor|OutsideArgs].
|
||||
% ID of instance of non-ground fact: get fact from grounding table
|
||||
get_fact(ID,OutsideTerm) :-
|
||||
@ -1139,12 +1155,12 @@ get_fact(ID,OutsideTerm) :-
|
||||
grounding_is_known(OutsideTerm,GID).
|
||||
|
||||
recover_grounding_id(Atom,ID) :-
|
||||
name(Atom,List),
|
||||
atom_codes(Atom,List),
|
||||
reverse(List,Rev),
|
||||
recover_number(Rev,NumRev),
|
||||
reverse(NumRev,Num),
|
||||
name(ID,Num).
|
||||
recover_number([95|_],[]) :- !. % name('_',[95])
|
||||
atom_codes(ID,Num).
|
||||
recover_number([95|_],[]) :- !. % atom_codes('_',[95])
|
||||
recover_number([A|B],[A|C]) :-
|
||||
recover_number(B,C).
|
||||
|
||||
@ -1278,9 +1294,9 @@ montecarlo_check(ComposedID) :-
|
||||
fail.
|
||||
% (c) for unknown groundings of non-ground facts: generate a new sample (decompose the ID first)
|
||||
montecarlo_check(ID) :-
|
||||
name(ID,IDN),
|
||||
atom_codes(ID,IDN),
|
||||
recover_number(IDN,FactIDName),
|
||||
name(FactID,FactIDName),
|
||||
atom_codes(FactID,FactIDName),
|
||||
new_sample_nonground(ID,FactID).
|
||||
|
||||
% sampling from ground fact: set array value to 1 (in) or 2 (out)
|
||||
@ -1316,10 +1332,10 @@ new_sample_nonground(ComposedID,ID) :-
|
||||
% fail.
|
||||
|
||||
split_grounding_id(Composed,Fact,Grounding) :-
|
||||
name(Composed,C),
|
||||
atom_codes(Composed,C),
|
||||
split_g_id(C,F,G),
|
||||
name(Fact,F),
|
||||
name(Grounding,G).
|
||||
atom_codes(Fact,F),
|
||||
atom_codes(Grounding,G).
|
||||
split_g_id([95|Grounding],[],Grounding) :- !.
|
||||
split_g_id([A|B],[A|FactID],GroundingID) :-
|
||||
split_g_id(B,FactID,GroundingID).
|
||||
@ -1404,19 +1420,21 @@ put_module(Goal,Module,Module:Goal).
|
||||
% if remember is on, input files for problogbdd will be saved
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
|
||||
:- problog_var_define(sld_time, times, time, messages('SLD resolution', ':', ' ms')).
|
||||
:- problog_var_define(bdd_script_time, times, time, messages('Generating BDD script', ':', ' ms')).
|
||||
:- problog_var_define(bdd_generation_time, times, time, messages('Constructing BDD', ':', ' ms')).
|
||||
:- problog_var_define(trie_statistics, memory, untyped, messages('Trie usage', ':', '')).
|
||||
:- problog_var_define(probability, result, number, messages('Probabilty', ' = ', '')).
|
||||
:- problog_var_define(bdd_script_time(Method), times, time, messages('Generating BDD script '(Method), ':', ' ms')).
|
||||
:- problog_var_define(bdd_generation_time(Method), times, time, messages('Constructing BDD '(Method), ':', ' ms')).
|
||||
:- problog_var_define(probability(Method), result, number, messages('Probabilty '(Method), ' = ', '')).
|
||||
:- problog_var_define(trie_statistics(Method), memory, untyped, messages('Trie usage '(Method), ':', '')).
|
||||
:- problog_var_define(dbtrie_statistics(Method), memory, untyped, messages('Depth Breadth Trie usage '(Method), ':', '')).
|
||||
:- problog_var_define(db_trie_opts_performed(Method), memory, untyped, messages('Optimisations performed '(Method), ':', '')).
|
||||
:- problog_var_define(variable_elimination_time, times, time, messages('Variable Elimination', ':', ' ms')).
|
||||
:- problog_var_define(variable_elimination_stats, memory, untyped, messages('Variable Elimination', ':', '')).
|
||||
:- initialization((
|
||||
problog_var_define(sld_time, times, time, messages('SLD resolution', ':', ' ms')),
|
||||
problog_var_define(bdd_script_time, times, time, messages('Generating BDD script', ':', ' ms')),
|
||||
problog_var_define(bdd_generation_time, times, time, messages('Constructing BDD', ':', ' ms')),
|
||||
problog_var_define(trie_statistics, memory, untyped, messages('Trie usage', ':', '')),
|
||||
problog_var_define(probability, result, number, messages('Probabilty', ' = ', '')),
|
||||
problog_var_define(bdd_script_time(Method), times, time, messages('Generating BDD script '(Method), ':', ' ms')),
|
||||
problog_var_define(bdd_generation_time(Method), times, time, messages('Constructing BDD '(Method), ':', ' ms')),
|
||||
problog_var_define(probability(Method), result, number, messages('Probabilty '(Method), ' = ', '')),
|
||||
problog_var_define(trie_statistics(Method), memory, untyped, messages('Trie usage '(Method), ':', '')),
|
||||
problog_var_define(dbtrie_statistics(Method), memory, untyped, messages('Depth Breadth Trie usage '(Method), ':', '')),
|
||||
problog_var_define(db_trie_opts_performed(Method), memory, untyped, messages('Optimisations performed '(Method), ':', '')),
|
||||
problog_var_define(variable_elimination_time, times, time, messages('Variable Elimination', ':', ' ms')),
|
||||
problog_var_define(variable_elimination_stats, memory, untyped, messages('Variable Elimination', ':', ''))
|
||||
)).
|
||||
|
||||
problog_statistics(Stat, Result):-
|
||||
problog_var_defined(Stat),
|
||||
@ -1777,7 +1795,7 @@ add_solution :-
|
||||
Continuous=[];
|
||||
(
|
||||
Continuous=[continuous(ProofID)],
|
||||
assert(hybrid_proof(ProofID,Cont_IDs,AllIntervals))
|
||||
assertz(hybrid_proof(ProofID,Cont_IDs,AllIntervals))
|
||||
)
|
||||
)
|
||||
)
|
||||
@ -1805,7 +1823,7 @@ collect_all_intervals([(ID,GroundID)|T],ProofID,[Interval|T2]) :-
|
||||
Interval \= all, % we do not need to store continuous
|
||||
% variables with domain [-oo,oo] (they have probability 1)
|
||||
!,
|
||||
assert(hybrid_proof(ProofID,ID,GroundID,Interval)),
|
||||
assertz(hybrid_proof(ProofID,ID,GroundID,Interval)),
|
||||
collect_all_intervals(T,ProofID,T2).
|
||||
collect_all_intervals([_|T],ProofID,T2) :-
|
||||
collect_all_intervals(T,ProofID,T2).
|
||||
@ -1862,7 +1880,7 @@ disjoin_hybrid_proofs([GroundID|T]) :-
|
||||
(
|
||||
hybrid_proof(ProofID,ID,GroundID,Interval),
|
||||
intervals_disjoin(Interval,Partition,PInterval),
|
||||
assert(hybrid_proof_disjoint(ProofID,ID,GroundID,PInterval)),
|
||||
assertz(hybrid_proof_disjoint(ProofID,ID,GroundID,PInterval)),
|
||||
|
||||
fail; % go to next proof
|
||||
true
|
||||
@ -1957,9 +1975,9 @@ init_problog_delta(Threshold,Delta) :-
|
||||
nb_setval(problog_completed_proofs, Trie_Completed_Proofs),
|
||||
init_ptree(Trie_Stopped_Proofs),
|
||||
nb_setval(problog_stopped_proofs, Trie_Stopped_Proofs),
|
||||
assert(low(0,0.0)),
|
||||
assert(up(0,1.0)),
|
||||
assert(stopDiff(Delta)),
|
||||
assertz(low(0,0.0)),
|
||||
assertz(up(0,1.0)),
|
||||
assertz(stopDiff(Delta)),
|
||||
init_problog(Threshold).
|
||||
|
||||
problog_delta_id(Goal, _) :-
|
||||
@ -2045,7 +2063,7 @@ eval_lower(N,P,Status) :-
|
||||
eval_dnf(Trie_Completed_Proofs,P,Status),
|
||||
(Status = ok ->
|
||||
retract(low(_,_)),
|
||||
assert(low(N,P)),
|
||||
assertz(low(N,P)),
|
||||
(problog_flag(verbose,true) -> format(user,'lower bound: ~6f~n',[P]);true),
|
||||
flush_output(user)
|
||||
;
|
||||
@ -2055,7 +2073,7 @@ eval_lower(N,P,Status) :-
|
||||
eval_upper(0,P,ok) :-
|
||||
retractall(up(_,_)),
|
||||
low(N,P),
|
||||
assert(up(N,P)).
|
||||
assertz(up(N,P)).
|
||||
% else merge proofs and stopped derivations to get upper bound
|
||||
% in case of timeout or other problems, skip and use bound from last level
|
||||
eval_upper(N,UpP,ok) :-
|
||||
@ -2068,7 +2086,7 @@ eval_upper(N,UpP,ok) :-
|
||||
delete_ptree(Trie_All_Proofs),
|
||||
(StatusUp = ok ->
|
||||
retract(up(_,_)),
|
||||
assert(up(N,UpP))
|
||||
assertz(up(N,UpP))
|
||||
;
|
||||
(problog_flag(verbose,true) -> format(user,'~w - continue using old up~n',[StatusUp]);true),
|
||||
flush_output(user),
|
||||
@ -2096,8 +2114,8 @@ problog_max(Goal, Prob, Facts) :-
|
||||
init_problog_max(Threshold) :-
|
||||
retractall(max_probability(_)),
|
||||
retractall(max_proof(_)),
|
||||
assert(max_probability(-999999)),
|
||||
assert(max_proof(unprovable)),
|
||||
assertz(max_probability(-999999)),
|
||||
assertz(max_proof(unprovable)),
|
||||
init_problog(Threshold).
|
||||
|
||||
update_max :-
|
||||
@ -2109,10 +2127,10 @@ update_max :-
|
||||
b_getval(problog_current_proof, IDs),
|
||||
open_end_close_end(IDs, R),
|
||||
retractall(max_proof(_)),
|
||||
assert(max_proof(R)),
|
||||
assertz(max_proof(R)),
|
||||
nb_setval(problog_threshold, CurrP),
|
||||
retractall(max_probability(_)),
|
||||
assert(max_probability(CurrP))
|
||||
assertz(max_probability(CurrP))
|
||||
).
|
||||
|
||||
problog_max_id(Goal, _Prob, _Clauses) :-
|
||||
@ -2193,7 +2211,7 @@ problog_real_kbest(Goal, K, Prob, Status) :-
|
||||
|
||||
init_problog_kbest(Threshold) :-
|
||||
retractall(current_kbest(_,_,_)),
|
||||
assert(current_kbest(-999999,[],0)), %(log-threshold,proofs,num_proofs)
|
||||
assertz(current_kbest(-999999,[],0)), %(log-threshold,proofs,num_proofs)
|
||||
init_ptree(Trie_Completed_Proofs),
|
||||
nb_setval(problog_completed_proofs, Trie_Completed_Proofs),
|
||||
init_problog(Threshold).
|
||||
@ -2234,7 +2252,7 @@ update_current_kbest(K,NewLogProb,Cl) :-
|
||||
sorted_insert(NewLogProb-Cl,List,NewList),
|
||||
NewLength is Length+1,
|
||||
(NewLength < K ->
|
||||
assert(current_kbest(OldThres,NewList,NewLength))
|
||||
assertz(current_kbest(OldThres,NewList,NewLength))
|
||||
;
|
||||
(NewLength>K ->
|
||||
First is NewLength-K+1,
|
||||
@ -2242,7 +2260,7 @@ update_current_kbest(K,NewLogProb,Cl) :-
|
||||
; FinalList=NewList, FinalLength=NewLength),
|
||||
FinalList=[NewThres-_|_],
|
||||
nb_setval(problog_threshold,NewThres),
|
||||
assert(current_kbest(NewThres,FinalList,FinalLength))).
|
||||
assertz(current_kbest(NewThres,FinalList,FinalLength))).
|
||||
|
||||
sorted_insert(A,[],[A]).
|
||||
sorted_insert(A-LA,[B1-LB1|B], [A-LA,B1-LB1|B] ) :-
|
||||
@ -2403,7 +2421,7 @@ montecarlo(Goal,Delta,K,SamplesSoFar,File,PositiveSoFar,InitialTime) :-
|
||||
;
|
||||
true
|
||||
),
|
||||
assert(mc_prob(Prob))
|
||||
assertz(mc_prob(Prob))
|
||||
;
|
||||
montecarlo(Goal,Delta,K,SamplesNew,File,Next,InitialTime)
|
||||
).
|
||||
@ -2425,7 +2443,7 @@ montecarlo(Goal,Delta,K,SamplesSoFar,File,PositiveSoFar,InitialTime) :-
|
||||
% ;
|
||||
% true
|
||||
% ),
|
||||
% assert(mc_prob(Prob))
|
||||
% assertz(mc_prob(Prob))
|
||||
% ;
|
||||
% montecarlo(Goal,Delta,K,SamplesNew,File,Next,InitialTime)
|
||||
% ).
|
||||
@ -2470,7 +2488,7 @@ problog_answers(Goal,File) :-
|
||||
set_problog_flag(verbose,false),
|
||||
retractall(answer(_)),
|
||||
% this will not give the exact prob of Goal!
|
||||
problog_exact((Goal,ground(Goal),\+problog:answer(Goal),assert(problog:answer(Goal))),_,_),
|
||||
problog_exact((Goal,ground(Goal),\+problog:answer(Goal),assertz(problog:answer(Goal))),_,_),
|
||||
open(File,write,_,[alias(answer)]),
|
||||
eval_answers,
|
||||
close(answer).
|
||||
@ -2528,13 +2546,13 @@ update_current_kbest_answers(_,NewLogProb,Goal) :-
|
||||
!,
|
||||
keysort(NewList,SortedList),%format(user_error,'updated variant of ~w~n',[Goal]),
|
||||
retract(current_kbest(K,_,Len)),
|
||||
assert(current_kbest(K,SortedList,Len)).
|
||||
assertz(current_kbest(K,SortedList,Len)).
|
||||
update_current_kbest_answers(K,NewLogProb,Goal) :-
|
||||
retract(current_kbest(OldThres,List,Length)),
|
||||
sorted_insert(NewLogProb-Goal,List,NewList),%format(user_error,'inserted new element ~w~n',[Goal]),
|
||||
NewLength is Length+1,
|
||||
(NewLength < K ->
|
||||
assert(current_kbest(OldThres,NewList,NewLength))
|
||||
assertz(current_kbest(OldThres,NewList,NewLength))
|
||||
;
|
||||
(NewLength>K ->
|
||||
First is NewLength-K+1,
|
||||
@ -2542,7 +2560,7 @@ update_current_kbest_answers(K,NewLogProb,Goal) :-
|
||||
; FinalList=NewList, FinalLength=NewLength),
|
||||
FinalList=[NewThres-_|_],
|
||||
nb_setval(problog_threshold,NewThres),
|
||||
assert(current_kbest(NewThres,FinalList,FinalLength))).
|
||||
assertz(current_kbest(NewThres,FinalList,FinalLength))).
|
||||
|
||||
% this fails if there is no variant -> go to second case above
|
||||
update_prob_of_known_answer([OldLogP-OldGoal|List],Goal,NewLogProb,[MaxLogP-OldGoal|List]) :-
|
||||
@ -2680,7 +2698,7 @@ build_trie(Goal, Trie) :-
|
||||
throw(error('Flag settings not supported by build_trie/2.'))
|
||||
).
|
||||
|
||||
build_trie_supported :- problog_flag(inference,exact).
|
||||
build_trie_supported :- problog_flag(inference,exact). % PM this can easily be written to avoid creating choice-points
|
||||
build_trie_supported :- problog_flag(inference,low(_)).
|
||||
build_trie_supported :- problog_flag(inference,atleast-_-best).
|
||||
build_trie_supported :- problog_flag(inference,_-best).
|
||||
@ -2859,7 +2877,7 @@ write_bdd_struct_script(Trie,BDDFile,Variables) :-
|
||||
Levels = [ROptLevel]
|
||||
),
|
||||
% Removed forall here, because it hides 'Variables' from what comes afterwards
|
||||
once(member(OptLevel, Levels)),
|
||||
memberchk(OptLevel, Levels),
|
||||
(
|
||||
(problog_flag(use_db_trie, true) ->
|
||||
tries:trie_db_opt_min_prefix(MinPrefix),
|
||||
@ -2973,7 +2991,7 @@ write_global_bdd_file_line(I,Max) :-
|
||||
).
|
||||
|
||||
write_global_bdd_file_query(I,Max) :-
|
||||
(I=Max ->
|
||||
(I=Max -> % PM shouldn't this be instead I =:= Max ?
|
||||
format("L~q~n",[I])
|
||||
;
|
||||
format("L~q,",[I]),
|
||||
@ -3008,8 +3026,8 @@ bdd_par_file(BDDParFile) :-
|
||||
|
||||
require(Feature) :-
|
||||
atom(Feature),
|
||||
atomic_concat(['problog_required_',Feature],Feature_Required),
|
||||
atomic_concat([Feature_Required,'_',depth],Feature_Depth),
|
||||
atom_concat('problog_required_', Feature, Feature_Required),
|
||||
atom_concat(Feature_Required, '_depth', Feature_Depth),
|
||||
(required(Feature) ->
|
||||
b_getval(Feature_Depth,Depth),
|
||||
Depth1 is Depth+1,
|
||||
@ -3022,8 +3040,8 @@ require(Feature) :-
|
||||
|
||||
unrequire(Feature) :-
|
||||
atom(Feature),
|
||||
atomic_concat(['problog_required_',Feature],Feature_Required),
|
||||
atomic_concat([Feature_Required,'_',depth],Feature_Depth),
|
||||
atom_concat('problog_required_', Feature, Feature_Required),
|
||||
atom_concat(Feature_Required, '_depth', Feature_Depth),
|
||||
b_getval(Feature_Depth,Depth),
|
||||
(Depth=1 ->
|
||||
nb_delete(Feature_Required),
|
||||
@ -3036,7 +3054,7 @@ unrequire(Feature) :-
|
||||
|
||||
required(Feature) :-
|
||||
atom(Feature),
|
||||
atomic_concat(['problog_required_',Feature],Feature_Required),
|
||||
atom_concat('problog_required_', Feature, Feature_Required),
|
||||
catch(b_getval(Feature_Required,Val),error(existence_error(variable,Feature_Required),_),fail),
|
||||
Val == required.
|
||||
|
||||
|
@ -212,7 +212,8 @@
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
|
||||
:- module(extlists, [open_end_memberchk/2, open_end_add/3, open_end_add_unique/3, open_end_close_end/2]).
|
||||
:- ensure_loaded(library(lists)).
|
||||
|
||||
:- use_module(library(lists), [memberchk/2]).
|
||||
|
||||
open_end_memberchk(_A, []):-!, fail.
|
||||
open_end_memberchk(A, L-E):-
|
||||
|
@ -204,19 +204,20 @@
|
||||
%
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
|
||||
:-module(flags, [problog_define_flag/4,
|
||||
problog_define_flag/5,
|
||||
problog_define_flag/6,
|
||||
problog_defined_flag/5,
|
||||
problog_defined_flag_group/1,
|
||||
set_problog_flag/2,
|
||||
reset_problog_flags/0,
|
||||
problog_flag/2]).
|
||||
:-module(flags, [
|
||||
problog_define_flag/4,
|
||||
problog_define_flag/5,
|
||||
problog_define_flag/6,
|
||||
problog_defined_flag/5,
|
||||
problog_defined_flag_group/1,
|
||||
set_problog_flag/2,
|
||||
reset_problog_flags/0,
|
||||
problog_flag/2]).
|
||||
|
||||
|
||||
:-ensure_loaded(gflags).
|
||||
:-ensure_loaded(os).
|
||||
:-ensure_loaded(logger).
|
||||
:- use_module(gflags).
|
||||
:- use_module(os).
|
||||
:- use_module(logger).
|
||||
|
||||
problog_define_flag(Flag, Type, Description, DefaultValue):-
|
||||
flag_define(Flag, Type, DefaultValue, Description).
|
||||
@ -241,37 +242,39 @@ problog_flag(Flag, Value):-
|
||||
|
||||
reset_problog_flags:- flags_reset.
|
||||
|
||||
:- flag_add_validation_syntactic_sugar(problog_flag_validate_dummy, flag_validate_dummy).
|
||||
:- flag_add_validation_syntactic_sugar(problog_flag_validate_atom, flag_validate_atom).
|
||||
:- flag_add_validation_syntactic_sugar(problog_flag_validate_atomic, flag_validate_atomic).
|
||||
:- flag_add_validation_syntactic_sugar(problog_flag_validate_number, flag_validate_number).
|
||||
:- flag_add_validation_syntactic_sugar(problog_flag_validate_integer, flag_validate_integer).
|
||||
:- flag_add_validation_syntactic_sugar(problog_flag_validate_directory, flag_validate_directory).
|
||||
:- flag_add_validation_syntactic_sugar(problog_flag_validate_file, flag_validate_file).
|
||||
:- flag_add_validation_syntactic_sugar(problog_flag_validate_in_list(L), flag_validate_in_list(L)).
|
||||
:- flag_add_validation_syntactic_sugar(problog_flag_validate_in_interval(I, Type), flag_validate_in_interval(I, Type)).
|
||||
:- flag_add_validation_syntactic_sugar(problog_flag_validate_in_interval_closed([L, U]), flag_validate_in_interval([L, U], number)).
|
||||
:- flag_add_validation_syntactic_sugar(problog_flag_validate_in_interval_open([L, U]), flag_validate_in_interval((L, U), number)).
|
||||
:- flag_add_validation_syntactic_sugar(problog_flag_validate_in_interval_left_open([L, U]), flag_validate_in_interval((L, [U]), number)).
|
||||
:- flag_add_validation_syntactic_sugar(problog_flag_validate_in_interval_right_open([L, U]), flag_validate_in_interval(([L], U), number)).
|
||||
:- flag_add_validation_syntactic_sugar(problog_flag_validate_integer_in_interval_closed([L, U]), flag_validate_in_interval([L, U], integer)).
|
||||
:- flag_add_validation_syntactic_sugar(problog_flag_validate_integer_in_interval_open([L, U]), flag_validate_in_interval((L, U), integer)).
|
||||
:- flag_add_validation_syntactic_sugar(problog_flag_validate_integer_in_interval_left_open([L, U]), flag_validate_in_interval((L, [U]), integer)).
|
||||
:- flag_add_validation_syntactic_sugar(problog_flag_validate_integer_in_interval_right_open([L, U]), flag_validate_in_interval(([L], U), integer)).
|
||||
:- flag_add_validation_syntactic_sugar(problog_flag_validate_float_in_interval_closed([L, U]), flag_validate_in_interval([L, U], float)).
|
||||
:- flag_add_validation_syntactic_sugar(problog_flag_validate_float_in_interval_open([L, U]), flag_validate_in_interval((L, U), float)).
|
||||
:- flag_add_validation_syntactic_sugar(problog_flag_validate_float_in_interval_left_open([L, U]), flag_validate_in_interval((L, [U]), float)).
|
||||
:- flag_add_validation_syntactic_sugar(problog_flag_validate_float_in_interval_right_open([L, U]), flag_validate_in_interval(([L], U), float)).
|
||||
:- flag_add_validation_syntactic_sugar(problog_flag_validate_posnumber, flag_validate_in_interval((0, [+inf]), number)).
|
||||
:- flag_add_validation_syntactic_sugar(problog_flag_validate_posint, flag_validate_in_interval((0, +inf), integer)).
|
||||
:- flag_add_validation_syntactic_sugar(problog_flag_validate_nonegint, flag_validate_in_interval(([0], +inf), integer)).
|
||||
:- flag_add_validation_syntactic_sugar(problog_flag_validate_boolean, flag_validate_in_list([true, false])).
|
||||
:- flag_add_validation_syntactic_sugar(problog_flag_validate_switch, flag_validate_in_list([on, off])).
|
||||
:- flag_add_validation_syntactic_sugar(problog_flag_validate_method, flag_validate_in_list([max, delta, exact, montecarlo, low, kbest])).
|
||||
:- flag_add_validation_syntactic_sugar(problog_flag_validate_aggregate, flag_validate_in_list([sum, prod, soft_prod])).
|
||||
:- flag_add_validation_syntactic_sugar(problog_flag_validate_indomain_0_1_open, flag_validate_in_interval((0, 1), number)).
|
||||
:- flag_add_validation_syntactic_sugar(problog_flag_validate_indomain_0_1_close, flag_validate_in_interval([0, 1], number)).
|
||||
:- flag_add_validation_syntactic_sugar(problog_flag_validate_0to5, flag_validate_in_interval([0, 5], integer)).
|
||||
:- initialization((
|
||||
flag_add_validation_syntactic_sugar(problog_flag_validate_dummy, flag_validate_dummy),
|
||||
flag_add_validation_syntactic_sugar(problog_flag_validate_atom, flag_validate_atom),
|
||||
flag_add_validation_syntactic_sugar(problog_flag_validate_atomic, flag_validate_atomic),
|
||||
flag_add_validation_syntactic_sugar(problog_flag_validate_number, flag_validate_number),
|
||||
flag_add_validation_syntactic_sugar(problog_flag_validate_integer, flag_validate_integer),
|
||||
flag_add_validation_syntactic_sugar(problog_flag_validate_directory, flag_validate_directory),
|
||||
flag_add_validation_syntactic_sugar(problog_flag_validate_file, flag_validate_file),
|
||||
flag_add_validation_syntactic_sugar(problog_flag_validate_in_list(L), flag_validate_in_list(L)),
|
||||
flag_add_validation_syntactic_sugar(problog_flag_validate_in_interval(I, Type), flag_validate_in_interval(I, Type)),
|
||||
flag_add_validation_syntactic_sugar(problog_flag_validate_in_interval_closed([L, U]), flag_validate_in_interval([L, U], number)),
|
||||
flag_add_validation_syntactic_sugar(problog_flag_validate_in_interval_open([L, U]), flag_validate_in_interval((L, U), number)),
|
||||
flag_add_validation_syntactic_sugar(problog_flag_validate_in_interval_left_open([L, U]), flag_validate_in_interval((L, [U]), number)),
|
||||
flag_add_validation_syntactic_sugar(problog_flag_validate_in_interval_right_open([L, U]), flag_validate_in_interval(([L], U), number)),
|
||||
flag_add_validation_syntactic_sugar(problog_flag_validate_integer_in_interval_closed([L, U]), flag_validate_in_interval([L, U], integer)),
|
||||
flag_add_validation_syntactic_sugar(problog_flag_validate_integer_in_interval_open([L, U]), flag_validate_in_interval((L, U), integer)),
|
||||
flag_add_validation_syntactic_sugar(problog_flag_validate_integer_in_interval_left_open([L, U]), flag_validate_in_interval((L, [U]), integer)),
|
||||
flag_add_validation_syntactic_sugar(problog_flag_validate_integer_in_interval_right_open([L, U]), flag_validate_in_interval(([L], U), integer)),
|
||||
flag_add_validation_syntactic_sugar(problog_flag_validate_float_in_interval_closed([L, U]), flag_validate_in_interval([L, U], float)),
|
||||
flag_add_validation_syntactic_sugar(problog_flag_validate_float_in_interval_open([L, U]), flag_validate_in_interval((L, U), float)),
|
||||
flag_add_validation_syntactic_sugar(problog_flag_validate_float_in_interval_left_open([L, U]), flag_validate_in_interval((L, [U]), float)),
|
||||
flag_add_validation_syntactic_sugar(problog_flag_validate_float_in_interval_right_open([L, U]), flag_validate_in_interval(([L], U), float)),
|
||||
flag_add_validation_syntactic_sugar(problog_flag_validate_posnumber, flag_validate_in_interval((0, [+inf]), number)),
|
||||
flag_add_validation_syntactic_sugar(problog_flag_validate_posint, flag_validate_in_interval((0, +inf), integer)),
|
||||
flag_add_validation_syntactic_sugar(problog_flag_validate_nonegint, flag_validate_in_interval(([0], +inf), integer)),
|
||||
flag_add_validation_syntactic_sugar(problog_flag_validate_boolean, flag_validate_in_list([true, false])),
|
||||
flag_add_validation_syntactic_sugar(problog_flag_validate_switch, flag_validate_in_list([on, off])),
|
||||
flag_add_validation_syntactic_sugar(problog_flag_validate_method, flag_validate_in_list([max, delta, exact, montecarlo, low, kbest])),
|
||||
flag_add_validation_syntactic_sugar(problog_flag_validate_aggregate, flag_validate_in_list([sum, prod, soft_prod])),
|
||||
flag_add_validation_syntactic_sugar(problog_flag_validate_indomain_0_1_open, flag_validate_in_interval((0, 1), number)),
|
||||
flag_add_validation_syntactic_sugar(problog_flag_validate_indomain_0_1_close, flag_validate_in_interval([0, 1], number)),
|
||||
flag_add_validation_syntactic_sugar(problog_flag_validate_0to5, flag_validate_in_interval([0, 5], integer))
|
||||
)).
|
||||
|
||||
last_threshold_handler(message, '').
|
||||
last_threshold_handler(validating, _Value).
|
||||
@ -289,6 +292,14 @@ id_stepsize_handler(stored, Value):-
|
||||
|
||||
bdd_file_handler(message, '').
|
||||
bdd_file_handler(validating, _Value).
|
||||
bdd_file_handler(validate, Value):-
|
||||
convert_filename_to_working_path(Value, Path),
|
||||
catch(file_exists(Path), _, fail), file_property(Path, type(regular)), !.
|
||||
bdd_file_handler(validate, Value):-
|
||||
convert_filename_to_working_path(Value, Path),
|
||||
catch((\+ file_exists(Path), tell(Path)), _, fail),
|
||||
told,
|
||||
delete_file(Path).
|
||||
bdd_file_handler(validated, _Value).
|
||||
bdd_file_handler(stored, Value):-
|
||||
atomic_concat(Value, '_probs', ParValue),
|
||||
@ -296,6 +307,19 @@ bdd_file_handler(stored, Value):-
|
||||
atomic_concat(Value, '_res', ResValue),
|
||||
flag_set(bdd_result, ResValue).
|
||||
|
||||
working_file_handler(message, '').
|
||||
working_file_handler(validating, _Value).
|
||||
working_file_handler(validate, Value):-
|
||||
convert_filename_to_working_path(Value, Path),
|
||||
catch(file_exists(Path), _, fail), file_property(Path, type(regular)), !.
|
||||
working_file_handler(validate, Value):-
|
||||
convert_filename_to_working_path(Value, Path),
|
||||
catch((\+ file_exists(Path), tell(Path)), _, fail),
|
||||
told,
|
||||
delete_file(Path).
|
||||
working_file_handler(validated, _Value).
|
||||
working_file_handler(stored, _Value).
|
||||
|
||||
auto_handler(message, 'auto non-zero').
|
||||
auto_handler(validating, Value) :-
|
||||
number(Value),
|
||||
|
@ -245,8 +245,8 @@
|
||||
flag_get/2,
|
||||
flags_reset/0]).
|
||||
|
||||
:-ensure_loaded(library(lists)).
|
||||
:-ensure_loaded(library(system)). % for file operations
|
||||
:- use_module(library(lists), [append/3, memberchk/2, reverse/2]).
|
||||
:- use_module(library(system), [delete_file/1, file_exists/1, file_property/2, make_directory/1]). % for file operations
|
||||
|
||||
flag_define(Flag, Type, DefaultValue, Message):-
|
||||
flag_define(Flag, general, Type, DefaultValue, flags:true, Message).
|
||||
@ -259,10 +259,10 @@ flag_define(Flag, Group, Type, DefaultValue, Handler, Message):-
|
||||
throw(duplicate_flag_definition(flag_define(Flag, Group, Type, DefaultValue, Handler, Message))).
|
||||
|
||||
flag_define(Flag, Group, Type, DefaultValue, Handler, Message):-
|
||||
(catch(call(Type), _, fail)->
|
||||
(catch(Type, _, fail)->
|
||||
fail
|
||||
;
|
||||
\+ (flag_validation_syntactic_sugar(Type, SyntacticSugar), catch(call(SyntacticSugar), _, fail)),
|
||||
\+ (flag_validation_syntactic_sugar(Type, SyntacticSugar), catch(SyntacticSugar, _, fail)),
|
||||
throw(unknown_flag_type(flag_define(Flag, Group, Type, DefaultValue, Handler, Message)))
|
||||
).
|
||||
|
||||
@ -371,13 +371,13 @@ flag_validate(_Flag, Value, Type, M:Handler):-
|
||||
Type =.. LType,
|
||||
append(LType, [Value], LGoal),
|
||||
G =.. LGoal,
|
||||
catch((call(M:GoalValidating), call(G)), _, fail), !.
|
||||
catch((M:GoalValidating, G), _, fail), !.
|
||||
flag_validate(_Flag, Value, Type, _M:Handler):-
|
||||
Handler == true,
|
||||
Type =.. LType,
|
||||
append(LType, [Value], LGoal),
|
||||
G =.. LGoal,
|
||||
catch(call(G), _, fail), !.
|
||||
catch(G, _, fail), !.
|
||||
|
||||
flag_validate(_Flag, Value, SyntacticSugar, M:Handler):-
|
||||
Handler \= true,
|
||||
@ -386,14 +386,14 @@ flag_validate(_Flag, Value, SyntacticSugar, M:Handler):-
|
||||
Type =.. LType,
|
||||
append(LType, [Value], LGoal),
|
||||
G =.. LGoal,
|
||||
catch((call(M:GoalValidating), call(G)), _, fail), !.
|
||||
catch((M:GoalValidating, G), _, fail), !.
|
||||
flag_validate(_Flag, Value, SyntacticSugar, _M:Handler):-
|
||||
Handler == true,
|
||||
flag_validation_syntactic_sugar(SyntacticSugar, Type),
|
||||
Type =.. LType,
|
||||
append(LType, [Value], LGoal),
|
||||
G =.. LGoal,
|
||||
catch(call(G), _, fail), !.
|
||||
catch(G, _, fail), !.
|
||||
flag_validate(Flag, Value, Type, Handler):-
|
||||
(var(Value) ->
|
||||
Value = 'free variable'
|
||||
@ -435,14 +435,14 @@ flag_validate_directory(Value):-
|
||||
flag_validate_directory(Value):-
|
||||
atomic(Value),
|
||||
% fixme : why not inform the user???
|
||||
catch((not(file_exists(Value)), make_directory(Value)), _, fail).
|
||||
catch((\+ file_exists(Value), make_directory(Value)), _, fail).
|
||||
|
||||
flag_validate_file.
|
||||
flag_validate_file(Value):-
|
||||
catch(file_exists(Value), _, fail), file_property(Value, type(regular)), !.
|
||||
flag_validate_file(Value):-
|
||||
atomic(Value),
|
||||
catch((not(file_exists(Value)), tell(Value)), _, fail),
|
||||
catch((\+ file_exists(Value), tell(Value)), _, fail),
|
||||
told,
|
||||
delete_file(Value).
|
||||
|
||||
|
@ -276,7 +276,7 @@
|
||||
hash_table_display/3,
|
||||
problog_key_to_tuple/2]).
|
||||
|
||||
:- ensure_loaded(library(lists)).
|
||||
:- use_module(library(lists), [member/2]).
|
||||
%
|
||||
% General use predicates
|
||||
%
|
||||
@ -299,7 +299,7 @@ get_digits(Num, Digits, Acc):-
|
||||
%
|
||||
% Simple Counters
|
||||
%
|
||||
:- bb_put(array_count, 1).
|
||||
:- initialization(bb_put(array_count, 1)).
|
||||
|
||||
get_next_array(ID, Name):-
|
||||
bb_get(array_count, ID),
|
||||
@ -705,7 +705,7 @@ hash_table_get_elements(RevArray, RevSize, RevSize, Tupples):-
|
||||
hash_table_get_elements(_RevArray, RevSize, RevSize, []).
|
||||
|
||||
hash_table_get_chains(Array, Size, Chains):-
|
||||
((array_element(Array, Size, ChainID), not(ChainID == 0)) ->
|
||||
((array_element(Array, Size, ChainID), ChainID \== 0) ->
|
||||
(integer(ChainID) ->
|
||||
get_array_name(ChainID, ChainName)
|
||||
;
|
||||
|
@ -212,7 +212,8 @@
|
||||
|
||||
:- style_check(all).
|
||||
:- yap_flag(unknown,error).
|
||||
:- use_module(library(lists)).
|
||||
|
||||
:- use_module(library(lists), [member/2, reverse/2, select/3]).
|
||||
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
% intervals_merge(+Interval1,+Interval2,-ResultingInterval)
|
||||
|
@ -225,10 +225,11 @@
|
||||
:- yap_flag(unknown,error).
|
||||
:- style_check(single_var).
|
||||
|
||||
:- bb_put(logger_filename,'out.dat').
|
||||
:- bb_put(logger_delimiter,';').
|
||||
:- bb_put(logger_variables,[]).
|
||||
|
||||
:- initialization((
|
||||
bb_put(logger_filename,'out.dat'),
|
||||
bb_put(logger_delimiter,';'),
|
||||
bb_put(logger_variables,[])
|
||||
)).
|
||||
|
||||
%========================================================================
|
||||
%= Defines a new variable, possible types are: int, float and time
|
||||
|
@ -206,9 +206,9 @@
|
||||
|
||||
:- module(mc_DNF_sampling, [problog_dnf_sampling/3]).
|
||||
|
||||
:- ensure_loaded(library(lists)).
|
||||
:- use_module(library(lists), [memberchk/2]).
|
||||
|
||||
:- ensure_loaded(variables).
|
||||
:- use_module(variables).
|
||||
:- use_module(sampling, _, [problog_random/1,
|
||||
problog_convergence_check/6]).
|
||||
|
||||
@ -217,14 +217,16 @@

:- use_module(os, _, [convert_filename_to_working_path/2]).

:- ensure_loaded(hash_table).
:- use_module(hash_table).

:- problog_define_flag(search_method, problog_flag_validate_in_list([linear, binary]), 'search method for picking proof', binary, monte_carlo_sampling_dnf).
:- problog_define_flag(represent_world, problog_flag_validate_in_list([list, record, array, hash_table]), 'structure that represents sampled world', array, monte_carlo_sampling_dnf).
:- initialization((
problog_define_flag(search_method, problog_flag_validate_in_list([linear, binary]), 'search method for picking proof', binary, monte_carlo_sampling_dnf),
problog_define_flag(represent_world, problog_flag_validate_in_list([list, record, array, hash_table]), 'structure that represents sampled world', array, monte_carlo_sampling_dnf),

:- problog_var_define(dnf_sampling_time, times, time, messages('DNF Sampling', ':', ' ms')).
:- problog_var_define(probability_lower, result, untyped, messages('Lower probability bound', ' = ', '')).
:- problog_var_define(probability_upper, result, untyped, messages('Upper probability bound', ' = ', '')).
problog_var_define(dnf_sampling_time, times, time, messages('DNF Sampling', ':', ' ms')),
problog_var_define(probability_lower, result, untyped, messages('Lower probability bound', ' = ', '')),
problog_var_define(probability_upper, result, untyped, messages('Upper probability bound', ' = ', ''))
)).

% problog_independed(T, P):-
% tries:trie_traverse_first(T, FirstRef), !,
@ -2,8 +2,8 @@

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%
% $Date: 2010-08-24 15:23:06 +0200 (Tue, 24 Aug 2010) $
% $Revision: 4672 $
% $Date: 2010-09-24 15:54:45 +0200 (Fri, 24 Sep 2010) $
% $Revision: 4822 $
%
% This file is part of ProbLog
% http://dtai.cs.kuleuven.be/problog
@ -212,25 +212,24 @@
convert_filename_to_working_path/2,
convert_filename_to_problog_path/2,
concat_path_with_filename/3,
empty_bdd_directory/1,
empty_output_directory/1,
calc_md5/2]).


% load library modules
:- ensure_loaded(library(system)).
:- use_module(library(system), [exec/3, file_exists/1]).

% load our own modules
:- ensure_loaded(flags).
:- use_module(gflags, _, [flag_get/2]).

:- dynamic [problog_dir/1, problog_working_path/1].
:- dynamic(problog_path/1).
:- dynamic(problog_working_path/1).

set_problog_path(Path):-
retractall(problog_path(_)),
assert(problog_path(Path)).
assertz(problog_path(Path)).

convert_filename_to_working_path(File_Name, Path):-
problog_flag(dir, Dir),
flag_get(dir, Dir),
concat_path_with_filename(Dir, File_Name, Path).

convert_filename_to_problog_path(File_Name, Path):-
@ -253,32 +252,6 @@ concat_path_with_filename(Path, File_Name, Result):-

atomic_concat([Path_Absolute, Path_Seperator, File_Name], Result).

%========================================================================
%= store the current succes probabilities for training and test examples
%=
%========================================================================

empty_bdd_directory(Path) :-
ground(Path),

concat_path_with_filename(Path,'query_*',Files),
atomic_concat(['rm -f ',Files],Command),
(shell(Command) -> true; true).
%========================================================================
%= store the current succes probabilities for training and test examples
%=
%========================================================================

empty_output_directory(Path) :-
ground(Path),

concat_path_with_filename(Path,'log.dat',F1),
concat_path_with_filename(Path,'factprobs_*.pl',F2),
concat_path_with_filename(Path,'predictions_*.pl',F3),

atomic_concat(['rm -f ',F1, ' ', F2, ' ', F3],Command),
(shell(Command) -> true; true).

%========================================================================
%= Calculate the MD5 checksum of +Filename by calling md5sum
%= in case m5sum is not installed, try md5, otherwise fail
@ -307,7 +280,7 @@ calc_md5_intern(Filename,Command,MD5) :-
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
( % read 32 Bytes from stdout of process
repeat,
get0(S,C),
get_code(S,C),

(
C== -1
@ -225,11 +225,11 @@
problog_help/0]).

% load library modules
:- ensure_loaded(library(lists)).
:- use_module(library(lists), [member/2]).

% load our own modules
:- ensure_loaded(flags).
:- ensure_loaded(variables).
:- use_module(flags).
:- use_module(variables).


% size, line_char, line_char_bold
232
packages/ProbLog/problog/print_learning.yap
Normal file
@ -0,0 +1,232 @@
|
||||
%%% -*- Mode: Prolog; -*-
|
||||
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
%
|
||||
% $Date: 2010-09-23 11:13:10 +0200 (Thu, 23 Sep 2010) $
|
||||
% $Revision: 4804 $
|
||||
%
|
||||
% This file is part of ProbLog
|
||||
% http://dtai.cs.kuleuven.be/problog
|
||||
%
|
||||
% ProbLog was developed at Katholieke Universiteit Leuven
|
||||
%
|
||||
% Copyright 2008, 2009, 2010
|
||||
% Katholieke Universiteit Leuven
|
||||
%
|
||||
% Main authors of this file:
|
||||
% Bernd Gutmann
|
||||
%
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
%
|
||||
% Artistic License 2.0
|
||||
%
|
||||
% Copyright (c) 2000-2006, The Perl Foundation.
|
||||
%
|
||||
% Everyone is permitted to copy and distribute verbatim copies of this
|
||||
% license document, but changing it is not allowed. Preamble
|
||||
%
|
||||
% This license establishes the terms under which a given free software
|
||||
% Package may be copied, modified, distributed, and/or
|
||||
% redistributed. The intent is that the Copyright Holder maintains some
|
||||
% artistic control over the development of that Package while still
|
||||
% keeping the Package available as open source and free software.
|
||||
%
|
||||
% You are always permitted to make arrangements wholly outside of this
|
||||
% license directly with the Copyright Holder of a given Package. If the
|
||||
% terms of this license do not permit the full use that you propose to
|
||||
% make of the Package, you should contact the Copyright Holder and seek
|
||||
% a different licensing arrangement. Definitions
|
||||
%
|
||||
% "Copyright Holder" means the individual(s) or organization(s) named in
|
||||
% the copyright notice for the entire Package.
|
||||
%
|
||||
% "Contributor" means any party that has contributed code or other
|
||||
% material to the Package, in accordance with the Copyright Holder's
|
||||
% procedures.
|
||||
%
|
||||
% "You" and "your" means any person who would like to copy, distribute,
|
||||
% or modify the Package.
|
||||
%
|
||||
% "Package" means the collection of files distributed by the Copyright
|
||||
% Holder, and derivatives of that collection and/or of those files. A
|
||||
% given Package may consist of either the Standard Version, or a
|
||||
% Modified Version.
|
||||
%
|
||||
% "Distribute" means providing a copy of the Package or making it
|
||||
% accessible to anyone else, or in the case of a company or
|
||||
% organization, to others outside of your company or organization.
|
||||
%
|
||||
% "Distributor Fee" means any fee that you charge for Distributing this
|
||||
% Package or providing support for this Package to another party. It
|
||||
% does not mean licensing fees.
|
||||
%
|
||||
% "Standard Version" refers to the Package if it has not been modified,
|
||||
% or has been modified only in ways explicitly requested by the
|
||||
% Copyright Holder.
|
||||
%
|
||||
% "Modified Version" means the Package, if it has been changed, and such
|
||||
% changes were not explicitly requested by the Copyright Holder.
|
||||
%
|
||||
% "Original License" means this Artistic License as Distributed with the
|
||||
% Standard Version of the Package, in its current version or as it may
|
||||
% be modified by The Perl Foundation in the future.
|
||||
%
|
||||
% "Source" form means the source code, documentation source, and
|
||||
% configuration files for the Package.
|
||||
%
|
||||
% "Compiled" form means the compiled bytecode, object code, binary, or
|
||||
% any other form resulting from mechanical transformation or translation
|
||||
% of the Source form.
|
||||
%
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
%
|
||||
% Permission for Use and Modification Without Distribution
|
||||
%
|
||||
% (1) You are permitted to use the Standard Version and create and use
|
||||
% Modified Versions for any purpose without restriction, provided that
|
||||
% you do not Distribute the Modified Version.
|
||||
%
|
||||
% Permissions for Redistribution of the Standard Version
|
||||
%
|
||||
% (2) You may Distribute verbatim copies of the Source form of the
|
||||
% Standard Version of this Package in any medium without restriction,
|
||||
% either gratis or for a Distributor Fee, provided that you duplicate
|
||||
% all of the original copyright notices and associated disclaimers. At
|
||||
% your discretion, such verbatim copies may or may not include a
|
||||
% Compiled form of the Package.
|
||||
%
|
||||
% (3) You may apply any bug fixes, portability changes, and other
|
||||
% modifications made available from the Copyright Holder. The resulting
|
||||
% Package will still be considered the Standard Version, and as such
|
||||
% will be subject to the Original License.
|
||||
%
|
||||
% Distribution of Modified Versions of the Package as Source
|
||||
%
|
||||
% (4) You may Distribute your Modified Version as Source (either gratis
|
||||
% or for a Distributor Fee, and with or without a Compiled form of the
|
||||
% Modified Version) provided that you clearly document how it differs
|
||||
% from the Standard Version, including, but not limited to, documenting
|
||||
% any non-standard features, executables, or modules, and provided that
|
||||
% you do at least ONE of the following:
|
||||
%
|
||||
% (a) make the Modified Version available to the Copyright Holder of the
|
||||
% Standard Version, under the Original License, so that the Copyright
|
||||
% Holder may include your modifications in the Standard Version. (b)
|
||||
% ensure that installation of your Modified Version does not prevent the
|
||||
% user installing or running the Standard Version. In addition, the
|
||||
% modified Version must bear a name that is different from the name of
|
||||
% the Standard Version. (c) allow anyone who receives a copy of the
|
||||
% Modified Version to make the Source form of the Modified Version
|
||||
% available to others under (i) the Original License or (ii) a license
|
||||
% that permits the licensee to freely copy, modify and redistribute the
|
||||
% Modified Version using the same licensing terms that apply to the copy
|
||||
% that the licensee received, and requires that the Source form of the
|
||||
% Modified Version, and of any works derived from it, be made freely
|
||||
% available in that license fees are prohibited but Distributor Fees are
|
||||
% allowed.
|
||||
%
|
||||
% Distribution of Compiled Forms of the Standard Version or
|
||||
% Modified Versions without the Source
|
||||
%
|
||||
% (5) You may Distribute Compiled forms of the Standard Version without
|
||||
% the Source, provided that you include complete instructions on how to
|
||||
% get the Source of the Standard Version. Such instructions must be
|
||||
% valid at the time of your distribution. If these instructions, at any
|
||||
% time while you are carrying out such distribution, become invalid, you
|
||||
% must provide new instructions on demand or cease further
|
||||
% distribution. If you provide valid instructions or cease distribution
|
||||
% within thirty days after you become aware that the instructions are
|
||||
% invalid, then you do not forfeit any of your rights under this
|
||||
% license.
|
||||
%
|
||||
% (6) You may Distribute a Modified Version in Compiled form without the
|
||||
% Source, provided that you comply with Section 4 with respect to the
|
||||
% Source of the Modified Version.
|
||||
%
|
||||
% Aggregating or Linking the Package
|
||||
%
|
||||
% (7) You may aggregate the Package (either the Standard Version or
|
||||
% Modified Version) with other packages and Distribute the resulting
|
||||
% aggregation provided that you do not charge a licensing fee for the
|
||||
% Package. Distributor Fees are permitted, and licensing fees for other
|
||||
% components in the aggregation are permitted. The terms of this license
|
||||
% apply to the use and Distribution of the Standard or Modified Versions
|
||||
% as included in the aggregation.
|
||||
%
|
||||
% (8) You are permitted to link Modified and Standard Versions with
|
||||
% other works, to embed the Package in a larger work of your own, or to
|
||||
% build stand-alone binary or bytecode versions of applications that
|
||||
% include the Package, and Distribute the result without restriction,
|
||||
% provided the result does not expose a direct interface to the Package.
|
||||
%
|
||||
% Items That are Not Considered Part of a Modified Version
|
||||
%
|
||||
% (9) Works (including, but not limited to, modules and scripts) that
|
||||
% merely extend or make use of the Package, do not, by themselves, cause
|
||||
% the Package to be a Modified Version. In addition, such works are not
|
||||
% considered parts of the Package itself, and are not subject to the
|
||||
% terms of this license.
|
||||
%
|
||||
% General Provisions
|
||||
%
|
||||
% (10) Any use, modification, and distribution of the Standard or
|
||||
% Modified Versions is governed by this Artistic License. By using,
|
||||
% modifying or distributing the Package, you accept this license. Do not
|
||||
% use, modify, or distribute the Package, if you do not accept this
|
||||
% license.
|
||||
%
|
||||
% (11) If your Modified Version has been derived from a Modified Version
|
||||
% made by someone other than you, you are nevertheless required to
|
||||
% ensure that your Modified Version complies with the requirements of
|
||||
% this license.
|
||||
%
|
||||
% (12) This license does not grant you the right to use any trademark,
|
||||
% service mark, tradename, or logo of the Copyright Holder.
|
||||
%
|
||||
% (13) This license includes the non-exclusive, worldwide,
|
||||
% free-of-charge patent license to make, have made, use, offer to sell,
|
||||
% sell, import and otherwise transfer the Package with respect to any
|
||||
% patent claims licensable by the Copyright Holder that are necessarily
|
||||
% infringed by the Package. If you institute patent litigation
|
||||
% (including a cross-claim or counterclaim) against any party alleging
|
||||
% that the Package constitutes direct or contributory patent
|
||||
% infringement, then this Artistic License to you shall terminate on the
|
||||
% date that such litigation is filed.
|
||||
%
|
||||
% (14) Disclaimer of Warranty: THE PACKAGE IS PROVIDED BY THE COPYRIGHT
|
||||
% HOLDER AND CONTRIBUTORS "AS IS' AND WITHOUT ANY EXPRESS OR IMPLIED
|
||||
% WARRANTIES. THE IMPLIED WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
|
||||
% PARTICULAR PURPOSE, OR NON-INFRINGEMENT ARE DISCLAIMED TO THE EXTENT
|
||||
% PERMITTED BY YOUR LOCAL LAW. UNLESS REQUIRED BY LAW, NO COPYRIGHT
|
||||
% HOLDER OR CONTRIBUTOR WILL BE LIABLE FOR ANY DIRECT, INDIRECT,
|
||||
% INCIDENTAL, OR CONSEQUENTIAL DAMAGES ARISING IN ANY WAY OUT OF THE USE
|
||||
% OF THE PACKAGE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
%
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
|
||||
%%%%%%%%
|
||||
% Collected OS depended instructions
|
||||
%%%%%%%%
|
||||
:- module(print_learning, [format_learning/3]).
|
||||
|
||||
|
||||
% load our own modules
|
||||
:- use_module(flags).
|
||||
|
||||
:- initialization(problog_define_flag(verbosity_learning, problog_flag_validate_0to5,'How much output shall be given (0=nothing,5=all)',5, learning_general)).
|
||||
|
||||
|
||||
%========================================================================
|
||||
%=
|
||||
%=
|
||||
%=
|
||||
%========================================================================
|
||||
|
||||
format_learning(Level,String,Arguments) :-
|
||||
problog_flag(verbosity_learning,V_Level),
|
||||
V_Level >= Level,
|
||||
!,
|
||||
format(String,Arguments),
|
||||
flush_output(user).
|
||||
format_learning(_,_,_) :-
|
||||
true.
|
@ -212,14 +212,14 @@

:- use_module(os, _, [convert_filename_to_working_path/2]).

:- ensure_loaded(library(random)).
:- use_module(library(random)).

:- problog_define_flag(mc_batchsize, problog_flag_validate_posint, 'number of samples before update in montecarlo', 1000, monte_carlo_sampling).

:- problog_define_flag(min_mc_samples, problog_flag_validate_nonegint, 'minimum number of samples before to converge', 0, monte_carlo_sampling).
:- problog_define_flag(max_mc_samples, problog_flag_validate_nonegint, 'maximum number of samples waiting to converge', 1000000, monte_carlo_sampling).

:- problog_define_flag(randomizer, problog_flag_validate_in_list([repeatable, nonrepeatable]), 'whether the random numbers are repeatable or not', repeatable, monte_carlo_sampling).
:- initialization((
problog_define_flag(mc_batchsize, problog_flag_validate_posint, 'number of samples before update in montecarlo', 1000, monte_carlo_sampling),
problog_define_flag(min_mc_samples, problog_flag_validate_nonegint, 'minimum number of samples before to converge', 0, monte_carlo_sampling),
problog_define_flag(max_mc_samples, problog_flag_validate_nonegint, 'maximum number of samples waiting to converge', 1000000, monte_carlo_sampling),
problog_define_flag(randomizer, problog_flag_validate_in_list([repeatable, nonrepeatable]), 'whether the random numbers are repeatable or not', repeatable, monte_carlo_sampling)
)).

problog_convergence_check(Time, P, SamplesSoFar, Delta, Epsilon, Converged):-
Epsilon is 2.0 * sqrt(P * abs(1.0 - P) / SamplesSoFar),
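For reference, the Epsilon computed in problog_convergence_check/6 above is roughly a 95% confidence half-width for the estimated probability P after SamplesSoFar samples (the factor 2.0 approximates the normal quantile 1.96). A worked example with illustrative numbers:

% With P = 0.3 and SamplesSoFar = 1000:
%   Epsilon is 2.0 * sqrt(0.3 * 0.7 / 1000)     % = 2.0 * sqrt(0.00021)
% which evaluates to approximately 0.029, so the Monte Carlo estimate is
% known to about +/- 0.029 at that point; the rest of the predicate (not
% shown in this hunk) compares this half-width against the requested Delta.
% ?- Eps is 2.0 * sqrt(0.3 * abs(1.0 - 0.3) / 1000).
% Eps = 0.02898... (approximately 0.029)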
@ -233,7 +233,7 @@
problog_tabling_get_negated_from_id/2,
op(1150, fx, problog_table)]).

:- ensure_loaded(library(lists)).
:- use_module(library(lists), [memberchk/2]).

:- use_module(extlists, _, [open_end_memberchk/2,
open_end_add/3,
@ -248,11 +248,18 @@
empty_ptree/1]).

:- op( 1150, fx, problog_table ).
:- meta_predicate problog_table(:).
:- meta_predicate problog_neg(:).
:- dynamic problog_tabled/1, has_synonyms/0, problog_tabling_retain/1.
:- problog_define_flag(max_depth, problog_flag_validate_integer, 'maximum proof depth', -1).
:- problog_define_flag(retain_tables, problog_flag_validate_boolean, 'retain tables after query', false).

:- meta_predicate(problog_table(0)).
:- meta_predicate(problog_neg(0)).

:- dynamic(problog_tabled/1).
:- dynamic(has_synonyms/0).
:- dynamic(problog_tabling_retain/1).

:- initialization((
problog_define_flag(max_depth, problog_flag_validate_integer, 'maximum proof depth', -1),
problog_define_flag(retain_tables, problog_flag_validate_boolean, 'retain tables after query', false)
)).

init_tabling :-
nb_setval(problog_current_depth, 0),
@ -277,7 +284,7 @@ clear_tabling:-
|
||||
clear_tabling.
|
||||
|
||||
retain_tabling:-
|
||||
forall(problog_chktabled(_, Trie), assert(problog_tabling_retain(Trie))).
|
||||
forall(problog_chktabled(_, Trie), assertz(problog_tabling_retain(Trie))).
|
||||
|
||||
clear_retained_tables:-
|
||||
forall(problog_tabling_retain(Trie), delete_ptree(Trie)),
|
||||
@ -311,7 +318,7 @@ problog_table((P1, P2), M) :-
|
||||
problog_table(Name/Arity, Module) :-
|
||||
makeargs(Arity, Args),
|
||||
Head =.. [Name|Args],
|
||||
not(predicate_property(Module:Head, dynamic)), !,
|
||||
\+ predicate_property(Module:Head, dynamic), !,
|
||||
throw(error('problog_table: Problog tabling currently requires the predicate to be declared dynamic and compiles it to static.')).
|
||||
problog_table(Name/Arity, Module) :-
|
||||
makeargs(Arity, Args),
|
||||
@ -322,7 +329,7 @@ problog_table(Name/Arity, Module) :-
|
||||
|
||||
% Monte carlo tabling
|
||||
table(Module:MCName/Arity),
|
||||
assert(problog_tabled(Module:Name/Arity)),
|
||||
assertz(problog_tabled(Module:Name/Arity)),
|
||||
|
||||
findall(_,(
|
||||
OriginalPred =.. [OriginalName|Args],
|
||||
@ -334,7 +341,7 @@ problog_table(Name/Arity, Module) :-
|
||||
OriginalPred =.. [OriginalName|Args],
|
||||
MCPred =.. [MCName|Args],
|
||||
ExactPred =.. [ExactName|Args],
|
||||
assert(Module:(
|
||||
assertz(Module:(
|
||||
Head:-
|
||||
(problog:problog_control(check, exact) ->
|
||||
ExactPred
|
||||
@ -361,7 +368,7 @@ problog_table(Name/Arity, Module) :-
|
||||
Finished
|
||||
),
|
||||
b_getval(problog_current_proof, IDs),
|
||||
not(open_end_memberchk(not(t(Hash)), IDs)),
|
||||
\+ open_end_memberchk(not(t(Hash)), IDs),
|
||||
open_end_add_unique(t(Hash), IDs, NIDs),
|
||||
b_setval(problog_current_proof, NIDs)
|
||||
;
|
||||
@ -413,7 +420,7 @@ problog_table(Name/Arity, Module) :-
|
||||
delete_ptree(SuspTrie)
|
||||
),
|
||||
b_setval(CurrentControlTrie, OCurTrie),
|
||||
not(open_end_memberchk(not(t(Hash)), OIDs)),
|
||||
\+ open_end_memberchk(not(t(Hash)), OIDs),
|
||||
open_end_add_unique(t(Hash), OIDs, NOIDs),
|
||||
b_setval(problog_current_proof, NOIDs)
|
||||
)
|
||||
@ -435,8 +442,8 @@ problog_abolish_table(M:P/A):-
|
||||
problog_neg(M:G):-
|
||||
problog:problog_control(check, exact),
|
||||
functor(G, Name, Arity),
|
||||
not(problog_tabled(M:Name/Arity)),
|
||||
not(problog:problog_predicate(Name, Arity)),
|
||||
\+ problog_tabled(M:Name/Arity),
|
||||
\+ problog:problog_predicate(Name, Arity),
|
||||
throw(problog_neg_error('Error: goal must be dynamic and tabled', M:G)).
|
||||
problog_neg(M:G):-
|
||||
% exact inference
|
||||
@ -446,20 +453,20 @@ problog_neg(M:G):-
|
||||
M:G,
|
||||
b_getval(problog_current_proof, L),
|
||||
open_end_close_end(L, [Trie]),
|
||||
not(open_end_memberchk(Trie, IDs)),
|
||||
\+ open_end_memberchk(Trie, IDs),
|
||||
open_end_add_unique(not(Trie), IDs, NIDs),
|
||||
b_setval(problog_current_proof, NIDs).
|
||||
problog_neg(M:G):-
|
||||
% monte carlo sampling
|
||||
problog:problog_control(check, mc),
|
||||
not(M:G).
|
||||
\+ M:G.
|
||||
|
||||
% This predicate assigns a synonym for negation that means: NotName = problog_neg(Name)
|
||||
problog_tabling_negated_synonym(Name, NotName):-
|
||||
recorded(problog_table_synonyms, negated(Name, NotName), _), !.
|
||||
problog_tabling_negated_synonym(Name, NotName):-
|
||||
retractall(has_synonyms),
|
||||
assert(has_synonyms),
|
||||
assertz(has_synonyms),
|
||||
recordz(problog_table_synonyms, negated(Name, NotName), _).
|
||||
|
||||
problog_tabling_get_negated_from_pred(Pred, Ref):-
|
||||
|
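Several hunks in this tabling code replace not(Goal) with \+ Goal and not(A == B) with A \== B; the meaning (negation as failure, term non-identity) is unchanged, but \+/1 and \==/2 are the standard operators. A small self-contained sketch; the facts and predicate names are illustrative only:

% Minimal sketch (illustrative facts): negation as failure and non-identity.
tabled(foo/1).

untabled(P) :-
    \+ tabled(P).     % succeeds exactly when tabled(P) has no solution

different(X, Y) :-
    X \== Y.          % true when X and Y are not identical terms

% ?- untabled(bar/2).      succeeds
% ?- different(a, a).      fails
% ?- different(a, B).      succeeds: a and the unbound B are distinct terms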
@ -214,8 +214,8 @@
:- yap_flag(unknown,error).
:- style_check(single_var).

:- dynamic timer/2.
:- dynamic timer_paused/2.
:- dynamic(timer/2).
:- dynamic(timer_paused/2).


timer_start(Name) :-
@ -225,7 +225,7 @@ timer_start(Name) :-
throw(timer_already_started(timer_start(Name)));

statistics(walltime,[StartTime,_]),
assert(timer(Name,StartTime))
assertz(timer(Name,StartTime))
).

timer_stop(Name,Duration) :-
@ -244,7 +244,7 @@ timer_pause(Name) :-
->
statistics(walltime,[StopTime,_]),
Duration is StopTime-StartTime,
assert(timer_paused(Name,Duration));
assertz(timer_paused(Name,Duration));

throw(timer_not_started(timer_pause(Name)))
).
@ -255,7 +255,7 @@ timer_pause(Name, Duration) :-
->
statistics(walltime,[StopTime,_]),
Duration is StopTime-StartTime,
assert(timer_paused(Name,Duration));
assertz(timer_paused(Name,Duration));

throw(timer_not_started(timer_pause(Name)))
).
@ -266,7 +266,7 @@ timer_resume(Name):-
->
statistics(walltime,[ResumeTime,_]),
CorrectedStartTime is ResumeTime-Duration,
assert(timer(Name,CorrectedStartTime));
assertz(timer(Name,CorrectedStartTime));

throw(timer_not_paused(timer_resume(Name)))
).
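The timer predicates above keep one timer/2 fact per named timer and measure elapsed wall time via statistics(walltime, ...). A short usage sketch, assuming this timers file is loaded; the timer name my_phase and the measured goal are illustrative. timer_pause/1 and timer_resume/1 can bracket work that should not count towards the reported duration.

% Usage sketch (illustrative name and workload):
time_phase :-
    timer_start(my_phase),
    length(_Long, 100000),            % some work to measure
    timer_stop(my_phase, Duration),
    format('my_phase took ~w ms~n', [Duration]).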
@ -2,8 +2,8 @@

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%
% $Date: 2010-08-30 18:09:17 +0200 (Mon, 30 Aug 2010) $
% $Revision: 4728 $
% $Date: 2010-09-07 23:20:03 +0200 (Tue, 07 Sep 2010) $
% $Revision: 4765 $
%
% This file is part of ProbLog
% http://dtai.cs.kuleuven.be/problog
@ -245,13 +245,13 @@
]).

% load library modules
:- ensure_loaded(library(tries)).
:- ensure_loaded(library(lists)).
:- ensure_loaded(library(system)).
:- ensure_loaded(library(ordsets)).
:- use_module(library(tries)).
:- use_module(library(lists), [append/3, member/2, memberchk/2]).
:- use_module(library(system), [delete_file/1, shell/1]).
:- use_module(library(ordsets), [ord_intersection/3, ord_union/3]).

% load our own modules
:- ensure_loaded(flags).
:- use_module(flags).

% switch on all tests to reduce bug searching time
:- style_check(all).
@ -259,28 +259,32 @@
|
||||
|
||||
|
||||
% this is a test to determine whether YAP provides the needed trie library
|
||||
:- current_predicate(tries:trie_disable_hash/0)
|
||||
->
|
||||
trie_disable_hash;
|
||||
print_message(warning,'The predicate tries:trie_disable_hash/0 does not exist. Please update trie library.').
|
||||
:- initialization(
|
||||
( predicate_property(trie_disable_hash, imported_from(tries)) ->
|
||||
trie_disable_hash
|
||||
; print_message(warning,'The predicate tries:trie_disable_hash/0 does not exist. Please update trie library.')
|
||||
)
|
||||
).
|
||||
|
||||
%%%%%%%%%%%%%%%%%%%%%%%
|
||||
% Define module flags
|
||||
%%%%%%%%%%%%%%%%%%%%%%%
|
||||
|
||||
:- problog_define_flag(use_db_trie, problog_flag_validate_boolean, 'use the builtin trie 2 trie transformation', true).
|
||||
:- problog_define_flag(db_trie_opt_lvl, problog_flag_validate_integer, 'optimization level for the trie 2 trie transformation', 0).
|
||||
:- problog_define_flag(compare_opt_lvl, problog_flag_validate_boolean, 'comparison mode for optimization level', false).
|
||||
:- problog_define_flag(db_min_prefix, problog_flag_validate_integer, 'minimum size of prefix for dbtrie to optimize', 2).
|
||||
:- problog_define_flag(use_naive_trie, problog_flag_validate_boolean, 'use the naive algorithm to generate bdd scripts', false).
|
||||
:- problog_define_flag(use_old_trie, problog_flag_validate_boolean, 'use the old trie 2 trie transformation no nested', false).
|
||||
:- problog_define_flag(use_dec_trie, problog_flag_validate_boolean, 'use the decomposition method', false).
|
||||
:- problog_define_flag(subset_check, problog_flag_validate_boolean, 'perform subset check in nested tries', true).
|
||||
:- problog_define_flag(deref_terms, problog_flag_validate_boolean, 'deref BDD terms after last use', false).
|
||||
|
||||
:- problog_define_flag(trie_preprocess, problog_flag_validate_boolean, 'perform a preprocess step to nested tries', false).
|
||||
:- problog_define_flag(refine_anclst, problog_flag_validate_boolean, 'refine the ancestor list with their childs', false).
|
||||
:- problog_define_flag(anclst_represent,problog_flag_validate_in_list([list, integer]), 'represent the ancestor list', list).
|
||||
:- initialization((
|
||||
problog_define_flag(use_db_trie, problog_flag_validate_boolean, 'use the builtin trie 2 trie transformation', false),
|
||||
problog_define_flag(db_trie_opt_lvl, problog_flag_validate_integer, 'optimization level for the trie 2 trie transformation', 0),
|
||||
problog_define_flag(compare_opt_lvl, problog_flag_validate_boolean, 'comparison mode for optimization level', false),
|
||||
problog_define_flag(db_min_prefix, problog_flag_validate_integer, 'minimum size of prefix for dbtrie to optimize', 2),
|
||||
problog_define_flag(use_naive_trie, problog_flag_validate_boolean, 'use the naive algorithm to generate bdd scripts', false),
|
||||
problog_define_flag(use_old_trie, problog_flag_validate_boolean, 'use the old trie 2 trie transformation no nested', true),
|
||||
problog_define_flag(use_dec_trie, problog_flag_validate_boolean, 'use the decomposition method', false),
|
||||
problog_define_flag(subset_check, problog_flag_validate_boolean, 'perform subset check in nested tries', true),
|
||||
problog_define_flag(deref_terms, problog_flag_validate_boolean, 'deref BDD terms after last use', false),
|
||||
|
||||
problog_define_flag(trie_preprocess, problog_flag_validate_boolean, 'perform a preprocess step to nested tries', false),
|
||||
problog_define_flag(refine_anclst, problog_flag_validate_boolean, 'refine the ancestor list with their childs', false),
|
||||
problog_define_flag(anclst_represent,problog_flag_validate_in_list([list, integer]), 'represent the ancestor list', list)
|
||||
)).
|
||||
|
||||
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
@ -419,6 +423,7 @@ merge_ptree(T1, T2, T3) :-
|
||||
% - ptree_decomposition -> ptree_decomposition_struct
|
||||
% - bdd_ptree_script -> bdd_struct_ptree_script
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
:- dynamic(c_num/1).
|
||||
|
||||
bdd_struct_ptree(Trie, FileBDD, Variables) :-
|
||||
bdd_struct_ptree_script(Trie, FileBDD, Variables),
|
||||
@ -434,7 +439,7 @@ bdd_struct_ptree_script(Trie, FileBDD, Variables) :-
|
||||
edges_ptree(Trie, Variables),
|
||||
name_vars(Variables), % expected by output_compressed_script/1?
|
||||
length(Variables, VarCount),
|
||||
assert(c_num(1)),
|
||||
assertz(c_num(1)),
|
||||
bdd_pt(Trie, CT),
|
||||
c_num(NN),
|
||||
IntermediateSteps is NN - 1,
|
||||
@ -502,7 +507,7 @@ trie_to_bdd_struct_trie(A, B, OutputFile, OptimizationLevel, Variables) :-
|
||||
write(VarCNT), nl,
|
||||
write(0), nl,
|
||||
write(InterStep), nl,
|
||||
trie_write(B),
|
||||
trie_write(B, LL),
|
||||
write(LL), nl,
|
||||
told
|
||||
;
|
||||
@ -524,7 +529,7 @@ trie_to_bdd_struct_trie(A, B, OutputFile, OptimizationLevel, Variables) :-
|
||||
|
||||
nested_trie_to_bdd_struct_trie(A, B, OutputFile, OptimizationLevel, Variables):-
|
||||
trie_nested_to_depth_breadth_trie(A, B, LL, OptimizationLevel, problog:problog_chktabled),
|
||||
(islabel(LL) ->
|
||||
(is_label(LL) ->
|
||||
retractall(deref(_,_)),
|
||||
(problog_flag(deref_terms, true) ->
|
||||
asserta(deref(LL,no)),
|
||||
@ -545,13 +550,13 @@ nested_trie_to_bdd_struct_trie(A, B, OutputFile, OptimizationLevel, Variables):-
|
||||
InterStep is NegStepN + 1,
|
||||
atomic_concat('L', InterStep, FL),
|
||||
write(InterStep), nl,
|
||||
trie_write(B),
|
||||
trie_write(B, FL),
|
||||
write(FL), write(' = ~'), write(NegL), nl,
|
||||
write(FL), nl
|
||||
;
|
||||
atomic_concat('L', InterStep, LL),
|
||||
write(InterStep), nl,
|
||||
trie_write(B),
|
||||
trie_write(B, LL),
|
||||
write(LL), nl
|
||||
),
|
||||
told
|
||||
@ -580,7 +585,7 @@ ptree_decomposition_struct(Trie, BDDFileName, Variables) :-
|
||||
length(Variables, VarCnt),
|
||||
tell(TmpFile1),
|
||||
decompose_trie(Trie, Variables, L),
|
||||
(islabel(L)->
|
||||
(is_label(L)->
|
||||
atomic_concat('L', LCnt, L),
|
||||
write(L),nl
|
||||
;
|
||||
@ -642,7 +647,7 @@ bdd_ptree_script(Trie, FileBDD, FileParam) :-
|
||||
|
||||
told,
|
||||
length(Edges, VarCount),
|
||||
assert(c_num(1)),
|
||||
assertz(c_num(1)),
|
||||
bdd_pt(Trie, CT),
|
||||
c_num(NN),
|
||||
IntermediateSteps is NN - 1,
|
||||
@ -734,13 +739,13 @@ bdd_vars_script_intern2(A) :-
|
||||
bdd_pt(Trie, false) :-
|
||||
empty_ptree(Trie),
|
||||
!,
|
||||
once(retractall(c_num(_))),
|
||||
once(assert(c_num(2))).
|
||||
retractall(c_num(_)),
|
||||
assertz(c_num(2)).
|
||||
bdd_pt(Trie, true) :-
|
||||
trie_check_entry(Trie, [true], _),
|
||||
!,
|
||||
once(retractall(c_num(_))),
|
||||
once(assert(c_num(2))).
|
||||
retractall(c_num(_)),
|
||||
assertz(c_num(2)).
|
||||
|
||||
% general case: transform trie to nested tree structure for compression
|
||||
bdd_pt(Trie, CT) :-
|
||||
@ -976,7 +981,7 @@ format_compression_script([A, B|C]) :-
|
||||
get_next_name(Name) :-
|
||||
retract(c_num(N)),
|
||||
NN is N + 1,
|
||||
assert(c_num(NN)),
|
||||
assertz(c_num(NN)),
|
||||
atomic_concat('L', N, Name).
|
||||
|
||||
% create BDD-var as fact id prefixed by x
|
||||
@ -1017,7 +1022,7 @@ statistics_ptree:-
|
||||
write('--------------------------------'),nl.
|
||||
|
||||
|
||||
:- dynamic nested_ptree_printed/1.
|
||||
:- dynamic(nested_ptree_printed/1).
|
||||
|
||||
print_nested_ptree(Trie):-
|
||||
retractall(nested_ptree_printed(_)),
|
||||
@ -1029,7 +1034,7 @@ print_nested_ptree(Trie, Level, Space):-
|
||||
spacy_print(begin(t(Trie)), Level, Space),
|
||||
fail.
|
||||
print_nested_ptree(Trie, Level, Space):-
|
||||
assert(nested_ptree_printed(Trie)),
|
||||
assertz(nested_ptree_printed(Trie)),
|
||||
trie_path(Trie, Path),
|
||||
NewLevel is Level + 1,
|
||||
spacy_print(Path, NewLevel, Space),
|
||||
@ -1051,9 +1056,9 @@ spacy_print(Msg, Level, Space):-
|
||||
|
||||
% Theo Naive method works with Nested Trie to BDD Script
|
||||
|
||||
:-dynamic(get_used_vars/2).
|
||||
:-dynamic(generated_trie/2).
|
||||
:-dynamic(next_intermediate_step/1).
|
||||
:- dynamic(get_used_vars/2).
|
||||
:- dynamic(generated_trie/2).
|
||||
:- dynamic(next_intermediate_step/1).
|
||||
|
||||
%
|
||||
% This needs to be modified
|
||||
@ -1116,7 +1121,7 @@ generate_BDD_from_trie(Trie, TrieInter, Stream):-
|
||||
get_next_intermediate_step(TrieInter),
|
||||
write_bdd_line(OrLineTerms, TrieInter, '+', Stream)
|
||||
),
|
||||
assert(generated_trie(Trie, TrieInter)).
|
||||
assertz(generated_trie(Trie, TrieInter)).
|
||||
|
||||
write_bdd_line([], _LineInter, _Operator, _Stream):-!.
|
||||
write_bdd_line(LineTerms, LineInter, Operator, Stream):-
|
||||
@ -1171,13 +1176,13 @@ bddvars_to_script([H|T], Stream):-
|
||||
bddvars_to_script(T, Stream).
|
||||
|
||||
get_next_intermediate_step('L1'):-
|
||||
not(clause(next_intermediate_step(_), _)), !,
|
||||
assert(next_intermediate_step(2)).
|
||||
\+ clause(next_intermediate_step(_), _), !,
|
||||
assertz(next_intermediate_step(2)).
|
||||
get_next_intermediate_step(Inter):-
|
||||
next_intermediate_step(InterStep),
|
||||
retract(next_intermediate_step(InterStep)),
|
||||
NextInterStep is InterStep + 1,
|
||||
assert(next_intermediate_step(NextInterStep)),
|
||||
assertz(next_intermediate_step(NextInterStep)),
|
||||
atomic_concat(['L', InterStep], Inter).
|
||||
|
||||
make_bdd_var('true', 'TRUE'):-!.
|
||||
@ -1200,9 +1205,9 @@ add_to_vars(V):-
|
||||
clause(get_used_vars(Vars, Cnt), true), !,
|
||||
retract(get_used_vars(Vars, Cnt)),
|
||||
NewCnt is Cnt + 1,
|
||||
assert(get_used_vars([V|Vars], NewCnt)).
|
||||
assertz(get_used_vars([V|Vars], NewCnt)).
|
||||
add_to_vars(V):-
|
||||
assert(get_used_vars([V], 1)).
|
||||
assertz(get_used_vars([V], 1)).
|
||||
|
||||
|
||||
%%%%%%%%%%%%%%% depth breadth builtin support %%%%%%%%%%%%%%%%%
|
||||
@ -1231,14 +1236,14 @@ variable_in_dbtrie(Trie, V):-
|
||||
|
||||
get_next_variable(V, depth(L, _S)):-
|
||||
member(V, L),
|
||||
not(islabel(V)).
|
||||
\+ is_label(V).
|
||||
get_next_variable(V, breadth(L, _S)):-
|
||||
member(V, L),
|
||||
not(islabel(V)).
|
||||
\+ is_label(V).
|
||||
get_next_variable(V, L):-
|
||||
member(V, L),
|
||||
not(islabel(V)),
|
||||
not(isnestedtrie(V)).
|
||||
\+ is_label(V),
|
||||
\+ isnestedtrie(V).
|
||||
|
||||
get_variable(not(V), R):-
|
||||
!, get_variable(V, R).
|
||||
@ -1253,7 +1258,7 @@ get_variable(R, R).
|
||||
|
||||
trie_to_bdd_trie(A, B, OutputFile, OptimizationLevel, FileParam):-
|
||||
trie_to_depth_breadth_trie(A, B, LL, OptimizationLevel),
|
||||
(islabel(LL) ->
|
||||
(is_label(LL) ->
|
||||
atomic_concat('L', InterStep, LL),
|
||||
retractall(deref(_,_)),
|
||||
(problog_flag(deref_terms, true) ->
|
||||
@ -1273,7 +1278,7 @@ trie_to_bdd_trie(A, B, OutputFile, OptimizationLevel, FileParam):-
|
||||
write(VarCNT), nl,
|
||||
write(0), nl,
|
||||
write(InterStep), nl,
|
||||
trie_write(B),
|
||||
trie_write(B, LL),
|
||||
write(LL), nl,
|
||||
told
|
||||
;
|
||||
@ -1306,7 +1311,7 @@ is_state(false).
|
||||
|
||||
nested_trie_to_bdd_trie(A, B, OutputFile, OptimizationLevel, FileParam):-
|
||||
trie_nested_to_depth_breadth_trie(A, B, LL, OptimizationLevel, problog:problog_chktabled),
|
||||
(islabel(LL) ->
|
||||
(is_label(LL) ->
|
||||
retractall(deref(_,_)),
|
||||
(problog_flag(deref_terms, true) ->
|
||||
asserta(deref(LL,no)),
|
||||
@ -1331,13 +1336,13 @@ nested_trie_to_bdd_trie(A, B, OutputFile, OptimizationLevel, FileParam):-
|
||||
InterStep is NegStepN + 1,
|
||||
atomic_concat('L', InterStep, FL),
|
||||
write(InterStep), nl,
|
||||
trie_write(B),
|
||||
trie_write(B, FL),
|
||||
write(FL), write(' = ~'), write(NegL), nl,
|
||||
write(FL), nl
|
||||
;
|
||||
atomic_concat('L', InterStep, LL),
|
||||
write(InterStep), nl,
|
||||
trie_write(B),
|
||||
trie_write(B, LL),
|
||||
write(LL), nl
|
||||
),
|
||||
told
|
||||
@ -1375,7 +1380,7 @@ nested_trie_to_bdd_trie(A, B, OutputFile, OptimizationLevel, FileParam):-
|
||||
write(VarCNT), nl,
|
||||
write(0), nl,
|
||||
write(InterStep), nl,
|
||||
trie_write(B),
|
||||
trie_write(B, LL),
|
||||
write(LL), nl,
|
||||
told
|
||||
;
|
||||
@ -1407,7 +1412,7 @@ preprocess(_, _, _, FinalEndCount, FinalEndCount).
|
||||
|
||||
make_nested_trie_base_cases(Trie, t(ID), DepthBreadthTrie, OptimizationLevel, StartCount, FinalEndCount, Ancestors):-
|
||||
trie_to_depth_breadth_trie(Trie, DepthBreadthTrie, Label, OptimizationLevel, StartCount, EndCount),
|
||||
(not(Label = t(_)) ->
|
||||
(Label \= t(_) ->
|
||||
FinalEndCount = EndCount,
|
||||
problog:problog_chktabled(ID, RTrie),!,
|
||||
get_set_trie_from_id(t(ID), Label, RTrie, Ancestors, _, Ancestors)
|
||||
@ -1438,7 +1443,7 @@ trie_nested_to_db_trie(Trie, DepthBreadthTrie, FinalLabel, OptimizationLevel, St
|
||||
|
||||
nested_trie_to_db_trie(Trie, DepthBreadthTrie, FinalLabel, OptimizationLevel, StartCount, FinalEndCount, Module:GetTriePredicate, Ancestors, ContainsLoop, Childs, ChildsAcc):-
|
||||
trie_to_depth_breadth_trie(Trie, DepthBreadthTrie, Label, OptimizationLevel, StartCount, EndCount),
|
||||
(not(Label = t(_)) ->
|
||||
(Label \= t(_) ->
|
||||
(var(ContainsLoop) ->
|
||||
ContainsLoop = false
|
||||
;
|
||||
@ -1675,19 +1680,24 @@ replace_in_functor(F, NF, T, R):-
|
||||
|
||||
|
||||
|
||||
trie_write(T):-
|
||||
trie_write(T, MAXL):-
|
||||
atomic_concat('L', MAXLA, MAXL),
|
||||
atom_number(MAXLA, MAXLN),
|
||||
trie_traverse(T, R),
|
||||
trie_get_entry(R, L),
|
||||
%write(user_output, L),nl(user_output),
|
||||
(dnfbddformat(L) ->
|
||||
(dnfbddformat(L, MAXLN) ->
|
||||
true
|
||||
;
|
||||
write(error(L)), nl
|
||||
write(user_error, warning(L, not_processed)), nl(user_error)
|
||||
),
|
||||
fail.
|
||||
trie_write(_).
|
||||
trie_write(_, _).
|
||||
|
||||
dnfbddformat(depth(T, L)):-
|
||||
dnfbddformat(depth(T, L), MAXL):-
|
||||
atomic_concat('L', LA, L),
|
||||
atom_number(LA, LN),
|
||||
MAXL >= LN,
|
||||
seperate(T, Li, V),
|
||||
%sort(Li, SL),
|
||||
%reverse(SL, RSL),
|
||||
@ -1697,7 +1707,10 @@ dnfbddformat(depth(T, L)):-
|
||||
atomic_concat('L', D, I),
|
||||
write('D'), write(D), nl
|
||||
)).
|
||||
dnfbddformat(breadth(T, L)):-
|
||||
dnfbddformat(breadth(T, L), MAXL):-
|
||||
atomic_concat('L', LA, L),
|
||||
atom_number(LA, LN),
|
||||
MAXL >= LN,
|
||||
seperate(T, Li, V),
|
||||
%sort(Li, SL),
|
||||
%reverse(SL, RSL),
|
||||
@ -1713,14 +1726,14 @@ bddlineformat([not(H)|T], O):-
|
||||
write('~'), !,
|
||||
bddlineformat([H|T], O).
|
||||
bddlineformat([H], _O):-
|
||||
(islabel(H) ->
|
||||
(is_label(H) ->
|
||||
Var = H
|
||||
;
|
||||
get_var_name(H, Var)
|
||||
),
|
||||
write(Var), nl, !.
|
||||
bddlineformat([H|T], O):-
|
||||
(islabel(H) ->
|
||||
(is_label(H) ->
|
||||
Var = H
|
||||
;
|
||||
get_var_name(H, Var)
|
||||
@ -1733,7 +1746,7 @@ bddlineformat([not(H)], O):-
|
||||
!, write('~'),
|
||||
bddlineformat([H], O).
|
||||
bddlineformat([H], _O):-!,
|
||||
(islabel(H) ->
|
||||
(is_label(H) ->
|
||||
VarName = H
|
||||
;
|
||||
get_var_name(H, VarName)
|
||||
@ -1743,7 +1756,7 @@ bddlineformat([not(H)|T], O):-
|
||||
!, write('~'),
|
||||
bddlineformat([H|T], O).
|
||||
bddlineformat([H|T], O):-
|
||||
(islabel(H) ->
|
||||
(is_label(H) ->
|
||||
VarName = H
|
||||
;
|
||||
get_var_name(H, VarName)
|
||||
@ -1752,16 +1765,16 @@ bddlineformat([H|T], O):-
|
||||
bddlineformat(T, O).*/
|
||||
|
||||
bddlineformat(T, L, O):-
|
||||
(islabel(L) ->
|
||||
(is_label(L) ->
|
||||
write(L), write(' = '),
|
||||
bddlineformat(T, O)
|
||||
;
|
||||
write(user_output,bdd_script_error([L,T,O])),nl(user_output)
|
||||
).
|
||||
|
||||
islabel(not(L)):-
|
||||
!, islabel(L).
|
||||
islabel(Label):-
|
||||
is_label(not(L)):-
|
||||
!, is_label(L).
|
||||
is_label(Label):-
|
||||
atom(Label),
|
||||
atomic_concat('L', _, Label).
|
||||
|
||||
@ -1771,7 +1784,7 @@ isnestedtrie(t(_T)).
|
||||
|
||||
seperate([], [], []).
|
||||
seperate([H|T], [H|Labels], Vars):-
|
||||
islabel(H), !,
|
||||
is_label(H), !,
|
||||
seperate(T, Labels, Vars).
|
||||
seperate([H|T], Labels, [H|Vars]):-
|
||||
seperate(T, Labels, Vars).
|
||||
@ -1788,7 +1801,7 @@ ptree_decomposition(Trie, BDDFileName, VarFileName) :-
|
||||
told,
|
||||
tell(TmpFile1),
|
||||
decompose_trie(Trie, T, L),
|
||||
(islabel(L)->
|
||||
(is_label(L)->
|
||||
atomic_concat('L', LCnt, L),
|
||||
write(L),nl
|
||||
;
|
||||
@ -1924,7 +1937,7 @@ dwriteln(A):-
|
||||
|
||||
non_false([], []):-!.
|
||||
non_false([H|T], [H|NT]):-
|
||||
not(H == false),
|
||||
H \== false,
|
||||
non_false(T, NT).
|
||||
non_false([H|T], NT):-
|
||||
H == false,
|
||||
@ -1936,11 +1949,11 @@ one_true(_, _, 'TRUE'):-!.
|
||||
|
||||
all_false(false,false,false).
|
||||
one_non_false(L, false, false, L):-
|
||||
not(L == false), !.
|
||||
L \== false, !.
|
||||
one_non_false(false, L, false, L):-
|
||||
not(L == false), !.
|
||||
L \== false, !.
|
||||
one_non_false(false, false, L, L):-
|
||||
not(L == false), !.
|
||||
L \== false, !.
|
||||
|
||||
trie_seperate(Trie, Var, TrieWith, TrieWithNeg, TrieWithOut):-
|
||||
trie_traverse(Trie, R),
|
||||
@ -1978,7 +1991,7 @@ ptree_db_trie_opt_performed(LVL1, LVL2, LVL3):-
|
||||
trie_get_depth_breadth_reduction_opt_level_count(2, LVL2),
|
||||
trie_get_depth_breadth_reduction_opt_level_count(3, LVL3).
|
||||
|
||||
:- dynamic deref/2.
|
||||
:- dynamic(deref/2).
|
||||
|
||||
mark_for_deref(DB_Trie):-
|
||||
traverse_ptree_mode(OLD),
|
||||
@ -1990,7 +2003,7 @@ mark_deref(DB_Trie):-
|
||||
traverse_ptree(DB_Trie, DB_Term),
|
||||
(DB_Term = depth(List, Inter); DB_Term = breadth(List, Inter)),
|
||||
member(L, List),
|
||||
((islabel(L), not(deref(L, _))) ->
|
||||
((is_label(L), \+ deref(L, _)) ->
|
||||
asserta(deref(L, Inter))
|
||||
;
|
||||
true
|
||||
|
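The ptree hunks above consistently replace assert/1 with assertz/1 and declare the affected predicates with dynamic/1. Both add a clause at runtime; assertz/1 is the ISO form and guarantees the clause is appended after existing ones, so clause order stays predictable. A minimal sketch with an illustrative predicate name:

% Minimal sketch (illustrative predicate name):
:- dynamic(seen/1).

remember(X) :-
    assertz(seen(X)).          % appended after any existing seen/1 clauses

% ?- remember(a), remember(b), findall(X, seen(X), Xs).
% Xs = [a, b]                  insertion order is preserved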
302
packages/ProbLog/problog/utils_learning.yap
Normal file
@ -0,0 +1,302 @@
|
||||
%%% -*- Mode: Prolog; -*-
|
||||
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
%
|
||||
% $Date: 2010-09-24 15:54:45 +0200 (Fri, 24 Sep 2010) $
|
||||
% $Revision: 4822 $
|
||||
%
|
||||
% This file is part of ProbLog
|
||||
% http://dtai.cs.kuleuven.be/problog
|
||||
%
|
||||
% ProbLog was developed at Katholieke Universiteit Leuven
|
||||
%
|
||||
% Copyright 2008, 2009, 2010
|
||||
% Katholieke Universiteit Leuven
|
||||
%
|
||||
% Main authors of this file:
|
||||
% Bernd Gutmann
|
||||
%
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
%
|
||||
% Artistic License 2.0
|
||||
%
|
||||
% Copyright (c) 2000-2006, The Perl Foundation.
|
||||
%
|
||||
% Everyone is permitted to copy and distribute verbatim copies of this
|
||||
% license document, but changing it is not allowed. Preamble
|
||||
%
|
||||
% This license establishes the terms under which a given free software
|
||||
% Package may be copied, modified, distributed, and/or
|
||||
% redistributed. The intent is that the Copyright Holder maintains some
|
||||
% artistic control over the development of that Package while still
|
||||
% keeping the Package available as open source and free software.
|
||||
%
|
||||
% You are always permitted to make arrangements wholly outside of this
|
||||
% license directly with the Copyright Holder of a given Package. If the
|
||||
% terms of this license do not permit the full use that you propose to
|
||||
% make of the Package, you should contact the Copyright Holder and seek
|
||||
% a different licensing arrangement. Definitions
|
||||
%
|
||||
% "Copyright Holder" means the individual(s) or organization(s) named in
|
||||
% the copyright notice for the entire Package.
|
||||
%
|
||||
% "Contributor" means any party that has contributed code or other
|
||||
% material to the Package, in accordance with the Copyright Holder's
|
||||
% procedures.
|
||||
%
|
||||
% "You" and "your" means any person who would like to copy, distribute,
|
||||
% or modify the Package.
|
||||
%
|
||||
% "Package" means the collection of files distributed by the Copyright
|
||||
% Holder, and derivatives of that collection and/or of those files. A
|
||||
% given Package may consist of either the Standard Version, or a
|
||||
% Modified Version.
|
||||
%
|
||||
% "Distribute" means providing a copy of the Package or making it
|
||||
% accessible to anyone else, or in the case of a company or
|
||||
% organization, to others outside of your company or organization.
|
||||
%
|
||||
% "Distributor Fee" means any fee that you charge for Distributing this
|
||||
% Package or providing support for this Package to another party. It
|
||||
% does not mean licensing fees.
|
||||
%
|
||||
% "Standard Version" refers to the Package if it has not been modified,
|
||||
% or has been modified only in ways explicitly requested by the
|
||||
% Copyright Holder.
|
||||
%
|
||||
% "Modified Version" means the Package, if it has been changed, and such
|
||||
% changes were not explicitly requested by the Copyright Holder.
|
||||
%
|
||||
% "Original License" means this Artistic License as Distributed with the
|
||||
% Standard Version of the Package, in its current version or as it may
|
||||
% be modified by The Perl Foundation in the future.
|
||||
%
|
||||
% "Source" form means the source code, documentation source, and
|
||||
% configuration files for the Package.
|
||||
%
|
||||
% "Compiled" form means the compiled bytecode, object code, binary, or
|
||||
% any other form resulting from mechanical transformation or translation
|
||||
% of the Source form.
|
||||
%
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
%
|
||||
% Permission for Use and Modification Without Distribution
|
||||
%
|
||||
% (1) You are permitted to use the Standard Version and create and use
|
||||
% Modified Versions for any purpose without restriction, provided that
|
||||
% you do not Distribute the Modified Version.
|
||||
%
|
||||
% Permissions for Redistribution of the Standard Version
|
||||
%
|
||||
% (2) You may Distribute verbatim copies of the Source form of the
|
||||
% Standard Version of this Package in any medium without restriction,
|
||||
% either gratis or for a Distributor Fee, provided that you duplicate
|
||||
% all of the original copyright notices and associated disclaimers. At
|
||||
% your discretion, such verbatim copies may or may not include a
|
||||
% Compiled form of the Package.
|
||||
%
|
||||
% (3) You may apply any bug fixes, portability changes, and other
|
||||
% modifications made available from the Copyright Holder. The resulting
|
||||
% Package will still be considered the Standard Version, and as such
|
||||
% will be subject to the Original License.
|
||||
%
|
||||
% Distribution of Modified Versions of the Package as Source
|
||||
%
|
||||
% (4) You may Distribute your Modified Version as Source (either gratis
|
||||
% or for a Distributor Fee, and with or without a Compiled form of the
|
||||
% Modified Version) provided that you clearly document how it differs
|
||||
% from the Standard Version, including, but not limited to, documenting
|
||||
% any non-standard features, executables, or modules, and provided that
|
||||
% you do at least ONE of the following:
|
||||
%
|
||||
% (a) make the Modified Version available to the Copyright Holder of the
|
||||
% Standard Version, under the Original License, so that the Copyright
|
||||
% Holder may include your modifications in the Standard Version. (b)
|
||||
% ensure that installation of your Modified Version does not prevent the
|
||||
% user installing or running the Standard Version. In addition, the
|
||||
% modified Version must bear a name that is different from the name of
|
||||
% the Standard Version. (c) allow anyone who receives a copy of the
|
||||
% Modified Version to make the Source form of the Modified Version
|
||||
% available to others under (i) the Original License or (ii) a license
|
||||
% that permits the licensee to freely copy, modify and redistribute the
|
||||
% Modified Version using the same licensing terms that apply to the copy
|
||||
% that the licensee received, and requires that the Source form of the
|
||||
% Modified Version, and of any works derived from it, be made freely
|
||||
% available in that license fees are prohibited but Distributor Fees are
|
||||
% allowed.
|
||||
%
|
||||
% Distribution of Compiled Forms of the Standard Version or
|
||||
% Modified Versions without the Source
|
||||
%
|
||||
% (5) You may Distribute Compiled forms of the Standard Version without
|
||||
% the Source, provided that you include complete instructions on how to
|
||||
% get the Source of the Standard Version. Such instructions must be
|
||||
% valid at the time of your distribution. If these instructions, at any
|
||||
% time while you are carrying out such distribution, become invalid, you
|
||||
% must provide new instructions on demand or cease further
|
||||
% distribution. If you provide valid instructions or cease distribution
|
||||
% within thirty days after you become aware that the instructions are
|
||||
% invalid, then you do not forfeit any of your rights under this
|
||||
% license.
|
||||
%
|
||||
% (6) You may Distribute a Modified Version in Compiled form without the
|
||||
% Source, provided that you comply with Section 4 with respect to the
|
||||
% Source of the Modified Version.
|
||||
%
|
||||
% Aggregating or Linking the Package
|
||||
%
|
||||
% (7) You may aggregate the Package (either the Standard Version or
|
||||
% Modified Version) with other packages and Distribute the resulting
|
||||
% aggregation provided that you do not charge a licensing fee for the
|
||||
% Package. Distributor Fees are permitted, and licensing fees for other
|
||||
% components in the aggregation are permitted. The terms of this license
|
||||
% apply to the use and Distribution of the Standard or Modified Versions
|
||||
% as included in the aggregation.
|
||||
%
|
||||
% (8) You are permitted to link Modified and Standard Versions with
|
||||
% other works, to embed the Package in a larger work of your own, or to
|
||||
% build stand-alone binary or bytecode versions of applications that
|
||||
% include the Package, and Distribute the result without restriction,
|
||||
% provided the result does not expose a direct interface to the Package.
|
||||
%
|
||||
% Items That are Not Considered Part of a Modified Version
|
||||
%
|
||||
% (9) Works (including, but not limited to, modules and scripts) that
|
||||
% merely extend or make use of the Package, do not, by themselves, cause
|
||||
% the Package to be a Modified Version. In addition, such works are not
|
||||
% considered parts of the Package itself, and are not subject to the
|
||||
% terms of this license.
|
||||
%
|
||||
% General Provisions
|
||||
%
|
||||
% (10) Any use, modification, and distribution of the Standard or
|
||||
% Modified Versions is governed by this Artistic License. By using,
|
||||
% modifying or distributing the Package, you accept this license. Do not
|
||||
% use, modify, or distribute the Package, if you do not accept this
|
||||
% license.
|
||||
%
|
||||
% (11) If your Modified Version has been derived from a Modified Version
|
||||
% made by someone other than you, you are nevertheless required to
|
||||
% ensure that your Modified Version complies with the requirements of
|
||||
% this license.
|
||||
%
|
||||
% (12) This license does not grant you the right to use any trademark,
|
||||
% service mark, tradename, or logo of the Copyright Holder.
|
||||
%
|
||||
% (13) This license includes the non-exclusive, worldwide,
|
||||
% free-of-charge patent license to make, have made, use, offer to sell,
|
||||
% sell, import and otherwise transfer the Package with respect to any
|
||||
% patent claims licensable by the Copyright Holder that are necessarily
|
||||
% infringed by the Package. If you institute patent litigation
|
||||
% (including a cross-claim or counterclaim) against any party alleging
|
||||
% that the Package constitutes direct or contributory patent
|
||||
% infringement, then this Artistic License to you shall terminate on the
|
||||
% date that such litigation is filed.
|
||||
%
|
||||
% (14) Disclaimer of Warranty: THE PACKAGE IS PROVIDED BY THE COPYRIGHT
|
||||
% HOLDER AND CONTRIBUTORS "AS IS' AND WITHOUT ANY EXPRESS OR IMPLIED
|
||||
% WARRANTIES. THE IMPLIED WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
|
||||
% PARTICULAR PURPOSE, OR NON-INFRINGEMENT ARE DISCLAIMED TO THE EXTENT
|
||||
% PERMITTED BY YOUR LOCAL LAW. UNLESS REQUIRED BY LAW, NO COPYRIGHT
|
||||
% HOLDER OR CONTRIBUTOR WILL BE LIABLE FOR ANY DIRECT, INDIRECT,
|
||||
% INCIDENTAL, OR CONSEQUENTIAL DAMAGES ARISING IN ANY WAY OUT OF THE USE
|
||||
% OF THE PACKAGE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
%
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
|
||||
%%%%%%%%
|
||||
% Collected OS depended instructions
|
||||
%%%%%%%%
|
||||
:- module(utils_learning, [empty_bdd_directory/1,
|
||||
empty_output_directory/1,
|
||||
delete_file_silent/1
|
||||
]).
|
||||
|
||||
% switch on all tests to reduce bug searching time
|
||||
:- style_check(all).
|
||||
:- yap_flag(unknown,error).
|
||||
|
||||
% load library modules
|
||||
:- use_module(library(lists), [append/3, member/2]).
|
||||
:- use_module(library(system), [delete_file/1, directory_files/2, file_exists/1]).
|
||||
|
||||
% load our own modules
|
||||
:- use_module(os).
|
||||
|
||||
%========================================================================
|
||||
%=
|
||||
%=
|
||||
%========================================================================
|
||||
|
||||
empty_bdd_directory(Path) :-
|
||||
ground(Path),
|
||||
|
||||
atom_codes('query_', PF1), % 'query_*'
|
||||
|
||||
directory_files(Path,List),
|
||||
delete_files_with_matching_prefix(List,Path,[PF1]).
|
||||
|
||||
%========================================================================
|
||||
%=
|
||||
%=
|
||||
%========================================================================
|
||||
|
||||
empty_output_directory(Path) :-
|
||||
ground(Path),
|
||||
|
||||
concat_path_with_filename(Path,'log.dat',F1),
|
||||
concat_path_with_filename(Path,'out.dat',F2),
|
||||
|
||||
(
|
||||
file_exists(F1)
|
||||
->
|
||||
delete_file_silent(F1);
|
||||
true
|
||||
),
|
||||
|
||||
(
|
||||
file_exists(F2)
|
||||
->
|
||||
delete_file_silent(F2);
|
||||
true
|
||||
),
|
||||
|
||||
atom_codes('values_', PF1), % 'values_*_q_*.dat'
|
||||
atom_codes('factprobs_', PF2), % 'factprobs_*.pl'
|
||||
atom_codes('input_', PF3), % 'input_*.pl'
|
||||
atom_codes('trainpredictions_',PF4), % 'trainpredictions_*.pl'
|
||||
atom_codes('testpredictions_',PF5), % 'testpredictions_*.pl'
|
||||
atom_codes('predictions_',PF6), % 'predictions_*.pl'
|
||||
directory_files(Path,List),
|
||||
delete_files_with_matching_prefix(List,Path,[PF1,PF2,PF3,PF4,PF5,PF6]).
|
||||
|
||||
%========================================================================
%=
%=
%========================================================================

delete_file_silent(File) :-
    delete_file(File),
    !.
delete_file_silent(_).

%========================================================================
%=
%=
%========================================================================

delete_files_with_matching_prefix([],_,_).
delete_files_with_matching_prefix([Name|T],Path,Prefixes) :-
    atom_codes(Name,NameCode),

    (
      (member(Prefix,Prefixes), append(Prefix,_Suffix,NameCode))
    ->
      (
        concat_path_with_filename(Path,Name,F),
        delete_file_silent(F)
      );
      true
    ),

    delete_files_with_matching_prefix(T,Path,Prefixes).

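For illustration, a minimal sketch of how these cleanup helpers are typically combined before a fresh learning run; the predicate name and the directory path below are assumptions made here, not part of the package:

    % Hypothetical helper, for illustration only.
    cleanup_previous_run :-
        Path = '/tmp/problog_output',     % assumed output location
        empty_bdd_directory(Path),        % removes the query_* BDD scripts
        empty_output_directory(Path).     % removes log.dat, out.dat, values_*, etc.
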
@ -204,18 +204,23 @@

%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

:- module(variable_elimination, [trie_check_for_and_cluster/1, trie_replace_and_cluster/2, clean_up/0, variable_elimination_stats/3]).
:- module(variable_elimination, [
        trie_check_for_and_cluster/1,
        trie_replace_and_cluster/2,
        clean_up/0,
        variable_elimination_stats/3
    ]).

:- ensure_loaded(library(lists)).
:- ensure_loaded(library(tries)).
:- use_module(library(lists), [append/3, delete/3, memberchk/2, reverse/2]).
:- use_module(library(tries)).

:- use_module('flags', _, [problog_define_flag/5]).


:- nb_setval(prob_fact_count, 0).

:- problog_define_flag(variable_elimination, problog_flag_validate_boolean, 'enable variable elimination', false, variable_elimination).

:- initialization((
       nb_setval(prob_fact_count, 0),
       problog_define_flag(variable_elimination, problog_flag_validate_boolean, 'enable variable elimination', false, variable_elimination)
   )).

bit_encode(L, ON):-
    bit_encode(L, ON, 0).
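The switch above from bare directives to a single initialization/1 goal defers the nb_setval/2 call and the flag registration until the file has finished loading. A minimal sketch of the same pattern, with an assumed module and counter name:

    :- module(example_init, []).

    % Both goals run only after loading completes, so anything they call
    % is already defined at that point.
    :- initialization((
           nb_setval(example_counter, 0),
           format('example_init loaded~n', [])
       )).
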
@ -373,7 +378,7 @@ last_cluster_element(L, Cluster, R):-

nocluster([], _).
nocluster([H|T], L):-
    not(memberchk(H, L)),
    \+ memberchk(H, L),
    nocluster(T, L).

eliminate_list([], L, L).
@ -386,7 +391,7 @@ replace([], _, _, []).
replace([H|T], H, NH, [NH|NT]):-
    replace(T, H, NH, NT).
replace([H|T], R, NR, [H|NT]):-
    \+ H == R,
    H \== R,
    replace(T, R, NR, NT).

clean_up:-
@ -418,8 +423,8 @@ make_prob_fact(L, P, ID):-
    (clause(problog:problog_predicate(var_elimination, 1), true) ->
        true
    ;
        assert(problog:problog_predicate(var_elimination, 1))
        assertz(problog:problog_predicate(var_elimination, 1))
    ),
    assert(problog:problog_var_elimination(ID, L, P))
    assertz(problog:problog_var_elimination(ID, L, P))
    ).

@ -376,7 +376,8 @@ problog_var_timer_timeout(Variable):-
%%% This is possible for future use %%%

:- use_module(library(timeout)).
:- meta_predicate problog_var_time_out(:,_,_,_), problog_time_out(:,_,_,_).
:- meta_predicate(problog_var_time_out(0, *, *, *)).
:- meta_predicate(problog_time_out(0, *, *, *)).
%
% Problems with nesting, use with care
% always succeeds; returns Success = true/fail, Time = Msec taken/timeout

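The rewritten declarations use explicit mode specifiers: 0 marks an argument that is meta-called as a goal (and is therefore module-qualified by the compiler), while * marks an ordinary argument. A small sketch under those assumptions, with an illustrative predicate name:

    :- use_module(library(timeout)).
    :- meta_predicate(call_with_limit(0, *)).

    % Goal is wrapped as Module:Goal automatically because of the 0 specifier;
    % Limit is passed through unchanged because of the * specifier.
    call_with_limit(Goal, Limit) :-
        time_out(Goal, Limit, _Result).
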
@ -2,8 +2,8 @@

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%
% $Date: 2010-08-25 15:45:22 +0200 (Wed, 25 Aug 2010) $
% $Revision: 4692 $
% $Date: 2010-09-24 15:54:45 +0200 (Fri, 24 Sep 2010) $
% $Revision: 4822 $
%
% This file is part of ProbLog
% http://dtai.cs.kuleuven.be/problog
@ -205,9 +205,10 @@
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%


:- module(learning,[do_learning/1,
                    do_learning/2,
                    set_learning_flag/2,
:- module(learning,[
        do_learning/1,
        do_learning/2,
        set_learning_flag/2,
        learning_flag/2,
        learning_flags/0,
        problog_help/0,
@ -215,54 +216,47 @@
        problog_flag/2,
        problog_flags/0,
        auto_alpha/0
    ]).
    ]).

% switch on all the checks to reduce bug searching time
:- style_check(all).
:- yap_flag(unknown,error).

% load modules from the YAP library
:- ensure_loaded(library(lists)).
:- ensure_loaded(library(random)).
:- ensure_loaded(library(system)).
:- use_module(library(lists), [max_list/2, min_list/2, sum_list/2]).
:- use_module(library(random)). % PM doesn't seem to be used!
:- use_module(library(system), [delete_file/1, file_exists/1, shell/2]).

% load our own modules
:- ensure_loaded(problog).
:- ensure_loaded('problog/logger').
:- ensure_loaded('problog/flags').
:- ensure_loaded('problog/os').

:- use_module(problog).
:- use_module('problog/logger').
:- use_module('problog/flags').
:- use_module('problog/os').
:- use_module('problog/print_learning').
:- use_module('problog/utils_learning').

% used to indicate the state of the system
:- dynamic values_correct/0.
:- dynamic learning_initialized/0.
:- dynamic current_iteration/1.
:- dynamic example_count/1.
:- dynamic query_probability_intern/2.
:- dynamic query_gradient_intern/4.
:- dynamic last_mse/1.
:- dynamic(values_correct/0).
:- dynamic(learning_initialized/0).
:- dynamic(current_iteration/1).
:- dynamic(example_count/1).
:- dynamic(query_probability_intern/2).
:- dynamic(query_gradient_intern/4).
:- dynamic(last_mse/1).

% used to identify queries which have identical proofs
:- dynamic query_is_similar/2.
:- dynamic query_md5/3.
:- dynamic(query_is_similar/2).
:- dynamic(query_md5/3).

:- assert_static(user:(example(A,B,C,=) :- current_predicate(example/3), example(A,B,C))).
:- assert_static(user:(test_example(A,B,C,=) :- current_predicate(test_example/3), test_example(A,B,C))).
:- multifile(user:example/4).
user:example(A,B,C,=) :-
    current_predicate(user:example/3),
    user:example(A,B,C).

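The user:example/4 wrapper above lets the learning code always ask for (ID, Query, Probability, Comparison) tuples, even when the training data only defines example/3; the '=' atom is filled in as the comparison. With a hypothetical training fact such as

    user:example(1, heads(coin1), 0.6).      % assumed data, for illustration only

the query user:example(ID, Query, Prob, =) succeeds with ID = 1, Query = heads(coin1), Prob = 0.6.
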
%========================================================================
%=
%=
%=
%========================================================================

my_format(Level,String,Arguments) :-
    problog_flag(learning_verbosity_level,V_Level),
    (
      V_Level >= Level
    ->
      (format(String,Arguments),flush_output(user));
      true
    ).
:- multifile(user:test_example/4).
user:test_example(A,B,C,=) :-
    current_predicate(user:test_example/3),
    user:test_example(A,B,C).

%========================================================================
|
||||
@ -459,10 +453,10 @@ do_learning_intern(Iterations,Epsilon) :-
|
||||
retractall(current_iteration(_)),
|
||||
!,
|
||||
NextIteration is CurrentIteration+1,
|
||||
assert(current_iteration(NextIteration)),
|
||||
assertz(current_iteration(NextIteration)),
|
||||
EndIteration is CurrentIteration+Iterations-1,
|
||||
|
||||
my_format(1,'~nIteration ~d of ~d~n',[CurrentIteration,EndIteration]),
|
||||
format_learning(1,'~nIteration ~d of ~d~n',[CurrentIteration,EndIteration]),
|
||||
logger_set_variable(iteration,CurrentIteration),
|
||||
|
||||
logger_start_timer(duration),
|
||||
@ -491,12 +485,12 @@ do_learning_intern(Iterations,Epsilon) :-
|
||||
(
|
||||
retractall(last_mse(_)),
|
||||
logger_get_variable(mse_trainingset,Current_MSE),
|
||||
assert(last_mse(Current_MSE)),
|
||||
assertz(last_mse(Current_MSE)),
|
||||
!,
|
||||
MSE_Diff is abs(Last_MSE-Current_MSE)
|
||||
); (
|
||||
logger_get_variable(mse_trainingset,Current_MSE),
|
||||
assert(last_mse(Current_MSE)),
|
||||
assertz(last_mse(Current_MSE)),
|
||||
MSE_Diff is Epsilon+1
|
||||
)
|
||||
),
|
||||
@ -543,7 +537,7 @@ init_learning :-
|
||||
|
||||
logger_write_header,
|
||||
|
||||
my_format(1,'Initializing everything~n',[]),
|
||||
format_learning(1,'Initializing everything~n',[]),
|
||||
empty_output_directory,
|
||||
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
@ -594,7 +588,7 @@ init_learning :-
|
||||
true
|
||||
),
|
||||
bb_delete(test_examples,TestExampleCount),
|
||||
my_format(3,'~q test examples~n',[TestExampleCount]),
|
||||
format_learning(3,'~q test examples~n',[TestExampleCount]),
|
||||
!,
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
% stop count test examples
|
||||
@ -615,8 +609,8 @@ init_learning :-
|
||||
true
|
||||
),
|
||||
bb_delete(training_examples,TrainingExampleCount),
|
||||
assert(example_count(TrainingExampleCount)),
|
||||
my_format(3,'~q training examples~n',[TrainingExampleCount]),
|
||||
assertz(example_count(TrainingExampleCount)),
|
||||
format_learning(3,'~q training examples~n',[TrainingExampleCount]),
|
||||
!,
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
% stop count training examples
|
||||
@ -643,16 +637,15 @@ init_learning :-
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
% build BDD script for every example
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
once(initialize_fact_probabilities),
|
||||
once(init_queries),
|
||||
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
% done
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
assert(current_iteration(0)),
|
||||
assert(learning_initialized),
|
||||
assertz(current_iteration(0)),
|
||||
assertz(learning_initialized),
|
||||
|
||||
my_format(1,'~n',[]).
|
||||
format_learning(1,'~n',[]).
|
||||
|
||||
|
||||
%========================================================================
|
||||
@ -681,11 +674,11 @@ empty_output_directory :-
|
||||
|
||||
|
||||
init_queries :-
|
||||
my_format(2,'Build BDDs for examples~n',[]),
|
||||
format_learning(2,'Build BDDs for examples~n',[]),
|
||||
( % go over all test examples
|
||||
current_predicate(user:test_example/4),
|
||||
user:test_example(ID,Query,Prob,_),
|
||||
my_format(3,' test example ~q: ~q~n',[ID,Query]),
|
||||
format_learning(3,' test example ~q: ~q~n',[ID,Query]),
|
||||
flush_output(user),
|
||||
init_one_query(ID,Query,test),
|
||||
|
||||
@ -695,7 +688,7 @@ init_queries :-
|
||||
( % go over all training examples
|
||||
current_predicate(user:example/4),
|
||||
user:example(ID,Query,Prob,_),
|
||||
my_format(3,' training example ~q: ~q~n',[ID,Query]),
|
||||
format_learning(3,' training example ~q: ~q~n',[ID,Query]),
|
||||
flush_output(user),
|
||||
init_one_query(ID,Query,training),
|
||||
|
||||
@ -721,10 +714,10 @@ init_one_query(QueryID,Query,Type) :-
|
||||
(
|
||||
file_exists(Filename)
|
||||
->
|
||||
my_format(3,' Reuse existing BDD ~q~n~n',[Filename]);
|
||||
format_learning(3,' Reuse existing BDD ~q~n~n',[Filename]);
|
||||
(
|
||||
problog_flag(init_method,(Query,_Prob,Filename,Probabilities_File,Call)),
|
||||
once(call(Call)),
|
||||
once(Call),
|
||||
delete_file(Probabilities_File)
|
||||
)
|
||||
),
|
||||
@ -741,39 +734,18 @@ init_one_query(QueryID,Query,Type) :-
|
||||
query_md5(OtherQueryID,Query_MD5,Type)
|
||||
->
|
||||
(
|
||||
assert(query_is_similar(QueryID,OtherQueryID)),
|
||||
my_format(3, '~q is similar to ~q~2n', [QueryID,OtherQueryID])
|
||||
assertz(query_is_similar(QueryID,OtherQueryID)),
|
||||
format_learning(3, '~q is similar to ~q~2n', [QueryID,OtherQueryID])
|
||||
);
|
||||
assert(query_md5(QueryID,Query_MD5,Type))
|
||||
assertz(query_md5(QueryID,Query_MD5,Type))
|
||||
)
|
||||
);
|
||||
|
||||
true
|
||||
).
|
||||
),!,
|
||||
garbage_collect.
|
||||
|
||||
|
||||
%========================================================================
|
||||
%= set all unknown fact probabilities to random values
|
||||
%=
|
||||
%=
|
||||
%========================================================================
|
||||
|
||||
initialize_fact_probabilities :-
|
||||
( % go over all tunable facts
|
||||
tunable_fact(FactID,_),
|
||||
problog_flag(probability_initializer,(FactID,Probability,Query)),
|
||||
once(call(Query)),
|
||||
set_fact_probability(FactID,Probability),
|
||||
|
||||
fail; % go to next tunable fact
|
||||
true
|
||||
).
|
||||
|
||||
random_probability(_FactID,Probability) :-
|
||||
% use probs around 0.5 to not confuse k-best search
|
||||
random(Random),
|
||||
Probability is 0.5+(Random-0.5)/100.
|
||||
|
||||
|
||||
|
||||
%========================================================================
|
||||
@ -791,8 +763,8 @@ update_values :-
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
% delete old values
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
once(retractall(query_probability_intern(_,_))),
|
||||
once(retractall(query_gradient_intern(_,_,_,_))),
|
||||
retractall(query_probability_intern(_,_)),
|
||||
retractall(query_gradient_intern(_,_,_,_)),
|
||||
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
% start write current probabilities to file
|
||||
@ -837,7 +809,7 @@ update_values :-
|
||||
% stop write current probabilities to file
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
|
||||
assert(values_correct).
|
||||
assertz(values_correct).
|
||||
|
||||
|
||||
|
||||
@ -854,7 +826,7 @@ update_query_cleanup(QueryID) :-
|
||||
% either this query is similar to another or vice versa,
|
||||
% therefore we don't delete anything
|
||||
true;
|
||||
once(retractall(query_gradient_intern(QueryID,_,_,_)))
|
||||
retractall(query_gradient_intern(QueryID,_,_,_))
|
||||
).
|
||||
|
||||
|
||||
@ -867,13 +839,13 @@ update_query(QueryID,Symbol,What_To_Update) :-
|
||||
query_is_similar(QueryID,_)
|
||||
->
|
||||
% we don't have to evaluate the BDD
|
||||
my_format(4,'#',[]);
|
||||
format_learning(4,'#',[]);
|
||||
(
|
||||
problog_flag(sigmoid_slope,Slope),
|
||||
problog_dir(PD),
|
||||
((What_To_Update=all;query_is_similar(_,QueryID)) -> Method='g' ; Method='l'),
|
||||
atomic_concat([PD,
|
||||
'/problogbdd',
|
||||
'/ProblogBDD',
|
||||
' -i "', Probabilities_File, '"',
|
||||
' -l "', Query_Directory,'/query_',QueryID, '"',
|
||||
' -m ', Method,
|
||||
@ -921,7 +893,7 @@ update_query(QueryID,Symbol,What_To_Update) :-
|
||||
),
|
||||
|
||||
delete_file(Values_Filename),
|
||||
my_format(4,'~w',[Symbol])
|
||||
format_learning(4,'~w',[Symbol])
|
||||
)
|
||||
),
|
||||
flush_output(user).
|
||||
@ -946,14 +918,14 @@ my_load_intern(end_of_file,_,_) :-
|
||||
!.
|
||||
my_load_intern(query_probability(QueryID,Prob),Handle,QueryID) :-
|
||||
!,
|
||||
assert(query_probability_intern(QueryID,Prob)),
|
||||
assertz(query_probability_intern(QueryID,Prob)),
|
||||
read(Handle,X),
|
||||
my_load_intern(X,Handle,QueryID).
|
||||
my_load_intern(query_gradient(QueryID,XFactID,Type,Value),Handle,QueryID) :-
|
||||
!,
|
||||
atomic_concat(x,StringFactID,XFactID),
|
||||
atom_number(StringFactID,FactID),
|
||||
assert(query_gradient_intern(QueryID,FactID,Type,Value)),
|
||||
assertz(query_gradient_intern(QueryID,FactID,Type,Value)),
|
||||
read(Handle,X),
|
||||
my_load_intern(X,Handle,QueryID).
|
||||
my_load_intern(X,Handle,QueryID) :-
|
||||
@ -1056,7 +1028,7 @@ mse_trainingset_only_for_linesearch(MSE) :-
|
||||
length(AllSquaredErrors,Length),
|
||||
sum_list(AllSquaredErrors,SumAllSquaredErrors),
|
||||
MSE is SumAllSquaredErrors/Length,
|
||||
my_format(3,' (~8f)~n',[MSE])
|
||||
format_learning(3,' (~8f)~n',[MSE])
|
||||
); true
|
||||
),
|
||||
retractall(values_correct).
|
||||
@ -1066,7 +1038,7 @@ mse_testset :-
|
||||
(current_predicate(user:test_example/4),user:test_example(_,_,_,_))
|
||||
->
|
||||
(
|
||||
my_format(2,'MSE_Test ',[]),
|
||||
format_learning(2,'MSE_Test ',[]),
|
||||
update_values,
|
||||
findall(SquaredError,
|
||||
(user:test_example(QueryID,_Query,QueryProb,Type),
|
||||
@ -1091,7 +1063,7 @@ mse_testset :-
|
||||
logger_set_variable(mse_testset,MSE),
|
||||
logger_set_variable(mse_min_testset,MinError),
|
||||
logger_set_variable(mse_max_testset,MaxError),
|
||||
my_format(2,' (~8f)~n',[MSE])
|
||||
format_learning(2,' (~8f)~n',[MSE])
|
||||
); true
|
||||
).
|
||||
|
||||
@ -1227,7 +1199,7 @@ add_gradient(Learning_Rate) :-
|
||||
|
||||
|
||||
gradient_descent :-
|
||||
my_format(2,'Gradient ',[]),
|
||||
format_learning(2,'Gradient ',[]),
|
||||
|
||||
save_old_probabilities,
|
||||
update_values,
|
||||
@ -1411,7 +1383,7 @@ gradient_descent :-
|
||||
logger_set_variable(mse_min_trainingset,MSE_Train_Min),
|
||||
logger_set_variable(mse_max_trainingset,MSE_Train_Max),
|
||||
|
||||
my_format(2,'~n',[]),
|
||||
format_learning(2,'~n',[]),
|
||||
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
% start add gradient to current probabilities
|
||||
@ -1422,7 +1394,7 @@ gradient_descent :-
|
||||
problog_flag(learning_rate,LearningRate);
|
||||
lineSearch(LearningRate,_)
|
||||
),
|
||||
my_format(3,'learning rate:~8f~n',[LearningRate]),
|
||||
format_learning(3,'learning rate:~8f~n',[LearningRate]),
|
||||
add_gradient(LearningRate),
|
||||
logger_set_variable(learning_rate,LearningRate),
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
@ -1438,7 +1410,7 @@ gradient_descent :-
|
||||
|
||||
line_search_evaluate_point(Learning_Rate,MSE) :-
|
||||
add_gradient(Learning_Rate),
|
||||
my_format(2,'Line search (h=~8f) ',[Learning_Rate]),
|
||||
format_learning(2,'Line search (h=~8f) ',[Learning_Rate]),
|
||||
mse_trainingset_only_for_linesearch(MSE).
|
||||
|
||||
|
||||
@ -1449,7 +1421,7 @@ lineSearch(Final_X,Final_Value) :-
|
||||
problog_flag(line_search_tau,Tau),
|
||||
problog_flag(line_search_interval,(A,B)),
|
||||
|
||||
my_format(3,'Line search in interval (~4f,~4f)~n',[A,B]),
|
||||
format_learning(3,'Line search in interval (~4f,~4f)~n',[A,B]),
|
||||
|
||||
% init values
|
||||
Acc is Tol * (B-A),
|
||||
@ -1677,11 +1649,9 @@ init_flags :-
|
||||
problog_define_flag(output_directory, problog_flag_validate_directory, 'directory for logfiles etc', Output_Folder,learning_general,flags:learning_output_dir_handler),
|
||||
problog_define_flag(log_frequency, problog_flag_validate_posint, 'log results every nth iteration', 1, learning_general),
|
||||
problog_define_flag(rebuild_bdds, problog_flag_validate_nonegint, 'rebuild BDDs every nth iteration', 0, learning_general),
|
||||
problog_define_flag(learning_verbosity_level, problog_flag_validate_0to5,'How much output shall be given (0=nothing,5=all)',5, learning_general),
|
||||
problog_define_flag(reuse_initialized_bdds,problog_flag_validate_boolean, 'Reuse BDDs from previous runs',false, learning_general),
|
||||
problog_define_flag(check_duplicate_bdds,problog_flag_validate_boolean,'Store intermediate results in hash table',true,learning_general),
|
||||
problog_define_flag(init_method,problog_flag_validate_dummy,'ProbLog predicate to search proofs',(Query,Probability,BDDFile,ProbFile,problog_kbest_save(Query,100,Probability,_Status,BDDFile,ProbFile)),learning_general,flags:learning_init_handler),
|
||||
problog_define_flag(probability_initializer,problog_flag_validate_dummy,'Predicate to initialize probabilities',(FactID,P,random_probability(FactID,P)),learning_general,flags:learning_prob_init_handler),
|
||||
problog_define_flag(alpha,problog_flag_validate_number,'weight of negative examples (auto=n_p/n_n)',auto,learning_general,flags:auto_handler),
|
||||
problog_define_flag(sigmoid_slope,problog_flag_validate_posnumber,'slope of sigmoid function',1.0,learning_general),
|
||||
|
||||
|
@ -1,131 +0,0 @@
|
||||
|
||||
|
||||
|
||||
|
||||
The "Artistic License"
|
||||
|
||||
Preamble
|
||||
|
||||
The intent of this document is to state the conditions under which a
|
||||
Package may be copied, such that the Copyright Holder maintains some
|
||||
semblance of artistic control over the development of the package,
|
||||
while giving the users of the package the right to use and distribute
|
||||
the Package in a more-or-less customary fashion, plus the right to make
|
||||
reasonable modifications.
|
||||
|
||||
Definitions:
|
||||
|
||||
"Package" refers to the collection of files distributed by the
|
||||
Copyright Holder, and derivatives of that collection of files
|
||||
created through textual modification.
|
||||
|
||||
"Standard Version" refers to such a Package if it has not been
|
||||
modified, or has been modified in accordance with the wishes
|
||||
of the Copyright Holder as specified below.
|
||||
|
||||
"Copyright Holder" is whoever is named in the copyright or
|
||||
copyrights for the package.
|
||||
|
||||
"You" is you, if you're thinking about copying or distributing
|
||||
this Package.
|
||||
|
||||
"Reasonable copying fee" is whatever you can justify on the
|
||||
basis of media cost, duplication charges, time of people involved,
|
||||
and so on. (You will not be required to justify it to the
|
||||
Copyright Holder, but only to the computing community at large
|
||||
as a market that must bear the fee.)
|
||||
|
||||
"Freely Available" means that no fee is charged for the item
|
||||
itself, though there may be fees involved in handling the item.
|
||||
It also means that recipients of the item may redistribute it
|
||||
under the same conditions they received it.
|
||||
|
||||
1. You may make and give away verbatim copies of the source form of the
|
||||
Standard Version of this Package without restriction, provided that you
|
||||
duplicate all of the original copyright notices and associated disclaimers.
|
||||
|
||||
2. You may apply bug fixes, portability fixes and other modifications
|
||||
derived from the Public Domain or from the Copyright Holder. A Package
|
||||
modified in such a way shall still be considered the Standard Version.
|
||||
|
||||
3. You may otherwise modify your copy of this Package in any way, provided
|
||||
that you insert a prominent notice in each changed file stating how and
|
||||
when you changed that file, and provided that you do at least ONE of the
|
||||
following:
|
||||
|
||||
a) place your modifications in the Public Domain or otherwise make them
|
||||
Freely Available, such as by posting said modifications to Usenet or
|
||||
an equivalent medium, or placing the modifications on a major archive
|
||||
site such as uunet.uu.net, or by allowing the Copyright Holder to include
|
||||
your modifications in the Standard Version of the Package.
|
||||
|
||||
b) use the modified Package only within your corporation or organization.
|
||||
|
||||
c) rename any non-standard executables so the names do not conflict
|
||||
with standard executables, which must also be provided, and provide
|
||||
a separate manual page for each non-standard executable that clearly
|
||||
documents how it differs from the Standard Version.
|
||||
|
||||
d) make other distribution arrangements with the Copyright Holder.
|
||||
|
||||
4. You may distribute the programs of this Package in object code or
|
||||
executable form, provided that you do at least ONE of the following:
|
||||
|
||||
a) distribute a Standard Version of the executables and library files,
|
||||
together with instructions (in the manual page or equivalent) on where
|
||||
to get the Standard Version.
|
||||
|
||||
b) accompany the distribution with the machine-readable source of
|
||||
the Package with your modifications.
|
||||
|
||||
c) give non-standard executables non-standard names, and clearly
|
||||
document the differences in manual pages (or equivalent), together
|
||||
with instructions on where to get the Standard Version.
|
||||
|
||||
d) make other distribution arrangements with the Copyright Holder.
|
||||
|
||||
5. You may charge a reasonable copying fee for any distribution of this
|
||||
Package. You may charge any fee you choose for support of this
|
||||
Package. You may not charge a fee for this Package itself. However,
|
||||
you may distribute this Package in aggregate with other (possibly
|
||||
commercial) programs as part of a larger (possibly commercial) software
|
||||
distribution provided that you do not advertise this Package as a
|
||||
product of your own. You may embed this Package's interpreter within
|
||||
an executable of yours (by linking); this shall be construed as a mere
|
||||
form of aggregation, provided that the complete Standard Version of the
|
||||
interpreter is so embedded.
|
||||
|
||||
6. The scripts and library files supplied as input to or produced as
|
||||
output from the programs of this Package do not automatically fall
|
||||
under the copyright of this Package, but belong to whoever generated
|
||||
them, and may be sold commercially, and may be aggregated with this
|
||||
Package. If such scripts or library files are aggregated with this
|
||||
Package via the so-called "undump" or "unexec" methods of producing a
|
||||
binary executable image, then distribution of such an image shall
|
||||
neither be construed as a distribution of this Package nor shall it
|
||||
fall under the restrictions of Paragraphs 3 and 4, provided that you do
|
||||
not represent such an executable image as a Standard Version of this
|
||||
Package.
|
||||
|
||||
7. C subroutines (or comparably compiled subroutines in other
|
||||
languages) supplied by you and linked into this Package in order to
|
||||
emulate subroutines and variables of the language defined by this
|
||||
Package shall not be considered part of this Package, but are the
|
||||
equivalent of input as in Paragraph 6, provided these subroutines do
|
||||
not change the language in any way that would cause it to fail the
|
||||
regression tests for the language.
|
||||
|
||||
8. Aggregation of this Package with a commercial distribution is always
|
||||
permitted provided that the use of this Package is embedded; that is,
|
||||
when no overt attempt is made to make this Package's interfaces visible
|
||||
to the end user of the commercial distribution. Such use shall not be
|
||||
construed as a distribution of this Package.
|
||||
|
||||
9. The name of the Copyright Holder may not be used to endorse or promote
|
||||
products derived from this software without specific prior written permission.
|
||||
|
||||
10. THIS PACKAGE IS PROVIDED "AS IS" AND WITHOUT ANY EXPRESS OR
|
||||
IMPLIED WARRANTIES, INCLUDING, WITHOUT LIMITATION, THE IMPLIED
|
||||
WARRANTIES OF MERCHANTIBILITY AND FITNESS FOR A PARTICULAR PURPOSE.
|
||||
|
||||
The End
|
182  packages/ProbLog/simplecudd/LICENSE  (Normal file)
@ -0,0 +1,182 @@
|
||||
Artistic License 2.0
|
||||
|
||||
Copyright (c) 2000-2006, The Perl Foundation.
|
||||
|
||||
Everyone is permitted to copy and distribute verbatim copies of this
|
||||
license document, but changing it is not allowed. Preamble
|
||||
|
||||
This license establishes the terms under which a given free software
|
||||
Package may be copied, modified, distributed, and/or
|
||||
redistributed. The intent is that the Copyright Holder maintains some
|
||||
artistic control over the development of that Package while still
|
||||
keeping the Package available as open source and free software.
|
||||
|
||||
You are always permitted to make arrangements wholly outside of this
|
||||
license directly with the Copyright Holder of a given Package. If the
|
||||
terms of this license do not permit the full use that you propose to
|
||||
make of the Package, you should contact the Copyright Holder and seek
|
||||
a different licensing arrangement. Definitions
|
||||
|
||||
"Copyright Holder" means the individual(s) or organization(s) named in
|
||||
the copyright notice for the entire Package.
|
||||
|
||||
"Contributor" means any party that has contributed code or other
|
||||
material to the Package, in accordance with the Copyright Holder's
|
||||
procedures.
|
||||
|
||||
"You" and "your" means any person who would like to copy, distribute,
|
||||
or modify the Package.
|
||||
|
||||
"Package" means the collection of files distributed by the Copyright
|
||||
Holder, and derivatives of that collection and/or of those files. A
|
||||
given Package may consist of either the Standard Version, or a
|
||||
Modified Version.
|
||||
|
||||
"Distribute" means providing a copy of the Package or making it
|
||||
accessible to anyone else, or in the case of a company or
|
||||
organization, to others outside of your company or organization.
|
||||
|
||||
"Distributor Fee" means any fee that you charge for Distributing this
|
||||
Package or providing support for this Package to another party. It
|
||||
does not mean licensing fees.
|
||||
|
||||
"Standard Version" refers to the Package if it has not been modified,
|
||||
or has been modified only in ways explicitly requested by the
|
||||
Copyright Holder.
|
||||
|
||||
"Modified Version" means the Package, if it has been changed, and such
|
||||
changes were not explicitly requested by the Copyright Holder.
|
||||
|
||||
"Original License" means this Artistic License as Distributed with the
|
||||
Standard Version of the Package, in its current version or as it may
|
||||
be modified by The Perl Foundation in the future.
|
||||
|
||||
"Source" form means the source code, documentation source, and
|
||||
configuration files for the Package.
|
||||
|
||||
"Compiled" form means the compiled bytecode, object code, binary, or
|
||||
any other form resulting from mechanical transformation or translation
|
||||
of the Source form.
|
||||
|
||||
Permission for Use and Modification Without Distribution
|
||||
|
||||
(1) You are permitted to use the Standard Version and create and use
|
||||
Modified Versions for any purpose without restriction, provided that
|
||||
you do not Distribute the Modified Version.
|
||||
|
||||
Permissions for Redistribution of the Standard Version
|
||||
|
||||
(2) You may Distribute verbatim copies of the Source form of the
|
||||
Standard Version of this Package in any medium without restriction,
|
||||
either gratis or for a Distributor Fee, provided that you duplicate
|
||||
all of the original copyright notices and associated disclaimers. At
|
||||
your discretion, such verbatim copies may or may not include a
|
||||
Compiled form of the Package.
|
||||
|
||||
(3) You may apply any bug fixes, portability changes, and other
|
||||
modifications made available from the Copyright Holder. The resulting
|
||||
Package will still be considered the Standard Version, and as such
|
||||
will be subject to the Original License.
|
||||
|
||||
Distribution of Modified Versions of the Package as Source
|
||||
|
||||
(4) You may Distribute your Modified Version as Source (either gratis
|
||||
or for a Distributor Fee, and with or without a Compiled form of the
|
||||
Modified Version) provided that you clearly document how it differs
|
||||
from the Standard Version, including, but not limited to, documenting
|
||||
any non-standard features, executables, or modules, and provided that
|
||||
you do at least ONE of the following:
|
||||
|
||||
(a) make the Modified Version available to the Copyright Holder of the
|
||||
Standard Version, under the Original License, so that the Copyright
|
||||
Holder may include your modifications in the Standard Version. (b)
|
||||
ensure that installation of your Modified Version does not prevent the
|
||||
user installing or running the Standard Version. In addition, the
|
||||
modified Version must bear a name that is different from the name of
|
||||
the Standard Version. (c) allow anyone who receives a copy of the
|
||||
Modified Version to make the Source form of the Modified Version
|
||||
available to others under (i) the Original License or (ii) a license
|
||||
that permits the licensee to freely copy, modify and redistribute the
|
||||
Modified Version using the same licensing terms that apply to the copy
|
||||
that the licensee received, and requires that the Source form of the
|
||||
Modified Version, and of any works derived from it, be made freely
|
||||
available in that license fees are prohibited but Distributor Fees are
|
||||
allowed.
|
||||
|
||||
Distribution of Compiled Forms of the Standard Version or Modified Versions
|
||||
without the Source
|
||||
|
||||
(5) You may Distribute Compiled forms of the Standard Version without
|
||||
the Source, provided that you include complete instructions on how to
|
||||
get the Source of the Standard Version. Such instructions must be
|
||||
valid at the time of your distribution. If these instructions, at any
|
||||
time while you are carrying out such distribution, become invalid, you
|
||||
must provide new instructions on demand or cease further
|
||||
distribution. If you provide valid instructions or cease distribution
|
||||
within thirty days after you become aware that the instructions are
|
||||
invalid, then you do not forfeit any of your rights under this
|
||||
license.
|
||||
|
||||
(6) You may Distribute a Modified Version in Compiled form without the
|
||||
Source, provided that you comply with Section 4 with respect to the
|
||||
Source of the Modified Version.
|
||||
|
||||
Aggregating or Linking the Package
|
||||
|
||||
(7) You may aggregate the Package (either the Standard Version or
|
||||
Modified Version) with other packages and Distribute the resulting
|
||||
aggregation provided that you do not charge a licensing fee for the
|
||||
Package. Distributor Fees are permitted, and licensing fees for other
|
||||
components in the aggregation are permitted. The terms of this license
|
||||
apply to the use and Distribution of the Standard or Modified Versions
|
||||
as included in the aggregation.
|
||||
|
||||
(8) You are permitted to link Modified and Standard Versions with
|
||||
other works, to embed the Package in a larger work of your own, or to
|
||||
build stand-alone binary or bytecode versions of applications that
|
||||
include the Package, and Distribute the result without restriction,
|
||||
provided the result does not expose a direct interface to the Package.
|
||||
|
||||
|
||||
Items That are Not Considered Part of a Modified Version
|
||||
|
||||
(9) Works (including, but not limited to, modules and scripts) that
|
||||
merely extend or make use of the Package, do not, by themselves, cause
|
||||
the Package to be a Modified Version. In addition, such works are not
|
||||
considered parts of the Package itself, and are not subject to the
|
||||
terms of this license.
|
||||
|
||||
General Provisions
|
||||
|
||||
(10) Any use, modification, and distribution of the Standard or
|
||||
Modified Versions is governed by this Artistic License. By using,
|
||||
modifying or distributing the Package, you accept this license. Do not
|
||||
use, modify, or distribute the Package, if you do not accept this
|
||||
license.
|
||||
|
||||
(11) If your Modified Version has been derived from a Modified Version
|
||||
made by someone other than you, you are nevertheless required to
|
||||
ensure that your Modified Version complies with the requirements of
|
||||
this license.
|
||||
|
||||
(12) This license does not grant you the right to use any trademark,
|
||||
service mark, tradename, or logo of the Copyright Holder.
|
||||
|
||||
(13) This license includes the non-exclusive, worldwide,
|
||||
free-of-charge patent license to make, have made, use, offer to sell,
|
||||
sell, import and otherwise transfer the Package with respect to any
|
||||
patent claims licensable by the Copyright Holder that are necessarily
|
||||
infringed by the Package. If you institute patent litigation
|
||||
(including a cross-claim or counterclaim) against any party alleging
|
||||
that the Package constitutes direct or contributory patent
|
||||
infringement, then this Artistic License to you shall terminate on the
|
||||
date that such litigation is filed.
|
||||
|
||||
(14) Disclaimer of Warranty: THE PACKAGE IS PROVIDED BY THE COPYRIGHT
|
||||
HOLDER AND CONTRIBUTORS "AS IS' AND WITHOUT ANY EXPRESS OR IMPLIED
|
||||
WARRANTIES. THE IMPLIED WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
|
||||
PARTICULAR PURPOSE, OR NON-INFRINGEMENT ARE DISCLAIMED TO THE EXTENT
|
||||
PERMITTED BY YOUR LOCAL LAW. UNLESS REQUIRED BY LAW, NO COPYRIGHT
|
||||
HOLDER OR CONTRIBUTOR WILL BE LIABLE FOR ANY DIRECT, INDIRECT,
|
||||
INCIDENTAL, OR CONSEQUENTIAL DAMAGES ARISING IN ANY WAY OUT OF THE USE
|
||||
OF THE PACKAGE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
@ -834,12 +834,22 @@ absolute_file_name(File,Opts,TrueFileName) :-
'$get_abs_file'(File,opts(_,D0,_,_,_,_,_),AbsFile) :-
    operating_system_support:true_file_name(File,D0,AbsFile).

'$search_in_path'(File,opts(Extensions,_,_,Access,_,_,_),F) :-
    '$add_extensions'(Extensions,File,F),
    access_file(F,Access).
'$search_in_path'(File,opts(Extensions,_,Type,Access,_,_,_),F) :-
    '$add_extensions'(Extensions, File, F0),
    '$check_file'(F0, Type, Access, F).
'$search_in_path'(File,opts(_,_,Type,Access,_,_,_),F) :-
    '$add_type_extensions'(Type,File,F),
    access_file(F,Access).
    '$add_type_extensions'(Type, File, F0),
    '$check_file'(F0, Type, Access, F).

'$check_file'(F, Type, none, F) :- !.
'$check_file'(F0, Type, Access, F0) :-
    access_file(F0, Access),
    (Type == directory
    ->
      exists_directory(F0)
    ;
      true
    ).

'$add_extensions'([Ext|_],File,F) :-
    '$mk_sure_true_ext'(Ext,NExt),
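In the rewritten search above, '$check_file'/4 centralises the test that used to be a bare access_file/2 call: an access mode of none accepts the candidate immediately, and a requested type of directory additionally requires exists_directory/1 to hold. A hedged illustration (the paths and the source type are assumptions; these are internal '$'-prefixed predicates, shown only to document the intent):

    % ?- '$check_file'('/tmp/prog.yap', source, none, F).   % accepted as-is, F = '/tmp/prog.yap'
    % ?- '$check_file'('/tmp', directory, read, F).         % only if /tmp is a readable, existing directory
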
@ -60,15 +60,20 @@ attgoal_for_delay(redo_dif(Done, X, Y), V) -->
    [prolog:dif(X,Y)].
attgoal_for_delay(redo_freeze(Done, V, Goal), V) -->
    { var(Done) }, !,
    [prolog:freeze(V,Goal)].
    { remove_when_declarations(Goal, NoWGoal) },
    [ prolog:freeze(V,NoWGoal) ].
attgoal_for_delay(redo_eq(Done, X, Y, Goal), V) -->
    { var(Done), first_att(Goal, V) }, !,
    [prolog:when(X=Y,Goal)].
    [ prolog:when(X=Y,Goal) ].
attgoal_for_delay(redo_ground(Done, X, Goal), V) -->
    { var(Done) }, !,
    [prolog:when(ground(X),Goal)].
    [ prolog:when(ground(X),Goal) ].
attgoal_for_delay(_, V) --> [].

remove_when_declarations(when(Cond,Goal,_), when(Cond,NoWGoal)) :- !,
    remove_when_declarations(Goal, NoWGoal).
remove_when_declarations(Goal, Goal).

%
% operators defined in this module:
%
@ -357,8 +362,8 @@ prolog:'$block'(Conds) :-
prolog:'$block'(_).

generate_blocking_code(Conds, G, Code) :-
    '$extract_head_for_block'(Conds, G),
    '$recorded'('$blocking_code','$code'(G,OldConds),R), !,
    extract_head_for_block(Conds, G),
    recorded('$blocking_code','$code'(G,OldConds),R), !,
    erase(R),
    functor(G, Na, Ar),
    '$current_module'(M),

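remove_when_declarations/2 above strips the internal third argument from nested when/3 records before a frozen goal is reported back to the user. For example, with an assumed goal term:

    % ?- remove_when_declarations(when(ground(X), write(X), Done), G).
    % G = when(ground(X), write(X))
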
20  pl/preds.yap
@ -234,7 +234,7 @@ assertz_static(C) :-

'$erase_all_mf_dynamic'(Na,A,M) :-
    get_value('$consulting_file',F),
    '$recorded'('$multifile_dynamic'(_,_,_), '$mf'(Na,A,M,F,R), R1),
    recorded('$multifile_dynamic'(_,_,_), '$mf'(Na,A,M,F,R), R1),
    erase(R1),
    erase(R),
    fail.
@ -649,12 +649,6 @@ abolish(X) :-
    '$undefined'(G, Module),
    functor(G,Name,Arity),
    print_message(warning,no_match(abolish(Module:Name/Arity))).
% I cannot allow modifying static procedures in YAPOR
% this code has to be here because of abolish/2
% '$abolishs'(G, Module) :-
%    '$has_yap_or', !,
%    functor(G,A,N),
%    '$do_error'(permission_error(modify,static_procedure,A/N),abolish(Module:G)).
'$abolishs'(G, M) :-
    '$is_multifile'(G,M), !,
    functor(G,Name,Arity),
@ -927,7 +921,8 @@ current_predicate(A,T) :-
current_predicate(A) :-
    '$current_predicate_inside'(A).

'$current_predicate_inside'(F) :- var(F), !, % only for the predicate
'$current_predicate_inside'(F) :-
    var(F), !, % only for the predicate
    '$current_module'(M),
    '$current_predicate3'(M,F).
'$current_predicate_inside'(M:F) :- % module specified
@ -955,7 +950,14 @@ system_predicate(P) :-
    '$ifunctor'(T,A,Arity),
    '$pred_exists'(T,M).

'$current_predicate3'(M,A/Arity) :- nonvar(A), nonvar(Arity), !,
'$current_predicate3'(M,A/Arity) :-
    nonvar(M),
    nonvar(A),
    nonvar(Arity), !,
    '$ifunctor'(Pred,A,Arity),
    '$pred_exists'(Pred,M).
'$current_predicate3'(M,A/Arity) :-
    nonvar(A), nonvar(Arity), !,
    (
      '$current_predicate'(M,A,Arity)
    ->

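The added clause gives '$current_predicate3'/2 a fast path when module, name, and arity are all bound, so a fully specified lookup no longer enumerates candidates. A hedged usage sketch at the user level:

    % Fully instantiated: resolved directly through '$pred_exists'/2.
    % ?- current_predicate(lists:append/3).

    % Name bound but arity open: still answered by enumeration.
    % ?- current_predicate(lists:append/N).
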
@ -210,6 +210,18 @@
'$signal_def'(sig_alarm, true).


'$signal'(sig_hup).
'$signal'(sig_usr1).
'$signal'(sig_usr2).
'$signal'(sig_pipe).
'$signal'(sig_alarm).
'$signal'(sig_vtalarm).

on_signal(Signal,OldAction,NewAction) :-
    var(Signal), !,
    (nonvar(OldAction) -> throw(error(instantiation_error,on_signal/3)) ; true),
    '$signal'(Signal),
    on_signal(Signal, OldAction, NewAction).
on_signal(Signal,OldAction,default) :-
    '$reset_signal'(Signal, OldAction).
on_signal(Signal,OldAction,Action) :-
@ -219,7 +231,7 @@ on_signal(Signal,OldAction,Action) :-
    Action = (_:Goal),
    var(Goal), !,
    '$check_signal'(Signal, OldAction),
    Action = OldAction.
    Goal = OldAction.
on_signal(Signal,OldAction,Action) :-
    '$reset_signal'(Signal, OldAction),
    % 13211-2 speaks only about callable

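The new '$signal'/1 table plus the added first clause let on_signal/3 be called with an unbound signal name: the clause enumerates the supported signals and retries with each one bound. A hedged sketch (the handler name is an assumption):

    % Install a handler for a specific signal.
    % ?- on_signal(sig_usr1, _Old, my_usr1_handler).

    % With Signal unbound (and OldAction unbound, as the guard requires),
    % the '$signal'/1 table above supplies sig_hup, sig_usr1, ... on backtracking.
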
@ -51,7 +51,7 @@ socket_select(Socks, OutSocks, TimeOut, Streams, OutStreams) :-
'$check_select_time'(Sec0:USec0, Sec, USec, _) :-
    Sec is Sec0,
    USec is USec0,
    Sec > 0, USec > 0.
    Sec >= 0, USec >= 0.

'$cp_socket_fds'([], Fds, [], Fds).
'$cp_socket_fds'([_|Scks], [[]|Fds], Out, StrFds) :- !,
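Relaxing the bound from strictly positive to non-negative means a Sec:USec timeout of 0:0 is now accepted, i.e. socket_select/5 can poll without blocking. A hedged sketch (the socket list is assumed to have been built elsewhere with the sockets library):

    % ?- socket_select(Sockets, ReadableSockets, 0:0, [], _Streams).
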