Merge branch 'master' of ssh://git.dcc.fc.up.pt/yap-6.3
commit 45e4384b6d
@@ -208,12 +208,13 @@
/* Local Procedures */
/* -------------------------- */

static void simplification_reduction(TrEntry trie);
static TrNode depth_reduction(TrEntry trie, TrNode depth_node, YAP_Int opt_level);
static TrNode breadth_reduction(TrEntry trie, TrNode breadth_node, YAP_Int opt_level);
static inline int compare_label_nodes(TrData data1, TrData data2);
static inline void move_after(TrData data_source, TrData data_dest);
static inline void move_last_data_after(TrData moveto_data);
static inline void set_depth_breadth_reduction_current_data(TrData data);


/* -------------------------- */
@@ -235,6 +236,9 @@ YAP_Term trie_depth_breadth(TrEntry trie, TrEntry db_trie, YAP_Int opt_level, YA
  core_set_trie_db_return_term(YAP_MkAtomTerm(YAP_LookupAtom("false")));
  core_initialize_depth_breadth_trie(TrEntry_trie(db_trie), &depth_node, &breadth_node);
  set_depth_breadth_reduction_current_data(NULL);
  /* We only need to simplify the trie once! */
  if (TrNode_child(TrEntry_trie(trie)))
    simplification_reduction(trie);
  while (TrNode_child(TrEntry_trie(trie))) {
    nested_trie = depth_reduction(trie, depth_node, opt_level);
    if (nested_trie) {
@@ -301,6 +305,27 @@ void set_depth_breadth_reduction_current_data(TrData data) {
}


static
void simplification_reduction(TrEntry trie) {
  TrNode node;
  TrData stop_data, new_data, data = NULL;

  stop_data = TrData_previous(TrEntry_first_data(trie));
  data = TrEntry_traverse_data(trie) = TrEntry_last_data(trie);
  while ((data != stop_data) && (data != NULL)) {
    node = core_simplification_reduction(TRIE_ENGINE, TrData_leaf(data), &trie_data_destruct);
    if (node) {
      new_trie_data(new_data, trie, node);
      PUT_DATA_IN_LEAF_TRIE_NODE(node, new_data);
    }
    if (data == TrEntry_traverse_data(trie)) {
      data = TrData_previous(data);
      TrEntry_traverse_data(trie) = data;
    } else
      data = TrEntry_traverse_data(trie);
  }
}


static
TrNode depth_reduction(TrEntry trie, TrNode depth_node, YAP_Int opt_level) {
  TrNode node;
@@ -309,9 +334,7 @@ TrNode depth_reduction(TrEntry trie, TrNode depth_node, YAP_Int opt_level) {
  stop_data = TrData_previous(TrEntry_first_data(trie));
  data = TrEntry_traverse_data(trie) = TrEntry_last_data(trie);
  while (data != stop_data) {
    // printf("hi0\n");
    node = core_depth_reduction(TRIE_ENGINE, TrData_leaf(data), depth_node, opt_level, &trie_data_construct, &trie_data_destruct, &trie_data_copy, &trie_data_order_correction);
    // printf("bye0\n");
    if (node && IS_FUNCTOR_NODE(TrNode_parent(node)) && (strcmp(YAP_AtomName(YAP_NameOfFunctor((YAP_Functor)(~ApplTag & TrNode_entry(TrNode_parent(node))))), NESTED_TRIE_TERM) == 0)) {
      // nested trie stop procedure: return nested trie node
      return node;
@@ -338,9 +361,7 @@ TrNode breadth_reduction(TrEntry trie, TrNode breadth_node, YAP_Int opt_level) {
  stop_data = TrData_previous(TrEntry_first_data(trie));
  data = TrEntry_traverse_data(trie) = TrEntry_last_data(trie);
  while ((data != stop_data) && (data != NULL)) {
    // printf("hi\n");
    node = core_breadth_reduction(TRIE_ENGINE, TrData_leaf(data), breadth_node, opt_level, &trie_data_construct, &trie_data_destruct, &trie_data_copy, &trie_data_order_correction);
    // printf("bye\n");
    if (node && IS_FUNCTOR_NODE(TrNode_parent(node)) && (strcmp(YAP_AtomName(YAP_NameOfFunctor((YAP_Functor)(~ApplTag & TrNode_entry(TrNode_parent(node))))), NESTED_TRIE_TERM) == 0)) {
      // nested trie stop procedure: return nested trie node
      return node;
@@ -206,13 +206,22 @@
/* Local Procedures */
/* -------------------------- */

inline void displaynode(TrNode node);
inline int traverse_get_counter(TrNode node);
inline YAP_Term generate_label(YAP_Int Index);
YAP_Term update_depth_breadth_trie(TrEngine engine, TrNode root, YAP_Int opt_level, void (*construct_function)(TrNode), void (*destruct_function)(TrNode), void (*copy_function)(TrNode, TrNode), void (*correct_order_function)(void));
YAP_Term get_return_node_term(TrNode node);
void traverse_and_replace_nested_trie(TrNode node, YAP_Int nested_trie_id, YAP_Term new_term);
TrNode replace_nested_trie(TrNode node, TrNode child, YAP_Term new_term);
inline TrNode get_simplification_sibling(TrNode node);
inline TrNode check_parent_first(TrNode node);
inline TrNode TrNode_myparent(TrNode node);

/* -------------------------- */
/* Debug Procedures */
/* -------------------------- */

inline void displaynode(TrNode node);
inline void displayentry(TrNode node);


/* -------------------------- */
@@ -274,6 +283,17 @@ void displaynode(TrNode node) {
}


inline
void displayentry(TrNode node) {
  printf("Entry Contains Bottom Up:\n");
  while (node) {
    displaynode(node);
    node = TrNode_parent(node);
  }
  printf("--- End of Entry ---\n");
}


inline
void traverse_and_replace_nested_trie(TrNode node, YAP_Int nested_trie_id, YAP_Term new_term) {
  TrNode child, temp;
@@ -463,6 +483,52 @@ void core_finalize_depth_breadth_trie(TrNode depth_node, TrNode breadth_node) {
}


inline
TrNode get_simplification_sibling(TrNode node) {
  TrNode sibling = node;
  while (sibling != NULL && TrNode_entry(sibling) != PairEndTag)
    sibling = TrNode_next(sibling);
  if (sibling != NULL && TrNode_entry(sibling) == PairEndTag) return sibling;
  sibling = node;
  while (sibling != NULL && TrNode_entry(sibling) != PairEndTag)
    sibling = TrNode_previous(sibling);
  return sibling;
}

inline
TrNode check_parent_first(TrNode node) {
  TrNode simplification;
  if (TrNode_entry(TrNode_myparent(node)) != PairInitTag) {
    simplification = check_parent_first(TrNode_myparent(node));
    if (simplification != NULL && TrNode_entry(simplification) == PairEndTag) return simplification;
  }
  simplification = get_simplification_sibling(node);
  return simplification;
}

inline
TrNode TrNode_myparent(TrNode node) {
  TrNode parent = TrNode_parent(node);
  while (parent != NULL && IS_FUNCTOR_NODE(parent))
    parent = TrNode_parent(parent);
  return parent;
}

TrNode core_simplification_reduction(TrEngine engine, TrNode node, void (*destruct_function)(TrNode)) {
  /* Try to find the greatest parent that has a sibling that is a PairEndTag: this indicates a deep simplification */
  node = check_parent_first(TrNode_myparent(node));
  if (node != NULL) {
    /* do breadth reduction simplification */
    node = TrNode_parent(node);
    DATA_DESTRUCT_FUNCTION = destruct_function;
    remove_child_nodes(TrNode_child(node));
    TrNode_child(node) = NULL;
    node = trie_node_check_insert(node, PairEndTag);
    INCREMENT_ENTRIES(CURRENT_TRIE_ENGINE);
  }
  return node;
}


TrNode core_depth_reduction(TrEngine engine, TrNode node, TrNode depth_node, YAP_Int opt_level, void (*construct_function)(TrNode), void (*destruct_function)(TrNode), void (*copy_function)(TrNode, TrNode), void (*correct_order_function)(void)) {
  TrNode leaf = node;
@@ -534,14 +600,18 @@ TrNode core_breadth_reduction(TrEngine engine, TrNode node, TrNode breadth_node,
  YAP_Term t, *stack_top;
  int count = -1;
  TrNode child;

  /* Simplification with breadth reduction (faster dbtrie execution, worse BDD)
  child = core_simplification_reduction(engine, node, destruct_function);
  if (child) return child;
  */

  /* collect breadth nodes */
  stack_args_base = stack_args = AUXILIARY_TERM_STACK;
  stack_top = AUXILIARY_TERM_STACK + CURRENT_AUXILIARY_TERM_STACK_SIZE - 1;
  node = TrNode_parent(TrNode_parent(node));
  // printf("1\n");
  // printf("start node: "); displaynode(node);
  if (IS_FUNCTOR_NODE(node)) {
    // printf("2\n");
    while(IS_FUNCTOR_NODE(node))
      node = TrNode_parent(node);
    child = TrNode_child(node);
@@ -613,6 +683,7 @@ TrNode core_breadth_reduction(TrEngine engine, TrNode node, TrNode breadth_node,
    do {
      if (TrNode_entry(child) == PairEndTag) {
        /* do breadth reduction simplification */
        printf("I should never arrive here, please contact Theo!\n");
        node = TrNode_parent(child);
        DATA_DESTRUCT_FUNCTION = destruct_function;
        remove_child_nodes(TrNode_child(node));
@@ -676,10 +747,7 @@ TrNode core_breadth_reduction(TrEngine engine, TrNode node, TrNode breadth_node,
        child = TrNode_parent(child);
      }
      child = TrNode_next(child);
      // printf("Siblings: "); displaynode(child);
    } while (child);
    // printf("pass through\n");
  }
  if (!count) {
    /* termination condition */
@@ -699,7 +767,6 @@ TrNode core_breadth_reduction(TrEngine engine, TrNode node, TrNode breadth_node,
  node = trie_node_check_insert(node, t);
  node = trie_node_check_insert(node, PairEndTag);
  INCREMENT_ENTRIES(CURRENT_TRIE_ENGINE);
  // printf("end node: "); displaynode(node);
  return node;
}
@@ -219,6 +219,7 @@ void core_set_label_counter(YAP_Int value);
YAP_Int core_get_label_counter(void);
void core_initialize_depth_breadth_trie(TrNode node, TrNode *depth_node, TrNode *breadth_node);
void core_finalize_depth_breadth_trie(TrNode depth_node, TrNode breadth_node);
TrNode core_simplification_reduction(TrEngine engine, TrNode node, void (*destruct_function)(TrNode));
TrNode core_depth_reduction(TrEngine engine, TrNode node, TrNode depth_node, YAP_Int opt_level, void (*construct_function)(TrNode), void (*destruct_function)(TrNode), void (*copy_function)(TrNode, TrNode), void (*correct_order_function)(void));
TrNode core_breadth_reduction(TrEngine engine, TrNode node, TrNode breadth_node, YAP_Int opt_level, void (*construct_function)(TrNode), void (*destruct_function)(TrNode), void (*copy_function)(TrNode, TrNode), void (*correct_order_function)(void));
YAP_Term core_get_trie_db_return_term(void);
@@ -519,6 +519,7 @@ TrNode core_trie_load(TrEngine engine, FILE *file, void (*load_function)(TrNode,
  DATA_LOAD_FUNCTION = load_function;
  node = core_trie_open(engine);
  traverse_and_load(node, file);
  if (n) n = 0; // just added to remove the warning of not used!
  return node;
}

@@ -1450,6 +1451,7 @@ void traverse_and_load(TrNode parent, FILE *file) {
    traverse_and_load(child, file);
  } while (fscanf(file, "%lu", &t));
  CURRENT_DEPTH--;
  if (n) n = 0; // just added to remove the warning of not used!
  return;
}
@@ -41,8 +41,8 @@ CPLINT_LIBS=@CPLINT_LIBS@
#4.1VPATH=@srcdir@:@srcdir@/OPTYap
CWD=$(PWD)
#
OBJS=cplint_yap.o cplint_Prob.o
SOBJS=cplint.@SHLIB_SUFFIX@
OBJS=cplint_yap.o cplint_Prob.o bddem.o
SOBJS=cplint.@SHLIB_SUFFIX@ bddem.@SHLIB_SUFFIX@

@@ -148,7 +149,9 @@ CPLINT_LEARNING_PROGRAMS=\
	$(CPLINT_SRCDIR)/em \
	$(CPLINT_SRCDIR)/rib \
	$(CPLINT_SRCDIR)/slipcase \
	$(CPLINT_SRCDIR)/slipcover
	$(CPLINT_SRCDIR)/slipcover \
	$(CPLINT_SRCDIR)/lemur


all: $(SOBJS)
@@ -159,15 +161,20 @@ cplint_yap.o: $(srcdir)/cplint_yap.c $(srcdir)/cplint.h
cplint_Prob.o: $(srcdir)/cplint_Prob.c $(srcdir)/cplint.h
	$(CC) -c $(CFLAGS) $(CPLINT_CFLAGS) $(srcdir)/cplint_Prob.c -o cplint_Prob.o

bddem.o : $(srcdir)/slipcase/bddem.c
	$(CC) -c $(CFLAGS) $(CPLINT_CFLAGS) $(srcdir)/slipcase/bddem.c -o bddem.o


@DO_SECOND_LD@cplint.@SHLIB_SUFFIX@: cplint_yap.o cplint_Prob.o
@DO_SECOND_LD@	@CPLINT_SHLIB_LD@ -o cplint.@SHLIB_SUFFIX@ $(CPLINT_LDFLAGS) cplint_yap.o cplint_Prob.o $(CPLINT_LIBS) @EXTRA_LIBS_FOR_DLLS@

@DO_SECOND_LD@bddem.@SHLIB_SUFFIX@: bddem.o
@DO_SECOND_LD@	@CPLINT_SHLIB_LD@ -o bddem.@SHLIB_SUFFIX@ $(CPLINT_LDFLAGS) bddem.o $(CPLINT_LIBS) @EXTRA_LIBS_FOR_DLLS@

clean:
	rm -f *.o *~ $(OBJS) $(SOBJS) *.BAK

install: all install-examples
install: all
	mkdir -p $(DESTDIR)$(SHAREDIR)/cplint
	mkdir -p $(DESTDIR)$(SHAREDIR)/cplint/examples
	mkdir -p $(DESTDIR)$(SHAREDIR)/cplint/doc
@@ -183,9 +190,8 @@ install: all install-examples
	cp $(CPLINT_SRCDIR)/rib/*.pl $(DESTDIR)$(SHAREDIR)/cplint/
	cp $(CPLINT_SRCDIR)/slipcase/*.pl $(DESTDIR)$(SHAREDIR)/cplint/
	cp $(CPLINT_SRCDIR)/slipcover/*.pl $(DESTDIR)$(SHAREDIR)/cplint/
	$(INSTALL_PROGRAM) slipcase/bddem.so $(DESTDIR)$(YAPLIBDIR)
	cp $(CPLINT_SRCDIR)/lemur/*.pl $(DESTDIR)$(SHAREDIR)/cplint/

install_examples:

installcheck:
	for h in ${CPLINT_TEST_PROGRAMS}; do echo "t. halt." | $(BINDIR)/yap -l $$h; done
@@ -27,7 +27,7 @@ if test ! "$yap_cv_cplint" = "no"
  CPLINT_SHLIB_LD=$SHLIB_LD
  SHLIB_SUFFIX="so"
fi
PKG_CPLINT="packages/cplint packages/cplint/splipcase packages/cplint/approx/simplecuddLPADs"
PKG_CPLINT="packages/cplint packages/cplint/approx/simplecuddLPADs"
AC_SUBST(CPLINT_LIBS)
AC_SUBST(CPLINT_CFLAGS)
AC_SUBST(CPLINT_LDFLAGS)
@@ -441,6 +441,9 @@ The files \texttt{*.uni} that are present for some of the examples are used by
\item EMBLEM (EM over Bdds for probabilistic Logic programs Efficient Mining): an implementation of EM for learning parameters that computes expectations directly on BDDs \cite{BelRig11-IDA,BelRig11-CILC11-NC,BelRig11-TR}
\item SLIPCASE (Structure LearnIng of ProbabilistiC logic progrAmS with Em over bdds): an algorithm for learning the structure of programs by searching directly the theory space \cite{BelRig11-ILP11-IC}
\item SLIPCOVER (Structure LearnIng of Probabilistic logic programs by searChing OVER the clause space): an algorithm for learning the structure of programs by searching the clause space and the theory space separately \cite{BelRig13-TPLP-IJ}
\item LEMUR (LEarning with a Monte carlo Upgrade of tRee search): an algorithm for learning the structure of programs by searching the clause space using Monte Carlo tree search.
\end{itemize}

\subsection{Input}

@@ -449,7 +452,7 @@ To execute the learning algorithms, prepare four files in the same folder:
\item \texttt{<stem>.kb}: contains the example interpretations
\item \texttt{<stem>.bg}: contains the background knowledge, i.e., knowledge valid for all interpretations
\item \texttt{<stem>.l}: contains language bias information
\item \texttt{<stem>.cpl}: contains the LPAD for which you want to learn the parameters or the initial LPAD for SLIPCASE. For SLIPCOVER, this file should be absent
\item \texttt{<stem>.cpl}: contains the LPAD for which you want to learn the parameters or the initial LPAD for SLIPCASE and LEMUR. For SLIPCOVER, this file should be absent
\end{itemize}
where \texttt{<stem>} is your dataset name. Examples of these files can be found in the dataset pages.

@@ -504,7 +507,7 @@ For RIB, if there are unseen predicates, i.e., predicates that are present in th
unseen(<predicate>/<arity>).
\end{verbatim}

For SLIPCASE and SLIPCOVER, you have to specify the language bias by means of mode declarations in the style of
For SLIPCASE, SLIPCOVER and LEMUR, you have to specify the language bias by means of mode declarations in the style of
\href{http://www.doc.ic.ac.uk/\string ~shm/progol.html}{Progol}.
\begin{verbatim}
modeh(<recall>,<predicate>(<arg1>,...)).
@@ -558,7 +561,7 @@ modeb(*,samecourse(+course, -course)).
modeb(*,samecourse(-course, +course)).
....
\end{verbatim}
SLIPCOVER also requires facts for the \verb|determination/2| predicate that indicate which predicates can appear in the body of clauses.
SLIPCOVER and LEMUR also require facts for the \verb|determination/2| predicate that indicate which predicates can appear in the body of clauses.
For example
\begin{verbatim}
determination(professor/1,student/1).
@@ -592,17 +595,21 @@ In order to set the algorithms' parameters, you have to insert in \texttt{<stem>
The available parameters are:
\begin{itemize}
\item \verb|depth| (values: integer or \verb|inf|, default value: 3): depth of derivations if \verb|depth_bound| is set to \verb|true|
\item \verb|single_var| (values: \verb|{true,false}|, default value: \verb|false|, valid for CEM, EMBLEM, SLIPCASE and SLIPCOVER): if set to \verb|true|, there is a random variable for each clause, instead of a separate random variable for each grounding of a clause
\item \verb|single_var| (values: \verb|{true,false}|, default value: \verb|false|, valid for CEM, EMBLEM, SLIPCASE, SLIPCOVER and LEMUR): if set to \verb|true|, there is a random variable for each clause, instead of a separate random variable for each grounding of a clause
\item \verb|sample_size| (values: integer, default value: 1000): total number of examples in case in which the models in the \verb|.kb| file contain a \verb|prob(P).| fact. In that case, one model corresponds to \verb|sample_size*P| examples
\item \verb|epsilon_em| (values: real, default value: 0.1, valid for CEM, EMBLEM, SLIPCASE and SLIPCOVER): if the difference in the log likelihood in two successive EM iterations is smaller than \verb|epsilon_em|, then EM stops
\item \verb|epsilon_em| (values: real, default value: 0.1, valid for CEM, EMBLEM, SLIPCASE, SLIPCOVER and LEMUR): if the difference in the log likelihood in two successive EM iterations is smaller than \verb|epsilon_em|, then EM stops
\item \verb|epsilon_em_fraction| (values: real, default value: 0.01, valid for CEM, EMBLEM, SLIPCASE and SLIPCOVER): if the difference in the log likelihood in two successive EM iterations is smaller than \verb|epsilon_em_fraction|*(-current log likelihood), then EM stops
\item \verb|epsilon_em_fraction| (values: real, default value: 0.01, valid for CEM, EMBLEM, SLIPCASE, SLIPCOVER and LEMUR): if the difference in the log likelihood in two successive EM iterations is smaller than \verb|epsilon_em_fraction|*(-current log likelihood), then EM stops
\item \verb|iter| (values: integer, default value: 1, valid for EMBLEM, SLIPCASE and SLIPCOVER): maximum number of iterations of EM parameter learning. If set to -1, no maximum number of iterations is imposed
\item \verb|iter| (values: integer, default value: 1, valid for EMBLEM, SLIPCASE, SLIPCOVER and LEMUR): maximum number of iterations of EM parameter learning. If set to -1, no maximum number of iterations is imposed
\item \verb|iterREF| (values: integer, default value: 1, valid for SLIPCASE and SLIPCOVER): maximum number of iterations of EM parameter learning for refinements. If set to -1, no maximum number of iterations is imposed.
\item \verb|iterREF| (values: integer, default value: 1, valid for SLIPCASE, SLIPCOVER and LEMUR): maximum number of iterations of EM parameter learning for refinements. If set to -1, no maximum number of iterations is imposed.
\item \verb|random_restarts_number| (values: integer, default value: 1, valid for CEM, EMBLEM, SLIPCASE and SLIPCOVER): number of random restarts of EM learning
\item \verb|random_restarts_number| (values: integer, default value: 1, valid for CEM, EMBLEM, SLIPCASE, SLIPCOVER and LEMUR): number of random restarts of EM learning
\item \verb|random_restarts_REFnumber| (values: integer, default value: 1, valid for SLIPCASE and SLIPCOVER): number of random restarts of EM learning for refinements
\item \verb|random_restarts_REFnumber| (values: integer, default value: 1, valid for SLIPCASE, SLIPCOVER and LEMUR): number of random restarts of EM learning for refinements
\item \verb|setrand| (values: rand(integer,integer,integer)): seed for the random functions, see the Yap manual for allowed values
\item \verb|minimal_step| (values: [0,1], default value: 0.005, valid for RIB): minimal increment of $\gamma$
\item \verb|maximal_step| (values: [0,1], default value: 0.1, valid for RIB): maximal increment of $\gamma$
@@ -610,14 +617,19 @@ than \verb|epsilon_em_fraction|*(-current log likelihood), then EM stops
\item \verb|delta| (values: negative integer, default value -10, valid for RIB): value assigned to $\log 0$
\item \verb|epsilon_fraction| (values: integer, default value 100, valid for RIB): in the computation of the step, the value of $\epsilon$ of \cite{DBLP:journals/jmlr/ElidanF05} is obtained as $\log |CH,T|\times$\verb|epsilon_fraction|
\item \verb|max_rules| (values: integer, default value: 6000, valid for RIB and SLIPCASE): maximum number of ground rules. Used to set the size of arrays for storing internal statistics. Can be increased as much as memory allows.
\item \verb|logzero| (values: negative real, default value $\log(0.000001)$, valid for SLIPCASE and SLIPCOVER): value assigned to $\log 0$
\item \verb|logzero| (values: negative real, default value $\log(0.000001)$, valid for SLIPCASE, SLIPCOVER and LEMUR): value assigned to $\log 0$
\item \verb|examples| (values: \verb|atoms|, \verb|interpretations|, default value \verb|atoms|, valid for SLIPCASE): determines how BDDs are built: if set to \verb|interpretations|, a BDD for the conjunction of all the atoms for the target predicates in each interpretation is built.
If set to \verb|atoms|, a BDD is built for the conjunction of a group of atoms for the target predicates in each interpretation. The number of atoms in each group is determined by the parameter \verb|group|
\item \verb|group| (values: integer, default value: 1, valid for SLIPCASE): number of target atoms in the groups that are used to build BDDs
\item \verb|nax_iter| (values: integer, default value: 10, valid for SLIPCASE and SLIPCOVER): number of iterations of beam search
\item \verb|max_var| (values: integer, default value: 1, valid for SLIPCASE and SLIPCOVER): maximum number of distinct variables in a clause
\item \verb|max_var| (values: integer, default value: 1, valid for SLIPCASE, SLIPCOVER and LEMUR): maximum number of distinct variables in a clause
\item \verb|verbosity| (values: integer in [1,3], default value: 1): level of verbosity of the algorithms
\item \verb|beamsize| (values: integer, default value: 20, valid for SLIPCASE and SLIPCOVER): size of the beam
\item \verb|mcts_beamsize| (values: integer, default value: 3, valid for LEMUR): size of the MCTS beam
\item \verb|mcts_visits| (values: integer, default value: +inf, valid for LEMUR): maximum number of visits (Nicola, please check)
\item \verb|megaex_bottom| (values: integer, default value: 1, valid for SLIPCOVER): number of mega-examples on which to build the bottom clauses
\item \verb|initial_clauses_per_megaex| (values: integer, default value: 1, valid for SLIPCOVER): number of bottom clauses to build for each mega-example
@@ -627,7 +639,7 @@ If set to \verb|atoms|, a BDD is built for the conjunction of a group of atoms f
maximum number of theory search iterations
\item \verb|background_clauses| (values: integer, default value: 50, valid for SLIPCOVER): maximum number of background clauses
\item \verb|maxdepth_var| (values: integer, default value: 2, valid for SLIPCOVER): maximum depth of variables in clauses (as defined in \cite{DBLP:journals/ai/Cohen95}).
\item \verb|maxdepth_var| (values: integer, default value: 2, valid for SLIPCOVER and LEMUR): maximum depth of variables in clauses (as defined in \cite{DBLP:journals/ai/Cohen95}).
\item \verb|score| (values: \verb|ll|, \verb|aucpr|, default value \verb|ll|, valid for SLIPCOVER): determines the score function for refinement: if set to \verb|ll|, log likelihood is used; if set to \verb|aucpr|, the area under the Precision-Recall curve is used.
@@ -673,7 +685,19 @@ and call
\begin{verbatim}
?:- sl(stem).
\end{verbatim}

To execute LEMUR, load \texttt{lemur.pl} with
\begin{verbatim}
?:- use_module(library('cplint/lemur')).
\end{verbatim}
and call
\begin{verbatim}
?:- mcts(stem,depth,c,iter,rules,covering).
\end{verbatim}
where \verb|depth| (integer) is the maximum number of random specialization steps in the default policy, \verb|C| (real) is the value of the MCTS $C$ constant, \verb|iter| (integer) is the number of UCT rounds, \verb|rules| (integer) is the maximum number of clauses to be learned and \verb|covering| (Boolean) denotes whether the search is performed in the space of clauses (true) or theories (false) (Nicola, please check).
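
For instance, a hypothetical invocation for a dataset with stem \texttt{registration} could look as follows (the argument values below are purely illustrative, not recommended defaults):
\begin{verbatim}
?:- mcts(registration,3,0.7,100,10,false).
\end{verbatim}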

\subsection{Testing}
To test the theories learned, load \texttt{test.pl} with
packages/cplint/lemur/dv_lemur.pl (new file, 167 lines)
@@ -0,0 +1,167 @@
/*:- use_module(library(terms)).
*/
:- use_module(library(lists)).


/* Define the depth of a variable appearing in a clause A :- B1,...,Br as follows.

Variables appearing in the head of a clause have depth zero.
Otherwise, let Bi be the first literal containing the variable V, and let d be the maximal depth of the input variables of Bi;
then the depth of V is d + 1. The depth of a clause is the maximal depth of any variable in the clause.

This way we can leave the maximum number of variables at 4 (so the rule with taughtby is learned) and reduce the maximum variable depth to 2 (so we should learn the rule with the two publication literals in the body, and also the one with taughtby).
revise.pl must be modified to check that the atoms added to the body have no variables beyond the maximum depth.

head: professor(_710033).
body: taughtby(_710343,_710033,_710355).

Vars1 V1=_710033, V2=_710343, V3=_710355

head: professor(_710033).
body: yearsinprogram(_710149,_710196).

Vars1 V1=_710033, V2=_710149, V3=_710196.

*/


:- op(500,fx,#).
:- op(500,fx,'-#').
%:-['graphics_train.l'].

%setting(maxdepth_var,1).
% How it works:
% ?- t(DV).
% DV = [[_A,1],[_B,0],[_C,0]] - a list of pairs [variable, maximum depth]


t(DV):- % dv([advisedby(A,B)],[taughtby(C,B,D),ta(C,A,D)],DV).
	dv([advisedby(A,B)],[publication(C,B),publication(C,A),professor(B),student(A)],DV).

% dv([professor(A)],[taughtby(B,A,C),taughtby(D,A,E),taughtby(D,A,E)],DV). %max_var 5

dv(H,B,DV1):- %-DV1
	term_variables(H,V),
	head_depth(V,DV0),
	findall((MD-DV),var_depth(B,DV0,DV,0,MD),LDs), % find all the lists of variable-depth pairs that the modeb choices can generate, then take the list that reaches the greatest maximum depth
	get_max(LDs,-1,-,DV1).

input_variables_b(LitM,InputVars):-
	LitM=..[P|Args],
	length(Args,LA),
	length(Args1,LA),
	Lit1=..[P|Args1],
	modeb(_,Lit1),
	input_vars(LitM,Lit1,InputVars).

/* uncomment to test this file on its own */
/* input_vars(Lit,Lit1,InputVars):-
	Lit =.. [_|Vars],
	Lit1 =.. [_|Types],
	input_vars1(Vars,Types,InputVars).

input_vars1([],_,[]).

input_vars1([V|RV],[+_T|RT],[V|RV1]):-
	!,
	input_vars1(RV,RT,RV1).

input_vars1([_V|RV],[_|RT],RV1):-
	input_vars1(RV,RT,RV1).
*/

depth_var_head(List,VarsD):- % exit: depth_var_head([professor(_G131537)],[[_G131537,0]]) ?
	term_variables(List,Vars0), % List = list of head atoms, Vars0 = list of variables extracted from the head
	head_depth(Vars0,VarsD). % adds depth 0 to every variable, creating sublists

head_depth([],[]).
head_depth([V|R],[[V,0]|R1]):-
	head_depth(R,R1).
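
% Illustrative query (hypothetical, for documentation only):
% ?- head_depth([X,Y],D).
% D = [[X,0],[Y,0]]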

/*
depth_var_body(VarsH,BL,VarsD):-
	term_variables(List,Vars0), % extracts the list Vars0 of variables of the body atoms in List
	exclude_headvar(VarsH,Vars0,VarsB), % VarsB: body variables v for which d(v) must be computed - excludes the head variables, whose depth is 0 (Z,W,R)
	set_Uv(VarsB,BL,Sets), % Sets: associates with each var v in VarsB the set U_v of the vars != v: a list of lists [v,(set)]
	max(Sets,VarsH,VarsD). % VarsD: associates each var v with its depth, using the head variables VarsH that have d=0 (creates (v,d) tuples)
*/
var_depth([],PrevDs1,PrevDs1,MD,MD):-!.

var_depth([L|R],PrevDs,PrevDs1,_MD,MD):- % L = body atom
	% MD is the maximum depth reached so far
	input_variables_b(L,InputVars), % input variables of atom L
	% write(L),format("~n input variables:",[]),write_list(InputVars), % L = body literal, e.g. ta(_710237,_710858,_711331); InputVars = its input variables, e.g. _710237,_710858
	term_variables(L, BodyAtomVars), % BodyAtomVars: the list of variables of body literal L
	output_vars(BodyAtomVars,InputVars,OutputVars), % OutputVars = BodyAtomVars - InputVars
	depth_InputVars(InputVars,PrevDs,0,MaxD), % MaxD: maximum depth of the input variables occurring in the literal
	D is MaxD+1,
	compute_depth(OutputVars,D,PrevDs,PrevDs0), % PrevDs0: list of [v,d] pairs for all variables (assigns D to the output variables)
	% term_variables(PrevLits,PrevVars), % PrevVars: list of head variables
	% write(BodyD),
	% PrevDs1 = [BodyD|PrevDs].
	var_depth(R,PrevDs0,PrevDs1,D,MD).
get_max([],_,Ds,Ds).

get_max([(MD-DsH)|T],MD0,_Ds0,Ds):-
	MD>MD0,!,
	get_max(T,MD,DsH,Ds).

get_max([_H|T],MD,Ds0,Ds):-
	get_max(T,MD,Ds0,Ds).

output_vars(OutVars,[],OutVars):-!.
output_vars(BodyAtomVars,[I|InputVars],OutVars):- % excludes the input variables from the body literal's variable list
	delete(BodyAtomVars, I, Residue), % deletes I from BodyAtomVars
	output_vars(Residue,InputVars, OutVars).

% returns in D the maximum depth of the variables in the list passed as first argument
depth_InputVars([],_,D,D).
depth_InputVars([I|Input],PrevDs,D0,D):-
	member_l(PrevDs,I,MD),
	(MD>D0->
		D1=MD
	;
		D1=D0
	),
	depth_InputVars(Input,PrevDs,D1,D).

member_l([[L,D]|_P],I,D):- % returns the depth of variable I
	I==L,!.
member_l([_|P],I,D):-
	member_l(P,I,D).

compute_depth([],_,PD,PD):-!. %LVarD
compute_depth([O|Output],D,PD,RestO):- %LVarD
	member_l(PD,O,_),!, % variable already present
	compute_depth(Output,D,PD,RestO).

compute_depth([O|Output],D,PD,[[O,D]|RestO]):- %LVarD
	compute_depth(Output,D,PD,RestO).

% Otherwise, let Bi be the first literal containing the variable V, and let d be the maximal depth of the input variables of Bi: then the depth of V is d+1. The depth of a clause is the maximal depth of any variable in the clause.


%[[_A,1],[_B,0],[_C,0]]
exceed_depth([],_):-!.
exceed_depth([H|T],MD):-
	nth1(2,H,Dep), % extracts the depth
	%setting(maxdepth_var,MD),
	% (Dep>=MD ->
	%	format("*****************depth exceeded ~n")
	% ;
	%	true
	% ),
	Dep<MD,
	exceed_depth(T,MD).
packages/cplint/lemur/inference_lemur.pl (new file, 1106 lines; diff suppressed because it is too large)
packages/cplint/lemur/lemur.pl (new file, 1089 lines; diff suppressed because it is too large)

packages/cplint/lemur/revise_lemur.pl (new file, 636 lines)
@@ -0,0 +1,636 @@
/*

EMBLEM and SLIPCASE

Copyright (c) 2011, Fabrizio Riguzzi, Nicola di Mauro and Elena Bellodi

*/
:- use_module(library(terms)).
:- use_module(library(lists)).
:- use_module(library(random)).

:- set_prolog_flag(discontiguous_warnings,on).
:- set_prolog_flag(single_var_warnings,on).

:- [dv_lemur].

theory_revisions_op(Theory,TheoryRevs):-
	setof(RevOp, Theory^revise_theory(Theory,RevOp), TheoryRevs),!.
theory_revisions_op(_Theory,[]).

filter_add_rule([],[]).
filter_add_rule([add(_Rule)|R],R1):-
	!,
	filter_add_rule(R,R1).
filter_add_rule([A|R],[A|R1]):-
	!,
	filter_add_rule(R,R1).

theory_revisions_r(Theory,TheoryRevs):-
	theory_revisions_op(Theory,TheoryRevs1),
	% filter_add_rule(TheoryRevs11,TheoryRevs1),
	( TheoryRevs1 == [] ->
		TheoryRevs = []
	;
		length(TheoryRevs1,L),
		random(0,L,K),
		nth0(K, TheoryRevs1,Revision),
		apply_operators([Revision],Theory,TheoryRevs)
	).

theory_revisions(Theory,TheoryRevs):-
	theory_revisions_op(Theory,TheoryRevs1),
	apply_operators(TheoryRevs1,Theory,TheoryRevs).

apply_operators([],_Theory,[]).

apply_operators([add(Rule)|RestOps],Theory,[NewTheory|RestTheory]) :-
	append(Theory, [Rule], NewTheory),
	% nl,write(NewTheory),
	apply_operators(RestOps,Theory,RestTheory).

apply_operators([add_body(Rule1,Rule2,_A)|RestOps],Theory,[NewTheory|RestTheory]) :-
	delete_matching(Theory,Rule1,Theory1),
	append(Theory1, [Rule2], NewTheory),
	% nl,write(NewTheory),
	apply_operators(RestOps,Theory,RestTheory).

apply_operators([remove_body(Rule1,Rule2,_A)|RestOps],Theory,[NewTheory|RestTheory]) :-
	delete_matching(Theory,Rule1,Theory1),
	append(Theory1, [Rule2], NewTheory),
	% nl,write(NewTheory),
	apply_operators(RestOps,Theory,RestTheory).

apply_operators([add_head(Rule1,Rule2,_A)|RestOps],Theory,[NewTheory|RestTheory]) :-
	delete_matching(Theory,Rule1,Theory1),
	append(Theory1, [Rule2], NewTheory),
	% nl,write(NewTheory),
	apply_operators(RestOps,Theory,RestTheory).

apply_operators([remove_head(Rule1,Rule2,_A)|RestOps],Theory,[NewTheory|RestTheory]) :-
	delete_matching(Theory,Rule1,Theory1),
	append(Theory1, [Rule2], NewTheory),
	% nl,write(NewTheory),
	apply_operators(RestOps,Theory,RestTheory).

apply_operators([remove(Rule)|RestOps],Theory,[NewTheory|RestTheory]) :-
	delete_matching(Theory,Rule,NewTheory),
	% nl,write(NewTheory),
	apply_operators(RestOps,Theory,RestTheory).

revise_theory(Theory,Ref):-
	specialize_theory(Theory,Ref).

revise_theory(Theory,Ref):-
	generalize_theory(Theory,Ref).

/*
generalize_theory(Theory,Ref):-
	Theory \== [],
	choose_rule(Theory,Rule),
	generalize_rule(Rule,Ref).
*/
generalize_theory(Theory,Ref):-
	length(Theory,LT),
	setting(max_rules,MR),
	LT<MR,
	add_rule(Ref).

generalize_rule(Rule,Ref):-
	generalize_head(Rule,Ref).

generalize_rule(Rule,Ref):-
	generalize_body(Rule,Ref).

add_rule(add(rule(ID,Head,[],Lits))):-
	setting(specialization,bottom),!,
	database(DB),
	sample(1,DB,[M]),
	get_head_atoms(O),
	member(A,O),
	functor(A,F,N),
	functor(F1,F,N),
	F1=..[F|Arg],
	Pred1=..[F,M|Arg],
	A=..[F|ArgM],
	keep_const(ArgM,Arg),
	findall((A,Pred1),call(Pred1),L),
	sample(1,L,LH),
	generate_body(LH,[rule(ID,Head,[],Lits)]).

add_rule(add(SpecRule)):-
	findall(HL, modeh(_,HL), HLS),
	length(HLS,L),
	L1 is L+1,
	P is 1/L1,
	generate_head(HLS,P,Head),
	get_next_rule_number(ID),
	Rule0 = rule(ID,Head,[],true),
	specialize_rule(Rule0,SpecRule,_Lit).

generate_head([H|_T],_P,[H1:0.5,'':0.5]):-
	H=..[Pred|Args],
	length(Args,LA),
	length(Args1,LA),
	H1=..[Pred|Args1],
	check_for_constants(Args,Args1).

check_for_constants([],[]).
check_for_constants([+_X|R],[_V|R1]):-
	!,
	check_for_constants(R,R1).
check_for_constants([-_X|R],[_V|R1]):-
	!,
	check_for_constants(R,R1).
check_for_constants([X|R],[X|R1]):-
	check_for_constants(R,R1).

generate_head([_H|T],P,Head):-
	generate_head(T,P,Head).

generalize_head(Rule,Ref):-
	Rule = rule(ID,LH,BL,L),
	generalize_head1(LH,LH1,NewAt),
	Ref = add_head(Rule,rule(ID,LH1,BL,L),NewAt).

generalize_head1(LH,LH1,NH):-
	findall(HL, modeh(_,HL), HLS),
	generalize_head2(HLS,LH,LH1,NH).

generalize_head2([X|_R],LH,LH1,PH) :-
	X =.. [P|A],
	length(A,LA),
	length(A1,LA),
	PH =.. [P|A1],
	\+ member(PH:_, LH),
	(setting(new_head_atoms_zero_prob,true)->
		delete_matching(LH,'':PNull,LH0),
		append(LH0,[PH:0.0,'':PNull],LH1)
	;
		length(LH,NH),
		add_to_head(LH,NH,PH,LH1)
	).

% note: this clause originally had arity 3, which could never match the
% arity-4 calls above; the extra argument is restored here
generalize_head2([_X|R],LH,LH1,PH) :-
	generalize_head2(R,LH,LH1,PH).

add_to_head(['':PN],NH,At,[At:PA,'':PN1]):-!,
	PN1 is PN*NH/(NH+1),
	PA is 1/(NH+1).

add_to_head([H:PH|T],NH,At,[H:PH1|T1]):-
	PH1 is PH*NH/(NH+1),
	add_to_head(T,NH,At,T1).
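
% Illustrative behaviour (hypothetical query, for documentation only):
% adding atom a to a head with NH = 1 existing atom rescales the old
% probabilities by NH/(NH+1) and gives the new atom 1/(NH+1):
% ?- add_to_head([p:0.5,'':0.5],1,a,LH).
% LH = [p:0.25,a:0.5,'':0.25]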

get_module_var(LH,Module):-
	member(H:_,LH),!,
	H=..[_F,Module|_].

generalize_body(Rule,Ref):-
	Rule = rule(ID,LH,BL),
	delete_one(BL,BL1,A),
	remove_prob(LH,LH1),
	delete(LH1,'',LH2),
	linked_clause(BL1,LH2),
	Ref = remove_body(Rule,rule(ID,LH,BL1),A).

specialize_theory(Theory,Ref):-
	Theory \== [],
	choose_rule(Theory,Rule),
	specialize_rule(Rule,SpecRule,Lit),
	Ref = add_body(Rule,SpecRule,Lit),
	SpecRule = rule(_,_,B,_).

/*,
	\+ (member(b_rel11(X1,Y1),B), member(b_rel11(Z1,Y1),B), Y1 \== Z1),
	\+ (member(b_rel12(X2,Y2),B), member(b_rel12(Z2,Y2),B), Y2 \== Z2),
	\+ (member(b_rel13(X3,Y3),B), member(b_rel13(Z3,Y3),B), Y3 \== Z3).*/
specialize_rule(Rule,SpecRule,Lit):-
	setting(specialization,bottom),
	Rule = rule(ID,LH,BL,Lits),
	delete_one(Lits,RLits,Lit),
	\+ lookahead_cons(Lit,_),
	append(BL,[Lit],BL1),
	remove_prob(LH,LH1),
	% check_ref(LH1,BL1),
	delete(LH1,'',LH2),
	append(LH2,BL1,ALL2),
	extract_fancy_vars(ALL2,Vars1),
	length(Vars1,NV),
	setting(max_var,MV),
	NV=<MV,
	linked_clause(BL1,LH2),
	\+ banned_clause(LH2,BL1),
	SpecRule=rule(ID,LH,BL1,RLits).

specialize_rule(Rule,SpecRule,Lit):-
	setting(specialization,bottom),
	Rule = rule(ID,LH,BL,Lits),
	delete_one(Lits,RLits,Lit),
	append(BL,[Lit],BL0),
	(lookahead(Lit,LLit1);lookahead_cons(Lit,LLit1)), % lookahead_cons states that e.g. rating(_A,_B,_C) is added only together with the literals in the list, never on its own
	copy_term(LLit1,LLit2),
	specialize_rule_la_bot(LLit2,RLits,RLits1,BL0,BL1),
	remove_prob(LH,LH1),
	% check_ref(LH1,BL1),
	delete(LH1,'',LH2),
	append(LH2,BL1,ALL2),
	extract_fancy_vars(ALL2,Vars1),
	length(Vars1,NV),
	setting(max_var,MV),
	NV=<MV,
	linked_clause(BL1,LH2),
	\+ banned_clause(LH2,BL1),
	SpecRule=rule(ID,LH,BL1,RLits1).

specialize_rule(Rule,SpecRule,Lit):-
	setting(specialization,mode),!,
	% findall(BL, modeb(_,BL), BLS),
	mcts_modeb(BSL0),
	Rule = rule(_ID,_LH,BL,_),
	( BL \= [] ->
		%last(BL,LastLit),
		%LastLit =.. [Pred|_],
		%filter_modeb(BSL0,LastLit,BSL)
		BSL = BSL0
	;
		BSL = BSL0
	),
	specialize_rule(BSL,Rule,SpecRule,Lit).

filter_modeb([],_Pred,[]).
filter_modeb([Modeb|RestModeb],Pred,[Modeb|RestBSL]):-
	Modeb =.. [_PredMode|_],
	Modeb @>= Pred,
	!,
	filter_modeb(RestModeb,Pred,RestBSL).
filter_modeb([_|RestModeb],Pred,RestBSL):-
	filter_modeb(RestModeb,Pred,RestBSL).

skolemize(Theory,Theory1):-
	copy_term(Theory,Theory1),
	term_variables(Theory1,Vars),
	skolemize1(Vars,1).

skolemize1([],_).
skolemize1([Var|R],K):-
	atomic_list_concat([s,K],Skolem),
	Var = Skolem,
	K1 is K + 1,
	skolemize1(R,K1).
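
% Illustrative query (hypothetical, for documentation only):
% ?- skolemize([p(X),q(X,Y)],T).
% T = [p(s1),q(s1,s2)]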

banned_clause(H,B):-
	skolemize([H,B],[H1,B1]),
	banned(H2,B2),
	mysublist(H2,H1),
	mysublist(B2,B1).

mysublist([],_).

mysublist([A\==B|T],L):-
	!,
	A\==B,
	mysublist(T,L).
mysublist([H|T],L):-
	nth(_,L,H,R),
	mysublist(T,R).

check_ref(H,B):-
	copy_term((H,B),(H1,B1)),
	numbervars((H1,B1),0,_N),
	(ref(H1,B1)->
		fail
	;
		assert(ref(H1,B1))
	).

specialize_rule([Lit|_RLit],Rule,SpecRul,SLit):-
	Rule = rule(ID,LH,BL,true),
	remove_prob(LH,LH1),
	append(LH1,BL,ALL),
	specialize_rule1(Lit,ALL,SLit),
	append(BL,[SLit],BL1),
	(lookahead(SLit,LLit1);lookahead_cons(SLit,LLit1)),
	specialize_rule_la(LLit1,LH1,BL1,BL2),
	append(LH1,BL2,ALL2),
	extract_fancy_vars(ALL2,Vars1),
	length(Vars1,NV),
	setting(max_var,MV),
	NV=<MV,
	\+ banned_clause(LH1,BL2),
	SpecRul = rule(ID,LH,BL2,true).

specialize_rule([Lit|_RLit],Rule,SpecRul,SLit):-
	Rule = rule(ID,LH,BL,true),
	remove_prob(LH,LH1),
	append(LH1,BL,ALL),
	specialize_rule1(Lit,ALL,SLit),
	% \+ member(SLit,LH1), %%%%
	\+ lookahead_cons(SLit,_),
	append(BL,[SLit],BL1),
	append(LH1,BL1,ALL1),
	% dv(LH1,BL1,DList), % var-depth list DList in output
	extract_fancy_vars(ALL1,Vars1),
	length(Vars1,NV),
	setting(max_var,MV),
	NV=<MV,
	setting(maxdepth_var,_MD),
	% exceed_depth(DList,MD), % fails if a sublist exceeds MD
	\+ banned_clause(LH1,BL1),
	SpecRul = rule(ID,LH,BL1,true).

specialize_rule([_|RLit],Rule,SpecRul,Lit):-
	specialize_rule(RLit,Rule,SpecRul,Lit).

specialize_rule_la([],_LH1,BL1,BL1).

specialize_rule_la([Lit1|T],LH1,BL1,BL3):-
	copy_term(Lit1,Lit2),
	modeb(_,Lit2),
	append(LH1,BL1,ALL1),
	specialize_rule1(Lit2,ALL1,SLit1),
	append(BL1,[SLit1],BL2),
	specialize_rule_la(T,LH1,BL2,BL3).

specialize_rule_la_bot([],Bot,Bot,BL,BL).

specialize_rule_la_bot([Lit|T],Bot0,Bot,BL1,BL3):-
	delete_one(Bot0,Bot1,Lit),
	append(BL1,[Lit],BL2),
	specialize_rule_la_bot(T,Bot1,Bot,BL2,BL3).

remove_prob(['':_P],[]):-!.

remove_prob([X:_|R],[X|R1]):-
	remove_prob(R,R1).
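
% Illustrative query (hypothetical, for documentation only):
% ?- remove_prob([p(X):0.5,'':0.5],L).
% L = [p(X)]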

specialize_rule1(Lit,Lits,SpecLit):-
	Lit =.. [Pred|Args],
	exctract_type_vars(Lits,TypeVars0),
	remove_duplicates(TypeVars0,TypeVars),
	take_var_args(Args,TypeVars,Args1),
	SpecLit =.. [Pred|Args1],
	\+ member_eq(SpecLit,Lits).

convert_to_input_vars([],[]):-!.

convert_to_input_vars([+T|RT],[+T|RT1]):-
	!,
	convert_to_input_vars(RT,RT1).

convert_to_input_vars([-T|RT],[+T|RT1]):-
	convert_to_input_vars(RT,RT1).

member_eq(X,[Y|_List]) :-
	X == Y.

member_eq(X,[_|List]) :-
	member_eq(X,List).

remove_eq(X,[Y|R],R):-
	X == Y,
	!.

remove_eq(X,[_|R],R1):-
	remove_eq(X,R,R1).

linked_clause(X):-
	linked_clause(X,[]).

linked_clause([],_).

linked_clause([L|R],PrevLits):-
	term_variables(PrevLits,PrevVars),
	input_variables(L,InputVars),
	linked(InputVars,PrevVars),!,
	linked_clause(R,[L|PrevLits]).

linked([],_).

linked([X|R],L) :-
	member_eq(X,L),
	!,
	linked(R,L).

input_variables(\+ LitM,InputVars):-
	!,
	LitM=..[P|Args],
	length(Args,LA),
	length(Args1,LA),
	Lit1=..[P|Args1],
	copy_term(LitM,Lit0),
	modeb(_,Lit1),
	Lit1 =.. [P|Args1],
	convert_to_input_vars(Args1,Args2),
	Lit2 =.. [P|Args2],
	input_vars(Lit0,Lit2,InputVars).

input_variables(LitM,InputVars):-
	LitM=..[P|Args],
	length(Args,LA),
	length(Args1,LA),
	Lit1=..[P|Args1],
	modeb(_,Lit1),
	input_vars(LitM,Lit1,InputVars).

input_variables(LitM,InputVars):-
	LitM=..[P|Args],
	length(Args,LA),
	length(Args1,LA),
	Lit1=..[P|Args1],
	modeh(_,Lit1),
	input_vars(LitM,Lit1,InputVars).

input_vars(Lit,Lit1,InputVars):-
	Lit =.. [_|Vars],
	Lit1 =.. [_|Types],
	input_vars1(Vars,Types,InputVars).

input_vars1([],_,[]).

input_vars1([V|RV],[+_T|RT],[V|RV1]):-
	!,
	input_vars1(RV,RT,RV1).

input_vars1([_V|RV],[_|RT],RV1):-
	input_vars1(RV,RT,RV1).
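
% Illustrative query (hypothetical mode types, for documentation only):
% with types [+person,-course], only the +person argument is an input variable:
% ?- input_vars1([X,Y],[+person,-course],V).
% V = [X]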

exctract_type_vars([],[]).

exctract_type_vars([Lit|RestLit],TypeVars):-
	Lit =.. [Pred|Args],
	length(Args,L),
	length(Args1,L),
	Lit1 =.. [Pred|Args1],
	take_mode(Lit1),
	type_vars(Args,Args1,Types),
	exctract_type_vars(RestLit,TypeVars0),
	!,
	append(Types,TypeVars0,TypeVars).

take_mode(Lit):-
	modeh(_,Lit),!.

take_mode(Lit):-
	modeb(_,Lit),!.

take_mode(Lit):-
	mode(_,Lit),!.

type_vars([],[],[]).

type_vars([V|RV],[+T|RT],[V=T|RTV]):-
	!,
	type_vars(RV,RT,RTV).

type_vars([V|RV],[-T|RT],[V=T|RTV]):-atom(T),!,
	type_vars(RV,RT,RTV).

type_vars([_V|RV],[_T|RT],RTV):-
	type_vars(RV,RT,RTV).

take_var_args([],_,[]).

take_var_args([+T|RT],TypeVars,[V|RV]):-
	!,
	member(V=T,TypeVars),
	take_var_args(RT,TypeVars,RV).

take_var_args([-T|RT],TypeVars,[_V|RV]):-
	atom(T),
	take_var_args(RT,TypeVars,RV).

take_var_args([-T|RT],TypeVars,[V|RV]):-
	member(V=T,TypeVars),
	take_var_args(RT,TypeVars,RV).

take_var_args([T|RT],TypeVars,[T|RV]):-
	T\= + _,(T\= - _; T= - A,number(A)),
	take_var_args(RT,TypeVars,RV).

choose_rule(Theory,Rule):-
	( setting(mcts_covering,true) ->
		mcts_restart(Restart),
		nth(K,Theory,Rule),
		K >= Restart
	;
		member(Rule,Theory)
	).
	%last(Theory,Rule).

add_rule(Theory,add(rule(ID,H,[],true))):-
	new_id(ID),
	findall(HL, modeh(_,HL), HLS),
	length(HLS,NH),
	P is 1/(NH+1),
	add_probs(HLS,H,P),
	\+ member(rule(_,H,[],true),Theory).

add_rule(Theory,TheoryGen):-
	findall(HL, modeh(_,HL), HLS),
	add_rule(HLS,Theory,TheoryGen).

add_rule([X|_R],Theory,TheoryGen) :-
	new_id(ID),
	X =.. [P|A],
	length(A,LA),
	length(A1,LA),
	PH =.. [P|A1],
	TheoryGen = add(rule(ID,[PH:0.5,'':0.5],[],true)),
	\+ member(rule(_,[PH:_,'':_],[],true),Theory).

add_rule([_X|R],Theory,TheoryGen) :-
	add_rule(R,Theory,TheoryGen).

add_probs([],['':P],P):-!.

add_probs([H|T],[H:P|T1],P):-
	add_probs(T,T1,P).

extract_fancy_vars(List,Vars):-
	term_variables(List,Vars0),
	fancy_vars(Vars0,1,Vars).

fancy_vars([],_,[]).

fancy_vars([X|R],N,[NN2=X|R1]):-
	name(N,NN),
	append([86],NN,NN1), % 86 is the character code of 'V'
	name(NN2,NN1),
	N1 is N + 1,
	fancy_vars(R,N1,R1).
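
% Illustrative query (hypothetical, for documentation only):
% ?- extract_fancy_vars([p(X,Y)],V).
% V = ['V1'=X,'V2'=Y]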

delete_one([X|R],R,X).

delete_one([X|R],[X|R1],D):-
	delete_one(R,R1,D).
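
% Illustrative query (hypothetical, for documentation only); on backtracking
% each element is deleted in turn:
% ?- delete_one([a,b,c],R,X).
% X = a, R = [b,c] ;
% X = b, R = [a,c] ;
% X = c, R = [a,b]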

remove_last([_X],[]) :-
	!.

remove_last([X|R],[X|R1]):-
	remove_last(R,R1).

delete_matching([],_El,[]).

delete_matching([El|T],El,T1):-!,
	delete_matching(T,El,T1).

delete_matching([H|T],El,[H|T1]):-
	delete_matching(T,El,T1).

packages/cplint/lemur/slipcover_lemur.pl (new file, 1410 lines; diff suppressed because it is too large)
@@ -13,7 +13,7 @@ BINDIR = $(EROOTDIR)/bin
# where YAP should look for libraries
#
LIBDIR=@libdir@/Yap
DESTDIR=$(prefix)/share/Yap
DESTDIR=$(prefix)/lib/Yap
#
#
CC=@CC@