test program

Fabrizio Riguzzi 2013-09-17 13:04:22 +02:00
parent 50287c268b
commit faf7726452
4 changed files with 576 additions and 32 deletions

View File

@@ -7,7 +7,7 @@
<meta name="originator" content="TeX4ht (http://www.cse.ohio-state.edu/~gurari/TeX4ht/)">
<!-- html -->
<meta name="src" content="manual.tex">
<meta name="date" content="2013-09-04 21:32:00">
<meta name="date" content="2013-09-17 13:02:00">
<link rel="stylesheet" type="text/css" href="manual.css">
</head><body
>
@@ -21,7 +21,7 @@ class="cmr-12">Fabrizio Riguzzi</span>
<br /><span
class="cmr-12">fabrizio.riguzzi@unife.it</span></div><br />
<div class="date" ><span
class="cmr-12">September 4, 2013</span></div>
class="cmr-12">September 17, 2013</span></div>
</div>
<h3 class="sectionHead"><span class="titlemark">1 </span> <a
id="x1-10001"></a>Introduction</h3>
@@ -1339,69 +1339,121 @@ class="cmtt-10">background_clauses</span></span></span> (values: integer, d
<li class="itemize"><span class="obeylines-h"><span class="verb"><span
class="cmtt-10">maxdepth_var</span></span></span> (values: integer, default value: 2, valid for SLIPCOVER):
maximum depth of variables in clauses (as defined in <span class="cite">[<a
href="#XDBLP:journals/ai/Cohen95">10</a>]</span>).</li></ul>
<!--l. 633--><p class="noindent" >
href="#XDBLP:journals/ai/Cohen95">10</a>]</span>).
</li>
<li class="itemize"><span class="obeylines-h"><span class="verb"><span
class="cmtt-10">score</span></span></span> (values: <span class="obeylines-h"><span class="verb"><span
class="cmtt-10">ll</span></span></span>, <span class="obeylines-h"><span class="verb"><span
class="cmtt-10">aucpr</span></span></span>, default value <span class="obeylines-h"><span class="verb"><span
class="cmtt-10">ll</span></span></span>, valid for SLIPCOVER):
determines the score function for refinement: if set to <span class="obeylines-h"><span class="verb"><span
class="cmtt-10">ll</span></span></span>, log likelihood is
used, if set to <span class="obeylines-h"><span class="verb"><span
class="cmtt-10">aucpr</span></span></span>, the area under the precision-recall curve is used.</li></ul>
<!--l. 635--><p class="noindent" >
<h4 class="subsectionHead"><span class="titlemark">5.3 </span> <a
id="x1-130005.3"></a>Commands</h4>
<!--l. 634--><p class="noindent" >To execute CEM, load <span
<!--l. 636--><p class="noindent" >To execute CEM, load <span
class="cmtt-10">em.pl </span>with
<div class="verbatim" id="verbatim-54">
?:-&#x00A0;use_module(library(&#8217;cplint/em&#8217;)).
</div>
<!--l. 637--><p class="nopar" > and call:
<!--l. 639--><p class="nopar" > and call:
<div class="verbatim" id="verbatim-55">
?:-&#x00A0;em(stem).
</div>
<!--l. 641--><p class="nopar" > To execute RIB, load <span
<!--l. 643--><p class="nopar" > To execute RIB, load <span
class="cmtt-10">rib.pl </span>with
<div class="verbatim" id="verbatim-56">
?:-&#x00A0;use_module(library(&#8217;cplint/rib&#8217;)).
</div>
<!--l. 645--><p class="nopar" > and call:
<!--l. 647--><p class="nopar" > and call:
<div class="verbatim" id="verbatim-57">
?:-&#x00A0;ib_par(stem).
</div>
<!--l. 649--><p class="nopar" > To execute EMBLEM, load <span
<!--l. 651--><p class="nopar" > To execute EMBLEM, load <span
class="cmtt-10">slipcase.pl </span>with
<div class="verbatim" id="verbatim-58">
?:-&#x00A0;use_module(library(&#8217;cplint/slipcase&#8217;)).
</div>
<!--l. 653--><p class="nopar" > and call
<!--l. 655--><p class="nopar" > and call
<div class="verbatim" id="verbatim-59">
?:-&#x00A0;em(stem).
</div>
<!--l. 657--><p class="nopar" > To execute SLIPCASE, load <span
<!--l. 659--><p class="nopar" > To execute SLIPCASE, load <span
class="cmtt-10">slipcase.pl </span>with
<div class="verbatim" id="verbatim-60">
?:-&#x00A0;use_module(library(&#8217;cplint/slipcase&#8217;)).
</div>
<!--l. 661--><p class="nopar" > and call
<!--l. 663--><p class="nopar" > and call
<div class="verbatim" id="verbatim-61">
?:-&#x00A0;sl(stem).
</div>
<!--l. 665--><p class="nopar" > To execute SLIPCOVER, load <span
<!--l. 667--><p class="nopar" > To execute SLIPCOVER, load <span
class="cmtt-10">slipcover.pl </span>with
<div class="verbatim" id="verbatim-62">
?:-&#x00A0;use_module(library(&#8217;cplint/slipcover&#8217;)).
</div>
<!--l. 669--><p class="nopar" > and call
<!--l. 671--><p class="nopar" > and call
<div class="verbatim" id="verbatim-63">
?:-&#x00A0;sl(stem).
</div>
<!--l. 673--><p class="nopar" >
<!--l. 675--><p class="nopar" >
<h4 class="subsectionHead"><span class="titlemark">5.4 </span> <a
id="x1-140005.4"></a>Learning Examples</h4>
<!--l. 675--><p class="noindent" >The subfolders <span class="obeylines-h"><span class="verb"><span
id="x1-140005.4"></a>Testing</h4>
<!--l. 677--><p class="noindent" >To test the theories learned, load <span
class="cmtt-10">test.pl </span>with
<div class="verbatim" id="verbatim-64">
?:-&#x00A0;use_module(library(&#8217;cplint/test&#8217;)).
</div>
<!--l. 680--><p class="nopar" > and call
<div class="verbatim" id="verbatim-65">
?:-&#x00A0;main([&#x003C;stem_fold1&#x003E;,...,&#x003C;stem_foldn&#x003E;],[&#x003C;testing_set_fold1&#x003E;,...,
&#x00A0;<br />&#x00A0;&#x00A0;&#x003C;testing_set_foldn&#x003E;]).
</div>
<!--l. 685--><p class="nopar" > For example, if you want to test the theory in <span class="obeylines-h"><span class="verb"><span
class="cmtt-10">ai_train.rules</span></span></span> on the set <span class="obeylines-h"><span class="verb"><span
class="cmtt-10">ai.kb</span></span></span>,
you can call
<div class="verbatim" id="verbatim-66">
?:-&#x00A0;main([ai_train],[ai]).
</div>
<!--l. 689--><p class="nopar" > The testing program has the following parameter:
<ul class="itemize1">
<li class="itemize"><span class="obeylines-h"><span class="verb"><span
class="cmtt-10">neg_ex</span></span></span> (values: <span class="obeylines-h"><span class="verb"><span
class="cmtt-10">given</span></span></span>, <span class="obeylines-h"><span class="verb"><span
class="cmtt-10">cw</span></span></span>, default value: <span class="obeylines-h"><span class="verb"><span
class="cmtt-10">cw</span></span></span>): if set to <span class="obeylines-h"><span class="verb"><span
class="cmtt-10">given</span></span></span>, the negative
examples are taken from <span class="obeylines-h"><span class="verb"><span
class="cmtt-10">&#x003C;testing_set_foldi&#x003E;.kb</span></span></span>, i.e., those example
<span class="obeylines-h"><span class="verb"><span
class="cmtt-10">ex</span></span></span> stored as <span class="obeylines-h"><span class="verb"><span
class="cmtt-10">neg(ex)</span></span></span>; if set to <span class="obeylines-h"><span class="verb"><span
class="cmtt-10">cw</span></span></span>, the negative examples are generated
according to the closed world assumption, i.e., all atoms for target
predicates that are not positive examples. The set of all atoms is obtained
by collecting the set of constants for each type of the arguments of the
target predicate.</li></ul>
<!--l. 696--><p class="noindent" >
<h4 class="subsectionHead"><span class="titlemark">5.5 </span> <a
id="x1-150005.5"></a>Learning Examples</h4>
<!--l. 697--><p class="noindent" >The subfolders <span class="obeylines-h"><span class="verb"><span
class="cmtt-10">em</span></span></span>, <span class="obeylines-h"><span class="verb"><span
class="cmtt-10">rib</span></span></span>, <span class="obeylines-h"><span class="verb"><span
class="cmtt-10">slipcase</span></span></span> and <span class="obeylines-h"><span class="verb"><span
@@ -1409,26 +1461,27 @@ class="cmtt-10">slipcover</span></span></span> of the <span class="obeylines-h">
class="cmtt-10">packages/cplint</span></span></span> folder in
the Yap git distribution contain examples of input and output files for the learning
algorithms.
<!--l. 678--><p class="noindent" >
<!--l. 700--><p class="noindent" >
<h3 class="sectionHead"><span class="titlemark">6 </span> <a
id="x1-150006"></a>License</h3>
<!--l. 683--><p class="noindent" ><span
id="x1-160006"></a>License</h3>
<!--l. 705--><p class="noindent" ><span
class="cmtt-10">cplint</span>, as Yap, follows the Artistic License 2.0 that you can find in Yap CVS root
dir. The copyright is by Fabrizio Riguzzi.
<!--l. 686--><p class="indent" > The modules in the approx subdirectory use SimplecuddLPADs, a modification of
<!--l. 708--><p class="indent" > The modules in the approx subdirectory use SimplecuddLPADs, a modification of
the <a
href="http://dtai.cs.kuleuven.be/problog/download.html" > Simplecudd </a> library whose copyright is by Katholieke Universiteit Leuven and
that follows the Artistic License 2.0.
<!--l. 689--><p class="indent" > Some modules use the library <a
<!--l. 711--><p class="indent" > Some modules use the library <a
href="http://vlsi.colorado.edu/~fabio/" > CUDD </a> for manipulating BDDs that is included in
glu. For the use of CUDD, the following license must be accepted:
<!--l. 694--><p class="indent" > Copyright (c) 1995-2004, Regents of the University of Colorado
<!--l. 696--><p class="indent" > All rights reserved.
<!--l. 698--><p class="indent" > Redistribution and use in source and binary forms, with or without modification,
<!--l. 716--><p class="indent" > Copyright (c) 1995-2004, Regents of the University of Colorado
<!--l. 718--><p class="indent" > All rights reserved.
<!--l. 720--><p class="indent" > Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
<ul class="itemize1">
<li class="itemize">Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
</li>
<li class="itemize">Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
@@ -1437,8 +1490,7 @@ are permitted provided that the following conditions are met:
<li class="itemize">Neither the name of the University of Colorado nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.</li></ul>
<!--l. 715--><p class="noindent" >THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS <br
<!--l. 737--><p class="noindent" >THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS <br
class="newline" />AND CONTRIBUTORS &#8221;AS IS&#8221; AND ANY EXPRESS OR IMPLIED
WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
@@ -1452,7 +1504,7 @@ class="newline" />AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
<!--l. 729--><p class="indent" > <span
<!--l. 751--><p class="indent" > <span
class="cmtt-10">lpad.pl</span>, <span
class="cmtt-10">semlpad.pl </span>and <span
class="cmtt-10">cpl.pl </span>are based on the SLG system by Weidong
@@ -1462,7 +1514,7 @@ href="http://www.cs.sunysb.edu/~warren/" > David Scott Warren </a>, Copyright (C
on this copyright.
<!--l. 1--><p class="noindent" >
<h3 class="likesectionHead"><a
id="x1-160006"></a>References</h3>
id="x1-170006"></a>References</h3>
<!--l. 1--><p class="noindent" >
<div class="thebibliography">
<p class="bibitem" ><span class="biblabel">
@@ -1480,6 +1532,7 @@ class="cmti-10">Proceedings of the 26th Italian</span>
class="cmti-10">Conference on Computational Logic (CILC2011), Pescara, Italy, 31 August</span>
<span
class="cmti-10">31-2 September, 2011</span>, 2011.
</p>
<p class="bibitem" ><span class="biblabel">
[3]<span class="bibsp">&#x00A0;&#x00A0;&#x00A0;</span></span><a
@@ -1490,7 +1543,6 @@ class="cmti-10">31-2 September, 2011</span>, 2011.
<p class="bibitem" ><span class="biblabel">
[4]<span class="bibsp">&#x00A0;&#x00A0;&#x00A0;</span></span><a
id="XBelRig11-ILP11-IC"></a>Elena Bellodi and Fabrizio Riguzzi. Learning the structure of
probabilistic logic programs. In <span
class="cmti-10">Inductive Logic Programming, 21th</span>
<span
@@ -1537,6 +1589,7 @@ class="cmti-10">LNCS</span>, pages 30&#8211;37. Springer, 2011.
delaying for general logic programs. <span
class="cmti-10">Journal of the ACM</span>, 43(1):20&#8211;74, 1996.
</p>
<p class="bibitem" ><span class="biblabel">
[10]<span class="bibsp">&#x00A0;&#x00A0;&#x00A0;</span></span><a
id="XDBLP:journals/ai/Cohen95"></a>William&#x00A0;W. Cohen. Pac-learning non-recursive prolog clauses. <span
@@ -1551,7 +1604,6 @@ class="cmti-10">Intell.</span>, 79(1):1&#8211;38, 1995.
class="cmti-10">International Joint</span>
<span
class="cmti-10">Conference on Artificial Intelligence</span>, pages 2462&#8211;2467, 2007.
</p>
<p class="bibitem" ><span class="biblabel">
[12]<span class="bibsp">&#x00A0;&#x00A0;&#x00A0;</span></span><a
@@ -1596,6 +1648,7 @@ class="cmti-10">LNAI</span>, pages 109&#8211;120. Springer, 2007.
class="cmti-10">Proceedings of the 14th RCRA workshop Experimental Evaluation of</span>
<span
class="cmti-10">Algorithms for Solving Problems with Combinatorial Explosion</span>, 2007.
</p>
<p class="bibitem" ><span class="biblabel">
[18]<span class="bibsp">&#x00A0;&#x00A0;&#x00A0;</span></span><a
@@ -1612,7 +1665,6 @@ class="cmti-10">Proceedings of the 26th Italian</span>
class="cmti-10">Conference on Computational Logic (CILC2011), Pescara, Italy, 31</span>
<span
class="cmti-10">August-2 September, 2011</span>, 2011.
</p>
<p class="bibitem" ><span class="biblabel">
[20]<span class="bibsp">&#x00A0;&#x00A0;&#x00A0;</span></span><a
@@ -1661,6 +1713,7 @@ class="cmti-10">International Conference on Logic</span>
<span
class="cmti-10">Programming</span>, volume 3131 of <span
class="cmti-10">LNCS</span>, pages 195&#8211;209. Springer, 2004.
</p>
</div>

Binary file not shown.

View File

@@ -629,6 +629,8 @@ maximum number of theory search iterations
maximum numbers of background clauses
\item \verb|maxdepth_var| (values: integer, default value: 2, valid for SLIPCOVER): maximum depth of
variables in clauses (as defined in \cite{DBLP:journals/ai/Cohen95}).
\item \verb|score| (values: \verb|ll|, \verb|aucpr|, default value: \verb|ll|, valid for SLIPCOVER): determines the score function for refinement: if set to \verb|ll|, the log likelihood is used; if set to \verb|aucpr|, the area under the
precision-recall curve is used.
\end{itemize}
\subsection{Commands}
To execute CEM, load \texttt{em.pl} with
@@ -671,6 +673,26 @@ and call
\begin{verbatim}
?:- sl(stem).
\end{verbatim}
\subsection{Testing}
To test the theories learned, load \texttt{test.pl} with
\begin{verbatim}
?:- use_module(library('cplint/test')).
\end{verbatim}
and call
\begin{verbatim}
?:- main([<stem_fold1>,...,<stem_foldn>],[<testing_set_fold1>,...,
<testing_set_foldn>]).
\end{verbatim}
For example, if you want to test the theory in \verb|ai_train.rules| on the set \verb|ai.kb|, you can call
\begin{verbatim}
?:- main([ai_train],[ai]).
\end{verbatim}
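For each stem, \texttt{test.pl} builds the file names by appending a fixed extension; for the call above the files involved are the following (the role given for each file is only indicative):
\begin{verbatim}
ai_train.l      declarations (output, modeh, modeb) and settings, reconsulted first
ai_train.bg     background knowledge, loaded only if the file exists
ai_train.rules  the learned theory to be tested
ai.kb           the interpretations used for testing
\end{verbatim}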
The testing program has the following parameter:
\begin{itemize}
\item \verb|neg_ex| (values: \verb|given|, \verb|cw|, default value: \verb|cw|): if set to \verb|given|, the negative examples
are taken from \verb|<testing_set_foldi>.kb|, i.e., they are the examples \verb|ex| stored as \verb|neg(ex)|; if set to \verb|cw|, the negative examples are generated according to the closed world assumption, i.e., they are all the atoms for the target predicates that are not positive examples (see the schematic example after this list). The set of all atoms is obtained by collecting the set of constants for each type of the arguments of the target predicate.
\end{itemize}
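As a schematic example (the target predicate \verb|advisedby/2| and the constants are invented for illustration, and any wrapping of the examples into models used by the actual datasets is omitted), a testing fold could contain:
\begin{verbatim}
advisedby(harry,ben).        % positive example for the target predicate
neg(advisedby(harry,tom)).   % negative example, used only when neg_ex is given
\end{verbatim}
With \verb|neg_ex| set to \verb|cw| the \verb|neg/1| facts are not used: every ground atom of \verb|advisedby/2| built from constants of the appropriate types that is not a positive example is treated as a negative example.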
\subsection{Learning Examples}
The subfolders \verb|em|, \verb|rib|, \verb|slipcase| and \verb|slipcover| of the \verb|packages/cplint| folder in the Yap git distribution
contain examples of input and output files for the learning algorithms.

View File

@@ -0,0 +1,469 @@
:-multifile setting/2.
:-source.
:-use_module(library('cplint/slipcover')).
setting(neg_ex,cw).
/* allowed values: given, cw */
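% main(+Stems,+TestSets): tests the theories <stem>.rules of the folds in Stems on the
% interpretations in <testing_set>.kb for the folds in TestSets; writes the conditional
% log likelihood of the test examples to cll1.pl and the per-fold and overall areas under
% the ROC and PR curves to areas.csv, curve_roc.m and curve_pr.m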
main(TrainP,TestSets):-
system('rm -f areas.csv'),
system('rm -f curve_roc.m'),
system('rm -f curve_pr.m'),
open('cll1.pl',write,S),
open('areas.csv',append,SA),
format(SA,"Fold;\tCLL;\t AUCROC;\t AUCPR~n",[]),
close(SA),
test(TrainP,TestSets,S,[],LG,0,Pos,0,Neg,0,CLL),
keysort(LG,LG1),
format(S,"cll(all,post,~d,~d,[",[Pos,Neg]),
writes(LG1,S),
reverse(LG1,LGR1),
compute_areas(LGR1,Pos,Neg,AUCROC,AUCPR),
open('areas.csv',append,SA1),
format(SA1,"~a;\t ~f;\t ~f;\t ~f~n",[all,CLL,AUCROC,AUCPR]),
close(SA1),
close(S).
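% test(+Stems,+TestSets,+S,+LG0,-LG,+Pos0,-Pos,+Neg0,-Neg,+CLL0,-CLL): tests each fold in
% turn, accumulating the Probability-Example pairs, the numbers of positive and negative
% examples and the conditional log likelihood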
test([],[],_S,LG,LG,Pos,Pos,Neg,Neg,CLL,CLL).
test([HP|TP],[HT|TT],S,LG0,LG,Pos0,Pos,Neg0,Neg,CLL0,CLL):-
test_fold(HP,HT,S,LG1,Pos1,Neg1,CLL1),
append(LG0,LG1,LG2),
Pos2 is Pos0+Pos1,
Neg2 is Neg0+Neg1,
CLL2 is CLL0+CLL1,
test(TP,TT,S,LG2,LG,Pos2,Pos,Neg2,Neg,CLL2,CLL).
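% test_fold(+Stem,+TestSet,+S,-LGOrd,-Pos,-Neg,-CLL): loads <Stem>.l, <Stem>.bg (if it
% exists) and <Stem>.rules, loads the models in <TestSet>.kb, computes the probability of
% each example and writes the per-fold CLL and areas to cll1.pl and areas.csv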
test_fold(P,F,S,LGOrd,Pos,Neg,CLL1):-
atom_concat([P,'.rules'],PR),
atom_concat([P,'.bg'],PBG),
atom_concat([P,'.l'],FL),
atom_concat([F,'.kb'],TKB),
reconsult(FL),
(file_exists(PBG)->
set(compiling,on),
load(PBG,_ThBG,RBG),
set(compiling,off),
generate_clauses(RBG,_RBG1,0,[],ThBG),
assert_all(ThBG)
;
true
),
format("~a~n",[TKB]),
load_models(TKB,DB),
set(compiling,on),
load(PR,Th1,R1),
set(compiling,off),
assert_all(Th1),
assert_all(R1),
find_ex(DB,LG,Pos,Neg),
compute_CLL_atoms(LG,0,0,CLL1,LG1),
(file_exists(PBG)->
retract_all(ThBG)
;
true
),
retract_all(Th1),
retract_all(R1),
keysort(LG1,LGOrd),
reverse(LGOrd,LGROrd),
compute_areas(LGROrd,Pos,Neg,AUCROC,AUCPR),
format(S,"cll(~a,post,~d,~d,[",[F,Pos,Neg]),
writes(LGOrd,S),
open('areas.csv',append,SA),
format(SA,"~a;\t ~f;\t ~f;\t ~f~n",[F,CLL1,AUCROC,AUCPR]),
close(SA).
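% compute_areas(+LG,+Pos,+Neg,-AUCROC,-AUCPR): given the Probability-Example pairs sorted
% by decreasing probability, computes the areas under the ROC and precision-recall curves
% and appends the curves to curve_roc.m and curve_pr.m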
compute_areas(LG,Pos,Neg,AUCROC,AUCPR):-
compute_pointsroc(LG,+inf,0,0,Pos,Neg,[],ROC),
hull(ROC,0,0,0,AUCROC),
open('curve_roc.m',append,SC),
write_p(ROC,SC),
close(SC),
compute_aucpr(LG,Pos,Neg,AUCPR,PR),
open('curve_pr.m',append,SPR),
write_ppr(PR,SPR),
close(SPR).
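% compute_pointsroc(+LG,+P0,+TP,+FP,+FN,+TN,+Points0,-Points): scans the examples by
% decreasing probability and collects the FPR-TPR points of the ROC curve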
compute_pointsroc([],_P0,_TP,_FP,_FN,_TN,P0,P1):-!,
append(P0,[1.0-1.0],P1).
compute_pointsroc([P- (\+ _)|T],P0,TP,FP,FN,TN,Po0,Po1):-!,
(P<P0->
FPR is FP/(FP+TN),
TPR is TP/(TP+FN),
append(Po0,[(FPR-TPR)],Po2),
P1=P
;
Po2=Po0,
P1=P0
),
FP1 is FP+1,
TN1 is TN-1,
compute_pointsroc(T,P1,TP,FP1,FN,TN1,Po2,Po1).
compute_pointsroc([P- _|T],P0,TP,FP,FN,TN,Po0,Po1):-!,
(P<P0->
FPR is FP/(FP+TN),
TPR is TP/(TP+FN),
append(Po0,[FPR-TPR],Po2),
P1=P
;
Po2=Po0,
P1=P0
),
TP1 is TP+1,
FN1 is FN-1,
compute_pointsroc(T,P1,TP1,FP,FN1,TN,Po2,Po1).
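% hull(+Points,+FPR,+TPR,+AUC0,-AUC): area under the ROC curve by the trapezoidal rule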
hull([],FPR,TPR,AUC0,AUC1):-
AUC1 is AUC0+(1-FPR)*(1+TPR)/2.
hull([FPR1-TPR1|T],FPR,TPR,AUC0,AUC1):-
AUC2 is AUC0+(FPR1-FPR)*(TPR1+TPR)/2,
hull(T,FPR1,TPR1,AUC2,AUC1).
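% compute_aucpr(+LG,+Pos,+Neg,-A,-PR): computes the area under the precision-recall curve
% and returns the Recall-Precision points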
compute_aucpr(L,Pos,Neg,A,PR):-
L=[P_0-E|TL],
(E= (\+ _ )->
FP=1,
TP=0,
FN=Pos,
TN is Neg -1
;
FP=0,
TP=1,
FN is Pos -1,
TN=Neg
),
compute_curve_points(TL,P_0,TP,FP,FN,TN,Points),
Points=[R0-P0|_TPoints],
(R0=:=0,P0=:=0->
Flag=true
;
Flag=false
),
area(Points,Flag,Pos,0,0,0,A,[],PR).
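% compute_curve_points(+LG,+P0,+TP,+FP,+FN,+TN,-Points): scans the examples by decreasing
% probability and collects the Recall-Precision points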
compute_curve_points([],_P0,TP,FP,_FN,_TN,[1.0-Prec]):-!,
Prec is TP/(TP+FP).
compute_curve_points([P- (\+ _)|T],P0,TP,FP,FN,TN,Pr):-!,
(P<P0->
Prec is TP/(TP+FP),
Rec is TP/(TP+FN),
Pr=[Rec-Prec|Pr1],
P1=P
;
Pr=Pr1,
P1=P0
),
FP1 is FP+1,
TN1 is TN-1,
compute_curve_points(T,P1,TP,FP1,FN,TN1,Pr1).
compute_curve_points([P- _|T],P0,TP,FP,FN,TN,Pr):-!,
(P<P0->
Prec is TP/(TP+FP),
Rec is TP/(TP+FN),
Pr=[Rec-Prec|Pr1],
P1=P
;
Pr=Pr1,
P1=P0
),
TP1 is TP+1,
FN1 is FN-1,
compute_curve_points(T,P1,TP1,FP,FN1,TN,Pr1).
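% area(+Points,+Flag,+Pos,+TPA,+FPA,+A0,-A,+PR0,-PR): accumulates the area under the PR
% curve, interpolating between points whenever the number of true positives increases by
% more than one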
area([],_Flag,_Pos,_TPA,_FPA,A,A,PR,PR).
area([R0-P0|T],Flag,Pos,TPA,FPA,A0,A,PR0,PR):-
TPB is R0*Pos,
(TPB=:=0->
A1=A0,
FPB=0,
PR2=PR0,
PR=[R0-P0|PR3]
;
R_1 is TPA/Pos,
(TPA=:=0->
(Flag=false->
P_1=P0,
PR=[0.0-P0|PR3]
;
P_1=0.0,
PR=[0.0-0.0|PR3]
)
;
P_1 is TPA/(TPA+FPA),
PR=PR3
),
FPB is TPB*(1-P0)/P0,
N is TPB-TPA+0.5,
(N<1.0->
append(PR0,[R0-P0],PR2),
A1=A0
;
interpolate(1,N,Pos,R_1,P_1,TPA,FPA,TPB,FPB,A0,A1,[],PR1),
append(PR0,PR1,PR2)
)
),
area(T,Flag,Pos,TPB,FPB,A1,A,PR2,PR3).
interpolate(I,N,_Pos,_R0,_P0,_TPA,_FPA,_TPB,_FPB,A,A,PR,PR):-I>N,!.
interpolate(I,N,Pos,R0,P0,TPA,FPA,TPB,FPB,A0,A,PR0,[R-P|PR]):-
R is (TPA+I)/Pos,
P is (TPA+I)/(TPA+I+FPA+(FPB-FPA)/(TPB-TPA)*I),
A1 is A0+(R-R0)*(P+P0)/2,
I1 is I+1,
interpolate(I1,N,Pos,R,P,TPA,FPA,TPB,FPB,A1,A,PR0,PR).
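% find_ex(+DB,-LG,-Pos,-Neg): collects the positive and negative examples for the output
% predicates; negative examples are read from the models when neg_ex is given and
% generated under the closed world assumption when neg_ex is cw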
find_ex(DB,LG,Pos,Neg):-
findall(P/A,output(P/A),LP),
setting(neg_ex,given),!,
find_ex_pred(LP,DB,[],LG,0,Pos,0,Neg).
find_ex(DB,LG,Pos,Neg):-
findall(P/A,output(P/A),LP),
setting(neg_ex,cw),
find_ex_pred_cw(LP,DB,[],LG,0,Pos,0,Neg).
find_ex_pred([],_DB,LG,LG,Pos,Pos,Neg,Neg).
find_ex_pred([P/A|T],DB,LG0,LG,Pos0,Pos,Neg0,Neg):-
functor(At,P,A),
find_ex_db(DB,At,LG0,LG1,Pos0,Pos1,Neg0,Neg1),
find_ex_pred(T,DB,LG1,LG,Pos1,Pos,Neg1,Neg).
find_ex_db([],_At,LG,LG,Pos,Pos,Neg,Neg).
find_ex_db([H|T],At,LG0,LG,Pos0,Pos,Neg0,Neg):-
At=..[P|L],
At1=..[P,H|L],
findall(At1,At1,LP),
findall(\+ At1,neg(At1),LN),
length(LP,NP),
length(LN,NN),
append([LG0,LP,LN],LG1),
Pos1 is Pos0+NP,
Neg1 is Neg0+NN,
find_ex_db(T,At,LG1,LG,Pos1,Pos,Neg1,Neg).
find_ex_pred_cw([],_DB,LG,LG,Pos,Pos,Neg,Neg).
find_ex_pred_cw([P/A|T],DB,LG0,LG,Pos0,Pos,Neg0,Neg):-
functor(At,P,A),
get_types(At,Types),
remove_duplicates(Types,Types1),
find_ex_db_cw(DB,At,Types1,LG0,LG1,Pos0,Pos1,Neg0,Neg1),
find_ex_pred_cw(T,DB,LG1,LG,Pos1,Pos,Neg1,Neg).
get_types(At,Types):-
modeh(_,At),
At=..[_|Args],
get_args(Args,Types).
get_args([],[]).
get_args([+H|T],[H|T1]):-!,
get_args(T,T1).
get_args([-H|T],[H|T1]):-!,
get_args(T,T1).
get_args([#H|T],[H|T1]):-!,
get_args(T,T1).
get_args([-#H|T],[H|T1]):-!,
get_args(T,T1).
get_args([H|T],[H|T1]):-
get_args(T,T1).
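% get_constants(+Types,+Model,-Constants): for each type, collects the constants appearing
% in the arguments of that type in the atoms of Model, scanning the predicates that use
% the type according to the modeh/modeb declarations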
get_constants([],_M,[]).
get_constants([Type|T],M,[(Type,Co)|C]):-
find_pred_using_type(Type,LP),
find_constants(LP,M,[],Co),
get_constants(T,M,C).
find_pred_using_type(T,L):-
setof((P,Ar,A),pred_type(T,P,Ar,A),L).
pred_type(T,P,Ar,A):-
modeh(_,S),
S=..[P|Args],
length(Args,Ar),
scan_args(Args,T,1,A).
pred_type(T,P,Ar,A):-
modeb(_,S),
S=..[P|Args],
length(Args,Ar),
scan_args(Args,T,1,A).
scan_args([+T|_],T,A,A):-!.
scan_args([-T|_],T,A,A):-!.
scan_args([#T|_],T,A,A):-!.
scan_args([-#T|_],T,A,A):-!.
scan_args([_|Tail],T,A0,A):-
A1 is A0+1,
scan_args(Tail,T,A1,A).
find_constants([],_M,C,C).
find_constants([(P,Ar,A)|T],M,C0,C):-
gen_goal(1,Ar,A,Args,ArgsNoV,V),
G=..[P,M|Args],
setof(V,ArgsNoV^G,LC),
append(C0,LC,C1),
remove_duplicates(C1,C2),
find_constants(T,M,C2,C).
gen_goal(Arg,Ar,_A,[],[],_):-
Arg =:= Ar+1,!.
gen_goal(A,Ar,A,[V|Args],ArgsNoV,V):-!,
Arg1 is A+1,
gen_goal(Arg1,Ar,A,Args,ArgsNoV,V).
gen_goal(Arg,Ar,A,[ArgV|Args],[ArgV|ArgsNoV],V):-
Arg1 is Arg+1,
gen_goal(Arg1,Ar,A,Args,ArgsNoV,V).
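% find_ex_db_cw(+DB,+At,+Types,+LG0,-LG,+Pos0,-Pos,+Neg0,-Neg): for each model, the
% negative examples are all the ground atoms of the target predicate, built from the
% constants of the argument types, that are not positive examples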
find_ex_db_cw([],_At,_Ty,LG,LG,Pos,Pos,Neg,Neg).
find_ex_db_cw([H|T],At,Types,LG0,LG,Pos0,Pos,Neg0,Neg):-
get_constants(Types,H,C),
At=..[P|L],
get_types(At,TypesA),
length(L,N),
length(LN,N),
At1=..[P,H|LN],
findall(At1,At1,LP),
setof(\+ At1,neg_ex(LN,TypesA,At1,C),LNeg),
length(LP,NP),
length(LNeg,NN),
append([LG0,LP,LNeg],LG1),
Pos1 is Pos0+NP,
Neg1 is Neg0+NN,
find_ex_db_cw(T,At,Types,LG1,LG,Pos1,Pos,Neg1,Neg).
neg_ex([],[],At1,_C):-
\+ At1.
neg_ex([H|T],[HT|TT],At1,C):-
member((HT,Co),C),
member(H,Co),
neg_ex(T,TT,At1,C).
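% compute_CLL_atoms(+Examples,+N,+CLL0,-CLL,-LG): computes the probability of each example
% with the theory under test and accumulates the conditional log likelihood, using log(P)
% for positive examples and log(1-P) for negated ones (-10 is added when the argument is 0)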
compute_CLL_atoms([],_N,CLL,CLL,[]):-!.
compute_CLL_atoms([\+ H|T],N,CLL0,CLL1,[PG- (\+ H)|T1]):-!,
rule_n(NR),
init_test(NR),
% write(\+ H),
get_node(H,BDD),!,
ret_prob(BDD,PG),
% write(PG),nl,
end_test,!,
PG1 is 1-PG,
(PG1=:=0.0->
CLL2 is CLL0-10
;
CLL2 is CLL0+ log(PG1)
),
N1 is N+1,
compute_CLL_atoms(T,N1,CLL2,CLL1,T1).
compute_CLL_atoms([H|T],N,CLL0,CLL1,[PG-H|T1]):-
rule_n(NR),
init_test(NR),
% write(H),
get_node(H,BDD),!,
ret_prob(BDD,PG),
% write(PG),nl,
end_test,!,
(PG=:=0.0->
CLL2 is CLL0-10
;
CLL2 is CLL0+ log(PG)
),
N1 is N+1,
compute_CLL_atoms(T,N1,CLL2,CLL1,T1).
writes([H-H1],S):-
format(S,"~f - (~p)]).~n~n",[H,H1]).
writes([H-H1|T],S):-
format(S,"~f - (~p),~n",[H,H1]),
writes(T,S).
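% write_p(+Points,+S) and write_ppr(+Points,+S): emit MATLAB/Octave commands that plot the
% ROC and PR curves and save their areas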
write_p(P,S):-
get_xy(P,PX,PY),
format(S,"x=[",[]),
writesf(PX,S),
format(S,"y=[",[]),
writesf(PY,S),
format(S,"
figure('Name','roc','NumberTitle','off')
set(gca,'XLim',[0.0 1.0])
set(gca,'YLim',[0.0 1.0])
x=[x 1.0]
y=[y 0.0]
k=convhull(x,y)
plot(x(k),y(k),'r-',x,y,'--b+')
A = polyarea(x,y)~n~n
save area_roc.csv A -ascii -append
",
[]).
get_xy([],[],[]).
get_xy([X-Y|T],[X|TX],[Y|TY]):-
get_xy(T,TX,TY).
writesf([H],S):-
format(S,"~f]~n",[H]).
writesf([H|T],S):-
format(S,"~f ",[H]),
writesf(T,S).
write_ppr(P,S):-
get_xy(P,PX,PY),
format(S,"rec=[",[A]),
writesf(PX,S),
format(S,"prec=[",[A]),
writesf(PY,S),
format(S,"
figure('Name','pr','NumberTitle','off')
set(gca,'XLim',[0.0 1.0])
set(gca,'YLim',[0.0 1.0])
rec=[0.0 rec 1.0];
prec=[0.0 prec 0.0];
plot(rec,prec,'--*k')
A=polyarea(rec,prec)
save area_pr.csv A -ascii -append
~n~n",
[]).