Merge branch 'master' of xato:0517
This commit is contained in:
commit
6a2d74ac6e
@ -493,8 +493,7 @@ Prop Yap_GetPredPropByAtom(Atom at, Term cur_mod)
|
||||
return (p0);
|
||||
}
|
||||
|
||||
inline static Prop GetPredPropByAtomHavingLockInThisModule(AtomEntry *ae,
|
||||
Term cur_mod)
|
||||
inline static Prop GetPredPropByAtomHavingLockInThisModule(AtomEntry *ae, Term cur_mod)
|
||||
/* get predicate entry for ap/arity; create it if neccessary. */
|
||||
{
|
||||
Prop p0;
|
||||
@ -528,14 +527,15 @@ Prop Yap_GetPredPropByAtomInThisModule(Atom at, Term cur_mod)
|
||||
return (p0);
|
||||
}
|
||||
|
||||
|
||||
Prop Yap_GetPredPropByFunc(Functor f, Term cur_mod)
|
||||
/* get predicate entry for ap/arity; */
|
||||
{
|
||||
Prop p0;
|
||||
|
||||
FUNC_READ_LOCK(f);
|
||||
|
||||
p0 = GetPredPropByFuncHavingLock(f, cur_mod);
|
||||
|
||||
FUNC_READ_UNLOCK(f);
|
||||
return (p0);
|
||||
}
|
||||
|
@ -3396,10 +3396,18 @@ X_API Functor YAP_IntToFunctor(Int i) { return TR_Functors[i]; }
|
||||
|
||||
X_API void *YAP_shared(void) { return LOCAL_shared; }
|
||||
|
||||
void yap_init(void) {}
|
||||
X_API PredEntry *YAP_TopGoal(void)
|
||||
{
|
||||
YAP_Functor f = Yap_MkFunctor(Yap_LookupAtom("yap_query"),3);
|
||||
Term tmod = MkAtomTerm(Yap_LookupAtom("yapi"));
|
||||
PredEntry *p = RepPredProp(Yap_GetPredPropByFunc(f, tmod));
|
||||
return p;
|
||||
}
|
||||
|
||||
void yap_init(void) {}
|
||||
|
||||
#endif // C_INTERFACE_C
|
||||
|
||||
/**
|
||||
@}
|
||||
*/
|
||||
/**
|
||||
@}
|
||||
*/
|
||||
|
20
C/cdmgr.c
20
C/cdmgr.c
@ -1647,6 +1647,14 @@ bool Yap_constPred(PredEntry *p) {
|
||||
pred_flags_t pflags;
|
||||
pflags = p->PredFlags;
|
||||
|
||||
if (pflags &
|
||||
((UserCPredFlag | CArgsPredFlag | NumberDBPredFlag | AtomDBPredFlag |
|
||||
TestPredFlag | AsmPredFlag | CPredFlag | BinaryPredFlag)))
|
||||
return true;
|
||||
|
||||
if (p->PredFlags &
|
||||
(SysExportPredFlag | MultiFileFlag | DynamicPredFlag | LogUpdatePredFlag))
|
||||
return false;
|
||||
if (Yap_isSystemModule(p->ModuleOfPred)) {
|
||||
if (p->cs.p_code.NOfClauses == 0) {
|
||||
p->src.OwnerFile = Yap_source_file_name();
|
||||
@ -1656,15 +1664,7 @@ bool Yap_constPred(PredEntry *p) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
if (pflags &
|
||||
((UserCPredFlag | CArgsPredFlag | NumberDBPredFlag | AtomDBPredFlag |
|
||||
TestPredFlag | AsmPredFlag | CPredFlag | BinaryPredFlag)))
|
||||
return true;
|
||||
|
||||
if (p->PredFlags &
|
||||
(SysExportPredFlag | MultiFileFlag | DynamicPredFlag | LogUpdatePredFlag))
|
||||
return false;
|
||||
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
@ -1719,7 +1719,6 @@ bool Yap_addclause(Term t, yamop *cp, Term tmode, Term mod, Term *t4ref)
|
||||
at = NameOfFunctor(f);
|
||||
p = RepPredProp(PredPropByFunc(f, mod));
|
||||
}
|
||||
Yap_PutValue(AtomAbol, TermNil);
|
||||
PELOCK(20, p);
|
||||
/* we are redefining a prolog module predicate */
|
||||
if (Yap_constPred(p)) {
|
||||
@ -1727,6 +1726,7 @@ bool Yap_addclause(Term t, yamop *cp, Term tmode, Term mod, Term *t4ref)
|
||||
UNLOCKPE(30, p);
|
||||
return false;
|
||||
}
|
||||
Yap_PutValue(AtomAbol, TermNil);
|
||||
pflags = p->PredFlags;
|
||||
/* we are redefining a prolog module predicate */
|
||||
if (pflags & MegaClausePredFlag) {
|
||||
|
14
CXX/yapdb.hh
14
CXX/yapdb.hh
@ -46,7 +46,7 @@ class YAPModule;
|
||||
class YAPModule : protected YAPAtomTerm {
|
||||
friend class YAPPredicate;
|
||||
friend class YAPModuleProp;
|
||||
YAPModule(Term t) : YAPAtomTerm(t){};
|
||||
YAPModule(YAP_Term t) : YAPAtomTerm(t){};
|
||||
Term t() { return gt(); }
|
||||
Term curModule() { CACHE_REGS return Yap_CurrentModule(); }
|
||||
|
||||
@ -138,6 +138,11 @@ protected:
|
||||
|
||||
PredEntry *asPred() { return ap; };
|
||||
|
||||
/// Empty constructor for predicates
|
||||
///
|
||||
/// Just do nothing.
|
||||
inline YAPPredicate() {
|
||||
}
|
||||
/// String constructor for predicates
|
||||
///
|
||||
/// It also communicates the array of arguments t[]
|
||||
@ -181,6 +186,13 @@ protected:
|
||||
///
|
||||
inline YAPPredicate(PredEntry *pe) { ap = pe; }
|
||||
|
||||
/// Functor constructor for predicates, is given a specific module.
|
||||
/// This version avoids manufacturing objects
|
||||
inline YAPPredicate(Functor f, Term mod) {
|
||||
ap = RepPredProp(PredPropByFunc(f, mod));
|
||||
}
|
||||
|
||||
|
||||
public:
|
||||
|
||||
/// Functor constructor for predicates
|
||||
|
478
CXX/yapi.cpp
478
CXX/yapi.cpp
@ -435,16 +435,7 @@ void YAPQuery::openQuery(Term t)
|
||||
XREGS[i + 1] = ts[i];
|
||||
}
|
||||
}
|
||||
// oq = LOCAL_execution;
|
||||
// LOCAL_execution = this;
|
||||
q_open = true;
|
||||
q_state = 0;
|
||||
q_flags = true; // PL_Q_PASS_EXCEPTION;
|
||||
|
||||
q_p = P;
|
||||
q_cp = CP;
|
||||
// make sure this is safe
|
||||
q_handles = LOCAL_CurSlot;
|
||||
setNext();
|
||||
}
|
||||
|
||||
bool YAPEngine::call(YAPPredicate ap, YAPTerm ts[])
|
||||
@ -833,6 +824,7 @@ void Yap_displayWithJava(int c)
|
||||
|
||||
#endif
|
||||
|
||||
|
||||
void YAPEngine::doInit(YAP_file_type_t BootMode)
|
||||
{
|
||||
if ((BootMode = YAP_Init(&engine_args->init_args)) == YAP_FOUND_BOOT_ERROR)
|
||||
@ -852,228 +844,264 @@ void YAPEngine::doInit(YAP_file_type_t BootMode)
|
||||
|
||||
do_init_python();
|
||||
#endif
|
||||
|
||||
YAPQuery initq = YAPQuery(YAPAtom("$init_system"));
|
||||
if (initq.next())
|
||||
{
|
||||
initq.cut();
|
||||
}
|
||||
else
|
||||
{
|
||||
// should throw exception
|
||||
}
|
||||
YAP_Functor f = YAP_MkFunctor(YAP_LookupAtom("$init_system"), 3);
|
||||
YAP_PredEntryPtr p = YAP_FunctorToPred( f );
|
||||
YAPQuery initq = YAPQuery(YAPPredicate(p), nullptr);
|
||||
if (initq.next())
|
||||
{
|
||||
initq.cut();
|
||||
}
|
||||
else
|
||||
{
|
||||
// should throw exception
|
||||
}
|
||||
}
|
||||
|
||||
YAPEngine::YAPEngine(int argc, char *argv[],
|
||||
YAPCallback *cb)
|
||||
: _callback(0) { // a single engine can be active
|
||||
YAPCallback *cb)
|
||||
: _callback(0) { // a single engine can be active
|
||||
|
||||
YAP_file_type_t BootMode;
|
||||
engine_args = new YAPEngineArgs();
|
||||
BootMode = YAP_parse_yap_arguments(argc, argv, &engine_args->init_args);
|
||||
// delYAPCallback()b
|
||||
// if (cb)
|
||||
// setYAPCallback(cb);
|
||||
doInit(BootMode);
|
||||
}
|
||||
|
||||
|
||||
YAPPredicate::YAPPredicate(YAPAtom at)
|
||||
{
|
||||
CACHE_REGS
|
||||
ap = RepPredProp(PredPropByAtom(at.a, Yap_CurrentModule()));
|
||||
}
|
||||
|
||||
YAPPredicate::YAPPredicate(YAPAtom at, uintptr_t arity)
|
||||
{
|
||||
CACHE_REGS
|
||||
if (arity)
|
||||
{
|
||||
Functor f = Yap_MkFunctor(at.a, arity);
|
||||
ap = RepPredProp(PredPropByFunc(f, Yap_CurrentModule()));
|
||||
}
|
||||
else
|
||||
{
|
||||
ap = RepPredProp(PredPropByAtom(at.a, Yap_CurrentModule()));
|
||||
}
|
||||
}
|
||||
|
||||
/// auxiliary routine to find a predicate in the current module.
|
||||
PredEntry *YAPPredicate::getPred(YAPTerm &tt, Term *&outp)
|
||||
{
|
||||
CACHE_REGS
|
||||
Term m = Yap_CurrentModule(), t = tt.term();
|
||||
t = Yap_StripModule(t, &m);
|
||||
if (IsVarTerm(t) || IsNumTerm(t))
|
||||
{
|
||||
if (IsVarTerm(t))
|
||||
Yap_ThrowError(INSTANTIATION_ERROR, tt.term(), 0);
|
||||
else if (IsNumTerm(t))
|
||||
Yap_ThrowError(TYPE_ERROR_CALLABLE, tt.term(), 0);
|
||||
throw YAPError();
|
||||
}
|
||||
tt.put(t);
|
||||
if (IsAtomTerm(t))
|
||||
{
|
||||
ap = RepPredProp(PredPropByAtom(AtomOfTerm(t), m));
|
||||
outp = (Term *)NULL;
|
||||
return ap;
|
||||
}
|
||||
else if (IsPairTerm(t))
|
||||
{
|
||||
Term ts[2];
|
||||
ts[0] = t;
|
||||
ts[1] = m;
|
||||
t = Yap_MkApplTerm(FunctorCsult, 2, ts);
|
||||
tt.put(t);
|
||||
outp = RepAppl(t) + 1;
|
||||
}
|
||||
Functor f = FunctorOfTerm(t);
|
||||
if (IsExtensionFunctor(f))
|
||||
{
|
||||
Yap_ThrowError(TYPE_ERROR_CALLABLE, t, 0);
|
||||
}
|
||||
else
|
||||
{
|
||||
ap = RepPredProp(PredPropByFunc(f, m));
|
||||
outp = RepAppl(t) + 1;
|
||||
}
|
||||
return ap;
|
||||
}
|
||||
|
||||
X_API bool YAPPrologPredicate::assertClause(YAPTerm cl, bool last,
|
||||
YAPTerm source)
|
||||
{
|
||||
CACHE_REGS
|
||||
|
||||
RECOVER_MACHINE_REGS();
|
||||
Term tt = cl.gt();
|
||||
Term sourcet;
|
||||
Term ntt = cl.gt();
|
||||
if (source.initialized())
|
||||
sourcet = source.gt();
|
||||
else
|
||||
sourcet = TermZERO;
|
||||
yamop *codeaddr = Yap_cclause(tt, ap->ArityOfPE, Yap_CurrentModule(),
|
||||
sourcet); /* vsc: give the number of arguments
|
||||
to cclause in case there is overflow */
|
||||
if (LOCAL_ErrorMessage)
|
||||
{
|
||||
RECOVER_MACHINE_REGS();
|
||||
return false;
|
||||
}
|
||||
Term *tref = &ntt;
|
||||
if (Yap_addclause(ntt, codeaddr, (last ? TermAssertz : TermAsserta),
|
||||
Yap_CurrentModule(), tref))
|
||||
{
|
||||
RECOVER_MACHINE_REGS();
|
||||
}
|
||||
return tref;
|
||||
}
|
||||
|
||||
bool YAPPrologPredicate::assertFact(YAPTerm *cl, bool last)
|
||||
{
|
||||
CACHE_REGS
|
||||
arity_t i;
|
||||
RECOVER_MACHINE_REGS();
|
||||
Term tt = AbsAppl(HR);
|
||||
*HR++ = (CELL)(ap->FunctorOfPred);
|
||||
for (i = 0; i < ap->ArityOfPE; i++, cl++)
|
||||
*HR++ = cl->gt();
|
||||
yamop *codeaddr = Yap_cclause(tt, ap->ArityOfPE, Yap_CurrentModule(),
|
||||
tt); /* vsc: give the number of arguments
|
||||
to cclause in case there is overflow */
|
||||
if (LOCAL_ErrorMessage)
|
||||
{
|
||||
RECOVER_MACHINE_REGS();
|
||||
return false;
|
||||
}
|
||||
Term *tref = &tt;
|
||||
if (Yap_addclause(tt, codeaddr, (last ? TermAssertz : TermAsserta),
|
||||
Yap_CurrentModule(), tref))
|
||||
{
|
||||
RECOVER_MACHINE_REGS();
|
||||
}
|
||||
return tref;
|
||||
}
|
||||
|
||||
void *YAPPrologPredicate::retractClause(YAPTerm skeleton, bool all)
|
||||
{
|
||||
return 0;
|
||||
}
|
||||
|
||||
std::string YAPError::text()
|
||||
{
|
||||
char buf[256];
|
||||
std::string s = "";
|
||||
if (LOCAL_ActiveError->errorFunction)
|
||||
{
|
||||
s += LOCAL_ActiveError->errorFile;
|
||||
s += ":";
|
||||
sprintf(buf, "%ld", (long int)LOCAL_ActiveError->errorLine);
|
||||
s += buf;
|
||||
s += ":0 in C-code";
|
||||
}
|
||||
if (LOCAL_ActiveError->prologPredLine)
|
||||
{
|
||||
s += "\n";
|
||||
s += LOCAL_ActiveError->prologPredFile->StrOfAE;
|
||||
s += ":";
|
||||
sprintf(buf, "%ld", (long int)LOCAL_ActiveError->prologPredLine);
|
||||
s += buf; // std::to_string(LOCAL_ActiveError->prologPredLine) ;
|
||||
// YAPIntegerTerm(LOCAL_ActiveError->prologPredLine).text();
|
||||
s += ":0 ";
|
||||
s += LOCAL_ActiveError->prologPredModule;
|
||||
s += ":";
|
||||
s += (LOCAL_ActiveError->prologPredName)->StrOfAE;
|
||||
s += "/";
|
||||
sprintf(buf, "%ld", (long int)LOCAL_ActiveError->prologPredArity);
|
||||
s += // std::to_string(LOCAL_ActiveError->prologPredArity);
|
||||
buf;
|
||||
}
|
||||
s += " error ";
|
||||
if (LOCAL_ActiveError->classAsText != nullptr)
|
||||
s += LOCAL_ActiveError->classAsText->StrOfAE;
|
||||
s += ".";
|
||||
s += LOCAL_ActiveError->errorAsText->StrOfAE;
|
||||
s += ".\n";
|
||||
if (LOCAL_ActiveError->errorTerm)
|
||||
{
|
||||
Term t = LOCAL_ActiveError->errorTerm->Entry;
|
||||
if (t)
|
||||
{
|
||||
s += "error term is: ";
|
||||
s += YAPTerm(t).text();
|
||||
s += "\n";
|
||||
YAP_file_type_t BootMode;
|
||||
engine_args = new YAPEngineArgs();
|
||||
BootMode = YAP_parse_yap_arguments(argc, argv, &engine_args->init_args);
|
||||
// delYAPCallback()b
|
||||
// if (cb)
|
||||
// setYAPCallback(cb);
|
||||
doInit(BootMode);
|
||||
}
|
||||
}
|
||||
printf("%s\n", s.c_str());
|
||||
return s.c_str();
|
||||
}
|
||||
|
||||
void YAPEngine::reSet()
|
||||
{
|
||||
/* ignore flags for now */
|
||||
BACKUP_MACHINE_REGS();
|
||||
Yap_RebootHandles(worker_id);
|
||||
while (B->cp_b)
|
||||
B = B->cp_b;
|
||||
P = FAILCODE;
|
||||
Yap_exec_absmi(true, YAP_EXEC_ABSMI);
|
||||
/* recover stack space */
|
||||
HR = B->cp_h;
|
||||
TR = B->cp_tr;
|
||||
|
||||
YAPPredicate::YAPPredicate(YAPAtom at)
|
||||
{
|
||||
CACHE_REGS
|
||||
ap = RepPredProp(PredPropByAtom(at.a, Yap_CurrentModule()));
|
||||
}
|
||||
|
||||
YAPPredicate::YAPPredicate(YAPAtom at, uintptr_t arity)
|
||||
{
|
||||
CACHE_REGS
|
||||
if (arity)
|
||||
{
|
||||
Functor f = Yap_MkFunctor(at.a, arity);
|
||||
ap = RepPredProp(PredPropByFunc(f, Yap_CurrentModule()));
|
||||
}
|
||||
else
|
||||
{
|
||||
ap = RepPredProp(PredPropByAtom(at.a, Yap_CurrentModule()));
|
||||
}
|
||||
}
|
||||
|
||||
/// auxiliary routine to find a predicate in the current module.
|
||||
PredEntry *YAPPredicate::getPred(YAPTerm &tt, Term *&outp)
|
||||
{
|
||||
CACHE_REGS
|
||||
Term m = Yap_CurrentModule(), t = tt.term();
|
||||
t = Yap_StripModule(t, &m);
|
||||
if (IsVarTerm(t) || IsNumTerm(t))
|
||||
{
|
||||
if (IsVarTerm(t))
|
||||
Yap_ThrowError(INSTANTIATION_ERROR, tt.term(), 0);
|
||||
else if (IsNumTerm(t))
|
||||
Yap_ThrowError(TYPE_ERROR_CALLABLE, tt.term(), 0);
|
||||
throw YAPError();
|
||||
}
|
||||
tt.put(t);
|
||||
if (IsAtomTerm(t))
|
||||
{
|
||||
ap = RepPredProp(PredPropByAtom(AtomOfTerm(t), m));
|
||||
outp = (Term *)NULL;
|
||||
return ap;
|
||||
}
|
||||
else if (IsPairTerm(t))
|
||||
{
|
||||
Term ts[2];
|
||||
ts[0] = t;
|
||||
ts[1] = m;
|
||||
t = Yap_MkApplTerm(FunctorCsult, 2, ts);
|
||||
tt.put(t);
|
||||
outp = RepAppl(t) + 1;
|
||||
}
|
||||
Functor f = FunctorOfTerm(t);
|
||||
if (IsExtensionFunctor(f))
|
||||
{
|
||||
Yap_ThrowError(TYPE_ERROR_CALLABLE, t, 0);
|
||||
}
|
||||
else
|
||||
{
|
||||
ap = RepPredProp(PredPropByFunc(f, m));
|
||||
outp = RepAppl(t) + 1;
|
||||
}
|
||||
return ap;
|
||||
}
|
||||
|
||||
X_API bool YAPPrologPredicate::assertClause(YAPTerm cl, bool last,
|
||||
YAPTerm source)
|
||||
{
|
||||
CACHE_REGS
|
||||
|
||||
RECOVER_MACHINE_REGS();
|
||||
Term tt = cl.gt();
|
||||
Term sourcet;
|
||||
Term ntt = cl.gt();
|
||||
if (source.initialized())
|
||||
sourcet = source.gt();
|
||||
else
|
||||
sourcet = TermZERO;
|
||||
yamop *codeaddr = Yap_cclause(tt, ap->ArityOfPE, Yap_CurrentModule(),
|
||||
sourcet); /* vsc: give the number of arguments
|
||||
to cclause in case there is overflow */
|
||||
if (LOCAL_ErrorMessage)
|
||||
{
|
||||
RECOVER_MACHINE_REGS();
|
||||
return false;
|
||||
}
|
||||
Term *tref = &ntt;
|
||||
if (Yap_addclause(ntt, codeaddr, (last ? TermAssertz : TermAsserta),
|
||||
Yap_CurrentModule(), tref))
|
||||
{
|
||||
RECOVER_MACHINE_REGS();
|
||||
}
|
||||
return tref;
|
||||
}
|
||||
|
||||
bool YAPPrologPredicate::assertFact(YAPTerm *cl, bool last)
|
||||
{
|
||||
CACHE_REGS
|
||||
arity_t i;
|
||||
RECOVER_MACHINE_REGS();
|
||||
Term tt = AbsAppl(HR);
|
||||
*HR++ = (CELL)(ap->FunctorOfPred);
|
||||
for (i = 0; i < ap->ArityOfPE; i++, cl++)
|
||||
*HR++ = cl->gt();
|
||||
yamop *codeaddr = Yap_cclause(tt, ap->ArityOfPE, Yap_CurrentModule(),
|
||||
tt); /* vsc: give the number of arguments
|
||||
to cclause in case there is overflow */
|
||||
if (LOCAL_ErrorMessage)
|
||||
{
|
||||
RECOVER_MACHINE_REGS();
|
||||
return false;
|
||||
}
|
||||
Term *tref = &tt;
|
||||
if (Yap_addclause(tt, codeaddr, (last ? TermAssertz : TermAsserta),
|
||||
Yap_CurrentModule(), tref))
|
||||
{
|
||||
RECOVER_MACHINE_REGS();
|
||||
}
|
||||
return tref;
|
||||
}
|
||||
|
||||
void *YAPPrologPredicate::retractClause(YAPTerm skeleton, bool all)
|
||||
{
|
||||
return 0;
|
||||
}
|
||||
|
||||
std::string YAPError::text()
|
||||
{
|
||||
char buf[256];
|
||||
std::string s = "";
|
||||
if (LOCAL_ActiveError->errorFunction)
|
||||
{
|
||||
s += LOCAL_ActiveError->errorFile;
|
||||
s += ":";
|
||||
sprintf(buf, "%ld", (long int)LOCAL_ActiveError->errorLine);
|
||||
s += buf;
|
||||
s += ":0 in C-code";
|
||||
}
|
||||
if (LOCAL_ActiveError->prologPredLine)
|
||||
{
|
||||
s += "\n";
|
||||
s += LOCAL_ActiveError->prologPredFile->StrOfAE;
|
||||
s += ":";
|
||||
sprintf(buf, "%ld", (long int)LOCAL_ActiveError->prologPredLine);
|
||||
s += buf; // std::to_string(LOCAL_ActiveError->prologPredLine) ;
|
||||
// YAPIntegerTerm(LOCAL_ActiveError->prologPredLine).text();
|
||||
s += ":0 ";
|
||||
s += LOCAL_ActiveError->prologPredModule;
|
||||
s += ":";
|
||||
s += (LOCAL_ActiveError->prologPredName)->StrOfAE;
|
||||
s += "/";
|
||||
sprintf(buf, "%ld", (long int)LOCAL_ActiveError->prologPredArity);
|
||||
s += // std::to_string(LOCAL_ActiveError->prologPredArity);
|
||||
buf;
|
||||
}
|
||||
s += " error ";
|
||||
if (LOCAL_ActiveError->classAsText != nullptr)
|
||||
s += LOCAL_ActiveError->classAsText->StrOfAE;
|
||||
s += ".";
|
||||
s += LOCAL_ActiveError->errorAsText->StrOfAE;
|
||||
s += ".\n";
|
||||
if (LOCAL_ActiveError->errorTerm)
|
||||
{
|
||||
Term t = LOCAL_ActiveError->errorTerm->Entry;
|
||||
if (t)
|
||||
{
|
||||
s += "error term is: ";
|
||||
s += YAPTerm(t).text();
|
||||
s += "\n";
|
||||
}
|
||||
}
|
||||
printf("%s\n", s.c_str());
|
||||
return s.c_str();
|
||||
}
|
||||
|
||||
void YAPEngine::reSet()
|
||||
{
|
||||
/* ignore flags for now */
|
||||
BACKUP_MACHINE_REGS();
|
||||
Yap_RebootHandles(worker_id);
|
||||
while (B->cp_b)
|
||||
B = B->cp_b;
|
||||
P = FAILCODE;
|
||||
Yap_exec_absmi(true, YAP_EXEC_ABSMI);
|
||||
/* recover stack space */
|
||||
HR = B->cp_h;
|
||||
TR = B->cp_tr;
|
||||
#ifdef DEPTH_LIMIT
|
||||
DEPTH = B->cp_depth;
|
||||
DEPTH = B->cp_depth;
|
||||
#endif /* DEPTH_LIMIT */
|
||||
YENV = ENV = B->cp_env;
|
||||
YENV = ENV = B->cp_env;
|
||||
|
||||
RECOVER_MACHINE_REGS();
|
||||
}
|
||||
RECOVER_MACHINE_REGS();
|
||||
}
|
||||
|
||||
YAPError::YAPError(yap_error_number id, YAPTerm culprit, std::string txt)
|
||||
{
|
||||
ID = id;
|
||||
goal = culprit.text();
|
||||
info = txt;
|
||||
}
|
||||
YAPError::YAPError(yap_error_number id, YAPTerm culprit, std::string txt)
|
||||
{
|
||||
ID = id;
|
||||
goal = culprit.text();
|
||||
info = txt;
|
||||
}
|
||||
|
||||
Term YAPEngine::top_level( std::string s)
|
||||
|
||||
{
|
||||
|
||||
/// parse string s and make term with var names
|
||||
/// available.
|
||||
Term tp;
|
||||
ARG1 = YAP_ReadBuffer(s.data(), &tp);
|
||||
ARG2 = tp;
|
||||
ARG3 = MkVarTerm();
|
||||
YAPPredicate p = YAPPredicate(YAP_TopGoal());
|
||||
YAPQuery *Q = new YAPQuery(p,0);
|
||||
if (Q->next()) {
|
||||
Term ts[2];
|
||||
ts[0]= MkAddressTerm(Q);
|
||||
ts[1]= ARG3;
|
||||
return YAP_MkApplTerm(YAP_MkFunctor(YAP_LookupAtom("t"), 2), 2, ts);
|
||||
}
|
||||
YAPError();
|
||||
return 0;
|
||||
}
|
||||
|
||||
Term YAPEngine::next_answer(YAPQuery * &Q) {
|
||||
|
||||
/// parse string s and make term with var names
|
||||
/// available.
|
||||
if (Q->next()) {
|
||||
Term ts[2];
|
||||
ts[0]= MkAddressTerm(Q);
|
||||
ts[1]= ARG3;
|
||||
return YAP_MkApplTerm(YAP_MkFunctor(YAP_LookupAtom("t"), 2), 2, ts);
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
|
130
CXX/yapq.hh
130
CXX/yapq.hh
@ -2,7 +2,7 @@
|
||||
/**
|
||||
* @file yapq.hh
|
||||
*
|
||||
* @defgroup yap-cplus-query-hanadlinge Query Handling in the YAP interface.
|
||||
* @defgroup yap-cplus-query-handling Query Handling in the YAP interface.
|
||||
* @brief Engine and Query Management
|
||||
*
|
||||
* @ingroup yap-cplus-interface
|
||||
@ -46,9 +46,24 @@ class YAPQuery : public YAPPredicate
|
||||
// temporaries
|
||||
Term tnames, tgoal ;
|
||||
|
||||
inline void setNext() { // oq = LOCAL_execution;
|
||||
// LOCAL_execution = this;
|
||||
q_open = true;
|
||||
q_state = 0;
|
||||
q_flags = true; // PL_Q_PASS_EXCEPTION;
|
||||
|
||||
q_p = P;
|
||||
q_cp = CP;
|
||||
// make sure this is safe
|
||||
q_handles = LOCAL_CurSlot;
|
||||
}
|
||||
|
||||
void openQuery(Term t);
|
||||
|
||||
|
||||
public:
|
||||
YAPQuery() {
|
||||
};
|
||||
/// main constructor, uses a predicate and an array of terms
|
||||
///
|
||||
/// It is given a YAPPredicate _p_ , and an array of terms that must have at
|
||||
@ -74,63 +89,68 @@ public:
|
||||
/// goal.
|
||||
inline YAPQuery(const char *s) : YAPPredicate(s, tgoal, tnames)
|
||||
{
|
||||
__android_log_print(ANDROID_LOG_INFO, "YAPDroid", "got game %ld",
|
||||
LOCAL_CurSlot);
|
||||
if (!ap)
|
||||
return;
|
||||
__android_log_print(ANDROID_LOG_INFO, "YAPDroid", "%s", vnames.text());
|
||||
goal = YAPTerm(tgoal);
|
||||
names = YAPPairTerm(tnames);
|
||||
openQuery(tgoal);
|
||||
__android_log_print(ANDROID_LOG_INFO, "YAPDroid", "got game %ld",
|
||||
LOCAL_CurSlot);
|
||||
if (!ap)
|
||||
return;
|
||||
__android_log_print(ANDROID_LOG_INFO, "YAPDroid", "%s", vnames.text());
|
||||
goal = YAPTerm(tgoal);
|
||||
names = YAPPairTerm(tnames);
|
||||
openQuery(tgoal);
|
||||
};
|
||||
// inline YAPQuery() : YAPPredicate(s, tgoal, tnames)
|
||||
// {
|
||||
// __android_log_print(ANDROID_LOG_INFO, "YAPDroid", "got game %ld",
|
||||
// LOCAL_CurSlot);
|
||||
// if (!ap)
|
||||
// return;
|
||||
// __android_log_print(ANDROID_LOG_INFO, "YAPDroid", "%s", vnames.text());
|
||||
// goal = YAPTerm(tgoal);
|
||||
// names = YAPPairTerm(tnames);
|
||||
// openQuery(tgoal);
|
||||
// };
|
||||
/// string constructor with just an atom
|
||||
///
|
||||
/// It is given an atom, and a Prolog term that should be a callable
|
||||
/// goal, say `main`, `init`, `live`.
|
||||
inline YAPQuery(YAPAtom g) : YAPPredicate(g)
|
||||
{
|
||||
goal = YAPAtomTerm(g);
|
||||
names = YAPPairTerm( );
|
||||
openQuery(goal.term());
|
||||
};
|
||||
/// It i;
|
||||
///};
|
||||
|
||||
/// set flags for query execution, currently only for exception handling
|
||||
void setFlag(int flag) { q_flags |= flag; }
|
||||
/// reset flags for query execution, currently only for exception handling
|
||||
void resetFlag(int flag) { q_flags &= ~flag; }
|
||||
/// first query
|
||||
///
|
||||
/// actually implemented by calling the next();
|
||||
inline bool first() { return next(); }
|
||||
/// ask for the next solution of the current query
|
||||
/// same call for every solution
|
||||
bool next();
|
||||
/// does this query have open choice-points?
|
||||
/// or is it deterministic?
|
||||
bool deterministic();
|
||||
/// represent the top-goal
|
||||
const char *text();
|
||||
/// remove alternatives in the current search space, and finish the current
|
||||
/// query
|
||||
/// finish the current query: undo all bindings.
|
||||
void close();
|
||||
/// query variables.
|
||||
void cut();
|
||||
Term namedVars() {return names.term(); };
|
||||
/// query variables, but copied out
|
||||
std::vector<Term> namedVarsVector() {
|
||||
return names.listToArray(); };
|
||||
/// convert a ref to a binding.
|
||||
YAPTerm getTerm(yhandle_t t);
|
||||
/// simple YAP Query;
|
||||
/// just calls YAP and reports success or failure, Useful when we just
|
||||
/// want things done, eg YAPCommand("load_files(library(lists), )")
|
||||
inline bool command()
|
||||
{
|
||||
bool rc = next();
|
||||
close();
|
||||
return rc;
|
||||
};
|
||||
void resetFlag(int flag) { q_flags &= ~flag; }
|
||||
/// first query
|
||||
///
|
||||
/// actually implemented by calling the next();
|
||||
inline bool first() { return next(); }
|
||||
/// ask for the next solution of the current query
|
||||
/// same call for every solution
|
||||
bool next();
|
||||
/// does this query have open choice-points?
|
||||
/// or is it deterministic?
|
||||
bool deterministic();
|
||||
/// represent the top-goal
|
||||
const char *text();
|
||||
/// remove alternatives in the current search space, and finish the current
|
||||
/// query
|
||||
/// finish the current query: undo all bindings.
|
||||
void close();
|
||||
/// query variables.
|
||||
void cut();
|
||||
Term namedVars() {return names.term(); };
|
||||
/// query variables, but copied out
|
||||
std::vector<Term> namedVarsVector() {
|
||||
return names.listToArray(); };
|
||||
/// convert a ref to a binding.
|
||||
YAPTerm getTerm(yhandle_t t);
|
||||
/// simple YAP Query;
|
||||
/// just calls YAP and reports success or failure, Useful when we just
|
||||
/// want things done, eg YAPCommand("load_files(library(lists), )")
|
||||
inline bool command()
|
||||
{
|
||||
bool rc = next();
|
||||
close();
|
||||
return rc;
|
||||
};
|
||||
};
|
||||
|
||||
// Java support
|
||||
@ -408,8 +428,12 @@ public:
|
||||
{
|
||||
return setYapFlag(MkAtomTerm(Yap_LookupAtom(arg.data())), MkAtomTerm(Yap_LookupAtom(path.data())));
|
||||
};
|
||||
};
|
||||
|
||||
Term top_level( std::string s);
|
||||
Term next_answer(YAPQuery * &Q);
|
||||
|
||||
};
|
||||
|
||||
#endif /* YAPQ_HH */
|
||||
|
||||
/// @}
|
||||
/// @}
|
||||
|
197
cmake/docs/source/conf.py
Normal file
197
cmake/docs/source/conf.py
Normal file
@ -0,0 +1,197 @@
|
||||
#!/Usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# YAP documentation build configuration file, created by
|
||||
# sphinx-quickstart on Sun Mar 26 10:27:55 2017.
|
||||
#
|
||||
# This file is execfile()d with the current directory set to its
|
||||
# containing dir.
|
||||
#
|
||||
# Note that not all possible configuration values are present in this
|
||||
# autogenerated file.
|
||||
#
|
||||
# All configuration values have a default; values that are commented out
|
||||
# serve to show the default.
|
||||
|
||||
# If extensions (or modules to document with autodoc) are in another directory,
|
||||
# add these directories to sys.path here. If the directory is relative to the
|
||||
# documentation root, use os.path.abspath to make it absolute, like shown here.
|
||||
#
|
||||
# import os
|
||||
# import sys
|
||||
# sys.path.insert(0, os.path.abspath('.'))
|
||||
from recommonmark.parser import CommonMarkParser
|
||||
|
||||
|
||||
# -- General configuration ------------------------------------------------
|
||||
|
||||
# If your documentation needs a minimal Sphinx version, state it here.
|
||||
#
|
||||
# needs_sphinx = '1.0'
|
||||
|
||||
# Add any Sphinx extension module names here, as strings. They can be
|
||||
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
|
||||
# ones.
|
||||
extensions = ['sphinx.ext.autodoc',
|
||||
'sphinx.ext.doctest',
|
||||
'sphinx.ext.intersphinx',
|
||||
'sphinx.ext.todo',
|
||||
'sphinx.ext.coverage',
|
||||
'sphinx.ext.mathjax',
|
||||
'sphinx.ext.ifconfig',
|
||||
'sphinx.ext.viewcode',
|
||||
'sphinx.ext.githubpages',
|
||||
'breathe'
|
||||
]
|
||||
|
||||
breathe_projects = { "yap": "/Users/vsc/github/yap-6.3/cmake/docs/xml" }
|
||||
breathe_default_project = "yap"
|
||||
|
||||
# Add any paths that contain templates here, relative to this directory.
|
||||
templates_path = ['_templates']
|
||||
|
||||
# The suffix(es) of source filenames.
|
||||
# You can specify multiple suffix as a list of string:
|
||||
#
|
||||
source_suffix = ['.rst', '.md']
|
||||
# source_suffix = '.rst'
|
||||
|
||||
# The master toctree document.
|
||||
master_doc = 'index'
|
||||
|
||||
# General information about the project.
|
||||
project = 'YAP'
|
||||
copyright = '2017, Vitor Santos Costa'
|
||||
author = 'Vitor Santos Costa'
|
||||
|
||||
# The version info for the project you're documenting, acts as replacement for
|
||||
# |version| and |release|, also used in various other places throughout the
|
||||
# built documents.
|
||||
#
|
||||
# The short X.Y version.
|
||||
version = '6.3'
|
||||
# The full version, including alpha/beta/rc tags.
|
||||
release = '6.3.5'
|
||||
|
||||
# The language for content autogenerated by Sphinx. Refer to documentation
|
||||
# for a list of supported languages.
|
||||
#
|
||||
# This is also used if you do content translation via gettext catalogs.
|
||||
# Usually you set "language" from the command line for these cases.
|
||||
language = None
|
||||
|
||||
# List of patterns, relative to source directory, that match files and
|
||||
# directories to ignore when looking for source files.
|
||||
# This patterns also effect to html_static_path and html_extra_path
|
||||
exclude_patterns = []
|
||||
|
||||
# The name of the Pygments (syntax highlighting) style to use.
|
||||
pygments_style = 'sphinx'
|
||||
|
||||
# If true, `todo` and `todoList` produce output, else they produce nothing.
|
||||
todo_include_todos = True
|
||||
|
||||
source_parsers = {
|
||||
'.md': 'recommonmark.parser.CommonMarkParser',
|
||||
}
|
||||
# -- Options for HTML output ----------------------------------------------
|
||||
|
||||
# The theme to use for HTML and HTML Help pages. See the documentation for
|
||||
# a list of builtin themes.
|
||||
#
|
||||
html_theme = 'alabaster'
|
||||
|
||||
# Theme options are theme-specific and customize the look and feel of a theme
|
||||
# further. For a list of options available for each theme, see the
|
||||
# documentation.
|
||||
#
|
||||
# html_theme_options = {}
|
||||
|
||||
# Add any paths that contain custom static files (such as style sheets) here,
|
||||
# relative to this directory. They are copied after the builtin static files,
|
||||
# so a file named "default.css" will overwrite the builtin "default.css".
|
||||
html_static_path = ['_static']
|
||||
|
||||
|
||||
# -- Options for HTMLHelp output ------------------------------------------
|
||||
|
||||
# Output file base name for HTML help builder.
|
||||
htmlhelp_basename = 'YAPdoc'
|
||||
|
||||
|
||||
# -- Options for LaTeX output ---------------------------------------------
|
||||
|
||||
latex_elements = {
|
||||
# The paper size ('letterpaper' or 'a4paper').
|
||||
#
|
||||
# 'papersize': 'letterpaper',
|
||||
|
||||
# The font size ('10pt', '11pt' or '12pt').
|
||||
#
|
||||
# 'pointsize': '10pt',
|
||||
|
||||
# Additional stuff for the LaTeX preamble.
|
||||
#
|
||||
# 'preamble': '',
|
||||
|
||||
# Latex figure (float) alignment
|
||||
#
|
||||
# 'figure_align': 'htbp',
|
||||
}
|
||||
|
||||
# Grouping the document tree into LaTeX files. List of tuples
|
||||
# (source start file, target name, title,
|
||||
# author, documentclass [howto, manual, or own class]).
|
||||
latex_documents = [
|
||||
(master_doc, 'YAP.tex', 'YAP Documentation',
|
||||
'Vitor Santos Costa', 'manual'),
|
||||
]
|
||||
|
||||
|
||||
# -- Options for manual page output ---------------------------------------
|
||||
|
||||
# One entry per manual page. List of tuples
|
||||
# (source start file, name, description, authors, manual section).
|
||||
man_pages = [
|
||||
(master_doc, 'yap', 'YAP Documentation',
|
||||
[author], 1)
|
||||
]
|
||||
|
||||
|
||||
# -- Options for Texinfo output -------------------------------------------
|
||||
|
||||
# Grouping the document tree into Texinfo files. List of tuples
|
||||
# (source start file, target name, title, author,
|
||||
# dir menu entry, description, category)
|
||||
texinfo_documents = [
|
||||
(master_doc, 'YAP', 'YAP Documentation',
|
||||
author, 'YAP', 'One line description of project.',
|
||||
'Miscellaneous'),
|
||||
]
|
||||
|
||||
|
||||
|
||||
# -- Options for Epub output ----------------------------------------------
|
||||
|
||||
# Bibliographic Dublin Core info.
|
||||
epub_title = project
|
||||
epub_author = author
|
||||
epub_publisher = author
|
||||
epub_copyright = copyright
|
||||
|
||||
# The unique identifier of the text. This can be a ISBN number
|
||||
# or the project homepage.
|
||||
#
|
||||
# epub_identifier = ''
|
||||
|
||||
# A unique identification for the text.
|
||||
#
|
||||
# epub_uid = ''
|
||||
|
||||
# A list of files that should not be packed into the epub file.
|
||||
epub_exclude_files = ['search.html']
|
||||
|
||||
|
||||
|
||||
# Example configuration for intersphinx: refer to the Python standard library.
|
||||
intersphinx_mapping = {'https://docs.python.org/': None}
|
45
cmake/docs/source/index.rst
Normal file
45
cmake/docs/source/index.rst
Normal file
@ -0,0 +1,45 @@
|
||||
.. YAP documentation master file, created by
|
||||
sphinx-quickstart on Sun Mar 26 10:27:55 2017.
|
||||
You can adapt this file completely to your liking, but it should at least
|
||||
contain the root `toctree` directive.
|
||||
|
||||
Welcome to YAP's documentation!
|
||||
===============================
|
||||
|
||||
.. doxygenindex::
|
||||
.. doxygenfunction::
|
||||
.. doxygenstruct::
|
||||
.. doxygenenum::
|
||||
.. doxygentypedef::
|
||||
.. doxygenclass::
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 2
|
||||
:caption: Contents:
|
||||
|
||||
'../../md/attributes.md'
|
||||
'../../md'/builtins.md'
|
||||
'../../md'/download.md'
|
||||
'../../md'/extensions.md'
|
||||
'../../md'/fli.md'
|
||||
'../../md'/library.md'
|
||||
'../../md'/load_files.md'
|
||||
'../../md'/modules.md'
|
||||
'../../md'/packages.md'
|
||||
'../../md'/run.md'
|
||||
'../../md'/swi.md'
|
||||
'../../md'/syntax.md'
|
||||
'../../md'/yap.md'
|
||||
'classlist.rst'
|
||||
'file.rst'
|
||||
'group.rst'
|
||||
'section.rst'
|
||||
'union.rst'
|
||||
'namespace.rst'
|
||||
|
||||
|
||||
|
||||
Indices and tables
|
||||
==================
|
||||
|
||||
* :ref:`search`
|
2
cmake/packages/raptor/raptor_config.h
Normal file
2
cmake/packages/raptor/raptor_config.h
Normal file
@ -0,0 +1,2 @@
|
||||
#define HAVE_RAPTOR2_RAPTOR2_H 1
|
||||
/* #undef HAVE_RAPTOR_H */
|
26
cmake/packages/real/rconfig.h
Normal file
26
cmake/packages/real/rconfig.h
Normal file
@ -0,0 +1,26 @@
|
||||
/*--------------------------------------------------------------------------
|
||||
* This file is autogenerated from rconfig.h.cmake
|
||||
* during the cmake configuration of your project. If you need to make changes
|
||||
* edit the original file NOT THIS FILE.
|
||||
* --------------------------------------------------------------------------*/
|
||||
#ifndef RCONFIG_H
|
||||
#define RCONFIG_H
|
||||
|
||||
/* Define to 1 if you have the <alloca.h> header file. */
|
||||
#ifndef HAVE_R_H
|
||||
#define HAVE_R_H 1
|
||||
#endif
|
||||
|
||||
/* Define to 1 if you have the <alloca.h> header file. */
|
||||
#ifndef HAVE_R_EMBEDDED_H
|
||||
#define HAVE_R_EMBEDDED_H 1
|
||||
#endif
|
||||
|
||||
/* Define to 1 if you have the <alloca.h> header file. */
|
||||
#ifndef HAVE_R_INTERFACE_H
|
||||
#define HAVE_R_INTERFACE_H 1
|
||||
#endif
|
||||
|
||||
|
||||
#endif
|
||||
|
1012
compile_commands.json
Normal file
1012
compile_commands.json
Normal file
File diff suppressed because it is too large
Load Diff
43
cudd_config.h
Normal file
43
cudd_config.h
Normal file
@ -0,0 +1,43 @@
|
||||
// cmake template file
|
||||
|
||||
/* Define to 1 if you have the <cuddInt.h> header file. */
|
||||
#ifndef HAVE_CUDD_H
|
||||
/* #undef HAVE_CUDD_H */
|
||||
#endif
|
||||
|
||||
/* Define to 1 if you have the <cudd/cuddInt.h> header file. */
|
||||
#ifndef HAVE_CUDD_CUDD_H
|
||||
#define HAVE_CUDD_CUDD_H 1
|
||||
#endif
|
||||
|
||||
/* Define to 1 if you have the <cuddInt.h> header file. */
|
||||
#ifndef HAVE_CUDDINT_H
|
||||
/* #undef HAVE_CUDDINT_H */
|
||||
#endif
|
||||
|
||||
/* Define to 1 if you have the <cudd/cuddInt.h> header file. */
|
||||
#ifndef HAVE_CUDD_CUDDINT_H
|
||||
#define HAVE_CUDD_CUDDINT_H 1
|
||||
#endif
|
||||
|
||||
|
||||
/* Define to 1 if you have the <cuddObj.hh> header file. */
|
||||
#ifndef HAVE_CUDDOBJ_HH
|
||||
/* #undef HAVE_CUDDOBJ_HH */
|
||||
#endif
|
||||
|
||||
/* Define to 1 if you have the <cudd/cuddObj.hh> header file. */
|
||||
#ifndef HAVE_CUDD_CUDDOBJ_HH
|
||||
/* #undef HAVE_CUDD_CUDDOBJ_HH */
|
||||
#endif
|
||||
|
||||
|
||||
/* Define to 1 if you have the <dddmpInt.h> header file. */
|
||||
#ifndef HAVE_DDDMPINT_H
|
||||
/* #undef HAVE_DDDMPINT_H */
|
||||
#endif
|
||||
|
||||
/* Define to 1 if you have the <cudd/dddmpInt.h> header file. */
|
||||
#ifndef HAVE_CUDD_CUDDINT_H
|
||||
/* #undef HAVE_CUDD_DDDMPINT_H */
|
||||
#endif
|
255
docs/custom/customdoxygen.css
Normal file
255
docs/custom/customdoxygen.css
Normal file
@ -0,0 +1,255 @@
|
||||
h1, .h1, h2, .h2, h3, .h3{
|
||||
font-weight: 200 !important;
|
||||
}
|
||||
|
||||
#navrow1, #navrow2, #navrow3, #navrow4, #navrow5{
|
||||
border-bottom: 1px solid #EEEEEE;
|
||||
}
|
||||
|
||||
.adjust-right {
|
||||
margin-left: 30px !important;
|
||||
font-size: 1.15em !important;
|
||||
}
|
||||
.navbar{
|
||||
border: 0px solid #222 !important;
|
||||
}
|
||||
|
||||
|
||||
/* Sticky footer styles
|
||||
-------------------------------------------------- */
|
||||
html,
|
||||
body {
|
||||
height: 100%;
|
||||
/* The html and body elements cannot have any padding or margin. */
|
||||
}
|
||||
|
||||
/* Wrapper for page content to push down footer */
|
||||
#wrap {
|
||||
min-height: 100%;
|
||||
height: auto;
|
||||
/* Negative indent footer by its height */
|
||||
margin: 0 auto -60px;
|
||||
/* Pad bottom by footer height */
|
||||
padding: 0 0 60px;
|
||||
}
|
||||
|
||||
/* Set the fixed height of the footer here */
|
||||
#footer {
|
||||
font-size: 0.9em;
|
||||
padding: 8px 0px;
|
||||
background-color: #f5f5f5;
|
||||
}
|
||||
|
||||
.footer-row {
|
||||
line-height: 44px;
|
||||
}
|
||||
|
||||
#footer > .container {
|
||||
padding-left: 15px;
|
||||
padding-right: 15px;
|
||||
}
|
||||
|
||||
.footer-follow-icon {
|
||||
margin-left: 3px;
|
||||
text-decoration: none !important;
|
||||
}
|
||||
|
||||
.footer-follow-icon img {
|
||||
width: 20px;
|
||||
}
|
||||
|
||||
.footer-link {
|
||||
padding-top: 5px;
|
||||
display: inline-block;
|
||||
color: #999999;
|
||||
text-decoration: none;
|
||||
}
|
||||
|
||||
.footer-copyright {
|
||||
text-align: center;
|
||||
}
|
||||
|
||||
|
||||
@media (min-width: 992px) {
|
||||
.footer-row {
|
||||
text-align: left;
|
||||
}
|
||||
|
||||
.footer-icons {
|
||||
text-align: right;
|
||||
}
|
||||
}
|
||||
@media (max-width: 991px) {
|
||||
.footer-row {
|
||||
text-align: center;
|
||||
}
|
||||
|
||||
.footer-icons {
|
||||
text-align: center;
|
||||
}
|
||||
}
|
||||
|
||||
/* DOXYGEN Code Styles
|
||||
----------------------------------- */
|
||||
|
||||
|
||||
a.qindex {
|
||||
font-weight: bold;
|
||||
}
|
||||
|
||||
a.qindexHL {
|
||||
font-weight: bold;
|
||||
background-color: #9CAFD4;
|
||||
color: #ffffff;
|
||||
border: 1px double #869DCA;
|
||||
}
|
||||
|
||||
.contents a.qindexHL:visited {
|
||||
color: #ffffff;
|
||||
}
|
||||
|
||||
a.code, a.code:visited, a.line, a.line:visited {
|
||||
color: #4665A2;
|
||||
}
|
||||
|
||||
a.codeRef, a.codeRef:visited, a.lineRef, a.lineRef:visited {
|
||||
color: #4665A2;
|
||||
}
|
||||
|
||||
/* @end */
|
||||
|
||||
dl.el {
|
||||
margin-left: -1cm;
|
||||
}
|
||||
|
||||
pre.fragment {
|
||||
border: 1px solid #C4CFE5;
|
||||
background-color: #FBFCFD;
|
||||
padding: 4px 6px;
|
||||
margin: 4px 8px 4px 2px;
|
||||
overflow: auto;
|
||||
word-wrap: break-word;
|
||||
font-size: 9pt;
|
||||
line-height: 125%;
|
||||
font-family: monospace, fixed;
|
||||
font-size: 105%;
|
||||
}
|
||||
|
||||
div.fragment {
|
||||
padding: 4px 6px;
|
||||
margin: 4px 8px 4px 2px;
|
||||
border: 1px solid #C4CFE5;
|
||||
}
|
||||
|
||||
div.line {
|
||||
font-family: monospace, fixed;
|
||||
font-size: 13px;
|
||||
min-height: 13px;
|
||||
line-height: 1.0;
|
||||
text-wrap: unrestricted;
|
||||
white-space: -moz-pre-wrap; /* Moz */
|
||||
white-space: -pre-wrap; /* Opera 4-6 */
|
||||
white-space: -o-pre-wrap; /* Opera 7 */
|
||||
white-space: pre-wrap; /* CSS3 */
|
||||
word-wrap: break-word; /* IE 5.5+ */
|
||||
text-indent: -53px;
|
||||
padding-left: 53px;
|
||||
padding-bottom: 0px;
|
||||
margin: 0px;
|
||||
-webkit-transition-property: background-color, box-shadow;
|
||||
-webkit-transition-duration: 0.5s;
|
||||
-moz-transition-property: background-color, box-shadow;
|
||||
-moz-transition-duration: 0.5s;
|
||||
-ms-transition-property: background-color, box-shadow;
|
||||
-ms-transition-duration: 0.5s;
|
||||
-o-transition-property: background-color, box-shadow;
|
||||
-o-transition-duration: 0.5s;
|
||||
transition-property: background-color, box-shadow;
|
||||
transition-duration: 0.5s;
|
||||
}
|
||||
|
||||
div.line.glow {
|
||||
background-color: cyan;
|
||||
box-shadow: 0 0 10px cyan;
|
||||
}
|
||||
|
||||
|
||||
span.lineno {
|
||||
padding-right: 4px;
|
||||
text-align: right;
|
||||
border-right: 2px solid #0F0;
|
||||
background-color: #E8E8E8;
|
||||
white-space: pre;
|
||||
}
|
||||
span.lineno a {
|
||||
background-color: #D8D8D8;
|
||||
}
|
||||
|
||||
span.lineno a:hover {
|
||||
background-color: #C8C8C8;
|
||||
}
|
||||
|
||||
div.groupHeader {
|
||||
margin-left: 16px;
|
||||
margin-top: 12px;
|
||||
font-weight: bold;
|
||||
}
|
||||
|
||||
div.groupText {
|
||||
margin-left: 16px;
|
||||
font-style: italic;
|
||||
}
|
||||
|
||||
/* @group Code Colorization */
|
||||
|
||||
span.keyword {
|
||||
color: #008000
|
||||
}
|
||||
|
||||
span.keywordtype {
|
||||
color: #604020
|
||||
}
|
||||
|
||||
span.keywordflow {
|
||||
color: #e08000
|
||||
}
|
||||
|
||||
span.comment {
|
||||
color: #800000
|
||||
}
|
||||
|
||||
span.preprocessor {
|
||||
color: #806020
|
||||
}
|
||||
|
||||
span.stringliteral {
|
||||
color: #002080
|
||||
}
|
||||
|
||||
span.charliteral {
|
||||
color: #008080
|
||||
}
|
||||
|
||||
span.vhdldigit {
|
||||
color: #ff00ff
|
||||
}
|
||||
|
||||
span.vhdlchar {
|
||||
color: #000000
|
||||
}
|
||||
|
||||
span.vhdlkeyword {
|
||||
color: #700070
|
||||
}
|
||||
|
||||
span.vhdllogic {
|
||||
color: #ff0000
|
||||
}
|
||||
|
||||
blockquote {
|
||||
background-color: #F7F8FB;
|
||||
border-left: 2px solid #9CAFD4;
|
||||
margin: 0 24px 0 4px;
|
||||
padding: 0 12px 0 16px;
|
||||
}
|
||||
|
1596
docs/custom/default/customdoxygen.css
Normal file
1596
docs/custom/default/customdoxygen.css
Normal file
File diff suppressed because it is too large
Load Diff
121
docs/custom/doxy-boot.js
Normal file
121
docs/custom/doxy-boot.js
Normal file
@ -0,0 +1,121 @@
|
||||
$( document ).ready(function() {
|
||||
|
||||
$("div.headertitle").addClass("page-header");
|
||||
$("div.title").addClass("h1");
|
||||
|
||||
$('li > a[href="index.html"] > span').before("<i class='fa fa-cog'></i> ");
|
||||
$('li > a[href="index.html"] > span').text("BioGears");
|
||||
$('li > a[href="modules.html"] > span').before("<i class='fa fa-square'></i> ");
|
||||
$('li > a[href="namespaces.html"] > span').before("<i class='fa fa-bars'></i> ");
|
||||
$('li > a[href="annotated.html"] > span').before("<i class='fa fa-list-ul'></i> ");
|
||||
$('li > a[href="classes.html"] > span').before("<i class='fa fa-book'></i> ");
|
||||
$('li > a[href="inherits.html"] > span').before("<i class='fa fa-sitemap'></i> ");
|
||||
$('li > a[href="functions.html"] > span').before("<i class='fa fa-list'></i> ");
|
||||
$('li > a[href="functions_func.html"] > span').before("<i class='fa fa-list'></i> ");
|
||||
$('li > a[href="functions_vars.html"] > span').before("<i class='fa fa-list'></i> ");
|
||||
$('li > a[href="functions_enum.html"] > span').before("<i class='fa fa-list'></i> ");
|
||||
$('li > a[href="functions_eval.html"] > span').before("<i class='fa fa-list'></i> ");
|
||||
$('img[src="ftv2ns.png"]').replaceWith('<span class="label label-danger">N</span> ');
|
||||
$('img[src="ftv2cl.png"]').replaceWith('<span class="label label-danger">C</span> ');
|
||||
|
||||
$("ul.tablist").addClass("nav nav-pills nav-justified");
|
||||
$("ul.tablist").css("margin-top", "0.5em");
|
||||
$("ul.tablist").css("margin-bottom", "0.5em");
|
||||
$("li.current").addClass("active");
|
||||
$("iframe").attr("scrolling", "yes");
|
||||
|
||||
$("#nav-path > ul").addClass("breadcrumb");
|
||||
|
||||
$("table.params").addClass("table");
|
||||
$("div.ingroups").wrapInner("<small></small>");
|
||||
$("div.levels").css("margin", "0.5em");
|
||||
$("div.levels > span").addClass("btn btn-default btn-xs");
|
||||
$("div.levels > span").css("margin-right", "0.25em");
|
||||
|
||||
$("table.directory").addClass("table table-striped");
|
||||
$("div.summary > a").addClass("btn btn-default btn-xs");
|
||||
$("table.fieldtable").addClass("table");
|
||||
$(".fragment").addClass("well");
|
||||
$(".memitem").addClass("panel panel-default");
|
||||
$(".memproto").addClass("panel-heading");
|
||||
$(".memdoc").addClass("panel-body");
|
||||
$("span.mlabel").addClass("label label-info");
|
||||
|
||||
$("table.memberdecls").addClass("table");
|
||||
$("[class^=memitem]").addClass("active");
|
||||
|
||||
$("div.ah").addClass("btn btn-default");
|
||||
$("span.mlabels").addClass("pull-right");
|
||||
$("table.mlabels").css("width", "100%")
|
||||
$("td.mlabels-right").addClass("pull-right");
|
||||
|
||||
$("div.ttc").addClass("panel panel-primary");
|
||||
$("div.ttname").addClass("panel-heading");
|
||||
$("div.ttname a").css("color", 'white');
|
||||
$("div.ttdef,div.ttdoc,div.ttdeci").addClass("panel-body");
|
||||
|
||||
$('#MSearchBox').parent().remove();
|
||||
|
||||
$('div.fragment.well div.line:first').css('margin-top', '15px');
|
||||
$('div.fragment.well div.line:last').css('margin-bottom', '15px');
|
||||
|
||||
$('table.doxtable').removeClass('doxtable').addClass('table table-striped table-bordered').each(function(){
|
||||
$(this).prepend('<thead></thead>');
|
||||
$(this).find('tbody > tr:first').prependTo($(this).find('thead'));
|
||||
|
||||
$(this).find('td > span.success').parent().addClass('success');
|
||||
$(this).find('td > span.warning').parent().addClass('warning');
|
||||
$(this).find('td > span.danger').parent().addClass('danger');
|
||||
});
|
||||
|
||||
|
||||
|
||||
if($('div.fragment.well div.ttc').length > 0)
|
||||
{
|
||||
$('div.fragment.well div.line:first').parent().removeClass('fragment well');
|
||||
}
|
||||
|
||||
$('table.memberdecls').find('.memItemRight').each(function(){
|
||||
$(this).contents().appendTo($(this).siblings('.memItemLeft'));
|
||||
$(this).siblings('.memItemLeft').attr('align', 'left');
|
||||
});
|
||||
|
||||
function getOriginalWidthOfImg(img_element) {
|
||||
var t = new Image();
|
||||
t.src = (img_element.getAttribute ? img_element.getAttribute("src") : false) || img_element.src;
|
||||
return t.width;
|
||||
}
|
||||
|
||||
$('div.dyncontent').find('img').each(function(){
|
||||
if(getOriginalWidthOfImg($(this)[0]) > $('#content>div.container').width())
|
||||
$(this).css('width', '100%');
|
||||
});
|
||||
|
||||
$(".memitem").removeClass('memitem');
|
||||
$(".memproto").removeClass('memproto');
|
||||
$(".memdoc").removeClass('memdoc');
|
||||
$("span.mlabel").removeClass('mlabel');
|
||||
$("table.memberdecls").removeClass('memberdecls');
|
||||
$("[class^=memitem]").removeClass('memitem');
|
||||
$("span.mlabels").removeClass('mlabels');
|
||||
$("table.mlabels").removeClass('mlabels');
|
||||
$("td.mlabels-right").removeClass('mlabels-right');
|
||||
$(".navpath").removeClass('navpath');
|
||||
$("li.navelem").removeClass('navelem');
|
||||
$("a.el").removeClass('el');
|
||||
$("div.ah").removeClass('ah');
|
||||
$("div.header").removeClass("header");
|
||||
|
||||
$('.mdescLeft').each(function(){
|
||||
if($(this).html()==" ") {
|
||||
$(this).siblings('.mdescRight').attr('colspan', 2);
|
||||
$(this).remove();
|
||||
}
|
||||
});
|
||||
$('td.memItemLeft').each(function(){
|
||||
if($(this).siblings('.memItemRight').html()=="") {
|
||||
$(this).attr('colspan', 2);
|
||||
$(this).siblings('.memItemRight').remove();
|
||||
}
|
||||
});
|
||||
});
|
194
docs/md/#run.md#
Normal file
194
docs/md/#run.md#
Normal file
@ -0,0 +1,194 @@
|
||||
|
||||
|
||||
@page run Running YAP
|
||||
|
||||
We next describe how to invoke YAP from the command-line, either interactively or as a script:
|
||||
|
||||
* @subpage Running_YAP_Interactively
|
||||
|
||||
* @subpage
|
||||
|
||||
@page Running_YAP_Interactively Running YAP Interactively
|
||||
|
||||
Most often you will want to use YAP in interactive mode. Assuming that
|
||||
YAP is in the user's search path, the top-level can be invoked under
|
||||
Unix with the following command:
|
||||
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
yap [-s n] [-h n] [-a n] [-c IP_HOST port ] [filename]
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
All the arguments and flags are optional and have the following meaning:
|
||||
|
||||
+ -?
|
||||
print a short error message.
|
||||
+ -s _Size_
|
||||
allocate _Size_ KBytes for local and global stacks. The user may
|
||||
specify <tt>M</tt> bytes.
|
||||
+ -h _Size_
|
||||
allocate _Size_ KBytes for heap and auxiliary stacks
|
||||
+ -t _Size_
|
||||
allocate _Size_ KBytes for the trail stack
|
||||
+ -L _Size_
|
||||
SWI-compatible option to allocate _Size_ K bytes for local and global stacks, the local stack
|
||||
cannot be expanded. To avoid confusion with the load option, _Size_
|
||||
must immediately follow the letter `L`.
|
||||
+ -G _Size_
|
||||
SWI-compatible option to allocate _Size_ K bytes for local and global stacks; the global
|
||||
stack cannot be expanded
|
||||
+ -T _Size_
|
||||
SWI-compatible option to allocate _Size_ K bytes for the trail stack; the trail cannot be expanded.
|
||||
+ -l _YAP_FILE_
|
||||
compile the Prolog file _YAP_FILE_ before entering the top-level.
|
||||
+ -L _YAP_FILE_
|
||||
compile the Prolog file _YAP_FILE_ and then halt. This option is
|
||||
useful for implementing scripts.
|
||||
+ -g _Goal_
|
||||
run the goal _Goal_ before top-level. The goal is converted from
|
||||
an atom to a Prolog term.
|
||||
+ -z _Goal_
|
||||
run the goal _Goal_ as top-level. The goal is converted from
|
||||
an atom to a Prolog term.
|
||||
+ -b _BOOT_FILE_
|
||||
boot code is in Prolog file _BOOT_FILE_. The filename must define
|
||||
the predicate `'$live'/0`.
|
||||
3333333333333333333333333333333333333333333333333333333333333333333333333333333333333333333333333333333333333333333333333333333333333333333333333333333333333333333333333333connect standard streams to host <tt>IP_HOST</tt> at port <tt>port</tt>
|
||||
+ filename
|
||||
restore state saved in the given file
|
||||
+ -f
|
||||
do not consult initial files
|
||||
+ -q
|
||||
do not print informational messages
|
||||
+ --
|
||||
separator for arguments to Prolog code. These arguments are visible
|
||||
through the unix/1 built-in predicate.
|
||||
|
||||
|
||||
Note that YAP will output an error message on the following conditions:
|
||||
|
||||
+
|
||||
a file name was given but the file does not exist or is not a saved
|
||||
YAP state;
|
||||
|
||||
+
|
||||
the necessary amount of memory could not be allocated;
|
||||
|
||||
+
|
||||
the allocated memory is not enough to restore the state.
|
||||
|
||||
|
||||
When restoring a saved state, YAP will allocate the
|
||||
same amount of memory as that in use when the state was saved, unless a
|
||||
different amount is specified by flags in the command line. By default,
|
||||
YAP restores the file startup.yss from the current directory or from
|
||||
the YAP library.
|
||||
|
||||
+
|
||||
YAP usually boots from a saved state. The saved state will use the default
|
||||
installation directory to search for the YAP binary unless you define
|
||||
the environment variable YAPBINDIR.
|
||||
|
||||
+
|
||||
YAP always tries to find saved states from the current directory
|
||||
first. If it cannot it will use the environment variable YAPLIBDIR, if
|
||||
defined, or search the default library directory.
|
||||
|
||||
|
||||
YAP will try to find library files from the YAPSHAREDIR/library
|
||||
directory.
|
||||
|
||||
@subpage Running_Prolog_Files Running Prolog Files
|
||||
|
||||
YAP can also be used to run Prolog files as scripts, at least in
|
||||
Unix-like environments. A simple example is shown next (do not forget
|
||||
that the shell comments are very important):
|
||||
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
#!/usr/local/bin/yap -L --
|
||||
#
|
||||
# Hello World script file using YAP
|
||||
#
|
||||
# put a dot because of syntax errors .
|
||||
|
||||
vvvvvvvvvvvvvvvvvvvvvvvvvvv :- write('Hello World'), nl.
|
||||
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
The `#!` characters specify that the script should call the binary
|
||||
file YAP. Notice that many systems will require the complete path to the
|
||||
YAP binary. The `-L` flag indicates that YAP should consult the
|
||||
current file when booting and then halt. The remaining arguments are
|
||||
then passed to YAP. Note that YAP will skip the first lines if they
|
||||
start with `#` (the comment sign for Unix's shell). YAP will
|
||||
consult the file and execute any commands.
|
||||
|
||||
A slightly more sophisticated example is:
|
||||
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
#!/usr/bin/yap -L --
|
||||
#
|
||||
# Hello Wor ld script file using YAP
|
||||
# .
|
||||
|
||||
:- initialization(main).
|
||||
|
||||
main :- write('Hello World'), nl.
|
||||
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
The `initialization` directive tells YAP to execute the goal main
|
||||
after consulting the file. Source code is thus compiled and `main`
|
||||
executed at the end. The `.` is useful while debugging the script
|
||||
as a Prolog program: it guarantees that the syntax error will not
|
||||
propagate to the Prolog code.
|
||||
|
||||
Notice that the `--` is required so that the shell passes the extra
|
||||
arguments to YAP. As an example, consider the following script
|
||||
`dump_args`:
|
||||
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
#!/usr/bin/yap -L --
|
||||
#.
|
||||
|
||||
main( [] ).
|
||||
main( [H|T] ) :-
|
||||
write( H ), nl,
|
||||
main( T ).
|
||||
|
||||
:- unix( argv(AllArgs) ), main( AllArgs ).
|
||||
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
If you this run this script with the arguments:
|
||||
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
./dump_args -s 10000
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
the script will start an YAP process with stack size `10MB`, and
|
||||
the list of arguments to the process will be empty.
|
||||
|
||||
Often one wants to run the script as any other program, and for this it
|
||||
is convenient to ignore arguments to YAP. This is possible by using
|
||||
`L --` as in the next version of `dump_args`:
|
||||
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
#!/usr/bin/yap -L --
|
||||
|
||||
main( [] ).
|
||||
main( [H|T] ) :-
|
||||
write( H ), nl,
|
||||
main( T ).
|
||||
|
||||
:- unix( argv(AllArgs) ), main( AllArgs ).
|
||||
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
The `--` indicates the next arguments are not for YAP. Instead,
|
||||
they must be sent directly to the argv built-in. Hence, running
|
||||
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
./dump_args test
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
will write `test` on the standard output.
|
||||
|
390
docs/md/atts.md
Normal file
390
docs/md/atts.md
Normal file
@ -0,0 +1,390 @@
|
||||
|
||||
@ingroup extensions
|
||||
|
||||
YAP supports attributed variables, originally developed at OFAI by
|
||||
Christian Holzbaur. Attributes are a means of declaring that an
|
||||
arbitrary term is a property for a variable. These properties can be
|
||||
updated during forward execution. Moreover, the unification algorithm is
|
||||
aware of attributed variables and will call user defined handlers when
|
||||
trying to unify these variables.
|
||||
|
||||
Attributed variables provide an elegant abstraction over which one can
|
||||
extend Prolog systems. Their main application so far has been in
|
||||
implementing constraint handlers, such as Holzbaur's CLPQR, Fruewirth
|
||||
and Holzbaur's CHR, and CLP(BN).
|
||||
|
||||
Different Prolog systems implement attributed variables in different
|
||||
ways. Originally, YAP used the interface designed by SICStus
|
||||
Prolog. This interface is still
|
||||
available through the <tt>atts</tt> library, and is used by CLPBN.
|
||||
|
||||
From YAP-6.0.3 onwards we recommend using the hProlog, SWI style
|
||||
interface. We believe that this design is easier to understand and
|
||||
work with. Most packages included in YAP that use attributed
|
||||
variables, such as CHR, CLP(FD), and CLP(QR), rely on the SWI-Prolog
|
||||
interface.
|
||||
|
||||
+ @ref SICS_attributes
|
||||
+ @ref sicsatts
|
||||
+ @ref New_Style_Attribute_Declarations
|
||||
+ @ref AttributedVariables_Builtins
|
||||
+ @ref corout
|
||||
|
||||
### SICStus Style attribute declarations. {#SICS_attributes}
|
||||
|
||||
The YAP library `atts` implements attribute variables in the style of
|
||||
SICStus Prolog. Attributed variables work as follows:
|
||||
|
||||
+ Each attribute must be declared beforehand. Attributes are described
|
||||
as a functor with name and arity and are local to a module. Each
|
||||
Prolog module declares its own sets of attributes. Different modules
|
||||
may have attributes with the same name and arity.
|
||||
|
||||
+ The built-in put_atts/2 adds or deletes attributes to a
|
||||
variable. The variable may be unbound or may be an attributed
|
||||
variable. In the latter case, YAP discards previous values for the
|
||||
attributes.
|
||||
|
||||
+ The built-in get_atts/2 can be used to check the values of
|
||||
an attribute associated with a variable.
|
||||
|
||||
+ The unification algorithm calls the user-defined predicate
|
||||
verify_attributes/3 before trying to bind an attributed
|
||||
variable. Unification will resume after this call.
|
||||
|
||||
+ The user-defined predicate
|
||||
<tt>attribute_goal/2</tt> converts from an attribute to a goal.
|
||||
|
||||
+ The user-defined predicate
|
||||
<tt>project_attributes/2</tt> is used from a set of variables into a set of
|
||||
constraints or goals. One application of <tt>project_attributes/2</tt> is in
|
||||
the top-level, where it is used to output the set of
|
||||
floundered constraints at the end of a query.
|
||||
|
||||
|
||||
Attributes are compound terms associated with a variable. Each attribute
|
||||
has a <em>name</em> which is <em>private</em> to the module in which the
|
||||
attribute was defined. Variables may have at most one attribute with a
|
||||
name. Attribute names are defined through the following declaration:
|
||||
|
||||
~~~~~
|
||||
:- attribute AttributeSpec, ..., AttributeSpec.
|
||||
~~~~~
|
||||
|
||||
where each _AttributeSpec_ has the form ( _Name_/ _Arity_).
|
||||
One single such declaration is allowed per module _Module_.
|
||||
|
||||
Although the YAP module system is predicate based, attributes are local
|
||||
to modules. This is implemented by rewriting all calls to the
|
||||
built-ins that manipulate attributes so that attribute names are
|
||||
preprocessed depending on the module. The `user:goal_expansion/3`
|
||||
mechanism is used for this purpose.
|
||||
|
||||
|
||||
The attribute manipulation predicates always work as follows:
|
||||
|
||||
+ The first argument is the unbound variable associated with
|
||||
attributes,
|
||||
+ The second argument is a list of attributes. Each attribute will
|
||||
be a Prolog term or a constant, prefixed with the <tt>+</tt> and <tt>-</tt> unary
|
||||
operators. The prefix <tt>+</tt> may be dropped for convenience.
|
||||
|
||||
The following three procedures are available to the user. Notice that
|
||||
these built-ins are rewritten by the system into internal built-ins, and
|
||||
that the rewriting process <em>depends</em> on the module on which the
|
||||
built-ins have been invoked.
|
||||
|
||||
|
||||
The user-predicate predicate verify_attributes/3 is called when
|
||||
attempting to unify an attributed variable which might have attributes
|
||||
in some _Module_.
|
||||
|
||||
|
||||
Attributes are usually presented as goals. The following routines are
|
||||
used by built-in predicates such as call_residue/2 and by the
|
||||
Prolog top-level to display attributes:
|
||||
|
||||
|
||||
Constraint solvers must be able to project a set of constraints to a set
|
||||
of variables. This is useful when displaying the solution to a goal, but
|
||||
may also be used to manipulate computations. The user-defined
|
||||
project_attributes/2 is responsible for implementing this
|
||||
projection.
|
||||
|
||||
|
||||
The following examples are taken from the SICStus Prolog
|
||||
manual. The sketches the implementation of a simple finite domain
|
||||
`solver`. Note that an industrial strength solver would have to
|
||||
provide a wider range of functionality and that it quite likely would
|
||||
utilize a more efficient representation for the domains proper. The
|
||||
module exports a single predicate `domain( _-Var_, _?Domain_)` which
|
||||
associates _Domain_ (a list of terms) with _Var_. A variable can be
|
||||
queried for its domain by leaving _Domain_ unbound.
|
||||
|
||||
We do not present here a definition for project_attributes/2.
|
||||
Projecting finite domain constraints happens to be difficult.
|
||||
|
||||
~~~~~
|
||||
:- module(domain, [domain/2]).
|
||||
|
||||
:- use_module(library(atts)).
|
||||
:- use_module(library(ordsets), [
|
||||
ord_intersection/3,
|
||||
ord_intersect/2,
|
||||
list_to_ord_set/2
|
||||
]).
|
||||
|
||||
:- attribute dom/1.
|
||||
|
||||
verify_attributes(Var, Other, Goals) :-
|
||||
get_atts(Var, dom(Da)), !, % are we involved?
|
||||
( var(Other) -> % must be attributed then
|
||||
( get_atts(Other, dom(Db)) -> % has a domain?
|
||||
ord_intersection(Da, Db, Dc),
|
||||
Dc = [El|Els], % at least one element
|
||||
( Els = [] -> % exactly one element
|
||||
Goals = [Other=El] % implied binding
|
||||
; Goals = [],
|
||||
put_atts(Other, dom(Dc))% rescue intersection
|
||||
)
|
||||
; Goals = [],
|
||||
put_atts(Other, dom(Da)) % rescue the domain
|
||||
)
|
||||
; Goals = [],
|
||||
ord_intersect([Other], Da) % value in domain?
|
||||
).
|
||||
verify_attributes(_, _, []). % unification triggered
|
||||
% because of attributes
|
||||
% in other modules
|
||||
|
||||
attribute_goal(Var, domain(Var,Dom)) :- % interpretation as goal
|
||||
get_atts(Var, dom(Dom)).
|
||||
|
||||
domain(X, Dom) :-
|
||||
var(Dom), !,
|
||||
get_atts(X, dom(Dom)).
|
||||
domain(X, List) :-
|
||||
list_to_ord_set(List, Set),
|
||||
Set = [El|Els], % at least one element
|
||||
( Els = [] -> % exactly one element
|
||||
X = El % implied binding
|
||||
; put_atts(Fresh, dom(Set)),
|
||||
X = Fresh % may call
|
||||
% verify_attributes/3
|
||||
).
|
||||
~~~~~
|
||||
|
||||
Note that the _implied binding_ `Other=El` was deferred until after
|
||||
the completion of `verify_attribute/3`. Otherwise, there might be a
|
||||
danger of recursively invoking `verify_attribute/3`, which might bind
|
||||
`Var`, which is not allowed inside the scope of `verify_attribute/3`.
|
||||
Deferring unifications into the third argument of `verify_attribute/3`
|
||||
effectively serializes the calls to `verify_attribute/3`.
|
||||
|
||||
Assuming that the code resides in the file domain.yap, we
|
||||
can use it via:
|
||||
|
||||
~~~~~
|
||||
| ?- use_module(domain).
|
||||
~~~~~
|
||||
|
||||
Let's test it:
|
||||
|
||||
~~~~~
|
||||
| ?- domain(X,[5,6,7,1]), domain(Y,[3,4,5,6]), domain(Z,[1,6,7,8]).
|
||||
|
||||
domain(X,[1,5,6,7]),
|
||||
domain(Y,[3,4,5,6]),
|
||||
domain(Z,[1,6,7,8]) ?
|
||||
|
||||
yes
|
||||
| ?- domain(X,[5,6,7,1]), domain(Y,[3,4,5,6]), domain(Z,[1,6,7,8]),
|
||||
X=Y.
|
||||
|
||||
Y = X,
|
||||
domain(X,[5,6]),
|
||||
domain(Z,[1,6,7,8]) ?
|
||||
|
||||
yes
|
||||
| ?- domain(X,[5,6,7,1]), domain(Y,[3,4,5,6]), domain(Z,[1,6,7,8]),
|
||||
X=Y, Y=Z.
|
||||
|
||||
X = 6,
|
||||
Y = 6,
|
||||
Z = 6
|
||||
~~~~~
|
||||
|
||||
To demonstrate the use of the _Goals_ argument of
|
||||
verify_attributes/3, we give an implementation of
|
||||
freeze/2. We have to name it `myfreeze/2` in order to
|
||||
avoid a name clash with the built-in predicate of the same name.
|
||||
|
||||
~~~~~
|
||||
:- module(myfreeze, [myfreeze/2]).
|
||||
|
||||
:- use_module(library(atts)).
|
||||
|
||||
:- attribute frozen/1.
|
||||
|
||||
verify_attributes(Var, Other, Goals) :-
|
||||
get_atts(Var, frozen(Fa)), !, % are we involved?
|
||||
( var(Other) -> % must be attributed then
|
||||
( get_atts(Other, frozen(Fb)) % has a pending goal?
|
||||
-> put_atts(Other, frozen((Fa,Fb))) % rescue conjunction
|
||||
; put_atts(Other, frozen(Fa)) % rescue the pending goal
|
||||
),
|
||||
Goals = []
|
||||
; Goals = [Fa]
|
||||
).
|
||||
verify_attributes(_, _, []).
|
||||
|
||||
attribute_goal(Var, Goal) :- % interpretation as goal
|
||||
get_atts(Var, frozen(Goal)).
|
||||
|
||||
myfreeze(X, Goal) :- put_atts(Fresh, frozen(Goal)), Fresh = X. ~~~~~
|
||||
|
||||
Assuming that this code lives in file myfreeze.yap,
|
||||
we would use it via:
|
||||
|
||||
~~~~~
|
||||
| ?- use_module(myfreeze).
|
||||
| ?- myfreeze(X,print(bound(x,X))), X=2.
|
||||
|
||||
bound(x,2) % side effect
|
||||
X = 2 % bindings
|
||||
~~~~~
|
||||
|
||||
The two solvers even work together:
|
||||
|
||||
~~~~~
|
||||
| ?- myfreeze(X,print(bound(x,X))), domain(X,[1,2,3]),
|
||||
domain(Y,[2,10]), X=Y.
|
||||
|
||||
bound(x,2) % side effect
|
||||
X = 2, % bindings
|
||||
Y = 2
|
||||
~~~~~
|
||||
|
||||
The two example solvers interact via bindings to shared attributed
|
||||
variables only. More complicated interactions are likely to be found
|
||||
in more sophisticated solvers. The corresponding
|
||||
verify_attributes/3 predicates would typically refer to the
|
||||
attributes from other known solvers/modules via the module prefix in
|
||||
Module:get_atts/2`.
|
||||
|
||||
@}
|
||||
|
||||
@{
|
||||
### hProlog and SWI-Prolog style Attribute Declarations {#New_Style_Attribute_Declarations}
|
||||
|
||||
The following documentation is taken from the SWI-Prolog manual.
|
||||
|
||||
Binding an attributed variable schedules a goal to be executed at the
|
||||
first possible opportunity. In the current implementation the hooks are
|
||||
executed immediately after a successful unification of the clause-head
|
||||
or successful completion of a foreign language (built-in) predicate. Each
|
||||
attribute is associated to a module and the hook attr_unify_hook/2 is
|
||||
executed in this module. The example below realises a very simple and
|
||||
incomplete finite domain reasoner.
|
||||
|
||||
~~~~~
|
||||
:- module(domain,
|
||||
[ domain/2 % Var, ?Domain %
|
||||
]).
|
||||
:- use_module(library(ordsets)).
|
||||
|
||||
domain(X, Dom) :-
|
||||
var(Dom), !,
|
||||
get_attr(X, domain, Dom).
|
||||
domain(X, List) :-
|
||||
list_to_ord_set(List, Domain),
|
||||
put_attr(Y, domain, Domain),
|
||||
X = Y.
|
||||
|
||||
% An attributed variable with attribute value Domain has been %
|
||||
% assigned the value Y %
|
||||
|
||||
attr_unify_hook(Domain, Y) :-
|
||||
( get_attr(Y, domain, Dom2)
|
||||
-> ord_intersection(Domain, Dom2, NewDomain),
|
||||
( NewDomain == []
|
||||
-> fail
|
||||
; NewDomain = [Value]
|
||||
-> Y = Value
|
||||
; put_attr(Y, domain, NewDomain)
|
||||
)
|
||||
; var(Y)
|
||||
-> put_attr( Y, domain, Domain )
|
||||
; ord_memberchk(Y, Domain)
|
||||
).
|
||||
|
||||
% Translate attributes from this module to residual goals %
|
||||
|
||||
attribute_goals(X) -->
|
||||
{ get_attr(X, domain, List) },
|
||||
[domain(X, List)].
|
||||
~~~~~
|
||||
|
||||
Before explaining the code we give some example queries:
|
||||
|
||||
The predicate `domain/2` fetches (first clause) or assigns
|
||||
(second clause) the variable a <em>domain</em>, a set of values it can
|
||||
be unified with. In the second clause first associates the domain
|
||||
with a fresh variable and then unifies X to this variable to deal
|
||||
with the possibility that X already has a domain. The
|
||||
predicate attr_unify_hook/2 is a hook called after a variable with
|
||||
a domain is assigned a value. In the simple case where the variable
|
||||
is bound to a concrete value we simply check whether this value is in
|
||||
the domain. Otherwise we take the intersection of the domains and either
|
||||
fail if the intersection is empty (first example), simply assign the
|
||||
value if there is only one value in the intersection (second example) or
|
||||
assign the intersection as the new domain of the variable (third
|
||||
example). The nonterminal `attribute_goals/3` is used to translate
|
||||
remaining attributes to user-readable goals that, when executed, reinstate
|
||||
these attributes.
|
||||
|
||||
@}
|
||||
|
||||
|
||||
@{
|
||||
### Co-routining {#CohYroutining}
|
||||
|
||||
Prolog uses a simple left-to-right flow of control. It is sometimes
|
||||
convenient to change this control so that goals will only execute when
|
||||
sufficiently instantiated. This may result in a more "data-driven"
|
||||
execution, or may be necessary to correctly implement extensions such
|
||||
as negation by failure.
|
||||
|
||||
Initially, YAP used a separate mechanism for co-routining. Nowadays, YAP uses
|
||||
attributed variables to implement co-routining.
|
||||
|
||||
Two declarations are supported:
|
||||
|
||||
+ block/1
|
||||
The argument to `block/1` is a condition on a goal or a conjunction
|
||||
of conditions, with each element separated by commas. Each condition is
|
||||
of the form `predname( _C1_,..., _CN_)`, where _N_ is the
|
||||
arity of the goal, and each _CI_ is of the form `-`, if the
|
||||
argument must suspend until the first such variable is bound, or
|
||||
`?`, otherwise.
|
||||
|
||||
+ wait/1
|
||||
The argument to `wait/1` is a predicate descriptor or a conjunction
|
||||
of these predicates. These predicates will suspend until their first
|
||||
argument is bound.
|
||||
|
||||
|
||||
The following primitives can be used:
|
||||
|
||||
- freeze/2
|
||||
|
||||
- dif/2
|
||||
|
||||
- when/2
|
||||
|
||||
- frozen/2
|
||||
|
||||
|
||||
@}
|
||||
|
||||
@}
|
24
docs/md/c
Normal file
24
docs/md/c
Normal file
@ -0,0 +1,24 @@
|
||||
YAP Core Built-ins {#core}
|
||||
=================
|
||||
|
||||
This chapter describes the core predicates that control the execution of
|
||||
Prolog programs, provide fundamental functionality such as term manipulation or arithmetic, and support interaction with external
|
||||
resources. Many of the predicates described here have been standardised by the ISO. The standardised subset of Prolog is also known as ISO-Prolog.
|
||||
|
||||
In the description of the arguments of predicates the following
|
||||
notation will be used:
|
||||
|
||||
+ a preceding plus sign will denote an argument as an "input
|
||||
argument" - it cannot be a free variable at the time of the call;
|
||||
+ a preceding minus sign will denote an "output argument";
|
||||
+ an argument with no preceding symbol can be used in both ways.
|
||||
|
||||
|
||||
@copydoc builtins
|
||||
|
||||
|
||||
@{
|
||||
@defgroup builtins YAP Core Builtins:
|
||||
|
||||
@}
|
||||
|
8
docs/md/library.md
Normal file
8
docs/md/library.md
Normal file
@ -0,0 +1,8 @@
|
||||
|
||||
@page Library YAP Library
|
||||
|
||||
|
||||
the library_directory path (set by the
|
||||
`LIBDIR` variable in the Makefile for YAP). Several files in the
|
||||
library are originally from the public-domain Edinburgh Prolog library.
|
||||
|
4
docs/source/union/d2/d77/unionseq__val__t.rst
Normal file
4
docs/source/union/d2/d77/unionseq__val__t.rst
Normal file
@ -0,0 +1,4 @@
|
||||
Union seq_val_t
|
||||
===============
|
||||
|
||||
.. doxygenunion:: seq_val_t
|
4
docs/source/union/d4/da6/unionflag_term.rst
Normal file
4
docs/source/union/d4/da6/unionflag_term.rst
Normal file
@ -0,0 +1,4 @@
|
||||
Union flagTerm
|
||||
==============
|
||||
|
||||
.. doxygenunion:: flagTerm
|
4
docs/source/union/d5/dc7/union_r_l___node.rst
Normal file
4
docs/source/union/d5/dc7/union_r_l___node.rst
Normal file
@ -0,0 +1,4 @@
|
||||
Union RL_Node
|
||||
=============
|
||||
|
||||
.. doxygenunion:: RL_Node
|
4
docs/source/union/d9/db2/union_u.rst
Normal file
4
docs/source/union/d9/db2/union_u.rst
Normal file
@ -0,0 +1,4 @@
|
||||
Union U
|
||||
=======
|
||||
|
||||
.. doxygenunion:: U
|
4
docs/source/union/da/ded/union_c_o_n_s_u_l_t___o_b_j.rst
Normal file
4
docs/source/union/da/ded/union_c_o_n_s_u_l_t___o_b_j.rst
Normal file
@ -0,0 +1,4 @@
|
||||
Union CONSULT_OBJ
|
||||
=================
|
||||
|
||||
.. doxygenunion:: CONSULT_OBJ
|
4
docs/source/union/db/de7/union_a_i.rst
Normal file
4
docs/source/union/db/de7/union_a_i.rst
Normal file
@ -0,0 +1,4 @@
|
||||
Union AI
|
||||
========
|
||||
|
||||
.. doxygenunion:: AI
|
4
docs/source/union/dd/d52/unionstatarray__elements.rst
Normal file
4
docs/source/union/dd/d52/unionstatarray__elements.rst
Normal file
@ -0,0 +1,4 @@
|
||||
Union statarray_elements
|
||||
========================
|
||||
|
||||
.. doxygenunion:: statarray_elements
|
4
docs/source/union/dd/de1/unioncell__size__t.rst
Normal file
4
docs/source/union/dd/de1/unioncell__size__t.rst
Normal file
@ -0,0 +1,4 @@
|
||||
Union cell_size_t
|
||||
=================
|
||||
|
||||
.. doxygenunion:: cell_size_t
|
4
docs/source/union/df/d0e/unionclause__ptr.rst
Normal file
4
docs/source/union/df/d0e/unionclause__ptr.rst
Normal file
@ -0,0 +1,4 @@
|
||||
Union clause_ptr
|
||||
================
|
||||
|
||||
.. doxygenunion:: clause_ptr
|
4
docs/source/union/df/d1b/unionclause__obj.rst
Normal file
4
docs/source/union/df/d1b/unionclause__obj.rst
Normal file
@ -0,0 +1,4 @@
|
||||
Union clause_obj
|
||||
================
|
||||
|
||||
.. doxygenunion:: clause_obj
|
4
docs/source/union/df/dfa/unionoptvalue.rst
Normal file
4
docs/source/union/df/dfa/unionoptvalue.rst
Normal file
@ -0,0 +1,4 @@
|
||||
Union optvalue
|
||||
==============
|
||||
|
||||
.. doxygenunion:: optvalue
|
319
docs/sx/conf.py
Normal file
319
docs/sx/conf.py
Normal file
@ -0,0 +1,319 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# yap documentation build configuration file, created by
|
||||
# sphinx-quickstart on Tue Jan 5 11:01:36 2016.
|
||||
#
|
||||
# This file is execfile()d with the current directory set to its
|
||||
# containing dir.
|
||||
#
|
||||
# Note that not all possible configuration values are present in this
|
||||
# autogenerated file.
|
||||
#
|
||||
# All configuration values have a default; values that are commented out
|
||||
# serve to show the default.
|
||||
|
||||
import sys
|
||||
import os
|
||||
import shlex
|
||||
|
||||
# If extensions (or modules to document with autodoc) are in another directory,
|
||||
# add these directories to sys.path here. If the directory is relative to the
|
||||
# documentation root, use os.path.abspath to make it absolute, like shown here.
|
||||
#sys.path.insert(0, os.path.abspath('.'))
|
||||
sys.path.append( "/usr/local/lib/python3.6/site-packages/breathe")
|
||||
|
||||
# -- General configuration ------------------------------------------------
|
||||
|
||||
# If your documentation needs a minimal Sphinx version, state it here.
|
||||
#needs_sphinx = '1.0'
|
||||
|
||||
# Add any Sphinx extension module names here, as strings. They can be
|
||||
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
|
||||
# ones.
|
||||
extensions = [
|
||||
'sphinx.ext.autodoc',
|
||||
'sphinx.ext.doctest',
|
||||
'sphinx.ext.intersphinx',
|
||||
'sphinx.ext.todo',
|
||||
'sphinx.ext.coverage',
|
||||
'sphinx.ext.mathjax',
|
||||
'sphinx.ext.ifconfig',
|
||||
'sphinx.ext.viewcode',
|
||||
'sphinx.ext.pngmath',
|
||||
'breathe'
|
||||
]
|
||||
|
||||
# Add any paths that contain templates here, relative to this directory.
|
||||
templates_path = ['_templates']
|
||||
|
||||
# The suffix(es) of source filenames.
|
||||
# You can specify multiple suffix as a list of string:
|
||||
source_suffix = ['.rst', '.md']
|
||||
#source_suffix = '.rst'
|
||||
|
||||
# The encoding of source files.
|
||||
#source_encoding = 'utf-8-sig'
|
||||
|
||||
# The master toctree document.
|
||||
master_doc = 'index'
|
||||
|
||||
# General information about the project.
|
||||
project = u'yap'
|
||||
copyright = u'2016, Vitor Santos Costa'
|
||||
author = u'Vitor Santos Costa'
|
||||
|
||||
# The version info for the project you're documenting, acts as replacement for
|
||||
# |version| and |release|, also used in various other places throughout the
|
||||
# built documents.
|
||||
#
|
||||
# The short X.Y version.
|
||||
version = u'4.6.3'
|
||||
# The full version, including alpha/beta/rc tags.
|
||||
release = u'4.6.3'
|
||||
|
||||
# The language for content autogenerated by Sphinx. Refer to documentation
|
||||
# for a list of supported languages.
|
||||
#
|
||||
# This is also used if you do content translation via gettext catalogs.
|
||||
# Usually you set "language" from the command line for these cases.
|
||||
language = None
|
||||
|
||||
# There are two options for replacing |today|: either, you set today to some
|
||||
# non-false value, then it is used:
|
||||
#today = ''
|
||||
# Else, today_fmt is used as the format for a strftime call.
|
||||
#today_fmt = '%B %d, %Y'
|
||||
|
||||
# List of patterns, relative to source directory, that match files and
|
||||
# directories to ignore when looking for source files.
|
||||
exclude_patterns = []
|
||||
|
||||
# The reST default role (used for this markup: `text`) to use for all
|
||||
# documents.
|
||||
#default_role = None
|
||||
|
||||
# If true, '()' will be appended to :func: etc. cross-reference text.
|
||||
#add_function_parentheses = True
|
||||
|
||||
# If true, the current module name will be prepended to all description
|
||||
# unit titles (such as .. function::).
|
||||
#add_module_names = True
|
||||
|
||||
# If true, sectionauthor and moduleauthor directives will be shown in the
|
||||
# output. They are ignored by default.
|
||||
#show_authors = False
|
||||
|
||||
# The name of the Pygments (syntax highlighting) style to use.
|
||||
pygments_style = 'sphinx'
|
||||
|
||||
# A list of ignored prefixes for module index sorting.
|
||||
#modindex_common_prefix = []
|
||||
|
||||
# If true, keep warnings as "system message" paragraphs in the built documents.
|
||||
#keep_warnings = False
|
||||
|
||||
# If true, `todo` and `todoList` produce output, else they produce nothing.
|
||||
todo_include_todos = True
|
||||
|
||||
breathe_projects = { "yap": "/Users/vsc/github/yap-6.3/Debug/docs/xml" }
|
||||
|
||||
#Specify a default project:
|
||||
|
||||
breathe_default_project = "yap"
|
||||
|
||||
#Once this is done you may use the the following commands:
|
||||
|
||||
.. doxygenindex::
|
||||
.. doxygenfunction::
|
||||
.. doxygenstruct::
|
||||
.. doxygenenum::
|
||||
.. doxygentypedef::
|
||||
.. doxygenclass::
|
||||
|
||||
|
||||
# -- Options for HTML output ----------------------------------------------
|
||||
|
||||
# The theme to use for HTML and HTML Help pages. See the documentation for
|
||||
# a list of builtin themes.
|
||||
html_theme = 'alabaster'
|
||||
|
||||
# Theme options are theme-specific and customize the look and feel of a theme
|
||||
# further. For a list of options available for each theme, see the
|
||||
# documentation.
|
||||
#html_theme_options = {}
|
||||
|
||||
# Add any paths that contain custom themes here, relative to this directory.
|
||||
#html_theme_path = []
|
||||
|
||||
# The name for this set of Sphinx documents. If None, it defaults to
|
||||
# "<project> v<release> documentation".
|
||||
#html_title = None
|
||||
|
||||
# A shorter title for the navigation bar. Default is the same as html_title.
|
||||
#html_short_title = None
|
||||
|
||||
# The name of an image file (relative to this directory) to place at the top
|
||||
# of the sidebar.
|
||||
#html_logo = None
|
||||
|
||||
# The name of an image file (within the static path) to use as favicon of the
|
||||
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
|
||||
# pixels large.
|
||||
#html_favicon = None
|
||||
|
||||
# Add any paths that contain custom static files (such as style sheets) here,
|
||||
# relative to this directory. They are copied after the builtin static files,
|
||||
# so a file named "default.css" will overwrite the builtin "default.css".
|
||||
html_static_path = ['_static']
|
||||
|
||||
# Add any extra paths that contain custom files (such as robots.txt or
|
||||
# .htaccess) here, relative to this directory. These files are copied
|
||||
# directly to the root of the documentation.
|
||||
#html_extra_path = []
|
||||
|
||||
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
|
||||
# using the given strftime format.
|
||||
#html_last_updated_fmt = '%b %d, %Y'
|
||||
|
||||
# If true, SmartyPants will be used to convert quotes and dashes to
|
||||
# typographically correct entities.
|
||||
#html_use_smartypants = True
|
||||
|
||||
# Custom sidebar templates, maps document names to template names.
|
||||
#html_sidebars = {}
|
||||
|
||||
# Additional templates that should be rendered to pages, maps page names to
|
||||
# template names.
|
||||
#html_additional_pages = {}
|
||||
|
||||
# If false, no module index is generated.
|
||||
#html_domain_indices = True
|
||||
|
||||
# If false, no index is generated.
|
||||
#html_use_index = True
|
||||
|
||||
# If true, the index is split into individual pages for each letter.
|
||||
#html_split_index = False
|
||||
|
||||
# If true, links to the reST sources are added to the pages.
|
||||
#html_show_sourcelink = True
|
||||
|
||||
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
|
||||
#html_show_sphinx = True
|
||||
|
||||
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
|
||||
#html_show_copyright = True
|
||||
|
||||
# If true, an OpenSearch description file will be output, and all pages will
|
||||
# contain a <link> tag referring to it. The value of this option must be the
|
||||
# base URL from which the finished HTML is served.
|
||||
#html_use_opensearch = ''
|
||||
|
||||
# This is the file name suffix for HTML files (e.g. ".xhtml").
|
||||
#html_file_suffix = None
|
||||
|
||||
# Language to be used for generating the HTML full-text search index.
|
||||
# Sphinx supports the following languages:
|
||||
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
|
||||
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
|
||||
#html_search_language = 'en'
|
||||
|
||||
# A dictionary with options for the search language support, empty by default.
|
||||
# Now only 'ja' uses this config value
|
||||
#html_search_options = {'type': 'default'}
|
||||
|
||||
# The name of a javascript file (relative to the configuration directory) that
|
||||
# implements a search results scorer. If empty, the default will be used.
|
||||
#html_search_scorer = 'scorer.js'
|
||||
|
||||
# Output file base name for HTML help builder.
|
||||
htmlhelp_basename = 'yapdoc'
|
||||
|
||||
# -- Options for LaTeX output ---------------------------------------------
|
||||
|
||||
latex_elements = {
|
||||
# The paper size ('letterpaper' or 'a4paper').
|
||||
#'papersize': 'letterpaper',
|
||||
|
||||
# The font size ('10pt', '11pt' or '12pt').
|
||||
#'pointsize': '10pt',
|
||||
|
||||
# Additional stuff for the LaTeX preamble.
|
||||
#'preamble': '',
|
||||
|
||||
# Latex figure (float) alignment
|
||||
#'figure_align': 'htbp',
|
||||
}
|
||||
|
||||
# Grouping the document tree into LaTeX files. List of tuples
|
||||
# (source start file, target name, title,
|
||||
# author, documentclass [howto, manual, or own class]).
|
||||
latex_documents = [
|
||||
(master_doc, 'yap.tex', u'yap Documentation',
|
||||
u'Vitor Santos Costa', 'manual'),
|
||||
]
|
||||
|
||||
# The name of an image file (relative to this directory) to place at the top of
|
||||
# the title page.
|
||||
#latex_logo = None
|
||||
|
||||
# For "manual" documents, if this is true, then toplevel headings are parts,
|
||||
# not chapters.
|
||||
#latex_use_parts = False
|
||||
|
||||
# If true, show page references after internal links.
|
||||
#latex_show_pagerefs = False
|
||||
|
||||
# If true, show URL addresses after external links.
|
||||
#latex_show_urls = False
|
||||
|
||||
# Documents to append as an appendix to all manuals.
|
||||
#latex_appendices = []
|
||||
|
||||
# If false, no module index is generated.
|
||||
#latex_domain_indices = True
|
||||
|
||||
|
||||
# -- Options for manual page output ---------------------------------------
|
||||
|
||||
# One entry per manual page. List of tuples
|
||||
# (source start file, name, description, authors, manual section).
|
||||
man_pages = [
|
||||
(master_doc, 'yap', u'yap Documentation',
|
||||
[author], 1)
|
||||
]
|
||||
|
||||
# If true, show URL addresses after external links.
|
||||
#man_show_urls = False
|
||||
|
||||
|
||||
# -- Options for Texinfo output -------------------------------------------
|
||||
|
||||
# Grouping the document tree into Texinfo files. List of tuples
|
||||
# (source start file, target name, title, author,
|
||||
# dir menu entry, description, category)
|
||||
texinfo_documents = [
|
||||
(master_doc, 'yap', u'yap Documentation',
|
||||
author, 'yap', 'One line description of project.',
|
||||
'Miscellaneous'),
|
||||
]
|
||||
|
||||
# Documents to append as an appendix to all manuals.
|
||||
#texinfo_appendices = []
|
||||
|
||||
# If false, no module index is generated.
|
||||
#texinfo_domain_indices = True
|
||||
|
||||
# How to display URL addresses: 'footnote', 'no', or 'inline'.
|
||||
#texinfo_show_urls = 'footnote'
|
||||
|
||||
# If true, do not generate a @detailmenu in the "Top" node's menu.
|
||||
#texinfo_no_detailmenu = False
|
||||
|
||||
|
||||
# Example configuration for intersphinx: refer to the Python standard library.
|
||||
intersphinx_mapping = {'https://docs.python.org/': None}
|
||||
|
||||
breathe_projects = { "yap": "/Users/vsc/git/yap-6.3/Release/docs/xml/" }
|
||||
breathe_default_project = "yap"
|
||||
|
10
docs/sx/conf.py.in
Normal file
10
docs/sx/conf.py.in
Normal file
@ -0,0 +1,10 @@
|
||||
extensions = [
|
||||
breathe_projects = { "yap": "/Users/vsc/git/yap-6.3/Release/doc/xml/" }
|
||||
breathe_default_project = "yap"
|
||||
.. doxygenindex::
|
||||
.. doxygenfunction::
|
||||
.. doxygenstruct::
|
||||
.. doxygenenum::
|
||||
.. doxygentypedef::
|
||||
.. doxygenclass::
|
||||
|
319
docs/sx/conf.pyo
Normal file
319
docs/sx/conf.pyo
Normal file
@ -0,0 +1,319 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# yap documentation build configuration file, created by
|
||||
# sphinx-quickstart on Tue Jan 5 11:01:36 2016.
|
||||
#
|
||||
# This file is execfile()d with the current directory set to its
|
||||
# containing dir.
|
||||
#
|
||||
# Note that not all possible configuration values are present in this
|
||||
# autogenerated file.
|
||||
#
|
||||
# All configuration values have a default; values that are commented out
|
||||
# serve to show the default.
|
||||
|
||||
import sys
|
||||
import os
|
||||
import shlex
|
||||
|
||||
# If extensions (or modules to document with autodoc) are in another directory,
|
||||
# add these directories to sys.path here. If the directory is relative to the
|
||||
# documentation root, use os.path.abspath to make it absolute, like shown here.
|
||||
#sys.path.insert(0, os.path.abspath('.'))
|
||||
sys.path.append( "/usr/local/lib/python3.6/site-packages/breathe")
|
||||
|
||||
# -- General configuration ------------------------------------------------
|
||||
|
||||
# If your documentation needs a minimal Sphinx version, state it here.
|
||||
#needs_sphinx = '1.0'
|
||||
|
||||
# Add any Sphinx extension module names here, as strings. They can be
|
||||
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
|
||||
# ones.
|
||||
extensions = [
|
||||
'sphinx.ext.autodoc',
|
||||
'sphinx.ext.doctest',
|
||||
'sphinx.ext.intersphinx',
|
||||
'sphinx.ext.todo',
|
||||
'sphinx.ext.coverage',
|
||||
'sphinx.ext.mathjax',
|
||||
'sphinx.ext.ifconfig',
|
||||
'sphinx.ext.viewcode',
|
||||
'sphinx.ext.pngmath',
|
||||
'breathe'
|
||||
]
|
||||
|
||||
# Add any paths that contain templates here, relative to this directory.
|
||||
templates_path = ['_templates']
|
||||
|
||||
# The suffix(es) of source filenames.
|
||||
# You can specify multiple suffix as a list of string:
|
||||
source_suffix = ['.rst', '.md']
|
||||
#source_suffix = '.rst'
|
||||
|
||||
# The encoding of source files.
|
||||
#source_encoding = 'utf-8-sig'
|
||||
|
||||
# The master toctree document.
|
||||
master_doc = 'index'
|
||||
|
||||
# General information about the project.
|
||||
project = u'yap'
|
||||
copyright = u'2016, Vitor Santos Costa'
|
||||
author = u'Vitor Santos Costa'
|
||||
|
||||
# The version info for the project you're documenting, acts as replacement for
|
||||
# |version| and |release|, also used in various other places throughout the
|
||||
# built documents.
|
||||
#
|
||||
# The short X.Y version.
|
||||
version = u'4.6.3'
|
||||
# The full version, including alpha/beta/rc tags.
|
||||
release = u'4.6.3'
|
||||
|
||||
# The language for content autogenerated by Sphinx. Refer to documentation
|
||||
# for a list of supported languages.
|
||||
#
|
||||
# This is also used if you do content translation via gettext catalogs.
|
||||
# Usually you set "language" from the command line for these cases.
|
||||
language = None
|
||||
|
||||
# There are two options for replacing |today|: either, you set today to some
|
||||
# non-false value, then it is used:
|
||||
#today = ''
|
||||
# Else, today_fmt is used as the format for a strftime call.
|
||||
#today_fmt = '%B %d, %Y'
|
||||
|
||||
# List of patterns, relative to source directory, that match files and
|
||||
# directories to ignore when looking for source files.
|
||||
exclude_patterns = []
|
||||
|
||||
# The reST default role (used for this markup: `text`) to use for all
|
||||
# documents.
|
||||
#default_role = None
|
||||
|
||||
# If true, '()' will be appended to :func: etc. cross-reference text.
|
||||
#add_function_parentheses = True
|
||||
|
||||
# If true, the current module name will be prepended to all description
|
||||
# unit titles (such as .. function::).
|
||||
#add_module_names = True
|
||||
|
||||
# If true, sectionauthor and moduleauthor directives will be shown in the
|
||||
# output. They are ignored by default.
|
||||
#show_authors = False
|
||||
|
||||
# The name of the Pygments (syntax highlighting) style to use.
|
||||
pygments_style = 'sphinx'
|
||||
|
||||
# A list of ignored prefixes for module index sorting.
|
||||
#modindex_common_prefix = []
|
||||
|
||||
# If true, keep warnings as "system message" paragraphs in the built documents.
|
||||
#keep_warnings = False
|
||||
|
||||
# If true, `todo` and `todoList` produce output, else they produce nothing.
|
||||
todo_include_todos = True
|
||||
|
||||
breathe_projects = { "yap": "../../Debug/docs/xml" }
|
||||
|
||||
#Specify a default project:
|
||||
|
||||
breathe_default_project = "yap"
|
||||
|
||||
#Once this is done you may use the the following commands:
|
||||
|
||||
.. doxygenindex::
|
||||
.. doxygenfunction::
|
||||
.. doxygenstruct::
|
||||
.. doxygenenum::
|
||||
.. doxygentypedef::
|
||||
.. doxygenclass::
|
||||
|
||||
|
||||
# -- Options for HTML output ----------------------------------------------
|
||||
|
||||
# The theme to use for HTML and HTML Help pages. See the documentation for
|
||||
# a list of builtin themes.
|
||||
html_theme = 'alabaster'
|
||||
|
||||
# Theme options are theme-specific and customize the look and feel of a theme
|
||||
# further. For a list of options available for each theme, see the
|
||||
# documentation.
|
||||
#html_theme_options = {}
|
||||
|
||||
# Add any paths that contain custom themes here, relative to this directory.
|
||||
#html_theme_path = []
|
||||
|
||||
# The name for this set of Sphinx documents. If None, it defaults to
|
||||
# "<project> v<release> documentation".
|
||||
#html_title = None
|
||||
|
||||
# A shorter title for the navigation bar. Default is the same as html_title.
|
||||
#html_short_title = None
|
||||
|
||||
# The name of an image file (relative to this directory) to place at the top
|
||||
# of the sidebar.
|
||||
#html_logo = None
|
||||
|
||||
# The name of an image file (within the static path) to use as favicon of the
|
||||
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
|
||||
# pixels large.
|
||||
#html_favicon = None
|
||||
|
||||
# Add any paths that contain custom static files (such as style sheets) here,
|
||||
# relative to this directory. They are copied after the builtin static files,
|
||||
# so a file named "default.css" will overwrite the builtin "default.css".
|
||||
html_static_path = ['_static']
|
||||
|
||||
# Add any extra paths that contain custom files (such as robots.txt or
|
||||
# .htaccess) here, relative to this directory. These files are copied
|
||||
# directly to the root of the documentation.
|
||||
#html_extra_path = []
|
||||
|
||||
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
|
||||
# using the given strftime format.
|
||||
#html_last_updated_fmt = '%b %d, %Y'
|
||||
|
||||
# If true, SmartyPants will be used to convert quotes and dashes to
|
||||
# typographically correct entities.
|
||||
#html_use_smartypants = True
|
||||
|
||||
# Custom sidebar templates, maps document names to template names.
|
||||
#html_sidebars = {}
|
||||
|
||||
# Additional templates that should be rendered to pages, maps page names to
|
||||
# template names.
|
||||
#html_additional_pages = {}
|
||||
|
||||
# If false, no module index is generated.
|
||||
#html_domain_indices = True
|
||||
|
||||
# If false, no index is generated.
|
||||
#html_use_index = True
|
||||
|
||||
# If true, the index is split into individual pages for each letter.
|
||||
#html_split_index = False
|
||||
|
||||
# If true, links to the reST sources are added to the pages.
|
||||
#html_show_sourcelink = True
|
||||
|
||||
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
|
||||
#html_show_sphinx = True
|
||||
|
||||
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
|
||||
#html_show_copyright = True
|
||||
|
||||
# If true, an OpenSearch description file will be output, and all pages will
|
||||
# contain a <link> tag referring to it. The value of this option must be the
|
||||
# base URL from which the finished HTML is served.
|
||||
#html_use_opensearch = ''
|
||||
|
||||
# This is the file name suffix for HTML files (e.g. ".xhtml").
|
||||
#html_file_suffix = None
|
||||
|
||||
# Language to be used for generating the HTML full-text search index.
|
||||
# Sphinx supports the following languages:
|
||||
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
|
||||
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
|
||||
#html_search_language = 'en'
|
||||
|
||||
# A dictionary with options for the search language support, empty by default.
|
||||
# Now only 'ja' uses this config value
|
||||
#html_search_options = {'type': 'default'}
|
||||
|
||||
# The name of a javascript file (relative to the configuration directory) that
|
||||
# implements a search results scorer. If empty, the default will be used.
|
||||
#html_search_scorer = 'scorer.js'
|
||||
|
||||
# Output file base name for HTML help builder.
|
||||
htmlhelp_basename = 'yapdoc'
|
||||
|
||||
# -- Options for LaTeX output ---------------------------------------------
|
||||
|
||||
latex_elements = {
|
||||
# The paper size ('letterpaper' or 'a4paper').
|
||||
#'papersize': 'letterpaper',
|
||||
|
||||
# The font size ('10pt', '11pt' or '12pt').
|
||||
#'pointsize': '10pt',
|
||||
|
||||
# Additional stuff for the LaTeX preamble.
|
||||
#'preamble': '',
|
||||
|
||||
# Latex figure (float) alignment
|
||||
#'figure_align': 'htbp',
|
||||
}
|
||||
|
||||
# Grouping the document tree into LaTeX files. List of tuples
|
||||
# (source start file, target name, title,
|
||||
# author, documentclass [howto, manual, or own class]).
|
||||
latex_documents = [
|
||||
(master_doc, 'yap.tex', u'yap Documentation',
|
||||
u'Vitor Santos Costa', 'manual'),
|
||||
]
|
||||
|
||||
# The name of an image file (relative to this directory) to place at the top of
|
||||
# the title page.
|
||||
#latex_logo = None
|
||||
|
||||
# For "manual" documents, if this is true, then toplevel headings are parts,
|
||||
# not chapters.
|
||||
#latex_use_parts = False
|
||||
|
||||
# If true, show page references after internal links.
|
||||
#latex_show_pagerefs = False
|
||||
|
||||
# If true, show URL addresses after external links.
|
||||
#latex_show_urls = False
|
||||
|
||||
# Documents to append as an appendix to all manuals.
|
||||
#latex_appendices = []
|
||||
|
||||
# If false, no module index is generated.
|
||||
#latex_domain_indices = True
|
||||
|
||||
|
||||
# -- Options for manual page output ---------------------------------------
|
||||
|
||||
# One entry per manual page. List of tuples
|
||||
# (source start file, name, description, authors, manual section).
|
||||
man_pages = [
|
||||
(master_doc, 'yap', u'yap Documentation',
|
||||
[author], 1)
|
||||
]
|
||||
|
||||
# If true, show URL addresses after external links.
|
||||
#man_show_urls = False
|
||||
|
||||
|
||||
# -- Options for Texinfo output -------------------------------------------
|
||||
|
||||
# Grouping the document tree into Texinfo files. List of tuples
|
||||
# (source start file, target name, title, author,
|
||||
# dir menu entry, description, category)
|
||||
texinfo_documents = [
|
||||
(master_doc, 'yap', u'yap Documentation',
|
||||
author, 'yap', 'One line description of project.',
|
||||
'Miscellaneous'),
|
||||
]
|
||||
|
||||
# Documents to append as an appendix to all manuals.
|
||||
#texinfo_appendices = []
|
||||
|
||||
# If false, no module index is generated.
|
||||
#texinfo_domain_indices = True
|
||||
|
||||
# How to display URL addresses: 'footnote', 'no', or 'inline'.
|
||||
#texinfo_show_urls = 'footnote'
|
||||
|
||||
# If true, do not generate a @detailmenu in the "Top" node's menu.
|
||||
#texinfo_no_detailmenu = False
|
||||
|
||||
|
||||
# Example configuration for intersphinx: refer to the Python standard library.
|
||||
intersphinx_mapping = {'https://docs.python.org/': None}
|
||||
|
||||
breathe_projects = { "yap": "/Users/vsc/git/yap-6.3/Release/docs/xml/" }
|
||||
breathe_default_project = "yap"
|
||||
|
29
docs/sx/index.rst0
Normal file
29
docs/sx/index.rst0
Normal file
@ -0,0 +1,29 @@
|
||||
.. yap documentation master file, created by
|
||||
sphinx-quickstart on Tue Jan 5 11:01:36 2016.
|
||||
You can adapt this file completely to your liking, but it should at least
|
||||
contain the root `toctree` directive.
|
||||
|
||||
Welcome to yap's documentation!
|
||||
===============================
|
||||
|
||||
Contents:
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 2
|
||||
|
||||
.. doxygenindex::
|
||||
.. doxygenfunction::
|
||||
.. doxygenstruct::
|
||||
.. doxygenenum::
|
||||
.. doxygentypedef::
|
||||
.. doxygenclass::
|
||||
|
||||
|
||||
|
||||
Indices and tables
|
||||
==================
|
||||
|
||||
* :ref:`genindex`
|
||||
* :ref:`modindex`
|
||||
* :ref:`search`
|
||||
|
36
docs/sx/make.bat
Normal file
36
docs/sx/make.bat
Normal file
@ -0,0 +1,36 @@
|
||||
@ECHO OFF
|
||||
|
||||
pushd %~dp0
|
||||
|
||||
REM Command file for Sphinx documentation
|
||||
|
||||
if "%SPHINXBUILD%" == "" (
|
||||
set SPHINXBUILD=sphinx-build
|
||||
)
|
||||
set SOURCEDIR=source
|
||||
set BUILDDIR=build
|
||||
set SPHINXPROJ=YAP
|
||||
|
||||
if "%1" == "" goto help
|
||||
|
||||
%SPHINXBUILD% >NUL 2>NUL
|
||||
if errorlevel 9009 (
|
||||
echo.
|
||||
echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
|
||||
echo.installed, then set the SPHINXBUILD environment variable to point
|
||||
echo.to the full path of the 'sphinx-build' executable. Alternatively you
|
||||
echo.may add the Sphinx directory to PATH.
|
||||
echo.
|
||||
echo.If you don't have Sphinx installed, grab it from
|
||||
echo.http://sphinx-doc.org/
|
||||
exit /b 1
|
||||
)
|
||||
|
||||
%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS%
|
||||
goto end
|
||||
|
||||
:help
|
||||
%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS%
|
||||
|
||||
:end
|
||||
popd
|
@ -684,6 +684,8 @@ extern X_API YAP_Int YAP_FunctorToInt(YAP_Functor At);
|
||||
|
||||
extern X_API YAP_Functor YAP_IntToFunctor(YAP_Int i);
|
||||
|
||||
extern X_API YAP_PredEntryPtr YAP_TopGoal(void);
|
||||
|
||||
#define YAP_InitCPred(N, A, F) YAP_UserCPredicate(N, F, A)
|
||||
|
||||
__END_DECLS
|
||||
|
1
ipykernel
Submodule
1
ipykernel
Submodule
@ -0,0 +1 @@
|
||||
Subproject commit 629ac54cae9767310616d47d769665453619ac64
|
14
kernel
Normal file
14
kernel
Normal file
@ -0,0 +1,14 @@
|
||||
mkdir tmp_k
|
||||
cd tmp_k
|
||||
cp -a ~/github/ipykernel/* .
|
||||
for i in *py */*py */*/*py; do
|
||||
sed -i '.bk' -e 's/ipkernel/yapkernel/g' $i
|
||||
sed -i '.bk' -e 's/ipykernel/yap_kernel/g' $i
|
||||
sed -i '.bk' -e 's/IPKernelApp/YAP_KernelApp/g' $i
|
||||
sed -i '.bk' -e 's/IPythonKernel/YAPKernel/g' $i
|
||||
sed -i '.bk' -e 's/IPKernel/YAPKernel/g' $i
|
||||
done
|
||||
mv ipykernel yap_kernel
|
||||
mv ipykernel_launcher.py yap_kernel_launcher.py
|
||||
mv yap_kernel/ipkernel.py yap_kernel/yapkernel.py
|
||||
|
31
library/system/sys_config.h
Normal file
31
library/system/sys_config.h
Normal file
@ -0,0 +1,31 @@
|
||||
/* Define to 1 if you have the <openssl/ripemd.h> header file. */
|
||||
#ifndef HAVE_APACHE2_UTIL_MD5_H
|
||||
/* #undef HAVE_APACHE2_UTIL_MD5_H */
|
||||
#endif
|
||||
|
||||
/* Define to 1 if you have the <openssl/ripemd.h> header file. */
|
||||
#ifndef HAVE_APR_1_APR_MD5_H
|
||||
/* #undef HAVE_APR_1_APR_MD5_H */
|
||||
#endif
|
||||
|
||||
|
||||
/* Define to 1 if you have the <openssl/md5.h> header file. */
|
||||
#ifndef HAVE_OPENSSL_MD5_H
|
||||
/* #undef HAVE_OPENSSL_MD5_H */
|
||||
#endif
|
||||
|
||||
/* Define to 1 if you have the <openssl/ripemd.h> header file. */
|
||||
#ifndef HAVE_OPENSSL_RIPEMD_H
|
||||
/* #undef HAVE_OPENSSL_RIPEMD_H */
|
||||
#endif
|
||||
|
||||
/* "Define if you have the crypt function." */
|
||||
#ifndef HAVE_CRYPT
|
||||
/* #undef HAVE_CRYPT */
|
||||
#endif
|
||||
|
||||
/* Define to 1 if you have the <crypt.h> header file. */
|
||||
#ifndef HAVE_CRYPT_H
|
||||
/* #undef HAVE_CRYPT_H */
|
||||
#endif
|
||||
|
34
packages/Makefile.sav
Normal file
34
packages/Makefile.sav
Normal file
@ -0,0 +1,34 @@
|
||||
CODEGEN=code-generator.py
|
||||
DISJUNCTOR = -DDISJUNCTOR
|
||||
|
||||
yap: gecode_yap.so gecode.yap
|
||||
|
||||
gecode_yap.o: gecode_yap.cc gecode-common.icc \
|
||||
gecode_yap_cc_impl_auto_generated.icc \
|
||||
gecode_yap_cc_init_auto_generated.icc \
|
||||
gecode_yap_cc_forward_auto_generated.icc \
|
||||
disjunctor.icc disjunctor.hh
|
||||
$(CXX) -Wall -ggdb -c -shared -fPIC $(DISJUNCTOR) $(CPPFLAGS) $(CXXFLAGS) $(LDFLAGS) -o $@ $<
|
||||
|
||||
gecode_yap.so: gecode_yap.o
|
||||
$(CXX) -shared $(LDFLAGS) -o $@ $< -lgecodeint -lgecodeset -lgecodesearch
|
||||
|
||||
gecode_yap_cc_init_auto_generated.icc: $(CODEGEN)
|
||||
python $< -t yap-cc-init > $@
|
||||
|
||||
gecode_yap_cc_impl_auto_generated.icc: $(CODEGEN)
|
||||
python $< -t yap-cc-impl > $@
|
||||
|
||||
gecode_yap_cc_forward_auto_generated.icc: $(CODEGEN)
|
||||
python $< -t yap-cc-forward > $@
|
||||
|
||||
gecode_yap_auto_generated.yap: $(CODEGEN)
|
||||
python $< -t yap-prolog > $@
|
||||
|
||||
gecode.yap: gecode_yap_hand_written.yap gecode_yap_auto_generated.yap
|
||||
cat $^ > $@
|
||||
|
||||
.PHONY: yap
|
||||
|
||||
clean:
|
||||
-rm -f *.o *.so *~ *_auto_generated* gecode.yap
|
19
packages/bdd/bdd.md
Normal file
19
packages/bdd/bdd.md
Normal file
@ -0,0 +1,19 @@
|
||||
Boolean Decision Making in YAP {#BDDs}
|
||||
==============================
|
||||
|
||||
This is an experimental interface to BDD libraries. It is not as
|
||||
sophisticated as simplecudd, but it should be fun to play around with bdds.
|
||||
|
||||
It currently works with cudd only, although it should be possible to
|
||||
port to other libraries. It requires the ability to dynamically link
|
||||
with cudd binaries. This works:
|
||||
|
||||
- in fedora with standard package
|
||||
- in osx with hand-compiled and ports package
|
||||
|
||||
In ubuntu, you may want to install the fedora rpm, or just download the package from the original
|
||||
and compile it.
|
||||
|
||||
|
||||
+ @ref BDDsPL
|
||||
+ @ref CUDD
|
131
packages/configure.in
Normal file
131
packages/configure.in
Normal file
@ -0,0 +1,131 @@
|
||||
dnl Gecode support
|
||||
|
||||
AC_ARG_WITH(gecode,
|
||||
[ --with-gecode install gecode library],
|
||||
[use_gecode="$withval"], use_gecode=no)
|
||||
|
||||
if test "$use_gecode" = no; then
|
||||
PKG_GECODE=""
|
||||
else
|
||||
PKG_GECODE="packages/gecode"
|
||||
if test -d "$use_gecode"; then
|
||||
CPPFLAGS="$CPPFLAGS -I $use_gecode/include"
|
||||
GECODE_EXTRALIBS="-L $use_gecode/lib"
|
||||
GECODE_INCLUDES=" $use_gecode/include"
|
||||
GECODE_PATH="$use_gecode/bin"
|
||||
dnl gecode is usually in /usr/local
|
||||
elif test -d /usr/local/include/gecode -a x$SYSROOT = x; then
|
||||
CPPFLAGS="$CPPFLAGS -I/usr/local/include"
|
||||
GECODE_EXTRALIBS="-L/usr/local/lib"
|
||||
fi
|
||||
fi
|
||||
|
||||
AC_SUBST(PKG_GECODE)
|
||||
|
||||
AC_MSG_CHECKING([if dynamic arrays are supported])
|
||||
|
||||
AC_COMPILE_IFELSE([
|
||||
AC_LANG_PROGRAM([[void foo(int n) { int a[n]; a[1]=0; }]],[[foo(3);]])
|
||||
],[
|
||||
AC_MSG_RESULT([yes])
|
||||
AC_DEFINE([HAVE_DYNARRAY],[1],[Define if dynamic arrays are supported])
|
||||
],[
|
||||
AC_MSG_RESULT([no])
|
||||
])
|
||||
|
||||
GECODE_VERSION=unknown-gecode-version
|
||||
|
||||
|
||||
if test "$use_gecode" != no; then
|
||||
if test $i_am_cross_compiling = yes
|
||||
then
|
||||
GECODE_VERSION=4.2.1
|
||||
elif test x"$WINDOWS" = x; then
|
||||
if test $ac_cv_sizeof_int_p = 8; then
|
||||
GECODE_ARCH=x64
|
||||
else
|
||||
GECODE_ARCH=i386
|
||||
fi
|
||||
if test "$use_gecode" = yes; then
|
||||
GECODE_PATH=`which fzn-gecode`
|
||||
GECODE_PATH=`dirname "$GECODE_PATH"`
|
||||
else
|
||||
GECODE_PATH="$use_gecode"
|
||||
fi
|
||||
GECODE_INCLUDES="$GECODE_PATH"/include
|
||||
GECODE_LIBDIR="\"$GECODE_PATH\""/lib
|
||||
GECODE_BINDIR="\"$GECODE_PATH\""/bin
|
||||
GECODE_TVERSION=`echo "$GECODE_PATH"/lib/GecodeDriver-*-d-"$GECODE_ARCH".lib | cut -d'-' -f 2-4`
|
||||
GECODE_VERSION=`echo "$GECODE_TVERSION"|sed 's/-/\./g'`
|
||||
GECODE_MAJOR=`echo $GECODE_VERSION| sed 's/\(^.\).*/\1/'`
|
||||
GE_SUF="-$GECODE_TVERSION-r-$GECODE_ARCH.dll"
|
||||
GECODE_EXTRALIBS="$GECODE_BINDIR/GecodeDriver$GE_SUF \
|
||||
$GECODE_BINDIR/GecodeSupport$GE_SUF \
|
||||
$GECODE_BINDIR/GecodeKernel$GE_SUF \
|
||||
$GECODE_BINDIR/GecodeInt$GE_SUF \
|
||||
$GECODE_BINDIR/GecodeSet$GE_SUF \
|
||||
$GECODE_BINDIR/GecodeSearch$GE_SUF"
|
||||
if test "$GECODE_MAJOR" = "4"; then
|
||||
GECODE_EXTRALIBS="$GECODE_EXTRALIBS $GECODE_BINDIR/GecodeFloat$GE_SUF"
|
||||
fi
|
||||
else
|
||||
AC_MSG_CHECKING([gecode version])
|
||||
AC_RUN_IFELSE([AC_LANG_PROGRAM([[
|
||||
#include "gecode/support/config.hpp"
|
||||
#include <stdio.h>
|
||||
]],[[
|
||||
FILE* out = fopen("conftest.out","w");
|
||||
fprintf(out,"%s\n",GECODE_VERSION);
|
||||
fclose(out);
|
||||
return 0;
|
||||
]])],[GECODE_VERSION=$(cat conftest.out)
|
||||
AC_MSG_RESULT([$GECODE_VERSION])],
|
||||
[AC_MSG_ERROR([cannot determine gecode version])])
|
||||
case "$target_os" in
|
||||
*darwin*)
|
||||
if test "$use_gecode" = yes; then
|
||||
AC_MSG_CHECKING([if -framework gecode is required])
|
||||
AC_LANG_PUSH([C++])
|
||||
saved_CXXFLAGS="$CXXFLAGS"
|
||||
CXXFLAGS="$CXXFLAGS -framework gecode"
|
||||
AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[
|
||||
#include "gecode/int.hh"
|
||||
]],[[
|
||||
Gecode::Exception e("","");
|
||||
return 0;
|
||||
]])],[GECODE_EXTRALIBS="-framework gecode"
|
||||
AC_MSG_RESULT([yes])],
|
||||
[AC_MSG_RESULT([no])])
|
||||
AC_LANG_POP()
|
||||
CXXFLAGS="$saved_CXXFLAGS"
|
||||
fi
|
||||
;;
|
||||
esac
|
||||
|
||||
GECODE_MAJOR=`echo $GECODE_VERSION| sed 's/\(^.\).*/\1/'`
|
||||
if test "$GECODE_MAJOR" = "3"; then
|
||||
GECODE_EXTRALIBS="$GECODE_EXTRALIBS -lgecodesupport -lgecodekernel -lgecodeint -lgecodeset -lgecodesearch"
|
||||
else
|
||||
GECODE_EXTRALIBS="$GECODE_EXTRALIBS -lgecodesupport -lgecodekernel -lgecodefloat -lgecodeint -lgecodeset -lgecodesearch"
|
||||
fi
|
||||
|
||||
fi
|
||||
fi
|
||||
|
||||
saved_CPPFLAGS="$CPPFLAGS"
|
||||
CPPFLAGS="$CPPFLAGS -I \"$GECODE_INCLUDES\""
|
||||
|
||||
AC_CHECK_HEADER(gecode/support/config.hpp)
|
||||
|
||||
AC_SUBST(GECODE_EXTRALIBS)
|
||||
AC_SUBST(GECODE_INCLUDES)
|
||||
AC_SUBST(GECODE_VERSION)
|
||||
AC_SUBST(GECODE_MAJOR)
|
||||
|
||||
|
||||
if test "$PKG_GECODE" = "packages/gecode"; then
|
||||
AC_CONFIG_FILES([packages/gecode/Makefile])
|
||||
fi
|
||||
|
||||
CPPFLAGS="$saved_CPPFLAGS"
|
||||
|
1
packages/gecode/5.1.0/gecode-version.txt
vendored
Normal file
1
packages/gecode/5.1.0/gecode-version.txt
vendored
Normal file
@ -0,0 +1 @@
|
||||
5.1.0
|
3493
packages/gecode/5.1.0/gecode_yap_auto_generated.yap
vendored
Normal file
3493
packages/gecode/5.1.0/gecode_yap_auto_generated.yap
vendored
Normal file
File diff suppressed because it is too large
Load Diff
28
packages/gecode/5.1.0/gecode_yap_cc_forward_auto_generated.icc
vendored
Normal file
28
packages/gecode/5.1.0/gecode_yap_cc_forward_auto_generated.icc
vendored
Normal file
@ -0,0 +1,28 @@
|
||||
// -*- c++ -*-
|
||||
//=============================================================================
|
||||
// Copyright (C) 2011 by Denys Duchier
|
||||
//
|
||||
// This program is free software: you can redistribute it and/or modify it
|
||||
// under the terms of the GNU Lesser General Public License as published by the
|
||||
// Free Software Foundation, either version 3 of the License, or (at your
|
||||
// option) any later version.
|
||||
//
|
||||
// This program is distributed in the hope that it will be useful, but WITHOUT
|
||||
// ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
// FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
|
||||
// more details.
|
||||
//
|
||||
// You should have received a copy of the GNU Lesser General Public License
|
||||
// along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
//=============================================================================
|
||||
|
||||
static RestartMode gecode_RestartMode_from_term(YAP_Term);
|
||||
static FloatRelType gecode_FloatRelType_from_term(YAP_Term);
|
||||
static ReifyMode gecode_ReifyMode_from_term(YAP_Term);
|
||||
static IntRelType gecode_IntRelType_from_term(YAP_Term);
|
||||
static BoolOpType gecode_BoolOpType_from_term(YAP_Term);
|
||||
static IntPropLevel gecode_IntPropLevel_from_term(YAP_Term);
|
||||
static TaskType gecode_TaskType_from_term(YAP_Term);
|
||||
static TraceEvent gecode_TraceEvent_from_term(YAP_Term);
|
||||
static SetRelType gecode_SetRelType_from_term(YAP_Term);
|
||||
static SetOpType gecode_SetOpType_from_term(YAP_Term);
|
5176
packages/gecode/5.1.0/gecode_yap_cc_impl_auto_generated.icc
vendored
Normal file
5176
packages/gecode/5.1.0/gecode_yap_cc_impl_auto_generated.icc
vendored
Normal file
File diff suppressed because it is too large
Load Diff
660
packages/gecode/5.1.0/gecode_yap_cc_init_auto_generated.icc
vendored
Normal file
660
packages/gecode/5.1.0/gecode_yap_cc_init_auto_generated.icc
vendored
Normal file
@ -0,0 +1,660 @@
|
||||
// -*- c++ -*-
|
||||
//=============================================================================
|
||||
// Copyright (C) 2011 by Denys Duchier
|
||||
//
|
||||
// This program is free software: you can redistribute it and/or modify it
|
||||
// under the terms of the GNU Lesser General Public License as published by the
|
||||
// Free Software Foundation, either version 3 of the License, or (at your
|
||||
// option) any later version.
|
||||
//
|
||||
// This program is distributed in the hope that it will be useful, but WITHOUT
|
||||
// ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
// FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
|
||||
// more details.
|
||||
//
|
||||
// You should have received a copy of the GNU Lesser General Public License
|
||||
// along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
//=============================================================================
|
||||
|
||||
{ YAP_Atom X= YAP_LookupAtom("RM_NONE");
|
||||
gecode_RM_NONE = YAP_MkAtomTerm(X);
|
||||
YAP_AtomGetHold(X); }
|
||||
{ YAP_Atom X= YAP_LookupAtom("RM_CONSTANT");
|
||||
gecode_RM_CONSTANT = YAP_MkAtomTerm(X);
|
||||
YAP_AtomGetHold(X); }
|
||||
{ YAP_Atom X= YAP_LookupAtom("RM_LINEAR");
|
||||
gecode_RM_LINEAR = YAP_MkAtomTerm(X);
|
||||
YAP_AtomGetHold(X); }
|
||||
{ YAP_Atom X= YAP_LookupAtom("RM_LUBY");
|
||||
gecode_RM_LUBY = YAP_MkAtomTerm(X);
|
||||
YAP_AtomGetHold(X); }
|
||||
{ YAP_Atom X= YAP_LookupAtom("RM_GEOMETRIC");
|
||||
gecode_RM_GEOMETRIC = YAP_MkAtomTerm(X);
|
||||
YAP_AtomGetHold(X); }
|
||||
|
||||
{ YAP_Atom X= YAP_LookupAtom("FRT_EQ");
|
||||
gecode_FRT_EQ = YAP_MkAtomTerm(X);
|
||||
YAP_AtomGetHold(X); }
|
||||
{ YAP_Atom X= YAP_LookupAtom("FRT_NQ");
|
||||
gecode_FRT_NQ = YAP_MkAtomTerm(X);
|
||||
YAP_AtomGetHold(X); }
|
||||
{ YAP_Atom X= YAP_LookupAtom("FRT_LQ");
|
||||
gecode_FRT_LQ = YAP_MkAtomTerm(X);
|
||||
YAP_AtomGetHold(X); }
|
||||
{ YAP_Atom X= YAP_LookupAtom("FRT_LE");
|
||||
gecode_FRT_LE = YAP_MkAtomTerm(X);
|
||||
YAP_AtomGetHold(X); }
|
||||
{ YAP_Atom X= YAP_LookupAtom("FRT_GQ");
|
||||
gecode_FRT_GQ = YAP_MkAtomTerm(X);
|
||||
YAP_AtomGetHold(X); }
|
||||
{ YAP_Atom X= YAP_LookupAtom("FRT_GR");
|
||||
gecode_FRT_GR = YAP_MkAtomTerm(X);
|
||||
YAP_AtomGetHold(X); }
|
||||
|
||||
{ YAP_Atom X= YAP_LookupAtom("RM_EQV");
|
||||
gecode_RM_EQV = YAP_MkAtomTerm(X);
|
||||
YAP_AtomGetHold(X); }
|
||||
{ YAP_Atom X= YAP_LookupAtom("RM_IMP");
|
||||
gecode_RM_IMP = YAP_MkAtomTerm(X);
|
||||
YAP_AtomGetHold(X); }
|
||||
{ YAP_Atom X= YAP_LookupAtom("RM_PMI");
|
||||
gecode_RM_PMI = YAP_MkAtomTerm(X);
|
||||
YAP_AtomGetHold(X); }
|
||||
|
||||
{ YAP_Atom X= YAP_LookupAtom("IRT_EQ");
|
||||
gecode_IRT_EQ = YAP_MkAtomTerm(X);
|
||||
YAP_AtomGetHold(X); }
|
||||
{ YAP_Atom X= YAP_LookupAtom("IRT_NQ");
|
||||
gecode_IRT_NQ = YAP_MkAtomTerm(X);
|
||||
YAP_AtomGetHold(X); }
|
||||
{ YAP_Atom X= YAP_LookupAtom("IRT_LQ");
|
||||
gecode_IRT_LQ = YAP_MkAtomTerm(X);
|
||||
YAP_AtomGetHold(X); }
|
||||
{ YAP_Atom X= YAP_LookupAtom("IRT_LE");
|
||||
gecode_IRT_LE = YAP_MkAtomTerm(X);
|
||||
YAP_AtomGetHold(X); }
|
||||
{ YAP_Atom X= YAP_LookupAtom("IRT_GQ");
|
||||
gecode_IRT_GQ = YAP_MkAtomTerm(X);
|
||||
YAP_AtomGetHold(X); }
|
||||
{ YAP_Atom X= YAP_LookupAtom("IRT_GR");
|
||||
gecode_IRT_GR = YAP_MkAtomTerm(X);
|
||||
YAP_AtomGetHold(X); }
|
||||
|
||||
{ YAP_Atom X= YAP_LookupAtom("BOT_AND");
|
||||
gecode_BOT_AND = YAP_MkAtomTerm(X);
|
||||
YAP_AtomGetHold(X); }
|
||||
{ YAP_Atom X= YAP_LookupAtom("BOT_OR");
|
||||
gecode_BOT_OR = YAP_MkAtomTerm(X);
|
||||
YAP_AtomGetHold(X); }
|
||||
{ YAP_Atom X= YAP_LookupAtom("BOT_IMP");
|
||||
gecode_BOT_IMP = YAP_MkAtomTerm(X);
|
||||
YAP_AtomGetHold(X); }
|
||||
{ YAP_Atom X= YAP_LookupAtom("BOT_EQV");
|
||||
gecode_BOT_EQV = YAP_MkAtomTerm(X);
|
||||
YAP_AtomGetHold(X); }
|
||||
{ YAP_Atom X= YAP_LookupAtom("BOT_XOR");
|
||||
gecode_BOT_XOR = YAP_MkAtomTerm(X);
|
||||
YAP_AtomGetHold(X); }
|
||||
|
||||
{ YAP_Atom X= YAP_LookupAtom("IPL_DEF");
|
||||
gecode_IPL_DEF = YAP_MkAtomTerm(X);
|
||||
YAP_AtomGetHold(X); }
|
||||
{ YAP_Atom X= YAP_LookupAtom("IPL_VAL");
|
||||
gecode_IPL_VAL = YAP_MkAtomTerm(X);
|
||||
YAP_AtomGetHold(X); }
|
||||
{ YAP_Atom X= YAP_LookupAtom("IPL_BND");
|
||||
gecode_IPL_BND = YAP_MkAtomTerm(X);
|
||||
YAP_AtomGetHold(X); }
|
||||
{ YAP_Atom X= YAP_LookupAtom("IPL_DOM");
|
||||
gecode_IPL_DOM = YAP_MkAtomTerm(X);
|
||||
YAP_AtomGetHold(X); }
|
||||
{ YAP_Atom X= YAP_LookupAtom("IPL_SPEED");
|
||||
gecode_IPL_SPEED = YAP_MkAtomTerm(X);
|
||||
YAP_AtomGetHold(X); }
|
||||
{ YAP_Atom X= YAP_LookupAtom("IPL_MEMORY");
|
||||
gecode_IPL_MEMORY = YAP_MkAtomTerm(X);
|
||||
YAP_AtomGetHold(X); }
|
||||
{ YAP_Atom X= YAP_LookupAtom("IPL_BASIC");
|
||||
gecode_IPL_BASIC = YAP_MkAtomTerm(X);
|
||||
YAP_AtomGetHold(X); }
|
||||
{ YAP_Atom X= YAP_LookupAtom("IPL_ADVANCED");
|
||||
gecode_IPL_ADVANCED = YAP_MkAtomTerm(X);
|
||||
YAP_AtomGetHold(X); }
|
||||
{ YAP_Atom X= YAP_LookupAtom("IPL_BASIC_ADVANCED");
|
||||
gecode_IPL_BASIC_ADVANCED = YAP_MkAtomTerm(X);
|
||||
YAP_AtomGetHold(X); }
|
||||
|
||||
{ YAP_Atom X= YAP_LookupAtom("TT_FIXP");
|
||||
gecode_TT_FIXP = YAP_MkAtomTerm(X);
|
||||
YAP_AtomGetHold(X); }
|
||||
{ YAP_Atom X= YAP_LookupAtom("TT_FIXS");
|
||||
gecode_TT_FIXS = YAP_MkAtomTerm(X);
|
||||
YAP_AtomGetHold(X); }
|
||||
{ YAP_Atom X= YAP_LookupAtom("TT_FIXE");
|
||||
gecode_TT_FIXE = YAP_MkAtomTerm(X);
|
||||
YAP_AtomGetHold(X); }
|
||||
|
||||
{ YAP_Atom X= YAP_LookupAtom("TE_INIT");
|
||||
gecode_TE_INIT = YAP_MkAtomTerm(X);
|
||||
YAP_AtomGetHold(X); }
|
||||
{ YAP_Atom X= YAP_LookupAtom("TE_PRUNE");
|
||||
gecode_TE_PRUNE = YAP_MkAtomTerm(X);
|
||||
YAP_AtomGetHold(X); }
|
||||
{ YAP_Atom X= YAP_LookupAtom("TE_FIX");
|
||||
gecode_TE_FIX = YAP_MkAtomTerm(X);
|
||||
YAP_AtomGetHold(X); }
|
||||
{ YAP_Atom X= YAP_LookupAtom("TE_FAIL");
|
||||
gecode_TE_FAIL = YAP_MkAtomTerm(X);
|
||||
YAP_AtomGetHold(X); }
|
||||
{ YAP_Atom X= YAP_LookupAtom("TE_DONE");
|
||||
gecode_TE_DONE = YAP_MkAtomTerm(X);
|
||||
YAP_AtomGetHold(X); }
|
||||
{ YAP_Atom X= YAP_LookupAtom("TE_PROPAGATE");
|
||||
gecode_TE_PROPAGATE = YAP_MkAtomTerm(X);
|
||||
YAP_AtomGetHold(X); }
|
||||
{ YAP_Atom X= YAP_LookupAtom("TE_COMMIT");
|
||||
gecode_TE_COMMIT = YAP_MkAtomTerm(X);
|
||||
YAP_AtomGetHold(X); }
|
||||
|
||||
{ YAP_Atom X= YAP_LookupAtom("SRT_EQ");
|
||||
gecode_SRT_EQ = YAP_MkAtomTerm(X);
|
||||
YAP_AtomGetHold(X); }
|
||||
{ YAP_Atom X= YAP_LookupAtom("SRT_NQ");
|
||||
gecode_SRT_NQ = YAP_MkAtomTerm(X);
|
||||
YAP_AtomGetHold(X); }
|
||||
{ YAP_Atom X= YAP_LookupAtom("SRT_SUB");
|
||||
gecode_SRT_SUB = YAP_MkAtomTerm(X);
|
||||
YAP_AtomGetHold(X); }
|
||||
{ YAP_Atom X= YAP_LookupAtom("SRT_SUP");
|
||||
gecode_SRT_SUP = YAP_MkAtomTerm(X);
|
||||
YAP_AtomGetHold(X); }
|
||||
{ YAP_Atom X= YAP_LookupAtom("SRT_DISJ");
|
||||
gecode_SRT_DISJ = YAP_MkAtomTerm(X);
|
||||
YAP_AtomGetHold(X); }
|
||||
{ YAP_Atom X= YAP_LookupAtom("SRT_CMPL");
|
||||
gecode_SRT_CMPL = YAP_MkAtomTerm(X);
|
||||
YAP_AtomGetHold(X); }
|
||||
{ YAP_Atom X= YAP_LookupAtom("SRT_LQ");
|
||||
gecode_SRT_LQ = YAP_MkAtomTerm(X);
|
||||
YAP_AtomGetHold(X); }
|
||||
{ YAP_Atom X= YAP_LookupAtom("SRT_LE");
|
||||
gecode_SRT_LE = YAP_MkAtomTerm(X);
|
||||
YAP_AtomGetHold(X); }
|
||||
{ YAP_Atom X= YAP_LookupAtom("SRT_GQ");
|
||||
gecode_SRT_GQ = YAP_MkAtomTerm(X);
|
||||
YAP_AtomGetHold(X); }
|
||||
{ YAP_Atom X= YAP_LookupAtom("SRT_GR");
|
||||
gecode_SRT_GR = YAP_MkAtomTerm(X);
|
||||
YAP_AtomGetHold(X); }
|
||||
|
||||
{ YAP_Atom X= YAP_LookupAtom("SOT_UNION");
|
||||
gecode_SOT_UNION = YAP_MkAtomTerm(X);
|
||||
YAP_AtomGetHold(X); }
|
||||
{ YAP_Atom X= YAP_LookupAtom("SOT_DUNION");
|
||||
gecode_SOT_DUNION = YAP_MkAtomTerm(X);
|
||||
YAP_AtomGetHold(X); }
|
||||
{ YAP_Atom X= YAP_LookupAtom("SOT_INTER");
|
||||
gecode_SOT_INTER = YAP_MkAtomTerm(X);
|
||||
YAP_AtomGetHold(X); }
|
||||
{ YAP_Atom X= YAP_LookupAtom("SOT_MINUS");
|
||||
gecode_SOT_MINUS = YAP_MkAtomTerm(X);
|
||||
YAP_AtomGetHold(X); }
|
||||
|
||||
YAP_UserCPredicate("gecode_constraint_unary_450", gecode_constraint_unary_450, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_unary_446", gecode_constraint_unary_446, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_nvalues_340", gecode_constraint_nvalues_340, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_nvalues_342", gecode_constraint_nvalues_342, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_nvalues_336", gecode_constraint_nvalues_336, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_nvalues_338", gecode_constraint_nvalues_338, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_max_300", gecode_constraint_max_300, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_max_304", gecode_constraint_max_304, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_max_299", gecode_constraint_max_299, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_dom_198", gecode_constraint_dom_198, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_dom_204", gecode_constraint_dom_204, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_argmin_13", gecode_constraint_argmin_13, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_convex_3", gecode_constraint_convex_3, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_nooverlap_324", gecode_constraint_nooverlap_324, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_assign_21", gecode_constraint_assign_21, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_assign_30", gecode_constraint_assign_30, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_assign_27", gecode_constraint_assign_27, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_assign_19", gecode_constraint_assign_19, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_assign_33", gecode_constraint_assign_33, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_assign_23", gecode_constraint_assign_23, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_assign_36", gecode_constraint_assign_36, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_assign_25", gecode_constraint_assign_25, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_element_239", gecode_constraint_element_239, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_element_241", gecode_constraint_element_241, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_element_235", gecode_constraint_element_235, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_element_233", gecode_constraint_element_233, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_element_225", gecode_constraint_element_225, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_element_227", gecode_constraint_element_227, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_element_223", gecode_constraint_element_223, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_ite_256", gecode_constraint_ite_256, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_ite_253", gecode_constraint_ite_253, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_ite_255", gecode_constraint_ite_255, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_ite_258", gecode_constraint_ite_258, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_unary_439", gecode_constraint_unary_439, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_nroot_334", gecode_constraint_nroot_334, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_circuit_94", gecode_constraint_circuit_94, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_circuit_83", gecode_constraint_circuit_83, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_dom_196", gecode_constraint_dom_196, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_dom_197", gecode_constraint_dom_197, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_dom_194", gecode_constraint_dom_194, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_dom_190", gecode_constraint_dom_190, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_dom_221", gecode_constraint_dom_221, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_dom_218", gecode_constraint_dom_218, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_dom_181", gecode_constraint_dom_181, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_dom_203", gecode_constraint_dom_203, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_dom_202", gecode_constraint_dom_202, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_dom_200", gecode_constraint_dom_200, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_argmax_12", gecode_constraint_argmax_12, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_nooverlap_329", gecode_constraint_nooverlap_329, 8);
|
||||
YAP_UserCPredicate("gecode_constraint_nooverlap_330", gecode_constraint_nooverlap_330, 8);
|
||||
YAP_UserCPredicate("gecode_constraint_element_243", gecode_constraint_element_243, 7);
|
||||
YAP_UserCPredicate("gecode_constraint_element_237", gecode_constraint_element_237, 7);
|
||||
YAP_UserCPredicate("gecode_constraint_element_231", gecode_constraint_element_231, 7);
|
||||
YAP_UserCPredicate("gecode_constraint_element_229", gecode_constraint_element_229, 7);
|
||||
YAP_UserCPredicate("gecode_constraint_max_302", gecode_constraint_max_302, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_max_303", gecode_constraint_max_303, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_unshare_453", gecode_constraint_unshare_453, 2);
|
||||
YAP_UserCPredicate("gecode_constraint_unshare_451", gecode_constraint_unshare_451, 2);
|
||||
YAP_UserCPredicate("gecode_constraint_path_353", gecode_constraint_path_353, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_path_352", gecode_constraint_path_352, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_branch_63", gecode_constraint_branch_63, 7);
|
||||
YAP_UserCPredicate("gecode_constraint_branch_54", gecode_constraint_branch_54, 7);
|
||||
YAP_UserCPredicate("gecode_constraint_branch_69", gecode_constraint_branch_69, 7);
|
||||
YAP_UserCPredicate("gecode_constraint_mult_322", gecode_constraint_mult_322, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_mult_321", gecode_constraint_mult_321, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_clause_98", gecode_constraint_clause_98, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_clause_96", gecode_constraint_clause_96, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_precede_361", gecode_constraint_precede_361, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_argmax_7", gecode_constraint_argmax_7, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_distinct_169", gecode_constraint_distinct_169, 2);
|
||||
YAP_UserCPredicate("gecode_constraint_member_310", gecode_constraint_member_310, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_member_311", gecode_constraint_member_311, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_member_306", gecode_constraint_member_306, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_member_307", gecode_constraint_member_307, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_mod_320", gecode_constraint_mod_320, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_linear_271", gecode_constraint_linear_271, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_linear_273", gecode_constraint_linear_273, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_linear_259", gecode_constraint_linear_259, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_linear_263", gecode_constraint_linear_263, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_linear_291", gecode_constraint_linear_291, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_linear_295", gecode_constraint_linear_295, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_circuit_91", gecode_constraint_circuit_91, 2);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_379", gecode_constraint_rel_379, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_381", gecode_constraint_rel_381, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_413", gecode_constraint_rel_413, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_411", gecode_constraint_rel_411, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_407", gecode_constraint_rel_407, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_403", gecode_constraint_rel_403, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_399", gecode_constraint_rel_399, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_405", gecode_constraint_rel_405, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_375", gecode_constraint_rel_375, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_376", gecode_constraint_rel_376, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_371", gecode_constraint_rel_371, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_372", gecode_constraint_rel_372, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_368", gecode_constraint_rel_368, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_366", gecode_constraint_rel_366, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_417", gecode_constraint_rel_417, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_423", gecode_constraint_rel_423, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_421", gecode_constraint_rel_421, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_419", gecode_constraint_rel_419, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_383", gecode_constraint_rel_383, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_384", gecode_constraint_rel_384, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_387", gecode_constraint_rel_387, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_388", gecode_constraint_rel_388, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_391", gecode_constraint_rel_391, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_393", gecode_constraint_rel_393, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_395", gecode_constraint_rel_395, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_397", gecode_constraint_rel_397, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_365", gecode_constraint_rel_365, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_363", gecode_constraint_rel_363, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_min_314", gecode_constraint_min_314, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_min_318", gecode_constraint_min_318, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_min_313", gecode_constraint_min_313, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_when_456", gecode_constraint_when_456, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_when_457", gecode_constraint_when_457, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_cardinality_71", gecode_constraint_cardinality_71, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_cardinality_70", gecode_constraint_cardinality_70, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_count_118", gecode_constraint_count_118, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_count_119", gecode_constraint_count_119, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_count_111", gecode_constraint_count_111, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_count_114", gecode_constraint_count_114, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_count_115", gecode_constraint_count_115, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_sqrt_437", gecode_constraint_sqrt_437, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_sqrt_436", gecode_constraint_sqrt_436, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_cumulatives_164", gecode_constraint_cumulatives_164, 9);
|
||||
YAP_UserCPredicate("gecode_constraint_cumulatives_162", gecode_constraint_cumulatives_162, 9);
|
||||
YAP_UserCPredicate("gecode_constraint_cumulatives_160", gecode_constraint_cumulatives_160, 9);
|
||||
YAP_UserCPredicate("gecode_constraint_cumulatives_158", gecode_constraint_cumulatives_158, 9);
|
||||
YAP_UserCPredicate("gecode_constraint_cumulatives_156", gecode_constraint_cumulatives_156, 9);
|
||||
YAP_UserCPredicate("gecode_constraint_cumulatives_154", gecode_constraint_cumulatives_154, 9);
|
||||
YAP_UserCPredicate("gecode_constraint_cumulatives_152", gecode_constraint_cumulatives_152, 9);
|
||||
YAP_UserCPredicate("gecode_constraint_cumulatives_150", gecode_constraint_cumulatives_150, 9);
|
||||
YAP_UserCPredicate("gecode_constraint_nvalues_339", gecode_constraint_nvalues_339, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_nvalues_341", gecode_constraint_nvalues_341, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_nvalues_335", gecode_constraint_nvalues_335, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_nvalues_337", gecode_constraint_nvalues_337, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_binpacking_39", gecode_constraint_binpacking_39, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_linear_286", gecode_constraint_linear_286, 7);
|
||||
YAP_UserCPredicate("gecode_constraint_linear_290", gecode_constraint_linear_290, 7);
|
||||
YAP_UserCPredicate("gecode_constraint_linear_278", gecode_constraint_linear_278, 7);
|
||||
YAP_UserCPredicate("gecode_constraint_linear_282", gecode_constraint_linear_282, 7);
|
||||
YAP_UserCPredicate("gecode_constraint_abs_6", gecode_constraint_abs_6, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_convex_2", gecode_constraint_convex_2, 2);
|
||||
YAP_UserCPredicate("gecode_constraint_div_174", gecode_constraint_div_174, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_div_173", gecode_constraint_div_173, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_385", gecode_constraint_rel_385, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_389", gecode_constraint_rel_389, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_377", gecode_constraint_rel_377, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_373", gecode_constraint_rel_373, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_369", gecode_constraint_rel_369, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_367", gecode_constraint_rel_367, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_max_301", gecode_constraint_max_301, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_path_350", gecode_constraint_path_350, 9);
|
||||
YAP_UserCPredicate("gecode_constraint_unary_447", gecode_constraint_unary_447, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_unary_443", gecode_constraint_unary_443, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_unary_440", gecode_constraint_unary_440, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_unary_441", gecode_constraint_unary_441, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_nroot_333", gecode_constraint_nroot_333, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_nroot_332", gecode_constraint_nroot_332, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_sorted_432", gecode_constraint_sorted_432, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_circuit_87", gecode_constraint_circuit_87, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_circuit_84", gecode_constraint_circuit_84, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_circuit_85", gecode_constraint_circuit_85, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_dom_183", gecode_constraint_dom_183, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_dom_180", gecode_constraint_dom_180, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_dom_208", gecode_constraint_dom_208, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_dom_207", gecode_constraint_dom_207, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_dom_179", gecode_constraint_dom_179, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_dom_217", gecode_constraint_dom_217, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_dom_216", gecode_constraint_dom_216, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_dom_214", gecode_constraint_dom_214, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_dom_212", gecode_constraint_dom_212, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_dom_186", gecode_constraint_dom_186, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_dom_195", gecode_constraint_dom_195, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_dom_192", gecode_constraint_dom_192, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_dom_193", gecode_constraint_dom_193, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_dom_188", gecode_constraint_dom_188, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_dom_189", gecode_constraint_dom_189, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_dom_220", gecode_constraint_dom_220, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_dom_219", gecode_constraint_dom_219, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_dom_201", gecode_constraint_dom_201, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_dom_199", gecode_constraint_dom_199, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_abs_5", gecode_constraint_abs_5, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_abs_4", gecode_constraint_abs_4, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_channel_81", gecode_constraint_channel_81, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_channel_78", gecode_constraint_channel_78, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_assign_32", gecode_constraint_assign_32, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_assign_29", gecode_constraint_assign_29, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_assign_35", gecode_constraint_assign_35, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_assign_38", gecode_constraint_assign_38, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_when_455", gecode_constraint_when_455, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_414", gecode_constraint_rel_414, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_400", gecode_constraint_rel_400, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_path_351", gecode_constraint_path_351, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_branch_44", gecode_constraint_branch_44, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_branch_55", gecode_constraint_branch_55, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_branch_49", gecode_constraint_branch_49, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_branch_42", gecode_constraint_branch_42, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_branch_58", gecode_constraint_branch_58, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_branch_46", gecode_constraint_branch_46, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_branch_64", gecode_constraint_branch_64, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_branch_48", gecode_constraint_branch_48, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_mult_323", gecode_constraint_mult_323, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_circuit_88", gecode_constraint_circuit_88, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_circuit_89", gecode_constraint_circuit_89, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_circuit_86", gecode_constraint_circuit_86, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_clause_97", gecode_constraint_clause_97, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_clause_95", gecode_constraint_clause_95, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_precede_360", gecode_constraint_precede_360, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_precede_359", gecode_constraint_precede_359, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_channel_82", gecode_constraint_channel_82, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_cumulative_134", gecode_constraint_cumulative_134, 7);
|
||||
YAP_UserCPredicate("gecode_constraint_cumulative_135", gecode_constraint_cumulative_135, 7);
|
||||
YAP_UserCPredicate("gecode_constraint_cumulative_130", gecode_constraint_cumulative_130, 7);
|
||||
YAP_UserCPredicate("gecode_constraint_cumulative_131", gecode_constraint_cumulative_131, 7);
|
||||
YAP_UserCPredicate("gecode_constraint_cumulative_128", gecode_constraint_cumulative_128, 7);
|
||||
YAP_UserCPredicate("gecode_constraint_cumulative_146", gecode_constraint_cumulative_146, 7);
|
||||
YAP_UserCPredicate("gecode_constraint_cumulative_147", gecode_constraint_cumulative_147, 7);
|
||||
YAP_UserCPredicate("gecode_constraint_cumulative_142", gecode_constraint_cumulative_142, 7);
|
||||
YAP_UserCPredicate("gecode_constraint_cumulative_143", gecode_constraint_cumulative_143, 7);
|
||||
YAP_UserCPredicate("gecode_constraint_cumulative_140", gecode_constraint_cumulative_140, 7);
|
||||
YAP_UserCPredicate("gecode_constraint_distinct_171", gecode_constraint_distinct_171, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_distinct_170", gecode_constraint_distinct_170, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_distinct_165", gecode_constraint_distinct_165, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_distinct_167", gecode_constraint_distinct_167, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_member_312", gecode_constraint_member_312, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_member_308", gecode_constraint_member_308, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_mod_319", gecode_constraint_mod_319, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_sqr_434", gecode_constraint_sqr_434, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_sqr_433", gecode_constraint_sqr_433, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_sequence_428", gecode_constraint_sequence_428, 7);
|
||||
YAP_UserCPredicate("gecode_constraint_sequence_426", gecode_constraint_sequence_426, 7);
|
||||
YAP_UserCPredicate("gecode_constraint_path_347", gecode_constraint_path_347, 7);
|
||||
YAP_UserCPredicate("gecode_constraint_path_344", gecode_constraint_path_344, 7);
|
||||
YAP_UserCPredicate("gecode_constraint_path_345", gecode_constraint_path_345, 7);
|
||||
YAP_UserCPredicate("gecode_constraint_divmod_177", gecode_constraint_divmod_177, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_sorted_429", gecode_constraint_sorted_429, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_extensional_252", gecode_constraint_extensional_252, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_extensional_250", gecode_constraint_extensional_250, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_extensional_248", gecode_constraint_extensional_248, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_extensional_246", gecode_constraint_extensional_246, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_circuit_93", gecode_constraint_circuit_93, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_circuit_92", gecode_constraint_circuit_92, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_argmin_14", gecode_constraint_argmin_14, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_argmin_16", gecode_constraint_argmin_16, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_channel_79", gecode_constraint_channel_79, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_channel_76", gecode_constraint_channel_76, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_channel_72", gecode_constraint_channel_72, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_channel_75", gecode_constraint_channel_75, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_channel_74", gecode_constraint_channel_74, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_when_458", gecode_constraint_when_458, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_count_103", gecode_constraint_count_103, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_count_105", gecode_constraint_count_105, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_count_121", gecode_constraint_count_121, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_count_123", gecode_constraint_count_123, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_count_107", gecode_constraint_count_107, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_count_109", gecode_constraint_count_109, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_count_112", gecode_constraint_count_112, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_count_116", gecode_constraint_count_116, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_count_120", gecode_constraint_count_120, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_count_99", gecode_constraint_count_99, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_count_101", gecode_constraint_count_101, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_cumulatives_163", gecode_constraint_cumulatives_163, 8);
|
||||
YAP_UserCPredicate("gecode_constraint_cumulatives_161", gecode_constraint_cumulatives_161, 8);
|
||||
YAP_UserCPredicate("gecode_constraint_cumulatives_159", gecode_constraint_cumulatives_159, 8);
|
||||
YAP_UserCPredicate("gecode_constraint_cumulatives_157", gecode_constraint_cumulatives_157, 8);
|
||||
YAP_UserCPredicate("gecode_constraint_cumulatives_155", gecode_constraint_cumulatives_155, 8);
|
||||
YAP_UserCPredicate("gecode_constraint_cumulatives_153", gecode_constraint_cumulatives_153, 8);
|
||||
YAP_UserCPredicate("gecode_constraint_cumulatives_151", gecode_constraint_cumulatives_151, 8);
|
||||
YAP_UserCPredicate("gecode_constraint_cumulatives_149", gecode_constraint_cumulatives_149, 8);
|
||||
YAP_UserCPredicate("gecode_constraint_binpacking_40", gecode_constraint_binpacking_40, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_branch_1", gecode_constraint_branch_1, 2);
|
||||
YAP_UserCPredicate("gecode_constraint_extensional_251", gecode_constraint_extensional_251, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_extensional_249", gecode_constraint_extensional_249, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_extensional_247", gecode_constraint_extensional_247, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_extensional_245", gecode_constraint_extensional_245, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_linear_294", gecode_constraint_linear_294, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_linear_298", gecode_constraint_linear_298, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_linear_262", gecode_constraint_linear_262, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_linear_266", gecode_constraint_linear_266, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_linear_268", gecode_constraint_linear_268, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_linear_270", gecode_constraint_linear_270, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_linear_284", gecode_constraint_linear_284, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_linear_285", gecode_constraint_linear_285, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_linear_288", gecode_constraint_linear_288, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_linear_289", gecode_constraint_linear_289, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_linear_276", gecode_constraint_linear_276, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_linear_277", gecode_constraint_linear_277, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_linear_280", gecode_constraint_linear_280, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_linear_281", gecode_constraint_linear_281, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_nooverlap_328", gecode_constraint_nooverlap_328, 7);
|
||||
YAP_UserCPredicate("gecode_constraint_nooverlap_327", gecode_constraint_nooverlap_327, 7);
|
||||
YAP_UserCPredicate("gecode_constraint_div_175", gecode_constraint_div_175, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_sqr_435", gecode_constraint_sqr_435, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_channel_80", gecode_constraint_channel_80, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_channel_77", gecode_constraint_channel_77, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_channel_73", gecode_constraint_channel_73, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_path_348", gecode_constraint_path_348, 8);
|
||||
YAP_UserCPredicate("gecode_constraint_path_349", gecode_constraint_path_349, 8);
|
||||
YAP_UserCPredicate("gecode_constraint_path_346", gecode_constraint_path_346, 8);
|
||||
YAP_UserCPredicate("gecode_constraint_unary_448", gecode_constraint_unary_448, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_unary_449", gecode_constraint_unary_449, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_unary_444", gecode_constraint_unary_444, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_unary_445", gecode_constraint_unary_445, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_unary_442", gecode_constraint_unary_442, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_sorted_430", gecode_constraint_sorted_430, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_sorted_431", gecode_constraint_sorted_431, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_element_244", gecode_constraint_element_244, 8);
|
||||
YAP_UserCPredicate("gecode_constraint_element_238", gecode_constraint_element_238, 8);
|
||||
YAP_UserCPredicate("gecode_constraint_element_232", gecode_constraint_element_232, 8);
|
||||
YAP_UserCPredicate("gecode_constraint_element_230", gecode_constraint_element_230, 8);
|
||||
YAP_UserCPredicate("gecode_constraint_assign_22", gecode_constraint_assign_22, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_assign_31", gecode_constraint_assign_31, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_assign_28", gecode_constraint_assign_28, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_assign_20", gecode_constraint_assign_20, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_assign_34", gecode_constraint_assign_34, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_assign_24", gecode_constraint_assign_24, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_assign_37", gecode_constraint_assign_37, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_assign_26", gecode_constraint_assign_26, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_element_240", gecode_constraint_element_240, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_element_242", gecode_constraint_element_242, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_element_236", gecode_constraint_element_236, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_element_234", gecode_constraint_element_234, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_element_226", gecode_constraint_element_226, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_element_228", gecode_constraint_element_228, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_element_224", gecode_constraint_element_224, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_branch_56", gecode_constraint_branch_56, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_branch_52", gecode_constraint_branch_52, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_branch_50", gecode_constraint_branch_50, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_branch_61", gecode_constraint_branch_61, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_branch_59", gecode_constraint_branch_59, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_branch_65", gecode_constraint_branch_65, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_branch_67", gecode_constraint_branch_67, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_relax_424", gecode_constraint_relax_424, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_circuit_90", gecode_constraint_circuit_90, 7);
|
||||
YAP_UserCPredicate("gecode_constraint_pow_356", gecode_constraint_pow_356, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_pow_355", gecode_constraint_pow_355, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_precede_358", gecode_constraint_precede_358, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_argmax_9", gecode_constraint_argmax_9, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_argmax_11", gecode_constraint_argmax_11, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_cumulative_133", gecode_constraint_cumulative_133, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_cumulative_129", gecode_constraint_cumulative_129, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_cumulative_126", gecode_constraint_cumulative_126, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_cumulative_127", gecode_constraint_cumulative_127, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_cumulative_145", gecode_constraint_cumulative_145, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_cumulative_141", gecode_constraint_cumulative_141, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_cumulative_138", gecode_constraint_cumulative_138, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_cumulative_139", gecode_constraint_cumulative_139, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_distinct_172", gecode_constraint_distinct_172, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_distinct_166", gecode_constraint_distinct_166, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_distinct_168", gecode_constraint_distinct_168, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_min_316", gecode_constraint_min_316, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_min_317", gecode_constraint_min_317, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_sqrt_438", gecode_constraint_sqrt_438, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_sequence_427", gecode_constraint_sequence_427, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_sequence_425", gecode_constraint_sequence_425, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_unshare_454", gecode_constraint_unshare_454, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_unshare_452", gecode_constraint_unshare_452, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_path_354", gecode_constraint_path_354, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_path_343", gecode_constraint_path_343, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_divmod_176", gecode_constraint_divmod_176, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_branch_57", gecode_constraint_branch_57, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_branch_53", gecode_constraint_branch_53, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_branch_51", gecode_constraint_branch_51, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_branch_62", gecode_constraint_branch_62, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_branch_60", gecode_constraint_branch_60, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_branch_66", gecode_constraint_branch_66, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_branch_68", gecode_constraint_branch_68, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_nooverlap_331", gecode_constraint_nooverlap_331, 9);
|
||||
YAP_UserCPredicate("gecode_constraint_argmin_15", gecode_constraint_argmin_15, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_argmin_17", gecode_constraint_argmin_17, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_cumulative_125", gecode_constraint_cumulative_125, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_cumulative_137", gecode_constraint_cumulative_137, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_member_309", gecode_constraint_member_309, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_member_305", gecode_constraint_member_305, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_count_100", gecode_constraint_count_100, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_count_102", gecode_constraint_count_102, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_count_122", gecode_constraint_count_122, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_count_124", gecode_constraint_count_124, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_count_108", gecode_constraint_count_108, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_count_110", gecode_constraint_count_110, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_count_104", gecode_constraint_count_104, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_count_106", gecode_constraint_count_106, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_pow_357", gecode_constraint_pow_357, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_cumulative_136", gecode_constraint_cumulative_136, 8);
|
||||
YAP_UserCPredicate("gecode_constraint_cumulative_132", gecode_constraint_cumulative_132, 8);
|
||||
YAP_UserCPredicate("gecode_constraint_cumulative_148", gecode_constraint_cumulative_148, 8);
|
||||
YAP_UserCPredicate("gecode_constraint_cumulative_144", gecode_constraint_cumulative_144, 8);
|
||||
YAP_UserCPredicate("gecode_constraint_branch_45", gecode_constraint_branch_45, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_branch_41", gecode_constraint_branch_41, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_branch_43", gecode_constraint_branch_43, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_branch_47", gecode_constraint_branch_47, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_dom_182", gecode_constraint_dom_182, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_dom_184", gecode_constraint_dom_184, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_dom_210", gecode_constraint_dom_210, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_dom_209", gecode_constraint_dom_209, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_dom_206", gecode_constraint_dom_206, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_dom_178", gecode_constraint_dom_178, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_dom_215", gecode_constraint_dom_215, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_dom_213", gecode_constraint_dom_213, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_dom_211", gecode_constraint_dom_211, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_dom_185", gecode_constraint_dom_185, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_dom_191", gecode_constraint_dom_191, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_dom_187", gecode_constraint_dom_187, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_dom_222", gecode_constraint_dom_222, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_dom_205", gecode_constraint_dom_205, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_linear_292", gecode_constraint_linear_292, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_linear_293", gecode_constraint_linear_293, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_linear_296", gecode_constraint_linear_296, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_linear_297", gecode_constraint_linear_297, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_linear_272", gecode_constraint_linear_272, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_linear_274", gecode_constraint_linear_274, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_linear_260", gecode_constraint_linear_260, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_linear_261", gecode_constraint_linear_261, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_linear_264", gecode_constraint_linear_264, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_linear_265", gecode_constraint_linear_265, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_linear_267", gecode_constraint_linear_267, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_linear_269", gecode_constraint_linear_269, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_linear_283", gecode_constraint_linear_283, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_linear_287", gecode_constraint_linear_287, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_linear_275", gecode_constraint_linear_275, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_linear_279", gecode_constraint_linear_279, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_argmin_18", gecode_constraint_argmin_18, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_nooverlap_325", gecode_constraint_nooverlap_325, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_nooverlap_326", gecode_constraint_nooverlap_326, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_378", gecode_constraint_rel_378, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_380", gecode_constraint_rel_380, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_412", gecode_constraint_rel_412, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_410", gecode_constraint_rel_410, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_408", gecode_constraint_rel_408, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_409", gecode_constraint_rel_409, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_406", gecode_constraint_rel_406, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_401", gecode_constraint_rel_401, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_402", gecode_constraint_rel_402, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_398", gecode_constraint_rel_398, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_404", gecode_constraint_rel_404, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_374", gecode_constraint_rel_374, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_370", gecode_constraint_rel_370, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_416", gecode_constraint_rel_416, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_422", gecode_constraint_rel_422, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_415", gecode_constraint_rel_415, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_420", gecode_constraint_rel_420, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_418", gecode_constraint_rel_418, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_382", gecode_constraint_rel_382, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_386", gecode_constraint_rel_386, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_390", gecode_constraint_rel_390, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_392", gecode_constraint_rel_392, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_394", gecode_constraint_rel_394, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_396", gecode_constraint_rel_396, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_364", gecode_constraint_rel_364, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_362", gecode_constraint_rel_362, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_min_315", gecode_constraint_min_315, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_count_117", gecode_constraint_count_117, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_count_113", gecode_constraint_count_113, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_argmax_8", gecode_constraint_argmax_8, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_argmax_10", gecode_constraint_argmax_10, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_ite_257", gecode_constraint_ite_257, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_ite_254", gecode_constraint_ite_254, 6);
|
167
packages/python/#python.pl#
Normal file
167
packages/python/#python.pl#
Normal file
@ -0,0 +1,167 @@
|
||||
% % % -*-Mode : Prolog; -*-
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
% Author: Vitor Santos Costa
|
||||
% E-mail: vsc@dcc.fc.up.pt
|
||||
% Copyright (C): Universidade do Porto
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
%
|
||||
% This file is part of the YAP Python Interface
|
||||
% distributed according to Perl Artistic License
|
||||
% check LICENSE file for distribution license
|
||||
%
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
|
||||
%%%
|
||||
|
||||
:- module(python,
|
||||
[
|
||||
init_python/0,
|
||||
end_python/0,
|
||||
python_command/1,
|
||||
python_run_file/1,
|
||||
python_run_command/1,
|
||||
python_run_script/2,
|
||||
python_assign/3,
|
||||
python_import/1,
|
||||
array_to_python_list/4,
|
||||
array_to_python_tuple/4,
|
||||
array_to_python_view/5,
|
||||
python/2,
|
||||
acquire_GIL/0,
|
||||
release_GIL/0,
|
||||
python_threaded/0,
|
||||
(:=)/2,
|
||||
(:=)/1,
|
||||
% (<-)/2,
|
||||
% (<-)/1,
|
||||
op(100,fy,$),
|
||||
op(950,fy,:=),
|
||||
op(950,yfx,:=),
|
||||
op(950,fx,<-),
|
||||
op(950,yfx,<-),
|
||||
op(50, yf, []),
|
||||
op(50, yf, '()'),
|
||||
op(100, xfy, '.'),
|
||||
op(100, fy, '.')
|
||||
]).
|
||||
|
||||
|
||||
/** <module> python
|
||||
|
||||
A C-based Prolog interface to python.
|
||||
|
||||
@author Vitor Santos Costa
|
||||
@version 0:0:5, 2012/10/8
|
||||
@license Perl Artistic License
|
||||
|
||||
This is an interface to allow calling Python from Prolog. Please look
|
||||
at the SWIG package if you want to embedd Prolog with Python.
|
||||
|
||||
The interface should be activated by consulting the python lybrary. It
|
||||
immediately boots a Python image.
|
||||
|
||||
To best define the interface, one has to address two opposite goals:
|
||||
- make it as similar to python as possible
|
||||
- make all embedded language interfaces (python, R, Java) as
|
||||
similar as possible.
|
||||
|
||||
YAP supports the following translation between Prolog and Python:
|
||||
|
||||
| *Prolog* | *Pyhon* | *Prolog Examples* |
|
||||
|:-------------:|:-------------:|---------------------------------------:|
|
||||
| Numbers | Numbers | 2.3
|
||||
| | | 1545
|
||||
| | |
|
||||
| Atom | Symbols | var
|
||||
| $Atom | | $var [ = var]
|
||||
| `string` | 'string' | \`hello\`
|
||||
| "string" | ' | "hello"
|
||||
| | |
|
||||
| Atom(...) | Symb(...) | f( a, b, named=v)
|
||||
| E.F(...) | E.F (...) | mod.f( a) [ = [mod\|f(a)] ]
|
||||
| Atom() | | f() [ = '()'(f) ]
|
||||
| Lists | Lists | [1,2,3]
|
||||
| t(....) | Tuples | t(1,2,3) to (1,2,3)
|
||||
| (..., ...) | | (1,2,3)[ = (1,(2,3))]
|
||||
| {.=., .=.} | Dict | {\`one\`: 1, \`two\`: 2, \`three\`: 3}
|
||||
|
||||
*/
|
||||
|
||||
|
||||
|
||||
/************************************************************************************************************
|
||||
|
||||
|
||||
Python interface
|
||||
|
||||
Data types are
|
||||
|
||||
Python Prolog
|
||||
string atoms
|
||||
numbers numbers
|
||||
lists lists
|
||||
tuples t(...)
|
||||
generic objs __pointer__(Address)
|
||||
|
||||
$var refers to the attribute __main__.var
|
||||
|
||||
*************************************************************************************************************/
|
||||
|
||||
|
||||
:- use_module(library(shlib)).
|
||||
:- use_module(library(lists)).
|
||||
:- use_module(library(apply_macros)).
|
||||
:- use_module(library(charsio)).
|
||||
:- dynamic python_mref_cache/2, python_obj_cache/2.
|
||||
|
||||
:= (P1,P2) :- !,
|
||||
:= P1,
|
||||
:= P2.
|
||||
:= import( F ) :- !, python_import(F).
|
||||
:= F :- python_is(F,_).
|
||||
|
||||
V <- F :-
|
||||
V := F.
|
||||
|
||||
( V := F ) :-
|
||||
python_assign(V, F).
|
||||
|
||||
((<- F)) :-
|
||||
:= F.
|
||||
|
||||
python_import(Module) :-
|
||||
python_import(Module, _).
|
||||
|
||||
|
||||
python(Exp, Out) :-
|
||||
Out := Exp.
|
||||
|
||||
python_assign(V, New) :- var(V), !,
|
||||
python_is( New, V).
|
||||
python_assign(T, F) :- atom(T), !,
|
||||
python_assign_symbol(T, F).
|
||||
python_assign(T.I, F) :- !,
|
||||
python_assign_field(T, I, F).
|
||||
python_assign(T[I], F) :- !,
|
||||
python_assign_item(T, I, F).
|
||||
python_assign(F, Tuple) :-
|
||||
python_assign_tuple(F, Tuple).
|
||||
|
||||
python_command(Cmd) :-
|
||||
python_run_command(Cmd).
|
||||
|
||||
|
||||
start_python :-
|
||||
python_import('inspect', _),
|
||||
at_halt(end_python).
|
||||
|
||||
add_cwd_to_python :-
|
||||
unix(getcwd(Dir)),
|
||||
atom_concat(['sys.path.append(\"',Dir,'\")'], Command),
|
||||
python_command(Command),
|
||||
python_command("sys.argv = [\"yap\"]").
|
||||
% done
|
||||
|
||||
:- initialization( load_foreign_files([libYAPPython], [], init_python), now ).
|
||||
|
||||
:- initialization( load_foreign_library(foreign(libYAPPython), init_python), now ).
|
7
packages/python/#sc#
Normal file
7
packages/python/#sc#
Normal file
@ -0,0 +1,7 @@
|
||||
PyThreadState *_save;
|
||||
|
||||
_save = PyThreadState_Swap(NULL);
|
||||
PyEval_ReleaseLock();
|
||||
...Do some blocking I/O operation...
|
||||
PyEval_AcquireLock();
|
||||
PyThreadState_Swap(_save);
|
3
packages/python/__init__.pybk
Normal file
3
packages/python/__init__.pybk
Normal file
@ -0,0 +1,3 @@
|
||||
"""A Prolog kernel for Jupyter"""
|
||||
|
||||
__version__ = '0.0.1'
|
4
packages/python/__main__.pybk
Normal file
4
packages/python/__main__.pybk
Normal file
@ -0,0 +1,4 @@
|
||||
if __name__ == '__main__':
|
||||
from yapkernel import kernelapp as app
|
||||
import pdbl pdb.set_trace()
|
||||
app.launch_new_instance()
|
22
packages/python/examples/multiply.pybk
Normal file
22
packages/python/examples/multiply.pybk
Normal file
@ -0,0 +1,22 @@
|
||||
i = 5
|
||||
|
||||
def f(arg=i):
|
||||
print arg
|
||||
|
||||
def multiply(a,b):
|
||||
print "Will compute", a, "times", b
|
||||
c = 0
|
||||
for i in range(0, a):
|
||||
c = c + b
|
||||
return c
|
||||
|
||||
def square(a,b):
|
||||
return [a*a,b*b]
|
||||
|
||||
def lsquare(a):
|
||||
print a
|
||||
b = []
|
||||
for i in a:
|
||||
b.append(i*i)
|
||||
return b
|
||||
|
10
packages/python/examples/plot.pybk
Normal file
10
packages/python/examples/plot.pybk
Normal file
@ -0,0 +1,10 @@
|
||||
|
||||
:- [library(python)].
|
||||
|
||||
main :-
|
||||
:= import matplotlib.pyplot,
|
||||
:= Plt = ematplotlib.pyplot,
|
||||
Plt.plot([1,2,3,4]),
|
||||
Plt.ylabel(`some numbers`),
|
||||
Plt.show().
|
||||
|
28
packages/python/examples/tests.yap
Normal file
28
packages/python/examples/tests.yap
Normal file
@ -0,0 +1,28 @@
|
||||
:- use_module(library(python)).
|
||||
:- use_module(library(lists)).
|
||||
|
||||
main :-
|
||||
test(I),
|
||||
catch( dot(I), G, err(I,G) ),
|
||||
writeln('.'),
|
||||
fail.
|
||||
main.
|
||||
|
||||
test(I) :-
|
||||
findall(I, clause(det(I,_,_),_), IsF, Is0 ),
|
||||
Is0 = [],
|
||||
sort(IsF,Is),
|
||||
member(I, Is).
|
||||
|
||||
dot(I) :-
|
||||
det(I, Vs, Sol),
|
||||
Vs == Sol.
|
||||
|
||||
err(I,N) :-
|
||||
format(' test ~d failed with error: ~w',[I,N]).
|
||||
|
||||
|
||||
det(a1,[X],[2]) :- X:=2.
|
||||
det(a2,[],[]) :- x := range(1,10).
|
||||
det(b2 [X],[9]) :- X := x.length().
|
||||
det(c3,[X],[Y]) :- X:=cmath.sin(1), Y is sin(1).
|
44
packages/python/install.pybk
Normal file
44
packages/python/install.pybk
Normal file
@ -0,0 +1,44 @@
|
||||
import json
|
||||
import os
|
||||
import sys
|
||||
|
||||
try:
|
||||
from jupyter_client.kernelspec import install_kernel_spec
|
||||
except ImportError:
|
||||
from IPython.kernel.kernelspec import install_kernel_spec
|
||||
from IPython.utils.tempdir import TemporaryDirectory
|
||||
|
||||
|
||||
kernel_json = {
|
||||
"argv": [sys.executable,
|
||||
"-m", "yap_kernel",
|
||||
"-f", "{connection_file}"],
|
||||
"display_name": "yap",
|
||||
"mimetype": "text/x-prolog",
|
||||
"language": "prolog",
|
||||
"name": "yap",
|
||||
}
|
||||
|
||||
def install_my_kernel_spec(user=False):
|
||||
with TemporaryDirectory() as td:
|
||||
os.chmod(td, 0o755) # Starts off as 700, not user readable
|
||||
with open(os.path.join(td, 'kernel.json'), 'w') as f:
|
||||
json.dump(kernel_json, f, sort_keys=True)
|
||||
# TODO: Copy resources once they're specified
|
||||
|
||||
print('Installing IPython kernel spec')
|
||||
install_kernel_spec(td, 'yap', user=False, replace=True)
|
||||
|
||||
def _is_root():
|
||||
return True
|
||||
try:
|
||||
return os.geteuid() == 0
|
||||
except AttributeError:
|
||||
return False # assume not an admin on non-Unix platforms
|
||||
|
||||
def main(argv=[]):
|
||||
user = '--user' in argv or not _is_root()
|
||||
install_my_kernel_spec(user=user)
|
||||
|
||||
if __name__ == '__main__':
|
||||
main(argv=sys.argv)
|
112
packages/python/setup.py
Normal file
112
packages/python/setup.py
Normal file
@ -0,0 +1,112 @@
|
||||
#!/usr/bin/env python
|
||||
# coding: utf-8
|
||||
|
||||
# Copyright (c) IPython Development Team.
|
||||
# Distributed under the terms of the Modified BSD License.
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
# the name of the package
|
||||
name = 'ipykernel'
|
||||
|
||||
#-----------------------------------------------------------------------------
|
||||
# Minimal Python version sanity check
|
||||
#-----------------------------------------------------------------------------
|
||||
|
||||
import sys
|
||||
|
||||
v = sys.version_info
|
||||
if v[:2] < (2,7) or (v[0] >= 3 and v[:2] < (3,3)):
|
||||
error = "ERROR: %s requires Python version 2.7 or 3.3 or above." % name
|
||||
print(error, file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
PY3 = (sys.version_info[0] >= 3)
|
||||
|
||||
#-----------------------------------------------------------------------------
|
||||
# get on with it
|
||||
#-----------------------------------------------------------------------------
|
||||
|
||||
from glob import glob
|
||||
import os
|
||||
import shutil
|
||||
|
||||
from distutils.core import setup
|
||||
|
||||
pjoin = os.path.join
|
||||
here = os.path.abspath(os.path.dirname(__file__))
|
||||
pkg_root = pjoin(here, name)
|
||||
|
||||
packages = []
|
||||
for d, _, _ in os.walk(pjoin(here, name)):
|
||||
if os.path.exists(pjoin(d, '__init__.py')):
|
||||
packages.append(d[len(here)+1:].replace(os.path.sep, '.'))
|
||||
|
||||
package_data = {
|
||||
'ipykernel': ['resources/*.*'],
|
||||
}
|
||||
|
||||
version_ns = {}
|
||||
with open(pjoin(here, name, '_version.py')) as f:
|
||||
exec(f.read(), {}, version_ns)
|
||||
|
||||
|
||||
setup_args = dict(
|
||||
name = name,
|
||||
version = version_ns['__version__'],
|
||||
scripts = glob(pjoin('scripts', '*')),
|
||||
packages = packages,
|
||||
py_modules = ['ipykernel_launcher'],
|
||||
package_data = package_data,
|
||||
description = "IPython Kernel for Jupyter",
|
||||
author = 'IPython Development Team',
|
||||
author_email = 'ipython-dev@scipy.org',
|
||||
url = 'http://ipython.org',
|
||||
license = 'BSD',
|
||||
platforms = "Linux, Mac OS X, Windows",
|
||||
keywords = ['Interactive', 'Interpreter', 'Shell', 'Web'],
|
||||
classifiers = [
|
||||
'Intended Audience :: Developers',
|
||||
'Intended Audience :: System Administrators',
|
||||
'Intended Audience :: Science/Research',
|
||||
'License :: OSI Approved :: BSD License',
|
||||
'Programming Language :: Python',
|
||||
'Programming Language :: Python :: 2.7',
|
||||
'Programming Language :: Python :: 3',
|
||||
],
|
||||
)
|
||||
|
||||
if 'develop' in sys.argv or any(a.startswith('bdist') for a in sys.argv):
|
||||
import setuptools
|
||||
|
||||
setuptools_args = {}
|
||||
install_requires = setuptools_args['install_requires'] = [
|
||||
'ipython>=4.0.0',
|
||||
'traitlets>=4.1.0',
|
||||
'jupyter_client',
|
||||
'tornado>=4.0',
|
||||
]
|
||||
|
||||
if any(a.startswith(('bdist', 'build', 'install')) for a in sys.argv):
|
||||
from ipykernel.kernelspec import write_kernel_spec, make_yap_kernel_cmd, KERNEL_NAME
|
||||
|
||||
argv = make_yap_kernel_cmd(executable='python')
|
||||
dest = os.path.join(here, 'data_kernelspec')
|
||||
if os.path.exists(dest):
|
||||
shutil.rmtree(dest)
|
||||
write_kernel_spec(dest, overrides={'argv': argv})
|
||||
|
||||
setup_args['data_files'] = [
|
||||
(pjoin('share', 'jupyter', 'kernels', KERNEL_NAME), glob(pjoin(dest, '*'))),
|
||||
]
|
||||
|
||||
extras_require = setuptools_args['extras_require'] = {
|
||||
'test:python_version=="2.7"': ['mock'],
|
||||
'test': ['nose_warnings_filters', 'nose-timer'],
|
||||
}
|
||||
|
||||
if 'setuptools' in sys.modules:
|
||||
setup_args.update(setuptools_args)
|
||||
|
||||
if __name__ == '__main__':
|
||||
setup(**setup_args)
|
112
packages/python/setup.pybk
Normal file
112
packages/python/setup.pybk
Normal file
@ -0,0 +1,112 @@
|
||||
#!/usr/bin/env python
|
||||
# coding: utf-8
|
||||
|
||||
# Copyright (c) IPython Development Team.
|
||||
# Distributed under the terms of the Modified BSD License.
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
# the name of the package
|
||||
name = 'ipykernel'
|
||||
|
||||
#-----------------------------------------------------------------------------
|
||||
# Minimal Python version sanity check
|
||||
#-----------------------------------------------------------------------------
|
||||
|
||||
import sys
|
||||
|
||||
v = sys.version_info
|
||||
if v[:2] < (2,7) or (v[0] >= 3 and v[:2] < (3,3)):
|
||||
error = "ERROR: %s requires Python version 2.7 or 3.3 or above." % name
|
||||
print(error, file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
PY3 = (sys.version_info[0] >= 3)
|
||||
|
||||
#-----------------------------------------------------------------------------
|
||||
# get on with it
|
||||
#-----------------------------------------------------------------------------
|
||||
|
||||
from glob import glob
|
||||
import os
|
||||
import shutil
|
||||
|
||||
from distutils.core import setup
|
||||
|
||||
pjoin = os.path.join
|
||||
here = os.path.abspath(os.path.dirname(__file__))
|
||||
pkg_root = pjoin(here, name)
|
||||
|
||||
packages = []
|
||||
for d, _, _ in os.walk(pjoin(here, name)):
|
||||
if os.path.exists(pjoin(d, '__init__.py')):
|
||||
packages.append(d[len(here)+1:].replace(os.path.sep, '.'))
|
||||
|
||||
package_data = {
|
||||
'ipykernel': ['resources/*.*'],
|
||||
}
|
||||
|
||||
version_ns = {}
|
||||
with open(pjoin(here, name, '_version.py')) as f:
|
||||
exec(f.read(), {}, version_ns)
|
||||
|
||||
|
||||
setup_args = dict(
|
||||
name = name,
|
||||
version = version_ns['__version__'],
|
||||
scripts = glob(pjoin('scripts', '*')),
|
||||
packages = packages,
|
||||
py_modules = ['ipykernel_launcher'],
|
||||
package_data = package_data,
|
||||
description = "IPython Kernel for Jupyter",
|
||||
author = 'IPython Development Team',
|
||||
author_email = 'ipython-dev@scipy.org',
|
||||
url = 'http://ipython.org',
|
||||
license = 'BSD',
|
||||
platforms = "Linux, Mac OS X, Windows",
|
||||
keywords = ['Interactive', 'Interpreter', 'Shell', 'Web'],
|
||||
classifiers = [
|
||||
'Intended Audience :: Developers',
|
||||
'Intended Audience :: System Administrators',
|
||||
'Intended Audience :: Science/Research',
|
||||
'License :: OSI Approved :: BSD License',
|
||||
'Programming Language :: Python',
|
||||
'Programming Language :: Python :: 2.7',
|
||||
'Programming Language :: Python :: 3',
|
||||
],
|
||||
)
|
||||
|
||||
if 'develop' in sys.argv or any(a.startswith('bdist') for a in sys.argv):
|
||||
import setuptools
|
||||
|
||||
setuptools_args = {}
|
||||
install_requires = setuptools_args['install_requires'] = [
|
||||
'ipython>=4.0.0',
|
||||
'traitlets>=4.1.0',
|
||||
'jupyter_client',
|
||||
'tornado>=4.0',
|
||||
]
|
||||
|
||||
if any(a.startswith(('bdist', 'build', 'install')) for a in sys.argv):
|
||||
from ipykernel.kernelspec import write_kernel_spec, make_ipkernel_cmd, KERNEL_NAME
|
||||
|
||||
argv = make_ipkernel_cmd(executable='python')
|
||||
dest = os.path.join(here, 'data_kernelspec')
|
||||
if os.path.exists(dest):
|
||||
shutil.rmtree(dest)
|
||||
write_kernel_spec(dest, overrides={'argv': argv})
|
||||
|
||||
setup_args['data_files'] = [
|
||||
(pjoin('share', 'jupyter', 'kernels', KERNEL_NAME), glob(pjoin(dest, '*'))),
|
||||
]
|
||||
|
||||
extras_require = setuptools_args['extras_require'] = {
|
||||
'test:python_version=="2.7"': ['mock'],
|
||||
'test': ['nose_warnings_filters', 'nose-timer'],
|
||||
}
|
||||
|
||||
if 'setuptools' in sys.modules:
|
||||
setup_args.update(setuptools_args)
|
||||
|
||||
if __name__ == '__main__':
|
||||
setup(**setup_args)
|
@ -70,9 +70,9 @@ add_custom_target( YAP4PY ALL
|
||||
COMMAND ${CMAKE_COMMAND} -E copy ${pl_library} ${PROLOG_SOURCES} ${CMAKE_CURRENT_BINARY_DIR}/yap4py/prolog
|
||||
COMMAND ${CMAKE_COMMAND} -E copy ${pl_boot_library} ${CMAKE_CURRENT_BINARY_DIR}/yap4py/prolog/pl
|
||||
COMMAND ${CMAKE_COMMAND} -E copy ${pl_os_library} ${CMAKE_CURRENT_BINARY_DIR}/yap4py/prolog/os
|
||||
COMMAND ${PYTHON_EXECUTABLE} setup.py sdist bdist_wheel
|
||||
COMMAND ${PYTHON_EXECUTABLE} ${CMAKE_CURRENT_BINARY_DIR}/setup.py sdist bdist_wheel
|
||||
WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
|
||||
DEPENDS STARTUP ${dlls} ${PYTHON_SOURCES} ${PROLOG_SOURCES} setup.py ${SWIG_MODULE_Py2YAP_REAL_NAME} )
|
||||
DEPENDS STARTUP ${dlls} ${PYTHON_SOURCES} ${PROLOG_SOURCES} ${CMAKE_CURRENT_BINARY_DIR}/setup.py ${SWIG_MODULE_Py2YAP_REAL_NAME} )
|
||||
|
||||
|
||||
install(CODE "execute_process(COMMAND ${PYTHON_EXECUTABLE} -m pip install --no-index -f dist yap4py
|
||||
@ -95,7 +95,7 @@ DEPENDS STARTUP ${dlls} ${PYTHON_SOURCES} ${PROLOG_SOURCES} setup.py ${SWIG_MOD
|
||||
# generate .i from doxygen .xml
|
||||
add_custom_command(OUTPUT ${CMAKE_CURRENT_BINARY_DIR}/ftdi1_doc.i
|
||||
COMMAND ${PYTHON_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/doxy2swig.py -n
|
||||
${CMAKE_BINARY_DIR}/doc/xml/ftdi_8c.xml
|
||||
${CMAKE_BINARY_DIR}/doc/xm11l/ftdi_8c.xml
|
||||
${CMAKE_CURRENT_BINARY_DIR}/ftdi1_doc.i
|
||||
DEPENDS ${CMAKE_BINARY_DIR}/doc/xml/ftdi_8c.xml
|
||||
)
|
||||
|
17
packages/python/swig/__init__.pybk
Normal file
17
packages/python/swig/__init__.pybk
Normal file
@ -0,0 +1,17 @@
|
||||
import imp
|
||||
import os
|
||||
import ctypes
|
||||
import glob
|
||||
import os.path
|
||||
import sys
|
||||
|
||||
global yap_lib_path
|
||||
yap_lib_path = os.path.dirname(__file__)
|
||||
|
||||
|
||||
def load( dll ):
|
||||
dll = glob.glob(os.path.join(yap_lib_path,dll))[0]
|
||||
dll = os.path.abspath(dll)
|
||||
ctypes.CDLL(dll, mode=ctypes.RTLD_GLOBAL)
|
||||
|
||||
load('libYap*')
|
14
packages/python/swig/__main__.pybk
Normal file
14
packages/python/swig/__main__.pybk
Normal file
@ -0,0 +1,14 @@
|
||||
"""The main routine of the yap python project."""
|
||||
|
||||
import sys
|
||||
import yapi
|
||||
|
||||
|
||||
def main(args=None):
|
||||
"""The main routine."""
|
||||
if args is None:
|
||||
args = sys.argv[1:]
|
||||
yap.yapi.live(args)
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
97
packages/python/swig/setup.py
Normal file
97
packages/python/swig/setup.py
Normal file
@ -0,0 +1,97 @@
|
||||
"""A setuptools based setup module.
|
||||
|
||||
See:
|
||||
https://packaging.python.org/en/latest/distributing.html
|
||||
https://github.com/pypa/sampleproject
|
||||
"""
|
||||
|
||||
# Always prefer setuptools over distutils
|
||||
from setuptools import setup
|
||||
from setuptools.extension import Extension
|
||||
# To use a consistent encoding
|
||||
from codecs import open
|
||||
from os import path, makedirs, walk
|
||||
from shutil import copytree, rmtree, copy2, move
|
||||
from glob import glob
|
||||
from pathlib import Path
|
||||
import platform
|
||||
import os.path
|
||||
|
||||
my_extra_link_args = []
|
||||
if platform.system() == 'Darwin':
|
||||
my_extra_link_args = ['-Wl,-rpath','-Wl,']
|
||||
so = 'dylib'
|
||||
#or dll in glob('yap/dlls/*'):
|
||||
# move( dll ,'lib' )
|
||||
pls = []
|
||||
for (r,d,fs) in walk('yap/pl'):
|
||||
for f in fs:
|
||||
pls += [os.path.join(r, f)]
|
||||
for (r,d,fs) in walk('yap'):
|
||||
for f in fs:
|
||||
pls += [os.path.join(r, f)]
|
||||
# for f in glob( 'lib*.*' ):
|
||||
# ofile.write(f+"\n")
|
||||
cplus=['../../../CXX/yapi.cpp']
|
||||
py2yap=['../../../packages/python/python.c',
|
||||
'../../../packages/python/pl2py.c',
|
||||
'../../../packages/python/pybips.c',
|
||||
'../../../packages/python/py2pl.c',
|
||||
'../../../packages/python/pl2pl.c',
|
||||
'../../../packages/python/pypreds.c'
|
||||
]
|
||||
python_sources = ['yapPYTHON_wrap.cxx']+py2yap+cplus
|
||||
here = path.abspath(path.dirname(__file__))
|
||||
|
||||
# Get the long description from the README file
|
||||
|
||||
extensions=[Extension('_yap', python_sources,
|
||||
define_macros = [('MAJOR_VERSION', '1'),
|
||||
('MINOR_VERSION', '0'),
|
||||
('_YAP_NOT_INSTALLED_', '1'),
|
||||
('YAP_PYTHON', '1')],
|
||||
runtime_library_dirs=['yap4py','/usr/local/lib','/usr/local/bin'],
|
||||
swig_opts=['-modern', '-c++', '-py3','-I../../..//CXX'],
|
||||
library_dirs=['../../..','../../../CXX','../../packages/python',"/usr/local/lib/Yap","/usr/local/bin", '.'],
|
||||
extra_link_args=my_extra_link_args,
|
||||
extra_compile_args=['-g'],
|
||||
libraries=['Yap','/usr/local/lib/libgmp.dylib'],
|
||||
include_dirs=['../../..',
|
||||
'/usr/local/include',
|
||||
'../../../H',
|
||||
'../../../H/generated',
|
||||
'../../../OPTYap',
|
||||
'../../../os',
|
||||
'../../../include',
|
||||
'../../../CXX', '.']
|
||||
)]
|
||||
|
||||
setup(
|
||||
name='YAP4Py',
|
||||
version='6.3.5',
|
||||
description='The YAP Prolog compiler as a Python Library',
|
||||
url='https://github.com/vscosta/yap-6.3',
|
||||
author='Vitor Santos Costa',
|
||||
author_email='vsc@dcc.fc.up.pt',
|
||||
license='Artistic',
|
||||
classifiers=[
|
||||
'Development Status :: 4 - Beta',
|
||||
'Intended Audience :: Developers',
|
||||
'Topic :: Software Development :: Build Tools',
|
||||
'License :: OSI Approved :: Artistic License',
|
||||
'Programming Language :: Python :: 3',
|
||||
|
||||
'Programming Language :: Python :: 3.4',
|
||||
'Programming Language :: Python :: 3.5',
|
||||
'Programming Language :: Python :: 3.6',
|
||||
'Programming Language :: Python :: 3.7'],
|
||||
keywords=['Logic Programing'],
|
||||
#spackage_data={'': ['yap']},
|
||||
include_package_data=True,
|
||||
ext_modules = extensions,
|
||||
py_modules = ['yap'],
|
||||
zip_safe=False,
|
||||
eager_resources = ['yap4py'],
|
||||
packages=['yap4py'] # find_packages()
|
||||
#package_dir = {'':'yap4py'}
|
||||
)
|
17
packages/python/swig/yap4py/__init__.py
Normal file
17
packages/python/swig/yap4py/__init__.py
Normal file
@ -0,0 +1,17 @@
|
||||
import imp
|
||||
import os
|
||||
import ctypes
|
||||
import glob
|
||||
import os.path
|
||||
import sys
|
||||
|
||||
global yap_lib_path
|
||||
yap_lib_path = os.path.dirname(__file__)
|
||||
|
||||
|
||||
def load( dll ):
|
||||
dll = glob.glob(os.path.join(yap_lib_path,dll))[0]
|
||||
dll = os.path.abspath(dll)
|
||||
ctypes.CDLL(dll, mode=ctypes.RTLD_GLOBAL)
|
||||
|
||||
load('libYap*')
|
406
packages/python/swig/yap4py/prolog/INDEX.pl
Normal file
406
packages/python/swig/yap4py/prolog/INDEX.pl
Normal file
@ -0,0 +1,406 @@
|
||||
index(foreach,2,aggretate,library(aggregate)).
|
||||
index(aggregate,3,aggretate,library(aggregate)).
|
||||
index(aggregate,4,aggretate,library(aggregate)).
|
||||
index(aggregate_all,3,aggretate,library(aggregate)).
|
||||
index(aggregate_all,4,aggretate,library(aggregate)).
|
||||
index(free_variables,4,aggretate,library(aggregate)).
|
||||
index(genarg,3,arg,library(arg)).
|
||||
index(arg0,3,arg,library(arg)).
|
||||
index(genarg0,3,arg,library(arg)).
|
||||
index(args,3,arg,library(arg)).
|
||||
index(args0,3,arg,library(arg)).
|
||||
index(path_arg,3,arg,library(arg)).
|
||||
index(empty_assoc,1,assoc,library(assoc)).
|
||||
index(assoc_to_list,2,assoc,library(assoc)).
|
||||
index(is_assoc,1,assoc,library(assoc)).
|
||||
index(min_assoc,3,assoc,library(assoc)).
|
||||
index(max_assoc,3,assoc,library(assoc)).
|
||||
index(gen_assoc,3,assoc,library(assoc)).
|
||||
index(get_assoc,3,assoc,library(assoc)).
|
||||
index(get_assoc,5,assoc,library(assoc)).
|
||||
index(get_next_assoc,4,assoc,library(assoc)).
|
||||
index(get_prev_assoc,4,assoc,library(assoc)).
|
||||
index(list_to_assoc,2,assoc,library(assoc)).
|
||||
index(ord_list_to_assoc,2,assoc,library(assoc)).
|
||||
index(map_assoc,2,assoc,library(assoc)).
|
||||
index(map_assoc,3,assoc,library(assoc)).
|
||||
index(put_assoc,4,assoc,library(assoc)).
|
||||
index(del_assoc,4,assoc,library(assoc)).
|
||||
index(assoc_to_keys,2,assoc,library(assoc)).
|
||||
index(del_min_assoc,4,assoc,library(assoc)).
|
||||
index(del_max_assoc,4,assoc,library(assoc)).
|
||||
index(avl_new,1,avl,library(avl)).
|
||||
index(avl_insert,4,avl,library(avl)).
|
||||
index(avl_lookup,3,avl,library(avl)).
|
||||
index(b_hash_new,1,b_hash,library(bhash)).
|
||||
index(b_hash_new,2,b_hash,library(bhash)).
|
||||
index(b_hash_new,4,b_hash,library(bhash)).
|
||||
index(b_hash_lookup,3,b_hash,library(bhash)).
|
||||
index(b_hash_update,3,b_hash,library(bhash)).
|
||||
index(b_hash_update,4,b_hash,library(bhash)).
|
||||
index(b_hash_insert_new,4,b_hash,library(bhash)).
|
||||
index(b_hash_insert,4,b_hash,library(bhash)).
|
||||
index(format_to_chars,3,charsio,library(charsio)).
|
||||
index(format_to_chars,4,charsio,library(charsio)).
|
||||
index(write_to_chars,3,charsio,library(charsio)).
|
||||
index(write_to_chars,2,charsio,library(charsio)).
|
||||
index(atom_to_chars,3,charsio,library(charsio)).
|
||||
index(atom_to_chars,2,charsio,library(charsio)).
|
||||
index(number_to_chars,3,charsio,library(charsio)).
|
||||
index(number_to_chars,2,charsio,library(charsio)).
|
||||
index(read_from_chars,2,charsio,library(charsio)).
|
||||
index(open_chars_stream,2,charsio,library(charsio)).
|
||||
index(with_output_to_chars,2,charsio,library(charsio)).
|
||||
index(with_output_to_chars,3,charsio,library(charsio)).
|
||||
index(with_output_to_chars,4,charsio,library(charsio)).
|
||||
index(term_to_atom,2,charsio,library(charsio)).
|
||||
index(chr_show_store,1,chr,library(chr)).
|
||||
index(find_chr_constraint,1,chr,library(chr)).
|
||||
index(chr_trace,0,chr,library(chr)).
|
||||
index(chr_notrace,0,chr,library(chr)).
|
||||
index(chr_leash,1,chr,library(chr)).
|
||||
index(#>,2,clpfd,library(clpfd)).
|
||||
index(#<,2,clpfd,library(clpfd)).
|
||||
index(#>=,2,clpfd,library(clpfd)).
|
||||
index(#=<,2,clpfd,library(clpfd)).
|
||||
index(#=,2,clpfd,library(clpfd)).
|
||||
index(#\=,2,clpfd,library(clpfd)).
|
||||
index(#\,1,clpfd,library(clpfd)).
|
||||
index(#<==>,2,clpfd,library(clpfd)).
|
||||
index(#==>,2,clpfd,library(clpfd)).
|
||||
index(#<==,2,clpfd,library(clpfd)).
|
||||
index(#\/,2,clpfd,library(clpfd)).
|
||||
index(#/\,2,clpfd,library(clpfd)).
|
||||
index(in,2,clpfd,library(clpfd)).
|
||||
index(ins,2,clpfd,library(clpfd)).
|
||||
index(all_different,1,clpfd,library(clpfd)).
|
||||
index(all_distinct,1,clpfd,library(clpfd)).
|
||||
index(sum,3,clpfd,library(clpfd)).
|
||||
index(scalar_product,4,clpfd,library(clpfd)).
|
||||
index(tuples_in,2,clpfd,library(clpfd)).
|
||||
index(labeling,2,clpfd,library(clpfd)).
|
||||
index(label,1,clpfd,library(clpfd)).
|
||||
index(indomain,1,clpfd,library(clpfd)).
|
||||
index(lex_chain,1,clpfd,library(clpfd)).
|
||||
index(serialized,2,clpfd,library(clpfd)).
|
||||
index(global_cardinality,2,clpfd,library(clpfd)).
|
||||
index(global_cardinality,3,clpfd,library(clpfd)).
|
||||
index(circuit,1,clpfd,library(clpfd)).
|
||||
index(element,3,clpfd,library(clpfd)).
|
||||
index(automaton,3,clpfd,library(clpfd)).
|
||||
index(automaton,8,clpfd,library(clpfd)).
|
||||
index(transpose,2,clpfd,library(clpfd)).
|
||||
index(zcompare,3,clpfd,library(clpfd)).
|
||||
index(chain,2,clpfd,library(clpfd)).
|
||||
index(fd_var,1,clpfd,library(clpfd)).
|
||||
index(fd_inf,2,clpfd,library(clpfd)).
|
||||
index(fd_sup,2,clpfd,library(clpfd)).
|
||||
index(fd_size,2,clpfd,library(clpfd)).
|
||||
index(fd_dom,2,clpfd,library(clpfd)).
|
||||
index({},1,clpr,library(clpr)).
|
||||
index(maximize,1,clpr,library(clpr)).
|
||||
index(minimize,1,clpr,library(clpr)).
|
||||
index(inf,2,clpr,library(clpr)).
|
||||
index(inf,4,clpr,library(clpr)).
|
||||
index(sup,2,clpr,library(clpr)).
|
||||
index(sup,4,clpr,library(clpr)).
|
||||
index(bb_inf,3,clpr,library(clpr)).
|
||||
index(bb_inf,5,clpr,library(clpr)).
|
||||
index(ordering,1,clpr,library(clpr)).
|
||||
index(entailed,1,clpr,library(clpr)).
|
||||
index(clp_type,2,clpr,library(clpr)).
|
||||
index(dump,3,clpr,library(clpr)).
|
||||
index(gensym,2,gensym,library(gensym)).
|
||||
index(reset_gensym,1,gensym,library(gensym)).
|
||||
index(reset_gensym,0,gensym,library(gensym)).
|
||||
index(add_to_heap,4,heaps,library(heaps)).
|
||||
index(get_from_heap,4,heaps,library(heaps)).
|
||||
index(empty_heap,1,heaps,library(heaps)).
|
||||
index(heap_size,2,heaps,library(heaps)).
|
||||
index(heap_to_list,2,heaps,library(heaps)).
|
||||
index(list_to_heap,2,heaps,library(heaps)).
|
||||
index(min_of_heap,3,heaps,library(heaps)).
|
||||
index(min_of_heap,5,heaps,library(heaps)).
|
||||
index(jpl_get_default_jvm_opts,1,jpl,library(jpl)).
|
||||
index(jpl_set_default_jvm_opts,1,jpl,library(jpl)).
|
||||
index(jpl_get_actual_jvm_opts,1,jpl,library(jpl)).
|
||||
index(jpl_pl_lib_version,1,jpl,library(jpl)).
|
||||
index(jpl_c_lib_version,1,jpl,library(jpl)).
|
||||
index(jpl_new,3,jpl,library(jpl)).
|
||||
index(jpl_call,4,jpl,library(jpl)).
|
||||
index(jpl_get,3,jpl,library(jpl)).
|
||||
index(jpl_set,3,jpl,library(jpl)).
|
||||
index(jpl_servlet_byref,3,jpl,library(jpl)).
|
||||
index(jpl_servlet_byval,3,jpl,library(jpl)).
|
||||
index(jpl_class_to_classname,2,jpl,library(jpl)).
|
||||
index(jpl_class_to_type,2,jpl,library(jpl)).
|
||||
index(jpl_classname_to_class,2,jpl,library(jpl)).
|
||||
index(jpl_classname_to_type,2,jpl,library(jpl)).
|
||||
index(jpl_datum_to_type,2,jpl,library(jpl)).
|
||||
index(jpl_false,1,jpl,library(jpl)).
|
||||
index(jpl_is_class,1,jpl,library(jpl)).
|
||||
index(jpl_is_false,1,jpl,library(jpl)).
|
||||
index(jpl_is_null,1,jpl,library(jpl)).
|
||||
index(jpl_is_object,1,jpl,library(jpl)).
|
||||
index(jpl_is_object_type,1,jpl,library(jpl)).
|
||||
index(jpl_is_ref,1,jpl,library(jpl)).
|
||||
index(jpl_is_true,1,jpl,library(jpl)).
|
||||
index(jpl_is_type,1,jpl,library(jpl)).
|
||||
index(jpl_is_void,1,jpl,library(jpl)).
|
||||
index(jpl_null,1,jpl,library(jpl)).
|
||||
index(jpl_object_to_class,2,jpl,library(jpl)).
|
||||
index(jpl_object_to_type,2,jpl,library(jpl)).
|
||||
index(jpl_primitive_type,1,jpl,library(jpl)).
|
||||
index(jpl_ref_to_type,2,jpl,library(jpl)).
|
||||
index(jpl_true,1,jpl,library(jpl)).
|
||||
index(jpl_type_to_class,2,jpl,library(jpl)).
|
||||
index(jpl_type_to_classname,2,jpl,library(jpl)).
|
||||
index(jpl_void,1,jpl,library(jpl)).
|
||||
index(jpl_array_to_length,2,jpl,library(jpl)).
|
||||
index(jpl_array_to_list,2,jpl,library(jpl)).
|
||||
index(jpl_datums_to_array,2,jpl,library(jpl)).
|
||||
index(jpl_enumeration_element,2,jpl,library(jpl)).
|
||||
index(jpl_enumeration_to_list,2,jpl,library(jpl)).
|
||||
index(jpl_hashtable_pair,2,jpl,library(jpl)).
|
||||
index(jpl_iterator_element,2,jpl,library(jpl)).
|
||||
index(jpl_list_to_array,2,jpl,library(jpl)).
|
||||
index(jpl_list_to_array,3,jpl,library(jpl)).
|
||||
index(jpl_terms_to_array,2,jpl,library(jpl)).
|
||||
index(jpl_map_element,2,jpl,library(jpl)).
|
||||
index(jpl_set_element,2,jpl,library(jpl)).
|
||||
index(append,3,lists,library(lists)).
|
||||
index(append,2,lists,library(lists)).
|
||||
index(delete,3,lists,library(lists)).
|
||||
index(intersection,3,lists,library(lists)).
|
||||
index(flatten,2,lists,library(lists)).
|
||||
index(last,2,lists,library(lists)).
|
||||
index(list_concat,2,lists,library(lists)).
|
||||
index(max_list,2,lists,library(lists)).
|
||||
index(member,2,lists,library(lists)).
|
||||
index(memberchk,2,lists,library(lists)).
|
||||
index(min_list,2,lists,library(lists)).
|
||||
index(nextto,3,lists,library(lists)).
|
||||
index(nth,3,lists,library(lists)).
|
||||
index(nth,4,lists,library(lists)).
|
||||
index(nth0,3,lists,library(lists)).
|
||||
index(nth0,4,lists,library(lists)).
|
||||
index(nth1,3,lists,library(lists)).
|
||||
index(nth1,4,lists,library(lists)).
|
||||
index(numlist,3,lists,library(lists)).
|
||||
index(permutation,2,lists,library(lists)).
|
||||
index(prefix,2,lists,library(lists)).
|
||||
index(remove_duplicates,2,lists,library(lists)).
|
||||
index(reverse,2,lists,library(lists)).
|
||||
index(same_length,2,lists,library(lists)).
|
||||
index(select,3,lists,library(lists)).
|
||||
index(selectchk,3,lists,library(lists)).
|
||||
index(sublist,2,lists,library(lists)).
|
||||
index(substitute,4,lists,library(lists)).
|
||||
index(subtract,3,lists,library(lists)).
|
||||
index(suffix,2,lists,library(lists)).
|
||||
index(sum_list,2,lists,library(lists)).
|
||||
index(sum_list,3,lists,library(lists)).
|
||||
index(sumlist,2,lists,library(lists)).
|
||||
index(nb_queue,1,nb,library(nb)).
|
||||
index(nb_queue,2,nb,library(nb)).
|
||||
index(nb_queue_close,3,nb,library(nb)).
|
||||
index(nb_queue_enqueue,2,nb,library(nb)).
|
||||
index(nb_queue_dequeue,2,nb,library(nb)).
|
||||
index(nb_queue_peek,2,nb,library(nb)).
|
||||
index(nb_queue_empty,1,nb,library(nb)).
|
||||
index(nb_queue_size,2,nb,library(nb)).
|
||||
index(nb_heap,2,nb,library(nb)).
|
||||
index(nb_heap_close,1,nb,library(nb)).
|
||||
index(nb_heap_add,3,nb,library(nb)).
|
||||
index(nb_heap_del,3,nb,library(nb)).
|
||||
index(nb_heap_peek,3,nb,library(nb)).
|
||||
index(nb_heap_empty,1,nb,library(nb)).
|
||||
index(nb_heap_size,2,nb,library(nb)).
|
||||
index(nb_beam,2,nb,library(nb)).
|
||||
index(nb_beam_close,1,nb,library(nb)).
|
||||
index(nb_beam_add,3,nb,library(nb)).
|
||||
index(nb_beam_del,3,nb,library(nb)).
|
||||
index(nb_beam_peek,3,nb,library(nb)).
|
||||
index(nb_beam_empty,1,nb,library(nb)).
|
||||
index(nb_beam_size,2,nb,library(nb)).
|
||||
index(contains_term,2,occurs,library(occurs)).
|
||||
index(contains_var,2,occurs,library(occurs)).
|
||||
index(free_of_term,2,occurs,library(occurs)).
|
||||
index(free_of_var,2,occurs,library(occurs)).
|
||||
index(occurrences_of_term,3,occurs,library(occurs)).
|
||||
index(occurrences_of_var,3,occurs,library(occurs)).
|
||||
index(sub_term,2,occurs,library(occurs)).
|
||||
index(sub_var,2,occurs,library(occurs)).
|
||||
index(option,2,swi_option,library(option)).
|
||||
index(option,3,swi_option,library(option)).
|
||||
index(select_option,3,swi_option,library(option)).
|
||||
index(select_option,4,swi_option,library(option)).
|
||||
index(merge_options,3,swi_option,library(option)).
|
||||
index(meta_options,3,swi_option,library(option)).
|
||||
index(list_to_ord_set,2,ordsets,library(ordsets)).
|
||||
index(merge,3,ordsets,library(ordsets)).
|
||||
index(ord_add_element,3,ordsets,library(ordsets)).
|
||||
index(ord_del_element,3,ordsets,library(ordsets)).
|
||||
index(ord_disjoint,2,ordsets,library(ordsets)).
|
||||
index(ord_insert,3,ordsets,library(ordsets)).
|
||||
index(ord_member,2,ordsets,library(ordsets)).
|
||||
index(ord_intersect,2,ordsets,library(ordsets)).
|
||||
index(ord_intersect,3,ordsets,library(ordsets)).
|
||||
index(ord_intersection,3,ordsets,library(ordsets)).
|
||||
index(ord_intersection,4,ordsets,library(ordsets)).
|
||||
index(ord_seteq,2,ordsets,library(ordsets)).
|
||||
index(ord_setproduct,3,ordsets,library(ordsets)).
|
||||
index(ord_subset,2,ordsets,library(ordsets)).
|
||||
index(ord_subtract,3,ordsets,library(ordsets)).
|
||||
index(ord_symdiff,3,ordsets,library(ordsets)).
|
||||
index(ord_union,2,ordsets,library(ordsets)).
|
||||
index(ord_union,3,ordsets,library(ordsets)).
|
||||
index(ord_union,4,ordsets,library(ordsets)).
|
||||
index(ord_empty,1,ordsets,library(ordsets)).
|
||||
index(ord_memberchk,2,ordsets,library(ordsets)).
|
||||
index(pairs_keys_values,3,pairs,library(pairs)).
|
||||
index(pairs_values,2,pairs,library(pairs)).
|
||||
index(pairs_keys,2,pairs,library(pairs)).
|
||||
index(group_pairs_by_key,2,pairs,library(pairs)).
|
||||
index(transpose_pairs,2,pairs,library(pairs)).
|
||||
index(map_list_to_pairs,3,pairs,library(pairs)).
|
||||
index(xref_source,1,prolog_xref,library(prolog_xref)).
|
||||
index(xref_called,3,prolog_xref,library(prolog_xref)).
|
||||
index(xref_defined,3,prolog_xref,library(prolog_xref)).
|
||||
index(xref_definition_line,2,prolog_xref,library(prolog_xref)).
|
||||
index(xref_exported,2,prolog_xref,library(prolog_xref)).
|
||||
index(xref_module,2,prolog_xref,library(prolog_xref)).
|
||||
index(xref_op,2,prolog_xref,library(prolog_xref)).
|
||||
index(xref_clean,1,prolog_xref,library(prolog_xref)).
|
||||
index(xref_current_source,1,prolog_xref,library(prolog_xref)).
|
||||
index(xref_done,2,prolog_xref,library(prolog_xref)).
|
||||
index(xref_built_in,1,prolog_xref,library(prolog_xref)).
|
||||
index(xref_expand,2,prolog_xref,library(prolog_xref)).
|
||||
index(xref_source_file,3,prolog_xref,library(prolog_xref)).
|
||||
index(xref_source_file,4,prolog_xref,library(prolog_xref)).
|
||||
index(xref_public_list,4,prolog_xref,library(prolog_xref)).
|
||||
index(xref_meta,2,prolog_xref,library(prolog_xref)).
|
||||
index(xref_hook,1,prolog_xref,library(prolog_xref)).
|
||||
index(xref_used_class,2,prolog_xref,library(prolog_xref)).
|
||||
index(xref_defined_class,3,prolog_xref,library(prolog_xref)).
|
||||
index(set_test_options,1,plunit,library(plunit)).
|
||||
index(begin_tests,1,plunit,library(plunit)).
|
||||
index(begin_tests,2,plunit,library(plunit)).
|
||||
index(end_tests,1,plunit,library(plunit)).
|
||||
index(run_tests,0,plunit,library(plunit)).
|
||||
index(run_tests,1,plunit,library(plunit)).
|
||||
index(load_test_files,1,plunit,library(plunit)).
|
||||
index(running_tests,0,plunit,library(plunit)).
|
||||
index(test_report,1,plunit,library(plunit)).
|
||||
index(make_queue,1,queues,library(queues)).
|
||||
index(join_queue,3,queues,library(queues)).
|
||||
index(list_join_queue,3,queues,library(queues)).
|
||||
index(jump_queue,3,queues,library(queues)).
|
||||
index(list_jump_queue,3,queues,library(queues)).
|
||||
index(head_queue,2,queues,library(queues)).
|
||||
index(serve_queue,3,queues,library(queues)).
|
||||
index(length_queue,2,queues,library(queues)).
|
||||
index(empty_queue,1,queues,library(queues)).
|
||||
index(list_to_queue,2,queues,library(queues)).
|
||||
index(queue_to_list,2,queues,library(queues)).
|
||||
index(random,1,random,library(random)).
|
||||
index(random,3,random,library(random)).
|
||||
index(randseq,3,random,library(random)).
|
||||
index(randset,3,random,library(random)).
|
||||
index(getrand,1,random,library(random)).
|
||||
index(setrand,1,random,library(random)).
|
||||
index(rb_new,1,rbtrees,library(rbtrees)).
|
||||
index(rb_empty,1,rbtrees,library(rbtrees)).
|
||||
index(rb_lookup,3,rbtrees,library(rbtrees)).
|
||||
index(rb_update,4,rbtrees,library(rbtrees)).
|
||||
index(rb_update,5,rbtrees,library(rbtrees)).
|
||||
index(rb_apply,4,rbtrees,library(rbtrees)).
|
||||
index(rb_lookupall,3,rbtrees,library(rbtrees)).
|
||||
index(rb_insert,4,rbtrees,library(rbtrees)).
|
||||
index(rb_insert_new,4,rbtrees,library(rbtrees)).
|
||||
index(rb_delete,3,rbtrees,library(rbtrees)).
|
||||
index(rb_delete,4,rbtrees,library(rbtrees)).
|
||||
index(rb_visit,2,rbtrees,library(rbtrees)).
|
||||
index(rb_visit,3,rbtrees,library(rbtrees)).
|
||||
index(rb_keys,2,rbtrees,library(rbtrees)).
|
||||
index(rb_keys,3,rbtrees,library(rbtrees)).
|
||||
index(rb_map,2,rbtrees,library(rbtrees)).
|
||||
index(rb_map,3,rbtrees,library(rbtrees)).
|
||||
index(rb_partial_map,4,rbtrees,library(rbtrees)).
|
||||
index(rb_clone,3,rbtrees,library(rbtrees)).
|
||||
index(rb_clone,4,rbtrees,library(rbtrees)).
|
||||
index(rb_min,3,rbtrees,library(rbtrees)).
|
||||
index(rb_max,3,rbtrees,library(rbtrees)).
|
||||
index(rb_del_min,4,rbtrees,library(rbtrees)).
|
||||
index(rb_del_max,4,rbtrees,library(rbtrees)).
|
||||
index(rb_next,4,rbtrees,library(rbtrees)).
|
||||
index(rb_previous,4,rbtrees,library(rbtrees)).
|
||||
index(list_to_rbtree,2,rbtrees,library(rbtrees)).
|
||||
index(ord_list_to_rbtree,2,rbtrees,library(rbtrees)).
|
||||
index(is_rbtree,1,rbtrees,library(rbtrees)).
|
||||
index(rb_size,2,rbtrees,library(rbtrees)).
|
||||
index(rb_in,3,rbtrees,library(rbtrees)).
|
||||
index(read_line_to_codes,2,read_util,library(readutil)).
|
||||
index(read_line_to_codes,3,read_util,library(readutil)).
|
||||
index(read_stream_to_codes,2,read_util,library(readutil)).
|
||||
index(read_stream_to_codes,3,read_util,library(readutil)).
|
||||
index(read_file_to_codes,3,read_util,library(readutil)).
|
||||
index(read_file_to_terms,3,read_util,library(readutil)).
|
||||
index(regexp,3,regexp,library(regexp)).
|
||||
index(regexp,4,regexp,library(regexp)).
|
||||
index(load_foreign_library,1,shlib,library(shlib)).
|
||||
index(load_foreign_library,2,shlib,library(shlib)).
|
||||
index(unload_foreign_library,1,shlib,library(shlib)).
|
||||
index(unload_foreign_library,2,shlib,library(shlib)).
|
||||
index(current_foreign_library,2,shlib,library(shlib)).
|
||||
index(reload_foreign_libraries,0,shlib,library(shlib)).
|
||||
index(use_foreign_library,1,shlib,library(shlib)).
|
||||
index(use_foreign_library,2,shlib,library(shlib)).
|
||||
index(datime,1,operating_system_support,library(system)).
|
||||
index(delete_file,1,operating_system_support,library(system)).
|
||||
index(delete_file,2,operating_system_support,library(system)).
|
||||
index(directory_files,2,operating_system_support,library(system)).
|
||||
index(environ,2,operating_system_support,library(system)).
|
||||
index(exec,3,operating_system_support,library(system)).
|
||||
index(file_exists,1,operating_system_support,library(system)).
|
||||
index(file_exists,2,operating_system_support,library(system)).
|
||||
index(file_property,2,operating_system_support,library(system)).
|
||||
index(host_id,1,operating_system_support,library(system)).
|
||||
index(host_name,1,operating_system_support,library(system)).
|
||||
index(pid,1,operating_system_support,library(system)).
|
||||
index(kill,2,operating_system_support,library(system)).
|
||||
index(mktemp,2,operating_system_support,library(system)).
|
||||
index(make_directory,1,operating_system_support,library(system)).
|
||||
index(popen,3,operating_system_support,library(system)).
|
||||
index(rename_file,2,operating_system_support,library(system)).
|
||||
index(shell,0,operating_system_support,library(system)).
|
||||
index(shell,1,operating_system_support,library(system)).
|
||||
index(shell,2,operating_system_support,library(system)).
|
||||
index(sleep,1,operating_system_support,library(system)).
|
||||
index(system,0,operating_system_support,library(system)).
|
||||
index(system,1,operating_system_support,library(system)).
|
||||
index(system,2,operating_system_support,library(system)).
|
||||
index(mktime,2,operating_system_support,library(system)).
|
||||
index(tmpnam,1,operating_system_support,library(system)).
|
||||
index(tmp_file,2,operating_system_support,library(system)).
|
||||
index(tmpdir,1,operating_system_support,library(system)).
|
||||
index(wait,2,operating_system_support,library(system)).
|
||||
index(working_directory,2,operating_system_support,library(system)).
|
||||
index(term_hash,2,terms,library(terms)).
|
||||
index(term_hash,4,terms,library(terms)).
|
||||
index(instantiated_term_hash,4,terms,library(terms)).
|
||||
index(variant,2,terms,library(terms)).
|
||||
index(unifiable,3,terms,library(terms)).
|
||||
index(subsumes,2,terms,library(terms)).
|
||||
index(subsumes_chk,2,terms,library(terms)).
|
||||
index(cyclic_term,1,terms,library(terms)).
|
||||
index(variable_in_term,2,terms,library(terms)).
|
||||
index(variables_within_term,3,terms,library(terms)).
|
||||
index(new_variables_in_term,3,terms,library(terms)).
|
||||
index(time_out,3,timeout,library(timeout)).
|
||||
index(get_label,3,trees,library(trees)).
|
||||
index(list_to_tree,2,trees,library(trees)).
|
||||
index(map_tree,3,trees,library(trees)).
|
||||
index(put_label,4,trees,library(trees)).
|
||||
index(tree_size,2,trees,library(trees)).
|
||||
index(tree_to_list,2,trees,library(trees)).
|
52
packages/python/swig/yap4py/prolog/apply.yap
Normal file
52
packages/python/swig/yap4py/prolog/apply.yap
Normal file
@ -0,0 +1,52 @@
|
||||
/**
|
||||
* @file apply.yap
|
||||
* @author VITOR SANTOS COSTA <vsc@VITORs-MBP.lan>
|
||||
* @date Mon Nov 16 23:00:08 2015
|
||||
*
|
||||
* @brief Stub for maplist and friends
|
||||
*
|
||||
*
|
||||
*/
|
||||
|
||||
|
||||
:- module(apply_stub,[]).
|
||||
|
||||
/**
|
||||
* @file apply.yap
|
||||
* @defgroup apply_stub Apply Predicates
|
||||
*
|
||||
* @ingroup library
|
||||
*
|
||||
* @{
|
||||
|
||||
This library provides a SWI-compatible set of utilities for applying a
|
||||
predicate to all elements of a list.
|
||||
|
||||
The apply library is a _stub_, it just forwards definitions to the
|
||||
@ref maplist library. The predicates forwarded are:
|
||||
|
||||
- maplist/2,
|
||||
- maplist/3,
|
||||
- maplist/4,
|
||||
- maplist/5,
|
||||
- include/3,
|
||||
- exclude/3,
|
||||
- partition/4,
|
||||
- partition/5
|
||||
|
||||
*/
|
||||
|
||||
:- reexport(library(maplist),
|
||||
[maplist/2,
|
||||
maplist/3,
|
||||
maplist/4,
|
||||
maplist/5,
|
||||
include/3,
|
||||
exclude/3,
|
||||
partition/4,
|
||||
partition/5
|
||||
]).
|
||||
|
||||
|
||||
%% @}
|
||||
|
38
packages/python/swig/yap4py/prolog/apply_macros.yap
Normal file
38
packages/python/swig/yap4py/prolog/apply_macros.yap
Normal file
@ -0,0 +1,38 @@
|
||||
|
||||
%% @file apply_macros.yap
|
||||
%% @author E. Alphonse from code by Joachim Schimpf
|
||||
%% @date 15 June 2002
|
||||
%% @nrief Purpose: Macros to apply a predicate to all elements
|
||||
% of a list or to all sub-terms of a term.
|
||||
|
||||
:- module(apply_macros, []).
|
||||
|
||||
/**
|
||||
|
||||
@defgroup apply_macros Apply Interface to maplist
|
||||
@ingroup library
|
||||
@{
|
||||
|
||||
This library provides a SWI-compatible set of utilities for applying a
|
||||
predicate to all elements of a list.
|
||||
|
||||
The apply library just forwards
|
||||
definitions to the @ref maplist library, these include:
|
||||
|
||||
- maplist/2,
|
||||
- maplist/3,
|
||||
- maplist/4,
|
||||
- maplist/5,
|
||||
- include/3,
|
||||
- exclude/3,
|
||||
- partition/4,
|
||||
- partition/5
|
||||
|
||||
|
||||
*/
|
||||
|
||||
:- reexport(maplist).
|
||||
|
||||
:- reexport(mapargs).
|
||||
|
||||
%% @}
|
167
packages/python/swig/yap4py/prolog/arg.yap
Normal file
167
packages/python/swig/yap4py/prolog/arg.yap
Normal file
@ -0,0 +1,167 @@
|
||||
/**
|
||||
* @file arg.yap
|
||||
* @author VITOR SANTOS COSTA <vsc@VITORs-MBP.lan>
|
||||
* @date Tue Nov 17 01:08:55 2015
|
||||
*
|
||||
* @brief
|
||||
*/
|
||||
|
||||
|
||||
/**
|
||||
*
|
||||
|
||||
@defgroup args Term Argument Manipulation.
|
||||
|
||||
@ingroup @library
|
||||
|
||||
@{
|
||||
|
||||
Extends arg/3 by including backtracking through arguments and access
|
||||
to sub-arguments,
|
||||
|
||||
- arg0/3
|
||||
- args/3
|
||||
- args0/3
|
||||
- genarg/3
|
||||
- genarg0/3
|
||||
- path_arg/3
|
||||
|
||||
|
||||
It is based on the Quintus Prolog arg library. Except for project, all
|
||||
predicates use the arg/3 argument pattern.
|
||||
This file has been included in the YAP library by Vitor Santos Costa, 2008. No error checking is actuallly performed within the package: this left to the C-code thaat implements arg``/3 and
|
||||
genarg/3.
|
||||
*/
|
||||
|
||||
:- module(arg,
|
||||
[
|
||||
genarg/3,
|
||||
arg0/3,
|
||||
genarg0/3,
|
||||
args/3,
|
||||
args0/3,
|
||||
% project/3
|
||||
path_arg/3
|
||||
]).
|
||||
|
||||
|
||||
|
||||
/**
|
||||
* @pred arg0( +_Index_, +_Term_ , -_Arg_ )
|
||||
*
|
||||
* Similar to arg/3, but `arg0(0,_T_,_F_)` unifies _F_ with _T_'s principal functor:
|
||||
|
||||
~~~~~~~~~
|
||||
?- arg0(0, f(a,b), A).
|
||||
A = f.
|
||||
?- arg0(1, f(a,b), A).
|
||||
A = a.
|
||||
?- arg0(2, f(a,b), A).
|
||||
A = b.
|
||||
~~~~~~~~~
|
||||
|
||||
*/
|
||||
arg0(0,T,A) :- !,
|
||||
functor(T,A,_).
|
||||
arg0(I,T,A) :-
|
||||
arg(I,T,A).
|
||||
|
||||
/**
|
||||
* @pred genarg0( +_Index_, +_Term_ , -_Arg_ )
|
||||
*
|
||||
* Similar to genarg/3, but `genarg0(0,_T_,_F_)` unifies _F_ with _T_'s principal functor:
|
||||
~~~~~~~~~
|
||||
?- genarg0(I,f(a,b),A).
|
||||
A = f,
|
||||
I = 0 ? ;
|
||||
A = a,
|
||||
I = 1 ? ;
|
||||
A = b,
|
||||
I = 2.
|
||||
~~~~~~~~~
|
||||
|
||||
*/
|
||||
genarg0(I,T,A) :-
|
||||
nonvar(I), !,
|
||||
arg0(I,T,A).
|
||||
genarg0(0,T,A) :-
|
||||
functor(T,A,_).
|
||||
genarg0(I,T,A) :-
|
||||
genarg(I,T,A).
|
||||
|
||||
/**
|
||||
* @pred args( +_Index_, +_ListOfTerms_ , -_ListOfArgs_ )
|
||||
*
|
||||
* Succeeds if _ListOfArgs_ unifies with the application of genarg/3 to every element of _ListOfTerms_.
|
||||
|
||||
It corresponds to calling maplist/3 on genarg/3:
|
||||
~~~~~~~~~
|
||||
args( I, Ts, As) :-
|
||||
maplist( genarg(I), Ts, As).
|
||||
~~~~~~~~~
|
||||
|
||||
Notice that unification allows _ListOfArgs_ to be bound, eg:
|
||||
|
||||
~~~~~~~~~
|
||||
?- args(1, [X1+Y1,X2-Y2,X3*Y3,X4/Y4], [1,1,1,1]).
|
||||
X1 = X2 = X3 = X4 = 1.
|
||||
~~~~~~~~~
|
||||
|
||||
|
||||
*/
|
||||
args(_,[],[]).
|
||||
args(I,[T|List],[A|ArgList]) :-
|
||||
genarg(I, T, A),
|
||||
args(I, List, ArgList).
|
||||
|
||||
/**
|
||||
* @pred args0( +_Index_, +_ListOfTerms_ , -_ListOfArgs_ )
|
||||
*
|
||||
* Succeeds if _ListOfArgs_ unifies with the application of genarg0/3 to every element of _ListOfTerms_.
|
||||
|
||||
It corresponds to calling maplist/3 on genarg0/3:
|
||||
~~~~~~~~~
|
||||
args( I, Ts, As) :-
|
||||
maplist( genarg0(I), Ts, As).
|
||||
~~~~~~~~~
|
||||
|
||||
Notice that unification allows _ListOfArgs_ to be bound, eg:
|
||||
|
||||
~~~~~~~~~
|
||||
?- args(1, [X1+Y1,X2-Y2,X3*Y3,X4/Y4], [1,1,1,1]).
|
||||
X1 = X2 = X3 = X4 = 1.
|
||||
~~~~~~~~~
|
||||
|
||||
|
||||
*/
|
||||
args0(_,[],[]).
|
||||
args0(I,[T|List],[A|ArgList]) :-
|
||||
genarg(I, T, A),
|
||||
args0(I, List, ArgList).
|
||||
|
||||
/**
|
||||
* @pred args0( +_ListOfTerms_ , +_Index_, -_ListOfArgs_ )
|
||||
*
|
||||
* Succeeds if _ListOfArgs_ unifies with the application of genarg0/3 to every element of _ListOfTerms_.
|
||||
|
||||
It corresponds to calling args0/3 but with a different order.
|
||||
*/
|
||||
project(Terms, Index, Args) :-
|
||||
args0(Index, Terms, Args).
|
||||
|
||||
% no error checking here!
|
||||
/**
|
||||
* @pred path_arg( +_Path_ , +_Term_, -_Arg_ )
|
||||
*
|
||||
* Succeeds if _Path_ is empty and _Arg unifies with _Term_, or if _Path_ is a list with _Head_ and _Tail_, genarg/3 succeeds on the current term, and path_arg/3 succeeds on its argument.
|
||||
*
|
||||
* Notice that it can be used to enumerate all possible paths in a term.
|
||||
*/
|
||||
path_arg([], Term, Term).
|
||||
path_arg([Index|Indices], Term, SubTerm) :-
|
||||
genarg(Index, Term, Arg),
|
||||
path_arg(Indices, Arg, SubTerm).
|
||||
|
||||
%%% @}
|
||||
|
||||
/** @} */
|
296
packages/python/swig/yap4py/prolog/assoc.yap
Normal file
296
packages/python/swig/yap4py/prolog/assoc.yap
Normal file
@ -0,0 +1,296 @@
|
||||
|
||||
/**
|
||||
* @file assoc.yap
|
||||
* @author VITOR SANTOS COSTA <vsc@VITORs-MBP.lan>
|
||||
* @date Tue Nov 17 13:53:34 2015
|
||||
*
|
||||
* @brief Red-Black Implementation of Association Lists.
|
||||
*
|
||||
* This file has been included as an YAP library by Vitor Santos Costa, 1999
|
||||
*
|
||||
* Note: the keys should be bound, the associated values need not be.
|
||||
*/
|
||||
|
||||
:- module(assoc, [
|
||||
empty_assoc/1,
|
||||
assoc_to_list/2,
|
||||
is_assoc/1,
|
||||
min_assoc/3,
|
||||
max_assoc/3,
|
||||
gen_assoc/3,
|
||||
get_assoc/3,
|
||||
get_assoc/5,
|
||||
get_next_assoc/4,
|
||||
get_prev_assoc/4,
|
||||
list_to_assoc/2,
|
||||
ord_list_to_assoc/2,
|
||||
map_assoc/2,
|
||||
map_assoc/3,
|
||||
put_assoc/4,
|
||||
del_assoc/4,
|
||||
assoc_to_keys/2,
|
||||
del_min_assoc/4,
|
||||
del_max_assoc/4
|
||||
]).
|
||||
|
||||
/** @defgroup Association_Lists Association Lists
|
||||
@ingroup library
|
||||
@{
|
||||
|
||||
The following association list manipulation predicates are available
|
||||
once included with the `use_module(library(assoc))` command. The
|
||||
original library used Richard O'Keefe's implementation, on top of
|
||||
unbalanced binary trees. The current code utilises code from the
|
||||
red-black trees library and emulates the SICStus Prolog interface.
|
||||
|
||||
The library exports the following definitions:
|
||||
|
||||
- is/assoc/1
|
||||
|
||||
|
||||
*/
|
||||
|
||||
|
||||
|
||||
:- meta_predicate map_assoc(2, +, -), map_assoc(1, +).
|
||||
|
||||
:- use_module(library(rbtrees), [
|
||||
rb_empty/1,
|
||||
rb_visit/2,
|
||||
is_rbtree/1,
|
||||
rb_min/3,
|
||||
rb_max/3,
|
||||
rb_in/3,
|
||||
rb_lookup/3,
|
||||
rb_update/5,
|
||||
rb_next/4,
|
||||
rb_previous/4,
|
||||
list_to_rbtree/2,
|
||||
ord_list_to_rbtree/2,
|
||||
rb_map/2,
|
||||
rb_map/3,
|
||||
rb_keys/2,
|
||||
rb_update/4,
|
||||
rb_insert/4,
|
||||
rb_delete/4,
|
||||
rb_del_min/4,
|
||||
rb_del_max/4
|
||||
]).
|
||||
|
||||
/** @pred empty_assoc(+ _Assoc_)
|
||||
|
||||
Succeeds if association list _Assoc_ is empty.
|
||||
|
||||
*/
|
||||
empty_assoc(t).
|
||||
|
||||
/** @pred assoc_to_list(+ _Assoc_,? _List_)
|
||||
|
||||
|
||||
Given an association list _Assoc_ unify _List_ with a list of
|
||||
the form _Key-Val_, where the elements _Key_ are in ascending
|
||||
order.
|
||||
|
||||
|
||||
*/
|
||||
assoc_to_list(t, L) :- !, L = [].
|
||||
assoc_to_list(T, L) :-
|
||||
rb_visit(T, L).
|
||||
|
||||
/** @pred is_assoc(+ _Assoc_)
|
||||
|
||||
Succeeds if _Assoc_ is an association list, that is, if it is a
|
||||
red-black tree.
|
||||
*/
|
||||
is_assoc(t) :- !.
|
||||
is_assoc(T) :-
|
||||
is_rbtree(T).
|
||||
|
||||
/** @pred min_assoc(+ _Assoc_,- _Key_,? _Value_)
|
||||
|
||||
|
||||
Given the association list
|
||||
_Assoc_, _Key_ in the smallest key in the list, and _Value_
|
||||
the associated value.
|
||||
|
||||
|
||||
*/
|
||||
min_assoc(T,K,V) :-
|
||||
rb_min(T,K,V).
|
||||
|
||||
/** @pred max_assoc(+ _Assoc_,- _Key_,? _Value_)
|
||||
|
||||
|
||||
Given the association list
|
||||
_Assoc_, _Key_ in the largest key in the list, and _Value_
|
||||
the associated value.
|
||||
|
||||
|
||||
*/
|
||||
max_assoc(T,K,V) :-
|
||||
rb_max(T,K,V).
|
||||
|
||||
/** @pred gen_assoc( ?Key, +Assoc, ?Valu_)
|
||||
|
||||
|
||||
Given the association list _Assoc_, unify _Key_ and _Value_
|
||||
with a key-value pair in the list. It can be used to enumerate all elements
|
||||
in the association list.
|
||||
*/
|
||||
gen_assoc(K, T, V) :-
|
||||
rb_in(K,V,T).
|
||||
|
||||
/** @pred get_assoc(+ _Key_,+ _Assoc_,? _Value_)
|
||||
|
||||
|
||||
If _Key_ is one of the elements in the association list _Assoc_,
|
||||
return the associated value.
|
||||
*/
|
||||
get_assoc(K,T,V) :-
|
||||
rb_lookup(K,V,T).
|
||||
|
||||
/** @pred get_assoc(+ _Key_,+ _Assoc_,? _Value_,+ _NAssoc_,? _NValue_)
|
||||
|
||||
|
||||
If _Key_ is one of the elements in the association list _Assoc_,
|
||||
return the associated value _Value_ and a new association list
|
||||
_NAssoc_ where _Key_ is associated with _NValue_.
|
||||
|
||||
|
||||
*/
|
||||
get_assoc(K,T,V,NT,NV) :-
|
||||
rb_update(T,K,V,NV,NT).
|
||||
|
||||
/** @pred get_next_assoc(+ _Key_,+ _Assoc_,? _Next_,? _Value_)
|
||||
|
||||
If _Key_ is one of the elements in the association list _Assoc_,
|
||||
return the next key, _Next_, and its value, _Value_.
|
||||
|
||||
|
||||
*/
|
||||
get_next_assoc(K,T,KN,VN) :-
|
||||
rb_next(T,K,KN,VN).
|
||||
|
||||
/** @pred get_prev_assoc(+ _Key_,+ _Assoc_,? _Next_,? _Value_)
|
||||
|
||||
|
||||
If _Key_ is one of the elements in the association list _Assoc_,
|
||||
return the previous key, _Next_, and its value, _Value_.
|
||||
|
||||
|
||||
*/
|
||||
get_prev_assoc(K,T,KP,VP) :-
|
||||
rb_previous(T,K,KP,VP).
|
||||
|
||||
/** @pred list_to_assoc(+ _List_,? _Assoc_)
|
||||
|
||||
|
||||
Given a list _List_ such that each element of _List_ is of the
|
||||
form _Key-Val_, and all the _Keys_ are unique, _Assoc_ is
|
||||
the corresponding association list.
|
||||
|
||||
|
||||
*/
|
||||
list_to_assoc(L, T) :-
|
||||
list_to_rbtree(L, T).
|
||||
|
||||
/** @pred ord_list_to_assoc(+ _List_,? _Assoc_)
|
||||
|
||||
|
||||
Given an ordered list _List_ such that each element of _List_ is
|
||||
of the form _Key-Val_, and all the _Keys_ are unique, _Assoc_ is
|
||||
the corresponding association list.
|
||||
|
||||
*/
|
||||
ord_list_to_assoc(L, T) :-
|
||||
ord_list_to_rbtree(L, T).
|
||||
|
||||
/** @pred map_assoc(+ _Pred_,+ _Assoc_)
|
||||
|
||||
|
||||
Succeeds if the unary predicate name _Pred_( _Val_) holds for every
|
||||
element in the association list.
|
||||
|
||||
|
||||
*/
|
||||
map_assoc(t, _) :- !.
|
||||
map_assoc(P, T) :-
|
||||
yap_flag(typein_module, M0),
|
||||
extract_mod(P, M0, M, G),
|
||||
functor(G, Name, 1),
|
||||
rb_map(T, M:Name).
|
||||
|
||||
/** @pred map_assoc(+ _Pred_,+ _Assoc_,? _New_)
|
||||
|
||||
Given the binary predicate name _Pred_ and the association list
|
||||
_Assoc_, _New_ in an association list with keys in _Assoc_,
|
||||
and such that if _Key-Val_ is in _Assoc_, and _Key-Ans_ is in
|
||||
_New_, then _Pred_( _Val_, _Ans_) holds.*/
|
||||
map_assoc(t, T, T) :- !.
|
||||
map_assoc(P, T, NT) :-
|
||||
yap_flag(typein_module, M0),
|
||||
extract_mod(P, M0, M, G),
|
||||
functor(G, Name, 2),
|
||||
rb_map(T, M:Name, NT).
|
||||
|
||||
|
||||
extract_mod(G,_,_) :- var(G), !, fail.
|
||||
extract_mod(M:G, _, FM, FG ) :- !,
|
||||
extract_mod(G, M, FM, FG ).
|
||||
extract_mod(G, M, M, G ).
|
||||
|
||||
/** @pred put_assoc(+ _Key_,+ _Assoc_,+ _Val_,+ _New_)
|
||||
|
||||
The association list _New_ includes and element of association
|
||||
_key_ with _Val_, and all elements of _Assoc_ that did not
|
||||
have key _Key_.
|
||||
|
||||
*/
|
||||
put_assoc(K, T, V, NT) :-
|
||||
rb_update(T, K, V, NT), !.
|
||||
put_assoc(K, t, V, NT) :- !,
|
||||
rbtrees:rb_new(K,V,NT).
|
||||
put_assoc(K, T, V, NT) :-
|
||||
rb_insert(T, K, V, NT).
|
||||
|
||||
/** @pred del_assoc(+ _Key_, + _Assoc_, ? _Val_, ? _NewAssoc_)
|
||||
|
||||
|
||||
Succeeds if _NewAssoc_ is an association list, obtained by removing
|
||||
the element with _Key_ and _Val_ from the list _Assoc_.
|
||||
|
||||
|
||||
*/
|
||||
del_assoc(K, T, V, NT) :-
|
||||
rb_delete(T, K, V, NT).
|
||||
|
||||
/** @pred del_min_assoc(+ _Assoc_, ? _Key_, ? _Val_, ? _NewAssoc_)
|
||||
|
||||
|
||||
Succeeds if _NewAssoc_ is an association list, obtained by removing
|
||||
the smallest element of the list, with _Key_ and _Val_
|
||||
from the list _Assoc_.
|
||||
|
||||
*/
|
||||
del_min_assoc(T, K, V, NT) :-
|
||||
rb_del_min(T, K, V, NT).
|
||||
|
||||
/** @pred del_max_assoc(+ _Assoc_, ? _Key_, ? _Val_, ? _NewAssoc_)
|
||||
|
||||
|
||||
Succeeds if _NewAssoc_ is an association list, obtained by removing
|
||||
the largest element of the list, with _Key_ and _Val_ from the
|
||||
list _Assoc_.
|
||||
|
||||
*/
|
||||
del_max_assoc(T, K, V, NT) :-
|
||||
rb_del_max(T, K, V, NT).
|
||||
|
||||
|
||||
assoc_to_keys(T, Ks) :-
|
||||
rb_keys(T, Ks).
|
||||
|
||||
|
||||
/**
|
||||
@}
|
||||
*/
|
280
packages/python/swig/yap4py/prolog/atts.yap
Normal file
280
packages/python/swig/yap4py/prolog/atts.yap
Normal file
@ -0,0 +1,280 @@
|
||||
/*************************************************************************
|
||||
* *
|
||||
* YAP Prolog *
|
||||
* *
|
||||
* Yap Prolog was developed at NCCUP - Universidade do Porto *
|
||||
* *
|
||||
* Copyright L.Damas, V.S.Costa and Universidade do Porto 1985-1997 *
|
||||
* *
|
||||
**************************************************************************
|
||||
* *
|
||||
* File: atts.yap *
|
||||
* Last rev: 8/2/88 *
|
||||
* mods: *
|
||||
* comments: attribute support for Prolog *
|
||||
* *
|
||||
*************************************************************************/
|
||||
|
||||
:- module(attributes, [op(1150, fx, attribute)]).
|
||||
|
||||
/**
|
||||
*
|
||||
* @defgroup sicsatts SICStus style attribute declarations
|
||||
*
|
||||
* @ingroup attributes
|
||||
*
|
||||
* @{
|
||||
*
|
||||
|
||||
SICStus style attribute declarations are activated through loading the
|
||||
library <tt>atts</tt>. The command
|
||||
|
||||
~~~~~
|
||||
| ?- use_module(library(atts)).
|
||||
~~~~~
|
||||
enables this form of attributed variables.
|
||||
|
||||
The directive
|
||||
|
||||
- attribute/1
|
||||
|
||||
and the following user defined predicates can be used:
|
||||
|
||||
- Module:get_atts/2
|
||||
|
||||
- Module:put_atts/2
|
||||
|
||||
- Module:put_atts/3
|
||||
|
||||
- Module:woken_att_do/4
|
||||
|
||||
*/
|
||||
|
||||
|
||||
:- use_module(library(lists), [member/2]).
|
||||
|
||||
:- multifile
|
||||
user:goal_expansion/3.
|
||||
:- multifile
|
||||
user:term_expansion/2.
|
||||
:- multifile
|
||||
attributed_module/3.
|
||||
|
||||
:- dynamic existing_attribute/4.
|
||||
:- dynamic modules_with_attributes/1.
|
||||
:- dynamic attributed_module/3.
|
||||
|
||||
modules_with_attributes([]).
|
||||
|
||||
%
|
||||
% defining a new attribute is just a question of establishing a
|
||||
% Functor, Mod -> INT mappings
|
||||
%
|
||||
new_attribute(V) :- var(V), !,
|
||||
throw(error(instantiation_error,attribute(V))).
|
||||
new_attribute((At1,At2)) :-
|
||||
new_attribute(At1),
|
||||
new_attribute(At2).
|
||||
new_attribute(Na/Ar) :-
|
||||
source_module(Mod),
|
||||
functor(S,Na,Ar),
|
||||
existing_attribute(S,Mod,_,_) , !.
|
||||
new_attribute(Na/Ar) :-
|
||||
source_module(Mod),
|
||||
functor(S,Na,Ar),
|
||||
store_new_module(Mod,Ar,Position),
|
||||
assertz(existing_attribute(S,Mod,Ar,Position)).
|
||||
|
||||
store_new_module(Mod,Ar,ArgPosition) :-
|
||||
(
|
||||
retract(attributed_module(Mod,Position,_))
|
||||
->
|
||||
true
|
||||
;
|
||||
retract(modules_with_attributes(Mods)),
|
||||
assert(modules_with_attributes([Mod|Mods])), Position = 2
|
||||
),
|
||||
ArgPosition is Position+1,
|
||||
( Ar == 0 -> NOfAtts is Position+1 ; NOfAtts is Position+Ar),
|
||||
functor(AccessTerm,Mod,NOfAtts),
|
||||
assertz(attributed_module(Mod,NOfAtts,AccessTerm)).
|
||||
|
||||
:- user_defined_directive(attribute(G), attributes:new_attribute(G)).
|
||||
|
||||
/** @pred Module:get_atts( _-Var_, _?ListOfAttributes_)
|
||||
|
||||
|
||||
Unify the list _?ListOfAttributes_ with the attributes for the unbound
|
||||
variable _Var_. Each member of the list must be a bound term of the
|
||||
form `+( _Attribute_)`, `-( _Attribute_)` (the <tt>kbd</tt>
|
||||
prefix may be dropped). The meaning of <tt>+</tt> and <tt>-</tt> is:
|
||||
+ +( _Attribute_)
|
||||
Unifies _Attribute_ with a corresponding attribute associated with
|
||||
_Var_, fails otherwise.
|
||||
|
||||
+ -( _Attribute_)
|
||||
Succeeds if a corresponding attribute is not associated with
|
||||
_Var_. The arguments of _Attribute_ are ignored.
|
||||
|
||||
|
||||
*/
|
||||
user:goal_expansion(get_atts(Var,AccessSpec), Mod, Goal) :-
|
||||
expand_get_attributes(AccessSpec,Mod,Var,Goal).
|
||||
|
||||
/** @pred Module:put_atts( _-Var_, _?ListOfAttributes_)
|
||||
|
||||
|
||||
Associate with or remove attributes from a variable _Var_. The
|
||||
attributes are given in _?ListOfAttributes_, and the action depends
|
||||
on how they are prefixed:
|
||||
|
||||
+ +( _Attribute_ )
|
||||
Associate _Var_ with _Attribute_. A previous value for the
|
||||
attribute is simply replace (like with `set_mutable/2`).
|
||||
|
||||
+ -( _Attribute_ )
|
||||
Remove the attribute with the same name. If no such attribute existed,
|
||||
simply succeed.
|
||||
|
||||
*/
|
||||
user:goal_expansion(put_atts(Var,AccessSpec), Mod, Goal) :-
|
||||
expand_put_attributes(AccessSpec, Mod, Var, Goal).
|
||||
|
||||
|
||||
expand_get_attributes(V,_,_,_) :- var(V), !, fail.
|
||||
expand_get_attributes([],_,_,true) :- !.
|
||||
expand_get_attributes([-G1],Mod,V,attributes:free_att(V,Mod,Pos)) :-
|
||||
existing_attribute(G1,Mod,_,Pos), !.
|
||||
expand_get_attributes([+G1],Mod,V,attributes:get_att(V,Mod,Pos,A)) :-
|
||||
existing_attribute(G1,Mod,1,Pos), !,
|
||||
arg(1,G1,A).
|
||||
expand_get_attributes([G1],Mod,V,attributes:get_att(V,Mod,Pos,A)) :-
|
||||
existing_attribute(G1,Mod,1,Pos), !,
|
||||
arg(1,G1,A).
|
||||
expand_get_attributes(Atts,Mod,Var,attributes:get_module_atts(Var,AccessTerm)) :- Atts = [_|_], !,
|
||||
attributed_module(Mod,NOfAtts,AccessTerm),
|
||||
void_term(Void),
|
||||
cvt_atts(Atts,Mod,Void,LAtts),
|
||||
sort(LAtts,SortedLAtts),
|
||||
free_term(Free),
|
||||
build_att_term(1,NOfAtts,SortedLAtts,Free,AccessTerm).
|
||||
expand_get_attributes(Att,Mod,Var,Goal) :-
|
||||
expand_get_attributes([Att],Mod,Var,Goal).
|
||||
|
||||
build_att_term(NOfAtts,NOfAtts,[],_,_) :- !.
|
||||
build_att_term(I0,NOfAtts,[I-Info|SortedLAtts],Void,AccessTerm) :-
|
||||
I is I0+1, !,
|
||||
copy_att_args(Info,I0,NI,AccessTerm),
|
||||
build_att_term(NI,NOfAtts,SortedLAtts,Void,AccessTerm).
|
||||
build_att_term(I0,NOfAtts,SortedLAtts,Void,AccessTerm) :-
|
||||
I is I0+1,
|
||||
arg(I,AccessTerm,Void),
|
||||
build_att_term(I,NOfAtts,SortedLAtts,Void,AccessTerm).
|
||||
|
||||
cvt_atts(V,_,_,_) :- var(V), !, fail.
|
||||
cvt_atts([],_,_,[]).
|
||||
cvt_atts([V|_],_,_,_) :- var(V), !, fail.
|
||||
cvt_atts([+Att|Atts],Mod,Void,[Pos-LAtts|Read]) :- !,
|
||||
existing_attribute(Att,Mod,_,Pos),
|
||||
(atom(Att) -> LAtts = [_] ; Att=..[_|LAtts]),
|
||||
cvt_atts(Atts,Mod,Void,Read).
|
||||
cvt_atts([-Att|Atts],Mod,Void,[Pos-LVoids|Read]) :- !,
|
||||
existing_attribute(Att,Mod,_,Pos),
|
||||
(
|
||||
atom(Att)
|
||||
->
|
||||
LVoids = [Void]
|
||||
;
|
||||
Att =..[_|LAtts],
|
||||
void_vars(LAtts,Void,LVoids)
|
||||
),
|
||||
cvt_atts(Atts,Mod,Void,Read).
|
||||
cvt_atts([Att|Atts],Mod,Void,[Pos-LAtts|Read]) :- !,
|
||||
existing_attribute(Att,Mod,_,Pos),
|
||||
(atom(Att) -> LAtts = [_] ; Att=..[_|LAtts]),
|
||||
cvt_atts(Atts,Mod,Void,Read).
|
||||
|
||||
copy_att_args([],I,I,_).
|
||||
copy_att_args([V|Info],I,NI,AccessTerm) :-
|
||||
I1 is I+1,
|
||||
arg(I1,AccessTerm,V),
|
||||
copy_att_args(Info,I1,NI,AccessTerm).
|
||||
|
||||
void_vars([],_,[]).
|
||||
void_vars([_|LAtts],Void,[Void|LVoids]) :-
|
||||
void_vars(LAtts,Void,LVoids).
|
||||
|
||||
expand_put_attributes(V,_,_,_) :- var(V), !, fail.
|
||||
expand_put_attributes([-G1],Mod,V,attributes:rm_att(V,Mod,NOfAtts,Pos)) :-
|
||||
existing_attribute(G1,Mod,_,Pos), !,
|
||||
attributed_module(Mod,NOfAtts,_).
|
||||
expand_put_attributes([+G1],Mod,V,attributes:put_att(V,Mod,NOfAtts,Pos,A)) :-
|
||||
existing_attribute(G1,Mod,1,Pos), !,
|
||||
attributed_module(Mod,NOfAtts,_),
|
||||
arg(1,G1,A).
|
||||
expand_put_attributes([G1],Mod,V,attributes:put_att(V,Mod,NOfAtts,Pos,A)) :-
|
||||
existing_attribute(G1,Mod,1,Pos), !,
|
||||
attributed_module(Mod,NOfAtts,_),
|
||||
arg(1,G1,A).
|
||||
expand_put_attributes(Atts,Mod,Var,attributes:put_module_atts(Var,AccessTerm)) :- Atts = [_|_], !,
|
||||
attributed_module(Mod,NOfAtts,AccessTerm),
|
||||
void_term(Void),
|
||||
cvt_atts(Atts,Mod,Void,LAtts),
|
||||
sort(LAtts,SortedLAtts),
|
||||
free_term(Free),
|
||||
build_att_term(1,NOfAtts,SortedLAtts,Free,AccessTerm).
|
||||
expand_put_attributes(Att,Mod,Var,Goal) :-
|
||||
expand_put_attributes([Att],Mod,Var,Goal).
|
||||
|
||||
woken_att_do(AttVar, Binding, NGoals, DoNotBind) :-
|
||||
modules_with_attributes(AttVar,Mods0),
|
||||
modules_with_attributes(Mods),
|
||||
find_used(Mods,Mods0,[],ModsI),
|
||||
do_verify_attributes(ModsI, AttVar, Binding, Goals),
|
||||
process_goals(Goals, NGoals, DoNotBind).
|
||||
|
||||
% dirty trick to be able to unbind a variable that has been constrained.
|
||||
process_goals([], [], _).
|
||||
process_goals((M:do_not_bind_variable(Gs)).Goals, (M:Gs).NGoals, true) :- !,
|
||||
process_goals(Goals, NGoals, _).
|
||||
process_goals(G.Goals, G.NGoals, Do) :-
|
||||
process_goals(Goals, NGoals, Do).
|
||||
|
||||
find_used([],_,L,L).
|
||||
find_used([M|Mods],Mods0,L0,Lf) :-
|
||||
member(M,Mods0), !,
|
||||
find_used(Mods,Mods0,[M|L0],Lf).
|
||||
find_used([_|Mods],Mods0,L0,Lf) :-
|
||||
find_used(Mods,Mods0,L0,Lf).
|
||||
|
||||
/** @pred Module:verify_attributes( _-Var_, _+Value_, _-Goals_)
|
||||
|
||||
The predicate is called when trying to unify the attributed variable
|
||||
_Var_ with the Prolog term _Value_. Note that _Value_ may be
|
||||
itself an attributed variable, or may contain attributed variables. The
|
||||
goal <tt>verify_attributes/3</tt> is actually called before _Var_ is
|
||||
unified with _Value_.
|
||||
|
||||
It is up to the user to define which actions may be performed by
|
||||
<tt>verify_attributes/3</tt> but the procedure is expected to return in
|
||||
_Goals_ a list of goals to be called <em>after</em> _Var_ is
|
||||
unified with _Value_. If <tt>verify_attributes/3</tt> fails, the
|
||||
unification will fail.
|
||||
|
||||
Notice that the <tt>verify_attributes/3</tt> may be called even if _Var_<
|
||||
has no attributes in module <tt>Module</tt>. In this case the routine should
|
||||
simply succeed with _Goals_ unified with the empty list.
|
||||
|
||||
|
||||
*/
|
||||
do_verify_attributes([], _, _, []).
|
||||
do_verify_attributes([Mod|Mods], AttVar, Binding, [Mod:Goal|Goals]) :-
|
||||
current_predicate(verify_attributes,Mod:verify_attributes(_,_,_)), !,
|
||||
Mod:verify_attributes(AttVar, Binding, Goal),
|
||||
do_verify_attributes(Mods, AttVar, Binding, Goals).
|
||||
do_verify_attributes([_|Mods], AttVar, Binding, Goals) :-
|
||||
do_verify_attributes(Mods, AttVar, Binding, Goals).
|
||||
|
||||
/**
|
||||
@}
|
||||
*/
|
127
packages/python/swig/yap4py/prolog/autoloader.yap
Normal file
127
packages/python/swig/yap4py/prolog/autoloader.yap
Normal file
@ -0,0 +1,127 @@
|
||||
|
||||
:- module(autoloader,[make_library_index/0]).
|
||||
|
||||
:- use_module(library(lists),[append/3]).
|
||||
|
||||
:- dynamic exported/3, loaded/1.
|
||||
|
||||
make_library_index :-
|
||||
scan_library_exports,
|
||||
scan_swi_exports.
|
||||
|
||||
scan_library_exports :-
|
||||
% init table file.
|
||||
open('INDEX.pl', write, W),
|
||||
close(W),
|
||||
scan_exports('../GPL/aggregate', library(aggregate)),
|
||||
scan_exports(apply, library(apply)),
|
||||
scan_exports(arg, library(arg)),
|
||||
scan_exports(assoc, library(assoc)),
|
||||
scan_exports(avl, library(avl)),
|
||||
scan_exports(bhash, library(bhash)),
|
||||
scan_exports(charsio, library(charsio)),
|
||||
scan_exports('../packages/chr/chr_swi', library(chr)),
|
||||
scan_exports(clp/clpfd, library(clpfd)),
|
||||
scan_exports('../packages/clpqr/clpr', library(clpr)),
|
||||
scan_exports(gensym, library(gensym)),
|
||||
scan_exports(heaps, library(heaps)),
|
||||
scan_exports('../packages/jpl/jpl', library(jpl)),
|
||||
scan_exports(lists, library(lists)),
|
||||
scan_exports(nb, library(nb)),
|
||||
scan_exports(occurs, library(occurs)),
|
||||
scan_exports('../LGPL/option', library(option)),
|
||||
scan_exports(ordsets, library(ordsets)),
|
||||
scan_exports(pairs, library(pairs)),
|
||||
scan_exports('../LGPL/prolog_xref', library(prolog_xref)),
|
||||
scan_exports('../packages/plunit/plunit', library(plunit)),
|
||||
scan_exports(queues, library(queues)),
|
||||
scan_exports(random, library(random)),
|
||||
scan_exports(rbtrees, library(rbtrees)),
|
||||
scan_exports('../LGPL/readutil', library(readutil)),
|
||||
scan_exports(regexp, library(regexp)),
|
||||
scan_exports('../LGPL/shlib', library(shlib)),
|
||||
scan_exports(system, library(system)),
|
||||
scan_exports(terms, library(terms)),
|
||||
scan_exports(timeout, library(timeout)),
|
||||
scan_exports(trees, library(trees)).
|
||||
|
||||
scan_exports(Library, CallName) :-
|
||||
absolute_file_name(Library, Path,
|
||||
[ file_type(prolog),
|
||||
access(read),
|
||||
file_errors(fail)
|
||||
]),
|
||||
open(Path, read, O),
|
||||
!,
|
||||
get_exports(O, Exports, Module),
|
||||
close(O),
|
||||
open('INDEX.pl', append, W),
|
||||
publish_exports(Exports, W, CallName, Module),
|
||||
close(W).
|
||||
scan_exports(Library) :-
|
||||
format(user_error,'[ warning: library ~w not defined ]~n',[Library]).
|
||||
|
||||
%
|
||||
% SWI is the only language that uses autoload.
|
||||
%
|
||||
scan_swi_exports :-
|
||||
retractall(exported(_,_,_)),
|
||||
absolute_file_name(dialect/swi, Path,
|
||||
[ file_type(prolog),
|
||||
access(read),
|
||||
file_errors(fail)
|
||||
]),
|
||||
open(Path, read, O),
|
||||
get_exports(O, Exports, Module),
|
||||
get_reexports(O, Reexports, Exports),
|
||||
close(O),
|
||||
open('dialect/swi/INDEX.pl', write, W),
|
||||
publish_exports(Reexports, W, library(dialect/swi), Module),
|
||||
close(W).
|
||||
|
||||
get_exports(O, Exports, Module) :-
|
||||
read(O, (:- module(Module,Exports))), !.
|
||||
get_exports(O, Exports, Module) :-
|
||||
get_exports(O, Exports, Module).
|
||||
|
||||
get_reexports(O, Exports, ExportsL) :-
|
||||
read(O, (:- reexport(_File,ExportsI))), !,
|
||||
get_reexports(O, Exports0, ExportsL),
|
||||
append(ExportsI, Exports0, Exports).
|
||||
get_reexports(_, Exports, Exports).
|
||||
|
||||
publish_exports([], _, _, _).
|
||||
publish_exports([F/A|Exports], W, Path, Module) :-
|
||||
publish_export(F, A, W, Path, Module),
|
||||
publish_exports(Exports, W, Path, Module).
|
||||
publish_exports([F//A0|Exports], W, Path, Module) :-
|
||||
A is A0+2,
|
||||
publish_export(F, A, W, Path, Module),
|
||||
publish_exports(Exports, W, Path, Module).
|
||||
publish_exports([op(_,_,_)|Exports], W, Path, Module) :-
|
||||
publish_exports(Exports, W, Path, Module).
|
||||
|
||||
publish_export(F, A, _, _, Module) :-
|
||||
exported(F, A, M), M \= Module, !,
|
||||
format(user_error,'[ warning: clash between ~a and ~a over ~a/~d ]~n',[Module,M,F,A]).
|
||||
publish_export(F, A, W, Path, Module) :-
|
||||
assert(exported(F, A, Module)), !,
|
||||
portray_clause(W, index(F, A, Module, Path)).
|
||||
|
||||
find_predicate(G,ExportingModI) :-
|
||||
nonvar(G), !,
|
||||
functor(G, Name, Arity),
|
||||
index(Name,Arity,ExportingModI,File),
|
||||
ensure_file_loaded(File).
|
||||
find_predicate(G,ExportingModI) :-
|
||||
var(G),
|
||||
index(Name,Arity,ExportingModI,File),
|
||||
functor(G, Name, Arity),
|
||||
ensure_file_loaded(File).
|
||||
|
||||
ensure_file_loaded(File) :-
|
||||
loaded(File), !.
|
||||
ensure_file_loaded(File) :-
|
||||
load_files(autoloader:File,[silent(true),if(not_loaded)]),
|
||||
assert(loaded(File)).
|
||||
|
152
packages/python/swig/yap4py/prolog/avl.yap
Normal file
152
packages/python/swig/yap4py/prolog/avl.yap
Normal file
@ -0,0 +1,152 @@
|
||||
/*************************************************************************
|
||||
* *
|
||||
* YAP Prolog *
|
||||
* *
|
||||
* Yap Prolog was developed at NCCUP - Universidade do Porto *
|
||||
* *
|
||||
* Copyright L.Damas, V.S.Costa and Universidade do Porto 1985-1997 *
|
||||
* *
|
||||
**************************************************************************
|
||||
* *
|
||||
* File: regexp.yap *
|
||||
* Last rev: 5/15/2000 *
|
||||
* mods: *
|
||||
* comments: AVL trees in YAP (from code by M. van Emden, P. Vasey) *
|
||||
* *
|
||||
*************************************************************************/
|
||||
|
||||
/**
|
||||
* @file avl.yap
|
||||
* @author VITOR SANTOS COSTA <vsc@VITORs-MBP.lan>
|
||||
* @date Tue Nov 17 00:59:28 2015
|
||||
*
|
||||
* @brief Support for constructing AVL trees
|
||||
*
|
||||
*
|
||||
*/
|
||||
|
||||
|
||||
|
||||
:- module(avl, [
|
||||
avl_new/1,
|
||||
avl_insert/4,
|
||||
avl_lookup/3
|
||||
]).
|
||||
|
||||
/**
|
||||
* @defgroup avl AVL Trees
|
||||
* @ingroup library
|
||||
@{
|
||||
Supports constructing AVL trees, available through the directive:
|
||||
|
||||
~~~~~~~
|
||||
:- use_module(library(avl)).
|
||||
~~~~~~~
|
||||
|
||||
It includes the following predicates:
|
||||
|
||||
- avl_insert/4
|
||||
- avl_lookup/3
|
||||
- avl_new/1
|
||||
|
||||
AVL trees are balanced search binary trees. They are named after their
|
||||
inventors, Adelson-Velskii and Landis, and they were the first
|
||||
dynamically balanced trees to be proposed. The YAP AVL tree manipulation
|
||||
predicates library uses code originally written by Martin van Emdem and
|
||||
published in the Logic Programming Newsletter, Autumn 1981. A bug in
|
||||
this code was fixed by Philip Vasey, in the Logic Programming
|
||||
Newsletter, Summer 1982. The library currently only includes routines to
|
||||
insert and lookup elements in the tree. Please try red-black trees if
|
||||
you need deletion.
|
||||
|
||||
|
||||
*/
|
||||
|
||||
|
||||
/** @pred avl_new(+ _T_)
|
||||
|
||||
|
||||
Create a new tree.
|
||||
|
||||
|
||||
*/
|
||||
avl_new([]).
|
||||
|
||||
/** @pred avl_insert(+ _Key_,? _Value_,+ _T0_,- _TF_)
|
||||
|
||||
|
||||
Add an element with key _Key_ and _Value_ to the AVL tree
|
||||
_T0_ creating a new AVL tree _TF_. Duplicated elements are
|
||||
allowed.
|
||||
|
||||
|
||||
*/
|
||||
avl_insert(Key, Value, T0, TF) :-
|
||||
insert(T0, Key, Value, TF, _).
|
||||
|
||||
insert([], Key, Value, avl([],Key,Value,-,[]), yes).
|
||||
insert(avl(L,Root,RVal,Bl,R), E, Value, NewTree, WhatHasChanged) :-
|
||||
E @< Root, !,
|
||||
insert(L, E, Value, NewL, LeftHasChanged),
|
||||
adjust(avl(NewL,Root,RVal,Bl,R), LeftHasChanged, left, NewTree, WhatHasChanged).
|
||||
insert(avl(L,Root,RVal,Bl,R), E, Val, NewTree, WhatHasChanged) :-
|
||||
% E @>= Root, currently we allow duplicated values, although
|
||||
% lookup will only fetch the first.
|
||||
insert(R, E, Val,NewR, RightHasChanged),
|
||||
adjust(avl(L,Root,RVal,Bl,NewR), RightHasChanged, right, NewTree, WhatHasChanged).
|
||||
|
||||
adjust(Oldtree, no, _, Oldtree, no).
|
||||
adjust(avl(L,Root,RVal,Bl,R), yes, Lor, NewTree, WhatHasChanged) :-
|
||||
table(Bl, Lor, Bl1, WhatHasChanged, ToBeRebalanced),
|
||||
rebalance(avl(L, Root, RVal, Bl, R), Bl1, ToBeRebalanced, NewTree).
|
||||
|
||||
% balance where balance whole tree to be
|
||||
% before inserted after increased rebalanced
|
||||
table(- , left , < , yes , no ).
|
||||
table(- , right , > , yes , no ).
|
||||
table(< , left , - , no , yes ).
|
||||
table(< , right , - , no , no ).
|
||||
table(> , left , - , no , no ).
|
||||
table(> , right , - , no , yes ).
|
||||
|
||||
rebalance(avl(Lst, Root, RVal, _Bl, Rst), Bl1, no, avl(Lst, Root, RVal, Bl1,Rst)).
|
||||
rebalance(OldTree, _, yes, NewTree) :-
|
||||
avl_geq(OldTree,NewTree).
|
||||
|
||||
avl_geq(avl(Alpha,A,VA,>,avl(Beta,B,VB,>,Gamma)),
|
||||
avl(avl(Alpha,A,VA,-,Beta),B,VB,-,Gamma)).
|
||||
avl_geq(avl(avl(Alpha,A,VA,<,Beta),B,VB,<,Gamma),
|
||||
avl(Alpha,A,VA,-,avl(Beta,B,VB,-,Gamma))).
|
||||
avl_geq(avl(Alpha,A,VA,>,avl(avl(Beta,X,VX,Bl1,Gamma),B,VB,<,Delta)),
|
||||
avl(avl(Alpha,A,VA,Bl2,Beta),X,VX,-,avl(Gamma,B,VB,Bl3,Delta))) :-
|
||||
table2(Bl1,Bl2,Bl3).
|
||||
avl_geq(avl(avl(Alpha,A,VA,>,avl(Beta,X,VX,Bl1,Gamma)),B,VB,<,Delta),
|
||||
avl(avl(Alpha,A,VA,Bl2,Beta),X,VX,-,avl(Gamma,B,VB,Bl3,Delta))) :-
|
||||
table2(Bl1,Bl2,Bl3).
|
||||
|
||||
table2(< ,- ,> ).
|
||||
table2(> ,< ,- ).
|
||||
table2(- ,- ,- ).
|
||||
|
||||
/** @pred avl_lookup(+ _Key_,- _Value_,+ _T_)
|
||||
|
||||
|
||||
Lookup an element with key _Key_ in the AVL tree
|
||||
_T_, returning the value _Value_.
|
||||
|
||||
*/
|
||||
|
||||
avl_lookup(Key, Value, avl(L,Key0,KVal,_,R)) :-
|
||||
compare(Cmp, Key, Key0),
|
||||
avl_lookup(Cmp, Value, L, R, Key, KVal).
|
||||
|
||||
avl_lookup(=, Value, _, _, _, Value).
|
||||
avl_lookup(<, Value, L, _, Key, _) :-
|
||||
avl_lookup(Key, Value, L).
|
||||
avl_lookup(>, Value, _, R, Key, _) :-
|
||||
avl_lookup(Key, Value, R).
|
||||
|
||||
|
||||
/**
|
||||
@}
|
||||
*/
|
332
packages/python/swig/yap4py/prolog/bhash.yap
Normal file
332
packages/python/swig/yap4py/prolog/bhash.yap
Normal file
@ -0,0 +1,332 @@
|
||||
%% -*- Prolog -*-
|
||||
|
||||
/**
|
||||
* @file bhash.yap
|
||||
* @author VITOR SANTOS COSTA <vsc@VITORs-MBP.lan>
|
||||
* @date Tue Nov 17 01:11:29 2015
|
||||
*
|
||||
* @brief Backtrackable Hash Tables
|
||||
*
|
||||
*
|
||||
*/
|
||||
|
||||
:- source.
|
||||
:- yap_flag(unknown,error).
|
||||
:- style_check(all).
|
||||
|
||||
:- module(b_hash, [ b_hash_new/1,
|
||||
b_hash_new/2,
|
||||
b_hash_new/4,
|
||||
b_hash_lookup/3,
|
||||
b_hash_update/3,
|
||||
b_hash_update/4,
|
||||
b_hash_insert_new/4,
|
||||
b_hash_insert/4,
|
||||
b_hash_size/2,
|
||||
b_hash_code/2,
|
||||
is_b_hash/1,
|
||||
b_hash_to_list/2,
|
||||
b_hash_values_to_list/2,
|
||||
b_hash_keys_to_list/2
|
||||
]).
|
||||
|
||||
/**
|
||||
* @defgroup bhash Backtrackable Hash Tables
|
||||
* @ingroup library
|
||||
|
||||
@{
|
||||
|
||||
This library implements hash-arrays.
|
||||
It requires the hash key to be a ground term. The library can
|
||||
be loaded as
|
||||
|
||||
:- use_module( library( bhash ) ).
|
||||
|
||||
This code relies on backtrackable updates. The default hash key is
|
||||
generated by term_hash/4.
|
||||
|
||||
*/
|
||||
|
||||
:- use_module(library(terms), [ term_hash/4 ]).
|
||||
|
||||
|
||||
:- meta_predicate(b_hash_new(-,+,3,2)).
|
||||
|
||||
array_default_size(2048).
|
||||
|
||||
/** @pred is_b_hash( +Hash )
|
||||
|
||||
Term _Hash_ is a hash table.
|
||||
*/
|
||||
is_b_hash(V) :- var(V), !, fail.
|
||||
is_b_hash(hash(_,_,_,_,_)).
|
||||
|
||||
/** @pred b_hash_new( -NewHash )
|
||||
|
||||
Create a empty hash table _NewHash_, with size 2048 entries.
|
||||
*/
|
||||
b_hash_new(hash(Keys, Vals, Size, N, _, _)) :-
|
||||
array_default_size(Size),
|
||||
array(Keys, Size),
|
||||
array(Vals, Size),
|
||||
create_mutable(0, N).
|
||||
|
||||
/** @pred b_hash_new( -_NewHash_, +_Size_ )
|
||||
|
||||
Create a empty hash table, with size _Size_ entries.
|
||||
*/
|
||||
b_hash_new(hash(Keys, Vals, Size, N, _, _), Size) :-
|
||||
array(Keys, Size),
|
||||
array(Vals, Size),
|
||||
create_mutable(0, N).
|
||||
|
||||
/** @pred b_hash_new( -_NewHash_, +_Size_, :_Hash_, :_Cmp_ )
|
||||
|
||||
Create a empty hash table, with size _Size_ entries.
|
||||
_Hash_ defines a partition function, and _Cmp_ defined a comparison function.
|
||||
*/
|
||||
b_hash_new(hash(Keys,Vals, Size, N, HashF, CmpF), Size, HashF, CmpF) :-
|
||||
array(Keys, Size),
|
||||
array(Vals, Size),
|
||||
create_mutable(0, N).
|
||||
|
||||
/**
|
||||
@pred b_hash_size( +_Hash_, -_Size_ )
|
||||
|
||||
_Size_ unifies with the size of the hash table _Hash_.
|
||||
*/
|
||||
b_hash_size(hash(_, _, Size, _, _, _), Size).
|
||||
|
||||
/**
|
||||
@pred b_hash_lookup( +_Key_, ?_Val_, +_Hash_ )
|
||||
|
||||
Search the ground term _Key_ in table _Hash_ and unify _Val_ with the associated entry.
|
||||
*/
|
||||
b_hash_lookup(Key, Val, hash(Keys, Vals, Size, _, F, CmpF)):-
|
||||
hash_f(Key, Size, Index, F),
|
||||
fetch_key(Keys, Index, Size, Key, CmpF, ActualIndex),
|
||||
array_element(Vals, ActualIndex, Mutable),
|
||||
get_mutable(Val, Mutable).
|
||||
|
||||
fetch_key(Keys, Index, Size, Key, CmpF, ActualIndex) :-
|
||||
array_element(Keys, Index, El),
|
||||
nonvar(El),
|
||||
(
|
||||
cmp_f(CmpF, El, Key)
|
||||
->
|
||||
Index = ActualIndex
|
||||
;
|
||||
I1 is (Index+1) mod Size,
|
||||
fetch_key(Keys, I1, Size, Key, CmpF, ActualIndex)
|
||||
).
|
||||
|
||||
/**
|
||||
@pred b_hash_update( +_Key_, +_Hash_, +NewVal )
|
||||
|
||||
Update to the value associated with the ground term _Key_ in table _Hash_ to _NewVal_.
|
||||
*/
|
||||
b_hash_update(Hash, Key, NewVal):-
|
||||
Hash = hash(Keys, Vals, Size, _, F, CmpF),
|
||||
hash_f(Key,Size,Index,F),
|
||||
fetch_key(Keys, Index, Size, Key, CmpF, ActualIndex),
|
||||
array_element(Vals, ActualIndex, Mutable),
|
||||
update_mutable(NewVal, Mutable).
|
||||
|
||||
/**
|
||||
@pred b_hash_update( +_Key_, -_OldVal_, +_Hash_, +NewVal )
|
||||
|
||||
Update to the value associated with the ground term _Key_ in table _Hash_ to _NewVal_, and unify _OldVal_ with the current value.
|
||||
*/
|
||||
b_hash_update(Hash, Key, OldVal, NewVal):-
|
||||
Hash = hash(Keys, Vals, Size, _, F, CmpF),
|
||||
hash_f(Key,Size,Index,F),
|
||||
fetch_key(Keys, Index, Size, Key, CmpF, ActualIndex),
|
||||
array_element(Vals, ActualIndex, Mutable),
|
||||
get_mutable(OldVal, Mutable),
|
||||
update_mutable(NewVal, Mutable).
|
||||
|
||||
/** b_hash_insert(+_Hash_, +_Key_, _Val_, +_NewHash_ )
|
||||
|
||||
Insert the term _Key_-_Val_ in table _Hash_ and unify _NewHash_ with the result. If ground term _Key_ exists, update the dictionary.
|
||||
*/
|
||||
b_hash_insert(Hash, Key, NewVal, NewHash):-
|
||||
Hash = hash(Keys, Vals, Size, N, F, CmpF),
|
||||
hash_f(Key,Size,Index,F),
|
||||
find_or_insert(Keys, Index, Size, N, CmpF, Vals, Key, NewVal, Hash, NewHash).
|
||||
|
||||
find_or_insert(Keys, Index, Size, N, CmpF, Vals, Key, NewVal, Hash, NewHash) :-
|
||||
array_element(Keys, Index, El),
|
||||
(
|
||||
var(El)
|
||||
->
|
||||
add_element(Keys, Index, Size, N, Vals, Key, NewVal, Hash, NewHash)
|
||||
;
|
||||
cmp_f(CmpF, El, Key)
|
||||
->
|
||||
% do rb_update
|
||||
array_element(Vals, Index, Mutable),
|
||||
update_mutable(NewVal, Mutable),
|
||||
Hash = NewHash
|
||||
;
|
||||
I1 is (Index+1) mod Size,
|
||||
find_or_insert(Keys, I1, Size, N, CmpF, Vals, Key, NewVal, Hash, NewHash)
|
||||
).
|
||||
|
||||
/**
|
||||
@pred b_hash_insert_new(+_Hash_, +_Key_, _Val_, +_NewHash_ )
|
||||
|
||||
Insert the term _Key_-_Val_ in table _Hash_ and unify _NewHash_ with the result. If ground term _Key_ exists, fail.
|
||||
*/
|
||||
b_hash_insert_new(Hash, Key, NewVal, NewHash):-
|
||||
Hash = hash(Keys, Vals, Size, N, F, CmpF),
|
||||
hash_f(Key,Size,Index,F),
|
||||
find_or_insert_new(Keys, Index, Size, N, CmpF, Vals, Key, NewVal, Hash, NewHash).
|
||||
|
||||
find_or_insert_new(Keys, Index, Size, N, CmpF, Vals, Key, NewVal, Hash, NewHash) :-
|
||||
array_element(Keys, Index, El),
|
||||
(
|
||||
var(El)
|
||||
->
|
||||
add_element(Keys, Index, Size, N, Vals, Key, NewVal, Hash, NewHash)
|
||||
;
|
||||
cmp_f(CmpF, El, Key)
|
||||
->
|
||||
fail
|
||||
;
|
||||
I1 is (Index+1) mod Size,
|
||||
find_or_insert_new(Keys, I1, Size, N, CmpF, Vals, Key, NewVal, Hash, NewHash)
|
||||
).
|
||||
|
||||
add_element(Keys, Index, Size, N, Vals, Key, NewVal, Hash, NewHash) :-
|
||||
get_mutable(NEls, N),
|
||||
NN is NEls+1,
|
||||
update_mutable(NN, N),
|
||||
array_element(Keys, Index, Key),
|
||||
update_mutable(NN, N),
|
||||
array_element(Vals, Index, Mutable),
|
||||
create_mutable(NewVal, Mutable),
|
||||
(
|
||||
NN > Size/3
|
||||
->
|
||||
expand_array(Hash, NewHash)
|
||||
;
|
||||
Hash = NewHash
|
||||
).
|
||||
|
||||
expand_array(Hash, NewHash) :-
|
||||
Hash == NewHash, !,
|
||||
Hash = hash(Keys, Vals, Size, _X, F, _CmpF),
|
||||
new_size(Size, NewSize),
|
||||
array(NewKeys, NewSize),
|
||||
array(NewVals, NewSize),
|
||||
copy_hash_table(Size, Keys, Vals, F, NewSize, NewKeys, NewVals),
|
||||
/* overwrite in place */
|
||||
setarg(1, Hash, NewKeys),
|
||||
setarg(2, Hash, NewVals),
|
||||
setarg(3, Hash, NewSize).
|
||||
|
||||
expand_array(Hash, hash(NewKeys, NewVals, NewSize, X, F, CmpF)) :-
|
||||
Hash = hash(Keys, Vals, Size, X, F, CmpF),
|
||||
new_size(Size, NewSize),
|
||||
array(NewKeys, NewSize),
|
||||
array(NewVals, NewSize),
|
||||
copy_hash_table(Size, Keys, Vals, F, NewSize, NewKeys, NewVals).
|
||||
|
||||
new_size(Size, NewSize) :-
|
||||
Size > 1048576, !,
|
||||
NewSize is Size+1048576.
|
||||
new_size(Size, NewSize) :-
|
||||
NewSize is Size*2.
|
||||
|
||||
copy_hash_table(0, _, _, _, _, _, _) :- !.
|
||||
copy_hash_table(I1, Keys, Vals, F, Size, NewKeys, NewVals) :-
|
||||
I is I1-1,
|
||||
array_element(Keys, I, Key),
|
||||
nonvar(Key), !,
|
||||
array_element(Vals, I, Val),
|
||||
insert_el(Key, Val, Size, F, NewKeys, NewVals),
|
||||
copy_hash_table(I, Keys, Vals, F, Size, NewKeys, NewVals).
|
||||
copy_hash_table(I1, Keys, Vals, F, Size, NewKeys, NewVals) :-
|
||||
I is I1-1,
|
||||
copy_hash_table(I, Keys, Vals, F, Size, NewKeys, NewVals).
|
||||
|
||||
insert_el(Key, Val, Size, F, NewKeys, NewVals) :-
|
||||
hash_f(Key,Size,Index, F),
|
||||
find_free(Index, Size, NewKeys, TrueIndex),
|
||||
array_element(NewKeys, TrueIndex, Key),
|
||||
array_element(NewVals, TrueIndex, Val).
|
||||
|
||||
find_free(Index, Size, Keys, NewIndex) :-
|
||||
array_element(Keys, Index, El),
|
||||
(
|
||||
var(El)
|
||||
->
|
||||
NewIndex = Index
|
||||
;
|
||||
I1 is (Index+1) mod Size,
|
||||
find_free(I1, Size, Keys, NewIndex)
|
||||
).
|
||||
|
||||
hash_f(Key, Size, Index, F) :-
|
||||
var(F), !,
|
||||
term_hash(Key,-1,Size,Index).
|
||||
hash_f(Key, Size, Index, F) :-
|
||||
call(F, Key, Size, Index).
|
||||
|
||||
cmp_f(F, A, B) :-
|
||||
var(F), !,
|
||||
A == B.
|
||||
cmp_f(F, A, B) :-
|
||||
call(F, A, B).
|
||||
|
||||
/**
|
||||
@pred b_hash_to_list(+_Hash_, -_KeyValList_ )
|
||||
|
||||
The term _KeyValList_ unifies with a list containing all terms _Key_-_Val_ in the hash table.
|
||||
*/
|
||||
b_hash_to_list(hash(Keys, Vals, _, _, _, _), LKeyVals) :-
|
||||
Keys =.. (_.LKs),
|
||||
Vals =.. (_.LVs),
|
||||
mklistpairs(LKs, LVs, LKeyVals).
|
||||
|
||||
/**
|
||||
@pred b_key_to_list(+_Hash_, -_KeyList_ )
|
||||
|
||||
The term _KeyList_ unifies with a list containing all keys in the hash table.
|
||||
*/
|
||||
b_hash_keys_to_list(hash(Keys, _, _, _, _, _), LKeys) :-
|
||||
Keys =.. (_.LKs),
|
||||
mklistels(LKs, LKeys).
|
||||
|
||||
/**
|
||||
@pred b_key_to_list(+_Hash_, -_ValList_ )
|
||||
|
||||
The term _`valList_ unifies with a list containing all values in the hash table.
|
||||
*/
|
||||
b_hash_values_to_list(hash(_, Vals, _, _, _, _), LVals) :-
|
||||
Vals =.. (_.LVs),
|
||||
mklistvals(LVs, LVals).
|
||||
|
||||
mklistpairs([], [], []).
|
||||
mklistpairs(V.LKs, _.LVs, KeyVals) :- var(V), !,
|
||||
mklistpairs(LKs, LVs, KeyVals).
|
||||
mklistpairs(K.LKs, V.LVs, (K-VV).KeyVals) :-
|
||||
get_mutable(VV, V),
|
||||
mklistpairs(LKs, LVs, KeyVals).
|
||||
|
||||
mklistels([], []).
|
||||
mklistels(V.Els, NEls) :- var(V), !,
|
||||
mklistels(Els, NEls).
|
||||
mklistels(K.Els, K.NEls) :-
|
||||
mklistels(Els, NEls).
|
||||
|
||||
mklistvals([], []).
|
||||
mklistvals(V.Vals, NVals) :- var(V), !,
|
||||
mklistvals(Vals, NVals).
|
||||
mklistvals(K.Vals, KK.NVals) :-
|
||||
get_mutable(KK, K),
|
||||
mklistvals(Vals, NVals).
|
||||
|
||||
/**
|
||||
@}
|
||||
*/
|
477
packages/python/swig/yap4py/prolog/block_diagram.yap
Normal file
477
packages/python/swig/yap4py/prolog/block_diagram.yap
Normal file
@ -0,0 +1,477 @@
|
||||
%%% -*- Mode: Prolog; -*-
|
||||
|
||||
/**
|
||||
* @file block_diagram.yap
|
||||
* @author Theofrastos Mantadelis, Sugestions from Paulo Moura
|
||||
* @date Tue Nov 17 14:12:02 2015
|
||||
*
|
||||
* @brief Graph the program structure.
|
||||
*
|
||||
* @{
|
||||
*/
|
||||
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
%
|
||||
% Flags was developed at Katholieke Universiteit Leuven
|
||||
%
|
||||
% Copyright 2010
|
||||
% Katholieke Universiteit Leuven
|
||||
%
|
||||
% Contributions to this file:
|
||||
% Author: Theofrastos Mantadelis
|
||||
% Sugestions: Paulo Moura
|
||||
% Version: 1
|
||||
% Date: 19/11/2010
|
||||
%
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
%
|
||||
% Artistic License 2.0
|
||||
%
|
||||
% Copyright (c) 2000-2006, The Perl Foundation.
|
||||
%
|
||||
% Everyone is permitted to copy and distribute verbatim copies of this
|
||||
% license document, but changing it is not allowed. Preamble
|
||||
%
|
||||
% This license establishes the terms under which a given free software
|
||||
% Package may be copied, modified, distributed, and/or
|
||||
% redistributed. The intent is that the Copyright Holder maintains some
|
||||
% artistic control over the development of that Package while still
|
||||
% keeping the Package available as open source and free software.
|
||||
%
|
||||
% You are always permitted to make arrangements wholly outside of this
|
||||
% license directly with the Copyright Holder of a given Package. If the
|
||||
% terms of this license do not permit the full use that you propose to
|
||||
% make of the Package, you should contact the Copyright Holder and seek
|
||||
% a different licensing arrangement. Definitions
|
||||
%
|
||||
% "Copyright Holder" means the individual(s) or organization(s) named in
|
||||
% the copyright notice for the entire Package.
|
||||
%
|
||||
% "Contributor" means any party that has contributed code or other
|
||||
% material to the Package, in accordance with the Copyright Holder's
|
||||
% procedures.
|
||||
%
|
||||
% "You" and "your" means any person who would like to copy, distribute,
|
||||
% or modify the Package.
|
||||
%
|
||||
% "Package" means the collection of files distributed by the Copyright
|
||||
% Holder, and derivatives of that collection and/or of those files. A
|
||||
% given Package may consist of either the Standard Version, or a
|
||||
% Modified Version.
|
||||
%
|
||||
% "Distribute" means providing a copy of the Package or making it
|
||||
% accessible to anyone else, or in the case of a company or
|
||||
% organization, to others outside of your company or organization.
|
||||
%
|
||||
% "Distributor Fee" means any fee that you charge for Distributing this
|
||||
% Package or providing support for this Package to another party. It
|
||||
% does not mean licensing fees.
|
||||
%
|
||||
% "Standard Version" refers to the Package if it has not been modified,
|
||||
% or has been modified only in ways explicitly requested by the
|
||||
% Copyright Holder.
|
||||
%
|
||||
% "Modified Version" means the Package, if it has been changed, and such
|
||||
% changes were not explicitly requested by the Copyright Holder.
|
||||
%
|
||||
% "Original License" means this Artistic License as Distributed with the
|
||||
% Standard Version of the Package, in its current version or as it may
|
||||
% be modified by The Perl Foundation in the future.
|
||||
%
|
||||
% "Source" form means the source code, documentation source, and
|
||||
% configuration files for the Package.
|
||||
%
|
||||
% "Compiled" form means the compiled bytecode, object code, binary, or
|
||||
% any other form resulting from mechanical transformation or translation
|
||||
% of the Source form.
|
||||
%
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
%
|
||||
% Permission for Use and Modification Without Distribution
|
||||
%
|
||||
% (1) You are permitted to use the Standard Version and create and use
|
||||
% Modified Versions for any purpose without restriction, provided that
|
||||
% you do not Distribute the Modified Version.
|
||||
%
|
||||
% Permissions for Redistribution of the Standard Version
|
||||
%
|
||||
% (2) You may Distribute verbatim copies of the Source form of the
|
||||
% Standard Version of this Package in any medium without restriction,
|
||||
% either gratis or for a Distributor Fee, provided that you duplicate
|
||||
% all of the original copyright notices and associated disclaimers. At
|
||||
% your discretion, such verbatim copies may or may not include a
|
||||
% Compiled form of the Package.
|
||||
%
|
||||
% (3) You may apply any bug fixes, portability changes, and other
|
||||
% modifications made available from the Copyright Holder. The resulting
|
||||
% Package will still be considered the Standard Version, and as such
|
||||
% will be subject to the Original License.
|
||||
%
|
||||
% Distribution of Modified Versions of the Package as Source
|
||||
%
|
||||
% (4) You may Distribute your Modified Version as Source (either gratis
|
||||
% or for a Distributor Fee, and with or without a Compiled form of the
|
||||
% Modified Version) provided that you clearly document how it differs
|
||||
% from the Standard Version, including, but not limited to, documenting
|
||||
% any non-standard features, executables, or modules, and provided that
|
||||
% you do at least ONE of the following:
|
||||
%
|
||||
% (a) make the Modified Version available to the Copyright Holder of the
|
||||
% Standard Version, under the Original License, so that the Copyright
|
||||
% Holder may include your modifications in the Standard Version. (b)
|
||||
% ensure that installation of your Modified Version does not prevent the
|
||||
% user installing or running the Standard Version. In addition, the
|
||||
% modified Version must bear a name that is different from the name of
|
||||
% the Standard Version. (c) allow anyone who receives a copy of the
|
||||
% Modified Version to make the Source form of the Modified Version
|
||||
% available to others under (i) the Original License or (ii) a license
|
||||
% that permits the licensee to freely copy, modify and redistribute the
|
||||
% Modified Version using the same licensing terms that apply to the copy
|
||||
% that the licensee received, and requires that the Source form of the
|
||||
% Modified Version, and of any works derived from it, be made freely
|
||||
% available in that license fees are prohibited but Distributor Fees are
|
||||
% allowed.
|
||||
%
|
||||
% Distribution of Compiled Forms of the Standard Version or
|
||||
% Modified Versions without the Source
|
||||
%
|
||||
% (5) You may Distribute Compiled forms of the Standard Version without
|
||||
% the Source, provided that you include complete instructions on how to
|
||||
% get the Source of the Standard Version. Such instructions must be
|
||||
% valid at the time of your distribution. If these instructions, at any
|
||||
% time while you are carrying out such distribution, become invalid, you
|
||||
% must provide new instructions on demand or cease further
|
||||
% distribution. If you provide valid instructions or cease distribution
|
||||
% within thirty days after you become aware that the instructions are
|
||||
% invalid, then you do not forfeit any of your rights under this
|
||||
% license.
|
||||
%
|
||||
% (6) You may Distribute a Modified Version in Compiled form without the
|
||||
% Source, provided that you comply with Section 4 with respect to the
|
||||
% Source of the Modified Version.
|
||||
%
|
||||
% Aggregating or Linking the Package
|
||||
%
|
||||
% (7) You may aggregate the Package (either the Standard Version or
|
||||
% Modified Version) with other packages and Distribute the resulting
|
||||
% aggregation provided that you do not charge a licensing fee for the
|
||||
% Package. Distributor Fees are permitted, and licensing fees for other
|
||||
% components in the aggregation are permitted. The terms of this license
|
||||
% apply to the use and Distribution of the Standard or Modified Versions
|
||||
% as included in the aggregation.
|
||||
%
|
||||
% (8) You are permitted to link Modified and Standard Versions with
|
||||
% other works, to embed the Package in a larger work of your own, or to
|
||||
% build stand-alone binary or bytecode versions of applications that
|
||||
% include the Package, and Distribute the result without restriction,
|
||||
% provided the result does not expose a direct interface to the Package.
|
||||
%
|
||||
% Items That are Not Considered Part of a Modified Version
|
||||
%
|
||||
% (9) Works (including, but not limited to, modules and scripts) that
|
||||
% merely extend or make use of the Package, do not, by themselves, cause
|
||||
% the Package to be a Modified Version. In addition, such works are not
|
||||
% considered parts of the Package itself, and are not subject to the
|
||||
% terms of this license.
|
||||
%
|
||||
% General Provisions
|
||||
%
|
||||
% (10) Any use, modification, and distribution of the Standard or
|
||||
% Modified Versions is governed by this Artistic License. By using,
|
||||
% modifying or distributing the Package, you accept this license. Do not
|
||||
% use, modify, or distribute the Package, if you do not accept this
|
||||
% license.
|
||||
%
|
||||
% (11) If your Modified Version has been derived from a Modified Version
|
||||
% made by someone other than you, you are nevertheless required to
|
||||
% ensure that your Modified Version complies with the requirements of
|
||||
% this license.
|
||||
%
|
||||
% (12) This license does not grant you the right to use any trademark,
|
||||
% service mark, tradename, or logo of the Copyright Holder.
|
||||
%
|
||||
% (13) This license includes the non-exclusive, worldwide,
|
||||
% free-of-charge patent license to make, have made, use, offer to sell,
|
||||
% sell, import and otherwise transfer the Package with respect to any
|
||||
% patent claims licensable by the Copyright Holder that are necessarily
|
||||
% infringed by the Package. If you institute patent litigation
|
||||
% (including a cross-claim or counterclaim) against any party alleging
|
||||
% that the Package constitutes direct or contributory patent
|
||||
% infringement, then this Artistic License to you shall terminate on the
|
||||
% date that such litigation is filed.
|
||||
%
|
||||
% (14) Disclaimer of Warranty: THE PACKAGE IS PROVIDED BY THE COPYRIGHT
|
||||
% HOLDER AND CONTRIBUTORS "AS IS' AND WITHOUT ANY EXPRESS OR IMPLIED
|
||||
% WARRANTIES. THE IMPLIED WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
|
||||
% PARTICULAR PURPOSE, OR NON-INFRINGEMENT ARE DISCLAIMED TO THE EXTENT
|
||||
% PERMITTED BY YOUR LOCAL LAW. UNLESS REQUIRED BY LAW, NO COPYRIGHT
|
||||
% HOLDER OR CONTRIBUTOR WILL BE LIABLE FOR ANY DIRECT, INDIRECT,
|
||||
% INCIDENTAL, OR CONSEQUENTIAL DAMAGES ARISING IN ANY WAY OUT OF THE USE
|
||||
% OF THE PACKAGE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
%
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
|
||||
/** @defgroup block_diagram Block Diagram
|
||||
@ingroup library
|
||||
@{
|
||||
|
||||
This library provides a way of visualizing a prolog program using
|
||||
modules with blocks. To use it use:
|
||||
`:-use_module(library(block_diagram))`.
|
||||
|
||||
|
||||
*/
|
||||
|
||||
:- module(block_diagram, [make_diagram/2, make_diagram/5]).
|
||||
|
||||
/* ---------------------------------------------------------------------- *\
|
||||
|* Missing stuff: a parameter that bounds the module connection depth *|
|
||||
|* and a parameter that diseables/limits the text over edges *|
|
||||
\* ---------------------------------------------------------------------- */
|
||||
|
||||
:- style_check(all).
|
||||
:- yap_flag(unknown, error).
|
||||
|
||||
|
||||
:- use_module(library(charsio), [term_to_atom/2]).
|
||||
:- use_module(library(lists), [memberchk/2, member/2, append/3]).
|
||||
:- use_module(library(system), [working_directory/2]).
|
||||
:- dynamic([seen_module/1, parameter/1]).
|
||||
|
||||
parameter(texts((+inf))).
|
||||
parameter(depth((+inf))).
|
||||
parameter(default_ext('.yap')).
|
||||
|
||||
/** @pred make_diagram(+Inputfilename, +Ouputfilename)
|
||||
|
||||
|
||||
|
||||
This will crawl the files following the use_module, ensure_loaded directives withing the inputfilename.
|
||||
The result will be a file in dot format.
|
||||
You can make a pdf at the shell by asking `dot -Tpdf filename > output.pdf`.
|
||||
|
||||
|
||||
*/
|
||||
make_diagram(InputFile, OutputFile):-
|
||||
tell(OutputFile),
|
||||
write('digraph G {\nrankdir=BT'), nl,
|
||||
extract_name_file(InputFile, Name, File),
|
||||
nb_setval(depth, 0),
|
||||
read_module_file(File, Name),
|
||||
write_explicit,
|
||||
write('}'), nl,
|
||||
told.
|
||||
|
||||
/** @pred make_diagram(+Inputfilename, +Ouputfilename, +Predicate, +Depth, +Extension)
|
||||
|
||||
|
||||
The same as make_diagram/2 but you can define how many of the imported/exporeted predicates will be shown with predicate, and how deep the crawler is allowed to go with depth. The extension is used if the file use module directives do not include a file extension.
|
||||
|
||||
*/
|
||||
make_diagram(InputFile, OutputFile, Texts, Depth, Ext):-
|
||||
integer(Texts),
|
||||
integer(Depth),
|
||||
retractall(parameter(_)),
|
||||
assertz(parameter(texts(Texts))),
|
||||
assertz(parameter(depth(Depth))),
|
||||
assertz(parameter(default_ext(Ext))),
|
||||
make_diagram(InputFile, OutputFile),
|
||||
retractall(parameter(_)),
|
||||
assertz(parameter(texts((+inf)))),
|
||||
assertz(parameter(depth((+inf)))),
|
||||
assertz(parameter(default_ext('.yap'))).
|
||||
|
||||
path_seperator('\\'):-
|
||||
yap_flag(windows, true), !.
|
||||
path_seperator('/').
|
||||
|
||||
split_path_file(PathFile, Path, File):-
|
||||
path_seperator(PathSeperator),
|
||||
atom_concat(Path, File, PathFile),
|
||||
name(PathSeperator, [PathSeperatorName]),
|
||||
name(File, FileName),
|
||||
\+ memberchk(PathSeperatorName, FileName),
|
||||
!.
|
||||
split_file_ext(FileExt, File, Ext):-
|
||||
atom_concat(File, Ext, FileExt),
|
||||
atom_concat('.', _, Ext),
|
||||
name('.', [DotName]),
|
||||
name(Ext, ExtName),
|
||||
findall(A, (member(A, ExtName), A = DotName), L),
|
||||
length(L, 1), !.
|
||||
|
||||
|
||||
parse_module_directive(':-'(module(Name)), _):-
|
||||
seen_module(node(Name)), !.
|
||||
parse_module_directive(':-'(module(Name, _Exported)), _):-
|
||||
seen_module(node(Name)), !.
|
||||
parse_module_directive(':-'(module(Name, Exported)), Shape):-
|
||||
!, \+ seen_module(node(Name)),
|
||||
assertz(seen_module(node(Name))),
|
||||
list_to_message(Exported, ExportedMessage),
|
||||
atom_concat([Name, ' [shape=', Shape,',label="', Name, '\\n', ExportedMessage, '"]'], NodeDefinition),
|
||||
write(NodeDefinition), nl.
|
||||
parse_module_directive(':-'(module(Name)), Shape):-
|
||||
\+ seen_module(node(Name)),
|
||||
assertz(seen_module(node(Name))),
|
||||
atom_concat([Name, ' [shape=', Shape,',label="', Name, '"]'], NodeDefinition),
|
||||
write(NodeDefinition), nl.
|
||||
|
||||
extract_name_file(PathFile, Name, FinalFile):-
|
||||
split_path_file(PathFile, Path, FileName), Path \== '', !,
|
||||
extract_name_file(FileName, Name, File),
|
||||
atom_concat(Path, File, FinalFile).
|
||||
extract_name_file(File, Name, File):-
|
||||
split_file_ext(File, Name, _), !.
|
||||
extract_name_file(Name, Name, File):-
|
||||
parameter(default_ext(Ext)),
|
||||
atom_concat(Name, Ext, File).
|
||||
|
||||
read_use_module_directive(':-'(ensure_loaded(library(Name))), Name, library(Name), []):- !.
|
||||
read_use_module_directive(':-'(ensure_loaded(Path)), Name, FinalFile, []):-
|
||||
extract_name_file(Path, Name, FinalFile), !.
|
||||
read_use_module_directive(':-'(use_module(library(Name))), Name, library(Name), []):- !.
|
||||
read_use_module_directive(':-'(use_module(Path)), Name, FinalFile, []):-
|
||||
extract_name_file(Path, Name, FinalFile), !.
|
||||
read_use_module_directive(':-'(use_module(library(Name), Import)), Name, library(Name), Import):- !.
|
||||
read_use_module_directive(':-'(use_module(Path, Import)), Name, FinalFile, Import):-
|
||||
extract_name_file(Path, Name, FinalFile), !.
|
||||
read_use_module_directive(':-'(use_module(Name, Path, Import)), Name, FinalFile, Import):-
|
||||
nonvar(Path),
|
||||
extract_name_file(Path, _, FinalFile), !.
|
||||
read_use_module_directive(':-'(use_module(Name, Path, Import)), Name, FinalFile, Import):-
|
||||
var(Path),
|
||||
extract_name_file(Name, _, FinalFile), !.
|
||||
|
||||
parse_use_module_directive(Module, Directive):-
|
||||
read_use_module_directive(Directive, Name, File, Imported),
|
||||
parse_use_module_directive(Module, Name, File, Imported).
|
||||
parse_use_module_directive(Module, Name, _File, _Imported):-
|
||||
seen_module(edge(Module, Name)), !.
|
||||
parse_use_module_directive(Module, Name, File, Imported):-
|
||||
\+ seen_module(edge(Module, Name)),
|
||||
assertz(seen_module(edge(Module, Name))),
|
||||
read_module_file(File, Name),
|
||||
list_to_message(Imported, ImportedMessage),
|
||||
atom_concat([Module, ' -> ', Name, ' [label="', ImportedMessage, '"]'], NodeConnection),
|
||||
write(NodeConnection), nl.
|
||||
|
||||
list_to_message(List, Message):-
|
||||
length(List, Len),
|
||||
parameter(texts(TextCnt)),
|
||||
(Len > TextCnt + 1 ->
|
||||
append(FirstCnt, _, List),
|
||||
length(FirstCnt, TextCnt),
|
||||
append(FirstCnt, ['...'], First)
|
||||
;
|
||||
First = List
|
||||
),
|
||||
list_to_message(First, '', Message).
|
||||
|
||||
list_to_message([], Message, Message).
|
||||
list_to_message([H|T], '', FinalMessage):-
|
||||
term_to_atom(H, HAtom), !,
|
||||
list_to_message(T, HAtom, FinalMessage).
|
||||
list_to_message([H|T], AccMessage, FinalMessage):-
|
||||
term_to_atom(H, HAtom),
|
||||
atom_concat([AccMessage, '\\n', HAtom], NewMessage),
|
||||
list_to_message(T, NewMessage, FinalMessage).
|
||||
|
||||
read_module_file(library(Module), Module):-
|
||||
!, parse_module_directive(':-'(module(Module, [])), component).
|
||||
read_module_file(File, Module):-
|
||||
parameter(depth(MaxDepth)),
|
||||
nb_getval(depth, Depth),
|
||||
MaxDepth > Depth,
|
||||
split_path_file(File, Path, FileName),
|
||||
catch((working_directory(CurDir,Path), open(FileName, read, S)), _, (parse_module_directive(':-'(module(Module, [])), box3d), fail)),
|
||||
NDepth is Depth + 1,
|
||||
nb_setval(depth, NDepth),
|
||||
repeat,
|
||||
catch(read(S, Next),_,fail),
|
||||
process(Module, Next),
|
||||
nb_setval(depth, Depth),
|
||||
close(S), working_directory(_,CurDir), !.
|
||||
read_module_file(_, _).
|
||||
|
||||
/** @pred process(+ _StreamInp_, + _Goal_)
|
||||
|
||||
|
||||
|
||||
For every line _LineIn_ in stream _StreamInp_, call
|
||||
`call(Goal,LineIn)`.
|
||||
|
||||
|
||||
*/
|
||||
process(_, end_of_file):-!.
|
||||
process(_, Term):-
|
||||
parse_module_directive(Term, box), !, fail.
|
||||
process(Module, Term):-
|
||||
parse_use_module_directive(Module, Term), !, fail.
|
||||
process(Module, Term):-
|
||||
find_explicit_qualification(Module, Term), fail.
|
||||
|
||||
find_explicit_qualification(OwnerModule, ':-'(Module:Goal)):-
|
||||
!, explicit_qualification(OwnerModule, Module, Goal).
|
||||
find_explicit_qualification(OwnerModule, ':-'(_Head, Body)):-
|
||||
find_explicit_qualification(OwnerModule, Body).
|
||||
find_explicit_qualification(OwnerModule, (Module:Goal, RestBody)):-
|
||||
!, explicit_qualification(OwnerModule, Module, Goal),
|
||||
find_explicit_qualification(OwnerModule, RestBody).
|
||||
find_explicit_qualification(OwnerModule, (_Goal, RestBody)):-
|
||||
!, find_explicit_qualification(OwnerModule, RestBody).
|
||||
find_explicit_qualification(OwnerModule, Module:Goal):-
|
||||
!, explicit_qualification(OwnerModule, Module, Goal).
|
||||
find_explicit_qualification(_OwnerModule, _Goal).
|
||||
|
||||
explicit_qualification(InModule, ToModule, Goal):-
|
||||
nonvar(Goal), nonvar(ToModule), !,
|
||||
functor(Goal, FunctorName, Arity),
|
||||
\+ seen_module(explicit(InModule, ToModule, FunctorName/Arity)),
|
||||
assertz(seen_module(explicit(InModule, ToModule, FunctorName/Arity))).
|
||||
|
||||
explicit_qualification(InModule, ToModule, Goal):-
|
||||
var(Goal), nonvar(ToModule), !,
|
||||
\+ seen_module(explicit(InModule, ToModule, 'DYNAMIC')),
|
||||
assertz(seen_module(explicit(InModule, ToModule, 'DYNAMIC'))).
|
||||
|
||||
explicit_qualification(InModule, ToModule, Goal):-
|
||||
nonvar(Goal), var(ToModule), !,
|
||||
functor(Goal, FunctorName, Arity),
|
||||
\+ seen_module(explicit(InModule, 'DYNAMIC', FunctorName/Arity)),
|
||||
assertz(seen_module(explicit(InModule, 'DYNAMIC', FunctorName/Arity))).
|
||||
|
||||
explicit_qualification(InModule, ToModule, Goal):-
|
||||
var(Goal), var(ToModule),
|
||||
\+ seen_module(explicit(InModule, 'DYNAMIC', 'DYNAMIC')),
|
||||
assertz(seen_module(explicit(InModule, 'DYNAMIC', 'DYNAMIC'))).
|
||||
|
||||
write_explicit:-
|
||||
seen_module(explicit(InModule, ToModule, _Goal)),
|
||||
\+ seen_module(generate_explicit(InModule, ToModule)),
|
||||
assertz(seen_module(generate_explicit(InModule, ToModule))),
|
||||
all(Goal, seen_module(explicit(InModule, ToModule, Goal)), Goals),
|
||||
list_to_message(Goals, Explicit),
|
||||
atom_concat([InModule, ' -> ', ToModule, ' [label="', Explicit, '",style=dashed]'], NodeConnection),
|
||||
write(NodeConnection), nl, fail.
|
||||
write_explicit.
|
||||
|
||||
/*
|
||||
functor(Goal, FunctorName, Arity),
|
||||
term_to_atom(FunctorName/Arity, Imported),
|
||||
atom_concat([InModule, ' -> ', ToModule, ' [label="', Imported, '",style=dashed]'], NodeConnection),
|
||||
write(NodeConnection), nl.
|
||||
|
||||
atom_concat([InModule, ' -> ', ToModule, ' [label="DYNAMIC",style=dashed]'], NodeConnection),
|
||||
write(NodeConnection), nl.
|
||||
|
||||
functor(Goal, FunctorName, Arity),
|
||||
term_to_atom(FunctorName/Arity, Imported),
|
||||
atom_concat([InModule, ' -> DYNAMIC [label="', Imported, '",style=dashed]'], NodeConnection),
|
||||
write(NodeConnection), nl.
|
||||
|
||||
atom_concat([InModule, ' -> DYNAMIC [label="DYNAMIC",style=dashed]'], NodeConnection),
|
||||
write(NodeConnection), nl.
|
||||
*/
|
||||
|
||||
%% @} @}
|
422
packages/python/swig/yap4py/prolog/c_alarms.yap
Normal file
422
packages/python/swig/yap4py/prolog/c_alarms.yap
Normal file
@ -0,0 +1,422 @@
|
||||
%%% -*- Mode: Prolog; -*-
|
||||
/**
|
||||
* @file c_alarms.yap
|
||||
* @author Theofrastos Mantadelis
|
||||
* @date Tue Nov 17 14:50:03 2015
|
||||
*
|
||||
* @brief Concurrent alarms
|
||||
*
|
||||
*
|
||||
*/
|
||||
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
%
|
||||
% Concurrent alarms was developed at Katholieke Universiteit Leuven
|
||||
%
|
||||
% Copyright 2010
|
||||
% Katholieke Universiteit Leuven
|
||||
%
|
||||
% Contributions to this file:
|
||||
% Author: Theofrastos Mantadelis
|
||||
% $Date: 2011-02-04 16:04:49 +0100 (Fri, 04 Feb 2011) $
|
||||
% $Revision: 11 $
|
||||
% Contributions: The timer implementation is inspired by Bernd Gutmann's timers
|
||||
%
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
%
|
||||
% Artistic License 2.0
|
||||
%
|
||||
% Copyright (c) 2000-2006, The Perl Foundation.
|
||||
%
|
||||
% Everyone is permitted to copy and distribute verbatim copies of this
|
||||
% license document, but changing it is not allowed. Preamble
|
||||
%
|
||||
% This license establishes the terms under which a given free software
|
||||
% Package may be copied, modified, distributed, and/or
|
||||
% redistributed. The intent is that the Copyright Holder maintains some
|
||||
% artistic control over the development of that Package while still
|
||||
% keeping the Package available as open source and free software.
|
||||
%
|
||||
% You are always permitted to make arrangements wholly outside of this
|
||||
% license directly with the Copyright Holder of a given Package. If the
|
||||
% terms of this license do not permit the full use that you propose to
|
||||
% make of the Package, you should contact the Copyright Holder and seek
|
||||
% a different licensing arrangement. Definitions
|
||||
%
|
||||
% "Copyright Holder" means the individual(s) or organization(s) named in
|
||||
% the copyright notice for the entire Package.
|
||||
%
|
||||
% "Contributor" means any party that has contributed code or other
|
||||
% material to the Package, in accordance with the Copyright Holder's
|
||||
% procedures.
|
||||
%
|
||||
% "You" and "your" means any person who would like to copy, distribute,
|
||||
% or modify the Package.
|
||||
%
|
||||
% "Package" means the collection of files distributed by the Copyright
|
||||
% Holder, and derivatives of that collection and/or of those files. A
|
||||
% given Package may consist of either the Standard Version, or a
|
||||
% Modified Version.
|
||||
%
|
||||
% "Distribute" means providing a copy of the Package or making it
|
||||
% accessible to anyone else, or in the case of a company or
|
||||
% organization, to others outside of your company or organization.
|
||||
%
|
||||
% "Distributor Fee" means any fee that you charge for Distributing this
|
||||
% Package or providing support for this Package to another party. It
|
||||
% does not mean licensing fees.
|
||||
%
|
||||
% "Standard Version" refers to the Package if it has not been modified,
|
||||
% or has been modified only in ways explicitly requested by the
|
||||
% Copyright Holder.
|
||||
%
|
||||
% "Modified Version" means the Package, if it has been changed, and such
|
||||
% changes were not explicitly requested by the Copyright Holder.
|
||||
%
|
||||
% "Original License" means this Artistic License as Distributed with the
|
||||
% Standard Version of the Package, in its current version or as it may
|
||||
% be modified by The Perl Foundation in the future.
|
||||
%
|
||||
% "Source" form means the source code, documentation source, and
|
||||
% configuration files for the Package.
|
||||
%
|
||||
% "Compiled" form means the compiled bytecode, object code, binary, or
|
||||
% any other form resulting from mechanical transformation or translation
|
||||
% of the Source form.
|
||||
%
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
%
|
||||
% Permission for Use and Modification Without Distribution
|
||||
%
|
||||
% (1) You are permitted to use the Standard Version and create and use
|
||||
% Modified Versions for any purpose without restriction, provided that
|
||||
% you do not Distribute the Modified Version.
|
||||
%
|
||||
% Permissions for Redistribution of the Standard Version
|
||||
%
|
||||
% (2) You may Distribute verbatim copies of the Source form of the
|
||||
% Standard Version of this Package in any medium without restriction,
|
||||
% either gratis or for a Distributor Fee, provided that you duplicate
|
||||
% all of the original copyright notices and associated disclaimers. At
|
||||
% your discretion, such verbatim copies may or may not include a
|
||||
% Compiled form of the Package.
|
||||
%
|
||||
% (3) You may apply any bug fixes, portability changes, and other
|
||||
% modifications made available from the Copyright Holder. The resulting
|
||||
% Package will still be considered the Standard Version, and as such
|
||||
% will be subject to the Original License.
|
||||
%
|
||||
% Distribution of Modified Versions of the Package as Source
|
||||
%
|
||||
% (4) You may Distribute your Modified Version as Source (either gratis
|
||||
% or for a Distributor Fee, and with or without a Compiled form of the
|
||||
% Modified Version) provided that you clearly document how it differs
|
||||
% from the Standard Version, including, but not limited to, documenting
|
||||
% any non-standard features, executables, or modules, and provided that
|
||||
% you do at least ONE of the following:
|
||||
%
|
||||
% (a) make the Modified Version available to the Copyright Holder of the
|
||||
% Standard Version, under the Original License, so that the Copyright
|
||||
% Holder may include your modifications in the Standard Version. (b)
|
||||
% ensure that installation of your Modified Version does not prevent the
|
||||
% user installing or running the Standard Version. In addition, the
|
||||
% modified Version must bear a name that is different from the name of
|
||||
% the Standard Version. (c) allow anyone who receives a copy of the
|
||||
% Modified Version to make the Source form of the Modified Version
|
||||
% available to others under (i) the Original License or (ii) a license
|
||||
% that permits the licensee to freely copy, modify and redistribute the
|
||||
% Modified Version using the same licensing terms that apply to the copy
|
||||
% that the licensee received, and requires that the Source form of the
|
||||
% Modified Version, and of any works derived from it, be made freely
|
||||
% available in that license fees are prohibited but Distributor Fees are
|
||||
% allowed.
|
||||
%
|
||||
% Distribution of Compiled Forms of the Standard Version or
|
||||
% Modified Versions without the Source
|
||||
%
|
||||
% (5) You may Distribute Compiled forms of the Standard Version without
|
||||
% the Source, provided that you include complete instructions on how to
|
||||
% get the Source of the Standard Version. Such instructions must be
|
||||
% valid at the time of your distribution. If these instructions, at any
|
||||
% time while you are carrying out such distribution, become invalid, you
|
||||
% must provide new instructions on demand or cease further
|
||||
% distribution. If you provide valid instructions or cease distribution
|
||||
% within thirty days after you become aware that the instructions are
|
||||
% invalid, then you do not forfeit any of your rights under this
|
||||
% license.
|
||||
%
|
||||
% (6) You may Distribute a Modified Version in Compiled form without the
|
||||
% Source, provided that you comply with Section 4 with respect to the
|
||||
% Source of the Modified Version.
|
||||
%
|
||||
% Aggregating or Linking the Package
|
||||
%
|
||||
% (7) You may aggregate the Package (either the Standard Version or
|
||||
% Modified Version) with other packages and Distribute the resulting
|
||||
% aggregation provided that you do not charge a licensing fee for the
|
||||
% Package. Distributor Fees are permitted, and licensing fees for other
|
||||
% components in the aggregation are permitted. The terms of this license
|
||||
% apply to the use and Distribution of the Standard or Modified Versions
|
||||
% as included in the aggregation.
|
||||
%
|
||||
% (8) You are permitted to link Modified and Standard Versions with
|
||||
% other works, to embed the Package in a larger work of your own, or to
|
||||
% build stand-alone binary or bytecode versions of applications that
|
||||
% include the Package, and Distribute the result without restriction,
|
||||
% provided the result does not expose a direct interface to the Package.
|
||||
%
|
||||
% Items That are Not Considered Part of a Modified Version
|
||||
%
|
||||
% (9) Works (including, but not limited to, modules and scripts) that
|
||||
% merely extend or make use of the Package, do not, by themselves, cause
|
||||
% the Package to be a Modified Version. In addition, such works are not
|
||||
% considered parts of the Package itself, and are not subject to the
|
||||
% terms of this license.
|
||||
%
|
||||
% General Provisions
|
||||
%
|
||||
% (10) Any use, modification, and distribution of the Standard or
|
||||
% Modified Versions is governed by this Artistic License. By using,
|
||||
% modifying or distributing the Package, you accept this license. Do not
|
||||
% use, modify, or distribute the Package, if you do not accept this
|
||||
% license.
|
||||
%
|
||||
% (11) If your Modified Version has been derived from a Modified Version
|
||||
% made by someone other than you, you are nevertheless required to
|
||||
% ensure that your Modified Version complies with the requirements of
|
||||
% this license.
|
||||
%
|
||||
% (12) This license does not grant you the right to use any trademark,
|
||||
% service mark, tradename, or logo of the Copyright Holder.
|
||||
%
|
||||
% (13) This license includes the non-exclusive, worldwide,
|
||||
% free-of-charge patent license to make, have made, use, offer to sell,
|
||||
% sell, import and otherwise transfer the Package with respect to any
|
||||
% patent claims licensable by the Copyright Holder that are necessarily
|
||||
% infringed by the Package. If you institute patent litigation
|
||||
% (including a cross-claim or counterclaim) against any party alleging
|
||||
% that the Package constitutes direct or contributory patent
|
||||
% infringement, then this Artistic License to you shall terminate on the
|
||||
% date that such litigation is filed.
|
||||
%
|
||||
% (14) Disclaimer of Warranty: THE PACKAGE IS PROVIDED BY THE COPYRIGHT
|
||||
% HOLDER AND CONTRIBUTORS "AS IS' AND WITHOUT ANY EXPRESS OR IMPLIED
|
||||
% WARRANTIES. THE IMPLIED WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
|
||||
% PARTICULAR PURPOSE, OR NON-INFRINGEMENT ARE DISCLAIMED TO THE EXTENT
|
||||
% PERMITTED BY YOUR LOCAL LAW. UNLESS REQUIRED BY LAW, NO COPYRIGHT
|
||||
% HOLDER OR CONTRIBUTOR WILL BE LIABLE FOR ANY DIRECT, INDIRECT,
|
||||
% INCIDENTAL, OR CONSEQUENTIAL DAMAGES ARISING IN ANY WAY OUT OF THE USE
|
||||
% OF THE PACKAGE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
%
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
|
||||
:- module(c_alarms, [set_alarm/3,
|
||||
unset_alarm/1,
|
||||
time_out_call_once/3,
|
||||
timer_start/1,
|
||||
timer_restart/1,
|
||||
timer_stop/2,
|
||||
timer_elapsed/2,
|
||||
timer_pause/2]).
|
||||
|
||||
/** @defgroup c_alarms Concurrent Alarms
|
||||
@ingroup library
|
||||
@{
|
||||
|
||||
This library provides a concurrent signals. To use it use:
|
||||
`:-use_module(library(c_alarms))`.
|
||||
*/
|
||||
|
||||
|
||||
:- use_module(library(lists), [member/2, memberchk/2, delete/3]).
|
||||
:- use_module(library(ordsets), [ord_add_element/3]).
|
||||
:- use_module(library(apply_macros), [maplist/3]).
|
||||
|
||||
:- dynamic('$timer'/3).
|
||||
|
||||
:- meta_predicate(set_alarm(+, 0, -)).
|
||||
:- meta_predicate(time_out_call_once(+, 0, -)).
|
||||
:- meta_predicate(prove_once(0)).
|
||||
|
||||
:- initialization(local_init).
|
||||
|
||||
% Initialise the module's blackboard state: an empty alarm queue and a
% zeroed alarm-identifier counter.
local_init :-
    bb_put(alarms, []),
    bb_put(identity, 0).

% get_next_identity(-ID)
% Hand out the current alarm identifier and advance the counter.
get_next_identity(ID) :-
    bb_get(identity, ID),
    Next is ID + 1,
    bb_put(identity, Next).
|
||||
|
||||
%% set_alarm(+Seconds, +Execute, -ID)
%
% Schedule goal Execute to be called after a time interval of Seconds.
% ID is returned so the alarm can later be cancelled with unset_alarm/1.
% Multiple and nested alarm settings are supported.
% Known Bug: an alarm might trigger +-1 second around the set time.

% No alarm currently pending: start a fresh queue and arm the timer.
set_alarm(Seconds, Execute, ID) :-
    bb_get(alarms, []),
    get_next_identity(ID), !,
    bb_put(alarms, [alarm(Seconds, ID, Execute)]),
    alarm(Seconds, alarm_handler, _).
% Alarms already pending: shift their deadlines by the time already
% elapsed and insert the new request into the ordered queue.
set_alarm(Seconds, Execute, ID) :-
    get_next_identity(ID), !,
    bb_get(alarms, [alarm(HeadSecs, HeadID, HeadGoal)|Rest]),
    alarm(0, true, Remaining),          % cancel timer, read seconds left
    Elapsed is HeadSecs - Remaining - 1,
    maplist(subtract(Elapsed),
            [alarm(HeadSecs, HeadID, HeadGoal)|Rest],
            Shifted),
    ord_add_element(Shifted, alarm(Seconds, ID, Execute),
                    [alarm(NextSecs, NextID, NextGoal)|Queue]),
    bb_put(alarms, [alarm(NextSecs, NextID, NextGoal)|Queue]),
    alarm(NextSecs, alarm_handler, _).
set_alarm(Seconds, Execute, ID) :-
    throw(error(permission_error(create, alarm, set_alarm(Seconds, Execute, ID)), 'Non permitted alarm identifier.')).
|
||||
|
||||
% subtract(+Elapsed, +Alarm, -ShiftedAlarm)
% Shift an alarm's deadline back by the seconds already elapsed.
subtract(Elapsed, alarm(Secs, ID, Goal), alarm(Left, ID, Goal)) :-
    Left is Secs - Elapsed.
|
||||
|
||||
%% unset_alarm(+ID)
%
% Cancel the alarm identified by ID without disturbing any other
% pending alarms.
unset_alarm(ID) :-
    \+ ground(ID),
    throw(error(instantiation_error, 'Alarm ID needs to be instantiated.')).
unset_alarm(ID) :-
    bb_get(alarms, Pending),
    \+ memberchk(alarm(_Seconds, ID, _Execute), Pending),
    throw(error(existence_error(alarm, unset_alarm(ID)), 'Alarm does not exist.')).
unset_alarm(ID) :-
    alarm(0, true, Remaining),          % stop the running timer
    bb_get(alarms, Pending),
    Pending = [alarm(HeadSecs, _, _)|_],
    Elapsed is HeadSecs - Remaining - 1,
    delete_alarm(Pending, ID, Rest),
    bb_put(alarms, Rest),
    (   Rest = [alarm(NextSecs, _, _)|_]
    ->  Left is NextSecs - Elapsed,     % re-arm for the next alarm
        alarm(Left, alarm_handler, _)
    ;   true
    ).
|
||||
|
||||
% delete_alarm(+Alarms, +ID, -Rest)
% Remove the alarm whose identifier is ID from the queue.
delete_alarm(Alarms, ID, Rest) :-
    memberchk(alarm(Secs, ID, Goal), Alarms),
    delete(Alarms, alarm(Secs, ID, Goal), Rest).
|
||||
|
||||
% alarm_handler
% Invoked when the armed timer fires.  Runs the goal of the alarm at
% the head of the queue (plus any others whose deadline has also
% arrived), shifts the remaining deadlines, and re-arms the timer for
% the next pending alarm.
alarm_handler :-
    bb_get(alarms, [alarm(_, _, Goal)]),       % exactly one alarm left
    bb_put(alarms, []),
    call(Goal).
alarm_handler :-
    bb_get(alarms, [alarm(Elapsed, DueID, DueGoal)|Pending]),
    maplist(subtract(Elapsed), Pending, Shifted),
    find_zeros(Shifted, DueNow),
    findall(alarm(S, ID, E),
            (member(alarm(S, ID, E), Shifted), S > 0),
            StillPending),
    bb_put(alarms, StillPending),
    (   StillPending = [alarm(NextSecs, _, _)|_]
    ->  alarm(NextSecs, alarm_handler, _)
    ;   true
    ),
    execute([alarm(0, DueID, DueGoal)|DueNow]).
|
||||
|
||||
% find_zeros(+Alarms, -Zeros)
% Select the alarms whose remaining time has reached zero.
find_zeros([], []).
find_zeros([alarm(0, ID, E)|More], [alarm(0, ID, E)|Zeros]) :-
    find_zeros(More, Zeros).
find_zeros([alarm(S, _, _)|More], Zeros) :-
    S > 0,
    find_zeros(More, Zeros).

% execute(+Alarms)
% Run the goal of every alarm in the list, in order.
execute([]).
execute([alarm(_, _, Goal)|More]) :-
    call(Goal),
    execute(More).
|
||||
|
||||
%% time_out_call_once(+Seconds, +Goal, -Return)
%
% Run Goal once, binding Return to success or failure.  If Goal does
% not finish within Seconds seconds, the alarm aborts it and Return is
% unified with timeout instead.
time_out_call_once(Seconds, Goal, Return) :-
    bb_get(identity, ID),
    set_alarm(Seconds, throw(timeout(ID)), ID),
    catch(
        ( prove_once(Goal, Return),
          unset_alarm(ID)
        ),
        Exception,
        (   Exception == timeout(ID)
        ->  Return = timeout
        ;   unset_alarm(ID),            % unrelated error: clean up, re-throw
            throw(Exception)
        )).

% prove_once(+Goal, -Outcome)
% Deterministically run Goal; Outcome is success or failure.
prove_once(Goal, success) :-
    once(Goal), !.
prove_once(_Goal, failure).
|
||||
|
||||
%% timer_start(+Name)
% Create and start a wall-clock timer called Name.
timer_start(Name) :-
    \+ ground(Name),
    throw(error(instantiation_error, 'Timer name needs to be instantiated.')).
timer_start(Name) :-
    '$timer'(Name, _, _),
    throw(error(permission_error(create, timer, timer_start(Name)), 'Timer already exists.')).
timer_start(Name) :-
    statistics(walltime, [Now, _]),
    assertz('$timer'(Name, running, Now)).

%% timer_restart(+Name)
% Restart Name: create it if unknown, reset it if running, or resume
% it if paused (keeping the time accumulated before the pause).
timer_restart(Name) :-
    \+ ground(Name),
    throw(error(instantiation_error, 'Timer name needs to be instantiated.')).
timer_restart(Name) :-
    \+ '$timer'(Name, _, _), !,
    statistics(walltime, [Now, _]),
    assertz('$timer'(Name, running, Now)).
timer_restart(Name) :-
    retract('$timer'(Name, running, _)), !,
    statistics(walltime, [Now, _]),
    assertz('$timer'(Name, running, Now)).
timer_restart(Name) :-
    retract('$timer'(Name, paused, Sofar)),
    statistics(walltime, [Now, _]),
    Base is Now - Sofar,                % back-date so elapsed time resumes
    assertz('$timer'(Name, running, Base)).
|
||||
|
||||
%% timer_stop(+Name, -Elapsed)
% Remove timer Name, returning the wall-time it accumulated.
timer_stop(Name, Elapsed) :-
    \+ '$timer'(Name, _, _),
    throw(error(existence_error(timer, timer_stop(Name, Elapsed)), 'Timer does not exist.')).
timer_stop(Name, Elapsed) :-
    retract('$timer'(Name, running, Start)), !,
    statistics(walltime, [Now, _]),
    Elapsed is Now - Start.
timer_stop(Name, Elapsed) :-
    retract('$timer'(Name, paused, Elapsed)).

%% timer_elapsed(+Name, -Elapsed)
% Report the time accumulated by Name without disturbing it.
timer_elapsed(Name, Elapsed) :-
    \+ '$timer'(Name, _, _),
    throw(error(existence_error(timer, timer_elapsed(Name, Elapsed)), 'Timer does not exist.')).
timer_elapsed(Name, Elapsed) :-
    '$timer'(Name, running, Start), !,
    statistics(walltime, [Now, _]),
    Elapsed is Now - Start.
timer_elapsed(Name, Elapsed) :-
    '$timer'(Name, paused, Elapsed).

%% timer_pause(+Name, -Elapsed)
% Suspend timer Name, recording the time elapsed so far.
timer_pause(Name, Elapsed) :-
    \+ '$timer'(Name, _, _),
    throw(error(existence_error(timer, timer_pause(Name, Elapsed)), 'Timer does not exist.')).
timer_pause(Name, Elapsed) :-
    '$timer'(Name, paused, _),
    throw(error(permission_error(timer, timer_pause(Name, Elapsed)), 'Timer already paused.')).
timer_pause(Name, Elapsed) :-
    retract('$timer'(Name, _, Start)),
    statistics(walltime, [Now, _]),
    Elapsed is Now - Start,
    assertz('$timer'(Name, paused, Elapsed)).
|
||||
|
||||
/**
|
||||
@}
|
||||
*/
|
230
packages/python/swig/yap4py/prolog/charsio.yap
Normal file
230
packages/python/swig/yap4py/prolog/charsio.yap
Normal file
@ -0,0 +1,230 @@
|
||||
/*************************************************************************
|
||||
* *
|
||||
* YAP Prolog *
|
||||
* *
|
||||
* Yap Prolog was developed at NCCUP - Universidade do Porto *
|
||||
* *
|
||||
* Copyright L.Damas, V.S.Costa and Universidade do Porto 1985-1997 *
|
||||
* *
|
||||
**************************************************************************
|
||||
* *
|
||||
* File: charsio.yap *
|
||||
* Last rev: 5/12/99 *
|
||||
* mods: *
|
||||
* comments: I/O on character strings *
|
||||
* *
|
||||
*************************************************************************/
|
||||
|
||||
/**
|
||||
* @file charsio.yap
|
||||
* @author VITOR SANTOS COSTA <vsc@VITORs-MBP.lan>
|
||||
* @date Tue Nov 17 01:17:33 2015
|
||||
*
|
||||
* @brief Several operations on text.
|
||||
* @{
|
||||
*
|
||||
*/
|
||||
|
||||
|
||||
:- module(charsio, [
|
||||
format_to_chars/3,
|
||||
format_to_chars/4,
|
||||
write_to_chars/3,
|
||||
write_to_chars/2,
|
||||
atom_to_chars/3,
|
||||
atom_to_chars/2,
|
||||
number_to_chars/3,
|
||||
number_to_chars/2,
|
||||
read_from_chars/2,
|
||||
open_chars_stream/2,
|
||||
with_output_to_chars/2,
|
||||
with_output_to_chars/3,
|
||||
with_output_to_chars/4,
|
||||
term_to_atom/2
|
||||
]).
|
||||
|
||||
/** @defgroup charsio Operations on Sequences of Codes.
|
||||
@ingroup library
|
||||
|
||||
Term to sequence of codes conversion, mostly replaced by engine code.
|
||||
You can use the following directive to load the files.
|
||||
|
||||
|
||||
~~~~~~~
|
||||
:- use_module(library(avl)).
|
||||
~~~~~~~
|
||||
|
||||
It includes the following predicates:
|
||||
- atom_to_chars/2
|
||||
- atom_to_chars/3
|
||||
- format_to_chars/3
|
||||
- format_to_chars/4
|
||||
- number_to_chars/2
|
||||
- number_to_chars/3
|
||||
- open_chars_stream/2
|
||||
- read_from_chars/2
|
||||
- term_to_atom/2
|
||||
- with_output_to_chars/2
|
||||
- with_output_to_chars/3
|
||||
- with_output_to_chars/4
|
||||
- write_to_chars/2
|
||||
- write_to_chars/3
|
||||
|
||||
*/
|
||||
|
||||
:- meta_predicate(with_output_to_chars(0,?)).
|
||||
:- meta_predicate(with_output_to_chars(0,-,?)).
|
||||
:- meta_predicate(with_output_to_chars(0,-,?,?)).
|
||||
|
||||
/** @pred format_to_chars(+ _Form_, + _Args_, - _Result_)

Run the built-in format/2 on form _Form_ and arguments _Args_,
delivering the output as the list of character codes _Result_.
*/
format_to_chars(Form, Args, Codes) :-
    format(codes(Codes), Form, Args).

/** @pred format_to_chars(+ _Form_, + _Args_, - _Result_, - _Result0_)

Run the built-in format/2 on form _Form_ and arguments _Args_,
delivering the output as the difference list of character codes
_Result-Result0_.
*/
format_to_chars(Form, Args, Codes, Tail) :-
    format(codes(Codes, Tail), Form, Args).

/** @pred write_to_chars(+ _Term_, - _Result_)

Write _Term_ as write/1 would, returning the output as the list of
character codes _Result_.
*/
write_to_chars(Term, Codes) :-
    format(codes(Codes), '~w', [Term]).

/** @pred write_to_chars(+ _Term_, - _Result0_, - _Result_)

Write _Term_ as write/1 would, returning the output as the difference
list of character codes _Result-Result0_.
*/
write_to_chars(Term, Codes, Tail) :-
    format(codes(Codes, Tail), '~w', [Term]).
|
||||
|
||||
/** @pred atom_to_chars(+ _Atom_, - _Result_)

Unify _Result_ with the list of character codes of atom _Atom_.
*/
atom_to_chars(Atom, Codes) :-
    atom_codes(Atom, Codes).

/** @pred atom_to_chars(+ _Atom_, - _Result0_, - _Result_)

Convert atom _Atom_ to a difference list of character codes.
*/
atom_to_chars(Atom, Codes, Tail) :-
    format(codes(Codes, Tail), '~a', [Atom]).

/** @pred number_to_chars(+ _Number_, - _Result_)

Unify _Result_ with the list of character codes of _Number_.
*/
number_to_chars(Number, Codes) :-
    number_codes(Number, Codes).

/** @pred number_to_chars(+ _Number_, - _Result0_, - _Result_)

Convert _Number_ to a difference list of character codes.  Raises an
error when _Number_ is unbound or not a number.
*/
number_to_chars(Number, Codes, Tail) :-
    var(Number), !,
    throw(error(instantiation_error,number_to_chars(Number, Codes, Tail))).
number_to_chars(Number, Codes, Tail) :-
    number(Number), !,
    format(codes(Codes, Tail), '~w', [Number]).
number_to_chars(Number, Codes, Tail) :-
    throw(error(type_error(number,Number),number_to_chars(Number, Codes, Tail))).
|
||||
|
||||
/** @pred open_chars_stream(+ _Chars_, - _Stream_)

Open the list of character codes _Chars_ for reading as _Stream_.
*/
open_chars_stream(Codes, Stream) :-
    open_chars_stream(Codes, Stream, '').

% open_chars_stream(+Codes, -Stream, +Postfix)
% Implementation on top of the memory-file library: the codes
% (followed by Postfix) are written to a fresh memory file, which is
% then reopened for reading and freed when closed.  The library is
% loaded on demand by the second clause.
open_chars_stream(Codes, Stream, Postfix) :-
    predicate_property(memory_file:open_memory_file(_,_,_),_), !,
    memory_file:new_memory_file(MF),
    memory_file:open_memory_file(MF, write, Out),
    format(Out, '~s~w', [Codes, Postfix]),
    close(Out),
    memory_file:open_memory_file(MF, read, Stream,
                                 [ free_on_close(true)
                                 ]).
open_chars_stream(Codes, Stream, Postfix) :-
    ensure_loaded(library(memfile)),
    open_chars_stream(Codes, Stream, Postfix).
|
||||
|
||||
/** @pred with_output_to_chars(? _Goal_, - _Chars_)

Run _Goal_ with its standard output captured in a memory buffer; on
successful execution the buffer contents are returned as the list of
character codes _Chars_.
*/
with_output_to_chars(Goal, Codes) :-
    with_output_to(codes(Codes), Goal).

/** @pred with_output_to_chars(? _Goal_, ? _Chars0_, - _Chars_)

As with_output_to_chars/2, but the captured output is returned as the
difference list of character codes _Chars-Chars0_.
*/
with_output_to_chars(Goal, Codes, Tail) :-
    with_output_to(codes(Codes, Tail), Goal).

/** @pred with_output_to_chars(? _Goal_, - _Stream_, ? _Chars0_, - _Chars_)

As with_output_to_chars/3, but _Stream_ is additionally unified with
the temporary stream the output is written to.
*/
with_output_to_chars(Goal, Stream, Codes, Tail) :-
    with_output_to(codes(Codes, Tail), with_stream(Stream, Goal)).

% with_stream(-Stream, :Goal)
% Unify Stream with the current output stream, then run Goal.
with_stream(Stream, Goal) :-
    current_output(Stream),
    call(Goal).
|
||||
|
||||
/** @pred read_from_chars(+ _Chars_, - _Term_)

Parse the list of character codes _Chars_ into _Term_.  The text must
terminate with a dot character: either a dot followed by blank
characters, or a dot as the very last character of the string.

@compat The SWI-Prolog version does not require Codes to end
in a full-stop.
*/
read_from_chars("", end_of_file) :- !.
read_from_chars(Codes, Term) :-
    atom_to_term(Codes, Term, _).
|
||||
/**
|
||||
@}
|
||||
*/
|
||||
|
96
packages/python/swig/yap4py/prolog/clauses.yap
Normal file
96
packages/python/swig/yap4py/prolog/clauses.yap
Normal file
@ -0,0 +1,96 @@
|
||||
/**
|
||||
* @file clauses.yap
|
||||
* @author VITOR SANTOS COSTA <vsc@VITORs-MBP.lan>
|
||||
* @date Tue Nov 17 14:51:30 2015
|
||||
*
|
||||
* @brief Utilities for clause manipulation.
|
||||
*
|
||||
*
|
||||
*/
|
||||
|
||||
:- module(clauses,
|
||||
[list2conj/2,
|
||||
conj2list/2,
|
||||
clauselength/2]).
|
||||
|
||||
%% @{
|
||||
|
||||
/**
|
||||
* @defgroup clauses Clause Manipulation
|
||||
* @ingroup library
|
||||
|
||||
This library supports a number of useful utilities that come up over and
|
||||
over again when manipulating Prolog programs. This will include
|
||||
operations and conversion to other structures.
|
||||
|
||||
@author Vitor Santos Costa
|
||||
*/
|
||||
|
||||
/** conj2list( +Conj, -List) is det

Flatten a conjunction of literals into a list; a module-qualified
conjunction M:Conj yields a list of M-qualified literals.

It is often easier to apply operations on lists than on clauses.
*/
conj2list(M:Conj, List) :-
    conj2list_(Conj, M, List, []).
conj2list(Conj, List) :-
    conj2list_(Conj, List, []).


% conj2list_(+Conj)//
% DCG worker: emit the literals of Conj in left-to-right order;
% `true` contributes nothing, an unbound goal is kept as-is.
conj2list_(C) -->
    { var(C) },
    !,
    [C].
conj2list_(true) --> !.
conj2list_((L, R)) -->
    !,
    conj2list_(L),
    conj2list_(R).
conj2list_(C) -->
    [C].
|
||||
|
||||
% conj2list_(+Conj, +Module)//
% As conj2list_//1, but qualify each emitted literal with Module
% (explicitly qualified goals are normalised via strip_module/3).
conj2list_(C, M) -->
    { var(C) },
    !,
    [M: C].
conj2list_(true, _) --> !.
conj2list_((L, R), M) -->
    !,
    conj2list_(L, M),
    conj2list_(R, M).
conj2list_(C, M) -->
    { strip_module(M:C, NM, NC) },
    [NM:NC].

/** list2conj( +List, -Conj) is det

Build a conjunction from a list of literals.

Notice that this relies on indexing within the list to avoid creating
choice-points.
*/
list2conj([], true).
list2conj([Last], Last).
list2conj([G, Next|More], (G, Conj)) :-
    list2conj([Next|More], Conj).
|
||||
|
||||
/** clauselength( +Clause, -Length) is det

Count the number of literals in a clause (head counts as one): a
clause `H :- B` with N body literals has Length N+1.

Notice that this is 1+length(conj2list), as we ignore disjunctions.
*/
clauselength((_Head :- Conj), Length) :-
    clauselength(Conj, Length, 1).


% clauselength(+Conj, -Final, +Sofar)
% Accumulator version: Final is Sofar plus the literal count of Conj.
% BUG FIX: the original first clause used DCG braces, `{ var(C) }`, in
% a plain (non-DCG) clause, which calls the undefined predicate {}/1
% instead of testing var/1 and raises an existence error at runtime.
clauselength(C, I1, I) :-
    var(C),
    !,
    I1 is I + 1.
clauselength((C1, C2), I2, I) :- !,
    clauselength(C1, I1, I),
    clauselength(C2, I2, I1).
clauselength(_C, I1, I) :-
    I1 is I + 1.
|
||||
|
||||
%%@}
|
216
packages/python/swig/yap4py/prolog/coinduction.yap
Normal file
216
packages/python/swig/yap4py/prolog/coinduction.yap
Normal file
@ -0,0 +1,216 @@
|
||||
/**
|
||||
* @file coinduction.yap
|
||||
* @author VITOR SANTOS COSTA <vsc@VITORs-MBP.lan>, Arvin Bansal,
|
||||
*
|
||||
*
|
||||
* @date Tue Nov 17 14:55:02 2015
|
||||
*
|
||||
* @brief Co-inductive execution
|
||||
*
|
||||
*
|
||||
*/
|
||||
|
||||
|
||||
/*************************************************************************
|
||||
* *
|
||||
* YAP Prolog *
|
||||
* *
|
||||
* Yap Prolog was developed at NCCUP - Universidade do Porto *
|
||||
* *
|
||||
* Copyright L.Damas, V.S.Costa and Universidade do Porto 1985-1997 *
|
||||
* *
|
||||
**************************************************************************
|
||||
* *
|
||||
* File: coinduction.yap *
|
||||
* Last rev: 8/2/88 *
|
||||
* mods: *
|
||||
* comments: coinduction support for Prolog *
|
||||
* *
|
||||
*************************************************************************/
|
||||
|
||||
% :- yap_flag(unknown,error).
|
||||
% :- style_check(all).
|
||||
|
||||
%
|
||||
% Code originally written by Arvin Bansal and Vitor Santos Costa
|
||||
% Includes nice extensions from Jan Wielemaker (from the SWI version).
|
||||
%
|
||||
|
||||
:- module(coinduction,
|
||||
[ (coinductive)/1,
|
||||
op(1150, fx, (coinductive))
|
||||
]).
|
||||
|
||||
:- use_module(library(error)).
|
||||
|
||||
/** <module> coinduction Co-Logic Programming
|
||||
@ingroup library
|
||||
|
||||
This simple module implements the directive coinductive/1 as described
|
||||
in "Co-Logic Programming: Extending Logic Programming with Coinduction"
|
||||
by Luke Somin et al. The idea behind coinduction is that a goal succeeds
|
||||
if it unifies to a parent goal. This enables some interesting programs,
|
||||
notably on infinite trees (cyclic terms).
|
||||
|
||||
~~~~
|
||||
:- use_module(library(coinduction)).
|
||||
|
||||
:- coinductive stream/1.
|
||||
stream([H|T]) :- i(H), stream(T).
|
||||
|
||||
% inductive
|
||||
i(0).
|
||||
i(s(N)) :- i(N).
|
||||
|
||||
?- X=[s(s(A))|X], stream(X).
|
||||
X= [s(s(A))|X], stream(X).
|
||||
A = 0,
|
||||
X = [s(s(0)),**]
|
||||
~~~~
|
||||
|
||||
This predicate is true for any cyclic list containing only 1-s,
|
||||
regardless of the cycle-length.
|
||||
|
||||
@bug Programs mixing normal predicates and coinductive predicates must
|
||||
be _stratified_. The theory does not apply to normal Prolog calling
|
||||
coinductive predicates, calling normal Prolog predicates, etc.
|
||||
|
||||
Stratification is not checked or enforced in any other way and thus
|
||||
left as a responsibility to the user.
|
||||
@see "Co-Logic Programming: Extending Logic Programming with Coinduction"
|
||||
by Luke Somin et al.
|
||||
|
||||
@{
|
||||
|
||||
*/
|
||||
|
||||
:- meta_predicate coinductive(:).
|
||||
|
||||
:- dynamic coinductive/3.
|
||||
|
||||
|
||||
%-----------------------------------------------------
|
||||
|
||||
% coinductive(+Spec)
% Directive handler: declare the predicate(s) described by Spec as
% coinductive.  Spec may be module-qualified; otherwise the module
% currently being loaded is used.
coinductive(Spec) :-
    var(Spec),
    !,
    throw(error(instantiation_error,coinductive(Spec))).
coinductive(Module:Spec) :-
    coinductive_declaration(Spec, Module, coinductive(Module:Spec)).
coinductive(Spec) :-
    prolog_load_context(module, Module),
    coinductive_declaration(Spec, Module, coinductive(Spec)).
|
||||
|
||||
% coinductive_declaration(+Spec, +Module, +Culprit)
% Walk a (possibly conjunctive and/or module-qualified) spec of
% predicate indicators and install the coinductive wrapper for each
% Name/Arity found.  Culprit is the original goal, used in error terms.
coinductive_declaration(Spec, _M, G) :-
    var(Spec),
    !,
    throw(error(instantiation_error,G)).
coinductive_declaration((A,B), M, G) :- !,
    coinductive_declaration(A, M, G),
    coinductive_declaration(B, M, G).
coinductive_declaration(M:Spec, _, G) :- !,
    coinductive_declaration(Spec, M, G).
coinductive_declaration(Spec, M, _G) :-
    valid_pi(Spec, F, N),
    functor(S, F, N),
    % renamed predicate that will carry the user's original clauses
    atomic_concat(['__coinductive__',F,'/',N], NF),
    functor(NS, NF, N),
    match_args(N, S, NS),
    % per-predicate global variable holding the stack of parent goals
    atomic_concat(['__stack_',M,':',F,'/',N], SF),
    nb_setval(SF, _),
    % wrapper: succeed if the goal unifies with an ancestor on the
    % stack; otherwise push it and run the renamed predicate
    assert((M:S :-
                b_getval(SF, L),
                coinduction:in_stack(S, L, End),
                (   nonvar(End)
                ->  true
                ;   End = [S|_],
                    M:NS)
           )),
    assert(coinduction:coinductive(S, M, NS)).

% valid_pi(+Spec, -Name, -Arity)
% Check that Spec is a well-formed Name/Arity predicate indicator.
valid_pi(Name/Arity, Name, Arity) :-
    must_be(atom, Name),
    must_be(integer, Arity).

% match_args(+N, ?S1, ?S2)
% Unify the first N arguments of S1 and S2, pairwise.
match_args(0, _, _) :- !.
match_args(I, S1, S2) :-
    arg(I, S1, A),
    arg(I, S2, A),
    I1 is I - 1,
    match_args(I1, S1, S2).
|
||||
|
||||
%-----------------------------------------------------
|
||||
|
||||
% co_term_expansion(+Clause, +Module, -NewClause)
% Rewrite a clause whose head is declared coinductive so that it
% defines the renamed internal predicate instead of the wrapper.
co_term_expansion((M:H :- B), _, (M:NH :- B)) :- !,
    co_term_expansion((H :- B), M, (NH :- B)).
co_term_expansion((H :- B), M, (NH :- B)) :- !,
    coinductive(H, M, NH), !.
co_term_expansion(H, M, NH) :-
    coinductive(H, M, NH), !.

% Hook the rewriting into clause expansion at load time.
user:term_expansion(M:Cl, M:NCl) :- !,
    co_term_expansion(Cl, M, NCl).

user:term_expansion(G, NG) :-
    prolog_load_context(module, Module),
    co_term_expansion(G, Module, NG).
|
||||
|
||||
|
||||
%-----------------------------------------------------
|
||||
|
||||
% in_stack(+Goal, ?Stack, -End)
% Walk the partial list Stack looking for an entry that unifies with
% Goal; End is either that matching suffix or the unbound tail of the
% stack (so callers can distinguish hit from miss with nonvar/1).
in_stack(_, V, V) :- var(V), !.
in_stack(G, [G|_], [G|_]) :- !.
in_stack(G, [_|T], End) :- in_stack(G, T, End).

% writeG_val(+GVar)
% Debugging helper: print a global variable and its current value.
writeG_val(G_var) :-
    b_getval(G_var, G_val),
    write(G_var), write(' ==> '), write(G_val), nl.
|
||||
|
||||
%-----------------------------------------------------
|
||||
|
||||
/**
|
||||
|
||||
Some examples from Coinductive Logic Programming and its Applications by Gopal Gupta et al, ICLP 97
|
||||
|
||||
~~~~
|
||||
:- coinductive stream/1.
|
||||
stream([H|T]) :- i(H), stream(T).
|
||||
|
||||
% inductive
|
||||
i(0).
|
||||
i(s(N)) :- i(N).
|
||||
|
||||
% Are there infinitely many "occurrences" of arg1 in arg2?
|
||||
:- coinductive comember/2.
|
||||
|
||||
comember(X, L) :-
|
||||
drop(X, L, L1),
|
||||
comember(X, L1).
|
||||
|
||||
% Drop some prefix of arg2 upto an "occurrence" of arg1 from arg2,
|
||||
% yielding arg3.
|
||||
% ("Occurrence" of X = something unifiable with X.)
|
||||
%:- table(drop/3). % not working; needs tabling supporting cyclic terms!
|
||||
drop(H, [H| T], T).
|
||||
drop(H, [_| T], T1) :-
|
||||
drop(H, T, T1).
|
||||
|
||||
|
||||
% X = [1, 2, 3| X], comember(E, X).
|
||||
|
||||
user:p(E) :-
|
||||
X = [1, 2, 3| X],
|
||||
comember(E, X),
|
||||
format('~w~n',[E]),
|
||||
get_code(_),
|
||||
fail.
|
||||
|
||||
~~~~
|
||||
|
||||
@}
|
||||
*/
|
||||
|
70
packages/python/swig/yap4py/prolog/dbqueues.yap
Normal file
70
packages/python/swig/yap4py/prolog/dbqueues.yap
Normal file
@ -0,0 +1,70 @@
|
||||
/**
|
||||
* @file dbqueues.yap
|
||||
* @author VITOR SANTOS COSTA <vsc@VITORs-MBP.lan>
|
||||
* @date Tue Nov 17 15:01:49 2015
|
||||
*
|
||||
* @brief A library to support queues with no-backtrackable queues.
|
||||
*
|
||||
*
|
||||
*/
|
||||
|
||||
|
||||
:- module(nbqueue, [
|
||||
nb_enqueue/2,
|
||||
nb_dequeue/2,
|
||||
nb_clean_queue/1,
|
||||
nb_size/2
|
||||
]).
|
||||
|
||||
/**
|
||||
* @defgroup dbqueues Non-backtrackable queues in YAP.
|
||||
* @ingroup library
|
||||
|
||||
A library to implement queues of NB Terms
|
||||
|
||||
*/
|
||||
|
||||
|
||||
:- unhide_atom('$init_nb_queue').
|
||||
:- unhide_atom('$nb_enqueue').
|
||||
:- unhide_atom('$nb_dequeue').
|
||||
|
||||
|
||||
% nb_enqueue(+Name, +El)
% Append El to the non-backtrackable queue Name, creating the queue on
% first use.  Name must be a bound atom.
nb_enqueue(Name, El) :-
    var(Name),
    throw(error(instantiation_error(Name),nb_enqueue(Name,El))).
nb_enqueue(Name, El) :-
    \+ atom(Name), !,
    throw(error(type_error_atom(Name),nb_enqueue(Name,El))).
nb_enqueue(Name, El) :-
    recorded('$nb_queue', [Name|Ref], _), !,
    prolog:'$nb_enqueue'(Ref, El).
nb_enqueue(Name, El) :-
    prolog:'$init_nb_queue'(Ref),
    recorda('$nb_queue', [Name|Ref], _),
    prolog:'$nb_enqueue'(Ref, El).
|
||||
|
||||
|
||||
% nb_dequeue(+Name, -El)
% Remove the front element of queue Name; when the queue is exhausted
% the call fails and the queue record is discarded.
nb_dequeue(Name, El) :-
    var(Name),
    throw(error(instantiation_error(Name),nb_dequeue(Name,El))).
nb_dequeue(Name, El) :-
    \+ atom(Name), !,
    throw(error(type_error_atom(Name),nb_dequeue(Name,El))).
nb_dequeue(Name, El) :-
    recorded('$nb_queue', [Name|Ref], R),
    (   prolog:'$nb_dequeue'(Ref, El)
    ->  true
    ;   erase(R),
        fail
    ).

% nb_clean_queue(+Name)
% Drop queue Name entirely, draining any queued elements; succeeds
% even when no such queue exists.
nb_clean_queue(Name) :-
    recorded('$nb_queue', [Name|Ref], R), !,
    erase(R),
    nb_dequeue_all(Ref).
nb_clean_queue(_).

% nb_dequeue_all(+Ref)
% Drain every remaining element of the queue referenced by Ref.
nb_dequeue_all(Ref) :-
    (   prolog:'$nb_dequeue'(Ref, _)
    ->  nb_dequeue_all(Ref)
    ;   true
    ).
|
||||
|
||||
% nb_dequeue_size(+Ref, -Size)
% Size of the queue identified by its internal reference Ref.
nb_dequeue_size(Ref, Size) :-
    prolog:'$nb_size'(Ref, Size).

% nb_size(+Name, -Size)
% BUG FIX: the module's export list declares nb_size/2 but no such
% predicate was defined (only nb_dequeue_size/2, keyed by the internal
% reference).  Resolve the queue Name to its reference and report its
% size; fails when the queue does not exist.
nb_size(Name, Size) :-
    recorded('$nb_queue', [Name|Ref], _), !,
    prolog:'$nb_size'(Ref, Size).
|
||||
|
208
packages/python/swig/yap4py/prolog/dbusage.yap
Normal file
208
packages/python/swig/yap4py/prolog/dbusage.yap
Normal file
@ -0,0 +1,208 @@
|
||||
/**
|
||||
* @file dbusage.yap
|
||||
* @author VITOR SANTOS COSTA <vsc@VITORs-MBP.lan>
|
||||
* @date Tue Nov 17 15:04:52 2015
|
||||
*
|
||||
* @brief Useful statistics on memory usage
|
||||
*
|
||||
*
|
||||
*/
|
||||
|
||||
:- module(dbusage, [
|
||||
db_usage/0,
|
||||
db_static/0,
|
||||
db_static/1,
|
||||
db_dynamic/0,
|
||||
db_dynamic/1
|
||||
]).
|
||||
|
||||
/**
|
||||
* @defgroup dbusage Memory Usage in Prolog Data-Base
|
||||
* @ingroup library
|
||||
@{
|
||||
|
||||
This library provides a set of utilities for studying memory usage in YAP.
|
||||
The following routines are available once included with the
|
||||
`use_module(library(dbusage))` command.
|
||||
*/
|
||||
|
||||
/** @pred db_usage
Give general overview of data-base usage in the system.
*/
db_usage :-
    statistics(heap, [HeapUsed, HeapFree]),
    statistics(local_stack, [LInUse, FreeStack]),
    statistics(global_stack, [GInUse, _]),
    statistics(trail, [TInUse, FreeTrail]),
    HeapUsedK is HeapUsed // 1024,
    HeapFreeK is HeapFree // 1024,
    StackSpace is (LInUse + GInUse + FreeStack + TInUse + FreeTrail) // 1024,
    format(user_error, 'Heap Space = ~D KB (+ ~D KB free)~n', [HeapUsedK, HeapFreeK]),
    format(user_error, 'Stack Space = ~D KB~n', [StackSpace]),
    % clause/index sizes for every predicate of every module
    findall(p(Cls, CSz, ISz),
            ( current_module(M),
              current_predicate(_, M:P),
              predicate_statistics(M:P, Cls, CSz, ISz) ),
            PerPred),
    sumall(PerPred, TotCls, TotCSz, TotISz),
    statistics(atoms, [AtomCount, AtomBytes]),
    AtomK is AtomBytes // 1024,
    format(user_error, '~D Atoms taking ~D KB~n', [AtomCount, AtomK]),
    TotBytes is TotCSz + TotISz,
    TotK is TotBytes // 1024,
    TotCK is TotCSz // 1024,
    TotIK is TotISz // 1024,
    format(user_error, 'Total User Code~n ~D clauses taking ~D KB~n ~D KB in clauses + ~D KB in indices~n',
           [TotCls, TotK, TotCK, TotIK]),
    statistics(static_code, [SCl, SI, SI1, SI2, SI3]),
    SClK is SCl // 1024,
    SIK is SI // 1024,
    SI1K is SI1 // 1024,
    SI2K is SI2 // 1024,
    SI3K is SI3 // 1024,
    ST is SCl + SI,
    STK is ST // 1024,
    format(user_error, 'Total Static code=~D KB~n ~D KB in clauses + ~D KB in indices (~D+~D+~D)~n',
           [STK, SClK, SIK, SI1K, SI2K, SI3K]),
    statistics(dynamic_code, [DCl, DI, DI1, DI2, DI3, DI4]),
    DClK is DCl // 1024,
    DIK is DI // 1024,
    DI1K is DI1 // 1024,
    DI2K is DI2 // 1024,
    DI3K is DI3 // 1024,
    DI4K is DI4 // 1024,
    DT is DCl + DI,
    DTK is DT // 1024,
    format(user_error, 'Total Dynamic code=~D KB~n ~D KB in clauses + ~D KB in indices (~D+~D+~D+~D)~n',
           [DTK, DClK, DIK, DI1K, DI2K, DI3K, DI4K]),
    total_erased(ECls, EClsBytes, EIdx, EIdxBytes),
    (   ECls =:= 0
    ->  true
    ;   EClsK is EClsBytes // 1024,
        format(user_error, ' ~D erased clauses not reclaimed (~D KB)~n', [ECls, EClsK])
    ),
    (   EIdx =:= 0
    ->  true
    ;   EIdxK is EIdxBytes // 1024,
        format(user_error, ' ~D erased indices not reclaimed (~D KB)~n', [EIdx, EIdxK])
    ),
    !.

% Fallback when any of the statistics calls fails.
db_usage :-
    write(mem_dump_error), nl.
|
||||
|
||||
|
||||
/** @pred db_static

List memory usage for every static predicate.
*/
db_static :-
    db_static(-1).

/** @pred db_static(+ _Threshold_)

List memory usage for every static predicate. Predicate must use more
than _Threshold_ bytes.
*/
db_static(Min) :-
    setof(p(Sz, M:P, Cls, CSz, ISz),
          PN^( current_module(M),
               current_predicate(PN, M:P),
               \+ predicate_property(M:P, dynamic),
               predicate_statistics(M:P, Cls, CSz, ISz),
               Sz is (CSz + ISz),
               Sz > Min ),
          All),
    format(user_error, ' Static user code~n===========================~n', []),
    display_preds(All).
|
||||
|
||||
/** @pred db_dynamic

List memory usage for every dynamic predicate.
*/
db_dynamic :-
    db_dynamic(-1).

/** @pred db_dynamic(+ _Threshold_)

List memory usage for every dynamic predicate. Predicate must use more
than _Threshold_ bytes (live plus erased-but-unreclaimed space).
*/
db_dynamic(Min) :-
    setof(p(Sz, M:P, Cls, CSz, ISz, ECls, ECSz, EISz),
          PN^( current_module(M),
               current_predicate(PN, M:P),
               predicate_property(M:P, dynamic),
               predicate_statistics(M:P, Cls, CSz, ISz),
               predicate_erased_statistics(M:P, ECls, ECSz, EISz),
               Sz is (CSz + ISz + ECSz + EISz),
               Sz > Min ),
          All),
    format(user_error, ' Dynamic user code~n===========================~n', []),
    display_dpreds(All).
|
||||
|
||||
% display_preds(+Entries)
% Failure-driven loop printing one summary line per p/5 entry.
display_preds([]).
display_preds([p(Sz, M:P, Cls, CSz, ISz)|_]) :-
    functor(P, A, N),
    KSz is Sz // 1024,
    KCSz is CSz // 1024,
    KISz is ISz // 1024,
    (M = user -> Name = A/N ; Name = M:A/N),
    format(user_error, '~w~t~36+:~t~D~7+ clauses using~|~t~D~8+ KB (~D + ~D)~n', [Name, Cls, KSz, KCSz, KISz]),
    fail.
display_preds([_|More]) :-
    display_preds(More).


% display_dpreds(+Entries)
% As display_preds/1, additionally reporting erased-but-unreclaimed
% clause and index space for each dynamic predicate.
display_dpreds([]).
display_dpreds([p(Sz, M:P, Cls, CSz, ISz, ECls, ECSz, EISz)|_]) :-
    functor(P, A, N),
    KSz is Sz // 1024,
    KCSz is CSz // 1024,
    KISz is ISz // 1024,
    (M = user -> Name = A/N ; Name = M:A/N),
    format(user_error, '~w~t~36+:~t~D~7+ clauses using~|~t~D~8+ KB (~D + ~D)~n', [Name, Cls, KSz, KCSz, KISz]),
    (   ECls =:= 0
    ->  true
    ;   ECSzK is ECSz // 1024,
        format(user_error, ' ~D erased clauses: ~D KB~n', [ECls, ECSzK])
    ),
    (   EISz =:= 0
    ->  true
    ;   EISzK is EISz // 1024,
        format(user_error, ' ~D KB erased indices~n', [EISzK])
    ),
    fail.
display_dpreds([_|More]) :-
    display_dpreds(More).


% sumall(+Entries, -Cls, -CSz, -ISz)
% Sum clause counts, clause bytes and index bytes over all p/3 entries.
sumall(Entries, Cls, CSz, ISz) :-
    sumall(Entries, 0, Cls, 0, CSz, 0, ISz).

sumall([], Cls, Cls, CSz, CSz, ISz, ISz).
sumall([p(C, CS, IS)|More], Cls0, Cls, CSz0, CSz, ISz0, ISz) :-
    Cls1 is C + Cls0,
    CSz1 is CS + CSz0,
    ISz1 is IS + ISz0,
    sumall(More, Cls1, Cls, CSz1, CSz, ISz1, ISz).
|
||||
|
||||
/**
|
||||
@}
|
||||
*/
|
708
packages/python/swig/yap4py/prolog/dgraphs.yap
Normal file
708
packages/python/swig/yap4py/prolog/dgraphs.yap
Normal file
@ -0,0 +1,708 @@
|
||||
/**
|
||||
* @file dgraphs.yap
|
||||
* @author VITOR SANTOS COSTA <vsc@VITORs-MBP.lan>
|
||||
* @date Tue Nov 17 01:23:20 2015
|
||||
*
|
||||
* @brief Directed Graph Processing Utilities.
|
||||
*
|
||||
*
|
||||
*/
|
||||
|
||||
:- module( dgraphs,
|
||||
[
|
||||
dgraph_vertices/2,
|
||||
dgraph_edge/3,
|
||||
dgraph_edges/2,
|
||||
dgraph_add_vertex/3,
|
||||
dgraph_add_vertices/3,
|
||||
dgraph_del_vertex/3,
|
||||
dgraph_del_vertices/3,
|
||||
dgraph_add_edge/4,
|
||||
dgraph_add_edges/3,
|
||||
dgraph_del_edge/4,
|
||||
dgraph_del_edges/3,
|
||||
dgraph_to_ugraph/2,
|
||||
ugraph_to_dgraph/2,
|
||||
dgraph_neighbors/3,
|
||||
dgraph_neighbours/3,
|
||||
dgraph_complement/2,
|
||||
dgraph_transpose/2,
|
||||
dgraph_compose/3,
|
||||
dgraph_transitive_closure/2,
|
||||
dgraph_symmetric_closure/2,
|
||||
dgraph_top_sort/2,
|
||||
dgraph_top_sort/3,
|
||||
dgraph_min_path/5,
|
||||
dgraph_max_path/5,
|
||||
dgraph_min_paths/3,
|
||||
dgraph_isomorphic/4,
|
||||
dgraph_path/3,
|
||||
dgraph_path/4,
|
||||
dgraph_leaves/2,
|
||||
dgraph_reachable/3
|
||||
]).
|
||||
|
||||
/** @defgroup dgraphs Directed Graphs
|
||||
@ingroup library
|
||||
@{
|
||||
|
||||
The following graph manipulation routines use the red-black tree library
|
||||
to try to avoid linear-time scans of the graph for all graph
|
||||
operations. Graphs are represented as a red-black tree, where the key is
|
||||
the vertex, and the associated value is a list of vertices reachable
|
||||
from that vertex through an edge (ie, a list of edges).
|
||||
|
||||
*/
|
||||
|
||||
|
||||
/** @pred dgraph_new(+ _Graph_)
|
||||
|
||||
|
||||
Create a new directed graph. This operation must be performed before
|
||||
trying to use the graph.
|
||||
|
||||
|
||||
*/
|
||||
:- reexport(library(rbtrees),
|
||||
[rb_new/1 as dgraph_new]).
|
||||
|
||||
:- use_module(library(rbtrees),
|
||||
[rb_new/1,
|
||||
rb_empty/1,
|
||||
rb_lookup/3,
|
||||
rb_apply/4,
|
||||
rb_insert/4,
|
||||
rb_visit/2,
|
||||
rb_keys/2,
|
||||
rb_delete/3,
|
||||
rb_map/3,
|
||||
rb_clone/3,
|
||||
ord_list_to_rbtree/2]).
|
||||
|
||||
:- use_module(library(ordsets),
|
||||
[ord_insert/3,
|
||||
ord_union/3,
|
||||
ord_subtract/3,
|
||||
ord_del_element/3,
|
||||
ord_memberchk/2]).
|
||||
|
||||
:- use_module(library(wdgraphs),
|
||||
[dgraph_to_wdgraph/2,
|
||||
wdgraph_min_path/5,
|
||||
wdgraph_max_path/5,
|
||||
wdgraph_min_paths/3]).
|
||||
|
||||
|
||||
/** @pred dgraph_add_edge(+ _Graph_, + _N1_, + _N2_, - _NewGraph_)
|
||||
|
||||
|
||||
Unify _NewGraph_ with a new graph obtained by adding the edge
|
||||
_N1_- _N2_ to the graph _Graph_.
|
||||
|
||||
|
||||
*/
|
||||
%% dgraph_add_edge(+Graph, +From, +To, -NewGraph)
%  Add edge From-To; also make sure the target vertex exists in the tree.
dgraph_add_edge(Graph0, From, To, Graph) :-
    dgraph_new_edge(From, To, Graph0, Graph1),
    dgraph_add_vertex(Graph1, To, Graph).
|
||||
|
||||
|
||||
/** @pred dgraph_add_edges(+ _Graph_, + _Edges_, - _NewGraph_)
|
||||
|
||||
|
||||
Unify _NewGraph_ with a new graph obtained by adding the list of
|
||||
edges _Edges_ to the graph _Graph_.
|
||||
|
||||
|
||||
*/
|
||||
%% dgraph_add_edges(+Graph, +Edges, -NewGraph)
%  Add a list of From-To edges.  An empty graph is built directly from the
%  sorted edge list; otherwise edges are merged in one pass over the tree.
dgraph_add_edges(Graph0, Edges, Graph) :-
    rb_empty(Graph0), !,
    sort(Edges, SortedEdges),
    all_vertices_in_edges(SortedEdges, Vertices),
    sort(Vertices, SortedVertices),
    edges2graphl(SortedVertices, SortedEdges, Pairs),
    ord_list_to_rbtree(Pairs, Graph).
dgraph_add_edges(Graph0, Edges, Graph) :-
    sort(Edges, SortedEdges),
    all_vertices_in_edges(SortedEdges, Vertices),
    sort(Vertices, SortedVertices),
    dgraph_add_edges(SortedVertices, SortedEdges, Graph0, Graph).

% Collect the endpoints of every edge (with duplicates; caller sorts).
all_vertices_in_edges([], []).
all_vertices_in_edges([A-B|Edges], [A,B|Vertices]) :-
    all_vertices_in_edges(Edges, Vertices).

% Turn sorted vertices + sorted edges into a Vertex-Children assoc list.
edges2graphl([], [], []).
edges2graphl([V|Vertices], [V0-Kid|Edges], [V-[Kid|Kids]|Pairs]) :-
    V == V0, !,
    get_extra_children(Edges, V0, Kids, RestEdges),
    edges2graphl(Vertices, RestEdges, Pairs).
edges2graphl([V|Vertices], Edges, [V-[]|Pairs]) :-
    edges2graphl(Vertices, Edges, Pairs).
|
||||
|
||||
|
||||
%% dgraph_add_edges(+Vertices, +Edges)// — threads Graph0/Graph as DCG state.
%  Walk sorted vertices and sorted edges in lockstep, updating each vertex
%  with all of its new children.
dgraph_add_edges([], []) --> [].
dgraph_add_edges([V|Vertices], [V0-Kid|Edges]) --> { V == V0 }, !,
    { get_extra_children(Edges, V, Kids, RestEdges) },
    dgraph_update_vertex(V, [Kid|Kids]),
    dgraph_add_edges(Vertices, RestEdges).
dgraph_add_edges([V|Vertices], Edges) --> !,
    dgraph_update_vertex(V, []),
    dgraph_add_edges(Vertices, Edges).

% Peel off the leading run of edges that all start at VV.
get_extra_children([V-Kid|Edges], VV, [Kid|Kids], RestEdges) :- V == VV, !,
    get_extra_children(Edges, VV, Kids, RestEdges).
get_extra_children(Edges, _, [], Edges).

% Merge Children into V's edge list, inserting V if it is a new vertex.
dgraph_update_vertex(V, Children, Graph0, Graph) :-
    rb_apply(Graph0, V, add_edges(Children), Graph), !.
dgraph_update_vertex(V, Children, Graph0, Graph) :-
    rb_insert(Graph0, V, Children, Graph).

add_edges(Old, New, Merged) :-
    ord_union(Old, New, Merged).

% Add a single edge From-To, inserting From if absent.
dgraph_new_edge(From, To, Graph0, Graph) :-
    rb_apply(Graph0, From, insert_edge(To), Graph), !.
dgraph_new_edge(From, To, Graph0, Graph) :-
    rb_insert(Graph0, From, [To], Graph).

insert_edge(To, Children0, Children) :-
    ord_insert(Children0, To, Children).
|
||||
|
||||
/** @pred dgraph_add_vertices(+ _Graph_, + _Vertices_, - _NewGraph_)
|
||||
|
||||
|
||||
Unify _NewGraph_ with a new graph obtained by adding the list of
|
||||
vertices _Vertices_ to the graph _Graph_.
|
||||
|
||||
|
||||
*/
|
||||
%% dgraph_add_vertices(+Graph, +Vertices, -NewGraph)
%  Add every vertex in the list, one at a time.
dgraph_add_vertices(Graph, [], Graph).
dgraph_add_vertices(Graph0, [V|Vertices], Graph) :-
    dgraph_add_vertex(Graph0, V, Graph1),
    dgraph_add_vertices(Graph1, Vertices, Graph).
|
||||
|
||||
|
||||
/** @pred dgraph_add_vertex(+ _Graph_, + _Vertex_, - _NewGraph_)
|
||||
|
||||
Unify _NewGraph_ with a new graph obtained by adding
|
||||
vertex _Vertex_ to the graph _Graph_.
|
||||
|
||||
|
||||
*/
|
||||
%% dgraph_add_vertex(+Graph, +Vertex, -NewGraph)
%  Insert Vertex with an empty edge list; a no-op if already present.
dgraph_add_vertex(Graph, V, Graph) :-
    rb_lookup(V, _, Graph), !.
dgraph_add_vertex(Graph0, V, Graph) :-
    rb_insert(Graph0, V, [], Graph).
|
||||
|
||||
|
||||
/** @pred dgraph_edges(+ _Graph_, - _Edges_)
|
||||
|
||||
|
||||
Unify _Edges_ with all edges appearing in graph
|
||||
_Graph_.
|
||||
|
||||
|
||||
*/
|
||||
%% dgraph_edges(+Graph, -Edges)
%  Flatten the tree into a list of From-To pairs.
dgraph_edges(Graph, Edges) :-
    rb_visit(Graph, Pairs),
    cvt2edges(Pairs, Edges).
|
||||
|
||||
/** @pred dgraph_vertices(+ _Graph_, - _Vertices_)
|
||||
|
||||
|
||||
Unify _Vertices_ with all vertices appearing in graph
|
||||
_Graph_.
|
||||
|
||||
*/
|
||||
%% dgraph_vertices(+Graph, -Vertices)
%  The vertices are exactly the keys of the red-black tree.
dgraph_vertices(Graph, Vertices) :-
    rb_keys(Graph, Vertices).

% Difference-list expansion of Vertex-Children pairs into From-To edges.
cvt2edges([], []).
cvt2edges([V-Children|Pairs], Edges) :-
    children2edges(Children, V, Edges, Edges0),
    cvt2edges(Pairs, Edges0).

children2edges([], _, Edges, Edges).
children2edges([Kid|Kids], V, [V-Kid|Edges], Edges0) :-
    children2edges(Kids, V, Edges, Edges0).
|
||||
|
||||
/** @pred dgraph_neighbours(+ _Vertex_, + _Graph_, - _Vertices_)
|
||||
|
||||
|
||||
Unify _Vertices_ with the list of neighbours of vertex _Vertex_
|
||||
in _Graph_.
|
||||
|
||||
|
||||
*/
|
||||
%% dgraph_neighbours(+Vertex, +Graph, -Neighbours)
%  Fails if Vertex is not in Graph.
dgraph_neighbours(Vertex, Graph, Neighbours) :-
    rb_lookup(Vertex, Neighbours, Graph).
|
||||
|
||||
/** @pred dgraph_neighbors(+ _Vertex_, + _Graph_, - _Vertices_)
|
||||
|
||||
|
||||
Unify _Vertices_ with the list of neighbors of vertex _Vertex_
|
||||
in _Graph_. If the vertice is not in the graph fail.
|
||||
|
||||
|
||||
*/
|
||||
%% dgraph_neighbors(+Vertex, +Graph, -Neighbors)
%  American-spelling alias of dgraph_neighbours/3.
dgraph_neighbors(Vertex, Graph, Neighbors) :-
    rb_lookup(Vertex, Neighbors, Graph).

% Internal helper: insert a list of fresh vertices with empty edge lists.
add_vertices(Graph, [], Graph).
add_vertices(Graph0, [V|Vertices], Graph) :-
    rb_insert(Graph0, V, [], Graph1),
    add_vertices(Graph1, Vertices, Graph).
|
||||
|
||||
/** @pred dgraph_complement(+ _Graph_, - _NewGraph_)
|
||||
|
||||
|
||||
Unify _NewGraph_ with the graph complementary to _Graph_.
|
||||
|
||||
|
||||
*/
|
||||
%% dgraph_complement(+Graph, -Complement)
%  Every vertex's children become all the vertices it was NOT linked to.
dgraph_complement(Graph0, Graph) :-
    dgraph_vertices(Graph0, AllVertices),
    rb_map(Graph0, complement(AllVertices), Graph).

complement(AllVertices, Children, NewChildren) :-
    ord_subtract(AllVertices, Children, NewChildren).
|
||||
|
||||
/** @pred dgraph_del_edge(+ _Graph_, + _N1_, + _N2_, - _NewGraph_)
|
||||
|
||||
|
||||
Succeeds if _NewGraph_ unifies with a new graph obtained by
|
||||
removing the edge _N1_- _N2_ from the graph _Graph_. Notice
|
||||
that no vertices are deleted.
|
||||
|
||||
|
||||
*/
|
||||
%% dgraph_del_edge(+Graph, +From, +To, -NewGraph)
%  Remove one edge; vertices are kept.  Fails if From is not in the graph.
dgraph_del_edge(Graph0, From, To, Graph) :-
    rb_apply(Graph0, From, delete_edge(To), Graph).
|
||||
|
||||
/** @pred dgraph_del_edges(+ _Graph_, + _Edges_, - _NewGraph_)
|
||||
|
||||
|
||||
Unify _NewGraph_ with a new graph obtained by removing the list of
|
||||
edges _Edges_ from the graph _Graph_. Notice that no vertices
|
||||
are deleted.
|
||||
|
||||
|
||||
*/
|
||||
%% dgraph_del_edges(+Graph, +Edges, -NewGraph)
%  Remove a list of From-To edges; no vertices are deleted.
dgraph_del_edges(Graph0, Edges, Graph) :-
    sort(Edges, SortedEdges),
    continue_del_edges(SortedEdges, Graph0, Graph).

% DCG over the graph state: group edges by source, subtract per vertex.
continue_del_edges([]) --> [].
continue_del_edges([From-To|Edges]) --> !,
    { get_extra_children(Edges, From, Kids, RestEdges) },
    contract_vertex(From, [To|Kids]),
    continue_del_edges(RestEdges).

contract_vertex(V, Children, Graph0, Graph) :-
    rb_apply(Graph0, V, del_edges(Children), Graph).

del_edges(ToRemove, Children0, Children) :-
    ord_subtract(Children0, ToRemove, Children).
|
||||
|
||||
/** @pred dgraph_del_vertex(+ _Graph_, + _Vertex_, - _NewGraph_)
|
||||
|
||||
|
||||
Unify _NewGraph_ with a new graph obtained by deleting vertex
|
||||
_Vertex_ and all the edges that start from or go to _Vertex_ to
|
||||
the graph _Graph_.
|
||||
|
||||
|
||||
*/
|
||||
%% dgraph_del_vertex(+Graph, +Vertex, -NewGraph)
%  Delete Vertex and every edge into or out of it: remove its own node,
%  then strip it from every remaining edge list.
dgraph_del_vertex(Vs0, V, Vsf) :-
    rb_delete(Vs0, V, Vs1),
    rb_map(Vs1, delete_edge(V), Vsf).

%% delete_edge(+V, +Edges0, -Edges)
%  BUG FIX: rb_map/3 and rb_apply/4 call their goal as call(delete_edge(V),
%  OldVal, NewVal), so the partially-applied vertex arrives FIRST.  The old
%  head delete_edge(Edges0, V, Edges) bound the vertex to the edge-list slot
%  and handed ord_del_element/3 scrambled arguments.  The vertex must be the
%  first argument; this also fixes dgraph_del_edge/4, which uses the same
%  closure via rb_apply.
delete_edge(V, Edges0, Edges) :-
    ord_del_element(Edges0, V, Edges).
|
||||
|
||||
/** @pred dgraph_del_vertices(+ _Graph_, + _Vertices_, - _NewGraph_)
|
||||
|
||||
|
||||
Unify _NewGraph_ with a new graph obtained by deleting the list of
|
||||
vertices _Vertices_ and all the edges that start from or go to a
|
||||
vertex in _Vertices_ to the graph _Graph_.
|
||||
|
||||
|
||||
*/
|
||||
%% dgraph_del_vertices(+Graph, +Vertices, -NewGraph)
%  Delete a list of vertices and every edge touching any of them.
dgraph_del_vertices(G0, Vs, GF) :-
    sort(Vs, SortedVs),
    delete_all(SortedVs, G0, G1),
    delete_remaining_edges(SortedVs, G1, GF).

% it would be nice to be able to delete a set of elements from an RB tree
% but I don't know how to do it yet.
%
% CONSISTENCY FIX: the base case was written as a DCG rule
% (`delete_all([]) --> [].`) while the recursive clause was a plain /3
% clause.  The expansion is identical, but mixing the two notations for one
% predicate is confusing; both clauses are now plain.
delete_all([], G, G).
delete_all([V|Vs], G0, GF) :-
    rb_delete(G0, V, G1),
    delete_all(Vs, G1, GF).

% Strip the deleted vertices out of every surviving edge list.
delete_remaining_edges(SortedVs, G0, GF) :-
    rb_map(G0, del_edges(SortedVs), GF).
|
||||
|
||||
/** @pred dgraph_transpose(+ _Graph_, - _Transpose_)
|
||||
|
||||
|
||||
Unify _NewGraph_ with a new graph obtained from _Graph_ by
|
||||
replacing all edges of the form _V1-V2_ by edges of the form
|
||||
_V2-V1_.
|
||||
|
||||
|
||||
*/
|
||||
%% dgraph_transpose(+Graph, -Transpose)
%  Reverse every edge V1-V2 into V2-V1.
dgraph_transpose(Graph, TGraph) :-
    rb_visit(Graph, Edges),
    transpose(Edges, Nodes, TEdges, []),
    dgraph_new(G0),
    % make sure we have all vertices, even if they are unconnected.
    dgraph_add_vertices(G0, Nodes, G1),
    dgraph_add_edges(G1, TEdges, TGraph).

% DCG producing the reversed edge list; also collects the vertex list.
transpose([], []) --> [].
transpose([V-Edges|MoreVs], [V|Vs]) -->
    transpose_edges(Edges, V),
    transpose(MoreVs, Vs).

% PORTABILITY FIX: the recursive head used YAP-only functional dot-list
% syntax (`transpose_edges(E.Edges, V)`); rewritten with standard [E|Edges]
% notation, which is equivalent and portable.
transpose_edges([], _V) --> [].
transpose_edges([E|Edges], V) -->
    [E-V],
    transpose_edges(Edges, V).
|
||||
|
||||
%% dgraph_compose(+G1, +G2, -Composition)
%  Edge V-GC is in the result iff V-C is in G1 and C-GC is in G2.
dgraph_compose(G1, G2, Composed) :-
    rb_visit(G1, Pairs),
    compose(Pairs, G2, NewEdges),
    dgraph_new(Empty),
    dgraph_add_edges(Empty, NewEdges, Composed).

compose([], _, []).
compose([V-Kids|Pairs], G2, NewEdges) :-
    compose2(Kids, V, G2, NewEdges, NewEdges0),
    compose(Pairs, G2, NewEdges0).

% For each child C of V, link V to every grandchild reachable via G2.
compose2([], _, _, NewEdges, NewEdges).
compose2([C|Kids], V, G2, NewEdges, NewEdges0) :-
    rb_lookup(C, GrandKids, G2),
    compose3(GrandKids, V, NewEdges, NewEdgesI),
    compose2(Kids, V, G2, NewEdgesI, NewEdges0).

compose3([], _, NewEdges, NewEdges).
compose3([GC|GrandKids], V, [V-GC|NewEdges], NewEdges0) :-
    compose3(GrandKids, V, NewEdges, NewEdges0).
|
||||
|
||||
/** @pred dgraph_transitive_closure(+ _Graph_, - _Closure_)
|
||||
|
||||
|
||||
Unify _Closure_ with the transitive closure of graph _Graph_.
|
||||
|
||||
|
||||
*/
|
||||
%% dgraph_transitive_closure(+Graph, -Closure)
%  Iterate to a fixed point: keep adding edges V-GC whenever V-V1 and
%  V1-GC exist, until no new edge appears.
dgraph_transitive_closure(Graph, Closure) :-
    dgraph_edges(Graph, Edges),
    continue_closure(Edges, Graph, Closure).

continue_closure([], Closure, Closure) :- !.
continue_closure(Edges, Graph, Closure) :-
    transit_graph(Edges, Graph, NewEdges),
    dgraph_add_edges(Graph, NewEdges, Graph1),
    continue_closure(NewEdges, Graph1, Closure).

% Collect the one-step extensions of the given edges that are not yet edges.
transit_graph([], _, []).
transit_graph([V-V1|Edges], Graph, NewEdges) :-
    rb_lookup(V1, GrandKids, Graph),
    transit_graph2(GrandKids, V, Graph, NewEdges, MoreEdges),
    transit_graph(Edges, Graph, MoreEdges).

transit_graph2([], _, _, NewEdges, NewEdges).
transit_graph2([GC|GrandKids], V, Graph, NewEdges, MoreEdges) :-
    is_edge(V, GC, Graph), !,       % already present: skip
    transit_graph2(GrandKids, V, Graph, NewEdges, MoreEdges).
transit_graph2([GC|GrandKids], V, Graph, [V-GC|NewEdges], MoreEdges) :-
    transit_graph2(GrandKids, V, Graph, NewEdges, MoreEdges).

is_edge(From, To, Graph) :-
    rb_lookup(From, Children, Graph),
    ord_memberchk(To, Children).
|
||||
|
||||
/** @pred dgraph_symmetric_closure(+ _Graph_, - _Closure_)
|
||||
|
||||
|
||||
Unify _Closure_ with the symmetric closure of graph _Graph_,
|
||||
that is, if _Closure_ contains an edge _U-V_ it must also
|
||||
contain the edge _V-U_.
|
||||
|
||||
|
||||
*/
|
||||
%% dgraph_symmetric_closure(+Graph, -Closure)
%  For every edge U-V, also add V-U.
dgraph_symmetric_closure(Graph, Closure) :-
    dgraph_edges(Graph, Edges),
    invert_edges(Edges, Inverted),
    dgraph_add_edges(Graph, Inverted, Closure).

invert_edges([], []).
invert_edges([From-To|Edges], [To-From|Inverted]) :-
    invert_edges(Edges, Inverted).
|
||||
|
||||
/** @pred dgraph_top_sort(+ _Graph_, - _Vertices_)
|
||||
|
||||
|
||||
Unify _Vertices_ with the topological sort of graph _Graph_.
|
||||
|
||||
|
||||
*/
|
||||
%% dgraph_top_sort(+Graph, -Sorted)
%  Convenience wrapper closing the difference list of dgraph_top_sort/3.
dgraph_top_sort(Graph, Sorted) :-
    dgraph_top_sort(Graph, Sorted, []).
|
||||
|
||||
/** @pred dgraph_top_sort(+ _Graph_, - _Vertices_, ? _Vertices0_)
|
||||
|
||||
Unify the difference list _Vertices_- _Vertices0_ with the
|
||||
topological sort of graph _Graph_.
|
||||
|
||||
|
||||
*/
|
||||
%% dgraph_top_sort(+Graph, -Sorted, ?Tail)
%  Difference-list topological sort: Sorted-Tail is the vertex order.
%  Strategy: build a clone of the graph whose edge lists hold l/5 link
%  records, plus an inverted edge list; seed the output queue with vertices
%  that have no incoming edge, then consume the queue, releasing a vertex
%  once its incoming-edge chain closes.
%  NOTE(review): the l/5 and e/4 records thread open-ended chains (A/B) and
%  a per-vertex difference counter (S/E); verify against library(rbtrees)
%  rb_clone/3 documentation before refactoring.
dgraph_top_sort(G, Q, RQ0) :-
	% O(E)
	rb_visit(G, Vs),
	% O(E)
	invert_and_link(Vs, Links, UnsortedInvertedEdges, AllVs, Q),
	% O(V)
	rb_clone(G, LinkedG, Links),
	% O(Elog(E))
	sort(UnsortedInvertedEdges, InvertedEdges),
	% O(E)
	dgraph_vertices(G, AllVs),
	start_queue(AllVs, InvertedEdges, Q, RQ),
	continue_queue(Q, LinkedG, RQ, RQ0).

% Build, per vertex: its list of link records (for the clone) and the
% inverted edges carrying the same (shared) link variables.
invert_and_link([], [], [], [], []).
invert_and_link([V-Vs|Edges], [V-NVs|ExtraEdges], UnsortedInvertedEdges, [V|AllVs],[_|Q]) :-
	inv_links(Vs, NVs, V, UnsortedInvertedEdges, UnsortedInvertedEdges0),
	invert_and_link(Edges, ExtraEdges, UnsortedInvertedEdges0, AllVs, Q).

% One l/5 record and one inverted e/4 edge per child; A,B,S,E are shared
% so that closing a link in the clone is visible through the inverted edge.
inv_links([],[],_,UnsortedInvertedEdges,UnsortedInvertedEdges).
inv_links([V2|Vs],[l(V2,A,B,S,E)|VLnks],V1,[V2-e(A,B,S,E)|UnsortedInvertedEdges],UnsortedInvertedEdges0) :-
	inv_links(Vs,VLnks,V1,UnsortedInvertedEdges,UnsortedInvertedEdges0).

% Skeleton copy of a list (same length, fresh variables).
% NOTE(review): appears unused in this file region — confirm before removal.
dup([], []).
dup([_|AllVs], [_|Q]) :-
	dup(AllVs, Q).

% Vertices with no incoming (inverted) edge enter the queue immediately;
% for the others, chain their incoming edges together via link_edges/6.
start_queue([], [], RQ, RQ).
start_queue([V|AllVs], [VV-e(S,B,S,E)|InvertedEdges], Q, RQ) :- V == VV, !,
	link_edges(InvertedEdges, VV, B, S, E, RemainingEdges),
	start_queue(AllVs, RemainingEdges, Q, RQ).
start_queue([V|AllVs], InvertedEdges, [V|Q], RQ) :-
	start_queue(AllVs, InvertedEdges, Q, RQ).

% Thread the run of inverted edges for one target vertex into one chain.
link_edges([V-e(A,B,S,E)|InvertedEdges], VV, A, S, E, RemEdges) :- V == VV, !,
	link_edges(InvertedEdges, VV, B, S, E, RemEdges).
link_edges(RemEdges, _, A, _, A, RemEdges).

% Pop vertices; closing a vertex's outgoing links may release successors
% (S == E below) into the queue.
continue_queue([], _, RQ0, RQ0).
continue_queue([V|Q], LinkedG, RQ, RQ0) :-
	rb_lookup(V, Links, LinkedG),
	close_links(Links, RQ, RQI),
	% not clear whether I should delete V from LinkedG
	continue_queue(Q, LinkedG, RQI, RQ0).

% Unify A with A (marking the edge done); when the chain closes (S == E)
% the target vertex has no pending incoming edges and is emitted.
close_links([], RQ, RQ).
close_links([l(V,A,A,S,E)|Links], RQ, RQ0) :-
	( S == E -> RQ = [V| RQ1] ; RQ = RQ1),
	close_links(Links, RQ1, RQ0).
|
||||
|
||||
/** @pred ugraph_to_dgraph( + _UGraph_, - _Graph_)
|
||||
|
||||
|
||||
Unify _Graph_ with the directed graph obtain from _UGraph_,
|
||||
represented in the form used in the _ugraphs_ unweighted graphs
|
||||
library.
|
||||
|
||||
*/
|
||||
%% ugraph_to_dgraph(+UGraph, -Graph)
%  A ugraph is an ordered V-Neighbors list, which maps directly to a tree.
ugraph_to_dgraph(UGraph, Graph) :-
    ord_list_to_rbtree(UGraph, Graph).
|
||||
|
||||
/** @pred dgraph_to_ugraph(+ _Graph_, - _UGraph_)
|
||||
|
||||
|
||||
Unify _UGraph_ with the representation used by the _ugraphs_
|
||||
unweighted graphs library, that is, a list of the form
|
||||
_V-Neighbors_, where _V_ is a node and _Neighbors_ the nodes
|
||||
children.
|
||||
|
||||
*/
|
||||
%% dgraph_to_ugraph(+Graph, -UGraph)
%  In-order traversal yields exactly the ugraph V-Neighbors representation.
dgraph_to_ugraph(Graph, UGraph) :-
    rb_visit(Graph, UGraph).
|
||||
|
||||
/** @pred dgraph_edge(+ _N1_, + _N2_, + _Graph_)
|
||||
|
||||
|
||||
Edge _N1_- _N2_ is an edge in directed graph _Graph_.
|
||||
|
||||
|
||||
*/
|
||||
%% dgraph_edge(+From, +To, +Graph)
%  Succeeds iff the edge From-To is in Graph.
dgraph_edge(From, To, Graph) :-
    rb_lookup(From, Neighbours, Graph),
    ord_memberchk(To, Neighbours).
|
||||
|
||||
/** @pred dgraph_min_path(+ _V1_, + _V1_, + _Graph_, - _Path_, ? _Costt_)
|
||||
|
||||
|
||||
Unify the list _Path_ with the minimal cost path between nodes
|
||||
_N1_ and _N2_ in graph _Graph_. Path _Path_ has cost
|
||||
_Cost_.
|
||||
|
||||
|
||||
*/
|
||||
%% dgraph_min_path(+From, +To, +Graph, -Path, ?Cost)
%  Delegate to the weighted-graph library via a unit-weight conversion.
dgraph_min_path(From, To, Graph, Path, Cost) :-
    dgraph_to_wdgraph(Graph, Weighted),
    wdgraph_min_path(From, To, Weighted, Path, Cost).
|
||||
|
||||
/** @pred dgraph_max_path(+ _V1_, + _V1_, + _Graph_, - _Path_, ? _Costt_)
|
||||
|
||||
|
||||
Unify the list _Path_ with the maximal cost path between nodes
|
||||
_N1_ and _N2_ in graph _Graph_. Path _Path_ has cost
|
||||
_Cost_.
|
||||
|
||||
|
||||
*/
|
||||
%% dgraph_max_path(+From, +To, +Graph, -Path, ?Cost)
%  Maximal-cost counterpart of dgraph_min_path/5.
dgraph_max_path(From, To, Graph, Path, Cost) :-
    dgraph_to_wdgraph(Graph, Weighted),
    wdgraph_max_path(From, To, Weighted, Path, Cost).
|
||||
|
||||
/** @pred dgraph_min_paths(+ _V1_, + _Graph_, - _Paths_)
|
||||
|
||||
|
||||
Unify the list _Paths_ with the minimal cost paths from node
|
||||
_N1_ to the nodes in graph _Graph_.
|
||||
|
||||
|
||||
*/
|
||||
%% dgraph_min_paths(+From, +Graph, -Paths)
%  Minimal-cost paths from From to every reachable vertex.
dgraph_min_paths(From, Graph, Paths) :-
    dgraph_to_wdgraph(Graph, Weighted),
    wdgraph_min_paths(From, Weighted, Paths).
|
||||
|
||||
/** @pred dgraph_path(+ _Vertex_, + _Vertex1_, + _Graph_, ? _Path_)
|
||||
|
||||
The path _Path_ is a path starting at vertex _Vertex_ in graph
|
||||
_Graph_ and ending at path _Vertex2_.
|
||||
|
||||
|
||||
*/
|
||||
%% dgraph_path(+From, +To, +Graph, ?Path)
%  Path is a list of intermediate vertices on a path from From to To.
%  A visited-set (an RB tree) prevents cycling.
dgraph_path(From, To, Graph, Path) :-
    rb_new(Visited0),
    rb_lookup(From, Children, Graph),
    dgraph_path_children(Children, To, Visited0, Graph, Path).

% BUG FIX: the recursive clause of the original used an unbound variable E0
% as the visited tree (its head bound E1 instead), and inserted the TARGET
% V2 into the visited set rather than the vertex V1 being traversed — so
% the visited set was never actually threaded.  Now the incoming tree is
% consulted, V1 is marked visited, and the extended tree is passed down.
dgraph_path_children([V|_], To, _Visited, _Graph, []) :- V == To.
dgraph_path_children([V|_], To, Visited0, Graph, [V|Path]) :-
    To \== V,
    \+ rb_lookup(V, _, Visited0),          % not yet visited
    rb_insert(Visited0, V, [], Visited),   % mark V visited
    rb_lookup(V, Children, Graph),
    dgraph_path_children(Children, To, Visited, Graph, Path).
dgraph_path_children([_|Children], To, Visited, Graph, Path) :-
    dgraph_path_children(Children, To, Visited, Graph, Path).
|
||||
|
||||
|
||||
% Depth-first path enumeration used by dgraph_path/3; SoFar is the ordered
% set of vertices already on the path.
do_path([], _, _, []).
do_path([First|Rest], Graph, SoFar, Path) :-
    do_children([First|Rest], Graph, SoFar, Path).

do_children([V|_], Graph, SoFar, [V|Path]) :-
    rb_lookup(V, Children, Graph),
    ord_subtract(Children, SoFar, Fresh),   % drop already-used vertices
    ord_insert(SoFar, V, SoFar1),
    do_path(Fresh, Graph, SoFar1, Path).
do_children([_|Siblings], Graph, SoFar, Path) :-
    do_children(Siblings, Graph, SoFar, Path).
|
||||
|
||||
/** @pred dgraph_path(+ _Vertex_, + _Graph_, ? _Path_)
|
||||
|
||||
|
||||
The path _Path_ is a path starting at vertex _Vertex_ in graph
|
||||
_Graph_.
|
||||
|
||||
|
||||
*/
|
||||
%% dgraph_path(+Vertex, +Graph, ?Path)
%  Path is a simple path starting at Vertex (self-loops removed first).
dgraph_path(Vertex, Graph, [Vertex|Rest]) :-
    rb_lookup(Vertex, Children, Graph),
    ord_del_element(Children, Vertex, Others),
    do_path(Others, Graph, [Vertex], Rest).
|
||||
|
||||
|
||||
/** @pred dgraph_isomorphic(+ _Vs_, + _NewVs_, + _G0_, - _GF_)
|
||||
|
||||
|
||||
Unify the list _GF_ with the graph isomorphic to _G0_ where
|
||||
vertices in _Vs_ map to vertices in _NewVs_.
|
||||
|
||||
|
||||
*/
|
||||
%% dgraph_isomorphic(+Vs, +NewVs, +G1, -G2)
%  G2 is G1 with each vertex in Vs renamed to the corresponding one in NewVs.
dgraph_isomorphic(Vs, Vs2, G1, G2) :-
    rb_new(Map0),
    mapping(Vs, Vs2, Map0, Map),
    dgraph_edges(G1, Edges),
    translate_edges(Edges, Map, TEdges),
    dgraph_new(G20),
    % BUG FIX: dgraph_add_vertices/3 takes the graph as its FIRST argument
    % (see its own definition); the original passed the vertex list first,
    % which could only fail or corrupt the result.
    dgraph_add_vertices(G20, Vs2, G21),
    dgraph_add_edges(G21, TEdges, G2).

% Build the old->new vertex map as an RB tree.
mapping([], [], Map, Map).
mapping([V1|Vs], [V2|Vs2], Map0, Map) :-
    rb_insert(Map0, V1, V2, MapI),
    mapping(Vs, Vs2, MapI, Map).

% Rename both endpoints of every edge through the map.
translate_edges([], _, []).
translate_edges([V1-V2|Edges], Map, [NV1-NV2|TEdges]) :-
    rb_lookup(V1, NV1, Map),
    rb_lookup(V2, NV2, Map),
    translate_edges(Edges, Map, TEdges).
|
||||
|
||||
/** @pred dgraph_reachable(+ _Vertex_, + _Graph_, ? _Edges_)
|
||||
|
||||
|
||||
The path _Path_ is a path starting at vertex _Vertex_ in graph
|
||||
_Graph_.
|
||||
|
||||
|
||||
*/
|
||||
%% dgraph_reachable(+Vertex, +Graph, ?Reachable)
%  Reachable lists every vertex reachable from Vertex (excluding Vertex
%  itself, which is pre-seeded into the Done set).
dgraph_reachable(Vertex, Graph, Reachable) :-
    rb_lookup(Vertex, Children, Graph),
    ord_list_to_rbtree([Vertex-[]], Done0),
    reachable(Children, Done0, _, Graph, Reachable, []).

% Difference-list DFS with an RB tree of visited vertices.
reachable([], Done, Done, _, Out, Out).
reachable([V|Vs], Done0, Done, Graph, Out, Out0) :-
    rb_lookup(V, _, Done0), !,              % already visited
    reachable(Vs, Done0, Done, Graph, Out, Out0).
reachable([V|Vs], Done0, Done, Graph, [V|Out], Out0) :-
    rb_lookup(V, Kids, Graph),
    rb_insert(Done0, V, [], Done1),
    reachable(Kids, Done1, DoneI, Graph, Out, OutMid),
    reachable(Vs, DoneI, Done, Graph, OutMid, Out0).
|
||||
|
||||
/** @pred dgraph_leaves(+ _Graph_, ? _Vertices_)
|
||||
|
||||
|
||||
The vertices _Vertices_ have no outgoing edge in graph
|
||||
_Graph_.
|
||||
|
||||
|
||||
*/
|
||||
%% dgraph_leaves(+Graph, ?Vertices)
%  Vertices are the graph's sinks: vertices with no outgoing edge.
dgraph_leaves(Graph, Vertices) :-
    rb_visit(Graph, Pairs),
    vertices_without_children(Pairs, Vertices).

% PORTABILITY FIX: the original clause heads used YAP-only functional
% dot-list syntax ((V-[]).Pairs, _V-[_|_].Pairs); rewritten with standard
% [H|T] notation, which is equivalent and portable.
vertices_without_children([], []).
vertices_without_children([V-[]|Pairs], [V|Vertices]) :-
    vertices_without_children(Pairs, Vertices).
vertices_without_children([_V-[_|_]|Pairs], Vertices) :-
    vertices_without_children(Pairs, Vertices).
|
||||
|
||||
%% @}/** @} */
|
||||
|
242
packages/python/swig/yap4py/prolog/exo_interval.yap
Normal file
242
packages/python/swig/yap4py/prolog/exo_interval.yap
Normal file
@ -0,0 +1,242 @@
|
||||
/**
|
||||
* @file exo_interval.yap
|
||||
* @author VITOR SANTOS COSTA <vsc@VITORs-MBP.lan>
|
||||
* @date 2013
|
||||
*
|
||||
* @brief This file implements a very simple interval solver
|
||||
* designed to interact with the exo
|
||||
* data-base.
|
||||
* It assumes simple queries and a contiguous interval,
|
||||
* and does not really expect to do non-trivial
|
||||
* constraint propagation and solving.
|
||||
*
|
||||
*
|
||||
*/
|
||||
:- module(exo_interval,
|
||||
[max/2,
|
||||
min/2,
|
||||
any/2,
|
||||
max/1,
|
||||
min/1,
|
||||
maximum/1,
|
||||
minimum/1,
|
||||
any/1,
|
||||
(#<)/2,
|
||||
(#>)/2,
|
||||
(#=<)/2,
|
||||
(#>=)/2,
|
||||
(#=)/2,
|
||||
op(700, xfx, (#>)),
|
||||
op(700, xfx, (#<)),
|
||||
op(700, xfx, (#>=)),
|
||||
op(700, xfx, (#=<)),
|
||||
op(700, xfx, (#=))]).
|
||||
|
||||
|
||||
/**
|
||||
|
||||
@defgroup exo_interval Exo Intervals
|
||||
@ingroup library
|
||||
@{
|
||||
|
||||
This package assumes you use exo-compilation, that is, that you loaded
|
||||
the pedicate using the `exo` option to load_files/2, In this
|
||||
case, YAP includes a package for improved search on intervals of
|
||||
integers.
|
||||
|
||||
The package is activated by `udi` declarations that state what is
|
||||
the argument of interest:
|
||||
|
||||
~~~~~{.prolog}
|
||||
:- udi(diagnoses(exo_interval,?,?)).
|
||||
|
||||
:- load_files(db, [consult(exo)]).
|
||||
~~~~~
|
||||
It is designed to optimise the following type of queries:
|
||||
|
||||
~~~~~{.prolog}
|
||||
?- max(X, diagnoses(X, 9, Y), X).
|
||||
|
||||
?- min(X, diagnoses(X, 9, 36211117), X).
|
||||
|
||||
?- X #< Y, min(X, diagnoses(X, 9, 36211117), X ), diagnoses(Y, 9, _).
|
||||
~~~~~
|
||||
The first argument gives the time, the second the patient, and the
|
||||
third the condition code. The first query should find the last time
|
||||
the patient 9 had any code reported, the second looks for the first
|
||||
report of code 36211117, and the last searches for reports after this
|
||||
one. All queries run in constant or log(n) time.
|
||||
|
||||
|
||||
*/
|
||||
|
||||
/** @pred max( _X_, _Vs_)
|
||||
First Argument is the greatest element of a list.
|
||||
|
||||
+ lex_order( _Vs_)
|
||||
All elements must be ordered.
|
||||
|
||||
|
||||
|
||||
The following predicates control search:
|
||||
|
||||
|
||||
*/
|
||||
/** @pred max(+ _Expression_)
|
||||
Maximizes _Expression_ within the current constraint store. This is
|
||||
the same as computing the supremum and equating the expression to that
|
||||
supremum.
|
||||
|
||||
|
||||
*/
|
||||
/** @pred min( _X_, _Vs_)
|
||||
First Argument is the least element of a list.
|
||||
|
||||
|
||||
*/
|
||||
/** @pred min(+ _Expression_)
|
||||
Minimizes _Expression_ within the current constraint store. This is
|
||||
the same as computing the infimum and equation the expression to that
|
||||
infimum.
|
||||
|
||||
|
||||
*/
|
||||
:- meta_predicate max(?,0), min(?,0), any(?,0).
|
||||
|
||||
%% max(?X, :Goal) / min(?X, :Goal)
%  Attach a max/min search-mode attribute to X, then run Goal; the exo
%  indexer reads the attribute to direct the search.
max(Var, Goal) :-
    insert_atts(Var, i(_, _, max)),
    call(Goal).

min(Var, Goal) :-
    insert_atts(Var, i(_, _, min)),
    call(Goal).
|
||||
|
||||
% Unary search-mode annotations: each tags the variable with the named mode
% in the third slot of the i/3 attribute.  Factored through one helper.
max(X) :-
    set_search_mode(X, max).

maximum(X) :-
    set_search_mode(X, maximum).

any(X) :-
    set_search_mode(X, any).

min(X) :-
    set_search_mode(X, min).

minimum(X) :-
    set_search_mode(X, minimum).

least(X) :-
    set_search_mode(X, least).

% Shared helper: bounds left unconstrained, only the mode is set.
set_search_mode(X, Mode) :-
    insert_atts(X, i(_, _, Mode)).
|
||||
|
||||
%% X #> Y
%  Constrain X to be greater than Y: when either side is unbound, record the
%  bound as an attribute; when both are bound, test arithmetically.
%  NOTE(review): this if-then-else has TWO extra top-level `;` arms after
%  the first (var(X) -> ... ; ...) — the trailing `var(Y) -> ...` and
%  `X > Y` branches look unreachable, since the first else arm already
%  covers every case via its inner (var(Y) -> ... ; true).  Confirm intent;
%  as written, `X #> Y` with both sides bound succeeds without testing X>Y.
X #> Y :-
    ( var(X) -> insert_atts(X, i(Y,_,_))
    ;
      ( var(Y) -> insert_atts(Y, i(_,X,_) ) ;
        true
      )
    ;
      var(Y) -> insert_atts(Y, i(_,X,_))
    ;
      X > Y
    ).
|
||||
|
||||
%% X #>= Y — lower bound is exclusive internally, hence Y-1.
X #>= Y :-
    (   var(X)
    ->  insert_atts(X, i(Y-1, _, _))
    ;   X >= Y
    ).

%% X #< Y — record an (exclusive) upper bound, or test directly.
X #< Y :-
    (   var(X)
    ->  insert_atts(X, i(_, Y, _))
    ;   X < Y
    ).

%% X #=< Y — upper bound is exclusive internally, hence Y+1.
X #=< Y :-
    (   var(X)
    ->  insert_atts(X, i(Y+1, _, _))
    ;   X =< Y
    ).

%% X #= Y — pin X between Y-1 and Y+1 (exclusive), or test equality.
X #= Y :-
    (   var(X)
    ->  insert_atts(X, i(Y-1, Y+1, _))
    ;   X =:= Y
    ).
|
||||
|
||||
|
||||
%% attribute_goals(+X)//
%  attvar portray hook: translate X's exo_interval attribute back into the
%  user-level constraint goals it represents.
%  NOTE(review): the if-then-else has no final else branch, so an attribute
%  outside {max,min,'>'(_),'<'(_),range/3} makes this DCG fail — confirm
%  whether that is intended.
attribute_goals(X) -->
	{ get_attr(X, exo_interval, Op) },
	( { Op = max } -> [max(X)] ;
	  { Op = min } -> [min(X)] ;
	  { Op = '>'(Y) } -> [X #> Y] ;
	  { Op = '<'(Y) } -> [X #< Y] ;
	  { Op = range(A,B,C) } ->
	    % a range expands into up to three separate goals
	    range_min(A,X),
	    range_max(B,X),
	    range_op(C, X)
	).
|
||||
|
||||
% Emit a lower-bound goal unless the bound is still unconstrained.
range_min(Bound, _X) -->
    { var(Bound) }, !,
    [].
range_min(Bound, X) -->
    [X #> Bound].

% Emit an upper-bound goal unless the bound is still unconstrained.
range_max(Bound, _X) -->
    { var(Bound) }, !,
    [].
range_max(Bound, X) -->
    [X #< Bound].

% Emit the search-mode goal (e.g. min(X)) unless the mode is unset.
range_op(Mode, _X) -->
    { var(Mode) }, !,
    [].
range_op(Mode, X) -->
    { Goal =.. [Mode, X] },
    [Goal].
|
||||
|
||||
%% insert_atts(+Var, +Att)
%  Attach (or merge into) Var's exo_interval attribute.  A nonvar argument
%  is an error; an existing attribute is merged via expand_atts/3.
insert_atts(V, Att) :-
    (   nonvar(V)
    ->  % BUG FIX: the thrown error term was misspelled `uninstantion_error`;
        % the ISO error functor is uninstantiation_error/1.
        throw( error(uninstantiation_error(V), exo_interval) )
    ;   attvar(V)
    ->  get_attr(V, exo_interval, Att0),
        expand_atts(Att, Att0, NAtt)
    ;
        NAtt = Att
    ),
    put_attr(V, exo_interval, NAtt).
|
||||
|
||||
%% expand_atts(+New, +Old, -Merged)
%  Merge two i(Min, Max, Mode) interval attributes field by field.
expand_atts(i(A1, B1, C1), i(A2, B2, C2), i(A3,B3,C3)) :-
	expand_min(A1, A2, A3),
	expand_max(B1, B2, B3),
	expand_op(C1, C2, C3).

% Tightest lower bound: unbound yields the other side; two ground bounds
% take the larger; otherwise keep a symbolic max/2 term.
expand_min(A1, A2, A3) :-
	(var(A1) -> A3 = A2;
	 var(A2) -> A3 = A1;
	 ground(A1), ground(A2) -> A3 is max(A1,A2) ;
	 A3 = max(A1,A2)
	).

% Tightest upper bound, dually: two ground bounds take the smaller.
expand_max(A1, A2, A3) :-
	(var(A1) -> A3 = A2;
	 var(A2) -> A3 = A1;
	 ground(A1), ground(A2) -> A3 is min(A1,A2) ;
	 A3 = min(A1,A2)
	).

% Combine search modes: min+max collapse to unique; unique dominates;
% otherwise fall back to `any`.
% NOTE(review): the two `A1 = max` / `A2 = max` tests use unification where
% the surrounding tests use ==; both operands are nonvar atoms at this
% point so behavior coincides, but confirm == was intended.
expand_op(A1, A2, A3) :-
	(var(A1) -> A3 = A2;
	 var(A2) -> A3 = A1;
	 A1 == A2 -> A3 = A1;
	 A1 == unique -> A3 = unique;
	 A2 == unique -> A3 = unique;
	 A2 == min, A1 = max -> A3 = unique;
	 A1 == min, A2 = max -> A3 = unique;
	 A1 == min -> A3 = min; A2 == min -> A3 = min;
	 A1 == max -> A3 = max; A2 == max -> A3 = max;
	 A3 = any
	).
|
||||
%% @}
|
||||
|
165
packages/python/swig/yap4py/prolog/expand_macros.yap
Normal file
165
packages/python/swig/yap4py/prolog/expand_macros.yap
Normal file
@ -0,0 +1,165 @@
|
||||
/**
|
||||
* @file expand_macros.yap
|
||||
* @author VITOR SANTOS COSTA <vsc@VITORs-MBP.lan>
|
||||
* @date Tue Nov 17 15:16:12 2015
|
||||
*
|
||||
* @brief utilities that perform macro expansion for maplist/2 and
|
||||
* friends.
|
||||
*
|
||||
*
|
||||
*/
|
||||
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
%%
|
||||
%% preprocessing for meta-calls
|
||||
%%
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
|
||||
|
||||
:- module( expand_macros,
|
||||
[compile_aux/2,
|
||||
pred_name/4,
|
||||
transformation_id/1,
|
||||
allowed_expansion/1,
|
||||
allowed_module/2] ).
|
||||
|
||||
|
||||
:- use_module(library(lists), [append/3]).
|
||||
:- use_module(library(charsio), [format_to_chars/3, read_from_chars/2]).
|
||||
:- use_module(library(error), [must_be/2]).
|
||||
:- use_module(library(occurs), [sub_term/2]).
|
||||
|
||||
:- multifile allowed_module/2.
|
||||
|
||||
:- dynamic number_of_expansions/1.
|
||||
|
||||
number_of_expansions(0).
|
||||
|
||||
|
||||
|
||||
%%%%%%%%%%%%%%%%%%%%
|
||||
% utilities
|
||||
%%%%%%%%%%%%%%%%%%%%
|
||||
|
||||
%% compile_aux(+Clauses, +Module)
%  Compile a list of auxiliary clauses into Module, unless a predicate with
%  the same name/arity already exists there (first clause decides F/N).
compile_aux([Clause|Clauses], Module) :-
	% compile the predicate declaration if needed
	(
	    Clause = (Head :- _)
	;
	    Clause = Head
	),
	!,
	functor(Head, F, N),
	( current_predicate(Module:F/N)
	->
	    true
	;
	    % format'*** Creating auxiliary predicate ~q~n', [F/N]),
	    % checklist(portray_clause, [Clause|Clauses]),
	    compile_term([Clause|Clauses], Module)
	).

% Assert each clause as static code in Module.
compile_term([], _).
compile_term([Clause|Clauses], Module) :-
	assert_static(Module:Clause),
	compile_term(Clauses, Module).
|
||||
|
||||
%% append_args(+Term, +ExtraArgs, -NewTerm)
%  Extend a callable with additional arguments, e.g. foo(a) + [b] -> foo(a,b).
append_args(Term, ExtraArgs, NewTerm) :-
    Term =.. [Functor|CurrentArgs],
    append(CurrentArgs, ExtraArgs, AllArgs),
    NewTerm =.. [Functor|AllArgs].

%% aux_preds(+Meta, -MetaVars, -Pred, -PredVars, -Proto, +Mod, -OMod)
%  Split a meta-goal into its ground skeleton and variable slots; a
%  module-qualified goal first peels off its module.
aux_preds(Module:Meta, MetaVars, Pred, PredVars, Proto, _, OModule) :- !,
    aux_preds(Meta, MetaVars, Pred, PredVars, Proto, Module, OModule).
aux_preds(Meta, MetaVars, Pred, PredVars, Proto, Module, Module) :-
    Meta =.. [Functor|Args],
    aux_args(Args, MetaVars, PredArgs, PredVars, ProtoArgs),
    Pred  =.. [Functor|PredArgs],
    Proto =.. [Functor|ProtoArgs].

% Ground args are copied through; non-ground args become fresh slot
% variables, shown as '_' in the prototype.
aux_args([], [], [], [], []).
aux_args([Arg|Args], MetaVars, [Arg|PredArgs], PredVars, [Arg|ProtoArgs]) :-
    ground(Arg), !,
    aux_args(Args, MetaVars, PredArgs, PredVars, ProtoArgs).
aux_args([Arg|Args], [Arg|MetaVars], [Slot|PredArgs], [Slot|PredVars], ['_'|ProtoArgs]) :-
    aux_args(Args, MetaVars, PredArgs, PredVars, ProtoArgs).
|
||||
|
||||
%% pred_name(+Macro, +Arity, +_, -Name)
%  Build a unique name for a generated auxiliary predicate, keyed by the
%  macro, its arity, and a global expansion counter.
pred_name(Macro, Arity, _, Name) :-
    transformation_id(Serial),
    atomic_concat(['$$$__Auxiliary_predicate__ for',Macro,'/',Arity,' ',Serial], Name).

%% transformation_id(-Id)
%  Fetch-and-increment the global expansion counter (a dynamic fact).
transformation_id(Id) :-
    retract(number_of_expansions(Id)),
    Next is Id + 1,
    assert(number_of_expansions(Next)).
|
||||
|
||||
|
||||
%% harmless_dcgexception(?Error)
%  Exceptions raised during speculative DCG expansion that should be
%  swallowed rather than reported: the goal will simply be left unexpanded.
harmless_dcgexception(instantiation_error). % ex: phrase(([1],x:X,[3]),L)
harmless_dcgexception(type_error(callable,_)). % ex: phrase(27,L)
|
||||
|
||||
|
||||
%% allowed_expansion(+QualifiedGoal)
%  Succeeds if the (possibly module-qualified) goal is one this library is
%  allowed to macro-expand in the current compilation context.
allowed_expansion(QualifiedGoal) :-
    strip_module(QualifiedGoal, Module, Goal),
    goal_expansion_allowed(Goal, Module).
|
||||
|
||||
%% goal_expansion_allowed(+Pred, +Mod)
%  Expansion is allowed for whitelisted goals, and only while compiling.
%  BUG FIX: the clause ended with a comma and was never terminated, so the
%  reader would absorb the first following allowed_module/2 fact into this
%  clause body (silently deleting that fact and changing the semantics).
%  The clause now ends with a period after the once/1 guard.
goal_expansion_allowed(Pred, Mod) :-
    allowed_module(Pred, Mod),
    once( prolog_load_context(_, _) ). % make sure we are compiling.
|
||||
|
||||
|
||||
|
||||
|
||||
%% allowed_module(?Goal, ?Module)
%  Whitelist of goal skeletons whose macro expansion is permitted, and the
%  modules from which each may be expanded (expand_macros, apply_macros,
%  maplist).  Extensible: declared multifile at the top of this file.
% -- list traversal --
allowed_module(checklist(_,_),expand_macros).
allowed_module(checklist(_,_),apply_macros).
allowed_module(checklist(_,_),maplist).
allowed_module(maplist(_,_),expand_macros).
allowed_module(maplist(_,_),apply_macros).
allowed_module(maplist(_,_),maplist).
allowed_module(maplist(_,_,_),expand_macros).
allowed_module(maplist(_,_,_),apply_macros).
allowed_module(maplist(_,_,_),maplist).
allowed_module(maplist(_,_,_,_),expand_macros).
allowed_module(maplist(_,_,_,_),apply_macros).
allowed_module(maplist(_,_,_,_),maplist).
allowed_module(maplist(_,_,_,_,_),expand_macros).
allowed_module(maplist(_,_,_,_,_),apply_macros).
allowed_module(maplist(_,_,_,_,_),maplist).
allowed_module(maplist(_,_,_,_,_,_),expand_macros).
allowed_module(maplist(_,_,_,_,_,_),apply_macros).
allowed_module(maplist(_,_,_,_,_,_),maplist).
% -- list filtering --
allowed_module(selectlist(_,_,_),expand_macros).
allowed_module(selectlist(_,_,_),apply_macros).
allowed_module(selectlist(_,_,_),maplist).
allowed_module(include(_,_,_),expand_macros).
allowed_module(include(_,_,_),apply_macros).
allowed_module(include(_,_,_),maplist).
allowed_module(exclude(_,_,_),expand_macros).
allowed_module(exclude(_,_,_),apply_macros).
allowed_module(exclude(_,_,_),maplist).
allowed_module(partition(_,_,_,_),expand_macros).
allowed_module(partition(_,_,_,_),apply_macros).
allowed_module(partition(_,_,_,_),maplist).
allowed_module(partition(_,_,_,_,_),expand_macros).
allowed_module(partition(_,_,_,_,_),apply_macros).
allowed_module(partition(_,_,_,_,_),maplist).
allowed_module(convlist(_,_,_),expand_macros).
allowed_module(convlist(_,_,_),apply_macros).
allowed_module(convlist(_,_,_),maplist).
allowed_module(sumlist(_,_,_,_),expand_macros).
allowed_module(sumlist(_,_,_,_),apply_macros).
allowed_module(sumlist(_,_,_,_),maplist).
% -- term argument/node traversal --
allowed_module(mapargs(_,_,_),expand_macros).
allowed_module(mapargs(_,_,_),apply_macros).
allowed_module(mapargs(_,_,_),maplist).
allowed_module(sumargs(_,_,_,_),expand_macros).
allowed_module(sumargs(_,_,_,_),apply_macros).
allowed_module(sumargs(_,_,_,_),maplist).
allowed_module(mapnodes(_,_,_),expand_macros).
allowed_module(mapnodes(_,_,_),apply_macros).
allowed_module(mapnodes(_,_,_),maplist).
allowed_module(checknodes(_,_),expand_macros).
allowed_module(checknodes(_,_),apply_macros).
allowed_module(checknodes(_,_),maplist).
allowed_module(sumnodes(_,_,_,_),expand_macros).
allowed_module(sumnodes(_,_,_,_),apply_macros).
allowed_module(sumnodes(_,_,_,_),maplist).
|
589
packages/python/swig/yap4py/prolog/flags.yap
Normal file
589
packages/python/swig/yap4py/prolog/flags.yap
Normal file
@ -0,0 +1,589 @@
|
||||
%%% -*- Mode: Prolog; -*-
|
||||
|
||||
/**
|
||||
* @file library/flags.yap
|
||||
* @author Theofrastos Mantadelis, Bernd Gutmann, Paulo Moura
|
||||
* @date Tue Nov 17 15:18:02 2015
|
||||
*
|
||||
* @brief Flag Manipulation in Prolog
|
||||
*
|
||||
*
|
||||
*/
|
||||
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
%
|
||||
% Flags was developed at Katholieke Universiteit Leuven
|
||||
%
|
||||
% Copyright 2010
|
||||
% Katholieke Universiteit Leuven
|
||||
%
|
||||
% Contributions to this file:
|
||||
% Author: Theofrastos Mantadelis
|
||||
% Sugestions: Bernd Gutmann, Paulo Moura
|
||||
% $Date: 2011-02-15 13:33:01 +0100 (Tue, 15 Feb 2011) $
|
||||
% $Revision: 15 $
|
||||
%
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
%
|
||||
% Artistic License 2.0
|
||||
%
|
||||
% Copyright (c) 2000-2006, The Perl Foundation.
|
||||
%
|
||||
% Everyone is permitted to copy and distribute verbatim copies of this
|
||||
% license document, but changing it is not allowed. Preamble
|
||||
%
|
||||
% This license establishes the terms under which a given free software
|
||||
% Package may be copied, modified, distributed, and/or
|
||||
% redistributed. The intent is that the Copyright Holder maintains some
|
||||
% artistic control over the development of that Package while still
|
||||
% keeping the Package available as open source and free software.
|
||||
%
|
||||
% You are always permitted to make arrangements wholly outside of this
|
||||
% license directly with the Copyright Holder of a given Package. If the
|
||||
% terms of this license do not permit the full use that you propose to
|
||||
% make of the Package, you should contact the Copyright Holder and seek
|
||||
% a different licensing arrangement. Definitions
|
||||
%
|
||||
% "Copyright Holder" means the individual(s) or organization(s) named in
|
||||
% the copyright notice for the entire Package.
|
||||
%
|
||||
% "Contributor" means any party that has contributed code or other
|
||||
% material to the Package, in accordance with the Copyright Holder's
|
||||
% procedures.
|
||||
%
|
||||
% "You" and "your" means any person who would like to copy, distribute,
|
||||
% or modify the Package.
|
||||
%
|
||||
% "Package" means the collection of files distributed by the Copyright
|
||||
% Holder, and derivatives of that collection and/or of those files. A
|
||||
% given Package may consist of either the Standard Version, or a
|
||||
% Modified Version.
|
||||
%
|
||||
% "Distribute" means providing a copy of the Package or making it
|
||||
% accessible to anyone else, or in the case of a company or
|
||||
% organization, to others outside of your company or organization.
|
||||
%
|
||||
% "Distributor Fee" means any fee that you charge for Distributing this
|
||||
% Package or providing support for this Package to another party. It
|
||||
% does not mean licensing fees.
|
||||
%
|
||||
% "Standard Version" refers to the Package if it has not been modified,
|
||||
% or has been modified only in ways explicitly requested by the
|
||||
% Copyright Holder.
|
||||
%
|
||||
% "Modified Version" means the Package, if it has been changed, and such
|
||||
% changes were not explicitly requested by the Copyright Holder.
|
||||
%
|
||||
% "Original License" means this Artistic License as Distributed with the
|
||||
% Standard Version of the Package, in its current version or as it may
|
||||
% be modified by The Perl Foundation in the future.
|
||||
%
|
||||
% "Source" form means the source code, documentation source, and
|
||||
% configuration files for the Package.
|
||||
%
|
||||
% "Compiled" form means the compiled bytecode, object code, binary, or
|
||||
% any other form resulting from mechanical transformation or translation
|
||||
% of the Source form.
|
||||
%
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
%
|
||||
% Permission for Use and Modification Without Distribution
|
||||
%
|
||||
% (1) You are permitted to use the Standard Version and create and use
|
||||
% Modified Versions for any purpose without restriction, provided that
|
||||
% you do not Distribute the Modified Version.
|
||||
%
|
||||
% Permissions for Redistribution of the Standard Version
|
||||
%
|
||||
% (2) You may Distribute verbatim copies of the Source form of the
|
||||
% Standard Version of this Package in any medium without restriction,
|
||||
% either gratis or for a Distributor Fee, provided that you duplicate
|
||||
% all of the original copyright notices and associated disclaimers. At
|
||||
% your discretion, such verbatim copies may or may not include a
|
||||
% Compiled form of the Package.
|
||||
%
|
||||
% (3) You may apply any bug fixes, portability changes, and other
|
||||
% modifications made available from the Copyright Holder. The resulting
|
||||
% Package will still be considered the Standard Version, and as such
|
||||
% will be subject to the Original License.
|
||||
%
|
||||
% Distribution of Modified Versions of the Package as Source
|
||||
%
|
||||
% (4) You may Distribute your Modified Version as Source (either gratis
|
||||
% or for a Distributor Fee, and with or without a Compiled form of the
|
||||
% Modified Version) provided that you clearly document how it differs
|
||||
% from the Standard Version, including, but not limited to, documenting
|
||||
% any non-standard features, executables, or modules, and provided that
|
||||
% you do at least ONE of the following:
|
||||
%
|
||||
% (a) make the Modified Version available to the Copyright Holder of the
|
||||
% Standard Version, under the Original License, so that the Copyright
|
||||
% Holder may include your modifications in the Standard Version. (b)
|
||||
% ensure that installation of your Modified Version does not prevent the
|
||||
% user installing or running the Standard Version. In addition, the
|
||||
% modified Version must bear a name that is different from the name of
|
||||
% the Standard Version. (c) allow anyone who receives a copy of the
|
||||
% Modified Version to make the Source form of the Modified Version
|
||||
% available to others under (i) the Original License or (ii) a license
|
||||
% that permits the licensee to freely copy, modify and redistribute the
|
||||
% Modified Version using the same licensing terms that apply to the copy
|
||||
% that the licensee received, and requires that the Source form of the
|
||||
% Modified Version, and of any works derived from it, be made freely
|
||||
% available in that license fees are prohibited but Distributor Fees are
|
||||
% allowed.
|
||||
%
|
||||
% Distribution of Compiled Forms of the Standard Version or
|
||||
% Modified Versions without the Source
|
||||
%
|
||||
% (5) You may Distribute Compiled forms of the Standard Version without
|
||||
% the Source, provided that you include complete instructions on how to
|
||||
% get the Source of the Standard Version. Such instructions must be
|
||||
% valid at the time of your distribution. If these instructions, at any
|
||||
% time while you are carrying out such distribution, become invalid, you
|
||||
% must provide new instructions on demand or cease further
|
||||
% distribution. If you provide valid instructions or cease distribution
|
||||
% within thirty days after you become aware that the instructions are
|
||||
% invalid, then you do not forfeit any of your rights under this
|
||||
% license.
|
||||
%
|
||||
% (6) You may Distribute a Modified Version in Compiled form without the
|
||||
% Source, provided that you comply with Section 4 with respect to the
|
||||
% Source of the Modified Version.
|
||||
%
|
||||
% Aggregating or Linking the Package
|
||||
%
|
||||
% (7) You may aggregate the Package (either the Standard Version or
|
||||
% Modified Version) with other packages and Distribute the resulting
|
||||
% aggregation provided that you do not charge a licensing fee for the
|
||||
% Package. Distributor Fees are permitted, and licensing fees for other
|
||||
% components in the aggregation are permitted. The terms of this license
|
||||
% apply to the use and Distribution of the Standard or Modified Versions
|
||||
% as included in the aggregation.
|
||||
%
|
||||
% (8) You are permitted to link Modified and Standard Versions with
|
||||
% other works, to embed the Package in a larger work of your own, or to
|
||||
% build stand-alone binary or bytecode versions of applications that
|
||||
% include the Package, and Distribute the result without restriction,
|
||||
% provided the result does not expose a direct interface to the Package.
|
||||
%
|
||||
% Items That are Not Considered Part of a Modified Version
|
||||
%
|
||||
% (9) Works (including, but not limited to, modules and scripts) that
|
||||
% merely extend or make use of the Package, do not, by themselves, cause
|
||||
% the Package to be a Modified Version. In addition, such works are not
|
||||
% considered parts of the Package itself, and are not subject to the
|
||||
% terms of this license.
|
||||
%
|
||||
% General Provisions
|
||||
%
|
||||
% (10) Any use, modification, and distribution of the Standard or
|
||||
% Modified Versions is governed by this Artistic License. By using,
|
||||
% modifying or distributing the Package, you accept this license. Do not
|
||||
% use, modify, or distribute the Package, if you do not accept this
|
||||
% license.
|
||||
%
|
||||
% (11) If your Modified Version has been derived from a Modified Version
|
||||
% made by someone other than you, you are nevertheless required to
|
||||
% ensure that your Modified Version complies with the requirements of
|
||||
% this license.
|
||||
%
|
||||
% (12) This license does not grant you the right to use any trademark,
|
||||
% service mark, tradename, or logo of the Copyright Holder.
|
||||
%
|
||||
% (13) This license includes the non-exclusive, worldwide,
|
||||
% free-of-charge patent license to make, have made, use, offer to sell,
|
||||
% sell, import and otherwise transfer the Package with respect to any
|
||||
% patent claims licensable by the Copyright Holder that are necessarily
|
||||
% infringed by the Package. If you institute patent litigation
|
||||
% (including a cross-claim or counterclaim) against any party alleging
|
||||
% that the Package constitutes direct or contributory patent
|
||||
% infringement, then this Artistic License to you shall terminate on the
|
||||
% date that such litigation is filed.
|
||||
%
|
||||
% (14) Disclaimer of Warranty: THE PACKAGE IS PROVIDED BY THE COPYRIGHT
|
||||
% HOLDER AND CONTRIBUTORS "AS IS" AND WITHOUT ANY EXPRESS OR IMPLIED
|
||||
% WARRANTIES. THE IMPLIED WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
|
||||
% PARTICULAR PURPOSE, OR NON-INFRINGEMENT ARE DISCLAIMED TO THE EXTENT
|
||||
% PERMITTED BY YOUR LOCAL LAW. UNLESS REQUIRED BY LAW, NO COPYRIGHT
|
||||
% HOLDER OR CONTRIBUTOR WILL BE LIABLE FOR ANY DIRECT, INDIRECT,
|
||||
% INCIDENTAL, OR CONSEQUENTIAL DAMAGES ARISING IN ANY WAY OUT OF THE USE
|
||||
% OF THE PACKAGE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
%
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
|
||||
|
||||
:- module(flags, [flag_define/2,
|
||||
flag_define/5,
|
||||
flag_define/7,
|
||||
flag_set/2,
|
||||
flag_set/3,
|
||||
flag_unsafe_set/2,
|
||||
flag_get/2,
|
||||
flags_reset/0,
|
||||
flags_reset/1,
|
||||
flags_save/1,
|
||||
flags_load/1,
|
||||
flag_groups/1,
|
||||
flag_group_chk/1,
|
||||
flag_help/0,
|
||||
flags_print/0,
|
||||
defined_flag/7]).
|
||||
|
||||
/**
|
||||
* @defgroup flags Flag Manipulation in Prolog
|
||||
* @ingroup library
|
||||
*
|
||||
* Routines to manipulate flags: they allow defining, set,
|
||||
* resetting.
|
||||
* @{
|
||||
*/
|
||||
|
||||
|
||||
:- use_module(library(lists), [append/3, memberchk/2, member/2]).
|
||||
|
||||
:- style_check(all).
|
||||
:- yap_flag(unknown, error).
|
||||
|
||||
:- dynamic(['$defined_flag$'/7, '$store_flag_value$'/2]).
|
||||
:- meta_predicate(flag_define(+, +, +, ?, ?, ?, :)).
|
||||
:- meta_predicate(flag_define(+, :)).
|
||||
:- meta_predicate(validate(+, :, ?, +)).
|
||||
:- multifile(flags_type_definition/3).
|
||||
|
||||
%% flag_define(+FlagName, :InputOptions)
%
% Define a flag from an option list.  Options the caller does not give
% fall back to: group `general`, type `nonvar`, default value `true`,
% the flag name as its own description, `read_write` access and no
% handler (`true`).  Delegates to flag_define/7.
flag_define(FlagName, InputOptions):-
    strip_module(InputOptions, Module, UserOptions),
    % User options precede the defaults, so memberchk/2 always finds
    % the caller's value before the default for the same option name.
    append(UserOptions,
           [flag_group(general), flag_type(nonvar), default_value(true),
            description(FlagName), access(read_write), handler(true)],
           AllOptions),
    memberchk(handler(Handler), AllOptions),
    memberchk(access(Access), AllOptions),
    memberchk(description(Description), AllOptions),
    memberchk(default_value(DefaultValue), AllOptions),
    memberchk(flag_type(FlagType), AllOptions),
    memberchk(flag_group(FlagGroup), AllOptions),
    flag_define(FlagName, FlagGroup, FlagType, DefaultValue, Description, Access, Module:Handler).
|
||||
|
||||
%% flag_define(+FlagName, +FlagGroup, +FlagType, +DefaultValue, +Description)
%
% Convenience wrapper around flag_define/7: read-write access and no
% handler.
flag_define(Name, Group, Type, Default, Descr):-
    flag_define(Name, Group, Type, Default, Descr, read_write, true).
|
||||
|
||||
%% flag_define(+FlagName, +FlagGroup, +FlagType, +DefaultValue, +Description, +Access, :MHandler)
%
% Register a new flag: validate all arguments, validate the default
% value against the type (and handler), record the definition and the
% stored value, and notify the handler (mode `stored`) if one is given.
%
% Throws type_error / domain_error / evaluation_error /
% permission_error for invalid arguments, and instantiation_error
% (second clause) when a required argument is unbound.
flag_define(FlagName, FlagGroup, FlagType, DefaultValue, Description, Access, MHandler):-
    strip_module(MHandler, Module, Handler),
    nonvar(FlagName),
    nonvar(FlagGroup),
    nonvar(FlagType),
    nonvar(Access),
    nonvar(Handler), !,
    (\+ atom(FlagName) ->
      throw(error(type_error(atom, FlagName), message('Flag name needs to be an atom.', flag_define(FlagName, FlagGroup, FlagType, DefaultValue, Description, Access, Module:Handler))))
    ; \+ atom(FlagGroup) ->
      throw(error(type_error(atom, FlagGroup), message('Flag group needs to be an atom.', flag_define(FlagName, FlagGroup, FlagType, DefaultValue, Description, Access, Module:Handler))))
    ; \+ flag_type(FlagType) ->
      % CONSISTENCY FIX: the reported goal now includes the Access
      % argument, like every other error thrown from this predicate
      % (it used to reconstruct a 6-argument flag_define term).
      throw(error(domain_error(flag_type, FlagType), message('Unknown flag type.', flag_define(FlagName, FlagGroup, FlagType, DefaultValue, Description, Access, Module:Handler))))
    ; \+ validate_type(FlagType) ->
      throw(error(evaluation_error(type_validation), message('Validation of flag type failed, check custom domain.', flag_define(FlagName, FlagGroup, FlagType, DefaultValue, Description, Access, Module:Handler))))
    ; '$defined_flag$'(FlagName, _FlagGroup, _FlagType, _DefaultValue, _Description, _Access, _Handler) ->
      throw(error(permission_error(create, flag, FlagName), message('Re-defining a flag is not allowed.', flag_define(FlagName, FlagGroup, FlagType, DefaultValue, Description, Access, Module:Handler))))
    ; \+ memberchk(Access, [read_write, read_only, hidden, hidden_read_only]) ->
      % CONSISTENCY FIX: this branch used a plain conjunction (',')
      % where every sibling branch uses '->'.  Behaviourally equivalent
      % here, since throw/1 never fails, but now uniform.
      throw(error(domain_error(access, Access), message('Wrong access attribute, available are: read_write, read_only, hidden, hidden_read_only.', flag_define(FlagName, FlagGroup, FlagType, DefaultValue, Description, Access, Module:Handler))))
    ; \+ callable(Handler) -> % the Handler comes from: strip_module(MHandler, Module, Handler)
      throw(error(type_error(callable, Handler), message('Flag handler needs to be callable.', flag_define(FlagName, FlagGroup, FlagType, DefaultValue, Description, Access, Module:Handler))))
    ;
      % All checks passed: validate the default, record definition and
      % current value, then let the handler observe the stored default.
      validate(FlagType, Module:Handler, DefaultValue, FlagName),
      assertz('$defined_flag$'(FlagName, FlagGroup, FlagType, DefaultValue, Description, Access, Module:Handler)),
      assertz('$store_flag_value$'(FlagName, DefaultValue)),
      (Handler == true ->
        true
      ;
        call(Module:Handler, stored, DefaultValue)
      )
    ).
% Fallback: some required argument was unbound.
flag_define(FlagName, FlagGroup, FlagType, DefaultValue, Description, Access, Handler):-
    throw(error(instantiation_error, message('Flag name, group, type, access and handler need to be instantiated.', flag_define(FlagName, FlagGroup, FlagType, DefaultValue, Description, Access, Handler)))).
|
||||
|
||||
%% flag_groups(-FlagGroups)
%
% Collect, via YAP's all/3, the groups of every flag that is not
% hidden.  all/3 removes duplicates, so each group appears once.
flag_groups(FlagGroups):-
	all(FlagGroup, ('$defined_flag$'(_FlagName, FlagGroup, _FlagType, _DefaultValue, _Description, Access, _Handler), Access \== hidden, Access \== hidden_read_only), FlagGroups).

%% flag_group_chk(+FlagGroup)
%
% Succeed (once) if at least one flag — hidden or not — belongs to
% FlagGroup.  Requires FlagGroup to be bound.
flag_group_chk(FlagGroup):-
	nonvar(FlagGroup),
	'$defined_flag$'(_FlagName, FlagGroup, _FlagType, _DefaultValue, _Description, _Access, _Handler), !.

%% flag_type(?Type)
%
% Enumerate / check the declared flag types (see
% flags_type_definition/3, which is multifile and user-extensible).
flag_type(Type):-
	flags_type_definition(Type, _, _).
|
||||
|
||||
% flags_type_definition(TypeName, TypeHandler, TypeValidator).
%
% TypeName      : the type as written by the user in flag_define.
% TypeHandler   : goal (partially applied) used to validate a VALUE of
%                 the type — called as call(TypeHandler, Value).
% TypeValidator : goal used to validate the TYPE TERM itself at
%                 definition time (`true` when nothing to check).
%
% Interval ends written as [X] are inclusive, bare X exclusive;
% (+inf)/(-inf) mark unbounded ends (see in_interval/2,3).
% This predicate is multifile: libraries may add custom types.
flags_type_definition(nonvar, nonvar, true).
flags_type_definition(atom, atom, true).
flags_type_definition(atomic, atomic, true).
flags_type_definition(integer, integer, true).
flags_type_definition(float, float, true).
flags_type_definition(number, number, true).
flags_type_definition(ground, ground, true).
flags_type_definition(compound, compound, true).
flags_type_definition(is_list, is_list, true).
flags_type_definition(callable, callable, true).
flags_type_definition(in_interval(Type, Interval), in_interval(Type, Interval), in_interval(Type, Interval)).
flags_type_definition(integer_in_interval(Interval), in_interval(integer, Interval), in_interval(integer, Interval)).
flags_type_definition(positive_integer, in_interval(integer, (0, (+inf))), true).
flags_type_definition(non_negative_integer, in_interval(integer, ([0], (+inf))), true).
flags_type_definition(negative_integer, in_interval(integer, ((-inf), 0)), true).
flags_type_definition(float_in_interval(Interval), in_interval(float, Interval), in_interval(float, Interval)).
flags_type_definition(positive_float, in_interval(float, (0.0, (+inf))), true).
flags_type_definition(non_negative_float, in_interval(float, ([0.0], (+inf))), true).
flags_type_definition(negative_float, in_interval(float, ((-inf), 0.0)), true).
flags_type_definition(number_in_interval(Interval), in_interval(number, Interval), in_interval(number, Interval)).
flags_type_definition(positive_number, in_interval(number, (0.0, (+inf))), true).
flags_type_definition(non_negative_number, in_interval(number, ([0.0], (+inf))), true).
flags_type_definition(negative_number, in_interval(number, ((-inf), 0.0)), true).
flags_type_definition(in_domain(Domain), in_domain(Domain), in_domain(Domain)).
flags_type_definition(boolean, in_domain([true, false]), true).
flags_type_definition(switch, in_domain([on, off]), true).
|
||||
|
||||
%% in_domain(+Domain)
%
% Type-time validator for in_domain(Domain): the domain must be a
% ground list of allowed values.
in_domain(Domain):-
	ground(Domain),
	is_list(Domain).
%% in_domain(+Domain, +Value)
%
% Value-time check: Value must be ground and one of Domain's members.
in_domain(Domain, Value):-
	ground(Value),
	memberchk(Value, Domain).

%% in_interval(+Type, +Interval)
%
% Type-time validator for interval types.  A non-empty list denotes a
% union of intervals (each member must be valid); otherwise Interval
% must be a single (Min, Max) pair.
in_interval(Type, Interval):-
	is_list(Interval), !,
	Interval \== [],
	in_interval_conj(Type, Interval).
in_interval(Type, Interval):-
	in_interval_single(Type, Interval).

% in_interval_conj(+Type, +Intervals): every interval in the list must
% be a valid single interval.
in_interval_conj(_Type, []).
in_interval_conj(Type, [Interval|Rest]):-
	in_interval_single(Type, Interval),
	in_interval_conj(Type, Rest).

% in_interval_single(+Type, +Bounds): validate one (Min, Max) pair.
% [X] marks an inclusive bound, bare X an exclusive one; exclusive
% bounds may also be +inf / -inf (see type_or_inf/2).  Clause order +
% cuts dispatch on the bracket pattern.
in_interval_single(Type, ([Min], [Max])):-
	!, call(Type, Min),
	call(Type, Max),
	Min =< Max.

in_interval_single(Type, ([Min], Max)):-
	!, call(Type, Min),
	type_or_inf(Type, Max),
	Min < Max.

in_interval_single(Type, (Min, [Max])):-
	!, type_or_inf(Type, Min),
	call(Type, Max),
	Min < Max.

in_interval_single(Type, (Min, Max)):-
	type_or_inf(Type, Min),
	type_or_inf(Type, Max),
	Min < Max,
	% Both ends exclusive: additionally require a non-zero width
	% (evaluates the difference, so it also rejects inf - inf cases
	% that do not yield a positive number).
	Max - Min > 0.0.
|
||||
|
||||
%% type_or_inf(+Type, +Value)
%
% Succeed when Value is one of the infinity markers (+inf)/(-inf), or
% otherwise satisfies the type test Type.  The two original
% infinity clauses are folded into a single disjunction.
type_or_inf(Type, Value):-
    nonvar(Type),
    nonvar(Value),
    (   Value == (+inf)
    ;   Value == (-inf)
    ),
    !.
type_or_inf(Type, Value):-
    call(Type, Value).
|
||||
|
||||
%% in_interval(+Type, +Interval, +Value)
%
% Value-time check: Value satisfies Type and lies in Interval.  A list
% of intervals is a union — the first member that accepts Value wins
% (committed by the cut).
in_interval(Type, [Interval|_Rest], Value):-
	in_interval(Type, Interval, Value), !.
in_interval(Type, [_Interval|Rest], Value):-
	in_interval(Type, Rest, Value).

% Single (Min, Max) pair: [X] bound is inclusive, bare X exclusive.
% Clause order + cuts dispatch on the bracket pattern, mirroring
% in_interval_single/2.
in_interval(Type, ([Min], [Max]), Value):-
	!, call(Type, Value),
	Value >= Min,
	Value =< Max.

in_interval(Type, ([Min], Max), Value):-
	!, call(Type, Value),
	Value >= Min,
	Value < Max.

in_interval(Type, (Min, [Max]), Value):-
	!, call(Type, Value),
	Value > Min,
	Value =< Max.

in_interval(Type, (Min, Max), Value):-
	call(Type, Value),
	Value > Min,
	Value < Max.
|
||||
|
||||
%% validate_type(+Type)
%
% Definition-time check of a type term: run the type's own validator
% goal (third argument of flags_type_definition/3).
validate_type(Type):-
	flags_type_definition(Type, _, TypeValidater),
	call(TypeValidater).

%% validate(+FlagType, :Handler, +Value, +FlagName)
%
% Validate Value against FlagType, consulting the flag's handler.
% First clause: no handler (`true`) — just run the type's validator.
validate(FlagType, Handler, Value, FlagName):-
	strip_module(Handler, _Module, true),
	!, flags_type_definition(FlagType, FlagValidator, _),
	(call(FlagValidator, Value) ->
	  true
	;
	  throw(error(validation_error(FlagType, Value), message('Validation of value fails.', validate(FlagType, Value, FlagName))))
	).
% Second clause: a real handler participates.  The acceptance formula
% is (validating , (original ; validate)): the handler's `validating`
% mode acts as a conjunct (can veto), its `validate` mode as a
% disjunct (can extend the type).  On success the handler is notified
% with mode `validated`.  See flag_help_handler/0 for the truth table.
validate(FlagType, Handler, Value, FlagName):-
	flags_type_definition(FlagType, FlagValidator, _),
	((call(Handler, validating, Value), (call(FlagValidator, Value); call(Handler, validate, Value))) ->
	  call(Handler, validated, Value)
	;
	  throw(error(validation_error(FlagType, Value), message('Validation of value fails.', validate(FlagType, Handler, Value, FlagName))))
	).
|
||||
|
||||
%% flag_set(+FlagName, +FlagValue)
%
% Set a flag, discarding the previous value.
flag_set(FlagName, FlagValue):-
	flag_set(FlagName, _OldValue, FlagValue).
%% flag_set(+FlagName, -OldValue, +FlagValue)
%
% Set a flag and return the value it had before.  Validates the new
% value, refuses read-only flags (permission_error) and unknown flags
% (existence_error), and notifies the handler (mode `stored`).
flag_set(FlagName, OldValue, FlagValue):-
	atom(FlagName),
	'$defined_flag$'(FlagName, _FlagGroup, FlagType, _DefaultValue, _Description, Access, Module:Handler), !,
	(Access \== read_only, Access \== hidden_read_only ->
	  validate(FlagType, Module:Handler, FlagValue, FlagName),
	  % Swap the stored value; retract binds OldValue for the caller.
	  retract('$store_flag_value$'(FlagName, OldValue)),
	  assertz('$store_flag_value$'(FlagName, FlagValue)),
	  (Handler == true ->
	    true
	  ;
	    call(Module:Handler, stored, FlagValue)
	  )
	;
	  throw(error(permission_error(set, flag, FlagName), message('Setting the flag value is not allowed.',flag_set(FlagName, OldValue, FlagValue))))
	).
flag_set(FlagName, OldValue, FlagValue):-
	throw(error(existence_error(flag, FlagName), message('The flag is not defined.', flag_set(FlagName, OldValue, FlagValue)))).

%% flag_unsafe_set(+FlagName, +FlagValue)
%
% Replace a flag's stored value, bypassing validation, access control
% and handlers.  Fails (rather than throws) if the flag is unknown,
% because retract/1 fails.
flag_unsafe_set(FlagName, FlagValue):-
	retract('$store_flag_value$'(FlagName, _)),
	assertz('$store_flag_value$'(FlagName, FlagValue)).
|
||||
|
||||
%% flag_get(?FlagName, ?FlagValue)
%
% Look up the current value of a flag; enumerates all flags when
% FlagName is unbound.  Throws existence_error when nothing is stored
% under FlagName.  (Kept as two clauses so the second stays
% nondeterministic for an unbound FlagName.)
flag_get(Name, Value):-
    \+ '$store_flag_value$'(Name, _),
    !,  % no stored entry: report instead of failing silently
    throw(error(existence_error(flag, Name), message('The flag is not defined.', flag_get(Name, Value)))).
flag_get(Name, Value):-
    '$store_flag_value$'(Name, Value).
|
||||
|
||||
%% flags_reset
%
% Restore every defined flag to its declared default and notify the
% flag's handler (mode `stored`) when one is registered.  The original
% failure-driven loop (`..., fail.` plus a final fact) is expressed
% with forall/2.
flags_reset :-
    retractall('$store_flag_value$'(_, _)),
    forall('$defined_flag$'(Name, _Group, _Type, Default, _Descr, _Access, Module:Handler),
           flags_restore_default(Name, Default, Module:Handler)).

%% flags_reset(+FlagGroup)
%
% As flags_reset/0, restricted to the flags of one group (resets every
% group when FlagGroup is unbound, as before).
flags_reset(Group) :-
    forall('$defined_flag$'(Name, Group, _Type, Default, _Descr, _Access, Module:Handler),
           (   retractall('$store_flag_value$'(Name, _)),
               flags_restore_default(Name, Default, Module:Handler)
           )).

% flags_restore_default(+Name, +Default, :Handler)
% Store the default value and let the handler observe it, if any.
flags_restore_default(Name, Default, Module:Handler) :-
    assertz('$store_flag_value$'(Name, Default)),
    (   Handler == true
    ->  true
    ;   call(Module:Handler, stored, Default)
    ).
|
||||
|
||||
%% flags_save(+FileName)
%
% Write every stored flag value to FileName as a '$store_flag_value$'/2
% fact, one per line, readable back by flags_load/1.  Uses a
% failure-driven loop over the dynamic store (the final clause closes
% the stream); any exception closes the stream before re-throwing.
flags_save(FileName):-
	tell(FileName),
	catch(('$store_flag_value$'(FlagName, Value),
	       write_canonical('$store_flag_value$'(FlagName, Value)),
	       write('.'), nl),
	      Exception, clean_and_throw(told, Exception)),
	fail.
flags_save(_FileName):-
	told.
|
||||
|
||||
%% flags_load(+FileName)
%
% Load flag values previously written by flags_save/1 and apply each
% with flag_set/2.  Any exception closes the input stream before being
% re-thrown (clean_and_throw/2).
%
% BUG FIX: the original read inside a failure-driven loop
% (`catch((read(...), flag_set(...)), ...), fail.`); read/1 is
% deterministic, so backtracking never fetched a second term and only
% the FIRST saved flag was ever restored.  We now loop explicitly
% until end_of_file.
flags_load(FileName):-
	see(FileName),
	catch(flags_load_loop, Exception, clean_and_throw(seen, Exception)),
	seen.

% flags_load_loop: read terms one at a time until end_of_file,
% applying every '$store_flag_value$'/2 term via flag_set/2.  Other
% terms are skipped (the original simply stopped at the first
% non-matching term).
flags_load_loop:-
	read(Term),
	(   Term == end_of_file
	->  true
	;   Term = '$store_flag_value$'(FlagName, Value)
	->  flag_set(FlagName, Value),
	    flags_load_loop
	;   flags_load_loop
	).
|
||||
|
||||
%% clean_and_throw(:Cleanup, +Exception)
%
% Run the cleanup goal (e.g. told/0, seen/0) and then re-throw the
% exception caught around an I/O loop.
clean_and_throw(Cleanup, Exception):-
    call(Cleanup),
    throw(Exception).
|
||||
|
||||
%% flag_help
%
% Print a short interactive tutorial for this library to the current
% output stream.
flag_help:-
	format('This is a short tutorial for the flags library.~nExported predicates:~n'),
	format('  flag_define/5 : defines a new flag without a handler~n'),
	format('    flag_define(FlagName, FlagGroup, FlagType, DefaultValue, Description)~n'),
	format('  flag_define/6 : defines a new flag with a handler~n'),
	format('    flag_define(FlagName, FlagGroup, FlagType, DefaultValue, Description, Handler)~n'),
	format('      FlagName : the name of the flag~n'),
	format('      FlagGroup : the name of the flag group~n'),
	format('      FlagType : the type of the flag available types are:~n'),
	flag_help_types,
	format('      DefaultValue : the default value for the flag~n'),
	format('      Description : a flag description~n'),
	format('      Handler : a handler~n'),
	flags:flag_help_handler,
	format('  flag_groups/1 : returns all the flag groups in a list~n'),
	format('  flag_group_chk/1 : checks if a group exists~n'),
	format('  flag_set/2 : sets the value of a flag~n'),
	format('  flag_get/2 : gets the value of a flag~n'),
	format('  flag_store/2 : sets the value of a flag ignoring all tests and handlers~n'),
	format('  flag_reset/0 : resets all flags to their default value~n'),
	format('  flag_reset/1 : resets all flags of a group to their default value~n'),
	format('  flag_help/0 : this screen~n'),
	format('  flags_print/0 : shows the current flags/values~n').
% flag_help_types: list every known flag type, one per line
% (failure-driven loop over flag_type/1).
flag_help_types:-
	flag_type(FlagType),
	format('        ~w~n', [FlagType]),
	fail.
flag_help_types.
|
||||
|
||||
%% flag_help_handler
%
% Print the handler-protocol section of the tutorial: the four handler
% modes (validating / validate / validated / stored) and the
% acceptance truth table implemented by validate/4.
flag_help_handler:-
	format('        Handler important notes:~n'),
	format('          Conjuction: external_handler(validating, Value):-...~n'),
	format('          Disjunction: external_handler(validate, Value):-...~n'),
	format('          After: external_handler(validated, Value):-...~n'),
	format('          After set: external_handler(stored, Value):-...~n'),
	format('          this is implemented as (validating,(original;validated))~n'),
	format('          validating|original|validate|result~n'),
	format('             true   |  true  |  true  | true~n'),
	format('             true   |  true  |  fail  | true~n'),
	format('             true   |  fail  |  true  | true~n'),
	format('             true   |  fail  |  fail  | fail~n'),
	format('             fail   |  true  |  true  | fail~n'),
	format('             fail   |  true  |  fail  | fail~n'),
	format('             fail   |  fail  |  true  | fail~n'),
	format('             fail   |  fail  |  fail  | fail~n'),
	format('          Default behaviour is validating->true, validate->fail~n'),
	format('          To completly replace original set validate->true~n'),
	format('          To add new values to original set validating->true~n'),
	format('          To remove values from original set validate->fail~n'),
	format('        Example definition with a handler:~n'),
	format('          flag_define(myflag, mygroup, in_interval(integer, [(-5, 5),([15],[25])]), 0, description, my_handler).~n'),
	format('          my_handler(validate, Value):-Value is 10.~n'),
	format('          my_handler(validating, Value).~n'),
	format('          my_handler(validated, Value).~n'),
	format('          my_handler(stored, Value).~n'),
	format('          This has defined a flag that accepts integers (-5,5)v[15,25].~n'),
	format('          The handler adds the value 10 in those.~n').
|
||||
|
||||
%% flags_print
%
% Print every visible (non-hidden) flag, grouped by flag group.
flags_print:-
	flag_groups(Groups),
	forall(member(Group, Groups), flags_print(Group)).
%% flags_print(+Group)
%
% Print one group: a header line (first clause, which then fails into
% the second) followed by one row per visible flag — a classic
% failure-driven loop closed by the final fact clause.
flags_print(Group):-
	format(' ~w:~n~w~38+ ~w~19+ ~w~10+ ~w~10+~n', [Group, 'Description', 'Domain', 'Flag', 'Value']),
	fail.
flags_print(FlagGroup):-
	'$defined_flag$'(FlagName, FlagGroup, FlagType, _DefaultValue, Description, Access, _Handler),
	Access \== hidden, Access \== hidden_read_only,
	flag_get(FlagName, Value),
	format('~w~38+ ~w~19+ ~w~10+ ~q~10+~n', [Description, FlagType, FlagName, Value]),
	fail.
flags_print(_).
|
||||
|
||||
%% defined_flag(?FlagName, ?FlagGroup, ?FlagType, ?DefaultValue, ?Description, ?Access, ?Handler)
%
% Public view over the flag definitions.  First clause: enumerate
% non-hidden flags.  Second clause: when both name and group are
% bound, also expose hidden flags.
% NOTE(review): with name and group bound, a non-hidden flag is
% returned by BOTH clauses, yielding a duplicate solution on
% backtracking — presumably harmless to current callers; confirm.
defined_flag(FlagName, FlagGroup, FlagType, DefaultValue, Description, Access, Handler):-
	'$defined_flag$'(FlagName, FlagGroup, FlagType, DefaultValue, Description, Access, Handler),
	Access \== hidden, Access \== hidden_read_only.
defined_flag(FlagName, FlagGroup, FlagType, DefaultValue, Description, Access, Handler):-
	nonvar(FlagName), nonvar(FlagGroup),
	'$defined_flag$'(FlagName, FlagGroup, FlagType, DefaultValue, Description, Access, Handler).
|
||||
|
||||
%% @}
|
44
packages/python/swig/yap4py/prolog/gensym.yap
Normal file
44
packages/python/swig/yap4py/prolog/gensym.yap
Normal file
@ -0,0 +1,44 @@
|
||||
/**
|
||||
* @file gensym.yap
|
||||
* @author VITOR SANTOS COSTA <vsc@VITORs-MBP.lan>
|
||||
* @date Tue Nov 17 18:37:13 2015
|
||||
*
|
||||
* @brief Generate a new atom.
|
||||
*
|
||||
*
|
||||
*/
|
||||
:- module(gensym, [
|
||||
gensym/2,
|
||||
reset_gensym/1,
|
||||
reset_gensym/0
|
||||
]).
|
||||
|
||||
/**
|
||||
* @defgroup gensym Generate a new symbol.
|
||||
* @ingroup library
|
||||
*
|
||||
* Predicates to create new atoms based on the prefix _Atom_.
|
||||
* They use a counter, stored as a
|
||||
* dynamic predicate, to construct the atom's suffix.
|
||||
*
|
||||
*/
|
||||
|
||||
|
||||
:- dynamic gensym_key/2.
|
||||
|
||||
%% gensym(+Prefix, -Unique)
%
% Unify Unique with a fresh atom Prefix<N>, where N counts up from 1
% independently for every Prefix (counter kept in dynamic
% gensym_key/2).  The original two clauses (counter exists / first
% use) are merged into one if-then-else.
gensym(Prefix, Unique) :-
    (   retract(gensym_key(Prefix, Count))
    ->  true
    ;   Count = 1                       % first request for this prefix
    ),
    atomic_concat(Prefix, Count, Unique),
    Next is Count + 1,
    assert(gensym_key(Prefix, Next)).
|
||||
|
||||
%% reset_gensym(+Prefix)
%
% Drop the counter for Prefix, so the next gensym/2 restarts at 1.
% Fails when no counter exists for Prefix (plain retract/1 semantics).
reset_gensym(Prefix) :-
    retract(gensym_key(Prefix, _Count)).

%% reset_gensym
%
% Drop every counter.  Always succeeds.
reset_gensym :-
    retractall(gensym_key(_, _)).
|
||||
|
||||
|
70
packages/python/swig/yap4py/prolog/hacks.yap
Normal file
70
packages/python/swig/yap4py/prolog/hacks.yap
Normal file
@ -0,0 +1,70 @@
|
||||
/**
|
||||
* @file library/hacks.yap
|
||||
* @author VITOR SANTOS COSTA <vsc@VITORs-MBP.lan>
|
||||
* @date Tue Nov 17 19:00:25 2015
|
||||
*
|
||||
* @brief Prolog hacking
|
||||
*
|
||||
*
|
||||
*/
|
||||
|
||||
:- module(yap_hacks, [
|
||||
current_choicepoint/1,
|
||||
cut_by/1,
|
||||
cut_at/1,
|
||||
current_choicepoints/1,
|
||||
choicepoint/7,
|
||||
current_continuations/1,
|
||||
continuation/4,
|
||||
stack_dump/0,
|
||||
stack_dump/1,
|
||||
enable_interrupts/0,
|
||||
disable_interrupts/0,
|
||||
virtual_alarm/3,
|
||||
fully_strip_module/3,
|
||||
context_variables/1
|
||||
]).
|
||||
|
||||
/**
|
||||
* @defgroup yap_hacks YAP hacking
|
||||
* @ingroup library
|
||||
*
|
||||
* Manipulate the Prolog stacks, including setting and resetting
|
||||
* choice-points.
|
||||
*
|
||||
*/
|
||||
|
||||
|
||||
%% stack_dump
%
% Dump the whole execution stack (no depth limit) to user_error.
stack_dump :-
	stack_dump(-1).

%% stack_dump(+Max)
%
% Dump up to Max frames of the current choice-point and environment
% stacks to user_error; the formatted rows are produced by the
% internal '$hacks':display_stack_info/6 as Format-Args pairs and
% emitted by run_formats/2.
stack_dump(Max) :-
	current_choicepoints(CPs),
	current_continuations([Env|Envs]),
	continuation(Env,_,ContP,_),
	length(CPs, LCPs),
	length(Envs, LEnvs),
	% NOTE(review): this format string contains no argument-consuming
	% directives, yet [LCPs,LEnvs] is passed — the counts are never
	% printed; presumably leftover arguments YAP ignores.  Confirm.
	format(user_error,'~n~n~tStack Dump~t~40+~n~nAddress~tChoiceP~16+ Cur/Next Clause Goal~n',[LCPs,LEnvs]),
	'$hacks':display_stack_info(CPs, Envs, Max, ContP, StackInfo, []),
	run_formats(StackInfo, user_error).
|
||||
|
||||
%% run_formats(+FormatArgPairs, +Stream)
%
% Emit each Format-Args pair on Stream with format/3.
%
% BUG FIX: the original recursed with the hard-coded stream
% user_error instead of Stream, so only the first pair honoured the
% caller's stream argument (invisible today only because the one
% caller passes user_error).
run_formats([], _Stream).
run_formats([Format-Args|Rest], Stream) :-
	format(Stream, Format, Args),
	run_formats(Rest, Stream).
|
||||
|
||||
%% virtual_alarm(+Interval, :Goal, -Left)
%
% Install Goal as the SIGVTALRM handler and program a virtual (CPU
% time) alarm via the 4-argument builtin virtual_alarm/4.
% Interval == 0 cancels the alarm and returns the seconds left.
virtual_alarm(Interval, Goal, Left) :-
	Interval == 0, !,
	virtual_alarm(0, 0, Left0, _),
	on_signal(sig_vtalarm, _, Goal),
	Left = Left0.
% Whole seconds: arm the alarm with 0 microseconds.
virtual_alarm(Interval, Goal, Left) :-
	integer(Interval), !,
	on_signal(sig_vtalarm, _, Goal),
	virtual_alarm(Interval, 0, Left, _).
% NOTE(review): `Interval.USecs` is the legacy dot-pair notation, i.e.
% '.'(Interval, USecs) — a seconds/microseconds pair, with the time
% left returned in the same shape.  Presumably intentional YAP syntax;
% confirm it still parses under current YAP flag settings.
virtual_alarm(Interval.USecs, Goal, Left.LUSecs) :-
	on_signal(sig_vtalarm, _, Goal),
	virtual_alarm(Interval, USecs, Left, LUSecs).
|
||||
|
||||
%% fully_strip_module(?Term, -Module, -Goal)
%
% Strip ALL nested Module:Goal qualifiers from Term, returning the
% innermost module and the bare goal.  Thin wrapper over the internal
% '$hacks' implementation.
fully_strip_module(T,M,S) :-
	'$hacks':fully_strip_module(T,M,S).
|
283
packages/python/swig/yap4py/prolog/heaps.yap
Normal file
283
packages/python/swig/yap4py/prolog/heaps.yap
Normal file
@ -0,0 +1,283 @@
|
||||
/**
|
||||
* @file heaps.yap
|
||||
* @author R.A.O'Keefe, included as an YAP library by Vitor Santos Costa, 1999.
|
||||
* @date 29 November 1983
|
||||
*
|
||||
* @brief Implement heaps in Prolog.
|
||||
*
|
||||
*
|
||||
*/
|
||||
|
||||
:- module(heaps,[
|
||||
add_to_heap/4, % Heap x Key x Datum -> Heap
|
||||
get_from_heap/4, % Heap -> Key x Datum x Heap
|
||||
empty_heap/1, % Heap
|
||||
heap_size/2, % Heap -> Size
|
||||
heap_to_list/2, % Heap -> List
|
||||
list_to_heap/2, % List -> Heap
|
||||
min_of_heap/3, % Heap -> Key x Datum
|
||||
min_of_heap/5 % Heap -> (Key x Datum) x (Key x Datum)
|
||||
]).
|
||||
|
||||
|
||||
/** @defgroup heaps Heaps
|
||||
@ingroup library
|
||||
@{
|
||||
|
||||
A heap is a labelled binary tree where the key of each node is less than
|
||||
or equal to the keys of its sons. The point of a heap is that we can
|
||||
keep on adding new elements to the heap and we can keep on taking out
|
||||
the minimum element. If there are N elements total, the total time is
|
||||
O(NlgN). If you know all the elements in advance, you are better off
|
||||
doing a merge-sort, but this file is for when you want to do say a
|
||||
best-first search, and have no idea when you start how many elements
|
||||
there will be, let alone what they are.
|
||||
|
||||
The following heap manipulation routines are available once included
|
||||
with the `use_module(library(heaps))` command.
|
||||
|
||||
- add_to_heap/4
|
||||
- empty_heap/1
|
||||
- get_from_heap/4
|
||||
- heap_size/2
|
||||
- heap_to_list/2
|
||||
- list_to_heap/2
|
||||
- min_of_heap/3
|
||||
- min_of_heap/5
|
||||
|
||||
|
||||
A heap is a labelled binary tree where the key of each node is less
|
||||
than or equal to the keys of its sons. The point of a heap is that
|
||||
we can keep on adding new elements to the heap and we can keep on
|
||||
taking out the minimum element. If there are N elements total, the
|
||||
total time is O(NlgN). If you know all the elements in advance, you
|
||||
are better off doing a merge-sort, but this file is for when you want
|
||||
to do say a best-first search, and have no idea when you start how
|
||||
many elements there will be, let alone what they are.
|
||||
|
||||
A heap is represented as a triple t(N, Free, Tree) where N is the
|
||||
number of elements in the tree, Free is a list of integers which
|
||||
specifies unused positions in the tree, and Tree is a tree made of
|
||||
t terms for empty subtrees and
|
||||
t(Key,Datum,Lson,Rson) terms for the rest
|
||||
The nodes of the tree are notionally numbered like this:
|
||||
1
|
||||
2 3
|
||||
4 6 5 7
|
||||
8 12 10 14 9 13 11 15
|
||||
.. .. .. .. .. .. .. .. .. .. .. .. .. .. .. ..
|
||||
The idea is that if the maximum number of elements that have been in
|
||||
the heap so far is M, and the tree currently has K elements, the tree
|
||||
is some subtree of the tree of this form having exactly M elements,
|
||||
and the Free list is a list of M-K integers saying which of the
|
||||
positions in the M-element tree are currently unoccupied. This free
|
||||
list is needed to ensure that the cost of passing N elements through
|
||||
the heap is O(NlgM) instead of O(NlgN). For M say 100 and N say 10^4
|
||||
this means a factor of two. The cost of the free list is slight.
|
||||
The storage cost of a heap in a copying Prolog (which Dec-10 Prolog is
|
||||
not) is 2K+3M words.
|
||||
|
||||
|
||||
|
||||
*/
|
||||
|
||||
/*
|
||||
:- mode
|
||||
add_to_heap(+, +, +, -),
|
||||
add_to_heap(+, +, +, +, -),
|
||||
add_to_heap(+, +, +, +, +, +, -, -),
|
||||
sort2(+, +, +, +, -, -, -, -),
|
||||
get_from_heap(+, ?, ?, -),
|
||||
repair_heap(+, +, +, -),
|
||||
heap_size(+, ?),
|
||||
heap_to_list(+, -),
|
||||
heap_tree_to_list(+, -),
|
||||
heap_tree_to_list(+, +, -),
|
||||
list_to_heap(+, -),
|
||||
list_to_heap(+, +, +, -),
|
||||
min_of_heap(+, ?, ?),
|
||||
min_of_heap(+, ?, ?, ?, ?),
|
||||
min_of_heap(+, +, ?, ?).
|
||||
*/
|
||||
|
||||
|
||||
%% @pred add_to_heap(OldHeap, Key, Datum, NewHeap)
|
||||
%
|
||||
% inserts the new Key-Datum pair into the heap. The insertion is
|
||||
% not stable, that is, if you insert several pairs with the same
|
||||
% Key it is not defined which of them will come out first, and it
|
||||
% is possible for any of them to come out first depending on the
|
||||
% history of the heap. If you need a stable heap, you could add
|
||||
% a counter to the heap and include the counter at the time of
|
||||
% insertion in the key. If the free list is empty, the tree will
|
||||
% be grown, otherwise one of the empty slots will be re-used. (I
|
||||
% use imperative programming language, but the heap code is as
|
||||
% pure as the trees code, you can create any number of variants
|
||||
% starting from the same heap, and they will share what common
|
||||
% structure they can without interfering with each other.)
|
||||
|
||||
%% @pred add_to_heap(+OldHeap, +Key, +Datum, -NewHeap)
%
% Insert Key-Datum into the heap.  Insertion is not stable for equal
% keys.  A slot from the free list is reused when available; otherwise
% the tree grows by one position.
add_to_heap(t(Size,[],Tree0), Key, Datum, t(Size1,[],Tree)) :-
    !,
    Size1 is Size+1,
    add_to_heap(Size1, Key, Datum, Tree0, Tree).
add_to_heap(t(Size,[Slot|Slots],Tree0), Key, Datum, t(Size1,Slots,Tree)) :-
    Size1 is Size+1,
    add_to_heap(Slot, Key, Datum, Tree0, Tree).


% add_to_heap(+Position, +Key, +Datum, +Tree0, -Tree)
% Descend to node number Position, restoring heap order by keeping the
% smaller key at each node visited and pushing the larger one down.
add_to_heap(1, Key, Datum, _, t(Key,Datum,t,t)) :- !.
add_to_heap(Pos, Key, Datum, t(K0,D0,L0,R0), t(K1,D1,L1,R1)) :-
    Side is Pos mod 2,
    Parent is Pos//2,
    % Parent > 0: only called from list_to_heap/4 and add_to_heap/4
    sort2(Key, Datum, K0, D0, K1, D1, KDown, DDown),
    add_to_heap(Side, Parent, KDown, DDown, L0, R0, L1, R1).


% add_to_heap(+Side, +Position, +Key, +Datum, +L0, +R0, -L, -R)
% Side 0 descends into the left subtree, side 1 into the right.
add_to_heap(0, Pos, Key, Datum, L0, R, L, R) :-
    !,
    add_to_heap(Pos, Key, Datum, L0, L).
add_to_heap(1, Pos, Key, Datum, L, R0, L, R) :-
    !,
    add_to_heap(Pos, Key, Datum, R0, R).


% sort2(KA,DA, KB,DB, SmallK,SmallD, BigK,BigD)
% Order two key-datum pairs by the standard order of their keys.
sort2(KeyA, DatumA, KeyB, DatumB, KeyA, DatumA, KeyB, DatumB) :-
    KeyA @< KeyB,
    !.
sort2(KeyA, DatumA, KeyB, DatumB, KeyB, DatumB, KeyA, DatumA).
|
||||
|
||||
|
||||
|
||||
%% @pred @pred get_from_heap(+ _Heap_,- _key_,- _Datum_,- _Heap_)
|
||||
%
|
||||
% returns the Key-Datum pair in OldHeap with the smallest Key, and
|
||||
% also a New Heap which is the Old Heap with that pair deleted.
|
||||
% The easy part is picking off the smallest element. The hard part
|
||||
% is repairing the heap structure. repair_heap/4 takes a pair of
|
||||
% heaps and returns a new heap built from their elements, and the
|
||||
% position number of the gap in the new tree. Note that repair_heap
|
||||
% is *not* tail-recursive.
|
||||
|
||||
%% @pred get_from_heap(+OldHeap, -Key, -Datum, -NewHeap)
%
% Take the minimum Key-Datum pair off the heap; NewHeap is OldHeap
% with that pair deleted and the freed position pushed onto the free
% list.
get_from_heap(t(Size0,Free,t(Key,Datum,Left,Right)), Key, Datum, t(Size,[Gap|Free],Rebuilt)) :-
    Size is Size0-1,
    repair_heap(Left, Right, Rebuilt, Gap).


% repair_heap(+Left, +Right, -Merged, -GapPos)
% Rebuild one heap from two subheaps after their parent was removed,
% returning the node number of the hole left behind.  Note that this
% is *not* tail-recursive.
repair_heap(t(KA,DA,LA,RA), t(KB,DB,LB,RB), t(KB,DB,t(KA,DA,LA,RA),NewRight), Gap) :-
    KB @< KA,
    !,
    repair_heap(LB, RB, NewRight, Sub),
    Gap is 2*Sub+1.
repair_heap(t(KA,DA,LA,RA), t(KB,DB,LB,RB), t(KA,DA,NewLeft,t(KB,DB,LB,RB)), Gap) :-
    !,
    repair_heap(LA, RA, NewLeft, Sub),
    Gap is 2*Sub.
repair_heap(t(KA,DA,LA,RA), t, t(KA,DA,NewLeft,t), Gap) :-
    !,
    repair_heap(LA, RA, NewLeft, Sub),
    Gap is 2*Sub.
repair_heap(t, t(KB,DB,LB,RB), t(KB,DB,t,NewRight), Gap) :-
    !,
    repair_heap(LB, RB, NewRight, Sub),
    Gap is 2*Sub+1.
repair_heap(t, t, t, 1) :- !.
|
||||
|
||||
|
||||
|
||||
%% @pred heap_size(+ _Heap_, - _Size_)
|
||||
%
|
||||
% reports the number of elements currently in the heap.
|
||||
|
||||
%% @pred heap_size(+Heap, -Size)
%
% Size is the number of elements currently stored in the heap.
heap_size(t(Count,_,_), Count).


%% @pred heap_to_list(+Heap, -List)
%
% List holds the heap's Key-Datum pairs sorted into ascending order
% of keys.  Implemented as a merge sort that exploits the heap
% invariant: every subtree root precedes all of its descendants.
heap_to_list(t(_,_,Root), Pairs) :-
    heap_tree_to_list(Root, Pairs).


% heap_tree_to_list(+Tree, -SortedPairs)
heap_tree_to_list(t, []) :- !.
heap_tree_to_list(t(Key,Datum,Left,Right), [Key-Datum|Rest]) :-
    heap_tree_to_list(Left, FromLeft),
    heap_tree_to_list(Right, FromRight),
    heap_tree_to_list(FromLeft, FromRight, Rest).


% heap_tree_to_list(+SortedA, +SortedB, -Merged)
% Standard ordered merge of two sorted pair lists.
heap_tree_to_list([A|As], [B|Bs], [B|Rest]) :-
    B @< A,
    !,
    heap_tree_to_list([A|As], Bs, Rest).
heap_tree_to_list([A|As], Bs, [A|Rest]) :-
    !,
    heap_tree_to_list(As, Bs, Rest).
heap_tree_to_list([], Rest, Rest) :- !.
heap_tree_to_list(Rest, [], Rest).
|
||||
|
||||
|
||||
|
||||
%% @pred list_to_heap(+ _List_, - _Heap_)
|
||||
%
|
||||
% takes a list of Key-Datum pairs (such as keysort could be used to
|
||||
% sort) and forms them into a heap. We could do that a wee bit
|
||||
% faster by keysorting the list and building the tree directly, but
|
||||
% this algorithm makes it obvious that the result is a heap, and
|
||||
% could be adapted for use when the ordering predicate is not @<
|
||||
% and hence keysort is inapplicable.
|
||||
|
||||
%% @pred list_to_heap(+List, -Heap)
%
% Build a heap from a list of Key-Datum pairs (such as keysort could
% produce) by inserting them one by one into positions 1..N.
list_to_heap(Pairs, Heap) :-
    list_to_heap(Pairs, 0, t, Heap).


% list_to_heap(+Pairs, +InsertedSoFar, +Tree0, -Heap)
list_to_heap([], Count, Tree, t(Count,[],Tree)) :- !.
list_to_heap([Key-Datum|Pairs], Count0, Tree0, Heap) :-
    Count is Count0+1,
    add_to_heap(Count, Key, Datum, Tree0, Tree),
    list_to_heap(Pairs, Count, Tree, Heap).
|
||||
|
||||
|
||||
|
||||
%% @pred min_of_heap(Heap, Key, Datum)
|
||||
%
|
||||
% returns the Key-Datum pair at the top of the heap (which is of
|
||||
% course the pair with the smallest Key), but does not remove it
|
||||
% from the heap. It fails if the heap is empty.
|
||||
|
||||
|
||||
/** @pred min_of_heap(+ _Heap_, - _Key_, - _Datum_)
|
||||
|
||||
|
||||
Returns the Key-Datum pair at the top of the heap (which is of course
|
||||
the pair with the smallest Key), but does not remove it from the heap.
|
||||
*/
|
||||
min_of_heap(t(_,_,t(Key,Datum,_,_)), Key, Datum).
|
||||
|
||||
|
||||
%% @pred @pred min_of_heap(+ _Heap_, - _Key1_, - _Datum1_, -_Key2_, - _Datum2_)
|
||||
%
|
||||
% returns the smallest (Key1) and second smallest (Key2) pairs in
|
||||
% the heap, without deleting them. It fails if the heap does not
|
||||
% have at least two elements.
|
||||
% Key1-Datum1 is the root (smallest); the second smallest is the
% smaller of the two subtree roots.  Fails unless the heap holds at
% least two elements.
min_of_heap(t(_,_,t(TopKey,TopDatum,Left,Right)), TopKey, TopDatum, NextKey, NextDatum) :-
    min_of_heap(Left, Right, NextKey, NextDatum).


% min_of_heap(+Left, +Right, -Key, -Datum)
% Pick the smaller of the two subtree roots; either may be empty.
min_of_heap(t(KA,_,_,_), t(KB,DB,_,_), KB, DB) :-
    KB @< KA,
    !.
min_of_heap(t(KA,DA,_,_), _, KA, DA).
min_of_heap(t, t(KB,DB,_,_), KB, DB).
|
||||
|
||||
/** @pred empty_heap(? _Heap_)
|
||||
|
||||
|
||||
Succeeds if _Heap_ is an empty heap.
|
||||
*/
|
||||
empty_heap(t(0,[],t)).
|
||||
|
||||
|
||||
/** @} */
|
||||
|
49
packages/python/swig/yap4py/prolog/itries.yap
Normal file
49
packages/python/swig/yap4py/prolog/itries.yap
Normal file
@ -0,0 +1,49 @@
|
||||
/**
|
||||
* @file itries.yap
|
||||
* @author Ricardo Rocha
|
||||
* @date
|
||||
*
|
||||
* @brief Tries module for ILP
|
||||
*
|
||||
*
|
||||
*/
|
||||
|
||||
/*********************************
|
||||
File: itries.yap
|
||||
Author: Ricardo Rocha
|
||||
Comments: Tries module for ILP
|
||||
version: $ID$
|
||||
*********************************/
|
||||
|
||||
:- module(itries, [
|
||||
itrie_open/1,
|
||||
itrie_close/1,
|
||||
itrie_close_all/0,
|
||||
itrie_mode/2,
|
||||
itrie_timestamp/2,
|
||||
itrie_put_entry/2,
|
||||
itrie_update_entry/2,
|
||||
itrie_check_entry/3,
|
||||
itrie_get_entry/2,
|
||||
itrie_get_data/2,
|
||||
itrie_traverse/2,
|
||||
itrie_remove_entry/1,
|
||||
itrie_remove_subtree/1,
|
||||
itrie_add/2,
|
||||
itrie_subtract/2,
|
||||
itrie_join/2,
|
||||
itrie_intersect/2,
|
||||
itrie_count_join/3,
|
||||
itrie_count_intersect/3,
|
||||
itrie_save/2,
|
||||
itrie_save_as_trie/2,
|
||||
itrie_load/2,
|
||||
itrie_save2stream/2,
|
||||
itrie_loadFromstream/2,
|
||||
itrie_stats/4,
|
||||
itrie_max_stats/4,
|
||||
itrie_usage/4,
|
||||
itrie_print/1
|
||||
]).
|
||||
|
||||
:- load_foreign_files([itries], [], init_itries).
|
221
packages/python/swig/yap4py/prolog/lam_mpi.yap
Normal file
221
packages/python/swig/yap4py/prolog/lam_mpi.yap
Normal file
@ -0,0 +1,221 @@
|
||||
% Author: Nuno A. Fonseca
|
||||
% Date: 2006-06-01
|
||||
% $Id: lam_mpi.yap,v 1.1 2006-06-04 18:43:38 nunofonseca Exp $
|
||||
|
||||
|
||||
:- module(lam_mpi, [
|
||||
mpi_init/0,
|
||||
mpi_finalize/0,
|
||||
mpi_comm_size/1,
|
||||
mpi_comm_rank/1,
|
||||
mpi_version/2,
|
||||
mpi_send/3,
|
||||
mpi_isend/4,
|
||||
mpi_recv/3,
|
||||
mpi_irecv/3,
|
||||
mpi_wait/2,
|
||||
mpi_wait_recv/3,
|
||||
mpi_test/2,
|
||||
mpi_test_recv/3,
|
||||
mpi_bcast/2,
|
||||
mpi_ibcast2/2,
|
||||
mpi_ibcast2/3,
|
||||
mpi_bcast2/2,
|
||||
mpi_bcast2/3,
|
||||
mpi_barrier/0,
|
||||
mpi_msg_buffer_size/2,
|
||||
mpi_msg_size/2,
|
||||
mpi_gc/0,
|
||||
mpi_default_buffer_size/2
|
||||
]).
|
||||
|
||||
/**
|
||||
* @defgroup lam_mpi MPI Interface
|
||||
* @ingroup library
|
||||
@{
|
||||
|
||||
This library provides a set of utilities for interfacing with LAM MPI.
|
||||
The following routines are available once included with the
|
||||
`use_module(library(lam_mpi))` command. The yap should be
|
||||
invoked using the LAM mpiexec or mpirun commands (see LAM manual for
|
||||
more details).
|
||||
|
||||
|
||||
*/
|
||||
|
||||
|
||||
/** @pred mpi_barrier
|
||||
|
||||
|
||||
Collective communication predicate. Performs a barrier
|
||||
synchronization among all processes. Note that a collective
|
||||
communication means that all processes call the same predicate. To be
|
||||
able to use a regular `mpi_recv` to receive the messages, one
|
||||
should use `mpi_bcast2`.
|
||||
*/
|
||||
/** @pred mpi_bcast2(+ _Root_, ? _Data_)
|
||||
|
||||
|
||||
|
||||
Broadcasts the message _Data_ from the process with rank _Root_
|
||||
to all other processes.
|
||||
|
||||
|
||||
*/
|
||||
/** @pred mpi_comm_rank(- _Rank_)
|
||||
|
||||
|
||||
Unifies _Rank_ with the rank of the current process in the MPI environment.
|
||||
|
||||
|
||||
*/
|
||||
/** @pred mpi_comm_size(- _Size_)
|
||||
|
||||
|
||||
Unifies _Size_ with the number of processes in the MPI environment.
|
||||
|
||||
|
||||
*/
|
||||
/** @pred mpi_finalize
|
||||
|
||||
|
||||
Terminates the MPI execution environment. Every process must call this predicate before exiting.
|
||||
|
||||
|
||||
*/
|
||||
/** @pred mpi_gc
|
||||
|
||||
|
||||
|
||||
Attempts to perform garbage collection with all the open handles
|
||||
associated with send and non-blocking broadcasts. For each handle it
|
||||
tests it and, if the message has been delivered, the handle and the buffer
|
||||
are released.
|
||||
|
||||
|
||||
|
||||
|
||||
*/
|
||||
/** @pred mpi_init
|
||||
|
||||
|
||||
Sets up the mpi environment. This predicate should be called before any other MPI predicate.
|
||||
|
||||
|
||||
*/
|
||||
/** @pred mpi_irecv(? _Source_,? _Tag_,- _Handle_)
|
||||
|
||||
|
||||
|
||||
Non-blocking communication predicate. The predicate returns an
|
||||
_Handle_ for a message that will be received from processor with
|
||||
rank _Source_ and tag _Tag_. Note that the predicate succeeds
|
||||
immediately, even if no message has been received. The predicate
|
||||
`mpi_wait_recv` should be used to obtain the data associated to
|
||||
the handle.
|
||||
|
||||
|
||||
*/
|
||||
/** @pred mpi_isend(+ _Data_,+ _Dest_,+ _Tag_,- _Handle_)
|
||||
|
||||
|
||||
|
||||
Non blocking communication predicate. The message in _Data_, with
|
||||
tag _Tag_, is sent whenever possible to the processor with rank
|
||||
_Dest_. An _Handle_ to the message is returned to be used to
|
||||
check for the status of the message, using the `mpi_wait` or
|
||||
`mpi_test` predicates. Until `mpi_wait` is called, the
|
||||
memory allocated for the buffer containing the message is not
|
||||
released.
|
||||
|
||||
|
||||
*/
|
||||
/** @pred mpi_msg_size( _Msg_, - _MsgSize_)
|
||||
|
||||
|
||||
Unify _MsgSize_ with the number of bytes YAP would need to send the
|
||||
message _Msg_.
|
||||
|
||||
|
||||
*/
|
||||
/** @pred mpi_recv(? _Source_,? _Tag_,- _Data_)
|
||||
|
||||
|
||||
|
||||
Blocking communication predicate. The predicate blocks until a message
|
||||
is received from processor with rank _Source_ and tag _Tag_.
|
||||
The message is placed in _Data_.
|
||||
|
||||
|
||||
*/
|
||||
/** @pred mpi_send(+ _Data_,+ _Dest_,+ _Tag_)
|
||||
|
||||
|
||||
|
||||
Blocking communication predicate. The message in _Data_, with tag
|
||||
_Tag_, is sent immediately to the processor with rank _Dest_.
|
||||
The predicate succeeds after the message being sent.
|
||||
|
||||
|
||||
*/
|
||||
/** @pred mpi_test(? _Handle_,- _Status_)
|
||||
|
||||
|
||||
|
||||
Provides information regarding the handle _Handle_, ie., if a
|
||||
communication operation has been completed. If the operation
|
||||
associated with _Handle_ has been completed, the predicate succeeds
|
||||
with the completion status in _Status_, otherwise it fails.
|
||||
|
||||
|
||||
*/
|
||||
/** @pred mpi_test_recv(? _Handle_,- _Status_,- _Data_)
|
||||
|
||||
|
||||
|
||||
Provides information regarding a handle. If the message associated
|
||||
with handle _Handle_ is buffered then the predicate succeeds
|
||||
unifying _Status_ with the status of the message and _Data_
|
||||
with the message itself. Otherwise, the predicate fails.
|
||||
|
||||
|
||||
*/
|
||||
/** @pred mpi_version(- _Major_,- _Minor_)
|
||||
|
||||
|
||||
Unifies _Major_ and _Minor_ with, respectively, the major and minor version of the MPI.
|
||||
|
||||
|
||||
*/
|
||||
/** @pred mpi_wait(? _Handle_,- _Status_)
|
||||
|
||||
|
||||
|
||||
Completes a non-blocking operation. If the operation was a
|
||||
`mpi_send`, the predicate blocks until the message is buffered
|
||||
or sent by the runtime system. At this point the send buffer is
|
||||
released. If the operation was a `mpi_recv`, it waits until the
|
||||
message is copied to the receive buffer. _Status_ is unified with
|
||||
the status of the message.
|
||||
|
||||
|
||||
*/
|
||||
/** @pred mpi_wait_recv(? _Handle_,- _Status_,- _Data_)
|
||||
|
||||
|
||||
|
||||
Completes a non-blocking receive operation. The predicate blocks until
|
||||
a message associated with handle _Handle_ is buffered. The
|
||||
predicate succeeds unifying _Status_ with the status of the
|
||||
message and _Data_ with the message itself.
|
||||
|
||||
|
||||
*/
|
||||
|
||||
:- load_foreign_files([yap_mpi], [], init_mpi).
|
||||
|
||||
%% @pred mpi_msg_size(+Msg, -MsgSize)
%
% MsgSize is the number of bytes YAP would need to send Msg: export
% the term, read off the buffer size, then release the buffer.
mpi_msg_size(Msg, SizeInBytes) :-
    terms:export_term(Msg, Exported, SizeInBytes),
    terms:kill_exported_term(Exported).
|
||||
/** @} */
|
||||
|
217
packages/python/swig/yap4py/prolog/lambda.pl
Normal file
217
packages/python/swig/yap4py/prolog/lambda.pl
Normal file
@ -0,0 +1,217 @@
|
||||
/**
|
||||
* @file lambda.pl
|
||||
* @author Ulrich Neumerkel
|
||||
* @date 2009
|
||||
*
|
||||
* @brief Lambda expressions in Prolog.
|
||||
*
|
||||
*
|
||||
*/
|
||||
/*
|
||||
Author:
|
||||
E-mail: ulrich@complang.tuwien.ac.at
|
||||
Copyright (C): 2009 Ulrich Neumerkel. All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions are
|
||||
met:
|
||||
|
||||
1. Redistributions of source code must retain the above copyright
|
||||
notice, this list of conditions and the following disclaimer.
|
||||
|
||||
2. Redistributions in binary form must reproduce the above copyright
|
||||
notice, this list of conditions and the following disclaimer in the
|
||||
documentation and/or other materials provided with the distribution.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY Ulrich Neumerkel ``AS IS'' AND ANY
|
||||
EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
||||
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
|
||||
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL Ulrich Neumerkel OR
|
||||
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
|
||||
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
|
||||
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
|
||||
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
|
||||
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
|
||||
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
The views and conclusions contained in the software and documentation
|
||||
are those of the authors and should not be interpreted as representing
|
||||
official policies, either expressed or implied, of Ulrich Neumerkel.
|
||||
|
||||
|
||||
|
||||
*/
|
||||
|
||||
:- module(lambda, [
|
||||
(^)/3, (^)/4, (^)/5, (^)/6, (^)/7, (^)/8, (^)/9,
|
||||
(\)/1, (\)/2, (\)/3, (\)/4, (\)/5, (\)/6, (\)/7,
|
||||
(+\)/2, (+\)/3, (+\)/4, (+\)/5, (+\)/6, (+\)/7,
|
||||
op(201,xfx,+\)]).
|
||||
|
||||
/**
|
||||
@defgroup Lambda expressions
|
||||
@ingroup library
|
||||
|
||||
This library provides lambda expressions to simplify higher order
|
||||
programming based on call/N.
|
||||
|
||||
Lambda expressions are represented by ordinary Prolog terms.
|
||||
There are two kinds of lambda expressions:
|
||||
|
||||
~~~~
|
||||
Free+\X1^X2^ ..^XN^Goal
|
||||
|
||||
\X1^X2^ ..^XN^Goal
|
||||
~~~~
|
||||
|
||||
The second is a shorthand for t+\X1^X2^..^XN^Goal
|
||||
|
||||
+ _Xi_ are the parameters.
|
||||
|
||||
+ _Goal_ is a goal or continuation. Syntax note: Operators within Goal
|
||||
require parentheses due to the low precedence of the ^ operator.
|
||||
|
||||
+ _Free_ contains variables that are valid outside the scope of the lambda
|
||||
expression. They are thus free variables within.
|
||||
|
||||
All other variables of Goal are considered local variables. They must
|
||||
not appear outside the lambda expression. This restriction is
|
||||
currently not checked. Violations may lead to unexpected bindings.
|
||||
|
||||
In the following example the parentheses around X>3 are necessary.
|
||||
|
||||
~~~~~
|
||||
?- use_module(library(lambda)).
|
||||
?- use_module(library(apply)).
|
||||
|
||||
?- maplist(\X^(X>3),[4,5,9]).
|
||||
true.
|
||||
~~~~~
|
||||
|
||||
In the following _X_ is a variable that is shared by both instances of
|
||||
the lambda expression. The second query illustrates the cooperation of
|
||||
continuations and lambdas. The lambda expression is in this case a
|
||||
continuation expecting a further argument.
|
||||
|
||||
~~~~~
|
||||
?- Xs = [A,B], maplist(X+\Y^dif(X,Y), Xs).
|
||||
Xs = [A, B],
|
||||
dif(X, A),
|
||||
dif(X, B).
|
||||
|
||||
?- Xs = [A,B], maplist(X+\dif(X), Xs).
|
||||
Xs = [A, B],
|
||||
dif(X, A),
|
||||
dif(X, B).
|
||||
~~~~~
|
||||
|
||||
The following queries are all equivalent. To see this, use
|
||||
the fact f(x,y).
|
||||
~~~~~
|
||||
?- call(f,A1,A2).
|
||||
?- call(\X^f(X),A1,A2).
|
||||
?- call(\X^Y^f(X,Y), A1,A2).
|
||||
?- call(\X^(X+\Y^f(X,Y)), A1,A2).
|
||||
?- call(call(f, A1),A2).
|
||||
?- call(f(A1),A2).
|
||||
?- f(A1,A2).
|
||||
A1 = x,
|
||||
A2 = y.
|
||||
~~~~~
|
||||
|
||||
Further discussions
|
||||
http://www.complang.tuwien.ac.at/ulrich/Prolog-inedit/ISO-Hiord
|
||||
|
||||
@tbd Static expansion similar to apply_macros.
|
||||
@author Ulrich Neumerkel
|
||||
*/
|
||||
|
||||
:- meta_predicate no_hat_call(0).
|
||||
|
||||
:- meta_predicate
|
||||
^(?,0,?),
|
||||
^(?,1,?,?),
|
||||
^(?,2,?,?,?),
|
||||
^(?,3,?,?,?,?),
|
||||
^(?,4,?,?,?,?,?).
|
||||
|
||||
% (^)/3..(^)/9: apply a Parameter^Goal lambda.  The first extra
% argument is bound to the parameter, then Goal is run on the
% remaining arguments via call/N.
^(Param, Goal, Param) :-
    no_hat_call(Goal).
^(Param, Goal, Param, A) :-
    call(Goal, A).
^(Param, Goal, Param, A, B) :-
    call(Goal, A, B).
^(Param, Goal, Param, A, B, C) :-
    call(Goal, A, B, C).
^(Param, Goal, Param, A, B, C, D) :-
    call(Goal, A, B, C, D).
^(Param, Goal, Param, A, B, C, D, E) :-
    call(Goal, A, B, C, D, E).
^(Param, Goal, Param, A, B, C, D, E, F) :-
    call(Goal, A, B, C, D, E, F).
|
||||
|
||||
:- meta_predicate
|
||||
\(0),
|
||||
\(1,?),
|
||||
\(2,?,?),
|
||||
\(3,?,?,?),
|
||||
\(4,?,?,?,?),
|
||||
\(5,?,?,?,?,?),
|
||||
\(6,?,?,?,?,?,?).
|
||||
|
||||
% (\)/1..(\)/7: \X1^..^XN^Goal lambdas with no free variables.  Each
% invocation renames the whole lambda with copy_term_nat/2 so separate
% calls do not share parameter bindings.
\(Lambda) :-
    copy_term_nat(Lambda, Fresh),
    no_hat_call(Fresh).
\(Lambda, A) :-
    copy_term_nat(Lambda, Fresh),
    call(Fresh, A).
\(Lambda, A, B) :-
    copy_term_nat(Lambda, Fresh),
    call(Fresh, A, B).
\(Lambda, A, B, C) :-
    copy_term_nat(Lambda, Fresh),
    call(Fresh, A, B, C).
\(Lambda, A, B, C, D) :-
    copy_term_nat(Lambda, Fresh),
    call(Fresh, A, B, C, D).
\(Lambda, A, B, C, D, E) :-
    copy_term_nat(Lambda, Fresh),
    call(Fresh, A, B, C, D, E).
\(Lambda, A, B, C, D, E, F) :-
    copy_term_nat(Lambda, Fresh),
    call(Fresh, A, B, C, D, E, F).
|
||||
|
||||
:- meta_predicate
|
||||
+\(?,0),
|
||||
+\(?,1,?),
|
||||
+\(?,2,?,?),
|
||||
+\(?,3,?,?,?),
|
||||
+\(?,4,?,?,?,?),
|
||||
+\(?,5,?,?,?,?,?),
|
||||
+\(?,6,?,?,?,?,?,?).
|
||||
|
||||
% (+\)/2..(+\)/7: Free+\Lambda — copy the lambda while keeping the
% variables in Free shared with the caller, so they remain visible
% outside the abstraction.
+\(Free, Lambda) :-
    copy_term_nat(Free+Lambda, Free+Fresh),
    no_hat_call(Fresh).
+\(Free, Lambda, A) :-
    copy_term_nat(Free+Lambda, Free+Fresh),
    call(Fresh, A).
+\(Free, Lambda, A, B) :-
    copy_term_nat(Free+Lambda, Free+Fresh),
    call(Fresh, A, B).
+\(Free, Lambda, A, B, C) :-
    copy_term_nat(Free+Lambda, Free+Fresh),
    call(Fresh, A, B, C).
+\(Free, Lambda, A, B, C, D) :-
    copy_term_nat(Free+Lambda, Free+Fresh),
    call(Fresh, A, B, C, D).
+\(Free, Lambda, A, B, C, D, E) :-
    copy_term_nat(Free+Lambda, Free+Fresh),
    call(Fresh, A, B, C, D, E).
+\(Free, Lambda, A, B, C, D, E, F) :-
    copy_term_nat(Free+Lambda, Free+Fresh),
    call(Fresh, A, B, C, D, E, F).
|
||||
|
||||
|
||||
%% no_hat_call(:Goal)
|
||||
%
|
||||
% Like call, but issues an error for a goal (^)/2. Such goals are
|
||||
% likely the result of an insufficient number of arguments.
|
||||
|
||||
%% no_hat_call(:Goal)
%
% Like call/1, but raises an existence error for a bare (^)/2 goal —
% almost always the sign of a lambda applied to too few arguments.
no_hat_call(QualifiedGoal) :-
    strip_module(QualifiedGoal, _, Bare),
    (   nonvar(Bare),
        Bare = (_^_)
    ->  throw(error(existence_error(lambda_parameters,Bare),_))
    ;   call(QualifiedGoal)
    ).
|
||||
|
||||
% I would like to replace this by:
|
||||
% V1^Goal :- throw(error(existence_error(lambda_parameters,V1^Goal),_)).
|
527
packages/python/swig/yap4py/prolog/lineutils.yap
Normal file
527
packages/python/swig/yap4py/prolog/lineutils.yap
Normal file
@ -0,0 +1,527 @@
|
||||
/**
|
||||
* @file lineutils.yap
|
||||
* @author VITOR SANTOS COSTA <vsc@VITORs-MBP.lan>
|
||||
* @date Tue Nov 17 22:02:22 2015
|
||||
*
|
||||
* @brief line text processing.
|
||||
*
|
||||
*
|
||||
*/
|
||||
|
||||
:- module(lineutils,
|
||||
[search_for/2,
|
||||
search_for/3,
|
||||
scan_natural/3,
|
||||
scan_integer/3,
|
||||
natural/3,
|
||||
integer/3,
|
||||
blank/3,
|
||||
split/2,
|
||||
split/3,
|
||||
split/4,
|
||||
split/5,
|
||||
split_unquoted/3,
|
||||
fields/2,
|
||||
fields/3,
|
||||
glue/3,
|
||||
copy_line/2,
|
||||
filter/3,
|
||||
file_filter/3,
|
||||
file_select/2,
|
||||
file_filter_with_initialization/5,
|
||||
file_filter_with_start_end/5,
|
||||
file_filter_with_initialization/5 as file_filter_with_init,
|
||||
process/2
|
||||
]).
|
||||
|
||||
/** @defgroup line_utils Line Manipulation Utilities
|
||||
@ingroup library
|
||||
@{
|
||||
|
||||
This package provides a set of useful predicates to manipulate
|
||||
sequences of characters codes, usually first read in as a line. It is
|
||||
available by loading the
|
||||
~~~~
|
||||
:- use_module(library(lineutils)).
|
||||
~~~~
|
||||
|
||||
|
||||
*/
|
||||
|
||||
:- meta_predicate
|
||||
filter(+,+,2),
|
||||
file_filter(+,+,2),
|
||||
file_filter_with_initialization(+,+,2,+,:),
|
||||
file_filter_with_start_end(+,+,2,2,2),
|
||||
process(+,1).
|
||||
|
||||
:- use_module(library(lists),
|
||||
[member/2,
|
||||
append/3]).
|
||||
|
||||
:- use_module(library(readutil),
|
||||
[read_line_to_codes/2]).
|
||||
|
||||
/**
|
||||
@pred search_for(+ _Char_,+ _Line_)
|
||||
Search for a character _Char_ in the list of codes _Line_.
|
||||
*/
|
||||
%% @pred search_for(+Char, +Line)
%
% Succeed if code Char occurs anywhere in the code list Line.
search_for(Char, Line) :-
    search_for(Char, Line, []).

% search_for(+Char)//: consume input up to and including Char.
search_for(Char) --> [Char], !.
search_for(Char) -->
    [_],
    search_for(Char).
|
||||
|
||||
/** @pred scan_integer(? _Int_,+ _Line_,+ _RestOfLine_)
|
||||
|
||||
Scan the list of codes _Line_ for an integer _Nat_, either a
|
||||
positive, zero, or negative integer, and unify _RestOfLine_ with
|
||||
the remainder of the line.
|
||||
*/
|
||||
%% @pred scan_integer(?Int, +Line, -RestOfLine)
%
% Read a decimal integer, optionally preceded by '-', from the code
% list Line, leaving the unconsumed tail in RestOfLine.
scan_integer(Value) -->
    "-",
    !,
    scan_natural(0, Magnitude),
    % BUG FIX: arithmetic must be escaped with {}/1 inside a DCG body;
    % unbracketed, DCG translation turns it into a call to is/4.
    { Value is -Magnitude }.
scan_integer(Value) -->
    scan_natural(0, Value).
|
||||
|
||||
/** @pred integer(? _Int_,+ _Line_,+ _RestOfLine_)
|
||||
|
||||
Scan the list of codes _Line_ for an integer _Nat_, either a
|
||||
positive, zero, or negative integer, and unify _RestOfLine_ with
|
||||
the remainder of the line.
|
||||
*/
|
||||
%% @pred integer(?Int, +Line, -RestOfLine)
%
% Like scan_integer//1: read an optionally '-'-signed decimal integer
% from the code list Line.
integer(Value) -->
    "-",
    !,
    natural(0, Magnitude),
    % BUG FIX: escape the arithmetic with {}/1; unbracketed goals in a
    % DCG body get two extra difference-list arguments appended.
    { Value is -Magnitude }.
integer(Value) -->
    natural(0, Value).
|
||||
|
||||
/** @pred scan_natural(? _Nat_,+ _Line_,+ _RestOfLine_)
|
||||
|
||||
Scan the list of codes _Line_ for a natural number _Nat_, zero
|
||||
or a positive integer, and unify _RestOfLine_ with the remainder
|
||||
of the line.
|
||||
*/
|
||||
%% @pred scan_natural(?Nat, +Line, -RestOfLine)
%
% Read a (possibly empty) run of decimal digits from the code list
% Line as a natural number Nat.
scan_natural(Value) -->
    scan_natural(0, Value).

% scan_natural(+Acc, -Value)//: fold digits into the accumulator.
scan_natural(Acc, Value) -->
    [Digit],
    { Digit >= 0'0, Digit =< 0'9 },
    !,
    { Acc1 is Acc*10 + (Digit - 0'0) },
    % BUG FIX: recurse through scan_natural//2 itself; the original
    % called an undefined get_natural//2.
    scan_natural(Acc1, Value).
scan_natural(Value, Value) --> [].
|
||||
|
||||
/** @pred natural(? _Nat_,+ _Line_,+ _RestOfLine_)
|
||||
|
||||
Scan the list of codes _Line_ for a natural number _Nat_, zero
|
||||
or a positive integer, and unify _RestOfLine_ with the remainder
|
||||
of the line.
|
||||
*/
|
||||
%% @pred natural(?Nat, +Line, -RestOfLine)
%
% Read a (possibly empty) run of decimal digits from the code list
% Line as a natural number Nat.
natural(Value) -->
    natural(0, Value).

% natural(+Acc, -Value)//: fold digits into the accumulator.
natural(Acc, Value) -->
    [Digit],
    { Digit >= 0'0, Digit =< 0'9 },
    !,
    { Acc1 is Acc*10 + (Digit - 0'0) },
    % BUG FIX: recurse through natural//2 itself; the original called
    % an undefined get_natural//2.
    natural(Acc1, Value).
natural(Value, Value) --> [].
|
||||
|
||||
/** @pred skip_whitespace(+ _Line_,+ _RestOfLine_)
|
||||
|
||||
Scan the list of codes _Line_ for white space, namely for tabbing and space characters.
|
||||
*/
|
||||
%% @pred skip_whitespace(-Blanks, +Line, -RestOfLine)
%
% Consume leading whitespace (spaces and tabs) from the code list,
% returning the consumed codes in Blanks.
skip_whitespace([0' |Blanks]) -->
    " ",
    skip_whitespace( Blanks ).
% BUG FIX: the second clause duplicated the space case; per the
% documentation ("tabbing and space characters") it should accept a
% tab — the literal tab was presumably mangled into spaces.
skip_whitespace([0'\t|Blanks]) -->
    "\t",
    skip_whitespace( Blanks ).
skip_whitespace( [] ) -->
    !.
|
||||
|
||||
/** @pred blank(+ _Line_,+ _RestOfLine_)
|
||||
|
||||
The list of codes _Line_ is formed by white space, namely by tabbing and space characters.
|
||||
*/
|
||||
%% @pred blank(-Blanks, +Line, -RestOfLine)
%
% The code list Line starts with whitespace (spaces and tabs);
% Blanks is the consumed run.
blank([0' |Blanks]) -->
    " ",
    blank( Blanks ).
% BUG FIX: the second clause duplicated the space case; per the
% documentation ("tabbing and space characters") it should accept a
% tab — the literal tab was presumably mangled into spaces.
blank([0'\t|Blanks]) -->
    "\t",
    blank( Blanks ).
blank( [] ) -->
    [].
|
||||
|
||||
|
||||
/** @pred split(+ _Line_,- _Split_)
|
||||
|
||||
Unify _Split_ with a set of strings obtained from _Line_ by
|
||||
using the blank characters as separators.
|
||||
*/
|
||||
%% @pred split(+Line, -Split)
%
% Split the code list Line into words, using blanks as separators.
split(Line, Words) :-
    split_at_blank(" ", Words, Line, []).
|
||||
|
||||
/** @pred split(+ _Line_,+ _Separators_,- _Split_)
|
||||
|
||||
|
||||
|
||||
Unify _Words_ with a set of strings obtained from _Line_ by
|
||||
using the character codes in _Separators_ as separators. As an
|
||||
example, consider:
|
||||
|
||||
~~~~~{.prolog}
|
||||
?- split("Hello * I am free"," *",S).
|
||||
|
||||
S = ["Hello","I","am","free"] ?
|
||||
|
||||
no
|
||||
~~~~~
|
||||
|
||||
*/
|
||||
% split/3: entry point — parse String into the list of words Strings,
% treating any code in SplitCodes as a separator.
split(String, SplitCodes, Strings) :-
	split_at_blank(SplitCodes, Strings, String, []).

% split_at_blank(+SplitCodes, -Strings)//: skip leading separators, then
% start collecting a word at the first non-separator code.
split_at_blank(SplitCodes, More) -->
	[C],
	{ member(C, SplitCodes) }, !,
	split_at_blank(SplitCodes, More).
split_at_blank(SplitCodes, [[C|New]| More]) -->
	[C], !,
	split_(SplitCodes, New, More).
split_at_blank(_, []) --> [].

% split_(+SplitCodes, -Word, -MoreWords)//: collect the codes of the
% current word until a separator or the end of input is reached.
split_(SplitCodes, [], More) -->
	[C],
	{ member(C, SplitCodes) }, !,
	split_at_blank(SplitCodes, More).
split_(SplitCodes, [C|New], Set) -->
	[C], !,
	split_(SplitCodes, New, Set).
split_(_, [], []) --> [].
|
||||
|
||||
|
||||
% split/5: like split/3, but SingleQs lists escape codes (the following
% code is taken verbatim) and DoubleQs lists quote codes (separators are
% ignored until the matching quote recurs).
split(Text, SplitCodes, DoubleQs, SingleQs, Strings) :-
	split_element(SplitCodes, DoubleQs, SingleQs, Strings, Text, []).

% split_element//4: fetch the next code, if any, and dispatch on it.
split_element(SplitCodes, DoubleQs, SingleQs, Strings) -->
	[C],
	!,
	split_element(SplitCodes, DoubleQs, SingleQs, Strings, C).
split_element(_SplitCodes, _DoubleQs, _SingleQs, []) --> !.
split_element(_SplitCodes, _DoubleQs, _SingleQs, [[]]) --> [].

% split_element//5: C is the code just read.
% escape code: keep the following code unconditionally
split_element(SplitCodes, DoubleQs, SingleQs, Strings, C) -->
	{ member( C, SingleQs ) },
	!,
	[C2],
	{ Strings = [[C2|String]|More] },
	split_element(SplitCodes, DoubleQs, SingleQs, [String| More]).
% separator: terminate the current word
split_element(SplitCodes, DoubleQs, SingleQs, [[]|Strings], C) -->
	{ member( C, SplitCodes ) },
	!,
	split_element(SplitCodes, DoubleQs, SingleQs, Strings).
% opening quote: switch to quoted mode, remembering the opener as C-DoubleQs
split_element(SplitCodes, DoubleQs, SingleQs, Strings, C) -->
	{ member( C, DoubleQs ) } ,
	!,
	split_within(SplitCodes, C-DoubleQs, SingleQs, Strings).
% ordinary code: add it to the current word
split_element(SplitCodes, DoubleQs, SingleQs, [[C|String]|Strings], C) -->
	split_element(SplitCodes, DoubleQs, SingleQs, [String|Strings]).

% split_within//4: quoted mode — separators no longer end words.
split_within(SplitCodes, DoubleQs, SingleQs, Strings) -->
	[C],
	split_within(SplitCodes, DoubleQs, SingleQs, Strings, C).

split_within(SplitCodes, DoubleQs, SingleQs, Strings, C) -->
	{ member( C, SingleQs ) },
	!,
	[C2],
	{ Strings = [[C2|String]|More] },
	split_within(SplitCodes, DoubleQs, SingleQs, [String| More]).
% NOTE(review): the opener is stored as C-DoubleQs in the SECOND argument
% by split_element//5 above, but this clause matches C-SingleQs in the
% THIRD argument — the two look inconsistent, so this "closing quote"
% clause may never fire; confirm intended behavior before relying on it.
split_within(SplitCodes, DoubleQs, C-SingleQs, Strings, C) -->
	!,
	split_element(SplitCodes, DoubleQs, SingleQs, Strings).
split_within(SplitCodes, DoubleQs, SingleQs, [[C|String]|Strings], C) -->
	split_within(SplitCodes, DoubleQs, SingleQs, [String|Strings]).
|
||||
|
||||
/** @pred split_unquoted(+ _Line_,+ _Separators_,- _Split_)
|
||||
|
||||
|
||||
|
||||
Unify _Words_ with a set of strings obtained from _Line_ by
|
||||
using the character codes in _Separators_ as separators, but treat text
within double quotes as a single unit. As an
|
||||
example, consider:
|
||||
|
||||
~~~~~{.prolog}
|
||||
?- split("Hello * I \"am free\""," *",S).
|
||||
|
||||
S = ["Hello","I","am free"] ?
|
||||
|
||||
no
|
||||
~~~~~
|
||||
|
||||
*/
|
||||
% split_unquoted/3: like split/3, but a double-quoted stretch counts as
% a single word (the opening quote code is kept in the result).
split_unquoted(String, SplitCodes, Strings) :-
	split_unquoted_at_blank(SplitCodes, Strings, String, []).

% opening double quote: collect the quoted word via split_quoted//2.
% NOTE(review): More is used both as split_quoted's second argument and
% as the result of the recursive call below — confirm this sharing is
% intended (split_quoted//2 ignores its second argument in its base
% clause, see below).
split_unquoted_at_blank(SplitCodes, [[0'"|New]|More]) --> %0'"
	"\"",
	split_quoted(New, More),
	split_unquoted_at_blank(SplitCodes, More).
% separators are skipped
split_unquoted_at_blank(SplitCodes, More) -->
	[C],
	{ member(C, SplitCodes) }, !,
	split_unquoted_at_blank(SplitCodes, More).
% any other code starts a new word
split_unquoted_at_blank(SplitCodes, [[C|New]| More]) -->
	[C], !,
	split_unquoted(SplitCodes, New, More).
split_unquoted_at_blank(_, []) --> [].

% split_unquoted//3: collect the current word up to a separator.
split_unquoted(SplitCodes, [], More) -->
	[C],
	{ member(C, SplitCodes) }, !,
	split_unquoted_at_blank(SplitCodes, More).
split_unquoted(SplitCodes, [C|New], Set) -->
	[C], !,
	split_unquoted(SplitCodes, New, Set).
split_unquoted(_, [], []) --> [].
|
||||
|
||||
|
||||
/** @pred split_quoted(+ _Line_,+ _Separators_, GroupQuotes, SingleQuotes, - _Split_)
|
||||
|
||||
|
||||
|
||||
Unify _Words_ with a set of strings obtained from _Line_ by
|
||||
using the character codes in _Separators_ as separators, but treat text within quotes as a single unit. As an
|
||||
example, consider:
|
||||
|
||||
~~~~~{.prolog}
|
||||
?- split_quoted("Hello * I \"am free\""," *",S).
|
||||
|
||||
S = ["Hello","I","am free"] ?
|
||||
|
||||
no
|
||||
~~~~~
|
||||
|
||||
*/
|
||||
% split_quoted//2: consume codes up to and including the closing double
% quote; a backslash escapes the following code.  The second argument is
% untouched by the base clause (see NOTE in split_unquoted_at_blank//2).
split_quoted( [0'"], _More) --> %0'"
	"\"".
% backslash escape: keep the backslash and the escaped code verbatim
split_quoted( [0'\\ ,C|New], More) -->
	%0'"
	"\\",
	[C],
	split_quoted(New, More).
% ordinary code inside the quotes
split_quoted( [C|New], More) --> %0'"
	[C],
	split_quoted(New, More).
|
||||
|
||||
/** @pred fields(+ _Line_,- _Split_)
|
||||
|
||||
Unify _Words_ with a set of strings obtained from _Line_ by
|
||||
using the blank characters as field separators.
|
||||
|
||||
*/
|
||||
% fields/2: split String into fields at blanks.
% NOTE(review): after DCG expansion this calls fields/4, but only the
% non-DCG fields/3 and dofields//3 are defined in this file chunk —
% confirm fields//2 exists elsewhere, or whether this should read
% dofields-based fields(String, " ", Strings).
fields(String, Strings) :-
	fields(" ", Strings, String, []).
|
||||
|
||||
/** @pred fields(+ _Line_,+ _Separators_,- _Split_)
|
||||
|
||||
Unify _Words_ with a set of strings obtained from _Line_ by
|
||||
using the character codes in _Separators_ as separators for
|
||||
fields. If two separators occur in a row, the field is considered
|
||||
empty. As an example, consider:
|
||||
|
||||
~~~~~{.prolog}
|
||||
?- fields("Hello I am free"," *",S).
|
||||
|
||||
S = ["Hello","","I","am","","free"] ?
|
||||
~~~~~
|
||||
*/
|
||||
%% fields(+Line, +FieldsCodes, -Strings)
%
% Split Line into fields using any code in FieldsCodes as a field
% separator.  Two adjacent separators produce an empty field; an empty
% line produces no fields at all.
fields(String, FieldsCodes, Strings) :-
	dofields(FieldsCodes, First, More, String, []),
	(
	  First = [], More = []
	->
	  Strings = []
	;
	  Strings = [First|More]
	).

% dofields(+FieldsCodes, -Field, -MoreFields)//: collect the codes of
% the current field; a separator ends it and starts the next field.
% Fix: the original head carried `New.More`, a typo for the list cell
% [New|More] (the body already treats New/More as field and tail).
dofields(FieldsCodes, [], [New|More]) -->
	[C],
	{ member(C, FieldsCodes) }, !,
	dofields(FieldsCodes, New, More).
dofields(FieldsCodes, [C|New], Set) -->
	[C], !,
	dofields(FieldsCodes, New, Set).
dofields(_, [], []) --> [].
|
||||
|
||||
/** @pred glue(+ _Words_,+ _Separator_,- _Line_)
|
||||
|
||||
Unify _Line_ with string obtained by glueing _Words_ with
|
||||
the character code _Separator_.
|
||||
*/
|
||||
%% glue(+Words, +Separator, -Line)
%
% Join the code lists in Words into Line, inserting the first code of
% Separator between consecutive words.
glue([], _, []).
glue([Word], _, Word) :- !.
glue([Word|Words], [Sep|_], Line) :-
	append(Word, [Sep|Rest], Line),
	glue(Words, [Sep], Rest).
|
||||
|
||||
/** @pred copy_line(+ _StreamInput_,+ _StreamOutput_)
|
||||
|
||||
Copy a line from _StreamInput_ to _StreamOutput_.
|
||||
*/
|
||||
%% copy_line(+StreamInp, +StreamOut)
%
% Read one line from the input stream and echo it, newline-terminated,
% to the output stream.
copy_line(In, Out) :-
	read_line_to_codes(In, Codes),
	format(Out, '~s~n', [Codes]).
|
||||
|
||||
|
||||
/** @pred filter(+ _StreamInp_, + _StreamOut_, + _Goal_)
|
||||
|
||||
For every line _LineIn_ in stream _StreamInp_, execute
|
||||
`call(Goal,LineIn,LineOut)`, and output _LineOut_ to
|
||||
stream _StreamOut_. If `call(Goal,LineIn,LineOut)` fails,
|
||||
nothing will be output but execution continues with the next
|
||||
line. As an example, consider a procedure to select the second and
|
||||
fifth field of a CSV table :
|
||||
~~~~~{.prolog}
|
||||
select(Sep, In, Out) :-
|
||||
fields(In, Sep, [_,F2,_,_,F5|_]),
|
||||
fields(Out,Sep, [F2,F5]).
|
||||
|
||||
select :-
|
||||
filter(",",
|
||||
~~~~~
|
||||
|
||||
*/
|
||||
% filter/3: failure-driven loop over the lines of StreamInp.
% Each line is transformed by call(Command, Line, NewLine); when that
% succeeds with a ground NewLine, the result is written to StreamOut.
% Lines for which Command fails are silently dropped.
filter(StreamInp, StreamOut, Command) :-
	repeat,
	read_line_to_codes(StreamInp, Line),
	(
	  Line == end_of_file
	->
	  % end of input: the cut ends the repeat loop and the clause
	  !
	;
	  call(Command, Line, NewLine),
	  % only fully instantiated results are output
	  ground(NewLine),
	  format(StreamOut, '~s~n', [NewLine]),
	  % drive the loop: fail back to repeat/0 for the next line
	  fail
	).
|
||||
|
||||
/** @pred process(+ _StreamInp_, + _Goal_) is meta
|
||||
|
||||
For every line _LineIn_ in stream _StreamInp_, call
|
||||
`call(Goal,LineIn)`.
|
||||
*/
|
||||
% process/2: failure-driven loop calling call(Command, Line) on every
% line of StreamInp; succeeds once end of file is reached.
process(StreamInp, Command) :-
	repeat,
	read_line_to_codes(StreamInp, Line),
	(
	  Line == end_of_file
	->
	  % the cut ends the repeat loop and the clause
	  !
	;
	  call(Command, Line),
	  % drive the loop: fail back to repeat/0 for the next line
	  fail
	).
|
||||
|
||||
/**
|
||||
* @pred file_filter(+ _FileIn_, + _FileOut_, + _Goal_) is meta
|
||||
*
|
||||
* @param _FileIn_ File to process
|
||||
* @param _FileOut_ Output file, often user_error
|
||||
* @param _Goal_ to be metacalled, receives FileIn and FileOut as
|
||||
* extra arguments
|
||||
*
|
||||
* @return succeeds
|
||||
|
||||
For every line _LineIn_ in file _FileIn_, execute
|
||||
`call(Goal,LineIn,LineOut)`, and output _LineOut_ to file
|
||||
_FileOut_.
|
||||
|
||||
The input stream is accessible through the alias `filter_input`, and
|
||||
the output stream is accessible through `filter_output`.
|
||||
*/
|
||||
%% file_filter(+FileIn, +FileOut, :Goal)
%
% Run filter/3 line-by-line from FileIn to FileOut.  The input stream
% gets the alias filter_input and the output stream the alias
% filter_output — as this predicate's own documentation promises and as
% the sibling file_filter_with_initialization/5 already does; the output
% alias was previously missing here.
file_filter(Inp, Out, Command) :-
	open(Inp, read, StreamInp, [alias(filter_input)]),
	open(Out, write, StreamOut, [alias(filter_output)]),
	filter(StreamInp, StreamOut, Command),
	close(StreamInp),
	close(StreamOut).
|
||||
|
||||
/** @pred file_filter_with_initialization(+ _FileIn_, + _FileOut_, + _Goal_, + _FormatCommand_, + _Arguments_)
|
||||
|
||||
Same as file_filter/3, but before starting the filter execute
|
||||
`format/3` on the output stream, using _FormatCommand_ and
|
||||
_Arguments_.
|
||||
*/
|
||||
%% file_filter_with_initialization(+FileIn, +FileOut, :Goal, +Format, +Args)
%
% As file_filter/3, but first emits format(Format, Args) on the output
% stream.  The streams carry the aliases filter_input / filter_output.
file_filter_with_initialization(Inp, Out, Command, FormatString, Parameters) :-
	open(Inp, read, SIn, [alias(filter_input)]),
	open(Out, write, SOut, [alias(filter_output)]),
	format(SOut, FormatString, Parameters),
	filter(SIn, SOut, Command),
	close(SIn),
	close(SOut).
|
||||
|
||||
|
||||
/** @pred file_filter_with_start_end(+ FileIn, + FileOut, + Goal, + StartGoal, + EndGoal)
|
||||
|
||||
Same as file_filter/3, but before starting the filter execute
|
||||
_StartGoal_, and call _ENdGoal_ as an epilog.
|
||||
|
||||
The input stream are always accessible through `filter_output` and `filter_input`.
|
||||
*/
|
||||
%% file_filter_with_start_end(+FileIn, +FileOut, :Goal, :StartGoal, :EndGoal)
%
% As file_filter/3, but runs call(StartGoal, In, Out) as a prolog and
% call(EndGoal, In, Out) as an epilog around the filtering.  The streams
% carry the aliases filter_input / filter_output.
file_filter_with_start_end(Inp, Out, Command, StartGoal, EndGoal) :-
	open(Inp, read, SIn, [alias(filter_input)]),
	open(Out, write, SOut, [alias(filter_output)]),
	call( StartGoal, SIn, SOut ),
	filter(SIn, SOut, Command),
	call( EndGoal, SIn, SOut ),
	close(SIn),
	close(SOut).
|
||||
|
||||
|
||||
/**
|
||||
* @pred file_select(+ _FileIn_, + _Goal_) is meta
|
||||
*
|
||||
* @param _FileIn_ File to process
|
||||
* @param _Goal_ to be metacalled, receives FileIn as
|
||||
* extra arguments
|
||||
*
|
||||
* @return bindings to arguments of _Goal_.
|
||||
|
||||
For every line _LineIn_ in file _FileIn_, execute
|
||||
`call(Goal,LineIn)`.
|
||||
|
||||
The input stream is accessible through the alias `filter_input`, and
|
||||
the output stream is accessible through `filter_output`.
|
||||
*/
|
||||
% file_select/2: backtrackable line reader with nestable stream aliases.
% Each (possibly recursive) invocation derives a fresh alias by
% appending '_' characters to filter_input, remembering the current
% suffix in the dynamic fact alias/1.  On end of file the stream is
% closed, the previous suffix is restored and the predicate fails;
% otherwise it succeeds once per line with the bindings made by
% call(Command, Line).
% NOTE(review): open/4 is passed [Alias] where Alias is a plain atom —
% stream options normally require alias(Alias); confirm.
% NOTE(review): the atom_concat/3 after the cut recomputes Alias but the
% result is unused (and Alias is already bound) — confirm intent.
file_select(Inp, Command) :-
	( retract(alias(F)) -> true ; F = '' ),
	atom_concat(filter_input, F, Alias),
	open(Inp, read, StreamInp, [Alias]),
	atom_concat('_', F, NF),
	assert( alias(NF) ),
	repeat,
	read_line_to_codes(StreamInp, Line),
	(
	  Line == end_of_file
	->
	  close(StreamInp),
	  retract(alias(NF)),
	  assert(alias(F)),
	  !,
	  atom_concat(filter_input, F, Alias),
	  fail
	;
	  call(Command, Line)
	).
|
||||
|
||||
/**
|
||||
@}
|
||||
*/
|
50
packages/python/swig/yap4py/prolog/listing.yap
Normal file
50
packages/python/swig/yap4py/prolog/listing.yap
Normal file
@ -0,0 +1,50 @@
|
||||
/*************************************************************************
|
||||
* *
|
||||
* YAP Prolog *
|
||||
* *
|
||||
* Yap Prolog was developed at NCCUP - Universidade do Porto *
|
||||
* *
|
||||
* Copyright L.Damas, V.S.Costa and Universidade do Porto 1985-1997 *
|
||||
* *
|
||||
**************************************************************************
|
||||
* *
|
||||
* File: listing.yap *
|
||||
* Last rev: *
|
||||
* mods: *
|
||||
* comments: listing a prolog program *
|
||||
* *
|
||||
*************************************************************************/
|
||||
|
||||
/**
|
||||
* @file library/listing.yap
|
||||
* @author VITOR SANTOS COSTA <vsc@VITORs-MBP.lan>
|
||||
* @date Tue Nov 17 22:03:59 2015
|
||||
*
|
||||
* @brief Emulate SWI Prolog's listing.
|
||||
*
|
||||
*
|
||||
*/
|
||||
:- module(swi_listing,
|
||||
[ listing/0,
|
||||
listing/1,
|
||||
portray_clause/1, % +Clause
|
||||
portray_clause/2, % +Stream, +Clause
|
||||
portray_clause/3 % +Stream, +Clause, +Options
|
||||
]).
|
||||
|
||||
|
||||
|
||||
/*
|
||||
|
||||
* @defgroup swi_listing SWI Prolog listing emulation
|
||||
* @ingroup library
|
||||
|
||||
emulates listing.pl, but just the interface for now.
|
||||
|
||||
*/
|
||||
|
||||
|
||||
:- meta_predicate portray_clause( +, + , : ).

%% portray_clause(+Stream, +Term, :Options)
%
% SWI-compatibility stub: the module-qualified Options are currently
% ignored and the call is delegated to portray_clause/2.  The underscores
% silence the singleton-variable warnings the original head produced.
portray_clause(Stream, Term, _M:_Options) :-
	portray_clause( Stream, Term ).
|
630
packages/python/swig/yap4py/prolog/lists.yap
Normal file
630
packages/python/swig/yap4py/prolog/lists.yap
Normal file
@ -0,0 +1,630 @@
|
||||
/**
|
||||
* @file library/lists.yap
|
||||
* @author Bob Welham, Lawrence Byrd, and R. A. O'Keefe. Contributions from Vitor Santos Costa, Jan Wielemaker and others.
|
||||
* @date 1999
|
||||
*
|
||||
* @addtogroup lists The Prolog Library
|
||||
*
|
||||
* @ingroup library
|
||||
*
|
||||
* @{
|
||||
*
|
||||
* @brief List Manipulation Predicates
|
||||
*
|
||||
*
|
||||
*/
|
||||
% This file has been included as an YAP library by Vitor Santos Costa, 1999
|
||||
|
||||
:- module(lists,
|
||||
[
|
||||
append/3,
|
||||
append/2,
|
||||
delete/3,
|
||||
intersection/3,
|
||||
flatten/2,
|
||||
last/2,
|
||||
list_concat/2,
|
||||
max_list/2,
|
||||
list_to_set/2,
|
||||
member/2,
|
||||
memberchk/2,
|
||||
min_list/2,
|
||||
nextto/3,
|
||||
nth/3,
|
||||
nth/4,
|
||||
nth0/3,
|
||||
nth0/4,
|
||||
nth1/3,
|
||||
nth1/4,
|
||||
numlist/3,
|
||||
permutation/2,
|
||||
prefix/2,
|
||||
remove_duplicates/2,
|
||||
reverse/2,
|
||||
same_length/2,
|
||||
select/3,
|
||||
selectchk/3,
|
||||
sublist/2,
|
||||
substitute/4,
|
||||
subtract/3,
|
||||
suffix/2,
|
||||
sum_list/2,
|
||||
sum_list/3,
|
||||
sumlist/2
|
||||
]).
|
||||
|
||||
|
||||
/** @defgroup lists List Manipulation
|
||||
@ingroup library
|
||||
@{
|
||||
|
||||
The following list manipulation routines are available once included
|
||||
with the `use_module(library(lists))` command.
|
||||
|
||||
*/
|
||||
|
||||
/** @pred list_concat(+ _Lists_,? _List_)
|
||||
|
||||
|
||||
True when _Lists_ is a list of lists and _List_ is the
|
||||
concatenation of _Lists_.
|
||||
|
||||
|
||||
*/
|
||||
/** @pred max_list(? _Numbers_, ? _Max_)
|
||||
|
||||
|
||||
True when _Numbers_ is a list of numbers, and _Max_ is the maximum.
|
||||
|
||||
|
||||
*/
|
||||
/** @pred min_list(? _Numbers_, ? _Min_)
|
||||
|
||||
|
||||
True when _Numbers_ is a list of numbers, and _Min_ is the minimum.
|
||||
|
||||
|
||||
*/
|
||||
/** @pred nth(? _N_, ? _List_, ? _Elem_)
|
||||
|
||||
|
||||
The same as nth1/3.
|
||||
|
||||
|
||||
*/
|
||||
/** @pred nth(? _N_, ? _List_, ? _Elem_, ? _Rest_)
|
||||
|
||||
Same as `nth1/4`.
|
||||
|
||||
|
||||
*/
|
||||
/** @pred nth0(? _N_, ? _List_, ? _Elem_)
|
||||
|
||||
|
||||
True when _Elem_ is the Nth member of _List_,
|
||||
counting the first as element 0. (That is, throw away the first
|
||||
N elements and unify _Elem_ with the next.) It can only be used to
|
||||
select a particular element given the list and index. For that
|
||||
task it is more efficient than member/2
|
||||
|
||||
|
||||
*/
|
||||
/** @pred nth0(? _N_, ? _List_, ? _Elem_, ? _Rest_)
|
||||
|
||||
Unifies _Elem_ with the Nth element of _List_,
|
||||
counting from 0, and _Rest_ with the other elements. It can be used
|
||||
to select the Nth element of _List_ (yielding _Elem_ and _Rest_), or to
|
||||
insert _Elem_ before the Nth (counting from 1) element of _Rest_, when
|
||||
it yields _List_, e.g. `nth0(2, List, c, [a,b,d,e])` unifies List with
|
||||
`[a,b,c,d,e]`. `nth/4` is the same except that it counts from 1. `nth0/4`
|
||||
can be used to insert _Elem_ after the Nth element of _Rest_.
|
||||
|
||||
|
||||
*/
|
||||
/** @pred nth1(+ _Index_,? _List_,? _Elem_)
|
||||
|
||||
|
||||
Succeeds when the _Index_-th element of _List_ unifies with
|
||||
_Elem_. Counting starts at 1.
|
||||
|
||||
Set environment variable. _Name_ and _Value_ should be
|
||||
instantiated to atoms or integers. The environment variable will be
|
||||
passed to `shell/[0-2]` and can be requested using `getenv/2`.
|
||||
They also influence expand_file_name/2.
|
||||
|
||||
|
||||
*/
|
||||
/** @pred nth1(? _N_, ? _List_, ? _Elem_)
|
||||
|
||||
|
||||
The same as nth0/3, except that it counts from
|
||||
1, that is `nth(1, [H|_], H)`.
|
||||
|
||||
|
||||
*/
|
||||
/** @pred nth1(? _N_, ? _List_, ? _Elem_, ? _Rest_)
|
||||
|
||||
Unifies _Elem_ with the Nth element of _List_, counting from 1,
|
||||
and _Rest_ with the other elements. It can be used to select the
|
||||
Nth element of _List_ (yielding _Elem_ and _Rest_), or to
|
||||
insert _Elem_ before the Nth (counting from 1) element of
|
||||
_Rest_, when it yields _List_, e.g. `nth(3, List, c, [a,b,d,e])` unifies List with `[a,b,c,d,e]`. `nth/4`
|
||||
can be used to insert _Elem_ after the Nth element of _Rest_.
|
||||
|
||||
|
||||
*/
|
||||
/** @pred numlist(+ _Low_, + _High_, + _List_)
|
||||
|
||||
|
||||
If _Low_ and _High_ are integers with _Low_ =<
|
||||
_High_, unify _List_ to a list `[Low, Low+1, ...High]`. See
|
||||
also between/3.
|
||||
|
||||
|
||||
*/
|
||||
/** @pred permutation(+ _List_,? _Perm_)
|
||||
|
||||
|
||||
True when _List_ and _Perm_ are permutations of each other.
|
||||
|
||||
|
||||
*/
|
||||
/** @pred remove_duplicates(+ _List_, ? _Pruned_)
|
||||
|
||||
|
||||
Removes duplicated elements from _List_. Beware: if the _List_ has
|
||||
non-ground elements, the result may surprise you.
|
||||
|
||||
|
||||
*/
|
||||
/** @pred same_length(? _List1_, ? _List2_)
|
||||
|
||||
|
||||
True when _List1_ and _List2_ are both lists and have the same number
|
||||
of elements. No relation between the values of their elements is
|
||||
implied.
|
||||
Modes `same_length(-,+)` and `same_length(+,-)` generate either list given
|
||||
the other; mode `same_length(-,-)` generates two lists of the same length,
|
||||
in which case the arguments will be bound to lists of length 0, 1, 2, ...
|
||||
|
||||
*/
|
||||
|
||||
|
||||
%% @pred append(? _Lists_,? _Combined_)
|
||||
%
|
||||
% Concatenate a list of lists. Is true if Lists is a list of
|
||||
% lists, and List is the concatenation of these lists.
|
||||
%
|
||||
% @param ListOfLists must be a list of -possibly- partial lists
|
||||
|
||||
%% append(?ListOfLists, ?Combined)
%
% Combined is the concatenation of the (possibly partial) lists in
% ListOfLists.
append(ListOfLists, Combined) :-
	% must_be(list, ListOfLists),
	append_(ListOfLists, Combined).

% One clause per input shape: empty, singleton, pair, three-or-more.
append_([], []).
append_([Single], Single).
append_([First, Second], Combined) :-
	append(First, Second, Combined).
append_([First, Second, Third|Rest], Combined) :-
	append(First, Second, Partial),
	append_([Partial, Third|Rest], Combined).
|
||||
|
||||
/** @pred last(+ _List_,? _Last_)
|
||||
|
||||
|
||||
True when _List_ is a list and _Last_ is identical to its last element.
|
||||
|
||||
*/
|
||||
|
||||
%% last(+List, ?Last)
%
% Last is identical to the final element of List.
last([First|Rest], Last) :-
	last(Rest, First, Last).

% last(+Rest, +Current, ?Last): Current precedes Rest in the list.
last([], Last, Last).
last([Next|Rest], _, Last) :-
	last(Rest, Next, Last).
|
||||
|
||||
% nextto(X, Y, List)
|
||||
% is true when X and Y appear side-by-side in List. It could be written as
|
||||
% nextto(X, Y, List) :- append(_, [X,Y,_], List).
|
||||
% It may be used to enumerate successive pairs from the list.
|
||||
|
||||
%% nextto(?X, ?Y, ?List)
%
% X and Y appear side by side in List; enumerates successive pairs on
% backtracking.
nextto(X, Y, [X, Y|_]).
nextto(X, Y, [_|Rest]) :-
	nextto(X, Y, Rest).
|
||||
|
||||
% nth0(?N, +List, ?Elem) is true when Elem is the Nth member of List,
|
||||
% counting the first as element 0. (That is, throw away the first
|
||||
% N elements and unify Elem with the next.) It can only be used to
|
||||
% select a particular element given the list and index. For that
|
||||
% task it is more efficient than nmember.
|
||||
% nth(+N, +List, ?Elem) is the same as nth0, except that it counts from
|
||||
% 1, that is nth(1, [H|_], H).
|
||||
|
||||
% nth0/3: Element is the N-th element of In, counting from 0.
% With the index unbound, enumerate index/element pairs.
nth0(V, In, Element) :- var(V), !,
	generate_nth(0, V, In, Element).
nth0(0, [Head|_], Head) :- !.
nth0(N, [_|Tail], Elem) :-
	M is N-1,
	find_nth0(M, Tail, Elem).

% find_nth0/3: deterministic walk once the index is known.
find_nth0(0, [Head|_], Head) :- !.
find_nth0(N, [_|Tail], Elem) :-
	M is N-1,
	find_nth0(M, Tail, Elem).
|
||||
|
||||
|
||||
% nth1/3: as nth0/3, but counting from 1.
nth1(V, In, Element) :- var(V), !,
	generate_nth(1, V, In, Element).
nth1(1, [Head|_], Head) :- !.
nth1(N, [_|Tail], Elem) :-
	nonvar(N), !,
	M is N-1, % should be succ(M, N)
	find_nth(M, Tail, Elem).

% nth/3: alias of nth1/3 (counts from 1).
nth(V, In, Element) :- var(V), !,
	generate_nth(1, V, In, Element).
nth(1, [Head|_], Head) :- !.
nth(N, [_|Tail], Elem) :-
	nonvar(N), !,
	M is N-1, % should be succ(M, N)
	find_nth(M, Tail, Elem).

% find_nth/3: deterministic walk once the index is known (from 1).
find_nth(1, [Head|_], Head) :- !.
find_nth(N, [_|Tail], Elem) :-
	M is N-1,
	find_nth(M, Tail, Elem).

% generate_nth/4: enumerate successive indices (starting at I) and the
% corresponding elements on backtracking.
generate_nth(I, I, [Head|_], Head).
generate_nth(I, IN, [_|List], El) :-
	I1 is I+1,
	generate_nth(I1, IN, List, El).
|
||||
|
||||
|
||||
|
||||
% nth0(+N, ?List, ?Elem, ?Rest) unifies Elem with the Nth element of List,
|
||||
% counting from 0, and Rest with the other elements. It can be used
|
||||
% to select the Nth element of List (yielding Elem and Rest), or to
|
||||
% insert Elem before the Nth (counting from 1) element of Rest, when
|
||||
% it yields List, e.g. nth0(2, List, c, [a,b,d,e]) unifies List with
|
||||
% [a,b,c,d,e]. nth is the same except that it counts from 1. nth
|
||||
% can be used to insert Elem after the Nth element of Rest.
|
||||
|
||||
% nth0/4: as nth0/3 but also yields Rest, the list without the selected
% element; runnable in reverse to insert Element at position N.
nth0(V, In, Element, Tail) :- var(V), !,
	generate_nth(0, V, In, Element, Tail).
nth0(0, [Head|Tail], Head, Tail) :- !.
nth0(N, [Head|Tail], Elem, [Head|Rest]) :-
	M is N-1,
	nth0(M, Tail, Elem, Rest).

% find_nth0/4: deterministic walk once the index is known (from 0).
find_nth0(0, [Head|Tail], Head, Tail) :- !.
find_nth0(N, [Head|Tail], Elem, [Head|Rest]) :-
	M is N-1,
	find_nth0(M, Tail, Elem, Rest).

% nth1/4: as nth0/4, counting from 1.
nth1(V, In, Element, Tail) :- var(V), !,
	generate_nth(1, V, In, Element, Tail).
nth1(1, [Head|Tail], Head, Tail) :- !.
nth1(N, [Head|Tail], Elem, [Head|Rest]) :-
	M is N-1,
	nth1(M, Tail, Elem, Rest).

% nth/4: alias of nth1/4 (counts from 1).
nth(V, In, Element, Tail) :- var(V), !,
	generate_nth(1, V, In, Element, Tail).
nth(1, [Head|Tail], Head, Tail) :- !.
nth(N, [Head|Tail], Elem, [Head|Rest]) :-
	M is N-1,
	nth(M, Tail, Elem, Rest).

% find_nth/4: deterministic walk once the index is known (from 1).
find_nth(1, [Head|Tail], Head, Tail) :- !.
find_nth(N, [Head|Tail], Elem, [Head|Rest]) :-
	M is N-1,
	find_nth(M, Tail, Elem, Rest).

% generate_nth/5: enumerate index, element and residue on backtracking.
generate_nth(I, I, [Head|Tail], Head, Tail).
generate_nth(I, IN, [E|List], El, [E|Tail]) :-
	I1 is I+1,
	generate_nth(I1, IN, List, El, Tail).
|
||||
|
||||
|
||||
|
||||
% permutation(List, Perm)
|
||||
% is true when List and Perm are permutations of each other. Of course,
|
||||
% if you just want to test that, the best way is to keysort/2 the two
|
||||
% lists and see if the results are the same. Or you could use list_to_bag
|
||||
% (from BagUtl.Pl) to see if they convert to the same bag. The point of
|
||||
% perm is to generate permutations. The arguments may be either way round,
|
||||
% the only effect will be the order in which the permutations are tried.
|
||||
% Be careful: this is quite efficient, but the number of permutations of an
|
||||
% N-element list is N!, even for a 7-element list that is 5040.
|
||||
|
||||
%% permutation(?List, ?Perm)
%
% List and Perm are permutations of one another.  Generates the N!
% permutations on backtracking, so use with care on long lists.
permutation([], []).
permutation(List, [Chosen|Perm]) :-
	select(Chosen, List, Remaining),  % tries each List element in turn
	permutation(Remaining, Perm).

%% prefix(?Part, ?Whole)
%
% Part is a leading sublist of Whole.
prefix([], _).
prefix([X|PartRest], [X|WholeRest]) :-
	prefix(PartRest, WholeRest).
|
||||
|
||||
% remove_duplicates(List, Pruned)
|
||||
% removes duplicated elements from List. Beware: if the List has
|
||||
% non-ground elements, the result may surprise you.
|
||||
|
||||
%% remove_duplicates(+List, ?Pruned)
%
% Drop repeated elements from List, keeping the first occurrence.
% Beware: removal uses delete/3 (unification), so non-ground elements
% may be bound — and removed — unexpectedly.
remove_duplicates([], []).
remove_duplicates([X|Xs], [X|Pruned]) :-
	delete(Xs, X, Remaining),
	remove_duplicates(Remaining, Pruned).

%% reverse(?List, ?Reversed)
%
% Reversed holds the elements of List in the opposite order.
reverse(List, Reversed) :-
	reverse(List, [], Reversed).

% reverse/3: accumulator version.
reverse([], Acc, Acc).
reverse([X|Xs], Acc, Reversed) :-
	reverse(Xs, [X|Acc], Reversed).
|
||||
|
||||
|
||||
% same_length(?List1, ?List2)
|
||||
% is true when List1 and List2 are both lists and have the same number
|
||||
% of elements. No relation between the values of their elements is
|
||||
% implied.
|
||||
% Modes same_length(-,+) and same_length(+,-) generate either list given
|
||||
% the other; mode same_length(-,-) generates two lists of the same length,
|
||||
% in which case the arguments will be bound to lists of length 0, 1, 2, ...
|
||||
|
||||
%% same_length(?List1, ?List2)
%
% Both arguments are lists of equal length.  With one side unbound it
% generates the other; with both unbound it enumerates pairs of lists
% of length 0, 1, 2, ...
same_length([], []).
same_length([_|Rest1], [_|Rest2]) :-
	same_length(Rest1, Rest2).
|
||||
|
||||
|
||||
/** @pred selectchk(? _Element_, ? _List_, ? _Residue_)
|
||||
|
||||
|
||||
Semi-deterministic selection from a list. Steadfast: defines as
|
||||
|
||||
~~~~~{.prolog}
|
||||
selectchk(Elem, List, Residue) :-
|
||||
select(Elem, List, Rest0), !,
|
||||
Rest = Rest0.
|
||||
~~~~~
|
||||
*/
|
||||
%% selectchk(?Elem, ?List, ?Residue)
%
% Semi-deterministic select/3: commits to the first solution while
% remaining steadfast in its third argument.
selectchk(Elem, List, Residue) :-
	select(Elem, List, Residue0), !,
	Residue = Residue0.

%% select(?Element, ?List, ?Residue)
%
% Element occurs in List, and Residue is List minus that single
% occurrence, order preserved.
select(X, [X|Rest], Rest).
select(X, [Y|Tail], [Y|Rest]) :-
	select(X, Tail, Rest).
|
||||
|
||||
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
|
||||
%% sublist(?Sub, +List) is nondet.
|
||||
%
|
||||
% True if all elements of Sub appear in List in the same order.
|
||||
%
|
||||
% Also, both `append(_,Sublist,S)` and `append(S,_,List)` hold.
|
||||
% sublist/2: every element of Sub appears in the second argument, in the
% same order (both append(_,Sublist,S) and append(S,_,List) hold).
sublist(L, L).
sublist(Sub, [H|T]) :-
	'$sublist1'(T, H, Sub).

% '$sublist1'(Tail, Previous, Sub): at each step either drop the
% previous element or keep it in Sub.
'$sublist1'(Sub, _, Sub).
'$sublist1'([H|T], _, Sub) :-
	'$sublist1'(T, H, Sub).
'$sublist1'([H|T], X, [X|Sub]) :-
	'$sublist1'(T, H, Sub).
|
||||
|
||||
% substitute(X, XList, Y, YList)
|
||||
% is true when XList and YList only differ in that the elements X in XList
|
||||
% are replaced by elements Y in the YList.
|
||||
%% substitute(+X, +XList, +Y, -YList)
%
% YList is XList with every element identical (==) to X replaced by Y.
substitute(X, XList, Y, YList) :-
	substitute2(XList, X, Y, YList).

substitute2([], _, _, []).
substitute2([Old|Rest], X, Y, [Y|NewRest]) :-
	Old == X, !,
	substitute2(Rest, X, Y, NewRest).
substitute2([Other|Rest], X, Y, [Other|NewRest]) :-
	substitute2(Rest, X, Y, NewRest).
|
||||
|
||||
/** @pred suffix(? _Suffix_, ? _List_)
|
||||
|
||||
Holds when `append(_,Suffix,List)` holds.
|
||||
*/
|
||||
%% suffix(?Suffix, ?List)
%
% Suffix is a trailing sublist of List: append(_, Suffix, List) holds.
suffix(Tail, Tail).
suffix(Tail, [_|Rest]) :-
	suffix(Tail, Rest).
|
||||
|
||||
/** @pred sumlist(? _Numbers_, ? _Total_)
|
||||
|
||||
|
||||
True when _Numbers_ is a list of integers, and _Total_ is their
|
||||
sum. The same as sum_list/2, please do use sum_list/2
|
||||
instead.
|
||||
|
||||
|
||||
*/
|
||||
/** @pred sumlist(? _Numbers_, ? _Total_)

_Total_ is the sum of the numbers in _Numbers_.  Deprecated alias:
please use sum_list/2 instead.
*/
sumlist(Ns, Total) :-
	sumlist(Ns, 0, Total).

/** @pred sum_list(? _Numbers_, + _SoFar_, ? _Total_)

_Total_ is _SoFar_ plus the sum of the numbers in _Numbers_.
*/
sum_list(Ns, SoFar, Total) :-
	sumlist(Ns, SoFar, Total).

/** @pred sum_list(? _Numbers_, ? _Total_)

_Total_ is the sum of the numbers in _Numbers_.
*/
sum_list(Ns, Total) :-
	sumlist(Ns, 0, Total).

% sumlist/3: accumulator worker shared by the wrappers above.
sumlist([], Acc, Acc).
sumlist([N|Ns], Acc, Total) :-
	Acc1 is Acc + N,
	sumlist(Ns, Acc1, Total).
|
||||
|
||||
|
||||
% list_concat(Lists, List)
|
||||
% is true when Lists is a list of lists, and List is the
|
||||
% concatenation of these lists.
|
||||
|
||||
%% list_concat(+Lists, ?List)
%
% List is the concatenation of the lists in Lists.
list_concat([], []).
list_concat([L|Ls], All) :-
	list_concat(L, All, Rest),
	list_concat(Ls, Rest).

% list_concat(+List, ?Front, ?Back): difference-list style append.
list_concat([], Tail, Tail).
list_concat([X|Xs], [X|Ys], Tail) :-
	list_concat(Xs, Ys, Tail).
|
||||
|
||||
|
||||
|
||||
/** @pred flatten(+ _List_, ? _FlattenedList_)
|
||||
|
||||
|
||||
Flatten a list of lists _List_ into a single list
|
||||
_FlattenedList_.
|
||||
|
||||
~~~~~{.prolog}
|
||||
?- flatten([[1],[2,3],[4,[5,6],7,8]],L).
|
||||
|
||||
L = [1,2,3,4,5,6,7,8] ? ;
|
||||
|
||||
no
|
||||
~~~~~
|
||||
*/
|
||||
% flatten/2: flatten arbitrarily nested lists into one list, using a DCG
% as a difference-list accumulator.
flatten(X,Y) :- flatten_list(X,Y,[]).

% an unbound term is kept as a single element
flatten_list(V) --> {var(V)}, !, [V].
flatten_list([]) --> !.
flatten_list([H|T]) --> !, flatten_list(H),flatten_list(T).
% any non-list term is an element
flatten_list(H) --> [H].
|
||||
|
||||
%% max_list(+Numbers, ?Max)
%
% Max is the largest number in the non-empty list Numbers.
max_list([First|Rest], Max) :-
	max_list(Rest, First, Max).

% max_list/3: Best is the largest number seen so far.
max_list([], Max, Max).
max_list([X|Xs], Best, Max) :-
	(   X > Best
	->  max_list(Xs, X, Max)
	;   max_list(Xs, Best, Max)
	).

%% min_list(+Numbers, ?Min)
%
% Min is the smallest number in the non-empty list Numbers.
min_list([First|Rest], Min) :-
	min_list(Rest, First, Min).

% min_list/3: Best is the smallest number seen so far.
min_list([], Min, Min).
min_list([X|Xs], Best, Min) :-
	(   X < Best
	->  min_list(Xs, X, Min)
	;   min_list(Xs, Best, Min)
	).
|
||||
|
||||
%% numlist(+Low, +High, -List) is semidet.
|
||||
%
|
||||
% List is a list [Low, Low+1, ... High]. Fails if High < Low.
|
||||
%
|
||||
% @error type_error(integer, Low)
|
||||
% @error type_error(integer, High)
|
||||
|
||||
% numlist/3: Ns = [L, L+1, ..., U].  Type-checks both bounds and fails
% (rather than erring) when U < L.
numlist(L, U, Ns) :-
	must_be(integer, L),
	must_be(integer, U),
	L =< U,
	numlist_(L, U, Ns).

% numlist_/3: build the range; the cut commits the final element.
numlist_(U, U, OUT) :- !, OUT = [U].
numlist_(L, U, [L|Ns]) :-
	succ(L, L2),
	numlist_(L2, U, Ns).
|
||||
|
||||
|
||||
/** @pred intersection(+ _Set1_, + _Set2_, + _Set3_)
|
||||
|
||||
|
||||
Succeeds if _Set3_ unifies with the intersection of _Set1_ and
|
||||
_Set2_. _Set1_ and _Set2_ are lists without duplicates. They
|
||||
need not be ordered.
|
||||
|
||||
The code was copied from SWI-Prolog's list library.
|
||||
|
||||
*/
|
||||
|
||||
% copied from SWI lists library.
|
||||
%% intersection(+Set1, +Set2, -Set3)
%
% Set3 holds the elements of Set1 that also occur (via memberchk/2) in
% Set2; order follows Set1.  Copied originally from SWI-Prolog's list
% library.
intersection([], _, []) :- !.
intersection([X|Xs], Set2, Result) :-
	memberchk(X, Set2), !,
	Result = [X|Rest],
	intersection(Xs, Set2, Rest).
intersection([_|Xs], Set2, Rest) :-
	intersection(Xs, Set2, Rest).
|
||||
|
||||
%% subtract(+Set, +Delete, -Result) is det.
|
||||
%
|
||||
% Delete all elements from `Set' that occur in `Delete' (a set)
|
||||
% and unify the result with `Result'. Deletion is based on
|
||||
% unification using memberchk/2. The complexity is |Delete|*|Set|.
|
||||
%
|
||||
% @see ord_subtract/3.
|
||||
|
||||
%% subtract(+Set, +Delete, -Result)
%
% Result is Set minus every element that occurs (via memberchk/2, i.e.
% by unification) in Delete.  Complexity is |Delete|*|Set|.
subtract([], _, []) :- !.
subtract([X|Xs], Delete, Result) :-
	memberchk(X, Delete), !,
	subtract(Xs, Delete, Result).
subtract([X|Xs], Delete, [X|Result]) :-
	subtract(Xs, Delete, Result).
|
||||
|
||||
%% list_to_set(+List, ?Set) is det.
|
||||
%
|
||||
% True when Set has the same element as List in the same order.
|
||||
% The left-most copy of the duplicate is retained. The complexity
|
||||
% of this operation is |List|^2.
|
||||
%
|
||||
% @see sort/2.
|
||||
|
||||
% list_to_set/2: remove duplicates from List keeping the left-most copy
% of each element; O(|List|^2).
list_to_set(List, Set) :-
	list_to_set_(List, Set0),
	Set = Set0.

% list_to_set_/2 exploits an open (partial) list: memberchk/2 against a
% partial list unifies H onto the open tail when it is not yet present,
% so R accumulates the distinct elements in order as a side effect of
% the check itself.
list_to_set_([], R) :-
	close_list(R).
list_to_set_([H|T], R) :-
	memberchk(H, R), !,
	list_to_set_(T, R).

% close_list/1: bind the open tail of the accumulated list to [].
close_list([]) :- !.
close_list([_|T]) :-
	close_list(T).
|
||||
|
||||
|
||||
%% @}
|
||||
/** @} */
|
188
packages/python/swig/yap4py/prolog/log2md.yap
Normal file
188
packages/python/swig/yap4py/prolog/log2md.yap
Normal file
@ -0,0 +1,188 @@
|
||||
/**
|
||||
* @file log2md.yap
|
||||
* @author Vitor Santos Costa
|
||||
*
|
||||
*
|
||||
*/
|
||||
|
||||
:- op(650,yfx, <-- ),
|
||||
op(650,yfx, <-* ).
|
||||
|
||||
:- module( log2md,
|
||||
[open_log/1,
|
||||
log_title/1,
|
||||
log_section/1,
|
||||
log_subsection/1,
|
||||
log_paragraph/1,
|
||||
log_unit/2,
|
||||
(<--)/2,
|
||||
(<-*)/2,
|
||||
log_goal/1,
|
||||
log_goal/1 as log_clause,
|
||||
out/1,
|
||||
out/2,
|
||||
outln/1,
|
||||
outln/2] ).
|
||||
|
||||
:- use_module( library( maplist) ).
|
||||
|
||||
|
||||
/**
|
||||
*
|
||||
*
|
||||
* @defgroup Log2MD Log Output of Tests in Markdown format.
|
||||
*
|
||||
* @ingroup Regression System Tests
|
||||
*
|
||||
* These primitives support writing a user-specified log of execution to an
|
||||
* output file. The output file can be used for testing or debugging.
|
||||
*
|
||||
* Primitives include the ability to write a title, a Prolog clause or
|
||||
* goal, and hooks for tracing calls. The log_goal/2 can be used to
|
||||
* start a goal. Arguments of the form `<--/2` and `*->/2` can be used to
|
||||
* track calls.
|
||||
*
|
||||
* The output format is markdown.
|
||||
*/
|
||||
|
||||
open_log(F) :-
|
||||
open( F, write, _Out, [alias(log)]).
|
||||
|
||||
/**
|
||||
* @pred log_title( +String ) is det
|
||||
*
|
||||
* @param [in] S is a Prolog atom or string describing a title.
|
||||
*
|
||||
*/
|
||||
log_title( S ) :-
|
||||
out( '## Report on ~a~n~n', [S]).
|
||||
|
||||
/**
|
||||
* @pred log_section( +String ) is det
|
||||
*
|
||||
* @param [in] S is a Prolog atom or string describing a title.
|
||||
*
|
||||
*/
|
||||
log_section( S ) :-
|
||||
out( '### Report on ~a~n~n', [S]).
|
||||
|
||||
/**
|
||||
* @pred log_section( +String ) is det
|
||||
*
|
||||
* @param [in] S is a Prolog atom or string describing a title.
|
||||
*
|
||||
*/
|
||||
log_subsection( S ) :-
|
||||
out( '#### Report on ~a~n~n', [S]).
|
||||
|
||||
/**
|
||||
* @pred log_section( +String ) is det
|
||||
*
|
||||
* @param [in] S is a Prolog atom or string describing a title.
|
||||
*
|
||||
*/
|
||||
log_paragraph( S ) :-
|
||||
out( '##### Report on ~a~n~n', [S]).
|
||||
|
||||
/**
|
||||
* @pred log_unit( +String, + Level ) is det
|
||||
*
|
||||
* @param [in] _String_ is a Prolog atom or string describing a title
|
||||
* @param [in] _Level_ is an integer number larager than 1 (do notice that )
|
||||
*large numbers may be ignored ).
|
||||
*
|
||||
*
|
||||
*/
|
||||
log_unit( S ) :-
|
||||
out( '## Report on ~a~n~n', [S]).
|
||||
|
||||
/**
|
||||
* @pred clause( +Term ) is det
|
||||
*
|
||||
* @param [in] Term is a Prolog clause or goal that it is going to
|
||||
* be printed out using portray_clause/2.
|
||||
*
|
||||
*/
|
||||
log_goal( DecoratedClause ) :-
|
||||
take_decorations(DecoratedClause, Clause),
|
||||
out( '~~~~~~~~{.prolog}~n'),
|
||||
portray_clause( user_error , Clause ),
|
||||
portray_clause( log , Clause ),
|
||||
out( '~~~~~~~~~n', []).
|
||||
|
||||
take_decorations( G, G ) :-
|
||||
var(G),
|
||||
!.
|
||||
take_decorations(_ <-- G, NG ) :-
|
||||
!,
|
||||
take_decorations( G, NG ).
|
||||
take_decorations(_ <-* G, NG ) :-
|
||||
!,
|
||||
take_decorations( G, NG ).
|
||||
take_decorations(G, NG ) :-
|
||||
G =.. [F|Args],
|
||||
maplist( take_decorations, Args, NArgs ),
|
||||
NG =.. [F|NArgs].
|
||||
|
||||
:- meta_predicate ( + <-- 0 ),
|
||||
( + <-* 0 ).
|
||||
|
||||
/**
|
||||
* @pred log_goal( +Tag , :Goal )
|
||||
*
|
||||
* @param [in] evaluate goal _Goal_ with output before,
|
||||
* during and after the goal has been evaluated.
|
||||
*
|
||||
*/
|
||||
A <-* Goal :-
|
||||
(
|
||||
outln(A),
|
||||
log_goal( Goal ),
|
||||
call( Goal )
|
||||
*->
|
||||
out('succeded as~n'), log_goal(Goal)
|
||||
;
|
||||
out( 'failed~n'),
|
||||
fail
|
||||
).
|
||||
|
||||
/**
|
||||
* @pred `<--`( +Tag , :Goal )
|
||||
*
|
||||
* @param [in] output goal _Goal_ before and after being evaluated, but only
|
||||
* taking the first solution. The _Tag_ must be an atom or a string.
|
||||
*
|
||||
*/
|
||||
Tag <-- Goal :-
|
||||
(
|
||||
outln(Tag),
|
||||
log_goal( Goal ),
|
||||
call( Goal )
|
||||
->
|
||||
out('succeded as~n'),
|
||||
log_goal(Goal),
|
||||
fail
|
||||
;
|
||||
out(failed)
|
||||
).
|
||||
|
||||
|
||||
/**
|
||||
* @pred out(+Format, +Args)
|
||||
*
|
||||
* @param [in] format the string given Args . The output is sent to
|
||||
* user_error and to a stream with alias `log`;
|
||||
*
|
||||
*/
|
||||
out(Format, Args) :-
|
||||
format( log, Format, Args),
|
||||
format( user_error, Format, Args).
|
||||
|
||||
out(Format) :-
|
||||
format( log, Format, []),
|
||||
format( user_error, Format, []).
|
||||
|
||||
outln(Format, Args) :-
|
||||
out(Format, Args), out('~n').
|
||||
outln(Format) :-
|
||||
out(Format), out('~n').
|
384
packages/python/swig/yap4py/prolog/mapargs.yap
Normal file
384
packages/python/swig/yap4py/prolog/mapargs.yap
Normal file
@ -0,0 +1,384 @@
|
||||
/**
|
||||
* @file library/mapargs.yap
|
||||
* @author Lawrence Byrd + Richard A. O'Keefe, VITOR SANTOS COSTA <vsc@VITORs-MBP.lan>
|
||||
* @author : E. Alphonse from code by Joachim Schimpf, Jan Wielemaker, Vitor Santos Costa
|
||||
* @date 4 August 1984 and Ken Johnson 11-8-87
|
||||
*
|
||||
* @brief Macros to apply a predicate to all sub-terms of a term.
|
||||
*
|
||||
*
|
||||
*/
|
||||
|
||||
|
||||
:- module(mapargs,[ mapargs/2, % :Goal, +S
|
||||
mapargs/3, % :Goal, +S, -S
|
||||
mapargs/4, % :Goal, +S, -S1, -S2
|
||||
mapargs/5, % :Goal, +S, -S1, -S2, -S3
|
||||
mapargs/6, % :Goal, +S, -S1, -S2, -S3, -S4
|
||||
sumargs/4,
|
||||
foldargs/4, % :Pred, +S, ?V0, ?V
|
||||
foldargs/5, % :Pred, +S, ?S1, ?V0, ?V
|
||||
foldargs/6, % :Pred, +S, ?S1, ?S2, ?V0, ?V
|
||||
foldargs/7 % :Pred, +S, ?S1, ?S2, ?S3, ?V0, ?V
|
||||
]).
|
||||
|
||||
/**
|
||||
* @defgroup mapargs Apply a predicate to all arguments of a term
|
||||
* @ingroup library
|
||||
*/
|
||||
|
||||
|
||||
:- use_module(library(maputils)).
|
||||
:- use_module(library(lists), [append/3]).
|
||||
|
||||
:- meta_predicate
|
||||
mapargs(1,+),
|
||||
mapargs_args(1,+,+),
|
||||
mapargs(2,+,-),
|
||||
mapargs_args(2,+,-,+),
|
||||
mapargs(3,+,-,-),
|
||||
mapargs_args(2,+,-,-,+),
|
||||
mapargs(4,+,-,-,-),
|
||||
mapargs_args(2,+,-,-,-,+),
|
||||
mapargs(5,+,-,-,-,-),
|
||||
mapargs_args(2,+,-,-,-,-,+),
|
||||
sumargs(3,+,+,-),
|
||||
sumargs_args(3,+,+,-,+),
|
||||
foldargs(3, +, +, -),
|
||||
foldargs(4, +, ?, +, -),
|
||||
foldargs(5, +, ?, ?, +, -),
|
||||
foldargs(6, +, ?, ?, ?, +, -).
|
||||
|
||||
|
||||
mapargs(Pred, TermIn) :-
|
||||
functor(TermIn, _F, N),
|
||||
mapargs_args(Pred, TermIn, 0, N).
|
||||
|
||||
mapargs_args(Pred, TermIn, I, N) :-
|
||||
( I == N -> true ;
|
||||
I1 is I+1,
|
||||
arg(I1, TermIn, InArg),
|
||||
call(Pred, InArg),
|
||||
mapargs_args(Pred, TermIn, I1, N) ).
|
||||
|
||||
mapargs(Pred, TermIn, TermOut) :-
|
||||
functor(TermIn, F, N),
|
||||
functor(TermOut, F, N),
|
||||
mapargs_args(Pred, TermIn, TermOut, 0, N).
|
||||
|
||||
mapargs_args(Pred, TermIn, TermOut, I, N) :-
|
||||
( I == N -> true ;
|
||||
I1 is I+1,
|
||||
arg(I1, TermIn, InArg),
|
||||
arg(I1, TermOut, OutArg),
|
||||
call(Pred, InArg, OutArg),
|
||||
mapargs_args(Pred, TermIn, TermOut, I1, N) ).
|
||||
|
||||
mapargs(Pred, TermIn, TermOut1, TermOut2) :-
|
||||
functor(TermIn, F, N),
|
||||
functor(TermOut1, F, N),
|
||||
functor(TermOut2, F, N),
|
||||
mapargs_args(Pred, TermIn, TermOut1, TermOut2, 0, N).
|
||||
|
||||
mapargs_args(Pred, TermIn, TermOut1, TermOut2, I, N) :-
|
||||
( I == N -> true ;
|
||||
I1 is I+1,
|
||||
arg(I1, TermIn, InArg),
|
||||
arg(I1, TermOut1, OutArg1),
|
||||
arg(I1, TermOut2, OutArg2),
|
||||
call(Pred, InArg, OutArg1, OutArg2),
|
||||
mapargs_args(Pred, TermIn, TermOut1, TermOut2, I1, N) ).
|
||||
|
||||
mapargs(Pred, TermIn, TermOut1, TermOut2, TermOut3) :-
|
||||
functor(TermIn, F, N),
|
||||
functor(TermOut1, F, N),
|
||||
functor(TermOut2, F, N),
|
||||
mapargs_args(Pred, TermIn, TermOut1, TermOut2, TermOut3, 0, N).
|
||||
|
||||
mapargs_args(Pred, TermIn, TermOut1, TermOut2, TermOut3, I, N) :-
|
||||
( I == N -> true ;
|
||||
I1 is I+1,
|
||||
arg(I1, TermIn, InArg),
|
||||
arg(I1, TermOut1, OutArg1),
|
||||
arg(I1, TermOut2, OutArg2),
|
||||
arg(I1, TermOut3, OutArg3),
|
||||
call(Pred, InArg, OutArg1, OutArg2, OutArg3),
|
||||
mapargs_args(Pred, TermIn, TermOut1, TermOut2, TermOut3, I1, N) ).
|
||||
|
||||
mapargs(Pred, TermIn, TermOut1, TermOut2, TermOut3, TermOut4) :-
|
||||
functor(TermIn, F, N),
|
||||
functor(TermOut1, F, N),
|
||||
functor(TermOut2, F, N),
|
||||
functor(TermOut3, F, N),
|
||||
functor(TermOut4, F, N),
|
||||
mapargs_args(Pred, TermIn, TermOut1, TermOut2, TermOut3, TermOut4, 0, N).
|
||||
|
||||
mapargs_args(Pred, TermIn, TermOut1, TermOut2, TermOut3, TermOut4, I, N) :-
|
||||
( I == 0 -> true ;
|
||||
I1 is I+1,
|
||||
arg(I1, TermIn, InArg),
|
||||
arg(I1, TermOut1, OutArg1),
|
||||
arg(I1, TermOut2, OutArg2),
|
||||
arg(I1, TermOut3, OutArg3),
|
||||
arg(I1, TermOut4, OutArg4),
|
||||
call(Pred, InArg, OutArg1, OutArg2, OutArg3, OutArg4),
|
||||
mapargs_args(Pred, TermIn, TermOut1, TermOut2, TermOut3, TermOut4, I1, N) ).
|
||||
|
||||
sumargs(Pred, Term, A0, A1) :-
|
||||
functor(Term, _, N),
|
||||
sumargs(Pred, Term, A0, A1, N).
|
||||
|
||||
sumargs_args(_, _, A0, A1, 0) :-
|
||||
!,
|
||||
A0 = A1.
|
||||
sumargs_args(Pred, Term, A1, A3, N) :-
|
||||
arg(N, Term, Arg),
|
||||
N1 is N - 1,
|
||||
call(Pred, Arg, A1, A2),
|
||||
sumargs_args(Pred, Term, A2, A3, N1).
|
||||
|
||||
|
||||
foldargs(Goal, S, V0, V) :-
|
||||
functor(S, _, Ar),
|
||||
foldargs_(Goal, S, V0, V, 0, Ar).
|
||||
|
||||
foldargs_(Goal, S, V0, V, I, N) :-
|
||||
( I == N -> V0 = V ;
|
||||
I1 is I+1,
|
||||
arg(I1, S, A),
|
||||
call(Goal, A, V0, V1),
|
||||
foldargs_(Goal, S, V1, V, I1, N) ).
|
||||
|
||||
foldargs(Goal, S, O1, V0, V) :-
|
||||
functor(S, N, Ar),
|
||||
functor(O1, N, Ar),
|
||||
foldargs_(Goal, S, O1, V0, V, 0, Ar).
|
||||
|
||||
foldargs_(Goal, S, O1, V0, V, I, N) :-
|
||||
( I == N -> V0 = V ;
|
||||
I1 is I+1,
|
||||
arg(I1, S, A),
|
||||
arg(I1, O1, A1),
|
||||
call(Goal, A, A1, V0, V1),
|
||||
foldargs_(Goal, S, O1, V1, V, I1, N) ).
|
||||
|
||||
foldargs(Goal, S, O1, O2, V0, V) :-
|
||||
functor(S, N, Ar),
|
||||
functor(O1, N, Ar),
|
||||
functor(O2, N, Ar),
|
||||
foldargs_(Goal, S, O1, O2, V0, V, 0, Ar).
|
||||
|
||||
foldargs_(Goal, S, O1, O2, V0, V, I, N) :-
|
||||
( I == N -> V0 = V ;
|
||||
I1 is I+1,
|
||||
arg(I1, S, A),
|
||||
arg(I1, O1, A1),
|
||||
arg(I1, O2, A2),
|
||||
call(Goal, A, A1, A2, V0, V1),
|
||||
foldargs_(Goal, S, O1, O2, V1, V, I1, N) ).
|
||||
|
||||
foldargs(Goal, S, O1, O2, O3, V0, V) :-
|
||||
functor(S, N, Ar),
|
||||
functor(O1, N, Ar),
|
||||
functor(O2, N, Ar),
|
||||
functor(O3, N, Ar),
|
||||
foldargs_(Goal, S, O1, O2, O3, V0, V, 0, Ar).
|
||||
|
||||
foldargs_(Goal, S, O1, O2, O3, V0, V, I, N) :-
|
||||
( I == N -> V0 = V ;
|
||||
I1 is I+1,
|
||||
arg(I1, S, A),
|
||||
arg(I1, O1, A1),
|
||||
arg(I1, O2, A2),
|
||||
arg(I1, O3, A3),
|
||||
call(Goal, A, A1, A2, A3, V0, V1),
|
||||
foldargs_(Goal, S, O1, O2, O3, V1, V, I1, N) ).
|
||||
|
||||
|
||||
goal_expansion(mapargs(Meta, In), (functor(In, _Name, Ar), Mod:Goal)) :-
|
||||
goal_expansion_allowed,
|
||||
callable(Meta),
|
||||
prolog_load_context(module, Mod),
|
||||
aux_preds(Meta, MetaVars, Pred, PredVars, Proto),
|
||||
!,
|
||||
% the new goal
|
||||
pred_name(mapargs, 1, Proto, GoalName),
|
||||
append(MetaVars, [In, 0, Ar], GoalArgs),
|
||||
Goal =.. [GoalName|GoalArgs],
|
||||
% the new predicate declaration
|
||||
HeadPrefix =.. [GoalName|PredVars],
|
||||
% the new predicate declaration
|
||||
append_args(HeadPrefix, [In, I, Ar], RecursionHead),
|
||||
append_args(Pred, [AIn], Apply),
|
||||
append_args(HeadPrefix, [In, I1, Ar], RecursiveCall),
|
||||
compile_aux([
|
||||
(RecursionHead :- I == 0 -> true ; I1 is I+1, arg(I1, In, AIn), Apply, RecursiveCall )
|
||||
], Mod).
|
||||
|
||||
goal_expansion(mapargs(Meta, In, Out), (functor(In, Name, Ar), functor(Out, Name, Ar), Mod:Goal)) :-
|
||||
goal_expansion_allowed,
|
||||
callable(Meta),
|
||||
prolog_load_context(module, Mod),
|
||||
aux_preds(Meta, MetaVars, Pred, PredVars, Proto),
|
||||
!,
|
||||
% the new goal
|
||||
pred_name(mapargs, 2, Proto, GoalName),
|
||||
append(MetaVars, [In, Out, Ar], GoalArgs),
|
||||
Goal =.. [GoalName|GoalArgs],
|
||||
% the new predicate declaration
|
||||
HeadPrefix =.. [GoalName|PredVars],
|
||||
% the new predicate declaration
|
||||
append_args(HeadPrefix, [In, Out, I], RecursionHead),
|
||||
append_args(Pred, [AIn, AOut], Apply),
|
||||
append_args(HeadPrefix, [In, Out, I1], RecursiveCall),
|
||||
compile_aux([
|
||||
(RecursionHead :- I == 0 -> true ; arg(I, In, AIn), arg(I, Out, AOut), Apply, I1 is I-1, RecursiveCall )
|
||||
], Mod).
|
||||
|
||||
goal_expansion(mapargs(Meta, In, Out1, Out2), (functor(In, Name, Ar), functor(Out1, Name, Ar), functor(Out2, Name, Ar), Mod:Goal)) :-
|
||||
goal_expansion_allowed,
|
||||
callable(Meta),
|
||||
prolog_load_context(module, Mod),
|
||||
aux_preds(Meta, MetaVars, Pred, PredVars, Proto),
|
||||
!,
|
||||
% the new goal
|
||||
pred_name(mapargs, 3, Proto, GoalName),
|
||||
append(MetaVars, [In, Out1, Out2, Ar], GoalArgs),
|
||||
Goal =.. [GoalName|GoalArgs],
|
||||
% the new predicate declaration
|
||||
HeadPrefix =.. [GoalName|PredVars],
|
||||
% the new predicate declaration
|
||||
append_args(HeadPrefix, [In, Out1, Out2, I], RecursionHead),
|
||||
append_args(Pred, [AIn, AOut1, AOut2], Apply),
|
||||
append_args(HeadPrefix, [In, Out1, Out2, I1], RecursiveCall),
|
||||
compile_aux([
|
||||
(RecursionHead :- I == 0 -> true ; arg(I, In, AIn), arg(I, Out1, AOut1), arg(I, Out2, AOut2), Apply, I1 is I-1, RecursiveCall )
|
||||
], Mod).
|
||||
|
||||
goal_expansion(mapargs(Meta, In, Out1, Out2, Out3), (functor(In, Name, Ar), functor(Out1, Name, Ar), functor(Out3, Name, Ar), Mod:Goal)) :-
|
||||
goal_expansion_allowed,
|
||||
callable(Meta),
|
||||
prolog_load_context(module, Mod),
|
||||
aux_preds(Meta, MetaVars, Pred, PredVars, Proto),
|
||||
!,
|
||||
% the new goal
|
||||
pred_name(mapargs, 4, Proto, GoalName),
|
||||
append(MetaVars, [In, Out1, Out2, Out3, Ar], GoalArgs),
|
||||
Goal =.. [GoalName|GoalArgs],
|
||||
% the new predicate declaration
|
||||
HeadPrefix =.. [GoalName|PredVars],
|
||||
% the new predicate declaration
|
||||
append_args(HeadPrefix, [In, Out1, Out2, Out3, I], RecursionHead),
|
||||
append_args(Pred, [AIn, AOut1, AOut2, AOut3], Apply),
|
||||
append_args(HeadPrefix, [In, Out1, Out2, Out3, I1], RecursiveCall),
|
||||
compile_aux([
|
||||
(RecursionHead :- I == 0 -> true ; arg(I, In, AIn), arg(I, Out1, AOut1), arg(I, Out2, AOut2), arg(I, Out3, AOut3), Apply, I1 is I-1, RecursiveCall )
|
||||
], Mod).
|
||||
|
||||
goal_expansion(mapargs(Meta, In, Out1, Out2, Out3, Out4), (functor(In, Name, Ar), functor(Out1, Name, Ar), functor(Out3, Name, Ar), functor(Out4, Name, Ar), Mod:Goal)) :-
|
||||
goal_expansion_allowed,
|
||||
callable(Meta),
|
||||
prolog_load_context(module, Mod),
|
||||
aux_preds(Meta, MetaVars, Pred, PredVars, Proto),
|
||||
!,
|
||||
% the new goal
|
||||
pred_name(mapargs, 4, Proto, GoalName),
|
||||
append(MetaVars, [In, Out1, Out2, Out3, Out4, Ar], GoalArgs),
|
||||
Goal =.. [GoalName|GoalArgs],
|
||||
% the new predicate declaration
|
||||
HeadPrefix =.. [GoalName|PredVars],
|
||||
% the new predicate declaration
|
||||
append_args(HeadPrefix, [In, Out1, Out2, Out3, Out4, I], RecursionHead),
|
||||
append_args(Pred, [AIn, AOut1, AOut2, AOut3, AOut4], Apply),
|
||||
append_args(HeadPrefix, [In, Out1, Out2, Out3, Out4, I1], RecursiveCall),
|
||||
compile_aux([
|
||||
(RecursionHead :- I == 0 -> true ; arg(I, In, AIn), arg(I, Out1, AOut1), arg(I, Out2, AOut2), arg(I, Out3, AOut3), arg(I, Out4, AOut4), Apply, I1 is I-1, RecursiveCall )
|
||||
], Mod).
|
||||
|
||||
goal_expansion(sumargs(Meta, Term, AccIn, AccOut), Mod:Goal) :-
|
||||
goal_expansion_allowed,
|
||||
prolog_load_context(module, Mod),
|
||||
Goal = (
|
||||
Term =.. [_|TermArgs],
|
||||
sumlist(Meta, TermArgs, AccIn, AccOut)
|
||||
).
|
||||
|
||||
goal_expansion(foldargs(Meta, In, Acc0, AccF), (functor(In, _Name, Ar), Mod:Goal)) :-
|
||||
goal_expansion_allowed,
|
||||
callable(Meta),
|
||||
prolog_load_context(module, Mod),
|
||||
aux_preds(Meta, MetaVars, Pred, PredVars, Proto),
|
||||
!,
|
||||
% the new goal
|
||||
pred_name(foldargs, 1, Proto, GoalName),
|
||||
append(MetaVars, [In, Acc0, AccF, 0, Ar], GoalArgs),
|
||||
Goal =.. [GoalName|GoalArgs],
|
||||
% the new predicate declaration
|
||||
HeadPrefix =.. [GoalName|PredVars],
|
||||
% the new predicate declaration
|
||||
append_args(HeadPrefix, [In, VAcc0, VAccF, I, Ar], RecursionHead),
|
||||
append_args(Pred, [AIn, VAcc0, VAccI], Apply),
|
||||
append_args(HeadPrefix, [In, VAccI, VAccF, I1, Ar], RecursiveCall),
|
||||
compile_aux([
|
||||
(RecursionHead :- I == Ar -> VAcc0 = VAccF ; I1 is I+1, arg(I1, In, AIn), Apply, RecursiveCall )
|
||||
], Mod).
|
||||
|
||||
goal_expansion(foldargs(Meta, In, Out1, Acc0, AccF), (functor(In, Name, Ar), functor(Out1, Name, Ar), Mod:Goal)) :-
|
||||
goal_expansion_allowed,
|
||||
callable(Meta),
|
||||
prolog_load_context(module, Mod),
|
||||
aux_preds(Meta, MetaVars, Pred, PredVars, Proto),
|
||||
!,
|
||||
% the new goal
|
||||
pred_name(foldargs, 2, Proto, GoalName),
|
||||
append(MetaVars, [In, Out1, Acc0, AccF, 0, Ar], GoalArgs),
|
||||
Goal =.. [GoalName|GoalArgs],
|
||||
% the new predicate declaration
|
||||
HeadPrefix =.. [GoalName|PredVars],
|
||||
% the new predicate declaration
|
||||
append_args(HeadPrefix, [In, Out1, VAcc0, VAccF, I, Ar], RecursionHead),
|
||||
append_args(Pred, [AIn, AOut1, VAcc0, VAccI], Apply),
|
||||
append_args(HeadPrefix, [In, Out1, VAccI, VAccF, I1, Ar], RecursiveCall),
|
||||
compile_aux([
|
||||
(RecursionHead :- I == Ar -> VAcc0 = VAccF ; I1 is I+1, arg(I1, In, AIn), arg(I1, Out1, AOut1), Apply, RecursiveCall )
|
||||
], Mod).
|
||||
|
||||
goal_expansion(foldargs(Meta, In, Out1, Out2, Acc0, AccF), (functor(In, Name, Ar), functor(Out1, Name, Ar), functor(Out2, Name, Ar), Mod:Goal)) :-
|
||||
goal_expansion_allowed,
|
||||
callable(Meta),
|
||||
prolog_load_context(module, Mod),
|
||||
aux_preds(Meta, MetaVars, Pred, PredVars, Proto),
|
||||
!,
|
||||
% the new goal
|
||||
pred_name(foldargs, 3, Proto, GoalName),
|
||||
append(MetaVars, [In, Out1, Out2, Acc0, AccF, 0, Ar], GoalArgs),
|
||||
Goal =.. [GoalName|GoalArgs],
|
||||
% the new predicate declaration
|
||||
HeadPrefix =.. [GoalName|PredVars],
|
||||
% the new predicate declaration
|
||||
append_args(HeadPrefix, [In, Out1, Out2, VAcc0, VAccF, I, Ar], RecursionHead),
|
||||
append_args(Pred, [AIn, AOut1, AOut2, VAcc0, VAccI], Apply),
|
||||
append_args(HeadPrefix, [In, Out1, Out2, VAccI, VAccF, I1, Ar], RecursiveCall),
|
||||
compile_aux([
|
||||
(RecursionHead :- I == Ar -> VAcc0 = VAccF ; I1 is I+1, arg(I1, In, AIn), arg(I1, Out1, AOut1), arg(I1, Out2, AOut2), Apply, RecursiveCall )
|
||||
], Mod).
|
||||
|
||||
goal_expansion(foldargs(Meta, In, Out1, Out2, Out3, Acc0, AccF), (functor(In, Name, Ar), functor(Out1, Name, Ar), functor(Out2, Name, Ar), functor(Out3, Name, Ar), Mod:Goal)) :-
|
||||
goal_expansion_allowed,
|
||||
callable(Meta),
|
||||
prolog_load_context(module, Mod),
|
||||
aux_preds(Meta, MetaVars, Pred, PredVars, Proto),
|
||||
!,
|
||||
% the new goal
|
||||
pred_name(foldargs, 4, Proto, GoalName),
|
||||
append(MetaVars, [In, Out1, Out2, Out3, Acc0, AccF, 0, Ar], GoalArgs),
|
||||
Goal =.. [GoalName|GoalArgs],
|
||||
% the new predicate declaration
|
||||
HeadPrefix =.. [GoalName|PredVars],
|
||||
% the new predicate declaration
|
||||
append_args(HeadPrefix, [In, Out1, Out2, Out3, VAcc0, VAccF, I, Ar], RecursionHead),
|
||||
append_args(Pred, [AIn, AOut1, AOut2, AOut3, VAcc0, VAccI], Apply),
|
||||
append_args(HeadPrefix, [In, Out1, Out2, Out3, VAccI, VAccF, I1, Ar], RecursiveCall),
|
||||
compile_aux([
|
||||
(RecursionHead :- I == Ar -> VAcc0 = VAccF ; I1 is I+1, arg(I1, In, AIn), arg(I1, Out1, AOut1), arg(I1, Out2, AOut2), arg(I1, Out3, AOut3), Apply, RecursiveCall )
|
||||
], Mod).
|
1295
packages/python/swig/yap4py/prolog/maplist.yap
Normal file
1295
packages/python/swig/yap4py/prolog/maplist.yap
Normal file
File diff suppressed because it is too large
Load Diff
106
packages/python/swig/yap4py/prolog/maputils.yap
Normal file
106
packages/python/swig/yap4py/prolog/maputils.yap
Normal file
@ -0,0 +1,106 @@
|
||||
/**
|
||||
* @file maputils.yap
|
||||
* @author VITOR SANTOS COSTA <vsc@VITORs-MBP.lan>
|
||||
* @date Tue Nov 17 22:48:58 2015
|
||||
*
|
||||
* @brief Auxiliary routines for map... libraries
|
||||
*
|
||||
*
|
||||
*/
|
||||
%%%%%%%%%%%%%%%%%%%%
|
||||
% map utilities
|
||||
%%%%%%%%%%%%%%%%%%%%
|
||||
|
||||
:- module(maputils,
|
||||
[compile_aux/2,
|
||||
goal_expansion_allowed/0,
|
||||
pred_name/4,
|
||||
aux_preds/5,
|
||||
append_args/3]).
|
||||
|
||||
/**
|
||||
* @addtogroup maplist
|
||||
*
|
||||
* Auxiliary routines
|
||||
*
|
||||
*@{
|
||||
*/
|
||||
:- use_module(library(lists), [append/3]).
|
||||
|
||||
:- dynamic number_of_expansions/1.
|
||||
|
||||
number_of_expansions(0).
|
||||
|
||||
%
|
||||
% compile auxiliary routines for term expansion
|
||||
%
|
||||
compile_aux([Clause|Clauses], Module) :-
|
||||
% compile the predicate declaration if needed
|
||||
( Clause = (Head :- _)
|
||||
; Clause = Head ),
|
||||
!,
|
||||
functor(Head, F, N),
|
||||
( current_predicate(Module:F/N)
|
||||
->
|
||||
true
|
||||
;
|
||||
% format("*** Creating auxiliary predicate ~q~n", [F/N]),
|
||||
% checklist(portray_clause, [Clause|Clauses]),
|
||||
compile_term([Clause|Clauses], Module)
|
||||
).
|
||||
|
||||
compile_term([], _).
|
||||
compile_term([Clause|Clauses], Module) :-
|
||||
assert_static(Module:Clause),
|
||||
compile_term(Clauses, Module).
|
||||
|
||||
append_args(Term, Args, NewTerm) :-
|
||||
Term =.. [Meta|OldArgs],
|
||||
append(OldArgs, Args, GoalArgs),
|
||||
NewTerm =.. [Meta|GoalArgs].
|
||||
|
||||
aux_preds(Meta, _, _, _, _) :-
|
||||
var(Meta), !,
|
||||
fail.
|
||||
aux_preds(_:Meta, MetaVars, Pred, PredVars, Proto) :- !,
|
||||
aux_preds(Meta, MetaVars, Pred, PredVars, Proto).
|
||||
aux_preds(Meta, MetaVars, Pred, PredVars, Proto) :-
|
||||
Meta =.. [F|Args],
|
||||
aux_args(Args, MetaVars, PredArgs, PredVars, ProtoArgs),
|
||||
Pred =.. [F|PredArgs],
|
||||
Proto =.. [F|ProtoArgs].
|
||||
|
||||
aux_args([], [], [], [], []).
|
||||
aux_args([Arg|Args], MVars, [Arg|PArgs], PVars, [Arg|ProtoArgs]) :-
|
||||
ground(Arg), !,
|
||||
aux_args(Args, MVars, PArgs, PVars, ProtoArgs).
|
||||
aux_args([Arg|Args], [Arg|MVars], [PVar|PArgs], [PVar|PVars], ['_'|ProtoArgs]) :-
|
||||
aux_args(Args, MVars, PArgs, PVars, ProtoArgs).
|
||||
|
||||
pred_name(Macro, Arity, _ , Name) :-
|
||||
prolog_load_context(file, FullFileName),
|
||||
file_base_name( FullFileName, File ),
|
||||
prolog_load_context(term_position, Pos),
|
||||
stream_position_data( line_count, Pos, Line ), !,
|
||||
transformation_id(Id),
|
||||
atomic_concat(['$$$ for ',Macro,'/',Arity,', line ',Line,' in ',File,' ',Id], Name).
|
||||
pred_name(Macro, Arity, _ , Name) :-
|
||||
transformation_id(Id),
|
||||
atomic_concat(['$$$__expansion__ for ',Macro,'/',Arity,' ',Id], Name).
|
||||
|
||||
transformation_id(Id) :-
|
||||
retract(number_of_expansions(Id)),
|
||||
Id1 is Id+1,
|
||||
assert(number_of_expansions(Id1)).
|
||||
|
||||
%% goal_expansion_allowed is semidet.
|
||||
%
|
||||
% `True` if we can use
|
||||
% goal-expansion.
|
||||
goal_expansion_allowed :-
|
||||
once( prolog_load_context(_, _) ), % make sure we are compiling.
|
||||
\+ current_prolog_flag(xref, true).
|
||||
|
||||
/**
|
||||
@}
|
||||
*/
|
328
packages/python/swig/yap4py/prolog/matlab.yap
Normal file
328
packages/python/swig/yap4py/prolog/matlab.yap
Normal file
@ -0,0 +1,328 @@
|
||||
/**
|
||||
* @file matlab.yap
|
||||
* @author VITOR SANTOS COSTA <vsc@VITORs-MBP.lan>
|
||||
* @date Tue Nov 17 22:51:48 2015
|
||||
*
|
||||
* @brief YAP Matlab interface.
|
||||
*
|
||||
*
|
||||
*/
|
||||
|
||||
|
||||
:- module(matlab,
|
||||
[start_matlab/1,
|
||||
close_matlab/0,
|
||||
matlab_on/0,
|
||||
matlab_eval_string/1,
|
||||
matlab_eval_string/2,
|
||||
matlab_cells/2,
|
||||
matlab_cells/3,
|
||||
matlab_initialized_cells/4,
|
||||
matlab_zeros/2,
|
||||
matlab_zeros/3,
|
||||
matlab_zeros/4,
|
||||
matlab_matrix/4,
|
||||
matlab_vector/2,
|
||||
matlab_vector/3,
|
||||
matlab_set/4,
|
||||
matlab_get_variable/2,
|
||||
matlab_item/3,
|
||||
matlab_item/4,
|
||||
matlab_item1/3,
|
||||
matlab_item1/4,
|
||||
matlab_sequence/3,
|
||||
matlab_call/2]).
|
||||
|
||||
/** @defgroup matlab MATLAB Package Interface
|
||||
@ingroup library
|
||||
@{
|
||||
|
||||
The MathWorks MATLAB is a widely used package for array
|
||||
processing. YAP now includes a straightforward interface to MATLAB. To
|
||||
actually use it, you need to install YAP calling `configure` with
|
||||
the `--with-matlab=DIR` option, and you need to call
|
||||
`use_module(library(lists))` command.
|
||||
|
||||
Accessing the matlab dynamic libraries can be complicated. In Linux
|
||||
machines, to use this interface, you may have to set the environment
|
||||
variable <tt>LD_LIBRARY_PATH</tt>. Next, follows an example using bash in a
|
||||
64-bit Linux PC:
|
||||
|
||||
~~~~~
|
||||
export LD_LIBRARY_PATH=''$MATLAB_HOME"/sys/os/glnxa64:''$MATLAB_HOME"/bin/glnxa64:''$LD_LIBRARY_PATH"
|
||||
~~~~~
|
||||
where `MATLAB_HOME` is the directory where matlab is installed
|
||||
at. Please replace `ax64` for `x86` on a 32-bit PC.
|
||||
|
||||
*/
|
||||
|
||||
/*
|
||||
|
||||
@pred start_matlab(+ _Options_)
|
||||
|
||||
|
||||
Start a matlab session. The argument _Options_ may either be the
|
||||
empty string/atom or the command to call matlab. The command may fail.
|
||||
|
||||
|
||||
*/
|
||||
|
||||
/** @pred close_matlab
|
||||
|
||||
|
||||
Stop the current matlab session.
|
||||
|
||||
|
||||
*/
|
||||
/** @pred matlab_cells(+ _SizeX_, + _SizeY_, ? _Array_)
|
||||
|
||||
MATLAB will create an empty array of cells of size _SizeX_ and
|
||||
_SizeY_, and if _Array_ is bound to an atom, store the array
|
||||
in the matlab variable with name _Array_. Corresponds to the
|
||||
MATLAB command `cells`.
|
||||
|
||||
|
||||
*/
|
||||
/** @pred matlab_cells(+ _Size_, ? _Array_)
|
||||
|
||||
|
||||
MATLAB will create an empty vector of cells of size _Size_, and if
|
||||
_Array_ is bound to an atom, store the array in the matlab
|
||||
variable with name _Array_. Corresponds to the MATLAB command `cells`.
|
||||
|
||||
|
||||
*/
|
||||
/** @pred matlab_eval_string(+ _Command_)
|
||||
|
||||
|
||||
Holds if matlab evaluated successfully the command _Command_.
|
||||
|
||||
|
||||
*/
|
||||
/** @pred matlab_eval_string(+ _Command_, - _Answer_)
|
||||
|
||||
MATLAB will evaluate the command _Command_ and unify _Answer_
|
||||
with a string reporting the result.
|
||||
|
||||
|
||||
*/
|
||||
/** @pred matlab_get_variable(+ _MatVar_, - _List_)
|
||||
|
||||
|
||||
Unify MATLAB variable _MatVar_ with the List _List_.
|
||||
|
||||
|
||||
*/
|
||||
/** @pred matlab_initialized_cells(+ _SizeX_, + _SizeY_, + _List_, ? _Array_)
|
||||
|
||||
|
||||
MATLAB will create an array of cells of size _SizeX_ and
|
||||
_SizeY_, initialized from the list _List_, and if _Array_
|
||||
is bound to an atom, store the array in the matlab variable with name
|
||||
_Array_.
|
||||
|
||||
|
||||
*/
|
||||
/** @pred matlab_item(+ _MatVar_, + _X_, + _Y_, ? _Val_)
|
||||
|
||||
Read or set MATLAB _MatVar_( _X_, _Y_) from/to _Val_. Use
|
||||
`C` notation for matrix access (ie, starting from 0).
|
||||
|
||||
|
||||
*/
|
||||
/** @pred matlab_item(+ _MatVar_, + _X_, ? _Val_)
|
||||
|
||||
|
||||
Read or set MATLAB _MatVar_( _X_) from/to _Val_. Use
|
||||
`C` notation for matrix access (ie, starting from 0).
|
||||
|
||||
|
||||
*/
|
||||
/** @pred matlab_item1(+ _MatVar_, + _X_, + _Y_, ? _Val_)
|
||||
|
||||
Read or set MATLAB _MatVar_( _X_, _Y_) from/to _Val_. Use
|
||||
MATLAB notation for matrix access (ie, starting from 1).
|
||||
|
||||
|
||||
*/
|
||||
/** @pred matlab_item1(+ _MatVar_, + _X_, ? _Val_)
|
||||
|
||||
|
||||
Read or set MATLAB _MatVar_( _X_) from/to _Val_. Use
|
||||
MATLAB notation for matrix access (ie, starting from 1).
|
||||
|
||||
|
||||
*/
|
||||
/** @pred matlab_matrix(+ _SizeX_, + _SizeY_, + _List_, ? _Array_)
|
||||
|
||||
|
||||
MATLAB will create an array of floats of size _SizeX_ and _SizeY_,
|
||||
initialized from the list _List_, and if _Array_ is bound to
|
||||
an atom, store the array in the matlab variable with name _Array_.
|
||||
|
||||
|
||||
*/
|
||||
/** @pred matlab_on
|
||||
|
||||
|
||||
Holds if a matlab session is on.
|
||||
|
||||
|
||||
*/
|
||||
/** @pred matlab_sequence(+ _Min_, + _Max_, ? _Array_)
|
||||
|
||||
|
||||
MATLAB will create a sequence going from _Min_ to _Max_, and
|
||||
if _Array_ is bound to an atom, store the sequence in the matlab
|
||||
variable with name _Array_.
|
||||
|
||||
|
||||
*/
|
||||
/** @pred matlab_set(+ _MatVar_, + _X_, + _Y_, + _Value_)
|
||||
|
||||
|
||||
Call MATLAB to set element _MatVar_( _X_, _Y_) to
|
||||
_Value_. Notice that this command uses the MATLAB array access
|
||||
convention.
|
||||
|
||||
|
||||
*/
|
||||
/** @pred matlab_vector(+ _Size_, + _List_, ? _Array_)
|
||||
|
||||
|
||||
MATLAB will create a vector of floats of size _Size_, initialized
|
||||
from the list _List_, and if _Array_ is bound to an atom,
|
||||
store the array in the matlab variable with name _Array_.
|
||||
|
||||
|
||||
*/
|
||||
/** @pred matlab_zeros(+ _SizeX_, + _SizeY_, + _SizeZ_, ? _Array_)
|
||||
|
||||
MATLAB will create an array of zeros of size _SizeX_, _SizeY_,
|
||||
and _SizeZ_. If _Array_ is bound to an atom, store the array
|
||||
in the matlab variable with name _Array_. Corresponds to the
|
||||
MATLAB command `zeros`.
|
||||
|
||||
|
||||
|
||||
|
||||
*/
|
||||
/** @pred matlab_zeros(+ _SizeX_, + _SizeY_, ? _Array_)
|
||||
|
||||
MATLAB will create an array of zeros of size _SizeX_ and
|
||||
_SizeY_, and if _Array_ is bound to an atom, store the array
|
||||
in the matlab variable with name _Array_. Corresponds to the
|
||||
MATLAB command `zeros`.
|
||||
|
||||
|
||||
*/
|
||||
/** @pred matlab_zeros(+ _Size_, ? _Array_)
|
||||
|
||||
|
||||
MATLAB will create a vector of zeros of size _Size_, and if
|
||||
_Array_ is bound to an atom, store the array in the matlab
|
||||
variable with name _Array_. Corresponds to the MATLAB command
|
||||
`zeros`.
|
||||
|
||||
|
||||
*/
|
||||
|
||||
:- ensure_loaded(library(lists)).
|
||||
|
||||
tell_warning :-
|
||||
print_message(warning,functionality(matlab)).
|
||||
|
||||
:- ( catch(load_foreign_files([matlab], ['eng','mx','ut'], init_matlab),_,fail) -> true ; tell_warning).
|
||||
|
||||
matlab_eval_sequence(S) :-
|
||||
atomic_concat(S,S1),
|
||||
matlab_eval_string(S1).
|
||||
|
||||
matlab_eval_sequence(S,O) :-
|
||||
atomic_concat(S,S1),
|
||||
matlab_eval_string(S1,O).
|
||||
|
||||
matlab_vector( Vec, L) :-
|
||||
length(Vec, LV),
|
||||
matlab_vector(LV, Vec, L).
|
||||
|
||||
matlab_sequence(Min,Max,L) :-
|
||||
mksequence(Min,Max,Vector),
|
||||
Dim is (Max-Min)+1,
|
||||
matlab_matrix(1,Dim,Vector,L).
|
||||
|
||||
mksequence(Min,Min,[Min]) :- !.
|
||||
mksequence(Min,Max,[Min|Vector]) :-
|
||||
Min1 is Min+1,
|
||||
mksequence(Min1,Max,Vector).
|
||||
|
||||
matlab_call(S,Out) :-
|
||||
S=..[Func|Args],
|
||||
build_args(Args,L0,[]),
|
||||
process_arg_entry(L0,L),
|
||||
build_output(Out,Lf,['= ',Func|L]),
|
||||
atomic_concat(Lf,Command),
|
||||
matlab_eval_string(Command).
|
||||
|
||||
matlab_call(S,Out,Result) :-
|
||||
S=..[Func|Args],
|
||||
build_args(Args,L0,[]),
|
||||
process_arg_entry(L0,L),
|
||||
build_output(Out,Lf,[' = ',Func|L]),
|
||||
atomic_concat(Lf,Command),
|
||||
matlab_eval_string(Command,Result).
|
||||
|
||||
build_output(Out,['[ '|L],L0) :-
|
||||
is_list(Out), !,
|
||||
build_outputs(Out,L,[']'|L0]).
|
||||
build_output(Out,Lf,L0) :-
|
||||
build_arg(Out,Lf,L0).
|
||||
|
||||
build_outputs([],L,L).
|
||||
build_outputs([Out|Outs],[Out,' '|L],L0) :-
|
||||
build_outputs(Outs,L,L0).
|
||||
|
||||
build_args([],L,L).
|
||||
build_args([Arg],Lf,L0) :- !,
|
||||
build_arg(Arg,Lf,[')'|L0]).
|
||||
build_args([Arg|Args],L,L0) :-
|
||||
build_arg(Arg,L,[', '|L1]),
|
||||
build_args(Args,L1,L0).
|
||||
|
||||
build_arg(V,_,_) :- var(V), !,
|
||||
throw(error(instantiation_error)).
|
||||
build_arg(Arg,[Arg|L],L) :- atomic(Arg), !.
|
||||
build_arg(\S0,['\'',S0,'\''|L],L) :-
|
||||
atom(S0), !.
|
||||
build_arg([S1|S2],['['|L],L0) :-
|
||||
is_list(S2), !,
|
||||
build_arglist([S1|S2],L,L0).
|
||||
build_arg([S1|S2],L,L0) :- !,
|
||||
build_arg(S1,L,['.'|L1]),
|
||||
build_arg(S2,L1,L0).
|
||||
build_arg(S1:S2,L,L0) :- !,
|
||||
build_arg(S1,L,[':'|L1]),
|
||||
build_arg(S2,L1,L0).
|
||||
build_arg(F,[N,'{'|L],L0) :- %N({A}) = N{A}
|
||||
F=..[N,{A}], !,
|
||||
build_arg(A,L,['}'|L0]).
|
||||
build_arg(F,[N,'('|L],L0) :-
|
||||
F=..[N|As],
|
||||
build_args(As,L,L0).
|
||||
|
||||
build_arglist([A],L,L0) :- !,
|
||||
build_arg(A,L,[' ]'|L0]).
|
||||
build_arglist([A|As],L,L0) :-
|
||||
build_arg(A,L,[' ,'|L1]),
|
||||
build_arglist(As,L1,L0).
|
||||
|
||||
build_string([],['\''|L],L).
|
||||
build_string([S0|S],[C|Lf],L0) :-
|
||||
char_code(C,S0),
|
||||
build_string(S,Lf,L0).
|
||||
|
||||
|
||||
process_arg_entry([],[]) :- !.
|
||||
process_arg_entry(L,['('|L]).
|
||||
/** @} */
|
||||
|
1403
packages/python/swig/yap4py/prolog/matrix.yap
Normal file
1403
packages/python/swig/yap4py/prolog/matrix.yap
Normal file
File diff suppressed because it is too large
Load Diff
232
packages/python/swig/yap4py/prolog/nb.yap
Normal file
232
packages/python/swig/yap4py/prolog/nb.yap
Normal file
@ -0,0 +1,232 @@
|
||||
/*************************************************************************
|
||||
* *
|
||||
* YAP Prolog *
|
||||
* *
|
||||
* Yap Prolog was developed at NCCUP - Universidade do Porto *
|
||||
* *
|
||||
* Copyright L.Damas, V.S.Costa and Universidade do Porto 1985-1997 *
|
||||
* *
|
||||
**************************************************************************
|
||||
* *
|
||||
* File: nb.yap *
|
||||
* Last rev: 5/12/99 *
|
||||
* mods: *
|
||||
* comments: non-backtrackable data-structures *
|
||||
* *
|
||||
*************************************************************************/
|
||||
|
||||
/**
|
||||
* @file nb.yap
|
||||
* @author VITOR SANTOS COSTA <vsc@VITORs-MBP.lan>
|
||||
* @date Tue Nov 17 23:18:13 2015
|
||||
*
|
||||
* @brief stub for global (non-backtrackable) variables.
|
||||
*
|
||||
*
|
||||
*/
|
||||
|
||||
|
||||
:- module(nb, [
|
||||
nb_create_accumulator/2,
|
||||
nb_add_to_accumulator/2,
|
||||
nb_accumulator_value/2,
|
||||
nb_queue/1,
|
||||
nb_queue/2,
|
||||
nb_queue_close/3,
|
||||
nb_queue_enqueue/2,
|
||||
nb_queue_dequeue/2,
|
||||
nb_queue_peek/2,
|
||||
nb_queue_empty/1,
|
||||
nb_queue_size/2,
|
||||
nb_queue_replace/3,
|
||||
nb_heap/2,
|
||||
nb_heap_close/1,
|
||||
nb_heap_add/3,
|
||||
nb_heap_del/3,
|
||||
nb_heap_peek/3,
|
||||
nb_heap_empty/1,
|
||||
nb_heap_size/2,
|
||||
nb_beam/2,
|
||||
nb_beam_close/1,
|
||||
nb_beam_add/3,
|
||||
nb_beam_del/3,
|
||||
nb_beam_peek/3,
|
||||
nb_beam_empty/1,
|
||||
% nb_beam_check/1,
|
||||
nb_beam_size/2]).
|
||||
|
||||
/** @defgroup nb Non-Backtrackable Data Structures
|
||||
@ingroup library
|
||||
@{
|
||||
|
||||
The following routines implement well-known data-structures using global
|
||||
non-backtrackable variables (implemented on the Prolog stack). The
|
||||
data-structures currently supported are Queues, Heaps, and Beam for Beam
|
||||
search. They are allowed through `library(nb)`.
|
||||
|
||||
|
||||
*/
|
||||
|
||||
/** @pred nb_beam(+ _DefaultSize_,- _Beam_)
|
||||
|
||||
|
||||
Create a _Beam_ with default size _DefaultSize_. Note that size
|
||||
is fixed throughout.
|
||||
|
||||
|
||||
*/
|
||||
/** @pred nb_beam_add(+ _Beam_, + _Key_, + _Value_)
|
||||
|
||||
|
||||
Add _Key_- _Value_ to the beam _Beam_. The key is sorted on
|
||||
_Key_ only.
|
||||
|
||||
|
||||
*/
|
||||
/** @pred nb_beam_close(+ _Beam_)
|
||||
|
||||
|
||||
Close the beam _Beam_: no further elements can be added.
|
||||
|
||||
|
||||
*/
|
||||
/** @pred nb_beam_del(+ _Beam_, - _Key_, - _Value_)
|
||||
|
||||
|
||||
Remove element _Key_- _Value_ with smallest _Key_ in beam
|
||||
_Beam_. Fail if the beam is empty.
|
||||
|
||||
|
||||
*/
|
||||
/** @pred nb_beam_empty(+ _Beam_)
|
||||
|
||||
|
||||
Succeeds if _Beam_ is empty.
|
||||
|
||||
|
||||
|
||||
|
||||
*/
|
||||
/** @pred nb_beam_peek(+ _Beam_, - _Key_, - _Value_))
|
||||
|
||||
|
||||
_Key_- _Value_ is the element with smallest _Key_ in the beam
|
||||
_Beam_. Fail if the beam is empty.
|
||||
|
||||
|
||||
*/
|
||||
/** @pred nb_beam_size(+ _Beam_, - _Size_)
|
||||
|
||||
|
||||
Unify _Size_ with the number of elements in the beam _Beam_.
|
||||
|
||||
|
||||
*/
|
||||
/** @pred nb_heap(+ _DefaultSize_,- _Heap_)
|
||||
|
||||
|
||||
Create a _Heap_ with default size _DefaultSize_. Note that size
|
||||
will expand as needed.
|
||||
|
||||
|
||||
*/
|
||||
/** @pred nb_heap_add(+ _Heap_, + _Key_, + _Value_)
|
||||
|
||||
|
||||
Add _Key_- _Value_ to the heap _Heap_. The key is sorted on
|
||||
_Key_ only.
|
||||
|
||||
|
||||
*/
|
||||
/** @pred nb_heap_close(+ _Heap_)
|
||||
|
||||
|
||||
Close the heap _Heap_: no further elements can be added.
|
||||
|
||||
|
||||
*/
|
||||
/** @pred nb_heap_del(+ _Heap_, - _Key_, - _Value_)
|
||||
|
||||
|
||||
Remove element _Key_- _Value_ with smallest _Key_ in heap
|
||||
_Heap_. Fail if the heap is empty.
|
||||
|
||||
|
||||
*/
|
||||
/** @pred nb_heap_empty(+ _Heap_)
|
||||
|
||||
|
||||
Succeeds if _Heap_ is empty.
|
||||
|
||||
|
||||
*/
|
||||
/** @pred nb_heap_peek(+ _Heap_, - _Key_, - _Value_))
|
||||
|
||||
|
||||
_Key_- _Value_ is the element with smallest _Key_ in the heap
|
||||
_Heap_. Fail if the heap is empty.
|
||||
|
||||
|
||||
*/
|
||||
/** @pred nb_heap_size(+ _Heap_, - _Size_)
|
||||
|
||||
|
||||
Unify _Size_ with the number of elements in the heap _Heap_.
|
||||
|
||||
|
||||
*/
|
||||
/** @pred nb_queue(- _Queue_)
|
||||
|
||||
|
||||
Create a _Queue_.
|
||||
|
||||
|
||||
*/
|
||||
/** @pred nb_queue_close(+ _Queue_, - _Head_, ? _Tail_)
|
||||
|
||||
|
||||
Unify the queue _Queue_ with a difference list
|
||||
_Head_- _Tail_. The queue will now be empty and no further
|
||||
elements can be added.
|
||||
|
||||
|
||||
*/
|
||||
/** @pred nb_queue_dequeue(+ _Queue_, - _Element_)
|
||||
|
||||
|
||||
Remove _Element_ from the front of the queue _Queue_. Fail if
|
||||
the queue is empty.
|
||||
|
||||
|
||||
*/
|
||||
/** @pred nb_queue_empty(+ _Queue_)
|
||||
|
||||
|
||||
Succeeds if _Queue_ is empty.
|
||||
|
||||
|
||||
*/
|
||||
/** @pred nb_queue_enqueue(+ _Queue_, + _Element_)
|
||||
|
||||
|
||||
Add _Element_ to the back of the queue _Queue_.
|
||||
|
||||
|
||||
*/
|
||||
/** @pred nb_queue_peek(+ _Queue_, - _Element_)
|
||||
|
||||
|
||||
_Element_ is the front of the queue _Queue_. Fail if
|
||||
the queue is empty.
|
||||
|
||||
|
||||
*/
|
||||
/** @pred nb_queue_size(+ _Queue_, - _Size_)
|
||||
|
||||
|
||||
Unify _Size_ with the number of elements in the queue _Queue_.
|
||||
|
||||
|
||||
*/
|
||||
/** @} */
|
||||
|
501
packages/python/swig/yap4py/prolog/ordsets.yap
Normal file
501
packages/python/swig/yap4py/prolog/ordsets.yap
Normal file
@ -0,0 +1,501 @@
|
||||
/**
|
||||
* @file ordsets.yap
|
||||
* @author : R.A.O'Keefe
|
||||
* @date 22 May 1983
|
||||
* @author VITOR SANTOS COSTA <vsc@VITORs-MBP.lan>
|
||||
* @date 1999
|
||||
* @brief
|
||||
*
|
||||
*
|
||||
*/
|
||||
% This file has been included as an YAP library by Vitor Santos Costa, 1999
|
||||
|
||||
:- module(ordsets, [
|
||||
list_to_ord_set/2, % List -> Set
|
||||
merge/3, % OrdList x OrdList -> OrdList
|
||||
ord_add_element/3, % Set x Elem -> Set
|
||||
ord_del_element/3, % Set x Elem -> Set
|
||||
ord_disjoint/2, % Set x Set ->
|
||||
ord_insert/3, % Set x Elem -> Set
|
||||
ord_member/2, % Set -> Elem
|
||||
ord_intersect/2, % Set x Set ->
|
||||
ord_intersect/3, % Set x Set -> Set
|
||||
ord_intersection/3, % Set x Set -> Set
|
||||
ord_intersection/4, % Set x Set -> Set x Set
|
||||
ord_seteq/2, % Set x Set ->
|
||||
ord_setproduct/3, % Set x Set -> Set
|
||||
ord_subset/2, % Set x Set ->
|
||||
ord_subtract/3, % Set x Set -> Set
|
||||
ord_symdiff/3, % Set x Set -> Set
|
||||
ord_union/2, % Set^2 -> Set
|
||||
ord_union/3, % Set x Set -> Set
|
||||
ord_union/4, % Set x Set -> Set x Set,
|
||||
ord_empty/1, % -> Set
|
||||
ord_memberchk/2 % Element X Set
|
||||
]).
|
||||
|
||||
/** @defgroup ordsets Ordered Sets
|
||||
* @ingroup library
|
||||
* @{
|
||||
|
||||
The following ordered set manipulation routines are available once
|
||||
included with the `use_module(library(ordsets))` command. An
|
||||
ordered set is represented by a list having unique and ordered
|
||||
elements. Output arguments are guaranteed to be ordered sets, if the
|
||||
relevant inputs are. This is a slightly patched version of Richard
|
||||
O'Keefe's original library.
|
||||
|
||||
In this module, sets are represented by ordered lists with no
|
||||
duplicates. Thus {c,r,a,f,t} would be [a,c,f,r,t]. The ordering
|
||||
is defined by the @< family of term comparison predicates, which
|
||||
is the ordering used by sort/2 and setof/3.
|
||||
|
||||
The benefit of the ordered representation is that the elementary
|
||||
set operations can be done in time proportional to the Sum of the
|
||||
argument sizes rather than their Product. Some of the unordered
|
||||
set routines, such as member/2, length/2, select/3 can be used
|
||||
unchanged. The main difficulty with the ordered representation is
|
||||
remembering to use it!
|
||||
|
||||
|
||||
*/
|
||||
|
||||
|
||||
/** @pred ord_add_element(+ _Set1_, + _Element_, ? _Set2_)
|
||||
|
||||
|
||||
Inserting _Element_ in _Set1_ returns _Set2_. It should give
|
||||
exactly the same result as `merge(Set1, [Element], Set2)`, but a
|
||||
bit faster, and certainly more clearly. The same as ord_insert/3.
|
||||
|
||||
|
||||
*/
|
||||
/** @pred ord_del_element(+ _Set1_, + _Element_, ? _Set2_)
|
||||
|
||||
|
||||
Removing _Element_ from _Set1_ returns _Set2_.
|
||||
|
||||
|
||||
*/
|
||||
/** @pred ord_disjoint(+ _Set1_, + _Set2_)
|
||||
|
||||
|
||||
Holds when the two ordered sets have no element in common.
|
||||
|
||||
|
||||
*/
|
||||
/** @pred ord_insert(+ _Set1_, + _Element_, ? _Set2_)
|
||||
|
||||
|
||||
Inserting _Element_ in _Set1_ returns _Set2_. It should give
|
||||
exactly the same result as `merge(Set1, [Element], Set2)`, but a
|
||||
bit faster, and certainly more clearly. The same as ord_add_element/3.
|
||||
|
||||
|
||||
*/
|
||||
/** @pred ord_intersect(+ _Set1_, + _Set2_)
|
||||
|
||||
|
||||
Holds when the two ordered sets have at least one element in common.
|
||||
|
||||
|
||||
*/
|
||||
/** @pred ord_intersection(+ _Set1_, + _Set2_, ? _Intersection_)
|
||||
|
||||
Holds when Intersection is the ordered representation of _Set1_
|
||||
and _Set2_.
|
||||
|
||||
|
||||
*/
|
||||
/** @pred ord_intersection(+ _Set1_, + _Set2_, ? _Intersection_, ? _Diff_)
|
||||
|
||||
Holds when Intersection is the ordered representation of _Set1_
|
||||
and _Set2_. _Diff_ is the difference between _Set2_ and _Set1_.
|
||||
|
||||
|
||||
*/
|
||||
/** @pred ord_member(+ _Element_, + _Set_)
|
||||
|
||||
|
||||
Holds when _Element_ is a member of _Set_.
|
||||
|
||||
|
||||
*/
|
||||
/** @pred ord_seteq(+ _Set1_, + _Set2_)
|
||||
|
||||
|
||||
Holds when the two arguments represent the same set.
|
||||
|
||||
|
||||
*/
|
||||
/** @pred ord_setproduct(+ _Set1_, + _Set2_, - _Set_)
|
||||
|
||||
|
||||
If Set1 and Set2 are ordered sets, Product will be an ordered
|
||||
set of x1-x2 pairs.
|
||||
|
||||
|
||||
*/
|
||||
/** @pred ord_subset(+ _Set1_, + _Set2_)
|
||||
|
||||
|
||||
Holds when every element of the ordered set _Set1_ appears in the
|
||||
ordered set _Set2_.
|
||||
|
||||
|
||||
*/
|
||||
/** @pred ord_subtract(+ _Set1_, + _Set2_, ? _Difference_)
|
||||
|
||||
|
||||
Holds when _Difference_ contains all and only the elements of _Set1_
|
||||
which are not also in _Set2_.
|
||||
|
||||
|
||||
*/
|
||||
/** @pred ord_symdiff(+ _Set1_, + _Set2_, ? _Difference_)
|
||||
|
||||
|
||||
Holds when _Difference_ is the symmetric difference of _Set1_
|
||||
and _Set2_.
|
||||
|
||||
|
||||
*/
|
||||
/** @pred ord_union(+ _Set1_, + _Set2_, ? _Union_)
|
||||
|
||||
Holds when _Union_ is the union of _Set1_ and _Set2_.
|
||||
|
||||
|
||||
*/
|
||||
/** @pred ord_union(+ _Set1_, + _Set2_, ? _Union_, ? _Diff_)
|
||||
|
||||
Holds when _Union_ is the union of _Set1_ and _Set2_ and
|
||||
_Diff_ is the difference.
|
||||
|
||||
|
||||
|
||||
|
||||
*/
|
||||
/** @pred ord_union(+ _Sets_, ? _Union_)
|
||||
|
||||
|
||||
Holds when _Union_ is the union of the lists _Sets_.
|
||||
|
||||
|
||||
*/
|
||||
|
||||
/*
|
||||
:- mode
|
||||
list_to_ord_set(+, ?),
|
||||
merge(+, +, -),
|
||||
ord_disjoint(+, +),
|
||||
ord_disjoint(+, +, +, +, +),
|
||||
ord_insert(+, +, ?),
|
||||
ord_insert(+, +, +, +, ?),
|
||||
ord_intersect(+, +),
|
||||
ord_intersect(+, +, +, +, +),
|
||||
ord_intersect(+, +, ?),
|
||||
ord_intersect(+, +, +, +, +, ?),
|
||||
ord_seteq(+, +),
|
||||
ord_subset(+, +),
|
||||
ord_subset(+, +, +, +, +),
|
||||
ord_subtract(+, +, ?),
|
||||
ord_subtract(+, +, +, +, +, ?),
|
||||
ord_symdiff(+, +, ?),
|
||||
ord_symdiff(+, +, +, +, +, ?),
|
||||
ord_union(+, +, ?),
|
||||
ord_union(+, +, +, +, +, ?).
|
||||
*/
|
||||
|
||||
|
||||
%% @pred list_to_ord_set(+List, ?Set)
%
% Set is the ordered-set representation of the (possibly
% unordered, possibly duplicated) List.  sort/2 orders by the
% standard order of terms and removes duplicates, which is
% exactly the ordered-set invariant; the predicate exists to
% make that intent explicit.
list_to_ord_set(List, Set) :-
    sort(List, Set).
|
||||
|
||||
|
||||
%% @pred merge(+List1, +List2, -Merged)
%
% Merged is the stable merge of the two given lists.  If the
% inputs are not ordered the result is not meaningful.  Unlike
% the set operations, merging preserves duplicates: every copy
% of an element of either input appears in the output, e.g.
% merge("122357", "34568", "12233455678").  This routine is the
% template on which the other set operations are built.
merge([A|As], [B|Bs], [B|Rest]) :-
    A @> B, !,
    merge([A|As], Bs, Rest).
merge([A|As], Bs, [A|Rest]) :-
    Bs \== [], !,
    merge(As, Bs, Rest).
merge([], Bs, Bs) :- !.
merge(As, [], As).
|
||||
|
||||
|
||||
|
||||
%% @pred ord_disjoint(+Set1, +Set2)
%
% Holds when the two ordered sets share no element.  Behaviour
% is undefined if either argument is not an ordered set.
ord_disjoint([], _) :- !.
ord_disjoint(_, []) :- !.
ord_disjoint([A|As], [B|Bs]) :-
    compare(Order, A, B),
    ord_disjoint(Order, A, As, B, Bs).

% There is deliberately no clause for (=): a common element
% means the sets intersect, so the test must fail.
ord_disjoint(<, _, As, B, Bs) :-
    ord_disjoint(As, [B|Bs]).
ord_disjoint(>, A, As, _, Bs) :-
    ord_disjoint([A|As], Bs).
|
||||
|
||||
|
||||
|
||||
%% @pred ord_add_element(+Set1, +Element, ?Set2)
%
% Set2 is Set1 with Element inserted at its ordered position
% (no duplicate is created if it is already present).  It gives
% the same result as merge(Set1, [Element], Set2), but faster
% and more clearly.  Same as ord_insert/3; the work is done by
% the ord_insert/5 dispatcher.
ord_add_element([], New, [New]).
ord_add_element([E|Es], New, Set) :-
    compare(Order, E, New),
    ord_insert(Order, E, Es, New, Set).
|
||||
|
||||
|
||||
%% @pred ord_insert(+Set1, +Element, ?Set2)
%
% Same as ord_add_element/3: insert Element into the ordered
% set Set1 giving Set2; an already-present element is kept once.
ord_insert([], New, [New]).
ord_insert([E|Es], New, Set) :-
    compare(Order, E, New),
    ord_insert(Order, E, Es, New, Set).

% Dispatch on the order of the current head vs. the new element.
ord_insert(<, E, Es, New, [E|Set]) :-
    ord_insert(Es, New, Set).
ord_insert(=, E, Es, _, [E|Es]).
ord_insert(>, E, Es, New, [New,E|Es]).
|
||||
|
||||
|
||||
|
||||
%% @pred ord_intersect(+Set1, +Set2)
%
% Holds when the two ordered sets have at least one element in
% common.  The comparison is identity (via compare/3), not
% unification.
ord_intersect([A|As], [B|Bs]) :-
    compare(Order, A, B),
    ord_intersect(Order, A, As, B, Bs).

ord_intersect(=, _, _, _, _).
ord_intersect(<, _, As, B, Bs) :-
    ord_intersect(As, [B|Bs]).
ord_intersect(>, A, As, _, Bs) :-
    ord_intersect([A|As], Bs).
|
||||
|
||||
%% @pred ord_intersect(+Set1, +Set2, ?Intersection)
%
% Backward-compatible alias for ord_intersection/3.
ord_intersect(Xs, Ys, Common) :-
    ord_intersection(Xs, Ys, Common).
|
||||
|
||||
|
||||
%% @pred ord_intersection(+Set1, +Set2, ?Intersection)
%
% Intersection is the ordered set of elements common to the
% ordered sets Set1 and Set2.
ord_intersection([], _, []) :- !.
ord_intersection([_|_], [], []) :- !.
ord_intersection([A|As], [B|Bs], Intersection) :-
    (   A == B ->
        Intersection = [A|Common],
        ord_intersection(As, Bs, Common)
    ;   A @< B ->
        ord_intersection(As, [B|Bs], Intersection)
    ;   ord_intersection([A|As], Bs, Intersection)
    ).
|
||||
|
||||
%% @pred ord_intersection(+Set1, +Set2, ?Intersection, ?Difference)
%
% Intersection is the common part of the ordered sets Set1 and
% Set2; Difference is Set2 minus Set1, i.e. the elements of
% Set2 that do not occur in Set1.
ord_intersection([], Set2, [], Set2) :- !.
ord_intersection([_|_], [], [], []) :- !.
ord_intersection([A|As], [B|Bs], Intersection, Difference) :-
    (   A == B ->
        Intersection = [A|Common],
        ord_intersection(As, Bs, Common, Difference)
    ;   A @< B ->
        ord_intersection(As, [B|Bs], Intersection, Difference)
    ;   Difference = [B|Rest],
        ord_intersection([A|As], Bs, Intersection, Rest)
    ).
|
||||
|
||||
|
||||
%% @pred ord_seteq(+Set1, +Set2)
%
% The two arguments represent the same set.  Ordered-set
% representations are canonical, so this reduces to plain
% term identity.
ord_seteq(Set1, Set2) :-
    Set1 == Set2.
|
||||
|
||||
|
||||
|
||||
%% @pred ord_subset(+Set1, +Set2)
%
% Every element of the ordered set Set1 occurs in the ordered
% set Set2.
ord_subset([], _) :- !.
ord_subset([A|As], [B|Bs]) :-
    compare(Order, A, B),
    ord_subset(Order, A, As, B, Bs).

% There is deliberately no clause for (<): the element sorts
% before everything left in Set2, so it cannot be a member and
% the subset test must fail.
ord_subset(=, _, As, _, Bs) :-
    ord_subset(As, Bs).
ord_subset(>, A, As, _, Bs) :-
    ord_subset([A|As], Bs).
|
||||
|
||||
|
||||
|
||||
%% @pred ord_subtract(+Set1, +Set2, ?Difference)
%
% Difference contains exactly the elements of Set1 that do not
% occur in Set2.
ord_subtract(Set1, [], Set1) :- !.
ord_subtract([], _, []) :- !.
ord_subtract([A|As], [B|Bs], Difference) :-
    compare(Order, A, B),
    ord_subtract(Order, A, As, B, Bs, Difference).

ord_subtract(=, _, As, _, Bs, Difference) :-
    ord_subtract(As, Bs, Difference).
ord_subtract(<, A, As, B, Bs, [A|Difference]) :-
    ord_subtract(As, [B|Bs], Difference).
ord_subtract(>, A, As, _, Bs, Difference) :-
    ord_subtract([A|As], Bs, Difference).
|
||||
|
||||
|
||||
%% @pred ord_del_element(+Set1, +Element, ?Rest)
%
% Rest contains the elements of Set1 except for any occurrence
% of Element.  Succeeds (with Rest equal to Set1) even when
% Element is not a member.
ord_del_element([], _, []).
ord_del_element([E|Es], Victim, Rest) :-
    compare(Order, E, Victim),
    ord_del_element(Order, E, Es, Victim, Rest).

ord_del_element(=, _, Es, _, Es).
ord_del_element(<, E, Es, Victim, [E|Rest]) :-
    ord_del_element(Es, Victim, Rest).
ord_del_element(>, E, Es, _, [E|Es]).
|
||||
|
||||
|
||||
|
||||
%% @pred ord_symdiff(+Set1, +Set2, ?Difference)
%
% Difference is the symmetric difference of the ordered sets
% Set1 and Set2: the elements occurring in exactly one of them.
ord_symdiff(Set1, [], Set1) :- !.
ord_symdiff([], Set2, Set2) :- !.
ord_symdiff([A|As], [B|Bs], Difference) :-
    compare(Order, A, B),
    ord_symdiff(Order, A, As, B, Bs, Difference).

ord_symdiff(=, _, As, _, Bs, Difference) :-
    ord_symdiff(As, Bs, Difference).
ord_symdiff(<, A, As, B, Bs, [A|Difference]) :-
    ord_symdiff(As, [B|Bs], Difference).
ord_symdiff(>, A, As, B, Bs, [B|Difference]) :-
    ord_symdiff([A|As], Bs, Difference).
|
||||
|
||||
|
||||
|
||||
%% @pred ord_union(+Set1, +Set2, ?Union)
%
% Union is the ordered union of Set1 and Set2.  An element
% present in both inputs is kept only once.
ord_union([A|As], [], [A|As]).
ord_union([], Set2, Set2).
ord_union([A|As], [B|Bs], Union) :-
    compare(Order, A, B),
    ord_union(Order, A, As, B, Bs, Union).

ord_union(=, A, As, _, Bs, [A|Union]) :-
    ord_union(As, Bs, Union).
ord_union(<, A, As, B, Bs, [A|Union]) :-
    ord_union(As, [B|Bs], Union).
ord_union(>, A, As, B, Bs, [B|Union]) :-
    ord_union([A|As], Bs, Union).
|
||||
|
||||
|
||||
%% @pred ord_union(+Set1, +Set2, ?Union, ?Difference)
%
% Union is the union of the ordered sets Set1 and Set2;
% Difference is Set2 minus Set1 (the elements of Set2 that are
% new with respect to Set1).
ord_union(Set1, [], Set1, []) :- !.
ord_union([], Set2, Set2, Set2) :- !.
ord_union([A|As], [B|Bs], Union, Diff) :-
    compare(Order, A, B),
    ord_union(Order, A, As, B, Bs, Union, Diff).

ord_union(=, A, As, _, Bs, [A|Union], Diff) :-
    ord_union(As, Bs, Union, Diff).
ord_union(<, A, As, B, Bs, [A|Union], Diff) :-
    ord_union(As, [B|Bs], Union, Diff).
ord_union(>, A, As, B, Bs, [B|Union], [B|Diff]) :-
    ord_union([A|As], Bs, Union, Diff).
|
||||
|
||||
|
||||
|
||||
%% @pred ord_setproduct(+Set1, +Set2, ?Product)
%
% Product is the ordered set of X1-X2 pairs: the Cartesian
% product of the two ordered sets.  It cannot be run backwards
% to solve for Set1/Set2: an empty Product has infinitely many
% preimages, and other cases may have a large number.
ord_setproduct([], _, []).
ord_setproduct([X|Xs], Ys, Product) :-
    ord_setproduct(Ys, X, Product, Rest),
    ord_setproduct(Xs, Ys, Rest).

% Pair one left element X with every right element, as a
% difference list Pairs-Tail.
ord_setproduct([], _, Tail, Tail).
ord_setproduct([Y|Ys], X, [X-Y|Pairs], Tail) :-
    ord_setproduct(Ys, X, Pairs, Tail).
|
||||
|
||||
|
||||
%% @pred ord_member(+Element, +Set)
%
% Element occurs in the ordered set Set.  The scan fails as
% soon as it passes the position where Element would have to
% appear.
ord_member(El, [H|T]) :-
    compare(Order, El, H),
    ord_member(Order, El, T).

% No clause for (<): Element sorts before everything left in
% the set, so it is not a member.
ord_member(=, _, _).
ord_member(>, El, [H|T]) :-
    compare(Order, El, H),
    ord_member(Order, El, T).
|
||||
|
||||
%% @pred ord_union(+Sets, ?Union)
%
% Union is the ordered union of all the sets in the list Sets,
% computed by balanced pairwise merging (divide and conquer),
% which keeps the intermediate unions small.
ord_union([], []).
ord_union([Set|Sets], Union) :-
    length([Set|Sets], NumberOfSets),
    ord_union_all(NumberOfSets, [Set|Sets], Union, []).

% ord_union_all(+N, +Sets0, -Union, -Rest): Union is the union
% of the first N sets of Sets0; Rest is what remains of Sets0.
ord_union_all(N, Sets0, Union, Sets) :-
    (   N =:= 1 ->
        Sets0 = [Union|Sets]
    ;   N =:= 2 ->
        Sets0 = [Set1, Set2|Sets],
        ord_union(Set1, Set2, Union)
    ;   Half is N >> 1,
        Other is N - Half,
        ord_union_all(Half, Sets0, Left, Sets1),
        ord_union_all(Other, Sets1, Right, Sets),
        ord_union(Left, Right, Union)
    ).
|
||||
|
||||
% ord_empty(?Set): Set is (or unifies with) the empty ordered set.
ord_empty([]).
|
||||
|
||||
%% @pred ord_memberchk(+Element, +Set)
%
% Semi-deterministic membership test using identity (==).
% Scans the list linearly; it does not exploit the ordering.
ord_memberchk(Element, [Head|_]) :-
    Element == Head, !.
ord_memberchk(Element, [_|Tail]) :-
    ord_memberchk(Element, Tail).
|
||||
|
||||
/** @} */
|
||||
|
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user