Merge branch 'master' of xato:0517

commit 6a2d74ac6e
@@ -493,8 +493,7 @@ Prop Yap_GetPredPropByAtom(Atom at, Term cur_mod)
  return (p0);
}

inline static Prop GetPredPropByAtomHavingLockInThisModule(AtomEntry *ae,
                                                           Term cur_mod)
inline static Prop GetPredPropByAtomHavingLockInThisModule(AtomEntry *ae, Term cur_mod)
/* get predicate entry for ap/arity; create it if neccessary. */
{
  Prop p0;
@@ -528,14 +527,15 @@ Prop Yap_GetPredPropByAtomInThisModule(Atom at, Term cur_mod)
  return (p0);
}

Prop Yap_GetPredPropByFunc(Functor f, Term cur_mod)
/* get predicate entry for ap/arity; */
{
  Prop p0;

  FUNC_READ_LOCK(f);

  p0 = GetPredPropByFuncHavingLock(f, cur_mod);

  FUNC_READ_UNLOCK(f);
  return (p0);
}

@@ -3396,10 +3396,18 @@ X_API Functor YAP_IntToFunctor(Int i) { return TR_Functors[i]; }

X_API void *YAP_shared(void) { return LOCAL_shared; }

void yap_init(void) {}
X_API PredEntry *YAP_TopGoal(void)
{
  YAP_Functor f = Yap_MkFunctor(Yap_LookupAtom("yap_query"), 3);
  Term tmod = MkAtomTerm(Yap_LookupAtom("yapi"));
  PredEntry *p = RepPredProp(Yap_GetPredPropByFunc(f, tmod));
  return p;
}

void yap_init(void) {}

#endif // C_INTERFACE_C

/**
@}
*/
/**
@}
*/
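The lookup helpers above, Yap_GetPredPropByFunc() and the new YAP_TopGoal(), are the C-level way of mapping a name/arity pair in a module to a predicate entry. A minimal hedged sketch of that use, built only from names visible in this hunk; the foo/2 predicate and the user module are arbitrary examples, and the helper name is hypothetical:

~~~
// Sketch only: resolve user:foo/2 to its PredEntry, the same way
// YAP_TopGoal() above resolves yapi:yap_query/3.
static PredEntry *lookup_foo_2(void) {
  Functor f  = Yap_MkFunctor(Yap_LookupAtom("foo"), 2);  // name/arity pair
  Term   mod = MkAtomTerm(Yap_LookupAtom("user"));       // home module
  Prop   p0  = Yap_GetPredPropByFunc(f, mod);  // takes/releases FUNC_READ_LOCK
  return RepPredProp(p0);                      // property -> predicate entry
}
~~~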
C/cdmgr.c (20 changed lines)
@@ -1647,6 +1647,14 @@ bool Yap_constPred(PredEntry *p) {
  pred_flags_t pflags;
  pflags = p->PredFlags;

  if (pflags &
      ((UserCPredFlag | CArgsPredFlag | NumberDBPredFlag | AtomDBPredFlag |
        TestPredFlag | AsmPredFlag | CPredFlag | BinaryPredFlag)))
    return true;

  if (p->PredFlags &
      (SysExportPredFlag | MultiFileFlag | DynamicPredFlag | LogUpdatePredFlag))
    return false;
  if (Yap_isSystemModule(p->ModuleOfPred)) {
    if (p->cs.p_code.NOfClauses == 0) {
      p->src.OwnerFile = Yap_source_file_name();
@@ -1656,15 +1664,7 @@ bool Yap_constPred(PredEntry *p) {
      return false;
    }
  }
  if (pflags &
      ((UserCPredFlag | CArgsPredFlag | NumberDBPredFlag | AtomDBPredFlag |
        TestPredFlag | AsmPredFlag | CPredFlag | BinaryPredFlag)))
    return true;

  if (p->PredFlags &
      (SysExportPredFlag | MultiFileFlag | DynamicPredFlag | LogUpdatePredFlag))
    return false;

  return false;
}

@@ -1719,7 +1719,6 @@ bool Yap_addclause(Term t, yamop *cp, Term tmode, Term mod, Term *t4ref)
    at = NameOfFunctor(f);
    p = RepPredProp(PredPropByFunc(f, mod));
  }
  Yap_PutValue(AtomAbol, TermNil);
  PELOCK(20, p);
  /* we are redefining a prolog module predicate */
  if (Yap_constPred(p)) {
@@ -1727,6 +1726,7 @@ bool Yap_addclause(Term t, yamop *cp, Term tmode, Term mod, Term *t4ref)
    UNLOCKPE(30, p);
    return false;
  }
  Yap_PutValue(AtomAbol, TermNil);
  pflags = p->PredFlags;
  /* we are redefining a prolog module predicate */
  if (pflags & MegaClausePredFlag) {
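The two flag groups tested by Yap_constPred() above decide whether a predicate body lives in C/assembly (and thus cannot take new Prolog clauses) or in modifiable source-level code. A small illustrative sketch of the same bit-mask test; the macro and function names introduced here are hypothetical, only the individual *PredFlag constants come from the hunk:

~~~
// Hypothetical helpers illustrating the PredFlags tests used above.
#define YAPI_C_CODED_FLAGS                                              \
  (UserCPredFlag | CArgsPredFlag | NumberDBPredFlag | AtomDBPredFlag |  \
   TestPredFlag | AsmPredFlag | CPredFlag | BinaryPredFlag)
#define YAPI_SOURCE_FLAGS \
  (SysExportPredFlag | MultiFileFlag | DynamicPredFlag | LogUpdatePredFlag)

static inline bool pred_is_c_coded(const PredEntry *p) {
  return (p->PredFlags & YAPI_C_CODED_FLAGS) != 0;   // cannot receive clauses
}
static inline bool pred_is_source_level(const PredEntry *p) {
  return (p->PredFlags & YAPI_SOURCE_FLAGS) != 0;    // assert/retract is fine
}
~~~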
CXX/yapdb.hh (14 changed lines)
@@ -46,7 +46,7 @@ class YAPModule;
class YAPModule : protected YAPAtomTerm {
  friend class YAPPredicate;
  friend class YAPModuleProp;
  YAPModule(Term t) : YAPAtomTerm(t){};
  YAPModule(YAP_Term t) : YAPAtomTerm(t){};
  Term t() { return gt(); }
  Term curModule() { CACHE_REGS return Yap_CurrentModule(); }

@@ -138,6 +138,11 @@ protected:

  PredEntry *asPred() { return ap; };

  /// Empty constructor for predicates
  ///
  /// Just do nothing.
  inline YAPPredicate() {
  }
  /// String constructor for predicates
  ///
  /// It also communicates the array of arguments t[]
@@ -181,6 +186,13 @@ protected:
  ///
  inline YAPPredicate(PredEntry *pe) { ap = pe; }

  /// Functor constructor for predicates, is given a specific module.
  /// This version avoids manufacturing objects
  inline YAPPredicate(Functor f, Term mod) {
    ap = RepPredProp(PredPropByFunc(f, mod));
  }

public:

  /// Functor constructor for predicates
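The new low-level constructor declared above builds a YAPPredicate straight from a Functor and a module Term, without going through the YAPFunctor/YAPModule wrapper objects. A hedged usage sketch; lists:member/2 is an arbitrary example and the helper name is hypothetical:

~~~
// Sketch: obtain a handle on lists:member/2 using the new constructor.
static YAPPredicate member_2_handle(void) {
  Functor f   = Yap_MkFunctor(Yap_LookupAtom("member"), 2);
  Term    mod = MkAtomTerm(Yap_LookupAtom("lists"));
  return YAPPredicate(f, mod);   // wraps RepPredProp(PredPropByFunc(f, mod))
}
~~~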
CXX/yapi.cpp (478 changed lines)
@@ -435,16 +435,7 @@ void YAPQuery::openQuery(Term t)
      XREGS[i + 1] = ts[i];
    }
  }
  // oq = LOCAL_execution;
  // LOCAL_execution = this;
  q_open = true;
  q_state = 0;
  q_flags = true; // PL_Q_PASS_EXCEPTION;

  q_p = P;
  q_cp = CP;
  // make sure this is safe
  q_handles = LOCAL_CurSlot;
  setNext();
}

bool YAPEngine::call(YAPPredicate ap, YAPTerm ts[])
@@ -833,6 +824,7 @@ void Yap_displayWithJava(int c)

#endif

void YAPEngine::doInit(YAP_file_type_t BootMode)
{
  if ((BootMode = YAP_Init(&engine_args->init_args)) == YAP_FOUND_BOOT_ERROR)
@@ -852,228 +844,264 @@ void YAPEngine::doInit(YAP_file_type_t BootMode)

  do_init_python();
#endif

  YAPQuery initq = YAPQuery(YAPAtom("$init_system"));
  if (initq.next())
  {
    initq.cut();
  }
  else
  {
    // should throw exception
  }
  YAP_Functor f = YAP_MkFunctor(YAP_LookupAtom("$init_system"), 3);
  YAP_PredEntryPtr p = YAP_FunctorToPred( f );
  YAPQuery initq = YAPQuery(YAPPredicate(p), nullptr);
  if (initq.next())
  {
    initq.cut();
  }
  else
  {
    // should throw exception
  }
}

YAPEngine::YAPEngine(int argc, char *argv[],
                     YAPCallback *cb)
    : _callback(0) { // a single engine can be active

  YAP_file_type_t BootMode;
  engine_args = new YAPEngineArgs();
  BootMode = YAP_parse_yap_arguments(argc, argv, &engine_args->init_args);
  // delYAPCallback()b
  // if (cb)
  // setYAPCallback(cb);
  doInit(BootMode);
}

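Taken together, the constructor and doInit() above define the embedding sequence: parse argv, boot the engine, then run $init_system/3. A minimal hedged sketch of an embedding client, assuming only the constructors and YAPQuery::command() declared elsewhere in this commit; the goal text is an arbitrary example, not part of the patch:

~~~
// Sketch of a minimal embedding main(), assuming the YAP C++ headers.
int main(int argc, char *argv[]) {
  YAPEngine yap(argc, argv, nullptr);            // boots, runs $init_system/3
  YAPQuery q("current_prolog_flag(version, V)"); // text constructor (yapq.hh)
  return q.command() ? 0 : 1;                    // command() = next() + close()
}
~~~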
YAPPredicate::YAPPredicate(YAPAtom at)
|
||||
{
|
||||
CACHE_REGS
|
||||
ap = RepPredProp(PredPropByAtom(at.a, Yap_CurrentModule()));
|
||||
}
|
||||
|
||||
YAPPredicate::YAPPredicate(YAPAtom at, uintptr_t arity)
|
||||
{
|
||||
CACHE_REGS
|
||||
if (arity)
|
||||
{
|
||||
Functor f = Yap_MkFunctor(at.a, arity);
|
||||
ap = RepPredProp(PredPropByFunc(f, Yap_CurrentModule()));
|
||||
}
|
||||
else
|
||||
{
|
||||
ap = RepPredProp(PredPropByAtom(at.a, Yap_CurrentModule()));
|
||||
}
|
||||
}
|
||||
|
||||
/// auxiliary routine to find a predicate in the current module.
|
||||
PredEntry *YAPPredicate::getPred(YAPTerm &tt, Term *&outp)
|
||||
{
|
||||
CACHE_REGS
|
||||
Term m = Yap_CurrentModule(), t = tt.term();
|
||||
t = Yap_StripModule(t, &m);
|
||||
if (IsVarTerm(t) || IsNumTerm(t))
|
||||
{
|
||||
if (IsVarTerm(t))
|
||||
Yap_ThrowError(INSTANTIATION_ERROR, tt.term(), 0);
|
||||
else if (IsNumTerm(t))
|
||||
Yap_ThrowError(TYPE_ERROR_CALLABLE, tt.term(), 0);
|
||||
throw YAPError();
|
||||
}
|
||||
tt.put(t);
|
||||
if (IsAtomTerm(t))
|
||||
{
|
||||
ap = RepPredProp(PredPropByAtom(AtomOfTerm(t), m));
|
||||
outp = (Term *)NULL;
|
||||
return ap;
|
||||
}
|
||||
else if (IsPairTerm(t))
|
||||
{
|
||||
Term ts[2];
|
||||
ts[0] = t;
|
||||
ts[1] = m;
|
||||
t = Yap_MkApplTerm(FunctorCsult, 2, ts);
|
||||
tt.put(t);
|
||||
outp = RepAppl(t) + 1;
|
||||
}
|
||||
Functor f = FunctorOfTerm(t);
|
||||
if (IsExtensionFunctor(f))
|
||||
{
|
||||
Yap_ThrowError(TYPE_ERROR_CALLABLE, t, 0);
|
||||
}
|
||||
else
|
||||
{
|
||||
ap = RepPredProp(PredPropByFunc(f, m));
|
||||
outp = RepAppl(t) + 1;
|
||||
}
|
||||
return ap;
|
||||
}
|
||||
|
||||
X_API bool YAPPrologPredicate::assertClause(YAPTerm cl, bool last,
|
||||
YAPTerm source)
|
||||
{
|
||||
CACHE_REGS
|
||||
|
||||
RECOVER_MACHINE_REGS();
|
||||
Term tt = cl.gt();
|
||||
Term sourcet;
|
||||
Term ntt = cl.gt();
|
||||
if (source.initialized())
|
||||
sourcet = source.gt();
|
||||
else
|
||||
sourcet = TermZERO;
|
||||
yamop *codeaddr = Yap_cclause(tt, ap->ArityOfPE, Yap_CurrentModule(),
|
||||
sourcet); /* vsc: give the number of arguments
|
||||
to cclause in case there is overflow */
|
||||
if (LOCAL_ErrorMessage)
|
||||
{
|
||||
RECOVER_MACHINE_REGS();
|
||||
return false;
|
||||
}
|
||||
Term *tref = &ntt;
|
||||
if (Yap_addclause(ntt, codeaddr, (last ? TermAssertz : TermAsserta),
|
||||
Yap_CurrentModule(), tref))
|
||||
{
|
||||
RECOVER_MACHINE_REGS();
|
||||
}
|
||||
return tref;
|
||||
}
|
||||
|
||||
bool YAPPrologPredicate::assertFact(YAPTerm *cl, bool last)
|
||||
{
|
||||
CACHE_REGS
|
||||
arity_t i;
|
||||
RECOVER_MACHINE_REGS();
|
||||
Term tt = AbsAppl(HR);
|
||||
*HR++ = (CELL)(ap->FunctorOfPred);
|
||||
for (i = 0; i < ap->ArityOfPE; i++, cl++)
|
||||
*HR++ = cl->gt();
|
||||
yamop *codeaddr = Yap_cclause(tt, ap->ArityOfPE, Yap_CurrentModule(),
|
||||
tt); /* vsc: give the number of arguments
|
||||
to cclause in case there is overflow */
|
||||
if (LOCAL_ErrorMessage)
|
||||
{
|
||||
RECOVER_MACHINE_REGS();
|
||||
return false;
|
||||
}
|
||||
Term *tref = &tt;
|
||||
if (Yap_addclause(tt, codeaddr, (last ? TermAssertz : TermAsserta),
|
||||
Yap_CurrentModule(), tref))
|
||||
{
|
||||
RECOVER_MACHINE_REGS();
|
||||
}
|
||||
return tref;
|
||||
}
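YAPPrologPredicate::assertFact() above builds the fact term directly on the global stack from an array of argument terms, then compiles and adds it like any other clause. A hedged sketch of how a client might call it; the age/2 predicate, its arguments, and the helper name are arbitrary examples:

~~~
// Sketch only: assert age(joe, 30) as the last clause of the predicate
// wrapped by age_2 (a YAPPrologPredicate obtained elsewhere).
static bool add_age_fact(YAPPrologPredicate &age_2) {
  YAPTerm args[2] = { YAPTerm(MkAtomTerm(Yap_LookupAtom("joe"))),
                      YAPTerm(MkIntTerm(30)) };
  return age_2.assertFact(args, /*last=*/true);
}
~~~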
|
||||
|
||||
void *YAPPrologPredicate::retractClause(YAPTerm skeleton, bool all)
|
||||
{
|
||||
return 0;
|
||||
}
|
||||
|
||||
std::string YAPError::text()
|
||||
{
|
||||
char buf[256];
|
||||
std::string s = "";
|
||||
if (LOCAL_ActiveError->errorFunction)
|
||||
{
|
||||
s += LOCAL_ActiveError->errorFile;
|
||||
s += ":";
|
||||
sprintf(buf, "%ld", (long int)LOCAL_ActiveError->errorLine);
|
||||
s += buf;
|
||||
s += ":0 in C-code";
|
||||
}
|
||||
if (LOCAL_ActiveError->prologPredLine)
|
||||
{
|
||||
s += "\n";
|
||||
s += LOCAL_ActiveError->prologPredFile->StrOfAE;
|
||||
s += ":";
|
||||
sprintf(buf, "%ld", (long int)LOCAL_ActiveError->prologPredLine);
|
||||
s += buf; // std::to_string(LOCAL_ActiveError->prologPredLine) ;
|
||||
// YAPIntegerTerm(LOCAL_ActiveError->prologPredLine).text();
|
||||
s += ":0 ";
|
||||
s += LOCAL_ActiveError->prologPredModule;
|
||||
s += ":";
|
||||
s += (LOCAL_ActiveError->prologPredName)->StrOfAE;
|
||||
s += "/";
|
||||
sprintf(buf, "%ld", (long int)LOCAL_ActiveError->prologPredArity);
|
||||
s += // std::to_string(LOCAL_ActiveError->prologPredArity);
|
||||
buf;
|
||||
}
|
||||
s += " error ";
|
||||
if (LOCAL_ActiveError->classAsText != nullptr)
|
||||
s += LOCAL_ActiveError->classAsText->StrOfAE;
|
||||
s += ".";
|
||||
s += LOCAL_ActiveError->errorAsText->StrOfAE;
|
||||
s += ".\n";
|
||||
if (LOCAL_ActiveError->errorTerm)
|
||||
{
|
||||
Term t = LOCAL_ActiveError->errorTerm->Entry;
|
||||
if (t)
|
||||
{
|
||||
s += "error term is: ";
|
||||
s += YAPTerm(t).text();
|
||||
s += "\n";
|
||||
}
|
||||
}
|
||||
printf("%s\n", s.c_str());
|
||||
return s.c_str();
|
||||
}
|
||||
|
||||
void YAPEngine::reSet()
|
||||
{
|
||||
/* ignore flags for now */
|
||||
BACKUP_MACHINE_REGS();
|
||||
Yap_RebootHandles(worker_id);
|
||||
while (B->cp_b)
|
||||
B = B->cp_b;
|
||||
P = FAILCODE;
|
||||
Yap_exec_absmi(true, YAP_EXEC_ABSMI);
|
||||
/* recover stack space */
|
||||
HR = B->cp_h;
|
||||
TR = B->cp_tr;
|
||||
#ifdef DEPTH_LIMIT
|
||||
DEPTH = B->cp_depth;
|
||||
DEPTH = B->cp_depth;
|
||||
#endif /* DEPTH_LIMIT */
|
||||
YENV = ENV = B->cp_env;
|
||||
YENV = ENV = B->cp_env;
|
||||
|
||||
RECOVER_MACHINE_REGS();
|
||||
}
|
||||
RECOVER_MACHINE_REGS();
|
||||
}
|
||||
|
||||
YAPError::YAPError(yap_error_number id, YAPTerm culprit, std::string txt)
|
||||
{
|
||||
ID = id;
|
||||
goal = culprit.text();
|
||||
info = txt;
|
||||
}
Term YAPEngine::top_level(std::string s)
{
  /// parse string s and make a term with the variable names available.
  Term tp;
  ARG1 = YAP_ReadBuffer(s.data(), &tp);
  ARG2 = tp;
  ARG3 = MkVarTerm();
  YAPPredicate p = YAPPredicate(YAP_TopGoal());
  YAPQuery *Q = new YAPQuery(p, 0);
  if (Q->next()) {
    Term ts[2];
    ts[0] = MkAddressTerm(Q);
    ts[1] = ARG3;
    return YAP_MkApplTerm(YAP_MkFunctor(YAP_LookupAtom("t"), 2), 2, ts);
  }
  YAPError();
  return 0;
}

Term YAPEngine::next_answer(YAPQuery *&Q) {
  /// advance the query Q to its next solution, if any.
  if (Q->next()) {
    Term ts[2];
    ts[0] = MkAddressTerm(Q);
    ts[1] = ARG3;
    return YAP_MkApplTerm(YAP_MkFunctor(YAP_LookupAtom("t"), 2), 2, ts);
  }
  return 0;
}
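A hedged sketch of how an embedding client might drive the two new entry points from the outside, replicating what top_level() does internally; the register usage and the yapi:yap_query/3 wrapper are taken from the code above, while the helper name is hypothetical:

~~~
// Sketch: parse a goal string and enumerate its solutions via yapi:yap_query/3.
static void enumerate_goal(const std::string &text) {
  Term tp;
  ARG1 = YAP_ReadBuffer(text.data(), &tp);  // goal plus variable-name list
  ARG2 = tp;
  ARG3 = MkVarTerm();                       // will hold the bindings
  YAPPredicate p(YAP_TopGoal());            // yapi:yap_query/3, added above
  YAPQuery q(p, 0);
  while (q.next()) {
    // inspect q.namedVars() / ARG3 here for the current answer
  }
  q.close();
}
~~~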
CXX/yapq.hh (130 changed lines)
@ -2,7 +2,7 @@
|
||||
/**
|
||||
* @file yapq.hh
|
||||
*
|
||||
* @defgroup yap-cplus-query-hanadlinge Query Handling in the YAP interface.
|
||||
* @defgroup yap-cplus-query-handling Query Handling in the YAP interface.
|
||||
* @brief Engine and Query Management
|
||||
*
|
||||
* @ingroup yap-cplus-interface
|
||||
@ -46,9 +46,24 @@ class YAPQuery : public YAPPredicate
|
||||
// temporaries
|
||||
Term tnames, tgoal ;
|
||||
|
||||
inline void setNext() { // oq = LOCAL_execution;
|
||||
// LOCAL_execution = this;
|
||||
q_open = true;
|
||||
q_state = 0;
|
||||
q_flags = true; // PL_Q_PASS_EXCEPTION;
|
||||
|
||||
q_p = P;
|
||||
q_cp = CP;
|
||||
// make sure this is safe
|
||||
q_handles = LOCAL_CurSlot;
|
||||
}
|
||||
|
||||
void openQuery(Term t);
|
||||
|
||||
|
||||
public:
|
||||
YAPQuery() {
|
||||
};
|
||||
/// main constructor, uses a predicate and an array of terms
|
||||
///
|
||||
/// It is given a YAPPredicate _p_ , and an array of terms that must have at
|
||||
@ -74,63 +89,68 @@ public:
|
||||
/// goal.
|
||||
inline YAPQuery(const char *s) : YAPPredicate(s, tgoal, tnames)
|
||||
{
|
||||
__android_log_print(ANDROID_LOG_INFO, "YAPDroid", "got game %ld",
|
||||
LOCAL_CurSlot);
|
||||
if (!ap)
|
||||
return;
|
||||
__android_log_print(ANDROID_LOG_INFO, "YAPDroid", "%s", vnames.text());
|
||||
goal = YAPTerm(tgoal);
|
||||
names = YAPPairTerm(tnames);
|
||||
openQuery(tgoal);
|
||||
};
|
||||
// inline YAPQuery() : YAPPredicate(s, tgoal, tnames)
|
||||
// {
|
||||
// __android_log_print(ANDROID_LOG_INFO, "YAPDroid", "got game %ld",
|
||||
// LOCAL_CurSlot);
|
||||
// if (!ap)
|
||||
// return;
|
||||
// __android_log_print(ANDROID_LOG_INFO, "YAPDroid", "%s", vnames.text());
|
||||
// goal = YAPTerm(tgoal);
|
||||
// names = YAPPairTerm(tnames);
|
||||
// openQuery(tgoal);
|
||||
// };
|
||||
/// string constructor with just an atom
|
||||
///
|
||||
/// It is given an atom, and a Prolog term that should be a callable
|
||||
/// goal, say `main`, `init`, `live`.
|
||||
inline YAPQuery(YAPAtom g) : YAPPredicate(g)
|
||||
{
|
||||
goal = YAPAtomTerm(g);
|
||||
names = YAPPairTerm( );
|
||||
openQuery(goal.term());
|
||||
};
|
||||
/// It i;
|
||||
///};
|
||||
|
||||
/// set flags for query execution, currently only for exception handling
|
||||
void setFlag(int flag) { q_flags |= flag; }
|
||||
/// reset flags for query execution, currently only for exception handling
|
||||
void resetFlag(int flag) { q_flags &= ~flag; }
|
||||
/// first query
|
||||
///
|
||||
/// actually implemented by calling the next();
|
||||
inline bool first() { return next(); }
|
||||
/// ask for the next solution of the current query
|
||||
/// same call for every solution
|
||||
bool next();
|
||||
/// does this query have open choice-points?
|
||||
/// or is it deterministic?
|
||||
bool deterministic();
|
||||
/// represent the top-goal
|
||||
const char *text();
|
||||
/// remove alternatives in the current search space, and finish the current
|
||||
/// query
|
||||
/// finish the current query: undo all bindings.
|
||||
void close();
|
||||
/// query variables.
|
||||
void cut();
|
||||
Term namedVars() {return names.term(); };
|
||||
/// query variables, but copied out
|
||||
std::vector<Term> namedVarsVector() {
|
||||
return names.listToArray(); };
|
||||
/// convert a ref to a binding.
|
||||
YAPTerm getTerm(yhandle_t t);
|
||||
/// simple YAP Query;
|
||||
/// just calls YAP and reports success or failure, Useful when we just
|
||||
/// want things done, eg YAPCommand("load_files(library(lists), )")
|
||||
inline bool command()
|
||||
{
|
||||
bool rc = next();
|
||||
close();
|
||||
return rc;
|
||||
};
|
||||
};
|
||||
|
||||
// Java support
|
||||
@ -408,8 +428,12 @@ public:
|
||||
{
|
||||
return setYapFlag(MkAtomTerm(Yap_LookupAtom(arg.data())), MkAtomTerm(Yap_LookupAtom(path.data())));
|
||||
};
|
||||
};
|
||||
|
||||
Term top_level( std::string s);
|
||||
Term next_answer(YAPQuery * &Q);
|
||||
|
||||
};
|
||||
|
||||
#endif /* YAPQ_HH */
|
||||
|
||||
/// @}
|
||||
/// @}
|
||||
|
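The YAPQuery class above is the main query-side API of the C++ interface. A hedged usage sketch built only from the members declared in it; the member/2 goal and the helper name are arbitrary examples:

~~~
// Sketch: run a textual goal and walk through its solutions.
static void list_solutions() {
  YAPQuery q("member(X, [1,2,3])");   // text constructor declared above
  while (q.next()) {                  // one call per solution
    Term bindings = q.namedVars();    // variable-name/value pairs as a term
    (void)bindings;                   // print or inspect as needed
  }
  q.close();                          // undo bindings, release handles
}
~~~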
cmake/docs/source/conf.py (new file, 197 lines)
@ -0,0 +1,197 @@
|
||||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# YAP documentation build configuration file, created by
|
||||
# sphinx-quickstart on Sun Mar 26 10:27:55 2017.
|
||||
#
|
||||
# This file is execfile()d with the current directory set to its
|
||||
# containing dir.
|
||||
#
|
||||
# Note that not all possible configuration values are present in this
|
||||
# autogenerated file.
|
||||
#
|
||||
# All configuration values have a default; values that are commented out
|
||||
# serve to show the default.
|
||||
|
||||
# If extensions (or modules to document with autodoc) are in another directory,
|
||||
# add these directories to sys.path here. If the directory is relative to the
|
||||
# documentation root, use os.path.abspath to make it absolute, like shown here.
|
||||
#
|
||||
# import os
|
||||
# import sys
|
||||
# sys.path.insert(0, os.path.abspath('.'))
|
||||
from recommonmark.parser import CommonMarkParser
|
||||
|
||||
|
||||
# -- General configuration ------------------------------------------------
|
||||
|
||||
# If your documentation needs a minimal Sphinx version, state it here.
|
||||
#
|
||||
# needs_sphinx = '1.0'
|
||||
|
||||
# Add any Sphinx extension module names here, as strings. They can be
|
||||
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
|
||||
# ones.
|
||||
extensions = ['sphinx.ext.autodoc',
|
||||
'sphinx.ext.doctest',
|
||||
'sphinx.ext.intersphinx',
|
||||
'sphinx.ext.todo',
|
||||
'sphinx.ext.coverage',
|
||||
'sphinx.ext.mathjax',
|
||||
'sphinx.ext.ifconfig',
|
||||
'sphinx.ext.viewcode',
|
||||
'sphinx.ext.githubpages',
|
||||
'breathe'
|
||||
]
|
||||
|
||||
breathe_projects = { "yap": "/Users/vsc/github/yap-6.3/cmake/docs/xml" }
|
||||
breathe_default_project = "yap"
|
||||
|
||||
# Add any paths that contain templates here, relative to this directory.
|
||||
templates_path = ['_templates']
|
||||
|
||||
# The suffix(es) of source filenames.
|
||||
# You can specify multiple suffix as a list of string:
|
||||
#
|
||||
source_suffix = ['.rst', '.md']
|
||||
# source_suffix = '.rst'
|
||||
|
||||
# The master toctree document.
|
||||
master_doc = 'index'
|
||||
|
||||
# General information about the project.
|
||||
project = 'YAP'
|
||||
copyright = '2017, Vitor Santos Costa'
|
||||
author = 'Vitor Santos Costa'
|
||||
|
||||
# The version info for the project you're documenting, acts as replacement for
|
||||
# |version| and |release|, also used in various other places throughout the
|
||||
# built documents.
|
||||
#
|
||||
# The short X.Y version.
|
||||
version = '6.3'
|
||||
# The full version, including alpha/beta/rc tags.
|
||||
release = '6.3.5'
|
||||
|
||||
# The language for content autogenerated by Sphinx. Refer to documentation
|
||||
# for a list of supported languages.
|
||||
#
|
||||
# This is also used if you do content translation via gettext catalogs.
|
||||
# Usually you set "language" from the command line for these cases.
|
||||
language = None
|
||||
|
||||
# List of patterns, relative to source directory, that match files and
|
||||
# directories to ignore when looking for source files.
|
||||
# This patterns also effect to html_static_path and html_extra_path
|
||||
exclude_patterns = []
|
||||
|
||||
# The name of the Pygments (syntax highlighting) style to use.
|
||||
pygments_style = 'sphinx'
|
||||
|
||||
# If true, `todo` and `todoList` produce output, else they produce nothing.
|
||||
todo_include_todos = True
|
||||
|
||||
source_parsers = {
|
||||
'.md': 'recommonmark.parser.CommonMarkParser',
|
||||
}
|
||||
# -- Options for HTML output ----------------------------------------------
|
||||
|
||||
# The theme to use for HTML and HTML Help pages. See the documentation for
|
||||
# a list of builtin themes.
|
||||
#
|
||||
html_theme = 'alabaster'
|
||||
|
||||
# Theme options are theme-specific and customize the look and feel of a theme
|
||||
# further. For a list of options available for each theme, see the
|
||||
# documentation.
|
||||
#
|
||||
# html_theme_options = {}
|
||||
|
||||
# Add any paths that contain custom static files (such as style sheets) here,
|
||||
# relative to this directory. They are copied after the builtin static files,
|
||||
# so a file named "default.css" will overwrite the builtin "default.css".
|
||||
html_static_path = ['_static']
|
||||
|
||||
|
||||
# -- Options for HTMLHelp output ------------------------------------------
|
||||
|
||||
# Output file base name for HTML help builder.
|
||||
htmlhelp_basename = 'YAPdoc'
|
||||
|
||||
|
||||
# -- Options for LaTeX output ---------------------------------------------
|
||||
|
||||
latex_elements = {
|
||||
# The paper size ('letterpaper' or 'a4paper').
|
||||
#
|
||||
# 'papersize': 'letterpaper',
|
||||
|
||||
# The font size ('10pt', '11pt' or '12pt').
|
||||
#
|
||||
# 'pointsize': '10pt',
|
||||
|
||||
# Additional stuff for the LaTeX preamble.
|
||||
#
|
||||
# 'preamble': '',
|
||||
|
||||
# Latex figure (float) alignment
|
||||
#
|
||||
# 'figure_align': 'htbp',
|
||||
}
|
||||
|
||||
# Grouping the document tree into LaTeX files. List of tuples
|
||||
# (source start file, target name, title,
|
||||
# author, documentclass [howto, manual, or own class]).
|
||||
latex_documents = [
|
||||
(master_doc, 'YAP.tex', 'YAP Documentation',
|
||||
'Vitor Santos Costa', 'manual'),
|
||||
]
|
||||
|
||||
|
||||
# -- Options for manual page output ---------------------------------------
|
||||
|
||||
# One entry per manual page. List of tuples
|
||||
# (source start file, name, description, authors, manual section).
|
||||
man_pages = [
|
||||
(master_doc, 'yap', 'YAP Documentation',
|
||||
[author], 1)
|
||||
]
|
||||
|
||||
|
||||
# -- Options for Texinfo output -------------------------------------------
|
||||
|
||||
# Grouping the document tree into Texinfo files. List of tuples
|
||||
# (source start file, target name, title, author,
|
||||
# dir menu entry, description, category)
|
||||
texinfo_documents = [
|
||||
(master_doc, 'YAP', 'YAP Documentation',
|
||||
author, 'YAP', 'One line description of project.',
|
||||
'Miscellaneous'),
|
||||
]
|
||||
|
||||
|
||||
|
||||
# -- Options for Epub output ----------------------------------------------
|
||||
|
||||
# Bibliographic Dublin Core info.
|
||||
epub_title = project
|
||||
epub_author = author
|
||||
epub_publisher = author
|
||||
epub_copyright = copyright
|
||||
|
||||
# The unique identifier of the text. This can be a ISBN number
|
||||
# or the project homepage.
|
||||
#
|
||||
# epub_identifier = ''
|
||||
|
||||
# A unique identification for the text.
|
||||
#
|
||||
# epub_uid = ''
|
||||
|
||||
# A list of files that should not be packed into the epub file.
|
||||
epub_exclude_files = ['search.html']
|
||||
|
||||
|
||||
|
||||
# Example configuration for intersphinx: refer to the Python standard library.
|
||||
intersphinx_mapping = {'https://docs.python.org/': None}
|
cmake/docs/source/index.rst (new file, 45 lines)
@ -0,0 +1,45 @@
|
||||
.. YAP documentation master file, created by
|
||||
sphinx-quickstart on Sun Mar 26 10:27:55 2017.
|
||||
You can adapt this file completely to your liking, but it should at least
|
||||
contain the root `toctree` directive.
|
||||
|
||||
Welcome to YAP's documentation!
|
||||
===============================
|
||||
|
||||
.. doxygenindex::
|
||||
.. doxygenfunction::
|
||||
.. doxygenstruct::
|
||||
.. doxygenenum::
|
||||
.. doxygentypedef::
|
||||
.. doxygenclass::
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 2
|
||||
:caption: Contents:
|
||||
|
||||
   '../../md/attributes.md'
   '../../md/builtins.md'
   '../../md/download.md'
   '../../md/extensions.md'
   '../../md/fli.md'
   '../../md/library.md'
   '../../md/load_files.md'
   '../../md/modules.md'
   '../../md/packages.md'
   '../../md/run.md'
   '../../md/swi.md'
   '../../md/syntax.md'
   '../../md/yap.md'
   'classlist.rst'
   'file.rst'
   'group.rst'
   'section.rst'
   'union.rst'
   'namespace.rst'
|
||||
|
||||
|
||||
|
||||
Indices and tables
|
||||
==================
|
||||
|
||||
* :ref:`search`
|
cmake/packages/raptor/raptor_config.h (new file, 2 lines)
@ -0,0 +1,2 @@
|
||||
#define HAVE_RAPTOR2_RAPTOR2_H 1
|
||||
/* #undef HAVE_RAPTOR_H */
|
cmake/packages/real/rconfig.h (new file, 26 lines)
@ -0,0 +1,26 @@
|
||||
/*--------------------------------------------------------------------------
|
||||
* This file is autogenerated from rconfig.h.cmake
|
||||
* during the cmake configuration of your project. If you need to make changes
|
||||
* edit the original file NOT THIS FILE.
|
||||
* --------------------------------------------------------------------------*/
|
||||
#ifndef RCONFIG_H
|
||||
#define RCONFIG_H
|
||||
|
||||
/* Define to 1 if you have the <R.h> header file. */
|
||||
#ifndef HAVE_R_H
|
||||
#define HAVE_R_H 1
|
||||
#endif
|
||||
|
||||
/* Define to 1 if you have the <Rembedded.h> header file. */
|
||||
#ifndef HAVE_R_EMBEDDED_H
|
||||
#define HAVE_R_EMBEDDED_H 1
|
||||
#endif
|
||||
|
||||
/* Define to 1 if you have the <Rinterface.h> header file. */
|
||||
#ifndef HAVE_R_INTERFACE_H
|
||||
#define HAVE_R_INTERFACE_H 1
|
||||
#endif
|
||||
|
||||
|
||||
#endif
|
||||
|
compile_commands.json (new file, 1012 lines; diff suppressed because it is too large)
cudd_config.h (new file, 43 lines)
@ -0,0 +1,43 @@
|
||||
// cmake template file
|
||||
|
||||
/* Define to 1 if you have the <cudd.h> header file. */
|
||||
#ifndef HAVE_CUDD_H
|
||||
/* #undef HAVE_CUDD_H */
|
||||
#endif
|
||||
|
||||
/* Define to 1 if you have the <cudd/cudd.h> header file. */
|
||||
#ifndef HAVE_CUDD_CUDD_H
|
||||
#define HAVE_CUDD_CUDD_H 1
|
||||
#endif
|
||||
|
||||
/* Define to 1 if you have the <cuddInt.h> header file. */
|
||||
#ifndef HAVE_CUDDINT_H
|
||||
/* #undef HAVE_CUDDINT_H */
|
||||
#endif
|
||||
|
||||
/* Define to 1 if you have the <cudd/cuddInt.h> header file. */
|
||||
#ifndef HAVE_CUDD_CUDDINT_H
|
||||
#define HAVE_CUDD_CUDDINT_H 1
|
||||
#endif
|
||||
|
||||
|
||||
/* Define to 1 if you have the <cuddObj.hh> header file. */
|
||||
#ifndef HAVE_CUDDOBJ_HH
|
||||
/* #undef HAVE_CUDDOBJ_HH */
|
||||
#endif
|
||||
|
||||
/* Define to 1 if you have the <cudd/cuddObj.hh> header file. */
|
||||
#ifndef HAVE_CUDD_CUDDOBJ_HH
|
||||
/* #undef HAVE_CUDD_CUDDOBJ_HH */
|
||||
#endif
|
||||
|
||||
|
||||
/* Define to 1 if you have the <dddmpInt.h> header file. */
|
||||
#ifndef HAVE_DDDMPINT_H
|
||||
/* #undef HAVE_DDDMPINT_H */
|
||||
#endif
|
||||
|
||||
/* Define to 1 if you have the <cudd/dddmpInt.h> header file. */
|
||||
#ifndef HAVE_CUDD_DDDMPINT_H
|
||||
/* #undef HAVE_CUDD_DDDMPINT_H */
|
||||
#endif
|
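These generated configuration headers are consumed by the packages' C/C++ sources through conditional includes. A hedged sketch of the intended pattern, using only the macros defined above; the error message is illustrative:

~~~
// Sketch: select the CUDD header layout detected at configure time.
#include "cudd_config.h"

#if defined(HAVE_CUDD_CUDD_H)
#include <cudd/cudd.h>      // headers installed under a cudd/ prefix
#elif defined(HAVE_CUDD_H)
#include <cudd.h>           // headers directly on the include path
#else
#error "no usable CUDD installation was detected when configuring YAP"
#endif
~~~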
docs/custom/customdoxygen.css (new file, 255 lines)
@ -0,0 +1,255 @@
|
||||
h1, .h1, h2, .h2, h3, .h3{
|
||||
font-weight: 200 !important;
|
||||
}
|
||||
|
||||
#navrow1, #navrow2, #navrow3, #navrow4, #navrow5{
|
||||
border-bottom: 1px solid #EEEEEE;
|
||||
}
|
||||
|
||||
.adjust-right {
|
||||
margin-left: 30px !important;
|
||||
font-size: 1.15em !important;
|
||||
}
|
||||
.navbar{
|
||||
border: 0px solid #222 !important;
|
||||
}
|
||||
|
||||
|
||||
/* Sticky footer styles
|
||||
-------------------------------------------------- */
|
||||
html,
|
||||
body {
|
||||
height: 100%;
|
||||
/* The html and body elements cannot have any padding or margin. */
|
||||
}
|
||||
|
||||
/* Wrapper for page content to push down footer */
|
||||
#wrap {
|
||||
min-height: 100%;
|
||||
height: auto;
|
||||
/* Negative indent footer by its height */
|
||||
margin: 0 auto -60px;
|
||||
/* Pad bottom by footer height */
|
||||
padding: 0 0 60px;
|
||||
}
|
||||
|
||||
/* Set the fixed height of the footer here */
|
||||
#footer {
|
||||
font-size: 0.9em;
|
||||
padding: 8px 0px;
|
||||
background-color: #f5f5f5;
|
||||
}
|
||||
|
||||
.footer-row {
|
||||
line-height: 44px;
|
||||
}
|
||||
|
||||
#footer > .container {
|
||||
padding-left: 15px;
|
||||
padding-right: 15px;
|
||||
}
|
||||
|
||||
.footer-follow-icon {
|
||||
margin-left: 3px;
|
||||
text-decoration: none !important;
|
||||
}
|
||||
|
||||
.footer-follow-icon img {
|
||||
width: 20px;
|
||||
}
|
||||
|
||||
.footer-link {
|
||||
padding-top: 5px;
|
||||
display: inline-block;
|
||||
color: #999999;
|
||||
text-decoration: none;
|
||||
}
|
||||
|
||||
.footer-copyright {
|
||||
text-align: center;
|
||||
}
|
||||
|
||||
|
||||
@media (min-width: 992px) {
|
||||
.footer-row {
|
||||
text-align: left;
|
||||
}
|
||||
|
||||
.footer-icons {
|
||||
text-align: right;
|
||||
}
|
||||
}
|
||||
@media (max-width: 991px) {
|
||||
.footer-row {
|
||||
text-align: center;
|
||||
}
|
||||
|
||||
.footer-icons {
|
||||
text-align: center;
|
||||
}
|
||||
}
|
||||
|
||||
/* DOXYGEN Code Styles
|
||||
----------------------------------- */
|
||||
|
||||
|
||||
a.qindex {
|
||||
font-weight: bold;
|
||||
}
|
||||
|
||||
a.qindexHL {
|
||||
font-weight: bold;
|
||||
background-color: #9CAFD4;
|
||||
color: #ffffff;
|
||||
border: 1px double #869DCA;
|
||||
}
|
||||
|
||||
.contents a.qindexHL:visited {
|
||||
color: #ffffff;
|
||||
}
|
||||
|
||||
a.code, a.code:visited, a.line, a.line:visited {
|
||||
color: #4665A2;
|
||||
}
|
||||
|
||||
a.codeRef, a.codeRef:visited, a.lineRef, a.lineRef:visited {
|
||||
color: #4665A2;
|
||||
}
|
||||
|
||||
/* @end */
|
||||
|
||||
dl.el {
|
||||
margin-left: -1cm;
|
||||
}
|
||||
|
||||
pre.fragment {
|
||||
border: 1px solid #C4CFE5;
|
||||
background-color: #FBFCFD;
|
||||
padding: 4px 6px;
|
||||
margin: 4px 8px 4px 2px;
|
||||
overflow: auto;
|
||||
word-wrap: break-word;
|
||||
font-size: 9pt;
|
||||
line-height: 125%;
|
||||
font-family: monospace, fixed;
|
||||
font-size: 105%;
|
||||
}
|
||||
|
||||
div.fragment {
|
||||
padding: 4px 6px;
|
||||
margin: 4px 8px 4px 2px;
|
||||
border: 1px solid #C4CFE5;
|
||||
}
|
||||
|
||||
div.line {
|
||||
font-family: monospace, fixed;
|
||||
font-size: 13px;
|
||||
min-height: 13px;
|
||||
line-height: 1.0;
|
||||
text-wrap: unrestricted;
|
||||
white-space: -moz-pre-wrap; /* Moz */
|
||||
white-space: -pre-wrap; /* Opera 4-6 */
|
||||
white-space: -o-pre-wrap; /* Opera 7 */
|
||||
white-space: pre-wrap; /* CSS3 */
|
||||
word-wrap: break-word; /* IE 5.5+ */
|
||||
text-indent: -53px;
|
||||
padding-left: 53px;
|
||||
padding-bottom: 0px;
|
||||
margin: 0px;
|
||||
-webkit-transition-property: background-color, box-shadow;
|
||||
-webkit-transition-duration: 0.5s;
|
||||
-moz-transition-property: background-color, box-shadow;
|
||||
-moz-transition-duration: 0.5s;
|
||||
-ms-transition-property: background-color, box-shadow;
|
||||
-ms-transition-duration: 0.5s;
|
||||
-o-transition-property: background-color, box-shadow;
|
||||
-o-transition-duration: 0.5s;
|
||||
transition-property: background-color, box-shadow;
|
||||
transition-duration: 0.5s;
|
||||
}
|
||||
|
||||
div.line.glow {
|
||||
background-color: cyan;
|
||||
box-shadow: 0 0 10px cyan;
|
||||
}
|
||||
|
||||
|
||||
span.lineno {
|
||||
padding-right: 4px;
|
||||
text-align: right;
|
||||
border-right: 2px solid #0F0;
|
||||
background-color: #E8E8E8;
|
||||
white-space: pre;
|
||||
}
|
||||
span.lineno a {
|
||||
background-color: #D8D8D8;
|
||||
}
|
||||
|
||||
span.lineno a:hover {
|
||||
background-color: #C8C8C8;
|
||||
}
|
||||
|
||||
div.groupHeader {
|
||||
margin-left: 16px;
|
||||
margin-top: 12px;
|
||||
font-weight: bold;
|
||||
}
|
||||
|
||||
div.groupText {
|
||||
margin-left: 16px;
|
||||
font-style: italic;
|
||||
}
|
||||
|
||||
/* @group Code Colorization */
|
||||
|
||||
span.keyword {
|
||||
color: #008000
|
||||
}
|
||||
|
||||
span.keywordtype {
|
||||
color: #604020
|
||||
}
|
||||
|
||||
span.keywordflow {
|
||||
color: #e08000
|
||||
}
|
||||
|
||||
span.comment {
|
||||
color: #800000
|
||||
}
|
||||
|
||||
span.preprocessor {
|
||||
color: #806020
|
||||
}
|
||||
|
||||
span.stringliteral {
|
||||
color: #002080
|
||||
}
|
||||
|
||||
span.charliteral {
|
||||
color: #008080
|
||||
}
|
||||
|
||||
span.vhdldigit {
|
||||
color: #ff00ff
|
||||
}
|
||||
|
||||
span.vhdlchar {
|
||||
color: #000000
|
||||
}
|
||||
|
||||
span.vhdlkeyword {
|
||||
color: #700070
|
||||
}
|
||||
|
||||
span.vhdllogic {
|
||||
color: #ff0000
|
||||
}
|
||||
|
||||
blockquote {
|
||||
background-color: #F7F8FB;
|
||||
border-left: 2px solid #9CAFD4;
|
||||
margin: 0 24px 0 4px;
|
||||
padding: 0 12px 0 16px;
|
||||
}
|
||||
|
docs/custom/default/customdoxygen.css (new file, 1596 lines; diff suppressed because it is too large)
docs/custom/doxy-boot.js (new file, 121 lines)
@ -0,0 +1,121 @@
|
||||
$( document ).ready(function() {
|
||||
|
||||
$("div.headertitle").addClass("page-header");
|
||||
$("div.title").addClass("h1");
|
||||
|
||||
$('li > a[href="index.html"] > span').before("<i class='fa fa-cog'></i> ");
|
||||
$('li > a[href="index.html"] > span').text("BioGears");
|
||||
$('li > a[href="modules.html"] > span').before("<i class='fa fa-square'></i> ");
|
||||
$('li > a[href="namespaces.html"] > span').before("<i class='fa fa-bars'></i> ");
|
||||
$('li > a[href="annotated.html"] > span').before("<i class='fa fa-list-ul'></i> ");
|
||||
$('li > a[href="classes.html"] > span').before("<i class='fa fa-book'></i> ");
|
||||
$('li > a[href="inherits.html"] > span').before("<i class='fa fa-sitemap'></i> ");
|
||||
$('li > a[href="functions.html"] > span').before("<i class='fa fa-list'></i> ");
|
||||
$('li > a[href="functions_func.html"] > span').before("<i class='fa fa-list'></i> ");
|
||||
$('li > a[href="functions_vars.html"] > span').before("<i class='fa fa-list'></i> ");
|
||||
$('li > a[href="functions_enum.html"] > span').before("<i class='fa fa-list'></i> ");
|
||||
$('li > a[href="functions_eval.html"] > span').before("<i class='fa fa-list'></i> ");
|
||||
$('img[src="ftv2ns.png"]').replaceWith('<span class="label label-danger">N</span> ');
|
||||
$('img[src="ftv2cl.png"]').replaceWith('<span class="label label-danger">C</span> ');
|
||||
|
||||
$("ul.tablist").addClass("nav nav-pills nav-justified");
|
||||
$("ul.tablist").css("margin-top", "0.5em");
|
||||
$("ul.tablist").css("margin-bottom", "0.5em");
|
||||
$("li.current").addClass("active");
|
||||
$("iframe").attr("scrolling", "yes");
|
||||
|
||||
$("#nav-path > ul").addClass("breadcrumb");
|
||||
|
||||
$("table.params").addClass("table");
|
||||
$("div.ingroups").wrapInner("<small></small>");
|
||||
$("div.levels").css("margin", "0.5em");
|
||||
$("div.levels > span").addClass("btn btn-default btn-xs");
|
||||
$("div.levels > span").css("margin-right", "0.25em");
|
||||
|
||||
$("table.directory").addClass("table table-striped");
|
||||
$("div.summary > a").addClass("btn btn-default btn-xs");
|
||||
$("table.fieldtable").addClass("table");
|
||||
$(".fragment").addClass("well");
|
||||
$(".memitem").addClass("panel panel-default");
|
||||
$(".memproto").addClass("panel-heading");
|
||||
$(".memdoc").addClass("panel-body");
|
||||
$("span.mlabel").addClass("label label-info");
|
||||
|
||||
$("table.memberdecls").addClass("table");
|
||||
$("[class^=memitem]").addClass("active");
|
||||
|
||||
$("div.ah").addClass("btn btn-default");
|
||||
$("span.mlabels").addClass("pull-right");
|
||||
$("table.mlabels").css("width", "100%")
|
||||
$("td.mlabels-right").addClass("pull-right");
|
||||
|
||||
$("div.ttc").addClass("panel panel-primary");
|
||||
$("div.ttname").addClass("panel-heading");
|
||||
$("div.ttname a").css("color", 'white');
|
||||
$("div.ttdef,div.ttdoc,div.ttdeci").addClass("panel-body");
|
||||
|
||||
$('#MSearchBox').parent().remove();
|
||||
|
||||
$('div.fragment.well div.line:first').css('margin-top', '15px');
|
||||
$('div.fragment.well div.line:last').css('margin-bottom', '15px');
|
||||
|
||||
$('table.doxtable').removeClass('doxtable').addClass('table table-striped table-bordered').each(function(){
|
||||
$(this).prepend('<thead></thead>');
|
||||
$(this).find('tbody > tr:first').prependTo($(this).find('thead'));
|
||||
|
||||
$(this).find('td > span.success').parent().addClass('success');
|
||||
$(this).find('td > span.warning').parent().addClass('warning');
|
||||
$(this).find('td > span.danger').parent().addClass('danger');
|
||||
});
|
||||
|
||||
|
||||
|
||||
if($('div.fragment.well div.ttc').length > 0)
|
||||
{
|
||||
$('div.fragment.well div.line:first').parent().removeClass('fragment well');
|
||||
}
|
||||
|
||||
$('table.memberdecls').find('.memItemRight').each(function(){
|
||||
$(this).contents().appendTo($(this).siblings('.memItemLeft'));
|
||||
$(this).siblings('.memItemLeft').attr('align', 'left');
|
||||
});
|
||||
|
||||
function getOriginalWidthOfImg(img_element) {
|
||||
var t = new Image();
|
||||
t.src = (img_element.getAttribute ? img_element.getAttribute("src") : false) || img_element.src;
|
||||
return t.width;
|
||||
}
|
||||
|
||||
$('div.dyncontent').find('img').each(function(){
|
||||
if(getOriginalWidthOfImg($(this)[0]) > $('#content>div.container').width())
|
||||
$(this).css('width', '100%');
|
||||
});
|
||||
|
||||
$(".memitem").removeClass('memitem');
|
||||
$(".memproto").removeClass('memproto');
|
||||
$(".memdoc").removeClass('memdoc');
|
||||
$("span.mlabel").removeClass('mlabel');
|
||||
$("table.memberdecls").removeClass('memberdecls');
|
||||
$("[class^=memitem]").removeClass('memitem');
|
||||
$("span.mlabels").removeClass('mlabels');
|
||||
$("table.mlabels").removeClass('mlabels');
|
||||
$("td.mlabels-right").removeClass('mlabels-right');
|
||||
$(".navpath").removeClass('navpath');
|
||||
$("li.navelem").removeClass('navelem');
|
||||
$("a.el").removeClass('el');
|
||||
$("div.ah").removeClass('ah');
|
||||
$("div.header").removeClass("header");
|
||||
|
||||
$('.mdescLeft').each(function(){
|
||||
if($(this).html()==" ") {
|
||||
$(this).siblings('.mdescRight').attr('colspan', 2);
|
||||
$(this).remove();
|
||||
}
|
||||
});
|
||||
$('td.memItemLeft').each(function(){
|
||||
if($(this).siblings('.memItemRight').html()=="") {
|
||||
$(this).attr('colspan', 2);
|
||||
$(this).siblings('.memItemRight').remove();
|
||||
}
|
||||
});
|
||||
});
|
docs/md/#run.md# (new file, 194 lines)
@ -0,0 +1,194 @@
|
||||
|
||||
|
||||
@page run Running YAP
|
||||
|
||||
We next describe how to invoke YAP from the command-line, either interactively or as a script:
|
||||
|
||||
* @subpage Running_YAP_Interactively
|
||||
|
||||
* @subpage
|
||||
|
||||
@page Running_YAP_Interactively Running YAP Interactively
|
||||
|
||||
Most often you will want to use YAP in interactive mode. Assuming that
|
||||
YAP is in the user's search path, the top-level can be invoked under
|
||||
Unix with the following command:
|
||||
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
yap [-s n] [-h n] [-a n] [-c IP_HOST port ] [filename]
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
All the arguments and flags are optional and have the following meaning:
|
||||
|
||||
+ -?
|
||||
print a short error message.
|
||||
+ -s _Size_
|
||||
allocate _Size_ KBytes for local and global stacks. The user may
|
||||
specify <tt>M</tt> bytes.
|
||||
+ -h _Size_
|
||||
allocate _Size_ KBytes for heap and auxiliary stacks
|
||||
+ -t _Size_
|
||||
allocate _Size_ KBytes for the trail stack
|
||||
+ -L _Size_
|
||||
SWI-compatible option to allocate _Size_ K bytes for local and global stacks, the local stack
|
||||
cannot be expanded. To avoid confusion with the load option, _Size_
|
||||
must immediately follow the letter `L`.
|
||||
+ -G _Size_
|
||||
SWI-compatible option to allocate _Size_ K bytes for local and global stacks; the global
|
||||
stack cannot be expanded
|
||||
+ -T _Size_
|
||||
SWI-compatible option to allocate _Size_ K bytes for the trail stack; the trail cannot be expanded.
|
||||
+ -l _YAP_FILE_
|
||||
compile the Prolog file _YAP_FILE_ before entering the top-level.
|
||||
+ -L _YAP_FILE_
|
||||
compile the Prolog file _YAP_FILE_ and then halt. This option is
|
||||
useful for implementing scripts.
|
||||
+ -g _Goal_
|
||||
run the goal _Goal_ before top-level. The goal is converted from
|
||||
an atom to a Prolog term.
|
||||
+ -z _Goal_
|
||||
run the goal _Goal_ as top-level. The goal is converted from
|
||||
an atom to a Prolog term.
|
||||
+ -b _BOOT_FILE_
|
||||
boot code is in Prolog file _BOOT_FILE_. The filename must define
|
||||
the predicate `'$live'/0`.
|
||||
+ -c _IP_HOST_ _port_
connect standard streams to host <tt>IP_HOST</tt> at port <tt>port</tt>
|
||||
+ filename
|
||||
restore state saved in the given file
|
||||
+ -f
|
||||
do not consult initial files
|
||||
+ -q
|
||||
do not print informational messages
|
||||
+ --
|
||||
separator for arguments to Prolog code. These arguments are visible
|
||||
through the unix/1 built-in predicate.
|
||||
|
||||
|
||||
Note that YAP will output an error message on the following conditions:
|
||||
|
||||
+
|
||||
a file name was given but the file does not exist or is not a saved
|
||||
YAP state;
|
||||
|
||||
+
|
||||
the necessary amount of memory could not be allocated;
|
||||
|
||||
+
|
||||
the allocated memory is not enough to restore the state.
|
||||
|
||||
|
||||
When restoring a saved state, YAP will allocate the
|
||||
same amount of memory as that in use when the state was saved, unless a
|
||||
different amount is specified by flags in the command line. By default,
|
||||
YAP restores the file startup.yss from the current directory or from
|
||||
the YAP library.
|
||||
|
||||
+
|
||||
YAP usually boots from a saved state. The saved state will use the default
|
||||
installation directory to search for the YAP binary unless you define
|
||||
the environment variable YAPBINDIR.
|
||||
|
||||
+
|
||||
YAP always tries to find saved states from the current directory
|
||||
first. If it cannot it will use the environment variable YAPLIBDIR, if
|
||||
defined, or search the default library directory.
|
||||
|
||||
|
||||
YAP will try to find library files from the YAPSHAREDIR/library
|
||||
directory.
|
||||
|
||||
@subpage Running_Prolog_Files Running Prolog Files
|
||||
|
||||
YAP can also be used to run Prolog files as scripts, at least in
|
||||
Unix-like environments. A simple example is shown next (do not forget
|
||||
that the shell comments are very important):
|
||||
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
#!/usr/local/bin/yap -L --
|
||||
#
|
||||
# Hello World script file using YAP
|
||||
#
|
||||
# put a dot because of syntax errors .
|
||||
|
||||
:- write('Hello World'), nl.
|
||||
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
The `#!` characters specify that the script should call the binary
|
||||
file YAP. Notice that many systems will require the complete path to the
|
||||
YAP binary. The `-L` flag indicates that YAP should consult the
|
||||
current file when booting and then halt. The remaining arguments are
|
||||
then passed to YAP. Note that YAP will skip the first lines if they
|
||||
start with `#` (the comment sign for Unix's shell). YAP will
|
||||
consult the file and execute any commands.
|
||||
|
||||
A slightly more sophisticated example is:
|
||||
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
#!/usr/bin/yap -L --
|
||||
#
|
||||
# Hello World script file using YAP
|
||||
# .
|
||||
|
||||
:- initialization(main).
|
||||
|
||||
main :- write('Hello World'), nl.
|
||||
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
The `initialization` directive tells YAP to execute the goal main
|
||||
after consulting the file. Source code is thus compiled and `main`
|
||||
executed at the end. The `.` is useful while debugging the script
|
||||
as a Prolog program: it guarantees that the syntax error will not
|
||||
propagate to the Prolog code.
|
||||
|
||||
Notice that the `--` is required so that the shell passes the extra
|
||||
arguments to YAP. As an example, consider the following script
|
||||
`dump_args`:
|
||||
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
#!/usr/bin/yap -L --
|
||||
#.
|
||||
|
||||
main( [] ).
|
||||
main( [H|T] ) :-
|
||||
write( H ), nl,
|
||||
main( T ).
|
||||
|
||||
:- unix( argv(AllArgs) ), main( AllArgs ).
|
||||
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
If you run this script with the arguments:
|
||||
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
./dump_args -s 10000
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
the script will start a YAP process with stack size `10MB`, and
|
||||
the list of arguments to the process will be empty.
|
||||
|
||||
Often one wants to run the script as any other program, and for this it
|
||||
is convenient to ignore arguments to YAP. This is possible by using
|
||||
`L --` as in the next version of `dump_args`:
|
||||
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
#!/usr/bin/yap -L --
|
||||
|
||||
main( [] ).
|
||||
main( [H|T] ) :-
|
||||
write( H ), nl,
|
||||
main( T ).
|
||||
|
||||
:- unix( argv(AllArgs) ), main( AllArgs ).
|
||||
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
The `--` indicates the next arguments are not for YAP. Instead,
|
||||
they must be sent directly to the argv built-in. Hence, running
|
||||
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
./dump_args test
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
will write `test` on the standard output.
|
||||
|
docs/md/atts.md (new file, 390 lines)
@ -0,0 +1,390 @@
|
||||
|
||||
@ingroup extensions
|
||||
|
||||
YAP supports attributed variables, originally developed at OFAI by
|
||||
Christian Holzbaur. Attributes are a means of declaring that an
|
||||
arbitrary term is a property for a variable. These properties can be
|
||||
updated during forward execution. Moreover, the unification algorithm is
|
||||
aware of attributed variables and will call user defined handlers when
|
||||
trying to unify these variables.
|
||||
|
||||
Attributed variables provide an elegant abstraction over which one can
|
||||
extend Prolog systems. Their main application so far has been in
|
||||
implementing constraint handlers, such as Holzbaur's CLPQR, Frühwirth
|
||||
and Holzbaur's CHR, and CLP(BN).
|
||||
|
||||
Different Prolog systems implement attributed variables in different
|
||||
ways. Originally, YAP used the interface designed by SICStus
|
||||
Prolog. This interface is still
|
||||
available through the <tt>atts</tt> library, and is used by CLPBN.
|
||||
|
||||
From YAP-6.0.3 onwards we recommend using the hProlog, SWI style
|
||||
interface. We believe that this design is easier to understand and
|
||||
work with. Most packages included in YAP that use attributed
|
||||
variables, such as CHR, CLP(FD), and CLP(QR), rely on the SWI-Prolog
|
||||
interface.
|
||||
|
||||
+ @ref SICS_attributes
|
||||
+ @ref sicsatts
|
||||
+ @ref New_Style_Attribute_Declarations
|
||||
+ @ref AttributedVariables_Builtins
|
||||
+ @ref corout
|
||||
|
||||
### SICStus Style attribute declarations. {#SICS_attributes}
|
||||
|
||||
The YAP library `atts` implements attribute variables in the style of
|
||||
SICStus Prolog. Attributed variables work as follows:
|
||||
|
||||
+ Each attribute must be declared beforehand. Attributes are described
|
||||
as a functor with name and arity and are local to a module. Each
|
||||
Prolog module declares its own sets of attributes. Different modules
|
||||
may have attributes with the same name and arity.
|
||||
|
||||
+ The built-in put_atts/2 adds or deletes attributes to a
|
||||
variable. The variable may be unbound or may be an attributed
|
||||
variable. In the latter case, YAP discards previous values for the
|
||||
attributes.
|
||||
|
||||
+ The built-in get_atts/2 can be used to check the values of
|
||||
an attribute associated with a variable.
|
||||
|
||||
+ The unification algorithm calls the user-defined predicate
|
||||
verify_attributes/3 before trying to bind an attributed
|
||||
variable. Unification will resume after this call.
|
||||
|
||||
+ The user-defined predicate
|
||||
<tt>attribute_goal/2</tt> converts from an attribute to a goal.
|
||||
|
||||
+ The user-defined predicate
|
||||
<tt>project_attributes/2</tt> is used to project a set of variables into a set of
|
||||
constraints or goals. One application of <tt>project_attributes/2</tt> is in
|
||||
the top-level, where it is used to output the set of
|
||||
floundered constraints at the end of a query.
|
||||
|
||||
|
||||
Attributes are compound terms associated with a variable. Each attribute
|
||||
has a <em>name</em> which is <em>private</em> to the module in which the
|
||||
attribute was defined. Variables may have at most one attribute with a given
|
||||
name. Attribute names are defined through the following declaration:
|
||||
|
||||
~~~~~
|
||||
:- attribute AttributeSpec, ..., AttributeSpec.
|
||||
~~~~~
|
||||
|
||||
where each _AttributeSpec_ has the form ( _Name_/ _Arity_).
|
||||
One single such declaration is allowed per module _Module_.
|
||||
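As an illustration (the module and attribute names here are invented for the sketch), a module that wants to tag variables with a colour and a weight would declare:

~~~~~
:- module(tags, []).
:- use_module(library(atts)).

% a single declaration, local to module tags
:- attribute colour/1, weight/2.
~~~~~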
|
||||
Although the YAP module system is predicate based, attributes are local
|
||||
to modules. This is implemented by rewriting all calls to the
|
||||
built-ins that manipulate attributes so that attribute names are
|
||||
preprocessed depending on the module. The `user:goal_expansion/3`
|
||||
mechanism is used for this purpose.
|
||||
|
||||
|
||||
The attribute manipulation predicates always work as follows:
|
||||
|
||||
+ The first argument is the unbound variable associated with
|
||||
attributes,
|
||||
+ The second argument is a list of attributes. Each attribute will
|
||||
be a Prolog term or a constant, prefixed with the <tt>+</tt> or <tt>-</tt> unary
|
||||
operators. The prefix <tt>+</tt> may be dropped for convenience.
|
||||
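As a minimal sketch, assuming a `colour/1` attribute has been declared with `:- attribute colour/1.` in the module where the query runs, setting and reading attributes looks like this (answers abbreviated):

~~~~~
% set (or replace) the colour/1 attribute of X, then read it back
?- put_atts(X, colour(blue)), get_atts(X, colour(C)).
C = blue.

% the - prefix deletes an attribute, so the final get_atts/2 fails
?- put_atts(X, colour(blue)), put_atts(X, -colour(_)), get_atts(X, colour(_)).
no
~~~~~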
|
||||
The following three procedures are available to the user. Notice that
|
||||
these built-ins are rewritten by the system into internal built-ins, and
|
||||
that the rewriting process <em>depends</em> on the module on which the
|
||||
built-ins have been invoked.
|
||||
|
||||
|
||||
The user-defined predicate verify_attributes/3 is called when
|
||||
attempting to unify an attributed variable which might have attributes
|
||||
in some _Module_.
|
||||
|
||||
|
||||
Attributes are usually presented as goals. The following routines are
|
||||
used by built-in predicates such as call_residue/2 and by the
|
||||
Prolog top-level to display attributes:
|
||||
|
||||
|
||||
Constraint solvers must be able to project a set of constraints to a set
|
||||
of variables. This is useful when displaying the solution to a goal, but
|
||||
may also be used to manipulate computations. The user-defined
|
||||
project_attributes/2 is responsible for implementing this
|
||||
projection.
|
||||
|
||||
|
||||
The following examples are taken from the SICStus Prolog
|
||||
manual. They sketch the implementation of a simple finite-domain
|
||||
solver. Note that an industrial-strength solver would have to
|
||||
provide a wider range of functionality and that it quite likely would
|
||||
utilize a more efficient representation for the domains proper. The
|
||||
module exports a single predicate `domain( _-Var_, _?Domain_)` which
|
||||
associates _Domain_ (a list of terms) with _Var_. A variable can be
|
||||
queried for its domain by leaving _Domain_ unbound.
|
||||
|
||||
We do not present here a definition for project_attributes/2.
|
||||
Projecting finite domain constraints happens to be difficult.
|
||||
|
||||
~~~~~
|
||||
:- module(domain, [domain/2]).
|
||||
|
||||
:- use_module(library(atts)).
|
||||
:- use_module(library(ordsets), [
|
||||
ord_intersection/3,
|
||||
ord_intersect/2,
|
||||
list_to_ord_set/2
|
||||
]).
|
||||
|
||||
:- attribute dom/1.
|
||||
|
||||
verify_attributes(Var, Other, Goals) :-
|
||||
get_atts(Var, dom(Da)), !, % are we involved?
|
||||
( var(Other) -> % must be attributed then
|
||||
( get_atts(Other, dom(Db)) -> % has a domain?
|
||||
ord_intersection(Da, Db, Dc),
|
||||
Dc = [El|Els], % at least one element
|
||||
( Els = [] -> % exactly one element
|
||||
Goals = [Other=El] % implied binding
|
||||
; Goals = [],
|
||||
put_atts(Other, dom(Dc))% rescue intersection
|
||||
)
|
||||
; Goals = [],
|
||||
put_atts(Other, dom(Da)) % rescue the domain
|
||||
)
|
||||
; Goals = [],
|
||||
ord_intersect([Other], Da) % value in domain?
|
||||
).
|
||||
verify_attributes(_, _, []). % unification triggered
|
||||
% because of attributes
|
||||
% in other modules
|
||||
|
||||
attribute_goal(Var, domain(Var,Dom)) :- % interpretation as goal
|
||||
get_atts(Var, dom(Dom)).
|
||||
|
||||
domain(X, Dom) :-
|
||||
var(Dom), !,
|
||||
get_atts(X, dom(Dom)).
|
||||
domain(X, List) :-
|
||||
list_to_ord_set(List, Set),
|
||||
Set = [El|Els], % at least one element
|
||||
( Els = [] -> % exactly one element
|
||||
X = El % implied binding
|
||||
; put_atts(Fresh, dom(Set)),
|
||||
X = Fresh % may call
|
||||
% verify_attributes/3
|
||||
).
|
||||
~~~~~
|
||||
|
||||
Note that the _implied binding_ `Other=El` was deferred until after
|
||||
the completion of `verify_attributes/3`. Otherwise, there might be a
|
||||
danger of recursively invoking `verify_attributes/3`, which might bind
|
||||
`Var`, which is not allowed inside the scope of `verify_attributes/3`.
|
||||
Deferring unifications into the third argument of `verify_attributes/3`
|
||||
effectively serializes the calls to `verify_attributes/3`.
|
||||
|
||||
Assuming that the code resides in the file domain.yap, we
|
||||
can use it via:
|
||||
|
||||
~~~~~
|
||||
| ?- use_module(domain).
|
||||
~~~~~
|
||||
|
||||
Let's test it:
|
||||
|
||||
~~~~~
|
||||
| ?- domain(X,[5,6,7,1]), domain(Y,[3,4,5,6]), domain(Z,[1,6,7,8]).
|
||||
|
||||
domain(X,[1,5,6,7]),
|
||||
domain(Y,[3,4,5,6]),
|
||||
domain(Z,[1,6,7,8]) ?
|
||||
|
||||
yes
|
||||
| ?- domain(X,[5,6,7,1]), domain(Y,[3,4,5,6]), domain(Z,[1,6,7,8]),
|
||||
X=Y.
|
||||
|
||||
Y = X,
|
||||
domain(X,[5,6]),
|
||||
domain(Z,[1,6,7,8]) ?
|
||||
|
||||
yes
|
||||
| ?- domain(X,[5,6,7,1]), domain(Y,[3,4,5,6]), domain(Z,[1,6,7,8]),
|
||||
X=Y, Y=Z.
|
||||
|
||||
X = 6,
|
||||
Y = 6,
|
||||
Z = 6
|
||||
~~~~~
|
||||
|
||||
To demonstrate the use of the _Goals_ argument of
|
||||
verify_attributes/3, we give an implementation of
|
||||
freeze/2. We have to name it `myfreeze/2` in order to
|
||||
avoid a name clash with the built-in predicate of the same name.
|
||||
|
||||
~~~~~
|
||||
:- module(myfreeze, [myfreeze/2]).
|
||||
|
||||
:- use_module(library(atts)).
|
||||
|
||||
:- attribute frozen/1.
|
||||
|
||||
verify_attributes(Var, Other, Goals) :-
|
||||
get_atts(Var, frozen(Fa)), !, % are we involved?
|
||||
( var(Other) -> % must be attributed then
|
||||
( get_atts(Other, frozen(Fb)) % has a pending goal?
|
||||
-> put_atts(Other, frozen((Fa,Fb))) % rescue conjunction
|
||||
; put_atts(Other, frozen(Fa)) % rescue the pending goal
|
||||
),
|
||||
Goals = []
|
||||
; Goals = [Fa]
|
||||
).
|
||||
verify_attributes(_, _, []).
|
||||
|
||||
attribute_goal(Var, Goal) :- % interpretation as goal
|
||||
get_atts(Var, frozen(Goal)).
|
||||
|
||||
myfreeze(X, Goal) :- put_atts(Fresh, frozen(Goal)), Fresh = X.
~~~~~
|
||||
|
||||
Assuming that this code lives in file myfreeze.yap,
|
||||
we would use it via:
|
||||
|
||||
~~~~~
|
||||
| ?- use_module(myfreeze).
|
||||
| ?- myfreeze(X,print(bound(x,X))), X=2.
|
||||
|
||||
bound(x,2) % side effect
|
||||
X = 2 % bindings
|
||||
~~~~~
|
||||
|
||||
The two solvers even work together:
|
||||
|
||||
~~~~~
|
||||
| ?- myfreeze(X,print(bound(x,X))), domain(X,[1,2,3]),
|
||||
domain(Y,[2,10]), X=Y.
|
||||
|
||||
bound(x,2) % side effect
|
||||
X = 2, % bindings
|
||||
Y = 2
|
||||
~~~~~
|
||||
|
||||
The two example solvers interact via bindings to shared attributed
|
||||
variables only. More complicated interactions are likely to be found
|
||||
in more sophisticated solvers. The corresponding
|
||||
verify_attributes/3 predicates would typically refer to the
|
||||
attributes from other known solvers/modules via the module prefix in
|
||||
`Module:get_atts/2`.
|
||||
|
||||
@}
|
||||
|
||||
@{
|
||||
### hProlog and SWI-Prolog style Attribute Declarations {#New_Style_Attribute_Declarations}
|
||||
|
||||
The following documentation is taken from the SWI-Prolog manual.
|
||||
|
||||
Binding an attributed variable schedules a goal to be executed at the
|
||||
first possible opportunity. In the current implementation the hooks are
|
||||
executed immediately after a successful unification of the clause-head
|
||||
or successful completion of a foreign language (built-in) predicate. Each
|
||||
attribute is associated to a module and the hook attr_unify_hook/2 is
|
||||
executed in this module. The example below realises a very simple and
|
||||
incomplete finite domain reasoner.
|
||||
|
||||
~~~~~
|
||||
:- module(domain,
|
||||
[ domain/2 % Var, ?Domain %
|
||||
]).
|
||||
:- use_module(library(ordsets)).
|
||||
|
||||
domain(X, Dom) :-
|
||||
var(Dom), !,
|
||||
get_attr(X, domain, Dom).
|
||||
domain(X, List) :-
|
||||
list_to_ord_set(List, Domain),
|
||||
put_attr(Y, domain, Domain),
|
||||
X = Y.
|
||||
|
||||
% An attributed variable with attribute value Domain has been %
|
||||
% assigned the value Y %
|
||||
|
||||
attr_unify_hook(Domain, Y) :-
|
||||
( get_attr(Y, domain, Dom2)
|
||||
-> ord_intersection(Domain, Dom2, NewDomain),
|
||||
( NewDomain == []
|
||||
-> fail
|
||||
; NewDomain = [Value]
|
||||
-> Y = Value
|
||||
; put_attr(Y, domain, NewDomain)
|
||||
)
|
||||
; var(Y)
|
||||
-> put_attr( Y, domain, Domain )
|
||||
; ord_memberchk(Y, Domain)
|
||||
).
|
||||
|
||||
% Translate attributes from this module to residual goals %
|
||||
|
||||
attribute_goals(X) -->
|
||||
{ get_attr(X, domain, List) },
|
||||
[domain(X, List)].
|
||||
~~~~~
|
||||
|
||||
Before explaining the code we give some example queries:
|
||||
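The queries themselves are not reproduced in this file; the following sketch (with made-up domains) illustrates the three situations discussed below:

~~~~~
?- domain(X, [a,b]), domain(X, [c,d]).
% fails: the two domains do not intersect

?- domain(X, [a,b]), domain(X, [a,c]).
% succeeds with X = a: the intersection contains a single value

?- domain(X, [a,b,c]), domain(X, [a,c]).
% succeeds, leaving the residual constraint domain(X, [a,c])
~~~~~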
|
||||
The predicate `domain/2` fetches (first clause) or assigns
|
||||
(second clause) the variable a <em>domain</em>, a set of values it can
|
||||
be unified with. The second clause first associates the domain
|
||||
with a fresh variable and then unifies X to this variable to deal
|
||||
with the possibility that X already has a domain. The
|
||||
predicate attr_unify_hook/2 is a hook called after a variable with
|
||||
a domain is assigned a value. In the simple case where the variable
|
||||
is bound to a concrete value we simply check whether this value is in
|
||||
the domain. Otherwise we take the intersection of the domains and either
|
||||
fail if the intersection is empty (first example), simply assign the
|
||||
value if there is only one value in the intersection (second example) or
|
||||
assign the intersection as the new domain of the variable (third
|
||||
example). The nonterminal `attribute_goals//1` is used to translate
|
||||
remaining attributes to user-readable goals that, when executed, reinstate
|
||||
these attributes.
|
||||
|
||||
@}
|
||||
|
||||
|
||||
@{
|
||||
### Co-routining {#corout}
|
||||
|
||||
Prolog uses a simple left-to-right flow of control. It is sometimes
|
||||
convenient to change this control so that goals will only execute when
|
||||
sufficiently instantiated. This may result in a more "data-driven"
|
||||
execution, or may be necessary to correctly implement extensions such
|
||||
as negation by failure.
|
||||
|
||||
Initially, YAP used a separate mechanism for co-routining. Nowadays, YAP uses
|
||||
attributed variables to implement co-routining.
|
||||
|
||||
Two declarations are supported:
|
||||
|
||||
+ block/1
|
||||
The argument to `block/1` is a condition on a goal or a conjunction
|
||||
of conditions, with each element separated by commas. Each condition is
|
||||
of the form `predname( _C1_,..., _CN_)`, where _N_ is the
|
||||
arity of the goal, and each _CI_ is of the form `-`, if the
|
||||
argument must suspend until the first such variable is bound, or
|
||||
`?`, otherwise.
|
||||
|
||||
+ wait/1
|
||||
The argument to `wait/1` is a predicate descriptor or a conjunction
|
||||
of these predicates. These predicates will suspend until their first
|
||||
argument is bound.
|
||||
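As a sketch (the predicate names are invented for the example), the two declaration styles look like this:

~~~~~
% block/1: echo/2 suspends while its first argument is unbound;
% the second argument, marked ?, is not inspected
:- block echo(-, ?).
echo(Msg, Tag) :- write(Tag-Msg), nl.

% wait/1: calls to log_event/2 suspend until the first argument is bound
:- wait log_event/2.
log_event(Event, Level) :- write(event(Level, Event)), nl.

% ?- echo(X, info), X = hello.   % prints info-hello once X is bound
~~~~~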
|
||||
|
||||
The following primitives can be used:
|
||||
|
||||
- freeze/2
|
||||
|
||||
- dif/2
|
||||
|
||||
- when/2
|
||||
|
||||
- frozen/2
|
||||
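For example (top-level output abbreviated; the exact formatting may differ):

~~~~~
?- freeze(X, write(woke(X))), X = 1.
% the frozen goal runs as soon as X is bound, printing woke(1)

?- dif(X, a), X = b.
% succeeds: X and a can never be equal

?- when(ground(X-Y), Z is X*Y), X = 3, Y = 4.
% Z is computed (Z = 12) once both X and Y are bound
~~~~~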
|
||||
|
||||
@}
|
||||
|
||||
@}
|
24
docs/md/c
Normal file
24
docs/md/c
Normal file
@ -0,0 +1,24 @@
|
||||
YAP Core Built-ins {#core}
|
||||
=================
|
||||
|
||||
This chapter describes the core predicates that control the execution of
|
||||
Prolog programs, provide fundamental functionality such as term manipulation or arithmetic, and support interaction with external
|
||||
resources. Many of the predicates described here have been standardised by ISO; the standardised subset of Prolog is also known as ISO-Prolog.
|
||||
|
||||
In the description of the arguments of predicates the following
|
||||
notation will be used:
|
||||
|
||||
+ a preceding plus sign will denote an argument as an "input
|
||||
argument" - it cannot be a free variable at the time of the call;
|
||||
+ a preceding minus sign will denote an "output argument";
|
||||
+ an argument with no preceding symbol can be used in both ways.
|
||||
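For instance, a mode line such as `atom_length(+Atom, -Length)` states that the atom must be given and that its length is computed:

~~~~~
?- atom_length(hello, L).   % +Atom is given, -Length is computed
L = 5.

?- atom_length(A, 5).       % instantiation error: a + argument may not be unbound
~~~~~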
|
||||
|
||||
@copydoc builtins
|
||||
|
||||
|
||||
@{
|
||||
@defgroup builtins YAP Core Builtins:
|
||||
|
||||
@}
|
||||
|
8
docs/md/library.md
Normal file
8
docs/md/library.md
Normal file
@ -0,0 +1,8 @@
|
||||
|
||||
@page Library YAP Library
|
||||
|
||||
|
||||
Library files reside in the library_directory path (set by the
|
||||
`LIBDIR` variable in the Makefile for YAP). Several files in the
|
||||
library are originally from the public-domain Edinburgh Prolog library.
|
||||
|
4
docs/source/union/d2/d77/unionseq__val__t.rst
Normal file
4
docs/source/union/d2/d77/unionseq__val__t.rst
Normal file
@ -0,0 +1,4 @@
|
||||
Union seq_val_t
|
||||
===============
|
||||
|
||||
.. doxygenunion:: seq_val_t
|
4
docs/source/union/d4/da6/unionflag_term.rst
Normal file
4
docs/source/union/d4/da6/unionflag_term.rst
Normal file
@ -0,0 +1,4 @@
|
||||
Union flagTerm
|
||||
==============
|
||||
|
||||
.. doxygenunion:: flagTerm
|
4
docs/source/union/d5/dc7/union_r_l___node.rst
Normal file
4
docs/source/union/d5/dc7/union_r_l___node.rst
Normal file
@ -0,0 +1,4 @@
|
||||
Union RL_Node
|
||||
=============
|
||||
|
||||
.. doxygenunion:: RL_Node
|
4
docs/source/union/d9/db2/union_u.rst
Normal file
4
docs/source/union/d9/db2/union_u.rst
Normal file
@ -0,0 +1,4 @@
|
||||
Union U
|
||||
=======
|
||||
|
||||
.. doxygenunion:: U
|
4
docs/source/union/da/ded/union_c_o_n_s_u_l_t___o_b_j.rst
Normal file
4
docs/source/union/da/ded/union_c_o_n_s_u_l_t___o_b_j.rst
Normal file
@ -0,0 +1,4 @@
|
||||
Union CONSULT_OBJ
|
||||
=================
|
||||
|
||||
.. doxygenunion:: CONSULT_OBJ
|
4
docs/source/union/db/de7/union_a_i.rst
Normal file
4
docs/source/union/db/de7/union_a_i.rst
Normal file
@ -0,0 +1,4 @@
|
||||
Union AI
|
||||
========
|
||||
|
||||
.. doxygenunion:: AI
|
4
docs/source/union/dd/d52/unionstatarray__elements.rst
Normal file
4
docs/source/union/dd/d52/unionstatarray__elements.rst
Normal file
@ -0,0 +1,4 @@
|
||||
Union statarray_elements
|
||||
========================
|
||||
|
||||
.. doxygenunion:: statarray_elements
|
4
docs/source/union/dd/de1/unioncell__size__t.rst
Normal file
4
docs/source/union/dd/de1/unioncell__size__t.rst
Normal file
@ -0,0 +1,4 @@
|
||||
Union cell_size_t
|
||||
=================
|
||||
|
||||
.. doxygenunion:: cell_size_t
|
4
docs/source/union/df/d0e/unionclause__ptr.rst
Normal file
4
docs/source/union/df/d0e/unionclause__ptr.rst
Normal file
@ -0,0 +1,4 @@
|
||||
Union clause_ptr
|
||||
================
|
||||
|
||||
.. doxygenunion:: clause_ptr
|
4
docs/source/union/df/d1b/unionclause__obj.rst
Normal file
4
docs/source/union/df/d1b/unionclause__obj.rst
Normal file
@ -0,0 +1,4 @@
|
||||
Union clause_obj
|
||||
================
|
||||
|
||||
.. doxygenunion:: clause_obj
|
4
docs/source/union/df/dfa/unionoptvalue.rst
Normal file
4
docs/source/union/df/dfa/unionoptvalue.rst
Normal file
@ -0,0 +1,4 @@
|
||||
Union optvalue
|
||||
==============
|
||||
|
||||
.. doxygenunion:: optvalue
|
319
docs/sx/conf.py
Normal file
319
docs/sx/conf.py
Normal file
@ -0,0 +1,319 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# yap documentation build configuration file, created by
|
||||
# sphinx-quickstart on Tue Jan 5 11:01:36 2016.
|
||||
#
|
||||
# This file is execfile()d with the current directory set to its
|
||||
# containing dir.
|
||||
#
|
||||
# Note that not all possible configuration values are present in this
|
||||
# autogenerated file.
|
||||
#
|
||||
# All configuration values have a default; values that are commented out
|
||||
# serve to show the default.
|
||||
|
||||
import sys
|
||||
import os
|
||||
import shlex
|
||||
|
||||
# If extensions (or modules to document with autodoc) are in another directory,
|
||||
# add these directories to sys.path here. If the directory is relative to the
|
||||
# documentation root, use os.path.abspath to make it absolute, like shown here.
|
||||
#sys.path.insert(0, os.path.abspath('.'))
|
||||
sys.path.append( "/usr/local/lib/python3.6/site-packages/breathe")
|
||||
|
||||
# -- General configuration ------------------------------------------------
|
||||
|
||||
# If your documentation needs a minimal Sphinx version, state it here.
|
||||
#needs_sphinx = '1.0'
|
||||
|
||||
# Add any Sphinx extension module names here, as strings. They can be
|
||||
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
|
||||
# ones.
|
||||
extensions = [
|
||||
'sphinx.ext.autodoc',
|
||||
'sphinx.ext.doctest',
|
||||
'sphinx.ext.intersphinx',
|
||||
'sphinx.ext.todo',
|
||||
'sphinx.ext.coverage',
|
||||
'sphinx.ext.mathjax',
|
||||
'sphinx.ext.ifconfig',
|
||||
'sphinx.ext.viewcode',
|
||||
'sphinx.ext.pngmath',
|
||||
'breathe'
|
||||
]
|
||||
|
||||
# Add any paths that contain templates here, relative to this directory.
|
||||
templates_path = ['_templates']
|
||||
|
||||
# The suffix(es) of source filenames.
|
||||
# You can specify multiple suffix as a list of string:
|
||||
source_suffix = ['.rst', '.md']
|
||||
#source_suffix = '.rst'
|
||||
|
||||
# The encoding of source files.
|
||||
#source_encoding = 'utf-8-sig'
|
||||
|
||||
# The master toctree document.
|
||||
master_doc = 'index'
|
||||
|
||||
# General information about the project.
|
||||
project = u'yap'
|
||||
copyright = u'2016, Vitor Santos Costa'
|
||||
author = u'Vitor Santos Costa'
|
||||
|
||||
# The version info for the project you're documenting, acts as replacement for
|
||||
# |version| and |release|, also used in various other places throughout the
|
||||
# built documents.
|
||||
#
|
||||
# The short X.Y version.
|
||||
version = u'4.6.3'
|
||||
# The full version, including alpha/beta/rc tags.
|
||||
release = u'4.6.3'
|
||||
|
||||
# The language for content autogenerated by Sphinx. Refer to documentation
|
||||
# for a list of supported languages.
|
||||
#
|
||||
# This is also used if you do content translation via gettext catalogs.
|
||||
# Usually you set "language" from the command line for these cases.
|
||||
language = None
|
||||
|
||||
# There are two options for replacing |today|: either, you set today to some
|
||||
# non-false value, then it is used:
|
||||
#today = ''
|
||||
# Else, today_fmt is used as the format for a strftime call.
|
||||
#today_fmt = '%B %d, %Y'
|
||||
|
||||
# List of patterns, relative to source directory, that match files and
|
||||
# directories to ignore when looking for source files.
|
||||
exclude_patterns = []
|
||||
|
||||
# The reST default role (used for this markup: `text`) to use for all
|
||||
# documents.
|
||||
#default_role = None
|
||||
|
||||
# If true, '()' will be appended to :func: etc. cross-reference text.
|
||||
#add_function_parentheses = True
|
||||
|
||||
# If true, the current module name will be prepended to all description
|
||||
# unit titles (such as .. function::).
|
||||
#add_module_names = True
|
||||
|
||||
# If true, sectionauthor and moduleauthor directives will be shown in the
|
||||
# output. They are ignored by default.
|
||||
#show_authors = False
|
||||
|
||||
# The name of the Pygments (syntax highlighting) style to use.
|
||||
pygments_style = 'sphinx'
|
||||
|
||||
# A list of ignored prefixes for module index sorting.
|
||||
#modindex_common_prefix = []
|
||||
|
||||
# If true, keep warnings as "system message" paragraphs in the built documents.
|
||||
#keep_warnings = False
|
||||
|
||||
# If true, `todo` and `todoList` produce output, else they produce nothing.
|
||||
todo_include_todos = True
|
||||
|
||||
breathe_projects = { "yap": "/Users/vsc/github/yap-6.3/Debug/docs/xml" }
|
||||
|
||||
#Specify a default project:
|
||||
|
||||
breathe_default_project = "yap"
|
||||
|
||||
#Once this is done you may use the following commands:
|
||||
|
||||
.. doxygenindex::
|
||||
.. doxygenfunction::
|
||||
.. doxygenstruct::
|
||||
.. doxygenenum::
|
||||
.. doxygentypedef::
|
||||
.. doxygenclass::
|
||||
|
||||
|
||||
# -- Options for HTML output ----------------------------------------------
|
||||
|
||||
# The theme to use for HTML and HTML Help pages. See the documentation for
|
||||
# a list of builtin themes.
|
||||
html_theme = 'alabaster'
|
||||
|
||||
# Theme options are theme-specific and customize the look and feel of a theme
|
||||
# further. For a list of options available for each theme, see the
|
||||
# documentation.
|
||||
#html_theme_options = {}
|
||||
|
||||
# Add any paths that contain custom themes here, relative to this directory.
|
||||
#html_theme_path = []
|
||||
|
||||
# The name for this set of Sphinx documents. If None, it defaults to
|
||||
# "<project> v<release> documentation".
|
||||
#html_title = None
|
||||
|
||||
# A shorter title for the navigation bar. Default is the same as html_title.
|
||||
#html_short_title = None
|
||||
|
||||
# The name of an image file (relative to this directory) to place at the top
|
||||
# of the sidebar.
|
||||
#html_logo = None
|
||||
|
||||
# The name of an image file (within the static path) to use as favicon of the
|
||||
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
|
||||
# pixels large.
|
||||
#html_favicon = None
|
||||
|
||||
# Add any paths that contain custom static files (such as style sheets) here,
|
||||
# relative to this directory. They are copied after the builtin static files,
|
||||
# so a file named "default.css" will overwrite the builtin "default.css".
|
||||
html_static_path = ['_static']
|
||||
|
||||
# Add any extra paths that contain custom files (such as robots.txt or
|
||||
# .htaccess) here, relative to this directory. These files are copied
|
||||
# directly to the root of the documentation.
|
||||
#html_extra_path = []
|
||||
|
||||
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
|
||||
# using the given strftime format.
|
||||
#html_last_updated_fmt = '%b %d, %Y'
|
||||
|
||||
# If true, SmartyPants will be used to convert quotes and dashes to
|
||||
# typographically correct entities.
|
||||
#html_use_smartypants = True
|
||||
|
||||
# Custom sidebar templates, maps document names to template names.
|
||||
#html_sidebars = {}
|
||||
|
||||
# Additional templates that should be rendered to pages, maps page names to
|
||||
# template names.
|
||||
#html_additional_pages = {}
|
||||
|
||||
# If false, no module index is generated.
|
||||
#html_domain_indices = True
|
||||
|
||||
# If false, no index is generated.
|
||||
#html_use_index = True
|
||||
|
||||
# If true, the index is split into individual pages for each letter.
|
||||
#html_split_index = False
|
||||
|
||||
# If true, links to the reST sources are added to the pages.
|
||||
#html_show_sourcelink = True
|
||||
|
||||
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
|
||||
#html_show_sphinx = True
|
||||
|
||||
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
|
||||
#html_show_copyright = True
|
||||
|
||||
# If true, an OpenSearch description file will be output, and all pages will
|
||||
# contain a <link> tag referring to it. The value of this option must be the
|
||||
# base URL from which the finished HTML is served.
|
||||
#html_use_opensearch = ''
|
||||
|
||||
# This is the file name suffix for HTML files (e.g. ".xhtml").
|
||||
#html_file_suffix = None
|
||||
|
||||
# Language to be used for generating the HTML full-text search index.
|
||||
# Sphinx supports the following languages:
|
||||
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
|
||||
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
|
||||
#html_search_language = 'en'
|
||||
|
||||
# A dictionary with options for the search language support, empty by default.
|
||||
# Now only 'ja' uses this config value
|
||||
#html_search_options = {'type': 'default'}
|
||||
|
||||
# The name of a javascript file (relative to the configuration directory) that
|
||||
# implements a search results scorer. If empty, the default will be used.
|
||||
#html_search_scorer = 'scorer.js'
|
||||
|
||||
# Output file base name for HTML help builder.
|
||||
htmlhelp_basename = 'yapdoc'
|
||||
|
||||
# -- Options for LaTeX output ---------------------------------------------
|
||||
|
||||
latex_elements = {
|
||||
# The paper size ('letterpaper' or 'a4paper').
|
||||
#'papersize': 'letterpaper',
|
||||
|
||||
# The font size ('10pt', '11pt' or '12pt').
|
||||
#'pointsize': '10pt',
|
||||
|
||||
# Additional stuff for the LaTeX preamble.
|
||||
#'preamble': '',
|
||||
|
||||
# Latex figure (float) alignment
|
||||
#'figure_align': 'htbp',
|
||||
}
|
||||
|
||||
# Grouping the document tree into LaTeX files. List of tuples
|
||||
# (source start file, target name, title,
|
||||
# author, documentclass [howto, manual, or own class]).
|
||||
latex_documents = [
|
||||
(master_doc, 'yap.tex', u'yap Documentation',
|
||||
u'Vitor Santos Costa', 'manual'),
|
||||
]
|
||||
|
||||
# The name of an image file (relative to this directory) to place at the top of
|
||||
# the title page.
|
||||
#latex_logo = None
|
||||
|
||||
# For "manual" documents, if this is true, then toplevel headings are parts,
|
||||
# not chapters.
|
||||
#latex_use_parts = False
|
||||
|
||||
# If true, show page references after internal links.
|
||||
#latex_show_pagerefs = False
|
||||
|
||||
# If true, show URL addresses after external links.
|
||||
#latex_show_urls = False
|
||||
|
||||
# Documents to append as an appendix to all manuals.
|
||||
#latex_appendices = []
|
||||
|
||||
# If false, no module index is generated.
|
||||
#latex_domain_indices = True
|
||||
|
||||
|
||||
# -- Options for manual page output ---------------------------------------
|
||||
|
||||
# One entry per manual page. List of tuples
|
||||
# (source start file, name, description, authors, manual section).
|
||||
man_pages = [
|
||||
(master_doc, 'yap', u'yap Documentation',
|
||||
[author], 1)
|
||||
]
|
||||
|
||||
# If true, show URL addresses after external links.
|
||||
#man_show_urls = False
|
||||
|
||||
|
||||
# -- Options for Texinfo output -------------------------------------------
|
||||
|
||||
# Grouping the document tree into Texinfo files. List of tuples
|
||||
# (source start file, target name, title, author,
|
||||
# dir menu entry, description, category)
|
||||
texinfo_documents = [
|
||||
(master_doc, 'yap', u'yap Documentation',
|
||||
author, 'yap', 'One line description of project.',
|
||||
'Miscellaneous'),
|
||||
]
|
||||
|
||||
# Documents to append as an appendix to all manuals.
|
||||
#texinfo_appendices = []
|
||||
|
||||
# If false, no module index is generated.
|
||||
#texinfo_domain_indices = True
|
||||
|
||||
# How to display URL addresses: 'footnote', 'no', or 'inline'.
|
||||
#texinfo_show_urls = 'footnote'
|
||||
|
||||
# If true, do not generate a @detailmenu in the "Top" node's menu.
|
||||
#texinfo_no_detailmenu = False
|
||||
|
||||
|
||||
# Example configuration for intersphinx: refer to the Python standard library.
|
||||
intersphinx_mapping = {'https://docs.python.org/': None}
|
||||
|
||||
breathe_projects = { "yap": "/Users/vsc/git/yap-6.3/Release/docs/xml/" }
|
||||
breathe_default_project = "yap"
|
||||
|
10
docs/sx/conf.py.in
Normal file
10
docs/sx/conf.py.in
Normal file
@ -0,0 +1,10 @@
|
||||
extensions = [
|
||||
breathe_projects = { "yap": "/Users/vsc/git/yap-6.3/Release/doc/xml/" }i
|
||||
breathe_default_project = "yap"
|
||||
.. doxygenindex::
|
||||
.. doxygenfunction::
|
||||
.. doxygenstruct::
|
||||
.. doxygenenum::
|
||||
.. doxygentypedef::
|
||||
.. doxygenclass::
|
||||
|
319
docs/sx/conf.pyo
Normal file
319
docs/sx/conf.pyo
Normal file
@ -0,0 +1,319 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# yap documentation build configuration file, created by
|
||||
# sphinx-quickstart on Tue Jan 5 11:01:36 2016.
|
||||
#
|
||||
# This file is execfile()d with the current directory set to its
|
||||
# containing dir.
|
||||
#
|
||||
# Note that not all possible configuration values are present in this
|
||||
# autogenerated file.
|
||||
#
|
||||
# All configuration values have a default; values that are commented out
|
||||
# serve to show the default.
|
||||
|
||||
import sys
|
||||
import os
|
||||
import shlex
|
||||
|
||||
# If extensions (or modules to document with autodoc) are in another directory,
|
||||
# add these directories to sys.path here. If the directory is relative to the
|
||||
# documentation root, use os.path.abspath to make it absolute, like shown here.
|
||||
#sys.path.insert(0, os.path.abspath('.'))
|
||||
sys.path.append( "/usr/local/lib/python3.6/site-packages/breathe")
|
||||
|
||||
# -- General configuration ------------------------------------------------
|
||||
|
||||
# If your documentation needs a minimal Sphinx version, state it here.
|
||||
#needs_sphinx = '1.0'
|
||||
|
||||
# Add any Sphinx extension module names here, as strings. They can be
|
||||
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
|
||||
# ones.
|
||||
extensions = [
|
||||
'sphinx.ext.autodoc',
|
||||
'sphinx.ext.doctest',
|
||||
'sphinx.ext.intersphinx',
|
||||
'sphinx.ext.todo',
|
||||
'sphinx.ext.coverage',
|
||||
'sphinx.ext.mathjax',
|
||||
'sphinx.ext.ifconfig',
|
||||
'sphinx.ext.viewcode',
|
||||
'sphinx.ext.pngmath',
|
||||
'breathe'
|
||||
]
|
||||
|
||||
# Add any paths that contain templates here, relative to this directory.
|
||||
templates_path = ['_templates']
|
||||
|
||||
# The suffix(es) of source filenames.
|
||||
# You can specify multiple suffix as a list of string:
|
||||
source_suffix = ['.rst', '.md']
|
||||
#source_suffix = '.rst'
|
||||
|
||||
# The encoding of source files.
|
||||
#source_encoding = 'utf-8-sig'
|
||||
|
||||
# The master toctree document.
|
||||
master_doc = 'index'
|
||||
|
||||
# General information about the project.
|
||||
project = u'yap'
|
||||
copyright = u'2016, Vitor Santos Costa'
|
||||
author = u'Vitor Santos Costa'
|
||||
|
||||
# The version info for the project you're documenting, acts as replacement for
|
||||
# |version| and |release|, also used in various other places throughout the
|
||||
# built documents.
|
||||
#
|
||||
# The short X.Y version.
|
||||
version = u'4.6.3'
|
||||
# The full version, including alpha/beta/rc tags.
|
||||
release = u'4.6.3'
|
||||
|
||||
# The language for content autogenerated by Sphinx. Refer to documentation
|
||||
# for a list of supported languages.
|
||||
#
|
||||
# This is also used if you do content translation via gettext catalogs.
|
||||
# Usually you set "language" from the command line for these cases.
|
||||
language = None
|
||||
|
||||
# There are two options for replacing |today|: either, you set today to some
|
||||
# non-false value, then it is used:
|
||||
#today = ''
|
||||
# Else, today_fmt is used as the format for a strftime call.
|
||||
#today_fmt = '%B %d, %Y'
|
||||
|
||||
# List of patterns, relative to source directory, that match files and
|
||||
# directories to ignore when looking for source files.
|
||||
exclude_patterns = []
|
||||
|
||||
# The reST default role (used for this markup: `text`) to use for all
|
||||
# documents.
|
||||
#default_role = None
|
||||
|
||||
# If true, '()' will be appended to :func: etc. cross-reference text.
|
||||
#add_function_parentheses = True
|
||||
|
||||
# If true, the current module name will be prepended to all description
|
||||
# unit titles (such as .. function::).
|
||||
#add_module_names = True
|
||||
|
||||
# If true, sectionauthor and moduleauthor directives will be shown in the
|
||||
# output. They are ignored by default.
|
||||
#show_authors = False
|
||||
|
||||
# The name of the Pygments (syntax highlighting) style to use.
|
||||
pygments_style = 'sphinx'
|
||||
|
||||
# A list of ignored prefixes for module index sorting.
|
||||
#modindex_common_prefix = []
|
||||
|
||||
# If true, keep warnings as "system message" paragraphs in the built documents.
|
||||
#keep_warnings = False
|
||||
|
||||
# If true, `todo` and `todoList` produce output, else they produce nothing.
|
||||
todo_include_todos = True
|
||||
|
||||
breathe_projects = { "yap": "../../Debug/docs/xml" }
|
||||
|
||||
#Specify a default project:
|
||||
|
||||
breathe_default_project = "yap"
|
||||
|
||||
#Once this is done you may use the following commands:
|
||||
|
||||
.. doxygenindex::
|
||||
.. doxygenfunction::
|
||||
.. doxygenstruct::
|
||||
.. doxygenenum::
|
||||
.. doxygentypedef::
|
||||
.. doxygenclass::
|
||||
|
||||
|
||||
# -- Options for HTML output ----------------------------------------------
|
||||
|
||||
# The theme to use for HTML and HTML Help pages. See the documentation for
|
||||
# a list of builtin themes.
|
||||
html_theme = 'alabaster'
|
||||
|
||||
# Theme options are theme-specific and customize the look and feel of a theme
|
||||
# further. For a list of options available for each theme, see the
|
||||
# documentation.
|
||||
#html_theme_options = {}
|
||||
|
||||
# Add any paths that contain custom themes here, relative to this directory.
|
||||
#html_theme_path = []
|
||||
|
||||
# The name for this set of Sphinx documents. If None, it defaults to
|
||||
# "<project> v<release> documentation".
|
||||
#html_title = None
|
||||
|
||||
# A shorter title for the navigation bar. Default is the same as html_title.
|
||||
#html_short_title = None
|
||||
|
||||
# The name of an image file (relative to this directory) to place at the top
|
||||
# of the sidebar.
|
||||
#html_logo = None
|
||||
|
||||
# The name of an image file (within the static path) to use as favicon of the
|
||||
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
|
||||
# pixels large.
|
||||
#html_favicon = None
|
||||
|
||||
# Add any paths that contain custom static files (such as style sheets) here,
|
||||
# relative to this directory. They are copied after the builtin static files,
|
||||
# so a file named "default.css" will overwrite the builtin "default.css".
|
||||
html_static_path = ['_static']
|
||||
|
||||
# Add any extra paths that contain custom files (such as robots.txt or
|
||||
# .htaccess) here, relative to this directory. These files are copied
|
||||
# directly to the root of the documentation.
|
||||
#html_extra_path = []
|
||||
|
||||
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
|
||||
# using the given strftime format.
|
||||
#html_last_updated_fmt = '%b %d, %Y'
|
||||
|
||||
# If true, SmartyPants will be used to convert quotes and dashes to
|
||||
# typographically correct entities.
|
||||
#html_use_smartypants = True
|
||||
|
||||
# Custom sidebar templates, maps document names to template names.
|
||||
#html_sidebars = {}
|
||||
|
||||
# Additional templates that should be rendered to pages, maps page names to
|
||||
# template names.
|
||||
#html_additional_pages = {}
|
||||
|
||||
# If false, no module index is generated.
|
||||
#html_domain_indices = True
|
||||
|
||||
# If false, no index is generated.
|
||||
#html_use_index = True
|
||||
|
||||
# If true, the index is split into individual pages for each letter.
|
||||
#html_split_index = False
|
||||
|
||||
# If true, links to the reST sources are added to the pages.
|
||||
#html_show_sourcelink = True
|
||||
|
||||
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
|
||||
#html_show_sphinx = True
|
||||
|
||||
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
|
||||
#html_show_copyright = True
|
||||
|
||||
# If true, an OpenSearch description file will be output, and all pages will
|
||||
# contain a <link> tag referring to it. The value of this option must be the
|
||||
# base URL from which the finished HTML is served.
|
||||
#html_use_opensearch = ''
|
||||
|
||||
# This is the file name suffix for HTML files (e.g. ".xhtml").
|
||||
#html_file_suffix = None
|
||||
|
||||
# Language to be used for generating the HTML full-text search index.
|
||||
# Sphinx supports the following languages:
|
||||
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
|
||||
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
|
||||
#html_search_language = 'en'
|
||||
|
||||
# A dictionary with options for the search language support, empty by default.
|
||||
# Now only 'ja' uses this config value
|
||||
#html_search_options = {'type': 'default'}
|
||||
|
||||
# The name of a javascript file (relative to the configuration directory) that
|
||||
# implements a search results scorer. If empty, the default will be used.
|
||||
#html_search_scorer = 'scorer.js'
|
||||
|
||||
# Output file base name for HTML help builder.
|
||||
htmlhelp_basename = 'yapdoc'
|
||||
|
||||
# -- Options for LaTeX output ---------------------------------------------
|
||||
|
||||
latex_elements = {
|
||||
# The paper size ('letterpaper' or 'a4paper').
|
||||
#'papersize': 'letterpaper',
|
||||
|
||||
# The font size ('10pt', '11pt' or '12pt').
|
||||
#'pointsize': '10pt',
|
||||
|
||||
# Additional stuff for the LaTeX preamble.
|
||||
#'preamble': '',
|
||||
|
||||
# Latex figure (float) alignment
|
||||
#'figure_align': 'htbp',
|
||||
}
|
||||
|
||||
# Grouping the document tree into LaTeX files. List of tuples
|
||||
# (source start file, target name, title,
|
||||
# author, documentclass [howto, manual, or own class]).
|
||||
latex_documents = [
|
||||
(master_doc, 'yap.tex', u'yap Documentation',
|
||||
u'Vitor Santos Costa', 'manual'),
|
||||
]
|
||||
|
||||
# The name of an image file (relative to this directory) to place at the top of
|
||||
# the title page.
|
||||
#latex_logo = None
|
||||
|
||||
# For "manual" documents, if this is true, then toplevel headings are parts,
|
||||
# not chapters.
|
||||
#latex_use_parts = False
|
||||
|
||||
# If true, show page references after internal links.
|
||||
#latex_show_pagerefs = False
|
||||
|
||||
# If true, show URL addresses after external links.
|
||||
#latex_show_urls = False
|
||||
|
||||
# Documents to append as an appendix to all manuals.
|
||||
#latex_appendices = []
|
||||
|
||||
# If false, no module index is generated.
|
||||
#latex_domain_indices = True
|
||||
|
||||
|
||||
# -- Options for manual page output ---------------------------------------
|
||||
|
||||
# One entry per manual page. List of tuples
|
||||
# (source start file, name, description, authors, manual section).
|
||||
man_pages = [
|
||||
(master_doc, 'yap', u'yap Documentation',
|
||||
[author], 1)
|
||||
]
|
||||
|
||||
# If true, show URL addresses after external links.
|
||||
#man_show_urls = False
|
||||
|
||||
|
||||
# -- Options for Texinfo output -------------------------------------------
|
||||
|
||||
# Grouping the document tree into Texinfo files. List of tuples
|
||||
# (source start file, target name, title, author,
|
||||
# dir menu entry, description, category)
|
||||
texinfo_documents = [
|
||||
(master_doc, 'yap', u'yap Documentation',
|
||||
author, 'yap', 'One line description of project.',
|
||||
'Miscellaneous'),
|
||||
]
|
||||
|
||||
# Documents to append as an appendix to all manuals.
|
||||
#texinfo_appendices = []
|
||||
|
||||
# If false, no module index is generated.
|
||||
#texinfo_domain_indices = True
|
||||
|
||||
# How to display URL addresses: 'footnote', 'no', or 'inline'.
|
||||
#texinfo_show_urls = 'footnote'
|
||||
|
||||
# If true, do not generate a @detailmenu in the "Top" node's menu.
|
||||
#texinfo_no_detailmenu = False
|
||||
|
||||
|
||||
# Example configuration for intersphinx: refer to the Python standard library.
|
||||
intersphinx_mapping = {'https://docs.python.org/': None}
|
||||
|
||||
breathe_projects = { "yap": "/Users/vsc/git/yap-6.3/Release/docs/xml/" }
|
||||
breathe_default_project = "yap"
|
||||
|
29
docs/sx/index.rst0
Normal file
29
docs/sx/index.rst0
Normal file
@ -0,0 +1,29 @@
|
||||
.. yap documentation master file, created by
|
||||
sphinx-quickstart on Tue Jan 5 11:01:36 2016.
|
||||
You can adapt this file completely to your liking, but it should at least
|
||||
contain the root `toctree` directive.
|
||||
|
||||
Welcome to yap's documentation!
|
||||
===============================
|
||||
|
||||
Contents:
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 2
|
||||
|
||||
.. doxygenindex::
|
||||
.. doxygenfunction::
|
||||
.. doxygenstruct::
|
||||
.. doxygenenum::
|
||||
.. doxygentypedef::
|
||||
.. doxygenclass::
|
||||
|
||||
|
||||
|
||||
Indices and tables
|
||||
==================
|
||||
|
||||
* :ref:`genindex`
|
||||
* :ref:`modindex`
|
||||
* :ref:`search`
|
||||
|
36
docs/sx/make.bat
Normal file
36
docs/sx/make.bat
Normal file
@ -0,0 +1,36 @@
|
||||
@ECHO OFF
|
||||
|
||||
pushd %~dp0
|
||||
|
||||
REM Command file for Sphinx documentation
|
||||
|
||||
if "%SPHINXBUILD%" == "" (
|
||||
set SPHINXBUILD=sphinx-build
|
||||
)
|
||||
set SOURCEDIR=source
|
||||
set BUILDDIR=build
|
||||
set SPHINXPROJ=YAP
|
||||
|
||||
if "%1" == "" goto help
|
||||
|
||||
%SPHINXBUILD% >NUL 2>NUL
|
||||
if errorlevel 9009 (
|
||||
echo.
|
||||
echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
|
||||
echo.installed, then set the SPHINXBUILD environment variable to point
|
||||
echo.to the full path of the 'sphinx-build' executable. Alternatively you
|
||||
echo.may add the Sphinx directory to PATH.
|
||||
echo.
|
||||
echo.If you don't have Sphinx installed, grab it from
|
||||
echo.http://sphinx-doc.org/
|
||||
exit /b 1
|
||||
)
|
||||
|
||||
%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS%
|
||||
goto end
|
||||
|
||||
:help
|
||||
%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS%
|
||||
|
||||
:end
|
||||
popd
|
@ -684,6 +684,8 @@ extern X_API YAP_Int YAP_FunctorToInt(YAP_Functor At);
|
||||
|
||||
extern X_API YAP_Functor YAP_IntToFunctor(YAP_Int i);
|
||||
|
||||
extern X_API YAP_PredEntryPtr YAP_TopGoal(void);
|
||||
|
||||
#define YAP_InitCPred(N, A, F) YAP_UserCPredicate(N, F, A)
|
||||
|
||||
__END_DECLS
|
||||
|
1
ipykernel
Submodule
1
ipykernel
Submodule
@ -0,0 +1 @@
|
||||
Subproject commit 629ac54cae9767310616d47d769665453619ac64
|
14
kernel
Normal file
14
kernel
Normal file
@ -0,0 +1,14 @@
|
||||
mkdir tmp_k
|
||||
cd tmp_k
|
||||
cp -a ~/github/ipykernel/* .
|
||||
for i in *py */*py */*/*py; do
|
||||
sed -i '.bk' -e 's/ipkernel/yapkernel/g' $i
|
||||
sed -i '.bk' -e 's/ipykernel/yap_kernel/g' $i
|
||||
sed -i '.bk' -e 's/IPKernelApp/YAP_KernelApp/g' $i
|
||||
sed -i '.bk' -e 's/IPythonKernel/YAPKernel/g' $i
|
||||
sed -i '.bk' -e 's/IPKernel/YAPKernel/g' $i
|
||||
done
|
||||
mv ipykernel yap_kernel
|
||||
mv ipykernel_launcher.py yap_kernel_launcher.py
|
||||
mv yap_kernel/ipkernel.py yap_kernel/yapkernel.py
|
||||
|
31
library/system/sys_config.h
Normal file
31
library/system/sys_config.h
Normal file
@ -0,0 +1,31 @@
|
||||
/* Define to 1 if you have the <openssl/ripemd.h> header file. */
|
||||
#ifndef HAVE_APACHE2_UTIL_MD5_H
|
||||
/* #undef HAVE_APACHE2_UTIL_MD5_H */
|
||||
#endif
|
||||
|
||||
/* Define to 1 if you have the <openssl/ripemd.h> header file. */
|
||||
#ifndef HAVE_APR_1_APR_MD5_H
|
||||
/* #undef HAVE_APR_1_APR_MD5_H */
|
||||
#endif
|
||||
|
||||
|
||||
/* Define to 1 if you have the <openssl/md5.h> header file. */
|
||||
#ifndef HAVE_OPENSSL_MD5_H
|
||||
/* #undef HAVE_OPENSSL_MD5_H */
|
||||
#endif
|
||||
|
||||
/* Define to 1 if you have the <openssl/ripemd.h> header file. */
|
||||
#ifndef HAVE_OPENSSL_RIPEMD_H
|
||||
/* #undef HAVE_OPENSSL_RIPEMD_H */
|
||||
#endif
|
||||
|
||||
/* "Define if you have the crypt function." */
|
||||
#ifndef HAVE_CRYPT
|
||||
/* #undef HAVE_CRYPT */
|
||||
#endif
|
||||
|
||||
/* Define to 1 if you have the <crypt.h> header file. */
|
||||
#ifndef HAVE_CRYPT_H
|
||||
/* #undef HAVE_CRYPT_H */
|
||||
#endif
|
||||
|
34
packages/Makefile.sav
Normal file
34
packages/Makefile.sav
Normal file
@ -0,0 +1,34 @@
|
||||
CODEGEN=code-generator.py
|
||||
DISJUNCTOR = -DDISJUNCTOR
|
||||
|
||||
yap: gecode_yap.so gecode.yap
|
||||
|
||||
gecode_yap.o: gecode_yap.cc gecode-common.icc \
|
||||
gecode_yap_cc_impl_auto_generated.icc \
|
||||
gecode_yap_cc_init_auto_generated.icc \
|
||||
gecode_yap_cc_forward_auto_generated.icc \
|
||||
disjunctor.icc disjunctor.hh
|
||||
$(CXX) -Wall -ggdb -c -shared -fPIC $(DISJUNCTOR) $(CPPFLAGS) $(CXXFLAGS) $(LDFLAGS) -o $@ $<
|
||||
|
||||
gecode_yap.so: gecode_yap.o
|
||||
$(CXX) -shared $(LDFLAGS) -o $@ $< -lgecodeint -lgecodeset -lgecodesearch
|
||||
|
||||
gecode_yap_cc_init_auto_generated.icc: $(CODEGEN)
|
||||
python $< -t yap-cc-init > $@
|
||||
|
||||
gecode_yap_cc_impl_auto_generated.icc: $(CODEGEN)
|
||||
python $< -t yap-cc-impl > $@
|
||||
|
||||
gecode_yap_cc_forward_auto_generated.icc: $(CODEGEN)
|
||||
python $< -t yap-cc-forward > $@
|
||||
|
||||
gecode_yap_auto_generated.yap: $(CODEGEN)
|
||||
python $< -t yap-prolog > $@
|
||||
|
||||
gecode.yap: gecode_yap_hand_written.yap gecode_yap_auto_generated.yap
|
||||
cat $^ > $@
|
||||
|
||||
.PHONY: yap
|
||||
|
||||
clean:
|
||||
-rm -f *.o *.so *~ *_auto_generated* gecode.yap
|
19
packages/bdd/bdd.md
Normal file
19
packages/bdd/bdd.md
Normal file
@ -0,0 +1,19 @@
|
||||
Boolean Decision Making in YAP {#BDDs}
|
||||
==============================
|
||||
|
||||
This is an experimental interface to BDD libraries. It is not as
|
||||
sophisticated as SimpleCUDD, but it should be fun to play around with BDDs.
|
||||
|
||||
It currently works with CUDD only, although it should be possible to
|
||||
port to other libraries. It requires the ability to dynamically link
|
||||
with the CUDD binaries. This works:
|
||||
|
||||
- in Fedora with the standard package
|
||||
- in macOS with a hand-compiled build or the MacPorts package
|
||||
|
||||
In Ubuntu, you may want to install the Fedora RPM, or just download the package from the original
|
||||
site and compile it.
|
||||
|
||||
|
||||
+ @ref BDDsPL
|
||||
+ @ref CUDD
|
131
packages/configure.in
Normal file
131
packages/configure.in
Normal file
@ -0,0 +1,131 @@
|
||||
dnl Gecode support
|
||||
|
||||
AC_ARG_WITH(gecode,
|
||||
[ --with-gecode install gecode library],
|
||||
[use_gecode="$withval"], use_gecode=no)
|
||||
|
||||
if test "$use_gecode" = no; then
|
||||
PKG_GECODE=""
|
||||
else
|
||||
PKG_GECODE="packages/gecode"
|
||||
if test -d "$use_gecode"; then
|
||||
CPPFLAGS="$CPPFLAGS -I $use_gecode/include"
|
||||
GECODE_EXTRALIBS="-L $use_gecode/lib"
|
||||
GECODE_INCLUDES=" $use_gecode/include"
|
||||
GECODE_PATH="$use_gecode/bin"
|
||||
dnl gecode is usually in /usr/local
|
||||
elif test -d /usr/local/include/gecode -a x$SYSROOT = x; then
|
||||
CPPFLAGS="$CPPFLAGS -I/usr/local/include"
|
||||
GECODE_EXTRALIBS="-L/usr/local/lib"
|
||||
fi
|
||||
fi
|
||||
|
||||
AC_SUBST(PKG_GECODE)
|
||||
|
||||
AC_MSG_CHECKING([if dynamic arrays are supported])
|
||||
|
||||
AC_COMPILE_IFELSE([
|
||||
AC_LANG_PROGRAM([[void foo(int n) { int a[n]; a[1]=0; }]],[[foo(3);]])
|
||||
],[
|
||||
AC_MSG_RESULT([yes])
|
||||
AC_DEFINE([HAVE_DYNARRAY],[1],[Define if dynamic arrays are supported])
|
||||
],[
|
||||
AC_MSG_RESULT([no])
|
||||
])
|
||||
|
||||
GECODE_VERSION=unknown-gecode-version
|
||||
|
||||
|
||||
if test "$use_gecode" != no; then
|
||||
if test $i_am_cross_compiling = yes
|
||||
then
|
||||
GECODE_VERSION=4.2.1
|
||||
elif test x"$WINDOWS" = x; then
|
||||
if test $ac_cv_sizeof_int_p = 8; then
|
||||
GECODE_ARCH=x64
|
||||
else
|
||||
GECODE_ARCH=i386
|
||||
fi
|
||||
if test "$use_gecode" = yes; then
|
||||
GECODE_PATH=`which fzn-gecode`
|
||||
GECODE_PATH=`dirname "$GECODE_PATH"`
|
||||
else
|
||||
GECODE_PATH="$use_gecode"
|
||||
fi
|
||||
GECODE_INCLUDES="$GECODE_PATH"/include
|
||||
GECODE_LIBDIR="\"$GECODE_PATH\""/lib
|
||||
GECODE_BINDIR="\"$GECODE_PATH\""/bin
|
||||
GECODE_TVERSION=`echo "$GECODE_PATH"/lib/GecodeDriver-*-d-"$GECODE_ARCH".lib | cut -d'-' -f 2-4`
|
||||
GECODE_VERSION=`echo "$GECODE_TVERSION"|sed 's/-/\./g'`
|
||||
GECODE_MAJOR=`echo $GECODE_VERSION| sed 's/\(^.\).*/\1/'`
|
||||
GE_SUF="-$GECODE_TVERSION-r-$GECODE_ARCH.dll"
|
||||
GECODE_EXTRALIBS="$GECODE_BINDIR/GecodeDriver$GE_SUF \
|
||||
$GECODE_BINDIR/GecodeSupport$GE_SUF \
|
||||
$GECODE_BINDIR/GecodeKernel$GE_SUF \
|
||||
$GECODE_BINDIR/GecodeInt$GE_SUF \
|
||||
$GECODE_BINDIR/GecodeSet$GE_SUF \
|
||||
$GECODE_BINDIR/GecodeSearch$GE_SUF"
|
||||
if test "$GECODE_MAJOR" = "4"; then
|
||||
GECODE_EXTRALIBS="$GECODE_EXTRALIBS $GECODE_BINDIR/GecodeFloat$GE_SUF"
|
||||
fi
|
||||
else
|
||||
AC_MSG_CHECKING([gecode version])
|
||||
AC_RUN_IFELSE([AC_LANG_PROGRAM([[
|
||||
#include "gecode/support/config.hpp"
|
||||
#include <stdio.h>
|
||||
]],[[
|
||||
FILE* out = fopen("conftest.out","w");
|
||||
fprintf(out,"%s\n",GECODE_VERSION);
|
||||
fclose(out);
|
||||
return 0;
|
||||
]])],[GECODE_VERSION=$(cat conftest.out)
|
||||
AC_MSG_RESULT([$GECODE_VERSION])],
|
||||
[AC_MSG_ERROR([cannot determine gecode version])])
|
||||
case "$target_os" in
|
||||
*darwin*)
|
||||
if test "$use_gecode" = yes; then
|
||||
AC_MSG_CHECKING([if -framework gecode is required])
|
||||
AC_LANG_PUSH([C++])
|
||||
saved_CXXFLAGS="$CXXFLAGS"
|
||||
CXXFLAGS="$CXXFLAGS -framework gecode"
|
||||
AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[
|
||||
#include "gecode/int.hh"
|
||||
]],[[
|
||||
Gecode::Exception e("","");
|
||||
return 0;
|
||||
]])],[GECODE_EXTRALIBS="-framework gecode"
|
||||
AC_MSG_RESULT([yes])],
|
||||
[AC_MSG_RESULT([no])])
|
||||
AC_LANG_POP()
|
||||
CXXFLAGS="$saved_CXXFLAGS"
|
||||
fi
|
||||
;;
|
||||
esac
|
||||
|
||||
GECODE_MAJOR=`echo $GECODE_VERSION| sed 's/\(^.\).*/\1/'`
|
||||
if test "$GECODE_MAJOR" = "3"; then
|
||||
GECODE_EXTRALIBS="$GECODE_EXTRALIBS -lgecodesupport -lgecodekernel -lgecodeint -lgecodeset -lgecodesearch"
|
||||
else
|
||||
GECODE_EXTRALIBS="$GECODE_EXTRALIBS -lgecodesupport -lgecodekernel -lgecodefloat -lgecodeint -lgecodeset -lgecodesearch"
|
||||
fi
|
||||
|
||||
fi
|
||||
fi
|
||||
|
||||
saved_CPPFLAGS="$CPPFLAGS"
|
||||
CPPFLAGS="$CPPFLAGS -I \"$GECODE_INCLUDES\""
|
||||
|
||||
AC_CHECK_HEADER(gecode/support/config.hpp)
|
||||
|
||||
AC_SUBST(GECODE_EXTRALIBS)
|
||||
AC_SUBST(GECODE_INCLUDES)
|
||||
AC_SUBST(GECODE_VERSION)
|
||||
AC_SUBST(GECODE_MAJOR)
|
||||
|
||||
|
||||
if test "$PKG_GECODE" = "packages/gecode"; then
|
||||
AC_CONFIG_FILES([packages/gecode/Makefile])
|
||||
fi
|
||||
|
||||
CPPFLAGS="$saved_CPPFLAGS"
|
||||
|
1
packages/gecode/5.1.0/gecode-version.txt
vendored
Normal file
1
packages/gecode/5.1.0/gecode-version.txt
vendored
Normal file
@ -0,0 +1 @@
|
||||
5.1.0
|
3493
packages/gecode/5.1.0/gecode_yap_auto_generated.yap
vendored
Normal file
3493
packages/gecode/5.1.0/gecode_yap_auto_generated.yap
vendored
Normal file
File diff suppressed because it is too large
Load Diff
28
packages/gecode/5.1.0/gecode_yap_cc_forward_auto_generated.icc
vendored
Normal file
28
packages/gecode/5.1.0/gecode_yap_cc_forward_auto_generated.icc
vendored
Normal file
@ -0,0 +1,28 @@
|
||||
// -*- c++ -*-
|
||||
//=============================================================================
|
||||
// Copyright (C) 2011 by Denys Duchier
|
||||
//
|
||||
// This program is free software: you can redistribute it and/or modify it
|
||||
// under the terms of the GNU Lesser General Public License as published by the
|
||||
// Free Software Foundation, either version 3 of the License, or (at your
|
||||
// option) any later version.
|
||||
//
|
||||
// This program is distributed in the hope that it will be useful, but WITHOUT
|
||||
// ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
// FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
|
||||
// more details.
|
||||
//
|
||||
// You should have received a copy of the GNU Lesser General Public License
|
||||
// along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
//=============================================================================
|
||||
|
||||
static RestartMode gecode_RestartMode_from_term(YAP_Term);
|
||||
static FloatRelType gecode_FloatRelType_from_term(YAP_Term);
|
||||
static ReifyMode gecode_ReifyMode_from_term(YAP_Term);
|
||||
static IntRelType gecode_IntRelType_from_term(YAP_Term);
|
||||
static BoolOpType gecode_BoolOpType_from_term(YAP_Term);
|
||||
static IntPropLevel gecode_IntPropLevel_from_term(YAP_Term);
|
||||
static TaskType gecode_TaskType_from_term(YAP_Term);
|
||||
static TraceEvent gecode_TraceEvent_from_term(YAP_Term);
|
||||
static SetRelType gecode_SetRelType_from_term(YAP_Term);
|
||||
static SetOpType gecode_SetOpType_from_term(YAP_Term);
|
5176
packages/gecode/5.1.0/gecode_yap_cc_impl_auto_generated.icc
vendored
Normal file
5176
packages/gecode/5.1.0/gecode_yap_cc_impl_auto_generated.icc
vendored
Normal file
File diff suppressed because it is too large
Load Diff
660
packages/gecode/5.1.0/gecode_yap_cc_init_auto_generated.icc
vendored
Normal file
660
packages/gecode/5.1.0/gecode_yap_cc_init_auto_generated.icc
vendored
Normal file
@ -0,0 +1,660 @@
|
||||
// -*- c++ -*-
|
||||
//=============================================================================
|
||||
// Copyright (C) 2011 by Denys Duchier
|
||||
//
|
||||
// This program is free software: you can redistribute it and/or modify it
|
||||
// under the terms of the GNU Lesser General Public License as published by the
|
||||
// Free Software Foundation, either version 3 of the License, or (at your
|
||||
// option) any later version.
|
||||
//
|
||||
// This program is distributed in the hope that it will be useful, but WITHOUT
|
||||
// ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
// FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
|
||||
// more details.
|
||||
//
|
||||
// You should have received a copy of the GNU Lesser General Public License
|
||||
// along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
//=============================================================================
|
||||
|
||||
{ YAP_Atom X= YAP_LookupAtom("RM_NONE");
|
||||
gecode_RM_NONE = YAP_MkAtomTerm(X);
|
||||
YAP_AtomGetHold(X); }
|
||||
{ YAP_Atom X= YAP_LookupAtom("RM_CONSTANT");
|
||||
gecode_RM_CONSTANT = YAP_MkAtomTerm(X);
|
||||
YAP_AtomGetHold(X); }
|
||||
{ YAP_Atom X= YAP_LookupAtom("RM_LINEAR");
|
||||
gecode_RM_LINEAR = YAP_MkAtomTerm(X);
|
||||
YAP_AtomGetHold(X); }
|
||||
{ YAP_Atom X= YAP_LookupAtom("RM_LUBY");
|
||||
gecode_RM_LUBY = YAP_MkAtomTerm(X);
|
||||
YAP_AtomGetHold(X); }
|
||||
{ YAP_Atom X= YAP_LookupAtom("RM_GEOMETRIC");
|
||||
gecode_RM_GEOMETRIC = YAP_MkAtomTerm(X);
|
||||
YAP_AtomGetHold(X); }
|
||||
|
||||
{ YAP_Atom X= YAP_LookupAtom("FRT_EQ");
|
||||
gecode_FRT_EQ = YAP_MkAtomTerm(X);
|
||||
YAP_AtomGetHold(X); }
|
||||
{ YAP_Atom X= YAP_LookupAtom("FRT_NQ");
|
||||
gecode_FRT_NQ = YAP_MkAtomTerm(X);
|
||||
YAP_AtomGetHold(X); }
|
||||
{ YAP_Atom X= YAP_LookupAtom("FRT_LQ");
|
||||
gecode_FRT_LQ = YAP_MkAtomTerm(X);
|
||||
YAP_AtomGetHold(X); }
|
||||
{ YAP_Atom X= YAP_LookupAtom("FRT_LE");
|
||||
gecode_FRT_LE = YAP_MkAtomTerm(X);
|
||||
YAP_AtomGetHold(X); }
|
||||
{ YAP_Atom X= YAP_LookupAtom("FRT_GQ");
|
||||
gecode_FRT_GQ = YAP_MkAtomTerm(X);
|
||||
YAP_AtomGetHold(X); }
|
||||
{ YAP_Atom X= YAP_LookupAtom("FRT_GR");
|
||||
gecode_FRT_GR = YAP_MkAtomTerm(X);
|
||||
YAP_AtomGetHold(X); }
|
||||
|
||||
{ YAP_Atom X= YAP_LookupAtom("RM_EQV");
|
||||
gecode_RM_EQV = YAP_MkAtomTerm(X);
|
||||
YAP_AtomGetHold(X); }
|
||||
{ YAP_Atom X= YAP_LookupAtom("RM_IMP");
|
||||
gecode_RM_IMP = YAP_MkAtomTerm(X);
|
||||
YAP_AtomGetHold(X); }
|
||||
{ YAP_Atom X= YAP_LookupAtom("RM_PMI");
|
||||
gecode_RM_PMI = YAP_MkAtomTerm(X);
|
||||
YAP_AtomGetHold(X); }
|
||||
|
||||
{ YAP_Atom X= YAP_LookupAtom("IRT_EQ");
|
||||
gecode_IRT_EQ = YAP_MkAtomTerm(X);
|
||||
YAP_AtomGetHold(X); }
|
||||
{ YAP_Atom X= YAP_LookupAtom("IRT_NQ");
|
||||
gecode_IRT_NQ = YAP_MkAtomTerm(X);
|
||||
YAP_AtomGetHold(X); }
|
||||
{ YAP_Atom X= YAP_LookupAtom("IRT_LQ");
|
||||
gecode_IRT_LQ = YAP_MkAtomTerm(X);
|
||||
YAP_AtomGetHold(X); }
|
||||
{ YAP_Atom X= YAP_LookupAtom("IRT_LE");
|
||||
gecode_IRT_LE = YAP_MkAtomTerm(X);
|
||||
YAP_AtomGetHold(X); }
|
||||
{ YAP_Atom X= YAP_LookupAtom("IRT_GQ");
|
||||
gecode_IRT_GQ = YAP_MkAtomTerm(X);
|
||||
YAP_AtomGetHold(X); }
|
||||
{ YAP_Atom X= YAP_LookupAtom("IRT_GR");
|
||||
gecode_IRT_GR = YAP_MkAtomTerm(X);
|
||||
YAP_AtomGetHold(X); }
|
||||
|
||||
{ YAP_Atom X= YAP_LookupAtom("BOT_AND");
|
||||
gecode_BOT_AND = YAP_MkAtomTerm(X);
|
||||
YAP_AtomGetHold(X); }
|
||||
{ YAP_Atom X= YAP_LookupAtom("BOT_OR");
|
||||
gecode_BOT_OR = YAP_MkAtomTerm(X);
|
||||
YAP_AtomGetHold(X); }
|
||||
{ YAP_Atom X= YAP_LookupAtom("BOT_IMP");
|
||||
gecode_BOT_IMP = YAP_MkAtomTerm(X);
|
||||
YAP_AtomGetHold(X); }
|
||||
{ YAP_Atom X= YAP_LookupAtom("BOT_EQV");
|
||||
gecode_BOT_EQV = YAP_MkAtomTerm(X);
|
||||
YAP_AtomGetHold(X); }
|
||||
{ YAP_Atom X= YAP_LookupAtom("BOT_XOR");
|
||||
gecode_BOT_XOR = YAP_MkAtomTerm(X);
|
||||
YAP_AtomGetHold(X); }
|
||||
|
||||
{ YAP_Atom X= YAP_LookupAtom("IPL_DEF");
|
||||
gecode_IPL_DEF = YAP_MkAtomTerm(X);
|
||||
YAP_AtomGetHold(X); }
|
||||
{ YAP_Atom X= YAP_LookupAtom("IPL_VAL");
|
||||
gecode_IPL_VAL = YAP_MkAtomTerm(X);
|
||||
YAP_AtomGetHold(X); }
|
||||
{ YAP_Atom X= YAP_LookupAtom("IPL_BND");
|
||||
gecode_IPL_BND = YAP_MkAtomTerm(X);
|
||||
YAP_AtomGetHold(X); }
|
||||
{ YAP_Atom X= YAP_LookupAtom("IPL_DOM");
|
||||
gecode_IPL_DOM = YAP_MkAtomTerm(X);
|
||||
YAP_AtomGetHold(X); }
|
||||
{ YAP_Atom X= YAP_LookupAtom("IPL_SPEED");
|
||||
gecode_IPL_SPEED = YAP_MkAtomTerm(X);
|
||||
YAP_AtomGetHold(X); }
|
||||
{ YAP_Atom X= YAP_LookupAtom("IPL_MEMORY");
|
||||
gecode_IPL_MEMORY = YAP_MkAtomTerm(X);
|
||||
YAP_AtomGetHold(X); }
|
||||
{ YAP_Atom X= YAP_LookupAtom("IPL_BASIC");
|
||||
gecode_IPL_BASIC = YAP_MkAtomTerm(X);
|
||||
YAP_AtomGetHold(X); }
|
||||
{ YAP_Atom X= YAP_LookupAtom("IPL_ADVANCED");
|
||||
gecode_IPL_ADVANCED = YAP_MkAtomTerm(X);
|
||||
YAP_AtomGetHold(X); }
|
||||
{ YAP_Atom X= YAP_LookupAtom("IPL_BASIC_ADVANCED");
|
||||
gecode_IPL_BASIC_ADVANCED = YAP_MkAtomTerm(X);
|
||||
YAP_AtomGetHold(X); }
|
||||
|
||||
{ YAP_Atom X= YAP_LookupAtom("TT_FIXP");
|
||||
gecode_TT_FIXP = YAP_MkAtomTerm(X);
|
||||
YAP_AtomGetHold(X); }
|
||||
{ YAP_Atom X= YAP_LookupAtom("TT_FIXS");
|
||||
gecode_TT_FIXS = YAP_MkAtomTerm(X);
|
||||
YAP_AtomGetHold(X); }
|
||||
{ YAP_Atom X= YAP_LookupAtom("TT_FIXE");
|
||||
gecode_TT_FIXE = YAP_MkAtomTerm(X);
|
||||
YAP_AtomGetHold(X); }
|
||||
|
||||
{ YAP_Atom X= YAP_LookupAtom("TE_INIT");
|
||||
gecode_TE_INIT = YAP_MkAtomTerm(X);
|
||||
YAP_AtomGetHold(X); }
|
||||
{ YAP_Atom X= YAP_LookupAtom("TE_PRUNE");
|
||||
gecode_TE_PRUNE = YAP_MkAtomTerm(X);
|
||||
YAP_AtomGetHold(X); }
|
||||
{ YAP_Atom X= YAP_LookupAtom("TE_FIX");
|
||||
gecode_TE_FIX = YAP_MkAtomTerm(X);
|
||||
YAP_AtomGetHold(X); }
|
||||
{ YAP_Atom X= YAP_LookupAtom("TE_FAIL");
|
||||
gecode_TE_FAIL = YAP_MkAtomTerm(X);
|
||||
YAP_AtomGetHold(X); }
|
||||
{ YAP_Atom X= YAP_LookupAtom("TE_DONE");
|
||||
gecode_TE_DONE = YAP_MkAtomTerm(X);
|
||||
YAP_AtomGetHold(X); }
|
||||
{ YAP_Atom X= YAP_LookupAtom("TE_PROPAGATE");
|
||||
gecode_TE_PROPAGATE = YAP_MkAtomTerm(X);
|
||||
YAP_AtomGetHold(X); }
|
||||
{ YAP_Atom X= YAP_LookupAtom("TE_COMMIT");
|
||||
gecode_TE_COMMIT = YAP_MkAtomTerm(X);
|
||||
YAP_AtomGetHold(X); }
|
||||
|
||||
{ YAP_Atom X= YAP_LookupAtom("SRT_EQ");
|
||||
gecode_SRT_EQ = YAP_MkAtomTerm(X);
|
||||
YAP_AtomGetHold(X); }
|
||||
{ YAP_Atom X= YAP_LookupAtom("SRT_NQ");
|
||||
gecode_SRT_NQ = YAP_MkAtomTerm(X);
|
||||
YAP_AtomGetHold(X); }
|
||||
{ YAP_Atom X= YAP_LookupAtom("SRT_SUB");
|
||||
gecode_SRT_SUB = YAP_MkAtomTerm(X);
|
||||
YAP_AtomGetHold(X); }
|
||||
{ YAP_Atom X= YAP_LookupAtom("SRT_SUP");
|
||||
gecode_SRT_SUP = YAP_MkAtomTerm(X);
|
||||
YAP_AtomGetHold(X); }
|
||||
{ YAP_Atom X= YAP_LookupAtom("SRT_DISJ");
|
||||
gecode_SRT_DISJ = YAP_MkAtomTerm(X);
|
||||
YAP_AtomGetHold(X); }
|
||||
{ YAP_Atom X= YAP_LookupAtom("SRT_CMPL");
|
||||
gecode_SRT_CMPL = YAP_MkAtomTerm(X);
|
||||
YAP_AtomGetHold(X); }
|
||||
{ YAP_Atom X= YAP_LookupAtom("SRT_LQ");
|
||||
gecode_SRT_LQ = YAP_MkAtomTerm(X);
|
||||
YAP_AtomGetHold(X); }
|
||||
{ YAP_Atom X= YAP_LookupAtom("SRT_LE");
|
||||
gecode_SRT_LE = YAP_MkAtomTerm(X);
|
||||
YAP_AtomGetHold(X); }
|
||||
{ YAP_Atom X= YAP_LookupAtom("SRT_GQ");
|
||||
gecode_SRT_GQ = YAP_MkAtomTerm(X);
|
||||
YAP_AtomGetHold(X); }
|
||||
{ YAP_Atom X= YAP_LookupAtom("SRT_GR");
|
||||
gecode_SRT_GR = YAP_MkAtomTerm(X);
|
||||
YAP_AtomGetHold(X); }
|
||||
|
||||
{ YAP_Atom X= YAP_LookupAtom("SOT_UNION");
|
||||
gecode_SOT_UNION = YAP_MkAtomTerm(X);
|
||||
YAP_AtomGetHold(X); }
|
||||
{ YAP_Atom X= YAP_LookupAtom("SOT_DUNION");
|
||||
gecode_SOT_DUNION = YAP_MkAtomTerm(X);
|
||||
YAP_AtomGetHold(X); }
|
||||
{ YAP_Atom X= YAP_LookupAtom("SOT_INTER");
|
||||
gecode_SOT_INTER = YAP_MkAtomTerm(X);
|
||||
YAP_AtomGetHold(X); }
|
||||
{ YAP_Atom X= YAP_LookupAtom("SOT_MINUS");
|
||||
gecode_SOT_MINUS = YAP_MkAtomTerm(X);
|
||||
YAP_AtomGetHold(X); }
|
||||
|
||||
YAP_UserCPredicate("gecode_constraint_unary_450", gecode_constraint_unary_450, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_unary_446", gecode_constraint_unary_446, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_nvalues_340", gecode_constraint_nvalues_340, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_nvalues_342", gecode_constraint_nvalues_342, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_nvalues_336", gecode_constraint_nvalues_336, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_nvalues_338", gecode_constraint_nvalues_338, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_max_300", gecode_constraint_max_300, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_max_304", gecode_constraint_max_304, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_max_299", gecode_constraint_max_299, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_dom_198", gecode_constraint_dom_198, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_dom_204", gecode_constraint_dom_204, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_argmin_13", gecode_constraint_argmin_13, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_convex_3", gecode_constraint_convex_3, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_nooverlap_324", gecode_constraint_nooverlap_324, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_assign_21", gecode_constraint_assign_21, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_assign_30", gecode_constraint_assign_30, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_assign_27", gecode_constraint_assign_27, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_assign_19", gecode_constraint_assign_19, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_assign_33", gecode_constraint_assign_33, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_assign_23", gecode_constraint_assign_23, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_assign_36", gecode_constraint_assign_36, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_assign_25", gecode_constraint_assign_25, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_element_239", gecode_constraint_element_239, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_element_241", gecode_constraint_element_241, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_element_235", gecode_constraint_element_235, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_element_233", gecode_constraint_element_233, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_element_225", gecode_constraint_element_225, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_element_227", gecode_constraint_element_227, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_element_223", gecode_constraint_element_223, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_ite_256", gecode_constraint_ite_256, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_ite_253", gecode_constraint_ite_253, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_ite_255", gecode_constraint_ite_255, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_ite_258", gecode_constraint_ite_258, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_unary_439", gecode_constraint_unary_439, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_nroot_334", gecode_constraint_nroot_334, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_circuit_94", gecode_constraint_circuit_94, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_circuit_83", gecode_constraint_circuit_83, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_dom_196", gecode_constraint_dom_196, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_dom_197", gecode_constraint_dom_197, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_dom_194", gecode_constraint_dom_194, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_dom_190", gecode_constraint_dom_190, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_dom_221", gecode_constraint_dom_221, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_dom_218", gecode_constraint_dom_218, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_dom_181", gecode_constraint_dom_181, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_dom_203", gecode_constraint_dom_203, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_dom_202", gecode_constraint_dom_202, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_dom_200", gecode_constraint_dom_200, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_argmax_12", gecode_constraint_argmax_12, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_nooverlap_329", gecode_constraint_nooverlap_329, 8);
|
||||
YAP_UserCPredicate("gecode_constraint_nooverlap_330", gecode_constraint_nooverlap_330, 8);
|
||||
YAP_UserCPredicate("gecode_constraint_element_243", gecode_constraint_element_243, 7);
|
||||
YAP_UserCPredicate("gecode_constraint_element_237", gecode_constraint_element_237, 7);
|
||||
YAP_UserCPredicate("gecode_constraint_element_231", gecode_constraint_element_231, 7);
|
||||
YAP_UserCPredicate("gecode_constraint_element_229", gecode_constraint_element_229, 7);
|
||||
YAP_UserCPredicate("gecode_constraint_max_302", gecode_constraint_max_302, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_max_303", gecode_constraint_max_303, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_unshare_453", gecode_constraint_unshare_453, 2);
|
||||
YAP_UserCPredicate("gecode_constraint_unshare_451", gecode_constraint_unshare_451, 2);
|
||||
YAP_UserCPredicate("gecode_constraint_path_353", gecode_constraint_path_353, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_path_352", gecode_constraint_path_352, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_branch_63", gecode_constraint_branch_63, 7);
|
||||
YAP_UserCPredicate("gecode_constraint_branch_54", gecode_constraint_branch_54, 7);
|
||||
YAP_UserCPredicate("gecode_constraint_branch_69", gecode_constraint_branch_69, 7);
|
||||
YAP_UserCPredicate("gecode_constraint_mult_322", gecode_constraint_mult_322, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_mult_321", gecode_constraint_mult_321, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_clause_98", gecode_constraint_clause_98, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_clause_96", gecode_constraint_clause_96, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_precede_361", gecode_constraint_precede_361, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_argmax_7", gecode_constraint_argmax_7, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_distinct_169", gecode_constraint_distinct_169, 2);
|
||||
YAP_UserCPredicate("gecode_constraint_member_310", gecode_constraint_member_310, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_member_311", gecode_constraint_member_311, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_member_306", gecode_constraint_member_306, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_member_307", gecode_constraint_member_307, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_mod_320", gecode_constraint_mod_320, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_linear_271", gecode_constraint_linear_271, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_linear_273", gecode_constraint_linear_273, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_linear_259", gecode_constraint_linear_259, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_linear_263", gecode_constraint_linear_263, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_linear_291", gecode_constraint_linear_291, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_linear_295", gecode_constraint_linear_295, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_circuit_91", gecode_constraint_circuit_91, 2);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_379", gecode_constraint_rel_379, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_381", gecode_constraint_rel_381, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_413", gecode_constraint_rel_413, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_411", gecode_constraint_rel_411, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_407", gecode_constraint_rel_407, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_403", gecode_constraint_rel_403, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_399", gecode_constraint_rel_399, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_405", gecode_constraint_rel_405, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_375", gecode_constraint_rel_375, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_376", gecode_constraint_rel_376, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_371", gecode_constraint_rel_371, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_372", gecode_constraint_rel_372, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_368", gecode_constraint_rel_368, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_366", gecode_constraint_rel_366, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_417", gecode_constraint_rel_417, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_423", gecode_constraint_rel_423, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_421", gecode_constraint_rel_421, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_419", gecode_constraint_rel_419, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_383", gecode_constraint_rel_383, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_384", gecode_constraint_rel_384, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_387", gecode_constraint_rel_387, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_388", gecode_constraint_rel_388, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_391", gecode_constraint_rel_391, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_393", gecode_constraint_rel_393, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_395", gecode_constraint_rel_395, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_397", gecode_constraint_rel_397, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_365", gecode_constraint_rel_365, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_363", gecode_constraint_rel_363, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_min_314", gecode_constraint_min_314, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_min_318", gecode_constraint_min_318, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_min_313", gecode_constraint_min_313, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_when_456", gecode_constraint_when_456, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_when_457", gecode_constraint_when_457, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_cardinality_71", gecode_constraint_cardinality_71, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_cardinality_70", gecode_constraint_cardinality_70, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_count_118", gecode_constraint_count_118, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_count_119", gecode_constraint_count_119, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_count_111", gecode_constraint_count_111, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_count_114", gecode_constraint_count_114, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_count_115", gecode_constraint_count_115, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_sqrt_437", gecode_constraint_sqrt_437, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_sqrt_436", gecode_constraint_sqrt_436, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_cumulatives_164", gecode_constraint_cumulatives_164, 9);
|
||||
YAP_UserCPredicate("gecode_constraint_cumulatives_162", gecode_constraint_cumulatives_162, 9);
|
||||
YAP_UserCPredicate("gecode_constraint_cumulatives_160", gecode_constraint_cumulatives_160, 9);
|
||||
YAP_UserCPredicate("gecode_constraint_cumulatives_158", gecode_constraint_cumulatives_158, 9);
|
||||
YAP_UserCPredicate("gecode_constraint_cumulatives_156", gecode_constraint_cumulatives_156, 9);
|
||||
YAP_UserCPredicate("gecode_constraint_cumulatives_154", gecode_constraint_cumulatives_154, 9);
|
||||
YAP_UserCPredicate("gecode_constraint_cumulatives_152", gecode_constraint_cumulatives_152, 9);
|
||||
YAP_UserCPredicate("gecode_constraint_cumulatives_150", gecode_constraint_cumulatives_150, 9);
|
||||
YAP_UserCPredicate("gecode_constraint_nvalues_339", gecode_constraint_nvalues_339, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_nvalues_341", gecode_constraint_nvalues_341, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_nvalues_335", gecode_constraint_nvalues_335, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_nvalues_337", gecode_constraint_nvalues_337, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_binpacking_39", gecode_constraint_binpacking_39, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_linear_286", gecode_constraint_linear_286, 7);
|
||||
YAP_UserCPredicate("gecode_constraint_linear_290", gecode_constraint_linear_290, 7);
|
||||
YAP_UserCPredicate("gecode_constraint_linear_278", gecode_constraint_linear_278, 7);
|
||||
YAP_UserCPredicate("gecode_constraint_linear_282", gecode_constraint_linear_282, 7);
|
||||
YAP_UserCPredicate("gecode_constraint_abs_6", gecode_constraint_abs_6, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_convex_2", gecode_constraint_convex_2, 2);
|
||||
YAP_UserCPredicate("gecode_constraint_div_174", gecode_constraint_div_174, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_div_173", gecode_constraint_div_173, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_385", gecode_constraint_rel_385, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_389", gecode_constraint_rel_389, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_377", gecode_constraint_rel_377, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_373", gecode_constraint_rel_373, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_369", gecode_constraint_rel_369, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_367", gecode_constraint_rel_367, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_max_301", gecode_constraint_max_301, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_path_350", gecode_constraint_path_350, 9);
|
||||
YAP_UserCPredicate("gecode_constraint_unary_447", gecode_constraint_unary_447, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_unary_443", gecode_constraint_unary_443, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_unary_440", gecode_constraint_unary_440, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_unary_441", gecode_constraint_unary_441, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_nroot_333", gecode_constraint_nroot_333, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_nroot_332", gecode_constraint_nroot_332, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_sorted_432", gecode_constraint_sorted_432, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_circuit_87", gecode_constraint_circuit_87, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_circuit_84", gecode_constraint_circuit_84, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_circuit_85", gecode_constraint_circuit_85, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_dom_183", gecode_constraint_dom_183, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_dom_180", gecode_constraint_dom_180, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_dom_208", gecode_constraint_dom_208, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_dom_207", gecode_constraint_dom_207, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_dom_179", gecode_constraint_dom_179, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_dom_217", gecode_constraint_dom_217, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_dom_216", gecode_constraint_dom_216, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_dom_214", gecode_constraint_dom_214, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_dom_212", gecode_constraint_dom_212, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_dom_186", gecode_constraint_dom_186, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_dom_195", gecode_constraint_dom_195, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_dom_192", gecode_constraint_dom_192, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_dom_193", gecode_constraint_dom_193, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_dom_188", gecode_constraint_dom_188, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_dom_189", gecode_constraint_dom_189, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_dom_220", gecode_constraint_dom_220, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_dom_219", gecode_constraint_dom_219, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_dom_201", gecode_constraint_dom_201, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_dom_199", gecode_constraint_dom_199, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_abs_5", gecode_constraint_abs_5, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_abs_4", gecode_constraint_abs_4, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_channel_81", gecode_constraint_channel_81, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_channel_78", gecode_constraint_channel_78, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_assign_32", gecode_constraint_assign_32, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_assign_29", gecode_constraint_assign_29, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_assign_35", gecode_constraint_assign_35, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_assign_38", gecode_constraint_assign_38, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_when_455", gecode_constraint_when_455, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_414", gecode_constraint_rel_414, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_400", gecode_constraint_rel_400, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_path_351", gecode_constraint_path_351, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_branch_44", gecode_constraint_branch_44, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_branch_55", gecode_constraint_branch_55, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_branch_49", gecode_constraint_branch_49, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_branch_42", gecode_constraint_branch_42, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_branch_58", gecode_constraint_branch_58, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_branch_46", gecode_constraint_branch_46, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_branch_64", gecode_constraint_branch_64, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_branch_48", gecode_constraint_branch_48, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_mult_323", gecode_constraint_mult_323, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_circuit_88", gecode_constraint_circuit_88, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_circuit_89", gecode_constraint_circuit_89, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_circuit_86", gecode_constraint_circuit_86, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_clause_97", gecode_constraint_clause_97, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_clause_95", gecode_constraint_clause_95, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_precede_360", gecode_constraint_precede_360, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_precede_359", gecode_constraint_precede_359, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_channel_82", gecode_constraint_channel_82, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_cumulative_134", gecode_constraint_cumulative_134, 7);
|
||||
YAP_UserCPredicate("gecode_constraint_cumulative_135", gecode_constraint_cumulative_135, 7);
|
||||
YAP_UserCPredicate("gecode_constraint_cumulative_130", gecode_constraint_cumulative_130, 7);
|
||||
YAP_UserCPredicate("gecode_constraint_cumulative_131", gecode_constraint_cumulative_131, 7);
|
||||
YAP_UserCPredicate("gecode_constraint_cumulative_128", gecode_constraint_cumulative_128, 7);
|
||||
YAP_UserCPredicate("gecode_constraint_cumulative_146", gecode_constraint_cumulative_146, 7);
|
||||
YAP_UserCPredicate("gecode_constraint_cumulative_147", gecode_constraint_cumulative_147, 7);
|
||||
YAP_UserCPredicate("gecode_constraint_cumulative_142", gecode_constraint_cumulative_142, 7);
|
||||
YAP_UserCPredicate("gecode_constraint_cumulative_143", gecode_constraint_cumulative_143, 7);
|
||||
YAP_UserCPredicate("gecode_constraint_cumulative_140", gecode_constraint_cumulative_140, 7);
|
||||
YAP_UserCPredicate("gecode_constraint_distinct_171", gecode_constraint_distinct_171, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_distinct_170", gecode_constraint_distinct_170, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_distinct_165", gecode_constraint_distinct_165, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_distinct_167", gecode_constraint_distinct_167, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_member_312", gecode_constraint_member_312, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_member_308", gecode_constraint_member_308, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_mod_319", gecode_constraint_mod_319, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_sqr_434", gecode_constraint_sqr_434, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_sqr_433", gecode_constraint_sqr_433, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_sequence_428", gecode_constraint_sequence_428, 7);
|
||||
YAP_UserCPredicate("gecode_constraint_sequence_426", gecode_constraint_sequence_426, 7);
|
||||
YAP_UserCPredicate("gecode_constraint_path_347", gecode_constraint_path_347, 7);
|
||||
YAP_UserCPredicate("gecode_constraint_path_344", gecode_constraint_path_344, 7);
|
||||
YAP_UserCPredicate("gecode_constraint_path_345", gecode_constraint_path_345, 7);
|
||||
YAP_UserCPredicate("gecode_constraint_divmod_177", gecode_constraint_divmod_177, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_sorted_429", gecode_constraint_sorted_429, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_extensional_252", gecode_constraint_extensional_252, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_extensional_250", gecode_constraint_extensional_250, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_extensional_248", gecode_constraint_extensional_248, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_extensional_246", gecode_constraint_extensional_246, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_circuit_93", gecode_constraint_circuit_93, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_circuit_92", gecode_constraint_circuit_92, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_argmin_14", gecode_constraint_argmin_14, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_argmin_16", gecode_constraint_argmin_16, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_channel_79", gecode_constraint_channel_79, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_channel_76", gecode_constraint_channel_76, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_channel_72", gecode_constraint_channel_72, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_channel_75", gecode_constraint_channel_75, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_channel_74", gecode_constraint_channel_74, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_when_458", gecode_constraint_when_458, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_count_103", gecode_constraint_count_103, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_count_105", gecode_constraint_count_105, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_count_121", gecode_constraint_count_121, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_count_123", gecode_constraint_count_123, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_count_107", gecode_constraint_count_107, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_count_109", gecode_constraint_count_109, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_count_112", gecode_constraint_count_112, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_count_116", gecode_constraint_count_116, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_count_120", gecode_constraint_count_120, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_count_99", gecode_constraint_count_99, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_count_101", gecode_constraint_count_101, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_cumulatives_163", gecode_constraint_cumulatives_163, 8);
|
||||
YAP_UserCPredicate("gecode_constraint_cumulatives_161", gecode_constraint_cumulatives_161, 8);
|
||||
YAP_UserCPredicate("gecode_constraint_cumulatives_159", gecode_constraint_cumulatives_159, 8);
|
||||
YAP_UserCPredicate("gecode_constraint_cumulatives_157", gecode_constraint_cumulatives_157, 8);
|
||||
YAP_UserCPredicate("gecode_constraint_cumulatives_155", gecode_constraint_cumulatives_155, 8);
|
||||
YAP_UserCPredicate("gecode_constraint_cumulatives_153", gecode_constraint_cumulatives_153, 8);
|
||||
YAP_UserCPredicate("gecode_constraint_cumulatives_151", gecode_constraint_cumulatives_151, 8);
|
||||
YAP_UserCPredicate("gecode_constraint_cumulatives_149", gecode_constraint_cumulatives_149, 8);
|
||||
YAP_UserCPredicate("gecode_constraint_binpacking_40", gecode_constraint_binpacking_40, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_branch_1", gecode_constraint_branch_1, 2);
|
||||
YAP_UserCPredicate("gecode_constraint_extensional_251", gecode_constraint_extensional_251, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_extensional_249", gecode_constraint_extensional_249, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_extensional_247", gecode_constraint_extensional_247, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_extensional_245", gecode_constraint_extensional_245, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_linear_294", gecode_constraint_linear_294, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_linear_298", gecode_constraint_linear_298, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_linear_262", gecode_constraint_linear_262, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_linear_266", gecode_constraint_linear_266, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_linear_268", gecode_constraint_linear_268, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_linear_270", gecode_constraint_linear_270, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_linear_284", gecode_constraint_linear_284, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_linear_285", gecode_constraint_linear_285, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_linear_288", gecode_constraint_linear_288, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_linear_289", gecode_constraint_linear_289, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_linear_276", gecode_constraint_linear_276, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_linear_277", gecode_constraint_linear_277, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_linear_280", gecode_constraint_linear_280, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_linear_281", gecode_constraint_linear_281, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_nooverlap_328", gecode_constraint_nooverlap_328, 7);
|
||||
YAP_UserCPredicate("gecode_constraint_nooverlap_327", gecode_constraint_nooverlap_327, 7);
|
||||
YAP_UserCPredicate("gecode_constraint_div_175", gecode_constraint_div_175, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_sqr_435", gecode_constraint_sqr_435, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_channel_80", gecode_constraint_channel_80, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_channel_77", gecode_constraint_channel_77, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_channel_73", gecode_constraint_channel_73, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_path_348", gecode_constraint_path_348, 8);
|
||||
YAP_UserCPredicate("gecode_constraint_path_349", gecode_constraint_path_349, 8);
|
||||
YAP_UserCPredicate("gecode_constraint_path_346", gecode_constraint_path_346, 8);
|
||||
YAP_UserCPredicate("gecode_constraint_unary_448", gecode_constraint_unary_448, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_unary_449", gecode_constraint_unary_449, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_unary_444", gecode_constraint_unary_444, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_unary_445", gecode_constraint_unary_445, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_unary_442", gecode_constraint_unary_442, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_sorted_430", gecode_constraint_sorted_430, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_sorted_431", gecode_constraint_sorted_431, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_element_244", gecode_constraint_element_244, 8);
|
||||
YAP_UserCPredicate("gecode_constraint_element_238", gecode_constraint_element_238, 8);
|
||||
YAP_UserCPredicate("gecode_constraint_element_232", gecode_constraint_element_232, 8);
|
||||
YAP_UserCPredicate("gecode_constraint_element_230", gecode_constraint_element_230, 8);
|
||||
YAP_UserCPredicate("gecode_constraint_assign_22", gecode_constraint_assign_22, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_assign_31", gecode_constraint_assign_31, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_assign_28", gecode_constraint_assign_28, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_assign_20", gecode_constraint_assign_20, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_assign_34", gecode_constraint_assign_34, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_assign_24", gecode_constraint_assign_24, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_assign_37", gecode_constraint_assign_37, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_assign_26", gecode_constraint_assign_26, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_element_240", gecode_constraint_element_240, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_element_242", gecode_constraint_element_242, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_element_236", gecode_constraint_element_236, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_element_234", gecode_constraint_element_234, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_element_226", gecode_constraint_element_226, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_element_228", gecode_constraint_element_228, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_element_224", gecode_constraint_element_224, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_branch_56", gecode_constraint_branch_56, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_branch_52", gecode_constraint_branch_52, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_branch_50", gecode_constraint_branch_50, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_branch_61", gecode_constraint_branch_61, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_branch_59", gecode_constraint_branch_59, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_branch_65", gecode_constraint_branch_65, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_branch_67", gecode_constraint_branch_67, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_relax_424", gecode_constraint_relax_424, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_circuit_90", gecode_constraint_circuit_90, 7);
|
||||
YAP_UserCPredicate("gecode_constraint_pow_356", gecode_constraint_pow_356, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_pow_355", gecode_constraint_pow_355, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_precede_358", gecode_constraint_precede_358, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_argmax_9", gecode_constraint_argmax_9, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_argmax_11", gecode_constraint_argmax_11, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_cumulative_133", gecode_constraint_cumulative_133, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_cumulative_129", gecode_constraint_cumulative_129, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_cumulative_126", gecode_constraint_cumulative_126, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_cumulative_127", gecode_constraint_cumulative_127, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_cumulative_145", gecode_constraint_cumulative_145, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_cumulative_141", gecode_constraint_cumulative_141, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_cumulative_138", gecode_constraint_cumulative_138, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_cumulative_139", gecode_constraint_cumulative_139, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_distinct_172", gecode_constraint_distinct_172, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_distinct_166", gecode_constraint_distinct_166, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_distinct_168", gecode_constraint_distinct_168, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_min_316", gecode_constraint_min_316, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_min_317", gecode_constraint_min_317, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_sqrt_438", gecode_constraint_sqrt_438, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_sequence_427", gecode_constraint_sequence_427, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_sequence_425", gecode_constraint_sequence_425, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_unshare_454", gecode_constraint_unshare_454, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_unshare_452", gecode_constraint_unshare_452, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_path_354", gecode_constraint_path_354, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_path_343", gecode_constraint_path_343, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_divmod_176", gecode_constraint_divmod_176, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_branch_57", gecode_constraint_branch_57, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_branch_53", gecode_constraint_branch_53, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_branch_51", gecode_constraint_branch_51, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_branch_62", gecode_constraint_branch_62, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_branch_60", gecode_constraint_branch_60, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_branch_66", gecode_constraint_branch_66, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_branch_68", gecode_constraint_branch_68, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_nooverlap_331", gecode_constraint_nooverlap_331, 9);
|
||||
YAP_UserCPredicate("gecode_constraint_argmin_15", gecode_constraint_argmin_15, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_argmin_17", gecode_constraint_argmin_17, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_cumulative_125", gecode_constraint_cumulative_125, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_cumulative_137", gecode_constraint_cumulative_137, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_member_309", gecode_constraint_member_309, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_member_305", gecode_constraint_member_305, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_count_100", gecode_constraint_count_100, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_count_102", gecode_constraint_count_102, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_count_122", gecode_constraint_count_122, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_count_124", gecode_constraint_count_124, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_count_108", gecode_constraint_count_108, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_count_110", gecode_constraint_count_110, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_count_104", gecode_constraint_count_104, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_count_106", gecode_constraint_count_106, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_pow_357", gecode_constraint_pow_357, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_cumulative_136", gecode_constraint_cumulative_136, 8);
|
||||
YAP_UserCPredicate("gecode_constraint_cumulative_132", gecode_constraint_cumulative_132, 8);
|
||||
YAP_UserCPredicate("gecode_constraint_cumulative_148", gecode_constraint_cumulative_148, 8);
|
||||
YAP_UserCPredicate("gecode_constraint_cumulative_144", gecode_constraint_cumulative_144, 8);
|
||||
YAP_UserCPredicate("gecode_constraint_branch_45", gecode_constraint_branch_45, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_branch_41", gecode_constraint_branch_41, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_branch_43", gecode_constraint_branch_43, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_branch_47", gecode_constraint_branch_47, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_dom_182", gecode_constraint_dom_182, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_dom_184", gecode_constraint_dom_184, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_dom_210", gecode_constraint_dom_210, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_dom_209", gecode_constraint_dom_209, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_dom_206", gecode_constraint_dom_206, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_dom_178", gecode_constraint_dom_178, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_dom_215", gecode_constraint_dom_215, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_dom_213", gecode_constraint_dom_213, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_dom_211", gecode_constraint_dom_211, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_dom_185", gecode_constraint_dom_185, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_dom_191", gecode_constraint_dom_191, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_dom_187", gecode_constraint_dom_187, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_dom_222", gecode_constraint_dom_222, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_dom_205", gecode_constraint_dom_205, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_linear_292", gecode_constraint_linear_292, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_linear_293", gecode_constraint_linear_293, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_linear_296", gecode_constraint_linear_296, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_linear_297", gecode_constraint_linear_297, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_linear_272", gecode_constraint_linear_272, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_linear_274", gecode_constraint_linear_274, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_linear_260", gecode_constraint_linear_260, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_linear_261", gecode_constraint_linear_261, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_linear_264", gecode_constraint_linear_264, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_linear_265", gecode_constraint_linear_265, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_linear_267", gecode_constraint_linear_267, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_linear_269", gecode_constraint_linear_269, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_linear_283", gecode_constraint_linear_283, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_linear_287", gecode_constraint_linear_287, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_linear_275", gecode_constraint_linear_275, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_linear_279", gecode_constraint_linear_279, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_argmin_18", gecode_constraint_argmin_18, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_nooverlap_325", gecode_constraint_nooverlap_325, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_nooverlap_326", gecode_constraint_nooverlap_326, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_378", gecode_constraint_rel_378, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_380", gecode_constraint_rel_380, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_412", gecode_constraint_rel_412, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_410", gecode_constraint_rel_410, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_408", gecode_constraint_rel_408, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_409", gecode_constraint_rel_409, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_406", gecode_constraint_rel_406, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_401", gecode_constraint_rel_401, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_402", gecode_constraint_rel_402, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_398", gecode_constraint_rel_398, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_404", gecode_constraint_rel_404, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_374", gecode_constraint_rel_374, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_370", gecode_constraint_rel_370, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_416", gecode_constraint_rel_416, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_422", gecode_constraint_rel_422, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_415", gecode_constraint_rel_415, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_420", gecode_constraint_rel_420, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_418", gecode_constraint_rel_418, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_382", gecode_constraint_rel_382, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_386", gecode_constraint_rel_386, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_390", gecode_constraint_rel_390, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_392", gecode_constraint_rel_392, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_394", gecode_constraint_rel_394, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_396", gecode_constraint_rel_396, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_364", gecode_constraint_rel_364, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_rel_362", gecode_constraint_rel_362, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_min_315", gecode_constraint_min_315, 5);
|
||||
YAP_UserCPredicate("gecode_constraint_count_117", gecode_constraint_count_117, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_count_113", gecode_constraint_count_113, 3);
|
||||
YAP_UserCPredicate("gecode_constraint_argmax_8", gecode_constraint_argmax_8, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_argmax_10", gecode_constraint_argmax_10, 4);
|
||||
YAP_UserCPredicate("gecode_constraint_ite_257", gecode_constraint_ite_257, 6);
|
||||
YAP_UserCPredicate("gecode_constraint_ite_254", gecode_constraint_ite_254, 6);
|
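Each of the YAP_UserCPredicate(Name, CFunction, Arity) calls above makes a C function callable from Prolog as Name/Arity. A minimal, hand-written sketch of the same mechanism (the names succ_int and init_succ_int are illustrative, not part of this commit):

#include <YapInterface.h>

/* succ_int(+N, ?M): unify M with N+1 -- illustrative only */
static YAP_Bool succ_int(void)
{
  YAP_Term n = YAP_ARG1;
  if (!YAP_IsIntTerm(n))
    return FALSE;
  return YAP_Unify(YAP_ARG2, YAP_MkIntTerm(YAP_IntOfTerm(n) + 1));
}

void init_succ_int(void)
{
  /* register succ_int/2, just like the generated calls above */
  YAP_UserCPredicate("succ_int", succ_int, 2);
}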
167
packages/python/#python.pl#
Normal file
@ -0,0 +1,167 @@
%%% -*- Mode: Prolog; -*-
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% Author: Vitor Santos Costa
% E-mail: vsc@dcc.fc.up.pt
% Copyright (C): Universidade do Porto
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%
% This file is part of the YAP Python Interface
% distributed according to Perl Artistic License
% check LICENSE file for distribution license
%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

%%%

:- module(python,
	  [
	   init_python/0,
	   end_python/0,
	   python_command/1,
	   python_run_file/1,
	   python_run_command/1,
	   python_run_script/2,
	   python_assign/3,
	   python_import/1,
	   array_to_python_list/4,
	   array_to_python_tuple/4,
	   array_to_python_view/5,
	   python/2,
	   acquire_GIL/0,
	   release_GIL/0,
	   python_threaded/0,
	   (:=)/2,
	   (:=)/1,
	   % (<-)/2,
	   % (<-)/1,
	   op(100,fy,$),
	   op(950,fy,:=),
	   op(950,yfx,:=),
	   op(950,fx,<-),
	   op(950,yfx,<-),
	   op(50, yf, []),
	   op(50, yf, '()'),
	   op(100, xfy, '.'),
	   op(100, fy, '.')
	  ]).


/** <module> python

A C-based Prolog interface to python.

@author Vitor Santos Costa
@version 0:0:5, 2012/10/8
@license Perl Artistic License

This is an interface to allow calling Python from Prolog. Please look
at the SWIG package if you want to embed Prolog with Python.

The interface should be activated by consulting the python library. It
immediately boots a Python image.

To best define the interface, one has to address two opposite goals:
  - make it as similar to python as possible
  - make all embedded language interfaces (python, R, Java) as
    similar as possible.

YAP supports the following translation between Prolog and Python:

| *Prolog* | *Python* | *Prolog Examples* |
|:-------------:|:-------------:|---------------------------------------:|
| Numbers | Numbers | 2.3
| | | 1545
| | |
| Atom | Symbols | var
| $Atom | | $var [ = var]
| `string` | 'string' | \`hello\`
| "string" | ' | "hello"
| | |
| Atom(...) | Symb(...) | f( a, b, named=v)
| E.F(...) | E.F (...) | mod.f( a) [ = [mod\|f(a)] ]
| Atom() | | f() [ = '()'(f) ]
| Lists | Lists | [1,2,3]
| t(....) | Tuples | t(1,2,3) to (1,2,3)
| (..., ...) | | (1,2,3)[ = (1,(2,3))]
| {.=., .=.} | Dict | {\`one\`: 1, \`two\`: 2, \`three\`: 3}

*/




/************************************************************************************************************


  Python interface

  Data types are

      Python          Prolog
      string          atoms
      numbers         numbers
      lists           lists
      tuples          t(...)
      generic objs    __pointer__(Address)

      $var refers to the attribute __main__.var

*************************************************************************************************************/


:- use_module(library(shlib)).
:- use_module(library(lists)).
:- use_module(library(apply_macros)).
:- use_module(library(charsio)).
:- dynamic python_mref_cache/2, python_obj_cache/2.

:= (P1,P2) :- !,
	:= P1,
	:= P2.
:= import( F ) :- !, python_import(F).
:= F :- python_is(F,_).

V <- F :-
	V := F.

( V := F ) :-
	python_assign(V, F).

((<- F)) :-
	:= F.

python_import(Module) :-
	python_import(Module, _).


python(Exp, Out) :-
	Out := Exp.

python_assign(V, New) :- var(V), !,
	python_is( New, V).
python_assign(T, F) :- atom(T), !,
	python_assign_symbol(T, F).
python_assign(T.I, F) :- !,
	python_assign_field(T, I, F).
python_assign(T[I], F) :- !,
	python_assign_item(T, I, F).
python_assign(F, Tuple) :-
	python_assign_tuple(F, Tuple).

python_command(Cmd) :-
	python_run_command(Cmd).


start_python :-
	python_import('inspect', _),
	at_halt(end_python).

add_cwd_to_python :-
	unix(getcwd(Dir)),
	atom_concat(['sys.path.append(\"',Dir,'\")'], Command),
	python_command(Command),
	python_command("sys.argv = [\"yap\"]").
% done

:- initialization( load_foreign_files([libYAPPython], [], init_python), now ).

:- initialization( load_foreign_library(foreign(libYAPPython), init_python), now ).
7
packages/python/#sc#
Normal file
@ -0,0 +1,7 @@
PyThreadState *_save;

_save = PyThreadState_Swap(NULL);
PyEval_ReleaseLock();
...Do some blocking I/O operation...
PyEval_AcquireLock();
PyThreadState_Swap(_save);
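The scratch note above is the classic CPython pattern for releasing the GIL around blocking work: save the thread state, release the lock, block, re-acquire, restore. Recent CPython versions deprecate the PyEval_ReleaseLock()/PyEval_AcquireLock() pair; a sketch of the same idea using the Py_BEGIN_ALLOW_THREADS/Py_END_ALLOW_THREADS convenience macros, which save and restore the thread state for you:

#include <Python.h>

/* release the GIL around a blocking operation; no Python C API calls
   may be made between the two macros */
static void blocking_io_without_gil(void)
{
    Py_BEGIN_ALLOW_THREADS
    /* ... do some blocking I/O operation ... */
    Py_END_ALLOW_THREADS
}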
3
packages/python/__init__.pybk
Normal file
@ -0,0 +1,3 @@
"""A Prolog kernel for Jupyter"""

__version__ = '0.0.1'
4
packages/python/__main__.pybk
Normal file
@ -0,0 +1,4 @@
if __name__ == '__main__':
    from yapkernel import kernelapp as app
    import pdb; pdb.set_trace()
    app.launch_new_instance()
22
packages/python/examples/multiply.pybk
Normal file
@ -0,0 +1,22 @@
i = 5

def f(arg=i):
    print arg

def multiply(a,b):
    print "Will compute", a, "times", b
    c = 0
    for i in range(0, a):
        c = c + b
    return c

def square(a,b):
    return [a*a,b*b]

def lsquare(a):
    print a
    b = []
    for i in a:
        b.append(i*i)
    return b

10
packages/python/examples/plot.pybk
Normal file
@ -0,0 +1,10 @@

:- [library(python)].

main :-
	:= import matplotlib.pyplot,
	:= Plt = matplotlib.pyplot,
	Plt.plot([1,2,3,4]),
	Plt.ylabel(`some numbers`),
	Plt.show().

28
packages/python/examples/tests.yap
Normal file
@ -0,0 +1,28 @@
:- use_module(library(python)).
:- use_module(library(lists)).

main :-
	test(I),
	catch( dot(I), G, err(I,G) ),
	writeln('.'),
	fail.
main.

test(I) :-
	findall(I, clause(det(I,_,_),_), IsF, Is0 ),
	Is0 = [],
	sort(IsF,Is),
	member(I, Is).

dot(I) :-
	det(I, Vs, Sol),
	Vs == Sol.

err(I,N) :-
	format(' test ~d failed with error: ~w',[I,N]).


det(a1,[X],[2]) :- X:=2.
det(a2,[],[]) :- x := range(1,10).
det(b2,[X],[9]) :- X := x.length().
det(c3,[X],[Y]) :- X:=cmath.sin(1), Y is sin(1).
44
packages/python/install.pybk
Normal file
@ -0,0 +1,44 @@
import json
import os
import sys

try:
    from jupyter_client.kernelspec import install_kernel_spec
except ImportError:
    from IPython.kernel.kernelspec import install_kernel_spec
from IPython.utils.tempdir import TemporaryDirectory


kernel_json = {
    "argv": [sys.executable,
             "-m", "yap_kernel",
             "-f", "{connection_file}"],
    "display_name": "yap",
    "mimetype": "text/x-prolog",
    "language": "prolog",
    "name": "yap",
}

def install_my_kernel_spec(user=False):
    with TemporaryDirectory() as td:
        os.chmod(td, 0o755) # Starts off as 700, not user readable
        with open(os.path.join(td, 'kernel.json'), 'w') as f:
            json.dump(kernel_json, f, sort_keys=True)
        # TODO: Copy resources once they're specified

        print('Installing IPython kernel spec')
        install_kernel_spec(td, 'yap', user=False, replace=True)

def _is_root():
    return True
    try:
        return os.geteuid() == 0
    except AttributeError:
        return False # assume not an admin on non-Unix platforms

def main(argv=[]):
    user = '--user' in argv or not _is_root()
    install_my_kernel_spec(user=user)

if __name__ == '__main__':
    main(argv=sys.argv)
112
packages/python/setup.py
Normal file
@ -0,0 +1,112 @@
#!/usr/bin/env python
# coding: utf-8

# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.

from __future__ import print_function

# the name of the package
name = 'ipykernel'

#-----------------------------------------------------------------------------
# Minimal Python version sanity check
#-----------------------------------------------------------------------------

import sys

v = sys.version_info
if v[:2] < (2,7) or (v[0] >= 3 and v[:2] < (3,3)):
    error = "ERROR: %s requires Python version 2.7 or 3.3 or above." % name
    print(error, file=sys.stderr)
    sys.exit(1)

PY3 = (sys.version_info[0] >= 3)

#-----------------------------------------------------------------------------
# get on with it
#-----------------------------------------------------------------------------

from glob import glob
import os
import shutil

from distutils.core import setup

pjoin = os.path.join
here = os.path.abspath(os.path.dirname(__file__))
pkg_root = pjoin(here, name)

packages = []
for d, _, _ in os.walk(pjoin(here, name)):
    if os.path.exists(pjoin(d, '__init__.py')):
        packages.append(d[len(here)+1:].replace(os.path.sep, '.'))

package_data = {
    'ipykernel': ['resources/*.*'],
}

version_ns = {}
with open(pjoin(here, name, '_version.py')) as f:
    exec(f.read(), {}, version_ns)


setup_args = dict(
    name = name,
    version = version_ns['__version__'],
    scripts = glob(pjoin('scripts', '*')),
    packages = packages,
    py_modules = ['ipykernel_launcher'],
    package_data = package_data,
    description = "IPython Kernel for Jupyter",
    author = 'IPython Development Team',
    author_email = 'ipython-dev@scipy.org',
    url = 'http://ipython.org',
    license = 'BSD',
    platforms = "Linux, Mac OS X, Windows",
    keywords = ['Interactive', 'Interpreter', 'Shell', 'Web'],
    classifiers = [
        'Intended Audience :: Developers',
        'Intended Audience :: System Administrators',
        'Intended Audience :: Science/Research',
        'License :: OSI Approved :: BSD License',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
    ],
)

if 'develop' in sys.argv or any(a.startswith('bdist') for a in sys.argv):
    import setuptools

setuptools_args = {}
install_requires = setuptools_args['install_requires'] = [
    'ipython>=4.0.0',
    'traitlets>=4.1.0',
    'jupyter_client',
    'tornado>=4.0',
]

if any(a.startswith(('bdist', 'build', 'install')) for a in sys.argv):
    from ipykernel.kernelspec import write_kernel_spec, make_yap_kernel_cmd, KERNEL_NAME

    argv = make_yap_kernel_cmd(executable='python')
    dest = os.path.join(here, 'data_kernelspec')
    if os.path.exists(dest):
        shutil.rmtree(dest)
    write_kernel_spec(dest, overrides={'argv': argv})

    setup_args['data_files'] = [
        (pjoin('share', 'jupyter', 'kernels', KERNEL_NAME), glob(pjoin(dest, '*'))),
    ]

extras_require = setuptools_args['extras_require'] = {
    'test:python_version=="2.7"': ['mock'],
    'test': ['nose_warnings_filters', 'nose-timer'],
}

if 'setuptools' in sys.modules:
    setup_args.update(setuptools_args)

if __name__ == '__main__':
    setup(**setup_args)
112
packages/python/setup.pybk
Normal file
@ -0,0 +1,112 @@
|
||||
#!/usr/bin/env python
|
||||
# coding: utf-8
|
||||
|
||||
# Copyright (c) IPython Development Team.
|
||||
# Distributed under the terms of the Modified BSD License.
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
# the name of the package
|
||||
name = 'ipykernel'
|
||||
|
||||
#-----------------------------------------------------------------------------
|
||||
# Minimal Python version sanity check
|
||||
#-----------------------------------------------------------------------------
|
||||
|
||||
import sys
|
||||
|
||||
v = sys.version_info
|
||||
if v[:2] < (2,7) or (v[0] >= 3 and v[:2] < (3,3)):
|
||||
error = "ERROR: %s requires Python version 2.7 or 3.3 or above." % name
|
||||
print(error, file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
PY3 = (sys.version_info[0] >= 3)
|
||||
|
||||
#-----------------------------------------------------------------------------
|
||||
# get on with it
|
||||
#-----------------------------------------------------------------------------
|
||||
|
||||
from glob import glob
|
||||
import os
|
||||
import shutil
|
||||
|
||||
from distutils.core import setup
|
||||
|
||||
pjoin = os.path.join
|
||||
here = os.path.abspath(os.path.dirname(__file__))
|
||||
pkg_root = pjoin(here, name)
|
||||
|
||||
packages = []
|
||||
for d, _, _ in os.walk(pjoin(here, name)):
|
||||
if os.path.exists(pjoin(d, '__init__.py')):
|
||||
packages.append(d[len(here)+1:].replace(os.path.sep, '.'))
|
||||
|
||||
package_data = {
|
||||
'ipykernel': ['resources/*.*'],
|
||||
}
|
||||
|
||||
version_ns = {}
|
||||
with open(pjoin(here, name, '_version.py')) as f:
|
||||
exec(f.read(), {}, version_ns)
|
||||
|
||||
|
||||
setup_args = dict(
|
||||
name = name,
|
||||
version = version_ns['__version__'],
|
||||
scripts = glob(pjoin('scripts', '*')),
|
||||
packages = packages,
|
||||
py_modules = ['ipykernel_launcher'],
|
||||
package_data = package_data,
|
||||
description = "IPython Kernel for Jupyter",
|
||||
author = 'IPython Development Team',
|
||||
author_email = 'ipython-dev@scipy.org',
|
||||
url = 'http://ipython.org',
|
||||
license = 'BSD',
|
||||
platforms = "Linux, Mac OS X, Windows",
|
||||
keywords = ['Interactive', 'Interpreter', 'Shell', 'Web'],
|
||||
classifiers = [
|
||||
'Intended Audience :: Developers',
|
||||
'Intended Audience :: System Administrators',
|
||||
'Intended Audience :: Science/Research',
|
||||
'License :: OSI Approved :: BSD License',
|
||||
'Programming Language :: Python',
|
||||
'Programming Language :: Python :: 2.7',
|
||||
'Programming Language :: Python :: 3',
|
||||
],
|
||||
)
|
||||
|
||||
if 'develop' in sys.argv or any(a.startswith('bdist') for a in sys.argv):
|
||||
import setuptools
|
||||
|
||||
setuptools_args = {}
|
||||
install_requires = setuptools_args['install_requires'] = [
|
||||
'ipython>=4.0.0',
|
||||
'traitlets>=4.1.0',
|
||||
'jupyter_client',
|
||||
'tornado>=4.0',
|
||||
]
|
||||
|
||||
if any(a.startswith(('bdist', 'build', 'install')) for a in sys.argv):
|
||||
from ipykernel.kernelspec import write_kernel_spec, make_ipkernel_cmd, KERNEL_NAME
|
||||
|
||||
argv = make_ipkernel_cmd(executable='python')
|
||||
dest = os.path.join(here, 'data_kernelspec')
|
||||
if os.path.exists(dest):
|
||||
shutil.rmtree(dest)
|
||||
write_kernel_spec(dest, overrides={'argv': argv})
|
||||
|
||||
setup_args['data_files'] = [
|
||||
(pjoin('share', 'jupyter', 'kernels', KERNEL_NAME), glob(pjoin(dest, '*'))),
|
||||
]
|
||||
|
||||
extras_require = setuptools_args['extras_require'] = {
|
||||
'test:python_version=="2.7"': ['mock'],
|
||||
'test': ['nose_warnings_filters', 'nose-timer'],
|
||||
}
|
||||
|
||||
if 'setuptools' in sys.modules:
|
||||
setup_args.update(setuptools_args)
|
||||
|
||||
if __name__ == '__main__':
|
||||
setup(**setup_args)
|
@ -70,9 +70,9 @@ add_custom_target( YAP4PY ALL
|
||||
COMMAND ${CMAKE_COMMAND} -E copy ${pl_library} ${PROLOG_SOURCES} ${CMAKE_CURRENT_BINARY_DIR}/yap4py/prolog
|
||||
COMMAND ${CMAKE_COMMAND} -E copy ${pl_boot_library} ${CMAKE_CURRENT_BINARY_DIR}/yap4py/prolog/pl
|
||||
COMMAND ${CMAKE_COMMAND} -E copy ${pl_os_library} ${CMAKE_CURRENT_BINARY_DIR}/yap4py/prolog/os
|
||||
COMMAND ${PYTHON_EXECUTABLE} setup.py sdist bdist_wheel
|
||||
COMMAND ${PYTHON_EXECUTABLE} ${CMAKE_CURRENT_BINARY_DIR}/setup.py sdist bdist_wheel
|
||||
WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
|
||||
DEPENDS STARTUP ${dlls} ${PYTHON_SOURCES} ${PROLOG_SOURCES} setup.py ${SWIG_MODULE_Py2YAP_REAL_NAME} )
|
||||
DEPENDS STARTUP ${dlls} ${PYTHON_SOURCES} ${PROLOG_SOURCES} ${CMAKE_CURRENT_BINARY_DIR}/setup.py ${SWIG_MODULE_Py2YAP_REAL_NAME} )
|
||||
|
||||
|
||||
install(CODE "execute_process(COMMAND ${PYTHON_EXECUTABLE} -m pip install --no-index -f dist yap4py
|
||||
@ -95,7 +95,7 @@ DEPENDS STARTUP ${dlls} ${PYTHON_SOURCES} ${PROLOG_SOURCES} setup.py ${SWIG_MOD
|
||||
# generate .i from doxygen .xml
|
||||
add_custom_command(OUTPUT ${CMAKE_CURRENT_BINARY_DIR}/ftdi1_doc.i
|
||||
COMMAND ${PYTHON_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/doxy2swig.py -n
|
||||
${CMAKE_BINARY_DIR}/doc/xml/ftdi_8c.xml
|
||||
${CMAKE_BINARY_DIR}/doc/xm11l/ftdi_8c.xml
|
||||
${CMAKE_CURRENT_BINARY_DIR}/ftdi1_doc.i
|
||||
DEPENDS ${CMAKE_BINARY_DIR}/doc/xml/ftdi_8c.xml
|
||||
)
|
||||
|
17
packages/python/swig/__init__.pybk
Normal file
@ -0,0 +1,17 @@
|
||||
import imp
|
||||
import os
|
||||
import ctypes
|
||||
import glob
|
||||
import os.path
|
||||
import sys
|
||||
|
||||
global yap_lib_path
|
||||
yap_lib_path = os.path.dirname(__file__)
|
||||
|
||||
|
||||
def load( dll ):
|
||||
dll = glob.glob(os.path.join(yap_lib_path,dll))[0]
|
||||
dll = os.path.abspath(dll)
|
||||
ctypes.CDLL(dll, mode=ctypes.RTLD_GLOBAL)
|
||||
|
||||
load('libYap*')
|
14
packages/python/swig/__main__.pybk
Normal file
@ -0,0 +1,14 @@
|
||||
"""The main routine of the yap python project."""
|
||||
|
||||
import sys
|
||||
import yapi
|
||||
|
||||
|
||||
def main(args=None):
|
||||
"""The main routine."""
|
||||
if args is None:
|
||||
args = sys.argv[1:]
|
||||
yapi.live(args)
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
97
packages/python/swig/setup.py
Normal file
@ -0,0 +1,97 @@
|
||||
"""A setuptools based setup module.
|
||||
|
||||
See:
|
||||
https://packaging.python.org/en/latest/distributing.html
|
||||
https://github.com/pypa/sampleproject
|
||||
"""
|
||||
|
||||
# Always prefer setuptools over distutils
|
||||
from setuptools import setup
|
||||
from setuptools.extension import Extension
|
||||
# To use a consistent encoding
|
||||
from codecs import open
|
||||
from os import path, makedirs, walk
|
||||
from shutil import copytree, rmtree, copy2, move
|
||||
from glob import glob
|
||||
from pathlib import Path
|
||||
import platform
|
||||
import os.path
|
||||
|
||||
my_extra_link_args = []
|
||||
if platform.system() == 'Darwin':
|
||||
my_extra_link_args = ['-Wl,-rpath','-Wl,']
|
||||
so = 'dylib'
|
||||
#or dll in glob('yap/dlls/*'):
|
||||
# move( dll ,'lib' )
|
||||
pls = []
|
||||
for (r,d,fs) in walk('yap/pl'):
|
||||
for f in fs:
|
||||
pls += [os.path.join(r, f)]
|
||||
for (r,d,fs) in walk('yap'):
|
||||
for f in fs:
|
||||
pls += [os.path.join(r, f)]
|
||||
# for f in glob( 'lib*.*' ):
|
||||
# ofile.write(f+"\n")
|
||||
cplus=['../../../CXX/yapi.cpp']
|
||||
py2yap=['../../../packages/python/python.c',
|
||||
'../../../packages/python/pl2py.c',
|
||||
'../../../packages/python/pybips.c',
|
||||
'../../../packages/python/py2pl.c',
|
||||
'../../../packages/python/pl2pl.c',
|
||||
'../../../packages/python/pypreds.c'
|
||||
]
|
||||
python_sources = ['yapPYTHON_wrap.cxx']+py2yap+cplus
|
||||
here = path.abspath(path.dirname(__file__))
|
||||
|
||||
# Get the long description from the README file
|
||||
|
||||
extensions=[Extension('_yap', python_sources,
|
||||
define_macros = [('MAJOR_VERSION', '1'),
|
||||
('MINOR_VERSION', '0'),
|
||||
('_YAP_NOT_INSTALLED_', '1'),
|
||||
('YAP_PYTHON', '1')],
|
||||
runtime_library_dirs=['yap4py','/usr/local/lib','/usr/local/bin'],
|
||||
swig_opts=['-modern', '-c++', '-py3','-I../../..//CXX'],
|
||||
library_dirs=['../../..','../../../CXX','../../packages/python',"/usr/local/lib/Yap","/usr/local/bin", '.'],
|
||||
extra_link_args=my_extra_link_args,
|
||||
extra_compile_args=['-g'],
|
||||
libraries=['Yap','/usr/local/lib/libgmp.dylib'],
|
||||
include_dirs=['../../..',
|
||||
'/usr/local/include',
|
||||
'../../../H',
|
||||
'../../../H/generated',
|
||||
'../../../OPTYap',
|
||||
'../../../os',
|
||||
'../../../include',
|
||||
'../../../CXX', '.']
|
||||
)]
|
||||
|
||||
setup(
|
||||
name='YAP4Py',
|
||||
version='6.3.5',
|
||||
description='The YAP Prolog compiler as a Python Library',
|
||||
url='https://github.com/vscosta/yap-6.3',
|
||||
author='Vitor Santos Costa',
|
||||
author_email='vsc@dcc.fc.up.pt',
|
||||
license='Artistic',
|
||||
classifiers=[
|
||||
'Development Status :: 4 - Beta',
|
||||
'Intended Audience :: Developers',
|
||||
'Topic :: Software Development :: Build Tools',
|
||||
'License :: OSI Approved :: Artistic License',
|
||||
'Programming Language :: Python :: 3',
|
||||
|
||||
'Programming Language :: Python :: 3.4',
|
||||
'Programming Language :: Python :: 3.5',
|
||||
'Programming Language :: Python :: 3.6',
|
||||
'Programming Language :: Python :: 3.7'],
|
||||
keywords=['Logic Programing'],
|
||||
#spackage_data={'': ['yap']},
|
||||
include_package_data=True,
|
||||
ext_modules = extensions,
|
||||
py_modules = ['yap'],
|
||||
zip_safe=False,
|
||||
eager_resources = ['yap4py'],
|
||||
packages=['yap4py'] # find_packages()
|
||||
#package_dir = {'':'yap4py'}
|
||||
)
|
17
packages/python/swig/yap4py/__init__.py
Normal file
@ -0,0 +1,17 @@
|
||||
import imp
|
||||
import os
|
||||
import ctypes
|
||||
import glob
|
||||
import os.path
|
||||
import sys
|
||||
|
||||
global yap_lib_path
|
||||
yap_lib_path = os.path.dirname(__file__)
|
||||
|
||||
|
||||
def load( dll ):
|
||||
dll = glob.glob(os.path.join(yap_lib_path,dll))[0]
|
||||
dll = os.path.abspath(dll)
|
||||
ctypes.CDLL(dll, mode=ctypes.RTLD_GLOBAL)
|
||||
|
||||
load('libYap*')
|
406
packages/python/swig/yap4py/prolog/INDEX.pl
Normal file
@ -0,0 +1,406 @@
|
||||
index(foreach,2,aggretate,library(aggregate)).
|
||||
index(aggregate,3,aggretate,library(aggregate)).
|
||||
index(aggregate,4,aggretate,library(aggregate)).
|
||||
index(aggregate_all,3,aggretate,library(aggregate)).
|
||||
index(aggregate_all,4,aggretate,library(aggregate)).
|
||||
index(free_variables,4,aggretate,library(aggregate)).
|
||||
index(genarg,3,arg,library(arg)).
|
||||
index(arg0,3,arg,library(arg)).
|
||||
index(genarg0,3,arg,library(arg)).
|
||||
index(args,3,arg,library(arg)).
|
||||
index(args0,3,arg,library(arg)).
|
||||
index(path_arg,3,arg,library(arg)).
|
||||
index(empty_assoc,1,assoc,library(assoc)).
|
||||
index(assoc_to_list,2,assoc,library(assoc)).
|
||||
index(is_assoc,1,assoc,library(assoc)).
|
||||
index(min_assoc,3,assoc,library(assoc)).
|
||||
index(max_assoc,3,assoc,library(assoc)).
|
||||
index(gen_assoc,3,assoc,library(assoc)).
|
||||
index(get_assoc,3,assoc,library(assoc)).
|
||||
index(get_assoc,5,assoc,library(assoc)).
|
||||
index(get_next_assoc,4,assoc,library(assoc)).
|
||||
index(get_prev_assoc,4,assoc,library(assoc)).
|
||||
index(list_to_assoc,2,assoc,library(assoc)).
|
||||
index(ord_list_to_assoc,2,assoc,library(assoc)).
|
||||
index(map_assoc,2,assoc,library(assoc)).
|
||||
index(map_assoc,3,assoc,library(assoc)).
|
||||
index(put_assoc,4,assoc,library(assoc)).
|
||||
index(del_assoc,4,assoc,library(assoc)).
|
||||
index(assoc_to_keys,2,assoc,library(assoc)).
|
||||
index(del_min_assoc,4,assoc,library(assoc)).
|
||||
index(del_max_assoc,4,assoc,library(assoc)).
|
||||
index(avl_new,1,avl,library(avl)).
|
||||
index(avl_insert,4,avl,library(avl)).
|
||||
index(avl_lookup,3,avl,library(avl)).
|
||||
index(b_hash_new,1,b_hash,library(bhash)).
|
||||
index(b_hash_new,2,b_hash,library(bhash)).
|
||||
index(b_hash_new,4,b_hash,library(bhash)).
|
||||
index(b_hash_lookup,3,b_hash,library(bhash)).
|
||||
index(b_hash_update,3,b_hash,library(bhash)).
|
||||
index(b_hash_update,4,b_hash,library(bhash)).
|
||||
index(b_hash_insert_new,4,b_hash,library(bhash)).
|
||||
index(b_hash_insert,4,b_hash,library(bhash)).
|
||||
index(format_to_chars,3,charsio,library(charsio)).
|
||||
index(format_to_chars,4,charsio,library(charsio)).
|
||||
index(write_to_chars,3,charsio,library(charsio)).
|
||||
index(write_to_chars,2,charsio,library(charsio)).
|
||||
index(atom_to_chars,3,charsio,library(charsio)).
|
||||
index(atom_to_chars,2,charsio,library(charsio)).
|
||||
index(number_to_chars,3,charsio,library(charsio)).
|
||||
index(number_to_chars,2,charsio,library(charsio)).
|
||||
index(read_from_chars,2,charsio,library(charsio)).
|
||||
index(open_chars_stream,2,charsio,library(charsio)).
|
||||
index(with_output_to_chars,2,charsio,library(charsio)).
|
||||
index(with_output_to_chars,3,charsio,library(charsio)).
|
||||
index(with_output_to_chars,4,charsio,library(charsio)).
|
||||
index(term_to_atom,2,charsio,library(charsio)).
|
||||
index(chr_show_store,1,chr,library(chr)).
|
||||
index(find_chr_constraint,1,chr,library(chr)).
|
||||
index(chr_trace,0,chr,library(chr)).
|
||||
index(chr_notrace,0,chr,library(chr)).
|
||||
index(chr_leash,1,chr,library(chr)).
|
||||
index(#>,2,clpfd,library(clpfd)).
|
||||
index(#<,2,clpfd,library(clpfd)).
|
||||
index(#>=,2,clpfd,library(clpfd)).
|
||||
index(#=<,2,clpfd,library(clpfd)).
|
||||
index(#=,2,clpfd,library(clpfd)).
|
||||
index(#\=,2,clpfd,library(clpfd)).
|
||||
index(#\,1,clpfd,library(clpfd)).
|
||||
index(#<==>,2,clpfd,library(clpfd)).
|
||||
index(#==>,2,clpfd,library(clpfd)).
|
||||
index(#<==,2,clpfd,library(clpfd)).
|
||||
index(#\/,2,clpfd,library(clpfd)).
|
||||
index(#/\,2,clpfd,library(clpfd)).
|
||||
index(in,2,clpfd,library(clpfd)).
|
||||
index(ins,2,clpfd,library(clpfd)).
|
||||
index(all_different,1,clpfd,library(clpfd)).
|
||||
index(all_distinct,1,clpfd,library(clpfd)).
|
||||
index(sum,3,clpfd,library(clpfd)).
|
||||
index(scalar_product,4,clpfd,library(clpfd)).
|
||||
index(tuples_in,2,clpfd,library(clpfd)).
|
||||
index(labeling,2,clpfd,library(clpfd)).
|
||||
index(label,1,clpfd,library(clpfd)).
|
||||
index(indomain,1,clpfd,library(clpfd)).
|
||||
index(lex_chain,1,clpfd,library(clpfd)).
|
||||
index(serialized,2,clpfd,library(clpfd)).
|
||||
index(global_cardinality,2,clpfd,library(clpfd)).
|
||||
index(global_cardinality,3,clpfd,library(clpfd)).
|
||||
index(circuit,1,clpfd,library(clpfd)).
|
||||
index(element,3,clpfd,library(clpfd)).
|
||||
index(automaton,3,clpfd,library(clpfd)).
|
||||
index(automaton,8,clpfd,library(clpfd)).
|
||||
index(transpose,2,clpfd,library(clpfd)).
|
||||
index(zcompare,3,clpfd,library(clpfd)).
|
||||
index(chain,2,clpfd,library(clpfd)).
|
||||
index(fd_var,1,clpfd,library(clpfd)).
|
||||
index(fd_inf,2,clpfd,library(clpfd)).
|
||||
index(fd_sup,2,clpfd,library(clpfd)).
|
||||
index(fd_size,2,clpfd,library(clpfd)).
|
||||
index(fd_dom,2,clpfd,library(clpfd)).
|
||||
index({},1,clpr,library(clpr)).
|
||||
index(maximize,1,clpr,library(clpr)).
|
||||
index(minimize,1,clpr,library(clpr)).
|
||||
index(inf,2,clpr,library(clpr)).
|
||||
index(inf,4,clpr,library(clpr)).
|
||||
index(sup,2,clpr,library(clpr)).
|
||||
index(sup,4,clpr,library(clpr)).
|
||||
index(bb_inf,3,clpr,library(clpr)).
|
||||
index(bb_inf,5,clpr,library(clpr)).
|
||||
index(ordering,1,clpr,library(clpr)).
|
||||
index(entailed,1,clpr,library(clpr)).
|
||||
index(clp_type,2,clpr,library(clpr)).
|
||||
index(dump,3,clpr,library(clpr)).
|
||||
index(gensym,2,gensym,library(gensym)).
|
||||
index(reset_gensym,1,gensym,library(gensym)).
|
||||
index(reset_gensym,0,gensym,library(gensym)).
|
||||
index(add_to_heap,4,heaps,library(heaps)).
|
||||
index(get_from_heap,4,heaps,library(heaps)).
|
||||
index(empty_heap,1,heaps,library(heaps)).
|
||||
index(heap_size,2,heaps,library(heaps)).
|
||||
index(heap_to_list,2,heaps,library(heaps)).
|
||||
index(list_to_heap,2,heaps,library(heaps)).
|
||||
index(min_of_heap,3,heaps,library(heaps)).
|
||||
index(min_of_heap,5,heaps,library(heaps)).
|
||||
index(jpl_get_default_jvm_opts,1,jpl,library(jpl)).
|
||||
index(jpl_set_default_jvm_opts,1,jpl,library(jpl)).
|
||||
index(jpl_get_actual_jvm_opts,1,jpl,library(jpl)).
|
||||
index(jpl_pl_lib_version,1,jpl,library(jpl)).
|
||||
index(jpl_c_lib_version,1,jpl,library(jpl)).
|
||||
index(jpl_new,3,jpl,library(jpl)).
|
||||
index(jpl_call,4,jpl,library(jpl)).
|
||||
index(jpl_get,3,jpl,library(jpl)).
|
||||
index(jpl_set,3,jpl,library(jpl)).
|
||||
index(jpl_servlet_byref,3,jpl,library(jpl)).
|
||||
index(jpl_servlet_byval,3,jpl,library(jpl)).
|
||||
index(jpl_class_to_classname,2,jpl,library(jpl)).
|
||||
index(jpl_class_to_type,2,jpl,library(jpl)).
|
||||
index(jpl_classname_to_class,2,jpl,library(jpl)).
|
||||
index(jpl_classname_to_type,2,jpl,library(jpl)).
|
||||
index(jpl_datum_to_type,2,jpl,library(jpl)).
|
||||
index(jpl_false,1,jpl,library(jpl)).
|
||||
index(jpl_is_class,1,jpl,library(jpl)).
|
||||
index(jpl_is_false,1,jpl,library(jpl)).
|
||||
index(jpl_is_null,1,jpl,library(jpl)).
|
||||
index(jpl_is_object,1,jpl,library(jpl)).
|
||||
index(jpl_is_object_type,1,jpl,library(jpl)).
|
||||
index(jpl_is_ref,1,jpl,library(jpl)).
|
||||
index(jpl_is_true,1,jpl,library(jpl)).
|
||||
index(jpl_is_type,1,jpl,library(jpl)).
|
||||
index(jpl_is_void,1,jpl,library(jpl)).
|
||||
index(jpl_null,1,jpl,library(jpl)).
|
||||
index(jpl_object_to_class,2,jpl,library(jpl)).
|
||||
index(jpl_object_to_type,2,jpl,library(jpl)).
|
||||
index(jpl_primitive_type,1,jpl,library(jpl)).
|
||||
index(jpl_ref_to_type,2,jpl,library(jpl)).
|
||||
index(jpl_true,1,jpl,library(jpl)).
|
||||
index(jpl_type_to_class,2,jpl,library(jpl)).
|
||||
index(jpl_type_to_classname,2,jpl,library(jpl)).
|
||||
index(jpl_void,1,jpl,library(jpl)).
|
||||
index(jpl_array_to_length,2,jpl,library(jpl)).
|
||||
index(jpl_array_to_list,2,jpl,library(jpl)).
|
||||
index(jpl_datums_to_array,2,jpl,library(jpl)).
|
||||
index(jpl_enumeration_element,2,jpl,library(jpl)).
|
||||
index(jpl_enumeration_to_list,2,jpl,library(jpl)).
|
||||
index(jpl_hashtable_pair,2,jpl,library(jpl)).
|
||||
index(jpl_iterator_element,2,jpl,library(jpl)).
|
||||
index(jpl_list_to_array,2,jpl,library(jpl)).
|
||||
index(jpl_list_to_array,3,jpl,library(jpl)).
|
||||
index(jpl_terms_to_array,2,jpl,library(jpl)).
|
||||
index(jpl_map_element,2,jpl,library(jpl)).
|
||||
index(jpl_set_element,2,jpl,library(jpl)).
|
||||
index(append,3,lists,library(lists)).
|
||||
index(append,2,lists,library(lists)).
|
||||
index(delete,3,lists,library(lists)).
|
||||
index(intersection,3,lists,library(lists)).
|
||||
index(flatten,2,lists,library(lists)).
|
||||
index(last,2,lists,library(lists)).
|
||||
index(list_concat,2,lists,library(lists)).
|
||||
index(max_list,2,lists,library(lists)).
|
||||
index(member,2,lists,library(lists)).
|
||||
index(memberchk,2,lists,library(lists)).
|
||||
index(min_list,2,lists,library(lists)).
|
||||
index(nextto,3,lists,library(lists)).
|
||||
index(nth,3,lists,library(lists)).
|
||||
index(nth,4,lists,library(lists)).
|
||||
index(nth0,3,lists,library(lists)).
|
||||
index(nth0,4,lists,library(lists)).
|
||||
index(nth1,3,lists,library(lists)).
|
||||
index(nth1,4,lists,library(lists)).
|
||||
index(numlist,3,lists,library(lists)).
|
||||
index(permutation,2,lists,library(lists)).
|
||||
index(prefix,2,lists,library(lists)).
|
||||
index(remove_duplicates,2,lists,library(lists)).
|
||||
index(reverse,2,lists,library(lists)).
|
||||
index(same_length,2,lists,library(lists)).
|
||||
index(select,3,lists,library(lists)).
|
||||
index(selectchk,3,lists,library(lists)).
|
||||
index(sublist,2,lists,library(lists)).
|
||||
index(substitute,4,lists,library(lists)).
|
||||
index(subtract,3,lists,library(lists)).
|
||||
index(suffix,2,lists,library(lists)).
|
||||
index(sum_list,2,lists,library(lists)).
|
||||
index(sum_list,3,lists,library(lists)).
|
||||
index(sumlist,2,lists,library(lists)).
|
||||
index(nb_queue,1,nb,library(nb)).
|
||||
index(nb_queue,2,nb,library(nb)).
|
||||
index(nb_queue_close,3,nb,library(nb)).
|
||||
index(nb_queue_enqueue,2,nb,library(nb)).
|
||||
index(nb_queue_dequeue,2,nb,library(nb)).
|
||||
index(nb_queue_peek,2,nb,library(nb)).
|
||||
index(nb_queue_empty,1,nb,library(nb)).
|
||||
index(nb_queue_size,2,nb,library(nb)).
|
||||
index(nb_heap,2,nb,library(nb)).
|
||||
index(nb_heap_close,1,nb,library(nb)).
|
||||
index(nb_heap_add,3,nb,library(nb)).
|
||||
index(nb_heap_del,3,nb,library(nb)).
|
||||
index(nb_heap_peek,3,nb,library(nb)).
|
||||
index(nb_heap_empty,1,nb,library(nb)).
|
||||
index(nb_heap_size,2,nb,library(nb)).
|
||||
index(nb_beam,2,nb,library(nb)).
|
||||
index(nb_beam_close,1,nb,library(nb)).
|
||||
index(nb_beam_add,3,nb,library(nb)).
|
||||
index(nb_beam_del,3,nb,library(nb)).
|
||||
index(nb_beam_peek,3,nb,library(nb)).
|
||||
index(nb_beam_empty,1,nb,library(nb)).
|
||||
index(nb_beam_size,2,nb,library(nb)).
|
||||
index(contains_term,2,occurs,library(occurs)).
|
||||
index(contains_var,2,occurs,library(occurs)).
|
||||
index(free_of_term,2,occurs,library(occurs)).
|
||||
index(free_of_var,2,occurs,library(occurs)).
|
||||
index(occurrences_of_term,3,occurs,library(occurs)).
|
||||
index(occurrences_of_var,3,occurs,library(occurs)).
|
||||
index(sub_term,2,occurs,library(occurs)).
|
||||
index(sub_var,2,occurs,library(occurs)).
|
||||
index(option,2,swi_option,library(option)).
|
||||
index(option,3,swi_option,library(option)).
|
||||
index(select_option,3,swi_option,library(option)).
|
||||
index(select_option,4,swi_option,library(option)).
|
||||
index(merge_options,3,swi_option,library(option)).
|
||||
index(meta_options,3,swi_option,library(option)).
|
||||
index(list_to_ord_set,2,ordsets,library(ordsets)).
|
||||
index(merge,3,ordsets,library(ordsets)).
|
||||
index(ord_add_element,3,ordsets,library(ordsets)).
|
||||
index(ord_del_element,3,ordsets,library(ordsets)).
|
||||
index(ord_disjoint,2,ordsets,library(ordsets)).
|
||||
index(ord_insert,3,ordsets,library(ordsets)).
|
||||
index(ord_member,2,ordsets,library(ordsets)).
|
||||
index(ord_intersect,2,ordsets,library(ordsets)).
|
||||
index(ord_intersect,3,ordsets,library(ordsets)).
|
||||
index(ord_intersection,3,ordsets,library(ordsets)).
|
||||
index(ord_intersection,4,ordsets,library(ordsets)).
|
||||
index(ord_seteq,2,ordsets,library(ordsets)).
|
||||
index(ord_setproduct,3,ordsets,library(ordsets)).
|
||||
index(ord_subset,2,ordsets,library(ordsets)).
|
||||
index(ord_subtract,3,ordsets,library(ordsets)).
|
||||
index(ord_symdiff,3,ordsets,library(ordsets)).
|
||||
index(ord_union,2,ordsets,library(ordsets)).
|
||||
index(ord_union,3,ordsets,library(ordsets)).
|
||||
index(ord_union,4,ordsets,library(ordsets)).
|
||||
index(ord_empty,1,ordsets,library(ordsets)).
|
||||
index(ord_memberchk,2,ordsets,library(ordsets)).
|
||||
index(pairs_keys_values,3,pairs,library(pairs)).
|
||||
index(pairs_values,2,pairs,library(pairs)).
|
||||
index(pairs_keys,2,pairs,library(pairs)).
|
||||
index(group_pairs_by_key,2,pairs,library(pairs)).
|
||||
index(transpose_pairs,2,pairs,library(pairs)).
|
||||
index(map_list_to_pairs,3,pairs,library(pairs)).
|
||||
index(xref_source,1,prolog_xref,library(prolog_xref)).
|
||||
index(xref_called,3,prolog_xref,library(prolog_xref)).
|
||||
index(xref_defined,3,prolog_xref,library(prolog_xref)).
|
||||
index(xref_definition_line,2,prolog_xref,library(prolog_xref)).
|
||||
index(xref_exported,2,prolog_xref,library(prolog_xref)).
|
||||
index(xref_module,2,prolog_xref,library(prolog_xref)).
|
||||
index(xref_op,2,prolog_xref,library(prolog_xref)).
|
||||
index(xref_clean,1,prolog_xref,library(prolog_xref)).
|
||||
index(xref_current_source,1,prolog_xref,library(prolog_xref)).
|
||||
index(xref_done,2,prolog_xref,library(prolog_xref)).
|
||||
index(xref_built_in,1,prolog_xref,library(prolog_xref)).
|
||||
index(xref_expand,2,prolog_xref,library(prolog_xref)).
|
||||
index(xref_source_file,3,prolog_xref,library(prolog_xref)).
|
||||
index(xref_source_file,4,prolog_xref,library(prolog_xref)).
|
||||
index(xref_public_list,4,prolog_xref,library(prolog_xref)).
|
||||
index(xref_meta,2,prolog_xref,library(prolog_xref)).
|
||||
index(xref_hook,1,prolog_xref,library(prolog_xref)).
|
||||
index(xref_used_class,2,prolog_xref,library(prolog_xref)).
|
||||
index(xref_defined_class,3,prolog_xref,library(prolog_xref)).
|
||||
index(set_test_options,1,plunit,library(plunit)).
|
||||
index(begin_tests,1,plunit,library(plunit)).
|
||||
index(begin_tests,2,plunit,library(plunit)).
|
||||
index(end_tests,1,plunit,library(plunit)).
|
||||
index(run_tests,0,plunit,library(plunit)).
|
||||
index(run_tests,1,plunit,library(plunit)).
|
||||
index(load_test_files,1,plunit,library(plunit)).
|
||||
index(running_tests,0,plunit,library(plunit)).
|
||||
index(test_report,1,plunit,library(plunit)).
|
||||
index(make_queue,1,queues,library(queues)).
|
||||
index(join_queue,3,queues,library(queues)).
|
||||
index(list_join_queue,3,queues,library(queues)).
|
||||
index(jump_queue,3,queues,library(queues)).
|
||||
index(list_jump_queue,3,queues,library(queues)).
|
||||
index(head_queue,2,queues,library(queues)).
|
||||
index(serve_queue,3,queues,library(queues)).
|
||||
index(length_queue,2,queues,library(queues)).
|
||||
index(empty_queue,1,queues,library(queues)).
|
||||
index(list_to_queue,2,queues,library(queues)).
|
||||
index(queue_to_list,2,queues,library(queues)).
|
||||
index(random,1,random,library(random)).
|
||||
index(random,3,random,library(random)).
|
||||
index(randseq,3,random,library(random)).
|
||||
index(randset,3,random,library(random)).
|
||||
index(getrand,1,random,library(random)).
|
||||
index(setrand,1,random,library(random)).
|
||||
index(rb_new,1,rbtrees,library(rbtrees)).
|
||||
index(rb_empty,1,rbtrees,library(rbtrees)).
|
||||
index(rb_lookup,3,rbtrees,library(rbtrees)).
|
||||
index(rb_update,4,rbtrees,library(rbtrees)).
|
||||
index(rb_update,5,rbtrees,library(rbtrees)).
|
||||
index(rb_apply,4,rbtrees,library(rbtrees)).
|
||||
index(rb_lookupall,3,rbtrees,library(rbtrees)).
|
||||
index(rb_insert,4,rbtrees,library(rbtrees)).
|
||||
index(rb_insert_new,4,rbtrees,library(rbtrees)).
|
||||
index(rb_delete,3,rbtrees,library(rbtrees)).
|
||||
index(rb_delete,4,rbtrees,library(rbtrees)).
|
||||
index(rb_visit,2,rbtrees,library(rbtrees)).
|
||||
index(rb_visit,3,rbtrees,library(rbtrees)).
|
||||
index(rb_keys,2,rbtrees,library(rbtrees)).
|
||||
index(rb_keys,3,rbtrees,library(rbtrees)).
|
||||
index(rb_map,2,rbtrees,library(rbtrees)).
|
||||
index(rb_map,3,rbtrees,library(rbtrees)).
|
||||
index(rb_partial_map,4,rbtrees,library(rbtrees)).
|
||||
index(rb_clone,3,rbtrees,library(rbtrees)).
|
||||
index(rb_clone,4,rbtrees,library(rbtrees)).
|
||||
index(rb_min,3,rbtrees,library(rbtrees)).
|
||||
index(rb_max,3,rbtrees,library(rbtrees)).
|
||||
index(rb_del_min,4,rbtrees,library(rbtrees)).
|
||||
index(rb_del_max,4,rbtrees,library(rbtrees)).
|
||||
index(rb_next,4,rbtrees,library(rbtrees)).
|
||||
index(rb_previous,4,rbtrees,library(rbtrees)).
|
||||
index(list_to_rbtree,2,rbtrees,library(rbtrees)).
|
||||
index(ord_list_to_rbtree,2,rbtrees,library(rbtrees)).
|
||||
index(is_rbtree,1,rbtrees,library(rbtrees)).
|
||||
index(rb_size,2,rbtrees,library(rbtrees)).
|
||||
index(rb_in,3,rbtrees,library(rbtrees)).
|
||||
index(read_line_to_codes,2,read_util,library(readutil)).
|
||||
index(read_line_to_codes,3,read_util,library(readutil)).
|
||||
index(read_stream_to_codes,2,read_util,library(readutil)).
|
||||
index(read_stream_to_codes,3,read_util,library(readutil)).
|
||||
index(read_file_to_codes,3,read_util,library(readutil)).
|
||||
index(read_file_to_terms,3,read_util,library(readutil)).
|
||||
index(regexp,3,regexp,library(regexp)).
|
||||
index(regexp,4,regexp,library(regexp)).
|
||||
index(load_foreign_library,1,shlib,library(shlib)).
|
||||
index(load_foreign_library,2,shlib,library(shlib)).
|
||||
index(unload_foreign_library,1,shlib,library(shlib)).
|
||||
index(unload_foreign_library,2,shlib,library(shlib)).
|
||||
index(current_foreign_library,2,shlib,library(shlib)).
|
||||
index(reload_foreign_libraries,0,shlib,library(shlib)).
|
||||
index(use_foreign_library,1,shlib,library(shlib)).
|
||||
index(use_foreign_library,2,shlib,library(shlib)).
|
||||
index(datime,1,operating_system_support,library(system)).
|
||||
index(delete_file,1,operating_system_support,library(system)).
|
||||
index(delete_file,2,operating_system_support,library(system)).
|
||||
index(directory_files,2,operating_system_support,library(system)).
|
||||
index(environ,2,operating_system_support,library(system)).
|
||||
index(exec,3,operating_system_support,library(system)).
|
||||
index(file_exists,1,operating_system_support,library(system)).
|
||||
index(file_exists,2,operating_system_support,library(system)).
|
||||
index(file_property,2,operating_system_support,library(system)).
|
||||
index(host_id,1,operating_system_support,library(system)).
|
||||
index(host_name,1,operating_system_support,library(system)).
|
||||
index(pid,1,operating_system_support,library(system)).
|
||||
index(kill,2,operating_system_support,library(system)).
|
||||
index(mktemp,2,operating_system_support,library(system)).
|
||||
index(make_directory,1,operating_system_support,library(system)).
|
||||
index(popen,3,operating_system_support,library(system)).
|
||||
index(rename_file,2,operating_system_support,library(system)).
|
||||
index(shell,0,operating_system_support,library(system)).
|
||||
index(shell,1,operating_system_support,library(system)).
|
||||
index(shell,2,operating_system_support,library(system)).
|
||||
index(sleep,1,operating_system_support,library(system)).
|
||||
index(system,0,operating_system_support,library(system)).
|
||||
index(system,1,operating_system_support,library(system)).
|
||||
index(system,2,operating_system_support,library(system)).
|
||||
index(mktime,2,operating_system_support,library(system)).
|
||||
index(tmpnam,1,operating_system_support,library(system)).
|
||||
index(tmp_file,2,operating_system_support,library(system)).
|
||||
index(tmpdir,1,operating_system_support,library(system)).
|
||||
index(wait,2,operating_system_support,library(system)).
|
||||
index(working_directory,2,operating_system_support,library(system)).
|
||||
index(term_hash,2,terms,library(terms)).
|
||||
index(term_hash,4,terms,library(terms)).
|
||||
index(instantiated_term_hash,4,terms,library(terms)).
|
||||
index(variant,2,terms,library(terms)).
|
||||
index(unifiable,3,terms,library(terms)).
|
||||
index(subsumes,2,terms,library(terms)).
|
||||
index(subsumes_chk,2,terms,library(terms)).
|
||||
index(cyclic_term,1,terms,library(terms)).
|
||||
index(variable_in_term,2,terms,library(terms)).
|
||||
index(variables_within_term,3,terms,library(terms)).
|
||||
index(new_variables_in_term,3,terms,library(terms)).
|
||||
index(time_out,3,timeout,library(timeout)).
|
||||
index(get_label,3,trees,library(trees)).
|
||||
index(list_to_tree,2,trees,library(trees)).
|
||||
index(map_tree,3,trees,library(trees)).
|
||||
index(put_label,4,trees,library(trees)).
|
||||
index(tree_size,2,trees,library(trees)).
|
||||
index(tree_to_list,2,trees,library(trees)).
|
52
packages/python/swig/yap4py/prolog/apply.yap
Normal file
@ -0,0 +1,52 @@
|
||||
/**
|
||||
* @file apply.yap
|
||||
* @author VITOR SANTOS COSTA <vsc@VITORs-MBP.lan>
|
||||
* @date Mon Nov 16 23:00:08 2015
|
||||
*
|
||||
* @brief Stub for maplist and friends
|
||||
*
|
||||
*
|
||||
*/
|
||||
|
||||
|
||||
:- module(apply_stub,[]).
|
||||
|
||||
/**
|
||||
* @file apply.yap
|
||||
* @defgroup apply_stub Apply Predicates
|
||||
*
|
||||
* @ingroup library
|
||||
*
|
||||
* @{
|
||||
|
||||
This library provides a SWI-compatible set of utilities for applying a
|
||||
predicate to all elements of a list.
|
||||
|
||||
The apply library is a _stub_, it just forwards definitions to the
|
||||
@ref maplist library (a short usage sketch follows this comment block). The predicates forwarded are:
|
||||
|
||||
- maplist/2,
|
||||
- maplist/3,
|
||||
- maplist/4,
|
||||
- maplist/5,
|
||||
- include/3,
|
||||
- exclude/3,
|
||||
- partition/4,
|
||||
- partition/5
|
||||
|
||||
*/
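A minimal usage sketch of the forwarded predicates, assuming the stub is loaded as `library(apply)`; the goals and data below are invented for illustration:

~~~~~
?- use_module(library(apply)).
% maplist/2: check a property of every element
?- maplist(atom, [a, b, c]).
true.
% maplist/3: relate two lists element by element
?- maplist(atom_length, [ab, abc], Ls).
Ls = [2,3].
% include/3: keep only the elements that satisfy the test
?- include(integer, [a, 1, b, 2], Is).
Is = [1,2].
~~~~~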
|
||||
|
||||
:- reexport(library(maplist),
|
||||
[maplist/2,
|
||||
maplist/3,
|
||||
maplist/4,
|
||||
maplist/5,
|
||||
include/3,
|
||||
exclude/3,
|
||||
partition/4,
|
||||
partition/5
|
||||
]).
|
||||
|
||||
|
||||
%% @}
|
||||
|
38
packages/python/swig/yap4py/prolog/apply_macros.yap
Normal file
@ -0,0 +1,38 @@
|
||||
|
||||
%% @file apply_macros.yap
|
||||
%% @author E. Alphonse from code by Joachim Schimpf
|
||||
%% @date 15 June 2002
|
||||
%% @brief Purpose: Macros to apply a predicate to all elements
|
||||
% of a list or to all sub-terms of a term.
|
||||
|
||||
:- module(apply_macros, []).
|
||||
|
||||
/**
|
||||
|
||||
@defgroup apply_macros Apply Interface to maplist
|
||||
@ingroup library
|
||||
@{
|
||||
|
||||
This library provides a SWI-compatible set of utilities for applying a
|
||||
predicate to all elements of a list.
|
||||
|
||||
The apply library just forwards
|
||||
definitions to the @ref maplist library; these include (see the sketch after this list):
|
||||
|
||||
- maplist/2,
|
||||
- maplist/3,
|
||||
- maplist/4,
|
||||
- maplist/5,
|
||||
- include/3,
|
||||
- exclude/3,
|
||||
- partition/4,
|
||||
- partition/5
|
||||
|
||||
|
||||
*/
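For illustration, a small sketch of exclude/3 and partition/4 as re-exported here (the data is invented for the example):

~~~~~
?- exclude(integer, [a, 1, b, 2], Rest).
Rest = [a,b].
?- partition(integer, [a, 1, b, 2], Ints, Others).
Ints = [1,2], Others = [a,b].
~~~~~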
|
||||
|
||||
:- reexport(maplist).
|
||||
|
||||
:- reexport(mapargs).
|
||||
|
||||
%% @}
|
167
packages/python/swig/yap4py/prolog/arg.yap
Normal file
@ -0,0 +1,167 @@
|
||||
/**
|
||||
* @file arg.yap
|
||||
* @author VITOR SANTOS COSTA <vsc@VITORs-MBP.lan>
|
||||
* @date Tue Nov 17 01:08:55 2015
|
||||
*
|
||||
* @brief
|
||||
*/
|
||||
|
||||
|
||||
/**
|
||||
*
|
||||
|
||||
@defgroup args Term Argument Manipulation.
|
||||
|
||||
@ingroup library
|
||||
|
||||
@{
|
||||
|
||||
Extends arg/3 by including backtracking through arguments and access
|
||||
to sub-arguments,
|
||||
|
||||
- arg0/3
|
||||
- args/3
|
||||
- args0/3
|
||||
- genarg/3
|
||||
- genarg0/3
|
||||
- path_arg/3
|
||||
|
||||
|
||||
It is based on the Quintus Prolog arg library. Except for project, all
|
||||
predicates use the arg/3 argument pattern.
|
||||
This file has been included in the YAP library by Vitor Santos Costa, 2008. No error checking is actually performed within the package: this is left to the C code that implements arg/3 and
|
||||
genarg/3.
|
||||
*/
|
||||
|
||||
:- module(arg,
|
||||
[
|
||||
genarg/3,
|
||||
arg0/3,
|
||||
genarg0/3,
|
||||
args/3,
|
||||
args0/3,
|
||||
% project/3
|
||||
path_arg/3
|
||||
]).
|
||||
|
||||
|
||||
|
||||
/**
|
||||
* @pred arg0( +_Index_, +_Term_ , -_Arg_ )
|
||||
*
|
||||
* Similar to arg/3, but `arg0(0,_T_,_F_)` unifies _F_ with _T_'s principal functor:
|
||||
|
||||
~~~~~~~~~
|
||||
?- arg0(0, f(a,b), A).
|
||||
A = f.
|
||||
?- arg0(1, f(a,b), A).
|
||||
A = a.
|
||||
?- arg0(2, f(a,b), A).
|
||||
A = b.
|
||||
~~~~~~~~~
|
||||
|
||||
*/
|
||||
arg0(0,T,A) :- !,
|
||||
functor(T,A,_).
|
||||
arg0(I,T,A) :-
|
||||
arg(I,T,A).
|
||||
|
||||
/**
|
||||
* @pred genarg0( +_Index_, +_Term_ , -_Arg_ )
|
||||
*
|
||||
* Similar to genarg/3, but `genarg0(0,_T_,_F_)` unifies _F_ with _T_'s principal functor:
|
||||
~~~~~~~~~
|
||||
?- genarg0(I,f(a,b),A).
|
||||
A = f,
|
||||
I = 0 ? ;
|
||||
A = a,
|
||||
I = 1 ? ;
|
||||
A = b,
|
||||
I = 2.
|
||||
~~~~~~~~~
|
||||
|
||||
*/
|
||||
genarg0(I,T,A) :-
|
||||
nonvar(I), !,
|
||||
arg0(I,T,A).
|
||||
genarg0(0,T,A) :-
|
||||
functor(T,A,_).
|
||||
genarg0(I,T,A) :-
|
||||
genarg(I,T,A).
|
||||
|
||||
/**
|
||||
* @pred args( +_Index_, +_ListOfTerms_ , -_ListOfArgs_ )
|
||||
*
|
||||
* Succeeds if _ListOfArgs_ unifies with the application of genarg/3 to every element of _ListOfTerms_.
|
||||
|
||||
It corresponds to calling maplist/3 on genarg/3:
|
||||
~~~~~~~~~
|
||||
args( I, Ts, As) :-
|
||||
maplist( genarg(I), Ts, As).
|
||||
~~~~~~~~~
|
||||
|
||||
Notice that unification allows _ListOfArgs_ to be bound, eg:
|
||||
|
||||
~~~~~~~~~
|
||||
?- args(1, [X1+Y1,X2-Y2,X3*Y3,X4/Y4], [1,1,1,1]).
|
||||
X1 = X2 = X3 = X4 = 1.
|
||||
~~~~~~~~~
|
||||
|
||||
|
||||
*/
|
||||
args(_,[],[]).
|
||||
args(I,[T|List],[A|ArgList]) :-
|
||||
genarg(I, T, A),
|
||||
args(I, List, ArgList).
|
||||
|
||||
/**
|
||||
* @pred args0( +_Index_, +_ListOfTerms_ , -_ListOfArgs_ )
|
||||
*
|
||||
* Succeeds if _ListOfArgs_ unifies with the application of genarg0/3 to every element of _ListOfTerms_.
|
||||
|
||||
It corresponds to calling maplist/3 on genarg0/3:
|
||||
~~~~~~~~~
|
||||
args( I, Ts, As) :-
|
||||
maplist( genarg0(I), Ts, As).
|
||||
~~~~~~~~~
|
||||
|
||||
Notice that unification allows _ListOfArgs_ to be bound, eg:
|
||||
|
||||
~~~~~~~~~
|
||||
?- args(1, [X1+Y1,X2-Y2,X3*Y3,X4/Y4], [1,1,1,1]).
|
||||
X1 = X2 = X3 = X4 = 1.
|
||||
~~~~~~~~~
|
||||
|
||||
|
||||
*/
|
||||
args0(_,[],[]).
|
||||
args0(I,[T|List],[A|ArgList]) :-
|
||||
genarg(I, T, A),
|
||||
args0(I, List, ArgList).
|
||||
|
||||
/**
|
||||
* @pred args0( +_ListOfTerms_ , +_Index_, -_ListOfArgs_ )
|
||||
*
|
||||
* Succeeds if _ListOfArgs_ unifies with the application of genarg0/3 to every element of _ListOfTerms_.
|
||||
|
||||
It corresponds to calling args0/3 but with a different order.
|
||||
*/
|
||||
project(Terms, Index, Args) :-
|
||||
args0(Index, Terms, Args).
|
||||
|
||||
% no error checking here!
|
||||
/**
|
||||
* @pred path_arg( +_Path_ , +_Term_, -_Arg_ )
|
||||
*
|
||||
* Succeeds if _Path_ is empty and _Arg_ unifies with _Term_, or if _Path_ is a list with _Head_ and _Tail_, genarg/3 succeeds on the current term, and path_arg/3 succeeds on its argument.
|
||||
*
|
||||
* Notice that it can be used to enumerate all possible paths in a term.
|
||||
*/
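For instance, a short sketch with an invented term, showing both directions of use:

~~~~~
?- path_arg([2,1], f(a,g(b,c)), X).
X = b.
?- path_arg(Path, f(a,g(b,c)), c).
Path = [2,2] ? ;
no
~~~~~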
|
||||
path_arg([], Term, Term).
|
||||
path_arg([Index|Indices], Term, SubTerm) :-
|
||||
genarg(Index, Term, Arg),
|
||||
path_arg(Indices, Arg, SubTerm).
|
||||
|
||||
%%% @}
|
||||
|
||||
/** @} */
|
296
packages/python/swig/yap4py/prolog/assoc.yap
Normal file
@ -0,0 +1,296 @@
|
||||
|
||||
/**
|
||||
* @file assoc.yap
|
||||
* @author VITOR SANTOS COSTA <vsc@VITORs-MBP.lan>
|
||||
* @date Tue Nov 17 13:53:34 2015
|
||||
*
|
||||
* @brief Red-Black Implementation of Association Lists.
|
||||
*
|
||||
* This file has been included as an YAP library by Vitor Santos Costa, 1999
|
||||
*
|
||||
* Note: the keys should be bound, the associated values need not be.
|
||||
*/
|
||||
|
||||
:- module(assoc, [
|
||||
empty_assoc/1,
|
||||
assoc_to_list/2,
|
||||
is_assoc/1,
|
||||
min_assoc/3,
|
||||
max_assoc/3,
|
||||
gen_assoc/3,
|
||||
get_assoc/3,
|
||||
get_assoc/5,
|
||||
get_next_assoc/4,
|
||||
get_prev_assoc/4,
|
||||
list_to_assoc/2,
|
||||
ord_list_to_assoc/2,
|
||||
map_assoc/2,
|
||||
map_assoc/3,
|
||||
put_assoc/4,
|
||||
del_assoc/4,
|
||||
assoc_to_keys/2,
|
||||
del_min_assoc/4,
|
||||
del_max_assoc/4
|
||||
]).
|
||||
|
||||
/** @defgroup Association_Lists Association Lists
|
||||
@ingroup library
|
||||
@{
|
||||
|
||||
The following association list manipulation predicates are available
|
||||
once included with the `use_module(library(assoc))` command. The
|
||||
original library used Richard O'Keefe's implementation, on top of
|
||||
unbalanced binary trees. The current code utilises code from the
|
||||
red-black trees library and emulates the SICStus Prolog interface.
|
||||
|
||||
The library exports the following definitions (a short usage sketch follows this block):
|
||||
|
||||
- is_assoc/1
|
||||
|
||||
|
||||
*/
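A brief sketch of the basic workflow (keys and values invented for the example; the tree terms bound to _T0_ and _T_ are omitted):

~~~~~
?- list_to_assoc([a-1,b-2], T0),
   put_assoc(c, T0, 3, T),
   get_assoc(b, T, V),
   assoc_to_list(T, Pairs).
V = 2,
Pairs = [a-1,b-2,c-3].
~~~~~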
|
||||
|
||||
|
||||
|
||||
:- meta_predicate map_assoc(2, +, -), map_assoc(1, +).
|
||||
|
||||
:- use_module(library(rbtrees), [
|
||||
rb_empty/1,
|
||||
rb_visit/2,
|
||||
is_rbtree/1,
|
||||
rb_min/3,
|
||||
rb_max/3,
|
||||
rb_in/3,
|
||||
rb_lookup/3,
|
||||
rb_update/5,
|
||||
rb_next/4,
|
||||
rb_previous/4,
|
||||
list_to_rbtree/2,
|
||||
ord_list_to_rbtree/2,
|
||||
rb_map/2,
|
||||
rb_map/3,
|
||||
rb_keys/2,
|
||||
rb_update/4,
|
||||
rb_insert/4,
|
||||
rb_delete/4,
|
||||
rb_del_min/4,
|
||||
rb_del_max/4
|
||||
]).
|
||||
|
||||
/** @pred empty_assoc(+ _Assoc_)
|
||||
|
||||
Succeeds if association list _Assoc_ is empty.
|
||||
|
||||
*/
|
||||
empty_assoc(t).
|
||||
|
||||
/** @pred assoc_to_list(+ _Assoc_,? _List_)
|
||||
|
||||
|
||||
Given an association list _Assoc_ unify _List_ with a list of
|
||||
the form _Key-Val_, where the elements _Key_ are in ascending
|
||||
order.
|
||||
|
||||
|
||||
*/
|
||||
assoc_to_list(t, L) :- !, L = [].
|
||||
assoc_to_list(T, L) :-
|
||||
rb_visit(T, L).
|
||||
|
||||
/** @pred is_assoc(+ _Assoc_)
|
||||
|
||||
Succeeds if _Assoc_ is an association list, that is, if it is a
|
||||
red-black tree.
|
||||
*/
|
||||
is_assoc(t) :- !.
|
||||
is_assoc(T) :-
|
||||
is_rbtree(T).
|
||||
|
||||
/** @pred min_assoc(+ _Assoc_,- _Key_,? _Value_)
|
||||
|
||||
|
||||
Given the association list
|
||||
_Assoc_, _Key_ is the smallest key in the list, and _Value_
|
||||
the associated value.
|
||||
|
||||
|
||||
*/
|
||||
min_assoc(T,K,V) :-
|
||||
rb_min(T,K,V).
|
||||
|
||||
/** @pred max_assoc(+ _Assoc_,- _Key_,? _Value_)
|
||||
|
||||
|
||||
Given the association list
|
||||
_Assoc_, _Key_ is the largest key in the list, and _Value_
|
||||
the associated value.
|
||||
|
||||
|
||||
*/
|
||||
max_assoc(T,K,V) :-
|
||||
rb_max(T,K,V).
|
||||
|
||||
/** @pred gen_assoc( ?_Key_, + _Assoc_, ? _Value_)
|
||||
|
||||
|
||||
Given the association list _Assoc_, unify _Key_ and _Value_
|
||||
with a key-value pair in the list. It can be used to enumerate all elements
|
||||
in the association list.
|
||||
*/
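For example (a sketch; backtracking enumerates the pairs, and the red-black implementation yields them in ascending key order):

~~~~~
?- list_to_assoc([a-1,b-2], T), gen_assoc(K, T, V).
K = a, V = 1 ? ;
K = b, V = 2.
~~~~~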
|
||||
gen_assoc(K, T, V) :-
|
||||
rb_in(K,V,T).
|
||||
|
||||
/** @pred get_assoc(+ _Key_,+ _Assoc_,? _Value_)
|
||||
|
||||
|
||||
If _Key_ is one of the elements in the association list _Assoc_,
|
||||
return the associated value.
|
||||
*/
|
||||
get_assoc(K,T,V) :-
|
||||
rb_lookup(K,V,T).
|
||||
|
||||
/** @pred get_assoc(+ _Key_,+ _Assoc_,? _Value_,+ _NAssoc_,? _NValue_)
|
||||
|
||||
|
||||
If _Key_ is one of the elements in the association list _Assoc_,
|
||||
return the associated value _Value_ and a new association list
|
||||
_NAssoc_ where _Key_ is associated with _NValue_.
|
||||
|
||||
|
||||
*/
|
||||
get_assoc(K,T,V,NT,NV) :-
|
||||
rb_update(T,K,V,NV,NT).
|
||||
|
||||
/** @pred get_next_assoc(+ _Key_,+ _Assoc_,? _Next_,? _Value_)
|
||||
|
||||
If _Key_ is one of the elements in the association list _Assoc_,
|
||||
return the next key, _Next_, and its value, _Value_.
|
||||
|
||||
|
||||
*/
|
||||
get_next_assoc(K,T,KN,VN) :-
|
||||
rb_next(T,K,KN,VN).
|
||||
|
||||
/** @pred get_prev_assoc(+ _Key_,+ _Assoc_,? _Next_,? _Value_)
|
||||
|
||||
|
||||
If _Key_ is one of the elements in the association list _Assoc_,
|
||||
return the previous key, _Next_, and its value, _Value_.
|
||||
|
||||
|
||||
*/
|
||||
get_prev_assoc(K,T,KP,VP) :-
|
||||
rb_previous(T,K,KP,VP).
|
||||
|
||||
/** @pred list_to_assoc(+ _List_,? _Assoc_)
|
||||
|
||||
|
||||
Given a list _List_ such that each element of _List_ is of the
|
||||
form _Key-Val_, and all the _Keys_ are unique, _Assoc_ is
|
||||
the corresponding association list.
|
||||
|
||||
|
||||
*/
|
||||
list_to_assoc(L, T) :-
|
||||
list_to_rbtree(L, T).
|
||||
|
||||
/** @pred ord_list_to_assoc(+ _List_,? _Assoc_)
|
||||
|
||||
|
||||
Given an ordered list _List_ such that each element of _List_ is
|
||||
of the form _Key-Val_, and all the _Keys_ are unique, _Assoc_ is
|
||||
the corresponding association list.
|
||||
|
||||
*/
|
||||
ord_list_to_assoc(L, T) :-
|
||||
ord_list_to_rbtree(L, T).
|
||||
|
||||
/** @pred map_assoc(+ _Pred_,+ _Assoc_)
|
||||
|
||||
|
||||
Succeeds if the unary predicate name _Pred_( _Val_) holds for every
|
||||
element in the association list.
|
||||
|
||||
|
||||
*/
|
||||
map_assoc(t, _) :- !.
|
||||
map_assoc(P, T) :-
|
||||
yap_flag(typein_module, M0),
|
||||
extract_mod(P, M0, M, G),
|
||||
functor(G, Name, 1),
|
||||
rb_map(T, M:Name).
|
||||
|
||||
/** @pred map_assoc(+ _Pred_,+ _Assoc_,? _New_)
|
||||
|
||||
Given the binary predicate name _Pred_ and the association list
|
||||
_Assoc_, _New_ is an association list with keys in _Assoc_,
|
||||
and such that if _Key-Val_ is in _Assoc_, and _Key-Ans_ is in
|
||||
_New_, then _Pred_( _Val_, _Ans_) holds.*/
|
||||
map_assoc(t, T, T) :- !.
|
||||
map_assoc(P, T, NT) :-
|
||||
yap_flag(typein_module, M0),
|
||||
extract_mod(P, M0, M, G),
|
||||
functor(G, Name, 2),
|
||||
rb_map(T, M:Name, NT).
|
||||
|
||||
|
||||
extract_mod(G,_,_,_) :- var(G), !, fail.
|
||||
extract_mod(M:G, _, FM, FG ) :- !,
|
||||
extract_mod(G, M, FM, FG ).
|
||||
extract_mod(G, M, M, G ).
|
||||
|
||||
/** @pred put_assoc(+ _Key_,+ _Assoc_,+ _Val_,+ _New_)
|
||||
|
||||
The association list _New_ includes an element associating
|
||||
_Key_ with _Val_, and all elements of _Assoc_ that did not
|
||||
have key _Key_.
|
||||
|
||||
*/
|
||||
put_assoc(K, T, V, NT) :-
|
||||
rb_update(T, K, V, NT), !.
|
||||
put_assoc(K, t, V, NT) :- !,
|
||||
rbtrees:rb_new(K,V,NT).
|
||||
put_assoc(K, T, V, NT) :-
|
||||
rb_insert(T, K, V, NT).
|
||||
|
||||
/** @pred del_assoc(+ _Key_, + _Assoc_, ? _Val_, ? _NewAssoc_)
|
||||
|
||||
|
||||
Succeeds if _NewAssoc_ is an association list, obtained by removing
|
||||
the element with _Key_ and _Val_ from the list _Assoc_.
|
||||
|
||||
|
||||
*/
|
||||
del_assoc(K, T, V, NT) :-
|
||||
rb_delete(T, K, V, NT).
|
||||
|
||||
/** @pred del_min_assoc(+ _Assoc_, ? _Key_, ? _Val_, ? _NewAssoc_)
|
||||
|
||||
|
||||
Succeeds if _NewAssoc_ is an association list, obtained by removing
|
||||
the smallest element of the list, with _Key_ and _Val_
|
||||
from the list _Assoc_.
|
||||
|
||||
*/
|
||||
del_min_assoc(T, K, V, NT) :-
|
||||
rb_del_min(T, K, V, NT).
|
||||
|
||||
/** @pred del_max_assoc(+ _Assoc_, ? _Key_, ? _Val_, ? _NewAssoc_)
|
||||
|
||||
|
||||
Succeeds if _NewAssoc_ is an association list, obtained by removing
|
||||
the largest element of the list, with _Key_ and _Val_ from the
|
||||
list _Assoc_.
|
||||
|
||||
*/
|
||||
del_max_assoc(T, K, V, NT) :-
|
||||
rb_del_max(T, K, V, NT).
|
||||
|
||||
|
||||
assoc_to_keys(T, Ks) :-
|
||||
rb_keys(T, Ks).
|
||||
|
||||
|
||||
/**
|
||||
@}
|
||||
*/
|
280
packages/python/swig/yap4py/prolog/atts.yap
Normal file
@ -0,0 +1,280 @@
|
||||
/*************************************************************************
|
||||
* *
|
||||
* YAP Prolog *
|
||||
* *
|
||||
* Yap Prolog was developed at NCCUP - Universidade do Porto *
|
||||
* *
|
||||
* Copyright L.Damas, V.S.Costa and Universidade do Porto 1985-1997 *
|
||||
* *
|
||||
**************************************************************************
|
||||
* *
|
||||
* File: atts.yap *
|
||||
* Last rev: 8/2/88 *
|
||||
* mods: *
|
||||
* comments: attribute support for Prolog *
|
||||
* *
|
||||
*************************************************************************/
|
||||
|
||||
:- module(attributes, [op(1150, fx, attribute)]).
|
||||
|
||||
/**
|
||||
*
|
||||
* @defgroup sicsatts SICStus style attribute declarations
|
||||
*
|
||||
* @ingroup attributes
|
||||
*
|
||||
* @{
|
||||
*
|
||||
|
||||
SICStus style attribute declarations are activated through loading the
|
||||
library <tt>atts</tt>. The command
|
||||
|
||||
~~~~~
|
||||
| ?- use_module(library(atts)).
|
||||
~~~~~
|
||||
enables this form of attributed variables.
|
||||
|
||||
The directive
|
||||
|
||||
- attribute/1
|
||||
|
||||
and the following user defined predicates can be used:
|
||||
|
||||
- Module:get_atts/2
|
||||
|
||||
- Module:put_atts/2
|
||||
|
||||
- Module:put_atts/3
|
||||
|
||||
- Module:woken_att_do/4
|
||||
|
||||
*/
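A minimal sketch of the declaration style; the module name `domains` and the attribute `dom/1` are invented for the example:

~~~~~
:- module(domains, [set_dom/2, get_dom/2]).
:- use_module(library(atts)).
:- attribute dom/1.

% attach a domain (a list of allowed values) to an unbound variable
set_dom(X, Dom) :- put_atts(X, +dom(Dom)).

% retrieve the domain, failing if none was attached
get_dom(X, Dom) :- get_atts(X, +dom(Dom)).
~~~~~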
|
||||
|
||||
|
||||
:- use_module(library(lists), [member/2]).
|
||||
|
||||
:- multifile
|
||||
user:goal_expansion/3.
|
||||
:- multifile
|
||||
user:term_expansion/2.
|
||||
:- multifile
|
||||
attributed_module/3.
|
||||
|
||||
:- dynamic existing_attribute/4.
|
||||
:- dynamic modules_with_attributes/1.
|
||||
:- dynamic attributed_module/3.
|
||||
|
||||
modules_with_attributes([]).
|
||||
|
||||
%
|
||||
% defining a new attribute is just a question of establishing a
|
||||
% Functor, Mod -> INT mappings
|
||||
%
|
||||
new_attribute(V) :- var(V), !,
|
||||
throw(error(instantiation_error,attribute(V))).
|
||||
new_attribute((At1,At2)) :-
|
||||
new_attribute(At1),
|
||||
new_attribute(At2).
|
||||
new_attribute(Na/Ar) :-
|
||||
source_module(Mod),
|
||||
functor(S,Na,Ar),
|
||||
existing_attribute(S,Mod,_,_) , !.
|
||||
new_attribute(Na/Ar) :-
|
||||
source_module(Mod),
|
||||
functor(S,Na,Ar),
|
||||
store_new_module(Mod,Ar,Position),
|
||||
assertz(existing_attribute(S,Mod,Ar,Position)).
|
||||
|
||||
store_new_module(Mod,Ar,ArgPosition) :-
|
||||
(
|
||||
retract(attributed_module(Mod,Position,_))
|
||||
->
|
||||
true
|
||||
;
|
||||
retract(modules_with_attributes(Mods)),
|
||||
assert(modules_with_attributes([Mod|Mods])), Position = 2
|
||||
),
|
||||
ArgPosition is Position+1,
|
||||
( Ar == 0 -> NOfAtts is Position+1 ; NOfAtts is Position+Ar),
|
||||
functor(AccessTerm,Mod,NOfAtts),
|
||||
assertz(attributed_module(Mod,NOfAtts,AccessTerm)).
|
||||
|
||||
:- user_defined_directive(attribute(G), attributes:new_attribute(G)).
|
||||
|
||||
/** @pred Module:get_atts( _-Var_, _?ListOfAttributes_)
|
||||
|
||||
|
||||
Unify the list _?ListOfAttributes_ with the attributes for the unbound
|
||||
variable _Var_. Each member of the list must be a bound term of the
|
||||
form `+( _Attribute_)`, `-( _Attribute_)` (the `+`
|
||||
prefix may be dropped). The meaning of <tt>+</tt> and <tt>-</tt> is:
|
||||
+ +( _Attribute_)
|
||||
Unifies _Attribute_ with a corresponding attribute associated with
|
||||
_Var_, fails otherwise.
|
||||
|
||||
+ -( _Attribute_)
|
||||
Succeeds if a corresponding attribute is not associated with
|
||||
_Var_. The arguments of _Attribute_ are ignored.
|
||||
|
||||
|
||||
*/
|
||||
user:goal_expansion(get_atts(Var,AccessSpec), Mod, Goal) :-
|
||||
expand_get_attributes(AccessSpec,Mod,Var,Goal).
|
||||
|
||||
/** @pred Module:put_atts( _-Var_, _?ListOfAttributes_)
|
||||
|
||||
|
||||
Associate with or remove attributes from a variable _Var_. The
|
||||
attributes are given in _?ListOfAttributes_, and the action depends
|
||||
on how they are prefixed:
|
||||
|
||||
+ +( _Attribute_ )
|
||||
Associate _Var_ with _Attribute_. A previous value for the
|
||||
attribute is simply replaced (like with `set_mutable/2`).
|
||||
|
||||
+ -( _Attribute_ )
|
||||
Remove the attribute with the same name. If no such attribute existed,
|
||||
simply succeed.
|
||||
|
||||
*/
|
||||
user:goal_expansion(put_atts(Var,AccessSpec), Mod, Goal) :-
|
||||
expand_put_attributes(AccessSpec, Mod, Var, Goal).
|
||||
|
||||
|
||||
expand_get_attributes(V,_,_,_) :- var(V), !, fail.
|
||||
expand_get_attributes([],_,_,true) :- !.
|
||||
expand_get_attributes([-G1],Mod,V,attributes:free_att(V,Mod,Pos)) :-
|
||||
existing_attribute(G1,Mod,_,Pos), !.
|
||||
expand_get_attributes([+G1],Mod,V,attributes:get_att(V,Mod,Pos,A)) :-
|
||||
existing_attribute(G1,Mod,1,Pos), !,
|
||||
arg(1,G1,A).
|
||||
expand_get_attributes([G1],Mod,V,attributes:get_att(V,Mod,Pos,A)) :-
|
||||
existing_attribute(G1,Mod,1,Pos), !,
|
||||
arg(1,G1,A).
|
||||
expand_get_attributes(Atts,Mod,Var,attributes:get_module_atts(Var,AccessTerm)) :- Atts = [_|_], !,
|
||||
attributed_module(Mod,NOfAtts,AccessTerm),
|
||||
void_term(Void),
|
||||
cvt_atts(Atts,Mod,Void,LAtts),
|
||||
sort(LAtts,SortedLAtts),
|
||||
free_term(Free),
|
||||
build_att_term(1,NOfAtts,SortedLAtts,Free,AccessTerm).
|
||||
expand_get_attributes(Att,Mod,Var,Goal) :-
|
||||
expand_get_attributes([Att],Mod,Var,Goal).
|
||||
|
||||
build_att_term(NOfAtts,NOfAtts,[],_,_) :- !.
|
||||
build_att_term(I0,NOfAtts,[I-Info|SortedLAtts],Void,AccessTerm) :-
|
||||
I is I0+1, !,
|
||||
copy_att_args(Info,I0,NI,AccessTerm),
|
||||
build_att_term(NI,NOfAtts,SortedLAtts,Void,AccessTerm).
|
||||
build_att_term(I0,NOfAtts,SortedLAtts,Void,AccessTerm) :-
|
||||
I is I0+1,
|
||||
arg(I,AccessTerm,Void),
|
||||
build_att_term(I,NOfAtts,SortedLAtts,Void,AccessTerm).
|
||||
|
||||
cvt_atts(V,_,_,_) :- var(V), !, fail.
|
||||
cvt_atts([],_,_,[]).
|
||||
cvt_atts([V|_],_,_,_) :- var(V), !, fail.
|
||||
cvt_atts([+Att|Atts],Mod,Void,[Pos-LAtts|Read]) :- !,
|
||||
existing_attribute(Att,Mod,_,Pos),
|
||||
(atom(Att) -> LAtts = [_] ; Att=..[_|LAtts]),
|
||||
cvt_atts(Atts,Mod,Void,Read).
|
||||
cvt_atts([-Att|Atts],Mod,Void,[Pos-LVoids|Read]) :- !,
|
||||
existing_attribute(Att,Mod,_,Pos),
|
||||
(
|
||||
atom(Att)
|
||||
->
|
||||
LVoids = [Void]
|
||||
;
|
||||
Att =..[_|LAtts],
|
||||
void_vars(LAtts,Void,LVoids)
|
||||
),
|
||||
cvt_atts(Atts,Mod,Void,Read).
|
||||
cvt_atts([Att|Atts],Mod,Void,[Pos-LAtts|Read]) :- !,
|
||||
existing_attribute(Att,Mod,_,Pos),
|
||||
(atom(Att) -> LAtts = [_] ; Att=..[_|LAtts]),
|
||||
cvt_atts(Atts,Mod,Void,Read).
|
||||
|
||||
copy_att_args([],I,I,_).
|
||||
copy_att_args([V|Info],I,NI,AccessTerm) :-
|
||||
I1 is I+1,
|
||||
arg(I1,AccessTerm,V),
|
||||
copy_att_args(Info,I1,NI,AccessTerm).
|
||||
|
||||
void_vars([],_,[]).
|
||||
void_vars([_|LAtts],Void,[Void|LVoids]) :-
|
||||
void_vars(LAtts,Void,LVoids).
|
||||
|
||||
expand_put_attributes(V,_,_,_) :- var(V), !, fail.
|
||||
expand_put_attributes([-G1],Mod,V,attributes:rm_att(V,Mod,NOfAtts,Pos)) :-
|
||||
existing_attribute(G1,Mod,_,Pos), !,
|
||||
attributed_module(Mod,NOfAtts,_).
|
||||
expand_put_attributes([+G1],Mod,V,attributes:put_att(V,Mod,NOfAtts,Pos,A)) :-
|
||||
existing_attribute(G1,Mod,1,Pos), !,
|
||||
attributed_module(Mod,NOfAtts,_),
|
||||
arg(1,G1,A).
|
||||
expand_put_attributes([G1],Mod,V,attributes:put_att(V,Mod,NOfAtts,Pos,A)) :-
|
||||
existing_attribute(G1,Mod,1,Pos), !,
|
||||
attributed_module(Mod,NOfAtts,_),
|
||||
arg(1,G1,A).
|
||||
expand_put_attributes(Atts,Mod,Var,attributes:put_module_atts(Var,AccessTerm)) :- Atts = [_|_], !,
|
||||
attributed_module(Mod,NOfAtts,AccessTerm),
|
||||
void_term(Void),
|
||||
cvt_atts(Atts,Mod,Void,LAtts),
|
||||
sort(LAtts,SortedLAtts),
|
||||
free_term(Free),
|
||||
build_att_term(1,NOfAtts,SortedLAtts,Free,AccessTerm).
|
||||
expand_put_attributes(Att,Mod,Var,Goal) :-
|
||||
expand_put_attributes([Att],Mod,Var,Goal).
|
||||
|
||||
woken_att_do(AttVar, Binding, NGoals, DoNotBind) :-
|
||||
modules_with_attributes(AttVar,Mods0),
|
||||
modules_with_attributes(Mods),
|
||||
find_used(Mods,Mods0,[],ModsI),
|
||||
do_verify_attributes(ModsI, AttVar, Binding, Goals),
|
||||
process_goals(Goals, NGoals, DoNotBind).
|
||||
|
||||
% dirty trick to be able to unbind a variable that has been constrained.
|
||||
process_goals([], [], _).
|
||||
process_goals((M:do_not_bind_variable(Gs)).Goals, (M:Gs).NGoals, true) :- !,
|
||||
process_goals(Goals, NGoals, _).
|
||||
process_goals(G.Goals, G.NGoals, Do) :-
|
||||
process_goals(Goals, NGoals, Do).
|
||||
|
||||
find_used([],_,L,L).
|
||||
find_used([M|Mods],Mods0,L0,Lf) :-
|
||||
member(M,Mods0), !,
|
||||
find_used(Mods,Mods0,[M|L0],Lf).
|
||||
find_used([_|Mods],Mods0,L0,Lf) :-
|
||||
find_used(Mods,Mods0,L0,Lf).
/** @pred Module:verify_attributes( _-Var_, _+Value_, _-Goals_)

The predicate is called when trying to unify the attributed variable
_Var_ with the Prolog term _Value_. Note that _Value_ may itself be
an attributed variable, or may contain attributed variables. The
goal <tt>verify_attributes/3</tt> is actually called before _Var_ is
unified with _Value_.

It is up to the user to define which actions may be performed by
<tt>verify_attributes/3</tt>, but the procedure is expected to return in
_Goals_ a list of goals to be called <em>after</em> _Var_ is
unified with _Value_. If <tt>verify_attributes/3</tt> fails, the
unification will fail.

Notice that <tt>verify_attributes/3</tt> may be called even if _Var_
has no attributes in module <tt>Module</tt>. In this case the routine should
simply succeed with _Goals_ unified with the empty list.

*/
do_verify_attributes([], _, _, []).
|
||||
do_verify_attributes([Mod|Mods], AttVar, Binding, [Mod:Goal|Goals]) :-
|
||||
current_predicate(verify_attributes,Mod:verify_attributes(_,_,_)), !,
|
||||
Mod:verify_attributes(AttVar, Binding, Goal),
|
||||
do_verify_attributes(Mods, AttVar, Binding, Goals).
|
||||
do_verify_attributes([_|Mods], AttVar, Binding, Goals) :-
|
||||
do_verify_attributes(Mods, AttVar, Binding, Goals).
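% A minimal sketch of a handler that do_verify_attributes/4 calls, written
% inside the illustrative `domain` module sketched earlier (dom/1 holding a
% list of admissible values and memberchk/2 from library(lists) are
% assumptions, not part of this library):
%
%     verify_attributes(Var, Value, []) :-
%         get_atts(Var, dom(Dom)), !,
%         ( var(Value) -> true ; memberchk(Value, Dom) ).
%     verify_attributes(_, _, []).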
|
||||
|
||||
/**
|
||||
@}
|
||||
*/
|
127
packages/python/swig/yap4py/prolog/autoloader.yap
Normal file
127
packages/python/swig/yap4py/prolog/autoloader.yap
Normal file
@ -0,0 +1,127 @@
|
||||
|
||||
:- module(autoloader,[make_library_index/0]).
|
||||
|
||||
:- use_module(library(lists),[append/3]).
|
||||
|
||||
:- dynamic exported/3, loaded/1.
|
||||
|
||||
make_library_index :-
|
||||
scan_library_exports,
|
||||
scan_swi_exports.
|
||||
|
||||
scan_library_exports :-
|
||||
% init table file.
|
||||
open('INDEX.pl', write, W),
|
||||
close(W),
|
||||
scan_exports('../GPL/aggregate', library(aggregate)),
|
||||
scan_exports(apply, library(apply)),
|
||||
scan_exports(arg, library(arg)),
|
||||
scan_exports(assoc, library(assoc)),
|
||||
scan_exports(avl, library(avl)),
|
||||
scan_exports(bhash, library(bhash)),
|
||||
scan_exports(charsio, library(charsio)),
|
||||
scan_exports('../packages/chr/chr_swi', library(chr)),
|
||||
scan_exports(clp/clpfd, library(clpfd)),
|
||||
scan_exports('../packages/clpqr/clpr', library(clpr)),
|
||||
scan_exports(gensym, library(gensym)),
|
||||
scan_exports(heaps, library(heaps)),
|
||||
scan_exports('../packages/jpl/jpl', library(jpl)),
|
||||
scan_exports(lists, library(lists)),
|
||||
scan_exports(nb, library(nb)),
|
||||
scan_exports(occurs, library(occurs)),
|
||||
scan_exports('../LGPL/option', library(option)),
|
||||
scan_exports(ordsets, library(ordsets)),
|
||||
scan_exports(pairs, library(pairs)),
|
||||
scan_exports('../LGPL/prolog_xref', library(prolog_xref)),
|
||||
scan_exports('../packages/plunit/plunit', library(plunit)),
|
||||
scan_exports(queues, library(queues)),
|
||||
scan_exports(random, library(random)),
|
||||
scan_exports(rbtrees, library(rbtrees)),
|
||||
scan_exports('../LGPL/readutil', library(readutil)),
|
||||
scan_exports(regexp, library(regexp)),
|
||||
scan_exports('../LGPL/shlib', library(shlib)),
|
||||
scan_exports(system, library(system)),
|
||||
scan_exports(terms, library(terms)),
|
||||
scan_exports(timeout, library(timeout)),
|
||||
scan_exports(trees, library(trees)).
|
||||
|
||||
scan_exports(Library, CallName) :-
|
||||
absolute_file_name(Library, Path,
|
||||
[ file_type(prolog),
|
||||
access(read),
|
||||
file_errors(fail)
|
||||
]),
|
||||
open(Path, read, O),
|
||||
!,
|
||||
get_exports(O, Exports, Module),
|
||||
close(O),
|
||||
open('INDEX.pl', append, W),
|
||||
publish_exports(Exports, W, CallName, Module),
|
||||
close(W).
|
||||
scan_exports(Library) :-
|
||||
format(user_error,'[ warning: library ~w not defined ]~n',[Library]).
|
||||
|
||||
%
|
||||
% SWI is the only language that uses autoload.
|
||||
%
|
||||
scan_swi_exports :-
|
||||
retractall(exported(_,_,_)),
|
||||
absolute_file_name(dialect/swi, Path,
|
||||
[ file_type(prolog),
|
||||
access(read),
|
||||
file_errors(fail)
|
||||
]),
|
||||
open(Path, read, O),
|
||||
get_exports(O, Exports, Module),
|
||||
get_reexports(O, Reexports, Exports),
|
||||
close(O),
|
||||
open('dialect/swi/INDEX.pl', write, W),
|
||||
publish_exports(Reexports, W, library(dialect/swi), Module),
|
||||
close(W).
|
||||
|
||||
get_exports(O, Exports, Module) :-
|
||||
read(O, (:- module(Module,Exports))), !.
|
||||
get_exports(O, Exports, Module) :-
|
||||
get_exports(O, Exports, Module).
|
||||
|
||||
get_reexports(O, Exports, ExportsL) :-
|
||||
read(O, (:- reexport(_File,ExportsI))), !,
|
||||
get_reexports(O, Exports0, ExportsL),
|
||||
append(ExportsI, Exports0, Exports).
|
||||
get_reexports(_, Exports, Exports).
|
||||
|
||||
publish_exports([], _, _, _).
|
||||
publish_exports([F/A|Exports], W, Path, Module) :-
|
||||
publish_export(F, A, W, Path, Module),
|
||||
publish_exports(Exports, W, Path, Module).
|
||||
publish_exports([F//A0|Exports], W, Path, Module) :-
|
||||
A is A0+2,
|
||||
publish_export(F, A, W, Path, Module),
|
||||
publish_exports(Exports, W, Path, Module).
|
||||
publish_exports([op(_,_,_)|Exports], W, Path, Module) :-
|
||||
publish_exports(Exports, W, Path, Module).
|
||||
|
||||
publish_export(F, A, _, _, Module) :-
|
||||
exported(F, A, M), M \= Module, !,
|
||||
format(user_error,'[ warning: clash between ~a and ~a over ~a/~d ]~n',[Module,M,F,A]).
|
||||
publish_export(F, A, W, Path, Module) :-
|
||||
assert(exported(F, A, Module)), !,
|
||||
portray_clause(W, index(F, A, Module, Path)).
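% Each export published above becomes one ground index/4 fact in INDEX.pl,
% for instance (illustrative entry):
%
%     index(append, 3, lists, library(lists)).
%
% find_predicate/2 below uses these facts to locate, and then load, the
% file that exports a given goal.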
|
||||
|
||||
find_predicate(G,ExportingModI) :-
|
||||
nonvar(G), !,
|
||||
functor(G, Name, Arity),
|
||||
index(Name,Arity,ExportingModI,File),
|
||||
ensure_file_loaded(File).
|
||||
find_predicate(G,ExportingModI) :-
|
||||
var(G),
|
||||
index(Name,Arity,ExportingModI,File),
|
||||
functor(G, Name, Arity),
|
||||
ensure_file_loaded(File).
|
||||
|
||||
ensure_file_loaded(File) :-
|
||||
loaded(File), !.
|
||||
ensure_file_loaded(File) :-
|
||||
load_files(autoloader:File,[silent(true),if(not_loaded)]),
|
||||
assert(loaded(File)).
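% Typical use (sketch), once the index/4 facts from INDEX.pl are loaded:
%
%     ?- find_predicate(append(_, _, _), Mod).
%
% looks the goal up in the index and makes sure the exporting file has
% been loaded before the goal is called.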
|
||||
|
152
packages/python/swig/yap4py/prolog/avl.yap
Normal file
152
packages/python/swig/yap4py/prolog/avl.yap
Normal file
@ -0,0 +1,152 @@
|
||||
/*************************************************************************
|
||||
* *
|
||||
* YAP Prolog *
|
||||
* *
|
||||
* Yap Prolog was developed at NCCUP - Universidade do Porto *
|
||||
* *
|
||||
* Copyright L.Damas, V.S.Costa and Universidade do Porto 1985-1997 *
|
||||
* *
|
||||
**************************************************************************
|
||||
* *
|
||||
* File: avl.yap *
|
||||
* Last rev: 5/15/2000 *
|
||||
* mods: *
|
||||
* comments: AVL trees in YAP (from code by M. van Emden, P. Vasey) *
|
||||
* *
|
||||
*************************************************************************/
|
||||
|
||||
/**
|
||||
* @file avl.yap
|
||||
* @author VITOR SANTOS COSTA <vsc@VITORs-MBP.lan>
|
||||
* @date Tue Nov 17 00:59:28 2015
|
||||
*
|
||||
* @brief Support for constructing AVL trees
|
||||
*
|
||||
*
|
||||
*/
|
||||
|
||||
|
||||
|
||||
:- module(avl, [
|
||||
avl_new/1,
|
||||
avl_insert/4,
|
||||
avl_lookup/3
|
||||
]).
|
||||
|
||||
/**
|
||||
* @defgroup avl AVL Trees
|
||||
* @ingroup library
|
||||
@{
|
||||
Supports constructing AVL trees, available through the directive:
|
||||
|
||||
~~~~~~~
|
||||
:- use_module(library(avl)).
|
||||
~~~~~~~
|
||||
|
||||
It includes the following predicates:
|
||||
|
||||
- avl_insert/4
|
||||
- avl_lookup/3
|
||||
- avl_new/1
|
||||
|
||||
AVL trees are balanced binary search trees. They are named after their
inventors, Adelson-Velskii and Landis, and they were the first
dynamically balanced trees to be proposed. The YAP AVL tree manipulation
predicates library uses code originally written by Maarten van Emden and
published in the Logic Programming Newsletter, Autumn 1981. A bug in
this code was fixed by Philip Vasey in the Logic Programming
Newsletter, Summer 1982. The library currently only includes routines to
insert and look up elements in the tree. Please try red-black trees if
you need deletion.
*/
|
||||
|
||||
|
||||
/** @pred avl_new(- _T_)
|
||||
|
||||
|
||||
Create a new tree.
|
||||
|
||||
|
||||
*/
|
||||
avl_new([]).
|
||||
|
||||
/** @pred avl_insert(+ _Key_,? _Value_,+ _T0_,- _TF_)
|
||||
|
||||
|
||||
Add an element with key _Key_ and _Value_ to the AVL tree
|
||||
_T0_ creating a new AVL tree _TF_. Duplicated elements are
|
||||
allowed.
|
||||
|
||||
|
||||
*/
|
||||
avl_insert(Key, Value, T0, TF) :-
|
||||
insert(T0, Key, Value, TF, _).
|
||||
|
||||
insert([], Key, Value, avl([],Key,Value,-,[]), yes).
|
||||
insert(avl(L,Root,RVal,Bl,R), E, Value, NewTree, WhatHasChanged) :-
|
||||
E @< Root, !,
|
||||
insert(L, E, Value, NewL, LeftHasChanged),
|
||||
adjust(avl(NewL,Root,RVal,Bl,R), LeftHasChanged, left, NewTree, WhatHasChanged).
|
||||
insert(avl(L,Root,RVal,Bl,R), E, Val, NewTree, WhatHasChanged) :-
|
||||
% E @>= Root, currently we allow duplicated values, although
|
||||
% lookup will only fetch the first.
|
||||
insert(R, E, Val,NewR, RightHasChanged),
|
||||
adjust(avl(L,Root,RVal,Bl,NewR), RightHasChanged, right, NewTree, WhatHasChanged).
|
||||
|
||||
adjust(Oldtree, no, _, Oldtree, no).
|
||||
adjust(avl(L,Root,RVal,Bl,R), yes, Lor, NewTree, WhatHasChanged) :-
|
||||
table(Bl, Lor, Bl1, WhatHasChanged, ToBeRebalanced),
|
||||
rebalance(avl(L, Root, RVal, Bl, R), Bl1, ToBeRebalanced, NewTree).
|
||||
|
||||
% balance where balance whole tree to be
|
||||
% before inserted after increased rebalanced
|
||||
table(- , left , < , yes , no ).
|
||||
table(- , right , > , yes , no ).
|
||||
table(< , left , - , no , yes ).
|
||||
table(< , right , - , no , no ).
|
||||
table(> , left , - , no , no ).
|
||||
table(> , right , - , no , yes ).
|
||||
|
||||
rebalance(avl(Lst, Root, RVal, _Bl, Rst), Bl1, no, avl(Lst, Root, RVal, Bl1,Rst)).
|
||||
rebalance(OldTree, _, yes, NewTree) :-
|
||||
avl_geq(OldTree,NewTree).
|
||||
|
||||
avl_geq(avl(Alpha,A,VA,>,avl(Beta,B,VB,>,Gamma)),
|
||||
avl(avl(Alpha,A,VA,-,Beta),B,VB,-,Gamma)).
|
||||
avl_geq(avl(avl(Alpha,A,VA,<,Beta),B,VB,<,Gamma),
|
||||
avl(Alpha,A,VA,-,avl(Beta,B,VB,-,Gamma))).
|
||||
avl_geq(avl(Alpha,A,VA,>,avl(avl(Beta,X,VX,Bl1,Gamma),B,VB,<,Delta)),
|
||||
avl(avl(Alpha,A,VA,Bl2,Beta),X,VX,-,avl(Gamma,B,VB,Bl3,Delta))) :-
|
||||
table2(Bl1,Bl2,Bl3).
|
||||
avl_geq(avl(avl(Alpha,A,VA,>,avl(Beta,X,VX,Bl1,Gamma)),B,VB,<,Delta),
|
||||
avl(avl(Alpha,A,VA,Bl2,Beta),X,VX,-,avl(Gamma,B,VB,Bl3,Delta))) :-
|
||||
table2(Bl1,Bl2,Bl3).
|
||||
|
||||
table2(< ,- ,> ).
|
||||
table2(> ,< ,- ).
|
||||
table2(- ,- ,- ).
|
||||
|
||||
/** @pred avl_lookup(+ _Key_,- _Value_,+ _T_)
|
||||
|
||||
|
||||
Lookup an element with key _Key_ in the AVL tree
|
||||
_T_, returning the value _Value_.
|
||||
|
||||
*/
|
||||
|
||||
avl_lookup(Key, Value, avl(L,Key0,KVal,_,R)) :-
|
||||
compare(Cmp, Key, Key0),
|
||||
avl_lookup(Cmp, Value, L, R, Key, KVal).
|
||||
|
||||
avl_lookup(=, Value, _, _, _, Value).
|
||||
avl_lookup(<, Value, L, _, Key, _) :-
|
||||
avl_lookup(Key, Value, L).
|
||||
avl_lookup(>, Value, _, R, Key, _) :-
|
||||
avl_lookup(Key, Value, R).
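% Usage sketch (keys and values are illustrative):
%
%     ?- avl_new(T0),
%        avl_insert(k1, a, T0, T1),
%        avl_insert(k2, b, T1, T2),
%        avl_lookup(k2, V, T2).
%     V = b.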
|
||||
|
||||
|
||||
/**
|
||||
@}
|
||||
*/
|
332
packages/python/swig/yap4py/prolog/bhash.yap
Normal file
332
packages/python/swig/yap4py/prolog/bhash.yap
Normal file
@ -0,0 +1,332 @@
|
||||
%% -*- Prolog -*-
|
||||
|
||||
/**
|
||||
* @file bhash.yap
|
||||
* @author VITOR SANTOS COSTA <vsc@VITORs-MBP.lan>
|
||||
* @date Tue Nov 17 01:11:29 2015
|
||||
*
|
||||
* @brief Backtrackable Hash Tables
|
||||
*
|
||||
*
|
||||
*/
|
||||
|
||||
:- source.
|
||||
:- yap_flag(unknown,error).
|
||||
:- style_check(all).
|
||||
|
||||
:- module(b_hash, [ b_hash_new/1,
|
||||
b_hash_new/2,
|
||||
b_hash_new/4,
|
||||
b_hash_lookup/3,
|
||||
b_hash_update/3,
|
||||
b_hash_update/4,
|
||||
b_hash_insert_new/4,
|
||||
b_hash_insert/4,
|
||||
b_hash_size/2,
|
||||
b_hash_code/2,
|
||||
is_b_hash/1,
|
||||
b_hash_to_list/2,
|
||||
b_hash_values_to_list/2,
|
||||
b_hash_keys_to_list/2
|
||||
]).
|
||||
|
||||
/**
|
||||
* @defgroup bhash Backtrackable Hash Tables
|
||||
* @ingroup library
|
||||
|
||||
@{
|
||||
|
||||
This library implements hash-arrays.
|
||||
It requires the hash key to be a ground term. The library can
|
||||
be loaded as
|
||||
|
||||
:- use_module( library( bhash ) ).
|
||||
|
||||
This code relies on backtrackable updates. The default hash key is
|
||||
generated by term_hash/4.
|
||||
|
||||
*/
|
||||
|
||||
:- use_module(library(terms), [ term_hash/4 ]).
|
||||
|
||||
|
||||
:- meta_predicate(b_hash_new(-,+,3,2)).
|
||||
|
||||
array_default_size(2048).
|
||||
|
||||
/** @pred is_b_hash( +Hash )
|
||||
|
||||
Term _Hash_ is a hash table.
|
||||
*/
|
||||
is_b_hash(V) :- var(V), !, fail.
|
||||
is_b_hash(hash(_,_,_,_,_)).
|
||||
|
||||
/** @pred b_hash_new( -NewHash )
|
||||
|
||||
Create an empty hash table _NewHash_, with 2048 entries.
|
||||
*/
|
||||
b_hash_new(hash(Keys, Vals, Size, N, _, _)) :-
|
||||
array_default_size(Size),
|
||||
array(Keys, Size),
|
||||
array(Vals, Size),
|
||||
create_mutable(0, N).
|
||||
|
||||
/** @pred b_hash_new( -_NewHash_, +_Size_ )
|
||||
|
||||
Create an empty hash table, with _Size_ entries.
|
||||
*/
|
||||
b_hash_new(hash(Keys, Vals, Size, N, _, _), Size) :-
|
||||
array(Keys, Size),
|
||||
array(Vals, Size),
|
||||
create_mutable(0, N).
|
||||
|
||||
/** @pred b_hash_new( -_NewHash_, +_Size_, :_Hash_, :_Cmp_ )
|
||||
|
||||
Create an empty hash table, with _Size_ entries.
_Hash_ defines a partition function, and _Cmp_ defines a comparison function.
|
||||
*/
|
||||
b_hash_new(hash(Keys,Vals, Size, N, HashF, CmpF), Size, HashF, CmpF) :-
|
||||
array(Keys, Size),
|
||||
array(Vals, Size),
|
||||
create_mutable(0, N).
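% Illustrative user-supplied partition and comparison functions for
% b_hash_new/4 (the names my_hash/3 and my_cmp/2 are assumptions; term_hash/4
% comes from library(terms), as imported above):
%
%     my_hash(Key, Size, Index) :-
%         term_hash(Key, -1, Size, Index).
%     my_cmp(A, B) :-
%         A == B.
%
%     ?- b_hash_new(Table, 1024, my_hash, my_cmp).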
|
||||
|
||||
/**
|
||||
@pred b_hash_size( +_Hash_, -_Size_ )
|
||||
|
||||
_Size_ unifies with the size of the hash table _Hash_.
|
||||
*/
|
||||
b_hash_size(hash(_, _, Size, _, _, _), Size).
|
||||
|
||||
/**
|
||||
@pred b_hash_lookup( +_Key_, ?_Val_, +_Hash_ )
|
||||
|
||||
Search the ground term _Key_ in table _Hash_ and unify _Val_ with the associated entry.
|
||||
*/
|
||||
b_hash_lookup(Key, Val, hash(Keys, Vals, Size, _, F, CmpF)):-
|
||||
hash_f(Key, Size, Index, F),
|
||||
fetch_key(Keys, Index, Size, Key, CmpF, ActualIndex),
|
||||
array_element(Vals, ActualIndex, Mutable),
|
||||
get_mutable(Val, Mutable).
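% Usage sketch (keys and values are illustrative):
%
%     ?- b_hash_new(H0),
%        b_hash_insert(H0, foo, 1, H1),
%        b_hash_lookup(foo, V, H1).
%     V = 1.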
|
||||
|
||||
fetch_key(Keys, Index, Size, Key, CmpF, ActualIndex) :-
|
||||
array_element(Keys, Index, El),
|
||||
nonvar(El),
|
||||
(
|
||||
cmp_f(CmpF, El, Key)
|
||||
->
|
||||
Index = ActualIndex
|
||||
;
|
||||
I1 is (Index+1) mod Size,
|
||||
fetch_key(Keys, I1, Size, Key, CmpF, ActualIndex)
|
||||
).
|
||||
|
||||
/**
|
||||
@pred b_hash_update( +_Hash_, +_Key_, +_NewVal_ )

Update the value associated with the ground term _Key_ in table _Hash_ to _NewVal_.
|
||||
*/
|
||||
b_hash_update(Hash, Key, NewVal):-
|
||||
Hash = hash(Keys, Vals, Size, _, F, CmpF),
|
||||
hash_f(Key,Size,Index,F),
|
||||
fetch_key(Keys, Index, Size, Key, CmpF, ActualIndex),
|
||||
array_element(Vals, ActualIndex, Mutable),
|
||||
update_mutable(NewVal, Mutable).
|
||||
|
||||
/**
|
||||
@pred b_hash_update( +_Hash_, +_Key_, -_OldVal_, +_NewVal_ )

Update the value associated with the ground term _Key_ in table _Hash_ to _NewVal_, and unify _OldVal_ with the previous value.
|
||||
*/
|
||||
b_hash_update(Hash, Key, OldVal, NewVal):-
|
||||
Hash = hash(Keys, Vals, Size, _, F, CmpF),
|
||||
hash_f(Key,Size,Index,F),
|
||||
fetch_key(Keys, Index, Size, Key, CmpF, ActualIndex),
|
||||
array_element(Vals, ActualIndex, Mutable),
|
||||
get_mutable(OldVal, Mutable),
|
||||
update_mutable(NewVal, Mutable).
|
||||
|
||||
/** @pred b_hash_insert(+_Hash_, +_Key_, _Val_, +_NewHash_ )
|
||||
|
||||
Insert the term _Key_-_Val_ in table _Hash_ and unify _NewHash_ with the result. If ground term _Key_ exists, update the dictionary.
|
||||
*/
|
||||
b_hash_insert(Hash, Key, NewVal, NewHash):-
|
||||
Hash = hash(Keys, Vals, Size, N, F, CmpF),
|
||||
hash_f(Key,Size,Index,F),
|
||||
find_or_insert(Keys, Index, Size, N, CmpF, Vals, Key, NewVal, Hash, NewHash).
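% find_or_insert/10 below implements open addressing with linear probing:
% starting at the hashed Index it scans (Index+1) mod Size until it finds
% either a free slot (a new entry is added) or a slot whose key matches
% under CmpF (the value mutable is updated in place).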
|
||||
|
||||
find_or_insert(Keys, Index, Size, N, CmpF, Vals, Key, NewVal, Hash, NewHash) :-
|
||||
array_element(Keys, Index, El),
|
||||
(
|
||||
var(El)
|
||||
->
|
||||
add_element(Keys, Index, Size, N, Vals, Key, NewVal, Hash, NewHash)
|
||||
;
|
||||
cmp_f(CmpF, El, Key)
|
||||
->
|
||||
% do rb_update
|
||||
array_element(Vals, Index, Mutable),
|
||||
update_mutable(NewVal, Mutable),
|
||||
Hash = NewHash
|
||||
;
|
||||
I1 is (Index+1) mod Size,
|
||||
find_or_insert(Keys, I1, Size, N, CmpF, Vals, Key, NewVal, Hash, NewHash)
|
||||
).
|
||||
|
||||
/**
|
||||
@pred b_hash_insert_new(+_Hash_, +_Key_, _Val_, +_NewHash_ )
|
||||
|
||||
Insert the term _Key_-_Val_ in table _Hash_ and unify _NewHash_ with the result. If ground term _Key_ exists, fail.
|
||||
*/
|
||||
b_hash_insert_new(Hash, Key, NewVal, NewHash):-
|
||||
Hash = hash(Keys, Vals, Size, N, F, CmpF),
|
||||
hash_f(Key,Size,Index,F),
|
||||
find_or_insert_new(Keys, Index, Size, N, CmpF, Vals, Key, NewVal, Hash, NewHash).
|
||||
|
||||
find_or_insert_new(Keys, Index, Size, N, CmpF, Vals, Key, NewVal, Hash, NewHash) :-
|
||||
array_element(Keys, Index, El),
|
||||
(
|
||||
var(El)
|
||||
->
|
||||
add_element(Keys, Index, Size, N, Vals, Key, NewVal, Hash, NewHash)
|
||||
;
|
||||
cmp_f(CmpF, El, Key)
|
||||
->
|
||||
fail
|
||||
;
|
||||
I1 is (Index+1) mod Size,
|
||||
find_or_insert_new(Keys, I1, Size, N, CmpF, Vals, Key, NewVal, Hash, NewHash)
|
||||
).
|
||||
|
||||
add_element(Keys, Index, Size, N, Vals, Key, NewVal, Hash, NewHash) :-
|
||||
get_mutable(NEls, N),
|
||||
NN is NEls+1,
|
||||
update_mutable(NN, N),
|
||||
array_element(Keys, Index, Key),
|
||||
update_mutable(NN, N),
|
||||
array_element(Vals, Index, Mutable),
|
||||
create_mutable(NewVal, Mutable),
|
||||
(
|
||||
NN > Size/3
|
||||
->
|
||||
expand_array(Hash, NewHash)
|
||||
;
|
||||
Hash = NewHash
|
||||
).
|
||||
|
||||
expand_array(Hash, NewHash) :-
|
||||
Hash == NewHash, !,
|
||||
Hash = hash(Keys, Vals, Size, _X, F, _CmpF),
|
||||
new_size(Size, NewSize),
|
||||
array(NewKeys, NewSize),
|
||||
array(NewVals, NewSize),
|
||||
copy_hash_table(Size, Keys, Vals, F, NewSize, NewKeys, NewVals),
|
||||
/* overwrite in place */
|
||||
setarg(1, Hash, NewKeys),
|
||||
setarg(2, Hash, NewVals),
|
||||
setarg(3, Hash, NewSize).
|
||||
|
||||
expand_array(Hash, hash(NewKeys, NewVals, NewSize, X, F, CmpF)) :-
|
||||
Hash = hash(Keys, Vals, Size, X, F, CmpF),
|
||||
new_size(Size, NewSize),
|
||||
array(NewKeys, NewSize),
|
||||
array(NewVals, NewSize),
|
||||
copy_hash_table(Size, Keys, Vals, F, NewSize, NewKeys, NewVals).
|
||||
|
||||
new_size(Size, NewSize) :-
|
||||
Size > 1048576, !,
|
||||
NewSize is Size+1048576.
|
||||
new_size(Size, NewSize) :-
|
||||
NewSize is Size*2.
|
||||
|
||||
copy_hash_table(0, _, _, _, _, _, _) :- !.
|
||||
copy_hash_table(I1, Keys, Vals, F, Size, NewKeys, NewVals) :-
|
||||
I is I1-1,
|
||||
array_element(Keys, I, Key),
|
||||
nonvar(Key), !,
|
||||
array_element(Vals, I, Val),
|
||||
insert_el(Key, Val, Size, F, NewKeys, NewVals),
|
||||
copy_hash_table(I, Keys, Vals, F, Size, NewKeys, NewVals).
|
||||
copy_hash_table(I1, Keys, Vals, F, Size, NewKeys, NewVals) :-
|
||||
I is I1-1,
|
||||
copy_hash_table(I, Keys, Vals, F, Size, NewKeys, NewVals).
|
||||
|
||||
insert_el(Key, Val, Size, F, NewKeys, NewVals) :-
|
||||
hash_f(Key,Size,Index, F),
|
||||
find_free(Index, Size, NewKeys, TrueIndex),
|
||||
array_element(NewKeys, TrueIndex, Key),
|
||||
array_element(NewVals, TrueIndex, Val).
|
||||
|
||||
find_free(Index, Size, Keys, NewIndex) :-
|
||||
array_element(Keys, Index, El),
|
||||
(
|
||||
var(El)
|
||||
->
|
||||
NewIndex = Index
|
||||
;
|
||||
I1 is (Index+1) mod Size,
|
||||
find_free(I1, Size, Keys, NewIndex)
|
||||
).
|
||||
|
||||
hash_f(Key, Size, Index, F) :-
|
||||
var(F), !,
|
||||
term_hash(Key,-1,Size,Index).
|
||||
hash_f(Key, Size, Index, F) :-
|
||||
call(F, Key, Size, Index).
|
||||
|
||||
cmp_f(F, A, B) :-
|
||||
var(F), !,
|
||||
A == B.
|
||||
cmp_f(F, A, B) :-
|
||||
call(F, A, B).
|
||||
|
||||
/**
|
||||
@pred b_hash_to_list(+_Hash_, -_KeyValList_ )
|
||||
|
||||
The term _KeyValList_ unifies with a list containing all terms _Key_-_Val_ in the hash table.
|
||||
*/
|
||||
b_hash_to_list(hash(Keys, Vals, _, _, _, _), LKeyVals) :-
|
||||
Keys =.. (_.LKs),
|
||||
Vals =.. (_.LVs),
|
||||
mklistpairs(LKs, LVs, LKeyVals).
|
||||
|
||||
/**
|
||||
@pred b_hash_keys_to_list(+_Hash_, -_KeyList_ )

The term _KeyList_ unifies with a list containing all keys in the hash table.
|
||||
*/
|
||||
b_hash_keys_to_list(hash(Keys, _, _, _, _, _), LKeys) :-
|
||||
Keys =.. (_.LKs),
|
||||
mklistels(LKs, LKeys).
|
||||
|
||||
/**
|
||||
@pred b_hash_values_to_list(+_Hash_, -_ValList_ )

The term _ValList_ unifies with a list containing all values in the hash table.
|
||||
*/
|
||||
b_hash_values_to_list(hash(_, Vals, _, _, _, _), LVals) :-
|
||||
Vals =.. (_.LVs),
|
||||
mklistvals(LVs, LVals).
|
||||
|
||||
mklistpairs([], [], []).
|
||||
mklistpairs(V.LKs, _.LVs, KeyVals) :- var(V), !,
|
||||
mklistpairs(LKs, LVs, KeyVals).
|
||||
mklistpairs(K.LKs, V.LVs, (K-VV).KeyVals) :-
|
||||
get_mutable(VV, V),
|
||||
mklistpairs(LKs, LVs, KeyVals).
|
||||
|
||||
mklistels([], []).
|
||||
mklistels(V.Els, NEls) :- var(V), !,
|
||||
mklistels(Els, NEls).
|
||||
mklistels(K.Els, K.NEls) :-
|
||||
mklistels(Els, NEls).
|
||||
|
||||
mklistvals([], []).
|
||||
mklistvals(V.Vals, NVals) :- var(V), !,
|
||||
mklistvals(Vals, NVals).
|
||||
mklistvals(K.Vals, KK.NVals) :-
|
||||
get_mutable(KK, K),
|
||||
mklistvals(Vals, NVals).
|
||||
|
||||
/**
|
||||
@}
|
||||
*/
|
477
packages/python/swig/yap4py/prolog/block_diagram.yap
Normal file
477
packages/python/swig/yap4py/prolog/block_diagram.yap
Normal file
@ -0,0 +1,477 @@
|
||||
%%% -*- Mode: Prolog; -*-
|
||||
|
||||
/**
|
||||
* @file block_diagram.yap
|
||||
* @author Theofrastos Mantadelis, Sugestions from Paulo Moura
|
||||
* @date Tue Nov 17 14:12:02 2015
|
||||
*
|
||||
* @brief Graph the program structure.
|
||||
*
|
||||
* @{
|
||||
*/
|
||||
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
%
|
||||
% Flags was developed at Katholieke Universiteit Leuven
|
||||
%
|
||||
% Copyright 2010
|
||||
% Katholieke Universiteit Leuven
|
||||
%
|
||||
% Contributions to this file:
|
||||
% Author: Theofrastos Mantadelis
|
||||
% Sugestions: Paulo Moura
|
||||
% Version: 1
|
||||
% Date: 19/11/2010
|
||||
%
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
%
|
||||
% Artistic License 2.0
|
||||
%
|
||||
% Copyright (c) 2000-2006, The Perl Foundation.
|
||||
%
|
||||
% Everyone is permitted to copy and distribute verbatim copies of this
|
||||
% license document, but changing it is not allowed. Preamble
|
||||
%
|
||||
% This license establishes the terms under which a given free software
|
||||
% Package may be copied, modified, distributed, and/or
|
||||
% redistributed. The intent is that the Copyright Holder maintains some
|
||||
% artistic control over the development of that Package while still
|
||||
% keeping the Package available as open source and free software.
|
||||
%
|
||||
% You are always permitted to make arrangements wholly outside of this
|
||||
% license directly with the Copyright Holder of a given Package. If the
|
||||
% terms of this license do not permit the full use that you propose to
|
||||
% make of the Package, you should contact the Copyright Holder and seek
|
||||
% a different licensing arrangement. Definitions
|
||||
%
|
||||
% "Copyright Holder" means the individual(s) or organization(s) named in
|
||||
% the copyright notice for the entire Package.
|
||||
%
|
||||
% "Contributor" means any party that has contributed code or other
|
||||
% material to the Package, in accordance with the Copyright Holder's
|
||||
% procedures.
|
||||
%
|
||||
% "You" and "your" means any person who would like to copy, distribute,
|
||||
% or modify the Package.
|
||||
%
|
||||
% "Package" means the collection of files distributed by the Copyright
|
||||
% Holder, and derivatives of that collection and/or of those files. A
|
||||
% given Package may consist of either the Standard Version, or a
|
||||
% Modified Version.
|
||||
%
|
||||
% "Distribute" means providing a copy of the Package or making it
|
||||
% accessible to anyone else, or in the case of a company or
|
||||
% organization, to others outside of your company or organization.
|
||||
%
|
||||
% "Distributor Fee" means any fee that you charge for Distributing this
|
||||
% Package or providing support for this Package to another party. It
|
||||
% does not mean licensing fees.
|
||||
%
|
||||
% "Standard Version" refers to the Package if it has not been modified,
|
||||
% or has been modified only in ways explicitly requested by the
|
||||
% Copyright Holder.
|
||||
%
|
||||
% "Modified Version" means the Package, if it has been changed, and such
|
||||
% changes were not explicitly requested by the Copyright Holder.
|
||||
%
|
||||
% "Original License" means this Artistic License as Distributed with the
|
||||
% Standard Version of the Package, in its current version or as it may
|
||||
% be modified by The Perl Foundation in the future.
|
||||
%
|
||||
% "Source" form means the source code, documentation source, and
|
||||
% configuration files for the Package.
|
||||
%
|
||||
% "Compiled" form means the compiled bytecode, object code, binary, or
|
||||
% any other form resulting from mechanical transformation or translation
|
||||
% of the Source form.
|
||||
%
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
%
|
||||
% Permission for Use and Modification Without Distribution
|
||||
%
|
||||
% (1) You are permitted to use the Standard Version and create and use
|
||||
% Modified Versions for any purpose without restriction, provided that
|
||||
% you do not Distribute the Modified Version.
|
||||
%
|
||||
% Permissions for Redistribution of the Standard Version
|
||||
%
|
||||
% (2) You may Distribute verbatim copies of the Source form of the
|
||||
% Standard Version of this Package in any medium without restriction,
|
||||
% either gratis or for a Distributor Fee, provided that you duplicate
|
||||
% all of the original copyright notices and associated disclaimers. At
|
||||
% your discretion, such verbatim copies may or may not include a
|
||||
% Compiled form of the Package.
|
||||
%
|
||||
% (3) You may apply any bug fixes, portability changes, and other
|
||||
% modifications made available from the Copyright Holder. The resulting
|
||||
% Package will still be considered the Standard Version, and as such
|
||||
% will be subject to the Original License.
|
||||
%
|
||||
% Distribution of Modified Versions of the Package as Source
|
||||
%
|
||||
% (4) You may Distribute your Modified Version as Source (either gratis
|
||||
% or for a Distributor Fee, and with or without a Compiled form of the
|
||||
% Modified Version) provided that you clearly document how it differs
|
||||
% from the Standard Version, including, but not limited to, documenting
|
||||
% any non-standard features, executables, or modules, and provided that
|
||||
% you do at least ONE of the following:
|
||||
%
|
||||
% (a) make the Modified Version available to the Copyright Holder of the
|
||||
% Standard Version, under the Original License, so that the Copyright
|
||||
% Holder may include your modifications in the Standard Version. (b)
|
||||
% ensure that installation of your Modified Version does not prevent the
|
||||
% user installing or running the Standard Version. In addition, the
|
||||
% modified Version must bear a name that is different from the name of
|
||||
% the Standard Version. (c) allow anyone who receives a copy of the
|
||||
% Modified Version to make the Source form of the Modified Version
|
||||
% available to others under (i) the Original License or (ii) a license
|
||||
% that permits the licensee to freely copy, modify and redistribute the
|
||||
% Modified Version using the same licensing terms that apply to the copy
|
||||
% that the licensee received, and requires that the Source form of the
|
||||
% Modified Version, and of any works derived from it, be made freely
|
||||
% available in that license fees are prohibited but Distributor Fees are
|
||||
% allowed.
|
||||
%
|
||||
% Distribution of Compiled Forms of the Standard Version or
|
||||
% Modified Versions without the Source
|
||||
%
|
||||
% (5) You may Distribute Compiled forms of the Standard Version without
|
||||
% the Source, provided that you include complete instructions on how to
|
||||
% get the Source of the Standard Version. Such instructions must be
|
||||
% valid at the time of your distribution. If these instructions, at any
|
||||
% time while you are carrying out such distribution, become invalid, you
|
||||
% must provide new instructions on demand or cease further
|
||||
% distribution. If you provide valid instructions or cease distribution
|
||||
% within thirty days after you become aware that the instructions are
|
||||
% invalid, then you do not forfeit any of your rights under this
|
||||
% license.
|
||||
%
|
||||
% (6) You may Distribute a Modified Version in Compiled form without the
|
||||
% Source, provided that you comply with Section 4 with respect to the
|
||||
% Source of the Modified Version.
|
||||
%
|
||||
% Aggregating or Linking the Package
|
||||
%
|
||||
% (7) You may aggregate the Package (either the Standard Version or
|
||||
% Modified Version) with other packages and Distribute the resulting
|
||||
% aggregation provided that you do not charge a licensing fee for the
|
||||
% Package. Distributor Fees are permitted, and licensing fees for other
|
||||
% components in the aggregation are permitted. The terms of this license
|
||||
% apply to the use and Distribution of the Standard or Modified Versions
|
||||
% as included in the aggregation.
|
||||
%
|
||||
% (8) You are permitted to link Modified and Standard Versions with
|
||||
% other works, to embed the Package in a larger work of your own, or to
|
||||
% build stand-alone binary or bytecode versions of applications that
|
||||
% include the Package, and Distribute the result without restriction,
|
||||
% provided the result does not expose a direct interface to the Package.
|
||||
%
|
||||
% Items That are Not Considered Part of a Modified Version
|
||||
%
|
||||
% (9) Works (including, but not limited to, modules and scripts) that
|
||||
% merely extend or make use of the Package, do not, by themselves, cause
|
||||
% the Package to be a Modified Version. In addition, such works are not
|
||||
% considered parts of the Package itself, and are not subject to the
|
||||
% terms of this license.
|
||||
%
|
||||
% General Provisions
|
||||
%
|
||||
% (10) Any use, modification, and distribution of the Standard or
|
||||
% Modified Versions is governed by this Artistic License. By using,
|
||||
% modifying or distributing the Package, you accept this license. Do not
|
||||
% use, modify, or distribute the Package, if you do not accept this
|
||||
% license.
|
||||
%
|
||||
% (11) If your Modified Version has been derived from a Modified Version
|
||||
% made by someone other than you, you are nevertheless required to
|
||||
% ensure that your Modified Version complies with the requirements of
|
||||
% this license.
|
||||
%
|
||||
% (12) This license does not grant you the right to use any trademark,
|
||||
% service mark, tradename, or logo of the Copyright Holder.
|
||||
%
|
||||
% (13) This license includes the non-exclusive, worldwide,
|
||||
% free-of-charge patent license to make, have made, use, offer to sell,
|
||||
% sell, import and otherwise transfer the Package with respect to any
|
||||
% patent claims licensable by the Copyright Holder that are necessarily
|
||||
% infringed by the Package. If you institute patent litigation
|
||||
% (including a cross-claim or counterclaim) against any party alleging
|
||||
% that the Package constitutes direct or contributory patent
|
||||
% infringement, then this Artistic License to you shall terminate on the
|
||||
% date that such litigation is filed.
|
||||
%
|
||||
% (14) Disclaimer of Warranty: THE PACKAGE IS PROVIDED BY THE COPYRIGHT
|
||||
% HOLDER AND CONTRIBUTORS "AS IS' AND WITHOUT ANY EXPRESS OR IMPLIED
|
||||
% WARRANTIES. THE IMPLIED WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
|
||||
% PARTICULAR PURPOSE, OR NON-INFRINGEMENT ARE DISCLAIMED TO THE EXTENT
|
||||
% PERMITTED BY YOUR LOCAL LAW. UNLESS REQUIRED BY LAW, NO COPYRIGHT
|
||||
% HOLDER OR CONTRIBUTOR WILL BE LIABLE FOR ANY DIRECT, INDIRECT,
|
||||
% INCIDENTAL, OR CONSEQUENTIAL DAMAGES ARISING IN ANY WAY OUT OF THE USE
|
||||
% OF THE PACKAGE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
%
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
|
||||
/** @defgroup block_diagram Block Diagram
|
||||
@ingroup library
|
||||
@{
|
||||
|
||||
This library provides a way of visualizing a Prolog program using
modules with blocks. To use it, load:
`:- use_module(library(block_diagram))`.
|
||||
|
||||
|
||||
*/
|
||||
|
||||
:- module(block_diagram, [make_diagram/2, make_diagram/5]).
|
||||
|
||||
/* ---------------------------------------------------------------------- *\
|
||||
|* Missing stuff: a parameter that bounds the module connection depth *|
|
||||
|* and a parameter that disables/limits the text over edges *|
|
||||
\* ---------------------------------------------------------------------- */
|
||||
|
||||
:- style_check(all).
|
||||
:- yap_flag(unknown, error).
|
||||
|
||||
|
||||
:- use_module(library(charsio), [term_to_atom/2]).
|
||||
:- use_module(library(lists), [memberchk/2, member/2, append/3]).
|
||||
:- use_module(library(system), [working_directory/2]).
|
||||
:- dynamic([seen_module/1, parameter/1]).
|
||||
|
||||
parameter(texts((+inf))).
|
||||
parameter(depth((+inf))).
|
||||
parameter(default_ext('.yap')).
|
||||
|
||||
/** @pred make_diagram(+Inputfilename, +Outputfilename)

This will crawl the files following the use_module and ensure_loaded directives within the Inputfilename.
The result will be a file in dot format.
You can make a pdf at the shell by running `dot -Tpdf filename > output.pdf`.

*/
make_diagram(InputFile, OutputFile):-
|
||||
tell(OutputFile),
|
||||
write('digraph G {\nrankdir=BT'), nl,
|
||||
extract_name_file(InputFile, Name, File),
|
||||
nb_setval(depth, 0),
|
||||
read_module_file(File, Name),
|
||||
write_explicit,
|
||||
write('}'), nl,
|
||||
told.
|
||||
|
||||
/** @pred make_diagram(+Inputfilename, +Outputfilename, +Predicate, +Depth, +Extension)

The same as make_diagram/2, but you can define how many of the imported/exported predicates will be shown with _Predicate_, and how deep the crawler is allowed to go with _Depth_. The _Extension_ is used if the use_module directives in the file do not include a file extension.

*/
make_diagram(InputFile, OutputFile, Texts, Depth, Ext):-
|
||||
integer(Texts),
|
||||
integer(Depth),
|
||||
retractall(parameter(_)),
|
||||
assertz(parameter(texts(Texts))),
|
||||
assertz(parameter(depth(Depth))),
|
||||
assertz(parameter(default_ext(Ext))),
|
||||
make_diagram(InputFile, OutputFile),
|
||||
retractall(parameter(_)),
|
||||
assertz(parameter(texts((+inf)))),
|
||||
assertz(parameter(depth((+inf)))),
|
||||
assertz(parameter(default_ext('.yap'))).
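% Usage sketch (file names are illustrative):
%
%     ?- use_module(library(block_diagram)).
%     ?- make_diagram('myprogram.yap', 'myprogram.dot').
%
% and then, at the shell:  dot -Tpdf myprogram.dot > myprogram.pdf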
|
||||
|
||||
path_seperator('\\'):-
|
||||
yap_flag(windows, true), !.
|
||||
path_seperator('/').
|
||||
|
||||
split_path_file(PathFile, Path, File):-
|
||||
path_seperator(PathSeperator),
|
||||
atom_concat(Path, File, PathFile),
|
||||
name(PathSeperator, [PathSeperatorName]),
|
||||
name(File, FileName),
|
||||
\+ memberchk(PathSeperatorName, FileName),
|
||||
!.
|
||||
split_file_ext(FileExt, File, Ext):-
|
||||
atom_concat(File, Ext, FileExt),
|
||||
atom_concat('.', _, Ext),
|
||||
name('.', [DotName]),
|
||||
name(Ext, ExtName),
|
||||
findall(A, (member(A, ExtName), A = DotName), L),
|
||||
length(L, 1), !.
|
||||
|
||||
|
||||
parse_module_directive(':-'(module(Name)), _):-
|
||||
seen_module(node(Name)), !.
|
||||
parse_module_directive(':-'(module(Name, _Exported)), _):-
|
||||
seen_module(node(Name)), !.
|
||||
parse_module_directive(':-'(module(Name, Exported)), Shape):-
|
||||
!, \+ seen_module(node(Name)),
|
||||
assertz(seen_module(node(Name))),
|
||||
list_to_message(Exported, ExportedMessage),
|
||||
atom_concat([Name, ' [shape=', Shape,',label="', Name, '\\n', ExportedMessage, '"]'], NodeDefinition),
|
||||
write(NodeDefinition), nl.
|
||||
parse_module_directive(':-'(module(Name)), Shape):-
|
||||
\+ seen_module(node(Name)),
|
||||
assertz(seen_module(node(Name))),
|
||||
atom_concat([Name, ' [shape=', Shape,',label="', Name, '"]'], NodeDefinition),
|
||||
write(NodeDefinition), nl.
|
||||
|
||||
extract_name_file(PathFile, Name, FinalFile):-
|
||||
split_path_file(PathFile, Path, FileName), Path \== '', !,
|
||||
extract_name_file(FileName, Name, File),
|
||||
atom_concat(Path, File, FinalFile).
|
||||
extract_name_file(File, Name, File):-
|
||||
split_file_ext(File, Name, _), !.
|
||||
extract_name_file(Name, Name, File):-
|
||||
parameter(default_ext(Ext)),
|
||||
atom_concat(Name, Ext, File).
|
||||
|
||||
read_use_module_directive(':-'(ensure_loaded(library(Name))), Name, library(Name), []):- !.
|
||||
read_use_module_directive(':-'(ensure_loaded(Path)), Name, FinalFile, []):-
|
||||
extract_name_file(Path, Name, FinalFile), !.
|
||||
read_use_module_directive(':-'(use_module(library(Name))), Name, library(Name), []):- !.
|
||||
read_use_module_directive(':-'(use_module(Path)), Name, FinalFile, []):-
|
||||
extract_name_file(Path, Name, FinalFile), !.
|
||||
read_use_module_directive(':-'(use_module(library(Name), Import)), Name, library(Name), Import):- !.
|
||||
read_use_module_directive(':-'(use_module(Path, Import)), Name, FinalFile, Import):-
|
||||
extract_name_file(Path, Name, FinalFile), !.
|
||||
read_use_module_directive(':-'(use_module(Name, Path, Import)), Name, FinalFile, Import):-
|
||||
nonvar(Path),
|
||||
extract_name_file(Path, _, FinalFile), !.
|
||||
read_use_module_directive(':-'(use_module(Name, Path, Import)), Name, FinalFile, Import):-
|
||||
var(Path),
|
||||
extract_name_file(Name, _, FinalFile), !.
|
||||
|
||||
parse_use_module_directive(Module, Directive):-
|
||||
read_use_module_directive(Directive, Name, File, Imported),
|
||||
parse_use_module_directive(Module, Name, File, Imported).
|
||||
parse_use_module_directive(Module, Name, _File, _Imported):-
|
||||
seen_module(edge(Module, Name)), !.
|
||||
parse_use_module_directive(Module, Name, File, Imported):-
|
||||
\+ seen_module(edge(Module, Name)),
|
||||
assertz(seen_module(edge(Module, Name))),
|
||||
read_module_file(File, Name),
|
||||
list_to_message(Imported, ImportedMessage),
|
||||
atom_concat([Module, ' -> ', Name, ' [label="', ImportedMessage, '"]'], NodeConnection),
|
||||
write(NodeConnection), nl.
|
||||
|
||||
list_to_message(List, Message):-
|
||||
length(List, Len),
|
||||
parameter(texts(TextCnt)),
|
||||
(Len > TextCnt + 1 ->
|
||||
append(FirstCnt, _, List),
|
||||
length(FirstCnt, TextCnt),
|
||||
append(FirstCnt, ['...'], First)
|
||||
;
|
||||
First = List
|
||||
),
|
||||
list_to_message(First, '', Message).
|
||||
|
||||
list_to_message([], Message, Message).
|
||||
list_to_message([H|T], '', FinalMessage):-
|
||||
term_to_atom(H, HAtom), !,
|
||||
list_to_message(T, HAtom, FinalMessage).
|
||||
list_to_message([H|T], AccMessage, FinalMessage):-
|
||||
term_to_atom(H, HAtom),
|
||||
atom_concat([AccMessage, '\\n', HAtom], NewMessage),
|
||||
list_to_message(T, NewMessage, FinalMessage).
|
||||
|
||||
read_module_file(library(Module), Module):-
|
||||
!, parse_module_directive(':-'(module(Module, [])), component).
|
||||
read_module_file(File, Module):-
|
||||
parameter(depth(MaxDepth)),
|
||||
nb_getval(depth, Depth),
|
||||
MaxDepth > Depth,
|
||||
split_path_file(File, Path, FileName),
|
||||
catch((working_directory(CurDir,Path), open(FileName, read, S)), _, (parse_module_directive(':-'(module(Module, [])), box3d), fail)),
|
||||
NDepth is Depth + 1,
|
||||
nb_setval(depth, NDepth),
|
||||
repeat,
|
||||
catch(read(S, Next),_,fail),
|
||||
process(Module, Next),
|
||||
nb_setval(depth, Depth),
|
||||
close(S), working_directory(_,CurDir), !.
|
||||
read_module_file(_, _).
|
||||
|
||||
/** @pred process(+ _Module_, + _Term_)

Process one term _Term_ read from the source file of _Module_: succeed
(stopping the read loop) at `end_of_file`; otherwise record module,
use_module/ensure_loaded, and explicitly qualified goals found in the
term, and fail so that the next term is read.

*/
|
||||
process(_, end_of_file):-!.
|
||||
process(_, Term):-
|
||||
parse_module_directive(Term, box), !, fail.
|
||||
process(Module, Term):-
|
||||
parse_use_module_directive(Module, Term), !, fail.
|
||||
process(Module, Term):-
|
||||
find_explicit_qualification(Module, Term), fail.
|
||||
|
||||
find_explicit_qualification(OwnerModule, ':-'(Module:Goal)):-
|
||||
!, explicit_qualification(OwnerModule, Module, Goal).
|
||||
find_explicit_qualification(OwnerModule, ':-'(_Head, Body)):-
|
||||
find_explicit_qualification(OwnerModule, Body).
|
||||
find_explicit_qualification(OwnerModule, (Module:Goal, RestBody)):-
|
||||
!, explicit_qualification(OwnerModule, Module, Goal),
|
||||
find_explicit_qualification(OwnerModule, RestBody).
|
||||
find_explicit_qualification(OwnerModule, (_Goal, RestBody)):-
|
||||
!, find_explicit_qualification(OwnerModule, RestBody).
|
||||
find_explicit_qualification(OwnerModule, Module:Goal):-
|
||||
!, explicit_qualification(OwnerModule, Module, Goal).
|
||||
find_explicit_qualification(_OwnerModule, _Goal).
|
||||
|
||||
explicit_qualification(InModule, ToModule, Goal):-
|
||||
nonvar(Goal), nonvar(ToModule), !,
|
||||
functor(Goal, FunctorName, Arity),
|
||||
\+ seen_module(explicit(InModule, ToModule, FunctorName/Arity)),
|
||||
assertz(seen_module(explicit(InModule, ToModule, FunctorName/Arity))).
|
||||
|
||||
explicit_qualification(InModule, ToModule, Goal):-
|
||||
var(Goal), nonvar(ToModule), !,
|
||||
\+ seen_module(explicit(InModule, ToModule, 'DYNAMIC')),
|
||||
assertz(seen_module(explicit(InModule, ToModule, 'DYNAMIC'))).
|
||||
|
||||
explicit_qualification(InModule, ToModule, Goal):-
|
||||
nonvar(Goal), var(ToModule), !,
|
||||
functor(Goal, FunctorName, Arity),
|
||||
\+ seen_module(explicit(InModule, 'DYNAMIC', FunctorName/Arity)),
|
||||
assertz(seen_module(explicit(InModule, 'DYNAMIC', FunctorName/Arity))).
|
||||
|
||||
explicit_qualification(InModule, ToModule, Goal):-
|
||||
var(Goal), var(ToModule),
|
||||
\+ seen_module(explicit(InModule, 'DYNAMIC', 'DYNAMIC')),
|
||||
assertz(seen_module(explicit(InModule, 'DYNAMIC', 'DYNAMIC'))).
|
||||
|
||||
write_explicit:-
|
||||
seen_module(explicit(InModule, ToModule, _Goal)),
|
||||
\+ seen_module(generate_explicit(InModule, ToModule)),
|
||||
assertz(seen_module(generate_explicit(InModule, ToModule))),
|
||||
all(Goal, seen_module(explicit(InModule, ToModule, Goal)), Goals),
|
||||
list_to_message(Goals, Explicit),
|
||||
atom_concat([InModule, ' -> ', ToModule, ' [label="', Explicit, '",style=dashed]'], NodeConnection),
|
||||
write(NodeConnection), nl, fail.
|
||||
write_explicit.
|
||||
|
||||
/*
|
||||
functor(Goal, FunctorName, Arity),
|
||||
term_to_atom(FunctorName/Arity, Imported),
|
||||
atom_concat([InModule, ' -> ', ToModule, ' [label="', Imported, '",style=dashed]'], NodeConnection),
|
||||
write(NodeConnection), nl.
|
||||
|
||||
atom_concat([InModule, ' -> ', ToModule, ' [label="DYNAMIC",style=dashed]'], NodeConnection),
|
||||
write(NodeConnection), nl.
|
||||
|
||||
functor(Goal, FunctorName, Arity),
|
||||
term_to_atom(FunctorName/Arity, Imported),
|
||||
atom_concat([InModule, ' -> DYNAMIC [label="', Imported, '",style=dashed]'], NodeConnection),
|
||||
write(NodeConnection), nl.
|
||||
|
||||
atom_concat([InModule, ' -> DYNAMIC [label="DYNAMIC",style=dashed]'], NodeConnection),
|
||||
write(NodeConnection), nl.
|
||||
*/
|
||||
|
||||
%% @} @}
|
422
packages/python/swig/yap4py/prolog/c_alarms.yap
Normal file
422
packages/python/swig/yap4py/prolog/c_alarms.yap
Normal file
@ -0,0 +1,422 @@
|
||||
%%% -*- Mode: Prolog; -*-
|
||||
/**
|
||||
* @file c_alarms.yap
|
||||
* @author Theofrastos Mantadelis
|
||||
* @date Tue Nov 17 14:50:03 2015
|
||||
*
|
||||
* @brief Concurrent alarms
|
||||
*
|
||||
*
|
||||
*/
|
||||
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
%
|
||||
% Concurrent alarms was developed at Katholieke Universiteit Leuven
|
||||
%
|
||||
% Copyright 2010
|
||||
% Katholieke Universiteit Leuven
|
||||
%
|
||||
% Contributions to this file:
|
||||
% Author: Theofrastos Mantadelis
|
||||
% $Date: 2011-02-04 16:04:49 +0100 (Fri, 04 Feb 2011) $
|
||||
% $Revision: 11 $
|
||||
% Contributions: The timer implementation is inspired by Bernd Gutmann's timers
|
||||
%
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
%
|
||||
% Artistic License 2.0
|
||||
%
|
||||
% Copyright (c) 2000-2006, The Perl Foundation.
|
||||
%
|
||||
% Everyone is permitted to copy and distribute verbatim copies of this
|
||||
% license document, but changing it is not allowed. Preamble
|
||||
%
|
||||
% This license establishes the terms under which a given free software
|
||||
% Package may be copied, modified, distributed, and/or
|
||||
% redistributed. The intent is that the Copyright Holder maintains some
|
||||
% artistic control over the development of that Package while still
|
||||
% keeping the Package available as open source and free software.
|
||||
%
|
||||
% You are always permitted to make arrangements wholly outside of this
|
||||
% license directly with the Copyright Holder of a given Package. If the
|
||||
% terms of this license do not permit the full use that you propose to
|
||||
% make of the Package, you should contact the Copyright Holder and seek
|
||||
% a different licensing arrangement. Definitions
|
||||
%
|
||||
% "Copyright Holder" means the individual(s) or organization(s) named in
|
||||
% the copyright notice for the entire Package.
|
||||
%
|
||||
% "Contributor" means any party that has contributed code or other
|
||||
% material to the Package, in accordance with the Copyright Holder's
|
||||
% procedures.
|
||||
%
|
||||
% "You" and "your" means any person who would like to copy, distribute,
|
||||
% or modify the Package.
|
||||
%
|
||||
% "Package" means the collection of files distributed by the Copyright
|
||||
% Holder, and derivatives of that collection and/or of those files. A
|
||||
% given Package may consist of either the Standard Version, or a
|
||||
% Modified Version.
|
||||
%
|
||||
% "Distribute" means providing a copy of the Package or making it
|
||||
% accessible to anyone else, or in the case of a company or
|
||||
% organization, to others outside of your company or organization.
|
||||
%
|
||||
% "Distributor Fee" means any fee that you charge for Distributing this
|
||||
% Package or providing support for this Package to another party. It
|
||||
% does not mean licensing fees.
|
||||
%
|
||||
% "Standard Version" refers to the Package if it has not been modified,
|
||||
% or has been modified only in ways explicitly requested by the
|
||||
% Copyright Holder.
|
||||
%
|
||||
% "Modified Version" means the Package, if it has been changed, and such
|
||||
% changes were not explicitly requested by the Copyright Holder.
|
||||
%
|
||||
% "Original License" means this Artistic License as Distributed with the
|
||||
% Standard Version of the Package, in its current version or as it may
|
||||
% be modified by The Perl Foundation in the future.
|
||||
%
|
||||
% "Source" form means the source code, documentation source, and
|
||||
% configuration files for the Package.
|
||||
%
|
||||
% "Compiled" form means the compiled bytecode, object code, binary, or
|
||||
% any other form resulting from mechanical transformation or translation
|
||||
% of the Source form.
|
||||
%
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
%
|
||||
% Permission for Use and Modification Without Distribution
|
||||
%
|
||||
% (1) You are permitted to use the Standard Version and create and use
|
||||
% Modified Versions for any purpose without restriction, provided that
|
||||
% you do not Distribute the Modified Version.
|
||||
%
|
||||
% Permissions for Redistribution of the Standard Version
|
||||
%
|
||||
% (2) You may Distribute verbatim copies of the Source form of the
|
||||
% Standard Version of this Package in any medium without restriction,
|
||||
% either gratis or for a Distributor Fee, provided that you duplicate
|
||||
% all of the original copyright notices and associated disclaimers. At
|
||||
% your discretion, such verbatim copies may or may not include a
|
||||
% Compiled form of the Package.
|
||||
%
|
||||
% (3) You may apply any bug fixes, portability changes, and other
|
||||
% modifications made available from the Copyright Holder. The resulting
|
||||
% Package will still be considered the Standard Version, and as such
|
||||
% will be subject to the Original License.
|
||||
%
|
||||
% Distribution of Modified Versions of the Package as Source
|
||||
%
|
||||
% (4) You may Distribute your Modified Version as Source (either gratis
|
||||
% or for a Distributor Fee, and with or without a Compiled form of the
|
||||
% Modified Version) provided that you clearly document how it differs
|
||||
% from the Standard Version, including, but not limited to, documenting
|
||||
% any non-standard features, executables, or modules, and provided that
|
||||
% you do at least ONE of the following:
|
||||
%
|
||||
% (a) make the Modified Version available to the Copyright Holder of the
|
||||
% Standard Version, under the Original License, so that the Copyright
|
||||
% Holder may include your modifications in the Standard Version. (b)
|
||||
% ensure that installation of your Modified Version does not prevent the
|
||||
% user installing or running the Standard Version. In addition, the
|
||||
% modified Version must bear a name that is different from the name of
|
||||
% the Standard Version. (c) allow anyone who receives a copy of the
|
||||
% Modified Version to make the Source form of the Modified Version
|
||||
% available to others under (i) the Original License or (ii) a license
|
||||
% that permits the licensee to freely copy, modify and redistribute the
|
||||
% Modified Version using the same licensing terms that apply to the copy
|
||||
% that the licensee received, and requires that the Source form of the
|
||||
% Modified Version, and of any works derived from it, be made freely
|
||||
% available in that license fees are prohibited but Distributor Fees are
|
||||
% allowed.
|
||||
%
|
||||
% Distribution of Compiled Forms of the Standard Version or
|
||||
% Modified Versions without the Source
|
||||
%
|
||||
% (5) You may Distribute Compiled forms of the Standard Version without
|
||||
% the Source, provided that you include complete instructions on how to
|
||||
% get the Source of the Standard Version. Such instructions must be
|
||||
% valid at the time of your distribution. If these instructions, at any
|
||||
% time while you are carrying out such distribution, become invalid, you
|
||||
% must provide new instructions on demand or cease further
|
||||
% distribution. If you provide valid instructions or cease distribution
|
||||
% within thirty days after you become aware that the instructions are
|
||||
% invalid, then you do not forfeit any of your rights under this
|
||||
% license.
|
||||
%
|
||||
% (6) You may Distribute a Modified Version in Compiled form without the
|
||||
% Source, provided that you comply with Section 4 with respect to the
|
||||
% Source of the Modified Version.
|
||||
%
|
||||
% Aggregating or Linking the Package
|
||||
%
|
||||
% (7) You may aggregate the Package (either the Standard Version or
|
||||
% Modified Version) with other packages and Distribute the resulting
|
||||
% aggregation provided that you do not charge a licensing fee for the
|
||||
% Package. Distributor Fees are permitted, and licensing fees for other
|
||||
% components in the aggregation are permitted. The terms of this license
|
||||
% apply to the use and Distribution of the Standard or Modified Versions
|
||||
% as included in the aggregation.
|
||||
%
|
||||
% (8) You are permitted to link Modified and Standard Versions with
|
||||
% other works, to embed the Package in a larger work of your own, or to
|
||||
% build stand-alone binary or bytecode versions of applications that
|
||||
% include the Package, and Distribute the result without restriction,
|
||||
% provided the result does not expose a direct interface to the Package.
|
||||
%
|
||||
% Items That are Not Considered Part of a Modified Version
|
||||
%
|
||||
% (9) Works (including, but not limited to, modules and scripts) that
|
||||
% merely extend or make use of the Package, do not, by themselves, cause
|
||||
% the Package to be a Modified Version. In addition, such works are not
|
||||
% considered parts of the Package itself, and are not subject to the
|
||||
% terms of this license.
|
||||
%
|
||||
% General Provisions
|
||||
%
|
||||
% (10) Any use, modification, and distribution of the Standard or
|
||||
% Modified Versions is governed by this Artistic License. By using,
|
||||
% modifying or distributing the Package, you accept this license. Do not
|
||||
% use, modify, or distribute the Package, if you do not accept this
|
||||
% license.
|
||||
%
|
||||
% (11) If your Modified Version has been derived from a Modified Version
|
||||
% made by someone other than you, you are nevertheless required to
|
||||
% ensure that your Modified Version complies with the requirements of
|
||||
% this license.
|
||||
%
|
||||
% (12) This license does not grant you the right to use any trademark,
|
||||
% service mark, tradename, or logo of the Copyright Holder.
|
||||
%
|
||||
% (13) This license includes the non-exclusive, worldwide,
|
||||
% free-of-charge patent license to make, have made, use, offer to sell,
|
||||
% sell, import and otherwise transfer the Package with respect to any
|
||||
% patent claims licensable by the Copyright Holder that are necessarily
|
||||
% infringed by the Package. If you institute patent litigation
|
||||
% (including a cross-claim or counterclaim) against any party alleging
|
||||
% that the Package constitutes direct or contributory patent
|
||||
% infringement, then this Artistic License to you shall terminate on the
|
||||
% date that such litigation is filed.
|
||||
%
|
||||
% (14) Disclaimer of Warranty: THE PACKAGE IS PROVIDED BY THE COPYRIGHT
|
||||
% HOLDER AND CONTRIBUTORS "AS IS' AND WITHOUT ANY EXPRESS OR IMPLIED
|
||||
% WARRANTIES. THE IMPLIED WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
|
||||
% PARTICULAR PURPOSE, OR NON-INFRINGEMENT ARE DISCLAIMED TO THE EXTENT
|
||||
% PERMITTED BY YOUR LOCAL LAW. UNLESS REQUIRED BY LAW, NO COPYRIGHT
|
||||
% HOLDER OR CONTRIBUTOR WILL BE LIABLE FOR ANY DIRECT, INDIRECT,
|
||||
% INCIDENTAL, OR CONSEQUENTIAL DAMAGES ARISING IN ANY WAY OUT OF THE USE
|
||||
% OF THE PACKAGE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
%
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
|
||||
:- module(c_alarms, [set_alarm/3,
|
||||
unset_alarm/1,
|
||||
time_out_call_once/3,
|
||||
timer_start/1,
|
||||
timer_restart/1,
|
||||
timer_stop/2,
|
||||
timer_elapsed/2,
|
||||
timer_pause/2]).
|
||||
|
||||
/** @defgroup c_alarms Concurrent Alarms
|
||||
@ingroup library
|
||||
@{
|
||||
|
||||
This library provides concurrent alarms, i.e. several timed signals may be pending at the same time. To use it, load:
|
||||
`:-use_module(library(c_alarms))`.
|
||||
*/
|
||||
|
||||
|
||||
:- use_module(library(lists), [member/2, memberchk/2, delete/3]).
|
||||
:- use_module(library(ordsets), [ord_add_element/3]).
|
||||
:- use_module(library(apply_macros), [maplist/3]).
|
||||
|
||||
:- dynamic('$timer'/3).
|
||||
|
||||
:- meta_predicate(set_alarm(+, 0, -)).
|
||||
:- meta_predicate(time_out_call_once(+, 0, -)).
|
||||
:- meta_predicate(prove_once(0)).
|
||||
|
||||
:- initialization(local_init).
|
||||
|
||||
local_init:-
|
||||
bb_put(alarms, []),
|
||||
bb_put(identity, 0).
|
||||
|
||||
get_next_identity(ID):-
|
||||
bb_get(identity, ID),
|
||||
NID is ID + 1,
|
||||
bb_put(identity, NID).
|
||||
|
||||
set_alarm(Seconds, Execute, ID):-
|
||||
bb_get(alarms, []),
|
||||
get_next_identity(ID), !,
|
||||
bb_put(alarms, [alarm(Seconds, ID, Execute)]),
|
||||
alarm(Seconds, alarm_handler, _).
|
||||
|
||||
%% set_alarm(+Seconds, +Execute, -ID)
|
||||
%
|
||||
% Calls Execute after a time interval of Seconds has elapsed.
|
||||
% ID is returned so that the alarm can later be unset (its goal will then not be executed).
|
||||
% set_alarm/3 supports multiple & nested settings of alarms.
|
||||
% Known bug: an alarm may trigger within ±1 second of the requested time.
|
||||
%
|
||||
set_alarm(Seconds, Execute, ID):-
|
||||
get_next_identity(ID), !,
|
||||
bb_get(alarms, [alarm(CurrentSeconds, CurrentID, CurrentExecute)|Alarms]),
|
||||
alarm(0, true, Remaining),
|
||||
Elapsed is CurrentSeconds - Remaining - 1,
|
||||
maplist(subtract(Elapsed), [alarm(CurrentSeconds, CurrentID, CurrentExecute)|Alarms], RemainingAlarms),
|
||||
ord_add_element(RemainingAlarms, alarm(Seconds, ID, Execute), [alarm(NewSeconds, NewID, NewToExecute)|NewAlarms]),
|
||||
bb_put(alarms, [alarm(NewSeconds, NewID, NewToExecute)|NewAlarms]),
|
||||
alarm(NewSeconds, alarm_handler, _).
|
||||
set_alarm(Seconds, Execute, ID):-
|
||||
throw(error(permission_error(create, alarm, set_alarm(Seconds, Execute, ID)), 'Non permitted alarm identifier.')).
|
||||
|
||||
subtract(Elapsed, alarm(Seconds, ID, Execute), alarm(NewSeconds, ID, Execute)):-
|
||||
NewSeconds is Seconds - Elapsed.
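% Usage sketch (not part of the original library; the goal below is purely
% illustrative):
%
%   ?- use_module(library(c_alarms)).
%   ?- set_alarm(5, format('five seconds have passed~n'), ID).
%   ?- unset_alarm(ID).   % cancel it before it fires, if desired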
|
||||
|
||||
%% unset_alarm(+ID)
|
||||
%
|
||||
% It will unschedule the alarm.
|
||||
% It will not affect other concurrent alarms.
|
||||
%
|
||||
unset_alarm(ID):-
|
||||
\+ ground(ID),
|
||||
throw(error(instantiation_error, 'Alarm ID needs to be instantiated.')).
|
||||
unset_alarm(ID):-
|
||||
bb_get(alarms, Alarms),
|
||||
\+ memberchk(alarm(_Seconds, ID, _Execute), Alarms),
|
||||
throw(error(existence_error(alarm, unset_alarm(ID)), 'Alarm does not exist.')).
|
||||
unset_alarm(ID):-
|
||||
alarm(0, true, Remaining),
|
||||
bb_get(alarms, Alarms),
|
||||
[alarm(Seconds, _, _)|_] = Alarms,
|
||||
Elapsed is Seconds - Remaining - 1,
|
||||
delete_alarm(Alarms, ID, NewAlarms),
|
||||
bb_put(alarms, NewAlarms),
|
||||
(NewAlarms = [alarm(NewSeconds, _, _)|_] ->
|
||||
RemainingSeconds is NewSeconds - Elapsed,
|
||||
alarm(RemainingSeconds, alarm_handler, _)
|
||||
;
|
||||
true
|
||||
).
|
||||
|
||||
delete_alarm(Alarms, ID, NewAlarms):-
|
||||
memberchk(alarm(Seconds, ID, Execute), Alarms),
|
||||
delete(Alarms, alarm(Seconds, ID, Execute), NewAlarms).
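% alarm_handler/0 runs when the earliest pending alarm fires: the head of the
% alarms list is the one that was actually scheduled, so its delay is
% subtracted from every other deadline, alarms that reach zero are executed
% together with the one that fired, and the next non-zero alarm (if any) is
% rescheduled.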
|
||||
|
||||
alarm_handler:-
|
||||
bb_get(alarms, [alarm(_, _, CurrentExecute)|[]]),
|
||||
bb_put(alarms, []),
|
||||
call(CurrentExecute).
|
||||
alarm_handler:-
|
||||
bb_get(alarms, [alarm(Elapsed, CurrentID, CurrentExecute)|Alarms]),
|
||||
maplist(subtract(Elapsed), Alarms, NewAlarms),
|
||||
find_zeros(NewAlarms, ZeroAlarms),
|
||||
findall(alarm(S, ID, E), (member(alarm(S, ID, E), NewAlarms), S > 0), NonZeroAlarms),
|
||||
bb_put(alarms, NonZeroAlarms),
|
||||
(NonZeroAlarms = [alarm(NewSeconds, _, _)|_] ->
|
||||
alarm(NewSeconds, alarm_handler, _)
|
||||
;
|
||||
true
|
||||
),
|
||||
execute([alarm(0, CurrentID, CurrentExecute)|ZeroAlarms]).
|
||||
|
||||
find_zeros([], []).
|
||||
find_zeros([alarm(0, ID, E)|T], [alarm(0, ID, E)|R]):-
|
||||
find_zeros(T, R).
|
||||
find_zeros([alarm(S, _, _)|T], R):-
|
||||
S > 0,
|
||||
find_zeros(T, R).
|
||||
|
||||
execute([]).
|
||||
execute([alarm(_, _, Execute)|R]):-
|
||||
call(Execute),
|
||||
execute(R).
|
||||
|
||||
%% time_out_call_once(+Seconds, +Goal, -Return)
|
||||
%
|
||||
% It executes the goal Goal once and returns its success or failure in Return.
|
||||
% If the goal times out in Seconds then Return = timeout.
|
||||
time_out_call_once(Seconds, Goal, Return):-
|
||||
bb_get(identity, ID),
|
||||
set_alarm(Seconds, throw(timeout(ID)), ID),
|
||||
catch((
|
||||
prove_once(Goal, Return),
|
||||
unset_alarm(ID))
|
||||
, Exception, (
|
||||
(Exception == timeout(ID) ->
|
||||
Return = timeout
|
||||
;
|
||||
unset_alarm(ID),
|
||||
throw(Exception)
|
||||
))).
|
||||
|
||||
prove_once(Goal, success):-
|
||||
once(Goal), !.
|
||||
prove_once(_Goal, failure).
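% Sketch of time_out_call_once/3 (illustrative goals only):
%
%   ?- time_out_call_once(2, (repeat, fail), R).
%   R = timeout.     % the looping goal is abandoned after roughly 2 seconds
%   ?- time_out_call_once(2, true, R).
%   R = success.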
|
||||
|
||||
timer_start(Name):-
|
||||
\+ ground(Name),
|
||||
throw(error(instantiation_error, 'Timer name needs to be instantiated.')).
|
||||
timer_start(Name):-
|
||||
'$timer'(Name, _, _),
|
||||
throw(error(permission_error(create, timer, timer_start(Name)), 'Timer already exists.')).
|
||||
timer_start(Name):-
|
||||
statistics(walltime, [StartTime, _]),
|
||||
assertz('$timer'(Name, running, StartTime)).
|
||||
|
||||
timer_restart(Name):-
|
||||
\+ ground(Name),
|
||||
throw(error(instantiation_error, 'Timer name needs to be instantiated.')).
|
||||
timer_restart(Name):-
|
||||
\+ '$timer'(Name, _, _), !,
|
||||
statistics(walltime, [StartTime, _]),
|
||||
assertz('$timer'(Name, running, StartTime)).
|
||||
timer_restart(Name):-
|
||||
retract('$timer'(Name, running, _)), !,
|
||||
statistics(walltime, [StartTime, _]),
|
||||
assertz('$timer'(Name, running, StartTime)).
|
||||
timer_restart(Name):-
|
||||
retract('$timer'(Name, paused, Duration)),
|
||||
statistics(walltime, [StartTime, _]),
|
||||
Elapsed is StartTime - Duration,
|
||||
assertz('$timer'(Name, running, Elapsed)).
|
||||
|
||||
timer_stop(Name, Elapsed):-
|
||||
\+ '$timer'(Name, _, _),
|
||||
throw(error(existence_error(timer, timer_stop(Name, Elapsed)), 'Timer does not exist.')).
|
||||
timer_stop(Name, Elapsed):-
|
||||
retract('$timer'(Name, running, StartTime)), !,
|
||||
statistics(walltime, [EndTime, _]),
|
||||
Elapsed is EndTime - StartTime.
|
||||
timer_stop(Name, Elapsed):-
|
||||
retract('$timer'(Name, paused, Elapsed)).
|
||||
|
||||
timer_elapsed(Name, Elapsed):-
|
||||
\+ '$timer'(Name, _, _),
|
||||
throw(error(existence_error(timer, timer_elapsed(Name, Elapsed)), 'Timer does not exist.')).
|
||||
timer_elapsed(Name, Elapsed):-
|
||||
'$timer'(Name, running, StartTime), !,
|
||||
statistics(walltime, [EndTime, _]),
|
||||
Elapsed is EndTime - StartTime.
|
||||
timer_elapsed(Name, Elapsed):-
|
||||
'$timer'(Name, paused, Elapsed).
|
||||
|
||||
timer_pause(Name, Elapsed):-
|
||||
\+ '$timer'(Name, _, _),
|
||||
throw(error(existence_error(timer, timer_pause(Name, Elapsed)), 'Timer does not exist.')).
|
||||
timer_pause(Name, Elapsed):-
|
||||
'$timer'(Name, paused, _),
|
||||
throw(error(permission_error(timer, timer_pause(Name, Elapsed)), 'Timer already paused.')).
|
||||
timer_pause(Name, Elapsed):-
|
||||
retract('$timer'(Name, _, StartTime)),
|
||||
statistics(walltime, [EndTime, _]),
|
||||
Elapsed is EndTime - StartTime,
|
||||
assertz('$timer'(Name, paused, Elapsed)).
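% Timer usage sketch; elapsed times come from statistics/2 walltime and are
% therefore reported in milliseconds. The timer name 'build' is illustrative.
%
%   ?- timer_start(build).
%   ?- timer_elapsed(build, Ms).    % running total so far
%   ?- timer_pause(build, Ms).      % freeze the count
%   ?- timer_restart(build).        % resume counting
%   ?- timer_stop(build, Total).    % final total; the timer is removed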
|
||||
|
||||
/**
|
||||
@}
|
||||
*/
|
230
packages/python/swig/yap4py/prolog/charsio.yap
Normal file
230
packages/python/swig/yap4py/prolog/charsio.yap
Normal file
@ -0,0 +1,230 @@
|
||||
/*************************************************************************
|
||||
* *
|
||||
* YAP Prolog *
|
||||
* *
|
||||
* Yap Prolog was developed at NCCUP - Universidade do Porto *
|
||||
* *
|
||||
* Copyright L.Damas, V.S.Costa and Universidade do Porto 1985-1997 *
|
||||
* *
|
||||
**************************************************************************
|
||||
* *
|
||||
* File: charsio.yap *
|
||||
* Last rev: 5/12/99 *
|
||||
* mods: *
|
||||
* comments: I/O on character strings *
|
||||
* *
|
||||
*************************************************************************/
|
||||
|
||||
/**
|
||||
* @file charsio.yap
|
||||
* @author VITOR SANTOS COSTA <vsc@VITORs-MBP.lan>
|
||||
* @date Tue Nov 17 01:17:33 2015
|
||||
*
|
||||
* @brief Several operations on text.
|
||||
* @{
|
||||
*
|
||||
*/
|
||||
|
||||
|
||||
:- module(charsio, [
|
||||
format_to_chars/3,
|
||||
format_to_chars/4,
|
||||
write_to_chars/3,
|
||||
write_to_chars/2,
|
||||
atom_to_chars/3,
|
||||
atom_to_chars/2,
|
||||
number_to_chars/3,
|
||||
number_to_chars/2,
|
||||
read_from_chars/2,
|
||||
open_chars_stream/2,
|
||||
with_output_to_chars/2,
|
||||
with_output_to_chars/3,
|
||||
with_output_to_chars/4,
|
||||
term_to_atom/2
|
||||
]).
|
||||
|
||||
/** @defgroup charsio Operations on Sequences of Codes.
|
||||
@ingroup library
|
||||
|
||||
Term to sequence of codes conversion, mostly replaced by engine code.
|
||||
You can use the following directive to load the files.
|
||||
|
||||
|
||||
~~~~~~~
|
||||
:- use_module(library(charsio)).
|
||||
~~~~~~~
|
||||
|
||||
It includes the following predicates:
|
||||
- atom_to_chars/2
|
||||
- atom_to_chars/3
|
||||
- format_to_chars/3
|
||||
- format_to_chars/4
|
||||
- number_to_chars/2
|
||||
- number_to_chars/3
|
||||
- open_chars_stream/2
|
||||
- read_from_chars/2
|
||||
- term_to_atom/2
|
||||
- with_output_to_chars/2
|
||||
- with_output_to_chars/3
|
||||
- with_output_to_chars/4
|
||||
- write_to_chars/2
|
||||
- write_to_chars/3
|
||||
|
||||
*/
|
||||
|
||||
:- meta_predicate(with_output_to_chars(0,?)).
|
||||
:- meta_predicate(with_output_to_chars(0,-,?)).
|
||||
:- meta_predicate(with_output_to_chars(0,-,?,?)).
|
||||
|
||||
/** @pred format_to_chars(+ _Form_, + _Args_, - _Result_)
|
||||
|
||||
Execute the built-in procedure format/2 with form _Form_ and
|
||||
arguments _Args_ outputting the result to the string of character
|
||||
codes _Result_.
|
||||
*/
|
||||
format_to_chars(Format, Args, Codes) :-
|
||||
format(codes(Codes), Format, Args).
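% Example (sketch):
%   ?- format_to_chars('~w-~d', [a, 3], Cs), atom_codes(A, Cs).
%   A = 'a-3'.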
|
||||
|
||||
/** @pred format_to_chars(+ _Form_, + _Args_, - _Result_, - _Result0_)
|
||||
|
||||
Execute the built-in procedure format/2 with form _Form_ and
|
||||
arguments _Args_ outputting the result to the difference list of
|
||||
character codes _Result-Result0_.
|
||||
|
||||
*/
|
||||
format_to_chars(Format, Args, OUT, L0) :-
|
||||
format(codes(OUT, L0), Format, Args).
|
||||
|
||||
/** @pred write_to_chars(+ _Term_, - _Result_)
|
||||
|
||||
Execute the built-in procedure write/1 with argument _Term_
|
||||
outputting the result to the string of character codes _Result_.
|
||||
*/
|
||||
write_to_chars(Term, Codes) :-
|
||||
format(codes(Codes), '~w', [Term]).
|
||||
|
||||
/** @pred write_to_chars(+ _Term_, - _Result0_, - _Result_)
|
||||
|
||||
Execute the built-in procedure write/1 with argument _Term_
|
||||
outputting the result to the difference list of character codes
|
||||
_Result-Result0_.
|
||||
*/
|
||||
write_to_chars(Term, Out, Tail) :-
|
||||
format(codes(Out,Tail),'~w',[Term]).
|
||||
|
||||
/** @pred atom_to_chars(+ _Atom_, - _Result_)
|
||||
|
||||
Convert the atom _Atom_ to the string of character codes
|
||||
_Result_.
|
||||
*/
|
||||
atom_to_chars(Atom, OUT) :-
|
||||
atom_codes(Atom, OUT).
|
||||
|
||||
/** @pred atom_to_chars(+ _Atom_, - _Result0_, - _Result_)
|
||||
|
||||
Convert the atom _Atom_ to the difference list of character codes
|
||||
_Result-Result0_.
|
||||
*/
|
||||
atom_to_chars(Atom, L0, OUT) :-
|
||||
format(codes(L0, OUT), '~a', [Atom]).
|
||||
|
||||
/** @pred number_to_chars(+ _Number_, - _Result_)
|
||||
|
||||
Convert the number _Number_ to the string of character codes
|
||||
_Result_.
|
||||
*/
|
||||
number_to_chars(Number, OUT) :-
|
||||
number_codes(Number, OUT).
|
||||
|
||||
/** @pred number_to_chars(+ _Number_, - _Result0_, - _Result_)
|
||||
|
||||
Convert the atom _Number_ to the difference list of character codes
|
||||
_Result-Result0_.
|
||||
*/
|
||||
number_to_chars(Number, L0, OUT) :-
|
||||
var(Number), !,
|
||||
throw(error(instantiation_error,number_to_chars(Number, L0, OUT))).
|
||||
number_to_chars(Number, L0, OUT) :-
|
||||
number(Number), !,
|
||||
format(codes(L0, OUT), '~w', [Number]).
|
||||
number_to_chars(Number, L0, OUT) :-
|
||||
throw(error(type_error(number,Number),number_to_chars(Number, L0, OUT))).
|
||||
|
||||
/** @pred open_chars_stream(+ _Chars_, - _Stream_)
|
||||
|
||||
Open the list of character codes _Chars_ as a stream _Stream_.
|
||||
*/
|
||||
open_chars_stream(Codes, Stream) :-
|
||||
open_chars_stream(Codes, Stream, '').
|
||||
|
||||
open_chars_stream(Codes, Stream, Postfix) :-
|
||||
predicate_property(memory_file:open_memory_file(_,_,_),_), !,
|
||||
memory_file:new_memory_file(MF),
|
||||
memory_file:open_memory_file(MF, write, Out),
|
||||
format(Out, '~s~w', [Codes, Postfix]),
|
||||
close(Out),
|
||||
memory_file:open_memory_file(MF, read, Stream,
|
||||
[ free_on_close(true)
|
||||
]).
|
||||
open_chars_stream(Codes, Stream, Postfix) :-
|
||||
ensure_loaded(library(memfile)),
|
||||
open_chars_stream(Codes, Stream, Postfix).
|
||||
|
||||
/** @pred with_output_to_chars(? _Goal_, - _Chars_)
|
||||
|
||||
Execute goal _Goal_ such that its standard output will be sent to a
|
||||
memory buffer. After successful execution the contents of the memory
|
||||
buffer will be converted to the list of character codes _Chars_.
|
||||
*/
|
||||
with_output_to_chars(Goal, Codes) :-
|
||||
with_output_to(codes(Codes), Goal).
|
||||
|
||||
/** @pred with_output_to_chars(? _Goal_, ? _Chars0_, - _Chars_)
|
||||
|
||||
Execute goal _Goal_ such that its standard output will be sent to a
|
||||
memory buffer. After successful execution the contents of the memory
|
||||
buffer will be converted to the difference list of character codes
|
||||
_Chars-Chars0_.
|
||||
*/
|
||||
with_output_to_chars(Goal, Codes, L0) :-
|
||||
with_output_to(codes(Codes, L0), Goal).
|
||||
%% with_output_to_chars(:Goal, -Stream, -Codes, ?Tail) is det.
|
||||
%
|
||||
% As with_output_to_chars/2, but Stream is unified with the
|
||||
% temporary stream.
|
||||
|
||||
/** @pred with_output_to_chars(? _Goal_, - _Stream_, ? _Chars0_, - _Chars_)
|
||||
|
||||
|
||||
Execute goal _Goal_ such that its standard output will be sent to a
|
||||
memory buffer. After successful execution the contents of the memory
|
||||
buffer will be converted to the difference list of character codes
|
||||
_Chars-Chars0_ and _Stream_ receives the stream corresponding to
|
||||
the memory buffer.
|
||||
|
||||
*/
|
||||
with_output_to_chars(Goal, Stream, Codes, Tail) :-
|
||||
with_output_to(codes(Codes, Tail), with_stream(Stream, Goal)).
|
||||
|
||||
with_stream(Stream, Goal) :-
|
||||
current_output(Stream),
|
||||
call(Goal).
|
||||
|
||||
/** @pred read_from_chars(+ _Chars_, - _Term_)
|
||||
|
||||
Parse the list of character codes _Chars_ and return the result in
|
||||
the term _Term_. The character codes to be read must terminate with
|
||||
a dot character such that either (i) the dot character is followed by
|
||||
blank characters; or (ii) the dot character is the last character in the
|
||||
string.
|
||||
|
||||
@compat The SWI-Prolog version does not require Codes to end
|
||||
in a full-stop.
|
||||
*/
|
||||
read_from_chars("", end_of_file) :- !.
|
||||
read_from_chars(List, Term) :-
|
||||
atom_to_term(List, Term, _).
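% Example (sketch); note the terminating full stop required by this version:
%   ?- read_from_chars("foo(X, bar).", T).
%   T = foo(_A, bar).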
|
||||
/**
|
||||
@}
|
||||
*/
|
||||
|
96
packages/python/swig/yap4py/prolog/clauses.yap
Normal file
96
packages/python/swig/yap4py/prolog/clauses.yap
Normal file
@ -0,0 +1,96 @@
|
||||
/**
|
||||
* @file clauses.yap
|
||||
* @author VITOR SANTOS COSTA <vsc@VITORs-MBP.lan>
|
||||
* @date Tue Nov 17 14:51:30 2015
|
||||
*
|
||||
* @brief Utilities for clause manipulation.
|
||||
*
|
||||
*
|
||||
*/
|
||||
|
||||
:- module(clauses,
|
||||
[list2conj/2,
|
||||
conj2list/2,
|
||||
clauselength/2]).
|
||||
|
||||
%% @{
|
||||
|
||||
/**
|
||||
* @defgroup clauses Clause Manipulation
|
||||
* @ingroup library
|
||||
|
||||
This library supports a number of useful utilities that come up over and
|
||||
over again when manipulating Prolog programs. This will include
|
||||
operations and conversion to other structures.
|
||||
|
||||
@author Vitor Santos Costa
|
||||
*/
|
||||
|
||||
/** conj2list( +Conj, -List) is det
|
||||
Generate a list from a conjunction of literals.
|
||||
|
||||
It is often easier to apply operations on lists than on clauses
|
||||
*/
|
||||
conj2list( M:Conj, List ) :-
|
||||
conj2list_( Conj, M, List, [] ).
|
||||
|
||||
conj2list( Conj, List ) :-
|
||||
conj2list_( Conj, List, [] ).
|
||||
|
||||
|
||||
conj2list_( C ) -->
|
||||
{ var(C) },
|
||||
!,
|
||||
[C].
|
||||
conj2list_( true ) --> !.
|
||||
conj2list_( (C1, C2) ) -->
|
||||
!,
|
||||
conj2list_( C1 ),
|
||||
conj2list_( C2 ).
|
||||
conj2list_( C ) -->
|
||||
[C].
|
||||
|
||||
conj2list_( C, M ) -->
|
||||
{ var(C) },
|
||||
!,
|
||||
[M: C].
|
||||
conj2list_( true , _) --> !.
|
||||
conj2list_( (C1, C2), M ) -->
|
||||
!,
|
||||
conj2list_( C1, M ),
|
||||
conj2list_( C2, M ).
|
||||
conj2list_( C, M ) -->
|
||||
{ strip_module(M:C, NM, NC) },
|
||||
[NM:NC].
|
||||
|
||||
/** list2conj( +List, -Conj) is det
|
||||
Generate a conjunction from a list of literals.
|
||||
|
||||
Notice that this relies on indexing within the list to avoid creating
|
||||
choice-points.
|
||||
*/
|
||||
list2conj([], true).
|
||||
list2conj([Last], Last).
|
||||
list2conj([Head,Next|Tail], (Head,Goals)) :-
|
||||
list2conj([Next|Tail], Goals).
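% Example (sketch) of the two conversions:
%   ?- conj2list((a, b, c), L).
%   L = [a, b, c].
%   ?- list2conj([a, b, c], C).
%   C = (a, b, c).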
|
||||
|
||||
/** clauselength( +Clause, -Length) is det
|
||||
Count the number of literals in a clause (head counts as one).
|
||||
|
||||
Notice that this is 1+length(conj2list), as we ignore disjunctions.
|
||||
*/
|
||||
clauselength( (_Head :- Conj), Length ) :-
|
||||
clauselength( Conj, Length, 1 ).
|
||||
|
||||
|
||||
clauselength( C, I1, I ) :-
|
||||
var(C),
|
||||
!,
|
||||
I1 is I+1.
|
||||
clauselength( (C1, C2), I2, I ) :- !,
|
||||
clauselength( C1, I1, I ),
|
||||
clauselength( C2, I2, I1 ).
|
||||
clauselength( _C, I1, I ) :-
|
||||
I1 is I+1.
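% Example (sketch): one head plus three body literals.
%   ?- clauselength((p(X) :- q(X), r(X), s(X)), N).
%   N = 4.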
|
||||
|
||||
%%@}
|
216
packages/python/swig/yap4py/prolog/coinduction.yap
Normal file
216
packages/python/swig/yap4py/prolog/coinduction.yap
Normal file
@ -0,0 +1,216 @@
|
||||
/**
|
||||
* @file coinduction.yap
|
||||
* @author VITOR SANTOS COSTA <vsc@VITORs-MBP.lan>, Arvin Bansal,
|
||||
*
|
||||
*
|
||||
* @date Tue Nov 17 14:55:02 2015
|
||||
*
|
||||
* @brief Co-inductive execution
|
||||
*
|
||||
*
|
||||
*/
|
||||
|
||||
|
||||
/*************************************************************************
|
||||
* *
|
||||
* YAP Prolog *
|
||||
* *
|
||||
* Yap Prolog was developed at NCCUP - Universidade do Porto *
|
||||
* *
|
||||
* Copyright L.Damas, V.S.Costa and Universidade do Porto 1985-1997 *
|
||||
* *
|
||||
**************************************************************************
|
||||
* *
|
||||
* File: coinduction.yap *
|
||||
* Last rev: 8/2/88 *
|
||||
* mods: *
|
||||
* comments: coinduction support for Prolog *
|
||||
* *
|
||||
*************************************************************************/
|
||||
|
||||
% :- yap_flag(unknown,error).
|
||||
% :- style_check(all).
|
||||
|
||||
%
|
||||
% Code originally written by Arvin Bansal and Vitor Santos Costa
|
||||
% Includes nice extensions from Jan Wielemaker (from the SWI version).
|
||||
%
|
||||
|
||||
:- module(coinduction,
|
||||
[ (coinductive)/1,
|
||||
op(1150, fx, (coinductive))
|
||||
]).
|
||||
|
||||
:- use_module(library(error)).
|
||||
|
||||
/** <module> coinduction Co-Logic Programming
|
||||
@ingroup library
|
||||
|
||||
This simple module implements the directive coinductive/1 as described
|
||||
in "Co-Logic Programming: Extending Logic Programming with Coinduction"
|
||||
by Luke Simon et al. The idea behind coinduction is that a goal succeeds
|
||||
if it unifies to a parent goal. This enables some interesting programs,
|
||||
notably on infinite trees (cyclic terms).
|
||||
|
||||
~~~~
|
||||
:- use_module(library(coinduction)).
|
||||
|
||||
:- coinductive stream/1.
|
||||
stream([H|T]) :- i(H), stream(T).
|
||||
|
||||
% inductive
|
||||
i(0).
|
||||
i(s(N)) :- i(N).
|
||||
|
||||
?- X=[s(s(A))|X], stream(X).
|
||||
|
||||
A = 0,
|
||||
X = [s(s(0)),**]
|
||||
~~~~
|
||||
|
||||
With coinduction, stream/1 is thus true for any cyclic list whose elements satisfy i/1,
|
||||
regardless of the cycle-length.
|
||||
|
||||
@bug Programs mixing normal predicates and coinductive predicates must
|
||||
be _stratified_. The theory does not apply to normal Prolog calling
|
||||
coinductive predicates, calling normal Prolog predicates, etc.
|
||||
|
||||
Stratification is not checked or enforced in any other way and thus
|
||||
left as a responsibility to the user.
|
||||
@see "Co-Logic Programming: Extending Logic Programming with Coinduction"
|
||||
by Luke Simon et al.
|
||||
|
||||
@{
|
||||
|
||||
*/
|
||||
|
||||
:- meta_predicate coinductive(:).
|
||||
|
||||
:- dynamic coinductive/3.
|
||||
|
||||
|
||||
%-----------------------------------------------------
|
||||
|
||||
coinductive(Spec) :-
|
||||
var(Spec),
|
||||
!,
|
||||
throw(error(instantiation_error,coinductive(Spec))).
|
||||
coinductive(Module:Spec) :-
|
||||
coinductive_declaration(Spec, Module, coinductive(Module:Spec)).
|
||||
coinductive(Spec) :-
|
||||
prolog_load_context(module, Module),
|
||||
coinductive_declaration(Spec, Module, coinductive(Spec)).
|
||||
|
||||
coinductive_declaration(Spec, _M, G) :-
|
||||
var(Spec),
|
||||
!,
|
||||
throw(error(instantiation_error,G)).
|
||||
coinductive_declaration((A,B), M, G) :- !,
|
||||
coinductive_declaration(A, M, G),
|
||||
coinductive_declaration(B, M, G).
|
||||
coinductive_declaration(M:Spec, _, G) :- !,
|
||||
coinductive_declaration(Spec, M, G).
|
||||
coinductive_declaration(Spec, M, _G) :-
|
||||
valid_pi(Spec, F, N),
|
||||
functor(S,F,N),
|
||||
atomic_concat(['__coinductive__',F,'/',N],NF),
|
||||
functor(NS,NF,N),
|
||||
match_args(N,S,NS),
|
||||
atomic_concat(['__stack_',M,':',F,'/',N],SF),
|
||||
nb_setval(SF, _),
|
||||
assert((M:S :-
|
||||
b_getval(SF,L),
|
||||
coinduction:in_stack(S, L, End),
|
||||
(
|
||||
nonvar(End)
|
||||
->
|
||||
true
|
||||
;
|
||||
End = [S|_],
|
||||
M:NS)
|
||||
)
|
||||
),
|
||||
assert(coinduction:coinductive(S,M,NS)).
|
||||
|
||||
valid_pi(Name/Arity, Name, Arity) :-
|
||||
must_be(atom, Name),
|
||||
must_be(integer, Arity).
|
||||
|
||||
match_args(0,_,_) :- !.
|
||||
match_args(I,S1,S2) :-
|
||||
arg(I,S1,A),
|
||||
arg(I,S2,A),
|
||||
I1 is I-1,
|
||||
match_args(I1,S1,S2).
|
||||
|
||||
%-----------------------------------------------------
|
||||
|
||||
co_term_expansion((M:H :- B), _, (M:NH :- B)) :- !,
|
||||
co_term_expansion((H :- B), M, (NH :- B)).
|
||||
co_term_expansion((H :- B), M, (NH :- B)) :- !,
|
||||
coinductive(H, M, NH), !.
|
||||
co_term_expansion(H, M, NH) :-
|
||||
coinductive(H, M, NH), !.
|
||||
|
||||
user:term_expansion(M:Cl,M:NCl ) :- !,
|
||||
co_term_expansion(Cl, M, NCl).
|
||||
|
||||
user:term_expansion(G, NG) :-
|
||||
prolog_load_context(module, Module),
|
||||
co_term_expansion(G, Module, NG).
|
||||
|
||||
|
||||
%-----------------------------------------------------
|
||||
|
||||
in_stack(_, V, V) :- var(V), !.
|
||||
in_stack(G, [G|_], [G|_]) :- !.
|
||||
in_stack(G, [_|T], End) :- in_stack(G, T, End).
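% in_stack/3 walks an open-ended (partial) list of ancestor goals: it either
% finds an entry that unifies with G, or stops at the unbound tail End so the
% caller can push G there. Sketch:
%   ?- in_stack(g(1), [g(0)|T], End).
%   End = T.    % tail still unbound, so g(1) is not yet on the stack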
|
||||
|
||||
writeG_val(G_var) :-
|
||||
b_getval(G_var, G_val),
|
||||
write(G_var), write(' ==> '), write(G_val), nl.
|
||||
|
||||
%-----------------------------------------------------
|
||||
|
||||
/**
|
||||
|
||||
Some examples from Coinductive Logic Programming and its Applications by Gopal Gupta et al, ICLP 97
|
||||
|
||||
~~~~
|
||||
:- coinductive stream/1.
|
||||
stream([H|T]) :- i(H), stream(T).
|
||||
|
||||
% inductive
|
||||
i(0).
|
||||
i(s(N)) :- i(N).
|
||||
|
||||
% Are there infinitely many "occurrences" of arg1 in arg2?
|
||||
:- coinductive comember/2.
|
||||
|
||||
comember(X, L) :-
|
||||
drop(X, L, L1),
|
||||
comember(X, L1).
|
||||
|
||||
% Drop some prefix of arg2 upto an "occurrence" of arg1 from arg2,
|
||||
% yielding arg3.
|
||||
% ("Occurrence" of X = something unifiable with X.)
|
||||
%:- table(drop/3). % not working; needs tabling supporting cyclic terms!
|
||||
drop(H, [H| T], T).
|
||||
drop(H, [_| T], T1) :-
|
||||
drop(H, T, T1).
|
||||
|
||||
|
||||
% X = [1, 2, 3| X], comember(E, X).
|
||||
|
||||
user:p(E) :-
|
||||
X = [1, 2, 3| X],
|
||||
comember(E, X),
|
||||
format('~w~n',[E]),
|
||||
get_code(_),
|
||||
fail.
|
||||
|
||||
~~~~
|
||||
|
||||
@}
|
||||
*/
|
||||
|
70
packages/python/swig/yap4py/prolog/dbqueues.yap
Normal file
70
packages/python/swig/yap4py/prolog/dbqueues.yap
Normal file
@ -0,0 +1,70 @@
|
||||
/**
|
||||
* @file dbqueues.yap
|
||||
* @author VITOR SANTOS COSTA <vsc@VITORs-MBP.lan>
|
||||
* @date Tue Nov 17 15:01:49 2015
|
||||
*
|
||||
* @brief A library supporting non-backtrackable queues.
|
||||
*
|
||||
*
|
||||
*/
|
||||
|
||||
|
||||
:- module(nbqueue, [
|
||||
nb_enqueue/2,
|
||||
nb_dequeue/2,
|
||||
nb_clean_queue/1,
|
||||
nb_size/2
|
||||
]).
|
||||
|
||||
/**
|
||||
* @defgroup dbqueues Non-backtrackable queues in YAP.
|
||||
* @ingroup library
|
||||
|
||||
A library implementing queues of non-backtrackable terms.
|
||||
|
||||
*/
|
||||
|
||||
|
||||
:- unhide_atom('$init_nb_queue').
|
||||
:- unhide_atom('$nb_enqueue').
|
||||
:- unhide_atom('$nb_dequeue').
|
||||
|
||||
|
||||
nb_enqueue(Name,El) :- var(Name),
|
||||
throw(error(instantiation_error(Name),nb_enqueue(Name,El))).
|
||||
nb_enqueue(Name,El) :- \+ atom(Name), !,
|
||||
throw(error(type_error_atom(Name),nb_enqueue(Name,El))).
|
||||
nb_enqueue(Name,El) :-
|
||||
recorded('$nb_queue',[Name|Ref],_), !,
|
||||
prolog:'$nb_enqueue'(Ref, El).
|
||||
nb_enqueue(Name,El) :-
|
||||
prolog:'$init_nb_queue'(Ref),
|
||||
recorda('$nb_queue',[Name|Ref],_),
|
||||
prolog:'$nb_enqueue'(Ref,El).
|
||||
|
||||
|
||||
nb_dequeue(Name,El) :- var(Name),
|
||||
throw(error(instantiation_error(Name),nb_dequeue(Name,El))).
|
||||
nb_dequeue(Name,El) :- \+ atom(Name), !,
|
||||
throw(error(type_error_atom(Name),nb_dequeue(Name,El))).
|
||||
nb_dequeue(Name,El) :-
|
||||
recorded('$nb_queue',[Name|Ref],R),
|
||||
( prolog:'$nb_dequeue'(Ref, El) ->
|
||||
true
|
||||
;
|
||||
erase(R),
|
||||
fail
|
||||
).
|
||||
|
||||
nb_clean_queue(Name) :-
|
||||
recorded('$nb_queue',[Name|Ref],R), !,
|
||||
erase(R),
|
||||
nb_dequeue_all(Ref).
|
||||
nb_clean_queue(_).
|
||||
|
||||
nb_dequeue_all(Ref) :-
|
||||
( prolog:'$nb_dequeue'(Ref, _) -> nb_dequeue_all(Ref) ; true ).
|
||||
|
||||
nb_size(Name, Size) :-
recorded('$nb_queue',[Name|Ref],_), !,
prolog:'$nb_size'(Ref, Size).
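% Usage sketch (the queue name 'jobs' is illustrative):
%   ?- nb_enqueue(jobs, job(1)), nb_enqueue(jobs, job(2)).
%   ?- nb_dequeue(jobs, X).
%   X = job(1).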
|
||||
|
208
packages/python/swig/yap4py/prolog/dbusage.yap
Normal file
208
packages/python/swig/yap4py/prolog/dbusage.yap
Normal file
@ -0,0 +1,208 @@
|
||||
/**
|
||||
* @file dbusage.yap
|
||||
* @author VITOR SANTOS COSTA <vsc@VITORs-MBP.lan>
|
||||
* @date Tue Nov 17 15:04:52 2015
|
||||
*
|
||||
* @brief Useful statistics on memory usage
|
||||
*
|
||||
*
|
||||
*/
|
||||
|
||||
:- module(dbusage, [
|
||||
db_usage/0,
|
||||
db_static/0,
|
||||
db_static/1,
|
||||
db_dynamic/0,
|
||||
db_dynamic/1
|
||||
]).
|
||||
|
||||
/**
|
||||
* @defgroup dbusage Memory Usage in Prolog Data-Base
|
||||
* @ingroup library
|
||||
@{
|
||||
|
||||
This library provides a set of utilities for studying memory usage in YAP.
|
||||
The following routines are available once included with the
|
||||
`use_module(library(dbusage))` command.
|
||||
*/
|
||||
|
||||
/** @pred db_usage
|
||||
Give a general overview of database usage in the system.
|
||||
*/
|
||||
db_usage :-
|
||||
statistics(heap,[HeapUsed,HeapFree]),
|
||||
statistics(local_stack,[GInU,FreeS]),
|
||||
statistics(global_stack,[SInU,_]),
|
||||
statistics(trail,[TInU,FreeT]),
|
||||
HeapUsedK is HeapUsed//1024,
|
||||
HeapFreeK is HeapFree//1024,
|
||||
StackSpace is (GInU+SInU+FreeS+TInU+FreeT)//1024,
|
||||
format(user_error, 'Heap Space = ~D KB (+ ~D KB free)~n',[HeapUsedK,HeapFreeK]),
|
||||
format(user_error, 'Stack Space = ~D KB~n',[StackSpace]),
|
||||
findall(p(Cls,CSz,ISz),
|
||||
(current_module(M),
|
||||
current_predicate(_,M:P),
|
||||
predicate_statistics(M:P,Cls,CSz,ISz)),LAll),
|
||||
sumall(LAll, TCls, TCSz, TISz),
|
||||
statistics(atoms,[AtomN,AtomS]),
|
||||
AtomSK is AtomS//1024,
|
||||
format(user_error, '~D Atoms taking ~D KB~n',[AtomN,AtomSK]),
|
||||
TSz is TCSz+TISz,
|
||||
TSzK is TSz//1024,
|
||||
TCSzK is TCSz//1024,
|
||||
TISzK is TISz//1024,
|
||||
format(user_error, 'Total User Code~n ~D clauses taking ~D KB~n ~D KB in clauses + ~D KB in indices~n',
|
||||
[TCls,TSzK,TCSzK,TISzK]),
|
||||
statistics(static_code,[SCl,SI,SI1,SI2,SI3]),
|
||||
SClK is SCl//1024,
|
||||
SIK is SI//1024,
|
||||
SI1K is SI1//1024,
|
||||
SI2K is SI2//1024,
|
||||
SI3K is SI3//1024,
|
||||
ST is SCl+SI,
|
||||
STK is ST//1024,
|
||||
format(user_error, 'Total Static code=~D KB~n ~D KB in clauses + ~D KB in indices (~D+~D+~D)~n',
|
||||
[STK,SClK,SIK,SI1K,SI2K,SI3K]),
|
||||
statistics(dynamic_code,[DCl,DI,DI1,DI2,DI3,DI4]),
|
||||
DClK is DCl//1024,
|
||||
DIK is DI//1024,
|
||||
DI1K is DI1//1024,
|
||||
DI2K is DI2//1024,
|
||||
DI3K is DI3//1024,
|
||||
DI4K is DI4//1024,
|
||||
DT is DCl+DI,
|
||||
DTK is DT//1024,
|
||||
format(user_error, 'Total Dynamic code=~D KB~n ~D KB in clauses + ~D KB in indices (~D+~D+~D+~D)~n',
|
||||
[DTK,DClK,DIK,DI1K,DI2K,DI3K,DI4K]),
|
||||
total_erased(DCls,DSZ,ICls,ISZ),
|
||||
(DCls =:= 0 ->
|
||||
true
|
||||
;
|
||||
DSZK is DSZ//1024,
|
||||
format(user_error, ' ~D erased clauses not reclaimed (~D KB)~n',[DCls,DSZK])
|
||||
),
|
||||
(ICls =:= 0 ->
|
||||
true
|
||||
;
|
||||
ISZK is ISZ//1024,
|
||||
format(user_error, ' ~D erased indices not reclaimed (~D KB)~n',[ICls,ISZK])
|
||||
),
|
||||
!.
|
||||
|
||||
db_usage:-
|
||||
write(mem_dump_error),nl.
|
||||
|
||||
|
||||
/** @pred db_static
|
||||
|
||||
|
||||
List memory usage for every static predicate.
|
||||
|
||||
|
||||
*/
|
||||
db_static :-
|
||||
db_static(-1).
|
||||
|
||||
/** @pred db_static(+ _Threshold_)
|
||||
|
||||
List memory usage for every static predicate. Predicate must use more
|
||||
than _Threshold_ bytes.
|
||||
|
||||
|
||||
*/
|
||||
db_static(Min) :-
|
||||
setof(p(Sz,M:P,Cls,CSz,ISz),
|
||||
PN^(current_module(M),
|
||||
current_predicate(PN,M:P),
|
||||
\+ predicate_property(M:P,dynamic),
|
||||
predicate_statistics(M:P,Cls,CSz,ISz),
|
||||
Sz is (CSz+ISz),
|
||||
Sz > Min),All),
|
||||
format(user_error,' Static user code~n===========================~n',[]),
|
||||
display_preds(All).
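% Example (sketch): list static predicates using more than 10 KB.
%   ?- db_static(10240).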
|
||||
|
||||
/** @pred db_dynamic
|
||||
|
||||
|
||||
List memory usage for every dynamic predicate.
|
||||
|
||||
|
||||
*/
|
||||
db_dynamic :-
|
||||
db_dynamic(-1).
|
||||
|
||||
/** @pred db_dynamic(+ _Threshold_)
|
||||
|
||||
List memory usage for every dynamic predicate. Predicate must use more
|
||||
than _Threshold_ bytes.
|
||||
|
||||
|
||||
|
||||
|
||||
*/
|
||||
db_dynamic(Min) :-
|
||||
setof(p(Sz,M:P,Cls,CSz,ISz,ECls,ECSz,EISz),
|
||||
PN^(current_module(M),
|
||||
current_predicate(PN,M:P),
|
||||
predicate_property(M:P,dynamic),
|
||||
predicate_statistics(M:P,Cls,CSz,ISz),
|
||||
predicate_erased_statistics(M:P,ECls,ECSz,EISz),
|
||||
Sz is (CSz+ISz+ECSz+EISz),
|
||||
Sz > Min),
|
||||
All),
|
||||
format(user_error,' Dynamic user code~n===========================~n',[]),
|
||||
display_dpreds(All).
|
||||
|
||||
display_preds([]).
|
||||
display_preds([p(Sz,M:P,Cls,CSz,ISz)|_]) :-
|
||||
functor(P,A,N),
|
||||
KSz is Sz//1024,
|
||||
KCSz is CSz//1024,
|
||||
KISz is ISz//1024,
|
||||
(M = user -> Name = A/N ; Name = M:A/N),
|
||||
format(user_error,'~w~t~36+:~t~D~7+ clauses using~|~t~D~8+ KB (~D + ~D)~n',[Name,Cls,KSz,KCSz,KISz]),
|
||||
fail.
|
||||
display_preds([_|All]) :-
|
||||
display_preds(All).
|
||||
|
||||
|
||||
display_dpreds([]).
|
||||
display_dpreds([p(Sz,M:P,Cls,CSz,ISz,ECls,ECSz,EISz)|_]) :-
|
||||
functor(P,A,N),
|
||||
KSz is Sz//1024,
|
||||
KCSz is CSz//1024,
|
||||
KISz is ISz//1024,
|
||||
(M = user -> Name = A/N ; Name = M:A/N),
|
||||
format(user_error,'~w~t~36+:~t~D~7+ clauses using~|~t~D~8+ KB (~D + ~D)~n',[Name,Cls,KSz,KCSz,KISz]),
|
||||
(ECls =:= 0
|
||||
->
|
||||
true
|
||||
;
|
||||
ECSzK is ECSz//1024,
|
||||
format(user_error,' ~D erased clauses: ~D KB~n',[ECls,ECSzK])
|
||||
),
|
||||
(EISz =:= 0
|
||||
->
|
||||
true
|
||||
;
|
||||
EISzK is EISz//1024,
|
||||
format(user_error,' ~D KB erased indices~n',[EISzK])
|
||||
),
|
||||
fail.
|
||||
display_dpreds([_|All]) :-
|
||||
display_dpreds(All).
|
||||
|
||||
|
||||
sumall(LEDAll, TEDCls, TEDCSz, TEDISz) :-
|
||||
sumall(LEDAll, 0, TEDCls, 0, TEDCSz, 0, TEDISz).
|
||||
|
||||
sumall([], TEDCls, TEDCls, TEDCSz, TEDCSz, TEDISz, TEDISz).
|
||||
sumall([p(Cls,CSz,ISz)|LEDAll], TEDCls0, TEDCls, TEDCSz0, TEDCSz, TEDISz0, TEDISz) :-
|
||||
TEDClsI is Cls+TEDCls0,
|
||||
TEDCSzI is CSz+TEDCSz0,
|
||||
TEDISzI is ISz+TEDISz0,
|
||||
sumall(LEDAll, TEDClsI, TEDCls, TEDCSzI, TEDCSz, TEDISzI, TEDISz).
|
||||
|
||||
/**
|
||||
@}
|
||||
*/
|
708
packages/python/swig/yap4py/prolog/dgraphs.yap
Normal file
708
packages/python/swig/yap4py/prolog/dgraphs.yap
Normal file
@ -0,0 +1,708 @@
|
||||
/**
|
||||
* @file dgraphs.yap
|
||||
* @author VITOR SANTOS COSTA <vsc@VITORs-MBP.lan>
|
||||
* @date Tue Nov 17 01:23:20 2015
|
||||
*
|
||||
* @brief Directed Graph Processing Utilities.
|
||||
*
|
||||
*
|
||||
*/
|
||||
|
||||
:- module( dgraphs,
|
||||
[
|
||||
dgraph_vertices/2,
|
||||
dgraph_edge/3,
|
||||
dgraph_edges/2,
|
||||
dgraph_add_vertex/3,
|
||||
dgraph_add_vertices/3,
|
||||
dgraph_del_vertex/3,
|
||||
dgraph_del_vertices/3,
|
||||
dgraph_add_edge/4,
|
||||
dgraph_add_edges/3,
|
||||
dgraph_del_edge/4,
|
||||
dgraph_del_edges/3,
|
||||
dgraph_to_ugraph/2,
|
||||
ugraph_to_dgraph/2,
|
||||
dgraph_neighbors/3,
|
||||
dgraph_neighbours/3,
|
||||
dgraph_complement/2,
|
||||
dgraph_transpose/2,
|
||||
dgraph_compose/3,
|
||||
dgraph_transitive_closure/2,
|
||||
dgraph_symmetric_closure/2,
|
||||
dgraph_top_sort/2,
|
||||
dgraph_top_sort/3,
|
||||
dgraph_min_path/5,
|
||||
dgraph_max_path/5,
|
||||
dgraph_min_paths/3,
|
||||
dgraph_isomorphic/4,
|
||||
dgraph_path/3,
|
||||
dgraph_path/4,
|
||||
dgraph_leaves/2,
|
||||
dgraph_reachable/3
|
||||
]).
|
||||
|
||||
/** @defgroup dgraphs Directed Graphs
|
||||
@ingroup library
|
||||
@{
|
||||
|
||||
The following graph manipulation routines use the red-black tree library
|
||||
to try to avoid linear-time scans of the graph for all graph
|
||||
operations. Graphs are represented as a red-black tree, where the key is
|
||||
the vertex, and the associated value is a list of vertices reachable
|
||||
from that vertex through an edge (ie, a list of edges).
|
||||
|
||||
*/
|
||||
|
||||
|
||||
/** @pred dgraph_new(+ _Graph_)
|
||||
|
||||
|
||||
Create a new directed graph. This operation must be performed before
|
||||
trying to use the graph.
|
||||
|
||||
|
||||
*/
|
||||
:- reexport(library(rbtrees),
|
||||
[rb_new/1 as dgraph_new]).
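% Usage sketch (vertex names are illustrative):
%   ?- dgraph_new(G0),
%      dgraph_add_edges(G0, [a-b, b-c, a-c], G),
%      dgraph_top_sort(G, Order).
%   Order = [a, b, c].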
|
||||
|
||||
:- use_module(library(rbtrees),
|
||||
[rb_new/1,
|
||||
rb_empty/1,
|
||||
rb_lookup/3,
|
||||
rb_apply/4,
|
||||
rb_insert/4,
|
||||
rb_visit/2,
|
||||
rb_keys/2,
|
||||
rb_delete/3,
|
||||
rb_map/3,
|
||||
rb_clone/3,
|
||||
ord_list_to_rbtree/2]).
|
||||
|
||||
:- use_module(library(ordsets),
|
||||
[ord_insert/3,
|
||||
ord_union/3,
|
||||
ord_subtract/3,
|
||||
ord_del_element/3,
|
||||
ord_memberchk/2]).
|
||||
|
||||
:- use_module(library(wdgraphs),
|
||||
[dgraph_to_wdgraph/2,
|
||||
wdgraph_min_path/5,
|
||||
wdgraph_max_path/5,
|
||||
wdgraph_min_paths/3]).
|
||||
|
||||
|
||||
/** @pred dgraph_add_edge(+ _Graph_, + _N1_, + _N2_, - _NewGraph_)
|
||||
|
||||
|
||||
Unify _NewGraph_ with a new graph obtained by adding the edge
|
||||
_N1_- _N2_ to the graph _Graph_.
|
||||
|
||||
|
||||
*/
|
||||
dgraph_add_edge(Vs0,V1,V2,Vs2) :-
|
||||