interface work

This commit is contained in:
parent e39b1e0127
commit 97350da9bd

CXX/yapdb.hh (41 changed lines)
@@ -66,8 +66,8 @@ public:
 * @brief YAPFunctor represents Prolog functors Name/Arity
 */
class YAPFunctor : public YAPProp {
  friend class YAPApplTerm;
  friend class YAPTerm;
  friend class YAPApplTerm;
  friend class YAPTerm;
  friend class YAPPredicate;
  friend class YAPQuery;
  Functor f;

@@ -136,7 +136,7 @@ protected:
  YAPPredicate(const char *s0, Term &out, Term &names) {
    CACHE_REGS
    BACKUP_MACHINE_REGS();
    Term *modp = NULL;;
    Term *modp = NULL;

    out = Yap_StringToTerm(s0, strlen(s0) + 1, &LOCAL_encoding, 1200, &names);
    // extern char *s0;

@@ -144,8 +144,9 @@ protected:
    // Yap_DebugPlWrite(out);
    // delete [] ns;
    if (out == 0L)
      throw YAPError();
    ap = getPred(out, modp);
      ap = nullptr;
    else
      ap = getPred(out, modp);
    RECOVER_MACHINE_REGS();
  }

@@ -157,6 +158,15 @@ protected:
    ap = getPred(t, v);
  }

  /// Term constructor for predicates
  ///
  /// It is just a call to getPred
  inline YAPPredicate(YAPTerm t) {
    Term *v = nullptr;
    Term tt = t.term();
    ap = getPred(tt, v);
  }

  /// Cast constructor for predicates,
  /// if we have the implementation data.
  ///

@@ -171,7 +181,7 @@ public:
  YAPPredicate(YAPFunctor f) {
    CACHE_REGS
    ap = RepPredProp(PredPropByFunc(f.f, Yap_CurrentModule()));
  };
  }

  /// Functor constructor for predicates, is given a specific module.
  ///

@@ -228,6 +238,7 @@ public:
  ///
  /// we return a positive number.
  uintptr_t getArity() { return ap->ArityOfPE; }
  arity_t arity() { return ap->ArityOfPE; }
};

/**
@@ -237,18 +248,16 @@ public:
 */
class YAPPrologPredicate : public YAPPredicate {
public:
  YAPPrologPredicate(YAPAtom name, uintptr_t arity,
                     YAPModule module = YAPModule(), bool tabled = false,
                     bool logical_updates = false, bool local = false,
                     bool sourced = true, bool discontiguous = false,
                     bool multiFile = false, bool hidden = false,
                     bool untraceable = false, bool unspyable = false,
                     bool meta = false, bool sync = false,
                     bool quasi_quotable = false, size_t mega_clause = 0);
  YAPPrologPredicate(YAPTerm t);
  /// add a new clause
  void *assertClause(YAPTerm clause, bool last = true,
                     YAPTerm source = YAPTerm(TermNil));
                     YAPTerm source = YAPTerm());
  /// retract at least the first clause matching the predicate.
  void *retractClause(YAPTerm skeleton, bool all = false);
  void *clause(YAPTerm skeleton, YAPTerm &body);
  /// return the Nth clause (if source is available)
  // YAPTerm clause(size_t index, YAPPredicate p) { return YAPTerm(); };
  /// return the Nth clause (if source is available)
  YAPTerm *nextClause() { return nullptr; };
};

/**
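A minimal usage sketch for the declarations above (assumptions: a YAPAtom can be built from a C string and the engine is already initialized; the predicate name and clause text are invented for illustration, while the constructor and assertClause signatures are taken from the header as shown):

~~~~~{.cpp}
// Sketch only -- not part of the commit.  Declares foo/1 in the current
// module and asserts one clause, using the API exactly as declared above.
#include "yapdb.hh"

void add_fact_sketch() {
  YAPPrologPredicate foo(YAPAtom("foo"), 1);   // assumed YAPAtom(const char *) ctor
  YAPTerm clause("foo(42)");                   // string-parsing YAPTerm ctor (yapt.hh)
  // With this commit the default source argument became an uninitialized
  // YAPTerm(), so it can simply be omitted here.
  foo.assertClause(clause, /*last=*/true);
}
~~~~~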
CXX/yapi.cpp (178 changed lines)

@@ -1,8 +1,8 @@

#define YAP_CPP_INTERFACE 1

#include <string>
#include "yapi.hh"
#include <string>

extern "C" {

@@ -361,7 +361,7 @@ intptr_t YAPTerm::hashTerm(size_t sz, size_t depth, bool variant) {

const char *YAPTerm::text() {
  CACHE_REGS
  size_t length;
  size_t length = 0;
  encoding_t enc = LOCAL_encoding;
  char *os;

@@ -372,7 +372,10 @@ const char *YAPTerm::text() {
    return 0;
  }
  RECOVER_MACHINE_REGS();
  return os;
  length = strlen(os) + 1;
  char *sm = (char *)malloc(length + 1);
  strcpy(sm, os);
  return sm;
}

const char *YAPQuery::text() { return goal.text(); }
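The rewritten YAPTerm::text() above now returns a malloc'ed copy of the internal buffer rather than the buffer itself. A hedged caller-side sketch follows (ownership by the caller is an assumption the code suggests but does not document):

~~~~~{.cpp}
// Sketch only -- not part of the commit.
#include <cstdio>
#include <cstdlib>
#include "yapi.hh"

void print_term_sketch() {
  YAPTerm t("hello(world)");   // string-parsing constructor from yapt.hh
  const char *s = t.text();    // heap copy after this change (malloc + strcpy)
  if (s) {
    std::printf("%s\n", s);
    std::free((void *)s);      // assumption: the caller releases the copy
  }
}
~~~~~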
@@ -518,9 +521,8 @@ bool YAPEngine::call(YAPPredicate ap, YAPTerm ts[]) {
  YAP_dogoalinfo q;
  Term terr;
  jmp_buf q_env;
  std::vector<Term> vt(arity);
  for (arity_t i = 0; i < arity; i++)
    vt[i] = ts[i].term();
    XREGS[i + 1] = ts[i].term();
  q.CurSlot = Yap_StartSlots();
  q.p = P;
  q.cp = CP;
@@ -536,7 +538,55 @@ bool YAPEngine::call(YAPPredicate ap, YAPTerm ts[]) {
  }
  // don't forget, on success these guys may create slots
  __android_log_print(ANDROID_LOG_INFO, "YAPDroid", "exec ");
  result = (bool)YAP_EnterGoal(ap.asPred(), &vt[0], &q);
  result = (bool)YAP_EnterGoal(ap.asPred(), nullptr, &q);
  if ((terr = Yap_GetException())) {
    YAP_LeaveGoal(false, &q);
    throw YAPError();
  }
  __android_log_print(ANDROID_LOG_INFO, "YAPDroid", "out %d", result);

  if (!result) {
    YAP_LeaveGoal(false, &q);
  } else {
    YAP_LeaveGoal(FALSE, &q);
  }
  RECOVER_MACHINE_REGS();
  return result;
}

bool YAPEngine::call(YAPTerm Yt) {
  CACHE_REGS
  BACKUP_MACHINE_REGS();
  Term t = Yt.term(), terr, tmod = CurrentModule, *ts = nullptr;
  PredEntry *ap = Yap_get_pred(t, tmod, "C++");
  arity_t arity = ap->ArityOfPE;
  bool result;
  YAP_dogoalinfo q;
  jmp_buf q_env;

  if (IsApplTerm(t)) {
    ts = RepAppl(t) + 1;
  } else {
    ts = RepPair(t);
  }
  for (arity_t i = 0; i < arity; i++)
    XREGS[i + 1] = ts[i];
  q.CurSlot = Yap_StartSlots();
  q.p = P;
  q.cp = CP;
  // make sure this is safe

  if (setjmp(q_env)) {
    if ((terr = Yap_PeekException())) {
      YAP_LeaveGoal(false, &q);
      Yap_CloseHandles(q.CurSlot);
      throw YAPError();
    }
    return false;
  }
  // don't forget, on success these guys may create slots
  __android_log_print(ANDROID_LOG_INFO, "YAPDroid", "exec ");
  result = (bool)YAP_EnterGoal(ap, nullptr, &q);
  if ((terr = Yap_GetException())) {
    YAP_LeaveGoal(false, &q);
    throw YAPError();
@@ -859,77 +909,22 @@ PredEntry *YAPPredicate::getPred(Term &t, Term *&outp) {
  return ap;
}

YAPPrologPredicate::YAPPrologPredicate(
    YAPAtom name, arity_t arity, YAPModule mod, bool tabled,
    bool logical_updates, bool is_thread_local, bool sourced,
    bool discontiguous, bool multiFile, bool hidden, bool untraceable,
    bool unspyable, bool meta, bool moduleTransparent, bool quasiQuotable,
    size_t mega_clause)
    : YAPPredicate(name, arity, mod) {
  if (!ap)
    return;
  if (is_thread_local) {
    if (ap->cs.p_code.NOfClauses || tabled)
      return;
    ap->PredFlags |= (ThreadLocalPredFlag | LogUpdatePredFlag);
  } else if (logical_updates) {
    if (ap->cs.p_code.NOfClauses || tabled)
      return;
    ap->PredFlags |= LogUpdatePredFlag;
    ap->CodeOfPred = FAILCODE;
    ap->OpcodeOfPred = FAILCODE->opc;
  }
  if (tabled) {
    ap->PredFlags |= TabledPredFlag;
    if (ap->cs.p_code.NOfClauses || tabled)
      return;
    ap->PredFlags |= TabledPredFlag;
  }
  if (sourced) {
    ap->PredFlags |= SourcePredFlag;
  }
  if (discontiguous) {
    ap->PredFlags |= DiscontiguousPredFlag;
  }
  if (multiFile) {
    ap->PredFlags |= MultiFileFlag;
  }
  if (hidden) {
    ap->PredFlags |= HiddenPredFlag;
  }
  if (untraceable) {
    ap->PredFlags |= SourcePredFlag;
  }
  if (unspyable) {
    ap->PredFlags |= NoSpyPredFlag;
  }
  if (meta) {
    ap->PredFlags |= MetaPredFlag;
  } else if (moduleTransparent) {
    ap->PredFlags |= ModuleTransparentPredFlag;
  }
  if (quasiQuotable) {
    ap->PredFlags |= QuasiQuotationPredFlag;
  }
  if (untraceable) {
    ap->PredFlags |= SourcePredFlag;
  }
  if (hidden) {
    ap->PredFlags |= SourcePredFlag;
  }
}
YAPPrologPredicate::YAPPrologPredicate(YAPTerm t) : YAPPredicate(t) {}

void *YAPPrologPredicate::assertClause(YAPTerm clause, bool last,
                                       YAPTerm source) {
void *YAPPrologPredicate::assertClause(YAPTerm cl, bool last, YAPTerm source) {
  CACHE_REGS

  RECOVER_MACHINE_REGS();
  Term tt = clause.gt();
  Term sourcet = source.gt();
  Term tt = cl.gt();
  Term sourcet;
  Term ntt = cl.gt();
  if (source.initialized())
    sourcet = source.gt();
  else
    sourcet = TermZERO;
  yamop *codeaddr = Yap_cclause(tt, PP->ArityOfPE, Yap_CurrentModule(),
                                sourcet); /* vsc: give the number of arguments
                                             to cclause in case there is overflow */
  Term ntt = clause.gt();
  if (LOCAL_ErrorMessage) {
    RECOVER_MACHINE_REGS();
    return 0;
@@ -940,46 +935,45 @@ void *YAPPrologPredicate::assertClause(YAPTerm clause, bool last,
    RECOVER_MACHINE_REGS();
  }
  return tref;
  return 0;
}

void *YAPPrologPredicate::retractClause(YAPTerm skeleton, bool all) {
  return 0;
}
void *YAPPrologPredicate::clause(YAPTerm skeleton, YAPTerm &body) { return 0; }

const char *YAPError::text() {

  char buf[256];
  std::string s = "";
  if (LOCAL_ActiveError.errorFunction) {
    s += LOCAL_ActiveError.errorFile;
    s += ":";
    sprintf(buf, "%ld", (long int)LOCAL_ActiveError.errorLine);
    s += buf;
    sprintf(buf, "%ld", (long int)LOCAL_ActiveError.errorLine);
    s += buf;
    s += ":0 in C-code";
  }
  if (LOCAL_ActiveError.prologPredLine) {
    s += "\n" ;
    s+= LOCAL_ActiveError.prologPredFile->StrOfAE ;
    s+= ":" ;
    sprintf(buf, "%ld", (long int)LOCAL_ActiveError.prologPredLine);
    s+= buf; // std::to_string(LOCAL_ActiveError.prologPredLine) ;
    // YAPIntegerTerm(LOCAL_ActiveError.prologPredLine).text();
    s+= ":0 " ;
    s+= LOCAL_ActiveError.prologPredModule ;
    s+= ":" ;
    s+= (LOCAL_ActiveError.prologPredName)->StrOfAE ;
    s+= "/" ;
    sprintf(buf, "%ld", (long int)LOCAL_ActiveError.prologPredArity);
    s+= // std::to_string(LOCAL_ActiveError.prologPredArity);
        buf;
    s += "\n";
    s += LOCAL_ActiveError.prologPredFile->StrOfAE;
    s += ":";
    sprintf(buf, "%ld", (long int)LOCAL_ActiveError.prologPredLine);
    s += buf; // std::to_string(LOCAL_ActiveError.prologPredLine) ;
    // YAPIntegerTerm(LOCAL_ActiveError.prologPredLine).text();
    s += ":0 ";
    s += LOCAL_ActiveError.prologPredModule;
    s += ":";
    s += (LOCAL_ActiveError.prologPredName)->StrOfAE;
    s += "/";
    sprintf(buf, "%ld", (long int)LOCAL_ActiveError.prologPredArity);
    s += // std::to_string(LOCAL_ActiveError.prologPredArity);
        buf;
  }
  s += " error ";
  if (LOCAL_ActiveError.classAsText != nullptr)
    s += LOCAL_ActiveError.classAsText->StrOfAE;
  s += ".";
  s += LOCAL_ActiveError.errorAsText->StrOfAE;
  s += ".\n";
  s += ".";
  s += LOCAL_ActiveError.errorAsText->StrOfAE;
  s += ".\n";
  if (LOCAL_ActiveError.errorTerm) {
    Term t = Yap_PopTermFromDB(LOCAL_ActiveError.errorTerm);
    if (t) {

@@ -70,6 +70,7 @@ extern "C" {
extern PyObject *term_to_python(yhandle_t t, bool eval);
extern PyObject *deref_term_to_python(yhandle_t t);
X_API bool init_python(void);
extern Term pythonToYAP(PyObject *p);

extern PyObject *py_Main;
CXX/yapq.hh (20 changed lines)

@@ -52,10 +52,12 @@ public:
  /// It is given a string, calls the parser and obtains a Prolog term that
  /// should be a callable
  /// goal.
  inline YAPQuery(const char *s): YAPPredicate(s, tgoal, names) {
    BACKUP_H();
  inline YAPQuery(const char *s) : YAPPredicate(s, tgoal, names) {
    BACKUP_H();
    __android_log_print(ANDROID_LOG_INFO, "YAPDroid", "got game %d",
                        LOCAL_CurSlot);
    if (!ap)
      return;
    goal = YAPTerm(tgoal);
    vnames = YAPListTerm(names);
    __android_log_print(ANDROID_LOG_INFO, "YAPDroid", "%s", vnames.text());
@@ -66,10 +68,10 @@ public:
  ///
  /// It is given an atom, and a Prolog term that should be a callable
  /// goal, say `main`, `init`, `live`.
  inline YAPQuery(YAPAtom g) : YAPPredicate( g ) {
    goal = YAPAtomTerm( g );
    vnames = YAPListTerm();
    openQuery();
  inline YAPQuery(YAPAtom g) : YAPPredicate(g) {
    goal = YAPAtomTerm(g);
    vnames = YAPListTerm();
    openQuery();
  };

  /// set flags for query execution, currently only for exception handling
@@ -164,9 +166,7 @@ public:
    _callback->run(s);
  }
  /// stop yap
  void close() {
    Yap_exit(0);
  }
  void close() { Yap_exit(0); }

  /// execute the callback with a text argument.
  bool hasError() { return LOCAL_Error_TYPE != YAP_NO_ERROR; }
@@ -178,6 +178,8 @@ public:
  inline YAPTerm getTerm(yhandle_t h) { return YAPTerm(h); }
  /// current directory for the engine
  bool call(YAPPredicate ap, YAPTerm ts[]);
  /// current directory for the engine
  bool call(YAPTerm t);

  const char *currentDir() {
    char dir[1024];
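A hedged sketch of driving the engine through the declarations above (bool call(YAPTerm t) and hasError()); the default YAPEngine constructor and the goal text are assumptions for the example:

~~~~~{.cpp}
// Sketch only -- not part of the commit.
#include "yapq.hh"

bool run_goal_sketch() {
  YAPEngine engine;                  // assumed default-constructible engine
  YAPTerm goal("between(1,3,X)");    // parsed by the yapt.hh string constructor
  bool ok = engine.call(goal);       // bool call(YAPTerm t) declared above
  if (engine.hasError())
    return false;                    // a Prolog exception was raised
  return ok;
}
~~~~~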
CXX/yapt.hh (28 changed lines)

@@ -29,7 +29,7 @@ public:
  /// YAPTerm
  // do nothing constructor
  YAPTerm() { mk(MkVarTerm()); }
  YAPTerm(yhandle_t i) { t = i; };
  // YAPTerm(yhandle_t i) { t = i; };
  /// pointer to term
  YAPTerm(void *ptr);
  /// parse string s and construct a term.
@@ -37,6 +37,12 @@ public:
    Term tp;
    mk(YAP_ReadBuffer(s, &tp));
  }
  /// construct a term out of an integer (if you know object type use
  /// YAPIntegerTerm)
  YAPTerm(long int num) { mk(MkIntegerTerm(num)); }
  /// construct a term out of an integer (if you know object type use
  /// YAPIntegerTerm)
  YAPTerm(double num) { mk(MkFloatTerm(num)); }
  /// parse string s and construct a term.
  YAPTerm(YAPFunctor f, YAPTerm ts[]);
  /// extract the tag of a term, after dereferencing.
@@ -118,6 +124,9 @@ public:

  /// return a handle to the term
  inline yhandle_t handle() { return t; };

  /// whether the term actually refers to a live object
  inline bool initialized() { return t != 0; };
};

/**
@@ -125,8 +134,9 @@ public:
 */
class YAPVarTerm : public YAPTerm {
  YAPVarTerm(Term t) {
    if (IsVarTerm(t))
    if (IsVarTerm(t)) {
      mk(t);
    }
  }

public:
@@ -217,7 +227,18 @@ public:
class YAPIntegerTerm : public YAPNumberTerm {
public:
  YAPIntegerTerm(intptr_t i);
  intptr_t getInteger() { return IntegerOfTerm(gt()); }
  intptr_t getInteger() { return IntegerOfTerm(gt()); };
};

/**
 * @brief Floating Point Term
 */

class YAPFloatTerm : public YAPNumberTerm {
public:
  YAPFloatTerm(double dbl) { mk(MkFloatTerm(dbl)); };

  double getFl() { return FloatOfTerm(gt()); };
};

class YAPListTerm : public YAPTerm {
@@ -330,5 +351,4 @@ public:
  // Getter: outputs the name as a sequence of ISO-LATIN1 codes;
  const char *text() { return (const char *)AtomOfTerm(gt())->StrOfAE; }
};

#endif /* YAPT_HH */
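The constructors and the new initialized() test above can be combined as in this small sketch (the term texts are invented; everything else follows the header as shown):

~~~~~{.cpp}
// Sketch only -- not part of the commit.
#include "yapt.hh"

void build_terms_sketch() {
  YAPTerm i(42L);          // long int -> MkIntegerTerm
  YAPTerm d(3.14);         // double   -> MkFloatTerm
  YAPTerm p("foo(bar)");   // parsed via YAP_ReadBuffer
  if (p.initialized()) {   // handle-based liveness check added in this commit
    // safe to use p here
  }
}
~~~~~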
@@ -78,11 +78,11 @@ int assign_python(PyObject *root, term_t t, PyObject *e) {
      right = get_p_int(term_to_python(targ, true), PyObject_Size(lhs));
      if (!PySequence_Check(lhs))
        return -1;
      PL_reset_term_refs(targ);
      PL_reset_term_refs(targ);
      return PySequence_SetSlice(lhs, left, right, e);
    } else {
      rhs = term_to_python(trhs, true);
      PL_reset_term_refs(targ);
      PL_reset_term_refs(targ);
      return PyObject_SetItem(lhs, rhs, e);
    }
  }
@@ -254,3 +254,12 @@ foreign_t python_to_term(PyObject *pVal, term_t t) {
#endif
  }
}

X_API YAP_Term pythonToYAP(PyObject *pVal) {
  term_t t = PL_new_term_ref();
  if (!python_to_term(pVal, t))
    return 0;
  YAP_Term tt = YAP_GetFromSlot(t);
  YAP_RecoverSlots(1, t);
  return tt;
}
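A hedged sketch of calling the exported pythonToYAP() above from C++: only the pythonToYAP(PyObject *) signature comes from this diff; the CPython calls, the header location, and the need for an initialized Python interpreter and YAP engine are assumptions.

~~~~~{.cpp}
// Sketch only -- not part of the commit.
#include <Python.h>
#include <YapInterface.h>   // assumed location of YAP's C interface (defines YAP_Term)

extern "C" YAP_Term pythonToYAP(PyObject *p);   // as defined above

YAP_Term int_to_term_sketch() {
  PyObject *num = PyLong_FromLong(42);   // standard CPython API
  YAP_Term t = pythonToYAP(num);         // returns 0 on conversion failure
  Py_DECREF(num);
  return t;
}
~~~~~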
@@ -1,3 +1,4 @@

if __name__ == '__main__':
    from yap_kernel import kernelapp as app
    app.launch_new_instance()
    import yap_kernel.kernelapp
    yap_kernel.kernelapp.launch_new_instance()
@@ -1,152 +0,0 @@
/*************************************************************************
*                                                                        *
*   YAP Prolog                                                           *
*                                                                        *
*   Yap Prolog was developed at NCCUP - Universidade do Porto            *
*                                                                        *
*   Copyright L.Damas, V.S.Costa and Universidade do Porto 1985-1997     *
*                                                                        *
**************************************************************************
*                                                                        *
*   File:        callcount.yap                                           *
*   Last rev:    8/2/02                                                  *
*   mods:                                                                *
*   comments:    Some profiling predicates available in yap              *
*                                                                        *
*************************************************************************/

%% @{

/** @defgroup Profiling Profiling Prolog Programs
@ingroup extensions

YAP includes two profilers. The count profiler keeps information on the
number of times a predicate was called. This information can be used to
detect what are the most commonly called predicates in the program. The
count profiler can be compiled by setting YAP's flag profiling
to `on`. The time-profiler is a `gprof` profiler, and counts
how many ticks are being spent on specific predicates, or on other
system functions such as internal data-base accesses or garbage collects.

The YAP profiling sub-system is currently under
development. Functionality for this sub-system will increase with newer
implementation.

*/

%% @{

/** @defgroup Call_Counting Counting Calls
@ingroup Profiling

Predicates compiled with YAP's flag call_counting set to
`on` update counters on the numbers of calls and of
retries. Counters are actually decreasing counters, so that they can be
used as timers. Three counters are available:

+ `calls`: number of predicate calls since execution started or since
system was reset;
+ `retries`: number of retries for predicates called since
execution started or since counters were reset;
+ `calls_and_retries`: count both on predicate calls and
retries.

These counters can be used to find out how many calls a certain
goal takes to execute. They can also be used as timers.

The code for the call counters piggybacks on the profiling
code. Therefore, activating the call counters also activates the profiling
counters.

These are the predicates that access and manipulate the call counters.
*/

:- system_module( '$_callcount', [call_count/3,
        call_count_data/3,
        call_count_reset/0], []).

:- use_system_module( '$_errors', ['$do_error'/2]).


/** @pred call_count_data(- _Calls_, - _Retries_, - _CallsAndRetries_)

Give current call count data. The first argument gives the current value
for the _Calls_ counter, next the _Retries_ counter, and last
the _CallsAndRetries_ counter.

*/
call_count_data(Calls, Retries, Both) :-
	'$call_count_info'(Calls, Retries, Both).

/** @pred call_count_reset

Reset call count counters. All timers are also reset.

*/
call_count_reset :-
	'$call_count_reset'.

/** @pred call_count(? _CallsMax_, ? _RetriesMax_, ? _CallsAndRetriesMax_)

Set call counters as timers. YAP will generate an exception
if one of the instantiated call counters decreases to 0:

+ _CallsMax_

throw the exception `call_counter` when the
counter `calls` reaches 0;

+ _RetriesMax_

throw the exception `retry_counter` when the
counter `retries` reaches 0;

+ _CallsAndRetriesMax_

throw the exception
`call_and_retry_counter` when the counter `calls_and_retries`
reaches 0.

YAP will ignore counters that are called with unbound arguments.

Next, we show a simple example of how to use call counters:

~~~~~{.prolog}
?- yap_flag(call_counting,on), [-user]. l :- l. end_of_file. yap_flag(call_counting,off).

yes

yes
?- catch((call_count(10000,_,_),l),call_counter,format("limit_exceeded.~n",[])).

limit_exceeded.

yes
~~~~~
Notice that we first compile the looping predicate `l/0` with
call_counting `on`. Next, we catch/3 to handle an
exception when `l/0` performs more than 10000 reductions.

*/
call_count(Calls, Retries, Both) :-
	'$check_if_call_count_on'(Calls, CallsOn),
	'$check_if_call_count_on'(Retries, RetriesOn),
	'$check_if_call_count_on'(Both, BothOn),
	'$call_count_set'(Calls, CallsOn, Retries, RetriesOn, Both, BothOn).

'$check_if_call_count_on'(Calls, 1) :- integer(Calls), !.
'$check_if_call_count_on'(Calls, 0) :- var(Calls), !.
'$check_if_call_count_on'(Calls, A) :-
	'$do_error'(type_error(integer,Calls),call_count(A)).

%% @}

/**
@}
*/
@@ -1,100 +1,209 @@
from IPython.core.interactive import ExecutionResult
import yap
import sys
import syslog
# -*- coding: utf-8 -*-
"""YAP Stuff for Main IPython class."""

#-----------------------------------------------------------------------------
#  Copyright (C) 2001 Janko Hauser <jhauser@zscout.de>
#  Copyright (C) 2001-2007 Fernando Perez. <fperez@colorado.edu>
#  Copyright (C) 2008-2011  The IPython Development Team
#
#  Distributed under the terms of the BSD License.  The full license is in
#  the file COPYING, distributed as part of this software.
#-----------------------------------------------------------------------------

from __future__ import absolute_import, print_function

import __future__
import abc
import ast
import atexit
import functools
import os
import re
import runpy
import signal

def yap_run_cell(self, s, store_history=True, silent=False,
                 shell_futures=True):
import sys
import tempfile
import traceback
import types
import subprocess
import warnings
import yap
from io import open as io_open

    result = ExecutionResult()
from pickleshare import PickleShareDB

    if (not s) or s.isspace():
        self.last_execution_succeeded = True
        return result
from traitlets.config.configurable import SingletonConfigurable
from IPython.core import oinspect
from IPython.core import magic
from IPython.core import page
from IPython.core import prefilter
from IPython.core import shadowns
from IPython.core import ultratb
from IPython.core import interactiveshell
from IPython.core.alias import Alias, AliasManager
from IPython.core.autocall import ExitAutocall
from IPython.core.builtin_trap import BuiltinTrap
from IPython.core.events import EventManager, available_events
from IPython.core.compilerop import CachingCompiler, check_linecache_ipython
from IPython.core.debugger import Pdb
from IPython.core.display_trap import DisplayTrap
from IPython.core.displayhook import DisplayHook
from IPython.core.displaypub import DisplayPublisher
from IPython.core.error import InputRejected, UsageError
from IPython.core.extensions import ExtensionManager
from IPython.core.formatters import DisplayFormatter
from IPython.core.history import HistoryManager
from IPython.core.inputsplitter import ESC_MAGIC, ESC_MAGIC2
from IPython.core.logger import Logger
from IPython.core.macro import Macro
from IPython.core.payload import PayloadManager
from IPython.core.prefilter import PrefilterManager
from IPython.core.profiledir import ProfileDir
from IPython.core.usage import default_banner
from IPython.core.interactiveshell import InteractiveShellABC, InteractiveShell, ExecutionResult
from IPython.testing.skipdoctest import skip_doctest_py2, skip_doctest
from IPython.utils import PyColorize
from IPython.utils import io
from IPython.utils import py3compat
from IPython.utils import openpy
from IPython.utils.decorators import undoc
from IPython.utils.io import ask_yes_no
from IPython.utils.ipstruct import Struct
from IPython.paths import get_ipython_dir
from IPython.utils.path import get_home_dir, get_py_filename, ensure_dir_exists
from IPython.utils.process import system, getoutput
from IPython.utils.py3compat import (builtin_mod, unicode_type, string_types,
                                     with_metaclass, iteritems)
from IPython.utils.strdispatch import StrDispatch
from IPython.utils.syspathcontext import prepended_to_syspath
from IPython.utils.text import format_screen, LSString, SList, DollarFormatter
from IPython.utils.tempdir import TemporaryDirectory
from traitlets import (
    Integer, Bool, CaselessStrEnum, Enum, List, Dict, Unicode, Instance, Type,
    observe, default,
)
from warnings import warn
from logging import error

    if store_history:
        result.execution_count = self.execution_count
class YAPInteractiveShell:
    """An enhanced, interactive shell for YAP."""

    def error_before_exec(value):
        result.error_before_exec = value
        self.last_execution_succeeded = False
        return result
    def __init__(self, kernel):
        self.yapeng = yap.YAPEngine()
        self.q = None
        self.shell = kernel.shell
        self.shell.run_cell = self.run_cell

    def closeq(self):
        if self.q:
            self.q.close()
            self.q = None

    def run_cell(self, s, store_history=True, silent=False, shell_futures=True):

        """Run a complete IPython cell.

        Parameters
        ----------
        raw_cell : str
          The code (including IPython code such as %magic functions) to run.
        store_history : bool
          If True, the raw and translated cell will be stored in IPython's
          history. For user code calling back into IPython's machinery, this
          should be set to False.
        silent : bool
          If True, avoid side-effects, such as implicit displayhooks and
          and logging.  silent=True forces store_history=False.
        shell_futures : bool
          If True, the code will share future statements with the interactive
          shell. It will both be affected by previous __future__ imports, and
          any __future__ imports in the code will affect the shell. If False,
          __future__ imports are not shared in either direction.

        Returns
        -------
        result : :class:`ExecutionResult`
        """

        result = ExecutionResult()

        if (not s) or s.isspace():
            self.shell.last_execution_succeeded = True
            return result

        if store_history:
            result.execution_count = self.shell.execution_count

        def error_before_exec(value):
            result.error_before_exec = value
            self.shell.last_execution_succeeded = False
            return result

    if not self.engine:
        try:
            self.engine = yap.Engine()
        except:
            return error_before_exec( sys.exc_info()[1])
    if not self.q:
        try:
            self.q = self.yapeng.query(s)
        except SyntaxError:
            return error_before_exec( sys.exc_info()[1])

        if not self.q:
            try:
                self.q = self.engine.query(s)
            except SyntaxError:
                return error_before_exec( sys.exc_info()[1])
    cell = s  # cell has to exist so it can be stored/logged

        cell = s  # cell has to exist so it can be stored/logged
    # Store raw and processed history
    # if not silent:
    #     self.shell..logger.log(cell, s)

        # Store raw and processed history
        # if not silent:
        #    self.logger.log(cell, s)

    try:
        f = io.StringIO()
        with redirect_stdout(f):
            goal = self.q.next()
        print('{0}'.format(f.getvalue()))
        # Execute the user code
        has_raised = False
        if goal:
            myvs = self.q.namedVarsCopy()
            if myvs:
                i = 0
                for peq in myvs:
                    name = peq[0]
                    bind = peq[1]
                    if bind.isVar():
                        var = yap.YAPAtom('$VAR')
                        f = yap.YAPFunctor(var, 1)
                        bind.unify(yap.YAPApplTerm(f, (name)))
                    else:
                        i = bind.numberVars(i, True)
                    print(name.text() + " = " + bind.text())
            else:
                print("yes")
            if self.q.deterministic():
                self.closeq()
        else:
            print("No (more) answers")
            self.closeq()
    except:
        result.error_in_exec = sys.exc_info()[1]
        # self.showtraceback()
        has_raised = True
        has_raised = False
        try:
            #f = io.StringIO()
            # with redirect_stdout(f):
            run = self.q.next()
            # print('{0}'.format(f.getvalue()))
            # Execute the user code
            if run:
                myvs = self.q.namedVarsCopy()
                if myvs:
                    i = 0
                    for peq in myvs:
                        name = peq[0]
                        bind = peq[1]
                        if bind.isVar():
                            var = yap.YAPAtom('$VAR')
                            f = yap.YAPFunctor(var, 1)
                            bind.unify(yap.YAPApplTerm(f, (name)))
                        else:
                            i = bind.numberVars(i, True)
                        print(name.text() + " = " + bind.text())
                else:
                    print("yes")
                if self.q.deterministic():
                    self.closeq()
            else:
                print("No (more) answers")
                self.closeq()
        except:
            result.error_in_exec = sys.exc_info()[1]
            # self.showtraceback()
            has_raised = True
            self.closeq()

    self.last_execution_succeeded = not has_raised
    result.result = self.last_execution_succeeded
        self.shell.last_execution_succeeded = not has_raised
        result.result = self.shell.last_execution_succeeded
        print( self.q )
    # Reset this so later displayed values do not modify the
    # ExecutionResult
    # self.displayhook.exec_result = None

        # Reset this so later displayed values do not modify the
        # ExecutionResult
        # self.displayhook.exec_result = None
    #self.events.trigger('post_execute')
    #if not silent:
    #    self.events.trigger('post_run_cell')

        #self.events.trigger('post_execute')
        #if not silent:
        #    self.events.trigger('post_run_cell')
    if store_history:
        # Write output to the database. Does nothing unless
        # history output logging is enabled.
        # self.history_manager.store_output(self.execution_count)
        # Each cell is a *single* input, regardless of how many lines it has
        self.shell.execution_count += 1

        if store_history:
            # Write output to the database. Does nothing unless
            # history output logging is enabled.
            # self.history_manager.store_output(self.execution_count)
            # Each cell is a *single* input, regardless of how many lines it has
            self.execution_count += 1

    return result

        def closeq(self):
            if self.q:
                self.q.close()
                self.q = None
        return result
@@ -1,3 +1,6 @@

:- use_module(library(python)).

:- if( current_prolog_flag(apple, true) ).

:- putenv( 'LC_CTYPE', 'en_us:UTF-8').
@@ -1,92 +0,0 @@
from __future__ import print_function

from metakernel import MetaKernel

from metakernel import register_ipython_magics
register_ipython_magics()

class MetaKernelyap(MetaKernel):
    implementation = 'MetaKernel YAP'
    implementation_version = '1.0'
    language = 'text'
    language_version = '0.1'
    banner = "MetaKernel YAP"
    language_info = {
        'mimetype': 'text/plain',
        'name': 'text',
        # ------ If different from 'language':
        'codemirror_mode': {
            "version": 2,
            "name": "prolog"
        }
        'pygments_lexer': 'language',
        'version' : "0.0.1",
        'file_extension': '.yap',
        'help_links': MetaKernel.help_links,
    }

    def __init__(self, **kwargs):

        MetaKernel.__init__(self, **kwargs)
        self._start_yap()
        self.qq = None sq

    def _start_yap(self):
        # Signal handlers are inherited by forked processes, and we can't easily
        # reset it from the subprocess. Since kernelapp ignores SIGINT except in
        # message handlers, we need to temporarily reset the SIGINT handler here
        # so that yap and its children are interruptible.
        sig = signal.signal(signal.SIGINT, signal.SIG_DFL)
        try:
            engine = yap.YAPEngine()
            engine.query("load_files(library(python), [])").command()
            banner = "YAP {0} Kernel".format(self.engine.version())

        finally:
            signal.signal(signal.SIGINT, sig)

        # Register Yap function to write image data to temporary file
        #self.yapwrapper.run_command(image_setup_cmd)

    def get_usage(self):
        return "This is the YAP kernel."

    def do_execute_direct(self, code):
        if not code.strip():
            return {'status': 'ok', 'execution_count': self.execution_count,
                    'payload': [], 'user_expressions': {}}

        interrupted = False
        try:
            print self.q
            if self.q is None:
                self.q = self.engine.query(code.rstrip())
            if self.q.next():
                vs = self.q.namedVars()
                if vs.length() > 0:
                    l = []
                    while vs.length() > 0:
                        eq = vs.car()
                        l.append(' '.join([getArg(1).text(), '=', eq.getArg(2).text())
                        vs = vs.cdr()
                    l.append(';')
                    o = '\n'.join(l)
                else:
                    return 'yes'
                    self.q = None

            else:
                return 'no'
                self.q = None


    def repr(self, data):
        return repr(data)

if __name__ == '__main__':
    try:
        from ipykernel.kernelapp import IPKernelApp
    except ImportError:
        from IPython.kernel.zmq.kernelapp import IPKernelApp
    IPKernelApp.launch_instance(kernel_class=MetaKernelyap)
@@ -16,6 +16,7 @@ from tornado import ioloop
import zmq
from zmq.eventloop import ioloop as zmq_ioloop
from zmq.eventloop.zmqstream import ZMQStream
from ipykernel.zmqshell import ZMQInteractiveShell

from IPython.core.application import (
    BaseIPythonApplication, base_flags, base_aliases, catch_config_error
@@ -42,7 +43,6 @@ from ipykernel.parentpoller import ParentPollerUnix, ParentPollerWindows
from jupyter_client.session import (
    Session, session_flags, session_aliases,
)
from ipykernel.zmqshell import ZMQInteractiveShell

#-----------------------------------------------------------------------------
# Flags and Aliases
@@ -96,7 +96,7 @@ To read more about this, see https://github.com/ipython/ipython/issues/2049
# Application class for starting an IPython Kernel
#-----------------------------------------------------------------------------

class YAPKernelApp(BaseIPythonApplication, InteractiveShellApp,
class YAPKernelApp(BaseIPythonApplication, InteractiveShellApp,
        ConnectionFileMixin):
    name='YAP-kernel'
    aliases = Dict(kernel_aliases)
@@ -107,7 +107,7 @@ class YAPKernelApp(BaseIPythonApplication, InteractiveShellApp,
                        klass='ipykernel.kernelbase.Kernel',
    help="""The Kernel subclass to be used.

    This should allow easy re-use of the IPKernelApp entry point
    This should allow easy re-use of the YAPKernelApp entry point
    to configure and launch kernels other than IPython's own.
    """).tag(config=True)
    kernel = Any()
@@ -117,7 +117,7 @@ class YAPKernelApp(BaseIPythonApplication, InteractiveShellApp,

    subcommands = {
        'install': (
            '.kernelspec.InstallYAPKernelSpecApp',
            'yap_kernel.kernelspec.InstallYAPKernelSpecApp',
            'Install the YAP kernel'
        ),
    }
@@ -209,7 +209,7 @@ class YAPKernelApp(BaseIPythonApplication, InteractiveShellApp,
        if not self.connection_file:
            self.connection_file = "kernel-%s.json"%os.getpid()
        try:
            self.connection_file = filefind(self.connection_file, ['.', self.connection_dir])
            self.connection_file = filefind(self.connection_file, ['.',self.connection_dir])
        except IOError:
            self.log.debug("Connection file not found: %s", self.connection_file)
            # This means I own it, and I'll create it in this directory:
@@ -382,7 +382,7 @@ class YAPKernelApp(BaseIPythonApplication, InteractiveShellApp,
        if not os.environ.get('MPLBACKEND'):
            os.environ['MPLBACKEND'] = 'module://ipykernel.pylab.backend_inline'

        # Provide a wrapper for :meth:`InteractiveShellApp.init_gui_pylab`
        # Provide a wrapper for :meth:`YAPInteractiveShellApp.init_gui_pylab`
        # to ensure that any exception is printed straight to stderr.
        # Normally _showtraceback associates the reply with an execution,
        # which means frontends will never draw it, as this exception
@@ -478,7 +478,7 @@ class YAPKernelApp(BaseIPythonApplication, InteractiveShellApp,
launch_new_instance = YAPKernelApp.launch_instance

def main():
    """Run an IPKernel as an application"""
    """Run an YAPKernel as an application"""
    app = YAPKernelApp.instance()
    app.initialize()
    app.start()
@ -1,188 +1,490 @@
|
||||
"""The IPython kernel spec for Jupyter"""
|
||||
"""An Application for launching a kernel"""
|
||||
|
||||
# Copyright (c) IPython Development Team.
|
||||
# Copyright (c) YAP Development Team.
|
||||
# Distributed under the terms of the Modified BSD License.
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
import errno
|
||||
import json
|
||||
import atexit
|
||||
import os
|
||||
import shutil
|
||||
import sys
|
||||
import tempfile
|
||||
import signal
|
||||
import traceback
|
||||
import logging
|
||||
|
||||
from jupyter_client.kernelspec import KernelSpecManager
|
||||
from tornado import ioloop
|
||||
import zmq
|
||||
from zmq.eventloop import ioloop as zmq_ioloop
|
||||
from zmq.eventloop.zmqstream import ZMQStream
|
||||
|
||||
pjoin = os.path.join
|
||||
from IPython.core.application import (
|
||||
BaseIPythonApplication, base_flags, base_aliases, catch_config_error
|
||||
)
|
||||
|
||||
KERNEL_NAME = 'YAP%i' % sys.version_info[0]
|
||||
from IPython.core.profiledir import ProfileDir
|
||||
from IPython.core.shellapp import (
|
||||
InteractiveShellApp, shell_flags, shell_aliases
|
||||
)
|
||||
|
||||
# path to kernelspec resources
|
||||
RESOURCES = pjoin(os.path.dirname(__file__), 'resources')
|
||||
from IPython.utils import io
|
||||
from ipython_genutils.path import filefind, ensure_dir_exists
|
||||
from traitlets import (
|
||||
Any, Instance, Dict, Unicode, Integer, Bool, DottedObjectName, Type, default
|
||||
)
|
||||
from ipython_genutils.importstring import import_item
|
||||
from jupyter_core.paths import jupyter_runtime_dir
|
||||
from jupyter_client import write_connection_file
|
||||
from jupyter_client.connect import ConnectionFileMixin
|
||||
|
||||
# local imports
|
||||
from ipykernel.iostream import IOPubThread
|
||||
from ipykernel.heartbeat import Heartbeat
|
||||
from .yap_kernel import YAPKernel
|
||||
from ipykernel.parentpoller import ParentPollerUnix, ParentPollerWindows
|
||||
from jupyter_client.session import (
|
||||
Session, session_flags, session_aliases,
|
||||
)
|
||||
from ipykernel.zmqshell import ZMQInteractiveShell
|
||||
|
||||
def make_ipkernel_cmd(mod='ipykernel', executable=None, extra_arguments=None, **kw):
|
||||
"""Build Popen command list for launching an IPython kernel.
|
||||
#-----------------------------------------------------------------------------
|
||||
# Flags and Aliases
|
||||
#-----------------------------------------------------------------------------
|
||||
|
||||
Parameters
|
||||
----------
|
||||
mod : str, optional (default 'ipykernel')
|
||||
A string of an IPython module whose __main__ starts an IPython kernel
|
||||
kernel_aliases = dict(base_aliases)
|
||||
kernel_aliases.update({
|
||||
'ip' : 'YAPKernelApp.ip',
|
||||
'hb' : 'YAPKernelApp.hb_port',
|
||||
'shell' : 'YAPKernelApp.shell_port',
|
||||
'iopub' : 'YAPKernelApp.iopub_port',
|
||||
'stdin' : 'YAPKernelApp.stdin_port',
|
||||
'control' : 'YAPKernelApp.control_port',
|
||||
'f' : 'YAPKernelApp.connection_file',
|
||||
'transport': 'YAPKernelApp.transport',
|
||||
})
|
||||
|
||||
executable : str, optional (default sys.executable)
|
||||
The Python executable to use for the kernel process.
|
||||
kernel_flags = dict(base_flags)
|
||||
kernel_flags.update({
|
||||
'no-stdout' : (
|
||||
{'YAPKernelApp' : {'no_stdout' : True}},
|
||||
"redirect stdout to the null device"),
|
||||
'no-stderr' : (
|
||||
{'YAPKernelApp' : {'no_stderr' : True}},
|
||||
"redirect stderr to the null device"),
|
||||
'pylab' : (
|
||||
{'YAPKernelApp' : {'pylab' : 'auto'}},
|
||||
"""Pre-load matplotlib and numpy for interactive use with
|
||||
the default matplotlib backend."""),
|
||||
})
|
||||
|
||||
extra_arguments : list, optional
|
||||
A list of extra arguments to pass when executing the launch code.
|
||||
# inherit flags&aliases for any IPython shell apps
|
||||
kernel_aliases.update(shell_aliases)
|
||||
kernel_flags.update(shell_flags)
|
||||
|
||||
Returns
|
||||
-------
|
||||
# inherit flags&aliases for Sessions
|
||||
kernel_aliases.update(session_aliases)
|
||||
kernel_flags.update(session_flags)
|
||||
|
||||
A Popen command list
|
||||
"""
|
||||
if executable is None:
|
||||
executable = sys.executable
|
||||
extra_arguments = extra_arguments or []
|
||||
arguments = [executable, '-m', mod, '-f', '{connection_file}']
|
||||
arguments.extend(extra_arguments)
|
||||
_ctrl_c_message = """\
|
||||
NOTE: When using the `ipython kernel` entry point, Ctrl-C will not work.
|
||||
|
||||
return arguments
|
||||
To exit, you will have to explicitly quit this process, by either sending
|
||||
"quit" from a client, or using Ctrl-\\ in UNIX-like environments.
|
||||
|
||||
To read more about this, see https://github.com/ipython/ipython/issues/2049
|
||||
|
||||
def get_kernel_dict(extra_arguments=None):
|
||||
"""Construct dict for kernel.json"""
|
||||
return {
|
||||
'argv': make_ipkernel_cmd(extra_arguments=extra_arguments),
|
||||
'display_name': 'Python %i' % sys.version_info[0],
|
||||
'language': 'python',
|
||||
"""
|
||||
|
||||
#-----------------------------------------------------------------------------
|
||||
# Application class for starting an YAP Kernel
|
||||
#-----------------------------------------------------------------------------
|
||||
|
||||
class YAPKernelApp(BaseIPythonApplication, InteractiveShellApp,
|
||||
ConnectionFileMixin):
|
||||
name='YAP-kernel'
|
||||
aliases = Dict(kernel_aliases)
|
||||
flags = Dict(kernel_flags)
|
||||
classes = [YAPKernel, ZMQInteractiveShell, ProfileDir, Session]
|
||||
# the kernel class, as an importstring
|
||||
kernel_class = Type('yap_kernel.yap_kernel.YAPKernel',
|
||||
klass='ipykernel.kernelbase.Kernel',
|
||||
help="""The Kernel subclass to be used.
|
||||
|
||||
This should allow easy re-use of the YAPKernelApp entry point
|
||||
to configure and launch kernels other than YAP's own.
|
||||
""").tag(config=True)
|
||||
kernel = Any()
|
||||
poller = Any() # don't restrict this even though current pollers are all Threads
|
||||
heartbeat = Instance(Heartbeat, allow_none=True)
|
||||
ports = Dict()
|
||||
|
||||
subcommands = {
|
||||
'install': (
|
||||
'yap_kernel.kernelspec.InstallYAPKernelSpecApp',
|
||||
'Install the YAP kernel'
|
||||
),
|
||||
}
|
||||
|
||||
# connection info:
|
||||
connection_dir = Unicode()
|
||||
|
||||
def write_kernel_spec(path=None, overrides=None, extra_arguments=None):
|
||||
"""Write a kernel spec directory to `path`
|
||||
@default('connection_dir')
|
||||
def _default_connection_dir(self):
|
||||
return jupyter_runtime_dir()
|
||||
|
||||
If `path` is not specified, a temporary directory is created.
|
||||
If `overrides` is given, the kernelspec JSON is updated before writing.
|
||||
@property
|
||||
def abs_connection_file(self):
|
||||
if os.path.basename(self.connection_file) == self.connection_file:
|
||||
return os.path.join(self.connection_dir, self.connection_file)
|
||||
else:
|
||||
return self.connection_file
|
||||
|
||||
The path to the kernelspec is always returned.
|
||||
"""
|
||||
if path is None:
|
||||
path = os.path.join(tempfile.mkdtemp(suffix='_kernels'), KERNEL_NAME)
|
||||
# streams, etc.
|
||||
no_stdout = Bool(False, help="redirect stdout to the null device").tag(config=True)
|
||||
no_stderr = Bool(False, help="redirect stderr to the null device").tag(config=True)
|
||||
outstream_class = DottedObjectName('ipykernel.iostream.OutStream',
|
||||
help="The importstring for the OutStream factory").tag(config=True)
|
||||
displayhook_class = DottedObjectName('ipykernel.displayhook.ZMQDisplayHook',
|
||||
help="The importstring for the DisplayHook factory").tag(config=True)
|
||||
|
||||
# stage resources
|
||||
shutil.copytree(RESOURCES, path)
|
||||
# write kernel.json
|
||||
kernel_dict = get_kernel_dict(extra_arguments)
|
||||
# polling
|
||||
parent_handle = Integer(int(os.environ.get('JPY_PARENT_PID') or 0),
|
||||
help="""kill this process if its parent dies. On Windows, the argument
|
||||
specifies the HANDLE of the parent process, otherwise it is simply boolean.
|
||||
""").tag(config=True)
|
||||
interrupt = Integer(int(os.environ.get('JPY_INTERRUPT_EVENT') or 0),
|
||||
help="""ONLY USED ON WINDOWS
|
||||
Interrupt this process when the parent is signaled.
|
||||
""").tag(config=True)
|
||||
|
||||
if overrides:
|
||||
kernel_dict.update(overrides)
|
||||
with open(pjoin(path, 'kernel.json'), 'w') as f:
|
||||
json.dump(kernel_dict, f, indent=1)
|
||||
def init_crash_handler(self):
|
||||
sys.excepthook = self.excepthook
|
||||
|
||||
return path
|
||||
def excepthook(self, etype, evalue, tb):
|
||||
# write uncaught traceback to 'real' stderr, not zmq-forwarder
|
||||
traceback.print_exception(etype, evalue, tb, file=sys.__stderr__)
|
||||
|
||||
def init_poller(self):
|
||||
if sys.platform == 'win32':
|
||||
if self.interrupt or self.parent_handle:
|
||||
self.poller = ParentPollerWindows(self.interrupt, self.parent_handle)
|
||||
elif self.parent_handle:
|
||||
self.poller = ParentPollerUnix()
|
||||
|
||||
def install(kernel_spec_manager=None, user=False, kernel_name=KERNEL_NAME, display_name=None,
|
||||
prefix=None, profile=None):
|
||||
"""Install the IPython kernelspec for Jupyter
|
||||
def _bind_socket(self, s, port):
|
||||
iface = '%s://%s' % (self.transport, self.ip)
|
||||
if self.transport == 'tcp':
|
||||
if port <= 0:
|
||||
port = s.bind_to_random_port(iface)
|
||||
else:
|
||||
s.bind("tcp://%s:%i" % (self.ip, port))
|
||||
elif self.transport == 'ipc':
|
||||
if port <= 0:
|
||||
port = 1
|
||||
path = "%s-%i" % (self.ip, port)
|
||||
while os.path.exists(path):
|
||||
port = port + 1
|
||||
path = "%s-%i" % (self.ip, port)
|
||||
else:
|
||||
path = "%s-%i" % (self.ip, port)
|
||||
s.bind("ipc://%s" % path)
|
||||
return port
|
||||
|
||||
Parameters
|
||||
----------
|
||||
def write_connection_file(self):
|
||||
"""write connection info to JSON file"""
|
||||
cf = self.abs_connection_file
|
||||
self.log.debug("Writing connection file: %s", cf)
|
||||
write_connection_file(cf, ip=self.ip, key=self.session.key, transport=self.transport,
|
||||
shell_port=self.shell_port, stdin_port=self.stdin_port, hb_port=self.hb_port,
|
||||
iopub_port=self.iopub_port, control_port=self.control_port)
|
||||
|
||||
kernel_spec_manager: KernelSpecManager [optional]
|
||||
A KernelSpecManager to use for installation.
|
||||
If none provided, a default instance will be created.
|
||||
user: bool [default: False]
|
||||
Whether to do a user-only install, or system-wide.
|
||||
kernel_name: str, optional
|
||||
Specify a name for the kernelspec.
|
||||
This is needed for having multiple IPython kernels for different environments.
|
||||
display_name: str, optional
|
||||
Specify the display name for the kernelspec
|
||||
profile: str, optional
|
||||
Specify a custom profile to be loaded by the kernel.
|
||||
prefix: str, optional
|
||||
Specify an install prefix for the kernelspec.
|
||||
This is needed to install into a non-default location, such as a conda/virtual-env.
|
||||
def cleanup_connection_file(self):
|
||||
cf = self.abs_connection_file
|
||||
self.log.debug("Cleaning up connection file: %s", cf)
|
||||
try:
|
||||
os.remove(cf)
|
||||
except (IOError, OSError):
|
||||
pass
|
||||
|
||||
Returns
|
||||
-------
|
||||
self.cleanup_ipc_files()
|
||||
|
||||
The path where the kernelspec was installed.
|
||||
"""
|
||||
if kernel_spec_manager is None:
|
||||
kernel_spec_manager = KernelSpecManager()
|
||||
def init_connection_file(self):
|
||||
if not self.connection_file:
|
||||
self.connection_file = "kernel-%s.json"%os.getpid()
|
||||
try:
|
||||
self.connection_file = filefind(self.connection_file, ['.', self.connection_dir])
|
||||
except IOError:
|
||||
self.log.debug("Connection file not found: %s", self.connection_file)
|
||||
# This means I own it, and I'll create it in this directory:
|
||||
ensure_dir_exists(os.path.dirname(self.abs_connection_file), 0o700)
|
||||
# Also, I will clean it up:
|
||||
atexit.register(self.cleanup_connection_file)
|
||||
return
|
||||
try:
|
||||
self.load_connection_file()
|
||||
except Exception:
|
||||
self.log.error("Failed to load connection file: %r", self.connection_file, exc_info=True)
|
||||
self.exit(1)
|
||||
|
||||
if (kernel_name != KERNEL_NAME) and (display_name is None):
|
||||
# kernel_name is specified and display_name is not
|
||||
# default display_name to kernel_name
|
||||
display_name = kernel_name
|
||||
overrides = {}
|
||||
if display_name:
|
||||
overrides["display_name"] = display_name
|
||||
if profile:
|
||||
extra_arguments = ["--profile", profile]
|
||||
if not display_name:
|
||||
# add the profile to the default display name
|
||||
overrides["display_name"] = 'Python %i [profile=%s]' % (sys.version_info[0], profile)
|
||||
else:
|
||||
extra_arguments = None
|
||||
path = write_kernel_spec(overrides=overrides, extra_arguments=extra_arguments)
|
||||
dest = kernel_spec_manager.install_kernel_spec(
|
||||
path, kernel_name=kernel_name, user=user, prefix=prefix)
|
||||
# cleanup afterward
|
||||
shutil.rmtree(path)
|
||||
return dest
|
||||
def init_sockets(self):
|
||||
# Create a context, a session, and the kernel sockets.
|
||||
self.log.info("Starting the kernel at pid: %i", os.getpid())
|
||||
context = zmq.Context.instance()
|
||||
# Uncomment this to try closing the context.
|
||||
# atexit.register(context.term)
|
||||
|
||||
# Entrypoint
|
||||
self.shell_socket = context.socket(zmq.ROUTER)
|
||||
self.shell_socket.linger = 1000
|
||||
self.shell_port = self._bind_socket(self.shell_socket, self.shell_port)
|
||||
self.log.debug("shell ROUTER Channel on port: %i" % self.shell_port)
|
||||
|
||||
from traitlets.config import Application
|
||||
self.stdin_socket = context.socket(zmq.ROUTER)
|
||||
self.stdin_socket.linger = 1000
|
||||
self.stdin_port = self._bind_socket(self.stdin_socket, self.stdin_port)
|
||||
self.log.debug("stdin ROUTER Channel on port: %i" % self.stdin_port)
|
||||
|
||||
self.control_socket = context.socket(zmq.ROUTER)
|
||||
self.control_socket.linger = 1000
|
||||
self.control_port = self._bind_socket(self.control_socket, self.control_port)
|
||||
self.log.debug("control ROUTER Channel on port: %i" % self.control_port)
|
||||
|
||||
class InstallYAPKernelSpecApp(Application):
|
||||
"""Dummy app wrapping argparse"""
|
||||
name = 'ipython-kernel-install'
|
||||
self.init_iopub(context)
|
||||
|
||||
def init_iopub(self, context):
|
||||
self.iopub_socket = context.socket(zmq.PUB)
|
||||
self.iopub_socket.linger = 1000
|
||||
self.iopub_port = self._bind_socket(self.iopub_socket, self.iopub_port)
|
||||
self.log.debug("iopub PUB Channel on port: %i" % self.iopub_port)
|
||||
self.configure_tornado_logger()
|
||||
self.iopub_thread = IOPubThread(self.iopub_socket, pipe=True)
|
||||
self.iopub_thread.start()
|
||||
# backward-compat: wrap iopub socket API in background thread
|
||||
self.iopub_socket = self.iopub_thread.background_socket
|
||||
|
||||
def init_heartbeat(self):
|
||||
"""start the heart beating"""
|
||||
# heartbeat doesn't share context, because it mustn't be blocked
|
||||
# by the GIL, which is accessed by libzmq when freeing zero-copy messages
|
||||
hb_ctx = zmq.Context()
|
||||
self.heartbeat = Heartbeat(hb_ctx, (self.transport, self.ip, self.hb_port))
|
||||
self.hb_port = self.heartbeat.port
|
||||
self.log.debug("Heartbeat REP Channel on port: %i" % self.hb_port)
|
||||
self.heartbeat.start()
|
||||
|
||||
def log_connection_info(self):
|
||||
"""display connection info, and store ports"""
|
||||
basename = os.path.basename(self.connection_file)
|
||||
if basename == self.connection_file or \
|
||||
os.path.dirname(self.connection_file) == self.connection_dir:
|
||||
# use shortname
|
||||
tail = basename
|
||||
else:
|
||||
tail = self.connection_file
|
||||
lines = [
|
||||
"To connect another client to this kernel, use:",
|
||||
" --existing %s" % tail,
|
||||
]
|
||||
# log connection info
|
||||
# info-level, so often not shown.
|
||||
# frontends should use the %connect_info magic
|
||||
# to see the connection info
|
||||
for line in lines:
|
||||
self.log.info(line)
|
||||
# also raw print to the terminal if no parent_handle (`ipython kernel`)
|
||||
# unless log-level is CRITICAL (--quiet)
|
||||
if not self.parent_handle and self.log_level < logging.CRITICAL:
|
||||
io.rprint(_ctrl_c_message)
|
||||
for line in lines:
|
||||
io.rprint(line)
|
||||
|
||||
self.ports = dict(shell=self.shell_port, iopub=self.iopub_port,
|
||||
stdin=self.stdin_port, hb=self.hb_port,
|
||||
control=self.control_port)
|
||||
|
||||
def init_blackhole(self):
|
||||
"""redirects stdout/stderr to devnull if necessary"""
|
||||
if self.no_stdout or self.no_stderr:
|
||||
blackhole = open(os.devnull, 'w')
|
||||
if self.no_stdout:
|
||||
sys.stdout = sys.__stdout__ = blackhole
|
||||
if self.no_stderr:
|
||||
sys.stderr = sys.__stderr__ = blackhole
|
||||
|
||||
def init_io(self):
|
||||
"""Redirect input streams and set a display hook."""
|
||||
if self.outstream_class:
|
||||
outstream_factory = import_item(str(self.outstream_class))
|
||||
sys.stdout = outstream_factory(self.session, self.iopub_thread, u'stdout')
|
||||
sys.stderr = outstream_factory(self.session, self.iopub_thread, u'stderr')
|
||||
if self.displayhook_class:
|
||||
displayhook_factory = import_item(str(self.displayhook_class))
|
||||
self.displayhook = displayhook_factory(self.session, self.iopub_socket)
|
||||
sys.displayhook = self.displayhook
|
||||
|
||||
self.patch_io()
|
||||
|
||||
def patch_io(self):
|
||||
"""Patch important libraries that can't handle sys.stdout forwarding"""
|
||||
try:
|
||||
import faulthandler
|
||||
except ImportError:
|
||||
pass
|
||||
else:
|
||||
# Warning: this is a monkeypatch of `faulthandler.enable`, watch for possible
|
||||
# updates to the upstream API and update accordingly (up-to-date as of Python 3.5):
|
||||
# https://docs.python.org/3/library/faulthandler.html#faulthandler.enable
|
||||
|
||||
# change default file to __stderr__ from forwarded stderr
|
||||
faulthandler_enable = faulthandler.enable
|
||||
def enable(file=sys.__stderr__, all_threads=True, **kwargs):
|
||||
return faulthandler_enable(file=file, all_threads=all_threads, **kwargs)
|
||||
|
||||
faulthandler.enable = enable
|
||||
|
||||
if hasattr(faulthandler, 'register'):
|
||||
faulthandler_register = faulthandler.register
|
||||
def register(signum, file=sys.__stderr__, all_threads=True, chain=False, **kwargs):
|
||||
return faulthandler_register(signum, file=file, all_threads=all_threads,
|
||||
chain=chain, **kwargs)
|
||||
faulthandler.register = register
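A minimal sketch of what the patch above changes for callers, assuming faulthandler is importable: a bare enable() now defaults to the real terminal stream rather than the forwarded stderr.

import faulthandler
import sys

faulthandler.enable()  # after patch_io, equivalent to faulthandler.enable(file=sys.__stderr__)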
|
||||
|
||||
def init_signal(self):
|
||||
signal.signal(signal.SIGINT, signal.SIG_IGN)
|
||||
|
||||
def init_kernel(self):
|
||||
"""Create the Kernel object itself"""
|
||||
shell_stream = ZMQStream(self.shell_socket)
|
||||
control_stream = ZMQStream(self.control_socket)
|
||||
|
||||
kernel_factory = self.kernel_class.instance
|
||||
|
||||
kernel = kernel_factory(parent=self, session=self.session,
|
||||
shell_streams=[shell_stream, control_stream],
|
||||
iopub_thread=self.iopub_thread,
|
||||
iopub_socket=self.iopub_socket,
|
||||
stdin_socket=self.stdin_socket,
|
||||
log=self.log,
|
||||
profile_dir=self.profile_dir,
|
||||
user_ns=self.user_ns,
|
||||
)
|
||||
kernel.record_ports({
|
||||
name + '_port': port for name, port in self.ports.items()
|
||||
})
|
||||
self.kernel = kernel
|
||||
|
||||
# Allow the displayhook to get the execution count
|
||||
self.displayhook.get_execution_count = lambda: kernel.execution_count
|
||||
|
||||
def init_gui_pylab(self):
|
||||
"""Enable GUI event loop integration, taking pylab into account."""
|
||||
|
||||
# Register inline backend as default
|
||||
# this is higher priority than matplotlibrc,
|
||||
# but lower priority than anything else (mpl.use() for instance).
|
||||
# This only affects matplotlib >= 1.5
|
||||
if not os.environ.get('MPLBACKEND'):
|
||||
os.environ['MPLBACKEND'] = 'module://ipykernel.pylab.backend_inline'
|
||||
|
||||
# Provide a wrapper for :meth:`InteractiveShellApp.init_gui_pylab`
|
||||
# to ensure that any exception is printed straight to stderr.
|
||||
# Normally _showtraceback associates the reply with an execution,
|
||||
# which means frontends will never draw it, as this exception
|
||||
# is not associated with any execute request.
|
||||
|
||||
shell = self.shell
|
||||
_showtraceback = shell._showtraceback
|
||||
try:
|
||||
# replace error-sending traceback with stderr
|
||||
def print_tb(etype, evalue, stb):
|
||||
print ("GUI event loop or pylab initialization failed",
|
||||
file=sys.stderr)
|
||||
print (shell.InteractiveTB.stb2text(stb), file=sys.stderr)
|
||||
shell._showtraceback = print_tb
|
||||
InteractiveShellApp.init_gui_pylab(self)
|
||||
finally:
|
||||
shell._showtraceback = _showtraceback
|
||||
|
||||
def init_shell(self):
|
||||
self.shell = getattr(self.kernel, 'shell', None)
|
||||
if self.shell:
|
||||
self.shell.configurables.append(self)
|
||||
|
||||
def init_extensions(self):
|
||||
super(YAPKernelApp, self).init_extensions()
|
||||
# BEGIN HARDCODED WIDGETS HACK
|
||||
# Ensure ipywidgets extension is loaded if available
|
||||
extension_man = self.shell.extension_manager
|
||||
if 'ipywidgets' not in extension_man.loaded:
|
||||
try:
|
||||
extension_man.load_extension('ipywidgets')
|
||||
except ImportError as e:
|
||||
self.log.debug('ipywidgets package not installed. Widgets will not be available.')
|
||||
# END HARDCODED WIDGETS HACK
|
||||
|
||||
def configure_tornado_logger(self):
|
||||
""" Configure the tornado logging.Logger.
|
||||
|
||||
Must set up the tornado logger or else tornado will call
|
||||
basicConfig for the root logger which makes the root logger
|
||||
go to the real sys.stderr instead of the capture streams.
|
||||
This function mimics the setup of logging.basicConfig.
|
||||
"""
|
||||
logger = logging.getLogger('tornado')
|
||||
handler = logging.StreamHandler()
|
||||
formatter = logging.Formatter(logging.BASIC_FORMAT)
|
||||
handler.setFormatter(formatter)
|
||||
logger.addHandler(handler)
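For comparison, a minimal sketch of the root-logger call the docstring refers to; the method above does the same handler/formatter wiring by hand, but scoped to the 'tornado' logger only.

import logging

logging.basicConfig(format=logging.BASIC_FORMAT)  # what tornado would otherwise apply to the root logger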
|
||||
|
||||
@catch_config_error
|
||||
def initialize(self, argv=None):
|
||||
if argv is None:
|
||||
argv = sys.argv[1:]
|
||||
self.argv = argv
|
||||
super(YAPKernelApp, self).initialize(argv)
|
||||
if self.subapp is not None:
|
||||
return
|
||||
# register zmq IOLoop with tornado
|
||||
zmq_ioloop.install()
|
||||
self.init_blackhole()
|
||||
self.init_connection_file()
|
||||
self.init_poller()
|
||||
self.init_sockets()
|
||||
self.init_heartbeat()
|
||||
# writing/displaying connection info must be *after* init_sockets/heartbeat
|
||||
self.write_connection_file()
|
||||
# Log connection info after writing connection file, so that the connection
|
||||
# file is definitely available at the time someone reads the log.
|
||||
self.log_connection_info()
|
||||
self.init_io()
|
||||
self.init_signal()
|
||||
self.init_kernel()
|
||||
# shell init steps
|
||||
self.init_path()
|
||||
self.init_shell()
|
||||
if self.shell:
|
||||
self.init_gui_pylab()
|
||||
self.init_extensions()
|
||||
self.init_code()
|
||||
# flush stdout/stderr, so that anything written to these streams during
|
||||
# initialization does not get associated with the first execution request
|
||||
sys.stdout.flush()
|
||||
sys.stderr.flush()
|
||||
|
||||
def start(self):
|
||||
import argparse
|
||||
parser = argparse.ArgumentParser(prog=self.name,
|
||||
description="Install the YAP kernel spec.")
|
||||
parser.add_argument('--user', action='store_true',
|
||||
help="Install for the current user instead of system-wide")
|
||||
parser.add_argument('--name', type=str, default=KERNEL_NAME,
|
||||
help="Specify a name for the kernelspec."
|
||||
" This is needed to have multiple IPython kernels at the same time.")
|
||||
parser.add_argument('--display-name', type=str,
|
||||
help="Specify the display name for the kernelspec."
|
||||
" This is helpful when you have multiple IPython kernels.")
|
||||
parser.add_argument('--profile', type=str,
|
||||
help="Specify an IPython profile to load. "
|
||||
"This can be used to create custom versions of the kernel.")
|
||||
parser.add_argument('--prefix', type=str,
|
||||
help="Specify an install prefix for the kernelspec."
|
||||
" This is needed to install into a non-default location, such as a conda/virtual-env.")
|
||||
parser.add_argument('--sys-prefix', action='store_const', const=sys.prefix, dest='prefix',
|
||||
help="Install to Python's sys.prefix."
|
||||
" Shorthand for --prefix='%s'. For use in conda/virtual-envs." % sys.prefix)
|
||||
opts = parser.parse_args(self.argv)
|
||||
if self.subapp is not None:
|
||||
return self.subapp.start()
|
||||
if self.poller is not None:
|
||||
self.poller.start()
|
||||
self.kernel.start()
|
||||
try:
|
||||
dest = install(user=opts.user, kernel_name=opts.name, profile=opts.profile,
|
||||
prefix=opts.prefix, display_name=opts.display_name)
|
||||
except OSError as e:
|
||||
if e.errno == errno.EACCES:
|
||||
print(e, file=sys.stderr)
|
||||
if opts.user:
|
||||
print("Perhaps you want `sudo` or `--user`?", file=sys.stderr)
|
||||
self.exit(1)
|
||||
raise
|
||||
print("Installed kernelspec %s in %s" % (opts.name, dest))
|
||||
ioloop.IOLoop.instance().start()
|
||||
except KeyboardInterrupt:
|
||||
pass
|
||||
|
||||
launch_new_instance = YAPKernelApp.launch_instance
|
||||
|
||||
def main():
|
||||
"""Run an IPKernel as an application"""
|
||||
app = YAPKernelApp.instance()
|
||||
app.initialize()
|
||||
app.start()
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
InstallYAPKernelSpecApp.launch_instance()
|
||||
main()
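A minimal sketch (editorial, not part of this commit) of driving the same entry point programmatically; the import path and connection-file name are assumptions.

from yap_kernel.kernelapp import YAPKernelApp  # hypothetical module path

app = YAPKernelApp.instance()
app.initialize(['-f', 'kernel-test.json'])  # '-f' is the alias for YAPKernelApp.connection_file
app.start()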
|
||||
|
File diff suppressed because it is too large
@ -48,6 +48,7 @@ setup_args = dict(
|
||||
license = 'BSD',
|
||||
platforms = "Linux, Mac OS X, Windows",
|
||||
keywords = ['Interactive', 'Interpreter', 'Shell', 'Web'],
|
||||
data_files=[('share/Yap/js', ['${CMAKE_SOURCE_DIR}/misc/editors/prolog.js'])],
|
||||
classifiers = [
|
||||
'Intended Audience :: Developers',
|
||||
'Intended Audience :: System Administrators',
|
||||
|
@ -1,488 +0,0 @@
|
||||
"""An Application for launching a kernel"""
|
||||
|
||||
# Copyright (c) IPython Development Team.
|
||||
# Distributed under the terms of the Modified BSD License.
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
import atexit
|
||||
import os
|
||||
import sys
|
||||
import signal
|
||||
import traceback
|
||||
import logging
|
||||
|
||||
from tornado import ioloop
|
||||
import zmq
|
||||
from zmq.eventloop import ioloop as zmq_ioloop
|
||||
from zmq.eventloop.zmqstream import ZMQStream
|
||||
|
||||
from IPython.core.application import (
|
||||
BaseIPythonApplication, base_flags, base_aliases, catch_config_error
|
||||
)
|
||||
from IPython.core.profiledir import ProfileDir
|
||||
from IPython.core.shellapp import (
|
||||
InteractiveShellApp, shell_flags, shell_aliases
|
||||
)
|
||||
from IPython.utils import io
|
||||
from ipython_genutils.path import filefind, ensure_dir_exists
|
||||
from traitlets import (
|
||||
Any, Instance, Dict, Unicode, Integer, Bool, DottedObjectName, Type, default
|
||||
)
|
||||
from ipython_genutils.importstring import import_item
|
||||
from jupyter_core.paths import jupyter_runtime_dir
|
||||
from jupyter_client import write_connection_file
|
||||
from jupyter_client.connect import ConnectionFileMixin
|
||||
|
||||
# local imports
|
||||
from ipykernel.iostream import IOPubThread
|
||||
from ipykernel.heartbeat import Heartbeat
|
||||
from .yap_kernel import YAPKernel
|
||||
from ipykernel.parentpoller import ParentPollerUnix, ParentPollerWindows
|
||||
from jupyter_client.session import (
|
||||
Session, session_flags, session_aliases,
|
||||
)
|
||||
from ipykernel.zmqshell import ZMQInteractiveShell
|
||||
|
||||
#-----------------------------------------------------------------------------
|
||||
# Flags and Aliases
|
||||
#-----------------------------------------------------------------------------
|
||||
|
||||
kernel_aliases = dict(base_aliases)
|
||||
kernel_aliases.update({
|
||||
'ip' : 'YAPKernelApp.ip',
|
||||
'hb' : 'YAPKernelApp.hb_port',
|
||||
'shell' : 'YAPKernelApp.shell_port',
|
||||
'iopub' : 'YAPKernelApp.iopub_port',
|
||||
'stdin' : 'YAPKernelApp.stdin_port',
|
||||
'control' : 'YAPKernelApp.control_port',
|
||||
'f' : 'YAPKernelApp.connection_file',
|
||||
'transport': 'YAPKernelApp.transport',
|
||||
})
|
||||
|
||||
kernel_flags = dict(base_flags)
|
||||
kernel_flags.update({
|
||||
'no-stdout' : (
|
||||
{'YAPKernelApp' : {'no_stdout' : True}},
|
||||
"redirect stdout to the null device"),
|
||||
'no-stderr' : (
|
||||
{'YAPKernelApp' : {'no_stderr' : True}},
|
||||
"redirect stderr to the null device"),
|
||||
'pylab' : (
|
||||
{'YAPKernelApp' : {'pylab' : 'auto'}},
|
||||
"""Pre-load matplotlib and numpy for interactive use with
|
||||
the default matplotlib backend."""),
|
||||
})
|
||||
|
||||
# inherit flags&aliases for any IPython shell apps
|
||||
kernel_aliases.update(shell_aliases)
|
||||
kernel_flags.update(shell_flags)
|
||||
|
||||
# inherit flags&aliases for Sessions
|
||||
kernel_aliases.update(session_aliases)
|
||||
kernel_flags.update(session_flags)
|
||||
|
||||
_ctrl_c_message = """\
|
||||
NOTE: When using the `ipython kernel` entry point, Ctrl-C will not work.
|
||||
|
||||
To exit, you will have to explicitly quit this process, by either sending
|
||||
"quit" from a client, or using Ctrl-\\ in UNIX-like environments.
|
||||
|
||||
To read more about this, see https://github.com/ipython/ipython/issues/2049
|
||||
|
||||
"""
|
||||
|
||||
#-----------------------------------------------------------------------------
|
||||
# Application class for starting an IPython Kernel
|
||||
#-----------------------------------------------------------------------------
|
||||
|
||||
class YAPKernelApp(BaseIPythonApplication, InteractiveShellApp,
|
||||
ConnectionFileMixin):
|
||||
name='YAP-kernel'
|
||||
aliases = Dict(kernel_aliases)
|
||||
flags = Dict(kernel_flags)
|
||||
classes = [YAPKernel, ZMQInteractiveShell, ProfileDir, Session]
|
||||
# the kernel class, as an importstring
|
||||
kernel_class = Type('yap_kernel.yap_kernel.YAPKernel',
|
||||
klass='ipykernel.kernelbase.Kernel',
|
||||
help="""The Kernel subclass to be used.
|
||||
|
||||
This should allow easy re-use of the IPKernelApp entry point
|
||||
to configure and launch kernels other than IPython's own.
|
||||
""").tag(config=True)
|
||||
kernel = Any()
|
||||
poller = Any() # don't restrict this even though current pollers are all Threads
|
||||
heartbeat = Instance(Heartbeat, allow_none=True)
|
||||
ports = Dict()
|
||||
|
||||
subcommands = {
|
||||
'install': (
|
||||
'.kernelspec.InstallYAPKernelSpecApp',
|
||||
'Install the YAP kernel'
|
||||
),
|
||||
}
|
||||
|
||||
# connection info:
|
||||
connection_dir = Unicode()
|
||||
|
||||
@default('connection_dir')
|
||||
def _default_connection_dir(self):
|
||||
return jupyter_runtime_dir()
|
||||
|
||||
@property
|
||||
def abs_connection_file(self):
|
||||
if os.path.basename(self.connection_file) == self.connection_file:
|
||||
return os.path.join(self.connection_dir, self.connection_file)
|
||||
else:
|
||||
return self.connection_file
|
||||
|
||||
# streams, etc.
|
||||
no_stdout = Bool(False, help="redirect stdout to the null device").tag(config=True)
|
||||
no_stderr = Bool(False, help="redirect stderr to the null device").tag(config=True)
|
||||
outstream_class = DottedObjectName('ipykernel.iostream.OutStream',
|
||||
help="The importstring for the OutStream factory").tag(config=True)
|
||||
displayhook_class = DottedObjectName('ipykernel.displayhook.ZMQDisplayHook',
|
||||
help="The importstring for the DisplayHook factory").tag(config=True)
|
||||
|
||||
# polling
|
||||
parent_handle = Integer(int(os.environ.get('JPY_PARENT_PID') or 0),
|
||||
help="""kill this process if its parent dies. On Windows, the argument
|
||||
specifies the HANDLE of the parent process, otherwise it is simply boolean.
|
||||
""").tag(config=True)
|
||||
interrupt = Integer(int(os.environ.get('JPY_INTERRUPT_EVENT') or 0),
|
||||
help="""ONLY USED ON WINDOWS
|
||||
Interrupt this process when the parent is signaled.
|
||||
""").tag(config=True)
|
||||
|
||||
def init_crash_handler(self):
|
||||
sys.excepthook = self.excepthook
|
||||
|
||||
def excepthook(self, etype, evalue, tb):
|
||||
# write uncaught traceback to 'real' stderr, not zmq-forwarder
|
||||
traceback.print_exception(etype, evalue, tb, file=sys.__stderr__)
|
||||
|
||||
def init_poller(self):
|
||||
if sys.platform == 'win32':
|
||||
if self.interrupt or self.parent_handle:
|
||||
self.poller = ParentPollerWindows(self.interrupt, self.parent_handle)
|
||||
elif self.parent_handle:
|
||||
self.poller = ParentPollerUnix()
|
||||
|
||||
def _bind_socket(self, s, port):
|
||||
iface = '%s://%s' % (self.transport, self.ip)
|
||||
if self.transport == 'tcp':
|
||||
if port <= 0:
|
||||
port = s.bind_to_random_port(iface)
|
||||
else:
|
||||
s.bind("tcp://%s:%i" % (self.ip, port))
|
||||
elif self.transport == 'ipc':
|
||||
if port <= 0:
|
||||
port = 1
|
||||
path = "%s-%i" % (self.ip, port)
|
||||
while os.path.exists(path):
|
||||
port = port + 1
|
||||
path = "%s-%i" % (self.ip, port)
|
||||
else:
|
||||
path = "%s-%i" % (self.ip, port)
|
||||
s.bind("ipc://%s" % path)
|
||||
return port
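A minimal sketch of the tcp branch above in isolation, assuming pyzmq: when no port is configured, binding falls back to zmq's random-port helper (the address here is just an example).

import zmq

ctx = zmq.Context.instance()
sock = ctx.socket(zmq.ROUTER)
port = sock.bind_to_random_port("tcp://127.0.0.1")  # the call _bind_socket makes when port <= 0
sock.close()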
|
||||
|
||||
def write_connection_file(self):
|
||||
"""write connection info to JSON file"""
|
||||
cf = self.abs_connection_file
|
||||
self.log.debug("Writing connection file: %s", cf)
|
||||
write_connection_file(cf, ip=self.ip, key=self.session.key, transport=self.transport,
|
||||
shell_port=self.shell_port, stdin_port=self.stdin_port, hb_port=self.hb_port,
|
||||
iopub_port=self.iopub_port, control_port=self.control_port)
|
||||
|
||||
def cleanup_connection_file(self):
|
||||
cf = self.abs_connection_file
|
||||
self.log.debug("Cleaning up connection file: %s", cf)
|
||||
try:
|
||||
os.remove(cf)
|
||||
except (IOError, OSError):
|
||||
pass
|
||||
|
||||
self.cleanup_ipc_files()
|
||||
|
||||
def init_connection_file(self):
|
||||
if not self.connection_file:
|
||||
self.connection_file = "kernel-%s.json"%os.getpid()
|
||||
try:
|
||||
self.connection_file = filefind(self.connection_file, ['.', self.connection_dir])
|
||||
except IOError:
|
||||
self.log.debug("Connection file not found: %s", self.connection_file)
|
||||
# This means I own it, and I'll create it in this directory:
|
||||
ensure_dir_exists(os.path.dirname(self.abs_connection_file), 0o700)
|
||||
# Also, I will clean it up:
|
||||
atexit.register(self.cleanup_connection_file)
|
||||
return
|
||||
try:
|
||||
self.load_connection_file()
|
||||
except Exception:
|
||||
self.log.error("Failed to load connection file: %r", self.connection_file, exc_info=True)
|
||||
self.exit(1)
|
||||
|
||||
def init_sockets(self):
|
||||
# Create a context, a session, and the kernel sockets.
|
||||
self.log.info("Starting the kernel at pid: %i", os.getpid())
|
||||
context = zmq.Context.instance()
|
||||
# Uncomment this to try closing the context.
|
||||
# atexit.register(context.term)
|
||||
|
||||
self.shell_socket = context.socket(zmq.ROUTER)
|
||||
self.shell_socket.linger = 1000
|
||||
self.shell_port = self._bind_socket(self.shell_socket, self.shell_port)
|
||||
self.log.debug("shell ROUTER Channel on port: %i" % self.shell_port)
|
||||
|
||||
self.stdin_socket = context.socket(zmq.ROUTER)
|
||||
self.stdin_socket.linger = 1000
|
||||
self.stdin_port = self._bind_socket(self.stdin_socket, self.stdin_port)
|
||||
self.log.debug("stdin ROUTER Channel on port: %i" % self.stdin_port)
|
||||
|
||||
self.control_socket = context.socket(zmq.ROUTER)
|
||||
self.control_socket.linger = 1000
|
||||
self.control_port = self._bind_socket(self.control_socket, self.control_port)
|
||||
self.log.debug("control ROUTER Channel on port: %i" % self.control_port)
|
||||
|
||||
self.init_iopub(context)
|
||||
|
||||
def init_iopub(self, context):
|
||||
self.iopub_socket = context.socket(zmq.PUB)
|
||||
self.iopub_socket.linger = 1000
|
||||
self.iopub_port = self._bind_socket(self.iopub_socket, self.iopub_port)
|
||||
self.log.debug("iopub PUB Channel on port: %i" % self.iopub_port)
|
||||
self.configure_tornado_logger()
|
||||
self.iopub_thread = IOPubThread(self.iopub_socket, pipe=True)
|
||||
self.iopub_thread.start()
|
||||
# backward-compat: wrap iopub socket API in background thread
|
||||
self.iopub_socket = self.iopub_thread.background_socket
|
||||
|
||||
def init_heartbeat(self):
|
||||
"""start the heart beating"""
|
||||
# heartbeat doesn't share context, because it mustn't be blocked
|
||||
# by the GIL, which is accessed by libzmq when freeing zero-copy messages
|
||||
hb_ctx = zmq.Context()
|
||||
self.heartbeat = Heartbeat(hb_ctx, (self.transport, self.ip, self.hb_port))
|
||||
self.hb_port = self.heartbeat.port
|
||||
self.log.debug("Heartbeat REP Channel on port: %i" % self.hb_port)
|
||||
self.heartbeat.start()
|
||||
|
||||
def log_connection_info(self):
|
||||
"""display connection info, and store ports"""
|
||||
basename = os.path.basename(self.connection_file)
|
||||
if basename == self.connection_file or \
|
||||
os.path.dirname(self.connection_file) == self.connection_dir:
|
||||
# use shortname
|
||||
tail = basename
|
||||
else:
|
||||
tail = self.connection_file
|
||||
lines = [
|
||||
"To connect another client to this kernel, use:",
|
||||
" --existing %s" % tail,
|
||||
]
|
||||
# log connection info
|
||||
# info-level, so often not shown.
|
||||
# frontends should use the %connect_info magic
|
||||
# to see the connection info
|
||||
for line in lines:
|
||||
self.log.info(line)
|
||||
# also raw print to the terminal if no parent_handle (`ipython kernel`)
|
||||
# unless log-level is CRITICAL (--quiet)
|
||||
if not self.parent_handle and self.log_level < logging.CRITICAL:
|
||||
io.rprint(_ctrl_c_message)
|
||||
for line in lines:
|
||||
io.rprint(line)
|
||||
|
||||
self.ports = dict(shell=self.shell_port, iopub=self.iopub_port,
|
||||
stdin=self.stdin_port, hb=self.hb_port,
|
||||
control=self.control_port)
|
||||
|
||||
def init_blackhole(self):
|
||||
"""redirects stdout/stderr to devnull if necessary"""
|
||||
if self.no_stdout or self.no_stderr:
|
||||
blackhole = open(os.devnull, 'w')
|
||||
if self.no_stdout:
|
||||
sys.stdout = sys.__stdout__ = blackhole
|
||||
if self.no_stderr:
|
||||
sys.stderr = sys.__stderr__ = blackhole
|
||||
|
||||
def init_io(self):
|
||||
"""Redirect input streams and set a display hook."""
|
||||
if self.outstream_class:
|
||||
outstream_factory = import_item(str(self.outstream_class))
|
||||
sys.stdout = outstream_factory(self.session, self.iopub_thread, u'stdout')
|
||||
sys.stderr = outstream_factory(self.session, self.iopub_thread, u'stderr')
|
||||
if self.displayhook_class:
|
||||
displayhook_factory = import_item(str(self.displayhook_class))
|
||||
self.displayhook = displayhook_factory(self.session, self.iopub_socket)
|
||||
sys.displayhook = self.displayhook
|
||||
|
||||
self.patch_io()
|
||||
|
||||
def patch_io(self):
|
||||
"""Patch important libraries that can't handle sys.stdout forwarding"""
|
||||
try:
|
||||
import faulthandler
|
||||
except ImportError:
|
||||
pass
|
||||
else:
|
||||
# Warning: this is a monkeypatch of `faulthandler.enable`, watch for possible
|
||||
# updates to the upstream API and update accordingly (up-to-date as of Python 3.5):
|
||||
# https://docs.python.org/3/library/faulthandler.html#faulthandler.enable
|
||||
|
||||
# change default file to __stderr__ from forwarded stderr
|
||||
faulthandler_enable = faulthandler.enable
|
||||
def enable(file=sys.__stderr__, all_threads=True, **kwargs):
|
||||
return faulthandler_enable(file=file, all_threads=all_threads, **kwargs)
|
||||
|
||||
faulthandler.enable = enable
|
||||
|
||||
if hasattr(faulthandler, 'register'):
|
||||
faulthandler_register = faulthandler.register
|
||||
def register(signum, file=sys.__stderr__, all_threads=True, chain=False, **kwargs):
|
||||
return faulthandler_register(signum, file=file, all_threads=all_threads,
|
||||
chain=chain, **kwargs)
|
||||
faulthandler.register = register
|
||||
|
||||
def init_signal(self):
|
||||
signal.signal(signal.SIGINT, signal.SIG_IGN)
|
||||
|
||||
def init_kernel(self):
|
||||
"""Create the Kernel object itself"""
|
||||
shell_stream = ZMQStream(self.shell_socket)
|
||||
control_stream = ZMQStream(self.control_socket)
|
||||
|
||||
kernel_factory = self.kernel_class.instance
|
||||
|
||||
kernel = kernel_factory(parent=self, session=self.session,
|
||||
shell_streams=[shell_stream, control_stream],
|
||||
iopub_thread=self.iopub_thread,
|
||||
iopub_socket=self.iopub_socket,
|
||||
stdin_socket=self.stdin_socket,
|
||||
log=self.log,
|
||||
profile_dir=self.profile_dir,
|
||||
user_ns=self.user_ns,
|
||||
)
|
||||
kernel.record_ports({
|
||||
name + '_port': port for name, port in self.ports.items()
|
||||
})
|
||||
self.kernel = kernel
|
||||
|
||||
# Allow the displayhook to get the execution count
|
||||
self.displayhook.get_execution_count = lambda: kernel.execution_count
|
||||
|
||||
def init_gui_pylab(self):
|
||||
"""Enable GUI event loop integration, taking pylab into account."""
|
||||
|
||||
# Register inline backend as default
|
||||
# this is higher priority than matplotlibrc,
|
||||
# but lower priority than anything else (mpl.use() for instance).
|
||||
# This only affects matplotlib >= 1.5
|
||||
if not os.environ.get('MPLBACKEND'):
|
||||
os.environ['MPLBACKEND'] = 'module://ipykernel.pylab.backend_inline'
|
||||
|
||||
# Provide a wrapper for :meth:`InteractiveShellApp.init_gui_pylab`
|
||||
# to ensure that any exception is printed straight to stderr.
|
||||
# Normally _showtraceback associates the reply with an execution,
|
||||
# which means frontends will never draw it, as this exception
|
||||
# is not associated with any execute request.
|
||||
|
||||
shell = self.shell
|
||||
_showtraceback = shell._showtraceback
|
||||
try:
|
||||
# replace error-sending traceback with stderr
|
||||
def print_tb(etype, evalue, stb):
|
||||
print ("GUI event loop or pylab initialization failed",
|
||||
file=sys.stderr)
|
||||
print (shell.InteractiveTB.stb2text(stb), file=sys.stderr)
|
||||
shell._showtraceback = print_tb
|
||||
InteractiveShellApp.init_gui_pylab(self)
|
||||
finally:
|
||||
shell._showtraceback = _showtraceback
|
||||
|
||||
def init_shell(self):
|
||||
self.shell = getattr(self.kernel, 'shell', None)
|
||||
if self.shell:
|
||||
self.shell.configurables.append(self)
|
||||
|
||||
def init_extensions(self):
|
||||
super(YAPKernelApp, self).init_extensions()
|
||||
# BEGIN HARDCODED WIDGETS HACK
|
||||
# Ensure ipywidgets extension is loaded if available
|
||||
extension_man = self.shell.extension_manager
|
||||
if 'ipywidgets' not in extension_man.loaded:
|
||||
try:
|
||||
extension_man.load_extension('ipywidgets')
|
||||
except ImportError as e:
|
||||
self.log.debug('ipywidgets package not installed. Widgets will not be available.')
|
||||
# END HARDCODED WIDGETS HACK
|
||||
|
||||
def configure_tornado_logger(self):
|
||||
""" Configure the tornado logging.Logger.
|
||||
|
||||
Must set up the tornado logger or else tornado will call
|
||||
basicConfig for the root logger which makes the root logger
|
||||
go to the real sys.stderr instead of the capture streams.
|
||||
This function mimics the setup of logging.basicConfig.
|
||||
"""
|
||||
logger = logging.getLogger('tornado')
|
||||
handler = logging.StreamHandler()
|
||||
formatter = logging.Formatter(logging.BASIC_FORMAT)
|
||||
handler.setFormatter(formatter)
|
||||
logger.addHandler(handler)
|
||||
|
||||
@catch_config_error
|
||||
def initialize(self, argv=None):
|
||||
super(YAPKernelApp, self).initialize(argv)
|
||||
if self.subapp is not None:
|
||||
return
|
||||
# register zmq IOLoop with tornado
|
||||
zmq_ioloop.install()
|
||||
self.init_blackhole()
|
||||
self.init_connection_file()
|
||||
self.init_poller()
|
||||
self.init_sockets()
|
||||
self.init_heartbeat()
|
||||
# writing/displaying connection info must be *after* init_sockets/heartbeat
|
||||
self.write_connection_file()
|
||||
# Log connection info after writing connection file, so that the connection
|
||||
# file is definitely available at the time someone reads the log.
|
||||
self.log_connection_info()
|
||||
self.init_io()
|
||||
self.init_signal()
|
||||
self.init_kernel()
|
||||
# shell init steps
|
||||
self.init_path()
|
||||
self.init_shell()
|
||||
if self.shell:
|
||||
self.init_gui_pylab()
|
||||
self.init_extensions()
|
||||
self.init_code()
|
||||
# flush stdout/stderr, so that anything written to these streams during
|
||||
# initialization does not get associated with the first execution request
|
||||
sys.stdout.flush()
|
||||
sys.stderr.flush()
|
||||
|
||||
def start(self):
|
||||
if self.subapp is not None:
|
||||
return self.subapp.start()
|
||||
if self.poller is not None:
|
||||
self.poller.start()
|
||||
self.kernel.start()
|
||||
try:
|
||||
ioloop.IOLoop.instance().start()
|
||||
except KeyboardInterrupt:
|
||||
pass
|
||||
|
||||
launch_new_instance = YAPKernelApp.launch_instance
|
||||
|
||||
def main():
|
||||
"""Run an IPKernel as an application"""
|
||||
app = YAPKernelApp.instance()
|
||||
app.initialize()
|
||||
app.start()
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
@ -1,15 +1,28 @@
|
||||
from __future__ import print_function
|
||||
|
||||
from ipykernel.ipkernel import IPythonKernel
|
||||
|
||||
import sys
|
||||
import signal
|
||||
import yap
|
||||
# import ipdb
|
||||
# ipdb.set_trace()
|
||||
import io
|
||||
import getpass
|
||||
import sys
|
||||
import traceback
|
||||
|
||||
from IPython.core import release
|
||||
from ipython_genutils.py3compat import builtin_mod, PY3, unicode_type, safe_unicode
|
||||
from IPython.utils.tokenutil import token_at_cursor, line_at_cursor
|
||||
from traitlets import Instance, Type, Any, List
|
||||
|
||||
from ipykernel.comm import CommManager
|
||||
from ipykernel.kernelbase import Kernel as KernelBase
|
||||
from ipykernel.zmqshell import ZMQInteractiveShell
|
||||
from .interactiveshell import YAPInteractiveShell
|
||||
from IPython.core.interactiveshell import InteractiveShellABC, InteractiveShell
|
||||
from contextlib import redirect_stdout
|
||||
|
||||
|
||||
|
||||
kernel_json = {
|
||||
"argv": [sys.executable,
|
||||
"argv": [sys.executable,
|
||||
"-m", "yap_kernel",
|
||||
"-f", "{connection_file}"],
|
||||
"display_name": " YAP-6.3",
|
||||
@ -22,11 +35,26 @@ def eprint(*args, **kwargs):
|
||||
print(*args, file=sys.stderr, **kwargs)
|
||||
|
||||
|
||||
class YAPKernel(IPythonKernel):
|
||||
class YAPKernel(KernelBase):
|
||||
shell = Instance('IPython.core.interactiveshell.InteractiveShellABC',
|
||||
allow_none=True)
|
||||
shell_class = Type(ZMQInteractiveShell)
|
||||
user_ns = Instance(dict, args=None, allow_none=True)
|
||||
def _user_ns_changed(self, name, old, new):
|
||||
if self.shell is not None:
|
||||
self.shell.user_ns = new
|
||||
self.shell.init_user_ns()
|
||||
|
||||
# A reference to the Python builtin 'raw_input' function.
|
||||
# (i.e., __builtin__.raw_input for Python 2.7, builtins.input for Python 3)
|
||||
_sys_raw_input = Any()
|
||||
_sys_eval_input = Any()
|
||||
|
||||
|
||||
implementation = 'YAP Kernel'
|
||||
implementation_version = '1.0'
|
||||
language = 'text'
|
||||
language_version = '0.1'
|
||||
language_version = '6.3'
|
||||
banner = "YAP-6.3"
|
||||
language_info = {
|
||||
'mimetype': 'text/prolog',
|
||||
@ -41,63 +69,349 @@ class YAPKernel(IPythonKernel):
|
||||
'file_extension': '.yap',
|
||||
}
|
||||
|
||||
def init_yap(self, **kwargs):
|
||||
# Signal handlers are inherited by
|
||||
# forked processes,
|
||||
# and we can't easily
|
||||
# reset it from the subprocess. Since kernelapp
|
||||
# ignores SIGINT except in
|
||||
# message handlers, we need to temporarily
|
||||
# reset the SIGINT handler
|
||||
# here so that yap and its children are interruptible.
|
||||
sig = signal.signal(signal.SIGINT, signal.SIG_DFL)
|
||||
try:
|
||||
self.engine = yap.YAPEngine()
|
||||
self.q = None
|
||||
self.engine.query("load_files(library(python), [])").command()
|
||||
self.engine.query("load_files(library(jupyter), [])").command()
|
||||
banner = "YAP6-3 Kernel"
|
||||
self.olines = banner
|
||||
finally:
|
||||
signal.signal(signal.SIGINT, sig)
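A minimal sketch (editorial, not part of this commit) of driving the engine created above directly, using the same query calls yap_run_cell relies on; the goal string is only an example.

import yap

engine = yap.YAPEngine()
q = engine.query("between(1, 3, X)")  # example goal
while q.next():
    for peq in q.namedVarsCopy():
        print(peq[0].text(), "=", peq[1].text())
q.close()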
|
||||
|
||||
#-------------------------------------------------------------------------
|
||||
# Things related to history management
|
||||
#-------------------------------------------------------------------------
|
||||
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
# sp = super(YAPKernel, self)
|
||||
super(YAPKernel, self).__init__(**kwargs)
|
||||
self.init_yap(**kwargs)
|
||||
self.shell.run_cell = self.yap_run_cell
|
||||
# Initialize the InteractiveShell subclass
|
||||
self.shell = self.shell_class.instance(parent=self,
|
||||
profile_dir = self.profile_dir,
|
||||
user_ns = self.user_ns,
|
||||
kernel = self,
|
||||
)
|
||||
self.shell.displayhook.session = self.session
|
||||
self.shell.displayhook.pub_socket = self.iopub_socket
|
||||
self.shell.displayhook.topic = self._topic('execute_result')
|
||||
self.shell.display_pub.session = self.session
|
||||
self.shell.display_pub.pub_socket = self.iopub_socket
|
||||
|
||||
self.comm_manager = CommManager(parent=self, kernel=self)
|
||||
|
||||
# self.shell._last_traceback = None
|
||||
self.shell.configurables.append(self.comm_manager)
|
||||
comm_msg_types = [ 'comm_open', 'comm_msg', 'comm_close' ]
|
||||
for msg_type in comm_msg_types:
|
||||
self.shell_handlers[msg_type] = getattr(self.comm_manager, msg_type)
|
||||
self.yap_shell = YAPInteractiveShell( self )
|
||||
|
||||
|
||||
def get_usage(self):
|
||||
return "This is the YAP kernel."
|
||||
|
||||
def yap_run_cell(self, s, store_history=False, silent=False,
|
||||
shell_futures=True):
|
||||
help_links = List([
|
||||
{
|
||||
'text': "Python",
|
||||
'url': "http://docs.python.org/%i.%i" % sys.version_info[:2],
|
||||
},
|
||||
{
|
||||
'text': "YAP",
|
||||
'url': "http://YAP.org/documentation.html",
|
||||
},
|
||||
{
|
||||
'text': "NumPy",
|
||||
'url': "http://docs.scipy.org/doc/numpy/reference/",
|
||||
},
|
||||
{
|
||||
'text': "SciPy",
|
||||
'url': "http://docs.scipy.org/doc/scipy/reference/",
|
||||
},
|
||||
{
|
||||
'text': "Matplotlib",
|
||||
'url': "http://matplotlib.org/contents.html",
|
||||
},
|
||||
{
|
||||
'text': "SymPy",
|
||||
'url': "http://docs.sympy.org/latest/index.html",
|
||||
},
|
||||
{
|
||||
'text': "pandas",
|
||||
'url': "http://pandas.pydata.org/pandas-docs/stable/",
|
||||
},
|
||||
]).tag(config=True)
|
||||
|
||||
if not self.q:
|
||||
self.q = self.engine.query(s)
|
||||
if self.q.next():
|
||||
myvs = self.q.namedVarsCopy()
|
||||
if myvs:
|
||||
i = 0
|
||||
for peq in myvs:
|
||||
name = peq[0]
|
||||
bind = peq[1]
|
||||
if bind.isVar():
|
||||
var = yap.YAPAtom('$VAR')
|
||||
f = yap.YAPFunctor(var, 1)
|
||||
bind.unify(yap.YAPApplTerm(f, (name)))
|
||||
else:
|
||||
i = bind.numberVars(i, True)
|
||||
print(name.text() + " = " + bind.text())
|
||||
else:
|
||||
print("yes")
|
||||
if self.q.deterministic():
|
||||
self.closeq()
|
||||
return
|
||||
print("No (more) answers")
|
||||
self.closeq()
|
||||
return
|
||||
# Kernel info fields
|
||||
implementation = 'YAP'
|
||||
implementation_version = release.version
|
||||
language_info = {
|
||||
'name': 'python',
|
||||
'version': sys.version.split()[0],
|
||||
'mimetype': 'text/x-python',
|
||||
'codemirror_mode': {
|
||||
'name': 'prolog',
|
||||
'version': sys.version_info[0]
|
||||
},
|
||||
'pygments_lexer': 'prolog',
|
||||
'nbconvert_exporter': 'python',
|
||||
'file_extension': '.yap'
|
||||
}
|
||||
|
||||
def closeq(self):
|
||||
if self.q:
|
||||
self.q.close()
|
||||
self.q = None
|
||||
@property
|
||||
def banner(self):
|
||||
return self.shell.banner
|
||||
|
||||
def start(self):
|
||||
self.shell.exit_now = False
|
||||
super(YAPKernel, self).start()
|
||||
|
||||
def set_parent(self, ident, parent):
|
||||
"""Overridden from parent to tell the display hook and output streams
|
||||
about the parent message.
|
||||
"""
|
||||
super(YAPKernel, self).set_parent(ident, parent)
|
||||
self.shell.set_parent(parent)
|
||||
|
||||
def init_metadata(self, parent):
|
||||
"""Initialize metadata.
|
||||
|
||||
Run at the beginning of each execution request.
|
||||
"""
|
||||
md = super(YAPKernel, self).init_metadata(parent)
|
||||
# FIXME: remove deprecated ipyparallel-specific code
|
||||
# This is required for ipyparallel < 5.0
|
||||
md.update({
|
||||
'dependencies_met' : True,
|
||||
'engine' : self.ident,
|
||||
})
|
||||
return md
|
||||
|
||||
def finish_metadata(self, parent, metadata, reply_content):
|
||||
"""Finish populating metadata.
|
||||
|
||||
Run after completing an execution request.
|
||||
"""
|
||||
# FIXME: remove deprecated ipyparallel-specific code
|
||||
# This is required by ipyparallel < 5.0
|
||||
metadata['status'] = reply_content['status']
|
||||
if reply_content['status'] == 'error' and reply_content['ename'] == 'UnmetDependency':
|
||||
metadata['dependencies_met'] = False
|
||||
|
||||
return metadata
|
||||
|
||||
def _forward_input(self, allow_stdin=False):
|
||||
"""Forward raw_input and getpass to the current frontend.
|
||||
|
||||
via input_request
|
||||
"""
|
||||
self._allow_stdin = allow_stdin
|
||||
|
||||
if PY3:
|
||||
self._sys_raw_input = builtin_mod.input
|
||||
builtin_mod.input = self.raw_input
|
||||
else:
|
||||
self._sys_raw_input = builtin_mod.raw_input
|
||||
self._sys_eval_input = builtin_mod.input
|
||||
builtin_mod.raw_input = self.raw_input
|
||||
builtin_mod.input = lambda prompt='': eval(self.raw_input(prompt))
|
||||
self._save_getpass = getpass.getpass
|
||||
getpass.getpass = self.getpass
|
||||
|
||||
def _restore_input(self):
|
||||
"""Restore raw_input, getpass"""
|
||||
if PY3:
|
||||
builtin_mod.input = self._sys_raw_input
|
||||
else:
|
||||
builtin_mod.raw_input = self._sys_raw_input
|
||||
builtin_mod.input = self._sys_eval_input
|
||||
|
||||
getpass.getpass = self._save_getpass
|
||||
|
||||
@property
|
||||
def execution_count(self):
|
||||
return self.shell.execution_count
|
||||
|
||||
@execution_count.setter
|
||||
def execution_count(self, value):
|
||||
# Ignore the incrementing done by KernelBase, in favour of our shell's
|
||||
# execution counter.
|
||||
pass
|
||||
|
||||
def do_execute(self, code, silent, store_history=True,
|
||||
user_expressions=None, allow_stdin=False):
|
||||
shell = self.shell # we'll need this a lot here
|
||||
|
||||
self._forward_input(allow_stdin)
|
||||
|
||||
reply_content = {}
|
||||
try:
|
||||
res = shell.run_cell(code, store_history=store_history, silent=silent)
|
||||
finally:
|
||||
self._restore_input()
|
||||
|
||||
if res.error_before_exec is not None:
|
||||
err = res.error_before_exec
|
||||
else:
|
||||
err = res.error_in_exec
|
||||
|
||||
if res.success:
|
||||
reply_content[u'status'] = u'ok'
|
||||
elif isinstance(err, KeyboardInterrupt):
|
||||
reply_content[u'status'] = u'aborted'
|
||||
else:
|
||||
reply_content[u'status'] = u'error'
|
||||
|
||||
reply_content.update({
|
||||
# u'traceback': shell._last_traceback or [],
|
||||
u'ename': unicode_type(type(err).__name__),
|
||||
u'evalue': safe_unicode(err),
|
||||
})
|
||||
|
||||
# FIXME: deprecate piece for ipyparallel:
|
||||
e_info = dict(engine_uuid=self.ident, engine_id=self.int_id,
|
||||
method='execute')
|
||||
reply_content['engine_info'] = e_info
|
||||
|
||||
|
||||
# Return the execution counter so clients can display prompts
|
||||
reply_content['execution_count'] = shell.execution_count - 1
|
||||
|
||||
if 'traceback' in reply_content:
|
||||
self.log.info("Exception in execute request:\n%s", '\n'.join(reply_content['traceback']))
|
||||
|
||||
|
||||
# At this point, we can tell whether the main code execution succeeded
|
||||
# or not. If it did, we proceed to evaluate user_expressions
|
||||
if reply_content['status'] == 'ok':
|
||||
reply_content[u'user_expressions'] = \
|
||||
shell.user_expressions(user_expressions or {})
|
||||
else:
|
||||
# If there was an error, don't even try to compute expressions
|
||||
reply_content[u'user_expressions'] = {}
|
||||
|
||||
# Payloads should be retrieved regardless of outcome, so we can both
|
||||
# recover partial output (that could have been generated early in a
|
||||
# block, before an error) and always clear the payload system.
|
||||
reply_content[u'payload'] = shell.payload_manager.read_payload()
|
||||
# Be aggressive about clearing the payload because we don't want
|
||||
# it to sit in memory until the next execute_request comes in.
|
||||
shell.payload_manager.clear_payload()
|
||||
|
||||
return reply_content
|
||||
|
||||
def do_complete(self, code, cursor_pos):
|
||||
# FIXME: YAP completers currently assume single line,
|
||||
# but completion messages give multi-line context
|
||||
# For now, extract line from cell, based on cursor_pos:
|
||||
if cursor_pos is None:
|
||||
cursor_pos = len(code)
|
||||
line, offset = line_at_cursor(code, cursor_pos)
|
||||
line_cursor = cursor_pos - offset
|
||||
|
||||
txt, matches = self.shell.complete('', line, line_cursor)
|
||||
return {'matches' : matches,
|
||||
'cursor_end' : cursor_pos,
|
||||
'cursor_start' : cursor_pos - len(txt),
|
||||
'metadata' : {},
|
||||
'status' : 'ok'}
|
||||
|
||||
def do_inspect(self, code, cursor_pos, detail_level=0):
|
||||
name = token_at_cursor(code, cursor_pos)
|
||||
info = self.shell.object_inspect(name)
|
||||
|
||||
reply_content = {'status' : 'ok'}
|
||||
reply_content['data'] = data = {}
|
||||
reply_content['metadata'] = {}
|
||||
reply_content['found'] = info['found']
|
||||
if info['found']:
|
||||
info_text = self.shell.object_inspect_text(
|
||||
name,
|
||||
detail_level=detail_level,
|
||||
)
|
||||
data['text/plain'] = info_text
|
||||
|
||||
return reply_content
|
||||
|
||||
def do_history(self, hist_access_type, output, raw, session=0, start=0,
|
||||
stop=None, n=None, pattern=None, unique=False):
|
||||
if hist_access_type == 'tail':
|
||||
hist = self.shell.history_manager.get_tail(n, raw=raw, output=output,
|
||||
include_latest=True)
|
||||
|
||||
elif hist_access_type == 'range':
|
||||
hist = self.shell.history_manager.get_range(session, start, stop,
|
||||
raw=raw, output=output)
|
||||
|
||||
elif hist_access_type == 'search':
|
||||
hist = self.shell.history_manager.search(
|
||||
pattern, raw=raw, output=output, n=n, unique=unique)
|
||||
else:
|
||||
hist = []
|
||||
|
||||
return {
|
||||
'status': 'ok',
|
||||
'history' : list(hist),
|
||||
}
|
||||
|
||||
def do_shutdown(self, restart):
|
||||
self.shell.exit_now = True
|
||||
return dict(status='ok', restart=restart)
|
||||
|
||||
def do_is_complete(self, code):
|
||||
status, indent_spaces = self.shell.input_transformer_manager.check_complete(code)
|
||||
r = {'status': status}
|
||||
if status == 'incomplete':
|
||||
r['indent'] = ' ' * indent_spaces
|
||||
return r
|
||||
|
||||
def do_apply(self, content, bufs, msg_id, reply_metadata):
|
||||
from .serialize import serialize_object, unpack_apply_message
|
||||
shell = self.shell
|
||||
try:
|
||||
working = shell.user_ns
|
||||
|
||||
prefix = "_"+str(msg_id).replace("-","")+"_"
|
||||
|
||||
f,args,kwargs = unpack_apply_message(bufs, working, copy=False)
|
||||
|
||||
fname = getattr(f, '__name__', 'f')
|
||||
|
||||
fname = prefix+"f"
|
||||
argname = prefix+"args"
|
||||
kwargname = prefix+"kwargs"
|
||||
resultname = prefix+"result"
|
||||
|
||||
ns = { fname : f, argname : args, kwargname : kwargs , resultname : None }
|
||||
# print ns
|
||||
working.update(ns)
|
||||
code = "%s = %s(*%s,**%s)" % (resultname, fname, argname, kwargname)
|
||||
try:
|
||||
exec(code, shell.user_global_ns, shell.user_ns)
|
||||
result = working.get(resultname)
|
||||
finally:
|
||||
for key in ns:
|
||||
working.pop(key)
|
||||
|
||||
result_buf = serialize_object(result,
|
||||
buffer_threshold=self.session.buffer_threshold,
|
||||
item_threshold=self.session.item_threshold,
|
||||
)
|
||||
|
||||
except BaseException as e:
|
||||
# invoke YAP traceback formatting
|
||||
shell.showtraceback()
|
||||
reply_content = {
|
||||
u'traceback': shell._last_traceback or [],
|
||||
u'ename': unicode_type(type(e).__name__),
|
||||
u'evalue': safe_unicode(e),
|
||||
}
|
||||
# FIXME: deprecate piece for ipyparallel:
|
||||
e_info = dict(engine_uuid=self.ident, engine_id=self.int_id, method='apply')
|
||||
reply_content['engine_info'] = e_info
|
||||
|
||||
self.send_response(self.iopub_socket, u'error', reply_content,
|
||||
ident=self._topic('error'))
|
||||
self.log.info("Exception in apply request:\n%s", '\n'.join(reply_content['traceback']))
|
||||
result_buf = []
|
||||
reply_content['status'] = 'error'
|
||||
else:
|
||||
reply_content = {'status' : 'ok'}
|
||||
|
||||
return reply_content, result_buf
|
||||
|
||||
def do_clear(self):
|
||||
self.shell.reset(False)
|
||||
return dict(status='ok')
|
||||
|
@ -20,16 +20,15 @@ class T(tuple):
|
||||
return str(self.name) + str(self.tuple)
|
||||
|
||||
|
||||
|
||||
def query_prolog(engine, s):
|
||||
|
||||
def answer( q ):
|
||||
def answer(q):
|
||||
try:
|
||||
return q.next()
|
||||
except Exception as e:
|
||||
print( e.args[1] )
|
||||
print(e.args[1])
|
||||
return False
|
||||
|
||||
|
||||
q = engine.query(s)
|
||||
ask = True
|
||||
while answer(q):
|
||||
|
@ -2,7 +2,7 @@
|
||||
%module(directors = "1") yap
|
||||
|
||||
// Language independent exception handler
|
||||
%include exception.i
|
||||
%include stdint.i
|
||||
|
||||
%ignore *::operator[];
|
||||
@ -35,8 +35,17 @@ return *new YAPTerm();
|
||||
}
|
||||
}
|
||||
|
||||
%typemap(in) YAPIntegerTerm {
|
||||
#if PY_MAJOR_VERSION < 3
|
||||
$1 = YAPIntegerTerm(PyInt_AsLong($input));
|
||||
#else
|
||||
$1 = YAPIntegerTerm(PyLong_AsLong($input));
|
||||
#endif
|
||||
}
|
||||
|
||||
|
||||
%typemap(out) YAPIntegerTerm {
|
||||
Term t = $1.term();
|
||||
Term t = $input.term();
|
||||
Int j = IntegerOfTerm(t);
|
||||
#if PY_MAJOR_VERSION < 3
|
||||
return PyInt_FromLong(j);
|
||||
@ -45,10 +54,15 @@ return *new YAPTerm();
|
||||
#endif
|
||||
}
|
||||
|
||||
|
||||
%typemap(in) YAPFloatTerm {
|
||||
$1 = YAPFloatTerm( PyFloat_AsDouble($input) );
|
||||
}
|
||||
|
||||
%typemap(out) YAPFloatTerm {
|
||||
Term t = $1.term();
|
||||
Term t = $input.term();
|
||||
double j = FloatOfTerm(t);
|
||||
return PyFloat_FromDouble(j);
|
||||
$1 = PyFloat_FromDouble(j);
|
||||
}
|
||||
|
||||
// translate well-known names and existing
|
||||
@ -56,13 +70,61 @@ return *new YAPTerm();
|
||||
// Everything else is left wrapped.
|
||||
// as a term
|
||||
%typemap(out) YAPAtomTerm {
|
||||
const char *s = RepAtom(AtomOfTerm($1.term()))->StrOfAE;
|
||||
const char *s = RepAtom(AtomOfTerm($input.term()))->StrOfAE;
|
||||
PyObject *p;
|
||||
if ((p = AtomToPy(s))) {
|
||||
return p;
|
||||
$1 = p;
|
||||
} else {
|
||||
$1 = Py_None;
|
||||
}
|
||||
}
|
||||
|
||||
// translate lists as Python Lists
|
||||
// Python symbols
|
||||
// Everything else is left wrapped.
|
||||
// as a term
|
||||
%typemap(in) YAPListTerm {
|
||||
PyObject *p = $input;
|
||||
Int len = PyTuple_Size(p);
|
||||
if (len == 0) {
|
||||
$1 = YAPListTerm(TermNil);
|
||||
} else {
|
||||
Term t = AbsPair(HR);
|
||||
for (Int i = 0; i < len; i++) {
|
||||
HR += 2;
|
||||
HR[-2] = pythonToYAP(PyTuple_GetItem(p, i));
|
||||
HR[-1] = AbsPair(HR+2);
|
||||
}
|
||||
HR[-1] = TermNil;
|
||||
$1 = YAPListTerm(t);
|
||||
}
|
||||
}
|
||||
|
||||
%typemap(typecheck) YAPListTerm {
|
||||
PyObject *it = $input;
|
||||
$1 = PyTuple_CheckExact(it);
|
||||
}
|
||||
|
||||
%typemap(in) YAPApplTerm {
|
||||
PyObject *p = $input;
const char *o = Py_TYPE(p)->tp_name;
|
||||
Int len = PyTuple_Size(p);
|
||||
|
||||
if (len == 0) {
|
||||
$1 = nullptr;
|
||||
} else {
|
||||
Term t = MkNewApplTerm(Yap_MkFunctor(Yap_LookupAtom(o),len),len);
|
||||
for (Int i = 0; i < len; i++) {
|
||||
RepAppl(t)[i + 1] = pythonToYAP(PyTuple_GetItem(p, i));
|
||||
}
|
||||
$1 = YAPApplTerm(t);
|
||||
}
|
||||
}
|
||||
|
||||
%typemap(typecheck) YAPApplTerm {
|
||||
PyObject *p = $input;
|
||||
$1 = (PyTuple_Check(p) && !PyTuple_CheckExact(p));
|
||||
}
|
||||
|
||||
// translate lists as Python Lists
|
||||
// Python symbols
|
||||
// Everything else is left wrapped.
|
||||
@ -206,7 +268,7 @@ return *new YAPTerm();
|
||||
$action
|
||||
} catch (YAPError e) {
|
||||
yap_error_number en = e.getID();
|
||||
LOCAL_ERROR_Type = YAP_NO_ERROR;
|
||||
LOCAL_Error_TYPE = YAP_NO_ERROR;
|
||||
switch (e.getErrorClass()) {
|
||||
case YAPC_NO_ERROR:
|
||||
break;
|
||||
|