From 97350da9bd93deefe526a9e8a9704a05ad1ead80 Mon Sep 17 00:00:00 2001 From: Vitor Santos Costa Date: Tue, 27 Sep 2016 12:28:54 -0500 Subject: [PATCH] interface work --- CXX/yapdb.hh | 41 +- CXX/yapi.cpp | 178 ++- CXX/yapi.hh | 1 + CXX/yapq.hh | 20 +- CXX/yapt.hh | 28 +- packages/python/py2pl.c | 13 +- packages/python/yap_kernel/__main__.py | 5 +- packages/python/yap_kernel/callcount.yap | 152 -- .../python/yap_kernel/interactiveshell.py | 277 ++-- packages/python/yap_kernel/jupyter.yap | 3 + packages/python/yap_kernel/kernel | 92 -- packages/python/yap_kernel/kernelapp.py | 14 +- packages/python/yap_kernel/kernelspec.py | 582 ++++++-- packages/python/yap_kernel/prolog.js | 1283 ----------------- packages/python/yap_kernel/setup.py.cmake | 1 + packages/python/yap_kernel/x/kernelapp.py | 488 ------- packages/python/yap_kernel/yap_kernel.py | 432 +++++- packages/python/yapex.py | 7 +- packages/swig/yap.i | 76 +- 19 files changed, 1252 insertions(+), 2441 deletions(-) delete mode 100644 packages/python/yap_kernel/callcount.yap delete mode 100644 packages/python/yap_kernel/kernel delete mode 100644 packages/python/yap_kernel/prolog.js delete mode 100644 packages/python/yap_kernel/x/kernelapp.py diff --git a/CXX/yapdb.hh b/CXX/yapdb.hh index ae8008325..e7b796255 100644 --- a/CXX/yapdb.hh +++ b/CXX/yapdb.hh @@ -66,8 +66,8 @@ public: * @brief YAPFunctor represents Prolog functors Name/Arity */ class YAPFunctor : public YAPProp { - friend class YAPApplTerm; - friend class YAPTerm; + friend class YAPApplTerm; + friend class YAPTerm; friend class YAPPredicate; friend class YAPQuery; Functor f; @@ -136,7 +136,7 @@ protected: YAPPredicate(const char *s0, Term &out, Term &names) { CACHE_REGS BACKUP_MACHINE_REGS(); - Term *modp = NULL;; + Term *modp = NULL; out = Yap_StringToTerm(s0, strlen(s0) + 1, &LOCAL_encoding, 1200, &names); // extern char *s0; @@ -144,8 +144,9 @@ protected: // Yap_DebugPlWrite(out); // delete [] ns; if (out == 0L) - throw YAPError(); - ap = getPred(out, modp); + ap = nullptr; + else + ap = getPred(out, modp); RECOVER_MACHINE_REGS(); } @@ -157,6 +158,15 @@ protected: ap = getPred(t, v); } + /// Term constructor for predicates + /// + /// It is just a call to getPred + inline YAPPredicate(YAPTerm t) { + Term *v = nullptr; + Term tt = t.term(); + ap = getPred(tt, v); + } + /// Cast constructor for predicates, /// if we have the implementation data. /// @@ -171,7 +181,7 @@ public: YAPPredicate(YAPFunctor f) { CACHE_REGS ap = RepPredProp(PredPropByFunc(f.f, Yap_CurrentModule())); - }; + } /// Functor constructor for predicates, is given a specific module. /// @@ -228,6 +238,7 @@ public: /// /// we return a positive number. uintptr_t getArity() { return ap->ArityOfPE; } + arity_t arity() { return ap->ArityOfPE; } }; /** @@ -237,18 +248,16 @@ public: */ class YAPPrologPredicate : public YAPPredicate { public: - YAPPrologPredicate(YAPAtom name, uintptr_t arity, - YAPModule module = YAPModule(), bool tabled = false, - bool logical_updates = false, bool local = false, - bool sourced = true, bool discontiguous = false, - bool multiFile = false, bool hidden = false, - bool untraceable = false, bool unspyable = false, - bool meta = false, bool sync = false, - bool quasi_quotable = false, size_t mega_clause = 0); + YAPPrologPredicate(YAPTerm t); + /// add a new clause void *assertClause(YAPTerm clause, bool last = true, - YAPTerm source = YAPTerm(TermNil)); + YAPTerm source = YAPTerm()); + /// retract at least the first clause matching the predicate. 
void *retractClause(YAPTerm skeleton, bool all = false); - void *clause(YAPTerm skeleton, YAPTerm &body); + /// return the Nth clause (if source is available) + // YAPTerm clause(size_t index, YAPPredicate p) { return YAPTerm(); }; + /// return the Nth clause (if source is available) + YAPTerm *nextClause() { return nullptr; }; }; /** diff --git a/CXX/yapi.cpp b/CXX/yapi.cpp index e74278ec0..a363ec84e 100644 --- a/CXX/yapi.cpp +++ b/CXX/yapi.cpp @@ -1,8 +1,8 @@ #define YAP_CPP_INTERFACE 1 -#include #include "yapi.hh" +#include extern "C" { @@ -361,7 +361,7 @@ intptr_t YAPTerm::hashTerm(size_t sz, size_t depth, bool variant) { const char *YAPTerm::text() { CACHE_REGS - size_t length; + size_t length = 0; encoding_t enc = LOCAL_encoding; char *os; @@ -372,7 +372,10 @@ const char *YAPTerm::text() { return 0; } RECOVER_MACHINE_REGS(); - return os; + length = strlen(os) + 1; + char *sm = (char *)malloc(length + 1); + strcpy(sm, os); + return sm; } const char *YAPQuery::text() { return goal.text(); } @@ -518,9 +521,8 @@ bool YAPEngine::call(YAPPredicate ap, YAPTerm ts[]) { YAP_dogoalinfo q; Term terr; jmp_buf q_env; - std::vector vt(arity); for (arity_t i = 0; i < arity; i++) - vt[i] = ts[i].term(); + XREGS[i + 1] = ts[i].term(); q.CurSlot = Yap_StartSlots(); q.p = P; q.cp = CP; @@ -536,7 +538,55 @@ bool YAPEngine::call(YAPPredicate ap, YAPTerm ts[]) { } // don't forget, on success these guys may create slots __android_log_print(ANDROID_LOG_INFO, "YAPDroid", "exec "); - result = (bool)YAP_EnterGoal(ap.asPred(), &vt[0], &q); + result = (bool)YAP_EnterGoal(ap.asPred(), nullptr, &q); + if ((terr = Yap_GetException())) { + YAP_LeaveGoal(false, &q); + throw YAPError(); + } + __android_log_print(ANDROID_LOG_INFO, "YAPDroid", "out %d", result); + + if (!result) { + YAP_LeaveGoal(false, &q); + } else { + YAP_LeaveGoal(FALSE, &q); + } + RECOVER_MACHINE_REGS(); + return result; +} + +bool YAPEngine::call(YAPTerm Yt) { + CACHE_REGS + BACKUP_MACHINE_REGS(); + Term t = Yt.term(), terr, tmod = CurrentModule, *ts = nullptr; + PredEntry *ap = Yap_get_pred(t, tmod, "C++"); + arity_t arity = ap->ArityOfPE; + bool result; + YAP_dogoalinfo q; + jmp_buf q_env; + + if (IsApplTerm(t)) { + ts = RepAppl(t) + 1; + } else { + ts = RepPair(t); + } + for (arity_t i = 0; i < arity; i++) + XREGS[i + 1] = ts[i]; + q.CurSlot = Yap_StartSlots(); + q.p = P; + q.cp = CP; + // make sure this is safe + + if (setjmp(q_env)) { + if ((terr = Yap_PeekException())) { + YAP_LeaveGoal(false, &q); + Yap_CloseHandles(q.CurSlot); + throw YAPError(); + } + return false; + } + // don't forget, on success these guys may create slots + __android_log_print(ANDROID_LOG_INFO, "YAPDroid", "exec "); + result = (bool)YAP_EnterGoal(ap, nullptr, &q); if ((terr = Yap_GetException())) { YAP_LeaveGoal(false, &q); throw YAPError(); @@ -859,77 +909,22 @@ PredEntry *YAPPredicate::getPred(Term &t, Term *&outp) { return ap; } -YAPPrologPredicate::YAPPrologPredicate( - YAPAtom name, arity_t arity, YAPModule mod, bool tabled, - bool logical_updates, bool is_thread_local, bool sourced, - bool discontiguous, bool multiFile, bool hidden, bool untraceable, - bool unspyable, bool meta, bool moduleTransparent, bool quasiQuotable, - size_t mega_clause) - : YAPPredicate(name, arity, mod) { - if (!ap) - return; - if (is_thread_local) { - if (ap->cs.p_code.NOfClauses || tabled) - return; - ap->PredFlags |= (ThreadLocalPredFlag | LogUpdatePredFlag); - } else if (logical_updates) { - if (ap->cs.p_code.NOfClauses || tabled) - return; - ap->PredFlags |= LogUpdatePredFlag; - 
ap->CodeOfPred = FAILCODE; - ap->OpcodeOfPred = FAILCODE->opc; - } - if (tabled) { - ap->PredFlags |= TabledPredFlag; - if (ap->cs.p_code.NOfClauses || tabled) - return; - ap->PredFlags |= TabledPredFlag; - } - if (sourced) { - ap->PredFlags |= SourcePredFlag; - } - if (discontiguous) { - ap->PredFlags |= DiscontiguousPredFlag; - } - if (multiFile) { - ap->PredFlags |= MultiFileFlag; - } - if (hidden) { - ap->PredFlags |= HiddenPredFlag; - } - if (untraceable) { - ap->PredFlags |= SourcePredFlag; - } - if (unspyable) { - ap->PredFlags |= NoSpyPredFlag; - } - if (meta) { - ap->PredFlags |= MetaPredFlag; - } else if (moduleTransparent) { - ap->PredFlags |= ModuleTransparentPredFlag; - } - if (quasiQuotable) { - ap->PredFlags |= QuasiQuotationPredFlag; - } - if (untraceable) { - ap->PredFlags |= SourcePredFlag; - } - if (hidden) { - ap->PredFlags |= SourcePredFlag; - } -} +YAPPrologPredicate::YAPPrologPredicate(YAPTerm t) : YAPPredicate(t) {} -void *YAPPrologPredicate::assertClause(YAPTerm clause, bool last, - YAPTerm source) { +void *YAPPrologPredicate::assertClause(YAPTerm cl, bool last, YAPTerm source) { CACHE_REGS RECOVER_MACHINE_REGS(); - Term tt = clause.gt(); - Term sourcet = source.gt(); + Term tt = cl.gt(); + Term sourcet; + Term ntt = cl.gt(); + if (source.initialized()) + sourcet = source.gt(); + else + sourcet = TermZERO; yamop *codeaddr = Yap_cclause(tt, PP->ArityOfPE, Yap_CurrentModule(), sourcet); /* vsc: give the number of arguments to cclause in case there is overflow */ - Term ntt = clause.gt(); if (LOCAL_ErrorMessage) { RECOVER_MACHINE_REGS(); return 0; @@ -940,46 +935,45 @@ void *YAPPrologPredicate::assertClause(YAPTerm clause, bool last, RECOVER_MACHINE_REGS(); } return tref; - return 0; } + void *YAPPrologPredicate::retractClause(YAPTerm skeleton, bool all) { return 0; } -void *YAPPrologPredicate::clause(YAPTerm skeleton, YAPTerm &body) { return 0; } const char *YAPError::text() { - + char buf[256]; std::string s = ""; if (LOCAL_ActiveError.errorFunction) { s += LOCAL_ActiveError.errorFile; s += ":"; - sprintf(buf, "%ld", (long int)LOCAL_ActiveError.errorLine); - s += buf; + sprintf(buf, "%ld", (long int)LOCAL_ActiveError.errorLine); + s += buf; s += ":0 in C-code"; } if (LOCAL_ActiveError.prologPredLine) { - s += "\n" ; - s+= LOCAL_ActiveError.prologPredFile->StrOfAE ; - s+= ":" ; - sprintf(buf, "%ld", (long int)LOCAL_ActiveError.prologPredLine); - s+= buf; // std::to_string(LOCAL_ActiveError.prologPredLine) ; - // YAPIntegerTerm(LOCAL_ActiveError.prologPredLine).text(); - s+= ":0 " ; - s+= LOCAL_ActiveError.prologPredModule ; - s+= ":" ; - s+= (LOCAL_ActiveError.prologPredName)->StrOfAE ; - s+= "/" ; - sprintf(buf, "%ld", (long int)LOCAL_ActiveError.prologPredArity); - s+= // std::to_string(LOCAL_ActiveError.prologPredArity); - buf; + s += "\n"; + s += LOCAL_ActiveError.prologPredFile->StrOfAE; + s += ":"; + sprintf(buf, "%ld", (long int)LOCAL_ActiveError.prologPredLine); + s += buf; // std::to_string(LOCAL_ActiveError.prologPredLine) ; + // YAPIntegerTerm(LOCAL_ActiveError.prologPredLine).text(); + s += ":0 "; + s += LOCAL_ActiveError.prologPredModule; + s += ":"; + s += (LOCAL_ActiveError.prologPredName)->StrOfAE; + s += "/"; + sprintf(buf, "%ld", (long int)LOCAL_ActiveError.prologPredArity); + s += // std::to_string(LOCAL_ActiveError.prologPredArity); + buf; } s += " error "; if (LOCAL_ActiveError.classAsText != nullptr) s += LOCAL_ActiveError.classAsText->StrOfAE; - s += "."; - s += LOCAL_ActiveError.errorAsText->StrOfAE; - s += ".\n"; + s += "."; + s += 
LOCAL_ActiveError.errorAsText->StrOfAE; + s += ".\n"; if (LOCAL_ActiveError.errorTerm) { Term t = Yap_PopTermFromDB(LOCAL_ActiveError.errorTerm); if (t) { diff --git a/CXX/yapi.hh b/CXX/yapi.hh index dee2d7467..ae0971bbb 100644 --- a/CXX/yapi.hh +++ b/CXX/yapi.hh @@ -70,6 +70,7 @@ extern "C" { extern PyObject *term_to_python(yhandle_t t, bool eval); extern PyObject *deref_term_to_python(yhandle_t t); X_API bool init_python(void); +extern Term pythonToYAP(PyObject *p); extern PyObject *py_Main; diff --git a/CXX/yapq.hh b/CXX/yapq.hh index 2c15cc649..c56bd3a0c 100644 --- a/CXX/yapq.hh +++ b/CXX/yapq.hh @@ -52,10 +52,12 @@ public: /// It is given a string, calls the parser and obtains a Prolog term that /// should be a callable /// goal. - inline YAPQuery(const char *s): YAPPredicate(s, tgoal, names) { - BACKUP_H(); + inline YAPQuery(const char *s) : YAPPredicate(s, tgoal, names) { + BACKUP_H(); __android_log_print(ANDROID_LOG_INFO, "YAPDroid", "got game %d", LOCAL_CurSlot); + if (!ap) + return; goal = YAPTerm(tgoal); vnames = YAPListTerm(names); __android_log_print(ANDROID_LOG_INFO, "YAPDroid", "%s", vnames.text()); @@ -66,10 +68,10 @@ public: /// /// It is given an atom, and a Prolog term that should be a callable /// goal, say `main`, `init`, `live`. - inline YAPQuery(YAPAtom g) : YAPPredicate( g ) { - goal = YAPAtomTerm( g ); - vnames = YAPListTerm(); - openQuery(); + inline YAPQuery(YAPAtom g) : YAPPredicate(g) { + goal = YAPAtomTerm(g); + vnames = YAPListTerm(); + openQuery(); }; /// set flags for query execution, currently only for exception handling @@ -164,9 +166,7 @@ public: _callback->run(s); } /// stop yap - void close() { - Yap_exit(0); - } + void close() { Yap_exit(0); } /// execute the callback with a text argument. bool hasError() { return LOCAL_Error_TYPE != YAP_NO_ERROR; } @@ -178,6 +178,8 @@ public: inline YAPTerm getTerm(yhandle_t h) { return YAPTerm(h); } /// current directory for the engine bool call(YAPPredicate ap, YAPTerm ts[]); + /// current directory for the engine + bool call(YAPTerm t); const char *currentDir() { char dir[1024]; diff --git a/CXX/yapt.hh b/CXX/yapt.hh index 6c4444ab2..0edd8d949 100644 --- a/CXX/yapt.hh +++ b/CXX/yapt.hh @@ -29,7 +29,7 @@ public: /// YAPTerm // do nothing constructor YAPTerm() { mk(MkVarTerm()); } - YAPTerm(yhandle_t i) { t = i; }; + // YAPTerm(yhandle_t i) { t = i; }; /// pointer to term YAPTerm(void *ptr); /// parse string s and construct a term. @@ -37,6 +37,12 @@ public: Term tp; mk(YAP_ReadBuffer(s, &tp)); } + /// construct a term out of an integer (if you know object type use + /// YAPIntegerTerm) + YAPTerm(long int num) { mk(MkIntegerTerm(num)); } + /// construct a term out of an integer (if you know object type use + /// YAPIntegerTerm) + YAPTerm(double num) { mk(MkFloatTerm(num)); } /// parse string s and construct a term. YAPTerm(YAPFunctor f, YAPTerm ts[]); /// extract the tag of a term, after dereferencing. 
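Note for reviewers: the hunks above rework the query interface (YAPQuery parsing, the new YAPEngine::call(YAPTerm), the extra YAPTerm constructors). For orientation, a minimal sketch of how that machinery is driven from the Python side through the SWIG-generated yap module, using only calls that also appear in interactiveshell.py later in this patch; between/3 is just a sample goal and the exact binding names come from packages/swig/yap.i:

    import yap

    engine = yap.YAPEngine()                  # boot a Prolog engine
    try:
        q = engine.query("between(1, 3, X)")  # parse the text into a query
    except SyntaxError:                       # parse failures surface here
        q = None
    if q and q.next():                        # ask for one answer
        for peq in q.namedVarsCopy():         # (variable name, binding) pairs
            print(peq[0].text(), "=", peq[1].text())
        q.close()                             # release the query's slots
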
@@ -118,6 +124,9 @@ public: /// return a handle to the term inline yhandle_t handle() { return t; }; + + /// whether the term actually refers to a live object + inline bool initialized() { return t != 0; }; }; /** @@ -125,8 +134,9 @@ public: */ class YAPVarTerm : public YAPTerm { YAPVarTerm(Term t) { - if (IsVarTerm(t)) + if (IsVarTerm(t)) { mk(t); + } } public: @@ -217,7 +227,18 @@ public: class YAPIntegerTerm : public YAPNumberTerm { public: YAPIntegerTerm(intptr_t i); - intptr_t getInteger() { return IntegerOfTerm(gt()); } + intptr_t getInteger() { return IntegerOfTerm(gt()); }; +}; + +/** + * @brief Floating Point Term + */ + +class YAPFloatTerm : public YAPNumberTerm { +public: + YAPFloatTerm(double dbl) { mk(MkFloatTerm(dbl)); }; + + double getFl() { return FloatOfTerm(gt()); }; }; class YAPListTerm : public YAPTerm { @@ -330,5 +351,4 @@ public: // Getter: outputs the name as a sequence of ISO-LATIN1 codes; const char *text() { return (const char *)AtomOfTerm(gt())->StrOfAE; } }; - #endif /* YAPT_HH */ diff --git a/packages/python/py2pl.c b/packages/python/py2pl.c index 92b0e5116..d55d27405 100644 --- a/packages/python/py2pl.c +++ b/packages/python/py2pl.c @@ -78,11 +78,11 @@ int assign_python(PyObject *root, term_t t, PyObject *e) { right = get_p_int(term_to_python(targ, true), PyObject_Size(lhs)); if (!PySequence_Check(lhs)) return -1; - PL_reset_term_refs(targ); + PL_reset_term_refs(targ); return PySequence_SetSlice(lhs, left, right, e); } else { rhs = term_to_python(trhs, true); - PL_reset_term_refs(targ); + PL_reset_term_refs(targ); return PyObject_SetItem(lhs, rhs, e); } } @@ -254,3 +254,12 @@ foreign_t python_to_term(PyObject *pVal, term_t t) { #endif } } + +X_API YAP_Term pythonToYAP(PyObject *pVal) { + term_t t = PL_new_term_ref(); + if (!python_to_term(pVal, t)) + return 0; + YAP_Term tt = YAP_GetFromSlot(t); + YAP_RecoverSlots(1, t); + return tt; +} diff --git a/packages/python/yap_kernel/__main__.py b/packages/python/yap_kernel/__main__.py index 081358e04..101460b76 100644 --- a/packages/python/yap_kernel/__main__.py +++ b/packages/python/yap_kernel/__main__.py @@ -1,3 +1,4 @@ + if __name__ == '__main__': - from yap_kernel import kernelapp as app - app.launch_new_instance() + import yap_kernel.kernelapp + yap_kernel.kernelapp.launch_new_instance() diff --git a/packages/python/yap_kernel/callcount.yap b/packages/python/yap_kernel/callcount.yap deleted file mode 100644 index f8be21661..000000000 --- a/packages/python/yap_kernel/callcount.yap +++ /dev/null @@ -1,152 +0,0 @@ -/************************************************************************* -* * -* YAP Prolog * -* * -* Yap Prolog was developed at NCCUP - Universidade do Porto * -* * -* Copyright L.Damas, V.S.Costa and Universidade do Porto 1985-1997 * -* * -************************************************************************** -* * -* File: callcount.yap * -* Last rev: 8/2/02 * -* mods: * -* comments: Some profiling predicates available in yap * -* * -*************************************************************************/ - -%% @{ - -/** @defgroup Profiling Profiling Prolog Programs -@ingroup extensions - -YAP includes two profilers. The count profiler keeps information on the -number of times a predicate was called. This information can be used to -detect what are the most commonly called predicates in the program. The -count profiler can be compiled by setting YAP's flag profiling -to `on`. 
The time-profiler is a `gprof` profiler, and counts -how many ticks are being spent on specific predicates, or on other -system functions such as internal data-base accesses or garbage collects. - -The YAP profiling sub-system is currently under -development. Functionality for this sub-system will increase with newer -implementation. - - - */ - -%% @{ - -/** @defgroup Call_Counting Counting Calls -@ingroup Profiling - -Predicates compiled with YAP's flag call_counting set to -`on` update counters on the numbers of calls and of -retries. Counters are actually decreasing counters, so that they can be -used as timers. Three counters are available: - -+ `calls`: number of predicate calls since execution started or since -system was reset; -+ `retries`: number of retries for predicates called since -execution started or since counters were reset; -+ `calls_and_retries`: count both on predicate calls and -retries. - -These counters can be used to find out how many calls a certain -goal takes to execute. They can also be used as timers. - -The code for the call counters piggybacks on the profiling -code. Therefore, activating the call counters also activates the profiling -counters. - -These are the predicates that access and manipulate the call counters. -*/ - -:- system_module( '$_callcount', [call_count/3, - call_count_data/3, - call_count_reset/0], []). - -:- use_system_module( '$_errors', ['$do_error'/2]). - - -/** @pred call_count_data(- _Calls_, - _Retries_, - _CallsAndRetries_) - - -Give current call count data. The first argument gives the current value -for the _Calls_ counter, next the _Retries_ counter, and last -the _CallsAndRetries_ counter. - -*/ -call_count_data(Calls, Retries, Both) :- - '$call_count_info'(Calls, Retries, Both). - -/** @pred call_count_reset - - -Reset call count counters. All timers are also reset. - -*/ -call_count_reset :- - '$call_count_reset'. - -/** @pred call_count(? _CallsMax_, ? _RetriesMax_, ? _CallsAndRetriesMax_) - - -Set call counters as timers. YAP will generate an exception -if one of the instantiated call counters decreases to 0: - -+ _CallsMax_ - - throw the exception `call_counter` when the -counter `calls` reaches 0; - -+ _RetriesMax_ - - throw the exception `retry_counter` when the -counter `retries` reaches 0; - -+ _CallsAndRetriesMax_ - - throw the exception -`call_and_retry_counter` when the counter `calls_and_retries` -reaches 0. - - YAP will ignore counters that are called with unbound arguments. - -Next, we show a simple example of how to use call counters: - -~~~~~{.prolog} - ?- yap_flag(call_counting,on), [-user]. l :- l. end_of_file. yap_flag(call_counting,off). - -yes - -yes - ?- catch((call_count(10000,_,_),l),call_counter,format("limit_exceeded.~n",[])). - -limit_exceeded. - -yes -~~~~~ -Notice that we first compile the looping predicate `l/0` with -call_counting `on`. Next, we catch/3 to handle an -exception when `l/0` performs more than 10000 reductions. - - - */ -call_count(Calls, Retries, Both) :- - '$check_if_call_count_on'(Calls, CallsOn), - '$check_if_call_count_on'(Retries, RetriesOn), - '$check_if_call_count_on'(Both, BothOn), - '$call_count_set'(Calls, CallsOn, Retries, RetriesOn, Both, BothOn). - -'$check_if_call_count_on'(Calls, 1) :- integer(Calls), !. -'$check_if_call_count_on'(Calls, 0) :- var(Calls), !. -'$check_if_call_count_on'(Calls, A) :- - '$do_error'(type_error(integer,Calls),call_count(A)). 
- -%% @} - -/** -@} -*/ - diff --git a/packages/python/yap_kernel/interactiveshell.py b/packages/python/yap_kernel/interactiveshell.py index 93ab9c2e0..2e37043d6 100644 --- a/packages/python/yap_kernel/interactiveshell.py +++ b/packages/python/yap_kernel/interactiveshell.py @@ -1,100 +1,209 @@ -from IPython.core.interactive import ExecutionResult -import yap -import sys -import syslog +# -*- coding: utf-8 -*- +"""YAP Stuff for Main IPython class.""" + +#----------------------------------------------------------------------------- +# Copyright (C) 2001 Janko Hauser +# Copyright (C) 2001-2007 Fernando Perez. +# Copyright (C) 2008-2011 The IPython Development Team +# +# Distributed under the terms of the BSD License. The full license is in +# the file COPYING, distributed as part of this software. +#----------------------------------------------------------------------------- + +from __future__ import absolute_import, print_function + +import __future__ +import abc +import ast +import atexit +import functools import os +import re +import runpy +import signal -def yap_run_cell(self, s, store_history=True, silent=False, - shell_futures=True): +import sys +import tempfile +import traceback +import types +import subprocess +import warnings +import yap +from io import open as io_open - result = ExecutionResult() +from pickleshare import PickleShareDB - if (not s) or s.isspace(): - self.last_execution_succeeded = True - return result +from traitlets.config.configurable import SingletonConfigurable +from IPython.core import oinspect +from IPython.core import magic +from IPython.core import page +from IPython.core import prefilter +from IPython.core import shadowns +from IPython.core import ultratb +from IPython.core import interactiveshell +from IPython.core.alias import Alias, AliasManager +from IPython.core.autocall import ExitAutocall +from IPython.core.builtin_trap import BuiltinTrap +from IPython.core.events import EventManager, available_events +from IPython.core.compilerop import CachingCompiler, check_linecache_ipython +from IPython.core.debugger import Pdb +from IPython.core.display_trap import DisplayTrap +from IPython.core.displayhook import DisplayHook +from IPython.core.displaypub import DisplayPublisher +from IPython.core.error import InputRejected, UsageError +from IPython.core.extensions import ExtensionManager +from IPython.core.formatters import DisplayFormatter +from IPython.core.history import HistoryManager +from IPython.core.inputsplitter import ESC_MAGIC, ESC_MAGIC2 +from IPython.core.logger import Logger +from IPython.core.macro import Macro +from IPython.core.payload import PayloadManager +from IPython.core.prefilter import PrefilterManager +from IPython.core.profiledir import ProfileDir +from IPython.core.usage import default_banner +from IPython.core.interactiveshell import InteractiveShellABC, InteractiveShell, ExecutionResult +from IPython.testing.skipdoctest import skip_doctest_py2, skip_doctest +from IPython.utils import PyColorize +from IPython.utils import io +from IPython.utils import py3compat +from IPython.utils import openpy +from IPython.utils.decorators import undoc +from IPython.utils.io import ask_yes_no +from IPython.utils.ipstruct import Struct +from IPython.paths import get_ipython_dir +from IPython.utils.path import get_home_dir, get_py_filename, ensure_dir_exists +from IPython.utils.process import system, getoutput +from IPython.utils.py3compat import (builtin_mod, unicode_type, string_types, + with_metaclass, iteritems) +from IPython.utils.strdispatch 
import StrDispatch +from IPython.utils.syspathcontext import prepended_to_syspath +from IPython.utils.text import format_screen, LSString, SList, DollarFormatter +from IPython.utils.tempdir import TemporaryDirectory +from traitlets import ( + Integer, Bool, CaselessStrEnum, Enum, List, Dict, Unicode, Instance, Type, + observe, default, +) +from warnings import warn +from logging import error - if store_history: - result.execution_count = self.execution_count +class YAPInteractiveShell: + """An enhanced, interactive shell for YAP.""" - def error_before_exec(value): - result.error_before_exec = value - self.last_execution_succeeded = False - return result + def __init__(self, kernel): + self.yapeng = yap.YAPEngine() + self.q = None + self.shell = kernel.shell + self.shell.run_cell = self.run_cell + + def closeq(self): + if self.q: + self.q.close() + self.q = None + + def run_cell(self, s, store_history=True, silent=False, shell_futures=True): + + """Run a complete IPython cell. + + Parameters + ---------- + raw_cell : str + The code (including IPython code such as %magic functions) to run. + store_history : bool + If True, the raw and translated cell will be stored in IPython's + history. For user code calling back into IPython's machinery, this + should be set to False. + silent : bool + If True, avoid side-effects, such as implicit displayhooks and + and logging. silent=True forces store_history=False. + shell_futures : bool + If True, the code will share future statements with the interactive + shell. It will both be affected by previous __future__ imports, and + any __future__ imports in the code will affect the shell. If False, + __future__ imports are not shared in either direction. + + Returns + ------- + result : :class:`ExecutionResult` + """ + + result = ExecutionResult() + + if (not s) or s.isspace(): + self.shell.last_execution_succeeded = True + return result + + if store_history: + result.execution_count = self.shell.execution_count + + def error_before_exec(value): + result.error_before_exec = value + self.shell.last_execution_succeeded = False + return result - if not self.engine: - try: - self.engine = yap.Engine() - except: - return error_before_exec( sys.exc_info()[1]) + if not self.q: + try: + self.q = self.yapeng.query(s) + except SyntaxError: + return error_before_exec( sys.exc_info()[1]) - if not self.q: - try: - self.q = self.engine.query(s) - except SyntaxError: - return error_before_exec( sys.exc_info()[1]) + cell = s # cell has to exist so it can be stored/logged - cell = s # cell has to exist so it can be stored/logged + # Store raw and processed history + # if not silent: + # self.shell..logger.log(cell, s) - # Store raw and processed history - # if not silent: - # self.logger.log(cell, s) - - try: - f = io.StringIO() - with redirect_stdout(f): - goal = self.q.next() - print('{0}'.format(f.getvalue())) - # Execute the user code - has_raised = False - if goal: - myvs = self.q.namedVarsCopy() - if myvs: - i = 0 - for peq in myvs: - name = peq[0] - bind = peq[1] - if bind.isVar(): - var = yap.YAPAtom('$VAR') - f = yap.YAPFunctor(var, 1) - bind.unify(yap.YAPApplTerm(f, (name))) - else: - i = bind.numberVars(i, True) - print(name.text() + " = " + bind.text()) - else: - print("yes") - if self.q.deterministic(): - self.closeq() - else: - print("No (more) answers") - self.closeq() - except: - result.error_in_exec = sys.exc_info()[1] - # self.showtraceback() - has_raised = True + has_raised = False + try: + #f = io.StringIO() + # with redirect_stdout(f): + run = 
self.q.next() + # print('{0}'.format(f.getvalue())) + # Execute the user code + if run: + myvs = self.q.namedVarsCopy() + if myvs: + i = 0 + for peq in myvs: + name = peq[0] + bind = peq[1] + if bind.isVar(): + var = yap.YAPAtom('$VAR') + f = yap.YAPFunctor(var, 1) + bind.unify(yap.YAPApplTerm(f, (name))) + else: + i = bind.numberVars(i, True) + print(name.text() + " = " + bind.text()) + else: + print("yes") + if self.q.deterministic(): + self.closeq() + else: + print("No (more) answers") + self.closeq() + except: + result.error_in_exec = sys.exc_info()[1] + # self.showtraceback() + has_raised = True + self.closeq() - self.last_execution_succeeded = not has_raised - result.result = self.last_execution_succeeded + self.shell.last_execution_succeeded = not has_raised + result.result = self.shell.last_execution_succeeded + print( self.q ) + # Reset this so later displayed values do not modify the + # ExecutionResult + # self.displayhook.exec_result = None - # Reset this so later displayed values do not modify the - # ExecutionResult - # self.displayhook.exec_result = None + #self.events.trigger('post_execute') + #if not silent: + # self.events.trigger('post_run_cell') - #self.events.trigger('post_execute') - #if not silent: - # self.events.trigger('post_run_cell') + if store_history: + # Write output to the database. Does nothing unless + # history output logging is enabled. + # self.history_manager.store_output(self.execution_count) + # Each cell is a *single* input, regardless of how many lines it has + self.shell.execution_count += 1 - if store_history: - # Write output to the database. Does nothing unless - # history output logging is enabled. - # self.history_manager.store_output(self.execution_count) - # Each cell is a *single* input, regardless of how many lines it has - self.execution_count += 1 - - return result - -def closeq(self): - if self.q: - self.q.close() - self.q = None + return result diff --git a/packages/python/yap_kernel/jupyter.yap b/packages/python/yap_kernel/jupyter.yap index 3f9d71fe3..33e7c8223 100644 --- a/packages/python/yap_kernel/jupyter.yap +++ b/packages/python/yap_kernel/jupyter.yap @@ -1,3 +1,6 @@ + +:- use_module(library(python)). + :- if( current_prolog_flag(apple, true) ). :- putenv( 'LC_CTYPE', 'en_us:UTF-8'). diff --git a/packages/python/yap_kernel/kernel b/packages/python/yap_kernel/kernel deleted file mode 100644 index 2f909311d..000000000 --- a/packages/python/yap_kernel/kernel +++ /dev/null @@ -1,92 +0,0 @@ -from __future__ import print_function - -from metakernel import MetaKernel - -from metakernel import register_ipython_magics -register_ipython_magics() - -class MetaKernelyap(MetaKernel): - implementation = 'MetaKernel YAP' - implementation_version = '1.0' - language = 'text' - language_version = '0.1' - banner = "MetaKernel YAP" - language_info = { - 'mimetype': 'text/plain', - 'name': 'text', - # ------ If different from 'language': - 'codemirror_mode': { - "version": 2, - "name": "prolog" - } - 'pygments_lexer': 'language', - 'version' : "0.0.1", - 'file_extension': '.yap', - 'help_links': MetaKernel.help_links, - } - - def __init__(self, **kwargs): - - MetaKernel.__init__(self, **kwargs) - self._start_yap() - self.qq = None sq - - def _start_yap(self): - # Signal handlers are inherited by forked processes, and we can't easily - # reset it from the subprocess. Since kernelapp ignores SIGINT except in - # message handlers, we need to temporarily reset the SIGINT handler here - # so that yap and its children are interruptible. 
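Note for reviewers: a short sketch of the cell/query life cycle that the run_cell loop in interactiveshell.py above implements. The shell keeps a single open query, each cell pulls the next answer, and the query is closed once it fails or becomes deterministic. Only calls used in interactiveshell.py are assumed (query, next, namedVarsCopy, deterministic, close); member/2 is just a sample goal:

    import yap

    engine = yap.YAPEngine()
    q = engine.query("member(X, [a, b])")
    while True:
        if not q.next():                  # no (more) answers: close and stop
            print("No (more) answers")
            q.close()
            break
        for peq in q.namedVarsCopy():     # print bindings, as run_cell does
            print(peq[0].text(), "=", peq[1].text())
        if q.deterministic():             # last answer: close the query
            q.close()
            break
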
- sig = signal.signal(signal.SIGINT, signal.SIG_DFL) - try: - engine = yap.YAPEngine() - engine.query("load_files(library(python), [])").command() - banner = "YAP {0} Kernel".format(self.engine.version()) - - finally: - signal.signal(signal.SIGINT, sig) - - # Register Yap function to write image data to temporary file - #self.yapwrapper.run_command(image_setup_cmd) - - def get_usage(self): - return "This is the YAP kernel." - - def do_execute_direct(self, code): - if not code.strip(): - return {'status': 'ok', 'execution_count': self.execution_count, - 'payload': [], 'user_expressions': {}} - - interrupted = False - try: - print self.q - if self.q is None: - self.q = self.engine.query(code.rstrip()) - if self.q.next(): - vs = self.q.namedVars() - if vs.length() > 0: - l = [] - while vs.length() > 0: - eq = vs.car() - l.append(' '.join([getArg(1).text(), '=', eq.getArg(2).text()) - vs = vs.cdr() - l.append(';') - o = '\n'.join(l) - else: - return 'yes' - self.q = None - - else: - return 'no' - self.q = None - - - - def repr(self, data): - return repr(data) - -if __name__ == '__main__': - try: - from ipykernel.kernelapp import IPKernelApp - except ImportError: - from IPython.kernel.zmq.kernelapp import IPKernelApp - IPKernelApp.launch_instance(kernel_class=MetaKernelyap) diff --git a/packages/python/yap_kernel/kernelapp.py b/packages/python/yap_kernel/kernelapp.py index b400f5d99..c4eb3e66f 100644 --- a/packages/python/yap_kernel/kernelapp.py +++ b/packages/python/yap_kernel/kernelapp.py @@ -16,6 +16,7 @@ from tornado import ioloop import zmq from zmq.eventloop import ioloop as zmq_ioloop from zmq.eventloop.zmqstream import ZMQStream +from ipykernel.zmqshell import ZMQInteractiveShell from IPython.core.application import ( BaseIPythonApplication, base_flags, base_aliases, catch_config_error @@ -42,7 +43,6 @@ from ipykernel.parentpoller import ParentPollerUnix, ParentPollerWindows from jupyter_client.session import ( Session, session_flags, session_aliases, ) -from ipykernel.zmqshell import ZMQInteractiveShell #----------------------------------------------------------------------------- # Flags and Aliases @@ -96,7 +96,7 @@ To read more about this, see https://github.com/ipython/ipython/issues/2049 # Application class for starting an IPython Kernel #----------------------------------------------------------------------------- -class YAPKernelApp(BaseIPythonApplication, InteractiveShellApp, +class YAPKernelApp(BaseIPythonApplication, InteractiveShellApp, ConnectionFileMixin): name='YAP-kernel' aliases = Dict(kernel_aliases) @@ -107,7 +107,7 @@ class YAPKernelApp(BaseIPythonApplication, InteractiveShellApp, klass='ipykernel.kernelbase.Kernel', help="""The Kernel subclass to be used. - This should allow easy re-use of the IPKernelApp entry point + This should allow easy re-use of the YAPKernelApp entry point to configure and launch kernels other than IPython's own. 
""").tag(config=True) kernel = Any() @@ -117,7 +117,7 @@ class YAPKernelApp(BaseIPythonApplication, InteractiveShellApp, subcommands = { 'install': ( - '.kernelspec.InstallYAPKernelSpecApp', + 'yap_kernel.kernelspec.InstallYAPKernelSpecApp', 'Install the YAP kernel' ), } @@ -209,7 +209,7 @@ class YAPKernelApp(BaseIPythonApplication, InteractiveShellApp, if not self.connection_file: self.connection_file = "kernel-%s.json"%os.getpid() try: - self.connection_file = filefind(self.connection_file, ['.', self.connection_dir]) + self.connection_file = filefind(self.connection_file, ['.',self.connection_dir]) except IOError: self.log.debug("Connection file not found: %s", self.connection_file) # This means I own it, and I'll create it in this directory: @@ -382,7 +382,7 @@ class YAPKernelApp(BaseIPythonApplication, InteractiveShellApp, if not os.environ.get('MPLBACKEND'): os.environ['MPLBACKEND'] = 'module://ipykernel.pylab.backend_inline' - # Provide a wrapper for :meth:`InteractiveShellApp.init_gui_pylab` + # Provide a wrapper for :meth:`YAPInteractiveShellApp.init_gui_pylab` # to ensure that any exception is printed straight to stderr. # Normally _showtraceback associates the reply with an execution, # which means frontends will never draw it, as this exception @@ -478,7 +478,7 @@ class YAPKernelApp(BaseIPythonApplication, InteractiveShellApp, launch_new_instance = YAPKernelApp.launch_instance def main(): - """Run an IPKernel as an application""" + """Run an YAPKernel as an application""" app = YAPKernelApp.instance() app.initialize() app.start() diff --git a/packages/python/yap_kernel/kernelspec.py b/packages/python/yap_kernel/kernelspec.py index 337ab96a1..fd2ac5ca4 100644 --- a/packages/python/yap_kernel/kernelspec.py +++ b/packages/python/yap_kernel/kernelspec.py @@ -1,188 +1,490 @@ -"""The IPython kernel spec for Jupyter""" +"""An Application for launching a kernel""" -# Copyright (c) IPython Development Team. +# Copyright (c) YAP Development Team. # Distributed under the terms of the Modified BSD License. 
from __future__ import print_function -import errno -import json +import atexit import os -import shutil import sys -import tempfile +import signal +import traceback +import logging -from jupyter_client.kernelspec import KernelSpecManager +from tornado import ioloop +import zmq +from zmq.eventloop import ioloop as zmq_ioloop +from zmq.eventloop.zmqstream import ZMQStream -pjoin = os.path.join +from IPython.core.application import ( + BaseIPythonApplication, base_flags, base_aliases, catch_config_error +) -KERNEL_NAME = 'YAP%i' % sys.version_info[0] +from IPython.core.profiledir import ProfileDir +from IPython.core.shellapp import ( + InteractiveShellApp, shell_flags, shell_aliases +) -# path to kernelspec resources -RESOURCES = pjoin(os.path.dirname(__file__), 'resources') +from IPython.utils import io +from ipython_genutils.path import filefind, ensure_dir_exists +from traitlets import ( + Any, Instance, Dict, Unicode, Integer, Bool, DottedObjectName, Type, default +) +from ipython_genutils.importstring import import_item +from jupyter_core.paths import jupyter_runtime_dir +from jupyter_client import write_connection_file +from jupyter_client.connect import ConnectionFileMixin +# local imports +from ipykernel.iostream import IOPubThread +from ipykernel.heartbeat import Heartbeat +from .yap_kernel import YAPKernel +from ipykernel.parentpoller import ParentPollerUnix, ParentPollerWindows +from jupyter_client.session import ( + Session, session_flags, session_aliases, +) +from ipykernel.zmqshell import ZMQInteractiveShell -def make_ipkernel_cmd(mod='ipykernel', executable=None, extra_arguments=None, **kw): - """Build Popen command list for launching an IPython kernel. +#----------------------------------------------------------------------------- +# Flags and Aliases +#----------------------------------------------------------------------------- - Parameters - ---------- - mod : str, optional (default 'ipykernel') - A string of an IPython module whose __main__ starts an IPython kernel +kernel_aliases = dict(base_aliases) +kernel_aliases.update({ + 'ip' : 'YAPKernelApp.ip', + 'hb' : 'YAPKernelApp.hb_port', + 'shell' : 'YAPKernelApp.shell_port', + 'iopub' : 'YAPKernelApp.iopub_port', + 'stdin' : 'YAPKernelApp.stdin_port', + 'control' : 'YAPKernelApp.control_port', + 'f' : 'YAPKernelApp.connection_file', + 'transport': 'YAPKernelApp.transport', +}) - executable : str, optional (default sys.executable) - The Python executable to use for the kernel process. +kernel_flags = dict(base_flags) +kernel_flags.update({ + 'no-stdout' : ( + {'YAPKernelApp' : {'no_stdout' : True}}, + "redirect stdout to the null device"), + 'no-stderr' : ( + {'YAPKernelApp' : {'no_stderr' : True}}, + "redirect stderr to the null device"), + 'pylab' : ( + {'YAPKernelApp' : {'pylab' : 'auto'}}, + """Pre-load matplotlib and numpy for interactive use with + the default matplotlib backend."""), +}) - extra_arguments : list, optional - A list of extra arguments to pass when executing the launch code. 
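Note for reviewers: to make the entry points concrete, `python -m yap_kernel` (see __main__.py earlier in this patch) ends up in kernelapp.main() above, which boils down to the following minimal sketch; normally Jupyter launches this for you from an installed kernelspec:

    from yap_kernel.kernelapp import YAPKernelApp

    app = YAPKernelApp.instance()   # singleton application object
    app.initialize()                # connection file, ZMQ sockets, heartbeat, shell
    app.start()                     # blocks in the tornado IOLoop
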
+# inherit flags&aliases for any IPython shell apps +kernel_aliases.update(shell_aliases) +kernel_flags.update(shell_flags) - Returns - ------- +# inherit flags&aliases for Sessions +kernel_aliases.update(session_aliases) +kernel_flags.update(session_flags) - A Popen command list - """ - if executable is None: - executable = sys.executable - extra_arguments = extra_arguments or [] - arguments = [executable, '-m', mod, '-f', '{connection_file}'] - arguments.extend(extra_arguments) +_ctrl_c_message = """\ +NOTE: When using the `ipython kernel` entry point, Ctrl-C will not work. - return arguments +To exit, you will have to explicitly quit this process, by either sending +"quit" from a client, or using Ctrl-\\ in UNIX-like environments. +To read more about this, see https://github.com/ipython/ipython/issues/2049 -def get_kernel_dict(extra_arguments=None): - """Construct dict for kernel.json""" - return { - 'argv': make_ipkernel_cmd(extra_arguments=extra_arguments), - 'display_name': 'Python %i' % sys.version_info[0], - 'language': 'python', +""" + +#----------------------------------------------------------------------------- +# Application class for starting an YAP Kernel +#----------------------------------------------------------------------------- + +class YAPKernelApp(BaseIPythonApplication, InteractiveShellApp, + ConnectionFileMixin): + name='YAP-kernel' + aliases = Dict(kernel_aliases) + flags = Dict(kernel_flags) + classes = [YAPKernel, ZMQInteractiveShell, ProfileDir, Session] + # the kernel class, as an importstring + kernel_class = Type('yap_kernel.yap_kernel.YAPKernel', + klass='ipykernel.kernelbase.Kernel', + help="""The Kernel subclass to be used. + + This should allow easy re-use of the YAPKernelApp entry point + to configure and launch kernels other than YAP's own. + """).tag(config=True) + kernel = Any() + poller = Any() # don't restrict this even though current pollers are all Threads + heartbeat = Instance(Heartbeat, allow_none=True) + ports = Dict() + + subcommands = { + 'install': ( + 'yap_kernel.kernelspec.InstallYAPKernelSpecApp', + 'Install the YAP kernel' + ), } + # connection info: + connection_dir = Unicode() -def write_kernel_spec(path=None, overrides=None, extra_arguments=None): - """Write a kernel spec directory to `path` + @default('connection_dir') + def _default_connection_dir(self): + return jupyter_runtime_dir() - If `path` is not specified, a temporary directory is created. - If `overrides` is given, the kernelspec JSON is updated before writing. + @property + def abs_connection_file(self): + if os.path.basename(self.connection_file) == self.connection_file: + return os.path.join(self.connection_dir, self.connection_file) + else: + return self.connection_file - The path to the kernelspec is always returned. - """ - if path is None: - path = os.path.join(tempfile.mkdtemp(suffix='_kernels'), KERNEL_NAME) + # streams, etc. 
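Note for reviewers: the kernelspec helpers being removed in this file (make_ipkernel_cmd, write_kernel_spec, install) reduced to writing a kernel.json and handing it to jupyter_client. A hedged sketch of that flow; the kernel name, display name and language fields below are illustrative placeholders, not values fixed by this patch:

    import json, os, sys, tempfile
    from jupyter_client.kernelspec import KernelSpecManager

    path = os.path.join(tempfile.mkdtemp(suffix='_kernels'), 'yap_kernel')
    os.makedirs(path)
    spec = {'argv': [sys.executable, '-m', 'yap_kernel', '-f', '{connection_file}'],
            'display_name': 'YAP',      # placeholder display name
            'language': 'prolog'}       # placeholder language tag
    with open(os.path.join(path, 'kernel.json'), 'w') as f:
        json.dump(spec, f, indent=1)
    dest = KernelSpecManager().install_kernel_spec(path, kernel_name='yap_kernel', user=True)
    print("Installed kernelspec in", dest)
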
+ no_stdout = Bool(False, help="redirect stdout to the null device").tag(config=True) + no_stderr = Bool(False, help="redirect stderr to the null device").tag(config=True) + outstream_class = DottedObjectName('ipykernel.iostream.OutStream', + help="The importstring for the OutStream factory").tag(config=True) + displayhook_class = DottedObjectName('ipykernel.displayhook.ZMQDisplayHook', + help="The importstring for the DisplayHook factory").tag(config=True) - # stage resources - shutil.copytree(RESOURCES, path) - # write kernel.json - kernel_dict = get_kernel_dict(extra_arguments) + # polling + parent_handle = Integer(int(os.environ.get('JPY_PARENT_PID') or 0), + help="""kill this process if its parent dies. On Windows, the argument + specifies the HANDLE of the parent process, otherwise it is simply boolean. + """).tag(config=True) + interrupt = Integer(int(os.environ.get('JPY_INTERRUPT_EVENT') or 0), + help="""ONLY USED ON WINDOWS + Interrupt this process when the parent is signaled. + """).tag(config=True) - if overrides: - kernel_dict.update(overrides) - with open(pjoin(path, 'kernel.json'), 'w') as f: - json.dump(kernel_dict, f, indent=1) + def init_crash_handler(self): + sys.excepthook = self.excepthook - return path + def excepthook(self, etype, evalue, tb): + # write uncaught traceback to 'real' stderr, not zmq-forwarder + traceback.print_exception(etype, evalue, tb, file=sys.__stderr__) + def init_poller(self): + if sys.platform == 'win32': + if self.interrupt or self.parent_handle: + self.poller = ParentPollerWindows(self.interrupt, self.parent_handle) + elif self.parent_handle: + self.poller = ParentPollerUnix() -def install(kernel_spec_manager=None, user=False, kernel_name=KERNEL_NAME, display_name=None, - prefix=None, profile=None): - """Install the IPython kernelspec for Jupyter + def _bind_socket(self, s, port): + iface = '%s://%s' % (self.transport, self.ip) + if self.transport == 'tcp': + if port <= 0: + port = s.bind_to_random_port(iface) + else: + s.bind("tcp://%s:%i" % (self.ip, port)) + elif self.transport == 'ipc': + if port <= 0: + port = 1 + path = "%s-%i" % (self.ip, port) + while os.path.exists(path): + port = port + 1 + path = "%s-%i" % (self.ip, port) + else: + path = "%s-%i" % (self.ip, port) + s.bind("ipc://%s" % path) + return port - Parameters - ---------- + def write_connection_file(self): + """write connection info to JSON file""" + cf = self.abs_connection_file + self.log.debug("Writing connection file: %s", cf) + write_connection_file(cf, ip=self.ip, key=self.session.key, transport=self.transport, + shell_port=self.shell_port, stdin_port=self.stdin_port, hb_port=self.hb_port, + iopub_port=self.iopub_port, control_port=self.control_port) - kernel_spec_manager: KernelSpecManager [optional] - A KernelSpecManager to use for installation. - If none provided, a default instance will be created. - user: bool [default: False] - Whether to do a user-only install, or system-wide. - kernel_name: str, optional - Specify a name for the kernelspec. - This is needed for having multiple IPython kernels for different environments. - display_name: str, optional - Specify the display name for the kernelspec - profile: str, optional - Specify a custom profile to be loaded by the kernel. - prefix: str, optional - Specify an install prefix for the kernelspec. - This is needed to install into a non-default location, such as a conda/virtual-env. 
+ def cleanup_connection_file(self): + cf = self.abs_connection_file + self.log.debug("Cleaning up connection file: %s", cf) + try: + os.remove(cf) + except (IOError, OSError): + pass - Returns - ------- + self.cleanup_ipc_files() - The path where the kernelspec was installed. - """ - if kernel_spec_manager is None: - kernel_spec_manager = KernelSpecManager() + def init_connection_file(self): + if not self.connection_file: + self.connection_file = "kernel-%s.json"%os.getpid() + try: + self.connection_file = filefind(self.connection_file, ['.', self.connection_dir]) + except IOError: + self.log.debug("Connection file not found: %s", self.connection_file) + # This means I own it, and I'll create it in this directory: + ensure_dir_exists(os.path.dirname(self.abs_connection_file), 0o700) + # Also, I will clean it up: + atexit.register(self.cleanup_connection_file) + return + try: + self.load_connection_file() + except Exception: + self.log.error("Failed to load connection file: %r", self.connection_file, exc_info=True) + self.exit(1) - if (kernel_name != KERNEL_NAME) and (display_name is None): - # kernel_name is specified and display_name is not - # default display_name to kernel_name - display_name = kernel_name - overrides = {} - if display_name: - overrides["display_name"] = display_name - if profile: - extra_arguments = ["--profile", profile] - if not display_name: - # add the profile to the default display name - overrides["display_name"] = 'Python %i [profile=%s]' % (sys.version_info[0], profile) - else: - extra_arguments = None - path = write_kernel_spec(overrides=overrides, extra_arguments=extra_arguments) - dest = kernel_spec_manager.install_kernel_spec( - path, kernel_name=kernel_name, user=user, prefix=prefix) - # cleanup afterward - shutil.rmtree(path) - return dest + def init_sockets(self): + # Create a context, a session, and the kernel sockets. + self.log.info("Starting the kernel at pid: %i", os.getpid()) + context = zmq.Context.instance() + # Uncomment this to try closing the context. 
+ # atexit.register(context.term) -# Entrypoint + self.shell_socket = context.socket(zmq.ROUTER) + self.shell_socket.linger = 1000 + self.shell_port = self._bind_socket(self.shell_socket, self.shell_port) + self.log.debug("shell ROUTER Channel on port: %i" % self.shell_port) -from traitlets.config import Application + self.stdin_socket = context.socket(zmq.ROUTER) + self.stdin_socket.linger = 1000 + self.stdin_port = self._bind_socket(self.stdin_socket, self.stdin_port) + self.log.debug("stdin ROUTER Channel on port: %i" % self.stdin_port) + self.control_socket = context.socket(zmq.ROUTER) + self.control_socket.linger = 1000 + self.control_port = self._bind_socket(self.control_socket, self.control_port) + self.log.debug("control ROUTER Channel on port: %i" % self.control_port) -class InstallYAPKernelSpecApp(Application): - """Dummy app wrapping argparse""" - name = 'ipython-kernel-install' + self.init_iopub(context) + def init_iopub(self, context): + self.iopub_socket = context.socket(zmq.PUB) + self.iopub_socket.linger = 1000 + self.iopub_port = self._bind_socket(self.iopub_socket, self.iopub_port) + self.log.debug("iopub PUB Channel on port: %i" % self.iopub_port) + self.configure_tornado_logger() + self.iopub_thread = IOPubThread(self.iopub_socket, pipe=True) + self.iopub_thread.start() + # backward-compat: wrap iopub socket API in background thread + self.iopub_socket = self.iopub_thread.background_socket + + def init_heartbeat(self): + """start the heart beating""" + # heartbeat doesn't share context, because it mustn't be blocked + # by the GIL, which is accessed by libzmq when freeing zero-copy messages + hb_ctx = zmq.Context() + self.heartbeat = Heartbeat(hb_ctx, (self.transport, self.ip, self.hb_port)) + self.hb_port = self.heartbeat.port + self.log.debug("Heartbeat REP Channel on port: %i" % self.hb_port) + self.heartbeat.start() + + def log_connection_info(self): + """display connection info, and store ports""" + basename = os.path.basename(self.connection_file) + if basename == self.connection_file or \ + os.path.dirname(self.connection_file) == self.connection_dir: + # use shortname + tail = basename + else: + tail = self.connection_file + lines = [ + "To connect another client to this kernel, use:", + " --existing %s" % tail, + ] + # log connection info + # info-level, so often not shown. 
+ # frontends should use the %connect_info magic + # to see the connection info + for line in lines: + self.log.info(line) + # also raw print to the terminal if no parent_handle (`ipython kernel`) + # unless log-level is CRITICAL (--quiet) + if not self.parent_handle and self.log_level < logging.CRITICAL: + io.rprint(_ctrl_c_message) + for line in lines: + io.rprint(line) + + self.ports = dict(shell=self.shell_port, iopub=self.iopub_port, + stdin=self.stdin_port, hb=self.hb_port, + control=self.control_port) + + def init_blackhole(self): + """redirects stdout/stderr to devnull if necessary""" + if self.no_stdout or self.no_stderr: + blackhole = open(os.devnull, 'w') + if self.no_stdout: + sys.stdout = sys.__stdout__ = blackhole + if self.no_stderr: + sys.stderr = sys.__stderr__ = blackhole + + def init_io(self): + """Redirect input streams and set a display hook.""" + if self.outstream_class: + outstream_factory = import_item(str(self.outstream_class)) + sys.stdout = outstream_factory(self.session, self.iopub_thread, u'stdout') + sys.stderr = outstream_factory(self.session, self.iopub_thread, u'stderr') + if self.displayhook_class: + displayhook_factory = import_item(str(self.displayhook_class)) + self.displayhook = displayhook_factory(self.session, self.iopub_socket) + sys.displayhook = self.displayhook + + self.patch_io() + + def patch_io(self): + """Patch important libraries that can't handle sys.stdout forwarding""" + try: + import faulthandler + except ImportError: + pass + else: + # Warning: this is a monkeypatch of `faulthandler.enable`, watch for possible + # updates to the upstream API and update accordingly (up-to-date as of Python 3.5): + # https://docs.python.org/3/library/faulthandler.html#faulthandler.enable + + # change default file to __stderr__ from forwarded stderr + faulthandler_enable = faulthandler.enable + def enable(file=sys.__stderr__, all_threads=True, **kwargs): + return faulthandler_enable(file=file, all_threads=all_threads, **kwargs) + + faulthandler.enable = enable + + if hasattr(faulthandler, 'register'): + faulthandler_register = faulthandler.register + def register(signum, file=sys.__stderr__, all_threads=True, chain=False, **kwargs): + return faulthandler_register(signum, file=file, all_threads=all_threads, + chain=chain, **kwargs) + faulthandler.register = register + + def init_signal(self): + signal.signal(signal.SIGINT, signal.SIG_IGN) + + def init_kernel(self): + """Create the Kernel object itself""" + shell_stream = ZMQStream(self.shell_socket) + control_stream = ZMQStream(self.control_socket) + + kernel_factory = self.kernel_class.instance + + kernel = kernel_factory(parent=self, session=self.session, + shell_streams=[shell_stream, control_stream], + iopub_thread=self.iopub_thread, + iopub_socket=self.iopub_socket, + stdin_socket=self.stdin_socket, + log=self.log, + profile_dir=self.profile_dir, + user_ns=self.user_ns, + ) + kernel.record_ports({ + name + '_port': port for name, port in self.ports.items() + }) + self.kernel = kernel + + # Allow the displayhook to get the execution count + self.displayhook.get_execution_count = lambda: kernel.execution_count + + def init_gui_pylab(self): + """Enable GUI event loop integration, taking pylab into account.""" + + # Register inline backend as default + # this is higher priority than matplotlibrc, + # but lower priority than anything else (mpl.use() for instance). 
+ # This only affects matplotlib >= 1.5 + if not os.environ.get('MPLBACKEND'): + os.environ['MPLBACKEND'] = 'module://ipykernel.pylab.backend_inline' + + # Provide a wrapper for :meth:`InteractiveShellApp.init_gui_pylab` + # to ensure that any exception is printed straight to stderr. + # Normally _showtraceback associates the reply with an execution, + # which means frontends will never draw it, as this exception + # is not associated with any execute request. + + shell = self.shell + _showtraceback = shell._showtraceback + try: + # replace error-sending traceback with stderr + def print_tb(etype, evalue, stb): + print ("GUI event loop or pylab initialization failed", + file=sys.stderr) + print (shell.InteractiveTB.stb2text(stb), file=sys.stderr) + shell._showtraceback = print_tb + InteractiveShellApp.init_gui_pylab(self) + finally: + shell._showtraceback = _showtraceback + + def init_shell(self): + self.shell = getattr(self.kernel, 'shell', None) + if self.shell: + self.shell.configurables.append(self) + + def init_extensions(self): + super(YAPKernelApp, self).init_extensions() + # BEGIN HARDCODED WIDGETS HACK + # Ensure ipywidgets extension is loaded if available + extension_man = self.shell.extension_manager + if 'ipywidgets' not in extension_man.loaded: + try: + extension_man.load_extension('ipywidgets') + except ImportError as e: + self.log.debug('ipywidgets package not installed. Widgets will not be available.') + # END HARDCODED WIDGETS HACK + + def configure_tornado_logger(self): + """ Configure the tornado logging.Logger. + + Must set up the tornado logger or else tornado will call + basicConfig for the root logger which makes the root logger + go to the real sys.stderr instead of the capture streams. + This function mimics the setup of logging.basicConfig. + """ + logger = logging.getLogger('tornado') + handler = logging.StreamHandler() + formatter = logging.Formatter(logging.BASIC_FORMAT) + handler.setFormatter(formatter) + logger.addHandler(handler) + + @catch_config_error def initialize(self, argv=None): - if argv is None: - argv = sys.argv[1:] - self.argv = argv + super(YAPKernelApp, self).initialize(argv) + if self.subapp is not None: + return + # register zmq IOLoop with tornado + zmq_ioloop.install() + self.init_blackhole() + self.init_connection_file() + self.init_poller() + self.init_sockets() + self.init_heartbeat() + # writing/displaying connection info must be *after* init_sockets/heartbeat + self.write_connection_file() + # Log connection info after writing connection file, so that the connection + # file is definitely available at the time someone reads the log. + self.log_connection_info() + self.init_io() + self.init_signal() + self.init_kernel() + # shell init steps + self.init_path() + self.init_shell() + if self.shell: + self.init_gui_pylab() + self.init_extensions() + self.init_code() + # flush stdout/stderr, so that anything written to these streams during + # initialization do not get associated with the first execution request + sys.stdout.flush() + sys.stderr.flush() def start(self): - import argparse - parser = argparse.ArgumentParser(prog=self.name, - description="Install the YAP kernel spec.") - parser.add_argument('--user', action='store_true', - help="Install for the current user instead of system-wide") - parser.add_argument('--name', type=str, default=KERNEL_NAME, - help="Specify a name for the kernelspec." 
- " This is needed to have multiple IPython kernels at the same time.") - parser.add_argument('--display-name', type=str, - help="Specify the display name for the kernelspec." - " This is helpful when you have multiple IPython kernels.") - parser.add_argument('--profile', type=str, - help="Specify an IPython profile to load. " - "This can be used to create custom versions of the kernel.") - parser.add_argument('--prefix', type=str, - help="Specify an install prefix for the kernelspec." - " This is needed to install into a non-default location, such as a conda/virtual-env.") - parser.add_argument('--sys-prefix', action='store_const', const=sys.prefix, dest='prefix', - help="Install to Python's sys.prefix." - " Shorthand for --prefix='%s'. For use in conda/virtual-envs." % sys.prefix) - opts = parser.parse_args(self.argv) + if self.subapp is not None: + return self.subapp.start() + if self.poller is not None: + self.poller.start() + self.kernel.start() try: - dest = install(user=opts.user, kernel_name=opts.name, profile=opts.profile, - prefix=opts.prefix, display_name=opts.display_name) - except OSError as e: - if e.errno == errno.EACCES: - print(e, file=sys.stderr) - if opts.user: - print("Perhaps you want `sudo` or `--user`?", file=sys.stderr) - self.exit(1) - raise - print("Installed kernelspec %s in %s" % (opts.name, dest)) + ioloop.IOLoop.instance().start() + except KeyboardInterrupt: + pass + +launch_new_instance = YAPKernelApp.launch_instance + +def main(): + """Run an IPKernel as an application""" + app = YAPKernelApp.instance() + app.initialize() + app.start() if __name__ == '__main__': - InstallYAPKernelSpecApp.launch_instance() + main() diff --git a/packages/python/yap_kernel/prolog.js b/packages/python/yap_kernel/prolog.js deleted file mode 100644 index f8d172812..000000000 --- a/packages/python/yap_kernel/prolog.js +++ /dev/null @@ -1,1283 +0,0 @@ -// CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE - -(function(mod) { - if (typeof exports == "object" && typeof module == "object") // CommonJS - mod(require("../../lib/codemirror")); - else if (typeof define == "function" && define.amd) // AMD - define(["../../lib/codemirror"], mod); - else // Plain browser env - mod(CodeMirror); -})(function(CodeMirror) { - "use strict"; - - CodeMirror.defineMode("prolog", function(cmConfig, modeConfig) { - - function chain(stream, state, f) { - state.tokenize = f; - return f(stream, state); - } - - /******************************* - * CONFIG DATA * - *******************************/ - - var config = { quasiQuotations: false, /* {|Syntax||Quotation|} */ - dicts: false, /* tag{k:v, ...} */ - unicodeEscape: true, /* \uXXXX and \UXXXXXXXX */ - multiLineQuoted: true, /* "...\n..." 
*/ - groupedIntegers: false /* 10 000 or 10_000 */ - }; - - var quoteType = { '"': "string", - "'": "qatom", - "`": "bqstring" - }; - - var isSingleEscChar = /[abref\\'"nrtsv]/; - var isOctalDigit = /[0-7]/; - var isHexDigit = /[0-9a-fA-F]/; - - var isSymbolChar = /[-#$&*+./:<=>?@\\^~]/; /* Prolog glueing symbols chars */ - var isSoloChar = /[[\]{}(),;|!]/; /* Prolog solo chars */ - var isNeck = /^(:-|-->)$/; - var isControlOp = /^(,|;|->|\*->|\\+|\|)$/; - - - /******************************* - * CHARACTER ESCAPES * - *******************************/ - - function readDigits(stream, re, count) { - if ( count > 0 ) { - while( count-- > 0 ) { - if ( !re.test(stream.next()) ) - return false; - } - } else { - while ( re.test(stream.peek()) ) - stream.next(); - } - return true; - } - - function readEsc(stream) { - var next = stream.next(); - if ( isSingleEscChar.test(next) ) - return true; - switch( next ) - { case "u": - if ( config.unicodeEscape ) - return readDigits(stream, isHexDigit, 4); /* SWI */ - return false; - case "U": - if ( config.unicodeEscape ) - return readDigits(stream, isHexDigit, 8); /* SWI */ - return false; - case null: return true; /* end of line */ - case "c": stream.eatSpace(); return true; - case "x": return readDigits(stream, isHexDigit, 2); - } - if ( isOctalDigit.test(next) ) { - if ( !readDigits(stream, isOctalDigit, -1) ) - return false; - if ( stream.peek() == "\\" ) /* SWI: optional closing \ */ - stream.next(); - return true; - } - return false; - } - - function nextUntilUnescaped(stream, state, end) { - var next; - while ((next = stream.next()) != null) { - if ( next == end && end != stream.peek() ) - { state.nesting.pop(); - return false; - } - if ( next == "\\" ) - { if ( !readEsc(stream) ) - return false; - } - } - return config.multiLineQuoted; - } - - /******************************* - * CONTEXT NESTING * - *******************************/ - - function nesting(state) { - return state.nesting.slice(-1)[0]; - } - - /* Called on every non-comment token */ - function setArg1(state) { - var nest = nesting(state); - if ( nest ) { - if ( nest.arg == 0 ) /* nested in a compound */ - nest.arg = 1; - else if ( nest.type == "control" ) - state.goalStart = false; - } else - state.goalStart = false; - } - - function setArgAlignment(state) { - var nest = nesting(state); - if ( nest && !nest.alignment && nest.arg != undefined ) { - if ( nest.arg == 0 ) - nest.alignment = nest.leftCol ? nest.leftCol+4 : nest.column+4; - else - nest.alignment = nest.column+1; - } - } - - function nextArg(state) { - var nest = nesting(state); - if ( nest ) { - if ( nest.arg ) /* nested in a compound */ - nest.arg++; - else if ( nest.type == "control" ) - state.goalStart = true; /* FIXME: also needed for ; and -> */ - } else - state.goalStart = true; - } - - function isControl(state) { /* our terms are goals */ - var nest = nesting(state); - if ( nest ) { - if ( nest.type == "control" ) { - return true; - } - return false; - } else - return state.inBody; - } - - // Used as scratch variables to communicate multiple values without - // consing up tons of objects. 
- var type, content; - function ret(tp, style, cont) { - type = tp; content = cont; - return style; - } - - function peekSpace(stream) { /* TBD: handle block comment as space */ - if ( stream.eol() || - /[\s%]/.test(stream.peek()) ) - return true; - return false; - } - - - /******************************* - * SUB TOKENISERS * - *******************************/ - - function plTokenBase(stream, state) { - var ch = stream.next(); - - if ( ch == "(" ) { - if ( state.lastType == "functor" ) { - state.nesting.push({ functor: state.functorName, - column: stream.column(), - leftCol: state.functorColumn, - arg: 0 - }); - delete state.functorName; - delete state.functorColumn; - } else { - state.nesting.push({ type: "control", - closeColumn: stream.column(), - alignment: stream.column()+4 - }); - } - return ret("solo", null, "("); - } - - if ( ch == "{" && state.lastType == "tag" ) { - state.nesting.push({ tag: state.tagName, - column: stream.column(), - leftCol: state.tagColumn, - arg: 0 - }); - delete state.tagName; - delete state.tagColumn; - return ret("dict_open", null); - } - - if ( ch == "/" && stream.eat("*") ) - return chain(stream, state, plTokenComment); - - if ( ch == "%" ) { - stream.skipToEnd(); - return ret("comment", "comment"); - } - - setArg1(state); - - if ( isSoloChar.test(ch) ) { - switch ( ch ) - { case ")": - state.nesting.pop(); - break; - case "]": - state.nesting.pop(); - return ret("list_close", null); - case "}": - { var nest = nesting(state); - var type = (nest && nest.tag) ? "dict_close" : "brace_term_close"; - - state.nesting.pop(); - return ret(type, null); - } - case ",": - if ( stream.eol() ) - state.commaAtEOL = true; - nextArg(state); - /*FALLTHROUGH*/ - case ";": - if ( isControl(state) ) - state.goalStart = true; - break; - case "[": - state.nesting.push({ type: "list", - closeColumn: stream.column(), - alignment: stream.column()+2 - }); - return ret("list_open", null); - break; - case "{": - if ( config.quasiQuotations && stream.eat("|") ) { - state.nesting.push({ type: "quasi-quotation", - alignment: stream.column()+1 - }); - return ret("qq_open", "qq_open"); - } else { - state.nesting.push({ type: "curly", - closeColumn: stream.column(), - alignment: stream.column()+2 - }); - return ret("brace_term_open", null); - } - break; - case "|": - if ( config.quasiQuotations ) { - if ( stream.eat("|") ) { - state.tokenize = plTokenQuasiQuotation; - return ret("qq_sep", "qq_sep"); - } else if ( stream.eat("}") ) { - state.nesting.pop(); - return ret("qq_close", "qq_close"); - } - } - if ( isControl(state) ) - state.goalStart = true; - break; - } - return ret("solo", null, ch); - } - - if (ch == '"' || ch == "'" || ch == "`") - { state.nesting.push({ type: "quoted", - alignment: stream.column()+1 - }); - return chain(stream, state, plTokenString(ch)); - } - - if ( ch == "0" ) { - if ( stream.eat(/x/i)) { - stream.eatWhile(/[\da-f]/i); - return ret("number", "number"); - } - if ( stream.eat(/o/i)) { - stream.eatWhile(/[0-7]/i); - return ret("number", "number"); - } - if ( stream.eat(/'/) ) { /* 0' */ - var next = stream.next(); - if ( next == "\\" ) { - if ( !readEsc(stream) ) - return ret("error", "error"); - } - return ret("code", "code"); - } - } - - if ( /\d/.test(ch) || /[+-]/.test(ch) && stream.eat(/\d/)) { - if ( config.groupedIntegers ) - stream.match(/^\d*((_|\s+)\d+)*(?:\.\d+)?(?:[eE][+\-]?\d+)?/); - else - stream.match(/^\d*(?:\.\d+)?(?:[eE][+\-]?\d+)?/); - return ret(ch == "-" ? "neg-number" : - ch == "+" ? 
"pos-number" : - "number"); - } - - if ( isSymbolChar.test(ch) ) { - stream.eatWhile(isSymbolChar); - var atom = stream.current(); - if ( atom == "." && peekSpace(stream) ) { - if ( nesting(state) ) { - return ret("fullstop", "error", atom); - } else { - } return ret("fullstop", "fullstop", atom); - } else if ( isNeck.test(atom) ) { - return ret("neck", "neck", atom); - } else if ( isControl(state) && isControlOp.test(atom) ) { - state.goalStart = true; - return ret("symbol", "operator", atom); - } else - return ret("symbol", "operator", atom); - } - - stream.eatWhile(/[\w_]/); - var word = stream.current(); - if ( stream.peek() == "{" && config.dicts ) { - state.tagName = word; /* tmp state extension */ - state.tagColumn = stream.column(); - return ret("tag", "tag", word); - } else if ( ch == "_" ) { - if ( word.length == 1 ) { - return ret("var", "anon", word); - } else { - var sec = word.charAt(1); - if ( sec == sec.toUpperCase() ) - return ret("var", "var-2", word); - } - return ret("var", "var", word); - } else if ( ch == ch.toUpperCase() ) { - return ret("var", "var", word); - } else if ( stream.peek() == "(" ) { - state.functorName = word; /* tmp state extension */ - state.functorColumn = stream.column(); - return ret("functor", "functor", word); - } else - return ret("atom", "atom", word); - } - - function plTokenString(quote) { - return function(stream, state) { - if (!nextUntilUnescaped(stream, state, quote)) { - state.tokenize = plTokenBase; - if ( stream.peek() == "(" ) { /* 'quoted functor'() */ - var word = stream.current(); - state.functorName = word; /* tmp state extension */ - return ret("functor", "functor", word); - } - if ( stream.peek() == "{" && config.dicts ) { /* 'quoted tag'{} */ - var word = stream.current(); - state.tagName = word; /* tmp state extension */ - return ret("tag", "tag", word); - } - } - return ret(quoteType[quote], quoteType[quote]); - }; - } - - function plTokenQuasiQuotation(stream, state) { - var maybeEnd = false, ch; - while (ch = stream.next()) { - if (ch == "}" && maybeEnd) { - state.tokenize = plTokenBase; - stream.backUp(2); - break; - } - maybeEnd = (ch == "|"); - } - return ret("qq_content", "qq_content"); - } - - function plTokenComment(stream, state) { - var maybeEnd = false, ch; - while (ch = stream.next()) { - if (ch == "/" && maybeEnd) { - state.tokenize = plTokenBase; - break; - } - maybeEnd = (ch == "*"); - } - return ret("comment", "comment"); - } - - - // /******************************* - // * ACTIVE KEYS * - // *******************************/ - - // /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // Support if-then-else layout like this: - - // goal :- - // ( Condition - // -> IfTrue - // ; IfFalse - // ). - // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */ - - - // CodeMirror.commands.prologStartIfThenElse = function(cm) { - // var start = cm.getCursor("start"); - // var token = cm.getTokenAt(start, true); - - // if ( token.state.goalStart == true ) - // { cm.replaceSelection("( ", "end"); - // return; - // } - - // return CodeMirror.Pass; - // } - - // CodeMirror.commands.prologStartThen = function(cm) { - // var start = cm.getCursor("start"); - // var token = cm.getTokenAt(start, true); - - // /* FIXME: These functions are copied from prolog.js. How - // can we reuse these? 
- // */ - // function nesting(state) { - // var len = state.nesting.length; - // if ( len > 0 ) - // return state.nesting[len-1]; - // return null; - // } - - // function isControl(state) { /* our terms are goals */ - // var nest = nesting(state); - // if ( nest ) { - // if ( nest.type == "control" ) { - // return true; - // } - // return false; - // } else - // return state.inBody; - // } - - // if ( start.ch == token.end && - // token.type == "operator" && - // token.string == "-" && - // isControl(token.state) ) - // { cm.replaceSelection("> ", "end"); - // return; - // } - - // return CodeMirror.Pass; - // } - - // CodeMirror.commands.prologStartElse = function(cm) { - // var start = cm.getCursor("start"); - // var token = cm.getTokenAt(start, true); - - // if ( token.start == 0 && start.ch == token.end && - // !/\S/.test(token.string) ) - // { cm.replaceSelection("; ", "end"); - // return; - // } - - // return CodeMirror.Pass; - // } - - // CodeMirror.defineOption("prologKeys", null, function(cm, val, prev) { - // if (prev && prev != CodeMirror.Init) - // cm.removeKeyMap("prolog"); - // if ( val ) { - // var map = { name: "prolog", - // "'('": "prologStartIfThenElse", - // "'>'": "prologStartThen", - // "';'": "prologStartElse", - // "Ctrl-L": "refreshHighlight" - // }; - // cm.addKeyMap(map); - // } - // }); - - // }); - //Default (SWI-)Prolog operator table. To be used later to enhance the - //offline experience. - - var ops = { "-->": { p:1200, t:"xfx" }, - ":-": [ { p:1200, t:"xfx" }, - { p:1200, t:"fx" } - ], - "?-": { p:1200, t:"fx" }, - - "dynamic": { p:1150, t:"fx" }, - "discontiguous": { p:1150, t:"fx" }, - "initialization": { p:1150, t:"fx" }, - "meta_predicate": { p:1150, t:"fx" }, - "module_transparent": { p:1150, t:"fx" }, - "multifile": { p:1150, t:"fx" }, - "thread_local": { p:1150, t:"fx" }, - "volatile": { p:1150, t:"fx" }, - - ";": { p:1100, t:"xfy" }, - "|": { p:1100, t:"xfy" }, - - "->": { p:1050, t:"xfy" }, - "*->": { p:1050, t:"xfy" }, - - ",": { p:1000, t:"xfy" }, - - "\\+": { p:900, t:"fy" }, - - "~": { p:900, t:"fx" }, - - "<": { p:700, t:"xfx" }, - "=": { p:700, t:"xfx" }, - "=..": { p:700, t:"xfx" }, - "=@=": { p:700, t:"xfx" }, - "=:=": { p:700, t:"xfx" }, - "=<": { p:700, t:"xfx" }, - "==": { p:700, t:"xfx" }, - "=\\=": { p:700, t:"xfx" }, - ">": { p:700, t:"xfx" }, - ">=": { p:700, t:"xfx" }, - "@<": { p:700, t:"xfx" }, - "@=<": { p:700, t:"xfx" }, - "@>": { p:700, t:"xfx" }, - "@>=": { p:700, t:"xfx" }, - "\\=": { p:700, t:"xfx" }, - "\\==": { p:700, t:"xfx" }, - "is": { p:700, t:"xfx" }, - - ":": { p:600, t:"xfy" }, - - "+": [ { p:500, t:"yfx" }, - { p:200, t:"fy" } - ], - "-": [ { p:500, t:"yfx" }, - { p:200, t:"fy" } - ], - "/\\": { p:500, t:"yfx" }, - "\\/": { p:500, t:"yfx" }, - "xor": { p:500, t:"yfx" }, - - "?": { p:500, t:"fx" }, - - "*": { p:400, t:"yfx" }, - "/": { p:400, t:"yfx" }, - "//": { p:400, t:"yfx" }, - "rdiv": { p:400, t:"yfx" }, - "<<": { p:400, t:"yfx" }, - ">>": { p:400, t:"yfx" }, - "mod": { p:400, t:"yfx" }, - "rem": { p:400, t:"yfx" }, - - "**": { p:200, t:"xfx" }, - "^": { p:200, t:"xfy" }, - - "\\": { p:200, t:"fy" } - }; - - var translType = { - "comment": "comment", - "var": "variable-2", /* JavaScript Types */ - "atom": "atom", - "qatom": "atom", - "bqstring": "string", - "symbol": "atom", - "functor": "keyword", - "tag": "tag", - "number": "number", - "string": "string", - "code": "number", - "neg-number": "number", - "pos-number": "number", - "list_open": "bracket", - "list_close": "bracket", - "qq_open": "bracket", - 
"qq_sep": "operator", - "qq_close": "bracket", - "dict_open": "bracket", - "dict_close": "bracket", - "brace_term_open": "bracket", - "brace_term_close": "bracket", - "neck": "keyword", - "fullstop": "keyword" - }; - - var builtins = { - "asserta": "prolog", - "atomic_list_concat": "prolog", - "char_type": "prolog", - "compile_expressions": "prolog", - "compile": "prolog", - "create_prolog_flag": "prolog", - "current_module": "prolog", - "current_op": "prolog", - "del_attrs": "prolog", - "depth_bound_call": "prolog", - "dule": "prolog", - "exo_files": "prolog", - "export_list": "prolog", - "foreign_directory": "prolog", - "garbage_collect_atoms": "prolog", - "garbage_collect": "prolog", - "get_attrs": "prolog", - "hread_signal": "prolog", - "ignore": "prolog", - "incore": "prolog", - "initialization": "prolog", - "int_message": "prolog", - "message_to_string": "prolog", - "module_property": "prolog", - "msort": "prolog", - "mutex_unlock_all": "prolog", - "no_style_check": "prolog", - "nospy": "prolog", - "notrace": "prolog", - "ortray_clause": "prolog", - "otherwise": "prolog", - "predsort": "prolog", - "prolog_initialization": "prolog", - "qend_program": "prolog", - "qsave_file": "prolog", - "recordaifnot": "prolog", - "set_base_module": "prolog", - "sformat": "prolog", - "source_file": "prolog", - "split_path_file": "prolog", - "stream_position": "prolog", - "system_error": "prolog", - "system_module": "prolog", - "t_head": "prolog", - "table_statistics": "prolog", - "tabling_mode": "prolog", - "tabling_statistics": "prolog", - "thread_defaults": "prolog", - "thread_local": "prolog", - "thread_set_defaults": "prolog", - "thread_statistics": "prolog", - "unix": "prolog", - "use_system_module": "prolog", - "user_defined_directive": "prolog", - "version": "prolog", - "C": "prolog", - "abolish_all_tables": "prolog", - "abolish_frozen_choice_points": "prolog", - "abolish_module": "prolog", - "abolish_table": "prolog", - "abolish": "prolog", - "abort": "prolog", - "absolute_file_name": "prolog", - "absolute_file_system_path": "prolog", - "access_file": "prolog", - "access": "prolog", - "acyclic_term": "prolog", - "add_import_module": "prolog", - "add_to_array_element": "prolog", - "add_to_path": "prolog", - "alarm": "prolog", - "all": "prolog", - "always_prompt_user": "prolog", - "arena_size": "prolog", - "arg": "prolog", - "array_element": "prolog", - "array": "prolog", - "assert_static": "prolog", - "asserta_static": "prolog", - "assertz_static": "prolog", - "assertz": "prolog", - "assert": "prolog", - "at_end_of_line": "prolog", - "at_end_of_stream_0": "prolog", - "at_end_of_stream": "prolog", - "at_halt": "prolog", - "atom_chars": "prolog", - "atom_codes": "prolog", - "atom_concat": "prolog", - "atom_length": "prolog", - "atom_number": "prolog", - "atom_string": "prolog", - "atom_to_term": "prolog", - "atomic_concat": "prolog", - "atomic_length": "prolog", - "atomic_list_concat": "prolog", - "atomics_to_string": "prolog", - "atomic": "prolog", - "atom": "prolog", - "attvar": "prolog", - "b_getval": "prolog", - "b_setval": "prolog", - "bagof": "prolog", - "bb_delete": "prolog", - "bb_get": "prolog", - "bb_put": "prolog", - "bb_update": "prolog", - "between": "prolog", - "bootstrap": "prolog", - "break": "prolog", - "call_cleanup": "prolog", - "call_count_data": "prolog", - "call_count_reset": "prolog", - "call_count": "prolog", - "call_residue_vars": "prolog", - "call_residue": "prolog", - "call_shared_object_function": "prolog", - "call_with_args": "prolog", - "callable": "prolog", - 
"call": "prolog", - "catch_ball": "prolog", - "catch": "prolog", - "cd": "prolog", - "cfile_search_path": "prolog", - "char_code": "prolog", - "char_conversion": "prolog", - "char_type": "prolog", - "clause_property": "prolog", - "clause": "prolog", - "close_shared_object": "prolog", - "close_static_array": "prolog", - "close": "prolog", - "code_type": "prolog", - "commons_directory": "prolog", - "commons_library": "prolog", - "compare": "prolog", - "compile_expressions": "prolog", - "compile_predicates": "prolog", - "compile": "prolog", - "compound": "prolog", - "consult_depth": "prolog", - "consult": "prolog", - "context_module": "prolog", - "copy_term_nat": "prolog", - "copy_term": "prolog", - "create_mutable": "prolog", - "create_prolog_flag": "prolog", - "creep_allowed": "prolog", - "current_atom": "prolog", - "current_char_conversion": "prolog", - "current_host": "prolog", - "current_input": "prolog", - "current_key": "prolog", - "current_line_number": "prolog", - "current_module": "prolog", - "current_mutex": "prolog", - "current_op": "prolog", - "current_output": "prolog", - "current_predicate": "prolog", - "current_prolog_flag": "prolog", - "current_reference_count": "prolog", - "current_stream": "prolog", - "current_thread": "prolog", - "db_files": "prolog", - "db_reference": "prolog", - "debugging": "prolog", - "debug": "prolog", - "decrease_reference_count": "prolog", - "del_attrs": "prolog", - "del_attr": "prolog", - "delete_import_module": "prolog", - "depth_bound_call": "prolog", - "dif": "prolog", - "discontiguous": "prolog", - "display": "prolog", - "do_c_built_in": "prolog", - "do_c_built_metacall": "prolog", - "do_not_compile_expressions": "prolog", - "dump_active_goals": "prolog", - "dum": "prolog", - "duplicate_term": "prolog", - "dynamic_predicate": "prolog", - "dynamic_update_array": "prolog", - "dynamic": "prolog", - "eamconsult": "prolog", - "eamtrans": "prolog", - "end_of_file": "prolog", - "ensure_loaded": "prolog", - "eraseall": "prolog", - "erased": "prolog", - "erase": "prolog", - "exists_directory": "prolog", - "exists_file": "prolog", - "exists_source": "prolog", - "exists": "prolog", - "exo_files": "prolog", - "expand_exprs": "prolog", - "expand_expr": "prolog", - "expand_file_name": "prolog", - "expand_goal": "prolog", - "expand_term": "prolog", - "expects_dialect": "prolog", - "export_list": "prolog", - "export_resource": "prolog", - "export": "prolog", - "extend": "prolog", - "fail": "prolog", - "false": "prolog", - "file_base_name": "prolog", - "file_directory_name": "prolog", - "file_exists": "prolog", - "file_name_extension": "prolog", - "file_search_path": "prolog", - "file_size": "prolog", - "fileerrors": "prolog", - "findall": "prolog", - "float": "prolog", - "flush_output": "prolog", - "forall": "prolog", - "foreign_directory": "prolog", - "format": "prolog", - "freeze_choice_point": "prolog", - "freeze": "prolog", - "frozen": "prolog", - "functor": "prolog", - "garbage_collect_atoms": "prolog", - "garbage_collect": "prolog", - "gc": "prolog", - "get0": "prolog", - "get_attr": "prolog", - "get_byte": "prolog", - "get_char": "prolog", - "get_code": "prolog", - "get_depth_limit": "prolog", - "get_mutable": "prolog", - "get_string_code": "prolog", - "get_value": "prolog", - "getcwd": "prolog", - "getenv": "prolog", - "get": "prolog", - "global_trie_statistics": "prolog", - "ground": "prolog", - "grow_heap": "prolog", - "grow_stack": "prolog", - "halt": "prolog", - "heap_space_info": "prolog", - "hide_atom": "prolog", - "hide_predicate": "prolog", - 
"hostname_address": "prolog", - "hread_get_message": "prolog", - "if": "prolog", - "ignore": "prolog", - "import_module": "prolog", - "incore": "prolog", - "increase_reference_count": "prolog", - "init_random_state": "prolog", - "initialization": "prolog", - "instance_property": "prolog", - "instance": "prolog", - "integer": "prolog", - "is_absolute_file_name": "prolog", - "is_list": "prolog", - "is_mutable": "prolog", - "is_tabled": "prolog", - "isinf": "prolog", - "isnan": "prolog", - "is": "prolog", - "key_erased_statistics": "prolog", - "key_statistics": "prolog", - "keysort": "prolog", - "leash": "prolog", - "length": "prolog", - "libraries_directories": "prolog", - "line_count": "prolog", - "listing": "prolog", - "load_absolute_foreign_files": "prolog", - "load_db": "prolog", - "load_files": "prolog", - "load_foreign_files": "prolog", - "log_event": "prolog", - "logsum": "prolog", - "ls_imports": "prolog", - "ls": "prolog", - "make_directory": "prolog", - "make_library_index": "prolog", - "make": "prolog", - "message_queue_create": "prolog", - "message_queue_destroy": "prolog", - "message_queue_property": "prolog", - "message_to_string": "prolog", - "mmapped_array": "prolog", - "module_property": "prolog", - "module_state": "prolog", - "module": "prolog", - "msort": "prolog", - "multifile": "prolog", - "must_be_of_type": "prolog", - "mutex_create": "prolog", - "mutex_property": "prolog", - "mutex_unlock_all": "prolog", - "name": "prolog", - "nb_create": "prolog", - "nb_current": "prolog", - "nb_delete": "prolog", - "nb_getval": "prolog", - "nb_linkarg": "prolog", - "nb_linkval": "prolog", - "nb_set_bit": "prolog", - "nb_set_shared_arg": "prolog", - "nb_set_shared_val": "prolog", - "nb_setarg": "prolog", - "nb_setval": "prolog", - "new_system_module": "prolog", - "nl": "prolog", - "no_source": "prolog", - "no_style_check": "prolog", - "nodebug": "prolog", - "nofileeleerrors": "prolog", - "nogc": "prolog", - "nonvar": "prolog", - "nospyall": "prolog", - "nospy": "prolog", - "notrace": "prolog", - "not": "prolog", - "nth_clause": "prolog", - "nth_instance": "prolog", - "number_atom": "prolog", - "number_chars": "prolog", - "number_codes": "prolog", - "number_string": "prolog", - "numbervars": "prolog", - "number": "prolog", - "on_exception": "prolog", - "on_signal": "prolog", - "once": "prolog", - "opaque": "prolog", - "open_pipe_stream": "prolog", - "open_shared_object": "prolog", - "open": "prolog", - "opt_statistics": "prolog", - "op": "prolog", - "or_statistics": "prolog", - "otherwise": "prolog", - "parallel_findall": "prolog", - "parallel_findfirst": "prolog", - "parallel_once": "prolog", - "parallel": "prolog", - "path": "prolog", - "peek_byte": "prolog", - "peek_char": "prolog", - "peek_code": "prolog", - "peek": "prolog", - "phrase": "prolog", - "plus": "prolog", - "portray_clause": "prolog", - "predicate_erased_statistics": "prolog", - "predicate_property": "prolog", - "predicate_statistics": "prolog", - "predmerge": "prolog", - "predsort": "prolog", - "primitive": "prolog", - "print_message_lines": "prolog", - "print_message": "prolog", - "print": "prolog", - "private": "prolog", - "profalt": "prolog", - "profend": "prolog", - "profile_data": "prolog", - "profile_reset": "prolog", - "profinit": "prolog", - "profoff": "prolog", - "profon": "prolog", - "prolog_current_frame": "prolog", - "prolog_file_name": "prolog", - "prolog_file_type": "prolog", - "prolog_flag_property": "prolog", - "prolog_flag": "prolog", - "prolog_initialization": "prolog", - "prolog_load_context": 
"prolog", - "prolog_to_os_filename": "prolog", - "prolog": "prolog", - "prompt1": "prolog", - "prompt": "prolog", - "put_attrs": "prolog", - "put_attr": "prolog", - "put_byte": "prolog", - "put_char1": "prolog", - "put_char": "prolog", - "put_code": "prolog", - "putenv": "prolog", - "put": "prolog", - "pwd": "prolog", - "qend_program": "prolog", - "qload_file": "prolog", - "qload_module": "prolog", - "qpack_clean_up_to_disjunction": "prolog", - "qsave_file": "prolog", - "qsave_module": "prolog", - "qsave_program": "prolog", - "raise_exception": "prolog", - "rational_term_to_tree": "prolog", - "rational": "prolog", - "read_clause": "prolog", - "read_sig": "prolog", - "read_term_from_atomic": "prolog", - "read_term_from_atom": "prolog", - "read_term_from_string": "prolog", - "read_term": "prolog", - "read": "prolog", - "real_path": "prolog", - "reconsult": "prolog", - "recorda_at": "prolog", - "recordaifnot": "prolog", - "recorda": "prolog", - "recorded": "prolog", - "recordz_at": "prolog", - "recordzifnot": "prolog", - "recordz": "prolog", - "release_random_state": "prolog", - "remove_from_path": "prolog", - "rename": "prolog", - "repeat": "prolog", - "reset_static_array": "prolog", - "reset_total_choicepoints": "prolog", - "resize_static_array": "prolog", - "restore": "prolog", - "retractall": "prolog", - "retract": "prolog", - "rmdir": "prolog", - "same_file": "prolog", - "save_program": "prolog", - "seeing": "prolog", - "seen": "prolog", - "see": "prolog", - "set_base_module": "prolog", - "set_input": "prolog", - "set_output": "prolog", - "set_prolog_flag": "prolog", - "set_random_state": "prolog", - "set_stream_position": "prolog", - "set_stream": "prolog", - "set_value": "prolog", - "setarg": "prolog", - "setenv": "prolog", - "setof": "prolog", - "setup_call_catcher_cleanup": "prolog", - "setup_call_cleanup": "prolog", - "sformat": "prolog", - "show_all_local_tables": "prolog", - "show_all_tables": "prolog", - "show_global_trieshow_tabled_predicates": "prolog", - "show_global_trie": "prolog", - "show_low_level_trace": "prolog", - "show_tabled_predicates": "prolog", - "show_table": "prolog", - "showprofres": "prolog", - "sh": "prolog", - "simple": "prolog", - "skip1": "prolog", - "skip": "prolog", - "socket_accept": "prolog", - "socket_bind": "prolog", - "socket_close": "prolog", - "socket_connect": "prolog", - "socket_listen": "prolog", - "socket": "prolog", - "sort2": "prolog", - "sort": "prolog", - "source_file_property": "prolog", - "source_file": "prolog", - "source_location": "prolog", - "source_mode": "prolog", - "source_module": "prolog", - "source": "prolog", - "split_path_file": "prolog", - "spy": "prolog", - "srandom": "prolog", - "start_low_level_trace": "prolog", - "stash_predicate": "prolog", - "static_array_location": "prolog", - "static_array_properties": "prolog", - "static_array_to_term": "prolog", - "static_array": "prolog", - "statistics": "prolog", - "stop_low_level_trace": "prolog", - "stream_position_data": "prolog", - "stream_position": "prolog", - "stream_property": "prolog", - "stream_select": "prolog", - "string_chars": "prolog", - "string_codes": "prolog", - "string_code": "prolog", - "string_concat": "prolog", - "string_length": "prolog", - "string_number": "prolog", - "string_to_atomic": "prolog", - "string_to_atom": "prolog", - "string_to_list": "prolog", - "string": "prolog", - "strip_module": "prolog", - "style_check": "prolog", - "sub_atom": "prolog", - "sub_string": "prolog", - "subsumes_term": "prolog", - "succ": "prolog", - "sys_debug": "prolog", - 
"system_error": "prolog", - "system_library": "prolog", - "system_module": "prolog", - "system_predicate": "prolog", - "system": "prolog", - "t_body": "prolog", - "t_head": "prolog", - "t_hgoal": "prolog", - "t_hlist": "prolog", - "t_tidy": "prolog", - "tab1": "prolog", - "table_statistics": "prolog", - "table": "prolog", - "tabling_mode": "prolog", - "tabling_statistics": "prolog", - "tab": "prolog", - "telling": "prolog", - "tell": "prolog", - "term_attvars": "prolog", - "term_factorized": "prolog", - "term_to_atom": "prolog", - "term_to_string": "prolog", - "term_variables": "prolog", - "thread_at_exit": "prolog", - "thread_cancel": "prolog", - "thread_create": "prolog", - "thread_defaults": "prolog", - "thread_default": "prolog", - "thread_detach": "prolog", - "thread_exit": "prolog", - "thread_get_message": "prolog", - "thread_join": "prolog", - "thread_local": "prolog", - "thread_peek_message": "prolog", - "thread_property": "prolog", - "thread_self": "prolog", - "thread_send_message": "prolog", - "thread_set_defaults": "prolog", - "thread_set_default": "prolog", - "thread_signal": "prolog", - "thread_sleep": "prolog", - "thread_statistics": "prolog", - "threads": "prolog", - "throw": "prolog", - "time_file64": "prolog", - "time_file": "prolog", - "time": "prolog", - "told": "prolog", - "tolower": "prolog", - "total_choicepoints": "prolog", - "total_erased": "prolog", - "toupper": "prolog", - "trace": "prolog", - "true_file_name": "prolog", - "true": "prolog", - "tthread_peek_message": "prolog", - "ttyget0": "prolog", - "ttyget": "prolog", - "ttynl": "prolog", - "ttyput": "prolog", - "ttyskip": "prolog", - "udi": "prolog", - "unhide_atom": "prolog", - "unify_with_occurs_check": "prolog", - "unix": "prolog", - "unknown": "prolog", - "unload_file": "prolog", - "unload_module": "prolog", - "unnumbervars": "prolog", - "update_array": "prolog", - "update_mutable": "prolog", - "use_module": "prolog", - "use_system_module": "prolog", - "user_defined_directive": "prolog", - "var": "prolog", - "version": "prolog", - "volatile": "prolog", - "wake_choice_point": "prolog", - "when": "prolog", - "with_mutex": "prolog", - "with_output_to": "prolog", - "working_directory": "prolog", - "write_canonical": "prolog", - "write_depth": "prolog", - "write_term": "prolog", - "writeln": "prolog", - "writeq": "prolog", - "write": "prolog", - "yap_flag": "prolog" - }; - - /******************************* - * RETURN OBJECT * - *******************************/ - - return { - startState: function() { - return { - tokenize: plTokenBase, - inBody: false, - goalStart: false, - lastType: null, - nesting: new Array(), /* ([{}]) nesting FIXME: copy this */ - curTerm: null, /* term index in metainfo */ - curToken: null /* token in term */ - }; - }, - - - token: function(stream, state) { - var nest; - - if ( state.curTerm == null && mode - Config.metainfo ) { - state.curTerm = 0; - state.curToken = 0; - } - - if ( stream.sol() ) - delete state.commaAtEOL; - - if ( state.tokenize == plTokenBase && stream.eatSpace() ) { - if ( stream.eol() ) - setArgAlignment(state); - return null; - } - - var style = state.tokenize(stream, state); - - if ( stream.eol() ) - setArgAlignment(state); - - if ( type == "neck" ) { - state.inBody = true; - state.goalStart = true; - } else if ( type == "fullstop" ) { - state.inBody = false; - state.goalStart = false; - } - - state.lastType = type; - - - if ( builtins[state.curToken] == "prolog") - return "builtin"; - if ( ops[state.curToken]) - return "operator"; - return translType[type]; - }, - 
- indent: function(state, textAfter) { - if (state.tokenize == plTokenComment) return CodeMirror.Pass; - - var nest; - if ( (nest=nesting(state)) ) { - if ( nest.closeColumn && !state.commaAtEOL ) - return nest.closeColumn; - return nest.alignment; - } - if ( !state.inBody ) - return 0; - - return 4; - }, - - // theme: "prolog", - - blockCommentStart: "/*", /* continuecomment.js support */ - blockCommentEnd: "*/", - blockCommentContinue: " * ", - lineComment: "%", - }; - - }); - -CodeMirror.defineMIME("text/x-prolog", "prolog"); -}); diff --git a/packages/python/yap_kernel/setup.py.cmake b/packages/python/yap_kernel/setup.py.cmake index bbf609d9b..69418dd25 100644 --- a/packages/python/yap_kernel/setup.py.cmake +++ b/packages/python/yap_kernel/setup.py.cmake @@ -48,6 +48,7 @@ setup_args = dict( license = 'BSD', platforms = "Linux, Mac OS X, Windows", keywords = ['Interactive', 'Interpreter', 'Shell', 'Web'], + data_files=[('share/Yap/js', ['${CMAKE_SOURCE_DIR}/misc/editors/prolog.js'])], classifiers = [ 'Intended Audience :: Developers', 'Intended Audience :: System Administrators', diff --git a/packages/python/yap_kernel/x/kernelapp.py b/packages/python/yap_kernel/x/kernelapp.py deleted file mode 100644 index b400f5d99..000000000 --- a/packages/python/yap_kernel/x/kernelapp.py +++ /dev/null @@ -1,488 +0,0 @@ -"""An Application for launching a kernel""" - -# Copyright (c) IPython Development Team. -# Distributed under the terms of the Modified BSD License. - -from __future__ import print_function - -import atexit -import os -import sys -import signal -import traceback -import logging - -from tornado import ioloop -import zmq -from zmq.eventloop import ioloop as zmq_ioloop -from zmq.eventloop.zmqstream import ZMQStream - -from IPython.core.application import ( - BaseIPythonApplication, base_flags, base_aliases, catch_config_error -) -from IPython.core.profiledir import ProfileDir -from IPython.core.shellapp import ( - InteractiveShellApp, shell_flags, shell_aliases -) -from IPython.utils import io -from ipython_genutils.path import filefind, ensure_dir_exists -from traitlets import ( - Any, Instance, Dict, Unicode, Integer, Bool, DottedObjectName, Type, default -) -from ipython_genutils.importstring import import_item -from jupyter_core.paths import jupyter_runtime_dir -from jupyter_client import write_connection_file -from jupyter_client.connect import ConnectionFileMixin - -# local imports -from ipykernel.iostream import IOPubThread -from ipykernel.heartbeat import Heartbeat -from .yap_kernel import YAPKernel -from ipykernel.parentpoller import ParentPollerUnix, ParentPollerWindows -from jupyter_client.session import ( - Session, session_flags, session_aliases, -) -from ipykernel.zmqshell import ZMQInteractiveShell - -#----------------------------------------------------------------------------- -# Flags and Aliases -#----------------------------------------------------------------------------- - -kernel_aliases = dict(base_aliases) -kernel_aliases.update({ - 'ip' : 'YAPKernelApp.ip', - 'hb' : 'YAPKernelApp.hb_port', - 'shell' : 'YAPKernelApp.shell_port', - 'iopub' : 'YAPKernelApp.iopub_port', - 'stdin' : 'YAPKernelApp.stdin_port', - 'control' : 'YAPKernelApp.control_port', - 'f' : 'YAPKernelApp.connection_file', - 'transport': 'YAPKernelApp.transport', -}) - -kernel_flags = dict(base_flags) -kernel_flags.update({ - 'no-stdout' : ( - {'YAPKernelApp' : {'no_stdout' : True}}, - "redirect stdout to the null device"), - 'no-stderr' : ( - {'YAPKernelApp' : {'no_stderr' : True}}, - "redirect 
stderr to the null device"), - 'pylab' : ( - {'YAPKernelApp' : {'pylab' : 'auto'}}, - """Pre-load matplotlib and numpy for interactive use with - the default matplotlib backend."""), -}) - -# inherit flags&aliases for any IPython shell apps -kernel_aliases.update(shell_aliases) -kernel_flags.update(shell_flags) - -# inherit flags&aliases for Sessions -kernel_aliases.update(session_aliases) -kernel_flags.update(session_flags) - -_ctrl_c_message = """\ -NOTE: When using the `ipython kernel` entry point, Ctrl-C will not work. - -To exit, you will have to explicitly quit this process, by either sending -"quit" from a client, or using Ctrl-\\ in UNIX-like environments. - -To read more about this, see https://github.com/ipython/ipython/issues/2049 - -""" - -#----------------------------------------------------------------------------- -# Application class for starting an IPython Kernel -#----------------------------------------------------------------------------- - -class YAPKernelApp(BaseIPythonApplication, InteractiveShellApp, - ConnectionFileMixin): - name='YAP-kernel' - aliases = Dict(kernel_aliases) - flags = Dict(kernel_flags) - classes = [YAPKernel, ZMQInteractiveShell, ProfileDir, Session] - # the kernel class, as an importstring - kernel_class = Type('yap_kernel.yap_kernel.YAPKernel', - klass='ipykernel.kernelbase.Kernel', - help="""The Kernel subclass to be used. - - This should allow easy re-use of the IPKernelApp entry point - to configure and launch kernels other than IPython's own. - """).tag(config=True) - kernel = Any() - poller = Any() # don't restrict this even though current pollers are all Threads - heartbeat = Instance(Heartbeat, allow_none=True) - ports = Dict() - - subcommands = { - 'install': ( - '.kernelspec.InstallYAPKernelSpecApp', - 'Install the YAP kernel' - ), - } - - # connection info: - connection_dir = Unicode() - - @default('connection_dir') - def _default_connection_dir(self): - return jupyter_runtime_dir() - - @property - def abs_connection_file(self): - if os.path.basename(self.connection_file) == self.connection_file: - return os.path.join(self.connection_dir, self.connection_file) - else: - return self.connection_file - - # streams, etc. - no_stdout = Bool(False, help="redirect stdout to the null device").tag(config=True) - no_stderr = Bool(False, help="redirect stderr to the null device").tag(config=True) - outstream_class = DottedObjectName('ipykernel.iostream.OutStream', - help="The importstring for the OutStream factory").tag(config=True) - displayhook_class = DottedObjectName('ipykernel.displayhook.ZMQDisplayHook', - help="The importstring for the DisplayHook factory").tag(config=True) - - # polling - parent_handle = Integer(int(os.environ.get('JPY_PARENT_PID') or 0), - help="""kill this process if its parent dies. On Windows, the argument - specifies the HANDLE of the parent process, otherwise it is simply boolean. - """).tag(config=True) - interrupt = Integer(int(os.environ.get('JPY_INTERRUPT_EVENT') or 0), - help="""ONLY USED ON WINDOWS - Interrupt this process when the parent is signaled. 
- """).tag(config=True) - - def init_crash_handler(self): - sys.excepthook = self.excepthook - - def excepthook(self, etype, evalue, tb): - # write uncaught traceback to 'real' stderr, not zmq-forwarder - traceback.print_exception(etype, evalue, tb, file=sys.__stderr__) - - def init_poller(self): - if sys.platform == 'win32': - if self.interrupt or self.parent_handle: - self.poller = ParentPollerWindows(self.interrupt, self.parent_handle) - elif self.parent_handle: - self.poller = ParentPollerUnix() - - def _bind_socket(self, s, port): - iface = '%s://%s' % (self.transport, self.ip) - if self.transport == 'tcp': - if port <= 0: - port = s.bind_to_random_port(iface) - else: - s.bind("tcp://%s:%i" % (self.ip, port)) - elif self.transport == 'ipc': - if port <= 0: - port = 1 - path = "%s-%i" % (self.ip, port) - while os.path.exists(path): - port = port + 1 - path = "%s-%i" % (self.ip, port) - else: - path = "%s-%i" % (self.ip, port) - s.bind("ipc://%s" % path) - return port - - def write_connection_file(self): - """write connection info to JSON file""" - cf = self.abs_connection_file - self.log.debug("Writing connection file: %s", cf) - write_connection_file(cf, ip=self.ip, key=self.session.key, transport=self.transport, - shell_port=self.shell_port, stdin_port=self.stdin_port, hb_port=self.hb_port, - iopub_port=self.iopub_port, control_port=self.control_port) - - def cleanup_connection_file(self): - cf = self.abs_connection_file - self.log.debug("Cleaning up connection file: %s", cf) - try: - os.remove(cf) - except (IOError, OSError): - pass - - self.cleanup_ipc_files() - - def init_connection_file(self): - if not self.connection_file: - self.connection_file = "kernel-%s.json"%os.getpid() - try: - self.connection_file = filefind(self.connection_file, ['.', self.connection_dir]) - except IOError: - self.log.debug("Connection file not found: %s", self.connection_file) - # This means I own it, and I'll create it in this directory: - ensure_dir_exists(os.path.dirname(self.abs_connection_file), 0o700) - # Also, I will clean it up: - atexit.register(self.cleanup_connection_file) - return - try: - self.load_connection_file() - except Exception: - self.log.error("Failed to load connection file: %r", self.connection_file, exc_info=True) - self.exit(1) - - def init_sockets(self): - # Create a context, a session, and the kernel sockets. - self.log.info("Starting the kernel at pid: %i", os.getpid()) - context = zmq.Context.instance() - # Uncomment this to try closing the context. 
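
The connection-file plumbing in this hunk (_bind_socket, write_connection_file and
init_connection_file) is what any Jupyter frontend uses to attach to the kernel. A
minimal client-side sketch, assuming jupyter_client is installed and a kernel was
started with `python -m yap_kernel -f kernel-test.json` (file name, goal and timeouts
are illustrative):

    from jupyter_client import BlockingKernelClient

    kc = BlockingKernelClient()
    kc.load_connection_file('kernel-test.json')  # picks up ip, key, transport and ports
    kc.start_channels()
    kc.wait_for_ready(timeout=10)                # round-trips a kernel_info request
    kc.execute('between(1, 3, X)')               # lands on the shell ROUTER socket
    reply = kc.get_shell_msg(timeout=10)
    print(reply['content']['status'])
    kc.stop_channels()
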
- # atexit.register(context.term) - - self.shell_socket = context.socket(zmq.ROUTER) - self.shell_socket.linger = 1000 - self.shell_port = self._bind_socket(self.shell_socket, self.shell_port) - self.log.debug("shell ROUTER Channel on port: %i" % self.shell_port) - - self.stdin_socket = context.socket(zmq.ROUTER) - self.stdin_socket.linger = 1000 - self.stdin_port = self._bind_socket(self.stdin_socket, self.stdin_port) - self.log.debug("stdin ROUTER Channel on port: %i" % self.stdin_port) - - self.control_socket = context.socket(zmq.ROUTER) - self.control_socket.linger = 1000 - self.control_port = self._bind_socket(self.control_socket, self.control_port) - self.log.debug("control ROUTER Channel on port: %i" % self.control_port) - - self.init_iopub(context) - - def init_iopub(self, context): - self.iopub_socket = context.socket(zmq.PUB) - self.iopub_socket.linger = 1000 - self.iopub_port = self._bind_socket(self.iopub_socket, self.iopub_port) - self.log.debug("iopub PUB Channel on port: %i" % self.iopub_port) - self.configure_tornado_logger() - self.iopub_thread = IOPubThread(self.iopub_socket, pipe=True) - self.iopub_thread.start() - # backward-compat: wrap iopub socket API in background thread - self.iopub_socket = self.iopub_thread.background_socket - - def init_heartbeat(self): - """start the heart beating""" - # heartbeat doesn't share context, because it mustn't be blocked - # by the GIL, which is accessed by libzmq when freeing zero-copy messages - hb_ctx = zmq.Context() - self.heartbeat = Heartbeat(hb_ctx, (self.transport, self.ip, self.hb_port)) - self.hb_port = self.heartbeat.port - self.log.debug("Heartbeat REP Channel on port: %i" % self.hb_port) - self.heartbeat.start() - - def log_connection_info(self): - """display connection info, and store ports""" - basename = os.path.basename(self.connection_file) - if basename == self.connection_file or \ - os.path.dirname(self.connection_file) == self.connection_dir: - # use shortname - tail = basename - else: - tail = self.connection_file - lines = [ - "To connect another client to this kernel, use:", - " --existing %s" % tail, - ] - # log connection info - # info-level, so often not shown. 
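
init_heartbeat above starts a simple echo service on its own zmq context, so that
frontends can tell a busy kernel from a dead one. What that check amounts to on the
client side, sketched with plain pyzmq (the address stands in for the
transport/ip/hb_port triple recorded in the connection file):

    import zmq

    ctx = zmq.Context.instance()
    req = ctx.socket(zmq.REQ)
    req.connect('tcp://127.0.0.1:12345')   # transport://ip:hb_port, placeholder values
    req.send(b'ping')
    if req.poll(timeout=3000):             # the Heartbeat thread echoes what it receives
        assert req.recv() == b'ping'       # kernel process is alive
    else:
        print('no heartbeat reply within 3s')
    req.close()
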
- # frontends should use the %connect_info magic - # to see the connection info - for line in lines: - self.log.info(line) - # also raw print to the terminal if no parent_handle (`ipython kernel`) - # unless log-level is CRITICAL (--quiet) - if not self.parent_handle and self.log_level < logging.CRITICAL: - io.rprint(_ctrl_c_message) - for line in lines: - io.rprint(line) - - self.ports = dict(shell=self.shell_port, iopub=self.iopub_port, - stdin=self.stdin_port, hb=self.hb_port, - control=self.control_port) - - def init_blackhole(self): - """redirects stdout/stderr to devnull if necessary""" - if self.no_stdout or self.no_stderr: - blackhole = open(os.devnull, 'w') - if self.no_stdout: - sys.stdout = sys.__stdout__ = blackhole - if self.no_stderr: - sys.stderr = sys.__stderr__ = blackhole - - def init_io(self): - """Redirect input streams and set a display hook.""" - if self.outstream_class: - outstream_factory = import_item(str(self.outstream_class)) - sys.stdout = outstream_factory(self.session, self.iopub_thread, u'stdout') - sys.stderr = outstream_factory(self.session, self.iopub_thread, u'stderr') - if self.displayhook_class: - displayhook_factory = import_item(str(self.displayhook_class)) - self.displayhook = displayhook_factory(self.session, self.iopub_socket) - sys.displayhook = self.displayhook - - self.patch_io() - - def patch_io(self): - """Patch important libraries that can't handle sys.stdout forwarding""" - try: - import faulthandler - except ImportError: - pass - else: - # Warning: this is a monkeypatch of `faulthandler.enable`, watch for possible - # updates to the upstream API and update accordingly (up-to-date as of Python 3.5): - # https://docs.python.org/3/library/faulthandler.html#faulthandler.enable - - # change default file to __stderr__ from forwarded stderr - faulthandler_enable = faulthandler.enable - def enable(file=sys.__stderr__, all_threads=True, **kwargs): - return faulthandler_enable(file=file, all_threads=all_threads, **kwargs) - - faulthandler.enable = enable - - if hasattr(faulthandler, 'register'): - faulthandler_register = faulthandler.register - def register(signum, file=sys.__stderr__, all_threads=True, chain=False, **kwargs): - return faulthandler_register(signum, file=file, all_threads=all_threads, - chain=chain, **kwargs) - faulthandler.register = register - - def init_signal(self): - signal.signal(signal.SIGINT, signal.SIG_IGN) - - def init_kernel(self): - """Create the Kernel object itself""" - shell_stream = ZMQStream(self.shell_socket) - control_stream = ZMQStream(self.control_socket) - - kernel_factory = self.kernel_class.instance - - kernel = kernel_factory(parent=self, session=self.session, - shell_streams=[shell_stream, control_stream], - iopub_thread=self.iopub_thread, - iopub_socket=self.iopub_socket, - stdin_socket=self.stdin_socket, - log=self.log, - profile_dir=self.profile_dir, - user_ns=self.user_ns, - ) - kernel.record_ports({ - name + '_port': port for name, port in self.ports.items() - }) - self.kernel = kernel - - # Allow the displayhook to get the execution count - self.displayhook.get_execution_count = lambda: kernel.execution_count - - def init_gui_pylab(self): - """Enable GUI event loop integration, taking pylab into account.""" - - # Register inline backend as default - # this is higher priority than matplotlibrc, - # but lower priority than anything else (mpl.use() for instance). 
- # This only affects matplotlib >= 1.5 - if not os.environ.get('MPLBACKEND'): - os.environ['MPLBACKEND'] = 'module://ipykernel.pylab.backend_inline' - - # Provide a wrapper for :meth:`InteractiveShellApp.init_gui_pylab` - # to ensure that any exception is printed straight to stderr. - # Normally _showtraceback associates the reply with an execution, - # which means frontends will never draw it, as this exception - # is not associated with any execute request. - - shell = self.shell - _showtraceback = shell._showtraceback - try: - # replace error-sending traceback with stderr - def print_tb(etype, evalue, stb): - print ("GUI event loop or pylab initialization failed", - file=sys.stderr) - print (shell.InteractiveTB.stb2text(stb), file=sys.stderr) - shell._showtraceback = print_tb - InteractiveShellApp.init_gui_pylab(self) - finally: - shell._showtraceback = _showtraceback - - def init_shell(self): - self.shell = getattr(self.kernel, 'shell', None) - if self.shell: - self.shell.configurables.append(self) - - def init_extensions(self): - super(YAPKernelApp, self).init_extensions() - # BEGIN HARDCODED WIDGETS HACK - # Ensure ipywidgets extension is loaded if available - extension_man = self.shell.extension_manager - if 'ipywidgets' not in extension_man.loaded: - try: - extension_man.load_extension('ipywidgets') - except ImportError as e: - self.log.debug('ipywidgets package not installed. Widgets will not be available.') - # END HARDCODED WIDGETS HACK - - def configure_tornado_logger(self): - """ Configure the tornado logging.Logger. - - Must set up the tornado logger or else tornado will call - basicConfig for the root logger which makes the root logger - go to the real sys.stderr instead of the capture streams. - This function mimics the setup of logging.basicConfig. - """ - logger = logging.getLogger('tornado') - handler = logging.StreamHandler() - formatter = logging.Formatter(logging.BASIC_FORMAT) - handler.setFormatter(formatter) - logger.addHandler(handler) - - @catch_config_error - def initialize(self, argv=None): - super(YAPKernelApp, self).initialize(argv) - if self.subapp is not None: - return - # register zmq IOLoop with tornado - zmq_ioloop.install() - self.init_blackhole() - self.init_connection_file() - self.init_poller() - self.init_sockets() - self.init_heartbeat() - # writing/displaying connection info must be *after* init_sockets/heartbeat - self.write_connection_file() - # Log connection info after writing connection file, so that the connection - # file is definitely available at the time someone reads the log. 
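
The initialize()/start() pair above can also be driven programmatically, which is
useful for embedding or testing the kernel; the main() helper further down does the
same thing without an explicit argv. A sketch under the assumption that the app class
is importable as below (the import path and connection-file name are illustrative):

    from yap_kernel.kernelapp import YAPKernelApp   # import path is an assumption

    app = YAPKernelApp.instance()
    app.initialize(['-f', '/tmp/kernel-demo.json'])  # runs the init_* steps in order
    app.start()                                      # blocks in the tornado IOLoop
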
- self.log_connection_info() - self.init_io() - self.init_signal() - self.init_kernel() - # shell init steps - self.init_path() - self.init_shell() - if self.shell: - self.init_gui_pylab() - self.init_extensions() - self.init_code() - # flush stdout/stderr, so that anything written to these streams during - # initialization do not get associated with the first execution request - sys.stdout.flush() - sys.stderr.flush() - - def start(self): - if self.subapp is not None: - return self.subapp.start() - if self.poller is not None: - self.poller.start() - self.kernel.start() - try: - ioloop.IOLoop.instance().start() - except KeyboardInterrupt: - pass - -launch_new_instance = YAPKernelApp.launch_instance - -def main(): - """Run an IPKernel as an application""" - app = YAPKernelApp.instance() - app.initialize() - app.start() - - -if __name__ == '__main__': - main() diff --git a/packages/python/yap_kernel/yap_kernel.py b/packages/python/yap_kernel/yap_kernel.py index 11fb46730..4156545be 100644 --- a/packages/python/yap_kernel/yap_kernel.py +++ b/packages/python/yap_kernel/yap_kernel.py @@ -1,15 +1,28 @@ from __future__ import print_function -from ipykernel.ipkernel import IPythonKernel - -import sys import signal import yap -# import ipdb -# ipdb,set_trace() +import io +import getpass +import sys +import traceback + +from IPython.core import release +from ipython_genutils.py3compat import builtin_mod, PY3, unicode_type, safe_unicode +from IPython.utils.tokenutil import token_at_cursor, line_at_cursor +from traitlets import Instance, Type, Any, List + +from ipykernel.comm import CommManager +from ipykernel.kernelbase import Kernel as KernelBase +from ipykernel.zmqshell import ZMQInteractiveShell +from .interactiveshell import YAPInteractiveShell +from IPython.core.interactiveshell import InteractiveShellABC, InteractiveShell +from contextlib import redirect_stdout + + kernel_json = { - "argv": [sys.executable, + "argv": [sys.executable, "-m", "yap_kernel", "-f", "{connection_file}"], "display_name": " YAP-6.3", @@ -22,11 +35,26 @@ def eprint(*args, **kwargs): print(*args, file=sys.stderr, **kwargs) -class YAPKernel(IPythonKernel): +class YAPKernel(KernelBase): + shell = Instance('IPython.core.interactiveshell.InteractiveShellABC', + allow_none=True) + shell_class = Type(ZMQInteractiveShell ) + user_ns = Instance(dict, args=None, allow_none=True) + def _user_ns_changed(self, name, old, new): + if self.shell is not None: + self.shell.user_ns = new + self.shell.init_user_ns() + + # A reference to the Python builtin 'raw_input' function. + # (i.e., __builtin__.raw_input for Python 2.7, builtins.input for Python 3) + _sys_raw_input = Any() + _sys_eval_input = Any() + + implementation = 'YAP Kernel' implementation_version = '1.0' language = 'text' - language_version = '0.1' + language_version = '6.3' banner = "YAP-6.3" language_info = { 'mimetype': 'text/prolog', @@ -41,63 +69,349 @@ class YAPKernel(IPythonKernel): 'file_extension': '.yap', } - def init_yap(self, **kwargs): - # Signal handlers are inherited by - # forked processes, - # and we can't easily - # reset it from the subprocess. Since kernelapp - # ignores SIGINT except in - # message handlers, we need to temporarily - # reset the SIGINT handler - # here so that yap and its children are interruptible. 
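
kernel_json above is the kernelspec that Jupyter uses to launch this kernel. A hedged
sketch of installing it by hand with jupyter_client (the install subcommand of the
kernel app does this for you; the directory handling and kernel name here are
illustrative):

    import json
    import os
    import tempfile

    from jupyter_client.kernelspec import KernelSpecManager

    spec_dir = tempfile.mkdtemp()
    with open(os.path.join(spec_dir, 'kernel.json'), 'w') as f:
        json.dump(kernel_json, f)            # the dict defined above
    KernelSpecManager().install_kernel_spec(spec_dir, kernel_name='yap_kernel',
                                            user=True)
    # afterwards `jupyter kernelspec list` should show the new entry
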
- sig = signal.signal(signal.SIGINT, signal.SIG_DFL) - try: - self.engine = yap.YAPEngine() - self.q = None - self.engine.query("load_files(library(python), [])").command() - self.engine.query("load_files(library(jupyter), [])").command() - banner = "YAP6-3 Kernel" - self.olines = banner - finally: - signal.signal(signal.SIGINT, sig) + +#------------------------------------------------------------------------- + # Things related to history management + #------------------------------------------------------------------------- + def __init__(self, **kwargs): + # sp = super(YAPKernel, self) super(YAPKernel, self).__init__(**kwargs) - self.init_yap(**kwargs) - self.shell.run_cell = self.yap_run_cell + # Initialize the InteractiveShell subclass + self.shell = self.shell_class.instance(parent=self, + profile_dir = self.profile_dir, + user_ns = self.user_ns, + kernel = self, + ) + self.shell.displayhook.session = self.session + self.shell.displayhook.pub_socket = self.iopub_socket + self.shell.displayhook.topic = self._topic('execute_result') + self.shell.display_pub.session = self.session + self.shell.display_pub.pub_socket = self.iopub_socket + + self.comm_manager = CommManager(parent=self, kernel=self) + +# self.shell._last_traceback = None + self.shell.configurables.append(self.comm_manager) + comm_msg_types = [ 'comm_open', 'comm_msg', 'comm_close' ] + for msg_type in comm_msg_types: + self.shell_handlers[msg_type] = getattr(self.comm_manager, msg_type) + self.yap_shell = YAPInteractiveShell( self ) + def get_usage(self): return "This is the YAP kernel." - def yap_run_cell(self, s, store_history=False, silent=False, - shell_futures=True): + help_links = List([ + { + 'text': "Python", + 'url': "http://docs.python.org/%i.%i" % sys.version_info[:2], + }, + { + 'text': "YAP", + 'url': "http://YAP.org/documentation.html", + }, + { + 'text': "NumPy", + 'url': "http://docs.scipy.org/doc/numpy/reference/", + }, + { + 'text': "SciPy", + 'url': "http://docs.scipy.org/doc/scipy/reference/", + }, + { + 'text': "Matplotlib", + 'url': "http://matplotlib.org/contents.html", + }, + { + 'text': "SymPy", + 'url': "http://docs.sympy.org/latest/index.html", + }, + { + 'text': "pandas", + 'url': "http://pandas.pydata.org/pandas-docs/stable/", + }, + ]).tag(config=True) - if not self.q: - self.q = self.engine.query(s) - if self.q.next(): - myvs = self.q.namedVarsCopy() - if myvs: - i = 0 - for peq in myvs: - name = peq[0] - bind = peq[1] - if bind.isVar(): - var = yap.YAPAtom('$VAR') - f = yap.YAPFunctor(var, 1) - bind.unify(yap.YAPApplTerm(f, (name))) - else: - i = bind.numberVars(i, True) - print(name.text() + " = " + bind.text()) - else: - print("yes") - if self.q.deterministic(): - self.closeq() - return - print("No (more) answers") - self.closeq() - return + # Kernel info fields + implementation = 'YAP' + implementation_version = release.version + language_info = { + 'name': 'python', + 'version': sys.version.split()[0], + 'mimetype': 'text/x-python', + 'codemirror_mode': { + 'name': 'prolog', + 'version': sys.version_info[0] + }, + 'pygments_lexer': 'prolog', + 'nbconvert_exporter': 'python', + 'file_extension': '.yap' + } - def closeq(self): - if self.q: - self.q.close() - self.q = None + @property + def banner(self): + return self.shell.banner + + def start(self): + self.shell.exit_now = False + super(YAPKernel, self).start() + + def set_parent(self, ident, parent): + """Overridden from parent to tell the display hook and output streams + about the parent message. 
+ """ + super(YAPKernel, self).set_parent(ident, parent) + self.shell.set_parent(parent) + + def init_metadata(self, parent): + """Initialize metadata. + + Run at the beginning of each execution request. + """ + md = super(YAPKernel, self).init_metadata(parent) + # FIXME: remove deprecated ipyparallel-specific code + # This is required for ipyparallel < 5.0 + md.update({ + 'dependencies_met' : True, + 'engine' : self.ident, + }) + return md + + def finish_metadata(self, parent, metadata, reply_content): + """Finish populating metadata. + + Run after completing an execution request. + """ + # FIXME: remove deprecated ipyparallel-specific code + # This is required by ipyparallel < 5.0 + metadata['status'] = reply_content['status'] + if reply_content['status'] == 'error' and reply_content['ename'] == 'UnmetDependency': + metadata['dependencies_met'] = False + + return metadata + + def _forward_input(self, allow_stdin=False): + """Forward raw_input and getpass to the current frontend. + + via input_request + """ + self._allow_stdin = allow_stdin + + if PY3: + self._sys_raw_input = builtin_mod.input + builtin_mod.input = self.raw_input + else: + self._sys_raw_input = builtin_mod.raw_input + self._sys_eval_input = builtin_mod.input + builtin_mod.raw_input = self.raw_input + builtin_mod.input = lambda prompt='': eval(self.raw_input(prompt)) + self._save_getpass = getpass.getpass + getpass.getpass = self.getpass + + def _restore_input(self): + """Restore raw_input, getpass""" + if PY3: + builtin_mod.input = self._sys_raw_input + else: + builtin_mod.raw_input = self._sys_raw_input + builtin_mod.input = self._sys_eval_input + + getpass.getpass = self._save_getpass + + @property + def execution_count(self): + return self.shell.execution_count + + @execution_count.setter + def execution_count(self, value): + # Ignore the incrememnting done by KernelBase, in favour of our shell's + # execution counter. + pass + + def do_execute(self, code, silent, store_history=True, + user_expressions=None, allow_stdin=False): + shell = self.shell # we'll need this a lot here + + self._forward_input(allow_stdin) + + reply_content = {} + try: + res = shell.run_cell(code, store_history=store_history, silent=silent) + finally: + self._restore_input() + + if res.error_before_exec is not None: + err = res.error_before_exec + else: + err = res.error_in_exec + + if res.success: + reply_content[u'status'] = u'ok' + elif isinstance(err, KeyboardInterrupt): + reply_content[u'status'] = u'aborted' + else: + reply_content[u'status'] = u'error' + + reply_content.update({ + # u'traceback': shell._last_traceback or [], + u'ename': unicode_type(type(err).__name__), + u'evalue': safe_unicode(err), + }) + + # FIXME: deprecate piece for ipyparallel: + e_info = dict(engine_uuid=self.ident, engine_id=self.int_id, + method='execute') + reply_content['engine_info'] = e_info + + + # Return the execution counter so clients can display prompts + reply_content['execution_count'] = shell.execution_count - 1 + + if 'traceback' in reply_content: + self.log.info("Exception in execute request:\n%s", '\n'.join(reply_content['traceback'])) + + + # At this point, we can tell whether the main code execution succeeded + # or not. 
If it did, we proceed to evaluate user_expressions + if reply_content['status'] == 'ok': + reply_content[u'user_expressions'] = \ + shell.user_expressions(user_expressions or {}) + else: + # If there was an error, don't even try to compute expressions + reply_content[u'user_expressions'] = {} + + # Payloads should be retrieved regardless of outcome, so we can both + # recover partial output (that could have been generated early in a + # block, before an error) and always clear the payload system. + reply_content[u'payload'] = shell.payload_manager.read_payload() + # Be aggressive about clearing the payload because we don't want + # it to sit in memory until the next execute_request comes in. + shell.payload_manager.clear_payload() + + return reply_content + + def do_complete(self, code, cursor_pos): + # FIXME: YAP completers currently assume single line, + # but completion messages give multi-line context + # For now, extract line from cell, based on cursor_pos: + if cursor_pos is None: + cursor_pos = len(code) + line, offset = line_at_cursor(code, cursor_pos) + line_cursor = cursor_pos - offset + + txt, matches = self.shell.complete('', line, line_cursor) + return {'matches' : matches, + 'cursor_end' : cursor_pos, + 'cursor_start' : cursor_pos - len(txt), + 'metadata' : {}, + 'status' : 'ok'} + + def do_inspect(self, code, cursor_pos, detail_level=0): + name = token_at_cursor(code, cursor_pos) + info = self.shell.object_inspect(name) + + reply_content = {'status' : 'ok'} + reply_content['data'] = data = {} + reply_content['metadata'] = {} + reply_content['found'] = info['found'] + if info['found']: + info_text = self.shell.object_inspect_text( + name, + detail_level=detail_level, + ) + data['text/plain'] = info_text + + return reply_content + + def do_history(self, hist_access_type, output, raw, session=0, start=0, + stop=None, n=None, pattern=None, unique=False): + if hist_access_type == 'tail': + hist = self.shell.history_manager.get_tail(n, raw=raw, output=output, + include_latest=True) + + elif hist_access_type == 'range': + hist = self.shell.history_manager.get_range(session, start, stop, + raw=raw, output=output) + + elif hist_access_type == 'search': + hist = self.shell.history_manager.search( + pattern, raw=raw, output=output, n=n, unique=unique) + else: + hist = [] + + return { + 'status': 'ok', + 'history' : list(hist), + } + + def do_shutdown(self, restart): + self.shell.exit_now = True + return dict(status='ok', restart=restart) + + def do_is_complete(self, code): + status, indent_spaces = self.shell.input_transformer_manager.check_complete(code) + r = {'status': status} + if status == 'incomplete': + r['indent'] = ' ' * indent_spaces + return r + + def do_apply(self, content, bufs, msg_id, reply_metadata): + from .serialize import serialize_object, unpack_apply_message + shell = self.shell + try: + working = shell.user_ns + + prefix = "_"+str(msg_id).replace("-","")+"_" + + f,args,kwargs = unpack_apply_message(bufs, working, copy=False) + + fname = getattr(f, '__name__', 'f') + + fname = prefix+"f" + argname = prefix+"args" + kwargname = prefix+"kwargs" + resultname = prefix+"result" + + ns = { fname : f, argname : args, kwargname : kwargs , resultname : None } + # print ns + working.update(ns) + code = "%s = %s(*%s,**%s)" % (resultname, fname, argname, kwargname) + try: + exec(code, shell.user_global_ns, shell.user_ns) + result = working.get(resultname) + finally: + for key in ns: + working.pop(key) + + result_buf = serialize_object(result, + 
+                                          buffer_threshold=self.session.buffer_threshold,
+                                          item_threshold=self.session.item_threshold,
+                                          )
+
+        except BaseException as e:
+            # invoke YAP traceback formatting
+            shell.showtraceback()
+            reply_content = {
+                u'traceback': shell._last_traceback or [],
+                u'ename': unicode_type(type(e).__name__),
+                u'evalue': safe_unicode(e),
+            }
+            # FIXME: deprecate piece for ipyparallel:
+            e_info = dict(engine_uuid=self.ident, engine_id=self.int_id, method='apply')
+            reply_content['engine_info'] = e_info
+
+            self.send_response(self.iopub_socket, u'error', reply_content,
+                               ident=self._topic('error'))
+            self.log.info("Exception in apply request:\n%s", '\n'.join(reply_content['traceback']))
+            result_buf = []
+            reply_content['status'] = 'error'
+        else:
+            reply_content = {'status' : 'ok'}
+
+        return reply_content, result_buf
+
+    def do_clear(self):
+        self.shell.reset(False)
+        return dict(status='ok')
diff --git a/packages/python/yapex.py b/packages/python/yapex.py
index f27f88053..4c3809e03 100644
--- a/packages/python/yapex.py
+++ b/packages/python/yapex.py
@@ -20,16 +20,15 @@ class T(tuple):
         return str(self.name) + str(self.tuple)
 
 
-
 def query_prolog(engine, s):
-    def answer( q ):
+    def answer(q):
         try:
             return q.next()
         except Exception as e:
-            print( e.args[1] )
+            print(e.args[1])
             return False
-
+
     q = engine.query(s)
     ask = True
     while answer(q):
diff --git a/packages/swig/yap.i b/packages/swig/yap.i
index 02ef5dfdf..f3c12b7fd 100644
--- a/packages/swig/yap.i
+++ b/packages/swig/yap.i
@@ -2,7 +2,7 @@
 %module(directors = "1") yap
 
 // Language independent exception handler
-%include exception.i
+%include exception.i
 %include stdint.i
 
 %ignore *::operator[];
@@ -35,8 +35,17 @@ return *new YAPTerm();
     }
 }
 
+%typemap(in) YAPIntegerTerm {
+#if PY_MAJOR_VERSION < 3
+  $1 = YAPIntegerTerm(PyInt_AsLong($input));
+#else
+  $1 = YAPIntegerTerm(PyLong_AsLong($input));
+#endif
+}
+
+
 %typemap(out) YAPIntegerTerm {
-  Term t = $1.term();
+  Term t = $input.term();
   Int j = IntegerOfTerm(t);
 #if PY_MAJOR_VERSION < 3
   return PyInt_FromLong(j);
@@ -45,10 +54,15 @@ return *new YAPTerm();
 #endif
 }
+
+%typemap(in) YAPFloatTerm {
+  $1 = YAPFloatTerm( PyFloat_AsDouble($input) );
+}
+
 %typemap(out) YAPFloatTerm {
-  Term t = $1.term();
+  Term t = $input.term();
   Int double j = FloatOfTerm(t);
-  return PyFloat_FromDouble(j);
+  $1 = PyFloat_FromDouble(j);
 }
 
 // translate well-known names and existing
@@ -56,13 +70,61 @@ return *new YAPTerm();
 // Everthing else let wrapped.
 // as a term
 %typemap(out) YAPAtomTerm {
-  const char *s = RepAtom(AtomOfTerm($1.term()))->StrOfAE;
+  const char *s = RepAtom(AtomOfTerm($input.term()))->StrOfAE;
   PyObject *p;
   if ((p = AtomToPy(s))) {
-    return p;
+    $1 = p;
+  } else {
+    $1 = Py_None;
   }
 }
 
+// translate lists as Python Lists
+// Python symbols
+// Everything else is left wrapped
+// as a term
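+//
+// Illustrative sketch only (the wrapped entry point below is hypothetical,
+// not part of this patch): with the list typemap that follows, a plain
+// Python tuple can be handed to any generated wrapper expecting a
+// YAPListTerm, e.g.
+//
+//   import yap                       # module name set by %module above
+//   t = (1, 2.0, "three")            # exact tuple -> Prolog list [1,2.0,three]
+//   some_wrapper_taking_a_list(t)    # hypothetical wrapped C++ function
+//
+// A tuple *subclass* instance is instead routed to the YAPApplTerm typemap
+// further down and becomes a compound term named after its Python type.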
+%typemap(in) YAPListTerm {
+  PyObject *p = $input;
+  Int len = PyTuple_Size(p);
+  if (len == 0) {
+    $1 = YAPListTerm(TermNil);
+  } else {
+    Term t = AbsPair(HR);
+    for (Int i = 0; i < len; i++) {
+      HR += 2;
+      HR[-2] = pythonToYAP(PyTuple_GetItem(p, i));
+      HR[-1] = AbsPair(HR);   // provisional link to the next cons cell
+    }
+    HR[-1] = TermNil;         // close the list
+    $1 = YAPListTerm(t);
+  }
+ }
+
+// exact Python tuples are converted into Prolog lists
+%typemap(typecheck) YAPListTerm {
+  PyObject *it = $input;
+  $1 = PyTuple_CheckExact(it);
+ }
+
+%typemap(in) YAPApplTerm {
+  PyObject *p = $input;
+  const char *o = Py_TYPE(p)->tp_name;
+  Int len = PyTuple_Size(p);
+
+  if (len == 0) {
+    $1 = nullptr;
+  } else {
+    Term t = MkNewApplTerm(Yap_MkFunctor(Yap_LookupAtom(o),len),len);
+    for (Int i = 0; i < len; i++) {
+      RepAppl(t)[i + 1] = pythonToYAP(PyTuple_GetItem(p, i));
+    }
+    $1 = YAPApplTerm(t);
+  }
+}
+
+// tuple subclasses (such as T in packages/python/yapex.py) become compound terms
+%typemap(typecheck) YAPApplTerm {
+  PyObject *p = $input;
+  $1 = (PyTuple_Check(p) && !PyTuple_CheckExact(p));
+}
+
 // translate lists as Python Lists
 // Python symbols
 // Everthing else let wrapped.
@@ -206,7 +268,7 @@ return *new YAPTerm();
     $action
   } catch (YAPError e) {
     yap_error_number en = e.getID();
-LOCAL_ERROR_Type = YAP_NO_ERROR;
+LOCAL_Error_TYPE = YAP_NO_ERROR;
     switch (e.getErrorClass()) {
     case YAPC_NO_ERROR:
       break;
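+    // The cases in this switch map each YAPError class onto a SWIG exception
+    // code, which the generated Python wrapper re-raises as a Python
+    // exception.  A minimal sketch of the intended behaviour (the failing
+    // goal is an illustrative assumption, not part of this patch):
+    //
+    //   import yap
+    //   engine = yap.YAPEngine()
+    //   try:
+    //       engine.query("foo(")      # malformed goal text
+    //   except Exception as err:
+    //       print("YAP error:", err)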