swig python support

Vitor Santos Costa
2017-05-08 18:51:29 +01:00
parent d1e39368f4
commit c2435673cd
37 changed files with 944 additions and 1580 deletions

View File

@@ -1,43 +1,29 @@
#CHECK: PythonLibs, changed to work in WIN32
if (PYTHONLIBS_FOUND)
set (PYTHON_SOURCES python.c pl2py.c pybips.c py2pl.c pl2pl.c pypreds.c)
include(FindPythonModule)
set (PYTHON_HEADERS python.h)
set (PYTHON_SOURCES python.c pl2py.c pybips.c py2pl.c pl2pl.c pypreds.c)
set (CMAKE_POSITION_INDEPENDENT_CODE TRUE)
set (PYTHON_HEADERS python.h)
add_library (YAPPython SHARED ${PYTHON_SOURCES})
set (CMAKE_POSITION_INDEPENDENT_CODE TRUE)
set_property( SOURCE ${PYTHON_SOURCES} APPEND PROPERTY COMPILE_DEFINITIONS YAP_KERNEL=1)
add_library (YAPPython SHARED ${PYTHON_SOURCES})
set (PYTHON_PL python.pl)
target_link_libraries(YAPPython libYap ${PYTHON_LIBRARY} ${WINDLLS} ${GMP_LIBRARIES})
install(FILES python.pl DESTINATION ${libpl} )
set_property( SOURCE ${PYTHON_SOURCES} APPEND PROPERTY COMPILE_DEFINITIONS YAP_KERNEL=1)
add_to_group( pl_library PYTHON_PL )
set (PYTHON_PL python.pl)
install(FILES python.pl DESTINATION ${libpl} )
add_to_group( pl_library PYTHON_PL )
# configure_file ("setup.py.cmake" "setup.py" )
set(SETUP_PY "${CMAKE_CURRENT_BINARY_DIR}/setup.py")
# set(SETUP_PY "${CMAKE_CURRENT_BINARY_DIR}/setup.py")
#set_target_properties (YAPPython PROPERTIES PREFIX "")
#set_target_properties (YAPPython PROPERTIES PREFIX "")
find_python_module( jupyter )
if (PY_JUPYTER)
add_subdirectory(yap_kernel)
ENDIF()
IF(WIN32)
IF(WIN32)
install(TARGETS YAPPython
LIBRARY DESTINATION ${libdir}
RUNTIME DESTINATION ${bindir}
@@ -47,6 +33,5 @@ if (PYTHONLIBS_FOUND)
LIBRARY DESTINATION ${libdir}
RUNTIME DESTINATION ${libdir}
ARCHIVE DESTINATION ${libdir} )
endif()
endif()
endif (PYTHONLIBS_FOUND)

View File

@@ -1,3 +1,4 @@
if __name__ == '__main__':
from yapkernel import kernelapp as app
import pdb; pdb.set_trace()
app.launch_new_instance()

View File

@@ -32,7 +32,7 @@ void YEM(const char * exp, int line, const char *file, const char *code)
PyObject *term_to_python(term_t t, bool eval, PyObject *o) {
// o≈
YAP_Term yt = YAP_GetFromSlot(t);
Yap_DebugPlWriteln(yt);
// Yap_DebugPlWriteln(yt);
switch (PL_term_type(t)) {
case PL_VARIABLE: {
if (t==0) {

View File

@@ -1,6 +1,6 @@
#include "python.h"
static foreign_t repr_term(PyObject *pVal, term_t t) {
term_t to = PL_new_term_ref(), t1 = PL_new_term_ref();
PL_put_pointer(t1, pVal);
@@ -25,25 +25,26 @@ foreign_t assign_to_symbol(term_t t, PyObject *e) {
foreign_t python_to_term(PyObject *pVal, term_t t) {
bool rc = true;
term_t to = PL_new_term_ref();
fputs(" <<*** ",stderr); PyObject_Print(pVal,stderr,0); fputs("<<***\n",stderr);
// fputs(" <<*** ",stderr); PyObject_Print(pVal,stderr,0); fputs("<<***\n",stderr);
if (pVal == Py_None) {
fputs("<<*** ",stderr);Yap_DebugPlWrite(YAP_GetFromSlot(t)); fputs(" >>***\n",stderr);
rc= PL_unify_atom(t, ATOM_none); fputs("<<*** ",stderr);Yap_DebugPlWrite(YAP_GetFromSlot(t)); fputs(" >>***\n",stderr);
//fputs("<<*** ",stderr);Yap_DebugPlWrite(YAP_GetFromSlot(t)); fputs(" >>***\n",stderr);
rc= PL_unify_atom(t, ATOM_none);
//fputs("<<*** ",stderr);Yap_DebugPlWrite(YAP_GetFromSlot(t)); fputs(" >>***\n",stderr);
}
if (PyBool_Check(pVal)) {
if (PyObject_IsTrue(pVal)) {
rc = PL_unify_atom(t, ATOM_true);
rc = rc && PL_unify_atom(t, ATOM_true);
} else {
rc = PL_unify_atom(t, ATOM_false);
rc = rc && PL_unify_atom(t, ATOM_false);
}
} else if (PyLong_Check(pVal)) {
rc = PL_unify_int64(t, PyLong_AsLong(pVal));
rc = rc && PL_unify_int64(t, PyLong_AsLong(pVal));
#if PY_MAJOR_VERSION < 3
} else if (PyInt_Check(pVal)) {
rc = PL_unify_int64(t, PyInt_AsLong(pVal));
rc = rc && PL_unify_int64(t, PyInt_AsLong(pVal));
#endif
} else if (PyFloat_Check(pVal)) {
rc = PL_unify_float(t, PyFloat_AsDouble(pVal));
rc = rc && PL_unify_float(t, PyFloat_AsDouble(pVal));
} else if (PyComplex_Check(pVal)) {
term_t t1 = PL_new_term_ref(),
t2 = PL_new_term_ref();
@@ -52,7 +53,7 @@ fputs(" <<*** ",stderr); PyObject_Print(pVal,stderr,0); fputs("<<***\n",st
!PL_cons_functor(to, FUNCTOR_complex2, t1, t2)) {
rc = false;
} else {
rc = PL_unify(t, to);
rc = rc && PL_unify(t, to);
}
} else if (PyUnicode_Check(pVal)) {
atom_t tmp_atom;
@@ -67,41 +68,43 @@ fputs(" <<*** ",stderr); PyObject_Print(pVal,stderr,0); fputs("<<***\n",st
const char *s = PyUnicode_AsUTF8(pVal);
tmp_atom = PL_new_atom( s);
#endif
rc = PL_unify_atom(t, tmp_atom);
rc = rc && PL_unify_atom(t, tmp_atom);
} else if (PyByteArray_Check(pVal)) {
atom_t tmp_atom = PL_new_atom(PyByteArray_AsString(pVal));
rc = PL_unify_atom(t, tmp_atom);
rc = rc && PL_unify_atom(t, tmp_atom);
#if PY_MAJOR_VERSION < 3
} else if (PyString_Check(pVal)) {
atom_t tmp_atom = PL_new_atom(PyString_AsString(pVal));
rc = PL_unify_atom(t, tmp_atom);
rc = rc && PL_unify_atom(t, tmp_atom);
#endif
} else if (PyTuple_Check(pVal)) {
Py_ssize_t i, sz = PyTuple_Size(pVal);
functor_t f;
const char *s;
if ((s = (Py_TYPE(pVal)->tp_name))) {
if (!strcmp(s, "H")) {
pVal = PyTuple_GetItem(pVal, 0);
if (pVal==NULL) {
if (sz == 0) {
rc = rc && PL_unify_atom(t, ATOM_brackets);
} else {
if ((s = (Py_TYPE(pVal)->tp_name))) {
if (!strcmp(s, "H")) {
pVal = PyTuple_GetItem(pVal, 0);
if (pVal==NULL) {
pVal = Py_None;
PyErr_Clear();
}
}
if (s[0] == '$') {
char *ns = malloc(strlen(s) + 5);
strcpy(ns, "__");
strcat(ns, s + 1);
strcat(ns, "__");
f = PL_new_functor(PL_new_atom(ns), sz);
} else {
f = PL_new_functor(PL_new_atom(s), sz);
}
}
if (s[0] == '$') {
char *ns = malloc(strlen(s) + 5);
strcpy(ns, "__");
strcat(ns, s + 1);
strcat(ns, "__");
f = PL_new_functor(PL_new_atom(ns), sz);
} else {
f = PL_new_functor(PL_new_atom(s), sz);
f = PL_new_functor(ATOM_t, sz);
}
} else {
f = PL_new_functor(ATOM_t, sz);
}
if (PL_unify_functor(t, f)) {
rc = true;
for (i = 0; i < sz; i++) {
if (!PL_get_arg(i + 1, t, to))
rc = false;
@@ -115,61 +118,69 @@ fputs(" <<*** ",stderr); PyObject_Print(pVal,stderr,0); fputs("<<***\n",st
} else {
rc = false;
}
fputs(" ||*** ",stderr); Yap_DebugPlWrite(YAP_GetFromSlot(t)); fputs(" ||***\n",stderr);
} else if (PyList_Check(pVal)) {
YAP_Term yt = YAP_GetFromSlot(t);
Py_ssize_t i, sz = PyList_GET_SIZE(pVal);
for (i = 0; i < sz; i++) {
PyObject *obj;
if (!PL_unify_list(t, to, t)) {
rc = false;
break;
}
if ((obj = PyList_GetItem(pVal, i)) == NULL) {
obj = Py_None;
}
rc = rc && python_to_term(obj, to);
//fputs(" ||*** ",stderr); Yap_DebugPlWrite(YAP_GetFromSlot(t)); fputs(" ||***\n",stderr);
}
rc = rc && PL_unify_nil(t);
fputs("[***] ", stderr);
Yap_DebugPlWrite(yt); fputs("[***]\n", stderr);
} else if (PyList_Check(pVal)) {
Py_ssize_t i, sz = PyList_GET_SIZE(pVal);
if (sz == 0) {
rc = rc && PL_unify_atom(t, ATOM_nil);
} else {
for (i = 0; i < sz; i++) {
PyObject *obj;
if (!PL_unify_list(t, to, t)) {
rc = false;
break;
}
if ((obj = PyList_GetItem(pVal, i)) == NULL) {
obj = Py_None;
}
rc = rc && python_to_term(obj, to);
}
rc = rc && PL_unify_nil(t);
}
//fputs("[***] ", stderr);
//Yap_DebugPlWrite(yt); fputs("[***]\n", stderr);
} else if (PyDict_Check(pVal)) {
Py_ssize_t pos = 0;
term_t to = PL_new_term_ref(), ti = to;
int left = PyDict_Size(pVal);
PyObject *key, *value;
while (PyDict_Next(pVal, &pos, &key, &value)) {
term_t tkey = PL_new_term_ref(), tval = PL_new_term_ref(), tint,
tnew = PL_new_term_ref();
/* do something interesting with the values... */
if (!python_to_term(key, tkey)) {
continue;
if (left == 0) {
rc = rc && PL_unify_atom(t, ATOM_curly_brackets);
} else {
while (PyDict_Next(pVal, &pos, &key, &value)) {
term_t tkey = PL_new_term_ref(), tval = PL_new_term_ref(), tint,
tnew = PL_new_term_ref();
/* do something interesting with the values... */
if (!python_to_term(key, tkey)) {
continue;
}
if (!python_to_term(value, tval)) {
continue;
}
/* reuse */
tint = tkey;
if (!PL_cons_functor(tint, FUNCTOR_colon2, tkey, tval)) {
rc = false;
continue;
}
if (--left) {
if (!PL_cons_functor(tint, FUNCTOR_comma2, tint, tnew))
PL_reset_term_refs(tkey);
rc = false;
}
if (!PL_unify(ti, tint)) {
rc = false; }
ti = tnew;
PL_reset_term_refs(tkey);
}
if (!python_to_term(value, tval)) {
continue;
}
/* reuse */
tint = tkey;
if (!PL_cons_functor(tint, FUNCTOR_colon2, tkey, tval)) {
rc = false;
continue;
}
if (--left) {
if (!PL_cons_functor(tint, FUNCTOR_comma2, tint, tnew))
PL_reset_term_refs(tkey);
rc = false;
}
if (!PL_unify(ti, tint)) {
rc = false; }
ti = tnew;
PL_reset_term_refs(tkey);
rc = rc && PL_unify(t, to);
}
rc = PL_unify(t, to);
} else {
rc = repr_term(pVal, t);
rc = rc && repr_term(pVal, t);
}
PL_reset_term_refs(to);
return rc;
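
Not part of the commit: a small sketch of the Python-to-Prolog mapping that the patched python_to_term() above produces, including the new empty-container cases; the exact Prolog spellings are assumptions read off the C code and the new atoms (ATOM_nil, ATOM_brackets, ATOM_curly_brackets) introduced further down.

# Illustrative only (assumed spellings): Python value on the left, the Prolog
# term python_to_term() is expected to unify its argument with on the right.
PY_TO_PROLOG = {
    "None":      "none",
    "True":      "true",
    "False":     "false",
    "42":        "42",
    "3.5":       "3.5",
    "'abc'":     "abc",        # unicode strings become atoms
    "()":        "'()'",       # new: empty tuple -> ATOM_brackets
    "(1, 2)":    "t(1, 2)",    # non-empty tuples -> compound term (t/N or the type name)
    "[]":        "[]",         # new: empty list -> ATOM_nil
    "[1, 2]":    "[1, 2]",
    "{}":        "'{}'",       # new: empty dict -> ATOM_curly_brackets
    "{'a': 1}":  "a:1",        # dict entries chained with ':'/2 and ','/2
}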

View File

@@ -41,11 +41,10 @@ PyObject *PythonLookupSpecial(const char *s) {
if (strcmp(s, "false") == 0) {
return Py_False;
}
if (strcmp(s, "none") == 0)
return Py_None;
if (strcmp(s, "[]") == 0) {
return PyList_New(0);
} else if (strcmp(s, "{}") == 0) {
}
if (strcmp(s, "{}") == 0) {
return PyDict_New();
/* return __main__,s */
}
@@ -53,11 +52,9 @@ PyObject *PythonLookupSpecial(const char *s) {
}
PyObject *lookupPySymbol(const char *sp, PyObject *pContext, PyObject **duc) {
PyObject *out = NULL;
PyObject *out = Py_None;
if (!sp)
return Py_None;
if (strcmp(sp, "none") == 0)
return Py_None;
if ((out = finalLookup(pContext, sp))) {
return out;
}
@@ -70,7 +67,7 @@ PyObject *lookupPySymbol(const char *sp, PyObject *pContext, PyObject **duc) {
}
PyObject *py_Local = PyEval_GetLocals();
if ((out = finalLookup(py_Local, sp))) {
return out;
return out;
}
PyObject *py_Global = PyEval_GetGlobals();
if ((out = finalLookup(py_Global, sp))) {
@@ -737,8 +734,10 @@ PyObject *term_to_nametuple(const char *s, arity_t arity, PyObject *tuple) {
typp = PyMem_Malloc(sizeof(PyTypeObject));
PyStructSequence_Desc *desc = PyMem_Malloc(sizeof(PyStructSequence_Desc));
desc->name = PyUnicode_AsUTF8(key);
const char*name = PyUnicode_AsUTF8(key);
desc->name = PyMem_Malloc(strlen(name)+1);
strcpy(desc->name, name);
Py_DECREF(key);
desc->doc = "YAPTerm";
desc->fields = pnull;
desc->n_in_sequence = 32;
@@ -748,7 +747,8 @@ PyObject *term_to_nametuple(const char *s, arity_t arity, PyObject *tuple) {
typp->tp_repr = structseq_repr;
// typp = PyStructSequence_NewType(desc);
Py_INCREF(typp);
typp->tp_flags |= Py_TPFLAGS_HEAPTYPE;
Py_INCREF(desc);
//typp->tp_flags |= Py_TPFLAGS_HEAPTYPE;
// don't do this: we cannot add a type as an attribute.
//PyModule_AddObject(py_Main, s, (PyObject *)typp);
if (py_F2P)
@@ -759,10 +759,10 @@ for (arity_t i = 0; i < arity; i++) {
PyObject *pArg = PyTuple_GET_ITEM(tuple, i);
if (pArg)
PyStructSequence_SET_ITEM(o, i, pArg);
PyObject_Print(pArg,stderr,0);fputc('\n',stderr);
}
//PyObject_Print(pArg,stderr,0);fputc('\n',stderr);
}
((PyStructSequence *)o)->ob_base.ob_size = arity;
PyObject_Print(o,stderr,0);fputc('\n',stderr);
//PyObject_Print(o,stderr,0);fputc('\n',stderr);
return o;
#else
PyObject *o1;

View File

@@ -2,15 +2,15 @@
#include "python.h"
atom_t ATOM_true, ATOM_false, ATOM_colon, ATOM_dot, ATOM_none, ATOM_t,
ATOM_comma, ATOM_builtin, ATOM_A, ATOM_V, ATOM_self;
ATOM_comma, ATOM_builtin, ATOM_A, ATOM_V, ATOM_self, ATOM_nil, ATOM_brackets, ATOM_curly_brackets;
functor_t FUNCTOR_dollar1, FUNCTOR_abs1, FUNCTOR_all1, FUNCTOR_any1,
FUNCTOR_bin1, FUNCTOR_brackets1, FUNCTOR_comma2, FUNCTOR_dir1,
FUNCTOR_float1, FUNCTOR_int1, FUNCTOR_iter1, FUNCTOR_iter2, FUNCTOR_long1,
FUNCTOR_len1, FUNCTOR_curly1, FUNCTOR_ord1, FUNCTOR_range1, FUNCTOR_range2,
FUNCTOR_range3, FUNCTOR_sum1, FUNCTOR_pointer1, FUNCTOR_complex2,
FUNCTOR_plus2, FUNCTOR_sub2, FUNCTOR_mul2, FUNCTOR_div2, FUNCTOR_hat2,
FUNCTOR_colon2, FUNCTOR_comma2, FUNCTOR_equal2, FUNCTOR_sqbrackets2,
FUNCTOR_bin1, FUNCTOR_brackets1, FUNCTOR_comma2, FUNCTOR_dir1,
FUNCTOR_float1, FUNCTOR_int1, FUNCTOR_iter1, FUNCTOR_iter2, FUNCTOR_long1,
FUNCTOR_len1, FUNCTOR_curly1, FUNCTOR_ord1, FUNCTOR_range1, FUNCTOR_range2,
FUNCTOR_range3, FUNCTOR_sum1, FUNCTOR_pointer1, FUNCTOR_complex2,
FUNCTOR_plus2, FUNCTOR_sub2, FUNCTOR_mul2, FUNCTOR_div2, FUNCTOR_hat2,
FUNCTOR_colon2, FUNCTOR_comma2, FUNCTOR_equal2, FUNCTOR_sqbrackets2,
FUNCTOR_dot2, FUNCTOR_brackets1;
X_API PyObject *py_Builtin;
@@ -53,7 +53,9 @@ static void install_py_constants(void) {
ATOM_false = PL_new_atom("false");
ATOM_dot = PL_new_atom(".");
ATOM_self = PL_new_atom("self");
ATOM_t = PL_new_atom("t");
ATOM_nil = PL_new_atom("[]");
ATOM_brackets = PL_new_atom("()");
ATOM_curly_brackets = PL_new_atom("{}");
FUNCTOR_abs1 = PL_new_functor(PL_new_atom("abs"), 1);
FUNCTOR_all1 = PL_new_functor(PL_new_atom("all"), 1);
FUNCTOR_any1 = PL_new_functor(PL_new_atom("any"), 1);

View File

@@ -48,7 +48,7 @@ extern X_API PyObject *yap_to_python(YAP_Term t, bool eval, PyObject *o);
typedef YAP_Arity arity_t;
extern atom_t ATOM_true, ATOM_false, ATOM_colon, ATOM_dot, ATOM_none, ATOM_t,
ATOM_comma, ATOM_builtin, ATOM_V, ATOM_A, ATOM_self;
ATOM_comma, ATOM_builtin, ATOM_V, ATOM_A, ATOM_self, ATOM_nil, ATOM_brackets, ATOM_curly_brackets;
extern functor_t FUNCTOR_dollar1, FUNCTOR_abs1, FUNCTOR_all1, FUNCTOR_any1,
FUNCTOR_bin1, FUNCTOR_brackets1, FUNCTOR_comma2, FUNCTOR_dir1,

View File

@@ -3,89 +3,109 @@
INCLUDE(NewUseSWIG)
if (PYTHONLIBS_FOUND)
include(FindPythonModule)
include(FindPythonModule)
file(RELATIVE_PATH RELATIVE_SOURCE ${CMAKE_CURRENT_BINARY_DIR} ${CMAKE_SOURCE_DIR})
INCLUDE_DIRECTORIES(${PYTHON_INCLUDE_PATH})
file(RELATIVE_PATH RELATIVE_SOURCE ${CMAKE_CURRENT_BINARY_DIR} ${CMAKE_SOURCE_DIR})
INCLUDE_DIRECTORIES("${CMAKE_CURRENT_SOURCE_DIR}" "${CMAKE_SOURCE_DIR}/CXX")
configure_file("setup.py.cmake" ${CMAKE_CURRENT_BINARY_DIR}/setup.py)
configure_file("MANIFEST.in" ${CMAKE_CURRENT_BINARY_DIR}/MANIFEST.in)
foreach(i ${pl_library})
get_filename_component(j ${i} NAME)
configure_file(${i} ${CMAKE_CURRENT_BINARY_DIR}/yap4py/prolog/${j})
endforeach()
foreach(i ${pl_boot_library})
get_filename_component(j ${i} NAME)
configure_file(${i} ${CMAKE_CURRENT_BINARY_DIR}/yap4py/prolog/pl/${j})
endforeach()
foreach(i ${pl_os_library})
get_filename_component(j ${i} NAME)
configure_file(${i} ${CMAKE_CURRENT_BINARY_DIR}/yap4py/prolog/os/${j})
endforeach()
configure_file("${CMAKE_SOURCE_DIR}/README.md" ${CMAKE_CURRENT_BINARY_DIR}/README)
configure_file("__init__.py" ${CMAKE_CURRENT_BINARY_DIR}/yap4py/__init__.py)
configure_file("yapi.py" ${CMAKE_CURRENT_BINARY_DIR}/yap4py/yapi.py)
configure_file("yapi.yap" ${CMAKE_CURRENT_BINARY_DIR}/yap4py/prolog/yapi.yap)
SET_SOURCE_FILES_PROPERTIES(../yap.i PROPERTIES CPLUSPLUS ON)
SET_SOURCE_FILES_PROPERTIES(../yap.i PROPERTIES SWIG_FLAGS "-O;-py3")
SET_SOURCE_FILES_PROPERTIES(../yap.i PROPERTIES SWIG_MODULE_NAME yap)
SET_SOURCE_FILES_PROPERTIES(../yap.i PROPERTIES OUTPUT_NAME yap)
INCLUDE_DIRECTORIES(${PYTHON_INCLUDE_PATH})
INCLUDE_DIRECTORIES("${CMAKE_CURRENT_SOURCE_DIR}" "${CMAKE_SOURCE_DIR}/CXX")
SWIG_ADD_LIBRARY(Py2YAP LANGUAGE python SHARED SOURCES ../yap.i )
if (WIN32)
SET_SOURCE_FILES_PROPERTIES(../../swig/yap.i PROPERTIES CPLUSPLUS ON)
SET_SOURCE_FILES_PROPERTIES(../../swig/yap.i PROPERTIES SWIG_FLAGS "-O;-py3")
SET_SOURCE_FILES_PROPERTIES(../../swig/yap.i PROPERTIES SWIG_MODULE_NAME yap)
SET_SOURCE_FILES_PROPERTIES(../../swig/yap.i PROPERTIES OUTPUT_NAME yap)
SWIG_ADD_LIBRARY(Py2YAP LANGUAGE python SHARED SOURCES ../../swig/yap.i )
if (WIN32)
SWIG_LINK_LIBRARIES(Py2YAP YAPPython libYap ${PYTHON_LIBRARIES} )
else()
else()
SWIG_LINK_LIBRARIES( Py2YAP libYap YAP++ YAPPython ${PYTHON_LIBRARIES} )
endif()
set_target_properties ( ${SWIG_MODULE_Py2YAP_REAL_NAME}
PROPERTIES
NO_SONAME ON
OUTPUT_NAME _yap
LIBRARY_OUTPUT_DIRECTORY "${CMAKE_CURRENT_BINARY_DIR}" )
# inform we are compiling YAP
# s used in MSYS
LIBRARY_OUTPUT_DIRECTORY "${CMAKE_CURRENT_BINARY_DIR}"
DEPENDS YAPPython YAPPython YAP++
)
# inform we are compiling YAP
# s used in MSYS
execute_process(COMMAND ${PYTHON_EXECUTABLE} -c "import sysconfig; print( sysconfig.get_path( 'platlib' ) )"
OUTPUT_VARIABLE _ABS_PYTHON_MODULE_PATH
OUTPUT_STRIP_TRAILING_WHITESPACE)
configure_file("setup.py.cmake" ${CMAKE_CURRENT_BINARY_DIR}/setup.py)
execute_process(COMMAND ${PYTHON_EXECUTABLE} -c "import sysconfig; print( sysconfig.get_path( 'platlib' ) )"
OUTPUT_VARIABLE _ABS_PYTHON_MODULE_PATH
OUTPUT_STRIP_TRAILING_WHITESPACE
)
get_filename_component(ABS_PYTHON_MODULE_PATH ${_ABS_PYTHON_MODULE_PATH} ABSOLUTE)
file(RELATIVE_PATH _REL_PYTHON_MODULE_PATH ${CMAKE_INSTALL_PREFIX} ${_ABS_PYTHON_MODULE_PATH})
#
# set ( PYTHON_MODULE_PATH
# ${ABS_PYTHON_MODULE_PATH}
# )
#
# INSTALL ( FILES ${CMAKE_CURRENT_BINARY_DIR}/yap.py DESTINATION ${PYTHON_MODULE_PATH} )
# INSTALL ( FILES ${CMAKE_CURRENT_SOURCE_DIR}/yapi.py DESTINATION ${PYTHON_MODULE_PATH} )
# INSTALL ( FILES ${CMAKE_CURRENT_SOURCE_DIR}/yapi.yap DESTINATION ${libpl} )
#
#
# INSTALL ( TARGETS ${SWIG_MODULE_Py2YAP_REAL_NAME}
# RUNTIME DESTINATION ${PYTHON_MODULE_PATH}
# ARCHIVE DESTINATION ${PYTHON_MODULE_PATH}
# LIBRARY DESTINATION ${PYTHON_MODULE_PATH}
# )
get_filename_component(ABS_PYTHON_MODULE_PATH ${_ABS_PYTHON_MODULE_PATH} ABSOLUTE)
file(RELATIVE_PATH _REL_PYTHON_MODULE_PATH ${CMAKE_INSTALL_PREFIX} ${_ABS_PYTHON_MODULE_PATH})
#
# set ( PYTHON_MODULE_PATH
# ${ABS_PYTHON_MODULE_PATH}
# )
#
# INSTALL ( FILES ${CMAKE_CURRENT_BINARY_DIR}/yap.py DESTINATION ${PYTHON_MODULE_PATH} )
# INSTALL ( FILES ${CMAKE_CURRENT_SOURCE_DIR}/yapi.py DESTINATION ${PYTHON_MODULE_PATH} )
# INSTALL ( FILES ${CMAKE_CURRENT_SOURCE_DIR}/yapi.yap DESTINATION ${libpl} )
#
#
# INSTALL ( TARGETS ${SWIG_MODULE_Py2YAP_REAL_NAME}
# RUNTIME DESTINATION ${PYTHON_MODULE_PATH}
# ARCHIVE DESTINATION ${PYTHON_MODULE_PATH}
# LIBRARY DESTINATION ${PYTHON_MODULE_PATH}
# )
add_custom_target(Py2YAP ALL
DEPENDS ../yap.i libYap YAP++ YAPPython _Py2YAP
)
set (dlls $<TARGET_FILE:matrix>
$<TARGET_FILE:regexp>
$<TARGET_FILE:yap_rl>
$<TARGET_FILE:tries>
$<TARGET_FILE:itries>
$<TARGET_FILE:sys>
$<TARGET_FILE:yap_random>)
if (TARGET real)
set (dlls $<TARGET_FILE:matrix>
$<TARGET_FILE:regexp>
$<TARGET_FILE:yap_rl>
$<TARGET_FILE:tries>
$<TARGET_FILE:itries>
$<TARGET_FILE:sys>
$<TARGET_FILE:real>
$<TARGET_FILE:yap_random>)
if (TARGET real)
list( APPEND dlls $<TARGET_FILE:real>
)
endif()
add_custom_command(TARGET Py2YAP
POST_BUILD
COMMAND ${CMAKE_COMMAND} -E make_directory yap4py/os
COMMAND ${CMAKE_COMMAND} -E make_directory yap4py/pl
COMMAND ${CMAKE_COMMAND} -E copy ${CMAKE_CURRENT_SOURCE_DIR}/__init__.py ${dlls} $<TARGET_FILE:libYap> $<TARGET_FILE:YAPPython> $<TARGET_FILE:YAP++> ${CMAKE_CURRENT_SOURCE_DIR}/yapi.yap ${pl_library} ${CMAKE_CURRENT_BINARY_DIR}/yap4py
COMMAND ${CMAKE_COMMAND} -E copy ${pl_os_library} ${CMAKE_CURRENT_BINARY_DIR}/yap4py/os
COMMAND ${CMAKE_COMMAND} -E copy ${pl_boot_library} ${CMAKE_CURRENT_BINARY_DIR}/yap4py/pl
COMMAND ${CMAKE_COMMAND} -E copy ${CMAKE_SOURCE_DIR}/README.md ${CMAKE_CURRENT_SOURCE_DIR}/MANIFEST.in ${CMAKE_CURRENT_BINARY_DIR}
COMMAND ${CMAKE_COMMAND} -E copy ${CMAKE_CURRENT_SOURCE_DIR}/yapi.py ${CMAKE_CURRENT_BINARY_DIR}/yap4py
add_custom_target( YAP4PY ALL
)
COMMAND ${PYTHON_EXECUTABLE} setup.py sdist bdist_wheel
VERBATIM
WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
)
if (WITH_DOCS AND DOXYGEN_FOUND)
add_custom_command (TARGET YAP4PY
COMMAND ${CMAKE_COMMAND} -E copy ${dlls} ${CMAKE_BINARY_DIR}/libYap${CMAKE_SHARED_LIBRARY_SUFFIX} ${CMAKE_BINARY_DIR}/${YAP_STARTUP} ${CMAKE_CURRENT_BINARY_DIR}/yap4py
COMMAND ${PYTHON_EXECUTABLE} setup.py clean sdist bdist_wheel
VERBATIM
WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
DEPENDS ${CMAKE_BINARY_DIR}/${YAP_STARTUP} libYap ${dlls} ${pl_library} ${pl_boot_library} ${pl_os_library} yapi.py
)
install(CODE "execute_process(COMMAND ${PYTHON_EXECUTABLE} -m pip install --no-index -f dist yap4py
WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR})"
DEPENDS Py4YAP ${CMAKE_BINARY_DIR}/${YAP_STARTUP} ${dlls} )
if (WITH_DOCS AND DOXYGEN_FOUND)
set(CMAKE_SWIG_FLAGS -DDOXYGEN=${DOXYGEN_FOUND})
@@ -94,8 +114,8 @@ endif()
COMMAND ${CMAKE_COMMAND} -E make_directory ${CMAKE_BINARY_DIR}/doc
COMMAND ${DOXYGEN_EXECUTABLE} ${CMAKE_BINARY_DIR}/Doxyfile.xml
WORKING_DIRECTORY ${CMAKE_BINARY_DIR}
DEPENDS ${c_headers};${c_sources};${cpp_sources};${cpp_headers}
)
DEPENDS ${c_headers};${c_sources};${cpp_sources};${cpp_headers}
)
# generate .i from doxygen .xml
add_custom_command(OUTPUT ${CMAKE_CURRENT_BINARY_DIR}/ftdi1_doc.i
@@ -108,4 +128,3 @@ endif()
add_dependencies(${SWIG_MODULE_ftdi1_REAL_NAME} doc_i)
ENDIF ()
ENDIF ()
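
A hypothetical smoke test, not part of the commit: after the wheel produced by the bdist_wheel step above is installed (the install(CODE ...) rule runs pip install --no-index -f dist yap4py), the SWIG module built from yap.i should import as plain "yap":

# assumes the yap4py wheel built above has been installed into this interpreter
import yap                    # Python side of the SWIG wrapper (_yap + yap.py)
engine = yap.YAPEngine()      # same engine class used by yapi.py below
print(type(engine).__name__)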

View File

@@ -48,21 +48,21 @@ def query_prolog(engine, s):
while answer(q):
# this new vs should contain bindings to vars
vs= q.namedVars()
print( vs )
gs = numbervars( engine, vs)
print(gs)
i=0
# iterate
for eq in gs:
name = eq[0]
binding = eq[1]
# this is tricky, we're going to bind the variables in the term so that we can
# output X=Y. The Python way is to use dictionaries.
#Instead, we use the T function to transform the Python term back to Prolog
if name != binding:
print(name + " = " + str(binding))
#ok, that was Prolog code
print("yes")
if vs != []:
gs = numbervars( engine, vs)
i=0
# iterate
for eq in gs:
name = eq[0]
binding = eq[1]
# this is tricky, we're going to bind the variables in the term so that we can
# output X=Y. The Python way is to use dictionaries.
#Instead, we use the T function to transform the Python term back to Prolog
if name != binding:
print(name + " = " + str(binding))
#ok, that was Prolog code
else:
print("yes")
# deterministic = one solution
if q.deterministic():
# done
@@ -88,19 +88,21 @@ def query_prolog(engine, s):
def live():
yap_lib_path = os.path.dirname(__file__)
args = yap.YAPEngineArgs()
args.setYapShareDir(os.path.join(yap_lib_path,"prolog"))
args.setYapLibDir(yap_lib_path)
args.setYapShareDir(yap_lib_path)
#args.setYapPrologBootFile(os.path.join(yap_lib_path, "startup.yss"))
engine = yap.YAPEngine( args )
engine.goal( use_module( library('yapi') ) )
engine = yap.YAPEngine(args)
engine.goal( use_module(library('yapi') ) )
loop = True
while loop:
try:
s = input("?- ")
if not s:
loop = False
query_prolog(engine, s)
else:
query_prolog(engine, s)
except SyntaxError as err:
print("Syntax Error error: {0}".format(err))
except EOFError:
@@ -117,4 +119,8 @@ def live():
# initialize engine
# engine = yap.YAPEngine();
# engine = yap.YAPEngine(yap.YAPParams());
live()
#
#
if __name__ == "__main__":
live()
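
Hypothetical usage, not part of the commit: yapi.py is installed as yap4py/yapi.py by the CMake rules above, so the patched interactive loop can also be started from another script (interactiveshell.py further down imports it the same way):

from yap4py import yapi   # module copied to yap4py/yapi.py at build time
yapi.live()               # prints "?- ", reads goals, shows bindings or "yes"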

View File

@@ -1,19 +1,44 @@
configure_file ("setup.py.cmake" "setup.py" )
set (PYTHON_SOURCES
__main__.py
__init__.py
setup.py
_version.py
interactiveshell.py
kernelapp.py
kernelspec.py
yapkernel.py )
configure_file("setup.py" ${CMAKE_CURRENT_BINARY_DIR}/setup.py)
configure_file("MANIFEST.in" ${CMAKE_CURRENT_BINARY_DIR}/MANIFEST.in)
foreach(i ${PYTHON_SOURCES})
configure_file(${i} ${CMAKE_CURRENT_BINARY_DIR}/yap_kernel/${i})
endforeach()
configure_file("_version.py" ${CMAKE_CURRENT_BINARY_DIR}/yap_kernel/_version.py)
configure_file("YAP_KERNEL.md" ${CMAKE_CURRENT_BINARY_DIR}/README)
configure_file("${CMAKE_SOURCE_DIR}/docs/icons/yap_32x32x32.png" ${CMAKE_CURRENT_BINARY_DIR}/yap_kernel/resources/logo-32x32.png)
configure_file("${CMAKE_SOURCE_DIR}/docs/icons/yap_64x64x32.png" ${CMAKE_CURRENT_BINARY_DIR}/yap_kernel/resources/logo-64x64.png)
configure_file("${CMAKE_SOURCE_DIR}/misc/editors/prolog.js" ${CMAKE_CURRENT_BINARY_DIR}/yap_kernel/resources/prolog.js)
set(SETUP_PY "${CMAKE_CURRENT_BINARY_DIR}/setup.py")
set( PYTHON_INSTALL sdist bdist_wheel)
add_custom_target ( YAPKernel ALL
COMMAND ${PYTHON_EXECUTABLE} setup.py build -f
DEPENDS yap_kernel.py
)
add_custom_target( YAPKernel ALL
)
set( PYTHON_INSTALL install)
add_custom_command (TARGET YAPKernel
COMMAND ${PYTHON_EXECUTABLE} setup.py clean sdist bdist_wheel
WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
DEPENDS libYap ${SWIG_MODULE_Py2YAP_REAL_NAME}
)
install(CODE "execute_process(COMMAND ${PYTHON_EXECUTABLE} ${SETUP_PY} ${PYTHON_INSTALL} -f
WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR})")
install(CODE "execute_process(COMMAND ${PYTHON_EXECUTABLE} -m pip install --no-index -f dist yap_kernel)")
install(FILES jupyter.yap
DESTINATION ${libpl}
)
install(FILES jupyter.yap
DESTINATION ${libpl}
)

View File

@@ -1,4 +1,4 @@
if __name__ == '__main__':
import yap_kernel.kernelapp
yap_kernel.kernelapp.launch_new_instance()
from yap_kernel.kernelapp import launch_new_instance
launch_new_instance()

View File

@@ -29,6 +29,7 @@ import types
import subprocess
import warnings
import yap
import yap4py.yapi
from io import open as io_open
from pickleshare import PickleShareDB
@@ -62,7 +63,7 @@ from IPython.core.prefilter import PrefilterManager
from IPython.core.profiledir import ProfileDir
from IPython.core.usage import default_banner
from IPython.core.interactiveshell import InteractiveShellABC, InteractiveShell, ExecutionResult
from IPython.testing.skipdoctest import skip_doctest_py2, skip_doctest
from IPython.testing.skipdoctest import skip_doctest
from IPython.utils import PyColorize
from IPython.utils import io
from IPython.utils import py3compat
@@ -85,16 +86,34 @@ from traitlets import (
)
from warnings import warn
from logging import error
from collections import namedtuple
use_module = namedtuple( 'use_module', 'file')
bindvars = namedtuple( 'bindvars', 'list')
library = namedtuple( 'library', 'list')
v = namedtuple( '_', 'slot')
class YAPInteractiveShell:
"""An enhanced, interactive shell for YAP."""
def __init__(self, kernel):
self.yapeng = yap.YAPEngine()
pjoin = os.path.join
here = os.path.abspath(os.path.dirname(__file__))
yap_lib_path = pjoin(here, "../yap4py/prolog" )
yap_dll_path = pjoin(here, "../yap4py" )
args = yap.YAPEngineArgs()
args.setYapLibDir(yap_dll_path)
args.setYapShareDir(yap_lib_path)
#args.setYapPrologBootFile(os.path.join(yap_lib_path, "startup.yss"))
self.yapeng = yap.YAPEngine( args )
self.q = None
self.yapeng.goal( use_module( library('yapi') ) )
self.shell = kernel.shell
self.shell.run_cell = self.run_cell
def closeq(self):
if self.q:
self.q.close()
@@ -105,26 +124,29 @@ class YAPInteractiveShell:
"""Run a complete IPython cell.
Parameters
----------
raw_cell : str
The code (including IPython code such as %magic functions) to run.
store_history : bool
----------
raw_cell : str
The code (including IPython code such as %magic functions) to run.
store_history : bool
If True, the raw and translated cell will be stored in IPython's
history. For user code calling back into IPython's machinery, this
should be set to False.
silent : bool
history. For user code calling back into IPython's machinery, this
should be set to False.
silent : bool
If True, avoid side-effects, such as implicit displayhooks and
and logging. silent=True forces store_history=False.
shell_futures : bool
and logging. silent=True forces store_history=False.
shell_futures : bool
If True, the code will share future statements with the interactive
shell. It will both be affected by previous __future__ imports, and
any __future__ imports in the code will affect the shell. If False,
__future__ imports are not shared in either direction.
shell. It will both be affected by previous __future__ imports, and
any __future__ imports in the code will affect the shell. If False,
__future__ imports are not shared in either direction.
Returns
-------
result : :class:`ExecutionResult`
"""
-------
result : :class:`ExecutionResult`
"""
def numbervars(self, l):
return self.yapeng.fun(bindvars(l))
result = ExecutionResult()
@@ -161,19 +183,13 @@ class YAPInteractiveShell:
# print('{0}'.format(f.getvalue()))
# Execute the user code
if run:
myvs = self.q.namedVarsCopy()
myvs = self.numbervars(self.q.namedVars())
if myvs:
i = 0
for peq in myvs:
name = peq[0]
bind = peq[1]
if bind.isVar():
var = yap.YAPAtom('$VAR')
f = yap.YAPFunctor(var, 1)
bind.unify(yap.YAPApplTerm(f, (name)))
else:
i = bind.numberVars(i, True)
print(name.text() + " = " + bind.text())
for eq in myvs:
name = eq[0]
binding = eq[1]
if name != binding:
print(name + " = " + str(binding))
else:
print("yes")
if self.q.deterministic():

View File

@@ -38,7 +38,7 @@ from jupyter_client.connect import ConnectionFileMixin
# local imports
from ipykernel.iostream import IOPubThread
from ipykernel.heartbeat import Heartbeat
from .yap_kernel import YAPKernel
from yap_kernel.yapkernel import YAPKernel
from ipykernel.parentpoller import ParentPollerUnix, ParentPollerWindows
from jupyter_client.session import (
Session, session_flags, session_aliases,
@@ -103,7 +103,7 @@ class YAPKernelApp(BaseIPythonApplication, InteractiveShellApp,
flags = Dict(kernel_flags)
classes = [YAPKernel, ZMQInteractiveShell, ProfileDir, Session]
# the kernel class, as an importstring
kernel_class = Type('yap_kernel.yap_kernel.YAPKernel',
kernel_class = Type('yap_kernel.yapkernel.YAPKernel',
klass='ipykernel.kernelbase.Kernel',
help="""The Kernel subclass to be used.

View File

@@ -1,490 +1,188 @@
"""An Application for launching a kernel"""
"""The IPython kernel spec for Jupyter"""
# Copyright (c) YAP Development Team.
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
from __future__ import print_function
import atexit
import errno
import json
import os
import shutil
import sys
import signal
import traceback
import logging
import tempfile
from tornado import ioloop
import zmq
from zmq.eventloop import ioloop as zmq_ioloop
from zmq.eventloop.zmqstream import ZMQStream
from jupyter_client.kernelspec import KernelSpecManager
from IPython.core.application import (
BaseIPythonApplication, base_flags, base_aliases, catch_config_error
)
pjoin = os.path.join
from IPython.core.profiledir import ProfileDir
from IPython.core.shellapp import (
InteractiveShellApp, shell_flags, shell_aliases
)
KERNEL_NAME = 'YAPKernel'
from IPython.utils import io
from ipython_genutils.path import filefind, ensure_dir_exists
from traitlets import (
Any, Instance, Dict, Unicode, Integer, Bool, DottedObjectName, Type, default
)
from ipython_genutils.importstring import import_item
from jupyter_core.paths import jupyter_runtime_dir
from jupyter_client import write_connection_file
from jupyter_client.connect import ConnectionFileMixin
# path to kernelspec resources
RESOURCES = pjoin(os.path.dirname(__file__), 'resources')
# local imports
from ipykernel.iostream import IOPubThread
from ipykernel.heartbeat import Heartbeat
from .yap_kernel import YAPKernel
from ipykernel.parentpoller import ParentPollerUnix, ParentPollerWindows
from jupyter_client.session import (
Session, session_flags, session_aliases,
)
from ipykernel.zmqshell import ZMQInteractiveShell
#-----------------------------------------------------------------------------
# Flags and Aliases
#-----------------------------------------------------------------------------
def make_yap_kernel_cmd(mod='yap_kernel', executable=None, extra_arguments=None, **kw):
"""Build Popen command list for launching an IPython kernel.
kernel_aliases = dict(base_aliases)
kernel_aliases.update({
'ip' : 'YAPKernelApp.ip',
'hb' : 'YAPKernelApp.hb_port',
'shell' : 'YAPKernelApp.shell_port',
'iopub' : 'YAPKernelApp.iopub_port',
'stdin' : 'YAPKernelApp.stdin_port',
'control' : 'YAPKernelApp.control_port',
'f' : 'YAPKernelApp.connection_file',
'transport': 'YAPKernelApp.transport',
})
Parameters
----------
mod : str, optional (default 'yap_kernel')
A string of an IPython module whose __main__ starts an IPython kernel
kernel_flags = dict(base_flags)
kernel_flags.update({
'no-stdout' : (
{'YAPKernelApp' : {'no_stdout' : True}},
"redirect stdout to the null device"),
'no-stderr' : (
{'YAPKernelApp' : {'no_stderr' : True}},
"redirect stderr to the null device"),
'pylab' : (
{'YAPKernelApp' : {'pylab' : 'auto'}},
"""Pre-load matplotlib and numpy for interactive use with
the default matplotlib backend."""),
})
executable : str, optional (default sys.executable)
The Python executable to use for the kernel process.
# inherit flags&aliases for any IPython shell apps
kernel_aliases.update(shell_aliases)
kernel_flags.update(shell_flags)
extra_arguments : list, optional
A list of extra arguments to pass when executing the launch code.
# inherit flags&aliases for Sessions
kernel_aliases.update(session_aliases)
kernel_flags.update(session_flags)
Returns
-------
_ctrl_c_message = """\
NOTE: When using the `ipython kernel` entry point, Ctrl-C will not work.
A Popen command list
"""
if executable is None:
executable = sys.executable
extra_arguments = extra_arguments or []
arguments = [executable, '-m', mod, '-f', '{connection_file}']
arguments.extend(extra_arguments)
To exit, you will have to explicitly quit this process, by either sending
"quit" from a client, or using Ctrl-\\ in UNIX-like environments.
return arguments
To read more about this, see https://github.com/ipython/ipython/issues/2049
"""
#-----------------------------------------------------------------------------
# Application class for starting an YAP Kernel
#-----------------------------------------------------------------------------
class YAPKernelApp(BaseIPythonApplication, InteractiveShellApp,
ConnectionFileMixin):
name='YAP-kernel'
aliases = Dict(kernel_aliases)
flags = Dict(kernel_flags)
classes = [YAPKernel, ZMQInteractiveShell, ProfileDir, Session]
# the kernel class, as an importstring
kernel_class = Type('yap_kernel.yap_kernel.YAPKernel',
klass='ipykernel.kernelbase.Kernel',
help="""The Kernel subclass to be used.
This should allow easy re-use of the YAPKernelApp entry point
to configure and launch kernels other than YAP's own.
""").tag(config=True)
kernel = Any()
poller = Any() # don't restrict this even though current pollers are all Threads
heartbeat = Instance(Heartbeat, allow_none=True)
ports = Dict()
subcommands = {
'install': (
'yap_kernel.kernelspec.InstallYAPKernelSpecApp',
'Install the YAP kernel'
),
def get_kernel_dict(extra_arguments={'mod':'yap_kernel'}):
"""Construct dict for kernel.json"""
return {
'argv': make_yap_kernel_cmd(extra_arguments=extra_arguments),
'display_name': 'YAPKernel 6',
'language': 'prolog',
}
# connection info:
connection_dir = Unicode()
@default('connection_dir')
def _default_connection_dir(self):
return jupyter_runtime_dir()
def write_kernel_spec(path=None, overrides=None, extra_arguments=None):
"""Write a kernel spec directory to `path`
@property
def abs_connection_file(self):
if os.path.basename(self.connection_file) == self.connection_file:
return os.path.join(self.connection_dir, self.connection_file)
else:
return self.connection_file
If `path` is not specified, a temporary directory is created.
If `overrides` is given, the kernelspec JSON is updated before writing.
# streams, etc.
no_stdout = Bool(False, help="redirect stdout to the null device").tag(config=True)
no_stderr = Bool(False, help="redirect stderr to the null device").tag(config=True)
outstream_class = DottedObjectName('ipykernel.iostream.OutStream',
help="The importstring for the OutStream factory").tag(config=True)
displayhook_class = DottedObjectName('ipykernel.displayhook.ZMQDisplayHook',
help="The importstring for the DisplayHook factory").tag(config=True)
The path to the kernelspec is always returned.
"""
if path is None:
path = os.path.join(tempfile.mkdtemp(suffix='_kernels'), KERNEL_NAME)
# polling
parent_handle = Integer(int(os.environ.get('JPY_PARENT_PID') or 0),
help="""kill this process if its parent dies. On Windows, the argument
specifies the HANDLE of the parent process, otherwise it is simply boolean.
""").tag(config=True)
interrupt = Integer(int(os.environ.get('JPY_INTERRUPT_EVENT') or 0),
help="""ONLY USED ON WINDOWS
Interrupt this process when the parent is signaled.
""").tag(config=True)
# stage resources
shutil.copytree(RESOURCES, path)
# write kernel.json
kernel_dict = get_kernel_dict(extra_arguments)
def init_crash_handler(self):
sys.excepthook = self.excepthook
if overrides:
kernel_dict.update(overrides)
with open(pjoin(path, 'kernel.json'), 'w') as f:
json.dump(kernel_dict, f, indent=1)
def excepthook(self, etype, evalue, tb):
# write uncaught traceback to 'real' stderr, not zmq-forwarder
traceback.print_exception(etype, evalue, tb, file=sys.__stderr__)
return path
def init_poller(self):
if sys.platform == 'win32':
if self.interrupt or self.parent_handle:
self.poller = ParentPollerWindows(self.interrupt, self.parent_handle)
elif self.parent_handle:
self.poller = ParentPollerUnix()
def _bind_socket(self, s, port):
iface = '%s://%s' % (self.transport, self.ip)
if self.transport == 'tcp':
if port <= 0:
port = s.bind_to_random_port(iface)
else:
s.bind("tcp://%s:%i" % (self.ip, port))
elif self.transport == 'ipc':
if port <= 0:
port = 1
path = "%s-%i" % (self.ip, port)
while os.path.exists(path):
port = port + 1
path = "%s-%i" % (self.ip, port)
else:
path = "%s-%i" % (self.ip, port)
s.bind("ipc://%s" % path)
return port
def install(kernel_spec_manager=None, user=False, kernel_name=KERNEL_NAME, display_name=None,
prefix=None, profile=None):
"""Install the IPython kernelspec for Jupyter
def write_connection_file(self):
"""write connection info to JSON file"""
cf = self.abs_connection_file
self.log.debug("Writing connection file: %s", cf)
write_connection_file(cf, ip=self.ip, key=self.session.key, transport=self.transport,
shell_port=self.shell_port, stdin_port=self.stdin_port, hb_port=self.hb_port,
iopub_port=self.iopub_port, control_port=self.control_port)
Parameters
----------
def cleanup_connection_file(self):
cf = self.abs_connection_file
self.log.debug("Cleaning up connection file: %s", cf)
try:
os.remove(cf)
except (IOError, OSError):
pass
kernel_spec_manager: KernelSpecManager [optional]
A KernelSpecManager to use for installation.
If none provided, a default instance will be created.
user: bool [default: False]
Whether to do a user-only install, or system-wide.
kernel_name: str, optional
Specify a name for the kernelspec.
This is needed for having multiple IPython kernels for different environments.
display_name: str, optional
Specify the display name for the kernelspec
profile: str, optional
Specify a custom profile to be loaded by the kernel.
prefix: str, optional
Specify an install prefix for the kernelspec.
This is needed to install into a non-default location, such as a conda/virtual-env.
self.cleanup_ipc_files()
Returns
-------
def init_connection_file(self):
if not self.connection_file:
self.connection_file = "kernel-%s.json"%os.getpid()
try:
self.connection_file = filefind(self.connection_file, ['.', self.connection_dir])
except IOError:
self.log.debug("Connection file not found: %s", self.connection_file)
# This means I own it, and I'll create it in this directory:
ensure_dir_exists(os.path.dirname(self.abs_connection_file), 0o700)
# Also, I will clean it up:
atexit.register(self.cleanup_connection_file)
return
try:
self.load_connection_file()
except Exception:
self.log.error("Failed to load connection file: %r", self.connection_file, exc_info=True)
self.exit(1)
The path where the kernelspec was installed.
"""
if kernel_spec_manager is None:
kernel_spec_manager = KernelSpecManager()
def init_sockets(self):
# Create a context, a session, and the kernel sockets.
self.log.info("Starting the kernel at pid: %i", os.getpid())
context = zmq.Context.instance()
# Uncomment this to try closing the context.
# atexit.register(context.term)
if (kernel_name != KERNEL_NAME) and (display_name is None):
# kernel_name is specified and display_name is not
# default display_name to kernel_name
display_name = kernel_name
overrides = {}
if display_name:
overrides["display_name"] = display_name
if profile:
extra_arguments = ["--profile", profile]
if not display_name:
# add the profile to the default display name
overrides["display_name"] = 'Python %i [profile=%s]' % (sys.version_info[0], profile)
else:
extra_arguments = None
path = write_kernel_spec(overrides=overrides, extra_arguments=extra_arguments)
dest = kernel_spec_manager.install_kernel_spec(
path, kernel_name=kernel_name, user=user, prefix=prefix)
# cleanup afterward
shutil.rmtree(path)
return dest
self.shell_socket = context.socket(zmq.ROUTER)
self.shell_socket.linger = 1000
self.shell_port = self._bind_socket(self.shell_socket, self.shell_port)
self.log.debug("shell ROUTER Channel on port: %i" % self.shell_port)
# Entrypoint
self.stdin_socket = context.socket(zmq.ROUTER)
self.stdin_socket.linger = 1000
self.stdin_port = self._bind_socket(self.stdin_socket, self.stdin_port)
self.log.debug("stdin ROUTER Channel on port: %i" % self.stdin_port)
from traitlets.config import Application
self.control_socket = context.socket(zmq.ROUTER)
self.control_socket.linger = 1000
self.control_port = self._bind_socket(self.control_socket, self.control_port)
self.log.debug("control ROUTER Channel on port: %i" % self.control_port)
self.init_iopub(context)
class InstallIPythonKernelSpecApp(Application):
"""Dummy app wrapping argparse"""
name = 'ipython-kernel-install'
def init_iopub(self, context):
self.iopub_socket = context.socket(zmq.PUB)
self.iopub_socket.linger = 1000
self.iopub_port = self._bind_socket(self.iopub_socket, self.iopub_port)
self.log.debug("iopub PUB Channel on port: %i" % self.iopub_port)
self.configure_tornado_logger()
self.iopub_thread = IOPubThread(self.iopub_socket, pipe=True)
self.iopub_thread.start()
# backward-compat: wrap iopub socket API in background thread
self.iopub_socket = self.iopub_thread.background_socket
def init_heartbeat(self):
"""start the heart beating"""
# heartbeat doesn't share context, because it mustn't be blocked
# by the GIL, which is accessed by libzmq when freeing zero-copy messages
hb_ctx = zmq.Context()
self.heartbeat = Heartbeat(hb_ctx, (self.transport, self.ip, self.hb_port))
self.hb_port = self.heartbeat.port
self.log.debug("Heartbeat REP Channel on port: %i" % self.hb_port)
self.heartbeat.start()
def log_connection_info(self):
"""display connection info, and store ports"""
basename = os.path.basename(self.connection_file)
if basename == self.connection_file or \
os.path.dirname(self.connection_file) == self.connection_dir:
# use shortname
tail = basename
else:
tail = self.connection_file
lines = [
"To connect another client to this kernel, use:",
" --existing %s" % tail,
]
# log connection info
# info-level, so often not shown.
# frontends should use the %connect_info magic
# to see the connection info
for line in lines:
self.log.info(line)
# also raw print to the terminal if no parent_handle (`ipython kernel`)
# unless log-level is CRITICAL (--quiet)
if not self.parent_handle and self.log_level < logging.CRITICAL:
io.rprint(_ctrl_c_message)
for line in lines:
io.rprint(line)
self.ports = dict(shell=self.shell_port, iopub=self.iopub_port,
stdin=self.stdin_port, hb=self.hb_port,
control=self.control_port)
def init_blackhole(self):
"""redirects stdout/stderr to devnull if necessary"""
if self.no_stdout or self.no_stderr:
blackhole = open(os.devnull, 'w')
if self.no_stdout:
sys.stdout = sys.__stdout__ = blackhole
if self.no_stderr:
sys.stderr = sys.__stderr__ = blackhole
def init_io(self):
"""Redirect input streams and set a display hook."""
if self.outstream_class:
outstream_factory = import_item(str(self.outstream_class))
sys.stdout = outstream_factory(self.session, self.iopub_thread, u'stdout')
sys.stderr = outstream_factory(self.session, self.iopub_thread, u'stderr')
if self.displayhook_class:
displayhook_factory = import_item(str(self.displayhook_class))
self.displayhook = displayhook_factory(self.session, self.iopub_socket)
sys.displayhook = self.displayhook
self.patch_io()
def patch_io(self):
"""Patch important libraries that can't handle sys.stdout forwarding"""
try:
import faulthandler
except ImportError:
pass
else:
# Warning: this is a monkeypatch of `faulthandler.enable`, watch for possible
# updates to the upstream API and update accordingly (up-to-date as of Python 3.5):
# https://docs.python.org/3/library/faulthandler.html#faulthandler.enable
# change default file to __stderr__ from forwarded stderr
faulthandler_enable = faulthandler.enable
def enable(file=sys.__stderr__, all_threads=True, **kwargs):
return faulthandler_enable(file=file, all_threads=all_threads, **kwargs)
faulthandler.enable = enable
if hasattr(faulthandler, 'register'):
faulthandler_register = faulthandler.register
def register(signum, file=sys.__stderr__, all_threads=True, chain=False, **kwargs):
return faulthandler_register(signum, file=file, all_threads=all_threads,
chain=chain, **kwargs)
faulthandler.register = register
def init_signal(self):
signal.signal(signal.SIGINT, signal.SIG_IGN)
def init_kernel(self):
"""Create the Kernel object itself"""
shell_stream = ZMQStream(self.shell_socket)
control_stream = ZMQStream(self.control_socket)
kernel_factory = self.kernel_class.instance
kernel = kernel_factory(parent=self, session=self.session,
shell_streams=[shell_stream, control_stream],
iopub_thread=self.iopub_thread,
iopub_socket=self.iopub_socket,
stdin_socket=self.stdin_socket,
log=self.log,
profile_dir=self.profile_dir,
user_ns=self.user_ns,
)
kernel.record_ports({
name + '_port': port for name, port in self.ports.items()
})
self.kernel = kernel
# Allow the displayhook to get the execution count
self.displayhook.get_execution_count = lambda: kernel.execution_count
def init_gui_pylab(self):
"""Enable GUI event loop integration, taking pylab into account."""
# Register inline backend as default
# this is higher priority than matplotlibrc,
# but lower priority than anything else (mpl.use() for instance).
# This only affects matplotlib >= 1.5
if not os.environ.get('MPLBACKEND'):
os.environ['MPLBACKEND'] = 'module://ipykernel.pylab.backend_inline'
# Provide a wrapper for :meth:`InteractiveShellApp.init_gui_pylab`
# to ensure that any exception is printed straight to stderr.
# Normally _showtraceback associates the reply with an execution,
# which means frontends will never draw it, as this exception
# is not associated with any execute request.
shell = self.shell
_showtraceback = shell._showtraceback
try:
# replace error-sending traceback with stderr
def print_tb(etype, evalue, stb):
print ("GUI event loop or pylab initialization failed",
file=sys.stderr)
print (shell.InteractiveTB.stb2text(stb), file=sys.stderr)
shell._showtraceback = print_tb
InteractiveShellApp.init_gui_pylab(self)
finally:
shell._showtraceback = _showtraceback
def init_shell(self):
self.shell = getattr(self.kernel, 'shell', None)
if self.shell:
self.shell.configurables.append(self)
def init_extensions(self):
super(YAPKernelApp, self).init_extensions()
# BEGIN HARDCODED WIDGETS HACK
# Ensure ipywidgets extension is loaded if available
extension_man = self.shell.extension_manager
if 'ipywidgets' not in extension_man.loaded:
try:
extension_man.load_extension('ipywidgets')
except ImportError as e:
self.log.debug('ipywidgets package not installed. Widgets will not be available.')
# END HARDCODED WIDGETS HACK
def configure_tornado_logger(self):
""" Configure the tornado logging.Logger.
Must set up the tornado logger or else tornado will call
basicConfig for the root logger which makes the root logger
go to the real sys.stderr instead of the capture streams.
This function mimics the setup of logging.basicConfig.
"""
logger = logging.getLogger('tornado')
handler = logging.StreamHandler()
formatter = logging.Formatter(logging.BASIC_FORMAT)
handler.setFormatter(formatter)
logger.addHandler(handler)
@catch_config_error
def initialize(self, argv=None):
super(YAPKernelApp, self).initialize(argv)
if self.subapp is not None:
return
# register zmq IOLoop with tornado
zmq_ioloop.install()
self.init_blackhole()
self.init_connection_file()
self.init_poller()
self.init_sockets()
self.init_heartbeat()
# writing/displaying connection info must be *after* init_sockets/heartbeat
self.write_connection_file()
# Log connection info after writing connection file, so that the connection
# file is definitely available at the time someone reads the log.
self.log_connection_info()
self.init_io()
self.init_signal()
self.init_kernel()
# shell init steps
self.init_path()
self.init_shell()
if self.shell:
self.init_gui_pylab()
self.init_extensions()
self.init_code()
# flush stdout/stderr, so that anything written to these streams during
# initialization do not get associated with the first execution request
sys.stdout.flush()
sys.stderr.flush()
if argv is None:
argv = sys.argv[1:]
self.argv = argv
def start(self):
if self.subapp is not None:
return self.subapp.start()
if self.poller is not None:
self.poller.start()
self.kernel.start()
import argparse
parser = argparse.ArgumentParser(prog=self.name,
description="Install the IPython kernel spec.")
parser.add_argument('--user', action='store_true',
help="Install for the current user instead of system-wide")
parser.add_argument('--name', type=str, default=KERNEL_NAME,
help="Specify a name for the kernelspec."
" This is needed to have multiple IPython kernels at the same time.")
parser.add_argument('--display-name', type=str,
help="Specify the display name for the kernelspec."
" This is helpful when you have multiple IPython kernels.")
parser.add_argument('--profile', type=str,
help="Specify an IPython profile to load. "
"This can be used to create custom versions of the kernel.")
parser.add_argument('--prefix', type=str,
help="Specify an install prefix for the kernelspec."
" This is needed to install into a non-default location, such as a conda/virtual-env.")
parser.add_argument('--sys-prefix', action='store_const', const=sys.prefix, dest='prefix',
help="Install to Python's sys.prefix."
" Shorthand for --prefix='%s'. For use in conda/virtual-envs." % sys.prefix)
opts = parser.parse_args(self.argv)
try:
ioloop.IOLoop.instance().start()
except KeyboardInterrupt:
pass
launch_new_instance = YAPKernelApp.launch_instance
def main():
"""Run an IPKernel as an application"""
app = YAPKernelApp.instance()
app.initialize()
app.start()
dest = install(user=opts.user, kernel_name=opts.name, profile=opts.profile,
prefix=opts.prefix, display_name=opts.display_name)
except OSError as e:
if e.errno == errno.EACCES:
print(e, file=sys.stderr)
if opts.user:
print("Perhaps you want `sudo` or `--user`?", file=sys.stderr)
self.exit(1)
raise
print("Installed kernelspec %s in %s" % (opts.name, dest))
if __name__ == '__main__':
main()
InstallIPythonKernelSpecApp.launch_instance()
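
A hedged example, not part of the commit, of calling the install() helper defined above directly; the module path yap_kernel.kernelspec is assumed from the 'install' subcommand registered in this hunk:

# writes kernel.json via write_kernel_spec() and registers it with Jupyter
from yap_kernel.kernelspec import install

dest = install(user=True, display_name='YAPKernel 6')
print("kernelspec installed in", dest)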

View File

@@ -7,7 +7,7 @@
from __future__ import print_function
# the name of the package
name = 'ipykernel'
name = 'yap_kernel'
#-----------------------------------------------------------------------------
# Minimal Python version sanity check
@@ -56,7 +56,6 @@ setup_args = dict(
version = version_ns['__version__'],
scripts = glob(pjoin('scripts', '*')),
packages = packages,
py_modules = ['ipykernel_launcher'],
package_data = package_data,
description = "IPython Kernel for Jupyter",
author = 'IPython Development Team',
@@ -80,12 +79,12 @@ if 'develop' in sys.argv or any(a.startswith('bdist') for a in sys.argv):
import setuptools
setuptools_args = {}
install_requires = setuptools_args['install_requires'] = [
'ipython>=4.0.0',
'traitlets>=4.1.0',
'jupyter_client',
'tornado>=4.0',
]
# install_requires = setuptools_args['install_requires'] = [
# 'ipython>=4.0.0',
# 'traitlets>=4.1.0',
# 'jupyter_client',
# 'tornado>=4.0',
# ]
if any(a.startswith(('bdist', 'build', 'install')) for a in sys.argv):
from ipykernel.kernelspec import write_kernel_spec, make_ipkernel_cmd, KERNEL_NAME

View File

@@ -1,417 +0,0 @@
from __future__ import print_function
import signal
import yap
import io
import getpass
import sys
import traceback
from IPython.core import release
from ipython_genutils.py3compat import builtin_mod, PY3, unicode_type, safe_unicode
from IPython.utils.tokenutil import token_at_cursor, line_at_cursor
from traitlets import Instance, Type, Any, List
from ipykernel.comm import CommManager
from ipykernel.kernelbase import Kernel as KernelBase
from ipykernel.zmqshell import ZMQInteractiveShell
from .interactiveshell import YAPInteractiveShell
from IPython.core.interactiveshell import InteractiveShellABC, InteractiveShell
from contextlib import redirect_stdout
kernel_json = {
"argv": [sys.executable,
"-m", "yap_kernel",
"-f", "{connection_file}"],
"display_name": " YAP-6.3",
"language": "prolog",
"name": "yap_kernel",
}
def eprint(*args, **kwargs):
print(*args, file=sys.stderr, **kwargs)
class YAPKernel(KernelBase):
shell = Instance('IPython.core.interactiveshell.InteractiveShellABC',
allow_none=True)
shell_class = Type(ZMQInteractiveShell )
user_ns = Instance(dict, args=None, allow_none=True)
def _user_ns_changed(self, name, old, new):
if self.shell is not None:
self.shell.user_ns = new
self.shell.init_user_ns()
# A reference to the Python builtin 'raw_input' function.
# (i.e., __builtin__.raw_input for Python 2.7, builtins.input for Python 3)
_sys_raw_input = Any()
_sys_eval_input = Any()
implementation = 'YAP Kernel'
implementation_version = '1.0'
language = 'text'
language_version = '6.3'
banner = "YAP-6.3"
language_info = {
'mimetype': 'text/prolog',
'name': 'text',
# ------ If different from 'language':
'codemirror_mode': {
"version": 2,
"name": "prolog"
},
'pygments_lexer': 'prolog',
'version': "0.0.1",
'file_extension': '.yap',
}
#-------------------------------------------------------------------------
# Things related to history management
#-------------------------------------------------------------------------
def __init__(self, **kwargs):
# sp = super(YAPKernel, self)
super(YAPKernel, self).__init__(**kwargs)
# Initialize the InteractiveShell subclass
self.shell = self.shell_class.instance(parent=self,
profile_dir = self.profile_dir,
user_ns = self.user_ns,
kernel = self,
)
self.shell.displayhook.session = self.session
self.shell.displayhook.pub_socket = self.iopub_socket
self.shell.displayhook.topic = self._topic('execute_result')
self.shell.display_pub.session = self.session
self.shell.display_pub.pub_socket = self.iopub_socket
self.comm_manager = CommManager(parent=self, kernel=self)
# self.shell._last_traceback = None
self.shell.configurables.append(self.comm_manager)
comm_msg_types = [ 'comm_open', 'comm_msg', 'comm_close' ]
for msg_type in comm_msg_types:
self.shell_handlers[msg_type] = getattr(self.comm_manager, msg_type)
self.yap_shell = YAPInteractiveShell( self )
def get_usage(self):
return "This is the YAP kernel."
help_links = List([
{
'text': "Python",
'url': "http://docs.python.org/%i.%i" % sys.version_info[:2],
},
{
'text': "YAP",
'url': "http://YAP.org/documentation.html",
},
{
'text': "NumPy",
'url': "http://docs.scipy.org/doc/numpy/reference/",
},
{
'text': "SciPy",
'url': "http://docs.scipy.org/doc/scipy/reference/",
},
{
'text': "Matplotlib",
'url': "http://matplotlib.org/contents.html",
},
{
'text': "SymPy",
'url': "http://docs.sympy.org/latest/index.html",
},
{
'text': "pandas",
'url': "http://pandas.pydata.org/pandas-docs/stable/",
},
]).tag(config=True)
# Kernel info fields
implementation = 'YAP'
implementation_version = release.version
language_info = {
'name': 'python',
'version': sys.version.split()[0],
'mimetype': 'text/x-python',
'codemirror_mode': {
'name': 'prolog',
'version': sys.version_info[0]
},
'pygments_lexer': 'prolog',
'nbconvert_exporter': 'python',
'file_extension': '.yap'
}
@property
def banner(self):
return self.shell.banner
def start(self):
self.shell.exit_now = False
super(YAPKernel, self).start()
def set_parent(self, ident, parent):
"""Overridden from parent to tell the display hook and output streams
about the parent message.
"""
super(YAPKernel, self).set_parent(ident, parent)
self.shell.set_parent(parent)
def init_metadata(self, parent):
"""Initialize metadata.
Run at the beginning of each execution request.
"""
md = super(YAPKernel, self).init_metadata(parent)
# FIXME: remove deprecated ipyparallel-specific code
# This is required for ipyparallel < 5.0
md.update({
'dependencies_met' : True,
'engine' : self.ident,
})
return md
def finish_metadata(self, parent, metadata, reply_content):
"""Finish populating metadata.
Run after completing an execution request.
"""
# FIXME: remove deprecated ipyparallel-specific code
# This is required by ipyparallel < 5.0
metadata['status'] = reply_content['status']
if reply_content['status'] == 'error' and reply_content['ename'] == 'UnmetDependency':
metadata['dependencies_met'] = False
return metadata
def _forward_input(self, allow_stdin=False):
"""Forward raw_input and getpass to the current frontend.
via input_request
"""
self._allow_stdin = allow_stdin
if PY3:
self._sys_raw_input = builtin_mod.input
builtin_mod.input = self.raw_input
else:
self._sys_raw_input = builtin_mod.raw_input
self._sys_eval_input = builtin_mod.input
builtin_mod.raw_input = self.raw_input
builtin_mod.input = lambda prompt='': eval(self.raw_input(prompt))
self._save_getpass = getpass.getpass
getpass.getpass = self.getpass
def _restore_input(self):
"""Restore raw_input, getpass"""
if PY3:
builtin_mod.input = self._sys_raw_input
else:
builtin_mod.raw_input = self._sys_raw_input
builtin_mod.input = self._sys_eval_input
getpass.getpass = self._save_getpass
@property
def execution_count(self):
return self.shell.execution_count
@execution_count.setter
def execution_count(self, value):
# Ignore the incrementing done by KernelBase, in favour of our shell's
# execution counter.
pass
def do_execute(self, code, silent, store_history=True,
user_expressions=None, allow_stdin=False):
shell = self.shell # we'll need this a lot here
self._forward_input(allow_stdin)
reply_content = {}
try:
res = shell.run_cell(code, store_history=store_history, silent=silent)
finally:
self._restore_input()
if res.error_before_exec is not None:
err = res.error_before_exec
else:
err = res.error_in_exec
if res.success:
reply_content[u'status'] = u'ok'
elif isinstance(err, KeyboardInterrupt):
reply_content[u'status'] = u'aborted'
else:
reply_content[u'status'] = u'error'
reply_content.update({
# u'traceback': shell._last_traceback or [],
u'ename': unicode_type(type(err).__name__),
u'evalue': safe_unicode(err),
})
# FIXME: deprecate piece for ipyparallel:
e_info = dict(engine_uuid=self.ident, engine_id=self.int_id,
method='execute')
reply_content['engine_info'] = e_info
# Return the execution counter so clients can display prompts
reply_content['execution_count'] = shell.execution_count - 1
if 'traceback' in reply_content:
self.log.info("Exception in execute request:\n%s", '\n'.join(reply_content['traceback']))
# At this point, we can tell whether the main code execution succeeded
# or not. If it did, we proceed to evaluate user_expressions
if reply_content['status'] == 'ok':
reply_content[u'user_expressions'] = \
shell.user_expressions(user_expressions or {})
else:
# If there was an error, don't even try to compute expressions
reply_content[u'user_expressions'] = {}
# Payloads should be retrieved regardless of outcome, so we can both
# recover partial output (that could have been generated early in a
# block, before an error) and always clear the payload system.
reply_content[u'payload'] = shell.payload_manager.read_payload()
# Be aggressive about clearing the payload because we don't want
# it to sit in memory until the next execute_request comes in.
shell.payload_manager.clear_payload()
return reply_content
def do_complete(self, code, cursor_pos):
# FIXME: YAP completers currently assume single line,
# but completion messages give multi-line context
# For now, extract line from cell, based on cursor_pos:
if cursor_pos is None:
cursor_pos = len(code)
line, offset = line_at_cursor(code, cursor_pos)
line_cursor = cursor_pos - offset
txt, matches = self.shell.complete('', line, line_cursor)
return {'matches' : matches,
'cursor_end' : cursor_pos,
'cursor_start' : cursor_pos - len(txt),
'metadata' : {},
'status' : 'ok'}
def do_inspect(self, code, cursor_pos, detail_level=0):
name = token_at_cursor(code, cursor_pos)
info = self.shell.object_inspect(name)
reply_content = {'status' : 'ok'}
reply_content['data'] = data = {}
reply_content['metadata'] = {}
reply_content['found'] = info['found']
if info['found']:
info_text = self.shell.object_inspect_text(
name,
detail_level=detail_level,
)
data['text/plain'] = info_text
return reply_content
def do_history(self, hist_access_type, output, raw, session=0, start=0,
stop=None, n=None, pattern=None, unique=False):
if hist_access_type == 'tail':
hist = self.shell.history_manager.get_tail(n, raw=raw, output=output,
include_latest=True)
elif hist_access_type == 'range':
hist = self.shell.history_manager.get_range(session, start, stop,
raw=raw, output=output)
elif hist_access_type == 'search':
hist = self.shell.history_manager.search(
pattern, raw=raw, output=output, n=n, unique=unique)
else:
hist = []
return {
'status': 'ok',
'history' : list(hist),
}
def do_shutdown(self, restart):
self.shell.exit_now = True
return dict(status='ok', restart=restart)
def do_is_complete(self, code):
status, indent_spaces = self.shell.input_transformer_manager.check_complete(code)
r = {'status': status}
if status == 'incomplete':
r['indent'] = ' ' * indent_spaces
return r
def do_apply(self, content, bufs, msg_id, reply_metadata):
from .serialize import serialize_object, unpack_apply_message
shell = self.shell
try:
working = shell.user_ns
prefix = "_"+str(msg_id).replace("-","")+"_"
f,args,kwargs = unpack_apply_message(bufs, working, copy=False)
fname = getattr(f, '__name__', 'f')
fname = prefix+"f"
argname = prefix+"args"
kwargname = prefix+"kwargs"
resultname = prefix+"result"
ns = { fname : f, argname : args, kwargname : kwargs , resultname : None }
# print ns
working.update(ns)
code = "%s = %s(*%s,**%s)" % (resultname, fname, argname, kwargname)
try:
exec(code, shell.user_global_ns, shell.user_ns)
result = working.get(resultname)
finally:
for key in ns:
working.pop(key)
result_buf = serialize_object(result,
buffer_threshold=self.session.buffer_threshold,
item_threshold=self.session.item_threshold,
)
except BaseException as e:
# invoke YAP traceback formatting
shell.showtraceback()
reply_content = {
u'traceback': shell._last_traceback or [],
u'ename': unicode_type(type(e).__name__),
u'evalue': safe_unicode(e),
}
# FIXME: deprecate piece for ipyparallel:
e_info = dict(engine_uuid=self.ident, engine_id=self.int_id, method='apply')
reply_content['engine_info'] = e_info
self.send_response(self.iopub_socket, u'error', reply_content,
ident=self._topic('error'))
self.log.info("Exception in apply request:\n%s", '\n'.join(reply_content['traceback']))
result_buf = []
reply_content['status'] = 'error'
else:
reply_content = {'status' : 'ok'}
return reply_content, result_buf
def do_clear(self):
self.shell.reset(False)
return dict(status='ok')