commit 562e9e5af3
Author: Vitor Santos Costa
Date:   2018-07-21 01:56:48 +01:00
Parent: abc11dcfaa
28 changed files with 1266 additions and 1567 deletions

View File

@@ -6,7 +6,7 @@ set (PYTHON_HEADERS py4yap.h)
set (CMAKE_POSITION_INDEPENDENT_CODE TRUE)
include_directories( BEFORE ${PYTHON_INCLUDE_DIRS} ${CMAKE_BINARY_DIR}
${CMAKE_SOURCE_DIR}/include ${CMAKE_SOURCE_DIR}/os )
${CMAKE_SOURCE_DIR}/include ${CMAKE_SOURCE_DIR}/os ${CMAKE_SOURCE_DIR}/H ${CMAKE_SOURCE_DIR}/OPTYap )
#talk to python.pl
add_lib(YAPPython pyload.c ${PYTHON_HEADERS} )

View File

@@ -1,4 +1,5 @@
#include "Yap.h"
#include "py4yap.h"
@@ -109,7 +110,7 @@ static bool copy_to_dictionary(PyObject *dict, term_t targ, term_t taux,
PyObject *term_to_python(term_t t, bool eval, PyObject *o, bool cvt) {
// o≈
YAP_Term yt = YAP_GetFromSlot(t);
// Yap_DebugPlWriteln(yt);
Yap_DebugPlWriteln(yt);
switch (PL_term_type(t)) {
case PL_VARIABLE: {
if (yt == 0) {
@@ -186,31 +187,24 @@ PyObject *term_to_python(term_t t, bool eval, PyObject *o, bool cvt) {
}
default:
if (PL_is_pair(t)) {
term_t tail = PL_new_term_ref();
term_t arg = PL_new_term_ref();
size_t len, i;
if (PL_skip_list(t, tail, &len) && PL_get_nil(tail)) {
Term t0 = Yap_GetFromHandle(t);
Term *tail;
size_t len,i;
if ((len = Yap_SkipList(&t0, &tail))>=0 && *tail == TermNil) {
PyObject *out, *a;
out = PyList_New(len);
if (!out) {
PL_reset_term_refs(tail);
YAPPy_ThrowError(SYSTEM_ERROR_INTERNAL, t, "list->python");
}
for (i = 0; i < len; i++) {
if (!PL_get_list(t, arg, t)) {
PL_reset_term_refs(tail);
YAPPy_ThrowError(SYSTEM_ERROR_INTERNAL, t, "list->python");
}
a = term_to_python(arg, eval, o, cvt);
Term ai = HeadOfTerm(t0);
a = term_to_python(Yap_InitHandle(ai), eval, o, cvt);
if (a) {
if (PyList_SetItem(out, i, a) < 0) {
YAPPy_ThrowError(SYSTEM_ERROR_INTERNAL, t, "list->python");
}
}
t0 = TailOfTerm(t0);
}
PL_reset_term_refs(tail);
return out;
} else {
PyObject *no = find_obj(o, t, false);
@@ -343,7 +337,7 @@ PyObject *term_to_python(term_t t, bool eval, PyObject *o, bool cvt) {
AOK(PL_get_arg(1, t, t), NULL);
if (!(dict = PyDict_New()))
return NULL;
Py_INCREF(dict);
DebugPrintf("Dict %p\n", dict);
while (PL_is_functor(t, FUNCTOR_comma2)) {
@@ -360,6 +354,7 @@ PyObject *term_to_python(term_t t, bool eval, PyObject *o, bool cvt) {
return dict;
}
AOK(PL_get_name_arity(t, &name, &arity), NULL);
if (name == ATOM_t) {
int i;
rc = PyTuple_New(arity);
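
A note on the list branch above: the rewrite drops the SWI-style PL_skip_list/PL_get_list walk in favour of YAP's native Yap_SkipList plus HeadOfTerm/TailOfTerm. A minimal Python analogue of that traversal, for illustration only (cons cells are modelled here as (head, tail) pairs; none of these Python names exist in the C sources):

def cons_list_to_pylist(term, convert):
    # Walk a proper list given as nested (head, tail) pairs, converting each
    # head with `convert`; reject improper lists, mirroring the
    # *tail == TermNil check in the C code above.
    out = []
    node = term
    while isinstance(node, tuple) and len(node) == 2:
        head, node = node
        out.append(convert(head))
    if node != []:
        raise ValueError("list->python: improper list")
    return out

# (1, (2, (3, []))) stands for the Prolog list [1,2,3].
assert cons_list_to_pylist((1, (2, (3, []))), int) == [1, 2, 3]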

View File

@@ -1,5 +1,9 @@
#include "Yap.h"
#include "py4yap.h"
#include <frameobject.h>
void YAPPy_ThrowError__(const char *file, const char *function, int lineno,
@@ -28,12 +32,9 @@ void YAPPy_ThrowError__(const char *file, const char *function, int lineno,
}
}
static foreign_t repr_term(PyObject *pVal, term_t t) {
term_t to = PL_new_term_ref(), t1 = PL_new_term_ref();
PL_put_pointer(t1, pVal);
PL_cons_functor(to, FUNCTOR_pointer1, t1);
Py_INCREF(pVal);
return PL_unify(t, to);
static Term repr_term(PyObject *pVal) {
Term t = MkAddressTerm(pVal);
return Yap_MkApplTerm(FunctorObj, 1, &t);
}
foreign_t assign_to_symbol(term_t t, PyObject *e);
@@ -50,185 +51,157 @@ foreign_t assign_to_symbol(term_t t, PyObject *e) {
return PyObject_SetAttrString(dic, s, e) == 0;
}
foreign_t python_to_term(PyObject *pVal, term_t t)
{
bool rc = true;
term_t to = PL_new_term_ref();
// fputs(" <<*** ",stderr); PyObject_Print(pVal,stderr,0);
// fputs("<<***\n",stderr);
static Term python_to_term__(PyObject *pVal) {
if (pVal == Py_None) {
// fputs("<<*** ",stderr);Yap_DebugPlWrite(YAP_GetFromSlot(t)); fputs("
// >>***\n",stderr);
rc = true;
return YAP_MkVarTerm();
// fputs("<<*** ",stderr);Yap_DebugPlWrite(YAP_GetFromSlot(t)); fputs("
// >>***\n",stderr);
} else if (PyBool_Check(pVal)) {
rc = rc && PL_unify_bool(t, PyObject_IsTrue(pVal));
if(PyObject_IsTrue(pVal)) return TermTrue;
return TermFalse;
} else if (PyLong_Check(pVal)) {
rc = rc && PL_unify_int64(t, PyLong_AsLong(pVal));
return MkIntegerTerm(PyLong_AsLong(pVal));
#if PY_MAJOR_VERSION < 3
} else if (PyInt_Check(pVal)) {
rc = rc && PL_unify_int64(t, PyInt_AsLong(pVal));
return MkIntegerTerm(PyInt_AsLong(pVal));
#endif
} else if (PyFloat_Check(pVal)) {
rc = rc && PL_unify_float(t, PyFloat_AsDouble(pVal));
return MkFloatTerm(PyFloat_AsDouble(pVal));
} else if (PyComplex_Check(pVal)) {
term_t t1 = PL_new_term_ref(), t2 = PL_new_term_ref();
if (!PL_put_float(t1, PyComplex_RealAsDouble(pVal)) ||
!PL_put_float(t2, PyComplex_ImagAsDouble(pVal)) ||
!PL_cons_functor(to, FUNCTOR_complex2, t1, t2)) {
rc = false;
} else {
rc = rc && PL_unify(t, to);
}
} else if (PyUnicode_Check(pVal)) {
Term t[2];
t[0] = MkFloatTerm(PyComplex_RealAsDouble(pVal));
t[1] = MkFloatTerm(PyComplex_ImagAsDouble(pVal));
return Yap_MkApplTerm(FunctorI, 2, t);
}
else if (PyUnicode_Check(pVal)) {
#if PY_MAJOR_VERSION < 3
size_t sz = PyUnicode_GetSize(pVal) + 1;
wchar_t *s = malloc(sizeof(wchar_t) * sz);
sz = PyUnicode_AsWideChar((PyUnicodeObject *)pVal, a, sz - 1);
free(ptr);
#else
const char *s = PyUnicode_AsUTF8(pVal);
#endif
if (Yap_AtomInUse(s))
#if 0
if (false && Yap_AtomInUse(s))
rc = rc && PL_unify_atom_chars(t, s);
else
rc = rc && PL_unify_string_chars(t, s);
} else if (PyByteArray_Check(pVal)) {
rc = rc && PL_unify_string_chars(t, PyByteArray_AsString(pVal));
#if PY_MAJOR_VERSION < 3
} else if (PyString_Check(pVal)) {
rc = rc && PL_unify_string_chars(t, PyString_AsString(pVal));
#endif
} else if (PyTuple_Check(pVal)) {
Py_ssize_t i, sz = PyTuple_Size(pVal);
functor_t f;
const char *s;
if (sz == 0) {
rc = rc && PL_unify_atom(t, ATOM_brackets);
} else {
if ((s = (Py_TYPE(pVal)->tp_name))) {
if (!strcmp(s, "v")) {
pVal = PyTuple_GetItem(pVal, 0);
if (pVal == NULL) {
pVal = Py_None;
PyErr_Clear();
}
term_t v = YAP_InitSlot(PyLong_AsLong(pVal));
return PL_unify(v, t);
}
if (s[0] == '$') {
char *ns = malloc(strlen(s) + 5);
strcpy(ns, "__");
strcat(ns, s + 1);
strcat(ns, "__");
f = PL_new_functor(PL_new_atom(ns), sz);
} else {
f = PL_new_functor(PL_new_atom(s), sz);
}
} else {
f = PL_new_functor(ATOM_t, sz);
}
if (PL_unify_functor(t, f)) {
for (i = 0; i < sz; i++) {
term_t to = PL_new_term_ref();
if (!PL_get_arg(i + 1, t, to))
rc = false;
PyObject *p = PyTuple_GetItem(pVal, i);
if (p == NULL) {
PyErr_Clear();
p = Py_None;
} else {
rc = rc && python_to_term(p, to);
}
PL_reset_term_refs(to);
}
} else {
rc = false;
}
// fputs(" ||*** ",stderr); Yap_DebugPlWrite(YAP_GetFromSlot(t)); fputs("
// ||***\n",stderr);
return MkStringTerm(s);
}
else if (PyByteArray_Check(pVal)) {
return MkStringTerm(PyByteArray_AsString(pVal));
#if PY_MAJOR_VERSION < 3
}
else if (PyString_Check(pVal)) {
return MkStringTerm(PyString_AsString(pVal));
#endif
}
else if (PyTuple_Check(pVal)) {
Py_ssize_t sz = PyTuple_Size(pVal);
const char *s;
s = Py_TYPE(pVal)->tp_name;
if (s == NULL)
s = "t";
if (sz == 0) {
return MkAtomTerm(YAP_LookupAtom(Py_TYPE(pVal)->tp_name));
}
else {
Functor f = Yap_MkFunctor(Yap_LookupAtom(s), sz);
Term t = Yap_MkNewApplTerm(f, sz);
long i;
CELL *ptr = RepAppl(t) + 1;
for (i = 0; i < sz; i++) {
PyObject *p = PyTuple_GetItem(pVal, i);
if (p == NULL) {
PyErr_Clear();
return false;
}
} else if (PyList_Check(pVal)) {
Py_ssize_t i, sz = PyList_GET_SIZE(pVal);
for (i = 0; i < sz; i++) {
PyObject *obj;
term_t to = PL_new_term_ref();
rc = rc && PL_unify_list(t, to, t);
if ((obj = PyList_GetItem(pVal, i)) == NULL) {
obj = Py_None;
}
rc = rc && python_to_term(obj, to);
PL_reset_term_refs(to);
if (!rc)
return false;
}
return rc && PL_unify_nil(t);
// fputs("[***] ", stderr);
// Yap_DebugPlWrite(yt); fputs("[***]\n", stderr);
} else if (PyDict_Check(pVal)) {
Py_ssize_t pos = 0;
int left = PyDict_Size(pVal);
PyObject *key, *value;
if (left == 0) {
rc = rc && PL_unify_atom(t, ATOM_curly_brackets);
} else {
while (PyDict_Next(pVal, &pos, &key, &value)) {
term_t tkey = PL_new_term_ref(), tval = PL_new_term_ref(), tint,
tnew = PL_new_term_ref();
term_t to = PL_new_term_ref();
/* do something interesting with the values... */
if (!python_to_term(key, tkey)) {
continue;
}
if (!python_to_term(value, tval)) {
continue;
}
/* reuse */
tint = tkey;
if (!PL_cons_functor(tint, FUNCTOR_colon2, tkey, tval)) {
rc = false;
continue;
}
if (--left) {
if (!PL_cons_functor(tint, FUNCTOR_comma2, tint, tnew))
PL_reset_term_refs(tkey);
rc = false;
}
if (!PL_unify(to, tint)) {
rc = false;
}
}
rc = rc && PL_unify(t, to);
}
} else {
rc = rc && repr_term(pVal, t);
*ptr++ = python_to_term__(p);
}
return t;
}
// PL_reset_term_refs(to);
// fputs(" ||*** ",stderr); Yap_DebugPlWrite(YAP_GetFromSlot(t)); fputs("
// ||***\n",stderr);
}
else if (PyList_Check(pVal)) {
Py_ssize_t i, sz = PyList_GET_SIZE(pVal);
if (sz == 0)
return TermNil;
Term t = TermNil;
for (i = sz; i > 0; --i) {
PyObject *p = PyTuple_GetItem(pVal, i);
if (p == NULL) {
PyErr_Clear();
return false;
}
if (!python_to_term__(p))
return false;
t = MkPairTerm(python_to_term__(p), t);
}
return t;
}
else if (PyDict_Check(pVal)) {
Py_ssize_t pos = 0;
int left = PyDict_Size(pVal);
PyObject *key, *value;
Term f, *opt = &f, t;
if (left == 0) {
return ATOM_curly_brackets;
} else {
while (PyDict_Next(pVal, &pos, &key, &value)) {
Term t0[2], to;
t0[0] = python_to_term__(key);
t0[1] = python_to_term__(value);
to = Yap_MkApplTerm(FunctorEq, 2, t0);
if (left--) {
t = Yap_MkNewApplTerm(FunctorOr, 2);
*opt = t;
CELL *pt = RepAppl(t) + 1;
pt[0] = to;
opt = pt + 1;
} else {
*opt = t = to;
}
}
return Yap_MkApplTerm(FunctorBraces, 1, &t);
}
}else {
return repr_term(pVal);
}
}
foreign_t python_to_term(PyObject *pVal, term_t t) {
term_t t0 = PL_new_term_ref();
bool rc = python_to_term__(pVal);
PL_reset_term_refs(t0);
return rc;
}
// extern bool Yap_do_low_level_trace;
X_API YAP_Term pythonToYAP(PyObject *pVal) {
term_t t = PL_new_term_ref();
if (pVal == NULL || !python_to_term(pVal, t)) {
PL_reset_term_refs(t);
return 0;
}
YAP_Term tt = YAP_GetFromSlot(t);
PL_reset_term_refs(t);
// Yap_do_low_level_trace=1;
/* fputs(" *** ", stderr); */
/* PyObject_Print(pVal, stderr, 0); */
/* fputs("***>>\n", stderr); */
if (pVal == NULL)
Yap_ThrowError(SYSTEM_ERROR_INTERNAL, 0, NULL);
Term t = python_to_term__(pVal);
/* fputs("<< *** ", stderr); */
/* Yap_DebugPlWrite(t); */
/* fputs(" ***\n", stderr); */
// Py_DECREF(pVal);
return tt;
return t;
}
PyObject *py_Local, *py_Global;
/**
* assigns the Python RHS to a Prolog term LHS, ie LHS = RHS
*

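For orientation, a rough Python rendering of the mapping that the new python_to_term__ implements. This is a sketch only: the C function above builds YAP terms rather than strings, and it joins dictionary entries with FunctorOr, which is simplified to a comma here.

def py_to_prolog_text(v):
    # Sketch of python_to_term__ as a Python-value -> Prolog-text mapping.
    if v is None:
        return "_"                                   # fresh variable
    if isinstance(v, bool):
        return "true" if v else "false"              # TermTrue / TermFalse
    if isinstance(v, (int, float)):
        return str(v)                                # MkIntegerTerm / MkFloatTerm
    if isinstance(v, complex):
        return "i({}, {})".format(v.real, v.imag)    # FunctorI/2
    if isinstance(v, str):
        return '"{}"'.format(v)                      # MkStringTerm
    if isinstance(v, tuple):
        name = type(v).__name__ or "t"               # tp_name; namedtuples keep theirs
        if not v:
            return name
        return "{}({})".format(name, ", ".join(map(py_to_prolog_text, v)))
    if isinstance(v, list):
        return "[{}]".format(", ".join(map(py_to_prolog_text, v)))
    if isinstance(v, dict):
        if not v:
            return "{}"
        body = ", ".join("{}={}".format(py_to_prolog_text(k), py_to_prolog_text(x))
                         for k, x in v.items())
        return "{" + body + "}"
    return "obj(0x{:x})".format(id(v))               # repr_term fallback

# e.g. py_to_prolog_text({"a": [1, 2.5]}) == '{"a"=[1, 2.5]}'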
View File

@@ -23,6 +23,11 @@ static foreign_t python_len(term_t tobj, term_t tf) {
len = PyObject_Length(o);
pyErrorAndReturn(PL_unify_int64(tf, len));
}
static foreign_t python_clear_errors(void) {
PyErr_Clear();
return true;
}
static foreign_t python_dir(term_t tobj, term_t tf) {
PyObject *dir;
@@ -701,6 +706,7 @@ install_t install_pypreds(void) {
PL_register_foreign("python_import", 2, python_import, 0);
PL_register_foreign("python_access", 3, python_access, 0);
PL_register_foreign("python_threaded", 0, p_python_threaded, 0);
PL_register_foreign("python_clear_errors", 0, python_clear_errors, 0);
init_python_vfs();
}

View File

@@ -32,6 +32,7 @@
release_GIL/0,
python_threaded/0,
prolog_list_to_python_list/3,
python_clear_errors/0,
op(100,fy,$),
op(950,fy,:=),
op(950,yfx,:=),

View File

@@ -3,20 +3,20 @@
%% @brief support yap shell
%%
%:- start_low_level_trace.
:- module(yapi, [
python_ouput/0,
show_answer/2,
show_answer/3,
yap_query/4,
python_query/2,
python_query/3,
python_import/1,
yapi_query/2
]).
%% :- module(yapi, [
%% python_ouput/0,
%% show_answer/2,
%% show_answer/3,
%% yap_query/4,
%% python_query/2,
%% python_query/3,
%% python_import/1,
%% yapi_query/2
%% ]).
:- yap_flag(verbose, silent).
:- use_module(library(python)).
:- use_module( library(lists) ).
:- use_module( library(maplist) ).
@@ -88,3 +88,4 @@ in_dict(Dict, nonvar([V0|Vs],G)) :- !,
in_dict( Dict, nonvar(Vs, G) ).
in_dict(_Dict, nonvar([],_G)) :- !.
in_dict(_, _)

View File

@@ -6,16 +6,17 @@ import sys
yap_lib_path = dirname(__file__)
compile = namedtuple('compile', 'file')
bindvars = namedtuple('bindvars', 'list')
library = namedtuple('library', 'list')
compile = namedtuple('compile', 'file')
jupyter_query = namedtuple('jupyter_query', 'vars dict')
library = namedtuple('library', 'listfiles')
prolog_library = namedtuple('prolog_library', 'listfiles')
python_query = namedtuple('python_query', 'vars dict')
set_prolog_flag = namedtuple('set_prolog_flag', 'flag new_value')
show_answer = namedtuple('show_answer', 'vars dict')
v0 = namedtuple('v', 'slot')
yap_query = namedtuple('yap_query', 'query owner')
jupyter_query = namedtuple('jupyter_query', 'vars dict')
python_query = namedtuple('python_query', 'vars dict')
yapi_query = namedtuple('yapi_query', 'vars dict')
show_answer = namedtuple('show_answer', 'vars dict')
set_prolog_flag = namedtuple('set_prolog_flag', 'flag new_value')
class Engine( YAPEngine ):
@@ -30,17 +31,18 @@ class Engine( YAPEngine ):
args.setYapPLDIR(yap_lib_path)
args.setSavedState(join(yap_lib_path, "startup.yss"))
YAPEngine.__init__(self, args)
self.goal(set_prolog_flag('verbose', 'silent'),True)
self.goal(compile(library('yapi')), True)
self.goal(set_prolog_flag('verbose', 'normal'), True)
self.run(compile(library('yapi')),m="user",release=True)
def run(self, g, m=None, release=False):
if m:
self.mgoal(g, m, release)
else:
self.goal(release)
self.goal(g, release)
def prolog_library(self, file):
g = prolog_library(file)
self.run(g)
class JupyterEngine( Engine ):
def __init__(self, args=None,self_contained=False,**kwargs):
@@ -50,11 +52,9 @@ class JupyterEngine( Engine ):
args.jupyter = True
Engine.__init__(self, args)
self.errors = None
self.goal(set_prolog_flag('verbose', 'silent'),True)
self.goal(compile(library('verify')), True)
self.goal(compile(library('complete')), True)
self.goal(compile(library('jupyter')), True)
self.goal(set_prolog_flag('verbose', 'normal'), True)
self.run(compile(library('jupyter')),"user")
self.run(compile(library('complete')),"user")
self.run(compile(library('verify')),"user")
class EngineArgs( YAPEngineArgs ):
""" Interface to Engine Options class"""

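Assuming the module above is importable as yap4py.yapi (the file name is not shown in this diff, so the import path is an assumption), a minimal usage sketch of the reworked Engine API — run with an explicit module argument, and the new prolog_library helper:

from yap4py.yapi import Engine, set_prolog_flag

engine = Engine()                                   # boots YAP, loads library(yapi) via run(..., m="user")
engine.prolog_library("lists")                      # new helper: wraps run(prolog_library('lists'))
engine.run(set_prolog_flag("verbose", "silent"), m="user")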
View File

@@ -4,12 +4,12 @@
* @brief Prolog completer.
*/
:- module( completer,
[completions/2 ]).
%% %% :- module( completer,
%% %% [completions/2 ]).
:- use_module(library(lists)).
:- use_module(library(maplist)).
:- use_module(library(python)).
%% completions( +Text, +PythonCell )
%

View File

@@ -6,36 +6,38 @@
*/
:- yap_flag(gc_trace,verbose).
/*
:- module( jupyter,
[jupyter_query/3,
blank/1
blank/1,
streams/1
]
).
*/
:- use_module(library(hacks)).
:- use_module(library(lists)).
:- use_module(library(maplist)).
:- reexport(library(python)).
:- reexport(library(yapi)).
:- reexport(library(complete)).
:- reexport(library(verify)).
%% :- reexport(library(python)).
%% :- reexport(library(yapi)).
%% :- reexport(library(complete)).
%% :- reexport(library(verify)).
:- python_import(sys).
jupyter_query(Caller, Cell, Line ) :-
jupyter_cell(Caller, Cell, Line).
jupyter_cell(_Caller, Cell, _Line) :-
jupyter_consult(Cell), %stack_dump,
fail.
jupyter_cell( _Caller, _, '' ) :- !.
jupyter_cell( _Caller, _, `` ) :- !.
jupyter_cell( _Caller, _, Line ) :-
blank( Line ),
!.
jupyter_cell( Caller, _, Line ) :-
Self := Caller.query,
jupyter_cell(Self, _, Line ) :-
catch(
python_query(Self,Line),
E=error(A,B),
@@ -83,7 +85,8 @@ blank(Text) :-
string_codes(Text, L),
maplist( code_type(space), L).
streams(false) :-
close(user_input),
close(user_output),
close(user_error).
@@ -109,4 +112,4 @@ plot_inline :-
:- endif.
%:- ( start_low_level_trace ).
%y:- ( start_low_level_trace ).
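
The streams/1 predicate defined above is driven from the notebook side; later in this commit YAPRun switches those calls from goal/2 to mgoal/3 in module user. A hedged sketch of that calling pattern — the streams namedtuple is an assumption, modelled on the other goal wrappers in yapi.py:

from collections import namedtuple

streams = namedtuple('streams', 'state')         # assumed arity-1 wrapper for streams/1

def run_cell(engine, consult_or_query):
    engine.mgoal(streams(True), "user", True)    # attach the notebook streams
    try:
        consult_or_query()                       # consult the cell and/or run the query
    finally:
        engine.mgoal(streams(False), "user", True)   # closes user_input/user_output/user_error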

View File

@@ -5,19 +5,22 @@
*/
:- module( verify,
[errors/2,q
ready/2]
).
%% :- module( verify,
%% [errors/2,
%% ready/2]
%% ).
:- use_module(library(hacks)).
:- use_module(library(jupyter)).
%% :- use_module(library(jupyter)).
:- use_module(library(lists)).
:- use_module(library(maplist)).
:- use_module(library(python)).
:- use_module(library(yapi)).
%% :- use_module(library(yapi)).
:- dynamic jupyter/1.
jupyter( []).
ready( Engine, Query) :-
errors( Engine , Cell ),
@@ -27,10 +30,10 @@ ready( Engine, Query) :-
errors( _Engine , Text ) :-
blank(Text).
blank(Text),
!.
errors( Engine , Text ) :-
jupyter..shell := Engine,
%start_low_level_trace,
setup_call_cleanup(
open_esh( Engine , Text, Stream, Name ),
esh(Engine , Name, Stream),
@@ -40,50 +43,55 @@ errors( Engine , Text ) :-
errors( _Engine , _Text ).
open_esh(Engine , Text, Stream, Name) :-
Engine.errors := [],
retractall(jupyter(_)),
assertz(jupyter(Engine)),
b_setval( jupyter, Engine),
Name := Engine.stream_name,
open_mem_read_stream( Text, Stream ).
esh(Engine , Name, Stream) :-
b_setval(code,python),
repeat,
catch(
( read_clause(Stream, Cl, [ syntax_errors(fail)]),
writeln(cl:Cl),
read_clause(Stream, Cl, [ syntax_errors(dec10)]),
error(C,E),
p_message(C,E)
p3_message(C,Engine,E)
),
Cl == end_of_file,
!.
user:print_message() :- p_message
:- multifile user:portray_message/2.
user:portray_message(S,E) :-
jupyter(En),
En \= [],
python_clear_errors,
p3_message(S,En,E).
close_esh( _Engine , Stream ) :-
b_delete
retractall(jupyter(_)),
assertz(jupyter([])),
close(Stream).
p_message(Severity, Error) :-
writeln((Severity->Error)),
p_message(Severity, Engine, Error).
p_message( _Severity, Engine, error(syntax_error(Cause),info(between(_,LN,_), _FileName, CharPos, Details))) :-
%% nb_getval(jupyter_cell, on),
%% assert( syntax_error(Cause,LN,CharPos,Details) ).
%% user:portray_message(_Severity, error(style_check(_),_) ) :-
%% nb_getval(jupyter_cell, on).
Engine.errors := [t(Cause,LN,CharPos,Details)] + Engine.errors,
!.
p_message(error, Engine, E) :-
writeln(E),
!.
p_message(warning, Engine, E) :-
p3_message( _Severity, Engine, error(syntax_error(Cause),info(between(_,LN,_), _FileName, CharPos, Details))) :-
python_clear_errors,
!,
writeln(E),
NE := [t(Cause,LN,CharPos,Details)]+Engine.errors,
writeln(E),
writeln(NE),
Engine.errors := NE.
p3_message(error, Engine, E) :-
python_clear_errors,
!.
p3_message(warning, Engine, E) :-
!.
p_message(error, Engine, E) :-
p3_message(error, Engine, E) :-
Engine.errors := [E] + Engine.errors.
p_message(warning, Engine, E) :-
p3_message(warning, Engine, E) :-
Engine.errors := [E] + Engine.errors.
%% ready(_Self, Line ) :-
%% blank( Line ),
@@ -173,3 +181,4 @@ p_message( _Severity, Engine, error(syntax_error(Cause),info(between(_,LN,_), _
%% Self.errors := [t(C,L,N,A)] + Self.errors,
%% fail.
%% close_events( _ ).
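
On the Python side, the t(Cause, LN, CharPos, Details) entries that p3_message/3 pushes onto Engine.errors are unpacked much as in the YAPRun loop further down in this commit; a condensed sketch (the errors attribute is the one initialised on the engine above):

def report_syntax_errors(engine):
    # Each entry mirrors t(What, Line, CharPos, Text) built by p3_message/3.
    for what, lin, _charpos, text in engine.errors:
        e = SyntaxError(what, ("<string>", lin, 1, text))
        print(e)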

View File

@@ -113,424 +113,9 @@ class YAPInputSplitter(InputSplitter):
return True
if not line:
line = text.rstrip()
engine.errors = []
engine.goal(errors(engine, text),True)
print(engine.errors)
return engine.errors != []
def reset(self):
"""Reset the input buffer and associated state."""
#super(YAPInputSplitter, self).reset()
self._buffer_raw[:] = []
self.source_raw = ''
self.transformer_accumulating = False
for t in self.transforms:
try:
t.reset()
except SyntaxError:
# Nothing that calls reset() expects to handle transformer
# errors
pass
def flush_transformers(self):
def _flush(transform, outs):
"""yield transformed lines
always strings, never None
transform: the current transform
outs: an iterable of previously transformed inputs.
Each may be multiline, which will be passed
one line at a time to transform.
"""
for out in outs:
for line in out.splitlines():
# push one line at a time
tmp = transform.push(line)
if tmp is not None:
yield tmp
# reset the transform
tmp = transform.reset()
if tmp is not None:
yield tmp
out = []
for t in self.transforms:
out = _flush(t, out)
out = list(out)
if out:
self._store('\n'.join(out))
def raw_reset(self):
"""Return raw input only and perform a full reset.
"""
out = self.source_raw
self.reset()
return out
def source_reset(self):
try:
self.flush_transformers()
return self.source
finally:
self.reset()
def push_accepts_more(self):
if self.transformer_accumulating:
return True
else:
return self.validQuery(self.source, engine, self.shell)
def transform_cell(self, cell):
"""Process and translate a cell of input.
"""
self.reset()
try:
self.push(cell)
self.flush_transformers()
return self.source
finally:
self.reset()
def push(self, lines):
"""Push one or more lines of yap_ipython input.
This stores the given lines and returns a status code indicating
whether the code forms a complete Python block or not, after processing
all input lines for special yap_ipython syntax.
Any exceptions generated in compilation are swallowed, but if an
exception was produced, the method returns True.
Parameters
----------
lines : string
One or more lines of Python input.
Returns
-------
is_complete : boolean
True if the current input source (the result of the current input
plus prior inputs) forms a complete Python execution block. Note that
this value is also stored as a private attribute (_is_complete), so it
can be queried at any time.
"""
# We must ensure all input is pure unicode
lines = cast_unicode(lines, self.encoding)
# ''.splitlines() --> [], but we need to push the empty line to transformers
lines_list = lines.splitlines()
if not lines_list:
lines_list = ['']
# Store raw source before applying any transformations to it. Note
# that this must be done *after* the reset() call that would otherwise
# flush the buffer.
self._store(lines, self._buffer_raw, 'source_raw')
transformed_lines_list = []
for line in lines_list:
transformed = self._transform_line(line)
if transformed is not None:
transformed_lines_list.append(transformed)
if transformed_lines_list:
transformed_lines = '\n'.join(transformed_lines_list)
else:
# Got nothing back from transformers - they must be waiting for
# more input.
return False
def _transform_line(self, line):
"""Push a line of input code through the various transformers.
Returns any output from the transformers, or None if a transformer
is accumulating lines.
Sets self.transformer_accumulating as a side effect.
"""
def _accumulating(dbg):
#print(dbg)
self.transformer_accumulating = True
return None
for transformer in self.physical_line_transforms:
line = transformer.push(line)
if line is None:
return _accumulating(transformer)
for transformer in self.logical_line_transforms:
line = transformer.push(line)
if line is None:
return _accumulating(transformer)
#print("transformers clear") #debug
self.transformer_accumulating = False
return line
class YAPCompleter(Completer):
greedy = Bool(False,
help="""Activate greedy completion
PENDING DEPRECTION. this is now mostly taken care of with Jedi.
This will enable completion on elements of lists, self.results of function calls, etc.,
but can be unsafe because the code is actually evaluated on TAB.
"""
).tag(config=True)
debug = Bool(default_value=False,
help='Enable debug for the Completer. Mostly print extra '
'information for experimental jedi integration.') \
.tag(config=True)
backslash_combining_completions = Bool(True,
help="Enable unicode completions, e.g. \\alpha<tab> . "
"Includes completion of latex commands, unicode names, and expanding "
"unicode characters back to latex commands.").tag(config=True)
def __init__(self, namespace=None, global_namespace=None, shell=None, **kwargs):
"""Create a new completer for the command line.
Completer(namespace=ns, global_namespace=ns2) -> completer instance.
"""
self.shell = shell
self.magic_escape = ESC_MAGIC
super(Completer, self).__init__(**kwargs)
def complete(self, text, line=None, cursor_pos=None):
"""Return the completed text and a list of completions.
Parameters
----------
text : string
A string of text to be completed on. It can be given as empty and
instead a line/position pair are given. In this case, the
completer itself will split the line like readline does.
This is called successively with state == 0, 1, 2, ... until it
returns None. The completion should begin with 'text'.
line : string, optional
The complete line that text is part of.
cursor_pos : int, optional
The position of the cursor on the input line.
Returns
-------
text : string
The actual text that was completed.
matches : list
A sorted list with all possible completions.
The optional arguments allow the completion to take more context into
account, and are part of the low-level completion API.
This is a wrapper around the completion mechanism, similar to what
readline does at the command line when the TAB key is hit. By
exposing it as a method, it can be used by other non-readline
environments (such as GUIs) for text completion.
Simple usage example:
In [1]: x = 'hello'
In [2]: _ip.complete('x.l')
Out[2]: ('x.l', ['x.ljust', 'x.lower', 'x.lstrip'])
"""
if not text:
text = line[:cursor_pos]
return self.completions(text, cursor_pos)
def magic_matches(self, text):
"""Match magics"""
# Get all shell magics now rather than statically, so magics loaded at
# runtime show up too.
lsm = self.shell.magics_manager.lsmagic()
line_magics = lsm['line']
cell_magics = lsm['cell']
pre = self.magic_escape
pre2 = pre+pre
explicit_magic = text.startswith(pre)
# Completion logic:
# - user gives %%: only do cell magics
# - user gives %: do both line and cell magics
# - no prefix: do both
# In other words, line magics are skipped if the user gives %% explicitly
#
# We also exclude magics that match any currently visible names:
# https://github.com/ipython/ipython/issues/4877, unless the user has
# typed a %:
# https://github.com/ipython/ipython/issues/10754
bare_text = text.lstrip(pre)
global_matches = []
if not explicit_magic:
def matches(magic):
"""
Filter magics, in particular remove magics that match
a name present in global namespace.
"""
return ( magic.startswith(bare_text) and
magic not in global_matches )
else:
def matches(magic):
return magic.startswith(bare_text)
comp = [ pre2+m for m in cell_magics if matches(m)]
if not text.startswith(pre2):
comp += [ pre+m for m in line_magics if matches(m)]
return comp
def magic_config_matches(self, text): #:str) -> List[str]:
""" Match class names and attributes for %config magic """
texts = text.strip().split()
if len(texts) > 0 and (texts[0] == 'config' or texts[0] == '%config'):
# get all configuration classes
classes = sorted(set([ c for c in self.shell.configurables
if c.__class__.class_traits(config=True)
]), key=lambda x: x.__class__.__name__)
classnames = [ c.__class__.__name__ for c in classes ]
# return all classnames if config or %config is given
if len(texts) == 1:
return classnames
# match classname
classname_texts = texts[1].split('.')
classname = classname_texts[0]
classname_matches = [ c for c in classnames
if c.startswith(classname) ]
# return matched classes or the matched class with attributes
if texts[1].find('.') < 0:
return classname_matches
elif len(classname_matches) == 1 and \
classname_matches[0] == classname:
cls = classes[classnames.index(classname)].__class__
help = cls.class_get_help()
# strip leading '--' from cl-args:
help = re.sub(re.compile(r'^--', re.MULTILINE), '', help)
return [ attr.split('=')[0]
for attr in help.strip().splitlines()
if attr.startswith(texts[1]) ]
return []
def magic_color_matches(self, text): #:str) -> List[str] :
""" Match color schemes for %colors magic"""
texts = text.split()
if text.endswith(' '):
# .split() strips off the trailing whitespace. Add '' back
# so that: '%colors ' -> ['%colors', '']
texts.append('')
if len(texts) == 2 and (texts[0] == 'colors' or texts[0] == '%colors'):
prefix = texts[1]
return [ color for color in InspectColors.keys()
if color.startswith(prefix) ]
return []
def completions(self, text, offset):
"""
Returns an iterator over the possible completions
.. warning:: Unstable
This function is unstable, API may change without warning.
It will also raise unless use in proper context manager.
Parameters
----------
text:str
Full text of the current input, multi line string.
offset:int
Integer representing the position of the cursor in ``text``. Offset
is 0-based indexed.
Yields
------
:any:`Completion` object
The cursor on a text can either be seen as being "in between"
characters or "On" a character depending on the interface visible to
the user. For consistency the cursor being on "in between" characters X
and Y is equivalent to the cursor being "on" character Y, that is to say
the character the cursor is on is considered as being after the cursor.
Combining characters may span more that one position in the
text.
.. note::
If ``IPCompleter.debug`` is :any:`True` will yield a ``--jedi/ipython--``
fake Completion token to distinguish completion returned by Jedi
and usual yap_ipython completion.
.. note::
Completions are not completely deduplicated yet. If identical
completions are coming from different sources this function does not
ensure that each completion object will only be present once.
"""
self.matches = []
prolog_res = self.shell.yapeng.goal(completions(text, self),True)
if self.matches:
return text, self.matches
magic_res = self.magic_matches(text)
return text, magic_res
class YAPRun:
"""An enhanced, interactive shell for YAP."""
def __init__(self, shell):
self.shell = shell
self.yapeng = JupyterEngine()
global engine
engine = self.yapeng
self.query = None
self.os = None
self.it = None
self.shell.yapeng = self.yapeng
self._get_exc_info = shell._get_exc_info
def syntaxErrors(self, text):
"""Return whether a legal query
"""
if not text:
return []
if text == self.os:
return self.yapeng.errors
(text,_,_,_) = self.clean_end(text)
self.yapeng.goal(errors(self.yapeng,text),True)
print( self.yapeng.errors )
return self.yapeng.errors != []
self.errors = []
engine.mgoal(errors(self, line),"user",True)
return self.errors != []
def reset(self):
@@ -911,7 +496,7 @@ class YAPCompleter(Completer):
ensure that each completion object will only be present once.
"""
self.matches = []
prolog_res = self.shell.yapeng.goal(completions(text, self),True)
prolog_res = self.shell.yapeng.mgoal(completions(text, self), "user",True)
if self.matches:
return text, self.matches
magic_res = self.magic_matches(text)
@@ -940,11 +525,11 @@ class YAPRun:
if not text:
return []
if text == self.os:
return self.yapeng.errors
return self.errors
self.errors=[]
(text,_,_,_) = self.clean_end(text)
self.yapeng.goal(errors(self.yapeng,text),True)
print( self.yapeng.errors )
return self.yapeng.errors
self.yapeng.mgoal(errors(self,text),"user",True)
return self.errors
def jupyter_query(self, s):
#
@@ -1027,6 +612,7 @@ class YAPRun:
# you can print it out, the left-side is the variable name,
# the right side wraps a handle to a variable
#import pdb; pdb.set_trace()
# #pdb.set_trace()
# atom match either symbols, or if no symbol exists, strings, In this case
# variable names should match strings
# ask = True
@@ -1067,9 +653,7 @@ class YAPRun:
# except SyntaxError:
# preprocessing_exc_tuple = self.shell.syntax_error() # sys.exc_info()
cell = raw_cell # cell has to exist so it can be stored/logged
self.yapeng.goal(streams(True), True)
errors = self.syntaxErrors(raw_cell)
for i in errors:
for i in self.syntaxErrors(raw_cell):
try:
(what,lin,_,text) = i
e = SyntaxError(what, ("<string>", lin, 1, text))
@@ -1095,7 +679,6 @@ class YAPRun:
# compiler
# compiler = self.shell.compile if shell_futures else CachingCompiler()
cell_name = str( self.shell.execution_count)
engine.stream_name = cell_name
if cell[0] == '%':
if cell[1] == '%':
linec = False
@@ -1124,6 +707,7 @@ class YAPRun:
self.shell.displayhook.exec_result = self.result
has_raised = False
try:
self.yapeng.mgoal(streams(True),"user", True)
self.bindings = dicts = []
if cell.strip('\n \t'):
#create a Trace object, telling it what to ignore, and whether to
@@ -1148,9 +732,9 @@ class YAPRun:
except Exception as e:
has_raised = True
self.result.result = False
self.yapeng.goal(streams(False), True)
self.yapeng.mgoal(streams(False),"user", True)
self.shell.last_execution_succeeded = not has_raised
# Reset this so later displayed values do not modify the