This commit is contained in:
Vitor Santos Costa 2018-01-05 16:57:38 +00:00
parent 814aa2bd4c
commit 9c862c21bc
271 changed files with 43711 additions and 6129 deletions

View File

@ -1,19 +1,19 @@
/*************************************************************************
* *
* YAP Prolog *
* *
* Yap Prolog was developed at NCCUP - Universidade do Porto *
* *
* Copyright L.Damas, V.S.Costa and Universidade do Porto 1985-1997 *
* *
**************************************************************************
* *
* File: parser.c *
* Last rev: *
* mods: *
* comments: Prolog's parser *
* *
*************************************************************************/
* *
* YAP Prolog *
* *
* Yap Prolog was developed at NCCUP - Universidade do Porto *
* *
* Copyright L.Damas, V.S.Costa and Universidade do Porto 1985-1997 *
* *
**************************************************************************
* *
* File: parser.c *
* Last rev: *
* mods: *
* comments: Prolog's parser *
* *
*************************************************************************/
#ifdef SCCS
static char SccsId[] = "%W% %G%";
#endif
@ -135,10 +135,10 @@ dot with single quotes.
*/
#include "Yap.h"
#include "YapEval.h"
#include "YapHeap.h"
#include "YapText.h"
#include "Yatom.h"
#include "YapEval.h"
#include "yapio.h"
/* stuff we want to use in standard YAP code */
#include "iopreds.h"
@ -157,7 +157,9 @@ dot with single quotes.
/* weak backtracking mechanism based on long_jump */
typedef struct jmp_buff_struct { sigjmp_buf JmpBuff; } JMPBUFF;
typedef struct jmp_buff_struct {
sigjmp_buf JmpBuff;
} JMPBUFF;
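For context: the JMPBUFF wrapper above carries the parser's weak backtracking. A minimal, self-contained sketch of the idiom, assuming POSIX sigsetjmp/siglongjmp; try_parse and fail_parse are illustrative names, not YAP functions:

#include <setjmp.h>
#include <stdio.h>

typedef struct jmp_buff_struct {
  sigjmp_buf JmpBuff;
} JMPBUFF;

static void fail_parse(JMPBUFF *fb) {
  siglongjmp(fb->JmpBuff, 1); /* unwind to the matching sigsetjmp */
}

static int try_parse(int should_fail) {
  JMPBUFF fb;
  if (sigsetjmp(fb.JmpBuff, 0) == 0) {
    /* forward path: attempt the parse; nested code bails out via fail_parse() */
    if (should_fail)
      fail_parse(&fb);
    return 1; /* parse succeeded */
  }
  return 0; /* reached through siglongjmp: the parse failed */
}

int main(void) {
  printf("%d %d\n", try_parse(0), try_parse(1)); /* prints: 1 0 */
  return 0;
}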
static void GNextToken(CACHE_TYPE1);
static void checkfor(Term, JMPBUFF *, encoding_t CACHE_TYPE);
@ -165,19 +167,20 @@ static Term ParseArgs(Atom, Term, JMPBUFF *, Term, encoding_t, Term CACHE_TYPE);
static Term ParseList(JMPBUFF *, encoding_t, Term CACHE_TYPE);
static Term ParseTerm(int, JMPBUFF *, encoding_t, Term CACHE_TYPE);
extern Term Yap_tokRep(void* tokptr);
extern const char * Yap_tokText(void *tokptr);
extern Term Yap_tokRep(void *tokptr);
extern const char *Yap_tokText(void *tokptr);
static void syntax_msg(const char *msg, ...) {
CACHE_REGS
va_list ap;
if (!LOCAL_ErrorMessage ||
(LOCAL_Error_TYPE == SYNTAX_ERROR &&
LOCAL_ActiveError->prologParserLine < LOCAL_tokptr->TokPos)) {
LOCAL_tokptr->TokPos < LOCAL_ActiveError->prologParserPos )) {
if (!LOCAL_ErrorMessage) {
LOCAL_ErrorMessage = malloc(1024 + 1);
LOCAL_ErrorMessage = malloc(MAX_ERROR_MSG_SIZE + 1);
}
LOCAL_ActiveError->prologParserLine = LOCAL_tokptr->TokPos;
LOCAL_ActiveError->prologParserLine = LOCAL_tokptr->TokLine;
LOCAL_ActiveError->prologParserPos = LOCAL_tokptr->TokPos;
va_start(ap, msg);
vsnprintf(LOCAL_ErrorMessage, MAX_ERROR_MSG_SIZE, msg, ap);
va_end(ap);
@ -226,7 +229,7 @@ static void syntax_msg(const char *msg, ...) {
#define FAIL siglongjmp(FailBuff->JmpBuff, 1)
VarEntry *Yap_LookupVar(const char *var) /* lookup variable in variables table
* */
* */
{
CACHE_REGS
VarEntry *p;
@ -364,7 +367,7 @@ static Term Variables(VarEntry *p, Term l USES_REGS) {
Term Yap_Variables(VarEntry *p, Term l) {
CACHE_REGS
l = Variables(LOCAL_AnonVarTable, l PASS_REGS);
l = Variables(LOCAL_AnonVarTable, l PASS_REGS);
return Variables(p, l PASS_REGS);
}
@ -468,7 +471,7 @@ inline static void checkfor(Term c, JMPBUFF *FailBuff,
strncpy(s, Yap_tokText(LOCAL_tokptr), 1023);
syntax_msg("line %d: expected to find "
"\'%c....................................\', found %s",
LOCAL_tokptr->TokPos, c, s);
LOCAL_tokptr->TokLine, c, s);
FAIL;
}
NextToken;
@ -549,12 +552,12 @@ static Term ParseArgs(Atom a, Term close, JMPBUFF *FailBuff, Term arg1,
func = Yap_MkFunctor(a, 1);
if (func == NULL) {
syntax_msg("line %d: Heap Overflow", LOCAL_tokptr->TokPos);
syntax_msg("line %d: Heap Overflow", LOCAL_tokptr->TokLine);
FAIL;
}
t = Yap_MkApplTerm(func, nargs, p);
if (HR > ASP - 4096) {
syntax_msg("line %d: Stack Overflow", LOCAL_tokptr->TokPos);
syntax_msg("line %d: Stack Overflow", LOCAL_tokptr->TokLine);
return TermNil;
}
NextToken;
@ -564,7 +567,7 @@ static Term ParseArgs(Atom a, Term close, JMPBUFF *FailBuff, Term arg1,
while (1) {
Term *tp = (Term *)ParserAuxSp;
if (ParserAuxSp + 1 > LOCAL_TrailTop) {
syntax_msg("line %d: Trail Overflow", LOCAL_tokptr->TokPos);
syntax_msg("line %d: Trail Overflow", LOCAL_tokptr->TokLine);
FAIL;
}
*tp++ = Unsigned(ParseTerm(999, FailBuff, enc, cmod PASS_REGS));
@ -582,12 +585,12 @@ static Term ParseArgs(Atom a, Term close, JMPBUFF *FailBuff, Term arg1,
* order
*/
if (HR > ASP - (nargs + 1)) {
syntax_msg("line %d: Stack Overflow", LOCAL_tokptr->TokPos);
syntax_msg("line %d: Stack Overflow", LOCAL_tokptr->TokLine);
FAIL;
}
func = Yap_MkFunctor(a, nargs);
if (func == NULL) {
syntax_msg("line %d: Heap Overflow", LOCAL_tokptr->TokPos);
syntax_msg("line %d: Heap Overflow", LOCAL_tokptr->TokLine);
FAIL;
}
#ifdef SFUNC
@ -602,7 +605,7 @@ static Term ParseArgs(Atom a, Term close, JMPBUFF *FailBuff, Term arg1,
t = Yap_MkApplTerm(func, nargs, p);
#endif
if (HR > ASP - 4096) {
syntax_msg("line %d: Stack Overflow", LOCAL_tokptr->TokPos);
syntax_msg("line %d: Stack Overflow", LOCAL_tokptr->TokLine);
return TermNil;
}
/* check for possible overflow against local stack */
@ -611,8 +614,8 @@ static Term ParseArgs(Atom a, Term close, JMPBUFF *FailBuff, Term arg1,
}
static Term MakeAccessor(Term t, Functor f USES_REGS) {
UInt arity = ArityOfFunctor(FunctorOfTerm(t));
int i;
UInt arity = ArityOfFunctor(FunctorOfTerm(t));
int i;
Term tf[2], tl = TermNil;
tf[1] = ArgOfTerm(1, t);
@ -638,7 +641,7 @@ loop:
/* check for possible overflow against local stack */
if (HR > ASP - 4096) {
to_store[1] = TermNil;
syntax_msg("line %d: Stack Overflow", LOCAL_tokptr->TokPos);
syntax_msg("line %d: Stack Overflow", LOCAL_tokptr->TokLine);
FAIL;
} else {
to_store[1] = AbsPair(HR);
@ -653,7 +656,7 @@ loop:
}
} else {
syntax_msg("line %d: looking for symbol ',','|' got symbol '%s'",
LOCAL_tokptr->TokPos, Yap_tokText(LOCAL_tokptr));
LOCAL_tokptr->TokLine, Yap_tokText(LOCAL_tokptr));
FAIL;
}
return (o);
@ -725,13 +728,13 @@ static Term ParseTerm(int prio, JMPBUFF *FailBuff, encoding_t enc,
TRY(
/* build appl on the heap */
func = Yap_MkFunctor(AtomOfTerm(t), 1); if (func == NULL) {
syntax_msg("line %d: Heap Overflow", LOCAL_tokptr->TokPos);
syntax_msg("line %d: Heap Overflow", LOCAL_tokptr->TokLine);
FAIL;
} t = ParseTerm(oprprio, FailBuff, enc, cmod PASS_REGS);
t = Yap_MkApplTerm(func, 1, &t);
/* check for possible overflow against local stack */
if (HR > ASP - 4096) {
syntax_msg("line %d: Stack Overflow", LOCAL_tokptr->TokPos);
syntax_msg("line %d: Stack Overflow", LOCAL_tokptr->TokLine);
FAIL;
} curprio = opprio;
, break;)
@ -762,7 +765,7 @@ static Term ParseTerm(int prio, JMPBUFF *FailBuff, encoding_t enc,
break;
case Error_tok:
syntax_msg("line %d: found ill-formed \"%s\"", LOCAL_tokptr->TokPos,
syntax_msg("line %d: found ill-formed \"%s\"", LOCAL_tokptr->TokLine,
Yap_tokText(LOCAL_tokptr));
FAIL;
@ -798,14 +801,14 @@ static Term ParseTerm(int prio, JMPBUFF *FailBuff, encoding_t enc,
t = Yap_MkApplTerm(FunctorBraces, 1, &t);
/* check for possible overflow against local stack */
if (HR > ASP - 4096) {
syntax_msg("line %d: Stack Overflow", LOCAL_tokptr->TokPos);
syntax_msg("line %d: Stack Overflow", LOCAL_tokptr->TokLine);
FAIL;
}
checkfor(TermEndCurlyBracket, FailBuff, enc PASS_REGS);
break;
default:
syntax_msg("line %d: unexpected ponctuation signal %s",
LOCAL_tokptr->TokPos, Yap_tokRep(LOCAL_tokptr));
LOCAL_tokptr->TokLine, Yap_tokRep(LOCAL_tokptr));
FAIL;
}
break;
@ -896,7 +899,7 @@ static Term ParseTerm(int prio, JMPBUFF *FailBuff, encoding_t enc,
NextToken;
break;
default:
syntax_msg("line %d: expected operator, got \'%s\'", LOCAL_tokptr->TokPos,
syntax_msg("line %d: expected operator, got \'%s\'", LOCAL_tokptr->TokLine,
Yap_tokText(LOCAL_tokptr));
FAIL;
}
@ -912,9 +915,8 @@ static Term ParseTerm(int prio, JMPBUFF *FailBuff, encoding_t enc,
/* try parsing as infix operator */
Volatile int oldprio = curprio;
TRY3(
func = Yap_MkFunctor(save_opinfo, 2);
if (func == NULL) {
syntax_msg("line %d: Heap Overflow", LOCAL_tokptr->TokPos);
func = Yap_MkFunctor(save_opinfo, 2); if (func == NULL) {
syntax_msg("line %d: Heap Overflow", LOCAL_tokptr->TokLine);
FAIL;
} NextToken;
{
@ -924,7 +926,7 @@ static Term ParseTerm(int prio, JMPBUFF *FailBuff, encoding_t enc,
t = Yap_MkApplTerm(func, 2, args);
/* check for possible overflow against local stack */
if (HR > ASP - 4096) {
syntax_msg("line %d: Stack Overflow", LOCAL_tokptr->TokPos);
syntax_msg("line %d: Stack Overflow", LOCAL_tokptr->TokLine);
FAIL;
}
},
@ -937,13 +939,13 @@ static Term ParseTerm(int prio, JMPBUFF *FailBuff, encoding_t enc,
/* parse as posfix operator */
Functor func = Yap_MkFunctor(AtomOfTerm(LOCAL_tokptr->TokInfo), 1);
if (func == NULL) {
syntax_msg("line %d: Heap Overflow", LOCAL_tokptr->TokPos);
syntax_msg("line %d: Heap Overflow", LOCAL_tokptr->TokLine);
FAIL;
}
t = Yap_MkApplTerm(func, 1, &t);
/* check for possible overflow against local stack */
if (HR > ASP - 4096) {
syntax_msg("line %d: Stack Overflow", LOCAL_tokptr->TokPos);
syntax_msg("line %d: Stack Overflow", LOCAL_tokptr->TokLine);
FAIL;
}
curprio = opprio;
@ -953,7 +955,8 @@ static Term ParseTerm(int prio, JMPBUFF *FailBuff, encoding_t enc,
break;
}
if (LOCAL_tokptr->Tok == Ord(Ponctuation_tok)) {
if (LOCAL_tokptr->TokInfo == TermComma && prio >= 1000 && curprio <= 999) {
if (LOCAL_tokptr->TokInfo == TermComma && prio >= 1000 &&
curprio <= 999) {
Volatile Term args[2];
NextToken;
args[0] = t;
@ -961,7 +964,7 @@ static Term ParseTerm(int prio, JMPBUFF *FailBuff, encoding_t enc,
t = Yap_MkApplTerm(FunctorComma, 2, args);
/* check for possible overflow against local stack */
if (HR > ASP - 4096) {
syntax_msg("line %d: Stack Overflow", LOCAL_tokptr->TokPos);
syntax_msg("line %d: Stack Overflow", LOCAL_tokptr->TokLine);
FAIL;
}
curprio = 1000;
@ -977,7 +980,7 @@ static Term ParseTerm(int prio, JMPBUFF *FailBuff, encoding_t enc,
t = Yap_MkApplTerm(FunctorVBar, 2, args);
/* check for possible overflow against local stack */
if (HR > ASP - 4096) {
syntax_msg("line %d: Stack Overflow", LOCAL_tokptr->TokPos);
syntax_msg("line %d: Stack Overflow", LOCAL_tokptr->TokLine);
FAIL;
}
curprio = opprio;
@ -1000,18 +1003,17 @@ static Term ParseTerm(int prio, JMPBUFF *FailBuff, encoding_t enc,
curprio = opprio;
continue;
} else if (LOCAL_tokptr->TokInfo == TermBeginCurlyBracket &&
IsPosfixOp(AtomBraces, &opprio, &oplprio,
cmod PASS_REGS) &&
IsPosfixOp(AtomBraces, &opprio, &oplprio, cmod PASS_REGS) &&
opprio <= prio && oplprio >= curprio) {
t = ParseArgs(AtomBraces, TermEndCurlyBracket, FailBuff, t,
enc, cmod PASS_REGS);
t = ParseArgs(AtomBraces, TermEndCurlyBracket, FailBuff, t, enc,
cmod PASS_REGS);
t = MakeAccessor(t, FunctorBraces PASS_REGS);
curprio = opprio;
continue;
}
}
if (LOCAL_tokptr->Tok <= Ord(String_tok)) {
syntax_msg("line %d: expected operator, got \'%s\'", LOCAL_tokptr->TokPos,
syntax_msg("line %d: expected operator, got \'%s\'", LOCAL_tokptr->TokLine,
Yap_tokText(LOCAL_tokptr));
FAIL;
}
@ -1022,8 +1024,8 @@ static Term ParseTerm(int prio, JMPBUFF *FailBuff, encoding_t enc,
Term Yap_Parse(UInt prio, encoding_t enc, Term cmod) {
CACHE_REGS
// ensure that if we throw an exception
// t will be 0.
// ensure that if we throw an exception
// t will be 0.
Volatile Term t = 0;
JMPBUFF FailBuff;
yhandle_t sls = Yap_StartSlots();
@ -1046,12 +1048,12 @@ Term Yap_Parse(UInt prio, encoding_t enc, Term cmod) {
Yap_CloseSlots(sls);
}
if (LOCAL_tokptr != NULL && LOCAL_tokptr->Tok != Ord(eot_tok)) {
LOCAL_Error_TYPE = SYNTAX_ERROR;
if (LOCAL_tokptr->TokNext) {
LOCAL_ErrorMessage = "operator misssing . ";
} else {
LOCAL_ErrorMessage = "term does not end on . ";
}
LOCAL_Error_TYPE = SYNTAX_ERROR;
if (LOCAL_tokptr->TokNext) {
LOCAL_ErrorMessage = "bracket or operator expected.";
} else {
LOCAL_ErrorMessage = "term must end with . or EOF.";
}
t = 0;
}
if (t != 0 && LOCAL_Error_TYPE == SYNTAX_ERROR) {

View File

@ -688,8 +688,7 @@ static int send_error_message(char s[]) {
return 0;
}
static wchar_t read_quoted_char(int *scan_nextp,
struct stream_desc *st) {
static wchar_t read_quoted_char(int *scan_nextp, struct stream_desc *st) {
int ch;
/* escape sequence */
@ -1123,7 +1122,8 @@ Term Yap_scan_num(StreamDesc *inp, bool error_on) {
;
#endif
TokEntry *tokptr = (TokEntry *)AllocScannerMemory(sizeof(TokEntry));
tokptr->TokPos = GetCurInpPos(inp);
tokptr->TokLine = GetCurInpLine(inp);
tokptr->TokPos = GetCurInpPos(inp);
if (ch == '-') {
sign = -1;
ch = getchr(inp);
@ -1324,8 +1324,8 @@ TokEntry *Yap_tokenizer(struct stream_desc *st, bool store_comments,
LOCAL_AnonVarTable = NULL;
l = NULL;
p = NULL; /* Just to make lint happy */
__android_log_print(ANDROID_LOG_INFO, "YAPDroid", "i %d", st-GLOBAL_Stream);
ch = getchr(st);
__android_log_print(ANDROID_LOG_INFO, "YAPDroid", "i %d", st - GLOBAL_Stream);
ch = getchr(st);
while (chtype(ch) == BS) {
ch = getchr(st);
}
@ -1353,7 +1353,8 @@ TokEntry *Yap_tokenizer(struct stream_desc *st, bool store_comments,
while (chtype(ch) == BS) {
ch = getchr(st);
}
t->TokPos = GetCurInpPos(st);
t->TokPos = GetCurInpPos(st);
t->TokLine = GetCurInpLine(st);
switch (chtype(ch)) {
@ -1472,7 +1473,8 @@ TokEntry *Yap_tokenizer(struct stream_desc *st, bool store_comments,
if (cherr) {
TokEntry *e;
t->Tok = Number_tok;
t->TokPos = GetCurInpPos(st);
t->TokPos = GetCurInpPos(st);
t->TokLine = GetCurInpLine(st);
e = (TokEntry *)AllocScannerMemory(sizeof(TokEntry));
if (e == NULL) {
return TrailSpaceError(p, l);
@ -1498,7 +1500,8 @@ TokEntry *Yap_tokenizer(struct stream_desc *st, bool store_comments,
t->Tok = Ord(Var_tok);
t->TokInfo = (Term)Yap_LookupVar("E");
t->TokPos = GetCurInpPos(st);
t->TokPos = GetCurInpPos(st);
t->TokLine = GetCurInpLine(st);
e2 = (TokEntry *)AllocScannerMemory(sizeof(TokEntry));
if (e2 == NULL) {
return TrailSpaceError(p, l);
@ -1531,7 +1534,8 @@ TokEntry *Yap_tokenizer(struct stream_desc *st, bool store_comments,
if (ch == '(')
solo_flag = FALSE;
t->TokInfo = MkAtomTerm(AtomE);
t->TokPos = GetCurInpPos(st);
t->TokLine = GetCurInpLine(st);
t->TokPos = GetCurInpPos(st);
e2 = (TokEntry *)AllocScannerMemory(sizeof(TokEntry));
if (e2 == NULL) {
return TrailSpaceError(p, l);
@ -1967,7 +1971,8 @@ TokEntry *Yap_tokenizer(struct stream_desc *st, bool store_comments,
p->TokNext = e;
e->Tok = Error_tok;
e->TokInfo = MkAtomTerm(Yap_LookupAtom(LOCAL_ErrorMessage));
e->TokPos = GetCurInpPos(st);
e->TokPos = GetCurInpPos(st);
e->TokLine = GetCurInpLine(st);
e->TokNext = NULL;
LOCAL_ErrorMessage = NULL;
p = e;

View File

@ -330,12 +330,10 @@ static const char *find_directory(YAP_init_args *iap, const char *paths[],
int j = 0;
while ((p = names[j++])) {
char *io = o + s;
printf("%s -> %s\n", inp, o);
if ((no = location(iap, p, io)) && io[0] != '\0' && Yap_Exists(o))
return pop_output_text_stack(lvl, realpath(o, full));
}
} else {
printf("-> %s\n", o);
return pop_output_text_stack(lvl, o);
}
}

View File

@ -413,32 +413,36 @@ endif ()
# rpath stuff, hopefully it works
# use, i.e. don't skip the full RPATH for the build tree
#SET(CMAKE_SKIP_BUILD_RPATH FALSE)
#SET(CMAKE_SKIP_BUILD_RPATH TRUE)
# when building, don't use the install RPATH already
# (but later on when installing)
#SET(CMAKE_BUILD_WITH_INSTALL_RPATH TRUE)
# SET(CMAKE_INSTALL_FULL_RPATH ${CMAKE_TOP_BINARY_DIR})
# add the automatically determined parts of the RPATH
# which point to directories outside the build tree to the install RPATH
SET(CMAKE_INSTALL_RPATH_USE_LINK_PATH TRUE)
# the RPATH to be used when installing, but only if it's not a system directory
LIST(FIND CMAKE_PLATFORM_IMPLICIT_LINK_DIRECTORIES "${libdir}" isSystemDir)
IF("${isSystemDir}" STREQUAL "-1")
SET(CMAKE_INSTALL_RPATH ${libdir})
ENDIF("${isSystemDir}" STREQUAL "-1")
## (but later on when installing)
#SET(CMAKE_BUILD_WITH_INSTALL_RPATH FALSE)
#
## SET(CMAKE_INSTALL_FULL_RPATH ${CMAKE_TOP_BINARY_DIR})
#
## add the automatically determined parts of the RPATH
## which point to directories outside the build tree to the install RPATH
#SET(CMAKE_INSTALL_RPATH_USE_LINK_PATH FALSE)
#
#
## the RPATH to be used when installing, but only if it's not a system directory
#LIST(FIND CMAKE_PLATFORM_IMPLICIT_LINK_DIRECTORIES "${libdir}" isSystemDir)
# IF("${isSystemDir}" STREQUAL "-1")
# SET(CMAKE_INSTALL_RPATH ${libdir})
#ENDIF("${isSystemDir}" STREQUAL "-1")
#
IF(NOT WIN32 AND NOT APPLE)
LIST(APPEND CMAKE_INSTALL_RPATH \\$ORIGIN/../lib/Yap)
LIST(APPEND CMAKE_INSTALL_RPATH ${CMAKE_INSTALL_FULL_LIBDIR})
LIST(APPEND CMAKE_INSTALL_RPATH \\$ORIGIN/../lib)
LIST(APPEND CMAKE_INSTALL_RPATH \\$ORIGIN/../../../lib)
ELSE()
LIST(APPEND CMAKE_INSTALL_RPATH @loader_path/../lib/Yap)
LIST(APPEND CMAKE_INSTALL_RPATH ${CMAKE_INSTALL_FULL_LIBDIR})
LIST(APPEND CMAKE_INSTALL_RPATH @loader_path/../lib)
LIST(APPEND CMAKE_INSTALL_RPATH @loader_path/../../../lib)
ENDIF()
# Model Specific

View File

@ -1031,6 +1031,8 @@ Term YAPEngine::top_level(std::string s) {
ARG1 = YAP_ReadBuffer(s.data(), &tp);
ARG2 = tp;
ARG3 = MkVarTerm();
if (ARG1 == 0)
YAPError(SYNTAX_ERROR);
YAPPredicate p = YAPPredicate(YAP_TopGoal());
YAPQuery *Q = new YAPQuery(p, 0);
Term ts[2];

View File

@ -13,7 +13,7 @@ typedef enum TokenKinds {
typedef struct TOKEN {
enum TokenKinds Tok;
Term TokInfo;
int TokPos;
intptr_t TokPos, TokLine;
struct TOKEN *TokNext;
} TokEntry;
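The struct change above is the core of this commit: a token now records both its absolute character offset (TokPos) and its line number (TokLine), so error messages can report lines while seeks use offsets. A tiny standalone sketch of a scanner keeping both counters; the counters struct and the literal input are illustrative, not YAP code:

#include <stdint.h>
#include <stdio.h>

typedef struct {
  intptr_t charcount; /* absolute offset: what TokPos would record */
  intptr_t linecount; /* 1-based line number: what TokLine would record */
} counters;

int main(void) {
  const char *src = "foo(\nbar).";
  counters c = {0, 1};
  for (const char *p = src; *p; p++) {
    if (*p == 'b') /* start of the token "bar" */
      printf("TokPos=%ld TokLine=%ld\n", (long)c.charcount, (long)c.linecount);
    c.charcount++;
    if (*p == '\n')
      c.linecount++;
  }
  return 0; /* prints: TokPos=5 TokLine=2 */
}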

View File

@ -382,9 +382,14 @@ function. */
#cmakedefine HAVE_FFSL ${HAVE_FFSL}
#endif
/* Define to 1 if you have the `ffsll' function. */
#ifndef HAVE_FFSLL
#cmakedefine HAVE_FFSLL ${HAVE_FFSLL}
#endif
/* Define to 1 if you have the `ftell' function. */
#ifndef HAVE_FTELL
#cmakedefine HAVE_FTELL ${HAVE_FTELL}
#endif
/* Define to 1 if you have the `ftello' function. */
#ifndef HAVE_FTELLO
#cmakedefine HAVE_FTELLO ${HAVE_FTELLO}
#endif
/* Define to 1 if you have the `fgetpos' function. */

View File

@ -33,7 +33,6 @@
#include <encoding.h>
typedef struct {
dev_t st_dev; /* ID of device containing file */
mode_t st_mode; /* Mode of file (see below) */
@ -50,13 +49,12 @@ typedef struct {
#endif
} vfs_stat;
typedef enum vfs_flags {
VFS_CAN_WRITE = 0x1, /// we can write to files in this space
VFS_CAN_EXEC = 0x2, /// we can execute files in this space
VFS_CAN_SEEK = 0x4, /// we can seek within files in this space
VFS_HAS_PREFIX = 0x8, /// has a prefix that identifies a file in this space
VFS_HAS_SUFFIX = 0x10, /// has a suffix that describes the file.
VFS_CAN_WRITE = 0x1, /// we can write to files in this space
VFS_CAN_EXEC = 0x2, /// we can execute files in this space
VFS_CAN_SEEK = 0x4, /// we can seek within files in this space
VFS_HAS_PREFIX = 0x8, /// has a prefix that identifies a file in this space
VFS_HAS_SUFFIX = 0x10, /// has a suffix that describes the file.
VFS_HAS_FUNCTION = 0x20 /// has a suffix that describes the file.
} vfs_flags_t;
@ -80,27 +78,32 @@ typedef struct vfs {
const char *suffix;
bool (*chDir)(struct vfs *me, const char *s);
/** operations */
void *(*open)(struct vfs *,int sno, const char *fname, const char *io_mode); /// open an object
void *(*open)(struct vfs *, int sno, const char *fname,
const char *io_mode); /// open an object
/// in this space, usual w,r,a,b flags plus B (store in a buffer)
bool (*close)(int sno); /// close the object
int (*get_char)(int sno); /// get an octet to the stream
bool (*close)(int sno); /// close the object
int (*get_char)(int sno); /// get an octet from the stream
int (*peek_char)(int sno); /// unget an octet from the stream
int (*put_char)(int sno, int ch); /// output an octet to the stream
void (*flush)(int sno); /// flush a stream
void (*flush)(int sno); /// flush a stream
int64_t (*seek)(int sno, int64_t offset,
int whence); /// jump around the stream
void *(*opendir)(struct vfs *,const char *s); /// open a directory object, if one exists
const char *(*nextdir)(void *d); /// walk to the next entry in a directory object
void *(*opendir)(struct vfs *,
const char *s); /// open a directory object, if one exists
const char *(*nextdir)(
void *d); /// walk to the next entry in a directory object
bool (*closedir)(void *d);
; /// close access a directory object
bool (*stat)(struct vfs *,const char *s,
bool (*stat)(struct vfs *, const char *s,
vfs_stat *); /// obtain size, age, permissions of a file.
bool (*isdir)(struct vfs *,const char *s); /// verify whether is directory.
bool (*isdir)(struct vfs *, const char *s); /// verify whether is directory.
bool (*exists)(struct vfs *, const char *s); /// verify whether a file exists.
bool (*chdir)(struct vfs *,const char *s); /// set working directory (may be virtual).
encoding_t enc; /// default file encoded.
bool (*chdir)(struct vfs *,
const char *s); /// set working directory (may be virtual).
encoding_t enc; /// default file encoded.
YAP_Term (*parsers)(int sno); // a set of parsers that can read the
// stream and generate a YAP_Term
int (*writers)(int ch, int sno );
// stream and generate a YAP_Term
int (*writers)(int ch, int sno);
/// convert a YAP_Term into this space
const char *virtual_cwd;
/** VFS dep
@ -113,7 +116,8 @@ extern VFS_t *GLOBAL_VFS;
extern void init_android_stream(void);
extern void Yap_InitStdStream(int sno, unsigned int flags, FILE *file, VFS_t *vfsp);
extern void Yap_InitStdStream(int sno, unsigned int flags, FILE *file,
VFS_t *vfsp);
static inline VFS_t *vfs_owner(const char *fname) {
VFS_t *me = GLOBAL_VFS;
@ -124,12 +128,13 @@ static inline VFS_t *vfs_owner(const char *fname) {
bool p = true;
if ((me->vflags & VFS_HAS_PREFIX) && p) {
const char *r = fname, *s = me->prefix;
while (*s && p) p = *s++ == *r++;
if (p && r > fname+1)
return me;
while (*s && p)
p = *s++ == *r++;
if (p && r > fname + 1)
return me;
}
if (me->vflags & VFS_HAS_SUFFIX && (sz = strlen(me->suffix)) && (d = (sz0 - sz)) >= 0 &&
strcmp(fname + d, me->suffix) == 0) {
if (me->vflags & VFS_HAS_SUFFIX && (sz = strlen(me->suffix)) &&
(d = (sz0 - sz)) >= 0 && strcmp(fname + d, me->suffix) == 0) {
return me;
}
me = me->next;

View File

@ -1,17 +1,16 @@
/*************************************************************************
* *
* YAP Prolog %W% %G% *
* Yap Prolog was developed at NCCUP - Universidade do Porto *
* *
* Copyright L.Damas, V.S.Costa and Universidade do Porto 1985-1997 *
* *
**************************************************************************
* *
* File: YapError.h *
* mods: *
* comments: error header file for YAP *
* version: $Id: Yap.h,v 1.38 2008-06-18 10:02:27 vsc Exp $ *
*************************************************************************/
* *
* YAP Prolog %W% %G% *
* Yap Prolog was developed at NCCUP - Universidade do Porto *
* *
* Copyright L.Damas, V.S.Costa and Universidade do Porto 1985-1997 *
* *
**************************************************************************
* *
* File: YapError.h *
* mods: *
* comments: error header file for YAP *
* version: $Id: Yap.h,v 1.38 2008-06-18 10:02:27 vsc Exp $ *
*************************************************************************/
#ifndef YAP_ERROR_H
#define YAP_ERROR_H 1
@ -42,21 +41,20 @@ struct yami *Yap_Error__(const char *file, const char *function, int lineno,
yap_error_number err, YAP_Term wheret, ...);
void Yap_ThrowError__(const char *file, const char *function, int lineno,
yap_error_number err, YAP_Term wheret, ...)
#ifndef MSC_VER
__attribute__ ((noreturn))
#endif
;
yap_error_number err, YAP_Term wheret, ...)
#ifndef MSC_VER
__attribute__((noreturn))
#endif
;
#define Yap_NilError(id, ...) \
Yap_Error__(__FILE__, __FUNCTION__, __LINE__, id, TermNil, __VA_ARGS__)
#define Yap_Error(id, inp, ...) \
Yap_Error__(__FILE__, __FUNCTION__, __LINE__, id, inp, __VA_ARGS__)
Yap_Error__(__FILE__, __FUNCTION__, __LINE__, id, inp, __VA_ARGS__)
#define Yap_ThrowError(id, inp, ...) \
Yap_ThrowError__(__FILE__, __FUNCTION__, __LINE__, id, inp, __VA_ARGS__)
#define Yap_ThrowError(id, inp, ...) \
Yap_ThrowError__(__FILE__, __FUNCTION__, __LINE__, id, inp, __VA_ARGS__)
#ifdef YAP_TERM_H
/**
@ -183,10 +181,10 @@ INLINE_ONLY extern inline Term Yap_ensure_atom__(const char *fu, const char *fi,
uintptr_t prologPredLine;
uintptr_t prologPredFirstLine;
uintptr_t prologPredLastLine;
const char * prologPredName;
const char *prologPredName;
uintptr_t prologPredArity;
const char * prologPredModule;
const char * prologPredFile;
const char *prologPredModule;
const char *prologPredFile;
void *errorGoal;
struct error_prolog_source *errorParent;
} yap_error_prolog_source_t;
@ -195,8 +193,8 @@ INLINE_ONLY extern inline Term Yap_ensure_atom__(const char *fu, const char *fi,
typedef struct s_yap_error_descriptor {
enum yap_error_status status;
yap_error_class_number errorClass;
const char * errorAsText;
const char * classAsText;
const char *errorAsText;
const char *classAsText;
yap_error_number errorNo;
intptr_t errorLine;
const char *errorFunction;
@ -206,15 +204,16 @@ INLINE_ONLY extern inline Term Yap_ensure_atom__(const char *fu, const char *fi,
uintptr_t prologPredLine;
uintptr_t prologPredFirstLine;
uintptr_t prologPredLastLine;
const char * prologPredName;
const char *prologPredName;
uintptr_t prologPredArity;
const char * prologPredModule;
const char * prologPredFile;
const char *prologPredModule;
const char *prologPredFile;
uintptr_t prologParserPos;
uintptr_t prologParserLine;
uintptr_t prologParserFirstLine;
uintptr_t prologParserLastLine;
const char * prologParserName;
const char * prologParserFile;
const char *prologParserText;
const char *prologParserFile;
bool prologConsulting;
void *errorTerm;
uintptr_t rawErrorTerm, rawExtraErrorTerm;

View File

@ -65,6 +65,16 @@ typedef struct yap_io_position {
intptr_t reserved[2]; /* future extensions */
} yapIOPOS;
typedef struct yapchlookahead {
intptr_t charcount; /* character position in file */
intptr_t linecount; /* lineno in file */
intptr_t linepos; /* position in line */
intptr_t ch; /* future extensions */
struct yapchlookahead *next;
} yapStreamLookahead;
extern int PopCode(int sno);
#ifndef _PL_STREAM_H
typedef struct {
YAP_Atom file; /* current source file */
@ -244,6 +254,7 @@ typedef struct stream_desc {
int); /* function the stream uses for parser. It may be different
from above if the ISO character conversion is on */
encoding_t encoding; /** current encoding for stream */
struct yapchlookahead *recbs; /// support arbitrary depth peek
} StreamDesc;
#endif

View File

@ -222,6 +222,8 @@ X_API int PL_get_nchars(term_t l, size_t *lengthp, char **s, unsigned flags) {
if (s) {
size_t len = strlen(out.val.c);
if (flags & (BUF_DISCARDABLE | BUF_RING)) {
strncpy(LOCAL_FileNameBuf, out.val.c, YAP_FILENAME_MAX);
*s = LOCAL_FileNameBuf;
pop_text_stack(lvl);
return true;
}
@ -707,10 +709,12 @@ X_API int PL_get_functor(term_t ts, functor_t *f) {
*f = t;
} else if (IsPairTerm(t)) {
*f = FunctorToSWIFunctor(FunctorDot);
} else {
} else if (IsApplTerm(t) && !IsExtensionFunctor(FunctorOfTerm(t))) {
*f = FunctorToSWIFunctor(FunctorOfTerm(t));
} else {
return false;
}
return 1;
return true;
}
/** @brief *f is assigned the floating point number of term ts, or the

View File

@ -3,9 +3,9 @@
(function(mod) {
if (typeof exports == "object" && typeof module == "object") // CommonJS
mod(require("../../lib/codemirror"));
mod(require("codemirror/lib/codemirror"));
else if (typeof define == "function" && define.amd) // AMD
define([ "../../lib/codemirror" ], mod);
define([ "codemirror/lib/codemirror" ], mod);
else // Plain browser env
mod(CodeMirror);
})(function(CodeMirror) {
@ -205,7 +205,7 @@ CodeMirror.defineMode("prolog", function(cm_config, parserConfig) {
});
delete state.tagName;
delete state.tagColumn;
return ret("dict_open", null);
return ret("dict_open", "bracket");
}
if (ch == "/" && stream.eat("*"))
@ -225,7 +225,7 @@ CodeMirror.defineMode("prolog", function(cm_config, parserConfig) {
break;
case "]":
state.nesting.pop();
return ret("list_close", null);
return ret("list_close", "bracket");
case "}": {
var nest = nesting(state);
var type = (nest && nest.tag) ? "dict_close" : "brace_term_close";
@ -251,30 +251,30 @@ CodeMirror.defineMode("prolog", function(cm_config, parserConfig) {
closeColumn : stream.column(),
alignment : stream.column() + 2
});
return ret("list_open", null);
return ret("list_open", "bracket");
break;
case "{":
if (config.quasiQuotations && stream.eat("|")) {
state.nesting.push(
{type : "quasi-quotation", alignment : stream.column() + 1});
return ret("qq_open", "qq_open");
return ret("qq_open", "bracket");
} else {
state.nesting.push({
type : "curly",
closeColumn : stream.column(),
alignment : stream.column() + 2
});
return ret("brace_term_open", null);
return ret("brace_term_open", "bracket");
}
break;
case "|":
if (config.quasiQuotations) {
if (stream.eat("|")) {
state.tokenize = plTokenQuasiQuotation;
return ret("qq_sep", "qq_sep");
return ret("qq_sep", "bracket");
} else if (stream.eat("}")) {
state.nesting.pop();
return ret("qq_close", "qq_close");
return ret("qq_close", "bracket");
}
}
if (isControl(state))
@ -304,7 +304,7 @@ CodeMirror.defineMode("prolog", function(cm_config, parserConfig) {
if (!readEsc(stream))
return ret("error", "error");
}
return ret("code", "code");
return ret("code", "number");
}
}
@ -325,37 +325,41 @@ CodeMirror.defineMode("prolog", function(cm_config, parserConfig) {
return ret("fullstop", "error", atom);
} else {
}
return ret("fullstop", "fullstop", atom);
return ret("fullstop", null, atom);
} else if (isNeck.test(atom)) {
return ret("neck", "neck", atom);
return ret("neck", "property", atom);
} else if (isControl(state) && isControlOp.test(atom)) {
state.goalStart = true;
return ret("symbol", "operator", atom);
return ret("symbol", "meta", atom);
} else
return ret("symbol", "operator", atom);
return ret("symbol", "meta", atom);
}
stream.eatWhile(/[\w_]/);
var word = stream.current();
var word = stream.current(), extra = "";
if (stream.peek() == "{" && config.dicts) {
state.tagName = word; /* tmp state extension */
state.tagColumn = stream.column();
return ret("tag", "tag", word);
} else if (ch == "_") {
if (word.length == 1) {
return ret("var", "anon", word);
return ret("var", "variable-3", word);
} else {
var sec = word.charAt(1);
if (sec == sec.toUpperCase())
return ret("var", "var-2", word);
return ret("var", "variable-3", word);
}
return ret("var", "var", word);
return ret("var", "variable-3", word);
} else if (ch == ch.toUpperCase()) {
return ret("var", "var", word);
return ret("var", "Variable-2", word);
} else if (stream.peek() == "(") {
state.functorName = word; /* tmp state extension */
state.functorColumn = stream.column();
return ret("functor", "functor", word);
return ret("functor", "atom", word);
} else if ((extra = stream.eat(/\/\/?\d+/))) {
state.functorName = word; /* tmp state extension */
state.functorColumn = stream.column();
return ret("functor", "atom", word);
} else
return ret("atom", "atom", word);
}
@ -367,7 +371,7 @@ CodeMirror.defineMode("prolog", function(cm_config, parserConfig) {
if (stream.peek() == "(") { /* 'quoted functor'() */
var word = stream.current();
state.functorName = word; /* tmp state extension */
return ret("functor", "functor", word);
return ret("functor", "atom", word);
}
if (stream.peek() == "{" && config.dicts) { /* 'quoted tag'{} */
var word = stream.current();
@ -389,7 +393,7 @@ CodeMirror.defineMode("prolog", function(cm_config, parserConfig) {
}
maybeEnd = (ch == "|");
}
return ret("qq_content", "qq_content");
return ret("qq_content", "string");
}
function plTokenComment(stream, state) {
@ -574,7 +578,7 @@ CodeMirror.defineMode("prolog", function(cm_config, parserConfig) {
"atom" : "atom",
"qatom" : "atom",
"bqstring" : "string",
"symbol" : "atom",
"symbol" : "keyword",
"functor" : "keyword",
"tag" : "tag",
"number" : "number",
@ -1192,15 +1196,14 @@ CodeMirror.defineMode("prolog", function(cm_config, parserConfig) {
if (builtins[state.curToken] == "prolog")
return "builtin";
if (ops[state.curToken])
return "operator";
//if (ops[state.curToken])
// return "operator";
if (typeof(parserConfig.enrich) == "function")
style = parserConfig.enrich(stream, state, type, content, style);
//if (typeof(parserConfig.enrich) == "function")
// style = parserConfig.enrich(stream, state, type, content, style);
return style;
return translType[type];
},
indent : function(state, textAfter) {
@ -1211,7 +1214,7 @@ CodeMirror.defineMode("prolog", function(cm_config, parserConfig) {
if ((nest = nesting(state))) {
if (nest.closeColumn && !state.commaAtEOL)
return nest.closeColumn;
return nest.alignment;
return nest.alignment;
}
if (!state.inBody)
return 0;

File diff suppressed because it is too large.

View File

@ -13,7 +13,7 @@
#cmakedefine HAVE_READLINE_READLINE_H ${HAVE_READLINE_READLINE_H}
#endif
#if defined(FOUND_READLINE)
#if defined(HAVE_READLINE_READLINE_H) && defined(HAVE_LIBREADLINE)
#define USE_READLINE 1
#endif

View File

@ -93,126 +93,68 @@ INLINE_ONLY inline EXTERN Int CharOfAtom(Atom at) {
return val;
}
Int Yap_peek(int sno) {
CACHE_REGS
Int ocharcount, olinecount, olinepos;
int PopCode(int sno) {
StreamDesc *s;
int32_t ch;
Int ch;
struct yapchlookahead *p;
s = GLOBAL_Stream + sno;
#if USE_READLINE
if (s->status & Readline_Stream_f && trueGlobalPrologFlag(READLINE_FLAG)) {
ch = Yap_ReadlinePeekChar(sno);
if (ch == EOFCHAR) {
s->stream_getc = EOFPeek;
s->stream_wgetc = EOFWPeek;
s->status |= Push_Eof_Stream_f;
}
return ch;
if (!s->recbs) {
return EOF;
}
#endif
#if !HAVE_FMEMOPEN
if (s->status & InMemory_Stream_f) {
return Yap_MemPeekc(sno);
}
#endif
/* buffer the character */
if (s->encoding == Yap_SystemEncoding() && 0) {
ch = fgetwc(s->file);
ungetwc(ch, s->file);
return ch;
} else {
ocharcount = s->charcount;
olinecount = s->linecount;
olinepos = s->linepos;
ch = s->stream_wgetc(sno);
if (ch == EOFCHAR) {
s->stream_getc = EOFPeek;
s->stream_wgetc = EOFWPeek;
s->status |= Push_Eof_Stream_f;
return ch;
}
}
if (s->encoding == ENC_OCTET || s->encoding == ENC_ISO_LATIN1 ||
s->encoding == ENC_ISO_ASCII) {
ungetc(ch, s->file);
} else if (s->encoding == ENC_ISO_UTF8) {
unsigned char cs[8];
size_t n = put_utf8(cs, ch);
while (n--) {
ungetc(cs[n], s->file);
}
} else if (s->encoding == ENC_UTF16_BE) {
/* do the ungetc as if a write .. */
// computations
if (ch < 0x10000) {
ungetc(ch % 256, s->file);
ungetc(ch / 256, s->file);
} else {
uint16_t lead = LEAD_OFFSET + (ch >> 10);
uint16_t trail = 0xDC00 + (ch & 0x3FF);
ungetc(lead % 256, s->file);
ungetc(lead / 256, s->file);
ungetc(trail % 256, s->file);
ungetc(trail / 256, s->file);
}
} else if (s->encoding == ENC_UTF16_LE) {
if (ch < 0x10000) {
ungetc(ch / 256, s->file);
ungetc(ch % 256, s->file);
} else {
uint16_t lead = LEAD_OFFSET + (ch >> 10);
uint16_t trail = 0xDC00 + (ch & 0x3FF);
ungetc(trail / 256, s->file);
ungetc(trail % 256, s->file);
ungetc(lead / 256, s->file);
ungetc(lead % 256, s->file);
}
} else if (s->encoding == ENC_ISO_UTF32_LE) {
ungetc((ch >> 24) & 0xff, s->file);
ungetc((ch >> 16) & 0xff, s->file);
ungetc((ch >> 8) & 0xff, s->file);
ungetc(ch & 0xff, s->file);
} else if (s->encoding == ENC_ISO_UTF32_BE) {
ungetc(ch & 0xff, s->file);
ungetc((ch >> 8) & 0xff, s->file);
ungetc((ch >> 16) & 0xff, s->file);
ungetc((ch >> 24) & 0xff, s->file);
} else if (s->encoding == ENC_UCS2_BE) {
/* do the ungetc as if a write .. */
// computations
ungetc(ch % 256, s->file);
ungetc(ch / 256, s->file);
} else if (s->encoding == ENC_UCS2_LE) {
ungetc(ch / 256, s->file);
ungetc(ch % 256, s->file);
}
s->charcount = ocharcount;
s->linecount = olinecount;
s->linepos = olinepos;
p = s->recbs;
ch = p->ch;
s->recbs = s->recbs->next;
free(p);
if (!s->recbs)
Yap_DefaultStreamOps(s);
return ch;
}
static Int dopeek_byte(int sno) {
static int peekCode(int sno, bool wide) {
Int ocharcount, olinecount, olinepos;
StreamDesc *s;
Int ch;
struct yapchlookahead *recb = malloc(sizeof(struct yapchlookahead)), *r;
recb->next = NULL;
s = GLOBAL_Stream + sno;
ocharcount = s->charcount;
olinecount = s->linecount;
olinepos = s->linepos;
ch = GLOBAL_Stream[sno].stream_getc(sno);
if (wide)
recb->ch = ch = GLOBAL_Stream[sno].stream_wgetc(sno);
else
recb->ch = ch = GLOBAL_Stream[sno].stream_getc(sno);
if (ch == EOFCHAR) {
s->stream_getc = EOFPeek;
s->stream_wgetc = EOFWPeek;
s->status |= Push_Eof_Stream_f;
return ch;
}
recb->charcount = s->charcount;
recb->linecount = s->linecount;
recb->linepos = s->linepos;
s->charcount = ocharcount;
s->linecount = olinecount;
s->linepos = olinepos;
if (s->recbs) {
r = s->recbs;
while (r->next) {
r = r->next;
}
r->next = recb;
} else {
s->recbs = recb;
}
/* buffer the character */
ungetc(ch, s->file);
GLOBAL_Stream[sno].stream_getc = PopCode;
GLOBAL_Stream[sno].stream_wgetc = PopCode;
return ch;
}
Int Yap_peek(int sno) { return peekCode(sno, true); }
static Int dopeek_byte(int sno) { return peekCode(sno, false); }
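The rewritten peek above no longer pushes bytes back with ungetc() per encoding; it records the peeked character (plus the saved position counters) in a lookahead list and temporarily points stream_getc/stream_wgetc at PopCode, which replays the queue. A minimal sketch of that replay queue, with a simplified stream type whose names are stand-ins rather than the YAP API:

#include <stdio.h>
#include <stdlib.h>

typedef struct lookahead {
  int ch;
  struct lookahead *next;
} lookahead;

typedef struct {
  const char *buf;    /* underlying input, a string here for simplicity */
  lookahead *pending; /* peeked characters waiting to be replayed */
} stream;

static int raw_getc(stream *s) { return *s->buf ? *s->buf++ : EOF; }

static int stream_getc(stream *s) {
  if (s->pending) { /* replay a previously peeked character first */
    lookahead *p = s->pending;
    int ch = p->ch;
    s->pending = p->next;
    free(p);
    return ch;
  }
  return raw_getc(s);
}

static int stream_peekc(stream *s) {
  int ch = stream_getc(s); /* read one character... */
  if (ch == EOF)
    return EOF;
  lookahead *p = malloc(sizeof *p); /* ...and queue it so getc sees it again */
  p->ch = ch;
  p->next = NULL;
  lookahead **tail = &s->pending;
  while (*tail)
    tail = &(*tail)->next;
  *tail = p;
  return ch;
}

int main(void) {
  stream s = {"abc", NULL};
  printf("%c ", stream_peekc(&s)); /* a */
  printf("%c ", stream_peekc(&s)); /* a: peeking again does not consume */
  printf("%c ", stream_getc(&s));  /* a: the queued character is replayed */
  printf("%c\n", stream_getc(&s)); /* b */
  return 0;
}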
bool store_code(int ch, Term t USES_REGS) {
Term t2 = Deref(t);
bool rc = Yap_unify_constant(t2, MkIntegerTerm(ch));

View File

@ -108,6 +108,7 @@ bool fill_pads(int sno, int sno0, int total, format_info *fg USES_REGS)
GLOBAL_Stream[sno].linecount = 1;
GLOBAL_Stream[sno].linepos += nchars;
GLOBAL_Stream[sno].charcount = 0;
GLOBAL_Stream[sno].recbs = NULL;
fg->phys_start = 0;
fg->lstart = GLOBAL_Stream[sno].linepos;
fg->gapi = 0;
@ -122,6 +123,7 @@ bool Yap_set_stream_to_buf(StreamDesc *st, const char *buf,
st->file = f = fmemopen((char *)buf, nchars, "r");
st->status = Input_Stream_f | Seekable_Stream_f | InMemory_Stream_f;
st->vfs = NULL;
st->recbs = NULL;
st->encoding = LOCAL_encoding;
Yap_DefaultStreamOps(st);
st->linecount = 0;
@ -194,7 +196,7 @@ int Yap_open_buf_write_stream(encoding_t enc, memBufSource src) {
st->linecount = 1;
st->encoding = enc;
st->vfs = NULL;
Yap_DefaultStreamOps(st);
st->recbs = NULL;
#if HAVE_OPEN_MEMSTREAM
st->file = open_memstream(&st->nbuf, &st->nsize);
// setbuf(st->file, NULL);
@ -205,6 +207,7 @@ int Yap_open_buf_write_stream(encoding_t enc, memBufSource src) {
return -1;
}
#endif
Yap_DefaultStreamOps(st);
UNLOCK(st->streamlock);
return sno;
}

View File

@ -189,7 +189,7 @@ fmemseek(void *cookie, fpos_t pos, int whence)
return (fpos_t) offset;
}
/* Reclaim resources used by stream described by COOKIE. */
/* Reclaim resources used by stream described by COOKIE. */
static int
fmemclose(void *cookie)
{

File diff suppressed because it is too large.

View File

@ -91,10 +91,15 @@ extern void Yap_InitStdStreams(void);
extern Term Yap_StreamPosition(int);
extern void Yap_CloseStream(int sno);
static inline Int GetCurInpPos(StreamDesc *inp_stream) {
return (inp_stream->linecount);
static inline Int GetCurInpLine(StreamDesc *inp_stream) {
return (inp_stream->linecount);
}
static inline Int GetCurInpPos(StreamDesc *inp_stream) {
return (inp_stream->charcount);
}
#define PlIOError(type, culprit, ...) \
PlIOError__(__FILE__, __FUNCTION__, __LINE__, type, culprit, __VA_ARGS__)

View File

@ -1,19 +1,19 @@
/*************************************************************************
* *
* YAP Prolog *
* *
* Yap Prolog was developed at NCCUP - Universidade do Porto *
* *
* Copyright L.Damas, V.S.Costa and Universidade do Porto 1985-1997 *
* *
**************************************************************************
* *
* File: mem.c *
* Last rev: 5/2/88 *
* mods: *
* comments: Input/Output C implemented predicates *
* *
*************************************************************************/
* *
* YAP Prolog *
* *
* Yap Prolog was developed at NCCUP - Universidade do Porto *
* *
* Copyright L.Damas, V.S.Costa and Universidade do Porto 1985-1997 *
* *
**************************************************************************
* *
* File: mem.c *
* Last rev: 5/2/88 *
* mods: *
* comments: Input/Output C implemented predicates *
* *
*************************************************************************/
#ifdef SCCS
static char SccsId[] = "%W% %G%";
#endif
@ -24,10 +24,10 @@ static char SccsId[] = "%W% %G%";
*/
#include "sysbits.h"
#include "YapStreams.h"
#if !HAVE_FMEMOPEN || !defined(HAVE_FMEMOPEN)
#include "YapStreams.h"
#include "format.h"
@ -48,6 +48,8 @@ int format_synch(int sno, int sno0, format_info *fg) {
GLOBAL_Stream[sno].linecount = 1;
GLOBAL_Stream[sno].linepos = 0;
GLOBAL_Stream[sno].charcount = 0;
GLOBAL_Stream[sno].recbs = NULL;
GLOBAL_Stream[sno].vfs = NULL;
fg->lstart = 0;
fg->phys_start = 0;
fg->gapi = 0;
@ -55,7 +57,7 @@ int format_synch(int sno, int sno0, format_info *fg) {
}
// uses directly the buffer in the memory stream.
bool fill_pads(int sno, int sno0, int total, format_info *fg USES_REGS) {
bool fill_pads(int sno, int sno0, int total, format_info *fg USES_REGS) {
int nfillers, fill_space, lfill_space, nchars;
int (*f_putc)(int, int);
const char *buf;
@ -103,6 +105,9 @@ int format_synch(int sno, int sno0, format_info *fg) {
GLOBAL_Stream[sno].linecount = 1;
GLOBAL_Stream[sno].linepos += nchars;
GLOBAL_Stream[sno].charcount = 0;
GLOBAL_Stream[sno].recbs = NULL;
GLOBAL_Stream[sno].vfs = NULL;
GLOBAL_Stream[sno].file = NULL;
fg->phys_start = 0;
fg->lstart = GLOBAL_Stream[sno].linepos;
fg->gapi = 0;
@ -180,7 +185,8 @@ static int MemPutc(int sno, int ch) {
return ((int)ch);
}
bool Yap_set_stream_to_buf(StreamDesc *st, const char *buf, size_t nchars USES_REGS) {
bool Yap_set_stream_to_buf(StreamDesc *st, const char *buf,
size_t nchars USES_REGS) {
FILE *f;
stream_flags_t flags;
@ -189,14 +195,14 @@ bool Yap_set_stream_to_buf(StreamDesc *st, const char *buf, size_t nchars USES_R
st->vfs = NULL;
Yap_initStream(st - GLOBAL_Stream, f, NULL, TermNil, LOCAL_encoding, flags,
AtomRead, NULL);
// like any file stream.
// like any file stream.
/* currently these streams are not seekable */
st->status = Input_Stream_f | InMemory_Stream_f;
st->u.mem_string.pos = 0;
st->u.mem_string.buf = (char *)buf;
st->u.mem_string.max_size = nchars;
st->u.mem_string.error_handler = NULL;
// st->u.mem_string.src = src; check new assets coode
// st->u.mem_string.src = src; check new assets coode
Yap_DefaultStreamOps(st);
return true;
}
@ -223,7 +229,7 @@ int Yap_open_buf_read_stream(const char *buf, size_t nchars, encoding_t *encp,
flags = Input_Stream_f | InMemory_Stream_f;
st->vfs = NULL;
Yap_initStream(sno, f, NULL, TermNil, encoding, flags, AtomRead, NULL);
// like any file stream.
// like any file stream.
/* currently these streams are not seekable */
st->status = Input_Stream_f | InMemory_Stream_f;
st->u.mem_string.pos = 0;
@ -272,7 +278,9 @@ int Yap_open_buf_write_stream(encoding_t enc, memBufSource src) {
st->charcount = 0;
st->linecount = 1;
st->encoding = enc;
st->recbs = NULL;
st->vfs = NULL;
st->file = NULL;
Yap_DefaultStreamOps(st);
st->nbuf = st->u.mem_string.buf = malloc(PLGETC_BUF_SIZE);
st->u.mem_string.src = MEM_BUF_MALLOC;
@ -357,7 +365,8 @@ restart:
void Yap_MemOps(StreamDesc *st) {
st->stream_putc = MemPutc;
st->stream_getc = MemGetc;}
st->stream_getc = MemGetc;
}
bool Yap_CloseMemoryStream(int sno) {
if ((GLOBAL_Stream[sno].status & Output_Stream_f)) {

View File

@ -95,7 +95,7 @@ static char SccsId[] = "%W% %G%";
#define SYSTEM_STAT stat
#endif
static Term syntax_error(TokEntry *errtok, int sno, Term cmod);
static Term syntax_error(TokEntry *errtok, int sno, Term cmod, Int start);
static void clean_vars(VarEntry *p) {
if (p == NULL)
@ -315,55 +315,80 @@ static Int scan_to_list(USES_REGS1) {
* Implicit arguments:
* +
*/
static Term syntax_error(TokEntry *errtok, int sno, Term cmod) {
static Term syntax_error(TokEntry *errtok, int sno, Term cmod, Int newpos) {
CACHE_REGS
Term startline, errline, endline;
Term tf[3];
Term tf[4];
Term tm;
Term *tailp = tf + 2;
Term *tailp = tf + 3;
CELL *Hi = HR;
TokEntry *tok = LOCAL_tokptr;
Int cline = tok->TokPos;
Int cline = tok->TokLine;
Int startpos = tok->TokPos;
errtok = LOCAL_toktide;
Int errpos = errtok->TokPos;
UInt diff = 0;
startline = MkIntegerTerm(cline);
endline = MkIntegerTerm(cline);
if (errtok != LOCAL_toktide) {
errtok = LOCAL_toktide;
}
LOCAL_Error_TYPE = YAP_NO_ERROR;
errline = MkIntegerTerm(errtok->TokPos);
errline = MkIntegerTerm(errtok->TokLine);
if (LOCAL_ErrorMessage)
tm = MkStringTerm(LOCAL_ErrorMessage);
else
tm = MkStringTerm("syntax error");
while (tok) {
else {
tm = MkStringTerm("syntax error");
}
if (errpos && newpos >= 0) {
char o[128 + 1];
diff = errpos - startpos;
if (diff > 128) {
diff = 128;
startpos = errpos - diff;
}
#if HAVE_FTELLO
Int curpos = ftello(GLOBAL_Stream[sno].file);
fseeko(GLOBAL_Stream[sno].file, startpos, SEEK_SET);
#else
Int curpos = ftell(GLOBAL_Stream[sno].file);
fseek(GLOBAL_Stream[sno].file, startpos, SEEK_SET);
#endif
fread(o, diff, 1, GLOBAL_Stream[sno].file);
#if HAVE_FTELLO
fseeko(GLOBAL_Stream[sno].file, curpos, SEEK_SET);
#else
fseek(GLOBAL_Stream[sno].file, curpos, SEEK_SET);
#endif
o[diff] = '\0';
tf[3] = MkStringTerm(o);
} else {
while (tok) {
if (HR > ASP - 1024) {
errline = MkIntegerTerm(0);
endline = MkIntegerTerm(0);
/* for some reason moving this earlier confuses gcc on solaris */
HR = Hi;
break;
}
if (tok->TokPos != cline) {
*tailp = MkPairTerm(TermNewLine, TermNil);
tailp = RepPair(*tailp) + 1;
cline = tok->TokPos;
}
if (tok == errtok && tok->Tok != Error_tok) {
*tailp = MkPairTerm(MkAtomTerm(AtomError), TermNil);
if (HR > ASP - 1024) {
errline = MkIntegerTerm(0);
endline = MkIntegerTerm(0);
/* for some reason moving this earlier confuses gcc on solaris */
HR = Hi;
break;
}
if (tok->TokLine != cline) {
*tailp = MkPairTerm(TermNewLine, TermNil);
tailp = RepPair(*tailp) + 1;
cline = tok->TokLine;
}
if (tok == errtok && tok->Tok != Error_tok) {
*tailp = MkPairTerm(MkAtomTerm(AtomError), TermNil);
tailp = RepPair(*tailp) + 1;
}
Term rep = Yap_tokRep(tok);
if (tok->TokNext) {
tok = tok->TokNext;
} else {
endline = MkIntegerTerm(tok->TokLine);
tok = NULL;
break;
}
*tailp = MkPairTerm(rep, TermNil);
tailp = RepPair(*tailp) + 1;
}
Term rep = Yap_tokRep(tok);
if (tok->TokNext) {
tok = tok->TokNext;
} else {
endline = MkIntegerTerm(tok->TokPos);
tok = NULL;
break;
}
*tailp = MkPairTerm(rep, TermNil);
tailp = RepPair(*tailp) + 1;
}
{
Term t[3];
@ -376,9 +401,10 @@ static Term syntax_error(TokEntry *errtok, int sno, Term cmod) {
/*2 msg */
/* 1: file */
tf[1] = Yap_StreamUserName(sno);
tf[2] = MkIntegerTerm(LOCAL_ActiveError->prologParserPos);
clean_vars(LOCAL_VarTable);
clean_vars(LOCAL_AnonVarTable);
Term terr = Yap_MkApplTerm(FunctorInfo3, 3, tf);
Term terr = Yap_MkApplTerm(FunctorInfo4, 4, tf);
Term tn[2];
tn[0] = Yap_MkApplTerm(FunctorShortSyntaxError, 1, &tm);
tn[1] = terr;
@ -386,14 +412,14 @@ static Term syntax_error(TokEntry *errtok, int sno, Term cmod) {
#if DEBUG
if (Yap_ExecutionMode == YAP_BOOT_MODE) {
fprintf(stderr, "SYNTAX ERROR while booting: ");
Yap_DebugPlWriteln(terr);
}
#endif
return terr;
}
Term Yap_syntax_error(TokEntry *errtok, int sno) {
return syntax_error(errtok, sno, CurrentModule);
return syntax_error(errtok, sno, CurrentModule, -1);
}
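syntax_error() now quotes the offending source text when the stream is seekable: it saves the current offset, seeks back to the position where the error started, reads at most 128 bytes, and restores the offset (using ftello/fseeko where HAVE_FTELLO is defined). A rough standalone sketch of that idiom; read_context() and the demo file are hypothetical, not YAP functions:

#include <stdio.h>

/* Copy the bytes [start, end) of f into out (at most max bytes, keeping the
 * tail), NUL-terminate, and leave the stream position untouched. */
static size_t read_context(FILE *f, long start, long end, char *out, size_t max) {
  long len = end - start;
  if (len <= 0)
    return 0;
  if ((size_t)len > max) { /* keep the text closest to the error */
    start = end - (long)max;
    len = (long)max;
  }
  long saved = ftell(f); /* ftello/fseeko would be used where HAVE_FTELLO holds */
  if (saved < 0 || fseek(f, start, SEEK_SET) != 0)
    return 0;
  size_t n = fread(out, 1, (size_t)len, f);
  out[n] = '\0';
  fseek(f, saved, SEEK_SET); /* restore the parser's position */
  return n;
}

int main(void) {
  FILE *f = fopen("demo.pl", "w+"); /* hypothetical scratch file */
  if (!f)
    return 1;
  fputs("foo :- bar baz.\n", f); /* "bar baz" is where a parse would fail */
  char o[128 + 1];
  read_context(f, 7, 14, o, 128); /* bytes 7..13 hold "bar baz" */
  printf("%s\n", o);
  fclose(f);
  return 0;
}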
typedef struct FEnv {
@ -417,9 +443,6 @@ typedef struct renv {
bool ce, sw;
Term sy;
UInt cpos;
#if HAVE_FGETPOS
fpos_t rpos;
#endif
int prio;
int ungetc_oldc;
int had_ungetc;
@ -500,11 +523,7 @@ static xarg *setReadEnv(Term opts, FEnv *fe, struct renv *re, int inp_stream) {
}
re->seekable = (GLOBAL_Stream[inp_stream].status & Seekable_Stream_f) != 0;
if (re->seekable) {
#if HAVE_FGETPOS
fgetpos(GLOBAL_Stream[inp_stream].file, &re->rpos);
#else
re->cpos = GLOBAL_Stream[inp_stream].charcount;
#endif
}
if (args[READ_PRIORITY].used) {
re->prio = IntegerOfTerm(args[READ_PRIORITY].tvalue);
@ -890,8 +909,8 @@ static parser_state_t scanError(REnv *re, FEnv *fe, int inp_stream) {
if (GLOBAL_Stream[inp_stream].status & InMemory_Stream_f) {
GLOBAL_Stream[inp_stream].u.mem_string.pos = re->cpos;
} else if (GLOBAL_Stream[inp_stream].status) {
#if HAVE_FGETPOS
fsetpos(GLOBAL_Stream[inp_stream].file, &re->rpos);
#if HAVE_FTELLO
fseeko(GLOBAL_Stream[inp_stream].file, re->cpos, 0L);
#else
fseek(GLOBAL_Stream[inp_stream].file, re->cpos, 0L);
#endif
@ -912,7 +931,11 @@ static parser_state_t parseError(REnv *re, FEnv *fe, int inp_stream) {
LOCAL_Error_TYPE = YAP_NO_ERROR;
return YAP_PARSING_FINISHED;
} else {
Term t = syntax_error(fe->toklast, inp_stream, fe->cmod);
if (re->seekable) {
re->cpos = GLOBAL_Stream[inp_stream].charcount;
}
Term t = syntax_error(fe->toklast, inp_stream, fe->cmod, re->cpos);
if (ParserErrorStyle == TermError) {
LOCAL_ActiveError->errorTerm = Yap_StoreTermInDB(t, 4);
LOCAL_Error_TYPE = SYNTAX_ERROR;
@ -1059,8 +1082,7 @@ typedef enum read_clause_enum_choices {
static const param_t read_clause_defs[] = {READ_CLAUSE_DEFS()};
#undef PAR
static xarg *setClauseReadEnv(Term opts, FEnv *fe, struct renv *re,
int sno) {
static xarg *setClauseReadEnv(Term opts, FEnv *fe, struct renv *re, int sno) {
CACHE_REGS
xarg *args = Yap_ArgListToVector(opts, read_clause_defs, READ_CLAUSE_END);
@ -1120,11 +1142,7 @@ static xarg *setClauseReadEnv(Term opts, FEnv *fe, struct renv *re,
fe->ce = Yap_CharacterEscapes(fe->cmod);
re->seekable = (GLOBAL_Stream[sno].status & Seekable_Stream_f) != 0;
if (re->seekable) {
#if HAVE_FGETPOS
fgetpos(GLOBAL_Stream[sno].file, &re->rpos);
#else
re->cpos = GLOBAL_Stream[sno].charcount;
#endif
}
re->prio = LOCAL_default_priority;
return args;
@ -1354,202 +1372,200 @@ Term Yap_BufferToTerm(const unsigned char *s, Term opts) {
Term rval;
int sno;
encoding_t l = ENC_ISO_UTF8;
sno = Yap_open_buf_read_stream((char *)s, strlen((const char*)s), &l, MEM_BUF_USER);
sno = Yap_open_buf_read_stream((char *)s, strlen((const char *)s), &l,
MEM_BUF_USER);
rval = Yap_read_term(sno, opts, false);
Yap_CloseStream(sno);
return rval;
}
X_API Term Yap_BufferToTermWithPrioBindings(const unsigned char *s, size_t len,
Term opts, int prio,
Term bindings) {
CACHE_REGS
Term ctl;
X_API Term Yap_BufferToTermWithPrioBindings(const unsigned char *s, size_t len,
Term opts, int prio,
Term bindings) {
CACHE_REGS
Term ctl;
ctl = opts;
if (bindings) {
ctl = add_names(bindings, TermNil);
}
if (prio != 1200) {
ctl = add_priority(bindings, ctl);
}
return Yap_BufferToTerm(s, ctl);
}
ctl = opts;
if (bindings) {
ctl = add_names(bindings, TermNil);
}
if (prio != 1200) {
ctl = add_priority(bindings, ctl);
}
return Yap_BufferToTerm(s, ctl);
}
/**
* @pred read_term_from_atom( +Atom , -T , +Options )
*
* read a term _T_ stored in constant _Atom_ according to _Options_
*
* @param _Atom_ the source _Atom_
* @param _T_ the output term _T_, may be any term
* @param _Options_ read_term/3 options.
*
* @notes Originally from SWI-Prolog, in YAP only works with internalised
*atoms
* Check read_term_from_atomic/3 for the general version. Also, the built-in
*is
*supposed to
* use YAP's internal encoding, so please avoid the encoding/1 option.
*/
static Int read_term_from_atom(USES_REGS1) {
Term t1 = Deref(ARG1);
Atom at;
const unsigned char *s;
/**
* @pred read_term_from_atom( +Atom , -T , +Options )
*
* read a term _T_ stored in constant _Atom_ according to _Options_
*
* @param _Atom_ the source _Atom_
* @param _T_ the output term _T_, may be any term
* @param _Options_ read_term/3 options.
*
* @notes Originally from SWI-Prolog, in YAP only works with internalised
*atoms
* Check read_term_from_atomic/3 for the general version. Also, the built-in
*is
*supposed to
* use YAP's internal encoding, so please avoid the encoding/1 option.
*/
static Int read_term_from_atom(USES_REGS1) {
Term t1 = Deref(ARG1);
Atom at;
const unsigned char *s;
if (IsVarTerm(t1)) {
Yap_Error(INSTANTIATION_ERROR, t1, "style_check/1");
return false;
} else if (!IsAtomTerm(t1)) {
Yap_Error(TYPE_ERROR_ATOM, t1, "style_check/1");
return false;
} else {
at = AtomOfTerm(t1);
s = at->UStrOfAE;
}
Term ctl = add_output(ARG2, ARG3);
if (IsVarTerm(t1)) {
Yap_Error(INSTANTIATION_ERROR, t1, "style_check/1");
return false;
} else if (!IsAtomTerm(t1)) {
Yap_Error(TYPE_ERROR_ATOM, t1, "style_check/1");
return false;
} else {
at = AtomOfTerm(t1);
s = at->UStrOfAE;
}
Term ctl = add_output(ARG2, ARG3);
return Yap_BufferToTerm(s, ctl);
}
return Yap_BufferToTerm(s, ctl);
}
/**
* @pred read_term_from_atomic( +Atomic , - T , +Options )
*
* read a term _T_ stored in text _Atomic_ according to _Options_
*
* @param _Atomic_ the source may be an atom, string, list of codes, or list
*of
*chars.
* @param _T_ the output term _T_, may be any term
* @param _Options_ read_term/3 options.
*
* @notes Idea originally from SWI-Prolog, but in YAP we separate atomic and
*atom.
* Encoding is fixed in atoms and strings.
*/
static Int read_term_from_atomic(USES_REGS1) {
Term t1 = Deref(ARG1);
const unsigned char *s;
/**
* @pred read_term_from_atomic( +Atomic , - T , +Options )
*
* read a term _T_ stored in text _Atomic_ according to _Options_
*
* @param _Atomic_ the source may be an atom, string, list of codes, or list
*of
*chars.
* @param _T_ the output term _T_, may be any term
* @param _Options_ read_term/3 options.
*
* @notes Idea originally from SWI-Prolog, but in YAP we separate atomic and
*atom.
* Encoding is fixed in atoms and strings.
*/
static Int read_term_from_atomic(USES_REGS1) {
Term t1 = Deref(ARG1);
const unsigned char *s;
if (IsVarTerm(t1)) {
Yap_Error(INSTANTIATION_ERROR, t1, "read_term_from_atomic/3");
return (FALSE);
} else if (!IsAtomicTerm(t1)) {
Yap_Error(TYPE_ERROR_ATOMIC, t1, "read_term_from_atomic/3");
return (FALSE);
} else {
Term t = Yap_AtomicToString(t1 PASS_REGS);
s = UStringOfTerm(t);
}
Term ctl = add_output(ARG2, ARG3);
if (IsVarTerm(t1)) {
Yap_Error(INSTANTIATION_ERROR, t1, "read_term_from_atomic/3");
return (FALSE);
} else if (!IsAtomicTerm(t1)) {
Yap_Error(TYPE_ERROR_ATOMIC, t1, "read_term_from_atomic/3");
return (FALSE);
} else {
Term t = Yap_AtomicToString(t1 PASS_REGS);
s = UStringOfTerm(t);
}
Term ctl = add_output(ARG2, ARG3);
return Yap_BufferToTerm(s, ctl);
}
return Yap_BufferToTerm(s, ctl);
}
/**
* @pred read_term_from_string( +String , - T , + Options )
*
* read a term _T_ stored in constant _String_ according to _Options_
*
* @param _String_ the source _String_
* @param _T_ the output term _T_, may be any term
* @param _Options_ read_term/3 options.
*
* @notes Idea from SWI-Prolog, in YAP only works with strings
* Check read_term_from_atomic/3 for the general version.
*/
static Int read_term_from_string(USES_REGS1) {
Term t1 = Deref(ARG1), rc;
const unsigned char *s;
size_t len;
if (IsVarTerm(t1)) {
Yap_Error(INSTANTIATION_ERROR, t1, "read_term_from_string/3");
return (FALSE);
} else if (!IsStringTerm(t1)) {
Yap_Error(TYPE_ERROR_STRING, t1, "read_term_from_string/3");
return (FALSE);
} else {
s = UStringOfTerm(t1);
len = strlen_utf8(s);
}
char *ss = (char *)s;
encoding_t enc = ENC_ISO_UTF8;
int sno = Yap_open_buf_read_stream(ss, len, &enc, MEM_BUF_USER);
rc = Yap_read_term(sno, Deref(ARG3), 3);
Yap_CloseStream(sno);
if (!rc)
return false;
return Yap_unify(rc, ARG2);
}
/**
* @pred read_term_from_string( +String , - T , + Options )
*
* read a term _T_ stored in constant _String_ according to _Options_
*
* @param _String_ the source _String_
* @param _T_ the output term _T_, may be any term
* @param _Options_ read_term/3 options.
*
* @notes Idea from SWI-Prolog, in YAP only works with strings
* Check read_term_from_atomic/3 for the general version.
*/
static Int read_term_from_string(USES_REGS1) {
Term t1 = Deref(ARG1), rc;
const unsigned char *s;
size_t len;
if (IsVarTerm(t1)) {
Yap_Error(INSTANTIATION_ERROR, t1, "read_term_from_string/3");
return (FALSE);
} else if (!IsStringTerm(t1)) {
Yap_Error(TYPE_ERROR_STRING, t1, "read_term_from_string/3");
return (FALSE);
} else {
s = UStringOfTerm(t1);
len = strlen_utf8(s);
}
char *ss = (char *)s;
encoding_t enc = ENC_ISO_UTF8;
int sno = Yap_open_buf_read_stream(ss, len, &enc, MEM_BUF_USER);
rc = Yap_read_term(sno, Deref(ARG3), 3);
Yap_CloseStream(sno);
if (!rc)
return false;
return Yap_unify(rc, ARG2);
}
static Int atomic_to_term(USES_REGS1) {
Term t1 = Deref(ARG1);
if (IsVarTerm(t1)) {
Yap_Error(INSTANTIATION_ERROR, t1, "read_term_from_string/3");
return (FALSE);
} else if (!IsAtomicTerm(t1)) {
Yap_Error(TYPE_ERROR_ATOMIC, t1, "read_term_from_atomic/3");
return (FALSE);
} else {
Term t = Yap_AtomicToString(t1 PASS_REGS);
const unsigned char *us = UStringOfTerm(t);
return Yap_BufferToTerm(us,
add_output(ARG2, add_names(ARG3, TermNil)));
}
}
static Int atomic_to_term(USES_REGS1) {
Term t1 = Deref(ARG1);
if (IsVarTerm(t1)) {
Yap_Error(INSTANTIATION_ERROR, t1, "read_term_from_string/3");
return (FALSE);
} else if (!IsAtomicTerm(t1)) {
Yap_Error(TYPE_ERROR_ATOMIC, t1, "read_term_from_atomic/3");
return (FALSE);
} else {
Term t = Yap_AtomicToString(t1 PASS_REGS);
const unsigned char *us = UStringOfTerm(t);
return Yap_BufferToTerm(us, add_output(ARG2, add_names(ARG3, TermNil)));
}
}
static Int atom_to_term(USES_REGS1) {
Term t1 = Deref(ARG1);
if (IsVarTerm(t1)) {
Yap_Error(INSTANTIATION_ERROR, t1, "read_term_from_string/3");
return (FALSE);
} else if (!IsAtomTerm(t1)) {
Yap_Error(TYPE_ERROR_ATOM, t1, "read_term_from_atomic/3");
return (FALSE);
} else {
Term t = Yap_AtomicToString(t1 PASS_REGS);
const unsigned char *us = UStringOfTerm(t);
return Yap_BufferToTerm(us,
add_output(ARG2, add_names(ARG3, TermNil)));
}
}
static Int atom_to_term(USES_REGS1) {
Term t1 = Deref(ARG1);
if (IsVarTerm(t1)) {
Yap_Error(INSTANTIATION_ERROR, t1, "read_term_from_string/3");
return (FALSE);
} else if (!IsAtomTerm(t1)) {
Yap_Error(TYPE_ERROR_ATOM, t1, "read_term_from_atomic/3");
return (FALSE);
} else {
Term t = Yap_AtomicToString(t1 PASS_REGS);
const unsigned char *us = UStringOfTerm(t);
return Yap_BufferToTerm(us, add_output(ARG2, add_names(ARG3, TermNil)));
}
}
static Int string_to_term(USES_REGS1) {
Term t1 = Deref(ARG1);
if (IsVarTerm(t1)) {
Yap_Error(INSTANTIATION_ERROR, t1, "read_term_from_string/3");
return (FALSE);
} else if (!IsStringTerm(t1)) {
Yap_Error(TYPE_ERROR_STRING, t1, "read_term_from_string/3");
return (FALSE);
} else {
const unsigned char *us = UStringOfTerm(t1);
return Yap_BufferToTerm(us, add_output(ARG2, add_names(ARG3, TermNil)));
}
}
void Yap_InitReadTPreds(void) {
Yap_InitCPred("read_term", 2, read_term2, SyncPredFlag);
Yap_InitCPred("read_term", 3, read_term, SyncPredFlag);
Yap_InitCPred("scan_to_list", 2, scan_to_list, SyncPredFlag);
Yap_InitCPred("read", 1, read1, SyncPredFlag);
Yap_InitCPred("read", 2, read2, SyncPredFlag);
Yap_InitCPred("read_clause", 2, read_clause2, SyncPredFlag);
Yap_InitCPred("read_clause", 3, read_clause, 0);
Yap_InitCPred("read_term_from_atom", 3, read_term_from_atom, 0);
Yap_InitCPred("read_term_from_atomic", 3, read_term_from_atomic, 0);
Yap_InitCPred("read_term_from_string", 3, read_term_from_string, 0);
Yap_InitCPred("atom_to_term", 3, atom_to_term, 0);
Yap_InitCPred("atomic_to_term", 3, atomic_to_term, 0);
Yap_InitCPred("string_to_term", 3, string_to_term, 0);
Yap_InitCPred("scan_to_list", 2, scan_to_list, SyncPredFlag);
Yap_InitCPred("read", 1, read1, SyncPredFlag);
Yap_InitCPred("read", 2, read2, SyncPredFlag);
Yap_InitCPred("read_clause", 2, read_clause2, SyncPredFlag);
Yap_InitCPred("read_clause", 3, read_clause, 0);
Yap_InitCPred("read_term_from_atom", 3, read_term_from_atom, 0);
Yap_InitCPred("read_term_from_atomic", 3, read_term_from_atomic, 0);
Yap_InitCPred("read_term_from_string", 3, read_term_from_string, 0);
Yap_InitCPred("atom_to_term", 3, atom_to_term, 0);
Yap_InitCPred("atomic_to_term", 3, atomic_to_term, 0);
Yap_InitCPred("string_to_term", 3, string_to_term, 0);
Yap_InitCPred("fileerrors", 0, fileerrors, SyncPredFlag);
Yap_InitCPred("nofileeleerrors", 0, nofileerrors, SyncPredFlag);
Yap_InitCPred("source_location", 2, source_location, SyncPredFlag);
Yap_InitCPred("$style_checker", 1, style_checker,
SyncPredFlag | HiddenPredFlag);
}
Yap_InitCPred("fileerrors", 0, fileerrors, SyncPredFlag);
Yap_InitCPred("nofileeleerrors", 0, nofileerrors, SyncPredFlag);
Yap_InitCPred("source_location", 2, source_location, SyncPredFlag);
Yap_InitCPred("$style_checker", 1, style_checker,
SyncPredFlag | HiddenPredFlag);
}

View File

@ -256,6 +256,8 @@ Yap_InitSocketStream(int fd, socket_info flags, socket_domain domain) {
st->charcount = 0;
st->linecount = 1;
st->linepos = 0;
st->vfs = NULL;
st->recbs = NULL;
st->stream_putc = SocketPutc;
st->stream_getc = SocketGetc;
Yap_DefaultStreamOps( st );

View File

@ -701,7 +701,29 @@ wint_t = c_uint
# } __value; /* Value so far. */
#} __mbstate_t;
class _mbstate_t_value(Union):
class lK_mbstate_t_value(Union):
_fields_ = [("__wch",wint_t),
("__wchb",c_char*4)]

View File

@ -698,68 +698,66 @@ static PyObject *structseq_repr(PyObject *iobj) {
}
#endif
static bool legal_symbol(const char *s) {
int ch;
while (((ch = *s++) != '\0')) {
if (isalnum(ch) || ch == '_')
continue;
return false;
}
return true;
}
PyObject *term_to_nametuple(const char *s, arity_t arity, PyObject *tuple) {
PyObject *o, *d = NULL;
if (legal_symbol(s)) {
PyTypeObject *typp;
PyObject *key = PyUnicode_FromString(s);
if (Py_f2p && (d = PyList_GetItem(Py_f2p, arity)) &&
PyDict_Contains(d, key)) {
typp = (PyTypeObject *) PyDict_GetItem(d, key);
Py_INCREF(typp);
} else {
typp = calloc(sizeof(PyTypeObject), 1);
PyStructSequence_Desc *desc = calloc(sizeof(PyStructSequence_Desc), 1);
desc->name = PyMem_Malloc(strlen(s) + 1);
strcpy(desc->name, s);
desc->doc = "YAPTerm";
desc->fields = pnull;
desc->n_in_sequence = arity;
if (PyStructSequence_InitType2(typp, desc) < 0)
return NULL;
// typp->tp_flags &= ~Py_TPFLAGS_HEAPTYPE;
// typp->tp_flags &= ~Py_TPFLAGS_HAVE_GC;
// typp->tp_str = structseq_str;
typp->tp_repr = structseq_repr;
// typp = PyStructSequence_NewType(desc);
// don't do this: we cannot add a type as an atribute.
// PyModule_AddGObject(py_Main, s, (PyObject *)typp);
if (d && !PyDict_Contains(d, key))
PyDict_SetItem(d, key, (PyObject *) typp);
Py_DECREF(key);
Py_INCREF(typp);
}
o = PyStructSequence_New(typp);
PyTypeObject *typp;
PyObject *key = PyUnicode_FromString(s);
if (Py_f2p && (d = PyList_GetItem(Py_f2p, arity)) &&
PyDict_Contains(d, key)) {
typp = (PyTypeObject *)PyDict_GetItem(d, key);
Py_INCREF(typp);
arity_t i;
for (i = 0; i < arity; i++) {
PyObject *pArg = PyTuple_GET_ITEM(tuple, i);
Py_INCREF(pArg);
if (pArg)
PyStructSequence_SET_ITEM(o, i, pArg);
// PyObject_Print(pArg,stderr,0);fputc('\n',stderr);
}
//((PyStructSequence *)o)->ob_base.ob_size = arity;
// PyObject_Print(o,stderr,0);fputc('\n',stderr);
return o;
} else {
typp = calloc(sizeof(PyTypeObject), 1);
PyStructSequence_Desc *desc = calloc(sizeof(PyStructSequence_Desc), 1);
desc->name = PyMem_Malloc(strlen(s) + 1);
strcpy(desc->name, s);
desc->doc = "YAPTerm";
desc->fields = pnull;
desc->n_in_sequence = arity;
if (PyStructSequence_InitType2(typp, desc) < 0)
return NULL;
// typp->tp_flags &= ~Py_TPFLAGS_HEAPTYPE;
// typp->tp_flags &= ~Py_TPFLAGS_HAVE_GC;
// typp->tp_str = structseq_str;
typp->tp_repr = structseq_repr;
// typp = PyStructSequence_NewType(desc);
// don't do this: we cannot add a type as an atribute.
// PyModule_AddGObject(py_Main, s, (PyObject *)typp);
if (d && !PyDict_Contains(d, key))
PyDict_SetItem(d, key, (PyObject *)typp);
Py_DECREF(key);
Py_INCREF(typp);
}
o = PyStructSequence_New(typp);
Py_INCREF(typp);
arity_t i;
for (i = 0; i < arity; i++) {
PyObject *pArg = PyTuple_GET_ITEM(tuple, i);
Py_INCREF(pArg);
if (pArg)
PyStructSequence_SET_ITEM(o, i, pArg);
// PyObject_Print(pArg,stderr,0);fputc('\n',stderr);
}
//((PyStructSequence *)o)->ob_base.ob_size = arity;
// PyObject_Print(o,stderr,0);fputc('\n',stderr);
return o;
} else {
PyObject *o1;
o1 = PyTuple_New(2);
PyTuple_SET_ITEM(o1, 0, PyUnicode_FromString(s));
PyTuple_SET_ITEM(o1, 1, tuple);
return o1;
PyObject *o1;
o1 = PyTuple_New(2);
PyTuple_SET_ITEM(o1, 0, PyUnicode_FromString(s));
PyTuple_SET_ITEM(o1, 1, tuple);
return o1;
}
}
@ -945,7 +943,7 @@ PyObject *compound_to_pyeval(term_t t, PyObject *context, bool cvt) {
AOK(PL_get_arg(1, t, targ), NULL);
ptr = term_to_python(targ, true, NULL, true);
return PyObject_Dir(ptr);
{}
}
else if (fun == FUNCTOR_plus2) {
@ -1004,9 +1002,10 @@ PyObject *compound_to_pyeval(term_t t, PyObject *context, bool cvt) {
if (!arity) {
char *s = NULL;
PyObject *pValue;
AOK(PL_get_atom_chars(t, &s), NULL);
PyObject_Print(o, stderr, 0);
pValue = PyObject_GetAttrString(o, s);
PyObject_Print(pValue, stderr, 0);
if (CHECKNULL(t, pValue) == NULL) {
PyErr_Print();
return NULL;
@ -1014,41 +1013,66 @@ PyObject *compound_to_pyeval(term_t t, PyObject *context, bool cvt) {
return pValue;
} else {
char *s = PL_atom_chars(name);
PyObject *ys = lookupPySymbol(s, o, NULL);
PyObject *pArgs = PyTuple_New(arity);
PyObject *ys = lookupPySymbol(s, o, NULL), *pArgs;
DebugPrintf("Tuple %p\n", pArgs);
int i;
term_t tleft = PL_new_term_ref();
for (i = 0; i < arity; i++) {
PyObject *pArg;
AOK(PL_get_arg(i + 1, t, tleft), NULL);
/* ignore (_) */
if (i == 0 && PL_is_variable(tleft)) {
pArg = Py_None;
} else {
pArg = term_to_python(tleft, true, NULL, true);
// PyObject_Print(pArg,fdopen(2,"w"),0);
if (pArg == NULL) {
pArg = Py_None;
}
/* pArg reference stolen here: */
Py_INCREF(pArg);
}
bool indict = true;
PyObject *pyDict = PyDict_New();
PyTuple_SetItem(pArgs, i, pArg);
for (i = arity; i > 0; i--) {
PyObject *pArg;
AOK(PL_get_arg(i, t, tleft), NULL);
/* ignore (_) */
if (indict) {
if (PL_get_functor(tleft, &fun) && fun == FUNCTOR_equal2) {
term_t tatt = PL_new_term_ref();
AOK(PL_get_arg(1, tleft, tatt), NULL);
PyObject *key = term_to_python(tatt, true, NULL, true);
AOK(PL_get_arg(2, tleft, tatt), NULL);
PyObject *val = term_to_python(tatt, true, NULL, true);
PyDict_SetItem(pyDict, key, val);
} else {
indict = false;
pArgs = PyTuple_New(i);
}
}
if (!indict) {
if (PL_is_variable(tleft)) {
pArg = Py_None;
} else {
pArg = term_to_python(tleft, true, NULL, true);
// PyObject_Print(pArg,fdopen(2,"w"),0);
if (pArg == NULL) {
pArg = Py_None;
}
/* pArg reference stolen here: */
Py_INCREF(pArg);
}
PyTuple_SetItem(pArgs, i - 1, pArg);
}
}
PyObject *rc;
if (ys && PyCallable_Check(ys)) {
if (indict) {
pArgs = PyTuple_New(0);
}
// PyObject_Print(pArgs, stderr, 0);
// PyObject_Print(o, stderr, 0);
CHECK_CALL(rc, t, PyObject_CallObject(ys, pArgs));
Py_DECREF(pArgs);
Py_DECREF(ys);
DebugPrintf("CallObject %p\n", rc);
PyObject *rc;
if (ys && PyCallable_Check(ys)) {
PyObject_Print(ys, stderr, 0);
PyObject_Print(pArgs, stderr, 0);
PyObject_Print(pyDict, stderr, 0);
// PyObject_Print(pArgs, stderr, 0);
// PyObject_Print(o, stderr, 0);
CHECK_CALL(rc, t, PyObject_Call(ys, pArgs, pyDict));
Py_DECREF(pArgs);
Py_DECREF(ys);
PyObject_Print(rc, stderr, 0);
DebugPrintf("CallObject %p\n", rc);
} else {
rc = term_to_nametuple(s, arity, pArgs);
rc = term_to_nametuple(s, arity, pArgs);
}
return rc;

View File

@ -77,14 +77,24 @@ static int py_put(int sno, int ch) {
return ch;
}
static int py_get(int sno)
{
StreamDesc *s = YAP_GetStreamFromId(sno);
PyObject *fget = PyObject_GetAttrString(s->u.private_data, "read");
PyObject *pyr = PyObject_CallFunctionObjArgs(fget, PyLong_FromLong(1), NULL);
return PyUnicode_READ_CHAR(pyr, 0);
}
static int py_peek(int sno)
{
StreamDesc *s = YAP_GetStreamFromId(sno);
PyObject *fget = PyObject_GetAttrString(s->u.private_data, "peek");
PyObject *pyr = PyObject_CallFunctionObjArgs(fget, PyLong_FromLong(1), NULL);
return PyUnicode_READ_CHAR(pyr, 0);
}
static int64_t py_seek(int sno, int64_t where, int how)
{
StreamDesc *s = YAP_GetStreamFromId(sno);
PyObject *fseek = PyObject_GetAttrString(s->u.private_data, "seek");
PyObject *pyr = PyObject_CallFunctionObjArgs(fseek, PyLong_FromLong(where),
@ -124,6 +134,7 @@ static bool init_python_stream(void) {
pystream.open = py_open;
pystream.close = py_close;
pystream.get_char = py_get;
pystream.peek_char = py_peek;
pystream.put_char = py_put;
pystream.flush = py_flush;
pystream.seek = py_seek;

View File

@ -1,4 +1,4 @@
#!/usr/bin/env python
#!/usr/bin/env python
# coding: utf-8
# Copyright (c) IPython Development Team.
@ -64,7 +64,7 @@ if platform.system() == 'Windows':
win_libs = ['wsock32','ws2_32']
my_extra_link_args = ['-Wl,-export-all-symbols']
elif platform.system() == 'Darwin':
my_extra_link_args = ['-L','..']
my_extra_link_args = ['-L','..','-Wl,-rpath','-Wl,${CMAKE_INSTALL_FULL_LIBDIR}']
win_libs = []
local_libs = ['Py4YAP']
elif platform.system() == 'Linux':

View File

@ -13,19 +13,19 @@ if platform.system() == 'Windows':
def load( dll ):
dll = glob.glob(os.path.join(yap_lib_path,dll))[0]
dll = os.path.abspath(dll)
ctypes.WinDLL(dll)
elif platform.system() == 'Darwin':
def load( dll ):
dll = glob.glob(os.path.join(yap_lib_path,dll))[0]
dll = os.path.abspath(dll)
ctypes.CDLL(dll)
load('libYap.dylib')
load('libPy4YAP.dylib')
else:
def load( dll ):
dll = glob.glob(os.path.join(yap_lib_path,dll))[0]
dll = os.path.abspath(dll)
ctypes.CDLL(dll)
load('libYap.so')
load('libPy4YAP.so')
dll = glob.glob(os.path.join(os.path.realpath(__file__),dll))[0]
dll = os.path.abspath(dll)
ctypes.CDLL(dll)
# load('libYap.dylib')
# load('libPy4YAP.dylib')
load( '_yap.dylib' )
#else:
# def load( dll ):
# dll = glob.glob(os.path.join(yap_lib_path,dll))[0]
# dll = os.path.abspath(dll)
# ctypes.CDLL(dll)
# load('libYap.so')
# load('libPy4YAP.so')

View File

@ -1,7 +1,6 @@
"""The main routine of the yap python project."""
import sys
import yap4py.yapi
def main(**args):
@ -10,5 +9,6 @@ def main(**args):
args = sys.argv[1:]
if __name__ == "__main__":
import yap4py.yapi
main()
yap4py.yapi.main()

View File

@ -1,4 +1,3 @@
set (EXTRAS
MANIFEST.in
YAP_KERNEL.md
@ -60,21 +59,14 @@
yap_kernel/pylab/config.py
)
configure_file(setup.py.in ${CMAKE_CURRENT_BINARY_DIR}/setup.py)
configure_file(setup.py.in ${CMAKE_CURRENT_BINARY_DIR}/setup.py)
configure_file(${CMAKE_SOURCE_DIR}/misc/editors/prolog.js.in ${CMAKE_CURRENT_BINARY_DIR}/prolog.js )
file(MAKE_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/yap_kernel/resources )
file( COPY ${CMAKE_SOURCE_DIR}/docs/icons/yap_32x32x32.png
DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/yap_kernel/resources/ )
file( RENAME ${CMAKE_CURRENT_BINARY_DIR}/yap_kernel/resources/yap_32x32x32.png ${CMAKE_CURRENT_BINARY_DIR}/yap_kernel/resources/logo-32x32.png )
file( COPY ${CMAKE_SOURCE_DIR}/docs/icons/yap_64x64x32.png DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/yap_kernel/resources )
file( RENAME ${CMAKE_CURRENT_BINARY_DIR}/yap_kernel/resources/yap_64x64x32.png ${CMAKE_CURRENT_BINARY_DIR}/yap_kernel/resources/logo-64x64.png )
file( COPY ${CMAKE_CURRENT_SOURCE_DIR}/kernel.js DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/yap_kernel/resources/ )
file( COPY ${CMAKE_SOURCE_DIR}/misc/editors/prolog.js DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/yap_kernel/resources/)
set(SETUP_PY ${CMAKE_CURRENT_BINARY_DIR}/setup.py)
add_custom_target( YAPKernel ALL
COMMAND ${PYTHON_EXECUTABLE} ${SETUP_PY} build sdist bdist
COMMAND ${PYTHON_EXECUTABLE} ${SETUP_PY} build sdist bdist
WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
DEPENDS YAP4PY
)
@ -83,4 +75,3 @@
WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR})")
install(FILES ${PL_SOURCES} DESTINATION ${libpl} )

View File

@ -1,204 +1,9 @@
define( function() {
return {onload:function() {
console.info('Kernel specific javascript loaded');
// do more things here, like define a codemirror mode]]
require(['codemirror/lib/codemirror', 'codemirror/mode/meta'], function(CodeMirror) {
CodeMirror.mI = [
{name: "APL", mime: "text/apl", mode: "apl", ext: ["dyalog", "apl"]},
{name: "PGP", mimes: ["application/pgp", "application/pgp-keys", "application/pgp-signature"], mode: "asciiarmor", ext: ["pgp"]},
{name: "ASN.1", mime: "text/x-ttcn-asn", mode: "asn.1", ext: ["asn", "asn1"]},
{name: "Asterisk", mime: "text/x-asterisk", mode: "asterisk", file: /^extensions\.conf$/i},
{name: "Brainfuck", mime: "text/x-brainfuck", mode: "brainfuck", ext: ["b", "bf"]},
{name: "C", mime: "text/x-csrc", mode: "clike", ext: ["c", "h"]},
{name: "C++", mime: "text/x-c++src", mode: "clike", ext: ["cpp", "c++", "cc", "cxx", "hpp", "h++", "hh", "hxx"], alias: ["cpp"]},
{name: "Cobol", mime: "text/x-cobol", mode: "cobol", ext: ["cob", "cpy"]},
{name: "C#", mime: "text/x-csharp", mode: "clike", ext: ["cs"], alias: ["csharp"]},
{name: "Clojure", mime: "text/x-clojure", mode: "clojure", ext: ["clj", "cljc", "cljx"]},
{name: "ClojureScript", mime: "text/x-clojurescript", mode: "clojure", ext: ["cljs"]},
{name: "Closure Stylesheets (GSS)", mime: "text/x-gss", mode: "css", ext: ["gss"]},
{name: "CMake", mime: "text/x-cmake", mode: "cmake", ext: ["cmake", "cmake.in"], file: /^CMakeLists.txt$/},
{name: "CoffeeScript", mime: "text/x-coffeescript", mode: "coffeescript", ext: ["coffee"], alias: ["coffee", "coffee-script"]},
{name: "Common Lisp", mime: "text/x-common-lisp", mode: "commonlisp", ext: ["cl", "lisp", "el"], alias: ["lisp"]},
{name: "Cypher", mime: "application/x-cypher-query", mode: "cypher", ext: ["cyp", "cypher"]},
{name: "Cython", mime: "text/x-cython", mode: "python", ext: ["pyx", "pxd", "pxi"]},
{name: "Crystal", mime: "text/x-crystal", mode: "crystal", ext: ["cr"]},
{name: "CSS", mime: "text/css", mode: "css", ext: ["css"]},
{name: "CQL", mime: "text/x-cassandra", mode: "sql", ext: ["cql"]},
{name: "D", mime: "text/x-d", mode: "d", ext: ["d"]},
{name: "Dart", mimes: ["application/dart", "text/x-dart"], mode: "dart", ext: ["dart"]},
{name: "diff", mime: "text/x-diff", mode: "diff", ext: ["diff", "patch"]},
{name: "Django", mime: "text/x-django", mode: "django"},
{name: "Dockerfile", mime: "text/x-dockerfile", mode: "dockerfile", file: /^Dockerfile$/},
{name: "DTD", mime: "application/xml-dtd", mode: "dtd", ext: ["dtd"]},
{name: "Dylan", mime: "text/x-dylan", mode: "dylan", ext: ["dylan", "dyl", "intr"]},
{name: "EBNF", mime: "text/x-ebnf", mode: "ebnf"},
{name: "ECL", mime: "text/x-ecl", mode: "ecl", ext: ["ecl"]},
{name: "edn", mime: "application/edn", mode: "clojure", ext: ["edn"]},
{name: "Eiffel", mime: "text/x-eiffel", mode: "eiffel", ext: ["e"]},
{name: "Elm", mime: "text/x-elm", mode: "elm", ext: ["elm"]},
{name: "Embedded Javascript", mime: "application/x-ejs", mode: "htmlembedded", ext: ["ejs"]},
{name: "Embedded Ruby", mime: "application/x-erb", mode: "htmlembedded", ext: ["erb"]},
{name: "Erlang", mime: "text/x-erlang", mode: "erlang", ext: ["erl"]},
{name: "Factor", mime: "text/x-factor", mode: "factor", ext: ["factor"]},
{name: "FCL", mime: "text/x-fcl", mode: "fcl"},
{name: "Forth", mime: "text/x-forth", mode: "forth", ext: ["forth", "fth", "4th"]},
{name: "Fortran", mime: "text/x-fortran", mode: "fortran", ext: ["f", "for", "f77", "f90"]},
{name: "F#", mime: "text/x-fsharp", mode: "mllike", ext: ["fs"], alias: ["fsharp"]},
{name: "Gas", mime: "text/x-gas", mode: "gas", ext: ["s"]},
{name: "Gherkin", mime: "text/x-feature", mode: "gherkin", ext: ["feature"]},
{name: "GitHub Flavored Markdown", mime: "text/x-gfm", mode: "gfm", file: /^(readme|contributing|history).md$/i},
{name: "Go", mime: "text/x-go", mode: "go", ext: ["go"]},
{name: "Groovy", mime: "text/x-groovy", mode: "groovy", ext: ["groovy", "gradle"], file: /^Jenkinsfile$/},
{name: "HAML", mime: "text/x-haml", mode: "haml", ext: ["haml"]},
{name: "Haskell", mime: "text/x-haskell", mode: "haskell", ext: ["hs"]},
{name: "Haskell (Literate)", mime: "text/x-literate-haskell", mode: "haskell-literate", ext: ["lhs"]},
{name: "Haxe", mime: "text/x-haxe", mode: "haxe", ext: ["hx"]},
{name: "HXML", mime: "text/x-hxml", mode: "haxe", ext: ["hxml"]},
{name: "ASP.NET", mime: "application/x-aspx", mode: "htmlembedded", ext: ["aspx"], alias: ["asp", "aspx"]},
{name: "HTML", mime: "text/html", mode: "htmlmixed", ext: ["html", "htm"], alias: ["xhtml"]},
{name: "HTTP", mime: "message/http", mode: "http"},
{name: "IDL", mime: "text/x-idl", mode: "idl", ext: ["pro"]},
{name: "Pug", mime: "text/x-pug", mode: "pug", ext: ["jade", "pug"], alias: ["jade"]},
{name: "Java", mime: "text/x-java", mode: "clike", ext: ["java"]},
{name: "Java Server Pages", mime: "application/x-jsp", mode: "htmlembedded", ext: ["jsp"], alias: ["jsp"]},
{name: "JavaScript", mimes: ["text/javascript", "text/ecmascript", "application/javascript", "application/x-javascript", "application/ecmascript"],
mode: "javascript", ext: ["js"], alias: ["ecmascript", "js", "node"]},
{name: "JSON", mimes: ["application/json", "application/x-json"], mode: "javascript", ext: ["json", "map"], alias: ["json5"]},
{name: "JSON-LD", mime: "application/ld+json", mode: "javascript", ext: ["jsonld"], alias: ["jsonld"]},
{name: "JSX", mime: "text/jsx", mode: "jsx", ext: ["jsx"]},
{name: "Jinja2", mime: "null", mode: "jinja2"},
{name: "Julia", mime: "text/x-julia", mode: "julia", ext: ["jl"]},
{name: "Kotlin", mime: "text/x-kotlin", mode: "clike", ext: ["kt"]},
{name: "LESS", mime: "text/x-less", mode: "css", ext: ["less"]},
{name: "LiveScript", mime: "text/x-livescript", mode: "livescript", ext: ["ls"], alias: ["ls"]},
{name: "Lua", mime: "text/x-lua", mode: "lua", ext: ["lua"]},
{name: "Markdown", mime: "text/x-markdown", mode: "markdown", ext: ["markdown", "md", "mkd"]},
{name: "mIRC", mime: "text/mirc", mode: "mirc"},
{name: "MariaDB SQL", mime: "text/x-mariadb", mode: "sql"},
{name: "Mathematica", mime: "text/x-mathematica", mode: "mathematica", ext: ["m", "nb"]},
{name: "Modelica", mime: "text/x-modelica", mode: "modelica", ext: ["mo"]},
{name: "MUMPS", mime: "text/x-mumps", mode: "mumps", ext: ["mps"]},
{name: "MS SQL", mime: "text/x-mssql", mode: "sql"},
{name: "mbox", mime: "application/mbox", mode: "mbox", ext: ["mbox"]},
{name: "MySQL", mime: "text/x-mysql", mode: "sql"},
{name: "Nginx", mime: "text/x-nginx-conf", mode: "nginx", file: /nginx.*\.conf$/i},
{name: "NSIS", mime: "text/x-nsis", mode: "nsis", ext: ["nsh", "nsi"]},
{name: "NTriples", mime: "text/n-triples", mode: "ntriples", ext: ["nt"]},
{name: "Objective C", mime: "text/x-objectivec", mode: "clike", ext: ["m", "mm"], alias: ["objective-c", "objc"]},
{name: "OCaml", mime: "text/x-ocaml", mode: "mllike", ext: ["ml", "mli", "mll", "mly"]},
{name: "Octave", mime: "text/x-octave", mode: "octave", ext: ["m"]},
{name: "Oz", mime: "text/x-oz", mode: "oz", ext: ["oz"]},
{name: "Pascal", mime: "text/x-pascal", mode: "pascal", ext: ["p", "pas"]},
{name: "PEG.js", mime: "null", mode: "pegjs", ext: ["jsonld"]},
{name: "Perl", mime: "text/x-perl", mode: "perl", ext: ["pl", "pm"]},
{name: "PHP", mime: "application/x-httpd-php", mode: "php", ext: ["php", "php3", "php4", "php5", "phtml"]},
{name: "Pig", mime: "text/x-pig", mode: "pig", ext: ["pig"]},
{name: "Plain Text", mime: "text/plain", mode: "null", ext: ["txt", "text", "conf", "def", "list", "log"]},
{name: "PLSQL", mime: "text/x-plsql", mode: "sql", ext: ["pls"]},
{name: "PowerShell", mime: "application/x-powershell", mode: "powershell", ext: ["ps1", "psd1", "psm1"]},
{name: "Properties files", mime: "text/x-properties", mode: "properties", ext: ["properties", "ini", "in"], alias: ["ini", "properties"]},
{ name: "Prolog", mime: "text/x-prolog",
mode: "prolog", ext: ["pl", "yap", "yss", "P"] },
/*define(['./prolog.js'], function(){
{name: "ProtoBuf", mime: "text/x-protobuf", mode: "protobuf", ext: ["proto"]},
< {name: "Python", mime: "text/x-python", mode: "python", ext: ["BUILD", "bzl", "py", "pyw"], file: /^(BUCK|BUILD)$/},
{name: "Puppet", mime: "text/x-puppet", mode: "puppet", ext: ["pp"]},
{name: "Q", mime: "text/x-q", mode: "q", ext: ["q"]},
{name: "R", mime: "text/x-rsrc", mode: "r", ext: ["r", "R"], alias: ["rscript"]},
{name: "reStructuredText", mime: "text/x-rst", mode: "rst", ext: ["rst"], alias: ["rst"]},
{name: "RPM Changes", mime: "text/x-rpm-changes", mode: "rpm"},
{name: "RPM Spec", mime: "text/x-rpm-spec", mode: "rpm", ext: ["spec"]},
{name: "Ruby", mime: "text/x-ruby", mode: "ruby", ext: ["rb"], alias: ["jruby", "macruby", "rake", "rb", "rbx"]},
{name: "Rust", mime: "text/x-rustsrc", mode: "rust", ext: ["rs"]},
{name: "SAS", mime: "text/x-sas", mode: "sas", ext: ["sas"]},
{name: "Sass", mime: "text/x-sass", mode: "sass", ext: ["sass"]},
{name: "Scala", mime: "text/x-scala", mode: "clike", ext: ["scala"]},
{name: "Scheme", mime: "text/x-scheme", mode: "scheme", ext: ["scm", "ss"]},
{name: "SCSS", mime: "text/x-scss", mode: "css", ext: ["scss"]},
{name: "Shell", mime: "text/x-sh", mode: "shell", ext: ["sh", "ksh", "bash"], alias: ["bash", "sh", "zsh"], file: /^PKGBUILD$/},
{name: "Sieve", mime: "application/sieve", mode: "sieve", ext: ["siv", "sieve"]},
{name: "Slim", mimes: ["text/x-slim", "application/x-slim"], mode: "slim", ext: ["slim"]},
{name: "Smalltalk", mime: "text/x-stsrc", mode: "smalltalk", ext: ["st"]},
{name: "Smarty", mime: "text/x-smarty", mode: "smarty", ext: ["tpl"]},
{name: "Solr", mime: "text/x-solr", mode: "solr"},
{name: "Soy", mime: "text/x-soy", mode: "soy", ext: ["soy"], alias: ["closure template"]},
{name: "SPARQL", mime: "application/sparql-query", mode: "sparql", ext: ["rq", "sparql"], alias: ["sparul"]},
{name: "Spreadsheet", mime: "text/x-spreadsheet", mode: "spreadsheet", alias: ["excel", "formula"]},
{name: "SQL", mime: "text/x-sql", mode: "sql", ext: ["sql"]},
{name: "Squirrel", mime: "text/x-squirrel", mode: "clike", ext: ["nut"]},
{name: "Stylus", mime: "text/x-styl", mode: "stylus", ext: ["styl"]},
{name: "Swift", mime: "text/x-swift", mode: "swift", ext: ["swift"]},
{name: "sTeX", mime: "text/x-stex", mode: "stex"},
{name: "LaTeX", mime: "text/x-latex", mode: "stex", ext: ["text", "ltx"], alias: ["tex"]},
{name: "SystemVerilog", mime: "text/x-systemverilog", mode: "verilog", ext: ["v"]},
{name: "Tcl", mime: "text/x-tcl", mode: "tcl", ext: ["tcl"]},
{name: "Textile", mime: "text/x-textile", mode: "textile", ext: ["textile"]},
{name: "TiddlyWiki ", mime: "text/x-tiddlywiki", mode: "tiddlywiki"},
{name: "Tiki wiki", mime: "text/tiki", mode: "tiki"},
{name: "TOML", mime: "text/x-toml", mode: "toml", ext: ["toml"]},
{name: "Tornado", mime: "text/x-tornado", mode: "tornado"},
{name: "troff", mime: "text/troff", mode: "troff", ext: ["1", "2", "3", "4", "5", "6", "7", "8", "9"]},
{name: "TTCN", mime: "text/x-ttcn", mode: "ttcn", ext: ["ttcn", "ttcn3", "ttcnpp"]},
{name: "TTCN_CFG", mime: "text/x-ttcn-cfg", mode: "ttcn-cfg", ext: ["cfg"]},
{name: "Turtle", mime: "text/turtle", mode: "turtle", ext: ["ttl"]},
{name: "TypeScript", mime: "application/typescript", mode: "javascript", ext: ["ts"], alias: ["ts"]},
{name: "Twig", mime: "text/x-twig", mode: "twig"},
{name: "Web IDL", mime: "text/x-webidl", mode: "webidl", ext: ["webidl"]},
{name: "VB.NET", mime: "text/x-vb", mode: "vb", ext: ["vb"]},
{name: "VBScript", mime: "text/vbscript", mode: "vbscript", ext: ["vbs"]},
{name: "Velocity", mime: "text/velocity", mode: "velocity", ext: ["vtl"]},
{name: "Verilog", mime: "text/x-verilog", mode: "verilog", ext: ["v"]},
{name: "VHDL", mime: "text/x-vhdl", mode: "vhdl", ext: ["vhd", "vhdl"]},
{name: "Vue.js Component", mimes: ["script/x-vue", "text/x-vue"], mode: "vue", ext: ["vue"]},
{name: "XML", mimes: ["application/xml", "text/xml"], mode: "xml", ext: ["xml", "xsl", "xsd"], alias: ["rss", "wsdl", "xsd"]},
{name: "XQuery", mime: "application/xquery", mode: "xquery", ext: ["xy", "xquery"]},
{name: "Yacas", mime: "text/x-yacas", mode: "yacas", ext: ["ys"]},
{name: "YAML", mimes: ["text/x-yaml", "text/yaml"], mode: "yaml", ext: ["yaml", "yml"], alias: ["yml"]},
{name: "Z80", mime: "text/x-z80", mode: "z80", ext: ["z80"]},
{name: "mscgen", mime: "text/x-mscgen", mode: "mscgen", ext: ["mscgen", "mscin", "msc"]},
{name: "xu", mime: "text/x-xu", mode: "mscgen", ext: ["xu"]},
{name: "msgenny", mime: "text/x-msgenny", mode: "mscgen", ext: ["msgenny"]}
];
var onload = function(){
console.log("I am being loaded");
});
CodeMirror.findModeByMIME = function(mime) {
mime = mime.toLowerCase();
for (var i = 0; i < CodeMirror.mI.length; i++) {
var info = CodeMirror.mI[i];
if (info.mime == mime) return info;
if (info.mimes) for (var j = 0; j < info.mimes.length; j++)
if (info.mimes[j] == mime) return info;
}
};
CodeMirror.findModeByExtension = function(ext) {
for (var i = 0; i < CodeMirror.mI.length; i++) {
var info = CodeMirror.mI[i];
if (info.ext) for (var j = 0; j < info.ext.length; j++)
if (info.ext[j] == ext) return info;
}
};
CodeMirror.findModeByFileName = function(filename) {
for (var i = 0; i < CodeMirror.mI.length; i++) {
var info = CodeMirror.mI[i];
if (info.file && info.file.test(filename)) return info;
}
var dot = filename.lastIndexOf(".");
var ext = dot > -1 && filename.substring(dot + 1, filename.length);
if (ext) return CodeMirror.findModeByExtension(ext);
};
CodeMirror.findModeByName = function(name) {
name = name.toLowerCase();
for (var i = 0; i < CodeMirror.mI.length; i++) {
var info = CodeMirror.mI[i];
if (info.name.toLowerCase() == name) return info;
if (info.alias) for (var j = 0; j < info.alias.length; j++)
if (info.alias[j].toLowerCase() == name) return info;
}
};
} }; });
};
return {onload:onload}
});*/

View File

@ -1,133 +0,0 @@
#!/usr/bin/env python
# coding: utf-8
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
from __future__ import print_function
# the name of the package
name = 'yap_kernel'
#-----------------------------------------------------------------------------
# Minimal Python version sanity check
#-----------------------------------------------------------------------------
import sys
import sysconfig
import setuptools
v = sys.version_info
if v[:2] < (2,7) or (v[0] >= 3 and v[:2] < (3,3)):
error = "ERROR: %s requires Python version 3.3 or above." % name
print(error, file=sys.stderr)
sys.exit(1)
PY3 = (sys.version_info[0] >= 3)
#-----------------------------------------------------------------------------
# get on with it
#-----------------------------------------------------------------------------
from glob import glob
import os
import shutil
from distutils.core import setup
pjoin = os.path.join
here = os.path.abspath(os.path.dirname(__file__))
# pkg_root = pjoin(here, name)
packages = setuptools.find_packages('/home/vsc/github/yap-6.3/packages/python/yap_kernel')
# for d, _, _ in os.walk(pjoin(here, name)):
# if os.path.exists(pjoin(d, '__init__.py')):
# packages.append(d[len(here)+1:].replace(os.path.sep, '.'))
sys.path.insert(0, "/home/vsc/github/yap-6.3/packages/python/yap_kernel")
package_data = {
'yap_ipython': ['prolog/*.*'],
'yap_kernel': ['resources/*.*']
}
version_ns = {}
with open(pjoin('/home/vsc/github/yap-6.3/packages/python/yap_kernel', name, '_version.py')) as f:
exec(f.read(), {}, version_ns)
setup_args = dict(
name = name,
version = version_ns['__version__'],
scripts = glob(pjoin('scripts', '*')),
packages = packages,
py_modules = ['yap_kernel_launcher'],
package_data = package_data,
package_dir = {'':"/home/vsc/github/yap-6.3/packages/python/yap_kernel"},
description = "YAP Kernel for Jupyter",
author = 'YAP Development Team',
author_email = 'YAP-dev@scipy.org',
url = 'http://ipython.org',
license = 'BSD',
platforms = "Linux, Mac OS X, Windows",
keywords = ['Interactive', 'Interpreter', 'Shell', 'Web'],
classifiers = [
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: BSD License',
'Programming Language :: Prolog',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
],
)
if 'develop' in sys.argv or any(a.startswith('bdist') for a in sys.argv):
import setuptools
setuptools_args = {}
install_requires = setuptools_args['install_requires'] = [
]
if any(a.startswith(('bdist', 'build', 'install')) for a in sys.argv):
from yap_kernel.kernelspec import write_kernel_spec, make_yap_kernel_cmd, KERNEL_NAME
argv = make_yap_kernel_cmd(executable='python')
dest = os.path.join(here, 'resources')
if not os.path.exists(dest):
os.makedirs( dest )
shutil.copy('/home/vsc/github/yap-6.3/docs/icons/yap_32x32x32.png',os.path.join(dest,'logo-32x32.png'))
shutil.copy('/home/vsc/github/yap-6.3/docs/icons/yap_64x64x32.png',os.path.join(dest,'logo-64x64.png'))
try:
write_kernel_spec(dest, overrides={'argv': argv})
except:
none
# shutil.copy('/home/vsc/github/yap-6.3/packages/python/yap_kernel/kernel.js',dest)
# shutil.copy('/home/vsc/github/yap-6.3/misc/editors/prolog.js',dest)
setup_args['data_files'] = [
(pjoin('share', 'jupyter', 'kernels', KERNEL_NAME), glob(pjoin(dest, '*'))),
]
mode_loc = pjoin( sysconfig.get_path('platlib'), 'notebook', 'static', 'components', 'codemirror', 'mode', 'prolog')
custom_loc = pjoin( sysconfig.get_path('platlib'), 'notebook', 'static', 'custom')
try:
shutil.copy( pjoin( custom_loc, "custom.js") , pjoin( custom_loc, "custom.js.orig"))
shutil.copy( "/home/vsc/github/yap-6.3/packages/python/yap_kernel/custom.js" , pjoin( custom_loc, "custom.js"))
if not os.path.exists(mode_loc):
os.makedirs(mode_loc)
shutil.copy( "/home/vsc/github/yap-6.3/misc/editors/prolog.js" , mode_loc)
except:
pass
extras_require = setuptools_args['extras_require'] = {
'test:python_version=="2.7"': ['mock'],
'test': ['nose_warnings_filters', 'nose-timer'],
}
if 'setuptools' in sys.modules:
setup_args.update(setuptools_args)
if __name__ == '__main__':
setup(**setup_args)

View File

@ -100,26 +100,24 @@ if any(a.startswith(('bdist', 'build', 'install')) for a in sys.argv):
try:
shutil.rmtree(dest)
os.makedirs( dest )
shutil.copy2('${CMAKE_SOURCE_DIR}/docs/icons/yap_32x32x32.png',dest)
shutil.copy2('${CMAKE_SOURCE_DIR}/docs/icons/yap_64x64x32.png',dest)
shutil.copy2('${CMAKE_SOURCE_DIR}/docs/icons/yap_32x32x32.png',pjoin(dest,"logo_32x32.png"))
shutil.copy2('${CMAKE_SOURCE_DIR}/docs/icons/yap_64x64x32.png',pjoin(dest,"logo_64x64.png"))
write_kernel_spec(dest, overrides={'argv': argv})
except:
pass
# shutil.copy('${CMAKE_CURRENT_SOURCE_DIR}/kernel.js',dest)
# shutil.copy('${CMAKE_SOURCE_DIR}/misc/editors/prolog.js',dest)
setup_args['data_files'] = [
(pjoin('share', 'jupyter', 'kernels', KERNEL_NAME), glob(pjoin(dest, '*'))),
]
setup_args['data_files'] = [(pjoin('share', 'jupyter', 'kernels', KERNEL_NAME), glob(pjoin(dest, '*')))]
mode_loc = pjoin( sysconfig.get_path('platlib'), 'notebook', 'static', 'components', 'codemirror', 'mode', 'prolog')
custom_loc = pjoin( sysconfig.get_path('platlib'), 'notebook', 'static', 'custom')
try:
shutil.copy( pjoin( custom_loc, "custom.js") , pjoin( custom_loc, "custom.js.orig"))
shutil.copy( "${CMAKE_CURRENT_SOURCE_DIR}/custom.js" , pjoin( custom_loc, "custom.js"))
if not os.path.exists(mode_loc):
os.makedirs(mode_loc)
shutil.copy( "${CMAKE_SOURCE_DIR}/misc/editors/prolog.js" , mode_loc)
except:
pass
# try:
# shutil.copy( pjoin( custom_loc, "custom.js") , pjoin( custom_loc, "custom.js.orig"))
# shutil.copy( "${CMAKE_CURRENT_SOURCE_DIR}/custom.js" , pjoin( custom_loc, "custom.js"))
# if not os.path.exists(mode_loc):
# os.makedirs(mode_loc)
# shutil.copy( "${CMAKE_SOURCE_DIR}/misc/editors/prolog.js" , mode_loc)
# except:
# pass
extras_require = setuptools_args['extras_require'] = {
'test:python_version=="2.7"': ['mock'],

View File

@ -1,11 +1,11 @@
# encoding: utf-8
"""
IPython: tools for interactive and parallel computing in Python.
yap_ipython: tools for interactive and parallel computing in Python.
http://ipython.org
"""
#-----------------------------------------------------------------------------
# Copyright (c) 2008-2011, IPython Development Team.
# Copyright (c) 2008-2011, yap_ipython Development Team.
# Copyright (c) 2001-2007, Fernando Perez <fernando.perez@colorado.edu>
# Copyright (c) 2001, Janko Hauser <jhauser@zscout.de>
# Copyright (c) 2001, Nathaniel Gray <n8gray@caltech.edu>
@ -29,19 +29,19 @@ import sys
# Don't forget to also update setup.py when this changes!
if sys.version_info < (3,3):
raise ImportError(
"""
IPython 6.0+ does not support Python 2.6, 2.7, 3.0, 3.1, or 3.2.
When using Python 2.7, please install IPython 5.x LTS Long Term Support version.
Beginning with IPython 6.0, Python 3.3 and above is required.
"""
yap_ipython 6.0+ does not support Python 2.6, 2.7, 3.0, 3.1, or 3.2.
When using Python 2.7, please install yap_ipython 5.x LTS Long Term Support version.
Beginning with yap_ipython 6.0, Python 3.3 and above is required.
See IPython `README.rst` file for more information:
See yap_ipython `README.rst` file for more information:
https://github.com/ipython/ipython/blob/master/README.rst
https://github.com/ipython/ipython/blob/master/README.rst
""")
""")
# Make it easy to import extensions - they are always directly on pythonpath.
# Therefore, non-IPython modules can be added to extensions directory.
# Therefore, non-yap_ipython modules can be added to extensions directory.
# This should probably be in ipapp.py.
sys.path.append(os.path.join(os.path.dirname(__file__), "extensions"))
@ -51,13 +51,13 @@ sys.path.append(os.path.join(os.path.dirname(__file__), "extensions"))
from .core.getipython import get_ipython
from .core import release
from IPython.core.application import Application
from IPython.terminal.embed import embed
from .core.application import Application
from .terminal.embed import embed
from .core.interactiveshell import YAPInteractive as InteractiveShell
from IPython.testing import test
from IPython.utils.sysinfo import sys_info
from IPython.utils.frame import extract_module_locals
from .core.interactiveshell import InteractiveShell
from .testing import test
from .utils.sysinfo import sys_info
from .utils.frame import extract_module_locals
# Release data
__author__ = '%s <%s>' % (release.author, release.author_email)
@ -66,22 +66,22 @@ __version__ = release.version
version_info = release.version_info
def embed_kernel(module=None, local_ns=None, **kwargs):
"""Embed and start an IPython kernel in a given scope.
"""Embed and start an yap_ipython kernel in a given scope.
If you don't want the kernel to initialize the namespace
from the scope of the surrounding function,
and/or you want to load full IPython configuration,
you probably want `IPython.start_kernel()` instead.
and/or you want to load full yap_ipython configuration,
you probably want `yap_ipython.start_kernel()` instead.
Parameters
----------
module : ModuleType, optional
The module to load into IPython globals (default: caller)
The module to load into yap_ipython globals (default: caller)
local_ns : dict, optional
The namespace to load into IPython user namespace (default: caller)
The namespace to load into yap_ipython user namespace (default: caller)
kwargs : various, optional
Further keyword args are relayed to the IPKernelApp constructor,
Further keyword args are relayed to the YAPKernelApp constructor,
allowing configuration of the Kernel. Will only have an effect
on the first embed_kernel call for a given process.
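
Examples
--------
A minimal, illustrative use: embed a kernel inside a running function so a
connecting client can inspect its locals::

    def compute():
        data = list(range(10))
        embed_kernel(local_ns=locals())  # a client can now inspect ``data``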
"""
@ -93,17 +93,17 @@ def embed_kernel(module=None, local_ns=None, **kwargs):
local_ns = caller_locals
# Only import .zmq when we really need it
from ipykernel.embed import embed_kernel as real_embed_kernel
from yap_kernel.embed import embed_kernel as real_embed_kernel
real_embed_kernel(module=module, local_ns=local_ns, **kwargs)
def start_ipython(argv=None, **kwargs):
"""Launch a normal IPython instance (as opposed to embedded)
"""Launch a normal yap_ipython instance (as opposed to embedded)
`IPython.embed()` puts a shell in a particular calling scope,
`yap_ipython.embed()` puts a shell in a particular calling scope,
such as a function or method for debugging purposes,
which is often not desirable.
`start_ipython()` does full, regular IPython initialization,
`start_ipython()` does full, regular yap_ipython initialization,
including loading startup files, configuration, etc.
much of which is skipped by `embed()`.
@ -113,25 +113,25 @@ def start_ipython(argv=None, **kwargs):
----------
argv : list or None, optional
If unspecified or None, IPython will parse command-line options from sys.argv.
If unspecified or None, yap_ipython will parse command-line options from sys.argv.
To prevent any command-line parsing, pass an empty list: `argv=[]`.
user_ns : dict, optional
specify this dictionary to initialize the IPython user namespace with particular values.
specify this dictionary to initialize the yap_ipython user namespace with particular values.
kwargs : various, optional
Any other kwargs will be passed to the Application constructor,
such as `config`.
"""
from IPython.terminal.ipapp import launch_new_instance
from yap_ipython.terminal.ipapp import launch_new_instance
return launch_new_instance(argv=argv, **kwargs)
def start_kernel(argv=None, **kwargs):
"""Launch a normal IPython kernel instance (as opposed to embedded)
"""Launch a normal yap_ipython kernel instance (as opposed to embedded)
`IPython.embed_kernel()` puts a shell in a particular calling scope,
`yap_ipython.embed_kernel()` puts a shell in a particular calling scope,
such as a function or method for debugging purposes,
which is often not desirable.
`start_kernel()` does full, regular IPython initialization,
`start_kernel()` does full, regular yap_ipython initialization,
including loading startup files, configuration, etc.
much of which is skipped by `embed()`.
@ -139,13 +139,13 @@ def start_kernel(argv=None, **kwargs):
----------
argv : list or None, optional
If unspecified or None, IPython will parse command-line options from sys.argv.
If unspecified or None, yap_ipython will parse command-line options from sys.argv.
To prevent any command-line parsing, pass an empty list: `argv=[]`.
user_ns : dict, optional
specify this dictionary to initialize the IPython user namespace with particular values.
specify this dictionary to initialize the yap_ipython user namespace with particular values.
kwargs : various, optional
Any other kwargs will be passed to the Application constructor,
such as `config`.
"""
from IPython.kernel.zmq.kernelapp import launch_new_instance
from yap_ipython.kernel.zmq.kernelapp import launch_new_instance
return launch_new_instance(argv=argv, **kwargs)

View File

@ -0,0 +1,14 @@
# encoding: utf-8
"""Terminal-based yap_ipython entry point.
"""
#-----------------------------------------------------------------------------
# Copyright (c) 2012, yap_ipython Development Team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
from yap_ipython import start_ipython
start_ipython()

View File

@ -0,0 +1,19 @@
"""
Shim to maintain backwards compatibility with old yap_ipython.config imports.
"""
# Copyright (c) yap_ipython Development Team.
# Distributed under the terms of the Modified BSD License.
import sys
from warnings import warn
from yap_ipython.utils.shimmodule import ShimModule, ShimWarning
warn("The `yap_ipython.config` package has been deprecated since yap_ipython 4.0. "
"You should import from traitlets.config instead.", ShimWarning)
# Unconditionally insert the shim into sys.modules so that further import calls
# trigger the custom attribute access above
sys.modules['yap_ipython.config'] = ShimModule(src='yap_ipython.config', mirror='traitlets.config')

View File

@ -0,0 +1,12 @@
"""
Shim to maintain backwards compatibility with old yap_ipython.consoleapp imports.
"""
# Copyright (c) yap_ipython Development Team.
# Distributed under the terms of the Modified BSD License.
from warnings import warn
warn("The `yap_ipython.consoleapp` package has been deprecated. "
"You should import from jupyter_client.consoleapp instead.")
from jupyter_client.consoleapp import *

View File

@ -0,0 +1,256 @@
# encoding: utf-8
"""
System command aliases.
Authors:
* Fernando Perez
* Brian Granger
"""
#-----------------------------------------------------------------------------
# Copyright (C) 2008-2011 The yap_ipython Development Team
#
# Distributed under the terms of the BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
import os
import re
import sys
from traitlets.config.configurable import Configurable
from yap_ipython.core.error import UsageError
from traitlets import List, Instance
from logging import error
#-----------------------------------------------------------------------------
# Utilities
#-----------------------------------------------------------------------------
# This is used as the pattern for calls to split_user_input.
shell_line_split = re.compile(r'^(\s*)()(\S+)(.*$)')
def default_aliases():
"""Return list of shell aliases to auto-define.
"""
# Note: the aliases defined here should be safe to use on a kernel
# regardless of what frontend it is attached to. Frontends that use a
# kernel in-process can define additional aliases that will only work in
# their case. For example, things like 'less' or 'clear' that manipulate
# the terminal should NOT be declared here, as they will only work if the
# kernel is running inside a true terminal, and not over the network.
if os.name == 'posix':
default_aliases = [('mkdir', 'mkdir'), ('rmdir', 'rmdir'),
('mv', 'mv'), ('rm', 'rm'), ('cp', 'cp'),
('cat', 'cat'),
]
# Useful set of ls aliases. The GNU and BSD options are a little
# different, so we make aliases that provide as similar as possible
# behavior in ipython, by passing the right flags for each platform
if sys.platform.startswith('linux'):
ls_aliases = [('ls', 'ls -F --color'),
# long ls
('ll', 'ls -F -o --color'),
# ls normal files only
('lf', 'ls -F -o --color %l | grep ^-'),
# ls symbolic links
('lk', 'ls -F -o --color %l | grep ^l'),
# directories or links to directories,
('ldir', 'ls -F -o --color %l | grep /$'),
# things which are executable
('lx', 'ls -F -o --color %l | grep ^-..x'),
]
elif sys.platform.startswith('openbsd') or sys.platform.startswith('netbsd'):
# OpenBSD, NetBSD. The ls implementation on these platforms does not support
# the -G switch and lacks the ability to use colorized output.
ls_aliases = [('ls', 'ls -F'),
# long ls
('ll', 'ls -F -l'),
# ls normal files only
('lf', 'ls -F -l %l | grep ^-'),
# ls symbolic links
('lk', 'ls -F -l %l | grep ^l'),
# directories or links to directories,
('ldir', 'ls -F -l %l | grep /$'),
# things which are executable
('lx', 'ls -F -l %l | grep ^-..x'),
]
else:
# BSD, OSX, etc.
ls_aliases = [('ls', 'ls -F -G'),
# long ls
('ll', 'ls -F -l -G'),
# ls normal files only
('lf', 'ls -F -l -G %l | grep ^-'),
# ls symbolic links
('lk', 'ls -F -l -G %l | grep ^l'),
# directories or links to directories,
('ldir', 'ls -F -G -l %l | grep /$'),
# things which are executable
('lx', 'ls -F -l -G %l | grep ^-..x'),
]
default_aliases = default_aliases + ls_aliases
elif os.name in ['nt', 'dos']:
default_aliases = [('ls', 'dir /on'),
('ddir', 'dir /ad /on'), ('ldir', 'dir /ad /on'),
('mkdir', 'mkdir'), ('rmdir', 'rmdir'),
('echo', 'echo'), ('ren', 'ren'), ('copy', 'copy'),
]
else:
default_aliases = []
return default_aliases
class AliasError(Exception):
pass
class InvalidAliasError(AliasError):
pass
class Alias(object):
"""Callable object storing the details of one alias.
Instances are registered as magic functions to allow use of aliases.
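
A minimal, illustrative use (``shell`` stands for whatever interactive-shell
object is in use; the alias name is made up)::

    a = Alias(shell, 'lsl', 'ls -l %s')
    a('/tmp')   # runs the system command: ls -l /tmp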
"""
# Prepare blacklist
blacklist = {'cd','popd','pushd','dhist','alias','unalias'}
def __init__(self, shell, name, cmd):
self.shell = shell
self.name = name
self.cmd = cmd
self.__doc__ = "Alias for `!{}`".format(cmd)
self.nargs = self.validate()
def validate(self):
"""Validate the alias, and return the number of arguments."""
if self.name in self.blacklist:
raise InvalidAliasError("The name %s can't be aliased "
"because it is a keyword or builtin." % self.name)
try:
caller = self.shell.magics_manager.magics['line'][self.name]
except KeyError:
pass
else:
if not isinstance(caller, Alias):
raise InvalidAliasError("The name %s can't be aliased "
"because it is another magic command." % self.name)
if not (isinstance(self.cmd, str)):
raise InvalidAliasError("An alias command must be a string, "
"got: %r" % self.cmd)
nargs = self.cmd.count('%s') - self.cmd.count('%%s')
if (nargs > 0) and (self.cmd.find('%l') >= 0):
raise InvalidAliasError('The %s and %l specifiers are mutually '
'exclusive in alias definitions.')
return nargs
def __repr__(self):
return "<alias {} for {!r}>".format(self.name, self.cmd)
def __call__(self, rest=''):
cmd = self.cmd
nargs = self.nargs
# Expand the %l special to be the user's input line
if cmd.find('%l') >= 0:
cmd = cmd.replace('%l', rest)
rest = ''
if nargs==0:
if cmd.find('%%s') >= 1:
cmd = cmd.replace('%%s', '%s')
# Simple, argument-less aliases
cmd = '%s %s' % (cmd, rest)
else:
# Handle aliases with positional arguments
args = rest.split(None, nargs)
if len(args) < nargs:
raise UsageError('Alias <%s> requires %s arguments, %s given.' %
(self.name, nargs, len(args)))
cmd = '%s %s' % (cmd % tuple(args[:nargs]),' '.join(args[nargs:]))
self.shell.system(cmd)
#-----------------------------------------------------------------------------
# Main AliasManager class
#-----------------------------------------------------------------------------
class AliasManager(Configurable):
default_aliases = List(default_aliases()).tag(config=True)
user_aliases = List(default_value=[]).tag(config=True)
shell = Instance('yap_ipython.core.interactiveshell.InteractiveShellABC', allow_none=True)
def __init__(self, shell=None, **kwargs):
super(AliasManager, self).__init__(shell=shell, **kwargs)
# For convenient access
self.linemagics = self.shell.magics_manager.magics['line']
self.init_aliases()
def init_aliases(self):
# Load default & user aliases
for name, cmd in self.default_aliases + self.user_aliases:
self.soft_define_alias(name, cmd)
@property
def aliases(self):
return [(n, func.cmd) for (n, func) in self.linemagics.items()
if isinstance(func, Alias)]
def soft_define_alias(self, name, cmd):
"""Define an alias, but don't raise on an AliasError."""
try:
self.define_alias(name, cmd)
except AliasError as e:
error("Invalid alias: %s" % e)
def define_alias(self, name, cmd):
"""Define a new alias after validating it.
This will raise an :exc:`AliasError` if there are validation
problems.
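
For example (illustrative; ``am`` is an AliasManager instance)::

    am.define_alias('ll', 'ls -F -l')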
"""
caller = Alias(shell=self.shell, name=name, cmd=cmd)
self.shell.magics_manager.register_function(caller, magic_kind='line',
magic_name=name)
def get_alias(self, name):
"""Return an alias, or None if no alias by that name exists."""
aname = self.linemagics.get(name, None)
return aname if isinstance(aname, Alias) else None
def is_alias(self, name):
"""Return whether or not a given name has been defined as an alias"""
return self.get_alias(name) is not None
def undefine_alias(self, name):
if self.is_alias(name):
del self.linemagics[name]
else:
raise ValueError('%s is not an alias' % name)
def clear_aliases(self):
for name, cmd in self.aliases:
self.undefine_alias(name)
def retrieve_alias(self, name):
"""Retrieve the command to which an alias expands."""
caller = self.get_alias(name)
if caller:
return caller.cmd
else:
raise ValueError('%s is not an alias' % name)

View File

@ -0,0 +1,462 @@
# encoding: utf-8
"""
An application for yap_ipython.
All top-level applications should use the classes in this module for
handling configuration and creating configurables.
The job of an :class:`Application` is to create the master configuration
object and then create the configurable objects, passing the config to them.
"""
# Copyright (c) yap_ipython Development Team.
# Distributed under the terms of the Modified BSD License.
import atexit
from copy import deepcopy
import glob
import logging
import os
import shutil
import sys
from traitlets.config.application import Application, catch_config_error
from traitlets.config.loader import ConfigFileNotFound, PyFileConfigLoader
from yap_ipython.core import release, crashhandler
from yap_ipython.core.profiledir import ProfileDir, ProfileDirError
from yap_ipython.paths import get_ipython_dir, get_ipython_package_dir
from yap_ipython.utils.path import ensure_dir_exists
from traitlets import (
List, Unicode, Type, Bool, Set, Instance, Undefined,
default, observe,
)
if os.name == 'nt':
programdata = os.environ.get('PROGRAMDATA', None)
if programdata:
SYSTEM_CONFIG_DIRS = [os.path.join(programdata, 'ipython')]
else: # PROGRAMDATA is not defined by default on XP.
SYSTEM_CONFIG_DIRS = []
else:
SYSTEM_CONFIG_DIRS = [
"/usr/local/etc/ipython",
"/etc/ipython",
]
ENV_CONFIG_DIRS = []
_env_config_dir = os.path.join(sys.prefix, 'etc', 'ipython')
if _env_config_dir not in SYSTEM_CONFIG_DIRS:
# only add ENV_CONFIG if sys.prefix is not already included
ENV_CONFIG_DIRS.append(_env_config_dir)
_envvar = os.environ.get('IPYTHON_SUPPRESS_CONFIG_ERRORS')
if _envvar in {None, ''}:
IPYTHON_SUPPRESS_CONFIG_ERRORS = None
else:
if _envvar.lower() in {'1','true'}:
IPYTHON_SUPPRESS_CONFIG_ERRORS = True
elif _envvar.lower() in {'0','false'} :
IPYTHON_SUPPRESS_CONFIG_ERRORS = False
else:
sys.exit("Unsupported value for environment variable: 'IPYTHON_SUPPRESS_CONFIG_ERRORS' is set to '%s' which is none of {'0', '1', 'false', 'true', ''}."% _envvar )
# aliases and flags
base_aliases = {
'profile-dir' : 'ProfileDir.location',
'profile' : 'BaseYAPApplication.profile',
'ipython-dir' : 'BaseYAPApplication.ipython_dir',
'log-level' : 'Application.log_level',
'config' : 'BaseYAPApplication.extra_config_file',
}
base_flags = dict(
debug = ({'Application' : {'log_level' : logging.DEBUG}},
"set log level to logging.DEBUG (maximize logging output)"),
quiet = ({'Application' : {'log_level' : logging.CRITICAL}},
"set log level to logging.CRITICAL (minimize logging output)"),
init = ({'BaseYAPApplication' : {
'copy_config_files' : True,
'auto_create' : True}
}, """Initialize profile with default config files. This is equivalent
to running `ipython profile create <profile>` prior to startup.
""")
)
class ProfileAwareConfigLoader(PyFileConfigLoader):
"""A Python file config loader that is aware of yap_ipython profiles."""
def load_subconfig(self, fname, path=None, profile=None):
if profile is not None:
try:
profile_dir = ProfileDir.find_profile_dir_by_name(
get_ipython_dir(),
profile,
)
except ProfileDirError:
return
path = profile_dir.location
return super(ProfileAwareConfigLoader, self).load_subconfig(fname, path=path)
class BaseYAPApplication(Application):
name = u'ipython'
description = Unicode(u'yap_ipython: an enhanced interactive Python shell.')
version = Unicode(release.version)
aliases = base_aliases
flags = base_flags
classes = List([ProfileDir])
# enable `load_subconfig('cfg.py', profile='name')`
python_config_loader_class = ProfileAwareConfigLoader
# Track whether the config_file has changed,
# because some logic happens only if we aren't using the default.
config_file_specified = Set()
config_file_name = Unicode()
@default('config_file_name')
def _config_file_name_default(self):
return self.name.replace('-','_') + u'_config.py'
@observe('config_file_name')
def _config_file_name_changed(self, change):
if change['new'] != change['old']:
self.config_file_specified.add(change['new'])
# The directory that contains yap_ipython's builtin profiles.
builtin_profile_dir = Unicode(
os.path.join(get_ipython_package_dir(), u'config', u'profile', u'default')
)
config_file_paths = List(Unicode())
@default('config_file_paths')
def _config_file_paths_default(self):
return [os.getcwd()]
extra_config_file = Unicode(
help="""Path to an extra config file to load.
If specified, load this config file in addition to any other yap_ipython config.
""").tag(config=True)
@observe('extra_config_file')
def _extra_config_file_changed(self, change):
old = change['old']
new = change['new']
try:
self.config_files.remove(old)
except ValueError:
pass
self.config_file_specified.add(new)
self.config_files.append(new)
profile = Unicode(u'default',
help="""The yap_ipython profile to use."""
).tag(config=True)
@observe('profile')
def _profile_changed(self, change):
self.builtin_profile_dir = os.path.join(
get_ipython_package_dir(), u'config', u'profile', change['new']
)
ipython_dir = Unicode(
help="""
The name of the yap_ipython directory. This directory is used for logging
configuration (through profiles), history storage, etc. The default
is usually $HOME/.ipython. This option can also be specified through
the environment variable IPYTHONDIR.
"""
).tag(config=True)
@default('ipython_dir')
def _ipython_dir_default(self):
d = get_ipython_dir()
self._ipython_dir_changed({
'name': 'ipython_dir',
'old': d,
'new': d,
})
return d
_in_init_profile_dir = False
profile_dir = Instance(ProfileDir, allow_none=True)
@default('profile_dir')
def _profile_dir_default(self):
# avoid recursion
if self._in_init_profile_dir:
return
# profile_dir requested early, force initialization
self.init_profile_dir()
return self.profile_dir
overwrite = Bool(False,
help="""Whether to overwrite existing config files when copying"""
).tag(config=True)
auto_create = Bool(False,
help="""Whether to create profile dir if it doesn't exist"""
).tag(config=True)
config_files = List(Unicode())
@default('config_files')
def _config_files_default(self):
return [self.config_file_name]
copy_config_files = Bool(False,
help="""Whether to install the default config files into the profile dir.
If a new profile is being created, and yap_ipython contains config files for that
profile, then they will be staged into the new directory. Otherwise,
default config files will be automatically generated.
""").tag(config=True)
verbose_crash = Bool(False,
help="""Create a massive crash report when yap_ipython encounters what may be an
internal error. The default is to append a short message to the
usual traceback""").tag(config=True)
# The class to use as the crash handler.
crash_handler_class = Type(crashhandler.CrashHandler)
@catch_config_error
def __init__(self, **kwargs):
super(BaseYAPApplication, self).__init__(**kwargs)
# ensure current working directory exists
try:
os.getcwd()
except:
# exit if cwd doesn't exist
self.log.error("Current working directory doesn't exist.")
self.exit(1)
#-------------------------------------------------------------------------
# Various stages of Application creation
#-------------------------------------------------------------------------
deprecated_subcommands = {}
def initialize_subcommand(self, subc, argv=None):
if subc in self.deprecated_subcommands:
self.log.warning("Subcommand `ipython {sub}` is deprecated and will be removed "
"in future versions.".format(sub=subc))
self.log.warning("You likely want to use `jupyter {sub}` in the "
"future".format(sub=subc))
return super(BaseYAPApplication, self).initialize_subcommand(subc, argv)
def init_crash_handler(self):
"""Create a crash handler, typically setting sys.excepthook to it."""
self.crash_handler = self.crash_handler_class(self)
sys.excepthook = self.excepthook
def unset_crashhandler():
sys.excepthook = sys.__excepthook__
atexit.register(unset_crashhandler)
def excepthook(self, etype, evalue, tb):
"""this is sys.excepthook after init_crashhandler
set self.verbose_crash=True to use our full crashhandler, instead of
a regular traceback with a short message (crash_handler_lite)
"""
if self.verbose_crash:
return self.crash_handler(etype, evalue, tb)
else:
return crashhandler.crash_handler_lite(etype, evalue, tb)
@observe('ipython_dir')
def _ipython_dir_changed(self, change):
old = change['old']
new = change['new']
if old is not Undefined:
str_old = os.path.abspath(old)
if str_old in sys.path:
sys.path.remove(str_old)
str_path = os.path.abspath(new)
sys.path.append(str_path)
ensure_dir_exists(new)
readme = os.path.join(new, 'README')
readme_src = os.path.join(get_ipython_package_dir(), u'config', u'profile', 'README')
if not os.path.exists(readme) and os.path.exists(readme_src):
shutil.copy(readme_src, readme)
for d in ('extensions', 'nbextensions'):
path = os.path.join(new, d)
try:
ensure_dir_exists(path)
except OSError as e:
# this will not be EEXIST
self.log.error("couldn't create path %s: %s", path, e)
self.log.debug("IPYTHONDIR set to: %s" % new)
def load_config_file(self, suppress_errors=IPYTHON_SUPPRESS_CONFIG_ERRORS):
"""Load the config file.
By default, errors in loading config are handled, and a warning
printed on screen. For testing, the suppress_errors option is set
to False, so errors will make tests fail.
`suppress_errors` defaults to `None`, in which case the behavior
defaults to that of `traitlets.Application`.
The default value can be set:
- to `False` by setting the 'IPYTHON_SUPPRESS_CONFIG_ERRORS' environment variable to '0' or 'false' (case insensitive).
- to `True` by setting 'IPYTHON_SUPPRESS_CONFIG_ERRORS' to '1' or 'true' (case insensitive).
- to `None` by setting 'IPYTHON_SUPPRESS_CONFIG_ERRORS' to '' (empty string) or leaving it unset.
Any other value is invalid and will make yap_ipython exit with a non-zero return code.
"""
self.log.debug("Searching path %s for config files", self.config_file_paths)
base_config = 'ipython_config.py'
self.log.debug("Attempting to load config file: %s" %
base_config)
try:
if suppress_errors is not None:
old_value = Application.raise_config_file_errors
Application.raise_config_file_errors = not suppress_errors
Application.load_config_file(
self,
base_config,
path=self.config_file_paths
)
except ConfigFileNotFound:
# ignore errors loading parent
self.log.debug("Config file %s not found", base_config)
pass
if suppress_errors is not None:
Application.raise_config_file_errors = old_value
for config_file_name in self.config_files:
if not config_file_name or config_file_name == base_config:
continue
self.log.debug("Attempting to load config file: %s" %
self.config_file_name)
try:
Application.load_config_file(
self,
config_file_name,
path=self.config_file_paths
)
except ConfigFileNotFound:
# Only warn if the default config file was NOT being used.
if config_file_name in self.config_file_specified:
msg = self.log.warning
else:
msg = self.log.debug
msg("Config file not found, skipping: %s", config_file_name)
except Exception:
# For testing purposes.
if not suppress_errors:
raise
self.log.warning("Error loading config file: %s" %
self.config_file_name, exc_info=True)
def init_profile_dir(self):
"""initialize the profile dir"""
self._in_init_profile_dir = True
if self.profile_dir is not None:
# already ran
return
if 'ProfileDir.location' not in self.config:
# location not specified, find by profile name
try:
p = ProfileDir.find_profile_dir_by_name(self.ipython_dir, self.profile, self.config)
except ProfileDirError:
# not found, maybe create it (always create default profile)
if self.auto_create or self.profile == 'default':
try:
p = ProfileDir.create_profile_dir_by_name(self.ipython_dir, self.profile, self.config)
except ProfileDirError:
self.log.fatal("Could not create profile: %r"%self.profile)
self.exit(1)
else:
self.log.info("Created profile dir: %r"%p.location)
else:
self.log.fatal("Profile %r not found."%self.profile)
self.exit(1)
else:
self.log.debug("Using existing profile dir: %r"%p.location)
else:
location = self.config.ProfileDir.location
# location is fully specified
try:
p = ProfileDir.find_profile_dir(location, self.config)
except ProfileDirError:
# not found, maybe create it
if self.auto_create:
try:
p = ProfileDir.create_profile_dir(location, self.config)
except ProfileDirError:
self.log.fatal("Could not create profile directory: %r"%location)
self.exit(1)
else:
self.log.debug("Creating new profile dir: %r"%location)
else:
self.log.fatal("Profile directory %r not found."%location)
self.exit(1)
else:
self.log.info("Using existing profile dir: %r"%location)
# if profile_dir is specified explicitly, set profile name
dir_name = os.path.basename(p.location)
if dir_name.startswith('profile_'):
self.profile = dir_name[8:]
self.profile_dir = p
self.config_file_paths.append(p.location)
self._in_init_profile_dir = False
def init_config_files(self):
"""[optionally] copy default config files into profile dir."""
self.config_file_paths.extend(ENV_CONFIG_DIRS)
self.config_file_paths.extend(SYSTEM_CONFIG_DIRS)
# copy config files
path = self.builtin_profile_dir
if self.copy_config_files:
src = self.profile
cfg = self.config_file_name
if path and os.path.exists(os.path.join(path, cfg)):
self.log.warning("Staging %r from %s into %r [overwrite=%s]"%(
cfg, src, self.profile_dir.location, self.overwrite)
)
self.profile_dir.copy_config_file(cfg, path=path, overwrite=self.overwrite)
else:
self.stage_default_config_file()
else:
# Still stage *bundled* config files, but not generated ones
# This is necessary for `ipython profile=sympy` to load the profile
# on the first go
files = glob.glob(os.path.join(path, '*.py'))
for fullpath in files:
cfg = os.path.basename(fullpath)
if self.profile_dir.copy_config_file(cfg, path=path, overwrite=False):
# file was copied
self.log.warning("Staging bundled %s from %s into %r"%(
cfg, self.profile, self.profile_dir.location)
)
def stage_default_config_file(self):
"""auto generate default config file, and stage it into the profile."""
s = self.generate_config_file()
fname = os.path.join(self.profile_dir.location, self.config_file_name)
if self.overwrite or not os.path.exists(fname):
self.log.warning("Generating default config file: %r"%(fname))
with open(fname, 'w') as f:
f.write(s)
@catch_config_error
def initialize(self, argv=None):
# don't hook up crash handler before parsing command-line
self.parse_command_line(argv)
self.init_crash_handler()
if self.subapp is not None:
# stop here if subapp is taking over
return
# save a copy of CLI config to re-load after config files
# so that it has highest priority
cl_config = deepcopy(self.config)
self.init_profile_dir()
self.init_config_files()
self.load_config_file()
# enforce cl-opts override configfile opts:
self.update_config(cl_config)
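A minimal sketch (not part of the diff above) of how the three IPYTHON_SUPPRESS_CONFIG_ERRORS states described in load_config_file() could be derived from the environment; the helper name is hypothetical.
import os

def _suppress_errors_from_env(name='IPYTHON_SUPPRESS_CONFIG_ERRORS'):
    raw = os.environ.get(name, '').strip().lower()
    if raw == '':
        return None        # unset/empty: defer to traitlets.Application behaviour
    if raw in ('0', 'false'):
        return False       # config-file errors are raised (used by the test suite)
    if raw in ('1', 'true'):
        return True        # config-file errors are logged and otherwise ignored
    raise SystemExit("invalid value for %s: %r" % (name, raw))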

View File

@@ -0,0 +1,70 @@
# encoding: utf-8
"""
Autocall capabilities for yap_ipython.core.
Authors:
* Brian Granger
* Fernando Perez
* Thomas Kluyver
Notes
-----
"""
#-----------------------------------------------------------------------------
# Copyright (C) 2008-2011 The yap_ipython Development Team
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Code
#-----------------------------------------------------------------------------
class IPyAutocall(object):
""" Instances of this class are always autocalled
This happens regardless of 'autocall' variable state. Use this to
develop macro-like mechanisms.
"""
_ip = None
rewrite = True
def __init__(self, ip=None):
self._ip = ip
def set_ip(self, ip):
""" Will be used to set _ip point to current ipython instance b/f call
Override this method if you don't want this to happen.
"""
self._ip = ip
class ExitAutocall(IPyAutocall):
"""An autocallable object which will be added to the user namespace so that
exit, exit(), quit or quit() are all valid ways to close the shell."""
rewrite = False
def __call__(self):
self._ip.ask_exit()
class ZMQExitAutocall(ExitAutocall):
"""Exit yap_ipython. Autocallable, so it needn't be explicitly called.
Parameters
----------
keep_kernel : bool
If True, leave the kernel alive. Otherwise, tell the kernel to exit too
(default).
"""
def __call__(self, keep_kernel=False):
self._ip.keepkernel_on_exit = keep_kernel
self._ip.ask_exit()
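A hedged usage sketch for the autocall hook above: a custom IPyAutocall subclass is called whenever its name is typed alone, regardless of the autocall setting. The import path and the %reset magic are assumptions about the surrounding yap_ipython installation.
from yap_ipython.core.autocall import IPyAutocall   # assumed module path

class ClearNamespace(IPyAutocall):
    rewrite = False                     # do not rewrite the input line

    def __call__(self):
        # _ip is injected through set_ip() before the call
        self._ip.run_line_magic('reset', '-f')

# Typical registration, e.g. in a startup file:
#   get_ipython().user_ns['wipe'] = ClearNamespace()
# after which typing `wipe` on its own triggers __call__().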

View File

@@ -0,0 +1,86 @@
"""
A context manager for managing things injected into :mod:`builtins`.
"""
# Copyright (c) yap_ipython Development Team.
# Distributed under the terms of the Modified BSD License.
import builtins as builtin_mod
from traitlets.config.configurable import Configurable
from traitlets import Instance
class __BuiltinUndefined(object): pass
BuiltinUndefined = __BuiltinUndefined()
class __HideBuiltin(object): pass
HideBuiltin = __HideBuiltin()
class BuiltinTrap(Configurable):
shell = Instance('yap_ipython.core.interactiveshell.InteractiveShellABC',
allow_none=True)
def __init__(self, shell=None):
super(BuiltinTrap, self).__init__(shell=shell, config=None)
self._orig_builtins = {}
# We define this to track if a single BuiltinTrap is nested.
# Only turn off the trap when the outermost call to __exit__ is made.
self._nested_level = 0
self.shell = shell
# builtins we always add - if set to HideBuiltin, they will just
# be removed instead of being replaced by something else
self.auto_builtins = {'exit': HideBuiltin,
'quit': HideBuiltin,
'get_ipython': self.shell.get_ipython,
}
def __enter__(self):
if self._nested_level == 0:
self.activate()
self._nested_level += 1
# I return self, so callers can use add_builtin in a with clause.
return self
def __exit__(self, type, value, traceback):
if self._nested_level == 1:
self.deactivate()
self._nested_level -= 1
# Returning False will cause exceptions to propagate
return False
def add_builtin(self, key, value):
"""Add a builtin and save the original."""
bdict = builtin_mod.__dict__
orig = bdict.get(key, BuiltinUndefined)
if value is HideBuiltin:
if orig is not BuiltinUndefined: #same as 'key in bdict'
self._orig_builtins[key] = orig
del bdict[key]
else:
self._orig_builtins[key] = orig
bdict[key] = value
def remove_builtin(self, key, orig):
"""Remove an added builtin and re-set the original."""
if orig is BuiltinUndefined:
del builtin_mod.__dict__[key]
else:
builtin_mod.__dict__[key] = orig
def activate(self):
"""Store ipython references in the __builtin__ namespace."""
add_builtin = self.add_builtin
for name, func in self.auto_builtins.items():
add_builtin(name, func)
def deactivate(self):
"""Remove any builtins which might have been added by add_builtins, or
restore overwritten ones to their previous values."""
remove_builtin = self.remove_builtin
for key, val in self._orig_builtins.items():
remove_builtin(key, val)
self._orig_builtins.clear()
self._builtins_added = False
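A small sketch of the nesting behaviour implemented above: the trap only installs and removes builtins at the outermost __enter__/__exit__ pair. It assumes a running yap_ipython shell and the module path shown in the import.
from yap_ipython import get_ipython
from yap_ipython.core.builtin_trap import BuiltinTrap   # assumed module path

shell = get_ipython()          # assumes yap_ipython is running (None otherwise)
trap = BuiltinTrap(shell=shell)
with trap:                     # outermost level: activate() hides exit/quit
    with trap:                 # nested level: only the counter changes
        pass
    # the trap is still active here
# outermost __exit__ has run: the original builtins are restored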

View File

@@ -0,0 +1,143 @@
"""Compiler tools with improved interactive support.
Provides compilation machinery similar to codeop, but with caching support so
we can provide interactive tracebacks.
Authors
-------
* Robert Kern
* Fernando Perez
* Thomas Kluyver
"""
# Note: though it might be more natural to name this module 'compiler', that
# name is in the stdlib and name collisions with the stdlib tend to produce
# weird problems (often with third-party tools).
#-----------------------------------------------------------------------------
# Copyright (C) 2010-2011 The yap_ipython Development Team.
#
# Distributed under the terms of the BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
# Stdlib imports
import __future__
from ast import PyCF_ONLY_AST
import codeop
import functools
import hashlib
import linecache
import operator
import time
#-----------------------------------------------------------------------------
# Constants
#-----------------------------------------------------------------------------
# Roughly equal to PyCF_MASK | PyCF_MASK_OBSOLETE as defined in pythonrun.h,
# this is used as a bitmask to extract future-related code flags.
PyCF_MASK = functools.reduce(operator.or_,
(getattr(__future__, fname).compiler_flag
for fname in __future__.all_feature_names))
#-----------------------------------------------------------------------------
# Local utilities
#-----------------------------------------------------------------------------
def code_name(code, number=0):
""" Compute a (probably) unique name for code for caching.
This now expects code to be unicode.
"""
hash_digest = hashlib.sha1(code.encode("utf-8")).hexdigest()
# Include the number and 12 characters of the hash in the name. It's
# pretty much impossible that in a single session we'll have collisions
# even with truncated hashes, and the full one makes tracebacks too long
return '<ipython-input-{0}-{1}>'.format(number, hash_digest[:12])
#-----------------------------------------------------------------------------
# Classes and functions
#-----------------------------------------------------------------------------
class CachingCompiler(codeop.Compile):
"""A compiler that caches code compiled from interactive statements.
"""
def __init__(self):
codeop.Compile.__init__(self)
# This is ugly, but it must be done this way to allow multiple
# simultaneous ipython instances to coexist. Since Python itself
# directly accesses the data structures in the linecache module, and
# the cache therein is global, we must work with that data structure.
# We must hold a reference to the original checkcache routine and call
# that in our own check_cache() below, but the special yap_ipython cache
# must also be shared by all yap_ipython instances. If we were to hold
# separate caches (one in each CachingCompiler instance), any call made
# by Python itself to linecache.checkcache() would obliterate the
# cached data from the other yap_ipython instances.
if not hasattr(linecache, '_ipython_cache'):
linecache._ipython_cache = {}
if not hasattr(linecache, '_checkcache_ori'):
linecache._checkcache_ori = linecache.checkcache
# Now, we must monkeypatch the linecache directly so that parts of the
# stdlib that call it outside our control go through our codepath
# (otherwise we'd lose our tracebacks).
linecache.checkcache = check_linecache_ipython
def ast_parse(self, source, filename='<unknown>', symbol='exec'):
"""Parse code to an AST with the current compiler flags active.
Arguments are exactly the same as ast.parse (in the standard library),
and are passed to the built-in compile function."""
return compile(source, filename, symbol, self.flags | PyCF_ONLY_AST, 1)
def reset_compiler_flags(self):
"""Reset compiler flags to default state."""
# This value is copied from codeop.Compile.__init__, so if that ever
# changes, it will need to be updated.
self.flags = codeop.PyCF_DONT_IMPLY_DEDENT
@property
def compiler_flags(self):
"""Flags currently active in the compilation process.
"""
return self.flags
def cache(self, code, number=0):
"""Make a name for a block of code, and cache the code.
Parameters
----------
code : str
The Python source code to cache.
number : int
A number which forms part of the code's name. Used for the execution
counter.
Returns
-------
The name of the cached code (as a string). Pass this as the filename
argument to compilation, so that tracebacks are correctly hooked up.
"""
name = code_name(code, number)
entry = (len(code), time.time(),
[line+'\n' for line in code.splitlines()], name)
linecache.cache[name] = entry
linecache._ipython_cache[name] = entry
return name
def check_linecache_ipython(*args):
"""Call linecache.checkcache() safely protecting our cached values.
"""
# First call the original checkcache as intended
linecache._checkcache_ori(*args)
# Then, update back the cache with our data, so that tracebacks related
# to our compiled codes can be produced.
linecache.cache.update(linecache._ipython_cache)
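A minimal usage sketch for the caching compiler above (import path assumed): cache a snippet, compile it under the returned pseudo-filename, and linecache can then recover the source for tracebacks.
import linecache
from yap_ipython.core.compilerop import CachingCompiler   # assumed module path

compiler = CachingCompiler()
src = "x = 1\nraise ValueError(x)\n"
fname = compiler.cache(src, number=7)    # e.g. '<ipython-input-7-...>'
code = compiler(src, fname, 'exec')      # codeop.Compile.__call__
print(linecache.getlines(fname)[0])      # 'x = 1\n', served from the cache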

File diff suppressed because it is too large

View File

@@ -0,0 +1,354 @@
# encoding: utf-8
"""Implementations for various useful completers.
These are all loaded by default by yap_ipython.
"""
#-----------------------------------------------------------------------------
# Copyright (C) 2010-2011 The yap_ipython Development Team.
#
# Distributed under the terms of the BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
# Stdlib imports
import glob
import inspect
import os
import re
import sys
from importlib import import_module
from importlib.machinery import all_suffixes
# Third-party imports
from time import time
from zipimport import zipimporter
# Our own imports
from yap_ipython.core.completer import expand_user, compress_user
from yap_ipython.core.error import TryNext
from yap_ipython.utils._process_common import arg_split
# FIXME: this should be pulled in with the right call via the component system
from yap_ipython import get_ipython
from typing import List
#-----------------------------------------------------------------------------
# Globals and constants
#-----------------------------------------------------------------------------
_suffixes = all_suffixes()
# Time in seconds after which the rootmodules will be stored permanently in the
# ipython ip.db database (kept in the user's .ipython dir).
TIMEOUT_STORAGE = 2
# Time in seconds after which we give up
TIMEOUT_GIVEUP = 20
# Regular expression for the python import statement
import_re = re.compile(r'(?P<name>[a-zA-Z_][a-zA-Z0-9_]*?)'
r'(?P<package>[/\\]__init__)?'
r'(?P<suffix>%s)$' %
r'|'.join(re.escape(s) for s in _suffixes))
# RE for the ipython %run command (python + ipython scripts)
magic_run_re = re.compile(r'.*(\.ipy|\.ipynb|\.py[w]?)$')
#-----------------------------------------------------------------------------
# Local utilities
#-----------------------------------------------------------------------------
def module_list(path):
"""
Return the list containing the names of the modules available in the given
folder.
"""
# sys.path has the cwd as an empty string, but isdir/listdir need it as '.'
if path == '':
path = '.'
# A few local constants to be used in loops below
pjoin = os.path.join
if os.path.isdir(path):
# Build a list of all files in the directory and all files
# in its subdirectories. For performance reasons, do not
# recurse more than one level into subdirectories.
files = []
for root, dirs, nondirs in os.walk(path, followlinks=True):
subdir = root[len(path)+1:]
if subdir:
files.extend(pjoin(subdir, f) for f in nondirs)
dirs[:] = [] # Do not recurse into additional subdirectories.
else:
files.extend(nondirs)
else:
try:
files = list(zipimporter(path)._files.keys())
except:
files = []
# Build a list of modules which match the import_re regex.
modules = []
for f in files:
m = import_re.match(f)
if m:
modules.append(m.group('name'))
return list(set(modules))
def get_root_modules():
"""
Returns a list containing the names of all the modules available in the
folders of the pythonpath.
ip.db['rootmodules_cache'] maps sys.path entries to list of modules.
"""
ip = get_ipython()
if ip is None:
# No global shell instance to store cached list of modules.
# Don't try to scan for modules every time.
return list(sys.builtin_module_names)
rootmodules_cache = ip.db.get('rootmodules_cache', {})
rootmodules = list(sys.builtin_module_names)
start_time = time()
store = False
for path in sys.path:
try:
modules = rootmodules_cache[path]
except KeyError:
modules = module_list(path)
try:
modules.remove('__init__')
except ValueError:
pass
if path not in ('', '.'): # cwd modules should not be cached
rootmodules_cache[path] = modules
if time() - start_time > TIMEOUT_STORAGE and not store:
store = True
print("\nCaching the list of root modules, please wait!")
print("(This will only be done once - type '%rehashx' to "
"reset cache!)\n")
sys.stdout.flush()
if time() - start_time > TIMEOUT_GIVEUP:
print("This is taking too long, we give up.\n")
return []
rootmodules.extend(modules)
if store:
ip.db['rootmodules_cache'] = rootmodules_cache
rootmodules = list(set(rootmodules))
return rootmodules
def is_importable(module, attr, only_modules):
if only_modules:
return inspect.ismodule(getattr(module, attr))
else:
return not(attr[:2] == '__' and attr[-2:] == '__')
def try_import(mod: str, only_modules=False) -> List[str]:
"""
Try to import given module and return list of potential completions.
"""
mod = mod.rstrip('.')
try:
m = import_module(mod)
except:
return []
m_is_init = hasattr(m, '__file__') and '__init__' in m.__file__
completions = []
if (not hasattr(m, '__file__')) or (not only_modules) or m_is_init:
completions.extend( [attr for attr in dir(m) if
is_importable(m, attr, only_modules)])
completions.extend(getattr(m, '__all__', []))
if m_is_init:
completions.extend(module_list(os.path.dirname(m.__file__)))
completions_set = {c for c in completions if isinstance(c, str)}
completions_set.discard('__init__')
return list(completions_set)
#-----------------------------------------------------------------------------
# Completion-related functions.
#-----------------------------------------------------------------------------
def quick_completer(cmd, completions):
""" Easily create a trivial completer for a command.
Takes either a list of completions, or a string of completions that will
be split on whitespace.
Example::
[d:\ipython]|1> import ipy_completers
[d:\ipython]|2> ipy_completers.quick_completer('foo', ['bar','baz'])
[d:\ipython]|3> foo b<TAB>
bar baz
[d:\ipython]|3> foo ba
"""
if isinstance(completions, str):
completions = completions.split()
def do_complete(self, event):
return completions
get_ipython().set_hook('complete_command',do_complete, str_key = cmd)
def module_completion(line):
"""
Returns a list containing the completion possibilities for an import line.
The line looks like this:
'import xml.d'
'from xml.dom import'
"""
words = line.split(' ')
nwords = len(words)
# from whatever <tab> -> 'import '
if nwords == 3 and words[0] == 'from':
return ['import ']
# 'from xy<tab>' or 'import xy<tab>'
if nwords < 3 and (words[0] in {'%aimport', 'import', 'from'}) :
if nwords == 1:
return get_root_modules()
mod = words[1].split('.')
if len(mod) < 2:
return get_root_modules()
completion_list = try_import('.'.join(mod[:-1]), True)
return ['.'.join(mod[:-1] + [el]) for el in completion_list]
# 'from xyz import abc<tab>'
if nwords >= 3 and words[0] == 'from':
mod = words[1]
return try_import(mod)
#-----------------------------------------------------------------------------
# Completers
#-----------------------------------------------------------------------------
# These all have the func(self, event) signature to be used as custom
# completers
def module_completer(self,event):
"""Give completions after user has typed 'import ...' or 'from ...'"""
# This works in all versions of python. While 2.5 has
# pkgutil.walk_packages(), that particular routine is fairly dangerous,
# since it imports *EVERYTHING* on sys.path. That is: a) very slow b) full
# of possibly problematic side effects.
# This searches the folders in sys.path for available modules.
return module_completion(event.line)
# FIXME: there's a lot of logic common to the run, cd and builtin file
# completers, that is currently reimplemented in each.
def magic_run_completer(self, event):
"""Complete files that end in .py or .ipy or .ipynb for the %run command.
"""
comps = arg_split(event.line, strict=False)
# relpath should be the current token that we need to complete.
if (len(comps) > 1) and (not event.line.endswith(' ')):
relpath = comps[-1].strip("'\"")
else:
relpath = ''
#print("\nev=", event) # dbg
#print("rp=", relpath) # dbg
#print('comps=', comps) # dbg
lglob = glob.glob
isdir = os.path.isdir
relpath, tilde_expand, tilde_val = expand_user(relpath)
# Find if the user has already typed the first filename, after which we
# should complete on all files, since after the first one other files may
# be arguments to the input script.
if any(magic_run_re.match(c) for c in comps):
matches = [f.replace('\\','/') + ('/' if isdir(f) else '')
for f in lglob(relpath+'*')]
else:
dirs = [f.replace('\\','/') + "/" for f in lglob(relpath+'*') if isdir(f)]
pys = [f.replace('\\','/')
for f in lglob(relpath+'*.py') + lglob(relpath+'*.ipy') +
lglob(relpath+'*.ipynb') + lglob(relpath + '*.pyw')]
matches = dirs + pys
#print('run comp:', dirs+pys) # dbg
return [compress_user(p, tilde_expand, tilde_val) for p in matches]
def cd_completer(self, event):
"""Completer function for cd, which only returns directories."""
ip = get_ipython()
relpath = event.symbol
#print(event) # dbg
if event.line.endswith('-b') or ' -b ' in event.line:
# return only bookmark completions
bkms = self.db.get('bookmarks', None)
if bkms:
return bkms.keys()
else:
return []
if event.symbol == '-':
width_dh = str(len(str(len(ip.user_ns['_dh']) + 1)))
# jump in directory history by number
fmt = '-%0' + width_dh +'d [%s]'
ents = [ fmt % (i,s) for i,s in enumerate(ip.user_ns['_dh'])]
if len(ents) > 1:
return ents
return []
if event.symbol.startswith('--'):
return ["--" + os.path.basename(d) for d in ip.user_ns['_dh']]
# Expand ~ in path and normalize directory separators.
relpath, tilde_expand, tilde_val = expand_user(relpath)
relpath = relpath.replace('\\','/')
found = []
for d in [f.replace('\\','/') + '/' for f in glob.glob(relpath+'*')
if os.path.isdir(f)]:
if ' ' in d:
# we don't want to deal with any of that, complex code
# for this is elsewhere
raise TryNext
found.append(d)
if not found:
if os.path.isdir(relpath):
return [compress_user(relpath, tilde_expand, tilde_val)]
# if no completions so far, try bookmarks
bks = self.db.get('bookmarks',{})
bkmatches = [s for s in bks if s.startswith(event.symbol)]
if bkmatches:
return bkmatches
raise TryNext
return [compress_user(p, tilde_expand, tilde_val) for p in found]
def reset_completer(self, event):
"A completer for %reset magic"
return '-f -s in out array dhist'.split()
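Hedged examples of the import-line completion logic documented above; module_completion() parses the line and dispatches to get_root_modules() or try_import() accordingly (import path assumed).
from yap_ipython.core.completerlib import module_completion   # assumed module path

module_completion('import ')               # root modules found on sys.path
module_completion('import xml.d')          # e.g. ['xml.dom', ...]
module_completion('from xml.dom import ')  # names importable from xml.dom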

View File

@@ -0,0 +1,215 @@
# encoding: utf-8
"""sys.excepthook for yap_ipython itself, leaves a detailed report on disk.
Authors:
* Fernando Perez
* Brian E. Granger
"""
#-----------------------------------------------------------------------------
# Copyright (C) 2001-2007 Fernando Perez. <fperez@colorado.edu>
# Copyright (C) 2008-2011 The yap_ipython Development Team
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
import os
import sys
import traceback
from pprint import pformat
from yap_ipython.core import ultratb
from yap_ipython.core.release import author_email
from yap_ipython.utils.sysinfo import sys_info
from yap_ipython.utils.py3compat import input
#-----------------------------------------------------------------------------
# Code
#-----------------------------------------------------------------------------
# Template for the user message.
_default_message_template = """\
Oops, {app_name} crashed. We do our best to make it stable, but...
A crash report was automatically generated with the following information:
- A verbatim copy of the crash traceback.
- A copy of your input history during this session.
- Data on your current {app_name} configuration.
It was left in the file named:
\t'{crash_report_fname}'
If you can email this file to the developers, the information in it will help
them in understanding and correcting the problem.
You can mail it to: {contact_name} at {contact_email}
with the subject '{app_name} Crash Report'.
If you want to do it now, the following command will work (under Unix):
mail -s '{app_name} Crash Report' {contact_email} < {crash_report_fname}
To ensure accurate tracking of this issue, please file a report about it at:
{bug_tracker}
"""
_lite_message_template = """
If you suspect this is a yap_ipython bug, please report it at:
https://github.com/ipython/ipython/issues
or send an email to the mailing list at {email}
You can print a more detailed traceback right now with "%tb", or use "%debug"
to interactively debug it.
Extra-detailed tracebacks for bug-reporting purposes can be enabled via:
{config}Application.verbose_crash=True
"""
class CrashHandler(object):
"""Customizable crash handlers for yap_ipython applications.
Instances of this class provide a :meth:`__call__` method which can be
used as a ``sys.excepthook``. The :meth:`__call__` signature is::
def __call__(self, etype, evalue, etb)
"""
message_template = _default_message_template
section_sep = '\n\n'+'*'*75+'\n\n'
def __init__(self, app, contact_name=None, contact_email=None,
bug_tracker=None, show_crash_traceback=True, call_pdb=False):
"""Create a new crash handler
Parameters
----------
app : Application
A running :class:`Application` instance, which will be queried at
crash time for internal information.
contact_name : str
A string with the name of the person to contact.
contact_email : str
A string with the email address of the contact.
bug_tracker : str
A string with the URL for your project's bug tracker.
show_crash_traceback : bool
If false, don't print the crash traceback on stderr, only generate
the on-disk report
Non-argument instance attributes:
These instances contain some non-argument attributes which allow for
further customization of the crash handler's behavior. Please see the
source for further details.
"""
self.crash_report_fname = "Crash_report_%s.txt" % app.name
self.app = app
self.call_pdb = call_pdb
#self.call_pdb = True # dbg
self.show_crash_traceback = show_crash_traceback
self.info = dict(app_name = app.name,
contact_name = contact_name,
contact_email = contact_email,
bug_tracker = bug_tracker,
crash_report_fname = self.crash_report_fname)
def __call__(self, etype, evalue, etb):
"""Handle an exception, call for compatible with sys.excepthook"""
# do not allow the crash handler to be called twice without reinstalling it
# this prevents unlikely errors in the crash handling from entering an
# infinite loop.
sys.excepthook = sys.__excepthook__
# Report tracebacks shouldn't use color in general (safer for users)
color_scheme = 'NoColor'
# Use this ONLY for developer debugging (keep commented out for release)
#color_scheme = 'Linux' # dbg
try:
rptdir = self.app.ipython_dir
except:
rptdir = os.getcwd()
if rptdir is None or not os.path.isdir(rptdir):
rptdir = os.getcwd()
report_name = os.path.join(rptdir,self.crash_report_fname)
# write the report filename into the instance dict so it can get
# properly expanded out in the user message template
self.crash_report_fname = report_name
self.info['crash_report_fname'] = report_name
TBhandler = ultratb.VerboseTB(
color_scheme=color_scheme,
long_header=1,
call_pdb=self.call_pdb,
)
if self.call_pdb:
TBhandler(etype,evalue,etb)
return
else:
traceback = TBhandler.text(etype,evalue,etb,context=31)
# print traceback to screen
if self.show_crash_traceback:
print(traceback, file=sys.stderr)
# and generate a complete report on disk
try:
report = open(report_name,'w')
except:
print('Could not create crash report on disk.', file=sys.stderr)
return
# Inform user on stderr of what happened
print('\n'+'*'*70+'\n', file=sys.stderr)
print(self.message_template.format(**self.info), file=sys.stderr)
# Construct report on disk
report.write(self.make_report(traceback))
report.close()
input("Hit <Enter> to quit (your terminal may close):")
def make_report(self,traceback):
"""Return a string containing a crash report."""
sec_sep = self.section_sep
report = ['*'*75+'\n\n'+'yap_ipython post-mortem report\n\n']
rpt_add = report.append
rpt_add(sys_info())
try:
config = pformat(self.app.config)
rpt_add(sec_sep)
rpt_add('Application name: %s\n\n' % self.app.name)
rpt_add('Current user configuration structure:\n\n')
rpt_add(config)
except:
pass
rpt_add(sec_sep+'Crash traceback:\n\n' + traceback)
return ''.join(report)
def crash_handler_lite(etype, evalue, tb):
"""a light excepthook, adding a small message to the usual traceback"""
traceback.print_exception(etype, evalue, tb)
from yap_ipython.core.interactiveshell import InteractiveShell
if InteractiveShell.initialized():
# we are in a Shell environment, give %magic example
config = "%config "
else:
# we are not in a shell, show generic config
config = "c."
print(_lite_message_template.format(email=author_email, config=config), file=sys.stderr)
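A hedged sketch of wiring the crash handler above into sys.excepthook, as BaseYAPApplication.init_crash_handler() does; `app` is assumed to be an existing traitlets Application, and the contact/tracker values are placeholders.
import sys
from yap_ipython.core.crashhandler import CrashHandler   # assumed module path

handler = CrashHandler(app,                               # an existing Application
                       contact_name='YAP developers',     # placeholder
                       contact_email='devs@example.org',  # placeholder
                       bug_tracker='https://example.org/tracker')  # placeholder
sys.excepthook = handler   # an uncaught exception now writes Crash_report_<app.name>.txt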

View File

@@ -0,0 +1,645 @@
# -*- coding: utf-8 -*-
"""
Pdb debugger class.
Modified from the standard pdb.Pdb class to avoid including readline, so that
the command line completion of other programs which include this isn't
damaged.
In the future, this class will be expanded with improvements over the standard
pdb.
The code in this file is mainly lifted out of cmd.py in Python 2.2, with minor
changes. Licensing should therefore be under the standard Python terms. For
details on the PSF (Python Software Foundation) standard license, see:
https://docs.python.org/2/license.html
"""
#*****************************************************************************
#
# This file is licensed under the PSF license.
#
# Copyright (C) 2001 Python Software Foundation, www.python.org
# Copyright (C) 2005-2006 Fernando Perez. <fperez@colorado.edu>
#
#
#*****************************************************************************
import bdb
import functools
import inspect
import linecache
import sys
import warnings
import re
from yap_ipython import get_ipython
from yap_ipython.utils import PyColorize
from yap_ipython.utils import coloransi, py3compat
from yap_ipython.core.excolors import exception_colors
from yap_ipython.testing.skipdoctest import skip_doctest
prompt = 'ipdb> '
#We have to check this directly from sys.argv, config struct not yet available
from pdb import Pdb as OldPdb
# Allow the set_trace code to operate outside of an ipython instance, even if
# it does so with some limitations. The rest of this support is implemented in
# the Tracer constructor.
def make_arrow(pad):
"""generate the leading arrow in front of traceback or debugger"""
if pad >= 2:
return '-'*(pad-2) + '> '
elif pad == 1:
return '>'
return ''
def BdbQuit_excepthook(et, ev, tb, excepthook=None):
"""Exception hook which handles `BdbQuit` exceptions.
All other exceptions are processed using the `excepthook`
parameter.
"""
warnings.warn("`BdbQuit_excepthook` is deprecated since version 5.1",
DeprecationWarning, stacklevel=2)
if et==bdb.BdbQuit:
print('Exiting Debugger.')
elif excepthook is not None:
excepthook(et, ev, tb)
else:
# Backwards compatibility. Raise deprecation warning?
BdbQuit_excepthook.excepthook_ori(et,ev,tb)
def BdbQuit_IPython_excepthook(self,et,ev,tb,tb_offset=None):
warnings.warn(
"`BdbQuit_IPython_excepthook` is deprecated since version 5.1",
DeprecationWarning, stacklevel=2)
print('Exiting Debugger.')
class Tracer(object):
"""
DEPRECATED
Class for local debugging, similar to pdb.set_trace.
Instances of this class, when called, behave like pdb.set_trace, but
providing yap_ipython's enhanced capabilities.
This is implemented as a class which must be initialized in your own code
and not as a standalone function because we need to detect at runtime
whether yap_ipython is already active or not. That detection is done in the
constructor, ensuring that this code plays nicely with a running yap_ipython,
while functioning acceptably (though with limitations) if outside of it.
"""
@skip_doctest
def __init__(self, colors=None):
"""
DEPRECATED
Create a local debugger instance.
Parameters
----------
colors : str, optional
The name of the color scheme to use, it must be one of yap_ipython's
valid color schemes. If not given, the function will default to
the current yap_ipython scheme when running inside yap_ipython, and to
'NoColor' otherwise.
Examples
--------
::
from yap_ipython.core.debugger import Tracer; debug_here = Tracer()
Later in your code::
debug_here() # -> will open up the debugger at that point.
Once the debugger activates, you can use all of its regular commands to
step through code, set breakpoints, etc. See the pdb documentation
from the Python standard library for usage details.
"""
warnings.warn("`Tracer` is deprecated since version 5.1, directly use "
"`yap_ipython.core.debugger.Pdb.set_trace()`",
DeprecationWarning, stacklevel=2)
ip = get_ipython()
if ip is None:
# Outside of ipython, we set our own exception hook manually
sys.excepthook = functools.partial(BdbQuit_excepthook,
excepthook=sys.excepthook)
def_colors = 'NoColor'
else:
# In ipython, we use its custom exception handler mechanism
def_colors = ip.colors
ip.set_custom_exc((bdb.BdbQuit,), BdbQuit_IPython_excepthook)
if colors is None:
colors = def_colors
# The stdlib debugger internally uses a modified repr from the `repr`
# module, that limits the length of printed strings to a hardcoded
# limit of 30 characters. That much trimming is too aggressive, let's
# at least raise that limit to 80 chars, which should be enough for
# most interactive uses.
try:
try:
from reprlib import aRepr # Py 3
except ImportError:
from repr import aRepr # Py 2
aRepr.maxstring = 80
except:
# This is only a user-facing convenience, so any error we encounter
# here can be warned about but can be otherwise ignored. These
# printouts will tell us about problems if this API changes
import traceback
traceback.print_exc()
self.debugger = Pdb(colors)
def __call__(self):
"""Starts an interactive debugger at the point where called.
This is similar to the pdb.set_trace() function from the std lib, but
using yap_ipython's enhanced debugger."""
self.debugger.set_trace(sys._getframe().f_back)
RGX_EXTRA_INDENT = re.compile(r'(?<=\n)\s+')
def strip_indentation(multiline_string):
return RGX_EXTRA_INDENT.sub('', multiline_string)
def decorate_fn_with_doc(new_fn, old_fn, additional_text=""):
"""Make new_fn have old_fn's doc string. This is particularly useful
for the ``do_...`` commands that hook into the help system.
Adapted from a comp.lang.python posting
by Duncan Booth."""
def wrapper(*args, **kw):
return new_fn(*args, **kw)
if old_fn.__doc__:
wrapper.__doc__ = strip_indentation(old_fn.__doc__) + additional_text
return wrapper
def _file_lines(fname):
"""Return the contents of a named file as a list of lines.
This function never raises an IOError exception: if the file can't be
read, it simply returns an empty list."""
try:
outfile = open(fname)
except IOError:
return []
else:
out = outfile.readlines()
outfile.close()
return out
class Pdb(OldPdb):
"""Modified Pdb class, does not load readline.
for a standalone version that uses prompt_toolkit, see
`yap_ipython.terminal.debugger.TerminalPdb` and
`yap_ipython.terminal.debugger.set_trace()`
"""
def __init__(self, color_scheme=None, completekey=None,
stdin=None, stdout=None, context=5):
# Parent constructor:
try:
self.context = int(context)
if self.context <= 0:
raise ValueError("Context must be a positive integer")
except (TypeError, ValueError):
raise ValueError("Context must be a positive integer")
OldPdb.__init__(self, completekey, stdin, stdout)
# yap_ipython changes...
self.shell = get_ipython()
if self.shell is None:
save_main = sys.modules['__main__']
# No yap_ipython instance running, we must create one
from yap_ipython.terminal.interactiveshell import \
TerminalInteractiveShell
self.shell = TerminalInteractiveShell.instance()
# needed by any code which calls __import__("__main__") after
# the debugger was entered. See also #9941.
sys.modules['__main__'] = save_main
if color_scheme is not None:
warnings.warn(
"The `color_scheme` argument is deprecated since version 5.1",
DeprecationWarning, stacklevel=2)
else:
color_scheme = self.shell.colors
self.aliases = {}
# Create color table: we copy the default one from the traceback
# module and add a few attributes needed for debugging
self.color_scheme_table = exception_colors()
# shorthands
C = coloransi.TermColors
cst = self.color_scheme_table
cst['NoColor'].colors.prompt = C.NoColor
cst['NoColor'].colors.breakpoint_enabled = C.NoColor
cst['NoColor'].colors.breakpoint_disabled = C.NoColor
cst['Linux'].colors.prompt = C.Green
cst['Linux'].colors.breakpoint_enabled = C.LightRed
cst['Linux'].colors.breakpoint_disabled = C.Red
cst['LightBG'].colors.prompt = C.Blue
cst['LightBG'].colors.breakpoint_enabled = C.LightRed
cst['LightBG'].colors.breakpoint_disabled = C.Red
cst['Neutral'].colors.prompt = C.Blue
cst['Neutral'].colors.breakpoint_enabled = C.LightRed
cst['Neutral'].colors.breakpoint_disabled = C.Red
# Add a python parser so we can syntax highlight source while
# debugging.
self.parser = PyColorize.Parser(style=color_scheme)
self.set_colors(color_scheme)
# Set the prompt - the default prompt is '(Pdb)'
self.prompt = prompt
def set_colors(self, scheme):
"""Shorthand access to the color table scheme selector method."""
self.color_scheme_table.set_active_scheme(scheme)
self.parser.style = scheme
def interaction(self, frame, traceback):
try:
OldPdb.interaction(self, frame, traceback)
except KeyboardInterrupt:
sys.stdout.write('\n' + self.shell.get_exception_only())
def new_do_up(self, arg):
OldPdb.do_up(self, arg)
do_u = do_up = decorate_fn_with_doc(new_do_up, OldPdb.do_up)
def new_do_down(self, arg):
OldPdb.do_down(self, arg)
do_d = do_down = decorate_fn_with_doc(new_do_down, OldPdb.do_down)
def new_do_frame(self, arg):
OldPdb.do_frame(self, arg)
def new_do_quit(self, arg):
if hasattr(self, 'old_all_completions'):
self.shell.Completer.all_completions=self.old_all_completions
return OldPdb.do_quit(self, arg)
do_q = do_quit = decorate_fn_with_doc(new_do_quit, OldPdb.do_quit)
def new_do_restart(self, arg):
"""Restart command. In the context of ipython this is exactly the same
thing as 'quit'."""
self.msg("Restart doesn't make sense here. Using 'quit' instead.")
return self.do_quit(arg)
def print_stack_trace(self, context=None):
if context is None:
context = self.context
try:
context=int(context)
if context <= 0:
raise ValueError("Context must be a positive integer")
except (TypeError, ValueError):
raise ValueError("Context must be a positive integer")
try:
for frame_lineno in self.stack:
self.print_stack_entry(frame_lineno, context=context)
except KeyboardInterrupt:
pass
def print_stack_entry(self,frame_lineno, prompt_prefix='\n-> ',
context=None):
if context is None:
context = self.context
try:
context=int(context)
if context <= 0:
raise ValueError("Context must be a positive integer")
except (TypeError, ValueError):
raise ValueError("Context must be a positive integer")
print(self.format_stack_entry(frame_lineno, '', context))
# vds: >>
frame, lineno = frame_lineno
filename = frame.f_code.co_filename
self.shell.hooks.synchronize_with_editor(filename, lineno, 0)
# vds: <<
def format_stack_entry(self, frame_lineno, lprefix=': ', context=None):
if context is None:
context = self.context
try:
context=int(context)
if context <= 0:
print("Context must be a positive integer")
except (TypeError, ValueError):
print("Context must be a positive integer")
try:
import reprlib # Py 3
except ImportError:
import repr as reprlib # Py 2
ret = []
Colors = self.color_scheme_table.active_colors
ColorsNormal = Colors.Normal
tpl_link = u'%s%%s%s' % (Colors.filenameEm, ColorsNormal)
tpl_call = u'%s%%s%s%%s%s' % (Colors.vName, Colors.valEm, ColorsNormal)
tpl_line = u'%%s%s%%s %s%%s' % (Colors.lineno, ColorsNormal)
tpl_line_em = u'%%s%s%%s %s%%s%s' % (Colors.linenoEm, Colors.line,
ColorsNormal)
frame, lineno = frame_lineno
return_value = ''
if '__return__' in frame.f_locals:
rv = frame.f_locals['__return__']
#return_value += '->'
return_value += reprlib.repr(rv) + '\n'
ret.append(return_value)
#s = filename + '(' + `lineno` + ')'
filename = self.canonic(frame.f_code.co_filename)
link = tpl_link % py3compat.cast_unicode(filename)
if frame.f_code.co_name:
func = frame.f_code.co_name
else:
func = "<lambda>"
call = ''
if func != '?':
if '__args__' in frame.f_locals:
args = reprlib.repr(frame.f_locals['__args__'])
else:
args = '()'
call = tpl_call % (func, args)
# The level info should be generated in the same format pdb uses, to
# avoid breaking the pdbtrack functionality of python-mode in *emacs.
if frame is self.curframe:
ret.append('> ')
else:
ret.append(' ')
ret.append(u'%s(%s)%s\n' % (link,lineno,call))
start = lineno - 1 - context//2
lines = linecache.getlines(filename)
start = min(start, len(lines) - context)
start = max(start, 0)
lines = lines[start : start + context]
for i,line in enumerate(lines):
show_arrow = (start + 1 + i == lineno)
linetpl = tpl_line_em if (frame is self.curframe or show_arrow) else tpl_line
ret.append(self.__format_line(linetpl, filename,
start + 1 + i, line,
arrow = show_arrow) )
return ''.join(ret)
def __format_line(self, tpl_line, filename, lineno, line, arrow = False):
bp_mark = ""
bp_mark_color = ""
new_line, err = self.parser.format2(line, 'str')
if not err:
line = new_line
bp = None
if lineno in self.get_file_breaks(filename):
bps = self.get_breaks(filename, lineno)
bp = bps[-1]
if bp:
Colors = self.color_scheme_table.active_colors
bp_mark = str(bp.number)
bp_mark_color = Colors.breakpoint_enabled
if not bp.enabled:
bp_mark_color = Colors.breakpoint_disabled
numbers_width = 7
if arrow:
# This is the line with the error
pad = numbers_width - len(str(lineno)) - len(bp_mark)
num = '%s%s' % (make_arrow(pad), str(lineno))
else:
num = '%*s' % (numbers_width - len(bp_mark), str(lineno))
return tpl_line % (bp_mark_color + bp_mark, num, line)
def print_list_lines(self, filename, first, last):
"""The printing (as opposed to the parsing part of a 'list'
command."""
try:
Colors = self.color_scheme_table.active_colors
ColorsNormal = Colors.Normal
tpl_line = '%%s%s%%s %s%%s' % (Colors.lineno, ColorsNormal)
tpl_line_em = '%%s%s%%s %s%%s%s' % (Colors.linenoEm, Colors.line, ColorsNormal)
src = []
if filename == "<string>" and hasattr(self, "_exec_filename"):
filename = self._exec_filename
for lineno in range(first, last+1):
line = linecache.getline(filename, lineno)
if not line:
break
if lineno == self.curframe.f_lineno:
line = self.__format_line(tpl_line_em, filename, lineno, line, arrow = True)
else:
line = self.__format_line(tpl_line, filename, lineno, line, arrow = False)
src.append(line)
self.lineno = lineno
print(''.join(src))
except KeyboardInterrupt:
pass
def do_list(self, arg):
"""Print lines of code from the current stack frame
"""
self.lastcmd = 'list'
last = None
if arg:
try:
x = eval(arg, {}, {})
if isinstance(x, tuple):
first, last = x
first = int(first)
last = int(last)
if last < first:
# Assume it's a count
last = first + last
else:
first = max(1, int(x) - 5)
except:
print('*** Error in argument:', repr(arg))
return
elif self.lineno is None:
first = max(1, self.curframe.f_lineno - 5)
else:
first = self.lineno + 1
if last is None:
last = first + 10
self.print_list_lines(self.curframe.f_code.co_filename, first, last)
# vds: >>
lineno = first
filename = self.curframe.f_code.co_filename
self.shell.hooks.synchronize_with_editor(filename, lineno, 0)
# vds: <<
do_l = do_list
def getsourcelines(self, obj):
lines, lineno = inspect.findsource(obj)
if inspect.isframe(obj) and obj.f_globals is obj.f_locals:
# must be a module frame: do not try to cut a block out of it
return lines, 1
elif inspect.ismodule(obj):
return lines, 1
return inspect.getblock(lines[lineno:]), lineno+1
def do_longlist(self, arg):
"""Print lines of code from the current stack frame.
Shows more lines than 'list' does.
"""
self.lastcmd = 'longlist'
try:
lines, lineno = self.getsourcelines(self.curframe)
except OSError as err:
self.error(err)
return
last = lineno + len(lines)
self.print_list_lines(self.curframe.f_code.co_filename, lineno, last)
do_ll = do_longlist
def do_debug(self, arg):
"""debug code
Enter a recursive debugger that steps through the code
argument (which is an arbitrary expression or statement to be
executed in the current environment).
"""
sys.settrace(None)
globals = self.curframe.f_globals
locals = self.curframe_locals
p = self.__class__(completekey=self.completekey,
stdin=self.stdin, stdout=self.stdout)
p.use_rawinput = self.use_rawinput
p.prompt = "(%s) " % self.prompt.strip()
self.message("ENTERING RECURSIVE DEBUGGER")
sys.call_tracing(p.run, (arg, globals, locals))
self.message("LEAVING RECURSIVE DEBUGGER")
sys.settrace(self.trace_dispatch)
self.lastcmd = p.lastcmd
def do_pdef(self, arg):
"""Print the call signature for any callable object.
The debugger interface to %pdef"""
namespaces = [('Locals', self.curframe.f_locals),
('Globals', self.curframe.f_globals)]
self.shell.find_line_magic('pdef')(arg, namespaces=namespaces)
def do_pdoc(self, arg):
"""Print the docstring for an object.
The debugger interface to %pdoc."""
namespaces = [('Locals', self.curframe.f_locals),
('Globals', self.curframe.f_globals)]
self.shell.find_line_magic('pdoc')(arg, namespaces=namespaces)
def do_pfile(self, arg):
"""Print (or run through pager) the file where an object is defined.
The debugger interface to %pfile.
"""
namespaces = [('Locals', self.curframe.f_locals),
('Globals', self.curframe.f_globals)]
self.shell.find_line_magic('pfile')(arg, namespaces=namespaces)
def do_pinfo(self, arg):
"""Provide detailed information about an object.
The debugger interface to %pinfo, i.e., obj?."""
namespaces = [('Locals', self.curframe.f_locals),
('Globals', self.curframe.f_globals)]
self.shell.find_line_magic('pinfo')(arg, namespaces=namespaces)
def do_pinfo2(self, arg):
"""Provide extra detailed information about an object.
The debugger interface to %pinfo2, i.e., obj??."""
namespaces = [('Locals', self.curframe.f_locals),
('Globals', self.curframe.f_globals)]
self.shell.find_line_magic('pinfo2')(arg, namespaces=namespaces)
def do_psource(self, arg):
"""Print (or run through pager) the source code for an object."""
namespaces = [('Locals', self.curframe.f_locals),
('Globals', self.curframe.f_globals)]
self.shell.find_line_magic('psource')(arg, namespaces=namespaces)
def do_where(self, arg):
"""w(here)
Print a stack trace, with the most recent frame at the bottom.
An arrow indicates the "current frame", which determines the
context of most commands. 'bt' is an alias for this command.
Takes an optional numeric argument, used as the number of context lines to
print."""
if arg:
context = int(arg)
self.print_stack_trace(context)
else:
self.print_stack_trace()
do_w = do_where
def set_trace(frame=None):
"""
Start debugging from `frame`.
If frame is not specified, debugging starts from caller's frame.
"""
Pdb().set_trace(frame or sys._getframe().f_back)
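A short usage sketch for the debugger above: the module-level set_trace() drops into the yap_ipython-flavoured Pdb at the call site (import path assumed).
from yap_ipython.core.debugger import set_trace   # assumed module path

def buggy(n):
    total = 0
    for i in range(n):
        set_trace()        # opens the ipdb> prompt here; 'll' lists surrounding code
        total += i
    return total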

File diff suppressed because it is too large

View File

@@ -0,0 +1,70 @@
# encoding: utf-8
"""
A context manager for handling sys.displayhook.
Authors:
* Robert Kern
* Brian Granger
"""
#-----------------------------------------------------------------------------
# Copyright (C) 2008-2011 The yap_ipython Development Team
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
import sys
from traitlets.config.configurable import Configurable
from traitlets import Any
#-----------------------------------------------------------------------------
# Classes and functions
#-----------------------------------------------------------------------------
class DisplayTrap(Configurable):
"""Object to manage sys.displayhook.
This came from yap_ipython.core.kernel.display_hook, but is simplified
(no callbacks or formatters) until more of the core is refactored.
"""
hook = Any()
def __init__(self, hook=None):
super(DisplayTrap, self).__init__(hook=hook, config=None)
self.old_hook = None
# We define this to track if a single DisplayTrap is nested.
# Only turn off the trap when the outermost call to __exit__ is made.
self._nested_level = 0
def __enter__(self):
if self._nested_level == 0:
self.set()
self._nested_level += 1
return self
def __exit__(self, type, value, traceback):
if self._nested_level == 1:
self.unset()
self._nested_level -= 1
# Returning False will cause exceptions to propagate
return False
def set(self):
"""Set the hook."""
if sys.displayhook is not self.hook:
self.old_hook = sys.displayhook
sys.displayhook = self.hook
def unset(self):
"""Unset the hook."""
sys.displayhook = self.old_hook
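A minimal sketch of the DisplayTrap semantics above: inside the outermost with-block sys.displayhook is replaced, and the previous hook is restored on exit (import path assumed).
import sys
from yap_ipython.core.display_trap import DisplayTrap   # assumed module path

def my_hook(value):
    if value is not None:
        print('got:', value)

trap = DisplayTrap(hook=my_hook)
with trap:
    assert sys.displayhook is my_hook
assert sys.displayhook is not my_hook    # the original hook is back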

View File

@@ -0,0 +1,320 @@
# -*- coding: utf-8 -*-
"""Displayhook for yap_ipython.
This defines a callable class that yap_ipython uses for `sys.displayhook`.
"""
# Copyright (c) yap_ipython Development Team.
# Distributed under the terms of the Modified BSD License.
import builtins as builtin_mod
import sys
import io as _io
import tokenize
from traitlets.config.configurable import Configurable
from traitlets import Instance, Float
from warnings import warn
# TODO: Move the various attributes (cache_size, [others now moved]). Some
# of these are also attributes of InteractiveShell. They should be on ONE object
# only and the other objects should ask that one object for their values.
class DisplayHook(Configurable):
"""The custom yap_ipython displayhook to replace sys.displayhook.
This class does many things, but the basic idea is that it is a callable
that gets called anytime user code returns a value.
"""
shell = Instance('yap_ipython.core.interactiveshell.InteractiveShellABC',
allow_none=True)
exec_result = Instance('yap_ipython.core.interactiveshell.ExecutionResult',
allow_none=True)
cull_fraction = Float(0.2)
def __init__(self, shell=None, cache_size=1000, **kwargs):
super(DisplayHook, self).__init__(shell=shell, **kwargs)
cache_size_min = 3
if cache_size <= 0:
self.do_full_cache = 0
cache_size = 0
elif cache_size < cache_size_min:
self.do_full_cache = 0
cache_size = 0
warn('caching was disabled (min value for cache size is %s).' %
cache_size_min,stacklevel=3)
else:
self.do_full_cache = 1
self.cache_size = cache_size
# we need a reference to the user-level namespace
self.shell = shell
self._,self.__,self.___ = '','',''
# these are deliberately global:
to_user_ns = {'_':self._,'__':self.__,'___':self.___}
self.shell.user_ns.update(to_user_ns)
@property
def prompt_count(self):
return self.shell.execution_count
#-------------------------------------------------------------------------
# Methods used in __call__. Override these methods to modify the behavior
# of the displayhook.
#-------------------------------------------------------------------------
def check_for_underscore(self):
"""Check if the user has set the '_' variable by hand."""
# If something injected a '_' variable in __builtin__, delete
# ipython's automatic one so we don't clobber that. gettext() in
# particular uses _, so we need to stay away from it.
if '_' in builtin_mod.__dict__:
try:
user_value = self.shell.user_ns['_']
if user_value is not self._:
return
del self.shell.user_ns['_']
except KeyError:
pass
def quiet(self):
"""Should we silence the display hook because of ';'?"""
# do not print output if input ends in ';'
try:
cell = self.shell.history_manager.input_hist_parsed[-1]
except IndexError:
# some uses of ipshellembed may fail here
return False
sio = _io.StringIO(cell)
tokens = list(tokenize.generate_tokens(sio.readline))
for token in reversed(tokens):
if token[0] in (tokenize.ENDMARKER, tokenize.NL, tokenize.NEWLINE, tokenize.COMMENT):
continue
if (token[0] == tokenize.OP) and (token[1] == ';'):
return True
else:
return False
def start_displayhook(self):
"""Start the displayhook, initializing resources."""
pass
def write_output_prompt(self):
"""Write the output prompt.
The default implementation simply writes the prompt to
``sys.stdout``.
"""
# Use write, not print which adds an extra space.
sys.stdout.write(self.shell.separate_out)
outprompt = 'Out[{}]: '.format(self.shell.execution_count)
if self.do_full_cache:
sys.stdout.write(outprompt)
def compute_format_data(self, result):
"""Compute format data of the object to be displayed.
The format data is a generalization of the :func:`repr` of an object.
In the default implementation the format data is a :class:`dict` of
key/value pairs where the keys are valid MIME types and the values
are JSON'able data structures containing the raw data for that MIME
type. It is up to frontends to pick a MIME type to use and
display that data in an appropriate manner.
This method only computes the format data for the object and should
NOT actually print or write that to a stream.
Parameters
----------
result : object
The Python object passed to the display hook, whose format will be
computed.
Returns
-------
(format_dict, md_dict) : dict
format_dict is a :class:`dict` whose keys are valid MIME types and values are
JSON'able raw data for that MIME type. It is recommended that
the returned dict always include the "text/plain"
MIME type representation of the object.
md_dict is a :class:`dict` with the same MIME type keys
of metadata associated with each output.
"""
return self.shell.display_formatter.format(result)
# This can be set to True by the write_output_prompt method in a subclass
prompt_end_newline = False
def write_format_data(self, format_dict, md_dict=None):
"""Write the format data dict to the frontend.
This default version of this method simply writes the plain text
representation of the object to ``sys.stdout``. Subclasses should
override this method to send the entire `format_dict` to the
frontends.
Parameters
----------
format_dict : dict
The format dict for the object passed to `sys.displayhook`.
md_dict : dict (optional)
The metadata dict to be associated with the display data.
"""
if 'text/plain' not in format_dict:
# nothing to do
return
# We want to print because we want to always make sure we have a
# newline, even if all the prompt separators are ''. This is the
# standard yap_ipython behavior.
result_repr = format_dict['text/plain']
if '\n' in result_repr:
# So that multi-line strings line up with the left column of
# the screen, instead of having the output prompt mess up
# their first line.
# We use the prompt template instead of the expanded prompt
# because the expansion may add ANSI escapes that will interfere
# with our ability to determine whether or not we should add
# a newline.
if not self.prompt_end_newline:
# But avoid extraneous empty lines.
result_repr = '\n' + result_repr
print(result_repr)
def update_user_ns(self, result):
"""Update user_ns with various things like _, __, _1, etc."""
# Avoid recursive reference when displaying _oh/Out
if result is not self.shell.user_ns['_oh']:
if len(self.shell.user_ns['_oh']) >= self.cache_size and self.do_full_cache:
self.cull_cache()
# Don't overwrite '_' and friends if '_' is in __builtin__
# (otherwise we cause buggy behavior for things like gettext). and
# do not overwrite _, __ or ___ if one of these has been assigned
# by the user.
update_unders = True
for unders in ['_'*i for i in range(1,4)]:
if unders not in self.shell.user_ns:
continue
if getattr(self, unders) is not self.shell.user_ns.get(unders):
update_unders = False
self.___ = self.__
self.__ = self._
self._ = result
if ('_' not in builtin_mod.__dict__) and (update_unders):
self.shell.push({'_':self._,
'__':self.__,
'___':self.___}, interactive=False)
# hackish access to top-level namespace to create _1,_2... dynamically
to_main = {}
if self.do_full_cache:
new_result = '_%s' % self.prompt_count
to_main[new_result] = result
self.shell.push(to_main, interactive=False)
self.shell.user_ns['_oh'][self.prompt_count] = result
def fill_exec_result(self, result):
if self.exec_result is not None:
self.exec_result.result = result
def log_output(self, format_dict):
"""Log the output."""
if 'text/plain' not in format_dict:
# nothing to do
return
if self.shell.logger.log_output:
self.shell.logger.log_write(format_dict['text/plain'], 'output')
self.shell.history_manager.output_hist_reprs[self.prompt_count] = \
format_dict['text/plain']
def finish_displayhook(self):
"""Finish up all displayhook activities."""
sys.stdout.write(self.shell.separate_out2)
sys.stdout.flush()
def __call__(self, result=None):
"""Printing with history cache management.
This is invoked every time the interpreter needs to print, and is
activated by setting the variable sys.displayhook to it.
"""
self.check_for_underscore()
if result is not None and not self.quiet():
self.start_displayhook()
self.write_output_prompt()
format_dict, md_dict = self.compute_format_data(result)
self.update_user_ns(result)
self.fill_exec_result(result)
if format_dict:
self.write_format_data(format_dict, md_dict)
self.log_output(format_dict)
self.finish_displayhook()
def cull_cache(self):
"""Output cache is full, cull the oldest entries"""
oh = self.shell.user_ns.get('_oh', {})
sz = len(oh)
cull_count = max(int(sz * self.cull_fraction), 2)
warn('Output cache limit (currently {sz} entries) hit.\n'
'Flushing oldest {cull_count} entries.'.format(sz=sz, cull_count=cull_count))
for i, n in enumerate(sorted(oh)):
if i >= cull_count:
break
self.shell.user_ns.pop('_%i' % n, None)
oh.pop(n, None)
def flush(self):
if not self.do_full_cache:
raise ValueError("You shouldn't have reached the cache flush "
"if full caching is not enabled!")
# delete auto-generated vars from global namespace
for n in range(1,self.prompt_count + 1):
key = '_'+repr(n)
try:
del self.shell.user_ns[key]
except: pass
# In some embedded circumstances, the user_ns doesn't have the
# '_oh' key set up.
oh = self.shell.user_ns.get('_oh', None)
if oh is not None:
oh.clear()
# Release our own references to objects:
self._, self.__, self.___ = '', '', ''
if '_' not in builtin_mod.__dict__:
self.shell.user_ns.update({'_':None,'__':None, '___':None})
import gc
# TODO: Is this really needed?
# IronPython blocks here forever
if sys.platform != "cli":
gc.collect()
class CapturingDisplayHook(object):
def __init__(self, shell, outputs=None):
self.shell = shell
if outputs is None:
outputs = []
self.outputs = outputs
def __call__(self, result=None):
if result is None:
return
format_dict, md_dict = self.shell.display_formatter.format(result)
self.outputs.append((format_dict, md_dict))
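As a standalone sketch of the trailing-';' test that quiet() performs on the last input cell, the same tokenize-based check can be written as a small helper (the helper name is illustrative):

def _ends_with_semicolon(cell):
    """Return True if the last meaningful token of `cell` is ';'."""
    tokens = list(tokenize.generate_tokens(_io.StringIO(cell).readline))
    for tok in reversed(tokens):
        if tok[0] in (tokenize.ENDMARKER, tokenize.NL, tokenize.NEWLINE, tokenize.COMMENT):
            continue
        return tok[0] == tokenize.OP and tok[1] == ';'
    return False

# _ends_with_semicolon("1 + 1;") -> True   (output suppressed)
# _ends_with_semicolon("1 + 1")  -> False  (output printed)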

View File

@ -0,0 +1,125 @@
"""An interface for publishing rich data to frontends.
There are two components of the display system:
* Display formatters, which take a Python object and compute the
representation of the object in various formats (text, HTML, SVG, etc.).
* The display publisher that is used to send the representation data to the
various frontends.
This module defines the logic for display publishing. The display publisher uses
the ``display_data`` message type that is defined in the yap_ipython messaging
spec.
"""
# Copyright (c) yap_ipython Development Team.
# Distributed under the terms of the Modified BSD License.
import sys
from traitlets.config.configurable import Configurable
from traitlets import List
# This used to be defined here - it is imported for backwards compatibility
from .display import publish_display_data
#-----------------------------------------------------------------------------
# Main payload class
#-----------------------------------------------------------------------------
class DisplayPublisher(Configurable):
"""A traited class that publishes display data to frontends.
Instances of this class are created by the main yap_ipython object and should
be accessed there.
"""
def _validate_data(self, data, metadata=None):
"""Validate the display data.
Parameters
----------
data : dict
The format data dictionary.
metadata : dict
Any metadata for the data.
"""
if not isinstance(data, dict):
raise TypeError('data must be a dict, got: %r' % data)
if metadata is not None:
if not isinstance(metadata, dict):
raise TypeError('metadata must be a dict, got: %r' % metadata)
# use * to indicate that transient and update are keyword-only
def publish(self, data, metadata=None, source=None, *, transient=None, update=False, **kwargs):
"""Publish data and metadata to all frontends.
See the ``display_data`` message in the messaging documentation for
more details about this message type.
The following MIME types are currently implemented:
* text/plain
* text/html
* text/markdown
* text/latex
* application/json
* application/javascript
* image/png
* image/jpeg
* image/svg+xml
Parameters
----------
data : dict
A dictionary having keys that are valid MIME types (like
'text/plain' or 'image/svg+xml') and values that are the data for
that MIME type. The data itself must be a JSON'able data
structure. Minimally all data should have the 'text/plain' data,
which can be displayed by all frontends. If more than the plain
text is given, it is up to the frontend to decide which
representation to use.
metadata : dict
A dictionary for metadata related to the data. This can contain
arbitrary key, value pairs that frontends can use to interpret
the data. Metadata specific to each mime-type can be specified
in the metadata dict with the same mime-type keys as
the data itself.
source : str, deprecated
Unused.
transient: dict, keyword-only
A dictionary for transient data.
Data in this dictionary should not be persisted as part of saving this output.
Examples include 'display_id'.
update: bool, keyword-only, default: False
If True, only update existing outputs with the same display_id,
rather than creating a new output.
"""
# The default is to simply write the plain text data using sys.stdout.
if 'text/plain' in data:
print(data['text/plain'])
def clear_output(self, wait=False):
"""Clear the output of the cell receiving output."""
print('\033[2K\r', end='')
sys.stdout.flush()
print('\033[2K\r', end='')
sys.stderr.flush()
class CapturingDisplayPublisher(DisplayPublisher):
"""A DisplayPublisher that stores published output instead of displaying it."""
outputs = List()
def publish(self, data, metadata=None, source=None, *, transient=None, update=False):
self.outputs.append({'data':data, 'metadata':metadata,
'transient':transient, 'update':update})
def clear_output(self, wait=False):
super(CapturingDisplayPublisher, self).clear_output(wait)
# empty the list, *do not* reassign a new list
self.outputs.clear()
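A minimal sketch of how the capturing publisher is used (illustrative only; in practice the shell installs it while capturing output):

capture = CapturingDisplayPublisher()
capture.publish({'text/plain': 'hello'}, metadata={})
assert capture.outputs[0]['data'] == {'text/plain': 'hello'}
capture.clear_output()        # empties the outputs list in place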

View File

@ -0,0 +1,60 @@
# encoding: utf-8
"""
Global exception classes for yap_ipython.core.
Authors:
* Brian Granger
* Fernando Perez
* Min Ragan-Kelley
Notes
-----
"""
#-----------------------------------------------------------------------------
# Copyright (C) 2008 The yap_ipython Development Team
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Exception classes
#-----------------------------------------------------------------------------
class IPythonCoreError(Exception):
pass
class TryNext(IPythonCoreError):
"""Try next hook exception.
Raise this in your hook function to indicate that the next hook handler
should be used to handle the operation.
"""
class UsageError(IPythonCoreError):
"""Error in magic function arguments, etc.
Something that probably won't warrant a full traceback, but should
nevertheless interrupt a macro / batch file.
"""
class StdinNotImplementedError(IPythonCoreError, NotImplementedError):
"""raw_input was requested in a context where it is not supported
For use in yap_ipython kernels, where only some frontends may support
stdin requests.
"""
class InputRejected(Exception):
"""Input rejected by ast transformer.
Raise this in your NodeTransformer to indicate that InteractiveShell should
not execute the supplied input.
"""

View File

@ -0,0 +1,160 @@
"""Infrastructure for registering and firing callbacks on application events.
Unlike :mod:`yap_ipython.core.hooks`, which lets end users set single functions to
be called at specific times, or a collection of alternative methods to try,
callbacks are designed to be used by extension authors. A number of callbacks
can be registered for the same event without needing to be aware of one another.
The functions defined in this module are no-ops indicating the names of available
events and the arguments which will be passed to them.
.. note::
This API is experimental in yap_ipython 2.0, and may be revised in future versions.
"""
from backcall import callback_prototype
class EventManager(object):
"""Manage a collection of events and a sequence of callbacks for each.
This is attached to :class:`~yap_ipython.core.interactiveshell.InteractiveShell`
instances as an ``events`` attribute.
.. note::
This API is experimental in yap_ipython 2.0, and may be revised in future versions.
"""
def __init__(self, shell, available_events):
"""Initialise the :class:`EventManager`.
Parameters
----------
shell
The :class:`~yap_ipython.core.interactiveshell.InteractiveShell` instance
available_events
An iterable of names for callback events.
"""
self.shell = shell
self.callbacks = {n:[] for n in available_events}
def register(self, event, function):
"""Register a new event callback.
Parameters
----------
event : str
The event for which to register this callback.
function : callable
A function to be called on the given event. It should take the same
parameters as the appropriate callback prototype.
Raises
------
TypeError
If ``function`` is not callable.
KeyError
If ``event`` is not one of the known events.
"""
if not callable(function):
raise TypeError('Need a callable, got %r' % function)
callback_proto = available_events.get(event)
self.callbacks[event].append(callback_proto.adapt(function))
def unregister(self, event, function):
"""Remove a callback from the given event."""
if function in self.callbacks[event]:
return self.callbacks[event].remove(function)
# Remove callback in case ``function`` was adapted by `backcall`.
for callback in self.callbacks[event]:
try:
if callback.__wrapped__ is function:
return self.callbacks[event].remove(callback)
except AttributeError:
pass
raise ValueError('Function {!r} is not registered as a {} callback'.format(function, event))
def trigger(self, event, *args, **kwargs):
"""Call callbacks for ``event``.
Any additional arguments are passed to all callbacks registered for this
event. Exceptions raised by callbacks are caught, and a message printed.
"""
for func in self.callbacks[event][:]:
try:
func(*args, **kwargs)
except Exception:
print("Error in callback {} (for {}):".format(func, event))
self.shell.showtraceback()
# event_name -> prototype mapping
available_events = {}
def _define_event(callback_function):
callback_proto = callback_prototype(callback_function)
available_events[callback_function.__name__] = callback_proto
return callback_proto
# ------------------------------------------------------------------------------
# Callback prototypes
#
# No-op functions which describe the names of available events and the
# signatures of callbacks for those events.
# ------------------------------------------------------------------------------
@_define_event
def pre_execute():
"""Fires before code is executed in response to user/frontend action.
This includes comm and widget messages and silent execution, as well as user
code cells.
"""
pass
@_define_event
def pre_run_cell(info):
"""Fires before user-entered code runs.
Parameters
----------
info : :class:`~yap_ipython.core.interactiveshell.ExecutionInfo`
An object containing information used for the code execution.
"""
pass
@_define_event
def post_execute():
"""Fires after code is executed in response to user/frontend action.
This includes comm and widget messages and silent execution, as well as user
code cells.
"""
pass
@_define_event
def post_run_cell(result):
"""Fires after user-entered code runs.
Parameters
----------
result : :class:`~yap_ipython.core.interactiveshell.ExecutionResult`
The object which will be returned as the execution result.
"""
pass
@_define_event
def shell_initialized(ip):
"""Fires after initialisation of :class:`~yap_ipython.core.interactiveshell.InteractiveShell`.
This is before extensions and startup scripts are loaded, so it can only be
set by subclassing.
Parameters
----------
ip : :class:`~yap_ipython.core.interactiveshell.InteractiveShell`
The newly initialised shell.
"""
pass
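A minimal sketch of registering and firing one of these callbacks (no shell is needed just to exercise the dispatch):

def announce(info):
    # matches the pre_run_cell prototype above
    print('about to run:', info)

manager = EventManager(shell=None, available_events=available_events)
manager.register('pre_run_cell', announce)
manager.trigger('pre_run_cell', 'print(1)')   # calls announce('print(1)')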

View File

@ -0,0 +1,184 @@
# -*- coding: utf-8 -*-
"""
Color schemes for exception handling code in yap_ipython.
"""
import os
import warnings
#*****************************************************************************
# Copyright (C) 2005-2006 Fernando Perez <fperez@colorado.edu>
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#*****************************************************************************
from yap_ipython.utils.coloransi import ColorSchemeTable, TermColors, ColorScheme
def exception_colors():
"""Return a color table with fields for exception reporting.
The table is an instance of ColorSchemeTable with schemes added for
'Neutral', 'Linux', 'LightBG' and 'NoColor' and fields for exception handling filled
in.
Examples:
>>> ec = exception_colors()
>>> ec.active_scheme_name
''
>>> print(ec.active_colors)
None
Now we activate a color scheme:
>>> ec.set_active_scheme('NoColor')
>>> ec.active_scheme_name
'NoColor'
>>> sorted(ec.active_colors.keys())
['Normal', 'caret', 'em', 'excName', 'filename', 'filenameEm', 'line',
'lineno', 'linenoEm', 'name', 'nameEm', 'normalEm', 'topline', 'vName',
'val', 'valEm']
"""
ex_colors = ColorSchemeTable()
# Populate it with color schemes
C = TermColors # shorthand and local lookup
ex_colors.add_scheme(ColorScheme(
'NoColor',
# The color to be used for the top line
topline = C.NoColor,
# The colors to be used in the traceback
filename = C.NoColor,
lineno = C.NoColor,
name = C.NoColor,
vName = C.NoColor,
val = C.NoColor,
em = C.NoColor,
# Emphasized colors for the last frame of the traceback
normalEm = C.NoColor,
filenameEm = C.NoColor,
linenoEm = C.NoColor,
nameEm = C.NoColor,
valEm = C.NoColor,
# Colors for printing the exception
excName = C.NoColor,
line = C.NoColor,
caret = C.NoColor,
Normal = C.NoColor
))
# make some schemes as instances so we can copy them for modification easily
ex_colors.add_scheme(ColorScheme(
'Linux',
# The color to be used for the top line
topline = C.LightRed,
# The colors to be used in the traceback
filename = C.Green,
lineno = C.Green,
name = C.Purple,
vName = C.Cyan,
val = C.Green,
em = C.LightCyan,
# Emphasized colors for the last frame of the traceback
normalEm = C.LightCyan,
filenameEm = C.LightGreen,
linenoEm = C.LightGreen,
nameEm = C.LightPurple,
valEm = C.LightBlue,
# Colors for printing the exception
excName = C.LightRed,
line = C.Yellow,
caret = C.White,
Normal = C.Normal
))
# For light backgrounds, swap dark/light colors
ex_colors.add_scheme(ColorScheme(
'LightBG',
# The color to be used for the top line
topline = C.Red,
# The colors to be used in the traceback
filename = C.LightGreen,
lineno = C.LightGreen,
name = C.LightPurple,
vName = C.Cyan,
val = C.LightGreen,
em = C.Cyan,
# Emphasized colors for the last frame of the traceback
normalEm = C.Cyan,
filenameEm = C.Green,
linenoEm = C.Green,
nameEm = C.Purple,
valEm = C.Blue,
# Colors for printing the exception
excName = C.Red,
#line = C.Brown, # brown often is displayed as yellow
line = C.Red,
caret = C.Normal,
Normal = C.Normal,
))
ex_colors.add_scheme(ColorScheme(
'Neutral',
# The color to be used for the top line
topline = C.Red,
# The colors to be used in the traceback
filename = C.LightGreen,
lineno = C.LightGreen,
name = C.LightPurple,
vName = C.Cyan,
val = C.LightGreen,
em = C.Cyan,
# Emphasized colors for the last frame of the traceback
normalEm = C.Cyan,
filenameEm = C.Green,
linenoEm = C.Green,
nameEm = C.Purple,
valEm = C.Blue,
# Colors for printing the exception
excName = C.Red,
#line = C.Brown, # brown often is displayed as yellow
line = C.Red,
caret = C.Normal,
Normal = C.Normal,
))
# Hack: the 'neutral' colours are not very visible on a dark background on
# Windows. Since Windows command prompts have a dark background by default, and
# relatively few users are likely to alter that, we will use the 'Linux' colours,
# designed for a dark background, as the default on Windows.
if os.name == "nt":
ex_colors.add_scheme(ex_colors['Linux'].copy('Neutral'))
return ex_colors
class Deprec(object):
def __init__(self, wrapped_obj):
self.wrapped=wrapped_obj
def __getattr__(self, name):
val = getattr(self.wrapped, name)
warnings.warn("Using ExceptionColors global is deprecated and will be removed in yap_ipython 6.0",
DeprecationWarning, stacklevel=2)
# using getattr after warnings breaks ipydoctest in a weird way on Python 3.5
return val
# For backwards compatibility, keep around a single global object. Note that
# this should NOT be used, the factory function should be used instead, since
# these objects are stateful and it's very easy to get strange bugs if any code
# modifies the module-level object's state.
ExceptionColors = Deprec(exception_colors())
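A minimal usage sketch of the factory (the exception name below is only an example):

colors = exception_colors()
colors.set_active_scheme('Linux')
c = colors.active_colors
print(c['excName'] + 'ZeroDivisionError' + c['Normal'] + ': division by zero')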

View File

@ -0,0 +1,155 @@
# encoding: utf-8
"""A class for managing yap_ipython extensions."""
# Copyright (c) yap_ipython Development Team.
# Distributed under the terms of the Modified BSD License.
import os
import os.path
import sys
from importlib import import_module
from traitlets.config.configurable import Configurable
from yap_ipython.utils.path import ensure_dir_exists, compress_user
from yap_ipython.utils.decorators import undoc
from traitlets import Instance
try:
from importlib import reload
except ImportError:
## deprecated since 3.4
from imp import reload
#-----------------------------------------------------------------------------
# Main class
#-----------------------------------------------------------------------------
class ExtensionManager(Configurable):
"""A class to manage yap_ipython extensions.
A yap_ipython extension is an importable Python module that has
a function with the signature::
def load_ipython_extension(ipython):
# Do things with ipython
This function is called after your extension is imported and the
currently active :class:`InteractiveShell` instance is passed as
the only argument. You can do anything you want with yap_ipython at
that point, including defining new magic and aliases, adding new
components, etc.
You can also optionally define an :func:`unload_ipython_extension(ipython)`
function, which will be called if the user unloads or reloads the extension.
The extension manager will only call :func:`load_ipython_extension` again
if the extension is reloaded.
You can put your extension modules anywhere you want, as long as
they can be imported by Python's standard import mechanism. However,
to make it easy to write extensions, you can also put your extensions
in ``os.path.join(self.ipython_dir, 'extensions')``. This directory
is added to ``sys.path`` automatically.
"""
shell = Instance('yap_ipython.core.interactiveshell.InteractiveShellABC', allow_none=True)
def __init__(self, shell=None, **kwargs):
super(ExtensionManager, self).__init__(shell=shell, **kwargs)
self.shell.observe(
self._on_ipython_dir_changed, names=('ipython_dir',)
)
self.loaded = set()
@property
def ipython_extension_dir(self):
return os.path.join(self.shell.ipython_dir, u'extensions')
def _on_ipython_dir_changed(self, change):
ensure_dir_exists(self.ipython_extension_dir)
def load_extension(self, module_str):
"""Load a yap_ipython extension by its module name.
Returns the string "already loaded" if the extension is already loaded,
"no load function" if the module doesn't have a load_ipython_extension
function, or None if it succeeded.
"""
if module_str in self.loaded:
return "already loaded"
from yap_ipython.utils.syspathcontext import prepended_to_syspath
with self.shell.builtin_trap:
if module_str not in sys.modules:
with prepended_to_syspath(self.ipython_extension_dir):
mod = import_module(module_str)
if mod.__file__.startswith(self.ipython_extension_dir):
print(("Loading extensions from {dir} is deprecated. "
"We recommend managing extensions like any "
"other Python packages, in site-packages.").format(
dir=compress_user(self.ipython_extension_dir)))
mod = sys.modules[module_str]
if self._call_load_ipython_extension(mod):
self.loaded.add(module_str)
else:
return "no load function"
def unload_extension(self, module_str):
"""Unload a yap_ipython extension by its module name.
This function looks up the extension's name in ``sys.modules`` and
simply calls ``mod.unload_ipython_extension(self)``.
Returns the string "no unload function" if the extension doesn't define
a function to unload itself, "not loaded" if the extension isn't loaded,
otherwise None.
"""
if module_str not in self.loaded:
return "not loaded"
if module_str in sys.modules:
mod = sys.modules[module_str]
if self._call_unload_ipython_extension(mod):
self.loaded.discard(module_str)
else:
return "no unload function"
def reload_extension(self, module_str):
"""Reload a yap_ipython extension by calling reload.
If the module has not been loaded before,
:meth:`InteractiveShell.load_extension` is called. Otherwise
:func:`reload` is called and then the :func:`load_ipython_extension`
function of the module, if it exists, is called.
"""
from yap_ipython.utils.syspathcontext import prepended_to_syspath
if (module_str in self.loaded) and (module_str in sys.modules):
self.unload_extension(module_str)
mod = sys.modules[module_str]
with prepended_to_syspath(self.ipython_extension_dir):
reload(mod)
if self._call_load_ipython_extension(mod):
self.loaded.add(module_str)
else:
self.load_extension(module_str)
def _call_load_ipython_extension(self, mod):
if hasattr(mod, 'load_ipython_extension'):
mod.load_ipython_extension(self.shell)
return True
def _call_unload_ipython_extension(self, mod):
if hasattr(mod, 'unload_ipython_extension'):
mod.unload_ipython_extension(self.shell)
return True
@undoc
def install_extension(self, url, filename=None):
"""
Deprecated.
"""
# Ensure the extension directory exists
raise DeprecationWarning(
'`install_extension` and the `install_ext` magic have been deprecated since yap_ipython 4.0. '
'Use pip or other package managers to manage ipython extensions.')
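As a sketch of the extension protocol described in the class docstring above, a hypothetical my_extension.py only needs the load hook (and, optionally, the unload hook):

# my_extension.py (hypothetical)
def load_ipython_extension(ipython):
    # runs on %load_ext my_extension; `ipython` is the active shell
    ipython.push({'answer': 42})

def unload_ipython_extension(ipython):
    # optional: runs on %unload_ext my_extension
    pass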

View File

@ -3,11 +3,11 @@
Inheritance diagram:
.. inheritance-diagram:: IPython.core.formatters
.. inheritance-diagram:: yap_ipython.core.formatters
:parts: 3
"""
# Copyright (c) IPython Development Team.
# Copyright (c) yap_ipython Development Team.
# Distributed under the terms of the Modified BSD License.
import abc
@ -17,18 +17,18 @@ import traceback
import warnings
from io import StringIO
from IPython.lib import pretty
from IPython.utils.dir2 import get_real_method
from IPython.utils.sentinel import Sentinel
from decorator import decorator
from traitlets.config.configurable import Configurable
from yap_ipython.core.getipython import get_ipython
from yap_ipython.utils.sentinel import Sentinel
from yap_ipython.utils.dir2 import get_real_method
from yap_ipython.lib import pretty
from traitlets import (
Bool, Dict, Integer, Unicode, CUnicode, ObjectName, List,
ForwardDeclaredInstance,
default, observe,
)
from traitlets.config.configurable import Configurable
from packages.python.yap_kernel.core.yap_kernel.getipython import get_ipython
class DisplayFormatter(Configurable):
@ -57,6 +57,11 @@ class DisplayFormatter(Configurable):
def _default_formatter(self):
return IPythonDisplayFormatter(parent=self)
mimebundle_formatter = ForwardDeclaredInstance('FormatterABC')
@default('mimebundle_formatter')
def _default_mime_formatter(self):
return MimeBundleFormatter(parent=self)
# A dict of formatter whose keys are format types (MIME types) and whose
# values are subclasses of BaseFormatter.
formatters = Dict()
@ -86,7 +91,7 @@ class DisplayFormatter(Configurable):
By default all format types will be computed.
The following MIME types are currently implemented:
The following MIME types are usually implemented:
* text/plain
* text/html
@ -103,14 +108,15 @@ class DisplayFormatter(Configurable):
----------
obj : object
The Python object whose format data will be computed.
include : list or tuple, optional
include : list, tuple or set; optional
A list of format type strings (MIME types) to include in the
format data dict. If this is set *only* the format types included
in this list will be computed.
exclude : list or tuple, optional
exclude : list, tuple or set; optional
A list of format type string (MIME types) to exclude in the format
data dict. If this is set all format types will be computed,
except for those included in this argument.
Mimetypes present in exclude will take precedence over the ones in include
Returns
-------
@ -124,6 +130,15 @@ class DisplayFormatter(Configurable):
metadata_dict is a dictionary of metadata about each mime-type output.
Its keys will be a strict subset of the keys in format_dict.
Notes
-----
If an object implement `_repr_mimebundle_` as well as various
`_repr_*_`, the data returned by `_repr_mimebundle_` will take
precedence and the corresponding `_repr_*_` for this mimetype will
not be called.
"""
format_dict = {}
md_dict = {}
@ -132,7 +147,29 @@ class DisplayFormatter(Configurable):
# object handled itself, don't proceed
return {}, {}
format_dict, md_dict = self.mimebundle_formatter(obj, include=include, exclude=exclude)
if format_dict or md_dict:
if include:
format_dict = {k:v for k,v in format_dict.items() if k in include}
md_dict = {k:v for k,v in md_dict.items() if k in include}
if exclude:
format_dict = {k:v for k,v in format_dict.items() if k not in exclude}
md_dict = {k:v for k,v in md_dict.items() if k not in exclude}
for format_type, formatter in self.formatters.items():
if format_type in format_dict:
# already got it from mimebundle, maybe don't render again.
# exception: manually registered per-mime renderer
# check priority:
# 1. user-registered per-mime formatter
# 2. mime-bundle (user-registered or repr method)
# 3. default per-mime formatter (e.g. repr method)
try:
formatter.lookup(obj)
except KeyError:
# no special formatter, use mime-bundle-provided value
continue
if include and format_type not in include:
continue
if exclude and format_type in exclude:
@ -153,7 +190,6 @@ class DisplayFormatter(Configurable):
format_dict[format_type] = data
if md is not None:
md_dict[format_type] = md
return format_dict, md_dict
@property
@ -188,7 +224,7 @@ def catch_format_error(method, self, *args, **kwargs):
r = method(self, *args, **kwargs)
except NotImplementedError:
# don't warn on NotImplementedErrors
return None
return self._check_return(None, args[0])
except Exception:
exc_info = sys.exc_info()
ip = get_ipython()
@ -196,7 +232,7 @@ def catch_format_error(method, self, *args, **kwargs):
ip.showtraceback(exc_info)
else:
traceback.print_exception(*exc_info)
return None
return self._check_return(None, args[0])
return self._check_return(r, args[0])
@ -531,9 +567,9 @@ class BaseFormatter(Configurable):
class PlainTextFormatter(BaseFormatter):
"""The default pretty-printer.
This uses :mod:`IPython.lib.pretty` to compute the format data of
This uses :mod:`yap_ipython.lib.pretty` to compute the format data of
the object. If the object cannot be pretty printed, :func:`repr` is used.
See the documentation of :mod:`IPython.lib.pretty` for details on
See the documentation of :mod:`yap_ipython.lib.pretty` for details on
how to write pretty printers. Here is a simple example::
def dtype_pprinter(obj, p, cycle):
@ -633,7 +669,7 @@ class PlainTextFormatter(BaseFormatter):
numpy.set_printoptions(precision=8)
self.float_format = fmt
# Use the default pretty printers from IPython.lib.pretty.
# Use the default pretty printers from yap_ipython.lib.pretty.
@default('singleton_printers')
def _singleton_printers_default(self):
return pretty._singleton_pprinters.copy()
@ -797,7 +833,7 @@ class JSONFormatter(BaseFormatter):
# unpack data, metadata tuple for type checking on first element
r, md = r
# handle deprecated JSON-as-string form from IPython < 3
# handle deprecated JSON-as-string form from yap_ipython < 3
if isinstance(r, str):
warnings.warn("JSON expects JSONable list/dict containers, not JSON strings",
FormatterWarning)
@ -843,7 +879,7 @@ class PDFFormatter(BaseFormatter):
_return_type = (bytes, str)
class IPythonDisplayFormatter(BaseFormatter):
"""A Formatter for objects that know how to display themselves.
"""An escape-hatch Formatter for objects that know how to display themselves.
To define the callables that compute the representation of your
objects, define a :meth:`_ipython_display_` method or use the :meth:`for_type`
@ -853,11 +889,17 @@ class IPythonDisplayFormatter(BaseFormatter):
This display formatter has highest priority.
If it fires, no other display formatter will be called.
Prior to yap_ipython 6.1, `_ipython_display_` was the only way to display custom mime-types
without registering a new Formatter.
yap_ipython 6.1 introduces `_repr_mimebundle_` for displaying custom mime-types,
so `_ipython_display_` should only be used for objects that require unusual
display patterns, such as multiple display calls.
"""
print_method = ObjectName('_ipython_display_')
_return_type = (type(None), bool)
@catch_format_error
def __call__(self, obj):
"""Compute the format for an object."""
@ -877,6 +919,60 @@ class IPythonDisplayFormatter(BaseFormatter):
return True
class MimeBundleFormatter(BaseFormatter):
"""A Formatter for arbitrary mime-types.
Unlike other `_repr_<mimetype>_` methods,
`_repr_mimebundle_` should return mime-bundle data,
either the mime-keyed `data` dictionary or the tuple `(data, metadata)`.
Any mime-type is valid.
To define the callables that compute the mime-bundle representation of your
objects, define a :meth:`_repr_mimebundle_` method or use the :meth:`for_type`
or :meth:`for_type_by_name` methods to register functions that handle
this.
.. versionadded:: 6.1
"""
print_method = ObjectName('_repr_mimebundle_')
_return_type = dict
def _check_return(self, r, obj):
r = super(MimeBundleFormatter, self)._check_return(r, obj)
# always return (data, metadata):
if r is None:
return {}, {}
if not isinstance(r, tuple):
return r, {}
return r
@catch_format_error
def __call__(self, obj, include=None, exclude=None):
"""Compute the format for an object.
Identical to the parent's method, except that extra parameters are passed
through to the print method. Unlike other `_repr_*_` methods,
`_repr_mimebundle_` should accept extra keyword arguments, in particular
`include` and `exclude`.
"""
if self.enabled:
# lookup registered printer
try:
printer = self.lookup(obj)
except KeyError:
pass
else:
return printer(obj)
# Finally look for special method names
method = get_real_method(obj, self.print_method)
if method is not None:
return method(include=include, exclude=exclude)
return None
else:
return None
FormatterABC.register(BaseFormatter)
FormatterABC.register(PlainTextFormatter)
FormatterABC.register(HTMLFormatter)
@ -889,6 +985,7 @@ FormatterABC.register(LatexFormatter)
FormatterABC.register(JSONFormatter)
FormatterABC.register(JavascriptFormatter)
FormatterABC.register(IPythonDisplayFormatter)
FormatterABC.register(MimeBundleFormatter)
def format_display_data(obj, include=None, exclude=None):
@ -896,19 +993,6 @@ def format_display_data(obj, include=None, exclude=None):
By default all format types will be computed.
The following MIME types are currently implemented:
* text/plain
* text/html
* text/markdown
* text/latex
* application/json
* application/javascript
* application/pdf
* image/png
* image/jpeg
* image/svg+xml
Parameters
----------
obj : object
@ -938,4 +1022,3 @@ def format_display_data(obj, include=None, exclude=None):
include,
exclude
)
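As a sketch of the `_repr_mimebundle_` protocol that MimeBundleFormatter handles, an object can return a mime-keyed data dict (the class below is purely illustrative):

class Fraction(object):
    def __init__(self, num, den):
        self.num, self.den = num, den
    def _repr_mimebundle_(self, include=None, exclude=None):
        # a (data, metadata) tuple is also accepted; metadata is optional
        return {
            'text/plain': '%d/%d' % (self.num, self.den),
            'text/latex': r'$\frac{%d}{%d}$' % (self.num, self.den),
        }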

View File

@ -1,5 +1,9 @@
# encoding: utf-8
"""Simple function to call to get the current InteractiveShell instance
"""
#-----------------------------------------------------------------------------
# Copyright (C) 2013 The IPython Development Team
# Copyright (C) 2013 The yap_ipython Development Team
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
@ -15,6 +19,6 @@ def get_ipython():
Returns None if no InteractiveShell instance is registered.
"""
from yap_ipython.core.interactiveshell import YAPInteractive
if YAPInteractive.initialized():
return YAPInteractive.instance()
from yap_ipython.core.interactiveshell import InteractiveShell
if InteractiveShell.initialized():
return InteractiveShell.instance()

View File

@ -0,0 +1,906 @@
""" History related magics and functionality """
# Copyright (c) yap_ipython Development Team.
# Distributed under the terms of the Modified BSD License.
import atexit
import datetime
import os
import re
try:
import sqlite3
except ImportError:
try:
from pysqlite2 import dbapi2 as sqlite3
except ImportError:
sqlite3 = None
import threading
from traitlets.config.configurable import LoggingConfigurable
from decorator import decorator
from yap_ipython.utils.decorators import undoc
from yap_ipython.utils.path import locate_profile
from traitlets import (
Any, Bool, Dict, Instance, Integer, List, Unicode, TraitError,
default, observe,
)
from warnings import warn
#-----------------------------------------------------------------------------
# Classes and functions
#-----------------------------------------------------------------------------
@undoc
class DummyDB(object):
"""Dummy DB that will act as a black hole for history.
Only used in the absence of sqlite"""
def execute(*args, **kwargs):
return []
def commit(self, *args, **kwargs):
pass
def __enter__(self, *args, **kwargs):
pass
def __exit__(self, *args, **kwargs):
pass
@decorator
def needs_sqlite(f, self, *a, **kw):
"""Decorator: return an empty list in the absence of sqlite."""
if sqlite3 is None or not self.enabled:
return []
else:
return f(self, *a, **kw)
if sqlite3 is not None:
DatabaseError = sqlite3.DatabaseError
OperationalError = sqlite3.OperationalError
else:
@undoc
class DatabaseError(Exception):
"Dummy exception when sqlite could not be imported. Should never occur."
@undoc
class OperationalError(Exception):
"Dummy exception when sqlite could not be imported. Should never occur."
# use 16kB as threshold for whether a corrupt history db should be saved
# that should be at least 100 entries or so
_SAVE_DB_SIZE = 16384
@decorator
def catch_corrupt_db(f, self, *a, **kw):
"""A decorator which wraps HistoryAccessor method calls to catch errors from
a corrupt SQLite database, move the old database out of the way, and create
a new one.
We avoid clobbering larger databases because this may be triggered due to filesystem issues,
not just a corrupt file.
"""
try:
return f(self, *a, **kw)
except (DatabaseError, OperationalError) as e:
self._corrupt_db_counter += 1
self.log.error("Failed to open SQLite history %s (%s).", self.hist_file, e)
if self.hist_file != ':memory:':
if self._corrupt_db_counter > self._corrupt_db_limit:
self.hist_file = ':memory:'
self.log.error("Failed to load history too many times, history will not be saved.")
elif os.path.isfile(self.hist_file):
# move the file out of the way
base, ext = os.path.splitext(self.hist_file)
size = os.stat(self.hist_file).st_size
if size >= _SAVE_DB_SIZE:
# if there's significant content, avoid clobbering
now = datetime.datetime.now().isoformat().replace(':', '.')
newpath = base + '-corrupt-' + now + ext
# don't clobber previous corrupt backups
for i in range(100):
if not os.path.isfile(newpath):
break
else:
newpath = base + '-corrupt-' + now + (u'-%i' % i) + ext
else:
# not much content, possibly empty; don't worry about clobbering
# maybe we should just delete it?
newpath = base + '-corrupt' + ext
os.rename(self.hist_file, newpath)
self.log.error("History file was moved to %s and a new file created.", newpath)
self.init_db()
return []
else:
# Failed with :memory:, something serious is wrong
raise
class HistoryAccessorBase(LoggingConfigurable):
"""An abstract class for History Accessors """
def get_tail(self, n=10, raw=True, output=False, include_latest=False):
raise NotImplementedError
def search(self, pattern="*", raw=True, search_raw=True,
output=False, n=None, unique=False):
raise NotImplementedError
def get_range(self, session, start=1, stop=None, raw=True,output=False):
raise NotImplementedError
def get_range_by_str(self, rangestr, raw=True, output=False):
raise NotImplementedError
class HistoryAccessor(HistoryAccessorBase):
"""Access the history database without adding to it.
This is intended for use by standalone history tools. yap_ipython shells use
HistoryManager, below, which is a subclass of this."""
# counter for init_db retries, so we don't keep trying over and over
_corrupt_db_counter = 0
# after two failures, fallback on :memory:
_corrupt_db_limit = 2
# String holding the path to the history file
hist_file = Unicode(
help="""Path to file to use for SQLite history database.
By default, yap_ipython will put the history database in the yap_ipython
profile directory. If you would rather share one history among
profiles, you can set this value in each, so that they are consistent.
Due to an issue with fcntl, SQLite is known to misbehave on some NFS
mounts. If you see yap_ipython hanging, try setting this to something on a
local disk, e.g::
ipython --HistoryManager.hist_file=/tmp/ipython_hist.sqlite
you can also use the specific value `:memory:` (including the colon
at both ends but not the backticks), to avoid creating a history file.
""").tag(config=True)
enabled = Bool(True,
help="""enable the SQLite history
set enabled=False to disable the SQLite history,
in which case there will be no stored history, no SQLite connection,
and no background saving thread. This may be necessary in some
threaded environments where yap_ipython is embedded.
"""
).tag(config=True)
connection_options = Dict(
help="""Options for configuring the SQLite connection
These options are passed as keyword args to sqlite3.connect
when establishing database connections.
"""
).tag(config=True)
# The SQLite database
db = Any()
@observe('db')
def _db_changed(self, change):
"""validate the db, since it can be an Instance of two different types"""
new = change['new']
connection_types = (DummyDB,)
if sqlite3 is not None:
connection_types = (DummyDB, sqlite3.Connection)
if not isinstance(new, connection_types):
msg = "%s.db must be sqlite3 Connection or DummyDB, not %r" % \
(self.__class__.__name__, new)
raise TraitError(msg)
def __init__(self, profile='default', hist_file=u'', **traits):
"""Create a new history accessor.
Parameters
----------
profile : str
The name of the profile from which to open history.
hist_file : str
Path to an SQLite history database stored by yap_ipython. If specified,
hist_file overrides profile.
config : :class:`~traitlets.config.loader.Config`
Config object. hist_file can also be set through this.
"""
# We need a pointer back to the shell for various tasks.
super(HistoryAccessor, self).__init__(**traits)
# defer setting hist_file from kwarg until after init,
# otherwise the default kwarg value would clobber any value
# set by config
if hist_file:
self.hist_file = hist_file
if self.hist_file == u'':
# No one has set the hist_file, yet.
self.hist_file = self._get_hist_file_name(profile)
if sqlite3 is None and self.enabled:
warn("yap_ipython History requires SQLite, your history will not be saved")
self.enabled = False
self.init_db()
def _get_hist_file_name(self, profile='default'):
"""Find the history file for the given profile name.
This is overridden by the HistoryManager subclass, to use the shell's
active profile.
Parameters
----------
profile : str
The name of a profile which has a history file.
"""
return os.path.join(locate_profile(profile), 'history.sqlite')
@catch_corrupt_db
def init_db(self):
"""Connect to the database, and create tables if necessary."""
if not self.enabled:
self.db = DummyDB()
return
# use detect_types so that timestamps return datetime objects
kwargs = dict(detect_types=sqlite3.PARSE_DECLTYPES|sqlite3.PARSE_COLNAMES)
kwargs.update(self.connection_options)
self.db = sqlite3.connect(self.hist_file, **kwargs)
self.db.execute("""CREATE TABLE IF NOT EXISTS sessions (session integer
primary key autoincrement, start timestamp,
end timestamp, num_cmds integer, remark text)""")
self.db.execute("""CREATE TABLE IF NOT EXISTS history
(session integer, line integer, source text, source_raw text,
PRIMARY KEY (session, line))""")
# Output history is optional, but ensure the table's there so it can be
# enabled later.
self.db.execute("""CREATE TABLE IF NOT EXISTS output_history
(session integer, line integer, output text,
PRIMARY KEY (session, line))""")
self.db.commit()
# success! reset corrupt db count
self._corrupt_db_counter = 0
def writeout_cache(self):
"""Overridden by HistoryManager to dump the cache before certain
database lookups."""
pass
## -------------------------------
## Methods for retrieving history:
## -------------------------------
def _run_sql(self, sql, params, raw=True, output=False):
"""Prepares and runs an SQL query for the history database.
Parameters
----------
sql : str
Any filtering expressions to go after SELECT ... FROM ...
params : tuple
Parameters passed to the SQL query (to replace "?")
raw, output : bool
See :meth:`get_range`
Returns
-------
Tuples as :meth:`get_range`
"""
toget = 'source_raw' if raw else 'source'
sqlfrom = "history"
if output:
sqlfrom = "history LEFT JOIN output_history USING (session, line)"
toget = "history.%s, output_history.output" % toget
cur = self.db.execute("SELECT session, line, %s FROM %s " %\
(toget, sqlfrom) + sql, params)
if output: # Regroup into 3-tuples, and parse JSON
return ((ses, lin, (inp, out)) for ses, lin, inp, out in cur)
return cur
@needs_sqlite
@catch_corrupt_db
def get_session_info(self, session):
"""Get info about a session.
Parameters
----------
session : int
Session number to retrieve.
Returns
-------
session_id : int
Session ID number
start : datetime
Timestamp for the start of the session.
end : datetime
Timestamp for the end of the session, or None if yap_ipython crashed.
num_cmds : int
Number of commands run, or None if yap_ipython crashed.
remark : unicode
A manually set description.
"""
query = "SELECT * from sessions where session == ?"
return self.db.execute(query, (session,)).fetchone()
@catch_corrupt_db
def get_last_session_id(self):
"""Get the last session ID currently in the database.
Within yap_ipython, this should be the same as the value stored in
:attr:`HistoryManager.session_number`.
"""
for record in self.get_tail(n=1, include_latest=True):
return record[0]
@catch_corrupt_db
def get_tail(self, n=10, raw=True, output=False, include_latest=False):
"""Get the last n lines from the history database.
Parameters
----------
n : int
The number of lines to get
raw, output : bool
See :meth:`get_range`
include_latest : bool
If False (default), n+1 lines are fetched, and the latest one
is discarded. This is intended for use where the function is
called by a user command, so that the command itself is not returned.
Returns
-------
Tuples as :meth:`get_range`
"""
self.writeout_cache()
if not include_latest:
n += 1
cur = self._run_sql("ORDER BY session DESC, line DESC LIMIT ?",
(n,), raw=raw, output=output)
if not include_latest:
return reversed(list(cur)[1:])
return reversed(list(cur))
@catch_corrupt_db
def search(self, pattern="*", raw=True, search_raw=True,
output=False, n=None, unique=False):
"""Search the database using unix glob-style matching (wildcards
* and ?).
Parameters
----------
pattern : str
The wildcarded pattern to match when searching
search_raw : bool
If True, search the raw input, otherwise, the parsed input
raw, output : bool
See :meth:`get_range`
n : None or int
If an integer is given, it defines the limit of
returned entries.
unique : bool
When it is true, return only unique entries.
Returns
-------
Tuples as :meth:`get_range`
"""
tosearch = "source_raw" if search_raw else "source"
if output:
tosearch = "history." + tosearch
self.writeout_cache()
sqlform = "WHERE %s GLOB ?" % tosearch
params = (pattern,)
if unique:
sqlform += ' GROUP BY {0}'.format(tosearch)
if n is not None:
sqlform += " ORDER BY session DESC, line DESC LIMIT ?"
params += (n,)
elif unique:
sqlform += " ORDER BY session, line"
cur = self._run_sql(sqlform, params, raw=raw, output=output)
if n is not None:
return reversed(list(cur))
return cur
@catch_corrupt_db
def get_range(self, session, start=1, stop=None, raw=True,output=False):
"""Retrieve input by session.
Parameters
----------
session : int
Session number to retrieve.
start : int
First line to retrieve.
stop : int
End of line range (excluded from output itself). If None, retrieve
to the end of the session.
raw : bool
If True, return untranslated input
output : bool
If True, attempt to include output. This will be 'real' Python
objects for the current session, or text reprs from previous
sessions if db_log_output was enabled at the time. Where no output
is found, None is used.
Returns
-------
entries
An iterator over the desired lines. Each line is a 3-tuple, either
(session, line, input) if output is False, or
(session, line, (input, output)) if output is True.
"""
if stop:
lineclause = "line >= ? AND line < ?"
params = (session, start, stop)
else:
lineclause = "line>=?"
params = (session, start)
return self._run_sql("WHERE session==? AND %s" % lineclause,
params, raw=raw, output=output)
def get_range_by_str(self, rangestr, raw=True, output=False):
"""Get lines of history from a string of ranges, as used by magic
commands %hist, %save, %macro, etc.
Parameters
----------
rangestr : str
A string specifying ranges, e.g. "5 ~2/1-4". See
:func:`magic_history` for full details.
raw, output : bool
As :meth:`get_range`
Returns
-------
Tuples as :meth:`get_range`
"""
for sess, s, e in extract_hist_ranges(rangestr):
for line in self.get_range(sess, s, e, raw=raw, output=output):
yield line
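A minimal sketch of querying a saved history database through HistoryAccessor (the path is hypothetical):

acc = HistoryAccessor(hist_file='/tmp/example_history.sqlite')
# last five inputs across all sessions, including the most recent one
for session, line, source in acc.get_tail(5, include_latest=True):
    print('%d/%d: %s' % (session, line, source))
# glob-style search over the raw input
for session, line, source in acc.search('import*'):
    print('matched:', source)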
class HistoryManager(HistoryAccessor):
"""A class to organize all history-related functionality in one place.
"""
# Public interface
# An instance of the yap_ipython shell we are attached to
shell = Instance('yap_ipython.core.interactiveshell.InteractiveShellABC',
allow_none=True)
# Lists to hold processed and raw history. These start with a blank entry
# so that we can index them starting from 1
input_hist_parsed = List([""])
input_hist_raw = List([""])
# A list of directories visited during session
dir_hist = List()
@default('dir_hist')
def _dir_hist_default(self):
try:
return [os.getcwd()]
except OSError:
return []
# A dict of output history, keyed with ints from the shell's
# execution count.
output_hist = Dict()
# The text/plain repr of outputs.
output_hist_reprs = Dict()
# The number of the current session in the history database
session_number = Integer()
db_log_output = Bool(False,
help="Should the history database include output? (default: no)"
).tag(config=True)
db_cache_size = Integer(0,
help="Write to database every x commands (higher values save disk access & power).\n"
"Values of 1 or less effectively disable caching."
).tag(config=True)
# The input and output caches
db_input_cache = List()
db_output_cache = List()
# History saving in separate thread
save_thread = Instance('yap_ipython.core.history.HistorySavingThread',
allow_none=True)
save_flag = Instance(threading.Event, allow_none=True)
# Private interface
# Variables used to store the three last inputs from the user. On each new
# history update, we populate the user's namespace with these, shifted as
# necessary.
_i00 = Unicode(u'')
_i = Unicode(u'')
_ii = Unicode(u'')
_iii = Unicode(u'')
# A regex matching all forms of the exit command, so that we don't store
# them in the history (it's annoying to rewind the first entry and land on
# an exit call).
_exit_re = re.compile(r"(exit|quit)(\s*\(.*\))?$")
def __init__(self, shell=None, config=None, **traits):
"""Create a new history manager associated with a shell instance.
"""
# We need a pointer back to the shell for various tasks.
super(HistoryManager, self).__init__(shell=shell, config=config,
**traits)
self.save_flag = threading.Event()
self.db_input_cache_lock = threading.Lock()
self.db_output_cache_lock = threading.Lock()
try:
self.new_session()
except OperationalError:
self.log.error("Failed to create history session in %s. History will not be saved.",
self.hist_file, exc_info=True)
self.hist_file = ':memory:'
if self.enabled and self.hist_file != ':memory:':
self.save_thread = HistorySavingThread(self)
self.save_thread.start()
def _get_hist_file_name(self, profile=None):
"""Get default history file name based on the Shell's profile.
The profile parameter is ignored, but must exist for compatibility with
the parent class."""
profile_dir = self.shell.profile_dir.location
return os.path.join(profile_dir, 'history.sqlite')
@needs_sqlite
def new_session(self, conn=None):
"""Get a new session number."""
if conn is None:
conn = self.db
with conn:
cur = conn.execute("""INSERT INTO sessions VALUES (NULL, ?, NULL,
NULL, "") """, (datetime.datetime.now(),))
self.session_number = cur.lastrowid
def end_session(self):
"""Close the database session, filling in the end time and line count."""
self.writeout_cache()
with self.db:
self.db.execute("""UPDATE sessions SET end=?, num_cmds=? WHERE
session==?""", (datetime.datetime.now(),
len(self.input_hist_parsed)-1, self.session_number))
self.session_number = 0
def name_session(self, name):
"""Give the current session a name in the history database."""
with self.db:
self.db.execute("UPDATE sessions SET remark=? WHERE session==?",
(name, self.session_number))
def reset(self, new_session=True):
"""Clear the session history, releasing all object references, and
optionally open a new session."""
self.output_hist.clear()
# The directory history can't be completely empty
self.dir_hist[:] = [os.getcwd()]
if new_session:
if self.session_number:
self.end_session()
self.input_hist_parsed[:] = [""]
self.input_hist_raw[:] = [""]
self.new_session()
# ------------------------------
# Methods for retrieving history
# ------------------------------
def get_session_info(self, session=0):
"""Get info about a session.
Parameters
----------
session : int
Session number to retrieve. The current session is 0, and negative
numbers count back from current session, so -1 is the previous session.
Returns
-------
session_id : int
Session ID number
start : datetime
Timestamp for the start of the session.
end : datetime
Timestamp for the end of the session, or None if yap_ipython crashed.
num_cmds : int
Number of commands run, or None if yap_ipython crashed.
remark : unicode
A manually set description.
"""
if session <= 0:
session += self.session_number
return super(HistoryManager, self).get_session_info(session=session)
def _get_range_session(self, start=1, stop=None, raw=True, output=False):
"""Get input and output history from the current session. Called by
get_range, and takes similar parameters."""
input_hist = self.input_hist_raw if raw else self.input_hist_parsed
n = len(input_hist)
if start < 0:
start += n
if not stop or (stop > n):
stop = n
elif stop < 0:
stop += n
for i in range(start, stop):
if output:
line = (input_hist[i], self.output_hist_reprs.get(i))
else:
line = input_hist[i]
yield (0, i, line)
def get_range(self, session=0, start=1, stop=None, raw=True,output=False):
"""Retrieve input by session.
Parameters
----------
session : int
Session number to retrieve. The current session is 0, and negative
numbers count back from current session, so -1 is previous session.
start : int
First line to retrieve.
stop : int
End of line range (excluded from output itself). If None, retrieve
to the end of the session.
raw : bool
If True, return untranslated input
output : bool
If True, attempt to include output. This will be 'real' Python
objects for the current session, or text reprs from previous
sessions if db_log_output was enabled at the time. Where no output
is found, None is used.
Returns
-------
entries
An iterator over the desired lines. Each line is a 3-tuple, either
(session, line, input) if output is False, or
(session, line, (input, output)) if output is True.
"""
if session <= 0:
session += self.session_number
if session==self.session_number: # Current session
return self._get_range_session(start, stop, raw, output)
return super(HistoryManager, self).get_range(session, start, stop, raw,
output)
## ----------------------------
## Methods for storing history:
## ----------------------------
def store_inputs(self, line_num, source, source_raw=None):
"""Store source and raw input in history and create input cache
variables ``_i*``.
Parameters
----------
line_num : int
The prompt number of this input.
source : str
Python input.
source_raw : str, optional
If given, this is the raw input without any yap_ipython transformations
applied to it. If not given, ``source`` is used.
"""
if source_raw is None:
source_raw = source
source = source.rstrip('\n')
source_raw = source_raw.rstrip('\n')
# do not store exit/quit commands
if self._exit_re.match(source_raw.strip()):
return
self.input_hist_parsed.append(source)
self.input_hist_raw.append(source_raw)
with self.db_input_cache_lock:
self.db_input_cache.append((line_num, source, source_raw))
# Trigger to flush cache and write to DB.
if len(self.db_input_cache) >= self.db_cache_size:
self.save_flag.set()
# update the auto _i variables
self._iii = self._ii
self._ii = self._i
self._i = self._i00
self._i00 = source_raw
# hackish access to user namespace to create _i1,_i2... dynamically
new_i = '_i%s' % line_num
to_main = {'_i': self._i,
'_ii': self._ii,
'_iii': self._iii,
new_i : self._i00 }
if self.shell is not None:
self.shell.push(to_main, interactive=False)
def store_output(self, line_num):
"""If database output logging is enabled, this saves all the
outputs from the indicated prompt number to the database. It's
called by run_cell after code has been executed.
Parameters
----------
line_num : int
The line number from which to save outputs
"""
if (not self.db_log_output) or (line_num not in self.output_hist_reprs):
return
output = self.output_hist_reprs[line_num]
with self.db_output_cache_lock:
self.db_output_cache.append((line_num, output))
if self.db_cache_size <= 1:
self.save_flag.set()
def _writeout_input_cache(self, conn):
with conn:
for line in self.db_input_cache:
conn.execute("INSERT INTO history VALUES (?, ?, ?, ?)",
(self.session_number,)+line)
def _writeout_output_cache(self, conn):
with conn:
for line in self.db_output_cache:
conn.execute("INSERT INTO output_history VALUES (?, ?, ?)",
(self.session_number,)+line)
@needs_sqlite
def writeout_cache(self, conn=None):
"""Write any entries in the cache to the database."""
if conn is None:
conn = self.db
with self.db_input_cache_lock:
try:
self._writeout_input_cache(conn)
except sqlite3.IntegrityError:
self.new_session(conn)
print("ERROR! Session/line number was not unique in",
"database. History logging moved to new session",
self.session_number)
try:
# Try writing to the new session. If this fails, don't
# recurse
self._writeout_input_cache(conn)
except sqlite3.IntegrityError:
pass
finally:
self.db_input_cache = []
with self.db_output_cache_lock:
try:
self._writeout_output_cache(conn)
except sqlite3.IntegrityError:
print("!! Session/line number for output was not unique",
"in database. Output will not be stored.")
finally:
self.db_output_cache = []
class HistorySavingThread(threading.Thread):
"""This thread takes care of writing history to the database, so that
the UI isn't held up while that happens.
It waits for the HistoryManager's save_flag to be set, then writes out
the history cache. The main thread is responsible for setting the flag when
the cache size reaches a defined threshold."""
daemon = True
stop_now = False
enabled = True
def __init__(self, history_manager):
super(HistorySavingThread, self).__init__(name="IPythonHistorySavingThread")
self.history_manager = history_manager
self.enabled = history_manager.enabled
atexit.register(self.stop)
@needs_sqlite
def run(self):
# We need a separate db connection per thread:
try:
self.db = sqlite3.connect(self.history_manager.hist_file,
**self.history_manager.connection_options
)
while True:
self.history_manager.save_flag.wait()
if self.stop_now:
self.db.close()
return
self.history_manager.save_flag.clear()
self.history_manager.writeout_cache(self.db)
except Exception as e:
print(("The history saving thread hit an unexpected error (%s)."
"History will not be written to the database.") % repr(e))
def stop(self):
"""This can be called from the main thread to safely stop this thread.
Note that it does not attempt to write out remaining history before
exiting. That should be done by calling the HistoryManager's
end_session method."""
self.stop_now = True
self.history_manager.save_flag.set()
self.join()
# To match, e.g. ~5/8-~2/3
range_re = re.compile(r"""
((?P<startsess>~?\d+)/)?
(?P<start>\d+)?
((?P<sep>[\-:])
((?P<endsess>~?\d+)/)?
(?P<end>\d+))?
$""", re.VERBOSE)
def extract_hist_ranges(ranges_str):
"""Turn a string of history ranges into 3-tuples of (session, start, stop).
Examples
--------
>>> list(extract_hist_ranges("~8/5-~7/4 2"))
[(-8, 5, None), (-7, 1, 5), (0, 2, 3)]
"""
for range_str in ranges_str.split():
rmatch = range_re.match(range_str)
if not rmatch:
continue
start = rmatch.group("start")
if start:
start = int(start)
end = rmatch.group("end")
# If no end specified, get (a, a + 1)
end = int(end) if end else start + 1
else: # start not specified
if not rmatch.group('startsess'): # no startsess
continue
start = 1
end = None # provide the entire session hist
if rmatch.group("sep") == "-": # 1-3 == 1:4 --> [1, 2, 3]
end += 1
startsess = rmatch.group("startsess") or "0"
endsess = rmatch.group("endsess") or startsess
startsess = int(startsess.replace("~","-"))
endsess = int(endsess.replace("~","-"))
assert endsess >= startsess, "end session must not be earlier than start session"
if endsess == startsess:
yield (startsess, start, end)
continue
# Multiple sessions in one range:
yield (startsess, start, None)
for sess in range(startsess+1, endsess):
yield (sess, 1, None)
yield (endsess, 1, end)
def _format_lineno(session, line):
"""Helper function to format line numbers properly."""
if session == 0:
return str(line)
return "%s#%s" % (session, line)

View File

@@ -0,0 +1,161 @@
# encoding: utf-8
"""
An application for managing yap_ipython history.
To be invoked as the `ipython history` subcommand.
"""
import os
import sqlite3
from traitlets.config.application import Application
from yap_ipython.core.application import BaseYAPApplication
from traitlets import Bool, Int, Dict
from yap_ipython.utils.io import ask_yes_no
trim_hist_help = """Trim the yap_ipython history database to the last 1000 entries.
This actually copies the last 1000 entries to a new database, and then replaces
the old file with the new. Use the `--keep=` argument to specify a number
other than 1000.
"""
clear_hist_help = """Clear the yap_ipython history database, deleting all entries.
Because this is a destructive operation, yap_ipython will prompt the user if they
really want to do this. Passing a `-f` flag will force clearing without a
prompt.
This is a handy alias for `ipython history trim --keep=0`
"""
class HistoryTrim(BaseYAPApplication):
description = trim_hist_help
backup = Bool(False,
help="Keep the old history file as history.sqlite.<N>"
).tag(config=True)
keep = Int(1000,
help="Number of recent lines to keep in the database."
).tag(config=True)
flags = Dict(dict(
backup = ({'HistoryTrim' : {'backup' : True}},
backup.help
)
))
aliases=Dict(dict(
keep = 'HistoryTrim.keep'
))
def start(self):
profile_dir = self.profile_dir.location
hist_file = os.path.join(profile_dir, 'history.sqlite')
con = sqlite3.connect(hist_file)
# Grab the recent history from the current database.
inputs = list(con.execute('SELECT session, line, source, source_raw FROM '
'history ORDER BY session DESC, line DESC LIMIT ?', (self.keep+1,)))
if len(inputs) <= self.keep:
print("There are already at most %d entries in the history database." % self.keep)
print("Not doing anything. Use --keep= argument to keep fewer entries")
return
print("Trimming history to the most recent %d entries." % self.keep)
inputs.pop() # Remove the extra element we got to check the length.
inputs.reverse()
if inputs:
first_session = inputs[0][0]
outputs = list(con.execute('SELECT session, line, output FROM '
'output_history WHERE session >= ?', (first_session,)))
sessions = list(con.execute('SELECT session, start, end, num_cmds, remark FROM '
'sessions WHERE session >= ?', (first_session,)))
con.close()
# Create the new history database.
new_hist_file = os.path.join(profile_dir, 'history.sqlite.new')
i = 0
while os.path.exists(new_hist_file):
# Make sure we don't interfere with an existing file.
i += 1
new_hist_file = os.path.join(profile_dir, 'history.sqlite.new'+str(i))
new_db = sqlite3.connect(new_hist_file)
new_db.execute("""CREATE TABLE IF NOT EXISTS sessions (session integer
primary key autoincrement, start timestamp,
end timestamp, num_cmds integer, remark text)""")
new_db.execute("""CREATE TABLE IF NOT EXISTS history
(session integer, line integer, source text, source_raw text,
PRIMARY KEY (session, line))""")
new_db.execute("""CREATE TABLE IF NOT EXISTS output_history
(session integer, line integer, output text,
PRIMARY KEY (session, line))""")
new_db.commit()
if inputs:
with new_db:
# Add the recent history into the new database.
new_db.executemany('insert into sessions values (?,?,?,?,?)', sessions)
new_db.executemany('insert into history values (?,?,?,?)', inputs)
new_db.executemany('insert into output_history values (?,?,?)', outputs)
new_db.close()
if self.backup:
i = 1
backup_hist_file = os.path.join(profile_dir, 'history.sqlite.old.%d' % i)
while os.path.exists(backup_hist_file):
i += 1
backup_hist_file = os.path.join(profile_dir, 'history.sqlite.old.%d' % i)
os.rename(hist_file, backup_hist_file)
print("Backed up longer history file to", backup_hist_file)
else:
os.remove(hist_file)
os.rename(new_hist_file, hist_file)
class HistoryClear(HistoryTrim):
description = clear_hist_help
keep = Int(0,
help="Number of recent lines to keep in the database.")
force = Bool(False,
help="Don't prompt user for confirmation"
).tag(config=True)
flags = Dict(dict(
force = ({'HistoryClear' : {'force' : True}},
force.help),
f = ({'HistoryTrim' : {'force' : True}},
force.help
)
))
aliases = Dict()
def start(self):
if self.force or ask_yes_no("Really delete all ipython history? ",
default="no", interrupt="no"):
HistoryTrim.start(self)
class HistoryApp(Application):
name = u'ipython-history'
description = "Manage the yap_ipython history database."
subcommands = Dict(dict(
trim = (HistoryTrim, HistoryTrim.description.splitlines()[0]),
clear = (HistoryClear, HistoryClear.description.splitlines()[0]),
))
def start(self):
if self.subapp is None:
print("No subcommand specified. Must specify one of: %s" % \
(self.subcommands.keys()))
print()
self.print_description()
self.print_subcommands()
self.exit(1)
else:
return self.subapp.start()
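# --- Illustrative usage sketch (not part of the committed file) --------------
# The subcommands above are normally reached from the command line, e.g.
#   ipython history trim --keep=500
#   ipython history clear -f
# A rough programmatic equivalent, assuming this file is importable as
# yap_ipython.core.historyapp and that the traitlets Application machinery is
# available, would be:
from yap_ipython.core.historyapp import HistoryTrim

HistoryTrim.launch_instance(argv=['--keep=500', '--backup'])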

View File

@@ -0,0 +1,229 @@
"""Hooks for yap_ipython.
In Python, it is possible to overwrite any method of any object if you really
want to. But yap_ipython exposes a few 'hooks', methods which are *designed* to
be overwritten by users for customization purposes. This module defines the
default versions of all such hooks, which get used by yap_ipython if not
overridden by the user.
Hooks are simple functions, but they should be declared with ``self`` as their
first argument, because when activated they are registered into yap_ipython as
instance methods. The self argument will be the yap_ipython running instance
itself, so hooks have full access to the entire yap_ipython object.
If you wish to define a new hook and activate it, you can make an :doc:`extension
</config/extensions/index>` or a :ref:`startup script <startup_files>`. For
example, you could use a startup file like this::
import os
def calljed(self,filename, linenum):
"My editor hook calls the jed editor directly."
print "Calling my own editor, jed ..."
if os.system('jed +%d %s' % (linenum,filename)) != 0:
raise TryNext()
def load_ipython_extension(ip):
ip.set_hook('editor', calljed)
"""
#*****************************************************************************
# Copyright (C) 2005 Fernando Perez. <fperez@colorado.edu>
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#*****************************************************************************
import os
import subprocess
import warnings
import sys
from yap_ipython.core.error import TryNext
# List here all the default hooks. For now it's just the editor functions
# but over time we'll move here all the public API for user-accessible things.
__all__ = ['editor', 'synchronize_with_editor',
'shutdown_hook', 'late_startup_hook',
'show_in_pager','pre_prompt_hook',
'pre_run_code_hook', 'clipboard_get']
deprecated = {'pre_run_code_hook': "a callback for the 'pre_execute' or 'pre_run_cell' event",
'late_startup_hook': "a callback for the 'shell_initialized' event",
'shutdown_hook': "the atexit module",
}
def editor(self, filename, linenum=None, wait=True):
"""Open the default editor at the given filename and linenumber.
This is yap_ipython's default editor hook, you can use it as an example to
write your own modified one. To set your own editor function as the
new editor hook, call ip.set_hook('editor',yourfunc)."""
# yap_ipython configures a default editor at startup by reading $EDITOR from
# the environment, and falling back on vi (unix) or notepad (win32).
editor = self.editor
# marker for at which line to open the file (for existing objects)
if linenum is None or editor=='notepad':
linemark = ''
else:
linemark = '+%d' % int(linenum)
# Enclose in quotes if necessary and legal
if ' ' in editor and os.path.isfile(editor) and editor[0] != '"':
editor = '"%s"' % editor
# Call the actual editor
proc = subprocess.Popen('%s %s %s' % (editor, linemark, filename),
shell=True)
if wait and proc.wait() != 0:
raise TryNext()
import tempfile
from yap_ipython.utils.decorators import undoc
@undoc
def fix_error_editor(self,filename,linenum,column,msg):
"""DEPRECATED
Open the editor at the given filename, linenumber, column and
show an error message. This is used for correcting syntax errors.
The current implementation only has special support for the VIM editor,
and falls back on the 'editor' hook if VIM is not used.
Call ip.set_hook('fix_error_editor',yourfunc) to use your own function,
"""
warnings.warn("""
`fix_error_editor` is deprecated as of yap_ipython 6.0 and will be removed
in future versions. It appears to have been used only for automatically fixing
syntax errors, a feature that has been broken for a few years and has thus been removed. If you
happened to use this function and still need it please make your voice heard on
the mailing list ipython-dev@python.org , or on the GitHub Issue tracker:
https://github.com/ipython/ipython/issues/9649 """, UserWarning)
def vim_quickfix_file():
t = tempfile.NamedTemporaryFile()
t.write('%s:%d:%d:%s\n' % (filename,linenum,column,msg))
t.flush()
return t
if os.path.basename(self.editor) != 'vim':
self.hooks.editor(filename,linenum)
return
t = vim_quickfix_file()
try:
if os.system('vim --cmd "set errorformat=%f:%l:%c:%m" -q ' + t.name):
raise TryNext()
finally:
t.close()
def synchronize_with_editor(self, filename, linenum, column):
pass
class CommandChainDispatcher:
""" Dispatch calls to a chain of commands until some func can handle it
Usage: instantiate, execute "add" to add commands (with optional
priority), execute normally via f() calling mechanism.
"""
def __init__(self,commands=None):
if commands is None:
self.chain = []
else:
self.chain = commands
def __call__(self,*args, **kw):
""" Command chain is called just like normal func.
This will call all funcs in chain with the same args as were given to
this function, and return the result of first func that didn't raise
TryNext"""
last_exc = TryNext()
for prio,cmd in self.chain:
#print "prio",prio,"cmd",cmd #dbg
try:
return cmd(*args, **kw)
except TryNext as exc:
last_exc = exc
# if no function will accept it, raise TryNext up to the caller
raise last_exc
def __str__(self):
return str(self.chain)
def add(self, func, priority=0):
""" Add a func to the cmd chain with given priority """
self.chain.append((priority, func))
self.chain.sort(key=lambda x: x[0])
def __iter__(self):
""" Return all objects in chain.
Handy if the objects are not callable.
"""
return iter(self.chain)
def shutdown_hook(self):
""" default shutdown hook
Typically, shutdown hooks should raise TryNext so all shutdown ops are done
"""
#print "default shutdown hook ok" # dbg
return
def late_startup_hook(self):
""" Executed after ipython has been constructed and configured
"""
#print "default startup hook ok" # dbg
def show_in_pager(self, data, start, screen_lines):
""" Run a string through pager """
# raising TryNext here will use the default paging functionality
raise TryNext
def pre_prompt_hook(self):
""" Run before displaying the next prompt
Use this e.g. to display output from asynchronous operations (in order
to not mess up text entry)
"""
return None
def pre_run_code_hook(self):
""" Executed before running the (prefiltered) code in yap_ipython """
return None
def clipboard_get(self):
""" Get text from the clipboard.
"""
from yap_ipython.lib.clipboard import (
osx_clipboard_get, tkinter_clipboard_get,
win32_clipboard_get
)
if sys.platform == 'win32':
chain = [win32_clipboard_get, tkinter_clipboard_get]
elif sys.platform == 'darwin':
chain = [osx_clipboard_get, tkinter_clipboard_get]
else:
chain = [tkinter_clipboard_get]
dispatcher = CommandChainDispatcher()
for func in chain:
dispatcher.add(func)
text = dispatcher()
return text
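# --- Illustrative usage sketch (not part of the committed file) --------------
# A minimal demonstration of how CommandChainDispatcher falls through on
# TryNext; handler names are made up, and the module is assumed importable as
# yap_ipython.core.hooks.
from yap_ipython.core.error import TryNext
from yap_ipython.core.hooks import CommandChainDispatcher

def picky_handler(text):
    # Pretend this handler only knows how to deal with short strings.
    if len(text) > 10:
        raise TryNext()
    return text.upper()

def fallback_handler(text):
    return text

dispatcher = CommandChainDispatcher()
dispatcher.add(picky_handler, priority=10)
dispatcher.add(fallback_handler, priority=50)

print(dispatcher("hi"))                      # 'HI' (first handler accepts)
print(dispatcher("a rather longer string"))  # falls through to the fallback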

View File

@@ -0,0 +1,766 @@
"""Input handling and transformation machinery.
The first class in this module, :class:`InputSplitter`, is designed to tell when
input from a line-oriented frontend is complete and should be executed, and when
the user should be prompted for another line of code instead. The name 'input
splitter' is largely for historical reasons.
A companion, :class:`IPythonInputSplitter`, provides the same functionality but
with full support for the extended yap_ipython syntax (magics, system calls, etc).
The code to actually do these transformations is in :mod:`yap_ipython.core.inputtransformer`.
:class:`IPythonInputSplitter` feeds the raw code to the transformers in order
and stores the results.
For more details, see the class docstrings below.
"""
# Copyright (c) yap_ipython Development Team.
# Distributed under the terms of the Modified BSD License.
import ast
import codeop
import io
import re
import sys
import tokenize
import warnings
from yap_ipython.utils.py3compat import cast_unicode
from yap_ipython.core.inputtransformer import (leading_indent,
classic_prompt,
ipy_prompt,
cellmagic,
assemble_logical_lines,
help_end,
escaped_commands,
assign_from_magic,
assign_from_system,
assemble_python_lines,
)
# These are available in this module for backwards compatibility.
from yap_ipython.core.inputtransformer import (ESC_SHELL, ESC_SH_CAP, ESC_HELP,
ESC_HELP2, ESC_MAGIC, ESC_MAGIC2,
ESC_QUOTE, ESC_QUOTE2, ESC_PAREN, ESC_SEQUENCES)
#-----------------------------------------------------------------------------
# Utilities
#-----------------------------------------------------------------------------
# FIXME: These are general-purpose utilities that later can be moved to the
# general ward. Kept here for now because we're being very strict about test
# coverage with this code, and this lets us ensure that we keep 100% coverage
# while developing.
# compiled regexps for autoindent management
dedent_re = re.compile('|'.join([
r'^\s+raise(\s.*)?$', # raise statement (+ space + other stuff, maybe)
r'^\s+raise\([^\)]*\).*$', # wacky raise with immediate open paren
r'^\s+return(\s.*)?$', # normal return (+ space + other stuff, maybe)
r'^\s+return\([^\)]*\).*$', # wacky return with immediate open paren
r'^\s+pass\s*$', # pass (optionally followed by trailing spaces)
r'^\s+break\s*$', # break (optionally followed by trailing spaces)
r'^\s+continue\s*$', # continue (optionally followed by trailing spaces)
]))
ini_spaces_re = re.compile(r'^([ \t\r\f\v]+)')
# regexp to match pure comment lines so we don't accidentally insert 'if 1:'
# before pure comments
comment_line_re = re.compile(r'^\s*\#')
def num_ini_spaces(s):
"""Return the number of initial spaces in a string.
Note that tabs are counted as a single space. For now, we do *not* support
mixing of tabs and spaces in the user's input.
Parameters
----------
s : string
Returns
-------
n : int
"""
ini_spaces = ini_spaces_re.match(s)
if ini_spaces:
return ini_spaces.end()
else:
return 0
# Fake token types for partial_tokenize:
INCOMPLETE_STRING = tokenize.N_TOKENS
IN_MULTILINE_STATEMENT = tokenize.N_TOKENS + 1
# The 2 classes below have the same API as TokenInfo, but don't try to look up
# a token type name that they won't find.
class IncompleteString:
type = exact_type = INCOMPLETE_STRING
def __init__(self, s, start, end, line):
self.s = s
self.start = start
self.end = end
self.line = line
class InMultilineStatement:
type = exact_type = IN_MULTILINE_STATEMENT
def __init__(self, pos, line):
self.s = ''
self.start = self.end = pos
self.line = line
def partial_tokens(s):
"""Iterate over tokens from a possibly-incomplete string of code.
This adds two special token types: INCOMPLETE_STRING and
IN_MULTILINE_STATEMENT. These can only occur as the last token yielded, and
represent the two main ways for code to be incomplete.
"""
readline = io.StringIO(s).readline
token = tokenize.TokenInfo(tokenize.NEWLINE, '', (1, 0), (1, 0), '')
try:
for token in tokenize.generate_tokens(readline):
yield token
except tokenize.TokenError as e:
# catch EOF error
lines = s.splitlines(keepends=True)
end = len(lines), len(lines[-1])
if 'multi-line string' in e.args[0]:
l, c = start = token.end
s = lines[l-1][c:] + ''.join(lines[l:])
yield IncompleteString(s, start, end, lines[-1])
elif 'multi-line statement' in e.args[0]:
yield InMultilineStatement(end, lines[-1])
else:
raise
def find_next_indent(code):
"""Find the number of spaces for the next line of indentation"""
tokens = list(partial_tokens(code))
if tokens[-1].type == tokenize.ENDMARKER:
tokens.pop()
if not tokens:
return 0
while (tokens[-1].type in {tokenize.DEDENT, tokenize.NEWLINE, tokenize.COMMENT}):
tokens.pop()
if tokens[-1].type == INCOMPLETE_STRING:
# Inside a multiline string
return 0
# Find the indents used before
prev_indents = [0]
def _add_indent(n):
if n != prev_indents[-1]:
prev_indents.append(n)
tokiter = iter(tokens)
for tok in tokiter:
if tok.type in {tokenize.INDENT, tokenize.DEDENT}:
_add_indent(tok.end[1])
elif (tok.type == tokenize.NL):
try:
_add_indent(next(tokiter).start[1])
except StopIteration:
break
last_indent = prev_indents.pop()
# If we've just opened a multiline statement (e.g. 'a = ['), indent more
if tokens[-1].type == IN_MULTILINE_STATEMENT:
if tokens[-2].exact_type in {tokenize.LPAR, tokenize.LSQB, tokenize.LBRACE}:
return last_indent + 4
return last_indent
if tokens[-1].exact_type == tokenize.COLON:
# Line ends with colon - indent
return last_indent + 4
if last_indent:
# Examine the last line for dedent cues - statements like return or
# raise which normally end a block of code.
last_line_starts = 0
for i, tok in enumerate(tokens):
if tok.type == tokenize.NEWLINE:
last_line_starts = i + 1
last_line_tokens = tokens[last_line_starts:]
names = [t.string for t in last_line_tokens if t.type == tokenize.NAME]
if names and names[0] in {'raise', 'return', 'pass', 'break', 'continue'}:
# Find the most recent indentation less than the current level
for indent in reversed(prev_indents):
if indent < last_indent:
return indent
return last_indent
def last_blank(src):
"""Determine if the input source ends in a blank.
A blank is either a newline or a line consisting of whitespace.
Parameters
----------
src : string
A single or multiline string.
"""
if not src: return False
ll = src.splitlines()[-1]
return (ll == '') or ll.isspace()
last_two_blanks_re = re.compile(r'\n\s*\n\s*$', re.MULTILINE)
last_two_blanks_re2 = re.compile(r'.+\n\s*\n\s+$', re.MULTILINE)
def last_two_blanks(src):
"""Determine if the input source ends in two blanks.
A blank is either a newline or a line consisting of whitespace.
Parameters
----------
src : string
A single or multiline string.
"""
if not src: return False
# The logic here is tricky: I couldn't get a regexp to work and pass all
# the tests, so I took a different approach: split the source by lines,
# grab the last two and prepend '###\n' as a stand-in for whatever was in
# the body before the last two lines. Then, with that structure, it's
# possible to analyze with two regexps. Not the most elegant solution, but
# it works. If anyone tries to change this logic, make sure to validate
# the whole test suite first!
new_src = '\n'.join(['###\n'] + src.splitlines()[-2:])
return (bool(last_two_blanks_re.match(new_src)) or
bool(last_two_blanks_re2.match(new_src)) )
def remove_comments(src):
"""Remove all comments from input source.
Note: comments are NOT recognized inside of strings!
Parameters
----------
src : string
A single or multiline input string.
Returns
-------
String with all Python comments removed.
"""
return re.sub('#.*', '', src)
def get_input_encoding():
"""Return the default standard input encoding.
If sys.stdin has no encoding, 'ascii' is returned."""
# There are strange environments for which sys.stdin.encoding is None. We
# ensure that a valid encoding is returned.
encoding = getattr(sys.stdin, 'encoding', None)
if encoding is None:
encoding = 'ascii'
return encoding
#-----------------------------------------------------------------------------
# Classes and functions for normal Python syntax handling
#-----------------------------------------------------------------------------
class InputSplitter(object):
r"""An object that can accumulate lines of Python source before execution.
This object is designed to be fed python source line-by-line, using
:meth:`push`. It will return on each push whether the currently pushed
code could be executed already. In addition, it provides a method called
:meth:`push_accepts_more` that can be used to query whether more input
can be pushed into a single interactive block.
This is a simple example of how an interactive terminal-based client can use
this tool::
isp = InputSplitter()
while isp.push_accepts_more():
indent = ' '*isp.indent_spaces
prompt = '>>> ' + indent
line = indent + input(prompt)
isp.push(line)
print('Input source was:\n', isp.source_reset())
"""
# A cache for storing the current indentation
# The first value stores the most recently processed source input
# The second value is the number of spaces for the current indentation
# If self.source matches the first value, the second value is a valid
# current indentation. Otherwise, the cache is invalid and the indentation
# must be recalculated.
_indent_spaces_cache = None, None
# String, indicating the default input encoding. It is computed by default
# at initialization time via get_input_encoding(), but it can be reset by a
# client with specific knowledge of the encoding.
encoding = ''
# String where the current full source input is stored, properly encoded.
# Reading this attribute is the normal way of querying the currently pushed
# source code, that has been properly encoded.
source = ''
# Code object corresponding to the current source. It is automatically
# synced to the source, so it can be queried at any time to obtain the code
# object; it will be None if the source doesn't compile to valid Python.
code = None
# Private attributes
# List with lines of input accumulated so far
_buffer = None
# Command compiler
_compile = None
# Boolean indicating whether the current block is complete
_is_complete = None
# Boolean indicating whether the current block has an unrecoverable syntax error
_is_invalid = False
def __init__(self):
"""Create a new InputSplitter instance.
"""
self._buffer = []
self._compile = codeop.CommandCompiler()
self.encoding = get_input_encoding()
def reset(self):
"""Reset the input buffer and associated state."""
self._buffer[:] = []
self.source = ''
self.code = None
self._is_complete = False
self._is_invalid = False
def source_reset(self):
"""Return the input source and perform a full reset.
"""
out = self.source
self.reset()
return out
def check_complete(self, source):
"""Return whether a block of code is ready to execute, or should be continued
This is a non-stateful API, and will reset the state of this InputSplitter.
Parameters
----------
source : string
Python input code, which can be multiline.
Returns
-------
status : str
One of 'complete', 'incomplete', or 'invalid' if source is not a
prefix of valid code.
indent_spaces : int or None
The number of spaces by which to indent the next line of code. If
status is not 'incomplete', this is None.
"""
self.reset()
try:
self.push(source)
except SyntaxError:
# Transformers in IPythonInputSplitter can raise SyntaxError,
# which push() will not catch.
return 'invalid', None
else:
if self._is_invalid:
return 'invalid', None
elif self.push_accepts_more():
return 'incomplete', self.get_indent_spaces()
else:
return 'complete', None
finally:
self.reset()
def push(self, lines):
"""Push one or more lines of input.
This stores the given lines and returns a status code indicating
whether the code forms a complete Python block or not.
Any exceptions generated in compilation are swallowed, but if an
exception was produced, the method returns True.
Parameters
----------
lines : string
One or more lines of Python input.
Returns
-------
is_complete : boolean
True if the current input source (the result of the current input
plus prior inputs) forms a complete Python execution block. Note that
this value is also stored as a private attribute (``_is_complete``), so it
can be queried at any time.
"""
self._store(lines)
source = self.source
# Before calling _compile(), reset the code object to None so that if an
# exception is raised in compilation, we don't mislead by having
# inconsistent code/source attributes.
self.code, self._is_complete = None, None
self._is_invalid = False
# Honor termination lines properly
if source.endswith('\\\n'):
return False
try:
with warnings.catch_warnings():
warnings.simplefilter('error', SyntaxWarning)
self.code = self._compile(source, symbol="exec")
# Invalid syntax can produce any of a number of different errors from
# inside the compiler, so we have to catch them all. Syntax errors
# immediately produce a 'ready' block, so the invalid Python can be
# sent to the kernel for evaluation with possible ipython
# special-syntax conversion.
except (SyntaxError, OverflowError, ValueError, TypeError,
MemoryError, SyntaxWarning):
self._is_complete = True
self._is_invalid = True
else:
# Compilation didn't produce any exceptions (though it may not have
# given a complete code object)
self._is_complete = self.code is not None
return self._is_complete
def push_accepts_more(self):
"""Return whether a block of interactive input can accept more input.
This method is meant to be used by line-oriented frontends, who need to
guess whether a block is complete or not based solely on prior and
current input lines. The InputSplitter considers it has a complete
interactive block and will not accept more input when either:
* A SyntaxError is raised
* The code is complete and consists of a single line or a single
non-compound statement
* The code is complete and has a blank line at the end
If the current input produces a syntax error, this method immediately
returns False but does *not* raise the syntax error exception, as
typically clients will want to send invalid syntax to an execution
backend which might convert the invalid syntax into valid Python via
one of the dynamic yap_ipython mechanisms.
"""
# With incomplete input, unconditionally accept more
# A syntax error also sets _is_complete to True - see push()
if not self._is_complete:
#print("Not complete") # debug
return True
# The user can make any (complete) input execute by leaving a blank line
last_line = self.source.splitlines()[-1]
if (not last_line) or last_line.isspace():
#print("Blank line") # debug
return False
# If there's just a single line or AST node, and we're flush left, as is
# the case after a simple statement such as 'a=1', we want to execute it
# straight away.
if self.get_indent_spaces() == 0:
if len(self.source.splitlines()) <= 1:
return False
try:
code_ast = ast.parse(u''.join(self._buffer))
except Exception:
#print("Can't parse AST") # debug
return False
else:
if len(code_ast.body) == 1 and \
not hasattr(code_ast.body[0], 'body'):
#print("Simple statement") # debug
return False
# General fallback - accept more code
return True
def get_indent_spaces(self):
sourcefor, n = self._indent_spaces_cache
if sourcefor == self.source:
return n
# self.source always has a trailing newline
n = find_next_indent(self.source[:-1])
self._indent_spaces_cache = (self.source, n)
return n
# Backwards compatibility. I think all code that used .indent_spaces was
# inside yap_ipython, but we can leave this here until yap_ipython 7 in case any
# other modules are using it. -TK, November 2017
indent_spaces = property(get_indent_spaces)
def _store(self, lines, buffer=None, store='source'):
"""Store one or more lines of input.
If input lines are not newline-terminated, a newline is automatically
appended."""
if buffer is None:
buffer = self._buffer
if lines.endswith('\n'):
buffer.append(lines)
else:
buffer.append(lines+'\n')
setattr(self, store, self._set_source(buffer))
def _set_source(self, buffer):
return u''.join(buffer)
class IPythonInputSplitter(InputSplitter):
"""An input splitter that recognizes all of yap_ipython's special syntax."""
# String with raw, untransformed input.
source_raw = ''
# Flag to track when a transformer has stored input that it hasn't given
# back yet.
transformer_accumulating = False
# Flag to track when assemble_python_lines has stored input that it hasn't
# given back yet.
within_python_line = False
# Private attributes
# List with lines of raw input accumulated so far.
_buffer_raw = None
def __init__(self, line_input_checker=True, physical_line_transforms=None,
logical_line_transforms=None, python_line_transforms=None):
super(IPythonInputSplitter, self).__init__()
self._buffer_raw = []
self._validate = True
if physical_line_transforms is not None:
self.physical_line_transforms = physical_line_transforms
else:
self.physical_line_transforms = [
leading_indent(),
classic_prompt(),
ipy_prompt(),
cellmagic(end_on_blank_line=line_input_checker),
]
self.assemble_logical_lines = assemble_logical_lines()
if logical_line_transforms is not None:
self.logical_line_transforms = logical_line_transforms
else:
self.logical_line_transforms = [
help_end(),
escaped_commands(),
assign_from_magic(),
assign_from_system(),
]
self.assemble_python_lines = assemble_python_lines()
if python_line_transforms is not None:
self.python_line_transforms = python_line_transforms
else:
# We don't use any of these at present
self.python_line_transforms = []
@property
def transforms(self):
"Quick access to all transformers."
return self.physical_line_transforms + \
[self.assemble_logical_lines] + self.logical_line_transforms + \
[self.assemble_python_lines] + self.python_line_transforms
@property
def transforms_in_use(self):
"""Transformers, excluding logical line transformers if we're in a
Python line."""
t = self.physical_line_transforms[:]
if not self.within_python_line:
t += [self.assemble_logical_lines] + self.logical_line_transforms
return t + [self.assemble_python_lines] + self.python_line_transforms
def reset(self):
"""Reset the input buffer and associated state."""
super(IPythonInputSplitter, self).reset()
self._buffer_raw[:] = []
self.source_raw = ''
self.transformer_accumulating = False
self.within_python_line = False
for t in self.transforms:
try:
t.reset()
except SyntaxError:
# Nothing that calls reset() expects to handle transformer
# errors
pass
def flush_transformers(self):
def _flush(transform, outs):
"""yield transformed lines
always strings, never None
transform: the current transform
outs: an iterable of previously transformed inputs.
Each may be multiline, which will be passed
one line at a time to transform.
"""
for out in outs:
for line in out.splitlines():
# push one line at a time
tmp = transform.push(line)
if tmp is not None:
yield tmp
# reset the transform
tmp = transform.reset()
if tmp is not None:
yield tmp
out = []
for t in self.transforms_in_use:
out = _flush(t, out)
out = list(out)
if out:
self._store('\n'.join(out))
def raw_reset(self):
"""Return raw input only and perform a full reset.
"""
out = self.source_raw
self.reset()
return out
def source_reset(self):
try:
self.flush_transformers()
return self.source
finally:
self.reset()
def push_accepts_more(self):
if self.transformer_accumulating:
return True
else:
return super(IPythonInputSplitter, self).push_accepts_more()
def transform_cell(self, cell):
"""Process and translate a cell of input.
"""
self.reset()
try:
self.push(cell)
self.flush_transformers()
return self.source
finally:
self.reset()
def push(self, lines):
"""Push one or more lines of yap_ipython input.
This stores the given lines and returns a status code indicating
whether the code forms a complete Python block or not, after processing
all input lines for special yap_ipython syntax.
Any exceptions generated in compilation are swallowed, but if an
exception was produced, the method returns True.
Parameters
----------
lines : string
One or more lines of Python input.
Returns
-------
is_complete : boolean
True if the current input source (the result of the current input
plus prior inputs) forms a complete Python execution block. Note that
this value is also stored as a private attribute (_is_complete), so it
can be queried at any time.
"""
# We must ensure all input is pure unicode
lines = cast_unicode(lines, self.encoding)
# ''.splitlines() --> [], but we need to push the empty line to transformers
lines_list = lines.splitlines()
if not lines_list:
lines_list = ['']
# Store raw source before applying any transformations to it. Note
# that this must be done *after* the reset() call that would otherwise
# flush the buffer.
self._store(lines, self._buffer_raw, 'source_raw')
transformed_lines_list = []
for line in lines_list:
transformed = self._transform_line(line)
if transformed is not None:
transformed_lines_list.append(transformed)
if transformed_lines_list:
transformed_lines = '\n'.join(transformed_lines_list)
return super(IPythonInputSplitter, self).push(transformed_lines)
else:
# Got nothing back from transformers - they must be waiting for
# more input.
return False
def _transform_line(self, line):
"""Push a line of input code through the various transformers.
Returns any output from the transformers, or None if a transformer
is accumulating lines.
Sets self.transformer_accumulating as a side effect.
"""
def _accumulating(dbg):
#print(dbg)
self.transformer_accumulating = True
return None
for transformer in self.physical_line_transforms:
line = transformer.push(line)
if line is None:
return _accumulating(transformer)
if not self.within_python_line:
line = self.assemble_logical_lines.push(line)
if line is None:
return _accumulating('acc logical line')
for transformer in self.logical_line_transforms:
line = transformer.push(line)
if line is None:
return _accumulating(transformer)
line = self.assemble_python_lines.push(line)
if line is None:
self.within_python_line = True
return _accumulating('acc python line')
else:
self.within_python_line = False
for transformer in self.python_line_transforms:
line = transformer.push(line)
if line is None:
return _accumulating(transformer)
#print("transformers clear") #debug
self.transformer_accumulating = False
return line
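# --- Illustrative usage sketch (not part of the committed file) --------------
# Feeding lines to IPythonInputSplitter, assuming this file is importable as
# yap_ipython.core.inputsplitter; the cell contents below are sample input.
from yap_ipython.core.inputsplitter import IPythonInputSplitter

isp = IPythonInputSplitter()
isp.push('for i in range(3):')
print(isp.push_accepts_more())   # True: the block is still open
isp.push('    print(i)')
isp.push('')                     # a blank line closes the interactive block
print(isp.push_accepts_more())   # False: ready to execute
print(isp.source_reset())

# transform_cell() rewrites special syntax into plain Python in one shot:
print(IPythonInputSplitter().transform_cell('%time 1 + 1'))
# -> get_ipython().run_line_magic('time', '1 + 1')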

View File

@@ -1,4 +1,4 @@
"""Input transformer classes to support IPython special syntax.
"""Input transformer classes to support yap_ipython special syntax.
This includes the machinery to recognise and transform ``%magic`` commands,
``!system`` commands, ``help?`` querying, prompt stripping, and so forth.
@@ -8,18 +8,18 @@ import functools
import re
from io import StringIO
from IPython.core.splitinput import LineInfo
from IPython.utils import tokenize2
from IPython.utils.tokenize2 import generate_tokens, untokenize, TokenError
from yap_ipython.core.splitinput import LineInfo
from yap_ipython.utils import tokenize2
from yap_ipython.utils.tokenize2 import generate_tokens, untokenize, TokenError
#-----------------------------------------------------------------------------
# Globals
#-----------------------------------------------------------------------------
# The escape sequences that define the syntax transformations IPython will
# The escape sequences that define the syntax transformations yap_ipython will
# apply to user input. These can NOT be just changed here: many regular
# expressions and other parts of the code may use their hardcoded values, and
# for all intents and purposes they constitute the 'IPython syntax', so they
# for all intents and purposes they constitute the 'yap_ipython syntax', so they
# should be considered fixed.
ESC_SHELL = '!' # Send line to underlying system shell
@@ -198,11 +198,14 @@ def _make_help_call(target, esc, lspace, next_input=None):
else 'psearch' if '*' in target \
else 'pinfo'
arg = " ".join([method, target])
#Prepare arguments for get_ipython().run_line_magic(magic_name, magic_args)
t_magic_name, _, t_magic_arg_s = arg.partition(' ')
t_magic_name = t_magic_name.lstrip(ESC_MAGIC)
if next_input is None:
return '%sget_ipython().magic(%r)' % (lspace, arg)
return '%sget_ipython().run_line_magic(%r, %r)' % (lspace, t_magic_name, t_magic_arg_s)
else:
return '%sget_ipython().set_next_input(%r);get_ipython().magic(%r)' % \
(lspace, next_input, arg)
return '%sget_ipython().set_next_input(%r);get_ipython().run_line_magic(%r, %r)' % \
(lspace, next_input, t_magic_name, t_magic_arg_s)
# These define the transformations for the different escape characters.
def _tr_system(line_info):
@@ -225,11 +228,14 @@ def _tr_help(line_info):
def _tr_magic(line_info):
"Translate lines escaped with: %"
tpl = '%sget_ipython().magic(%r)'
tpl = '%sget_ipython().run_line_magic(%r, %r)'
if line_info.line.startswith(ESC_MAGIC2):
return line_info.line
cmd = ' '.join([line_info.ifun, line_info.the_rest]).strip()
return tpl % (line_info.pre, cmd)
#Prepare arguments for get_ipython().run_line_magic(magic_name, magic_args)
t_magic_name, _, t_magic_arg_s = cmd.partition(' ')
t_magic_name = t_magic_name.lstrip(ESC_MAGIC)
return tpl % (line_info.pre, t_magic_name, t_magic_arg_s)
def _tr_quote(line_info):
"Translate lines escaped with: ,"
@@ -450,13 +456,13 @@ def classic_prompt():
# FIXME: non-capturing version (?:...) usable?
prompt_re = re.compile(r'^(>>>|\.\.\.)( |$)')
initial_re = re.compile(r'^>>>( |$)')
# Any %magic/!system is IPython syntax, so we needn't look for >>> prompts
# Any %magic/!system is yap_ipython syntax, so we needn't look for >>> prompts
turnoff_re = re.compile(r'^[%!]')
return _strip_prompts(prompt_re, initial_re, turnoff_re)
@CoroutineInputTransformer.wrap
def ipy_prompt():
"""Strip IPython's In [1]:/...: prompts."""
"""Strip yap_ipython's In [1]:/...: prompts."""
# FIXME: non-capturing version (?:...) usable?
prompt_re = re.compile(r'^(In \[\d+\]: |\s*\.{3,}: ?)')
# Disable prompt stripping inside cell magics
@@ -514,12 +520,15 @@ def assign_from_system(line):
return assign_system_template % m.group('lhs', 'cmd')
assign_magic_re = re.compile(r'{}%\s*(?P<cmd>.*)'.format(_assign_pat), re.VERBOSE)
assign_magic_template = '%s = get_ipython().magic(%r)'
assign_magic_template = '%s = get_ipython().run_line_magic(%r, %r)'
@StatelessInputTransformer.wrap
def assign_from_magic(line):
"""Transform assignment from magic commands (e.g. a = %who_ls)"""
m = assign_magic_re.match(line)
if m is None:
return line
return assign_magic_template % m.group('lhs', 'cmd')
#Prepare arguments for get_ipython().run_line_magic(magic_name, magic_args)
m_lhs, m_cmd = m.group('lhs', 'cmd')
t_magic_name, _, t_magic_arg_s = m_cmd.partition(' ')
t_magic_name = t_magic_name.lstrip(ESC_MAGIC)
return assign_magic_template % (m_lhs, t_magic_name, t_magic_arg_s)
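# --- Illustrative sketch of the rewrite introduced above (not part of the diff) ---
# The transformers now split "%magic arg string" into a (name, args) pair for
# run_line_magic(); the calls below assume the module is importable as
# yap_ipython.core.inputtransformer.
from yap_ipython.core.inputtransformer import _tr_magic, assign_from_magic
from yap_ipython.core.splitinput import LineInfo

print(_tr_magic(LineInfo('%timeit x = range(10)')))
# -> get_ipython().run_line_magic('timeit', 'x = range(10)')

print(assign_from_magic('files = %sx ls'))
# -> files = get_ipython().run_line_magic('sx', 'ls')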

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@@ -0,0 +1,218 @@
"""Logger class for yap_ipython's logging facilities.
"""
#*****************************************************************************
# Copyright (C) 2001 Janko Hauser <jhauser@zscout.de> and
# Copyright (C) 2001-2006 Fernando Perez <fperez@colorado.edu>
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#*****************************************************************************
#****************************************************************************
# Modules and globals
# Python standard modules
import glob
import io
import os
import time
#****************************************************************************
# FIXME: This class isn't a mixin anymore, but it still needs attributes from
# ipython and does input cache management. Finish cleanup later...
class Logger(object):
"""A Logfile class with different policies for file creation"""
def __init__(self, home_dir, logfname='Logger.log', loghead=u'',
logmode='over'):
# this is the full ipython instance, we need some attributes from it
# which won't exist until later. What a mess, clean up later...
self.home_dir = home_dir
self.logfname = logfname
self.loghead = loghead
self.logmode = logmode
self.logfile = None
# Whether to log raw or processed input
self.log_raw_input = False
# whether to also log output
self.log_output = False
# whether to put timestamps before each log entry
self.timestamp = False
# activity control flags
self.log_active = False
# logmode is a validated property
def _set_mode(self,mode):
if mode not in ['append','backup','global','over','rotate']:
raise ValueError('invalid log mode %s given' % mode)
self._logmode = mode
def _get_mode(self):
return self._logmode
logmode = property(_get_mode,_set_mode)
def logstart(self, logfname=None, loghead=None, logmode=None,
log_output=False, timestamp=False, log_raw_input=False):
"""Generate a new log-file with a default header.
Raises RuntimeError if the log has already been started"""
if self.logfile is not None:
raise RuntimeError('Log file is already active: %s' %
self.logfname)
# The parameters can override constructor defaults
if logfname is not None: self.logfname = logfname
if loghead is not None: self.loghead = loghead
if logmode is not None: self.logmode = logmode
# Parameters not part of the constructor
self.timestamp = timestamp
self.log_output = log_output
self.log_raw_input = log_raw_input
# init depending on the log mode requested
isfile = os.path.isfile
logmode = self.logmode
if logmode == 'append':
self.logfile = io.open(self.logfname, 'a', encoding='utf-8')
elif logmode == 'backup':
if isfile(self.logfname):
backup_logname = self.logfname+'~'
# Manually remove any old backup, since os.rename may fail
# under Windows.
if isfile(backup_logname):
os.remove(backup_logname)
os.rename(self.logfname,backup_logname)
self.logfile = io.open(self.logfname, 'w', encoding='utf-8')
elif logmode == 'global':
self.logfname = os.path.join(self.home_dir,self.logfname)
self.logfile = io.open(self.logfname, 'a', encoding='utf-8')
elif logmode == 'over':
if isfile(self.logfname):
os.remove(self.logfname)
self.logfile = io.open(self.logfname,'w', encoding='utf-8')
elif logmode == 'rotate':
if isfile(self.logfname):
if isfile(self.logfname+'.001~'):
old = glob.glob(self.logfname+'.*~')
old.sort()
old.reverse()
for f in old:
root, ext = os.path.splitext(f)
num = int(ext[1:-1])+1
os.rename(f, root+'.'+repr(num).zfill(3)+'~')
os.rename(self.logfname, self.logfname+'.001~')
self.logfile = io.open(self.logfname, 'w', encoding='utf-8')
if logmode != 'append':
self.logfile.write(self.loghead)
self.logfile.flush()
self.log_active = True
def switch_log(self,val):
"""Switch logging on/off. val should be ONLY a boolean."""
if val not in [False,True,0,1]:
raise ValueError('Call switch_log ONLY with a boolean argument, '
'not with: %s' % val)
label = {0:'OFF',1:'ON',False:'OFF',True:'ON'}
if self.logfile is None:
print("""
Logging hasn't been started yet (use logstart for that).
%logon/%logoff are for temporarily starting and stopping logging for a logfile
which already exists. But you must first start the logging process with
%logstart (optionally giving a logfile name).""")
else:
if self.log_active == val:
print('Logging is already',label[val])
else:
print('Switching logging',label[val])
self.log_active = not self.log_active
self.log_active_out = self.log_active
def logstate(self):
"""Print a status message about the logger."""
if self.logfile is None:
print('Logging has not been activated.')
else:
state = self.log_active and 'active' or 'temporarily suspended'
print('Filename :', self.logfname)
print('Mode :', self.logmode)
print('Output logging :', self.log_output)
print('Raw input log :', self.log_raw_input)
print('Timestamping :', self.timestamp)
print('State :', state)
def log(self, line_mod, line_ori):
"""Write the sources to a log.
Inputs:
- line_mod: possibly modified input, such as the transformations made
by input prefilters or input handlers of various kinds. This should
always be valid Python.
- line_ori: unmodified input line from the user. This is not
necessarily valid Python.
"""
# Write the log line, but decide which one according to the
# log_raw_input flag, set when the log is started.
if self.log_raw_input:
self.log_write(line_ori)
else:
self.log_write(line_mod)
def log_write(self, data, kind='input'):
"""Write data to the log file, if active"""
#print 'data: %r' % data # dbg
if self.log_active and data:
write = self.logfile.write
if kind=='input':
if self.timestamp:
write(time.strftime('# %a, %d %b %Y %H:%M:%S\n', time.localtime()))
write(data)
elif kind=='output' and self.log_output:
odata = u'\n'.join([u'#[Out]# %s' % s
for s in data.splitlines()])
write(u'%s\n' % odata)
self.logfile.flush()
def logstop(self):
"""Fully stop logging and close log file.
In order to start logging again, a new logstart() call needs to be
made, possibly (though not necessarily) with a new filename, mode and
other options."""
if self.logfile is not None:
self.logfile.close()
self.logfile = None
else:
print("Logging hadn't been started.")
self.log_active = False
# For backwards compatibility, in case anyone was using this.
close_log = logstop
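# --- Illustrative usage sketch (not part of the committed file) --------------
# Minimal round trip with the Logger above, assuming it is importable as
# yap_ipython.core.logger; directory and file names here are sample values.
import os
import tempfile
from yap_ipython.core.logger import Logger

logdir = tempfile.mkdtemp()
logger = Logger(home_dir=logdir, logfname=os.path.join(logdir, 'session.py'),
                loghead=u'# yap_ipython log\n', logmode='rotate')
logger.logstart(log_output=True, timestamp=True)
logger.log('x = 1\n', 'x = 1\n')         # processed vs. raw form of the input
logger.log_write(u'1\n', kind='output')  # only written because log_output=True
logger.logstate()
logger.logstop()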

View File

@@ -0,0 +1,53 @@
"""Support for interactive macros in yap_ipython"""
#*****************************************************************************
# Copyright (C) 2001-2005 Fernando Perez <fperez@colorado.edu>
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#*****************************************************************************
import re
from yap_ipython.utils.encoding import DEFAULT_ENCODING
coding_declaration = re.compile(r"#\s*coding[:=]\s*([-\w.]+)")
class Macro(object):
"""Simple class to store the value of macros as strings.
Macro is just a callable that executes a string of yap_ipython
input when called.
"""
def __init__(self,code):
"""store the macro value, as a single string which can be executed"""
lines = []
enc = None
for line in code.splitlines():
coding_match = coding_declaration.match(line)
if coding_match:
enc = coding_match.group(1)
else:
lines.append(line)
code = "\n".join(lines)
if isinstance(code, bytes):
code = code.decode(enc or DEFAULT_ENCODING)
self.value = code + '\n'
def __str__(self):
return self.value
def __repr__(self):
return 'yap_ipython.macro.Macro(%s)' % repr(self.value)
def __getstate__(self):
""" needed for safe pickling via %store """
return {'value': self.value}
def __add__(self, other):
if isinstance(other, Macro):
return Macro(self.value + other.value)
elif isinstance(other, str):
return Macro(self.value + other)
raise TypeError
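# --- Illustrative usage sketch (not part of the committed file) --------------
# Macro simply stores a block of input as a string; assumes the class is
# importable as yap_ipython.core.macro.
from yap_ipython.core.macro import Macro

m1 = Macro("a = 1\nprint(a)")
m2 = Macro("print(a + 1)")
combined = m1 + m2           # __add__ concatenates the stored source
print(str(combined))
# a = 1
# print(a)
# print(a + 1)
exec(combined.value)         # running the stored value executes the macro body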

View File

@@ -0,0 +1,684 @@
# encoding: utf-8
"""Magic functions for InteractiveShell.
"""
#-----------------------------------------------------------------------------
# Copyright (C) 2001 Janko Hauser <jhauser@zscout.de> and
# Copyright (C) 2001 Fernando Perez <fperez@colorado.edu>
# Copyright (C) 2008 The yap_ipython Development Team
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#-----------------------------------------------------------------------------
import os
import re
import sys
from getopt import getopt, GetoptError
from traitlets.config.configurable import Configurable
from yap_ipython.core import oinspect
from yap_ipython.core.error import UsageError
from yap_ipython.core.inputsplitter import ESC_MAGIC, ESC_MAGIC2
from decorator import decorator
from yap_ipython.utils.ipstruct import Struct
from yap_ipython.utils.process import arg_split
from yap_ipython.utils.text import dedent
from traitlets import Bool, Dict, Instance, observe
from logging import error
#-----------------------------------------------------------------------------
# Globals
#-----------------------------------------------------------------------------
# A dict we'll use for each class that has magics, used as temporary storage to
# pass information between the @line/cell_magic method decorators and the
# @magics_class class decorator, because the method decorators have no
# access to the class when they run. See for more details:
# http://stackoverflow.com/questions/2366713/can-a-python-decorator-of-an-instance-method-access-the-class
magics = dict(line={}, cell={})
magic_kinds = ('line', 'cell')
magic_spec = ('line', 'cell', 'line_cell')
magic_escapes = dict(line=ESC_MAGIC, cell=ESC_MAGIC2)
#-----------------------------------------------------------------------------
# Utility classes and functions
#-----------------------------------------------------------------------------
class Bunch: pass
def on_off(tag):
"""Return an ON/OFF string for a 1/0 input. Simple utility function."""
return ['OFF','ON'][tag]
def compress_dhist(dh):
"""Compress a directory history into a new one with at most 20 entries.
Return a new list made from the first and last 10 elements of dhist after
removal of duplicates.
"""
head, tail = dh[:-10], dh[-10:]
newhead = []
done = set()
for h in head:
if h in done:
continue
newhead.append(h)
done.add(h)
return newhead + tail
def needs_local_scope(func):
"""Decorator to mark magic functions which need to local scope to run."""
func.needs_local_scope = True
return func
#-----------------------------------------------------------------------------
# Class and method decorators for registering magics
#-----------------------------------------------------------------------------
def magics_class(cls):
"""Class decorator for all subclasses of the main Magics class.
Any class that subclasses Magics *must* also apply this decorator, to
ensure that all the methods that have been decorated as line/cell magics
get correctly registered in the class instance. This is necessary because
when method decorators run, the class does not exist yet, so they
temporarily store their information into a module global. Application of
this class decorator copies that global data to the class instance and
clears the global.
Obviously, this mechanism is not thread-safe, which means that the
*creation* of subclasses of Magic should only be done in a single-thread
context. Instantiation of the classes has no restrictions. Given that
these classes are typically created at yap_ipython startup time and before user
application code becomes active, in practice this should not pose any
problems.
"""
cls.registered = True
cls.magics = dict(line = magics['line'],
cell = magics['cell'])
magics['line'] = {}
magics['cell'] = {}
return cls
def record_magic(dct, magic_kind, magic_name, func):
"""Utility function to store a function as a magic of a specific kind.
Parameters
----------
dct : dict
A dictionary with 'line' and 'cell' subdicts.
magic_kind : str
Kind of magic to be stored.
magic_name : str
Key to store the magic as.
func : function
Callable object to store.
"""
if magic_kind == 'line_cell':
dct['line'][magic_name] = dct['cell'][magic_name] = func
else:
dct[magic_kind][magic_name] = func
def validate_type(magic_kind):
"""Ensure that the given magic_kind is valid.
Check that the given magic_kind is one of the accepted spec types (stored
in the global `magic_spec`), raise ValueError otherwise.
"""
if magic_kind not in magic_spec:
raise ValueError('magic_kind must be one of %s, %s given' %
(magic_spec, magic_kind))
# The docstrings for the decorators below will be fairly similar for the two
# types (method and function), so we generate them here once and reuse the
# templates below.
_docstring_template = \
"""Decorate the given {0} as {1} magic.
The decorator can be used with or without arguments, as follows.
i) without arguments: it will create a {1} magic named as the {0} being
decorated::
@deco
def foo(...)
will create a {1} magic named `foo`.
ii) with one string argument: which will be used as the actual name of the
resulting magic::
@deco('bar')
def foo(...)
will create a {1} magic named `bar`.
To register a class magic use ``InteractiveShell.register_magic(class or instance)``.
"""
# These two are decorator factories. While they are conceptually very similar,
# there are enough differences in the details that it's simpler to have them
# written as completely standalone functions rather than trying to share code
# and make a single one with convoluted logic.
def _method_magic_marker(magic_kind):
"""Decorator factory for methods in Magics subclasses.
"""
validate_type(magic_kind)
# This is a closure to capture the magic_kind. We could also use a class,
# but it's overkill for just that one bit of state.
def magic_deco(arg):
call = lambda f, *a, **k: f(*a, **k)
if callable(arg):
# "Naked" decorator call (just @foo, no args)
func = arg
name = func.__name__
retval = decorator(call, func)
record_magic(magics, magic_kind, name, name)
elif isinstance(arg, str):
# Decorator called with arguments (@foo('bar'))
name = arg
def mark(func, *a, **kw):
record_magic(magics, magic_kind, name, func.__name__)
return decorator(call, func)
retval = mark
else:
raise TypeError("Decorator can only be called with "
"string or function")
return retval
# Ensure the resulting decorator has a usable docstring
magic_deco.__doc__ = _docstring_template.format('method', magic_kind)
return magic_deco
def _function_magic_marker(magic_kind):
"""Decorator factory for standalone functions.
"""
validate_type(magic_kind)
# This is a closure to capture the magic_kind. We could also use a class,
# but it's overkill for just that one bit of state.
def magic_deco(arg):
call = lambda f, *a, **k: f(*a, **k)
# Find get_ipython() in the caller's namespace
caller = sys._getframe(1)
for ns in ['f_locals', 'f_globals', 'f_builtins']:
get_ipython = getattr(caller, ns).get('get_ipython')
if get_ipython is not None:
break
else:
raise NameError('Decorator can only run in context where '
'`get_ipython` exists')
ip = get_ipython()
if callable(arg):
# "Naked" decorator call (just @foo, no args)
func = arg
name = func.__name__
ip.register_magic_function(func, magic_kind, name)
retval = decorator(call, func)
elif isinstance(arg, str):
# Decorator called with arguments (@foo('bar'))
name = arg
def mark(func, *a, **kw):
ip.register_magic_function(func, magic_kind, name)
return decorator(call, func)
retval = mark
else:
raise TypeError("Decorator can only be called with "
"string or function")
return retval
# Ensure the resulting decorator has a usable docstring
ds = _docstring_template.format('function', magic_kind)
ds += dedent("""
Note: this decorator can only be used in a context where yap_ipython is already
active, so that the `get_ipython()` call succeeds. You can therefore use
it in your startup files loaded after yap_ipython initializes, but *not* in the
yap_ipython configuration file itself, which is executed before yap_ipython is
fully up and running. Any file located in the `startup` subdirectory of
your configuration profile will be OK in this sense.
""")
magic_deco.__doc__ = ds
return magic_deco
# Create the actual decorators for public use
# These three are used to decorate methods in class definitions
line_magic = _method_magic_marker('line')
cell_magic = _method_magic_marker('cell')
line_cell_magic = _method_magic_marker('line_cell')
# These three decorate standalone functions and perform the decoration
# immediately. They can only run where get_ipython() works
register_line_magic = _function_magic_marker('line')
register_cell_magic = _function_magic_marker('cell')
register_line_cell_magic = _function_magic_marker('line_cell')
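# Hedged sketch for the function-level decorators (hypothetical function; as
# noted above, this only works where get_ipython() resolves, e.g. in a profile
# startup file, not in the configuration file itself):
#
#   @register_line_magic
#   def where(line):
#       "Return the current working directory."
#       import os
#       return os.getcwd()
#
#   # afterwards %where is available in the running yap_ipython session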
#-----------------------------------------------------------------------------
# Core Magic classes
#-----------------------------------------------------------------------------
class MagicsManager(Configurable):
"""Object that handles all magic-related functionality for yap_ipython.
"""
# Non-configurable class attributes
# A two-level dict, first keyed by magic type, then by magic function, and
# holding the actual callable object as value. This is the dict used for
# magic function dispatch
magics = Dict()
# A registry of the original objects that we've been given holding magics.
registry = Dict()
shell = Instance('yap_ipython.core.interactiveshell.InteractiveShellABC', allow_none=True)
auto_magic = Bool(True, help=
"Automatically call line magics without requiring explicit % prefix"
).tag(config=True)
@observe('auto_magic')
def _auto_magic_changed(self, change):
self.shell.automagic = change['new']
_auto_status = [
'Automagic is OFF, % prefix IS needed for line magics.',
'Automagic is ON, % prefix IS NOT needed for line magics.']
user_magics = Instance('yap_ipython.core.magics.UserMagics', allow_none=True)
def __init__(self, shell=None, config=None, user_magics=None, **traits):
super(MagicsManager, self).__init__(shell=shell, config=config,
user_magics=user_magics, **traits)
self.magics = dict(line={}, cell={})
# Let's add the user_magics to the registry for uniformity, so *all*
# registered magic containers can be found there.
self.registry[user_magics.__class__.__name__] = user_magics
def auto_status(self):
"""Return descriptive string with automagic status."""
return self._auto_status[self.auto_magic]
def lsmagic(self):
"""Return a dict of currently available magic functions.
The return dict has the keys 'line' and 'cell', corresponding to the
two types of magics we support. Each value is a list of names.
"""
return self.magics
def lsmagic_docs(self, brief=False, missing=''):
"""Return dict of documentation of magic functions.
The return dict has the keys 'line' and 'cell', corresponding to the
two types of magics we support. Each value is a dict keyed by magic
name whose value is the function docstring. If a docstring is
unavailable, the value of `missing` is used instead.
If brief is True, only the first line of each docstring will be returned.
"""
docs = {}
for m_type in self.magics:
m_docs = {}
for m_name, m_func in self.magics[m_type].items():
if m_func.__doc__:
if brief:
m_docs[m_name] = m_func.__doc__.split('\n', 1)[0]
else:
m_docs[m_name] = m_func.__doc__.rstrip()
else:
m_docs[m_name] = missing
docs[m_type] = m_docs
return docs
def register(self, *magic_objects):
"""Register one or more instances of Magics.
Take one or more classes or instances of classes that subclass the main
`core.Magic` class, and register them with yap_ipython to use the magic
functions they provide. The registration process will then ensure that
any methods that have been decorated to provide line and/or cell magics will
be recognized with the `%x`/`%%x` syntax as a line/cell magic
respectively.
If classes are given, they will be instantiated with the default
constructor. If your classes need a custom constructor, you should
instantiate them first and pass the instance.
The provided arguments can be an arbitrary mix of classes and instances.
Parameters
----------
magic_objects : one or more classes or instances
"""
# Start by validating them to ensure they have all had their magic
# methods registered at the instance level
for m in magic_objects:
if not m.registered:
raise ValueError("Class of magics %r was constructed without "
"the @register_magics class decorator")
if isinstance(m, type):
# If we're given an uninstantiated class
m = m(shell=self.shell)
# Now that we have an instance, we can register it and update the
# table of callables
self.registry[m.__class__.__name__] = m
for mtype in magic_kinds:
self.magics[mtype].update(m.magics[mtype])
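# Hedged usage sketch (comment only, hypothetical class name): either the
# @magics_class-decorated class or a pre-built instance may be passed.
#
#   ip = get_ipython()
#   ip.magics_manager.register(MyMagics)              # default constructor
#   ip.magics_manager.register(MyMagics(shell=ip))    # custom construction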
def register_function(self, func, magic_kind='line', magic_name=None):
"""Expose a standalone function as magic function for yap_ipython.
This will create an yap_ipython magic (line, cell or both) from a
standalone function. The functions should have the following
signatures:
* For line magics: `def f(line)`
* For cell magics: `def f(line, cell)`
* For a function that does both: `def f(line, cell=None)`
In the latter case, the function will be called with `cell==None` when
invoked as `%f`, and with cell as a string when invoked as `%%f`.
Parameters
----------
func : callable
Function to be registered as a magic.
magic_kind : str
Kind of magic, one of 'line', 'cell' or 'line_cell'
magic_name : optional str
If given, the name the magic will have in the yap_ipython namespace. By
default, the name of the function itself is used.
"""
# Create the new method in the user_magics and register it in the
# global table
validate_type(magic_kind)
magic_name = func.__name__ if magic_name is None else magic_name
setattr(self.user_magics, magic_name, func)
record_magic(self.magics, magic_kind, magic_name, func)
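# Hedged sketch of the signatures documented above (hypothetical function):
#
#   def fmt(line, cell=None):
#       "Echo the line, or the cell body when used as a cell magic."
#       return line if cell is None else cell
#
#   get_ipython().magics_manager.register_function(fmt, magic_kind='line_cell')
#   # %fmt abc         -> fmt called with cell=None
#   # %%fmt + body     -> fmt called with the cell body as `cell`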
def register_alias(self, alias_name, magic_name, magic_kind='line', magic_params=None):
"""Register an alias to a magic function.
The alias is an instance of :class:`MagicAlias`, which holds the
name and kind of the magic it should call. Binding is done at
call time, so if the underlying magic function is changed the alias
will call the new function.
Parameters
----------
alias_name : str
The name of the magic to be registered.
magic_name : str
The name of an existing magic.
magic_kind : str
Kind of magic, one of 'line' or 'cell'
"""
# `validate_type` is too permissive, as it allows 'line_cell'
# which we do not handle.
if magic_kind not in magic_kinds:
raise ValueError('magic_kind must be one of %s, %s given' %
(magic_kinds, magic_kind))
alias = MagicAlias(self.shell, magic_name, magic_kind, magic_params)
setattr(self.user_magics, alias_name, alias)
record_magic(self.magics, magic_kind, alias_name, alias)
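# Hedged usage sketch (comment only; the target magic and its options are
# assumptions): lookup of the target happens at each call, per MagicAlias below.
#
#   mm = get_ipython().magics_manager
#   mm.register_alias('hist', 'history')
#   mm.register_alias('h10', 'history', magic_params='-l 10')
#   # %hist now dispatches to %history; %h10 prepends '-l 10' to its arguments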
# Key base class that provides the central functionality for magics.
class Magics(Configurable):
"""Base class for implementing magic functions.
Shell functions which can be reached as %function_name. All magic
functions should accept a string, which they can parse for their own
needs. This can make some functions easier to type, e.g. `%cd ../`
vs. `%cd("../")`
Classes providing magic functions need to subclass this class, and they
MUST:
- Use the method decorators `@line_magic` and `@cell_magic` to decorate
individual methods as magic functions, AND
- Use the class decorator `@magics_class` to ensure that the magic
methods are properly registered at the instance level upon instance
initialization.
See :mod:`magic_functions` for examples of actual implementation classes.
"""
# Dict holding all command-line options for each magic.
options_table = None
# Dict for the mapping of magic names to methods, set by class decorator
magics = None
# Flag to check that the class decorator was properly applied
registered = False
# Instance of yap_ipython shell
shell = None
def __init__(self, shell=None, **kwargs):
if not(self.__class__.registered):
raise ValueError('Magics subclass without registration - '
'did you forget to apply @magics_class?')
if shell is not None:
if hasattr(shell, 'configurables'):
shell.configurables.append(self)
if hasattr(shell, 'config'):
kwargs.setdefault('parent', shell)
self.shell = shell
self.options_table = {}
# The method decorators are run when the instance doesn't exist yet, so
# they can only record the names of the methods they are supposed to
# grab. Only now, that the instance exists, can we create the proper
# mapping to bound methods. So we read the info off the original names
# table and replace each method name by the actual bound method.
# But we mustn't clobber the *class* mapping, in case of multiple instances.
class_magics = self.magics
self.magics = {}
for mtype in magic_kinds:
tab = self.magics[mtype] = {}
cls_tab = class_magics[mtype]
for magic_name, meth_name in cls_tab.items():
if isinstance(meth_name, str):
# it's a method name, grab it
tab[magic_name] = getattr(self, meth_name)
else:
# it's the real thing
tab[magic_name] = meth_name
# Configurable **needs** to be initiated at the end or the config
# magics get screwed up.
super(Magics, self).__init__(**kwargs)
def arg_err(self,func):
"""Print docstring if incorrect arguments were passed"""
print('Error in arguments:')
print(oinspect.getdoc(func))
def format_latex(self, strng):
"""Format a string for latex inclusion."""
# Characters that need to be escaped for latex:
escape_re = re.compile(r'(%|_|\$|#|&)',re.MULTILINE)
# Magic command names as headers:
cmd_name_re = re.compile(r'^(%s.*?):' % ESC_MAGIC,
re.MULTILINE)
# Magic commands
cmd_re = re.compile(r'(?P<cmd>%s.+?\b)(?!\}\}:)' % ESC_MAGIC,
re.MULTILINE)
# Paragraph continue
par_re = re.compile(r'\\$',re.MULTILINE)
# The "\n" symbol
newline_re = re.compile(r'\\n')
# Now build the string for output:
#strng = cmd_name_re.sub(r'\n\\texttt{\\textsl{\\large \1}}:',strng)
strng = cmd_name_re.sub(r'\n\\bigskip\n\\texttt{\\textbf{ \1}}:',
strng)
strng = cmd_re.sub(r'\\texttt{\g<cmd>}',strng)
strng = par_re.sub(r'\\\\',strng)
strng = escape_re.sub(r'\\\1',strng)
strng = newline_re.sub(r'\\textbackslash{}n',strng)
return strng
def parse_options(self, arg_str, opt_str, *long_opts, **kw):
"""Parse options passed to an argument string.
The interface is similar to that of :func:`getopt.getopt`, but it
returns a :class:`~yap_ipython.utils.struct.Struct` with the options as keys
and the stripped argument string still as a string.
arg_str is split into a true sys.argv-style vector by using shlex-style splitting.
This allows us to easily expand variables, glob files, quote
arguments, etc.
Parameters
----------
arg_str : str
The arguments to parse.
opt_str : str
The options specification.
mode : str, default 'string'
If given as 'list', the argument string is returned as a list (split
on whitespace) instead of a string.
list_all : bool, default False
Put all option values in lists. Normally only options
appearing more than once are put in a list.
posix : bool, default True
Whether to split the input line in POSIX mode or not, as per the
conventions outlined in the :mod:`shlex` module from the standard
library.
"""
# inject default options at the beginning of the input line
caller = sys._getframe(1).f_code.co_name
arg_str = '%s %s' % (self.options_table.get(caller,''),arg_str)
mode = kw.get('mode','string')
if mode not in ['string','list']:
raise ValueError('incorrect mode given: %s' % mode)
# Get options
list_all = kw.get('list_all',0)
posix = kw.get('posix', os.name == 'posix')
strict = kw.get('strict', True)
# Check if we have more than one argument to warrant extra processing:
odict = {} # Dictionary with options
args = arg_str.split()
if len(args) >= 1:
# If the list of inputs only has 0 or 1 thing in it, there's no
# need to look for options
argv = arg_split(arg_str, posix, strict)
# Do regular option processing
try:
opts,args = getopt(argv, opt_str, long_opts)
except GetoptError as e:
raise UsageError('%s ( allowed: "%s" %s)' % (e.msg,opt_str,
" ".join(long_opts)))
for o,a in opts:
if o.startswith('--'):
o = o[2:]
else:
o = o[1:]
try:
odict[o].append(a)
except AttributeError:
odict[o] = [odict[o],a]
except KeyError:
if list_all:
odict[o] = [a]
else:
odict[o] = a
# Prepare opts,args for return
opts = Struct(odict)
if mode == 'string':
args = ' '.join(args)
return opts,args
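# Hedged sketch of how a magic method might consume parse_options (comment
# only, hypothetical magic; '-n' takes a value and '-q' is a flag, following
# getopt conventions):
#
#   @line_magic
#   def demo(self, parameter_s=''):
#       opts, args = self.parse_options(parameter_s, 'qn:')
#       quiet = 'q' in opts
#       count = int(opts.get('n', 1))
#       return quiet, count, args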
def default_option(self, fn, optstr):
"""Make an entry in the options_table for fn, with value optstr"""
if fn not in self.lsmagic():
error("%s is not a magic function" % fn)
self.options_table[fn] = optstr
class MagicAlias(object):
"""An alias to another magic function.
An alias is determined by its magic name and magic kind. Lookup
is done at call time, so if the underlying magic changes the alias
will call the new function.
Use the :meth:`MagicsManager.register_alias` method or the
`%alias_magic` magic function to create and register a new alias.
"""
def __init__(self, shell, magic_name, magic_kind, magic_params=None):
self.shell = shell
self.magic_name = magic_name
self.magic_params = magic_params
self.magic_kind = magic_kind
self.pretty_target = '%s%s' % (magic_escapes[self.magic_kind], self.magic_name)
self.__doc__ = "Alias for `%s`." % self.pretty_target
self._in_call = False
def __call__(self, *args, **kwargs):
"""Call the magic alias."""
fn = self.shell.find_magic(self.magic_name, self.magic_kind)
if fn is None:
raise UsageError("Magic `%s` not found." % self.pretty_target)
# Protect against infinite recursion.
if self._in_call:
raise UsageError("Infinite recursion detected; "
"magic aliases cannot call themselves.")
self._in_call = True
try:
if self.magic_params:
args_list = list(args)
args_list[0] = self.magic_params + " " + args[0]
args = tuple(args_list)
return fn(*args, **kwargs)
finally:
self._in_call = False

View File

@ -0,0 +1,278 @@
''' A decorator-based method of constructing yap_ipython magics with `argparse`
option handling.
New magic functions can be defined like so::
from yap_ipython.core.magic_arguments import (argument, magic_arguments,
parse_argstring)
@magic_arguments()
@argument('-o', '--option', help='An optional argument.')
@argument('arg', type=int, help='An integer positional argument.')
def magic_cool(self, arg):
""" A really cool magic command.
"""
args = parse_argstring(magic_cool, arg)
...
The `@magic_arguments` decorator marks the function as having argparse arguments.
The `@argument` decorator adds an argument using the same syntax as argparse's
`add_argument()` method. More sophisticated uses may also require the
`@argument_group` or `@kwds` decorator to customize the formatting and the
parsing.
Help text for the magic is automatically generated from the docstring and the
arguments::
In[1]: %cool?
%cool [-o OPTION] arg
A really cool magic command.
positional arguments:
arg An integer positional argument.
optional arguments:
-o OPTION, --option OPTION
An optional argument.
Inheritance diagram:
.. inheritance-diagram:: yap_ipython.core.magic_arguments
:parts: 3
'''
#-----------------------------------------------------------------------------
# Copyright (C) 2010-2011, yap_ipython Development Team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
import argparse
import re
# Our own imports
from yap_ipython.core.error import UsageError
from yap_ipython.utils.decorators import undoc
from yap_ipython.utils.process import arg_split
from yap_ipython.utils.text import dedent
NAME_RE = re.compile(r"[a-zA-Z][a-zA-Z0-9_-]*$")
@undoc
class MagicHelpFormatter(argparse.RawDescriptionHelpFormatter):
"""A HelpFormatter with a couple of changes to meet our needs.
"""
# Modified to dedent text.
def _fill_text(self, text, width, indent):
return argparse.RawDescriptionHelpFormatter._fill_text(self, dedent(text), width, indent)
# Modified to wrap argument placeholders in <> where necessary.
def _format_action_invocation(self, action):
if not action.option_strings:
metavar, = self._metavar_formatter(action, action.dest)(1)
return metavar
else:
parts = []
# if the Optional doesn't take a value, format is:
# -s, --long
if action.nargs == 0:
parts.extend(action.option_strings)
# if the Optional takes a value, format is:
# -s ARGS, --long ARGS
else:
default = action.dest.upper()
args_string = self._format_args(action, default)
# IPYTHON MODIFICATION: If args_string is not a plain name, wrap
# it in <> so it's valid RST.
if not NAME_RE.match(args_string):
args_string = "<%s>" % args_string
for option_string in action.option_strings:
parts.append('%s %s' % (option_string, args_string))
return ', '.join(parts)
# Override the default prefix ('usage') to our % magic escape,
# in a code block.
def add_usage(self, usage, actions, groups, prefix="::\n\n %"):
super(MagicHelpFormatter, self).add_usage(usage, actions, groups, prefix)
class MagicArgumentParser(argparse.ArgumentParser):
""" An ArgumentParser tweaked for use by yap_ipython magics.
"""
def __init__(self,
prog=None,
usage=None,
description=None,
epilog=None,
parents=None,
formatter_class=MagicHelpFormatter,
prefix_chars='-',
argument_default=None,
conflict_handler='error',
add_help=False):
if parents is None:
parents = []
super(MagicArgumentParser, self).__init__(prog=prog, usage=usage,
description=description, epilog=epilog,
parents=parents, formatter_class=formatter_class,
prefix_chars=prefix_chars, argument_default=argument_default,
conflict_handler=conflict_handler, add_help=add_help)
def error(self, message):
""" Raise a catchable error instead of exiting.
"""
raise UsageError(message)
def parse_argstring(self, argstring):
""" Split a string into an argument list and parse that argument list.
"""
argv = arg_split(argstring)
return self.parse_args(argv)
def construct_parser(magic_func):
""" Construct an argument parser using the function decorations.
"""
kwds = getattr(magic_func, 'argcmd_kwds', {})
if 'description' not in kwds:
kwds['description'] = getattr(magic_func, '__doc__', None)
arg_name = real_name(magic_func)
parser = MagicArgumentParser(arg_name, **kwds)
# Reverse the list of decorators in order to apply them in the
# order in which they appear in the source.
group = None
for deco in magic_func.decorators[::-1]:
result = deco.add_to_parser(parser, group)
if result is not None:
group = result
# Replace the magic function's docstring with the full help text.
magic_func.__doc__ = parser.format_help()
return parser
def parse_argstring(magic_func, argstring):
""" Parse the string of arguments for the given magic function.
"""
return magic_func.parser.parse_argstring(argstring)
def real_name(magic_func):
""" Find the real name of the magic.
"""
magic_name = magic_func.__name__
if magic_name.startswith('magic_'):
magic_name = magic_name[len('magic_'):]
return getattr(magic_func, 'argcmd_name', magic_name)
class ArgDecorator(object):
""" Base class for decorators to add ArgumentParser information to a method.
"""
def __call__(self, func):
if not getattr(func, 'has_arguments', False):
func.has_arguments = True
func.decorators = []
func.decorators.append(self)
return func
def add_to_parser(self, parser, group):
""" Add this object's information to the parser, if necessary.
"""
pass
class magic_arguments(ArgDecorator):
""" Mark the magic as having argparse arguments and possibly adjust the
name.
"""
def __init__(self, name=None):
self.name = name
def __call__(self, func):
if not getattr(func, 'has_arguments', False):
func.has_arguments = True
func.decorators = []
if self.name is not None:
func.argcmd_name = self.name
# This should be the first decorator in the list of decorators, thus the
# last to execute. Build the parser.
func.parser = construct_parser(func)
return func
class ArgMethodWrapper(ArgDecorator):
"""
Base class to define a wrapper for ArgumentParser method.
Child class must define either `_method_name` or `add_to_parser`.
"""
_method_name = None
def __init__(self, *args, **kwds):
self.args = args
self.kwds = kwds
def add_to_parser(self, parser, group):
""" Add this object's information to the parser.
"""
if group is not None:
parser = group
getattr(parser, self._method_name)(*self.args, **self.kwds)
return None
class argument(ArgMethodWrapper):
""" Store arguments and keywords to pass to add_argument().
Instances also serve to decorate command methods.
"""
_method_name = 'add_argument'
class defaults(ArgMethodWrapper):
""" Store arguments and keywords to pass to set_defaults().
Instances also serve to decorate command methods.
"""
_method_name = 'set_defaults'
class argument_group(ArgMethodWrapper):
""" Store arguments and keywords to pass to add_argument_group().
Instances also serve to decorate command methods.
"""
def add_to_parser(self, parser, group):
""" Add this object's information to the parser.
"""
return parser.add_argument_group(*self.args, **self.kwds)
class kwds(ArgDecorator):
""" Provide other keywords to the sub-parser constructor.
"""
def __init__(self, **kwds):
self.kwds = kwds
def __call__(self, func):
func = super(kwds, self).__call__(func)
func.argcmd_kwds = self.kwds
return func
__all__ = ['magic_arguments', 'argument', 'argument_group', 'kwds',
'parse_argstring']

View File

@ -1,13 +0,0 @@
from modulefinder import ModuleFinder
finder = ModuleFinder()
finder.run_script('__main__.py')
print('Loaded modules:')
for name, mod in finder.modules.items():
print('%s: ' % name, end='')
print(','.join(list(mod.globalnames.keys())[:3]))
print('-'*50)
print('Modules not imported:')
print('\n'.join(finder.badmodules.keys()))

File diff suppressed because it is too large

View File

@ -0,0 +1,366 @@
# encoding: utf-8
"""
Paging capabilities for yap_ipython.core
Notes
-----
For now this uses yap_ipython hooks, so it can't be in yap_ipython.utils. If we can get
rid of that dependency, we could move it there.
-----
"""
# Copyright (c) yap_ipython Development Team.
# Distributed under the terms of the Modified BSD License.
import os
import re
import sys
import tempfile
from io import UnsupportedOperation
from yap_ipython import get_ipython
from yap_ipython.core.display import display
from yap_ipython.core.error import TryNext
from yap_ipython.utils.data import chop
from yap_ipython.utils.process import system
from yap_ipython.utils.terminal import get_terminal_size
from yap_ipython.utils import py3compat
def display_page(strng, start=0, screen_lines=25):
"""Just display, no paging. screen_lines is ignored."""
if isinstance(strng, dict):
data = strng
else:
if start:
strng = u'\n'.join(strng.splitlines()[start:])
data = { 'text/plain': strng }
display(data, raw=True)
def as_hook(page_func):
"""Wrap a pager func to strip the `self` arg
so it can be called as a hook.
"""
return lambda self, *args, **kwargs: page_func(*args, **kwargs)
esc_re = re.compile(r"(\x1b[^m]+m)")
def page_dumb(strng, start=0, screen_lines=25):
"""Very dumb 'pager' in Python, for when nothing else works.
Only moves forward, same interface as page(), except for pager_cmd and
mode.
"""
if isinstance(strng, dict):
strng = strng.get('text/plain', '')
out_ln = strng.splitlines()[start:]
screens = chop(out_ln,screen_lines-1)
if len(screens) == 1:
print(os.linesep.join(screens[0]))
else:
last_escape = ""
for scr in screens[0:-1]:
hunk = os.linesep.join(scr)
print(last_escape + hunk)
if not page_more():
return
esc_list = esc_re.findall(hunk)
if len(esc_list) > 0:
last_escape = esc_list[-1]
print(last_escape + os.linesep.join(screens[-1]))
def _detect_screen_size(screen_lines_def):
"""Attempt to work out the number of lines on the screen.
This is called by page(). It can raise an error (e.g. when run in the
test suite), so it's separated out so it can easily be called in a try block.
"""
TERM = os.environ.get('TERM',None)
if not((TERM=='xterm' or TERM=='xterm-color') and sys.platform != 'sunos5'):
# curses causes problems on many terminals other than xterm, and
# some termios calls lock up on Sun OS5.
return screen_lines_def
try:
import termios
import curses
except ImportError:
return screen_lines_def
# There is a bug in curses, where *sometimes* it fails to properly
# initialize, and then after the endwin() call is made, the
# terminal is left in an unusable state. Rather than trying to
# check every time for this (by requesting and comparing termios
# flags each time), we just save the initial terminal state and
# unconditionally reset it every time. It's cheaper than making
# the checks.
try:
term_flags = termios.tcgetattr(sys.stdout)
except termios.error as err:
# can fail on Linux 2.6, pager_page will catch the TypeError
raise TypeError('termios error: {0}'.format(err))
try:
scr = curses.initscr()
except AttributeError:
# Curses on Solaris may not be complete, so we can't use it there
return screen_lines_def
screen_lines_real,screen_cols = scr.getmaxyx()
curses.endwin()
# Restore terminal state in case endwin() didn't.
termios.tcsetattr(sys.stdout,termios.TCSANOW,term_flags)
# Now we have what we needed: the screen size in rows/columns
return screen_lines_real
#print '***Screen size:',screen_lines_real,'lines x',\
#screen_cols,'columns.' # dbg
def pager_page(strng, start=0, screen_lines=0, pager_cmd=None):
"""Display a string, piping through a pager after a certain length.
strng can be a mime-bundle dict, supplying multiple representations,
keyed by mime-type.
The screen_lines parameter specifies the number of *usable* lines of your
terminal screen (total lines minus lines you need to reserve to show other
information).
If you set screen_lines to a number <=0, page() will try to auto-determine
your screen size and will only use up to (screen_size+screen_lines) for
printing, paging after that. That is, if you want auto-detection but need
to reserve the bottom 3 lines of the screen, use screen_lines = -3, and for
auto-detection without any lines reserved simply use screen_lines = 0.
If a string won't fit in the allowed lines, it is sent through the
specified pager command. If none is given, look for PAGER in the environment,
and ultimately default to less.
If no system pager works, the string is sent through a very simplistic
'dumb pager' written in python.
"""
# for compatibility with mime-bundle form:
if isinstance(strng, dict):
strng = strng['text/plain']
# Ugly kludge, but calling curses.initscr() flat out crashes in emacs
TERM = os.environ.get('TERM','dumb')
if TERM in ['dumb','emacs'] and os.name != 'nt':
print(strng)
return
# chop off the topmost part of the string we don't want to see
str_lines = strng.splitlines()[start:]
str_toprint = os.linesep.join(str_lines)
num_newlines = len(str_lines)
len_str = len(str_toprint)
# Dumb heuristics to guesstimate number of on-screen lines the string
# takes. Very basic, but good enough for docstrings in reasonable
# terminals. If someone later feels like refining it, it's not hard.
numlines = max(num_newlines,int(len_str/80)+1)
screen_lines_def = get_terminal_size()[1]
# auto-determine screen size
if screen_lines <= 0:
try:
screen_lines += _detect_screen_size(screen_lines_def)
except (TypeError, UnsupportedOperation):
print(str_toprint)
return
#print 'numlines',numlines,'screenlines',screen_lines # dbg
if numlines <= screen_lines :
#print '*** normal print' # dbg
print(str_toprint)
else:
# Try to open pager and default to internal one if that fails.
# All failure modes are tagged as 'retval=1', to match the return
# value of a failed system command. If any intermediate attempt
# sets retval to 1, at the end we resort to our own page_dumb() pager.
pager_cmd = get_pager_cmd(pager_cmd)
pager_cmd += ' ' + get_pager_start(pager_cmd,start)
if os.name == 'nt':
if pager_cmd.startswith('type'):
# The default WinXP 'type' command is failing on complex strings.
retval = 1
else:
fd, tmpname = tempfile.mkstemp('.txt')
try:
os.close(fd)
with open(tmpname, 'wt') as tmpfile:
tmpfile.write(strng)
cmd = "%s < %s" % (pager_cmd, tmpname)
# tmpfile needs to be closed for windows
if os.system(cmd):
retval = 1
else:
retval = None
finally:
os.remove(tmpname)
else:
try:
retval = None
# if I use popen4, things hang. No idea why.
#pager,shell_out = os.popen4(pager_cmd)
pager = os.popen(pager_cmd, 'w')
try:
pager_encoding = pager.encoding or sys.stdout.encoding
pager.write(strng)
finally:
retval = pager.close()
except IOError as msg: # broken pipe when user quits
if msg.args == (32, 'Broken pipe'):
retval = None
else:
retval = 1
except OSError:
# Other strange problems, sometimes seen in Win2k/cygwin
retval = 1
if retval is not None:
page_dumb(strng,screen_lines=screen_lines)
def page(data, start=0, screen_lines=0, pager_cmd=None):
"""Display content in a pager, piping through a pager after a certain length.
data can be a mime-bundle dict, supplying multiple representations,
keyed by mime-type, or text.
Pager is dispatched via the `show_in_pager` yap_ipython hook.
If no hook is registered, `pager_page` will be used.
"""
# Some routines may auto-compute start offsets incorrectly and pass a
# negative value. Offset to 0 for robustness.
start = max(0, start)
# first, try the hook
ip = get_ipython()
if ip:
try:
ip.hooks.show_in_pager(data, start=start, screen_lines=screen_lines)
return
except TryNext:
pass
# fallback on default pager
return pager_page(data, start, screen_lines, pager_cmd)
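# Hedged usage sketch (hypothetical text variable): the screen_lines convention
# described in pager_page() applies here as well, since page() falls back to it.
#
#   page(long_text)                       # auto-detect terminal height
#   page(long_text, screen_lines=-3)      # auto-detect, but reserve 3 lines
#   page({'text/plain': long_text})       # mime-bundle form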
def page_file(fname, start=0, pager_cmd=None):
"""Page a file, using an optional pager command and starting line.
"""
pager_cmd = get_pager_cmd(pager_cmd)
pager_cmd += ' ' + get_pager_start(pager_cmd,start)
try:
if os.environ['TERM'] in ['emacs','dumb']:
raise EnvironmentError
system(pager_cmd + ' ' + fname)
except:
try:
if start > 0:
start -= 1
page(open(fname).read(),start)
except:
print('Unable to show file',repr(fname))
def get_pager_cmd(pager_cmd=None):
"""Return a pager command.
Makes some attempts at finding an OS-correct one.
"""
if os.name == 'posix':
default_pager_cmd = 'less -R' # -R for color control sequences
elif os.name in ['nt','dos']:
default_pager_cmd = 'type'
if pager_cmd is None:
try:
pager_cmd = os.environ['PAGER']
except:
pager_cmd = default_pager_cmd
if pager_cmd == 'less' and '-r' not in os.environ.get('LESS', '').lower():
pager_cmd += ' -R'
return pager_cmd
def get_pager_start(pager, start):
"""Return the string for paging files with an offset.
This is the '+N' argument which less and more (under Unix) accept.
"""
if pager in ['less','more']:
if start:
start_string = '+' + str(start)
else:
start_string = ''
else:
start_string = ''
return start_string
# (X)emacs on win32 doesn't like to be bypassed with msvcrt.getch()
if os.name == 'nt' and os.environ.get('TERM','dumb') != 'emacs':
import msvcrt
def page_more():
""" Smart pausing between pages
@return: True if more lines should be printed, False to quit
"""
sys.stdout.write('---Return to continue, q to quit--- ')
ans = msvcrt.getwch()
if ans in ("q", "Q"):
result = False
else:
result = True
sys.stdout.write("\b"*37 + " "*37 + "\b"*37)
return result
else:
def page_more():
ans = py3compat.input('---Return to continue, q to quit--- ')
if ans.lower().startswith('q'):
return False
else:
return True
def snip_print(str,width = 75,print_full = 0,header = ''):
"""Print a string snipping the midsection to fit in width.
print_full: mode control:
- 0: only snip long strings
- 1: send to page() directly.
- 2: snip long strings and ask for full length viewing with page()
Return 1 if snipping was necessary, 0 otherwise."""
if print_full == 1:
page(header+str)
return 0
print(header, end=' ')
if len(str) < width:
print(str)
snip = 0
else:
whalf = int((width -5)/2)
print(str[:whalf] + ' <...> ' + str[-whalf:])
snip = 1
if snip and print_full == 2:
if py3compat.input(header+' Snipped. View (y/n)? [N]').lower() == 'y':
page(str)
return snip

View File

@ -0,0 +1,55 @@
# -*- coding: utf-8 -*-
"""Payload system for yap_ipython.
Authors:
* Fernando Perez
* Brian Granger
"""
#-----------------------------------------------------------------------------
# Copyright (C) 2008-2011 The yap_ipython Development Team
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
from traitlets.config.configurable import Configurable
from traitlets import List
#-----------------------------------------------------------------------------
# Main payload class
#-----------------------------------------------------------------------------
class PayloadManager(Configurable):
_payload = List([])
def write_payload(self, data, single=True):
"""Include or update the specified `data` payload in the PayloadManager.
If a previous payload with the same source exists and `single` is True,
it will be overwritten with the new one.
"""
if not isinstance(data, dict):
raise TypeError('Each payload write must be a dict, got: %r' % data)
if single and 'source' in data:
source = data['source']
for i, pl in enumerate(self._payload):
if 'source' in pl and pl['source'] == source:
self._payload[i] = data
return
self._payload.append(data)
def read_payload(self):
return self._payload
def clear_payload(self):
self._payload = []
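# Hedged sketch of the `single` behaviour documented above (comment only):
#
#   pm = PayloadManager()
#   pm.write_payload({'source': 'page', 'data': {'text/plain': 'one'}})
#   pm.write_payload({'source': 'page', 'data': {'text/plain': 'two'}})
#   pm.read_payload()    # one entry: the earlier 'page' payload was replaced
#   pm.write_payload({'source': 'page', 'data': {'text/plain': 'three'}},
#                    single=False)
#   pm.read_payload()    # now two entries share the source 'page'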

View File

@ -0,0 +1,52 @@
# encoding: utf-8
"""A payload based version of page."""
# Copyright (c) yap_ipython Development Team.
# Distributed under the terms of the Modified BSD License.
import warnings
from yap_ipython.core.getipython import get_ipython
def page(strng, start=0, screen_lines=0, pager_cmd=None):
"""Print a string, piping through a pager.
This version ignores the screen_lines and pager_cmd arguments and uses
yap_ipython's payload system instead.
Parameters
----------
strng : str or mime-dict
Text to page, or a mime-type keyed dict of already formatted data.
start : int
Starting line at which to place the display.
"""
# Some routines may auto-compute start offsets incorrectly and pass a
# negative value. Offset to 0 for robustness.
start = max(0, start)
shell = get_ipython()
if isinstance(strng, dict):
data = strng
else:
data = {'text/plain' : strng}
payload = dict(
source='page',
data=data,
start=start,
)
shell.payload_manager.write_payload(payload)
def install_payload_page():
"""DEPRECATED, use show_in_pager hook
Install this version of page as yap_ipython.core.page.page.
"""
warnings.warn("""install_payload_page is deprecated.
Use `ip.set_hook('show_in_pager, page.as_hook(payloadpage.page))`
""")
from yap_ipython.core import page as corepage
corepage.page = page

View File

@ -0,0 +1,709 @@
# encoding: utf-8
"""
Prefiltering components.
Prefilters transform user input before it is exec'd by Python. These
transforms are used to implement additional syntax such as !ls and %magic.
"""
# Copyright (c) yap_ipython Development Team.
# Distributed under the terms of the Modified BSD License.
from keyword import iskeyword
import re
from yap_ipython.core.autocall import IPyAutocall
from traitlets.config.configurable import Configurable
from yap_ipython.core.inputsplitter import (
ESC_MAGIC,
ESC_QUOTE,
ESC_QUOTE2,
ESC_PAREN,
)
from yap_ipython.core.macro import Macro
from yap_ipython.core.splitinput import LineInfo
from traitlets import (
List, Integer, Unicode, Bool, Instance, CRegExp
)
#-----------------------------------------------------------------------------
# Global utilities, errors and constants
#-----------------------------------------------------------------------------
class PrefilterError(Exception):
pass
# RegExp to identify potential function names
re_fun_name = re.compile(r'[a-zA-Z_]([a-zA-Z0-9_.]*) *$')
# RegExp to exclude strings with this start from autocalling. In
# particular, all binary operators should be excluded, so that if foo is
# callable, foo OP bar doesn't become foo(OP bar), which is invalid. The
# characters '!=()' don't need to be checked for, as the checkPythonChars
# routine explicitly does so, to catch direct calls and rebindings of
# existing names.
# Warning: the '-' HAS TO BE AT THE END of the first group, otherwise
# it affects the rest of the group in square brackets.
re_exclude_auto = re.compile(r'^[,&^\|\*/\+-]'
r'|^is |^not |^in |^and |^or ')
# try to catch also methods for stuff in lists/tuples/dicts: off
# (experimental). For this to work, the line_split regexp would need
# to be modified so it wouldn't break things at '['. That line is
# nasty enough that I shouldn't change it until I can test it _well_.
#self.re_fun_name = re.compile (r'[a-zA-Z_]([a-zA-Z0-9_.\[\]]*) ?$')
# Handler Check Utilities
def is_shadowed(identifier, ip):
"""Is the given identifier defined in one of the namespaces which shadow
the alias and magic namespaces? Note that an identifier is different
than ifun, because it cannot contain a '.' character."""
# This is much safer than calling ofind, which can change state
return (identifier in ip.user_ns \
or identifier in ip.user_global_ns \
or identifier in ip.ns_table['builtin']\
or iskeyword(identifier))
#-----------------------------------------------------------------------------
# Main Prefilter manager
#-----------------------------------------------------------------------------
class PrefilterManager(Configurable):
"""Main prefilter component.
The yap_ipython prefilter is run on all user input before it is executed. The
prefilter consumes lines of input and produces transformed lines of
input.
The implementation consists of two phases:
1. Transformers
2. Checkers and handlers
Over time, we plan on deprecating the checkers and handlers and doing
everything in the transformers.
The transformers are instances of :class:`PrefilterTransformer` and have
a single method :meth:`transform` that takes a line and returns a
transformed line. The transformation can be accomplished using any
tool, but our current ones use regular expressions for speed.
After all the transformers have been run, the line is fed to the checkers,
which are instances of :class:`PrefilterChecker`. The line is passed to
the :meth:`check` method, which either returns `None` or a
:class:`PrefilterHandler` instance. If `None` is returned, the other
checkers are tried. If a :class:`PrefilterHandler` instance is returned,
the line is passed to the :meth:`handle` method of the returned
handler and no further checkers are tried.
Both transformers and checkers have a `priority` attribute, that determines
the order in which they are called. Smaller priorities are tried first.
Both transformers and checkers also have an `enabled` attribute, which is
a boolean that determines if the instance is used.
Users or developers can change the priority or enabled attribute of
transformers or checkers, but they must call the :meth:`sort_checkers`
or :meth:`sort_transformers` method after changing the priority.
"""
multi_line_specials = Bool(True).tag(config=True)
shell = Instance('yap_ipython.core.interactiveshell.InteractiveShellABC', allow_none=True)
def __init__(self, shell=None, **kwargs):
super(PrefilterManager, self).__init__(shell=shell, **kwargs)
self.shell = shell
self.init_transformers()
self.init_handlers()
self.init_checkers()
#-------------------------------------------------------------------------
# API for managing transformers
#-------------------------------------------------------------------------
def init_transformers(self):
"""Create the default transformers."""
self._transformers = []
for transformer_cls in _default_transformers:
transformer_cls(
shell=self.shell, prefilter_manager=self, parent=self
)
def sort_transformers(self):
"""Sort the transformers by priority.
This must be called after the priority of a transformer is changed.
The :meth:`register_transformer` method calls this automatically.
"""
self._transformers.sort(key=lambda x: x.priority)
@property
def transformers(self):
"""Return a list of checkers, sorted by priority."""
return self._transformers
def register_transformer(self, transformer):
"""Register a transformer instance."""
if transformer not in self._transformers:
self._transformers.append(transformer)
self.sort_transformers()
def unregister_transformer(self, transformer):
"""Unregister a transformer instance."""
if transformer in self._transformers:
self._transformers.remove(transformer)
#-------------------------------------------------------------------------
# API for managing checkers
#-------------------------------------------------------------------------
def init_checkers(self):
"""Create the default checkers."""
self._checkers = []
for checker in _default_checkers:
checker(
shell=self.shell, prefilter_manager=self, parent=self
)
def sort_checkers(self):
"""Sort the checkers by priority.
This must be called after the priority of a checker is changed.
The :meth:`register_checker` method calls this automatically.
"""
self._checkers.sort(key=lambda x: x.priority)
@property
def checkers(self):
"""Return a list of checkers, sorted by priority."""
return self._checkers
def register_checker(self, checker):
"""Register a checker instance."""
if checker not in self._checkers:
self._checkers.append(checker)
self.sort_checkers()
def unregister_checker(self, checker):
"""Unregister a checker instance."""
if checker in self._checkers:
self._checkers.remove(checker)
#-------------------------------------------------------------------------
# API for managing handlers
#-------------------------------------------------------------------------
def init_handlers(self):
"""Create the default handlers."""
self._handlers = {}
self._esc_handlers = {}
for handler in _default_handlers:
handler(
shell=self.shell, prefilter_manager=self, parent=self
)
@property
def handlers(self):
"""Return a dict of all the handlers."""
return self._handlers
def register_handler(self, name, handler, esc_strings):
"""Register a handler instance by name with esc_strings."""
self._handlers[name] = handler
for esc_str in esc_strings:
self._esc_handlers[esc_str] = handler
def unregister_handler(self, name, handler, esc_strings):
"""Unregister a handler instance by name with esc_strings."""
try:
del self._handlers[name]
except KeyError:
pass
for esc_str in esc_strings:
h = self._esc_handlers.get(esc_str)
if h is handler:
del self._esc_handlers[esc_str]
def get_handler_by_name(self, name):
"""Get a handler by its name."""
return self._handlers.get(name)
def get_handler_by_esc(self, esc_str):
"""Get a handler by its escape string."""
return self._esc_handlers.get(esc_str)
#-------------------------------------------------------------------------
# Main prefiltering API
#-------------------------------------------------------------------------
def prefilter_line_info(self, line_info):
"""Prefilter a line that has been converted to a LineInfo object.
This implements the checker/handler part of the prefilter pipe.
"""
# print "prefilter_line_info: ", line_info
handler = self.find_handler(line_info)
return handler.handle(line_info)
def find_handler(self, line_info):
"""Find a handler for the line_info by trying checkers."""
for checker in self.checkers:
if checker.enabled:
handler = checker.check(line_info)
if handler:
return handler
return self.get_handler_by_name('normal')
def transform_line(self, line, continue_prompt):
"""Calls the enabled transformers in order of increasing priority."""
for transformer in self.transformers:
if transformer.enabled:
line = transformer.transform(line, continue_prompt)
return line
def prefilter_line(self, line, continue_prompt=False):
"""Prefilter a single input line as text.
This method prefilters a single line of text by calling the
transformers and then the checkers/handlers.
"""
# print "prefilter_line: ", line, continue_prompt
# All handlers *must* return a value, even if it's blank ('').
# save the line away in case we crash, so the post-mortem handler can
# record it
self.shell._last_input_line = line
if not line:
# Return immediately on purely empty lines, so that if the user
# previously typed some whitespace that started a continuation
# prompt, he can break out of that loop with just an empty line.
# This is how the default python prompt works.
return ''
# At this point, we invoke our transformers.
if not continue_prompt or (continue_prompt and self.multi_line_specials):
line = self.transform_line(line, continue_prompt)
# Now we compute line_info for the checkers and handlers
line_info = LineInfo(line, continue_prompt)
# the input history needs to track even empty lines
stripped = line.strip()
normal_handler = self.get_handler_by_name('normal')
if not stripped:
return normal_handler.handle(line_info)
# special handlers are only allowed for single line statements
if continue_prompt and not self.multi_line_specials:
return normal_handler.handle(line_info)
prefiltered = self.prefilter_line_info(line_info)
# print "prefiltered line: %r" % prefiltered
return prefiltered
def prefilter_lines(self, lines, continue_prompt=False):
"""Prefilter multiple input lines of text.
This is the main entry point for prefiltering multiple lines of
input. This simply calls :meth:`prefilter_line` for each line of
input.
This covers cases where there are multiple lines in the user entry,
which is the case when the user goes back to a multiline history
entry and presses enter.
"""
llines = lines.rstrip('\n').split('\n')
# We can get multiple lines in one shot, where multiline input 'blends'
# into one line, in cases like recalling from the readline history
# buffer. We need to make sure that in such cases, we correctly
# communicate downstream which line is first and which are continuation
# ones.
if len(llines) > 1:
out = '\n'.join([self.prefilter_line(line, lnum>0)
for lnum, line in enumerate(llines) ])
else:
out = self.prefilter_line(llines[0], continue_prompt)
return out
#-----------------------------------------------------------------------------
# Prefilter transformers
#-----------------------------------------------------------------------------
class PrefilterTransformer(Configurable):
"""Transform a line of user input."""
priority = Integer(100).tag(config=True)
# Transformers don't currently use shell or prefilter_manager, but as we
# move away from checkers and handlers, they will need them.
shell = Instance('yap_ipython.core.interactiveshell.InteractiveShellABC', allow_none=True)
prefilter_manager = Instance('yap_ipython.core.prefilter.PrefilterManager', allow_none=True)
enabled = Bool(True).tag(config=True)
def __init__(self, shell=None, prefilter_manager=None, **kwargs):
super(PrefilterTransformer, self).__init__(
shell=shell, prefilter_manager=prefilter_manager, **kwargs
)
self.prefilter_manager.register_transformer(self)
def transform(self, line, continue_prompt):
"""Transform a line, returning the new one."""
return None
def __repr__(self):
return "<%s(priority=%r, enabled=%r)>" % (
self.__class__.__name__, self.priority, self.enabled)
#-----------------------------------------------------------------------------
# Prefilter checkers
#-----------------------------------------------------------------------------
class PrefilterChecker(Configurable):
"""Inspect an input line and return a handler for that line."""
priority = Integer(100).tag(config=True)
shell = Instance('yap_ipython.core.interactiveshell.InteractiveShellABC', allow_none=True)
prefilter_manager = Instance('yap_ipython.core.prefilter.PrefilterManager', allow_none=True)
enabled = Bool(True).tag(config=True)
def __init__(self, shell=None, prefilter_manager=None, **kwargs):
super(PrefilterChecker, self).__init__(
shell=shell, prefilter_manager=prefilter_manager, **kwargs
)
self.prefilter_manager.register_checker(self)
def check(self, line_info):
"""Inspect line_info and return a handler instance or None."""
return None
def __repr__(self):
return "<%s(priority=%r, enabled=%r)>" % (
self.__class__.__name__, self.priority, self.enabled)
class EmacsChecker(PrefilterChecker):
priority = Integer(100).tag(config=True)
enabled = Bool(False).tag(config=True)
def check(self, line_info):
"Emacs ipython-mode tags certain input lines."
if line_info.line.endswith('# PYTHON-MODE'):
return self.prefilter_manager.get_handler_by_name('emacs')
else:
return None
class MacroChecker(PrefilterChecker):
priority = Integer(250).tag(config=True)
def check(self, line_info):
obj = self.shell.user_ns.get(line_info.ifun)
if isinstance(obj, Macro):
return self.prefilter_manager.get_handler_by_name('macro')
else:
return None
class IPyAutocallChecker(PrefilterChecker):
priority = Integer(300).tag(config=True)
def check(self, line_info):
"Instances of IPyAutocall in user_ns get autocalled immediately"
obj = self.shell.user_ns.get(line_info.ifun, None)
if isinstance(obj, IPyAutocall):
obj.set_ip(self.shell)
return self.prefilter_manager.get_handler_by_name('auto')
else:
return None
class AssignmentChecker(PrefilterChecker):
priority = Integer(600).tag(config=True)
def check(self, line_info):
"""Check to see if user is assigning to a var for the first time, in
which case we want to avoid any sort of automagic / autocall games.
This allows users to rebind alias or magic names to true python
variables (the magic/alias systems always take second seat to true
python code). E.g. ls='hi', or ls,that=1,2"""
if line_info.the_rest:
if line_info.the_rest[0] in '=,':
return self.prefilter_manager.get_handler_by_name('normal')
else:
return None
class AutoMagicChecker(PrefilterChecker):
priority = Integer(700).tag(config=True)
def check(self, line_info):
"""If the ifun is magic, and automagic is on, run it. Note: normal,
non-auto magic would already have been triggered via '%' in
check_esc_chars. This just checks for automagic. Also, before
triggering the magic handler, make sure that there is nothing in the
user namespace which could shadow it."""
if not self.shell.automagic or not self.shell.find_magic(line_info.ifun):
return None
# We have a likely magic method. Make sure we should actually call it.
if line_info.continue_prompt and not self.prefilter_manager.multi_line_specials:
return None
head = line_info.ifun.split('.',1)[0]
if is_shadowed(head, self.shell):
return None
return self.prefilter_manager.get_handler_by_name('magic')
class PythonOpsChecker(PrefilterChecker):
priority = Integer(900).tag(config=True)
def check(self, line_info):
"""If the 'rest' of the line begins with a function call or pretty much
any python operator, we should simply execute the line (regardless of
whether or not there's a possible autocall expansion). This avoids
spurious (and very confusing) getattr() accesses."""
if line_info.the_rest and line_info.the_rest[0] in '!=()<>,+*/%^&|':
return self.prefilter_manager.get_handler_by_name('normal')
else:
return None
class AutocallChecker(PrefilterChecker):
priority = Integer(1000).tag(config=True)
function_name_regexp = CRegExp(re_fun_name,
help="RegExp to identify potential function names."
).tag(config=True)
exclude_regexp = CRegExp(re_exclude_auto,
help="RegExp to exclude strings with this start from autocalling."
).tag(config=True)
def check(self, line_info):
"Check if the initial word/function is callable and autocall is on."
if not self.shell.autocall:
return None
oinfo = line_info.ofind(self.shell) # This can mutate state via getattr
if not oinfo['found']:
return None
ignored_funs = ['b', 'f', 'r', 'u', 'br', 'rb', 'fr', 'rf']
ifun = line_info.ifun
line = line_info.line
if ifun.lower() in ignored_funs and (line.startswith(ifun + "'") or line.startswith(ifun + '"')):
return None
if callable(oinfo['obj']) \
and (not self.exclude_regexp.match(line_info.the_rest)) \
and self.function_name_regexp.match(line_info.ifun):
return self.prefilter_manager.get_handler_by_name('auto')
else:
return None
#-----------------------------------------------------------------------------
# Prefilter handlers
#-----------------------------------------------------------------------------
class PrefilterHandler(Configurable):
handler_name = Unicode('normal')
esc_strings = List([])
shell = Instance('yap_ipython.core.interactiveshell.InteractiveShellABC', allow_none=True)
prefilter_manager = Instance('yap_ipython.core.prefilter.PrefilterManager', allow_none=True)
def __init__(self, shell=None, prefilter_manager=None, **kwargs):
super(PrefilterHandler, self).__init__(
shell=shell, prefilter_manager=prefilter_manager, **kwargs
)
self.prefilter_manager.register_handler(
self.handler_name,
self,
self.esc_strings
)
def handle(self, line_info):
# print "normal: ", line_info
"""Handle normal input lines. Use as a template for handlers."""
# With autoindent on, we need some way to exit the input loop, and I
# don't want to force the user to have to backspace all the way to
# clear the line. The rule will be in this case, that either two
# lines of pure whitespace in a row, or a line of pure whitespace but
# of a size different to the indent level, will exit the input loop.
line = line_info.line
continue_prompt = line_info.continue_prompt
if (continue_prompt and
self.shell.autoindent and
line.isspace() and
0 < abs(len(line) - self.shell.indent_current_nsp) <= 2):
line = ''
return line
def __str__(self):
return "<%s(name=%s)>" % (self.__class__.__name__, self.handler_name)
class MacroHandler(PrefilterHandler):
handler_name = Unicode("macro")
def handle(self, line_info):
obj = self.shell.user_ns.get(line_info.ifun)
pre_space = line_info.pre_whitespace
line_sep = "\n" + pre_space
return pre_space + line_sep.join(obj.value.splitlines())
class MagicHandler(PrefilterHandler):
handler_name = Unicode('magic')
esc_strings = List([ESC_MAGIC])
def handle(self, line_info):
"""Execute magic functions."""
ifun = line_info.ifun
the_rest = line_info.the_rest
#Prepare arguments for get_ipython().run_line_magic(magic_name, magic_args)
t_arg_s = ifun + " " + the_rest
t_magic_name, _, t_magic_arg_s = t_arg_s.partition(' ')
t_magic_name = t_magic_name.lstrip(ESC_MAGIC)
cmd = '%sget_ipython().run_line_magic(%r, %r)' % (line_info.pre_whitespace, t_magic_name, t_magic_arg_s)
return cmd
class AutoHandler(PrefilterHandler):
handler_name = Unicode('auto')
esc_strings = List([ESC_PAREN, ESC_QUOTE, ESC_QUOTE2])
def handle(self, line_info):
"""Handle lines which can be auto-executed, quoting if requested."""
line = line_info.line
ifun = line_info.ifun
the_rest = line_info.the_rest
esc = line_info.esc
continue_prompt = line_info.continue_prompt
obj = line_info.ofind(self.shell)['obj']
# This should only be active for single-line input!
if continue_prompt:
return line
force_auto = isinstance(obj, IPyAutocall)
# User objects sometimes raise exceptions on attribute access other
# than AttributeError (we've seen it in the past), so it's safest to be
# ultra-conservative here and catch all.
try:
auto_rewrite = obj.rewrite
except Exception:
auto_rewrite = True
if esc == ESC_QUOTE:
# Auto-quote splitting on whitespace
newcmd = '%s("%s")' % (ifun,'", "'.join(the_rest.split()) )
elif esc == ESC_QUOTE2:
# Auto-quote whole string
newcmd = '%s("%s")' % (ifun,the_rest)
elif esc == ESC_PAREN:
newcmd = '%s(%s)' % (ifun,",".join(the_rest.split()))
else:
# Auto-paren.
if force_auto:
# Don't rewrite if it is already a call.
do_rewrite = not the_rest.startswith('(')
else:
if not the_rest:
# We only apply it to argument-less calls if the autocall
# parameter is set to 2.
do_rewrite = (self.shell.autocall >= 2)
elif the_rest.startswith('[') and hasattr(obj, '__getitem__'):
# Don't autocall in this case: item access for an object
# which is BOTH callable and implements __getitem__.
do_rewrite = False
else:
do_rewrite = True
# Figure out the rewritten command
if do_rewrite:
if the_rest.endswith(';'):
newcmd = '%s(%s);' % (ifun.rstrip(),the_rest[:-1])
else:
newcmd = '%s(%s)' % (ifun.rstrip(), the_rest)
else:
normal_handler = self.prefilter_manager.get_handler_by_name('normal')
return normal_handler.handle(line_info)
# Display the rewritten call
if auto_rewrite:
self.shell.auto_rewrite_input(newcmd)
return newcmd
class EmacsHandler(PrefilterHandler):
handler_name = Unicode('emacs')
esc_strings = List([])
def handle(self, line_info):
"""Handle input lines marked by python-mode."""
# Currently, nothing is done. Later more functionality can be added
# here if needed.
# The input cache shouldn't be updated
return line_info.line
#-----------------------------------------------------------------------------
# Defaults
#-----------------------------------------------------------------------------
_default_transformers = [
]
_default_checkers = [
EmacsChecker,
MacroChecker,
IPyAutocallChecker,
AssignmentChecker,
AutoMagicChecker,
PythonOpsChecker,
AutocallChecker
]
_default_handlers = [
PrefilterHandler,
MacroHandler,
MagicHandler,
AutoHandler,
EmacsHandler
]
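# How the pieces above fit together: checkers run in priority order over a
# LineInfo, and the first one whose check() returns a handler decides how the
# line is rewritten. A minimal dispatch sketch (the real PrefilterManager,
# defined earlier in this file, keeps more state but follows the same idea):
def _prefilter_dispatch_sketch(line_info, checkers, handlers):
    # checkers: PrefilterChecker-like objects exposing .priority and .check()
    # handlers: mapping of handler_name -> PrefilterHandler-like object
    for checker in sorted(checkers, key=lambda c: c.priority):
        handler = checker.check(line_info)
        if handler is not None:
            return handler.handle(line_info)
    # no checker claimed the line: fall back to the 'normal' handler
    return handlers['normal'].handle(line_info)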

View File

@ -0,0 +1,312 @@
# encoding: utf-8
"""
An application for managing yap_ipython profiles.
To be invoked as the `ipython profile` subcommand.
Authors:
* Min RK
"""
#-----------------------------------------------------------------------------
# Copyright (C) 2008 The yap_ipython Development Team
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
import os
from traitlets.config.application import Application
from yap_ipython.core.application import (
BaseYAPApplication, base_flags
)
from yap_ipython.core.profiledir import ProfileDir
from yap_ipython.utils.importstring import import_item
from yap_ipython.paths import get_ipython_dir, get_ipython_package_dir
from traitlets import Unicode, Bool, Dict, observe
#-----------------------------------------------------------------------------
# Constants
#-----------------------------------------------------------------------------
create_help = """Create a yap_ipython profile by name
Create an ipython profile directory by its name or
profile directory path. Profile directories contain
configuration, log and security related files and are named
using the convention 'profile_<name>'. By default they are
located in your ipython directory. Once created, you can
edit the configuration files in the profile
directory to configure yap_ipython. Most users will create a
profile directory by name,
`ipython profile create myprofile`, which will put the directory
in `<ipython_dir>/profile_myprofile`.
"""
list_help = """List available yap_ipython profiles
List all available profiles, by profile location, that can
be found in the current working directory or in the ipython
directory. Profile directories are named using the convention
'profile_<profile>'.
"""
profile_help = """Manage yap_ipython profiles
Profile directories contain
configuration, log and security related files and are named
using the convention 'profile_<name>'. By default they are
located in your ipython directory. You can create profiles
with `ipython profile create <name>`, or see the profiles you
already have with `ipython profile list`
To get started configuring yap_ipython, simply do:
$> ipython profile create
and yap_ipython will create the default profile in <ipython_dir>/profile_default,
where you can edit ipython_config.py to start configuring yap_ipython.
"""
_list_examples = "ipython profile list # list all profiles"
_create_examples = """
ipython profile create foo # create profile foo w/ default config files
ipython profile create foo --reset # restage default config files over current
ipython profile create foo --parallel # also stage parallel config files
"""
_main_examples = """
ipython profile create -h # show the help string for the create subcommand
ipython profile list -h # show the help string for the list subcommand
ipython locate profile foo # print the path to the directory for profile 'foo'
"""
#-----------------------------------------------------------------------------
# Profile Application Class (for `ipython profile` subcommand)
#-----------------------------------------------------------------------------
def list_profiles_in(path):
"""list profiles in a given root directory"""
files = os.listdir(path)
profiles = []
for f in files:
try:
full_path = os.path.join(path, f)
except UnicodeError:
continue
if os.path.isdir(full_path) and f.startswith('profile_'):
profiles.append(f.split('_',1)[-1])
return profiles
def list_bundled_profiles():
"""list profiles that are bundled with yap_ipython."""
path = os.path.join(get_ipython_package_dir(), u'core', u'profile')
files = os.listdir(path)
profiles = []
for profile in files:
full_path = os.path.join(path, profile)
if os.path.isdir(full_path) and profile != "__pycache__":
profiles.append(profile)
return profiles
class ProfileLocate(BaseYAPApplication):
description = """print the path to a yap_ipython profile dir"""
def parse_command_line(self, argv=None):
super(ProfileLocate, self).parse_command_line(argv)
if self.extra_args:
self.profile = self.extra_args[0]
def start(self):
print(self.profile_dir.location)
class ProfileList(Application):
name = u'ipython-profile'
description = list_help
examples = _list_examples
aliases = Dict({
'ipython-dir' : 'ProfileList.ipython_dir',
'log-level' : 'Application.log_level',
})
flags = Dict(dict(
debug = ({'Application' : {'log_level' : 0}},
"Set Application.log_level to 0, maximizing log output."
)
))
ipython_dir = Unicode(get_ipython_dir(),
help="""
The name of the yap_ipython directory. This directory is used for logging
configuration (through profiles), history storage, etc. The default
is usually $HOME/.ipython. This option can also be specified through
the environment variable IPYTHONDIR.
"""
).tag(config=True)
def _print_profiles(self, profiles):
"""print list of profiles, indented."""
for profile in profiles:
print(' %s' % profile)
def list_profile_dirs(self):
profiles = list_bundled_profiles()
if profiles:
print()
print("Available profiles in yap_ipython:")
self._print_profiles(profiles)
print()
print(" The first request for a bundled profile will copy it")
print(" into your yap_ipython directory (%s)," % self.ipython_dir)
print(" where you can customize it.")
profiles = list_profiles_in(self.ipython_dir)
if profiles:
print()
print("Available profiles in %s:" % self.ipython_dir)
self._print_profiles(profiles)
profiles = list_profiles_in(os.getcwd())
if profiles:
print()
print("Available profiles in current directory (%s):" % os.getcwd())
self._print_profiles(profiles)
print()
print("To use any of the above profiles, start yap_ipython with:")
print(" ipython --profile=<name>")
print()
def start(self):
self.list_profile_dirs()
create_flags = {}
create_flags.update(base_flags)
# don't include '--init' flag, which implies running profile create in other apps
create_flags.pop('init')
create_flags['reset'] = ({'ProfileCreate': {'overwrite' : True}},
"reset config files in this profile to the defaults.")
create_flags['parallel'] = ({'ProfileCreate': {'parallel' : True}},
"Include the config files for parallel "
"computing apps (ipengine, ipcontroller, etc.)")
class ProfileCreate(BaseYAPApplication):
name = u'ipython-profile'
description = create_help
examples = _create_examples
auto_create = Bool(True)
def _log_format_default(self):
return "[%(name)s] %(message)s"
def _copy_config_files_default(self):
return True
parallel = Bool(False,
help="whether to include parallel computing config files"
).tag(config=True)
@observe('parallel')
def _parallel_changed(self, change):
parallel_files = [ 'ipcontroller_config.py',
'ipengine_config.py',
'ipcluster_config.py'
]
if change['new']:
for cf in parallel_files:
self.config_files.append(cf)
else:
for cf in parallel_files:
if cf in self.config_files:
self.config_files.remove(cf)
def parse_command_line(self, argv):
super(ProfileCreate, self).parse_command_line(argv)
# accept positional arg as profile name
if self.extra_args:
self.profile = self.extra_args[0]
flags = Dict(create_flags)
classes = [ProfileDir]
def _import_app(self, app_path):
"""import an app class"""
app = None
name = app_path.rsplit('.', 1)[-1]
try:
app = import_item(app_path)
except ImportError:
self.log.info("Couldn't import %s, config file will be excluded", name)
except Exception:
self.log.warning('Unexpected error importing %s', name, exc_info=True)
return app
def init_config_files(self):
super(ProfileCreate, self).init_config_files()
# use local imports, since these classes may import from here
from yap_ipython.terminal.ipapp import TerminalIPythonApp
apps = [TerminalIPythonApp]
for app_path in (
'yap_kernel.kernelapp.YAPKernelApp',
):
app = self._import_app(app_path)
if app is not None:
apps.append(app)
if self.parallel:
from ipyparallel.apps.ipcontrollerapp import IPControllerApp
from ipyparallel.apps.ipengineapp import IPEngineApp
from ipyparallel.apps.ipclusterapp import IPClusterStart
apps.extend([
IPControllerApp,
IPEngineApp,
IPClusterStart,
])
for App in apps:
app = App()
app.config.update(self.config)
app.log = self.log
app.overwrite = self.overwrite
app.copy_config_files=True
app.ipython_dir=self.ipython_dir
app.profile_dir=self.profile_dir
app.init_config_files()
def stage_default_config_file(self):
pass
class ProfileApp(Application):
name = u'ipython profile'
description = profile_help
examples = _main_examples
subcommands = Dict(dict(
create = (ProfileCreate, ProfileCreate.description.splitlines()[0]),
list = (ProfileList, ProfileList.description.splitlines()[0]),
locate = (ProfileLocate, ProfileLocate.description.splitlines()[0]),
))
def start(self):
if self.subapp is None:
print("No subcommand specified. Must specify one of: %s"%(self.subcommands.keys()))
print()
self.print_description()
self.print_subcommands()
self.exit(1)
else:
return self.subapp.start()
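# A small usage sketch for the directory helpers above; the root defaults to
# the yap_ipython directory and is only scanned if it exists.
def _list_profiles_sketch(root=None):
    root = root or get_ipython_dir()
    found = list_profiles_in(root) if os.path.isdir(root) else []
    for name in found:
        print('profile_%s -> %s' % (name, os.path.join(root, 'profile_' + name)))
    return found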

View File

@ -0,0 +1,223 @@
# encoding: utf-8
"""An object for managing yap_ipython profile directories."""
# Copyright (c) yap_ipython Development Team.
# Distributed under the terms of the Modified BSD License.
import os
import shutil
import errno
from traitlets.config.configurable import LoggingConfigurable
from yap_ipython.paths import get_ipython_package_dir
from yap_ipython.utils.path import expand_path, ensure_dir_exists
from traitlets import Unicode, Bool, observe
#-----------------------------------------------------------------------------
# Module errors
#-----------------------------------------------------------------------------
class ProfileDirError(Exception):
pass
#-----------------------------------------------------------------------------
# Class for managing profile directories
#-----------------------------------------------------------------------------
class ProfileDir(LoggingConfigurable):
"""An object to manage the profile directory and its resources.
The profile directory is used by all yap_ipython applications, to manage
configuration, logging and security.
This object knows how to find, create and manage these directories. This
should be used by any code that wants to handle profiles.
"""
security_dir_name = Unicode('security')
log_dir_name = Unicode('log')
startup_dir_name = Unicode('startup')
pid_dir_name = Unicode('pid')
static_dir_name = Unicode('static')
security_dir = Unicode(u'')
log_dir = Unicode(u'')
startup_dir = Unicode(u'')
pid_dir = Unicode(u'')
static_dir = Unicode(u'')
location = Unicode(u'',
help="""Set the profile location directly. This overrides the logic used by the
`profile` option.""",
).tag(config=True)
_location_isset = Bool(False) # flag for detecting multiply set location
@observe('location')
def _location_changed(self, change):
if self._location_isset:
raise RuntimeError("Cannot set profile location more than once.")
self._location_isset = True
new = change['new']
ensure_dir_exists(new)
# ensure config files exist:
self.security_dir = os.path.join(new, self.security_dir_name)
self.log_dir = os.path.join(new, self.log_dir_name)
self.startup_dir = os.path.join(new, self.startup_dir_name)
self.pid_dir = os.path.join(new, self.pid_dir_name)
self.static_dir = os.path.join(new, self.static_dir_name)
self.check_dirs()
def _mkdir(self, path, mode=None):
"""ensure a directory exists at a given path
This is a version of os.mkdir, with the following differences:
- returns True if it created the directory, False otherwise
- ignores EEXIST, protecting against race conditions where
the dir may have been created in between the check and
the creation
- sets permissions if requested and the dir already exists
"""
if os.path.exists(path):
if mode and os.stat(path).st_mode != mode:
try:
os.chmod(path, mode)
except OSError:
self.log.warning(
"Could not set permissions on %s",
path
)
return False
try:
if mode:
os.mkdir(path, mode)
else:
os.mkdir(path)
except OSError as e:
if e.errno == errno.EEXIST:
return False
else:
raise
return True
@observe('log_dir')
def check_log_dir(self, change=None):
self._mkdir(self.log_dir)
@observe('startup_dir')
def check_startup_dir(self, change=None):
self._mkdir(self.startup_dir)
readme = os.path.join(self.startup_dir, 'README')
src = os.path.join(get_ipython_package_dir(), u'core', u'profile', u'README_STARTUP')
if not os.path.exists(src):
self.log.warning("Could not copy README_STARTUP to startup dir. Source file %s does not exist.", src)
if os.path.exists(src) and not os.path.exists(readme):
shutil.copy(src, readme)
@observe('security_dir')
def check_security_dir(self, change=None):
self._mkdir(self.security_dir, 0o40700)
@observe('pid_dir')
def check_pid_dir(self, change=None):
self._mkdir(self.pid_dir, 0o40700)
def check_dirs(self):
self.check_security_dir()
self.check_log_dir()
self.check_pid_dir()
self.check_startup_dir()
def copy_config_file(self, config_file, path=None, overwrite=False):
"""Copy a default config file into the active profile directory.
Default configuration files are kept in :mod:`yap_ipython.core.profile`.
This function moves these from that location to the working profile
directory.
"""
dst = os.path.join(self.location, config_file)
if os.path.isfile(dst) and not overwrite:
return False
if path is None:
path = os.path.join(get_ipython_package_dir(), u'core', u'profile', u'default')
src = os.path.join(path, config_file)
shutil.copy(src, dst)
return True
@classmethod
def create_profile_dir(cls, profile_dir, config=None):
"""Create a new profile directory given a full path.
Parameters
----------
profile_dir : str
The full path to the profile directory. If it does exist, it will
be used. If not, it will be created.
"""
return cls(location=profile_dir, config=config)
@classmethod
def create_profile_dir_by_name(cls, path, name=u'default', config=None):
"""Create a profile dir by profile name and path.
Parameters
----------
path : unicode
The path (directory) to put the profile directory in.
name : unicode
The name of the profile. The name of the profile directory will
be "profile_<profile>".
"""
if not os.path.isdir(path):
raise ProfileDirError('Directory not found: %s' % path)
profile_dir = os.path.join(path, u'profile_' + name)
return cls(location=profile_dir, config=config)
@classmethod
def find_profile_dir_by_name(cls, ipython_dir, name=u'default', config=None):
"""Find an existing profile dir by profile name, return its ProfileDir.
This searches through a sequence of paths for a profile dir. If it
is not found, a :class:`ProfileDirError` exception will be raised.
The search path algorithm is:
1. ``os.getcwd()``
2. ``ipython_dir``
Parameters
----------
ipython_dir : unicode or str
The yap_ipython directory to use.
name : unicode or str
The name of the profile. The name of the profile directory
will be "profile_<profile>".
"""
dirname = u'profile_' + name
paths = [os.getcwd(), ipython_dir]
for p in paths:
profile_dir = os.path.join(p, dirname)
if os.path.isdir(profile_dir):
return cls(location=profile_dir, config=config)
else:
raise ProfileDirError('Profile directory not found in paths: %s' % dirname)
@classmethod
def find_profile_dir(cls, profile_dir, config=None):
"""Find/create a profile dir and return its ProfileDir.
This will create the profile directory if it doesn't exist.
Parameters
----------
profile_dir : unicode or str
The path of the profile directory.
"""
profile_dir = expand_path(profile_dir)
if not os.path.isdir(profile_dir):
raise ProfileDirError('Profile directory not found: %s' % profile_dir)
return cls(location=profile_dir, config=config)
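# Hypothetical usage sketch for ProfileDir: create (or find) a profile
# directory by name; the trait observers above then create the log/, startup/,
# security/ and pid/ subdirectories on demand.
def _profile_dir_sketch():
    import tempfile
    root = tempfile.mkdtemp()          # stand-in for the real ipython dir
    pd = ProfileDir.create_profile_dir_by_name(root, u'demo')
    # location is <root>/profile_demo; the subdirectories exist at this point
    return pd.location, pd.log_dir, pd.startup_dir, pd.security_dir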

View File

@ -0,0 +1,21 @@
# -*- coding: utf-8 -*-
"""Being removed
"""
class LazyEvaluate(object):
"""This is used for formatting strings with values that need to be updated
at that time, such as the current time or working directory."""
def __init__(self, func, *args, **kwargs):
self.func = func
self.args = args
self.kwargs = kwargs
def __call__(self, **kwargs):
self.kwargs.update(kwargs)
return self.func(*self.args, **self.kwargs)
def __str__(self):
return str(self())
def __format__(self, format_spec):
return format(self(), format_spec)
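# Usage sketch: defer evaluation until the value is actually rendered, e.g. a
# timestamp that should reflect the moment of formatting, not of definition.
def _lazy_evaluate_sketch():
    import time
    now = LazyEvaluate(time.strftime, '%H:%M:%S')
    # each rendering re-runs time.strftime, so the value tracks the clock
    return str(now), '{0}'.format(now)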

View File

@ -0,0 +1,412 @@
# -*- coding: utf-8 -*-
"""Pylab (matplotlib) support utilities."""
# Copyright (c) yap_ipython Development Team.
# Distributed under the terms of the Modified BSD License.
from io import BytesIO
from yap_ipython.core.display import _pngxy
from yap_ipython.utils.decorators import flag_calls
# If user specifies a GUI, that dictates the backend, otherwise we read the
# user's mpl default from the mpl rc structure
backends = {'tk': 'TkAgg',
'gtk': 'GTKAgg',
'gtk3': 'GTK3Agg',
'wx': 'WXAgg',
'qt4': 'Qt4Agg',
'qt5': 'Qt5Agg',
'qt': 'Qt5Agg',
'osx': 'MacOSX',
'nbagg': 'nbAgg',
'notebook': 'nbAgg',
'agg': 'agg',
'svg': 'svg',
'pdf': 'pdf',
'ps': 'ps',
'inline': 'module://yap_kernel.pylab.backend_inline',
'ipympl': 'module://ipympl.backend_nbagg',
'widget': 'module://ipympl.backend_nbagg',
}
# We also need a reverse backends2guis mapping that will properly choose which
# GUI support to activate based on the desired matplotlib backend. For the
# most part it's just a reverse of the above dict, but we also need to add a
# few others that map to the same GUI manually:
backend2gui = dict(zip(backends.values(), backends.keys()))
# Our tests expect backend2gui to just return 'qt'
backend2gui['Qt4Agg'] = 'qt'
# In the reverse mapping, there are a few extra valid matplotlib backends that
# map to the same GUI support
backend2gui['GTK'] = backend2gui['GTKCairo'] = 'gtk'
backend2gui['GTK3Cairo'] = 'gtk3'
backend2gui['WX'] = 'wx'
backend2gui['CocoaAgg'] = 'osx'
# And some backends that don't need GUI integration
del backend2gui['nbAgg']
del backend2gui['agg']
del backend2gui['module://yap_kernel.pylab.backend_inline']
#-----------------------------------------------------------------------------
# Matplotlib utilities
#-----------------------------------------------------------------------------
def getfigs(*fig_nums):
"""Get a list of matplotlib figures by figure numbers.
If no arguments are given, all available figures are returned. If the
argument list contains references to invalid figures, a warning is printed
but the function continues processing further figures.
Parameters
----------
fig_nums : tuple
A tuple of ints giving the figure numbers of the figures to return.
"""
from matplotlib._pylab_helpers import Gcf
if not fig_nums:
fig_managers = Gcf.get_all_fig_managers()
return [fm.canvas.figure for fm in fig_managers]
else:
figs = []
for num in fig_nums:
f = Gcf.figs.get(num)
if f is None:
print('Warning: figure %s not available.' % num)
else:
figs.append(f.canvas.figure)
return figs
def figsize(sizex, sizey):
"""Set the default figure size to be [sizex, sizey].
This is just an easy to remember, convenience wrapper that sets::
matplotlib.rcParams['figure.figsize'] = [sizex, sizey]
"""
import matplotlib
matplotlib.rcParams['figure.figsize'] = [sizex, sizey]
def print_figure(fig, fmt='png', bbox_inches='tight', **kwargs):
"""Print a figure to an image, and return the resulting file data
Returned data will be bytes unless ``fmt='svg'``,
in which case it will be unicode.
Any keyword args are passed to fig.canvas.print_figure,
such as ``quality`` or ``bbox_inches``.
"""
# When there's an empty figure, we shouldn't return anything, otherwise we
# get big blank areas in the qt console.
if not fig.axes and not fig.lines:
return
dpi = fig.dpi
if fmt == 'retina':
dpi = dpi * 2
fmt = 'png'
# build keyword args
kw = {
"format":fmt,
"facecolor":fig.get_facecolor(),
"edgecolor":fig.get_edgecolor(),
"dpi":dpi,
"bbox_inches":bbox_inches,
}
# **kwargs get higher priority
kw.update(kwargs)
bytes_io = BytesIO()
fig.canvas.print_figure(bytes_io, **kw)
data = bytes_io.getvalue()
if fmt == 'svg':
data = data.decode('utf-8')
return data
def retina_figure(fig, **kwargs):
"""format a figure as a pixel-doubled (retina) PNG"""
pngdata = print_figure(fig, fmt='retina', **kwargs)
# Make sure that retina_figure acts just like print_figure and returns
# None when the figure is empty.
if pngdata is None:
return
w, h = _pngxy(pngdata)
metadata = {"width": w//2, "height":h//2}
return pngdata, metadata
# We need a little factory function here to create the closure where
# safe_execfile can live.
def mpl_runner(safe_execfile):
"""Factory to return a matplotlib-enabled runner for %run.
Parameters
----------
safe_execfile : function
This must be a function with the same interface as the
:meth:`safe_execfile` method of yap_ipython.
Returns
-------
A function suitable for use as the ``runner`` argument of the %run magic
function.
"""
def mpl_execfile(fname,*where,**kw):
"""matplotlib-aware wrapper around safe_execfile.
Its interface is identical to that of the :func:`execfile` builtin.
This is ultimately a call to execfile(), but wrapped in safeties to
properly handle interactive rendering."""
import matplotlib
import matplotlib.pyplot as plt
#print '*** Matplotlib runner ***' # dbg
# turn off rendering until end of script
is_interactive = matplotlib.rcParams['interactive']
matplotlib.interactive(False)
safe_execfile(fname,*where,**kw)
matplotlib.interactive(is_interactive)
# make rendering call now, if the user tried to do it
if plt.draw_if_interactive.called:
plt.draw()
plt.draw_if_interactive.called = False
# re-draw everything that is stale
try:
da = plt.draw_all
except AttributeError:
pass
else:
da()
return mpl_execfile
def _reshow_nbagg_figure(fig):
"""reshow an nbagg figure"""
try:
reshow = fig.canvas.manager.reshow
except AttributeError:
raise NotImplementedError()
else:
reshow()
def select_figure_formats(shell, formats, **kwargs):
"""Select figure formats for the inline backend.
Parameters
==========
shell : InteractiveShell
The main yap_ipython instance.
formats : str or set
One or a set of figure formats to enable: 'png', 'retina', 'jpeg', 'svg', 'pdf'.
**kwargs : any
Extra keyword arguments to be passed to fig.canvas.print_figure.
"""
import matplotlib
from matplotlib.figure import Figure
svg_formatter = shell.display_formatter.formatters['image/svg+xml']
png_formatter = shell.display_formatter.formatters['image/png']
jpg_formatter = shell.display_formatter.formatters['image/jpeg']
pdf_formatter = shell.display_formatter.formatters['application/pdf']
if isinstance(formats, str):
formats = {formats}
# cast in case of list / tuple
formats = set(formats)
[ f.pop(Figure, None) for f in shell.display_formatter.formatters.values() ]
mplbackend = matplotlib.get_backend().lower()
if mplbackend == 'nbagg' or mplbackend == 'module://ipympl.backend_nbagg':
formatter = shell.display_formatter.ipython_display_formatter
formatter.for_type(Figure, _reshow_nbagg_figure)
supported = {'png', 'png2x', 'retina', 'jpg', 'jpeg', 'svg', 'pdf'}
bad = formats.difference(supported)
if bad:
bs = "%s" % ','.join([repr(f) for f in bad])
gs = "%s" % ','.join([repr(f) for f in supported])
raise ValueError("supported formats are: %s not %s" % (gs, bs))
if 'png' in formats:
png_formatter.for_type(Figure, lambda fig: print_figure(fig, 'png', **kwargs))
if 'retina' in formats or 'png2x' in formats:
png_formatter.for_type(Figure, lambda fig: retina_figure(fig, **kwargs))
if 'jpg' in formats or 'jpeg' in formats:
jpg_formatter.for_type(Figure, lambda fig: print_figure(fig, 'jpg', **kwargs))
if 'svg' in formats:
svg_formatter.for_type(Figure, lambda fig: print_figure(fig, 'svg', **kwargs))
if 'pdf' in formats:
pdf_formatter.for_type(Figure, lambda fig: print_figure(fig, 'pdf', **kwargs))
#-----------------------------------------------------------------------------
# Code for initializing matplotlib and importing pylab
#-----------------------------------------------------------------------------
def find_gui_and_backend(gui=None, gui_select=None):
"""Given a gui string return the gui and mpl backend.
Parameters
----------
gui : str
Can be one of ('tk','gtk','wx','qt','qt4','inline','agg').
gui_select : str
Can be one of ('tk','gtk','wx','qt','qt4','inline').
This is any gui already selected by the shell.
Returns
-------
A tuple of (gui, backend) where backend is one of ('TkAgg','GTKAgg',
'WXAgg','Qt4Agg','module://yap_kernel.pylab.backend_inline','agg').
"""
import matplotlib
if gui and gui != 'auto':
# select backend based on requested gui
backend = backends[gui]
if gui == 'agg':
gui = None
else:
# We need to read the backend from the original data structure, *not*
# from mpl.rcParams, since a prior invocation of %matplotlib may have
# overwritten that.
# WARNING: this assumes matplotlib 1.1 or newer!!
backend = matplotlib.rcParamsOrig['backend']
# In this case, we need to find what the appropriate gui selection call
# should be for yap_ipython, so we can activate inputhook accordingly
gui = backend2gui.get(backend, None)
# If we have already had a gui active, we need it and inline are the
# ones allowed.
if gui_select and gui != gui_select:
gui = gui_select
backend = backends[gui]
return gui, backend
def activate_matplotlib(backend):
"""Activate the given backend and set interactive to True."""
import matplotlib
matplotlib.interactive(True)
# Matplotlib had a bug where even switch_backend could not force
# the rcParam to update. This needs to be set *before* the module
# magic of switch_backend().
matplotlib.rcParams['backend'] = backend
import matplotlib.pyplot
matplotlib.pyplot.switch_backend(backend)
# This must be imported last in the matplotlib series, after
# backend/interactivity choices have been made
import matplotlib.pyplot as plt
plt.show._needmain = False
# We need to detect at runtime whether show() is called by the user.
# For this, we wrap it into a decorator which adds a 'called' flag.
plt.draw_if_interactive = flag_calls(plt.draw_if_interactive)
def import_pylab(user_ns, import_all=True):
"""Populate the namespace with pylab-related values.
Imports matplotlib, pylab, numpy, and everything from pylab and numpy.
Also imports a few names from yap_ipython (figsize, display, getfigs)
"""
# Import numpy as np/pyplot as plt are conventions we're trying to
# somewhat standardize on. Making them available to users by default
# will greatly help this.
s = ("import numpy\n"
"import matplotlib\n"
"from matplotlib import pylab, mlab, pyplot\n"
"np = numpy\n"
"plt = pyplot\n"
)
exec(s, user_ns)
if import_all:
s = ("from matplotlib.pylab import *\n"
"from numpy import *\n")
exec(s, user_ns)
# yap_ipython symbols to add
user_ns['figsize'] = figsize
from yap_ipython.core.display import display
# Add display and getfigs to the user's namespace
user_ns['display'] = display
user_ns['getfigs'] = getfigs
def configure_inline_support(shell, backend):
"""Configure an yap_ipython shell object for matplotlib use.
Parameters
----------
shell : InteractiveShell instance
backend : matplotlib backend
"""
# If using our svg payload backend, register the post-execution
# function that will pick up the results for display. This can only be
# done with access to the real shell object.
# Note: if we can't load the inline backend, then there's no point
# continuing (such as in terminal-only shells in environments without
# zeromq available).
try:
from yap_kernel.pylab.backend_inline import InlineBackend
except ImportError:
return
import matplotlib
cfg = InlineBackend.instance(parent=shell)
cfg.shell = shell
if cfg not in shell.configurables:
shell.configurables.append(cfg)
if backend == backends['inline']:
from yap_kernel.pylab.backend_inline import flush_figures
shell.events.register('post_execute', flush_figures)
# Save rcParams that will be overwritten
shell._saved_rcParams = {}
for k in cfg.rc:
shell._saved_rcParams[k] = matplotlib.rcParams[k]
# load inline_rc
matplotlib.rcParams.update(cfg.rc)
new_backend_name = "inline"
else:
from yap_kernel.pylab.backend_inline import flush_figures
try:
shell.events.unregister('post_execute', flush_figures)
except ValueError:
pass
if hasattr(shell, '_saved_rcParams'):
matplotlib.rcParams.update(shell._saved_rcParams)
del shell._saved_rcParams
new_backend_name = "other"
# only enable the formats once -> don't change the enabled formats (which the user may
# have changed) when getting another "%matplotlib inline" call.
# See https://github.com/ipython/yap_kernel/issues/29
cur_backend = getattr(configure_inline_support, "current_backend", "unset")
if new_backend_name != cur_backend:
# Setup the default figure format
select_figure_formats(shell, cfg.figure_formats, **cfg.print_figure_kwargs)
configure_inline_support.current_backend = new_backend_name
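# Tiny sketch of the gui/backend resolution implemented above (assumes
# matplotlib is importable); the expected values come straight from the
# `backends` dictionary at the top of this file.
def _backend_mapping_sketch():
    assert find_gui_and_backend('tk') == ('tk', 'TkAgg')
    assert find_gui_and_backend('qt') == ('qt', 'Qt5Agg')
    assert find_gui_and_backend('agg') == (None, 'agg')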

View File

@ -1,8 +1,8 @@
# -*- coding: utf-8 -*-
"""Release data for the IPython project."""
"""Release data for the yap_ipython project."""
#-----------------------------------------------------------------------------
# Copyright (c) 2008, IPython Development Team.
# Copyright (c) 2008, yap_ipython Development Team.
# Copyright (c) 2001, Fernando Perez <fernando.perez@colorado.edu>
# Copyright (c) 2001, Janko Hauser <jhauser@zscout.de>
# Copyright (c) 2001, Nathaniel Gray <n8gray@caltech.edu>
@ -16,15 +16,15 @@
# the tarballs and RPMs made by distutils, so it's best to lowercase it.
name = 'ipython'
# IPython version information. An empty _version_extra corresponds to a full
# yap_ipython version information. An empty _version_extra corresponds to a full
# release. 'dev' as a _version_extra string means this is a development
# version
_version_major = 0
_version_minor = 1
_version_major = 6
_version_minor = 2
_version_patch = 0
_version_extra = '.dev'
# _version_extra = 'rc2'
_version_extra = '' # Uncomment this for full releases
#_version_extra = '' # Uncomment this for full releases
# Construct full version string from these.
_ver = [_version_major, _version_minor, _version_patch]
@ -40,11 +40,11 @@ version_info = (_version_major, _version_minor, _version_patch, _version_extra)
kernel_protocol_version_info = (5, 0)
kernel_protocol_version = "%i.%i" % kernel_protocol_version_info
description = "IPython: Productive Interactive Computing"
description = "yap_ipython: Productive Interactive Computing"
long_description = \
"""
IPython provides a rich toolkit to help you make the most out of using Python
yap_ipython provides a rich toolkit to help you make the most out of using Python
interactively. Its main components are:
* A powerful interactive Python shell
@ -64,7 +64,7 @@ The enhanced interactive Python shells have the following main features:
variables and keywords, filenames and function keywords.
* Extensible system of 'magic' commands for controlling the environment and
performing many tasks related either to IPython or the operating system.
performing many tasks related either to yap_ipython or the operating system.
* A rich configuration system with easy switching between different setups
(simpler than changing $PYTHONSTARTUP environment variables every time).
@ -79,7 +79,7 @@ The enhanced interactive Python shells have the following main features:
* Integrated access to the pdb debugger and the Python profiler.
The latest development version is always available from IPython's `GitHub
The latest development version is always available from yap_ipython's `GitHub
site <http://github.com/ipython>`_.
"""
@ -96,7 +96,7 @@ authors = {'Fernando' : ('Fernando Perez','fperez.net@gmail.com'),
'Matthias' : ('Matthias Bussonnier', 'bussonniermatthias@gmail.com'),
}
author = 'The IPython Development Team'
author = 'The yap_ipython Development Team'
author_email = 'ipython-dev@python.org'
@ -108,11 +108,12 @@ platforms = ['Linux','Mac OSX','Windows']
keywords = ['Interactive','Interpreter','Shell', 'Embedding']
classifiers = [
'Framework :: IPython',
'Framework :: yap_ipython',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3 :: Only',
'Topic :: System :: Shells'
]

View File

@ -1,10 +1,10 @@
# encoding: utf-8
"""
A mixin for :class:`~IPython.core.application.Application` classes that
A mixin for :class:`~yap_ipython.core.application.Application` classes that
launch InteractiveShell instances, load extensions, etc.
"""
# Copyright (c) IPython Development Team.
# Copyright (c) yap_ipython Development Team.
# Distributed under the terms of the Modified BSD License.
import glob
@ -15,20 +15,14 @@ import sys
from traitlets.config.application import boolean_flag
from traitlets.config.configurable import Configurable
from traitlets.config.loader import Config
from IPython.core.application import SYSTEM_CONFIG_DIRS
from IPython.core import pylabtools
from IPython.utils.contexts import preserve_keys
from IPython.utils.path import filefind
from yap_ipython.core.application import SYSTEM_CONFIG_DIRS, ENV_CONFIG_DIRS
from yap_ipython.core import pylabtools
from yap_ipython.utils.contexts import preserve_keys
from yap_ipython.utils.path import filefind
from traitlets import (
Unicode, Instance, List, Bool, CaselessStrEnum, observe,
)
from IPython.terminal import pt_inputhooks
ENV_CONFIG_DIRS = []
_env_config_dir = os.path.join(sys.prefix, 'etc', 'ipython')
if _env_config_dir not in SYSTEM_CONFIG_DIRS:
# only add ENV_CONFIG if sys.prefix is not already included
ENV_CONFIG_DIRS.append(_env_config_dir)
from yap_ipython.terminal import pt_inputhooks
#-----------------------------------------------------------------------------
# Aliases and Flags
@ -42,15 +36,15 @@ backend_keys.insert(0, 'auto')
shell_flags = {}
addflag = lambda *args: shell_flags.update(boolean_flag(*args))
addflag('autoindent', 'YAPInteractive.autoindent',
addflag('autoindent', 'InteractiveShell.autoindent',
'Turn on autoindenting.', 'Turn off autoindenting.'
)
addflag('automagic', 'YAPInteractive.automagic',
addflag('automagic', 'InteractiveShell.automagic',
"""Turn on the auto calling of magic commands. Type %%magic at the
IPython prompt for more information.""",
yap_ipython prompt for more information.""",
'Turn off the auto calling of magic commands.'
)
addflag('pdb', 'YAPInteractive.pdb',
addflag('pdb', 'InteractiveShell.pdb',
"Enable auto calling the pdb debugger after every exception.",
"Disable auto calling the pdb debugger after every exception."
)
@ -58,8 +52,8 @@ addflag('pprint', 'PlainTextFormatter.pprint',
"Enable auto pretty printing of results.",
"Disable auto pretty printing of results."
)
addflag('color-info', 'YAPInteractive.color_info',
"""IPython can display information about objects via a set of functions,
addflag('color-info', 'InteractiveShell.color_info',
"""yap_ipython can display information about objects via a set of functions,
and optionally can use colors for this, syntax highlighting
source code and various other elements. This is on by default, but can cause
problems with some pagers. If you see such problems, you can disable the
@ -67,43 +61,43 @@ addflag('color-info', 'YAPInteractive.color_info',
"Disable using colors for info related things."
)
nosep_config = Config()
nosep_config.YAPInteractive.separate_in = ''
nosep_config.YAPInteractive.separate_out = ''
nosep_config.YAPInteractive.separate_out2 = ''
nosep_config.InteractiveShell.separate_in = ''
nosep_config.InteractiveShell.separate_out = ''
nosep_config.InteractiveShell.separate_out2 = ''
shell_flags['nosep']=(nosep_config, "Eliminate all spacing between prompts.")
shell_flags['pylab'] = (
{'YAPInteractiveApp' : {'pylab' : 'auto'}},
{'InteractiveShellApp' : {'pylab' : 'auto'}},
"""Pre-load matplotlib and numpy for interactive use with
the default matplotlib backend."""
)
shell_flags['matplotlib'] = (
{'YAPInteractiveApp' : {'matplotlib' : 'auto'}},
{'InteractiveShellApp' : {'matplotlib' : 'auto'}},
"""Configure matplotlib for interactive use with
the default matplotlib backend."""
)
# it's possible we don't want short aliases for *all* of these:
shell_aliases = dict(
autocall='YAPInteractive.autocall',
colors='YAPInteractive.colors',
logfile='YAPInteractive.logfile',
logappend='YAPInteractive.logappend',
c='YAPInteractiveApp.code_to_run',
m='YAPInteractiveApp.module_to_run',
ext='YAPInteractiveApp.extra_extension',
gui='YAPInteractiveApp.gui',
pylab='YAPInteractiveApp.pylab',
matplotlib='YAPInteractiveApp.matplotlib',
autocall='InteractiveShell.autocall',
colors='InteractiveShell.colors',
logfile='InteractiveShell.logfile',
logappend='InteractiveShell.logappend',
c='InteractiveShellApp.code_to_run',
m='InteractiveShellApp.module_to_run',
ext='InteractiveShellApp.extra_extension',
gui='InteractiveShellApp.gui',
pylab='InteractiveShellApp.pylab',
matplotlib='InteractiveShellApp.matplotlib',
)
shell_aliases['cache-size'] = 'YAPInteractive.cache_size'
shell_aliases['cache-size'] = 'InteractiveShell.cache_size'
#-----------------------------------------------------------------------------
# Main classes and functions
#-----------------------------------------------------------------------------
class YAPInteractiveApp(Configurable):
"""A Mixin for applications that start YAPInteractive instances.
class InteractiveShellApp(Configurable):
"""A Mixin for applications that start InteractiveShell instances.
Provides configurables for loading extensions and executing files
as part of configuring a Shell environment.
@ -118,14 +112,14 @@ class YAPInteractiveApp(Configurable):
- :meth:`init_code`
"""
extensions = List(Unicode(),
help="A list of dotted module names of IPython extensions to load."
help="A list of dotted module names of yap_ipython extensions to load."
).tag(config=True)
extra_extension = Unicode('',
help="dotted module name of an IPython extension to load."
help="dotted module name of an yap_ipython extension to load."
).tag(config=True)
reraise_ipython_extension_failures = Bool(False,
help="Reraise exceptions encountered loading IPython extensions?",
help="Reraise exceptions encountered loading yap_ipython extensions?",
).tag(config=True)
# Extensions that are always loaded (not configurable)
@ -137,17 +131,17 @@ class YAPInteractiveApp(Configurable):
).tag(config=True)
exec_files = List(Unicode(),
help="""List of files to run at IPython startup."""
help="""List of files to run at yap_ipython startup."""
).tag(config=True)
exec_PYTHONSTARTUP = Bool(True,
help="""Run the file referenced by the PYTHONSTARTUP environment
variable at IPython startup."""
variable at yap_ipython startup."""
).tag(config=True)
file_to_run = Unicode('',
help="""A file to be run""").tag(config=True)
exec_lines = List(Unicode(),
help="""lines of code to run at IPython startup."""
help="""lines of code to run at yap_ipython startup."""
).tag(config=True)
code_to_run = Unicode('',
help="Execute the given command string."
@ -168,13 +162,13 @@ class YAPInteractiveApp(Configurable):
"""
).tag(config=True)
pylab_import_all = Bool(True,
help="""If true, IPython will populate the user namespace with numpy, pylab, etc.
help="""If true, yap_ipython will populate the user namespace with numpy, pylab, etc.
and an ``import *`` is done from numpy and pylab, when using pylab mode.
When False, pylab mode should not import any names into the user namespace.
"""
).tag(config=True)
shell = Instance('yap_ipython.core.interactiveshell.YAPInteractiveABC',
shell = Instance('yap_ipython.core.interactiveshell.InteractiveShellABC',
allow_none=True)
# whether interact-loop should start
interact = Bool(True)
@ -234,19 +228,19 @@ class YAPInteractiveApp(Configurable):
"eventloop=%s", gui)
def init_extensions(self):
"""Load all IPython extensions in IPythonApp.extensions.
"""Load all yap_ipython extensions in IPythonApp.extensions.
This uses the :meth:`ExtensionManager.load_extensions` to load all
the extensions listed in ``self.extensions``.
"""
try:
self.log.debug("Loading IPython extensions...")
self.log.debug("Loading yap_ipython extensions...")
extensions = self.default_extensions + self.extensions
if self.extra_extension:
extensions.append(self.extra_extension)
for ext in extensions:
try:
self.log.info("Loading IPython extension: %s" % ext)
self.log.info("Loading yap_ipython extension: %s" % ext)
self.shell.extension_manager.load_extension(ext)
except:
if self.reraise_ipython_extension_failures:

View File

@ -0,0 +1,137 @@
# encoding: utf-8
"""
Simple utility for splitting user input. This is used by both inputsplitter and
prefilter.
Authors:
* Brian Granger
* Fernando Perez
"""
#-----------------------------------------------------------------------------
# Copyright (C) 2008-2011 The yap_ipython Development Team
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
import re
import sys
from yap_ipython.utils import py3compat
from yap_ipython.utils.encoding import get_stream_enc
#-----------------------------------------------------------------------------
# Main function
#-----------------------------------------------------------------------------
# RegExp for splitting line contents into pre-char//first word-method//rest.
# For clarity, each group is on one line.
# WARNING: update the regexp if the escapes in interactiveshell are changed, as
# they are hardwired in.
# Although it's not solely driven by the regex, note that:
# ,;/% only trigger if they are the first character on the line
# ! and !! trigger if they are first char(s) *or* follow an indent
# ? triggers as first or last char.
line_split = re.compile(r"""
^(\s*) # any leading space
([,;/%]|!!?|\?\??)? # escape character or characters
\s*(%{0,2}[\w\.\*]*) # function/method, possibly with leading %
# to correctly treat things like '?%magic'
(.*?$|$) # rest of line
""", re.VERBOSE)
def split_user_input(line, pattern=None):
"""Split user input into initial whitespace, escape character, function part
and the rest.
"""
# We need to ensure that the rest of this routine deals only with unicode
encoding = get_stream_enc(sys.stdin, 'utf-8')
line = py3compat.cast_unicode(line, encoding)
if pattern is None:
pattern = line_split
match = pattern.match(line)
if not match:
# print "match failed for line '%s'" % line
try:
ifun, the_rest = line.split(None,1)
except ValueError:
# print "split failed for line '%s'" % line
ifun, the_rest = line, u''
pre = re.match(r'^(\s*)(.*)', line).groups()[0]
esc = ""
else:
pre, esc, ifun, the_rest = match.groups()
#print 'line:<%s>' % line # dbg
#print 'pre <%s> ifun <%s> rest <%s>' % (pre,ifun.strip(),the_rest) # dbg
return pre, esc or '', ifun.strip(), the_rest.lstrip()
class LineInfo(object):
"""A single line of input and associated info.
Includes the following as properties:
line
The original, raw line
continue_prompt
Is this line a continuation in a sequence of multiline input?
pre
Any leading whitespace.
esc
The escape character(s) in pre or the empty string if there isn't one.
Note that '!!' and '??' are possible values for esc. Otherwise it will
always be a single character.
ifun
The 'function part', which is basically the maximal initial sequence
of valid python identifiers and the '.' character. This is what is
checked for alias and magic transformations, used for auto-calling,
etc. In contrast to Python identifiers, it may start with "%" and contain
"*".
the_rest
Everything else on the line.
"""
def __init__(self, line, continue_prompt=False):
self.line = line
self.continue_prompt = continue_prompt
self.pre, self.esc, self.ifun, self.the_rest = split_user_input(line)
self.pre_char = self.pre.strip()
if self.pre_char:
self.pre_whitespace = '' # No whitespace allowd before esc chars
else:
self.pre_whitespace = self.pre
def ofind(self, ip):
"""Do a full, attribute-walking lookup of the ifun in the various
namespaces for the given yap_ipython InteractiveShell instance.
Return a dict with keys: {found, obj, ospace, ismagic}
Note: can cause state changes because of calling getattr, but should
only be run if autocall is on and if the line hasn't matched any
other, less dangerous handlers.
Does cache the results of the call, so can be called multiple times
without worrying about *further* damaging state.
"""
return ip._ofind(self.ifun)
def __str__(self):
return "LineInfo [%s|%s|%s|%s]" %(self.pre, self.esc, self.ifun, self.the_rest)

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,348 @@
# -*- coding: utf-8 -*-
"""Usage information for the main yap_ipython applications.
"""
#-----------------------------------------------------------------------------
# Copyright (C) 2008-2011 The yap_ipython Development Team
# Copyright (C) 2001-2007 Fernando Perez. <fperez@colorado.edu>
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#-----------------------------------------------------------------------------
import sys
from yap_ipython.core import release
cl_usage = """\
=========
yap_ipython
=========
Tools for Interactive Computing in Python
=========================================
A Python shell with automatic history (input and output), dynamic object
introspection, easier configuration, command completion, access to the
system shell and more. yap_ipython can also be embedded in running programs.
Usage
ipython [subcommand] [options] [-c cmd | -m mod | file] [--] [arg] ...
If invoked with no options, it executes the file and exits, passing the
remaining arguments to the script, just as if you had specified the same
command with python. You may need to specify `--` before args to be passed
to the script, to prevent yap_ipython from attempting to parse them. If you
specify the option `-i` before the filename, it will enter an interactive
yap_ipython session after running the script, rather than exiting. Files ending
in .py will be treated as normal Python, but files ending in .ipy can
contain special yap_ipython syntax (magic commands, shell expansions, etc.).
Almost all configuration in yap_ipython is available via the command-line. Do
`ipython --help-all` to see all available options. For persistent
configuration, look into your `ipython_config.py` configuration file for
details.
This file is typically installed in the `IPYTHONDIR` directory, and there
is a separate configuration directory for each profile. The default profile
directory will be located in $IPYTHONDIR/profile_default. IPYTHONDIR
defaults to `$HOME/.ipython`. For Windows users, $HOME resolves to
C:\\Users\\YourUserName in most instances.
To initialize a profile with the default configuration file, do::
$> ipython profile create
and start editing `IPYTHONDIR/profile_default/ipython_config.py`
In yap_ipython's documentation, we will refer to this directory as
`IPYTHONDIR`; you can change its default location by creating an
environment variable with this name and setting it to the desired path.
For more information, see the manual available in HTML and PDF in your
installation, or online at http://ipython.org/documentation.html.
"""
interactive_usage = """
yap_ipython -- An enhanced Interactive Python
=========================================
yap_ipython offers a fully compatible replacement for the standard Python
interpreter, with convenient shell features, special commands, command
history mechanism and output results caching.
At your system command line, type 'ipython -h' to see the command line
options available. This document only describes interactive features.
GETTING HELP
------------
Within yap_ipython you have various ways to access help:
? -> Introduction and overview of yap_ipython's features (this screen).
object? -> Details about 'object'.
object?? -> More detailed, verbose information about 'object'.
%quickref -> Quick reference of all yap_ipython specific syntax and magics.
help -> Access Python's own help system.
If you are in terminal yap_ipython you can quit this screen by pressing `q`.
MAIN FEATURES
-------------
* Access to the standard Python help with object docstrings and the Python
manuals. Simply type 'help' (no quotes) to invoke it.
* Magic commands: type %magic for information on the magic subsystem.
* System command aliases, via the %alias command or the configuration file(s).
* Dynamic object information:
Typing ?word or word? prints detailed information about an object. Certain
long strings (code, etc.) get snipped in the center for brevity.
Typing ??word or word?? gives access to the full information without
snipping long strings. Strings that are longer than the screen are printed
through the less pager.
The ?/?? system gives access to the full source code for any object (if
available), shows function prototypes and other useful information.
If you just want to see an object's docstring, type '%pdoc object' (without
quotes, and without % if you have automagic on).
* Tab completion in the local namespace:
At any time, hitting tab will complete any available python commands or
variable names, and show you a list of the possible completions if there's
no unambiguous one. It will also complete filenames in the current directory.
* Search previous command history in multiple ways:
- Start typing, and then use arrow keys up/down or (Ctrl-p/Ctrl-n) to search
through the history items that match what you've typed so far.
- Hit Ctrl-r: opens a search prompt. Begin typing and the system searches
your history for lines that match what you've typed so far, completing as
much as it can.
- %hist: search history by index.
* Persistent command history across sessions.
* Logging of input with the ability to save and restore a working session.
* System shell with !. Typing !ls will run 'ls' in the current directory.
* The reload command does a 'deep' reload of a module: changes made to the
module since you imported will actually be available without having to exit.
* Verbose and colored exception traceback printouts. See the magic xmode and
xcolor functions for details (just type %magic).
* Input caching system:
yap_ipython offers numbered prompts (In/Out) with input and output caching. All
input is saved and can be retrieved as variables (besides the usual arrow
key recall).
The following GLOBAL variables always exist (so don't overwrite them!):
_i: stores previous input.
_ii: next previous.
_iii: next-next previous.
_ih : a list of all input _ih[n] is the input from line n.
Additionally, global variables named _i<n> are dynamically created (<n>
being the prompt counter), such that _i<n> == _ih[<n>]
For example, what you typed at prompt 14 is available as _i14 and _ih[14].
You can create macros which contain multiple input lines from this history,
for later re-execution, with the %macro function.
The history function %hist allows you to see any part of your input history
by printing a range of the _i variables. Note that inputs which contain
magic functions (%) appear in the history with a prepended comment. This is
because they aren't really valid Python code, so you can't exec them.
* Output caching system:
For output that is returned from actions, a system similar to the input
cache exists but using _ instead of _i. Only actions that produce a result
(NOT assignments, for example) are cached. If you are familiar with
Mathematica, yap_ipython's _ variables behave exactly like Mathematica's %
variables.
The following GLOBAL variables always exist (so don't overwrite them!):
_ (one underscore): previous output.
__ (two underscores): next previous.
___ (three underscores): next-next previous.
Global variables named _<n> are dynamically created (<n> being the prompt
counter), such that the result of output <n> is always available as _<n>.
Finally, a global dictionary named _oh exists with entries for all lines
which generated output.
* Directory history:
Your history of visited directories is kept in the global list _dh, and the
magic %cd command can be used to go to any entry in that list.
* Auto-parentheses and auto-quotes (adapted from Nathan Gray's LazyPython)
1. Auto-parentheses
Callable objects (i.e. functions, methods, etc) can be invoked like
this (notice the commas between the arguments)::
In [1]: callable_ob arg1, arg2, arg3
and the input will be translated to this::
callable_ob(arg1, arg2, arg3)
This feature is off by default (in rare cases it can produce
undesirable side-effects), but you can activate it at the command-line
by starting yap_ipython with `--autocall 1`, set it permanently in your
configuration file, or turn on at runtime with `%autocall 1`.
You can force auto-parentheses by using '/' as the first character
of a line. For example::
In [1]: /globals # becomes 'globals()'
Note that the '/' MUST be the first character on the line! This
won't work::
In [2]: print /globals # syntax error
In most cases the automatic algorithm should work, so you should
rarely need to explicitly invoke /. One notable exception is if you
are trying to call a function with a list of tuples as arguments (the
parenthesis will confuse yap_ipython)::
In [1]: zip (1,2,3),(4,5,6) # won't work
but this will work::
In [2]: /zip (1,2,3),(4,5,6)
------> zip ((1,2,3),(4,5,6))
Out[2]= [(1, 4), (2, 5), (3, 6)]
yap_ipython tells you that it has altered your command line by
displaying the new command line preceded by -->. e.g.::
In [18]: callable list
-------> callable (list)
2. Auto-Quoting
You can force auto-quoting of a function's arguments by using ',' as
the first character of a line. For example::
In [1]: ,my_function /home/me # becomes my_function("/home/me")
If you use ';' instead, the whole argument is quoted as a single
string (while ',' splits on whitespace)::
In [2]: ,my_function a b c # becomes my_function("a","b","c")
In [3]: ;my_function a b c # becomes my_function("a b c")
Note that the ',' MUST be the first character on the line! This
won't work::
In [4]: x = ,my_function /home/me # syntax error
"""
interactive_usage_min = """\
An enhanced console for Python.
Some of its features are:
- Tab completion in the local namespace.
- Logging of input, see command-line options.
- System shell escape via ! , eg !ls.
- Magic commands, starting with a % (like %ls, %pwd, %cd, etc.)
- Keeps track of locally defined variables via %who, %whos.
- Show object information with a ? eg ?x or x? (use ?? for more info).
"""
quick_reference = r"""
yap_ipython -- An enhanced Interactive Python - Quick Reference Card
====================================================================
obj?, obj?? : Get help, or more help for object (also works as
?obj, ??obj).
?foo.*abc* : List names in 'foo' containing 'abc' in them.
%magic : Information about yap_ipython's 'magic' % functions.
Magic functions are prefixed by % or %%, and typically take their arguments
without parentheses, quotes or even commas for convenience. Line magics take a
single % and cell magics are prefixed with two %%.
Example magic function calls:
%alias d ls -F : 'd' is now an alias for 'ls -F'
alias d ls -F : Works if 'alias' not a python name
alist = %alias : Get list of aliases to 'alist'
cd /usr/share : Obvious. cd -<tab> to choose from visited dirs.
%cd?? : See help AND source for magic %cd
%timeit x=10 : time the 'x=10' statement with high precision.
%%timeit x=2**100
x**100 : time 'x**100' with a setup of 'x=2**100'; setup code is not
counted. This is an example of a cell magic.
System commands:
!cp a.txt b/ : System command escape, calls os.system()
cp a.txt b/ : after %rehashx, most system commands work without !
cp ${f}.txt $bar : Variable expansion in magics and system commands
files = !ls /usr : Capture system command output
files.s, files.l, files.n: "a b c", ['a','b','c'], 'a\nb\nc'
History:
_i, _ii, _iii : Previous, next previous, next next previous input
_i4, _ih[2:5] : Input history line 4, lines 2-4
exec _i81 : Execute input history line #81 again
%rep 81 : Edit input history line #81
_, __, ___ : previous, next previous, next next previous output
_dh : Directory history
_oh : Output history
%hist : Command history of current session.
%hist -g foo : Search command history of (almost) all sessions for 'foo'.
%hist -g : Command history of (almost) all sessions.
%hist 1/2-8 : Command history containing lines 2-8 of session 1.
%hist 1/ ~2/ : Command history of session 1 and 2 sessions before current.
%hist ~8/1-~6/5 : Command history from line 1 of 8 sessions ago to
line 5 of 6 sessions ago.
%edit 0/ : Open editor to execute code with history of current session.
Autocall:
f 1,2 : f(1,2) # Off by default, enable with %autocall magic.
/f 1,2 : f(1,2) (forced autoparen)
,f 1 2 : f("1","2")
;f 1 2 : f("1 2")
Remember: TAB completion works in many contexts, not just file names
or python names.
The following magic functions are currently available:
"""
default_banner_parts = ["Python %s\n"%sys.version.split("\n")[0],
"Type 'copyright', 'credits' or 'license' for more information\n" ,
"yap_ipython {version} -- An enhanced Interactive Python. Type '?' for help.\n".format(version=release.version),
]
default_banner = ''.join(default_banner_parts)
# deprecated GUI banner
default_gui_banner = '\n'.join([
'DEPRECATED: yap_ipython.core.usage.default_gui_banner is deprecated and will be removed',
default_banner,
])

View File

@ -1,8 +1,8 @@
"""Public API for display tools in IPython.
"""Public API for display tools in yap_ipython.
"""
#-----------------------------------------------------------------------------
# Copyright (C) 2012 The IPython Development Team
# Copyright (C) 2012 The yap_ipython Development Team
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
@ -12,5 +12,5 @@
# Imports
#-----------------------------------------------------------------------------
from IPython.core.display import *
from IPython.lib.display import *
from yap_ipython.core.display import *
from yap_ipython.lib.display import *

View File

@ -0,0 +1,2 @@
# -*- coding: utf-8 -*-
"""This directory is meant for yap_ipython extensions."""

View File

@ -0,0 +1,524 @@
"""yap_ipython extension to reload modules before executing user code.
``autoreload`` reloads modules automatically before entering the execution of
code typed at the yap_ipython prompt.
This makes for example the following workflow possible:
.. sourcecode:: ipython
In [1]: %load_ext autoreload
In [2]: %autoreload 2
In [3]: from foo import some_function
In [4]: some_function()
Out[4]: 42
In [5]: # open foo.py in an editor and change some_function to return 43
In [6]: some_function()
Out[6]: 43
The module was reloaded without reloading it explicitly, and the object
imported with ``from foo import ...`` was also updated.
Usage
=====
The following magic commands are provided:
``%autoreload``
Reload all modules (except those excluded by ``%aimport``)
automatically now.
``%autoreload 0``
Disable automatic reloading.
``%autoreload 1``
Reload all modules imported with ``%aimport`` every time before
executing the Python code typed.
``%autoreload 2``
Reload all modules (except those excluded by ``%aimport``) every
time before executing the Python code typed.
``%aimport``
List modules which are to be automatically imported or not to be imported.
``%aimport foo``
Import module 'foo' and mark it to be autoreloaded for ``%autoreload 1``
``%aimport foo, bar``
Import modules 'foo', 'bar' and mark them to be autoreloaded for ``%autoreload 1``
``%aimport -foo``
Mark module 'foo' to not be autoreloaded.
Caveats
=======
Reloading Python modules in a reliable way is in general difficult,
and unexpected things may occur. ``%autoreload`` tries to work around
common pitfalls by replacing function code objects and parts of
classes previously in the module with new versions. This makes the
following things work:
- Functions and classes imported via 'from xxx import foo' are upgraded
to new versions when 'xxx' is reloaded.
- Methods and properties of classes are upgraded on reload, so that
calling 'c.foo()' on an object 'c' created before the reload causes
the new code for 'foo' to be executed.
Some of the known remaining caveats are:
- Replacing code objects does not always succeed: changing a @property
in a class to an ordinary method or a method to a member variable
can cause problems (but in old objects only).
- Functions that are removed (e.g. via monkey-patching) from a module
before it is reloaded are not upgraded.
- C extension modules cannot be reloaded, and so cannot be autoreloaded.
"""
skip_doctest = True
#-----------------------------------------------------------------------------
# Copyright (C) 2000 Thomas Heller
# Copyright (C) 2008 Pauli Virtanen <pav@iki.fi>
# Copyright (C) 2012 The yap_ipython Development Team
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#-----------------------------------------------------------------------------
#
# This yap_ipython module is written by Pauli Virtanen, based on the autoreload
# code by Thomas Heller.
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
import os
import sys
import traceback
import types
import weakref
from importlib import import_module
from imp import reload
from yap_ipython.utils import openpy
#------------------------------------------------------------------------------
# Autoreload functionality
#------------------------------------------------------------------------------
class ModuleReloader(object):
enabled = False
"""Whether this reloader is enabled"""
check_all = True
"""Autoreload all modules, not just those listed in 'modules'"""
def __init__(self):
# Modules that failed to reload: {module: mtime-on-failed-reload, ...}
self.failed = {}
# Modules specially marked as autoreloadable.
self.modules = {}
# Modules specially marked as not autoreloadable.
self.skip_modules = {}
# (module-name, name) -> weakref, for replacing old code objects
self.old_objects = {}
# Module modification timestamps
self.modules_mtimes = {}
# Cache module modification times
self.check(check_all=True, do_reload=False)
def mark_module_skipped(self, module_name):
"""Skip reloading the named module in the future"""
try:
del self.modules[module_name]
except KeyError:
pass
self.skip_modules[module_name] = True
def mark_module_reloadable(self, module_name):
"""Reload the named module in the future (if it is imported)"""
try:
del self.skip_modules[module_name]
except KeyError:
pass
self.modules[module_name] = True
def aimport_module(self, module_name):
"""Import a module, and mark it reloadable
Returns
-------
top_module : module
The imported module if it is top-level, or the top-level package containing it.
top_name : str
Name of top_module
"""
self.mark_module_reloadable(module_name)
import_module(module_name)
top_name = module_name.split('.')[0]
top_module = sys.modules[top_name]
return top_module, top_name
def filename_and_mtime(self, module):
if not hasattr(module, '__file__') or module.__file__ is None:
return None, None
if getattr(module, '__name__', None) in [None, '__mp_main__', '__main__']:
# we cannot reload(__main__) or reload(__mp_main__)
return None, None
filename = module.__file__
path, ext = os.path.splitext(filename)
if ext.lower() == '.py':
py_filename = filename
else:
try:
py_filename = openpy.source_from_cache(filename)
except ValueError:
return None, None
try:
pymtime = os.stat(py_filename).st_mtime
except OSError:
return None, None
return py_filename, pymtime
def check(self, check_all=False, do_reload=True):
"""Check whether some modules need to be reloaded."""
if not self.enabled and not check_all:
return
if check_all or self.check_all:
modules = list(sys.modules.keys())
else:
modules = list(self.modules.keys())
for modname in modules:
m = sys.modules.get(modname, None)
if modname in self.skip_modules:
continue
py_filename, pymtime = self.filename_and_mtime(m)
if py_filename is None:
continue
try:
if pymtime <= self.modules_mtimes[modname]:
continue
except KeyError:
self.modules_mtimes[modname] = pymtime
continue
else:
if self.failed.get(py_filename, None) == pymtime:
continue
self.modules_mtimes[modname] = pymtime
# If we've reached this point, we should try to reload the module
if do_reload:
try:
superreload(m, reload, self.old_objects)
if py_filename in self.failed:
del self.failed[py_filename]
except:
print("[autoreload of %s failed: %s]" % (
modname, traceback.format_exc(10)), file=sys.stderr)
self.failed[py_filename] = pymtime
#------------------------------------------------------------------------------
# superreload
#------------------------------------------------------------------------------
func_attrs = ['__code__', '__defaults__', '__doc__',
'__closure__', '__globals__', '__dict__']
def update_function(old, new):
"""Upgrade the code object of a function"""
for name in func_attrs:
try:
setattr(old, name, getattr(new, name))
except (AttributeError, TypeError):
pass
def update_class(old, new):
"""Replace stuff in the __dict__ of a class, and upgrade
method code objects"""
for key in list(old.__dict__.keys()):
old_obj = getattr(old, key)
try:
new_obj = getattr(new, key)
if old_obj == new_obj:
continue
except AttributeError:
# obsolete attribute: remove it
try:
delattr(old, key)
except (AttributeError, TypeError):
pass
continue
if update_generic(old_obj, new_obj): continue
try:
setattr(old, key, getattr(new, key))
except (AttributeError, TypeError):
pass # skip non-writable attributes
def update_property(old, new):
"""Replace get/set/del functions of a property"""
update_generic(old.fdel, new.fdel)
update_generic(old.fget, new.fget)
update_generic(old.fset, new.fset)
def isinstance2(a, b, typ):
return isinstance(a, typ) and isinstance(b, typ)
UPDATE_RULES = [
(lambda a, b: isinstance2(a, b, type),
update_class),
(lambda a, b: isinstance2(a, b, types.FunctionType),
update_function),
(lambda a, b: isinstance2(a, b, property),
update_property),
]
UPDATE_RULES.extend([(lambda a, b: isinstance2(a, b, types.MethodType),
lambda a, b: update_function(a.__func__, b.__func__)),
])
def update_generic(a, b):
for type_check, update in UPDATE_RULES:
if type_check(a, b):
update(a, b)
return True
return False
class StrongRef(object):
def __init__(self, obj):
self.obj = obj
def __call__(self):
return self.obj
def superreload(module, reload=reload, old_objects={}):
"""Enhanced version of the builtin reload function.
superreload remembers objects previously in the module, and
- upgrades the class dictionary of every old class in the module
- upgrades the code object of every old function and method
- clears the module's namespace before reloading
"""
# collect old objects in the module
for name, obj in list(module.__dict__.items()):
if not hasattr(obj, '__module__') or obj.__module__ != module.__name__:
continue
key = (module.__name__, name)
try:
old_objects.setdefault(key, []).append(weakref.ref(obj))
except TypeError:
pass
# reload module
try:
# clear namespace first from old cruft
old_dict = module.__dict__.copy()
old_name = module.__name__
module.__dict__.clear()
module.__dict__['__name__'] = old_name
module.__dict__['__loader__'] = old_dict['__loader__']
except (TypeError, AttributeError, KeyError):
pass
try:
module = reload(module)
except:
# restore module dictionary on failed reload
module.__dict__.update(old_dict)
raise
# iterate over all objects and update functions & classes
for name, new_obj in list(module.__dict__.items()):
key = (module.__name__, name)
if key not in old_objects: continue
new_refs = []
for old_ref in old_objects[key]:
old_obj = old_ref()
if old_obj is None: continue
new_refs.append(old_ref)
update_generic(old_obj, new_obj)
if new_refs:
old_objects[key] = new_refs
else:
del old_objects[key]
return module
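# Illustrative, hypothetical manual use of superreload; normally it is only
# invoked from ModuleReloader.check(), and 'mymod' below is an assumed module:
#
#   import mymod
#   old_objects = {}  # maps (module_name, attr_name) -> list of weakrefs
#   mymod = superreload(mymod, old_objects=old_objects)
#   # Re-using the same old_objects dict on later calls lets objects created
#   # before a reload pick up the new function and class code.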
#------------------------------------------------------------------------------
# yap_ipython connectivity
#------------------------------------------------------------------------------
from yap_ipython.core.magic import Magics, magics_class, line_magic
@magics_class
class AutoreloadMagics(Magics):
def __init__(self, *a, **kw):
super(AutoreloadMagics, self).__init__(*a, **kw)
self._reloader = ModuleReloader()
self._reloader.check_all = False
self.loaded_modules = set(sys.modules)
@line_magic
def autoreload(self, parameter_s=''):
r"""%autoreload => Reload modules automatically
%autoreload
Reload all modules (except those excluded by %aimport) automatically
now.
%autoreload 0
Disable automatic reloading.
%autoreload 1
Reload all modules imported with %aimport every time before executing
the Python code typed.
%autoreload 2
Reload all modules (except those excluded by %aimport) every time
before executing the Python code typed.
Reloading Python modules in a reliable way is in general
difficult, and unexpected things may occur. %autoreload tries to
work around common pitfalls by replacing function code objects and
parts of classes previously in the module with new versions. This
makes the following things work:
- Functions and classes imported via 'from xxx import foo' are upgraded
to new versions when 'xxx' is reloaded.
- Methods and properties of classes are upgraded on reload, so that
calling 'c.foo()' on an object 'c' created before the reload causes
the new code for 'foo' to be executed.
Some of the known remaining caveats are:
- Replacing code objects does not always succeed: changing a @property
in a class to an ordinary method or a method to a member variable
can cause problems (but in old objects only).
- Functions that are removed (e.g. via monkey-patching) from a module
before it is reloaded are not upgraded.
- C extension modules cannot be reloaded, and so cannot be
autoreloaded.
"""
if parameter_s == '':
self._reloader.check(True)
elif parameter_s == '0':
self._reloader.enabled = False
elif parameter_s == '1':
self._reloader.check_all = False
self._reloader.enabled = True
elif parameter_s == '2':
self._reloader.check_all = True
self._reloader.enabled = True
@line_magic
def aimport(self, parameter_s='', stream=None):
"""%aimport => Import modules for automatic reloading.
%aimport
List modules to automatically import and not to import.
%aimport foo
Import module 'foo' and mark it to be autoreloaded for %autoreload 1
%aimport foo, bar
Import modules 'foo', 'bar' and mark them to be autoreloaded for %autoreload 1
%aimport -foo
Mark module 'foo' to not be autoreloaded for %autoreload 1
"""
modname = parameter_s
if not modname:
to_reload = sorted(self._reloader.modules.keys())
to_skip = sorted(self._reloader.skip_modules.keys())
if stream is None:
stream = sys.stdout
if self._reloader.check_all:
stream.write("Modules to reload:\nall-except-skipped\n")
else:
stream.write("Modules to reload:\n%s\n" % ' '.join(to_reload))
stream.write("\nModules to skip:\n%s\n" % ' '.join(to_skip))
elif modname.startswith('-'):
modname = modname[1:]
self._reloader.mark_module_skipped(modname)
else:
for _module in ([_.strip() for _ in modname.split(',')]):
top_module, top_name = self._reloader.aimport_module(_module)
# Inject module to user namespace
self.shell.push({top_name: top_module})
def pre_run_cell(self):
if self._reloader.enabled:
try:
self._reloader.check()
except:
pass
def post_execute_hook(self):
"""Cache the modification times of any modules imported in this execution
"""
newly_loaded_modules = set(sys.modules) - self.loaded_modules
for modname in newly_loaded_modules:
_, pymtime = self._reloader.filename_and_mtime(sys.modules[modname])
if pymtime is not None:
self._reloader.modules_mtimes[modname] = pymtime
self.loaded_modules.update(newly_loaded_modules)
def load_ipython_extension(ip):
"""Load the extension in yap_ipython."""
auto_reload = AutoreloadMagics(ip)
ip.register_magics(auto_reload)
ip.events.register('pre_run_cell', auto_reload.pre_run_cell)
ip.events.register('post_execute', auto_reload.post_execute_hook)

View File

@ -0,0 +1,21 @@
# -*- coding: utf-8 -*-
"""
**DEPRECATED**
The cython magic has been integrated into Cython itself,
which is now released in version 0.21.
see the `Cython` repository in the `Cython` GitHub organisation, under the
file `Cython/Build/IpythonMagic.py`.
"""
#-----------------------------------------------------------------------------
# Copyright (C) 2010-2011, yap_ipython Development Team.
#-----------------------------------------------------------------------------
import warnings
## still load the magic in yap_ipython 3.x, remove completely in future versions.
def load_ipython_extension(ip):
"""Load the extension in yap_ipython."""
warnings.warn("""The Cython magic has been moved to the Cython package""")

View File

@ -0,0 +1,12 @@
# -*- coding: utf-8 -*-
#-----------------------------------------------------------------------------
# Copyright (C) 2012 The yap_ipython Development Team
#-----------------------------------------------------------------------------
import warnings
def load_ipython_extension(ip):
"""Load the extension in yap_ipython."""
warnings.warn("The rmagic extension in yap_ipython has moved to "
"`rpy2.ipython`, please see `rpy2` documentation.")

View File

@ -0,0 +1,226 @@
# -*- coding: utf-8 -*-
"""
%store magic for lightweight persistence.
Stores variables, aliases and macros in yap_ipython's database.
To automatically restore stored variables at startup, add this to your
:file:`ipython_config.py` file::
c.StoreMagics.autorestore = True
"""
# Copyright (c) yap_ipython Development Team.
# Distributed under the terms of the Modified BSD License.
import inspect, os, sys, textwrap
from yap_ipython.core.error import UsageError
from yap_ipython.core.magic import Magics, magics_class, line_magic
from traitlets import Bool
def restore_aliases(ip):
staliases = ip.db.get('stored_aliases', {})
for k,v in staliases.items():
#print "restore alias",k,v # dbg
#self.alias_table[k] = v
ip.alias_manager.define_alias(k,v)
def refresh_variables(ip):
db = ip.db
for key in db.keys('autorestore/*'):
# strip autorestore
justkey = os.path.basename(key)
try:
obj = db[key]
except KeyError:
print("Unable to restore variable '%s', ignoring (use %%store -d to forget!)" % justkey)
print("The error was:", sys.exc_info()[0])
else:
#print "restored",justkey,"=",obj #dbg
ip.user_ns[justkey] = obj
def restore_dhist(ip):
ip.user_ns['_dh'] = ip.db.get('dhist',[])
def restore_data(ip):
refresh_variables(ip)
restore_aliases(ip)
restore_dhist(ip)
@magics_class
class StoreMagics(Magics):
"""Lightweight persistence for python variables.
Provides the %store magic."""
autorestore = Bool(False, help=
"""If True, any %store-d variables will be automatically restored
when yap_ipython starts.
"""
).tag(config=True)
def __init__(self, shell):
super(StoreMagics, self).__init__(shell=shell)
self.shell.configurables.append(self)
if self.autorestore:
restore_data(self.shell)
@line_magic
def store(self, parameter_s=''):
"""Lightweight persistence for python variables.
Example::
In [1]: l = ['hello',10,'world']
In [2]: %store l
In [3]: exit
(yap_ipython session is closed and started again...)
ville@badger:~$ ipython
In [1]: l
NameError: name 'l' is not defined
In [2]: %store -r
In [3]: l
Out[3]: ['hello', 10, 'world']
Usage:
* ``%store`` - Show list of all variables and their current
values
* ``%store spam`` - Store the *current* value of the variable spam
to disk
* ``%store -d spam`` - Remove the variable and its value from storage
* ``%store -z`` - Remove all variables from storage
* ``%store -r`` - Refresh all variables from store (overwrite
current vals)
* ``%store -r spam bar`` - Refresh specified variables from store
(delete current val)
* ``%store foo >a.txt`` - Store value of foo to new file a.txt
* ``%store foo >>a.txt`` - Append value of foo to file a.txt
It should be noted that if you change the value of a variable, you
need to %store it again if you want to persist the new value.
Note also that the variables will need to be pickleable; most basic
python types can be safely %store'd.
Aliases can also be %store'd across sessions.
"""
opts,argsl = self.parse_options(parameter_s,'drz',mode='string')
args = argsl.split(None,1)
ip = self.shell
db = ip.db
# delete
if 'd' in opts:
try:
todel = args[0]
except IndexError:
raise UsageError('You must provide the variable to forget')
else:
try:
del db['autorestore/' + todel]
except:
raise UsageError("Can't delete variable '%s'" % todel)
# reset
elif 'z' in opts:
for k in db.keys('autorestore/*'):
del db[k]
elif 'r' in opts:
if args:
for arg in args:
try:
obj = db['autorestore/' + arg]
except KeyError:
print("no stored variable %s" % arg)
else:
ip.user_ns[arg] = obj
else:
restore_data(ip)
# run without arguments -> list variables & values
elif not args:
vars = db.keys('autorestore/*')
vars.sort()
if vars:
size = max(map(len, vars))
else:
size = 0
print('Stored variables and their in-db values:')
fmt = '%-'+str(size)+'s -> %s'
get = db.get
for var in vars:
justkey = os.path.basename(var)
# print 30 first characters from every var
print(fmt % (justkey, repr(get(var, '<unavailable>'))[:50]))
# default action - store the variable
else:
# %store foo >file.txt or >>file.txt
if len(args) > 1 and args[1].startswith('>'):
fnam = os.path.expanduser(args[1].lstrip('>').lstrip())
if args[1].startswith('>>'):
fil = open(fnam, 'a')
else:
fil = open(fnam, 'w')
obj = ip.ev(args[0])
print("Writing '%s' (%s) to file '%s'." % (args[0],
obj.__class__.__name__, fnam))
if not isinstance (obj, str):
from pprint import pprint
pprint(obj, fil)
else:
fil.write(obj)
if not obj.endswith('\n'):
fil.write('\n')
fil.close()
return
# %store foo
try:
obj = ip.user_ns[args[0]]
except KeyError:
# it might be an alias
name = args[0]
try:
cmd = ip.alias_manager.retrieve_alias(name)
except ValueError:
raise UsageError("Unknown variable '%s'" % name)
staliases = db.get('stored_aliases',{})
staliases[name] = cmd
db['stored_aliases'] = staliases
print("Alias stored: %s (%s)" % (name, cmd))
return
else:
modname = getattr(inspect.getmodule(obj), '__name__', '')
if modname == '__main__':
print(textwrap.dedent("""\
Warning:%s is %s
Proper storage of interactively declared classes (or instances
of those classes) is not possible! Only instances
of classes in real modules on file system can be %%store'd.
""" % (args[0], obj) ))
return
#pickled = pickle.dumps(obj)
db[ 'autorestore/' + args[0] ] = obj
print("Stored '%s' (%s)" % (args[0], obj.__class__.__name__))
def load_ipython_extension(ip):
"""Load the extension in yap_ipython."""
ip.register_magics(StoreMagics)

View File

@ -0,0 +1,32 @@
"""
**DEPRECATED**
A print function that pretty prints sympy Basic objects.
:moduleauthor: Brian Granger
Usage
=====
Once the extension is loaded, Sympy Basic objects are automatically
pretty-printed.
As of SymPy 0.7.2, maintenance of this extension has moved to SymPy under
sympy.interactive.ipythonprinting; any modifications to account for changes to
SymPy should be submitted to SymPy rather than changed here. This module is
maintained here for backwards compatibility with old SymPy versions.
"""
#-----------------------------------------------------------------------------
# Copyright (C) 2008 The yap_ipython Development Team
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
import warnings
def load_ipython_extension(ip):
warnings.warn("The sympyprinting extension has moved to `sympy`, "
"use `from sympy import init_printing; init_printing()`")

View File

@ -0,0 +1,5 @@
"""
This package contains all third-party modules bundled with yap_ipython.
"""
__all__ = ["simplegeneric"]

View File

@ -0,0 +1,13 @@
#!/usr/bin/python
"""
`yap_ipython.external.mathjax` is deprecated with yap_ipython 4.0+
mathjax is now installed by default with the notebook package
"""
import sys
if __name__ == '__main__' :
sys.exit("yap_ipython.external.mathjax is deprecated, Mathjax is now installed by default with the notebook package")

View File

@ -0,0 +1,95 @@
""" Import Qt in a manner suitable for an yap_ipython kernel.
This is the import used for the `gui=qt` or `matplotlib=qt` initialization.
Import Priority:
if Qt has been imported anywhere else:
use that
if matplotlib has been imported and doesn't support v2 (<= 1.0.1):
use PyQt4 @v1
Next, consult the QT_API environment variable
if QT_API not set:
ask matplotlib what it's using. If Qt4Agg or Qt5Agg, then use the
version matplotlib is configured with
else: (matplotlib said nothing)
# this is the default path - nobody told us anything
try in this order:
PyQt default version, PySide, PyQt5
else:
use what QT_API says
"""
# NOTE: This is no longer an external, third-party module, and should be
# considered part of yap_ipython. For compatibility however, it is being kept in
# yap_ipython/external.
import os
import sys
from yap_ipython.utils.version import check_version
from yap_ipython.external.qt_loaders import (load_qt, loaded_api, QT_API_PYSIDE,
QT_API_PYSIDE2, QT_API_PYQT, QT_API_PYQT5,
QT_API_PYQTv1, QT_API_PYQT_DEFAULT)
_qt_apis = (QT_API_PYSIDE, QT_API_PYSIDE2, QT_API_PYQT, QT_API_PYQT5, QT_API_PYQTv1,
QT_API_PYQT_DEFAULT)
#Constraints placed on an imported matplotlib
def matplotlib_options(mpl):
if mpl is None:
return
backend = mpl.rcParams.get('backend', None)
if backend == 'Qt4Agg':
mpqt = mpl.rcParams.get('backend.qt4', None)
if mpqt is None:
return None
if mpqt.lower() == 'pyside':
return [QT_API_PYSIDE]
elif mpqt.lower() == 'pyqt4':
return [QT_API_PYQT_DEFAULT]
elif mpqt.lower() == 'pyqt4v2':
return [QT_API_PYQT]
raise ImportError("unhandled value for backend.qt4 from matplotlib: %r" %
mpqt)
elif backend == 'Qt5Agg':
mpqt = mpl.rcParams.get('backend.qt5', None)
if mpqt is None:
return None
if mpqt.lower() == 'pyqt5':
return [QT_API_PYQT5]
raise ImportError("unhandled value for backend.qt5 from matplotlib: %r" %
mpqt)
def get_options():
"""Return a list of acceptable QT APIs, in decreasing order of
preference
"""
#already imported Qt somewhere. Use that
loaded = loaded_api()
if loaded is not None:
return [loaded]
mpl = sys.modules.get('matplotlib', None)
if mpl is not None and not check_version(mpl.__version__, '1.0.2'):
#1.0.1 only supports PyQt4 v1
return [QT_API_PYQT_DEFAULT]
qt_api = os.environ.get('QT_API', None)
if qt_api is None:
#no ETS variable. Ask mpl, then use default fallback path
return matplotlib_options(mpl) or [QT_API_PYQT_DEFAULT, QT_API_PYSIDE,
QT_API_PYQT5, QT_API_PYSIDE2]
elif qt_api not in _qt_apis:
raise RuntimeError("Invalid Qt API %r, valid values are: %r" %
(qt_api, ', '.join(_qt_apis)))
else:
return [qt_api]
api_opts = get_options()
QtCore, QtGui, QtSvg, QT_API = load_qt(api_opts)
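# Illustrative use of the QT_API environment variable, which get_options() above
# consults; 'pyqt5' is only an example value and must name an installed binding:
#
#   import os
#   os.environ['QT_API'] = 'pyqt5'  # must be set before this module is imported
#   # get_options() then returns ['pyqt5'] and load_qt() commits to PyQt5.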

View File

@ -0,0 +1,373 @@
"""
This module contains factory functions that attempt
to return Qt submodules from the various python Qt bindings.
It also protects against double-importing Qt with different
bindings, which is unstable and likely to crash
This is used primarily by qt and qt_for_kernel, and shouldn't
be accessed directly from the outside
"""
import sys
import types
from functools import partial
from importlib import import_module
from yap_ipython.utils.version import check_version
# Available APIs.
QT_API_PYQT = 'pyqt' # Force version 2
QT_API_PYQT5 = 'pyqt5'
QT_API_PYQTv1 = 'pyqtv1' # Force version 1
QT_API_PYQT_DEFAULT = 'pyqtdefault' # use system default for version 1 vs. 2
QT_API_PYSIDE = 'pyside'
QT_API_PYSIDE2 = 'pyside2'
api_to_module = {QT_API_PYSIDE2: 'PySide2',
QT_API_PYSIDE: 'PySide',
QT_API_PYQT: 'PyQt4',
QT_API_PYQTv1: 'PyQt4',
QT_API_PYQT5: 'PyQt5',
QT_API_PYQT_DEFAULT: 'PyQt4',
}
class ImportDenier(object):
"""Import Hook that will guard against bad Qt imports
once yap_ipython commits to a specific binding
"""
def __init__(self):
self.__forbidden = set()
def forbid(self, module_name):
sys.modules.pop(module_name, None)
self.__forbidden.add(module_name)
def find_module(self, fullname, path=None):
if path:
return
if fullname in self.__forbidden:
return self
def load_module(self, fullname):
raise ImportError("""
Importing %s disabled by yap_ipython, which has
already imported an Incompatible QT Binding: %s
""" % (fullname, loaded_api()))
ID = ImportDenier()
sys.meta_path.insert(0, ID)
def commit_api(api):
"""Commit to a particular API, and trigger ImportErrors on subsequent
dangerous imports"""
if api == QT_API_PYSIDE2:
ID.forbid('PySide')
ID.forbid('PyQt4')
ID.forbid('PyQt5')
if api == QT_API_PYSIDE:
ID.forbid('PySide2')
ID.forbid('PyQt4')
ID.forbid('PyQt5')
elif api == QT_API_PYQT5:
ID.forbid('PySide2')
ID.forbid('PySide')
ID.forbid('PyQt4')
else: # There are three other possibilities, all representing PyQt4
ID.forbid('PyQt5')
ID.forbid('PySide2')
ID.forbid('PySide')
def loaded_api():
"""Return which API is loaded, if any
If this returns anything besides None,
importing any other Qt binding is unsafe.
Returns
-------
None, 'pyside2', 'pyside', 'pyqt', 'pyqt5', or 'pyqtv1'
"""
if 'PyQt4.QtCore' in sys.modules:
if qtapi_version() == 2:
return QT_API_PYQT
else:
return QT_API_PYQTv1
elif 'PySide.QtCore' in sys.modules:
return QT_API_PYSIDE
elif 'PySide2.QtCore' in sys.modules:
return QT_API_PYSIDE2
elif 'PyQt5.QtCore' in sys.modules:
return QT_API_PYQT5
return None
def has_binding(api):
"""Safely check for PyQt4/5, PySide or PySide2, without importing submodules
Supports Python <= 3.3
Parameters
----------
api : str [ 'pyqtv1' | 'pyqt' | 'pyqt5' | 'pyside' | 'pyside2' | 'pyqtdefault']
Which module to check for
Returns
-------
True if the relevant module appears to be importable
"""
# we can't import an incomplete pyside and pyqt4
# this will cause a crash in sip (#1431)
# check for complete presence before importing
module_name = api_to_module[api]
import imp
try:
#importing top level PyQt4/PySide module is ok...
mod = import_module(module_name)
#...importing submodules is not
imp.find_module('QtCore', mod.__path__)
imp.find_module('QtGui', mod.__path__)
imp.find_module('QtSvg', mod.__path__)
if api in (QT_API_PYQT5, QT_API_PYSIDE2):
# QT5 requires QtWidgets too
imp.find_module('QtWidgets', mod.__path__)
#we can also safely check PySide version
if api == QT_API_PYSIDE:
return check_version(mod.__version__, '1.0.3')
else:
return True
except ImportError:
return False
def has_binding_new(api):
"""Safely check for PyQt4/5, PySide or PySide2, without importing submodules
Supports Python >= 3.4
Parameters
----------
api : str [ 'pyqtv1' | 'pyqt' | 'pyqt5' | 'pyside' | 'pyside2' | 'pyqtdefault']
Which module to check for
Returns
-------
True if the relevant module appears to be importable
"""
module_name = api_to_module[api]
from importlib.util import find_spec
required = ['QtCore', 'QtGui', 'QtSvg']
if api in (QT_API_PYQT5, QT_API_PYSIDE2):
# QT5 requires QtWidgets too
required.append('QtWidgets')
for submod in required:
try:
spec = find_spec('%s.%s' % (module_name, submod))
except ImportError:
# Package (e.g. PyQt5) not found
return False
else:
if spec is None:
# Submodule (e.g. PyQt5.QtCore) not found
return False
if api == QT_API_PYSIDE:
# We can also safely check PySide version
import PySide
return check_version(PySide.__version__, '1.0.3')
return True
if sys.version_info >= (3, 4):
has_binding = has_binding_new
def qtapi_version():
"""Return which QString API has been set, if any
Returns
-------
The QString API version (1 or 2), or None if not set
"""
try:
import sip
except ImportError:
return
try:
return sip.getapi('QString')
except ValueError:
return
def can_import(api):
"""Safely query whether an API is importable, without importing it"""
if not has_binding(api):
return False
current = loaded_api()
if api == QT_API_PYQT_DEFAULT:
return current in [QT_API_PYQT, QT_API_PYQTv1, None]
else:
return current in [api, None]
def import_pyqt4(version=2):
"""
Import PyQt4
Parameters
----------
version : 1, 2, or None
Which QString/QVariant API to use. Set to None to use the system
default
ImportErrors raised within this function are non-recoverable
"""
# The new-style string API (version=2) automatically
# converts QStrings to Unicode Python strings. Also, automatically unpacks
# QVariants to their underlying objects.
import sip
if version is not None:
sip.setapi('QString', version)
sip.setapi('QVariant', version)
from PyQt4 import QtGui, QtCore, QtSvg
if not check_version(QtCore.PYQT_VERSION_STR, '4.7'):
raise ImportError("yap_ipython requires PyQt4 >= 4.7, found %s" %
QtCore.PYQT_VERSION_STR)
# Alias PyQt-specific functions for PySide compatibility.
QtCore.Signal = QtCore.pyqtSignal
QtCore.Slot = QtCore.pyqtSlot
# query for the API version (in case version == None)
version = sip.getapi('QString')
api = QT_API_PYQTv1 if version == 1 else QT_API_PYQT
return QtCore, QtGui, QtSvg, api
def import_pyqt5():
"""
Import PyQt5
ImportErrors raised within this function are non-recoverable
"""
import sip
from PyQt5 import QtCore, QtSvg, QtWidgets, QtGui
# Alias PyQt-specific functions for PySide compatibility.
QtCore.Signal = QtCore.pyqtSignal
QtCore.Slot = QtCore.pyqtSlot
# Join QtGui and QtWidgets for Qt4 compatibility.
QtGuiCompat = types.ModuleType('QtGuiCompat')
QtGuiCompat.__dict__.update(QtGui.__dict__)
QtGuiCompat.__dict__.update(QtWidgets.__dict__)
api = QT_API_PYQT5
return QtCore, QtGuiCompat, QtSvg, api
def import_pyside():
"""
Import PySide
ImportErrors raised within this function are non-recoverable
"""
from PySide import QtGui, QtCore, QtSvg
return QtCore, QtGui, QtSvg, QT_API_PYSIDE
def import_pyside2():
"""
Import PySide2
ImportErrors raised within this function are non-recoverable
"""
from PySide2 import QtGui, QtCore, QtSvg, QtWidgets, QtPrintSupport
# Join QtGui and QtWidgets for Qt4 compatibility.
QtGuiCompat = types.ModuleType('QtGuiCompat')
QtGuiCompat.__dict__.update(QtGui.__dict__)
QtGuiCompat.__dict__.update(QtWidgets.__dict__)
QtGuiCompat.__dict__.update(QtPrintSupport.__dict__)
return QtCore, QtGuiCompat, QtSvg, QT_API_PYSIDE2
def load_qt(api_options):
"""
Attempt to import Qt, given a preference list
of permissible bindings
It is safe to call this function multiple times.
Parameters
----------
api_options: List of strings
The order of APIs to try. Valid items are 'pyside', 'pyside2',
'pyqt', 'pyqt5', 'pyqtv1' and 'pyqtdefault'
Returns
-------
A tuple of QtCore, QtGui, QtSvg, QT_API
The first three are the Qt modules. The last is the
string indicating which module was loaded.
Raises
------
ImportError, if it isn't possible to import any requested
bindings (either because they aren't installed, or because
an incompatible library has already been installed)
"""
loaders = {
QT_API_PYSIDE2: import_pyside2,
QT_API_PYSIDE: import_pyside,
QT_API_PYQT: import_pyqt4,
QT_API_PYQT5: import_pyqt5,
QT_API_PYQTv1: partial(import_pyqt4, version=1),
QT_API_PYQT_DEFAULT: partial(import_pyqt4, version=None)
}
for api in api_options:
if api not in loaders:
raise RuntimeError(
"Invalid Qt API %r, valid values are: %s" %
(api, ", ".join(["%r" % k for k in loaders.keys()])))
if not can_import(api):
continue
#cannot safely recover from an ImportError during this
result = loaders[api]()
api = result[-1] # changed if api = QT_API_PYQT_DEFAULT
commit_api(api)
return result
else:
raise ImportError("""
Could not load requested Qt binding. Please ensure that
PyQt4 >= 4.7, PyQt5, PySide >= 1.0.3 or PySide2 is available,
and only one is imported per session.
Currently-imported Qt library: %r
PyQt4 available (requires QtCore, QtGui, QtSvg): %s
PyQt5 available (requires QtCore, QtGui, QtSvg, QtWidgets): %s
PySide >= 1.0.3 installed: %s
PySide2 installed: %s
Tried to load: %r
""" % (loaded_api(),
has_binding(QT_API_PYQT),
has_binding(QT_API_PYQT5),
has_binding(QT_API_PYSIDE),
has_binding(QT_API_PYSIDE2),
api_options))

View File

@ -0,0 +1,29 @@
"""
Shim to maintain backwards compatibility with old frontend imports.
We have moved all contents of the old `frontend` subpackage into top-level
subpackages (`html`, `qt` and `terminal`), and flattened the notebook into
just `yap_ipython.html`, formerly `yap_ipython.frontend.html.notebook`.
This will let code that was making `from yap_ipython.frontend...` calls continue
working, though a warning will be printed.
"""
# Copyright (c) yap_ipython Development Team.
# Distributed under the terms of the Modified BSD License.
import sys
from warnings import warn
from yap_ipython.utils.shimmodule import ShimModule, ShimWarning
warn("The top-level `frontend` package has been deprecated since yap_ipython 1.0. "
"All its subpackages have been moved to the top `yap_ipython` level.", ShimWarning)
# Unconditionally insert the shim into sys.modules so that further import calls
# trigger the custom attribute access above
sys.modules['yap_ipython.frontend.html.notebook'] = ShimModule(
src='yap_ipython.frontend.html.notebook', mirror='yap_ipython.html')
sys.modules['yap_ipython.frontend'] = ShimModule(
src='yap_ipython.frontend', mirror='yap_ipython')
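# Illustrative effect of the shim above (the importing code is hypothetical):
#
#   import yap_ipython.frontend.html.notebook as nb  # old-style import still works,
#   # but a ShimWarning is printed and attribute access on 'nb' is forwarded to
#   # the mirror package 'yap_ipython.html' by ShimModule.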

View File

@ -0,0 +1,28 @@
"""
Shim to maintain backwards compatibility with old yap_ipython.html imports.
"""
# Copyright (c) yap_ipython Development Team.
# Distributed under the terms of the Modified BSD License.
import sys
from warnings import warn
from yap_ipython.utils.shimmodule import ShimModule, ShimWarning
warn("The `yap_ipython.html` package has been deprecated since yap_ipython 4.0. "
"You should import from `notebook` instead. "
"`yap_ipython.html.widgets` has moved to `ipywidgets`.", ShimWarning)
_widgets = sys.modules['yap_ipython.html.widgets'] = ShimModule(
src='yap_ipython.html.widgets', mirror='ipywidgets')
_html = ShimModule(
src='yap_ipython.html', mirror='notebook')
# hook up widgets
_html.widgets = _widgets
sys.modules['yap_ipython.html'] = _html
if __name__ == '__main__':
from notebook import notebookapp as app
app.launch_new_instance()

View File

@ -0,0 +1,35 @@
"""
Shim to maintain backwards compatibility with old yap_ipython.kernel imports.
"""
# Copyright (c) yap_ipython Development Team.
# Distributed under the terms of the Modified BSD License.
import sys
from warnings import warn
from yap_ipython.utils.shimmodule import ShimModule, ShimWarning
warn("The `yap_ipython.kernel` package has been deprecated since yap_ipython 4.0."
"You should import from yap_kernel or jupyter_client instead.", ShimWarning)
# zmq subdir is gone
sys.modules['yap_ipython.kernel.zmq.session'] = ShimModule(
src='yap_ipython.kernel.zmq.session', mirror='jupyter_client.session')
sys.modules['yap_ipython.kernel.zmq'] = ShimModule(
src='yap_ipython.kernel.zmq', mirror='yap_kernel')
for pkg in ('comm', 'inprocess'):
src = 'yap_ipython.kernel.%s' % pkg
sys.modules[src] = ShimModule(src=src, mirror='yap_kernel.%s' % pkg)
for pkg in ('ioloop', 'blocking'):
src = 'yap_ipython.kernel.%s' % pkg
sys.modules[src] = ShimModule(src=src, mirror='jupyter_client.%s' % pkg)
# required for `from yap_ipython.kernel import PKG`
from yap_kernel import comm, inprocess
from jupyter_client import ioloop, blocking
# public API
from yap_kernel.connect import *
from jupyter_client import *

View File

@ -0,0 +1,3 @@
if __name__ == '__main__':
from yap_kernel import kernelapp as app
app.launch_new_instance()

View File

@ -0,0 +1 @@
from jupyter_client.adapter import *

View File

@ -0,0 +1 @@
from jupyter_client.channels import *

View File

@ -0,0 +1 @@
from jupyter_client.channelsabc import *

View File

@ -0,0 +1 @@
from jupyter_client.client import *
