diff --git a/C/parser.c b/C/parser.c index 23b46508d..46835c3bf 100755 --- a/C/parser.c +++ b/C/parser.c @@ -1,19 +1,19 @@ /************************************************************************* -* * -* YAP Prolog * -* * -* Yap Prolog was developed at NCCUP - Universidade do Porto * -* * -* Copyright L.Damas, V.S.Costa and Universidade do Porto 1985-1997 * -* * -************************************************************************** -* * -* File: parser.c * -* Last rev: * -* mods: * -* comments: Prolog's parser * -* * -*************************************************************************/ + * * + * YAP Prolog * + * * + * Yap Prolog was developed at NCCUP - Universidade do Porto * + * * + * Copyright L.Damas, V.S.Costa and Universidade do Porto 1985-1997 * + * * + ************************************************************************** + * * + * File: parser.c * + * Last rev: * + * mods: * + * comments: Prolog's parser * + * * + *************************************************************************/ #ifdef SCCS static char SccsId[] = "%W% %G%"; #endif @@ -135,10 +135,10 @@ dot with single quotes. */ #include "Yap.h" +#include "YapEval.h" #include "YapHeap.h" #include "YapText.h" #include "Yatom.h" -#include "YapEval.h" #include "yapio.h" /* stuff we want to use in standard YAP code */ #include "iopreds.h" @@ -157,7 +157,9 @@ dot with single quotes. 
/* weak backtraking mechanism based on long_jump */ -typedef struct jmp_buff_struct { sigjmp_buf JmpBuff; } JMPBUFF; +typedef struct jmp_buff_struct { + sigjmp_buf JmpBuff; +} JMPBUFF; static void GNextToken(CACHE_TYPE1); static void checkfor(Term, JMPBUFF *, encoding_t CACHE_TYPE); @@ -165,19 +167,20 @@ static Term ParseArgs(Atom, Term, JMPBUFF *, Term, encoding_t, Term CACHE_TYPE); static Term ParseList(JMPBUFF *, encoding_t, Term CACHE_TYPE); static Term ParseTerm(int, JMPBUFF *, encoding_t, Term CACHE_TYPE); -extern Term Yap_tokRep(void* tokptr); -extern const char * Yap_tokText(void *tokptr); +extern Term Yap_tokRep(void *tokptr); +extern const char *Yap_tokText(void *tokptr); static void syntax_msg(const char *msg, ...) { CACHE_REGS va_list ap; if (!LOCAL_ErrorMessage || (LOCAL_Error_TYPE == SYNTAX_ERROR && - LOCAL_ActiveError->prologParserLine < LOCAL_tokptr->TokPos)) { + LOCAL_tokptr->TokPos < LOCAL_ActiveError->prologParserPos )) { if (!LOCAL_ErrorMessage) { - LOCAL_ErrorMessage = malloc(1024 + 1); + LOCAL_ErrorMessage = malloc(MAX_ERROR_MSG_SIZE + 1); } - LOCAL_ActiveError->prologParserLine = LOCAL_tokptr->TokPos; + LOCAL_ActiveError->prologParserLine = LOCAL_tokptr->TokLine; + LOCAL_ActiveError->prologParserPos = LOCAL_tokptr->TokPos; va_start(ap, msg); vsnprintf(LOCAL_ErrorMessage, MAX_ERROR_MSG_SIZE, msg, ap); va_end(ap); @@ -226,7 +229,7 @@ static void syntax_msg(const char *msg, ...) 
{ #define FAIL siglongjmp(FailBuff->JmpBuff, 1) VarEntry *Yap_LookupVar(const char *var) /* lookup variable in variables table - * */ + * */ { CACHE_REGS VarEntry *p; @@ -364,7 +367,7 @@ static Term Variables(VarEntry *p, Term l USES_REGS) { Term Yap_Variables(VarEntry *p, Term l) { CACHE_REGS - l = Variables(LOCAL_AnonVarTable, l PASS_REGS); + l = Variables(LOCAL_AnonVarTable, l PASS_REGS); return Variables(p, l PASS_REGS); } @@ -468,7 +471,7 @@ inline static void checkfor(Term c, JMPBUFF *FailBuff, strncpy(s, Yap_tokText(LOCAL_tokptr), 1023); syntax_msg("line %d: expected to find " "\'%c....................................\', found %s", - LOCAL_tokptr->TokPos, c, s); + LOCAL_tokptr->TokLine, c, s); FAIL; } NextToken; @@ -549,12 +552,12 @@ static Term ParseArgs(Atom a, Term close, JMPBUFF *FailBuff, Term arg1, func = Yap_MkFunctor(a, 1); if (func == NULL) { - syntax_msg("line %d: Heap Overflow", LOCAL_tokptr->TokPos); + syntax_msg("line %d: Heap Overflow", LOCAL_tokptr->TokLine); FAIL; } t = Yap_MkApplTerm(func, nargs, p); if (HR > ASP - 4096) { - syntax_msg("line %d: Stack Overflow", LOCAL_tokptr->TokPos); + syntax_msg("line %d: Stack Overflow", LOCAL_tokptr->TokLine); return TermNil; } NextToken; @@ -564,7 +567,7 @@ static Term ParseArgs(Atom a, Term close, JMPBUFF *FailBuff, Term arg1, while (1) { Term *tp = (Term *)ParserAuxSp; if (ParserAuxSp + 1 > LOCAL_TrailTop) { - syntax_msg("line %d: Trail Overflow", LOCAL_tokptr->TokPos); + syntax_msg("line %d: Trail Overflow", LOCAL_tokptr->TokLine); FAIL; } *tp++ = Unsigned(ParseTerm(999, FailBuff, enc, cmod PASS_REGS)); @@ -582,12 +585,12 @@ static Term ParseArgs(Atom a, Term close, JMPBUFF *FailBuff, Term arg1, * order */ if (HR > ASP - (nargs + 1)) { - syntax_msg("line %d: Stack Overflow", LOCAL_tokptr->TokPos); + syntax_msg("line %d: Stack Overflow", LOCAL_tokptr->TokLine); FAIL; } func = Yap_MkFunctor(a, nargs); if (func == NULL) { - syntax_msg("line %d: Heap Overflow", LOCAL_tokptr->TokPos); + syntax_msg("line 
%d: Heap Overflow", LOCAL_tokptr->TokLine); FAIL; } #ifdef SFUNC @@ -602,7 +605,7 @@ static Term ParseArgs(Atom a, Term close, JMPBUFF *FailBuff, Term arg1, t = Yap_MkApplTerm(func, nargs, p); #endif if (HR > ASP - 4096) { - syntax_msg("line %d: Stack Overflow", LOCAL_tokptr->TokPos); + syntax_msg("line %d: Stack Overflow", LOCAL_tokptr->TokLine); return TermNil; } /* check for possible overflow against local stack */ @@ -611,8 +614,8 @@ static Term ParseArgs(Atom a, Term close, JMPBUFF *FailBuff, Term arg1, } static Term MakeAccessor(Term t, Functor f USES_REGS) { - UInt arity = ArityOfFunctor(FunctorOfTerm(t)); - int i; + UInt arity = ArityOfFunctor(FunctorOfTerm(t)); + int i; Term tf[2], tl = TermNil; tf[1] = ArgOfTerm(1, t); @@ -638,7 +641,7 @@ loop: /* check for possible overflow against local stack */ if (HR > ASP - 4096) { to_store[1] = TermNil; - syntax_msg("line %d: Stack Overflow", LOCAL_tokptr->TokPos); + syntax_msg("line %d: Stack Overflow", LOCAL_tokptr->TokLine); FAIL; } else { to_store[1] = AbsPair(HR); @@ -653,7 +656,7 @@ loop: } } else { syntax_msg("line %d: looking for symbol ',','|' got symbol '%s'", - LOCAL_tokptr->TokPos, Yap_tokText(LOCAL_tokptr)); + LOCAL_tokptr->TokLine, Yap_tokText(LOCAL_tokptr)); FAIL; } return (o); @@ -725,13 +728,13 @@ static Term ParseTerm(int prio, JMPBUFF *FailBuff, encoding_t enc, TRY( /* build appl on the heap */ func = Yap_MkFunctor(AtomOfTerm(t), 1); if (func == NULL) { - syntax_msg("line %d: Heap Overflow", LOCAL_tokptr->TokPos); + syntax_msg("line %d: Heap Overflow", LOCAL_tokptr->TokLine); FAIL; } t = ParseTerm(oprprio, FailBuff, enc, cmod PASS_REGS); t = Yap_MkApplTerm(func, 1, &t); /* check for possible overflow against local stack */ if (HR > ASP - 4096) { - syntax_msg("line %d: Stack Overflow", LOCAL_tokptr->TokPos); + syntax_msg("line %d: Stack Overflow", LOCAL_tokptr->TokLine); FAIL; } curprio = opprio; , break;) @@ -762,7 +765,7 @@ static Term ParseTerm(int prio, JMPBUFF *FailBuff, encoding_t enc, break; 
case Error_tok: - syntax_msg("line %d: found ill-formed \"%s\"", LOCAL_tokptr->TokPos, + syntax_msg("line %d: found ill-formed \"%s\"", LOCAL_tokptr->TokLine, Yap_tokText(LOCAL_tokptr)); FAIL; @@ -798,14 +801,14 @@ static Term ParseTerm(int prio, JMPBUFF *FailBuff, encoding_t enc, t = Yap_MkApplTerm(FunctorBraces, 1, &t); /* check for possible overflow against local stack */ if (HR > ASP - 4096) { - syntax_msg("line %d: Stack Overflow", LOCAL_tokptr->TokPos); + syntax_msg("line %d: Stack Overflow", LOCAL_tokptr->TokLine); FAIL; } checkfor(TermEndCurlyBracket, FailBuff, enc PASS_REGS); break; default: syntax_msg("line %d: unexpected ponctuation signal %s", - LOCAL_tokptr->TokPos, Yap_tokRep(LOCAL_tokptr)); + LOCAL_tokptr->TokLine, Yap_tokRep(LOCAL_tokptr)); FAIL; } break; @@ -896,7 +899,7 @@ static Term ParseTerm(int prio, JMPBUFF *FailBuff, encoding_t enc, NextToken; break; default: - syntax_msg("line %d: expected operator, got \'%s\'", LOCAL_tokptr->TokPos, + syntax_msg("line %d: expected operator, got \'%s\'", LOCAL_tokptr->TokLine, Yap_tokText(LOCAL_tokptr)); FAIL; } @@ -912,9 +915,8 @@ static Term ParseTerm(int prio, JMPBUFF *FailBuff, encoding_t enc, /* try parsing as infix operator */ Volatile int oldprio = curprio; TRY3( - func = Yap_MkFunctor(save_opinfo, 2); - if (func == NULL) { - syntax_msg("line %d: Heap Overflow", LOCAL_tokptr->TokPos); + func = Yap_MkFunctor(save_opinfo, 2); if (func == NULL) { + syntax_msg("line %d: Heap Overflow", LOCAL_tokptr->TokLine); FAIL; } NextToken; { @@ -924,7 +926,7 @@ static Term ParseTerm(int prio, JMPBUFF *FailBuff, encoding_t enc, t = Yap_MkApplTerm(func, 2, args); /* check for possible overflow against local stack */ if (HR > ASP - 4096) { - syntax_msg("line %d: Stack Overflow", LOCAL_tokptr->TokPos); + syntax_msg("line %d: Stack Overflow", LOCAL_tokptr->TokLine); FAIL; } }, @@ -937,13 +939,13 @@ static Term ParseTerm(int prio, JMPBUFF *FailBuff, encoding_t enc, /* parse as posfix operator */ Functor func = 
Yap_MkFunctor(AtomOfTerm(LOCAL_tokptr->TokInfo), 1); if (func == NULL) { - syntax_msg("line %d: Heap Overflow", LOCAL_tokptr->TokPos); + syntax_msg("line %d: Heap Overflow", LOCAL_tokptr->TokLine); FAIL; } t = Yap_MkApplTerm(func, 1, &t); /* check for possible overflow against local stack */ if (HR > ASP - 4096) { - syntax_msg("line %d: Stack Overflow", LOCAL_tokptr->TokPos); + syntax_msg("line %d: Stack Overflow", LOCAL_tokptr->TokLine); FAIL; } curprio = opprio; @@ -953,7 +955,8 @@ static Term ParseTerm(int prio, JMPBUFF *FailBuff, encoding_t enc, break; } if (LOCAL_tokptr->Tok == Ord(Ponctuation_tok)) { - if (LOCAL_tokptr->TokInfo == TermComma && prio >= 1000 && curprio <= 999) { + if (LOCAL_tokptr->TokInfo == TermComma && prio >= 1000 && + curprio <= 999) { Volatile Term args[2]; NextToken; args[0] = t; @@ -961,7 +964,7 @@ static Term ParseTerm(int prio, JMPBUFF *FailBuff, encoding_t enc, t = Yap_MkApplTerm(FunctorComma, 2, args); /* check for possible overflow against local stack */ if (HR > ASP - 4096) { - syntax_msg("line %d: Stack Overflow", LOCAL_tokptr->TokPos); + syntax_msg("line %d: Stack Overflow", LOCAL_tokptr->TokLine); FAIL; } curprio = 1000; @@ -977,7 +980,7 @@ static Term ParseTerm(int prio, JMPBUFF *FailBuff, encoding_t enc, t = Yap_MkApplTerm(FunctorVBar, 2, args); /* check for possible overflow against local stack */ if (HR > ASP - 4096) { - syntax_msg("line %d: Stack Overflow", LOCAL_tokptr->TokPos); + syntax_msg("line %d: Stack Overflow", LOCAL_tokptr->TokLine); FAIL; } curprio = opprio; @@ -1000,18 +1003,17 @@ static Term ParseTerm(int prio, JMPBUFF *FailBuff, encoding_t enc, curprio = opprio; continue; } else if (LOCAL_tokptr->TokInfo == TermBeginCurlyBracket && - IsPosfixOp(AtomBraces, &opprio, &oplprio, - cmod PASS_REGS) && + IsPosfixOp(AtomBraces, &opprio, &oplprio, cmod PASS_REGS) && opprio <= prio && oplprio >= curprio) { - t = ParseArgs(AtomBraces, TermEndCurlyBracket, FailBuff, t, - enc, cmod PASS_REGS); + t = ParseArgs(AtomBraces, 
TermEndCurlyBracket, FailBuff, t, enc, + cmod PASS_REGS); t = MakeAccessor(t, FunctorBraces PASS_REGS); curprio = opprio; continue; } } if (LOCAL_tokptr->Tok <= Ord(String_tok)) { - syntax_msg("line %d: expected operator, got \'%s\'", LOCAL_tokptr->TokPos, + syntax_msg("line %d: expected operator, got \'%s\'", LOCAL_tokptr->TokLine, Yap_tokText(LOCAL_tokptr)); FAIL; } @@ -1022,8 +1024,8 @@ static Term ParseTerm(int prio, JMPBUFF *FailBuff, encoding_t enc, Term Yap_Parse(UInt prio, encoding_t enc, Term cmod) { CACHE_REGS - // ensure that if we throw an exception - // t will be 0. + // ensure that if we throw an exception + // t will be 0. Volatile Term t = 0; JMPBUFF FailBuff; yhandle_t sls = Yap_StartSlots(); @@ -1046,12 +1048,12 @@ Term Yap_Parse(UInt prio, encoding_t enc, Term cmod) { Yap_CloseSlots(sls); } if (LOCAL_tokptr != NULL && LOCAL_tokptr->Tok != Ord(eot_tok)) { - LOCAL_Error_TYPE = SYNTAX_ERROR; - if (LOCAL_tokptr->TokNext) { - LOCAL_ErrorMessage = "operator misssing . "; - } else { - LOCAL_ErrorMessage = "term does not end on . "; - } + LOCAL_Error_TYPE = SYNTAX_ERROR; + if (LOCAL_tokptr->TokNext) { + LOCAL_ErrorMessage = "bracket or operator expected."; + } else { + LOCAL_ErrorMessage = "term must end with . 
or EOF."; + } t = 0; } if (t != 0 && LOCAL_Error_TYPE == SYNTAX_ERROR) { diff --git a/C/scanner.c b/C/scanner.c index 8efaa55fe..9eea1e6cb 100755 --- a/C/scanner.c +++ b/C/scanner.c @@ -688,8 +688,7 @@ static int send_error_message(char s[]) { return 0; } -static wchar_t read_quoted_char(int *scan_nextp, - struct stream_desc *st) { +static wchar_t read_quoted_char(int *scan_nextp, struct stream_desc *st) { int ch; /* escape sequence */ @@ -1123,7 +1122,8 @@ Term Yap_scan_num(StreamDesc *inp, bool error_on) { ; #endif TokEntry *tokptr = (TokEntry *)AllocScannerMemory(sizeof(TokEntry)); - tokptr->TokPos = GetCurInpPos(inp); + tokptr->TokLine = GetCurInpLine(inp); + tokptr->TokPos = GetCurInpPos(inp); if (ch == '-') { sign = -1; ch = getchr(inp); @@ -1324,8 +1324,8 @@ TokEntry *Yap_tokenizer(struct stream_desc *st, bool store_comments, LOCAL_AnonVarTable = NULL; l = NULL; p = NULL; /* Just to make lint happy */ - __android_log_print(ANDROID_LOG_INFO, "YAPDroid", "i %d", st-GLOBAL_Stream); - ch = getchr(st); + __android_log_print(ANDROID_LOG_INFO, "YAPDroid", "i %d", st - GLOBAL_Stream); + ch = getchr(st); while (chtype(ch) == BS) { ch = getchr(st); } @@ -1353,7 +1353,8 @@ TokEntry *Yap_tokenizer(struct stream_desc *st, bool store_comments, while (chtype(ch) == BS) { ch = getchr(st); } - t->TokPos = GetCurInpPos(st); + t->TokPos = GetCurInpPos(st); + t->TokLine = GetCurInpLine(st); switch (chtype(ch)) { @@ -1472,7 +1473,8 @@ TokEntry *Yap_tokenizer(struct stream_desc *st, bool store_comments, if (cherr) { TokEntry *e; t->Tok = Number_tok; - t->TokPos = GetCurInpPos(st); + t->TokPos = GetCurInpPos(st); + t->TokLine = GetCurInpLine(st); e = (TokEntry *)AllocScannerMemory(sizeof(TokEntry)); if (e == NULL) { return TrailSpaceError(p, l); @@ -1498,7 +1500,8 @@ TokEntry *Yap_tokenizer(struct stream_desc *st, bool store_comments, t->Tok = Ord(Var_tok); t->TokInfo = (Term)Yap_LookupVar("E"); - t->TokPos = GetCurInpPos(st); + t->TokPos = GetCurInpPos(st); + t->TokLine = 
GetCurInpLine(st); e2 = (TokEntry *)AllocScannerMemory(sizeof(TokEntry)); if (e2 == NULL) { return TrailSpaceError(p, l); @@ -1531,7 +1534,8 @@ TokEntry *Yap_tokenizer(struct stream_desc *st, bool store_comments, if (ch == '(') solo_flag = FALSE; t->TokInfo = MkAtomTerm(AtomE); - t->TokPos = GetCurInpPos(st); + t->TokLine = GetCurInpLine(st); + t->TokPos = GetCurInpPos(st); e2 = (TokEntry *)AllocScannerMemory(sizeof(TokEntry)); if (e2 == NULL) { return TrailSpaceError(p, l); @@ -1967,7 +1971,8 @@ TokEntry *Yap_tokenizer(struct stream_desc *st, bool store_comments, p->TokNext = e; e->Tok = Error_tok; e->TokInfo = MkAtomTerm(Yap_LookupAtom(LOCAL_ErrorMessage)); - e->TokPos = GetCurInpPos(st); + e->TokPos = GetCurInpPos(st); + e->TokLine = GetCurInpLine(st); e->TokNext = NULL; LOCAL_ErrorMessage = NULL; p = e; diff --git a/C/yap-args.c b/C/yap-args.c index 0d88b1640..573964e8a 100755 --- a/C/yap-args.c +++ b/C/yap-args.c @@ -330,12 +330,10 @@ static const char *find_directory(YAP_init_args *iap, const char *paths[], int j = 0; while ((p = names[j++])) { char *io = o + s; - printf("%s -> %s\n", inp, o); if ((no = location(iap, p, io)) && io[0] != '\0' && Yap_Exists(o)) return pop_output_text_stack(lvl, realpath(o, full)); } } else { - printf("-> %s\n", o); return pop_output_text_stack(lvl, o); } } diff --git a/CMakeLists.txt b/CMakeLists.txt index 6a629599a..cbabf24b8 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -413,32 +413,36 @@ endif () # rpath stuff, hopefully it works # use, i.e. 
don't skip the full RPATH for the build tree -#SET(CMAKE_SKIP_BUILD_RPATH FALSE) +#SET(CMAKE_SKIP_BUILD_RPATH TRUE) # when building, don't use the install RPATH already -# (but later on when installing) -#SET(CMAKE_BUILD_WITH_INSTALL_RPATH TRUE) - -# SET(CMAKE_INSTALL_FULL_RPATH ${CMAKE_TOP_BINARY_DIR}) - -# add the automatically determined parts of the RPATH -# which point to directories outside the build tree to the install RPATH -SET(CMAKE_INSTALL_RPATH_USE_LINK_PATH TRUE) - - -# the RPATH to be used when installing, but only if it's not a system directory -LIST(FIND CMAKE_PLATFORM_IMPLICIT_LINK_DIRECTORIES "${libdir}" isSystemDir) - IF("${isSystemDir}" STREQUAL "-1") - SET(CMAKE_INSTALL_RPATH ${libdir}) - ENDIF("${isSystemDir}" STREQUAL "-1") - +## (but later on when installing) +#SET(CMAKE_BUILD_WITH_INSTALL_RPATH FALSE) +# +## SET(CMAKE_INSTALL_FULL_RPATH ${CMAKE_TOP_BINARY_DIR}) +# +## add the automatically determined parts of the RPATH +## which point to directories outside the build tree to the install RPATH +#SET(CMAKE_INSTALL_RPATH_USE_LINK_PATH FAlSE) +# +# +## the RPATH to be used when installing, but only if it's not a system directory +#LIST(FIND CMAKE_PLATFORM_IMPLICIT_LINK_DIRECTORIES "${libdir}" isSystemDir) +# IF("${isSystemDir}" STREQUAL "-1") +# SET(CMAKE_INSTALL_RPATH ${libdir}) +#ENDIF("${isSystemDir}" STREQUAL "-1") +# IF(NOT WIN32 AND NOT APPLE) LIST(APPEND CMAKE_INSTALL_RPATH \\$ORIGIN/../lib/Yap) LIST(APPEND CMAKE_INSTALL_RPATH ${CMAKE_INSTALL_FULL_LIBDIR}) LIST(APPEND CMAKE_INSTALL_RPATH \\$ORIGIN/../lib) LIST(APPEND CMAKE_INSTALL_RPATH \\$ORIGIN/../../../lib) - +ELSE() + LIST(APPEND CMAKE_INSTALL_RPATH @loader_path/../lib/Yap) + LIST(APPEND CMAKE_INSTALL_RPATH ${CMAKE_INSTALL_FULL_LIBDIR}) + LIST(APPEND CMAKE_INSTALL_RPATH @loader_path/../lib) + LIST(APPEND CMAKE_INSTALL_RPATH @loader_path/../../../lib) ENDIF() # Model Specific diff --git a/CXX/yapi.cpp b/CXX/yapi.cpp index 819787fda..7acb291b8 100644 --- a/CXX/yapi.cpp +++ 
b/CXX/yapi.cpp @@ -1031,6 +1031,8 @@ Term YAPEngine::top_level(std::string s) { ARG1 = YAP_ReadBuffer(s.data(), &tp); ARG2 = tp; ARG3 = MkVarTerm(); + if (ARG1 == 0) + YAPError(SYNTAX_ERROR); YAPPredicate p = YAPPredicate(YAP_TopGoal()); YAPQuery *Q = new YAPQuery(p, 0); Term ts[2]; diff --git a/H/ScannerTypes.h b/H/ScannerTypes.h index c1667f7d9..d7407c7f3 100644 --- a/H/ScannerTypes.h +++ b/H/ScannerTypes.h @@ -13,7 +13,7 @@ typedef enum TokenKinds { typedef struct TOKEN { enum TokenKinds Tok; Term TokInfo; - int TokPos; + intptr_t TokPos, TokLine; struct TOKEN *TokNext; } TokEntry; diff --git a/config.h.cmake b/config.h.cmake index b55ff4772..2e7d7ffa9 100644 --- a/config.h.cmake +++ b/config.h.cmake @@ -382,9 +382,14 @@ function. */ #cmakedefine HAVE_FFSL ${HAVE_FFSL} #endif -/* Define to 1 if you have the `ffsll' function. */ -#ifndef HAVE_FFSLL -#cmakedefine HAVE_FFSLL ${HAVE_FFSLL} +/* Define to 1 if you have the `ftell' function. */ + #ifndef HAVE_FTELL + #cmakedefine HAVE_FTELL ${HAVE_FTELL} + #endif + +/* Define to 1 if you have the `ftello' function. */ +#ifndef HAVE_FTELLO +#cmakedefine HAVE_FTELLO ${HAVE_FTELLO} #endif /* Define to 1 if you have the `fgetpos' function. */ diff --git a/include/VFS.h b/include/VFS.h index 717f7d961..a11bfb199 100644 --- a/include/VFS.h +++ b/include/VFS.h @@ -33,7 +33,6 @@ #include - typedef struct { dev_t st_dev; /* ID of device containing file */ mode_t st_mode; /* Mode of file (see below) */ @@ -50,13 +49,12 @@ typedef struct { #endif } vfs_stat; - typedef enum vfs_flags { - VFS_CAN_WRITE = 0x1, /// we can write to files in this space - VFS_CAN_EXEC = 0x2, /// we can execute files in this space - VFS_CAN_SEEK = 0x4, /// we can seek within files in this space - VFS_HAS_PREFIX = 0x8, /// has a prefix that identifies a file in this space - VFS_HAS_SUFFIX = 0x10, /// has a suffix that describes the file. 
+ VFS_CAN_WRITE = 0x1, /// we can write to files in this space + VFS_CAN_EXEC = 0x2, /// we can execute files in this space + VFS_CAN_SEEK = 0x4, /// we can seek within files in this space + VFS_HAS_PREFIX = 0x8, /// has a prefix that identifies a file in this space + VFS_HAS_SUFFIX = 0x10, /// has a suffix that describes the file. VFS_HAS_FUNCTION = 0x20 /// has a suffix that describes the file. } vfs_flags_t; @@ -80,27 +78,32 @@ typedef struct vfs { const char *suffix; bool (*chDir)(struct vfs *me, const char *s); /** operations */ - void *(*open)(struct vfs *,int sno, const char *fname, const char *io_mode); /// open an object + void *(*open)(struct vfs *, int sno, const char *fname, + const char *io_mode); /// open an object /// in this space, usual w,r,a,b flags plus B (store in a buffer) - bool (*close)(int sno); /// close the object - int (*get_char)(int sno); /// get an octet to the stream + bool (*close)(int sno); /// close the object + int (*get_char)(int sno); /// get an octet from the stream + int (*peek_char)(int sno); /// unget an octet from the stream int (*put_char)(int sno, int ch); /// output an octet to the stream - void (*flush)(int sno); /// flush a stream + void (*flush)(int sno); /// flush a stream int64_t (*seek)(int sno, int64_t offset, int whence); /// jump around the stream - void *(*opendir)(struct vfs *,const char *s); /// open a directory object, if one exists - const char *(*nextdir)(void *d); /// walk to the next entry in a directory object + void *(*opendir)(struct vfs *, + const char *s); /// open a directory object, if one exists + const char *(*nextdir)( + void *d); /// walk to the next entry in a directory object bool (*closedir)(void *d); ; /// close access a directory object - bool (*stat)(struct vfs *,const char *s, + bool (*stat)(struct vfs *, const char *s, vfs_stat *); /// obtain size, age, permissions of a file. - bool (*isdir)(struct vfs *,const char *s); /// verify whether is directory. 
+ bool (*isdir)(struct vfs *, const char *s); /// verify whether is directory. bool (*exists)(struct vfs *, const char *s); /// verify whether a file exists. - bool (*chdir)(struct vfs *,const char *s); /// set working directory (may be virtual). - encoding_t enc; /// default file encoded. + bool (*chdir)(struct vfs *, + const char *s); /// set working directory (may be virtual). + encoding_t enc; /// default file encoded. YAP_Term (*parsers)(int sno); // a set of parsers that can read the - // stream and generate a YAP_Term - int (*writers)(int ch, int sno ); + // stream and generate a YAP_Term + int (*writers)(int ch, int sno); /// convert a YAP_Term into this space const char *virtual_cwd; /** VFS dep @@ -113,7 +116,8 @@ extern VFS_t *GLOBAL_VFS; extern void init_android_stream(void); -extern void Yap_InitStdStream(int sno, unsigned int flags, FILE *file, VFS_t *vfsp); +extern void Yap_InitStdStream(int sno, unsigned int flags, FILE *file, + VFS_t *vfsp); static inline VFS_t *vfs_owner(const char *fname) { VFS_t *me = GLOBAL_VFS; @@ -124,12 +128,13 @@ static inline VFS_t *vfs_owner(const char *fname) { bool p = true; if ((me->vflags & VFS_HAS_PREFIX) && p) { const char *r = fname, *s = me->prefix; - while (*s && p) p = *s++ == *r++; - if (p && r > fname+1) - return me; + while (*s && p) + p = *s++ == *r++; + if (p && r > fname + 1) + return me; } - if (me->vflags & VFS_HAS_SUFFIX && (sz = strlen(me->suffix)) && (d = (sz0 - sz)) >= 0 && - strcmp(fname + d, me->suffix) == 0) { + if (me->vflags & VFS_HAS_SUFFIX && (sz = strlen(me->suffix)) && + (d = (sz0 - sz)) >= 0 && strcmp(fname + d, me->suffix) == 0) { return me; } me = me->next; diff --git a/include/YapError.h b/include/YapError.h index d2e907abb..210df4b7b 100644 --- a/include/YapError.h +++ b/include/YapError.h @@ -1,17 +1,16 @@ /************************************************************************* -* * -* YAP Prolog %W% %G% * -* Yap Prolog was developed at NCCUP - Universidade do Porto * -* * -* 
Copyright L.Damas, V.S.Costa and Universidade do Porto 1985-1997 * -* * -************************************************************************** -* * -* File: YapError.h * -* mods: * -* comments: error header file for YAP * -* version: $Id: Yap.h,v 1.38 2008-06-18 10:02:27 vsc Exp $ * -*************************************************************************/ + * * + * YAP Prolog %W% %G% * + * Yap Prolog was developed at NCCUP - Universidade do Porto * + * * + * Copyright L.Damas, V.S.Costa and Universidade do Porto 1985-1997 * + * * + ************************************************************************** + * * + * File: YapError.h * mods: + ** comments: error header file for YAP * + * version: $Id: Yap.h,v 1.38 2008-06-18 10:02:27 vsc Exp $ * + *************************************************************************/ #ifndef YAP_ERROR_H #define YAP_ERROR_H 1 @@ -42,21 +41,20 @@ struct yami *Yap_Error__(const char *file, const char *function, int lineno, yap_error_number err, YAP_Term wheret, ...); void Yap_ThrowError__(const char *file, const char *function, int lineno, - yap_error_number err, YAP_Term wheret, ...) - #ifndef MSC_VER - __attribute__ ((noreturn)) - #endif - ; - + yap_error_number err, YAP_Term wheret, ...) +#ifndef MSC_VER + __attribute__((noreturn)) +#endif + ; #define Yap_NilError(id, ...) \ Yap_Error__(__FILE__, __FUNCTION__, __LINE__, id, TermNil, __VA_ARGS__) #define Yap_Error(id, inp, ...) \ -Yap_Error__(__FILE__, __FUNCTION__, __LINE__, id, inp, __VA_ARGS__) + Yap_Error__(__FILE__, __FUNCTION__, __LINE__, id, inp, __VA_ARGS__) -#define Yap_ThrowError(id, inp, ...) \ -Yap_ThrowError__(__FILE__, __FUNCTION__, __LINE__, id, inp, __VA_ARGS__) +#define Yap_ThrowError(id, inp, ...) 
\ + Yap_ThrowError__(__FILE__, __FUNCTION__, __LINE__, id, inp, __VA_ARGS__) #ifdef YAP_TERM_H /** @@ -183,10 +181,10 @@ INLINE_ONLY extern inline Term Yap_ensure_atom__(const char *fu, const char *fi, uintptr_t prologPredLine; uintptr_t prologPredFirstLine; uintptr_t prologPredLastLine; - const char * prologPredName; + const char *prologPredName; uintptr_t prologPredArity; - const char * prologPredModule; - const char * prologPredFile; + const char *prologPredModule; + const char *prologPredFile; void *errorGoal; struct error_prolog_source *errorParent; } yap_error_prolog_source_t; @@ -195,8 +193,8 @@ INLINE_ONLY extern inline Term Yap_ensure_atom__(const char *fu, const char *fi, typedef struct s_yap_error_descriptor { enum yap_error_status status; yap_error_class_number errorClass; - const char * errorAsText; - const char * classAsText; + const char *errorAsText; + const char *classAsText; yap_error_number errorNo; intptr_t errorLine; const char *errorFunction; @@ -206,15 +204,16 @@ INLINE_ONLY extern inline Term Yap_ensure_atom__(const char *fu, const char *fi, uintptr_t prologPredLine; uintptr_t prologPredFirstLine; uintptr_t prologPredLastLine; - const char * prologPredName; + const char *prologPredName; uintptr_t prologPredArity; - const char * prologPredModule; - const char * prologPredFile; + const char *prologPredModule; + const char *prologPredFile; + uintptr_t prologParserPos; uintptr_t prologParserLine; uintptr_t prologParserFirstLine; uintptr_t prologParserLastLine; - const char * prologParserName; - const char * prologParserFile; + const char *prologParserText; + const char *prologParserFile; bool prologConsulting; void *errorTerm; uintptr_t rawErrorTerm, rawExtraErrorTerm; diff --git a/include/YapStreams.h b/include/YapStreams.h index 60e330158..b3c3a8cdd 100644 --- a/include/YapStreams.h +++ b/include/YapStreams.h @@ -65,6 +65,16 @@ typedef struct yap_io_position { intptr_t reserved[2]; /* future extensions */ } yapIOPOS; +typedef struct 
yapchlookahead { + intptr_t charcount; /* character position in file */ + intptr_t linecount; /* lineno in file */ + intptr_t linepos; /* position in line */ + intptr_t ch; /* future extensions */ + struct yapchlookahead *next; +} yapStreamLookahead; + +extern int PopCode(int sno); + #ifndef _PL_STREAM_H typedef struct { YAP_Atom file; /* current source file */ @@ -244,6 +254,7 @@ typedef struct stream_desc { int); /* function the stream uses for parser. It may be different from above if the ISO character conversion is on */ encoding_t encoding; /** current encoding for stream */ +struct yapchlookahead *recbs; /// support arbitrary depth peek } StreamDesc; #endif diff --git a/library/dialect/swi/fli/swi.c b/library/dialect/swi/fli/swi.c index 661d75991..16b4f31f1 100755 --- a/library/dialect/swi/fli/swi.c +++ b/library/dialect/swi/fli/swi.c @@ -222,6 +222,8 @@ X_API int PL_get_nchars(term_t l, size_t *lengthp, char **s, unsigned flags) { if (s) { size_t len = strlen(out.val.c); if (flags & (BUF_DISCARDABLE | BUF_RING)) { + strncpy(LOCAL_FileNameBuf, out.val.c, YAP_FILENAME_MAX); + *s = LOCAL_FileNameBuf; pop_text_stack(lvl); return true; } @@ -707,10 +709,12 @@ X_API int PL_get_functor(term_t ts, functor_t *f) { *f = t; } else if (IsPairTerm(t)) { *f = FunctorToSWIFunctor(FunctorDot); - } else { + } else if (IsApplTerm(t) && !IsExtensionFunctor(FunctorOfTerm(t))) { *f = FunctorToSWIFunctor(FunctorOfTerm(t)); + } else { + return false; } - return 1; + return true; } /** @brief *f is assigned the floating point number of term ts, or the diff --git a/misc/editors/prolog.js b/misc/editors/prolog.js.in similarity index 95% rename from misc/editors/prolog.js rename to misc/editors/prolog.js.in index 28c4121a6..961d9c45e 100644 --- a/misc/editors/prolog.js +++ b/misc/editors/prolog.js.in @@ -3,9 +3,9 @@ (function(mod) { if (typeof exports == "object" && typeof module == "object") // CommonJS - mod(require("../../lib/codemirror")); + 
mod(require("codemirror/lib/codemirror")); else if (typeof define == "function" && define.amd) // AMD - define([ "../../lib/codemirror" ], mod); + define([ "codemirror/lib/codemirror" ], mod); else // Plain browser env mod(CodeMirror); })(function(CodeMirror) { @@ -205,7 +205,7 @@ CodeMirror.defineMode("prolog", function(cm_config, parserConfig) { }); delete state.tagName; delete state.tagColumn; - return ret("dict_open", null); + return ret("dict_open", "bracket"); } if (ch == "/" && stream.eat("*")) @@ -225,7 +225,7 @@ CodeMirror.defineMode("prolog", function(cm_config, parserConfig) { break; case "]": state.nesting.pop(); - return ret("list_close", null); + return ret("list_close", "bracket"); case "}": { var nest = nesting(state); var type = (nest && nest.tag) ? "dict_close" : "brace_term_close"; @@ -251,30 +251,30 @@ CodeMirror.defineMode("prolog", function(cm_config, parserConfig) { closeColumn : stream.column(), alignment : stream.column() + 2 }); - return ret("list_open", null); + return ret("list_open", "bracket"); break; case "{": if (config.quasiQuotations && stream.eat("|")) { state.nesting.push( {type : "quasi-quotation", alignment : stream.column() + 1}); - return ret("qq_open", "qq_open"); + return ret("qq_open", "bracket"); } else { state.nesting.push({ type : "curly", closeColumn : stream.column(), alignment : stream.column() + 2 }); - return ret("brace_term_open", null); + return ret("brace_term_open", "bracket"); } break; case "|": if (config.quasiQuotations) { if (stream.eat("|")) { state.tokenize = plTokenQuasiQuotation; - return ret("qq_sep", "qq_sep"); + return ret("qq_sep", "bracket"); } else if (stream.eat("}")) { state.nesting.pop(); - return ret("qq_close", "qq_close"); + return ret("qq_close", "bracket"); } } if (isControl(state)) @@ -304,7 +304,7 @@ CodeMirror.defineMode("prolog", function(cm_config, parserConfig) { if (!readEsc(stream)) return ret("error", "error"); } - return ret("code", "code"); + return ret("code", "number"); } } @@ 
-325,37 +325,41 @@ CodeMirror.defineMode("prolog", function(cm_config, parserConfig) { return ret("fullstop", "error", atom); } else { } - return ret("fullstop", "fullstop", atom); + return ret("fullstop", null, atom); } else if (isNeck.test(atom)) { - return ret("neck", "neck", atom); + return ret("neck", "property", atom); } else if (isControl(state) && isControlOp.test(atom)) { state.goalStart = true; - return ret("symbol", "operator", atom); + return ret("symbol", "meta", atom); } else - return ret("symbol", "operator", atom); + return ret("symbol", "meta", atom); } stream.eatWhile(/[\w_]/); - var word = stream.current(); + var word = stream.current(), extra = ""; if (stream.peek() == "{" && config.dicts) { state.tagName = word; /* tmp state extension */ state.tagColumn = stream.column(); return ret("tag", "tag", word); } else if (ch == "_") { if (word.length == 1) { - return ret("var", "anon", word); + return ret("var", "variable-3", word); } else { var sec = word.charAt(1); if (sec == sec.toUpperCase()) - return ret("var", "var-2", word); + return ret("var", "variable-3", word); } - return ret("var", "var", word); + return ret("var", "variable-3", word); } else if (ch == ch.toUpperCase()) { - return ret("var", "var", word); + return ret("var", "Variable-2", word); } else if (stream.peek() == "(") { state.functorName = word; /* tmp state extension */ state.functorColumn = stream.column(); - return ret("functor", "functor", word); + return ret("functor", "atom", word); + } else if ((extra = stream.eat(/\/\/?\d+/))) { + state.functorName = word; /* tmp state extension */ + state.functorColumn = stream.column(); + return ret("functor", "atom", word); } else return ret("atom", "atom", word); } @@ -367,7 +371,7 @@ CodeMirror.defineMode("prolog", function(cm_config, parserConfig) { if (stream.peek() == "(") { /* 'quoted functor'() */ var word = stream.current(); state.functorName = word; /* tmp state extension */ - return ret("functor", "functor", word); + return 
ret("functor", "atom", word); } if (stream.peek() == "{" && config.dicts) { /* 'quoted tag'{} */ var word = stream.current(); @@ -389,7 +393,7 @@ CodeMirror.defineMode("prolog", function(cm_config, parserConfig) { } maybeEnd = (ch == "|"); } - return ret("qq_content", "qq_content"); + return ret("qq_content", "string"); } function plTokenComment(stream, state) { @@ -574,7 +578,7 @@ CodeMirror.defineMode("prolog", function(cm_config, parserConfig) { "atom" : "atom", "qatom" : "atom", "bqstring" : "string", - "symbol" : "atom", + "symbol" : "keyword", "functor" : "keyword", "tag" : "tag", "number" : "number", @@ -1192,15 +1196,14 @@ CodeMirror.defineMode("prolog", function(cm_config, parserConfig) { if (builtins[state.curToken] == "prolog") return "builtin"; - if (ops[state.curToken]) - return "operator"; + //if (ops[state.curToken]) + // return "operator"; - if (typeof(parserConfig.enrich) == "function") - style = parserConfig.enrich(stream, state, type, content, style); + //if (typeof(parserConfig.enrich) == "function") + // style = parserConfig.enrich(stream, state, type, content, style); return style; - return translType[type]; }, indent : function(state, textAfter) { @@ -1211,7 +1214,7 @@ CodeMirror.defineMode("prolog", function(cm_config, parserConfig) { if ((nest = nesting(state))) { if (nest.closeColumn && !state.commaAtEOL) return nest.closeColumn; - return nest.alignment; +y return nest.alignment; } if (!state.inBody) return 0; diff --git a/misc/editors/yap.js b/misc/editors/yap.js index 7ba1db0de..47543475b 100644 --- a/misc/editors/yap.js +++ b/misc/editors/yap.js @@ -1,666 +1,861 @@ + // CodeMirror, copyright (c) by Marijn Haverbeke and others // Distributed under an MIT license: http://codemirror.net/LICENSE (function(mod) { - if (typeof exports == "object" && typeof module == "object") // CommonJS - mod(require("../../lib/codemirror")); - else if (typeof define == "function" && define.amd) // AMD - define(["../../lib/codemirror"], mod); - else // Plain 
browser env - mod(CodeMirror); + if (typeof exports == "object" && typeof module == "object") // CommonJS + mod(require("codemirror/lib/codemirror")); + else if (typeof define == "function" && define.amd) // AMD + define(["codemirror/lib/codemirror"], mod); + else // Plain browser env + mod(CodeMirror); })(function(CodeMirror) { -"use strict"; + "use strict"; -CodeMirror.defineMode("prolog", function(cmConfig, parserConfig) { - - function chain(stream, state, f) { - state.tokenize = f; - return f(stream, state); - } - - /******************************* - * CONFIG DATA * - *******************************/ - - var config = { quasiQuotations: false, /* {|Syntax||Quotation|} */ - dicts: false, /* tag{k:v, ...} */ - unicodeEscape: true, /* \uXXXX and \UXXXXXXXX */ - multiLineQuoted: true, /* "...\n..." */ - groupedIntegers: false /* 10 000 or 10_000 */ - }; - - var quoteType = { '"': "string", - "'": "qatom", - "`": "bqstring" - }; - - var isSingleEscChar = /[abref\\'"nrtsv]/; - var isOctalDigit = /[0-7]/; - var isHexDigit = /[0-9a-fA-F]/; - - var isSymbolChar = /[-#$&*+./:<=>?@\\^~]/; /* Prolog glueing symbols chars */ - var isSoloChar = /[[\]{}(),;|!]/; /* Prolog solo chars */ - var isNeck = /^(:-|-->)$/; - var isControlOp = /^(,|;|->|\*->|\\+|\|)$/; - - - /******************************* - * CHARACTER ESCAPES * - *******************************/ - - function readDigits(stream, re, count) { - if ( count > 0 ) { - while( count-- > 0 ) { - if ( !re.test(stream.next()) ) - return false; - } - } else { - while ( re.test(stream.peek()) ) - stream.next(); - } - return true; - } - - function readEsc(stream) { - var next = stream.next(); - if ( isSingleEscChar.test(next) ) - return true; - switch( next ) - { case "u": - if ( config.unicodeEscape ) - return readDigits(stream, isHexDigit, 4); /* SWI */ - return false; - case "U": - if ( config.unicodeEscape ) - return readDigits(stream, isHexDigit, 8); /* SWI */ - return false; - case null: return true; /* end of line */ - case 
"c": stream.eatSpace(); return true; - case "x": return readDigits(stream, isHexDigit, 2); - } - if ( isOctalDigit.test(next) ) { - if ( !readDigits(stream, isOctalDigit, -1) ) - return false; - if ( stream.peek() == "\\" ) /* SWI: optional closing \ */ - stream.next(); - return true; - } - return false; - } - - function nextUntilUnescaped(stream, state, end) { - var next; - while ((next = stream.next()) != null) { - if ( next == end && end != stream.peek() ) - { state.nesting.pop(); - return false; - } - if ( next == "\\" ) - { if ( !readEsc(stream) ) - return false; - } - } - return config.multiLineQuoted; - } - - /******************************* - * CONTEXT NESTING * - *******************************/ - - function nesting(state) { - return state.nesting.slice(-1)[0]; - } - - /* Called on every non-comment token */ - function setArg1(state) { - var nest = nesting(state); - if ( nest ) { - if ( nest.arg == 0 ) /* nested in a compound */ - nest.arg = 1; - else if ( nest.type == "control" ) - state.goalStart = false; - } else - state.goalStart = false; - } - - function setArgAlignment(state) { - var nest = nesting(state); - if ( nest && !nest.alignment && nest.arg != undefined ) { - if ( nest.arg == 0 ) - nest.alignment = nest.leftCol ? nest.leftCol+4 : nest.column+4; - else - nest.alignment = nest.column+1; - } - } - - function nextArg(state) { - var nest = nesting(state); - if ( nest ) { - if ( nest.arg ) /* nested in a compound */ - nest.arg++; - else if ( nest.type == "control" ) - state.goalStart = true; /* FIXME: also needed for ; and -> */ - } else - state.goalStart = true; - } - - function isControl(state) { /* our terms are goals */ - var nest = nesting(state); - if ( nest ) { - if ( nest.type == "control" ) { - return true; - } - return false; - } else - return state.inBody; - } - - // Used as scratch variables to communicate multiple values without - // consing up tons of objects. 
- var type, content; - function ret(tp, style, cont) { - type = tp; content = cont; - return style; - } - - function peekSpace(stream) { /* TBD: handle block comment as space */ - if ( stream.eol() || - /[\s%]/.test(stream.peek()) ) - return true; - return false; - } - - - /******************************* - * SUB TOKENISERS * - *******************************/ - - function plTokenBase(stream, state) { - var ch = stream.next(); - - if ( ch == "(" ) { - if ( state.lastType == "functor" ) { - state.nesting.push({ functor: state.functorName, - column: stream.column(), - leftCol: state.functorColumn, - arg: 0 - }); - delete state.functorName; - delete state.functorColumn; - } else { - state.nesting.push({ type: "control", - closeColumn: stream.column(), - alignment: stream.column()+4 - }); - } - return ret("solo", null, "("); + CodeMirror.modeInfo = [ + { + name : "Prolog", + mime : "text/x-prolog", + mode : "prolog", + ext : [ "pl", "yap", "pro", "P", "prolog" ] + }, + {name: "APL", mime: "text/apl", mode: "apl", ext: ["dyalog", "apl"]}, + {name: "PGP", mimes: ["application/pgp", "application/pgp-keys", "application/pgp-signature"], mode: "asciiarmor", ext: ["pgp"]}, + {name: "ASN.1", mime: "text/x-ttcn-asn", mode: "asn.1", ext: ["asn", "asn1"]}, + {name: "Asterisk", mime: "text/x-asterisk", mode: "asterisk", file: /^extensions\.conf$/i}, + {name: "Brainfuck", mime: "text/x-brainfuck", mode: "brainfuck", ext: ["b", "bf"]}, + {name: "C", mime: "text/x-csrc", mode: "clike", ext: ["c", "h"]}, + {name: "C++", mime: "text/x-c++src", mode: "clike", ext: ["cpp", "c++", "cc", "cxx", "hpp", "h++", "hh", "hxx"], alias: ["cpp"]}, + {name: "Cobol", mime: "text/x-cobol", mode: "cobol", ext: ["cob", "cpy"]}, + {name: "C#", mime: "text/x-csharp", mode: "clike", ext: ["cs"], alias: ["csharp"]}, + {name: "Clojure", mime: "text/x-clojure", mode: "clojure", ext: ["clj", "cljc", "cljx"]}, + {name: "ClojureScript", mime: "text/x-clojurescript", mode: "clojure", ext: ["cljs"]}, + {name: 
"Closure Stylesheets (GSS)", mime: "text/x-gss", mode: "css", ext: ["gss"]}, + {name: "CMake", mime: "text/x-cmake", mode: "cmake", ext: ["cmake", "cmake.in"], file: /^CMakeLists.txt$/}, + {name: "CoffeeScript", mime: "text/x-coffeescript", mode: "coffeescript", ext: ["coffee"], alias: ["coffee", "coffee-script"]}, + {name: "Common Lisp", mime: "text/x-common-lisp", mode: "commonlisp", ext: ["cl", "lisp", "el"], alias: ["lisp"]}, + {name: "Cypher", mime: "application/x-cypher-query", mode: "cypher", ext: ["cyp", "cypher"]}, + {name: "Cython", mime: "text/x-cython", mode: "python", ext: ["pyx", "pxd", "pxi"]}, + {name: "Crystal", mime: "text/x-crystal", mode: "crystal", ext: ["cr"]}, + {name: "CSS", mime: "text/css", mode: "css", ext: ["css"]}, + {name: "CQL", mime: "text/x-cassandra", mode: "sql", ext: ["cql"]}, + {name: "D", mime: "text/x-d", mode: "d", ext: ["d"]}, + {name: "Dart", mimes: ["application/dart", "text/x-dart"], mode: "dart", ext: ["dart"]}, + {name: "diff", mime: "text/x-diff", mode: "diff", ext: ["diff", "patch"]}, + {name: "Django", mime: "text/x-django", mode: "django"}, + {name: "Dockerfile", mime: "text/x-dockerfile", mode: "dockerfile", file: /^Dockerfile$/}, + {name: "DTD", mime: "application/xml-dtd", mode: "dtd", ext: ["dtd"]}, + {name: "Dylan", mime: "text/x-dylan", mode: "dylan", ext: ["dylan", "dyl", "intr"]}, + {name: "EBNF", mime: "text/x-ebnf", mode: "ebnf"}, + {name: "ECL", mime: "text/x-ecl", mode: "ecl", ext: ["ecl"]}, + {name: "edn", mime: "application/edn", mode: "clojure", ext: ["edn"]}, + {name: "Eiffel", mime: "text/x-eiffel", mode: "eiffel", ext: ["e"]}, + {name: "Elm", mime: "text/x-elm", mode: "elm", ext: ["elm"]}, + {name: "Embedded Javascript", mime: "application/x-ejs", mode: "htmlembedded", ext: ["ejs"]}, + {name: "Embedded Ruby", mime: "application/x-erb", mode: "htmlembedded", ext: ["erb"]}, + {name: "Erlang", mime: "text/x-erlang", mode: "erlang", ext: ["erl"]}, + {name: "Factor", mime: "text/x-factor", mode: 
"factor", ext: ["factor"]}, + {name: "FCL", mime: "text/x-fcl", mode: "fcl"}, + {name: "Forth", mime: "text/x-forth", mode: "forth", ext: ["forth", "fth", "4th"]}, + {name: "Fortran", mime: "text/x-fortran", mode: "fortran", ext: ["f", "for", "f77", "f90"]}, + {name: "F#", mime: "text/x-fsharp", mode: "mllike", ext: ["fs"], alias: ["fsharp"]}, + {name: "Gas", mime: "text/x-gas", mode: "gas", ext: ["s"]}, + {name: "Gherkin", mime: "text/x-feature", mode: "gherkin", ext: ["feature"]}, + {name: "GitHub Flavored Markdown", mime: "text/x-gfm", mode: "gfm", file: /^(readme|contributing|history).md$/i}, + {name: "Go", mime: "text/x-go", mode: "go", ext: ["go"]}, + {name: "Groovy", mime: "text/x-groovy", mode: "groovy", ext: ["groovy", "gradle"], file: /^Jenkinsfile$/}, + {name: "HAML", mime: "text/x-haml", mode: "haml", ext: ["haml"]}, + {name: "Haskell", mime: "text/x-haskell", mode: "haskell", ext: ["hs"]}, + {name: "Haskell (Literate)", mime: "text/x-literate-haskell", mode: "haskell-literate", ext: ["lhs"]}, + {name: "Haxe", mime: "text/x-haxe", mode: "haxe", ext: ["hx"]}, + {name: "HXML", mime: "text/x-hxml", mode: "haxe", ext: ["hxml"]}, + {name: "ASP.NET", mime: "application/x-aspx", mode: "htmlembedded", ext: ["aspx"], alias: ["asp", "aspx"]}, + {name: "HTML", mime: "text/html", mode: "htmlmixed", ext: ["html", "htm"], alias: ["xhtml"]}, + {name: "HTTP", mime: "message/http", mode: "http"}, + {name: "IDL", mime: "text/x-idl", mode: "idl", ext: ["pro"]}, + {name: "Pug", mime: "text/x-pug", mode: "pug", ext: ["jade", "pug"], alias: ["jade"]}, + {name: "Java", mime: "text/x-java", mode: "clike", ext: ["java"]}, + {name: "Java Server Pages", mime: "application/x-jsp", mode: "htmlembedded", ext: ["jsp"], alias: ["jsp"]}, + {name: "JavaScript", mimes: ["text/javascript", "text/ecmascript", "application/javascript", "application/x-javascript", "application/ecmascript"], + mode: "javascript", ext: ["js"], alias: ["ecmascript", "js", "node"]}, + {name: "JSON", mimes: 
["application/json", "application/x-json"], mode: "javascript", ext: ["json", "map"], alias: ["json5"]}, + {name: "JSON-LD", mime: "application/ld+json", mode: "javascript", ext: ["jsonld"], alias: ["jsonld"]}, + {name: "JSX", mime: "text/jsx", mode: "jsx", ext: ["jsx"]}, + {name: "Jinja2", mime: "null", mode: "jinja2"}, + {name: "Julia", mime: "text/x-julia", mode: "julia", ext: ["jl"]}, + {name: "Kotlin", mime: "text/x-kotlin", mode: "clike", ext: ["kt"]}, + {name: "LESS", mime: "text/x-less", mode: "css", ext: ["less"]}, + {name: "LiveScript", mime: "text/x-livescript", mode: "livescript", ext: ["ls"], alias: ["ls"]}, + {name: "Lua", mime: "text/x-lua", mode: "lua", ext: ["lua"]}, + {name: "Markdown", mime: "text/x-markdown", mode: "markdown", ext: ["markdown", "md", "mkd"]}, + {name: "mIRC", mime: "text/mirc", mode: "mirc"}, + {name: "MariaDB SQL", mime: "text/x-mariadb", mode: "sql"}, + {name: "Mathematica", mime: "text/x-mathematica", mode: "mathematica", ext: ["m", "nb"]}, + {name: "Modelica", mime: "text/x-modelica", mode: "modelica", ext: ["mo"]}, + {name: "MUMPS", mime: "text/x-mumps", mode: "mumps", ext: ["mps"]}, + {name: "MS SQL", mime: "text/x-mssql", mode: "sql"}, + {name: "mbox", mime: "application/mbox", mode: "mbox", ext: ["mbox"]}, + {name: "MySQL", mime: "text/x-mysql", mode: "sql"}, + {name: "Nginx", mime: "text/x-nginx-conf", mode: "nginx", file: /nginx.*\.conf$/i}, + {name: "NSIS", mime: "text/x-nsis", mode: "nsis", ext: ["nsh", "nsi"]}, + {name: "NTriples", mime: "text/n-triples", mode: "ntriples", ext: ["nt"]}, + {name: "Objective C", mime: "text/x-objectivec", mode: "clike", ext: ["m", "mm"], alias: ["objective-c", "objc"]}, + {name: "OCaml", mime: "text/x-ocaml", mode: "mllike", ext: ["ml", "mli", "mll", "mly"]}, + {name: "Octave", mime: "text/x-octave", mode: "octave", ext: ["m"]}, + {name: "Oz", mime: "text/x-oz", mode: "oz", ext: ["oz"]}, + {name: "Pascal", mime: "text/x-pascal", mode: "pascal", ext: ["p", "pas"]}, + {name: "PEG.js", 
mime: "null", mode: "pegjs", ext: ["jsonld"]}, + {name: "Perl", mime: "text/x-perl", mode: "perl", ext: ["pl", "pm"]}, + {name: "PHP", mime: "application/x-httpd-php", mode: "php", ext: ["php", "php3", "php4", "php5", "phtml"]}, + {name: "Pig", mime: "text/x-pig", mode: "pig", ext: ["pig"]}, + {name: "Plain Text", mime: "text/plain", mode: "null", ext: ["txt", "text", "conf", "def", "list", "log"]}, + {name: "PLSQL", mime: "text/x-plsql", mode: "sql", ext: ["pls"]}, + {name: "PowerShell", mime: "application/x-powershell", mode: "powershell", ext: ["ps1", "psd1", "psm1"]}, + {name: "Properties files", mime: "text/x-properties", mode: "properties", ext: ["properties", "ini", "in"], alias: ["ini", "properties"]}, + {name: "ProtoBuf", mime: "text/x-protobuf", mode: "protobuf", ext: ["proto"]}, + {name: "Python", mime: "text/x-python", mode: "python", ext: ["BUILD", "bzl", "py", "pyw"], file: /^(BUCK|BUILD)$/}, + {name: "Puppet", mime: "text/x-puppet", mode: "puppet", ext: ["pp"]}, + {name: "Q", mime: "text/x-q", mode: "q", ext: ["q"]}, + {name: "R", mime: "text/x-rsrc", mode: "r", ext: ["r", "R"], alias: ["rscript"]}, + {name: "reStructuredText", mime: "text/x-rst", mode: "rst", ext: ["rst"], alias: ["rst"]}, + {name: "RPM Changes", mime: "text/x-rpm-changes", mode: "rpm"}, + {name: "RPM Spec", mime: "text/x-rpm-spec", mode: "rpm", ext: ["spec"]}, + {name: "Ruby", mime: "text/x-ruby", mode: "ruby", ext: ["rb"], alias: ["jruby", "macruby", "rake", "rb", "rbx"]}, + {name: "Rust", mime: "text/x-rustsrc", mode: "rust", ext: ["rs"]}, + {name: "SAS", mime: "text/x-sas", mode: "sas", ext: ["sas"]}, + {name: "Sass", mime: "text/x-sass", mode: "sass", ext: ["sass"]}, + {name: "Scala", mime: "text/x-scala", mode: "clike", ext: ["scala"]}, + {name: "Scheme", mime: "text/x-scheme", mode: "scheme", ext: ["scm", "ss"]}, + {name: "SCSS", mime: "text/x-scss", mode: "css", ext: ["scss"]}, + {name: "Shell", mime: "text/x-sh", mode: "shell", ext: ["sh", "ksh", "bash"], alias: ["bash", 
"sh", "zsh"], file: /^PKGBUILD$/}, + {name: "Sieve", mime: "application/sieve", mode: "sieve", ext: ["siv", "sieve"]}, + {name: "Slim", mimes: ["text/x-slim", "application/x-slim"], mode: "slim", ext: ["slim"]}, + {name: "Smalltalk", mime: "text/x-stsrc", mode: "smalltalk", ext: ["st"]}, + {name: "Smarty", mime: "text/x-smarty", mode: "smarty", ext: ["tpl"]}, + {name: "Solr", mime: "text/x-solr", mode: "solr"}, + {name: "Soy", mime: "text/x-soy", mode: "soy", ext: ["soy"], alias: ["closure template"]}, + {name: "SPARQL", mime: "application/sparql-query", mode: "sparql", ext: ["rq", "sparql"], alias: ["sparul"]}, + {name: "Spreadsheet", mime: "text/x-spreadsheet", mode: "spreadsheet", alias: ["excel", "formula"]}, + {name: "SQL", mime: "text/x-sql", mode: "sql", ext: ["sql"]}, + {name: "SQLite", mime: "text/x-sqlite", mode: "sql"}, + {name: "Squirrel", mime: "text/x-squirrel", mode: "clike", ext: ["nut"]}, + {name: "Stylus", mime: "text/x-styl", mode: "stylus", ext: ["styl"]}, + {name: "Swift", mime: "text/x-swift", mode: "swift", ext: ["swift"]}, + {name: "sTeX", mime: "text/x-stex", mode: "stex"}, + {name: "LaTeX", mime: "text/x-latex", mode: "stex", ext: ["text", "ltx"], alias: ["tex"]}, + {name: "SystemVerilog", mime: "text/x-systemverilog", mode: "verilog", ext: ["v"]}, + {name: "Tcl", mime: "text/x-tcl", mode: "tcl", ext: ["tcl"]}, + {name: "Textile", mime: "text/x-textile", mode: "textile", ext: ["textile"]}, + {name: "TiddlyWiki ", mime: "text/x-tiddlywiki", mode: "tiddlywiki"}, + {name: "Tiki wiki", mime: "text/tiki", mode: "tiki"}, + {name: "TOML", mime: "text/x-toml", mode: "toml", ext: ["toml"]}, + {name: "Tornado", mime: "text/x-tornado", mode: "tornado"}, + {name: "troff", mime: "text/troff", mode: "troff", ext: ["1", "2", "3", "4", "5", "6", "7", "8", "9"]}, + {name: "TTCN", mime: "text/x-ttcn", mode: "ttcn", ext: ["ttcn", "ttcn3", "ttcnpp"]}, + {name: "TTCN_CFG", mime: "text/x-ttcn-cfg", mode: "ttcn-cfg", ext: ["cfg"]}, + {name: "Turtle", mime: 
"text/turtle", mode: "turtle", ext: ["ttl"]}, + {name: "TypeScript", mime: "application/typescript", mode: "javascript", ext: ["ts"], alias: ["ts"]}, + {name: "TypeScript-JSX", mime: "text/typescript-jsx", mode: "jsx", ext: ["tsx"], alias: ["tsx"]}, + {name: "Twig", mime: "text/x-twig", mode: "twig"}, + {name: "Web IDL", mime: "text/x-webidl", mode: "webidl", ext: ["webidl"]}, + {name: "VB.NET", mime: "text/x-vb", mode: "vb", ext: ["vb"]}, + {name: "VBScript", mime: "text/vbscript", mode: "vbscript", ext: ["vbs"]}, + {name: "Velocity", mime: "text/velocity", mode: "velocity", ext: ["vtl"]}, + {name: "Verilog", mime: "text/x-verilog", mode: "verilog", ext: ["v"]}, + {name: "VHDL", mime: "text/x-vhdl", mode: "vhdl", ext: ["vhd", "vhdl"]}, + {name: "Vue.js Component", mimes: ["script/x-vue", "text/x-vue"], mode: "vue", ext: ["vue"]}, + {name: "XML", mimes: ["application/xml", "text/xml"], mode: "xml", ext: ["xml", "xsl", "xsd", "svg"], alias: ["rss", "wsdl", "xsd"]}, + {name: "XQuery", mime: "application/xquery", mode: "xquery", ext: ["xy", "xquery"]}, + {name: "Yacas", mime: "text/x-yacas", mode: "yacas", ext: ["ys"]}, + {name: "YAML", mimes: ["text/x-yaml", "text/yaml"], mode: "yaml", ext: ["yaml", "yml"], alias: ["yml"]}, + {name: "Z80", mime: "text/x-z80", mode: "z80", ext: ["z80"]}, + {name: "mscgen", mime: "text/x-mscgen", mode: "mscgen", ext: ["mscgen", "mscin", "msc"]}, + {name: "xu", mime: "text/x-xu", mode: "mscgen", ext: ["xu"]}, + {name: "msgenny", mime: "text/x-msgenny", mode: "mscgen", ext: ["msgenny"]} + ]; + // Ensure all modes have a mime property for backwards compatibility + for (var i = 0; i < CodeMirror.modeInfo.length; i++) { + var info = CodeMirror.modeInfo[i]; + if (info.mimes) info.mime = info.mimes[0]; } - if ( ch == "{" && state.lastType == "tag" ) { - state.nesting.push({ tag: state.tagName, - column: stream.column(), - leftCol: state.tagColumn, - arg: 0 - }); - delete state.tagName; - delete state.tagColumn; - return ret("dict_open", 
null); - } - - if ( ch == "/" && stream.eat("*") ) - return chain(stream, state, plTokenComment); - - if ( ch == "%" ) { - stream.skipToEnd(); - return ret("comment", "comment"); - } - - setArg1(state); - - if ( isSoloChar.test(ch) ) { - switch ( ch ) - { case ")": - state.nesting.pop(); - break; - case "]": - state.nesting.pop(); - return ret("list_close", null); - case "}": - { var nest = nesting(state); - var type = (nest && nest.tag) ? "dict_close" : "brace_term_close"; - - state.nesting.pop(); - return ret(type, null); + CodeMirror.findModeByMIME = function(mime) { + mime = mime.toLowerCase(); + for (var i = 0; i < CodeMirror.modeInfo.length; i++) { + var info = CodeMirror.modeInfo[i]; + if (info.mime == mime) return info; + if (info.mimes) for (var j = 0; j < info.mimes.length; j++) + if (info.mimes[j] == mime) return info; } - case ",": - if ( stream.eol() ) - state.commaAtEOL = true; - nextArg(state); - /*FALLTHROUGH*/ - case ";": - if ( isControl(state) ) - state.goalStart = true; - break; - case "[": - state.nesting.push({ type: "list", - closeColumn: stream.column(), - alignment: stream.column()+2 - }); - return ret("list_open", null); - break; - case "{": - if ( config.quasiQuotations && stream.eat("|") ) { - state.nesting.push({ type: "quasi-quotation", - alignment: stream.column()+1 - }); - return ret("qq_open", "qq_open"); - } else { - state.nesting.push({ type: "curly", - closeColumn: stream.column(), - alignment: stream.column()+2 - }); - return ret("brace_term_open", null); - } - break; - case "|": - if ( config.quasiQuotations ) { - if ( stream.eat("|") ) { - state.tokenize = plTokenQuasiQuotation; - return ret("qq_sep", "qq_sep"); - } else if ( stream.eat("}") ) { - state.nesting.pop(); - return ret("qq_close", "qq_close"); - } - } - if ( isControl(state) ) - state.goalStart = true; - break; - } - return ret("solo", null, ch); - } - - if (ch == '"' || ch == "'" || ch == "`") - { state.nesting.push({ type: "quoted", - alignment: stream.column()+1 
- }); - return chain(stream, state, plTokenString(ch)); - } - - if ( ch == "0" ) { - if ( stream.eat(/x/i)) { - stream.eatWhile(/[\da-f]/i); - return ret("number", "number"); - } - if ( stream.eat(/o/i)) { - stream.eatWhile(/[0-7]/i); - return ret("number", "number"); - } - if ( stream.eat(/'/) ) { /* 0' */ - var next = stream.next(); - if ( next == "\\" ) { - if ( !readEsc(stream) ) - return ret("error", "error"); - } - return ret("code", "code"); - } - } - - if ( /\d/.test(ch) || /[+-]/.test(ch) && stream.eat(/\d/)) { - if ( config.groupedIntegers ) - stream.match(/^\d*((_|\s+)\d+)*(?:\.\d+)?(?:[eE][+\-]?\d+)?/); - else - stream.match(/^\d*(?:\.\d+)?(?:[eE][+\-]?\d+)?/); - return ret(ch == "-" ? "neg-number" : - ch == "+" ? "pos-number" : - "number"); - } - - if ( isSymbolChar.test(ch) ) { - stream.eatWhile(isSymbolChar); - var atom = stream.current(); - if ( atom == "." && peekSpace(stream) ) { - if ( nesting(state) ) { - return ret("fullstop", "error", atom); - } else { - } return ret("fullstop", "fullstop", atom); - } else if ( isNeck.test(atom) ) { - return ret("neck", "neck", atom); - } else if ( isControl(state) && isControlOp.test(atom) ) { - state.goalStart = true; - return ret("symbol", "operator", atom); - } else - return ret("symbol", "operator", atom); - } - - stream.eatWhile(/[\w_]/); - var word = stream.current(); - if ( stream.peek() == "{" && config.dicts ) { - state.tagName = word; /* tmp state extension */ - state.tagColumn = stream.column(); - return ret("tag", "tag", word); - } else if ( ch == "_" ) { - if ( word.length == 1 ) { - return ret("var", "anon", word); - } else { - var sec = word.charAt(1); - if ( sec == sec.toUpperCase() ) - return ret("var", "var-2", word); - } - return ret("var", "var", word); - } else if ( ch == ch.toUpperCase() ) { - return ret("var", "var", word); - } else if ( stream.peek() == "(" ) { - state.functorName = word; /* tmp state extension */ - state.functorColumn = stream.column(); - return ret("functor", 
"functor", word); - } else - return ret("atom", "atom", word); - } - - function plTokenString(quote) { - return function(stream, state) { - if (!nextUntilUnescaped(stream, state, quote)) { - state.tokenize = plTokenBase; - if ( stream.peek() == "(" ) { /* 'quoted functor'() */ - var word = stream.current(); - state.functorName = word; /* tmp state extension */ - return ret("functor", "functor", word); - } - if ( stream.peek() == "{" && config.dicts ) { /* 'quoted tag'{} */ - var word = stream.current(); - state.tagName = word; /* tmp state extension */ - return ret("tag", "tag", word); - } - } - return ret(quoteType[quote], quoteType[quote]); + if (/\+xml$/.test(mime)) return CodeMirror.findModeByMIME("application/xml") + if (/\+json$/.test(mime)) return CodeMirror.findModeByMIME("application/json") }; - } - function plTokenQuasiQuotation(stream, state) { - var maybeEnd = false, ch; - while (ch = stream.next()) { - if (ch == "}" && maybeEnd) { - state.tokenize = plTokenBase; - stream.backUp(2); - break; - } - maybeEnd = (ch == "|"); - } - return ret("qq_content", "qq_content"); - } + CodeMirror.findModeByExtension = function(ext) { + for (var i = 0; i < CodeMirror.modeInfo.length; i++) { + var info = CodeMirror.modeInfo[i]; + if (info.ext) for (var j = 0; j < info.ext.length; j++) + if (info.ext[j] == ext) return info; + } + }; - function plTokenComment(stream, state) { - var maybeEnd = false, ch; - while (ch = stream.next()) { - if (ch == "/" && maybeEnd) { - state.tokenize = plTokenBase; - break; - } - maybeEnd = (ch == "*"); - } - return ret("comment", "comment"); - } + CodeMirror.findModeByFileName = function(filename) { + for (var i = 0; i < CodeMirror.modeInfo.length; i++) { + var info = CodeMirror.modeInfo[i]; + if (info.file && info.file.test(filename)) return info; + } + var dot = filename.lastIndexOf("."); + var ext = dot > -1 && filename.substring(dot + 1, filename.length); + if (ext) return CodeMirror.findModeByExtension(ext); + }; + + 
CodeMirror.findModeByName = function(name) { + name = name.toLowerCase(); + for (var i = 0; i < CodeMirror.modeInfo.length; i++) { + var info = CodeMirror.modeInfo[i]; + if (info.name.toLowerCase() == name) return info; + if (info.alias) for (var j = 0; j < info.alias.length; j++) + if (info.alias[j].toLowerCase() == name) return info; + } + }; + + CodeMirror.defineMode("prolog", function(cmConfig) { + + function chain(stream, state, f) { + state.tokenize = f; + return f(stream, state); + } + + /******************************* + * CONFIG DATA * + *******************************/ + + var config = { quasiQuotations: false, /* {|Syntax||Quotation|} */ + dicts: false, /* tag{k:v, ...} */ + unicodeEscape: true, /* \uXXXX and \UXXXXXXXX */ + multiLineQuoted: true, /* "...\n..." */ + groupedIntegers: false /* 10 000 or 10_000 */ + }; + + var quoteType = { '"': "string", + "'": "qatom", + "`": "bqstring" + }; + + var isSingleEscChar = /[abref\\'"nrtsv]/; + var isOctalDigit = /[0-7]/; + var isHexDigit = /[0-9a-fA-F]/; + + var isSymbolChar = /[-#$&*+./:<=>?@\\^~]/; /* Prolog glueing symbols chars */ + var isSoloChar = /[[\]{}(),;|]/; /* Prolog solo chars] */ + var isNeck = /^(:-|-->)$/; + var isControlOp = /^(,|;|->|\*->|\\+|\|)$/; - /******************************* - * ACTIVE KEYS * - *******************************/ + /******************************* + * CHARACTER ESCAPES * + *******************************/ - /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - Support if-then-else layout like this: + function readDigits(stream, re, count) { + if ( count > 0 ) { + while( count-- > 0 ) { + if ( !re.test(stream.next()) ) + return false; + } + } else { + while ( re.test(stream.peek()) ) + stream.next(); + } + return true; + } - goal :- - ( Condition - -> IfTrue - ; IfFalse - ). 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */ + function readEsc(stream) { + var next = stream.next(); + if ( isSingleEscChar.test(next) ) + return true; + switch( next ) + { case "u": + if ( config.unicodeEscape ) + return readDigits(stream, isHexDigit, 4); /* SWI */ + return false; + case "U": + if ( config.unicodeEscape ) + return readDigits(stream, isHexDigit, 8); /* SWI */ + return false; + case null: return true; /* end of line */ + case "c": stream.eatSpace(); return true; + case "x": return readDigits(stream, isHexDigit, 2); + } + if ( isOctalDigit.test(next) ) { + if ( !readDigits(stream, isOctalDigit, -1) ) + return false; + if ( stream.peek() == "\\" ) /* SWI: optional closing \ */ + stream.next(); + return true; + } + return false; + } + + function nextUntilUnescaped(stream, state, end) { + var next; + while ((next = stream.next()) != null) { + if ( next == end && end != stream.peek() ) + { state.nesting.pop(); + return false; + } + if ( next == "\\" ) + { if ( !readEsc(stream) ) + return false; + } + } + return config.multiLineQuoted; + } + + /******************************* + * CONTEXT NESTING * + *******************************/ + + function nesting(state) { + return state.nesting.slice(-1)[0]; + } + + /* Called on every non-comment token */ + function setArg1(state) { + var nest = nesting(state); + if ( nest ) { + if ( nest.arg == 0 ) /* nested in a compound */ + nest.arg = 1; + else if ( nest.type == "control" ) + state.goalStart = false; + } else + state.goalStart = false; + } + + function setArgAlignment(state) { + var nest = nesting(state); + if ( nest && !nest.alignment && nest.arg != undefined ) { + if ( nest.arg == 0 ) + nest.alignment = nest.leftCol ? 
nest.leftCol+4 : nest.column+4; + else + nest.alignment = nest.column+1; + } + } + + function nextArg(state) { + var nest = nesting(state); + if ( nest ) { + if ( nest.arg ) /* nested in a compound */ + nest.arg++; + else if ( nest.type == "control" ) + state.goalStart = true; /* FIXME: also needed for ; and -> */ + } else + state.goalStart = true; + } + + function isControl(state) { /* our terms are goals */ + var nest = nesting(state); + if ( nest ) { + if ( nest.type == "control" ) { + return true; + } + return false; + } else + return state.inBody; + } + + // Used as scratch variables to communicate multiple values without + // consing up tons of objects. + var type, content; + function ret(tp, style, cont) { + type = tp; content = cont; + return style; + } + + function peekSpace(stream) { /* TBD: handle block comment as space */ + if ( stream.eol() || + /[\s%]/.test(stream.peek()) ) + return true; + return false; + } - CodeMirror.commands.prologStartIfThenElse = function(cm) { - var start = cm.getCursor("start"); - var token = cm.getTokenAt(start, true); + /******************************* + * SUB TOKENISERS * + *******************************/ - if ( token.state.goalStart == true ) - { cm.replaceSelection("( ", "end"); - return; - } + function plTokenBase(stream, state) { + var ch = stream.next(); - return CodeMirror.Pass; - } + if ( ch == "(" ) { + if ( state.lastType == "functor" ) { + state.nesting.push({ functor: state.functorName, + column: stream.column(), + leftCol: state.functorColumn, + arg: 0 + }); + delete state.functorName; + delete state.functorColumn; + } else { + state.nesting.push({ type: "control", + closeColumn: stream.column(), + alignment: stream.column()+4 + }); + } + return ret("solo", null, "("); + } - CodeMirror.commands.prologStartThen = function(cm) { - var start = cm.getCursor("start"); - var token = cm.getTokenAt(start, true); + if ( ch == "{" && state.lastType == "tag" ) { + state.nesting.push({ tag: state.tagName, + column: 
stream.column(), + leftCol: state.tagColumn, + arg: 0 + }); + delete state.tagName; + delete state.tagColumn; + return ret("dict_open", null); + } - /* FIXME: These functions are copied from prolog.js. How - can we reuse these? - */ - function nesting(state) { - var len = state.nesting.length; - if ( len > 0 ) - return state.nesting[len-1]; - return null; - } + if ( ch == "/" && stream.eat("*") ) + return chain(stream, state, plTokenComment); - function isControl(state) { /* our terms are goals */ - var nest = nesting(state); - if ( nest ) { - if ( nest.type == "control" ) { - return true; - } - return false; - } else - return state.inBody; - } + if ( ch == "%" ) { + stream.skipToEnd(); + return ret("comment", "comment"); + } - if ( start.ch == token.end && - token.type == "operator" && - token.string == "-" && - isControl(token.state) ) - { cm.replaceSelection("> ", "end"); - return; - } + setArg1(state); - return CodeMirror.Pass; - } + if ( isSoloChar.test(ch) ) { + switch ( ch ) + { case ")": + state.nesting.pop(); + break; + case "]": + state.nesting.pop(); + return ret("list_close", null); + case "}": + { var nest = nesting(state); + var type = (nest && nest.tag) ? 
"dict_close" : "brace_term_close"; - CodeMirror.commands.prologStartElse = function(cm) { - var start = cm.getCursor("start"); - var token = cm.getTokenAt(start, true); + state.nesting.pop(); + return ret(type, null); + } + case ",": + if ( stream.eol() ) + state.commaAtEOL = true; + nextArg(state); + /*FALLTHROUGH*/ + case ";": + if ( isControl(state) ) + state.goalStart = true; + break; + case "[": + state.nesting.push({ type: "list", + closeColumn: stream.column(), + alignment: stream.column()+2 + }); + return ret("list_open", null); + break; + case "{": + if ( config.quasiQuotations && stream.eat("|") ) { + state.nesting.push({ type: "quasi-quotation", + alignment: stream.column()+1 + }); + return ret("qq_open", "qq_open"); + } else { + state.nesting.push({ type: "curly", + closeColumn: stream.column(), + alignment: stream.column()+2 + }); + return ret("brace_term_open", null); + } + break; + case "|": + if ( config.quasiQuotations ) { + if ( stream.eat("|") ) { + state.tokenize = plTokenQuasiQuotation; + return ret("qq_sep", "qq_sep"); + } else if ( stream.eat("}") ) { + state.nesting.pop(); + return ret("qq_close", "qq_close"); + } + } + if ( isControl(state) ) + state.goalStart = true; + break; + } + return ret("solo", null, ch); + } - if ( token.start == 0 && start.ch == token.end && - !/\S/.test(token.string) ) - { cm.replaceSelection("; ", "end"); - return; - } + if (ch == '"' || ch == "'" || ch == "`") + { state.nesting.push({ type: "quoted", + alignment: stream.column()+1 + }); + return chain(stream, state, plTokenString(ch)); + } - return CodeMirror.Pass; - } + if ( ch == "0" ) { + if ( stream.eat(/x/i)) { + stream.eatWhile(/[\da-f]/i); + return ret("number", "number"); + } + if ( stream.eat(/o/i)) { + stream.eatWhile(/[0-7]/i); + return ret("number", "number"); + } + if ( stream.eat(/'/) ) { /* 0' */ + var next = stream.next(); + if ( next == "\\" ) { + if ( !readEsc(stream) ) + return ret("error", "error"); + } + return ret("code", "code"); + } + } + 
+ if ( /\d/.test(ch) || /[+-]/.test(ch) && stream.eat(/\d/)) { + if ( config.groupedIntegers ) + stream.match(/^\d*((_|\s+)\d+)*(?:\.\d+)?(?:[eE][+\-]?\d+)?/); + else + stream.match(/^\d*(?:\.\d+)?(?:[eE][+\-]?\d+)?/); + return ret(ch == "-" ? "neg-number" : + ch == "+" ? "pos-number" : + "number"); + } + + if ( isSymbolChar.test(ch) ) { + stream.eatWhile(isSymbolChar); + var atom = stream.current(); + if ( atom == "." && peekSpace(stream) ) { + if ( nesting(state) ) { + return ret("fullstop", "error", atom); + } else { + } return ret("fullstop", "fullstop", atom); + } else if ( isNeck.test(atom) ) { + return ret("neck", "neck", atom); + } else if ( isControl(state) && isControlOp.test(atom) ) { + state.goalStart = true; + return ret("symbol", "operator", atom); + } else + return ret("symbol", "operator", atom); + } + + stream.eatWhile(/[\w_]/); + var word = stream.current(); + if ( stream.peek() == "{" && config.dicts ) { + state.tagName = word; /* tmp state extension */ + state.tagColumn = stream.column(); + return ret("tag", "tag", word); + } else if ( ch == "_" ) { + if ( word.length == 1 ) { + return ret("var", "anon", word); + } else { + var sec = word.charAt(1); + if ( sec == sec.toUpperCase() ) + return ret("var", "var-2", word); + } + return ret("var", "var", word); + } else if ( ch == ch.toUpperCase() ) { + return ret("var", "var", word); + } else if ( stream.peek() == "(" ) { + state.functorName = word; /* tmp state extension */ + state.functorColumn = stream.column(); + return ret("functor", "functor", word); + } else + return ret("atom", "atom", word); + } + + function plTokenString(quote) { + return function(stream, state) { + if (!nextUntilUnescaped(stream, state, quote)) { + state.tokenize = plTokenBase; + if ( stream.peek() == "(" ) { /* 'quoted functor'() */ + var word = stream.current(); + state.functorName = word; /* tmp state extension */ + return ret("functor", "functor", word); + } + if ( stream.peek() == "{" && config.dicts ) { /* 'quoted 
tag'{} */ + var word = stream.current(); + state.tagName = word; /* tmp state extension */ + return ret("tag", "tag", word); + } + } + return ret(quoteType[quote], quoteType[quote]); + }; + } + + function plTokenQuasiQuotation(stream, state) { + var maybeEnd = false, ch; + while (ch = stream.next()) { + if (ch == "}" && maybeEnd) { + state.tokenize = plTokenBase; + stream.backUp(2); + break; + } + maybeEnd = (ch == "|"); + } + return ret("qq_content", "qq_content"); + } + + function plTokenComment(stream, state) { + var maybeEnd = false, ch; + while (ch = stream.next()) { + if (ch == "/" && maybeEnd) { + state.tokenize = plTokenBase; + break; + } + maybeEnd = (ch == "*"); + } + return ret("comment", "comment"); + } + + + /******************************* + * ACTIVE KEYS * + *******************************/ + + /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + Support if-then-else layout like this: + + goal :- + ( Condition + -> IfTrue + ; IfFalse + ). + - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */ + + + CodeMirror.commands.prologStartIfThenElse = function(cm) { + var start = cm.getCursor("start"); + var token = cm.getTokenAt(start, true); + + if ( token.state.goalStart == true ) + { cm.replaceSelection("( ", "end"); + return; + } + + return CodeMirror.Pass; + } + + CodeMirror.commands.prologStartThen = function(cm) { + var start = cm.getCursor("start"); + var token = cm.getTokenAt(start, true); + + /* FIXME: These functions are copied from prolog.js. How + can we reuse these? 
+ */ + function nesting(state) { + var len = state.nesting.length; + if ( len > 0 ) + return state.nesting[len-1]; + return null; + } + + function isControl(state) { /* our terms are goals */ + var nest = nesting(state); + if ( nest ) { + if ( nest.type == "control" ) { + return true; + } + return false; + } else + return state.inBody; + } + + if ( start.ch == token.end && + token.type == "operator" && + token.string == "-" && + isControl(token.state) ) + { cm.replaceSelection("> ", "end"); + return; + } + + return CodeMirror.Pass; + } + + CodeMirror.commands.prologStartElse = function(cm) { + var start = cm.getCursor("start"); + var token = cm.getTokenAt(start, true); + + if ( token.start == 0 && start.ch == token.end && + !/\S/.test(token.string) ) + { cm.replaceSelection("; ", "end"); + return; + } + + return CodeMirror.Pass; + } + + CodeMirror.defineOption("prologKeys", null, function(cm, val, prev) { + if (prev && prev != CodeMirror.Init) + cm.removeKeyMap("prolog"); + if ( val ) { + var map = { name: "prolog", + "'('": "prologStartIfThenElse", + "'>'": "prologStartThen", + "';'": "prologStartElse", + "Ctrl-L": "refreshHighlight" + }; + cm.addKeyMap(map); + } + }); - CodeMirror.defineOption("prologKeys", null, function(cm, val, prev) { - if (prev && prev != CodeMirror.Init) - cm.removeKeyMap("prolog"); - if ( val ) { - var map = { name: "prolog", - "'('": "prologStartIfThenElse", - "'>'": "prologStartThen", - "';'": "prologStartElse", - "Ctrl-L": "refreshHighlight" - }; - cm.addKeyMap(map); - } }); - - }); - //Default (SWI-)Prolog operator table. To be used later to enhance the - //offline experience. + //Default (SWI-)Prolog operator table. To be used later to enhance the + //offline experience. 
var ops = { "-->": { p:1200, t:"xfx" }, - ":-": [ { p:1200, t:"xfx" }, - { p:1200, t:"fx" } - ], - "?-": { p:1200, t:"fx" }, + ":-": [ { p:1200, t:"xfx" }, + { p:1200, t:"fx" } + ], + "?-": { p:1200, t:"fx" }, - "dynamic": { p:1150, t:"fx" }, - "discontiguous": { p:1150, t:"fx" }, - "initialization": { p:1150, t:"fx" }, - "meta_predicate": { p:1150, t:"fx" }, - "module_transparent": { p:1150, t:"fx" }, - "multifile": { p:1150, t:"fx" }, - "thread_local": { p:1150, t:"fx" }, - "volatile": { p:1150, t:"fx" }, + "dynamic": { p:1150, t:"fx" }, + "discontiguous": { p:1150, t:"fx" }, + "initialization": { p:1150, t:"fx" }, + "meta_predicate": { p:1150, t:"fx" }, + "module_transparent": { p:1150, t:"fx" }, + "multifile": { p:1150, t:"fx" }, + "thread_local": { p:1150, t:"fx" }, + "volatile": { p:1150, t:"fx" }, - ";": { p:1100, t:"xfy" }, - "|": { p:1100, t:"xfy" }, + ";": { p:1100, t:"xfy" }, + "|": { p:1100, t:"xfy" }, - "->": { p:1050, t:"xfy" }, - "*->": { p:1050, t:"xfy" }, + "->": { p:1050, t:"xfy" }, + "*->": { p:1050, t:"xfy" }, - ",": { p:1000, t:"xfy" }, + ",": { p:1000, t:"xfy" }, - "\\+": { p:900, t:"fy" }, + "\\+": { p:900, t:"fy" }, - "~": { p:900, t:"fx" }, + "~": { p:900, t:"fx" }, - "<": { p:700, t:"xfx" }, - "=": { p:700, t:"xfx" }, - "=..": { p:700, t:"xfx" }, - "=@=": { p:700, t:"xfx" }, - "=:=": { p:700, t:"xfx" }, - "=<": { p:700, t:"xfx" }, - "==": { p:700, t:"xfx" }, - "=\\=": { p:700, t:"xfx" }, - ">": { p:700, t:"xfx" }, - ">=": { p:700, t:"xfx" }, - "@<": { p:700, t:"xfx" }, - "@=<": { p:700, t:"xfx" }, - "@>": { p:700, t:"xfx" }, - "@>=": { p:700, t:"xfx" }, - "\\=": { p:700, t:"xfx" }, - "\\==": { p:700, t:"xfx" }, - "is": { p:700, t:"xfx" }, + "<": { p:700, t:"xfx" }, + "=": { p:700, t:"xfx" }, + "=..": { p:700, t:"xfx" }, + "=@=": { p:700, t:"xfx" }, + "=:=": { p:700, t:"xfx" }, + "=<": { p:700, t:"xfx" }, + "==": { p:700, t:"xfx" }, + "=\\=": { p:700, t:"xfx" }, + ">": { p:700, t:"xfx" }, + ">=": { p:700, t:"xfx" }, + "@<": { p:700, t:"xfx" 
}, + "@=<": { p:700, t:"xfx" }, + "@>": { p:700, t:"xfx" }, + "@>=": { p:700, t:"xfx" }, + "\\=": { p:700, t:"xfx" }, + "\\==": { p:700, t:"xfx" }, + "is": { p:700, t:"xfx" }, - ":": { p:600, t:"xfy" }, + ":": { p:600, t:"xfy" }, - "+": [ { p:500, t:"yfx" }, - { p:200, t:"fy" } - ], - "-": [ { p:500, t:"yfx" }, - { p:200, t:"fy" } - ], - "/\\": { p:500, t:"yfx" }, - "\\/": { p:500, t:"yfx" }, - "xor": { p:500, t:"yfx" }, + "+": [ { p:500, t:"yfx" }, + { p:200, t:"fy" } + ], + "-": [ { p:500, t:"yfx" }, + { p:200, t:"fy" } + ], + "/\\": { p:500, t:"yfx" }, + "\\/": { p:500, t:"yfx" }, + "xor": { p:500, t:"yfx" }, - "?": { p:500, t:"fx" }, + "?": { p:500, t:"fx" }, - "*": { p:400, t:"yfx" }, - "/": { p:400, t:"yfx" }, - "//": { p:400, t:"yfx" }, - "rdiv": { p:400, t:"yfx" }, - "<<": { p:400, t:"yfx" }, - ">>": { p:400, t:"yfx" }, - "mod": { p:400, t:"yfx" }, - "rem": { p:400, t:"yfx" }, + "*": { p:400, t:"yfx" }, + "/": { p:400, t:"yfx" }, + "//": { p:400, t:"yfx" }, + "rdiv": { p:400, t:"yfx" }, + "<<": { p:400, t:"yfx" }, + ">>": { p:400, t:"yfx" }, + "mod": { p:400, t:"yfx" }, + "rem": { p:400, t:"yfx" }, - "**": { p:200, t:"xfx" }, - "^": { p:200, t:"xfy" }, + "**": { p:200, t:"xfx" }, + "^": { p:200, t:"xfy" }, - "\\": { p:200, t:"fy" } - }; + "\\": { p:200, t:"fy" } + }; - /******************************* - * RETURN OBJECT * - *******************************/ + /******************************* + * RETURN OBJECT * + *******************************/ - return { - startState: function() { - return { - tokenize: plTokenBase, - inBody: false, - goalStart: false, - lastType: null, - nesting: new Array(), /* ([{}]) nesting FIXME: copy this */ - curTerm: null, /* term index in metainfo */ - curToken: null /* token in term */ - }; - }, + return { + startState: function() { + return { + tokenize: plTokenBase, + inBody: false, + goalStart: false, + lastType: null, + nesting: new Array(), /* ([{}]) nesting FIXME: copy this */ + curTerm: null, /* term index in metainfo */ + 
curToken: null /* token in term */ + }; + }, - token: function(stream, state) { - var nest; + token: function(stream, state) { + var nest; - if ( state.curTerm == null && parserConfig.metainfo ) { - state.curTerm = 0; - state.curToken = 0; - } + if ( state.curTerm == null /* && parserConfig.metainfo */ ) { + state.curTerm = 0; + state.curToken = 0; + } - if ( stream.sol() ) - delete state.commaAtEOL; + if ( stream.sol() ) + delete state.commaAtEOL; - if ( state.tokenize == plTokenBase && stream.eatSpace() ) { - if ( stream.eol() ) - setArgAlignment(state); - return null; - } + if ( state.tokenize == plTokenBase && stream.eatSpace() ) { + if ( stream.eol() ) + setArgAlignment(state); + return null; + } - var style = state.tokenize(stream, state); + var style = state.tokenize(stream, state); - if ( stream.eol() ) - setArgAlignment(state); + if ( stream.eol() ) + setArgAlignment(state); - if ( type == "neck" ) { - state.inBody = true; - state.goalStart = true; - } else if ( type == "fullstop" ) { - state.inBody = false; - state.goalStart = false; - } + if ( type == "neck" ) { + state.inBody = true; + state.goalStart = true; + } else if ( type == "fullstop" ) { + state.inBody = false; + state.goalStart = false; + } - state.lastType = type; + state.lastType = type; - if ( typeof(parserConfig.enrich) == "function" ) - style = parserConfig.enrich(stream, state, type, content, style); + //if ( typeof(parserConfig.enrich) == "function" ) + // style = parserConfig.enrich(stream, state, type, content, style); - return style; - }, + return style; + }, - indent: function(state, textAfter) { - if (state.tokenize == plTokenComment) return CodeMirror.Pass; + indent: function(state, textAfter) { + if (state.tokenize == plTokenComment) return CodeMirror.Pass; - var nest; - if ( (nest=nesting(state)) ) { - if ( nest.closeColumn && !state.commaAtEOL ) - return nest.closeColumn; - return nest.alignment; - } - if ( !state.inBody ) - return 0; + var nest; + if ( (nest=nesting(state)) ) { 
+ if ( nest.closeColumn && !state.commaAtEOL ) + return nest.closeColumn; + return nest.alignment; + } + if ( !state.inBody ) + return 0; - return 4; - }, + return 4; + }, - theme: "prolog", + theme: "prolog", + + blockCommentStart: "/*", /* continuecomment.js support */ + blockCommentEnd: "*/", + blockCommentContinue: " * ", + lineComment: "%", + }); + } - blockCommentStart: "/*", /* continuecomment.js support */ - blockCommentEnd: "*/", - blockCommentContinue: " * ", - lineComment: "%", - }; }); - -CodeMirror.defineMIME("text/x-prolog", "prolog"); -}); -// CodeMirror, copyright (c) by Marijn Haverbeke and others -// Distributed under an MIT license: http://codemirror.net/LICENSE - -(function(mod) { - if (typeof exports == "object" && typeof module == "object") // CommonJS - mod(require("../../lib/codemirror")); - else if (typeof define == "function" && define.amd) // AMD - define(["../../lib/codemirror"], mod); - else // Plain browser env - mod(CodeMirror); -})(function(CodeMirror) { -"use strict"; diff --git a/os/YapIOConfig.h.cmake b/os/YapIOConfig.h.cmake index 9f5a87a76..09e1788e2 100644 --- a/os/YapIOConfig.h.cmake +++ b/os/YapIOConfig.h.cmake @@ -13,7 +13,7 @@ #cmakedefine HAVE_READLINE_READLINE_H ${HAVE_READLINE_READLINE_H} #endif -#if defined(FOUND_READLINE) +#if defined(HAVE_READLINE_READLINE_H) && defined(HAVE_LIBREADLINE) #define USE_READLINE 1 #endif diff --git a/os/charsio.c b/os/charsio.c index e8266a8db..0284d51f3 100644 --- a/os/charsio.c +++ b/os/charsio.c @@ -93,126 +93,68 @@ INLINE_ONLY inline EXTERN Int CharOfAtom(Atom at) { return val; } -Int Yap_peek(int sno) { - CACHE_REGS - Int ocharcount, olinecount, olinepos; +int PopCode(int sno) { StreamDesc *s; - int32_t ch; - + Int ch; + struct yapchlookahead *p; s = GLOBAL_Stream + sno; -#if USE_READLINE - if (s->status & Readline_Stream_f && trueGlobalPrologFlag(READLINE_FLAG)) { - ch = Yap_ReadlinePeekChar(sno); - if (ch == EOFCHAR) { - s->stream_getc = EOFPeek; - s->stream_wgetc = EOFWPeek; - 
s->status |= Push_Eof_Stream_f; - } - return ch; + if (!s->recbs) { + return EOF; } -#endif -#if !HAVE_FMEMOPEN - if (s->status & InMemory_Stream_f) { - return Yap_MemPeekc(sno); - } -#endif - /* buffer the character */ - if (s->encoding == Yap_SystemEncoding() && 0) { - ch = fgetwc(s->file); - ungetwc(ch, s->file); - return ch; - } else { - ocharcount = s->charcount; - olinecount = s->linecount; - olinepos = s->linepos; - ch = s->stream_wgetc(sno); - if (ch == EOFCHAR) { - s->stream_getc = EOFPeek; - s->stream_wgetc = EOFWPeek; - s->status |= Push_Eof_Stream_f; - return ch; - } - } - if (s->encoding == ENC_OCTET || s->encoding == ENC_ISO_LATIN1 || - s->encoding == ENC_ISO_ASCII) { - ungetc(ch, s->file); - } else if (s->encoding == ENC_ISO_UTF8) { - unsigned char cs[8]; - size_t n = put_utf8(cs, ch); - while (n--) { - ungetc(cs[n], s->file); - } - } else if (s->encoding == ENC_UTF16_BE) { - /* do the ungetc as if a write .. */ - // computations - if (ch < 0x10000) { - ungetc(ch % 256, s->file); - ungetc(ch / 256, s->file); - } else { - uint16_t lead = LEAD_OFFSET + (ch >> 10); - uint16_t trail = 0xDC00 + (ch & 0x3FF); - - ungetc(lead % 256, s->file); - ungetc(lead / 256, s->file); - ungetc(trail % 256, s->file); - ungetc(trail / 256, s->file); - } - } else if (s->encoding == ENC_UTF16_LE) { - if (ch < 0x10000) { - ungetc(ch / 256, s->file); - ungetc(ch % 256, s->file); - } else { - uint16_t lead = LEAD_OFFSET + (ch >> 10); - uint16_t trail = 0xDC00 + (ch & 0x3FF); - - ungetc(trail / 256, s->file); - ungetc(trail % 256, s->file); - ungetc(lead / 256, s->file); - ungetc(lead % 256, s->file); - } - } else if (s->encoding == ENC_ISO_UTF32_LE) { - ungetc((ch >> 24) & 0xff, s->file); - ungetc((ch >> 16) & 0xff, s->file); - ungetc((ch >> 8) & 0xff, s->file); - ungetc(ch & 0xff, s->file); - } else if (s->encoding == ENC_ISO_UTF32_BE) { - ungetc(ch & 0xff, s->file); - ungetc((ch >> 8) & 0xff, s->file); - ungetc((ch >> 16) & 0xff, s->file); - ungetc((ch >> 24) & 0xff, 
s->file); - } else if (s->encoding == ENC_UCS2_BE) { - /* do the ungetc as if a write .. */ - // computations - ungetc(ch % 256, s->file); - ungetc(ch / 256, s->file); - } else if (s->encoding == ENC_UCS2_LE) { - ungetc(ch / 256, s->file); - ungetc(ch % 256, s->file); - } - s->charcount = ocharcount; - s->linecount = olinecount; - s->linepos = olinepos; + p = s->recbs; + ch = p->ch; + s->recbs = s->recbs->next; + free(p); + if (!s->recbs) + Yap_DefaultStreamOps(s); return ch; } -static Int dopeek_byte(int sno) { +static int peekCode(int sno, bool wide) { Int ocharcount, olinecount, olinepos; StreamDesc *s; Int ch; - + struct yapchlookahead *recb = malloc(sizeof(struct yapchlookahead)), *r; + recb->next = NULL; s = GLOBAL_Stream + sno; ocharcount = s->charcount; olinecount = s->linecount; olinepos = s->linepos; - ch = GLOBAL_Stream[sno].stream_getc(sno); + if (wide) + recb->ch = ch = GLOBAL_Stream[sno].stream_wgetc(sno); + else + recb->ch = ch = GLOBAL_Stream[sno].stream_getc(sno); + if (ch == EOFCHAR) { + s->stream_getc = EOFPeek; + s->stream_wgetc = EOFWPeek; + s->status |= Push_Eof_Stream_f; + return ch; + } + recb->charcount = s->charcount; + recb->linecount = s->linecount; + recb->linepos = s->linepos; s->charcount = ocharcount; s->linecount = olinecount; s->linepos = olinepos; + if (s->recbs) { + r = s->recbs; + while (r->next) { + r = r->next; + } + r->next = recb; + } else { + s->recbs = recb; + } /* buffer the character */ - ungetc(ch, s->file); + GLOBAL_Stream[sno].stream_getc = PopCode; + GLOBAL_Stream[sno].stream_wgetc = PopCode; return ch; } +Int Yap_peek(int sno) { return peekCode(sno, true); } + +static Int dopeek_byte(int sno) { return peekCode(sno, false); } + bool store_code(int ch, Term t USES_REGS) { Term t2 = Deref(t); bool rc = Yap_unify_constant(t2, MkIntegerTerm(ch)); diff --git a/os/fmem.c b/os/fmem.c index 181421123..6265f18c7 100644 --- a/os/fmem.c +++ b/os/fmem.c @@ -108,6 +108,7 @@ bool fill_pads(int sno, int sno0, int total, format_info 
*fg USES_REGS) GLOBAL_Stream[sno].linecount = 1; GLOBAL_Stream[sno].linepos += nchars; GLOBAL_Stream[sno].charcount = 0; + GLOBAL_Stream[sno].recbs = NULL; fg->phys_start = 0; fg->lstart = GLOBAL_Stream[sno].linepos; fg->gapi = 0; @@ -122,6 +123,7 @@ bool Yap_set_stream_to_buf(StreamDesc *st, const char *buf, st->file = f = fmemopen((char *)buf, nchars, "r"); st->status = Input_Stream_f | Seekable_Stream_f | InMemory_Stream_f; st->vfs = NULL; + st->recbs = NULL; st->encoding = LOCAL_encoding; Yap_DefaultStreamOps(st); st->linecount = 0; @@ -194,7 +196,7 @@ int Yap_open_buf_write_stream(encoding_t enc, memBufSource src) { st->linecount = 1; st->encoding = enc; st->vfs = NULL; - Yap_DefaultStreamOps(st); + st->recbs = NULL; #if HAVE_OPEN_MEMSTREAM st->file = open_memstream(&st->nbuf, &st->nsize); // setbuf(st->file, NULL); @@ -205,6 +207,7 @@ int Yap_open_buf_write_stream(encoding_t enc, memBufSource src) { return -1; } #endif + Yap_DefaultStreamOps(st); UNLOCK(st->streamlock); return sno; } diff --git a/os/fmemopen-android.c b/os/fmemopen-android.c index a4c9fa823..8d698898e 100644 --- a/os/fmemopen-android.c +++ b/os/fmemopen-android.c @@ -189,7 +189,7 @@ fmemseek(void *cookie, fpos_t pos, int whence) return (fpos_t) offset; } -/* Reclaim resources used by stream described by COOKIE. */ +/* Reclaim resources used by stream described by COOKIE. 
*/ static int fmemclose(void *cookie) { diff --git a/os/iopreds.c b/os/iopreds.c index 69bb81696..9de0dae5d 100644 --- a/os/iopreds.c +++ b/os/iopreds.c @@ -124,301 +124,307 @@ FILE *Yap_stdout; FILE *Yap_stderr; static Term gethdir(Term t) { - CACHE_REGS - Atom aref = AtomOfTerm(t); - char *s = RepAtom(aref)->StrOfAE; - size_t nsz; + CACHE_REGS + Atom aref = AtomOfTerm(t); + char *s = RepAtom(aref)->StrOfAE; + size_t nsz; - s = strncpy(LOCAL_FileNameBuf, RepAtom(aref)->StrOfAE, MAXPATHLEN - 1); - if (!s) { - return false; - } - if (TermDot == t) { - return TermEmptyAtom; - } - nsz = strlen(s); - if (!Yap_dir_separator(s[nsz - 1])) { + s = strncpy(LOCAL_FileNameBuf, RepAtom(aref)->StrOfAE, MAXPATHLEN - 1); + if (!s) { + return false; + } + if (TermDot == t) { + return TermEmptyAtom; + } + nsz = strlen(s); + if (!Yap_dir_separator(s[nsz - 1])) { #if _WIN32 - s[nsz] = '\\'; + s[nsz] = '\\'; #else - s[nsz] = '/'; + s[nsz] = '/'; #endif - s[nsz + 1] = '\0'; - } - return MkAtomTerm(Yap_LookupAtom(s)); + s[nsz + 1] = '\0'; + } + return MkAtomTerm(Yap_LookupAtom(s)); } static Term issolutions(Term t) { - if (t == TermFirst || t == TermAll) - return t; + if (t == TermFirst || t == TermAll) + return t; - if (IsVarTerm(t)) { - Yap_Error(INSTANTIATION_ERROR, t, "solutions in {first, all}."); - return TermZERO; - } - if (IsAtomTerm(t)) { - Yap_Error(DOMAIN_ERROR_SOLUTIONS, t, "solutions in {first, all}"); - return TermZERO; - } - Yap_Error(TYPE_ERROR_ATOM, t, "solutions in {first, all}}"); + if (IsVarTerm(t)) { + Yap_Error(INSTANTIATION_ERROR, t, "solutions in {first, all}."); return TermZERO; + } + if (IsAtomTerm(t)) { + Yap_Error(DOMAIN_ERROR_SOLUTIONS, t, "solutions in {first, all}"); + return TermZERO; + } + Yap_Error(TYPE_ERROR_ATOM, t, "solutions in {first, all}}"); + return TermZERO; } static Term is_file_type(Term t) { - if (t == TermTxt || t == TermProlog || t == TermSource || - t == TermExecutable || t == TermQly || t == TermDirectory) - return t; + if (t == TermTxt 
|| t == TermProlog || t == TermSource || + t == TermExecutable || t == TermQly || t == TermDirectory) + return t; - if (IsVarTerm(t)) { - Yap_Error(INSTANTIATION_ERROR, t, - "file_type in {txt,prolog,exe,directory...}"); - return TermZERO; - } - if (IsAtomTerm(t)) { - Yap_Error(DOMAIN_ERROR_FILE_TYPE, t, - "file_type in {txt,prolog,exe,directory...}"); - return TermZERO; - } - Yap_Error(TYPE_ERROR_ATOM, t, "file_type in {txt,prolog,exe,directory...}"); + if (IsVarTerm(t)) { + Yap_Error(INSTANTIATION_ERROR, t, + "file_type in {txt,prolog,exe,directory...}"); return TermZERO; + } + if (IsAtomTerm(t)) { + Yap_Error(DOMAIN_ERROR_FILE_TYPE, t, + "file_type in {txt,prolog,exe,directory...}"); + return TermZERO; + } + Yap_Error(TYPE_ERROR_ATOM, t, "file_type in {txt,prolog,exe,directory...}"); + return TermZERO; } static Term is_file_errors(Term t) { - if (t == TermFail || t == TermError) - return t; + if (t == TermFail || t == TermError) + return t; - if (IsVarTerm(t)) { - Yap_Error(INSTANTIATION_ERROR, t, "file_error in {fail,error}."); - return TermZERO; - } - if (IsAtomTerm(t)) { - Yap_Error(DOMAIN_ERROR_FILE_ERRORS, t, "file_error in {fail,error}."); - return TermZERO; - } - Yap_Error(TYPE_ERROR_ATOM, t, "file_error in {fail,error}."); + if (IsVarTerm(t)) { + Yap_Error(INSTANTIATION_ERROR, t, "file_error in {fail,error}."); return TermZERO; + } + if (IsAtomTerm(t)) { + Yap_Error(DOMAIN_ERROR_FILE_ERRORS, t, "file_error in {fail,error}."); + return TermZERO; + } + Yap_Error(TYPE_ERROR_ATOM, t, "file_error in {fail,error}."); + return TermZERO; } static void unix_upd_stream_info(StreamDesc *s) { - if (s->status & InMemory_Stream_f) { - s->status |= Seekable_Stream_f; - return; - } - Yap_socketStream(s); + if (s->status & InMemory_Stream_f) { + s->status |= Seekable_Stream_f; + return; + } + Yap_socketStream(s); #if _MSC_VER || defined(__MINGW32__) - { - if (_isatty(_fileno(s->file))) { - s->status |= Tty_Stream_f | Reset_Eof_Stream_f | Promptable_Stream_f; - /* make 
all console descriptors unbuffered */ - setvbuf(s->file, NULL, _IONBF, 0); - return; - } -#if _MSC_VER - /* standard error stream should never be buffered */ - else if (StdErrStream == s - GLOBAL_Stream) { - setvbuf(s->file, NULL, _IONBF, 0); - } -#endif - s->status |= Seekable_Stream_f; + { + if (_isatty(_fileno(s->file))) { + s->status |= Tty_Stream_f | Reset_Eof_Stream_f | Promptable_Stream_f; + /* make all console descriptors unbuffered */ + setvbuf(s->file, NULL, _IONBF, 0); return; } +#if _MSC_VER + /* standard error stream should never be buffered */ + else if (StdErrStream == s - GLOBAL_Stream) { + setvbuf(s->file, NULL, _IONBF, 0); + } +#endif + s->status |= Seekable_Stream_f; + return; + } #else #if HAVE_ISATTY #if __simplescalar__ - /* isatty does not seem to work with simplescar. I'll assume the first - three streams will probably be ttys (pipes are not thatg different) */ - if (s - Stream < 3) { - s->name = AtomTty; - s->status |= Tty_Stream_f | Reset_Eof_Stream_f | Promptable_Stream_f; - } + /* isatty does not seem to work with simplescar. 
I'll assume the first + three streams will probably be ttys (pipes are not thatg different) */ + if (s - Stream < 3) { + s->name = AtomTty; + s->status |= Tty_Stream_f | Reset_Eof_Stream_f | Promptable_Stream_f; + } #else - { - int filedes; /* visualc */ - if (!s->file) { - s->name = AtomNil; - return; - } - filedes = fileno(s->file); - if (isatty(filedes)) { + { + int filedes; /* visualc */ + if (!s->file) { + s->name = AtomNil; + return; + } + filedes = fileno(s->file); + if (isatty(filedes)) { #if HAVE_TTYNAME - int rc = ttyname_r(filedes, LOCAL_FileNameBuf, YAP_FILENAME_MAX - 1); - if (rc == 0) - s->name = Yap_LookupAtom(LOCAL_FileNameBuf); - else - s->name = AtomTtys; + int rc = ttyname_r(filedes, LOCAL_FileNameBuf, YAP_FILENAME_MAX - 1); + if (rc == 0) + s->name = Yap_LookupAtom(LOCAL_FileNameBuf); + else + s->name = AtomTtys; #else - s->name = AtomTty; + s->name = AtomTty; #endif - s->status |= Tty_Stream_f | Reset_Eof_Stream_f | Promptable_Stream_f; - return; - } + s->status |= Tty_Stream_f | Reset_Eof_Stream_f | Promptable_Stream_f; + return; } + } #endif #endif /* HAVE_ISATTY */ #endif /* _MSC_VER */ - s->status |= Seekable_Stream_f; + s->status |= Seekable_Stream_f; } void Yap_DefaultStreamOps(StreamDesc *st) { - CACHE_REGS - st->stream_wputc = put_wchar; - if (st->encoding == ENC_ISO_UTF8) - st->stream_wgetc = get_wchar_UTF8; - else - st->stream_wgetc = get_wchar; - if (st->vfs) { - st->stream_putc = st->vfs->put_char; - st->stream_wgetc = st->vfs->get_char; - } else { - st->stream_putc = FilePutc; - st->stream_getc = PlGetc; - } - if (st->status & (Promptable_Stream_f)) { - Yap_ConsoleOps(st); - } + CACHE_REGS + st->stream_wputc = put_wchar; + if (st->encoding == ENC_ISO_UTF8) + st->stream_wgetc = get_wchar_UTF8; + else + st->stream_wgetc = get_wchar; + if (st->vfs) { + st->stream_putc = st->vfs->put_char; + st->stream_wgetc = st->vfs->get_char; + } else { + st->stream_putc = FilePutc; + st->stream_getc = PlGetc; + } + if (st->status & Pipe_Stream_f) { 
+ Yap_PipeOps(st); + } else if (st->status & InMemory_Stream_f) { + Yap_MemOps(st); + } else if (st->status & Tty_Stream_f) { + Yap_ConsoleOps(st); + } else { + unix_upd_stream_info(st); + } + if (st->status & (Promptable_Stream_f)) { + Yap_ConsoleOps(st); + } #ifndef _WIN32 - else if (st->file != NULL && 0 && !(st->status & InMemory_Stream_f)) { - st->stream_wgetc = get_wchar_from_file; - } + else if (st->file != NULL && 0 && !(st->status & InMemory_Stream_f)) { + st->stream_wgetc = get_wchar_from_file; + } #endif - if (GLOBAL_CharConversionTable != NULL) - st->stream_wgetc_for_read = ISOWGetc; - else - st->stream_wgetc_for_read = st->stream_wgetc; - if (st->status & Pipe_Stream_f) { - Yap_PipeOps(st); - } else if (st->status & InMemory_Stream_f) { - Yap_MemOps(st); - } else if (st->status & Tty_Stream_f) { - Yap_ConsoleOps(st); - } else { - unix_upd_stream_info(st); - } + if (st->recbs) { + st->stream_getc = PopCode; + st->stream_wgetc = PopCode; + } + if (GLOBAL_CharConversionTable != NULL) + st->stream_wgetc_for_read = ISOWGetc; + else + st->stream_wgetc_for_read = st->stream_wgetc; } static void InitFileIO(StreamDesc *s) { - CACHE_REGS - Yap_DefaultStreamOps(s); + CACHE_REGS + Yap_DefaultStreamOps(s); } static void InitStdStream(int sno, SMALLUNSGN flags, FILE *file, VFS_t *vfsp) { - StreamDesc *s = &GLOBAL_Stream[sno]; - s->file = file; - s->status = flags; - s->linepos = 0; - s->linecount = 1; - s->charcount = 0; - s->vfs = vfsp; - s->encoding = ENC_ISO_UTF8; - INIT_LOCK(s->streamlock); - if (vfsp != NULL) { - s->u.private_data = - vfsp->open(vfsp, sno, vfsp->name, (sno == StdInStream ? 
"read" : "write")); - if (s->u.private_data == NULL) { - (PlIOError(EXISTENCE_ERROR_SOURCE_SINK, MkIntTerm(sno), "%s", - vfsp->name)); - return; - } - } else { - unix_upd_stream_info(s); + StreamDesc *s = &GLOBAL_Stream[sno]; + s->file = file; + s->status = flags; + s->linepos = 0; + s->linecount = 1; + s->charcount = 0; + s->vfs = vfsp; + s->recbs = NULL; + s->encoding = ENC_ISO_UTF8; + INIT_LOCK(s->streamlock); + if (vfsp != NULL) { + s->u.private_data = vfsp->open(vfsp, sno, vfsp->name, + (sno == StdInStream ? "read" : "write")); + if (s->u.private_data == NULL) { + (PlIOError(EXISTENCE_ERROR_SOURCE_SINK, MkIntTerm(sno), "%s", + vfsp->name)); + return; } - /* Getting streams to prompt is a mess because we need for cooperation - between readers and writers to the stream :-( - */ - InitFileIO(s); - switch (sno) { - case 0: - s->name = AtomUserIn; - break; - case 1: - s->name = AtomUserOut; - break; - default: - s->name = AtomUserErr; - break; - } - s->user_name = MkAtomTerm(s->name); + } else { + unix_upd_stream_info(s); + } + /* Getting streams to prompt is a mess because we need for cooperation + between readers and writers to the stream :-( + */ + InitFileIO(s); + switch (sno) { + case 0: + s->name = AtomUserIn; + break; + case 1: + s->name = AtomUserOut; + break; + default: + s->name = AtomUserErr; + break; + } + s->user_name = MkAtomTerm(s->name); #if LIGHT - s->status |= Tty_Stream_f | Promptable_Stream_f; + s->status |= Tty_Stream_f | Promptable_Stream_f; #endif - Yap_DefaultStreamOps(s); + s->recbs = NULL; + Yap_DefaultStreamOps(s); #if HAVE_SETBUF - if (s->status & Tty_Stream_f && sno == 0) { - /* make sure input is unbuffered if it comes from stdin, this - makes life simpler for interrupt handling */ - setbuf(stdin, NULL); - // fprintf(stderr,"here I am\n"); - } + if (s->status & Tty_Stream_f && sno == 0) { + /* make sure input is unbuffered if it comes from stdin, this + makes life simpler for interrupt handling */ + setbuf(stdin, NULL); + // 
fprintf(stderr,"here I am\n"); + } #endif /* HAVE_SETBUF */ } void Yap_InitStdStream(int sno, unsigned int flags, FILE *file, VFS_t *vfsp) { - InitStdStream(sno, flags, file, vfsp); + InitStdStream(sno, flags, file, vfsp); } Term Yap_StreamUserName(int sno) { - Term atname; - StreamDesc *s = &GLOBAL_Stream[sno]; - if (s->user_name != 0L) { - return (s->user_name); - } - if ((atname = StreamName(sno))) - return atname; - return TermNil; + Term atname; + StreamDesc *s = &GLOBAL_Stream[sno]; + if (s->user_name != 0L) { + return (s->user_name); + } + if ((atname = StreamName(sno))) + return atname; + return TermNil; } static void InitStdStreams(void) { - CACHE_REGS - if (LOCAL_sockets_io) { - InitStdStream(StdInStream, Input_Stream_f, NULL, NULL); - InitStdStream(StdOutStream, Output_Stream_f, NULL, NULL); - InitStdStream(StdErrStream, Output_Stream_f, NULL, NULL); - } else { - InitStdStream(StdInStream, Input_Stream_f, stdin, NULL); - InitStdStream(StdOutStream, Output_Stream_f, stdout, NULL); - InitStdStream(StdErrStream, Output_Stream_f, stderr, NULL); - } - GLOBAL_Stream[StdInStream].name = Yap_LookupAtom("user_input"); - GLOBAL_Stream[StdOutStream].name = Yap_LookupAtom("user_output"); - GLOBAL_Stream[StdErrStream].name = Yap_LookupAtom("user_error"); + CACHE_REGS + if (LOCAL_sockets_io) { + InitStdStream(StdInStream, Input_Stream_f, NULL, NULL); + InitStdStream(StdOutStream, Output_Stream_f, NULL, NULL); + InitStdStream(StdErrStream, Output_Stream_f, NULL, NULL); + } else { + InitStdStream(StdInStream, Input_Stream_f, stdin, NULL); + InitStdStream(StdOutStream, Output_Stream_f, stdout, NULL); + InitStdStream(StdErrStream, Output_Stream_f, stderr, NULL); + } + GLOBAL_Stream[StdInStream].name = Yap_LookupAtom("user_input"); + GLOBAL_Stream[StdOutStream].name = Yap_LookupAtom("user_output"); + GLOBAL_Stream[StdErrStream].name = Yap_LookupAtom("user_error"); #if USE_READLINE - if (GLOBAL_Stream[StdInStream].status & Tty_Stream_f && - 
GLOBAL_Stream[StdOutStream].status & Tty_Stream_f && - GLOBAL_Stream[StdErrStream].status & Tty_Stream_f && !Yap_embedded) { - Yap_InitReadline(TermTrue); - } + if (GLOBAL_Stream[StdInStream].status & Tty_Stream_f && + GLOBAL_Stream[StdOutStream].status & Tty_Stream_f && + GLOBAL_Stream[StdErrStream].status & Tty_Stream_f && !Yap_embedded) { + Yap_InitReadline(TermTrue); + } #endif - LOCAL_c_input_stream = StdInStream; - LOCAL_c_output_stream = StdOutStream; - LOCAL_c_error_stream = StdErrStream; + LOCAL_c_input_stream = StdInStream; + LOCAL_c_output_stream = StdOutStream; + LOCAL_c_error_stream = StdErrStream; } void Yap_InitStdStreams(void) { InitStdStreams(); } Int PlIOError__(const char *file, const char *function, int lineno, yap_error_number type, Term culprit, ...) { - if (trueLocalPrologFlag(FILEERRORS_FLAG) || - type == RESOURCE_ERROR_MAX_STREAMS /* do not catch resource errors */) { - va_list args; - const char *format; - char who[1024]; + if (trueLocalPrologFlag(FILEERRORS_FLAG) || + type == RESOURCE_ERROR_MAX_STREAMS /* do not catch resource errors */) { + va_list args; + const char *format; + char who[1024]; - va_start(args, culprit); - format = va_arg(args, char *); - if (format) { - vsnprintf(who, 1023, format, args); - } else { - who[0] = '\0'; - } - va_end(args); - Yap_Error__(file, function, lineno, type, culprit, who); - /* and fail */ - return false; + va_start(args, culprit); + format = va_arg(args, char *); + if (format) { + vsnprintf(who, 1023, format, args); } else { - pop_text_stack(0); - memset(LOCAL_ActiveError, 0, sizeof(*LOCAL_ActiveError)); - return false; + who[0] = '\0'; } + va_end(args); + Yap_Error__(file, function, lineno, type, culprit, who); + /* and fail */ + return false; + } else { + pop_text_stack(0); + memset(LOCAL_ActiveError, 0, sizeof(*LOCAL_ActiveError)); + return false; + } } static int eolflg = 1; @@ -447,247 +453,247 @@ static void InTTYLine(char *line) { #endif void Yap_DebugSetIFile(char *fname) { - if (curfile) - 
fclose(curfile); - curfile = fopen(fname, "r"); - if (curfile == NULL) { - curfile = stdin; - Yap_Warning("%% YAP open %s for input\n", fname); - } + if (curfile) + fclose(curfile); + curfile = fopen(fname, "r"); + if (curfile == NULL) { + curfile = stdin; + Yap_Warning("%% YAP open %s for input\n", fname); + } } void Yap_DebugEndline() { *lp = 0; } int Yap_DebugGetc() { - int ch; - if (eolflg) { - if (curfile != NULL) { - if (fgets(my_line, 200, curfile) == 0) - curfile = NULL; - } - if (curfile == NULL) - if (fgets(my_line, 200, stdin) == NULL) { - return EOF; - } - eolflg = 0; - lp = my_line; + int ch; + if (eolflg) { + if (curfile != NULL) { + if (fgets(my_line, 200, curfile) == 0) + curfile = NULL; } - if ((ch = *lp++) == 0) - ch = '\n', eolflg = 1; - if (Yap_Option['l' - 96]) - putc(ch, Yap_logfile); - return (ch); + if (curfile == NULL) + if (fgets(my_line, 200, stdin) == NULL) { + return EOF; + } + eolflg = 0; + lp = my_line; + } + if ((ch = *lp++) == 0) + ch = '\n', eolflg = 1; + if (Yap_Option['l' - 96]) + putc(ch, Yap_logfile); + return (ch); } int Yap_DebugPutc(FILE *s, wchar_t ch) { - if (Yap_Option['l' - 96]) - (void) putc(ch, Yap_logfile); - return (putc(ch, s)); + if (Yap_Option['l' - 96]) + (void)putc(ch, Yap_logfile); + return (putc(ch, s)); } int Yap_DebugPuts(FILE *s, const char *sch) { - if (Yap_Option['l' - 96]) - (void) fputs(sch, Yap_logfile); - return fputs(sch, s); + if (Yap_Option['l' - 96]) + (void)fputs(sch, Yap_logfile); + return fputs(sch, s); } void Yap_DebugErrorPuts(const char *s) { Yap_DebugPuts(stderr, s); } void Yap_DebugPlWrite(Term t) { - if (t == 0) - fprintf(stderr, "NULL"); - Yap_plwrite(t, GLOBAL_Stream + 2, 0, 0, GLOBAL_MaxPriority); + if (t == 0) + fprintf(stderr, "NULL"); + Yap_plwrite(t, GLOBAL_Stream + 2, 0, 0, GLOBAL_MaxPriority); } void Yap_DebugPlWriteln(Term t) { - CACHE_REGS - if (t == 0) - fprintf(stderr, "NULL"); - Yap_plwrite(t, NULL, 15, 0, GLOBAL_MaxPriority); - 
Yap_DebugPutc(GLOBAL_Stream[LOCAL_c_error_stream].file, '.'); - Yap_DebugPutc(GLOBAL_Stream[LOCAL_c_error_stream].file, 10); + CACHE_REGS + if (t == 0) + fprintf(stderr, "NULL"); + Yap_plwrite(t, NULL, 15, 0, GLOBAL_MaxPriority); + Yap_DebugPutc(GLOBAL_Stream[LOCAL_c_error_stream].file, '.'); + Yap_DebugPutc(GLOBAL_Stream[LOCAL_c_error_stream].file, 10); } void Yap_DebugErrorPutc(int c) { - CACHE_REGS - Yap_DebugPutc(GLOBAL_Stream[LOCAL_c_error_stream].file, c); + CACHE_REGS + Yap_DebugPutc(GLOBAL_Stream[LOCAL_c_error_stream].file, c); } void Yap_DebugWriteIndicator(PredEntry *ap) { - CACHE_REGS - Term tmod = ap->ModuleOfPred; - if (!tmod) - tmod = TermProlog; + CACHE_REGS + Term tmod = ap->ModuleOfPred; + if (!tmod) + tmod = TermProlog; #if THREADS - Yap_DebugPlWrite(MkIntegerTerm(worker_id)); - Yap_DebugPutc(stderr, ' '); + Yap_DebugPlWrite(MkIntegerTerm(worker_id)); + Yap_DebugPutc(stderr, ' '); #endif - Yap_DebugPutc(stderr, '>'); - Yap_DebugPutc(stderr, '\t'); - Yap_DebugPlWrite(tmod); - Yap_DebugPutc(stderr, ':'); - if (ap->ModuleOfPred == IDB_MODULE) { - Term t = Deref(ARG1); - if (IsAtomTerm(t)) { - Yap_DebugPlWrite(t); - } else if (IsIntegerTerm(t)) { - Yap_DebugPlWrite(t); - } else { - Functor f = FunctorOfTerm(t); - Atom At = NameOfFunctor(f); - Yap_DebugPlWrite(MkAtomTerm(At)); - Yap_DebugPutc(stderr, '/'); - Yap_DebugPlWrite(MkIntegerTerm(ArityOfFunctor(f))); - } + Yap_DebugPutc(stderr, '>'); + Yap_DebugPutc(stderr, '\t'); + Yap_DebugPlWrite(tmod); + Yap_DebugPutc(stderr, ':'); + if (ap->ModuleOfPred == IDB_MODULE) { + Term t = Deref(ARG1); + if (IsAtomTerm(t)) { + Yap_DebugPlWrite(t); + } else if (IsIntegerTerm(t)) { + Yap_DebugPlWrite(t); } else { - if (ap->ArityOfPE == 0) { - Atom At = (Atom) ap->FunctorOfPred; - Yap_DebugPlWrite(MkAtomTerm(At)); - } else { - Functor f = ap->FunctorOfPred; - Atom At = NameOfFunctor(f); - Yap_DebugPlWrite(MkAtomTerm(At)); - Yap_DebugPutc(stderr, '/'); - Yap_DebugPlWrite(MkIntegerTerm(ArityOfFunctor(f))); - } + 
Functor f = FunctorOfTerm(t); + Atom At = NameOfFunctor(f); + Yap_DebugPlWrite(MkAtomTerm(At)); + Yap_DebugPutc(stderr, '/'); + Yap_DebugPlWrite(MkIntegerTerm(ArityOfFunctor(f))); } + } else { + if (ap->ArityOfPE == 0) { + Atom At = (Atom)ap->FunctorOfPred; + Yap_DebugPlWrite(MkAtomTerm(At)); + } else { + Functor f = ap->FunctorOfPred; + Atom At = NameOfFunctor(f); + Yap_DebugPlWrite(MkAtomTerm(At)); + Yap_DebugPutc(stderr, '/'); + Yap_DebugPlWrite(MkIntegerTerm(ArityOfFunctor(f))); + } + } - Yap_DebugPutc(stderr, '\n'); + Yap_DebugPutc(stderr, '\n'); } /* static */ int FilePutc(int sno, int ch) { - StreamDesc *s = &GLOBAL_Stream[sno]; + StreamDesc *s = &GLOBAL_Stream[sno]; #if MAC || _MSC_VER - if (ch == 10) { - ch = '\n'; - } + if (ch == 10) { + ch = '\n'; + } #endif - putc(ch, s->file); + putc(ch, s->file); #if MAC || _MSC_VER - if (ch == 10) { - fflush(s->file); - } + if (ch == 10) { + fflush(s->file); + } #endif - count_output_char(ch, s); - return ((int) ch); + count_output_char(ch, s); + return ((int)ch); } static int NullPutc(int sno, int ch) { - StreamDesc *s = &GLOBAL_Stream[sno]; + StreamDesc *s = &GLOBAL_Stream[sno]; #if MAC || _MSC_VER - if (ch == 10) { - ch = '\n'; - } + if (ch == 10) { + ch = '\n'; + } #endif - count_output_char(ch, s); - return ((int) ch); + count_output_char(ch, s); + return ((int)ch); } int ResetEOF(StreamDesc *s) { - if (s->status & Eof_Error_Stream_f) { - Atom name = s->name; - // Yap_CloseStream(s - GLOBAL_Stream); - Yap_Error(PERMISSION_ERROR_INPUT_PAST_END_OF_STREAM, MkAtomTerm(name), - "GetC"); - return FALSE; - } else if (s->status & Reset_Eof_Stream_f) { - s->status &= ~Push_Eof_Stream_f; - /* reset the eof indicator on file */ - if (feof(s->file)) - clearerr(s->file); - /* reset our function for reading input */ - Yap_DefaultStreamOps(s); - /* next, reset our own error indicator */ - s->status &= ~Eof_Stream_f; - /* try reading again */ - return TRUE; - } else { - s->status |= Past_Eof_Stream_f; - return FALSE; - } + if 
(s->status & Eof_Error_Stream_f) { + Atom name = s->name; + // Yap_CloseStream(s - GLOBAL_Stream); + Yap_Error(PERMISSION_ERROR_INPUT_PAST_END_OF_STREAM, MkAtomTerm(name), + "GetC"); + return FALSE; + } else if (s->status & Reset_Eof_Stream_f) { + s->status &= ~Push_Eof_Stream_f; + /* reset the eof indicator on file */ + if (feof(s->file)) + clearerr(s->file); + /* reset our function for reading input */ + Yap_DefaultStreamOps(s); + /* next, reset our own error indicator */ + s->status &= ~Eof_Stream_f; + /* try reading again */ + return TRUE; + } else { + s->status |= Past_Eof_Stream_f; + return FALSE; + } } /* handle reading from a stream after having found an EOF */ static int EOFWGetc(int sno) { - register StreamDesc *s = &GLOBAL_Stream[sno]; + register StreamDesc *s = &GLOBAL_Stream[sno]; - if (s->status & Push_Eof_Stream_f) { - /* ok, we have pushed an EOF, send it away */ - s->status &= ~Push_Eof_Stream_f; - return EOF; - } - if (ResetEOF(s)) { - Yap_DefaultStreamOps(s); - return (s->stream_wgetc(sno)); - } + if (s->status & Push_Eof_Stream_f) { + /* ok, we have pushed an EOF, send it away */ + s->status &= ~Push_Eof_Stream_f; return EOF; + } + if (ResetEOF(s)) { + Yap_DefaultStreamOps(s); + return (s->stream_wgetc(sno)); + } + return EOF; } static int EOFGetc(int sno) { - register StreamDesc *s = &GLOBAL_Stream[sno]; + register StreamDesc *s = &GLOBAL_Stream[sno]; - if (s->status & Push_Eof_Stream_f) { - /* ok, we have pushed an EOF, send it away */ - s->status &= ~Push_Eof_Stream_f; - ResetEOF(s); - return EOF; - } - if (ResetEOF(s)) { - Yap_DefaultStreamOps(s); - return s->stream_getc(sno); - } + if (s->status & Push_Eof_Stream_f) { + /* ok, we have pushed an EOF, send it away */ + s->status &= ~Push_Eof_Stream_f; + ResetEOF(s); return EOF; + } + if (ResetEOF(s)) { + Yap_DefaultStreamOps(s); + return s->stream_getc(sno); + } + return EOF; } /* check if we read a LOCAL_newline or an EOF */ int console_post_process_eof(StreamDesc *s) { - CACHE_REGS - if 
(!ResetEOF(s)) { - s->status |= Eof_Stream_f; - s->stream_getc = EOFGetc; - s->stream_wgetc = EOFWGetc; - s->stream_wgetc_for_read = EOFWGetc; - LOCAL_newline = true; - } - return EOFCHAR; + CACHE_REGS + if (!ResetEOF(s)) { + s->status |= Eof_Stream_f; + s->stream_getc = EOFGetc; + s->stream_wgetc = EOFWGetc; + s->stream_wgetc_for_read = EOFWGetc; + LOCAL_newline = true; + } + return EOFCHAR; } /* check if we read a newline or an EOF */ int post_process_read_wchar(int ch, size_t n, StreamDesc *s) { - if (ch == EOF) { - return post_process_weof(s); - } + if (ch == EOF) { + return post_process_weof(s); + } #if DEBUG - if (GLOBAL_Option[1]) { - static int v; - fprintf(stderr, "%d %C\n", v, ch); - v++; - } + if (GLOBAL_Option[1]) { + static int v; + fprintf(stderr, "%d %C\n", v, ch); + v++; + } #endif - s->charcount += n; - s->linepos += n; - if (ch == '\n') { - ++s->linecount; - s->linepos = 0; - /* don't convert if the stream is binary */ - if (!(s->status & Binary_Stream_f)) - ch = 10; - } - return ch; + s->charcount += n; + s->linepos += n; + if (ch == '\n') { + ++s->linecount; + s->linepos = 0; + /* don't convert if the stream is binary */ + if (!(s->status & Binary_Stream_f)) + ch = 10; + } + return ch; } int post_process_weof(StreamDesc *s) { - if (!ResetEOF(s)) { - s->status |= Eof_Stream_f; - s->stream_wgetc = EOFWGetc; - s->stream_getc = EOFGetc; - s->stream_wgetc_for_read = EOFWGetc; - } - return EOFCHAR; + if (!ResetEOF(s)) { + s->status |= Eof_Stream_f; + s->stream_wgetc = EOFWGetc; + s->stream_getc = EOFGetc; + s->stream_wgetc_for_read = EOFWGetc; + } + return EOFCHAR; } -void *Yap_RepStreamFromId(int sno) { return GLOBAL_Stream+(sno); } +void *Yap_RepStreamFromId(int sno) { return GLOBAL_Stream + (sno); } /** * caled after EOF found a peek, it just calls console_post_process to @@ -706,14 +712,14 @@ int EOFWPeek(int sno) { return EOFCHAR; } It could be made more efficient by doing our own buffering and avoiding post_process_read_char, something to think 
about */ int PlGetc(int sno) { - StreamDesc *s = &GLOBAL_Stream[sno]; - return fgetc(s->file); + StreamDesc *s = &GLOBAL_Stream[sno]; + return fgetc(s->file); } // layered version static inline int get_wchar_from_file(int sno) { - return post_process_read_wchar(fgetwc(GLOBAL_Stream[sno].file), 1, - GLOBAL_Stream + sno); + return post_process_read_wchar(fgetwc(GLOBAL_Stream[sno].file), 1, + GLOBAL_Stream + sno); } #ifndef MB_LEN_MAX @@ -721,200 +727,200 @@ static inline int get_wchar_from_file(int sno) { #endif static int handle_write_encoding_error(int sno, wchar_t ch) { - if (GLOBAL_Stream[sno].status & RepError_Xml_f) { - /* use HTML/XML encoding in ASCII */ - int i = ch, digits = 1; - GLOBAL_Stream[sno].stream_putc(sno, '&'); - GLOBAL_Stream[sno].stream_putc(sno, '#'); - while (digits < i) - digits *= 10; - if (digits > i) - digits /= 10; - while (i) { - GLOBAL_Stream[sno].stream_putc(sno, i / digits); - i %= 10; - digits /= 10; - } - GLOBAL_Stream[sno].stream_putc(sno, ';'); - return ch; - } else if (GLOBAL_Stream[sno].status & RepError_Prolog_f) { - /* write quoted */ - GLOBAL_Stream[sno].stream_putc(sno, '\\'); - GLOBAL_Stream[sno].stream_putc(sno, 'u'); - GLOBAL_Stream[sno].stream_putc(sno, ch >> 24); - GLOBAL_Stream[sno].stream_putc(sno, 256 & (ch >> 16)); - GLOBAL_Stream[sno].stream_putc(sno, 256 & (ch >> 8)); - GLOBAL_Stream[sno].stream_putc(sno, 256 & ch); - return ch; - } else { - CACHE_REGS - Yap_Error(REPRESENTATION_ERROR_CHARACTER, MkIntegerTerm(ch), - "charater %ld cannot be encoded in stream %d", - (unsigned long int) ch, sno); - return -1; + if (GLOBAL_Stream[sno].status & RepError_Xml_f) { + /* use HTML/XML encoding in ASCII */ + int i = ch, digits = 1; + GLOBAL_Stream[sno].stream_putc(sno, '&'); + GLOBAL_Stream[sno].stream_putc(sno, '#'); + while (digits < i) + digits *= 10; + if (digits > i) + digits /= 10; + while (i) { + GLOBAL_Stream[sno].stream_putc(sno, i / digits); + i %= 10; + digits /= 10; } + GLOBAL_Stream[sno].stream_putc(sno, ';'); + 
return ch; + } else if (GLOBAL_Stream[sno].status & RepError_Prolog_f) { + /* write quoted */ + GLOBAL_Stream[sno].stream_putc(sno, '\\'); + GLOBAL_Stream[sno].stream_putc(sno, 'u'); + GLOBAL_Stream[sno].stream_putc(sno, ch >> 24); + GLOBAL_Stream[sno].stream_putc(sno, 256 & (ch >> 16)); + GLOBAL_Stream[sno].stream_putc(sno, 256 & (ch >> 8)); + GLOBAL_Stream[sno].stream_putc(sno, 256 & ch); + return ch; + } else { + CACHE_REGS + Yap_Error(REPRESENTATION_ERROR_CHARACTER, MkIntegerTerm(ch), + "charater %ld cannot be encoded in stream %d", + (unsigned long int)ch, sno); + return -1; + } } int put_wchar(int sno, wchar_t ch) { - /* pass the bucck if we can */ - switch (GLOBAL_Stream[sno].encoding) { - case ENC_OCTET: - return GLOBAL_Stream[sno].stream_putc(sno, ch); - case ENC_ISO_LATIN1: - if (ch >= 0xff) { - return handle_write_encoding_error(sno, ch); - } - return GLOBAL_Stream[sno].stream_putc(sno, ch); - case ENC_ISO_ASCII: - if (ch >= 0x80) { - return handle_write_encoding_error(sno, ch); - } - return GLOBAL_Stream[sno].stream_putc(sno, ch); - case ENC_ISO_ANSI: { - char buf[MB_LEN_MAX]; - mbstate_t mbstate; - int n; - - memset((void *) &mbstate, 0, sizeof(mbstate_t)); - if ((n = wcrtomb(buf, ch, &mbstate)) < 0) { - /* error */ - GLOBAL_Stream[sno].stream_putc(sno, ch); - return -1; - } else { - int i; - - for (i = 0; i < n; i++) { - GLOBAL_Stream[sno].stream_putc(sno, buf[i]); - } - return ch; - } - case ENC_ISO_UTF8: - if (ch < 0x80) { - GLOBAL_Stream[sno].stream_putc(sno, ch); - } else if (ch < 0x800) { - GLOBAL_Stream[sno].stream_putc(sno, 0xC0 | ch >> 6); - GLOBAL_Stream[sno].stream_putc(sno, 0x80 | (ch & 0x3F)); - } else if (ch < 0x10000) { - GLOBAL_Stream[sno].stream_putc(sno, 0xE0 | ch >> 12); - GLOBAL_Stream[sno].stream_putc(sno, 0x80 | (ch >> 6 & 0x3F)); - GLOBAL_Stream[sno].stream_putc(sno, 0x80 | (ch & 0x3F)); - } else if (ch < 0x200000) { - GLOBAL_Stream[sno].stream_putc(sno, 0xF0 | ch >> 18); - GLOBAL_Stream[sno].stream_putc(sno, 0x80 | (ch >> 12 & 
0x3F)); - GLOBAL_Stream[sno].stream_putc(sno, 0x80 | (ch >> 6 & 0x3F)); - GLOBAL_Stream[sno].stream_putc(sno, 0x80 | (ch & 0x3F)); - } else { - /* should never happen */ - return -1; - } - return ch; - break; - case ENC_UTF16_LE: { - if (ch < 0x10000) { - GLOBAL_Stream[sno].stream_putc(sno, (ch & 0xff)); - GLOBAL_Stream[sno].stream_putc(sno, (ch >> 8)); - } else { - // computations - uint16_t ich = ch; - uint16_t lead = LEAD_OFFSET + (ich >> 10); - uint16_t trail = 0xDC00 + (ich & 0x3FF); - - GLOBAL_Stream[sno].stream_putc(sno, (trail & 0xff)); - GLOBAL_Stream[sno].stream_putc(sno, (trail >> 8)); - GLOBAL_Stream[sno].stream_putc(sno, (lead & 0xff)); - GLOBAL_Stream[sno].stream_putc(sno, (lead >> 8)); - } - return ch; - } - case ENC_UTF16_BE: { - // computations - if (ch < 0x10000) { - GLOBAL_Stream[sno].stream_putc(sno, (ch >> 8)); - GLOBAL_Stream[sno].stream_putc(sno, (ch & 0xff)); - } else { - uint16_t lead = (uint16_t) LEAD_OFFSET + ((uint16_t) ch >> 10); - uint16_t trail = 0xDC00 + ((uint16_t) ch & 0x3FF); - - GLOBAL_Stream[sno].stream_putc(sno, (lead >> 8)); - GLOBAL_Stream[sno].stream_putc(sno, (lead & 0xff)); - GLOBAL_Stream[sno].stream_putc(sno, (trail >> 8)); - GLOBAL_Stream[sno].stream_putc(sno, (trail & 0xff)); - } - return ch; - } - case ENC_UCS2_LE: { - if (ch >= 0x10000) { - return 0; - } - GLOBAL_Stream[sno].stream_putc(sno, (ch & 0xff)); - GLOBAL_Stream[sno].stream_putc(sno, (ch >> 8)); - return ch; - } - case ENC_UCS2_BE: { - // computations - if (ch < 0x10000) { - GLOBAL_Stream[sno].stream_putc(sno, (ch >> 8)); - GLOBAL_Stream[sno].stream_putc(sno, (ch & 0xff)); - return ch; - } else { - return 0; - } - } - - case ENC_ISO_UTF32_BE: - GLOBAL_Stream[sno].stream_putc(sno, (ch >> 24) & 0xff); - GLOBAL_Stream[sno].stream_putc(sno, (ch >> 16) & 0xff); - GLOBAL_Stream[sno].stream_putc(sno, (ch >> 8) & 0xff); - GLOBAL_Stream[sno].stream_putc(sno, ch & 0xff); - return ch; - case ENC_ISO_UTF32_LE: - GLOBAL_Stream[sno].stream_putc(sno, ch & 0xff); - 
GLOBAL_Stream[sno].stream_putc(sno, (ch >> 8) & 0xff); - GLOBAL_Stream[sno].stream_putc(sno, (ch >> 16) & 0xff); - GLOBAL_Stream[sno].stream_putc(sno, (ch >> 24) & 0xff); - return ch; - } + /* pass the bucck if we can */ + switch (GLOBAL_Stream[sno].encoding) { + case ENC_OCTET: + return GLOBAL_Stream[sno].stream_putc(sno, ch); + case ENC_ISO_LATIN1: + if (ch >= 0xff) { + return handle_write_encoding_error(sno, ch); } - return -1; + return GLOBAL_Stream[sno].stream_putc(sno, ch); + case ENC_ISO_ASCII: + if (ch >= 0x80) { + return handle_write_encoding_error(sno, ch); + } + return GLOBAL_Stream[sno].stream_putc(sno, ch); + case ENC_ISO_ANSI: { + char buf[MB_LEN_MAX]; + mbstate_t mbstate; + int n; + + memset((void *)&mbstate, 0, sizeof(mbstate_t)); + if ((n = wcrtomb(buf, ch, &mbstate)) < 0) { + /* error */ + GLOBAL_Stream[sno].stream_putc(sno, ch); + return -1; + } else { + int i; + + for (i = 0; i < n; i++) { + GLOBAL_Stream[sno].stream_putc(sno, buf[i]); + } + return ch; + } + case ENC_ISO_UTF8: + if (ch < 0x80) { + GLOBAL_Stream[sno].stream_putc(sno, ch); + } else if (ch < 0x800) { + GLOBAL_Stream[sno].stream_putc(sno, 0xC0 | ch >> 6); + GLOBAL_Stream[sno].stream_putc(sno, 0x80 | (ch & 0x3F)); + } else if (ch < 0x10000) { + GLOBAL_Stream[sno].stream_putc(sno, 0xE0 | ch >> 12); + GLOBAL_Stream[sno].stream_putc(sno, 0x80 | (ch >> 6 & 0x3F)); + GLOBAL_Stream[sno].stream_putc(sno, 0x80 | (ch & 0x3F)); + } else if (ch < 0x200000) { + GLOBAL_Stream[sno].stream_putc(sno, 0xF0 | ch >> 18); + GLOBAL_Stream[sno].stream_putc(sno, 0x80 | (ch >> 12 & 0x3F)); + GLOBAL_Stream[sno].stream_putc(sno, 0x80 | (ch >> 6 & 0x3F)); + GLOBAL_Stream[sno].stream_putc(sno, 0x80 | (ch & 0x3F)); + } else { + /* should never happen */ + return -1; + } + return ch; + break; + case ENC_UTF16_LE: { + if (ch < 0x10000) { + GLOBAL_Stream[sno].stream_putc(sno, (ch & 0xff)); + GLOBAL_Stream[sno].stream_putc(sno, (ch >> 8)); + } else { + // computations + uint16_t ich = ch; + uint16_t lead = 
LEAD_OFFSET + (ich >> 10); + uint16_t trail = 0xDC00 + (ich & 0x3FF); + + GLOBAL_Stream[sno].stream_putc(sno, (trail & 0xff)); + GLOBAL_Stream[sno].stream_putc(sno, (trail >> 8)); + GLOBAL_Stream[sno].stream_putc(sno, (lead & 0xff)); + GLOBAL_Stream[sno].stream_putc(sno, (lead >> 8)); + } + return ch; + } + case ENC_UTF16_BE: { + // computations + if (ch < 0x10000) { + GLOBAL_Stream[sno].stream_putc(sno, (ch >> 8)); + GLOBAL_Stream[sno].stream_putc(sno, (ch & 0xff)); + } else { + uint16_t lead = (uint16_t)LEAD_OFFSET + ((uint16_t)ch >> 10); + uint16_t trail = 0xDC00 + ((uint16_t)ch & 0x3FF); + + GLOBAL_Stream[sno].stream_putc(sno, (lead >> 8)); + GLOBAL_Stream[sno].stream_putc(sno, (lead & 0xff)); + GLOBAL_Stream[sno].stream_putc(sno, (trail >> 8)); + GLOBAL_Stream[sno].stream_putc(sno, (trail & 0xff)); + } + return ch; + } + case ENC_UCS2_LE: { + if (ch >= 0x10000) { + return 0; + } + GLOBAL_Stream[sno].stream_putc(sno, (ch & 0xff)); + GLOBAL_Stream[sno].stream_putc(sno, (ch >> 8)); + return ch; + } + case ENC_UCS2_BE: { + // computations + if (ch < 0x10000) { + GLOBAL_Stream[sno].stream_putc(sno, (ch >> 8)); + GLOBAL_Stream[sno].stream_putc(sno, (ch & 0xff)); + return ch; + } else { + return 0; + } + } + + case ENC_ISO_UTF32_BE: + GLOBAL_Stream[sno].stream_putc(sno, (ch >> 24) & 0xff); + GLOBAL_Stream[sno].stream_putc(sno, (ch >> 16) & 0xff); + GLOBAL_Stream[sno].stream_putc(sno, (ch >> 8) & 0xff); + GLOBAL_Stream[sno].stream_putc(sno, ch & 0xff); + return ch; + case ENC_ISO_UTF32_LE: + GLOBAL_Stream[sno].stream_putc(sno, ch & 0xff); + GLOBAL_Stream[sno].stream_putc(sno, (ch >> 8) & 0xff); + GLOBAL_Stream[sno].stream_putc(sno, (ch >> 16) & 0xff); + GLOBAL_Stream[sno].stream_putc(sno, (ch >> 24) & 0xff); + return ch; + } + } + return -1; } /* used by user-code to read characters from the current input stream */ int Yap_PlGetchar(void) { - CACHE_REGS - return ( - GLOBAL_Stream[LOCAL_c_input_stream].stream_getc(LOCAL_c_input_stream)); + CACHE_REGS + return ( + 
GLOBAL_Stream[LOCAL_c_input_stream].stream_getc(LOCAL_c_input_stream)); } int Yap_PlGetWchar(void) { - CACHE_REGS - return get_wchar(LOCAL_c_input_stream); + CACHE_REGS + return get_wchar(LOCAL_c_input_stream); } /* avoid using a variable to call a function */ int Yap_PlFGetchar(void) { - CACHE_REGS - return (PlGetc(LOCAL_c_input_stream)); + CACHE_REGS + return (PlGetc(LOCAL_c_input_stream)); } Term Yap_MkStream(int n) { - Term t[1]; - t[0] = MkIntTerm(n); - return (Yap_MkApplTerm(FunctorStream, 1, t)); + Term t[1]; + t[0] = MkIntTerm(n); + return (Yap_MkApplTerm(FunctorStream, 1, t)); } /* given a stream index, get the corresponding fd */ Int GetStreamFd(int sno) { #if HAVE_SOCKET - if (GLOBAL_Stream[sno].status & Socket_Stream_f) { - return (GLOBAL_Stream[sno].u.socket.fd); - } else + if (GLOBAL_Stream[sno].status & Socket_Stream_f) { + return (GLOBAL_Stream[sno].u.socket.fd); + } else #endif - if (GLOBAL_Stream[sno].status & Pipe_Stream_f) { - return (GLOBAL_Stream[sno].u.pipe.fd); - } else if (GLOBAL_Stream[sno].status & InMemory_Stream_f) { - return (-1); - } - return (fileno(GLOBAL_Stream[sno].file)); + if (GLOBAL_Stream[sno].status & Pipe_Stream_f) { + return (GLOBAL_Stream[sno].u.pipe.fd); + } else if (GLOBAL_Stream[sno].status & InMemory_Stream_f) { + return (-1); + } + return (fileno(GLOBAL_Stream[sno].file)); } Int Yap_GetStreamFd(int sno) { return GetStreamFd(sno); } @@ -922,230 +928,234 @@ Int Yap_GetStreamFd(int sno) { return GetStreamFd(sno); } static int binary_file(const char *file_name) { #if HAVE_STAT #if _MSC_VER || defined(__MINGW32__) - struct _stat ss; - if (_stat(file_name, &ss) != 0) + struct _stat ss; + if (_stat(file_name, &ss) != 0) #else - struct stat ss; - if (stat(file_name, &ss) != 0) + struct stat ss; + if (stat(file_name, &ss) != 0) #endif - { - /* ignore errors while checking a file */ - return false; - } - return (S_ISDIR(ss.st_mode)); + { + /* ignore errors while checking a file */ + return false; + } + return 
(S_ISDIR(ss.st_mode)); #else - return (FALSE); + return (FALSE); #endif } static int write_bom(int sno, StreamDesc *st) { - /* dump encoding */ - switch (st->encoding) { - case ENC_ISO_UTF8: - if (st->stream_putc(sno, 0xEF) < 0) - return false; - if (st->stream_putc(sno, 0xBB) < 0) - return false; - if (st->stream_putc(sno, 0xBF) < 0) - return false; - st->status |= HAS_BOM_f; - return true; - case ENC_UTF16_BE: - case ENC_UCS2_BE: - if (st->stream_putc(sno, 0xFE) < 0) - return false; - if (st->stream_putc(sno, 0xFF) < 0) - return false; - st->status |= HAS_BOM_f; - return true; - case ENC_UTF16_LE: - case ENC_UCS2_LE: - if (st->stream_putc(sno, 0xFF) < 0) - return false; - if (st->stream_putc(sno, 0xFE) < 0) - return false; - st->status |= HAS_BOM_f; - return true; - case ENC_ISO_UTF32_BE: - if (st->stream_putc(sno, 0x00) < 0) - return false; - if (st->stream_putc(sno, 0x00) < 0) - return false; - if (st->stream_putc(sno, 0xFE) < 0) - return false; - if (st->stream_putc(sno, 0xFF) < 0) - return false; - st->status |= HAS_BOM_f; - return true; - case ENC_ISO_UTF32_LE: - if (st->stream_putc(sno, 0xFF) < 0) - return false; - if (st->stream_putc(sno, 0xFE) < 0) - return false; - if (st->stream_putc(sno, 0x00) < 0) - return false; - if (st->stream_putc(sno, 0x00) < 0) - return false; - st->status |= HAS_BOM_f; - return true; - default: - return true; - } + /* dump encoding */ + switch (st->encoding) { + case ENC_ISO_UTF8: + if (st->stream_putc(sno, 0xEF) < 0) + return false; + if (st->stream_putc(sno, 0xBB) < 0) + return false; + if (st->stream_putc(sno, 0xBF) < 0) + return false; + st->status |= HAS_BOM_f; + return true; + case ENC_UTF16_BE: + case ENC_UCS2_BE: + if (st->stream_putc(sno, 0xFE) < 0) + return false; + if (st->stream_putc(sno, 0xFF) < 0) + return false; + st->status |= HAS_BOM_f; + return true; + case ENC_UTF16_LE: + case ENC_UCS2_LE: + if (st->stream_putc(sno, 0xFF) < 0) + return false; + if (st->stream_putc(sno, 0xFE) < 0) + return false; + st->status 
|= HAS_BOM_f; + return true; + case ENC_ISO_UTF32_BE: + if (st->stream_putc(sno, 0x00) < 0) + return false; + if (st->stream_putc(sno, 0x00) < 0) + return false; + if (st->stream_putc(sno, 0xFE) < 0) + return false; + if (st->stream_putc(sno, 0xFF) < 0) + return false; + st->status |= HAS_BOM_f; + return true; + case ENC_ISO_UTF32_LE: + if (st->stream_putc(sno, 0xFF) < 0) + return false; + if (st->stream_putc(sno, 0xFE) < 0) + return false; + if (st->stream_putc(sno, 0x00) < 0) + return false; + if (st->stream_putc(sno, 0x00) < 0) + return false; + st->status |= HAS_BOM_f; + return true; + default: + return true; + } } static void check_bom(int sno, StreamDesc *st) { - int ch1, ch2, ch3, ch4; -if (st-> file == NULL) { - PlIOError(SYSTEM_ERROR_INTERNAL, Yap_MkStream(sno), "YAP does not support BOM n %x type of files", st->status); return; -} - ch1 = st->stream_getc(sno); - switch (ch1) { - case 0x00: { - ch2 = st->stream_getc(sno); - if (ch2 != 0x00) { - ungetc(ch1, st->file); - ungetc(ch2, st->file); - return; - } else { - ch3 = st->stream_getc(sno); - if (ch3 == EOFCHAR || ch3 != 0xFE) { - ungetc(ch1, st->file); - ungetc(ch2, st->file); - ungetc(ch3, st->file); - return; - } else { - ch4 = st->stream_getc(sno); - if (ch4 == EOFCHAR || ch3 != 0xFF) { - ungetc(ch1, st->file); - ungetc(ch2, st->file); - ungetc(ch3, st->file); - ungetc(ch4, st->file); - return; - } else { - st->status |= HAS_BOM_f; - st->encoding = ENC_ISO_UTF32_BE; - return; - } - } - } + int ch1, ch2, ch3, ch4; + if (st->file == NULL) { + PlIOError(SYSTEM_ERROR_INTERNAL, Yap_MkStream(sno), + "YAP does not support BOM n %x type of files", st->status); + return; + } + ch1 = st->stream_getc(sno); + switch (ch1) { + case 0x00: { + ch2 = st->stream_getc(sno); + if (ch2 != 0x00) { + ungetc(ch1, st->file); + ungetc(ch2, st->file); + return; + } else { + ch3 = st->stream_getc(sno); + if (ch3 == EOFCHAR || ch3 != 0xFE) { + ungetc(ch1, st->file); + ungetc(ch2, st->file); + ungetc(ch3, st->file); + return; + } 
else { + ch4 = st->stream_getc(sno); + if (ch4 == EOFCHAR || ch3 != 0xFF) { + ungetc(ch1, st->file); + ungetc(ch2, st->file); + ungetc(ch3, st->file); + ungetc(ch4, st->file); + return; + } else { + st->status |= HAS_BOM_f; + st->encoding = ENC_ISO_UTF32_BE; + return; } - case 0xFE: { - ch2 = fgetc(st->file); - if (ch2 != 0xFF) { - ungetc(ch1, st->file); - ungetc(ch2, st->file); - return; - } else { - st->status |= HAS_BOM_f; - st->encoding = ENC_UTF16_BE; - return; - } - } - case 0xFF: { - ch2 = fgetc(st->file); - if (ch2 != 0xFE) { - ungetc(ch1, st->file); - ungetc(ch2, st->file); - return; - } else { - ch3 = fgetc(st->file); - if (ch3 != 0x00) { - ungetc(ch3, st->file); - } else { - ch4 = fgetc(st->file); - if (ch4 == 0x00) { - st->status |= HAS_BOM_f; - st->encoding = ENC_ISO_UTF32_LE; - return; - } else { - ungetc(ch4, st->file); - ungetc(0x00, st->file); - } - } - } - st->status |= HAS_BOM_f; - st->encoding = ENC_UTF16_LE; - return; - } - case 0xEF: - ch2 = fgetc(st->file); - if (ch2 != 0xBB) { - ungetc(ch1, st->file); - ungetc(ch2, st->file); - return; - } else { - ch3 = fgetc(st->file); - if (ch3 != 0xBF) { - ungetc(ch1, st->file); - ungetc(ch2, st->file); - ungetc(ch3, st->file); - return; - } else { - st->status |= HAS_BOM_f; - st->encoding = ENC_ISO_UTF8; - return; - } - } - default: - ungetc(ch1, st->file); + } } + } + case 0xFE: { + ch2 = fgetc(st->file); + if (ch2 != 0xFF) { + ungetc(ch1, st->file); + ungetc(ch2, st->file); + return; + } else { + st->status |= HAS_BOM_f; + st->encoding = ENC_UTF16_BE; + return; + } + } + case 0xFF: { + ch2 = fgetc(st->file); + if (ch2 != 0xFE) { + ungetc(ch1, st->file); + ungetc(ch2, st->file); + return; + } else { + ch3 = fgetc(st->file); + if (ch3 != 0x00) { + ungetc(ch3, st->file); + } else { + ch4 = fgetc(st->file); + if (ch4 == 0x00) { + st->status |= HAS_BOM_f; + st->encoding = ENC_ISO_UTF32_LE; + return; + } else { + ungetc(ch4, st->file); + ungetc(0x00, st->file); + } + } + } + st->status |= HAS_BOM_f; + 
st->encoding = ENC_UTF16_LE; + return; + } + case 0xEF: + ch2 = fgetc(st->file); + if (ch2 != 0xBB) { + ungetc(ch1, st->file); + ungetc(ch2, st->file); + return; + } else { + ch3 = fgetc(st->file); + if (ch3 != 0xBF) { + ungetc(ch1, st->file); + ungetc(ch2, st->file); + ungetc(ch3, st->file); + return; + } else { + st->status |= HAS_BOM_f; + st->encoding = ENC_ISO_UTF8; + return; + } + } + default: + ungetc(ch1, st->file); + } } - bool Yap_initStream(int sno, FILE *fd, const char *name, Term file_name, +bool Yap_initStream(int sno, FILE *fd, const char *name, Term file_name, encoding_t encoding, stream_flags_t flags, Atom open_mode, void *vfs) { - StreamDesc *st = &GLOBAL_Stream[sno]; - st->status = flags; + StreamDesc *st = &GLOBAL_Stream[sno]; + st->status = flags; - st->vfs = vfs; - st->charcount = 0; - st->linecount = 1; - if (flags & Binary_Stream_f) { - st->encoding = ENC_OCTET; - } else { - st->encoding = encoding; - } + st->vfs = vfs; + st->recbs = NULL; + st->charcount = 0; + st->linecount = 1; + if (flags & Binary_Stream_f) { + st->encoding = ENC_OCTET; + } else { + st->encoding = encoding; + } - if (name == NULL) { - char buf[YAP_FILENAME_MAX + 1]; - memset(buf, 0, YAP_FILENAME_MAX + 1); - name = Yap_guessFileName(fd, sno, buf, YAP_FILENAME_MAX); - if (name) - st->name = Yap_LookupAtom(name); - } - st->user_name = file_name; - st->file = fd; - st->linepos = 0; - Yap_DefaultStreamOps(st); - return true; + if (name == NULL) { + char buf[YAP_FILENAME_MAX + 1]; + memset(buf, 0, YAP_FILENAME_MAX + 1); + name = Yap_guessFileName(fd, sno, buf, YAP_FILENAME_MAX); + if (name) + st->name = Yap_LookupAtom(name); + } + st->user_name = file_name; + st->file = fd; + st->linepos = 0; + Yap_DefaultStreamOps(st); + return true; } static bool open_header(int sno, Atom open_mode) { - if (open_mode == AtomWrite) { - const char *ptr; - const char s[] = "#!"; - int ch; + if (open_mode == AtomWrite) { + const char *ptr; + const char s[] = "#!"; + int ch; - ptr = s; - while 
((ch = *ptr++)) - GLOBAL_Stream[sno].stream_wputc(sno, ch); - const char *b = Yap_FindExecutable(); - ptr = b; - while ((ch = *ptr++)) - GLOBAL_Stream[sno].stream_wputc(sno, ch); - const char *l = " -L --\n\n YAP script\n#\n# .\n"; - ptr = l; - while ((ch = *ptr++)) - GLOBAL_Stream[sno].stream_wputc(sno, ch); + ptr = s; + while ((ch = *ptr++)) + GLOBAL_Stream[sno].stream_wputc(sno, ch); + const char *b = Yap_FindExecutable(); + ptr = b; + while ((ch = *ptr++)) + GLOBAL_Stream[sno].stream_wputc(sno, ch); + const char *l = " -L --\n\n YAP script\n#\n# .\n"; + ptr = l; + while ((ch = *ptr++)) + GLOBAL_Stream[sno].stream_wputc(sno, ch); - } else if (open_mode == AtomRead) { - // skip header - int ch; - while ((ch = Yap_peek(sno)) == '#') { - while ((ch = GLOBAL_Stream[sno].stream_wgetc(sno)) != 10 && ch != -1); - } + } else if (open_mode == AtomRead) { + // skip header + int ch; + while ((ch = Yap_peek(sno)) == '#') { + while ((ch = GLOBAL_Stream[sno].stream_wgetc(sno)) != 10 && ch != -1) + ; } - return true; + } + return true; } #define OPEN_DEFS() \ @@ -1165,9 +1175,7 @@ static bool open_header(int sno, Atom open_mode) { PAR("wait", booleanFlag, OPEN_WAIT), PAR(NULL, ok, OPEN_END) #define PAR(x, y, z) z -typedef enum open_enum_choices { - OPEN_DEFS() -} open_choices_t; +typedef enum open_enum_choices { OPEN_DEFS() } open_choices_t; #undef PAR @@ -1180,173 +1188,173 @@ static const param_t open_defs[] = {OPEN_DEFS()}; static Int do_open(Term file_name, Term t2, Term tlist USES_REGS) { /* '$open'(+File,+Mode,?Stream,-ReturnCode) */ - Atom open_mode; - int sno; - StreamDesc *st; - bool avoid_bom = false, needs_bom = false; - const char *fname; - char fbuf[FILENAME_MAX]; - stream_flags_t flags; - const char *s_encoding; - encoding_t encoding; - Term tenc; - char io_mode[8]; - // original file name - if (IsVarTerm(file_name)) { - Yap_Error(INSTANTIATION_ERROR, file_name, "open/3"); - return FALSE; - } - if (!IsAtomTerm(file_name)) { - if (IsStringTerm(file_name)) { - 
fname = (char *) StringOfTerm(file_name); - } else { - Yap_Error(DOMAIN_ERROR_SOURCE_SINK, file_name, "open/3"); - return FALSE; - } + Atom open_mode; + int sno; + StreamDesc *st; + bool avoid_bom = false, needs_bom = false; + const char *fname; + char fbuf[FILENAME_MAX]; + stream_flags_t flags; + const char *s_encoding; + encoding_t encoding; + Term tenc; + char io_mode[8]; + // original file name + if (IsVarTerm(file_name)) { + Yap_Error(INSTANTIATION_ERROR, file_name, "open/3"); + return FALSE; + } + if (!IsAtomTerm(file_name)) { + if (IsStringTerm(file_name)) { + fname = (char *)StringOfTerm(file_name); } else { - fname = RepAtom(AtomOfTerm(file_name))->StrOfAE; + Yap_Error(DOMAIN_ERROR_SOURCE_SINK, file_name, "open/3"); + return FALSE; } - // open mode - if (IsVarTerm(t2)) { - Yap_Error(INSTANTIATION_ERROR, t2, "open/3"); - return FALSE; - } - if (!IsAtomTerm(t2)) { - if (IsStringTerm(t2)) { - open_mode = Yap_LookupAtom(StringOfTerm(t2)); - } else { - Yap_Error(TYPE_ERROR_ATOM, t2, "open/3"); - return (FALSE); - } + } else { + fname = RepAtom(AtomOfTerm(file_name))->StrOfAE; + } + // open mode + if (IsVarTerm(t2)) { + Yap_Error(INSTANTIATION_ERROR, t2, "open/3"); + return FALSE; + } + if (!IsAtomTerm(t2)) { + if (IsStringTerm(t2)) { + open_mode = Yap_LookupAtom(StringOfTerm(t2)); } else { - open_mode = AtomOfTerm(t2); + Yap_Error(TYPE_ERROR_ATOM, t2, "open/3"); + return (FALSE); } - /* get options */ - xarg *args = Yap_ArgListToVector(tlist, open_defs, OPEN_END); - if (args == NULL) { - if (LOCAL_Error_TYPE != YAP_NO_ERROR) { - if (LOCAL_Error_TYPE == DOMAIN_ERROR_PROLOG_FLAG) - LOCAL_Error_TYPE = DOMAIN_ERROR_OPEN_OPTION; - Yap_Error(LOCAL_Error_TYPE, tlist, "option handling in open/3"); - } - return false; + } else { + open_mode = AtomOfTerm(t2); + } + /* get options */ + xarg *args = Yap_ArgListToVector(tlist, open_defs, OPEN_END); + if (args == NULL) { + if (LOCAL_Error_TYPE != YAP_NO_ERROR) { + if (LOCAL_Error_TYPE == DOMAIN_ERROR_PROLOG_FLAG) + 
LOCAL_Error_TYPE = DOMAIN_ERROR_OPEN_OPTION; + Yap_Error(LOCAL_Error_TYPE, tlist, "option handling in open/3"); } - /* done */ - flags = 0; - if (args[OPEN_ENCODING].used) { - tenc = args[OPEN_ENCODING].tvalue; - s_encoding = RepAtom(AtomOfTerm(tenc))->StrOfAE; - } else { - s_encoding = "default"; - } - // default encoding, no bom yet - encoding = enc_id(s_encoding, ENC_OCTET); - // only set encoding after getting BOM - bool ok = (args[OPEN_EXPAND_FILENAME].used - ? args[OPEN_EXPAND_FILENAME].tvalue == TermTrue - : false) || - trueGlobalPrologFlag(OPEN_EXPANDS_FILENAME_FLAG); - // expand file name? - fname = Yap_AbsoluteFile(fname, fbuf, ok); + return false; + } + /* done */ + flags = 0; + if (args[OPEN_ENCODING].used) { + tenc = args[OPEN_ENCODING].tvalue; + s_encoding = RepAtom(AtomOfTerm(tenc))->StrOfAE; + } else { + s_encoding = "default"; + } + // default encoding, no bom yet + encoding = enc_id(s_encoding, ENC_OCTET); + // only set encoding after getting BOM + bool ok = (args[OPEN_EXPAND_FILENAME].used + ? args[OPEN_EXPAND_FILENAME].tvalue == TermTrue + : false) || + trueGlobalPrologFlag(OPEN_EXPANDS_FILENAME_FLAG); + // expand file name? + fname = Yap_AbsoluteFile(fname, fbuf, ok); - if (!fname) { - PlIOError(EXISTENCE_ERROR_SOURCE_SINK, ARG1, NULL); - } + if (!fname) { + PlIOError(EXISTENCE_ERROR_SOURCE_SINK, ARG1, NULL); + } - // Skip scripts that start with !#/.. or similar - bool script = - (args[OPEN_SCRIPT].used ? args[OPEN_SCRIPT].tvalue == TermTrue : false); - // binary type - if (args[OPEN_TYPE].used) { - Term t = args[OPEN_TYPE].tvalue; - bool bin = (t == TermBinary); - if (bin) { + // Skip scripts that start with !#/.. or similar + bool script = + (args[OPEN_SCRIPT].used ? 
args[OPEN_SCRIPT].tvalue == TermTrue : false); + // binary type + if (args[OPEN_TYPE].used) { + Term t = args[OPEN_TYPE].tvalue; + bool bin = (t == TermBinary); + if (bin) { #ifdef _WIN32 - strncat(io_mode, "b", 8); + strncat(io_mode, "b", 8); #endif - flags |= Binary_Stream_f; - encoding = ENC_OCTET; - avoid_bom = true; - needs_bom = false; - } else if (t == TermText) { + flags |= Binary_Stream_f; + encoding = ENC_OCTET; + avoid_bom = true; + needs_bom = false; + } else if (t == TermText) { #ifdef _WIN32 - strncat(io_mode, "t", 8); + strncat(io_mode, "t", 8); #endif - /* note that this matters for UNICODE style conversions */ - } else { - Yap_Error(DOMAIN_ERROR_STREAM, tlist, - "type is ~a, must be one of binary or text", t); - } - } - // BOM mess - if (encoding == ENC_UTF16_BE || encoding == ENC_UTF16_LE || - encoding == ENC_UCS2_BE || encoding == ENC_UCS2_LE || - encoding == ENC_ISO_UTF32_BE || encoding == ENC_ISO_UTF32_LE) { - needs_bom = true; - } - if (args[OPEN_BOM].used) { - if (args[OPEN_BOM].tvalue == TermTrue) { - avoid_bom = false; - needs_bom = true; - } else if (args[OPEN_BOM].tvalue == TermFalse) { - avoid_bom = true; - needs_bom = false; - } - } - if (open_mode == AtomRead) { - strncpy(io_mode, "r", 8); - } else if (open_mode == AtomWrite) { - strncpy(io_mode, "w", 8); - } else if (open_mode == AtomAppend) { - strncpy(io_mode, "a", 8); + /* note that this matters for UNICODE style conversions */ } else { - Yap_Error(DOMAIN_ERROR_IO_MODE, MkAtomTerm(open_mode), "open/3"); - return false; + Yap_Error(DOMAIN_ERROR_STREAM, tlist, + "type is ~a, must be one of binary or text", t); } - if ((sno = Yap_OpenStream(fname, io_mode, file_name)) < 0) { -return false; + } + // BOM mess + if (encoding == ENC_UTF16_BE || encoding == ENC_UTF16_LE || + encoding == ENC_UCS2_BE || encoding == ENC_UCS2_LE || + encoding == ENC_ISO_UTF32_BE || encoding == ENC_ISO_UTF32_LE) { + needs_bom = true; + } + if (args[OPEN_BOM].used) { + if (args[OPEN_BOM].tvalue == TermTrue) { + 
avoid_bom = false; + needs_bom = true; + } else if (args[OPEN_BOM].tvalue == TermFalse) { + avoid_bom = true; + needs_bom = false; } - st = &GLOBAL_Stream[sno]; - st->user_name = file_name; - // user requested encoding? - if (args[OPEN_ALIAS].used) { - Atom al = AtomOfTerm(args[OPEN_ALIAS].tvalue); - if (!Yap_AddAlias(al, sno)) { - free(args); - return false; - } - } - st->name = Yap_LookupAtom(fname); - if (st - GLOBAL_Stream < 3) { - flags |= RepError_Prolog_f; + } + if (open_mode == AtomRead) { + strncpy(io_mode, "r", 8); + } else if (open_mode == AtomWrite) { + strncpy(io_mode, "w", 8); + } else if (open_mode == AtomAppend) { + strncpy(io_mode, "a", 8); + } else { + Yap_Error(DOMAIN_ERROR_IO_MODE, MkAtomTerm(open_mode), "open/3"); + return false; + } + if ((sno = Yap_OpenStream(fname, io_mode, file_name)) < 0) { + return false; + } + st = &GLOBAL_Stream[sno]; + st->user_name = file_name; + // user requested encoding? + if (args[OPEN_ALIAS].used) { + Atom al = AtomOfTerm(args[OPEN_ALIAS].tvalue); + if (!Yap_AddAlias(al, sno)) { + free(args); + return false; } + } + st->name = Yap_LookupAtom(fname); + if (st - GLOBAL_Stream < 3) { + flags |= RepError_Prolog_f; + } #if MAC - if (open_mode == AtomWrite) { - Yap_SetTextFile(RepAtom(AtomOfTerm(file_name))->StrOfAE); - } + if (open_mode == AtomWrite) { + Yap_SetTextFile(RepAtom(AtomOfTerm(file_name))->StrOfAE); + } #endif - // __android_log_print(ANDROID_LOG_INFO, "YAPDroid", "open %s", fname); - if (needs_bom && !write_bom(sno, st)) { - return false; - } else if (open_mode == AtomRead && !avoid_bom) { - check_bom(sno, st); // can change encoding - } - // follow declaration unless there is v - if (st->status & HAS_BOM_f) - st->encoding = enc_id(s_encoding, st->encoding); - else - st->encoding = encoding; - if (script) - open_header(sno, open_mode); - if (fname != fbuf) - freeBuffer(fname); + // __android_log_print(ANDROID_LOG_INFO, "YAPDroid", "open %s", fname); + if (needs_bom && !write_bom(sno, st)) { + return 
false; + } else if (open_mode == AtomRead && !avoid_bom) { + check_bom(sno, st); // can change encoding + } + // follow declaration unless there is v + if (st->status & HAS_BOM_f) + st->encoding = enc_id(s_encoding, st->encoding); + else + st->encoding = encoding; + if (script) + open_header(sno, open_mode); + if (fname != fbuf) + freeBuffer(fname); - free(args); - UNLOCK(st->streamlock); - { - Term t = Yap_MkStream(sno); - return (Yap_unify(ARG3, t)); - } + free(args); + UNLOCK(st->streamlock); + { + Term t = Yap_MkStream(sno); + return (Yap_unify(ARG3, t)); + } } /** @pred open(+ _F_,+ _M_,- _S_) is iso @@ -1370,8 +1378,8 @@ writable. */ static Int open3(USES_RfEGS1) { -/* '$open'(+File,+Mode,?Stream,-ReturnCode) */ - return do_open(Deref(ARG1), Deref(ARG2), TermNil PASS_REGS); + /* '$open'(+File,+Mode,?Stream,-ReturnCode) */ + return do_open(Deref(ARG1), Deref(ARG2), TermNil PASS_REGS); } /** @pred open(+ _F_,+ _M_,- _S_,+ _Opts_) is iso @@ -1453,262 +1461,263 @@ open_expands_filename. 
*/ static Int open4(USES_REGS1) { /* '$open'(+File,+Mode,?Stream,-ReturnCode) */ - return do_open(Deref(ARG1), Deref(ARG2), Deref(ARG4) PASS_REGS); + return do_open(Deref(ARG1), Deref(ARG2), Deref(ARG4) PASS_REGS); } static Int p_file_expansion(USES_REGS1) { /* '$file_expansion'(+File,-Name) */ - Term file_name = Deref(ARG1); + Term file_name = Deref(ARG1); - /* we know file_name is bound */ - if (IsVarTerm(file_name)) { - PlIOError(INSTANTIATION_ERROR, file_name, "absolute_file_name/3"); - return (FALSE); - } - char tmp[YAP_FILENAME_MAX+1]; - if (!Yap_AbsoluteFile(RepAtom(AtomOfTerm(file_name))->StrOfAE,tmp, false)) - return (PlIOError(EXISTENCE_ERROR_SOURCE_SINK, file_name, - "absolute_file_name/3")); - return (Yap_unify(ARG2, MkAtomTerm(Yap_LookupAtom(tmp)))); + /* we know file_name is bound */ + if (IsVarTerm(file_name)) { + PlIOError(INSTANTIATION_ERROR, file_name, "absolute_file_name/3"); + return (FALSE); + } + char tmp[YAP_FILENAME_MAX + 1]; + if (!Yap_AbsoluteFile(RepAtom(AtomOfTerm(file_name))->StrOfAE, tmp, false)) + return (PlIOError(EXISTENCE_ERROR_SOURCE_SINK, file_name, + "absolute_file_name/3")); + return (Yap_unify(ARG2, MkAtomTerm(Yap_LookupAtom(tmp)))); } static Int p_open_null_stream(USES_REGS1) { - Term t; - StreamDesc *st; - int sno = GetFreeStreamD(); - if (sno < 0) - return (PlIOError(SYSTEM_ERROR_INTERNAL, TermNil, - "new stream not available for open_null_stream/1")); - st = &GLOBAL_Stream[sno]; - st->status = Append_Stream_f | Output_Stream_f | Null_Stream_f; + Term t; + StreamDesc *st; + int sno = GetFreeStreamD(); + if (sno < 0) + return (PlIOError(SYSTEM_ERROR_INTERNAL, TermNil, + "new stream not available for open_null_stream/1")); + st = &GLOBAL_Stream[sno]; + st->status = Append_Stream_f | Output_Stream_f | Null_Stream_f; #if _WIN32 - st->file = fopen("NUL", "w"); + st->file = fopen("NUL", "w"); #else - st->file = fopen("/dev/null", "w"); + st->file = fopen("/dev/null", "w"); #endif - if (st->file == NULL) { - 
Yap_Error(SYSTEM_ERROR_INTERNAL, TermNil, - "Could not open NULL stream (/dev/null,NUL)"); - return false; - } - st->linepos = 0; - st->charcount = 0; - st->linecount = 1; - st->stream_putc = NullPutc; - st->stream_wputc = put_wchar; - st->stream_getc = PlGetc; - st->stream_wgetc = get_wchar; - st->stream_wgetc_for_read = get_wchar; - st->user_name = MkAtomTerm(st->name = AtomDevNull); - UNLOCK(st->streamlock); - t = Yap_MkStream(sno); - return (Yap_unify(ARG1, t)); + if (st->file == NULL) { + Yap_Error(SYSTEM_ERROR_INTERNAL, TermNil, + "Could not open NULL stream (/dev/null,NUL)"); + return false; + } + st->linepos = 0; + st->charcount = 0; + st->linecount = 1; + st->stream_putc = NullPutc; + st->stream_wputc = put_wchar; + st->stream_getc = PlGetc; + st->stream_wgetc = get_wchar; + st->stream_wgetc_for_read = get_wchar; + st->user_name = MkAtomTerm(st->name = AtomDevNull); + UNLOCK(st->streamlock); + t = Yap_MkStream(sno); + return (Yap_unify(ARG1, t)); } int Yap_OpenStream(const char *fname, const char *io_mode, Term user_name) { - CACHE_REGS - int sno; - StreamDesc *st; - Atom at; - struct vfs *vfsp; - FILE *fd; - int flags; + CACHE_REGS + int sno; + StreamDesc *st; + Atom at; + struct vfs *vfsp; + FILE *fd; + int flags; - sno = GetFreeStreamD(); - if (sno < 0) { - PlIOError(RESOURCE_ERROR_MAX_STREAMS, MkAtomTerm(Yap_LookupAtom(fname)), - "new stream not available for opening"); - return -1; + sno = GetFreeStreamD(); + if (sno < 0) { + PlIOError(RESOURCE_ERROR_MAX_STREAMS, MkAtomTerm(Yap_LookupAtom(fname)), + "new stream not available for opening"); + return -1; + } + st = GLOBAL_Stream + sno; + // read, write, append + st->file = NULL; + st->status = 0; + fname = Yap_VF(fname); + if ((vfsp = vfs_owner(fname)) != NULL) { + if (!vfsp->open(vfsp, sno, fname, io_mode)) { + UNLOCK(st->streamlock); + PlIOError(EXISTENCE_ERROR_SOURCE_SINK, MkAtomTerm(Yap_LookupAtom(fname)), + "%s", fname); + return -1; } - st = GLOBAL_Stream + sno; - // read, write, append - st->file 
= NULL; - st->status = 0; - fname = Yap_VF(fname); - if ((vfsp = vfs_owner(fname)) != NULL ) { - if (!vfsp->open(vfsp, sno, fname, io_mode)) { - UNLOCK(st->streamlock); - PlIOError(EXISTENCE_ERROR_SOURCE_SINK, - MkAtomTerm(Yap_LookupAtom(fname)), "%s", fname); - return -1; - } - } else { - fd = st->file = fopen(fname, io_mode); - if (fd == NULL) { - if (!strchr(io_mode, 'b') && binary_file(fname)) { - UNLOCK(st->streamlock); - if (errno == ENOENT && !strchr(io_mode, 'r')) { - PlIOError(EXISTENCE_ERROR_SOURCE_SINK, MkAtomTerm(Yap_LookupAtom(fname)), "%s: %s", - fname, - strerror(errno)); - } else { - PlIOError(PERMISSION_ERROR_OPEN_SOURCE_SINK, MkAtomTerm(Yap_LookupAtom(fname)), - "%s: %s", - fname, strerror(errno)); - } - } - return -1; - } - } - flags = st->status; - if (strchr(io_mode, 'w')) { - if (strchr(io_mode, 'a')) { - at = AtomAppend; - flags |= Append_Stream_f | Output_Stream_f; + } else { + fd = st->file = fopen(fname, io_mode); + if (fd == NULL) { + if (!strchr(io_mode, 'b') && binary_file(fname)) { + UNLOCK(st->streamlock); + if (errno == ENOENT && !strchr(io_mode, 'r')) { + PlIOError(EXISTENCE_ERROR_SOURCE_SINK, + MkAtomTerm(Yap_LookupAtom(fname)), "%s: %s", fname, + strerror(errno)); } else { - at = AtomWrite; - flags |= Output_Stream_f; + PlIOError(PERMISSION_ERROR_OPEN_SOURCE_SINK, + MkAtomTerm(Yap_LookupAtom(fname)), "%s: %s", fname, + strerror(errno)); } + } + return -1; } - if (strchr(io_mode, 'r')) { - at = AtomRead; - flags |= Input_Stream_f; + } + flags = st->status; + if (strchr(io_mode, 'w')) { + if (strchr(io_mode, 'a')) { + at = AtomAppend; + flags |= Append_Stream_f | Output_Stream_f; + } else { + at = AtomWrite; + flags |= Output_Stream_f; } - if (strchr(io_mode, 'b')) { - flags |= Binary_Stream_f; - } - Yap_initStream(sno, st->file, fname, user_name, LOCAL_encoding, flags, at, vfsp); - __android_log_print(ANDROID_LOG_INFO, "YAPDroid", - "exists %s <%d>", fname, sno); - return sno; + } + if (strchr(io_mode, 'r')) { + at = AtomRead; + 
flags |= Input_Stream_f; + } + if (strchr(io_mode, 'b')) { + flags |= Binary_Stream_f; + } + Yap_initStream(sno, st->file, fname, user_name, LOCAL_encoding, flags, at, + vfsp); + __android_log_print(ANDROID_LOG_INFO, "YAPDroid", "exists %s <%d>", fname, + sno); + return sno; } -int Yap_FileStream(FILE *fd, char *name, Term file_name, int flags, VFS_t *vfsp) { - CACHE_REGS - int sno; - Atom at; +int Yap_FileStream(FILE *fd, char *name, Term file_name, int flags, + VFS_t *vfsp) { + CACHE_REGS + int sno; + Atom at; - sno = GetFreeStreamD(); - if (sno < 0) - return (PlIOError(RESOURCE_ERROR_MAX_STREAMS, file_name, - "new stream not available for opening")); - if (flags & Output_Stream_f) { - if (flags & Append_Stream_f) - at = AtomAppend; - else - at = AtomWrite; - } else - at = AtomRead; - Yap_initStream(sno, fd, name, file_name, LOCAL_encoding, flags, at, vfsp); - return sno; + sno = GetFreeStreamD(); + if (sno < 0) + return (PlIOError(RESOURCE_ERROR_MAX_STREAMS, file_name, + "new stream not available for opening")); + if (flags & Output_Stream_f) { + if (flags & Append_Stream_f) + at = AtomAppend; + else + at = AtomWrite; + } else + at = AtomRead; + Yap_initStream(sno, fd, name, file_name, LOCAL_encoding, flags, at, vfsp); + return sno; } - #define CheckStream(arg, kind, msg) \ CheckStream__(__FILE__, __FUNCTION__, __LINE__, arg, kind, msg) static int CheckStream__(const char *file, const char *f, int line, Term arg, int kind, const char *msg) { - int sno = -1; - arg = Deref(arg); - if (IsVarTerm(arg)) { - Yap_Error(INSTANTIATION_ERROR, arg, msg); - return -1; - } else if (IsAtomTerm(arg)) { - Atom sname = AtomOfTerm(arg); + int sno = -1; + arg = Deref(arg); + if (IsVarTerm(arg)) { + Yap_Error(INSTANTIATION_ERROR, arg, msg); + return -1; + } else if (IsAtomTerm(arg)) { + Atom sname = AtomOfTerm(arg); - if (sname == AtomUser) { - if (kind & Input_Stream_f) { - if (kind & (Output_Stream_f | Append_Stream_f)) { - PlIOError__(file, f, line, 
PERMISSION_ERROR_OUTPUT_STREAM, arg, - "ambiguous use of 'user' as a stream"); - return (-1); - } - sname = AtomUserIn; - } else { - sname = AtomUserOut; - } - } - if ((sno = Yap_CheckAlias(sname)) < 0) { - UNLOCK(GLOBAL_Stream[sno].streamlock); - PlIOError__(file, f, line, EXISTENCE_ERROR_STREAM, arg, msg); - return -1; - } else { - LOCK(GLOBAL_Stream[sno].streamlock); - } - } else if (IsApplTerm(arg) && FunctorOfTerm(arg) == FunctorStream) { - arg = ArgOfTerm(1, arg); - if (!IsVarTerm(arg) && IsIntegerTerm(arg)) { - sno = IntegerOfTerm(arg); + if (sname == AtomUser) { + if (kind & Input_Stream_f) { + if (kind & (Output_Stream_f | Append_Stream_f)) { + PlIOError__(file, f, line, PERMISSION_ERROR_OUTPUT_STREAM, arg, + "ambiguous use of 'user' as a stream"); + return (-1); } + sname = AtomUserIn; + } else { + sname = AtomUserOut; + } } - if (sno < 0) { - Yap_Error(DOMAIN_ERROR_STREAM_OR_ALIAS, arg, msg); - return -1; + if ((sno = Yap_CheckAlias(sname)) < 0) { + UNLOCK(GLOBAL_Stream[sno].streamlock); + PlIOError__(file, f, line, EXISTENCE_ERROR_STREAM, arg, msg); + return -1; + } else { + LOCK(GLOBAL_Stream[sno].streamlock); } - if (GLOBAL_Stream[sno].status & Free_Stream_f) { - PlIOError__(file, f, line, EXISTENCE_ERROR_STREAM, arg, msg); - return -1; + } else if (IsApplTerm(arg) && FunctorOfTerm(arg) == FunctorStream) { + arg = ArgOfTerm(1, arg); + if (!IsVarTerm(arg) && IsIntegerTerm(arg)) { + sno = IntegerOfTerm(arg); } - LOCK(GLOBAL_Stream[sno].streamlock); - if ((GLOBAL_Stream[sno].status & Input_Stream_f) && - !(kind & Input_Stream_f)) { - UNLOCK(GLOBAL_Stream[sno].streamlock); - PlIOError__(file, f, line, PERMISSION_ERROR_OUTPUT_STREAM, arg, msg); - return -1; - } - if ((GLOBAL_Stream[sno].status & (Append_Stream_f | Output_Stream_f)) && - !(kind & Output_Stream_f)) { - UNLOCK(GLOBAL_Stream[sno].streamlock); - PlIOError__(file, f, line, PERMISSION_ERROR_INPUT_STREAM, arg, msg); - return -1; - } - return sno; + } + if (sno < 0) { + 
Yap_Error(DOMAIN_ERROR_STREAM_OR_ALIAS, arg, msg); + return -1; + } + if (GLOBAL_Stream[sno].status & Free_Stream_f) { + PlIOError__(file, f, line, EXISTENCE_ERROR_STREAM, arg, msg); + return -1; + } + LOCK(GLOBAL_Stream[sno].streamlock); + if ((GLOBAL_Stream[sno].status & Input_Stream_f) && + !(kind & Input_Stream_f)) { + UNLOCK(GLOBAL_Stream[sno].streamlock); + PlIOError__(file, f, line, PERMISSION_ERROR_OUTPUT_STREAM, arg, msg); + return -1; + } + if ((GLOBAL_Stream[sno].status & (Append_Stream_f | Output_Stream_f)) && + !(kind & Output_Stream_f)) { + UNLOCK(GLOBAL_Stream[sno].streamlock); + PlIOError__(file, f, line, PERMISSION_ERROR_INPUT_STREAM, arg, msg); + return -1; + } + return sno; } int Yap_CheckStream__(const char *file, const char *f, int line, Term arg, int kind, const char *msg) { - return CheckStream__(file, f, line, arg, kind, msg); + return CheckStream__(file, f, line, arg, kind, msg); } int Yap_CheckTextStream__(const char *file, const char *f, int line, Term arg, int kind, const char *msg) { - int sno; - if ((sno = CheckStream__(file, f, line, arg, kind, msg)) < 0) - return -1; - if ((GLOBAL_Stream[sno].status & Binary_Stream_f)) { - UNLOCK(GLOBAL_Stream[sno].streamlock); - if (kind == Input_Stream_f) - PlIOError__(file, f, line, PERMISSION_ERROR_INPUT_BINARY_STREAM, arg, - msg); - else - PlIOError__(file, f, line, PERMISSION_ERROR_OUTPUT_BINARY_STREAM, arg, - msg); - return -1; - } - return sno; + int sno; + if ((sno = CheckStream__(file, f, line, arg, kind, msg)) < 0) + return -1; + if ((GLOBAL_Stream[sno].status & Binary_Stream_f)) { + UNLOCK(GLOBAL_Stream[sno].streamlock); + if (kind == Input_Stream_f) + PlIOError__(file, f, line, PERMISSION_ERROR_INPUT_BINARY_STREAM, arg, + msg); + else + PlIOError__(file, f, line, PERMISSION_ERROR_OUTPUT_BINARY_STREAM, arg, + msg); + return -1; + } + return sno; } int Yap_CheckBinaryStream__(const char *file, const char *f, int line, Term arg, int kind, const char *msg) { - int sno; - if ((sno = 
CheckStream__(file, f, line, arg, kind, msg)) < 0) - return -1; - if (!(GLOBAL_Stream[sno].status & Binary_Stream_f)) { - UNLOCK(GLOBAL_Stream[sno].streamlock); - if (kind == Input_Stream_f) - PlIOError__(file, f, line, PERMISSION_ERROR_INPUT_TEXT_STREAM, arg, msg); - else - PlIOError__(file, f, line, PERMISSION_ERROR_OUTPUT_TEXT_STREAM, arg, msg); - return -1; - } - return sno; + int sno; + if ((sno = CheckStream__(file, f, line, arg, kind, msg)) < 0) + return -1; + if (!(GLOBAL_Stream[sno].status & Binary_Stream_f)) { + UNLOCK(GLOBAL_Stream[sno].streamlock); + if (kind == Input_Stream_f) + PlIOError__(file, f, line, PERMISSION_ERROR_INPUT_TEXT_STREAM, arg, msg); + else + PlIOError__(file, f, line, PERMISSION_ERROR_OUTPUT_TEXT_STREAM, arg, msg); + return -1; + } + return sno; } /* used from C-interface */ int Yap_GetFreeStreamDForReading(void) { - int sno = GetFreeStreamD(); - StreamDesc *s; + int sno = GetFreeStreamD(); + StreamDesc *s; - if (sno < 0) - return sno; - s = GLOBAL_Stream + sno; - s->status |= User_Stream_f | Input_Stream_f; - s->charcount = 0; - s->linecount = 1; - s->linepos = 0; - Yap_DefaultStreamOps(s); - UNLOCK(s->streamlock); + if (sno < 0) return sno; + s = GLOBAL_Stream + sno; + s->status |= User_Stream_f | Input_Stream_f; + s->charcount = 0; + s->linecount = 1; + s->linepos = 0; + Yap_DefaultStreamOps(s); + UNLOCK(s->streamlock); + return sno; } /** @@ -1719,11 +1728,11 @@ int Yap_GetFreeStreamDForReading(void) { */ static Int always_prompt_user(USES_REGS1) { - StreamDesc *s = GLOBAL_Stream + StdInStream; + StreamDesc *s = GLOBAL_Stream + StdInStream; - s->status |= Promptable_Stream_f; - Yap_DefaultStreamOps(s); - return (TRUE); + s->status |= Promptable_Stream_f; + Yap_DefaultStreamOps(s); + return (TRUE); } static Int close1 /** @pred close(+ _S_) is iso @@ -1736,18 +1745,18 @@ static Int close1 /** @pred close(+ _S_) is iso */ - (USES_REGS1) { /* '$close'(+GLOBAL_Stream) */ - int sno = CheckStream( - ARG1, (Input_Stream_f | 
Output_Stream_f | Socket_Stream_f), "close/2"); - if (sno < 0) - return false; - if (sno <= StdErrStream) { - UNLOCK(GLOBAL_Stream[sno].streamlock); - return true; - } - Yap_CloseStream(sno); + (USES_REGS1) { /* '$close'(+GLOBAL_Stream) */ + int sno = CheckStream( + ARG1, (Input_Stream_f | Output_Stream_f | Socket_Stream_f), "close/2"); + if (sno < 0) + return false; + if (sno <= StdErrStream) { UNLOCK(GLOBAL_Stream[sno].streamlock); - return (TRUE); + return true; + } + Yap_CloseStream(sno); + UNLOCK(GLOBAL_Stream[sno].streamlock); + return (TRUE); } #define CLOSE_DEFS() \ @@ -1755,9 +1764,7 @@ static Int close1 /** @pred close(+ _S_) is iso #define PAR(x, y, z) z -typedef enum close_enum_choices { - CLOSE_DEFS() -} close_choices_t; +typedef enum close_enum_choices { CLOSE_DEFS() } close_choices_t; #undef PAR @@ -1777,46 +1784,45 @@ YAP currently ignores these options. */ static Int close2(USES_REGS1) { /* '$close'(+GLOBAL_Stream) */ - Int sno = CheckStream( - ARG1, (Input_Stream_f | Output_Stream_f | Socket_Stream_f), "close/2"); - Term tlist; - if (sno < 0) - return (FALSE); - if (sno <= StdErrStream) { - UNLOCK(GLOBAL_Stream[sno].streamlock); - return TRUE; - } - xarg *args = - Yap_ArgListToVector((tlist = Deref(ARG2)), close_defs, CLOSE_END); - if (args == NULL) { - if (LOCAL_Error_TYPE != YAP_NO_ERROR) { - if (LOCAL_Error_TYPE == DOMAIN_ERROR_PROLOG_FLAG) - LOCAL_Error_TYPE = DOMAIN_ERROR_CLOSE_OPTION; - Yap_Error(LOCAL_Error_TYPE, tlist, NULL); - } - return false; - return FALSE; - } - // if (args[CLOSE_FORCE].used) { - // } - Yap_CloseStream(sno); + Int sno = CheckStream( + ARG1, (Input_Stream_f | Output_Stream_f | Socket_Stream_f), "close/2"); + Term tlist; + if (sno < 0) + return (FALSE); + if (sno <= StdErrStream) { UNLOCK(GLOBAL_Stream[sno].streamlock); - return (TRUE); + return TRUE; + } + xarg *args = + Yap_ArgListToVector((tlist = Deref(ARG2)), close_defs, CLOSE_END); + if (args == NULL) { + if (LOCAL_Error_TYPE != YAP_NO_ERROR) { + if 
(LOCAL_Error_TYPE == DOMAIN_ERROR_PROLOG_FLAG) + LOCAL_Error_TYPE = DOMAIN_ERROR_CLOSE_OPTION; + Yap_Error(LOCAL_Error_TYPE, tlist, NULL); + } + return false; + return FALSE; + } + // if (args[CLOSE_FORCE].used) { + // } + Yap_CloseStream(sno); + UNLOCK(GLOBAL_Stream[sno].streamlock); + return (TRUE); } Term read_line(int sno) { - CACHE_REGS - Term tail; - Int ch; + CACHE_REGS + Term tail; + Int ch; - if ((ch = GLOBAL_Stream[sno].stream_wgetc(sno)) == 10) { - return (TermNil); - } - tail = read_line(sno); - return (MkPairTerm(MkIntTerm(ch), tail)); + if ((ch = GLOBAL_Stream[sno].stream_wgetc(sno)) == 10) { + return (TermNil); + } + tail = read_line(sno); + return (MkPairTerm(MkIntTerm(ch), tail)); } - #define ABSOLUTE_FILE_NAME_DEFS() \ PAR("access", isatom, ABSOLUTE_FILE_NAME_ACCESS) \ , PAR("expand", booleanFlag, ABSOLUTE_FILE_NAME_EXPAND), \ @@ -1833,7 +1839,7 @@ Term read_line(int sno) { #define PAR(x, y, z) z typedef enum ABSOLUTE_FILE_NAME_enum_ { - ABSOLUTE_FILE_NAME_DEFS() + ABSOLUTE_FILE_NAME_DEFS() } absolute_file_name_choices_t; #undef PAR @@ -1842,147 +1848,147 @@ typedef enum ABSOLUTE_FILE_NAME_enum_ { { x, y, z } static const param_t absolute_file_name_search_defs[] = { - ABSOLUTE_FILE_NAME_DEFS()}; + ABSOLUTE_FILE_NAME_DEFS()}; #undef PAR static Int abs_file_parameters(USES_REGS1) { - Term t[ABSOLUTE_FILE_NAME_END]; - Term tlist = Deref(ARG1), tf; - /* get options */ - xarg *args = Yap_ArgListToVector(tlist, absolute_file_name_search_defs, - ABSOLUTE_FILE_NAME_END); - if (args == NULL) { - if (LOCAL_Error_TYPE != YAP_NO_ERROR) { - if (LOCAL_Error_TYPE == DOMAIN_ERROR_PROLOG_FLAG) - LOCAL_Error_TYPE = DOMAIN_ERROR_ABSOLUTE_FILE_NAME_OPTION; - Yap_Error(LOCAL_Error_TYPE, tlist, NULL); - } - return false; + Term t[ABSOLUTE_FILE_NAME_END]; + Term tlist = Deref(ARG1), tf; + /* get options */ + xarg *args = Yap_ArgListToVector(tlist, absolute_file_name_search_defs, + ABSOLUTE_FILE_NAME_END); + if (args == NULL) { + if (LOCAL_Error_TYPE != YAP_NO_ERROR) { + 
if (LOCAL_Error_TYPE == DOMAIN_ERROR_PROLOG_FLAG) + LOCAL_Error_TYPE = DOMAIN_ERROR_ABSOLUTE_FILE_NAME_OPTION; + Yap_Error(LOCAL_Error_TYPE, tlist, NULL); } - /* done */ - if (args[ABSOLUTE_FILE_NAME_EXTENSIONS].used) { - t[ABSOLUTE_FILE_NAME_EXTENSIONS] = - args[ABSOLUTE_FILE_NAME_EXTENSIONS].tvalue; - } else { - t[ABSOLUTE_FILE_NAME_EXTENSIONS] = TermNil; - } - if (args[ABSOLUTE_FILE_NAME_RELATIVE_TO].used) { - t[ABSOLUTE_FILE_NAME_RELATIVE_TO] = - gethdir(args[ABSOLUTE_FILE_NAME_RELATIVE_TO].tvalue); - } else { - t[ABSOLUTE_FILE_NAME_RELATIVE_TO] = gethdir(TermDot); - } - if (args[ABSOLUTE_FILE_NAME_FILE_TYPE].used) - t[ABSOLUTE_FILE_NAME_FILE_TYPE] = args[ABSOLUTE_FILE_NAME_FILE_TYPE].tvalue; - else - t[ABSOLUTE_FILE_NAME_FILE_TYPE] = TermTxt; - if (args[ABSOLUTE_FILE_NAME_ACCESS].used) - t[ABSOLUTE_FILE_NAME_ACCESS] = args[ABSOLUTE_FILE_NAME_ACCESS].tvalue; - else - t[ABSOLUTE_FILE_NAME_ACCESS] = TermNone; - if (args[ABSOLUTE_FILE_NAME_FILE_ERRORS].used) - t[ABSOLUTE_FILE_NAME_FILE_ERRORS] = - args[ABSOLUTE_FILE_NAME_FILE_ERRORS].tvalue; - else - t[ABSOLUTE_FILE_NAME_FILE_ERRORS] = TermError; - if (args[ABSOLUTE_FILE_NAME_SOLUTIONS].used) - t[ABSOLUTE_FILE_NAME_SOLUTIONS] = args[ABSOLUTE_FILE_NAME_SOLUTIONS].tvalue; - else - t[ABSOLUTE_FILE_NAME_SOLUTIONS] = TermFirst; - if (args[ABSOLUTE_FILE_NAME_EXPAND].used) - t[ABSOLUTE_FILE_NAME_EXPAND] = args[ABSOLUTE_FILE_NAME_EXPAND].tvalue; - else - t[ABSOLUTE_FILE_NAME_EXPAND] = TermFalse; - if (args[ABSOLUTE_FILE_NAME_GLOB].used) { - t[ABSOLUTE_FILE_NAME_GLOB] = args[ABSOLUTE_FILE_NAME_GLOB].tvalue; - t[ABSOLUTE_FILE_NAME_EXPAND] = TermTrue; - } else - t[ABSOLUTE_FILE_NAME_GLOB] = TermEmptyAtom; - if (args[ABSOLUTE_FILE_NAME_VERBOSE_FILE_SEARCH].used) - t[ABSOLUTE_FILE_NAME_VERBOSE_FILE_SEARCH] = - args[ABSOLUTE_FILE_NAME_VERBOSE_FILE_SEARCH].tvalue; - else - t[ABSOLUTE_FILE_NAME_VERBOSE_FILE_SEARCH] = - (trueGlobalPrologFlag(VERBOSE_FILE_SEARCH_FLAG) ? 
TermTrue : TermFalse); - tf = Yap_MkApplTerm(Yap_MkFunctor(AtomOpt, ABSOLUTE_FILE_NAME_END), - ABSOLUTE_FILE_NAME_END, t); - return (Yap_unify(ARG2, tf)); + return false; + } + /* done */ + if (args[ABSOLUTE_FILE_NAME_EXTENSIONS].used) { + t[ABSOLUTE_FILE_NAME_EXTENSIONS] = + args[ABSOLUTE_FILE_NAME_EXTENSIONS].tvalue; + } else { + t[ABSOLUTE_FILE_NAME_EXTENSIONS] = TermNil; + } + if (args[ABSOLUTE_FILE_NAME_RELATIVE_TO].used) { + t[ABSOLUTE_FILE_NAME_RELATIVE_TO] = + gethdir(args[ABSOLUTE_FILE_NAME_RELATIVE_TO].tvalue); + } else { + t[ABSOLUTE_FILE_NAME_RELATIVE_TO] = gethdir(TermDot); + } + if (args[ABSOLUTE_FILE_NAME_FILE_TYPE].used) + t[ABSOLUTE_FILE_NAME_FILE_TYPE] = args[ABSOLUTE_FILE_NAME_FILE_TYPE].tvalue; + else + t[ABSOLUTE_FILE_NAME_FILE_TYPE] = TermTxt; + if (args[ABSOLUTE_FILE_NAME_ACCESS].used) + t[ABSOLUTE_FILE_NAME_ACCESS] = args[ABSOLUTE_FILE_NAME_ACCESS].tvalue; + else + t[ABSOLUTE_FILE_NAME_ACCESS] = TermNone; + if (args[ABSOLUTE_FILE_NAME_FILE_ERRORS].used) + t[ABSOLUTE_FILE_NAME_FILE_ERRORS] = + args[ABSOLUTE_FILE_NAME_FILE_ERRORS].tvalue; + else + t[ABSOLUTE_FILE_NAME_FILE_ERRORS] = TermError; + if (args[ABSOLUTE_FILE_NAME_SOLUTIONS].used) + t[ABSOLUTE_FILE_NAME_SOLUTIONS] = args[ABSOLUTE_FILE_NAME_SOLUTIONS].tvalue; + else + t[ABSOLUTE_FILE_NAME_SOLUTIONS] = TermFirst; + if (args[ABSOLUTE_FILE_NAME_EXPAND].used) + t[ABSOLUTE_FILE_NAME_EXPAND] = args[ABSOLUTE_FILE_NAME_EXPAND].tvalue; + else + t[ABSOLUTE_FILE_NAME_EXPAND] = TermFalse; + if (args[ABSOLUTE_FILE_NAME_GLOB].used) { + t[ABSOLUTE_FILE_NAME_GLOB] = args[ABSOLUTE_FILE_NAME_GLOB].tvalue; + t[ABSOLUTE_FILE_NAME_EXPAND] = TermTrue; + } else + t[ABSOLUTE_FILE_NAME_GLOB] = TermEmptyAtom; + if (args[ABSOLUTE_FILE_NAME_VERBOSE_FILE_SEARCH].used) + t[ABSOLUTE_FILE_NAME_VERBOSE_FILE_SEARCH] = + args[ABSOLUTE_FILE_NAME_VERBOSE_FILE_SEARCH].tvalue; + else + t[ABSOLUTE_FILE_NAME_VERBOSE_FILE_SEARCH] = + (trueGlobalPrologFlag(VERBOSE_FILE_SEARCH_FLAG) ? 
TermTrue : TermFalse); + tf = Yap_MkApplTerm(Yap_MkFunctor(AtomOpt, ABSOLUTE_FILE_NAME_END), + ABSOLUTE_FILE_NAME_END, t); + return (Yap_unify(ARG2, tf)); } static Int get_abs_file_parameter(USES_REGS1) { - Term t = Deref(ARG1), topts = Deref(ARG2); - /* get options */ - /* done */ - int i = Yap_ArgKey(AtomOfTerm(t), absolute_file_name_search_defs, - ABSOLUTE_FILE_NAME_END); - if (i >= 0) - return Yap_unify(ARG3, ArgOfTerm(i + 1, topts)); - Yap_Error(DOMAIN_ERROR_ABSOLUTE_FILE_NAME_OPTION, ARG1, NULL); - return false; + Term t = Deref(ARG1), topts = Deref(ARG2); + /* get options */ + /* done */ + int i = Yap_ArgKey(AtomOfTerm(t), absolute_file_name_search_defs, + ABSOLUTE_FILE_NAME_END); + if (i >= 0) + return Yap_unify(ARG3, ArgOfTerm(i + 1, topts)); + Yap_Error(DOMAIN_ERROR_ABSOLUTE_FILE_NAME_OPTION, ARG1, NULL); + return false; } void Yap_InitPlIO(struct yap_boot_params *argi) { - Int i; - if (argi->inp > 0) - Yap_stdin = fdopen(argi->inp - 1, "r"); - else if (argi->inp) - Yap_stdin = NULL; - else - Yap_stdin = stdin; - if (argi->out > 0) - Yap_stdout = fdopen(argi->out - 1, "a"); - else if (argi->out) - Yap_stdout = NULL; - else - Yap_stdout = stdout; - if (argi->err > 0) - Yap_stderr = fdopen(argi->err - 1, "a"); - else if (argi->out) - Yap_stdout = NULL; - else - Yap_stderr = stderr; - GLOBAL_Stream = - (StreamDesc *) Yap_AllocCodeSpace(sizeof(StreamDesc) * MaxStreams); - for (i = 0; i < MaxStreams; ++i) { - INIT_LOCK(GLOBAL_Stream[i].streamlock); - GLOBAL_Stream[i].status = Free_Stream_f; - } - InitStdStreams(); + Int i; + if (argi->inp > 0) + Yap_stdin = fdopen(argi->inp - 1, "r"); + else if (argi->inp) + Yap_stdin = NULL; + else + Yap_stdin = stdin; + if (argi->out > 0) + Yap_stdout = fdopen(argi->out - 1, "a"); + else if (argi->out) + Yap_stdout = NULL; + else + Yap_stdout = stdout; + if (argi->err > 0) + Yap_stderr = fdopen(argi->err - 1, "a"); + else if (argi->out) + Yap_stdout = NULL; + else + Yap_stderr = stderr; + GLOBAL_Stream = + (StreamDesc 
*)Yap_AllocCodeSpace(sizeof(StreamDesc) * MaxStreams); + for (i = 0; i < MaxStreams; ++i) { + INIT_LOCK(GLOBAL_Stream[i].streamlock); + GLOBAL_Stream[i].status = Free_Stream_f; + } + InitStdStreams(); } void Yap_InitIOPreds(void) { - /* here the Input/Output predicates */ - Yap_InitCPred("always_prompt_user", 0, always_prompt_user, - SafePredFlag | SyncPredFlag); - Yap_InitCPred("close", 1, close1, SafePredFlag | SyncPredFlag); - Yap_InitCPred("close", 2, close2, SafePredFlag | SyncPredFlag); - Yap_InitCPred("open", 4, open4, SyncPredFlag); - Yap_InitCPred("open", 3, open3, SyncPredFlag); - Yap_InitCPred("abs_file_parameters", 2, abs_file_parameters, - SyncPredFlag | HiddenPredFlag); - Yap_InitCPred("get_abs_file_parameter", 3, get_abs_file_parameter, - SafePredFlag | SyncPredFlag | HiddenPredFlag); - Yap_InitCPred("$file_expansion", 2, p_file_expansion, - SafePredFlag | SyncPredFlag | HiddenPredFlag); - Yap_InitCPred("$open_null_stream", 1, p_open_null_stream, - SafePredFlag | SyncPredFlag | HiddenPredFlag); - Yap_InitIOStreams(); - Yap_InitCharsio(); - Yap_InitChtypes(); - Yap_InitConsole(); - Yap_InitReadUtil(); - Yap_InitMems(); - Yap_InitPipes(); - Yap_InitFiles(); - Yap_InitWriteTPreds(); - Yap_InitReadTPreds(); - Yap_InitFormat(); - Yap_InitRandomPreds(); + /* here the Input/Output predicates */ + Yap_InitCPred("always_prompt_user", 0, always_prompt_user, + SafePredFlag | SyncPredFlag); + Yap_InitCPred("close", 1, close1, SafePredFlag | SyncPredFlag); + Yap_InitCPred("close", 2, close2, SafePredFlag | SyncPredFlag); + Yap_InitCPred("open", 4, open4, SyncPredFlag); + Yap_InitCPred("open", 3, open3, SyncPredFlag); + Yap_InitCPred("abs_file_parameters", 2, abs_file_parameters, + SyncPredFlag | HiddenPredFlag); + Yap_InitCPred("get_abs_file_parameter", 3, get_abs_file_parameter, + SafePredFlag | SyncPredFlag | HiddenPredFlag); + Yap_InitCPred("$file_expansion", 2, p_file_expansion, + SafePredFlag | SyncPredFlag | HiddenPredFlag); + 
Yap_InitCPred("$open_null_stream", 1, p_open_null_stream, + SafePredFlag | SyncPredFlag | HiddenPredFlag); + Yap_InitIOStreams(); + Yap_InitCharsio(); + Yap_InitChtypes(); + Yap_InitConsole(); + Yap_InitReadUtil(); + Yap_InitMems(); + Yap_InitPipes(); + Yap_InitFiles(); + Yap_InitWriteTPreds(); + Yap_InitReadTPreds(); + Yap_InitFormat(); + Yap_InitRandomPreds(); #if USE_READLINE - Yap_InitReadlinePreds(); + Yap_InitReadlinePreds(); #endif - Yap_InitSockets(); - Yap_InitSignalPreds(); - Yap_InitSysPreds(); - Yap_InitTimePreds(); + Yap_InitSockets(); + Yap_InitSignalPreds(); + Yap_InitSysPreds(); + Yap_InitTimePreds(); } diff --git a/os/iopreds.h b/os/iopreds.h index 3d88036c7..2cf7367ce 100644 --- a/os/iopreds.h +++ b/os/iopreds.h @@ -91,10 +91,15 @@ extern void Yap_InitStdStreams(void); extern Term Yap_StreamPosition(int); extern void Yap_CloseStream(int sno); -static inline Int GetCurInpPos(StreamDesc *inp_stream) { - return (inp_stream->linecount); +static inline Int GetCurInpLine(StreamDesc *inp_stream) { + return (inp_stream->linecount); } +static inline Int GetCurInpPos(StreamDesc *inp_stream) { + return (inp_stream->charcount); +} + + #define PlIOError(type, culprit, ...) 
\ PlIOError__(__FILE__, __FUNCTION__, __LINE__, type, culprit, __VA_ARGS__) diff --git a/os/mem.c b/os/mem.c index 7e6988073..322b6a270 100644 --- a/os/mem.c +++ b/os/mem.c @@ -1,19 +1,19 @@ /************************************************************************* -* * -* YAP Prolog * -* * -* Yap Prolog was developed at NCCUP - Universidade do Porto * -* * -* Copyright L.Damas, V.S.Costa and Universidade do Porto 1985-1997 * -* * -************************************************************************** -* * -* File: mem.c * -* Last rev: 5/2/88 * -* mods: * -* comments: Input/Output C implemented predicates * -* * -*************************************************************************/ + * * + * YAP Prolog * + * * + * Yap Prolog was developed at NCCUP - Universidade do Porto * + * * + * Copyright L.Damas, V.S.Costa and Universidade do Porto 1985-1997 * + * * + ************************************************************************** + * * + * File: mem.c * + * Last rev: 5/2/88 * + * mods: * + * comments: Input/Output C implemented predicates * + * * + *************************************************************************/ #ifdef SCCS static char SccsId[] = "%W% %G%"; #endif @@ -24,10 +24,10 @@ static char SccsId[] = "%W% %G%"; */ #include "sysbits.h" -#include "YapStreams.h" #if !HAVE_FMEMOPEN || !defined(HAVE_FMEMOPEN) +#include "YapStreams.h" #include "format.h" @@ -48,6 +48,8 @@ int format_synch(int sno, int sno0, format_info *fg) { GLOBAL_Stream[sno].linecount = 1; GLOBAL_Stream[sno].linepos = 0; GLOBAL_Stream[sno].charcount = 0; + GLOBAL_Stream[sno].recbs = NULL; + GLOBAL_Stream[sno].vfs = NULL; fg->lstart = 0; fg->phys_start = 0; fg->gapi = 0; @@ -55,7 +57,7 @@ int format_synch(int sno, int sno0, format_info *fg) { } // uses directly the buffer in the memory stream. 
- bool fill_pads(int sno, int sno0, int total, format_info *fg USES_REGS) { +bool fill_pads(int sno, int sno0, int total, format_info *fg USES_REGS) { int nfillers, fill_space, lfill_space, nchars; int (*f_putc)(int, int); const char *buf; @@ -103,6 +105,9 @@ int format_synch(int sno, int sno0, format_info *fg) { GLOBAL_Stream[sno].linecount = 1; GLOBAL_Stream[sno].linepos += nchars; GLOBAL_Stream[sno].charcount = 0; + GLOBAL_Stream[sno].recbs = NULL; + GLOBAL_Stream[sno].vfs = NULL; + GLOBAL_Stream[sno].file = NULL; fg->phys_start = 0; fg->lstart = GLOBAL_Stream[sno].linepos; fg->gapi = 0; @@ -180,7 +185,8 @@ static int MemPutc(int sno, int ch) { return ((int)ch); } -bool Yap_set_stream_to_buf(StreamDesc *st, const char *buf, size_t nchars USES_REGS) { +bool Yap_set_stream_to_buf(StreamDesc *st, const char *buf, + size_t nchars USES_REGS) { FILE *f; stream_flags_t flags; @@ -189,14 +195,14 @@ bool Yap_set_stream_to_buf(StreamDesc *st, const char *buf, size_t nchars USES_R st->vfs = NULL; Yap_initStream(st - GLOBAL_Stream, f, NULL, TermNil, LOCAL_encoding, flags, AtomRead, NULL); -// like any file stream. + // like any file stream. /* currently these streams are not seekable */ st->status = Input_Stream_f | InMemory_Stream_f; st->u.mem_string.pos = 0; st->u.mem_string.buf = (char *)buf; st->u.mem_string.max_size = nchars; st->u.mem_string.error_handler = NULL; -// st->u.mem_string.src = src; check new assets coode + // st->u.mem_string.src = src; check new assets coode Yap_DefaultStreamOps(st); return true; } @@ -223,7 +229,7 @@ int Yap_open_buf_read_stream(const char *buf, size_t nchars, encoding_t *encp, flags = Input_Stream_f | InMemory_Stream_f; st->vfs = NULL; Yap_initStream(sno, f, NULL, TermNil, encoding, flags, AtomRead, NULL); -// like any file stream. + // like any file stream. 
/* currently these streams are not seekable */ st->status = Input_Stream_f | InMemory_Stream_f; st->u.mem_string.pos = 0; @@ -272,7 +278,9 @@ int Yap_open_buf_write_stream(encoding_t enc, memBufSource src) { st->charcount = 0; st->linecount = 1; st->encoding = enc; + st->recbs = NULL; st->vfs = NULL; + st->file = NULL; Yap_DefaultStreamOps(st); st->nbuf = st->u.mem_string.buf = malloc(PLGETC_BUF_SIZE); st->u.mem_string.src = MEM_BUF_MALLOC; @@ -357,7 +365,8 @@ restart: void Yap_MemOps(StreamDesc *st) { st->stream_putc = MemPutc; - st->stream_getc = MemGetc;} + st->stream_getc = MemGetc; +} bool Yap_CloseMemoryStream(int sno) { if ((GLOBAL_Stream[sno].status & Output_Stream_f)) { diff --git a/os/readterm.c b/os/readterm.c index fb143e238..1111e23ea 100644 --- a/os/readterm.c +++ b/os/readterm.c @@ -95,7 +95,7 @@ static char SccsId[] = "%W% %G%"; #define SYSTEM_STAT stat #endif -static Term syntax_error(TokEntry *errtok, int sno, Term cmod); +static Term syntax_error(TokEntry *errtok, int sno, Term cmod, Int start); static void clean_vars(VarEntry *p) { if (p == NULL) @@ -315,55 +315,80 @@ static Int scan_to_list(USES_REGS1) { * Implicit arguments: * + */ -static Term syntax_error(TokEntry *errtok, int sno, Term cmod) { +static Term syntax_error(TokEntry *errtok, int sno, Term cmod, Int newpos) { CACHE_REGS Term startline, errline, endline; - Term tf[3]; + Term tf[4]; Term tm; - Term *tailp = tf + 2; + Term *tailp = tf + 3; CELL *Hi = HR; TokEntry *tok = LOCAL_tokptr; - Int cline = tok->TokPos; - + Int cline = tok->TokLine; + Int startpos = tok->TokPos; + errtok = LOCAL_toktide; + Int errpos = errtok->TokPos; + UInt diff = 0; startline = MkIntegerTerm(cline); endline = MkIntegerTerm(cline); - if (errtok != LOCAL_toktide) { - errtok = LOCAL_toktide; - } LOCAL_Error_TYPE = YAP_NO_ERROR; - errline = MkIntegerTerm(errtok->TokPos); + errline = MkIntegerTerm(errtok->TokLine); if (LOCAL_ErrorMessage) tm = MkStringTerm(LOCAL_ErrorMessage); - else - tm = MkStringTerm("syntax 
error"); - while (tok) { + else { + tm = MkStringTerm("syntax error"); + } + if (errpos && newpos >= 0) { + char o[128 + 1]; + diff = errpos - startpos; + if (diff > 128) { + diff = 128; + startpos = errpos - diff; + } +#if HAVE_FTELLO + Int curpos = ftello(GLOBAL_Stream[sno].file); + fseeko(GLOBAL_Stream[sno].file, startpos, SEEK_SET); +#else + Int curpos = ftell(GLOBAL_Stream[sno].file); + fseek(GLOBAL_Stream[sno].file, startpos, SEEK_SET); +#endif + fread(o, diff, 1, GLOBAL_Stream[sno].file); +#if HAVE_FTELLO + fseeko(GLOBAL_Stream[sno].file, curpos, SEEK_SET); +#else + fseek(GLOBAL_Stream[sno].file, curpos, SEEK_SET); +#endif + o[diff] = '\0'; + tf[3] = MkStringTerm(o); + } else { + while (tok) { - if (HR > ASP - 1024) { - errline = MkIntegerTerm(0); - endline = MkIntegerTerm(0); - /* for some reason moving this earlier confuses gcc on solaris */ - HR = Hi; - break; - } - if (tok->TokPos != cline) { - *tailp = MkPairTerm(TermNewLine, TermNil); - tailp = RepPair(*tailp) + 1; - cline = tok->TokPos; - } - if (tok == errtok && tok->Tok != Error_tok) { - *tailp = MkPairTerm(MkAtomTerm(AtomError), TermNil); + if (HR > ASP - 1024) { + errline = MkIntegerTerm(0); + endline = MkIntegerTerm(0); + /* for some reason moving this earlier confuses gcc on solaris */ + HR = Hi; + break; + } + if (tok->TokLine != cline) { + *tailp = MkPairTerm(TermNewLine, TermNil); + tailp = RepPair(*tailp) + 1; + cline = tok->TokLine; + } + if (tok == errtok && tok->Tok != Error_tok) { + *tailp = MkPairTerm(MkAtomTerm(AtomError), TermNil); + tailp = RepPair(*tailp) + 1; + } + Term rep = Yap_tokRep(tok); + if (tok->TokNext) { + tok = tok->TokNext; + } else { + endline = MkIntegerTerm(tok->TokLine); + tok = NULL; + break; + } + *tailp = MkPairTerm(rep, TermNil); tailp = RepPair(*tailp) + 1; } - Term rep = Yap_tokRep(tok); - if (tok->TokNext) { - tok = tok->TokNext; - } else { - endline = MkIntegerTerm(tok->TokPos); - tok = NULL; - break; - } - *tailp = MkPairTerm(rep, TermNil); - tailp = 
RepPair(*tailp) + 1; } { Term t[3]; @@ -376,9 +401,10 @@ static Term syntax_error(TokEntry *errtok, int sno, Term cmod) { /*2 msg */ /* 1: file */ tf[1] = Yap_StreamUserName(sno); + tf[2] = MkIntegerTerm(LOCAL_ActiveError->prologParserPos); clean_vars(LOCAL_VarTable); clean_vars(LOCAL_AnonVarTable); - Term terr = Yap_MkApplTerm(FunctorInfo3, 3, tf); + Term terr = Yap_MkApplTerm(FunctorInfo4, 4, tf); Term tn[2]; tn[0] = Yap_MkApplTerm(FunctorShortSyntaxError, 1, &tm); tn[1] = terr; @@ -386,14 +412,14 @@ static Term syntax_error(TokEntry *errtok, int sno, Term cmod) { #if DEBUG if (Yap_ExecutionMode == YAP_BOOT_MODE) { fprintf(stderr, "SYNTAX ERROR while booting: "); - Yap_DebugPlWriteln(terr); + fe } #endif return terr; } Term Yap_syntax_error(TokEntry *errtok, int sno) { - return syntax_error(errtok, sno, CurrentModule); + return syntax_error(errtok, sno, CurrentModule, -1); } typedef struct FEnv { @@ -417,9 +443,6 @@ typedef struct renv { bool ce, sw; Term sy; UInt cpos; -#if HAVE_FGETPOS - fpos_t rpos; -#endif int prio; int ungetc_oldc; int had_ungetc; @@ -500,11 +523,7 @@ static xarg *setReadEnv(Term opts, FEnv *fe, struct renv *re, int inp_stream) { } re->seekable = (GLOBAL_Stream[inp_stream].status & Seekable_Stream_f) != 0; if (re->seekable) { -#if HAVE_FGETPOS - fgetpos(GLOBAL_Stream[inp_stream].file, &re->rpos); -#else re->cpos = GLOBAL_Stream[inp_stream].charcount; -#endif } if (args[READ_PRIORITY].used) { re->prio = IntegerOfTerm(args[READ_PRIORITY].tvalue); @@ -890,8 +909,8 @@ static parser_state_t scanError(REnv *re, FEnv *fe, int inp_stream) { if (GLOBAL_Stream[inp_stream].status & InMemory_Stream_f) { GLOBAL_Stream[inp_stream].u.mem_string.pos = re->cpos; } else if (GLOBAL_Stream[inp_stream].status) { -#if HAVE_FGETPOS - fsetpos(GLOBAL_Stream[inp_stream].file, &re->rpos); +#if HAVE_FTELLO + fseeko(GLOBAL_Stream[inp_stream].file, re->cpos, 0L); #else fseek(GLOBAL_Stream[inp_stream].file, re->cpos, 0L); #endif @@ -912,7 +931,11 @@ static parser_state_t 
parseError(REnv *re, FEnv *fe, int inp_stream) { LOCAL_Error_TYPE = YAP_NO_ERROR; return YAP_PARSING_FINISHED; } else { - Term t = syntax_error(fe->toklast, inp_stream, fe->cmod); + if (re->seekable) { + re->cpos = GLOBAL_Stream[inp_stream].charcount; + } + + Term t = syntax_error(fe->toklast, inp_stream, fe->cmod, re->cpos); if (ParserErrorStyle == TermError) { LOCAL_ActiveError->errorTerm = Yap_StoreTermInDB(t, 4); LOCAL_Error_TYPE = SYNTAX_ERROR; @@ -1059,8 +1082,7 @@ typedef enum read_clause_enum_choices { static const param_t read_clause_defs[] = {READ_CLAUSE_DEFS()}; #undef PAR -static xarg *setClauseReadEnv(Term opts, FEnv *fe, struct renv *re, - int sno) { +static xarg *setClauseReadEnv(Term opts, FEnv *fe, struct renv *re, int sno) { CACHE_REGS xarg *args = Yap_ArgListToVector(opts, read_clause_defs, READ_CLAUSE_END); @@ -1120,11 +1142,7 @@ static xarg *setClauseReadEnv(Term opts, FEnv *fe, struct renv *re, fe->ce = Yap_CharacterEscapes(fe->cmod); re->seekable = (GLOBAL_Stream[sno].status & Seekable_Stream_f) != 0; if (re->seekable) { -#if HAVE_FGETPOS - fgetpos(GLOBAL_Stream[sno].file, &re->rpos); -#else re->cpos = GLOBAL_Stream[sno].charcount; -#endif } re->prio = LOCAL_default_priority; return args; @@ -1354,202 +1372,200 @@ Term Yap_BufferToTerm(const unsigned char *s, Term opts) { Term rval; int sno; encoding_t l = ENC_ISO_UTF8; - sno = Yap_open_buf_read_stream((char *)s, strlen((const char*)s), &l, MEM_BUF_USER); + sno = Yap_open_buf_read_stream((char *)s, strlen((const char *)s), &l, + MEM_BUF_USER); rval = Yap_read_term(sno, opts, false); Yap_CloseStream(sno); return rval; } - X_API Term Yap_BufferToTermWithPrioBindings(const unsigned char *s, size_t len, - Term opts, int prio, - Term bindings) { - CACHE_REGS - Term ctl; +X_API Term Yap_BufferToTermWithPrioBindings(const unsigned char *s, size_t len, + Term opts, int prio, + Term bindings) { + CACHE_REGS + Term ctl; - ctl = opts; - if (bindings) { - ctl = add_names(bindings, TermNil); - } - if 
(prio != 1200) { - ctl = add_priority(bindings, ctl); - } - return Yap_BufferToTerm(s, ctl); - } + ctl = opts; + if (bindings) { + ctl = add_names(bindings, TermNil); + } + if (prio != 1200) { + ctl = add_priority(bindings, ctl); + } + return Yap_BufferToTerm(s, ctl); +} - /** - * @pred read_term_from_atom( +Atom , -T , +Options ) - * - * read a term _T_ stored in constant _Atom_ according to _Options_ - * - * @param _Atom_ the source _Atom_ - * @param _T_ the output term _T_, may be any term - * @param _Options_ read_term/3 options. - * - * @notes Originally from SWI-Prolog, in YAP only works with internalised - *atoms - * Check read_term_from_atomic/3 for the general version. Also, the built-in - *is - *supposed to - * use YAP's internal encoding, so please avoid the encoding/1 option. - */ - static Int read_term_from_atom(USES_REGS1) { - Term t1 = Deref(ARG1); - Atom at; - const unsigned char *s; +/** + * @pred read_term_from_atom( +Atom , -T , +Options ) + * + * read a term _T_ stored in constant _Atom_ according to _Options_ + * + * @param _Atom_ the source _Atom_ + * @param _T_ the output term _T_, may be any term + * @param _Options_ read_term/3 options. + * + * @notes Originally from SWI-Prolog, in YAP only works with internalised + *atoms + * Check read_term_from_atomic/3 for the general version. Also, the built-in + *is + *supposed to + * use YAP's internal encoding, so please avoid the encoding/1 option. 
+ */ +static Int read_term_from_atom(USES_REGS1) { + Term t1 = Deref(ARG1); + Atom at; + const unsigned char *s; - if (IsVarTerm(t1)) { - Yap_Error(INSTANTIATION_ERROR, t1, "style_check/1"); - return false; - } else if (!IsAtomTerm(t1)) { - Yap_Error(TYPE_ERROR_ATOM, t1, "style_check/1"); - return false; - } else { - at = AtomOfTerm(t1); - s = at->UStrOfAE; - } - Term ctl = add_output(ARG2, ARG3); + if (IsVarTerm(t1)) { + Yap_Error(INSTANTIATION_ERROR, t1, "style_check/1"); + return false; + } else if (!IsAtomTerm(t1)) { + Yap_Error(TYPE_ERROR_ATOM, t1, "style_check/1"); + return false; + } else { + at = AtomOfTerm(t1); + s = at->UStrOfAE; + } + Term ctl = add_output(ARG2, ARG3); - return Yap_BufferToTerm(s, ctl); - } + return Yap_BufferToTerm(s, ctl); +} - /** - * @pred read_term_from_atomic( +Atomic , - T , +Options ) - * - * read a term _T_ stored in text _Atomic_ according to _Options_ - * - * @param _Atomic_ the source may be an atom, string, list of codes, or list - *of - *chars. - * @param _T_ the output term _T_, may be any term - * @param _Options_ read_term/3 options. - * - * @notes Idea originally from SWI-Prolog, but in YAP we separate atomic and - *atom. - * Encoding is fixed in atoms and strings. - */ - static Int read_term_from_atomic(USES_REGS1) { - Term t1 = Deref(ARG1); - const unsigned char *s; +/** + * @pred read_term_from_atomic( +Atomic , - T , +Options ) + * + * read a term _T_ stored in text _Atomic_ according to _Options_ + * + * @param _Atomic_ the source may be an atom, string, list of codes, or list + *of + *chars. + * @param _T_ the output term _T_, may be any term + * @param _Options_ read_term/3 options. + * + * @notes Idea originally from SWI-Prolog, but in YAP we separate atomic and + *atom. + * Encoding is fixed in atoms and strings. 
+ */ +static Int read_term_from_atomic(USES_REGS1) { + Term t1 = Deref(ARG1); + const unsigned char *s; - if (IsVarTerm(t1)) { - Yap_Error(INSTANTIATION_ERROR, t1, "read_term_from_atomic/3"); - return (FALSE); - } else if (!IsAtomicTerm(t1)) { - Yap_Error(TYPE_ERROR_ATOMIC, t1, "read_term_from_atomic/3"); - return (FALSE); - } else { - Term t = Yap_AtomicToString(t1 PASS_REGS); - s = UStringOfTerm(t); - } - Term ctl = add_output(ARG2, ARG3); + if (IsVarTerm(t1)) { + Yap_Error(INSTANTIATION_ERROR, t1, "read_term_from_atomic/3"); + return (FALSE); + } else if (!IsAtomicTerm(t1)) { + Yap_Error(TYPE_ERROR_ATOMIC, t1, "read_term_from_atomic/3"); + return (FALSE); + } else { + Term t = Yap_AtomicToString(t1 PASS_REGS); + s = UStringOfTerm(t); + } + Term ctl = add_output(ARG2, ARG3); - return Yap_BufferToTerm(s, ctl); - } + return Yap_BufferToTerm(s, ctl); +} - /** - * @pred read_term_from_string( +String , - T , + Options ) - * - * read a term _T_ stored in constant _String_ according to _Options_ - * - * @param _String_ the source _String_ - * @param _T_ the output term _T_, may be any term - * @param _Options_ read_term/3 options. - * - * @notes Idea from SWI-Prolog, in YAP only works with strings - * Check read_term_from_atomic/3 for the general version. 
- */ - static Int read_term_from_string(USES_REGS1) { - Term t1 = Deref(ARG1), rc; - const unsigned char *s; - size_t len; - if (IsVarTerm(t1)) { - Yap_Error(INSTANTIATION_ERROR, t1, "read_term_from_string/3"); - return (FALSE); - } else if (!IsStringTerm(t1)) { - Yap_Error(TYPE_ERROR_STRING, t1, "read_term_from_string/3"); - return (FALSE); - } else { - s = UStringOfTerm(t1); - len = strlen_utf8(s); - } - char *ss = (char *)s; - encoding_t enc = ENC_ISO_UTF8; - int sno = Yap_open_buf_read_stream(ss, len, &enc, MEM_BUF_USER); - rc = Yap_read_term(sno, Deref(ARG3), 3); - Yap_CloseStream(sno); - if (!rc) - return false; - return Yap_unify(rc, ARG2); - } +/** + * @pred read_term_from_string( +String , - T , + Options ) + * + * read a term _T_ stored in constant _String_ according to _Options_ + * + * @param _String_ the source _String_ + * @param _T_ the output term _T_, may be any term + * @param _Options_ read_term/3 options. + * + * @notes Idea from SWI-Prolog, in YAP only works with strings + * Check read_term_from_atomic/3 for the general version. 
+ */ +static Int read_term_from_string(USES_REGS1) { + Term t1 = Deref(ARG1), rc; + const unsigned char *s; + size_t len; + if (IsVarTerm(t1)) { + Yap_Error(INSTANTIATION_ERROR, t1, "read_term_from_string/3"); + return (FALSE); + } else if (!IsStringTerm(t1)) { + Yap_Error(TYPE_ERROR_STRING, t1, "read_term_from_string/3"); + return (FALSE); + } else { + s = UStringOfTerm(t1); + len = strlen_utf8(s); + } + char *ss = (char *)s; + encoding_t enc = ENC_ISO_UTF8; + int sno = Yap_open_buf_read_stream(ss, len, &enc, MEM_BUF_USER); + rc = Yap_read_term(sno, Deref(ARG3), 3); + Yap_CloseStream(sno); + if (!rc) + return false; + return Yap_unify(rc, ARG2); +} - static Int atomic_to_term(USES_REGS1) { - Term t1 = Deref(ARG1); - if (IsVarTerm(t1)) { - Yap_Error(INSTANTIATION_ERROR, t1, "read_term_from_string/3"); - return (FALSE); - } else if (!IsAtomicTerm(t1)) { - Yap_Error(TYPE_ERROR_ATOMIC, t1, "read_term_from_atomic/3"); - return (FALSE); - } else { - Term t = Yap_AtomicToString(t1 PASS_REGS); - const unsigned char *us = UStringOfTerm(t); - return Yap_BufferToTerm(us, - add_output(ARG2, add_names(ARG3, TermNil))); - } - } +static Int atomic_to_term(USES_REGS1) { + Term t1 = Deref(ARG1); + if (IsVarTerm(t1)) { + Yap_Error(INSTANTIATION_ERROR, t1, "read_term_from_string/3"); + return (FALSE); + } else if (!IsAtomicTerm(t1)) { + Yap_Error(TYPE_ERROR_ATOMIC, t1, "read_term_from_atomic/3"); + return (FALSE); + } else { + Term t = Yap_AtomicToString(t1 PASS_REGS); + const unsigned char *us = UStringOfTerm(t); + return Yap_BufferToTerm(us, add_output(ARG2, add_names(ARG3, TermNil))); + } +} - static Int atom_to_term(USES_REGS1) { - Term t1 = Deref(ARG1); - if (IsVarTerm(t1)) { - Yap_Error(INSTANTIATION_ERROR, t1, "read_term_from_string/3"); - return (FALSE); - } else if (!IsAtomTerm(t1)) { - Yap_Error(TYPE_ERROR_ATOM, t1, "read_term_from_atomic/3"); - return (FALSE); - } else { - Term t = Yap_AtomicToString(t1 PASS_REGS); - const unsigned char *us = UStringOfTerm(t); - return 
Yap_BufferToTerm(us, - add_output(ARG2, add_names(ARG3, TermNil))); - } - } +static Int atom_to_term(USES_REGS1) { + Term t1 = Deref(ARG1); + if (IsVarTerm(t1)) { + Yap_Error(INSTANTIATION_ERROR, t1, "read_term_from_string/3"); + return (FALSE); + } else if (!IsAtomTerm(t1)) { + Yap_Error(TYPE_ERROR_ATOM, t1, "read_term_from_atomic/3"); + return (FALSE); + } else { + Term t = Yap_AtomicToString(t1 PASS_REGS); + const unsigned char *us = UStringOfTerm(t); + return Yap_BufferToTerm(us, add_output(ARG2, add_names(ARG3, TermNil))); + } +} - static Int string_to_term(USES_REGS1) { - Term t1 = Deref(ARG1); +static Int string_to_term(USES_REGS1) { + Term t1 = Deref(ARG1); - if (IsVarTerm(t1)) { - Yap_Error(INSTANTIATION_ERROR, t1, "read_term_from_string/3"); - return (FALSE); - } else if (!IsStringTerm(t1)) { - Yap_Error(TYPE_ERROR_STRING, t1, "read_term_from_string/3"); - return (FALSE); - } else { - const unsigned char *us = UStringOfTerm(t1); - return Yap_BufferToTerm(us, - add_output(ARG2, add_names(ARG3, TermNil))); - } - } + if (IsVarTerm(t1)) { + Yap_Error(INSTANTIATION_ERROR, t1, "read_term_from_string/3"); + return (FALSE); + } else if (!IsStringTerm(t1)) { + Yap_Error(TYPE_ERROR_STRING, t1, "read_term_from_string/3"); + return (FALSE); + } else { + const unsigned char *us = UStringOfTerm(t1); + return Yap_BufferToTerm(us, add_output(ARG2, add_names(ARG3, TermNil))); + } +} - void Yap_InitReadTPreds(void) { - Yap_InitCPred("read_term", 2, read_term2, SyncPredFlag); - Yap_InitCPred("read_term", 3, read_term, SyncPredFlag); +void Yap_InitReadTPreds(void) { + Yap_InitCPred("read_term", 2, read_term2, SyncPredFlag); + Yap_InitCPred("read_term", 3, read_term, SyncPredFlag); - Yap_InitCPred("scan_to_list", 2, scan_to_list, SyncPredFlag); - Yap_InitCPred("read", 1, read1, SyncPredFlag); - Yap_InitCPred("read", 2, read2, SyncPredFlag); - Yap_InitCPred("read_clause", 2, read_clause2, SyncPredFlag); - Yap_InitCPred("read_clause", 3, read_clause, 0); - 
Yap_InitCPred("read_term_from_atom", 3, read_term_from_atom, 0); - Yap_InitCPred("read_term_from_atomic", 3, read_term_from_atomic, 0); - Yap_InitCPred("read_term_from_string", 3, read_term_from_string, 0); - Yap_InitCPred("atom_to_term", 3, atom_to_term, 0); - Yap_InitCPred("atomic_to_term", 3, atomic_to_term, 0); - Yap_InitCPred("string_to_term", 3, string_to_term, 0); + Yap_InitCPred("scan_to_list", 2, scan_to_list, SyncPredFlag); + Yap_InitCPred("read", 1, read1, SyncPredFlag); + Yap_InitCPred("read", 2, read2, SyncPredFlag); + Yap_InitCPred("read_clause", 2, read_clause2, SyncPredFlag); + Yap_InitCPred("read_clause", 3, read_clause, 0); + Yap_InitCPred("read_term_from_atom", 3, read_term_from_atom, 0); + Yap_InitCPred("read_term_from_atomic", 3, read_term_from_atomic, 0); + Yap_InitCPred("read_term_from_string", 3, read_term_from_string, 0); + Yap_InitCPred("atom_to_term", 3, atom_to_term, 0); + Yap_InitCPred("atomic_to_term", 3, atomic_to_term, 0); + Yap_InitCPred("string_to_term", 3, string_to_term, 0); - Yap_InitCPred("fileerrors", 0, fileerrors, SyncPredFlag); - Yap_InitCPred("nofileeleerrors", 0, nofileerrors, SyncPredFlag); - Yap_InitCPred("source_location", 2, source_location, SyncPredFlag); - Yap_InitCPred("$style_checker", 1, style_checker, - SyncPredFlag | HiddenPredFlag); - } + Yap_InitCPred("fileerrors", 0, fileerrors, SyncPredFlag); + Yap_InitCPred("nofileeleerrors", 0, nofileerrors, SyncPredFlag); + Yap_InitCPred("source_location", 2, source_location, SyncPredFlag); + Yap_InitCPred("$style_checker", 1, style_checker, + SyncPredFlag | HiddenPredFlag); +} diff --git a/os/sockets.c b/os/sockets.c index 02702fc11..3c5aa3e8b 100644 --- a/os/sockets.c +++ b/os/sockets.c @@ -38,7 +38,7 @@ static char SccsId[] = "%W% %G%"; #if HAVE_IO_H /* Windows */ #include -#endif +#endif #if HAVE_SOCKET #include #endif @@ -84,7 +84,7 @@ Yap_socketStream( StreamDesc *s ) } } -/* +/* sockets cannot use standard FILE *, we have to go through fds, and in the case of 
VC++, we have to use the receive routines... */ @@ -108,7 +108,7 @@ SocketGetc(int sno) ch = c; } else { #if HAVE_STRERROR - Yap_Error(SYSTEM_ERROR_INTERNAL, TermNil, + Yap_Error(SYSTEM_ERROR_INTERNAL, TermNil, "( socket_getc: %s)", strerror(errno)); #else Yap_Error(SYSTEM_ERROR_INTERNAL, TermNil, @@ -211,7 +211,7 @@ SocketPutc (int sno, int ch) Yap_Error(PERMISSION_ERROR_INPUT_STREAM, TermNil, "error writing stream socket: %s", strerror(errno)); #else Yap_Error(PERMISSION_ERROR_INPUT_STREAM, TermNil, "error writing stream socket"); -#endif +#endif } } } @@ -256,6 +256,8 @@ Yap_InitSocketStream(int fd, socket_info flags, socket_domain domain) { st->charcount = 0; st->linecount = 1; st->linepos = 0; + st->vfs = NULL; + st->recbs = NULL; st->stream_putc = SocketPutc; st->stream_getc = SocketGetc; Yap_DefaultStreamOps( st ); diff --git a/packages/pyswip/pyswip/core.py b/packages/pyswip/pyswip/core.py index 854f4a89b..efabe1c1c 100644 --- a/packages/pyswip/pyswip/core.py +++ b/packages/pyswip/pyswip/core.py @@ -701,7 +701,29 @@ wint_t = c_uint # } __value; /* Value so far. 
*/ #} __mbstate_t; -class _mbstate_t_value(Union): +class lK_mbstate_t_value(Union): + + + + + + + + + + + + + + + + + + + + + + _fields_ = [("__wch",wint_t), ("__wchb",c_char*4)] diff --git a/packages/python/pybips.c b/packages/python/pybips.c index 71498fe1f..ed8f92c35 100644 --- a/packages/python/pybips.c +++ b/packages/python/pybips.c @@ -698,68 +698,66 @@ static PyObject *structseq_repr(PyObject *iobj) { } #endif -static bool -legal_symbol( const char *s) -{ - int ch; - while(((ch = *s++) != '\0')) { - if (isalnum(ch) || ch == '_') - continue; - return false; - } - return true; +static bool legal_symbol(const char *s) { + int ch; + while (((ch = *s++) != '\0')) { + if (isalnum(ch) || ch == '_') + continue; + return false; + } + return true; } PyObject *term_to_nametuple(const char *s, arity_t arity, PyObject *tuple) { PyObject *o, *d = NULL; if (legal_symbol(s)) { - PyTypeObject *typp; - PyObject *key = PyUnicode_FromString(s); - if (Py_f2p && (d = PyList_GetItem(Py_f2p, arity)) && - PyDict_Contains(d, key)) { - typp = (PyTypeObject *) PyDict_GetItem(d, key); - Py_INCREF(typp); - } else { - typp = calloc(sizeof(PyTypeObject), 1); - PyStructSequence_Desc *desc = calloc(sizeof(PyStructSequence_Desc), 1); - desc->name = PyMem_Malloc(strlen(s) + 1); - strcpy(desc->name, s); - desc->doc = "YAPTerm"; - desc->fields = pnull; - desc->n_in_sequence = arity; - if (PyStructSequence_InitType2(typp, desc) < 0) - return NULL; - // typp->tp_flags &= ~Py_TPFLAGS_HEAPTYPE; - // typp->tp_flags &= ~Py_TPFLAGS_HAVE_GC; - // typp->tp_str = structseq_str; - typp->tp_repr = structseq_repr; - // typp = PyStructSequence_NewType(desc); - // don't do this: we cannot add a type as an atribute. 
- // PyModule_AddGObject(py_Main, s, (PyObject *)typp); - if (d && !PyDict_Contains(d, key)) - PyDict_SetItem(d, key, (PyObject *) typp); - Py_DECREF(key); - Py_INCREF(typp); - } - o = PyStructSequence_New(typp); + PyTypeObject *typp; + PyObject *key = PyUnicode_FromString(s); + if (Py_f2p && (d = PyList_GetItem(Py_f2p, arity)) && + PyDict_Contains(d, key)) { + typp = (PyTypeObject *)PyDict_GetItem(d, key); Py_INCREF(typp); - arity_t i; - for (i = 0; i < arity; i++) { - PyObject *pArg = PyTuple_GET_ITEM(tuple, i); - Py_INCREF(pArg); - if (pArg) - PyStructSequence_SET_ITEM(o, i, pArg); - // PyObject_Print(pArg,stderr,0);fputc('\n',stderr); - } - //((PyStructSequence *)o)->ob_base.ob_size = arity; - // PyObject_Print(o,stderr,0);fputc('\n',stderr); - return o; + } else { + typp = calloc(sizeof(PyTypeObject), 1); + PyStructSequence_Desc *desc = calloc(sizeof(PyStructSequence_Desc), 1); + desc->name = PyMem_Malloc(strlen(s) + 1); + strcpy(desc->name, s); + desc->doc = "YAPTerm"; + desc->fields = pnull; + desc->n_in_sequence = arity; + if (PyStructSequence_InitType2(typp, desc) < 0) + return NULL; + // typp->tp_flags &= ~Py_TPFLAGS_HEAPTYPE; + // typp->tp_flags &= ~Py_TPFLAGS_HAVE_GC; + // typp->tp_str = structseq_str; + typp->tp_repr = structseq_repr; + // typp = PyStructSequence_NewType(desc); + // don't do this: we cannot add a type as an atribute. 
+ // PyModule_AddGObject(py_Main, s, (PyObject *)typp); + if (d && !PyDict_Contains(d, key)) + PyDict_SetItem(d, key, (PyObject *)typp); + Py_DECREF(key); + Py_INCREF(typp); + } + o = PyStructSequence_New(typp); + Py_INCREF(typp); + arity_t i; + for (i = 0; i < arity; i++) { + PyObject *pArg = PyTuple_GET_ITEM(tuple, i); + Py_INCREF(pArg); + if (pArg) + PyStructSequence_SET_ITEM(o, i, pArg); + // PyObject_Print(pArg,stderr,0);fputc('\n',stderr); + } + //((PyStructSequence *)o)->ob_base.ob_size = arity; + // PyObject_Print(o,stderr,0);fputc('\n',stderr); + return o; } else { - PyObject *o1; - o1 = PyTuple_New(2); - PyTuple_SET_ITEM(o1, 0, PyUnicode_FromString(s)); - PyTuple_SET_ITEM(o1, 1, tuple); - return o1; + PyObject *o1; + o1 = PyTuple_New(2); + PyTuple_SET_ITEM(o1, 0, PyUnicode_FromString(s)); + PyTuple_SET_ITEM(o1, 1, tuple); + return o1; } } @@ -945,7 +943,7 @@ PyObject *compound_to_pyeval(term_t t, PyObject *context, bool cvt) { AOK(PL_get_arg(1, t, targ), NULL); ptr = term_to_python(targ, true, NULL, true); return PyObject_Dir(ptr); - + {} } else if (fun == FUNCTOR_plus2) { @@ -1004,9 +1002,10 @@ PyObject *compound_to_pyeval(term_t t, PyObject *context, bool cvt) { if (!arity) { char *s = NULL; PyObject *pValue; - AOK(PL_get_atom_chars(t, &s), NULL); + PyObject_Print(o, stderr, 0); pValue = PyObject_GetAttrString(o, s); + PyObject_Print(pValue, stderr, 0); if (CHECKNULL(t, pValue) == NULL) { PyErr_Print(); return NULL; @@ -1014,43 +1013,68 @@ PyObject *compound_to_pyeval(term_t t, PyObject *context, bool cvt) { return pValue; } else { char *s = PL_atom_chars(name); - PyObject *ys = lookupPySymbol(s, o, NULL); - PyObject *pArgs = PyTuple_New(arity); + PyObject *ys = lookupPySymbol(s, o, NULL), *pArgs; DebugPrintf("Tuple %p\n", pArgs); int i; term_t tleft = PL_new_term_ref(); - for (i = 0; i < arity; i++) { + bool indict = true; + PyObject *pyDict = PyDict_New(); + + for (i = arity; i > 0; i--) { PyObject *pArg; - AOK(PL_get_arg(i + 1, t, tleft), NULL); + 
AOK(PL_get_arg(i, t, tleft), NULL); /* ignore (_) */ - if (i == 0 && PL_is_variable(tleft)) { - pArg = Py_None; - } else { - pArg = term_to_python(tleft, true, NULL, true); - // PyObject_Print(pArg,fdopen(2,"w"),0); - if (pArg == NULL) { - pArg = Py_None; + if (indict) { + if (PL_get_functor(tleft, &fun) && fun == FUNCTOR_equal2) { + term_t tatt = PL_new_term_ref(); + AOK(PL_get_arg(1, tleft, tatt), NULL); + PyObject *key = term_to_python(tatt, true, NULL, true); + AOK(PL_get_arg(2, tleft, tatt), NULL); + PyObject *val = term_to_python(tatt, true, NULL, true); + PyDict_SetItem(pyDict, key, val); + } else { + indict = false; + pArgs = PyTuple_New(i); } - /* pArg reference stolen here: */ - Py_INCREF(pArg); } + if (!indict) { + if (PL_is_variable(tleft)) { + pArg = Py_None; + } else { + pArg = term_to_python(tleft, true, NULL, true); + // PyObject_Print(pArg,fdopen(2,"w"),0); + if (pArg == NULL) { + pArg = Py_None; + } + /* pArg reference stolen here: */ + Py_INCREF(pArg); + } - PyTuple_SetItem(pArgs, i, pArg); + PyTuple_SetItem(pArgs, i - 1, pArg); + } } - PyObject *rc; + if (indict) { + pArgs = PyTuple_New(0); + } + + PyObject *rc; if (ys && PyCallable_Check(ys)) { + PyObject_Print(ys, stderr, 0); + PyObject_Print(pArgs, stderr, 0); + PyObject_Print(pyDict, stderr, 0); - // PyObject_Print(pArgs, stderr, 0); - // PyObject_Print(o, stderr, 0); - CHECK_CALL(rc, t, PyObject_CallObject(ys, pArgs)); - Py_DECREF(pArgs); - Py_DECREF(ys); - DebugPrintf("CallObject %p\n", rc); + // PyObject_Print(pArgs, stderr, 0); + // PyObject_Print(o, stderr, 0); + CHECK_CALL(rc, t, PyObject_Call(ys, pArgs, pyDict)); + Py_DECREF(pArgs); + Py_DECREF(ys); + PyObject_Print(rc, stderr, 0); + DebugPrintf("CallObject %p\n", rc); } else { - rc = term_to_nametuple(s, arity, pArgs); + rc = term_to_nametuple(s, arity, pArgs); } - + return rc; } } diff --git a/packages/python/python.c b/packages/python/python.c index cd4c8cb64..d99657cfc 100644 --- a/packages/python/python.c +++ 
b/packages/python/python.c @@ -77,14 +77,24 @@ static int py_put(int sno, int ch) { return ch; } -static int py_get(int sno) { +static int py_get(int sno) +{ StreamDesc *s = YAP_GetStreamFromId(sno); PyObject *fget = PyObject_GetAttrString(s->u.private_data, "read"); PyObject *pyr = PyObject_CallFunctionObjArgs(fget, PyLong_FromLong(1), NULL); return PyUnicode_READ_CHAR(pyr, 0); } -static int64_t py_seek(int sno, int64_t where, int how) { +static int py_peek(int sno) +{ + StreamDesc *s = YAP_GetStreamFromId(sno); + PyObject *fget = PyObject_GetAttrString(s->u.private_data, "peek"); + PyObject *pyr = PyObject_CallFunctionObjArgs(fget, PyLong_FromLong(1), NULL); + return PyUnicode_READ_CHAR(pyr, 0); +} + +static int64_t py_seek(int sno, int64_t where, int how) +{ StreamDesc *s = YAP_GetStreamFromId(sno); PyObject *fseek = PyObject_GetAttrString(s->u.private_data, "seek"); PyObject *pyr = PyObject_CallFunctionObjArgs(fseek, PyLong_FromLong(where), @@ -124,6 +134,7 @@ static bool init_python_stream(void) { pystream.open = py_open; pystream.close = py_close; pystream.get_char = py_get; + pystream.peek_char = py_peek; pystream.put_char = py_put; pystream.flush = py_flush; pystream.seek = py_seek; diff --git a/packages/python/swig/setup.py.in b/packages/python/swig/setup.py.in index 046cc50fe..a0ec33e80 100644 --- a/packages/python/swig/setup.py.in +++ b/packages/python/swig/setup.py.in @@ -1,4 +1,4 @@ -#!/usr/bin/env python + #!/usr/bin/env python # coding: utf-8 # Copyright (c) IPython Development Team. 
@@ -64,7 +64,7 @@ if platform.system() == 'Windows': win_libs = ['wsock32','ws2_32'] my_extra_link_args = ['-Wl,-export-all-symbols'] elif platform.system() == 'Darwin': - my_extra_link_args = ['-L','..'] + my_extra_link_args = ['-L','..','-Wl,-rpath','-Wl,${CMAKE_INSTALL_FULL_LIBDIR}'] win_libs = [] local_libs = ['Py4YAP'] elif platform.system() == 'Linux': diff --git a/packages/python/swig/yap4py/__init__.py.in b/packages/python/swig/yap4py/__init__.py.in index 26f8dd79c..ed4c12f41 100644 --- a/packages/python/swig/yap4py/__init__.py.in +++ b/packages/python/swig/yap4py/__init__.py.in @@ -13,19 +13,19 @@ if platform.system() == 'Windows': def load( dll ): dll = glob.glob(os.path.join(yap_lib_path,dll))[0] dll = os.path.abspath(dll) - ctypes.WinDLL(dll) -elif platform.system() == 'Darwin': + ctypes.WinDLL(dll)s +elif platform.system() == 'Darwin': def load( dll ): - dll = glob.glob(os.path.join(yap_lib_path,dll))[0] - dll = os.path.abspath(dll) - ctypes.CDLL(dll) - load('libYap.dylib') - load('libPy4YAP.dylib') -else: - def load( dll ): - dll = glob.glob(os.path.join(yap_lib_path,dll))[0] - dll = os.path.abspath(dll) - ctypes.CDLL(dll) - load('libYap.so') - load('libPy4YAP.so') - + dll = glob.glob(os.path.join(os.path.realpath(__file__),dll))[0] + dll = os.path.abspath(dll) + ctypes.CDLL(dll) + # load('libYap.dylib') + # load('libPy4YAP.dylib') + load( '_yap.dylib' ) +#else: +# def load( dll ): +# dll = glob.glob(os.path.join(yap_lib_path,dll))[0] +# dll = os.path.abspath(dll) +# ctypes.CDLL(dll) +# load('libYap.so') +# load('libPy4YAP.so') \ No newline at end of file diff --git a/packages/python/swig/yap4py/__main__.py b/packages/python/swig/yap4py/__main__.py index 5aeb4f9c7..bc5612ee9 100644 --- a/packages/python/swig/yap4py/__main__.py +++ b/packages/python/swig/yap4py/__main__.py @@ -1,7 +1,6 @@ """The main routine of the yap python project.""" import sys -import yap4py.yapi def main(**args): @@ -10,5 +9,6 @@ def main(**args): args = sys.argv[1:] if __name__ 
== "__main__": + import yap4py.yapi main() yap4py.yapi.main() diff --git a/packages/python/yap_kernel/CMakeLists.txt b/packages/python/yap_kernel/CMakeLists.txt index f55f59f9a..edb122911 100644 --- a/packages/python/yap_kernel/CMakeLists.txt +++ b/packages/python/yap_kernel/CMakeLists.txt @@ -1,4 +1,3 @@ - set (EXTRAS MANIFEST.in YAP_KERNEL.md @@ -60,21 +59,14 @@ yap_kernel/pylab/config.py ) - configure_file(setup.py.in ${CMAKE_CURRENT_BINARY_DIR}/setup.py) + configure_file(setup.py.in ${CMAKE_CURRENT_BINARY_DIR}/setup.py) + configure_file(${CMAKE_SOURCE_DIR}/misc/editors/prolog.js.in ${CMAKE_CURRENT_BINARY_DIR}/prolog.js ) - file(MAKE_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/yap_kernel/resources ) - file( COPY ${CMAKE_SOURCE_DIR}/docs/icons/yap_32x32x32.png - DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/yap_kernel/resources/ ) - file( RENAME ${CMAKE_CURRENT_BINARY_DIR}/yap_kernel/resources/yap_32x32x32.png ${CMAKE_CURRENT_BINARY_DIR}/yap_kernel/resources/logo-32x32.png ) - file( COPY ${CMAKE_SOURCE_DIR}/docs/icons/yap_64x64x32.png DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/yap_kernel/resources ) - file( RENAME ${CMAKE_CURRENT_BINARY_DIR}/yap_kernel/resources/yap_64x64x32.png ${CMAKE_CURRENT_BINARY_DIR}/yap_kernel/resources/logo-64x64.png ) - file( COPY ${CMAKE_CURRENT_SOURCE_DIR}/kernel.js DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/yap_kernel/resources/ ) - file( COPY ${CMAKE_SOURCE_DIR}/misc/editors/prolog.js DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/yap_kernel/resources/) set(SETUP_PY ${CMAKE_CURRENT_BINARY_DIR}/setup.py) add_custom_target( YAPKernel ALL - COMMAND ${PYTHON_EXECUTABLE} ${SETUP_PY} build sdist bdist + COMMAND ${PYTHON_EXECUTABLE} ${SETUP_PY} build sdist bdist WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR} DEPENDS YAP4PY ) @@ -83,4 +75,3 @@ WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR})") install(FILES ${PL_SOURCES} DESTINATION ${libpl} ) - diff --git a/packages/python/yap_kernel/kernel.js b/packages/python/yap_kernel/kernel.js index e892284ec..b5fd0586e 100644 
--- a/packages/python/yap_kernel/kernel.js +++ b/packages/python/yap_kernel/kernel.js @@ -1,204 +1,9 @@ -define( function() { - return {onload:function() { - console.info('Kernel specific javascript loaded'); - // do more things here, like define a codemirror mode]] - require(['codemirror/lib/codemirror', 'codemirror/mode/meta'], function(CodeMirror) { -` CodeMirror.mI = [ - {name: "APL", mime: "text/apl", mode: "apl", ext: ["dyalog", "apl"]}, - {name: "PGP", mimes: ["application/pgp", "application/pgp-keys", "application/pgp-signature"], mode: "asciiarmor", ext: ["pgp"]}, - {name: "ASN.1", mime: "text/x-ttcn-asn", mode: "asn.1", ext: ["asn", "asn1"]}, - {name: "Asterisk", mime: "text/x-asterisk", mode: "asterisk", file: /^extensions\.conf$/i}, - {name: "Brainfuck", mime: "text/x-brainfuck", mode: "brainfuck", ext: ["b", "bf"]}, - {name: "C", mime: "text/x-csrc", mode: "clike", ext: ["c", "h"]}, - {name: "C++", mime: "text/x-c++src", mode: "clike", ext: ["cpp", "c++", "cc", "cxx", "hpp", "h++", "hh", "hxx"], alias: ["cpp"]}, - {name: "Cobol", mime: "text/x-cobol", mode: "cobol", ext: ["cob", "cpy"]}, - {name: "C#", mime: "text/x-csharp", mode: "clike", ext: ["cs"], alias: ["csharp"]}, - {name: "Clojure", mime: "text/x-clojure", mode: "clojure", ext: ["clj", "cljc", "cljx"]}, - {name: "ClojureScript", mime: "text/x-clojurescript", mode: "clojure", ext: ["cljs"]}, - {name: "Closure Stylesheets (GSS)", mime: "text/x-gss", mode: "css", ext: ["gss"]}, - {name: "CMake", mime: "text/x-cmake", mode: "cmake", ext: ["cmake", "cmake.in"], file: /^CMakeLists.txt$/}, - {name: "CoffeeScript", mime: "text/x-coffeescript", mode: "coffeescript", ext: ["coffee"], alias: ["coffee", "coffee-script"]}, - {name: "Common Lisp", mime: "text/x-common-lisp", mode: "commonlisp", ext: ["cl", "lisp", "el"], alias: ["lisp"]}, - {name: "Cypher", mime: "application/x-cypher-query", mode: "cypher", ext: ["cyp", "cypher"]}, - {name: "Cython", mime: "text/x-cython", mode: "python", ext: ["pyx", 
"pxd", "pxi"]}, - {name: "Crystal", mime: "text/x-crystal", mode: "crystal", ext: ["cr"]}, - {name: "CSS", mime: "text/css", mode: "css", ext: ["css"]}, - {name: "CQL", mime: "text/x-cassandra", mode: "sql", ext: ["cql"]}, - {name: "D", mime: "text/x-d", mode: "d", ext: ["d"]}, - {name: "Dart", mimes: ["application/dart", "text/x-dart"], mode: "dart", ext: ["dart"]}, - {name: "diff", mime: "text/x-diff", mode: "diff", ext: ["diff", "patch"]}, - {name: "Django", mime: "text/x-django", mode: "django"}, - {name: "Dockerfile", mime: "text/x-dockerfile", mode: "dockerfile", file: /^Dockerfile$/}, - {name: "DTD", mime: "application/xml-dtd", mode: "dtd", ext: ["dtd"]}, - {name: "Dylan", mime: "text/x-dylan", mode: "dylan", ext: ["dylan", "dyl", "intr"]}, - {name: "EBNF", mime: "text/x-ebnf", mode: "ebnf"}, - {name: "ECL", mime: "text/x-ecl", mode: "ecl", ext: ["ecl"]}, - {name: "edn", mime: "application/edn", mode: "clojure", ext: ["edn"]}, - {name: "Eiffel", mime: "text/x-eiffel", mode: "eiffel", ext: ["e"]}, - {name: "Elm", mime: "text/x-elm", mode: "elm", ext: ["elm"]}, - {name: "Embedded Javascript", mime: "application/x-ejs", mode: "htmlembedded", ext: ["ejs"]}, - {name: "Embedded Ruby", mime: "application/x-erb", mode: "htmlembedded", ext: ["erb"]}, - {name: "Erlang", mime: "text/x-erlang", mode: "erlang", ext: ["erl"]}, - {name: "Factor", mime: "text/x-factor", mode: "factor", ext: ["factor"]}, - {name: "FCL", mime: "text/x-fcl", mode: "fcl"}, - {name: "Forth", mime: "text/x-forth", mode: "forth", ext: ["forth", "fth", "4th"]}, - {name: "Fortran", mime: "text/x-fortran", mode: "fortran", ext: ["f", "for", "f77", "f90"]}, - {name: "F#", mime: "text/x-fsharp", mode: "mllike", ext: ["fs"], alias: ["fsharp"]}, - {name: "Gas", mime: "text/x-gas", mode: "gas", ext: ["s"]}, - {name: "Gherkin", mime: "text/x-feature", mode: "gherkin", ext: ["feature"]}, - {name: "GitHub Flavored Markdown", mime: "text/x-gfm", mode: "gfm", file: /^(readme|contributing|history).md$/i}, - 
{name: "Go", mime: "text/x-go", mode: "go", ext: ["go"]}, - {name: "Groovy", mime: "text/x-groovy", mode: "groovy", ext: ["groovy", "gradle"], file: /^Jenkinsfile$/}, - {name: "HAML", mime: "text/x-haml", mode: "haml", ext: ["haml"]}, - {name: "Haskell", mime: "text/x-haskell", mode: "haskell", ext: ["hs"]}, - {name: "Haskell (Literate)", mime: "text/x-literate-haskell", mode: "haskell-literate", ext: ["lhs"]}, - {name: "Haxe", mime: "text/x-haxe", mode: "haxe", ext: ["hx"]}, - {name: "HXML", mime: "text/x-hxml", mode: "haxe", ext: ["hxml"]}, - {name: "ASP.NET", mime: "application/x-aspx", mode: "htmlembedded", ext: ["aspx"], alias: ["asp", "aspx"]}, - {name: "HTML", mime: "text/html", mode: "htmlmixed", ext: ["html", "htm"], alias: ["xhtml"]}, - {name: "HTTP", mime: "message/http", mode: "http"}, - {name: "IDL", mime: "text/x-idl", mode: "idl", ext: ["pro"]}, - {name: "Pug", mime: "text/x-pug", mode: "pug", ext: ["jade", "pug"], alias: ["jade"]}, - {name: "Java", mime: "text/x-java", mode: "clike", ext: ["java"]}, - {name: "Java Server Pages", mime: "application/x-jsp", mode: "htmlembedded", ext: ["jsp"], alias: ["jsp"]}, - {name: "JavaScript", mimes: ["text/javascript", "text/ecmascript", "application/javascript", "application/x-javascript", "application/ecmascript"], - mode: "javascript", ext: ["js"], alias: ["ecmascript", "js", "node"]}, - {name: "JSON", mimes: ["application/json", "application/x-json"], mode: "javascript", ext: ["json", "map"], alias: ["json5"]}, - {name: "JSON-LD", mime: "application/ld+json", mode: "javascript", ext: ["jsonld"], alias: ["jsonld"]}, - {name: "JSX", mime: "text/jsx", mode: "jsx", ext: ["jsx"]}, - {name: "Jinja2", mime: "null", mode: "jinja2"}, - {name: "Julia", mime: "text/x-julia", mode: "julia", ext: ["jl"]}, - {name: "Kotlin", mime: "text/x-kotlin", mode: "clike", ext: ["kt"]}, - {name: "LESS", mime: "text/x-less", mode: "css", ext: ["less"]}, - {name: "LiveScript", mime: "text/x-livescript", mode: "livescript", ext: 
["ls"], alias: ["ls"]}, - {name: "Lua", mime: "text/x-lua", mode: "lua", ext: ["lua"]}, - {name: "Markdown", mime: "text/x-markdown", mode: "markdown", ext: ["markdown", "md", "mkd"]}, - {name: "mIRC", mime: "text/mirc", mode: "mirc"}, - {name: "MariaDB SQL", mime: "text/x-mariadb", mode: "sql"}, - {name: "Mathematica", mime: "text/x-mathematica", mode: "mathematica", ext: ["m", "nb"]}, - {name: "Modelica", mime: "text/x-modelica", mode: "modelica", ext: ["mo"]}, - {name: "MUMPS", mime: "text/x-mumps", mode: "mumps", ext: ["mps"]}, - {name: "MS SQL", mime: "text/x-mssql", mode: "sql"}, - {name: "mbox", mime: "application/mbox", mode: "mbox", ext: ["mbox"]}, - {name: "MySQL", mime: "text/x-mysql", mode: "sql"}, - {name: "Nginx", mime: "text/x-nginx-conf", mode: "nginx", file: /nginx.*\.conf$/i}, - {name: "NSIS", mime: "text/x-nsis", mode: "nsis", ext: ["nsh", "nsi"]}, - {name: "NTriples", mime: "text/n-triples", mode: "ntriples", ext: ["nt"]}, - {name: "Objective C", mime: "text/x-objectivec", mode: "clike", ext: ["m", "mm"], alias: ["objective-c", "objc"]}, - {name: "OCaml", mime: "text/x-ocaml", mode: "mllike", ext: ["ml", "mli", "mll", "mly"]}, - {name: "Octave", mime: "text/x-octave", mode: "octave", ext: ["m"]}, - {name: "Oz", mime: "text/x-oz", mode: "oz", ext: ["oz"]}, - {name: "Pascal", mime: "text/x-pascal", mode: "pascal", ext: ["p", "pas"]}, - {name: "PEG.js", mime: "null", mode: "pegjs", ext: ["jsonld"]}, - {name: "Perl", mime: "text/x-perl", mode: "perl", ext: ["pl", "pm"]}, - {name: "PHP", mime: "application/x-httpd-php", mode: "php", ext: ["php", "php3", "php4", "php5", "phtml"]}, - {name: "Pig", mime: "text/x-pig", mode: "pig", ext: ["pig"]}, - {name: "Plain Text", mime: "text/plain", mode: "null", ext: ["txt", "text", "conf", "def", "list", "log"]}, - {name: "PLSQL", mime: "text/x-plsql", mode: "sql", ext: ["pls"]}, - {name: "PowerShell", mime: "application/x-powershell", mode: "powershell", ext: ["ps1", "psd1", "psm1"]}, - {name: "Properties 
files", mime: "text/x-properties", mode: "properties", ext: ["properties", "ini", "in"], alias: ["ini", "properties"]}, - { name: "Prolog", mime: "text/x-prolog", - mode: "prolog", ext: ["pl", "yap", "yss", "P"] }, +/*define(['./prolog.js'], function(){ - {name: "ProtoBuf", mime: "text/x-protobuf", mode: "protobuf", ext: ["proto"]}, -< {name: "Python", mime: "text/x-python", mode: "python", ext: ["BUILD", "bzl", "py", "pyw"], file: /^(BUCK|BUILD)$/}, - {name: "Puppet", mime: "text/x-puppet", mode: "puppet", ext: ["pp"]}, - {name: "Q", mime: "text/x-q", mode: "q", ext: ["q"]}, - {name: "R", mime: "text/x-rsrc", mode: "r", ext: ["r", "R"], alias: ["rscript"]}, - {name: "reStructuredText", mime: "text/x-rst", mode: "rst", ext: ["rst"], alias: ["rst"]}, - {name: "RPM Changes", mime: "text/x-rpm-changes", mode: "rpm"}, - {name: "RPM Spec", mime: "text/x-rpm-spec", mode: "rpm", ext: ["spec"]}, - {name: "Ruby", mime: "text/x-ruby", mode: "ruby", ext: ["rb"], alias: ["jruby", "macruby", "rake", "rb", "rbx"]}, - {name: "Rust", mime: "text/x-rustsrc", mode: "rust", ext: ["rs"]}, - {name: "SAS", mime: "text/x-sas", mode: "sas", ext: ["sas"]}, - {name: "Sass", mime: "text/x-sass", mode: "sass", ext: ["sass"]}, - {name: "Scala", mime: "text/x-scala", mode: "clike", ext: ["scala"]}, - {name: "Scheme", mime: "text/x-scheme", mode: "scheme", ext: ["scm", "ss"]}, - {name: "SCSS", mime: "text/x-scss", mode: "css", ext: ["scss"]}, - {name: "Shell", mime: "text/x-sh", mode: "shell", ext: ["sh", "ksh", "bash"], alias: ["bash", "sh", "zsh"], file: /^PKGBUILD$/}, - {name: "Sieve", mime: "application/sieve", mode: "sieve", ext: ["siv", "sieve"]}, - {name: "Slim", mimes: ["text/x-slim", "application/x-slim"], mode: "slim", ext: ["slim"]}, - {name: "Smalltalk", mime: "text/x-stsrc", mode: "smalltalk", ext: ["st"]}, - {name: "Smarty", mime: "text/x-smarty", mode: "smarty", ext: ["tpl"]}, - {name: "Solr", mime: "text/x-solr", mode: "solr"}, - {name: "Soy", mime: "text/x-soy", mode: "soy", 
ext: ["soy"], alias: ["closure template"]}, - {name: "SPARQL", mime: "application/sparql-query", mode: "sparql", ext: ["rq", "sparql"], alias: ["sparul"]}, - {name: "Spreadsheet", mime: "text/x-spreadsheet", mode: "spreadsheet", alias: ["excel", "formula"]}, - {name: "SQL", mime: "text/x-sql", mode: "sql", ext: ["sql"]}, - {name: "Squirrel", mime: "text/x-squirrel", mode: "clike", ext: ["nut"]}, - {name: "Stylus", mime: "text/x-styl", mode: "stylus", ext: ["styl"]}, - {name: "Swift", mime: "text/x-swift", mode: "swift", ext: ["swift"]}, - {name: "sTeX", mime: "text/x-stex", mode: "stex"}, - {name: "LaTeX", mime: "text/x-latex", mode: "stex", ext: ["text", "ltx"], alias: ["tex"]}, - {name: "SystemVerilog", mime: "text/x-systemverilog", mode: "verilog", ext: ["v"]}, - {name: "Tcl", mime: "text/x-tcl", mode: "tcl", ext: ["tcl"]}, - {name: "Textile", mime: "text/x-textile", mode: "textile", ext: ["textile"]}, - {name: "TiddlyWiki ", mime: "text/x-tiddlywiki", mode: "tiddlywiki"}, - {name: "Tiki wiki", mime: "text/tiki", mode: "tiki"}, - {name: "TOML", mime: "text/x-toml", mode: "toml", ext: ["toml"]}, - {name: "Tornado", mime: "text/x-tornado", mode: "tornado"}, - {name: "troff", mime: "text/troff", mode: "troff", ext: ["1", "2", "3", "4", "5", "6", "7", "8", "9"]}, - {name: "TTCN", mime: "text/x-ttcn", mode: "ttcn", ext: ["ttcn", "ttcn3", "ttcnpp"]}, - {name: "TTCN_CFG", mime: "text/x-ttcn-cfg", mode: "ttcn-cfg", ext: ["cfg"]}, - {name: "Turtle", mime: "text/turtle", mode: "turtle", ext: ["ttl"]}, - {name: "TypeScript", mime: "application/typescript", mode: "javascript", ext: ["ts"], alias: ["ts"]}, - {name: "Twig", mime: "text/x-twig", mode: "twig"}, - {name: "Web IDL", mime: "text/x-webidl", mode: "webidl", ext: ["webidl"]}, - {name: "VB.NET", mime: "text/x-vb", mode: "vb", ext: ["vb"]}, - {name: "VBScript", mime: "text/vbscript", mode: "vbscript", ext: ["vbs"]}, - {name: "Velocity", mime: "text/velocity", mode: "velocity", ext: ["vtl"]}, - {name: "Verilog", mime: 
"text/x-verilog", mode: "verilog", ext: ["v"]}, - {name: "VHDL", mime: "text/x-vhdl", mode: "vhdl", ext: ["vhd", "vhdl"]}, - {name: "Vue.js Component", mimes: ["script/x-vue", "text/x-vue"], mode: "vue", ext: ["vue"]}, - {name: "XML", mimes: ["application/xml", "text/xml"], mode: "xml", ext: ["xml", "xsl", "xsd"], alias: ["rss", "wsdl", "xsd"]}, - {name: "XQuery", mime: "application/xquery", mode: "xquery", ext: ["xy", "xquery"]}, - {name: "Yacas", mime: "text/x-yacas", mode: "yacas", ext: ["ys"]}, - {name: "YAML", mimes: ["text/x-yaml", "text/yaml"], mode: "yaml", ext: ["yaml", "yml"], alias: ["yml"]}, - {name: "Z80", mime: "text/x-z80", mode: "z80", ext: ["z80"]}, - {name: "mscgen", mime: "text/x-mscgen", mode: "mscgen", ext: ["mscgen", "mscin", "msc"]}, - {name: "xu", mime: "text/x-xu", mode: "mscgen", ext: ["xu"]}, - {name: "msgenny", mime: "text/x-msgenny", mode: "mscgen", ext: ["msgenny"]} - ]; + var onload = function(){ + console.log("I am being loaded"); - - }); - - - CodeMirror.findModeByMIME = function(mime) { - mime = mime.toLowerCase(); - for (var i = 0; i < CodeMirror.mI.length; i++) { - var info = CodeMirror.mI[i]; - if (info.mime == mime) return info; - if (info.mimes) for (var j = 0; j < info.mimes.length; j++) - if (info.mimes[j] == mime) return info; - } - }; - - CodeMirror.findModeByExtension = function(ext) { - for (var i = 0; i < CodeMirror.mI.length; i++) { - var info = CodeMirror.mI[i]; - if (info.ext) for (var j = 0; j < info.ext.length; j++) - if (info.ext[j] == ext) return info; - } - }; - - CodeMirror.findModeByFileName = function(filename) { - for (var i = 0; i < CodeMirror.mI.length; i++) { - var info = CodeMirror.mI[i]; - if (info.file && info.file.test(filename)) return info; - } - var dot = filename.lastIndexOf("."); - var ext = dot > -1 && filename.substring(dot + 1, filename.length); - if (ext) return CodeMirror.findModeByExtension(ext); - }; - - CodeMirror.findModeByName = function(name) { - name = name.toLowerCase(); - for (var i 
= 0; i < CodeMirror.mI.length; i++) { - var info = CodeMirror.mI[i]; - if (info.name.toLowerCase() == name) return info; - if (info.alias) for (var j = 0; j < info.alias.length; j++) - if (info.alias[j].toLowerCase() == name) return info; - } - }; - } }; }); + + }; + return {onload:onload} +});*/ \ No newline at end of file diff --git a/packages/python/yap_kernel/setup.py b/packages/python/yap_kernel/setup.py deleted file mode 100644 index 718148ef8..000000000 --- a/packages/python/yap_kernel/setup.py +++ /dev/null @@ -1,133 +0,0 @@ -#!/usr/bin/env python -# coding: utf-8 - -# Copyright (c) IPython Development Team. -# Distributed under the terms of the Modified BSD License. - -from __future__ import print_function - -# the name of the package -name = 'yap_kernel' - -#----------------------------------------------------------------------------- -# Minimal Python version sanity check -#----------------------------------------------------------------------------- - -import sys -import sysconfig -import setuptools - -v = sys.version_info -if v[:2] < (2,7) or (v[0] >= 3 and v[:2] < (3,3)): - error = "ERROR: %s requires Python version 3.3 or above." 
% name - print(error, file=sys.stderr) - sys.exit(1) - -PY3 = (sys.version_info[0] >= 3) - -#----------------------------------------------------------------------------- -# get on with it -#----------------------------------------------------------------------------- - -from glob import glob -import os -import shutil - -from distutils.core import setup - -pjoin = os.path.join -here = os.path.abspath(os.path.dirname(__file__)) -# pkg_root = pjoin(here, name) - -packages = setuptools.find_packages('/home/vsc/github/yap-6.3/packages/python/yap_kernel') -# for d, _, _ in os.walk(pjoin(here, name)): -# if os.path.exists(pjoin(d, '__init__.py')): -# packages.append(d[len(here)+1:].replace(os.path.sep, '.')) - -sys.path.insert(0, "/home/vsc/github/yap-6.3/packages/python/yap_kernel") -package_data = { -'yap_ipython': ['prolog/*.*'], -'yap_kernel': ['resources/*.*'] -} - - -version_ns = {} -with open(pjoin('/home/vsc/github/yap-6.3/packages/python/yap_kernel', name, '_version.py')) as f: - exec(f.read(), {}, version_ns) - - - - -setup_args = dict( - name = name, - version = version_ns['__version__'], - scripts = glob(pjoin('scripts', '*')), - packages = packages, - py_modules = ['yap_kernel_launcher'], - package_data = package_data, - package_dir = {'':"/home/vsc/github/yap-6.3/packages/python/yap_kernel"}, - description = "YAP Kernel for Jupyter", - author = 'YAP Development Team', - author_email = 'YAP-dev@scipy.org', - url = 'http://ipython.org', - license = 'BSD', - platforms = "Linux, Mac OS X, Windows", - keywords = ['Interactive', 'Interpreter', 'Shell', 'Web'], - classifiers = [ - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Intended Audience :: Science/Research', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Prolog', - 'Programming Language :: Python', - 'Programming Language :: Python :: 3', - ], -) - -if 'develop' in sys.argv or any(a.startswith('bdist') for a in sys.argv): - import setuptools - 
-setuptools_args = {} -install_requires = setuptools_args['install_requires'] = [ -] - -if any(a.startswith(('bdist', 'build', 'install')) for a in sys.argv): - from yap_kernel.kernelspec import write_kernel_spec, make_yap_kernel_cmd, KERNEL_NAME - - - argv = make_yap_kernel_cmd(executable='python') - dest = os.path.join(here, 'resources') - if not os.path.exists(dest): - os.makedirs( dest ) - shutil.copy('/home/vsc/github/yap-6.3/docs/icons/yap_32x32x32.png',os.path.join(dest,'logo-32x32.png')) - shutil.copy('/home/vsc/github/yap-6.3/docs/icons/yap_64x64x32.png',os.path.join(dest,'logo-64x64.png')) - try: - write_kernel_spec(dest, overrides={'argv': argv}) - except: - none - # shutil.copy('/home/vsc/github/yap-6.3/packages/python/yap_kernel/kernel.js',dest) - # shutil.copy('/home/vsc/github/yap-6.3/misc/editors/prolog.js',dest) - setup_args['data_files'] = [ - (pjoin('share', 'jupyter', 'kernels', KERNEL_NAME), glob(pjoin(dest, '*'))), - ] - mode_loc = pjoin( sysconfig.get_path('platlib'), 'notebook', 'static', 'components', 'codemirror', 'mode', 'prolog') - custom_loc = pjoin( sysconfig.get_path('platlib'), 'notebook', 'static', 'custom') - try: - shutil.copy( pjoin( custom_loc, "custom.js") , pjoin( custom_loc, "custom.js.orig")) - shutil.copy( "/home/vsc/github/yap-6.3/packages/python/yap_kernel/custom.js" , pjoin( custom_loc, "custom.js")) - if not os.path.exists(mode_loc): - os.makedirs(mode_loc) - shutil.copy( "/home/vsc/github/yap-6.3/misc/editors/prolog.js" , mode_loc) - except: - pass - -extras_require = setuptools_args['extras_require'] = { - 'test:python_version=="2.7"': ['mock'], - 'test': ['nose_warnings_filters', 'nose-timer'], -} - -if 'setuptools' in sys.modules: - setup_args.update(setuptools_args) - -if __name__ == '__main__': - setup(**setup_args) diff --git a/packages/python/yap_kernel/setup.py.in b/packages/python/yap_kernel/setup.py.in index 583d3d347..3a86917ef 100644 --- a/packages/python/yap_kernel/setup.py.in +++ 
b/packages/python/yap_kernel/setup.py.in @@ -100,26 +100,24 @@ if any(a.startswith(('bdist', 'build', 'install')) for a in sys.argv): try: shutil.rmtree(dest) os.makedirs( dest ) - shutil.copy2('${CMAKE_SOURCE_DIR}/docs/icons/yap_32x32x32.png',dest) - shutil.copy2('${CMAKE_SOURCE_DIR}/docs/icons/yap_64x64x32.png',dest) + shutil.copy2('${CMAKE_SOURCE_DIR}/docs/icons/yap_32x32x32.png',pjoin(dest,"logo_32x32.png")) + shutil.copy2('${CMAKE_SOURCE_DIR}/docs/icons/yap_64x64x32.png',pjoin(dest,"logo_64x64.png")) write_kernel_spec(dest, overrides={'argv': argv}) except: pass # shutil.copy('${CMAKE_CURRENT_SOURCE_DIR}/kernel.js',dest) # shutil.copy('${CMAKE_SOURCE_DIR}/misc/editors/prolog.js',dest) - setup_args['data_files'] = [ - (pjoin('share', 'jupyter', 'kernels', KERNEL_NAME), glob(pjoin(dest, '*'))), - ] + setup_args['data_files'] = [(pjoin('share', 'jupyter', 'kernels', KERNEL_NAME), glob(pjoin(dest, '*')))] mode_loc = pjoin( sysconfig.get_path('platlib'), 'notebook', 'static', 'components', 'codemirror', 'mode', 'prolog') custom_loc = pjoin( sysconfig.get_path('platlib'), 'notebook', 'static', 'custom') - try: - shutil.copy( pjoin( custom_loc, "custom.js") , pjoin( custom_loc, "custom.js.orig")) - shutil.copy( "${CMAKE_CURRENT_SOURCE_DIR}/custom.js" , pjoin( custom_loc, "custom.js")) - if not os.path.exists(mode_loc): - os.makedirs(mode_loc) - shutil.copy( "${CMAKE_SOURCE_DIR}/misc/editors/prolog.js" , mode_loc) - except: - pass + # try: + # shutil.copy( pjoin( custom_loc, "custom.js") , pjoin( custom_loc, "custom.js.orig")) + # shutil.copy( "${CMAKE_CURRENT_SOURCE_DIR}/custom.js" , pjoin( custom_loc, "custom.js")) + # if not os.path.exists(mode_loc): + # os.makedirs(mode_loc) + # shutil.copy( "${CMAKE_SOURCE_DIR}/misc/editors/prolog.js" , mode_loc) + # except: + # pass extras_require = setuptools_args['extras_require'] = { 'test:python_version=="2.7"': ['mock'], diff --git a/packages/python/yap_kernel/yap_ipython/__init__.py 
b/packages/python/yap_kernel/yap_ipython/__init__.py index 753cbb709..49c49245d 100644 --- a/packages/python/yap_kernel/yap_ipython/__init__.py +++ b/packages/python/yap_kernel/yap_ipython/__init__.py @@ -1,11 +1,11 @@ # encoding: utf-8 """ -IPython: tools for interactive and parallel computing in Python. +yap_ipython: tools for interactive and parallel computing in Python. http://ipython.org """ #----------------------------------------------------------------------------- -# Copyright (c) 2008-2011, IPython Development Team. +# Copyright (c) 2008-2011, yap_ipython Development Team. # Copyright (c) 2001-2007, Fernando Perez # Copyright (c) 2001, Janko Hauser # Copyright (c) 2001, Nathaniel Gray @@ -29,19 +29,19 @@ import sys # Don't forget to also update setup.py when this changes! if sys.version_info < (3,3): raise ImportError( - """ - IPython 6.0+ does not support Python 2.6, 2.7, 3.0, 3.1, or 3.2. - When using Python 2.7, please install IPython 5.x LTS Long Term Support version. - Beginning with IPython 6.0, Python 3.3 and above is required. - - See IPython `README.rst` file for more information: - - https://github.com/ipython/ipython/blob/master/README.rst - - """) +""" +yap_ipython 6.0+ does not support Python 2.6, 2.7, 3.0, 3.1, or 3.2. +When using Python 2.7, please install yap_ipython 5.x LTS Long Term Support version. +Beginning with yap_ipython 6.0, Python 3.3 and above is required. + +See yap_ipython `README.rst` file for more information: + + https://github.com/ipython/ipython/blob/master/README.rst + +""") # Make it easy to import extensions - they are always directly on pythonpath. -# Therefore, non-IPython modules can be added to extensions directory. +# Therefore, non-yap_ipython modules can be added to extensions directory. # This should probably be in ipapp.py. 
sys.path.append(os.path.join(os.path.dirname(__file__), "extensions")) @@ -51,13 +51,13 @@ sys.path.append(os.path.join(os.path.dirname(__file__), "extensions")) from .core.getipython import get_ipython from .core import release -from IPython.core.application import Application -from IPython.terminal.embed import embed +from .core.application import Application +from .terminal.embed import embed -from .core.interactiveshell import YAPInteractive as InteractiveShell -from IPython.testing import test -from IPython.utils.sysinfo import sys_info -from IPython.utils.frame import extract_module_locals +from .core.interactiveshell import InteractiveShell +from .testing import test +from .utils.sysinfo import sys_info +from .utils.frame import extract_module_locals # Release data __author__ = '%s <%s>' % (release.author, release.author_email) @@ -66,86 +66,86 @@ __version__ = release.version version_info = release.version_info def embed_kernel(module=None, local_ns=None, **kwargs): - """Embed and start an IPython kernel in a given scope. - + """Embed and start an yap_ipython kernel in a given scope. + If you don't want the kernel to initialize the namespace from the scope of the surrounding function, - and/or you want to load full IPython configuration, - you probably want `IPython.start_kernel()` instead. - + and/or you want to load full yap_ipython configuration, + you probably want `yap_ipython.start_kernel()` instead. + Parameters ---------- module : ModuleType, optional - The module to load into IPython globals (default: caller) + The module to load into yap_ipython globals (default: caller) local_ns : dict, optional - The namespace to load into IPython user namespace (default: caller) - + The namespace to load into yap_ipython user namespace (default: caller) + kwargs : various, optional - Further keyword args are relayed to the IPKernelApp constructor, + Further keyword args are relayed to the YAPKernelApp constructor, allowing configuration of the Kernel. 
Will only have an effect on the first embed_kernel call for a given process. """ - + (caller_module, caller_locals) = extract_module_locals(1) if module is None: module = caller_module if local_ns is None: local_ns = caller_locals - + # Only import .zmq when we really need it - from ipykernel.embed import embed_kernel as real_embed_kernel + from yap_kernel.embed import embed_kernel as real_embed_kernel real_embed_kernel(module=module, local_ns=local_ns, **kwargs) def start_ipython(argv=None, **kwargs): - """Launch a normal IPython instance (as opposed to embedded) - - `IPython.embed()` puts a shell in a particular calling scope, + """Launch a normal yap_ipython instance (as opposed to embedded) + + `yap_ipython.embed()` puts a shell in a particular calling scope, such as a function or method for debugging purposes, which is often not desirable. - - `start_ipython()` does full, regular IPython initialization, + + `start_ipython()` does full, regular yap_ipython initialization, including loading startup files, configuration, etc. much of which is skipped by `embed()`. - + This is a public API method, and will survive implementation changes. - + Parameters ---------- - + argv : list or None, optional - If unspecified or None, IPython will parse command-line options from sys.argv. + If unspecified or None, yap_ipython will parse command-line options from sys.argv. To prevent any command-line parsing, pass an empty list: `argv=[]`. user_ns : dict, optional - specify this dictionary to initialize the IPython user namespace with particular values. + specify this dictionary to initialize the yap_ipython user namespace with particular values. kwargs : various, optional Any other kwargs will be passed to the Application constructor, such as `config`. 
""" - from IPython.terminal.ipapp import launch_new_instance + from yap_ipython.terminal.ipapp import launch_new_instance return launch_new_instance(argv=argv, **kwargs) def start_kernel(argv=None, **kwargs): - """Launch a normal IPython kernel instance (as opposed to embedded) - - `IPython.embed_kernel()` puts a shell in a particular calling scope, + """Launch a normal yap_ipython kernel instance (as opposed to embedded) + + `yap_ipython.embed_kernel()` puts a shell in a particular calling scope, such as a function or method for debugging purposes, which is often not desirable. - - `start_kernel()` does full, regular IPython initialization, + + `start_kernel()` does full, regular yap_ipython initialization, including loading startup files, configuration, etc. much of which is skipped by `embed()`. - + Parameters ---------- - + argv : list or None, optional - If unspecified or None, IPython will parse command-line options from sys.argv. + If unspecified or None, yap_ipython will parse command-line options from sys.argv. To prevent any command-line parsing, pass an empty list: `argv=[]`. user_ns : dict, optional - specify this dictionary to initialize the IPython user namespace with particular values. + specify this dictionary to initialize the yap_ipython user namespace with particular values. kwargs : various, optional Any other kwargs will be passed to the Application constructor, such as `config`. """ - from IPython.kernel.zmq.kernelapp import launch_new_instance + from yap_ipython.kernel.zmq.kernelapp import launch_new_instance return launch_new_instance(argv=argv, **kwargs) diff --git a/packages/python/yap_kernel/yap_ipython/__main__.py b/packages/python/yap_kernel/yap_ipython/__main__.py new file mode 100644 index 000000000..70c245842 --- /dev/null +++ b/packages/python/yap_kernel/yap_ipython/__main__.py @@ -0,0 +1,14 @@ +# encoding: utf-8 +"""Terminal-based yap_ipython entry point. 
+""" +#----------------------------------------------------------------------------- +# Copyright (c) 2012, yap_ipython Development Team. +# +# Distributed under the terms of the Modified BSD License. +# +# The full license is in the file COPYING.txt, distributed with this software. +#----------------------------------------------------------------------------- + +from yap_ipython import start_ipython + +start_ipython() diff --git a/packages/python/yap_kernel/yap_ipython/config.py b/packages/python/yap_kernel/yap_ipython/config.py new file mode 100644 index 000000000..3cbce8ad4 --- /dev/null +++ b/packages/python/yap_kernel/yap_ipython/config.py @@ -0,0 +1,19 @@ +""" +Shim to maintain backwards compatibility with old yap_ipython.config imports. +""" +# Copyright (c) yap_ipython Development Team. +# Distributed under the terms of the Modified BSD License. + +import sys +from warnings import warn + +from yap_ipython.utils.shimmodule import ShimModule, ShimWarning + +warn("The `yap_ipython.config` package has been deprecated since yap_ipython 4.0. " + "You should import from traitlets.config instead.", ShimWarning) + + +# Unconditionally insert the shim into sys.modules so that further import calls +# trigger the custom attribute access above + +sys.modules['yap_ipython.config'] = ShimModule(src='yap_ipython.config', mirror='traitlets.config') diff --git a/packages/python/yap_kernel/yap_ipython/consoleapp.py b/packages/python/yap_kernel/yap_ipython/consoleapp.py new file mode 100644 index 000000000..793cbc682 --- /dev/null +++ b/packages/python/yap_kernel/yap_ipython/consoleapp.py @@ -0,0 +1,12 @@ +""" +Shim to maintain backwards compatibility with old yap_ipython.consoleapp imports. +""" +# Copyright (c) yap_ipython Development Team. +# Distributed under the terms of the Modified BSD License. + +from warnings import warn + +warn("The `yap_ipython.consoleapp` package has been deprecated. 
" + "You should import from jupyter_client.consoleapp instead.") + +from jupyter_client.consoleapp import * diff --git a/packages/python/yap_kernel/yap_ipython/core/alias.py b/packages/python/yap_kernel/yap_ipython/core/alias.py new file mode 100644 index 000000000..5b3045379 --- /dev/null +++ b/packages/python/yap_kernel/yap_ipython/core/alias.py @@ -0,0 +1,256 @@ +# encoding: utf-8 +""" +System command aliases. + +Authors: + +* Fernando Perez +* Brian Granger +""" + +#----------------------------------------------------------------------------- +# Copyright (C) 2008-2011 The yap_ipython Development Team +# +# Distributed under the terms of the BSD License. +# +# The full license is in the file COPYING.txt, distributed with this software. +#----------------------------------------------------------------------------- + +#----------------------------------------------------------------------------- +# Imports +#----------------------------------------------------------------------------- + +import os +import re +import sys + +from traitlets.config.configurable import Configurable +from yap_ipython.core.error import UsageError + +from traitlets import List, Instance +from logging import error + +#----------------------------------------------------------------------------- +# Utilities +#----------------------------------------------------------------------------- + +# This is used as the pattern for calls to split_user_input. +shell_line_split = re.compile(r'^(\s*)()(\S+)(.*$)') + +def default_aliases(): + """Return list of shell aliases to auto-define. + """ + # Note: the aliases defined here should be safe to use on a kernel + # regardless of what frontend it is attached to. Frontends that use a + # kernel in-process can define additional aliases that will only work in + # their case. 
For example, things like 'less' or 'clear' that manipulate + # the terminal should NOT be declared here, as they will only work if the + # kernel is running inside a true terminal, and not over the network. + + if os.name == 'posix': + default_aliases = [('mkdir', 'mkdir'), ('rmdir', 'rmdir'), + ('mv', 'mv'), ('rm', 'rm'), ('cp', 'cp'), + ('cat', 'cat'), + ] + # Useful set of ls aliases. The GNU and BSD options are a little + # different, so we make aliases that provide as similar as possible + # behavior in ipython, by passing the right flags for each platform + if sys.platform.startswith('linux'): + ls_aliases = [('ls', 'ls -F --color'), + # long ls + ('ll', 'ls -F -o --color'), + # ls normal files only + ('lf', 'ls -F -o --color %l | grep ^-'), + # ls symbolic links + ('lk', 'ls -F -o --color %l | grep ^l'), + # directories or links to directories, + ('ldir', 'ls -F -o --color %l | grep /$'), + # things which are executable + ('lx', 'ls -F -o --color %l | grep ^-..x'), + ] + elif sys.platform.startswith('openbsd') or sys.platform.startswith('netbsd'): + # OpenBSD, NetBSD. The ls implementation on these platforms do not support + # the -G switch and lack the ability to use colorized output. + ls_aliases = [('ls', 'ls -F'), + # long ls + ('ll', 'ls -F -l'), + # ls normal files only + ('lf', 'ls -F -l %l | grep ^-'), + # ls symbolic links + ('lk', 'ls -F -l %l | grep ^l'), + # directories or links to directories, + ('ldir', 'ls -F -l %l | grep /$'), + # things which are executable + ('lx', 'ls -F -l %l | grep ^-..x'), + ] + else: + # BSD, OSX, etc. 
+ ls_aliases = [('ls', 'ls -F -G'), + # long ls + ('ll', 'ls -F -l -G'), + # ls normal files only + ('lf', 'ls -F -l -G %l | grep ^-'), + # ls symbolic links + ('lk', 'ls -F -l -G %l | grep ^l'), + # directories or links to directories, + ('ldir', 'ls -F -G -l %l | grep /$'), + # things which are executable + ('lx', 'ls -F -l -G %l | grep ^-..x'), + ] + default_aliases = default_aliases + ls_aliases + elif os.name in ['nt', 'dos']: + default_aliases = [('ls', 'dir /on'), + ('ddir', 'dir /ad /on'), ('ldir', 'dir /ad /on'), + ('mkdir', 'mkdir'), ('rmdir', 'rmdir'), + ('echo', 'echo'), ('ren', 'ren'), ('copy', 'copy'), + ] + else: + default_aliases = [] + + return default_aliases + + +class AliasError(Exception): + pass + + +class InvalidAliasError(AliasError): + pass + +class Alias(object): + """Callable object storing the details of one alias. + + Instances are registered as magic functions to allow use of aliases. + """ + + # Prepare blacklist + blacklist = {'cd','popd','pushd','dhist','alias','unalias'} + + def __init__(self, shell, name, cmd): + self.shell = shell + self.name = name + self.cmd = cmd + self.__doc__ = "Alias for `!{}`".format(cmd) + self.nargs = self.validate() + + def validate(self): + """Validate the alias, and return the number of arguments.""" + if self.name in self.blacklist: + raise InvalidAliasError("The name %s can't be aliased " + "because it is a keyword or builtin." % self.name) + try: + caller = self.shell.magics_manager.magics['line'][self.name] + except KeyError: + pass + else: + if not isinstance(caller, Alias): + raise InvalidAliasError("The name %s can't be aliased " + "because it is another magic command." 
% self.name) + + if not (isinstance(self.cmd, str)): + raise InvalidAliasError("An alias command must be a string, " + "got: %r" % self.cmd) + + nargs = self.cmd.count('%s') - self.cmd.count('%%s') + + if (nargs > 0) and (self.cmd.find('%l') >= 0): + raise InvalidAliasError('The %s and %l specifiers are mutually ' + 'exclusive in alias definitions.') + + return nargs + + def __repr__(self): + return "".format(self.name, self.cmd) + + def __call__(self, rest=''): + cmd = self.cmd + nargs = self.nargs + # Expand the %l special to be the user's input line + if cmd.find('%l') >= 0: + cmd = cmd.replace('%l', rest) + rest = '' + + if nargs==0: + if cmd.find('%%s') >= 1: + cmd = cmd.replace('%%s', '%s') + # Simple, argument-less aliases + cmd = '%s %s' % (cmd, rest) + else: + # Handle aliases with positional arguments + args = rest.split(None, nargs) + if len(args) < nargs: + raise UsageError('Alias <%s> requires %s arguments, %s given.' % + (self.name, nargs, len(args))) + cmd = '%s %s' % (cmd % tuple(args[:nargs]),' '.join(args[nargs:])) + + self.shell.system(cmd) + +#----------------------------------------------------------------------------- +# Main AliasManager class +#----------------------------------------------------------------------------- + +class AliasManager(Configurable): + + default_aliases = List(default_aliases()).tag(config=True) + user_aliases = List(default_value=[]).tag(config=True) + shell = Instance('yap_ipython.core.interactiveshell.InteractiveShellABC', allow_none=True) + + def __init__(self, shell=None, **kwargs): + super(AliasManager, self).__init__(shell=shell, **kwargs) + # For convenient access + self.linemagics = self.shell.magics_manager.magics['line'] + self.init_aliases() + + def init_aliases(self): + # Load default & user aliases + for name, cmd in self.default_aliases + self.user_aliases: + self.soft_define_alias(name, cmd) + + @property + def aliases(self): + return [(n, func.cmd) for (n, func) in self.linemagics.items() + if 
isinstance(func, Alias)] + + def soft_define_alias(self, name, cmd): + """Define an alias, but don't raise on an AliasError.""" + try: + self.define_alias(name, cmd) + except AliasError as e: + error("Invalid alias: %s" % e) + + def define_alias(self, name, cmd): + """Define a new alias after validating it. + + This will raise an :exc:`AliasError` if there are validation + problems. + """ + caller = Alias(shell=self.shell, name=name, cmd=cmd) + self.shell.magics_manager.register_function(caller, magic_kind='line', + magic_name=name) + + def get_alias(self, name): + """Return an alias, or None if no alias by that name exists.""" + aname = self.linemagics.get(name, None) + return aname if isinstance(aname, Alias) else None + + def is_alias(self, name): + """Return whether or not a given name has been defined as an alias""" + return self.get_alias(name) is not None + + def undefine_alias(self, name): + if self.is_alias(name): + del self.linemagics[name] + else: + raise ValueError('%s is not an alias' % name) + + def clear_aliases(self): + for name, cmd in self.aliases: + self.undefine_alias(name) + + def retrieve_alias(self, name): + """Retrieve the command to which an alias expands.""" + caller = self.get_alias(name) + if caller: + return caller.cmd + else: + raise ValueError('%s is not an alias' % name) diff --git a/packages/python/yap_kernel/yap_ipython/core/application.py b/packages/python/yap_kernel/yap_ipython/core/application.py new file mode 100644 index 000000000..2ff36b8aa --- /dev/null +++ b/packages/python/yap_kernel/yap_ipython/core/application.py @@ -0,0 +1,462 @@ +# encoding: utf-8 +""" +An application for yap_ipython. + +All top-level applications should use the classes in this module for +handling configuration and creating configurables. + +The job of an :class:`Application` is to create the master configuration +object and then create the configurable objects, passing the config to them. +""" + +# Copyright (c) yap_ipython Development Team. 
+# Distributed under the terms of the Modified BSD License. + +import atexit +from copy import deepcopy +import glob +import logging +import os +import shutil +import sys + +from traitlets.config.application import Application, catch_config_error +from traitlets.config.loader import ConfigFileNotFound, PyFileConfigLoader +from yap_ipython.core import release, crashhandler +from yap_ipython.core.profiledir import ProfileDir, ProfileDirError +from yap_ipython.paths import get_ipython_dir, get_ipython_package_dir +from yap_ipython.utils.path import ensure_dir_exists +from traitlets import ( + List, Unicode, Type, Bool, Set, Instance, Undefined, + default, observe, +) + +if os.name == 'nt': + programdata = os.environ.get('PROGRAMDATA', None) + if programdata: + SYSTEM_CONFIG_DIRS = [os.path.join(programdata, 'ipython')] + else: # PROGRAMDATA is not defined by default on XP. + SYSTEM_CONFIG_DIRS = [] +else: + SYSTEM_CONFIG_DIRS = [ + "/usr/local/etc/ipython", + "/etc/ipython", + ] + + +ENV_CONFIG_DIRS = [] +_env_config_dir = os.path.join(sys.prefix, 'etc', 'ipython') +if _env_config_dir not in SYSTEM_CONFIG_DIRS: + # only add ENV_CONFIG if sys.prefix is not already included + ENV_CONFIG_DIRS.append(_env_config_dir) + + +_envvar = os.environ.get('IPYTHON_SUPPRESS_CONFIG_ERRORS') +if _envvar in {None, ''}: + IPYTHON_SUPPRESS_CONFIG_ERRORS = None +else: + if _envvar.lower() in {'1','true'}: + IPYTHON_SUPPRESS_CONFIG_ERRORS = True + elif _envvar.lower() in {'0','false'} : + IPYTHON_SUPPRESS_CONFIG_ERRORS = False + else: + sys.exit("Unsupported value for environment variable: 'IPYTHON_SUPPRESS_CONFIG_ERRORS' is set to '%s' which is none of {'0', '1', 'false', 'true', ''}."% _envvar ) + +# aliases and flags + +base_aliases = { + 'profile-dir' : 'ProfileDir.location', + 'profile' : 'BaseYAPApplication.profile', + 'ipython-dir' : 'BaseYAPApplication.ipython_dir', + 'log-level' : 'Application.log_level', + 'config' : 'BaseYAPApplication.extra_config_file', +} + +base_flags = 
dict( + debug = ({'Application' : {'log_level' : logging.DEBUG}}, + "set log level to logging.DEBUG (maximize logging output)"), + quiet = ({'Application' : {'log_level' : logging.CRITICAL}}, + "set log level to logging.CRITICAL (minimize logging output)"), + init = ({'BaseYAPApplication' : { + 'copy_config_files' : True, + 'auto_create' : True} + }, """Initialize profile with default config files. This is equivalent + to running `ipython profile create ` prior to startup. + """) +) + +class ProfileAwareConfigLoader(PyFileConfigLoader): + """A Python file config loader that is aware of yap_ipython profiles.""" + def load_subconfig(self, fname, path=None, profile=None): + if profile is not None: + try: + profile_dir = ProfileDir.find_profile_dir_by_name( + get_ipython_dir(), + profile, + ) + except ProfileDirError: + return + path = profile_dir.location + return super(ProfileAwareConfigLoader, self).load_subconfig(fname, path=path) + +class BaseYAPApplication(Application): + + name = u'ipython' + description = Unicode(u'yap_ipython: an enhanced interactive Python shell.') + version = Unicode(release.version) + + aliases = base_aliases + flags = base_flags + classes = List([ProfileDir]) + + # enable `load_subconfig('cfg.py', profile='name')` + python_config_loader_class = ProfileAwareConfigLoader + + # Track whether the config_file has changed, + # because some logic happens only if we aren't using the default. + config_file_specified = Set() + + config_file_name = Unicode() + @default('config_file_name') + def _config_file_name_default(self): + return self.name.replace('-','_') + u'_config.py' + @observe('config_file_name') + def _config_file_name_changed(self, change): + if change['new'] != change['old']: + self.config_file_specified.add(change['new']) + + # The directory that contains yap_ipython's builtin profiles. 
+ builtin_profile_dir = Unicode( + os.path.join(get_ipython_package_dir(), u'config', u'profile', u'default') + ) + + config_file_paths = List(Unicode()) + @default('config_file_paths') + def _config_file_paths_default(self): + return [os.getcwd()] + + extra_config_file = Unicode( + help="""Path to an extra config file to load. + + If specified, load this config file in addition to any other yap_ipython config. + """).tag(config=True) + @observe('extra_config_file') + def _extra_config_file_changed(self, change): + old = change['old'] + new = change['new'] + try: + self.config_files.remove(old) + except ValueError: + pass + self.config_file_specified.add(new) + self.config_files.append(new) + + profile = Unicode(u'default', + help="""The yap_ipython profile to use.""" + ).tag(config=True) + + @observe('profile') + def _profile_changed(self, change): + self.builtin_profile_dir = os.path.join( + get_ipython_package_dir(), u'config', u'profile', change['new'] + ) + + ipython_dir = Unicode( + help=""" + The name of the yap_ipython directory. This directory is used for logging + configuration (through profiles), history storage, etc. The default + is usually $HOME/.ipython. This option can also be specified through + the environment variable IPYTHONDIR. 
+ """ + ).tag(config=True) + @default('ipython_dir') + def _ipython_dir_default(self): + d = get_ipython_dir() + self._ipython_dir_changed({ + 'name': 'ipython_dir', + 'old': d, + 'new': d, + }) + return d + + _in_init_profile_dir = False + profile_dir = Instance(ProfileDir, allow_none=True) + @default('profile_dir') + def _profile_dir_default(self): + # avoid recursion + if self._in_init_profile_dir: + return + # profile_dir requested early, force initialization + self.init_profile_dir() + return self.profile_dir + + overwrite = Bool(False, + help="""Whether to overwrite existing config files when copying""" + ).tag(config=True) + auto_create = Bool(False, + help="""Whether to create profile dir if it doesn't exist""" + ).tag(config=True) + + config_files = List(Unicode()) + @default('config_files') + def _config_files_default(self): + return [self.config_file_name] + + copy_config_files = Bool(False, + help="""Whether to install the default config files into the profile dir. + If a new profile is being created, and yap_ipython contains config files for that + profile, then they will be staged into the new directory. Otherwise, + default config files will be automatically generated. + """).tag(config=True) + + verbose_crash = Bool(False, + help="""Create a massive crash report when yap_ipython encounters what may be an + internal error. The default is to append a short message to the + usual traceback""").tag(config=True) + + # The class to use as the crash handler. 
+ crash_handler_class = Type(crashhandler.CrashHandler) + + @catch_config_error + def __init__(self, **kwargs): + super(BaseYAPApplication, self).__init__(**kwargs) + # ensure current working directory exists + try: + os.getcwd() + except: + # exit if cwd doesn't exist + self.log.error("Current working directory doesn't exist.") + self.exit(1) + + #------------------------------------------------------------------------- + # Various stages of Application creation + #------------------------------------------------------------------------- + + deprecated_subcommands = {} + + def initialize_subcommand(self, subc, argv=None): + if subc in self.deprecated_subcommands: + self.log.warning("Subcommand `ipython {sub}` is deprecated and will be removed " + "in future versions.".format(sub=subc)) + self.log.warning("You likely want to use `jupyter {sub}` in the " + "future".format(sub=subc)) + return super(BaseYAPApplication, self).initialize_subcommand(subc, argv) + + def init_crash_handler(self): + """Create a crash handler, typically setting sys.excepthook to it.""" + self.crash_handler = self.crash_handler_class(self) + sys.excepthook = self.excepthook + def unset_crashhandler(): + sys.excepthook = sys.__excepthook__ + atexit.register(unset_crashhandler) + + def excepthook(self, etype, evalue, tb): + """this is sys.excepthook after init_crashhandler + + set self.verbose_crash=True to use our full crashhandler, instead of + a regular traceback with a short message (crash_handler_lite) + """ + + if self.verbose_crash: + return self.crash_handler(etype, evalue, tb) + else: + return crashhandler.crash_handler_lite(etype, evalue, tb) + + @observe('ipython_dir') + def _ipython_dir_changed(self, change): + old = change['old'] + new = change['new'] + if old is not Undefined: + str_old = os.path.abspath(old) + if str_old in sys.path: + sys.path.remove(str_old) + str_path = os.path.abspath(new) + sys.path.append(str_path) + ensure_dir_exists(new) + readme = os.path.join(new, 
'README') + readme_src = os.path.join(get_ipython_package_dir(), u'config', u'profile', 'README') + if not os.path.exists(readme) and os.path.exists(readme_src): + shutil.copy(readme_src, readme) + for d in ('extensions', 'nbextensions'): + path = os.path.join(new, d) + try: + ensure_dir_exists(path) + except OSError as e: + # this will not be EEXIST + self.log.error("couldn't create path %s: %s", path, e) + self.log.debug("IPYTHONDIR set to: %s" % new) + + def load_config_file(self, suppress_errors=IPYTHON_SUPPRESS_CONFIG_ERRORS): + """Load the config file. + + By default, errors in loading config are handled, and a warning + printed on screen. For testing, the suppress_errors option is set + to False, so errors will make tests fail. + + `supress_errors` default value is to be `None` in which case the + behavior default to the one of `traitlets.Application`. + + The default value can be set : + - to `False` by setting 'IPYTHON_SUPPRESS_CONFIG_ERRORS' environment variable to '0', or 'false' (case insensitive). + - to `True` by setting 'IPYTHON_SUPPRESS_CONFIG_ERRORS' environment variable to '1' or 'true' (case insensitive). + - to `None` by setting 'IPYTHON_SUPPRESS_CONFIG_ERRORS' environment variable to '' (empty string) or leaving it unset. + + Any other value are invalid, and will make yap_ipython exit with a non-zero return code. 
+ """ + + + self.log.debug("Searching path %s for config files", self.config_file_paths) + base_config = 'ipython_config.py' + self.log.debug("Attempting to load config file: %s" % + base_config) + try: + if suppress_errors is not None: + old_value = Application.raise_config_file_errors + Application.raise_config_file_errors = not suppress_errors; + Application.load_config_file( + self, + base_config, + path=self.config_file_paths + ) + except ConfigFileNotFound: + # ignore errors loading parent + self.log.debug("Config file %s not found", base_config) + pass + if suppress_errors is not None: + Application.raise_config_file_errors = old_value + + for config_file_name in self.config_files: + if not config_file_name or config_file_name == base_config: + continue + self.log.debug("Attempting to load config file: %s" % + self.config_file_name) + try: + Application.load_config_file( + self, + config_file_name, + path=self.config_file_paths + ) + except ConfigFileNotFound: + # Only warn if the default config file was NOT being used. + if config_file_name in self.config_file_specified: + msg = self.log.warning + else: + msg = self.log.debug + msg("Config file not found, skipping: %s", config_file_name) + except Exception: + # For testing purposes. 
+ if not suppress_errors: + raise + self.log.warning("Error loading config file: %s" % + self.config_file_name, exc_info=True) + + def init_profile_dir(self): + """initialize the profile dir""" + self._in_init_profile_dir = True + if self.profile_dir is not None: + # already ran + return + if 'ProfileDir.location' not in self.config: + # location not specified, find by profile name + try: + p = ProfileDir.find_profile_dir_by_name(self.ipython_dir, self.profile, self.config) + except ProfileDirError: + # not found, maybe create it (always create default profile) + if self.auto_create or self.profile == 'default': + try: + p = ProfileDir.create_profile_dir_by_name(self.ipython_dir, self.profile, self.config) + except ProfileDirError: + self.log.fatal("Could not create profile: %r"%self.profile) + self.exit(1) + else: + self.log.info("Created profile dir: %r"%p.location) + else: + self.log.fatal("Profile %r not found."%self.profile) + self.exit(1) + else: + self.log.debug("Using existing profile dir: %r"%p.location) + else: + location = self.config.ProfileDir.location + # location is fully specified + try: + p = ProfileDir.find_profile_dir(location, self.config) + except ProfileDirError: + # not found, maybe create it + if self.auto_create: + try: + p = ProfileDir.create_profile_dir(location, self.config) + except ProfileDirError: + self.log.fatal("Could not create profile directory: %r"%location) + self.exit(1) + else: + self.log.debug("Creating new profile dir: %r"%location) + else: + self.log.fatal("Profile directory %r not found."%location) + self.exit(1) + else: + self.log.info("Using existing profile dir: %r"%location) + # if profile_dir is specified explicitly, set profile name + dir_name = os.path.basename(p.location) + if dir_name.startswith('profile_'): + self.profile = dir_name[8:] + + self.profile_dir = p + self.config_file_paths.append(p.location) + self._in_init_profile_dir = False + + def init_config_files(self): + """[optionally] copy default config 
files into profile dir.""" + self.config_file_paths.extend(ENV_CONFIG_DIRS) + self.config_file_paths.extend(SYSTEM_CONFIG_DIRS) + # copy config files + path = self.builtin_profile_dir + if self.copy_config_files: + src = self.profile + + cfg = self.config_file_name + if path and os.path.exists(os.path.join(path, cfg)): + self.log.warning("Staging %r from %s into %r [overwrite=%s]"%( + cfg, src, self.profile_dir.location, self.overwrite) + ) + self.profile_dir.copy_config_file(cfg, path=path, overwrite=self.overwrite) + else: + self.stage_default_config_file() + else: + # Still stage *bundled* config files, but not generated ones + # This is necessary for `ipython profile=sympy` to load the profile + # on the first go + files = glob.glob(os.path.join(path, '*.py')) + for fullpath in files: + cfg = os.path.basename(fullpath) + if self.profile_dir.copy_config_file(cfg, path=path, overwrite=False): + # file was copied + self.log.warning("Staging bundled %s from %s into %r"%( + cfg, self.profile, self.profile_dir.location) + ) + + + def stage_default_config_file(self): + """auto generate default config file, and stage it into the profile.""" + s = self.generate_config_file() + fname = os.path.join(self.profile_dir.location, self.config_file_name) + if self.overwrite or not os.path.exists(fname): + self.log.warning("Generating default config file: %r"%(fname)) + with open(fname, 'w') as f: + f.write(s) + + @catch_config_error + def initialize(self, argv=None): + # don't hook up crash handler before parsing command-line + self.parse_command_line(argv) + self.init_crash_handler() + if self.subapp is not None: + # stop here if subapp is taking over + return + # save a copy of CLI config to re-load after config files + # so that it has highest priority + cl_config = deepcopy(self.config) + self.init_profile_dir() + self.init_config_files() + self.load_config_file() + # enforce cl-opts override configfile opts: + self.update_config(cl_config) diff --git 
a/packages/python/yap_kernel/yap_ipython/core/autocall.py b/packages/python/yap_kernel/yap_ipython/core/autocall.py new file mode 100644 index 000000000..26cc4f30a --- /dev/null +++ b/packages/python/yap_kernel/yap_ipython/core/autocall.py @@ -0,0 +1,70 @@ +# encoding: utf-8 +""" +Autocall capabilities for yap_ipython.core. + +Authors: + +* Brian Granger +* Fernando Perez +* Thomas Kluyver + +Notes +----- +""" + +#----------------------------------------------------------------------------- +# Copyright (C) 2008-2011 The yap_ipython Development Team +# +# Distributed under the terms of the BSD License. The full license is in +# the file COPYING, distributed as part of this software. +#----------------------------------------------------------------------------- + +#----------------------------------------------------------------------------- +# Imports +#----------------------------------------------------------------------------- + + +#----------------------------------------------------------------------------- +# Code +#----------------------------------------------------------------------------- + +class IPyAutocall(object): + """ Instances of this class are always autocalled + + This happens regardless of 'autocall' variable state. Use this to + develop macro-like mechanisms. + """ + _ip = None + rewrite = True + def __init__(self, ip=None): + self._ip = ip + + def set_ip(self, ip): + """ Will be used to set _ip point to current ipython instance b/f call + + Override this method if you don't want this to happen. + + """ + self._ip = ip + + +class ExitAutocall(IPyAutocall): + """An autocallable object which will be added to the user namespace so that + exit, exit(), quit or quit() are all valid ways to close the shell.""" + rewrite = False + + def __call__(self): + self._ip.ask_exit() + +class ZMQExitAutocall(ExitAutocall): + """Exit yap_ipython. Autocallable, so it needn't be explicitly called. 
+ + Parameters + ---------- + keep_kernel : bool + If True, leave the kernel alive. Otherwise, tell the kernel to exit too + (default). + """ + def __call__(self, keep_kernel=False): + self._ip.keepkernel_on_exit = keep_kernel + self._ip.ask_exit() diff --git a/packages/python/yap_kernel/yap_ipython/core/builtin_trap.py b/packages/python/yap_kernel/yap_ipython/core/builtin_trap.py new file mode 100644 index 000000000..1eeb7cabc --- /dev/null +++ b/packages/python/yap_kernel/yap_ipython/core/builtin_trap.py @@ -0,0 +1,86 @@ +""" +A context manager for managing things injected into :mod:`builtins`. +""" +# Copyright (c) yap_ipython Development Team. +# Distributed under the terms of the Modified BSD License. +import builtins as builtin_mod + +from traitlets.config.configurable import Configurable + +from traitlets import Instance + + +class __BuiltinUndefined(object): pass +BuiltinUndefined = __BuiltinUndefined() + +class __HideBuiltin(object): pass +HideBuiltin = __HideBuiltin() + + +class BuiltinTrap(Configurable): + + shell = Instance('yap_ipython.core.interactiveshell.InteractiveShellABC', + allow_none=True) + + def __init__(self, shell=None): + super(BuiltinTrap, self).__init__(shell=shell, config=None) + self._orig_builtins = {} + # We define this to track if a single BuiltinTrap is nested. + # Only turn off the trap when the outermost call to __exit__ is made. + self._nested_level = 0 + self.shell = shell + # builtins we always add - if set to HideBuiltin, they will just + # be removed instead of being replaced by something else + self.auto_builtins = {'exit': HideBuiltin, + 'quit': HideBuiltin, + 'get_ipython': self.shell.get_ipython, + } + + def __enter__(self): + if self._nested_level == 0: + self.activate() + self._nested_level += 1 + # I return self, so callers can use add_builtin in a with clause. 
+ return self + + def __exit__(self, type, value, traceback): + if self._nested_level == 1: + self.deactivate() + self._nested_level -= 1 + # Returning False will cause exceptions to propagate + return False + + def add_builtin(self, key, value): + """Add a builtin and save the original.""" + bdict = builtin_mod.__dict__ + orig = bdict.get(key, BuiltinUndefined) + if value is HideBuiltin: + if orig is not BuiltinUndefined: #same as 'key in bdict' + self._orig_builtins[key] = orig + del bdict[key] + else: + self._orig_builtins[key] = orig + bdict[key] = value + + def remove_builtin(self, key, orig): + """Remove an added builtin and re-set the original.""" + if orig is BuiltinUndefined: + del builtin_mod.__dict__[key] + else: + builtin_mod.__dict__[key] = orig + + def activate(self): + """Store ipython references in the __builtin__ namespace.""" + + add_builtin = self.add_builtin + for name, func in self.auto_builtins.items(): + add_builtin(name, func) + + def deactivate(self): + """Remove any builtins which might have been added by add_builtins, or + restore overwritten ones to their previous values.""" + remove_builtin = self.remove_builtin + for key, val in self._orig_builtins.items(): + remove_builtin(key, val) + self._orig_builtins.clear() + self._builtins_added = False diff --git a/packages/python/yap_kernel/yap_ipython/core/compilerop.py b/packages/python/yap_kernel/yap_ipython/core/compilerop.py new file mode 100644 index 000000000..56eb27b02 --- /dev/null +++ b/packages/python/yap_kernel/yap_ipython/core/compilerop.py @@ -0,0 +1,143 @@ +"""Compiler tools with improved interactive support. + +Provides compilation machinery similar to codeop, but with caching support so +we can provide interactive tracebacks. 
+ +Authors +------- +* Robert Kern +* Fernando Perez +* Thomas Kluyver +""" + +# Note: though it might be more natural to name this module 'compiler', that +# name is in the stdlib and name collisions with the stdlib tend to produce +# weird problems (often with third-party tools). + +#----------------------------------------------------------------------------- +# Copyright (C) 2010-2011 The yap_ipython Development Team. +# +# Distributed under the terms of the BSD License. +# +# The full license is in the file COPYING.txt, distributed with this software. +#----------------------------------------------------------------------------- + +#----------------------------------------------------------------------------- +# Imports +#----------------------------------------------------------------------------- + +# Stdlib imports +import __future__ +from ast import PyCF_ONLY_AST +import codeop +import functools +import hashlib +import linecache +import operator +import time + +#----------------------------------------------------------------------------- +# Constants +#----------------------------------------------------------------------------- + +# Roughtly equal to PyCF_MASK | PyCF_MASK_OBSOLETE as defined in pythonrun.h, +# this is used as a bitmask to extract future-related code flags. +PyCF_MASK = functools.reduce(operator.or_, + (getattr(__future__, fname).compiler_flag + for fname in __future__.all_feature_names)) + +#----------------------------------------------------------------------------- +# Local utilities +#----------------------------------------------------------------------------- + +def code_name(code, number=0): + """ Compute a (probably) unique name for code for caching. + + This now expects code to be unicode. + """ + hash_digest = hashlib.sha1(code.encode("utf-8")).hexdigest() + # Include the number and 12 characters of the hash in the name. 
It's + # pretty much impossible that in a single session we'll have collisions + # even with truncated hashes, and the full one makes tracebacks too long + return ''.format(number, hash_digest[:12]) + +#----------------------------------------------------------------------------- +# Classes and functions +#----------------------------------------------------------------------------- + +class CachingCompiler(codeop.Compile): + """A compiler that caches code compiled from interactive statements. + """ + + def __init__(self): + codeop.Compile.__init__(self) + + # This is ugly, but it must be done this way to allow multiple + # simultaneous ipython instances to coexist. Since Python itself + # directly accesses the data structures in the linecache module, and + # the cache therein is global, we must work with that data structure. + # We must hold a reference to the original checkcache routine and call + # that in our own check_cache() below, but the special yap_ipython cache + # must also be shared by all yap_ipython instances. If we were to hold + # separate caches (one in each CachingCompiler instance), any call made + # by Python itself to linecache.checkcache() would obliterate the + # cached data from the other yap_ipython instances. + if not hasattr(linecache, '_ipython_cache'): + linecache._ipython_cache = {} + if not hasattr(linecache, '_checkcache_ori'): + linecache._checkcache_ori = linecache.checkcache + # Now, we must monkeypatch the linecache directly so that parts of the + # stdlib that call it outside our control go through our codepath + # (otherwise we'd lose our tracebacks). + linecache.checkcache = check_linecache_ipython + + def ast_parse(self, source, filename='', symbol='exec'): + """Parse code to an AST with the current compiler flags active. 
+ + Arguments are exactly the same as ast.parse (in the standard library), + and are passed to the built-in compile function.""" + return compile(source, filename, symbol, self.flags | PyCF_ONLY_AST, 1) + + def reset_compiler_flags(self): + """Reset compiler flags to default state.""" + # This value is copied from codeop.Compile.__init__, so if that ever + # changes, it will need to be updated. + self.flags = codeop.PyCF_DONT_IMPLY_DEDENT + + @property + def compiler_flags(self): + """Flags currently active in the compilation process. + """ + return self.flags + + def cache(self, code, number=0): + """Make a name for a block of code, and cache the code. + + Parameters + ---------- + code : str + The Python source code to cache. + number : int + A number which forms part of the code's name. Used for the execution + counter. + + Returns + ------- + The name of the cached code (as a string). Pass this as the filename + argument to compilation, so that tracebacks are correctly hooked up. + """ + name = code_name(code, number) + entry = (len(code), time.time(), + [line+'\n' for line in code.splitlines()], name) + linecache.cache[name] = entry + linecache._ipython_cache[name] = entry + return name + +def check_linecache_ipython(*args): + """Call linecache.checkcache() safely protecting our cached values. + """ + # First call the orignal checkcache as intended + linecache._checkcache_ori(*args) + # Then, update back the cache with our data, so that tracebacks related + # to our compiled codes can be produced. + linecache.cache.update(linecache._ipython_cache) diff --git a/packages/python/yap_kernel/yap_ipython/core/completer.py b/packages/python/yap_kernel/yap_ipython/core/completer.py new file mode 100644 index 000000000..79666376d --- /dev/null +++ b/packages/python/yap_kernel/yap_ipython/core/completer.py @@ -0,0 +1,2045 @@ +"""Completion for yap_ipython. + +This module started as fork of the rlcompleter module in the Python standard +library. 
The original enhancements made to rlcompleter have been sent +upstream and were accepted as of Python 2.3, + +This module now support a wide variety of completion mechanism both available +for normal classic Python code, as well as completer for yap_ipython specific +Syntax like magics. + +Latex and Unicode completion +============================ + +yap_ipython and compatible frontends not only can complete your code, but can help +you to input a wide range of characters. In particular we allow you to insert +a unicode character using the tab completion mechanism. + +Forward latex/unicode completion +-------------------------------- + +Forward completion allows you to easily type a unicode character using its latex +name, or unicode long description. To do so type a backslash follow by the +relevant name and press tab: + + +Using latex completion: + +.. code:: + + \\alpha + α + +or using unicode completion: + + +.. code:: + + \\greek small letter alpha + α + + +Only valid Python identifiers will complete. Combining characters (like arrow or +dots) are also available, unlike latex they need to be put after the their +counterpart that is to say, `F\\\\vec` is correct, not `\\\\vecF`. + +Some browsers are known to display combining characters incorrectly. + +Backward latex completion +------------------------- + +It is sometime challenging to know how to type a character, if you are using +yap_ipython, or any compatible frontend you can prepend backslash to the character +and press `` to expand it to its latex form. + +.. code:: + + \\α + \\alpha + + +Both forward and backward completions can be deactivated by setting the +``Completer.backslash_combining_completions`` option to ``False``. + + +Experimental +============ + +Starting with yap_ipython 6.0, this module can make use of the Jedi library to +generate completions both using static analysis of the code, and dynamically +inspecting multiple namespaces. 
The APIs attached to this new mechanism is +unstable and will raise unless use in an :any:`provisionalcompleter` context +manager. + +You will find that the following are experimental: + + - :any:`provisionalcompleter` + - :any:`IPCompleter.completions` + - :any:`Completion` + - :any:`rectify_completions` + +.. note:: + + better name for :any:`rectify_completions` ? + +We welcome any feedback on these new API, and we also encourage you to try this +module in debug mode (start yap_ipython with ``--Completer.debug=True``) in order +to have extra logging information is :any:`jedi` is crashing, or if current +yap_ipython completer pending deprecations are returning results not yet handled +by :any:`jedi` + +Using Jedi for tab completion allow snippets like the following to work without +having to execute any code: + + >>> myvar = ['hello', 42] + ... myvar[1].bi + +Tab completion will be able to infer that ``myvar[1]`` is a real number without +executing any code unlike the previously available ``IPCompleter.greedy`` +option. + +Be sure to update :any:`jedi` to the latest stable version or to try the +current development version to get better completions. +""" + + +# Copyright (c) yap_ipython Development Team. +# Distributed under the terms of the Modified BSD License. 
+# +# Some of this code originated from rlcompleter in the Python standard library +# Copyright (C) 2001 Python Software Foundation, www.python.org + + +import __main__ +import builtins as builtin_mod +import glob +import time +import inspect +import itertools +import keyword +import os +import re +import sys +import unicodedata +import string +import warnings + +from contextlib import contextmanager +from importlib import import_module +from typing import Iterator, List, Tuple, Iterable, Union +from types import SimpleNamespace + +from traitlets.config.configurable import Configurable +from yap_ipython.core.error import TryNext +from yap_ipython.core.inputsplitter import ESC_MAGIC +from yap_ipython.core.latex_symbols import latex_symbols, reverse_latex_symbol +from yap_ipython.core.oinspect import InspectColors +from yap_ipython.utils import generics +from yap_ipython.utils.dir2 import dir2, get_real_method +from yap_ipython.utils.process import arg_split +from traitlets import Bool, Enum, observe, Int + +# skip module docstests +skip_doctest = True + +try: + import jedi + import jedi.api.helpers + import jedi.api.classes + JEDI_INSTALLED = True +except ImportError: + JEDI_INSTALLED = False +#----------------------------------------------------------------------------- +# Globals +#----------------------------------------------------------------------------- + +# Public API +__all__ = ['Completer','IPCompleter'] + +if sys.platform == 'win32': + PROTECTABLES = ' ' +else: + PROTECTABLES = ' ()[]{}?=\\|;:\'#*"^&' + +# Protect against returning an enormous number of completions which the frontend +# may have trouble processing. +MATCHES_LIMIT = 500 + +_deprecation_readline_sentinel = object() + + +class ProvisionalCompleterWarning(FutureWarning): + """ + Exception raise by an experimental feature in this module. + + Wrap code in :any:`provisionalcompleter` context manager if you + are certain you want to use an unstable feature. 
+ """ + pass + +warnings.filterwarnings('error', category=ProvisionalCompleterWarning) + +@contextmanager +def provisionalcompleter(action='ignore'): + """ + + + This contest manager has to be used in any place where unstable completer + behavior and API may be called. + + >>> with provisionalcompleter(): + ... completer.do_experimetal_things() # works + + >>> completer.do_experimental_things() # raises. + + .. note:: Unstable + + By using this context manager you agree that the API in use may change + without warning, and that you won't complain if they do so. + + You also understand that if the API is not to you liking you should report + a bug to explain your use case upstream and improve the API and will loose + credibility if you complain after the API is make stable. + + We'll be happy to get your feedback , feature request and improvement on + any of the unstable APIs ! + """ + with warnings.catch_warnings(): + warnings.filterwarnings(action, category=ProvisionalCompleterWarning) + yield + + +def has_open_quotes(s): + """Return whether a string has open quotes. + + This simply counts whether the number of quote characters of either type in + the string is odd. + + Returns + ------- + If there is an open quote, the quote character is returned. Else, return + False. + """ + # We check " first, then ', so complex cases with nested quotes will get + # the " to take precedence. + if s.count('"') % 2: + return '"' + elif s.count("'") % 2: + return "'" + else: + return False + + +def protect_filename(s, protectables=PROTECTABLES): + """Escape a string to protect certain characters.""" + if set(s) & set(protectables): + if sys.platform == "win32": + return '"' + s + '"' + else: + return "".join(("\\" + c if c in protectables else c) for c in s) + else: + return s + + +def expand_user(path:str) -> Tuple[str, bool, str]: + """Expand ``~``-style usernames in strings. 
+ + This is similar to :func:`os.path.expanduser`, but it computes and returns + extra information that will be useful if the input was being used in + computing completions, and you wish to return the completions with the + original '~' instead of its expanded value. + + Parameters + ---------- + path : str + String to be expanded. If no ~ is present, the output is the same as the + input. + + Returns + ------- + newpath : str + Result of ~ expansion in the input path. + tilde_expand : bool + Whether any expansion was performed or not. + tilde_val : str + The value that ~ was replaced with. + """ + # Default values + tilde_expand = False + tilde_val = '' + newpath = path + + if path.startswith('~'): + tilde_expand = True + rest = len(path)-1 + newpath = os.path.expanduser(path) + if rest: + tilde_val = newpath[:-rest] + else: + tilde_val = newpath + + return newpath, tilde_expand, tilde_val + + +def compress_user(path:str, tilde_expand:bool, tilde_val:str) -> str: + """Does the opposite of expand_user, with its outputs. + """ + if tilde_expand: + return path.replace(tilde_val, '~') + else: + return path + + +def completions_sorting_key(word): + """key for sorting completions + + This does several things: + + - Demote any completions starting with underscores to the end + - Insert any %magic and %%cellmagic completions in the alphabetical order + by their name + """ + prio1, prio2 = 0, 0 + + if word.startswith('__'): + prio1 = 2 + elif word.startswith('_'): + prio1 = 1 + + if word.endswith('='): + prio1 = -1 + + if word.startswith('%%'): + # If there's another % in there, this is something else, so leave it alone + if not "%" in word[2:]: + word = word[2:] + prio2 = 2 + elif word.startswith('%'): + if not "%" in word[1:]: + word = word[1:] + prio2 = 1 + + return prio1, word, prio2 + + +class _FakeJediCompletion: + """ + This is a workaround to communicate to the UI that Jedi has crashed and to + report a bug. 
Will be used only id :any:`IPCompleter.debug` is set to true. + + Added in yap_ipython 6.0 so should likely be removed for 7.0 + + """ + + def __init__(self, name): + + self.name = name + self.complete = name + self.type = 'crashed' + self.name_with_symbols = name + self.signature = '' + self._origin = 'fake' + + def __repr__(self): + return '' + + +class Completion: + """ + Completion object used and return by yap_ipython completers. + + .. warning:: Unstable + + This function is unstable, API may change without warning. + It will also raise unless use in proper context manager. + + This act as a middle ground :any:`Completion` object between the + :any:`jedi.api.classes.Completion` object and the Prompt Toolkit completion + object. While Jedi need a lot of information about evaluator and how the + code should be ran/inspected, PromptToolkit (and other frontend) mostly + need user facing information. + + - Which range should be replaced replaced by what. + - Some metadata (like completion type), or meta information to displayed to + the use user. + + For debugging purpose we can also store the origin of the completion (``jedi``, + ``yap_ipython.python_matches``, ``yap_ipython.magics_matches``...). + """ + + __slots__ = ['start', 'end', 'text', 'type', 'signature', '_origin'] + + def __init__(self, start: int, end: int, text: str, *, type: str=None, _origin='', signature='') -> None: + warnings.warn("``Completion`` is a provisional API (as of yap_ipython 6.0). " + "It may change without warnings. 
" + "Use in corresponding context manager.", + category=ProvisionalCompleterWarning, stacklevel=2) + + self.start = start + self.end = end + self.text = text + self.type = type + self.signature = signature + self._origin = _origin + + def __repr__(self): + return '' % \ + (self.start, self.end, self.text, self.type or '?', self.signature or '?') + + def __eq__(self, other)->Bool: + """ + Equality and hash do not hash the type (as some completer may not be + able to infer the type), but are use to (partially) de-duplicate + completion. + + Completely de-duplicating completion is a bit tricker that just + comparing as it depends on surrounding text, which Completions are not + aware of. + """ + return self.start == other.start and \ + self.end == other.end and \ + self.text == other.text + + def __hash__(self): + return hash((self.start, self.end, self.text)) + + +_IC = Iterable[Completion] + + +def _deduplicate_completions(text: str, completions: _IC)-> _IC: + """ + Deduplicate a set of completions. + + .. warning:: Unstable + + This function is unstable, API may change without warning. + + Parameters + ---------- + text: str + text that should be completed. + completions: Iterator[Completion] + iterator over the completions to deduplicate + + Yields + ------ + `Completions` objects + + + Completions coming from multiple sources, may be different but end up having + the same effect when applied to ``text``. If this is the case, this will + consider completions as equal and only emit the first encountered. + + Not folded in `completions()` yet for debugging purpose, and to detect when + the yap_ipython completer does return things that Jedi does not, but should be + at some point. 
+ """ + completions = list(completions) + if not completions: + return + + new_start = min(c.start for c in completions) + new_end = max(c.end for c in completions) + + seen = set() + for c in completions: + new_text = text[new_start:c.start] + c.text + text[c.end:new_end] + if new_text not in seen: + yield c + seen.add(new_text) + + +def rectify_completions(text: str, completions: _IC, *, _debug=False)->_IC: + """ + Rectify a set of completions to all have the same ``start`` and ``end`` + + .. warning:: Unstable + + This function is unstable, API may change without warning. + It will also raise unless use in proper context manager. + + Parameters + ---------- + text: str + text that should be completed. + completions: Iterator[Completion] + iterator over the completions to rectify + + + :any:`jedi.api.classes.Completion` s returned by Jedi may not have the same start and end, though + the Jupyter Protocol requires them to behave like so. This will readjust + the completion to have the same ``start`` and ``end`` by padding both + extremities with surrounding text. + + During stabilisation should support a ``_debug`` option to log which + completion are return by the yap_ipython completer and not found in Jedi in + order to make upstream bug report. + """ + warnings.warn("`rectify_completions` is a provisional API (as of yap_ipython 6.0). " + "It may change without warnings. 
" + "Use in corresponding context manager.", + category=ProvisionalCompleterWarning, stacklevel=2) + + completions = list(completions) + if not completions: + return + starts = (c.start for c in completions) + ends = (c.end for c in completions) + + new_start = min(starts) + new_end = max(ends) + + seen_jedi = set() + seen_python_matches = set() + for c in completions: + new_text = text[new_start:c.start] + c.text + text[c.end:new_end] + if c._origin == 'jedi': + seen_jedi.add(new_text) + elif c._origin == 'IPCompleter.python_matches': + seen_python_matches.add(new_text) + yield Completion(new_start, new_end, new_text, type=c.type, _origin=c._origin, signature=c.signature) + diff = seen_python_matches.difference(seen_jedi) + if diff and _debug: + print('yap_ipython.python matches have extras:', diff) + + +if sys.platform == 'win32': + DELIMS = ' \t\n`!@#$^&*()=+[{]}|;\'",<>?' +else: + DELIMS = ' \t\n`!@#$^&*()=+[{]}\\|;:\'",<>?' + +GREEDY_DELIMS = ' =\r\n' + + +class CompletionSplitter(object): + """An object to split an input line in a manner similar to readline. + + By having our own implementation, we can expose readline-like completion in + a uniform manner to all frontends. This object only needs to be given the + line of text to be split and the cursor position on said line, and it + returns the 'word' to be completed on at the cursor after splitting the + entire line. + + What characters are used as splitting delimiters can be controlled by + setting the ``delims`` attribute (this is a property that internally + automatically builds the necessary regular expression)""" + + # Private interface + + # A string of delimiter characters. The default value makes sense for + # yap_ipython's most typical usage patterns. + _delims = DELIMS + + # The expression (a normal string) to be compiled into a regular expression + # for actual splitting. We store it as an attribute mostly for ease of + # debugging, since this type of code can be so tricky to debug. 
+ _delim_expr = None + + # The regular expression that does the actual splitting + _delim_re = None + + def __init__(self, delims=None): + delims = CompletionSplitter._delims if delims is None else delims + self.delims = delims + + @property + def delims(self): + """Return the string of delimiter characters.""" + return self._delims + + @delims.setter + def delims(self, delims): + """Set the delimiters for line splitting.""" + expr = '[' + ''.join('\\'+ c for c in delims) + ']' + self._delim_re = re.compile(expr) + self._delims = delims + self._delim_expr = expr + + def split_line(self, line, cursor_pos=None): + """Split a line of text with a cursor at the given position. + """ + l = line if cursor_pos is None else line[:cursor_pos] + return self._delim_re.split(l)[-1] + + + +class Completer(Configurable): + + greedy = Bool(False, + help="""Activate greedy completion + PENDING DEPRECTION. this is now mostly taken care of with Jedi. + + This will enable completion on elements of lists, results of function calls, etc., + but can be unsafe because the code is actually evaluated on TAB. + """ + ).tag(config=True) + + use_jedi = Bool(default_value=JEDI_INSTALLED, + help="Experimental: Use Jedi to generate autocompletions. " + "Default to True if jedi is installed").tag(config=True) + + jedi_compute_type_timeout = Int(default_value=400, + help="""Experimental: restrict time (in milliseconds) during which Jedi can compute types. + Set to 0 to stop computing types. Non-zero value lower than 100ms may hurt + performance by preventing jedi to build its cache. + """).tag(config=True) + + debug = Bool(default_value=False, + help='Enable debug for the Completer. Mostly print extra ' + 'information for experimental jedi integration.')\ + .tag(config=True) + + backslash_combining_completions = Bool(True, + help="Enable unicode completions, e.g. \\alpha . 
" + "Includes completion of latex commands, unicode names, and expanding " + "unicode characters back to latex commands.").tag(config=True) + + + + def __init__(self, namespace=None, global_namespace=None, **kwargs): + """Create a new completer for the command line. + + Completer(namespace=ns, global_namespace=ns2) -> completer instance. + + If unspecified, the default namespace where completions are performed + is __main__ (technically, __main__.__dict__). Namespaces should be + given as dictionaries. + + An optional second namespace can be given. This allows the completer + to handle cases where both the local and global scopes need to be + distinguished. + """ + + # Don't bind to namespace quite yet, but flag whether the user wants a + # specific namespace or to use __main__.__dict__. This will allow us + # to bind to __main__.__dict__ at completion time, not now. + if namespace is None: + self.use_main_ns = True + else: + self.use_main_ns = False + self.namespace = namespace + + # The global namespace, if given, can be bound directly + if global_namespace is None: + self.global_namespace = {} + else: + self.global_namespace = global_namespace + + super(Completer, self).__init__(**kwargs) + + def complete(self, text, state): + """Return the next possible completion for 'text'. + + This is called successively with state == 0, 1, 2, ... until it + returns None. The completion should begin with 'text'. + + """ + if self.use_main_ns: + self.namespace = __main__.__dict__ + + if state == 0: + if "." in text: + self.matches = self.attr_matches(text) + else: + self.matches = self.global_matches(text) + try: + return self.matches[state] + except IndexError: + return None + + def global_matches(self, text): + """Compute matches when text is a simple name. + + Return a list of all keywords, built-in functions and names currently + defined in self.namespace or self.global_namespace that match. 
+ + """ + matches = [] + match_append = matches.append + n = len(text) + for lst in [keyword.kwlist, + builtin_mod.__dict__.keys(), + self.namespace.keys(), + self.global_namespace.keys()]: + for word in lst: + if word[:n] == text and word != "__builtins__": + match_append(word) + + snake_case_re = re.compile(r"[^_]+(_[^_]+)+?\Z") + for lst in [self.namespace.keys(), + self.global_namespace.keys()]: + shortened = {"_".join([sub[0] for sub in word.split('_')]) : word + for word in lst if snake_case_re.match(word)} + for word in shortened.keys(): + if word[:n] == text and word != "__builtins__": + match_append(shortened[word]) + return matches + + def attr_matches(self, text): + """Compute matches when text contains a dot. + + Assuming the text is of the form NAME.NAME....[NAME], and is + evaluatable in self.namespace or self.global_namespace, it will be + evaluated and its attributes (as revealed by dir()) are used as + possible completions. (For class instances, class members are are + also considered.) + + WARNING: this can still invoke arbitrary C code, if an object + with a __getattr__ hook is evaluated. + + """ + + # Another option, seems to work great. Catches things like ''. 
+ m = re.match(r"(\S+(\.\w+)*)\.(\w*)$", text) + + if m: + expr, attr = m.group(1, 3) + elif self.greedy: + m2 = re.match(r"(.+)\.(\w*)$", self.line_buffer) + if not m2: + return [] + expr, attr = m2.group(1,2) + else: + return [] + + try: + obj = eval(expr, self.namespace) + except: + try: + obj = eval(expr, self.global_namespace) + except: + return [] + + if self.limit_to__all__ and hasattr(obj, '__all__'): + words = get__all__entries(obj) + else: + words = dir2(obj) + + try: + words = generics.complete_object(obj, words) + except TryNext: + pass + except AssertionError: + raise + except Exception: + # Silence errors from completion function + #raise # dbg + pass + # Build match list to return + n = len(attr) + return [u"%s.%s" % (expr, w) for w in words if w[:n] == attr ] + + +def get__all__entries(obj): + """returns the strings in the __all__ attribute""" + try: + words = getattr(obj, '__all__') + except: + return [] + + return [w for w in words if isinstance(w, str)] + + +def match_dict_keys(keys: List[str], prefix: str, delims: str): + """Used by dict_key_matches, matching the prefix to a list of keys + + Parameters + ========== + keys: + list of keys in dictionary currently being completed. + prefix: + Part of the text already typed by the user. e.g. `mydict[b'fo` + delims: + String of delimiters to consider when finding the current key. + + Returns + ======= + + A tuple of three elements: ``quote``, ``token_start``, ``matched``, with + ``quote`` being the quote that need to be used to close current string. 
+ ``token_start`` the position where the replacement should start occurring, + ``matches`` a list of replacement/completion + + """ + if not prefix: + return None, 0, [repr(k) for k in keys + if isinstance(k, (str, bytes))] + quote_match = re.search('["\']', prefix) + quote = quote_match.group() + try: + prefix_str = eval(prefix + quote, {}) + except Exception: + return None, 0, [] + + pattern = '[^' + ''.join('\\' + c for c in delims) + ']*$' + token_match = re.search(pattern, prefix, re.UNICODE) + token_start = token_match.start() + token_prefix = token_match.group() + + matched = [] + for key in keys: + try: + if not key.startswith(prefix_str): + continue + except (AttributeError, TypeError, UnicodeError): + # Python 3+ TypeError on b'a'.startswith('a') or vice-versa + continue + + # reformat remainder of key to begin with prefix + rem = key[len(prefix_str):] + # force repr wrapped in ' + rem_repr = repr(rem + '"') if isinstance(rem, str) else repr(rem + b'"') + if rem_repr.startswith('u') and prefix[0] not in 'uU': + # Found key is unicode, but prefix is Py2 string. + # Therefore attempt to interpret key as string. + try: + rem_repr = repr(rem.encode('ascii') + '"') + except UnicodeEncodeError: + continue + + rem_repr = rem_repr[1 + rem_repr.index("'"):-2] + if quote == '"': + # The entered prefix is quoted with ", + # but the match is quoted with '. + # A contained " hence needs escaping for comparison: + rem_repr = rem_repr.replace('"', '\\"') + + # then reinsert prefix from start of token + matched.append('%s%s' % (token_prefix, rem_repr)) + return quote, token_start, matched + + +def cursor_to_position(text:str, line:int, column:int)->int: + """ + + Convert the (line,column) position of the cursor in text to an offset in a + string. 
+ + Parameters + ---------- + + text : str + The text in which to calculate the cursor offset + line : int + Line of the cursor; 0-indexed + column : int + Column of the cursor 0-indexed + + Return + ------ + Position of the cursor in ``text``, 0-indexed. + + See Also + -------- + position_to_cursor: reciprocal of this function + + """ + lines = text.split('\n') + assert line <= len(lines), '{} <= {}'.format(str(line), str(len(lines))) + + return sum(len(l) + 1 for l in lines[:line]) + column + +def position_to_cursor(text:str, offset:int)->Tuple[int, int]: + """ + Convert the position of the cursor in text (0 indexed) to a line + number(0-indexed) and a column number (0-indexed) pair + + Position should be a valid position in ``text``. + + Parameters + ---------- + + text : str + The text in which to calculate the cursor offset + offset : int + Position of the cursor in ``text``, 0-indexed. + + Return + ------ + (line, column) : (int, int) + Line of the cursor; 0-indexed, column of the cursor 0-indexed + + + See Also + -------- + cursor_to_position : reciprocal of this function + + + """ + + assert 0 < offset <= len(text) , "0 < %s <= %s" % (offset , len(text)) + + before = text[:offset] + blines = before.split('\n') # ! splitnes trim trailing \n + line = before.count('\n') + col = len(blines[-1]) + return line, col + + +def _safe_isinstance(obj, module, class_name): + """Checks if obj is an instance of module.class_name if loaded + """ + return (module in sys.modules and + isinstance(obj, getattr(import_module(module), class_name))) + + +def back_unicode_name_matches(text): + u"""Match unicode characters back to unicode name + + This does ``☃`` -> ``\\snowman`` + + Note that snowman is not a valid python3 combining character but will be expanded. + Though it will not recombine back to the snowman character by the completion machinery. + + This will not either back-complete standard sequences like \\n, \\b ... + + Used on Python 3 only. 
+ """ + if len(text)<2: + return u'', () + maybe_slash = text[-2] + if maybe_slash != '\\': + return u'', () + + char = text[-1] + # no expand on quote for completion in strings. + # nor backcomplete standard ascii keys + if char in string.ascii_letters or char in ['"',"'"]: + return u'', () + try : + unic = unicodedata.name(char) + return '\\'+char,['\\'+unic] + except KeyError: + pass + return u'', () + +def back_latex_name_matches(text:str): + """Match latex characters back to unicode name + + This does ``\\ℵ`` -> ``\\aleph`` + + Used on Python 3 only. + """ + if len(text)<2: + return u'', () + maybe_slash = text[-2] + if maybe_slash != '\\': + return u'', () + + + char = text[-1] + # no expand on quote for completion in strings. + # nor backcomplete standard ascii keys + if char in string.ascii_letters or char in ['"',"'"]: + return u'', () + try : + latex = reverse_latex_symbol[char] + # '\\' replace the \ as well + return '\\'+char,[latex] + except KeyError: + pass + return u'', () + + +def _formatparamchildren(parameter) -> str: + """ + Get parameter name and value from Jedi Private API + + Jedi does not expose a simple way to get `param=value` from its API. + + Prameter + ======== + + parameter: + Jedi's function `Param` + + Returns + ======= + + A string like 'a', 'b=1', '*args', '**kwargs' + + + """ + description = parameter.description + if not description.startswith('param '): + raise ValueError('Jedi function parameter description have change format.' + 'Expected "param ...", found %r".' % description) + return description[6:] + +def _make_signature(completion)-> str: + """ + Make the signature from a jedi completion + + Parameter + ========= + + completion: jedi.Completion + object does not complete a function type + + Returns + ======= + + a string consisting of the function signature, with the parenthesis but + without the function name. 
example: + `(a, *args, b=1, **kwargs)` + + """ + + return '(%s)'% ', '.join([f for f in (_formatparamchildren(p) for p in completion.params) if f]) + +class IPCompleter(Completer): + """Extension of the completer class with yap_ipython-specific features""" + + @observe('greedy') + def _greedy_changed(self, change): + """update the splitter and readline delims when greedy is changed""" + if change['new']: + self.splitter.delims = GREEDY_DELIMS + else: + self.splitter.delims = DELIMS + + merge_completions = Bool(True, + help="""Whether to merge completion results into a single list + + If False, only the completion results from the first non-empty + completer will be returned. + """ + ).tag(config=True) + omit__names = Enum((0,1,2), default_value=2, + help="""Instruct the completer to omit private method names + + Specifically, when completing on ``object.``. + + When 2 [default]: all names that start with '_' will be excluded. + + When 1: all 'magic' names (``__foo__``) will be excluded. + + When 0: nothing will be excluded. + """ + ).tag(config=True) + limit_to__all__ = Bool(False, + help=""" + DEPRECATED as of version 5.0. + + Instruct the completer to use __all__ for the completion + + Specifically, when completing on ``object.``. + + When True: only those names in obj.__all__ will be included. + + When False [default]: the __all__ attribute is ignored + """, + ).tag(config=True) + + @observe('limit_to__all__') + def _limit_to_all_changed(self, change): + warnings.warn('`yap_ipython.core.IPCompleter.limit_to__all__` configuration ' + 'value has been deprecated since yap_ipython 5.0, will be made to have ' + 'no effects and then removed in future version of yap_ipython.', + UserWarning) + + def __init__(self, shell=None, namespace=None, global_namespace=None, + use_readline=_deprecation_readline_sentinel, config=None, **kwargs): + """IPCompleter() -> completer + + Return a completer object. 
+ + Parameters + ---------- + + shell + a pointer to the ipython shell itself. This is needed + because this completer knows about magic functions, and those can + only be accessed via the ipython instance. + + namespace : dict, optional + an optional dict where completions are performed. + + global_namespace : dict, optional + secondary optional dict for completions, to + handle cases (such as yap_ipython embedded inside functions) where + both Python scopes are visible. + + use_readline : bool, optional + DEPRECATED, ignored since yap_ipython 6.0, will have no effects + """ + + self.magic_escape = ESC_MAGIC + self.splitter = CompletionSplitter() + + if use_readline is not _deprecation_readline_sentinel: + warnings.warn('The `use_readline` parameter is deprecated and ignored since yap_ipython 6.0.', + DeprecationWarning, stacklevel=2) + + # _greedy_changed() depends on splitter and readline being defined: + Completer.__init__(self, namespace=namespace, global_namespace=global_namespace, + config=config, **kwargs) + + # List where completion matches will be stored + self.matches = [] + self.shell = shell + # Regexp to split filenames with spaces in them + self.space_name_re = re.compile(r'([^\\] )') + # Hold a local ref. to glob.glob for speed + self.glob = glob.glob + + # Determine if we are running on 'dumb' terminals, like (X)Emacs + # buffers, to avoid completion problems. 
+ term = os.environ.get('TERM','xterm') + self.dumb_terminal = term in ['dumb','emacs'] + + # Special handling of backslashes needed in win32 platforms + if sys.platform == "win32": + self.clean_glob = self._clean_glob_win32 + else: + self.clean_glob = self._clean_glob + + #regexp to parse docstring for function signature + self.docstring_sig_re = re.compile(r'^[\w|\s.]+\(([^)]*)\).*') + self.docstring_kwd_re = re.compile(r'[\s|\[]*(\w+)(?:\s*=\s*.*)') + #use this if positional argument name is also needed + #= re.compile(r'[\s|\[]*(\w+)(?:\s*=?\s*.*)') + + # All active matcher routines for completion + self.matchers = [ + self.python_matches, + self.file_matches, + self.magic_matches, + self.python_func_kw_matches, + self.dict_key_matches, + ] + self.magic_arg_matchers = [ + self.magic_config_matches, + self.magic_color_matches, + ] + + # This is set externally by InteractiveShell + self.custom_completers = None + + def all_completions(self, text): + """ + Wrapper around the complete method for the benefit of emacs. + """ + return self.complete(text)[1] + + def _clean_glob(self, text): + return self.glob("%s*" % text) + + def _clean_glob_win32(self,text): + return [f.replace("\\","/") + for f in self.glob("%s*" % text)] + + def file_matches(self, text): + """Match filenames, expanding ~USER type strings. + + Most of the seemingly convoluted logic in this completer is an + attempt to handle filenames with spaces in them. And yet it's not + quite perfect, because Python's readline doesn't expose all of the + GNU readline details needed for this to be done correctly. + + For a filename with a space in it, the printed completions will be + only the parts after what's already been typed (instead of the + full completions, as is normally done). I don't think with the + current (as of Python 2.3) Python readline it's possible to do + better.""" + + # chars that require escaping with backslash - i.e. 
chars + # that readline treats incorrectly as delimiters, but we + # don't want to treat as delimiters in filename matching + # when escaped with backslash + if text.startswith('!'): + text = text[1:] + text_prefix = u'!' + else: + text_prefix = u'' + + text_until_cursor = self.text_until_cursor + # track strings with open quotes + open_quotes = has_open_quotes(text_until_cursor) + + if '(' in text_until_cursor or '[' in text_until_cursor: + lsplit = text + else: + try: + # arg_split ~ shlex.split, but with unicode bugs fixed by us + lsplit = arg_split(text_until_cursor)[-1] + except ValueError: + # typically an unmatched ", or backslash without escaped char. + if open_quotes: + lsplit = text_until_cursor.split(open_quotes)[-1] + else: + return [] + except IndexError: + # tab pressed on empty line + lsplit = "" + + if not open_quotes and lsplit != protect_filename(lsplit): + # if protectables are found, do matching on the whole escaped name + has_protectables = True + text0,text = text,lsplit + else: + has_protectables = False + text = os.path.expanduser(text) + + if text == "": + return [text_prefix + protect_filename(f) for f in self.glob("*")] + + # Compute the matches from the filesystem + if sys.platform == 'win32': + m0 = self.clean_glob(text) + else: + m0 = self.clean_glob(text.replace('\\', '')) + + if has_protectables: + # If we had protectables, we need to revert our changes to the + # beginning of filename so that we don't double-write the part + # of the filename we have so far + len_lsplit = len(lsplit) + matches = [text_prefix + text0 + + protect_filename(f[len_lsplit:]) for f in m0] + else: + if open_quotes: + # if we have a string with an open quote, we don't need to + # protect the names beyond the quote (and we _shouldn't_, as + # it would cause bugs when the filesystem call is made). 
+ matches = m0 if sys.platform == "win32" else\ + [protect_filename(f, open_quotes) for f in m0] + else: + matches = [text_prefix + + protect_filename(f) for f in m0] + + # Mark directories in input list by appending '/' to their names. + return [x+'/' if os.path.isdir(x) else x for x in matches] + + def magic_matches(self, text): + """Match magics""" + # Get all shell magics now rather than statically, so magics loaded at + # runtime show up too. + lsm = self.shell.magics_manager.lsmagic() + line_magics = lsm['line'] + cell_magics = lsm['cell'] + pre = self.magic_escape + pre2 = pre+pre + + explicit_magic = text.startswith(pre) + + # Completion logic: + # - user gives %%: only do cell magics + # - user gives %: do both line and cell magics + # - no prefix: do both + # In other words, line magics are skipped if the user gives %% explicitly + # + # We also exclude magics that match any currently visible names: + # https://github.com/ipython/ipython/issues/4877, unless the user has + # typed a %: + # https://github.com/ipython/ipython/issues/10754 + bare_text = text.lstrip(pre) + global_matches = self.global_matches(bare_text) + if not explicit_magic: + def matches(magic): + """ + Filter magics, in particular remove magics that match + a name present in global namespace. 
+ """ + return ( magic.startswith(bare_text) and + magic not in global_matches ) + else: + def matches(magic): + return magic.startswith(bare_text) + + comp = [ pre2+m for m in cell_magics if matches(m)] + if not text.startswith(pre2): + comp += [ pre+m for m in line_magics if matches(m)] + + return comp + + def magic_config_matches(self, text:str) -> List[str]: + """ Match class names and attributes for %config magic """ + texts = text.strip().split() + + if len(texts) > 0 and (texts[0] == 'config' or texts[0] == '%config'): + # get all configuration classes + classes = sorted(set([ c for c in self.shell.configurables + if c.__class__.class_traits(config=True) + ]), key=lambda x: x.__class__.__name__) + classnames = [ c.__class__.__name__ for c in classes ] + + # return all classnames if config or %config is given + if len(texts) == 1: + return classnames + + # match classname + classname_texts = texts[1].split('.') + classname = classname_texts[0] + classname_matches = [ c for c in classnames + if c.startswith(classname) ] + + # return matched classes or the matched class with attributes + if texts[1].find('.') < 0: + return classname_matches + elif len(classname_matches) == 1 and \ + classname_matches[0] == classname: + cls = classes[classnames.index(classname)].__class__ + help = cls.class_get_help() + # strip leading '--' from cl-args: + help = re.sub(re.compile(r'^--', re.MULTILINE), '', help) + return [ attr.split('=')[0] + for attr in help.strip().splitlines() + if attr.startswith(texts[1]) ] + return [] + + def magic_color_matches(self, text:str) -> List[str] : + """ Match color schemes for %colors magic""" + texts = text.split() + if text.endswith(' '): + # .split() strips off the trailing whitespace. 
Add '' back + # so that: '%colors ' -> ['%colors', ''] + texts.append('') + + if len(texts) == 2 and (texts[0] == 'colors' or texts[0] == '%colors'): + prefix = texts[1] + return [ color for color in InspectColors.keys() + if color.startswith(prefix) ] + return [] + + def _jedi_matches(self, cursor_column:int, cursor_line:int, text:str): + """ + + Return a list of :any:`jedi.api.Completions` object from a ``text`` and + cursor position. + + Parameters + ---------- + cursor_column : int + column position of the cursor in ``text``, 0-indexed. + cursor_line : int + line position of the cursor in ``text``, 0-indexed + text : str + text to complete + + Debugging + --------- + + If ``IPCompleter.debug`` is ``True`` may return a :any:`_FakeJediCompletion` + object containing a string with the Jedi debug information attached. + """ + namespaces = [self.namespace] + if self.global_namespace is not None: + namespaces.append(self.global_namespace) + + completion_filter = lambda x:x + # cursor_pos is an it, jedi wants line and column + offset = cursor_to_position(text, cursor_line, cursor_column) + # filter output if we are completing for object members + if offset: + pre = text[offset-1] + if pre == '.': + if self.omit__names == 2: + completion_filter = lambda c:not c.name.startswith('_') + elif self.omit__names == 1: + completion_filter = lambda c:not (c.name.startswith('__') and c.name.endswith('__')) + elif self.omit__names == 0: + completion_filter = lambda x:x + else: + raise ValueError("Don't understand self.omit__names == {}".format(self.omit__names)) + + interpreter = jedi.Interpreter( + text, namespaces, column=cursor_column, line=cursor_line + 1) + try_jedi = True + + try: + # should we check the type of the node is Error ? 
+ try: + # jedi >= 0.11 + from parso.tree import ErrorLeaf + except ImportError: + # jedi < 0.11 + from jedi.parser.tree import ErrorLeaf + + next_to_last_tree = interpreter._get_module().tree_node.children[-2] + completing_string = False + if isinstance(next_to_last_tree, ErrorLeaf): + completing_string = next_to_last_tree.value[0] in {'"', "'"} + # if we are in a string jedi is likely not the right candidate for + # now. Skip it. + try_jedi = not completing_string + except Exception as e: + # many of things can go wrong, we are using private API just don't crash. + if self.debug: + print("Error detecting if completing a non-finished string :", e, '|') + + if not try_jedi: + return [] + try: + return filter(completion_filter, interpreter.completions()) + except Exception as e: + if self.debug: + return [_FakeJediCompletion('Oops Jedi has crashed, please report a bug with the following:\n"""\n%s\ns"""' % (e))] + else: + return [] + + def python_matches(self, text): + """Match attributes or global python names""" + if "." in text: + try: + matches = self.attr_matches(text) + if text.endswith('.') and self.omit__names: + if self.omit__names == 1: + # true if txt is _not_ a __ name, false otherwise: + no__name = (lambda txt: + re.match(r'.*\.__.*?__',txt) is None) + else: + # true if txt is _not_ a _ name, false otherwise: + no__name = (lambda txt: + re.match(r'\._.*?',txt[txt.rindex('.'):]) is None) + matches = filter(no__name, matches) + except NameError: + # catches . + matches = [] + else: + matches = self.global_matches(text) + return matches + + def _default_arguments_from_docstring(self, doc): + """Parse the first line of docstring for call signature. + + Docstring should be of the form 'min(iterable[, key=func])\n'. + It can also parse cython docstring of the form + 'Minuit.migrad(self, int ncall=10000, resume=True, int nsplit=1)'. 
+ """ + if doc is None: + return [] + + #care only the firstline + line = doc.lstrip().splitlines()[0] + + #p = re.compile(r'^[\w|\s.]+\(([^)]*)\).*') + #'min(iterable[, key=func])\n' -> 'iterable[, key=func]' + sig = self.docstring_sig_re.search(line) + if sig is None: + return [] + # iterable[, key=func]' -> ['iterable[' ,' key=func]'] + sig = sig.groups()[0].split(',') + ret = [] + for s in sig: + #re.compile(r'[\s|\[]*(\w+)(?:\s*=\s*.*)') + ret += self.docstring_kwd_re.findall(s) + return ret + + def _default_arguments(self, obj): + """Return the list of default arguments of obj if it is callable, + or empty list otherwise.""" + call_obj = obj + ret = [] + if inspect.isbuiltin(obj): + pass + elif not (inspect.isfunction(obj) or inspect.ismethod(obj)): + if inspect.isclass(obj): + #for cython embedsignature=True the constructor docstring + #belongs to the object itself not __init__ + ret += self._default_arguments_from_docstring( + getattr(obj, '__doc__', '')) + # for classes, check for __init__,__new__ + call_obj = (getattr(obj, '__init__', None) or + getattr(obj, '__new__', None)) + # for all others, check if they are __call__able + elif hasattr(obj, '__call__'): + call_obj = obj.__call__ + ret += self._default_arguments_from_docstring( + getattr(call_obj, '__doc__', '')) + + _keeps = (inspect.Parameter.KEYWORD_ONLY, + inspect.Parameter.POSITIONAL_OR_KEYWORD) + + try: + sig = inspect.signature(call_obj) + ret.extend(k for k, v in sig.parameters.items() if + v.kind in _keeps) + except ValueError: + pass + + return list(set(ret)) + + def python_func_kw_matches(self,text): + """Match named parameters (kwargs) of the last open function""" + + if "." 
in text: # a parameter cannot be dotted + return [] + try: regexp = self.__funcParamsRegex + except AttributeError: + regexp = self.__funcParamsRegex = re.compile(r''' + '.*?(?,a=1)", the candidate is "foo" + tokens = regexp.findall(self.text_until_cursor) + iterTokens = reversed(tokens); openPar = 0 + + for token in iterTokens: + if token == ')': + openPar -= 1 + elif token == '(': + openPar += 1 + if openPar > 0: + # found the last unclosed parenthesis + break + else: + return [] + # 2. Concatenate dotted names ("foo.bar" for "foo.bar(x, pa" ) + ids = [] + isId = re.compile(r'\w+$').match + + while True: + try: + ids.append(next(iterTokens)) + if not isId(ids[-1]): + ids.pop(); break + if not next(iterTokens) == '.': + break + except StopIteration: + break + + # Find all named arguments already assigned to, as to avoid suggesting + # them again + usedNamedArgs = set() + par_level = -1 + for token, next_token in zip(tokens, tokens[1:]): + if token == '(': + par_level += 1 + elif token == ')': + par_level -= 1 + + if par_level != 0: + continue + + if next_token != '=': + continue + + usedNamedArgs.add(token) + + # lookup the candidate callable matches either using global_matches + # or attr_matches for dotted names + if len(ids) == 1: + callableMatches = self.global_matches(ids[0]) + else: + callableMatches = self.attr_matches('.'.join(ids[::-1])) + argMatches = [] + for callableMatch in callableMatches: + try: + namedArgs = self._default_arguments(eval(callableMatch, + self.namespace)) + except: + continue + + # Remove used named arguments from the list, no need to show twice + for namedArg in set(namedArgs) - usedNamedArgs: + if namedArg.startswith(text): + argMatches.append(u"%s=" %namedArg) + return argMatches + + def dict_key_matches(self, text): + "Match string keys in a dictionary, after e.g. 'foo[' " + def get_keys(obj): + # Objects can define their own completions by defining an + # _ipy_key_completions_() method. 
+ method = get_real_method(obj, '_ipython_key_completions_') + if method is not None: + return method() + + # Special case some common in-memory dict-like types + if isinstance(obj, dict) or\ + _safe_isinstance(obj, 'pandas', 'DataFrame'): + try: + return list(obj.keys()) + except Exception: + return [] + elif _safe_isinstance(obj, 'numpy', 'ndarray') or\ + _safe_isinstance(obj, 'numpy', 'void'): + return obj.dtype.names or [] + return [] + + try: + regexps = self.__dict_key_regexps + except AttributeError: + dict_key_re_fmt = r'''(?x) + ( # match dict-referring expression wrt greedy setting + %s + ) + \[ # open bracket + \s* # and optional whitespace + ([uUbB]? # string prefix (r not handled) + (?: # unclosed string + '(?:[^']|(? key_start: + leading = '' + else: + leading = text[text_start:completion_start] + + # the index of the `[` character + bracket_idx = match.end(1) + + # append closing quote and bracket as appropriate + # this is *not* appropriate if the opening quote or bracket is outside + # the text given to this method + suf = '' + continuation = self.line_buffer[len(self.text_until_cursor):] + if key_start > text_start and closing_quote: + # quotes were opened inside text, maybe close them + if continuation.startswith(closing_quote): + continuation = continuation[len(closing_quote):] + else: + suf += closing_quote + if bracket_idx > text_start: + # brackets were opened inside text, maybe close them + if not continuation.startswith(']'): + suf += ']' + + return [leading + k + suf for k in matches] + + def unicode_name_matches(self, text): + u"""Match Latex-like syntax for unicode characters base + on the name of the character. + + This does ``\\GREEK SMALL LETTER ETA`` -> ``η`` + + Works only on valid python 3 identifier, or on combining characters that + will combine to form a valid identifier. + + Used on Python 3 only. 
+ """ + slashpos = text.rfind('\\') + if slashpos > -1: + s = text[slashpos+1:] + try : + unic = unicodedata.lookup(s) + # allow combining chars + if ('a'+unic).isidentifier(): + return '\\'+s,[unic] + except KeyError: + pass + return u'', [] + + + def latex_matches(self, text): + u"""Match Latex syntax for unicode characters. + + This does both ``\\alp`` -> ``\\alpha`` and ``\\alpha`` -> ``α`` + + Used on Python 3 only. + """ + slashpos = text.rfind('\\') + if slashpos > -1: + s = text[slashpos:] + if s in latex_symbols: + # Try to complete a full latex symbol to unicode + # \\alpha -> α + return s, [latex_symbols[s]] + else: + # If a user has partially typed a latex symbol, give them + # a full list of options \al -> [\aleph, \alpha] + matches = [k for k in latex_symbols if k.startswith(s)] + return s, matches + return u'', [] + + def dispatch_custom_completer(self, text): + if not self.custom_completers: + return + + line = self.line_buffer + if not line.strip(): + return None + + # Create a little structure to pass all the relevant information about + # the current completion to any custom completer. 
+ event = SimpleNamespace() + event.line = line + event.symbol = text + cmd = line.split(None,1)[0] + event.command = cmd + event.text_until_cursor = self.text_until_cursor + + # for foo etc, try also to find completer for %foo + if not cmd.startswith(self.magic_escape): + try_magic = self.custom_completers.s_matches( + self.magic_escape + cmd) + else: + try_magic = [] + + for c in itertools.chain(self.custom_completers.s_matches(cmd), + try_magic, + self.custom_completers.flat_matches(self.text_until_cursor)): + try: + res = c(event) + if res: + # first, try case sensitive match + withcase = [r for r in res if r.startswith(text)] + if withcase: + return withcase + # if none, then case insensitive ones are ok too + text_low = text.lower() + return [r for r in res if r.lower().startswith(text_low)] + except TryNext: + pass + except KeyboardInterrupt: + """ + If custom completer take too long, + let keyboard interrupt abort and return nothing. + """ + break + + return None + + def completions(self, text: str, offset: int)->Iterator[Completion]: + """ + Returns an iterator over the possible completions + + .. warning:: Unstable + + This function is unstable, API may change without warning. + It will also raise unless use in proper context manager. + + Parameters + ---------- + + text:str + Full text of the current input, multi line string. + offset:int + Integer representing the position of the cursor in ``text``. Offset + is 0-based indexed. + + Yields + ------ + :any:`Completion` object + + + The cursor on a text can either be seen as being "in between" + characters or "On" a character depending on the interface visible to + the user. For consistency the cursor being on "in between" characters X + and Y is equivalent to the cursor being "on" character Y, that is to say + the character the cursor is on is considered as being after the cursor. + + Combining characters may span more that one position in the + text. + + + .. 
note:: + + If ``IPCompleter.debug`` is :any:`True` will yield a ``--jedi/ipython--`` + fake Completion token to distinguish completion returned by Jedi + and usual yap_ipython completion. + + .. note:: + + Completions are not completely deduplicated yet. If identical + completions are coming from different sources this function does not + ensure that each completion object will only be present once. + """ + warnings.warn("_complete is a provisional API (as of yap_ipython 6.0). " + "It may change without warnings. " + "Use in corresponding context manager.", + category=ProvisionalCompleterWarning, stacklevel=2) + + seen = set() + for c in self._completions(text, offset, _timeout=self.jedi_compute_type_timeout/1000): + if c and (c in seen): + continue + yield c + seen.add(c) + + def _completions(self, full_text: str, offset: int, *, _timeout)->Iterator[Completion]: + """ + Core completion module.Same signature as :any:`completions`, with the + extra `timeout` parameter (in seconds). + + + Computing jedi's completion ``.type`` can be quite expensive (it is a + lazy property) and can require some warm-up, more warm up than just + computing the ``name`` of a completion. The warm-up can be : + + - Long warm-up the first time a module is encountered after + install/update: actually build parse/inference tree. + + - first time the module is encountered in a session: load tree from + disk. + + We don't want to block completions for tens of seconds so we give the + completer a "budget" of ``_timeout`` seconds per invocation to compute + completions types, the completions that have not yet been computed will + be marked as "unknown" an will have a chance to be computed next round + are things get cached. + + Keep in mind that Jedi is not the only thing treating the completion so + keep the timeout short-ish as if we take more than 0.3 second we still + have lots of processing to do. 
+ + """ + deadline = time.monotonic() + _timeout + + + before = full_text[:offset] + cursor_line, cursor_column = position_to_cursor(full_text, offset) + + matched_text, matches, matches_origin, jedi_matches = self._complete( + full_text=full_text, cursor_line=cursor_line, cursor_pos=cursor_column) + + iter_jm = iter(jedi_matches) + if _timeout: + for jm in iter_jm: + try: + type_ = jm.type + except Exception: + if self.debug: + print("Error in Jedi getting type of ", jm) + type_ = None + delta = len(jm.name_with_symbols) - len(jm.complete) + if type_ == 'function': + signature = _make_signature(jm) + else: + signature = '' + yield Completion(start=offset - delta, + end=offset, + text=jm.name_with_symbols, + type=type_, + signature=signature, + _origin='jedi') + + if time.monotonic() > deadline: + break + + for jm in iter_jm: + delta = len(jm.name_with_symbols) - len(jm.complete) + yield Completion(start=offset - delta, + end=offset, + text=jm.name_with_symbols, + type='', # don't compute type for speed + _origin='jedi', + signature='') + + + start_offset = before.rfind(matched_text) + + # TODO: + # Suppress this, right now just for debug. + if jedi_matches and matches and self.debug: + yield Completion(start=start_offset, end=offset, text='--jedi/ipython--', + _origin='debug', type='none', signature='') + + # I'm unsure if this is always true, so let's assert and see if it + # crash + assert before.endswith(matched_text) + for m, t in zip(matches, matches_origin): + yield Completion(start=start_offset, end=offset, text=m, _origin=t, signature='', type='') + + + def complete(self, text=None, line_buffer=None, cursor_pos=None): + """Find completions for the given text and line context. + + Note that both the text and the line_buffer are optional, but at least + one of them must be given. + + Parameters + ---------- + text : string, optional + Text to perform the completion on. If not given, the line buffer + is split using the instance's CompletionSplitter object. 
+ + line_buffer : string, optional + If not given, the completer attempts to obtain the current line + buffer via readline. This keyword allows clients which are + requesting for text completions in non-readline contexts to inform + the completer of the entire text. + + cursor_pos : int, optional + Index of the cursor in the full line buffer. Should be provided by + remote frontends where kernel has no access to frontend state. + + Returns + ------- + text : str + Text that was actually used in the completion. + + matches : list + A list of completion matches. + + + .. note:: + + This API is likely to be deprecated and replaced by + :any:`IPCompleter.completions` in the future. + + + """ + warnings.warn('`Completer.complete` is pending deprecation since ' + 'yap_ipython 6.0 and will be replaced by `Completer.completions`.', + PendingDeprecationWarning) + # potential todo, FOLD the 3rd throw away argument of _complete + # into the first 2 one. + return self._complete(line_buffer=line_buffer, cursor_pos=cursor_pos, text=text, cursor_line=0)[:2] + + def _complete(self, *, cursor_line, cursor_pos, line_buffer=None, text=None, + full_text=None, return_jedi_results=True) -> Tuple[str, List[str], List[str], Iterable[_FakeJediCompletion]]: + """ + + Like complete but can also returns raw jedi completions as well as the + origin of the completion text. This could (and should) be made much + cleaner but that will be simpler once we drop the old (and stateful) + :any:`complete` API. + + + With current provisional API, cursor_pos act both (depending on the + caller) as the offset in the ``text`` or ``line_buffer``, or as the + ``column`` when passing multiline strings this could/should be renamed + but would add extra noise. 
+ """ + + # if the cursor position isn't given, the only sane assumption we can + # make is that it's at the end of the line (the common case) + if cursor_pos is None: + cursor_pos = len(line_buffer) if text is None else len(text) + + if self.use_main_ns: + self.namespace = __main__.__dict__ + + # if text is either None or an empty string, rely on the line buffer + if (not line_buffer) and full_text: + line_buffer = full_text.split('\n')[cursor_line] + if not text: + text = self.splitter.split_line(line_buffer, cursor_pos) + + if self.backslash_combining_completions: + # allow deactivation of these on windows. + base_text = text if not line_buffer else line_buffer[:cursor_pos] + latex_text, latex_matches = self.latex_matches(base_text) + if latex_matches: + return latex_text, latex_matches, ['latex_matches']*len(latex_matches), () + name_text = '' + name_matches = [] + for meth in (self.unicode_name_matches, back_latex_name_matches, back_unicode_name_matches): + name_text, name_matches = meth(base_text) + if name_text: + return name_text, name_matches[:MATCHES_LIMIT], \ + [meth.__qualname__]*min(len(name_matches), MATCHES_LIMIT), () + + + # If no line buffer is given, assume the input text is all there was + if line_buffer is None: + line_buffer = text + + self.line_buffer = line_buffer + self.text_until_cursor = self.line_buffer[:cursor_pos] + + # Do magic arg matches + for matcher in self.magic_arg_matchers: + matches = list(matcher(line_buffer))[:MATCHES_LIMIT] + if matches: + origins = [matcher.__qualname__] * len(matches) + return text, matches, origins, () + + # Start with a clean slate of completions + matches = [] + custom_res = self.dispatch_custom_completer(text) + # FIXME: we should extend our api to return a dict with completions for + # different types of objects. The rlcomplete() method could then + # simply collapse the dict into a list for readline, but we'd have + # richer completion semantics in other evironments. 
+ completions = () + if self.use_jedi and return_jedi_results: + if not full_text: + full_text = line_buffer + completions = self._jedi_matches( + cursor_pos, cursor_line, full_text) + if custom_res is not None: + # did custom completers produce something? + matches = [(m, 'custom') for m in custom_res] + else: + # Extend the list of completions with the results of each + # matcher, so we return results to the user from all + # namespaces. + if self.merge_completions: + matches = [] + for matcher in self.matchers: + try: + matches.extend([(m, matcher.__qualname__) + for m in matcher(text)]) + except: + # Show the ugly traceback if the matcher causes an + # exception, but do NOT crash the kernel! + sys.excepthook(*sys.exc_info()) + else: + for matcher in self.matchers: + matches = [(m, matcher.__qualname__) + for m in matcher(text)] + if matches: + break + seen = set() + filtered_matches = set() + for m in matches: + t, c = m + if t not in seen: + filtered_matches.add(m) + seen.add(t) + + _filtered_matches = sorted( + set(filtered_matches), key=lambda x: completions_sorting_key(x[0]))\ + [:MATCHES_LIMIT] + + _matches = [m[0] for m in _filtered_matches] + origins = [m[1] for m in _filtered_matches] + + self.matches = _matches + + return text, _matches, origins, completions diff --git a/packages/python/yap_kernel/yap_ipython/core/completerlib.py b/packages/python/yap_kernel/yap_ipython/core/completerlib.py new file mode 100644 index 000000000..3b4b15363 --- /dev/null +++ b/packages/python/yap_kernel/yap_ipython/core/completerlib.py @@ -0,0 +1,354 @@ +# encoding: utf-8 +"""Implementations for various useful completers. + +These are all loaded by default by yap_ipython. +""" +#----------------------------------------------------------------------------- +# Copyright (C) 2010-2011 The yap_ipython Development Team. +# +# Distributed under the terms of the BSD License. +# +# The full license is in the file COPYING.txt, distributed with this software. 
+#----------------------------------------------------------------------------- + +#----------------------------------------------------------------------------- +# Imports +#----------------------------------------------------------------------------- + +# Stdlib imports +import glob +import inspect +import os +import re +import sys +from importlib import import_module +from importlib.machinery import all_suffixes + + +# Third-party imports +from time import time +from zipimport import zipimporter + +# Our own imports +from yap_ipython.core.completer import expand_user, compress_user +from yap_ipython.core.error import TryNext +from yap_ipython.utils._process_common import arg_split + +# FIXME: this should be pulled in with the right call via the component system +from yap_ipython import get_ipython + +from typing import List + +#----------------------------------------------------------------------------- +# Globals and constants +#----------------------------------------------------------------------------- +_suffixes = all_suffixes() + +# Time in seconds after which the rootmodules will be stored permanently in the +# ipython ip.db database (kept in the user's .ipython dir). +TIMEOUT_STORAGE = 2 + +# Time in seconds after which we give up +TIMEOUT_GIVEUP = 20 + +# Regular expression for the python import statement +import_re = re.compile(r'(?P[a-zA-Z_][a-zA-Z0-9_]*?)' + r'(?P[/\\]__init__)?' + r'(?P%s)$' % + r'|'.join(re.escape(s) for s in _suffixes)) + +# RE for the ipython %run command (python + ipython scripts) +magic_run_re = re.compile(r'.*(\.ipy|\.ipynb|\.py[w]?)$') + +#----------------------------------------------------------------------------- +# Local utilities +#----------------------------------------------------------------------------- + +def module_list(path): + """ + Return the list containing the names of the modules available in the given + folder. + """ + # sys.path has the cwd as an empty string, but isdir/listdir need it as '.' 
+ if path == '': + path = '.' + + # A few local constants to be used in loops below + pjoin = os.path.join + + if os.path.isdir(path): + # Build a list of all files in the directory and all files + # in its subdirectories. For performance reasons, do not + # recurse more than one level into subdirectories. + files = [] + for root, dirs, nondirs in os.walk(path, followlinks=True): + subdir = root[len(path)+1:] + if subdir: + files.extend(pjoin(subdir, f) for f in nondirs) + dirs[:] = [] # Do not recurse into additional subdirectories. + else: + files.extend(nondirs) + + else: + try: + files = list(zipimporter(path)._files.keys()) + except: + files = [] + + # Build a list of modules which match the import_re regex. + modules = [] + for f in files: + m = import_re.match(f) + if m: + modules.append(m.group('name')) + return list(set(modules)) + + +def get_root_modules(): + """ + Returns a list containing the names of all the modules available in the + folders of the pythonpath. + + ip.db['rootmodules_cache'] maps sys.path entries to list of modules. + """ + ip = get_ipython() + if ip is None: + # No global shell instance to store cached list of modules. + # Don't try to scan for modules every time. 
+ return list(sys.builtin_module_names) + + rootmodules_cache = ip.db.get('rootmodules_cache', {}) + rootmodules = list(sys.builtin_module_names) + start_time = time() + store = False + for path in sys.path: + try: + modules = rootmodules_cache[path] + except KeyError: + modules = module_list(path) + try: + modules.remove('__init__') + except ValueError: + pass + if path not in ('', '.'): # cwd modules should not be cached + rootmodules_cache[path] = modules + if time() - start_time > TIMEOUT_STORAGE and not store: + store = True + print("\nCaching the list of root modules, please wait!") + print("(This will only be done once - type '%rehashx' to " + "reset cache!)\n") + sys.stdout.flush() + if time() - start_time > TIMEOUT_GIVEUP: + print("This is taking too long, we give up.\n") + return [] + rootmodules.extend(modules) + if store: + ip.db['rootmodules_cache'] = rootmodules_cache + rootmodules = list(set(rootmodules)) + return rootmodules + + +def is_importable(module, attr, only_modules): + if only_modules: + return inspect.ismodule(getattr(module, attr)) + else: + return not(attr[:2] == '__' and attr[-2:] == '__') + + +def try_import(mod: str, only_modules=False) -> List[str]: + """ + Try to import given module and return list of potential completions. + """ + mod = mod.rstrip('.') + try: + m = import_module(mod) + except: + return [] + + m_is_init = hasattr(m, '__file__') and '__init__' in m.__file__ + + completions = [] + if (not hasattr(m, '__file__')) or (not only_modules) or m_is_init: + completions.extend( [attr for attr in dir(m) if + is_importable(m, attr, only_modules)]) + + completions.extend(getattr(m, '__all__', [])) + if m_is_init: + completions.extend(module_list(os.path.dirname(m.__file__))) + completions_set = {c for c in completions if isinstance(c, str)} + completions_set.discard('__init__') + return list(completions_set) + + +#----------------------------------------------------------------------------- +# Completion-related functions. 
+#----------------------------------------------------------------------------- + +def quick_completer(cmd, completions): + """ Easily create a trivial completer for a command. + + Takes either a list of completions, or all completions in string (that will + be split on whitespace). + + Example:: + + [d:\ipython]|1> import ipy_completers + [d:\ipython]|2> ipy_completers.quick_completer('foo', ['bar','baz']) + [d:\ipython]|3> foo b + bar baz + [d:\ipython]|3> foo ba + """ + + if isinstance(completions, str): + completions = completions.split() + + def do_complete(self, event): + return completions + + get_ipython().set_hook('complete_command',do_complete, str_key = cmd) + +def module_completion(line): + """ + Returns a list containing the completion possibilities for an import line. + + The line looks like this : + 'import xml.d' + 'from xml.dom import' + """ + + words = line.split(' ') + nwords = len(words) + + # from whatever -> 'import ' + if nwords == 3 and words[0] == 'from': + return ['import '] + + # 'from xy' or 'import xy' + if nwords < 3 and (words[0] in {'%aimport', 'import', 'from'}) : + if nwords == 1: + return get_root_modules() + mod = words[1].split('.') + if len(mod) < 2: + return get_root_modules() + completion_list = try_import('.'.join(mod[:-1]), True) + return ['.'.join(mod[:-1] + [el]) for el in completion_list] + + # 'from xyz import abc' + if nwords >= 3 and words[0] == 'from': + mod = words[1] + return try_import(mod) + +#----------------------------------------------------------------------------- +# Completers +#----------------------------------------------------------------------------- +# These all have the func(self, event) signature to be used as custom +# completers + +def module_completer(self,event): + """Give completions after user has typed 'import ...' or 'from ...'""" + + # This works in all versions of python. 
While 2.5 has + # pkgutil.walk_packages(), that particular routine is fairly dangerous, + # since it imports *EVERYTHING* on sys.path. That is: a) very slow b) full + # of possibly problematic side effects. + # This search the folders in the sys.path for available modules. + + return module_completion(event.line) + +# FIXME: there's a lot of logic common to the run, cd and builtin file +# completers, that is currently reimplemented in each. + +def magic_run_completer(self, event): + """Complete files that end in .py or .ipy or .ipynb for the %run command. + """ + comps = arg_split(event.line, strict=False) + # relpath should be the current token that we need to complete. + if (len(comps) > 1) and (not event.line.endswith(' ')): + relpath = comps[-1].strip("'\"") + else: + relpath = '' + + #print("\nev=", event) # dbg + #print("rp=", relpath) # dbg + #print('comps=', comps) # dbg + + lglob = glob.glob + isdir = os.path.isdir + relpath, tilde_expand, tilde_val = expand_user(relpath) + + # Find if the user has already typed the first filename, after which we + # should complete on all files, since after the first one other files may + # be arguments to the input script. 
+ + if any(magic_run_re.match(c) for c in comps): + matches = [f.replace('\\','/') + ('/' if isdir(f) else '') + for f in lglob(relpath+'*')] + else: + dirs = [f.replace('\\','/') + "/" for f in lglob(relpath+'*') if isdir(f)] + pys = [f.replace('\\','/') + for f in lglob(relpath+'*.py') + lglob(relpath+'*.ipy') + + lglob(relpath+'*.ipynb') + lglob(relpath + '*.pyw')] + + matches = dirs + pys + + #print('run comp:', dirs+pys) # dbg + return [compress_user(p, tilde_expand, tilde_val) for p in matches] + + +def cd_completer(self, event): + """Completer function for cd, which only returns directories.""" + ip = get_ipython() + relpath = event.symbol + + #print(event) # dbg + if event.line.endswith('-b') or ' -b ' in event.line: + # return only bookmark completions + bkms = self.db.get('bookmarks', None) + if bkms: + return bkms.keys() + else: + return [] + + if event.symbol == '-': + width_dh = str(len(str(len(ip.user_ns['_dh']) + 1))) + # jump in directory history by number + fmt = '-%0' + width_dh +'d [%s]' + ents = [ fmt % (i,s) for i,s in enumerate(ip.user_ns['_dh'])] + if len(ents) > 1: + return ents + return [] + + if event.symbol.startswith('--'): + return ["--" + os.path.basename(d) for d in ip.user_ns['_dh']] + + # Expand ~ in path and normalize directory separators. 
+ relpath, tilde_expand, tilde_val = expand_user(relpath) + relpath = relpath.replace('\\','/') + + found = [] + for d in [f.replace('\\','/') + '/' for f in glob.glob(relpath+'*') + if os.path.isdir(f)]: + if ' ' in d: + # we don't want to deal with any of that, complex code + # for this is elsewhere + raise TryNext + + found.append(d) + + if not found: + if os.path.isdir(relpath): + return [compress_user(relpath, tilde_expand, tilde_val)] + + # if no completions so far, try bookmarks + bks = self.db.get('bookmarks',{}) + bkmatches = [s for s in bks if s.startswith(event.symbol)] + if bkmatches: + return bkmatches + + raise TryNext + + return [compress_user(p, tilde_expand, tilde_val) for p in found] + +def reset_completer(self, event): + "A completer for %reset magic" + return '-f -s in out array dhist'.split() diff --git a/packages/python/yap_kernel/yap_ipython/core/crashhandler.py b/packages/python/yap_kernel/yap_ipython/core/crashhandler.py new file mode 100644 index 000000000..27f5ebef3 --- /dev/null +++ b/packages/python/yap_kernel/yap_ipython/core/crashhandler.py @@ -0,0 +1,215 @@ +# encoding: utf-8 +"""sys.excepthook for yap_ipython itself, leaves a detailed report on disk. + +Authors: + +* Fernando Perez +* Brian E. Granger +""" + +#----------------------------------------------------------------------------- +# Copyright (C) 2001-2007 Fernando Perez. +# Copyright (C) 2008-2011 The yap_ipython Development Team +# +# Distributed under the terms of the BSD License. The full license is in +# the file COPYING, distributed as part of this software. 
+#----------------------------------------------------------------------------- + +#----------------------------------------------------------------------------- +# Imports +#----------------------------------------------------------------------------- + +import os +import sys +import traceback +from pprint import pformat + +from yap_ipython.core import ultratb +from yap_ipython.core.release import author_email +from yap_ipython.utils.sysinfo import sys_info +from yap_ipython.utils.py3compat import input + +#----------------------------------------------------------------------------- +# Code +#----------------------------------------------------------------------------- + +# Template for the user message. +_default_message_template = """\ +Oops, {app_name} crashed. We do our best to make it stable, but... + +A crash report was automatically generated with the following information: + - A verbatim copy of the crash traceback. + - A copy of your input history during this session. + - Data on your current {app_name} configuration. + +It was left in the file named: +\t'{crash_report_fname}' +If you can email this file to the developers, the information in it will help +them in understanding and correcting the problem. + +You can mail it to: {contact_name} at {contact_email} +with the subject '{app_name} Crash Report'. + +If you want to do it now, the following command will work (under Unix): +mail -s '{app_name} Crash Report' {contact_email} < {crash_report_fname} + +To ensure accurate tracking of this issue, please file a report about it at: +{bug_tracker} +""" + +_lite_message_template = """ +If you suspect this is an yap_ipython bug, please report it at: + https://github.com/ipython/ipython/issues +or send an email to the mailing list at {email} + +You can print a more detailed traceback right now with "%tb", or use "%debug" +to interactively debug it. 
+ +Extra-detailed tracebacks for bug-reporting purposes can be enabled via: + {config}Application.verbose_crash=True +""" + + +class CrashHandler(object): + """Customizable crash handlers for yap_ipython applications. + + Instances of this class provide a :meth:`__call__` method which can be + used as a ``sys.excepthook``. The :meth:`__call__` signature is:: + + def __call__(self, etype, evalue, etb) + """ + + message_template = _default_message_template + section_sep = '\n\n'+'*'*75+'\n\n' + + def __init__(self, app, contact_name=None, contact_email=None, + bug_tracker=None, show_crash_traceback=True, call_pdb=False): + """Create a new crash handler + + Parameters + ---------- + app : Application + A running :class:`Application` instance, which will be queried at + crash time for internal information. + + contact_name : str + A string with the name of the person to contact. + + contact_email : str + A string with the email address of the contact. + + bug_tracker : str + A string with the URL for your project's bug tracker. + + show_crash_traceback : bool + If false, don't print the crash traceback on stderr, only generate + the on-disk report + + Non-argument instance attributes: + + These instances contain some non-argument attributes which allow for + further customization of the crash handler's behavior. Please see the + source for further details. 
+ """ + self.crash_report_fname = "Crash_report_%s.txt" % app.name + self.app = app + self.call_pdb = call_pdb + #self.call_pdb = True # dbg + self.show_crash_traceback = show_crash_traceback + self.info = dict(app_name = app.name, + contact_name = contact_name, + contact_email = contact_email, + bug_tracker = bug_tracker, + crash_report_fname = self.crash_report_fname) + + + def __call__(self, etype, evalue, etb): + """Handle an exception, call for compatible with sys.excepthook""" + + # do not allow the crash handler to be called twice without reinstalling it + # this prevents unlikely errors in the crash handling from entering an + # infinite loop. + sys.excepthook = sys.__excepthook__ + + # Report tracebacks shouldn't use color in general (safer for users) + color_scheme = 'NoColor' + + # Use this ONLY for developer debugging (keep commented out for release) + #color_scheme = 'Linux' # dbg + try: + rptdir = self.app.ipython_dir + except: + rptdir = os.getcwd() + if rptdir is None or not os.path.isdir(rptdir): + rptdir = os.getcwd() + report_name = os.path.join(rptdir,self.crash_report_fname) + # write the report filename into the instance dict so it can get + # properly expanded out in the user message template + self.crash_report_fname = report_name + self.info['crash_report_fname'] = report_name + TBhandler = ultratb.VerboseTB( + color_scheme=color_scheme, + long_header=1, + call_pdb=self.call_pdb, + ) + if self.call_pdb: + TBhandler(etype,evalue,etb) + return + else: + traceback = TBhandler.text(etype,evalue,etb,context=31) + + # print traceback to screen + if self.show_crash_traceback: + print(traceback, file=sys.stderr) + + # and generate a complete report on disk + try: + report = open(report_name,'w') + except: + print('Could not create crash report on disk.', file=sys.stderr) + return + + # Inform user on stderr of what happened + print('\n'+'*'*70+'\n', file=sys.stderr) + print(self.message_template.format(**self.info), file=sys.stderr) + + # Construct 
report on disk + report.write(self.make_report(traceback)) + report.close() + input("Hit to quit (your terminal may close):") + + def make_report(self,traceback): + """Return a string containing a crash report.""" + + sec_sep = self.section_sep + + report = ['*'*75+'\n\n'+'yap_ipython post-mortem report\n\n'] + rpt_add = report.append + rpt_add(sys_info()) + + try: + config = pformat(self.app.config) + rpt_add(sec_sep) + rpt_add('Application name: %s\n\n' % self.app_name) + rpt_add('Current user configuration structure:\n\n') + rpt_add(config) + except: + pass + rpt_add(sec_sep+'Crash traceback:\n\n' + traceback) + + return ''.join(report) + + +def crash_handler_lite(etype, evalue, tb): + """a light excepthook, adding a small message to the usual traceback""" + traceback.print_exception(etype, evalue, tb) + + from yap_ipython.core.interactiveshell import InteractiveShell + if InteractiveShell.initialized(): + # we are in a Shell environment, give %magic example + config = "%config " + else: + # we are not in a shell, show generic config + config = "c." + print(_lite_message_template.format(email=author_email, config=config), file=sys.stderr) + diff --git a/packages/python/yap_kernel/yap_ipython/core/debugger.py b/packages/python/yap_kernel/yap_ipython/core/debugger.py new file mode 100644 index 000000000..670b22c6e --- /dev/null +++ b/packages/python/yap_kernel/yap_ipython/core/debugger.py @@ -0,0 +1,645 @@ +# -*- coding: utf-8 -*- +""" +Pdb debugger class. + +Modified from the standard pdb.Pdb class to avoid including readline, so that +the command line completion of other programs which include this isn't +damaged. + +In the future, this class will be expanded with improvements over the standard +pdb. + +The code in this file is mainly lifted out of cmd.py in Python 2.2, with minor +changes. Licensing should therefore be under the standard Python terms. 
For +details on the PSF (Python Software Foundation) standard license, see: + +https://docs.python.org/2/license.html +""" + +#***************************************************************************** +# +# This file is licensed under the PSF license. +# +# Copyright (C) 2001 Python Software Foundation, www.python.org +# Copyright (C) 2005-2006 Fernando Perez. +# +# +#***************************************************************************** + +import bdb +import functools +import inspect +import linecache +import sys +import warnings +import re + +from yap_ipython import get_ipython +from yap_ipython.utils import PyColorize +from yap_ipython.utils import coloransi, py3compat +from yap_ipython.core.excolors import exception_colors +from yap_ipython.testing.skipdoctest import skip_doctest + + +prompt = 'ipdb> ' + +#We have to check this directly from sys.argv, config struct not yet available +from pdb import Pdb as OldPdb + +# Allow the set_trace code to operate outside of an ipython instance, even if +# it does so with some limitations. The rest of this support is implemented in +# the Tracer constructor. + +def make_arrow(pad): + """generate the leading arrow in front of traceback or debugger""" + if pad >= 2: + return '-'*(pad-2) + '> ' + elif pad == 1: + return '>' + return '' + + +def BdbQuit_excepthook(et, ev, tb, excepthook=None): + """Exception hook which handles `BdbQuit` exceptions. + + All other exceptions are processed using the `excepthook` + parameter. + """ + warnings.warn("`BdbQuit_excepthook` is deprecated since version 5.1", + DeprecationWarning, stacklevel=2) + if et==bdb.BdbQuit: + print('Exiting Debugger.') + elif excepthook is not None: + excepthook(et, ev, tb) + else: + # Backwards compatibility. Raise deprecation warning? 
+ BdbQuit_excepthook.excepthook_ori(et,ev,tb) + + +def BdbQuit_IPython_excepthook(self,et,ev,tb,tb_offset=None): + warnings.warn( + "`BdbQuit_IPython_excepthook` is deprecated since version 5.1", + DeprecationWarning, stacklevel=2) + print('Exiting Debugger.') + + +class Tracer(object): + """ + DEPRECATED + + Class for local debugging, similar to pdb.set_trace. + + Instances of this class, when called, behave like pdb.set_trace, but + providing yap_ipython's enhanced capabilities. + + This is implemented as a class which must be initialized in your own code + and not as a standalone function because we need to detect at runtime + whether yap_ipython is already active or not. That detection is done in the + constructor, ensuring that this code plays nicely with a running yap_ipython, + while functioning acceptably (though with limitations) if outside of it. + """ + + @skip_doctest + def __init__(self, colors=None): + """ + DEPRECATED + + Create a local debugger instance. + + Parameters + ---------- + + colors : str, optional + The name of the color scheme to use, it must be one of yap_ipython's + valid color schemes. If not given, the function will default to + the current yap_ipython scheme when running inside yap_ipython, and to + 'NoColor' otherwise. + + Examples + -------- + :: + + from yap_ipython.core.debugger import Tracer; debug_here = Tracer() + + Later in your code:: + + debug_here() # -> will open up the debugger at that point. + + Once the debugger activates, you can use all of its regular commands to + step through code, set breakpoints, etc. See the pdb documentation + from the Python standard library for usage details. 
+ """ + warnings.warn("`Tracer` is deprecated since version 5.1, directly use " + "`yap_ipython.core.debugger.Pdb.set_trace()`", + DeprecationWarning, stacklevel=2) + + ip = get_ipython() + if ip is None: + # Outside of ipython, we set our own exception hook manually + sys.excepthook = functools.partial(BdbQuit_excepthook, + excepthook=sys.excepthook) + def_colors = 'NoColor' + else: + # In ipython, we use its custom exception handler mechanism + def_colors = ip.colors + ip.set_custom_exc((bdb.BdbQuit,), BdbQuit_IPython_excepthook) + + if colors is None: + colors = def_colors + + # The stdlib debugger internally uses a modified repr from the `repr` + # module, that limits the length of printed strings to a hardcoded + # limit of 30 characters. That much trimming is too aggressive, let's + # at least raise that limit to 80 chars, which should be enough for + # most interactive uses. + try: + try: + from reprlib import aRepr # Py 3 + except ImportError: + from repr import aRepr # Py 2 + aRepr.maxstring = 80 + except: + # This is only a user-facing convenience, so any error we encounter + # here can be warned about but can be otherwise ignored. These + # printouts will tell us about problems if this API changes + import traceback + traceback.print_exc() + + self.debugger = Pdb(colors) + + def __call__(self): + """Starts an interactive debugger at the point where called. + + This is similar to the pdb.set_trace() function from the std lib, but + using yap_ipython's enhanced debugger.""" + + self.debugger.set_trace(sys._getframe().f_back) + + +RGX_EXTRA_INDENT = re.compile('(?<=\n)\s+') + + +def strip_indentation(multiline_string): + return RGX_EXTRA_INDENT.sub('', multiline_string) + + +def decorate_fn_with_doc(new_fn, old_fn, additional_text=""): + """Make new_fn have old_fn's doc string. This is particularly useful + for the ``do_...`` commands that hook into the help system. 
+ Adapted from from a comp.lang.python posting + by Duncan Booth.""" + def wrapper(*args, **kw): + return new_fn(*args, **kw) + if old_fn.__doc__: + wrapper.__doc__ = strip_indentation(old_fn.__doc__) + additional_text + return wrapper + + +def _file_lines(fname): + """Return the contents of a named file as a list of lines. + + This function never raises an IOError exception: if the file can't be + read, it simply returns an empty list.""" + + try: + outfile = open(fname) + except IOError: + return [] + else: + out = outfile.readlines() + outfile.close() + return out + + +class Pdb(OldPdb): + """Modified Pdb class, does not load readline. + + for a standalone version that uses prompt_toolkit, see + `yap_ipython.terminal.debugger.TerminalPdb` and + `yap_ipython.terminal.debugger.set_trace()` + """ + + def __init__(self, color_scheme=None, completekey=None, + stdin=None, stdout=None, context=5): + + # Parent constructor: + try: + self.context = int(context) + if self.context <= 0: + raise ValueError("Context must be a positive integer") + except (TypeError, ValueError): + raise ValueError("Context must be a positive integer") + + OldPdb.__init__(self, completekey, stdin, stdout) + + # yap_ipython changes... + self.shell = get_ipython() + + if self.shell is None: + save_main = sys.modules['__main__'] + # No yap_ipython instance running, we must create one + from yap_ipython.terminal.interactiveshell import \ + TerminalInteractiveShell + self.shell = TerminalInteractiveShell.instance() + # needed by any code which calls __import__("__main__") after + # the debugger was entered. See also #9941. 
+ sys.modules['__main__'] = save_main + + if color_scheme is not None: + warnings.warn( + "The `color_scheme` argument is deprecated since version 5.1", + DeprecationWarning, stacklevel=2) + else: + color_scheme = self.shell.colors + + self.aliases = {} + + # Create color table: we copy the default one from the traceback + # module and add a few attributes needed for debugging + self.color_scheme_table = exception_colors() + + # shorthands + C = coloransi.TermColors + cst = self.color_scheme_table + + cst['NoColor'].colors.prompt = C.NoColor + cst['NoColor'].colors.breakpoint_enabled = C.NoColor + cst['NoColor'].colors.breakpoint_disabled = C.NoColor + + cst['Linux'].colors.prompt = C.Green + cst['Linux'].colors.breakpoint_enabled = C.LightRed + cst['Linux'].colors.breakpoint_disabled = C.Red + + cst['LightBG'].colors.prompt = C.Blue + cst['LightBG'].colors.breakpoint_enabled = C.LightRed + cst['LightBG'].colors.breakpoint_disabled = C.Red + + cst['Neutral'].colors.prompt = C.Blue + cst['Neutral'].colors.breakpoint_enabled = C.LightRed + cst['Neutral'].colors.breakpoint_disabled = C.Red + + + # Add a python parser so we can syntax highlight source while + # debugging. 
+ self.parser = PyColorize.Parser(style=color_scheme) + self.set_colors(color_scheme) + + # Set the prompt - the default prompt is '(Pdb)' + self.prompt = prompt + + def set_colors(self, scheme): + """Shorthand access to the color table scheme selector method.""" + self.color_scheme_table.set_active_scheme(scheme) + self.parser.style = scheme + + def interaction(self, frame, traceback): + try: + OldPdb.interaction(self, frame, traceback) + except KeyboardInterrupt: + sys.stdout.write('\n' + self.shell.get_exception_only()) + + def new_do_up(self, arg): + OldPdb.do_up(self, arg) + do_u = do_up = decorate_fn_with_doc(new_do_up, OldPdb.do_up) + + def new_do_down(self, arg): + OldPdb.do_down(self, arg) + + do_d = do_down = decorate_fn_with_doc(new_do_down, OldPdb.do_down) + + def new_do_frame(self, arg): + OldPdb.do_frame(self, arg) + + def new_do_quit(self, arg): + + if hasattr(self, 'old_all_completions'): + self.shell.Completer.all_completions=self.old_all_completions + + return OldPdb.do_quit(self, arg) + + do_q = do_quit = decorate_fn_with_doc(new_do_quit, OldPdb.do_quit) + + def new_do_restart(self, arg): + """Restart command. In the context of ipython this is exactly the same + thing as 'quit'.""" + self.msg("Restart doesn't make sense here. 
Using 'quit' instead.") + return self.do_quit(arg) + + def print_stack_trace(self, context=None): + if context is None: + context = self.context + try: + context=int(context) + if context <= 0: + raise ValueError("Context must be a positive integer") + except (TypeError, ValueError): + raise ValueError("Context must be a positive integer") + try: + for frame_lineno in self.stack: + self.print_stack_entry(frame_lineno, context=context) + except KeyboardInterrupt: + pass + + def print_stack_entry(self,frame_lineno, prompt_prefix='\n-> ', + context=None): + if context is None: + context = self.context + try: + context=int(context) + if context <= 0: + raise ValueError("Context must be a positive integer") + except (TypeError, ValueError): + raise ValueError("Context must be a positive integer") + print(self.format_stack_entry(frame_lineno, '', context)) + + # vds: >> + frame, lineno = frame_lineno + filename = frame.f_code.co_filename + self.shell.hooks.synchronize_with_editor(filename, lineno, 0) + # vds: << + + def format_stack_entry(self, frame_lineno, lprefix=': ', context=None): + if context is None: + context = self.context + try: + context=int(context) + if context <= 0: + print("Context must be a positive integer") + except (TypeError, ValueError): + print("Context must be a positive integer") + try: + import reprlib # Py 3 + except ImportError: + import repr as reprlib # Py 2 + + ret = [] + + Colors = self.color_scheme_table.active_colors + ColorsNormal = Colors.Normal + tpl_link = u'%s%%s%s' % (Colors.filenameEm, ColorsNormal) + tpl_call = u'%s%%s%s%%s%s' % (Colors.vName, Colors.valEm, ColorsNormal) + tpl_line = u'%%s%s%%s %s%%s' % (Colors.lineno, ColorsNormal) + tpl_line_em = u'%%s%s%%s %s%%s%s' % (Colors.linenoEm, Colors.line, + ColorsNormal) + + frame, lineno = frame_lineno + + return_value = '' + if '__return__' in frame.f_locals: + rv = frame.f_locals['__return__'] + #return_value += '->' + return_value += reprlib.repr(rv) + '\n' + 
ret.append(return_value) + + #s = filename + '(' + `lineno` + ')' + filename = self.canonic(frame.f_code.co_filename) + link = tpl_link % py3compat.cast_unicode(filename) + + if frame.f_code.co_name: + func = frame.f_code.co_name + else: + func = "" + + call = '' + if func != '?': + if '__args__' in frame.f_locals: + args = reprlib.repr(frame.f_locals['__args__']) + else: + args = '()' + call = tpl_call % (func, args) + + # The level info should be generated in the same format pdb uses, to + # avoid breaking the pdbtrack functionality of python-mode in *emacs. + if frame is self.curframe: + ret.append('> ') + else: + ret.append(' ') + ret.append(u'%s(%s)%s\n' % (link,lineno,call)) + + start = lineno - 1 - context//2 + lines = linecache.getlines(filename) + start = min(start, len(lines) - context) + start = max(start, 0) + lines = lines[start : start + context] + + for i,line in enumerate(lines): + show_arrow = (start + 1 + i == lineno) + linetpl = (frame is self.curframe or show_arrow) \ + and tpl_line_em \ + or tpl_line + ret.append(self.__format_line(linetpl, filename, + start + 1 + i, line, + arrow = show_arrow) ) + return ''.join(ret) + + def __format_line(self, tpl_line, filename, lineno, line, arrow = False): + bp_mark = "" + bp_mark_color = "" + + new_line, err = self.parser.format2(line, 'str') + if not err: + line = new_line + + bp = None + if lineno in self.get_file_breaks(filename): + bps = self.get_breaks(filename, lineno) + bp = bps[-1] + + if bp: + Colors = self.color_scheme_table.active_colors + bp_mark = str(bp.number) + bp_mark_color = Colors.breakpoint_enabled + if not bp.enabled: + bp_mark_color = Colors.breakpoint_disabled + + numbers_width = 7 + if arrow: + # This is the line with the error + pad = numbers_width - len(str(lineno)) - len(bp_mark) + num = '%s%s' % (make_arrow(pad), str(lineno)) + else: + num = '%*s' % (numbers_width - len(bp_mark), str(lineno)) + + return tpl_line % (bp_mark_color + bp_mark, num, line) + + + def 
print_list_lines(self, filename, first, last): + """The printing (as opposed to the parsing part of a 'list' + command.""" + try: + Colors = self.color_scheme_table.active_colors + ColorsNormal = Colors.Normal + tpl_line = '%%s%s%%s %s%%s' % (Colors.lineno, ColorsNormal) + tpl_line_em = '%%s%s%%s %s%%s%s' % (Colors.linenoEm, Colors.line, ColorsNormal) + src = [] + if filename == "" and hasattr(self, "_exec_filename"): + filename = self._exec_filename + + for lineno in range(first, last+1): + line = linecache.getline(filename, lineno) + if not line: + break + + if lineno == self.curframe.f_lineno: + line = self.__format_line(tpl_line_em, filename, lineno, line, arrow = True) + else: + line = self.__format_line(tpl_line, filename, lineno, line, arrow = False) + + src.append(line) + self.lineno = lineno + + print(''.join(src)) + + except KeyboardInterrupt: + pass + + def do_list(self, arg): + """Print lines of code from the current stack frame + """ + self.lastcmd = 'list' + last = None + if arg: + try: + x = eval(arg, {}, {}) + if type(x) == type(()): + first, last = x + first = int(first) + last = int(last) + if last < first: + # Assume it's a count + last = first + last + else: + first = max(1, int(x) - 5) + except: + print('*** Error in argument:', repr(arg)) + return + elif self.lineno is None: + first = max(1, self.curframe.f_lineno - 5) + else: + first = self.lineno + 1 + if last is None: + last = first + 10 + self.print_list_lines(self.curframe.f_code.co_filename, first, last) + + # vds: >> + lineno = first + filename = self.curframe.f_code.co_filename + self.shell.hooks.synchronize_with_editor(filename, lineno, 0) + # vds: << + + do_l = do_list + + def getsourcelines(self, obj): + lines, lineno = inspect.findsource(obj) + if inspect.isframe(obj) and obj.f_globals is obj.f_locals: + # must be a module frame: do not try to cut a block out of it + return lines, 1 + elif inspect.ismodule(obj): + return lines, 1 + return inspect.getblock(lines[lineno:]), lineno+1 
+ + def do_longlist(self, arg): + """Print lines of code from the current stack frame. + + Shows more lines than 'list' does. + """ + self.lastcmd = 'longlist' + try: + lines, lineno = self.getsourcelines(self.curframe) + except OSError as err: + self.error(err) + return + last = lineno + len(lines) + self.print_list_lines(self.curframe.f_code.co_filename, lineno, last) + do_ll = do_longlist + + def do_debug(self, arg): + """debug code + Enter a recursive debugger that steps through the code + argument (which is an arbitrary expression or statement to be + executed in the current environment). + """ + sys.settrace(None) + globals = self.curframe.f_globals + locals = self.curframe_locals + p = self.__class__(completekey=self.completekey, + stdin=self.stdin, stdout=self.stdout) + p.use_rawinput = self.use_rawinput + p.prompt = "(%s) " % self.prompt.strip() + self.message("ENTERING RECURSIVE DEBUGGER") + sys.call_tracing(p.run, (arg, globals, locals)) + self.message("LEAVING RECURSIVE DEBUGGER") + sys.settrace(self.trace_dispatch) + self.lastcmd = p.lastcmd + + def do_pdef(self, arg): + """Print the call signature for any callable object. + + The debugger interface to %pdef""" + namespaces = [('Locals', self.curframe.f_locals), + ('Globals', self.curframe.f_globals)] + self.shell.find_line_magic('pdef')(arg, namespaces=namespaces) + + def do_pdoc(self, arg): + """Print the docstring for an object. + + The debugger interface to %pdoc.""" + namespaces = [('Locals', self.curframe.f_locals), + ('Globals', self.curframe.f_globals)] + self.shell.find_line_magic('pdoc')(arg, namespaces=namespaces) + + def do_pfile(self, arg): + """Print (or run through pager) the file where an object is defined. + + The debugger interface to %pfile. + """ + namespaces = [('Locals', self.curframe.f_locals), + ('Globals', self.curframe.f_globals)] + self.shell.find_line_magic('pfile')(arg, namespaces=namespaces) + + def do_pinfo(self, arg): + """Provide detailed information about an object. 
+ + The debugger interface to %pinfo, i.e., obj?.""" + namespaces = [('Locals', self.curframe.f_locals), + ('Globals', self.curframe.f_globals)] + self.shell.find_line_magic('pinfo')(arg, namespaces=namespaces) + + def do_pinfo2(self, arg): + """Provide extra detailed information about an object. + + The debugger interface to %pinfo2, i.e., obj??.""" + namespaces = [('Locals', self.curframe.f_locals), + ('Globals', self.curframe.f_globals)] + self.shell.find_line_magic('pinfo2')(arg, namespaces=namespaces) + + def do_psource(self, arg): + """Print (or run through pager) the source code for an object.""" + namespaces = [('Locals', self.curframe.f_locals), + ('Globals', self.curframe.f_globals)] + self.shell.find_line_magic('psource')(arg, namespaces=namespaces) + + def do_where(self, arg): + """w(here) + Print a stack trace, with the most recent frame at the bottom. + An arrow indicates the "current frame", which determines the + context of most commands. 'bt' is an alias for this command. + + Take a number as argument as an (optional) number of context line to + print""" + if arg: + context = int(arg) + self.print_stack_trace(context) + else: + self.print_stack_trace() + + do_w = do_where + + +def set_trace(frame=None): + """ + Start debugging from `frame`. + + If frame is not specified, debugging starts from caller's frame. + """ + Pdb().set_trace(frame or sys._getframe().f_back) diff --git a/packages/python/yap_kernel/yap_ipython/core/display.py b/packages/python/yap_kernel/yap_ipython/core/display.py new file mode 100644 index 000000000..9b67e2b5f --- /dev/null +++ b/packages/python/yap_kernel/yap_ipython/core/display.py @@ -0,0 +1,1413 @@ +# -*- coding: utf-8 -*- +"""Top-level display functions for displaying object in different formats.""" + +# Copyright (c) yap_ipython Development Team. +# Distributed under the terms of the Modified BSD License. 
+ + +from binascii import b2a_hex, b2a_base64, hexlify +import json +import mimetypes +import os +import struct +import sys +import warnings +from copy import deepcopy + +from yap_ipython.utils.py3compat import cast_unicode +from yap_ipython.testing.skipdoctest import skip_doctest + +__all__ = ['display', 'display_pretty', 'display_html', 'display_markdown', +'display_svg', 'display_png', 'display_jpeg', 'display_latex', 'display_json', +'display_javascript', 'display_pdf', 'DisplayObject', 'TextDisplayObject', +'Pretty', 'HTML', 'Markdown', 'Math', 'Latex', 'SVG', 'ProgressBar', 'JSON', +'GeoJSON', 'Javascript', 'Image', 'clear_output', 'set_matplotlib_formats', +'set_matplotlib_close', 'publish_display_data', 'update_display', 'DisplayHandle', +'Video'] + +#----------------------------------------------------------------------------- +# utility functions +#----------------------------------------------------------------------------- + +def _safe_exists(path): + """Check path, but don't let exceptions raise""" + try: + return os.path.exists(path) + except Exception: + return False + +def _merge(d1, d2): + """Like update, but merges sub-dicts instead of clobbering at the top level. + + Updates d1 in-place + """ + + if not isinstance(d2, dict) or not isinstance(d1, dict): + return d2 + for key, value in d2.items(): + d1[key] = _merge(d1.get(key), value) + return d1 + +def _display_mimetype(mimetype, objs, raw=False, metadata=None): + """internal implementation of all display_foo methods + + Parameters + ---------- + mimetype : str + The mimetype to be published (e.g. 'image/png') + objs : tuple of objects + The Python objects to display, or if raw=True raw text data to + display. + raw : bool + Are the data objects raw data or Python objects that need to be + formatted before display? [default: False] + metadata : dict (optional) + Metadata to be associated with the specific mimetype output. 
+ """ + if metadata: + metadata = {mimetype: metadata} + if raw: + # turn list of pngdata into list of { 'image/png': pngdata } + objs = [ {mimetype: obj} for obj in objs ] + display(*objs, raw=raw, metadata=metadata, include=[mimetype]) + +#----------------------------------------------------------------------------- +# Main functions +#----------------------------------------------------------------------------- + +# use * to indicate transient is keyword-only +def publish_display_data(data, metadata=None, source=None, *, transient=None, **kwargs): + """Publish data and metadata to all frontends. + + See the ``display_data`` message in the messaging documentation for + more details about this message type. + + Keys of data and metadata can be any mime-type. + + Parameters + ---------- + data : dict + A dictionary having keys that are valid MIME types (like + 'text/plain' or 'image/svg+xml') and values that are the data for + that MIME type. The data itself must be a JSON'able data + structure. Minimally all data should have the 'text/plain' data, + which can be displayed by all frontends. If more than the plain + text is given, it is up to the frontend to decide which + representation to use. + metadata : dict + A dictionary for metadata related to the data. This can contain + arbitrary key, value pairs that frontends can use to interpret + the data. mime-type keys matching those in data can be used + to specify metadata about particular representations. + source : str, deprecated + Unused. + transient : dict, keyword-only + A dictionary of transient data, such as display_id. + """ + from yap_ipython.core.interactiveshell import InteractiveShell + + display_pub = InteractiveShell.instance().display_pub + + # only pass transient if supplied, + # to avoid errors with older yap_kernel. + # TODO: We could check for yap_kernel version and provide a detailed upgrade message. 
+ if transient: + kwargs['transient'] = transient + + display_pub.publish( + data=data, + metadata=metadata, + **kwargs + ) + + +def _new_id(): + """Generate a new random text id with urandom""" + return b2a_hex(os.urandom(16)).decode('ascii') + + +def display(*objs, include=None, exclude=None, metadata=None, transient=None, display_id=None, **kwargs): + """Display a Python object in all frontends. + + By default all representations will be computed and sent to the frontends. + Frontends can decide which representation is used and how. + + In terminal yap_ipython this will be similar to using :func:`print`, for use in richer + frontends see Jupyter notebook examples with rich display logic. + + Parameters + ---------- + objs : tuple of objects + The Python objects to display. + raw : bool, optional + Are the objects to be displayed already mimetype-keyed dicts of raw display data, + or Python objects that need to be formatted before display? [default: False] + include : list, tuple or set, optional + A list of format type strings (MIME types) to include in the + format data dict. If this is set *only* the format types included + in this list will be computed. + exclude : list, tuple or set, optional + A list of format type strings (MIME types) to exclude in the format + data dict. If this is set all format types will be computed, + except for those included in this argument. + metadata : dict, optional + A dictionary of metadata to associate with the output. + mime-type keys in this dictionary will be associated with the individual + representation formats, if they exist. + transient : dict, optional + A dictionary of transient data to associate with the output. + Data in this dict should not be persisted to files (e.g. notebooks). + display_id : str, bool optional + Set an id for the display. + This id can be used for updating this display area later via update_display. 
+ If given as `True`, generate a new `display_id` + kwargs: additional keyword-args, optional + Additional keyword-arguments are passed through to the display publisher. + + Returns + ------- + + handle: DisplayHandle + Returns a handle on updatable displays for use with :func:`update_display`, + if `display_id` is given. Returns :any:`None` if no `display_id` is given + (default). + + Examples + -------- + + >>> class Json(object): + ... def __init__(self, json): + ... self.json = json + ... def _repr_pretty_(self, pp, cycle): + ... import json + ... pp.text(json.dumps(self.json, indent=2)) + ... def __repr__(self): + ... return str(self.json) + ... + + >>> d = Json({1:2, 3: {4:5}}) + + >>> print(d) + {1: 2, 3: {4: 5}} + + >>> display(d) + { + "1": 2, + "3": { + "4": 5 + } + } + + >>> def int_formatter(integer, pp, cycle): + ... pp.text('I'*integer) + + >>> plain = get_ipython().display_formatter.formatters['text/plain'] + >>> plain.for_type(int, int_formatter) + + >>> display(7-5) + II + + >>> del plain.type_printers[int] + >>> display(7-5) + 2 + + See Also + -------- + + :func:`update_display` + + Notes + ----- + + In Python, objects can declare their textual representation using the + `__repr__` method. yap_ipython expands on this idea and allows objects to declare + other, rich representations including: + + - HTML + - JSON + - PNG + - JPEG + - SVG + - LaTeX + + A single object can declare some or all of these representations; all are + handled by yap_ipython's display system. + + The main idea of the first approach is that you have to implement special + display methods when you define your class, one for each representation you + want to use. 
Here is a list of the names of the special methods and the + values they must return: + + - `_repr_html_`: return raw HTML as a string + - `_repr_json_`: return a JSONable dict + - `_repr_jpeg_`: return raw JPEG data + - `_repr_png_`: return raw PNG data + - `_repr_svg_`: return raw SVG data as a string + - `_repr_latex_`: return LaTeX commands in a string surrounded by "$". + - `_repr_mimebundle_`: return a full mimebundle containing the mapping + from all mimetypes to data. + Use this for any mime-type not listed above. + + When you are directly writing your own classes, you can adapt them for + display in yap_ipython by following the above approach. But in practice, you + often need to work with existing classes that you can't easily modify. + + You can refer to the documentation on integrating with the display system in + order to register custom formatters for already existing types + (:ref:`integrating_rich_display`). + + .. versionadded:: 5.4 display available without import + .. versionadded:: 6.1 display available without import + + Since yap_ipython 5.4 and 6.1 :func:`display` is automatically made available to + the user without import. If you are using display in a document that might + be used in a pure python context or with older version of yap_ipython, use the + following import at the top of your file:: + + from yap_ipython.display import display + + """ + from yap_ipython.core.interactiveshell import InteractiveShell + + if not InteractiveShell.initialized(): + # Directly print objects. 
+ print(*objs) + return + + raw = kwargs.pop('raw', False) + if transient is None: + transient = {} + if metadata is None: + metadata={} + if display_id: + if display_id is True: + display_id = _new_id() + transient['display_id'] = display_id + if kwargs.get('update') and 'display_id' not in transient: + raise TypeError('display_id required for update_display') + if transient: + kwargs['transient'] = transient + + if not raw: + format = InteractiveShell.instance().display_formatter.format + + for obj in objs: + if raw: + publish_display_data(data=obj, metadata=metadata, **kwargs) + else: + format_dict, md_dict = format(obj, include=include, exclude=exclude) + if not format_dict: + # nothing to display (e.g. _ipython_display_ took over) + continue + if metadata: + # kwarg-specified metadata gets precedence + _merge(md_dict, metadata) + publish_display_data(data=format_dict, metadata=md_dict, **kwargs) + if display_id: + return DisplayHandle(display_id) + + +# use * for keyword-only display_id arg +def update_display(obj, *, display_id, **kwargs): + """Update an existing display by id + + Parameters + ---------- + + obj: + The object with which to update the display + display_id: keyword-only + The id of the display to update + + See Also + -------- + + :func:`display` + """ + kwargs['update'] = True + display(obj, display_id=display_id, **kwargs) + + +class DisplayHandle(object): + """A handle on an updatable display + + Call `.update(obj)` to display a new object. + + Call `.display(obj`) to add a new instance of this display, + and update existing instances. + + See Also + -------- + + :func:`display`, :func:`update_display` + + """ + + def __init__(self, display_id=None): + if display_id is None: + display_id = _new_id() + self.display_id = display_id + + def __repr__(self): + return "<%s display_id=%s>" % (self.__class__.__name__, self.display_id) + + def display(self, obj, **kwargs): + """Make a new display with my id, updating existing instances. 
+ + Parameters + ---------- + + obj: + object to display + **kwargs: + additional keyword arguments passed to display + """ + display(obj, display_id=self.display_id, **kwargs) + + def update(self, obj, **kwargs): + """Update existing displays with my id + + Parameters + ---------- + + obj: + object to display + **kwargs: + additional keyword arguments passed to update_display + """ + update_display(obj, display_id=self.display_id, **kwargs) + + +def display_pretty(*objs, **kwargs): + """Display the pretty (default) representation of an object. + + Parameters + ---------- + objs : tuple of objects + The Python objects to display, or if raw=True raw text data to + display. + raw : bool + Are the data objects raw data or Python objects that need to be + formatted before display? [default: False] + metadata : dict (optional) + Metadata to be associated with the specific mimetype output. + """ + _display_mimetype('text/plain', objs, **kwargs) + + +def display_html(*objs, **kwargs): + """Display the HTML representation of an object. + + Note: If raw=False and the object does not have a HTML + representation, no HTML will be shown. + + Parameters + ---------- + objs : tuple of objects + The Python objects to display, or if raw=True raw HTML data to + display. + raw : bool + Are the data objects raw data or Python objects that need to be + formatted before display? [default: False] + metadata : dict (optional) + Metadata to be associated with the specific mimetype output. + """ + _display_mimetype('text/html', objs, **kwargs) + + +def display_markdown(*objs, **kwargs): + """Displays the Markdown representation of an object. + + Parameters + ---------- + objs : tuple of objects + The Python objects to display, or if raw=True raw markdown data to + display. + raw : bool + Are the data objects raw data or Python objects that need to be + formatted before display? [default: False] + metadata : dict (optional) + Metadata to be associated with the specific mimetype output. 
+ """ + + _display_mimetype('text/markdown', objs, **kwargs) + + +def display_svg(*objs, **kwargs): + """Display the SVG representation of an object. + + Parameters + ---------- + objs : tuple of objects + The Python objects to display, or if raw=True raw svg data to + display. + raw : bool + Are the data objects raw data or Python objects that need to be + formatted before display? [default: False] + metadata : dict (optional) + Metadata to be associated with the specific mimetype output. + """ + _display_mimetype('image/svg+xml', objs, **kwargs) + + +def display_png(*objs, **kwargs): + """Display the PNG representation of an object. + + Parameters + ---------- + objs : tuple of objects + The Python objects to display, or if raw=True raw png data to + display. + raw : bool + Are the data objects raw data or Python objects that need to be + formatted before display? [default: False] + metadata : dict (optional) + Metadata to be associated with the specific mimetype output. + """ + _display_mimetype('image/png', objs, **kwargs) + + +def display_jpeg(*objs, **kwargs): + """Display the JPEG representation of an object. + + Parameters + ---------- + objs : tuple of objects + The Python objects to display, or if raw=True raw JPEG data to + display. + raw : bool + Are the data objects raw data or Python objects that need to be + formatted before display? [default: False] + metadata : dict (optional) + Metadata to be associated with the specific mimetype output. + """ + _display_mimetype('image/jpeg', objs, **kwargs) + + +def display_latex(*objs, **kwargs): + """Display the LaTeX representation of an object. + + Parameters + ---------- + objs : tuple of objects + The Python objects to display, or if raw=True raw latex data to + display. + raw : bool + Are the data objects raw data or Python objects that need to be + formatted before display? [default: False] + metadata : dict (optional) + Metadata to be associated with the specific mimetype output. 
+ """ + _display_mimetype('text/latex', objs, **kwargs) + + +def display_json(*objs, **kwargs): + """Display the JSON representation of an object. + + Note that not many frontends support displaying JSON. + + Parameters + ---------- + objs : tuple of objects + The Python objects to display, or if raw=True raw json data to + display. + raw : bool + Are the data objects raw data or Python objects that need to be + formatted before display? [default: False] + metadata : dict (optional) + Metadata to be associated with the specific mimetype output. + """ + _display_mimetype('application/json', objs, **kwargs) + + +def display_javascript(*objs, **kwargs): + """Display the Javascript representation of an object. + + Parameters + ---------- + objs : tuple of objects + The Python objects to display, or if raw=True raw javascript data to + display. + raw : bool + Are the data objects raw data or Python objects that need to be + formatted before display? [default: False] + metadata : dict (optional) + Metadata to be associated with the specific mimetype output. + """ + _display_mimetype('application/javascript', objs, **kwargs) + + +def display_pdf(*objs, **kwargs): + """Display the PDF representation of an object. + + Parameters + ---------- + objs : tuple of objects + The Python objects to display, or if raw=True raw javascript data to + display. + raw : bool + Are the data objects raw data or Python objects that need to be + formatted before display? [default: False] + metadata : dict (optional) + Metadata to be associated with the specific mimetype output. 
+ """ + _display_mimetype('application/pdf', objs, **kwargs) + + +#----------------------------------------------------------------------------- +# Smart classes +#----------------------------------------------------------------------------- + + +class DisplayObject(object): + """An object that wraps data to be displayed.""" + + _read_flags = 'r' + _show_mem_addr = False + metadata = None + + def __init__(self, data=None, url=None, filename=None, metadata=None): + """Create a display object given raw data. + + When this object is returned by an expression or passed to the + display function, it will result in the data being displayed + in the frontend. The MIME type of the data should match the + subclasses used, so the Png subclass should be used for 'image/png' + data. If the data is a URL, the data will first be downloaded + and then displayed. If + + Parameters + ---------- + data : unicode, str or bytes + The raw data or a URL or file to load the data from + url : unicode + A URL to download the data from. + filename : unicode + Path to a local file to load the data from. 
+ metadata : dict + Dict of metadata associated to be the object when displayed + """ + if data is not None and isinstance(data, str): + if data.startswith('http') and url is None: + url = data + filename = None + data = None + elif _safe_exists(data) and filename is None: + url = None + filename = data + data = None + + self.data = data + self.url = url + self.filename = filename + + if metadata is not None: + self.metadata = metadata + elif self.metadata is None: + self.metadata = {} + + self.reload() + self._check_data() + + def __repr__(self): + if not self._show_mem_addr: + cls = self.__class__ + r = "<%s.%s object>" % (cls.__module__, cls.__name__) + else: + r = super(DisplayObject, self).__repr__() + return r + + def _check_data(self): + """Override in subclasses if there's something to check.""" + pass + + def _data_and_metadata(self): + """shortcut for returning metadata with shape information, if defined""" + if self.metadata: + return self.data, deepcopy(self.metadata) + else: + return self.data + + def reload(self): + """Reload the raw data from file or URL.""" + if self.filename is not None: + with open(self.filename, self._read_flags) as f: + self.data = f.read() + elif self.url is not None: + try: + # Deferred import + from urllib.request import urlopen + response = urlopen(self.url) + self.data = response.read() + # extract encoding from header, if there is one: + encoding = None + for sub in response.headers['content-type'].split(';'): + sub = sub.strip() + if sub.startswith('charset'): + encoding = sub.split('=')[-1].strip() + break + # decode data, if an encoding was specified + if encoding: + self.data = self.data.decode(encoding, 'replace') + except: + self.data = None + +class TextDisplayObject(DisplayObject): + """Validate that display data is text""" + def _check_data(self): + if self.data is not None and not isinstance(self.data, str): + raise TypeError("%s expects text, not %r" % (self.__class__.__name__, self.data)) + +class 
Pretty(TextDisplayObject): + + def _repr_pretty_(self, pp, cycle): + return pp.text(self.data) + + +class HTML(TextDisplayObject): + + def _repr_html_(self): + return self.data + + def __html__(self): + """ + This method exists to inform other HTML-using modules (e.g. Markupsafe, + htmltag, etc) that this object is HTML and does not need things like + special characters (<>&) escaped. + """ + return self._repr_html_() + + +class Markdown(TextDisplayObject): + + def _repr_markdown_(self): + return self.data + + +class Math(TextDisplayObject): + + def _repr_latex_(self): + s = self.data.strip('$') + return "$$%s$$" % s + + +class Latex(TextDisplayObject): + + def _repr_latex_(self): + return self.data + + +class SVG(DisplayObject): + + _read_flags = 'rb' + # wrap data in a property, which extracts the tag, discarding + # document headers + _data = None + + @property + def data(self): + return self._data + + @data.setter + def data(self, svg): + if svg is None: + self._data = None + return + # parse into dom object + from xml.dom import minidom + x = minidom.parseString(svg) + # get svg tag (should be 1) + found_svg = x.getElementsByTagName('svg') + if found_svg: + svg = found_svg[0].toxml() + else: + # fallback on the input, trust the user + # but this is probably an error. 
+ pass + svg = cast_unicode(svg) + self._data = svg + + def _repr_svg_(self): + return self._data_and_metadata() + +class ProgressBar(DisplayObject): + """Progressbar supports displaying a progressbar like element + """ + def __init__(self, total): + """Creates a new progressbar + + Parameters + ---------- + total : int + maximum size of the progressbar + """ + self.total = total + self._progress = 0 + self.html_width = '60ex' + self.text_width = 60 + self._display_id = hexlify(os.urandom(8)).decode('ascii') + + def __repr__(self): + fraction = self.progress / self.total + filled = '=' * int(fraction * self.text_width) + rest = ' ' * (self.text_width - len(filled)) + return '[{}{}] {}/{}'.format( + filled, rest, + self.progress, self.total, + ) + + def _repr_html_(self): + return "<progress style='width:{}' max='{}' value='{}'></progress>".format( + self.html_width, self.total, self.progress) + + def display(self): + display(self, display_id=self._display_id) + + def update(self): + display(self, display_id=self._display_id, update=True) + + @property + def progress(self): + return self._progress + + @progress.setter + def progress(self, value): + self._progress = value + self.update() + + def __iter__(self): + self.display() + self._progress = -1 # First iteration is 0 + return self + + def __next__(self): + """Returns current value and increments display by one.""" + self.progress += 1 + if self.progress < self.total: + return self.progress + else: + raise StopIteration() + +class JSON(DisplayObject): + """JSON expects a JSON-able dict or list + + not an already-serialized JSON string. + + Scalar types (None, number, string) are not allowed, only dict or list containers. + """ + # wrap data in a property, which warns about passing already-serialized JSON + _data = None + def __init__(self, data=None, url=None, filename=None, expanded=False, metadata=None, **kwargs): + """Create a JSON display object given raw data. + + Parameters + ---------- + data : dict or list + JSON data to display.
Not an already-serialized JSON string. + Scalar types (None, number, string) are not allowed, only dict + or list containers. + url : unicode + A URL to download the data from. + filename : unicode + Path to a local file to load the data from. + expanded : boolean + Metadata to control whether a JSON display component is expanded. + metadata: dict + Specify extra metadata to attach to the json display object. + """ + self.metadata = {'expanded': expanded} + if metadata: + self.metadata.update(metadata) + if kwargs: + self.metadata.update(kwargs) + super(JSON, self).__init__(data=data, url=url, filename=filename) + + def _check_data(self): + if self.data is not None and not isinstance(self.data, (dict, list)): + raise TypeError("%s expects JSONable dict or list, not %r" % (self.__class__.__name__, self.data)) + + @property + def data(self): + return self._data + + @data.setter + def data(self, data): + if isinstance(data, str): + if getattr(self, 'filename', None) is None: + warnings.warn("JSON expects JSONable dict or list, not JSON strings") + data = json.loads(data) + self._data = data + + def _data_and_metadata(self): + return self.data, self.metadata + + def _repr_json_(self): + return self._data_and_metadata() + +_css_t = """$("head").append($("<link/>").attr({ + rel: "stylesheet", + type: "text/css", + href: "%s" +})); +""" + +_lib_t1 = """$.getScript("%s", function () { +""" +_lib_t2 = """}); +""" + +class GeoJSON(JSON): + """GeoJSON expects JSON-able dict + + not an already-serialized JSON string. + + Scalar types (None, number, string) are not allowed, only dict containers. + """ + + def __init__(self, *args, **kwargs): + """Create a GeoJSON display object given raw data. + + Parameters + ---------- + data : dict or list + VegaLite data. Not an already-serialized JSON string. + Scalar types (None, number, string) are not allowed, only dict + or list containers.
+ url_template : string + Leaflet TileLayer URL template: http://leafletjs.com/reference.html#url-template + layer_options : dict + Leaflet TileLayer options: http://leafletjs.com/reference.html#tilelayer-options + url : unicode + A URL to download the data from. + filename : unicode + Path to a local file to load the data from. + metadata: dict + Specify extra metadata to attach to the json display object. + + Examples + -------- + + The following will display an interactive map of Mars with a point of + interest on frontend that do support GeoJSON display. + + >>> from yap_ipython.display import GeoJSON + + >>> GeoJSON(data={ + ... "type": "Feature", + ... "geometry": { + ... "type": "Point", + ... "coordinates": [-81.327, 296.038] + ... } + ... }, + ... url_template="http://s3-eu-west-1.amazonaws.com/whereonmars.cartodb.net/{basemap_id}/{z}/{x}/{y}.png", + ... layer_options={ + ... "basemap_id": "celestia_mars-shaded-16k_global", + ... "attribution" : "Celestia/praesepe", + ... "minZoom" : 0, + ... "maxZoom" : 18, + ... }) + + + In the terminal yap_ipython, you will only see the text representation of + the GeoJSON object. + + """ + + super(GeoJSON, self).__init__(*args, **kwargs) + + + def _ipython_display_(self): + bundle = { + 'application/geo+json': self.data, + 'text/plain': '' + } + metadata = { + 'application/geo+json': self.metadata + } + display(bundle, metadata=metadata, raw=True) + +class Javascript(TextDisplayObject): + + def __init__(self, data=None, url=None, filename=None, lib=None, css=None): + """Create a Javascript display object given raw data. + + When this object is returned by an expression or passed to the + display function, it will result in the data being displayed + in the frontend. If the data is a URL, the data will first be + downloaded and then displayed. + + In the Notebook, the containing element will be available as `element`, + and jQuery will be available. Content appended to `element` will be + visible in the output area. 
+ + Parameters + ---------- + data : unicode, str or bytes + The Javascript source code or a URL to download it from. + url : unicode + A URL to download the data from. + filename : unicode + Path to a local file to load the data from. + lib : list or str + A sequence of Javascript library URLs to load asynchronously before + running the source code. The full URLs of the libraries should + be given. A single Javascript library URL can also be given as a + string. + css: : list or str + A sequence of css files to load before running the source code. + The full URLs of the css files should be given. A single css URL + can also be given as a string. + """ + if isinstance(lib, str): + lib = [lib] + elif lib is None: + lib = [] + if isinstance(css, str): + css = [css] + elif css is None: + css = [] + if not isinstance(lib, (list,tuple)): + raise TypeError('expected sequence, got: %r' % lib) + if not isinstance(css, (list,tuple)): + raise TypeError('expected sequence, got: %r' % css) + self.lib = lib + self.css = css + super(Javascript, self).__init__(data=data, url=url, filename=filename) + + def _repr_javascript_(self): + r = '' + for c in self.css: + r += _css_t % c + for l in self.lib: + r += _lib_t1 % l + r += self.data + r += _lib_t2*len(self.lib) + return r + +# constants for identifying png/jpeg data +_PNG = b'\x89PNG\r\n\x1a\n' +_JPEG = b'\xff\xd8' + +def _pngxy(data): + """read the (width, height) from a PNG header""" + ihdr = data.index(b'IHDR') + # next 8 bytes are width/height + return struct.unpack('>ii', data[ihdr+4:ihdr+12]) + +def _jpegxy(data): + """read the (width, height) from a JPEG header""" + # adapted from http://www.64lines.com/jpeg-width-height + + idx = 4 + while True: + block_size = struct.unpack('>H', data[idx:idx+2])[0] + idx = idx + block_size + if data[idx:idx+2] == b'\xFF\xC0': + # found Start of Frame + iSOF = idx + break + else: + # read another block + idx += 2 + + h, w = struct.unpack('>HH', data[iSOF+5:iSOF+9]) + return w, h + +def 
_gifxy(data): + """read the (width, height) from a GIF header""" + return struct.unpack(' tag. Set this to True if you want the image + to be viewable later with no internet connection in the notebook. + + Default is `True`, unless the keyword argument `url` is set, then + default value is `False`. + + Note that QtConsole is not able to display images if `embed` is set to `False` + width : int + Width in pixels to which to constrain the image in html + height : int + Height in pixels to which to constrain the image in html + retina : bool + Automatically set the width and height to half of the measured + width and height. + This only works for embedded images because it reads the width/height + from image data. + For non-embedded images, you can just set the desired display width + and height directly. + unconfined: bool + Set unconfined=True to disable max-width confinement of the image. + metadata: dict + Specify extra metadata to attach to the image. + + Examples + -------- + # embedded image data, works in qtconsole and notebook + # when passed positionally, the first arg can be any of raw image data, + # a URL, or a filename from which to load image data. + # The result is always embedding image data for inline images. + Image('http://www.google.fr/images/srpr/logo3w.png') + Image('/path/to/image.jpg') + Image(b'RAW_PNG_DATA...') + + # Specifying Image(url=...) does not embed the image data, + # it only generates `` tag with a link to the source. + # This will not work in the qtconsole or offline. + Image(url='http://www.google.fr/images/srpr/logo3w.png') + + """ + if filename is not None: + ext = self._find_ext(filename) + elif url is not None: + ext = self._find_ext(url) + elif data is None: + raise ValueError("No image data found. 
Expecting filename, url, or data.") + elif isinstance(data, str) and ( + data.startswith('http') or _safe_exists(data) + ): + ext = self._find_ext(data) + else: + ext = None + + if format is None: + if ext is not None: + if ext == u'jpg' or ext == u'jpeg': + format = self._FMT_JPEG + elif ext == u'png': + format = self._FMT_PNG + elif ext == u'gif': + format = self._FMT_GIF + else: + format = ext.lower() + elif isinstance(data, bytes): + # infer image type from image data header, + # only if format has not been specified. + if data[:2] == _JPEG: + format = self._FMT_JPEG + + # failed to detect format, default png + if format is None: + format = self._FMT_PNG + + if format.lower() == 'jpg': + # jpg->jpeg + format = self._FMT_JPEG + + self.format = format.lower() + self.embed = embed if embed is not None else (url is None) + + if self.embed and self.format not in self._ACCEPTABLE_EMBEDDINGS: + raise ValueError("Cannot embed the '%s' image format" % (self.format)) + if self.embed: + self._mimetype = self._MIMETYPES.get(self.format) + + self.width = width + self.height = height + self.retina = retina + self.unconfined = unconfined + super(Image, self).__init__(data=data, url=url, filename=filename, + metadata=metadata) + + if self.width is None and self.metadata.get('width', {}): + self.width = metadata['width'] + + if self.height is None and self.metadata.get('height', {}): + self.height = metadata['height'] + + if retina: + self._retina_shape() + + + def _retina_shape(self): + """load pixel-doubled width and height from image data""" + if not self.embed: + return + if self.format == self._FMT_PNG: + w, h = _pngxy(self.data) + elif self.format == self._FMT_JPEG: + w, h = _jpegxy(self.data) + elif self.format == self._FMT_GIF: + w, h = _gifxy(self.data) + else: + # retina only supports png + return + self.width = w // 2 + self.height = h // 2 + + def reload(self): + """Reload the raw data from file or URL.""" + if self.embed: + super(Image,self).reload() + if 
self.retina: + self._retina_shape() + + def _repr_html_(self): + if not self.embed: + width = height = klass = '' + if self.width: + width = ' width="%d"' % self.width + if self.height: + height = ' height="%d"' % self.height + if self.unconfined: + klass = ' class="unconfined"' + return u''.format( + url=self.url, + width=width, + height=height, + klass=klass, + ) + + def _repr_mimebundle_(self, include=None, exclude=None): + """Return the image as a mimebundle + + Any new mimetype support should be implemented here. + """ + if self.embed: + mimetype = self._mimetype + data, metadata = self._data_and_metadata(always_both=True) + if metadata: + metadata = {mimetype: metadata} + return {mimetype: data}, metadata + else: + return {'text/html': self._repr_html_()} + + def _data_and_metadata(self, always_both=False): + """shortcut for returning metadata with shape information, if defined""" + b64_data = b2a_base64(self.data).decode('ascii') + md = {} + if self.metadata: + md.update(self.metadata) + if self.width: + md['width'] = self.width + if self.height: + md['height'] = self.height + if self.unconfined: + md['unconfined'] = self.unconfined + if md or always_both: + return b64_data, md + else: + return b64_data + + def _repr_png_(self): + if self.embed and self.format == self._FMT_PNG: + return self._data_and_metadata() + + def _repr_jpeg_(self): + if self.embed and self.format == self._FMT_JPEG: + return self._data_and_metadata() + + def _find_ext(self, s): + return s.split('.')[-1].lower() + + +class Video(DisplayObject): + + def __init__(self, data=None, url=None, filename=None, embed=False, mimetype=None): + """Create a video object given raw data or an URL. + + When this object is returned by an input cell or passed to the + display function, it will result in the video being displayed + in the frontend. + + Parameters + ---------- + data : unicode, str or bytes + The raw video data or a URL or filename to load the data from. 
+ Raw data will require passing `embed=True`. + url : unicode + A URL for the video. If you specify `url=`, + the image data will not be embedded. + filename : unicode + Path to a local file containing the video. + Will be interpreted as a local URL unless `embed=True`. + embed : bool + Should the video be embedded using a data URI (True) or be + loaded using a