[med-svn] [Git][med-team/python3-typed-ast][upstream] New upstream version 1.3.0
Michael R. Crusoe
gitlab at salsa.debian.org
Mon Feb 4 09:30:18 GMT 2019
Michael R. Crusoe pushed to branch upstream at Debian Med / python3-typed-ast
Commits:
9dce3eff by Michael R. Crusoe at 2019-02-04T08:29:24Z
New upstream version 1.3.0
- - - - -
29 changed files:
- PKG-INFO
- ast27/Python/Python-ast.c
- ast27/Python/ast.c
- ast3/Custom/typed_ast.c
- ast3/Include/Python-ast.h
- ast3/Include/ast.h
- ast3/Include/bitset.h
- ast3/Include/compile.h → ast3/Include/compile-ast3.h
- ast3/Include/errcode.h
- ast3/Include/graminit.h
- ast3/Include/grammar.h
- ast3/Include/node.h
- ast3/Include/parsetok.h
- ast3/Include/token.h
- ast3/Parser/grammar1.c
- ast3/Parser/parser.h
- ast3/Parser/parsetok.c
- ast3/Parser/tokenizer.c
- ast3/Parser/tokenizer.h
- ast3/Python/Python-ast.c
- ast3/Python/ast.c
- ast3/Python/graminit.c
- setup.py
- typed_ast.egg-info/PKG-INFO
- typed_ast.egg-info/SOURCES.txt
- typed_ast/__init__.py
- typed_ast/ast27.py
- typed_ast/ast3.py
- typed_ast/conversions.py
Changes:
=====================================
PKG-INFO
=====================================
@@ -1,6 +1,6 @@
Metadata-Version: 1.1
Name: typed-ast
-Version: 1.2.0
+Version: 1.3.0
Summary: a fork of Python 2 and 3 ast modules with type comment support
Home-page: https://github.com/python/typed_ast
Author: David Fisher
=====================================
ast27/Python/Python-ast.c
=====================================
@@ -477,7 +477,7 @@ static PyMethodDef ast_type_methods[] = {
static PyTypeObject AST_type = {
PyVarObject_HEAD_INIT(NULL, 0)
- "_ast27.AST",
+ "typed_ast._ast27.AST",
sizeof(PyObject),
0,
0, /* tp_dealloc */
@@ -533,7 +533,7 @@ static PyTypeObject* make_type(char *type, PyTypeObject* base, char**fields, int
PyTuple_SET_ITEM(fnames, i, field);
}
result = PyObject_CallFunction((PyObject*)&PyType_Type, "s(O){sOss}",
- type, base, "_fields", fnames, "__module__", "_ast27");
+ type, base, "_fields", fnames, "__module__", "typed_ast._ast27");
Py_DECREF(fnames);
return (PyTypeObject*)result;
}
=====================================
ast27/Python/ast.c
=====================================
@@ -1500,7 +1500,6 @@ ast_for_atom(struct compiling *c, const node *n)
case STRING: {
PyObject *kind, *str = parsestrplus(c, n);
const char *raw, *s = STR(CHILD(n, 0));
- int quote = Py_CHARMASK(*s);
/* currently Python allows up to 2 string modifiers */
char *ch, s_kind[3] = {0, 0, 0};
ch = s_kind;
@@ -1519,7 +1518,7 @@ ast_for_atom(struct compiling *c, const node *n)
PyErr_Fetch(&type, &value, &tback);
errstr = PyObject_Str(value);
if (errstr) {
- char *s = "";
+ const char *s = "";
char buf[128];
s = _PyUnicode_AsString(errstr);
PyOS_snprintf(buf, sizeof(buf), "(unicode error) %s", s);
@@ -2190,7 +2189,7 @@ ast_for_call(struct compiling *c, const node *n, expr_ty func)
keyword_ty kw;
identifier key;
int k;
- char *tmp;
+ const char *tmp;
/* CHILD(ch, 0) is test, but must be an identifier? */
e = ast_for_expr(c, CHILD(ch, 0));
=====================================
ast3/Custom/typed_ast.c
=====================================
@@ -1,6 +1,6 @@
#include "Python.h"
#include "Python-ast.h"
-#include "compile.h"
+#include "compile-ast3.h"
#include "node.h"
#include "grammar.h"
#include "token.h"
@@ -211,9 +211,33 @@ err_free(perrdetail *err)
Py_CLEAR(err->filename);
}
+// from Python/pythonrun.c
+node *
+Ta3Parser_SimpleParseStringFlagsFilename(const char *str, const char *filename,
+ int start, int flags)
+{
+ perrdetail err;
+ node *n = Ta3Parser_ParseStringFlagsFilename(str, filename,
+ &_Ta3Parser_Grammar, start, &err, flags);
+ if (n == NULL)
+ err_input(&err);
+ err_free(&err);
+ return n;
+}
+
+/* update compiler and parser flags based on feature version */
+void
+_Ta3Parser_UpdateFlags(PyCompilerFlags *flags, int *iflags, int feature_version)
+{
+ *iflags = PARSER_FLAGS(flags);
+ if (feature_version >= 7)
+ *iflags |= PyPARSE_ASYNC_ALWAYS;
+ flags->cf_flags |= *iflags & PyCF_MASK;
+}
+
// copy of PyParser_ASTFromStringObject in Python/pythonrun.c
/* Preferred access to parser is through AST. */
-mod_ty
+static mod_ty
string_object_to_c_ast(const char *s, PyObject *filename, int start,
PyCompilerFlags *flags, int feature_version,
PyArena *arena)
@@ -221,15 +245,17 @@ string_object_to_c_ast(const char *s, PyObject *filename, int start,
mod_ty mod;
PyCompilerFlags localflags;
perrdetail err;
- int iflags = PARSER_FLAGS(flags);
+ node *n;
+ int iflags;
- node *n = Ta3Parser_ParseStringObject(s, filename,
- &_Ta3Parser_Grammar, start, &err,
- &iflags);
if (flags == NULL) {
localflags.cf_flags = 0;
flags = &localflags;
}
+ _Ta3Parser_UpdateFlags(flags, &iflags, feature_version);
+ n = Ta3Parser_ParseStringObject(s, filename,
+ &_Ta3Parser_Grammar, start, &err,
+ &iflags);
if (n) {
flags->cf_flags |= iflags & PyCF_MASK;
mod = Ta3AST_FromNodeObject(n, flags, filename, feature_version, arena);
=====================================
ast3/Include/Python-ast.h
=====================================
@@ -50,24 +50,24 @@ struct _mod {
asdl_seq *body;
asdl_seq *type_ignores;
} Module;
-
+
struct {
asdl_seq *body;
} Interactive;
-
+
struct {
expr_ty body;
} Expression;
-
+
struct {
asdl_seq *argtypes;
expr_ty returns;
} FunctionType;
-
+
struct {
asdl_seq *body;
} Suite;
-
+
} v;
};
@@ -90,7 +90,7 @@ struct _stmt {
expr_ty returns;
string type_comment;
} FunctionDef;
-
+
struct {
identifier name;
arguments_ty args;
@@ -99,7 +99,7 @@ struct _stmt {
expr_ty returns;
string type_comment;
} AsyncFunctionDef;
-
+
struct {
identifier name;
asdl_seq *bases;
@@ -107,34 +107,34 @@ struct _stmt {
asdl_seq *body;
asdl_seq *decorator_list;
} ClassDef;
-
+
struct {
expr_ty value;
} Return;
-
+
struct {
asdl_seq *targets;
} Delete;
-
+
struct {
asdl_seq *targets;
expr_ty value;
string type_comment;
} Assign;
-
+
struct {
expr_ty target;
operator_ty op;
expr_ty value;
} AugAssign;
-
+
struct {
expr_ty target;
expr_ty annotation;
expr_ty value;
int simple;
} AnnAssign;
-
+
struct {
expr_ty target;
expr_ty iter;
@@ -142,7 +142,7 @@ struct _stmt {
asdl_seq *orelse;
string type_comment;
} For;
-
+
struct {
expr_ty target;
expr_ty iter;
@@ -150,70 +150,70 @@ struct _stmt {
asdl_seq *orelse;
string type_comment;
} AsyncFor;
-
+
struct {
expr_ty test;
asdl_seq *body;
asdl_seq *orelse;
} While;
-
+
struct {
expr_ty test;
asdl_seq *body;
asdl_seq *orelse;
} If;
-
+
struct {
asdl_seq *items;
asdl_seq *body;
string type_comment;
} With;
-
+
struct {
asdl_seq *items;
asdl_seq *body;
string type_comment;
} AsyncWith;
-
+
struct {
expr_ty exc;
expr_ty cause;
} Raise;
-
+
struct {
asdl_seq *body;
asdl_seq *handlers;
asdl_seq *orelse;
asdl_seq *finalbody;
} Try;
-
+
struct {
expr_ty test;
expr_ty msg;
} Assert;
-
+
struct {
asdl_seq *names;
} Import;
-
+
struct {
identifier module;
asdl_seq *names;
int level;
} ImportFrom;
-
+
struct {
asdl_seq *names;
} Global;
-
+
struct {
asdl_seq *names;
} Nonlocal;
-
+
struct {
expr_ty value;
} Expr;
-
+
} v;
int lineno;
int col_offset;
@@ -235,146 +235,147 @@ struct _expr {
boolop_ty op;
asdl_seq *values;
} BoolOp;
-
+
struct {
expr_ty left;
operator_ty op;
expr_ty right;
} BinOp;
-
+
struct {
unaryop_ty op;
expr_ty operand;
} UnaryOp;
-
+
struct {
arguments_ty args;
expr_ty body;
} Lambda;
-
+
struct {
expr_ty test;
expr_ty body;
expr_ty orelse;
} IfExp;
-
+
struct {
asdl_seq *keys;
asdl_seq *values;
} Dict;
-
+
struct {
asdl_seq *elts;
} Set;
-
+
struct {
expr_ty elt;
asdl_seq *generators;
} ListComp;
-
+
struct {
expr_ty elt;
asdl_seq *generators;
} SetComp;
-
+
struct {
expr_ty key;
expr_ty value;
asdl_seq *generators;
} DictComp;
-
+
struct {
expr_ty elt;
asdl_seq *generators;
} GeneratorExp;
-
+
struct {
expr_ty value;
} Await;
-
+
struct {
expr_ty value;
} Yield;
-
+
struct {
expr_ty value;
} YieldFrom;
-
+
struct {
expr_ty left;
asdl_int_seq *ops;
asdl_seq *comparators;
} Compare;
-
+
struct {
expr_ty func;
asdl_seq *args;
asdl_seq *keywords;
} Call;
-
+
struct {
object n;
} Num;
-
+
struct {
string s;
string kind;
} Str;
-
+
struct {
expr_ty value;
int conversion;
expr_ty format_spec;
} FormattedValue;
-
+
struct {
asdl_seq *values;
} JoinedStr;
-
+
struct {
bytes s;
+ string kind;
} Bytes;
-
+
struct {
singleton value;
} NameConstant;
-
+
struct {
constant value;
} Constant;
-
+
struct {
expr_ty value;
identifier attr;
expr_context_ty ctx;
} Attribute;
-
+
struct {
expr_ty value;
slice_ty slice;
expr_context_ty ctx;
} Subscript;
-
+
struct {
expr_ty value;
expr_context_ty ctx;
} Starred;
-
+
struct {
identifier id;
expr_context_ty ctx;
} Name;
-
+
struct {
asdl_seq *elts;
expr_context_ty ctx;
} List;
-
+
struct {
asdl_seq *elts;
expr_context_ty ctx;
} Tuple;
-
+
} v;
int lineno;
int col_offset;
@@ -389,15 +390,15 @@ struct _slice {
expr_ty upper;
expr_ty step;
} Slice;
-
+
struct {
asdl_seq *dims;
} ExtSlice;
-
+
struct {
expr_ty value;
} Index;
-
+
} v;
};
@@ -417,7 +418,7 @@ struct _excepthandler {
identifier name;
asdl_seq *body;
} ExceptHandler;
-
+
} v;
int lineno;
int col_offset;
@@ -462,7 +463,7 @@ struct _type_ignore {
struct {
int lineno;
} TypeIgnore;
-
+
} v;
};
@@ -611,8 +612,9 @@ expr_ty _Ta3_FormattedValue(expr_ty value, int conversion, expr_ty format_spec,
#define JoinedStr(a0, a1, a2, a3) _Ta3_JoinedStr(a0, a1, a2, a3)
expr_ty _Ta3_JoinedStr(asdl_seq * values, int lineno, int col_offset, PyArena
*arena);
-#define Bytes(a0, a1, a2, a3) _Ta3_Bytes(a0, a1, a2, a3)
-expr_ty _Ta3_Bytes(bytes s, int lineno, int col_offset, PyArena *arena);
+#define Bytes(a0, a1, a2, a3, a4) _Ta3_Bytes(a0, a1, a2, a3, a4)
+expr_ty _Ta3_Bytes(bytes s, string kind, int lineno, int col_offset, PyArena
+ *arena);
#define NameConstant(a0, a1, a2, a3) _Ta3_NameConstant(a0, a1, a2, a3)
expr_ty _Ta3_NameConstant(singleton value, int lineno, int col_offset, PyArena
*arena);
=====================================
ast3/Include/ast.h
=====================================
@@ -18,6 +18,13 @@ extern mod_ty Ta3AST_FromNodeObject(
int feature_version,
PyArena *arena);
+#ifndef Py_LIMITED_API
+
+/* _PyAST_ExprAsUnicode is defined in ast_unparse.c */
+extern PyObject * _PyAST_ExprAsUnicode(expr_ty);
+
+#endif /* !Py_LIMITED_API */
+
#ifdef __cplusplus
}
#endif
=====================================
ast3/Include/bitset.h
=====================================
@@ -7,7 +7,7 @@ extern "C" {
/* Bitset interface */
-#define BYTE char
+#define BYTE char
typedef BYTE *bitset;
@@ -18,13 +18,13 @@ int addbit(bitset bs, int ibit); /* Returns 0 if already set */
int samebitset(bitset bs1, bitset bs2, int nbits);
void mergebitset(bitset bs1, bitset bs2, int nbits);
-#define BITSPERBYTE (8*sizeof(BYTE))
-#define NBYTES(nbits) (((nbits) + BITSPERBYTE - 1) / BITSPERBYTE)
+#define BITSPERBYTE (8*sizeof(BYTE))
+#define NBYTES(nbits) (((nbits) + BITSPERBYTE - 1) / BITSPERBYTE)
-#define BIT2BYTE(ibit) ((ibit) / BITSPERBYTE)
-#define BIT2SHIFT(ibit) ((ibit) % BITSPERBYTE)
-#define BIT2MASK(ibit) (1 << BIT2SHIFT(ibit))
-#define BYTE2BIT(ibyte) ((ibyte) * BITSPERBYTE)
+#define BIT2BYTE(ibit) ((ibit) / BITSPERBYTE)
+#define BIT2SHIFT(ibit) ((ibit) % BITSPERBYTE)
+#define BIT2MASK(ibit) (1 << BIT2SHIFT(ibit))
+#define BYTE2BIT(ibyte) ((ibyte) * BITSPERBYTE)
#ifdef __cplusplus
}
=====================================
ast3/Include/compile.h → ast3/Include/compile-ast3.h
=====================================
@@ -1,11 +1,6 @@
-#ifndef Ta3_COMPILE_H
-#define Ta3_COMPILE_H
-
/* These definitions must match corresponding definitions in graminit.h.
There's code in compile.c that checks that they are the same. */
#define Py_single_input 256
#define Py_file_input 257
#define Py_eval_input 258
-#define Py_func_type_input 342
-
-#endif /* !Ta3_COMPILE_H */
+#define Py_func_type_input 343
=====================================
ast3/Include/errcode.h
=====================================
@@ -13,24 +13,24 @@ extern "C" {
the parser only returns E_EOF when it hits EOF immediately, and it
never returns E_OK. */
-#define E_OK 10 /* No error */
-#define E_EOF 11 /* End Of File */
-#define E_INTR 12 /* Interrupted */
-#define E_TOKEN 13 /* Bad token */
-#define E_SYNTAX 14 /* Syntax error */
-#define E_NOMEM 15 /* Ran out of memory */
-#define E_DONE 16 /* Parsing complete */
-#define E_ERROR 17 /* Execution error */
-#define E_TABSPACE 18 /* Inconsistent mixing of tabs and spaces */
-#define E_OVERFLOW 19 /* Node had too many children */
-#define E_TOODEEP 20 /* Too many indentation levels */
-#define E_DEDENT 21 /* No matching outer block for dedent */
-#define E_DECODE 22 /* Error in decoding into Unicode */
-#define E_EOFS 23 /* EOF in triple-quoted string */
-#define E_EOLS 24 /* EOL in single-quoted string */
-#define E_LINECONT 25 /* Unexpected characters after a line continuation */
+#define E_OK 10 /* No error */
+#define E_EOF 11 /* End Of File */
+#define E_INTR 12 /* Interrupted */
+#define E_TOKEN 13 /* Bad token */
+#define E_SYNTAX 14 /* Syntax error */
+#define E_NOMEM 15 /* Ran out of memory */
+#define E_DONE 16 /* Parsing complete */
+#define E_ERROR 17 /* Execution error */
+#define E_TABSPACE 18 /* Inconsistent mixing of tabs and spaces */
+#define E_OVERFLOW 19 /* Node had too many children */
+#define E_TOODEEP 20 /* Too many indentation levels */
+#define E_DEDENT 21 /* No matching outer block for dedent */
+#define E_DECODE 22 /* Error in decoding into Unicode */
+#define E_EOFS 23 /* EOF in triple-quoted string */
+#define E_EOLS 24 /* EOL in single-quoted string */
+#define E_LINECONT 25 /* Unexpected characters after a line continuation */
#define E_IDENTIFIER 26 /* Invalid characters in identifier */
-#define E_BADSINGLE 27 /* Ill-formed single statement input */
+#define E_BADSINGLE 27 /* Ill-formed single statement input */
#ifdef __cplusplus
}
=====================================
ast3/Include/graminit.h
=====================================
@@ -81,11 +81,12 @@
#define arglist 334
#define argument 335
#define comp_iter 336
-#define comp_for 337
-#define comp_if 338
-#define encoding_decl 339
-#define yield_expr 340
-#define yield_arg 341
-#define func_type_input 342
-#define func_type 343
-#define typelist 344
+#define sync_comp_for 337
+#define comp_for 338
+#define comp_if 339
+#define encoding_decl 340
+#define yield_expr 341
+#define yield_arg 342
+#define func_type_input 343
+#define func_type 344
+#define typelist 345
=====================================
ast3/Include/grammar.h
=====================================
@@ -12,58 +12,58 @@ extern "C" {
/* A label of an arc */
typedef struct {
- int lb_type;
- char *lb_str;
+ int lb_type;
+ char *lb_str;
} label;
-#define EMPTY 0 /* Label number 0 is by definition the empty label */
+#define EMPTY 0 /* Label number 0 is by definition the empty label */
/* A list of labels */
typedef struct {
- int ll_nlabels;
- label *ll_label;
+ int ll_nlabels;
+ label *ll_label;
} labellist;
/* An arc from one state to another */
typedef struct {
- short a_lbl; /* Label of this arc */
- short a_arrow; /* State where this arc goes to */
+ short a_lbl; /* Label of this arc */
+ short a_arrow; /* State where this arc goes to */
} arc;
/* A state in a DFA */
typedef struct {
- int s_narcs;
- arc *s_arc; /* Array of arcs */
+ int s_narcs;
+ arc *s_arc; /* Array of arcs */
/* Optional accelerators */
- int s_lower; /* Lowest label index */
- int s_upper; /* Highest label index */
- int *s_accel; /* Accelerator */
- int s_accept; /* Nonzero for accepting state */
+ int s_lower; /* Lowest label index */
+ int s_upper; /* Highest label index */
+ int *s_accel; /* Accelerator */
+ int s_accept; /* Nonzero for accepting state */
} state;
/* A DFA */
typedef struct {
- int d_type; /* Non-terminal this represents */
- char *d_name; /* For printing */
- int d_initial; /* Initial state */
- int d_nstates;
- state *d_state; /* Array of states */
- bitset d_first;
+ int d_type; /* Non-terminal this represents */
+ char *d_name; /* For printing */
+ int d_initial; /* Initial state */
+ int d_nstates;
+ state *d_state; /* Array of states */
+ bitset d_first;
} dfa;
/* A grammar */
typedef struct {
- int g_ndfas;
- dfa *g_dfa; /* Array of DFAs */
- labellist g_ll;
- int g_start; /* Start symbol of the grammar */
- int g_accel; /* Set if accelerators present */
+ int g_ndfas;
+ dfa *g_dfa; /* Array of DFAs */
+ labellist g_ll;
+ int g_start; /* Start symbol of the grammar */
+ int g_accel; /* Set if accelerators present */
} grammar;
/* FUNCTIONS */
=====================================
ast3/Include/node.h
=====================================
@@ -8,15 +8,15 @@ extern "C" {
#endif
typedef struct _node {
- short n_type;
- char *n_str;
- int n_lineno;
- int n_col_offset;
- int n_nchildren;
- struct _node *n_child;
+ short n_type;
+ char *n_str;
+ int n_lineno;
+ int n_col_offset;
+ int n_nchildren;
+ struct _node *n_child;
} node;
-extern node *Ta3Node_New(int type);
+extern node * Ta3Node_New(int type);
extern int Ta3Node_AddChild(node *n, int type,
char *str, int lineno, int col_offset);
extern void Ta3Node_Free(node *n);
@@ -25,12 +25,12 @@ extern Py_ssize_t _Ta3Node_SizeOf(node *n);
#endif
/* Node access functions */
-#define NCH(n) ((n)->n_nchildren)
+#define NCH(n) ((n)->n_nchildren)
-#define CHILD(n, i) (&(n)->n_child[i])
-#define RCHILD(n, i) (CHILD(n, NCH(n) + i))
-#define TYPE(n) ((n)->n_type)
-#define STR(n) ((n)->n_str)
+#define CHILD(n, i) (&(n)->n_child[i])
+#define RCHILD(n, i) (CHILD(n, NCH(n) + i))
+#define TYPE(n) ((n)->n_type)
+#define STR(n) ((n)->n_str)
#define LINENO(n) ((n)->n_lineno)
/* Assert that the type of a node is what we expect */
=====================================
ast3/Include/parsetok.h
=====================================
@@ -21,19 +21,20 @@ typedef struct {
} perrdetail;
#if 0
-#define PyPARSE_YIELD_IS_KEYWORD 0x0001
+#define PyPARSE_YIELD_IS_KEYWORD 0x0001
#endif
-#define PyPARSE_DONT_IMPLY_DEDENT 0x0002
+#define PyPARSE_DONT_IMPLY_DEDENT 0x0002
#if 0
-#define PyPARSE_WITH_IS_KEYWORD 0x0003
+#define PyPARSE_WITH_IS_KEYWORD 0x0003
#define PyPARSE_PRINT_IS_FUNCTION 0x0004
#define PyPARSE_UNICODE_LITERALS 0x0008
#endif
#define PyPARSE_IGNORE_COOKIE 0x0010
#define PyPARSE_BARRY_AS_BDFL 0x0020
+#define PyPARSE_ASYNC_ALWAYS 0x8000
extern node * Ta3Parser_ParseString(const char *, grammar *, int,
perrdetail *);
@@ -98,8 +99,8 @@ extern node * Ta3Parser_ParseStringObject(
/* Note that the following functions are defined in pythonrun.c,
not in parsetok.c */
-PyAPI_FUNC(void) PyParser_SetError(perrdetail *);
-PyAPI_FUNC(void) PyParser_ClearError(perrdetail *);
+extern void PyParser_SetError(perrdetail *);
+extern void PyParser_ClearError(perrdetail *);
#ifdef __cplusplus
}
=====================================
ast3/Include/token.h
=====================================
@@ -9,78 +9,82 @@ extern "C" {
#undef TILDE /* Prevent clash of our definition with system macro. Ex AIX, ioctl.h */
-#define ENDMARKER 0
-#define NAME 1
-#define NUMBER 2
-#define STRING 3
-#define NEWLINE 4
-#define INDENT 5
-#define DEDENT 6
-#define LPAR 7
-#define RPAR 8
-#define LSQB 9
-#define RSQB 10
-#define COLON 11
-#define COMMA 12
-#define SEMI 13
-#define PLUS 14
-#define MINUS 15
-#define STAR 16
-#define SLASH 17
-#define VBAR 18
-#define AMPER 19
-#define LESS 20
-#define GREATER 21
-#define EQUAL 22
-#define DOT 23
-#define PERCENT 24
-#define LBRACE 25
-#define RBRACE 26
-#define EQEQUAL 27
-#define NOTEQUAL 28
-#define LESSEQUAL 29
-#define GREATEREQUAL 30
-#define TILDE 31
-#define CIRCUMFLEX 32
-#define LEFTSHIFT 33
-#define RIGHTSHIFT 34
-#define DOUBLESTAR 35
-#define PLUSEQUAL 36
-#define MINEQUAL 37
-#define STAREQUAL 38
-#define SLASHEQUAL 39
-#define PERCENTEQUAL 40
-#define AMPEREQUAL 41
-#define VBAREQUAL 42
-#define CIRCUMFLEXEQUAL 43
-#define LEFTSHIFTEQUAL 44
-#define RIGHTSHIFTEQUAL 45
-#define DOUBLESTAREQUAL 46
-#define DOUBLESLASH 47
+#define ENDMARKER 0
+#define NAME 1
+#define NUMBER 2
+#define STRING 3
+#define NEWLINE 4
+#define INDENT 5
+#define DEDENT 6
+#define LPAR 7
+#define RPAR 8
+#define LSQB 9
+#define RSQB 10
+#define COLON 11
+#define COMMA 12
+#define SEMI 13
+#define PLUS 14
+#define MINUS 15
+#define STAR 16
+#define SLASH 17
+#define VBAR 18
+#define AMPER 19
+#define LESS 20
+#define GREATER 21
+#define EQUAL 22
+#define DOT 23
+#define PERCENT 24
+#define LBRACE 25
+#define RBRACE 26
+#define EQEQUAL 27
+#define NOTEQUAL 28
+#define LESSEQUAL 29
+#define GREATEREQUAL 30
+#define TILDE 31
+#define CIRCUMFLEX 32
+#define LEFTSHIFT 33
+#define RIGHTSHIFT 34
+#define DOUBLESTAR 35
+#define PLUSEQUAL 36
+#define MINEQUAL 37
+#define STAREQUAL 38
+#define SLASHEQUAL 39
+#define PERCENTEQUAL 40
+#define AMPEREQUAL 41
+#define VBAREQUAL 42
+#define CIRCUMFLEXEQUAL 43
+#define LEFTSHIFTEQUAL 44
+#define RIGHTSHIFTEQUAL 45
+#define DOUBLESTAREQUAL 46
+#define DOUBLESLASH 47
#define DOUBLESLASHEQUAL 48
#define AT 49
-#define ATEQUAL 50
+#define ATEQUAL 50
#define RARROW 51
#define ELLIPSIS 52
/* Don't forget to update the table _Ta3Parser_TokenNames in tokenizer.c! */
-#define OP 53
-#define AWAIT 54
-#define ASYNC 55
+#define OP 53
+#define AWAIT 54
+#define ASYNC 55
#define TYPE_IGNORE 56
#define TYPE_COMMENT 57
-#define ERRORTOKEN 58
-#define N_TOKENS 59
+#define ERRORTOKEN 58
+/* These aren't used by the C tokenizer but are needed for tokenize.py */
+#define COMMENT 59
+#define NL 60
+#define ENCODING 61
+#define N_TOKENS 62
/* Special definitions for cooperation with parser */
-#define NT_OFFSET 256
+#define NT_OFFSET 256
-#define ISTERMINAL(x) ((x) < NT_OFFSET)
-#define ISNONTERMINAL(x) ((x) >= NT_OFFSET)
-#define ISEOF(x) ((x) == ENDMARKER)
+#define ISTERMINAL(x) ((x) < NT_OFFSET)
+#define ISNONTERMINAL(x) ((x) >= NT_OFFSET)
+#define ISEOF(x) ((x) == ENDMARKER)
-extern const char *_Ta3Parser_TokenNames[]; /* Token names */
+extern const char * _Ta3Parser_TokenNames[]; /* Token names */
extern int Ta3Token_OneChar(int);
extern int Ta3Token_TwoChars(int, int);
extern int Ta3Token_ThreeChars(int, int, int);
=====================================
ast3/Parser/grammar1.c
=====================================
@@ -25,8 +25,7 @@ Ta3Grammar_FindDFA(grammar *g, int type)
if (d->d_type == type)
return d;
}
- assert(0);
- /* NOTREACHED */
+ abort();
#endif
}
=====================================
ast3/Parser/parser.h
=====================================
@@ -10,23 +10,23 @@ extern "C" {
#define MAXSTACK 1500
typedef struct {
- int s_state; /* State in current DFA */
- dfa *s_dfa; /* Current DFA */
- struct _node *s_parent; /* Where to add next node */
+ int s_state; /* State in current DFA */
+ dfa *s_dfa; /* Current DFA */
+ struct _node *s_parent; /* Where to add next node */
} stackentry;
typedef struct {
- stackentry *s_top; /* Top entry */
- stackentry s_base[MAXSTACK];/* Array of stack entries */
- /* NB The stack grows down */
+ stackentry *s_top; /* Top entry */
+ stackentry s_base[MAXSTACK];/* Array of stack entries */
+ /* NB The stack grows down */
} stack;
typedef struct {
- stack p_stack; /* Stack of parser states */
- grammar *p_grammar; /* Grammar to use */
- node *p_tree; /* Top of parse tree */
+ stack p_stack; /* Stack of parser states */
+ grammar *p_grammar; /* Grammar to use */
+ node *p_tree; /* Top of parse tree */
#ifdef PY_PARSER_REQUIRES_FUTURE_KEYWORD
- unsigned long p_flags; /* see co_flags in Include/code.h */
+ unsigned long p_flags; /* see co_flags in Include/code.h */
#endif
} parser_state;
=====================================
ast3/Parser/parsetok.c
=====================================
@@ -64,6 +64,8 @@ Ta3Parser_ParseStringObject(const char *s, PyObject *filename,
Py_INCREF(err_ret->filename);
tok->filename = err_ret->filename;
#endif
+ if (*flags & PyPARSE_ASYNC_ALWAYS)
+ tok->async_always = 1;
return parsetok(tok, g, start, err_ret, flags);
}
@@ -264,7 +266,7 @@ parsetok(struct tok_state *tok, grammar *g, int start, perrdetail *err_ret,
}
else
started = 1;
- len = b - a; /* XXX this may compute NULL - NULL */
+ len = (a != NULL && b != NULL) ? b - a : 0;
str = (char *) PyObject_MALLOC(len + 1);
if (str == NULL) {
err_ret->error = E_NOMEM;
@@ -285,18 +287,19 @@ parsetok(struct tok_state *tok, grammar *g, int start, perrdetail *err_ret,
else if ((ps->p_flags & CO_FUTURE_BARRY_AS_BDFL) &&
strcmp(str, "<>")) {
PyObject_FREE(str);
- err_ret->text = "with Barry as BDFL, use '<>' "
- "instead of '!='";
+ err_ret->expected = NOTEQUAL;
err_ret->error = E_SYNTAX;
break;
}
}
#endif
- if (a >= tok->line_start)
+ if (a != NULL && a >= tok->line_start) {
col_offset = Py_SAFE_DOWNCAST(a - tok->line_start,
intptr_t, int);
- else
+ }
+ else {
col_offset = -1;
+ }
if (type == TYPE_IGNORE) {
if (!growable_int_array_add(&type_ignores, tok->lineno)) {
=====================================
ast3/Parser/tokenizer.c
=====================================
@@ -27,6 +27,13 @@
} while (0)
#endif /* Py_XSETREF */
+#ifndef _PyObject_CallNoArg
+#define _PyObject_CallNoArg(func) PyObject_CallObject(func, NULL)
+#endif
+
+/* Alternate tab spacing */
+#define ALTTABSIZE 1
+
#define is_potential_identifier_start(c) (\
(c >= 'a' && c <= 'z')\
|| (c >= 'A' && c <= 'Z')\
@@ -40,12 +47,7 @@
|| c == '_'\
|| (c >= 128))
-#if PY_MINOR_VERSION >= 4
PyAPI_FUNC(char *) PyOS_Readline(FILE *, FILE *, const char *);
-#else
-// Python 3.3 doesn't have PyAPI_FUNC, but it's not supported on Windows anyway.
-char *PyOS_Readline(FILE *, FILE *, char *);
-#endif
/* Return malloc'ed string including trailing \n;
empty malloc'ed string for EOF;
NULL if interrupted */
@@ -122,6 +124,9 @@ const char *_Ta3Parser_TokenNames[] = {
"TYPE_IGNORE",
"TYPE_COMMENT",
"<ERRORTOKEN>",
+ "COMMENT",
+ "NL",
+ "ENCODING",
"<N_TOKENS>"
};
@@ -152,9 +157,6 @@ tok_new(void)
tok->prompt = tok->nextprompt = NULL;
tok->lineno = 0;
tok->level = 0;
- tok->altwarning = 1;
- tok->alterror = 1;
- tok->alttabsize = 1;
tok->altindstack[0] = 0;
tok->decoding_state = STATE_INIT;
tok->decoding_erred = 0;
@@ -171,6 +173,7 @@ tok_new(void)
tok->async_def = 0;
tok->async_def_indent = 0;
tok->async_def_nl = 0;
+ tok->async_always = 0;
return tok;
}
@@ -460,7 +463,7 @@ fp_readl(char *s, int size, struct tok_state *tok)
}
else
{
- bufobj = PyObject_CallObject(tok->decoding_readline, NULL);
+ bufobj = _PyObject_CallNoArg(tok->decoding_readline);
if (bufobj == NULL)
goto error;
}
@@ -553,7 +556,7 @@ fp_setreadl(struct tok_state *tok, const char* enc)
Py_XSETREF(tok->decoding_readline, readline);
if (pos > 0) {
- PyObject *bufobj = PyObject_CallObject(readline, NULL);
+ PyObject *bufobj = _PyObject_CallNoArg(readline);
if (bufobj == NULL)
return 0;
Py_DECREF(bufobj);
@@ -670,7 +673,7 @@ decoding_feof(struct tok_state *tok)
} else {
PyObject* buf = tok->decoding_buffer;
if (buf == NULL) {
- buf = PyObject_CallObject(tok->decoding_readline, NULL);
+ buf = _PyObject_CallNoArg(tok->decoding_readline);
if (buf == NULL) {
error_ret(tok);
return 1;
@@ -976,6 +979,11 @@ tok_nextc(struct tok_state *tok)
buflen = PyBytes_GET_SIZE(u);
buf = PyBytes_AS_STRING(u);
newtok = PyMem_MALLOC(buflen+1);
+ if (newtok == NULL) {
+ Py_DECREF(u);
+ tok->done = E_NOMEM;
+ return EOF;
+ }
strcpy(newtok, buf);
Py_DECREF(u);
}
@@ -1306,22 +1314,9 @@ Ta3Token_ThreeChars(int c1, int c2, int c3)
static int
indenterror(struct tok_state *tok)
{
- if (tok->alterror) {
- tok->done = E_TABSPACE;
- tok->cur = tok->inp;
- return 1;
- }
- if (tok->altwarning) {
-#ifdef PGEN
- PySys_WriteStderr("inconsistent use of tabs and spaces "
- "in indentation\n");
-#else
- PySys_FormatStderr("%U: inconsistent use of tabs and spaces "
- "in indentation\n", tok->filename);
-#endif
- tok->altwarning = 0;
- }
- return 0;
+ tok->done = E_TABSPACE;
+ tok->cur = tok->inp;
+ return ERRORTOKEN;
}
#ifdef PGEN
@@ -1401,9 +1396,8 @@ tok_get(struct tok_state *tok, char **p_start, char **p_end)
col++, altcol++;
}
else if (c == '\t') {
- col = (col/tok->tabsize + 1) * tok->tabsize;
- altcol = (altcol/tok->alttabsize + 1)
- * tok->alttabsize;
+ col = (col / tok->tabsize + 1) * tok->tabsize;
+ altcol = (altcol / ALTTABSIZE + 1) * ALTTABSIZE;
}
else if (c == '\014') {/* Control-L (formfeed) */
col = altcol = 0; /* For Emacs users */
@@ -1432,9 +1426,7 @@ tok_get(struct tok_state *tok, char **p_start, char **p_end)
if (col == tok->indstack[tok->indent]) {
/* No change */
if (altcol != tok->altindstack[tok->indent]) {
- if (indenterror(tok)) {
- return ERRORTOKEN;
- }
+ return indenterror(tok);
}
}
else if (col > tok->indstack[tok->indent]) {
@@ -1445,9 +1437,7 @@ tok_get(struct tok_state *tok, char **p_start, char **p_end)
return ERRORTOKEN;
}
if (altcol <= tok->altindstack[tok->indent]) {
- if (indenterror(tok)) {
- return ERRORTOKEN;
- }
+ return indenterror(tok);
}
tok->pendin++;
tok->indstack[++tok->indent] = col;
@@ -1466,9 +1456,7 @@ tok_get(struct tok_state *tok, char **p_start, char **p_end)
return ERRORTOKEN;
}
if (altcol != tok->altindstack[tok->indent]) {
- if (indenterror(tok)) {
- return ERRORTOKEN;
- }
+ return indenterror(tok);
}
}
}
@@ -1488,8 +1476,18 @@ tok_get(struct tok_state *tok, char **p_start, char **p_end)
}
}
+ /* Peek ahead at the next character */
+ c = tok_nextc(tok);
+ tok_backup(tok, c);
+ /* Check if we are closing an async function */
if (tok->async_def
&& !blankline
+ /* Due to some implementation artifacts of type comments,
+ * a TYPE_COMMENT at the start of a function won't set an
+ * indentation level and it will produce a NEWLINE after it.
+ * To avoid spuriously ending an async function due to this,
+ * wait until we have some non-newline char in front of us. */
+ && c != '\n'
&& tok->level == 0
/* There was a NEWLINE after ASYNC DEF,
so we're past the signature. */
@@ -1574,7 +1572,7 @@ tok_get(struct tok_state *tok, char **p_start, char **p_end)
/* Identifier (most frequent token!) */
nonascii = 0;
if (is_potential_identifier_start(c)) {
- /* Process b"", r"", u"", br"" and rb"" */
+ /* Process the various legal combinations of b"", r"", u"", and f"". */
int saw_b = 0, saw_r = 0, saw_u = 0, saw_f = 0;
while (1) {
if (!(saw_b || saw_u || saw_f) && (c == 'b' || c == 'B'))
@@ -1616,7 +1614,7 @@ tok_get(struct tok_state *tok, char **p_start, char **p_end)
/* async/await parsing block. */
if (tok->cur - tok->start == 5) {
/* Current token length is 5. */
- if (tok->async_def) {
+ if (tok->async_always || tok->async_def) {
/* We're inside an 'async def' function. */
if (memcmp(tok->start, "async", 5) == 0) {
return ASYNC;
@@ -1988,9 +1986,7 @@ Ta3Tokenizer_FindEncodingFilename(int fd, PyObject *filename)
char *p_start =NULL , *p_end =NULL , *encoding = NULL;
#ifndef PGEN
-#if PY_MINOR_VERSION >= 4
fd = _Py_dup(fd);
-#endif
#else
fd = dup(fd);
#endif
=====================================
ast3/Parser/tokenizer.h
=====================================
@@ -47,9 +47,6 @@ struct tok_state {
(Grammar/Grammar). */
PyObject *filename;
#endif
- int altwarning; /* Issue warning if alternate tabs don't match */
- int alterror; /* Issue error if alternate tabs don't match */
- int alttabsize; /* Alternate tab spacing */
int altindstack[MAXINDENT]; /* Stack of alternate indents */
/* Stuff for PEP 0263 */
enum decoding_state decoding_state;
@@ -72,6 +69,7 @@ struct tok_state {
int async_def_indent; /* Indentation level of the outermost 'async def'. */
int async_def_nl; /* =1 if the outermost 'async def' had at least one
NEWLINE token after it. */
+ int async_always; /* =1 if async/await are always keywords */
};
extern struct tok_state *Ta3Tokenizer_FromString(const char *, int);
@@ -80,8 +78,6 @@ extern struct tok_state *Ta3Tokenizer_FromFile(FILE *, const char*,
const char *, const char *);
extern void Ta3Tokenizer_Free(struct tok_state *);
extern int Ta3Tokenizer_Get(struct tok_state *, char **, char **);
-extern char * PyTokenizer_RestoreEncoding(struct tok_state* tok,
- int len, int *offset);
#ifdef __cplusplus
}
=====================================
ast3/Python/Python-ast.c
=====================================
The diff for this file was not included because it is too large.
=====================================
ast3/Python/ast.c
=====================================
@@ -8,15 +8,63 @@
#include "node.h"
#include "ast.h"
#include "token.h"
+#include "pythonrun.h"
#include <assert.h>
-#if PY_MINOR_VERSION < 4
-#define PyErr_ProgramTextObject PyErr_ProgramText
+// VS 2010 doesn't have <stdbool.h>...
+typedef int bool;
+#define false 0
+#define true 1
+
+#ifndef _PyObject_FastCall
+static PyObject *
+_PyObject_FastCall(PyObject *func, PyObject *const *args, int nargs)
+{
+ PyObject *t, *res;
+ int i;
+
+ t = PyTuple_New(nargs);
+ if (t == NULL) {
+ return NULL;
+ }
+ for (i = 0; i < nargs; i++) {
+ if (PyTuple_SetItem(t, i, args[i]) < 0) {
+ Py_DECREF(t);
+ return NULL;
+ }
+ }
+ res = PyObject_CallObject(func, t);
+ Py_DECREF(t);
+ return res;
+}
+#endif
+
+#if PY_MINOR_VERSION < 6
+#define _PyUnicode_EqualToASCIIString(a, b) (PyUnicode_CompareWithASCIIString((a), (b)) == 0)
+
+static PyObject *
+_PyBytes_DecodeEscape(const char *s,
+ Py_ssize_t len,
+ const char *errors,
+ Py_ssize_t unicode,
+ const char *recode_encoding,
+ const char **first_invalid_escape)
+{
+ *first_invalid_escape = NULL;
+ return PyBytes_DecodeEscape(s, len, errors, unicode, recode_encoding);
+}
+
+PyObject *
+_PyUnicode_DecodeUnicodeEscape(const char *s,
+ Py_ssize_t size,
+ const char *errors,
+ const char **first_invalid_escape)
+{
+ *first_invalid_escape = NULL;
+ return PyUnicode_DecodeUnicodeEscape(s, size, errors);
+}
-#define PyMem_RawMalloc PyMem_Malloc
-#define PyMem_RawRealloc PyMem_Realloc
-#define PyMem_RawFree PyMem_Free
#endif
static int validate_stmts(asdl_seq *);
@@ -25,10 +73,11 @@ static int validate_nonempty_seq(asdl_seq *, const char *, const char *);
static int validate_stmt(stmt_ty);
static int validate_expr(expr_ty, expr_context_ty);
-mod_ty
-string_object_to_c_ast(const char *s, PyObject *filename, int start,
- PyCompilerFlags *flags, int feature_version,
- PyArena *arena);
+void
+_Ta3Parser_UpdateFlags(PyCompilerFlags *flags, int *iflags, int feature_version);
+node *
+Ta3Parser_SimpleParseStringFlagsFilename(const char *str, const char *filename,
+ int start, int flags);
static int
validate_comprehension(asdl_seq *gens)
@@ -112,8 +161,7 @@ expr_context_name(expr_context_ty ctx)
case Param:
return "Param";
default:
- assert(0);
- return "(unknown)";
+ abort();
}
}
@@ -601,24 +649,23 @@ struct compiling {
PyArena *c_arena; /* Arena for allocating memory. */
PyObject *c_filename; /* filename */
PyObject *c_normalize; /* Normalization function from unicodedata. */
- PyObject *c_normalize_args; /* Normalization argument tuple. */
    int c_feature_version; /* Latest minor version of Python for allowed features */
};
static asdl_seq *seq_for_testlist(struct compiling *, const node *);
static expr_ty ast_for_expr(struct compiling *, const node *);
static stmt_ty ast_for_stmt(struct compiling *, const node *);
-static asdl_seq *ast_for_suite(struct compiling *, const node *);
+static asdl_seq *ast_for_suite(struct compiling *c, const node *n);
static asdl_seq *ast_for_exprlist(struct compiling *, const node *,
expr_context_ty);
static expr_ty ast_for_testlist(struct compiling *, const node *);
static stmt_ty ast_for_classdef(struct compiling *, const node *, asdl_seq *);
-static stmt_ty ast_for_with_stmt(struct compiling *, const node *, int);
-static stmt_ty ast_for_for_stmt(struct compiling *, const node *, int);
+static stmt_ty ast_for_with_stmt(struct compiling *, const node *, bool);
+static stmt_ty ast_for_for_stmt(struct compiling *, const node *, bool);
/* Note different signature for ast_for_call */
-static expr_ty ast_for_call(struct compiling *, const node *, expr_ty);
+static expr_ty ast_for_call(struct compiling *, const node *, expr_ty, bool);
static PyObject *parsenumber(struct compiling *, const char *);
static expr_ty parsestrplus(struct compiling *, const node *n);
@@ -637,12 +684,6 @@ init_normalization(struct compiling *c)
Py_DECREF(m);
if (!c->c_normalize)
return 0;
- c->c_normalize_args = Py_BuildValue("(sN)", "NFKC", Py_None);
- if (!c->c_normalize_args) {
- Py_CLEAR(c->c_normalize);
- return 0;
- }
- PyTuple_SET_ITEM(c->c_normalize_args, 1, NULL);
return 1;
}
@@ -658,15 +699,32 @@ new_identifier(const char *n, struct compiling *c)
identifier; if so, normalize to NFKC. */
if (!PyUnicode_IS_ASCII(id)) {
PyObject *id2;
+ PyObject *form;
+ PyObject *args[2];
+ _Py_IDENTIFIER(NFKC);
if (!c->c_normalize && !init_normalization(c)) {
Py_DECREF(id);
return NULL;
}
- PyTuple_SET_ITEM(c->c_normalize_args, 1, id);
- id2 = PyObject_Call(c->c_normalize, c->c_normalize_args, NULL);
+ form = _PyUnicode_FromId(&PyId_NFKC);
+ if (form == NULL) {
+ Py_DECREF(id);
+ return NULL;
+ }
+ args[0] = form;
+ args[1] = id;
+ id2 = _PyObject_FastCall(c->c_normalize, args, 2);
Py_DECREF(id);
if (!id2)
return NULL;
+ if (!PyUnicode_Check(id2)) {
+ PyErr_Format(PyExc_TypeError,
+ "unicodedata.normalize() must return a string, not "
+ "%.200s",
+ Py_TYPE(id2)->tp_name);
+ Py_DECREF(id2);
+ return NULL;
+ }
id = id2;
}
PyUnicode_InternInPlace(&id);
@@ -775,8 +833,7 @@ num_stmts(const node *n)
Py_FatalError(buf);
}
}
- assert(0);
- return 0;
+ abort();
}
/* Transform the CST rooted at node * to the appropriate AST
@@ -784,8 +841,8 @@ num_stmts(const node *n)
mod_ty
Ta3AST_FromNodeObject(const node *n, PyCompilerFlags *flags,
- PyObject *filename, int feature_version,
- PyArena *arena)
+ PyObject *filename, int feature_version,
+ PyArena *arena)
{
int i, j, k, num;
asdl_seq *stmts = NULL;
@@ -801,7 +858,6 @@ Ta3AST_FromNodeObject(const node *n, PyCompilerFlags *flags,
/* borrowed reference */
c.c_filename = filename;
c.c_normalize = NULL;
- c.c_normalize_args = NULL;
c.c_feature_version = feature_version;
if (TYPE(n) == encoding_decl)
@@ -848,8 +904,8 @@ Ta3AST_FromNodeObject(const node *n, PyCompilerFlags *flags,
for (i = 0; i < num; i++) {
type_ignore_ty ti = TypeIgnore(LINENO(CHILD(ch, i)), arena);
if (!ti)
- goto out;
- asdl_seq_SET(type_ignores, i, ti);
+ goto out;
+ asdl_seq_SET(type_ignores, i, ti);
}
res = Module(stmts, type_ignores, arena);
@@ -945,15 +1001,13 @@ Ta3AST_FromNodeObject(const node *n, PyCompilerFlags *flags,
out:
if (c.c_normalize) {
Py_DECREF(c.c_normalize);
- PyTuple_SET_ITEM(c.c_normalize_args, 1, NULL);
- Py_DECREF(c.c_normalize_args);
}
return res;
}
mod_ty
Ta3AST_FromNode(const node *n, PyCompilerFlags *flags, const char *filename_str,
- int feature_version, PyArena *arena)
+ int feature_version, PyArena *arena)
{
mod_ty mod;
PyObject *filename;
@@ -1019,14 +1073,14 @@ forbidden_name(struct compiling *c, identifier name, const node *n,
int full_checks)
{
assert(PyUnicode_Check(name));
- if (PyUnicode_CompareWithASCIIString(name, "__debug__") == 0) {
+ if (_PyUnicode_EqualToASCIIString(name, "__debug__")) {
ast_error(c, n, "assignment to keyword");
return 1;
}
if (full_checks) {
const char * const *p;
for (p = FORBIDDEN; *p; p++) {
- if (PyUnicode_CompareWithASCIIString(name, *p) == 0) {
+ if (_PyUnicode_EqualToASCIIString(name, *p)) {
ast_error(c, n, "assignment to keyword");
return 1;
}
@@ -1242,6 +1296,7 @@ ast_for_comp_op(struct compiling *c, const node *n)
return In;
if (strcmp(STR(n), "is") == 0)
return Is;
+ /* fall through */
default:
PyErr_Format(PyExc_SystemError, "invalid comp_op: %s",
STR(n));
@@ -1256,6 +1311,7 @@ ast_for_comp_op(struct compiling *c, const node *n)
return NotIn;
if (strcmp(STR(CHILD(n, 0)), "is") == 0)
return IsNot;
+ /* fall through */
default:
PyErr_Format(PyExc_SystemError, "invalid comp_op: %s %s",
STR(CHILD(n, 0)), STR(CHILD(n, 1)));
@@ -1337,7 +1393,7 @@ handle_keywordonly_args(struct compiling *c, const node *n, int start,
PyObject *argname;
node *ch;
expr_ty expression, annotation;
- arg_ty arg;
+ arg_ty arg = NULL;
int i = start;
int j = 0; /* index for kwdefaults and kwonlyargs */
@@ -1382,7 +1438,7 @@ handle_keywordonly_args(struct compiling *c, const node *n, int start,
goto error;
asdl_seq_SET(kwonlyargs, j++, arg);
i += 1; /* the name */
- if (i < NCH(n) && TYPE(CHILD(n, i)) == COMMA)
+ if (TYPE(CHILD(n, i)) == COMMA)
i += 1; /* the comma, if present */
break;
case TYPE_COMMENT:
@@ -1430,7 +1486,7 @@ ast_for_arguments(struct compiling *c, const node *n)
int nposdefaults = 0, found_default = 0;
asdl_seq *posargs, *posdefaults, *kwonlyargs, *kwdefaults;
arg_ty vararg = NULL, kwarg = NULL;
- arg_ty arg;
+ arg_ty arg = NULL;
node *ch;
if (TYPE(n) == parameters) {
@@ -1486,11 +1542,6 @@ ast_for_arguments(struct compiling *c, const node *n)
if (!kwdefaults && nkwonlyargs)
return NULL;
- if (nposargs + nkwonlyargs > 255) {
- ast_error(c, n, "more than 255 arguments");
- return NULL;
- }
-
/* tfpdef: NAME [':' test]
vfpdef: NAME
*/
@@ -1524,7 +1575,7 @@ ast_for_arguments(struct compiling *c, const node *n)
return NULL;
asdl_seq_SET(posargs, k++, arg);
i += 1; /* the name */
- if (i < NCH(n) && TYPE(CHILD(n, i)) == COMMA)
+ if (TYPE(CHILD(n, i)) == COMMA)
i += 1; /* the comma, if present */
break;
case STAR:
@@ -1540,7 +1591,7 @@ ast_for_arguments(struct compiling *c, const node *n)
int res = 0;
i += 2; /* now follows keyword only arguments */
- if (i < NCH(n) && TYPE(CHILD(n, i)) == TYPE_COMMENT) {
+ if (TYPE(CHILD(n, i)) == TYPE_COMMENT) {
ast_error(c, CHILD(n, i),
"bare * has associated type comment");
return NULL;
@@ -1557,10 +1608,10 @@ ast_for_arguments(struct compiling *c, const node *n)
return NULL;
i += 2; /* the star and the name */
- if (i < NCH(n) && TYPE(CHILD(n, i)) == COMMA)
+ if (TYPE(CHILD(n, i)) == COMMA)
i += 1; /* the comma, if present */
- if (i < NCH(n) && TYPE(CHILD(n, i)) == TYPE_COMMENT) {
+ if (TYPE(CHILD(n, i)) == TYPE_COMMENT) {
vararg->type_comment = NEW_TYPE_COMMENT(CHILD(n, i));
i += 1;
}
@@ -1582,7 +1633,7 @@ ast_for_arguments(struct compiling *c, const node *n)
if (!kwarg)
return NULL;
i += 2; /* the double star and the name */
- if (i < NCH(n) && TYPE(CHILD(n, i)) == COMMA)
+ if (TYPE(CHILD(n, i)) == COMMA)
i += 1; /* the comma, if present */
break;
case TYPE_COMMENT:
@@ -1664,7 +1715,7 @@ ast_for_decorator(struct compiling *c, const node *n)
name_expr = NULL;
}
else {
- d = ast_for_call(c, CHILD(n, 3), name_expr);
+ d = ast_for_call(c, CHILD(n, 3), name_expr, true);
if (!d)
return NULL;
name_expr = NULL;
@@ -1695,10 +1746,11 @@ ast_for_decorators(struct compiling *c, const node *n)
}
static stmt_ty
-ast_for_funcdef_impl(struct compiling *c, const node *n,
- asdl_seq *decorator_seq, int is_async)
+ast_for_funcdef_impl(struct compiling *c, const node *n0,
+ asdl_seq *decorator_seq, bool is_async)
{
/* funcdef: 'def' NAME parameters ['->' test] ':' [TYPE_COMMENT] suite */
+ const node * const n = is_async ? CHILD(n0, 1) : n0;
identifier name;
arguments_ty args;
asdl_seq *body;
@@ -1709,7 +1761,7 @@ ast_for_funcdef_impl(struct compiling *c, const node *n,
if (is_async && c->c_feature_version < 5) {
ast_error(c, n,
- "Async functions are only supported in Python 3.5 and greater");
+ "Async functions are only supported in Python 3.5 and greater");
return NULL;
}
@@ -1748,24 +1800,23 @@ ast_for_funcdef_impl(struct compiling *c, const node *n,
if (is_async)
return AsyncFunctionDef(name, args, body, decorator_seq, returns,
- type_comment, LINENO(n),
- n->n_col_offset, c->c_arena);
+ type_comment, LINENO(n0), n0->n_col_offset, c->c_arena);
else
return FunctionDef(name, args, body, decorator_seq, returns,
- type_comment, LINENO(n),
- n->n_col_offset, c->c_arena);
+ type_comment, LINENO(n), n->n_col_offset, c->c_arena);
}
static stmt_ty
ast_for_async_funcdef(struct compiling *c, const node *n, asdl_seq *decorator_seq)
{
- /* async_funcdef: ASYNC funcdef */
+ /* async_funcdef: 'async' funcdef */
REQ(n, async_funcdef);
- REQ(CHILD(n, 0), ASYNC);
+ REQ(CHILD(n, 0), NAME);
+ assert(strcmp(STR(CHILD(n, 0)), "async") == 0);
REQ(CHILD(n, 1), funcdef);
- return ast_for_funcdef_impl(c, CHILD(n, 1), decorator_seq,
- 1 /* is_async */);
+ return ast_for_funcdef_impl(c, n, decorator_seq,
+ true /* is_async */);
}
static stmt_ty
@@ -1773,28 +1824,29 @@ ast_for_funcdef(struct compiling *c, const node *n, asdl_seq *decorator_seq)
{
/* funcdef: 'def' NAME parameters ['->' test] ':' suite */
return ast_for_funcdef_impl(c, n, decorator_seq,
- 0 /* is_async */);
+ false /* is_async */);
}
static stmt_ty
ast_for_async_stmt(struct compiling *c, const node *n)
{
- /* async_stmt: ASYNC (funcdef | with_stmt | for_stmt) */
+ /* async_stmt: 'async' (funcdef | with_stmt | for_stmt) */
REQ(n, async_stmt);
- REQ(CHILD(n, 0), ASYNC);
+ REQ(CHILD(n, 0), NAME);
+ assert(strcmp(STR(CHILD(n, 0)), "async") == 0);
switch (TYPE(CHILD(n, 1))) {
case funcdef:
- return ast_for_funcdef_impl(c, CHILD(n, 1), NULL,
- 1 /* is_async */);
+ return ast_for_funcdef_impl(c, n, NULL,
+ true /* is_async */);
case with_stmt:
- return ast_for_with_stmt(c, CHILD(n, 1),
- 1 /* is_async */);
+ return ast_for_with_stmt(c, n,
+ true /* is_async */);
case for_stmt:
- return ast_for_for_stmt(c, CHILD(n, 1),
- 1 /* is_async */);
+ return ast_for_for_stmt(c, n,
+ true /* is_async */);
default:
PyErr_Format(PyExc_SystemError,
@@ -1895,17 +1947,23 @@ static int
count_comp_fors(struct compiling *c, const node *n)
{
int n_fors = 0;
- int is_async;
count_comp_for:
- is_async = 0;
n_fors++;
REQ(n, comp_for);
- if (TYPE(CHILD(n, 0)) == ASYNC) {
- is_async = 1;
+ if (NCH(n) == 2) {
+ REQ(CHILD(n, 0), NAME);
+ assert(strcmp(STR(CHILD(n, 0)), "async") == 0);
+ n = CHILD(n, 1);
}
- if (NCH(n) == (5 + is_async)) {
- n = CHILD(n, 4 + is_async);
+ else if (NCH(n) == 1) {
+ n = CHILD(n, 0);
+ }
+ else {
+ goto error;
+ }
+ if (NCH(n) == (5)) {
+ n = CHILD(n, 4);
}
else {
return n_fors;
@@ -1924,6 +1982,7 @@ count_comp_fors(struct compiling *c, const node *n)
return n_fors;
}
+ error:
/* Should never be reached */
PyErr_SetString(PyExc_SystemError,
"logic error in count_comp_fors");
@@ -1972,13 +2031,21 @@ ast_for_comprehension(struct compiling *c, const node *n)
asdl_seq *t;
expr_ty expression, first;
node *for_ch;
+ node *sync_n;
int is_async = 0;
REQ(n, comp_for);
- if (TYPE(CHILD(n, 0)) == ASYNC) {
+ if (NCH(n) == 2) {
is_async = 1;
+ REQ(CHILD(n, 0), NAME);
+ assert(strcmp(STR(CHILD(n, 0)), "async") == 0);
+ sync_n = CHILD(n, 1);
+ }
+ else {
+ sync_n = CHILD(n, 0);
}
+ REQ(sync_n, sync_comp_for);
/* Async comprehensions only allowed in Python 3.6 and greater */
if (is_async && c->c_feature_version < 6) {
@@ -1987,11 +2054,11 @@ ast_for_comprehension(struct compiling *c, const node *n)
return NULL;
}
- for_ch = CHILD(n, 1 + is_async);
+ for_ch = CHILD(sync_n, 1);
t = ast_for_exprlist(c, for_ch, Store);
if (!t)
return NULL;
- expression = ast_for_expr(c, CHILD(n, 3 + is_async));
+ expression = ast_for_expr(c, CHILD(sync_n, 3));
if (!expression)
return NULL;
@@ -2008,11 +2075,11 @@ ast_for_comprehension(struct compiling *c, const node *n)
if (!comp)
return NULL;
- if (NCH(n) == (5 + is_async)) {
+ if (NCH(sync_n) == 5) {
int j, n_ifs;
asdl_seq *ifs;
- n = CHILD(n, 4 + is_async);
+ n = CHILD(sync_n, 4);
n_ifs = count_comp_ifs(c, n);
if (n_ifs == -1)
return NULL;
@@ -2283,7 +2350,7 @@ ast_for_atom(struct compiling *c, const node *n)
"Underscores in numeric literals are only supported in Python 3.6 and greater");
return NULL;
}
- pynum = parsenumber(c, s);
+ pynum = parsenumber(c, STR(ch));
if (!pynum)
return NULL;
@@ -2506,7 +2573,7 @@ ast_for_trailer(struct compiling *c, const node *n, expr_ty left_expr)
return Call(left_expr, NULL, NULL, LINENO(n),
n->n_col_offset, c->c_arena);
else
- return ast_for_call(c, CHILD(n, 1), left_expr);
+ return ast_for_call(c, CHILD(n, 1), left_expr, true);
}
else if (TYPE(CHILD(n, 0)) == DOT) {
PyObject *attr_id = NEW_IDENTIFIER(CHILD(n, 1));
@@ -2635,7 +2702,7 @@ ast_for_atom_expr(struct compiling *c, const node *n)
}
if (start) {
- /* there was an AWAIT */
+ /* there was an 'await' */
return Await(e, LINENO(n), n->n_col_offset, c->c_arena);
}
else {
@@ -2700,7 +2767,7 @@ ast_for_expr(struct compiling *c, const node *n)
term: factor (('*'|'@'|'/'|'%'|'//') factor)*
factor: ('+'|'-'|'~') factor | power
power: atom_expr ['**' factor]
- atom_expr: [AWAIT] atom trailer*
+ atom_expr: ['await'] atom trailer*
yield_expr: 'yield' [yield_arg]
*/
@@ -2848,14 +2915,14 @@ ast_for_expr(struct compiling *c, const node *n)
}
static expr_ty
-ast_for_call(struct compiling *c, const node *n, expr_ty func)
+ast_for_call(struct compiling *c, const node *n, expr_ty func, bool allowgen)
{
/*
arglist: argument (',' argument)* [',']
argument: ( test [comp_for] | '*' test | test '=' test | '**' test )
*/
- int i, nargs, nkeywords, ngens;
+ int i, nargs, nkeywords;
int ndoublestars;
asdl_seq *args;
asdl_seq *keywords;
@@ -2864,14 +2931,22 @@ ast_for_call(struct compiling *c, const node *n, expr_ty func)
nargs = 0;
nkeywords = 0;
- ngens = 0;
for (i = 0; i < NCH(n); i++) {
node *ch = CHILD(n, i);
if (TYPE(ch) == argument) {
if (NCH(ch) == 1)
nargs++;
- else if (TYPE(CHILD(ch, 1)) == comp_for)
- ngens++;
+ else if (TYPE(CHILD(ch, 1)) == comp_for) {
+ nargs++;
+ if (!allowgen) {
+ ast_error(c, ch, "invalid syntax");
+ return NULL;
+ }
+ if (NCH(n) > 1) {
+ ast_error(c, ch, "Generator expression must be parenthesized");
+ return NULL;
+ }
+ }
else if (TYPE(CHILD(ch, 0)) == STAR)
nargs++;
else
@@ -2879,18 +2954,8 @@ ast_for_call(struct compiling *c, const node *n, expr_ty func)
nkeywords++;
}
}
- if (ngens > 1 || (ngens && (nargs || nkeywords))) {
- ast_error(c, n, "Generator expression must be parenthesized "
- "if not sole argument");
- return NULL;
- }
-
- if (nargs + nkeywords + ngens > 255) {
- ast_error(c, n, "more than 255 arguments");
- return NULL;
- }
- args = _Ta3_asdl_seq_new(nargs + ngens, c->c_arena);
+ args = _Ta3_asdl_seq_new(nargs, c->c_arena);
if (!args)
return NULL;
keywords = _Ta3_asdl_seq_new(nkeywords, c->c_arena);
@@ -3042,6 +3107,7 @@ static stmt_ty
ast_for_expr_stmt(struct compiling *c, const node *n)
{
int num;
+
REQ(n, expr_stmt);
/* expr_stmt: testlist_star_expr (annassign | augassign (yield_expr|testlist) |
('=' (yield_expr|testlist_star_expr))* [TYPE_COMMENT])
@@ -3313,6 +3379,7 @@ ast_for_flow_stmt(struct compiling *c, const node *n)
}
return Raise(expression, cause, LINENO(n), n->n_col_offset, c->c_arena);
}
+ /* fall through */
default:
PyErr_Format(PyExc_SystemError,
"unexpected flow_stmt: %d", TYPE(ch));
@@ -3424,6 +3491,8 @@ alias_for_import_name(struct compiling *c, const node *n, int store)
break;
case STAR:
str = PyUnicode_InternFromString("*");
+ if (!str)
+ return NULL;
if (PyArena_AddPyObject(c->c_arena, str) < 0) {
Py_DECREF(str);
return NULL;
@@ -3855,8 +3924,9 @@ ast_for_while_stmt(struct compiling *c, const node *n)
}
static stmt_ty
-ast_for_for_stmt(struct compiling *c, const node *n, int is_async)
+ast_for_for_stmt(struct compiling *c, const node *n0, bool is_async)
{
+ const node * const n = is_async ? CHILD(n0, 1) : n0;
asdl_seq *_target, *seq = NULL, *suite_seq;
expr_ty expression;
expr_ty target, first;
@@ -3906,12 +3976,12 @@ ast_for_for_stmt(struct compiling *c, const node *n, int is_async)
type_comment = NULL;
if (is_async)
- return AsyncFor(target, expression, suite_seq, seq,
- type_comment, LINENO(n), n->n_col_offset,
+ return AsyncFor(target, expression, suite_seq, seq, type_comment,
+ LINENO(n0), n0->n_col_offset,
c->c_arena);
else
- return For(target, expression, suite_seq, seq,
- type_comment, LINENO(n), n->n_col_offset,
+ return For(target, expression, suite_seq, seq, type_comment,
+ LINENO(n), n->n_col_offset,
c->c_arena);
}
@@ -4059,8 +4129,9 @@ ast_for_with_item(struct compiling *c, const node *n)
/* with_stmt: 'with' with_item (',' with_item)* ':' [TYPE_COMMENT] suite */
static stmt_ty
-ast_for_with_stmt(struct compiling *c, const node *n, int is_async)
+ast_for_with_stmt(struct compiling *c, const node *n0, bool is_async)
{
+ const node * const n = is_async ? CHILD(n0, 1) : n0;
int i, n_items, nch_minus_type, has_type_comment;
asdl_seq *items, *body;
string type_comment;
@@ -4097,7 +4168,7 @@ ast_for_with_stmt(struct compiling *c, const node *n, int is_async)
type_comment = NULL;
if (is_async)
- return AsyncWith(items, body, type_comment, LINENO(n), n->n_col_offset, c->c_arena);
+ return AsyncWith(items, body, type_comment, LINENO(n0), n0->n_col_offset, c->c_arena);
else
return With(items, body, type_comment, LINENO(n), n->n_col_offset, c->c_arena);
}
@@ -4121,12 +4192,12 @@ ast_for_classdef(struct compiling *c, const node *n, asdl_seq *decorator_seq)
return NULL;
if (forbidden_name(c, classname, CHILD(n, 3), 0))
return NULL;
- return ClassDef(classname, NULL, NULL, s, decorator_seq, LINENO(n),
- n->n_col_offset, c->c_arena);
+ return ClassDef(classname, NULL, NULL, s, decorator_seq,
+ LINENO(n), n->n_col_offset, c->c_arena);
}
if (TYPE(CHILD(n, 3)) == RPAR) { /* class NAME '(' ')' ':' suite */
- s = ast_for_suite(c, CHILD(n,5));
+ s = ast_for_suite(c, CHILD(n, 5));
if (!s)
return NULL;
classname = NEW_IDENTIFIER(CHILD(n, 1));
@@ -4134,8 +4205,8 @@ ast_for_classdef(struct compiling *c, const node *n, asdl_seq *decorator_seq)
return NULL;
if (forbidden_name(c, classname, CHILD(n, 3), 0))
return NULL;
- return ClassDef(classname, NULL, NULL, s, decorator_seq, LINENO(n),
- n->n_col_offset, c->c_arena);
+ return ClassDef(classname, NULL, NULL, s, decorator_seq,
+ LINENO(n), n->n_col_offset, c->c_arena);
}
/* class NAME '(' arglist ')' ':' suite */
@@ -4147,7 +4218,7 @@ ast_for_classdef(struct compiling *c, const node *n, asdl_seq *decorator_seq)
if (!dummy_name)
return NULL;
dummy = Name(dummy_name, Load, LINENO(n), n->n_col_offset, c->c_arena);
- call = ast_for_call(c, CHILD(n, 3), dummy);
+ call = ast_for_call(c, CHILD(n, 3), dummy, false);
if (!call)
return NULL;
}
@@ -4294,6 +4365,9 @@ parsenumber(struct compiling *c, const char *s)
}
/* Create a duplicate without underscores. */
dup = PyMem_Malloc(strlen(s) + 1);
+ if (dup == NULL) {
+ return PyErr_NoMemory();
+ }
end = dup;
for (; *s; s++) {
if (*s != '_') {
@@ -4317,14 +4391,47 @@ decode_utf8(struct compiling *c, const char **sPtr, const char *end)
return PyUnicode_DecodeUTF8(t, s - t, NULL);
}
+static int
+warn_invalid_escape_sequence(struct compiling *c, const node *n,
+ unsigned char first_invalid_escape_char)
+{
+ PyObject *msg = PyUnicode_FromFormat("invalid escape sequence \\%c",
+ first_invalid_escape_char);
+ if (msg == NULL) {
+ return -1;
+ }
+ if (PyErr_WarnExplicitObject(PyExc_DeprecationWarning, msg,
+ c->c_filename, LINENO(n),
+ NULL, NULL) < 0)
+ {
+ if (PyErr_ExceptionMatches(PyExc_DeprecationWarning)) {
+ const char *s;
+
+ /* Replace the DeprecationWarning exception with a SyntaxError
+ to get a more accurate error report */
+ PyErr_Clear();
+
+ s = PyUnicode_AsUTF8(msg);
+ if (s != NULL) {
+ ast_error(c, n, s);
+ }
+ }
+ Py_DECREF(msg);
+ return -1;
+ }
+ Py_DECREF(msg);
+ return 0;
+}
+
static PyObject *
decode_unicode_with_escapes(struct compiling *c, const node *n, const char *s,
size_t len)
{
- PyObject *u;
+ PyObject *v, *u;
char *buf;
char *p;
const char *end;
+ const char *first_invalid_escape;
/* check for integer overflow */
if (len > SIZE_MAX / 6)
@@ -4339,9 +4446,11 @@ decode_unicode_with_escapes(struct compiling *c, const node *n, const char *s,
while (s < end) {
if (*s == '\\') {
*p++ = *s++;
- if (*s & 0x80) {
+ if (s >= end || *s & 0x80) {
strcpy(p, "u005c");
p += 5;
+ if (s >= end)
+ break;
}
}
if (*s & 0x80) { /* XXX inefficient */
@@ -4363,7 +4472,7 @@ decode_unicode_with_escapes(struct compiling *c, const node *n, const char *s,
p += 10;
}
/* Should be impossible to overflow */
- assert(p - buf <= Py_SIZE(u));
+ assert(p - buf <= PyBytes_GET_SIZE(u));
Py_DECREF(w);
} else {
*p++ = *s++;
@@ -4372,14 +4481,88 @@ decode_unicode_with_escapes(struct compiling *c, const node *n, const char *s,
len = p - buf;
s = buf;
- return PyUnicode_DecodeUnicodeEscape(s, len, NULL);
+ v = _PyUnicode_DecodeUnicodeEscape(s, len, NULL, &first_invalid_escape);
+
+ if (v != NULL && first_invalid_escape != NULL) {
+ if (warn_invalid_escape_sequence(c, n, *first_invalid_escape) < 0) {
+        /* We have not decref'ed u before this point because
+           first_invalid_escape points inside u. */
+ Py_XDECREF(u);
+ Py_DECREF(v);
+ return NULL;
+ }
+ }
+ Py_XDECREF(u);
+ return v;
}
static PyObject *
decode_bytes_with_escapes(struct compiling *c, const node *n, const char *s,
size_t len)
{
- return PyBytes_DecodeEscape(s, len, NULL, 0, NULL);
+ const char *first_invalid_escape;
+ PyObject *result = _PyBytes_DecodeEscape(s, len, NULL, 0, NULL,
+ &first_invalid_escape);
+ if (result == NULL)
+ return NULL;
+
+ if (first_invalid_escape != NULL) {
+ if (warn_invalid_escape_sequence(c, n, *first_invalid_escape) < 0) {
+ Py_DECREF(result);
+ return NULL;
+ }
+ }
+ return result;
+}
+
+/* Shift locations for the given node and all its children by adding `lineno`
+ and `col_offset` to existing locations. */
+static void fstring_shift_node_locations(node *n, int lineno, int col_offset)
+{
+ int i;
+ n->n_col_offset = n->n_col_offset + col_offset;
+ for (i = 0; i < NCH(n); ++i) {
+ if (n->n_lineno && n->n_lineno < CHILD(n, i)->n_lineno) {
+ /* Shifting column offsets unnecessary if there's been newlines. */
+ col_offset = 0;
+ }
+ fstring_shift_node_locations(CHILD(n, i), lineno, col_offset);
+ }
+ n->n_lineno = n->n_lineno + lineno;
+}
+
+/* Fix locations for the given node and its children.
+
+ `parent` is the enclosing node.
+ `n` is the node which locations are going to be fixed relative to parent.
+ `expr_str` is the child node's string representation, including braces.
+*/
+static void
+fstring_fix_node_location(const node *parent, node *n, char *expr_str)
+{
+ char *substr = NULL;
+ char *start;
+ int lines = LINENO(parent) - 1;
+ int cols = parent->n_col_offset;
+ /* Find the full fstring to fix location information in `n`. */
+ while (parent && parent->n_type != STRING)
+ parent = parent->n_child;
+ if (parent && parent->n_str) {
+ substr = strstr(parent->n_str, expr_str);
+ if (substr) {
+ start = substr;
+ while (start > parent->n_str) {
+ if (start[0] == '\n')
+ break;
+ start--;
+ }
+ cols += substr - start;
+            /* Fix lineno in multiline strings. */
+ while ((substr = strchr(substr + 1, '\n')))
+ lines--;
+ }
+ }
+ fstring_shift_node_locations(n, lines, cols);
}
/* Compile this expression in to an expr_ty. Add parens around the
@@ -4389,54 +4572,42 @@ fstring_compile_expr(const char *expr_start, const char *expr_end,
struct compiling *c, const node *n)
{
- int all_whitespace = 1;
- int kind;
- void *data;
PyCompilerFlags cf;
+ node *mod_n;
mod_ty mod;
char *str;
- PyObject *o, *fstring_name;
Py_ssize_t len;
- Py_ssize_t i;
+ const char *s;
+ int iflags = 0;
assert(expr_end >= expr_start);
assert(*(expr_start-1) == '{');
assert(*expr_end == '}' || *expr_end == '!' || *expr_end == ':');
- /* We know there are no escapes here, because backslashes are not allowed,
- and we know it's utf-8 encoded (per PEP 263). But, in order to check
- that each char is not whitespace, we need to decode it to unicode.
- Which is unfortunate, but such is life. */
-
- /* If the substring is all whitespace, it's an error. We need to catch
- this here, and not when we call PyParser_ASTFromString, because turning
- the expression '' in to '()' would go from being invalid to valid. */
- /* Note that this code says an empty string is all whitespace. That's
- important. There's a test for it: f'{}'. */
- o = PyUnicode_DecodeUTF8(expr_start, expr_end-expr_start, NULL);
- if (o == NULL)
- return NULL;
- len = PyUnicode_GET_LENGTH(o);
- kind = PyUnicode_KIND(o);
- data = PyUnicode_DATA(o);
- for (i = 0; i < len; i++) {
- if (!Py_UNICODE_ISSPACE(PyUnicode_READ(kind, data, i))) {
- all_whitespace = 0;
+ /* If the substring is all whitespace, it's an error. We need to catch this
+ here, and not when we call PyParser_SimpleParseStringFlagsFilename,
+ because turning the expression '' in to '()' would go from being invalid
+ to valid. */
+ for (s = expr_start; s != expr_end; s++) {
+ char c = *s;
+ /* The Python parser ignores only the following whitespace
+ characters (\r already is converted to \n). */
+ if (!(c == ' ' || c == '\t' || c == '\n' || c == '\f')) {
break;
}
}
- Py_DECREF(o);
- if (all_whitespace) {
+ if (s == expr_end) {
ast_error(c, n, "f-string: empty expression not allowed");
return NULL;
}
- /* Reuse len to be the length of the utf-8 input string. */
len = expr_end - expr_start;
/* Allocate 3 extra bytes: open paren, close paren, null byte. */
str = PyMem_RawMalloc(len + 3);
- if (str == NULL)
+ if (str == NULL) {
+ PyErr_NoMemory();
return NULL;
+ }
str[0] = '(';
memcpy(str+1, expr_start, len);
@@ -4444,12 +4615,20 @@ fstring_compile_expr(const char *expr_start, const char *expr_end,
str[len+2] = 0;
cf.cf_flags = PyCF_ONLY_AST;
- fstring_name = PyUnicode_FromString("<fstring>");
- mod = string_object_to_c_ast(str, fstring_name,
- Py_eval_input, &cf,
- c->c_feature_version, c->c_arena);
- Py_DECREF(fstring_name);
+ _Ta3Parser_UpdateFlags(&cf, &iflags, c->c_feature_version);
+ mod_n = Ta3Parser_SimpleParseStringFlagsFilename(str, "<fstring>",
+ Py_eval_input, iflags);
+ if (!mod_n) {
+ PyMem_RawFree(str);
+ return NULL;
+ }
+ /* Reuse str to find the correct column offset. */
+ str[0] = '{';
+ str[len+1] = '}';
+ fstring_fix_node_location(n, mod_n, str);
+ mod = Ta3AST_FromNode(mod_n, &cf, "<fstring>", c->c_feature_version, c->c_arena);
PyMem_RawFree(str);
+ Ta3Node_Free(mod_n);
if (!mod)
return NULL;
return mod->v.Expression.body;
@@ -4472,30 +4651,37 @@ fstring_find_literal(const char **str, const char *end, int raw,
brace (which isn't part of a unicode name escape such as
"\N{EULER CONSTANT}"), or the end of the string. */
- const char *literal_start = *str;
- const char *literal_end;
- int in_named_escape = 0;
+ const char *s = *str;
+ const char *literal_start = s;
int result = 0;
assert(*literal == NULL);
- for (; *str < end; (*str)++) {
- char ch = **str;
- if (!in_named_escape && ch == '{' && (*str)-literal_start >= 2 &&
- *(*str-2) == '\\' && *(*str-1) == 'N') {
- in_named_escape = 1;
- } else if (in_named_escape && ch == '}') {
- in_named_escape = 0;
- } else if (ch == '{' || ch == '}') {
+ while (s < end) {
+ char ch = *s++;
+ if (!raw && ch == '\\' && s < end) {
+ ch = *s++;
+ if (ch == 'N') {
+ if (s < end && *s++ == '{') {
+ while (s < end && *s++ != '}') {
+ }
+ continue;
+ }
+ break;
+ }
+ if (ch == '{' && warn_invalid_escape_sequence(c, n, ch) < 0) {
+ return -1;
+ }
+ }
+ if (ch == '{' || ch == '}') {
/* Check for doubled braces, but only at the top level. If
we checked at every level, then f'{0:{3}}' would fail
with the two closing braces. */
if (recurse_lvl == 0) {
- if (*str+1 < end && *(*str+1) == ch) {
+ if (s < end && *s == ch) {
/* We're going to tell the caller that the literal ends
here, but that they should continue scanning. But also
skip over the second brace when we resume scanning. */
- literal_end = *str+1;
- *str += 2;
+ *str = s + 1;
result = 1;
goto done;
}
@@ -4503,6 +4689,7 @@ fstring_find_literal(const char **str, const char *end, int raw,
/* Where a single '{' is the start of a new expression, a
single '}' is not allowed. */
if (ch == '}') {
+ *str = s - 1;
ast_error(c, n, "f-string: single '}' is not allowed");
return -1;
}
@@ -4510,21 +4697,22 @@ fstring_find_literal(const char **str, const char *end, int raw,
/* We're either at a '{', which means we're starting another
expression; or a '}', which means we're at the end of this
f-string (for a nested format_spec). */
+ s--;
break;
}
}
- literal_end = *str;
- assert(*str <= end);
- assert(*str == end || **str == '{' || **str == '}');
+ *str = s;
+ assert(s <= end);
+ assert(s == end || *s == '{' || *s == '}');
done:
- if (literal_start != literal_end) {
+ if (literal_start != s) {
if (raw)
*literal = PyUnicode_DecodeUTF8Stateful(literal_start,
- literal_end-literal_start,
+ s - literal_start,
NULL, NULL);
else
*literal = decode_unicode_with_escapes(c, n, literal_start,
- literal_end-literal_start);
+ s - literal_start);
if (!*literal)
return -1;
}
@@ -4936,6 +5124,7 @@ ExprList_Finish(ExprList *l, PyArena *arena)
typedef struct {
PyObject *last_str;
ExprList expr_list;
+ int fmode;
} FstringParser;
#ifdef NDEBUG
@@ -4954,6 +5143,7 @@ static void
FstringParser_Init(FstringParser *state)
{
state->last_str = NULL;
+ state->fmode = 0;
ExprList_Init(&state->expr_list);
FstringParser_check_invariants(state);
}
@@ -4967,19 +5157,24 @@ FstringParser_Dealloc(FstringParser *state)
ExprList_Dealloc(&state->expr_list);
}
-/* Make a Str node, but decref the PyUnicode object being added. */
-static expr_ty
-make_str_node_and_del(PyObject **str, struct compiling *c, const node* n)
-{
- PyObject *kind, *s = *str;
- const char *raw = STR(CHILD(n, 0));
+static PyObject *
+make_str_kind(const char *raw) {
/* currently Python allows up to 2 string modifiers */
char *ch, s_kind[3] = {0, 0, 0};
ch = s_kind;
while (*raw && *raw != '\'' && *raw != '"') {
*ch++ = *raw++;
}
- kind = PyUnicode_FromString(s_kind);
+ return PyUnicode_FromString(s_kind);
+}
+
+
+/* Make a Str node, but decref the PyUnicode object being added. */
+static expr_ty
+make_str_node_and_del(PyObject **str, struct compiling *c, const node* n)
+{
+ PyObject *s = *str;
+ PyObject *kind = make_str_kind(STR(CHILD(n, 0)));
if (!kind) {
return NULL;
}
@@ -5027,6 +5222,7 @@ FstringParser_ConcatFstring(FstringParser *state, const char **str,
struct compiling *c, const node *n)
{
FstringParser_check_invariants(state);
+ state->fmode = 1;
/* Parse the f-string. */
while (1) {
@@ -5048,6 +5244,8 @@ FstringParser_ConcatFstring(FstringParser *state, const char **str,
/* Do nothing. Just leave last_str alone (and possibly
NULL). */
} else if (!state->last_str) {
+ /* Note that the literal can be zero length, if the
+ input string is "\\\n" or "\\\r", among others. */
state->last_str = literal;
literal = NULL;
} else {
@@ -5057,8 +5255,6 @@ FstringParser_ConcatFstring(FstringParser *state, const char **str,
return -1;
literal = NULL;
}
- assert(!state->last_str ||
- PyUnicode_GET_LENGTH(state->last_str) != 0);
/* We've dealt with the literal now. It can't be leaked on further
errors. */
@@ -5118,7 +5314,8 @@ FstringParser_Finish(FstringParser *state, struct compiling *c,
/* If we're just a constant string with no expressions, return
that. */
- if(state->expr_list.size == 0) {
+ if (!state->fmode) {
+ assert(!state->expr_list.size);
if (!state->last_str) {
/* Create a zero length string. */
state->last_str = PyUnicode_FromStringAndSize(NULL, 0);
@@ -5142,11 +5339,6 @@ FstringParser_Finish(FstringParser *state, struct compiling *c,
if (!seq)
goto error;
- /* If there's only one expression, return it. Otherwise, we need
- to join them together. */
- if (seq->size == 1)
- return seq->elements[0];
-
return JoinedStr(seq, LINENO(n), n->n_col_offset, c->c_arena);
error:
@@ -5359,10 +5551,14 @@ parsestrplus(struct compiling *c, const node *n)
}
}
if (bytesmode) {
+ PyObject *kind = make_str_kind(STR(CHILD(n, 0)));
+
/* Just return the bytes object and we're done. */
- if (PyArena_AddPyObject(c->c_arena, bytes_str) < 0)
+ if (PyArena_AddPyObject(c->c_arena, bytes_str) < 0) {
+ Py_DECREF(kind);
goto error;
- return Bytes(bytes_str, LINENO(n), n->n_col_offset, c->c_arena);
+ }
+ return Bytes(bytes_str, kind, LINENO(n), n->n_col_offset, c->c_arena);
}
/* We're not a bytes string, bytes_str should never have been set. */
=====================================
ast3/Python/graminit.c
=====================================
@@ -1875,204 +1875,214 @@ static state states_80[2] = {
{2, arcs_80_0},
{1, arcs_80_1},
};
-static arc arcs_81_0[2] = {
- {21, 1},
- {102, 2},
+static arc arcs_81_0[1] = {
+ {102, 1},
};
static arc arcs_81_1[1] = {
- {102, 2},
+ {67, 2},
};
static arc arcs_81_2[1] = {
- {67, 3},
+ {103, 3},
};
static arc arcs_81_3[1] = {
- {103, 4},
+ {113, 4},
};
-static arc arcs_81_4[1] = {
- {113, 5},
+static arc arcs_81_4[2] = {
+ {172, 5},
+ {0, 4},
};
-static arc arcs_81_5[2] = {
- {172, 6},
+static arc arcs_81_5[1] = {
{0, 5},
};
-static arc arcs_81_6[1] = {
- {0, 6},
-};
-static state states_81[7] = {
- {2, arcs_81_0},
+static state states_81[6] = {
+ {1, arcs_81_0},
{1, arcs_81_1},
{1, arcs_81_2},
{1, arcs_81_3},
- {1, arcs_81_4},
- {2, arcs_81_5},
- {1, arcs_81_6},
+ {2, arcs_81_4},
+ {1, arcs_81_5},
};
-static arc arcs_82_0[1] = {
- {98, 1},
+static arc arcs_82_0[2] = {
+ {21, 1},
+ {174, 2},
};
static arc arcs_82_1[1] = {
- {115, 2},
+ {174, 2},
};
-static arc arcs_82_2[2] = {
- {172, 3},
+static arc arcs_82_2[1] = {
{0, 2},
};
-static arc arcs_82_3[1] = {
- {0, 3},
-};
-static state states_82[4] = {
- {1, arcs_82_0},
+static state states_82[3] = {
+ {2, arcs_82_0},
{1, arcs_82_1},
- {2, arcs_82_2},
- {1, arcs_82_3},
+ {1, arcs_82_2},
};
static arc arcs_83_0[1] = {
- {23, 1},
+ {98, 1},
};
static arc arcs_83_1[1] = {
- {0, 1},
+ {115, 2},
};
-static state states_83[2] = {
+static arc arcs_83_2[2] = {
+ {172, 3},
+ {0, 2},
+};
+static arc arcs_83_3[1] = {
+ {0, 3},
+};
+static state states_83[4] = {
{1, arcs_83_0},
{1, arcs_83_1},
+ {2, arcs_83_2},
+ {1, arcs_83_3},
};
static arc arcs_84_0[1] = {
- {175, 1},
+ {23, 1},
};
-static arc arcs_84_1[2] = {
- {176, 2},
+static arc arcs_84_1[1] = {
{0, 1},
};
-static arc arcs_84_2[1] = {
- {0, 2},
-};
-static state states_84[3] = {
+static state states_84[2] = {
{1, arcs_84_0},
- {2, arcs_84_1},
- {1, arcs_84_2},
+ {1, arcs_84_1},
};
-static arc arcs_85_0[2] = {
- {78, 1},
- {9, 2},
+static arc arcs_85_0[1] = {
+ {176, 1},
};
-static arc arcs_85_1[1] = {
- {26, 2},
+static arc arcs_85_1[2] = {
+ {177, 2},
+ {0, 1},
};
static arc arcs_85_2[1] = {
{0, 2},
};
static state states_85[3] = {
- {2, arcs_85_0},
- {1, arcs_85_1},
+ {1, arcs_85_0},
+ {2, arcs_85_1},
{1, arcs_85_2},
};
-static arc arcs_86_0[1] = {
- {178, 1},
+static arc arcs_86_0[2] = {
+ {78, 1},
+ {9, 2},
};
-static arc arcs_86_1[2] = {
- {2, 1},
- {7, 2},
+static arc arcs_86_1[1] = {
+ {26, 2},
};
static arc arcs_86_2[1] = {
{0, 2},
};
static state states_86[3] = {
- {1, arcs_86_0},
- {2, arcs_86_1},
+ {2, arcs_86_0},
+ {1, arcs_86_1},
{1, arcs_86_2},
};
static arc arcs_87_0[1] = {
- {13, 1},
+ {179, 1},
};
static arc arcs_87_1[2] = {
- {179, 2},
- {15, 3},
+ {2, 1},
+ {7, 2},
};
static arc arcs_87_2[1] = {
+ {0, 2},
+};
+static state states_87[3] = {
+ {1, arcs_87_0},
+ {2, arcs_87_1},
+ {1, arcs_87_2},
+};
+static arc arcs_88_0[1] = {
+ {13, 1},
+};
+static arc arcs_88_1[2] = {
+ {180, 2},
{15, 3},
};
-static arc arcs_87_3[1] = {
+static arc arcs_88_2[1] = {
+ {15, 3},
+};
+static arc arcs_88_3[1] = {
{25, 4},
};
-static arc arcs_87_4[1] = {
+static arc arcs_88_4[1] = {
{26, 5},
};
-static arc arcs_87_5[1] = {
+static arc arcs_88_5[1] = {
{0, 5},
};
-static state states_87[6] = {
- {1, arcs_87_0},
- {2, arcs_87_1},
- {1, arcs_87_2},
- {1, arcs_87_3},
- {1, arcs_87_4},
- {1, arcs_87_5},
+static state states_88[6] = {
+ {1, arcs_88_0},
+ {2, arcs_88_1},
+ {1, arcs_88_2},
+ {1, arcs_88_3},
+ {1, arcs_88_4},
+ {1, arcs_88_5},
};
-static arc arcs_88_0[3] = {
+static arc arcs_89_0[3] = {
{26, 1},
{34, 2},
{35, 3},
};
-static arc arcs_88_1[2] = {
+static arc arcs_89_1[2] = {
{33, 4},
{0, 1},
};
-static arc arcs_88_2[3] = {
+static arc arcs_89_2[3] = {
{26, 5},
{33, 6},
{0, 2},
};
-static arc arcs_88_3[1] = {
+static arc arcs_89_3[1] = {
{26, 7},
};
-static arc arcs_88_4[4] = {
+static arc arcs_89_4[4] = {
{26, 1},
{34, 8},
{35, 3},
{0, 4},
};
-static arc arcs_88_5[2] = {
+static arc arcs_89_5[2] = {
{33, 6},
{0, 5},
};
-static arc arcs_88_6[2] = {
+static arc arcs_89_6[2] = {
{26, 5},
{35, 3},
};
-static arc arcs_88_7[1] = {
+static arc arcs_89_7[1] = {
{0, 7},
};
-static arc arcs_88_8[3] = {
+static arc arcs_89_8[3] = {
{26, 9},
{33, 10},
{0, 8},
};
-static arc arcs_88_9[2] = {
+static arc arcs_89_9[2] = {
{33, 10},
{0, 9},
};
-static arc arcs_88_10[2] = {
+static arc arcs_89_10[2] = {
{26, 9},
{35, 3},
};
-static state states_88[11] = {
- {3, arcs_88_0},
- {2, arcs_88_1},
- {3, arcs_88_2},
- {1, arcs_88_3},
- {4, arcs_88_4},
- {2, arcs_88_5},
- {2, arcs_88_6},
- {1, arcs_88_7},
- {3, arcs_88_8},
- {2, arcs_88_9},
- {2, arcs_88_10},
-};
-static dfa dfas[89] = {
+static state states_89[11] = {
+ {3, arcs_89_0},
+ {2, arcs_89_1},
+ {3, arcs_89_2},
+ {1, arcs_89_3},
+ {4, arcs_89_4},
+ {2, arcs_89_5},
+ {2, arcs_89_6},
+ {1, arcs_89_7},
+ {3, arcs_89_8},
+ {2, arcs_89_9},
+ {2, arcs_89_10},
+};
+static dfa dfas[90] = {
{256, "single_input", 0, 3, states_0,
- "\004\050\340\000\004\000\000\000\024\174\022\016\144\011\040\004\000\200\041\121\076\204\000"},
+ "\004\050\340\000\004\000\000\000\024\174\022\016\144\011\040\004\000\200\041\121\076\004\001"},
{257, "file_input", 0, 2, states_1,
- "\204\050\340\000\004\000\000\000\024\174\022\016\144\011\040\004\000\200\041\121\076\204\000"},
+ "\204\050\340\000\004\000\000\000\024\174\022\016\144\011\040\004\000\200\041\121\076\004\001"},
{258, "eval_input", 0, 3, states_2,
"\000\040\200\000\000\000\000\000\000\000\020\000\000\000\040\004\000\200\041\121\076\000\000"},
{259, "decorator", 0, 7, states_3,
@@ -2096,11 +2106,11 @@ static dfa dfas[89] = {
{268, "vfpdef", 0, 2, states_12,
"\000\000\200\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
{269, "stmt", 0, 2, states_13,
- "\000\050\340\000\004\000\000\000\024\174\022\016\144\011\040\004\000\200\041\121\076\204\000"},
+ "\000\050\340\000\004\000\000\000\024\174\022\016\144\011\040\004\000\200\041\121\076\004\001"},
{270, "simple_stmt", 0, 4, states_14,
- "\000\040\200\000\004\000\000\000\024\174\022\016\000\000\040\004\000\200\041\121\076\200\000"},
+ "\000\040\200\000\004\000\000\000\024\174\022\016\000\000\040\004\000\200\041\121\076\000\001"},
{271, "small_stmt", 0, 2, states_15,
- "\000\040\200\000\004\000\000\000\024\174\022\016\000\000\040\004\000\200\041\121\076\200\000"},
+ "\000\040\200\000\004\000\000\000\024\174\022\016\000\000\040\004\000\200\041\121\076\000\001"},
{272, "expr_stmt", 0, 6, states_16,
"\000\040\200\000\004\000\000\000\000\000\020\000\000\000\040\004\000\200\041\121\076\000\000"},
{273, "annassign", 0, 5, states_17,
@@ -2114,7 +2124,7 @@ static dfa dfas[89] = {
{277, "pass_stmt", 0, 2, states_21,
"\000\000\000\000\000\000\000\000\020\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
{278, "flow_stmt", 0, 2, states_22,
- "\000\000\000\000\000\000\000\000\000\074\000\000\000\000\000\000\000\000\000\000\000\200\000"},
+ "\000\000\000\000\000\000\000\000\000\074\000\000\000\000\000\000\000\000\000\000\000\000\001"},
{279, "break_stmt", 0, 2, states_23,
"\000\000\000\000\000\000\000\000\000\004\000\000\000\000\000\000\000\000\000\000\000\000\000"},
{280, "continue_stmt", 0, 2, states_24,
@@ -2122,7 +2132,7 @@ static dfa dfas[89] = {
{281, "return_stmt", 0, 3, states_25,
"\000\000\000\000\000\000\000\000\000\020\000\000\000\000\000\000\000\000\000\000\000\000\000"},
{282, "yield_stmt", 0, 2, states_26,
- "\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\200\000"},
+ "\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\001"},
{283, "raise_stmt", 0, 5, states_27,
"\000\000\000\000\000\000\000\000\000\040\000\000\000\000\000\000\000\000\000\000\000\000\000"},
{284, "import_stmt", 0, 2, states_28,
@@ -2166,7 +2176,7 @@ static dfa dfas[89] = {
{303, "except_clause", 0, 5, states_47,
"\000\000\000\000\000\000\000\000\000\000\000\000\000\100\000\000\000\000\000\000\000\000\000"},
{304, "suite", 0, 7, states_48,
- "\004\040\200\000\004\000\000\000\024\174\022\016\000\000\040\004\000\200\041\121\076\200\000"},
+ "\004\040\200\000\004\000\000\000\024\174\022\016\000\000\040\004\000\200\041\121\076\000\001"},
{305, "test", 0, 6, states_49,
"\000\040\200\000\000\000\000\000\000\000\020\000\000\000\040\004\000\200\041\121\076\000\000"},
{306, "test_nocond", 0, 2, states_50,
@@ -2231,24 +2241,26 @@ static dfa dfas[89] = {
"\000\040\200\000\014\000\000\000\000\000\020\000\000\000\040\004\000\200\041\121\076\000\000"},
{336, "comp_iter", 0, 2, states_80,
"\000\000\040\000\000\000\000\000\000\000\000\000\104\000\000\000\000\000\000\000\000\000\000"},
- {337, "comp_for", 0, 7, states_81,
+ {337, "sync_comp_for", 0, 6, states_81,
+ "\000\000\000\000\000\000\000\000\000\000\000\000\100\000\000\000\000\000\000\000\000\000\000"},
+ {338, "comp_for", 0, 3, states_82,
"\000\000\040\000\000\000\000\000\000\000\000\000\100\000\000\000\000\000\000\000\000\000\000"},
- {338, "comp_if", 0, 4, states_82,
+ {339, "comp_if", 0, 4, states_83,
"\000\000\000\000\000\000\000\000\000\000\000\000\004\000\000\000\000\000\000\000\000\000\000"},
- {339, "encoding_decl", 0, 2, states_83,
+ {340, "encoding_decl", 0, 2, states_84,
"\000\000\200\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
- {340, "yield_expr", 0, 3, states_84,
- "\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\200\000"},
- {341, "yield_arg", 0, 3, states_85,
+ {341, "yield_expr", 0, 3, states_85,
+ "\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\001"},
+ {342, "yield_arg", 0, 3, states_86,
"\000\040\200\000\000\000\000\000\000\100\020\000\000\000\040\004\000\200\041\121\076\000\000"},
- {342, "func_type_input", 0, 3, states_86,
+ {343, "func_type_input", 0, 3, states_87,
"\000\040\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
- {343, "func_type", 0, 6, states_87,
+ {344, "func_type", 0, 6, states_88,
"\000\040\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
- {344, "typelist", 0, 11, states_88,
+ {345, "typelist", 0, 11, states_89,
"\000\040\200\000\014\000\000\000\000\000\020\000\000\000\040\004\000\200\041\121\076\000\000"},
};
-static label labels[180] = {
+static label labels[181] = {
{0, "EMPTY"},
{256, 0},
{4, 0},
@@ -2300,7 +2312,7 @@ static label labels[180] = {
{274, 0},
{273, 0},
{275, 0},
- {340, 0},
+ {341, 0},
{314, 0},
{36, 0},
{37, 0},
@@ -2415,24 +2427,25 @@ static label labels[180] = {
{1, "None"},
{1, "True"},
{1, "False"},
- {337, 0},
+ {338, 0},
{327, 0},
{328, 0},
{329, 0},
{1, "class"},
{335, 0},
{336, 0},
- {338, 0},
{339, 0},
+ {337, 0},
+ {340, 0},
{1, "yield"},
- {341, 0},
{342, 0},
{343, 0},
{344, 0},
+ {345, 0},
};
grammar _Ta3Parser_Grammar = {
- 89,
+ 90,
dfas,
- {180, labels},
+ {181, labels},
256
};
=====================================
setup.py
=====================================
@@ -67,7 +67,7 @@ _ast3 = Extension(
'ast3/Include/asdl.h',
'ast3/Include/ast.h',
'ast3/Include/bitset.h',
- 'ast3/Include/compile.h',
+ 'ast3/Include/compile-ast3.h',
'ast3/Include/errcode.h',
'ast3/Include/graminit.h',
'ast3/Include/grammar.h',
@@ -117,4 +117,5 @@ setup (name = 'typed-ast',
'Topic :: Software Development',
],
packages = ['typed_ast'],
+ ext_package='typed_ast',
ext_modules = [_ast27, _ast3])
=====================================
typed_ast.egg-info/PKG-INFO
=====================================
@@ -1,6 +1,6 @@
Metadata-Version: 1.1
Name: typed-ast
-Version: 1.2.0
+Version: 1.3.0
Summary: a fork of Python 2 and 3 ast modules with type comment support
Home-page: https://github.com/python/typed_ast
Author: David Fisher
=====================================
typed_ast.egg-info/SOURCES.txt
=====================================
@@ -34,7 +34,7 @@ ast3/Include/Python-ast.h
ast3/Include/asdl.h
ast3/Include/ast.h
ast3/Include/bitset.h
-ast3/Include/compile.h
+ast3/Include/compile-ast3.h
ast3/Include/errcode.h
ast3/Include/graminit.h
ast3/Include/grammar.h
=====================================
typed_ast/__init__.py
=====================================
@@ -1 +1 @@
-__version__ = "1.2.0"
+__version__ = "1.3.0"
=====================================
typed_ast/ast27.py
=====================================
@@ -38,8 +38,8 @@
:copyright: Copyright 2008 by Armin Ronacher.
:license: Python License.
"""
-import _ast27
-from _ast27 import *
+from typed_ast import _ast27
+from typed_ast._ast27 import *
def parse(source, filename='<unknown>', mode='exec'):
=====================================
typed_ast/ast3.py
=====================================
@@ -37,10 +37,10 @@
:copyright: Copyright 2008 by Armin Ronacher.
:license: Python License.
"""
-import _ast3
-from _ast3 import *
+from typed_ast import _ast3
+from typed_ast._ast3 import *
-LATEST_MINOR_VERSION = 6
+LATEST_MINOR_VERSION = 7
def parse(source, filename='<unknown>', mode='exec', feature_version=LATEST_MINOR_VERSION):
"""
@@ -56,6 +56,8 @@ def parse(source, filename='<unknown>', mode='exec', feature_version=LATEST_MINO
When feature_version=4, the parser will forbid the use of the async/await
keywords and the '@' operator, but will not forbid the use of PEP 448
additional unpacking generalizations, which were also added in Python 3.5.
+
+ When feature_version>=7, 'async' and 'await' are always keywords.
"""
return _ast3._parse(source, filename, mode, feature_version)
=====================================
typed_ast/conversions.py
=====================================
@@ -217,7 +217,7 @@ class _AST2To3(ast27.NodeTransformer):
def visit_Str(self, s):
if isinstance(s.s, bytes):
- return ast3.Bytes(s.s)
+ return ast3.Bytes(s.s, s.kind)
else:
return ast3.Str(s.s, s.kind)
View it on GitLab: https://salsa.debian.org/med-team/python3-typed-ast/commit/9dce3effdda96403322a11f510e1a99ad379ee16
--
View it on GitLab: https://salsa.debian.org/med-team/python3-typed-ast/commit/9dce3effdda96403322a11f510e1a99ad379ee16
You're receiving this email because of your account on salsa.debian.org.
-------------- next part --------------
An HTML attachment was scrubbed...
URL: <http://alioth-lists.debian.net/pipermail/debian-med-commit/attachments/20190204/83f23e5b/attachment-0001.html>
More information about the debian-med-commit
mailing list