[med-svn] [python3-typed-ast] 01/01: New upstream version 1.0.4
From: Michael Crusoe <misterc-guest at moszumanska.debian.org>
Date: Fri Jun 23 15:54:52 UTC 2017
This is an automated email from the git hooks/post-receive script.
misterc-guest pushed a commit to annotated tag upstream/1.0.4
in repository python3-typed-ast.
commit acaa4f79f3510081a7517b0487eb0e44d8ed0fea
Author: Michael R. Crusoe <michael.crusoe at gmail.com>
Date: Fri Jun 23 08:45:06 2017 -0700
New upstream version 1.0.4
---
MANIFEST.in | 2 +-
PKG-INFO | 17 +-
ast27/Python/ast.c | 22 +-
{ast35 => ast3}/Custom/typed_ast.c | 38 +-
ast3/Include/Python-ast.h | 672 +++++++++++++++
{ast35 => ast3}/Include/asdl.h | 16 +-
{ast35 => ast3}/Include/ast.h | 14 +-
{ast35 => ast3}/Include/bitset.h | 6 +-
{ast35 => ast3}/Include/compile.h | 6 +-
{ast35 => ast3}/Include/errcode.h | 6 +-
{ast35 => ast3}/Include/graminit.h | 0
{ast35 => ast3}/Include/grammar.h | 15 +-
{ast35 => ast3}/Include/node.h | 16 +-
{ast35 => ast3}/Include/parsetok.h | 24 +-
{ast35 => ast3}/Include/token.h | 16 +-
{ast35 => ast3}/Parser/acceler.c | 6 +-
{ast35 => ast3}/Parser/bitset.c | 0
{ast35 => ast3}/Parser/grammar.c | 41 +-
{ast35 => ast3}/Parser/grammar1.c | 8 +-
{ast35 => ast3}/Parser/node.c | 19 +-
{ast35 => ast3}/Parser/parser.c | 37 +-
{ast35 => ast3}/Parser/parser.h | 14 +-
{ast35 => ast3}/Parser/parsetok.c | 64 +-
{ast35 => ast3}/Parser/tokenizer.c | 223 +++--
{ast35 => ast3}/Parser/tokenizer.h | 16 +-
{ast35 => ast3}/Python/Python-ast.c | 897 +++++++++++++++++---
{ast35 => ast3}/Python/asdl.c | 12 +-
{ast35 => ast3}/Python/ast.c | 1570 ++++++++++++++++++++++++++++-------
{ast35 => ast3}/Python/graminit.c | 167 ++--
ast35/Include/Python-ast.h | 637 --------------
setup.cfg | 1 -
setup.py | 77 +-
typed_ast.egg-info/PKG-INFO | 17 +-
typed_ast.egg-info/SOURCES.txt | 54 +-
typed_ast.egg-info/top_level.txt | 2 +-
typed_ast/ast27.py | 30 +-
typed_ast/{ast35.py => ast3.py} | 105 ++-
typed_ast/conversions.py | 116 +--
38 files changed, 3371 insertions(+), 1612 deletions(-)
diff --git a/MANIFEST.in b/MANIFEST.in
index 41f118b..bef5282 100644
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -1,3 +1,3 @@
recursive-include ast27 *.h
-recursive-include ast35 *.h
+recursive-include ast3 *.h
include LICENSE
diff --git a/PKG-INFO b/PKG-INFO
index 3c2dafb..54d110f 100644
--- a/PKG-INFO
+++ b/PKG-INFO
@@ -1,17 +1,20 @@
Metadata-Version: 1.1
Name: typed-ast
-Version: 0.6.3
-Summary: fork of Python 2 and 3 ast modules with type comment support
-Home-page: https://github.com/dropbox/typed_ast
+Version: 1.0.4
+Summary: a fork of Python 2 and 3 ast modules with type comment support
+Home-page: https://github.com/python/typed_ast
Author: David Fisher
Author-email: ddfisher at dropbox.com
License: Apache License 2.0
-Description: This package is a fork of the CPython 2.7 and 3.5 `ast` modules with the
- ability to parse PEP 484 (https://www.python.org/dev/peps/pep-0484/) type
- comments. The primary goals of this package are correctness and speed.
+Description: `typed_ast` is a Python 3 package that provides a Python 2.7 and Python 3
+ parser similar to the standard `ast` library. Unlike `ast`, the parsers in
+ `typed_ast` include PEP 484 type comments and are independent of the version of
+ Python under which they are run. The `typed_ast` parsers produce the standard
+ Python AST (plus type comments), and are both fast and correct, as they are
+ based on the CPython 2.7 and 3.6 parsers.
Platform: POSIX
Platform: Windows
-Classifier: Development Status :: 4 - Beta
+Classifier: Development Status :: 5 - Production/Stable
Classifier: Environment :: Console
Classifier: Intended Audience :: Developers
Classifier: Operating System :: POSIX
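The updated description above is the whole story of the package: two bundled parsers, ast27 and ast3 (renamed from ast35 in this release), that produce the standard AST plus type-comment fields regardless of which interpreter runs them. A minimal usage sketch, assuming typed_ast 1.0.4 is installed (not part of this commit):

    from typed_ast import ast27, ast3

    # The Python 3 parser yields the familiar node classes and behaves the
    # same under any supported Python 3 interpreter.
    mod = ast3.parse("x = 1\n")
    print(type(mod).__name__, type(mod.body[0]).__name__)   # Module Assign

    # The bundled 2.7 parser accepts Python 2 syntax even when running
    # under Python 3.
    mod27 = ast27.parse("print 'hello'\n")
    print(type(mod27.body[0]).__name__)                      # Print
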
diff --git a/ast27/Python/ast.c b/ast27/Python/ast.c
index 9ae7d2b..4792dcc 100644
--- a/ast27/Python/ast.c
+++ b/ast27/Python/ast.c
@@ -456,7 +456,7 @@ set_context(struct compiling *c, expr_ty e, expr_context_ty ctx, const node *n)
switch (e->kind) {
case Attribute_kind:
if (ctx == Store && !forbidden_check(c, n,
- PyBytes_AS_STRING(e->v.Attribute.attr)))
+ PyUnicode_AsUTF8(e->v.Attribute.attr)))
return 0;
e->v.Attribute.ctx = ctx;
break;
@@ -465,7 +465,7 @@ set_context(struct compiling *c, expr_ty e, expr_context_ty ctx, const node *n)
break;
case Name_kind:
if (ctx == Store && !forbidden_check(c, n,
- PyBytes_AS_STRING(e->v.Name.id)))
+ PyUnicode_AsUTF8(e->v.Name.id)))
return 0;
e->v.Name.ctx = ctx;
break;
@@ -846,7 +846,7 @@ ast_for_arguments(struct compiling *c, const node *n)
}
i += 1; /* the name */
- if (TYPE(CHILD(n, i)) == COMMA)
+ if (i < NCH(n) && TYPE(CHILD(n, i)) == COMMA)
i += 1; /* the comma, if present */
if (parenthesized && Py_Py3kWarningFlag &&
!ast_warn(c, ch, "parenthesized argument names "
@@ -862,7 +862,7 @@ ast_for_arguments(struct compiling *c, const node *n)
if (!vararg)
return NULL;
i += 2; /* the star and the name */
- if (TYPE(CHILD(n, i)) == COMMA)
+ if (i < NCH(n) && TYPE(CHILD(n, i)) == COMMA)
i += 1; /* the comma, if present */
break;
case DOUBLESTAR:
@@ -872,7 +872,7 @@ ast_for_arguments(struct compiling *c, const node *n)
if (!kwarg)
return NULL;
i += 2; /* the double star and the name */
- if (TYPE(CHILD(n, i)) == COMMA)
+ if (i < NCH(n) && TYPE(CHILD(n, i)) == COMMA)
i += 1; /* the comma, if present */
break;
case TYPE_COMMENT:
@@ -2203,7 +2203,7 @@ ast_for_call(struct compiling *c, const node *n, expr_ty func)
return NULL;
}
key = e->v.Name.id;
- if (!forbidden_check(c, CHILD(ch, 0), PyBytes_AS_STRING(key)))
+ if (!forbidden_check(c, CHILD(ch, 0), PyUnicode_AsUTF8(key)))
return NULL;
for (k = 0; k < nkeywords; k++) {
tmp = _PyUnicode_AsString(
@@ -3500,6 +3500,7 @@ parsenumber(struct compiling *c, const char *s)
const char *end;
long x;
double dx;
+ int old_style_octal;
#ifndef WITHOUT_COMPLEX
Py_complex complex;
int imflag;
@@ -3519,14 +3520,17 @@ parsenumber(struct compiling *c, const char *s)
return PyErr_NoMemory();
memcpy(copy, s, len);
copy[len - 1] = '\0';
- PyObject *result = PyLong_FromString(copy, (char **)0, 0);
+ old_style_octal = len > 2 && copy[0] == '0' && copy[1] >= '0' && copy[1] <= '9';
+ PyObject *result = PyLong_FromString(copy, (char **)0, old_style_octal ? 8 : 0);
free(copy);
return result;
}
x = Ta27OS_strtol((char *)s, (char **)&end, 0);
if (*end == '\0') {
- if (errno != 0)
- return PyLong_FromString((char *)s, (char **)0, 0);
+ if (errno != 0) {
+ old_style_octal = end - s > 1 && s[0] == '0' && s[1] >= '0' && s[1] <= '9';
+ return PyLong_FromString((char *)s, (char **)0, old_style_octal ? 8 : 0);
+ }
return PyLong_FromLong(x);
}
/* XXX Huge floats may silently fail */
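The hunk above is a behaviour fix rather than a rename: a Python 2 literal whose leading '0' is followed by another digit is an old-style octal constant, and PyLong_FromString with base 0 applies Python 3 rules that reject it, so the code now detects that shape and forces base 8 on both fallback paths. A small sketch of the same base-selection rule in Python (illustration only, not the C code itself):

    def pick_base(literal):
        # "0777"-style literals are Python 2 octal; int(s, 0) under Python 3
        # rules would reject them, so force base 8 when a leading zero is
        # followed by another digit.
        if len(literal) > 1 and literal[0] == "0" and literal[1].isdigit():
            return 8
        return 0  # otherwise let the 0x/0o/0b prefix (or its absence) decide

    print(int("0777", pick_base("0777")))   # 511
    print(int("0x1f", pick_base("0x1f")))   # 31
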
diff --git a/ast35/Custom/typed_ast.c b/ast3/Custom/typed_ast.c
similarity index 88%
rename from ast35/Custom/typed_ast.c
rename to ast3/Custom/typed_ast.c
index 80172aa..06dc1d4 100644
--- a/ast35/Custom/typed_ast.c
+++ b/ast3/Custom/typed_ast.c
@@ -8,7 +8,7 @@
#include "parsetok.h"
#include "errcode.h"
-extern grammar _Ta35Parser_Grammar; /* from graminit.c */
+extern grammar _Ta3Parser_Grammar; /* from graminit.c */
// from Python/bltinmodule.c
static const char *
@@ -103,7 +103,7 @@ err_input(perrdetail *err)
if (err->token == TYPE_COMMENT)
msg = "misplaced type annotation";
else
- msg = "invalid syntax";
+ msg = "invalid syntax";
}
break;
case E_TOKEN:
@@ -213,17 +213,18 @@ err_free(perrdetail *err)
// copy of PyParser_ASTFromStringObject in Python/pythonrun.c
/* Preferred access to parser is through AST. */
-static mod_ty
+mod_ty
string_object_to_c_ast(const char *s, PyObject *filename, int start,
- PyCompilerFlags *flags, PyArena *arena)
+ PyCompilerFlags *flags, int feature_version,
+ PyArena *arena)
{
mod_ty mod;
PyCompilerFlags localflags;
perrdetail err;
int iflags = PARSER_FLAGS(flags);
- node *n = Ta35Parser_ParseStringObject(s, filename,
- &_Ta35Parser_Grammar, start, &err,
+ node *n = Ta3Parser_ParseStringObject(s, filename,
+ &_Ta3Parser_Grammar, start, &err,
&iflags);
if (flags == NULL) {
localflags.cf_flags = 0;
@@ -231,8 +232,8 @@ string_object_to_c_ast(const char *s, PyObject *filename, int start,
}
if (n) {
flags->cf_flags |= iflags & PyCF_MASK;
- mod = Ta35AST_FromNodeObject(n, flags, filename, arena);
- Ta35Node_Free(n);
+ mod = Ta3AST_FromNodeObject(n, flags, filename, feature_version, arena);
+ Ta3Node_Free(n);
}
else {
err_input(&err);
@@ -245,28 +246,30 @@ string_object_to_c_ast(const char *s, PyObject *filename, int start,
// adapted from Py_CompileStringObject in Python/pythonrun.c
static PyObject *
string_object_to_py_ast(const char *str, PyObject *filename, int start,
- PyCompilerFlags *flags)
+ PyCompilerFlags *flags, int feature_version)
{
mod_ty mod;
PyArena *arena = PyArena_New();
if (arena == NULL)
return NULL;
- mod = string_object_to_c_ast(str, filename, start, flags, arena);
+ mod = string_object_to_c_ast(str, filename, start, flags, feature_version, arena);
if (mod == NULL) {
PyArena_Free(arena);
return NULL;
}
- PyObject *result = Ta35AST_mod2obj(mod);
+ PyObject *result = Ta3AST_mod2obj(mod);
PyArena_Free(arena);
return result;
}
// adapted from builtin_compile_impl in Python/bltinmodule.c
static PyObject *
-ast35_parse_impl(PyObject *source,
- PyObject *filename, const char *mode)
+ast3_parse_impl(PyObject *source,
+ PyObject *filename,
+ const char *mode,
+ int feature_version)
{
PyObject *source_copy;
const char *str;
@@ -295,7 +298,7 @@ ast35_parse_impl(PyObject *source,
if (str == NULL)
goto error;
- result = string_object_to_py_ast(str, filename, start[compile_mode], &cf);
+ result = string_object_to_py_ast(str, filename, start[compile_mode], &cf, feature_version);
Py_XDECREF(source_copy);
goto finally;
@@ -308,15 +311,16 @@ finally:
// adapted from builtin_compile in Python/clinic/bltinmodule.c.h
PyObject *
-ast35_parse(PyObject *self, PyObject *args)
+ast3_parse(PyObject *self, PyObject *args)
{
PyObject *return_value = NULL;
PyObject *source;
PyObject *filename;
const char *mode;
+ int feature_version;
- if (PyArg_ParseTuple(args, "OO&s:parse", &source, PyUnicode_FSDecoder, &filename, &mode))
- return_value = ast35_parse_impl(source, filename, mode);
+ if (PyArg_ParseTuple(args, "OO&si:parse", &source, PyUnicode_FSDecoder, &filename, &mode, &feature_version))
+ return_value = ast3_parse_impl(source, filename, mode, feature_version);
return return_value;
}
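Beyond the Ta35 to Ta3 renames, the substantive change in this file is the new feature_version argument: it is threaded from the parse() entry point through string_object_to_py_ast and string_object_to_c_ast into Ta3AST_FromNodeObject, so the caller can state which Python 3 minor version's syntax should be accepted. The Python wrapper (typed_ast/ast3.py, also renamed in this commit) is presumably where a default value is supplied. At the Python level the practical effect is that the bundled 3.6 grammar is always available, whatever interpreter is running; a hedged sketch:

    from typed_ast import ast3

    # 3.6-only syntax (variable annotations, f-strings) parses even when the
    # running interpreter predates it, because the grammar is bundled.
    tree = ast3.parse("x: int = 1\nname = f'{x}!'\n")
    print([type(node).__name__ for node in tree.body])   # ['AnnAssign', 'Assign']
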
diff --git a/ast3/Include/Python-ast.h b/ast3/Include/Python-ast.h
new file mode 100644
index 0000000..38a6e5a
--- /dev/null
+++ b/ast3/Include/Python-ast.h
@@ -0,0 +1,672 @@
+/* File automatically generated by Parser/asdl_c.py. */
+
+#include "asdl.h"
+
+typedef struct _mod *mod_ty;
+
+typedef struct _stmt *stmt_ty;
+
+typedef struct _expr *expr_ty;
+
+typedef enum _expr_context { Load=1, Store=2, Del=3, AugLoad=4, AugStore=5,
+ Param=6 } expr_context_ty;
+
+typedef struct _slice *slice_ty;
+
+typedef enum _boolop { And=1, Or=2 } boolop_ty;
+
+typedef enum _operator { Add=1, Sub=2, Mult=3, MatMult=4, Div=5, Mod=6, Pow=7,
+ LShift=8, RShift=9, BitOr=10, BitXor=11, BitAnd=12,
+ FloorDiv=13 } operator_ty;
+
+typedef enum _unaryop { Invert=1, Not=2, UAdd=3, USub=4 } unaryop_ty;
+
+typedef enum _cmpop { Eq=1, NotEq=2, Lt=3, LtE=4, Gt=5, GtE=6, Is=7, IsNot=8,
+ In=9, NotIn=10 } cmpop_ty;
+
+typedef struct _comprehension *comprehension_ty;
+
+typedef struct _excepthandler *excepthandler_ty;
+
+typedef struct _arguments *arguments_ty;
+
+typedef struct _arg *arg_ty;
+
+typedef struct _keyword *keyword_ty;
+
+typedef struct _alias *alias_ty;
+
+typedef struct _withitem *withitem_ty;
+
+typedef struct _type_ignore *type_ignore_ty;
+
+
+enum _mod_kind {Module_kind=1, Interactive_kind=2, Expression_kind=3,
+ FunctionType_kind=4, Suite_kind=5};
+struct _mod {
+ enum _mod_kind kind;
+ union {
+ struct {
+ asdl_seq *body;
+ asdl_seq *type_ignores;
+ } Module;
+
+ struct {
+ asdl_seq *body;
+ } Interactive;
+
+ struct {
+ expr_ty body;
+ } Expression;
+
+ struct {
+ asdl_seq *argtypes;
+ expr_ty returns;
+ } FunctionType;
+
+ struct {
+ asdl_seq *body;
+ } Suite;
+
+ } v;
+};
+
+enum _stmt_kind {FunctionDef_kind=1, AsyncFunctionDef_kind=2, ClassDef_kind=3,
+ Return_kind=4, Delete_kind=5, Assign_kind=6,
+ AugAssign_kind=7, AnnAssign_kind=8, For_kind=9,
+ AsyncFor_kind=10, While_kind=11, If_kind=12, With_kind=13,
+ AsyncWith_kind=14, Raise_kind=15, Try_kind=16,
+ Assert_kind=17, Import_kind=18, ImportFrom_kind=19,
+ Global_kind=20, Nonlocal_kind=21, Expr_kind=22, Pass_kind=23,
+ Break_kind=24, Continue_kind=25};
+struct _stmt {
+ enum _stmt_kind kind;
+ union {
+ struct {
+ identifier name;
+ arguments_ty args;
+ asdl_seq *body;
+ asdl_seq *decorator_list;
+ expr_ty returns;
+ string type_comment;
+ } FunctionDef;
+
+ struct {
+ identifier name;
+ arguments_ty args;
+ asdl_seq *body;
+ asdl_seq *decorator_list;
+ expr_ty returns;
+ string type_comment;
+ } AsyncFunctionDef;
+
+ struct {
+ identifier name;
+ asdl_seq *bases;
+ asdl_seq *keywords;
+ asdl_seq *body;
+ asdl_seq *decorator_list;
+ } ClassDef;
+
+ struct {
+ expr_ty value;
+ } Return;
+
+ struct {
+ asdl_seq *targets;
+ } Delete;
+
+ struct {
+ asdl_seq *targets;
+ expr_ty value;
+ string type_comment;
+ } Assign;
+
+ struct {
+ expr_ty target;
+ operator_ty op;
+ expr_ty value;
+ } AugAssign;
+
+ struct {
+ expr_ty target;
+ expr_ty annotation;
+ expr_ty value;
+ int simple;
+ } AnnAssign;
+
+ struct {
+ expr_ty target;
+ expr_ty iter;
+ asdl_seq *body;
+ asdl_seq *orelse;
+ string type_comment;
+ } For;
+
+ struct {
+ expr_ty target;
+ expr_ty iter;
+ asdl_seq *body;
+ asdl_seq *orelse;
+ string type_comment;
+ } AsyncFor;
+
+ struct {
+ expr_ty test;
+ asdl_seq *body;
+ asdl_seq *orelse;
+ } While;
+
+ struct {
+ expr_ty test;
+ asdl_seq *body;
+ asdl_seq *orelse;
+ } If;
+
+ struct {
+ asdl_seq *items;
+ asdl_seq *body;
+ string type_comment;
+ } With;
+
+ struct {
+ asdl_seq *items;
+ asdl_seq *body;
+ string type_comment;
+ } AsyncWith;
+
+ struct {
+ expr_ty exc;
+ expr_ty cause;
+ } Raise;
+
+ struct {
+ asdl_seq *body;
+ asdl_seq *handlers;
+ asdl_seq *orelse;
+ asdl_seq *finalbody;
+ } Try;
+
+ struct {
+ expr_ty test;
+ expr_ty msg;
+ } Assert;
+
+ struct {
+ asdl_seq *names;
+ } Import;
+
+ struct {
+ identifier module;
+ asdl_seq *names;
+ int level;
+ } ImportFrom;
+
+ struct {
+ asdl_seq *names;
+ } Global;
+
+ struct {
+ asdl_seq *names;
+ } Nonlocal;
+
+ struct {
+ expr_ty value;
+ } Expr;
+
+ } v;
+ int lineno;
+ int col_offset;
+};
+
+enum _expr_kind {BoolOp_kind=1, BinOp_kind=2, UnaryOp_kind=3, Lambda_kind=4,
+ IfExp_kind=5, Dict_kind=6, Set_kind=7, ListComp_kind=8,
+ SetComp_kind=9, DictComp_kind=10, GeneratorExp_kind=11,
+ Await_kind=12, Yield_kind=13, YieldFrom_kind=14,
+ Compare_kind=15, Call_kind=16, Num_kind=17, Str_kind=18,
+ FormattedValue_kind=19, JoinedStr_kind=20, Bytes_kind=21,
+ NameConstant_kind=22, Ellipsis_kind=23, Constant_kind=24,
+ Attribute_kind=25, Subscript_kind=26, Starred_kind=27,
+ Name_kind=28, List_kind=29, Tuple_kind=30};
+struct _expr {
+ enum _expr_kind kind;
+ union {
+ struct {
+ boolop_ty op;
+ asdl_seq *values;
+ } BoolOp;
+
+ struct {
+ expr_ty left;
+ operator_ty op;
+ expr_ty right;
+ } BinOp;
+
+ struct {
+ unaryop_ty op;
+ expr_ty operand;
+ } UnaryOp;
+
+ struct {
+ arguments_ty args;
+ expr_ty body;
+ } Lambda;
+
+ struct {
+ expr_ty test;
+ expr_ty body;
+ expr_ty orelse;
+ } IfExp;
+
+ struct {
+ asdl_seq *keys;
+ asdl_seq *values;
+ } Dict;
+
+ struct {
+ asdl_seq *elts;
+ } Set;
+
+ struct {
+ expr_ty elt;
+ asdl_seq *generators;
+ } ListComp;
+
+ struct {
+ expr_ty elt;
+ asdl_seq *generators;
+ } SetComp;
+
+ struct {
+ expr_ty key;
+ expr_ty value;
+ asdl_seq *generators;
+ } DictComp;
+
+ struct {
+ expr_ty elt;
+ asdl_seq *generators;
+ } GeneratorExp;
+
+ struct {
+ expr_ty value;
+ } Await;
+
+ struct {
+ expr_ty value;
+ } Yield;
+
+ struct {
+ expr_ty value;
+ } YieldFrom;
+
+ struct {
+ expr_ty left;
+ asdl_int_seq *ops;
+ asdl_seq *comparators;
+ } Compare;
+
+ struct {
+ expr_ty func;
+ asdl_seq *args;
+ asdl_seq *keywords;
+ } Call;
+
+ struct {
+ object n;
+ } Num;
+
+ struct {
+ string s;
+ } Str;
+
+ struct {
+ expr_ty value;
+ int conversion;
+ expr_ty format_spec;
+ } FormattedValue;
+
+ struct {
+ asdl_seq *values;
+ } JoinedStr;
+
+ struct {
+ bytes s;
+ } Bytes;
+
+ struct {
+ singleton value;
+ } NameConstant;
+
+ struct {
+ constant value;
+ } Constant;
+
+ struct {
+ expr_ty value;
+ identifier attr;
+ expr_context_ty ctx;
+ } Attribute;
+
+ struct {
+ expr_ty value;
+ slice_ty slice;
+ expr_context_ty ctx;
+ } Subscript;
+
+ struct {
+ expr_ty value;
+ expr_context_ty ctx;
+ } Starred;
+
+ struct {
+ identifier id;
+ expr_context_ty ctx;
+ } Name;
+
+ struct {
+ asdl_seq *elts;
+ expr_context_ty ctx;
+ } List;
+
+ struct {
+ asdl_seq *elts;
+ expr_context_ty ctx;
+ } Tuple;
+
+ } v;
+ int lineno;
+ int col_offset;
+};
+
+enum _slice_kind {Slice_kind=1, ExtSlice_kind=2, Index_kind=3};
+struct _slice {
+ enum _slice_kind kind;
+ union {
+ struct {
+ expr_ty lower;
+ expr_ty upper;
+ expr_ty step;
+ } Slice;
+
+ struct {
+ asdl_seq *dims;
+ } ExtSlice;
+
+ struct {
+ expr_ty value;
+ } Index;
+
+ } v;
+};
+
+struct _comprehension {
+ expr_ty target;
+ expr_ty iter;
+ asdl_seq *ifs;
+ int is_async;
+};
+
+enum _excepthandler_kind {ExceptHandler_kind=1};
+struct _excepthandler {
+ enum _excepthandler_kind kind;
+ union {
+ struct {
+ expr_ty type;
+ identifier name;
+ asdl_seq *body;
+ } ExceptHandler;
+
+ } v;
+ int lineno;
+ int col_offset;
+};
+
+struct _arguments {
+ asdl_seq *args;
+ arg_ty vararg;
+ asdl_seq *kwonlyargs;
+ asdl_seq *kw_defaults;
+ arg_ty kwarg;
+ asdl_seq *defaults;
+};
+
+struct _arg {
+ identifier arg;
+ expr_ty annotation;
+ string type_comment;
+ int lineno;
+ int col_offset;
+};
+
+struct _keyword {
+ identifier arg;
+ expr_ty value;
+};
+
+struct _alias {
+ identifier name;
+ identifier asname;
+};
+
+struct _withitem {
+ expr_ty context_expr;
+ expr_ty optional_vars;
+};
+
+enum _type_ignore_kind {TypeIgnore_kind=1};
+struct _type_ignore {
+ enum _type_ignore_kind kind;
+ union {
+ struct {
+ int lineno;
+ } TypeIgnore;
+
+ } v;
+};
+
+
+#define Module(a0, a1, a2) _Ta3_Module(a0, a1, a2)
+mod_ty _Ta3_Module(asdl_seq * body, asdl_seq * type_ignores, PyArena *arena);
+#define Interactive(a0, a1) _Ta3_Interactive(a0, a1)
+mod_ty _Ta3_Interactive(asdl_seq * body, PyArena *arena);
+#define Expression(a0, a1) _Ta3_Expression(a0, a1)
+mod_ty _Ta3_Expression(expr_ty body, PyArena *arena);
+#define FunctionType(a0, a1, a2) _Ta3_FunctionType(a0, a1, a2)
+mod_ty _Ta3_FunctionType(asdl_seq * argtypes, expr_ty returns, PyArena *arena);
+#define Suite(a0, a1) _Ta3_Suite(a0, a1)
+mod_ty _Ta3_Suite(asdl_seq * body, PyArena *arena);
+#define FunctionDef(a0, a1, a2, a3, a4, a5, a6, a7, a8) _Ta3_FunctionDef(a0, a1, a2, a3, a4, a5, a6, a7, a8)
+stmt_ty _Ta3_FunctionDef(identifier name, arguments_ty args, asdl_seq * body,
+ asdl_seq * decorator_list, expr_ty returns, string
+ type_comment, int lineno, int col_offset, PyArena
+ *arena);
+#define AsyncFunctionDef(a0, a1, a2, a3, a4, a5, a6, a7, a8) _Ta3_AsyncFunctionDef(a0, a1, a2, a3, a4, a5, a6, a7, a8)
+stmt_ty _Ta3_AsyncFunctionDef(identifier name, arguments_ty args, asdl_seq *
+ body, asdl_seq * decorator_list, expr_ty returns,
+ string type_comment, int lineno, int col_offset,
+ PyArena *arena);
+#define ClassDef(a0, a1, a2, a3, a4, a5, a6, a7) _Ta3_ClassDef(a0, a1, a2, a3, a4, a5, a6, a7)
+stmt_ty _Ta3_ClassDef(identifier name, asdl_seq * bases, asdl_seq * keywords,
+ asdl_seq * body, asdl_seq * decorator_list, int lineno,
+ int col_offset, PyArena *arena);
+#define Return(a0, a1, a2, a3) _Ta3_Return(a0, a1, a2, a3)
+stmt_ty _Ta3_Return(expr_ty value, int lineno, int col_offset, PyArena *arena);
+#define Delete(a0, a1, a2, a3) _Ta3_Delete(a0, a1, a2, a3)
+stmt_ty _Ta3_Delete(asdl_seq * targets, int lineno, int col_offset, PyArena
+ *arena);
+#define Assign(a0, a1, a2, a3, a4, a5) _Ta3_Assign(a0, a1, a2, a3, a4, a5)
+stmt_ty _Ta3_Assign(asdl_seq * targets, expr_ty value, string type_comment, int
+ lineno, int col_offset, PyArena *arena);
+#define AugAssign(a0, a1, a2, a3, a4, a5) _Ta3_AugAssign(a0, a1, a2, a3, a4, a5)
+stmt_ty _Ta3_AugAssign(expr_ty target, operator_ty op, expr_ty value, int
+ lineno, int col_offset, PyArena *arena);
+#define AnnAssign(a0, a1, a2, a3, a4, a5, a6) _Ta3_AnnAssign(a0, a1, a2, a3, a4, a5, a6)
+stmt_ty _Ta3_AnnAssign(expr_ty target, expr_ty annotation, expr_ty value, int
+ simple, int lineno, int col_offset, PyArena *arena);
+#define For(a0, a1, a2, a3, a4, a5, a6, a7) _Ta3_For(a0, a1, a2, a3, a4, a5, a6, a7)
+stmt_ty _Ta3_For(expr_ty target, expr_ty iter, asdl_seq * body, asdl_seq *
+ orelse, string type_comment, int lineno, int col_offset,
+ PyArena *arena);
+#define AsyncFor(a0, a1, a2, a3, a4, a5, a6, a7) _Ta3_AsyncFor(a0, a1, a2, a3, a4, a5, a6, a7)
+stmt_ty _Ta3_AsyncFor(expr_ty target, expr_ty iter, asdl_seq * body, asdl_seq *
+ orelse, string type_comment, int lineno, int col_offset,
+ PyArena *arena);
+#define While(a0, a1, a2, a3, a4, a5) _Ta3_While(a0, a1, a2, a3, a4, a5)
+stmt_ty _Ta3_While(expr_ty test, asdl_seq * body, asdl_seq * orelse, int
+ lineno, int col_offset, PyArena *arena);
+#define If(a0, a1, a2, a3, a4, a5) _Ta3_If(a0, a1, a2, a3, a4, a5)
+stmt_ty _Ta3_If(expr_ty test, asdl_seq * body, asdl_seq * orelse, int lineno,
+ int col_offset, PyArena *arena);
+#define With(a0, a1, a2, a3, a4, a5) _Ta3_With(a0, a1, a2, a3, a4, a5)
+stmt_ty _Ta3_With(asdl_seq * items, asdl_seq * body, string type_comment, int
+ lineno, int col_offset, PyArena *arena);
+#define AsyncWith(a0, a1, a2, a3, a4, a5) _Ta3_AsyncWith(a0, a1, a2, a3, a4, a5)
+stmt_ty _Ta3_AsyncWith(asdl_seq * items, asdl_seq * body, string type_comment,
+ int lineno, int col_offset, PyArena *arena);
+#define Raise(a0, a1, a2, a3, a4) _Ta3_Raise(a0, a1, a2, a3, a4)
+stmt_ty _Ta3_Raise(expr_ty exc, expr_ty cause, int lineno, int col_offset,
+ PyArena *arena);
+#define Try(a0, a1, a2, a3, a4, a5, a6) _Ta3_Try(a0, a1, a2, a3, a4, a5, a6)
+stmt_ty _Ta3_Try(asdl_seq * body, asdl_seq * handlers, asdl_seq * orelse,
+ asdl_seq * finalbody, int lineno, int col_offset, PyArena
+ *arena);
+#define Assert(a0, a1, a2, a3, a4) _Ta3_Assert(a0, a1, a2, a3, a4)
+stmt_ty _Ta3_Assert(expr_ty test, expr_ty msg, int lineno, int col_offset,
+ PyArena *arena);
+#define Import(a0, a1, a2, a3) _Ta3_Import(a0, a1, a2, a3)
+stmt_ty _Ta3_Import(asdl_seq * names, int lineno, int col_offset, PyArena
+ *arena);
+#define ImportFrom(a0, a1, a2, a3, a4, a5) _Ta3_ImportFrom(a0, a1, a2, a3, a4, a5)
+stmt_ty _Ta3_ImportFrom(identifier module, asdl_seq * names, int level, int
+ lineno, int col_offset, PyArena *arena);
+#define Global(a0, a1, a2, a3) _Ta3_Global(a0, a1, a2, a3)
+stmt_ty _Ta3_Global(asdl_seq * names, int lineno, int col_offset, PyArena
+ *arena);
+#define Nonlocal(a0, a1, a2, a3) _Ta3_Nonlocal(a0, a1, a2, a3)
+stmt_ty _Ta3_Nonlocal(asdl_seq * names, int lineno, int col_offset, PyArena
+ *arena);
+#define Expr(a0, a1, a2, a3) _Ta3_Expr(a0, a1, a2, a3)
+stmt_ty _Ta3_Expr(expr_ty value, int lineno, int col_offset, PyArena *arena);
+#define Pass(a0, a1, a2) _Ta3_Pass(a0, a1, a2)
+stmt_ty _Ta3_Pass(int lineno, int col_offset, PyArena *arena);
+#define Break(a0, a1, a2) _Ta3_Break(a0, a1, a2)
+stmt_ty _Ta3_Break(int lineno, int col_offset, PyArena *arena);
+#define Continue(a0, a1, a2) _Ta3_Continue(a0, a1, a2)
+stmt_ty _Ta3_Continue(int lineno, int col_offset, PyArena *arena);
+#define BoolOp(a0, a1, a2, a3, a4) _Ta3_BoolOp(a0, a1, a2, a3, a4)
+expr_ty _Ta3_BoolOp(boolop_ty op, asdl_seq * values, int lineno, int
+ col_offset, PyArena *arena);
+#define BinOp(a0, a1, a2, a3, a4, a5) _Ta3_BinOp(a0, a1, a2, a3, a4, a5)
+expr_ty _Ta3_BinOp(expr_ty left, operator_ty op, expr_ty right, int lineno, int
+ col_offset, PyArena *arena);
+#define UnaryOp(a0, a1, a2, a3, a4) _Ta3_UnaryOp(a0, a1, a2, a3, a4)
+expr_ty _Ta3_UnaryOp(unaryop_ty op, expr_ty operand, int lineno, int
+ col_offset, PyArena *arena);
+#define Lambda(a0, a1, a2, a3, a4) _Ta3_Lambda(a0, a1, a2, a3, a4)
+expr_ty _Ta3_Lambda(arguments_ty args, expr_ty body, int lineno, int
+ col_offset, PyArena *arena);
+#define IfExp(a0, a1, a2, a3, a4, a5) _Ta3_IfExp(a0, a1, a2, a3, a4, a5)
+expr_ty _Ta3_IfExp(expr_ty test, expr_ty body, expr_ty orelse, int lineno, int
+ col_offset, PyArena *arena);
+#define Dict(a0, a1, a2, a3, a4) _Ta3_Dict(a0, a1, a2, a3, a4)
+expr_ty _Ta3_Dict(asdl_seq * keys, asdl_seq * values, int lineno, int
+ col_offset, PyArena *arena);
+#define Set(a0, a1, a2, a3) _Ta3_Set(a0, a1, a2, a3)
+expr_ty _Ta3_Set(asdl_seq * elts, int lineno, int col_offset, PyArena *arena);
+#define ListComp(a0, a1, a2, a3, a4) _Ta3_ListComp(a0, a1, a2, a3, a4)
+expr_ty _Ta3_ListComp(expr_ty elt, asdl_seq * generators, int lineno, int
+ col_offset, PyArena *arena);
+#define SetComp(a0, a1, a2, a3, a4) _Ta3_SetComp(a0, a1, a2, a3, a4)
+expr_ty _Ta3_SetComp(expr_ty elt, asdl_seq * generators, int lineno, int
+ col_offset, PyArena *arena);
+#define DictComp(a0, a1, a2, a3, a4, a5) _Ta3_DictComp(a0, a1, a2, a3, a4, a5)
+expr_ty _Ta3_DictComp(expr_ty key, expr_ty value, asdl_seq * generators, int
+ lineno, int col_offset, PyArena *arena);
+#define GeneratorExp(a0, a1, a2, a3, a4) _Ta3_GeneratorExp(a0, a1, a2, a3, a4)
+expr_ty _Ta3_GeneratorExp(expr_ty elt, asdl_seq * generators, int lineno, int
+ col_offset, PyArena *arena);
+#define Await(a0, a1, a2, a3) _Ta3_Await(a0, a1, a2, a3)
+expr_ty _Ta3_Await(expr_ty value, int lineno, int col_offset, PyArena *arena);
+#define Yield(a0, a1, a2, a3) _Ta3_Yield(a0, a1, a2, a3)
+expr_ty _Ta3_Yield(expr_ty value, int lineno, int col_offset, PyArena *arena);
+#define YieldFrom(a0, a1, a2, a3) _Ta3_YieldFrom(a0, a1, a2, a3)
+expr_ty _Ta3_YieldFrom(expr_ty value, int lineno, int col_offset, PyArena
+ *arena);
+#define Compare(a0, a1, a2, a3, a4, a5) _Ta3_Compare(a0, a1, a2, a3, a4, a5)
+expr_ty _Ta3_Compare(expr_ty left, asdl_int_seq * ops, asdl_seq * comparators,
+ int lineno, int col_offset, PyArena *arena);
+#define Call(a0, a1, a2, a3, a4, a5) _Ta3_Call(a0, a1, a2, a3, a4, a5)
+expr_ty _Ta3_Call(expr_ty func, asdl_seq * args, asdl_seq * keywords, int
+ lineno, int col_offset, PyArena *arena);
+#define Num(a0, a1, a2, a3) _Ta3_Num(a0, a1, a2, a3)
+expr_ty _Ta3_Num(object n, int lineno, int col_offset, PyArena *arena);
+#define Str(a0, a1, a2, a3) _Ta3_Str(a0, a1, a2, a3)
+expr_ty _Ta3_Str(string s, int lineno, int col_offset, PyArena *arena);
+#define FormattedValue(a0, a1, a2, a3, a4, a5) _Ta3_FormattedValue(a0, a1, a2, a3, a4, a5)
+expr_ty _Ta3_FormattedValue(expr_ty value, int conversion, expr_ty format_spec,
+ int lineno, int col_offset, PyArena *arena);
+#define JoinedStr(a0, a1, a2, a3) _Ta3_JoinedStr(a0, a1, a2, a3)
+expr_ty _Ta3_JoinedStr(asdl_seq * values, int lineno, int col_offset, PyArena
+ *arena);
+#define Bytes(a0, a1, a2, a3) _Ta3_Bytes(a0, a1, a2, a3)
+expr_ty _Ta3_Bytes(bytes s, int lineno, int col_offset, PyArena *arena);
+#define NameConstant(a0, a1, a2, a3) _Ta3_NameConstant(a0, a1, a2, a3)
+expr_ty _Ta3_NameConstant(singleton value, int lineno, int col_offset, PyArena
+ *arena);
+#define Ellipsis(a0, a1, a2) _Ta3_Ellipsis(a0, a1, a2)
+expr_ty _Ta3_Ellipsis(int lineno, int col_offset, PyArena *arena);
+#define Constant(a0, a1, a2, a3) _Ta3_Constant(a0, a1, a2, a3)
+expr_ty _Ta3_Constant(constant value, int lineno, int col_offset, PyArena
+ *arena);
+#define Attribute(a0, a1, a2, a3, a4, a5) _Ta3_Attribute(a0, a1, a2, a3, a4, a5)
+expr_ty _Ta3_Attribute(expr_ty value, identifier attr, expr_context_ty ctx, int
+ lineno, int col_offset, PyArena *arena);
+#define Subscript(a0, a1, a2, a3, a4, a5) _Ta3_Subscript(a0, a1, a2, a3, a4, a5)
+expr_ty _Ta3_Subscript(expr_ty value, slice_ty slice, expr_context_ty ctx, int
+ lineno, int col_offset, PyArena *arena);
+#define Starred(a0, a1, a2, a3, a4) _Ta3_Starred(a0, a1, a2, a3, a4)
+expr_ty _Ta3_Starred(expr_ty value, expr_context_ty ctx, int lineno, int
+ col_offset, PyArena *arena);
+#define Name(a0, a1, a2, a3, a4) _Ta3_Name(a0, a1, a2, a3, a4)
+expr_ty _Ta3_Name(identifier id, expr_context_ty ctx, int lineno, int
+ col_offset, PyArena *arena);
+#define List(a0, a1, a2, a3, a4) _Ta3_List(a0, a1, a2, a3, a4)
+expr_ty _Ta3_List(asdl_seq * elts, expr_context_ty ctx, int lineno, int
+ col_offset, PyArena *arena);
+#define Tuple(a0, a1, a2, a3, a4) _Ta3_Tuple(a0, a1, a2, a3, a4)
+expr_ty _Ta3_Tuple(asdl_seq * elts, expr_context_ty ctx, int lineno, int
+ col_offset, PyArena *arena);
+#define Slice(a0, a1, a2, a3) _Ta3_Slice(a0, a1, a2, a3)
+slice_ty _Ta3_Slice(expr_ty lower, expr_ty upper, expr_ty step, PyArena *arena);
+#define ExtSlice(a0, a1) _Ta3_ExtSlice(a0, a1)
+slice_ty _Ta3_ExtSlice(asdl_seq * dims, PyArena *arena);
+#define Index(a0, a1) _Ta3_Index(a0, a1)
+slice_ty _Ta3_Index(expr_ty value, PyArena *arena);
+#define comprehension(a0, a1, a2, a3, a4) _Ta3_comprehension(a0, a1, a2, a3, a4)
+comprehension_ty _Ta3_comprehension(expr_ty target, expr_ty iter, asdl_seq *
+ ifs, int is_async, PyArena *arena);
+#define ExceptHandler(a0, a1, a2, a3, a4, a5) _Ta3_ExceptHandler(a0, a1, a2, a3, a4, a5)
+excepthandler_ty _Ta3_ExceptHandler(expr_ty type, identifier name, asdl_seq *
+ body, int lineno, int col_offset, PyArena
+ *arena);
+#define arguments(a0, a1, a2, a3, a4, a5, a6) _Ta3_arguments(a0, a1, a2, a3, a4, a5, a6)
+arguments_ty _Ta3_arguments(asdl_seq * args, arg_ty vararg, asdl_seq *
+ kwonlyargs, asdl_seq * kw_defaults, arg_ty kwarg,
+ asdl_seq * defaults, PyArena *arena);
+#define arg(a0, a1, a2, a3, a4, a5) _Ta3_arg(a0, a1, a2, a3, a4, a5)
+arg_ty _Ta3_arg(identifier arg, expr_ty annotation, string type_comment, int
+ lineno, int col_offset, PyArena *arena);
+#define keyword(a0, a1, a2) _Ta3_keyword(a0, a1, a2)
+keyword_ty _Ta3_keyword(identifier arg, expr_ty value, PyArena *arena);
+#define alias(a0, a1, a2) _Ta3_alias(a0, a1, a2)
+alias_ty _Ta3_alias(identifier name, identifier asname, PyArena *arena);
+#define withitem(a0, a1, a2) _Ta3_withitem(a0, a1, a2)
+withitem_ty _Ta3_withitem(expr_ty context_expr, expr_ty optional_vars, PyArena
+ *arena);
+#define TypeIgnore(a0, a1) _Ta3_TypeIgnore(a0, a1)
+type_ignore_ty _Ta3_TypeIgnore(int lineno, PyArena *arena);
+
+PyObject* Ta3AST_mod2obj(mod_ty t);
+mod_ty Ta3AST_obj2mod(PyObject* ast, PyArena* arena, int mode);
+int Ta3AST_Check(PyObject* obj);
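This generated header is where the type-comment support shows up structurally: Module carries a type_ignores sequence; FunctionDef, AsyncFunctionDef, Assign, For, AsyncFor, With, AsyncWith and the arg struct carry a type_comment string; and the FunctionType module kind (together with Py_func_type_input in compile.h below) backs parsing of standalone function-type signatures. How those fields surface at the Python level, as a hedged sketch of documented typed_ast behaviour rather than code from this commit:

    from typed_ast import ast3

    src = (
        "import missing  # type: ignore\n"
        "def f(a, b):\n"
        "    # type: (int, str) -> bool\n"
        "    return True\n"
    )
    mod = ast3.parse(src)
    print([ti.lineno for ti in mod.type_ignores])   # [1]
    print(mod.body[1].type_comment)                 # (int, str) -> bool
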
diff --git a/ast35/Include/asdl.h b/ast3/Include/asdl.h
similarity index 76%
rename from ast35/Include/asdl.h
rename to ast3/Include/asdl.h
index d74d182..2f39db8 100644
--- a/ast35/Include/asdl.h
+++ b/ast3/Include/asdl.h
@@ -1,11 +1,12 @@
-#ifndef Ta35_ASDL_H
-#define Ta35_ASDL_H
+#ifndef Ta3_ASDL_H
+#define Ta3_ASDL_H
typedef PyObject * identifier;
typedef PyObject * string;
typedef PyObject * bytes;
typedef PyObject * object;
typedef PyObject * singleton;
+typedef PyObject * constant;
/* It would be nice if the code generated by asdl_c.py was completely
independent of Python, but it is a goal the requires too much work
@@ -25,13 +26,8 @@ typedef struct {
int elements[1];
} asdl_int_seq;
-
-#if PY_MINOR_VERSION < 4
-#define _Py_asdl_seq_new asdl_seq_new
-#define _Py_asdl_int_seq_new asdl_int_seq_new
-#endif
-asdl_seq *_Py_asdl_seq_new(Py_ssize_t size, PyArena *arena);
-asdl_int_seq *_Py_asdl_int_seq_new(Py_ssize_t size, PyArena *arena);
+asdl_seq *_Ta3_asdl_seq_new(Py_ssize_t size, PyArena *arena);
+asdl_int_seq *_Ta3_asdl_int_seq_new(Py_ssize_t size, PyArena *arena);
#define asdl_seq_GET(S, I) (S)->elements[(I)]
#define asdl_seq_LEN(S) ((S) == NULL ? 0 : (S)->size)
@@ -47,4 +43,4 @@ asdl_int_seq *_Py_asdl_int_seq_new(Py_ssize_t size, PyArena *arena);
#define asdl_seq_SET(S, I, V) (S)->elements[I] = (V)
#endif
-#endif /* !Ta35_ASDL_H */
+#endif /* !Ta3_ASDL_H */
diff --git a/ast35/Include/ast.h b/ast3/Include/ast.h
similarity index 58%
rename from ast35/Include/ast.h
rename to ast3/Include/ast.h
index 50bba6a..ac975d6 100644
--- a/ast35/Include/ast.h
+++ b/ast3/Include/ast.h
@@ -1,22 +1,24 @@
-#ifndef Ta35_AST_H
-#define Ta35_AST_H
+#ifndef Ta3_AST_H
+#define Ta3_AST_H
#ifdef __cplusplus
extern "C" {
#endif
-int Ta35AST_Validate(mod_ty);
-mod_ty Ta35AST_FromNode(
+extern int Ta3AST_Validate(mod_ty);
+extern mod_ty Ta3AST_FromNode(
const node *n,
PyCompilerFlags *flags,
const char *filename, /* decoded from the filesystem encoding */
+ int feature_version,
PyArena *arena);
-mod_ty Ta35AST_FromNodeObject(
+extern mod_ty Ta3AST_FromNodeObject(
const node *n,
PyCompilerFlags *flags,
PyObject *filename,
+ int feature_version,
PyArena *arena);
#ifdef __cplusplus
}
#endif
-#endif /* !Ta35_AST_H */
+#endif /* !Ta3_AST_H */
diff --git a/ast35/Include/bitset.h b/ast3/Include/bitset.h
similarity index 90%
rename from ast35/Include/bitset.h
rename to ast3/Include/bitset.h
index 7a4c966..36729a7 100644
--- a/ast35/Include/bitset.h
+++ b/ast3/Include/bitset.h
@@ -1,6 +1,6 @@
-#ifndef Ta35_BITSET_H
-#define Ta35_BITSET_H
+#ifndef Ta3_BITSET_H
+#define Ta3_BITSET_H
#ifdef __cplusplus
extern "C" {
#endif
@@ -29,4 +29,4 @@ void mergebitset(bitset bs1, bitset bs2, int nbits);
#ifdef __cplusplus
}
#endif
-#endif /* !Ta35_BITSET_H */
+#endif /* !Ta3_BITSET_H */
diff --git a/ast35/Include/compile.h b/ast3/Include/compile.h
similarity index 77%
rename from ast35/Include/compile.h
rename to ast3/Include/compile.h
index f4cd238..232be7d 100644
--- a/ast35/Include/compile.h
+++ b/ast3/Include/compile.h
@@ -1,5 +1,5 @@
-#ifndef Ta35_COMPILE_H
-#define Ta35_COMPILE_H
+#ifndef Ta3_COMPILE_H
+#define Ta3_COMPILE_H
/* These definitions must match corresponding definitions in graminit.h.
There's code in compile.c that checks that they are the same. */
@@ -8,4 +8,4 @@
#define Py_eval_input 258
#define Py_func_type_input 342
-#endif /* !Ta35_COMPILE_H */
+#endif /* !Ta3_COMPILE_H */
diff --git a/ast35/Include/errcode.h b/ast3/Include/errcode.h
similarity index 95%
rename from ast35/Include/errcode.h
rename to ast3/Include/errcode.h
index 1b62b92..4797590 100644
--- a/ast35/Include/errcode.h
+++ b/ast3/Include/errcode.h
@@ -1,5 +1,5 @@
-#ifndef Ta35_ERRCODE_H
-#define Ta35_ERRCODE_H
+#ifndef Ta3_ERRCODE_H
+#define Ta3_ERRCODE_H
#ifdef __cplusplus
extern "C" {
#endif
@@ -35,4 +35,4 @@ extern "C" {
#ifdef __cplusplus
}
#endif
-#endif /* !Ta35_ERRCODE_H */
+#endif /* !Ta3_ERRCODE_H */
diff --git a/ast35/Include/graminit.h b/ast3/Include/graminit.h
similarity index 100%
rename from ast35/Include/graminit.h
rename to ast3/Include/graminit.h
diff --git a/ast35/Include/grammar.h b/ast3/Include/grammar.h
similarity index 86%
rename from ast35/Include/grammar.h
rename to ast3/Include/grammar.h
index 5f9b25d..6cc70c2 100644
--- a/ast35/Include/grammar.h
+++ b/ast3/Include/grammar.h
@@ -1,8 +1,8 @@
/* Grammar interface */
-#ifndef Ta35_GRAMMAR_H
-#define Ta35_GRAMMAR_H
+#ifndef Ta3_GRAMMAR_H
+#define Ta3_GRAMMAR_H
#ifdef __cplusplus
extern "C" {
#endif
@@ -69,20 +69,21 @@ typedef struct {
/* FUNCTIONS */
grammar *newgrammar(int start);
+void freegrammar(grammar *g);
dfa *adddfa(grammar *g, int type, const char *name);
int addstate(dfa *d);
void addarc(dfa *d, int from, int to, int lbl);
-dfa *Ta35Grammar_FindDFA(grammar *g, int type);
+dfa *Ta3Grammar_FindDFA(grammar *g, int type);
int addlabel(labellist *ll, int type, const char *str);
int findlabel(labellist *ll, int type, const char *str);
-const char *Ta35Grammar_LabelRepr(label *lb);
+const char *Ta3Grammar_LabelRepr(label *lb);
void translatelabels(grammar *g);
void addfirstsets(grammar *g);
-void Ta35Grammar_AddAccelerators(grammar *g);
-void Ta35Grammar_RemoveAccelerators(grammar *);
+void Ta3Grammar_AddAccelerators(grammar *g);
+void Ta3Grammar_RemoveAccelerators(grammar *);
void printgrammar(grammar *g, FILE *fp);
void printnonterminals(grammar *g, FILE *fp);
@@ -90,4 +91,4 @@ void printnonterminals(grammar *g, FILE *fp);
#ifdef __cplusplus
}
#endif
-#endif /* !Ta35_GRAMMAR_H */
+#endif /* !Ta3_GRAMMAR_H */
diff --git a/ast35/Include/node.h b/ast3/Include/node.h
similarity index 73%
rename from ast35/Include/node.h
rename to ast3/Include/node.h
index 27dc06f..aa5fece 100644
--- a/ast35/Include/node.h
+++ b/ast3/Include/node.h
@@ -1,8 +1,8 @@
/* Parse tree node interface */
-#ifndef Ta35_NODE_H
-#define Ta35_NODE_H
+#ifndef Ta3_NODE_H
+#define Ta3_NODE_H
#ifdef __cplusplus
extern "C" {
#endif
@@ -16,12 +16,12 @@ typedef struct _node {
struct _node *n_child;
} node;
-node *Ta35Node_New(int type);
-int Ta35Node_AddChild(node *n, int type,
+extern node *Ta3Node_New(int type);
+extern int Ta3Node_AddChild(node *n, int type,
char *str, int lineno, int col_offset);
-void Ta35Node_Free(node *n);
+extern void Ta3Node_Free(node *n);
#ifndef Py_LIMITED_API
-Py_ssize_t _Ta35Node_SizeOf(node *n);
+extern Py_ssize_t _Ta3Node_SizeOf(node *n);
#endif
/* Node access functions */
@@ -36,9 +36,9 @@ Py_ssize_t _Ta35Node_SizeOf(node *n);
/* Assert that the type of a node is what we expect */
#define REQ(n, type) assert(TYPE(n) == (type))
-void Ta35Node_ListTree(node *);
+extern void PyNode_ListTree(node *);
#ifdef __cplusplus
}
#endif
-#endif /* !Ta35_NODE_H */
+#endif /* !Ta3_NODE_H */
diff --git a/ast35/Include/parsetok.h b/ast3/Include/parsetok.h
similarity index 80%
rename from ast35/Include/parsetok.h
rename to ast3/Include/parsetok.h
index c7bd194..8537df6 100644
--- a/ast35/Include/parsetok.h
+++ b/ast3/Include/parsetok.h
@@ -1,8 +1,8 @@
/* Parser-tokenizer link interface */
#ifndef Py_LIMITED_API
-#ifndef Ta35_PARSETOK_H
-#define Ta35_PARSETOK_H
+#ifndef Ta3_PARSETOK_H
+#define Ta3_PARSETOK_H
#ifdef __cplusplus
extern "C" {
#endif
@@ -35,15 +35,15 @@ typedef struct {
#define PyPARSE_IGNORE_COOKIE 0x0010
#define PyPARSE_BARRY_AS_BDFL 0x0020
-node *Ta35Parser_ParseString(const char *, grammar *, int,
+extern node * Ta3Parser_ParseString(const char *, grammar *, int,
perrdetail *);
-node *Ta35Parser_ParseFile (FILE *, const char *, grammar *, int,
+extern node * Ta3Parser_ParseFile (FILE *, const char *, grammar *, int,
const char *, const char *,
perrdetail *);
-node *Ta35Parser_ParseStringFlags(const char *, grammar *, int,
+extern node * Ta3Parser_ParseStringFlags(const char *, grammar *, int,
perrdetail *, int);
-node *Ta35Parser_ParseFileFlags(
+extern node * Ta3Parser_ParseFileFlags(
FILE *fp,
const char *filename, /* decoded from the filesystem encoding */
const char *enc,
@@ -53,7 +53,7 @@ node *Ta35Parser_ParseFileFlags(
const char *ps2,
perrdetail *err_ret,
int flags);
-node *Ta35Parser_ParseFileFlagsEx(
+extern node * Ta3Parser_ParseFileFlagsEx(
FILE *fp,
const char *filename, /* decoded from the filesystem encoding */
const char *enc,
@@ -63,7 +63,7 @@ node *Ta35Parser_ParseFileFlagsEx(
const char *ps2,
perrdetail *err_ret,
int *flags);
-node *Ta35Parser_ParseFileObject(
+extern node * Ta3Parser_ParseFileObject(
FILE *fp,
PyObject *filename,
const char *enc,
@@ -74,21 +74,21 @@ node *Ta35Parser_ParseFileObject(
perrdetail *err_ret,
int *flags);
-node *Ta35Parser_ParseStringFlagsFilename(
+extern node * Ta3Parser_ParseStringFlagsFilename(
const char *s,
const char *filename, /* decoded from the filesystem encoding */
grammar *g,
int start,
perrdetail *err_ret,
int flags);
-node *Ta35Parser_ParseStringFlagsFilenameEx(
+extern node * Ta3Parser_ParseStringFlagsFilenameEx(
const char *s,
const char *filename, /* decoded from the filesystem encoding */
grammar *g,
int start,
perrdetail *err_ret,
int *flags);
-node *Ta35Parser_ParseStringObject(
+extern node * Ta3Parser_ParseStringObject(
const char *s,
PyObject *filename,
grammar *g,
@@ -104,5 +104,5 @@ PyAPI_FUNC(void) PyParser_ClearError(perrdetail *);
#ifdef __cplusplus
}
#endif
-#endif /* !Ta35_PARSETOK_H */
+#endif /* !Ta3_PARSETOK_H */
#endif /* !Py_LIMITED_API */
diff --git a/ast35/Include/token.h b/ast3/Include/token.h
similarity index 83%
rename from ast35/Include/token.h
rename to ast3/Include/token.h
index f4a4e32..df018c2 100644
--- a/ast35/Include/token.h
+++ b/ast3/Include/token.h
@@ -1,8 +1,8 @@
/* Token types */
#ifndef Py_LIMITED_API
-#ifndef Ta35_TOKEN_H
-#define Ta35_TOKEN_H
+#ifndef Ta3_TOKEN_H
+#define Ta3_TOKEN_H
#ifdef __cplusplus
extern "C" {
#endif
@@ -62,7 +62,7 @@ extern "C" {
#define ATEQUAL 50
#define RARROW 51
#define ELLIPSIS 52
-/* Don't forget to update the table _Ta35Parser_TokenNames in tokenizer.c! */
+/* Don't forget to update the table _Ta3Parser_TokenNames in tokenizer.c! */
#define OP 53
#define AWAIT 54
#define ASYNC 55
@@ -80,13 +80,13 @@ extern "C" {
#define ISEOF(x) ((x) == ENDMARKER)
-extern const char *_Ta35Parser_TokenNames[]; /* Token names */
-int Ta35Token_OneChar(int);
-int Ta35Token_TwoChars(int, int);
-int Ta35Token_ThreeChars(int, int, int);
+extern const char *_Ta3Parser_TokenNames[]; /* Token names */
+extern int Ta3Token_OneChar(int);
+extern int Ta3Token_TwoChars(int, int);
+extern int Ta3Token_ThreeChars(int, int, int);
#ifdef __cplusplus
}
#endif
-#endif /* !Ta35_TOKEN_H */
+#endif /* !Ta3_TOKEN_H */
#endif /* Py_LIMITED_API */
diff --git a/ast35/Parser/acceler.c b/ast3/Parser/acceler.c
similarity index 95%
rename from ast35/Parser/acceler.c
rename to ast3/Parser/acceler.c
index 15b6fab..7032d39 100644
--- a/ast35/Parser/acceler.c
+++ b/ast3/Parser/acceler.c
@@ -21,7 +21,7 @@ static void fixdfa(grammar *, dfa *);
static void fixstate(grammar *, state *);
void
-Ta35Grammar_AddAccelerators(grammar *g)
+Ta3Grammar_AddAccelerators(grammar *g)
{
dfa *d;
int i;
@@ -32,7 +32,7 @@ Ta35Grammar_AddAccelerators(grammar *g)
}
void
-Ta35Grammar_RemoveAccelerators(grammar *g)
+Ta3Grammar_RemoveAccelerators(grammar *g)
{
dfa *d;
int i;
@@ -85,7 +85,7 @@ fixstate(grammar *g, state *s)
continue;
}
if (ISNONTERMINAL(type)) {
- dfa *d1 = Ta35Grammar_FindDFA(g, type);
+ dfa *d1 = Ta3Grammar_FindDFA(g, type);
int ibit;
if (type - NT_OFFSET >= (1 << 7)) {
printf("XXX too high nonterminal number!\n");
diff --git a/ast35/Parser/bitset.c b/ast3/Parser/bitset.c
similarity index 100%
rename from ast35/Parser/bitset.c
rename to ast3/Parser/bitset.c
diff --git a/ast35/Parser/grammar.c b/ast3/Parser/grammar.c
similarity index 84%
rename from ast35/Parser/grammar.c
rename to ast3/Parser/grammar.c
index 046a6e7..e3a414d 100644
--- a/ast35/Parser/grammar.c
+++ b/ast3/Parser/grammar.c
@@ -28,6 +28,23 @@ newgrammar(int start)
return g;
}
+void
+freegrammar(grammar *g)
+{
+ int i, j;
+ for (i = 0; i < g->g_ndfas; i++) {
+ free(g->g_dfa[i].d_name);
+ for (j = 0; j < g->g_dfa[i].d_nstates; j++)
+ PyObject_FREE(g->g_dfa[i].d_state[j].s_arc);
+ PyObject_FREE(g->g_dfa[i].d_state);
+ }
+ PyObject_FREE(g->g_dfa);
+ for (i = 0; i < g->g_ll.ll_nlabels; i++)
+ free(g->g_ll.ll_label[i].lb_str);
+ PyObject_FREE(g->g_ll.ll_label);
+ PyObject_FREE(g);
+}
+
dfa *
adddfa(grammar *g, int type, const char *name)
{
@@ -63,7 +80,7 @@ addstate(dfa *d)
s->s_upper = 0;
s->s_accel = NULL;
s->s_accept = 0;
- return Py_SAFE_DOWNCAST(s - d->d_state, Py_intptr_t, int);
+ return Py_SAFE_DOWNCAST(s - d->d_state, intptr_t, int);
}
void
@@ -104,8 +121,8 @@ addlabel(labellist *ll, int type, const char *str)
lb->lb_str = strdup(str);
if (Py_DebugFlag)
printf("Label @ %8p, %d: %s\n", ll, ll->ll_nlabels,
- Ta35Grammar_LabelRepr(lb));
- return Py_SAFE_DOWNCAST(lb - ll->ll_label, Py_intptr_t, int);
+ Ta3Grammar_LabelRepr(lb));
+ return Py_SAFE_DOWNCAST(lb - ll->ll_label, intptr_t, int);
}
/* Same, but rather dies than adds */
@@ -122,7 +139,13 @@ findlabel(labellist *ll, int type, const char *str)
}
fprintf(stderr, "Label %d/'%s' not found\n", type, str);
Py_FatalError("grammar.c:findlabel()");
+
+ /* Py_FatalError() is declared with __attribute__((__noreturn__)).
+ GCC emits a warning without "return 0;" (compiler bug!), but Clang is
+ smarter and emits a warning on the return... */
+#ifndef __clang__
return 0; /* Make gcc -Wall happy */
+#endif
}
/* Forward */
@@ -147,7 +170,7 @@ translabel(grammar *g, label *lb)
int i;
if (Py_DebugFlag)
- printf("Translating label %s ...\n", Ta35Grammar_LabelRepr(lb));
+ printf("Translating label %s ...\n", Ta3Grammar_LabelRepr(lb));
if (lb->lb_type == NAME) {
for (i = 0; i < g->g_ndfas; i++) {
@@ -164,7 +187,7 @@ translabel(grammar *g, label *lb)
}
}
for (i = 0; i < (int)N_TOKENS; i++) {
- if (strcmp(lb->lb_str, _Ta35Parser_TokenNames[i]) == 0) {
+ if (strcmp(lb->lb_str, _Ta3Parser_TokenNames[i]) == 0) {
if (Py_DebugFlag)
printf("Label %s is terminal %d.\n",
lb->lb_str, i);
@@ -205,7 +228,7 @@ translabel(grammar *g, label *lb)
lb->lb_str = dest;
}
else if (lb->lb_str[2] == lb->lb_str[0]) {
- int type = (int) Ta35Token_OneChar(lb->lb_str[1]);
+ int type = (int) Ta3Token_OneChar(lb->lb_str[1]);
if (type != OP) {
lb->lb_type = type;
free(lb->lb_str);
@@ -216,7 +239,7 @@ translabel(grammar *g, label *lb)
lb->lb_str);
}
else if (lb->lb_str[2] && lb->lb_str[3] == lb->lb_str[0]) {
- int type = (int) Ta35Token_TwoChars(lb->lb_str[1],
+ int type = (int) Ta3Token_TwoChars(lb->lb_str[1],
lb->lb_str[2]);
if (type != OP) {
lb->lb_type = type;
@@ -228,7 +251,7 @@ translabel(grammar *g, label *lb)
lb->lb_str);
}
else if (lb->lb_str[2] && lb->lb_str[3] && lb->lb_str[4] == lb->lb_str[0]) {
- int type = (int) Ta35Token_ThreeChars(lb->lb_str[1],
+ int type = (int) Ta3Token_ThreeChars(lb->lb_str[1],
lb->lb_str[2],
lb->lb_str[3]);
if (type != OP) {
@@ -246,5 +269,5 @@ translabel(grammar *g, label *lb)
}
else
printf("Can't translate label '%s'\n",
- Ta35Grammar_LabelRepr(lb));
+ Ta3Grammar_LabelRepr(lb));
}
diff --git a/ast35/Parser/grammar1.c b/ast3/Parser/grammar1.c
similarity index 84%
rename from ast35/Parser/grammar1.c
rename to ast3/Parser/grammar1.c
index cbeaf06..7c9f9ba 100644
--- a/ast35/Parser/grammar1.c
+++ b/ast3/Parser/grammar1.c
@@ -9,7 +9,7 @@
/* Return the DFA for the given type */
dfa *
-Ta35Grammar_FindDFA(grammar *g, int type)
+Ta3Grammar_FindDFA(grammar *g, int type)
{
dfa *d;
#if 1
@@ -31,7 +31,7 @@ Ta35Grammar_FindDFA(grammar *g, int type)
}
const char *
-Ta35Grammar_LabelRepr(label *lb)
+Ta3Grammar_LabelRepr(label *lb)
{
static char buf[100];
@@ -47,10 +47,10 @@ Ta35Grammar_LabelRepr(label *lb)
}
else if (lb->lb_type < N_TOKENS) {
if (lb->lb_str == NULL)
- return _Ta35Parser_TokenNames[lb->lb_type];
+ return _Ta3Parser_TokenNames[lb->lb_type];
else {
PyOS_snprintf(buf, sizeof(buf), "%.32s(%.32s)",
- _Ta35Parser_TokenNames[lb->lb_type], lb->lb_str);
+ _Ta3Parser_TokenNames[lb->lb_type], lb->lb_str);
return buf;
}
}
diff --git a/ast35/Parser/node.c b/ast3/Parser/node.c
similarity index 89%
rename from ast35/Parser/node.c
rename to ast3/Parser/node.c
index 9747c2e..68e8fb2 100644
--- a/ast35/Parser/node.c
+++ b/ast3/Parser/node.c
@@ -4,13 +4,8 @@
#include "node.h"
#include "errcode.h"
-#if PY_MINOR_VERSION < 3
-#define _Py_SIZE_ROUND_UP(n, a) (((size_t)(n) + \
- (size_t)((a) - 1)) & ~(size_t)((a) - 1))
-#endif
-
node *
-Ta35Node_New(int type)
+Ta3Node_New(int type)
{
node *n = (node *) PyObject_MALLOC(1 * sizeof(node));
if (n == NULL)
@@ -39,7 +34,7 @@ fancy_roundup(int n)
}
/* A gimmick to make massive numbers of reallocs quicker. The result is
- * a number >= the input. In Ta35Node_AddChild, it's used like so, when
+ * a number >= the input. In Ta3Node_AddChild, it's used like so, when
* we're about to add child number current_size + 1:
*
* if XXXROUNDUP(current_size) < XXXROUNDUP(current_size + 1):
@@ -68,7 +63,7 @@ fancy_roundup(int n)
*
* In a run of compileall across the 2.3a0 Lib directory, Andrew MacIntyre
* reported that, with this scheme, 89% of PyObject_REALLOC calls in
- * Ta35Node_AddChild passed 1 for the size, and 9% passed 4. So this usually
+ * Ta3Node_AddChild passed 1 for the size, and 9% passed 4. So this usually
* wastes very little memory, but is very effective at sidestepping
* platform-realloc disasters on vulnerable platforms.
*
@@ -81,7 +76,7 @@ fancy_roundup(int n)
int
-Ta35Node_AddChild(node *n1, int type, char *str, int lineno, int col_offset)
+Ta3Node_AddChild(node *n1, int type, char *str, int lineno, int col_offset)
{
const int nch = n1->n_nchildren;
int current_capacity;
@@ -96,7 +91,7 @@ Ta35Node_AddChild(node *n1, int type, char *str, int lineno, int col_offset)
if (current_capacity < 0 || required_capacity < 0)
return E_OVERFLOW;
if (current_capacity < required_capacity) {
- if ((size_t)required_capacity > PY_SIZE_MAX / sizeof(node)) {
+ if ((size_t)required_capacity > SIZE_MAX / sizeof(node)) {
return E_NOMEM;
}
n = n1->n_child;
@@ -123,7 +118,7 @@ static Py_ssize_t sizeofchildren(node *n);
void
-Ta35Node_Free(node *n)
+Ta3Node_Free(node *n)
{
if (n != NULL) {
freechildren(n);
@@ -132,7 +127,7 @@ Ta35Node_Free(node *n)
}
Py_ssize_t
-_Ta35Node_SizeOf(node *n)
+_Ta3Node_SizeOf(node *n)
{
Py_ssize_t res = 0;
diff --git a/ast35/Parser/parser.c b/ast3/Parser/parser.c
similarity index 91%
rename from ast35/Parser/parser.c
rename to ast3/Parser/parser.c
index c3cd9e8..5d05928 100644
--- a/ast35/Parser/parser.c
+++ b/ast3/Parser/parser.c
@@ -69,12 +69,12 @@ s_pop(stack *s)
/* PARSER CREATION */
parser_state *
-Ta35Parser_New(grammar *g, int start)
+Ta3Parser_New(grammar *g, int start)
{
parser_state *ps;
if (!g->g_accel)
- Ta35Grammar_AddAccelerators(g);
+ Ta3Grammar_AddAccelerators(g);
ps = (parser_state *)PyMem_MALLOC(sizeof(parser_state));
if (ps == NULL)
return NULL;
@@ -82,22 +82,22 @@ Ta35Parser_New(grammar *g, int start)
#ifdef PY_PARSER_REQUIRES_FUTURE_KEYWORD
ps->p_flags = 0;
#endif
- ps->p_tree = Ta35Node_New(start);
+ ps->p_tree = Ta3Node_New(start);
if (ps->p_tree == NULL) {
PyMem_FREE(ps);
return NULL;
}
s_reset(&ps->p_stack);
- (void) s_push(&ps->p_stack, Ta35Grammar_FindDFA(g, start), ps->p_tree);
+ (void) s_push(&ps->p_stack, Ta3Grammar_FindDFA(g, start), ps->p_tree);
return ps;
}
void
-Ta35Parser_Delete(parser_state *ps)
+Ta3Parser_Delete(parser_state *ps)
{
/* NB If you want to save the parse tree,
you must set p_tree to NULL before calling delparser! */
- Ta35Node_Free(ps->p_tree);
+ Ta3Node_Free(ps->p_tree);
PyMem_FREE(ps);
}
@@ -109,7 +109,7 @@ shift(stack *s, int type, char *str, int newstate, int lineno, int col_offset)
{
int err;
assert(!s_empty(s));
- err = Ta35Node_AddChild(s->s_top->s_parent, type, str, lineno, col_offset);
+ err = Ta3Node_AddChild(s->s_top->s_parent, type, str, lineno, col_offset);
if (err)
return err;
s->s_top->s_state = newstate;
@@ -123,7 +123,7 @@ push(stack *s, int type, dfa *d, int newstate, int lineno, int col_offset)
node *n;
n = s->s_top->s_parent;
assert(!s_empty(s));
- err = Ta35Node_AddChild(n, type, (char *)NULL, lineno, col_offset);
+ err = Ta3Node_AddChild(n, type, (char *)NULL, lineno, col_offset);
if (err)
return err;
s->s_top->s_state = newstate;
@@ -140,21 +140,20 @@ classify(parser_state *ps, int type, const char *str)
int n = g->g_ll.ll_nlabels;
if (type == NAME) {
- const char *s = str;
label *l = g->g_ll.ll_label;
int i;
for (i = n; i > 0; i--, l++) {
if (l->lb_type != NAME || l->lb_str == NULL ||
- l->lb_str[0] != s[0] ||
- strcmp(l->lb_str, s) != 0)
+ l->lb_str[0] != str[0] ||
+ strcmp(l->lb_str, str) != 0)
continue;
#ifdef PY_PARSER_REQUIRES_FUTURE_KEYWORD
#if 0
/* Leaving this in as an example */
if (!(ps->p_flags & CO_FUTURE_WITH_STATEMENT)) {
- if (s[0] == 'w' && strcmp(s, "with") == 0)
+ if (str[0] == 'w' && strcmp(str, "with") == 0)
break; /* not a keyword yet */
- else if (s[0] == 'a' && strcmp(s, "as") == 0)
+ else if (str[0] == 'a' && strcmp(str, "as") == 0)
break; /* not a keyword yet */
}
#endif
@@ -225,13 +224,13 @@ future_hack(parser_state *ps)
#endif /* future keyword */
int
-Ta35Parser_AddToken(parser_state *ps, int type, char *str,
+Ta3Parser_AddToken(parser_state *ps, int type, char *str,
int lineno, int col_offset, int *expected_ret)
{
int ilabel;
int err;
- D(printf("Token %s/'%s' ... ", _Ta35Parser_TokenNames[type], str));
+ D(printf("Token %s/'%s' ... ", _Ta3Parser_TokenNames[type], str));
/* Find out which label this token is */
ilabel = classify(ps, type, str);
@@ -255,7 +254,7 @@ Ta35Parser_AddToken(parser_state *ps, int type, char *str,
/* Push non-terminal */
int nt = (x >> 8) + NT_OFFSET;
int arrow = x & ((1<<7)-1);
- dfa *d1 = Ta35Grammar_FindDFA(
+ dfa *d1 = Ta3Grammar_FindDFA(
ps->p_grammar, nt);
if ((err = push(&ps->p_stack, nt, d1,
arrow, lineno, col_offset)) > 0) {
@@ -349,7 +348,7 @@ dumptree(grammar *g, node *n)
label l;
l.lb_type = TYPE(n);
l.lb_str = STR(n);
- printf("%s", Ta35Grammar_LabelRepr(&l));
+ printf("%s", Ta3Grammar_LabelRepr(&l));
if (ISNONTERMINAL(TYPE(n))) {
printf("(");
for (i = 0; i < NCH(n); i++) {
@@ -374,7 +373,7 @@ showtree(grammar *g, node *n)
showtree(g, CHILD(n, i));
}
else if (ISTERMINAL(TYPE(n))) {
- printf("%s", _Ta35Parser_TokenNames[TYPE(n)]);
+ printf("%s", _Ta3Parser_TokenNames[TYPE(n)]);
if (TYPE(n) == NUMBER || TYPE(n) == NAME)
printf("(%s)", STR(n));
printf(" ");
@@ -395,7 +394,7 @@ printtree(parser_state *ps)
printf("\n");
}
printf("Listing:\n");
- Ta35Node_ListTree(ps->p_tree);
+ PyNode_ListTree(ps->p_tree);
printf("\n");
}
diff --git a/ast35/Parser/parser.h b/ast3/Parser/parser.h
similarity index 71%
rename from ast35/Parser/parser.h
rename to ast3/Parser/parser.h
index cfb926e..1057459 100644
--- a/ast35/Parser/parser.h
+++ b/ast3/Parser/parser.h
@@ -1,5 +1,5 @@
-#ifndef Ta35_PARSER_H
-#define Ta35_PARSER_H
+#ifndef Ta3_PARSER_H
+#define Ta3_PARSER_H
#ifdef __cplusplus
extern "C" {
#endif
@@ -30,13 +30,13 @@ typedef struct {
#endif
} parser_state;
-parser_state *Ta35Parser_New(grammar *g, int start);
-void Ta35Parser_Delete(parser_state *ps);
-int Ta35Parser_AddToken(parser_state *ps, int type, char *str, int lineno, int col_offset,
+parser_state *Ta3Parser_New(grammar *g, int start);
+void Ta3Parser_Delete(parser_state *ps);
+int Ta3Parser_AddToken(parser_state *ps, int type, char *str, int lineno, int col_offset,
int *expected_ret);
-void Ta35Grammar_AddAccelerators(grammar *g);
+void Ta3Grammar_AddAccelerators(grammar *g);
#ifdef __cplusplus
}
#endif
-#endif /* !Ta35_PARSER_H */
+#endif /* !Ta3_PARSER_H */
diff --git a/ast35/Parser/parsetok.c b/ast3/Parser/parsetok.c
similarity index 85%
rename from ast35/Parser/parsetok.c
rename to ast3/Parser/parsetok.c
index 252471d..5529feb 100644
--- a/ast35/Parser/parsetok.c
+++ b/ast3/Parser/parsetok.c
@@ -17,31 +17,31 @@ static int initerr(perrdetail *err_ret, PyObject * filename);
/* Parse input coming from a string. Return error code, print some errors. */
node *
-Ta35Parser_ParseString(const char *s, grammar *g, int start, perrdetail *err_ret)
+Ta3Parser_ParseString(const char *s, grammar *g, int start, perrdetail *err_ret)
{
- return Ta35Parser_ParseStringFlagsFilename(s, NULL, g, start, err_ret, 0);
+ return Ta3Parser_ParseStringFlagsFilename(s, NULL, g, start, err_ret, 0);
}
node *
-Ta35Parser_ParseStringFlags(const char *s, grammar *g, int start,
+Ta3Parser_ParseStringFlags(const char *s, grammar *g, int start,
perrdetail *err_ret, int flags)
{
- return Ta35Parser_ParseStringFlagsFilename(s, NULL,
+ return Ta3Parser_ParseStringFlagsFilename(s, NULL,
g, start, err_ret, flags);
}
node *
-Ta35Parser_ParseStringFlagsFilename(const char *s, const char *filename,
+Ta3Parser_ParseStringFlagsFilename(const char *s, const char *filename,
grammar *g, int start,
perrdetail *err_ret, int flags)
{
int iflags = flags;
- return Ta35Parser_ParseStringFlagsFilenameEx(s, filename, g, start,
+ return Ta3Parser_ParseStringFlagsFilenameEx(s, filename, g, start,
err_ret, &iflags);
}
node *
-Ta35Parser_ParseStringObject(const char *s, PyObject *filename,
+Ta3Parser_ParseStringObject(const char *s, PyObject *filename,
grammar *g, int start,
perrdetail *err_ret, int *flags)
{
@@ -52,9 +52,9 @@ Ta35Parser_ParseStringObject(const char *s, PyObject *filename,
return NULL;
if (*flags & PyPARSE_IGNORE_COOKIE)
- tok = Ta35Tokenizer_FromUTF8(s, exec_input);
+ tok = Ta3Tokenizer_FromUTF8(s, exec_input);
else
- tok = Ta35Tokenizer_FromString(s, exec_input);
+ tok = Ta3Tokenizer_FromString(s, exec_input);
if (tok == NULL) {
err_ret->error = PyErr_Occurred() ? E_DECODE : E_NOMEM;
return NULL;
@@ -68,7 +68,7 @@ Ta35Parser_ParseStringObject(const char *s, PyObject *filename,
}
node *
-Ta35Parser_ParseStringFlagsFilenameEx(const char *s, const char *filename_str,
+Ta3Parser_ParseStringFlagsFilenameEx(const char *s, const char *filename_str,
grammar *g, int start,
perrdetail *err_ret, int *flags)
{
@@ -83,7 +83,7 @@ Ta35Parser_ParseStringFlagsFilenameEx(const char *s, const char *filename_str,
}
}
#endif
- n = Ta35Parser_ParseStringObject(s, filename, g, start, err_ret, flags);
+ n = Ta3Parser_ParseStringObject(s, filename, g, start, err_ret, flags);
#ifndef PGEN
Py_XDECREF(filename);
#endif
@@ -93,27 +93,27 @@ Ta35Parser_ParseStringFlagsFilenameEx(const char *s, const char *filename_str,
/* Parse input coming from a file. Return error code, print some errors. */
node *
-Ta35Parser_ParseFile(FILE *fp, const char *filename, grammar *g, int start,
+Ta3Parser_ParseFile(FILE *fp, const char *filename, grammar *g, int start,
const char *ps1, const char *ps2,
perrdetail *err_ret)
{
- return Ta35Parser_ParseFileFlags(fp, filename, NULL,
+ return Ta3Parser_ParseFileFlags(fp, filename, NULL,
g, start, ps1, ps2, err_ret, 0);
}
node *
-Ta35Parser_ParseFileFlags(FILE *fp, const char *filename, const char *enc,
+Ta3Parser_ParseFileFlags(FILE *fp, const char *filename, const char *enc,
grammar *g, int start,
const char *ps1, const char *ps2,
perrdetail *err_ret, int flags)
{
int iflags = flags;
- return Ta35Parser_ParseFileFlagsEx(fp, filename, enc, g, start, ps1,
+ return Ta3Parser_ParseFileFlagsEx(fp, filename, enc, g, start, ps1,
ps2, err_ret, &iflags);
}
node *
-Ta35Parser_ParseFileObject(FILE *fp, PyObject *filename,
+Ta3Parser_ParseFileObject(FILE *fp, PyObject *filename,
const char *enc, grammar *g, int start,
const char *ps1, const char *ps2,
perrdetail *err_ret, int *flags)
@@ -123,7 +123,7 @@ Ta35Parser_ParseFileObject(FILE *fp, PyObject *filename,
if (initerr(err_ret, filename) < 0)
return NULL;
- if ((tok = Ta35Tokenizer_FromFile(fp, enc, ps1, ps2)) == NULL) {
+ if ((tok = Ta3Tokenizer_FromFile(fp, enc, ps1, ps2)) == NULL) {
err_ret->error = E_NOMEM;
return NULL;
}
@@ -135,7 +135,7 @@ Ta35Parser_ParseFileObject(FILE *fp, PyObject *filename,
}
node *
-Ta35Parser_ParseFileFlagsEx(FILE *fp, const char *filename,
+Ta3Parser_ParseFileFlagsEx(FILE *fp, const char *filename,
const char *enc, grammar *g, int start,
const char *ps1, const char *ps2,
perrdetail *err_ret, int *flags)
@@ -151,7 +151,7 @@ Ta35Parser_ParseFileFlagsEx(FILE *fp, const char *filename,
}
}
#endif
- n = Ta35Parser_ParseFileObject(fp, fileobj, enc, g,
+ n = Ta3Parser_ParseFileObject(fp, fileobj, enc, g,
start, ps1, ps2, err_ret, flags);
#ifndef PGEN
Py_XDECREF(fileobj);
@@ -161,10 +161,10 @@ Ta35Parser_ParseFileFlagsEx(FILE *fp, const char *filename,
#ifdef PY_PARSER_REQUIRES_FUTURE_KEYWORD
#if 0
-static char with_msg[] =
+static const char with_msg[] =
"%s:%d: Warning: 'with' will become a reserved keyword in Python 2.6\n";
-static char as_msg[] =
+static const char as_msg[] =
"%s:%d: Warning: 'as' will become a reserved keyword in Python 2.6\n";
static void
@@ -223,13 +223,13 @@ parsetok(struct tok_state *tok, grammar *g, int start, perrdetail *err_ret,
growable_int_array type_ignores;
if (!growable_int_array_init(&type_ignores, 10)) {
err_ret->error = E_NOMEM;
- Ta35Tokenizer_Free(tok);
+ Ta3Tokenizer_Free(tok);
return NULL;
}
- if ((ps = Ta35Parser_New(g, start)) == NULL) {
+ if ((ps = Ta3Parser_New(g, start)) == NULL) {
err_ret->error = E_NOMEM;
- Ta35Tokenizer_Free(tok);
+ Ta3Tokenizer_Free(tok);
return NULL;
}
#ifdef PY_PARSER_REQUIRES_FUTURE_KEYWORD
@@ -244,7 +244,7 @@ parsetok(struct tok_state *tok, grammar *g, int start, perrdetail *err_ret,
char *str;
int col_offset;
- type = Ta35Tokenizer_Get(tok, &a, &b);
+ type = Ta3Tokenizer_Get(tok, &a, &b);
if (type == ERRORTOKEN) {
err_ret->error = tok->done;
break;
@@ -294,7 +294,7 @@ parsetok(struct tok_state *tok, grammar *g, int start, perrdetail *err_ret,
#endif
if (a >= tok->line_start)
col_offset = Py_SAFE_DOWNCAST(a - tok->line_start,
- Py_intptr_t, int);
+ intptr_t, int);
else
col_offset = -1;
@@ -307,7 +307,7 @@ parsetok(struct tok_state *tok, grammar *g, int start, perrdetail *err_ret,
}
if ((err_ret->error =
- Ta35Parser_AddToken(ps, (int)type, str,
+ Ta3Parser_AddToken(ps, (int)type, str,
tok->lineno, col_offset,
&(err_ret->expected))) != E_OK) {
if (err_ret->error != E_DONE) {
@@ -333,7 +333,7 @@ parsetok(struct tok_state *tok, grammar *g, int start, perrdetail *err_ret,
REQ(ch, ENDMARKER);
for (i = 0; i < type_ignores.num_items; i++) {
- Ta35Node_AddChild(ch, TYPE_IGNORE, NULL, type_ignores.items[i], 0);
+ Ta3Node_AddChild(ch, TYPE_IGNORE, NULL, type_ignores.items[i], 0);
}
}
growable_int_array_deallocate(&type_ignores);
@@ -356,7 +356,7 @@ parsetok(struct tok_state *tok, grammar *g, int start, perrdetail *err_ret,
if (c != '#') {
err_ret->error = E_BADSINGLE;
- Ta35Node_Free(n);
+ Ta3Node_Free(n);
n = NULL;
break;
}
@@ -374,7 +374,7 @@ parsetok(struct tok_state *tok, grammar *g, int start, perrdetail *err_ret,
#ifdef PY_PARSER_REQUIRES_FUTURE_KEYWORD
*flags = ps->p_flags;
#endif
- Ta35Parser_Delete(ps);
+ Ta3Parser_Delete(ps);
if (n == NULL) {
if (tok->done == E_EOF)
@@ -396,7 +396,7 @@ parsetok(struct tok_state *tok, grammar *g, int start, perrdetail *err_ret,
/* 'nodes->n_str' uses PyObject_*, while 'tok->encoding' was
* allocated using PyMem_
*/
- node* r = Ta35Node_New(encoding_decl);
+ node* r = Ta3Node_New(encoding_decl);
if (r)
r->n_str = PyObject_MALLOC(strlen(tok->encoding)+1);
if (!r || !r->n_str) {
@@ -415,7 +415,7 @@ parsetok(struct tok_state *tok, grammar *g, int start, perrdetail *err_ret,
}
done:
- Ta35Tokenizer_Free(tok);
+ Ta3Tokenizer_Free(tok);
return n;
}
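
[Editor's note] Besides the Ta35 -> Ta3 renames, the parsetok.c hunks above keep the type-comment machinery: TYPE_IGNORE tokens collected during tokenization are attached as extra children of the ENDMARKER node. A minimal sketch of where that surfaces at the Python level, assuming typed_ast.ast3 exposes parse() the way the standard ast module does:

    import typed_ast.ast3 as ast3

    source = (
        "x = []  # type: List[int]\n"
        "import missing_module  # type: ignore\n"
    )
    tree = ast3.parse(source)

    # The Assign statement carries the raw type comment string.
    print(type(tree.body[0]).__name__, tree.body[0].type_comment)
    # The "# type: ignore" comments end up on the Module, one entry per
    # line, collected from the TYPE_IGNORE children added above.
    print([ti.lineno for ti in tree.type_ignores])
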
diff --git a/ast35/Parser/tokenizer.c b/ast3/Parser/tokenizer.c
similarity index 92%
rename from ast35/Parser/tokenizer.c
rename to ast3/Parser/tokenizer.c
index ae1a749..60444c2 100644
--- a/ast35/Parser/tokenizer.c
+++ b/ast3/Parser/tokenizer.c
@@ -18,6 +18,15 @@
#include "abstract.h"
#endif /* PGEN */
+#ifndef Py_XSETREF
+#define Py_XSETREF(op, op2) \
+ do { \
+ PyObject *_py_tmp = (PyObject *)(op); \
+ (op) = (op2); \
+ Py_XDECREF(_py_tmp); \
+ } while (0)
+#endif /* Py_XSETREF */
+
#define is_potential_identifier_start(c) (\
(c >= 'a' && c <= 'z')\
|| (c >= 'A' && c <= 'Z')\
@@ -52,7 +61,7 @@ static void tok_backup(struct tok_state *tok, int c);
/* Token names */
-const char *_Ta35Parser_TokenNames[] = {
+const char *_Ta3Parser_TokenNames[] = {
"ENDMARKER",
"NAME",
"NUMBER",
@@ -205,7 +214,7 @@ static char *
error_ret(struct tok_state *tok) /* XXX */
{
tok->decoding_erred = 1;
- if (tok->fp != NULL && tok->buf != NULL) /* see Ta35Tokenizer_Free */
+ if (tok->fp != NULL && tok->buf != NULL) /* see Ta3Tokenizer_Free */
PyMem_FREE(tok->buf);
tok->buf = tok->cur = tok->end = tok->inp = tok->start = NULL;
tok->done = E_DECODE;
@@ -213,8 +222,8 @@ error_ret(struct tok_state *tok) /* XXX */
}
-static char *
-get_normal_name(char *s) /* for utf-8 and latin-1 */
+static const char *
+get_normal_name(const char *s) /* for utf-8 and latin-1 */
{
char buf[13];
int i;
@@ -275,7 +284,7 @@ get_coding_spec(const char *s, char **spec, Py_ssize_t size, struct tok_state *t
if (begin < t) {
char* r = new_string(begin, t - begin, tok);
- char* q;
+ const char* q;
if (!r)
return 0;
q = get_normal_name(r);
@@ -286,6 +295,7 @@ get_coding_spec(const char *s, char **spec, Py_ssize_t size, struct tok_state *t
return 0;
}
*spec = r;
+ break;
}
}
}
@@ -456,7 +466,7 @@ fp_readl(char *s, int size, struct tok_state *tok)
}
if (PyUnicode_CheckExact(bufobj))
{
- buf = _PyUnicode_AsStringAndSize(bufobj, &buflen);
+ buf = PyUnicode_AsUTF8AndSize(bufobj, &buflen);
if (buf == NULL) {
goto error;
}
@@ -507,16 +517,12 @@ error:
static int
fp_setreadl(struct tok_state *tok, const char* enc)
{
- PyObject *readline = NULL, *stream = NULL, *io = NULL;
+ PyObject *readline, *io, *stream;
_Py_IDENTIFIER(open);
_Py_IDENTIFIER(readline);
int fd;
long pos;
- io = PyImport_ImportModuleNoBlock("io");
- if (io == NULL)
- goto cleanup;
-
fd = fileno(tok->fp);
/* Due to buffering the file offset for fd can be different from the file
* position of tok->fp. If tok->fp was opened in text mode on Windows,
@@ -527,28 +533,33 @@ fp_setreadl(struct tok_state *tok, const char* enc)
if (pos == -1 ||
lseek(fd, (off_t)(pos > 0 ? pos - 1 : pos), SEEK_SET) == (off_t)-1) {
PyErr_SetFromErrnoWithFilename(PyExc_OSError, NULL);
- goto cleanup;
+ return 0;
}
+ io = PyImport_ImportModuleNoBlock("io");
+ if (io == NULL)
+ return 0;
+
stream = _PyObject_CallMethodId(io, &PyId_open, "isisOOO",
fd, "r", -1, enc, Py_None, Py_None, Py_False);
+ Py_DECREF(io);
if (stream == NULL)
- goto cleanup;
+ return 0;
- Py_XDECREF(tok->decoding_readline);
readline = _PyObject_GetAttrId(stream, &PyId_readline);
- tok->decoding_readline = readline;
+ Py_DECREF(stream);
+ if (readline == NULL)
+ return 0;
+ Py_XSETREF(tok->decoding_readline, readline);
+
if (pos > 0) {
- if (PyObject_CallObject(readline, NULL) == NULL) {
- readline = NULL;
- goto cleanup;
- }
+ PyObject *bufobj = PyObject_CallObject(readline, NULL);
+ if (bufobj == NULL)
+ return 0;
+ Py_DECREF(bufobj);
}
- cleanup:
- Py_XDECREF(stream);
- Py_XDECREF(io);
- return readline != NULL;
+ return 1;
}
/* Fetch the next byte from TOK. */
@@ -816,14 +827,14 @@ decode_str(const char *input, int single, struct tok_state *tok)
/* Set up tokenizer for string */
struct tok_state *
-Ta35Tokenizer_FromString(const char *str, int exec_input)
+Ta3Tokenizer_FromString(const char *str, int exec_input)
{
struct tok_state *tok = tok_new();
if (tok == NULL)
return NULL;
str = decode_str(str, exec_input, tok);
if (str == NULL) {
- Ta35Tokenizer_Free(tok);
+ Ta3Tokenizer_Free(tok);
return NULL;
}
@@ -833,7 +844,7 @@ Ta35Tokenizer_FromString(const char *str, int exec_input)
}
struct tok_state *
-Ta35Tokenizer_FromUTF8(const char *str, int exec_input)
+Ta3Tokenizer_FromUTF8(const char *str, int exec_input)
{
struct tok_state *tok = tok_new();
if (tok == NULL)
@@ -842,7 +853,7 @@ Ta35Tokenizer_FromUTF8(const char *str, int exec_input)
tok->input = str = translate_newlines(str, exec_input, tok);
#endif
if (str == NULL) {
- Ta35Tokenizer_Free(tok);
+ Ta3Tokenizer_Free(tok);
return NULL;
}
tok->decoding_state = STATE_RAW;
@@ -851,7 +862,7 @@ Ta35Tokenizer_FromUTF8(const char *str, int exec_input)
tok->str = str;
tok->encoding = (char *)PyMem_MALLOC(6);
if (!tok->encoding) {
- Ta35Tokenizer_Free(tok);
+ Ta3Tokenizer_Free(tok);
return NULL;
}
strcpy(tok->encoding, "utf-8");
@@ -864,14 +875,14 @@ Ta35Tokenizer_FromUTF8(const char *str, int exec_input)
/* Set up tokenizer for file */
struct tok_state *
-Ta35Tokenizer_FromFile(FILE *fp, const char* enc,
+Ta3Tokenizer_FromFile(FILE *fp, const char* enc,
const char *ps1, const char *ps2)
{
struct tok_state *tok = tok_new();
if (tok == NULL)
return NULL;
if ((tok->buf = (char *)PyMem_MALLOC(BUFSIZ)) == NULL) {
- Ta35Tokenizer_Free(tok);
+ Ta3Tokenizer_Free(tok);
return NULL;
}
tok->cur = tok->inp = tok->buf;
@@ -884,7 +895,7 @@ Ta35Tokenizer_FromFile(FILE *fp, const char* enc,
gets copied into the parse tree. */
tok->encoding = PyMem_MALLOC(strlen(enc)+1);
if (!tok->encoding) {
- Ta35Tokenizer_Free(tok);
+ Ta3Tokenizer_Free(tok);
return NULL;
}
strcpy(tok->encoding, enc);
@@ -897,7 +908,7 @@ Ta35Tokenizer_FromFile(FILE *fp, const char* enc,
/* Free a tok_state structure */
void
-Ta35Tokenizer_Free(struct tok_state *tok)
+Ta3Tokenizer_Free(struct tok_state *tok)
{
if (tok->encoding != NULL)
PyMem_FREE(tok->encoding);
@@ -1034,7 +1045,7 @@ tok_nextc(struct tok_state *tok)
else {
tok->done = E_OK;
tok->inp = strchr(tok->buf, '\0');
- done = tok->inp[-1] == '\n';
+ done = tok->inp == tok->buf || tok->inp[-1] == '\n';
}
}
else {
@@ -1124,7 +1135,7 @@ tok_backup(struct tok_state *tok, int c)
/* Return the token corresponding to a single character */
int
-Ta35Token_OneChar(int c)
+Ta3Token_OneChar(int c)
{
switch (c) {
case '(': return LPAR;
@@ -1156,7 +1167,7 @@ Ta35Token_OneChar(int c)
int
-Ta35Token_TwoChars(int c1, int c2)
+Ta3Token_TwoChars(int c1, int c2)
{
switch (c1) {
case '=':
@@ -1235,7 +1246,7 @@ Ta35Token_TwoChars(int c1, int c2)
}
int
-Ta35Token_ThreeChars(int c1, int c2, int c3)
+Ta3Token_ThreeChars(int c1, int c2, int c3)
{
switch (c1) {
case '<':
@@ -1348,6 +1359,7 @@ static int
tok_decimal_tail(struct tok_state *tok)
{
int c;
+
while (1) {
do {
c = tok_nextc(tok);
@@ -1385,17 +1397,20 @@ tok_get(struct tok_state *tok, char **p_start, char **p_end)
tok->atbol = 0;
for (;;) {
c = tok_nextc(tok);
- if (c == ' ')
+ if (c == ' ') {
col++, altcol++;
+ }
else if (c == '\t') {
col = (col/tok->tabsize + 1) * tok->tabsize;
altcol = (altcol/tok->alttabsize + 1)
* tok->alttabsize;
}
- else if (c == '\014') /* Control-L (formfeed) */
+ else if (c == '\014') {/* Control-L (formfeed) */
col = altcol = 0; /* For Emacs users */
- else
+ }
+ else {
break;
+ }
}
tok_backup(tok, c);
if (c == '#' || c == '\n') {
@@ -1404,10 +1419,12 @@ tok_get(struct tok_state *tok, char **p_start, char **p_end)
not passed to the parser as NEWLINE tokens,
except *totally* empty lines in interactive
mode, which signal the end of a command group. */
- if (col == 0 && c == '\n' && tok->prompt != NULL)
+ if (col == 0 && c == '\n' && tok->prompt != NULL) {
blankline = 0; /* Let it through */
- else
+ }
+ else {
blankline = 1; /* Ignore completely */
+ }
/* We can't jump back right here since we still
may need to skip to the end of a comment */
}
@@ -1415,8 +1432,9 @@ tok_get(struct tok_state *tok, char **p_start, char **p_end)
if (col == tok->indstack[tok->indent]) {
/* No change */
if (altcol != tok->altindstack[tok->indent]) {
- if (indenterror(tok))
+ if (indenterror(tok)) {
return ERRORTOKEN;
+ }
}
}
else if (col > tok->indstack[tok->indent]) {
@@ -1427,8 +1445,9 @@ tok_get(struct tok_state *tok, char **p_start, char **p_end)
return ERRORTOKEN;
}
if (altcol <= tok->altindstack[tok->indent]) {
- if (indenterror(tok))
+ if (indenterror(tok)) {
return ERRORTOKEN;
+ }
}
tok->pendin++;
tok->indstack[++tok->indent] = col;
@@ -1447,8 +1466,9 @@ tok_get(struct tok_state *tok, char **p_start, char **p_end)
return ERRORTOKEN;
}
if (altcol != tok->altindstack[tok->indent]) {
- if (indenterror(tok))
+ if (indenterror(tok)) {
return ERRORTOKEN;
+ }
}
}
}
@@ -1546,7 +1566,6 @@ tok_get(struct tok_state *tok, char **p_start, char **p_end)
}
}
-
/* Check for EOF and errors now */
if (c == EOF) {
return tok->done == E_EOF ? ENDMARKER : ERRORTOKEN;
@@ -1556,31 +1575,41 @@ tok_get(struct tok_state *tok, char **p_start, char **p_end)
nonascii = 0;
if (is_potential_identifier_start(c)) {
/* Process b"", r"", u"", br"" and rb"" */
- int saw_b = 0, saw_r = 0, saw_u = 0;
+ int saw_b = 0, saw_r = 0, saw_u = 0, saw_f = 0;
while (1) {
- if (!(saw_b || saw_u) && (c == 'b' || c == 'B'))
+ if (!(saw_b || saw_u || saw_f) && (c == 'b' || c == 'B'))
saw_b = 1;
/* Since this is a backwards compatibility support literal we don't
want to support it in arbitrary order like byte literals. */
- else if (!(saw_b || saw_u || saw_r) && (c == 'u' || c == 'U'))
+ else if (!(saw_b || saw_u || saw_r || saw_f)
+ && (c == 'u'|| c == 'U')) {
saw_u = 1;
+ }
/* ur"" and ru"" are not supported */
- else if (!(saw_r || saw_u) && (c == 'r' || c == 'R'))
+ else if (!(saw_r || saw_u) && (c == 'r' || c == 'R')) {
saw_r = 1;
- else
+ }
+ else if (!(saw_f || saw_b || saw_u) && (c == 'f' || c == 'F')) {
+ saw_f = 1;
+ }
+ else {
break;
+ }
c = tok_nextc(tok);
- if (c == '"' || c == '\'')
+ if (c == '"' || c == '\'') {
goto letter_quote;
+ }
}
while (is_potential_identifier_char(c)) {
- if (c >= 128)
+ if (c >= 128) {
nonascii = 1;
+ }
c = tok_nextc(tok);
}
tok_backup(tok, c);
- if (nonascii && !verify_identifier(tok))
+ if (nonascii && !verify_identifier(tok)) {
return ERRORTOKEN;
+ }
*p_start = tok->start;
*p_end = tok->cur;
@@ -1589,10 +1618,12 @@ tok_get(struct tok_state *tok, char **p_start, char **p_end)
/* Current token length is 5. */
if (tok->async_def) {
/* We're inside an 'async def' function. */
- if (memcmp(tok->start, "async", 5) == 0)
+ if (memcmp(tok->start, "async", 5) == 0) {
return ASYNC;
- if (memcmp(tok->start, "await", 5) == 0)
+ }
+ if (memcmp(tok->start, "await", 5) == 0) {
return AWAIT;
+ }
}
else if (memcmp(tok->start, "async", 5) == 0) {
/* The current token is 'async'.
@@ -1625,8 +1656,9 @@ tok_get(struct tok_state *tok, char **p_start, char **p_end)
/* Newline */
if (c == '\n') {
tok->atbol = 1;
- if (blankline || tok->level > 0)
+ if (blankline || tok->level > 0) {
goto nextline;
+ }
*p_start = tok->start;
*p_end = tok->cur - 1; /* Leave '\n' out of the string */
tok->cont_line = 0;
@@ -1649,11 +1681,13 @@ tok_get(struct tok_state *tok, char **p_start, char **p_end)
*p_start = tok->start;
*p_end = tok->cur;
return ELLIPSIS;
- } else {
+ }
+ else {
tok_backup(tok, c);
}
tok_backup(tok, '.');
- } else {
+ }
+ else {
tok_backup(tok, c);
}
*p_start = tok->start;
@@ -1670,8 +1704,9 @@ tok_get(struct tok_state *tok, char **p_start, char **p_end)
/* Hex */
c = tok_nextc(tok);
do {
- if (c == '_')
+ if (c == '_') {
c = tok_nextc(tok);
+ }
if (!isxdigit(c)) {
tok->done = E_TOKEN;
tok_backup(tok, c);
@@ -1686,8 +1721,9 @@ tok_get(struct tok_state *tok, char **p_start, char **p_end)
/* Octal */
c = tok_nextc(tok);
do {
- if (c == '_')
+ if (c == '_') {
c = tok_nextc(tok);
+ }
if (c < '0' || c >= '8') {
tok->done = E_TOKEN;
tok_backup(tok, c);
@@ -1702,8 +1738,9 @@ tok_get(struct tok_state *tok, char **p_start, char **p_end)
/* Binary */
c = tok_nextc(tok);
do {
- if (c == '_')
+ if (c == '_') {
c = tok_nextc(tok);
+ }
if (c != '0' && c != '1') {
tok->done = E_TOKEN;
tok_backup(tok, c);
@@ -1727,8 +1764,9 @@ tok_get(struct tok_state *tok, char **p_start, char **p_end)
return ERRORTOKEN;
}
}
- if (c != '0')
+ if (c != '0') {
break;
+ }
c = tok_nextc(tok);
}
if (isdigit(c)) {
@@ -1742,10 +1780,12 @@ tok_get(struct tok_state *tok, char **p_start, char **p_end)
c = tok_nextc(tok);
goto fraction;
}
- else if (c == 'e' || c == 'E')
+ else if (c == 'e' || c == 'E') {
goto exponent;
- else if (c == 'j' || c == 'J')
+ }
+ else if (c == 'j' || c == 'J') {
goto imaginary;
+ }
else if (nonzero) {
/* Old-style octal: now disallowed. */
tok->done = E_TOKEN;
@@ -1798,10 +1838,11 @@ tok_get(struct tok_state *tok, char **p_start, char **p_end)
return ERRORTOKEN;
}
}
- if (c == 'j' || c == 'J')
+ if (c == 'j' || c == 'J') {
/* Imaginary part */
imaginary:
c = tok_nextc(tok);
+ }
}
}
tok_backup(tok, c);
@@ -1821,22 +1862,27 @@ tok_get(struct tok_state *tok, char **p_start, char **p_end)
c = tok_nextc(tok);
if (c == quote) {
c = tok_nextc(tok);
- if (c == quote)
+ if (c == quote) {
quote_size = 3;
- else
+ }
+ else {
end_quote_size = 1; /* empty string found */
+ }
}
- if (c != quote)
+ if (c != quote) {
tok_backup(tok, c);
+ }
/* Get rest of string */
while (end_quote_size != quote_size) {
c = tok_nextc(tok);
if (c == EOF) {
- if (quote_size == 3)
+ if (quote_size == 3) {
tok->done = E_EOFS;
- else
+ }
+ else {
tok->done = E_EOLS;
+ }
tok->cur = tok->inp;
return ERRORTOKEN;
}
@@ -1845,12 +1891,14 @@ tok_get(struct tok_state *tok, char **p_start, char **p_end)
tok->cur = tok->inp;
return ERRORTOKEN;
}
- if (c == quote)
+ if (c == quote) {
end_quote_size += 1;
+ }
else {
end_quote_size = 0;
- if (c == '\\')
- c = tok_nextc(tok); /* skip escaped char */
+ if (c == '\\') {
+ tok_nextc(tok); /* skip escaped char */
+ }
}
}
@@ -1874,13 +1922,14 @@ tok_get(struct tok_state *tok, char **p_start, char **p_end)
/* Check for two-character token */
{
int c2 = tok_nextc(tok);
- int token = Ta35Token_TwoChars(c, c2);
+ int token = Ta3Token_TwoChars(c, c2);
if (token != OP) {
int c3 = tok_nextc(tok);
- int token3 = Ta35Token_ThreeChars(c, c2, c3);
+ int token3 = Ta3Token_ThreeChars(c, c2, c3);
if (token3 != OP) {
token = token3;
- } else {
+ }
+ else {
tok_backup(tok, c3);
}
*p_start = tok->start;
@@ -1907,11 +1956,11 @@ tok_get(struct tok_state *tok, char **p_start, char **p_end)
/* Punctuation character */
*p_start = tok->start;
*p_end = tok->cur;
- return Ta35Token_OneChar(c);
+ return Ta3Token_OneChar(c);
}
int
-Ta35Tokenizer_Get(struct tok_state *tok, char **p_start, char **p_end)
+Ta3Tokenizer_Get(struct tok_state *tok, char **p_start, char **p_end)
{
int result = tok_get(tok, p_start, p_end);
if (tok->decoding_erred) {
@@ -1924,7 +1973,7 @@ Ta35Tokenizer_Get(struct tok_state *tok, char **p_start, char **p_end)
/* Get the encoding of a Python file. Check for the coding cookie and check if
the file starts with a BOM.
- Ta35Tokenizer_FindEncodingFilename() returns NULL when it can't find the
+ Ta3Tokenizer_FindEncodingFilename() returns NULL when it can't find the
encoding in the first or second line of the file (in which case the encoding
should be assumed to be UTF-8).
@@ -1932,7 +1981,7 @@ Ta35Tokenizer_Get(struct tok_state *tok, char **p_start, char **p_end)
by the caller. */
char *
-Ta35Tokenizer_FindEncodingFilename(int fd, PyObject *filename)
+Ta3Tokenizer_FindEncodingFilename(int fd, PyObject *filename)
{
struct tok_state *tok;
FILE *fp;
@@ -1953,7 +2002,7 @@ Ta35Tokenizer_FindEncodingFilename(int fd, PyObject *filename)
if (fp == NULL) {
return NULL;
}
- tok = Ta35Tokenizer_FromFile(fp, NULL, NULL, NULL);
+ tok = Ta3Tokenizer_FromFile(fp, NULL, NULL, NULL);
if (tok == NULL) {
fclose(fp);
return NULL;
@@ -1967,13 +2016,13 @@ Ta35Tokenizer_FindEncodingFilename(int fd, PyObject *filename)
tok->filename = PyUnicode_FromString("<string>");
if (tok->filename == NULL) {
fclose(fp);
- Ta35Tokenizer_Free(tok);
+ Ta3Tokenizer_Free(tok);
return encoding;
}
}
#endif
while (tok->lineno < 2 && tok->done == E_OK) {
- Ta35Tokenizer_Get(tok, &p_start, &p_end);
+ Ta3Tokenizer_Get(tok, &p_start, &p_end);
}
fclose(fp);
if (tok->encoding) {
@@ -1981,14 +2030,14 @@ Ta35Tokenizer_FindEncodingFilename(int fd, PyObject *filename)
if (encoding)
strcpy(encoding, tok->encoding);
}
- Ta35Tokenizer_Free(tok);
+ Ta3Tokenizer_Free(tok);
return encoding;
}
char *
-Ta35Tokenizer_FindEncoding(int fd)
+Ta3Tokenizer_FindEncoding(int fd)
{
- return Ta35Tokenizer_FindEncodingFilename(fd, NULL);
+ return Ta3Tokenizer_FindEncodingFilename(fd, NULL);
}
#ifdef Py_DEBUG
@@ -1996,7 +2045,7 @@ Ta35Tokenizer_FindEncoding(int fd)
void
tok_dump(int type, char *start, char *end)
{
- printf("%s", _Ta35Parser_TokenNames[type]);
+ printf("%s", _Ta3Parser_TokenNames[type]);
if (type == NAME || type == NUMBER || type == STRING || type == OP)
printf("(%.*s)", (int)(end - start), start);
}
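
[Editor's note] The tokenizer.c changes above track the CPython 3.6 tokenizer: an 'f'/'F' string prefix (saw_f) and underscores inside numeric literals are now accepted, and the old contains_underscores bookkeeping is dropped further down in Python-ast.c. A small sketch of the effect, assuming typed_ast.ast3.parse and ast3.dump mirror the standard ast helpers:

    import typed_ast.ast3 as ast3

    tree = ast3.parse("total = 1_000_000\nmsg = f'count={total}'\n")
    # 1_000_000 tokenizes as a single NUMBER; the f-string becomes a
    # JoinedStr of Str/FormattedValue parts (see the Python-ast.c hunks
    # that follow).
    print(ast3.dump(tree.body[0]))
    print(ast3.dump(tree.body[1]))
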
diff --git a/ast35/Parser/tokenizer.h b/ast3/Parser/tokenizer.h
similarity index 89%
rename from ast35/Parser/tokenizer.h
rename to ast3/Parser/tokenizer.h
index 1f8251a..a204067 100644
--- a/ast35/Parser/tokenizer.h
+++ b/ast3/Parser/tokenizer.h
@@ -1,5 +1,5 @@
-#ifndef Ta35_TOKENIZER_H
-#define Ta35_TOKENIZER_H
+#ifndef Ta3_TOKENIZER_H
+#define Ta3_TOKENIZER_H
#ifdef __cplusplus
extern "C" {
#endif
@@ -74,16 +74,16 @@ struct tok_state {
NEWLINE token after it. */
};
-extern struct tok_state *Ta35Tokenizer_FromString(const char *, int);
-extern struct tok_state *Ta35Tokenizer_FromUTF8(const char *, int);
-extern struct tok_state *Ta35Tokenizer_FromFile(FILE *, const char*,
+extern struct tok_state *Ta3Tokenizer_FromString(const char *, int);
+extern struct tok_state *Ta3Tokenizer_FromUTF8(const char *, int);
+extern struct tok_state *Ta3Tokenizer_FromFile(FILE *, const char*,
const char *, const char *);
-extern void Ta35Tokenizer_Free(struct tok_state *);
-extern int Ta35Tokenizer_Get(struct tok_state *, char **, char **);
+extern void Ta3Tokenizer_Free(struct tok_state *);
+extern int Ta3Tokenizer_Get(struct tok_state *, char **, char **);
extern char * PyTokenizer_RestoreEncoding(struct tok_state* tok,
int len, int *offset);
#ifdef __cplusplus
}
#endif
-#endif /* !Ta35_TOKENIZER_H */
+#endif /* !Ta3_TOKENIZER_H */
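
[Editor's note] The Python-ast.c diff that follows regenerates the AST types for the 3.6 grammar: AnnAssign (PEP 526 variable annotations), FormattedValue/JoinedStr (PEP 498 f-strings), Constant, an is_async flag on comprehensions, and type_comment fields on AsyncFor, AsyncWith and arg. A hedged sketch of the AnnAssign shape, again assuming ast3.parse behaves like the standard ast.parse:

    import typed_ast.ast3 as ast3

    mod = ast3.parse("x: int = 1\n(y): str\n")
    for stmt in mod.body:
        # `simple` is 1 only for a bare-name target; the parenthesised
        # target below is expected to yield simple == 0 under 3.6 rules.
        print(type(stmt).__name__, type(stmt.target).__name__, stmt.simple)
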
diff --git a/ast35/Python/Python-ast.c b/ast3/Python/Python-ast.c
similarity index 88%
rename from ast35/Python/Python-ast.c
rename to ast3/Python/Python-ast.c
index d852b9c..5631328 100644
--- a/ast35/Python/Python-ast.c
+++ b/ast3/Python/Python-ast.c
@@ -85,12 +85,10 @@ static char *Delete_fields[]={
"targets",
};
static PyTypeObject *Assign_type;
-_Py_IDENTIFIER(annotation);
static char *Assign_fields[]={
"targets",
"value",
"type_comment",
- "annotation",
};
static PyTypeObject *AugAssign_type;
_Py_IDENTIFIER(target);
@@ -100,6 +98,15 @@ static char *AugAssign_fields[]={
"op",
"value",
};
+static PyTypeObject *AnnAssign_type;
+_Py_IDENTIFIER(annotation);
+_Py_IDENTIFIER(simple);
+static char *AnnAssign_fields[]={
+ "target",
+ "annotation",
+ "value",
+ "simple",
+};
static PyTypeObject *For_type;
_Py_IDENTIFIER(iter);
_Py_IDENTIFIER(orelse);
@@ -116,6 +123,7 @@ static char *AsyncFor_fields[]={
"iter",
"body",
"orelse",
+ "type_comment",
};
static PyTypeObject *While_type;
_Py_IDENTIFIER(test);
@@ -141,6 +149,7 @@ static PyTypeObject *AsyncWith_type;
static char *AsyncWith_fields[]={
"items",
"body",
+ "type_comment",
};
static PyTypeObject *Raise_type;
_Py_IDENTIFIER(exc);
@@ -293,16 +302,26 @@ static char *Call_fields[]={
};
static PyTypeObject *Num_type;
_Py_IDENTIFIER(n);
-_Py_IDENTIFIER(contains_underscores);
static char *Num_fields[]={
"n",
- "contains_underscores",
};
static PyTypeObject *Str_type;
_Py_IDENTIFIER(s);
static char *Str_fields[]={
"s",
};
+static PyTypeObject *FormattedValue_type;
+_Py_IDENTIFIER(conversion);
+_Py_IDENTIFIER(format_spec);
+static char *FormattedValue_fields[]={
+ "value",
+ "conversion",
+ "format_spec",
+};
+static PyTypeObject *JoinedStr_type;
+static char *JoinedStr_fields[]={
+ "values",
+};
static PyTypeObject *Bytes_type;
static char *Bytes_fields[]={
"s",
@@ -312,6 +331,10 @@ static char *NameConstant_fields[]={
"value",
};
static PyTypeObject *Ellipsis_type;
+static PyTypeObject *Constant_type;
+static char *Constant_fields[]={
+ "value",
+};
static PyTypeObject *Attribute_type;
_Py_IDENTIFIER(attr);
_Py_IDENTIFIER(ctx);
@@ -428,10 +451,12 @@ static PyTypeObject *NotIn_type;
static PyTypeObject *comprehension_type;
static PyObject* ast2obj_comprehension(void*);
_Py_IDENTIFIER(ifs);
+_Py_IDENTIFIER(is_async);
static char *comprehension_fields[]={
"target",
"iter",
"ifs",
+ "is_async",
};
static PyTypeObject *excepthandler_type;
static char *excepthandler_attributes[] = {
@@ -471,6 +496,7 @@ _Py_IDENTIFIER(arg);
static char *arg_fields[]={
"arg",
"annotation",
+ "type_comment",
};
static PyTypeObject *keyword_type;
static PyObject* ast2obj_keyword(void*);
@@ -611,7 +637,7 @@ static PyGetSetDef ast_type_getsets[] = {
static PyTypeObject AST_type = {
PyVarObject_HEAD_INIT(NULL, 0)
- "_ast35.AST",
+ "_ast3.AST",
sizeof(AST_object),
0,
(destructor)ast_dealloc, /* tp_dealloc */
@@ -667,7 +693,7 @@ static PyTypeObject* make_type(char *type, PyTypeObject* base, char**fields, int
PyTuple_SET_ITEM(fnames, i, field);
}
result = PyObject_CallFunction((PyObject*)&PyType_Type, "s(O){sOss}",
- type, base, "_fields", fnames, "__module__", "_ast35");
+ type, base, "_fields", fnames, "__module__", "_ast3");
Py_DECREF(fnames);
return (PyTypeObject*)result;
}
@@ -720,6 +746,7 @@ static PyObject* ast2obj_object(void *o)
return (PyObject*)o;
}
#define ast2obj_singleton ast2obj_object
+#define ast2obj_constant ast2obj_object
#define ast2obj_identifier ast2obj_object
#define ast2obj_string ast2obj_object
#define ast2obj_bytes ast2obj_object
@@ -757,6 +784,19 @@ static int obj2ast_object(PyObject* obj, PyObject** out, PyArena* arena)
return 0;
}
+static int obj2ast_constant(PyObject* obj, PyObject** out, PyArena* arena)
+{
+ if (obj) {
+ if (PyArena_AddPyObject(arena, obj) < 0) {
+ *out = NULL;
+ return -1;
+ }
+ Py_INCREF(obj);
+ }
+ *out = obj;
+ return 0;
+}
+
static int obj2ast_identifier(PyObject* obj, PyObject** out, PyArena* arena)
{
if (!PyUnicode_CheckExact(obj) && obj != Py_None) {
@@ -792,7 +832,7 @@ static int obj2ast_int(PyObject* obj, int* out, PyArena* arena)
return 1;
}
- i = (int)PyLong_AsLong(obj);
+ i = _PyLong_AsInt(obj);
if (i == -1 && PyErr_Occurred())
return 1;
*out = i;
@@ -865,13 +905,15 @@ static int init_types(void)
if (!Return_type) return 0;
Delete_type = make_type("Delete", stmt_type, Delete_fields, 1);
if (!Delete_type) return 0;
- Assign_type = make_type("Assign", stmt_type, Assign_fields, 4);
+ Assign_type = make_type("Assign", stmt_type, Assign_fields, 3);
if (!Assign_type) return 0;
AugAssign_type = make_type("AugAssign", stmt_type, AugAssign_fields, 3);
if (!AugAssign_type) return 0;
+ AnnAssign_type = make_type("AnnAssign", stmt_type, AnnAssign_fields, 4);
+ if (!AnnAssign_type) return 0;
For_type = make_type("For", stmt_type, For_fields, 5);
if (!For_type) return 0;
- AsyncFor_type = make_type("AsyncFor", stmt_type, AsyncFor_fields, 4);
+ AsyncFor_type = make_type("AsyncFor", stmt_type, AsyncFor_fields, 5);
if (!AsyncFor_type) return 0;
While_type = make_type("While", stmt_type, While_fields, 3);
if (!While_type) return 0;
@@ -879,7 +921,7 @@ static int init_types(void)
if (!If_type) return 0;
With_type = make_type("With", stmt_type, With_fields, 3);
if (!With_type) return 0;
- AsyncWith_type = make_type("AsyncWith", stmt_type, AsyncWith_fields, 2);
+ AsyncWith_type = make_type("AsyncWith", stmt_type, AsyncWith_fields, 3);
if (!AsyncWith_type) return 0;
Raise_type = make_type("Raise", stmt_type, Raise_fields, 2);
if (!Raise_type) return 0;
@@ -939,10 +981,15 @@ static int init_types(void)
if (!Compare_type) return 0;
Call_type = make_type("Call", expr_type, Call_fields, 3);
if (!Call_type) return 0;
- Num_type = make_type("Num", expr_type, Num_fields, 2);
+ Num_type = make_type("Num", expr_type, Num_fields, 1);
if (!Num_type) return 0;
Str_type = make_type("Str", expr_type, Str_fields, 1);
if (!Str_type) return 0;
+ FormattedValue_type = make_type("FormattedValue", expr_type,
+ FormattedValue_fields, 3);
+ if (!FormattedValue_type) return 0;
+ JoinedStr_type = make_type("JoinedStr", expr_type, JoinedStr_fields, 1);
+ if (!JoinedStr_type) return 0;
Bytes_type = make_type("Bytes", expr_type, Bytes_fields, 1);
if (!Bytes_type) return 0;
NameConstant_type = make_type("NameConstant", expr_type,
@@ -950,6 +997,8 @@ static int init_types(void)
if (!NameConstant_type) return 0;
Ellipsis_type = make_type("Ellipsis", expr_type, NULL, 0);
if (!Ellipsis_type) return 0;
+ Constant_type = make_type("Constant", expr_type, Constant_fields, 1);
+ if (!Constant_type) return 0;
Attribute_type = make_type("Attribute", expr_type, Attribute_fields, 3);
if (!Attribute_type) return 0;
Subscript_type = make_type("Subscript", expr_type, Subscript_fields, 3);
@@ -1127,7 +1176,7 @@ static int init_types(void)
NotIn_singleton = PyType_GenericNew(NotIn_type, NULL, NULL);
if (!NotIn_singleton) return 0;
comprehension_type = make_type("comprehension", &AST_type,
- comprehension_fields, 3);
+ comprehension_fields, 4);
if (!comprehension_type) return 0;
if (!add_attributes(comprehension_type, NULL, 0)) return 0;
excepthandler_type = make_type("excepthandler", &AST_type, NULL, 0);
@@ -1140,7 +1189,7 @@ static int init_types(void)
arguments_type = make_type("arguments", &AST_type, arguments_fields, 6);
if (!arguments_type) return 0;
if (!add_attributes(arguments_type, NULL, 0)) return 0;
- arg_type = make_type("arg", &AST_type, arg_fields, 2);
+ arg_type = make_type("arg", &AST_type, arg_fields, 3);
if (!arg_type) return 0;
if (!add_attributes(arg_type, arg_attributes, 2)) return 0;
keyword_type = make_type("keyword", &AST_type, keyword_fields, 2);
@@ -1372,10 +1421,15 @@ Delete(asdl_seq * targets, int lineno, int col_offset, PyArena *arena)
}
stmt_ty
-Assign(asdl_seq * targets, expr_ty value, string type_comment, expr_ty
- annotation, int lineno, int col_offset, PyArena *arena)
+Assign(asdl_seq * targets, expr_ty value, string type_comment, int lineno, int
+ col_offset, PyArena *arena)
{
stmt_ty p;
+ if (!value) {
+ PyErr_SetString(PyExc_ValueError,
+ "field value is required for Assign");
+ return NULL;
+ }
p = (stmt_ty)PyArena_Malloc(arena, sizeof(*p));
if (!p)
return NULL;
@@ -1383,7 +1437,6 @@ Assign(asdl_seq * targets, expr_ty value, string type_comment, expr_ty
p->v.Assign.targets = targets;
p->v.Assign.value = value;
p->v.Assign.type_comment = type_comment;
- p->v.Assign.annotation = annotation;
p->lineno = lineno;
p->col_offset = col_offset;
return p;
@@ -1422,6 +1475,34 @@ AugAssign(expr_ty target, operator_ty op, expr_ty value, int lineno, int
}
stmt_ty
+AnnAssign(expr_ty target, expr_ty annotation, expr_ty value, int simple, int
+ lineno, int col_offset, PyArena *arena)
+{
+ stmt_ty p;
+ if (!target) {
+ PyErr_SetString(PyExc_ValueError,
+ "field target is required for AnnAssign");
+ return NULL;
+ }
+ if (!annotation) {
+ PyErr_SetString(PyExc_ValueError,
+ "field annotation is required for AnnAssign");
+ return NULL;
+ }
+ p = (stmt_ty)PyArena_Malloc(arena, sizeof(*p));
+ if (!p)
+ return NULL;
+ p->kind = AnnAssign_kind;
+ p->v.AnnAssign.target = target;
+ p->v.AnnAssign.annotation = annotation;
+ p->v.AnnAssign.value = value;
+ p->v.AnnAssign.simple = simple;
+ p->lineno = lineno;
+ p->col_offset = col_offset;
+ return p;
+}
+
+stmt_ty
For(expr_ty target, expr_ty iter, asdl_seq * body, asdl_seq * orelse, string
type_comment, int lineno, int col_offset, PyArena *arena)
{
@@ -1451,8 +1532,8 @@ For(expr_ty target, expr_ty iter, asdl_seq * body, asdl_seq * orelse, string
}
stmt_ty
-AsyncFor(expr_ty target, expr_ty iter, asdl_seq * body, asdl_seq * orelse, int
- lineno, int col_offset, PyArena *arena)
+AsyncFor(expr_ty target, expr_ty iter, asdl_seq * body, asdl_seq * orelse,
+ string type_comment, int lineno, int col_offset, PyArena *arena)
{
stmt_ty p;
if (!target) {
@@ -1473,6 +1554,7 @@ AsyncFor(expr_ty target, expr_ty iter, asdl_seq * body, asdl_seq * orelse, int
p->v.AsyncFor.iter = iter;
p->v.AsyncFor.body = body;
p->v.AsyncFor.orelse = orelse;
+ p->v.AsyncFor.type_comment = type_comment;
p->lineno = lineno;
p->col_offset = col_offset;
return p;
@@ -1540,8 +1622,8 @@ With(asdl_seq * items, asdl_seq * body, string type_comment, int lineno, int
}
stmt_ty
-AsyncWith(asdl_seq * items, asdl_seq * body, int lineno, int col_offset,
- PyArena *arena)
+AsyncWith(asdl_seq * items, asdl_seq * body, string type_comment, int lineno,
+ int col_offset, PyArena *arena)
{
stmt_ty p;
p = (stmt_ty)PyArena_Malloc(arena, sizeof(*p));
@@ -1550,6 +1632,7 @@ AsyncWith(asdl_seq * items, asdl_seq * body, int lineno, int col_offset,
p->kind = AsyncWith_kind;
p->v.AsyncWith.items = items;
p->v.AsyncWith.body = body;
+ p->v.AsyncWith.type_comment = type_comment;
p->lineno = lineno;
p->col_offset = col_offset;
return p;
@@ -2079,8 +2162,7 @@ Call(expr_ty func, asdl_seq * args, asdl_seq * keywords, int lineno, int
}
expr_ty
-Num(object n, int contains_underscores, int lineno, int col_offset, PyArena
- *arena)
+Num(object n, int lineno, int col_offset, PyArena *arena)
{
expr_ty p;
if (!n) {
@@ -2093,7 +2175,6 @@ Num(object n, int contains_underscores, int lineno, int col_offset, PyArena
return NULL;
p->kind = Num_kind;
p->v.Num.n = n;
- p->v.Num.contains_underscores = contains_underscores;
p->lineno = lineno;
p->col_offset = col_offset;
return p;
@@ -2119,6 +2200,42 @@ Str(string s, int lineno, int col_offset, PyArena *arena)
}
expr_ty
+FormattedValue(expr_ty value, int conversion, expr_ty format_spec, int lineno,
+ int col_offset, PyArena *arena)
+{
+ expr_ty p;
+ if (!value) {
+ PyErr_SetString(PyExc_ValueError,
+ "field value is required for FormattedValue");
+ return NULL;
+ }
+ p = (expr_ty)PyArena_Malloc(arena, sizeof(*p));
+ if (!p)
+ return NULL;
+ p->kind = FormattedValue_kind;
+ p->v.FormattedValue.value = value;
+ p->v.FormattedValue.conversion = conversion;
+ p->v.FormattedValue.format_spec = format_spec;
+ p->lineno = lineno;
+ p->col_offset = col_offset;
+ return p;
+}
+
+expr_ty
+JoinedStr(asdl_seq * values, int lineno, int col_offset, PyArena *arena)
+{
+ expr_ty p;
+ p = (expr_ty)PyArena_Malloc(arena, sizeof(*p));
+ if (!p)
+ return NULL;
+ p->kind = JoinedStr_kind;
+ p->v.JoinedStr.values = values;
+ p->lineno = lineno;
+ p->col_offset = col_offset;
+ return p;
+}
+
+expr_ty
Bytes(bytes s, int lineno, int col_offset, PyArena *arena)
{
expr_ty p;
@@ -2170,6 +2287,25 @@ Ellipsis(int lineno, int col_offset, PyArena *arena)
}
expr_ty
+Constant(constant value, int lineno, int col_offset, PyArena *arena)
+{
+ expr_ty p;
+ if (!value) {
+ PyErr_SetString(PyExc_ValueError,
+ "field value is required for Constant");
+ return NULL;
+ }
+ p = (expr_ty)PyArena_Malloc(arena, sizeof(*p));
+ if (!p)
+ return NULL;
+ p->kind = Constant_kind;
+ p->v.Constant.value = value;
+ p->lineno = lineno;
+ p->col_offset = col_offset;
+ return p;
+}
+
+expr_ty
Attribute(expr_ty value, identifier attr, expr_context_ty ctx, int lineno, int
col_offset, PyArena *arena)
{
@@ -2371,7 +2507,8 @@ Index(expr_ty value, PyArena *arena)
}
comprehension_ty
-comprehension(expr_ty target, expr_ty iter, asdl_seq * ifs, PyArena *arena)
+comprehension(expr_ty target, expr_ty iter, asdl_seq * ifs, int is_async,
+ PyArena *arena)
{
comprehension_ty p;
if (!target) {
@@ -2390,6 +2527,7 @@ comprehension(expr_ty target, expr_ty iter, asdl_seq * ifs, PyArena *arena)
p->target = target;
p->iter = iter;
p->ifs = ifs;
+ p->is_async = is_async;
return p;
}
@@ -2428,8 +2566,8 @@ arguments(asdl_seq * args, arg_ty vararg, asdl_seq * kwonlyargs, asdl_seq *
}
arg_ty
-arg(identifier arg, expr_ty annotation, int lineno, int col_offset, PyArena
- *arena)
+arg(identifier arg, expr_ty annotation, string type_comment, int lineno, int
+ col_offset, PyArena *arena)
{
arg_ty p;
if (!arg) {
@@ -2442,6 +2580,7 @@ arg(identifier arg, expr_ty annotation, int lineno, int col_offset, PyArena
return NULL;
p->arg = arg;
p->annotation = annotation;
+ p->type_comment = type_comment;
p->lineno = lineno;
p->col_offset = col_offset;
return p;
@@ -2730,11 +2869,6 @@ ast2obj_stmt(void* _o)
if (_PyObject_SetAttrId(result, &PyId_type_comment, value) == -1)
goto failed;
Py_DECREF(value);
- value = ast2obj_expr(o->v.Assign.annotation);
- if (!value) goto failed;
- if (_PyObject_SetAttrId(result, &PyId_annotation, value) == -1)
- goto failed;
- Py_DECREF(value);
break;
case AugAssign_kind:
result = PyType_GenericNew(AugAssign_type, NULL, NULL);
@@ -2755,6 +2889,30 @@ ast2obj_stmt(void* _o)
goto failed;
Py_DECREF(value);
break;
+ case AnnAssign_kind:
+ result = PyType_GenericNew(AnnAssign_type, NULL, NULL);
+ if (!result) goto failed;
+ value = ast2obj_expr(o->v.AnnAssign.target);
+ if (!value) goto failed;
+ if (_PyObject_SetAttrId(result, &PyId_target, value) == -1)
+ goto failed;
+ Py_DECREF(value);
+ value = ast2obj_expr(o->v.AnnAssign.annotation);
+ if (!value) goto failed;
+ if (_PyObject_SetAttrId(result, &PyId_annotation, value) == -1)
+ goto failed;
+ Py_DECREF(value);
+ value = ast2obj_expr(o->v.AnnAssign.value);
+ if (!value) goto failed;
+ if (_PyObject_SetAttrId(result, &PyId_value, value) == -1)
+ goto failed;
+ Py_DECREF(value);
+ value = ast2obj_int(o->v.AnnAssign.simple);
+ if (!value) goto failed;
+ if (_PyObject_SetAttrId(result, &PyId_simple, value) == -1)
+ goto failed;
+ Py_DECREF(value);
+ break;
case For_kind:
result = PyType_GenericNew(For_type, NULL, NULL);
if (!result) goto failed;
@@ -2807,6 +2965,11 @@ ast2obj_stmt(void* _o)
if (_PyObject_SetAttrId(result, &PyId_orelse, value) == -1)
goto failed;
Py_DECREF(value);
+ value = ast2obj_string(o->v.AsyncFor.type_comment);
+ if (!value) goto failed;
+ if (_PyObject_SetAttrId(result, &PyId_type_comment, value) == -1)
+ goto failed;
+ Py_DECREF(value);
break;
case While_kind:
result = PyType_GenericNew(While_type, NULL, NULL);
@@ -2878,6 +3041,11 @@ ast2obj_stmt(void* _o)
if (_PyObject_SetAttrId(result, &PyId_body, value) == -1)
goto failed;
Py_DECREF(value);
+ value = ast2obj_string(o->v.AsyncWith.type_comment);
+ if (!value) goto failed;
+ if (_PyObject_SetAttrId(result, &PyId_type_comment, value) == -1)
+ goto failed;
+ Py_DECREF(value);
break;
case Raise_kind:
result = PyType_GenericNew(Raise_type, NULL, NULL);
@@ -3271,12 +3439,6 @@ ast2obj_expr(void* _o)
if (_PyObject_SetAttrId(result, &PyId_n, value) == -1)
goto failed;
Py_DECREF(value);
- value = ast2obj_int(o->v.Num.contains_underscores);
- if (!value) goto failed;
- if (_PyObject_SetAttrId(result, &PyId_contains_underscores, value) ==
- -1)
- goto failed;
- Py_DECREF(value);
break;
case Str_kind:
result = PyType_GenericNew(Str_type, NULL, NULL);
@@ -3287,6 +3449,34 @@ ast2obj_expr(void* _o)
goto failed;
Py_DECREF(value);
break;
+ case FormattedValue_kind:
+ result = PyType_GenericNew(FormattedValue_type, NULL, NULL);
+ if (!result) goto failed;
+ value = ast2obj_expr(o->v.FormattedValue.value);
+ if (!value) goto failed;
+ if (_PyObject_SetAttrId(result, &PyId_value, value) == -1)
+ goto failed;
+ Py_DECREF(value);
+ value = ast2obj_int(o->v.FormattedValue.conversion);
+ if (!value) goto failed;
+ if (_PyObject_SetAttrId(result, &PyId_conversion, value) == -1)
+ goto failed;
+ Py_DECREF(value);
+ value = ast2obj_expr(o->v.FormattedValue.format_spec);
+ if (!value) goto failed;
+ if (_PyObject_SetAttrId(result, &PyId_format_spec, value) == -1)
+ goto failed;
+ Py_DECREF(value);
+ break;
+ case JoinedStr_kind:
+ result = PyType_GenericNew(JoinedStr_type, NULL, NULL);
+ if (!result) goto failed;
+ value = ast2obj_list(o->v.JoinedStr.values, ast2obj_expr);
+ if (!value) goto failed;
+ if (_PyObject_SetAttrId(result, &PyId_values, value) == -1)
+ goto failed;
+ Py_DECREF(value);
+ break;
case Bytes_kind:
result = PyType_GenericNew(Bytes_type, NULL, NULL);
if (!result) goto failed;
@@ -3309,6 +3499,15 @@ ast2obj_expr(void* _o)
result = PyType_GenericNew(Ellipsis_type, NULL, NULL);
if (!result) goto failed;
break;
+ case Constant_kind:
+ result = PyType_GenericNew(Constant_type, NULL, NULL);
+ if (!result) goto failed;
+ value = ast2obj_constant(o->v.Constant.value);
+ if (!value) goto failed;
+ if (_PyObject_SetAttrId(result, &PyId_value, value) == -1)
+ goto failed;
+ Py_DECREF(value);
+ break;
case Attribute_kind:
result = PyType_GenericNew(Attribute_type, NULL, NULL);
if (!result) goto failed;
@@ -3654,6 +3853,11 @@ ast2obj_comprehension(void* _o)
if (_PyObject_SetAttrId(result, &PyId_ifs, value) == -1)
goto failed;
Py_DECREF(value);
+ value = ast2obj_int(o->is_async);
+ if (!value) goto failed;
+ if (_PyObject_SetAttrId(result, &PyId_is_async, value) == -1)
+ goto failed;
+ Py_DECREF(value);
return result;
failed:
Py_XDECREF(value);
@@ -3780,6 +3984,11 @@ ast2obj_arg(void* _o)
if (_PyObject_SetAttrId(result, &PyId_annotation, value) == -1)
goto failed;
Py_DECREF(value);
+ value = ast2obj_string(o->type_comment);
+ if (!value) goto failed;
+ if (_PyObject_SetAttrId(result, &PyId_type_comment, value) == -1)
+ goto failed;
+ Py_DECREF(value);
value = ast2obj_int(o->lineno);
if (!value) goto failed;
if (_PyObject_SetAttrId(result, &PyId_lineno, value) < 0)
@@ -3943,12 +4152,16 @@ obj2ast_mod(PyObject* obj, mod_ty* out, PyArena* arena)
goto failed;
}
len = PyList_GET_SIZE(tmp);
- body = _Py_asdl_seq_new(len, arena);
+ body = _Ta3_asdl_seq_new(len, arena);
if (body == NULL) goto failed;
for (i = 0; i < len; i++) {
stmt_ty value;
res = obj2ast_stmt(PyList_GET_ITEM(tmp, i), &value, arena);
if (res != 0) goto failed;
+ if (len != PyList_GET_SIZE(tmp)) {
+ PyErr_SetString(PyExc_RuntimeError, "Module field \"body\" changed size during iteration");
+ goto failed;
+ }
asdl_seq_SET(body, i, value);
}
Py_CLEAR(tmp);
@@ -3967,12 +4180,16 @@ obj2ast_mod(PyObject* obj, mod_ty* out, PyArena* arena)
goto failed;
}
len = PyList_GET_SIZE(tmp);
- type_ignores = _Py_asdl_seq_new(len, arena);
+ type_ignores = _Ta3_asdl_seq_new(len, arena);
if (type_ignores == NULL) goto failed;
for (i = 0; i < len; i++) {
type_ignore_ty value;
res = obj2ast_type_ignore(PyList_GET_ITEM(tmp, i), &value, arena);
if (res != 0) goto failed;
+ if (len != PyList_GET_SIZE(tmp)) {
+ PyErr_SetString(PyExc_RuntimeError, "Module field \"type_ignores\" changed size during iteration");
+ goto failed;
+ }
asdl_seq_SET(type_ignores, i, value);
}
Py_CLEAR(tmp);
@@ -4002,12 +4219,16 @@ obj2ast_mod(PyObject* obj, mod_ty* out, PyArena* arena)
goto failed;
}
len = PyList_GET_SIZE(tmp);
- body = _Py_asdl_seq_new(len, arena);
+ body = _Ta3_asdl_seq_new(len, arena);
if (body == NULL) goto failed;
for (i = 0; i < len; i++) {
stmt_ty value;
res = obj2ast_stmt(PyList_GET_ITEM(tmp, i), &value, arena);
if (res != 0) goto failed;
+ if (len != PyList_GET_SIZE(tmp)) {
+ PyErr_SetString(PyExc_RuntimeError, "Interactive field \"body\" changed size during iteration");
+ goto failed;
+ }
asdl_seq_SET(body, i, value);
}
Py_CLEAR(tmp);
@@ -4060,12 +4281,16 @@ obj2ast_mod(PyObject* obj, mod_ty* out, PyArena* arena)
goto failed;
}
len = PyList_GET_SIZE(tmp);
- argtypes = _Py_asdl_seq_new(len, arena);
+ argtypes = _Ta3_asdl_seq_new(len, arena);
if (argtypes == NULL) goto failed;
for (i = 0; i < len; i++) {
expr_ty value;
res = obj2ast_expr(PyList_GET_ITEM(tmp, i), &value, arena);
if (res != 0) goto failed;
+ if (len != PyList_GET_SIZE(tmp)) {
+ PyErr_SetString(PyExc_RuntimeError, "FunctionType field \"argtypes\" changed size during iteration");
+ goto failed;
+ }
asdl_seq_SET(argtypes, i, value);
}
Py_CLEAR(tmp);
@@ -4106,12 +4331,16 @@ obj2ast_mod(PyObject* obj, mod_ty* out, PyArena* arena)
goto failed;
}
len = PyList_GET_SIZE(tmp);
- body = _Py_asdl_seq_new(len, arena);
+ body = _Ta3_asdl_seq_new(len, arena);
if (body == NULL) goto failed;
for (i = 0; i < len; i++) {
stmt_ty value;
res = obj2ast_stmt(PyList_GET_ITEM(tmp, i), &value, arena);
if (res != 0) goto failed;
+ if (len != PyList_GET_SIZE(tmp)) {
+ PyErr_SetString(PyExc_RuntimeError, "Suite field \"body\" changed size during iteration");
+ goto failed;
+ }
asdl_seq_SET(body, i, value);
}
Py_CLEAR(tmp);
@@ -4210,12 +4439,16 @@ obj2ast_stmt(PyObject* obj, stmt_ty* out, PyArena* arena)
goto failed;
}
len = PyList_GET_SIZE(tmp);
- body = _Py_asdl_seq_new(len, arena);
+ body = _Ta3_asdl_seq_new(len, arena);
if (body == NULL) goto failed;
for (i = 0; i < len; i++) {
stmt_ty value;
res = obj2ast_stmt(PyList_GET_ITEM(tmp, i), &value, arena);
if (res != 0) goto failed;
+ if (len != PyList_GET_SIZE(tmp)) {
+ PyErr_SetString(PyExc_RuntimeError, "FunctionDef field \"body\" changed size during iteration");
+ goto failed;
+ }
asdl_seq_SET(body, i, value);
}
Py_CLEAR(tmp);
@@ -4234,12 +4467,16 @@ obj2ast_stmt(PyObject* obj, stmt_ty* out, PyArena* arena)
goto failed;
}
len = PyList_GET_SIZE(tmp);
- decorator_list = _Py_asdl_seq_new(len, arena);
+ decorator_list = _Ta3_asdl_seq_new(len, arena);
if (decorator_list == NULL) goto failed;
for (i = 0; i < len; i++) {
expr_ty value;
res = obj2ast_expr(PyList_GET_ITEM(tmp, i), &value, arena);
if (res != 0) goto failed;
+ if (len != PyList_GET_SIZE(tmp)) {
+ PyErr_SetString(PyExc_RuntimeError, "FunctionDef field \"decorator_list\" changed size during iteration");
+ goto failed;
+ }
asdl_seq_SET(decorator_list, i, value);
}
Py_CLEAR(tmp);
@@ -4317,12 +4554,16 @@ obj2ast_stmt(PyObject* obj, stmt_ty* out, PyArena* arena)
goto failed;
}
len = PyList_GET_SIZE(tmp);
- body = _Py_asdl_seq_new(len, arena);
+ body = _Ta3_asdl_seq_new(len, arena);
if (body == NULL) goto failed;
for (i = 0; i < len; i++) {
stmt_ty value;
res = obj2ast_stmt(PyList_GET_ITEM(tmp, i), &value, arena);
if (res != 0) goto failed;
+ if (len != PyList_GET_SIZE(tmp)) {
+ PyErr_SetString(PyExc_RuntimeError, "AsyncFunctionDef field \"body\" changed size during iteration");
+ goto failed;
+ }
asdl_seq_SET(body, i, value);
}
Py_CLEAR(tmp);
@@ -4341,12 +4582,16 @@ obj2ast_stmt(PyObject* obj, stmt_ty* out, PyArena* arena)
goto failed;
}
len = PyList_GET_SIZE(tmp);
- decorator_list = _Py_asdl_seq_new(len, arena);
+ decorator_list = _Ta3_asdl_seq_new(len, arena);
if (decorator_list == NULL) goto failed;
for (i = 0; i < len; i++) {
expr_ty value;
res = obj2ast_expr(PyList_GET_ITEM(tmp, i), &value, arena);
if (res != 0) goto failed;
+ if (len != PyList_GET_SIZE(tmp)) {
+ PyErr_SetString(PyExc_RuntimeError, "AsyncFunctionDef field \"decorator_list\" changed size during iteration");
+ goto failed;
+ }
asdl_seq_SET(decorator_list, i, value);
}
Py_CLEAR(tmp);
@@ -4412,12 +4657,16 @@ obj2ast_stmt(PyObject* obj, stmt_ty* out, PyArena* arena)
goto failed;
}
len = PyList_GET_SIZE(tmp);
- bases = _Py_asdl_seq_new(len, arena);
+ bases = _Ta3_asdl_seq_new(len, arena);
if (bases == NULL) goto failed;
for (i = 0; i < len; i++) {
expr_ty value;
res = obj2ast_expr(PyList_GET_ITEM(tmp, i), &value, arena);
if (res != 0) goto failed;
+ if (len != PyList_GET_SIZE(tmp)) {
+ PyErr_SetString(PyExc_RuntimeError, "ClassDef field \"bases\" changed size during iteration");
+ goto failed;
+ }
asdl_seq_SET(bases, i, value);
}
Py_CLEAR(tmp);
@@ -4436,12 +4685,16 @@ obj2ast_stmt(PyObject* obj, stmt_ty* out, PyArena* arena)
goto failed;
}
len = PyList_GET_SIZE(tmp);
- keywords = _Py_asdl_seq_new(len, arena);
+ keywords = _Ta3_asdl_seq_new(len, arena);
if (keywords == NULL) goto failed;
for (i = 0; i < len; i++) {
keyword_ty value;
res = obj2ast_keyword(PyList_GET_ITEM(tmp, i), &value, arena);
if (res != 0) goto failed;
+ if (len != PyList_GET_SIZE(tmp)) {
+ PyErr_SetString(PyExc_RuntimeError, "ClassDef field \"keywords\" changed size during iteration");
+ goto failed;
+ }
asdl_seq_SET(keywords, i, value);
}
Py_CLEAR(tmp);
@@ -4460,12 +4713,16 @@ obj2ast_stmt(PyObject* obj, stmt_ty* out, PyArena* arena)
goto failed;
}
len = PyList_GET_SIZE(tmp);
- body = _Py_asdl_seq_new(len, arena);
+ body = _Ta3_asdl_seq_new(len, arena);
if (body == NULL) goto failed;
for (i = 0; i < len; i++) {
stmt_ty value;
res = obj2ast_stmt(PyList_GET_ITEM(tmp, i), &value, arena);
if (res != 0) goto failed;
+ if (len != PyList_GET_SIZE(tmp)) {
+ PyErr_SetString(PyExc_RuntimeError, "ClassDef field \"body\" changed size during iteration");
+ goto failed;
+ }
asdl_seq_SET(body, i, value);
}
Py_CLEAR(tmp);
@@ -4484,12 +4741,16 @@ obj2ast_stmt(PyObject* obj, stmt_ty* out, PyArena* arena)
goto failed;
}
len = PyList_GET_SIZE(tmp);
- decorator_list = _Py_asdl_seq_new(len, arena);
+ decorator_list = _Ta3_asdl_seq_new(len, arena);
if (decorator_list == NULL) goto failed;
for (i = 0; i < len; i++) {
expr_ty value;
res = obj2ast_expr(PyList_GET_ITEM(tmp, i), &value, arena);
if (res != 0) goto failed;
+ if (len != PyList_GET_SIZE(tmp)) {
+ PyErr_SetString(PyExc_RuntimeError, "ClassDef field \"decorator_list\" changed size during iteration");
+ goto failed;
+ }
asdl_seq_SET(decorator_list, i, value);
}
Py_CLEAR(tmp);
@@ -4541,12 +4802,16 @@ obj2ast_stmt(PyObject* obj, stmt_ty* out, PyArena* arena)
goto failed;
}
len = PyList_GET_SIZE(tmp);
- targets = _Py_asdl_seq_new(len, arena);
+ targets = _Ta3_asdl_seq_new(len, arena);
if (targets == NULL) goto failed;
for (i = 0; i < len; i++) {
expr_ty value;
res = obj2ast_expr(PyList_GET_ITEM(tmp, i), &value, arena);
if (res != 0) goto failed;
+ if (len != PyList_GET_SIZE(tmp)) {
+ PyErr_SetString(PyExc_RuntimeError, "Delete field \"targets\" changed size during iteration");
+ goto failed;
+ }
asdl_seq_SET(targets, i, value);
}
Py_CLEAR(tmp);
@@ -4566,7 +4831,6 @@ obj2ast_stmt(PyObject* obj, stmt_ty* out, PyArena* arena)
asdl_seq* targets;
expr_ty value;
string type_comment;
- expr_ty annotation;
if (_PyObject_HasAttrId(obj, &PyId_targets)) {
int res;
@@ -4579,12 +4843,16 @@ obj2ast_stmt(PyObject* obj, stmt_ty* out, PyArena* arena)
goto failed;
}
len = PyList_GET_SIZE(tmp);
- targets = _Py_asdl_seq_new(len, arena);
+ targets = _Ta3_asdl_seq_new(len, arena);
if (targets == NULL) goto failed;
for (i = 0; i < len; i++) {
expr_ty value;
res = obj2ast_expr(PyList_GET_ITEM(tmp, i), &value, arena);
if (res != 0) goto failed;
+ if (len != PyList_GET_SIZE(tmp)) {
+ PyErr_SetString(PyExc_RuntimeError, "Assign field \"targets\" changed size during iteration");
+ goto failed;
+ }
asdl_seq_SET(targets, i, value);
}
Py_CLEAR(tmp);
@@ -4592,7 +4860,7 @@ obj2ast_stmt(PyObject* obj, stmt_ty* out, PyArena* arena)
PyErr_SetString(PyExc_TypeError, "required field \"targets\" missing from Assign");
return 1;
}
- if (exists_not_none(obj, &PyId_value)) {
+ if (_PyObject_HasAttrId(obj, &PyId_value)) {
int res;
tmp = _PyObject_GetAttrId(obj, &PyId_value);
if (tmp == NULL) goto failed;
@@ -4600,7 +4868,8 @@ obj2ast_stmt(PyObject* obj, stmt_ty* out, PyArena* arena)
if (res != 0) goto failed;
Py_CLEAR(tmp);
} else {
- value = NULL;
+ PyErr_SetString(PyExc_TypeError, "required field \"value\" missing from Assign");
+ return 1;
}
if (exists_not_none(obj, &PyId_type_comment)) {
int res;
@@ -4612,18 +4881,7 @@ obj2ast_stmt(PyObject* obj, stmt_ty* out, PyArena* arena)
} else {
type_comment = NULL;
}
- if (exists_not_none(obj, &PyId_annotation)) {
- int res;
- tmp = _PyObject_GetAttrId(obj, &PyId_annotation);
- if (tmp == NULL) goto failed;
- res = obj2ast_expr(tmp, &annotation, arena);
- if (res != 0) goto failed;
- Py_CLEAR(tmp);
- } else {
- annotation = NULL;
- }
- *out = Assign(targets, value, type_comment, annotation, lineno,
- col_offset, arena);
+ *out = Assign(targets, value, type_comment, lineno, col_offset, arena);
if (*out == NULL) goto failed;
return 0;
}
@@ -4673,6 +4931,64 @@ obj2ast_stmt(PyObject* obj, stmt_ty* out, PyArena* arena)
if (*out == NULL) goto failed;
return 0;
}
+ isinstance = PyObject_IsInstance(obj, (PyObject*)AnnAssign_type);
+ if (isinstance == -1) {
+ return 1;
+ }
+ if (isinstance) {
+ expr_ty target;
+ expr_ty annotation;
+ expr_ty value;
+ int simple;
+
+ if (_PyObject_HasAttrId(obj, &PyId_target)) {
+ int res;
+ tmp = _PyObject_GetAttrId(obj, &PyId_target);
+ if (tmp == NULL) goto failed;
+ res = obj2ast_expr(tmp, &target, arena);
+ if (res != 0) goto failed;
+ Py_CLEAR(tmp);
+ } else {
+ PyErr_SetString(PyExc_TypeError, "required field \"target\" missing from AnnAssign");
+ return 1;
+ }
+ if (_PyObject_HasAttrId(obj, &PyId_annotation)) {
+ int res;
+ tmp = _PyObject_GetAttrId(obj, &PyId_annotation);
+ if (tmp == NULL) goto failed;
+ res = obj2ast_expr(tmp, &annotation, arena);
+ if (res != 0) goto failed;
+ Py_CLEAR(tmp);
+ } else {
+ PyErr_SetString(PyExc_TypeError, "required field \"annotation\" missing from AnnAssign");
+ return 1;
+ }
+ if (exists_not_none(obj, &PyId_value)) {
+ int res;
+ tmp = _PyObject_GetAttrId(obj, &PyId_value);
+ if (tmp == NULL) goto failed;
+ res = obj2ast_expr(tmp, &value, arena);
+ if (res != 0) goto failed;
+ Py_CLEAR(tmp);
+ } else {
+ value = NULL;
+ }
+ if (_PyObject_HasAttrId(obj, &PyId_simple)) {
+ int res;
+ tmp = _PyObject_GetAttrId(obj, &PyId_simple);
+ if (tmp == NULL) goto failed;
+ res = obj2ast_int(tmp, &simple, arena);
+ if (res != 0) goto failed;
+ Py_CLEAR(tmp);
+ } else {
+ PyErr_SetString(PyExc_TypeError, "required field \"simple\" missing from AnnAssign");
+ return 1;
+ }
+ *out = AnnAssign(target, annotation, value, simple, lineno, col_offset,
+ arena);
+ if (*out == NULL) goto failed;
+ return 0;
+ }
isinstance = PyObject_IsInstance(obj, (PyObject*)For_type);
if (isinstance == -1) {
return 1;
@@ -4717,12 +5033,16 @@ obj2ast_stmt(PyObject* obj, stmt_ty* out, PyArena* arena)
goto failed;
}
len = PyList_GET_SIZE(tmp);
- body = _Py_asdl_seq_new(len, arena);
+ body = _Ta3_asdl_seq_new(len, arena);
if (body == NULL) goto failed;
for (i = 0; i < len; i++) {
stmt_ty value;
res = obj2ast_stmt(PyList_GET_ITEM(tmp, i), &value, arena);
if (res != 0) goto failed;
+ if (len != PyList_GET_SIZE(tmp)) {
+ PyErr_SetString(PyExc_RuntimeError, "For field \"body\" changed size during iteration");
+ goto failed;
+ }
asdl_seq_SET(body, i, value);
}
Py_CLEAR(tmp);
@@ -4741,12 +5061,16 @@ obj2ast_stmt(PyObject* obj, stmt_ty* out, PyArena* arena)
goto failed;
}
len = PyList_GET_SIZE(tmp);
- orelse = _Py_asdl_seq_new(len, arena);
+ orelse = _Ta3_asdl_seq_new(len, arena);
if (orelse == NULL) goto failed;
for (i = 0; i < len; i++) {
stmt_ty value;
res = obj2ast_stmt(PyList_GET_ITEM(tmp, i), &value, arena);
if (res != 0) goto failed;
+ if (len != PyList_GET_SIZE(tmp)) {
+ PyErr_SetString(PyExc_RuntimeError, "For field \"orelse\" changed size during iteration");
+ goto failed;
+ }
asdl_seq_SET(orelse, i, value);
}
Py_CLEAR(tmp);
@@ -4778,6 +5102,7 @@ obj2ast_stmt(PyObject* obj, stmt_ty* out, PyArena* arena)
expr_ty iter;
asdl_seq* body;
asdl_seq* orelse;
+ string type_comment;
if (_PyObject_HasAttrId(obj, &PyId_target)) {
int res;
@@ -4812,12 +5137,16 @@ obj2ast_stmt(PyObject* obj, stmt_ty* out, PyArena* arena)
goto failed;
}
len = PyList_GET_SIZE(tmp);
- body = _Py_asdl_seq_new(len, arena);
+ body = _Ta3_asdl_seq_new(len, arena);
if (body == NULL) goto failed;
for (i = 0; i < len; i++) {
stmt_ty value;
res = obj2ast_stmt(PyList_GET_ITEM(tmp, i), &value, arena);
if (res != 0) goto failed;
+ if (len != PyList_GET_SIZE(tmp)) {
+ PyErr_SetString(PyExc_RuntimeError, "AsyncFor field \"body\" changed size during iteration");
+ goto failed;
+ }
asdl_seq_SET(body, i, value);
}
Py_CLEAR(tmp);
@@ -4836,12 +5165,16 @@ obj2ast_stmt(PyObject* obj, stmt_ty* out, PyArena* arena)
goto failed;
}
len = PyList_GET_SIZE(tmp);
- orelse = _Py_asdl_seq_new(len, arena);
+ orelse = _Ta3_asdl_seq_new(len, arena);
if (orelse == NULL) goto failed;
for (i = 0; i < len; i++) {
stmt_ty value;
res = obj2ast_stmt(PyList_GET_ITEM(tmp, i), &value, arena);
if (res != 0) goto failed;
+ if (len != PyList_GET_SIZE(tmp)) {
+ PyErr_SetString(PyExc_RuntimeError, "AsyncFor field \"orelse\" changed size during iteration");
+ goto failed;
+ }
asdl_seq_SET(orelse, i, value);
}
Py_CLEAR(tmp);
@@ -4849,7 +5182,18 @@ obj2ast_stmt(PyObject* obj, stmt_ty* out, PyArena* arena)
PyErr_SetString(PyExc_TypeError, "required field \"orelse\" missing from AsyncFor");
return 1;
}
- *out = AsyncFor(target, iter, body, orelse, lineno, col_offset, arena);
+ if (exists_not_none(obj, &PyId_type_comment)) {
+ int res;
+ tmp = _PyObject_GetAttrId(obj, &PyId_type_comment);
+ if (tmp == NULL) goto failed;
+ res = obj2ast_string(tmp, &type_comment, arena);
+ if (res != 0) goto failed;
+ Py_CLEAR(tmp);
+ } else {
+ type_comment = NULL;
+ }
+ *out = AsyncFor(target, iter, body, orelse, type_comment, lineno,
+ col_offset, arena);
if (*out == NULL) goto failed;
return 0;
}
@@ -4884,12 +5228,16 @@ obj2ast_stmt(PyObject* obj, stmt_ty* out, PyArena* arena)
goto failed;
}
len = PyList_GET_SIZE(tmp);
- body = _Py_asdl_seq_new(len, arena);
+ body = _Ta3_asdl_seq_new(len, arena);
if (body == NULL) goto failed;
for (i = 0; i < len; i++) {
stmt_ty value;
res = obj2ast_stmt(PyList_GET_ITEM(tmp, i), &value, arena);
if (res != 0) goto failed;
+ if (len != PyList_GET_SIZE(tmp)) {
+ PyErr_SetString(PyExc_RuntimeError, "While field \"body\" changed size during iteration");
+ goto failed;
+ }
asdl_seq_SET(body, i, value);
}
Py_CLEAR(tmp);
@@ -4908,12 +5256,16 @@ obj2ast_stmt(PyObject* obj, stmt_ty* out, PyArena* arena)
goto failed;
}
len = PyList_GET_SIZE(tmp);
- orelse = _Py_asdl_seq_new(len, arena);
+ orelse = _Ta3_asdl_seq_new(len, arena);
if (orelse == NULL) goto failed;
for (i = 0; i < len; i++) {
stmt_ty value;
res = obj2ast_stmt(PyList_GET_ITEM(tmp, i), &value, arena);
if (res != 0) goto failed;
+ if (len != PyList_GET_SIZE(tmp)) {
+ PyErr_SetString(PyExc_RuntimeError, "While field \"orelse\" changed size during iteration");
+ goto failed;
+ }
asdl_seq_SET(orelse, i, value);
}
Py_CLEAR(tmp);
@@ -4956,12 +5308,16 @@ obj2ast_stmt(PyObject* obj, stmt_ty* out, PyArena* arena)
goto failed;
}
len = PyList_GET_SIZE(tmp);
- body = _Py_asdl_seq_new(len, arena);
+ body = _Ta3_asdl_seq_new(len, arena);
if (body == NULL) goto failed;
for (i = 0; i < len; i++) {
stmt_ty value;
res = obj2ast_stmt(PyList_GET_ITEM(tmp, i), &value, arena);
if (res != 0) goto failed;
+ if (len != PyList_GET_SIZE(tmp)) {
+ PyErr_SetString(PyExc_RuntimeError, "If field \"body\" changed size during iteration");
+ goto failed;
+ }
asdl_seq_SET(body, i, value);
}
Py_CLEAR(tmp);
@@ -4980,12 +5336,16 @@ obj2ast_stmt(PyObject* obj, stmt_ty* out, PyArena* arena)
goto failed;
}
len = PyList_GET_SIZE(tmp);
- orelse = _Py_asdl_seq_new(len, arena);
+ orelse = _Ta3_asdl_seq_new(len, arena);
if (orelse == NULL) goto failed;
for (i = 0; i < len; i++) {
stmt_ty value;
res = obj2ast_stmt(PyList_GET_ITEM(tmp, i), &value, arena);
if (res != 0) goto failed;
+ if (len != PyList_GET_SIZE(tmp)) {
+ PyErr_SetString(PyExc_RuntimeError, "If field \"orelse\" changed size during iteration");
+ goto failed;
+ }
asdl_seq_SET(orelse, i, value);
}
Py_CLEAR(tmp);
@@ -5017,12 +5377,16 @@ obj2ast_stmt(PyObject* obj, stmt_ty* out, PyArena* arena)
goto failed;
}
len = PyList_GET_SIZE(tmp);
- items = _Py_asdl_seq_new(len, arena);
+ items = _Ta3_asdl_seq_new(len, arena);
if (items == NULL) goto failed;
for (i = 0; i < len; i++) {
withitem_ty value;
res = obj2ast_withitem(PyList_GET_ITEM(tmp, i), &value, arena);
if (res != 0) goto failed;
+ if (len != PyList_GET_SIZE(tmp)) {
+ PyErr_SetString(PyExc_RuntimeError, "With field \"items\" changed size during iteration");
+ goto failed;
+ }
asdl_seq_SET(items, i, value);
}
Py_CLEAR(tmp);
@@ -5041,12 +5405,16 @@ obj2ast_stmt(PyObject* obj, stmt_ty* out, PyArena* arena)
goto failed;
}
len = PyList_GET_SIZE(tmp);
- body = _Py_asdl_seq_new(len, arena);
+ body = _Ta3_asdl_seq_new(len, arena);
if (body == NULL) goto failed;
for (i = 0; i < len; i++) {
stmt_ty value;
res = obj2ast_stmt(PyList_GET_ITEM(tmp, i), &value, arena);
if (res != 0) goto failed;
+ if (len != PyList_GET_SIZE(tmp)) {
+ PyErr_SetString(PyExc_RuntimeError, "With field \"body\" changed size during iteration");
+ goto failed;
+ }
asdl_seq_SET(body, i, value);
}
Py_CLEAR(tmp);
@@ -5075,6 +5443,7 @@ obj2ast_stmt(PyObject* obj, stmt_ty* out, PyArena* arena)
if (isinstance) {
asdl_seq* items;
asdl_seq* body;
+ string type_comment;
if (_PyObject_HasAttrId(obj, &PyId_items)) {
int res;
@@ -5087,12 +5456,16 @@ obj2ast_stmt(PyObject* obj, stmt_ty* out, PyArena* arena)
goto failed;
}
len = PyList_GET_SIZE(tmp);
- items = _Py_asdl_seq_new(len, arena);
+ items = _Ta3_asdl_seq_new(len, arena);
if (items == NULL) goto failed;
for (i = 0; i < len; i++) {
withitem_ty value;
res = obj2ast_withitem(PyList_GET_ITEM(tmp, i), &value, arena);
if (res != 0) goto failed;
+ if (len != PyList_GET_SIZE(tmp)) {
+ PyErr_SetString(PyExc_RuntimeError, "AsyncWith field \"items\" changed size during iteration");
+ goto failed;
+ }
asdl_seq_SET(items, i, value);
}
Py_CLEAR(tmp);
@@ -5111,12 +5484,16 @@ obj2ast_stmt(PyObject* obj, stmt_ty* out, PyArena* arena)
goto failed;
}
len = PyList_GET_SIZE(tmp);
- body = _Py_asdl_seq_new(len, arena);
+ body = _Ta3_asdl_seq_new(len, arena);
if (body == NULL) goto failed;
for (i = 0; i < len; i++) {
stmt_ty value;
res = obj2ast_stmt(PyList_GET_ITEM(tmp, i), &value, arena);
if (res != 0) goto failed;
+ if (len != PyList_GET_SIZE(tmp)) {
+ PyErr_SetString(PyExc_RuntimeError, "AsyncWith field \"body\" changed size during iteration");
+ goto failed;
+ }
asdl_seq_SET(body, i, value);
}
Py_CLEAR(tmp);
@@ -5124,7 +5501,17 @@ obj2ast_stmt(PyObject* obj, stmt_ty* out, PyArena* arena)
PyErr_SetString(PyExc_TypeError, "required field \"body\" missing from AsyncWith");
return 1;
}
- *out = AsyncWith(items, body, lineno, col_offset, arena);
+ if (exists_not_none(obj, &PyId_type_comment)) {
+ int res;
+ tmp = _PyObject_GetAttrId(obj, &PyId_type_comment);
+ if (tmp == NULL) goto failed;
+ res = obj2ast_string(tmp, &type_comment, arena);
+ if (res != 0) goto failed;
+ Py_CLEAR(tmp);
+ } else {
+ type_comment = NULL;
+ }
+ *out = AsyncWith(items, body, type_comment, lineno, col_offset, arena);
if (*out == NULL) goto failed;
return 0;
}
@@ -5181,12 +5568,16 @@ obj2ast_stmt(PyObject* obj, stmt_ty* out, PyArena* arena)
goto failed;
}
len = PyList_GET_SIZE(tmp);
- body = _Py_asdl_seq_new(len, arena);
+ body = _Ta3_asdl_seq_new(len, arena);
if (body == NULL) goto failed;
for (i = 0; i < len; i++) {
stmt_ty value;
res = obj2ast_stmt(PyList_GET_ITEM(tmp, i), &value, arena);
if (res != 0) goto failed;
+ if (len != PyList_GET_SIZE(tmp)) {
+ PyErr_SetString(PyExc_RuntimeError, "Try field \"body\" changed size during iteration");
+ goto failed;
+ }
asdl_seq_SET(body, i, value);
}
Py_CLEAR(tmp);
@@ -5205,12 +5596,16 @@ obj2ast_stmt(PyObject* obj, stmt_ty* out, PyArena* arena)
goto failed;
}
len = PyList_GET_SIZE(tmp);
- handlers = _Py_asdl_seq_new(len, arena);
+ handlers = _Ta3_asdl_seq_new(len, arena);
if (handlers == NULL) goto failed;
for (i = 0; i < len; i++) {
excepthandler_ty value;
res = obj2ast_excepthandler(PyList_GET_ITEM(tmp, i), &value, arena);
if (res != 0) goto failed;
+ if (len != PyList_GET_SIZE(tmp)) {
+ PyErr_SetString(PyExc_RuntimeError, "Try field \"handlers\" changed size during iteration");
+ goto failed;
+ }
asdl_seq_SET(handlers, i, value);
}
Py_CLEAR(tmp);
@@ -5229,12 +5624,16 @@ obj2ast_stmt(PyObject* obj, stmt_ty* out, PyArena* arena)
goto failed;
}
len = PyList_GET_SIZE(tmp);
- orelse = _Py_asdl_seq_new(len, arena);
+ orelse = _Ta3_asdl_seq_new(len, arena);
if (orelse == NULL) goto failed;
for (i = 0; i < len; i++) {
stmt_ty value;
res = obj2ast_stmt(PyList_GET_ITEM(tmp, i), &value, arena);
if (res != 0) goto failed;
+ if (len != PyList_GET_SIZE(tmp)) {
+ PyErr_SetString(PyExc_RuntimeError, "Try field \"orelse\" changed size during iteration");
+ goto failed;
+ }
asdl_seq_SET(orelse, i, value);
}
Py_CLEAR(tmp);
@@ -5253,12 +5652,16 @@ obj2ast_stmt(PyObject* obj, stmt_ty* out, PyArena* arena)
goto failed;
}
len = PyList_GET_SIZE(tmp);
- finalbody = _Py_asdl_seq_new(len, arena);
+ finalbody = _Ta3_asdl_seq_new(len, arena);
if (finalbody == NULL) goto failed;
for (i = 0; i < len; i++) {
stmt_ty value;
res = obj2ast_stmt(PyList_GET_ITEM(tmp, i), &value, arena);
if (res != 0) goto failed;
+ if (len != PyList_GET_SIZE(tmp)) {
+ PyErr_SetString(PyExc_RuntimeError, "Try field \"finalbody\" changed size during iteration");
+ goto failed;
+ }
asdl_seq_SET(finalbody, i, value);
}
Py_CLEAR(tmp);
@@ -5322,12 +5725,16 @@ obj2ast_stmt(PyObject* obj, stmt_ty* out, PyArena* arena)
goto failed;
}
len = PyList_GET_SIZE(tmp);
- names = _Py_asdl_seq_new(len, arena);
+ names = _Ta3_asdl_seq_new(len, arena);
if (names == NULL) goto failed;
for (i = 0; i < len; i++) {
alias_ty value;
res = obj2ast_alias(PyList_GET_ITEM(tmp, i), &value, arena);
if (res != 0) goto failed;
+ if (len != PyList_GET_SIZE(tmp)) {
+ PyErr_SetString(PyExc_RuntimeError, "Import field \"names\" changed size during iteration");
+ goto failed;
+ }
asdl_seq_SET(names, i, value);
}
Py_CLEAR(tmp);
@@ -5369,12 +5776,16 @@ obj2ast_stmt(PyObject* obj, stmt_ty* out, PyArena* arena)
goto failed;
}
len = PyList_GET_SIZE(tmp);
- names = _Py_asdl_seq_new(len, arena);
+ names = _Ta3_asdl_seq_new(len, arena);
if (names == NULL) goto failed;
for (i = 0; i < len; i++) {
alias_ty value;
res = obj2ast_alias(PyList_GET_ITEM(tmp, i), &value, arena);
if (res != 0) goto failed;
+ if (len != PyList_GET_SIZE(tmp)) {
+ PyErr_SetString(PyExc_RuntimeError, "ImportFrom field \"names\" changed size during iteration");
+ goto failed;
+ }
asdl_seq_SET(names, i, value);
}
Py_CLEAR(tmp);
@@ -5414,12 +5825,16 @@ obj2ast_stmt(PyObject* obj, stmt_ty* out, PyArena* arena)
goto failed;
}
len = PyList_GET_SIZE(tmp);
- names = _Py_asdl_seq_new(len, arena);
+ names = _Ta3_asdl_seq_new(len, arena);
if (names == NULL) goto failed;
for (i = 0; i < len; i++) {
identifier value;
res = obj2ast_identifier(PyList_GET_ITEM(tmp, i), &value, arena);
if (res != 0) goto failed;
+ if (len != PyList_GET_SIZE(tmp)) {
+ PyErr_SetString(PyExc_RuntimeError, "Global field \"names\" changed size during iteration");
+ goto failed;
+ }
asdl_seq_SET(names, i, value);
}
Py_CLEAR(tmp);
@@ -5449,12 +5864,16 @@ obj2ast_stmt(PyObject* obj, stmt_ty* out, PyArena* arena)
goto failed;
}
len = PyList_GET_SIZE(tmp);
- names = _Py_asdl_seq_new(len, arena);
+ names = _Ta3_asdl_seq_new(len, arena);
if (names == NULL) goto failed;
for (i = 0; i < len; i++) {
identifier value;
res = obj2ast_identifier(PyList_GET_ITEM(tmp, i), &value, arena);
if (res != 0) goto failed;
+ if (len != PyList_GET_SIZE(tmp)) {
+ PyErr_SetString(PyExc_RuntimeError, "Nonlocal field \"names\" changed size during iteration");
+ goto failed;
+ }
asdl_seq_SET(names, i, value);
}
Py_CLEAR(tmp);
@@ -5590,12 +6009,16 @@ obj2ast_expr(PyObject* obj, expr_ty* out, PyArena* arena)
goto failed;
}
len = PyList_GET_SIZE(tmp);
- values = _Py_asdl_seq_new(len, arena);
+ values = _Ta3_asdl_seq_new(len, arena);
if (values == NULL) goto failed;
for (i = 0; i < len; i++) {
expr_ty value;
res = obj2ast_expr(PyList_GET_ITEM(tmp, i), &value, arena);
if (res != 0) goto failed;
+ if (len != PyList_GET_SIZE(tmp)) {
+ PyErr_SetString(PyExc_RuntimeError, "BoolOp field \"values\" changed size during iteration");
+ goto failed;
+ }
asdl_seq_SET(values, i, value);
}
Py_CLEAR(tmp);
@@ -5786,12 +6209,16 @@ obj2ast_expr(PyObject* obj, expr_ty* out, PyArena* arena)
goto failed;
}
len = PyList_GET_SIZE(tmp);
- keys = _Py_asdl_seq_new(len, arena);
+ keys = _Ta3_asdl_seq_new(len, arena);
if (keys == NULL) goto failed;
for (i = 0; i < len; i++) {
expr_ty value;
res = obj2ast_expr(PyList_GET_ITEM(tmp, i), &value, arena);
if (res != 0) goto failed;
+ if (len != PyList_GET_SIZE(tmp)) {
+ PyErr_SetString(PyExc_RuntimeError, "Dict field \"keys\" changed size during iteration");
+ goto failed;
+ }
asdl_seq_SET(keys, i, value);
}
Py_CLEAR(tmp);
@@ -5810,12 +6237,16 @@ obj2ast_expr(PyObject* obj, expr_ty* out, PyArena* arena)
goto failed;
}
len = PyList_GET_SIZE(tmp);
- values = _Py_asdl_seq_new(len, arena);
+ values = _Ta3_asdl_seq_new(len, arena);
if (values == NULL) goto failed;
for (i = 0; i < len; i++) {
expr_ty value;
res = obj2ast_expr(PyList_GET_ITEM(tmp, i), &value, arena);
if (res != 0) goto failed;
+ if (len != PyList_GET_SIZE(tmp)) {
+ PyErr_SetString(PyExc_RuntimeError, "Dict field \"values\" changed size during iteration");
+ goto failed;
+ }
asdl_seq_SET(values, i, value);
}
Py_CLEAR(tmp);
@@ -5845,12 +6276,16 @@ obj2ast_expr(PyObject* obj, expr_ty* out, PyArena* arena)
goto failed;
}
len = PyList_GET_SIZE(tmp);
- elts = _Py_asdl_seq_new(len, arena);
+ elts = _Ta3_asdl_seq_new(len, arena);
if (elts == NULL) goto failed;
for (i = 0; i < len; i++) {
expr_ty value;
res = obj2ast_expr(PyList_GET_ITEM(tmp, i), &value, arena);
if (res != 0) goto failed;
+ if (len != PyList_GET_SIZE(tmp)) {
+ PyErr_SetString(PyExc_RuntimeError, "Set field \"elts\" changed size during iteration");
+ goto failed;
+ }
asdl_seq_SET(elts, i, value);
}
Py_CLEAR(tmp);
@@ -5892,12 +6327,16 @@ obj2ast_expr(PyObject* obj, expr_ty* out, PyArena* arena)
goto failed;
}
len = PyList_GET_SIZE(tmp);
- generators = _Py_asdl_seq_new(len, arena);
+ generators = _Ta3_asdl_seq_new(len, arena);
if (generators == NULL) goto failed;
for (i = 0; i < len; i++) {
comprehension_ty value;
res = obj2ast_comprehension(PyList_GET_ITEM(tmp, i), &value, arena);
if (res != 0) goto failed;
+ if (len != PyList_GET_SIZE(tmp)) {
+ PyErr_SetString(PyExc_RuntimeError, "ListComp field \"generators\" changed size during iteration");
+ goto failed;
+ }
asdl_seq_SET(generators, i, value);
}
Py_CLEAR(tmp);
@@ -5939,12 +6378,16 @@ obj2ast_expr(PyObject* obj, expr_ty* out, PyArena* arena)
goto failed;
}
len = PyList_GET_SIZE(tmp);
- generators = _Py_asdl_seq_new(len, arena);
+ generators = _Ta3_asdl_seq_new(len, arena);
if (generators == NULL) goto failed;
for (i = 0; i < len; i++) {
comprehension_ty value;
res = obj2ast_comprehension(PyList_GET_ITEM(tmp, i), &value, arena);
if (res != 0) goto failed;
+ if (len != PyList_GET_SIZE(tmp)) {
+ PyErr_SetString(PyExc_RuntimeError, "SetComp field \"generators\" changed size during iteration");
+ goto failed;
+ }
asdl_seq_SET(generators, i, value);
}
Py_CLEAR(tmp);
@@ -5998,12 +6441,16 @@ obj2ast_expr(PyObject* obj, expr_ty* out, PyArena* arena)
goto failed;
}
len = PyList_GET_SIZE(tmp);
- generators = _Py_asdl_seq_new(len, arena);
+ generators = _Ta3_asdl_seq_new(len, arena);
if (generators == NULL) goto failed;
for (i = 0; i < len; i++) {
comprehension_ty value;
res = obj2ast_comprehension(PyList_GET_ITEM(tmp, i), &value, arena);
if (res != 0) goto failed;
+ if (len != PyList_GET_SIZE(tmp)) {
+ PyErr_SetString(PyExc_RuntimeError, "DictComp field \"generators\" changed size during iteration");
+ goto failed;
+ }
asdl_seq_SET(generators, i, value);
}
Py_CLEAR(tmp);
@@ -6045,12 +6492,16 @@ obj2ast_expr(PyObject* obj, expr_ty* out, PyArena* arena)
goto failed;
}
len = PyList_GET_SIZE(tmp);
- generators = _Py_asdl_seq_new(len, arena);
+ generators = _Ta3_asdl_seq_new(len, arena);
if (generators == NULL) goto failed;
for (i = 0; i < len; i++) {
comprehension_ty value;
res = obj2ast_comprehension(PyList_GET_ITEM(tmp, i), &value, arena);
if (res != 0) goto failed;
+ if (len != PyList_GET_SIZE(tmp)) {
+ PyErr_SetString(PyExc_RuntimeError, "GeneratorExp field \"generators\" changed size during iteration");
+ goto failed;
+ }
asdl_seq_SET(generators, i, value);
}
Py_CLEAR(tmp);
@@ -6158,12 +6609,16 @@ obj2ast_expr(PyObject* obj, expr_ty* out, PyArena* arena)
goto failed;
}
len = PyList_GET_SIZE(tmp);
- ops = _Py_asdl_int_seq_new(len, arena);
+ ops = _Ta3_asdl_int_seq_new(len, arena);
if (ops == NULL) goto failed;
for (i = 0; i < len; i++) {
cmpop_ty value;
res = obj2ast_cmpop(PyList_GET_ITEM(tmp, i), &value, arena);
if (res != 0) goto failed;
+ if (len != PyList_GET_SIZE(tmp)) {
+ PyErr_SetString(PyExc_RuntimeError, "Compare field \"ops\" changed size during iteration");
+ goto failed;
+ }
asdl_seq_SET(ops, i, value);
}
Py_CLEAR(tmp);
@@ -6182,12 +6637,16 @@ obj2ast_expr(PyObject* obj, expr_ty* out, PyArena* arena)
goto failed;
}
len = PyList_GET_SIZE(tmp);
- comparators = _Py_asdl_seq_new(len, arena);
+ comparators = _Ta3_asdl_seq_new(len, arena);
if (comparators == NULL) goto failed;
for (i = 0; i < len; i++) {
expr_ty value;
res = obj2ast_expr(PyList_GET_ITEM(tmp, i), &value, arena);
if (res != 0) goto failed;
+ if (len != PyList_GET_SIZE(tmp)) {
+ PyErr_SetString(PyExc_RuntimeError, "Compare field \"comparators\" changed size during iteration");
+ goto failed;
+ }
asdl_seq_SET(comparators, i, value);
}
Py_CLEAR(tmp);
@@ -6230,12 +6689,16 @@ obj2ast_expr(PyObject* obj, expr_ty* out, PyArena* arena)
goto failed;
}
len = PyList_GET_SIZE(tmp);
- args = _Py_asdl_seq_new(len, arena);
+ args = _Ta3_asdl_seq_new(len, arena);
if (args == NULL) goto failed;
for (i = 0; i < len; i++) {
expr_ty value;
res = obj2ast_expr(PyList_GET_ITEM(tmp, i), &value, arena);
if (res != 0) goto failed;
+ if (len != PyList_GET_SIZE(tmp)) {
+ PyErr_SetString(PyExc_RuntimeError, "Call field \"args\" changed size during iteration");
+ goto failed;
+ }
asdl_seq_SET(args, i, value);
}
Py_CLEAR(tmp);
@@ -6254,12 +6717,16 @@ obj2ast_expr(PyObject* obj, expr_ty* out, PyArena* arena)
goto failed;
}
len = PyList_GET_SIZE(tmp);
- keywords = _Py_asdl_seq_new(len, arena);
+ keywords = _Ta3_asdl_seq_new(len, arena);
if (keywords == NULL) goto failed;
for (i = 0; i < len; i++) {
keyword_ty value;
res = obj2ast_keyword(PyList_GET_ITEM(tmp, i), &value, arena);
if (res != 0) goto failed;
+ if (len != PyList_GET_SIZE(tmp)) {
+ PyErr_SetString(PyExc_RuntimeError, "Call field \"keywords\" changed size during iteration");
+ goto failed;
+ }
asdl_seq_SET(keywords, i, value);
}
Py_CLEAR(tmp);
@@ -6277,7 +6744,6 @@ obj2ast_expr(PyObject* obj, expr_ty* out, PyArena* arena)
}
if (isinstance) {
object n;
- int contains_underscores;
if (_PyObject_HasAttrId(obj, &PyId_n)) {
int res;
@@ -6290,17 +6756,7 @@ obj2ast_expr(PyObject* obj, expr_ty* out, PyArena* arena)
PyErr_SetString(PyExc_TypeError, "required field \"n\" missing from Num");
return 1;
}
- if (exists_not_none(obj, &PyId_contains_underscores)) {
- int res;
- tmp = _PyObject_GetAttrId(obj, &PyId_contains_underscores);
- if (tmp == NULL) goto failed;
- res = obj2ast_int(tmp, &contains_underscores, arena);
- if (res != 0) goto failed;
- Py_CLEAR(tmp);
- } else {
- contains_underscores = 0;
- }
- *out = Num(n, contains_underscores, lineno, col_offset, arena);
+ *out = Num(n, lineno, col_offset, arena);
if (*out == NULL) goto failed;
return 0;
}
@@ -6326,6 +6782,90 @@ obj2ast_expr(PyObject* obj, expr_ty* out, PyArena* arena)
if (*out == NULL) goto failed;
return 0;
}
+ isinstance = PyObject_IsInstance(obj, (PyObject*)FormattedValue_type);
+ if (isinstance == -1) {
+ return 1;
+ }
+ if (isinstance) {
+ expr_ty value;
+ int conversion;
+ expr_ty format_spec;
+
+ if (_PyObject_HasAttrId(obj, &PyId_value)) {
+ int res;
+ tmp = _PyObject_GetAttrId(obj, &PyId_value);
+ if (tmp == NULL) goto failed;
+ res = obj2ast_expr(tmp, &value, arena);
+ if (res != 0) goto failed;
+ Py_CLEAR(tmp);
+ } else {
+ PyErr_SetString(PyExc_TypeError, "required field \"value\" missing from FormattedValue");
+ return 1;
+ }
+ if (exists_not_none(obj, &PyId_conversion)) {
+ int res;
+ tmp = _PyObject_GetAttrId(obj, &PyId_conversion);
+ if (tmp == NULL) goto failed;
+ res = obj2ast_int(tmp, &conversion, arena);
+ if (res != 0) goto failed;
+ Py_CLEAR(tmp);
+ } else {
+ conversion = 0;
+ }
+ if (exists_not_none(obj, &PyId_format_spec)) {
+ int res;
+ tmp = _PyObject_GetAttrId(obj, &PyId_format_spec);
+ if (tmp == NULL) goto failed;
+ res = obj2ast_expr(tmp, &format_spec, arena);
+ if (res != 0) goto failed;
+ Py_CLEAR(tmp);
+ } else {
+ format_spec = NULL;
+ }
+ *out = FormattedValue(value, conversion, format_spec, lineno,
+ col_offset, arena);
+ if (*out == NULL) goto failed;
+ return 0;
+ }
+ isinstance = PyObject_IsInstance(obj, (PyObject*)JoinedStr_type);
+ if (isinstance == -1) {
+ return 1;
+ }
+ if (isinstance) {
+ asdl_seq* values;
+
+ if (_PyObject_HasAttrId(obj, &PyId_values)) {
+ int res;
+ Py_ssize_t len;
+ Py_ssize_t i;
+ tmp = _PyObject_GetAttrId(obj, &PyId_values);
+ if (tmp == NULL) goto failed;
+ if (!PyList_Check(tmp)) {
+ PyErr_Format(PyExc_TypeError, "JoinedStr field \"values\" must be a list, not a %.200s", tmp->ob_type->tp_name);
+ goto failed;
+ }
+ len = PyList_GET_SIZE(tmp);
+ values = _Ta3_asdl_seq_new(len, arena);
+ if (values == NULL) goto failed;
+ for (i = 0; i < len; i++) {
+ expr_ty value;
+ res = obj2ast_expr(PyList_GET_ITEM(tmp, i), &value, arena);
+ if (res != 0) goto failed;
+ if (len != PyList_GET_SIZE(tmp)) {
+ PyErr_SetString(PyExc_RuntimeError, "JoinedStr field \"values\" changed size during iteration");
+ goto failed;
+ }
+ asdl_seq_SET(values, i, value);
+ }
+ Py_CLEAR(tmp);
+ } else {
+ PyErr_SetString(PyExc_TypeError, "required field \"values\" missing from JoinedStr");
+ return 1;
+ }
+ *out = JoinedStr(values, lineno, col_offset, arena);
+ if (*out == NULL) goto failed;
+ return 0;
+ }
isinstance = PyObject_IsInstance(obj, (PyObject*)Bytes_type);
if (isinstance == -1) {
return 1;
@@ -6380,6 +6920,28 @@ obj2ast_expr(PyObject* obj, expr_ty* out, PyArena* arena)
if (*out == NULL) goto failed;
return 0;
}
+ isinstance = PyObject_IsInstance(obj, (PyObject*)Constant_type);
+ if (isinstance == -1) {
+ return 1;
+ }
+ if (isinstance) {
+ constant value;
+
+ if (_PyObject_HasAttrId(obj, &PyId_value)) {
+ int res;
+ tmp = _PyObject_GetAttrId(obj, &PyId_value);
+ if (tmp == NULL) goto failed;
+ res = obj2ast_constant(tmp, &value, arena);
+ if (res != 0) goto failed;
+ Py_CLEAR(tmp);
+ } else {
+ PyErr_SetString(PyExc_TypeError, "required field \"value\" missing from Constant");
+ return 1;
+ }
+ *out = Constant(value, lineno, col_offset, arena);
+ if (*out == NULL) goto failed;
+ return 0;
+ }
isinstance = PyObject_IsInstance(obj, (PyObject*)Attribute_type);
if (isinstance == -1) {
return 1;
@@ -6559,12 +7121,16 @@ obj2ast_expr(PyObject* obj, expr_ty* out, PyArena* arena)
goto failed;
}
len = PyList_GET_SIZE(tmp);
- elts = _Py_asdl_seq_new(len, arena);
+ elts = _Ta3_asdl_seq_new(len, arena);
if (elts == NULL) goto failed;
for (i = 0; i < len; i++) {
expr_ty value;
res = obj2ast_expr(PyList_GET_ITEM(tmp, i), &value, arena);
if (res != 0) goto failed;
+ if (len != PyList_GET_SIZE(tmp)) {
+ PyErr_SetString(PyExc_RuntimeError, "List field \"elts\" changed size during iteration");
+ goto failed;
+ }
asdl_seq_SET(elts, i, value);
}
Py_CLEAR(tmp);
@@ -6606,12 +7172,16 @@ obj2ast_expr(PyObject* obj, expr_ty* out, PyArena* arena)
goto failed;
}
len = PyList_GET_SIZE(tmp);
- elts = _Py_asdl_seq_new(len, arena);
+ elts = _Ta3_asdl_seq_new(len, arena);
if (elts == NULL) goto failed;
for (i = 0; i < len; i++) {
expr_ty value;
res = obj2ast_expr(PyList_GET_ITEM(tmp, i), &value, arena);
if (res != 0) goto failed;
+ if (len != PyList_GET_SIZE(tmp)) {
+ PyErr_SetString(PyExc_RuntimeError, "Tuple field \"elts\" changed size during iteration");
+ goto failed;
+ }
asdl_seq_SET(elts, i, value);
}
Py_CLEAR(tmp);
@@ -6771,12 +7341,16 @@ obj2ast_slice(PyObject* obj, slice_ty* out, PyArena* arena)
goto failed;
}
len = PyList_GET_SIZE(tmp);
- dims = _Py_asdl_seq_new(len, arena);
+ dims = _Ta3_asdl_seq_new(len, arena);
if (dims == NULL) goto failed;
for (i = 0; i < len; i++) {
slice_ty value;
res = obj2ast_slice(PyList_GET_ITEM(tmp, i), &value, arena);
if (res != 0) goto failed;
+ if (len != PyList_GET_SIZE(tmp)) {
+ PyErr_SetString(PyExc_RuntimeError, "ExtSlice field \"dims\" changed size during iteration");
+ goto failed;
+ }
asdl_seq_SET(dims, i, value);
}
Py_CLEAR(tmp);
@@ -7096,6 +7670,7 @@ obj2ast_comprehension(PyObject* obj, comprehension_ty* out, PyArena* arena)
expr_ty target;
expr_ty iter;
asdl_seq* ifs;
+ int is_async;
if (_PyObject_HasAttrId(obj, &PyId_target)) {
int res;
@@ -7130,12 +7705,16 @@ obj2ast_comprehension(PyObject* obj, comprehension_ty* out, PyArena* arena)
goto failed;
}
len = PyList_GET_SIZE(tmp);
- ifs = _Py_asdl_seq_new(len, arena);
+ ifs = _Ta3_asdl_seq_new(len, arena);
if (ifs == NULL) goto failed;
for (i = 0; i < len; i++) {
expr_ty value;
res = obj2ast_expr(PyList_GET_ITEM(tmp, i), &value, arena);
if (res != 0) goto failed;
+ if (len != PyList_GET_SIZE(tmp)) {
+ PyErr_SetString(PyExc_RuntimeError, "comprehension field \"ifs\" changed size during iteration");
+ goto failed;
+ }
asdl_seq_SET(ifs, i, value);
}
Py_CLEAR(tmp);
@@ -7143,7 +7722,18 @@ obj2ast_comprehension(PyObject* obj, comprehension_ty* out, PyArena* arena)
PyErr_SetString(PyExc_TypeError, "required field \"ifs\" missing from comprehension");
return 1;
}
- *out = comprehension(target, iter, ifs, arena);
+ if (_PyObject_HasAttrId(obj, &PyId_is_async)) {
+ int res;
+ tmp = _PyObject_GetAttrId(obj, &PyId_is_async);
+ if (tmp == NULL) goto failed;
+ res = obj2ast_int(tmp, &is_async, arena);
+ if (res != 0) goto failed;
+ Py_CLEAR(tmp);
+ } else {
+ PyErr_SetString(PyExc_TypeError, "required field \"is_async\" missing from comprehension");
+ return 1;
+ }
+ *out = comprehension(target, iter, ifs, is_async, arena);
return 0;
failed:
Py_XDECREF(tmp);
@@ -7225,12 +7815,16 @@ obj2ast_excepthandler(PyObject* obj, excepthandler_ty* out, PyArena* arena)
goto failed;
}
len = PyList_GET_SIZE(tmp);
- body = _Py_asdl_seq_new(len, arena);
+ body = _Ta3_asdl_seq_new(len, arena);
if (body == NULL) goto failed;
for (i = 0; i < len; i++) {
stmt_ty value;
res = obj2ast_stmt(PyList_GET_ITEM(tmp, i), &value, arena);
if (res != 0) goto failed;
+ if (len != PyList_GET_SIZE(tmp)) {
+ PyErr_SetString(PyExc_RuntimeError, "ExceptHandler field \"body\" changed size during iteration");
+ goto failed;
+ }
asdl_seq_SET(body, i, value);
}
Py_CLEAR(tmp);
@@ -7271,12 +7865,16 @@ obj2ast_arguments(PyObject* obj, arguments_ty* out, PyArena* arena)
goto failed;
}
len = PyList_GET_SIZE(tmp);
- args = _Py_asdl_seq_new(len, arena);
+ args = _Ta3_asdl_seq_new(len, arena);
if (args == NULL) goto failed;
for (i = 0; i < len; i++) {
arg_ty value;
res = obj2ast_arg(PyList_GET_ITEM(tmp, i), &value, arena);
if (res != 0) goto failed;
+ if (len != PyList_GET_SIZE(tmp)) {
+ PyErr_SetString(PyExc_RuntimeError, "arguments field \"args\" changed size during iteration");
+ goto failed;
+ }
asdl_seq_SET(args, i, value);
}
Py_CLEAR(tmp);
@@ -7305,12 +7903,16 @@ obj2ast_arguments(PyObject* obj, arguments_ty* out, PyArena* arena)
goto failed;
}
len = PyList_GET_SIZE(tmp);
- kwonlyargs = _Py_asdl_seq_new(len, arena);
+ kwonlyargs = _Ta3_asdl_seq_new(len, arena);
if (kwonlyargs == NULL) goto failed;
for (i = 0; i < len; i++) {
arg_ty value;
res = obj2ast_arg(PyList_GET_ITEM(tmp, i), &value, arena);
if (res != 0) goto failed;
+ if (len != PyList_GET_SIZE(tmp)) {
+ PyErr_SetString(PyExc_RuntimeError, "arguments field \"kwonlyargs\" changed size during iteration");
+ goto failed;
+ }
asdl_seq_SET(kwonlyargs, i, value);
}
Py_CLEAR(tmp);
@@ -7329,12 +7931,16 @@ obj2ast_arguments(PyObject* obj, arguments_ty* out, PyArena* arena)
goto failed;
}
len = PyList_GET_SIZE(tmp);
- kw_defaults = _Py_asdl_seq_new(len, arena);
+ kw_defaults = _Ta3_asdl_seq_new(len, arena);
if (kw_defaults == NULL) goto failed;
for (i = 0; i < len; i++) {
expr_ty value;
res = obj2ast_expr(PyList_GET_ITEM(tmp, i), &value, arena);
if (res != 0) goto failed;
+ if (len != PyList_GET_SIZE(tmp)) {
+ PyErr_SetString(PyExc_RuntimeError, "arguments field \"kw_defaults\" changed size during iteration");
+ goto failed;
+ }
asdl_seq_SET(kw_defaults, i, value);
}
Py_CLEAR(tmp);
@@ -7363,12 +7969,16 @@ obj2ast_arguments(PyObject* obj, arguments_ty* out, PyArena* arena)
goto failed;
}
len = PyList_GET_SIZE(tmp);
- defaults = _Py_asdl_seq_new(len, arena);
+ defaults = _Ta3_asdl_seq_new(len, arena);
if (defaults == NULL) goto failed;
for (i = 0; i < len; i++) {
expr_ty value;
res = obj2ast_expr(PyList_GET_ITEM(tmp, i), &value, arena);
if (res != 0) goto failed;
+ if (len != PyList_GET_SIZE(tmp)) {
+ PyErr_SetString(PyExc_RuntimeError, "arguments field \"defaults\" changed size during iteration");
+ goto failed;
+ }
asdl_seq_SET(defaults, i, value);
}
Py_CLEAR(tmp);
@@ -7390,6 +8000,7 @@ obj2ast_arg(PyObject* obj, arg_ty* out, PyArena* arena)
PyObject* tmp = NULL;
identifier arg;
expr_ty annotation;
+ string type_comment;
int lineno;
int col_offset;
@@ -7414,6 +8025,16 @@ obj2ast_arg(PyObject* obj, arg_ty* out, PyArena* arena)
} else {
annotation = NULL;
}
+ if (exists_not_none(obj, &PyId_type_comment)) {
+ int res;
+ tmp = _PyObject_GetAttrId(obj, &PyId_type_comment);
+ if (tmp == NULL) goto failed;
+ res = obj2ast_string(tmp, &type_comment, arena);
+ if (res != 0) goto failed;
+ Py_CLEAR(tmp);
+ } else {
+ type_comment = NULL;
+ }
if (_PyObject_HasAttrId(obj, &PyId_lineno)) {
int res;
tmp = _PyObject_GetAttrId(obj, &PyId_lineno);
@@ -7436,7 +8057,7 @@ obj2ast_arg(PyObject* obj, arg_ty* out, PyArena* arena)
PyErr_SetString(PyExc_TypeError, "required field \"col_offset\" missing from arg");
return 1;
}
- *out = arg(arg, annotation, lineno, col_offset, arena);
+ *out = arg(arg, annotation, type_comment, lineno, col_offset, arena);
return 0;
failed:
Py_XDECREF(tmp);
@@ -7589,20 +8210,20 @@ obj2ast_type_ignore(PyObject* obj, type_ignore_ty* out, PyArena* arena)
}
-PyObject *ast35_parse(PyObject *self, PyObject *args);
-static PyMethodDef ast35_methods[] = {
- {"_parse", ast35_parse, METH_VARARGS, "Parse string into typed AST."},
+PyObject *ast3_parse(PyObject *self, PyObject *args);
+static PyMethodDef ast3_methods[] = {
+ {"_parse", ast3_parse, METH_VARARGS, "Parse string into typed AST."},
{NULL, NULL, 0, NULL}
};
-static struct PyModuleDef _astmodule35 = {
- PyModuleDef_HEAD_INIT, "_ast35", NULL, 0, ast35_methods
+static struct PyModuleDef _astmodule3 = {
+ PyModuleDef_HEAD_INIT, "_ast3", NULL, 0, ast3_methods
};
PyMODINIT_FUNC
-PyInit__ast35(void)
+PyInit__ast3(void)
{
PyObject *m, *d;
if (!init_types()) return NULL;
- m = PyModule_Create(&_astmodule35);
+ m = PyModule_Create(&_astmodule3);
if (!m) return NULL;
d = PyModule_GetDict(m);
if (PyDict_SetItemString(d, "AST", (PyObject*)&AST_type) < 0) return NULL;
@@ -7634,6 +8255,8 @@ PyInit__ast35(void)
NULL;
if (PyDict_SetItemString(d, "AugAssign", (PyObject*)AugAssign_type) < 0)
return NULL;
+ if (PyDict_SetItemString(d, "AnnAssign", (PyObject*)AnnAssign_type) < 0)
+ return NULL;
if (PyDict_SetItemString(d, "For", (PyObject*)For_type) < 0) return NULL;
if (PyDict_SetItemString(d, "AsyncFor", (PyObject*)AsyncFor_type) < 0)
return NULL;
@@ -7694,12 +8317,18 @@ PyInit__ast35(void)
if (PyDict_SetItemString(d, "Call", (PyObject*)Call_type) < 0) return NULL;
if (PyDict_SetItemString(d, "Num", (PyObject*)Num_type) < 0) return NULL;
if (PyDict_SetItemString(d, "Str", (PyObject*)Str_type) < 0) return NULL;
+ if (PyDict_SetItemString(d, "FormattedValue",
+ (PyObject*)FormattedValue_type) < 0) return NULL;
+ if (PyDict_SetItemString(d, "JoinedStr", (PyObject*)JoinedStr_type) < 0)
+ return NULL;
if (PyDict_SetItemString(d, "Bytes", (PyObject*)Bytes_type) < 0) return
NULL;
if (PyDict_SetItemString(d, "NameConstant", (PyObject*)NameConstant_type) <
0) return NULL;
if (PyDict_SetItemString(d, "Ellipsis", (PyObject*)Ellipsis_type) < 0)
return NULL;
+ if (PyDict_SetItemString(d, "Constant", (PyObject*)Constant_type) < 0)
+ return NULL;
if (PyDict_SetItemString(d, "Attribute", (PyObject*)Attribute_type) < 0)
return NULL;
if (PyDict_SetItemString(d, "Subscript", (PyObject*)Subscript_type) < 0)
@@ -7801,7 +8430,7 @@ PyInit__ast35(void)
}
-PyObject* Ta35AST_mod2obj(mod_ty t)
+PyObject* Ta3AST_mod2obj(mod_ty t)
{
if (!init_types())
return NULL;
@@ -7809,7 +8438,7 @@ PyObject* Ta35AST_mod2obj(mod_ty t)
}
/* mode is 0 for "exec", 1 for "eval" and 2 for "single" input */
-mod_ty Ta35AST_obj2mod(PyObject* ast, PyArena* arena, int mode)
+mod_ty Ta3AST_obj2mod(PyObject* ast, PyArena* arena, int mode)
{
mod_ty res;
PyObject *req_type[3];
@@ -7839,7 +8468,7 @@ mod_ty Ta35AST_obj2mod(PyObject* ast, PyArena* arena, int mode)
return res;
}
-int Ta35AST_Check(PyObject* obj)
+int Ta3AST_Check(PyObject* obj)
{
if (!init_types())
return -1;
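
Every list-conversion loop in the hunks above gains the same re-entrancy guard: converting a single element calls back into an obj2ast_* routine, which can execute arbitrary Python code that mutates the very list being walked, so the length captured before the loop is re-checked on each iteration and the conversion aborts with a RuntimeError if it changed. Below is a minimal, self-contained C sketch of that guard pattern (a plain array plus a callback standing in for the converter; the names are illustrative and not part of typed_ast):

#include <stdio.h>

/* Stand-in for a per-element converter that may mutate the container
 * (in the generated code, obj2ast_* can run arbitrary Python code). */
typedef int (*convert_fn)(int *items, int *len, int i);

static int convert_all(int *items, int *len, convert_fn convert)
{
    int expected = *len;                 /* length captured before the loop */
    for (int i = 0; i < expected; i++) {
        if (convert(items, len, i) != 0)
            return -1;
        if (expected != *len) {          /* list changed size during iteration */
            fprintf(stderr, "field changed size during iteration\n");
            return -1;
        }
    }
    return 0;
}

/* A converter that misbehaves on the second element by shrinking the list. */
static int shrinking_convert(int *items, int *len, int i)
{
    (void)items;
    if (i == 1)
        *len -= 1;
    return 0;
}

int main(void)
{
    int items[] = {1, 2, 3, 4};
    int len = 4;
    return convert_all(items, &len, shrinking_convert) == 0 ? 0 : 1;
}

Run as written, convert_all() rejects the shrinking converter on the second iteration instead of reading past the new end of the array, which is exactly the failure mode the generated checks defend against.
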
diff --git a/ast35/Python/asdl.c b/ast3/Python/asdl.c
similarity index 75%
rename from ast35/Python/asdl.c
rename to ast3/Python/asdl.c
index df387b2..8722057 100644
--- a/ast35/Python/asdl.c
+++ b/ast3/Python/asdl.c
@@ -2,21 +2,21 @@
#include "asdl.h"
asdl_seq *
-_Py_asdl_seq_new(Py_ssize_t size, PyArena *arena)
+_Ta3_asdl_seq_new(Py_ssize_t size, PyArena *arena)
{
asdl_seq *seq = NULL;
size_t n;
/* check size is sane */
if (size < 0 ||
- (size && (((size_t)size - 1) > (PY_SIZE_MAX / sizeof(void *))))) {
+ (size && (((size_t)size - 1) > (SIZE_MAX / sizeof(void *))))) {
PyErr_NoMemory();
return NULL;
}
n = (size ? (sizeof(void *) * (size - 1)) : 0);
/* check if size can be added safely */
- if (n > PY_SIZE_MAX - sizeof(asdl_seq)) {
+ if (n > SIZE_MAX - sizeof(asdl_seq)) {
PyErr_NoMemory();
return NULL;
}
@@ -33,21 +33,21 @@ _Py_asdl_seq_new(Py_ssize_t size, PyArena *arena)
}
asdl_int_seq *
-_Py_asdl_int_seq_new(Py_ssize_t size, PyArena *arena)
+_Ta3_asdl_int_seq_new(Py_ssize_t size, PyArena *arena)
{
asdl_int_seq *seq = NULL;
size_t n;
/* check size is sane */
if (size < 0 ||
- (size && (((size_t)size - 1) > (PY_SIZE_MAX / sizeof(void *))))) {
+ (size && (((size_t)size - 1) > (SIZE_MAX / sizeof(void *))))) {
PyErr_NoMemory();
return NULL;
}
n = (size ? (sizeof(void *) * (size - 1)) : 0);
/* check if size can be added safely */
- if (n > PY_SIZE_MAX - sizeof(asdl_seq)) {
+ if (n > SIZE_MAX - sizeof(asdl_seq)) {
PyErr_NoMemory();
return NULL;
}
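
Besides the _Py_ to _Ta3_ renaming, the allocator hunks above switch the overflow guards from PY_SIZE_MAX to SIZE_MAX: the payload is sizeof(void *) * (size - 1) bytes on top of the struct header, and both the multiplication and the addition have to be checked before the arena allocation. A stand-alone sketch of that size arithmetic follows (illustrative struct and function names only, not the typed_ast definitions):

#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

/* Mirrors the shape of asdl_seq: a header followed by 'size' pointers,
 * where one pointer slot is already part of the header. */
typedef struct {
    size_t size;
    void *elements[1];
} seq;

/* Returns the number of bytes to allocate, or 0 on overflow/invalid size. */
static size_t seq_alloc_size(ptrdiff_t size)
{
    size_t n;

    /* check size is sane: the multiplication must not overflow */
    if (size < 0 ||
        (size && ((size_t)size - 1) > (SIZE_MAX / sizeof(void *))))
        return 0;

    n = size ? sizeof(void *) * ((size_t)size - 1) : 0;

    /* check the header can be added safely */
    if (n > SIZE_MAX - sizeof(seq))
        return 0;

    return sizeof(seq) + n;
}

int main(void)
{
    printf("%zu\n", seq_alloc_size(10));           /* small request succeeds */
    printf("%zu\n", seq_alloc_size(PTRDIFF_MAX));  /* rejected: prints 0 */
    return 0;
}

On a typical 64-bit build the small request yields a modest allocation size, while the huge one is rejected before the multiplication can wrap.
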
diff --git a/ast35/Python/ast.c b/ast3/Python/ast.c
similarity index 73%
rename from ast35/Python/ast.c
rename to ast3/Python/ast.c
index d9b632c..2c1120e 100644
--- a/ast35/Python/ast.c
+++ b/ast3/Python/ast.c
@@ -1,6 +1,6 @@
/*
* This file includes functions to transform a concrete syntax tree (CST) to
- * an abstract syntax tree (AST). The main function is Ta35AST_FromNode().
+ * an abstract syntax tree (AST). The main function is Ta3AST_FromNode().
*
*/
#include "Python.h"
@@ -13,6 +13,10 @@
#if PY_MINOR_VERSION < 4
#define PyErr_ProgramTextObject PyErr_ProgramText
+
+#define PyMem_RawMalloc PyMem_Malloc
+#define PyMem_RawRealloc PyMem_Realloc
+#define PyMem_RawFree PyMem_Free
#endif
static int validate_stmts(asdl_seq *);
@@ -21,6 +25,11 @@ static int validate_nonempty_seq(asdl_seq *, const char *, const char *);
static int validate_stmt(stmt_ty);
static int validate_expr(expr_ty, expr_context_ty);
+mod_ty
+string_object_to_c_ast(const char *s, PyObject *filename, int start,
+ PyCompilerFlags *flags, int feature_version,
+ PyArena *arena);
+
static int
validate_comprehension(asdl_seq *gens)
{
@@ -136,6 +145,52 @@ validate_arguments(arguments_ty args)
}
static int
+validate_constant(PyObject *value)
+{
+ if (value == Py_None || value == Py_Ellipsis)
+ return 1;
+
+ if (PyLong_CheckExact(value)
+ || PyFloat_CheckExact(value)
+ || PyComplex_CheckExact(value)
+ || PyBool_Check(value)
+ || PyUnicode_CheckExact(value)
+ || PyBytes_CheckExact(value))
+ return 1;
+
+ if (PyTuple_CheckExact(value) || PyFrozenSet_CheckExact(value)) {
+ PyObject *it;
+
+ it = PyObject_GetIter(value);
+ if (it == NULL)
+ return 0;
+
+ while (1) {
+ PyObject *item = PyIter_Next(it);
+ if (item == NULL) {
+ if (PyErr_Occurred()) {
+ Py_DECREF(it);
+ return 0;
+ }
+ break;
+ }
+
+ if (!validate_constant(item)) {
+ Py_DECREF(it);
+ Py_DECREF(item);
+ return 0;
+ }
+ Py_DECREF(item);
+ }
+
+ Py_DECREF(it);
+ return 1;
+ }
+
+ return 0;
+}
+
+static int
validate_expr(expr_ty exp, expr_context_ty ctx)
{
int check_ctx = 1;
@@ -244,6 +299,14 @@ validate_expr(expr_ty exp, expr_context_ty ctx)
return validate_expr(exp->v.Call.func, Load) &&
validate_exprs(exp->v.Call.args, Load, 0) &&
validate_keywords(exp->v.Call.keywords);
+ case Constant_kind:
+ if (!validate_constant(exp->v.Constant.value)) {
+ PyErr_Format(PyExc_TypeError,
+ "got an invalid type in Constant: %s",
+ Py_TYPE(exp->v.Constant.value)->tp_name);
+ return 0;
+ }
+ return 1;
case Num_kind: {
PyObject *n = exp->v.Num.n;
if (!PyLong_CheckExact(n) && !PyFloat_CheckExact(n) &&
@@ -261,6 +324,14 @@ validate_expr(expr_ty exp, expr_context_ty ctx)
}
return 1;
}
+ case JoinedStr_kind:
+ return validate_exprs(exp->v.JoinedStr.values, Load, 0);
+ case FormattedValue_kind:
+ if (validate_expr(exp->v.FormattedValue.value, Load) == 0)
+ return 0;
+ if (exp->v.FormattedValue.format_spec)
+ return validate_expr(exp->v.FormattedValue.format_spec, Load);
+ return 1;
case Bytes_kind: {
PyObject *b = exp->v.Bytes.s;
if (!PyBytes_CheckExact(b)) {
@@ -334,24 +405,22 @@ validate_stmt(stmt_ty stmt)
case Delete_kind:
return validate_assignlist(stmt->v.Delete.targets, Del);
case Assign_kind:
- if (!stmt->v.Assign.value && !stmt->v.Assign.annotation) {
- PyErr_SetString(PyExc_TypeError,
- "Assignment should at least have type or value");
- return 0;
- }
- if (stmt->v.Assign.type_comment && stmt->v.Assign.annotation) {
- PyErr_SetString(PyExc_TypeError,
- "Assignment can't have both annotation and type comment");
- return 0;
- }
return validate_assignlist(stmt->v.Assign.targets, Store) &&
- (!stmt->v.Assign.value ||
- validate_expr(stmt->v.Assign.value, Load)) &&
- (!stmt->v.Assign.annotation ||
- validate_expr(stmt->v.Assign.annotation, Load));
+ validate_expr(stmt->v.Assign.value, Load);
case AugAssign_kind:
return validate_expr(stmt->v.AugAssign.target, Store) &&
validate_expr(stmt->v.AugAssign.value, Load);
+ case AnnAssign_kind:
+ if (stmt->v.AnnAssign.target->kind != Name_kind &&
+ stmt->v.AnnAssign.simple) {
+ PyErr_SetString(PyExc_TypeError,
+ "AnnAssign with simple non-Name target");
+ return 0;
+ }
+ return validate_expr(stmt->v.AnnAssign.target, Store) &&
+ (!stmt->v.AnnAssign.value ||
+ validate_expr(stmt->v.AnnAssign.value, Load)) &&
+ validate_expr(stmt->v.AnnAssign.annotation, Load);
case For_kind:
return validate_expr(stmt->v.For.target, Store) &&
validate_expr(stmt->v.For.iter, Load) &&
@@ -430,8 +499,8 @@ validate_stmt(stmt_ty stmt)
case Import_kind:
return validate_nonempty_seq(stmt->v.Import.names, "names", "Import");
case ImportFrom_kind:
- if (stmt->v.ImportFrom.level < -1) {
- PyErr_SetString(PyExc_ValueError, "ImportFrom level less than -1");
+ if (stmt->v.ImportFrom.level < 0) {
+ PyErr_SetString(PyExc_ValueError, "Negative ImportFrom level");
return 0;
}
return validate_nonempty_seq(stmt->v.ImportFrom.names, "names", "ImportFrom");
@@ -497,7 +566,7 @@ validate_exprs(asdl_seq *exprs, expr_context_ty ctx, int null_ok)
}
int
-Ta35AST_Validate(mod_ty mod)
+Ta3AST_Validate(mod_ty mod)
{
int res = 0;
@@ -529,11 +598,11 @@ Ta35AST_Validate(mod_ty mod)
/* Data structure used internally */
struct compiling {
- char *c_encoding; /* source encoding */
- PyArena *c_arena; /* arena for allocating memeory */
+ PyArena *c_arena; /* Arena for allocating memory. */
PyObject *c_filename; /* filename */
PyObject *c_normalize; /* Normalization function from unicodedata. */
PyObject *c_normalize_args; /* Normalization argument tuple. */
+ int c_feature_version; /* Latest minor version of Python for allowed features */
};
static asdl_seq *seq_for_testlist(struct compiling *, const node *);
@@ -552,9 +621,7 @@ static stmt_ty ast_for_for_stmt(struct compiling *, const node *, int);
static expr_ty ast_for_call(struct compiling *, const node *, expr_ty);
static PyObject *parsenumber(struct compiling *, const char *);
-static PyObject *parsestr(struct compiling *, const node *n, int *bytesmode);
-static PyObject *parsestrplus(struct compiling *, const node *n,
- int *bytesmode);
+static expr_ty parsestrplus(struct compiling *, const node *n);
#define COMP_GENEXP 0
#define COMP_LISTCOMP 1
@@ -716,8 +783,9 @@ num_stmts(const node *n)
*/
mod_ty
-Ta35AST_FromNodeObject(const node *n, PyCompilerFlags *flags,
- PyObject *filename, PyArena *arena)
+Ta3AST_FromNodeObject(const node *n, PyCompilerFlags *flags,
+ PyObject *filename, int feature_version,
+ PyArena *arena)
{
int i, j, k, num;
asdl_seq *stmts = NULL;
@@ -732,28 +800,17 @@ Ta35AST_FromNodeObject(const node *n, PyCompilerFlags *flags,
c.c_arena = arena;
/* borrowed reference */
c.c_filename = filename;
- c.c_normalize = c.c_normalize_args = NULL;
- if (flags && flags->cf_flags & PyCF_SOURCE_IS_UTF8) {
- c.c_encoding = "utf-8";
- if (TYPE(n) == encoding_decl) {
-#if 0
- ast_error(c, n, "encoding declaration in Unicode string");
- goto out;
-#endif
- n = CHILD(n, 0);
- }
- } else if (TYPE(n) == encoding_decl) {
- c.c_encoding = STR(n);
+ c.c_normalize = NULL;
+ c.c_normalize_args = NULL;
+ c.c_feature_version = feature_version;
+
+ if (TYPE(n) == encoding_decl)
n = CHILD(n, 0);
- } else {
- /* PEP 3120 */
- c.c_encoding = "utf-8";
- }
k = 0;
switch (TYPE(n)) {
case file_input:
- stmts = _Py_asdl_seq_new(num_stmts(n), arena);
+ stmts = _Ta3_asdl_seq_new(num_stmts(n), arena);
if (!stmts)
goto out;
for (i = 0; i < NCH(n) - 1; i++) {
@@ -784,7 +841,7 @@ Ta35AST_FromNodeObject(const node *n, PyCompilerFlags *flags,
ch = CHILD(n, NCH(n) - 1);
REQ(ch, ENDMARKER);
num = NCH(ch);
- type_ignores = _Py_asdl_seq_new(num, arena);
+ type_ignores = _Ta3_asdl_seq_new(num, arena);
if (!type_ignores)
goto out;
@@ -809,7 +866,7 @@ Ta35AST_FromNodeObject(const node *n, PyCompilerFlags *flags,
}
case single_input:
if (TYPE(CHILD(n, 0)) == NEWLINE) {
- stmts = _Py_asdl_seq_new(1, arena);
+ stmts = _Ta3_asdl_seq_new(1, arena);
if (!stmts)
goto out;
asdl_seq_SET(stmts, 0, Pass(n->n_lineno, n->n_col_offset,
@@ -821,7 +878,7 @@ Ta35AST_FromNodeObject(const node *n, PyCompilerFlags *flags,
else {
n = CHILD(n, 0);
num = num_stmts(n);
- stmts = _Py_asdl_seq_new(num, arena);
+ stmts = _Ta3_asdl_seq_new(num, arena);
if (!stmts)
goto out;
if (num == 1) {
@@ -860,7 +917,7 @@ Ta35AST_FromNodeObject(const node *n, PyCompilerFlags *flags,
num++;
}
- argtypes = _Py_asdl_seq_new(num, arena);
+ argtypes = _Ta3_asdl_seq_new(num, arena);
j = 0;
for (i = 0; i < NCH(ch); i++) {
@@ -873,7 +930,7 @@ Ta35AST_FromNodeObject(const node *n, PyCompilerFlags *flags,
}
}
else
- argtypes = _Py_asdl_seq_new(0, arena);
+ argtypes = _Ta3_asdl_seq_new(0, arena);
ret = ast_for_expr(&c, CHILD(n, NCH(n) - 1));
if (!ret)
@@ -882,7 +939,7 @@ Ta35AST_FromNodeObject(const node *n, PyCompilerFlags *flags,
break;
default:
PyErr_Format(PyExc_SystemError,
- "invalid node %d for Ta35AST_FromNode", TYPE(n));
+ "invalid node %d for Ta3AST_FromNode", TYPE(n));
goto out;
}
out:
@@ -895,15 +952,15 @@ Ta35AST_FromNodeObject(const node *n, PyCompilerFlags *flags,
}
mod_ty
-Ta35AST_FromNode(const node *n, PyCompilerFlags *flags, const char *filename_str,
- PyArena *arena)
+Ta3AST_FromNode(const node *n, PyCompilerFlags *flags, const char *filename_str,
+ int feature_version, PyArena *arena)
{
mod_ty mod;
PyObject *filename;
filename = PyUnicode_DecodeFSDefault(filename_str);
if (filename == NULL)
return NULL;
- mod = Ta35AST_FromNodeObject(n, flags, filename, arena);
+ mod = Ta3AST_FromNodeObject(n, flags, filename, feature_version, arena);
Py_DECREF(filename);
return mod;
@@ -913,7 +970,7 @@ Ta35AST_FromNode(const node *n, PyCompilerFlags *flags, const char *filename_str
*/
static operator_ty
-get_operator(const node *n)
+get_operator(struct compiling *c, const node *n)
{
switch (TYPE(n)) {
case VBAR:
@@ -933,6 +990,11 @@ get_operator(const node *n)
case STAR:
return Mult;
case AT:
+ if (c->c_feature_version < 5) {
+ ast_error(c, n,
+ "The '@' operator is only supported in Python 3.5 and greater");
+ return (operator_ty)0;
+ }
return MatMult;
case SLASH:
return Div;
@@ -945,7 +1007,7 @@ get_operator(const node *n)
}
}
-static const char* FORBIDDEN[] = {
+static const char * const FORBIDDEN[] = {
"None",
"True",
"False",
@@ -962,7 +1024,7 @@ forbidden_name(struct compiling *c, identifier name, const node *n,
return 1;
}
if (full_checks) {
- const char **p;
+ const char * const *p;
for (p = FORBIDDEN; *p; p++) {
if (PyUnicode_CompareWithASCIIString(name, *p) == 0) {
ast_error(c, n, "assignment to keyword");
@@ -1024,13 +1086,8 @@ set_context(struct compiling *c, expr_ty e, expr_context_ty ctx, const node *n)
s = e->v.List.elts;
break;
case Tuple_kind:
- if (asdl_seq_LEN(e->v.Tuple.elts)) {
- e->v.Tuple.ctx = ctx;
- s = e->v.Tuple.elts;
- }
- else {
- expr_name = "()";
- }
+ e->v.Tuple.ctx = ctx;
+ s = e->v.Tuple.elts;
break;
case Lambda_kind:
expr_name = "lambda";
@@ -1067,6 +1124,8 @@ set_context(struct compiling *c, expr_ty e, expr_context_ty ctx, const node *n)
case Num_kind:
case Str_kind:
case Bytes_kind:
+ case JoinedStr_kind:
+ case FormattedValue_kind:
expr_name = "literal";
break;
case NameConstant_kind:
@@ -1144,6 +1203,11 @@ ast_for_augassign(struct compiling *c, const node *n)
else
return Mult;
case '@':
+ if (c->c_feature_version < 5) {
+ ast_error(c, n,
+ "The '@' operator is only supported in Python 3.5 and greater");
+ return (operator_ty)0;
+ }
return MatMult;
default:
PyErr_Format(PyExc_SystemError, "invalid augassign: %s", STR(n));
@@ -1214,7 +1278,7 @@ seq_for_testlist(struct compiling *c, const node *n)
int i;
assert(TYPE(n) == testlist || TYPE(n) == testlist_star_expr || TYPE(n) == testlist_comp);
- seq = _Py_asdl_seq_new((NCH(n) + 1) / 2, c->c_arena);
+ seq = _Ta3_asdl_seq_new((NCH(n) + 1) / 2, c->c_arena);
if (!seq)
return NULL;
@@ -1254,28 +1318,12 @@ ast_for_arg(struct compiling *c, const node *n)
return NULL;
}
- ret = arg(name, annotation, LINENO(n), n->n_col_offset, c->c_arena);
+ ret = arg(name, annotation, NULL, LINENO(n), n->n_col_offset, c->c_arena);
if (!ret)
return NULL;
return ret;
}
-static int
-set_arg_comment_annotation(struct compiling *c, arg_ty arg, node *tc)
-{
- if (arg->annotation) {
- ast_error(c, tc,
- "annotated arg has associated type comment");
- return 0;
- }
-
- arg->annotation = Str(NEW_TYPE_COMMENT(tc), LINENO(tc), tc->n_col_offset, c->c_arena);
- if (!arg->annotation)
- return 0;
-
- return 1;
-}
-
/* returns -1 if failed to handle keyword only arguments
returns new position to keep processing if successful
(',' tfpdef ['=' test])*
@@ -1328,19 +1376,18 @@ handle_keywordonly_args(struct compiling *c, const node *n, int start,
goto error;
if (forbidden_name(c, argname, ch, 0))
goto error;
- arg = arg(argname, annotation, LINENO(ch), ch->n_col_offset,
+ arg = arg(argname, annotation, NULL, LINENO(ch), ch->n_col_offset,
c->c_arena);
if (!arg)
goto error;
asdl_seq_SET(kwonlyargs, j++, arg);
i += 1; /* the name */
- if (TYPE(CHILD(n, i)) == COMMA)
+ if (i < NCH(n) && TYPE(CHILD(n, i)) == COMMA)
i += 1; /* the comma, if present */
break;
case TYPE_COMMENT:
/* arg will be equal to the last argument processed */
- if (!set_arg_comment_annotation(c, arg, ch))
- return -1;
+ arg->type_comment = NEW_TYPE_COMMENT(ch);
i += 1;
break;
case DOUBLESTAR:
@@ -1364,16 +1411,20 @@ ast_for_arguments(struct compiling *c, const node *n)
and varargslist (lambda definition).
parameters: '(' [typedargslist] ')'
- typedargslist: ((tfpdef ['=' test] ',')*
- ('*' [tfpdef] (',' tfpdef ['=' test])* [',' '**' tfpdef]
- | '**' tfpdef)
- | tfpdef ['=' test] (',' tfpdef ['=' test])* [','])
+ typedargslist: (tfpdef ['=' test] (',' tfpdef ['=' test])* [',' [
+ '*' [tfpdef] (',' tfpdef ['=' test])* [',' ['**' tfpdef [',']]]
+ | '**' tfpdef [',']]]
+ | '*' [tfpdef] (',' tfpdef ['=' test])* [',' ['**' tfpdef [',']]]
+ | '**' tfpdef [','])
tfpdef: NAME [':' test]
- varargslist: ((vfpdef ['=' test] ',')*
- ('*' [vfpdef] (',' vfpdef ['=' test])* [',' '**' vfpdef]
- | '**' vfpdef)
- | vfpdef ['=' test] (',' vfpdef ['=' test])* [','])
+ varargslist: (vfpdef ['=' test] (',' vfpdef ['=' test])* [',' [
+ '*' [vfpdef] (',' vfpdef ['=' test])* [',' ['**' vfpdef [',']]]
+ | '**' vfpdef [',']]]
+ | '*' [vfpdef] (',' vfpdef ['=' test])* [',' ['**' vfpdef [',']]]
+ | '**' vfpdef [',']
+ )
vfpdef: NAME
+
*/
int i, j, k, nposargs = 0, nkwonlyargs = 0;
int nposdefaults = 0, found_default = 0;
@@ -1416,22 +1467,22 @@ ast_for_arguments(struct compiling *c, const node *n)
if (TYPE(ch) == DOUBLESTAR) break;
if (TYPE(ch) == tfpdef || TYPE(ch) == vfpdef) nkwonlyargs++;
}
- posargs = (nposargs ? _Py_asdl_seq_new(nposargs, c->c_arena) : NULL);
+ posargs = (nposargs ? _Ta3_asdl_seq_new(nposargs, c->c_arena) : NULL);
if (!posargs && nposargs)
return NULL;
kwonlyargs = (nkwonlyargs ?
- _Py_asdl_seq_new(nkwonlyargs, c->c_arena) : NULL);
+ _Ta3_asdl_seq_new(nkwonlyargs, c->c_arena) : NULL);
if (!kwonlyargs && nkwonlyargs)
return NULL;
posdefaults = (nposdefaults ?
- _Py_asdl_seq_new(nposdefaults, c->c_arena) : NULL);
+ _Ta3_asdl_seq_new(nposdefaults, c->c_arena) : NULL);
if (!posdefaults && nposdefaults)
return NULL;
/* The length of kwonlyargs and kwdefaults are same
since we set NULL as default for keyword only argument w/o default
- we have sequence data structure, but no dictionary */
kwdefaults = (nkwonlyargs ?
- _Py_asdl_seq_new(nkwonlyargs, c->c_arena) : NULL);
+ _Ta3_asdl_seq_new(nkwonlyargs, c->c_arena) : NULL);
if (!kwdefaults && nkwonlyargs)
return NULL;
@@ -1473,11 +1524,13 @@ ast_for_arguments(struct compiling *c, const node *n)
return NULL;
asdl_seq_SET(posargs, k++, arg);
i += 1; /* the name */
- if (TYPE(CHILD(n, i)) == COMMA)
+ if (i < NCH(n) && TYPE(CHILD(n, i)) == COMMA)
i += 1; /* the comma, if present */
break;
case STAR:
- if (i+1 >= NCH(n) || TYPE(CHILD(n, i+1)) == TYPE_COMMENT) {
+ if (i+1 >= NCH(n) ||
+ (i+2 == NCH(n) && (TYPE(CHILD(n, i+1)) == COMMA
+ || TYPE(CHILD(n, i+1)) == TYPE_COMMENT))) {
ast_error(c, CHILD(n, i),
"named arguments must follow bare *");
return NULL;
@@ -1487,7 +1540,7 @@ ast_for_arguments(struct compiling *c, const node *n)
int res = 0;
i += 2; /* now follows keyword only arguments */
- if (TYPE(CHILD(n, i)) == TYPE_COMMENT) {
+ if (i < NCH(n) && TYPE(CHILD(n, i)) == TYPE_COMMENT) {
ast_error(c, CHILD(n, i),
"bare * has associated type comment");
return NULL;
@@ -1503,14 +1556,12 @@ ast_for_arguments(struct compiling *c, const node *n)
if (!vararg)
return NULL;
- i += 2; /* the star and the name */
- if (TYPE(CHILD(n, i)) == COMMA)
- i += 1; /* the comma, if present */
-
- if (TYPE(CHILD(n, i)) == TYPE_COMMENT) {
- if (!set_arg_comment_annotation(c, vararg, CHILD(n, i)))
- return NULL;
+ i += 2; /* the star and the name */
+ if (i < NCH(n) && TYPE(CHILD(n, i)) == COMMA)
+ i += 1; /* the comma, if present */
+ if (i < NCH(n) && TYPE(CHILD(n, i)) == TYPE_COMMENT) {
+ vararg->type_comment = NEW_TYPE_COMMENT(CHILD(n, i));
i += 1;
}
@@ -1531,7 +1582,7 @@ ast_for_arguments(struct compiling *c, const node *n)
if (!kwarg)
return NULL;
i += 2; /* the double star and the name */
- if (TYPE(CHILD(n, i)) == COMMA)
+ if (i < NCH(n) && TYPE(CHILD(n, i)) == COMMA)
i += 1; /* the comma, if present */
break;
case TYPE_COMMENT:
@@ -1541,9 +1592,7 @@ ast_for_arguments(struct compiling *c, const node *n)
arg = kwarg;
/* arg will be equal to the last argument processed */
- if (!set_arg_comment_annotation(c, arg, ch))
- return NULL;
-
+ arg->type_comment = NEW_TYPE_COMMENT(ch);
i += 1;
break;
default:
@@ -1632,7 +1681,7 @@ ast_for_decorators(struct compiling *c, const node *n)
int i;
REQ(n, decorators);
- decorator_seq = _Py_asdl_seq_new(NCH(n), c->c_arena);
+ decorator_seq = _Ta3_asdl_seq_new(NCH(n), c->c_arena);
if (!decorator_seq)
return NULL;
@@ -1658,6 +1707,12 @@ ast_for_funcdef_impl(struct compiling *c, const node *n,
node *tc;
string type_comment = NULL;
+ if (is_async && c->c_feature_version < 5) {
+ ast_error(c, n,
+ "Async functions are only supported in Python 3.5 and greater");
+ return NULL;
+ }
+
REQ(n, funcdef);
name = NEW_IDENTIFIER(CHILD(n, name_i));
@@ -1840,14 +1895,21 @@ static int
count_comp_fors(struct compiling *c, const node *n)
{
int n_fors = 0;
+ int is_async;
count_comp_for:
+ is_async = 0;
n_fors++;
REQ(n, comp_for);
- if (NCH(n) == 5)
- n = CHILD(n, 4);
- else
+ if (TYPE(CHILD(n, 0)) == ASYNC) {
+ is_async = 1;
+ }
+ if (NCH(n) == (5 + is_async)) {
+ n = CHILD(n, 4 + is_async);
+ }
+ else {
return n_fors;
+ }
count_comp_iter:
REQ(n, comp_iter);
n = CHILD(n, 0);
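
The comprehension hunks here and below all index children with an extra "+ is_async" offset because the 3.6 grammar allows an optional leading ASYNC token in comp_for, which shifts every following child by one position. A tiny self-contained sketch of that offset logic (fake node and token types, not the CPython CST API):

#include <stdio.h>
#include <string.h>

#define MAXCH 8

typedef struct {
    const char *type;          /* token/rule name, e.g. "ASYNC", "exprlist" */
} tok;

typedef struct {
    int nch;
    tok children[MAXCH];
} node;

/* comp_for: ['async'] 'for' exprlist 'in' or_test [comp_iter] */
static void walk_comp_for(const node *n)
{
    int is_async = (n->nch > 0 && strcmp(n->children[0].type, "ASYNC") == 0);
    const tok *target = &n->children[1 + is_async]; /* exprlist */
    const tok *iter   = &n->children[3 + is_async]; /* or_test  */
    printf("async=%d target=%s iter=%s\n", is_async, target->type, iter->type);
}

int main(void)
{
    node plain  = {5, {{"for"}, {"exprlist"}, {"in"}, {"or_test"}, {"comp_iter"}}};
    node async_ = {6, {{"ASYNC"}, {"for"}, {"exprlist"}, {"in"}, {"or_test"}, {"comp_iter"}}};
    walk_comp_for(&plain);
    walk_comp_for(&async_);
    return 0;
}

Both calls resolve the same target and iter children even though the async form carries one extra leading child, which is why count_comp_fors and ast_for_comprehension only need the is_async offset rather than a separate code path.
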
@@ -1901,7 +1963,7 @@ ast_for_comprehension(struct compiling *c, const node *n)
if (n_fors == -1)
return NULL;
- comps = _Py_asdl_seq_new(n_fors, c->c_arena);
+ comps = _Ta3_asdl_seq_new(n_fors, c->c_arena);
if (!comps)
return NULL;
@@ -1910,14 +1972,26 @@ ast_for_comprehension(struct compiling *c, const node *n)
asdl_seq *t;
expr_ty expression, first;
node *for_ch;
+ int is_async = 0;
REQ(n, comp_for);
- for_ch = CHILD(n, 1);
+ if (TYPE(CHILD(n, 0)) == ASYNC) {
+ is_async = 1;
+ }
+
+ /* Async comprehensions only allowed in Python 3.6 and greater */
+ if (is_async && c->c_feature_version < 6) {
+ ast_error(c, n,
+ "Async comprehensions are only supported in Python 3.6 and greater");
+ return NULL;
+ }
+
+ for_ch = CHILD(n, 1 + is_async);
t = ast_for_exprlist(c, for_ch, Store);
if (!t)
return NULL;
- expression = ast_for_expr(c, CHILD(n, 3));
+ expression = ast_for_expr(c, CHILD(n, 3 + is_async));
if (!expression)
return NULL;
@@ -1925,24 +1999,25 @@ ast_for_comprehension(struct compiling *c, const node *n)
(x for x, in ...) has 1 element in t, but still requires a Tuple. */
first = (expr_ty)asdl_seq_GET(t, 0);
if (NCH(for_ch) == 1)
- comp = comprehension(first, expression, NULL, c->c_arena);
+ comp = comprehension(first, expression, NULL,
+ is_async, c->c_arena);
else
- comp = comprehension(Tuple(t, Store, first->lineno, first->col_offset,
- c->c_arena),
- expression, NULL, c->c_arena);
+ comp = comprehension(Tuple(t, Store, first->lineno,
+ first->col_offset, c->c_arena),
+ expression, NULL, is_async, c->c_arena);
if (!comp)
return NULL;
- if (NCH(n) == 5) {
+ if (NCH(n) == (5 + is_async)) {
int j, n_ifs;
asdl_seq *ifs;
- n = CHILD(n, 4);
+ n = CHILD(n, 4 + is_async);
n_ifs = count_comp_ifs(c, n);
if (n_ifs == -1)
return NULL;
- ifs = _Py_asdl_seq_new(n_ifs, c->c_arena);
+ ifs = _Ta3_asdl_seq_new(n_ifs, c->c_arena);
if (!ifs)
return NULL;
@@ -2071,11 +2146,11 @@ ast_for_dictdisplay(struct compiling *c, const node *n)
asdl_seq *keys, *values;
size = (NCH(n) + 1) / 3; /* +1 in case no trailing comma */
- keys = _Py_asdl_seq_new(size, c->c_arena);
+ keys = _Ta3_asdl_seq_new(size, c->c_arena);
if (!keys)
return NULL;
- values = _Py_asdl_seq_new(size, c->c_arena);
+ values = _Ta3_asdl_seq_new(size, c->c_arena);
if (!values)
return NULL;
@@ -2125,7 +2200,7 @@ ast_for_setdisplay(struct compiling *c, const node *n)
assert(TYPE(n) == (dictorsetmaker));
size = (NCH(n) + 1) / 2; /* +1 in case no trailing comma */
- elts = _Py_asdl_seq_new(size, c->c_arena);
+ elts = _Ta3_asdl_seq_new(size, c->c_arena);
if (!elts)
return NULL;
for (i = 0; i < NCH(n); i += 2) {
@@ -2146,7 +2221,6 @@ ast_for_atom(struct compiling *c, const node *n)
| '...' | 'None' | 'True' | 'False'
*/
node *ch = CHILD(n, 0);
- int bytesmode = 0;
switch (TYPE(ch)) {
case NAME: {
@@ -2168,7 +2242,7 @@ ast_for_atom(struct compiling *c, const node *n)
return Name(name, Load, LINENO(n), n->n_col_offset, c->c_arena);
}
case STRING: {
- PyObject *str = parsestrplus(c, n, &bytesmode);
+ expr_ty str = parsestrplus(c, n);
if (!str) {
const char *errtype = NULL;
if (PyErr_ExceptionMatches(PyExc_UnicodeError))
@@ -2177,16 +2251,19 @@ ast_for_atom(struct compiling *c, const node *n)
errtype = "value error";
if (errtype) {
char buf[128];
+ const char *s = NULL;
PyObject *type, *value, *tback, *errstr;
PyErr_Fetch(&type, &value, &tback);
errstr = PyObject_Str(value);
- if (errstr) {
- char *s = _PyUnicode_AsString(errstr);
+ if (errstr)
+ s = PyUnicode_AsUTF8(errstr);
+ if (s) {
PyOS_snprintf(buf, sizeof(buf), "(%s) %s", errtype, s);
- Py_DECREF(errstr);
} else {
+ PyErr_Clear();
PyOS_snprintf(buf, sizeof(buf), "(%s) unknown error", errtype);
}
+ Py_XDECREF(errstr);
ast_error(c, n, buf);
Py_DECREF(type);
Py_XDECREF(value);
@@ -2194,18 +2271,17 @@ ast_for_atom(struct compiling *c, const node *n)
}
return NULL;
}
- if (PyArena_AddPyObject(c->c_arena, str) < 0) {
- Py_DECREF(str);
- return NULL;
- }
- if (bytesmode)
- return Bytes(str, LINENO(n), n->n_col_offset, c->c_arena);
- else
- return Str(str, LINENO(n), n->n_col_offset, c->c_arena);
+ return str;
}
case NUMBER: {
const char *s = STR(ch);
- int contains_underscores = strchr(s, '_') != NULL;
+ /* Underscores in numeric literals are only allowed in Python 3.6 or greater */
+ /* Check for underscores here rather than in parse_number so we can report a line number on error */
+ if (c->c_feature_version < 6 && strchr(s, '_') != NULL) {
+ ast_error(c, ch,
+ "Underscores in numeric literals are only supported in Python 3.6 and greater");
+ return NULL;
+ }
PyObject *pynum = parsenumber(c, s);
if (!pynum)
return NULL;
@@ -2214,8 +2290,7 @@ ast_for_atom(struct compiling *c, const node *n)
Py_DECREF(pynum);
return NULL;
}
- return Num(pynum, contains_underscores, LINENO(n),
- n->n_col_offset, c->c_arena);
+ return Num(pynum, LINENO(n), n->n_col_offset, c->c_arena);
}
case ELLIPSIS: /* Ellipsis */
return Ellipsis(LINENO(n), n->n_col_offset, c->c_arena);
@@ -2385,7 +2460,7 @@ ast_for_binop(struct compiling *c, const node *n)
if (!expr2)
return NULL;
- newoperator = get_operator(CHILD(n, 1));
+ newoperator = get_operator(c, CHILD(n, 1));
if (!newoperator)
return NULL;
@@ -2399,7 +2474,7 @@ ast_for_binop(struct compiling *c, const node *n)
expr_ty tmp_result, tmp;
const node* next_oper = CHILD(n, i * 2 + 1);
- newoperator = get_operator(next_oper);
+ newoperator = get_operator(c, next_oper);
if (!newoperator)
return NULL;
@@ -2460,7 +2535,7 @@ ast_for_trailer(struct compiling *c, const node *n, expr_ty left_expr)
expr_ty e;
int simple = 1;
asdl_seq *slices, *elts;
- slices = _Py_asdl_seq_new((NCH(n) + 1) / 2, c->c_arena);
+ slices = _Ta3_asdl_seq_new((NCH(n) + 1) / 2, c->c_arena);
if (!slices)
return NULL;
for (j = 0; j < NCH(n); j += 2) {
@@ -2476,7 +2551,7 @@ ast_for_trailer(struct compiling *c, const node *n, expr_ty left_expr)
Load, LINENO(n), n->n_col_offset, c->c_arena);
}
/* extract Index values and put them in a Tuple */
- elts = _Py_asdl_seq_new(asdl_seq_LEN(slices), c->c_arena);
+ elts = _Ta3_asdl_seq_new(asdl_seq_LEN(slices), c->c_arena);
if (!elts)
return NULL;
for (j = 0; j < asdl_seq_LEN(slices); ++j) {
@@ -2528,6 +2603,11 @@ ast_for_atom_expr(struct compiling *c, const node *n)
nch = NCH(n);
if (TYPE(CHILD(n, 0)) == AWAIT) {
+ if (c->c_feature_version < 5) {
+ ast_error(c, n,
+ "Await expressions are only supported in Python 3.5 and greater");
+ return NULL;
+ }
start = 1;
assert(nch > 1);
}
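[editor's note] As with the numeric-literal check, the new guard only matters when emulating a pre-3.5 grammar; any 3.5+ interpreter already produces the Await node. A minimal sketch (the names f and g are placeholders):

    import ast

    # 'await' is only legal inside an async function; the first body statement
    # comes back as an Expr wrapping an Await node.
    tree = ast.parse("async def f():\n    await g()")
    print(ast.dump(tree.body[0].body[0]))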
@@ -2642,7 +2722,7 @@ ast_for_expr(struct compiling *c, const node *n)
n = CHILD(n, 0);
goto loop;
}
- seq = _Py_asdl_seq_new((NCH(n) + 1) / 2, c->c_arena);
+ seq = _Ta3_asdl_seq_new((NCH(n) + 1) / 2, c->c_arena);
if (!seq)
return NULL;
for (i = 0; i < NCH(n); i += 2) {
@@ -2678,10 +2758,10 @@ ast_for_expr(struct compiling *c, const node *n)
expr_ty expression;
asdl_int_seq *ops;
asdl_seq *cmps;
- ops = _Py_asdl_int_seq_new(NCH(n) / 2, c->c_arena);
+ ops = _Ta3_asdl_int_seq_new(NCH(n) / 2, c->c_arena);
if (!ops)
return NULL;
- cmps = _Py_asdl_seq_new(NCH(n) / 2, c->c_arena);
+ cmps = _Ta3_asdl_seq_new(NCH(n) / 2, c->c_arena);
if (!cmps) {
return NULL;
}
@@ -2809,10 +2889,10 @@ ast_for_call(struct compiling *c, const node *n, expr_ty func)
return NULL;
}
- args = _Py_asdl_seq_new(nargs + ngens, c->c_arena);
+ args = _Ta3_asdl_seq_new(nargs + ngens, c->c_arena);
if (!args)
return NULL;
- keywords = _Py_asdl_seq_new(nkeywords, c->c_arena);
+ keywords = _Ta3_asdl_seq_new(nkeywords, c->c_arena);
if (!keywords)
return NULL;
@@ -2961,12 +3041,13 @@ static stmt_ty
ast_for_expr_stmt(struct compiling *c, const node *n)
{
REQ(n, expr_stmt);
- /* expr_stmt: testlist_star_expr (augassign (yield_expr|testlist)
- | ('=' (yield_expr|testlist_star_expr))* [TYPE_COMMENT])
+ /* expr_stmt: testlist_star_expr (annassign | augassign (yield_expr|testlist) |
+ ('=' (yield_expr|testlist_star_expr))* [TYPE_COMMENT])
+ annassign: ':' test ['=' test]
testlist_star_expr: (test|star_expr) (',' test|star_expr)* [',']
augassign: '+=' | '-=' | '*=' | '@=' | '/=' | '%=' | '&=' | '|=' | '^='
| '<<=' | '>>=' | '**=' | '//='
- test: ... here starts the operator precendence dance
+ test: ... here starts the operator precedence dance
*/
int num = NCH(n);
@@ -3016,13 +3097,26 @@ ast_for_expr_stmt(struct compiling *c, const node *n)
return AugAssign(expr1, newoperator, expr2, LINENO(n), n->n_col_offset, c->c_arena);
}
else if (TYPE(CHILD(n, 1)) == annassign) {
- expr_ty expr1, annotation, expr3;
+ expr_ty expr1, expr2, expr3;
node *ch = CHILD(n, 0);
- node *ann = CHILD(n, 1);
- asdl_seq *targets = _Py_asdl_seq_new(1, c->c_arena);
- if (!targets)
+ node *deep, *ann = CHILD(n, 1);
+ int simple = 1;
+
+ /* AnnAssigns are only allowed in Python 3.6 or greater */
+ if (c->c_feature_version < 6) {
+ ast_error(c, ch,
+ "Variable annotation syntax is only supported in Python 3.6 and greater");
return NULL;
+ }
+ /* we keep track of parens to qualify (x) as expression not name */
+ deep = ch;
+ while (NCH(deep) == 1) {
+ deep = CHILD(deep, 0);
+ }
+ if (NCH(deep) > 0 && TYPE(CHILD(deep, 0)) == LPAR) {
+ simple = 0;
+ }
expr1 = ast_for_testlist(c, ch);
if (!expr1) {
return NULL;
@@ -3056,13 +3150,18 @@ ast_for_expr_stmt(struct compiling *c, const node *n)
"illegal target for annotation");
return NULL;
}
+
+ if (expr1->kind != Name_kind) {
+ simple = 0;
+ }
ch = CHILD(ann, 1);
- annotation = ast_for_expr(c, ch);
- if (!annotation) {
+ expr2 = ast_for_expr(c, ch);
+ if (!expr2) {
return NULL;
}
if (NCH(ann) == 2) {
- expr3 = NULL;
+ return AnnAssign(expr1, expr2, NULL, simple,
+ LINENO(n), n->n_col_offset, c->c_arena);
}
else {
ch = CHILD(ann, 3);
@@ -3070,10 +3169,9 @@ ast_for_expr_stmt(struct compiling *c, const node *n)
if (!expr3) {
return NULL;
}
+ return AnnAssign(expr1, expr2, expr3, simple,
+ LINENO(n), n->n_col_offset, c->c_arena);
}
- asdl_seq_SET(targets, 0, expr1);
- return Assign(targets, expr3, NULL, annotation,
- LINENO(n), n->n_col_offset, c->c_arena);
}
else {
int i, nch_minus_type, has_type_comment;
@@ -3088,7 +3186,7 @@ ast_for_expr_stmt(struct compiling *c, const node *n)
has_type_comment = TYPE(CHILD(n, num - 1)) == TYPE_COMMENT;
nch_minus_type = num - has_type_comment;
- targets = _Py_asdl_seq_new(nch_minus_type / 2, c->c_arena);
+ targets = _Ta3_asdl_seq_new(nch_minus_type / 2, c->c_arena);
if (!targets)
return NULL;
for (i = 0; i < nch_minus_type - 2; i += 2) {
@@ -3119,8 +3217,7 @@ ast_for_expr_stmt(struct compiling *c, const node *n)
type_comment = NEW_TYPE_COMMENT(CHILD(n, nch_minus_type));
else
type_comment = NULL;
- return Assign(targets, expression, type_comment, NULL,
- LINENO(n), n->n_col_offset, c->c_arena);
+ return Assign(targets, expression, type_comment, LINENO(n), n->n_col_offset, c->c_arena);
}
}
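[editor's note] The reworked ast_for_expr_stmt now emits AnnAssign for variable annotations (with the new `simple` flag computed above) and keeps the type comment directly on plain Assign nodes instead of reusing an extra annotation field. A rough sketch of the resulting shapes; the stdlib `ast` calls need a 3.6+ interpreter, and the typed_ast lines assume the ast3 wrapper mirrors ast.parse, as the package description says:

    import ast

    # A bare Name target gives simple=1 ...
    print(ast.dump(ast.parse("x: int = 1").body[0]))
    # ... while parenthesizing the target flips simple to 0:
    print(ast.dump(ast.parse("(x): int").body[0]))

    # Type comments are what this fork adds on top of the stock parser:
    from typed_ast import ast3
    print(ast3.parse("x = []  # type: List[int]").body[0].type_comment)
    # expected to print 'List[int]'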
@@ -3134,7 +3231,7 @@ ast_for_exprlist(struct compiling *c, const node *n, expr_context_ty context)
REQ(n, exprlist);
- seq = _Py_asdl_seq_new((NCH(n) + 1) / 2, c->c_arena);
+ seq = _Ta3_asdl_seq_new((NCH(n) + 1) / 2, c->c_arena);
if (!seq)
return NULL;
for (i = 0; i < NCH(n); i += 2) {
@@ -3361,7 +3458,7 @@ ast_for_import_stmt(struct compiling *c, const node *n)
if (TYPE(n) == import_name) {
n = CHILD(n, 1);
REQ(n, dotted_as_names);
- aliases = _Py_asdl_seq_new((NCH(n) + 1) / 2, c->c_arena);
+ aliases = _Ta3_asdl_seq_new((NCH(n) + 1) / 2, c->c_arena);
if (!aliases)
return NULL;
for (i = 0; i < NCH(n); i += 2) {
@@ -3423,7 +3520,7 @@ ast_for_import_stmt(struct compiling *c, const node *n)
return NULL;
}
- aliases = _Py_asdl_seq_new((n_children + 1) / 2, c->c_arena);
+ aliases = _Ta3_asdl_seq_new((n_children + 1) / 2, c->c_arena);
if (!aliases)
return NULL;
@@ -3432,14 +3529,14 @@ ast_for_import_stmt(struct compiling *c, const node *n)
alias_ty import_alias = alias_for_import_name(c, n, 1);
if (!import_alias)
return NULL;
- asdl_seq_SET(aliases, 0, import_alias);
+ asdl_seq_SET(aliases, 0, import_alias);
}
else {
for (i = 0; i < NCH(n); i += 2) {
alias_ty import_alias = alias_for_import_name(c, CHILD(n, i), 1);
if (!import_alias)
return NULL;
- asdl_seq_SET(aliases, i / 2, import_alias);
+ asdl_seq_SET(aliases, i / 2, import_alias);
}
}
if (mod != NULL)
@@ -3462,7 +3559,7 @@ ast_for_global_stmt(struct compiling *c, const node *n)
int i;
REQ(n, global_stmt);
- s = _Py_asdl_seq_new(NCH(n) / 2, c->c_arena);
+ s = _Ta3_asdl_seq_new(NCH(n) / 2, c->c_arena);
if (!s)
return NULL;
for (i = 1; i < NCH(n); i += 2) {
@@ -3483,7 +3580,7 @@ ast_for_nonlocal_stmt(struct compiling *c, const node *n)
int i;
REQ(n, nonlocal_stmt);
- s = _Py_asdl_seq_new(NCH(n) / 2, c->c_arena);
+ s = _Ta3_asdl_seq_new(NCH(n) / 2, c->c_arena);
if (!s)
return NULL;
for (i = 1; i < NCH(n); i += 2) {
@@ -3536,7 +3633,7 @@ ast_for_suite(struct compiling *c, const node *n)
REQ(n, suite);
total = num_stmts(n);
- seq = _Py_asdl_seq_new(total, c->c_arena);
+ seq = _Ta3_asdl_seq_new(total, c->c_arena);
if (!seq)
return NULL;
if (TYPE(CHILD(n, 0)) == simple_stmt) {
@@ -3659,7 +3756,7 @@ ast_for_if_stmt(struct compiling *c, const node *n)
if (has_else) {
asdl_seq *suite_seq2;
- orelse = _Py_asdl_seq_new(1, c->c_arena);
+ orelse = _Ta3_asdl_seq_new(1, c->c_arena);
if (!orelse)
return NULL;
expression = ast_for_expr(c, CHILD(n, NCH(n) - 6));
@@ -3683,7 +3780,7 @@ ast_for_if_stmt(struct compiling *c, const node *n)
for (i = 0; i < n_elif; i++) {
int off = 5 + (n_elif - i - 1) * 4;
- asdl_seq *newobj = _Py_asdl_seq_new(1, c->c_arena);
+ asdl_seq *newobj = _Ta3_asdl_seq_new(1, c->c_arena);
if (!newobj)
return NULL;
expression = ast_for_expr(c, CHILD(n, off));
@@ -3764,6 +3861,13 @@ ast_for_for_stmt(struct compiling *c, const node *n, int is_async)
const node *node_target;
int has_type_comment;
string type_comment;
+
+ if (is_async && c->c_feature_version < 5) {
+ ast_error(c, n,
+ "Async for loops are only supported in Python 3.5 and greater");
+ return NULL;
+ }
+
/* for_stmt: 'for' exprlist 'in' testlist ':' [TYPE_COMMENT] suite ['else' ':' suite] */
REQ(n, for_stmt);
@@ -3801,7 +3905,7 @@ ast_for_for_stmt(struct compiling *c, const node *n, int is_async)
if (is_async)
return AsyncFor(target, expression, suite_seq, seq,
- LINENO(n), n->n_col_offset,
+ type_comment, LINENO(n), n->n_col_offset,
c->c_arena);
else
return For(target, expression, suite_seq, seq,
@@ -3910,7 +4014,7 @@ ast_for_try_stmt(struct compiling *c, const node *n)
if (n_except > 0) {
int i;
/* process except statements to create a try ... except */
- handlers = _Py_asdl_seq_new(n_except, c->c_arena);
+ handlers = _Ta3_asdl_seq_new(n_except, c->c_arena);
if (handlers == NULL)
return NULL;
@@ -3959,13 +4063,19 @@ ast_for_with_stmt(struct compiling *c, const node *n, int is_async)
asdl_seq *items, *body;
string type_comment;
+ if (is_async && c->c_feature_version < 5) {
+ ast_error(c, n,
+ "Async with statements are only supported in Python 3.5 and greater");
+ return NULL;
+ }
+
REQ(n, with_stmt);
has_type_comment = TYPE(CHILD(n, NCH(n) - 2)) == TYPE_COMMENT;
nch_minus_type = NCH(n) - has_type_comment;
n_items = (nch_minus_type - 2) / 2;
- items = _Py_asdl_seq_new(n_items, c->c_arena);
+ items = _Ta3_asdl_seq_new(n_items, c->c_arena);
if (!items)
return NULL;
for (i = 1; i < nch_minus_type - 2; i += 2) {
@@ -3985,7 +4095,7 @@ ast_for_with_stmt(struct compiling *c, const node *n, int is_async)
type_comment = NULL;
if (is_async)
- return AsyncWith(items, body, LINENO(n), n->n_col_offset, c->c_arena);
+ return AsyncWith(items, body, type_comment, LINENO(n), n->n_col_offset, c->c_arena);
else
return With(items, body, type_comment, LINENO(n), n->n_col_offset, c->c_arena);
}
@@ -4206,84 +4316,871 @@ decode_utf8(struct compiling *c, const char **sPtr, const char *end)
}
static PyObject *
-decode_unicode(struct compiling *c, const char *s, size_t len, int rawmode, const char *encoding)
+decode_unicode_with_escapes(struct compiling *c, const node *n, const char *s,
+ size_t len)
{
- PyObject *v, *u;
+ PyObject *u;
char *buf;
char *p;
const char *end;
- if (encoding == NULL) {
- u = NULL;
- } else {
- /* check for integer overflow */
- if (len > PY_SIZE_MAX / 6)
- return NULL;
- /* "ä" (2 bytes) may become "\U000000E4" (10 bytes), or 1:5
- "\ä" (3 bytes) may become "\u005c\U000000E4" (16 bytes), or ~1:6 */
- u = PyBytes_FromStringAndSize((char *)NULL, len * 6);
- if (u == NULL)
- return NULL;
- p = buf = PyBytes_AsString(u);
- end = s + len;
- while (s < end) {
- if (*s == '\\') {
- *p++ = *s++;
- if (*s & 0x80) {
- strcpy(p, "u005c");
- p += 5;
- }
+ /* check for integer overflow */
+ if (len > SIZE_MAX / 6)
+ return NULL;
+ /* "ä" (2 bytes) may become "\U000000E4" (10 bytes), or 1:5
+ "\ä" (3 bytes) may become "\u005c\U000000E4" (16 bytes), or ~1:6 */
+ u = PyBytes_FromStringAndSize((char *)NULL, len * 6);
+ if (u == NULL)
+ return NULL;
+ p = buf = PyBytes_AsString(u);
+ end = s + len;
+ while (s < end) {
+ if (*s == '\\') {
+ *p++ = *s++;
+ if (*s & 0x80) {
+ strcpy(p, "u005c");
+ p += 5;
}
- if (*s & 0x80) { /* XXX inefficient */
- PyObject *w;
- int kind;
- void *data;
- Py_ssize_t len, i;
- w = decode_utf8(c, &s, end);
- if (w == NULL) {
- Py_DECREF(u);
- return NULL;
+ }
+ if (*s & 0x80) { /* XXX inefficient */
+ PyObject *w;
+ int kind;
+ void *data;
+ Py_ssize_t len, i;
+ w = decode_utf8(c, &s, end);
+ if (w == NULL) {
+ Py_DECREF(u);
+ return NULL;
+ }
+ kind = PyUnicode_KIND(w);
+ data = PyUnicode_DATA(w);
+ len = PyUnicode_GET_LENGTH(w);
+ for (i = 0; i < len; i++) {
+ Py_UCS4 chr = PyUnicode_READ(kind, data, i);
+ sprintf(p, "\\U%08x", chr);
+ p += 10;
+ }
+ /* Should be impossible to overflow */
+ assert(p - buf <= Py_SIZE(u));
+ Py_DECREF(w);
+ } else {
+ *p++ = *s++;
+ }
+ }
+ len = p - buf;
+ s = buf;
+
+ return PyUnicode_DecodeUnicodeEscape(s, len, NULL);
+}
+
+static PyObject *
+decode_bytes_with_escapes(struct compiling *c, const node *n, const char *s,
+ size_t len)
+{
+ return PyBytes_DecodeEscape(s, len, NULL, 0, NULL);
+}
+
+/* Compile this expression into an expr_ty. Add parens around the
+ expression, in order to allow leading spaces in the expression. */
+static expr_ty
+fstring_compile_expr(const char *expr_start, const char *expr_end,
+ struct compiling *c, const node *n)
+
+{
+ int all_whitespace = 1;
+ int kind;
+ void *data;
+ PyCompilerFlags cf;
+ mod_ty mod;
+ char *str;
+ PyObject *o, *fstring_name;
+ Py_ssize_t len;
+ Py_ssize_t i;
+
+ assert(expr_end >= expr_start);
+ assert(*(expr_start-1) == '{');
+ assert(*expr_end == '}' || *expr_end == '!' || *expr_end == ':');
+
+ /* We know there are no escapes here, because backslashes are not allowed,
+ and we know it's utf-8 encoded (per PEP 263). But, in order to check
+ that each char is not whitespace, we need to decode it to unicode.
+ Which is unfortunate, but such is life. */
+
+ /* If the substring is all whitespace, it's an error. We need to catch
+ this here, and not when we call PyParser_ASTFromString, because turning
+ the expression '' into '()' would go from being invalid to valid. */
+ /* Note that this code says an empty string is all whitespace. That's
+ important. There's a test for it: f'{}'. */
+ o = PyUnicode_DecodeUTF8(expr_start, expr_end-expr_start, NULL);
+ if (o == NULL)
+ return NULL;
+ len = PyUnicode_GET_LENGTH(o);
+ kind = PyUnicode_KIND(o);
+ data = PyUnicode_DATA(o);
+ for (i = 0; i < len; i++) {
+ if (!Py_UNICODE_ISSPACE(PyUnicode_READ(kind, data, i))) {
+ all_whitespace = 0;
+ break;
+ }
+ }
+ Py_DECREF(o);
+ if (all_whitespace) {
+ ast_error(c, n, "f-string: empty expression not allowed");
+ return NULL;
+ }
+
+ /* Reuse len to be the length of the utf-8 input string. */
+ len = expr_end - expr_start;
+ /* Allocate 3 extra bytes: open paren, close paren, null byte. */
+ str = PyMem_RawMalloc(len + 3);
+ if (str == NULL)
+ return NULL;
+
+ str[0] = '(';
+ memcpy(str+1, expr_start, len);
+ str[len+1] = ')';
+ str[len+2] = 0;
+
+ cf.cf_flags = PyCF_ONLY_AST;
+ fstring_name = PyUnicode_FromString("<fstring>");
+ mod = string_object_to_c_ast(str, fstring_name,
+ Py_eval_input, &cf,
+ c->c_feature_version, c->c_arena);
+ Py_DECREF(fstring_name);
+ PyMem_RawFree(str);
+ if (!mod)
+ return NULL;
+ return mod->v.Expression.body;
+}
+
+/* Return -1 on error.
+
+ Return 0 if we reached the end of the literal.
+
+ Return 1 if we haven't reached the end of the literal, but we want
+ the caller to process the literal up to this point. Used for
+ doubled braces.
+*/
+static int
+fstring_find_literal(const char **str, const char *end, int raw,
+ PyObject **literal, int recurse_lvl,
+ struct compiling *c, const node *n)
+{
+ /* Get any literal string. It ends when we hit an un-doubled left
+ brace (which isn't part of a unicode name escape such as
+ "\N{EULER CONSTANT}"), or the end of the string. */
+
+ const char *literal_start = *str;
+ const char *literal_end;
+ int in_named_escape = 0;
+ int result = 0;
+
+ assert(*literal == NULL);
+ for (; *str < end; (*str)++) {
+ char ch = **str;
+ if (!in_named_escape && ch == '{' && (*str)-literal_start >= 2 &&
+ *(*str-2) == '\\' && *(*str-1) == 'N') {
+ in_named_escape = 1;
+ } else if (in_named_escape && ch == '}') {
+ in_named_escape = 0;
+ } else if (ch == '{' || ch == '}') {
+ /* Check for doubled braces, but only at the top level. If
+ we checked at every level, then f'{0:{3}}' would fail
+ with the two closing braces. */
+ if (recurse_lvl == 0) {
+ if (*str+1 < end && *(*str+1) == ch) {
+ /* We're going to tell the caller that the literal ends
+ here, but that they should continue scanning. But also
+ skip over the second brace when we resume scanning. */
+ literal_end = *str+1;
+ *str += 2;
+ result = 1;
+ goto done;
}
- kind = PyUnicode_KIND(w);
- data = PyUnicode_DATA(w);
- len = PyUnicode_GET_LENGTH(w);
- for (i = 0; i < len; i++) {
- Py_UCS4 chr = PyUnicode_READ(kind, data, i);
- sprintf(p, "\\U%08x", chr);
- p += 10;
+
+ /* Where a single '{' is the start of a new expression, a
+ single '}' is not allowed. */
+ if (ch == '}') {
+ ast_error(c, n, "f-string: single '}' is not allowed");
+ return -1;
}
- /* Should be impossible to overflow */
- assert(p - buf <= Py_SIZE(u));
- Py_DECREF(w);
+ }
+ /* We're either at a '{', which means we're starting another
+ expression; or a '}', which means we're at the end of this
+ f-string (for a nested format_spec). */
+ break;
+ }
+ }
+ literal_end = *str;
+ assert(*str <= end);
+ assert(*str == end || **str == '{' || **str == '}');
+done:
+ if (literal_start != literal_end) {
+ if (raw)
+ *literal = PyUnicode_DecodeUTF8Stateful(literal_start,
+ literal_end-literal_start,
+ NULL, NULL);
+ else
+ *literal = decode_unicode_with_escapes(c, n, literal_start,
+ literal_end-literal_start);
+ if (!*literal)
+ return -1;
+ }
+ return result;
+}
+
+/* Forward declaration because parsing is recursive. */
+static expr_ty
+fstring_parse(const char **str, const char *end, int raw, int recurse_lvl,
+ struct compiling *c, const node *n);
+
+/* Parse the f-string at *str, ending at end. We know *str starts an
+ expression (so it must be a '{'). Returns the FormattedValue node,
+ which includes the expression, conversion character, and
+ format_spec expression.
+
+ Note that I don't do a perfect job here: I don't make sure that a
+ closing brace doesn't match an opening paren, for example. It
+ doesn't need to error on all invalid expressions, just correctly
+ find the end of all valid ones. Any errors inside the expression
+ will be caught when we parse it later. */
+static int
+fstring_find_expr(const char **str, const char *end, int raw, int recurse_lvl,
+ expr_ty *expression, struct compiling *c, const node *n)
+{
+ /* Return -1 on error, else 0. */
+
+ const char *expr_start;
+ const char *expr_end;
+ expr_ty simple_expression;
+ expr_ty format_spec = NULL; /* Optional format specifier. */
+ int conversion = -1; /* The conversion char. -1 if not specified. */
+
+ /* 0 if we're not in a string, else the quote char we're trying to
+ match (single or double quote). */
+ char quote_char = 0;
+
+ /* If we're inside a string, 1=normal, 3=triple-quoted. */
+ int string_type = 0;
+
+ /* Keep track of nesting level for braces/parens/brackets in
+ expressions. */
+ Py_ssize_t nested_depth = 0;
+
+ /* Can only nest one level deep. */
+ if (recurse_lvl >= 2) {
+ ast_error(c, n, "f-string: expressions nested too deeply");
+ return -1;
+ }
+
+ /* The first char must be a left brace, or we wouldn't have gotten
+ here. Skip over it. */
+ assert(**str == '{');
+ *str += 1;
+
+ expr_start = *str;
+ for (; *str < end; (*str)++) {
+ char ch;
+
+ /* Loop invariants. */
+ assert(nested_depth >= 0);
+ assert(*str >= expr_start && *str < end);
+ if (quote_char)
+ assert(string_type == 1 || string_type == 3);
+ else
+ assert(string_type == 0);
+
+ ch = **str;
+ /* Nowhere inside an expression is a backslash allowed. */
+ if (ch == '\\') {
+ /* Error: can't include a backslash character, inside
+ parens or strings or not. */
+ ast_error(c, n, "f-string expression part "
+ "cannot include a backslash");
+ return -1;
+ }
+ if (quote_char) {
+ /* We're inside a string. See if we're at the end. */
+ /* This code needs to implement the same non-error logic
+ as tok_get from tokenizer.c, at the letter_quote
+ label. To actually share that code would be a
+ nightmare. But, it's unlikely to change and is small,
+ so duplicate it here. Note we don't need to catch all
+ of the errors, since they'll be caught when parsing the
+ expression. We just need to match the non-error
+ cases. Thus we can ignore \n in single-quoted strings,
+ for example. Or non-terminated strings. */
+ if (ch == quote_char) {
+ /* Does this match the string_type (single or triple
+ quoted)? */
+ if (string_type == 3) {
+ if (*str+2 < end && *(*str+1) == ch && *(*str+2) == ch) {
+ /* We're at the end of a triple quoted string. */
+ *str += 2;
+ string_type = 0;
+ quote_char = 0;
+ continue;
+ }
+ } else {
+ /* We're at the end of a normal string. */
+ quote_char = 0;
+ string_type = 0;
+ continue;
+ }
+ }
+ } else if (ch == '\'' || ch == '"') {
+ /* Is this a triple quoted string? */
+ if (*str+2 < end && *(*str+1) == ch && *(*str+2) == ch) {
+ string_type = 3;
+ *str += 2;
} else {
- *p++ = *s++;
+ /* Start of a normal string. */
+ string_type = 1;
}
+ /* Start looking for the end of the string. */
+ quote_char = ch;
+ } else if (ch == '[' || ch == '{' || ch == '(') {
+ nested_depth++;
+ } else if (nested_depth != 0 &&
+ (ch == ']' || ch == '}' || ch == ')')) {
+ nested_depth--;
+ } else if (ch == '#') {
+ /* Error: can't include a comment character, inside parens
+ or not. */
+ ast_error(c, n, "f-string expression part cannot include '#'");
+ return -1;
+ } else if (nested_depth == 0 &&
+ (ch == '!' || ch == ':' || ch == '}')) {
+ /* First, test for the special case of "!=". Since '=' is
+ not an allowed conversion character, nothing is lost in
+ this test. */
+ if (ch == '!' && *str+1 < end && *(*str+1) == '=') {
+ /* This isn't a conversion character, just continue. */
+ continue;
+ }
+ /* Normal way out of this loop. */
+ break;
+ } else {
+ /* Just consume this char and loop around. */
}
- len = p - buf;
- s = buf;
}
- if (rawmode)
- v = PyUnicode_DecodeRawUnicodeEscape(s, len, NULL);
- else
- v = PyUnicode_DecodeUnicodeEscape(s, len, NULL);
- Py_XDECREF(u);
- return v;
+ expr_end = *str;
+ /* If we leave this loop in a string or with mismatched parens, we
+ don't care. We'll get a syntax error when compiling the
+ expression. But, we can produce a better error message, so
+ let's just do that.*/
+ if (quote_char) {
+ ast_error(c, n, "f-string: unterminated string");
+ return -1;
+ }
+ if (nested_depth) {
+ ast_error(c, n, "f-string: mismatched '(', '{', or '['");
+ return -1;
+ }
+
+ if (*str >= end)
+ goto unexpected_end_of_string;
+
+ /* Compile the expression as soon as possible, so we show errors
+ related to the expression before errors related to the
+ conversion or format_spec. */
+ simple_expression = fstring_compile_expr(expr_start, expr_end, c, n);
+ if (!simple_expression)
+ return -1;
+
+ /* Check for a conversion char, if present. */
+ if (**str == '!') {
+ *str += 1;
+ if (*str >= end)
+ goto unexpected_end_of_string;
+
+ conversion = **str;
+ *str += 1;
+
+ /* Validate the conversion. */
+ if (!(conversion == 's' || conversion == 'r'
+ || conversion == 'a')) {
+ ast_error(c, n, "f-string: invalid conversion character: "
+ "expected 's', 'r', or 'a'");
+ return -1;
+ }
+ }
+
+ /* Check for the format spec, if present. */
+ if (*str >= end)
+ goto unexpected_end_of_string;
+ if (**str == ':') {
+ *str += 1;
+ if (*str >= end)
+ goto unexpected_end_of_string;
+
+ /* Parse the format spec. */
+ format_spec = fstring_parse(str, end, raw, recurse_lvl+1, c, n);
+ if (!format_spec)
+ return -1;
+ }
+
+ if (*str >= end || **str != '}')
+ goto unexpected_end_of_string;
+
+ /* We're at a right brace. Consume it. */
+ assert(*str < end);
+ assert(**str == '}');
+ *str += 1;
+
+ /* And now create the FormattedValue node that represents this
+ entire expression with the conversion and format spec. */
+ *expression = FormattedValue(simple_expression, conversion,
+ format_spec, LINENO(n), n->n_col_offset,
+ c->c_arena);
+ if (!*expression)
+ return -1;
+
+ return 0;
+
+unexpected_end_of_string:
+ ast_error(c, n, "f-string: expecting '}'");
+ return -1;
}
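[editor's note] fstring_find_expr only locates the expression text plus the optional '!conversion' and ':format_spec' parts; the expression itself is compiled separately. A hedged illustration of the fields it fills in, using the stock `ast` module on a 3.6-era interpreter (x, a, b are placeholder names):

    import ast

    # The conversion character is stored as its ordinal (114 == ord('r')),
    # and the ':>10' part ends up under format_spec.
    print(ast.dump(ast.parse("f'val: {x!r:>10}'", mode="eval").body))

    # '=' is not a conversion character, so '!=' inside the expression is
    # passed through to the compiled expression instead:
    print(ast.dump(ast.parse("f'{a != b}'", mode="eval").body))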
-/* s is a Python string literal, including the bracketing quote characters,
- * and r &/or b prefixes (if any), and embedded escape sequences (if any).
- * parsestr parses it, and returns the decoded Python string object.
- */
-static PyObject *
-parsestr(struct compiling *c, const node *n, int *bytesmode)
+/* Return -1 on error.
+
+ Return 0 if we have a literal (possibly zero length) and an
+ expression (zero length if at the end of the string).
+
+ Return 1 if we have a literal, but no expression, and we want the
+ caller to call us again. This is used to deal with doubled
+ braces.
+
+ When called multiple times on the string 'a{{b{0}c', this function
+ will return:
+
+ 1. the literal 'a{' with no expression, and a return value
+ of 1. Despite the fact that there's no expression, the return
+ value of 1 means we're not finished yet.
+
+ 2. the literal 'b' and the expression '0', with a return value of
+ 0. The fact that there's an expression means we're not finished.
+
+ 3. literal 'c' with no expression and a return value of 0. The
+ combination of the return value of 0 with no expression means
+ we're finished.
+*/
+static int
+fstring_find_literal_and_expr(const char **str, const char *end, int raw,
+ int recurse_lvl, PyObject **literal,
+ expr_ty *expression,
+ struct compiling *c, const node *n)
+{
+ int result;
+
+ assert(*literal == NULL && *expression == NULL);
+
+ /* Get any literal string. */
+ result = fstring_find_literal(str, end, raw, literal, recurse_lvl, c, n);
+ if (result < 0)
+ goto error;
+
+ assert(result == 0 || result == 1);
+
+ if (result == 1)
+ /* We have a literal, but don't look at the expression. */
+ return 1;
+
+ if (*str >= end || **str == '}')
+ /* We're at the end of the string or the end of a nested
+ f-string: no expression. The top-level error case where we
+ expect to be at the end of the string but we're at a '}' is
+ handled later. */
+ return 0;
+
+ /* We must now be the start of an expression, on a '{'. */
+ assert(**str == '{');
+
+ if (fstring_find_expr(str, end, raw, recurse_lvl, expression, c, n) < 0)
+ goto error;
+
+ return 0;
+
+error:
+ Py_CLEAR(*literal);
+ return -1;
+}
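[editor's note] The worked example in the comment above ('a{{b{0}c') can be checked directly; doubled braces survive only as a single literal brace, merged into the surrounding literal pieces. A quick sketch with the stock `ast` module, with 3.6-era behaviour described loosely in the comments:

    import ast

    # 'a{{' and 'b' fold into one leading literal 'a{b', followed by a
    # FormattedValue for {0} and a trailing literal 'c'.
    print(ast.dump(ast.parse("f'a{{b{0}c'", mode="eval").body))

    # A lone '}' outside an expression is rejected, matching the error above:
    try:
        ast.parse("f'}'")
    except SyntaxError as e:
        print(e)   # f-string: single '}' is not allowed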
+
+#define EXPRLIST_N_CACHED 64
+
+typedef struct {
+ /* Incrementally build an array of expr_ty, to be used in an
+ asdl_seq. Cache some small but reasonably sized number of
+ expr_ty's, and then after that start dynamically allocating,
+ doubling the number allocated each time. Note that the f-string
+ f'{0}a{1}' contains 3 expr_ty's: 2 FormattedValue's, and one
+ Str for the literal 'a'. So you add expr_ty's about twice as
+ fast as you add expressions in an f-string. */
+
+ Py_ssize_t allocated; /* Number we've allocated. */
+ Py_ssize_t size; /* Number we've used. */
+ expr_ty *p; /* Pointer to the memory we're actually
+ using. Will point to 'data' until we
+ start dynamically allocating. */
+ expr_ty data[EXPRLIST_N_CACHED];
+} ExprList;
+
+#ifdef NDEBUG
+#define ExprList_check_invariants(l)
+#else
+static void
+ExprList_check_invariants(ExprList *l)
+{
+ /* Check our invariants. Make sure this object is "live", and
+ hasn't been deallocated. */
+ assert(l->size >= 0);
+ assert(l->p != NULL);
+ if (l->size <= EXPRLIST_N_CACHED)
+ assert(l->data == l->p);
+}
+#endif
+
+static void
+ExprList_Init(ExprList *l)
+{
+ l->allocated = EXPRLIST_N_CACHED;
+ l->size = 0;
+
+ /* Until we start allocating dynamically, p points to data. */
+ l->p = l->data;
+
+ ExprList_check_invariants(l);
+}
+
+static int
+ExprList_Append(ExprList *l, expr_ty exp)
+{
+ ExprList_check_invariants(l);
+ if (l->size >= l->allocated) {
+ /* We need to alloc (or realloc) the memory. */
+ Py_ssize_t new_size = l->allocated * 2;
+
+ /* See if we've ever allocated anything dynamically. */
+ if (l->p == l->data) {
+ Py_ssize_t i;
+ /* We're still using the cached data. Switch to
+ alloc-ing. */
+ l->p = PyMem_RawMalloc(sizeof(expr_ty) * new_size);
+ if (!l->p)
+ return -1;
+ /* Copy the cached data into the new buffer. */
+ for (i = 0; i < l->size; i++)
+ l->p[i] = l->data[i];
+ } else {
+ /* Just realloc. */
+ expr_ty *tmp = PyMem_RawRealloc(l->p, sizeof(expr_ty) * new_size);
+ if (!tmp) {
+ PyMem_RawFree(l->p);
+ l->p = NULL;
+ return -1;
+ }
+ l->p = tmp;
+ }
+
+ l->allocated = new_size;
+ assert(l->allocated == 2 * l->size);
+ }
+
+ l->p[l->size++] = exp;
+
+ ExprList_check_invariants(l);
+ return 0;
+}
+
+static void
+ExprList_Dealloc(ExprList *l)
+{
+ ExprList_check_invariants(l);
+
+ /* If there's been an error, or we've never dynamically allocated,
+ do nothing. */
+ if (!l->p || l->p == l->data) {
+ /* Do nothing. */
+ } else {
+ /* We have dynamically allocated. Free the memory. */
+ PyMem_RawFree(l->p);
+ }
+ l->p = NULL;
+ l->size = -1;
+}
+
+static asdl_seq *
+ExprList_Finish(ExprList *l, PyArena *arena)
+{
+ asdl_seq *seq;
+
+ ExprList_check_invariants(l);
+
+ /* Allocate the asdl_seq and copy the expressions in to it. */
+ seq = _Ta3_asdl_seq_new(l->size, arena);
+ if (seq) {
+ Py_ssize_t i;
+ for (i = 0; i < l->size; i++)
+ asdl_seq_SET(seq, i, l->p[i]);
+ }
+ ExprList_Dealloc(l);
+ return seq;
+}
+
+/* The FstringParser is designed to add a mix of strings and
+ f-strings, and concat them together as needed. Ultimately, it
+ generates an expr_ty. */
+typedef struct {
+ PyObject *last_str;
+ ExprList expr_list;
+} FstringParser;
+
+#ifdef NDEBUG
+#define FstringParser_check_invariants(state)
+#else
+static void
+FstringParser_check_invariants(FstringParser *state)
+{
+ if (state->last_str)
+ assert(PyUnicode_CheckExact(state->last_str));
+ ExprList_check_invariants(&state->expr_list);
+}
+#endif
+
+static void
+FstringParser_Init(FstringParser *state)
+{
+ state->last_str = NULL;
+ ExprList_Init(&state->expr_list);
+ FstringParser_check_invariants(state);
+}
+
+static void
+FstringParser_Dealloc(FstringParser *state)
+{
+ FstringParser_check_invariants(state);
+
+ Py_XDECREF(state->last_str);
+ ExprList_Dealloc(&state->expr_list);
+}
+
+/* Make a Str node, but decref the PyUnicode object being added. */
+static expr_ty
+make_str_node_and_del(PyObject **str, struct compiling *c, const node* n)
+{
+ PyObject *s = *str;
+ *str = NULL;
+ assert(PyUnicode_CheckExact(s));
+ if (PyArena_AddPyObject(c->c_arena, s) < 0) {
+ Py_DECREF(s);
+ return NULL;
+ }
+ return Str(s, LINENO(n), n->n_col_offset, c->c_arena);
+}
+
+/* Add a non-f-string (that is, a regular literal string). str is
+ decref'd. */
+static int
+FstringParser_ConcatAndDel(FstringParser *state, PyObject *str)
+{
+ FstringParser_check_invariants(state);
+
+ assert(PyUnicode_CheckExact(str));
+
+ if (PyUnicode_GET_LENGTH(str) == 0) {
+ Py_DECREF(str);
+ return 0;
+ }
+
+ if (!state->last_str) {
+ /* We didn't have a string before, so just remember this one. */
+ state->last_str = str;
+ } else {
+ /* Concatenate this with the previous string. */
+ PyUnicode_AppendAndDel(&state->last_str, str);
+ if (!state->last_str)
+ return -1;
+ }
+ FstringParser_check_invariants(state);
+ return 0;
+}
+
+/* Parse an f-string. The f-string is in *str to end, with no
+ 'f' or quotes. */
+static int
+FstringParser_ConcatFstring(FstringParser *state, const char **str,
+ const char *end, int raw, int recurse_lvl,
+ struct compiling *c, const node *n)
+{
+ FstringParser_check_invariants(state);
+
+ /* Parse the f-string. */
+ while (1) {
+ PyObject *literal = NULL;
+ expr_ty expression = NULL;
+
+ /* If there's a zero length literal in front of the
+ expression, literal will be NULL. If we're at the end of
+ the f-string, expression will be NULL (unless result == 1,
+ see below). */
+ int result = fstring_find_literal_and_expr(str, end, raw, recurse_lvl,
+ &literal, &expression,
+ c, n);
+ if (result < 0)
+ return -1;
+
+ /* Add the literal, if any. */
+ if (!literal) {
+ /* Do nothing. Just leave last_str alone (and possibly
+ NULL). */
+ } else if (!state->last_str) {
+ state->last_str = literal;
+ literal = NULL;
+ } else {
+ /* We have a literal, concatenate it. */
+ assert(PyUnicode_GET_LENGTH(literal) != 0);
+ if (FstringParser_ConcatAndDel(state, literal) < 0)
+ return -1;
+ literal = NULL;
+ }
+ assert(!state->last_str ||
+ PyUnicode_GET_LENGTH(state->last_str) != 0);
+
+ /* We've dealt with the literal now. It can't be leaked on further
+ errors. */
+ assert(literal == NULL);
+
+ /* See if we should just loop around to get the next literal
+ and expression, while ignoring the expression this
+ time. This is used for un-doubling braces, as an
+ optimization. */
+ if (result == 1)
+ continue;
+
+ if (!expression)
+ /* We're done with this f-string. */
+ break;
+
+ /* We know we have an expression. Convert any existing string
+ to a Str node. */
+ if (!state->last_str) {
+ /* Do nothing. No previous literal. */
+ } else {
+ /* Convert the existing last_str literal to a Str node. */
+ expr_ty str = make_str_node_and_del(&state->last_str, c, n);
+ if (!str || ExprList_Append(&state->expr_list, str) < 0)
+ return -1;
+ }
+
+ if (ExprList_Append(&state->expr_list, expression) < 0)
+ return -1;
+ }
+
+ /* If recurse_lvl is zero, then we must be at the end of the
+ string. Otherwise, we must be at a right brace. */
+
+ if (recurse_lvl == 0 && *str < end-1) {
+ ast_error(c, n, "f-string: unexpected end of string");
+ return -1;
+ }
+ if (recurse_lvl != 0 && **str != '}') {
+ ast_error(c, n, "f-string: expecting '}'");
+ return -1;
+ }
+
+ FstringParser_check_invariants(state);
+ return 0;
+}
+
+/* Convert the partial state reflected in last_str and expr_list to an
+ expr_ty. The expr_ty can be a Str, or a JoinedStr. */
+static expr_ty
+FstringParser_Finish(FstringParser *state, struct compiling *c,
+ const node *n)
+{
+ asdl_seq *seq;
+
+ FstringParser_check_invariants(state);
+
+ /* If we're just a constant string with no expressions, return
+ that. */
+ if(state->expr_list.size == 0) {
+ if (!state->last_str) {
+ /* Create a zero length string. */
+ state->last_str = PyUnicode_FromStringAndSize(NULL, 0);
+ if (!state->last_str)
+ goto error;
+ }
+ return make_str_node_and_del(&state->last_str, c, n);
+ }
+
+ /* Create a Str node out of last_str, if needed. It will be the
+ last node in our expression list. */
+ if (state->last_str) {
+ expr_ty str = make_str_node_and_del(&state->last_str, c, n);
+ if (!str || ExprList_Append(&state->expr_list, str) < 0)
+ goto error;
+ }
+ /* This has already been freed. */
+ assert(state->last_str == NULL);
+
+ seq = ExprList_Finish(&state->expr_list, c->c_arena);
+ if (!seq)
+ goto error;
+
+ /* If there's only one expression, return it. Otherwise, we need
+ to join them together. */
+ if (seq->size == 1)
+ return seq->elements[0];
+
+ return JoinedStr(seq, LINENO(n), n->n_col_offset, c->c_arena);
+
+error:
+ FstringParser_Dealloc(state);
+ return NULL;
+}
+
+/* Given an f-string (with no 'f' or quotes) that's in *str and ends
+ at end, parse it into an expr_ty. Return NULL on error. Adjust
+ str to point past the parsed portion. */
+static expr_ty
+fstring_parse(const char **str, const char *end, int raw, int recurse_lvl,
+ struct compiling *c, const node *n)
+{
+ FstringParser state;
+
+ FstringParser_Init(&state);
+ if (FstringParser_ConcatFstring(&state, str, end, raw, recurse_lvl,
+ c, n) < 0) {
+ FstringParser_Dealloc(&state);
+ return NULL;
+ }
+
+ return FstringParser_Finish(&state, c, n);
+}
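[editor's note] fstring_parse is also what handles a nested format spec (the recurse_lvl+1 call in fstring_find_expr above), which is why nesting is capped at one extra level. A loose sketch; the outer wrapper (bare FormattedValue vs. JoinedStr) differs between interpreter versions, so only the nesting is the point here ('width' is a placeholder name):

    import ast

    # The spec '{width}' is parsed recursively, so a FormattedValue for
    # 'width' appears nested under format_spec of the outer expression.
    print(ast.dump(ast.parse("f'{0:{width}}'", mode="eval").body))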
+
+/* n is a Python string literal, including the bracketing quote
+ characters, and r, b, u, &/or f prefixes (if any), and embedded
+ escape sequences (if any). parsestr parses it, and sets *result to
+ the decoded Python string object. If the string is an f-string, set
+ *fstr and *fstrlen to the unparsed f-string text and its length. Return 0 if no
+ errors occurred.
+*/
+static int
+parsestr(struct compiling *c, const node *n, int *bytesmode, int *rawmode,
+ PyObject **result, const char **fstr, Py_ssize_t *fstrlen)
{
size_t len;
const char *s = STR(n);
int quote = Py_CHARMASK(*s);
- int rawmode = 0;
- int need_encoding;
+ int fmode = 0;
+ *bytesmode = 0;
+ *rawmode = 0;
+ *result = NULL;
+ *fstr = NULL;
if (Py_ISALPHA(quote)) {
- while (!*bytesmode || !rawmode) {
+ while (!*bytesmode || !*rawmode) {
if (quote == 'b' || quote == 'B') {
quote = *++s;
*bytesmode = 1;
@@ -4293,114 +5190,175 @@ parsestr(struct compiling *c, const node *n, int *bytesmode)
}
else if (quote == 'r' || quote == 'R') {
quote = *++s;
- rawmode = 1;
+ *rawmode = 1;
+ }
+ else if (quote == 'f' || quote == 'F') {
+ quote = *++s;
+ fmode = 1;
}
else {
break;
}
}
}
+ /* fstrings are only allowed in Python 3.6 and greater */
+ if (fmode && c->c_feature_version < 6) {
+ ast_error(c, n, "Format strings are only supported in Python 3.6 and greater");
+ return -1;
+ }
+ if (fmode && *bytesmode) {
+ PyErr_BadInternalCall();
+ return -1;
+ }
if (quote != '\'' && quote != '\"') {
PyErr_BadInternalCall();
- return NULL;
+ return -1;
}
+ /* Skip the leading quote char. */
s++;
len = strlen(s);
if (len > INT_MAX) {
PyErr_SetString(PyExc_OverflowError,
"string to parse is too long");
- return NULL;
+ return -1;
}
if (s[--len] != quote) {
+ /* Last quote char must match the first. */
PyErr_BadInternalCall();
- return NULL;
+ return -1;
}
if (len >= 4 && s[0] == quote && s[1] == quote) {
+ /* A triple quoted string. We've already skipped one quote at
+ the start and one at the end of the string. Now skip the
+ two at the start. */
s += 2;
len -= 2;
+ /* And check that the last two match. */
if (s[--len] != quote || s[--len] != quote) {
PyErr_BadInternalCall();
- return NULL;
+ return -1;
}
}
- if (!*bytesmode && !rawmode) {
- return decode_unicode(c, s, len, rawmode, c->c_encoding);
+
+ if (fmode) {
+ /* Just return the bytes. The caller will parse the resulting
+ string. */
+ *fstr = s;
+ *fstrlen = len;
+ return 0;
}
+
+ /* Not an f-string. */
+ /* Avoid invoking escape decoding routines if possible. */
+ *rawmode = *rawmode || strchr(s, '\\') == NULL;
if (*bytesmode) {
- /* Disallow non-ascii characters (but not escapes) */
+ /* Disallow non-ASCII characters. */
const char *ch;
for (ch = s; *ch; ch++) {
if (Py_CHARMASK(*ch) >= 0x80) {
ast_error(c, n, "bytes can only contain ASCII "
"literal characters.");
- return NULL;
+ return -1;
}
}
+ if (*rawmode)
+ *result = PyBytes_FromStringAndSize(s, len);
+ else
+ *result = decode_bytes_with_escapes(c, n, s, len);
+ } else {
+ if (*rawmode)
+ *result = PyUnicode_DecodeUTF8Stateful(s, len, NULL, NULL);
+ else
+ *result = decode_unicode_with_escapes(c, n, s, len);
}
- need_encoding = (!*bytesmode && c->c_encoding != NULL &&
- strcmp(c->c_encoding, "utf-8") != 0);
- if (rawmode || strchr(s, '\\') == NULL) {
- if (need_encoding) {
- PyObject *v, *u = PyUnicode_DecodeUTF8(s, len, NULL);
- if (u == NULL || !*bytesmode)
- return u;
- v = PyUnicode_AsEncodedString(u, c->c_encoding, NULL);
- Py_DECREF(u);
- return v;
- } else if (*bytesmode) {
- return PyBytes_FromStringAndSize(s, len);
- } else if (strcmp(c->c_encoding, "utf-8") == 0) {
- return PyUnicode_FromStringAndSize(s, len);
- } else {
- return PyUnicode_DecodeLatin1(s, len, NULL);
- }
- }
- return PyBytes_DecodeEscape(s, len, NULL, 1,
- need_encoding ? c->c_encoding : NULL);
+ return *result == NULL ? -1 : 0;
}
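[editor's note] With this rewrite, parsestr itself decides between raw and escape-decoded results and keeps the ASCII-only rule for bytes; the difference is easiest to see at the literal level. A small sketch using ast.literal_eval on a 3.x interpreter:

    import ast

    # With escape processing, \x00 is a single NUL byte ...
    print(ast.literal_eval("b'\\x00'"))    # b'\x00'
    # ... while the raw prefix keeps the four source characters.
    print(ast.literal_eval("rb'\\x00'"))   # b'\\x00'

    # Non-ASCII characters in a bytes literal are rejected, as in the check above:
    try:
        ast.parse("b'\u00e9'")
    except SyntaxError as e:
        print(e)   # bytes can only contain ASCII literal characters.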
-/* Build a Python string object out of a STRING+ atom. This takes care of
- * compile-time literal catenation, calling parsestr() on each piece, and
- * pasting the intermediate results together.
- */
-static PyObject *
-parsestrplus(struct compiling *c, const node *n, int *bytesmode)
+/* Accepts a STRING+ atom, and produces an expr_ty node. Run through
+ each STRING atom, and process it as needed. For bytes, just
+ concatenate them together, and the result will be a Bytes node. For
+ normal strings and f-strings, concatenate them together. The result
+ will be a Str node if there were no f-strings; a FormattedValue
+ node if there's just an f-string (with no leading or trailing
+ literals), or a JoinedStr node if there are multiple f-strings or
+ any literals involved. */
+static expr_ty
+parsestrplus(struct compiling *c, const node *n)
{
- PyObject *v;
+ int bytesmode = 0;
+ PyObject *bytes_str = NULL;
int i;
- REQ(CHILD(n, 0), STRING);
- v = parsestr(c, CHILD(n, 0), bytesmode);
- if (v != NULL) {
- /* String literal concatenation */
- for (i = 1; i < NCH(n); i++) {
- PyObject *s;
- int subbm = 0;
- s = parsestr(c, CHILD(n, i), &subbm);
- if (s == NULL)
- goto onError;
- if (*bytesmode != subbm) {
- ast_error(c, n, "cannot mix bytes and nonbytes literals");
- Py_DECREF(s);
- goto onError;
- }
- if (PyBytes_Check(v) && PyBytes_Check(s)) {
- PyBytes_ConcatAndDel(&v, s);
- if (v == NULL)
- goto onError;
- }
- else {
- PyObject *temp = PyUnicode_Concat(v, s);
- Py_DECREF(s);
- Py_DECREF(v);
- v = temp;
- if (v == NULL)
- goto onError;
+
+ FstringParser state;
+ FstringParser_Init(&state);
+
+ for (i = 0; i < NCH(n); i++) {
+ int this_bytesmode;
+ int this_rawmode;
+ PyObject *s;
+ const char *fstr;
+ Py_ssize_t fstrlen = -1; /* Silence a compiler warning. */
+
+ REQ(CHILD(n, i), STRING);
+ if (parsestr(c, CHILD(n, i), &this_bytesmode, &this_rawmode, &s,
+ &fstr, &fstrlen) != 0)
+ goto error;
+
+ /* Check that we're not mixing bytes with unicode. */
+ if (i != 0 && bytesmode != this_bytesmode) {
+ ast_error(c, n, "cannot mix bytes and nonbytes literals");
+ /* s is NULL if the current string part is an f-string. */
+ Py_XDECREF(s);
+ goto error;
+ }
+ bytesmode = this_bytesmode;
+
+ if (fstr != NULL) {
+ int result;
+ assert(s == NULL && !bytesmode);
+ /* This is an f-string. Parse and concatenate it. */
+ result = FstringParser_ConcatFstring(&state, &fstr, fstr+fstrlen,
+ this_rawmode, 0, c, n);
+ if (result < 0)
+ goto error;
+ } else {
+ /* A string or byte string. */
+ assert(s != NULL && fstr == NULL);
+
+ assert(bytesmode ? PyBytes_CheckExact(s) :
+ PyUnicode_CheckExact(s));
+
+ if (bytesmode) {
+ /* For bytes, concat as we go. */
+ if (i == 0) {
+ /* First time, just remember this value. */
+ bytes_str = s;
+ } else {
+ PyBytes_ConcatAndDel(&bytes_str, s);
+ if (!bytes_str)
+ goto error;
+ }
+ } else {
+ /* This is a regular string. Concatenate it. */
+ if (FstringParser_ConcatAndDel(&state, s) < 0)
+ goto error;
}
}
}
- return v;
+ if (bytesmode) {
+ /* Just return the bytes object and we're done. */
+ if (PyArena_AddPyObject(c->c_arena, bytes_str) < 0)
+ goto error;
+ return Bytes(bytes_str, LINENO(n), n->n_col_offset, c->c_arena);
+ }
+
+ /* We're not a bytes string, bytes_str should never have been set. */
+ assert(bytes_str == NULL);
+
+ return FstringParser_Finish(&state, c, n);
- onError:
- Py_XDECREF(v);
+error:
+ Py_XDECREF(bytes_str);
+ FstringParser_Dealloc(&state);
return NULL;
}
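[editor's note] The net effect of the new parsestrplus shows up in how implicit concatenation is represented: plain pieces fold into the neighbouring literal, any f-string piece forces a JoinedStr, and bytes can never be mixed in. A quick sketch with the stock `ast` module on 3.6+ (x is a placeholder name):

    import ast

    # One JoinedStr whose values interleave literal pieces and FormattedValues.
    print(ast.dump(ast.parse("'a' f'{x}' 'b'", mode="eval").body))

    # Mixing bytes and str pieces fails at parse time with the error kept above:
    try:
        ast.parse("b'a' 'b'")
    except SyntaxError as e:
        print(e)   # cannot mix bytes and nonbytes literals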
diff --git a/ast35/Python/graminit.c b/ast3/Python/graminit.c
similarity index 95%
rename from ast35/Python/graminit.c
rename to ast3/Python/graminit.c
index 9ce881a..4c74793 100644
--- a/ast35/Python/graminit.c
+++ b/ast3/Python/graminit.c
@@ -2,7 +2,7 @@
#include "pgenheaders.h"
#include "grammar.h"
-grammar _Ta35Parser_Grammar;
+extern grammar _Ta3Parser_Grammar;
static arc arcs_0_0[3] = {
{2, 1},
{3, 1},
@@ -216,12 +216,14 @@ static arc arcs_9_7[3] = {
{28, 6},
{0, 7},
};
-static arc arcs_9_8[3] = {
+static arc arcs_9_8[4] = {
{28, 14},
{31, 15},
{35, 3},
+ {0, 8},
};
-static arc arcs_9_9[2] = {
+static arc arcs_9_9[3] = {
+ {33, 16},
{28, 6},
{0, 9},
};
@@ -230,11 +232,10 @@ static arc arcs_9_10[3] = {
{28, 6},
{0, 10},
};
-static arc arcs_9_11[5] = {
+static arc arcs_9_11[4] = {
{31, 12},
{34, 13},
{35, 3},
- {28, 6},
{0, 11},
};
static arc arcs_9_12[4] = {
@@ -244,48 +245,55 @@ static arc arcs_9_12[4] = {
{0, 12},
};
static arc arcs_9_13[4] = {
- {31, 16},
- {33, 17},
+ {31, 17},
+ {33, 18},
{28, 6},
{0, 13},
};
-static arc arcs_9_14[2] = {
+static arc arcs_9_14[3] = {
{31, 15},
{35, 3},
+ {0, 14},
};
static arc arcs_9_15[4] = {
{33, 8},
- {32, 18},
+ {32, 19},
{28, 6},
{0, 15},
};
-static arc arcs_9_16[3] = {
- {33, 17},
+static arc arcs_9_16[2] = {
{28, 6},
{0, 16},
};
static arc arcs_9_17[3] = {
- {28, 19},
- {31, 20},
+ {33, 18},
+ {28, 6},
+ {0, 17},
+};
+static arc arcs_9_18[4] = {
+ {28, 20},
+ {31, 21},
{35, 3},
+ {0, 18},
};
-static arc arcs_9_18[1] = {
+static arc arcs_9_19[1] = {
{26, 7},
};
-static arc arcs_9_19[2] = {
- {31, 20},
+static arc arcs_9_20[3] = {
+ {31, 21},
{35, 3},
+ {0, 20},
};
-static arc arcs_9_20[4] = {
- {33, 17},
- {32, 21},
+static arc arcs_9_21[4] = {
+ {33, 18},
+ {32, 22},
{28, 6},
- {0, 20},
+ {0, 21},
};
-static arc arcs_9_21[1] = {
- {26, 16},
+static arc arcs_9_22[1] = {
+ {26, 17},
};
-static state states_9[22] = {
+static state states_9[23] = {
{3, arcs_9_0},
{4, arcs_9_1},
{4, arcs_9_2},
@@ -294,20 +302,21 @@ static state states_9[22] = {
{5, arcs_9_5},
{1, arcs_9_6},
{3, arcs_9_7},
- {3, arcs_9_8},
- {2, arcs_9_9},
+ {4, arcs_9_8},
+ {3, arcs_9_9},
{3, arcs_9_10},
- {5, arcs_9_11},
+ {4, arcs_9_11},
{4, arcs_9_12},
{4, arcs_9_13},
- {2, arcs_9_14},
+ {3, arcs_9_14},
{4, arcs_9_15},
- {3, arcs_9_16},
+ {2, arcs_9_16},
{3, arcs_9_17},
- {1, arcs_9_18},
- {2, arcs_9_19},
- {4, arcs_9_20},
- {1, arcs_9_21},
+ {4, arcs_9_18},
+ {1, arcs_9_19},
+ {3, arcs_9_20},
+ {4, arcs_9_21},
+ {1, arcs_9_22},
};
static arc arcs_10_0[1] = {
{23, 1},
@@ -359,11 +368,13 @@ static arc arcs_11_6[2] = {
{33, 7},
{0, 6},
};
-static arc arcs_11_7[2] = {
+static arc arcs_11_7[3] = {
{37, 12},
{35, 3},
+ {0, 7},
};
-static arc arcs_11_8[1] = {
+static arc arcs_11_8[2] = {
+ {33, 13},
{0, 8},
};
static arc arcs_11_9[2] = {
@@ -376,35 +387,39 @@ static arc arcs_11_10[3] = {
{0, 10},
};
static arc arcs_11_11[3] = {
- {37, 13},
- {33, 14},
+ {37, 14},
+ {33, 15},
{0, 11},
};
static arc arcs_11_12[3] = {
{33, 7},
- {32, 15},
+ {32, 16},
{0, 12},
};
-static arc arcs_11_13[2] = {
- {33, 14},
+static arc arcs_11_13[1] = {
{0, 13},
};
static arc arcs_11_14[2] = {
- {37, 16},
+ {33, 15},
+ {0, 14},
+};
+static arc arcs_11_15[3] = {
+ {37, 17},
{35, 3},
+ {0, 15},
};
-static arc arcs_11_15[1] = {
+static arc arcs_11_16[1] = {
{26, 6},
};
-static arc arcs_11_16[3] = {
- {33, 14},
- {32, 17},
- {0, 16},
+static arc arcs_11_17[3] = {
+ {33, 15},
+ {32, 18},
+ {0, 17},
};
-static arc arcs_11_17[1] = {
- {26, 13},
+static arc arcs_11_18[1] = {
+ {26, 14},
};
-static state states_11[18] = {
+static state states_11[19] = {
{3, arcs_11_0},
{3, arcs_11_1},
{3, arcs_11_2},
@@ -412,17 +427,18 @@ static state states_11[18] = {
{1, arcs_11_4},
{4, arcs_11_5},
{2, arcs_11_6},
- {2, arcs_11_7},
- {1, arcs_11_8},
+ {3, arcs_11_7},
+ {2, arcs_11_8},
{2, arcs_11_9},
{3, arcs_11_10},
{3, arcs_11_11},
{3, arcs_11_12},
- {2, arcs_11_13},
+ {1, arcs_11_13},
{2, arcs_11_14},
- {1, arcs_11_15},
- {3, arcs_11_16},
- {1, arcs_11_17},
+ {3, arcs_11_15},
+ {1, arcs_11_16},
+ {3, arcs_11_17},
+ {1, arcs_11_18},
};
static arc arcs_12_0[1] = {
{23, 1},
@@ -1859,32 +1875,37 @@ static state states_80[2] = {
{2, arcs_80_0},
{1, arcs_80_1},
};
-static arc arcs_81_0[1] = {
- {102, 1},
+static arc arcs_81_0[2] = {
+ {21, 1},
+ {102, 2},
};
static arc arcs_81_1[1] = {
- {67, 2},
+ {102, 2},
};
static arc arcs_81_2[1] = {
- {103, 3},
+ {67, 3},
};
static arc arcs_81_3[1] = {
- {113, 4},
+ {103, 4},
};
-static arc arcs_81_4[2] = {
- {172, 5},
- {0, 4},
+static arc arcs_81_4[1] = {
+ {113, 5},
};
-static arc arcs_81_5[1] = {
+static arc arcs_81_5[2] = {
+ {172, 6},
{0, 5},
};
-static state states_81[6] = {
- {1, arcs_81_0},
+static arc arcs_81_6[1] = {
+ {0, 6},
+};
+static state states_81[7] = {
+ {2, arcs_81_0},
{1, arcs_81_1},
{1, arcs_81_2},
{1, arcs_81_3},
- {2, arcs_81_4},
- {1, arcs_81_5},
+ {1, arcs_81_4},
+ {2, arcs_81_5},
+ {1, arcs_81_6},
};
static arc arcs_82_0[1] = {
{98, 1},
@@ -2066,11 +2087,11 @@ static dfa dfas[89] = {
"\000\000\100\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
{264, "parameters", 0, 4, states_8,
"\000\040\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
- {265, "typedargslist", 0, 22, states_9,
+ {265, "typedargslist", 0, 23, states_9,
"\000\000\200\000\014\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
{266, "tfpdef", 0, 4, states_10,
"\000\000\200\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
- {267, "varargslist", 0, 18, states_11,
+ {267, "varargslist", 0, 19, states_11,
"\000\000\200\000\014\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
{268, "vfpdef", 0, 2, states_12,
"\000\000\200\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
@@ -2209,9 +2230,9 @@ static dfa dfas[89] = {
{335, "argument", 0, 4, states_79,
"\000\040\200\000\014\000\000\000\000\000\020\000\000\000\040\004\000\200\041\121\076\000\000"},
{336, "comp_iter", 0, 2, states_80,
- "\000\000\000\000\000\000\000\000\000\000\000\000\104\000\000\000\000\000\000\000\000\000\000"},
- {337, "comp_for", 0, 6, states_81,
- "\000\000\000\000\000\000\000\000\000\000\000\000\100\000\000\000\000\000\000\000\000\000\000"},
+ "\000\000\040\000\000\000\000\000\000\000\000\000\104\000\000\000\000\000\000\000\000\000\000"},
+ {337, "comp_for", 0, 7, states_81,
+ "\000\000\040\000\000\000\000\000\000\000\000\000\100\000\000\000\000\000\000\000\000\000\000"},
{338, "comp_if", 0, 4, states_82,
"\000\000\000\000\000\000\000\000\000\000\000\000\004\000\000\000\000\000\000\000\000\000\000"},
{339, "encoding_decl", 0, 2, states_83,
@@ -2409,7 +2430,7 @@ static label labels[180] = {
{343, 0},
{344, 0},
};
-grammar _Ta35Parser_Grammar = {
+grammar _Ta3Parser_Grammar = {
89,
dfas,
{180, labels},
diff --git a/ast35/Include/Python-ast.h b/ast35/Include/Python-ast.h
deleted file mode 100644
index 94e9b5e..0000000
--- a/ast35/Include/Python-ast.h
+++ /dev/null
@@ -1,637 +0,0 @@
-/* File automatically generated by Parser/asdl_c.py. */
-
-#include "asdl.h"
-
-typedef struct _mod *mod_ty;
-
-typedef struct _stmt *stmt_ty;
-
-typedef struct _expr *expr_ty;
-
-typedef enum _expr_context { Load=1, Store=2, Del=3, AugLoad=4, AugStore=5,
- Param=6 } expr_context_ty;
-
-typedef struct _slice *slice_ty;
-
-typedef enum _boolop { And=1, Or=2 } boolop_ty;
-
-typedef enum _operator { Add=1, Sub=2, Mult=3, MatMult=4, Div=5, Mod=6, Pow=7,
- LShift=8, RShift=9, BitOr=10, BitXor=11, BitAnd=12,
- FloorDiv=13 } operator_ty;
-
-typedef enum _unaryop { Invert=1, Not=2, UAdd=3, USub=4 } unaryop_ty;
-
-typedef enum _cmpop { Eq=1, NotEq=2, Lt=3, LtE=4, Gt=5, GtE=6, Is=7, IsNot=8,
- In=9, NotIn=10 } cmpop_ty;
-
-typedef struct _comprehension *comprehension_ty;
-
-typedef struct _excepthandler *excepthandler_ty;
-
-typedef struct _arguments *arguments_ty;
-
-typedef struct _arg *arg_ty;
-
-typedef struct _keyword *keyword_ty;
-
-typedef struct _alias *alias_ty;
-
-typedef struct _withitem *withitem_ty;
-
-typedef struct _type_ignore *type_ignore_ty;
-
-
-enum _mod_kind {Module_kind=1, Interactive_kind=2, Expression_kind=3,
- FunctionType_kind=4, Suite_kind=5};
-struct _mod {
- enum _mod_kind kind;
- union {
- struct {
- asdl_seq *body;
- asdl_seq *type_ignores;
- } Module;
-
- struct {
- asdl_seq *body;
- } Interactive;
-
- struct {
- expr_ty body;
- } Expression;
-
- struct {
- asdl_seq *argtypes;
- expr_ty returns;
- } FunctionType;
-
- struct {
- asdl_seq *body;
- } Suite;
-
- } v;
-};
-
-enum _stmt_kind {FunctionDef_kind=1, AsyncFunctionDef_kind=2, ClassDef_kind=3,
- Return_kind=4, Delete_kind=5, Assign_kind=6,
- AugAssign_kind=7, For_kind=8, AsyncFor_kind=9, While_kind=10,
- If_kind=11, With_kind=12, AsyncWith_kind=13, Raise_kind=14,
- Try_kind=15, Assert_kind=16, Import_kind=17,
- ImportFrom_kind=18, Global_kind=19, Nonlocal_kind=20,
- Expr_kind=21, Pass_kind=22, Break_kind=23, Continue_kind=24};
-struct _stmt {
- enum _stmt_kind kind;
- union {
- struct {
- identifier name;
- arguments_ty args;
- asdl_seq *body;
- asdl_seq *decorator_list;
- expr_ty returns;
- string type_comment;
- } FunctionDef;
-
- struct {
- identifier name;
- arguments_ty args;
- asdl_seq *body;
- asdl_seq *decorator_list;
- expr_ty returns;
- string type_comment;
- } AsyncFunctionDef;
-
- struct {
- identifier name;
- asdl_seq *bases;
- asdl_seq *keywords;
- asdl_seq *body;
- asdl_seq *decorator_list;
- } ClassDef;
-
- struct {
- expr_ty value;
- } Return;
-
- struct {
- asdl_seq *targets;
- } Delete;
-
- struct {
- asdl_seq *targets;
- expr_ty value;
- string type_comment;
- expr_ty annotation;
- } Assign;
-
- struct {
- expr_ty target;
- operator_ty op;
- expr_ty value;
- } AugAssign;
-
- struct {
- expr_ty target;
- expr_ty iter;
- asdl_seq *body;
- asdl_seq *orelse;
- string type_comment;
- } For;
-
- struct {
- expr_ty target;
- expr_ty iter;
- asdl_seq *body;
- asdl_seq *orelse;
- } AsyncFor;
-
- struct {
- expr_ty test;
- asdl_seq *body;
- asdl_seq *orelse;
- } While;
-
- struct {
- expr_ty test;
- asdl_seq *body;
- asdl_seq *orelse;
- } If;
-
- struct {
- asdl_seq *items;
- asdl_seq *body;
- string type_comment;
- } With;
-
- struct {
- asdl_seq *items;
- asdl_seq *body;
- } AsyncWith;
-
- struct {
- expr_ty exc;
- expr_ty cause;
- } Raise;
-
- struct {
- asdl_seq *body;
- asdl_seq *handlers;
- asdl_seq *orelse;
- asdl_seq *finalbody;
- } Try;
-
- struct {
- expr_ty test;
- expr_ty msg;
- } Assert;
-
- struct {
- asdl_seq *names;
- } Import;
-
- struct {
- identifier module;
- asdl_seq *names;
- int level;
- } ImportFrom;
-
- struct {
- asdl_seq *names;
- } Global;
-
- struct {
- asdl_seq *names;
- } Nonlocal;
-
- struct {
- expr_ty value;
- } Expr;
-
- } v;
- int lineno;
- int col_offset;
-};
-
-enum _expr_kind {BoolOp_kind=1, BinOp_kind=2, UnaryOp_kind=3, Lambda_kind=4,
- IfExp_kind=5, Dict_kind=6, Set_kind=7, ListComp_kind=8,
- SetComp_kind=9, DictComp_kind=10, GeneratorExp_kind=11,
- Await_kind=12, Yield_kind=13, YieldFrom_kind=14,
- Compare_kind=15, Call_kind=16, Num_kind=17, Str_kind=18,
- Bytes_kind=19, NameConstant_kind=20, Ellipsis_kind=21,
- Attribute_kind=22, Subscript_kind=23, Starred_kind=24,
- Name_kind=25, List_kind=26, Tuple_kind=27};
-struct _expr {
- enum _expr_kind kind;
- union {
- struct {
- boolop_ty op;
- asdl_seq *values;
- } BoolOp;
-
- struct {
- expr_ty left;
- operator_ty op;
- expr_ty right;
- } BinOp;
-
- struct {
- unaryop_ty op;
- expr_ty operand;
- } UnaryOp;
-
- struct {
- arguments_ty args;
- expr_ty body;
- } Lambda;
-
- struct {
- expr_ty test;
- expr_ty body;
- expr_ty orelse;
- } IfExp;
-
- struct {
- asdl_seq *keys;
- asdl_seq *values;
- } Dict;
-
- struct {
- asdl_seq *elts;
- } Set;
-
- struct {
- expr_ty elt;
- asdl_seq *generators;
- } ListComp;
-
- struct {
- expr_ty elt;
- asdl_seq *generators;
- } SetComp;
-
- struct {
- expr_ty key;
- expr_ty value;
- asdl_seq *generators;
- } DictComp;
-
- struct {
- expr_ty elt;
- asdl_seq *generators;
- } GeneratorExp;
-
- struct {
- expr_ty value;
- } Await;
-
- struct {
- expr_ty value;
- } Yield;
-
- struct {
- expr_ty value;
- } YieldFrom;
-
- struct {
- expr_ty left;
- asdl_int_seq *ops;
- asdl_seq *comparators;
- } Compare;
-
- struct {
- expr_ty func;
- asdl_seq *args;
- asdl_seq *keywords;
- } Call;
-
- struct {
- object n;
- int contains_underscores;
- } Num;
-
- struct {
- string s;
- } Str;
-
- struct {
- bytes s;
- } Bytes;
-
- struct {
- singleton value;
- } NameConstant;
-
- struct {
- expr_ty value;
- identifier attr;
- expr_context_ty ctx;
- } Attribute;
-
- struct {
- expr_ty value;
- slice_ty slice;
- expr_context_ty ctx;
- } Subscript;
-
- struct {
- expr_ty value;
- expr_context_ty ctx;
- } Starred;
-
- struct {
- identifier id;
- expr_context_ty ctx;
- } Name;
-
- struct {
- asdl_seq *elts;
- expr_context_ty ctx;
- } List;
-
- struct {
- asdl_seq *elts;
- expr_context_ty ctx;
- } Tuple;
-
- } v;
- int lineno;
- int col_offset;
-};
-
-enum _slice_kind {Slice_kind=1, ExtSlice_kind=2, Index_kind=3};
-struct _slice {
- enum _slice_kind kind;
- union {
- struct {
- expr_ty lower;
- expr_ty upper;
- expr_ty step;
- } Slice;
-
- struct {
- asdl_seq *dims;
- } ExtSlice;
-
- struct {
- expr_ty value;
- } Index;
-
- } v;
-};
-
-struct _comprehension {
- expr_ty target;
- expr_ty iter;
- asdl_seq *ifs;
-};
-
-enum _excepthandler_kind {ExceptHandler_kind=1};
-struct _excepthandler {
- enum _excepthandler_kind kind;
- union {
- struct {
- expr_ty type;
- identifier name;
- asdl_seq *body;
- } ExceptHandler;
-
- } v;
- int lineno;
- int col_offset;
-};
-
-struct _arguments {
- asdl_seq *args;
- arg_ty vararg;
- asdl_seq *kwonlyargs;
- asdl_seq *kw_defaults;
- arg_ty kwarg;
- asdl_seq *defaults;
-};
-
-struct _arg {
- identifier arg;
- expr_ty annotation;
- int lineno;
- int col_offset;
-};
-
-struct _keyword {
- identifier arg;
- expr_ty value;
-};
-
-struct _alias {
- identifier name;
- identifier asname;
-};
-
-struct _withitem {
- expr_ty context_expr;
- expr_ty optional_vars;
-};
-
-enum _type_ignore_kind {TypeIgnore_kind=1};
-struct _type_ignore {
- enum _type_ignore_kind kind;
- union {
- struct {
- int lineno;
- } TypeIgnore;
-
- } v;
-};
-
-
-#define Module(a0, a1, a2) _Ta35_Module(a0, a1, a2)
-mod_ty _Ta35_Module(asdl_seq * body, asdl_seq * type_ignores, PyArena *arena);
-#define Interactive(a0, a1) _Ta35_Interactive(a0, a1)
-mod_ty _Ta35_Interactive(asdl_seq * body, PyArena *arena);
-#define Expression(a0, a1) _Ta35_Expression(a0, a1)
-mod_ty _Ta35_Expression(expr_ty body, PyArena *arena);
-#define FunctionType(a0, a1, a2) _Ta35_FunctionType(a0, a1, a2)
-mod_ty _Ta35_FunctionType(asdl_seq * argtypes, expr_ty returns, PyArena *arena);
-#define Suite(a0, a1) _Ta35_Suite(a0, a1)
-mod_ty _Ta35_Suite(asdl_seq * body, PyArena *arena);
-#define FunctionDef(a0, a1, a2, a3, a4, a5, a6, a7, a8) _Ta35_FunctionDef(a0, a1, a2, a3, a4, a5, a6, a7, a8)
-stmt_ty _Ta35_FunctionDef(identifier name, arguments_ty args, asdl_seq * body,
- asdl_seq * decorator_list, expr_ty returns, string
- type_comment, int lineno, int col_offset, PyArena
- *arena);
-#define AsyncFunctionDef(a0, a1, a2, a3, a4, a5, a6, a7, a8) _Ta35_AsyncFunctionDef(a0, a1, a2, a3, a4, a5, a6, a7, a8)
-stmt_ty _Ta35_AsyncFunctionDef(identifier name, arguments_ty args, asdl_seq *
- body, asdl_seq * decorator_list, expr_ty
- returns, string type_comment, int lineno, int
- col_offset, PyArena *arena);
-#define ClassDef(a0, a1, a2, a3, a4, a5, a6, a7) _Ta35_ClassDef(a0, a1, a2, a3, a4, a5, a6, a7)
-stmt_ty _Ta35_ClassDef(identifier name, asdl_seq * bases, asdl_seq * keywords,
- asdl_seq * body, asdl_seq * decorator_list, int lineno,
- int col_offset, PyArena *arena);
-#define Return(a0, a1, a2, a3) _Ta35_Return(a0, a1, a2, a3)
-stmt_ty _Ta35_Return(expr_ty value, int lineno, int col_offset, PyArena *arena);
-#define Delete(a0, a1, a2, a3) _Ta35_Delete(a0, a1, a2, a3)
-stmt_ty _Ta35_Delete(asdl_seq * targets, int lineno, int col_offset, PyArena
- *arena);
-#define Assign(a0, a1, a2, a3, a4, a5, a6) _Ta35_Assign(a0, a1, a2, a3, a4, a5, a6)
-stmt_ty _Ta35_Assign(asdl_seq * targets, expr_ty value, string type_comment,
- expr_ty annotation, int lineno, int col_offset, PyArena
- *arena);
-#define AugAssign(a0, a1, a2, a3, a4, a5) _Ta35_AugAssign(a0, a1, a2, a3, a4, a5)
-stmt_ty _Ta35_AugAssign(expr_ty target, operator_ty op, expr_ty value, int
- lineno, int col_offset, PyArena *arena);
-#define For(a0, a1, a2, a3, a4, a5, a6, a7) _Ta35_For(a0, a1, a2, a3, a4, a5, a6, a7)
-stmt_ty _Ta35_For(expr_ty target, expr_ty iter, asdl_seq * body, asdl_seq *
- orelse, string type_comment, int lineno, int col_offset,
- PyArena *arena);
-#define AsyncFor(a0, a1, a2, a3, a4, a5, a6) _Ta35_AsyncFor(a0, a1, a2, a3, a4, a5, a6)
-stmt_ty _Ta35_AsyncFor(expr_ty target, expr_ty iter, asdl_seq * body, asdl_seq
- * orelse, int lineno, int col_offset, PyArena *arena);
-#define While(a0, a1, a2, a3, a4, a5) _Ta35_While(a0, a1, a2, a3, a4, a5)
-stmt_ty _Ta35_While(expr_ty test, asdl_seq * body, asdl_seq * orelse, int
- lineno, int col_offset, PyArena *arena);
-#define If(a0, a1, a2, a3, a4, a5) _Ta35_If(a0, a1, a2, a3, a4, a5)
-stmt_ty _Ta35_If(expr_ty test, asdl_seq * body, asdl_seq * orelse, int lineno,
- int col_offset, PyArena *arena);
-#define With(a0, a1, a2, a3, a4, a5) _Ta35_With(a0, a1, a2, a3, a4, a5)
-stmt_ty _Ta35_With(asdl_seq * items, asdl_seq * body, string type_comment, int
- lineno, int col_offset, PyArena *arena);
-#define AsyncWith(a0, a1, a2, a3, a4) _Ta35_AsyncWith(a0, a1, a2, a3, a4)
-stmt_ty _Ta35_AsyncWith(asdl_seq * items, asdl_seq * body, int lineno, int
- col_offset, PyArena *arena);
-#define Raise(a0, a1, a2, a3, a4) _Ta35_Raise(a0, a1, a2, a3, a4)
-stmt_ty _Ta35_Raise(expr_ty exc, expr_ty cause, int lineno, int col_offset,
- PyArena *arena);
-#define Try(a0, a1, a2, a3, a4, a5, a6) _Ta35_Try(a0, a1, a2, a3, a4, a5, a6)
-stmt_ty _Ta35_Try(asdl_seq * body, asdl_seq * handlers, asdl_seq * orelse,
- asdl_seq * finalbody, int lineno, int col_offset, PyArena
- *arena);
-#define Assert(a0, a1, a2, a3, a4) _Ta35_Assert(a0, a1, a2, a3, a4)
-stmt_ty _Ta35_Assert(expr_ty test, expr_ty msg, int lineno, int col_offset,
- PyArena *arena);
-#define Import(a0, a1, a2, a3) _Ta35_Import(a0, a1, a2, a3)
-stmt_ty _Ta35_Import(asdl_seq * names, int lineno, int col_offset, PyArena
- *arena);
-#define ImportFrom(a0, a1, a2, a3, a4, a5) _Ta35_ImportFrom(a0, a1, a2, a3, a4, a5)
-stmt_ty _Ta35_ImportFrom(identifier module, asdl_seq * names, int level, int
- lineno, int col_offset, PyArena *arena);
-#define Global(a0, a1, a2, a3) _Ta35_Global(a0, a1, a2, a3)
-stmt_ty _Ta35_Global(asdl_seq * names, int lineno, int col_offset, PyArena
- *arena);
-#define Nonlocal(a0, a1, a2, a3) _Ta35_Nonlocal(a0, a1, a2, a3)
-stmt_ty _Ta35_Nonlocal(asdl_seq * names, int lineno, int col_offset, PyArena
- *arena);
-#define Expr(a0, a1, a2, a3) _Ta35_Expr(a0, a1, a2, a3)
-stmt_ty _Ta35_Expr(expr_ty value, int lineno, int col_offset, PyArena *arena);
-#define Pass(a0, a1, a2) _Ta35_Pass(a0, a1, a2)
-stmt_ty _Ta35_Pass(int lineno, int col_offset, PyArena *arena);
-#define Break(a0, a1, a2) _Ta35_Break(a0, a1, a2)
-stmt_ty _Ta35_Break(int lineno, int col_offset, PyArena *arena);
-#define Continue(a0, a1, a2) _Ta35_Continue(a0, a1, a2)
-stmt_ty _Ta35_Continue(int lineno, int col_offset, PyArena *arena);
-#define BoolOp(a0, a1, a2, a3, a4) _Ta35_BoolOp(a0, a1, a2, a3, a4)
-expr_ty _Ta35_BoolOp(boolop_ty op, asdl_seq * values, int lineno, int
- col_offset, PyArena *arena);
-#define BinOp(a0, a1, a2, a3, a4, a5) _Ta35_BinOp(a0, a1, a2, a3, a4, a5)
-expr_ty _Ta35_BinOp(expr_ty left, operator_ty op, expr_ty right, int lineno,
- int col_offset, PyArena *arena);
-#define UnaryOp(a0, a1, a2, a3, a4) _Ta35_UnaryOp(a0, a1, a2, a3, a4)
-expr_ty _Ta35_UnaryOp(unaryop_ty op, expr_ty operand, int lineno, int
- col_offset, PyArena *arena);
-#define Lambda(a0, a1, a2, a3, a4) _Ta35_Lambda(a0, a1, a2, a3, a4)
-expr_ty _Ta35_Lambda(arguments_ty args, expr_ty body, int lineno, int
- col_offset, PyArena *arena);
-#define IfExp(a0, a1, a2, a3, a4, a5) _Ta35_IfExp(a0, a1, a2, a3, a4, a5)
-expr_ty _Ta35_IfExp(expr_ty test, expr_ty body, expr_ty orelse, int lineno, int
- col_offset, PyArena *arena);
-#define Dict(a0, a1, a2, a3, a4) _Ta35_Dict(a0, a1, a2, a3, a4)
-expr_ty _Ta35_Dict(asdl_seq * keys, asdl_seq * values, int lineno, int
- col_offset, PyArena *arena);
-#define Set(a0, a1, a2, a3) _Ta35_Set(a0, a1, a2, a3)
-expr_ty _Ta35_Set(asdl_seq * elts, int lineno, int col_offset, PyArena *arena);
-#define ListComp(a0, a1, a2, a3, a4) _Ta35_ListComp(a0, a1, a2, a3, a4)
-expr_ty _Ta35_ListComp(expr_ty elt, asdl_seq * generators, int lineno, int
- col_offset, PyArena *arena);
-#define SetComp(a0, a1, a2, a3, a4) _Ta35_SetComp(a0, a1, a2, a3, a4)
-expr_ty _Ta35_SetComp(expr_ty elt, asdl_seq * generators, int lineno, int
- col_offset, PyArena *arena);
-#define DictComp(a0, a1, a2, a3, a4, a5) _Ta35_DictComp(a0, a1, a2, a3, a4, a5)
-expr_ty _Ta35_DictComp(expr_ty key, expr_ty value, asdl_seq * generators, int
- lineno, int col_offset, PyArena *arena);
-#define GeneratorExp(a0, a1, a2, a3, a4) _Ta35_GeneratorExp(a0, a1, a2, a3, a4)
-expr_ty _Ta35_GeneratorExp(expr_ty elt, asdl_seq * generators, int lineno, int
- col_offset, PyArena *arena);
-#define Await(a0, a1, a2, a3) _Ta35_Await(a0, a1, a2, a3)
-expr_ty _Ta35_Await(expr_ty value, int lineno, int col_offset, PyArena *arena);
-#define Yield(a0, a1, a2, a3) _Ta35_Yield(a0, a1, a2, a3)
-expr_ty _Ta35_Yield(expr_ty value, int lineno, int col_offset, PyArena *arena);
-#define YieldFrom(a0, a1, a2, a3) _Ta35_YieldFrom(a0, a1, a2, a3)
-expr_ty _Ta35_YieldFrom(expr_ty value, int lineno, int col_offset, PyArena
- *arena);
-#define Compare(a0, a1, a2, a3, a4, a5) _Ta35_Compare(a0, a1, a2, a3, a4, a5)
-expr_ty _Ta35_Compare(expr_ty left, asdl_int_seq * ops, asdl_seq * comparators,
- int lineno, int col_offset, PyArena *arena);
-#define Call(a0, a1, a2, a3, a4, a5) _Ta35_Call(a0, a1, a2, a3, a4, a5)
-expr_ty _Ta35_Call(expr_ty func, asdl_seq * args, asdl_seq * keywords, int
- lineno, int col_offset, PyArena *arena);
-#define Num(a0, a1, a2, a3, a4) _Ta35_Num(a0, a1, a2, a3, a4)
-expr_ty _Ta35_Num(object n, int contains_underscores, int lineno, int
- col_offset, PyArena *arena);
-#define Str(a0, a1, a2, a3) _Ta35_Str(a0, a1, a2, a3)
-expr_ty _Ta35_Str(string s, int lineno, int col_offset, PyArena *arena);
-#define Bytes(a0, a1, a2, a3) _Ta35_Bytes(a0, a1, a2, a3)
-expr_ty _Ta35_Bytes(bytes s, int lineno, int col_offset, PyArena *arena);
-#define NameConstant(a0, a1, a2, a3) _Ta35_NameConstant(a0, a1, a2, a3)
-expr_ty _Ta35_NameConstant(singleton value, int lineno, int col_offset, PyArena
- *arena);
-#define Ellipsis(a0, a1, a2) _Ta35_Ellipsis(a0, a1, a2)
-expr_ty _Ta35_Ellipsis(int lineno, int col_offset, PyArena *arena);
-#define Attribute(a0, a1, a2, a3, a4, a5) _Ta35_Attribute(a0, a1, a2, a3, a4, a5)
-expr_ty _Ta35_Attribute(expr_ty value, identifier attr, expr_context_ty ctx,
- int lineno, int col_offset, PyArena *arena);
-#define Subscript(a0, a1, a2, a3, a4, a5) _Ta35_Subscript(a0, a1, a2, a3, a4, a5)
-expr_ty _Ta35_Subscript(expr_ty value, slice_ty slice, expr_context_ty ctx, int
- lineno, int col_offset, PyArena *arena);
-#define Starred(a0, a1, a2, a3, a4) _Ta35_Starred(a0, a1, a2, a3, a4)
-expr_ty _Ta35_Starred(expr_ty value, expr_context_ty ctx, int lineno, int
- col_offset, PyArena *arena);
-#define Name(a0, a1, a2, a3, a4) _Ta35_Name(a0, a1, a2, a3, a4)
-expr_ty _Ta35_Name(identifier id, expr_context_ty ctx, int lineno, int
- col_offset, PyArena *arena);
-#define List(a0, a1, a2, a3, a4) _Ta35_List(a0, a1, a2, a3, a4)
-expr_ty _Ta35_List(asdl_seq * elts, expr_context_ty ctx, int lineno, int
- col_offset, PyArena *arena);
-#define Tuple(a0, a1, a2, a3, a4) _Ta35_Tuple(a0, a1, a2, a3, a4)
-expr_ty _Ta35_Tuple(asdl_seq * elts, expr_context_ty ctx, int lineno, int
- col_offset, PyArena *arena);
-#define Slice(a0, a1, a2, a3) _Ta35_Slice(a0, a1, a2, a3)
-slice_ty _Ta35_Slice(expr_ty lower, expr_ty upper, expr_ty step, PyArena
- *arena);
-#define ExtSlice(a0, a1) _Ta35_ExtSlice(a0, a1)
-slice_ty _Ta35_ExtSlice(asdl_seq * dims, PyArena *arena);
-#define Index(a0, a1) _Ta35_Index(a0, a1)
-slice_ty _Ta35_Index(expr_ty value, PyArena *arena);
-#define comprehension(a0, a1, a2, a3) _Ta35_comprehension(a0, a1, a2, a3)
-comprehension_ty _Ta35_comprehension(expr_ty target, expr_ty iter, asdl_seq *
- ifs, PyArena *arena);
-#define ExceptHandler(a0, a1, a2, a3, a4, a5) _Ta35_ExceptHandler(a0, a1, a2, a3, a4, a5)
-excepthandler_ty _Ta35_ExceptHandler(expr_ty type, identifier name, asdl_seq *
- body, int lineno, int col_offset, PyArena
- *arena);
-#define arguments(a0, a1, a2, a3, a4, a5, a6) _Ta35_arguments(a0, a1, a2, a3, a4, a5, a6)
-arguments_ty _Ta35_arguments(asdl_seq * args, arg_ty vararg, asdl_seq *
- kwonlyargs, asdl_seq * kw_defaults, arg_ty kwarg,
- asdl_seq * defaults, PyArena *arena);
-#define arg(a0, a1, a2, a3, a4) _Ta35_arg(a0, a1, a2, a3, a4)
-arg_ty _Ta35_arg(identifier arg, expr_ty annotation, int lineno, int
- col_offset, PyArena *arena);
-#define keyword(a0, a1, a2) _Ta35_keyword(a0, a1, a2)
-keyword_ty _Ta35_keyword(identifier arg, expr_ty value, PyArena *arena);
-#define alias(a0, a1, a2) _Ta35_alias(a0, a1, a2)
-alias_ty _Ta35_alias(identifier name, identifier asname, PyArena *arena);
-#define withitem(a0, a1, a2) _Ta35_withitem(a0, a1, a2)
-withitem_ty _Ta35_withitem(expr_ty context_expr, expr_ty optional_vars, PyArena
- *arena);
-#define TypeIgnore(a0, a1) _Ta35_TypeIgnore(a0, a1)
-type_ignore_ty _Ta35_TypeIgnore(int lineno, PyArena *arena);
-
-PyObject* Ta35AST_mod2obj(mod_ty t);
-mod_ty Ta35AST_obj2mod(PyObject* ast, PyArena* arena, int mode);
-int Ta35AST_Check(PyObject* obj);
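The struct fields declared in the removed header carry over into the new ast3/Include/Python-ast.h (with renamed symbol prefixes) and surface directly in the Python-level API. A minimal sketch of two of them, FunctionDef.type_comment and the FunctionType node produced by mode='func_type'; this assumes typed_ast 1.0.4 is installed and the sample source strings are purely illustrative:

    from typed_ast import ast3

    # FunctionDef carries the type_comment string declared in the header above.
    tree = ast3.parse("def f(x):\n    # type: (int) -> int\n    return x\n")
    print(tree.body[0].type_comment)        # '(int) -> int'

    # mode='func_type' parses a bare signature into the FunctionType node
    # (argtypes + returns) declared above.
    sig = ast3.parse("(int, str) -> None", mode='func_type')
    print(ast3.dump(sig))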
diff --git a/setup.cfg b/setup.cfg
index 861a9f5..8bfd5a1 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -1,5 +1,4 @@
[egg_info]
tag_build =
tag_date = 0
-tag_svn_revision = 0
diff --git a/setup.py b/setup.py
index 1f19fc3..e4b3467 100644
--- a/setup.py
+++ b/setup.py
@@ -43,57 +43,60 @@ _ast27 = Extension(
])
-_ast35 = Extension(
- '_ast35',
- include_dirs = ['ast35/Include'],
+_ast3 = Extension(
+ '_ast3',
+ include_dirs = ['ast3/Include'],
sources = [
- 'ast35/Parser/acceler.c',
- 'ast35/Parser/bitset.c',
- 'ast35/Parser/grammar.c',
- 'ast35/Parser/grammar1.c',
- 'ast35/Parser/node.c',
- 'ast35/Parser/parser.c',
- 'ast35/Parser/parsetok.c',
- 'ast35/Parser/tokenizer.c',
- 'ast35/Python/asdl.c',
- 'ast35/Python/ast.c',
- 'ast35/Python/graminit.c',
- 'ast35/Python/Python-ast.c',
- 'ast35/Custom/typed_ast.c',
+ 'ast3/Parser/acceler.c',
+ 'ast3/Parser/bitset.c',
+ 'ast3/Parser/grammar.c',
+ 'ast3/Parser/grammar1.c',
+ 'ast3/Parser/node.c',
+ 'ast3/Parser/parser.c',
+ 'ast3/Parser/parsetok.c',
+ 'ast3/Parser/tokenizer.c',
+ 'ast3/Python/asdl.c',
+ 'ast3/Python/ast.c',
+ 'ast3/Python/graminit.c',
+ 'ast3/Python/Python-ast.c',
+ 'ast3/Custom/typed_ast.c',
],
depends = [
- 'ast35/Include/asdl.h',
- 'ast35/Include/ast.h',
- 'ast35/Include/bitset.h',
- 'ast35/Include/compile.h',
- 'ast35/Include/errcode.h',
- 'ast35/Include/graminit.h',
- 'ast35/Include/grammar.h',
- 'ast35/Include/node.h',
- 'ast35/Include/parsetok.h',
- 'ast35/Include/Python-ast.h',
- 'ast35/Include/token.h',
- 'ast35/Parser/parser.h',
- 'ast35/Parser/tokenizer.h',
+ 'ast3/Include/asdl.h',
+ 'ast3/Include/ast.h',
+ 'ast3/Include/bitset.h',
+ 'ast3/Include/compile.h',
+ 'ast3/Include/errcode.h',
+ 'ast3/Include/graminit.h',
+ 'ast3/Include/grammar.h',
+ 'ast3/Include/node.h',
+ 'ast3/Include/parsetok.h',
+ 'ast3/Include/Python-ast.h',
+ 'ast3/Include/token.h',
+ 'ast3/Parser/parser.h',
+ 'ast3/Parser/tokenizer.h',
])
long_description = """
-This package is a fork of the CPython 2.7 and 3.5 `ast` modules with the
-ability to parse PEP 484 (https://www.python.org/dev/peps/pep-0484/) type
-comments. The primary goals of this package are correctness and speed.
+`typed_ast` is a Python 3 package that provides a Python 2.7 and Python 3
+parser similar to the standard `ast` library. Unlike `ast`, the parsers in
+`typed_ast` include PEP 484 type comments and are independent of the version of
+Python under which they are run. The `typed_ast` parsers produce the standard
+Python AST (plus type comments), and are both fast and correct, as they are
+based on the CPython 2.7 and 3.6 parsers.
""".strip()
setup (name = 'typed-ast',
- version = '0.6.3',
- description = 'fork of Python 2 and 3 ast modules with type comment support',
+ version = '1.0.4',
+ description = 'a fork of Python 2 and 3 ast modules with type comment support',
long_description = long_description,
author = 'David Fisher',
author_email = 'ddfisher at dropbox.com',
- url = 'https://github.com/dropbox/typed_ast',
+ url = 'https://github.com/python/typed_ast',
license='Apache License 2.0',
platforms = ['POSIX', 'Windows'],
classifiers = [
- 'Development Status :: 4 - Beta',
+ 'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Intended Audience :: Developers',
'Operating System :: POSIX',
@@ -105,4 +108,4 @@ setup (name = 'typed-ast',
'Topic :: Software Development',
],
packages = ['typed_ast'],
- ext_modules = [_ast27, _ast35])
+ ext_modules = [_ast27, _ast3])
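With the extension renamed from _ast35 to _ast3, a quick post-build smoke test might look like the following; the build/install commands mentioned in the comment are the usual setuptools/pip invocations, not anything specific to this repository:

    # After `python setup.py build_ext --inplace` or `pip install typed-ast==1.0.4`:
    import _ast27, _ast3                 # the two compiled extensions listed in setup.py above
    from typed_ast import ast27, ast3    # the pure-Python wrappers shipped in the typed_ast package
    print(_ast3.__name__, ast3.LATEST_MINOR_VERSION)   # _ast3 6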
diff --git a/typed_ast.egg-info/PKG-INFO b/typed_ast.egg-info/PKG-INFO
index 3c2dafb..54d110f 100644
--- a/typed_ast.egg-info/PKG-INFO
+++ b/typed_ast.egg-info/PKG-INFO
@@ -1,17 +1,20 @@
Metadata-Version: 1.1
Name: typed-ast
-Version: 0.6.3
-Summary: fork of Python 2 and 3 ast modules with type comment support
-Home-page: https://github.com/dropbox/typed_ast
+Version: 1.0.4
+Summary: a fork of Python 2 and 3 ast modules with type comment support
+Home-page: https://github.com/python/typed_ast
Author: David Fisher
Author-email: ddfisher at dropbox.com
License: Apache License 2.0
-Description: This package is a fork of the CPython 2.7 and 3.5 `ast` modules with the
- ability to parse PEP 484 (https://www.python.org/dev/peps/pep-0484/) type
- comments. The primary goals of this package are correctness and speed.
+Description: `typed_ast` is a Python 3 package that provides a Python 2.7 and Python 3
+ parser similar to the standard `ast` library. Unlike `ast`, the parsers in
+ `typed_ast` include PEP 484 type comments and are independent of the version of
+ Python under which they are run. The `typed_ast` parsers produce the standard
+ Python AST (plus type comments), and are both fast and correct, as they are
+ based on the CPython 2.7 and 3.6 parsers.
Platform: POSIX
Platform: Windows
-Classifier: Development Status :: 4 - Beta
+Classifier: Development Status :: 5 - Production/Stable
Classifier: Environment :: Console
Classifier: Intended Audience :: Developers
Classifier: Operating System :: POSIX
diff --git a/typed_ast.egg-info/SOURCES.txt b/typed_ast.egg-info/SOURCES.txt
index 519ca60..d3a0571 100644
--- a/typed_ast.egg-info/SOURCES.txt
+++ b/typed_ast.egg-info/SOURCES.txt
@@ -28,35 +28,35 @@ ast27/Python/asdl.c
ast27/Python/ast.c
ast27/Python/graminit.c
ast27/Python/mystrtoul.c
-ast35/Custom/typed_ast.c
-ast35/Include/Python-ast.h
-ast35/Include/asdl.h
-ast35/Include/ast.h
-ast35/Include/bitset.h
-ast35/Include/compile.h
-ast35/Include/errcode.h
-ast35/Include/graminit.h
-ast35/Include/grammar.h
-ast35/Include/node.h
-ast35/Include/parsetok.h
-ast35/Include/token.h
-ast35/Parser/acceler.c
-ast35/Parser/bitset.c
-ast35/Parser/grammar.c
-ast35/Parser/grammar1.c
-ast35/Parser/node.c
-ast35/Parser/parser.c
-ast35/Parser/parser.h
-ast35/Parser/parsetok.c
-ast35/Parser/tokenizer.c
-ast35/Parser/tokenizer.h
-ast35/Python/Python-ast.c
-ast35/Python/asdl.c
-ast35/Python/ast.c
-ast35/Python/graminit.c
+ast3/Custom/typed_ast.c
+ast3/Include/Python-ast.h
+ast3/Include/asdl.h
+ast3/Include/ast.h
+ast3/Include/bitset.h
+ast3/Include/compile.h
+ast3/Include/errcode.h
+ast3/Include/graminit.h
+ast3/Include/grammar.h
+ast3/Include/node.h
+ast3/Include/parsetok.h
+ast3/Include/token.h
+ast3/Parser/acceler.c
+ast3/Parser/bitset.c
+ast3/Parser/grammar.c
+ast3/Parser/grammar1.c
+ast3/Parser/node.c
+ast3/Parser/parser.c
+ast3/Parser/parser.h
+ast3/Parser/parsetok.c
+ast3/Parser/tokenizer.c
+ast3/Parser/tokenizer.h
+ast3/Python/Python-ast.c
+ast3/Python/asdl.c
+ast3/Python/ast.c
+ast3/Python/graminit.c
typed_ast/__init__.py
typed_ast/ast27.py
-typed_ast/ast35.py
+typed_ast/ast3.py
typed_ast/conversions.py
typed_ast.egg-info/PKG-INFO
typed_ast.egg-info/SOURCES.txt
diff --git a/typed_ast.egg-info/top_level.txt b/typed_ast.egg-info/top_level.txt
index b7d8e46..8c96e51 100644
--- a/typed_ast.egg-info/top_level.txt
+++ b/typed_ast.egg-info/top_level.txt
@@ -1,3 +1,3 @@
_ast27
-_ast35
+_ast3
typed_ast
diff --git a/typed_ast/ast27.py b/typed_ast/ast27.py
index a1a15f8..4ba8667 100644
--- a/typed_ast/ast27.py
+++ b/typed_ast/ast27.py
@@ -1,17 +1,31 @@
# -*- coding: utf-8 -*-
"""
- ast
+ ast27
~~~
- The `ast` module helps Python applications to process trees of the Python
+ The `ast27` module helps Python applications to process trees of the Python
abstract syntax grammar. The abstract syntax itself might change with
each Python release; this module helps to find out programmatically what
- the current grammar looks like and allows modifications of it.
-
- An abstract syntax tree can be generated by passing `ast.PyCF_ONLY_AST` as
- a flag to the `compile()` builtin function or by using the `parse()`
+ the current grammar looks like and allows modifications of it. The `ast27`
+ module is similar to the builtin `ast` module on Python 2.7, except `ast27`
+ runs on Python 3 and provides PEP 484 type comments as part of the AST.
+
+ Specifically, these changes are made to the Python 2.7 AST:
+ - The `FunctionDef`, `Assign`, `For`, and `With` classes all have a
+ `type_comment` field which contains a `str` with the text of the
+ associated type comment, if any.
+ - `arguments` has a `type_comments` list of per-argument type comments.
+ - `parse` has been augmented so it can parse function signature types when
+ called with `mode=func_type`.
+ - `Module` has a `type_ignores` field which contains a list of
+ lines which have been `# type: ignore`d.
+ - `Str` has a `has_b` boolean field which indicates if the string is
+ explicitly prefixed with a `b`. (This is deprecated and may be removed in
+ future versions.)
+
+ An abstract syntax tree can be generated by using the `parse()`
function from this module. The result will be a tree of objects whose
- classes all inherit from `ast.AST`.
+ classes all inherit from `ast27.AST`.
A modified abstract syntax tree can be compiled into a Python code object
using the built-in `compile()` function.
@@ -31,7 +45,7 @@ from _ast27 import *
def parse(source, filename='<unknown>', mode='exec'):
"""
- Parse the source into an AST node.
+ Parse the source into an AST node with type comments.
Equivalent to compile(source, filename, mode, PyCF_ONLY_AST).
"""
return _ast27.parse(source, filename, mode)
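A minimal usage sketch of the ast27 behaviour described in the docstring above (typed_ast 1.0.4 assumed; the Python 2 source string is illustrative):

    from typed_ast import ast27

    src = (
        "x = []  # type: List[int]\n"
        "print 'hello'\n"
    )
    tree = ast27.parse(src)
    print(tree.body[0].type_comment)              # 'List[int]'
    print(isinstance(tree.body[1], ast27.Print))  # True: a Python 2 print statement node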
diff --git a/typed_ast/ast35.py b/typed_ast/ast3.py
similarity index 78%
rename from typed_ast/ast35.py
rename to typed_ast/ast3.py
index 0865a2d..e44632c 100644
--- a/typed_ast/ast35.py
+++ b/typed_ast/ast3.py
@@ -1,29 +1,30 @@
"""
- typed_ast.ast35
+ typed_ast.ast3
~~~
- The `ast35` module helps Python applications to process trees of the
- Python abstract syntax grammar. The abstract syntax itself might change
- with each Python release; this module helps to find out programmatically
- what the current grammar looks like and allows modifications of it. The
- difference between the `ast35` module and the builtin `ast` module is
- that `ast35` provides PEP 484 type comment information as part of the
- AST.
-
- In particular:
- - The `FunctionDef`, `Assign`, `For`, and `With` classes all have a
- `type_comment` field which contains a `str` with the text of the type
- comment.
- - Per-argument function comments are put into the annotation field of each
- argument.
+ The `ast3` module helps Python applications to process trees of the Python
+ abstract syntax grammar. The abstract syntax itself might change with
+ each Python release; this module helps to find out programmatically what
+ the current grammar looks like and allows modifications of it. The
+ difference between the `ast3` module and the builtin `ast` module is
+ that `ast3` is version-independent and provides PEP 484 type comments as
+ part of the AST.
+
+ Specifically, these changes are made to the latest Python 3 AST:
+ - The `FunctionDef`, `AsyncFunctionDef`, `Assign`, `For`, `AsyncFor`,
+ `With`, `AsyncWith`, and `arg` classes all have a `type_comment` field
+ which contains a `str` with the text of the associated type comment, if
+ any.
- `parse` has been augmented so it can parse function signature types when
called with `mode=func_type`.
+ - `parse` has an additional argument `feature_version`, which disables
+ newer Python syntax features.
- `Module` has a `type_ignores` field which contains a list of
lines which have been `# type: ignore`d.
An abstract syntax tree can be generated by using the `parse()`
function from this module. The result will be a tree of objects whose
- classes all inherit from `ast35.AST`.
+ classes all inherit from `ast3.AST`.
Additionally various helper functions are provided that make working with
the trees simpler. The main intention of the helper functions and this
@@ -34,17 +35,29 @@
:copyright: Copyright 2008 by Armin Ronacher.
:license: Python License.
"""
-import _ast35
-from _ast35 import *
+import _ast3
+from _ast3 import *
+LATEST_MINOR_VERSION = 6
-def parse(source, filename='<unknown>', mode='exec'):
+def parse(source, filename='<unknown>', mode='exec', feature_version=LATEST_MINOR_VERSION):
"""
- Parse the source into an AST node including type comment information.
+ Parse the source into an AST node including type comments.
Similar to compile(source, filename, mode, PyCF_ONLY_AST).
+
+ Set feature_version to limit the syntax parsed to that minor version of
+ Python 3. For example, feature_version=5 will prevent new syntax features
+ from Python 3.6+ from being used, such as fstrings. Currently only
+ fully supported for Python 3.5+ with partial support for Python 3.4.
+ So, feature_version=3 or less are all equivalent to feature_version=4.
+
+ When feature_version=4, the parser will forbid the use of the async/await
+ keywords and the '@' operator, but will not forbid the use of PEP 448
+ additional unpacking generalizations, which were also added in Python 3.5.
"""
- return _ast35._parse(source, filename, mode)
+ return _ast3._parse(source, filename, mode, feature_version)
+_NUM_TYPES = (int, float, complex)
def literal_eval(node_or_string):
"""
@@ -58,7 +71,9 @@ def literal_eval(node_or_string):
if isinstance(node_or_string, Expression):
node_or_string = node_or_string.body
def _convert(node):
- if isinstance(node, (Str, Bytes)):
+ if isinstance(node, Constant):
+ return node.value
+ elif isinstance(node, (Str, Bytes)):
return node.s
elif isinstance(node, Num):
return node.n
@@ -73,24 +88,21 @@ def literal_eval(node_or_string):
in zip(node.keys, node.values))
elif isinstance(node, NameConstant):
return node.value
- elif isinstance(node, UnaryOp) and \
- isinstance(node.op, (UAdd, USub)) and \
- isinstance(node.operand, (Num, UnaryOp, BinOp)):
+ elif isinstance(node, UnaryOp) and isinstance(node.op, (UAdd, USub)):
operand = _convert(node.operand)
- if isinstance(node.op, UAdd):
- return + operand
- else:
- return - operand
- elif isinstance(node, BinOp) and \
- isinstance(node.op, (Add, Sub)) and \
- isinstance(node.right, (Num, UnaryOp, BinOp)) and \
- isinstance(node.left, (Num, UnaryOp, BinOp)):
+ if isinstance(operand, _NUM_TYPES):
+ if isinstance(node.op, UAdd):
+ return + operand
+ else:
+ return - operand
+ elif isinstance(node, BinOp) and isinstance(node.op, (Add, Sub)):
left = _convert(node.left)
right = _convert(node.right)
- if isinstance(node.op, Add):
- return left + right
- else:
- return left - right
+ if isinstance(left, _NUM_TYPES) and isinstance(right, _NUM_TYPES):
+ if isinstance(node.op, Add):
+ return left + right
+ else:
+ return left - right
raise ValueError('malformed node or string: ' + repr(node))
return _convert(node_or_string)
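The reworked literal_eval above only folds unary and binary +/- when the operands are numeric, so complex literals still evaluate while string arithmetic is rejected. A small sketch (typed_ast 1.0.4 assumed):

    from typed_ast import ast3

    print(ast3.literal_eval("1 + 2j"))    # (1+2j): Add over two numbers is folded
    print(ast3.literal_eval("-1_000"))    # -1000: unary minus over a number
    try:
        ast3.literal_eval("'a' + 'b'")    # Add over two strings falls through to the ValueError
    except ValueError as e:
        print(e)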
@@ -207,12 +219,19 @@ def get_docstring(node, clean=True):
"""
if not isinstance(node, (AsyncFunctionDef, FunctionDef, ClassDef, Module)):
raise TypeError("%r can't have docstrings" % node.__class__.__name__)
- if node.body and isinstance(node.body[0], Expr) and \
- isinstance(node.body[0].value, Str):
- if clean:
- import inspect
- return inspect.cleandoc(node.body[0].value.s)
- return node.body[0].value.s
+ if not(node.body and isinstance(node.body[0], Expr)):
+ return
+ node = node.body[0].value
+ if isinstance(node, Str):
+ text = node.s
+ elif isinstance(node, Constant) and isinstance(node.value, str):
+ text = node.value
+ else:
+ return
+ if clean:
+ import inspect
+ text = inspect.cleandoc(text)
+ return text
def walk(node):
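For reference, a small sketch of get_docstring after the change above, which now accepts both Str and string Constant docstrings and cleans indentation via inspect.cleandoc (typed_ast 1.0.4 assumed):

    from typed_ast import ast3

    mod = ast3.parse('def f():\n    """   Adds one."""\n    return 1\n')
    print(ast3.get_docstring(mod.body[0]))               # 'Adds one.'
    print(ast3.get_docstring(mod.body[0], clean=False))  # '   Adds one.'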
diff --git a/typed_ast/conversions.py b/typed_ast/conversions.py
index 61af7d2..356df25 100644
--- a/typed_ast/conversions.py
+++ b/typed_ast/conversions.py
@@ -1,5 +1,5 @@
from typed_ast import ast27
-from typed_ast import ast35
+from typed_ast import ast3
def py2to3(ast):
"""Converts a typed Python 2.7 ast to a typed Python 3.5 ast. The returned
@@ -47,7 +47,7 @@ class _AST2To3(ast27.NodeTransformer):
def generic_visit(self, node):
class_name = node.__class__.__name__
- converted_class = getattr(ast35, class_name)
+ converted_class = getattr(ast3, class_name)
new_node = converted_class()
for field, old_value in ast27.iter_fields(node):
if isinstance(old_value, (ast27.AST, list)):
@@ -71,10 +71,10 @@ class _AST2To3(ast27.NodeTransformer):
return new
def visit_TryExcept(self, n):
- return ast35.Try(self.visit(n.body),
- self.visit(n.handlers),
- self.visit(n.orelse),
- [])
+ return ast3.Try(self.visit(n.body),
+ self.visit(n.handlers),
+ self.visit(n.orelse),
+ [])
def visit_TryFinally(self, n):
if len(n.body) == 1 and isinstance(n.body[0], ast27.TryExcept):
@@ -82,10 +82,10 @@ class _AST2To3(ast27.NodeTransformer):
new.finalbody = self.visit(n.finalbody)
return new
else:
- return ast35.Try(self.visit(n.body),
- [],
- [],
- self.visit(n.finalbody))
+ return ast3.Try(self.visit(n.body),
+ [],
+ [],
+ self.visit(n.finalbody))
def visit_ExceptHandler(self, n):
@@ -96,22 +96,22 @@ class _AST2To3(ast27.NodeTransformer):
else:
raise RuntimeError("'{}' has non-Name name.".format(ast27.dump(n)))
- return ast35.ExceptHandler(self.maybe_visit(n.type),
- name,
- self.visit(n.body))
+ return ast3.ExceptHandler(self.maybe_visit(n.type),
+ name,
+ self.visit(n.body))
def visit_Print(self, n):
keywords = []
if n.dest is not None:
- keywords.append(ast35.keyword("file", self.visit(n.dest)))
+ keywords.append(ast3.keyword("file", self.visit(n.dest)))
if not n.nl:
- keywords.append(ast35.keyword("end", ast35.Str(" ", lineno=n.lineno, col_offset=-1)))
+ keywords.append(ast3.keyword("end", ast3.Str(" ", lineno=n.lineno, col_offset=-1)))
- return ast35.Expr(ast35.Call(ast35.Name("print", ast35.Load(), lineno=n.lineno, col_offset=-1),
- self.visit(n.values),
- keywords,
- lineno=n.lineno, col_offset=-1))
+ return ast3.Expr(ast3.Call(ast3.Name("print", ast3.Load(), lineno=n.lineno, col_offset=-1),
+ self.visit(n.values),
+ keywords,
+ lineno=n.lineno, col_offset=-1))
def visit_Raise(self, n):
e = None
@@ -120,13 +120,13 @@ class _AST2To3(ast27.NodeTransformer):
if n.inst is not None and not (isinstance(n.inst, ast27.Name) and n.inst.id == "None"):
inst = self.visit(n.inst)
- if isinstance(inst, ast35.Tuple):
+ if isinstance(inst, ast3.Tuple):
args = inst.elts
else:
args = [inst]
- e = ast35.Call(e, args, [], lineno=e.lineno, col_offset=-1)
+ e = ast3.Call(e, args, [], lineno=e.lineno, col_offset=-1)
- ret = ast35.Raise(e, None)
+ ret = ast3.Raise(e, None)
if n.tback is not None:
ret.traceback = self.visit(n.tback)
return ret
@@ -134,96 +134,98 @@ class _AST2To3(ast27.NodeTransformer):
def visit_Exec(self, n):
new_globals = self.maybe_visit(n.globals)
if new_globals is None:
- new_globals = ast35.Name("None", ast35.Load(), lineno=-1, col_offset=-1)
+ new_globals = ast3.Name("None", ast3.Load(), lineno=-1, col_offset=-1)
new_locals = self.maybe_visit(n.locals)
if new_locals is None:
- new_locals = ast35.Name("None", ast35.Load(), lineno=-1, col_offset=-1)
+ new_locals = ast3.Name("None", ast3.Load(), lineno=-1, col_offset=-1)
- return ast35.Expr(ast35.Call(ast35.Name("exec", ast35.Load(), lineno=n.lineno, col_offset=-1),
- [self.visit(n.body), new_globals, new_locals],
- [],
- lineno=n.lineno, col_offset=-1))
+ return ast3.Expr(ast3.Call(ast3.Name("exec", ast3.Load(), lineno=n.lineno, col_offset=-1),
+ [self.visit(n.body), new_globals, new_locals],
+ [],
+ lineno=n.lineno, col_offset=-1))
# TODO(ddfisher): the name repr could be used locally as something else; disambiguate
def visit_Repr(self, n):
- return ast35.Call(ast35.Name("repr", ast35.Load(), lineno=n.lineno, col_offset=-1),
- [self.visit(n.value)],
- [])
+ return ast3.Call(ast3.Name("repr", ast3.Load(), lineno=n.lineno, col_offset=-1),
+ [self.visit(n.value)],
+ [])
# TODO(ddfisher): this will cause strange behavior on multi-item with statements with type comments
def visit_With(self, n):
- return ast35.With([ast35.withitem(self.visit(n.context_expr), self.maybe_visit(n.optional_vars))],
+ return ast3.With([ast3.withitem(self.visit(n.context_expr), self.maybe_visit(n.optional_vars))],
self.visit(n.body),
n.type_comment)
def visit_Call(self, n):
args = self.visit(n.args)
if n.starargs is not None:
- args.append(ast35.Starred(self.visit(n.starargs), ast35.Load(), lineno=n.starargs.lineno, col_offset=n.starargs.col_offset))
+ args.append(ast3.Starred(self.visit(n.starargs), ast3.Load(), lineno=n.starargs.lineno, col_offset=n.starargs.col_offset))
keywords = self.visit(n.keywords)
if n.kwargs is not None:
- keywords.append(ast35.keyword(None, self.visit(n.kwargs)))
+ keywords.append(ast3.keyword(None, self.visit(n.kwargs)))
- return ast35.Call(self.visit(n.func),
- args,
- keywords)
+ return ast3.Call(self.visit(n.func),
+ args,
+ keywords)
# TODO(ddfisher): find better attributes to give Ellipses
def visit_Ellipsis(self, n):
# ellipses in Python 2 only exist as a slice index
- return ast35.Index(ast35.Ellipsis(lineno=-1, col_offset=-1))
+ return ast3.Index(ast3.Ellipsis(lineno=-1, col_offset=-1))
def visit_arguments(self, n):
- def convert_arg(arg, annotation):
+ def convert_arg(arg, type_comment):
if isinstance(arg, ast27.Name):
v = arg.id
elif isinstance(arg, ast27.Tuple):
v = self.visit(arg)
else:
raise RuntimeError("'{}' is not a valid argument.".format(ast27.dump(arg)))
- return ast35.arg(v, annotation, lineno=arg.lineno, col_offset=arg.col_offset)
+ return ast3.arg(v, None, type_comment, lineno=arg.lineno, col_offset=arg.col_offset)
def get_type_comment(i):
if i < len(n.type_comments) and n.type_comments[i] is not None:
- return ast35.Str(n.type_comments[i])
+ return n.type_comments[i]
return None
args = [convert_arg(arg, get_type_comment(i)) for i, arg in enumerate(n.args)]
vararg = None
if n.vararg is not None:
- vararg = ast35.arg(n.vararg,
- get_type_comment(len(args)),
- lineno=-1, col_offset=-1)
+ vararg = ast3.arg(n.vararg,
+ None,
+ get_type_comment(len(args)),
+ lineno=-1, col_offset=-1)
kwarg = None
if n.kwarg is not None:
- kwarg = ast35.arg(n.kwarg,
- get_type_comment(len(args) + (0 if n.vararg is None else 1)),
- lineno=-1, col_offset=-1)
+ kwarg = ast3.arg(n.kwarg,
+ None,
+ get_type_comment(len(args) + (0 if n.vararg is None else 1)),
+ lineno=-1, col_offset=-1)
defaults = self.visit(n.defaults)
- return ast35.arguments(args,
- vararg,
- [],
- [],
- kwarg,
- defaults)
+ return ast3.arguments(args,
+ vararg,
+ [],
+ [],
+ kwarg,
+ defaults)
def visit_Str(self, s):
if isinstance(s.s, bytes):
- return ast35.Bytes(s.s)
+ return ast3.Bytes(s.s)
else:
- return ast35.Str(s.s)
+ return ast3.Str(s.s)
def visit_Num(self, n):
new = self.generic_visit(n)
if new.n < 0:
# Python 3 uses a unary - operator for negative literals.
new.n = -new.n
- return ast35.UnaryOp(op=ast35.USub(),
- operand=_copy_attributes(new, n))
+ return ast3.UnaryOp(op=ast3.USub(),
+ operand=_copy_attributes(new, n))
else:
return new
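Putting the conversion pieces above together, a minimal sketch of turning a typed Python 2.7 tree into a Python 3 tree (typed_ast 1.0.4 assumed; the source string is illustrative):

    from typed_ast import ast27, ast3, conversions

    tree27 = ast27.parse("print 'x =', x\n")
    tree3 = conversions.py2to3(tree27)   # visit_Print above rewrites this into a print() call
    print(ast3.dump(tree3.body[0]))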
--
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/debian-med/python3-typed-ast.git