[med-svn] [python3-typed-ast] 01/04: New upstream version 0.5.6
From: Sascha Steinbiss <satta at debian.org>
Date: Sat Sep 17 15:04:39 UTC 2016
This is an automated email from the git hooks/post-receive script.
satta pushed a commit to branch master
in repository python3-typed-ast.
commit fee8d11c0745a3a8ca5b999be76b14ac6162c6fc
Author: Sascha Steinbiss <satta at debian.org>
Date: Sat Sep 17 14:55:56 2016 +0000
New upstream version 0.5.6
---
.gitignore | 5 -
CONTRIBUTING.md | 1 -
LICENSE | 543 +++++++------
MANIFEST.in | 1 +
PKG-INFO | 20 +
README.md | 52 --
appveyor.yml | 47 --
ast27/Grammar/Grammar | 151 ----
ast27/Parser/Python.asdl | 122 ---
ast27/Parser/asdl.py | 413 ----------
ast27/Parser/asdl_c.py | 1249 -----------------------------
ast27/Parser/spark.py | 839 -------------------
ast35/Grammar/Grammar | 162 ----
ast35/Parser/Python.asdl | 126 ---
ast35/Parser/asdl.py | 375 ---------
ast35/Parser/asdl_c.py | 1326 -------------------------------
build.cmd | 21 -
setup.cfg | 5 +
setup.py | 6 +-
tools/find_exported_symbols | 8 -
tools/update_exported_symbols | 13 -
tools/update_header_guards | 12 -
typed_ast.egg-info/PKG-INFO | 20 +
typed_ast.egg-info/SOURCES.txt | 62 ++
typed_ast.egg-info/dependency_links.txt | 1 +
typed_ast.egg-info/top_level.txt | 3 +
typed_ast/conversions.py | 21 +-
27 files changed, 425 insertions(+), 5179 deletions(-)
diff --git a/.gitignore b/.gitignore
deleted file mode 100644
index cc7f758..0000000
--- a/.gitignore
+++ /dev/null
@@ -1,5 +0,0 @@
-*.o
-*.pyc
-build/
-__pycache__/
-.DS_Store
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
deleted file mode 100644
index 16aa21f..0000000
--- a/CONTRIBUTING.md
+++ /dev/null
@@ -1 +0,0 @@
-To contribute code to this project, you'll need to sign [Dropbox's Contributor License Agreement](https://opensource.dropbox.com/cla/).
diff --git a/LICENSE b/LICENSE
index ee233ec..2565558 100644
--- a/LICENSE
+++ b/LICENSE
@@ -1,253 +1,290 @@
- Apache License
- Version 2.0, January 2004
- http://www.apache.org/licenses/
-
-TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
-1. Definitions.
-
- "License" shall mean the terms and conditions for use, reproduction,
- and distribution as defined by Sections 1 through 9 of this document.
-
- "Licensor" shall mean the copyright owner or entity authorized by
- the copyright owner that is granting the License.
-
- "Legal Entity" shall mean the union of the acting entity and all
- other entities that control, are controlled by, or are under common
- control with that entity. For the purposes of this definition,
- "control" means (i) the power, direct or indirect, to cause the
- direction or management of such entity, whether by contract or
- otherwise, or (ii) ownership of fifty percent (50%) or more of the
- outstanding shares, or (iii) beneficial ownership of such entity.
-
- "You" (or "Your") shall mean an individual or Legal Entity
- exercising permissions granted by this License.
-
- "Source" form shall mean the preferred form for making modifications,
- including but not limited to software source code, documentation
- source, and configuration files.
-
- "Object" form shall mean any form resulting from mechanical
- transformation or translation of a Source form, including but
- not limited to compiled object code, generated documentation,
- and conversions to other media types.
-
- "Work" shall mean the work of authorship, whether in Source or
- Object form, made available under the License, as indicated by a
- copyright notice that is included in or attached to the work
- (an example is provided in the Appendix below).
-
- "Derivative Works" shall mean any work, whether in Source or Object
- form, that is based on (or derived from) the Work and for which the
- editorial revisions, annotations, elaborations, or other modifications
- represent, as a whole, an original work of authorship. For the purposes
- of this License, Derivative Works shall not include works that remain
- separable from, or merely link (or bind by name) to the interfaces of,
- the Work and Derivative Works thereof.
-
- "Contribution" shall mean any work of authorship, including
- the original version of the Work and any modifications or additions
- to that Work or Derivative Works thereof, that is intentionally
- submitted to Licensor for inclusion in the Work by the copyright owner
- or by an individual or Legal Entity authorized to submit on behalf of
- the copyright owner. For the purposes of this definition, "submitted"
- means any form of electronic, verbal, or written communication sent
- to the Licensor or its representatives, including but not limited to
- communication on electronic mailing lists, source code control systems,
- and issue tracking systems that are managed by, or on behalf of, the
- Licensor for the purpose of discussing and improving the Work, but
- excluding communication that is conspicuously marked or otherwise
- designated in writing by the copyright owner as "Not a Contribution."
-
- "Contributor" shall mean Licensor and any individual or Legal Entity
- on behalf of whom a Contribution has been received by Licensor and
- subsequently incorporated within the Work.
-
-2. Grant of Copyright License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- copyright license to reproduce, prepare Derivative Works of,
- publicly display, publicly perform, sublicense, and distribute the
- Work and such Derivative Works in Source or Object form.
-
-3. Grant of Patent License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- (except as stated in this section) patent license to make, have made,
- use, offer to sell, sell, import, and otherwise transfer the Work,
- where such license applies only to those patent claims licensable
- by such Contributor that are necessarily infringed by their
- Contribution(s) alone or by combination of their Contribution(s)
- with the Work to which such Contribution(s) was submitted. If You
- institute patent litigation against any entity (including a
- cross-claim or counterclaim in a lawsuit) alleging that the Work
- or a Contribution incorporated within the Work constitutes direct
- or contributory patent infringement, then any patent licenses
- granted to You under this License for that Work shall terminate
- as of the date such litigation is filed.
-
-4. Redistribution. You may reproduce and distribute copies of the
- Work or Derivative Works thereof in any medium, with or without
- modifications, and in Source or Object form, provided that You
- meet the following conditions:
-
- (a) You must give any other recipients of the Work or
- Derivative Works a copy of this License; and
-
- (b) You must cause any modified files to carry prominent notices
- stating that You changed the files; and
-
- (c) You must retain, in the Source form of any Derivative Works
- that You distribute, all copyright, patent, trademark, and
- attribution notices from the Source form of the Work,
- excluding those notices that do not pertain to any part of
- the Derivative Works; and
-
- (d) If the Work includes a "NOTICE" text file as part of its
- distribution, then any Derivative Works that You distribute must
- include a readable copy of the attribution notices contained
- within such NOTICE file, excluding those notices that do not
- pertain to any part of the Derivative Works, in at least one
- of the following places: within a NOTICE text file distributed
- as part of the Derivative Works; within the Source form or
- documentation, if provided along with the Derivative Works; or,
- within a display generated by the Derivative Works, if and
- wherever such third-party notices normally appear. The contents
- of the NOTICE file are for informational purposes only and
- do not modify the License. You may add Your own attribution
- notices within Derivative Works that You distribute, alongside
- or as an addendum to the NOTICE text from the Work, provided
- that such additional attribution notices cannot be construed
- as modifying the License.
-
- You may add Your own copyright statement to Your modifications and
- may provide additional or different license terms and conditions
- for use, reproduction, or distribution of Your modifications, or
- for any such Derivative Works as a whole, provided Your use,
- reproduction, and distribution of the Work otherwise complies with
- the conditions stated in this License.
-
-5. Submission of Contributions. Unless You explicitly state otherwise,
- any Contribution intentionally submitted for inclusion in the Work
- by You to the Licensor shall be under the terms and conditions of
- this License, without any additional terms or conditions.
- Notwithstanding the above, nothing herein shall supersede or modify
- the terms of any separate license agreement you may have executed
- with Licensor regarding such Contributions.
-
-6. Trademarks. This License does not grant permission to use the trade
- names, trademarks, service marks, or product names of the Licensor,
- except as required for reasonable and customary use in describing the
- origin of the Work and reproducing the content of the NOTICE file.
-
-7. Disclaimer of Warranty. Unless required by applicable law or
- agreed to in writing, Licensor provides the Work (and each
- Contributor provides its Contributions) on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
- implied, including, without limitation, any warranties or conditions
- of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
- PARTICULAR PURPOSE. You are solely responsible for determining the
- appropriateness of using or redistributing the Work and assume any
- risks associated with Your exercise of permissions under this License.
-
-8. Limitation of Liability. In no event and under no legal theory,
- whether in tort (including negligence), contract, or otherwise,
- unless required by applicable law (such as deliberate and grossly
- negligent acts) or agreed to in writing, shall any Contributor be
- liable to You for damages, including any direct, indirect, special,
- incidental, or consequential damages of any character arising as a
- result of this License or out of the use or inability to use the
- Work (including but not limited to damages for loss of goodwill,
- work stoppage, computer failure or malfunction, or any and all
- other commercial damages or losses), even if such Contributor
- has been advised of the possibility of such damages.
-
-9. Accepting Warranty or Additional Liability. While redistributing
- the Work or Derivative Works thereof, You may choose to offer,
- and charge a fee for, acceptance of support, warranty, indemnity,
- or other liability obligations and/or rights consistent with this
- License. However, in accepting such obligations, You may act only
- on Your own behalf and on Your sole responsibility, not on behalf
- of any other Contributor, and only if You agree to indemnify,
- defend, and hold each Contributor harmless for any liability
- incurred by, or claims asserted against, such Contributor by reason
- of your accepting any such warranty or additional liability.
-
-END OF TERMS AND CONDITIONS
-
-APPENDIX: How to apply the Apache License to your work.
-
- To apply the Apache License to your work, attach the following
- boilerplate notice, with the fields enclosed by brackets "[]"
- replaced with your own identifying information. (Don't include
- the brackets!) The text should be enclosed in the appropriate
- comment syntax for the file format. We also recommend that a
- file or class name and description of purpose be included on the
- same "printed page" as the copyright notice for easier
- identification within third-party archives.
-
-Copyright 2016 Dropbox, Inc.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
---------------
-
-The original CPython source is licensed under the Python Software Foundation License Version 2:
-PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2
---------------------------------------------
-
-1. This LICENSE AGREEMENT is between the Python Software Foundation
-("PSF"), and the Individual or Organization ("Licensee") accessing and
-otherwise using this software ("Python") in source or binary form and
-its associated documentation.
-
-2. Subject to the terms and conditions of this License Agreement, PSF hereby
-grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce,
-analyze, test, perform and/or display publicly, prepare derivative works,
-distribute, and otherwise use Python alone or in any derivative version,
-provided, however, that PSF's License Agreement and PSF's notice of copyright,
-i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
-2011, 2012, 2013, 2014, 2015, 2016 Python Software Foundation; All Rights
-Reserved" are retained in Python alone or in any derivative version prepared by
-Licensee.
-
-3. In the event Licensee prepares a derivative work that is based on
-or incorporates Python or any part thereof, and wants to make
-the derivative work available to others as provided herein, then
-Licensee hereby agrees to include in any such work a brief summary of
-the changes made to Python.
-
-4. PSF is making Python available to Licensee on an "AS IS"
-basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
-IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND
-DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
-FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT
-INFRINGE ANY THIRD PARTY RIGHTS.
-
-5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
-FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
-A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON,
-OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
-
-6. This License Agreement will automatically terminate upon a material
-breach of its terms and conditions.
-
-7. Nothing in this License Agreement shall be deemed to create any
-relationship of agency, partnership, or joint venture between PSF and
-Licensee. This License Agreement does not grant permission to use PSF
-trademarks or trade name in a trademark sense to endorse or promote
-products or services of Licensee, or any third party.
-
-8. By copying, installing or otherwise using Python, Licensee
-agrees to be bound by the terms and conditions of this License
-Agreement.
+Format: https://www.debian.org/doc/packaging-manuals/copyright-format/1.0/
+Upstream-Name: typed-ast
+Source: https://pypi.python.org/pypi/typed-ast
+
+Files: *
+Copyright: © 2016 David Fisher <ddfisher at dropbox.com>
+License: Apache-2.0
+
+Files: *
+Copyright: © 2016 David Fisher <ddfisher at dropbox.com>
+ © 2008 Armin Ronacher
+Comment: The original CPython source is licensed under the
+ Python Software Foundation License Version 2
+License: Python
+
+Files: ast27/Parser/spark.py
+Copyright: © 1998-2002 John Aycock
+License: Expat
+ Permission is hereby granted, free of charge, to any person obtaining
+ a copy of this software and associated documentation files (the
+ "Software"), to deal in the Software without restriction, including
+ without limitation the rights to use, copy, modify, merge, publish,
+ distribute, sublicense, and/or sell copies of the Software, and to
+ permit persons to whom the Software is furnished to do so, subject to
+ the following conditions:
+
+ The above copyright notice and this permission notice shall be
+ included in all copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+
+License: Apache-2.0
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+ .
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+ .
+ 1. Definitions.
+ .
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+ .
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+ .
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+ .
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+ .
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+ .
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+ .
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+ .
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+ .
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+ .
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+ .
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+ .
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+ .
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+ .
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+ .
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+ .
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+ .
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+ .
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+ .
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+ .
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+ .
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+ .
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+ .
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+ .
+ END OF TERMS AND CONDITIONS
+ .
+ APPENDIX: How to apply the Apache License to your work.
+ .
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+ .
+ Copyright 2016 Dropbox, Inc.
+ .
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+ .
+ http://www.apache.org/licenses/LICENSE-2.0
+ .
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+
+License: Python
+ PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2
+ --------------------------------------------
+ .
+ 1. This LICENSE AGREEMENT is between the Python Software Foundation
+ ("PSF"), and the Individual or Organization ("Licensee") accessing and
+ otherwise using this software ("Python") in source or binary form and
+ its associated documentation.
+ .
+ 2. Subject to the terms and conditions of this License Agreement, PSF hereby
+ grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce,
+ analyze, test, perform and/or display publicly, prepare derivative works,
+ distribute, and otherwise use Python alone or in any derivative version,
+ provided, however, that PSF's License Agreement and PSF's notice of copyright,
+ i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
+ 2011, 2012, 2013, 2014, 2015, 2016 Python Software Foundation; All Rights
+ Reserved" are retained in Python alone or in any derivative version prepared by
+ Licensee.
+ .
+ 3. In the event Licensee prepares a derivative work that is based on
+ or incorporates Python or any part thereof, and wants to make
+ the derivative work available to others as provided herein, then
+ Licensee hereby agrees to include in any such work a brief summary of
+ the changes made to Python.
+ .
+ 4. PSF is making Python available to Licensee on an "AS IS"
+ basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
+ IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND
+ DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
+ FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT
+ INFRINGE ANY THIRD PARTY RIGHTS.
+ .
+ 5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
+ FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
+ A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON,
+ OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
+ .
+ 6. This License Agreement will automatically terminate upon a material
+ breach of its terms and conditions.
+ .
+ 7. Nothing in this License Agreement shall be deemed to create any
+ relationship of agency, partnership, or joint venture between PSF and
+ Licensee. This License Agreement does not grant permission to use PSF
+ trademarks or trade name in a trademark sense to endorse or promote
+ products or services of Licensee, or any third party.
+ .
+ 8. By copying, installing or otherwise using Python, Licensee
+ agrees to be bound by the terms and conditions of this License
+ Agreement.
diff --git a/MANIFEST.in b/MANIFEST.in
index 5faa125..41f118b 100644
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -1,2 +1,3 @@
recursive-include ast27 *.h
recursive-include ast35 *.h
+include LICENSE
diff --git a/PKG-INFO b/PKG-INFO
new file mode 100644
index 0000000..3ed89fd
--- /dev/null
+++ b/PKG-INFO
@@ -0,0 +1,20 @@
+Metadata-Version: 1.1
+Name: typed-ast
+Version: 0.5.6
+Summary: fork of Python 2 and 3 ast modules with type comment support
+Home-page: https://github.com/dropbox/typed_ast
+Author: David Fisher
+Author-email: ddfisher at dropbox.com
+License: Apache License 2.0
+Description: This package is a fork of the CPython 2.7 and 3.5 `ast` modules with the
+ ability to parse PEP 484 (https://www.python.org/dev/peps/pep-0484/) type
+ comments. The primary goals of this package are correctness and speed.
+Platform: POSIX
+Classifier: Development Status :: 3 - Alpha
+Classifier: Environment :: Console
+Classifier: Intended Audience :: Developers
+Classifier: Operating System :: POSIX
+Classifier: Programming Language :: Python :: 3.3
+Classifier: Programming Language :: Python :: 3.4
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Topic :: Software Development
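
The description above is the package's whole stated purpose, so a brief usage
sketch may help. This is a minimal sketch only: the type_comment attribute it
reads is the one declared in the ASDL files further down in this diff, while
the typed_ast.ast35 import path is an assumption about the 0.5.x layout, not
something confirmed by the diff itself.

    # Hedged sketch: parse a PEP 484 type comment with this package.
    # Assumption: the 3.5-flavoured parser is importable as typed_ast.ast35.
    from typed_ast import ast35

    source = (
        "def greet(name):\n"
        "    # type: (str) -> str\n"
        "    return 'hello ' + name\n"
    )

    tree = ast35.parse(source)            # mirrors the stdlib ast.parse()
    func = tree.body[0]                   # a FunctionDef node
    print(func.name, func.type_comment)   # expected: greet (str) -> str
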
diff --git a/README.md b/README.md
deleted file mode 100644
index 2bc27e5..0000000
--- a/README.md
+++ /dev/null
@@ -1,52 +0,0 @@
-# Typed AST Package
-This package is a fork of the CPython 2.7 and 3.5 `ast` modules with the ability
-to parse [PEP 484](https://www.python.org/dev/peps/pep-0484/) type comments.
-The primary goals of this package are correctness and speed. This package is
-compatible with Python 3.3 - 3.5.
-
-### Current Caveats for Use
-- Type comments in invalid locations produce syntax errors.
-- When using per-argument function comment annotations, the type comments must
- come after the argument-separating comma.
-
-## Development Notes
-### General Notes
-- token names in `Python.asdl` need to match token names both in `tokenizer.c`
- AND in `pgen`'s copy of `tokenizer.c`.
-
-### Rebuilding Autogenerated Files
-- After changing `Parser/Python.asdl`, you must run:
- - `python3 Parser/asdl_c.py -h Include/ Parser/Python.asdl`
- - `python3 Parser/asdl_c.py -c Python/ Parser/Python.asdl`
-- After changing `Grammar/Grammar`, you must run:
- - `pgen Grammar/Grammar Include/graminit.h Python/graminit.c`
-- To get a working `pgen` binary: (this is hacky and will be changed)
- - get a clean copy of Python 3.5.1
- - overwrite `Include/token.h`, `Include/compile.h`, and `Parser/tokenizer.c`
- - (or instead of copying Parser/tokenizer.h just modify _PyParser_TokenNames at the top and tokenization to RARROW)
- with the versions from typed\_ast
- - In the Python directory, run: `./configure && make`
- - `Parser/pgen` (in the Python directory) can now be used to regenerate
- typed\_ast's graminit files.
-
-### TODO
-- [x] stop aliasing the official Python headers
-- [x] ensure we're not using duplicate versions of important standard library
- functions (like object creation)
-- [x] hide most global symbols from being exported to prevent conflicts with other
- libraries -- *changed to a unique prefix*
-- [x] allow type ignores to be followed by a comment
-- [ ] prevent type comments in incorrect locations from causing syntax errors
-- [ ] find a better way to compile pgen
-- [x] parse Python 2.7 ASTs
-- [x] ast35: ensure compatibility with older Python versions
- - [x] Python 3.4 (*works on 3.4.4*)
- - [x] Python 3.3 (*works on 3.3.6*)
-- [x] ast27: ensure compatibility with older Python versions
- - [x] Python 3.4
- - [x] Python 3.3
-- [ ] refactor out shared code
- - [ ] common functions in typed\_ast.c
- - [ ] type\_ignore array resizing functions in parsetok.c
- - [ ] type comment parsing code in tokenizer.c
- - [ ] func\_type\_input parsing in ast.c
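
The TODO list in the deleted README above notes that "type ignores" are
collected; the sketch below reads the Module-level type_ignores field declared
in the ASDL later in this diff. The import path and the exact collection rules
in this 0.5.x release are assumptions, so treat it as illustrative.

    # Hedged sketch: "# type: ignore" comments are gathered on the Module node.
    from typed_ast import ast35   # import path assumed for the 0.5.x layout

    tree = ast35.parse("import missing_module  # type: ignore\n")
    print([ignore.lineno for ignore in tree.type_ignores])   # expected: [1]
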
diff --git a/appveyor.yml b/appveyor.yml
deleted file mode 100644
index 38e45c6..0000000
--- a/appveyor.yml
+++ /dev/null
@@ -1,47 +0,0 @@
-environment:
-
- matrix:
-
- # For Python versions available on Appveyor, see
- # http://www.appveyor.com/docs/installed-software#python
-
- - PYTHON: "C:\\Python33"
- - PYTHON: "C:\\Python34"
- - PYTHON: "C:\\Python35"
- - PYTHON: "C:\\Python33-x64"
- DISTUTILS_USE_SDK: "1"
- - PYTHON: "C:\\Python34-x64"
- DISTUTILS_USE_SDK: "1"
- - PYTHON: "C:\\Python35-x64"
-
-install:
- # We need wheel installed to build wheels
- - "%PYTHON%\\python.exe -m pip install wheel"
-
-build: off
-
-test_script:
- # Put your test command here.
- # If you don't need to build C extensions on 64-bit Python 3.3 or 3.4,
- # you can remove "build.cmd" from the front of the command, as it's
- # only needed to support those cases.
- # Note that you must use the environment variable %PYTHON% to refer to
- # the interpreter you're using - Appveyor does not do anything special
- # to put the Python version you want to use on PATH.
- - "build.cmd %PYTHON%\\python.exe setup.py test"
-
-after_test:
- # This step builds your wheels.
- # Again, you only need build.cmd if you're building C extensions for
- # 64-bit Python 3.3/3.4. And you need to use %PYTHON% to get the correct
- # interpreter
- - "build.cmd %PYTHON%\\python.exe setup.py bdist_wheel"
-
-artifacts:
- # bdist_wheel puts your built wheel in the dist directory
- - path: dist\*
-
-#on_success:
-# You can use this step to upload your artifacts to a public website.
-# See Appveyor's documentation for more details. Or you can simply
-# access your wheels from the Appveyor "artifacts" tab for your build.
diff --git a/ast27/Grammar/Grammar b/ast27/Grammar/Grammar
deleted file mode 100644
index 60dbf50..0000000
--- a/ast27/Grammar/Grammar
+++ /dev/null
@@ -1,151 +0,0 @@
-# Grammar for Python
-
-# Note: Changing the grammar specified in this file will most likely
-# require corresponding changes in the parser module
-# (../Modules/parsermodule.c). If you can't make the changes to
-# that module yourself, please co-ordinate the required changes
-# with someone who can; ask around on python-dev for help. Fred
-# Drake <fdrake at acm.org> will probably be listening there.
-
-# NOTE WELL: You should also follow all the steps listed in PEP 306,
-# "How to Change Python's Grammar"
-
-# Start symbols for the grammar:
-# single_input is a single interactive statement;
-# file_input is a module or sequence of commands read from an input file;
-# eval_input is the input for the eval() and input() functions.
-# func_type_input is a PEP 484 Python 2 function type comment
-# NB: compound_stmt in single_input is followed by extra NEWLINE!
-single_input: NEWLINE | simple_stmt | compound_stmt NEWLINE
-file_input: (NEWLINE | stmt)* ENDMARKER
-eval_input: testlist NEWLINE* ENDMARKER
-
-decorator: '@' dotted_name [ '(' [arglist] ')' ] NEWLINE
-decorators: decorator+
-decorated: decorators (classdef | funcdef)
-funcdef: 'def' NAME parameters ':' [TYPE_COMMENT] suite
-parameters: '(' [varargslist] ')'
-varargslist: ((fpdef ['=' test] ',' [TYPE_COMMENT])*
- ('*' NAME [',' [TYPE_COMMENT] '**' NAME] [TYPE_COMMENT] | '**' NAME [TYPE_COMMENT]) |
- fpdef ['=' test] (',' [TYPE_COMMENT] fpdef ['=' test])* [','] [TYPE_COMMENT])
-fpdef: NAME | '(' fplist ')'
-fplist: fpdef (',' fpdef)* [',']
-
-stmt: simple_stmt | compound_stmt
-simple_stmt: small_stmt (';' small_stmt)* [';'] NEWLINE
-small_stmt: (expr_stmt | print_stmt | del_stmt | pass_stmt | flow_stmt |
- import_stmt | global_stmt | exec_stmt | assert_stmt)
-expr_stmt: testlist (augassign (yield_expr|testlist) |
- ('=' (yield_expr|testlist))* [TYPE_COMMENT])
-augassign: ('+=' | '-=' | '*=' | '/=' | '%=' | '&=' | '|=' | '^=' |
- '<<=' | '>>=' | '**=' | '//=')
-# For normal assignments, additional restrictions enforced by the interpreter
-print_stmt: 'print' ( [ test (',' test)* [','] ] |
- '>>' test [ (',' test)+ [','] ] )
-del_stmt: 'del' exprlist
-pass_stmt: 'pass'
-flow_stmt: break_stmt | continue_stmt | return_stmt | raise_stmt | yield_stmt
-break_stmt: 'break'
-continue_stmt: 'continue'
-return_stmt: 'return' [testlist]
-yield_stmt: yield_expr
-raise_stmt: 'raise' [test [',' test [',' test]]]
-import_stmt: import_name | import_from
-import_name: 'import' dotted_as_names
-import_from: ('from' ('.'* dotted_name | '.'+)
- 'import' ('*' | '(' import_as_names ')' | import_as_names))
-import_as_name: NAME ['as' NAME]
-dotted_as_name: dotted_name ['as' NAME]
-import_as_names: import_as_name (',' import_as_name)* [',']
-dotted_as_names: dotted_as_name (',' dotted_as_name)*
-dotted_name: NAME ('.' NAME)*
-global_stmt: 'global' NAME (',' NAME)*
-exec_stmt: 'exec' expr ['in' test [',' test]]
-assert_stmt: 'assert' test [',' test]
-
-compound_stmt: if_stmt | while_stmt | for_stmt | try_stmt | with_stmt | funcdef | classdef | decorated
-if_stmt: 'if' test ':' suite ('elif' test ':' suite)* ['else' ':' suite]
-while_stmt: 'while' test ':' suite ['else' ':' suite]
-for_stmt: 'for' exprlist 'in' testlist ':' [TYPE_COMMENT] suite ['else' ':' suite]
-try_stmt: ('try' ':' suite
- ((except_clause ':' suite)+
- ['else' ':' suite]
- ['finally' ':' suite] |
- 'finally' ':' suite))
-with_stmt: 'with' with_item (',' with_item)* ':' [TYPE_COMMENT] suite
-with_item: test ['as' expr]
-# NB compile.c makes sure that the default except clause is last
-except_clause: 'except' [test [('as' | ',') test]]
-# the TYPE_COMMENT in suites is only parsed for funcdefs, but can't go elsewhere due to ambiguity
-suite: simple_stmt | NEWLINE [TYPE_COMMENT NEWLINE] INDENT stmt+ DEDENT
-
-# Backward compatibility cruft to support:
-# [ x for x in lambda: True, lambda: False if x() ]
-# even while also allowing:
-# lambda x: 5 if x else 2
-# (But not a mix of the two)
-testlist_safe: old_test [(',' old_test)+ [',']]
-old_test: or_test | old_lambdef
-old_lambdef: 'lambda' [varargslist] ':' old_test
-
-test: or_test ['if' or_test 'else' test] | lambdef
-or_test: and_test ('or' and_test)*
-and_test: not_test ('and' not_test)*
-not_test: 'not' not_test | comparison
-comparison: expr (comp_op expr)*
-comp_op: '<'|'>'|'=='|'>='|'<='|'<>'|'!='|'in'|'not' 'in'|'is'|'is' 'not'
-expr: xor_expr ('|' xor_expr)*
-xor_expr: and_expr ('^' and_expr)*
-and_expr: shift_expr ('&' shift_expr)*
-shift_expr: arith_expr (('<<'|'>>') arith_expr)*
-arith_expr: term (('+'|'-') term)*
-term: factor (('*'|'/'|'%'|'//') factor)*
-factor: ('+'|'-'|'~') factor | power
-power: atom trailer* ['**' factor]
-atom: ('(' [yield_expr|testlist_comp] ')' |
- '[' [listmaker] ']' |
- '{' [dictorsetmaker] '}' |
- '`' testlist1 '`' |
- NAME | NUMBER | STRING+)
-listmaker: test ( list_for | (',' test)* [','] )
-testlist_comp: test ( comp_for | (',' test)* [','] )
-lambdef: 'lambda' [varargslist] ':' test
-trailer: '(' [arglist] ')' | '[' subscriptlist ']' | '.' NAME
-subscriptlist: subscript (',' subscript)* [',']
-subscript: '.' '.' '.' | test | [test] ':' [test] [sliceop]
-sliceop: ':' [test]
-exprlist: expr (',' expr)* [',']
-testlist: test (',' test)* [',']
-dictorsetmaker: ( (test ':' test (comp_for | (',' test ':' test)* [','])) |
- (test (comp_for | (',' test)* [','])) )
-
-classdef: 'class' NAME ['(' [testlist] ')'] ':' suite
-
-arglist: (argument ',')* (argument [',']
- |'*' test (',' argument)* [',' '**' test]
- |'**' test)
-# The reason that keywords are test nodes instead of NAME is that using NAME
-# results in an ambiguity. ast.c makes sure it's a NAME.
-argument: test [comp_for] | test '=' test
-
-list_iter: list_for | list_if
-list_for: 'for' exprlist 'in' testlist_safe [list_iter]
-list_if: 'if' old_test [list_iter]
-
-comp_iter: comp_for | comp_if
-comp_for: 'for' exprlist 'in' or_test [comp_iter]
-comp_if: 'if' old_test [comp_iter]
-
-testlist1: test (',' test)*
-
-# not used in grammar, but may appear in "node" passed from Parser to Compiler
-encoding_decl: NAME
-
-yield_expr: 'yield' [testlist]
-
-func_type_input: func_type NEWLINE* ENDMARKER
-func_type: '(' [typelist] ')' '->' test
-# typelist is a modified typedargslist (see above)
-typelist: (test (',' test)* [','
- ['*' [test] (',' test)* [',' '**' test] | '**' test]]
- | '*' [test] (',' test)* [',' '**' test] | '**' test)
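
The grammar above threads TYPE_COMMENT through funcdef, varargslist, for_stmt,
with_stmt and suite. The snippet below is illustrative Python 2 source (not
taken from upstream) showing where those comments sit, including the
per-argument form that must follow the separating comma, as the deleted README
cautions.

    # Illustrative only: TYPE_COMMENT positions accepted by the grammar above.

    # per-argument comments follow the argument-separating comma
    def send(sock,       # type: socket.socket
             data,       # type: bytes
             ):
        # type: (...) -> int
        return sock.send(data)

    # a single signature comment on the first line of the suite
    def pair(x, y):
        # type: (int, str) -> Tuple[int, str]
        return (x, y)

    # with_stmt and for_stmt take a comment after the ':'
    with open("log.txt") as handle:  # type: IO[str]
        for line in handle:  # type: str
            print line
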
diff --git a/ast27/Parser/Python.asdl b/ast27/Parser/Python.asdl
deleted file mode 100644
index 1864bc9..0000000
--- a/ast27/Parser/Python.asdl
+++ /dev/null
@@ -1,122 +0,0 @@
--- ASDL's five builtin types are identifier, int, string, object, bool
-
-module Python version "$Revision$"
-{
- mod = Module(stmt* body, type_ignore *type_ignores)
- | Interactive(stmt* body)
- | Expression(expr body)
- | FunctionType(expr* argtypes, expr returns)
-
- -- not really an actual node but useful in Jython's typesystem.
- | Suite(stmt* body)
-
- stmt = FunctionDef(identifier name, arguments args,
- stmt* body, expr* decorator_list, string? type_comment)
- | ClassDef(identifier name, expr* bases, stmt* body, expr* decorator_list)
- | Return(expr? value)
-
- | Delete(expr* targets)
- | Assign(expr* targets, expr value, string? type_comment)
- | AugAssign(expr target, operator op, expr value)
-
- -- not sure if bool is allowed, can always use int
- | Print(expr? dest, expr* values, bool nl)
-
- -- use 'orelse' because else is a keyword in target languages
- | For(expr target, expr iter, stmt* body, stmt* orelse, string? type_comment)
- | While(expr test, stmt* body, stmt* orelse)
- | If(expr test, stmt* body, stmt* orelse)
- | With(expr context_expr, expr? optional_vars, stmt* body, string? type_comment)
-
- -- 'type' is a bad name
- | Raise(expr? type, expr? inst, expr? tback)
- | TryExcept(stmt* body, excepthandler* handlers, stmt* orelse)
- | TryFinally(stmt* body, stmt* finalbody)
- | Assert(expr test, expr? msg)
-
- | Import(alias* names)
- | ImportFrom(identifier? module, alias* names, int? level)
-
- -- Doesn't capture requirement that locals must be
- -- defined if globals is
- -- still supports use as a function!
- | Exec(expr body, expr? globals, expr? locals)
-
- | Global(identifier* names)
- | Expr(expr value)
- | Pass | Break | Continue
-
- -- XXX Jython will be different
- -- col_offset is the byte offset in the utf8 string the parser uses
- attributes (int lineno, int col_offset)
-
- -- BoolOp() can use left & right?
- expr = BoolOp(boolop op, expr* values)
- | BinOp(expr left, operator op, expr right)
- | UnaryOp(unaryop op, expr operand)
- | Lambda(arguments args, expr body)
- | IfExp(expr test, expr body, expr orelse)
- | Dict(expr* keys, expr* values)
- | Set(expr* elts)
- | ListComp(expr elt, comprehension* generators)
- | SetComp(expr elt, comprehension* generators)
- | DictComp(expr key, expr value, comprehension* generators)
- | GeneratorExp(expr elt, comprehension* generators)
- -- the grammar constrains where yield expressions can occur
- | Yield(expr? value)
- -- need sequences for compare to distinguish between
- -- x < 4 < 3 and (x < 4) < 3
- | Compare(expr left, cmpop* ops, expr* comparators)
- | Call(expr func, expr* args, keyword* keywords,
- expr? starargs, expr? kwargs)
- | Repr(expr value)
- | Num(object n) -- a number as a PyObject.
- | Str(string s) -- need to specify raw, unicode, etc?
- -- other literals? bools?
-
- -- the following expression can appear in assignment context
- | Attribute(expr value, identifier attr, expr_context ctx)
- | Subscript(expr value, slice slice, expr_context ctx)
- | Name(identifier id, expr_context ctx)
- | List(expr* elts, expr_context ctx)
- | Tuple(expr* elts, expr_context ctx)
-
- -- col_offset is the byte offset in the utf8 string the parser uses
- attributes (int lineno, int col_offset)
-
- expr_context = Load | Store | Del | AugLoad | AugStore | Param
-
- slice = Ellipsis | Slice(expr? lower, expr? upper, expr? step)
- | ExtSlice(slice* dims)
- | Index(expr value)
-
- boolop = And | Or
-
- operator = Add | Sub | Mult | Div | Mod | Pow | LShift
- | RShift | BitOr | BitXor | BitAnd | FloorDiv
-
- unaryop = Invert | Not | UAdd | USub
-
- cmpop = Eq | NotEq | Lt | LtE | Gt | GtE | Is | IsNot | In | NotIn
-
- comprehension = (expr target, expr iter, expr* ifs)
-
- -- not sure what to call the first argument for raise and except
- excepthandler = ExceptHandler(expr? type, expr? name, stmt* body)
- attributes (int lineno, int col_offset)
-
- -- type_comments is used to support the per-argument type comment syntax.
- -- It is either an empty list or a list with length equal to the number of
- -- args (including varargs and kwargs, if present) and with members set to the
- -- string of each arg's type comment, if present, or None otherwise.
- arguments = (expr* args, identifier? vararg,
- identifier? kwarg, expr* defaults, string* type_comments)
-
- -- keyword arguments supplied to call
- keyword = (identifier arg, expr value)
-
- -- import name with optional 'as' alias.
- alias = (identifier name, identifier? asname)
-
- type_ignore = TypeIgnore(int lineno)
-}
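
To make the per-argument type_comments field described in the comment above
concrete, here is a hedged sketch. The typed_ast.ast27 import path and the
exact strings stored (bare types with the "# type:" prefix stripped) are
assumptions about this 0.5.x release.

    # Hedged sketch: how parsing Python 2 source fills arguments.type_comments.
    from typed_ast import ast27   # import path assumed for the 0.5.x layout

    src = (
        "def add(a,  # type: int\n"
        "        b,  # type: int\n"
        "        ):\n"
        "    # type: (...) -> int\n"
        "    return a + b\n"
    )

    func = ast27.parse(src).body[0]
    print(func.type_comment)         # expected: (...) -> int
    print(func.args.type_comments)   # expected: ['int', 'int'], one per argument
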
diff --git a/ast27/Parser/asdl.py b/ast27/Parser/asdl.py
deleted file mode 100644
index 7f5856b..0000000
--- a/ast27/Parser/asdl.py
+++ /dev/null
@@ -1,413 +0,0 @@
-"""An implementation of the Zephyr Abstract Syntax Definition Language.
-
-See http://asdl.sourceforge.net/ and
-http://www.cs.princeton.edu/research/techreps/TR-554-97
-
-Only supports top level module decl, not view. I'm guessing that view
-is intended to support the browser and I'm not interested in the
-browser.
-
-Changes for Python: Add support for module versions
-"""
-
-import os
-import traceback
-
-import spark
-
-class Token(object):
- # spark seems to dispatch in the parser based on a token's
- # type attribute
- def __init__(self, type, lineno):
- self.type = type
- self.lineno = lineno
-
- def __str__(self):
- return self.type
-
- def __repr__(self):
- return str(self)
-
-class Id(Token):
- def __init__(self, value, lineno):
- self.type = 'Id'
- self.value = value
- self.lineno = lineno
-
- def __str__(self):
- return self.value
-
-class String(Token):
- def __init__(self, value, lineno):
- self.type = 'String'
- self.value = value
- self.lineno = lineno
-
-class ASDLSyntaxError(Exception):
-
- def __init__(self, lineno, token=None, msg=None):
- self.lineno = lineno
- self.token = token
- self.msg = msg
-
- def __str__(self):
- if self.msg is None:
- return "Error at '%s', line %d" % (self.token, self.lineno)
- else:
- return "%s, line %d" % (self.msg, self.lineno)
-
-class ASDLScanner(spark.GenericScanner, object):
-
- def tokenize(self, input):
- self.rv = []
- self.lineno = 1
- super(ASDLScanner, self).tokenize(input)
- return self.rv
-
- def t_id(self, s):
- r"[\w\.]+"
- # XXX doesn't distinguish upper vs. lower, which is
- # significant for ASDL.
- self.rv.append(Id(s, self.lineno))
-
- def t_string(self, s):
- r'"[^"]*"'
- self.rv.append(String(s, self.lineno))
-
- def t_xxx(self, s): # not sure what this production means
- r"<="
- self.rv.append(Token(s, self.lineno))
-
- def t_punctuation(self, s):
- r"[\{\}\*\=\|\(\)\,\?\:]"
- self.rv.append(Token(s, self.lineno))
-
- def t_comment(self, s):
- r"\-\-[^\n]*"
- pass
-
- def t_newline(self, s):
- r"\n"
- self.lineno += 1
-
- def t_whitespace(self, s):
- r"[ \t]+"
- pass
-
- def t_default(self, s):
- r" . +"
- raise ValueError, "unmatched input: %s" % `s`
-
-class ASDLParser(spark.GenericParser, object):
- def __init__(self):
- super(ASDLParser, self).__init__("module")
-
- def typestring(self, tok):
- return tok.type
-
- def error(self, tok):
- raise ASDLSyntaxError(tok.lineno, tok)
-
- def p_module_0(self, (module, name, version, _0, _1)):
- " module ::= Id Id version { } "
- if module.value != "module":
- raise ASDLSyntaxError(module.lineno,
- msg="expected 'module', found %s" % module)
- return Module(name, None, version)
-
- def p_module(self, (module, name, version, _0, definitions, _1)):
- " module ::= Id Id version { definitions } "
- if module.value != "module":
- raise ASDLSyntaxError(module.lineno,
- msg="expected 'module', found %s" % module)
- return Module(name, definitions, version)
-
- def p_version(self, (version, V)):
- "version ::= Id String"
- if version.value != "version":
- raise ASDLSyntaxError(version.lineno,
- msg="expected 'version', found %" % version)
- return V
-
- def p_definition_0(self, (definition,)):
- " definitions ::= definition "
- return definition
-
- def p_definition_1(self, (definitions, definition)):
- " definitions ::= definition definitions "
- return definitions + definition
-
- def p_definition(self, (id, _, type)):
- " definition ::= Id = type "
- return [Type(id, type)]
-
- def p_type_0(self, (product,)):
- " type ::= product "
- return product
-
- def p_type_1(self, (sum,)):
- " type ::= sum "
- return Sum(sum)
-
- def p_type_2(self, (sum, id, _0, attributes, _1)):
- " type ::= sum Id ( fields ) "
- if id.value != "attributes":
- raise ASDLSyntaxError(id.lineno,
- msg="expected attributes, found %s" % id)
- if attributes:
- attributes.reverse()
- return Sum(sum, attributes)
-
- def p_product(self, (_0, fields, _1)):
- " product ::= ( fields ) "
- # XXX can't I just construct things in the right order?
- fields.reverse()
- return Product(fields)
-
- def p_sum_0(self, (constructor,)):
- " sum ::= constructor "
- return [constructor]
-
- def p_sum_1(self, (constructor, _, sum)):
- " sum ::= constructor | sum "
- return [constructor] + sum
-
- def p_sum_2(self, (constructor, _, sum)):
- " sum ::= constructor | sum "
- return [constructor] + sum
-
- def p_constructor_0(self, (id,)):
- " constructor ::= Id "
- return Constructor(id)
-
- def p_constructor_1(self, (id, _0, fields, _1)):
- " constructor ::= Id ( fields ) "
- # XXX can't I just construct things in the right order?
- fields.reverse()
- return Constructor(id, fields)
-
- def p_fields_0(self, (field,)):
- " fields ::= field "
- return [field]
-
- def p_fields_1(self, (field, _, fields)):
- " fields ::= field , fields "
- return fields + [field]
-
- def p_field_0(self, (type,)):
- " field ::= Id "
- return Field(type)
-
- def p_field_1(self, (type, name)):
- " field ::= Id Id "
- return Field(type, name)
-
- def p_field_2(self, (type, _, name)):
- " field ::= Id * Id "
- return Field(type, name, seq=True)
-
- def p_field_3(self, (type, _, name)):
- " field ::= Id ? Id "
- return Field(type, name, opt=True)
-
- def p_field_4(self, (type, _)):
- " field ::= Id * "
- return Field(type, seq=True)
-
- def p_field_5(self, (type, _)):
- " field ::= Id ? "
- return Field(type, opt=True)
-
-builtin_types = ("identifier", "string", "int", "bool", "object")
-
-# below is a collection of classes to capture the AST of an AST :-)
-# not sure if any of the methods are useful yet, but I'm adding them
-# piecemeal as they seem helpful
-
-class AST(object):
- pass # a marker class
-
-class Module(AST):
- def __init__(self, name, dfns, version):
- self.name = name
- self.dfns = dfns
- self.version = version
- self.types = {} # maps type name to value (from dfns)
- for type in dfns:
- self.types[type.name.value] = type.value
-
- def __repr__(self):
- return "Module(%s, %s)" % (self.name, self.dfns)
-
-class Type(AST):
- def __init__(self, name, value):
- self.name = name
- self.value = value
-
- def __repr__(self):
- return "Type(%s, %s)" % (self.name, self.value)
-
-class Constructor(AST):
- def __init__(self, name, fields=None):
- self.name = name
- self.fields = fields or []
-
- def __repr__(self):
- return "Constructor(%s, %s)" % (self.name, self.fields)
-
-class Field(AST):
- def __init__(self, type, name=None, seq=False, opt=False):
- self.type = type
- self.name = name
- self.seq = seq
- self.opt = opt
-
- def __repr__(self):
- if self.seq:
- extra = ", seq=True"
- elif self.opt:
- extra = ", opt=True"
- else:
- extra = ""
- if self.name is None:
- return "Field(%s%s)" % (self.type, extra)
- else:
- return "Field(%s, %s%s)" % (self.type, self.name, extra)
-
-class Sum(AST):
- def __init__(self, types, attributes=None):
- self.types = types
- self.attributes = attributes or []
-
- def __repr__(self):
- if self.attributes is None:
- return "Sum(%s)" % self.types
- else:
- return "Sum(%s, %s)" % (self.types, self.attributes)
-
-class Product(AST):
- def __init__(self, fields):
- self.fields = fields
-
- def __repr__(self):
- return "Product(%s)" % self.fields
-
-class VisitorBase(object):
-
- def __init__(self, skip=False):
- self.cache = {}
- self.skip = skip
-
- def visit(self, object, *args):
- meth = self._dispatch(object)
- if meth is None:
- return
- try:
- meth(object, *args)
- except Exception, err:
- print "Error visiting", repr(object)
- print err
- traceback.print_exc()
- # XXX hack
- if hasattr(self, 'file'):
- self.file.flush()
- os._exit(1)
-
- def _dispatch(self, object):
- assert isinstance(object, AST), repr(object)
- klass = object.__class__
- meth = self.cache.get(klass)
- if meth is None:
- methname = "visit" + klass.__name__
- if self.skip:
- meth = getattr(self, methname, None)
- else:
- meth = getattr(self, methname)
- self.cache[klass] = meth
- return meth
-
-class Check(VisitorBase):
-
- def __init__(self):
- super(Check, self).__init__(skip=True)
- self.cons = {}
- self.errors = 0
- self.types = {}
-
- def visitModule(self, mod):
- for dfn in mod.dfns:
- self.visit(dfn)
-
- def visitType(self, type):
- self.visit(type.value, str(type.name))
-
- def visitSum(self, sum, name):
- for t in sum.types:
- self.visit(t, name)
-
- def visitConstructor(self, cons, name):
- key = str(cons.name)
- conflict = self.cons.get(key)
- if conflict is None:
- self.cons[key] = name
- else:
- print "Redefinition of constructor %s" % key
- print "Defined in %s and %s" % (conflict, name)
- self.errors += 1
- for f in cons.fields:
- self.visit(f, key)
-
- def visitField(self, field, name):
- key = str(field.type)
- l = self.types.setdefault(key, [])
- l.append(name)
-
- def visitProduct(self, prod, name):
- for f in prod.fields:
- self.visit(f, name)
-
-def check(mod):
- v = Check()
- v.visit(mod)
-
- for t in v.types:
- if t not in mod.types and not t in builtin_types:
- v.errors += 1
- uses = ", ".join(v.types[t])
- print "Undefined type %s, used in %s" % (t, uses)
-
- return not v.errors
-
-def parse(file):
- scanner = ASDLScanner()
- parser = ASDLParser()
-
- buf = open(file).read()
- tokens = scanner.tokenize(buf)
- try:
- return parser.parse(tokens)
- except ASDLSyntaxError, err:
- print err
- lines = buf.split("\n")
- print lines[err.lineno - 1] # lines starts at 0, files at 1
-
-if __name__ == "__main__":
- import glob
- import sys
-
- if len(sys.argv) > 1:
- files = sys.argv[1:]
- else:
- testdir = "tests"
- files = glob.glob(testdir + "/*.asdl")
-
- for file in files:
- print file
- mod = parse(file)
- print "module", mod.name
- print len(mod.dfns), "definitions"
- if not check(mod):
- print "Check failed"
- else:
- for dfn in mod.dfns:
- print dfn.type
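
For orientation, the scanner and parser just removed turn one line of ASDL into
a small object tree built from the classes above. Roughly, and using the
example that asdl_c.py's is_simple() docstring cites below:

    # ASDL input (one definition):
    #     unaryop = Invert | Not | UAdd | USub
    #
    # Approximate parse result in terms of the classes above (reprs trimmed):
    #     Type(unaryop,
    #          Sum([Constructor(Invert, []), Constructor(Not, []),
    #               Constructor(UAdd, []), Constructor(USub, [])]))
    #
    # Because none of the constructors carries fields, is_simple() in asdl_c.py
    # below treats this sum as "simple" and emits a plain C enum for it.
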
diff --git a/ast27/Parser/asdl_c.py b/ast27/Parser/asdl_c.py
deleted file mode 100755
index 145f7f7..0000000
--- a/ast27/Parser/asdl_c.py
+++ /dev/null
@@ -1,1249 +0,0 @@
-#! /usr/bin/env python
-"""Generate C code from an ASDL description."""
-
-# TO DO
-# handle fields that have a type but no name
-
-import os, sys
-
-import asdl
-
-TABSIZE = 8
-MAX_COL = 100
-
-def get_c_type(name):
- """Return a string for the C name of the type.
-
- This function special cases the default types provided by asdl:
- identifier, string, int, bool.
- """
- # XXX ack! need to figure out where Id is useful and where string
- if isinstance(name, asdl.Id):
- name = name.value
- if name in asdl.builtin_types:
- return name
- else:
- return "%s_ty" % name
-
-def reflow_lines(s, depth):
- """Reflow the line s indented depth tabs.
-
- Return a sequence of lines where no line extends beyond MAX_COL
- when properly indented. The first line is properly indented based
- exclusively on depth * TABSIZE. All following lines -- these are
- the reflowed lines generated by this function -- start at the same
- column as the first character beyond the opening { in the first
- line.
- """
- size = MAX_COL - depth * TABSIZE
- if len(s) < size:
- return [s]
-
- lines = []
- cur = s
- padding = ""
- while len(cur) > size:
- i = cur.rfind(' ', 0, size)
- # XXX this should be fixed for real
- if i == -1 and 'GeneratorExp' in cur:
- i = size + 3
- assert i != -1, "Impossible line %d to reflow: %r" % (size, s)
- lines.append(padding + cur[:i])
- if len(lines) == 1:
- # find new size based on brace
- j = cur.find('{', 0, i)
- if j >= 0:
- j += 2 # account for the brace and the space after it
- size -= j
- padding = " " * j
- else:
- j = cur.find('(', 0, i)
- if j >= 0:
- j += 1 # account for the paren (no space after it)
- size -= j
- padding = " " * j
- cur = cur[i+1:]
- else:
- lines.append(padding + cur)
- return lines
-
-def is_simple(sum):
-    """Return True if a sum is simple.
-
- A sum is simple if its types have no fields, e.g.
- unaryop = Invert | Not | UAdd | USub
- """
- for t in sum.types:
- if t.fields:
- return False
- return True
-
-
-class EmitVisitor(asdl.VisitorBase):
-    """Visitor that emits lines."""
-
- def __init__(self, file):
- self.file = file
- super(EmitVisitor, self).__init__()
-
- def emit(self, s, depth, reflow=True):
- # XXX reflow long lines?
- if reflow:
- lines = reflow_lines(s, depth)
- else:
- lines = [s]
- for line in lines:
- line = (" " * TABSIZE * depth) + line + "\n"
- self.file.write(line)
-
-
-class TypeDefVisitor(EmitVisitor):
- def visitModule(self, mod):
- for dfn in mod.dfns:
- self.visit(dfn)
-
- def visitType(self, type, depth=0):
- self.visit(type.value, type.name, depth)
-
- def visitSum(self, sum, name, depth):
- if is_simple(sum):
- self.simple_sum(sum, name, depth)
- else:
- self.sum_with_constructors(sum, name, depth)
-
- def simple_sum(self, sum, name, depth):
- enum = []
- for i in range(len(sum.types)):
- type = sum.types[i]
- enum.append("%s=%d" % (type.name, i + 1))
- enums = ", ".join(enum)
- ctype = get_c_type(name)
- s = "typedef enum _%s { %s } %s;" % (name, enums, ctype)
- self.emit(s, depth)
- self.emit("", depth)
-
- def sum_with_constructors(self, sum, name, depth):
- ctype = get_c_type(name)
- s = "typedef struct _%(name)s *%(ctype)s;" % locals()
- self.emit(s, depth)
- self.emit("", depth)
-
- def visitProduct(self, product, name, depth):
- ctype = get_c_type(name)
- s = "typedef struct _%(name)s *%(ctype)s;" % locals()
- self.emit(s, depth)
- self.emit("", depth)
-
-
-class StructVisitor(EmitVisitor):
-    """Visitor to generate typedefs for the AST."""
-
- def visitModule(self, mod):
- for dfn in mod.dfns:
- self.visit(dfn)
-
- def visitType(self, type, depth=0):
- self.visit(type.value, type.name, depth)
-
- def visitSum(self, sum, name, depth):
- if not is_simple(sum):
- self.sum_with_constructors(sum, name, depth)
-
- def sum_with_constructors(self, sum, name, depth):
- def emit(s, depth=depth):
- self.emit(s % sys._getframe(1).f_locals, depth)
- enum = []
- for i in range(len(sum.types)):
- type = sum.types[i]
- enum.append("%s_kind=%d" % (type.name, i + 1))
-
- emit("enum _%(name)s_kind {" + ", ".join(enum) + "};")
-
- emit("struct _%(name)s {")
- emit("enum _%(name)s_kind kind;", depth + 1)
- emit("union {", depth + 1)
- for t in sum.types:
- self.visit(t, depth + 2)
- emit("} v;", depth + 1)
- for field in sum.attributes:
- # rudimentary attribute handling
- type = str(field.type)
- assert type in asdl.builtin_types, type
- emit("%s %s;" % (type, field.name), depth + 1);
- emit("};")
- emit("")
-
- def visitConstructor(self, cons, depth):
- if cons.fields:
- self.emit("struct {", depth)
- for f in cons.fields:
- self.visit(f, depth + 1)
- self.emit("} %s;" % cons.name, depth)
- self.emit("", depth)
- else:
- # XXX not sure what I want here, nothing is probably fine
- pass
-
- def visitField(self, field, depth):
- # XXX need to lookup field.type, because it might be something
- # like a builtin...
- ctype = get_c_type(field.type)
- name = field.name
- if field.seq:
- if field.type.value in ('cmpop',):
- self.emit("asdl_int_seq *%(name)s;" % locals(), depth)
- else:
- self.emit("asdl_seq *%(name)s;" % locals(), depth)
- else:
- self.emit("%(ctype)s %(name)s;" % locals(), depth)
-
- def visitProduct(self, product, name, depth):
- self.emit("struct _%(name)s {" % locals(), depth)
- for f in product.fields:
- self.visit(f, depth + 1)
- self.emit("};", depth)
- self.emit("", depth)
-
-
-class PrototypeVisitor(EmitVisitor):
- """Generate function prototypes for the .h file"""
-
- def visitModule(self, mod):
- for dfn in mod.dfns:
- self.visit(dfn)
-
- def visitType(self, type):
- self.visit(type.value, type.name)
-
- def visitSum(self, sum, name):
- if is_simple(sum):
- pass # XXX
- else:
- for t in sum.types:
- self.visit(t, name, sum.attributes)
-
- def get_args(self, fields):
-        """Return a list of C argument info, one for each field.
-
-        Argument info is a 3-tuple of a C type, variable name, and a flag
-        that is true if the type can be NULL.
- """
- args = []
- unnamed = {}
- for f in fields:
- if f.name is None:
- name = f.type
- c = unnamed[name] = unnamed.get(name, 0) + 1
- if c > 1:
- name = "name%d" % (c - 1)
- else:
- name = f.name
- # XXX should extend get_c_type() to handle this
- if f.seq:
- if f.type.value in ('cmpop',):
- ctype = "asdl_int_seq *"
- else:
- ctype = "asdl_seq *"
- else:
- ctype = get_c_type(f.type)
- args.append((ctype, name, f.opt or f.seq))
- return args
-
- def visitConstructor(self, cons, type, attrs):
- args = self.get_args(cons.fields)
- attrs = self.get_args(attrs)
- ctype = get_c_type(type)
- self.emit_function(cons.name, ctype, args, attrs)
-
- def emit_function(self, name, ctype, args, attrs, union=True):
- args = args + attrs
- if args:
- argstr = ", ".join(["%s %s" % (atype, aname)
- for atype, aname, opt in args])
- argstr += ", PyArena *arena"
- else:
- argstr = "PyArena *arena"
- margs = "a0"
- for i in range(1, len(args)+1):
- margs += ", a%d" % i
- self.emit("#define %s(%s) _Ta27_%s(%s)" % (name, margs, name, margs), 0,
- reflow=False)
- self.emit("%s _Ta27_%s(%s);" % (ctype, name, argstr), False)
-
- def visitProduct(self, prod, name):
- self.emit_function(name, get_c_type(name),
- self.get_args(prod.fields), [], union=False)
-
-
-class FunctionVisitor(PrototypeVisitor):
- """Visitor to generate constructor functions for AST."""
-
- def emit_function(self, name, ctype, args, attrs, union=True):
- def emit(s, depth=0, reflow=True):
- self.emit(s, depth, reflow)
- argstr = ", ".join(["%s %s" % (atype, aname)
- for atype, aname, opt in args + attrs])
- if argstr:
- argstr += ", PyArena *arena"
- else:
- argstr = "PyArena *arena"
- self.emit("%s" % ctype, 0)
- emit("%s(%s)" % (name, argstr))
- emit("{")
- emit("%s p;" % ctype, 1)
- for argtype, argname, opt in args:
- # XXX hack alert: false is allowed for a bool
- if not opt and not (argtype == "bool" or argtype == "int"):
- emit("if (!%s) {" % argname, 1)
- emit("PyErr_SetString(PyExc_ValueError,", 2)
- msg = "field %s is required for %s" % (argname, name)
- emit(' "%s");' % msg,
- 2, reflow=False)
- emit('return NULL;', 2)
- emit('}', 1)
-
- emit("p = (%s)PyArena_Malloc(arena, sizeof(*p));" % ctype, 1);
- emit("if (!p)", 1)
- emit("return NULL;", 2)
- if union:
- self.emit_body_union(name, args, attrs)
- else:
- self.emit_body_struct(name, args, attrs)
- emit("return p;", 1)
- emit("}")
- emit("")
-
- def emit_body_union(self, name, args, attrs):
- def emit(s, depth=0, reflow=True):
- self.emit(s, depth, reflow)
- emit("p->kind = %s_kind;" % name, 1)
- for argtype, argname, opt in args:
- emit("p->v.%s.%s = %s;" % (name, argname, argname), 1)
- for argtype, argname, opt in attrs:
- emit("p->%s = %s;" % (argname, argname), 1)
-
- def emit_body_struct(self, name, args, attrs):
- def emit(s, depth=0, reflow=True):
- self.emit(s, depth, reflow)
- for argtype, argname, opt in args:
- emit("p->%s = %s;" % (argname, argname), 1)
- assert not attrs
-
-
-class PickleVisitor(EmitVisitor):
-
- def visitModule(self, mod):
- for dfn in mod.dfns:
- self.visit(dfn)
-
- def visitType(self, type):
- self.visit(type.value, type.name)
-
- def visitSum(self, sum, name):
- pass
-
- def visitProduct(self, sum, name):
- pass
-
- def visitConstructor(self, cons, name):
- pass
-
- def visitField(self, sum):
- pass
-
-
-class Obj2ModPrototypeVisitor(PickleVisitor):
- def visitProduct(self, prod, name):
- code = "static int obj2ast_%s(PyObject* obj, %s* out, PyArena* arena);"
- self.emit(code % (name, get_c_type(name)), 0)
-
- visitSum = visitProduct
-
-
-class Obj2ModVisitor(PickleVisitor):
- def funcHeader(self, name):
- ctype = get_c_type(name)
- self.emit("int", 0)
- self.emit("obj2ast_%s(PyObject* obj, %s* out, PyArena* arena)" % (name, ctype), 0)
- self.emit("{", 0)
- self.emit("PyObject* tmp = NULL;", 1)
- self.emit("int isinstance;", 1)
- self.emit("", 0)
-
- def sumTrailer(self, name):
- self.emit("", 0)
- self.emit("tmp = PyObject_Repr(obj);", 1)
- # there's really nothing more we can do if this fails ...
- self.emit("if (tmp == NULL) goto failed;", 1)
- error = "expected some sort of %s, but got %%.400s" % name
- format = "PyErr_Format(PyExc_TypeError, \"%s\", _PyUnicode_AsString(tmp));"
- self.emit(format % error, 1, reflow=False)
- self.emit("failed:", 0)
- self.emit("Py_XDECREF(tmp);", 1)
- self.emit("return 1;", 1)
- self.emit("}", 0)
- self.emit("", 0)
-
- def simpleSum(self, sum, name):
- self.funcHeader(name)
- for t in sum.types:
- line = ("isinstance = PyObject_IsInstance(obj, "
- "(PyObject *)%s_type);")
- self.emit(line % (t.name,), 1)
- self.emit("if (isinstance == -1) {", 1)
- self.emit("return 1;", 2)
- self.emit("}", 1)
- self.emit("if (isinstance) {", 1)
- self.emit("*out = %s;" % t.name, 2)
- self.emit("return 0;", 2)
- self.emit("}", 1)
- self.sumTrailer(name)
-
- def buildArgs(self, fields):
- return ", ".join(fields + ["arena"])
-
- def complexSum(self, sum, name):
- self.funcHeader(name)
- for a in sum.attributes:
- self.visitAttributeDeclaration(a, name, sum=sum)
- self.emit("", 0)
- # XXX: should we only do this for 'expr'?
- self.emit("if (obj == Py_None) {", 1)
- self.emit("*out = NULL;", 2)
- self.emit("return 0;", 2)
- self.emit("}", 1)
- for a in sum.attributes:
- self.visitField(a, name, sum=sum, depth=1)
- for t in sum.types:
- line = "isinstance = PyObject_IsInstance(obj, (PyObject*)%s_type);"
- self.emit(line % (t.name,), 1)
- self.emit("if (isinstance == -1) {", 1)
- self.emit("return 1;", 2)
- self.emit("}", 1)
- self.emit("if (isinstance) {", 1)
- for f in t.fields:
- self.visitFieldDeclaration(f, t.name, sum=sum, depth=2)
- self.emit("", 0)
- for f in t.fields:
- self.visitField(f, t.name, sum=sum, depth=2)
- args = [f.name.value for f in t.fields] + [a.name.value for a in sum.attributes]
- self.emit("*out = %s(%s);" % (t.name, self.buildArgs(args)), 2)
- self.emit("if (*out == NULL) goto failed;", 2)
- self.emit("return 0;", 2)
- self.emit("}", 1)
- self.sumTrailer(name)
-
- def visitAttributeDeclaration(self, a, name, sum=sum):
- ctype = get_c_type(a.type)
- self.emit("%s %s;" % (ctype, a.name), 1)
-
- def visitSum(self, sum, name):
- if is_simple(sum):
- self.simpleSum(sum, name)
- else:
- self.complexSum(sum, name)
-
- def visitProduct(self, prod, name):
- ctype = get_c_type(name)
- self.emit("int", 0)
- self.emit("obj2ast_%s(PyObject* obj, %s* out, PyArena* arena)" % (name, ctype), 0)
- self.emit("{", 0)
- self.emit("PyObject* tmp = NULL;", 1)
- for f in prod.fields:
- self.visitFieldDeclaration(f, name, prod=prod, depth=1)
- self.emit("", 0)
- for f in prod.fields:
- self.visitField(f, name, prod=prod, depth=1)
- args = [f.name.value for f in prod.fields]
- self.emit("*out = %s(%s);" % (name, self.buildArgs(args)), 1)
- self.emit("return 0;", 1)
- self.emit("failed:", 0)
- self.emit("Py_XDECREF(tmp);", 1)
- self.emit("return 1;", 1)
- self.emit("}", 0)
- self.emit("", 0)
-
- def visitFieldDeclaration(self, field, name, sum=None, prod=None, depth=0):
- ctype = get_c_type(field.type)
- if field.seq:
- if self.isSimpleType(field):
- self.emit("asdl_int_seq* %s;" % field.name, depth)
- else:
- self.emit("asdl_seq* %s;" % field.name, depth)
- else:
- ctype = get_c_type(field.type)
- self.emit("%s %s;" % (ctype, field.name), depth)
-
- def isSimpleSum(self, field):
- # XXX can the members of this list be determined automatically?
- return field.type.value in ('expr_context', 'boolop', 'operator',
- 'unaryop', 'cmpop')
-
- def isNumeric(self, field):
- return get_c_type(field.type) in ("int", "bool")
-
- def isSimpleType(self, field):
- return self.isSimpleSum(field) or self.isNumeric(field)
-
- def visitField(self, field, name, sum=None, prod=None, depth=0):
- ctype = get_c_type(field.type)
- self.emit("if (PyObject_HasAttrString(obj, \"%s\")) {" % field.name, depth)
- self.emit("int res;", depth+1)
- if field.seq:
- self.emit("Py_ssize_t len;", depth+1)
- self.emit("Py_ssize_t i;", depth+1)
- self.emit("tmp = PyObject_GetAttrString(obj, \"%s\");" % field.name, depth+1)
- self.emit("if (tmp == NULL) goto failed;", depth+1)
- if field.seq:
- self.emit("if (!PyList_Check(tmp)) {", depth+1)
- self.emit("PyErr_Format(PyExc_TypeError, \"%s field \\\"%s\\\" must "
- "be a list, not a %%.200s\", tmp->ob_type->tp_name);" %
- (name, field.name),
- depth+2, reflow=False)
- self.emit("goto failed;", depth+2)
- self.emit("}", depth+1)
- self.emit("len = PyList_GET_SIZE(tmp);", depth+1)
- if self.isSimpleType(field):
- self.emit("%s = asdl_int_seq_new(len, arena);" % field.name, depth+1)
- else:
- self.emit("%s = asdl_seq_new(len, arena);" % field.name, depth+1)
- self.emit("if (%s == NULL) goto failed;" % field.name, depth+1)
- self.emit("for (i = 0; i < len; i++) {", depth+1)
- self.emit("%s value;" % ctype, depth+2)
- self.emit("res = obj2ast_%s(PyList_GET_ITEM(tmp, i), &value, arena);" %
- field.type, depth+2, reflow=False)
- self.emit("if (res != 0) goto failed;", depth+2)
- self.emit("asdl_seq_SET(%s, i, value);" % field.name, depth+2)
- self.emit("}", depth+1)
- else:
- self.emit("res = obj2ast_%s(tmp, &%s, arena);" %
- (field.type, field.name), depth+1)
- self.emit("if (res != 0) goto failed;", depth+1)
-
- self.emit("Py_XDECREF(tmp);", depth+1)
- self.emit("tmp = NULL;", depth+1)
- self.emit("} else {", depth)
- if not field.opt:
- message = "required field \\\"%s\\\" missing from %s" % (field.name, name)
- format = "PyErr_SetString(PyExc_TypeError, \"%s\");"
- self.emit(format % message, depth+1, reflow=False)
- self.emit("return 1;", depth+1)
- else:
- if self.isNumeric(field):
- self.emit("%s = 0;" % field.name, depth+1)
- elif not self.isSimpleType(field):
- self.emit("%s = NULL;" % field.name, depth+1)
- else:
- raise TypeError("could not determine the default value for %s" % field.name)
- self.emit("}", depth)
-
-
-class MarshalPrototypeVisitor(PickleVisitor):
-
- def prototype(self, sum, name):
- ctype = get_c_type(name)
- self.emit("static int marshal_write_%s(PyObject **, int *, %s);"
- % (name, ctype), 0)
-
- visitProduct = visitSum = prototype
-
-
-class PyTypesDeclareVisitor(PickleVisitor):
-
- def visitProduct(self, prod, name):
- self.emit("static PyTypeObject *%s_type;" % name, 0)
- self.emit("static PyObject* ast2obj_%s(void*);" % name, 0)
- if prod.fields:
- self.emit("static char *%s_fields[]={" % name,0)
- for f in prod.fields:
- self.emit('"%s",' % f.name, 1)
- self.emit("};", 0)
-
- def visitSum(self, sum, name):
- self.emit("static PyTypeObject *%s_type;" % name, 0)
- if sum.attributes:
- self.emit("static char *%s_attributes[] = {" % name, 0)
- for a in sum.attributes:
- self.emit('"%s",' % a.name, 1)
- self.emit("};", 0)
- ptype = "void*"
- if is_simple(sum):
- ptype = get_c_type(name)
- tnames = []
- for t in sum.types:
- tnames.append(str(t.name)+"_singleton")
- tnames = ", *".join(tnames)
- self.emit("static PyObject *%s;" % tnames, 0)
- self.emit("static PyObject* ast2obj_%s(%s);" % (name, ptype), 0)
- for t in sum.types:
- self.visitConstructor(t, name)
-
- def visitConstructor(self, cons, name):
- self.emit("static PyTypeObject *%s_type;" % cons.name, 0)
- if cons.fields:
- self.emit("static char *%s_fields[]={" % cons.name, 0)
- for t in cons.fields:
- self.emit('"%s",' % t.name, 1)
- self.emit("};",0)
-
-class PyTypesVisitor(PickleVisitor):
-
- def visitModule(self, mod):
- self.emit("""
-static int
-ast_type_init(PyObject *self, PyObject *args, PyObject *kw)
-{
- Py_ssize_t i, numfields = 0;
- int res = -1;
- PyObject *key, *value, *fields;
- fields = PyObject_GetAttrString((PyObject*)Py_TYPE(self), "_fields");
- if (!fields)
- PyErr_Clear();
- if (fields) {
- numfields = PySequence_Size(fields);
- if (numfields == -1)
- goto cleanup;
- }
- res = 0; /* if no error occurs, this stays 0 to the end */
- if (PyTuple_GET_SIZE(args) > 0) {
- if (numfields != PyTuple_GET_SIZE(args)) {
- PyErr_Format(PyExc_TypeError, "%.400s constructor takes %s"
- "%zd positional argument%s",
- Py_TYPE(self)->tp_name,
- numfields == 0 ? "" : "either 0 or ",
- numfields, numfields == 1 ? "" : "s");
- res = -1;
- goto cleanup;
- }
- for (i = 0; i < PyTuple_GET_SIZE(args); i++) {
- /* cannot be reached when fields is NULL */
- PyObject *name = PySequence_GetItem(fields, i);
- if (!name) {
- res = -1;
- goto cleanup;
- }
- res = PyObject_SetAttr(self, name, PyTuple_GET_ITEM(args, i));
- Py_DECREF(name);
- if (res < 0)
- goto cleanup;
- }
- }
- if (kw) {
- i = 0; /* needed by PyDict_Next */
- while (PyDict_Next(kw, &i, &key, &value)) {
- res = PyObject_SetAttr(self, key, value);
- if (res < 0)
- goto cleanup;
- }
- }
- cleanup:
- Py_XDECREF(fields);
- return res;
-}
-
-/* Pickling support */
-static PyObject *
-ast_type_reduce(PyObject *self, PyObject *unused)
-{
- PyObject *res;
- PyObject *dict = PyObject_GetAttrString(self, "__dict__");
- if (dict == NULL) {
- if (PyErr_ExceptionMatches(PyExc_AttributeError))
- PyErr_Clear();
- else
- return NULL;
- }
- if (dict) {
- res = Py_BuildValue("O()O", Py_TYPE(self), dict);
- Py_DECREF(dict);
- return res;
- }
- return Py_BuildValue("O()", Py_TYPE(self));
-}
-
-static PyMethodDef ast_type_methods[] = {
- {"__reduce__", ast_type_reduce, METH_NOARGS, NULL},
- {NULL}
-};
-
-static PyTypeObject AST_type = {
- PyVarObject_HEAD_INIT(&PyType_Type, 0)
- "_ast27.AST",
- sizeof(PyObject),
- 0,
- 0, /* tp_dealloc */
- 0, /* tp_print */
- 0, /* tp_getattr */
- 0, /* tp_setattr */
- 0, /* tp_compare */
- 0, /* tp_repr */
- 0, /* tp_as_number */
- 0, /* tp_as_sequence */
- 0, /* tp_as_mapping */
- 0, /* tp_hash */
- 0, /* tp_call */
- 0, /* tp_str */
- PyObject_GenericGetAttr, /* tp_getattro */
- PyObject_GenericSetAttr, /* tp_setattro */
- 0, /* tp_as_buffer */
- Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, /* tp_flags */
- 0, /* tp_doc */
- 0, /* tp_traverse */
- 0, /* tp_clear */
- 0, /* tp_richcompare */
- 0, /* tp_weaklistoffset */
- 0, /* tp_iter */
- 0, /* tp_iternext */
- ast_type_methods, /* tp_methods */
- 0, /* tp_members */
- 0, /* tp_getset */
- 0, /* tp_base */
- 0, /* tp_dict */
- 0, /* tp_descr_get */
- 0, /* tp_descr_set */
- 0, /* tp_dictoffset */
- (initproc)ast_type_init, /* tp_init */
- PyType_GenericAlloc, /* tp_alloc */
- PyType_GenericNew, /* tp_new */
- PyObject_Del, /* tp_free */
-};
-
-
-static PyTypeObject* make_type(char *type, PyTypeObject* base, char**fields, int num_fields)
-{
- PyObject *fnames, *result;
- int i;
- fnames = PyTuple_New(num_fields);
- if (!fnames) return NULL;
- for (i = 0; i < num_fields; i++) {
- PyObject *field = PyUnicode_FromString(fields[i]);
- if (!field) {
- Py_DECREF(fnames);
- return NULL;
- }
- PyTuple_SET_ITEM(fnames, i, field);
- }
- result = PyObject_CallFunction((PyObject*)&PyType_Type, "s(O){sOss}",
- type, base, "_fields", fnames, "__module__", "_ast27");
- Py_DECREF(fnames);
- return (PyTypeObject*)result;
-}
-
-static int add_attributes(PyTypeObject* type, char**attrs, int num_fields)
-{
- int i, result;
- PyObject *s, *l = PyTuple_New(num_fields);
- if (!l)
- return 0;
- for (i = 0; i < num_fields; i++) {
- s = PyUnicode_FromString(attrs[i]);
- if (!s) {
- Py_DECREF(l);
- return 0;
- }
- PyTuple_SET_ITEM(l, i, s);
- }
- result = PyObject_SetAttrString((PyObject*)type, "_attributes", l) >= 0;
- Py_DECREF(l);
- return result;
-}
-
-/* Conversion AST -> Python */
-
-static PyObject* ast2obj_list(asdl_seq *seq, PyObject* (*func)(void*))
-{
- int i, n = asdl_seq_LEN(seq);
- PyObject *result = PyList_New(n);
- PyObject *value;
- if (!result)
- return NULL;
- for (i = 0; i < n; i++) {
- value = func(asdl_seq_GET(seq, i));
- if (!value) {
- Py_DECREF(result);
- return NULL;
- }
- PyList_SET_ITEM(result, i, value);
- }
- return result;
-}
-
-static PyObject* ast2obj_object(void *o)
-{
- if (!o)
- o = Py_None;
- Py_INCREF((PyObject*)o);
- return (PyObject*)o;
-}
-#define ast2obj_identifier ast2obj_object
-#define ast2obj_string ast2obj_object
-static PyObject* ast2obj_bool(bool b)
-{
- return PyBool_FromLong(b);
-}
-
-static PyObject* ast2obj_int(long b)
-{
- return PyLong_FromLong(b);
-}
-
-/* Conversion Python -> AST */
-
-static int obj2ast_object(PyObject* obj, PyObject** out, PyArena* arena)
-{
- if (obj == Py_None)
- obj = NULL;
- if (obj)
- PyArena_AddPyObject(arena, obj);
- Py_XINCREF(obj);
- *out = obj;
- return 0;
-}
-
-static int obj2ast_identifier(PyObject* obj, PyObject** out, PyArena* arena)
-{
- if (!PyUnicode_CheckExact(obj) && obj != Py_None) {
- PyErr_Format(PyExc_TypeError,
- "AST identifier must be of type str");
- return 1;
- }
- return obj2ast_object(obj, out, arena);
-}
-
-static int obj2ast_string(PyObject* obj, PyObject** out, PyArena* arena)
-{
- if (!PyUnicode_CheckExact(obj) && !PyUnicode_CheckExact(obj)) {
- PyErr_SetString(PyExc_TypeError,
- "AST string must be of type str or unicode");
- return 1;
- }
- return obj2ast_object(obj, out, arena);
-}
-
-static int obj2ast_int(PyObject* obj, int* out, PyArena* arena)
-{
- int i;
- if (!PyLong_Check(obj) && !PyLong_Check(obj)) {
- PyObject *s = PyObject_Repr(obj);
- if (s == NULL) return 1;
- PyErr_Format(PyExc_ValueError, "invalid integer value: %.400s",
- _PyUnicode_AsString(s));
- Py_DECREF(s);
- return 1;
- }
-
- i = (int)PyLong_AsLong(obj);
- if (i == -1 && PyErr_Occurred())
- return 1;
- *out = i;
- return 0;
-}
-
-static int obj2ast_bool(PyObject* obj, bool* out, PyArena* arena)
-{
- if (!PyBool_Check(obj)) {
- PyObject *s = PyObject_Repr(obj);
- if (s == NULL) return 1;
- PyErr_Format(PyExc_ValueError, "invalid boolean value: %.400s",
- _PyUnicode_AsString(s));
- Py_DECREF(s);
- return 1;
- }
-
- *out = (obj == Py_True);
- return 0;
-}
-
-static int add_ast_fields(void)
-{
- PyObject *empty_tuple, *d;
- if (PyType_Ready(&AST_type) < 0)
- return -1;
- d = AST_type.tp_dict;
- empty_tuple = PyTuple_New(0);
- if (!empty_tuple ||
- PyDict_SetItemString(d, "_fields", empty_tuple) < 0 ||
- PyDict_SetItemString(d, "_attributes", empty_tuple) < 0) {
- Py_XDECREF(empty_tuple);
- return -1;
- }
- Py_DECREF(empty_tuple);
- return 0;
-}
-
-""", 0, reflow=False)
-
- self.emit("static int init_types(void)",0)
- self.emit("{", 0)
- self.emit("static int initialized;", 1)
- self.emit("if (initialized) return 1;", 1)
- self.emit("if (add_ast_fields() < 0) return 0;", 1)
- for dfn in mod.dfns:
- self.visit(dfn)
- self.emit("initialized = 1;", 1)
- self.emit("return 1;", 1);
- self.emit("}", 0)
-
- def visitProduct(self, prod, name):
- if prod.fields:
- fields = name.value+"_fields"
- else:
- fields = "NULL"
- self.emit('%s_type = make_type("%s", &AST_type, %s, %d);' %
- (name, name, fields, len(prod.fields)), 1)
- self.emit("if (!%s_type) return 0;" % name, 1)
-
- def visitSum(self, sum, name):
- self.emit('%s_type = make_type("%s", &AST_type, NULL, 0);' %
- (name, name), 1)
- self.emit("if (!%s_type) return 0;" % name, 1)
- if sum.attributes:
- self.emit("if (!add_attributes(%s_type, %s_attributes, %d)) return 0;" %
- (name, name, len(sum.attributes)), 1)
- else:
- self.emit("if (!add_attributes(%s_type, NULL, 0)) return 0;" % name, 1)
- simple = is_simple(sum)
- for t in sum.types:
- self.visitConstructor(t, name, simple)
-
- def visitConstructor(self, cons, name, simple):
- if cons.fields:
- fields = cons.name.value+"_fields"
- else:
- fields = "NULL"
- self.emit('%s_type = make_type("%s", %s_type, %s, %d);' %
- (cons.name, cons.name, name, fields, len(cons.fields)), 1)
- self.emit("if (!%s_type) return 0;" % cons.name, 1)
- if simple:
- self.emit("%s_singleton = PyType_GenericNew(%s_type, NULL, NULL);" %
- (cons.name, cons.name), 1)
- self.emit("if (!%s_singleton) return 0;" % cons.name, 1)
-
-
-class ASTModuleVisitor(PickleVisitor):
-
- def visitModule(self, mod):
- self.emit('PyObject *ast27_parse(PyObject *self, PyObject *args);', 0)
- self.emit('static PyMethodDef ast27_methods[] = {', 0)
- self.emit('{"parse", ast27_parse, METH_VARARGS, "Parse string into typed AST."},', 1)
- self.emit('{NULL, NULL, 0, NULL}', 1)
- self.emit('};', 0)
-
- self.emit("static struct PyModuleDef _astmodule27 = {", 0)
- self.emit(' PyModuleDef_HEAD_INIT, "_ast27", NULL, 0, ast27_methods', 0)
- self.emit("};", 0)
- self.emit("PyMODINIT_FUNC", 0)
- self.emit("PyInit__ast27(void)", 0)
- self.emit("{", 0)
- self.emit("PyObject *m, *d;", 1)
- self.emit("if (!init_types()) return NULL;", 1)
- self.emit('m = PyModule_Create(&_astmodule27);', 1)
- self.emit("if (!m) return NULL;", 1)
- self.emit("d = PyModule_GetDict(m);", 1)
- self.emit('if (PyDict_SetItemString(d, "AST", (PyObject*)&AST_type) < 0) return NULL;', 1)
- self.emit('if (PyModule_AddIntMacro(m, PyCF_ONLY_AST) < 0)', 1)
- self.emit("return NULL;", 2)
- for dfn in mod.dfns:
- self.visit(dfn)
- self.emit("return m;", 1)
- self.emit("}", 0)
-
- def visitProduct(self, prod, name):
- self.addObj(name)
-
- def visitSum(self, sum, name):
- self.addObj(name)
- for t in sum.types:
- self.visitConstructor(t, name)
-
- def visitConstructor(self, cons, name):
- self.addObj(cons.name)
-
- def addObj(self, name):
- self.emit('if (PyDict_SetItemString(d, "%s", (PyObject*)%s_type) < 0) return NULL;' % (name, name), 1)
-
-
-_SPECIALIZED_SEQUENCES = ('stmt', 'expr')
-
-def find_sequence(fields, doing_specialization):
- """Return True if any field uses a sequence."""
- for f in fields:
- if f.seq:
- if not doing_specialization:
- return True
- if str(f.type) not in _SPECIALIZED_SEQUENCES:
- return True
- return False
-
-def has_sequence(types, doing_specialization):
- for t in types:
- if find_sequence(t.fields, doing_specialization):
- return True
- return False
-
-
-class StaticVisitor(PickleVisitor):
- CODE = '''Very simple, always emit this static code. Override CODE'''
-
- def visit(self, object):
- self.emit(self.CODE, 0, reflow=False)
-
-
-class ObjVisitor(PickleVisitor):
-
- def func_begin(self, name):
- ctype = get_c_type(name)
- self.emit("PyObject*", 0)
- self.emit("ast2obj_%s(void* _o)" % (name), 0)
- self.emit("{", 0)
- self.emit("%s o = (%s)_o;" % (ctype, ctype), 1)
- self.emit("PyObject *result = NULL, *value = NULL;", 1)
- self.emit('if (!o) {', 1)
- self.emit("Py_INCREF(Py_None);", 2)
- self.emit('return Py_None;', 2)
- self.emit("}", 1)
- self.emit('', 0)
-
- def func_end(self):
- self.emit("return result;", 1)
- self.emit("failed:", 0)
- self.emit("Py_XDECREF(value);", 1)
- self.emit("Py_XDECREF(result);", 1)
- self.emit("return NULL;", 1)
- self.emit("}", 0)
- self.emit("", 0)
-
- def visitSum(self, sum, name):
- if is_simple(sum):
- self.simpleSum(sum, name)
- return
- self.func_begin(name)
- self.emit("switch (o->kind) {", 1)
- for i in range(len(sum.types)):
- t = sum.types[i]
- self.visitConstructor(t, i + 1, name)
- self.emit("}", 1)
- for a in sum.attributes:
- self.emit("value = ast2obj_%s(o->%s);" % (a.type, a.name), 1)
- self.emit("if (!value) goto failed;", 1)
- self.emit('if (PyObject_SetAttrString(result, "%s", value) < 0)' % a.name, 1)
- self.emit('goto failed;', 2)
- self.emit('Py_DECREF(value);', 1)
- self.func_end()
-
- def simpleSum(self, sum, name):
- self.emit("PyObject* ast2obj_%s(%s_ty o)" % (name, name), 0)
- self.emit("{", 0)
- self.emit("switch(o) {", 1)
- for t in sum.types:
- self.emit("case %s:" % t.name, 2)
- self.emit("Py_INCREF(%s_singleton);" % t.name, 3)
- self.emit("return %s_singleton;" % t.name, 3)
- self.emit("default:", 2)
- self.emit('/* should never happen, but just in case ... */', 3)
- code = "PyErr_Format(PyExc_SystemError, \"unknown %s found\");" % name
- self.emit(code, 3, reflow=False)
- self.emit("return NULL;", 3)
- self.emit("}", 1)
- self.emit("}", 0)
-
- def visitProduct(self, prod, name):
- self.func_begin(name)
- self.emit("result = PyType_GenericNew(%s_type, NULL, NULL);" % name, 1);
- self.emit("if (!result) return NULL;", 1)
- for field in prod.fields:
- self.visitField(field, name, 1, True)
- self.func_end()
-
- def visitConstructor(self, cons, enum, name):
- self.emit("case %s_kind:" % cons.name, 1)
- self.emit("result = PyType_GenericNew(%s_type, NULL, NULL);" % cons.name, 2);
- self.emit("if (!result) goto failed;", 2)
- for f in cons.fields:
- self.visitField(f, cons.name, 2, False)
- self.emit("break;", 2)
-
- def visitField(self, field, name, depth, product):
- def emit(s, d):
- self.emit(s, depth + d)
- if product:
- value = "o->%s" % field.name
- else:
- value = "o->v.%s.%s" % (name, field.name)
- self.set(field, value, depth)
- emit("if (!value) goto failed;", 0)
- emit('if (PyObject_SetAttrString(result, "%s", value) == -1)' % field.name, 0)
- emit("goto failed;", 1)
- emit("Py_DECREF(value);", 0)
-
- def emitSeq(self, field, value, depth, emit):
- emit("seq = %s;" % value, 0)
- emit("n = asdl_seq_LEN(seq);", 0)
- emit("value = PyList_New(n);", 0)
- emit("if (!value) goto failed;", 0)
- emit("for (i = 0; i < n; i++) {", 0)
- self.set("value", field, "asdl_seq_GET(seq, i)", depth + 1)
- emit("if (!value1) goto failed;", 1)
- emit("PyList_SET_ITEM(value, i, value1);", 1)
- emit("value1 = NULL;", 1)
- emit("}", 0)
-
- def set(self, field, value, depth):
- if field.seq:
- # XXX should really check for is_simple, but that requires a symbol table
- if field.type.value == "cmpop":
- # While the sequence elements are stored as void*,
- # ast2obj_cmpop expects an enum
- self.emit("{", depth)
- self.emit("int i, n = asdl_seq_LEN(%s);" % value, depth+1)
- self.emit("value = PyList_New(n);", depth+1)
- self.emit("if (!value) goto failed;", depth+1)
- self.emit("for(i = 0; i < n; i++)", depth+1)
- # This cannot fail, so no need for error handling
- self.emit("PyList_SET_ITEM(value, i, ast2obj_cmpop((cmpop_ty)asdl_seq_GET(%s, i)));" % value,
- depth+2, reflow=False)
- self.emit("}", depth)
- else:
- self.emit("value = ast2obj_list(%s, ast2obj_%s);" % (value, field.type), depth)
- else:
- ctype = get_c_type(field.type)
- self.emit("value = ast2obj_%s(%s);" % (field.type, value), depth, reflow=False)
-
-
-class PartingShots(StaticVisitor):
-
- CODE = """
-PyObject* Ta27AST_mod2obj(mod_ty t)
-{
- init_types();
- return ast2obj_mod(t);
-}
-
-/* mode is 0 for "exec", 1 for "eval" and 2 for "single" input */
-mod_ty Ta27AST_obj2mod(PyObject* ast, PyArena* arena, int mode)
-{
- mod_ty res;
- PyObject *req_type[3];
- char *req_name[3];
- int isinstance;
-
- req_type[0] = (PyObject*)Module_type;
- req_type[1] = (PyObject*)Expression_type;
- req_type[2] = (PyObject*)Interactive_type;
-
- req_name[0] = "Module";
- req_name[1] = "Expression";
- req_name[2] = "Interactive";
-
- assert(0 <= mode && mode <= 2);
-
- init_types();
-
- isinstance = PyObject_IsInstance(ast, req_type[mode]);
- if (isinstance == -1)
- return NULL;
- if (!isinstance) {
- PyErr_Format(PyExc_TypeError, "expected %s node, got %.400s",
- req_name[mode], Py_TYPE(ast)->tp_name);
- return NULL;
- }
- if (obj2ast_mod(ast, &res, arena) != 0)
- return NULL;
- else
- return res;
-}
-
-int Ta27AST_Check(PyObject* obj)
-{
- init_types();
- return PyObject_IsInstance(obj, (PyObject*)&AST_type);
-}
-"""
-
-class ChainOfVisitors:
- def __init__(self, *visitors):
- self.visitors = visitors
-
- def visit(self, object):
- for v in self.visitors:
- v.visit(object)
- v.emit("", 0)
-
-common_msg = "/* File automatically generated by %s. */\n\n"
-
-c_file_msg = """
-/*
- __version__ %s.
-
- This module must be committed separately after each AST grammar change;
- The __version__ number is set to the revision number of the commit
- containing the grammar change.
-*/
-
-"""
-
-def main(srcfile):
- argv0 = sys.argv[0]
- components = argv0.split(os.sep)
- argv0 = os.sep.join(components[-2:])
- auto_gen_msg = common_msg % argv0
- mod = asdl.parse(srcfile)
- mod.version = "82160"
- if not asdl.check(mod):
- sys.exit(1)
- if INC_DIR:
- p = "%s/%s-ast.h" % (INC_DIR, mod.name)
- f = open(p, "wb")
- f.write(auto_gen_msg)
- f.write('#include "asdl.h"\n\n')
- c = ChainOfVisitors(TypeDefVisitor(f),
- StructVisitor(f),
- PrototypeVisitor(f),
- )
- c.visit(mod)
- f.write("PyObject* Ta27AST_mod2obj(mod_ty t);\n")
- f.write("mod_ty Ta27AST_obj2mod(PyObject* ast, PyArena* arena, int mode);\n")
- f.write("int Ta27AST_Check(PyObject* obj);\n")
- f.close()
-
- if SRC_DIR:
- p = os.path.join(SRC_DIR, str(mod.name) + "-ast.c")
- f = open(p, "wb")
- f.write(auto_gen_msg)
- f.write(c_file_msg % mod.version)
- f.write('#include "Python.h"\n')
- f.write('#include "%s-ast.h"\n' % mod.name)
- f.write('\n')
- f.write("static PyTypeObject AST_type;\n")
- v = ChainOfVisitors(
- PyTypesDeclareVisitor(f),
- PyTypesVisitor(f),
- Obj2ModPrototypeVisitor(f),
- FunctionVisitor(f),
- ObjVisitor(f),
- Obj2ModVisitor(f),
- ASTModuleVisitor(f),
- PartingShots(f),
- )
- v.visit(mod)
- f.close()
-
-if __name__ == "__main__":
- import sys
- import getopt
-
- INC_DIR = ''
- SRC_DIR = ''
- opts, args = getopt.getopt(sys.argv[1:], "h:c:")
- if len(opts) != 1:
- print "Must specify exactly one output file"
- sys.exit(1)
- for o, v in opts:
- if o == '-h':
- INC_DIR = v
- if o == '-c':
- SRC_DIR = v
- if len(args) != 1:
- print "Must specify single input file"
- sys.exit(1)
- main(args[0])
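
To make the generator above concrete: for a fieldless ("simple") sum,
TypeDefVisitor.simple_sum numbers the constructors from 1 and wraps them in a
C enum whose typedef name carries the "_ty" suffix added by get_c_type. A
small standalone sketch of that string construction (the sum used here is
only an example):

# Mirrors TypeDefVisitor.simple_sum for a sum such as
#   unaryop = Invert | Not | UAdd | USub
name = "unaryop"
constructors = ["Invert", "Not", "UAdd", "USub"]
enums = ", ".join("%s=%d" % (c, i + 1) for i, c in enumerate(constructors))
print("typedef enum _%s { %s } %s_ty;" % (name, enums, name))
# -> typedef enum _unaryop { Invert=1, Not=2, UAdd=3, USub=4 } unaryop_ty;
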
diff --git a/ast27/Parser/spark.py b/ast27/Parser/spark.py
deleted file mode 100644
index b064d62..0000000
--- a/ast27/Parser/spark.py
+++ /dev/null
@@ -1,839 +0,0 @@
-# Copyright (c) 1998-2002 John Aycock
-#
-# Permission is hereby granted, free of charge, to any person obtaining
-# a copy of this software and associated documentation files (the
-# "Software"), to deal in the Software without restriction, including
-# without limitation the rights to use, copy, modify, merge, publish,
-# distribute, sublicense, and/or sell copies of the Software, and to
-# permit persons to whom the Software is furnished to do so, subject to
-# the following conditions:
-#
-# The above copyright notice and this permission notice shall be
-# included in all copies or substantial portions of the Software.
-#
-# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
-# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
-# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
-# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
-# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-
-__version__ = 'SPARK-0.7 (pre-alpha-5)'
-
-import re
-import string
-
-def _namelist(instance):
- namelist, namedict, classlist = [], {}, [instance.__class__]
- for c in classlist:
- for b in c.__bases__:
- classlist.append(b)
- for name in c.__dict__.keys():
- if not namedict.has_key(name):
- namelist.append(name)
- namedict[name] = 1
- return namelist
-
-class GenericScanner:
- def __init__(self, flags=0):
- pattern = self.reflect()
- self.re = re.compile(pattern, re.VERBOSE|flags)
-
- self.index2func = {}
- for name, number in self.re.groupindex.items():
- self.index2func[number-1] = getattr(self, 't_' + name)
-
- def makeRE(self, name):
- doc = getattr(self, name).__doc__
- rv = '(?P<%s>%s)' % (name[2:], doc)
- return rv
-
- def reflect(self):
- rv = []
- for name in _namelist(self):
- if name[:2] == 't_' and name != 't_default':
- rv.append(self.makeRE(name))
-
- rv.append(self.makeRE('t_default'))
- return string.join(rv, '|')
-
- def error(self, s, pos):
- print "Lexical error at position %s" % pos
- raise SystemExit
-
- def tokenize(self, s):
- pos = 0
- n = len(s)
- while pos < n:
- m = self.re.match(s, pos)
- if m is None:
- self.error(s, pos)
-
- groups = m.groups()
- for i in range(len(groups)):
- if groups[i] and self.index2func.has_key(i):
- self.index2func[i](groups[i])
- pos = m.end()
-
- def t_default(self, s):
- r'( . | \n )+'
- print "Specification error: unmatched input"
- raise SystemExit
-
-#
-# Extracted from GenericParser and made global so that [un]picking works.
-#
-class _State:
- def __init__(self, stateno, items):
- self.T, self.complete, self.items = [], [], items
- self.stateno = stateno
-
-class GenericParser:
- #
- # An Earley parser, as per J. Earley, "An Efficient Context-Free
- # Parsing Algorithm", CACM 13(2), pp. 94-102. Also J. C. Earley,
- # "An Efficient Context-Free Parsing Algorithm", Ph.D. thesis,
- # Carnegie-Mellon University, August 1968. New formulation of
- # the parser according to J. Aycock, "Practical Earley Parsing
- # and the SPARK Toolkit", Ph.D. thesis, University of Victoria,
- # 2001, and J. Aycock and R. N. Horspool, "Practical Earley
- # Parsing", unpublished paper, 2001.
- #
-
- def __init__(self, start):
- self.rules = {}
- self.rule2func = {}
- self.rule2name = {}
- self.collectRules()
- self.augment(start)
- self.ruleschanged = 1
-
- _NULLABLE = '\e_'
- _START = 'START'
- _BOF = '|-'
-
- #
- # When pickling, take the time to generate the full state machine;
- # some information is then extraneous, too. Unfortunately we
- # can't save the rule2func map.
- #
- def __getstate__(self):
- if self.ruleschanged:
- #
- # XXX - duplicated from parse()
- #
- self.computeNull()
- self.newrules = {}
- self.new2old = {}
- self.makeNewRules()
- self.ruleschanged = 0
- self.edges, self.cores = {}, {}
- self.states = { 0: self.makeState0() }
- self.makeState(0, self._BOF)
- #
- # XXX - should find a better way to do this..
- #
- changes = 1
- while changes:
- changes = 0
- for k, v in self.edges.items():
- if v is None:
- state, sym = k
- if self.states.has_key(state):
- self.goto(state, sym)
- changes = 1
- rv = self.__dict__.copy()
- for s in self.states.values():
- del s.items
- del rv['rule2func']
- del rv['nullable']
- del rv['cores']
- return rv
-
- def __setstate__(self, D):
- self.rules = {}
- self.rule2func = {}
- self.rule2name = {}
- self.collectRules()
- start = D['rules'][self._START][0][1][1] # Blech.
- self.augment(start)
- D['rule2func'] = self.rule2func
- D['makeSet'] = self.makeSet_fast
- self.__dict__ = D
-
- #
- # A hook for GenericASTBuilder and GenericASTMatcher. Mess
- # thee not with this; nor shall thee toucheth the _preprocess
- # argument to addRule.
- #
- def preprocess(self, rule, func): return rule, func
-
- def addRule(self, doc, func, _preprocess=1):
- fn = func
- rules = string.split(doc)
-
- index = []
- for i in range(len(rules)):
- if rules[i] == '::=':
- index.append(i-1)
- index.append(len(rules))
-
- for i in range(len(index)-1):
- lhs = rules[index[i]]
- rhs = rules[index[i]+2:index[i+1]]
- rule = (lhs, tuple(rhs))
-
- if _preprocess:
- rule, fn = self.preprocess(rule, func)
-
- if self.rules.has_key(lhs):
- self.rules[lhs].append(rule)
- else:
- self.rules[lhs] = [ rule ]
- self.rule2func[rule] = fn
- self.rule2name[rule] = func.__name__[2:]
- self.ruleschanged = 1
-
- def collectRules(self):
- for name in _namelist(self):
- if name[:2] == 'p_':
- func = getattr(self, name)
- doc = func.__doc__
- self.addRule(doc, func)
-
- def augment(self, start):
- rule = '%s ::= %s %s' % (self._START, self._BOF, start)
- self.addRule(rule, lambda args: args[1], 0)
-
- def computeNull(self):
- self.nullable = {}
- tbd = []
-
- for rulelist in self.rules.values():
- lhs = rulelist[0][0]
- self.nullable[lhs] = 0
- for rule in rulelist:
- rhs = rule[1]
- if len(rhs) == 0:
- self.nullable[lhs] = 1
- continue
- #
- # We only need to consider rules which
- # consist entirely of nonterminal symbols.
- # This should be a savings on typical
- # grammars.
- #
- for sym in rhs:
- if not self.rules.has_key(sym):
- break
- else:
- tbd.append(rule)
- changes = 1
- while changes:
- changes = 0
- for lhs, rhs in tbd:
- if self.nullable[lhs]:
- continue
- for sym in rhs:
- if not self.nullable[sym]:
- break
- else:
- self.nullable[lhs] = 1
- changes = 1
-
- def makeState0(self):
- s0 = _State(0, [])
- for rule in self.newrules[self._START]:
- s0.items.append((rule, 0))
- return s0
-
- def finalState(self, tokens):
- #
- # Yuck.
- #
- if len(self.newrules[self._START]) == 2 and len(tokens) == 0:
- return 1
- start = self.rules[self._START][0][1][1]
- return self.goto(1, start)
-
- def makeNewRules(self):
- worklist = []
- for rulelist in self.rules.values():
- for rule in rulelist:
- worklist.append((rule, 0, 1, rule))
-
- for rule, i, candidate, oldrule in worklist:
- lhs, rhs = rule
- n = len(rhs)
- while i < n:
- sym = rhs[i]
- if not self.rules.has_key(sym) or \
- not self.nullable[sym]:
- candidate = 0
- i = i + 1
- continue
-
- newrhs = list(rhs)
- newrhs[i] = self._NULLABLE+sym
- newrule = (lhs, tuple(newrhs))
- worklist.append((newrule, i+1,
- candidate, oldrule))
- candidate = 0
- i = i + 1
- else:
- if candidate:
- lhs = self._NULLABLE+lhs
- rule = (lhs, rhs)
- if self.newrules.has_key(lhs):
- self.newrules[lhs].append(rule)
- else:
- self.newrules[lhs] = [ rule ]
- self.new2old[rule] = oldrule
-
- def typestring(self, token):
- return None
-
- def error(self, token):
- print "Syntax error at or near `%s' token" % token
- raise SystemExit
-
- def parse(self, tokens):
- sets = [ [(1,0), (2,0)] ]
- self.links = {}
-
- if self.ruleschanged:
- self.computeNull()
- self.newrules = {}
- self.new2old = {}
- self.makeNewRules()
- self.ruleschanged = 0
- self.edges, self.cores = {}, {}
- self.states = { 0: self.makeState0() }
- self.makeState(0, self._BOF)
-
- for i in xrange(len(tokens)):
- sets.append([])
-
- if sets[i] == []:
- break
- self.makeSet(tokens[i], sets, i)
- else:
- sets.append([])
- self.makeSet(None, sets, len(tokens))
-
- #_dump(tokens, sets, self.states)
-
- finalitem = (self.finalState(tokens), 0)
- if finalitem not in sets[-2]:
- if len(tokens) > 0:
- self.error(tokens[i-1])
- else:
- self.error(None)
-
- return self.buildTree(self._START, finalitem,
- tokens, len(sets)-2)
-
- def isnullable(self, sym):
- #
- # For symbols in G_e only. If we weren't supporting 1.5,
- # could just use sym.startswith().
- #
- return self._NULLABLE == sym[0:len(self._NULLABLE)]
-
- def skip(self, (lhs, rhs), pos=0):
- n = len(rhs)
- while pos < n:
- if not self.isnullable(rhs[pos]):
- break
- pos = pos + 1
- return pos
-
- def makeState(self, state, sym):
- assert sym is not None
- #
- # Compute \epsilon-kernel state's core and see if
- # it exists already.
- #
- kitems = []
- for rule, pos in self.states[state].items:
- lhs, rhs = rule
- if rhs[pos:pos+1] == (sym,):
- kitems.append((rule, self.skip(rule, pos+1)))
- core = kitems
-
- core.sort()
- tcore = tuple(core)
- if self.cores.has_key(tcore):
- return self.cores[tcore]
- #
- # Nope, doesn't exist. Compute it and the associated
- # \epsilon-nonkernel state together; we'll need it right away.
- #
- k = self.cores[tcore] = len(self.states)
- K, NK = _State(k, kitems), _State(k+1, [])
- self.states[k] = K
- predicted = {}
-
- edges = self.edges
- rules = self.newrules
- for X in K, NK:
- worklist = X.items
- for item in worklist:
- rule, pos = item
- lhs, rhs = rule
- if pos == len(rhs):
- X.complete.append(rule)
- continue
-
- nextSym = rhs[pos]
- key = (X.stateno, nextSym)
- if not rules.has_key(nextSym):
- if not edges.has_key(key):
- edges[key] = None
- X.T.append(nextSym)
- else:
- edges[key] = None
- if not predicted.has_key(nextSym):
- predicted[nextSym] = 1
- for prule in rules[nextSym]:
- ppos = self.skip(prule)
- new = (prule, ppos)
- NK.items.append(new)
- #
- # Problem: we know K needs generating, but we
- # don't yet know about NK. Can't commit anything
- # regarding NK to self.edges until we're sure. Should
- # we delay committing on both K and NK to avoid this
- # hacky code? This creates other problems..
- #
- if X is K:
- edges = {}
-
- if NK.items == []:
- return k
-
- #
- # Check for \epsilon-nonkernel's core. Unfortunately we
- # need to know the entire set of predicted nonterminals
- # to do this without accidentally duplicating states.
- #
- core = predicted.keys()
- core.sort()
- tcore = tuple(core)
- if self.cores.has_key(tcore):
- self.edges[(k, None)] = self.cores[tcore]
- return k
-
- nk = self.cores[tcore] = self.edges[(k, None)] = NK.stateno
- self.edges.update(edges)
- self.states[nk] = NK
- return k
-
- def goto(self, state, sym):
- key = (state, sym)
- if not self.edges.has_key(key):
- #
- # No transitions from state on sym.
- #
- return None
-
- rv = self.edges[key]
- if rv is None:
- #
- # Target state isn't generated yet. Remedy this.
- #
- rv = self.makeState(state, sym)
- self.edges[key] = rv
- return rv
-
- def gotoT(self, state, t):
- return [self.goto(state, t)]
-
- def gotoST(self, state, st):
- rv = []
- for t in self.states[state].T:
- if st == t:
- rv.append(self.goto(state, t))
- return rv
-
- def add(self, set, item, i=None, predecessor=None, causal=None):
- if predecessor is None:
- if item not in set:
- set.append(item)
- else:
- key = (item, i)
- if item not in set:
- self.links[key] = []
- set.append(item)
- self.links[key].append((predecessor, causal))
-
- def makeSet(self, token, sets, i):
- cur, next = sets[i], sets[i+1]
-
- ttype = token is not None and self.typestring(token) or None
- if ttype is not None:
- fn, arg = self.gotoT, ttype
- else:
- fn, arg = self.gotoST, token
-
- for item in cur:
- ptr = (item, i)
- state, parent = item
- add = fn(state, arg)
- for k in add:
- if k is not None:
- self.add(next, (k, parent), i+1, ptr)
- nk = self.goto(k, None)
- if nk is not None:
- self.add(next, (nk, i+1))
-
- if parent == i:
- continue
-
- for rule in self.states[state].complete:
- lhs, rhs = rule
- for pitem in sets[parent]:
- pstate, pparent = pitem
- k = self.goto(pstate, lhs)
- if k is not None:
- why = (item, i, rule)
- pptr = (pitem, parent)
- self.add(cur, (k, pparent),
- i, pptr, why)
- nk = self.goto(k, None)
- if nk is not None:
- self.add(cur, (nk, i))
-
- def makeSet_fast(self, token, sets, i):
- #
- # Call *only* when the entire state machine has been built!
- # It relies on self.edges being filled in completely, and
- # then duplicates and inlines code to boost speed at the
- # cost of extreme ugliness.
- #
- cur, next = sets[i], sets[i+1]
- ttype = token is not None and self.typestring(token) or None
-
- for item in cur:
- ptr = (item, i)
- state, parent = item
- if ttype is not None:
- k = self.edges.get((state, ttype), None)
- if k is not None:
- #self.add(next, (k, parent), i+1, ptr)
- #INLINED --v
- new = (k, parent)
- key = (new, i+1)
- if new not in next:
- self.links[key] = []
- next.append(new)
- self.links[key].append((ptr, None))
- #INLINED --^
- #nk = self.goto(k, None)
- nk = self.edges.get((k, None), None)
- if nk is not None:
- #self.add(next, (nk, i+1))
- #INLINED --v
- new = (nk, i+1)
- if new not in next:
- next.append(new)
- #INLINED --^
- else:
- add = self.gotoST(state, token)
- for k in add:
- if k is not None:
- self.add(next, (k, parent), i+1, ptr)
- #nk = self.goto(k, None)
- nk = self.edges.get((k, None), None)
- if nk is not None:
- self.add(next, (nk, i+1))
-
- if parent == i:
- continue
-
- for rule in self.states[state].complete:
- lhs, rhs = rule
- for pitem in sets[parent]:
- pstate, pparent = pitem
- #k = self.goto(pstate, lhs)
- k = self.edges.get((pstate, lhs), None)
- if k is not None:
- why = (item, i, rule)
- pptr = (pitem, parent)
- #self.add(cur, (k, pparent),
- # i, pptr, why)
- #INLINED --v
- new = (k, pparent)
- key = (new, i)
- if new not in cur:
- self.links[key] = []
- cur.append(new)
- self.links[key].append((pptr, why))
- #INLINED --^
- #nk = self.goto(k, None)
- nk = self.edges.get((k, None), None)
- if nk is not None:
- #self.add(cur, (nk, i))
- #INLINED --v
- new = (nk, i)
- if new not in cur:
- cur.append(new)
- #INLINED --^
-
- def predecessor(self, key, causal):
- for p, c in self.links[key]:
- if c == causal:
- return p
- assert 0
-
- def causal(self, key):
- links = self.links[key]
- if len(links) == 1:
- return links[0][1]
- choices = []
- rule2cause = {}
- for p, c in links:
- rule = c[2]
- choices.append(rule)
- rule2cause[rule] = c
- return rule2cause[self.ambiguity(choices)]
-
- def deriveEpsilon(self, nt):
- if len(self.newrules[nt]) > 1:
- rule = self.ambiguity(self.newrules[nt])
- else:
- rule = self.newrules[nt][0]
- #print rule
-
- rhs = rule[1]
- attr = [None] * len(rhs)
-
- for i in range(len(rhs)-1, -1, -1):
- attr[i] = self.deriveEpsilon(rhs[i])
- return self.rule2func[self.new2old[rule]](attr)
-
- def buildTree(self, nt, item, tokens, k):
- state, parent = item
-
- choices = []
- for rule in self.states[state].complete:
- if rule[0] == nt:
- choices.append(rule)
- rule = choices[0]
- if len(choices) > 1:
- rule = self.ambiguity(choices)
- #print rule
-
- rhs = rule[1]
- attr = [None] * len(rhs)
-
- for i in range(len(rhs)-1, -1, -1):
- sym = rhs[i]
- if not self.newrules.has_key(sym):
- if sym != self._BOF:
- attr[i] = tokens[k-1]
- key = (item, k)
- item, k = self.predecessor(key, None)
- #elif self.isnullable(sym):
- elif self._NULLABLE == sym[0:len(self._NULLABLE)]:
- attr[i] = self.deriveEpsilon(sym)
- else:
- key = (item, k)
- why = self.causal(key)
- attr[i] = self.buildTree(sym, why[0],
- tokens, why[1])
- item, k = self.predecessor(key, why)
- return self.rule2func[self.new2old[rule]](attr)
-
- def ambiguity(self, rules):
- #
- # XXX - problem here and in collectRules() if the same rule
- # appears in >1 method. Also undefined results if rules
- # causing the ambiguity appear in the same method.
- #
- sortlist = []
- name2index = {}
- for i in range(len(rules)):
- lhs, rhs = rule = rules[i]
- name = self.rule2name[self.new2old[rule]]
- sortlist.append((len(rhs), name))
- name2index[name] = i
- sortlist.sort()
- list = map(lambda (a,b): b, sortlist)
- return rules[name2index[self.resolve(list)]]
-
- def resolve(self, list):
- #
- # Resolve ambiguity in favor of the shortest RHS.
- # Since we walk the tree from the top down, this
- # should effectively resolve in favor of a "shift".
- #
- return list[0]
-
-#
-# GenericASTBuilder automagically constructs a concrete/abstract syntax tree
-# for a given input. The extra argument is a class (not an instance!)
-# which supports the "__setslice__" and "__len__" methods.
-#
-# XXX - silently overrides any user code in methods.
-#
-
-class GenericASTBuilder(GenericParser):
- def __init__(self, AST, start):
- GenericParser.__init__(self, start)
- self.AST = AST
-
- def preprocess(self, rule, func):
- rebind = lambda lhs, self=self: \
- lambda args, lhs=lhs, self=self: \
- self.buildASTNode(args, lhs)
- lhs, rhs = rule
- return rule, rebind(lhs)
-
- def buildASTNode(self, args, lhs):
- children = []
- for arg in args:
- if isinstance(arg, self.AST):
- children.append(arg)
- else:
- children.append(self.terminal(arg))
- return self.nonterminal(lhs, children)
-
- def terminal(self, token): return token
-
- def nonterminal(self, type, args):
- rv = self.AST(type)
- rv[:len(args)] = args
- return rv
-
-#
-# GenericASTTraversal is a Visitor pattern according to Design Patterns. For
-# each node it attempts to invoke the method n_<node type>, falling
-# back onto the default() method if the n_* can't be found. The preorder
-# traversal also looks for an exit hook named n_<node type>_exit (no default
-# routine is called if it's not found). To prematurely halt traversal
-# of a subtree, call the prune() method -- this only makes sense for a
-# preorder traversal. Node type is determined via the typestring() method.
-#
-
-class GenericASTTraversalPruningException:
- pass
-
-class GenericASTTraversal:
- def __init__(self, ast):
- self.ast = ast
-
- def typestring(self, node):
- return node.type
-
- def prune(self):
- raise GenericASTTraversalPruningException
-
- def preorder(self, node=None):
- if node is None:
- node = self.ast
-
- try:
- name = 'n_' + self.typestring(node)
- if hasattr(self, name):
- func = getattr(self, name)
- func(node)
- else:
- self.default(node)
- except GenericASTTraversalPruningException:
- return
-
- for kid in node:
- self.preorder(kid)
-
- name = name + '_exit'
- if hasattr(self, name):
- func = getattr(self, name)
- func(node)
-
- def postorder(self, node=None):
- if node is None:
- node = self.ast
-
- for kid in node:
- self.postorder(kid)
-
- name = 'n_' + self.typestring(node)
- if hasattr(self, name):
- func = getattr(self, name)
- func(node)
- else:
- self.default(node)
-
-
- def default(self, node):
- pass
-
-#
-# GenericASTMatcher. AST nodes must have "__getitem__" and "__cmp__"
-# implemented.
-#
-# XXX - makes assumptions about how GenericParser walks the parse tree.
-#
-
-class GenericASTMatcher(GenericParser):
- def __init__(self, start, ast):
- GenericParser.__init__(self, start)
- self.ast = ast
-
- def preprocess(self, rule, func):
- rebind = lambda func, self=self: \
- lambda args, func=func, self=self: \
- self.foundMatch(args, func)
- lhs, rhs = rule
- rhslist = list(rhs)
- rhslist.reverse()
-
- return (lhs, tuple(rhslist)), rebind(func)
-
- def foundMatch(self, args, func):
- func(args[-1])
- return args[-1]
-
- def match_r(self, node):
- self.input.insert(0, node)
- children = 0
-
- for child in node:
- if children == 0:
- self.input.insert(0, '(')
- children = children + 1
- self.match_r(child)
-
- if children > 0:
- self.input.insert(0, ')')
-
- def match(self, ast=None):
- if ast is None:
- ast = self.ast
- self.input = []
-
- self.match_r(ast)
- self.parse(self.input)
-
- def resolve(self, list):
- #
- # Resolve ambiguity in favor of the longest RHS.
- #
- return list[-1]
-
-def _dump(tokens, sets, states):
- for i in range(len(sets)):
- print 'set', i
- for item in sets[i]:
- print '\t', item
- for (lhs, rhs), pos in states[item[0]].items:
- print '\t\t', lhs, '::=',
- print string.join(rhs[:pos]),
- print '.',
- print string.join(rhs[pos:])
- if i < len(tokens):
- print
- print 'token', str(tokens[i])
- print
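
The comment block above spark.py's GenericASTTraversal describes its hook
protocol: each node is routed to an n_<node type> method, default() is the
fallback, and an optional n_<node type>_exit hook runs after the children. A
minimal standalone sketch of that protocol (Tree, Walker and Printer are
illustrative names, not part of the file above; pruning is omitted):

class Tree(object):
    def __init__(self, type, kids=()):
        self.type = type
        self.kids = list(kids)

    def __iter__(self):
        return iter(self.kids)

class Walker(object):
    def typestring(self, node):
        return node.type

    def default(self, node):
        pass

    def preorder(self, node):
        # n_<type> before the children, n_<type>_exit (if defined) after.
        name = "n_" + self.typestring(node)
        getattr(self, name, self.default)(node)
        for kid in node:
            self.preorder(kid)
        exit_hook = getattr(self, name + "_exit", None)
        if exit_hook is not None:
            exit_hook(node)

class Printer(Walker):
    def n_name(self, node):
        print("saw a name node")

    def n_expr_exit(self, node):
        print("finished an expr node")

Printer().preorder(Tree("expr", [Tree("name"), Tree("number")]))
# prints: saw a name node / finished an expr node
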
diff --git a/ast35/Grammar/Grammar b/ast35/Grammar/Grammar
deleted file mode 100644
index 7aef9be..0000000
--- a/ast35/Grammar/Grammar
+++ /dev/null
@@ -1,162 +0,0 @@
-# Grammar for Python
-
-# Note: Changing the grammar specified in this file will most likely
-# require corresponding changes in the parser module
-# (../Modules/parsermodule.c). If you can't make the changes to
-# that module yourself, please co-ordinate the required changes
-# with someone who can; ask around on python-dev for help. Fred
-# Drake <fdrake at acm.org> will probably be listening there.
-
-# NOTE WELL: You should also follow all the steps listed at
-# https://docs.python.org/devguide/grammar.html
-
-# Start symbols for the grammar:
-# single_input is a single interactive statement;
-# file_input is a module or sequence of commands read from an input file;
-# eval_input is the input for the eval() functions.
-# func_type_input is a PEP 484 Python 2 function type comment
-# NB: compound_stmt in single_input is followed by extra NEWLINE!
-# NB: due to the way TYPE_COMMENT is tokenized it will always be followed by a
-# NEWLINE
-single_input: NEWLINE | simple_stmt | compound_stmt NEWLINE
-file_input: (NEWLINE | stmt)* ENDMARKER
-eval_input: testlist NEWLINE* ENDMARKER
-
-decorator: '@' dotted_name [ '(' [arglist] ')' ] NEWLINE
-decorators: decorator+
-decorated: decorators (classdef | funcdef | async_funcdef)
-
-async_funcdef: ASYNC funcdef
-funcdef: 'def' NAME parameters ['->' test] ':' [TYPE_COMMENT] suite
-
-parameters: '(' [typedargslist] ')'
-typedargslist: (tfpdef ['=' test] (',' [TYPE_COMMENT] tfpdef ['=' test])* [',' [TYPE_COMMENT]
- ['*' [tfpdef] (',' [TYPE_COMMENT] tfpdef ['=' test])* [',' [TYPE_COMMENT] '**' tfpdef] | '**' tfpdef]] [TYPE_COMMENT]
- | '*' [tfpdef] (',' [TYPE_COMMENT] tfpdef ['=' test])* [',' [TYPE_COMMENT] '**' tfpdef] [TYPE_COMMENT]
- | '**' tfpdef [TYPE_COMMENT])
-tfpdef: NAME [':' test]
-varargslist: (vfpdef ['=' test] (',' vfpdef ['=' test])* [','
- ['*' [vfpdef] (',' vfpdef ['=' test])* [',' '**' vfpdef] | '**' vfpdef]]
- | '*' [vfpdef] (',' vfpdef ['=' test])* [',' '**' vfpdef] | '**' vfpdef)
-vfpdef: NAME
-
-stmt: simple_stmt | compound_stmt
-simple_stmt: small_stmt (';' small_stmt)* [';'] NEWLINE
-small_stmt: (expr_stmt | del_stmt | pass_stmt | flow_stmt |
- import_stmt | global_stmt | nonlocal_stmt | assert_stmt)
-expr_stmt: testlist_star_expr (augassign (yield_expr|testlist) |
- ('=' (yield_expr|testlist_star_expr))* [TYPE_COMMENT])
-testlist_star_expr: (test|star_expr) (',' (test|star_expr))* [',']
-augassign: ('+=' | '-=' | '*=' | '@=' | '/=' | '%=' | '&=' | '|=' | '^=' |
- '<<=' | '>>=' | '**=' | '//=')
-# For normal assignments, additional restrictions enforced by the interpreter
-del_stmt: 'del' exprlist
-pass_stmt: 'pass'
-flow_stmt: break_stmt | continue_stmt | return_stmt | raise_stmt | yield_stmt
-break_stmt: 'break'
-continue_stmt: 'continue'
-return_stmt: 'return' [testlist]
-yield_stmt: yield_expr
-raise_stmt: 'raise' [test ['from' test]]
-import_stmt: import_name | import_from
-import_name: 'import' dotted_as_names
-# note below: the ('.' | '...') is necessary because '...' is tokenized as ELLIPSIS
-import_from: ('from' (('.' | '...')* dotted_name | ('.' | '...')+)
- 'import' ('*' | '(' import_as_names ')' | import_as_names))
-import_as_name: NAME ['as' NAME]
-dotted_as_name: dotted_name ['as' NAME]
-import_as_names: import_as_name (',' import_as_name)* [',']
-dotted_as_names: dotted_as_name (',' dotted_as_name)*
-dotted_name: NAME ('.' NAME)*
-global_stmt: 'global' NAME (',' NAME)*
-nonlocal_stmt: 'nonlocal' NAME (',' NAME)*
-assert_stmt: 'assert' test [',' test]
-
-compound_stmt: if_stmt | while_stmt | for_stmt | try_stmt | with_stmt | funcdef | classdef | decorated | async_stmt
-async_stmt: ASYNC (funcdef | with_stmt | for_stmt)
-if_stmt: 'if' test ':' suite ('elif' test ':' suite)* ['else' ':' suite]
-while_stmt: 'while' test ':' suite ['else' ':' suite]
-for_stmt: 'for' exprlist 'in' testlist ':' [TYPE_COMMENT] suite ['else' ':' suite]
-try_stmt: ('try' ':' suite
- ((except_clause ':' suite)+
- ['else' ':' suite]
- ['finally' ':' suite] |
- 'finally' ':' suite))
-with_stmt: 'with' with_item (',' with_item)* ':' [TYPE_COMMENT] suite
-with_item: test ['as' expr]
-# NB compile.c makes sure that the default except clause is last
-except_clause: 'except' [test ['as' NAME]]
-# the TYPE_COMMENT in suites is only parsed for funcdefs, but can't go elsewhere due to ambiguity
-suite: simple_stmt | NEWLINE [TYPE_COMMENT NEWLINE] INDENT stmt+ DEDENT
-
-test: or_test ['if' or_test 'else' test] | lambdef
-test_nocond: or_test | lambdef_nocond
-lambdef: 'lambda' [varargslist] ':' test
-lambdef_nocond: 'lambda' [varargslist] ':' test_nocond
-or_test: and_test ('or' and_test)*
-and_test: not_test ('and' not_test)*
-not_test: 'not' not_test | comparison
-comparison: expr (comp_op expr)*
-# <> isn't actually a valid comparison operator in Python. It's here for the
-# sake of a __future__ import described in PEP 401 (which really works :-)
-comp_op: '<'|'>'|'=='|'>='|'<='|'<>'|'!='|'in'|'not' 'in'|'is'|'is' 'not'
-star_expr: '*' expr
-expr: xor_expr ('|' xor_expr)*
-xor_expr: and_expr ('^' and_expr)*
-and_expr: shift_expr ('&' shift_expr)*
-shift_expr: arith_expr (('<<'|'>>') arith_expr)*
-arith_expr: term (('+'|'-') term)*
-term: factor (('*'|'@'|'/'|'%'|'//') factor)*
-factor: ('+'|'-'|'~') factor | power
-power: atom_expr ['**' factor]
-atom_expr: [AWAIT] atom trailer*
-atom: ('(' [yield_expr|testlist_comp] ')' |
- '[' [testlist_comp] ']' |
- '{' [dictorsetmaker] '}' |
- NAME | NUMBER | STRING+ | '...' | 'None' | 'True' | 'False')
-testlist_comp: (test|star_expr) ( comp_for | (',' (test|star_expr))* [','] )
-trailer: '(' [arglist] ')' | '[' subscriptlist ']' | '.' NAME
-subscriptlist: subscript (',' subscript)* [',']
-subscript: test | [test] ':' [test] [sliceop]
-sliceop: ':' [test]
-exprlist: (expr|star_expr) (',' (expr|star_expr))* [',']
-testlist: test (',' test)* [',']
-dictorsetmaker: ( ((test ':' test | '**' expr)
- (comp_for | (',' (test ':' test | '**' expr))* [','])) |
- ((test | star_expr)
- (comp_for | (',' (test | star_expr))* [','])) )
-
-classdef: 'class' NAME ['(' [arglist] ')'] ':' suite
-
-arglist: argument (',' argument)* [',']
-
-# The reason that keywords are test nodes instead of NAME is that using NAME
-# results in an ambiguity. ast.c makes sure it's a NAME.
-# "test '=' test" is really "keyword '=' test", but we have no such token.
-# These need to be in a single rule to avoid grammar that is ambiguous
-# to our LL(1) parser. Even though 'test' includes '*expr' in star_expr,
-# we explicitly match '*' here, too, to give it proper precedence.
-# Illegal combinations and orderings are blocked in ast.c:
-# multiple (test comp_for) arguments are blocked; keyword unpackings
-# that precede iterable unpackings are blocked; etc.
-argument: ( test [comp_for] |
- test '=' test |
- '**' test |
- '*' test )
-
-comp_iter: comp_for | comp_if
-comp_for: 'for' exprlist 'in' or_test [comp_iter]
-comp_if: 'if' test_nocond [comp_iter]
-
-# not used in grammar, but may appear in "node" passed from Parser to Compiler
-encoding_decl: NAME
-
-yield_expr: 'yield' [yield_arg]
-yield_arg: 'from' test | testlist
-
-func_type_input: func_type NEWLINE* ENDMARKER
-func_type: '(' [typelist] ')' '->' test
-# typelist is a modified typedargslist (see above)
-typelist: (test (',' test)* [','
- ['*' [test] (',' test)* [',' '**' test] | '**' test]]
- | '*' [test] (',' test)* [',' '**' test] | '**' test)
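The grammar deleted above differs from stock CPython 3.5 mainly in the TYPE_COMMENT occurrences and the func_type_input start symbol for PEP 484 "Python 2 style" signatures. As a minimal sketch (illustrative only, not taken from this diff), these are the source forms those productions accept:

    def scale(values, factor):
        # type: (List[float], float) -> List[float]
        return [v * factor for v in values]

    # the bare signature form matched by func_type_input:
    #     (List[float], float) -> List[float]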
diff --git a/ast35/Parser/Python.asdl b/ast35/Parser/Python.asdl
deleted file mode 100644
index e493725..0000000
--- a/ast35/Parser/Python.asdl
+++ /dev/null
@@ -1,126 +0,0 @@
--- ASDL's six builtin types are identifier, int, string, bytes, object, singleton
-
-module Python
-{
- mod = Module(stmt* body, type_ignore *type_ignores)
- | Interactive(stmt* body)
- | Expression(expr body)
- | FunctionType(expr* argtypes, expr returns)
-
- -- not really an actual node but useful in Jython's typesystem.
- | Suite(stmt* body)
-
- stmt = FunctionDef(identifier name, arguments args,
- stmt* body, expr* decorator_list, expr? returns, string? type_comment)
- | AsyncFunctionDef(identifier name, arguments args,
- stmt* body, expr* decorator_list, expr? returns, string? type_comment)
-
- | ClassDef(identifier name,
- expr* bases,
- keyword* keywords,
- stmt* body,
- expr* decorator_list)
- | Return(expr? value)
-
- | Delete(expr* targets)
- | Assign(expr* targets, expr value, string? type_comment)
- | AugAssign(expr target, operator op, expr value)
-
- -- use 'orelse' because else is a keyword in target languages
- | For(expr target, expr iter, stmt* body, stmt* orelse, string? type_comment)
- | AsyncFor(expr target, expr iter, stmt* body, stmt* orelse)
- | While(expr test, stmt* body, stmt* orelse)
- | If(expr test, stmt* body, stmt* orelse)
- | With(withitem* items, stmt* body, string? type_comment)
- | AsyncWith(withitem* items, stmt* body)
-
- | Raise(expr? exc, expr? cause)
- | Try(stmt* body, excepthandler* handlers, stmt* orelse, stmt* finalbody)
- | Assert(expr test, expr? msg)
-
- | Import(alias* names)
- | ImportFrom(identifier? module, alias* names, int? level)
-
- | Global(identifier* names)
- | Nonlocal(identifier* names)
- | Expr(expr value)
- | Pass | Break | Continue
-
- -- XXX Jython will be different
- -- col_offset is the byte offset in the utf8 string the parser uses
- attributes (int lineno, int col_offset)
-
- -- BoolOp() can use left & right?
- expr = BoolOp(boolop op, expr* values)
- | BinOp(expr left, operator op, expr right)
- | UnaryOp(unaryop op, expr operand)
- | Lambda(arguments args, expr body)
- | IfExp(expr test, expr body, expr orelse)
- | Dict(expr* keys, expr* values)
- | Set(expr* elts)
- | ListComp(expr elt, comprehension* generators)
- | SetComp(expr elt, comprehension* generators)
- | DictComp(expr key, expr value, comprehension* generators)
- | GeneratorExp(expr elt, comprehension* generators)
- -- the grammar constrains where yield expressions can occur
- | Await(expr value)
- | Yield(expr? value)
- | YieldFrom(expr value)
- -- need sequences for compare to distinguish between
- -- x < 4 < 3 and (x < 4) < 3
- | Compare(expr left, cmpop* ops, expr* comparators)
- | Call(expr func, expr* args, keyword* keywords)
- | Num(object n) -- a number as a PyObject.
- | Str(string s) -- need to specify raw, unicode, etc?
- | Bytes(bytes s)
- | NameConstant(singleton value)
- | Ellipsis
-
- -- the following expression can appear in assignment context
- | Attribute(expr value, identifier attr, expr_context ctx)
- | Subscript(expr value, slice slice, expr_context ctx)
- | Starred(expr value, expr_context ctx)
- | Name(identifier id, expr_context ctx)
- | List(expr* elts, expr_context ctx)
- | Tuple(expr* elts, expr_context ctx)
-
- -- col_offset is the byte offset in the utf8 string the parser uses
- attributes (int lineno, int col_offset)
-
- expr_context = Load | Store | Del | AugLoad | AugStore | Param
-
- slice = Slice(expr? lower, expr? upper, expr? step)
- | ExtSlice(slice* dims)
- | Index(expr value)
-
- boolop = And | Or
-
- operator = Add | Sub | Mult | MatMult | Div | Mod | Pow | LShift
- | RShift | BitOr | BitXor | BitAnd | FloorDiv
-
- unaryop = Invert | Not | UAdd | USub
-
- cmpop = Eq | NotEq | Lt | LtE | Gt | GtE | Is | IsNot | In | NotIn
-
- comprehension = (expr target, expr iter, expr* ifs)
-
- excepthandler = ExceptHandler(expr? type, identifier? name, stmt* body)
- attributes (int lineno, int col_offset)
-
- arguments = (arg* args, arg? vararg, arg* kwonlyargs, expr* kw_defaults,
- arg? kwarg, expr* defaults)
-
- arg = (identifier arg, expr? annotation)
- attributes (int lineno, int col_offset)
-
- -- keyword arguments supplied to call (NULL identifier for **kwargs)
- keyword = (identifier? arg, expr value)
-
- -- import name with optional 'as' alias.
- alias = (identifier name, identifier? asname)
-
- withitem = (expr context_expr, expr? optional_vars)
-
- type_ignore = TypeIgnore(int lineno)
-}
-
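Relative to stock CPython 3.5, the ASDL deleted above adds string? type_comment fields (FunctionDef, Assign, For, With, ...) and a type_ignore* type_ignores list on Module. A minimal sketch of how those fields surface on the Python side, assuming the package exposes the parser as typed_ast.ast35 (the import path and exact output are assumptions, not shown in this diff):

    from typed_ast import ast35

    tree = ast35.parse("x = []  # type: List[int]\n")
    print(tree.body[0].type_comment)  # the Assign node's type_comment string
    print(tree.type_ignores)          # Module's list of TypeIgnore nodes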
diff --git a/ast35/Parser/asdl.py b/ast35/Parser/asdl.py
deleted file mode 100644
index 121cdab..0000000
--- a/ast35/Parser/asdl.py
+++ /dev/null
@@ -1,375 +0,0 @@
-#-------------------------------------------------------------------------------
-# Parser for ASDL [1] definition files. Reads in an ASDL description and parses
-# it into an AST that describes it.
-#
-# The EBNF we're parsing here: Figure 1 of the paper [1]. Extended to support
-# modules and attributes after a product. Words starting with Capital letters
-# are terminals. Literal tokens are in "double quotes". Others are
-# non-terminals. Id is either TokenId or ConstructorId.
-#
-# module ::= "module" Id "{" [definitions] "}"
-# definitions ::= { TypeId "=" type }
-# type ::= product | sum
-# product ::= fields ["attributes" fields]
-# fields ::= "(" { field, "," } field ")"
-# field ::= TypeId ["?" | "*"] [Id]
-# sum ::= constructor { "|" constructor } ["attributes" fields]
-# constructor ::= ConstructorId [fields]
-#
-# [1] "The Zephyr Abstract Syntax Description Language" by Wang, et. al. See
-# http://asdl.sourceforge.net/
-#-------------------------------------------------------------------------------
-from collections import namedtuple
-import re
-
-__all__ = [
- 'builtin_types', 'parse', 'AST', 'Module', 'Type', 'Constructor',
- 'Field', 'Sum', 'Product', 'VisitorBase', 'Check', 'check']
-
-# The following classes define nodes into which the ASDL description is parsed.
-# Note: this is a "meta-AST". ASDL files (such as Python.asdl) describe the AST
-# structure used by a programming language. But ASDL files themselves need to be
-# parsed. This module parses ASDL files and uses a simple AST to represent them.
-# See the EBNF at the top of the file to understand the logical connection
-# between the various node types.
-
-builtin_types = {'identifier', 'string', 'bytes', 'int', 'object', 'singleton'}
-
-class AST:
- def __repr__(self):
- raise NotImplementedError
-
-class Module(AST):
- def __init__(self, name, dfns):
- self.name = name
- self.dfns = dfns
- self.types = {type.name: type.value for type in dfns}
-
- def __repr__(self):
- return 'Module({0.name}, {0.dfns})'.format(self)
-
-class Type(AST):
- def __init__(self, name, value):
- self.name = name
- self.value = value
-
- def __repr__(self):
- return 'Type({0.name}, {0.value})'.format(self)
-
-class Constructor(AST):
- def __init__(self, name, fields=None):
- self.name = name
- self.fields = fields or []
-
- def __repr__(self):
- return 'Constructor({0.name}, {0.fields})'.format(self)
-
-class Field(AST):
- def __init__(self, type, name=None, seq=False, opt=False):
- self.type = type
- self.name = name
- self.seq = seq
- self.opt = opt
-
- def __repr__(self):
- if self.seq:
- extra = ", seq=True"
- elif self.opt:
- extra = ", opt=True"
- else:
- extra = ""
- if self.name is None:
- return 'Field({0.type}{1})'.format(self, extra)
- else:
- return 'Field({0.type}, {0.name}{1})'.format(self, extra)
-
-class Sum(AST):
- def __init__(self, types, attributes=None):
- self.types = types
- self.attributes = attributes or []
-
- def __repr__(self):
- if self.attributes:
- return 'Sum({0.types}, {0.attributes})'.format(self)
- else:
- return 'Sum({0.types})'.format(self)
-
-class Product(AST):
- def __init__(self, fields, attributes=None):
- self.fields = fields
- self.attributes = attributes or []
-
- def __repr__(self):
- if self.attributes:
- return 'Product({0.fields}, {0.attributes})'.format(self)
- else:
- return 'Product({0.fields})'.format(self)
-
-# A generic visitor for the meta-AST that describes ASDL. This can be used by
-# emitters. Note that this visitor does not provide a generic visit method, so a
-# subclass needs to define visit methods from visitModule to as deep as the
-# interesting node.
-# We also define a Check visitor that makes sure the parsed ASDL is well-formed.
-
-class VisitorBase(object):
- """Generic tree visitor for ASTs."""
- def __init__(self):
- self.cache = {}
-
- def visit(self, obj, *args):
- klass = obj.__class__
- meth = self.cache.get(klass)
- if meth is None:
- methname = "visit" + klass.__name__
- meth = getattr(self, methname, None)
- self.cache[klass] = meth
- if meth:
- try:
- meth(obj, *args)
- except Exception as e:
- print("Error visiting %r: %s" % (obj, e))
- raise
-
-class Check(VisitorBase):
- """A visitor that checks a parsed ASDL tree for correctness.
-
- Errors are printed and accumulated.
- """
- def __init__(self):
- super(Check, self).__init__()
- self.cons = {}
- self.errors = 0
- self.types = {}
-
- def visitModule(self, mod):
- for dfn in mod.dfns:
- self.visit(dfn)
-
- def visitType(self, type):
- self.visit(type.value, str(type.name))
-
- def visitSum(self, sum, name):
- for t in sum.types:
- self.visit(t, name)
-
- def visitConstructor(self, cons, name):
- key = str(cons.name)
- conflict = self.cons.get(key)
- if conflict is None:
- self.cons[key] = name
- else:
- print('Redefinition of constructor {}'.format(key))
- print('Defined in {} and {}'.format(conflict, name))
- self.errors += 1
- for f in cons.fields:
- self.visit(f, key)
-
- def visitField(self, field, name):
- key = str(field.type)
- l = self.types.setdefault(key, [])
- l.append(name)
-
- def visitProduct(self, prod, name):
- for f in prod.fields:
- self.visit(f, name)
-
-def check(mod):
- """Check the parsed ASDL tree for correctness.
-
- Return True if success. For failure, the errors are printed out and False
- is returned.
- """
- v = Check()
- v.visit(mod)
-
- for t in v.types:
- if t not in mod.types and not t in builtin_types:
- v.errors += 1
- uses = ", ".join(v.types[t])
- print('Undefined type {}, used in {}'.format(t, uses))
- return not v.errors
-
-# The ASDL parser itself comes next. The only interesting external interface
-# here is the top-level parse function.
-
-def parse(filename):
- """Parse ASDL from the given file and return a Module node describing it."""
- with open(filename) as f:
- parser = ASDLParser()
- return parser.parse(f.read())
-
-# Types for describing tokens in an ASDL specification.
-class TokenKind:
- """TokenKind is provides a scope for enumerated token kinds."""
- (ConstructorId, TypeId, Equals, Comma, Question, Pipe, Asterisk,
- LParen, RParen, LBrace, RBrace) = range(11)
-
- operator_table = {
- '=': Equals, ',': Comma, '?': Question, '|': Pipe, '(': LParen,
- ')': RParen, '*': Asterisk, '{': LBrace, '}': RBrace}
-
-Token = namedtuple('Token', 'kind value lineno')
-
-class ASDLSyntaxError(Exception):
- def __init__(self, msg, lineno=None):
- self.msg = msg
- self.lineno = lineno or '<unknown>'
-
- def __str__(self):
- return 'Syntax error on line {0.lineno}: {0.msg}'.format(self)
-
-def tokenize_asdl(buf):
- """Tokenize the given buffer. Yield Token objects."""
- for lineno, line in enumerate(buf.splitlines(), 1):
- for m in re.finditer(r'\s*(\w+|--.*|.)', line.strip()):
- c = m.group(1)
- if c[0].isalpha():
- # Some kind of identifier
- if c[0].isupper():
- yield Token(TokenKind.ConstructorId, c, lineno)
- else:
- yield Token(TokenKind.TypeId, c, lineno)
- elif c[:2] == '--':
- # Comment
- break
- else:
- # Operators
- try:
- op_kind = TokenKind.operator_table[c]
- except KeyError:
- raise ASDLSyntaxError('Invalid operator %s' % c, lineno)
- yield Token(op_kind, c, lineno)
-
-class ASDLParser:
- """Parser for ASDL files.
-
- Create, then call the parse method on a buffer containing ASDL.
- This is a simple recursive descent parser that uses tokenize_asdl for the
- lexing.
- """
- def __init__(self):
- self._tokenizer = None
- self.cur_token = None
-
- def parse(self, buf):
- """Parse the ASDL in the buffer and return an AST with a Module root.
- """
- self._tokenizer = tokenize_asdl(buf)
- self._advance()
- return self._parse_module()
-
- def _parse_module(self):
- if self._at_keyword('module'):
- self._advance()
- else:
- raise ASDLSyntaxError(
- 'Expected "module" (found {})'.format(self.cur_token.value),
- self.cur_token.lineno)
- name = self._match(self._id_kinds)
- self._match(TokenKind.LBrace)
- defs = self._parse_definitions()
- self._match(TokenKind.RBrace)
- return Module(name, defs)
-
- def _parse_definitions(self):
- defs = []
- while self.cur_token.kind == TokenKind.TypeId:
- typename = self._advance()
- self._match(TokenKind.Equals)
- type = self._parse_type()
- defs.append(Type(typename, type))
- return defs
-
- def _parse_type(self):
- if self.cur_token.kind == TokenKind.LParen:
- # If we see a (, it's a product
- return self._parse_product()
- else:
- # Otherwise it's a sum. Look for ConstructorId
- sumlist = [Constructor(self._match(TokenKind.ConstructorId),
- self._parse_optional_fields())]
- while self.cur_token.kind == TokenKind.Pipe:
- # More constructors
- self._advance()
- sumlist.append(Constructor(
- self._match(TokenKind.ConstructorId),
- self._parse_optional_fields()))
- return Sum(sumlist, self._parse_optional_attributes())
-
- def _parse_product(self):
- return Product(self._parse_fields(), self._parse_optional_attributes())
-
- def _parse_fields(self):
- fields = []
- self._match(TokenKind.LParen)
- while self.cur_token.kind == TokenKind.TypeId:
- typename = self._advance()
- is_seq, is_opt = self._parse_optional_field_quantifier()
- id = (self._advance() if self.cur_token.kind in self._id_kinds
- else None)
- fields.append(Field(typename, id, seq=is_seq, opt=is_opt))
- if self.cur_token.kind == TokenKind.RParen:
- break
- elif self.cur_token.kind == TokenKind.Comma:
- self._advance()
- self._match(TokenKind.RParen)
- return fields
-
- def _parse_optional_fields(self):
- if self.cur_token.kind == TokenKind.LParen:
- return self._parse_fields()
- else:
- return None
-
- def _parse_optional_attributes(self):
- if self._at_keyword('attributes'):
- self._advance()
- return self._parse_fields()
- else:
- return None
-
- def _parse_optional_field_quantifier(self):
- is_seq, is_opt = False, False
- if self.cur_token.kind == TokenKind.Asterisk:
- is_seq = True
- self._advance()
- elif self.cur_token.kind == TokenKind.Question:
- is_opt = True
- self._advance()
- return is_seq, is_opt
-
- def _advance(self):
- """ Return the value of the current token and read the next one into
- self.cur_token.
- """
- cur_val = None if self.cur_token is None else self.cur_token.value
- try:
- self.cur_token = next(self._tokenizer)
- except StopIteration:
- self.cur_token = None
- return cur_val
-
- _id_kinds = (TokenKind.ConstructorId, TokenKind.TypeId)
-
- def _match(self, kind):
- """The 'match' primitive of RD parsers.
-
- * Verifies that the current token is of the given kind (kind can
- be a tuple, in which case the kind must match one of its members).
- * Returns the value of the current token
- * Reads in the next token
- """
- if (isinstance(kind, tuple) and self.cur_token.kind in kind or
- self.cur_token.kind == kind
- ):
- value = self.cur_token.value
- self._advance()
- return value
- else:
- raise ASDLSyntaxError(
- 'Unmatched {} (found {})'.format(kind, self.cur_token.kind),
- self.cur_token.lineno)
-
- def _at_keyword(self, keyword):
- return (self.cur_token.kind == TokenKind.TypeId and
- self.cur_token.value == keyword)
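The module deleted above is self-contained: parse() tokenizes and recursively parses an .asdl file into the meta-AST classes defined at its top, and VisitorBase dispatches on "visit" plus the node's class name. A minimal usage sketch reconstructed from that code (the file name and printed format are illustrative only):

    import asdl

    class FieldLister(asdl.VisitorBase):
        def visitModule(self, mod):
            for dfn in mod.dfns:           # each dfn is a Type(name, value)
                self.visit(dfn)

        def visitType(self, type):
            self.visit(type.value, str(type.name))

        def visitSum(self, sum, name):
            for cons in sum.types:         # Constructor nodes with .name/.fields
                print(name, '=', cons.name, [str(f.type) for f in cons.fields])

        def visitProduct(self, prod, name):
            print(name, '=', [str(f.type) for f in prod.fields])

    mod = asdl.parse('Python.asdl')        # returns a Module meta-AST node
    if asdl.check(mod):                    # prints any problems, returns bool
        FieldLister().visit(mod)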
diff --git a/ast35/Parser/asdl_c.py b/ast35/Parser/asdl_c.py
deleted file mode 100755
index cabc241..0000000
--- a/ast35/Parser/asdl_c.py
+++ /dev/null
@@ -1,1326 +0,0 @@
-#! /usr/bin/env python
-"""Generate C code from an ASDL description."""
-
-import os, sys
-
-import asdl
-
-TABSIZE = 4
-MAX_COL = 80
-
-def get_c_type(name):
- """Return a string for the C name of the type.
-
- This function special cases the default types provided by asdl.
- """
- if name in asdl.builtin_types:
- return name
- else:
- return "%s_ty" % name
-
-def reflow_lines(s, depth):
- """Reflow the line s indented depth tabs.
-
- Return a sequence of lines where no line extends beyond MAX_COL
- when properly indented. The first line is properly indented based
- exclusively on depth * TABSIZE. All following lines -- these are
- the reflowed lines generated by this function -- start at the same
- column as the first character beyond the opening { in the first
- line.
- """
- size = MAX_COL - depth * TABSIZE
- if len(s) < size:
- return [s]
-
- lines = []
- cur = s
- padding = ""
- while len(cur) > size:
- i = cur.rfind(' ', 0, size)
- # XXX this should be fixed for real
- if i == -1 and 'GeneratorExp' in cur:
- i = size + 3
- assert i != -1, "Impossible line %d to reflow: %r" % (size, s)
- lines.append(padding + cur[:i])
- if len(lines) == 1:
- # find new size based on brace
- j = cur.find('{', 0, i)
- if j >= 0:
- j += 2 # account for the brace and the space after it
- size -= j
- padding = " " * j
- else:
- j = cur.find('(', 0, i)
- if j >= 0:
- j += 1 # account for the paren (no space after it)
- size -= j
- padding = " " * j
- cur = cur[i+1:]
- else:
- lines.append(padding + cur)
- return lines
-
-def is_simple(sum):
- """Return True if a sum is a simple.
-
- A sum is simple if its types have no fields, e.g.
- unaryop = Invert | Not | UAdd | USub
- """
- for t in sum.types:
- if t.fields:
- return False
- return True
-
-
-class EmitVisitor(asdl.VisitorBase):
- """Visit that emits lines"""
-
- def __init__(self, file):
- self.file = file
- self.identifiers = set()
- super(EmitVisitor, self).__init__()
-
- def emit_identifier(self, name):
- name = str(name)
- if name in self.identifiers:
- return
- self.emit("_Py_IDENTIFIER(%s);" % name, 0)
- self.identifiers.add(name)
-
- def emit(self, s, depth, reflow=True):
- # XXX reflow long lines?
- if reflow:
- lines = reflow_lines(s, depth)
- else:
- lines = [s]
- for line in lines:
- line = (" " * TABSIZE * depth) + line + "\n"
- self.file.write(line)
-
-
-class TypeDefVisitor(EmitVisitor):
- def visitModule(self, mod):
- for dfn in mod.dfns:
- self.visit(dfn)
-
- def visitType(self, type, depth=0):
- self.visit(type.value, type.name, depth)
-
- def visitSum(self, sum, name, depth):
- if is_simple(sum):
- self.simple_sum(sum, name, depth)
- else:
- self.sum_with_constructors(sum, name, depth)
-
- def simple_sum(self, sum, name, depth):
- enum = []
- for i in range(len(sum.types)):
- type = sum.types[i]
- enum.append("%s=%d" % (type.name, i + 1))
- enums = ", ".join(enum)
- ctype = get_c_type(name)
- s = "typedef enum _%s { %s } %s;" % (name, enums, ctype)
- self.emit(s, depth)
- self.emit("", depth)
-
- def sum_with_constructors(self, sum, name, depth):
- ctype = get_c_type(name)
- s = "typedef struct _%(name)s *%(ctype)s;" % locals()
- self.emit(s, depth)
- self.emit("", depth)
-
- def visitProduct(self, product, name, depth):
- ctype = get_c_type(name)
- s = "typedef struct _%(name)s *%(ctype)s;" % locals()
- self.emit(s, depth)
- self.emit("", depth)
-
-
-class StructVisitor(EmitVisitor):
- """Visitor to generate typedefs for AST."""
-
- def visitModule(self, mod):
- for dfn in mod.dfns:
- self.visit(dfn)
-
- def visitType(self, type, depth=0):
- self.visit(type.value, type.name, depth)
-
- def visitSum(self, sum, name, depth):
- if not is_simple(sum):
- self.sum_with_constructors(sum, name, depth)
-
- def sum_with_constructors(self, sum, name, depth):
- def emit(s, depth=depth):
- self.emit(s % sys._getframe(1).f_locals, depth)
- enum = []
- for i in range(len(sum.types)):
- type = sum.types[i]
- enum.append("%s_kind=%d" % (type.name, i + 1))
-
- emit("enum _%(name)s_kind {" + ", ".join(enum) + "};")
-
- emit("struct _%(name)s {")
- emit("enum _%(name)s_kind kind;", depth + 1)
- emit("union {", depth + 1)
- for t in sum.types:
- self.visit(t, depth + 2)
- emit("} v;", depth + 1)
- for field in sum.attributes:
- # rudimentary attribute handling
- type = str(field.type)
- assert type in asdl.builtin_types, type
- emit("%s %s;" % (type, field.name), depth + 1);
- emit("};")
- emit("")
-
- def visitConstructor(self, cons, depth):
- if cons.fields:
- self.emit("struct {", depth)
- for f in cons.fields:
- self.visit(f, depth + 1)
- self.emit("} %s;" % cons.name, depth)
- self.emit("", depth)
-
- def visitField(self, field, depth):
- # XXX need to lookup field.type, because it might be something
- # like a builtin...
- ctype = get_c_type(field.type)
- name = field.name
- if field.seq:
- if field.type == 'cmpop':
- self.emit("asdl_int_seq *%(name)s;" % locals(), depth)
- else:
- self.emit("asdl_seq *%(name)s;" % locals(), depth)
- else:
- self.emit("%(ctype)s %(name)s;" % locals(), depth)
-
- def visitProduct(self, product, name, depth):
- self.emit("struct _%(name)s {" % locals(), depth)
- for f in product.fields:
- self.visit(f, depth + 1)
- for field in product.attributes:
- # rudimentary attribute handling
- type = str(field.type)
- assert type in asdl.builtin_types, type
- self.emit("%s %s;" % (type, field.name), depth + 1);
- self.emit("};", depth)
- self.emit("", depth)
-
-
-class PrototypeVisitor(EmitVisitor):
- """Generate function prototypes for the .h file"""
-
- def visitModule(self, mod):
- for dfn in mod.dfns:
- self.visit(dfn)
-
- def visitType(self, type):
- self.visit(type.value, type.name)
-
- def visitSum(self, sum, name):
- if is_simple(sum):
- pass # XXX
- else:
- for t in sum.types:
- self.visit(t, name, sum.attributes)
-
- def get_args(self, fields):
- """Return list of C argument into, one for each field.
-
- Argument info is 3-tuple of a C type, variable name, and flag
- that is true if type can be NULL.
- """
- args = []
- unnamed = {}
- for f in fields:
- if f.name is None:
- name = f.type
- c = unnamed[name] = unnamed.get(name, 0) + 1
- if c > 1:
- name = "name%d" % (c - 1)
- else:
- name = f.name
- # XXX should extend get_c_type() to handle this
- if f.seq:
- if f.type == 'cmpop':
- ctype = "asdl_int_seq *"
- else:
- ctype = "asdl_seq *"
- else:
- ctype = get_c_type(f.type)
- args.append((ctype, name, f.opt or f.seq))
- return args
-
- def visitConstructor(self, cons, type, attrs):
- args = self.get_args(cons.fields)
- attrs = self.get_args(attrs)
- ctype = get_c_type(type)
- self.emit_function(cons.name, ctype, args, attrs)
-
- def emit_function(self, name, ctype, args, attrs, union=True):
- args = args + attrs
- if args:
- argstr = ", ".join(["%s %s" % (atype, aname)
- for atype, aname, opt in args])
- argstr += ", PyArena *arena"
- else:
- argstr = "PyArena *arena"
- margs = "a0"
- for i in range(1, len(args)+1):
- margs += ", a%d" % i
- self.emit("#define %s(%s) _Ta35_%s(%s)" % (name, margs, name, margs), 0,
- reflow=False)
- self.emit("%s _Ta35_%s(%s);" % (ctype, name, argstr), False)
-
- def visitProduct(self, prod, name):
- self.emit_function(name, get_c_type(name),
- self.get_args(prod.fields),
- self.get_args(prod.attributes),
- union=False)
-
-
-class FunctionVisitor(PrototypeVisitor):
- """Visitor to generate constructor functions for AST."""
-
- def emit_function(self, name, ctype, args, attrs, union=True):
- def emit(s, depth=0, reflow=True):
- self.emit(s, depth, reflow)
- argstr = ", ".join(["%s %s" % (atype, aname)
- for atype, aname, opt in args + attrs])
- if argstr:
- argstr += ", PyArena *arena"
- else:
- argstr = "PyArena *arena"
- self.emit("%s" % ctype, 0)
- emit("%s(%s)" % (name, argstr))
- emit("{")
- emit("%s p;" % ctype, 1)
- for argtype, argname, opt in args:
- if not opt and argtype != "int":
- emit("if (!%s) {" % argname, 1)
- emit("PyErr_SetString(PyExc_ValueError,", 2)
- msg = "field %s is required for %s" % (argname, name)
- emit(' "%s");' % msg,
- 2, reflow=False)
- emit('return NULL;', 2)
- emit('}', 1)
-
- emit("p = (%s)PyArena_Malloc(arena, sizeof(*p));" % ctype, 1);
- emit("if (!p)", 1)
- emit("return NULL;", 2)
- if union:
- self.emit_body_union(name, args, attrs)
- else:
- self.emit_body_struct(name, args, attrs)
- emit("return p;", 1)
- emit("}")
- emit("")
-
- def emit_body_union(self, name, args, attrs):
- def emit(s, depth=0, reflow=True):
- self.emit(s, depth, reflow)
- emit("p->kind = %s_kind;" % name, 1)
- for argtype, argname, opt in args:
- emit("p->v.%s.%s = %s;" % (name, argname, argname), 1)
- for argtype, argname, opt in attrs:
- emit("p->%s = %s;" % (argname, argname), 1)
-
- def emit_body_struct(self, name, args, attrs):
- def emit(s, depth=0, reflow=True):
- self.emit(s, depth, reflow)
- for argtype, argname, opt in args:
- emit("p->%s = %s;" % (argname, argname), 1)
- for argtype, argname, opt in attrs:
- emit("p->%s = %s;" % (argname, argname), 1)
-
-
-class PickleVisitor(EmitVisitor):
-
- def visitModule(self, mod):
- for dfn in mod.dfns:
- self.visit(dfn)
-
- def visitType(self, type):
- self.visit(type.value, type.name)
-
- def visitSum(self, sum, name):
- pass
-
- def visitProduct(self, sum, name):
- pass
-
- def visitConstructor(self, cons, name):
- pass
-
- def visitField(self, sum):
- pass
-
-
-class Obj2ModPrototypeVisitor(PickleVisitor):
- def visitProduct(self, prod, name):
- code = "static int obj2ast_%s(PyObject* obj, %s* out, PyArena* arena);"
- self.emit(code % (name, get_c_type(name)), 0)
-
- visitSum = visitProduct
-
-
-class Obj2ModVisitor(PickleVisitor):
- def funcHeader(self, name):
- ctype = get_c_type(name)
- self.emit("int", 0)
- self.emit("obj2ast_%s(PyObject* obj, %s* out, PyArena* arena)" % (name, ctype), 0)
- self.emit("{", 0)
- self.emit("int isinstance;", 1)
- self.emit("", 0)
-
- def sumTrailer(self, name, add_label=False):
- self.emit("", 0)
- # there's really nothing more we can do if this fails ...
- error = "expected some sort of %s, but got %%R" % name
- format = "PyErr_Format(PyExc_TypeError, \"%s\", obj);"
- self.emit(format % error, 1, reflow=False)
- if add_label:
- self.emit("failed:", 1)
- self.emit("Py_XDECREF(tmp);", 1)
- self.emit("return 1;", 1)
- self.emit("}", 0)
- self.emit("", 0)
-
- def simpleSum(self, sum, name):
- self.funcHeader(name)
- for t in sum.types:
- line = ("isinstance = PyObject_IsInstance(obj, "
- "(PyObject *)%s_type);")
- self.emit(line % (t.name,), 1)
- self.emit("if (isinstance == -1) {", 1)
- self.emit("return 1;", 2)
- self.emit("}", 1)
- self.emit("if (isinstance) {", 1)
- self.emit("*out = %s;" % t.name, 2)
- self.emit("return 0;", 2)
- self.emit("}", 1)
- self.sumTrailer(name)
-
- def buildArgs(self, fields):
- return ", ".join(fields + ["arena"])
-
- def complexSum(self, sum, name):
- self.funcHeader(name)
- self.emit("PyObject *tmp = NULL;", 1)
- for a in sum.attributes:
- self.visitAttributeDeclaration(a, name, sum=sum)
- self.emit("", 0)
- # XXX: should we only do this for 'expr'?
- self.emit("if (obj == Py_None) {", 1)
- self.emit("*out = NULL;", 2)
- self.emit("return 0;", 2)
- self.emit("}", 1)
- for a in sum.attributes:
- self.visitField(a, name, sum=sum, depth=1)
- for t in sum.types:
- line = "isinstance = PyObject_IsInstance(obj, (PyObject*)%s_type);"
- self.emit(line % (t.name,), 1)
- self.emit("if (isinstance == -1) {", 1)
- self.emit("return 1;", 2)
- self.emit("}", 1)
- self.emit("if (isinstance) {", 1)
- for f in t.fields:
- self.visitFieldDeclaration(f, t.name, sum=sum, depth=2)
- self.emit("", 0)
- for f in t.fields:
- self.visitField(f, t.name, sum=sum, depth=2)
- args = [f.name for f in t.fields] + [a.name for a in sum.attributes]
- self.emit("*out = %s(%s);" % (t.name, self.buildArgs(args)), 2)
- self.emit("if (*out == NULL) goto failed;", 2)
- self.emit("return 0;", 2)
- self.emit("}", 1)
- self.sumTrailer(name, True)
-
- def visitAttributeDeclaration(self, a, name, sum=sum):
- ctype = get_c_type(a.type)
- self.emit("%s %s;" % (ctype, a.name), 1)
-
- def visitSum(self, sum, name):
- if is_simple(sum):
- self.simpleSum(sum, name)
- else:
- self.complexSum(sum, name)
-
- def visitProduct(self, prod, name):
- ctype = get_c_type(name)
- self.emit("int", 0)
- self.emit("obj2ast_%s(PyObject* obj, %s* out, PyArena* arena)" % (name, ctype), 0)
- self.emit("{", 0)
- self.emit("PyObject* tmp = NULL;", 1)
- for f in prod.fields:
- self.visitFieldDeclaration(f, name, prod=prod, depth=1)
- for a in prod.attributes:
- self.visitFieldDeclaration(a, name, prod=prod, depth=1)
- self.emit("", 0)
- for f in prod.fields:
- self.visitField(f, name, prod=prod, depth=1)
- for a in prod.attributes:
- self.visitField(a, name, prod=prod, depth=1)
- args = [f.name for f in prod.fields]
- args.extend([a.name for a in prod.attributes])
- self.emit("*out = %s(%s);" % (name, self.buildArgs(args)), 1)
- self.emit("return 0;", 1)
- self.emit("failed:", 0)
- self.emit("Py_XDECREF(tmp);", 1)
- self.emit("return 1;", 1)
- self.emit("}", 0)
- self.emit("", 0)
-
- def visitFieldDeclaration(self, field, name, sum=None, prod=None, depth=0):
- ctype = get_c_type(field.type)
- if field.seq:
- if self.isSimpleType(field):
- self.emit("asdl_int_seq* %s;" % field.name, depth)
- else:
- self.emit("asdl_seq* %s;" % field.name, depth)
- else:
- ctype = get_c_type(field.type)
- self.emit("%s %s;" % (ctype, field.name), depth)
-
- def isSimpleSum(self, field):
- # XXX can the members of this list be determined automatically?
- return field.type in ('expr_context', 'boolop', 'operator',
- 'unaryop', 'cmpop')
-
- def isNumeric(self, field):
- return get_c_type(field.type) in ("int", "bool")
-
- def isSimpleType(self, field):
- return self.isSimpleSum(field) or self.isNumeric(field)
-
- def visitField(self, field, name, sum=None, prod=None, depth=0):
- ctype = get_c_type(field.type)
- if field.opt:
- check = "exists_not_none(obj, &PyId_%s)" % (field.name,)
- else:
- check = "_PyObject_HasAttrId(obj, &PyId_%s)" % (field.name,)
- self.emit("if (%s) {" % (check,), depth, reflow=False)
- self.emit("int res;", depth+1)
- if field.seq:
- self.emit("Py_ssize_t len;", depth+1)
- self.emit("Py_ssize_t i;", depth+1)
- self.emit("tmp = _PyObject_GetAttrId(obj, &PyId_%s);" % field.name, depth+1)
- self.emit("if (tmp == NULL) goto failed;", depth+1)
- if field.seq:
- self.emit("if (!PyList_Check(tmp)) {", depth+1)
- self.emit("PyErr_Format(PyExc_TypeError, \"%s field \\\"%s\\\" must "
- "be a list, not a %%.200s\", tmp->ob_type->tp_name);" %
- (name, field.name),
- depth+2, reflow=False)
- self.emit("goto failed;", depth+2)
- self.emit("}", depth+1)
- self.emit("len = PyList_GET_SIZE(tmp);", depth+1)
- if self.isSimpleType(field):
- self.emit("%s = _Py_asdl_int_seq_new(len, arena);" % field.name, depth+1)
- else:
- self.emit("%s = _Py_asdl_seq_new(len, arena);" % field.name, depth+1)
- self.emit("if (%s == NULL) goto failed;" % field.name, depth+1)
- self.emit("for (i = 0; i < len; i++) {", depth+1)
- self.emit("%s value;" % ctype, depth+2)
- self.emit("res = obj2ast_%s(PyList_GET_ITEM(tmp, i), &value, arena);" %
- field.type, depth+2, reflow=False)
- self.emit("if (res != 0) goto failed;", depth+2)
- self.emit("asdl_seq_SET(%s, i, value);" % field.name, depth+2)
- self.emit("}", depth+1)
- else:
- self.emit("res = obj2ast_%s(tmp, &%s, arena);" %
- (field.type, field.name), depth+1)
- self.emit("if (res != 0) goto failed;", depth+1)
-
- self.emit("Py_CLEAR(tmp);", depth+1)
- self.emit("} else {", depth)
- if not field.opt:
- message = "required field \\\"%s\\\" missing from %s" % (field.name, name)
- format = "PyErr_SetString(PyExc_TypeError, \"%s\");"
- self.emit(format % message, depth+1, reflow=False)
- self.emit("return 1;", depth+1)
- else:
- if self.isNumeric(field):
- self.emit("%s = 0;" % field.name, depth+1)
- elif not self.isSimpleType(field):
- self.emit("%s = NULL;" % field.name, depth+1)
- else:
- raise TypeError("could not determine the default value for %s" % field.name)
- self.emit("}", depth)
-
-
-class MarshalPrototypeVisitor(PickleVisitor):
-
- def prototype(self, sum, name):
- ctype = get_c_type(name)
- self.emit("static int marshal_write_%s(PyObject **, int *, %s);"
- % (name, ctype), 0)
-
- visitProduct = visitSum = prototype
-
-
-class PyTypesDeclareVisitor(PickleVisitor):
-
- def visitProduct(self, prod, name):
- self.emit("static PyTypeObject *%s_type;" % name, 0)
- self.emit("static PyObject* ast2obj_%s(void*);" % name, 0)
- if prod.attributes:
- for a in prod.attributes:
- self.emit_identifier(a.name)
- self.emit("static char *%s_attributes[] = {" % name, 0)
- for a in prod.attributes:
- self.emit('"%s",' % a.name, 1)
- self.emit("};", 0)
- if prod.fields:
- for f in prod.fields:
- self.emit_identifier(f.name)
- self.emit("static char *%s_fields[]={" % name,0)
- for f in prod.fields:
- self.emit('"%s",' % f.name, 1)
- self.emit("};", 0)
-
- def visitSum(self, sum, name):
- self.emit("static PyTypeObject *%s_type;" % name, 0)
- if sum.attributes:
- for a in sum.attributes:
- self.emit_identifier(a.name)
- self.emit("static char *%s_attributes[] = {" % name, 0)
- for a in sum.attributes:
- self.emit('"%s",' % a.name, 1)
- self.emit("};", 0)
- ptype = "void*"
- if is_simple(sum):
- ptype = get_c_type(name)
- tnames = []
- for t in sum.types:
- tnames.append(str(t.name)+"_singleton")
- tnames = ", *".join(tnames)
- self.emit("static PyObject *%s;" % tnames, 0)
- self.emit("static PyObject* ast2obj_%s(%s);" % (name, ptype), 0)
- for t in sum.types:
- self.visitConstructor(t, name)
-
- def visitConstructor(self, cons, name):
- self.emit("static PyTypeObject *%s_type;" % cons.name, 0)
- if cons.fields:
- for t in cons.fields:
- self.emit_identifier(t.name)
- self.emit("static char *%s_fields[]={" % cons.name, 0)
- for t in cons.fields:
- self.emit('"%s",' % t.name, 1)
- self.emit("};",0)
-
-class PyTypesVisitor(PickleVisitor):
-
- def visitModule(self, mod):
- self.emit("""
-typedef struct {
- PyObject_HEAD
- PyObject *dict;
-} AST_object;
-
-static void
-ast_dealloc(AST_object *self)
-{
- Py_CLEAR(self->dict);
- Py_TYPE(self)->tp_free(self);
-}
-
-static int
-ast_traverse(AST_object *self, visitproc visit, void *arg)
-{
- Py_VISIT(self->dict);
- return 0;
-}
-
-static void
-ast_clear(AST_object *self)
-{
- Py_CLEAR(self->dict);
-}
-
-static int
-ast_type_init(PyObject *self, PyObject *args, PyObject *kw)
-{
- _Py_IDENTIFIER(_fields);
- Py_ssize_t i, numfields = 0;
- int res = -1;
- PyObject *key, *value, *fields;
- fields = _PyObject_GetAttrId((PyObject*)Py_TYPE(self), &PyId__fields);
- if (!fields)
- PyErr_Clear();
- if (fields) {
- numfields = PySequence_Size(fields);
- if (numfields == -1)
- goto cleanup;
- }
- res = 0; /* if no error occurs, this stays 0 to the end */
- if (PyTuple_GET_SIZE(args) > 0) {
- if (numfields != PyTuple_GET_SIZE(args)) {
- PyErr_Format(PyExc_TypeError, "%.400s constructor takes %s"
- "%zd positional argument%s",
- Py_TYPE(self)->tp_name,
- numfields == 0 ? "" : "either 0 or ",
- numfields, numfields == 1 ? "" : "s");
- res = -1;
- goto cleanup;
- }
- for (i = 0; i < PyTuple_GET_SIZE(args); i++) {
- /* cannot be reached when fields is NULL */
- PyObject *name = PySequence_GetItem(fields, i);
- if (!name) {
- res = -1;
- goto cleanup;
- }
- res = PyObject_SetAttr(self, name, PyTuple_GET_ITEM(args, i));
- Py_DECREF(name);
- if (res < 0)
- goto cleanup;
- }
- }
- if (kw) {
- i = 0; /* needed by PyDict_Next */
- while (PyDict_Next(kw, &i, &key, &value)) {
- res = PyObject_SetAttr(self, key, value);
- if (res < 0)
- goto cleanup;
- }
- }
- cleanup:
- Py_XDECREF(fields);
- return res;
-}
-
-/* Pickling support */
-static PyObject *
-ast_type_reduce(PyObject *self, PyObject *unused)
-{
- PyObject *res;
- _Py_IDENTIFIER(__dict__);
- PyObject *dict = _PyObject_GetAttrId(self, &PyId___dict__);
- if (dict == NULL) {
- if (PyErr_ExceptionMatches(PyExc_AttributeError))
- PyErr_Clear();
- else
- return NULL;
- }
- if (dict) {
- res = Py_BuildValue("O()O", Py_TYPE(self), dict);
- Py_DECREF(dict);
- return res;
- }
- return Py_BuildValue("O()", Py_TYPE(self));
-}
-
-static PyMethodDef ast_type_methods[] = {
- {"__reduce__", ast_type_reduce, METH_NOARGS, NULL},
- {NULL}
-};
-
-static PyGetSetDef ast_type_getsets[] = {
- {"__dict__", PyObject_GenericGetDict, PyObject_GenericSetDict},
- {NULL}
-};
-
-static PyTypeObject AST_type = {
- PyVarObject_HEAD_INIT(&PyType_Type, 0)
- "_ast35.AST",
- sizeof(AST_object),
- 0,
- (destructor)ast_dealloc, /* tp_dealloc */
- 0, /* tp_print */
- 0, /* tp_getattr */
- 0, /* tp_setattr */
- 0, /* tp_reserved */
- 0, /* tp_repr */
- 0, /* tp_as_number */
- 0, /* tp_as_sequence */
- 0, /* tp_as_mapping */
- 0, /* tp_hash */
- 0, /* tp_call */
- 0, /* tp_str */
- PyObject_GenericGetAttr, /* tp_getattro */
- PyObject_GenericSetAttr, /* tp_setattro */
- 0, /* tp_as_buffer */
- Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE | Py_TPFLAGS_HAVE_GC, /* tp_flags */
- 0, /* tp_doc */
- (traverseproc)ast_traverse, /* tp_traverse */
- (inquiry)ast_clear, /* tp_clear */
- 0, /* tp_richcompare */
- 0, /* tp_weaklistoffset */
- 0, /* tp_iter */
- 0, /* tp_iternext */
- ast_type_methods, /* tp_methods */
- 0, /* tp_members */
- ast_type_getsets, /* tp_getset */
- 0, /* tp_base */
- 0, /* tp_dict */
- 0, /* tp_descr_get */
- 0, /* tp_descr_set */
- offsetof(AST_object, dict),/* tp_dictoffset */
- (initproc)ast_type_init, /* tp_init */
- PyType_GenericAlloc, /* tp_alloc */
- PyType_GenericNew, /* tp_new */
- PyObject_GC_Del, /* tp_free */
-};
-
-
-static PyTypeObject* make_type(char *type, PyTypeObject* base, char**fields, int num_fields)
-{
- PyObject *fnames, *result;
- int i;
- fnames = PyTuple_New(num_fields);
- if (!fnames) return NULL;
- for (i = 0; i < num_fields; i++) {
- PyObject *field = PyUnicode_FromString(fields[i]);
- if (!field) {
- Py_DECREF(fnames);
- return NULL;
- }
- PyTuple_SET_ITEM(fnames, i, field);
- }
- result = PyObject_CallFunction((PyObject*)&PyType_Type, "s(O){sOss}",
- type, base, "_fields", fnames, "__module__", "_ast35");
- Py_DECREF(fnames);
- return (PyTypeObject*)result;
-}
-
-static int add_attributes(PyTypeObject* type, char**attrs, int num_fields)
-{
- int i, result;
- _Py_IDENTIFIER(_attributes);
- PyObject *s, *l = PyTuple_New(num_fields);
- if (!l)
- return 0;
- for (i = 0; i < num_fields; i++) {
- s = PyUnicode_FromString(attrs[i]);
- if (!s) {
- Py_DECREF(l);
- return 0;
- }
- PyTuple_SET_ITEM(l, i, s);
- }
- result = _PyObject_SetAttrId((PyObject*)type, &PyId__attributes, l) >= 0;
- Py_DECREF(l);
- return result;
-}
-
-/* Conversion AST -> Python */
-
-static PyObject* ast2obj_list(asdl_seq *seq, PyObject* (*func)(void*))
-{
- Py_ssize_t i, n = asdl_seq_LEN(seq);
- PyObject *result = PyList_New(n);
- PyObject *value;
- if (!result)
- return NULL;
- for (i = 0; i < n; i++) {
- value = func(asdl_seq_GET(seq, i));
- if (!value) {
- Py_DECREF(result);
- return NULL;
- }
- PyList_SET_ITEM(result, i, value);
- }
- return result;
-}
-
-static PyObject* ast2obj_object(void *o)
-{
- if (!o)
- o = Py_None;
- Py_INCREF((PyObject*)o);
- return (PyObject*)o;
-}
-#define ast2obj_singleton ast2obj_object
-#define ast2obj_identifier ast2obj_object
-#define ast2obj_string ast2obj_object
-#define ast2obj_bytes ast2obj_object
-
-static PyObject* ast2obj_int(long b)
-{
- return PyLong_FromLong(b);
-}
-
-/* Conversion Python -> AST */
-
-static int obj2ast_singleton(PyObject *obj, PyObject** out, PyArena* arena)
-{
- if (obj != Py_None && obj != Py_True && obj != Py_False) {
- PyErr_SetString(PyExc_ValueError,
- "AST singleton must be True, False, or None");
- return 1;
- }
- *out = obj;
- return 0;
-}
-
-static int obj2ast_object(PyObject* obj, PyObject** out, PyArena* arena)
-{
- if (obj == Py_None)
- obj = NULL;
- if (obj) {
- if (PyArena_AddPyObject(arena, obj) < 0) {
- *out = NULL;
- return -1;
- }
- Py_INCREF(obj);
- }
- *out = obj;
- return 0;
-}
-
-static int obj2ast_identifier(PyObject* obj, PyObject** out, PyArena* arena)
-{
- if (!PyUnicode_CheckExact(obj) && obj != Py_None) {
- PyErr_SetString(PyExc_TypeError, "AST identifier must be of type str");
- return 1;
- }
- return obj2ast_object(obj, out, arena);
-}
-
-static int obj2ast_string(PyObject* obj, PyObject** out, PyArena* arena)
-{
- if (!PyUnicode_CheckExact(obj) && !PyBytes_CheckExact(obj)) {
- PyErr_SetString(PyExc_TypeError, "AST string must be of type str");
- return 1;
- }
- return obj2ast_object(obj, out, arena);
-}
-
-static int obj2ast_bytes(PyObject* obj, PyObject** out, PyArena* arena)
-{
- if (!PyBytes_CheckExact(obj)) {
- PyErr_SetString(PyExc_TypeError, "AST bytes must be of type bytes");
- return 1;
- }
- return obj2ast_object(obj, out, arena);
-}
-
-static int obj2ast_int(PyObject* obj, int* out, PyArena* arena)
-{
- int i;
- if (!PyLong_Check(obj)) {
- PyErr_Format(PyExc_ValueError, "invalid integer value: %R", obj);
- return 1;
- }
-
- i = (int)PyLong_AsLong(obj);
- if (i == -1 && PyErr_Occurred())
- return 1;
- *out = i;
- return 0;
-}
-
-static int add_ast_fields(void)
-{
- PyObject *empty_tuple, *d;
- if (PyType_Ready(&AST_type) < 0)
- return -1;
- d = AST_type.tp_dict;
- empty_tuple = PyTuple_New(0);
- if (!empty_tuple ||
- PyDict_SetItemString(d, "_fields", empty_tuple) < 0 ||
- PyDict_SetItemString(d, "_attributes", empty_tuple) < 0) {
- Py_XDECREF(empty_tuple);
- return -1;
- }
- Py_DECREF(empty_tuple);
- return 0;
-}
-
-static int exists_not_none(PyObject *obj, _Py_Identifier *id)
-{
- int isnone;
- PyObject *attr = _PyObject_GetAttrId(obj, id);
- if (!attr) {
- PyErr_Clear();
- return 0;
- }
- isnone = attr == Py_None;
- Py_DECREF(attr);
- return !isnone;
-}
-
-""", 0, reflow=False)
-
- self.emit("static int init_types(void)",0)
- self.emit("{", 0)
- self.emit("static int initialized;", 1)
- self.emit("if (initialized) return 1;", 1)
- self.emit("if (add_ast_fields() < 0) return 0;", 1)
- for dfn in mod.dfns:
- self.visit(dfn)
- self.emit("initialized = 1;", 1)
- self.emit("return 1;", 1);
- self.emit("}", 0)
-
- def visitProduct(self, prod, name):
- if prod.fields:
- fields = name+"_fields"
- else:
- fields = "NULL"
- self.emit('%s_type = make_type("%s", &AST_type, %s, %d);' %
- (name, name, fields, len(prod.fields)), 1)
- self.emit("if (!%s_type) return 0;" % name, 1)
- if prod.attributes:
- self.emit("if (!add_attributes(%s_type, %s_attributes, %d)) return 0;" %
- (name, name, len(prod.attributes)), 1)
- else:
- self.emit("if (!add_attributes(%s_type, NULL, 0)) return 0;" % name, 1)
-
- def visitSum(self, sum, name):
- self.emit('%s_type = make_type("%s", &AST_type, NULL, 0);' %
- (name, name), 1)
- self.emit("if (!%s_type) return 0;" % name, 1)
- if sum.attributes:
- self.emit("if (!add_attributes(%s_type, %s_attributes, %d)) return 0;" %
- (name, name, len(sum.attributes)), 1)
- else:
- self.emit("if (!add_attributes(%s_type, NULL, 0)) return 0;" % name, 1)
- simple = is_simple(sum)
- for t in sum.types:
- self.visitConstructor(t, name, simple)
-
- def visitConstructor(self, cons, name, simple):
- if cons.fields:
- fields = cons.name+"_fields"
- else:
- fields = "NULL"
- self.emit('%s_type = make_type("%s", %s_type, %s, %d);' %
- (cons.name, cons.name, name, fields, len(cons.fields)), 1)
- self.emit("if (!%s_type) return 0;" % cons.name, 1)
- if simple:
- self.emit("%s_singleton = PyType_GenericNew(%s_type, NULL, NULL);" %
- (cons.name, cons.name), 1)
- self.emit("if (!%s_singleton) return 0;" % cons.name, 1)
-
-
-class ASTModuleVisitor(PickleVisitor):
-
- def visitModule(self, mod):
- # add parse method to module
- self.emit('PyObject *ast35_parse(PyObject *self, PyObject *args);', 0)
- self.emit('static PyMethodDef ast35_methods[] = {', 0)
- self.emit('{"_parse", ast35_parse, METH_VARARGS, "Parse string into typed AST."},', 1)
- self.emit('{NULL, NULL, 0, NULL}', 1)
- self.emit('};', 0)
-
- self.emit("static struct PyModuleDef _astmodule35 = {", 0)
- self.emit(' PyModuleDef_HEAD_INIT, "_ast35", NULL, 0, ast35_methods', 0)
- self.emit("};", 0)
- self.emit("PyMODINIT_FUNC", 0)
- self.emit("PyInit__ast35(void)", 0)
- self.emit("{", 0)
- self.emit("PyObject *m, *d;", 1)
- self.emit("if (!init_types()) return NULL;", 1)
- self.emit('m = PyModule_Create(&_astmodule35);', 1)
- self.emit("if (!m) return NULL;", 1)
- self.emit("d = PyModule_GetDict(m);", 1)
- self.emit('if (PyDict_SetItemString(d, "AST", (PyObject*)&AST_type) < 0) return NULL;', 1)
- self.emit('if (PyModule_AddIntMacro(m, PyCF_ONLY_AST) < 0)', 1)
- self.emit("return NULL;", 2)
- for dfn in mod.dfns:
- self.visit(dfn)
- self.emit("return m;", 1)
- self.emit("}", 0)
-
- def visitProduct(self, prod, name):
- self.addObj(name)
-
- def visitSum(self, sum, name):
- self.addObj(name)
- for t in sum.types:
- self.visitConstructor(t, name)
-
- def visitConstructor(self, cons, name):
- self.addObj(cons.name)
-
- def addObj(self, name):
- self.emit('if (PyDict_SetItemString(d, "%s", (PyObject*)%s_type) < 0) return NULL;' % (name, name), 1)
-
-
-_SPECIALIZED_SEQUENCES = ('stmt', 'expr')
-
-def find_sequence(fields, doing_specialization):
- """Return True if any field uses a sequence."""
- for f in fields:
- if f.seq:
- if not doing_specialization:
- return True
- if str(f.type) not in _SPECIALIZED_SEQUENCES:
- return True
- return False
-
-def has_sequence(types, doing_specialization):
- for t in types:
- if find_sequence(t.fields, doing_specialization):
- return True
- return False
-
-
-class StaticVisitor(PickleVisitor):
- CODE = '''Very simple, always emit this static code. Override CODE'''
-
- def visit(self, object):
- self.emit(self.CODE, 0, reflow=False)
-
-
-class ObjVisitor(PickleVisitor):
-
- def func_begin(self, name):
- ctype = get_c_type(name)
- self.emit("PyObject*", 0)
- self.emit("ast2obj_%s(void* _o)" % (name), 0)
- self.emit("{", 0)
- self.emit("%s o = (%s)_o;" % (ctype, ctype), 1)
- self.emit("PyObject *result = NULL, *value = NULL;", 1)
- self.emit('if (!o) {', 1)
- self.emit("Py_INCREF(Py_None);", 2)
- self.emit('return Py_None;', 2)
- self.emit("}", 1)
- self.emit('', 0)
-
- def func_end(self):
- self.emit("return result;", 1)
- self.emit("failed:", 0)
- self.emit("Py_XDECREF(value);", 1)
- self.emit("Py_XDECREF(result);", 1)
- self.emit("return NULL;", 1)
- self.emit("}", 0)
- self.emit("", 0)
-
- def visitSum(self, sum, name):
- if is_simple(sum):
- self.simpleSum(sum, name)
- return
- self.func_begin(name)
- self.emit("switch (o->kind) {", 1)
- for i in range(len(sum.types)):
- t = sum.types[i]
- self.visitConstructor(t, i + 1, name)
- self.emit("}", 1)
- for a in sum.attributes:
- self.emit("value = ast2obj_%s(o->%s);" % (a.type, a.name), 1)
- self.emit("if (!value) goto failed;", 1)
- self.emit('if (_PyObject_SetAttrId(result, &PyId_%s, value) < 0)' % a.name, 1)
- self.emit('goto failed;', 2)
- self.emit('Py_DECREF(value);', 1)
- self.func_end()
-
- def simpleSum(self, sum, name):
- self.emit("PyObject* ast2obj_%s(%s_ty o)" % (name, name), 0)
- self.emit("{", 0)
- self.emit("switch(o) {", 1)
- for t in sum.types:
- self.emit("case %s:" % t.name, 2)
- self.emit("Py_INCREF(%s_singleton);" % t.name, 3)
- self.emit("return %s_singleton;" % t.name, 3)
- self.emit("default:", 2)
- self.emit('/* should never happen, but just in case ... */', 3)
- code = "PyErr_Format(PyExc_SystemError, \"unknown %s found\");" % name
- self.emit(code, 3, reflow=False)
- self.emit("return NULL;", 3)
- self.emit("}", 1)
- self.emit("}", 0)
-
- def visitProduct(self, prod, name):
- self.func_begin(name)
- self.emit("result = PyType_GenericNew(%s_type, NULL, NULL);" % name, 1);
- self.emit("if (!result) return NULL;", 1)
- for field in prod.fields:
- self.visitField(field, name, 1, True)
- for a in prod.attributes:
- self.emit("value = ast2obj_%s(o->%s);" % (a.type, a.name), 1)
- self.emit("if (!value) goto failed;", 1)
- self.emit('if (_PyObject_SetAttrId(result, &PyId_%s, value) < 0)' % a.name, 1)
- self.emit('goto failed;', 2)
- self.emit('Py_DECREF(value);', 1)
- self.func_end()
-
- def visitConstructor(self, cons, enum, name):
- self.emit("case %s_kind:" % cons.name, 1)
- self.emit("result = PyType_GenericNew(%s_type, NULL, NULL);" % cons.name, 2);
- self.emit("if (!result) goto failed;", 2)
- for f in cons.fields:
- self.visitField(f, cons.name, 2, False)
- self.emit("break;", 2)
-
- def visitField(self, field, name, depth, product):
- def emit(s, d):
- self.emit(s, depth + d)
- if product:
- value = "o->%s" % field.name
- else:
- value = "o->v.%s.%s" % (name, field.name)
- self.set(field, value, depth)
- emit("if (!value) goto failed;", 0)
- emit('if (_PyObject_SetAttrId(result, &PyId_%s, value) == -1)' % field.name, 0)
- emit("goto failed;", 1)
- emit("Py_DECREF(value);", 0)
-
- def emitSeq(self, field, value, depth, emit):
- emit("seq = %s;" % value, 0)
- emit("n = asdl_seq_LEN(seq);", 0)
- emit("value = PyList_New(n);", 0)
- emit("if (!value) goto failed;", 0)
- emit("for (i = 0; i < n; i++) {", 0)
- self.set("value", field, "asdl_seq_GET(seq, i)", depth + 1)
- emit("if (!value1) goto failed;", 1)
- emit("PyList_SET_ITEM(value, i, value1);", 1)
- emit("value1 = NULL;", 1)
- emit("}", 0)
-
- def set(self, field, value, depth):
- if field.seq:
- # XXX should really check for is_simple, but that requires a symbol table
- if field.type == "cmpop":
- # While the sequence elements are stored as void*,
- # ast2obj_cmpop expects an enum
- self.emit("{", depth)
- self.emit("Py_ssize_t i, n = asdl_seq_LEN(%s);" % value, depth+1)
- self.emit("value = PyList_New(n);", depth+1)
- self.emit("if (!value) goto failed;", depth+1)
- self.emit("for(i = 0; i < n; i++)", depth+1)
- # This cannot fail, so no need for error handling
- self.emit("PyList_SET_ITEM(value, i, ast2obj_cmpop((cmpop_ty)asdl_seq_GET(%s, i)));" % value,
- depth+2, reflow=False)
- self.emit("}", depth)
- else:
- self.emit("value = ast2obj_list(%s, ast2obj_%s);" % (value, field.type), depth)
- else:
- ctype = get_c_type(field.type)
- self.emit("value = ast2obj_%s(%s);" % (field.type, value), depth, reflow=False)
-
-
-class PartingShots(StaticVisitor):
-
- CODE = """
-PyObject* Ta35AST_mod2obj(mod_ty t)
-{
- if (!init_types())
- return NULL;
- return ast2obj_mod(t);
-}
-
-/* mode is 0 for "exec", 1 for "eval" and 2 for "single" input */
-mod_ty Ta35AST_obj2mod(PyObject* ast, PyArena* arena, int mode)
-{
- mod_ty res;
- PyObject *req_type[3];
- char *req_name[] = {"Module", "Expression", "Interactive"};
- int isinstance;
-
- req_type[0] = (PyObject*)Module_type;
- req_type[1] = (PyObject*)Expression_type;
- req_type[2] = (PyObject*)Interactive_type;
-
- assert(0 <= mode && mode <= 2);
-
- if (!init_types())
- return NULL;
-
- isinstance = PyObject_IsInstance(ast, req_type[mode]);
- if (isinstance == -1)
- return NULL;
- if (!isinstance) {
- PyErr_Format(PyExc_TypeError, "expected %s node, got %.400s",
- req_name[mode], Py_TYPE(ast)->tp_name);
- return NULL;
- }
- if (obj2ast_mod(ast, &res, arena) != 0)
- return NULL;
- else
- return res;
-}
-
-int Ta35AST_Check(PyObject* obj)
-{
- if (!init_types())
- return -1;
- return PyObject_IsInstance(obj, (PyObject*)&AST_type);
-}
-"""
-
-class ChainOfVisitors:
- def __init__(self, *visitors):
- self.visitors = visitors
-
- def visit(self, object):
- for v in self.visitors:
- v.visit(object)
- v.emit("", 0)
-
-common_msg = "/* File automatically generated by %s. */\n\n"
-
-def main(srcfile, dump_module=False):
- argv0 = sys.argv[0]
- components = argv0.split(os.sep)
- argv0 = os.sep.join(components[-2:])
- auto_gen_msg = common_msg % argv0
- mod = asdl.parse(srcfile)
- if dump_module:
- print('Parsed Module:')
- print(mod)
- if not asdl.check(mod):
- sys.exit(1)
- if INC_DIR:
- p = "%s/%s-ast.h" % (INC_DIR, mod.name)
- f = open(p, "w")
- f.write(auto_gen_msg)
- f.write('#include "asdl.h"\n\n')
- c = ChainOfVisitors(TypeDefVisitor(f),
- StructVisitor(f),
- PrototypeVisitor(f),
- )
- c.visit(mod)
- f.write("PyObject* Ta35AST_mod2obj(mod_ty t);\n")
- f.write("mod_ty Ta35AST_obj2mod(PyObject* ast, PyArena* arena, int mode);\n")
- f.write("int Ta35AST_Check(PyObject* obj);\n")
- f.close()
-
- if SRC_DIR:
- p = os.path.join(SRC_DIR, str(mod.name) + "-ast.c")
- f = open(p, "w")
- f.write(auto_gen_msg)
- f.write('#include <stddef.h>\n')
- f.write('\n')
- f.write('#include "Python.h"\n')
- f.write('#include "%s-ast.h"\n' % mod.name)
- f.write('\n')
- f.write("static PyTypeObject AST_type;\n")
- v = ChainOfVisitors(
- PyTypesDeclareVisitor(f),
- PyTypesVisitor(f),
- Obj2ModPrototypeVisitor(f),
- FunctionVisitor(f),
- ObjVisitor(f),
- Obj2ModVisitor(f),
- ASTModuleVisitor(f),
- PartingShots(f),
- )
- v.visit(mod)
- f.close()
-
-if __name__ == "__main__":
- import getopt
-
- INC_DIR = ''
- SRC_DIR = ''
- dump_module = False
- opts, args = getopt.getopt(sys.argv[1:], "dh:c:")
- for o, v in opts:
- if o == '-h':
- INC_DIR = v
- if o == '-c':
- SRC_DIR = v
- if o == '-d':
- dump_module = True
- if INC_DIR and SRC_DIR:
- print('Must specify exactly one output file')
- sys.exit(1)
- elif len(args) != 1:
- print('Must specify single input file')
- sys.exit(1)
- main(args[0], dump_module)
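
For reference, the __main__ block of the removed generator accepts -h <dir> to emit the <module>-ast.h header, -c <dir> to emit the <module>-ast.c source, and -d to dump the parsed ASDL module, with exactly one output flag and a single input file per run. A minimal sketch of an equivalent pair of invocations, assuming the pre-removal paths (ast35/Parser/asdl_c.py and the Python.asdl grammar next to it), might look like:

    import subprocess

    GRAMMAR = "ast35/Parser/Python.asdl"   # input grammar, path as it existed before this commit

    # The script exits if both -h and -c are given, so run it once per output file.
    subprocess.run(["python", "ast35/Parser/asdl_c.py", "-h", "ast35/Include", GRAMMAR], check=True)  # writes ast35/Include/Python-ast.h
    subprocess.run(["python", "ast35/Parser/asdl_c.py", "-c", "ast35/Python", GRAMMAR], check=True)   # writes ast35/Python/Python-ast.c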
diff --git a/build.cmd b/build.cmd
deleted file mode 100644
index 243dc9a..0000000
--- a/build.cmd
+++ /dev/null
@@ -1,21 +0,0 @@
-@echo off
-:: To build extensions for 64 bit Python 3, we need to configure environment
-:: variables to use the MSVC 2010 C++ compilers from GRMSDKX_EN_DVD.iso of:
-:: MS Windows SDK for Windows 7 and .NET Framework 4
-::
-:: More details at:
-:: https://github.com/cython/cython/wiki/64BitCythonExtensionsOnWindows
-
-IF "%DISTUTILS_USE_SDK%"=="1" (
- ECHO Configuring environment to build with MSVC on a 64bit architecture
- ECHO Using Windows SDK 7.1
- "C:\Program Files\Microsoft SDKs\Windows\v7.1\Setup\WindowsSdkVer.exe" -q -version:v7.1
- CALL "C:\Program Files\Microsoft SDKs\Windows\v7.1\Bin\SetEnv.cmd" /x64 /release
- SET MSSdk=1
- REM Need the following to allow tox to see the SDK compiler
- SET TOX_TESTENV_PASSENV=DISTUTILS_USE_SDK MSSdk INCLUDE LIB
-) ELSE (
- ECHO Using default MSVC build environment
-)
-
-CALL %*
diff --git a/setup.cfg b/setup.cfg
new file mode 100644
index 0000000..861a9f5
--- /dev/null
+++ b/setup.cfg
@@ -0,0 +1,5 @@
+[egg_info]
+tag_build =
+tag_date = 0
+tag_svn_revision = 0
+
diff --git a/setup.py b/setup.py
index 907cda5..4cd5b5f 100644
--- a/setup.py
+++ b/setup.py
@@ -1,3 +1,7 @@
+import sys
+if sys.version_info[0] < 3 or sys.version_info[1] < 3:
+ sys.exit('Error: typed_ast only runs on Python 3.3 and above.')
+
try:
from setuptools import setup, Extension
except ImportError:
@@ -78,7 +82,7 @@ comments. The primary goals of this package are correctness and speed.
""".strip()
setup (name = 'typed-ast',
- version = '0.5.5',
+ version = '0.5.6',
description = 'fork of Python 2 and 3 ast modules with type comment support',
long_description = long_description,
author = 'David Fisher',
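
The new guard at the top of setup.py compares the major and minor version fields separately. For the Python 2.x and 3.x interpreters the package targets, a tuple comparison expresses the same 3.3+ requirement; a small sketch (not part of the commit):

    import sys

    # Equivalent 3.3+ check using tuple comparison against sys.version_info.
    if sys.version_info < (3, 3):
        sys.exit('Error: typed_ast only runs on Python 3.3 and above.')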
diff --git a/tools/find_exported_symbols b/tools/find_exported_symbols
deleted file mode 100755
index 2fcb6ad..0000000
--- a/tools/find_exported_symbols
+++ /dev/null
@@ -1,8 +0,0 @@
-#!/bin/bash
-PROJ_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )/.."
-
-gobjdump -t $PROJ_DIR/build/lib*/_ast${1}.*.so | grep ' g ' | grep -v UND > "exported_symbols${1}.txt"
-echo "Symbols written to exported_symbols${1}.txt. You should edit this file to "
-echo "remove any symbols you still want to export (like PyInit functions) "
-echo "and to make each line contain only a function name you want updated "
-echo "(and none of the other output) before running update_exported_symbols."
diff --git a/tools/update_exported_symbols b/tools/update_exported_symbols
deleted file mode 100755
index e346b39..0000000
--- a/tools/update_exported_symbols
+++ /dev/null
@@ -1,13 +0,0 @@
-#!/bin/bash
-PROJ_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )/.."
-
-for CHANGE in $( cat "$PROJ_DIR/exported_symbols${1}.txt" ); do
- if [[ ${CHANGE:0:1} == "_" ]] ; then
- NEW="_Ta${1}${CHANGE:3}"
- else
- NEW="Ta${1}${CHANGE:2}"
- fi
- find "$PROJ_DIR/ast${1}" -type f -name '*.h' -or -name '*.c' | xargs -n 1 sed -i '' "s/$CHANGE/$NEW/"
-done
-
-echo "Symbols updated. Remember to also update autogeneration code like Parser/asdl_c.py."
diff --git a/tools/update_header_guards b/tools/update_header_guards
deleted file mode 100755
index 24d6c0e..0000000
--- a/tools/update_header_guards
+++ /dev/null
@@ -1,12 +0,0 @@
-#!/bin/bash -eux
-
-# usage: ./update_header_guards VERSION_NUMBER
-
-PROJ_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )/.."
-
-# only works on OS X due to silly sed incompatibility
-FOLDER="ast$1"
-PATTERN='s/Py\([A-Z_]*_H\( \*\/\)\{0,1\}\)$/'
-PATTERN+="Ta$1"
-PATTERN+='\1/'
-find "$FOLDER" -type f -name '*.h' | xargs -n 1 sed -i '' "$PATTERN"
diff --git a/typed_ast.egg-info/PKG-INFO b/typed_ast.egg-info/PKG-INFO
new file mode 100644
index 0000000..3ed89fd
--- /dev/null
+++ b/typed_ast.egg-info/PKG-INFO
@@ -0,0 +1,20 @@
+Metadata-Version: 1.1
+Name: typed-ast
+Version: 0.5.6
+Summary: fork of Python 2 and 3 ast modules with type comment support
+Home-page: https://github.com/dropbox/typed_ast
+Author: David Fisher
+Author-email: ddfisher@dropbox.com
+License: Apache License 2.0
+Description: This package is a fork of the CPython 2.7 and 3.5 `ast` modules with the
+ ability to parse PEP 484 (https://www.python.org/dev/peps/pep-0484/) type
+ comments. The primary goals of this package are correctness and speed.
+Platform: POSIX
+Classifier: Development Status :: 3 - Alpha
+Classifier: Environment :: Console
+Classifier: Intended Audience :: Developers
+Classifier: Operating System :: POSIX
+Classifier: Programming Language :: Python :: 3.3
+Classifier: Programming Language :: Python :: 3.4
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Topic :: Software Development
diff --git a/typed_ast.egg-info/SOURCES.txt b/typed_ast.egg-info/SOURCES.txt
new file mode 100644
index 0000000..b174e36
--- /dev/null
+++ b/typed_ast.egg-info/SOURCES.txt
@@ -0,0 +1,62 @@
+LICENSE
+MANIFEST.in
+setup.py
+ast27/Custom/typed_ast.c
+ast27/Include/Python-ast.h
+ast27/Include/asdl.h
+ast27/Include/ast.h
+ast27/Include/bitset.h
+ast27/Include/compile.h
+ast27/Include/errcode.h
+ast27/Include/graminit.h
+ast27/Include/grammar.h
+ast27/Include/node.h
+ast27/Include/parsetok.h
+ast27/Include/token.h
+ast27/Parser/acceler.c
+ast27/Parser/bitset.c
+ast27/Parser/grammar.c
+ast27/Parser/grammar1.c
+ast27/Parser/node.c
+ast27/Parser/parser.c
+ast27/Parser/parser.h
+ast27/Parser/parsetok.c
+ast27/Parser/tokenizer.c
+ast27/Parser/tokenizer.h
+ast27/Python/Python-ast.c
+ast27/Python/ast.c
+ast27/Python/graminit.c
+ast27/Python/mystrtoul.c
+ast35/Custom/typed_ast.c
+ast35/Include/Python-ast.h
+ast35/Include/asdl.h
+ast35/Include/ast.h
+ast35/Include/bitset.h
+ast35/Include/compile.h
+ast35/Include/errcode.h
+ast35/Include/graminit.h
+ast35/Include/grammar.h
+ast35/Include/node.h
+ast35/Include/parsetok.h
+ast35/Include/token.h
+ast35/Parser/acceler.c
+ast35/Parser/bitset.c
+ast35/Parser/grammar.c
+ast35/Parser/grammar1.c
+ast35/Parser/node.c
+ast35/Parser/parser.c
+ast35/Parser/parser.h
+ast35/Parser/parsetok.c
+ast35/Parser/tokenizer.c
+ast35/Parser/tokenizer.h
+ast35/Python/Python-ast.c
+ast35/Python/ast.c
+ast35/Python/graminit.c
+typed_ast/__init__.py
+typed_ast/ast27.py
+typed_ast/ast35.py
+typed_ast/conversions.py
+typed_ast.egg-info/PKG-INFO
+typed_ast.egg-info/SOURCES.txt
+typed_ast.egg-info/dependency_links.txt
+typed_ast.egg-info/top_level.txt
\ No newline at end of file
diff --git a/typed_ast.egg-info/dependency_links.txt b/typed_ast.egg-info/dependency_links.txt
new file mode 100644
index 0000000..8b13789
--- /dev/null
+++ b/typed_ast.egg-info/dependency_links.txt
@@ -0,0 +1 @@
+
diff --git a/typed_ast.egg-info/top_level.txt b/typed_ast.egg-info/top_level.txt
new file mode 100644
index 0000000..b7d8e46
--- /dev/null
+++ b/typed_ast.egg-info/top_level.txt
@@ -0,0 +1,3 @@
+_ast27
+_ast35
+typed_ast
diff --git a/typed_ast/conversions.py b/typed_ast/conversions.py
index 953133a..61af7d2 100644
--- a/typed_ast/conversions.py
+++ b/typed_ast/conversions.py
@@ -29,8 +29,6 @@ def _copy_attributes(new_value, old_value):
class _AST2To3(ast27.NodeTransformer):
# note: None, True, and False are *not* translated into NameConstants.
- # note: Negative numeric literals are not converted to use unary -
-
def __init__(self):
pass
@@ -134,8 +132,15 @@ class _AST2To3(ast27.NodeTransformer):
return ret
def visit_Exec(self, n):
+ new_globals = self.maybe_visit(n.globals)
+ if new_globals is None:
+ new_globals = ast35.Name("None", ast35.Load(), lineno=-1, col_offset=-1)
+ new_locals = self.maybe_visit(n.locals)
+ if new_locals is None:
+ new_locals = ast35.Name("None", ast35.Load(), lineno=-1, col_offset=-1)
+
return ast35.Expr(ast35.Call(ast35.Name("exec", ast35.Load(), lineno=n.lineno, col_offset=-1),
- [self.visit(n.body), self.maybe_visit(n.globals), self.maybe_visit(n.locals)],
+ [self.visit(n.body), new_globals, new_locals],
[],
lineno=n.lineno, col_offset=-1))
@@ -212,3 +217,13 @@ class _AST2To3(ast27.NodeTransformer):
return ast35.Bytes(s.s)
else:
return ast35.Str(s.s)
+
+ def visit_Num(self, n):
+ new = self.generic_visit(n)
+ if new.n < 0:
+ # Python 3 uses a unary - operator for negative literals.
+ new.n = -new.n
+ return ast35.UnaryOp(op=ast35.USub(),
+ operand=_copy_attributes(new, n))
+ else:
+ return new
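
The two hunks above change how typed_ast.conversions rewrites Python 2 code: exec statements now always receive explicit globals/locals arguments (a None Name when absent), and negative Num literals are re-expressed with a unary USub. A minimal usage sketch, assuming py2to3() is the module's conversion entry point:

    # Sketch only; assumes typed_ast.conversions exposes py2to3() taking an ast27 tree.
    from typed_ast import ast27, ast35, conversions

    py2_tree = ast27.parse("exec 'x = 1'\n")    # Python 2 exec statement
    py3_tree = conversions.py2to3(py2_tree)     # becomes Expr(Call(Name('exec'), [...], []))
    print(ast35.dump(py3_tree))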
--
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/debian-med/python3-typed-ast.git