diff --git a/.appveyor.yml b/.appveyor.yml index 4f06db9d2..80cc236b7 100644 --- a/.appveyor.yml +++ b/.appveyor.yml @@ -21,6 +21,9 @@ environment: - PYTHON: "C:\\Python310" PYTHON_VERSION: "3.10.x" PYTHON_ARCH: "64" + - PYTHON: "C:\\Python311" + PYTHON_VERSION: "3.11.x" + PYTHON_ARCH: "64" install: # pywinpty installation fails without prior rust installation on some Python versions diff --git a/.github/workflows/run-tests.yml b/.github/workflows/run-tests.yml index c94b10213..208b946a0 100644 --- a/.github/workflows/run-tests.yml +++ b/.github/workflows/run-tests.yml @@ -5,7 +5,20 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: ['2.7', 'pypy-2.7', '3.5', '3.6', '3.7', '3.8', '3.9', '3.10', 'pypy-3.6'] + python-version: + - '2.7' + - '3.5' + - '3.6' + - '3.7' + - '3.8' + - '3.9' + - '3.10' + - '3.11' + - 'pypy-2.7' + - 'pypy-3.6' + - 'pypy-3.7' + - 'pypy-3.8' + - 'pypy-3.9' fail-fast: false name: Python ${{ matrix.python-version }} steps: diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index ace2fe6cf..754f21c81 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -30,7 +30,7 @@ repos: args: - --ignore=W503,E501,E265,E402,F405,E305,E126 - repo: https://github.com/pre-commit/mirrors-autopep8 - rev: v1.7.0 + rev: v2.0.0 hooks: - id: autopep8 args: diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 5e1ff47a4..04361f1b9 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -177,7 +177,7 @@ After you've tested your changes locally, you'll want to add more permanent test 1. Release a new version of [`sublime-coconut`](https://github.com/evhub/sublime-coconut) if applicable 1. Edit the [`package.json`](https://github.com/evhub/sublime-coconut/blob/master/package.json) with the new version 2. Run `make publish` - 3. Release a new version on GitHub + 3. [Release a new version on GitHub](https://github.com/evhub/sublime-coconut/releases) 2. Merge pull request and mark as resolved 3. Release `master` on GitHub 4. 
`git fetch`, `git checkout master`, and `git pull` diff --git a/DOCS.md b/DOCS.md index 23a11db37..9e5d5fd31 100644 --- a/DOCS.md +++ b/DOCS.md @@ -121,11 +121,12 @@ depth: 1 ### Usage ``` -coconut [-h] [--and source dest] [-v] [-t version] [-i] [-p] [-a] [-l] [-k] [-w] [-r] [-n] [-d] [-q] [-s] - [--no-tco] [--no-wrap] [-c code] [-j processes] [-f] [--minify] [--jupyter ...] [--mypy ...] - [--argv ...] [--tutorial] [--docs] [--style name] [--history-file path] [--vi-mode] - [--recursion-limit limit] [--site-install] [--site-uninstall] [--verbose] [--trace] [--profile] - [source] [dest] +coconut [-h] [--and source [dest ...]] [-v] [-t version] [-i] [-p] [-a] [-l] [-k] [-w] + [-r] [-n] [-d] [-q] [-s] [--no-tco] [--no-wrap] [-c code] [-j processes] [-f] + [--minify] [--jupyter ...] [--mypy ...] [--argv ...] [--tutorial] [--docs] + [--style name] [--history-file path] [--vi-mode] [--recursion-limit limit] + [--site-install] [--site-uninstall] [--verbose] [--trace] [--profile] + [source] [dest] ``` #### Positional Arguments @@ -141,17 +142,19 @@ dest destination directory for compiled files (defaults to ``` optional arguments: -h, --help show this help message and exit - --and source dest additional source/dest pairs to compile + --and source [dest ...] 
+ additional source/dest pairs to compile -v, -V, --version print Coconut and Python version information -t version, --target version specify target Python version (defaults to universal) - -i, --interact force the interpreter to start (otherwise starts if no other command is - given) (implies --run) + -i, --interact force the interpreter to start (otherwise starts if no other command + is given) (implies --run) -p, --package compile source as part of a package (defaults to only if source is a directory) -a, --standalone, --stand-alone - compile source as standalone files (defaults to only if source is a single - file) + compile source as standalone files (defaults to only if source is a + single file) + -l, --line-numbers, --linenumbers add line number comments for ease of debugging -k, --keep-lines, --keeplines include source code in comments for ease of debugging @@ -160,39 +163,40 @@ optional arguments: -n, --no-write, --nowrite disable writing compiled Python -d, --display print compiled Python - -q, --quiet suppress all informational output (combine with --display to write runnable - code to stdout) + -q, --quiet suppress all informational output (combine with --display to write + runnable code to stdout) -s, --strict enforce code cleanliness standards --no-tco, --notco disable tail call optimization - --no-wrap, --nowrap disable wrapping type annotations in strings and turn off 'from __future__ - import annotations' behavior + --no-wrap, --nowrap disable wrapping type annotations in strings and turn off 'from + __future__ import annotations' behavior -c code, --code code run Coconut passed in as a string (can also be piped into stdin) -j processes, --jobs processes - number of additional processes to use (defaults to 0) (pass 'sys' to use - machine default) + number of additional processes to use (defaults to 0) (pass 'sys' to + use machine default) -f, --force force re-compilation even when source code and compilation parameters haven't changed --minify 
reduce size of compiled Python --jupyter ..., --ipython ... - run Jupyter/IPython with Coconut as the kernel (remaining args passed to - Jupyter) + run Jupyter/IPython with Coconut as the kernel (remaining args passed + to Jupyter) --mypy ... run MyPy on compiled Python (remaining args passed to MyPy) (implies --package) --argv ..., --args ... - set sys.argv to source plus remaining args for use in the Coconut script - being run + set sys.argv to source plus remaining args for use in the Coconut + script being run --tutorial open Coconut's tutorial in the default web browser --docs, --documentation open Coconut's documentation in the default web browser - --style name set Pygments syntax highlighting style (or 'list' to list styles) (defaults - to COCONUT_STYLE environment variable if it exists, otherwise 'default') - --history-file path set history file (or '' for no file) (defaults to - '~/.coconut_history') (can be modified by setting + --style name set Pygments syntax highlighting style (or 'list' to list styles) + (defaults to COCONUT_STYLE environment variable if it exists, + otherwise 'default') + --history-file path set history file (or '' for no file) (currently set to + '~/.coconut_history') (can be modified by setting COCONUT_HOME environment variable) - --vi-mode, --vimode enable vi mode in the interpreter (defaults to False) (can be modified - by setting COCONUT_VI_MODE environment variable) + --vi-mode, --vimode enable vi mode in the interpreter (currently set to False) (can be + modified by setting COCONUT_VI_MODE environment variable) --recursion-limit limit, --recursionlimit limit - set maximum recursion depth in compiler (defaults to 2000) + set maximum recursion depth in compiler (defaults to 4096) --site-install, --siteinstall set up coconut.convenience to be imported on Python start --site-uninstall, --siteuninstall @@ -231,7 +235,7 @@ By default, if the `source` argument to the command-line utility is a file, it w ### 
Compatible Python Versions -While Coconut syntax is based off of Python 3, Coconut code compiled in universal mode (the default `--target`)—and the Coconut compiler itself—should run on any Python version `>= 2.6` on the `2.x` branch or `>= 3.2` on the `3.x` branch (and on either [CPython](https://www.python.org/) or [PyPy](http://pypy.org/)). +While Coconut syntax is based off of the latest Python 3, Coconut code compiled in universal mode (the default `--target`)—and the Coconut compiler itself—should run on any Python version `>= 2.6` on the `2.x` branch or `>= 3.2` on the `3.x` branch (and on either [CPython](https://www.python.org/) or [PyPy](http://pypy.org/)). To make Coconut built-ins universal across Python versions, Coconut makes available on any Python version built-ins that only exist in later versions, including **automatically overwriting Python 2 built-ins with their Python 3 counterparts.** Additionally, Coconut also [overwrites some Python 3 built-ins for optimization and enhancement purposes](#enhanced-built-ins). If access to the original Python versions of any overwritten built-ins is desired, the old built-ins can be retrieved by prefixing them with `py_`. Specifically, the overwritten built-ins are: @@ -256,7 +260,7 @@ To make Coconut built-ins universal across Python versions, Coconut makes availa - `py_repr`, and - `py_breakpoint`. -_Note: Coconut's `repr` can be somewhat tricky, as it will attempt to remove the `u` before reprs of unicode strings, but will not always be able to do so if the unicode string is nested._ +_Note: Coconut's `repr` can be somewhat tricky, as it will attempt to remove the `u` before reprs of unicode strings on Python 2, but will not always be able to do so if the unicode string is nested._ For standard library compatibility, **Coconut automatically maps imports under Python 3 names to imports under Python 2 names**. 
Thus, Coconut will automatically take care of any standard library modules that were renamed from Python 2 to Python 3 if just the Python 3 name is used. For modules or packages that only exist in Python 3, however, Coconut has no way of maintaining compatibility. @@ -265,16 +269,16 @@ Additionally, Coconut allows the [`__set_name__`](https://docs.python.org/3/refe Finally, while Coconut will try to compile Python-3-specific syntax to its universal equivalent, the following constructs have no equivalent in Python 2, and require the specification of a target of at least `3` to be used: - the `nonlocal` keyword, -- keyword-only function parameters (use pattern-matching function definition for universal code), +- keyword-only function parameters (use [pattern-matching function definition](#pattern-matching-functions) for universal code), - `async` and `await` statements (requires `--target 3.5`), - `:=` assignment expressions (requires `--target 3.8`), -- positional-only function parameters (use pattern-matching function definition for universal code) (requires `--target 3.8`), -- `a[x, *y]` variadic generic syntax (requires `--target 3.11`), and +- positional-only function parameters (use [pattern-matching function definition](#pattern-matching-functions) for universal code) (requires `--target 3.8`), +- `a[x, *y]` variadic generic syntax (use [type parameter syntax](#type-parameter-syntax) for universal code) (requires `--target 3.11`), and - `except*` multi-except statements (requires `--target 3.11`). ### Allowable Targets -If the version of Python that the compiled code will be running on is known ahead of time, a target should be specified with `--target`. The given target will only affect the compiled code and whether or not the Python-3-specific syntax detailed above is allowed. Where Python 3 and Python 2 syntax standards differ, Coconut syntax will always follow Python 3 across all targets. 
The supported targets are: +If the version of Python that the compiled code will be running on is known ahead of time, a target should be specified with `--target`. The given target will only affect the compiled code and whether or not the Python-3-specific syntax detailed above is allowed. Where Python syntax differs across versions, Coconut syntax will always follow the latest Python 3 across all targets. The supported targets are: - `universal` (default) (will work on _any_ of the below), - `2`, `2.6` (will work on any Python `>= 2.6` but `< 3`), @@ -288,14 +292,15 @@ If the version of Python that the compiled code will be running on is known ahea - `3.8` (will work on any Python `>= 3.8`), - `3.9` (will work on any Python `>= 3.9`), - `3.10` (will work on any Python `>= 3.10`), -- `3.11` (will work on any Python `>= 3.11`), and +- `3.11` (will work on any Python `>= 3.11`), +- `3.12` (will work on any Python `>= 3.12`), and - `sys` (chooses the target corresponding to the current Python version). _Note: Periods are ignored in target specifications, such that the target `27` is equivalent to the target `2.7`._ ### `strict` Mode -If the `--strict` (`-s` for short) flag is enabled, Coconut will perform additional checks on the code being compiled. It is recommended that you use the `--strict` flag if you are starting a new Coconut project, as it will help you write cleaner code. Specifically, the extra checks done by `--strict` are +If the `--strict` (`-s` for short) flag is enabled, Coconut will perform additional checks on the code being compiled. It is recommended that you use the `--strict` flag if you are starting a new Coconut project, as it will help you write cleaner code. 
Specifically, the extra checks done by `--strict` are: - disabling deprecated features (making them entirely unavailable to code compiled with `--strict`), - warning about unused imports, @@ -389,9 +394,12 @@ _Note: Unlike the normal Coconut command-line, `%%coconut` defaults to the `sys` Coconut has the ability to integrate with [MyPy](http://mypy-lang.org/) to provide optional static type_checking, including for all Coconut built-ins. Simply pass `--mypy` to `coconut` to enable MyPy integration, though be careful to pass it only as the last argument, since all arguments after `--mypy` are passed to `mypy`, not Coconut. -You can also call `mypy` directly on the compiled Coconut if you run `coconut --mypy` at least once and then add `~/.coconut_stubs` to your [`MYPYPATH`](https://mypy.readthedocs.io/en/latest/running_mypy.html#how-imports-are-found). To install the stubs without launching the interpreter, you can also run `coconut --mypy install` instead of `coconut --mypy`. +You can also run `mypy`—or any other static type checker—directly on the compiled Coconut. If the static type checker is unable to find the necessary stub files, however, then you may need to: -To explicitly annotate your code with types for MyPy to check, Coconut supports [Python 3 function type annotations](https://www.python.org/dev/peps/pep-0484/), [Python 3.6 variable type annotations](https://www.python.org/dev/peps/pep-0526/), and even Coconut's own [enhanced type annotation syntax](#enhanced-type-annotation). By default, all type annotations are compiled to Python-2-compatible type comments, which means it all works on any Python version. +1. run `coconut --mypy install` and +2. tell your static type checker of choice to look in `~/.coconut_stubs` for stub files (for `mypy`, this is done by adding it to your [`MYPYPATH`](https://mypy.readthedocs.io/en/latest/running_mypy.html#how-imports-are-found)). 
+ +To explicitly annotate your code with types to be checked, Coconut supports [Python 3 function type annotations](https://www.python.org/dev/peps/pep-0484/), [Python 3.6 variable type annotations](https://www.python.org/dev/peps/pep-0526/), and even Coconut's own [enhanced type annotation syntax](#enhanced-type-annotation). By default, all type annotations are compiled to Python-2-compatible type comments, which means it all works on any Python version. Coconut also supports [PEP 695 type parameter syntax](#type-parameter-syntax) for easily adding type parameters to classes, functions, [`data` types](#data), and type aliases. Coconut even supports `--mypy` in the interpreter, which will intelligently scan each new line of code, in the context of previous lines, for newly-introduced MyPy errors. For example: ```coconut_pycon @@ -403,7 +411,9 @@ Coconut even supports `--mypy` in the interpreter, which will intelligently scan ``` _For more information on `reveal_type`, see [`reveal_type` and `reveal_locals`](#reveal-type-and-reveal-locals)._ -Sometimes, MyPy will not know how to handle certain Coconut constructs, such as `addpattern`. For the `addpattern` case, it is recommended to pass `--allow-redefinition` to MyPy (i.e. run `coconut --mypy --allow-redefinition`), though in some cases `--allow-redefinition` may not be sufficient. In that case, either hide the offending code using [`TYPE_CHECKING`](#type_checking) or put a `# type: ignore` comment on the Coconut line which is generating the line MyPy is complaining about (you can figure out what line this is using `--line-numbers`) and the comment will be added to every generated line. +Sometimes, MyPy will not know how to handle certain Coconut constructs, such as `addpattern`. For the `addpattern` case, it is recommended to pass `--allow-redefinition` to MyPy (i.e. run `coconut --mypy --allow-redefinition`), though in some cases `--allow-redefinition` may not be sufficient. 
In that case, either hide the offending code using [`TYPE_CHECKING`](#type_checking) or put a `# type: ignore` comment on the Coconut line which is generating the line MyPy is complaining about and the comment will be added to every generated line. + +To distribute your code with checkable type annotations, you'll need to include `coconut` as a dependency (though a `--no-deps` install should be fine), as installing it is necessary to make the requisite stub files available. You'll also probably want to include a [`py.typed`](https://peps.python.org/pep-0561/) file. ### `numpy` Integration @@ -913,8 +923,10 @@ Coconut supports Unicode alternatives to many different operator symbols. The Un ⁻ (\u207b) => "-" (only negation) ¬ (\xac) => "~" ≠ (\u2260) or ¬= (\xac=) => "!=" -≤ (\u2264) => "<=" -≥ (\u2265) => ">=" +≤ (\u2264) or ⊆ (\u2286) => "<=" +≥ (\u2265) or ⊇ (\u2287) => ">=" +⊊ (\u228a) => "<" +⊋ (\u228b) => ">" ∧ (\u2227) or ∩ (\u2229) => "&" ∨ (\u2228) or ∪ (\u222a) => "|" ⊻ (\u22bb) or ⊕ (\u2295) => "^" @@ -1014,7 +1026,7 @@ base_pattern ::= ( | "(|" patterns "|)" ]] | [STRING "+"] NAME # complex string matching - ["+" STRING] # (does not work with f-string literals) + ["+" STRING] ["+" NAME ["+" STRING]] ) ``` @@ -1033,7 +1045,7 @@ base_pattern ::= ( - Identity Checks (`is `): will check that whatever is in that position `is` the expression ``. - Sets (`{}`): will only match a set (`collections.abc.Set`) of the same length and contents. - Arbitrary Function Patterns: - - Infix Checks (`` `` ``): will check that the operator `$(?, )` returns a truthy value when called on whatever is in that position, then matches ``. For example, `` x `isinstance` int `` will check that whatever is in that position `isinstance$(?, int)` and bind it to `x`. If `` is not given, will simply check `` directly rather than `$()`. + - Infix Checks (`` `` ``): will check that the operator `$(?, )` returns a truthy value when called on whatever is in that position, then matches ``. 
For example, `` x `isinstance` int `` will check that whatever is in that position `isinstance$(?, int)` and bind it to `x`. If `` is not given, will simply check `` directly rather than `$()`. Additionally, `` `` `` can instead be a [custom operator](#custom-operators) (in that case, no backticks should be used). - View Patterns (`() -> `): calls `` on the item being matched and matches the result to ``. The match fails if a [`MatchError`](#matcherror) is raised. `` may be unparenthesized only when it is a single atom. - Class and Data Type Matching: - Classes or Data Types (`()`): will match as a data type if given [a Coconut `data` type](#data) (or a tuple of Coconut data types) and a class otherwise. @@ -1162,7 +1174,7 @@ match : ``` where `` is any `match` pattern, `` is the item to match against, `` is an optional additional check, and `` is simply code that is executed if the header above it succeeds. Note the absence of an `in` in the `match` statements: that's because the `` in `case ` is taking its place. If no `else` is present and no match succeeds, then the `case` statement is simply skipped over as with [`match` statements](#match) (though unlike [destructuring assignments](#destructuring-assignment)). -Additionally, `cases` can be used as the top-level keyword instead of `case`, and in such a `case` block `match` is allowed for each case rather than `case`. +Additionally, `cases` can be used as the top-level keyword instead of `match`, and in such a `case` block `match` is allowed for each case rather than `case`. 
_DEPRECATED: Coconut also supports `case` instead of `cases` as the top-level keyword for backwards-compatibility purposes._ ##### Examples @@ -1398,6 +1410,7 @@ In Coconut, the following keywords are also valid variable names: - `operator` - `then` - `λ` (a [Unicode alternative](#unicode-alternatives) for `lambda`) +- `exec` (keyword in Python 2) While Coconut can usually disambiguate these two use cases, special syntax is available for disambiguating them if necessary. Note that, if what you're writing can be interpreted as valid Python 3, Coconut will always prefer that interpretation by default. @@ -1443,12 +1456,16 @@ The statement lambda syntax is an extension of the [normal lambda syntax](#lambd The syntax for a statement lambda is ``` -def (arguments) -> statement; statement; ... +[async] [match] def (arguments) -> statement; statement; ... ``` -where `arguments` can be standard function arguments or [pattern-matching function definition](#pattern-matching-functions) arguments and `statement` can be an assignment statement or a keyword statement. If the last `statement` (not followed by a semicolon) is an `expression`, it will automatically be returned. +where `arguments` can be standard function arguments or [pattern-matching function definition](#pattern-matching-functions) arguments and `statement` can be an assignment statement or a keyword statement. Note that the `async` and `match` keywords can be in any order. + +If the last `statement` (not followed by a semicolon) in a statement lambda is an `expression`, it will automatically be returned. Statement lambdas also support implicit lambda syntax such that `def -> _` is equivalent to `def (_=None) -> _` as well as explicitly marking them as pattern-matching such that `match def (x) -> x` will be a pattern-matching function. +Note that statement lambdas have a lower precedence than normal lambdas and thus capture things like trailing commas. 
+ ##### Example **Coconut:** @@ -1582,7 +1599,7 @@ Additionally, Coconut also supports implicit operator function partials for arbi (. `` ) ( `` .) ``` -based on Coconut's [infix notation](#infix-functions) where `` is the name of the function. +based on Coconut's [infix notation](#infix-functions) where `` is the name of the function. Additionally, `` `` `` can instead be a [custom operator](#custom-operators) (in that case, no backticks should be used). ##### Example @@ -1610,6 +1627,10 @@ Furthermore, when compiling type annotations to Python 3 versions without [PEP 5 Additionally, Coconut adds special syntax for making type annotations easier and simpler to write. When inside of a type annotation, Coconut treats certain syntax constructs differently, compiling them to type annotations instead of what they would normally represent. Specifically, Coconut applies the following transformations: ```coconut + | + => typing.Union[, ] +(; ) + => typing.Tuple[, ] ? => typing.Optional[] [] @@ -1624,18 +1645,26 @@ Additionally, Coconut adds special syntax for making type annotations easier and => typing.Callable[[], ] -> => typing.Callable[..., ] -(; ) - => typing.Tuple[, ] - | - => typing.Union[, ] +(, **) -> + => typing.Callable[typing.Concatenate[, ], ] +async () -> + => typing.Callable[[], typing.Awaitable[]] ``` -where `typing` is the Python 3.5 built-in [`typing` module](https://docs.python.org/3/library/typing.html). +where `typing` is the Python 3.5 built-in [`typing` module](https://docs.python.org/3/library/typing.html). For more information on the Callable syntax, see [PEP 677](https://peps.python.org/pep-0677), which Coconut fully supports. 
_Note: The transformation to `Union` is not done on Python 3.10 as Python 3.10 has native [PEP 604](https://www.python.org/dev/peps/pep-0604) support._ +To use these transformations in a [type alias](https://peps.python.org/pep-0484/#type-aliases), use the syntax +``` +type = +``` +which will allow `` to include Coconut's special type annotation syntax and type `` as a [`typing.TypeAlias`](https://docs.python.org/3/library/typing.html#typing.TypeAlias). If you try to instead just do a naked ` = ` type alias, Coconut won't be able to tell you're attempting a type alias and thus won't apply any of the above transformations. + +Such type alias statements—as well as all `class`, `data`, and function definitions in Coconut—also support Coconut's [type parameter syntax](#type-parameter-syntax), allowing you to do things like `type OrStr[T] = T | str`. + Importantly, note that `[]` does not map onto `typing.List[]` but onto `typing.Sequence[]`. This is because, when writing in an idiomatic functional style, assignment should be rare and tuples should be common. Using `Sequence` covers both cases, accommodating tuples and lists and preventing indexed assignment. When an indexed assignment is attempted into a variable typed with `Sequence`, MyPy will generate an error: -``` +```coconut foo: int[] = [0, 1, 2, 3, 4, 5] foo[0] = 1 # MyPy error: "Unsupported target for indexed assignment" ``` @@ -1986,11 +2015,11 @@ Because this could have unintended and potentially damaging consequences, Coconu Coconut allows for assignment function definition that automatically returns the last line of the function body. An assignment function is constructed by substituting `=` for `:` after the function definition line. 
Thus, the syntax for assignment function definition is either ```coconut -def () = +[async] def () = ``` for one-liners or ```coconut -def () = +[async] def () = ``` @@ -2020,14 +2049,14 @@ print(binexp(5)) Coconut pattern-matching functions are just normal functions, except where the arguments are patterns to be matched against instead of variables to be assigned to. The syntax for pattern-matching function definition is ```coconut -[match] def (, , ... [if ]) [-> ]: +[async] [match] def (, , ... [if ]) [-> ]: ``` where `` is defined as ```coconut [*|**] [= ] ``` -where `` is the name of the function, `` is an optional additional check, `` is the body of the function, `` is defined by Coconut's [`match` statement](#match), `` is the optional default if no argument is passed, and `` is the optional return type annotation (note that argument type annotations are not supported for pattern-matching functions). The `match` keyword at the beginning is optional, but is sometimes necessary to disambiguate pattern-matching function definition from normal function definition, since Python function definition will always take precedence. +where `` is the name of the function, `` is an optional additional check, `` is the body of the function, `` is defined by Coconut's [`match` statement](#match), `` is the optional default if no argument is passed, and `` is the optional return type annotation (note that argument type annotations are not supported for pattern-matching functions). The `match` keyword at the beginning is optional, but is sometimes necessary to disambiguate pattern-matching function definition from normal function definition, since Python function definition will always take precedence. Note that the `async` and `match` keywords can be in any order. If `` has a variable name (either directly or with `as`), the resulting pattern-matching function will support keyword arguments using that variable name. 
@@ -2080,10 +2109,12 @@ _Can't be done without a complicated decorator definition and a long series of c Coconut supports the syntax ``` -yield def (): +[async] yield def (): ``` -to denote that you are explicitly defining a generator function. This is useful to ensure that, even if all the `yield`s in your function are removed, it'll always be a generator function. Explicit generator functions also support [pattern-matching syntax](#pattern-matching-functions), but not [assignment function syntax](#assignment-functions), as an assignment function would create a generator return, which is usually undesirable. +to denote that you are explicitly defining a generator function. This is useful to ensure that, even if all the `yield`s in your function are removed, it'll always be a generator function. Note that the `async` and `yield` keywords can be in any order. + +Explicit generator functions also support [pattern-matching syntax](#pattern-matching-functions), [infix function definition](#infix-functions), and [assignment function syntax](#assignment-functions) (though note that assignment function syntax here creates a generator return). ##### Example @@ -2156,6 +2187,83 @@ print(a, b) **Python:** _Can't be done without a long series of checks in place of the destructuring assignment statement. See the compiled code for the Python syntax._ +### Type Parameter Syntax + +Coconut fully supports [PEP 695](https://peps.python.org/pep-0695/) type parameter syntax (with the caveat that all type variables are invariant rather than inferred). + +That includes type parameters for classes, [`data` types](#data), and [all types of function definition](#function-definition). For different types of function definition, the type parameters always come in brackets right after the function name. + +Coconut's [enhanced type annotation syntax](#enhanced-type-annotation) is supported for all type parameter bounds. 
Additionally, Coconut supports the alternative bounds syntax of `type NewType[T <= bound] = ...` rather than `type NewType[T: bound] = ...`, to make it more clear that it is an upper bound rather than a type. + +##### PEP 695 Docs + +Defining a generic class prior to this PEP looks something like this. + +```coconut_python +from typing import Generic, TypeVar + +_T_co = TypeVar("_T_co", covariant=True, bound=str) + +class ClassA(Generic[_T_co]): + def method1(self) -> _T_co: + ... +``` + +With the new syntax, it looks like this. + +```coconut +class ClassA[T: str]: + def method1(self) -> T: + ... +``` + +Here is an example of a generic function today. + +```coconut_python +from typing import TypeVar + +_T = TypeVar("_T") + +def func(a: _T, b: _T) -> _T: + ... +``` + +And the new syntax. + +```coconut +def func[T](a: T, b: T) -> T: + ... +``` + +Here is an example of a generic type alias today. + +```coconut_python +from typing import TypeAlias + +_T = TypeVar("_T") + +ListOrSet: TypeAlias = list[_T] | set[_T] +``` + +And with the new syntax. + +```coconut +type ListOrSet[T] = list[T] | set[T] +``` + + +##### Example + +**Coconut:** +```coconut +data D[T](x: T, y: T) + +def my_ident[T](x: T) -> T = x +``` + +**Python:** +_Can't be done without a complex definition for the data type. See the compiled code for the Python syntax._ + ### Implicit `pass` Coconut supports the simple `class name(base)` and `data name(args)` as aliases for `class name(base): pass` and `data name(args): pass`. @@ -2284,7 +2392,7 @@ cdef f(x): Since Coconut syntax is a superset of Python 3 syntax, Coconut supports the same line continuation syntax as Python. That means both backslash line continuation and implied line continuation inside of parentheses, brackets, or braces will all work. -In Python, however, there are some cases (such as multiple `with` statements) where only backslash continuation, and not parenthetical continuation, is supported. 
Coconut adds support for parenthetical continuation in all these cases. +In Python, however, there are some cases (such as multiple `with` statements) where only backslash continuation, and not parenthetical continuation, is supported. Coconut adds support for parenthetical continuation in all these cases. This also includes support as per [PEP 679](https://peps.python.org/pep-0679) for parenthesized `assert` statements. Supporting parenthetical continuation everywhere allows the [PEP 8](https://www.python.org/dev/peps/pep-0008/) convention, which avoids backslash continuation in favor of implied parenthetical continuation, to always be possible to follow. From PEP 8: @@ -3451,9 +3559,9 @@ If _state_ is `None`, gets a new state object, whereas if _state_ is `False`, th #### `parse` -**coconut.convenience.parse**(_code_=`""`, _mode_=`"sys"`, _state_=`False`) +**coconut.convenience.parse**(_code_=`""`, _mode_=`"sys"`, _state_=`False`, _keep\_internal\_state_=`None`) -Likely the most useful of the convenience functions, `parse` takes Coconut code as input and outputs the equivalent compiled Python code. _mode_ is used to indicate the context for the parsing and _state_ is the state object storing the compilation parameters to use as obtained from [**get_state**](#get_state) (if `False`, uses the global state object). +Likely the most useful of the convenience functions, `parse` takes Coconut code as input and outputs the equivalent compiled Python code. _mode_ is used to indicate the context for the parsing and _state_ is the state object storing the compilation parameters to use as obtained from [**get_state**](#get_state) (if `False`, uses the global state object). _keep\_internal\_state_ determines whether the state object will keep internal state (such as what [custom operators](#custom-operators) have been declared)—if `None`, internal state will be kept iff you are not using the global _state_. 
If _code_ is not passed, `parse` will output just the given _mode_'s header, which can be executed to set up an execution environment in which future code can be parsed and executed without a header. @@ -3532,7 +3640,7 @@ Has the same effect of setting the command-line flags on the given _state_ objec #### `coconut_eval` -**coconut.convenience.coconut_eval**(_expression_, _globals_=`None`, _locals_=`None`, _state_=`False`) +**coconut.convenience.coconut_eval**(_expression_, _globals_=`None`, _locals_=`None`, _state_=`False`, _keep\_internal\_state_=`None`) Version of [`eval`](https://docs.python.org/3/library/functions.html#eval) which can evaluate Coconut code. diff --git a/FAQ.md b/FAQ.md index df294ea71..76ea35c60 100644 --- a/FAQ.md +++ b/FAQ.md @@ -74,6 +74,10 @@ I certainly hope not! Unlike most transpiled languages, all valid Python is vali First, you're going to want a fast compiler, so you should make sure you're using [`cPyparsing`](https://github.com/evhub/cpyparsing). Second, there are two simple things you can do to make Coconut produce faster Python: compile with `--no-tco` and compile with a `--target` specification for the exact version of Python you want to run your code on. Passing `--target` helps Coconut optimize the compiled code for the Python version you want, and, though [Tail Call Optimization](./DOCS.md#tail-call-optimization) is useful, it will usually significantly slow down functions that use it, so disabling it will often provide a major performance boost. +### When I try to use Coconut on the command line, I get weird unprintable characters and numbers; how do I get rid of them? + +You're probably seeing color codes while using a terminal that doesn't support them (e.g. Windows `cmd`). Try setting the `COCONUT_USE_COLOR` environment variable to `FALSE` to get rid of them. + ### I want to contribute to Coconut, how do I get started? That's great! Coconut is completely open-source, and new contributors are always welcome. 
Check out Coconut's [contributing guidelines](./CONTRIBUTING.md) for more information. diff --git a/MANIFEST.in b/MANIFEST.in index 5e7b04a3b..c0c085b1e 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -8,6 +8,7 @@ global-include *.md global-include *.json global-include *.toml global-include *.coco +global-include py.typed prune coconut/tests/dest prune docs prune pyston @@ -15,8 +16,8 @@ prune pyprover prune bbopt prune coconut-prelude prune .mypy_cache -prune coconut/stubs/.mypy_cache prune .pytest_cache prune *.egg-info +prune .github exclude index.rst exclude profile.json diff --git a/Makefile b/Makefile index cb5f62bba..39e07f7d9 100644 --- a/Makefile +++ b/Makefile @@ -17,22 +17,27 @@ dev-py3: clean setup-py3 .PHONY: setup setup: + python -m ensurepip python -m pip install --upgrade "setuptools<58" wheel pip pytest_remotedata .PHONY: setup-py2 setup-py2: + python2 -m ensurepip python2 -m pip install --upgrade "setuptools<58" wheel pip pytest_remotedata .PHONY: setup-py3 setup-py3: + python3 -m ensurepip python3 -m pip install --upgrade "setuptools<58" wheel pip pytest_remotedata .PHONY: setup-pypy setup-pypy: + pypy -m ensurepip pypy -m pip install --upgrade "setuptools<58" wheel pip pytest_remotedata .PHONY: setup-pypy3 setup-pypy3: + pypy3 -m ensurepip pypy3 -m pip install --upgrade "setuptools<58" wheel pip pytest_remotedata .PHONY: install @@ -71,6 +76,7 @@ test: test-mypy # basic testing for the universal target .PHONY: test-univ +test-univ: export COCONUT_USE_COLOR=TRUE test-univ: python ./coconut/tests --strict --line-numbers --force python ./coconut/tests/dest/runner.py @@ -79,6 +85,7 @@ test-univ: # same as test-univ, but doesn't recompile unchanged test files; # should only be used when testing the tests not the compiler .PHONY: test-tests +test-tests: export COCONUT_USE_COLOR=TRUE test-tests: python ./coconut/tests --strict --line-numbers python ./coconut/tests/dest/runner.py @@ -86,6 +93,7 @@ test-tests: # same as test-univ but uses Python 2 .PHONY: 
test-py2 +test-py2: export COCONUT_USE_COLOR=TRUE test-py2: python2 ./coconut/tests --strict --line-numbers --force python2 ./coconut/tests/dest/runner.py @@ -93,6 +101,7 @@ test-py2: # same as test-univ but uses Python 3 .PHONY: test-py3 +test-py3: export COCONUT_USE_COLOR=TRUE test-py3: python3 ./coconut/tests --strict --line-numbers --force python3 ./coconut/tests/dest/runner.py @@ -100,6 +109,7 @@ test-py3: # same as test-univ but uses PyPy .PHONY: test-pypy +test-pypy: export COCONUT_USE_COLOR=TRUE test-pypy: pypy ./coconut/tests --strict --line-numbers --force pypy ./coconut/tests/dest/runner.py @@ -107,13 +117,23 @@ test-pypy: # same as test-univ but uses PyPy3 .PHONY: test-pypy3 +test-pypy3: export COCONUT_USE_COLOR=TRUE test-pypy3: pypy3 ./coconut/tests --strict --line-numbers --force pypy3 ./coconut/tests/dest/runner.py pypy3 ./coconut/tests/dest/extras.py +# same as test-pypy3 but includes verbose output for better debugging +.PHONY: test-pypy3-verbose +test-pypy3-verbose: export COCONUT_USE_COLOR=TRUE +test-pypy3-verbose: + pypy3 ./coconut/tests --strict --line-numbers --force --verbose --jobs 0 + pypy3 ./coconut/tests/dest/runner.py + pypy3 ./coconut/tests/dest/extras.py + # same as test-univ but also runs mypy .PHONY: test-mypy +test-mypy: export COCONUT_USE_COLOR=TRUE test-mypy: python ./coconut/tests --strict --force --target sys --mypy --follow-imports silent --ignore-missing-imports --allow-redefinition python ./coconut/tests/dest/runner.py @@ -121,6 +141,7 @@ test-mypy: # same as test-mypy but uses the universal target .PHONY: test-mypy-univ +test-mypy-univ: export COCONUT_USE_COLOR=TRUE test-mypy-univ: python ./coconut/tests --strict --force --mypy --follow-imports silent --ignore-missing-imports --allow-redefinition python ./coconut/tests/dest/runner.py @@ -128,6 +149,7 @@ test-mypy-univ: # same as test-univ but includes verbose output for better debugging .PHONY: test-verbose +test-verbose: export COCONUT_USE_COLOR=TRUE test-verbose: python 
./coconut/tests --strict --line-numbers --force --verbose --jobs 0 python ./coconut/tests/dest/runner.py @@ -135,6 +157,7 @@ test-verbose: # same as test-mypy but uses --verbose and --check-untyped-defs .PHONY: test-mypy-all +test-mypy-all: export COCONUT_USE_COLOR=TRUE test-mypy-all: python ./coconut/tests --strict --force --target sys --verbose --mypy --follow-imports silent --ignore-missing-imports --allow-redefinition --check-untyped-defs python ./coconut/tests/dest/runner.py @@ -142,6 +165,7 @@ test-mypy-all: # same as test-univ but also tests easter eggs .PHONY: test-easter-eggs +test-easter-eggs: export COCONUT_USE_COLOR=TRUE test-easter-eggs: python ./coconut/tests --strict --line-numbers --force python ./coconut/tests/dest/runner.py --test-easter-eggs @@ -154,6 +178,7 @@ test-pyparsing: test-univ # same as test-univ but uses --minify .PHONY: test-minify +test-minify: export COCONUT_USE_COLOR=TRUE test-minify: python ./coconut/tests --strict --line-numbers --force --minify python ./coconut/tests/dest/runner.py @@ -161,6 +186,7 @@ test-minify: # same as test-univ but watches tests before running them .PHONY: test-watch +test-watch: export COCONUT_USE_COLOR=TRUE test-watch: python ./coconut/tests --strict --line-numbers --force coconut ./coconut/tests/src/cocotest/agnostic ./coconut/tests/dest/cocotest --watch --strict --line-numbers @@ -176,6 +202,10 @@ test-mini: diff: git diff origin/develop +.PHONY: fix-develop +fix-develop: + git merge master -s ours + .PHONY: docs docs: clean sphinx-build -b html . ./docs diff --git a/README.rst b/README.rst index 8fa00205a..eba903ffc 100644 --- a/README.rst +++ b/README.rst @@ -25,7 +25,7 @@ Coconut is developed on GitHub_ and hosted on PyPI_. Installing Coconut is as ea pip install coconut -after which the entire world of Coconut will be at your disposal. 
To help you get started, check out these links for more information about Coconut: +To help you get started, check out these links for more information about Coconut: - Tutorial_: If you're new to Coconut, a good place to start is Coconut's **tutorial**. - Documentation_: If you're looking for info about a specific feature, check out Coconut's **documentation**. diff --git a/__coconut__/__init__.py b/__coconut__/__init__.py new file mode 100644 index 000000000..6918dfdbd --- /dev/null +++ b/__coconut__/__init__.py @@ -0,0 +1,26 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# ----------------------------------------------------------------------------------------------------------------------- +# INFO: +# ----------------------------------------------------------------------------------------------------------------------- + +""" +Author: Evan Hubinger +License: Apache 2.0 +Description: For type checking purposes only. Should never be imported. +""" + +# ----------------------------------------------------------------------------------------------------------------------- +# IMPORTS: +# ----------------------------------------------------------------------------------------------------------------------- + +from __future__ import print_function, absolute_import, unicode_literals, division + +from coconut.root import * # NOQA + +# ----------------------------------------------------------------------------------------------------------------------- +# ERROR: +# ----------------------------------------------------------------------------------------------------------------------- + +raise ImportError("Importing the top-level __coconut__ package should never be done at runtime; __coconut__ exists for type checking purposes only. 
Try 'import coconut.__coconut__' instead.") diff --git a/coconut/stubs/__coconut__.pyi b/__coconut__/__init__.pyi similarity index 100% rename from coconut/stubs/__coconut__.pyi rename to __coconut__/__init__.pyi diff --git a/coconut/stubs/coconut/py.typed b/__coconut__/py.typed similarity index 100% rename from coconut/stubs/coconut/py.typed rename to __coconut__/py.typed diff --git a/_coconut/__init__.py b/_coconut/__init__.py new file mode 100644 index 000000000..a35e80db1 --- /dev/null +++ b/_coconut/__init__.py @@ -0,0 +1,26 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# ----------------------------------------------------------------------------------------------------------------------- +# INFO: +# ----------------------------------------------------------------------------------------------------------------------- + +""" +Author: Evan Hubinger +License: Apache 2.0 +Description: For type checking purposes only. Should never be imported. +""" + +# ----------------------------------------------------------------------------------------------------------------------- +# IMPORTS: +# ----------------------------------------------------------------------------------------------------------------------- + +from __future__ import print_function, absolute_import, unicode_literals, division + +from coconut.root import * # NOQA + +# ----------------------------------------------------------------------------------------------------------------------- +# ERROR: +# ----------------------------------------------------------------------------------------------------------------------- + +raise ImportError("Importing the top-level _coconut package should never be done at runtime; _coconut exists for type checking purposes only. 
Try 'from coconut.__coconut__ import _coconut' instead.") diff --git a/coconut/stubs/_coconut.pyi b/_coconut/__init__.pyi similarity index 87% rename from coconut/stubs/_coconut.pyi rename to _coconut/__init__.pyi index b7d30de2e..9c66413ea 100644 --- a/coconut/stubs/_coconut.pyi +++ b/_coconut/__init__.pyi @@ -60,11 +60,32 @@ except ImportError: else: _abc.Sequence.register(_numpy.ndarray) +if sys.version_info < (3, 10): + try: + from typing_extensions import TypeAlias, ParamSpec, Concatenate + except ImportError: + TypeAlias = ... + ParamSpec = ... + typing.TypeAlias = TypeAlias + typing.ParamSpec = ParamSpec + typing.Concatenate = Concatenate + + +if sys.version_info < (3, 11): + try: + from typing_extensions import TypeVarTuple, Unpack + except ImportError: + TypeVarTuple = ... + Unpack = ... + typing.TypeVarTuple = TypeVarTuple + typing.Unpack = Unpack + # ----------------------------------------------------------------------------------------------------------------------- # STUB: # ----------------------------------------------------------------------------------------------------------------------- -typing = _t # The real _coconut doesn't import typing, but we want type-checkers to treat it as if it does +typing = _t + collections = _collections copy = _copy copyreg = _copyreg diff --git a/coconut/stubs/coconut/command/__init__.pyi b/_coconut/py.typed similarity index 100% rename from coconut/stubs/coconut/command/__init__.pyi rename to _coconut/py.typed diff --git a/coconut/__coconut__.py b/coconut/__coconut__.py index 81630eedd..a7bd65266 100644 --- a/coconut/__coconut__.py +++ b/coconut/__coconut__.py @@ -9,6 +9,8 @@ Author: Evan Hubinger License: Apache 2.0 Description: Mimics what a compiled __coconut__.py would do. + +Must match __coconut__.__init__. 
""" # ----------------------------------------------------------------------------------------------------------------------- diff --git a/coconut/stubs/coconut/__coconut__.pyi b/coconut/__coconut__.pyi similarity index 100% rename from coconut/stubs/coconut/__coconut__.pyi rename to coconut/__coconut__.pyi diff --git a/coconut/stubs/coconut/__init__.pyi b/coconut/__init__.pyi similarity index 100% rename from coconut/stubs/coconut/__init__.pyi rename to coconut/__init__.pyi diff --git a/coconut/_pyparsing.py b/coconut/_pyparsing.py index 6d1c37104..1806c433b 100644 --- a/coconut/_pyparsing.py +++ b/coconut/_pyparsing.py @@ -115,6 +115,12 @@ _trim_arity = _pyparsing._trim_arity _ParseResultsWithOffset = _pyparsing._ParseResultsWithOffset +if MODERN_PYPARSING: + warn( + "This version of Coconut is not built for pyparsing v3; some syntax features WILL NOT WORK" + + " (run either '{python} -m pip install cPyparsing<{max_ver}' or '{python} -m pip install pyparsing<{max_ver}' to fix)".format(python=sys.executable, max_ver=max_ver_str), + ) + USE_COMPUTATION_GRAPH = ( not MODERN_PYPARSING # not yet supported and not PYPY # experimentally determined @@ -174,11 +180,6 @@ def _parseCache(self, instring, loc, doActions=True, callPreParse=True): return value[0], value[1].copy() ParserElement.packrat_context = [] ParserElement._parseCache = _parseCache -else: - warn( - "This version of Coconut is not built for pyparsing v3; some syntax features WILL NOT WORK" - + " (run either '{python} -m pip install --upgrade cPyparsing' or '{python} -m pip install pyparsing<{max_ver}' to fix)".format(python=sys.executable, max_ver=max_ver_str), - ) # ----------------------------------------------------------------------------------------------------------------------- diff --git a/coconut/command/__init__.pyi b/coconut/command/__init__.pyi new file mode 100644 index 000000000..e69de29bb diff --git a/coconut/command/cli.py b/coconut/command/cli.py index 9755f52b2..dfb8cc4ba 100644 --- 
a/coconut/command/cli.py +++ b/coconut/command/cli.py @@ -74,7 +74,7 @@ "--and", metavar=("source", "dest"), type=str, - nargs=2, + nargs="+", action="append", help="additional source/dest pairs to compile", ) diff --git a/coconut/command/command.py b/coconut/command/command.py index e364952be..6922b7771 100644 --- a/coconut/command/command.py +++ b/coconut/command/command.py @@ -43,6 +43,7 @@ internal_assert, ) from coconut.constants import ( + PY32, fixpath, code_exts, comp_ext, @@ -63,9 +64,9 @@ mypy_install_arg, mypy_builtin_regex, coconut_pth_file, + error_color_code, ) from coconut.util import ( - printerr, univ_open, ver_tuple_to_str, install_custom_kernel, @@ -172,7 +173,9 @@ def exit_on_error(self): """Exit if exit_code is abnormal.""" if self.exit_code: if self.errmsg is not None: - logger.show("Exiting with error: " + self.errmsg) + # show on stdout with error color code so that stdout + # listeners see the error + logger.show("Coconut exiting with error: " + self.errmsg, color=error_color_code) self.errmsg = None if self.using_jobs: kill_children() @@ -192,6 +195,7 @@ def use_args(self, args, interact=True, original_args=None): unset_fast_pyparsing_reprs() if args.profile: collect_timing_info() + logger.enable_colors() logger.log(cli_version) if original_args is not None: @@ -203,6 +207,14 @@ def use_args(self, args, interact=True, original_args=None): logger.warn("extraneous --line-numbers argument passed; --mypy implies --line-numbers") if args.site_install and args.site_uninstall: raise CoconutException("cannot --site-install and --site-uninstall simultaneously") + for and_args in getattr(args, "and") or []: + if len(and_args) > 2: + raise CoconutException( + "--and accepts at most two arguments, source and dest ({n} given: {args!r})".format( + n=len(and_args), + args=and_args, + ), + ) # process general command args if args.recursion_limit is not None: @@ -399,7 +411,7 @@ def handling_exceptions(self): logger.print_exc() elif not isinstance(err, 
KeyboardInterrupt): logger.print_exc() - printerr(report_this_text) + logger.printerr(report_this_text) self.register_exit_code(err=err) def compile_path(self, path, write=True, package=True, **kwargs): @@ -493,7 +505,7 @@ def compile(self, codepath, destpath=None, package=False, run=False, force=False if show_unchanged: logger.show_tabulated("Left unchanged", showpath(destpath), "(pass --force to override).") if self.show: - print(foundhash) + logger.print(foundhash) if run: self.execute_file(destpath, argv_source_path=codepath) @@ -508,7 +520,7 @@ def callback(compiled): writefile(opened, compiled) logger.show_tabulated("Compiled to", showpath(destpath), ".") if self.show: - print(compiled) + logger.print(compiled) if run: if destpath is None: self.execute(compiled, path=codepath, allow_show=False) @@ -619,10 +631,9 @@ def get_input(self, more=False): try: received = self.prompt.input(more) except KeyboardInterrupt: - print() - printerr("KeyboardInterrupt") + logger.printerr("\nKeyboardInterrupt") except EOFError: - print() + logger.print() self.exit_runner() else: if received.startswith(exit_chars): @@ -654,7 +665,7 @@ def start_prompt(self): if compiled: self.execute(compiled, use_eval=None) except KeyboardInterrupt: - printerr("\nKeyboardInterrupt") + logger.printerr("\nKeyboardInterrupt") def exit_runner(self, exit_code=0): """Exit the interpreter.""" @@ -689,7 +700,7 @@ def execute(self, compiled=None, path=None, use_eval=False, allow_show=True): if compiled is not None: if allow_show and self.show: - print(compiled) + logger.print(compiled) if path is None: # header is not included if not self.mypy: @@ -784,16 +795,16 @@ def run_mypy(self, paths=(), code=None): logger.log("[MyPy]", line) if line.startswith(mypy_silent_err_prefixes): if code is None: # file - printerr(line) + logger.printerr(line) self.register_exit_code(errmsg="MyPy error") elif not line.startswith(mypy_silent_non_err_prefixes): if code is None: # file - printerr(line) + 
logger.printerr(line) if any(infix in line for infix in mypy_err_infixes): self.register_exit_code(errmsg="MyPy error") if line not in self.mypy_errs: if code is not None: # interpreter - printerr(line) + logger.printerr(line) self.mypy_errs.append(line) def run_silent_cmd(self, *args): @@ -959,15 +970,21 @@ def recompile(path, src, dest, package): def get_python_lib(self): """Get current Python lib location.""" - from distutils import sysconfig # expensive, so should only be imported here - return fixpath(sysconfig.get_python_lib()) + # these are expensive, so should only be imported here + if PY32: + from sysconfig import get_path + python_lib = get_path("purelib") + else: + from distutils import sysconfig + python_lib = sysconfig.get_python_lib() + return fixpath(python_lib) def site_install(self): """Add Coconut's pth file to site-packages.""" python_lib = self.get_python_lib() shutil.copy(coconut_pth_file, python_lib) - logger.show_sig("Added %s to %s." % (os.path.basename(coconut_pth_file), python_lib)) + logger.show_sig("Added %s to %s" % (os.path.basename(coconut_pth_file), python_lib)) def site_uninstall(self): """Remove Coconut's pth file from site-packages.""" @@ -976,6 +993,6 @@ def site_uninstall(self): if os.path.isfile(pth_file): os.remove(pth_file) - logger.show_sig("Removed %s from %s." 
% (os.path.basename(coconut_pth_file), python_lib)) + logger.show_sig("Removed %s from %s" % (os.path.basename(coconut_pth_file), python_lib)) else: raise CoconutException("failed to find %s file to remove" % (os.path.basename(coconut_pth_file),)) diff --git a/coconut/stubs/coconut/command/command.pyi b/coconut/command/command.pyi similarity index 100% rename from coconut/stubs/coconut/command/command.pyi rename to coconut/command/command.pyi diff --git a/coconut/command/util.py b/coconut/command/util.py index 0dfe60e46..087bb6c91 100644 --- a/coconut/command/util.py +++ b/coconut/command/util.py @@ -21,7 +21,6 @@ import sys import os -import traceback import subprocess import shutil from select import select @@ -36,6 +35,7 @@ logger, complain, internal_assert, + isatty, ) from coconut.exceptions import ( CoconutException, @@ -261,18 +261,18 @@ def call_output(cmd, stdin=None, encoding_errors="replace", **kwargs): stdout, stderr, retcode = [], [], None while retcode is None: if stdin is not None: - logger.log_prefix("<0 ", stdin.rstrip()) + logger.log_prefix("STDIN < ", stdin.rstrip()) raw_out, raw_err = p.communicate(stdin) stdin = None out = raw_out.decode(get_encoding(sys.stdout), encoding_errors) if raw_out else "" if out: - logger.log_prefix("1> ", out.rstrip()) + logger.log_stdout(out.rstrip()) stdout.append(out) err = raw_err.decode(get_encoding(sys.stderr), encoding_errors) if raw_err else "" if err: - logger.log_prefix("2> ", err.rstrip()) + logger.log(err.rstrip()) stderr.append(err) retcode = p.poll() @@ -369,11 +369,8 @@ def stdin_readable(): return bool(select([sys.stdin], [], [], 0)[0]) except Exception: logger.log_exc() - try: - return not sys.stdin.isatty() - except Exception: - logger.log_exc() - return False + # by default assume not readable + return not isatty(sys.stdin, default=True) def set_recursion_limit(limit): @@ -455,7 +452,7 @@ def set_style(self, style): elif prompt_toolkit is None: raise CoconutException("syntax highlighting is not 
supported on this Python version") elif style == "list": - print("Coconut Styles: none, " + ", ".join(pygments.styles.get_all_styles())) + logger.print("Coconut Styles: none, " + ", ".join(pygments.styles.get_all_styles())) sys.exit(0) elif style in pygments.styles.get_all_styles(): self.style = style @@ -585,7 +582,7 @@ def handling_errors(self, all_errors_exit=False): if tb is None or not subpath(tb.tb_frame.f_code.co_filename, base_dir): break tb = tb.tb_next - traceback.print_exception(etype, value, tb) + logger.print_exception(etype, value, tb) if all_errors_exit: self.exit(1) diff --git a/coconut/compiler/compiler.py b/coconut/compiler/compiler.py index 34654ecf1..0968db355 100644 --- a/coconut/compiler/compiler.py +++ b/coconut/compiler/compiler.py @@ -37,6 +37,7 @@ from threading import Lock from coconut._pyparsing import ( + USE_COMPUTATION_GRAPH, ParseBaseException, ParseResults, col as getcol, @@ -54,6 +55,7 @@ openindent, closeindent, strwrapper, + errwrapper, lnwrapper, unwrapper, holds, @@ -75,9 +77,9 @@ super_names, custom_op_var, all_keywords, - internally_reserved_symbols, + reserved_compiler_symbols, delimiter_symbols, - exit_chars, + reserved_command_symbols, streamline_grammar_for_len, ) from coconut.util import ( @@ -114,6 +116,7 @@ attrgetter_atom_split, attrgetter_atom_handle, itemgetter_handle, + partial_op_item_handle, ) from coconut.compiler.util import ( sys_target, @@ -150,6 +153,9 @@ normalize_indent_markers, try_parse, prep_grammar, + split_leading_whitespace, + ordered_items, + tuple_str_of_str, ) from coconut.compiler.header import ( minify_header, @@ -352,7 +358,6 @@ class Compiler(Grammar, pickleable_obj): """The Coconut compiler.""" lock = Lock() current_compiler = [None] # list for mutability - operators = None preprocs = [ lambda self: self.prepare, @@ -378,7 +383,7 @@ def __init__(self, *args, **kwargs): """Creates a new compiler with the given parsing parameters.""" self.setup(*args, **kwargs) - # changes here should be 
reflected in stubs.coconut.convenience.setup + # changes here should be reflected in the stub for coconut.convenience.setup def setup(self, target=None, strict=False, minify=False, line_numbers=False, keep_lines=False, no_tco=False, no_wrap=False): """Initializes parsing parameters.""" if target is None: @@ -435,6 +440,9 @@ def genhash(self, code, package_level=-1): ), ) + temp_var_counts = None + operators = None + def reset(self, keep_state=False): """Resets references.""" self.indchar = None @@ -442,7 +450,9 @@ def reset(self, keep_state=False): self.refs = [] self.skips = [] self.docstring = "" - self.temp_var_counts = defaultdict(int) + # need to keep temp_var_counts in interpreter to avoid overwriting typevars + if self.temp_var_counts is None or not keep_state: + self.temp_var_counts = defaultdict(int) self.parsing_context = defaultdict(list) self.add_code_before = {} self.add_code_before_regexes = {} @@ -478,6 +488,14 @@ def inner_environment(self): self.kept_lines = kept_lines self.num_lines = num_lines + def current_parsing_context(self, name, default=None): + """Get the current parsing context for the given name.""" + stack = self.parsing_context[name] + if stack: + return stack[-1] + else: + return default + @contextmanager def disable_checks(self): """Run the block without checking names or strict errors.""" @@ -533,26 +551,45 @@ def method(original, loc, tokens): @classmethod def bind(cls): """Binds reference objects to the proper parse actions.""" - # parsing_context["class"] handling - new_classdef = Wrap(cls.classdef_ref, cls.method("class_manage")) - cls.classdef <<= trace_attach(new_classdef, cls.method("classdef_handle")) - - new_datadef = Wrap(cls.datadef_ref, cls.method("class_manage")) - cls.datadef <<= trace_attach(new_datadef, cls.method("datadef_handle")) + # handle parsing_context for class definitions + new_classdef = trace_attach(cls.classdef_ref, cls.method("classdef_handle")) + cls.classdef <<= Wrap(new_classdef, 
cls.method("class_manage"), greedy=True) - new_match_datadef = Wrap(cls.match_datadef_ref, cls.method("class_manage")) - cls.match_datadef <<= trace_attach(new_match_datadef, cls.method("match_datadef_handle")) + new_datadef = trace_attach(cls.datadef_ref, cls.method("datadef_handle")) + cls.datadef <<= Wrap(new_datadef, cls.method("class_manage"), greedy=True) - cls.stmt_lambdef_body <<= Wrap(cls.stmt_lambdef_body_ref, cls.method("func_manage")) - cls.func_suite <<= Wrap(cls.func_suite_ref, cls.method("func_manage")) - cls.func_suite_tokens <<= Wrap(cls.func_suite_tokens_ref, cls.method("func_manage")) - cls.math_funcdef_suite <<= Wrap(cls.math_funcdef_suite_ref, cls.method("func_manage")) + new_match_datadef = trace_attach(cls.match_datadef_ref, cls.method("match_datadef_handle")) + cls.match_datadef <<= Wrap(new_match_datadef, cls.method("class_manage"), greedy=True) cls.classname <<= trace_attach(cls.classname_ref, cls.method("classname_handle"), greedy=True) + # handle parsing_context for function definitions + new_stmt_lambdef = trace_attach(cls.stmt_lambdef_ref, cls.method("stmt_lambdef_handle")) + cls.stmt_lambdef <<= Wrap(new_stmt_lambdef, cls.method("func_manage"), greedy=True) + + new_decoratable_normal_funcdef_stmt = trace_attach( + cls.decoratable_normal_funcdef_stmt_ref, + cls.method("decoratable_funcdef_stmt_handle"), + ) + cls.decoratable_normal_funcdef_stmt <<= Wrap(new_decoratable_normal_funcdef_stmt, cls.method("func_manage"), greedy=True) + + new_decoratable_async_funcdef_stmt = trace_attach( + cls.decoratable_async_funcdef_stmt_ref, + cls.method("decoratable_funcdef_stmt_handle", is_async=True), + ) + cls.decoratable_async_funcdef_stmt <<= Wrap(new_decoratable_async_funcdef_stmt, cls.method("func_manage"), greedy=True) + + # handle parsing_context for type aliases + new_type_alias_stmt = trace_attach(cls.type_alias_stmt_ref, cls.method("type_alias_stmt_handle")) + cls.type_alias_stmt <<= Wrap(new_type_alias_stmt, 
cls.method("type_alias_stmt_manage"), greedy=True) + # greedy handlers (we need to know about them even if suppressed and/or they use the parsing_context) - cls.name <<= attach(cls.unsafe_name, cls.method("name_handle"), greedy=True) cls.comment <<= attach(cls.comment_tokens, cls.method("comment_handle"), greedy=True) + cls.type_param <<= trace_attach(cls.type_param_ref, cls.method("type_param_handle"), greedy=True) + + # name handlers + cls.refname <<= attach(cls.name_ref, cls.method("name_handle")) + cls.setname <<= attach(cls.name_ref, cls.method("name_handle", assign=True)) # abnormally named handlers cls.moduledoc_item <<= trace_attach(cls.moduledoc, cls.method("set_moduledoc")) @@ -589,7 +626,6 @@ def bind(cls): cls.name_match_funcdef <<= trace_attach(cls.name_match_funcdef_ref, cls.method("name_match_funcdef_handle")) cls.op_match_funcdef <<= trace_attach(cls.op_match_funcdef_ref, cls.method("op_match_funcdef_handle")) cls.yield_from <<= trace_attach(cls.yield_from_ref, cls.method("yield_from_handle")) - cls.stmt_lambdef <<= trace_attach(cls.stmt_lambdef_ref, cls.method("stmt_lambdef_handle")) cls.typedef <<= trace_attach(cls.typedef_ref, cls.method("typedef_handle")) cls.typedef_default <<= trace_attach(cls.typedef_default_ref, cls.method("typedef_handle")) cls.unsafe_typedef_default <<= trace_attach(cls.unsafe_typedef_default_ref, cls.method("unsafe_typedef_handle")) @@ -606,16 +642,7 @@ def bind(cls): cls.anon_namedtuple <<= trace_attach(cls.anon_namedtuple_ref, cls.method("anon_namedtuple_handle")) cls.base_match_for_stmt <<= trace_attach(cls.base_match_for_stmt_ref, cls.method("base_match_for_stmt_handle")) cls.unsafe_typedef_tuple <<= trace_attach(cls.unsafe_typedef_tuple_ref, cls.method("unsafe_typedef_tuple_handle")) - - # handle normal and async function definitions - cls.decoratable_normal_funcdef_stmt <<= trace_attach( - cls.decoratable_normal_funcdef_stmt_ref, - cls.method("decoratable_funcdef_stmt_handle"), - ) - 
cls.decoratable_async_funcdef_stmt <<= trace_attach( - cls.decoratable_async_funcdef_stmt_ref, - cls.method("decoratable_funcdef_stmt_handle", is_async=True), - ) + cls.funcname_typeparams <<= trace_attach(cls.funcname_typeparams_ref, cls.method("funcname_typeparams_handle")) # these handlers just do strict/target checking cls.u_string <<= trace_attach(cls.u_string_ref, cls.method("u_string_check")) @@ -623,9 +650,9 @@ def bind(cls): cls.star_assign_item <<= trace_attach(cls.star_assign_item_ref, cls.method("star_assign_item_check")) cls.classic_lambdef <<= trace_attach(cls.classic_lambdef_ref, cls.method("lambdef_check")) cls.star_sep_arg <<= trace_attach(cls.star_sep_arg_ref, cls.method("star_sep_check")) - cls.star_sep_vararg <<= trace_attach(cls.star_sep_vararg_ref, cls.method("star_sep_check")) + cls.star_sep_setarg <<= trace_attach(cls.star_sep_setarg_ref, cls.method("star_sep_check")) cls.slash_sep_arg <<= trace_attach(cls.slash_sep_arg_ref, cls.method("slash_sep_check")) - cls.slash_sep_vararg <<= trace_attach(cls.slash_sep_vararg_ref, cls.method("slash_sep_check")) + cls.slash_sep_setarg <<= trace_attach(cls.slash_sep_setarg_ref, cls.method("slash_sep_check")) cls.endline_semicolon <<= trace_attach(cls.endline_semicolon_ref, cls.method("endline_semicolon_check")) cls.async_stmt <<= trace_attach(cls.async_stmt_ref, cls.method("async_stmt_check")) cls.async_comp_for <<= trace_attach(cls.async_comp_for_ref, cls.method("async_comp_check")) @@ -634,6 +661,7 @@ def bind(cls): cls.match_dotted_name_const <<= trace_attach(cls.match_dotted_name_const_ref, cls.method("match_dotted_name_const_check")) cls.except_star_clause <<= trace_attach(cls.except_star_clause_ref, cls.method("except_star_clause_check")) cls.subscript_star <<= trace_attach(cls.subscript_star_ref, cls.method("subscript_star_check")) + cls.top_level_case_kwd <<= trace_attach(cls.case_kwd, cls.method("top_level_case_kwd_check")) # these checking handlers need to be greedy since they can be suppressed 
cls.match_check_equals <<= trace_attach(cls.match_check_equals_ref, cls.method("match_check_equals_check"), greedy=True) @@ -781,11 +809,23 @@ def wrap_passthrough(self, text, multiline=True, early=False): def wrap_comment(self, text, reformat=True): """Wrap a comment.""" if reformat: - text = self.reformat(text, ignore_errors=False) + whitespace, base_comment = split_leading_whitespace(text) + text = whitespace + self.reformat(base_comment, ignore_errors=False) return "#" + self.add_ref("comment", text) + unwrapper + def wrap_error(self, error): + """Create a symbol that will raise the given error in postprocessing.""" + return errwrapper + self.add_ref("error", error) + unwrapper + + def raise_or_wrap_error(self, error): + """Raise if USE_COMPUTATION_GRAPH else wrap.""" + if USE_COMPUTATION_GRAPH: + raise error + else: + return self.wrap_error(error) + def type_ignore_comment(self): - return (" " if not self.minify else "") + self.wrap_comment(" type: ignore", reformat=False) + return self.wrap_comment(" type: ignore", reformat=False) def wrap_line_number(self, ln): """Wrap a line number.""" @@ -957,7 +997,7 @@ def run_final_checks(self, original, keep_state=False): logger.warn_err( self.make_err( CoconutSyntaxWarning, - "found unused import: " + name, + "found unused import: " + self.reformat(name, ignore_errors=True), original, loc, extra="add NOQA comment or remove --strict to dismiss", @@ -1182,7 +1222,7 @@ def operator_proc(self, inputstring, **kwargs): raise self.make_err(CoconutSyntaxError, "cannot redefine number " + repr(op), raw_line, ln=self.adjust(ln)) if self.existing_operator_regex.match(op): raise self.make_err(CoconutSyntaxError, "cannot redefine existing operator " + repr(op), raw_line, ln=self.adjust(ln)) - for sym in internally_reserved_symbols + exit_chars: + for sym in reserved_compiler_symbols + reserved_command_symbols: if sym in op: sym_repr = ascii(sym.replace(strwrapper, '"')) raise self.make_err(CoconutSyntaxError, "invalid custom 
operator", raw_line, ln=self.adjust(ln), extra="cannot contain " + sym_repr) @@ -1541,9 +1581,9 @@ def tre_return_handle(loc, tokens): if not func_args or func_args == args: tre_recurse = "continue" elif mock_var is None: - tre_recurse = func_args + " = " + args + "\ncontinue" + tre_recurse = tuple_str_of_str(func_args) + " = " + tuple_str_of_str(args) + "\ncontinue" else: - tre_recurse = func_args + " = " + mock_var + "(" + args + ")" + "\ncontinue" + tre_recurse = tuple_str_of_str(func_args) + " = " + mock_var + "(" + args + ")" + "\ncontinue" tre_check_var = self.get_temp_var("tre_check") return handle_indentation( @@ -1614,10 +1654,11 @@ def transform_returns(self, original, loc, raw_lines, tre_return_grammar=None, i self.internal_assert(not (not normal_func and (attempt_tre or attempt_tco)), original, loc, "cannot tail call optimize async/generator functions") if ( + not normal_func # don't transform generator returns if they're supported - is_gen and self.target_info >= (3, 3) + and (not is_gen or self.target_info >= (3, 3)) # don't transform async returns if they're supported - or is_async and self.target_info >= (3, 5) + and (not is_async or self.target_info >= (3, 5)) ): func_code = "".join(raw_lines) return func_code, tco, tre @@ -1705,7 +1746,7 @@ def transform_returns(self, original, loc, raw_lines, tre_return_grammar=None, i func_code = "".join(lines) return func_code, tco, tre - def proc_funcdef(self, original, loc, decorators, funcdef, is_async): + def proc_funcdef(self, original, loc, decorators, funcdef, is_async, in_method, is_stmt_lambda): """Determines if TCO or TRE can be done and if so does it, handles dotted function names, and universalizes async functions.""" # process tokens @@ -1826,7 +1867,8 @@ def proc_funcdef(self, original, loc, decorators, funcdef, is_async): attempt_tre = ( func_name is not None and not is_gen - # tre does not work with decorators, though tco does + and not in_method + and not is_stmt_lambda and not decorators ) if 
attempt_tre: @@ -1879,7 +1921,7 @@ def proc_funcdef(self, original, loc, decorators, funcdef, is_async): i=i, ), ) - mock_body_lines.append("return " + func_args) + mock_body_lines.append("return " + tuple_str_of_str(func_args)) mock_def = handle_indentation( """ def {mock_var}({mock_paramdef}): @@ -1956,10 +1998,15 @@ def deferred_code_proc(self, inputstring, add_code_at_start=False, ignore_names= # handle early passthroughs line = self.base_passthrough_repl(line, wrap_char=early_passthrough_wrapper, **kwargs) + # look for deferred errors + if errwrapper in raw_line: + err_ref = raw_line.split(errwrapper, 1)[1].split(unwrapper, 1)[0] + raise self.get_ref("error", err_ref) + # look for functions if line.startswith(funcwrapper): func_id = int(line[len(funcwrapper):]) - original, loc, decorators, funcdef, is_async = self.get_ref("func", func_id) + original, loc, decorators, funcdef, is_async, in_method, is_stmt_lambda = self.get_ref("func", func_id) # process inner code decorators = self.deferred_code_proc(decorators, add_code_at_start=True, ignore_names=ignore_names, **kwargs) @@ -1978,12 +2025,12 @@ def deferred_code_proc(self, inputstring, add_code_at_start=False, ignore_names= out.append(bef_ind) out.extend(pre_def_lines) - out.append(self.proc_funcdef(original, loc, decorators, "".join(post_def_lines), is_async)) + out.append(self.proc_funcdef(original, loc, decorators, "".join(post_def_lines), is_async, in_method, is_stmt_lambda)) out.append(aft_ind) # look for add_code_before regexes else: - for name, raw_code in self.add_code_before.items(): + for name, raw_code in ordered_items(self.add_code_before): if name in ignore_names: continue @@ -1993,7 +2040,7 @@ def deferred_code_proc(self, inputstring, add_code_at_start=False, ignore_names= if replacement is None: saw_name = regex.search(line) else: - line, saw_name = regex.subn(replacement, line) + line, saw_name = regex.subn(lambda match: replacement, line) if saw_name: # process inner code @@ -2074,14 +2121,17 
@@ def function_call_handle(self, loc, tokens): def pipe_item_split(self, tokens, loc): """Process a pipe item, which could be a partial, an attribute access, a method call, or an expression. - Return (type, split) where split is - - (expr,) for expression, - - (func, pos_args, kwd_args) for partial, - - (name, args) for attr/method, and - - (op, args)+ for itemgetter.""" + + Return (type, split) where split is: + - (expr,) for expression + - (func, pos_args, kwd_args) for partial + - (name, args) for attr/method + - (op, args)+ for itemgetter + - (op, arg) for right op partial + """ # list implies artificial tokens, which must be expr if isinstance(tokens, list) or "expr" in tokens: - internal_assert(len(tokens) == 1, "invalid expr pipe item tokens", tokens) + internal_assert(len(tokens) == 1, "invalid pipe item", tokens) return "expr", tokens elif "partial" in tokens: func, args = tokens @@ -2093,6 +2143,16 @@ def pipe_item_split(self, tokens, loc): elif "itemgetter" in tokens: internal_assert(len(tokens) >= 2, "invalid itemgetter pipe item tokens", tokens) return "itemgetter", tokens + elif "op partial" in tokens: + inner_toks, = tokens + if "left partial" in inner_toks: + arg, op = inner_toks + return "partial", (op, arg, "") + elif "right partial" in inner_toks: + op, arg = inner_toks + return "right op partial", (op, arg) + else: + raise CoconutInternalException("invalid op partial tokens in pipe_item", inner_toks) else: raise CoconutInternalException("invalid pipe item tokens", tokens) @@ -2108,8 +2168,8 @@ def pipe_handle(self, original, loc, tokens, **kwargs): # we've only been given one operand, so we can't do any optimization, so just produce the standard object name, split_item = self.pipe_item_split(item, loc) if name == "expr": - self.internal_assert(len(split_item) == 1, original, loc) - return split_item[0] + expr, = split_item + return expr elif name == "partial": self.internal_assert(len(split_item) == 3, original, loc) return 
"_coconut.functools.partial(" + join_args(split_item) + ")" @@ -2117,6 +2177,8 @@ def pipe_handle(self, original, loc, tokens, **kwargs): return attrgetter_atom_handle(loc, item) elif name == "itemgetter": return itemgetter_handle(item) + elif name == "right op partial": + return partial_op_item_handle(item) else: raise CoconutInternalException("invalid split pipe item", split_item) @@ -2177,6 +2239,11 @@ def pipe_handle(self, original, loc, tokens, **kwargs): raise CoconutInternalException("pipe into invalid implicit itemgetter operation", op) out = fmtstr.format(x=out, args=args) return out + elif name == "right op partial": + if stars: + raise CoconutDeferredSyntaxError("cannot star pipe into operator partial", loc) + op, arg = split_item + return "({op})({x}, {arg})".format(op=op, x=subexpr, arg=arg) else: raise CoconutInternalException("invalid split pipe item", split_item) @@ -2279,13 +2346,13 @@ def item_handle(self, loc, tokens): def set_moduledoc(self, tokens): """Set the docstring.""" - internal_assert(len(tokens) == 2, "invalid moduledoc tokens", tokens) - self.docstring = self.reformat(tokens[0], ignore_errors=False) + "\n\n" - return tokens[1] + moduledoc, endline = tokens + self.docstring = self.reformat(moduledoc, ignore_errors=False) + "\n\n" + return endline def yield_from_handle(self, tokens): """Process Python 3.3 yield from.""" - internal_assert(len(tokens) == 1, "invalid yield from tokens", tokens) + expr, = tokens if self.target_info < (3, 3): ret_val_name = self.get_temp_var("yield_from") self.add_code_before[ret_val_name] = handle_indentation( @@ -2300,19 +2367,19 @@ def yield_from_handle(self, tokens): ''', add_newline=True, ).format( - expr=tokens[0], + expr=expr, yield_from_var=self.get_temp_var("yield_from"), yield_err_var=self.get_temp_var("yield_err"), ret_val_name=ret_val_name, ) return ret_val_name else: - return "yield from " + tokens[0] + return "yield from " + expr def endline_handle(self, original, loc, tokens): """Add line 
number information to end of line.""" - self.internal_assert(len(tokens) == 1, original, loc, "invalid endline tokens", tokens) - lines = tokens[0].splitlines(True) + endline, = tokens + lines = endline.splitlines(True) if self.minify: lines = lines[0] out = [] @@ -2324,12 +2391,12 @@ def endline_handle(self, original, loc, tokens): def comment_handle(self, original, loc, tokens): """Store comment in comments.""" - self.internal_assert(len(tokens) == 1, original, loc, "invalid comment tokens", tokens) + comment_marker, = tokens ln = self.adjust(lineno(loc, original)) if ln in self.comments: - self.comments[ln] += " " + tokens[0] + self.comments[ln] += " " + comment_marker else: - self.comments[ln] = tokens[0] + self.comments[ln] = comment_marker return "" def kwd_augassign_handle(self, original, loc, tokens): @@ -2393,18 +2460,13 @@ def augassign_stmt_handle(self, original, loc, tokens): def classdef_handle(self, original, loc, tokens): """Process class definitions.""" - name, classlist_toks, body = tokens + decorators, name, paramdefs, classlist_toks, body = tokens - out = "class " + name + out = "".join(paramdefs) + decorators + "class " + name # handle classlist - if len(classlist_toks) == 0: - if self.target.startswith("3"): - out += "" - else: - out += "(_coconut.object)" - - else: + base_classes = [] + if classlist_toks: pos_args, star_args, kwd_args, dubstar_args = self.split_function_call(classlist_toks, loc) # check for just inheriting from object @@ -2429,9 +2491,15 @@ def classdef_handle(self, original, loc, tokens): out = "@_coconut_handle_cls_kwargs(" + join_args(kwd_args, dubstar_args) + ")\n" + out kwd_args = dubstar_args = () - out += "(" + join_args(pos_args, star_args, kwd_args, dubstar_args) + ")" + base_classes.append(join_args(pos_args, star_args, kwd_args, dubstar_args)) - out += body + if paramdefs: + base_classes.append(self.get_generic_for_typevars()) + + if not classlist_toks and not self.target.startswith("3"): + 
base_classes.append("_coconut.object") + + out += "(" + ", ".join(base_classes) + ")" + body # add override detection if self.target_info < (3, 6): @@ -2441,13 +2509,13 @@ def classdef_handle(self, original, loc, tokens): def match_datadef_handle(self, original, loc, tokens): """Process pattern-matching data blocks.""" - if len(tokens) == 3: - name, match_tokens, stmts = tokens + if len(tokens) == 4: + decorators, name, match_tokens, stmts = tokens inherit = None - elif len(tokens) == 4: - name, match_tokens, inherit, stmts = tokens + elif len(tokens) == 5: + decorators, name, match_tokens, inherit, stmts = tokens else: - raise CoconutInternalException("invalid pattern-matching data tokens", tokens) + raise CoconutInternalException("invalid match_datadef tokens", tokens) if len(match_tokens) == 1: matches, = match_tokens @@ -2485,17 +2553,17 @@ def __new__(_coconut_cls, *{match_to_args_var}, **{match_to_kwargs_var}): ) namedtuple_call = self.make_namedtuple_call(name, matcher.name_list) - return self.assemble_data(name, namedtuple_call, inherit, extra_stmts, stmts, matcher.name_list) + return self.assemble_data(decorators, name, namedtuple_call, inherit, extra_stmts, stmts, matcher.name_list) def datadef_handle(self, loc, tokens): """Process data blocks.""" - if len(tokens) == 3: - name, original_args, stmts = tokens + if len(tokens) == 5: + decorators, name, paramdefs, original_args, stmts = tokens inherit = None - elif len(tokens) == 4: - name, original_args, inherit, stmts = tokens + elif len(tokens) == 6: + decorators, name, paramdefs, original_args, inherit, stmts = tokens else: - raise CoconutInternalException("invalid data tokens", tokens) + raise CoconutInternalException("invalid datadef tokens", tokens) all_args = [] # string definitions for all args base_args = [] # names of all the non-starred args @@ -2507,19 +2575,15 @@ def datadef_handle(self, loc, tokens): star, default, typedef = False, None, None if "name" in arg: - internal_assert(len(arg) == 1) - 
argname = arg[0] + argname, = arg elif "default" in arg: - internal_assert(len(arg) == 2) argname, default = arg elif "star" in arg: - internal_assert(len(arg) == 1) - star, argname = True, arg[0] + argname, = arg + star = True elif "type" in arg: - internal_assert(len(arg) == 2) argname, typedef = arg elif "type default" in arg: - internal_assert(len(arg) == 3) argname, typedef, default = arg else: raise CoconutInternalException("invalid data arg tokens", arg) @@ -2624,7 +2688,7 @@ def __new__(_coconut_cls, {all_args}): namedtuple_args = base_args + ([] if starred_arg is None else [starred_arg]) namedtuple_call = self.make_namedtuple_call(name, namedtuple_args, types) - return self.assemble_data(name, namedtuple_call, inherit, extra_stmts, stmts, namedtuple_args) + return self.assemble_data(decorators, name, namedtuple_call, inherit, extra_stmts, stmts, namedtuple_args, paramdefs) def make_namedtuple_call(self, name, namedtuple_args, types=None): """Construct a namedtuple call.""" @@ -2643,13 +2707,16 @@ def make_namedtuple_call(self, name, namedtuple_args, types=None): else: return '_coconut.collections.namedtuple("' + name + '", ' + tuple_str_of(namedtuple_args, add_quotes=True) + ')' - def assemble_data(self, name, namedtuple_call, inherit, extra_stmts, stmts, match_args): + def assemble_data(self, decorators, name, namedtuple_call, inherit, extra_stmts, stmts, match_args, paramdefs=()): """Create a data class definition from the given components.""" # create class out = ( - "class " + name + "(" + "".join(paramdefs) + + decorators + + "class " + name + "(" + namedtuple_call + (", " + inherit if inherit is not None else "") + + (", " + self.get_generic_for_typevars() if paramdefs else "") + (", _coconut.object" if not self.target.startswith("3") else "") + "):\n" + openindent @@ -2869,25 +2936,23 @@ def import_handle(self, original, loc, tokens): def complex_raise_stmt_handle(self, tokens): """Process Python 3 raise from statement.""" - 
internal_assert(len(tokens) == 2, "invalid raise from tokens", tokens) + raise_expr, from_expr = tokens if self.target.startswith("3"): - return "raise " + tokens[0] + " from " + tokens[1] + return "raise " + raise_expr + " from " + from_expr else: raise_from_var = self.get_temp_var("raise_from") return ( - raise_from_var + " = " + tokens[0] + "\n" - + raise_from_var + ".__cause__ = " + tokens[1] + "\n" + raise_from_var + " = " + raise_expr + "\n" + + raise_from_var + ".__cause__ = " + from_expr + "\n" + "raise " + raise_from_var ) def dict_comp_handle(self, loc, tokens): """Process Python 2.7 dictionary comprehension.""" - internal_assert(len(tokens) == 3, "invalid dictionary comprehension tokens", tokens) + key, val, comp = tokens if self.target.startswith("3"): - key, val, comp = tokens return "{" + key + ": " + val + " " + comp + "}" else: - key, val, comp = tokens return "dict(((" + key + "), (" + val + ")) " + comp + ")" def pattern_error(self, original, loc, value_var, check_var, match_error_class='_coconut_MatchError'): @@ -3028,48 +3093,62 @@ def set_letter_literal_handle(self, tokens): def stmt_lambdef_handle(self, original, loc, tokens): """Process multi-line lambdef statements.""" - if len(tokens) == 2: - params, stmts = tokens - elif len(tokens) == 3: - params, stmts, last = tokens - if "tests" in tokens: + kwds, params, stmts_toks = tokens + + is_async = False + for kwd in kwds: + if kwd == "async": + internal_assert(not is_async, "duplicate stmt_lambdef async keyword", kwd) + is_async = True + else: + raise CoconutInternalException("invalid stmt_lambdef keyword", kwd) + + if len(stmts_toks) == 1: + stmts, = stmts_toks + elif len(stmts_toks) == 2: + stmts, last = stmts_toks + if "tests" in stmts_toks: stmts = stmts.asList() + ["return " + last] else: stmts = stmts.asList() + [last] else: - raise CoconutInternalException("invalid statement lambda tokens", tokens) + raise CoconutInternalException("invalid statement lambda body tokens", stmts_toks) name 
= self.get_temp_var("lambda") body = openindent + "\n".join(stmts) + closeindent if isinstance(params, str): - self.add_code_before[name] = "def " + name + params + ":\n" + body + decorators = "" + funcdef = "def " + name + params + ":\n" + body else: match_tokens = [name] + list(params) before_colon, after_docstring = self.name_match_funcdef_handle(original, loc, match_tokens) - self.add_code_before[name] = ( - "@_coconut_mark_as_match\n" - + before_colon + decorators = "@_coconut_mark_as_match\n" + funcdef = ( + before_colon + ":\n" + after_docstring + body ) + self.add_code_before[name] = self.decoratable_funcdef_stmt_handle(original, loc, [decorators, funcdef], is_async, is_stmt_lambda=True) + return name - def decoratable_funcdef_stmt_handle(self, original, loc, tokens, is_async=False): + def decoratable_funcdef_stmt_handle(self, original, loc, tokens, is_async=False, is_stmt_lambda=False): """Wraps the given function for later processing""" if len(tokens) == 1: - decorators, funcdef = "", tokens[0] + funcdef, = tokens + decorators = "" elif len(tokens) == 2: decorators, funcdef = tokens else: raise CoconutInternalException("invalid function definition tokens", tokens) - return funcwrapper + self.add_ref("func", (original, loc, decorators, funcdef, is_async)) + "\n" + return funcwrapper + self.add_ref("func", (original, loc, decorators, funcdef, is_async, self.in_method, is_stmt_lambda)) + "\n" def await_expr_handle(self, original, loc, tokens): """Check for Python 3.5 await expression.""" - self.internal_assert(len(tokens) == 1, original, loc, "invalid await statement tokens", tokens) + await_expr, = tokens if not self.target: raise self.make_err( CoconutTargetError, @@ -3078,13 +3157,13 @@ def await_expr_handle(self, original, loc, tokens): target="sys", ) elif self.target_info >= (3, 5): - return "await " + tokens[0] + return "await " + await_expr elif self.target_info >= (3, 3): # we have to wrap the yield here so it doesn't cause the function to be 
detected as an async generator - return self.wrap_passthrough("(yield from " + tokens[0] + ")", early=True) + return self.wrap_passthrough("(yield from " + await_expr + ")", early=True) else: # this yield is fine because we can detect the _coconut.asyncio.From - return "(yield _coconut.asyncio.From(" + tokens[0] + "))" + return "(yield _coconut.asyncio.From(" + await_expr + "))" def unsafe_typedef_handle(self, tokens): """Process type annotations without a comma after them.""" @@ -3143,6 +3222,99 @@ def typed_assign_stmt_handle(self, tokens): annotation=self.wrap_typedef(typedef, ignore_target=True), ) + def funcname_typeparams_handle(self, tokens): + """Handle function names with type parameters.""" + if len(tokens) == 1: + name, = tokens + return name + else: + name, paramdefs = tokens + # temp_marker will be set back later, but needs to be a unique name until then for add_code_before + temp_marker = self.get_temp_var("type_param_func") + self.add_code_before[temp_marker] = "".join(paramdefs) + self.add_code_before_replacements[temp_marker] = name + return temp_marker + + funcname_typeparams_handle.ignore_one_token = True + + def type_param_handle(self, loc, tokens): + """Compile a type param into an assignment.""" + bounds = "" + if "TypeVar" in tokens: + TypeVarFunc = "TypeVar" + if len(tokens) == 1: + name, = tokens + else: + name, bound = tokens + bounds = ", bound=" + bound + elif "TypeVarTuple" in tokens: + TypeVarFunc = "TypeVarTuple" + name, = tokens + elif "ParamSpec" in tokens: + TypeVarFunc = "ParamSpec" + name, = tokens + else: + raise CoconutInternalException("invalid type_param tokens", tokens) + + typevar_info = self.current_parsing_context("typevars") + if typevar_info is not None: + if name in typevar_info["all_typevars"]: + raise CoconutDeferredSyntaxError("type variable {name!r} already defined", loc) + temp_name = self.get_temp_var("typevar_" + name) + typevar_info["all_typevars"][name] = temp_name + 
typevar_info["new_typevars"].append((TypeVarFunc, temp_name)) + name = temp_name + + return '{name} = _coconut.typing.{TypeVarFunc}("{name}"{bounds})\n'.format( + name=name, + TypeVarFunc=TypeVarFunc, + bounds=bounds, + ) + + def get_generic_for_typevars(self): + """Get the Generic instances for the current typevars.""" + typevar_info = self.current_parsing_context("typevars") + internal_assert(typevar_info is not None, "get_generic_for_typevars called with no typevars") + generics = [] + for TypeVarFunc, name in typevar_info["new_typevars"]: + if TypeVarFunc in ("TypeVar", "ParamSpec"): + generics.append(name) + elif TypeVarFunc == "TypeVarTuple": + if self.target_info >= (3, 11): + generics.append("*" + name) + else: + generics.append("_coconut.typing.Unpack[" + name + "]") + else: + raise CoconutInternalException("invalid TypeVarFunc", TypeVarFunc) + return "_coconut.typing.Generic[" + ", ".join(generics) + "]" + + @contextmanager + def type_alias_stmt_manage(self, item=None, original=None, loc=None): + """Manage the typevars parsing context.""" + typevars_stack = self.parsing_context["typevars"] + prev_typevar_info = self.current_parsing_context("typevars") + typevars_stack.append({ + "all_typevars": {} if prev_typevar_info is None else prev_typevar_info["all_typevars"].copy(), + "new_typevars": [], + }) + try: + yield + finally: + typevars_stack.pop() + + def type_alias_stmt_handle(self, tokens): + """Handle type alias statements.""" + if len(tokens) == 2: + name, typedef = tokens + paramdefs = () + else: + name, paramdefs, typedef = tokens + return "".join(paramdefs) + self.typed_assign_stmt_handle([ + name, + "_coconut.typing.TypeAlias", + self.wrap_typedef(typedef), + ]) + def with_stmt_handle(self, tokens): """Process with statements.""" withs, body = tokens @@ -3211,8 +3383,7 @@ def cases_stmt_handle(self, original, loc, tokens): def f_string_handle(self, loc, tokens): """Process Python 3.6 format strings.""" - internal_assert(len(tokens) == 1, "invalid 
format string tokens", tokens) - string = tokens[0] + string, = tokens # strip raw r raw = string.startswith("r") @@ -3220,7 +3391,7 @@ def f_string_handle(self, loc, tokens): string = string[1:] # strip wrappers - internal_assert(string.startswith(strwrapper) and string.endswith(unwrapper)) + internal_assert(string.startswith(strwrapper) and string.endswith(unwrapper), "invalid f string item", string) string = string[1:-1] # get text @@ -3541,6 +3712,10 @@ def match_check_equals_check(self, original, loc, tokens): """Check for old-style =item in pattern-matching.""" return self.check_strict("deprecated equality-checking '=...' pattern; use '==...' instead", original, loc, tokens) + def top_level_case_kwd_check(self, original, loc, tokens): + """Check for case keyword at top level in match-case block.""" + return self.check_strict("deprecated case keyword at top level in match-case block (use Python 3.10 match-case syntax instead)", original, loc, tokens) + def check_py(self, version, name, original, loc, tokens): """Check for Python-version-specific syntax.""" self.internal_assert(len(tokens) == 1, original, loc, "invalid " + name + " tokens", tokens) @@ -3556,7 +3731,7 @@ def class_manage(self, item, original, loc): cls_stack = self.parsing_context["class"] if cls_stack: cls_context = cls_stack[-1] - if cls_context["name"] is None: # this should only happen when the managed class item will fail to fully match + if cls_context["name"] is None: # this should only happen when the managed class item will fail to fully parse name_prefix = cls_context["name_prefix"] elif cls_context["in_method"]: # if we're in a function, we shouldn't use the prefix to look up the class name_prefix = "" @@ -3570,60 +3745,110 @@ def class_manage(self, item, original, loc): "in_method": False, }) try: - yield + # handles support for class type variables + with self.type_alias_stmt_manage(): + yield finally: cls_stack.pop() def classname_handle(self, tokens): """Handle class names.""" - 
cls_stack = self.parsing_context["class"] - internal_assert(cls_stack, "found classname outside of class", tokens) + cls_context = self.current_parsing_context("class") + internal_assert(cls_context is not None, "found classname outside of class", tokens) name, = tokens - cls_stack[-1]["name"] = name + cls_context["name"] = name return name @contextmanager def func_manage(self, item, original, loc): """Manage the function parsing context.""" - cls_stack = self.parsing_context["class"] - if cls_stack: - in_method, cls_stack[-1]["in_method"] = cls_stack[-1]["in_method"], True - try: + cls_context = self.current_parsing_context("class") + if cls_context is not None: + in_method, cls_context["in_method"] = cls_context["in_method"], True + try: + # handles support for function type variables + with self.type_alias_stmt_manage(): yield - finally: - cls_stack[-1]["in_method"] = in_method - else: - yield + finally: + if cls_context is not None: + cls_context["in_method"] = in_method - def name_handle(self, loc, tokens): + @property + def in_method(self): + """Determine if currently in a method.""" + cls_context = self.current_parsing_context("class") + return cls_context is not None and cls_context["name"] is not None and cls_context["in_method"] + + def name_handle(self, original, loc, tokens, assign=False): """Handle the given base name.""" - name, = tokens # avoid the overhead of an internal_assert call here + name, = tokens + if name.startswith("\\"): + name = name[1:] + escaped = True + else: + escaped = False if self.disable_name_check: return name - if self.strict: + + # raise_or_wrap_error for all errors here to make sure we don't + # raise spurious errors if not using the computation graph + + if not escaped: + typevar_info = self.current_parsing_context("typevars") + if typevar_info is not None: + typevars = typevar_info["all_typevars"] + if name in typevars: + if assign: + return self.raise_or_wrap_error( + self.make_err( + CoconutSyntaxError, + "cannot reassign 
type variable: " + repr(name), + original, + loc, + ), + ) + return typevars[name] + + if self.strict and not assign: self.unused_imports.pop(name, None) - if name == "exec": + if not escaped and name == "exec": if self.target.startswith("3"): return name + elif assign: + return self.raise_or_wrap_error( + self.make_err( + CoconutTargetError, + "found Python-3-only assignment to 'exec' as a variable name", + original, + loc, + target="3", + ), + ) else: return "_coconut_exec" - elif name in super_names and not self.target.startswith("3"): - cls_stack = self.parsing_context["class"] - if cls_stack: - cls_context = cls_stack[-1] - if cls_context["name"] is not None and cls_context["in_method"]: - enclosing_cls = cls_context["name_prefix"] + cls_context["name"] - # temp_marker will be set back later, but needs to be a unique name until then for add_code_before - temp_marker = self.get_temp_var("super") - self.add_code_before[temp_marker] = "__class__ = " + enclosing_cls + "\n" - self.add_code_before_replacements[temp_marker] = name - return temp_marker - return name - elif name.startswith(reserved_prefix) and name not in self.operators: - raise CoconutDeferredSyntaxError("variable names cannot start with reserved prefix " + reserved_prefix, loc) + elif not assign and name in super_names and not self.target.startswith("3"): + if self.in_method: + cls_context = self.current_parsing_context("class") + enclosing_cls = cls_context["name_prefix"] + cls_context["name"] + # temp_marker will be set back later, but needs to be a unique name until then for add_code_before + temp_marker = self.get_temp_var("super") + self.add_code_before[temp_marker] = "__class__ = " + enclosing_cls + "\n" + self.add_code_before_replacements[temp_marker] = name + return temp_marker + else: + return name + elif not escaped and name.startswith(reserved_prefix) and name not in self.operators: + return self.raise_or_wrap_error( + self.make_err( + CoconutSyntaxError, + "variable names cannot start 
with reserved prefix " + repr(reserved_prefix), + original, + loc, + ), + ) else: return name diff --git a/coconut/compiler/grammar.py b/coconut/compiler/grammar.py index 755470d8f..f0bafb250 100644 --- a/coconut/compiler/grammar.py +++ b/coconut/compiler/grammar.py @@ -336,32 +336,64 @@ def lambdef_handle(tokens): raise CoconutInternalException("invalid lambda tokens", tokens) -def typedef_callable_handle(tokens): +def typedef_callable_handle(loc, tokens): """Process -> to Callable inside type annotations.""" - if len(tokens) == 1: - return '_coconut.typing.Callable[..., ' + tokens[0] + ']' - elif len(tokens) == 2: - return '_coconut.typing.Callable[[' + tokens[0] + '], ' + tokens[1] + ']' + if len(tokens) == 2: + async_kwd, ret_typedef = tokens + args_typedef = "..." + elif len(tokens) == 3: + async_kwd, args_tokens, ret_typedef = tokens + args = [] + paramspec = None + ellipsis = None + for arg_toks in args_tokens: + if paramspec is not None: + raise CoconutDeferredSyntaxError("ParamSpecs must come at end of Callable parameters", loc) + elif ellipsis is not None: + raise CoconutDeferredSyntaxError("only a single ellipsis is supported in Callable parameters", loc) + elif "arg" in arg_toks: + arg, = arg_toks + args.append(arg) + elif "paramspec" in arg_toks: + paramspec, = arg_toks + elif "ellipsis" in arg_toks: + if args or paramspec is not None: + raise CoconutDeferredSyntaxError("only a single ellipsis is supported in Callable parameters", loc) + ellipsis, = arg_toks + else: + raise CoconutInternalException("invalid typedef_callable arg tokens", arg_toks) + if ellipsis is not None: + args_typedef = ellipsis + elif paramspec is None: + args_typedef = "[" + ", ".join(args) + "]" + elif not args: + args_typedef = paramspec + else: + args_typedef = "_coconut.typing.Concatenate[" + ", ".join(args) + ", " + paramspec + "]" else: raise CoconutInternalException("invalid Callable typedef tokens", tokens) + if async_kwd: + internal_assert(async_kwd == "async", "invalid 
typedef_callable async kwd", async_kwd) + ret_typedef = "_coconut.typing.Awaitable[" + ret_typedef + "]" + return "_coconut.typing.Callable[" + args_typedef + ", " + ret_typedef + "]" def make_suite_handle(tokens): """Make simple statements into suites.""" - internal_assert(len(tokens) == 1, "invalid simple suite tokens", tokens) - return "\n" + openindent + tokens[0] + closeindent + suite, = tokens + return "\n" + openindent + suite + closeindent def implicit_return_handle(tokens): """Add an implicit return.""" - internal_assert(len(tokens) == 1, "invalid implicit return tokens", tokens) - return "return " + tokens[0] + expr, = tokens + return "return " + expr def math_funcdef_handle(tokens): """Process assignment function definition.""" - internal_assert(len(tokens) == 2, "invalid assignment function definition tokens", tokens) - return tokens[0] + ("" if tokens[1].startswith("\n") else " ") + tokens[1] + funcdef, suite = tokens + return funcdef + ("" if suite.startswith("\n") else " ") + suite def except_handle(tokens): @@ -420,8 +452,8 @@ def itemgetter_handle(tokens): def class_suite_handle(tokens): """Process implicit pass in class suite.""" - internal_assert(len(tokens) == 1, "invalid implicit pass in class suite tokens", tokens) - return ": pass" + tokens[0] + newline, = tokens + return ": pass" + newline def simple_kwd_assign_handle(tokens): @@ -495,8 +527,8 @@ def where_handle(tokens): def kwd_err_msg_handle(tokens): """Handle keyword parse error messages.""" - internal_assert(len(tokens) == 1, "invalid keyword err msg tokens", tokens) - return 'invalid use of the keyword "' + tokens[0] + '"' + kwd, = tokens + return 'invalid use of the keyword "' + kwd + '"' def alt_ternary_handle(tokens): @@ -507,14 +539,15 @@ def alt_ternary_handle(tokens): def yield_funcdef_handle(tokens): """Handle yield def explicit generators.""" - internal_assert(len(tokens) == 1, "invalid yield def tokens", tokens) - return tokens[0] + openindent + handle_indentation( + funcdef, 
= tokens + return funcdef + handle_indentation( """ if False: yield """, add_newline=True, - ) + closeindent + extra_indent=1, + ) def partial_op_item_handle(tokens): @@ -656,14 +689,28 @@ class Grammar(object): where_kwd = keyword("where", explicit_prefix=colon) addpattern_kwd = keyword("addpattern", explicit_prefix=colon) then_kwd = keyword("then", explicit_prefix=colon) + type_kwd = keyword("type", explicit_prefix=colon) ellipsis = Forward() ellipsis_tokens = Literal("...") | fixto(Literal("\u2026"), "...") - lt = ~Literal("<<") + ~Literal("<=") + ~Literal("<|") + ~Literal("<..") + ~Literal("<*") + Literal("<") - gt = ~Literal(">>") + ~Literal(">=") + Literal(">") - le = Literal("<=") | fixto(Literal("\u2264"), "<=") - ge = Literal(">=") | fixto(Literal("\u2265"), ">=") + lt = ( + ~Literal("<<") + + ~Literal("<=") + + ~Literal("<|") + + ~Literal("<..") + + ~Literal("<*") + + Literal("<") + | fixto(Literal("\u228a"), "<") + ) + gt = ( + ~Literal(">>") + + ~Literal(">=") + + Literal(">") + | fixto(Literal("\u228b"), ">") + ) + le = Literal("<=") | fixto(Literal("\u2264") | Literal("\u2286"), "<=") + ge = Literal(">=") | fixto(Literal("\u2265") | Literal("\u2287"), ">=") ne = Literal("!=") | fixto(Literal("\xac=") | Literal("\u2260"), "!=") mul_star = star | fixto(Literal("\xd7"), "*") @@ -684,17 +731,23 @@ class Grammar(object): test_no_chain, dubcolon = disable_inside(test, unsafe_dubcolon) test_no_infix, backtick = disable_inside(test, unsafe_backtick) - unsafe_name_regex = r"" + base_name_regex = r"" for no_kwd in keyword_vars + const_vars: - unsafe_name_regex += r"(?!" + no_kwd + r"\b)" - # we disallow '"{ after to not match the "b" in b"" or the "s" in s{} - unsafe_name_regex += r"(?![0-9])\w+\b(?![{" + strwrapper + r"])" - unsafe_name = combine(Optional(backslash.suppress()) + regex_item(unsafe_name_regex)) + base_name_regex += r"(?!" 
+ no_kwd + r"\b)" + # we disallow ['"{] after to not match the "b" in b"" or the "s" in s{} + base_name_regex += r"(?![0-9])\w+\b(?![{" + strwrapper + r"])" + base_name = regex_item(base_name_regex) + + refname = Forward() + setname = Forward() + name_ref = combine(Optional(backslash) + base_name) + unsafe_name = combine(Optional(backslash.suppress()) + base_name) - name = Forward() # use unsafe_name for dotted components since name should only be used for base names - dotted_name = condense(name + ZeroOrMore(dot + unsafe_name)) - must_be_dotted_name = condense(name + OneOrMore(dot + unsafe_name)) + dotted_refname = condense(refname + ZeroOrMore(dot + unsafe_name)) + dotted_setname = condense(setname + ZeroOrMore(dot + unsafe_name)) + unsafe_dotted_name = condense(unsafe_name + ZeroOrMore(dot + unsafe_name)) + must_be_dotted_name = condense(refname + OneOrMore(dot + unsafe_name)) integer = combine(Word(nums) + ZeroOrMore(underscore.suppress() + Word(nums))) binint = combine(Word("01") + ZeroOrMore(underscore.suppress() + Word("01"))) @@ -740,7 +793,8 @@ class Grammar(object): endline_ref = condense(OneOrMore(Literal("\n"))) lineitem = ZeroOrMore(comment) + endline newline = condense(OneOrMore(lineitem)) - end_simple_stmt_item = FollowedBy(semicolon | newline) + # rparen handles simple stmts ending parenthesized stmt lambdas + end_simple_stmt_item = FollowedBy(semicolon | newline | rparen) start_marker = StringStart() moduledoc_marker = condense(ZeroOrMore(lineitem) - Optional(moduledoc_item)) @@ -916,13 +970,15 @@ class Grammar(object): | fixto(keyword("in"), "_coconut.operator.contains") ) partialable_op = base_op_item | infix_op - partial_op_item = attach( + partial_op_item_tokens = ( labeled_group(dot.suppress() + partialable_op + test_no_infix, "right partial") - | labeled_group(test_no_infix + partialable_op + dot.suppress(), "left partial"), - partial_op_item_handle, + | labeled_group(test_no_infix + partialable_op + dot.suppress(), "left partial") ) + 
partial_op_item = attach(partial_op_item_tokens, partial_op_item_handle) op_item = trace(partial_op_item | base_op_item) + partial_op_atom_tokens = lparen.suppress() + partial_op_item_tokens + rparen.suppress() + typedef = Forward() typedef_default = Forward() unsafe_typedef_default = Forward() @@ -932,23 +988,23 @@ class Grammar(object): # we include (var)arg_comma to ensure the pattern matches the whole arg arg_comma = comma | fixto(FollowedBy(rparen), "") - vararg_comma = arg_comma | fixto(FollowedBy(colon), "") - typedef_ref = name + colon.suppress() + typedef_test + arg_comma + setarg_comma = arg_comma | fixto(FollowedBy(colon), "") + typedef_ref = setname + colon.suppress() + typedef_test + arg_comma default = condense(equals + test) - unsafe_typedef_default_ref = name + colon.suppress() + typedef_test + Optional(default) + unsafe_typedef_default_ref = setname + colon.suppress() + typedef_test + Optional(default) typedef_default_ref = unsafe_typedef_default_ref + arg_comma - tfpdef = typedef | condense(name + arg_comma) - tfpdef_default = typedef_default | condense(name + Optional(default) + arg_comma) + tfpdef = typedef | condense(setname + arg_comma) + tfpdef_default = typedef_default | condense(setname + Optional(default) + arg_comma) star_sep_arg = Forward() star_sep_arg_ref = condense(star + arg_comma) - star_sep_vararg = Forward() - star_sep_vararg_ref = condense(star + vararg_comma) + star_sep_setarg = Forward() + star_sep_setarg_ref = condense(star + setarg_comma) slash_sep_arg = Forward() slash_sep_arg_ref = condense(slash + arg_comma) - slash_sep_vararg = Forward() - slash_sep_vararg_ref = condense(slash + vararg_comma) + slash_sep_setarg = Forward() + slash_sep_setarg_ref = condense(slash + setarg_comma) just_star = star + rparen just_slash = slash + rparen @@ -970,16 +1026,16 @@ class Grammar(object): ), ) parameters = condense(lparen + args_list + rparen) - var_args_list = trace( + set_args_list = trace( ~just_op + addspace( ZeroOrMore( condense( 
- # everything here must end with vararg_comma - (star | dubstar) + name + vararg_comma - | star_sep_vararg - | slash_sep_vararg - | name + Optional(default) + vararg_comma, + # everything here must end with setarg_comma + (star | dubstar) + setname + setarg_comma + | star_sep_setarg + | slash_sep_setarg + | setname + Optional(default) + setarg_comma, ), ), ), @@ -1003,7 +1059,7 @@ class Grammar(object): call_item = ( dubstar + test | star + test - | name + default + | unsafe_name + default | namedexpr_test ) function_call_tokens = lparen.suppress() + ( @@ -1018,7 +1074,7 @@ class Grammar(object): tokenlist( Group( questionmark - | name + condense(equals + questionmark) + | unsafe_name + condense(equals + questionmark) | call_item, ), comma, @@ -1051,7 +1107,7 @@ class Grammar(object): anon_namedtuple = Forward() anon_namedtuple_ref = tokenlist( Group( - name + unsafe_name + Optional(colon.suppress() + typedef_test) + equals.suppress() + test, ), @@ -1136,7 +1192,7 @@ class Grammar(object): atom = ( # known_atom must come before name to properly parse string prefixes known_atom - | name + | refname | paren_atom | passthrough_atom ) @@ -1174,7 +1230,7 @@ class Grammar(object): complex_trailer = no_partial_complex_trailer | partial_trailer trailer = simple_trailer | complex_trailer - attrgetter_atom_tokens = dot.suppress() + dotted_name + Optional( + attrgetter_atom_tokens = dot.suppress() + unsafe_dotted_name + Optional( lparen + Optional(methodcaller_args) + rparen.suppress(), ) attrgetter_atom = attach(attrgetter_atom_tokens, attrgetter_atom_handle) @@ -1201,7 +1257,7 @@ class Grammar(object): simple_assign = Forward() simple_assign_ref = maybeparens( lparen, - (name | passthrough_atom) + (setname | passthrough_atom) + ZeroOrMore(ZeroOrMore(complex_trailer) + OneOrMore(simple_trailer)), rparen, ) @@ -1222,10 +1278,21 @@ class Grammar(object): typed_assign_stmt_ref = simple_assign + colon.suppress() + typedef_test + Optional(equals.suppress() + test_expr) 
basic_stmt = trace(addspace(ZeroOrMore(assignlist + equals) + test_expr)) + type_param = Forward() + type_param_ref = ( + (setname + Optional((colon | le).suppress() + typedef_test))("TypeVar") + | (star.suppress() + setname)("TypeVarTuple") + | (dubstar.suppress() + setname)("ParamSpec") + ) + type_params = Group(lbrack.suppress() + tokenlist(type_param, comma) + rbrack.suppress()) + + type_alias_stmt = Forward() + type_alias_stmt_ref = type_kwd.suppress() + setname + Optional(type_params) + equals.suppress() + typedef_test + impl_call_arg = disallow_keywords(reserved_vars) + ( keyword_atom | number - | dotted_name + | dotted_refname ) impl_call = attach( disallow_keywords(reserved_vars) @@ -1324,13 +1391,16 @@ class Grammar(object): labeled_group(attrgetter_atom_tokens, "attrgetter") + pipe_op | labeled_group(itemgetter_atom_tokens, "itemgetter") + pipe_op | labeled_group(partial_atom_tokens, "partial") + pipe_op + | labeled_group(partial_op_atom_tokens, "op partial") + pipe_op + # expr must come at end | labeled_group(comp_pipe_expr, "expr") + pipe_op ) pipe_augassign_item = trace( # should match pipe_item but with pipe_op -> end_simple_stmt_item and no expr labeled_group(attrgetter_atom_tokens, "attrgetter") + end_simple_stmt_item | labeled_group(itemgetter_atom_tokens, "itemgetter") + end_simple_stmt_item - | labeled_group(partial_atom_tokens, "partial") + end_simple_stmt_item, + | labeled_group(partial_atom_tokens, "partial") + end_simple_stmt_item + | labeled_group(partial_op_atom_tokens, "op partial") + end_simple_stmt_item, ) last_pipe_item = Group( lambdef("expr") @@ -1339,6 +1409,7 @@ class Grammar(object): attrgetter_atom_tokens("attrgetter"), itemgetter_atom_tokens("itemgetter"), partial_atom_tokens("partial"), + partial_op_atom_tokens("op partial"), comp_pipe_expr("expr"), ), ) @@ -1372,37 +1443,44 @@ class Grammar(object): base_suite = Forward() classic_lambdef = Forward() - classic_lambdef_params = maybeparens(lparen, var_args_list, rparen) - 
new_lambdef_params = lparen.suppress() + var_args_list + rparen.suppress() | name + classic_lambdef_params = maybeparens(lparen, set_args_list, rparen) + new_lambdef_params = lparen.suppress() + set_args_list + rparen.suppress() | setname classic_lambdef_ref = addspace(lambda_kwd + condense(classic_lambdef_params + colon)) new_lambdef = attach(new_lambdef_params + arrow.suppress(), lambdef_handle) implicit_lambdef = fixto(arrow, "lambda _=None:") lambdef_base = classic_lambdef | new_lambdef | implicit_lambdef stmt_lambdef = Forward() - stmt_lambdef_body = Forward() match_guard = Optional(keyword("if").suppress() + namedexpr_test) closing_stmt = longest(new_testlist_star_expr("tests"), unsafe_simple_stmt_item) stmt_lambdef_match_params = Group(lparen.suppress() + match_args_list + match_guard + rparen.suppress()) stmt_lambdef_params = Optional( - attach(name, add_parens_handle) + attach(setname, add_parens_handle) | parameters | stmt_lambdef_match_params, default="(_=None)", ) - stmt_lambdef_body_ref = ( + stmt_lambdef_body = Group( Group(OneOrMore(simple_stmt_item + semicolon.suppress())) + Optional(closing_stmt) - | Group(ZeroOrMore(simple_stmt_item + semicolon.suppress())) + closing_stmt + | Group(ZeroOrMore(simple_stmt_item + semicolon.suppress())) + closing_stmt, ) general_stmt_lambdef = ( - keyword("def").suppress() + Group( + any_len_perm( + async_kwd, + ), + ) + keyword("def").suppress() + stmt_lambdef_params + arrow.suppress() + stmt_lambdef_body ) match_stmt_lambdef = ( - match_kwd.suppress() - + keyword("def").suppress() + Group( + any_len_perm( + match_kwd.suppress(), + async_kwd, + ), + ) + keyword("def").suppress() + stmt_lambdef_match_params + arrow.suppress() + stmt_lambdef_body @@ -1412,11 +1490,24 @@ class Grammar(object): lambdef <<= addspace(lambdef_base + test) | stmt_lambdef lambdef_no_cond = trace(addspace(lambdef_base + test_no_cond)) - typedef_callable_params = ( - lparen.suppress() + Optional(testlist, default="") + rparen.suppress() - | 
Optional(negable_atom_item) + typedef_callable_arg = Group( + test("arg") + | (dubstar.suppress() + refname)("paramspec"), + ) + typedef_callable_params = Optional( + Group( + labeled_group(maybeparens(lparen, ellipsis_tokens, rparen), "ellipsis") + | lparen.suppress() + Optional(tokenlist(typedef_callable_arg, comma)) + rparen.suppress() + | labeled_group(negable_atom_item, "arg"), + ), + ) + unsafe_typedef_callable = attach( + Optional(async_kwd, default="") + + typedef_callable_params + + arrow.suppress() + + typedef_test, + typedef_callable_handle, ) - unsafe_typedef_callable = attach(typedef_callable_params + arrow.suppress() + typedef_test, typedef_callable_handle) unsafe_typedef_trailer = ( # use special type signifier for item_handle Group(fixto(lbrack + rbrack, "type:[]")) @@ -1458,7 +1549,7 @@ class Grammar(object): namedexpr = Forward() namedexpr_ref = addspace( - name + colon_eq + ( + setname + colon_eq + ( test + ~colon_eq | attach(namedexpr, add_parens_handle) ), @@ -1477,13 +1568,21 @@ class Grammar(object): classdef = Forward() classname = Forward() - classname_ref = name + decorators = Forward() + classname_ref = setname classlist = Group( Optional(function_call_tokens) + ~equals, # don't match class destructuring assignment ) class_suite = suite | attach(newline, class_suite_handle) - classdef_ref = keyword("class").suppress() + classname + classlist + class_suite + classdef_ref = ( + Optional(decorators, default="") + + keyword("class").suppress() + + classname + + Optional(type_params, default=()) + + classlist + + class_suite + ) async_comp_for = Forward() comp_iter = Forward() @@ -1514,25 +1613,33 @@ class Grammar(object): | continue_stmt ) - # maybeparens here allow for using custom operator names there - dotted_as_name = Group( - dotted_name + imp_name = ( + # maybeparens allows for using custom operator names here + maybeparens(lparen, setname, rparen) + | passthrough_item + ) + dotted_imp_name = ( + dotted_setname + | passthrough_item + ) 
+ import_item = Group( + dotted_imp_name - Optional( keyword("as").suppress() - - maybeparens(lparen, name, rparen), + - imp_name, ), ) - import_as_name = Group( - maybeparens(lparen, name, rparen) + from_import_item = Group( + imp_name - Optional( keyword("as").suppress() - - maybeparens(lparen, name, rparen), + - imp_name, ), ) - import_names = Group(maybeparens(lparen, tokenlist(dotted_as_name, comma), rparen)) - from_import_names = Group(maybeparens(lparen, tokenlist(import_as_name, comma), rparen)) + import_names = Group(maybeparens(lparen, tokenlist(import_item, comma), rparen)) + from_import_names = Group(maybeparens(lparen, tokenlist(from_import_item, comma), rparen)) basic_import = keyword("import").suppress() - (import_names | Group(star)) - import_from_name = condense(ZeroOrMore(unsafe_dot) + dotted_name | OneOrMore(unsafe_dot) | star) + import_from_name = condense(ZeroOrMore(unsafe_dot) + dotted_setname | OneOrMore(unsafe_dot) | star) from_import = ( keyword("from").suppress() - import_from_name @@ -1549,11 +1656,11 @@ class Grammar(object): augassign_stmt_ref = simple_assign + augassign_rhs simple_kwd_assign = attach( - maybeparens(lparen, itemlist(name, comma), rparen) + Optional(equals.suppress() - test_expr), + maybeparens(lparen, itemlist(setname, comma), rparen) + Optional(equals.suppress() - test_expr), simple_kwd_assign_handle, ) kwd_augassign = Forward() - kwd_augassign_ref = name + augassign_rhs + kwd_augassign_ref = setname + augassign_rhs kwd_assign = ( kwd_augassign | simple_kwd_assign @@ -1594,7 +1701,7 @@ class Grammar(object): match_tuple = Group(lparen + matchlist_tuple + rparen.suppress()) match_lazy = Group(lbanana + matchlist_list + rbanana.suppress()) - interior_name_match = labeled_group(name, "var") + interior_name_match = labeled_group(setname, "var") match_string = interleaved_tokenlist( # f_string_atom must come first f_string_atom("f_string") | fixed_len_string_tokens("string"), @@ -1631,17 +1738,17 @@ class Grammar(object): | 
match_const("const") | (keyword_atom | keyword("is").suppress() + negable_atom_item)("is") | (keyword("in").suppress() + negable_atom_item)("in") - | (lbrace.suppress() + matchlist_dict + Optional(dubstar.suppress() + (name | condense(lbrace + rbrace))) + rbrace.suppress())("dict") + | (lbrace.suppress() + matchlist_dict + Optional(dubstar.suppress() + (setname | condense(lbrace + rbrace))) + rbrace.suppress())("dict") | (Optional(set_s.suppress()) + lbrace.suppress() + matchlist_set + rbrace.suppress())("set") | iter_match | match_lazy("lazy") | sequence_match | star_match | (lparen.suppress() + match + rparen.suppress())("paren") - | (data_kwd.suppress() + dotted_name + lparen.suppress() + matchlist_data + rparen.suppress())("data") - | (keyword("class").suppress() + dotted_name + lparen.suppress() + matchlist_data + rparen.suppress())("class") - | (dotted_name + lparen.suppress() + matchlist_data + rparen.suppress())("data_or_class") - | Optional(keyword("as").suppress()) + name("var"), + | (data_kwd.suppress() + dotted_refname + lparen.suppress() + matchlist_data + rparen.suppress())("data") + | (keyword("class").suppress() + dotted_refname + lparen.suppress() + matchlist_data + rparen.suppress())("class") + | (dotted_refname + lparen.suppress() + matchlist_data + rparen.suppress())("data_or_class") + | Optional(keyword("as").suppress()) + setname("var"), ), ) @@ -1654,7 +1761,7 @@ class Grammar(object): matchlist_infix = bar_or_match + OneOrMore(Group(infix_op + Optional(negable_atom_item))) infix_match = labeled_group(matchlist_infix, "infix") | bar_or_match - matchlist_as = infix_match + OneOrMore(keyword("as").suppress() + name) + matchlist_as = infix_match + OneOrMore(keyword("as").suppress() + setname) as_match = labeled_group(matchlist_as, "as") | infix_match matchlist_and = as_match + OneOrMore(keyword("and").suppress() + as_match) @@ -1688,9 +1795,8 @@ class Grammar(object): base_destructuring_stmt = Optional(match_kwd.suppress()) + many_match + 
equals.suppress() + test_expr destructuring_stmt_ref, match_dotted_name_const_ref = disable_inside(base_destructuring_stmt, must_be_dotted_name + ~lparen) - cases_stmt = Forward() - # both syntaxes here must be kept matching except for the keywords - cases_kwd = cases_kwd | case_kwd + top_level_case_kwd = Forward() + # both syntaxes here must be kept the same except for the keywords case_match_co_syntax = trace( Group( (match_kwd | case_kwd).suppress() @@ -1701,7 +1807,7 @@ class Grammar(object): ), ) cases_stmt_co_syntax = ( - cases_kwd + testlist_star_namedexpr + colon.suppress() + newline.suppress() + (cases_kwd | top_level_case_kwd) + testlist_star_namedexpr + colon.suppress() + newline.suppress() + indent.suppress() + Group(OneOrMore(case_match_co_syntax)) + dedent.suppress() + Optional(keyword("else").suppress() + suite) ) @@ -1719,9 +1825,16 @@ class Grammar(object): + indent.suppress() + Group(OneOrMore(case_match_py_syntax)) + dedent.suppress() + Optional(keyword("else").suppress() - suite) ) + cases_stmt = Forward() cases_stmt_ref = cases_stmt_co_syntax | cases_stmt_py_syntax - assert_stmt = addspace(keyword("assert") - testlist) + assert_stmt = addspace( + keyword("assert") + - ( + lparen.suppress() + testlist + rparen.suppress() + end_simple_stmt_item + | testlist + ), + ) if_stmt = condense( addspace(keyword("if") + condense(namedexpr_test + suite)) - ZeroOrMore(addspace(keyword("elif") - condense(namedexpr_test - suite))) @@ -1739,7 +1852,7 @@ class Grammar(object): testlist_has_comma("list") | test("test") ) - Optional( - keyword("as").suppress() - name, + keyword("as").suppress() - setname, ) except_clause = attach(except_kwd + except_item, except_handle) except_star_clause = Forward() @@ -1760,12 +1873,12 @@ class Grammar(object): with_stmt_ref = keyword("with").suppress() - with_item_list - suite with_stmt = Forward() - return_typedef = Forward() - func_suite = Forward() - name_funcdef = trace(condense(dotted_name + parameters)) - op_tfpdef = 
unsafe_typedef_default | condense(name + Optional(default)) - op_funcdef_arg = name | condense(lparen.suppress() + op_tfpdef + rparen.suppress()) - op_funcdef_name = unsafe_backtick.suppress() + dotted_name + unsafe_backtick.suppress() + funcname_typeparams = Forward() + funcname_typeparams_ref = dotted_setname + Optional(type_params) + name_funcdef = trace(condense(funcname_typeparams + parameters)) + op_tfpdef = unsafe_typedef_default | condense(setname + Optional(default)) + op_funcdef_arg = setname | condense(lparen.suppress() + op_tfpdef + rparen.suppress()) + op_funcdef_name = unsafe_backtick.suppress() + funcname_typeparams + unsafe_backtick.suppress() op_funcdef = trace( attach( Group(Optional(op_funcdef_arg)) @@ -1774,15 +1887,15 @@ class Grammar(object): op_funcdef_handle, ), ) + + return_typedef = Forward() return_typedef_ref = arrow.suppress() + typedef_test end_func_colon = return_typedef + colon.suppress() | colon base_funcdef = op_funcdef | name_funcdef - func_suite_ref = nocolon_suite - funcdef = trace(addspace(keyword("def") + condense(base_funcdef + end_func_colon + func_suite))) + funcdef = trace(addspace(keyword("def") + condense(base_funcdef + end_func_colon + nocolon_suite))) name_match_funcdef = Forward() op_match_funcdef = Forward() - func_suite_tokens = Forward() op_match_funcdef_arg = Group( Optional( Group( @@ -1795,10 +1908,10 @@ class Grammar(object): ), ), ) - name_match_funcdef_ref = keyword("def").suppress() + dotted_name + lparen.suppress() + match_args_list + match_guard + rparen.suppress() + name_match_funcdef_ref = keyword("def").suppress() + funcname_typeparams + lparen.suppress() + match_args_list + match_guard + rparen.suppress() op_match_funcdef_ref = keyword("def").suppress() + op_match_funcdef_arg + op_funcdef_name + op_match_funcdef_arg + match_guard base_match_funcdef = trace(op_match_funcdef | name_match_funcdef) - func_suite_tokens_ref = ( + func_suite = ( attach(simple_stmt, make_suite_handle) | ( newline.suppress() @@ 
-1812,7 +1925,7 @@ class Grammar(object): attach( base_match_funcdef + end_func_colon - - func_suite_tokens, + - func_suite, join_match_funcdef, ), ) @@ -1832,7 +1945,6 @@ class Grammar(object): where_handle, ) - math_funcdef_suite = Forward() implicit_return = ( invalid_syntax(return_stmt, "expected expression but got return statement") | attach(new_testlist_star_expr, implicit_return_handle) @@ -1848,7 +1960,7 @@ class Grammar(object): | implicit_return_where ) math_funcdef_body = condense(ZeroOrMore(~(implicit_return_stmt + dedent) + stmt) - implicit_return_stmt) - math_funcdef_suite_ref = ( + math_funcdef_suite = ( attach(implicit_return_stmt, make_suite_handle) | condense(newline - indent - math_funcdef_body - dedent) ) @@ -1892,17 +2004,17 @@ class Grammar(object): match_kwd.suppress(), # we don't suppress addpattern so its presence can be detected later addpattern_kwd, - # makes async required - (1, async_kwd.suppress()), + required=(async_kwd.suppress(),), ) + (def_match_funcdef | math_match_funcdef), ), ) async_yield_funcdef = attach( trace( any_len_perm( - # makes both required - (1, async_kwd.suppress()), - (2, keyword("yield").suppress()), + required=( + async_kwd.suppress(), + keyword("yield").suppress(), + ), ) + (funcdef | math_funcdef), ), yield_funcdef_handle, @@ -1914,9 +2026,10 @@ class Grammar(object): match_kwd.suppress(), # we don't suppress addpattern so its presence can be detected later addpattern_kwd, - # makes both required - (1, async_kwd.suppress()), - (2, keyword("yield").suppress()), + required=( + async_kwd.suppress(), + keyword("yield").suppress(), + ), ) + (def_match_funcdef | math_match_funcdef), ), ), @@ -1936,28 +2049,36 @@ class Grammar(object): match_kwd.suppress(), # we don't suppress addpattern so its presence can be detected later addpattern_kwd, - # makes yield required - (1, keyword("yield").suppress()), + required=(keyword("yield").suppress(),), ) + (def_match_funcdef | math_match_funcdef), ), ) yield_funcdef = 
attach(yield_normal_funcdef | yield_match_funcdef, yield_funcdef_handle) + normal_funcdef_stmt = ( + funcdef + | math_funcdef + | math_match_funcdef + | match_funcdef + | yield_funcdef + ) + datadef = Forward() data_args = Group( Optional( lparen.suppress() + ZeroOrMore( Group( # everything here must end with arg_comma - (name + arg_comma.suppress())("name") - | (name + equals.suppress() + test + arg_comma.suppress())("default") - | (star.suppress() + name + arg_comma.suppress())("star") - | (name + colon.suppress() + typedef_test + equals.suppress() + test + arg_comma.suppress())("type default") - | (name + colon.suppress() + typedef_test + arg_comma.suppress())("type"), + (unsafe_name + arg_comma.suppress())("name") + | (unsafe_name + equals.suppress() + test + arg_comma.suppress())("default") + | (star.suppress() + unsafe_name + arg_comma.suppress())("star") + | (unsafe_name + colon.suppress() + typedef_test + equals.suppress() + test + arg_comma.suppress())("type default") + | (unsafe_name + colon.suppress() + typedef_test + arg_comma.suppress())("type"), ), ) + rparen.suppress(), ), - ) + Optional(keyword("from").suppress() + testlist) + ) + data_inherit = Optional(keyword("from").suppress() + testlist) data_suite = Group( colon.suppress() - ( (newline.suppress() + indent.suppress() + Optional(docstring) + Group(OneOrMore(stmt)) - dedent.suppress())("complex") @@ -1965,15 +2086,32 @@ class Grammar(object): | simple_stmt("simple") ) | newline("empty"), ) - datadef_ref = data_kwd.suppress() + classname + data_args + data_suite + datadef_ref = ( + Optional(decorators, default="") + + data_kwd.suppress() + + classname + + Optional(type_params, default=()) + + data_args + + data_inherit + + data_suite + ) match_datadef = Forward() match_data_args = lparen.suppress() + Group( match_args_list + match_guard, - ) + rparen.suppress() + Optional(keyword("from").suppress() + testlist) - match_datadef_ref = Optional(match_kwd.suppress()) + data_kwd.suppress() + classname + 
match_data_args + data_suite - - simple_decorator = condense(dotted_name + Optional(function_call) + newline)("simple") + ) + rparen.suppress() + # we don't support type_params here since we don't support types + match_datadef_ref = ( + Optional(decorators, default="") + + Optional(match_kwd.suppress()) + + data_kwd.suppress() + + classname + + match_data_args + + data_inherit + + data_suite + ) + + simple_decorator = condense(dotted_refname + Optional(function_call) + newline)("simple") complex_decorator = condense(namedexpr_test + newline)("complex") decorators_ref = OneOrMore( at.suppress() @@ -1982,16 +2120,8 @@ class Grammar(object): | complex_decorator, ), ) - decorators = Forward() decoratable_normal_funcdef_stmt = Forward() - normal_funcdef_stmt = ( - funcdef - | math_funcdef - | math_match_funcdef - | match_funcdef - | yield_funcdef - ) decoratable_normal_funcdef_stmt_ref = Optional(decorators) + normal_funcdef_stmt decoratable_async_funcdef_stmt = Forward() @@ -1999,8 +2129,8 @@ class Grammar(object): decoratable_func_stmt = decoratable_normal_funcdef_stmt | decoratable_async_funcdef_stmt - class_stmt = classdef | datadef | match_datadef - decoratable_class_stmt = trace(condense(Optional(decorators) + class_stmt)) + # decorators are integrated into the definitions of each item here + decoratable_class_stmt = classdef | datadef | match_datadef passthrough_stmt = condense(passthrough_block - (base_suite | newline)) @@ -2036,6 +2166,7 @@ class Grammar(object): keyword_stmt | augassign_stmt | typed_assign_stmt + | type_alias_stmt ) unsafe_simple_stmt_item <<= special_stmt | longest(basic_stmt, destructuring_stmt) simple_stmt_item <<= ( @@ -2079,7 +2210,7 @@ class Grammar(object): unsafe_anything_stmt = originalTextFor(regex_item("[^\n]+\n+")) unsafe_xonsh_command = originalTextFor( (Optional(at) + dollar | bang) - + (parens | brackets | braces | name), + + (parens | brackets | braces | unsafe_name), ) xonsh_parser, _anything_stmt, _xonsh_command = 
disable_outside( single_parser, @@ -2190,7 +2321,6 @@ def get_tre_return_grammar(self, func_name): ), ) - unsafe_dotted_name = condense(unsafe_name + ZeroOrMore(dot + unsafe_name)) split_func = ( start_marker - keyword("def").suppress() diff --git a/coconut/compiler/header.py b/coconut/compiler/header.py index fac28755c..a0f064ca9 100644 --- a/coconut/compiler/header.py +++ b/coconut/compiler/header.py @@ -34,6 +34,7 @@ report_this_text, numpy_modules, jax_numpy_modules, + self_match_types, ) from coconut.util import ( univ_open, @@ -201,6 +202,7 @@ def process_header_args(which, target, use_hash, no_tco, strict, no_wrap): report_this_text=report_this_text, numpy_modules=tuple_str_of(numpy_modules, add_quotes=True), jax_numpy_modules=tuple_str_of(jax_numpy_modules, add_quotes=True), + self_match_types=tuple_str_of(self_match_types), set_super=( # we have to use _coconut_super even on the universal target, since once we set __class__ it becomes a local variable "super = _coconut_super\n" if target_startswith != 3 else "" @@ -401,6 +403,17 @@ def _coconut_matmul(a, b, **kwargs): raise _coconut.TypeError("unsupported operand type(s) for @: " + _coconut.repr(_coconut.type(a)) + " and " + _coconut.repr(_coconut.type(b))) ''', ), + import_typing_NamedTuple=pycondition( + (3, 6), + if_lt=''' +def NamedTuple(name, fields): + return _coconut.collections.namedtuple(name, [x for x, t in fields]) +typing.NamedTuple = NamedTuple +NamedTuple = staticmethod(NamedTuple) + ''', + indent=1, + newline=True, + ), # used in the second round tco_comma="_coconut_tail_call, _coconut_tco, " if not no_tco else "", call_set_names_comma="_coconut_call_set_names, " if target_info < (3, 6) else "", @@ -410,12 +423,6 @@ def _coconut_matmul(a, b, **kwargs): async def __anext__(self): return self.func(await self.aiter.__anext__()) ''' if target_info >= (3, 5) else - r''' -@_coconut.asyncio.coroutine -def __anext__(self): - result = yield from self.aiter.__anext__() - return self.func(result) - ''' 
if target_info >= (3, 3) else pycondition( (3, 5), if_ge=r''' @@ -438,43 +445,74 @@ def __anext__(self): ) # second round for format dict elements that use the format dict - format_dict.update( - dict( - # when anything is added to this list it must also be added to *both* __coconut__.pyi stub files - underscore_imports="{tco_comma}{call_set_names_comma}{handle_cls_args_comma}_namedtuple_of, _coconut, _coconut_super, _coconut_MatchError, _coconut_iter_getitem, _coconut_base_compose, _coconut_forward_compose, _coconut_back_compose, _coconut_forward_star_compose, _coconut_back_star_compose, _coconut_forward_dubstar_compose, _coconut_back_dubstar_compose, _coconut_pipe, _coconut_star_pipe, _coconut_dubstar_pipe, _coconut_back_pipe, _coconut_back_star_pipe, _coconut_back_dubstar_pipe, _coconut_none_pipe, _coconut_none_star_pipe, _coconut_none_dubstar_pipe, _coconut_bool_and, _coconut_bool_or, _coconut_none_coalesce, _coconut_minus, _coconut_map, _coconut_partial, _coconut_get_function_match_error, _coconut_base_pattern_func, _coconut_addpattern, _coconut_sentinel, _coconut_assert, _coconut_raise, _coconut_mark_as_match, _coconut_reiterable, _coconut_self_match_types, _coconut_dict_merge, _coconut_exec, _coconut_comma_op, _coconut_multi_dim_arr, _coconut_mk_anon_namedtuple, _coconut_matmul".format(**format_dict), - import_typing_NamedTuple=pycondition( - (3, 6), - if_lt=''' -class typing{object}: - @staticmethod - def NamedTuple(name, fields): - return _coconut.collections.namedtuple(name, [x for x, t in fields]) + extra_format_dict = dict( + # when anything is added to this list it must also be added to *both* __coconut__ stub files + underscore_imports="{tco_comma}{call_set_names_comma}{handle_cls_args_comma}_namedtuple_of, _coconut, _coconut_super, _coconut_MatchError, _coconut_iter_getitem, _coconut_base_compose, _coconut_forward_compose, _coconut_back_compose, _coconut_forward_star_compose, _coconut_back_star_compose, _coconut_forward_dubstar_compose, 
_coconut_back_dubstar_compose, _coconut_pipe, _coconut_star_pipe, _coconut_dubstar_pipe, _coconut_back_pipe, _coconut_back_star_pipe, _coconut_back_dubstar_pipe, _coconut_none_pipe, _coconut_none_star_pipe, _coconut_none_dubstar_pipe, _coconut_bool_and, _coconut_bool_or, _coconut_none_coalesce, _coconut_minus, _coconut_map, _coconut_partial, _coconut_get_function_match_error, _coconut_base_pattern_func, _coconut_addpattern, _coconut_sentinel, _coconut_assert, _coconut_raise, _coconut_mark_as_match, _coconut_reiterable, _coconut_self_match_types, _coconut_dict_merge, _coconut_exec, _coconut_comma_op, _coconut_multi_dim_arr, _coconut_mk_anon_namedtuple, _coconut_matmul".format(**format_dict), + import_typing=pycondition( + (3, 5), + if_ge="import typing", + if_lt=''' +class typing_mock{object}: + TYPE_CHECKING = False + def __getattr__(self, name): + raise _coconut.ImportError("the typing module is not available at runtime in Python 3.4 or earlier; try hiding your typedefs behind an 'if TYPE_CHECKING:' block") +typing = typing_mock() '''.format(**format_dict), - if_ge=''' -import typing - ''', - indent=1, - ), - import_asyncio=pycondition( - (3, 4), - if_lt=''' + indent=1, + ), + # all typing_extensions imports must be added to the _coconut stub file + import_typing_TypeAlias_ParamSpec_Concatenate=pycondition( + (3, 10), + if_lt=''' +try: + from typing_extensions import TypeAlias, ParamSpec, Concatenate +except ImportError: + class you_need_to_install_typing_extensions{object}: + __slots__ = () + TypeAlias = ParamSpec = Concatenate = you_need_to_install_typing_extensions() +typing.TypeAlias = TypeAlias +typing.ParamSpec = ParamSpec +typing.Concatenate = Concatenate + '''.format(**format_dict), + indent=1, + newline=True, + ), + import_typing_TypeVarTuple_Unpack=pycondition( + (3, 11), + if_lt=''' +try: + from typing_extensions import TypeVarTuple, Unpack +except ImportError: + class you_need_to_install_typing_extensions{object}: + __slots__ = () + TypeVarTuple = 
Unpack = you_need_to_install_typing_extensions() +typing.TypeVarTuple = TypeVarTuple +typing.Unpack = Unpack + '''.format(**format_dict), + indent=1, + newline=True, + ), + import_asyncio=pycondition( + (3, 4), + if_lt=''' try: import trollius as asyncio except ImportError: - class you_need_to_install_trollius{object}: pass + class you_need_to_install_trollius{object}: + __slots__ = () asyncio = you_need_to_install_trollius() '''.format(**format_dict), - if_ge=''' + if_ge=''' import asyncio ''', - indent=1, - ), - class_amap=pycondition( - (3, 3), - if_lt=r''' + indent=1, + ), + class_amap=pycondition( + (3, 3), + if_lt=r''' _coconut_amap = None - ''', - if_ge=r''' + ''', + if_ge=r''' class _coconut_amap(_coconut_base_hashable): __slots__ = ("func", "aiter") def __init__(self, func, aiter): @@ -485,24 +523,25 @@ def __reduce__(self): def __aiter__(self): return self {async_def_anext} - '''.format(**format_dict), - ), - maybe_bind_lru_cache=pycondition( - (3, 2), - if_lt=''' + '''.format(**format_dict), + ), + maybe_bind_lru_cache=pycondition( + (3, 2), + if_lt=''' try: from backports.functools_lru_cache import lru_cache functools.lru_cache = lru_cache except ImportError: - class you_need_to_install_backports_functools_lru_cache{object}: pass + class you_need_to_install_backports_functools_lru_cache{object}: + __slots__ = () functools.lru_cache = you_need_to_install_backports_functools_lru_cache() '''.format(**format_dict), - if_ge=None, - indent=1, - newline=True, - ), + if_ge=None, + indent=1, + newline=True, ), ) + format_dict.update(extra_format_dict) return format_dict @@ -553,6 +592,9 @@ def getheader(which, target, use_hash, no_tco, strict, no_wrap): if target_startswith != "3": header += "from __future__ import print_function, absolute_import, unicode_literals, division\n" + # including generator_stop here is fine, even though to universalize + # generator returns we raise StopIteration errors, since we only do so + # when target_info < (3, 3) elif 
target_info >= (3, 7): if no_wrap: header += "from __future__ import generator_stop\n" diff --git a/coconut/compiler/matching.py b/coconut/compiler/matching.py index 59e0c0899..f4e0d76c2 100644 --- a/coconut/compiler/matching.py +++ b/coconut/compiler/matching.py @@ -40,11 +40,13 @@ match_set_name_var, is_data_var, default_matcher_style, + self_match_types, ) from coconut.compiler.util import ( paren_join, handle_indentation, add_int_and_strs, + ordered_items, ) # ----------------------------------------------------------------------------------------------------------------------- @@ -54,6 +56,7 @@ def get_match_names(match): """Gets keyword names for the given match.""" + internal_assert(not isinstance(match, str), "invalid match in get_match_names", match) names = [] # these constructs directly contain top-level variable names if "var" in match: @@ -67,8 +70,8 @@ def get_match_names(match): # these constructs continue matching on the entire original item, # meaning they can also contain top-level variable names elif "paren" in match: - (match,) = match - names += get_match_names(match) + (paren_match,) = match + names += get_match_names(paren_match) elif "and" in match: for and_match in match: names += get_match_names(and_match) @@ -78,6 +81,10 @@ def get_match_names(match): elif "isinstance_is" in match: isinstance_is_match = match[0] names += get_match_names(isinstance_is_match) + elif "class" in match or "data_or_class" in match: + cls_name, class_matches = match + if cls_name in self_match_types and len(class_matches) == 1 and len(class_matches[0]) == 1: + names += get_match_names(class_matches[0][0]) return names @@ -425,8 +432,7 @@ def match_in_args_kwargs(self, pos_only_match_args, match_args, args, kwargs, al # length checking max_len = None if allow_star_args else len(pos_only_match_args) + len(match_args) self.check_len_in(req_len, max_len, args) - for i in sorted(arg_checks): - lt_check, ge_check = arg_checks[i] + for i, (lt_check, ge_check) in 
ordered_items(arg_checks): if i < req_len: if lt_check is not None: self.add_check(lt_check) @@ -1179,6 +1185,7 @@ def make_match(self, flag, tokens): def match(self, tokens, item): """Performs pattern-matching processing.""" + internal_assert(not isinstance(tokens, str), "invalid match tokens", tokens) for flag, get_handler in self.matchers.items(): if flag in tokens: return get_handler(self)(tokens, item) diff --git a/coconut/compiler/templates/header.py_template b/coconut/compiler/templates/header.py_template index 7f7c94196..54b6ba5e0 100644 --- a/coconut/compiler/templates/header.py_template +++ b/coconut/compiler/templates/header.py_template @@ -19,8 +19,8 @@ def _coconut_super(type=None, object_or_type=None): {import_pickle} {import_OrderedDict} {import_collections_abc} -{import_typing_NamedTuple} -{set_zip_longest} +{import_typing} +{import_typing_NamedTuple}{import_typing_TypeAlias_ParamSpec_Concatenate}{import_typing_TypeVarTuple_Unpack}{set_zip_longest} try: import numpy except ImportError: @@ -96,7 +96,7 @@ def _coconut_tco(func):{COMMENT._coconut_tco_func_attr_is_used_in_main_coco} wkref_func = None if wkref is None else wkref() if wkref_func is call_func: call_func = call_func._coconut_tco_func - result = call_func(*args, **kwargs) # pass --no-tco to clean up your traceback + result = call_func(*args, **kwargs) # use coconut --no-tco to clean up your traceback if not isinstance(result, _coconut_tail_call): return result call_func, args, kwargs = result.func, result.args, result.kwargs @@ -1324,5 +1324,5 @@ def _coconut_multi_dim_arr(arrs, dim): arr_dims.append(dim) max_arr_dim = _coconut.max(arr_dims) return _coconut_concatenate(arrs, max_arr_dim - dim) -_coconut_self_match_types = (bool, bytearray, bytes, dict, float, frozenset, int, py_int, list, set, str, py_str, tuple) +_coconut_self_match_types = {self_match_types} _coconut_MatchError, _coconut_count, _coconut_enumerate, _coconut_filter, _coconut_map, _coconut_reiterable, _coconut_reversed, 
_coconut_starmap, _coconut_tee, _coconut_zip, TYPE_CHECKING, reduce, takewhile, dropwhile = MatchError, count, enumerate, filter, map, reiterable, reversed, starmap, tee, zip, False, _coconut.functools.reduce, _coconut.itertools.takewhile, _coconut.itertools.dropwhile diff --git a/coconut/compiler/util.py b/coconut/compiler/util.py index e96a4889a..c77e5f0e7 100644 --- a/coconut/compiler/util.py +++ b/coconut/compiler/util.py @@ -35,7 +35,7 @@ from functools import partial, reduce from collections import defaultdict from contextlib import contextmanager -from pprint import pformat +from pprint import pformat, pprint from coconut._pyparsing import ( USE_COMPUTATION_GRAPH, @@ -108,15 +108,20 @@ def evaluate_tokens(tokens, **kwargs): - """Evaluate the given tokens in the computation graph.""" + """Evaluate the given tokens in the computation graph. + Very performance sensitive.""" # can't have this be a normal kwarg to make evaluate_tokens a valid parse action evaluated_toklists = kwargs.pop("evaluated_toklists", ()) - internal_assert(not kwargs, "invalid keyword arguments to evaluate_tokens", kwargs) + if DEVELOP: # avoid the overhead of the call if not develop + internal_assert(not kwargs, "invalid keyword arguments to evaluate_tokens", kwargs) + + if not USE_COMPUTATION_GRAPH: + return tokens if isinstance(tokens, ParseResults): # evaluate the list portion of the ParseResults - old_toklist, name, asList, modal = tokens.__getnewargs__() + old_toklist, old_name, asList, modal = tokens.__getnewargs__() new_toklist = None for eval_old_toklist, eval_new_toklist in evaluated_toklists: if old_toklist == eval_old_toklist: @@ -127,7 +132,10 @@ def evaluate_tokens(tokens, **kwargs): # overwrite evaluated toklists rather than appending, since this # should be all the information we need for evaluating the dictionary evaluated_toklists = ((old_toklist, new_toklist),) - new_tokens = ParseResults(new_toklist, name, asList, modal) + # we have to pass name=None here and then set 
__name after otherwise + # the constructor might generate a new tokdict item we don't want + new_tokens = ParseResults(new_toklist, None, asList, modal) + new_tokens._ParseResults__name = old_name new_tokens._ParseResults__accumNames.update(tokens._ParseResults__accumNames) # evaluate the dictionary portion of the ParseResults @@ -140,6 +148,9 @@ def evaluate_tokens(tokens, **kwargs): new_tokdict[name] = new_occurrences new_tokens._ParseResults__tokdict.update(new_tokdict) + if DEVELOP: # avoid the overhead of the call if not develop + internal_assert(set(tokens._ParseResults__tokdict.keys()) == set(new_tokens._ParseResults__tokdict.keys()), "evaluate_tokens on ParseResults failed to maintain tokdict keys", (tokens, "->", new_tokens)) + return new_tokens else: @@ -161,6 +172,7 @@ def evaluate_tokens(tokens, **kwargs): ), ) + # base cases (performance sensitive; should be in likelihood order): if isinstance(tokens, str): return tokens @@ -173,6 +185,9 @@ def evaluate_tokens(tokens, **kwargs): elif isinstance(tokens, tuple): return tuple(evaluate_tokens(inner_toks, evaluated_toklists=evaluated_toklists) for inner_toks in tokens) + elif isinstance(tokens, DeferredNode): + return tokens + else: raise CoconutInternalException("invalid computation graph tokens", tokens) @@ -180,6 +195,7 @@ def evaluate_tokens(tokens, **kwargs): class ComputationNode(object): """A single node in the computation graph.""" __slots__ = ("action", "original", "loc", "tokens") + (("been_called",) if DEVELOP else ()) + pprinting = False def __new__(cls, action, original, loc, tokens, ignore_no_tokens=False, ignore_one_token=False, greedy=False, trim_arity=True): """Create a ComputionNode to return from a parse action. @@ -215,7 +231,8 @@ def name(self): return name if name is not None else repr(self.action) def evaluate(self): - """Get the result of evaluating the computation graph at this node.""" + """Get the result of evaluating the computation graph at this node. 
+ Very performance sensitive.""" if DEVELOP: # avoid the overhead of the call if not develop internal_assert(not self.been_called, "inefficient reevaluation of action " + self.name + " with tokens", self.tokens) self.been_called = True @@ -244,16 +261,32 @@ def __repr__(self): if not logger.tracing: logger.warn_err(CoconutInternalException("ComputationNode.__repr__ called when not tracing")) inner_repr = "\n".join("\t" + line for line in repr(self.tokens).splitlines()) - return self.name + "(\n" + inner_repr + "\n)" + if self.pprinting: + return '("' + self.name + '",\n' + inner_repr + "\n)" + else: + return self.name + "(\n" + inner_repr + "\n)" + + +class DeferredNode(object): + """A node in the computation graph that has had its evaluation explicitly deferred.""" + + def __init__(self, original, loc, tokens): + self.original = original + self.loc = loc + self.tokens = tokens + + def evaluate(self): + """Evaluate the deferred computation.""" + return unpack(self.tokens) -class CombineNode(Combine): +class CombineToNode(Combine): """Modified Combine to work with the computation graph.""" __slots__ = () def _combine(self, original, loc, tokens): """Implement the parse action for Combine.""" - combined_tokens = super(CombineNode, self).postParse(original, loc, tokens) + combined_tokens = super(CombineToNode, self).postParse(original, loc, tokens) if DEVELOP: # avoid the overhead of the call if not develop internal_assert(len(combined_tokens) == 1, "Combine produced multiple tokens", combined_tokens) return combined_tokens[0] @@ -265,7 +298,7 @@ def postParse(self, original, loc, tokens): if USE_COMPUTATION_GRAPH: - combine = CombineNode + combine = CombineToNode else: combine = Combine @@ -274,14 +307,15 @@ def add_action(item, action, make_copy=None): """Add a parse action to the given item.""" if make_copy is None: item_ref_count = sys.getrefcount(item) if CPYTHON else float("inf") - internal_assert(item_ref_count >= temp_grammar_item_ref_count, "add_action got 
item with too low ref count", (item, type(item), item_ref_count)) + # keep this a lambda to prevent cPython refcounting changes from breaking release builds + internal_assert(lambda: item_ref_count >= temp_grammar_item_ref_count, "add_action got item with too low ref count", (item, type(item), item_ref_count)) make_copy = item_ref_count > temp_grammar_item_ref_count if make_copy: item = item.copy() return item.addParseAction(action) -def attach(item, action, ignore_no_tokens=None, ignore_one_token=None, ignore_tokens=None, trim_arity=None, **kwargs): +def attach(item, action, ignore_no_tokens=None, ignore_one_token=None, ignore_tokens=None, trim_arity=None, make_copy=None, **kwargs): """Set the parse action for the given item to create a node in the computation graph.""" if ignore_tokens is None: ignore_tokens = getattr(action, "ignore_tokens", False) @@ -302,7 +336,7 @@ def attach(item, action, ignore_no_tokens=None, ignore_one_token=None, ignore_to if not trim_arity: kwargs["trim_arity"] = trim_arity action = partial(ComputationNode, action, **kwargs) - return add_action(item, action) + return add_action(item, action, make_copy) def trace_attach(*args, **kwargs): @@ -315,10 +349,7 @@ def final_evaluate_tokens(tokens): if use_packrat_parser: # clear cache without resetting stats ParserElement.packrat_cache.clear() - if USE_COMPUTATION_GRAPH: - return evaluate_tokens(tokens) - else: - return tokens + return evaluate_tokens(tokens) def final(item): @@ -327,11 +358,16 @@ def final(item): return add_action(item, final_evaluate_tokens) +def defer(item): + """Defers evaluation of the given item. 
+ Only does any actual deferring if USE_COMPUTATION_GRAPH is True.""" + return add_action(item, DeferredNode) + + def unpack(tokens): """Evaluate and unpack the given computation graph.""" logger.log_tag("unpack", tokens) - if USE_COMPUTATION_GRAPH: - tokens = evaluate_tokens(tokens) + tokens = evaluate_tokens(tokens) if isinstance(tokens, ParseResults) and len(tokens) == 1: tokens = tokens[0] return tokens @@ -485,12 +521,16 @@ def get_target_info_smart(target, mode="lowest"): class Wrap(ParseElementEnhance): """PyParsing token that wraps the given item in the given context manager.""" - __slots__ = ("errmsg", "wrapper") - def __init__(self, item, wrapper): + def __init__(self, item, wrapper, greedy=False, can_affect_parse_success=False): super(Wrap, self).__init__(item) self.wrapper = wrapper - self.setName(get_name(item) + " (Wrapped)") + self.greedy = greedy + self.can_affect_parse_success = can_affect_parse_success + + @property + def wrapped_name(self): + return get_name(self.expr) + " (Wrapped)" @contextmanager def wrapped_packrat_context(self): @@ -498,7 +538,7 @@ def wrapped_packrat_context(self): Required to allow the packrat cache to distinguish between wrapped and unwrapped parses. 
Only supported natively on cPyparsing.""" - if hasattr(self, "packrat_context"): + if self.can_affect_parse_success and hasattr(self, "packrat_context"): self.packrat_context.append(self.wrapper) try: yield @@ -511,14 +551,22 @@ def wrapped_packrat_context(self): def parseImpl(self, original, loc, *args, **kwargs): """Wrapper around ParseElementEnhance.parseImpl.""" if logger.tracing: # avoid the overhead of the call if not tracing - logger.log_trace(self.name, original, loc) + logger.log_trace(self.wrapped_name, original, loc) with logger.indent_tracing(): with self.wrapper(self, original, loc): with self.wrapped_packrat_context(): - evaluated_toks = super(Wrap, self).parseImpl(original, loc, *args, **kwargs) + parse_loc, evaluated_toks = super(Wrap, self).parseImpl(original, loc, *args, **kwargs) + if self.greedy: + evaluated_toks = evaluate_tokens(evaluated_toks) if logger.tracing: # avoid the overhead of the call if not tracing - logger.log_trace(self.name, original, loc, evaluated_toks) - return evaluated_toks + logger.log_trace(self.wrapped_name, original, loc, evaluated_toks) + return parse_loc, evaluated_toks + + def __str__(self): + return self.wrapped_name + + def __repr__(self): + return self.wrapped_name def disable_inside(item, *elems, **kwargs): @@ -539,7 +587,7 @@ def manage_item(self, original, loc): finally: level[0] -= 1 - yield Wrap(item, manage_item) + yield Wrap(item, manage_item, can_affect_parse_success=True) @contextmanager def manage_elem(self, original, loc): @@ -549,7 +597,7 @@ def manage_elem(self, original, loc): raise ParseException(original, loc, self.errmsg, self) for elem in elems: - yield Wrap(elem, manage_elem) + yield Wrap(elem, manage_elem, can_affect_parse_success=True) def disable_outside(item, *elems): @@ -729,7 +777,7 @@ def stores_loc_action(loc, tokens): stores_loc_action.ignore_tokens = True -stores_loc_item = attach(Empty(), stores_loc_action) +stores_loc_item = attach(Empty(), stores_loc_action, make_copy=False) def 
disallow_keywords(kwds, with_suffix=None): @@ -773,7 +821,7 @@ def keyword(name, explicit_prefix=None, require_whitespace=False): boundary = regex_item(r"\b") -def any_len_perm(*groups_and_elems): +def any_len_perm_with_one_of_each_group(*groups_and_elems): """Matches any len permutation of elems that contains at least one of each group.""" elems = [] groups = defaultdict(list) @@ -811,10 +859,38 @@ def any_len_perm(*groups_and_elems): return out +def any_len_perm(*optional, **kwargs): + """Any length permutation of optional and required.""" + required = kwargs.pop("required", ()) + internal_assert(not kwargs, "invalid any_len_perm kwargs", kwargs) + + groups_and_elems = [] + groups_and_elems.extend(optional) + groups_and_elems.extend(enumerate(required)) + return any_len_perm_with_one_of_each_group(*groups_and_elems) + + # ----------------------------------------------------------------------------------------------------------------------- # UTILITIES: # ----------------------------------------------------------------------------------------------------------------------- +def ordered_items(inputdict): + """Return the items of inputdict in a deterministic order.""" + if PY2: + return sorted(inputdict.items()) + else: + return inputdict.items() + + +def pprint_tokens(tokens): + """Pretty print tokens.""" + pprinting, ComputationNode.pprinting = ComputationNode.pprinting, True + try: + pprint(eval(repr(tokens))) + finally: + ComputationNode.pprinting = pprinting + + def getline(loc, original): """Get the line at loc in original.""" return _line(loc, original.replace(non_syntactic_newline, "\n")) @@ -912,6 +988,14 @@ def tuple_str_of(items, add_quotes=False, add_parens=True): return out +def tuple_str_of_str(argstr, add_parens=True): + """Make a tuple repr of the given comma-delimited argstr.""" + out = argstr + ("," if argstr else "") + if add_parens: + out = "(" + out + ")" + return out + + def split_comment(line, move_indents=False): """Split line into base and 
comment.""" if move_indents: @@ -1000,6 +1084,14 @@ def split_leading_trailing_indent(line, max_indents=None): return leading_indent, line, trailing_indent +def split_leading_whitespace(inputstr): + """Split leading whitespace.""" + basestr = inputstr.lstrip() + whitespace = inputstr[:len(inputstr) - len(basestr)] + internal_assert(whitespace + basestr == inputstr, "invalid whitespace split", inputstr) + return whitespace, basestr + + def rem_and_count_indents(inputstr): """Removes and counts the ind_change (opens - closes).""" no_opens = inputstr.replace(openindent, "") @@ -1056,7 +1148,7 @@ def interleaved_join(first_list, second_list): return "".join(interleaved) -def handle_indentation(inputstr, add_newline=False): +def handle_indentation(inputstr, add_newline=False, extra_indent=0): """Replace tabideal indentation with openindent and closeindent. Ignores whitespace-only lines.""" out_lines = [] @@ -1083,6 +1175,8 @@ def handle_indentation(inputstr, add_newline=False): if prev_ind > 0: out_lines[-1] += closeindent * prev_ind out = "\n".join(out_lines) + if extra_indent: + out = openindent * extra_indent + out + closeindent * extra_indent internal_assert(lambda: out.count(openindent) == out.count(closeindent), "failed to properly handle indentation in", out) return out diff --git a/coconut/constants.py b/coconut/constants.py index 76d7c309e..80cddd2b4 100644 --- a/coconut/constants.py +++ b/coconut/constants.py @@ -25,6 +25,7 @@ import platform import re import datetime as dt +from warnings import warn # ----------------------------------------------------------------------------------------------------------------------- # UTILITIES: @@ -36,14 +37,16 @@ def fixpath(path): return os.path.normpath(os.path.realpath(os.path.expanduser(path))) -def str_to_bool(boolstr, default=False): - """Convert a string to a boolean.""" - boolstr = boolstr.lower() +def get_bool_env_var(env_var, default=False): + """Get a boolean from an environment variable.""" + boolstr = 
os.getenv(env_var, "").lower() if boolstr in ("true", "yes", "on", "1"): return True elif boolstr in ("false", "no", "off", "0"): return False else: + if boolstr not in ("", "none", "default"): + warn("{env_var} has invalid value {value!r} (defaulting to {default})".format(env_var=env_var, value=os.getenv(env_var), default=default)) return default @@ -57,7 +60,6 @@ def str_to_bool(boolstr, default=False): version_tag = "develop" else: version_tag = "v" + VERSION -version_str_tag = "v" + VERSION_STR version_tuple = tuple(VERSION.split(".")) @@ -71,18 +73,23 @@ def str_to_bool(boolstr, default=False): PY36 = sys.version_info >= (3, 6) PY37 = sys.version_info >= (3, 7) PY38 = sys.version_info >= (3, 8) +PY39 = sys.version_info >= (3, 9) PY310 = sys.version_info >= (3, 10) +PY311 = sys.version_info >= (3, 11) IPY = ( ((PY2 and not PY26) or PY35) and not (PYPY and WINDOWS) - # necessary until jupyter-console fixes https://github.com/jupyter/jupyter_console/issues/245 - and not PY310 + and not (PY311 and not WINDOWS) ) MYPY = ( PY37 and not WINDOWS and not PYPY ) +XONSH = ( + PY35 + and not (PYPY and PY39) +) py_version_str = sys.version.split()[0] @@ -115,7 +122,7 @@ def str_to_bool(boolstr, default=False): assert not embed_on_internal_exc or DEVELOP, "embed_on_internal_exc should never be enabled on non-develop build" # should be the minimal ref count observed by attach -temp_grammar_item_ref_count = 5 +temp_grammar_item_ref_count = 3 if PY311 else 5 minimum_recursion_limit = 128 default_recursion_limit = 4096 @@ -152,6 +159,7 @@ def str_to_bool(boolstr, default=False): (3, 9), (3, 10), (3, 11), + (3, 12), ) # must match supported vers above and must be replicated in DOCS @@ -168,6 +176,7 @@ def str_to_bool(boolstr, default=False): "39", "310", "311", + "312", ) pseudo_targets = { "universal": "", @@ -199,12 +208,13 @@ def str_to_bool(boolstr, default=False): function_match_error_var = reserved_prefix + "_FunctionMatchError" match_set_name_var = reserved_prefix + 
"_match_set_name" -# should match internally_reserved_symbols below +# should match reserved_compiler_symbols below openindent = "\u204b" # reverse pilcrow closeindent = "\xb6" # pilcrow strwrapper = "\u25b6" # black right-pointing triangle -lnwrapper = "\u2021" # double dagger +errwrapper = "\u24d8" # circled letter i early_passthrough_wrapper = "\u2038" # caret +lnwrapper = "\u2021" # double dagger unwrapper = "\u23f9" # stop square funcwrapper = "def:" @@ -219,10 +229,11 @@ def str_to_bool(boolstr, default=False): # together should include all the constants defined above delimiter_symbols = tuple(opens + closes + holds) + ( strwrapper, + errwrapper, early_passthrough_wrapper, unwrapper, ) + indchars + comment_chars -internally_reserved_symbols = delimiter_symbols + ( +reserved_compiler_symbols = delimiter_symbols + ( reserved_prefix, funcwrapper, ) @@ -287,11 +298,14 @@ def str_to_bool(boolstr, default=False): "addpattern", "then", "operator", + "type", "\u03bb", # lambda ) # names that trigger __class__ to be bound to local vars super_names = ( + # we would include py_super, but it's not helpful, since + # py_super is unsatisfied by a simple local __class__ var "super", "__class__", ) @@ -300,7 +314,7 @@ def str_to_bool(boolstr, default=False): untcoable_funcs = ( r"locals", r"globals", - r"super", + r"(py_)?super", r"(typing\.)?cast", r"(sys\.)?exc_info", r"(sys\.)?_getframe", @@ -408,6 +422,22 @@ def str_to_bool(boolstr, default=False): "typing.Unpack": ("typing_extensions./Unpack", (3, 11)), } +self_match_types = ( + "bool", + "bytearray", + "bytes", + "dict", + "float", + "frozenset", + "int", + "py_int", + "list", + "set", + "str", + "py_str", + "tuple", +) + # ----------------------------------------------------------------------------------------------------------------------- # COMMAND CONSTANTS: # ----------------------------------------------------------------------------------------------------------------------- @@ -424,20 +454,25 @@ def 
str_to_bool(boolstr, default=False): style_env_var = "COCONUT_STYLE" vi_mode_env_var = "COCONUT_VI_MODE" home_env_var = "COCONUT_HOME" +use_color_env_var = "COCONUT_USE_COLOR" coconut_home = fixpath(os.getenv(home_env_var, "~")) +use_color = get_bool_env_var(use_color_env_var, default=None) +error_color_code = "31" +log_color_code = "93" + default_style = "default" prompt_histfile = os.path.join(coconut_home, ".coconut_history") prompt_multiline = False -prompt_vi_mode = str_to_bool(os.getenv(vi_mode_env_var, "")) +prompt_vi_mode = get_bool_env_var(vi_mode_env_var) prompt_wrap_lines = True prompt_history_search = True prompt_use_suggester = False base_dir = os.path.dirname(os.path.abspath(fixpath(__file__))) -base_stub_dir = os.path.join(base_dir, "stubs") +base_stub_dir = os.path.dirname(base_dir) installed_stub_dir = os.path.join(coconut_home, ".coconut_stubs") watch_interval = .1 # seconds @@ -455,6 +490,12 @@ def str_to_bool(boolstr, default=False): "\x04", # Ctrl-D "\x1a", # Ctrl-Z ) +ansii_escape = "\x1b" + +# should match special characters above +reserved_command_symbols = exit_chars + ( + ansii_escape, +) # always use atomic --xxx=yyy rather than --xxx yyy coconut_run_args = ("--run", "--target=sys", "--line-numbers", "--quiet") @@ -487,7 +528,7 @@ def str_to_bool(boolstr, default=False): mypy_install_arg = "install" -mypy_builtin_regex = re.compile(r"\b(reveal_type|reveal_locals|TYPE_CHECKING)\b") +mypy_builtin_regex = re.compile(r"\b(reveal_type|reveal_locals)\b") interpreter_uses_auto_compilation = True interpreter_uses_coconut_breakpoint = True @@ -601,9 +642,12 @@ def str_to_bool(boolstr, default=False): "\xbb", # >> "\xd7", # @ "\u2026", # ... 
+ "\u2286", # C= + "\u2287", # ^reversed + "\u228a", # C!= + "\u228b", # ^reversed ) - # ----------------------------------------------------------------------------------------------------------------------- # INSTALLATION CONSTANTS: # ----------------------------------------------------------------------------------------------------------------------- @@ -619,7 +663,7 @@ def str_to_bool(boolstr, default=False): license_name = "Apache 2.0" pure_python_env_var = "COCONUT_PURE_PYTHON" -PURE_PYTHON = str_to_bool(os.getenv(pure_python_env_var, "")) +PURE_PYTHON = get_bool_env_var(pure_python_env_var) # the different categories here are defined in requirements.py, # anything after a colon is ignored but allows different versions @@ -657,21 +701,25 @@ def str_to_bool(boolstr, default=False): ("ipykernel", "py2"), ("ipykernel", "py3"), ("jupyter-client", "py2"), - ("jupyter-client", "py3"), + ("jupyter-client", "py==35"), + ("jupyter-client", "py36"), "jedi", + ("pywinpty", "py2;windows"), ), "jupyter": ( "jupyter", ("jupyter-console", "py2"), - ("jupyter-console", "py3"), + ("jupyter-console", "py==35"), + ("jupyter-console", "py36"), ("jupyterlab", "py35"), ("jupytext", "py3"), "papermill", - ("pywinpty", "py2;windows"), ), "mypy": ( "mypy[python2]", "types-backports", + ("typing_extensions", "py==35"), + ("typing_extensions", "py36"), ), "watch": ( "watchdog", @@ -683,7 +731,8 @@ def str_to_bool(boolstr, default=False): ("trollius", "py2;cpy"), ("aenum", "py<34"), ("dataclasses", "py==36"), - ("typing_extensions", "py3"), + ("typing_extensions", "py==35"), + ("typing_extensions", "py36"), ), "dev": ( ("pre-commit", "py3"), @@ -711,7 +760,7 @@ def str_to_bool(boolstr, default=False): "psutil": (5,), "jupyter": (1, 0), "types-backports": (0, 1), - "futures": (3, 3), + "futures": (3, 4), "backports.functools-lru-cache": (1, 6), "argparse": (1, 4), "pexpect": (4,), @@ -724,21 +773,23 @@ def str_to_bool(boolstr, default=False): "sphinx": (5, 3), "pydata-sphinx-theme": (0, 
11), "myst-parser": (0, 18), - "mypy[python2]": (0, 982), + "mypy[python2]": (0, 990), + ("jupyter-console", "py36"): (6, 4), # pinned reqs: (must be added to pinned_reqs below) - # latest version supported on Python 2 - ("jupyter-client", "py2"): (5, 3), + # don't upgrade this; it breaks on Python 3.6 + ("jupyter-client", "py36"): (7, 1, 2), + ("typing_extensions", "py36"): (4, 1), # don't upgrade these; they break on Python 3.5 ("ipykernel", "py3"): (5, 5), ("ipython", "py3"): (7, 9), - ("jupyter-console", "py3"): (6, 1), - ("jupyter-client", "py3"): (6, 1, 12), + ("jupyter-console", "py==35"): (6, 1), + ("jupyter-client", "py==35"): (6, 1, 12), ("jupytext", "py3"): (1, 8), ("jupyterlab", "py35"): (2, 2), "xonsh": (0, 9), - ("typing_extensions", "py3"): (3, 10), + ("typing_extensions", "py==35"): (3, 10), # don't upgrade this to allow all versions ("prompt_toolkit", "mark3"): (1,), # don't upgrade this; it breaks on Python 2.6 @@ -748,6 +799,7 @@ def str_to_bool(boolstr, default=False): # don't upgrade this; it breaks on Python 3.4 "pygments": (2, 3), # don't upgrade these; they break on Python 2 + ("jupyter-client", "py2"): (5, 3), ("pywinpty", "py2;windows"): (0, 5), ("jupyter-console", "py2"): (5, 2), ("ipython", "py2"): (5, 4), @@ -757,21 +809,23 @@ def str_to_bool(boolstr, default=False): "papermill": (1, 2), # don't upgrade this; it breaks with old IPython versions "jedi": (0, 17), - # Coconut works best on pyparsing 2 + # Coconut requires pyparsing 2 "pyparsing": (2, 4, 7), } # should match the reqs with comments above pinned_reqs = ( - ("jupyter-client", "py3"), + ("jupyter-client", "py36"), + ("typing_extensions", "py36"), ("jupyter-client", "py2"), ("ipykernel", "py3"), ("ipython", "py3"), - ("jupyter-console", "py3"), + ("jupyter-console", "py==35"), + ("jupyter-client", "py==35"), ("jupytext", "py3"), ("jupyterlab", "py35"), "xonsh", - ("typing_extensions", "py3"), + ("typing_extensions", "py==35"), ("prompt_toolkit", "mark3"), "pytest", "vprof", @@ 
-792,7 +846,7 @@ def str_to_bool(boolstr, default=False): # that the element corresponding to the last None should be incremented _ = None max_versions = { - ("jupyter-client", "py3"): _, + ("jupyter-client", "py==35"): _, "pyparsing": _, "cPyparsing": (_, _, _), ("prompt_toolkit", "mark2"): _, diff --git a/coconut/convenience.py b/coconut/convenience.py index 823cbf11e..917734d60 100644 --- a/coconut/convenience.py +++ b/coconut/convenience.py @@ -22,7 +22,10 @@ import sys import os.path import codecs -import encodings +try: + from encodings import utf_8 +except ImportError: + utf_8 = None from coconut.integrations import embed from coconut.exceptions import CoconutException @@ -109,10 +112,10 @@ def setup(*args, **kwargs): PARSERS["any"] = PARSERS["debug"] = PARSERS["lenient"] -def parse(code="", mode="sys", state=False, keep_state=None): +def parse(code="", mode="sys", state=False, keep_internal_state=None): """Compile Coconut code.""" - if keep_state is None: - keep_state = bool(state) + if keep_internal_state is None: + keep_internal_state = bool(state) command = get_state(state) if command.comp is None: command.setup() @@ -121,7 +124,7 @@ def parse(code="", mode="sys", state=False, keep_state=None): "invalid parse mode " + repr(mode), extra="valid modes are " + ", ".join(PARSERS), ) - return PARSERS[mode](command.comp)(code, keep_state=keep_state) + return PARSERS[mode](command.comp)(code, keep_state=keep_internal_state) def coconut_eval(expression, globals=None, locals=None, state=False, **kwargs): @@ -224,28 +227,28 @@ def auto_compilation(on=True): # ----------------------------------------------------------------------------------------------------------------------- -class CoconutStreamReader(encodings.utf_8.StreamReader, object): - """Compile Coconut code from a stream of UTF-8.""" - coconut_compiler = None +if utf_8 is not None: + class CoconutStreamReader(utf_8.StreamReader, object): + """Compile Coconut code from a stream of UTF-8.""" + 
coconut_compiler = None - @classmethod - def compile_coconut(cls, source): - """Compile the given Coconut source text.""" - if cls.coconut_compiler is None: - cls.coconut_compiler = Compiler(**coconut_kernel_kwargs) - return cls.coconut_compiler.parse_sys(source) + @classmethod + def compile_coconut(cls, source): + """Compile the given Coconut source text.""" + if cls.coconut_compiler is None: + cls.coconut_compiler = Compiler(**coconut_kernel_kwargs) + return cls.coconut_compiler.parse_sys(source) - @classmethod - def decode(cls, input_bytes, errors="strict"): - """Decode and compile the given Coconut source bytes.""" - input_str, len_consumed = super(CoconutStreamReader, cls).decode(input_bytes, errors) - return cls.compile_coconut(input_str), len_consumed + @classmethod + def decode(cls, input_bytes, errors="strict"): + """Decode and compile the given Coconut source bytes.""" + input_str, len_consumed = super(CoconutStreamReader, cls).decode(input_bytes, errors) + return cls.compile_coconut(input_str), len_consumed - -class CoconutIncrementalDecoder(encodings.utf_8.IncrementalDecoder, object): - """Compile Coconut at the end of incrementally decoding UTF-8.""" - invertible = False - _buffer_decode = CoconutStreamReader.decode + class CoconutIncrementalDecoder(utf_8.IncrementalDecoder, object): + """Compile Coconut at the end of incrementally decoding UTF-8.""" + invertible = False + _buffer_decode = CoconutStreamReader.decode def get_coconut_encoding(encoding="coconut"): @@ -254,14 +257,16 @@ def get_coconut_encoding(encoding="coconut"): return None if encoding != "coconut": raise CoconutException("unknown Coconut encoding: " + repr(encoding)) + if utf_8 is None: + raise CoconutException("coconut encoding requires encodings.utf_8") return codecs.CodecInfo( name=encoding, - encode=encodings.utf_8.encode, + encode=utf_8.encode, decode=CoconutStreamReader.decode, - incrementalencoder=encodings.utf_8.IncrementalEncoder, + incrementalencoder=utf_8.IncrementalEncoder, 
incrementaldecoder=CoconutIncrementalDecoder, streamreader=CoconutStreamReader, - streamwriter=encodings.utf_8.StreamWriter, + streamwriter=utf_8.StreamWriter, ) diff --git a/coconut/stubs/coconut/convenience.pyi b/coconut/convenience.pyi similarity index 88% rename from coconut/stubs/coconut/convenience.pyi rename to coconut/convenience.pyi index d8b693208..ef9b64194 100644 --- a/coconut/stubs/coconut/convenience.pyi +++ b/coconut/convenience.pyi @@ -31,8 +31,10 @@ class CoconutException(Exception): # COMMAND: #----------------------------------------------------------------------------------------------------------------------- +GLOBAL_STATE: Optional[Command] = None -CLI: Command = ... + +def get_state(state: Optional[Command]=None) -> Command: ... def cmd(args: Union[Text, bytes, Iterable], interact: bool=False) -> None: ... @@ -63,13 +65,20 @@ def setup( PARSERS: Dict[Text, Callable] = ... -def parse(code: Text, mode: Text=...) -> Text: ... +def parse( + code: Text, + mode: Text=..., + state: Optional[Command]=..., + keep_internal_state: Optional[bool]=None, +) -> Text: ... def coconut_eval( expression: Text, globals: Optional[Dict[Text, Any]]=None, locals: Optional[Dict[Text, Any]]=None, + state: Optional[Command]=..., + keep_internal_state: Optional[bool]=None, ) -> Any: ... 
diff --git a/coconut/exceptions.py b/coconut/exceptions.py index dde883d16..5cde83846 100644 --- a/coconut/exceptions.py +++ b/coconut/exceptions.py @@ -87,6 +87,7 @@ class CoconutException(BaseCoconutException, Exception): class CoconutSyntaxError(CoconutException): """Coconut SyntaxError.""" + point_to_endpoint = False def __init__(self, message, source=None, point=None, ln=None, extra=None, endpoint=None): """Creates the Coconut SyntaxError.""" @@ -146,7 +147,11 @@ def message(self, message, source, point, ln, extra=None, endpoint=None): if point_ind > 0 or endpoint_ind > 0: message += "\n" + " " * (taberrfmt + point_ind) if endpoint_ind - point_ind > 1: - message += "~" * (endpoint_ind - point_ind - 1) + "^" + if not self.point_to_endpoint: + message += "^" + message += "~" * (endpoint_ind - point_ind - 1) + if self.point_to_endpoint: + message += "^" else: message += "^" @@ -213,6 +218,7 @@ def message(self, message, source, point, ln, target, endpoint): class CoconutParseError(CoconutSyntaxError): """Coconut ParseError.""" + point_to_endpoint = True class CoconutWarning(CoconutException): diff --git a/coconut/icoconut/root.py b/coconut/icoconut/root.py index a25a2afce..2067673b2 100644 --- a/coconut/icoconut/root.py +++ b/coconut/icoconut/root.py @@ -34,6 +34,7 @@ from coconut.constants import ( WINDOWS, PY38, + PY311, py_syntax_version, mimetype, version_banner, @@ -213,8 +214,7 @@ def run_cell(self, raw_cell, store_history=False, silent=False, shell_futures=Tr if asyncio is not None: @override - @asyncio.coroutine - def run_cell_async(self, raw_cell, store_history=False, silent=False, shell_futures=True, cell_id=None, **kwargs): + {async_}def run_cell_async(self, raw_cell, store_history=False, silent=False, shell_futures=True, cell_id=None, **kwargs): """Version of run_cell_async that always uses shell_futures.""" # same as above return super({cls}, self).run_cell_async(raw_cell, store_history, silent, shell_futures=True, **kwargs) @@ -231,15 +231,24 @@ def 
user_expressions(self, expressions): return super({cls}, self).user_expressions(compiled_expressions) ''' + format_dict = dict( + dict="{}", + async_=( + "async " if PY311 else + """@asyncio.coroutine + """ + ), + ) + class CoconutShell(ZMQInteractiveShell, object): """ZMQInteractiveShell for Coconut.""" - exec(INTERACTIVE_SHELL_CODE.format(dict="{}", cls="CoconutShell")) + exec(INTERACTIVE_SHELL_CODE.format(cls="CoconutShell", **format_dict)) InteractiveShellABC.register(CoconutShell) class CoconutShellEmbed(InteractiveShellEmbed, object): """InteractiveShellEmbed for Coconut.""" - exec(INTERACTIVE_SHELL_CODE.format(dict="{}", cls="CoconutShellEmbed")) + exec(INTERACTIVE_SHELL_CODE.format(cls="CoconutShellEmbed", **format_dict)) InteractiveShellABC.register(CoconutShellEmbed) diff --git a/coconut/py.typed b/coconut/py.typed new file mode 100644 index 000000000..e69de29bb diff --git a/coconut/requirements.py b/coconut/requirements.py index 5d84430e6..bbd880084 100644 --- a/coconut/requirements.py +++ b/coconut/requirements.py @@ -26,9 +26,9 @@ PYPY, CPYTHON, PY34, - PY35, IPY, MYPY, + XONSH, WINDOWS, PURE_PYTHON, all_reqs, @@ -208,7 +208,7 @@ def everything_in(req_dict): extras["jobs"] if not PYPY else [], extras["jupyter"] if IPY else [], extras["mypy"] if MYPY else [], - extras["xonsh"] if PY35 else [], + extras["xonsh"] if XONSH else [], ), }) diff --git a/coconut/root.py b/coconut/root.py index 33858ab18..e1ca6c0ff 100644 --- a/coconut/root.py +++ b/coconut/root.py @@ -23,11 +23,11 @@ # VERSION: # ----------------------------------------------------------------------------------------------------------------------- -VERSION = "2.1.0" +VERSION = "2.1.1" VERSION_NAME = "The Spanish Inquisition" # False for release, int >= 1 for develop DEVELOP = False -ALPHA = False +ALPHA = False # for pre releases rather than post releases # ----------------------------------------------------------------------------------------------------------------------- # UTILITIES: diff 
--git a/coconut/terminal.py b/coconut/terminal.py index ff701893a..56a377038 100644 --- a/coconut/terminal.py +++ b/coconut/terminal.py @@ -20,6 +20,7 @@ from coconut.root import * # NOQA import sys +import os import traceback import logging from contextlib import contextmanager @@ -42,9 +43,12 @@ taberrfmt, use_packrat_parser, embed_on_internal_exc, + use_color, + error_color_code, + log_color_code, + ansii_escape, ) from coconut.util import ( - printerr, get_clock_time, get_name, displayable, @@ -60,6 +64,18 @@ # UTILITIES: # ----------------------------------------------------------------------------------------------------------------------- +ansii_reset = ansii_escape + "[0m" + + +def isatty(stream, default=None): + """Check if a stream is a terminal interface.""" + try: + return stream.isatty() + except Exception: + logger.log_exc() + return default + + def format_error(err_value, err_type=None, err_trace=None): """Properly formats the specified error.""" if err_type is None: @@ -163,6 +179,7 @@ class Logger(object): quiet = False path = None name = None + colors_enabled = False tracing = False trace_ind = 0 @@ -172,6 +189,17 @@ def __init__(self, other=None): self.copy_from(other) self.patch_logging() + @classmethod + def enable_colors(cls): + """Attempt to enable CLI colors.""" + if not cls.colors_enabled: + # necessary to resolve https://bugs.python.org/issue40134 + try: + os.system("") + except Exception: + logger.log_exc() + cls.colors_enabled = True + def copy_from(self, other): """Copy other onto self.""" self.verbose, self.quiet, self.path, self.name, self.tracing, self.trace_ind = other.verbose, other.quiet, other.path, other.name, other.tracing, other.trace_ind @@ -184,48 +212,81 @@ def copy(self): """Make a copy of the logger.""" return Logger(self) - def display(self, messages, sig="", debug=False, end="\n", **kwargs): + def display(self, messages, sig="", end="\n", file=None, level="normal", color=None, **kwargs): """Prints an iterator of 
messages.""" - full_message = "".join( - sig + line for line in " ".join( - str(msg) for msg in messages - ).splitlines(True) - ) + end - if not full_message: - full_message = sig.rstrip() - # we use end="" to ensure atomic printing - if debug: - printerr(full_message, end="", **kwargs) + if level == "normal": + file = file or sys.stdout + elif level == "logging": + file = file or sys.stderr + color = color or log_color_code + elif level == "error": + file = file or sys.stderr + color = color or error_color_code else: - print(full_message, end="", **kwargs) + raise CoconutInternalException("invalid logging level", level) + + if use_color is False or (use_color is None and not isatty(file)): + color = None + + if color: + self.enable_colors() + + raw_message = " ".join(str(msg) for msg in messages) + # if there's nothing to display but there is a sig, display the sig + if not raw_message and sig: + raw_message = "\n" + + components = [] + if color: + components.append(ansii_escape + "[" + color + "m") + for line in raw_message.splitlines(True): + if sig: + line = sig + line + components.append(line) + if color: + components.append(ansii_reset) + components.append(end) + full_message = "".join(components) + + # we use end="" to ensure atomic printing (and so we add the end in earlier) + print(full_message, file=file, end="", **kwargs) def print(self, *messages, **kwargs): """Print messages to stdout.""" self.display(messages, **kwargs) def printerr(self, *messages, **kwargs): + """Print errors to stderr.""" + self.display(messages, level="error", **kwargs) + + def printlog(self, *messages, **kwargs): """Print messages to stderr.""" - self.display(messages, debug=True, **kwargs) + self.display(messages, level="logging", **kwargs) - def show(self, *messages): + def show(self, *messages, **kwargs): """Prints messages if not --quiet.""" if not self.quiet: - self.display(messages) + self.display(messages, **kwargs) - def show_sig(self, *messages): + def show_sig(self, 
*messages, **kwargs): """Prints messages with main signature if not --quiet.""" if not self.quiet: - self.display(messages, main_sig) + self.display(messages, main_sig, **kwargs) - def show_error(self, *messages): + def show_error(self, *messages, **kwargs): """Prints error messages with main signature if not --quiet.""" if not self.quiet: - self.display(messages, main_sig, debug=True) + self.display(messages, main_sig, level="error", **kwargs) def log(self, *messages): """Logs debug messages if --verbose.""" if self.verbose: - self.printerr(*messages) + self.printlog(*messages) + + def log_stdout(self, *messages): + """Logs debug messages to stdout if --verbose.""" + if self.verbose: + self.print(*messages) def log_lambda(self, *msg_funcs): if self.verbose: @@ -234,7 +295,7 @@ def log_lambda(self, *msg_funcs): if callable(msg): msg = msg() messages.append(msg) - self.printerr(*messages) + self.printlog(*messages) def log_func(self, func): """Calls a function and logs the results if --verbose.""" @@ -242,12 +303,12 @@ def log_func(self, func): to_log = func() if not isinstance(to_log, tuple): to_log = (to_log,) - self.printerr(*to_log) + self.printlog(*to_log) def log_prefix(self, prefix, *messages): """Logs debug messages with the given signature if --verbose.""" if self.verbose: - self.display(messages, prefix, debug=True) + self.display(messages, prefix, level="logging") def log_sig(self, *messages): """Logs debug messages with the main signature if --verbose.""" @@ -259,7 +320,7 @@ def log_vars(self, message, variables, rem_vars=("self",)): new_vars = dict(variables) for v in rem_vars: del new_vars[v] - self.printerr(message, new_vars) + self.printlog(message, new_vars) def get_error(self, err=None, show_tb=None): """Properly formats the current error.""" @@ -301,11 +362,18 @@ def warn_err(self, warning, force=False): try: raise warning except Exception: - self.print_exc() + self.print_exc(warning=True) - def print_exc(self, err=None, show_tb=None): - 
"""Properly prints an exception in the exception context.""" - errmsg = self.get_error(err, show_tb) + def print_exc(self, err=None, show_tb=None, warning=False): + """Properly prints an exception.""" + self.print_formatted_error(self.get_error(err, show_tb), warning) + + def print_exception(self, err_type, err_value, err_tb): + """Properly prints the given exception details.""" + self.print_formatted_error(format_error(err_value, err_type, err_tb)) + + def print_formatted_error(self, errmsg, warning=False): + """Print a formatted error message in the current context.""" if errmsg is not None: if self.path is not None: errmsg_lines = ["in " + self.path + ":"] @@ -314,7 +382,10 @@ def print_exc(self, err=None, show_tb=None): line = " " * taberrfmt + line errmsg_lines.append(line) errmsg = "\n".join(errmsg_lines) - self.printerr(errmsg) + if warning: + self.printlog(errmsg) + else: + self.printerr(errmsg) def log_exc(self, err=None): """Display an exception only if --verbose.""" @@ -342,7 +413,7 @@ def indent_tracing(self): def print_trace(self, *args): """Print to stderr with tracing indent.""" trace = " ".join(str(arg) for arg in args) - self.printerr(_indent(trace, self.trace_ind)) + self.printlog(_indent(trace, self.trace_ind)) def log_tag(self, tag, code, multiline=False): """Logs a tagged message if tracing.""" @@ -411,10 +482,10 @@ def gather_parsing_stats(self): yield finally: elapsed_time = get_clock_time() - start_time - self.printerr("Time while parsing:", elapsed_time, "secs") + self.printlog("Time while parsing:", elapsed_time, "secs") if use_packrat_parser: hits, misses = ParserElement.packrat_cache_stats - self.printerr("\tPackrat parsing stats:", hits, "hits;", misses, "misses") + self.printlog("\tPackrat parsing stats:", hits, "hits;", misses, "misses") else: yield @@ -430,7 +501,7 @@ def getLogger(name=None): def pylog(self, *args, **kwargs): """Display all available logging information.""" - self.printerr(self.name, args, kwargs, 
traceback.format_exc()) + self.printlog(self.name, args, kwargs, traceback.format_exc()) debug = info = warning = error = critical = exception = pylog diff --git a/coconut/tests/main_test.py b/coconut/tests/main_test.py index 62c882d81..b423f254e 100644 --- a/coconut/tests/main_test.py +++ b/coconut/tests/main_test.py @@ -93,7 +93,7 @@ mypy_args = ["--follow-imports", "silent", "--ignore-missing-imports", "--allow-redefinition"] ignore_mypy_errs_with = ( - "Exiting with error: MyPy error", + "with error: MyPy error", "tutorial.py", "unused 'type: ignore' comment", "site-packages/numpy", @@ -464,11 +464,6 @@ def comp_3(args=[], **kwargs): comp(path="cocotest", folder="target_3", args=["--target", "3"] + args, **kwargs) -def comp_33(args=[], **kwargs): - """Compiles target_33.""" - comp(path="cocotest", folder="target_33", args=["--target", "33"] + args, **kwargs) - - def comp_35(args=[], **kwargs): """Compiles target_35.""" comp(path="cocotest", folder="target_35", args=["--target", "35"] + args, **kwargs) @@ -519,8 +514,6 @@ def run(args=[], agnostic_target=None, use_run_arg=False, convert_to_import=Fals comp_2(args, **kwargs) else: comp_3(args, **kwargs) - if sys.version_info >= (3, 3): - comp_33(args, **kwargs) if sys.version_info >= (3, 5): comp_35(args, **kwargs) if sys.version_info >= (3, 6): @@ -564,7 +557,6 @@ def comp_all(args=[], agnostic_target=None, **kwargs): comp_2(args, **kwargs) comp_3(args, **kwargs) - comp_33(args, **kwargs) comp_35(args, **kwargs) comp_36(args, **kwargs) comp_38(args, **kwargs) diff --git a/coconut/tests/src/cocotest/agnostic/main.coco b/coconut/tests/src/cocotest/agnostic/main.coco index 12eee0ba9..e22994468 100644 --- a/coconut/tests/src/cocotest/agnostic/main.coco +++ b/coconut/tests/src/cocotest/agnostic/main.coco @@ -799,7 +799,7 @@ def main_test() -> bool: def \match(x) = (+)$(1) <| x assert match(1) == 2 try: - match[0] = 1 + match[0] = 1 # type: ignore except TypeError: pass else: @@ -1104,13 +1104,6 @@ def main_test() -> 
bool: assert_raises(-> (|1,2,3|)$[0.5:], TypeError) assert_raises(-> (|1,2,3|)$[:2.5], TypeError) assert_raises(-> (|1,2,3|)$[::1.5], TypeError) - def exec_rebind_test(): - exec = 1 - assert exec + 1 == 2 - def exec(x) = x - assert exec(1) == 1 - return True - assert exec_rebind_test() is True try: (raise)(TypeError(), ValueError()) except TypeError as err: @@ -1191,6 +1184,49 @@ def main_test() -> bool: for x in *(1, 2), *(3, 4): xs.append(x) assert xs == [1, 2, 3, 4] + assert \_coconut.typing.NamedTuple + class Asup: + a = 1 + class Bsup(Asup): + def get_super_1(self) = super() + def get_super_2(self) = super(Bsup, self) + def get_super_3(self) = py_super(Bsup, self) + bsup = Bsup() + assert bsup.get_super_1().a == 1 + assert bsup.get_super_2().a == 1 + assert bsup.get_super_3().a == 1 + e = exec + test: dict = {} + e("a=1", test) + assert test["a"] == 1 + class SupSup: + sup = "sup" + class Sup(SupSup): + def super(self) = super() + assert Sup().super().sup == "sup" + assert s{1, 2} ⊆ s{1, 2, 3} + try: + assert (False, "msg") + except AssertionError: + pass + else: + assert False + mut = [0] + (def -> mut[0] += 1)() + assert mut[0] == 1 + to_int: ... -> int = -> 5 + to_int_: (...) -> int = -> 5 + assert to_int() + to_int_() == 10 + assert 3 |> (./2) == 3/2 == (./2) <| 3 + assert 2 |> (3/.) == 3/2 == (3/.) <| 2 + x = 3 + x |>= (./2) + assert x == 3/2 + x = 2 + x |>= (3/.) + assert x == 3/2 + assert (./2) |> (.`of`3) == 3/2 + assert 5 |> (.*2) |> (2/.) == 1/5 == 5 |> (2*.) |> (./2) |> (1/.) 
return True def test_asyncio() -> bool: @@ -1203,7 +1239,7 @@ def easter_egg_test() -> bool: import sys as _sys num_mods_0 = len(_sys.modules) import * # type: ignore - assert sys == _sys + assert sys is _sys assert len(_sys.modules) > num_mods_0 orig_name = __name__ from * import * # type: ignore @@ -1262,9 +1298,6 @@ def run_main(test_easter_eggs=False) -> bool: else: from .py3_test import py3_test assert py3_test() is True - if sys.version_info >= (3, 3): - from .py33_test import py33_test - assert py33_test() is True if sys.version_info >= (3, 5): from .py35_test import py35_test assert py35_test() is True diff --git a/coconut/tests/src/cocotest/agnostic/specific.coco b/coconut/tests/src/cocotest/agnostic/specific.coco index e5ad3f375..a39e0a01d 100644 --- a/coconut/tests/src/cocotest/agnostic/specific.coco +++ b/coconut/tests/src/cocotest/agnostic/specific.coco @@ -32,7 +32,7 @@ def non_py32_test() -> bool: def py36_spec_test(tco: bool) -> bool: """Tests for any py36+ version.""" from dataclasses import dataclass - from typing import Any + from typing import Any, Literal outfile = StringIO() @@ -79,6 +79,58 @@ def py36_spec_test(tco: bool) -> bool: assert outfile.getvalue() == "\n" * 10001 + class HasGens[T, U] + assert HasGens `issubclass` object + + class HasPSpec[**P] + assert HasPSpec `issubclass` object + + data D1[T](x: T, y: T) # type: ignore + assert D1(10, 20).y == 20 + + data D2[T: int[]](xs: T) # type: ignore + assert D2((10, 20)).xs == (10, 20) + + def myid[T](x: T) -> T = x + assert myid(10) == 10 + + def fst[T](x: T, y: T) -> T = x + assert fst(1, 2) == 1 + + def twople[T, U](x: T, y: U) -> (T; U) = (x, y) + assert twople(1, 2) == (1, 2) + + def head[T: int[]](xs: T) -> (int; T) = (xs[0], xs) + def head_[T <= int[]](xs: T) -> (int; T) = (xs[0], xs) + assert head(range(5)) == (0, range(5)) == head_(range(5)) + + def duplicate[T](x: T) -> (T; T) = x, y where: + y: T = x + assert duplicate(10) == (10, 10) + + class HasStr[T <= str]: + def 
__init__(self, x: T): + self.x: T = x + + def get(self) -> T: + return self.x + + hello: Literal["hello"] = "hello" + hello = HasStr(hello).get() + + def and_then[**P, T, U](f: (**P) -> T, g: T -> U) -> (**P) -> U = + (*args, **kwargs) -> g(f(*args, **kwargs)) + assert (.+5) `and_then` (.*2) <| 3 == 16 + + def mk_repeat[T, **P](f: (T, **P) -> T) -> (int, T, **P) -> T: + def newf(n: int, x: T, *args, **kwargs) -> T: + if n == 0: + return x + else: + return newf(n - 1, f(x, *args, **kwargs), *args, **kwargs) + return newf + assert mk_repeat(+)(3, 1, 2) == 7 + return True @@ -98,6 +150,8 @@ def py37_spec_test() -> bool: l: typing.List[int] = [] range(10) |> toa |> fmap$(l.append) |> aconsume |> asyncio.run assert l == list(range(10)) + class HasVarGen[*Ts] # type: ignore + assert HasVarGen `issubclass` object return True diff --git a/coconut/tests/src/cocotest/agnostic/suite.coco b/coconut/tests/src/cocotest/agnostic/suite.coco index fa9589d11..9af1688ca 100644 --- a/coconut/tests/src/cocotest/agnostic/suite.coco +++ b/coconut/tests/src/cocotest/agnostic/suite.coco @@ -216,6 +216,7 @@ def suite_test() -> bool: assert inh_a.inh_true2() is True assert inh_a.inh_true3() is True assert inh_a.inh_true4() is True + assert inh_a.inh_true5() is True assert inh_A.inh_cls_true() is True assert pt.__doc__ out0 = grid() |> grid_trim$(xmax=5, ymax=5) @@ -961,6 +962,19 @@ forward 2""") == 900 assert !0 == 1 assert ![] is True assert (<$).__name__ == '_coconut_op_U3c_U24' == f"{(<$).__name__}" + a_list: list_or_tuple = [1, 2, 3] + a_list = (1, 2, 3) + a_func: func_to_int = (.+1) + a_tuple: TupleOf[int] = a_list + a_seq: Seq[int] = a_tuple + a_dict: TextMap[str, int] = {"a": 1} + assert HasT().T == 1 + assert dict_zip({"a": 1, "b": 2}, {"a": 3, "b": 4}) == {"a": [1, 3], "b": [2, 4]} + assert intdata(x=2).x == 2 == intdata_(x=2).x + assert weird_recursor() + summer.acc = 0 + summer.args = list(range(100_000)) + assert summer() == sum(range(100_000)) # must come at end assert 
fibs_calls[0] == 1 @@ -973,4 +987,5 @@ def tco_test() -> bool: assert is_even_(5000) and is_odd_(5001) assert hasattr(ret_none, "_coconut_tco_func") assert hasattr(tricky_tco, "_coconut_tco_func") + assert methtest().recurse_n_times(100_000) == "done!" return True diff --git a/coconut/tests/src/cocotest/agnostic/util.coco b/coconut/tests/src/cocotest/agnostic/util.coco index 89e49103f..bfc2c5a0c 100644 --- a/coconut/tests/src/cocotest/agnostic/util.coco +++ b/coconut/tests/src/cocotest/agnostic/util.coco @@ -5,8 +5,6 @@ import operator # NOQA from contextlib import contextmanager from functools import wraps from collections import defaultdict -if TYPE_CHECKING: - import typing # Helpers: def rand_list(n): @@ -33,6 +31,16 @@ def assert_raises(c, exc=Exception): else: raise AssertionError(f"{c} failed to raise exception {exc}") +try: + prepattern() # type: ignore +except NameError, TypeError: + def prepattern(base_func, **kwargs): # type: ignore + """Decorator to add a new case to a pattern-matching function, + where the new case is checked first.""" + def pattern_prepender(func): + return addpattern(func, base_func, **kwargs) + return pattern_prepender + # Old functions: old_fmap = fmap$(starmap_over_mappings=True) @@ -176,6 +184,23 @@ addpattern def (float(x))! = 0.0 if x else 1.0 # type: ignore addpattern def x! if x = False # type: ignore addpattern def x! = True # type: ignore +# Type aliases: +if sys.version_info >= (3, 5) or TYPE_CHECKING: + import typing + + type list_or_tuple = list | tuple + + type func_to_int = -> int + + type Seq[T] = T[] + + type TupleOf[T] = typing.Tuple[T, ...] + + type TextMap[T: typing.Text, U] = typing.Mapping[T, U] + +class HasT: + T = 1 + # Quick-Sorts: def qsort1(l: int[]) -> int[]: '''Non-Functional Quick Sort.''' @@ -362,6 +387,11 @@ def return_in_loop(x): class methtest: def meth(self, arg) = meth(self, arg) def tail_call_meth(self, arg) = self.meth(arg) + @staticmethod + def recurse_n_times(n): + if n == 0: return "done!" 
+ return methtest.recurse_n_times(n-1) + def meth(self, arg) = arg def un_treable_func1(x, g=-> _): @@ -436,12 +466,30 @@ def tricky_tco(func): except TypeError: return func() +weird_recursor_ns = [50] + +def weird_recursor(n): + if n == 0: + weird_recursor_ns.pop() + return True + weird_recursor_ns[-1] -= 1 + return weird_recursor() # type: ignore + +@prepattern(weird_recursor) # type: ignore +match def weird_recursor() = weird_recursor(weird_recursor_ns[-1]) + +def summer(): + if not summer.args: + return summer.acc + summer.acc += summer.args.pop() + return summer() + # Data Blocks: try: datamaker() # type: ignore except NameError, TypeError: - def datamaker(data_type): + def datamaker(data_type): # type: ignore """Get the original constructor of the given data type or class.""" return makedata$(data_type) @@ -748,10 +796,12 @@ class inh_A(A): def inh_true1(self) = super().true() def inh_true2(self) = - py_super(inh_A, self).true() + super(inh_A, self).true() def inh_true3(nonstandard_self) = super().true() - inh_true4 = def (self) -> super().true() + def inh_true4(self) = + py_super(inh_A, self).true() + inh_true5 = def (self) -> super().true() @classmethod def inh_cls_true(cls) = super().cls_true() class B: @@ -817,16 +867,6 @@ def SHOPeriodTerminate(X, t, params): return 0 # keep going # Multiple dispatch: -try: - prepattern() # type: ignore -except NameError, TypeError: - def prepattern(base_func, **kwargs): # type: ignore - """Decorator to add a new case to a pattern-matching function, - where the new case is checked first.""" - def pattern_prepender(func): - return addpattern(func, base_func, **kwargs) - return pattern_prepender - def add_int_or_str_1(int() as x) = x + 1 addpattern def add_int_or_str_1(str() as x) = x + "1" # type: ignore @@ -1148,6 +1188,9 @@ data data6(int() as x) from BaseClass data namedpt(str() as name, int() as x, int() as y): def mag(self) = (self.x**2 + self.y**2)**0.5 +data intdata(int(x)) +data intdata_(class int(x)) + # 
Descriptor test def tuplify(*args) = args @@ -1387,6 +1430,13 @@ truncate_sentence = ( maxcolsum = map$(sum) ..> max +dict_zip = ( + (,) + ..> map$(.items()) + ..> flatten + ..> collectby$(.[0], value_func=.[1]) +) + # n-ary reduction def binary_reduce(binop, it) = ( diff --git a/coconut/tests/src/cocotest/target_3/py3_test.coco b/coconut/tests/src/cocotest/target_3/py3_test.coco index 0b0e2b7b2..acdef4f73 100644 --- a/coconut/tests/src/cocotest/target_3/py3_test.coco +++ b/coconut/tests/src/cocotest/target_3/py3_test.coco @@ -22,19 +22,20 @@ def py3_test() -> bool: assert head_tail((|1, 2, 3|)) == (1, [2, 3]) assert py_map((x) -> x+1, range(4)) |> tuple == (1, 2, 3, 4) assert py_zip(range(3), range(3)) |> tuple == ((0, 0), (1, 1), (2, 2)) - e = exec - test: dict = {} - e("a=1", test) - assert test["a"] == 1 def keyword_only(*, a) = a assert keyword_only(a=10) == 10 čeština = "czech" assert čeština == "czech" - class A: - a = 1 - class B(A): - def get_super_1(self) = super() - def get_super_2(self) = super(B, self) - b = B() - assert b.get_super_1().a == 1 == b.get_super_2().a + class HasExecMethod: + def exec(self, x) = x() + has_exec = HasExecMethod() + assert hasattr(has_exec, "exec") + assert has_exec.exec(-> 1) == 1 + def exec_rebind_test(): + exec = 1 + assert exec + 1 == 2 + def exec(x) = x + assert exec(1) == 1 + return True + assert exec_rebind_test() is True return True diff --git a/coconut/tests/src/cocotest/target_33/py33_test.coco b/coconut/tests/src/cocotest/target_33/py33_test.coco deleted file mode 100644 index 6bcf640a9..000000000 --- a/coconut/tests/src/cocotest/target_33/py33_test.coco +++ /dev/null @@ -1,10 +0,0 @@ -def py33_test() -> bool: - """Performs Python-3.3-specific tests.""" - yield def f(x) = x - l = [] - yield def g(x): - result = yield from f(x) - l.append(result) - assert g(10) |> list == [] - assert l == [10] - return True diff --git a/coconut/tests/src/cocotest/target_sys/target_sys_test.coco 
b/coconut/tests/src/cocotest/target_sys/target_sys_test.coco index ab484b1c3..26eb73610 100644 --- a/coconut/tests/src/cocotest/target_sys/target_sys_test.coco +++ b/coconut/tests/src/cocotest/target_sys/target_sys_test.coco @@ -55,9 +55,15 @@ def asyncio_test() -> bool: assert (await ((pow$(2), range(5)) |> async_map)) |> tuple == (1, 2, 4, 8, 16) True async def aplus(x) = y -> x + y + aplus_: async int -> int -> int = async def x -> y -> x + y async def main(): assert await async_map_test() assert `(+)$(1) .. await aplus 1` 1 == 3 + assert `(.+1) .. await aplus_ 1` 1 == 3 + assert await (async def (x, y) -> x + y)(1, 2) == 3 + assert await (async def (int(x), int(y)) -> x + y)(1, 2) == 3 + assert await (async match def (int(x), int(y)) -> x + y)(1, 2) == 3 + assert await (match async def (int(x), int(y)) -> x + y)(1, 2) == 3 loop = asyncio.new_event_loop() loop.run_until_complete(main()) @@ -109,4 +115,13 @@ def target_sys_test() -> bool: assert err.args[0] == (1, 2) else: assert False + + yield def f(x) = x + l = [] + yield def g(x): + result = yield from f(x) + l.append(result) + assert g(10) |> list == [] + assert l == [10] + return True diff --git a/coconut/tests/src/extras.coco b/coconut/tests/src/extras.coco index d9b1a8b3c..978127939 100644 --- a/coconut/tests/src/extras.coco +++ b/coconut/tests/src/extras.coco @@ -68,6 +68,9 @@ def unwrap_future(event_loop, maybe_future): def test_setup_none() -> bool: + assert_raises((def -> import \(_coconut)), ImportError, err_has="should never be done at runtime") # NOQA + assert_raises((def -> import \_coconut), ImportError, err_has="should never be done at runtime") # NOQA + assert consume(range(10), keep_last=1)[0] == 9 == coc_consume(range(10), keep_last=1)[0] assert version() == version("num") assert version("name") @@ -98,13 +101,15 @@ def test_setup_none() -> bool: assert "==" not in parse("None = None") assert parse("(1\f+\f2)", "lenient") == "(1 + 2)" == parse("(1\f+\f2)", "eval") assert "Ellipsis" not in 
parse("x: ...") + assert parse(r"\exec", "lenient") == "exec" # things that don't parse correctly without the computation graph if not PYPY: - exec(parse("assert (1,2,3,4) == ([1, 2], [3, 4]) |*> def (x, y) -> *x, *y")) + exec(parse("assert (1,2,3,4) == ([1, 2], [3, 4]) |*> def (x, y) -> *x, *y"), {}) assert_raises(-> parse("(a := b)"), CoconutTargetError) assert_raises(-> parse("async def f() = 1"), CoconutTargetError) + assert_raises(-> parse("exec = 1"), CoconutTargetError) assert_raises(-> parse(" abc", "file"), CoconutSyntaxError) assert_raises(-> parse("'"), CoconutSyntaxError) @@ -118,8 +123,18 @@ def test_setup_none() -> bool: assert_raises(-> parse("f(**x, y)"), CoconutSyntaxError) assert_raises(-> parse("def f(x) = return x"), CoconutSyntaxError) assert_raises(-> parse("def f(x) =\n return x"), CoconutSyntaxError) + assert_raises(-> parse('f"Black holes {*all_black_holes} and revelations"'), CoconutSyntaxError, err_has="format string") assert_raises(-> parse("operator ++\noperator ++"), CoconutSyntaxError, err_has="custom operator already declared") + assert_raises( + -> parse("type abc[T,T] = T | T"), + CoconutSyntaxError, + err_has=""" +cannot reassign type variable: 'T' (line 1) + type abc[T,T] = T | T + ^ + """.strip(), + ) assert_raises(-> parse("$"), CoconutParseError, err_has=" ^") assert_raises(-> parse("range(1,10) |> reduce$(*, initializer = 1000) |> print"), CoconutParseError, err_has=" ~~~~~~~~~~~~~~~~~~~~~~~~^") @@ -228,6 +243,9 @@ else: except CoconutStyleError as err: assert str(err) == """found deprecated isinstance-checking 'x is int is str' pattern; rewrite to use class patterns (try 'int(x) and str(x)') or explicit isinstance-checking ('x `isinstance` int and x `isinstance` str' should always work) (remove --strict to dismiss) (line 2) x is int is str = x""" + assert_raises(-> parse("""case x: + match x: + pass"""), CoconutStyleError, err_has="case x:") setup(target="2.7") assert parse("from io import BytesIO", mode="lenient") == "from 
io import BytesIO" diff --git a/conf.py b/conf.py index 54ca68d06..609517ff1 100644 --- a/conf.py +++ b/conf.py @@ -24,7 +24,7 @@ from coconut.root import * # NOQA from coconut.constants import ( - version_str_tag, + version_tag, without_toc, with_toc, exclude_docs_dirs, @@ -56,7 +56,7 @@ ) version = VERSION -release = version_str_tag +release = version_tag html_theme = "pydata_sphinx_theme" html_theme_options = {