diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml new file mode 100644 index 000000000..a94a63b09 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/config.yml @@ -0,0 +1,8 @@ +blank_issues_enabled: true +contact_links: + - name: Discord Python Polska + url: https://discord.com/invite/VCyBDGH38e + about: Dyskusje o tłumaczeniach. + - name: Transifex + url: https://explore.transifex.com/python-doc/python-newest/ + about: Strona do tłumaczenia. diff --git a/.github/ISSUE_TEMPLATE/typo.yml b/.github/ISSUE_TEMPLATE/typo.yml new file mode 100644 index 000000000..ccd748c82 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/typo.yml @@ -0,0 +1,36 @@ +name: Błąd w tłumaczeniu +description: Zgłoś błąd w tłumaczeniu +labels: ["bug"] +body: + - type: markdown + attributes: + value: | + **Chcesz to naprawić samemu?** + + - Wejdź na stronę projektu [dokumentacji Pythona](https://explore.transifex.com/python-doc/python-newest/). + - Naciśnij przycisk „Join this project”, aby dołączyć do projektu. + - Utwórz konto Transifex. + - Na stronie projektu wybierz język polski. + - Po dołączeniu do zespołu wybierz zasób, który chcesz poprawić/zaktualizować. + + Więcej informacji znajdziesz w naszym [README](https://github.com/python/python-docs-pl/blob/3.14/README.md). + - type: textarea + attributes: + label: "Opis błędu:" + description: > + Opisz szczegółowo lokalizację błędu. 
+ validations: + required: true + - type: dropdown + attributes: + label: "Wersja dokumentacji:" + multiple: true + options: + - "3.9" + - "3.10" + - "3.11" + - "3.12" + - "3.13" + - "3.14" + validations: + required: false diff --git a/.github/workflows/update-lint-and-build.yml b/.github/workflows/update-lint-and-build.yml index e5d4d0fe5..91e12dfa8 100644 --- a/.github/workflows/update-lint-and-build.yml +++ b/.github/workflows/update-lint-and-build.yml @@ -9,7 +9,7 @@ on: workflow_dispatch: jobs: - update-translation: + update: runs-on: ubuntu-latest strategy: fail-fast: false @@ -54,7 +54,7 @@ jobs: - run: git config --local user.name "GitHub Action's update-translation job" - name: Check changes significance run: > - ! git diff -I'^"POT-Creation-Date: ' -I'^"Language-Team: ' -I'^# ' -I'^"Last-Translator: ' --exit-code && echo "SIGNIFICANT_CHANGES=1" >> $GITHUB_ENV || exit 0 + ! git diff -I'^"POT-Creation-Date: ' -I'^"Language-Team: ' -I'^# ' -I'^"Last-Translator: ' -I'^"Project-Id-Version: ' --exit-code && echo "SIGNIFICANT_CHANGES=1" >> $GITHUB_ENV || exit 0 - run: git add . 
- run: git commit -m 'Update translation from Transifex' if: env.SIGNIFICANT_CHANGES @@ -71,7 +71,7 @@ jobs: fail-fast: false matrix: version: [3.14, 3.13, 3.12, 3.11] - needs: ['update-translation'] + needs: ['update'] continue-on-error: true steps: - uses: actions/setup-python@master @@ -84,14 +84,14 @@ jobs: - uses: rffontenelle/sphinx-lint-problem-matcher@v1.0.0 - run: sphinx-lint - build-translation: + build: runs-on: ubuntu-latest strategy: fail-fast: false matrix: - version: [3.14, 3.13, 3.12, 3.11, '3.10', 3.9, 3.8] + version: [3.14, 3.13, 3.12, 3.11, '3.10', 3.9] format: [html, latex, epub] - needs: ['update-translation'] + needs: ['update'] steps: - uses: actions/setup-python@master with: @@ -121,8 +121,8 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - version: [3.14, 3.13, 3.12, 3.11, '3.10', 3.9, 3.8] - needs: ['build-translation'] + version: [3.14, 3.13, 3.12, 3.11, '3.10', 3.9] + needs: ['build'] steps: - uses: actions/download-artifact@master with: @@ -140,7 +140,7 @@ jobs: strategy: matrix: version: [3.14] - needs: ['build-translation'] + needs: ['build'] continue-on-error: true steps: - uses: actions/setup-python@v5 diff --git a/README.en.md b/README.en.md index 47f9cf3e3..f6f01433b 100644 --- a/README.en.md +++ b/README.en.md @@ -13,7 +13,7 @@ f'''![build](https://github.com/python/python-docs-pl/actions/workflows/update-l ![{translators} Translators](https://img.shields.io/badge/Translators-{translators}-0.svg)''') ]]] --> ![build](https://github.com/python/python-docs-pl/actions/workflows/update-lint-and-build.yml/badge.svg) -![Total Translation of Documentation](https://img.shields.io/badge/Total-5.169%25-0.svg) +![Total Translation of Documentation](https://img.shields.io/badge/Total-4.701%25-0.svg) ![24 Translators](https://img.shields.io/badge/Translators-24-0.svg) diff --git a/README.md b/README.md index c6f987af1..fdb276c38 100644 --- a/README.md +++ b/README.md @@ -13,7 +13,7 @@ 
f'''![build](https://github.com/python/python-docs-pl/actions/workflows/update-l ![{translators} tłumaczy](https://img.shields.io/badge/tłumaczy-{translators}-0.svg)''') ]]] --> ![build](https://github.com/python/python-docs-pl/actions/workflows/update-lint-and-build.yml/badge.svg) -![postęp tłumaczenia całości dokumentacji](https://img.shields.io/badge/całość-5.169%25-0.svg) +![postęp tłumaczenia całości dokumentacji](https://img.shields.io/badge/całość-4.701%25-0.svg) ![24 tłumaczy](https://img.shields.io/badge/tłumaczy-24-0.svg) @@ -68,7 +68,7 @@ Wyrażasz akceptację tej umowy przesyłając swoją pracę do włączenia do do * `cog -rP README.md` **Przydatne materiały** -* [statystyki oglądalności](https://plausible.io/docs.python.org/?filters=%28%28contains,page,%28/pl/%29%29%29) +* [statystyki oglądalności](https://analytics.python.org/docs.python.org?f=contains,page,/pl/) * [Python Developer's Guide: Documentation](https://devguide.python.org/documentation/) **Podobne projekty** diff --git a/c-api/unicode.po b/c-api/unicode.po index 8b47c7413..9a6b1eb71 100644 --- a/c-api/unicode.po +++ b/c-api/unicode.po @@ -12,7 +12,7 @@ msgid "" msgstr "" "Project-Id-Version: Python 3.14\n" "Report-Msgid-Bugs-To: \n" -"POT-Creation-Date: 2025-05-08 02:53-0300\n" +"POT-Creation-Date: 2025-05-16 14:19+0000\n" "PO-Revision-Date: 2021-06-28 00:50+0000\n" "Last-Translator: Stan Ulbrych, 2025\n" "Language-Team: Polish (https://app.transifex.com/python-doc/teams/5390/pl/)\n" @@ -729,6 +729,14 @@ msgid "" "difference being that it decrements the reference count of *right* by one." msgstr "" +msgid "" +"Return a mapping suitable for decoding a custom single-byte encoding. Given " +"a Unicode string *string* of up to 256 characters representing an encoding " +"table, returns either a compact internal mapping object or a dictionary " +"mapping character ordinals to byte values. Raises a :exc:`TypeError` and " +"return ``NULL`` on invalid input." 
+msgstr "" + msgid "" "Return the name of the default string encoding, ``\"utf-8\"``. See :func:" "`sys.getdefaultencoding`." diff --git a/deprecations/index.po b/deprecations/index.po index d69124528..a76caaa03 100644 --- a/deprecations/index.po +++ b/deprecations/index.po @@ -12,7 +12,7 @@ msgid "" msgstr "" "Project-Id-Version: Python 3.14\n" "Report-Msgid-Bugs-To: \n" -"POT-Creation-Date: 2025-05-08 02:53-0300\n" +"POT-Creation-Date: 2025-05-16 14:19+0000\n" "PO-Revision-Date: 2024-07-29 04:07+0000\n" "Last-Translator: Stan Ulbrych, 2025\n" "Language-Team: Polish (https://app.transifex.com/python-doc/teams/5390/pl/)\n" @@ -141,6 +141,14 @@ msgid "" "or the functional syntax instead." msgstr "" +msgid "" +"When using the functional syntax of :class:`~typing.TypedDict`\\s, failing " +"to pass a value to the *fields* parameter (``TD = TypedDict(\"TD\")``) or " +"passing ``None`` (``TD = TypedDict(\"TD\", None)``) has been deprecated " +"since Python 3.13. Use ``class TD(TypedDict): pass`` or ``TD = " +"TypedDict(\"TD\", {})`` to create a TypedDict with zero field." +msgstr "" + msgid "" "The :func:`typing.no_type_check_decorator` decorator function has been " "deprecated since Python 3.13. After eight years in the :mod:`typing` module, " diff --git a/deprecations/pending-removal-in-3.15.po b/deprecations/pending-removal-in-3.15.po index f66f6729f..fbf99c15e 100644 --- a/deprecations/pending-removal-in-3.15.po +++ b/deprecations/pending-removal-in-3.15.po @@ -11,7 +11,7 @@ msgid "" msgstr "" "Project-Id-Version: Python 3.14\n" "Report-Msgid-Bugs-To: \n" -"POT-Creation-Date: 2025-05-08 02:53-0300\n" +"POT-Creation-Date: 2025-05-16 14:19+0000\n" "PO-Revision-Date: 2024-07-20 00:54+0000\n" "Last-Translator: Stan Ulbrych, 2025\n" "Language-Team: Polish (https://app.transifex.com/python-doc/teams/5390/pl/)\n" @@ -137,6 +137,14 @@ msgid "" "or the functional syntax instead." 
msgstr "" +msgid "" +"When using the functional syntax of :class:`~typing.TypedDict`\\s, failing " +"to pass a value to the *fields* parameter (``TD = TypedDict(\"TD\")``) or " +"passing ``None`` (``TD = TypedDict(\"TD\", None)``) has been deprecated " +"since Python 3.13. Use ``class TD(TypedDict): pass`` or ``TD = " +"TypedDict(\"TD\", {})`` to create a TypedDict with zero field." +msgstr "" + msgid "" "The :func:`typing.no_type_check_decorator` decorator function has been " "deprecated since Python 3.13. After eight years in the :mod:`typing` module, " diff --git a/library/annotationlib.po b/library/annotationlib.po index 3252ff296..6890690e9 100644 --- a/library/annotationlib.po +++ b/library/annotationlib.po @@ -12,7 +12,7 @@ msgid "" msgstr "" "Project-Id-Version: Python 3.14\n" "Report-Msgid-Bugs-To: \n" -"POT-Creation-Date: 2025-05-08 02:53-0300\n" +"POT-Creation-Date: 2025-05-16 14:19+0000\n" "PO-Revision-Date: 2025-05-08 06:04+0000\n" "Last-Translator: Tomasz Rodzen , 2025\n" "Language-Team: Polish (https://app.transifex.com/python-doc/teams/5390/pl/)\n" @@ -186,6 +186,15 @@ msgstr "" msgid "Values are the result of evaluating the annotation expressions." msgstr "" +msgid "" +"Special value used to signal that an annotate function is being evaluated in " +"a special environment with fake globals. When passed this value, annotate " +"functions should either return the same value as for the :attr:`Format." +"VALUE` format, or raise :exc:`NotImplementedError` to signal that they do " +"not support execution in this environment. This format is only used " +"internally and should not be passed to the functions in this module." +msgstr "" + msgid "" "Values are real annotation values (as per :attr:`Format.VALUE` format) for " "defined values, and :class:`ForwardRef` proxies for undefined values. Real " @@ -202,15 +211,6 @@ msgid "" "The exact values of these strings may change in future versions of Python." 
msgstr "" -msgid "" -"Special value used to signal that an annotate function is being evaluated in " -"a special environment with fake globals. When passed this value, annotate " -"functions should either return the same value as for the :attr:`Format." -"VALUE` format, or raise :exc:`NotImplementedError` to signal that they do " -"not support execution in this environment. This format is only used " -"internally and should not be passed to the functions in this module." -msgstr "" - msgid "A proxy object for forward references in annotations." msgstr "" @@ -604,3 +604,202 @@ msgid "" "attribute\n" " return typ" msgstr "" + +msgid "Limitations of the ``STRING`` format" +msgstr "" + +msgid "" +"The :attr:`~Format.STRING` format is meant to approximate the source code of " +"the annotation, but the implementation strategy used means that it is not " +"always possible to recover the exact source code." +msgstr "" + +msgid "" +"First, the stringifier of course cannot recover any information that is not " +"present in the compiled code, including comments, whitespace, " +"parenthesization, and operations that get simplified by the compiler." +msgstr "" + +msgid "" +"Second, the stringifier can intercept almost all operations that involve " +"names looked up in some scope, but it cannot intercept operations that " +"operate fully on constants. As a corollary, this also means it is not safe " +"to request the ``STRING`` format on untrusted code: Python is powerful " +"enough that it is possible to achieve arbitrary code execution even with no " +"access to any globals or builtins. For example:" +msgstr "" + +msgid "" +">>> def f(x: (1).__class__.__base__.__subclasses__()[-1].__init__." 
+"__builtins__[\"print\"](\"Hello world\")): pass\n" +"...\n" +">>> annotationlib.get_annotations(f, format=annotationlib.Format.SOURCE)\n" +"Hello world\n" +"{'x': 'None'}" +msgstr "" + +msgid "" +"This particular example works as of the time of writing, but it relies on " +"implementation details and is not guaranteed to work in the future." +msgstr "" + +msgid "" +"Among the different kinds of expressions that exist in Python, as " +"represented by the :mod:`ast` module, some expressions are supported, " +"meaning that the ``STRING`` format can generally recover the original source " +"code; others are unsupported, meaning that they may result in incorrect " +"output or an error." +msgstr "" + +msgid "The following are supported (sometimes with caveats):" +msgstr "" + +msgid ":class:`ast.BinOp`" +msgstr "" + +msgid ":class:`ast.UnaryOp`" +msgstr "" + +msgid "" +":class:`ast.Invert` (``~``), :class:`ast.UAdd` (``+``), and :class:`ast." +"USub` (``-``) are supported" +msgstr "" + +msgid ":class:`ast.Not` (``not``) is not supported" +msgstr "" + +msgid ":class:`ast.Dict` (except when using ``**`` unpacking)" +msgstr "" + +msgid ":class:`ast.Set`" +msgstr "" + +msgid ":class:`ast.Compare`" +msgstr "" + +msgid ":class:`ast.Eq` and :class:`ast.NotEq` are supported" +msgstr "" + +msgid "" +":class:`ast.Lt`, :class:`ast.LtE`, :class:`ast.Gt`, and :class:`ast.GtE` are " +"supported, but the operand may be flipped" +msgstr "" + +msgid "" +":class:`ast.Is`, :class:`ast.IsNot`, :class:`ast.In`, and :class:`ast.NotIn` " +"are not supported" +msgstr "" + +msgid ":class:`ast.Call` (except when using ``**`` unpacking)" +msgstr "" + +msgid "" +":class:`ast.Constant` (though not the exact representation of the constant; " +"for example, escape sequences in strings are lost; hexadecimal numbers are " +"converted to decimal)" +msgstr "" + +msgid ":class:`ast.Attribute` (assuming the value is not a constant)" +msgstr "" + +msgid ":class:`ast.Subscript` (assuming the value is not a 
constant)" +msgstr "" + +msgid ":class:`ast.Starred` (``*`` unpacking)" +msgstr "" + +msgid ":class:`ast.Name`" +msgstr "" + +msgid ":class:`ast.List`" +msgstr "" + +msgid ":class:`ast.Tuple`" +msgstr "" + +msgid ":class:`ast.Slice`" +msgstr "" + +msgid "" +"The following are unsupported, but throw an informative error when " +"encountered by the stringifier:" +msgstr "" + +msgid "" +":class:`ast.FormattedValue` (f-strings; error is not detected if conversion " +"specifiers like ``!r`` are used)" +msgstr "" + +msgid ":class:`ast.JoinedStr` (f-strings)" +msgstr "" + +msgid "The following are unsupported and result in incorrect output:" +msgstr "" + +msgid ":class:`ast.BoolOp` (``and`` and ``or``)" +msgstr "" + +msgid ":class:`ast.IfExp`" +msgstr "" + +msgid ":class:`ast.Lambda`" +msgstr "" + +msgid ":class:`ast.ListComp`" +msgstr "" + +msgid ":class:`ast.SetComp`" +msgstr "" + +msgid ":class:`ast.DictComp`" +msgstr "" + +msgid ":class:`ast.GeneratorExp`" +msgstr "" + +msgid "" +"The following are disallowed in annotation scopes and therefore not relevant:" +msgstr "" + +msgid ":class:`ast.NamedExpr` (``:=``)" +msgstr "" + +msgid ":class:`ast.Await`" +msgstr "" + +msgid ":class:`ast.Yield`" +msgstr "" + +msgid ":class:`ast.YieldFrom`" +msgstr "" + +msgid "Limitations of the ``FORWARDREF`` format" +msgstr "" + +msgid "" +"The :attr:`~Format.FORWARDREF` format aims to produce real values as much as " +"possible, with anything that cannot be resolved replaced with :class:" +"`ForwardRef` objects. It is affected by broadly the same Limitations as the :" +"attr:`~Format.STRING` format: annotations that perform operations on " +"literals or that use unsupported expression types may raise exceptions when " +"evaluated using the :attr:`~Format.FORWARDREF` format." 
+msgstr "" + +msgid "Below are a few examples of the behavior with unsupported expressions:" +msgstr "" + +msgid "" +">>> from annotationlib import get_annotations, Format\n" +">>> def zerodiv(x: 1 / 0): ...\n" +">>> get_annotations(zerodiv, format=Format.STRING)\n" +"Traceback (most recent call last):\n" +" ...\n" +"ZeroDivisionError: division by zero\n" +">>> get_annotations(zerodiv, format=Format.FORWARDREF)\n" +"Traceback (most recent call last):\n" +" ...\n" +"ZeroDivisionError: division by zero\n" +">>> def ifexp(x: 1 if y else 0): ...\n" +">>> get_annotations(ifexp, format=Format.STRING)\n" +"{'x': '1'}" +msgstr "" diff --git a/library/datetime.po b/library/datetime.po index 8de1313da..b4a3d6e8c 100644 --- a/library/datetime.po +++ b/library/datetime.po @@ -14,7 +14,7 @@ msgid "" msgstr "" "Project-Id-Version: Python 3.14\n" "Report-Msgid-Bugs-To: \n" -"POT-Creation-Date: 2025-05-08 02:53-0300\n" +"POT-Creation-Date: 2025-05-16 14:19+0000\n" "PO-Revision-Date: 2021-06-28 01:04+0000\n" "Last-Translator: Stan Ulbrych, 2025\n" "Language-Team: Polish (https://app.transifex.com/python-doc/teams/5390/pl/)\n" @@ -346,6 +346,24 @@ msgid "" "(-1, 86399, 999999)" msgstr "" +msgid "" +"Since the string representation of :class:`!timedelta` objects can be " +"confusing, use the following recipe to produce a more readable format:" +msgstr "" + +msgid "" +">>> def pretty_timedelta(td):\n" +"... if td.days >= 0:\n" +"... return str(td)\n" +"... 
return f'-({-td!s})'\n" +"...\n" +">>> d = timedelta(hours=-1)\n" +">>> str(d) # not human-friendly\n" +"'-1 day, 23:00:00'\n" +">>> pretty_timedelta(d)\n" +"'-(1:00:00)'" +msgstr "" + msgid "Class attributes:" msgstr "" diff --git a/library/math.po b/library/math.po index 9690cdf90..fdecd47cc 100644 --- a/library/math.po +++ b/library/math.po @@ -13,7 +13,7 @@ msgid "" msgstr "" "Project-Id-Version: Python 3.14\n" "Report-Msgid-Bugs-To: \n" -"POT-Creation-Date: 2025-05-08 02:53-0300\n" +"POT-Creation-Date: 2025-05-16 14:19+0000\n" "PO-Revision-Date: 2021-06-28 01:09+0000\n" "Last-Translator: Stan Ulbrych, 2025\n" "Language-Team: Polish (https://app.transifex.com/python-doc/teams/5390/pl/)\n" @@ -484,9 +484,7 @@ msgid "" "exc:`ValueError` if either of the arguments are negative." msgstr "" -msgid "" -"Return *n* factorial as an integer. Raises :exc:`ValueError` if *n* is not " -"integral or is negative." +msgid "Return factorial of the nonnegative integer *n*." msgstr "" msgid "Floats with integral values (like ``5.0``) are no longer accepted." diff --git a/library/shutil.po b/library/shutil.po index 311b97ee9..ddd6fd12f 100644 --- a/library/shutil.po +++ b/library/shutil.po @@ -12,7 +12,7 @@ msgid "" msgstr "" "Project-Id-Version: Python 3.14\n" "Report-Msgid-Bugs-To: \n" -"POT-Creation-Date: 2025-05-08 02:53-0300\n" +"POT-Creation-Date: 2025-05-16 14:19+0000\n" "PO-Revision-Date: 2021-06-28 01:13+0000\n" "Last-Translator: Stan Ulbrych, 2025\n" "Language-Team: Polish (https://app.transifex.com/python-doc/teams/5390/pl/)\n" @@ -497,6 +497,12 @@ msgid "" "to :data:`os.defpath` if it is not set." msgstr "" +msgid "" +"If *cmd* contains a directory component, :func:`!which` only checks the " +"specified path directly and does not search the directories listed in *path* " +"or in the system's :envvar:`PATH` environment variable." +msgstr "" + msgid "" "On Windows, the current directory is prepended to the *path* if *mode* does " "not include ``os.X_OK``. 
When the *mode* does include ``os.X_OK``, the " diff --git a/library/stdtypes.po b/library/stdtypes.po index 481f46d75..a47500fbc 100644 --- a/library/stdtypes.po +++ b/library/stdtypes.po @@ -18,7 +18,7 @@ msgid "" msgstr "" "Project-Id-Version: Python 3.14\n" "Report-Msgid-Bugs-To: \n" -"POT-Creation-Date: 2025-05-09 14:19+0000\n" +"POT-Creation-Date: 2025-05-16 14:19+0000\n" "PO-Revision-Date: 2021-06-28 01:13+0000\n" "Last-Translator: Stan Ulbrych, 2025\n" "Language-Team: Polish (https://app.transifex.com/python-doc/teams/5390/pl/)\n" @@ -2773,6 +2773,20 @@ msgid "" "['1', '2', '3']" msgstr "" +msgid "" +"If *sep* is not specified or is ``None`` and *maxsplit* is ``0``, only " +"leading runs of consecutive whitespace are considered." +msgstr "" + +msgid "" +">>> \"\".split(None, 0)\n" +"[]\n" +">>> \" \".split(None, 0)\n" +"[]\n" +">>> \" foo \".split(maxsplit=0)\n" +"['foo ']" +msgstr "" + msgid "" "Return a list of the lines in the string, breaking at line boundaries. Line " "breaks are not included in the resulting list unless *keepends* is given and " diff --git a/library/threading.po b/library/threading.po index 1e7c8f9bf..471a0811c 100644 --- a/library/threading.po +++ b/library/threading.po @@ -6,15 +6,16 @@ # Translators: # Maciej Olko , 2021 # Stan Ulbrych, 2025 +# Seweryn Piórkowski , 2025 # #, fuzzy msgid "" msgstr "" "Project-Id-Version: Python 3.14\n" "Report-Msgid-Bugs-To: \n" -"POT-Creation-Date: 2025-05-08 02:53-0300\n" +"POT-Creation-Date: 2025-05-16 14:19+0000\n" "PO-Revision-Date: 2021-06-28 01:15+0000\n" -"Last-Translator: Stan Ulbrych, 2025\n" +"Last-Translator: Seweryn Piórkowski , 2025\n" "Language-Team: Polish (https://app.transifex.com/python-doc/teams/5390/pl/)\n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=UTF-8\n" @@ -35,6 +36,65 @@ msgid "" "level :mod:`_thread` module." msgstr "" +msgid "Availability" +msgstr "Dostępność" + +msgid "" +"This module does not work or is not available on WebAssembly. 
See :ref:`wasm-" +"availability` for more information." +msgstr "" + +msgid "Introduction" +msgstr "Wprowadzenie" + +msgid "" +"The :mod:`!threading` module provides a way to run multiple `threads " +"`_ (smaller units of a " +"process) concurrently within a single process. It allows for the creation " +"and management of threads, making it possible to execute tasks in parallel, " +"sharing memory space. Threads are particularly useful when tasks are I/O " +"bound, such as file operations or making network requests, where much of the " +"time is spent waiting for external resources." +msgstr "" + +msgid "" +"A typical use case for :mod:`!threading` includes managing a pool of worker " +"threads that can process multiple tasks concurrently. Here's a basic " +"example of creating and starting threads using :class:`~threading.Thread`::" +msgstr "" + +msgid "" +"import threading\n" +"import time\n" +"\n" +"def crawl(link, delay=3):\n" +" print(f\"crawl started for {link}\")\n" +" time.sleep(delay) # Blocking I/O (simulating a network request)\n" +" print(f\"crawl ended for {link}\")\n" +"\n" +"links = [\n" +" \"https://python.org\",\n" +" \"https://docs.python.org\",\n" +" \"https://peps.python.org\",\n" +"]\n" +"\n" +"# Start threads for each link\n" +"threads = []\n" +"for link in links:\n" +" # Using `args` to pass positional arguments and `kwargs` for keyword " +"arguments\n" +" t = threading.Thread(target=crawl, args=(link,), kwargs={\"delay\": 2})\n" +" threads.append(t)\n" +"\n" +"# Start each thread\n" +"for t in threads:\n" +" t.start()\n" +"\n" +"# Wait for all threads to finish\n" +"for t in threads:\n" +" t.join()" +msgstr "" + msgid "This module used to be optional, it is now always available." msgstr "" @@ -71,12 +131,26 @@ msgid "" "appropriate model if you want to run multiple I/O-bound tasks simultaneously." 
msgstr "" -msgid "Availability" -msgstr "Dostępność" +msgid "GIL and performance considerations" +msgstr "" msgid "" -"This module does not work or is not available on WebAssembly. See :ref:`wasm-" -"availability` for more information." +"Unlike the :mod:`multiprocessing` module, which uses separate processes to " +"bypass the :term:`global interpreter lock` (GIL), the threading module " +"operates within a single process, meaning that all threads share the same " +"memory space. However, the GIL limits the performance gains of threading " +"when it comes to CPU-bound tasks, as only one thread can execute Python " +"bytecode at a time. Despite this, threads remain a useful tool for achieving " +"concurrency in many scenarios." +msgstr "" + +msgid "" +"As of Python 3.13, experimental :term:`free-threaded ` " +"builds can disable the GIL, enabling true parallel execution of threads, but " +"this feature is not available by default (see :pep:`703`)." +msgstr "" + +msgid "Reference" msgstr "" msgid "This module defines the following functions:" @@ -93,7 +167,7 @@ msgstr "" msgid "" "Return the current :class:`Thread` object, corresponding to the caller's " "thread of control. If the caller's thread of control was not created " -"through the :mod:`threading` module, a dummy thread object with limited " +"through the :mod:`!threading` module, a dummy thread object with limited " "functionality is returned." msgstr "" @@ -186,13 +260,13 @@ msgid "" msgstr "" msgid "" -"Set a trace function for all threads started from the :mod:`threading` " +"Set a trace function for all threads started from the :mod:`!threading` " "module. The *func* will be passed to :func:`sys.settrace` for each thread, " "before its :meth:`~Thread.run` method is called." msgstr "" msgid "" -"Set a trace function for all threads started from the :mod:`threading` " +"Set a trace function for all threads started from the :mod:`!threading` " "module and all Python threads that are currently executing." 
msgstr "" @@ -205,13 +279,13 @@ msgid "Get the trace function as set by :func:`settrace`." msgstr "" msgid "" -"Set a profile function for all threads started from the :mod:`threading` " +"Set a profile function for all threads started from the :mod:`!threading` " "module. The *func* will be passed to :func:`sys.setprofile` for each " "thread, before its :meth:`~Thread.run` method is called." msgstr "" msgid "" -"Set a profile function for all threads started from the :mod:`threading` " +"Set a profile function for all threads started from the :mod:`!threading` " "module and all Python threads that are currently executing." msgstr "" @@ -272,7 +346,7 @@ msgstr "" msgid "All of the methods described below are executed atomically." msgstr "" -msgid "Thread-Local Data" +msgid "Thread-local data" msgstr "" msgid "" @@ -447,7 +521,7 @@ msgstr "" msgid "A class that represents thread-local data." msgstr "" -msgid "Thread Objects" +msgid "Thread objects" msgstr "" msgid "" @@ -748,7 +822,7 @@ msgid "" "a property instead." msgstr "" -msgid "Lock Objects" +msgid "Lock objects" msgstr "" msgid "" @@ -853,7 +927,7 @@ msgstr "" msgid "Return ``True`` if the lock is acquired." msgstr "" -msgid "RLock Objects" +msgid "RLock objects" msgstr "" msgid "" @@ -971,7 +1045,7 @@ msgstr "" msgid "Return a boolean indicating whether this object is locked right now." msgstr "" -msgid "Condition Objects" +msgid "Condition objects" msgstr "" msgid "" @@ -1182,7 +1256,7 @@ msgstr "" msgid "The method ``notifyAll`` is a deprecated alias for this method." msgstr "" -msgid "Semaphore Objects" +msgid "Semaphore objects" msgstr "" msgid "" @@ -1266,7 +1340,7 @@ msgid "" "times it's a sign of a bug. If not given, *value* defaults to 1." msgstr "" -msgid ":class:`Semaphore` Example" +msgid ":class:`Semaphore` example" msgstr "" msgid "" @@ -1302,7 +1376,7 @@ msgid "" "undetected." 
msgstr "" -msgid "Event Objects" +msgid "Event objects" msgstr "" msgid "" @@ -1355,7 +1429,7 @@ msgid "" "fractions thereof." msgstr "" -msgid "Timer Objects" +msgid "Timer objects" msgstr "" msgid "" @@ -1396,7 +1470,7 @@ msgid "" "only work if the timer is still in its waiting stage." msgstr "" -msgid "Barrier Objects" +msgid "Barrier objects" msgstr "" msgid "" diff --git a/library/unittest.mock.po b/library/unittest.mock.po index 2bd79b5ad..7b3954ae0 100644 --- a/library/unittest.mock.po +++ b/library/unittest.mock.po @@ -13,7 +13,7 @@ msgid "" msgstr "" "Project-Id-Version: Python 3.14\n" "Report-Msgid-Bugs-To: \n" -"POT-Creation-Date: 2025-05-08 02:53-0300\n" +"POT-Creation-Date: 2025-05-16 14:19+0000\n" "PO-Revision-Date: 2021-06-28 01:16+0000\n" "Last-Translator: Stan Ulbrych, 2025\n" "Language-Team: Polish (https://app.transifex.com/python-doc/teams/5390/pl/)\n" @@ -2386,13 +2386,10 @@ msgid "" msgstr "" msgid "" -">>> mock.has_data()\n" +">>> mock.header_items()\n" "\n" -">>> mock.has_data.assret_called_with() # Intentional typo!" +">>> mock.header_items.assret_called_with() # Intentional typo!" msgstr "" -">>> mock.has_data()\n" -"\n" -">>> mock.has_data.assret_called_with() # Intentional typo!" msgid "" "Auto-speccing solves this problem. You can either pass ``autospec=True`` to :" diff --git a/library/urllib.request.po b/library/urllib.request.po index e2dfacd12..fb9965eaf 100644 --- a/library/urllib.request.po +++ b/library/urllib.request.po @@ -12,7 +12,7 @@ msgid "" msgstr "" "Project-Id-Version: Python 3.14\n" "Report-Msgid-Bugs-To: \n" -"POT-Creation-Date: 2025-05-08 02:53-0300\n" +"POT-Creation-Date: 2025-05-16 14:19+0000\n" "PO-Revision-Date: 2021-06-28 01:17+0000\n" "Last-Translator: Stan Ulbrych, 2025\n" "Language-Team: Polish (https://app.transifex.com/python-doc/teams/5390/pl/)\n" @@ -1089,7 +1089,7 @@ msgstr "" msgid "" "Send an HTTP request, which can be either GET or POST, depending on ``req." -"has_data()``." +"data``." 
msgstr "" msgid "HTTPSHandler Objects" @@ -1097,7 +1097,7 @@ msgstr "" msgid "" "Send an HTTPS request, which can be either GET or POST, depending on ``req." -"has_data()``." +"data``." msgstr "" msgid "FileHandler Objects" diff --git a/reference/compound_stmts.po b/reference/compound_stmts.po index e13873da5..4274fbbf4 100644 --- a/reference/compound_stmts.po +++ b/reference/compound_stmts.po @@ -14,7 +14,7 @@ msgid "" msgstr "" "Project-Id-Version: Python 3.14\n" "Report-Msgid-Bugs-To: \n" -"POT-Creation-Date: 2025-05-08 02:53-0300\n" +"POT-Creation-Date: 2025-05-16 14:19+0000\n" "PO-Revision-Date: 2021-06-28 01:19+0000\n" "Last-Translator: Stan Ulbrych, 2025\n" "Language-Team: Polish (https://app.transifex.com/python-doc/teams/5390/pl/)\n" @@ -2048,7 +2048,7 @@ msgid "" msgstr "" msgid "" -"By default, annotations are lazily evaluated in a :ref:`annotation scope " +"By default, annotations are lazily evaluated in an :ref:`annotation scope " "`. This means that they are not evaluated when the code " "containing the annotation is evaluated. Instead, the interpreter saves " "information that can be used to evaluate the annotation later if requested. " @@ -2067,6 +2067,14 @@ msgid "" "{'param': 'annotation'}" msgstr "" +msgid "" +"This future statement will be deprecated and removed in a future version of " +"Python, but not before Python 3.13 reaches its end of life (see :pep:`749`). " +"When it is used, introspection tools like :func:`annotationlib." +"get_annotations` and :func:`typing.get_type_hints` are less likely to be " +"able to resolve annotations at runtime." 
+msgstr "" + msgid "Footnotes" msgstr "Przypisy" diff --git a/reference/grammar.po b/reference/grammar.po index 34f15b4ec..ef558fdc4 100644 --- a/reference/grammar.po +++ b/reference/grammar.po @@ -11,7 +11,7 @@ msgid "" msgstr "" "Project-Id-Version: Python 3.14\n" "Report-Msgid-Bugs-To: \n" -"POT-Creation-Date: 2025-05-09 14:19+0000\n" +"POT-Creation-Date: 2025-05-16 14:19+0000\n" "PO-Revision-Date: 2021-06-28 01:49+0000\n" "Last-Translator: Stan Ulbrych, 2025\n" "Language-Team: Polish (https://app.transifex.com/python-doc/teams/5390/pl/)\n" @@ -159,12 +159,12 @@ msgid "" "statements[asdl_stmt_seq*]: a=statement+ { _PyPegen_register_stmts(p, " "(asdl_stmt_seq*)_PyPegen_seq_flatten(p, a)) }\n" "\n" -"statement[asdl_stmt_seq*]: \n" -" | a=compound_stmt { (asdl_stmt_seq*)_PyPegen_singleton_seq(p, a) } \n" +"statement[asdl_stmt_seq*]:\n" +" | a=compound_stmt { (asdl_stmt_seq*)_PyPegen_singleton_seq(p, a) }\n" " | a[asdl_stmt_seq*]=simple_stmts { a }\n" "\n" "single_compound_stmt[asdl_stmt_seq*]:\n" -" | a=compound_stmt { \n" +" | a=compound_stmt {\n" " _PyPegen_register_stmts(p, (asdl_stmt_seq*)_PyPegen_singleton_seq(p, " "a)) }\n" "\n" @@ -575,9 +575,9 @@ msgid "" " _PyAST_ExceptHandler(e, ((expr_ty) t)->v.Name.id, b, EXTRA) }\n" " | 'except' e=expressions ':' b=block {\n" " CHECK_VERSION(\n" -" excepthandler_ty, \n" -" 14, \n" -" \"except expressions without parentheses are\", \n" +" excepthandler_ty,\n" +" 14,\n" +" \"except expressions without parentheses are\",\n" " _PyAST_ExceptHandler(e, NULL, b, EXTRA)) }\n" " | 'except' ':' b=block { _PyAST_ExceptHandler(NULL, NULL, b, EXTRA) }\n" " | invalid_except_stmt\n" @@ -589,9 +589,9 @@ msgid "" " _PyAST_ExceptHandler(e, ((expr_ty) t)->v.Name.id, b, EXTRA) }\n" " | 'except' '*' e=expressions ':' b=block {\n" " CHECK_VERSION(\n" -" excepthandler_ty, \n" -" 14, \n" -" \"except expressions without parentheses are\", \n" +" excepthandler_ty,\n" +" 14,\n" +" \"except expressions without parentheses are\",\n" " 
_PyAST_ExceptHandler(e, NULL, b, EXTRA)) }\n" " | invalid_except_star_stmt\n" "finally_block[asdl_stmt_seq*]:\n" @@ -1172,11 +1172,11 @@ msgid "" " | tstring_replacement_field\n" " | t=TSTRING_MIDDLE { _PyPegen_constant_from_token(p, t) }\n" "tstring[expr_ty] (memo):\n" -" | a=TSTRING_START b=tstring_middle* c=TSTRING_END { \n" +" | a=TSTRING_START b=tstring_middle* c=TSTRING_END {\n" " CHECK_VERSION(\n" -" expr_ty, \n" -" 14, \n" -" \"t-strings are\", \n" +" expr_ty,\n" +" 14,\n" +" \"t-strings are\",\n" " _PyPegen_template_str(p, a, (asdl_expr_seq*)b, c)) }\n" "\n" "string[expr_ty]: s[Token*]=STRING { _PyPegen_constant_from_string(p, s) }\n" @@ -1693,12 +1693,12 @@ msgid "" "after 'import'\") }\n" "invalid_dotted_as_name:\n" " | dotted_name 'as' !(NAME (',' | ')' | NEWLINE)) a=expression {\n" -" RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, \n" +" RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a,\n" " \"cannot use %s as import target\", " "_PyPegen_get_expr_name(a)) }\n" "invalid_import_from_as_name:\n" " | NAME 'as' !(NAME (',' | ')' | NEWLINE)) a=expression {\n" -" RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, \n" +" RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a,\n" " \"cannot use %s as import target\", " "_PyPegen_get_expr_name(a)) }\n" "\n" @@ -1953,1907 +1953,3 @@ msgid "" " token,\n" " \"Type parameter list cannot be empty\")}\n" msgstr "" -"# PEG grammar for Python\n" -"\n" -"@trailer '''\n" -"void *\n" -"_PyPegen_parse(Parser *p)\n" -"{\n" -" // Initialize keywords\n" -" p->keywords = reserved_keywords;\n" -" p->n_keyword_lists = n_keyword_lists;\n" -" p->soft_keywords = soft_keywords;\n" -"\n" -" // Run parser\n" -" void *result = NULL;\n" -" if (p->start_rule == Py_file_input) {\n" -" result = file_rule(p);\n" -" } else if (p->start_rule == Py_single_input) {\n" -" result = interactive_rule(p);\n" -" } else if (p->start_rule == Py_eval_input) {\n" -" result = eval_rule(p);\n" -" } else if (p->start_rule == Py_func_type_input) {\n" -" result = func_type_rule(p);\n" -" }\n" -"\n" -" return 
result;\n" -"}\n" -"'''\n" -"\n" -"# ========================= START OF THE GRAMMAR =========================\n" -"\n" -"# General grammatical elements and rules:\n" -"#\n" -"# * Strings with double quotes (\") denote SOFT KEYWORDS\n" -"# * Strings with single quotes (') denote KEYWORDS\n" -"# * Upper case names (NAME) denote tokens in the Grammar/Tokens file\n" -"# * Rule names starting with \"invalid_\" are used for specialized syntax " -"errors\n" -"# - These rules are NOT used in the first pass of the parser.\n" -"# - Only if the first pass fails to parse, a second pass including the " -"invalid\n" -"# rules will be executed.\n" -"# - If the parser fails in the second phase with a generic syntax error, " -"the\n" -"# location of the generic failure of the first pass will be used (this " -"avoids\n" -"# reporting incorrect locations due to the invalid rules).\n" -"# - The order of the alternatives involving invalid rules matter\n" -"# (like any rule in PEG).\n" -"#\n" -"# Grammar Syntax (see PEP 617 for more information):\n" -"#\n" -"# rule_name: expression\n" -"# Optionally, a type can be included right after the rule name, which\n" -"# specifies the return type of the C or Python function corresponding to " -"the\n" -"# rule:\n" -"# rule_name[return_type]: expression\n" -"# If the return type is omitted, then a void * is returned in C and an Any " -"in\n" -"# Python.\n" -"# e1 e2\n" -"# Match e1, then match e2.\n" -"# e1 | e2\n" -"# Match e1 or e2.\n" -"# The first alternative can also appear on the line after the rule name " -"for\n" -"# formatting purposes. 
In that case, a | must be used before the first\n" -"# alternative, like so:\n" -"# rule_name[return_type]:\n" -"# | first_alt\n" -"# | second_alt\n" -"# ( e )\n" -"# Match e (allows also to use other operators in the group like '(e)*')\n" -"# [ e ] or e?\n" -"# Optionally match e.\n" -"# e*\n" -"# Match zero or more occurrences of e.\n" -"# e+\n" -"# Match one or more occurrences of e.\n" -"# s.e+\n" -"# Match one or more occurrences of e, separated by s. The generated parse " -"tree\n" -"# does not include the separator. This is otherwise identical to (e (s " -"e)*).\n" -"# &e\n" -"# Succeed if e can be parsed, without consuming any input.\n" -"# !e\n" -"# Fail if e can be parsed, without consuming any input.\n" -"# ~\n" -"# Commit to the current alternative, even if it fails to parse.\n" -"# &&e\n" -"# Eager parse e. The parser will not backtrack and will immediately\n" -"# fail with SyntaxError if e cannot be parsed.\n" -"#\n" -"\n" -"# STARTING RULES\n" -"# ==============\n" -"\n" -"file[mod_ty]: a=[statements] ENDMARKER { _PyPegen_make_module(p, a) }\n" -"interactive[mod_ty]: a=statement_newline { _PyAST_Interactive(a, p-" -">arena) }\n" -"eval[mod_ty]: a=expressions NEWLINE* ENDMARKER { _PyAST_Expression(a, p-" -">arena) }\n" -"func_type[mod_ty]: '(' a=[type_expressions] ')' '->' b=expression NEWLINE* " -"ENDMARKER { _PyAST_FunctionType(a, b, p->arena) }\n" -"\n" -"# GENERAL STATEMENTS\n" -"# ==================\n" -"\n" -"statements[asdl_stmt_seq*]: a=statement+ { _PyPegen_register_stmts(p, " -"(asdl_stmt_seq*)_PyPegen_seq_flatten(p, a)) }\n" -"\n" -"statement[asdl_stmt_seq*]: \n" -" | a=compound_stmt { (asdl_stmt_seq*)_PyPegen_singleton_seq(p, a) } \n" -" | a[asdl_stmt_seq*]=simple_stmts { a }\n" -"\n" -"single_compound_stmt[asdl_stmt_seq*]:\n" -" | a=compound_stmt { \n" -" _PyPegen_register_stmts(p, (asdl_stmt_seq*)_PyPegen_singleton_seq(p, " -"a)) }\n" -"\n" -"statement_newline[asdl_stmt_seq*]:\n" -" | a=single_compound_stmt NEWLINE { a }\n" -" | 
simple_stmts\n" -" | NEWLINE { (asdl_stmt_seq*)_PyPegen_singleton_seq(p, CHECK(stmt_ty, " -"_PyAST_Pass(EXTRA))) }\n" -" | ENDMARKER { _PyPegen_interactive_exit(p) }\n" -"\n" -"simple_stmts[asdl_stmt_seq*]:\n" -" | a=simple_stmt !';' NEWLINE { (asdl_stmt_seq*)_PyPegen_singleton_seq(p, " -"a) } # Not needed, there for speedup\n" -" | a[asdl_stmt_seq*]=';'.simple_stmt+ [';'] NEWLINE { a }\n" -"\n" -"# NOTE: assignment MUST precede expression, else parsing a simple " -"assignment\n" -"# will throw a SyntaxError.\n" -"simple_stmt[stmt_ty] (memo):\n" -" | assignment\n" -" | &\"type\" type_alias\n" -" | e=star_expressions { _PyAST_Expr(e, EXTRA) }\n" -" | &'return' return_stmt\n" -" | &('import' | 'from') import_stmt\n" -" | &'raise' raise_stmt\n" -" | &'pass' pass_stmt\n" -" | &'del' del_stmt\n" -" | &'yield' yield_stmt\n" -" | &'assert' assert_stmt\n" -" | &'break' break_stmt\n" -" | &'continue' continue_stmt\n" -" | &'global' global_stmt\n" -" | &'nonlocal' nonlocal_stmt\n" -"\n" -"compound_stmt[stmt_ty]:\n" -" | &('def' | '@' | 'async') function_def\n" -" | &'if' if_stmt\n" -" | &('class' | '@') class_def\n" -" | &('with' | 'async') with_stmt\n" -" | &('for' | 'async') for_stmt\n" -" | &'try' try_stmt\n" -" | &'while' while_stmt\n" -" | match_stmt\n" -"\n" -"# SIMPLE STATEMENTS\n" -"# =================\n" -"\n" -"# NOTE: annotated_rhs may start with 'yield'; yield_expr must start with " -"'yield'\n" -"assignment[stmt_ty]:\n" -" | a=NAME ':' b=expression c=['=' d=annotated_rhs { d }] {\n" -" CHECK_VERSION(\n" -" stmt_ty,\n" -" 6,\n" -" \"Variable annotation syntax is\",\n" -" _PyAST_AnnAssign(CHECK(expr_ty, _PyPegen_set_expr_context(p, a, " -"Store)), b, c, 1, EXTRA)\n" -" ) }\n" -" | a=('(' b=single_target ')' { b }\n" -" | single_subscript_attribute_target) ':' b=expression c=['=' " -"d=annotated_rhs { d }] {\n" -" CHECK_VERSION(stmt_ty, 6, \"Variable annotations syntax is\", " -"_PyAST_AnnAssign(a, b, c, 0, EXTRA)) }\n" -" | a[asdl_expr_seq*]=(z=star_targets '=' { 
z })+ b=annotated_rhs !'=' " -"tc=[TYPE_COMMENT] {\n" -" _PyAST_Assign(a, b, NEW_TYPE_COMMENT(p, tc), EXTRA) }\n" -" | a=single_target b=augassign ~ c=annotated_rhs {\n" -" _PyAST_AugAssign(a, b->kind, c, EXTRA) }\n" -" | invalid_assignment\n" -"\n" -"annotated_rhs[expr_ty]: yield_expr | star_expressions\n" -"\n" -"augassign[AugOperator*]:\n" -" | '+=' { _PyPegen_augoperator(p, Add) }\n" -" | '-=' { _PyPegen_augoperator(p, Sub) }\n" -" | '*=' { _PyPegen_augoperator(p, Mult) }\n" -" | '@=' { CHECK_VERSION(AugOperator*, 5, \"The '@' operator is\", " -"_PyPegen_augoperator(p, MatMult)) }\n" -" | '/=' { _PyPegen_augoperator(p, Div) }\n" -" | '%=' { _PyPegen_augoperator(p, Mod) }\n" -" | '&=' { _PyPegen_augoperator(p, BitAnd) }\n" -" | '|=' { _PyPegen_augoperator(p, BitOr) }\n" -" | '^=' { _PyPegen_augoperator(p, BitXor) }\n" -" | '<<=' { _PyPegen_augoperator(p, LShift) }\n" -" | '>>=' { _PyPegen_augoperator(p, RShift) }\n" -" | '**=' { _PyPegen_augoperator(p, Pow) }\n" -" | '//=' { _PyPegen_augoperator(p, FloorDiv) }\n" -"\n" -"return_stmt[stmt_ty]:\n" -" | 'return' a=[star_expressions] { _PyAST_Return(a, EXTRA) }\n" -"\n" -"raise_stmt[stmt_ty]:\n" -" | 'raise' a=expression b=['from' z=expression { z }] { _PyAST_Raise(a, " -"b, EXTRA) }\n" -" | 'raise' { _PyAST_Raise(NULL, NULL, EXTRA) }\n" -"\n" -"pass_stmt[stmt_ty]:\n" -" | 'pass' { _PyAST_Pass(EXTRA) }\n" -"\n" -"break_stmt[stmt_ty]:\n" -" | 'break' { _PyAST_Break(EXTRA) }\n" -"\n" -"continue_stmt[stmt_ty]:\n" -" | 'continue' { _PyAST_Continue(EXTRA) }\n" -"\n" -"global_stmt[stmt_ty]: 'global' a[asdl_expr_seq*]=','.NAME+ {\n" -" _PyAST_Global(CHECK(asdl_identifier_seq*, _PyPegen_map_names_to_ids(p, " -"a)), EXTRA) }\n" -"\n" -"nonlocal_stmt[stmt_ty]: 'nonlocal' a[asdl_expr_seq*]=','.NAME+ {\n" -" _PyAST_Nonlocal(CHECK(asdl_identifier_seq*, _PyPegen_map_names_to_ids(p, " -"a)), EXTRA) }\n" -"\n" -"del_stmt[stmt_ty]:\n" -" | 'del' a=del_targets &(';' | NEWLINE) { _PyAST_Delete(a, EXTRA) }\n" -" | invalid_del_stmt\n" 
-"\n" -"yield_stmt[stmt_ty]: y=yield_expr { _PyAST_Expr(y, EXTRA) }\n" -"\n" -"assert_stmt[stmt_ty]: 'assert' a=expression b=[',' z=expression { z }] " -"{ _PyAST_Assert(a, b, EXTRA) }\n" -"\n" -"import_stmt[stmt_ty]:\n" -" | invalid_import\n" -" | import_name\n" -" | import_from\n" -"\n" -"# Import statements\n" -"# -----------------\n" -"\n" -"import_name[stmt_ty]: 'import' a=dotted_as_names { _PyAST_Import(a, " -"EXTRA) }\n" -"# note below: the ('.' | '...') is necessary because '...' is tokenized as " -"ELLIPSIS\n" -"import_from[stmt_ty]:\n" -" | 'from' a=('.' | '...')* b=dotted_name 'import' c=import_from_targets " -"{\n" -" _PyPegen_checked_future_import(p, b->v.Name.id, c, " -"_PyPegen_seq_count_dots(a), EXTRA) }\n" -" | 'from' a=('.' | '...')+ 'import' b=import_from_targets {\n" -" _PyAST_ImportFrom(NULL, b, _PyPegen_seq_count_dots(a), EXTRA) }\n" -"import_from_targets[asdl_alias_seq*]:\n" -" | '(' a=import_from_as_names [','] ')' { a }\n" -" | import_from_as_names !','\n" -" | '*' { (asdl_alias_seq*)_PyPegen_singleton_seq(p, CHECK(alias_ty, " -"_PyPegen_alias_for_star(p, EXTRA))) }\n" -" | invalid_import_from_targets\n" -"import_from_as_names[asdl_alias_seq*]:\n" -" | a[asdl_alias_seq*]=','.import_from_as_name+ { a }\n" -"import_from_as_name[alias_ty]:\n" -" | invalid_import_from_as_name\n" -" | a=NAME b=['as' z=NAME { z }] { _PyAST_alias(\n" -" a->v.Name.id, (b) ? ((expr_ty) b)->v.Name.id : NULL, EXTRA) }\n" -"\n" -"dotted_as_names[asdl_alias_seq*]:\n" -" | a[asdl_alias_seq*]=','.dotted_as_name+ { a }\n" -"dotted_as_name[alias_ty]:\n" -" | invalid_dotted_as_name\n" -" | a=dotted_name b=['as' z=NAME { z }] { _PyAST_alias(\n" -" a->v.Name.id, (b) ? ((expr_ty) b)->v.Name.id : NULL, EXTRA) }\n" -"\n" -"dotted_name[expr_ty]:\n" -" | a=dotted_name '.' 
b=NAME { _PyPegen_join_names_with_dot(p, a, b) }\n" -" | NAME\n" -"\n" -"# COMPOUND STATEMENTS\n" -"# ===================\n" -"\n" -"# Common elements\n" -"# ---------------\n" -"\n" -"block[asdl_stmt_seq*] (memo):\n" -" | NEWLINE INDENT a=statements DEDENT { a }\n" -" | simple_stmts\n" -" | invalid_block\n" -"\n" -"decorators[asdl_expr_seq*]: a[asdl_expr_seq*]=('@' f=named_expression " -"NEWLINE { f })+ { a }\n" -"\n" -"# Class definitions\n" -"# -----------------\n" -"\n" -"class_def[stmt_ty]:\n" -" | a=decorators b=class_def_raw { _PyPegen_class_def_decorators(p, a, " -"b) }\n" -" | class_def_raw\n" -"\n" -"class_def_raw[stmt_ty]:\n" -" | invalid_class_def_raw\n" -" | 'class' a=NAME t=[type_params] b=['(' z=[arguments] ')' { z }] ':' " -"c=block {\n" -" _PyAST_ClassDef(a->v.Name.id,\n" -" (b) ? ((expr_ty) b)->v.Call.args : NULL,\n" -" (b) ? ((expr_ty) b)->v.Call.keywords : NULL,\n" -" c, NULL, t, EXTRA) }\n" -"\n" -"# Function definitions\n" -"# --------------------\n" -"\n" -"function_def[stmt_ty]:\n" -" | d=decorators f=function_def_raw { _PyPegen_function_def_decorators(p, " -"d, f) }\n" -" | function_def_raw\n" -"\n" -"function_def_raw[stmt_ty]:\n" -" | invalid_def_raw\n" -" | 'def' n=NAME t=[type_params] '(' params=[params] ')' a=['->' " -"z=expression { z }] ':' tc=[func_type_comment] b=block {\n" -" _PyAST_FunctionDef(n->v.Name.id,\n" -" (params) ? params : CHECK(arguments_ty, " -"_PyPegen_empty_arguments(p)),\n" -" b, NULL, a, NEW_TYPE_COMMENT(p, tc), t, EXTRA) }\n" -" | 'async' 'def' n=NAME t=[type_params] '(' params=[params] ')' a=['->' " -"z=expression { z }] ':' tc=[func_type_comment] b=block {\n" -" CHECK_VERSION(\n" -" stmt_ty,\n" -" 5,\n" -" \"Async functions are\",\n" -" _PyAST_AsyncFunctionDef(n->v.Name.id,\n" -" (params) ? 
params : CHECK(arguments_ty, " -"_PyPegen_empty_arguments(p)),\n" -" b, NULL, a, NEW_TYPE_COMMENT(p, tc), t, EXTRA)\n" -" ) }\n" -"\n" -"# Function parameters\n" -"# -------------------\n" -"\n" -"params[arguments_ty]:\n" -" | invalid_parameters\n" -" | parameters\n" -"\n" -"parameters[arguments_ty]:\n" -" | a=slash_no_default b[asdl_arg_seq*]=param_no_default* " -"c=param_with_default* d=[star_etc] {\n" -" CHECK_VERSION(arguments_ty, 8, \"Positional-only parameters are\", " -"_PyPegen_make_arguments(p, a, NULL, b, c, d)) }\n" -" | a=slash_with_default b=param_with_default* c=[star_etc] {\n" -" CHECK_VERSION(arguments_ty, 8, \"Positional-only parameters are\", " -"_PyPegen_make_arguments(p, NULL, a, NULL, b, c)) }\n" -" | a[asdl_arg_seq*]=param_no_default+ b=param_with_default* c=[star_etc] " -"{\n" -" _PyPegen_make_arguments(p, NULL, NULL, a, b, c) }\n" -" | a=param_with_default+ b=[star_etc] { _PyPegen_make_arguments(p, NULL, " -"NULL, NULL, a, b)}\n" -" | a=star_etc { _PyPegen_make_arguments(p, NULL, NULL, NULL, NULL, a) }\n" -"\n" -"# Some duplication here because we can't write (',' | &')'),\n" -"# which is because we don't support empty alternatives (yet).\n" -"\n" -"slash_no_default[asdl_arg_seq*]:\n" -" | a[asdl_arg_seq*]=param_no_default+ '/' ',' { a }\n" -" | a[asdl_arg_seq*]=param_no_default+ '/' &')' { a }\n" -"slash_with_default[SlashWithDefault*]:\n" -" | a=param_no_default* b=param_with_default+ '/' " -"',' { _PyPegen_slash_with_default(p, (asdl_arg_seq *)a, b) }\n" -" | a=param_no_default* b=param_with_default+ '/' " -"&')' { _PyPegen_slash_with_default(p, (asdl_arg_seq *)a, b) }\n" -"\n" -"star_etc[StarEtc*]:\n" -" | invalid_star_etc\n" -" | '*' a=param_no_default b=param_maybe_default* c=[kwds] {\n" -" _PyPegen_star_etc(p, a, b, c) }\n" -" | '*' a=param_no_default_star_annotation b=param_maybe_default* c=[kwds] " -"{\n" -" _PyPegen_star_etc(p, a, b, c) }\n" -" | '*' ',' b=param_maybe_default+ c=[kwds] {\n" -" _PyPegen_star_etc(p, NULL, b, c) }\n" 
-" | a=kwds { _PyPegen_star_etc(p, NULL, NULL, a) }\n" -"\n" -"kwds[arg_ty]:\n" -" | invalid_kwds\n" -" | '**' a=param_no_default { a }\n" -"\n" -"# One parameter. This *includes* a following comma and type comment.\n" -"#\n" -"# There are three styles:\n" -"# - No default\n" -"# - With default\n" -"# - Maybe with default\n" -"#\n" -"# There are two alternative forms of each, to deal with type comments:\n" -"# - Ends in a comma followed by an optional type comment\n" -"# - No comma, optional type comment, must be followed by close paren\n" -"# The latter form is for a final parameter without trailing comma.\n" -"#\n" -"\n" -"param_no_default[arg_ty]:\n" -" | a=param ',' tc=TYPE_COMMENT? { _PyPegen_add_type_comment_to_arg(p, a, " -"tc) }\n" -" | a=param tc=TYPE_COMMENT? &')' { _PyPegen_add_type_comment_to_arg(p, a, " -"tc) }\n" -"param_no_default_star_annotation[arg_ty]:\n" -" | a=param_star_annotation ',' tc=TYPE_COMMENT? " -"{ _PyPegen_add_type_comment_to_arg(p, a, tc) }\n" -" | a=param_star_annotation tc=TYPE_COMMENT? " -"&')' { _PyPegen_add_type_comment_to_arg(p, a, tc) }\n" -"param_with_default[NameDefaultPair*]:\n" -" | a=param c=default ',' tc=TYPE_COMMENT? { _PyPegen_name_default_pair(p, " -"a, c, tc) }\n" -" | a=param c=default tc=TYPE_COMMENT? " -"&')' { _PyPegen_name_default_pair(p, a, c, tc) }\n" -"param_maybe_default[NameDefaultPair*]:\n" -" | a=param c=default? ',' tc=TYPE_COMMENT? " -"{ _PyPegen_name_default_pair(p, a, c, tc) }\n" -" | a=param c=default? tc=TYPE_COMMENT? " -"&')' { _PyPegen_name_default_pair(p, a, c, tc) }\n" -"param[arg_ty]: a=NAME b=annotation? { _PyAST_arg(a->v.Name.id, b, NULL, " -"EXTRA) }\n" -"param_star_annotation[arg_ty]: a=NAME b=star_annotation { _PyAST_arg(a->v." 
-"Name.id, b, NULL, EXTRA) }\n" -"annotation[expr_ty]: ':' a=expression { a }\n" -"star_annotation[expr_ty]: ':' a=star_expression { a }\n" -"default[expr_ty]: '=' a=expression { a } | invalid_default\n" -"\n" -"# If statement\n" -"# ------------\n" -"\n" -"if_stmt[stmt_ty]:\n" -" | invalid_if_stmt\n" -" | 'if' a=named_expression ':' b=block c=elif_stmt {\n" -" _PyAST_If(a, b, CHECK(asdl_stmt_seq*, _PyPegen_singleton_seq(p, c)), " -"EXTRA) }\n" -" | 'if' a=named_expression ':' b=block c=[else_block] { _PyAST_If(a, b, " -"c, EXTRA) }\n" -"elif_stmt[stmt_ty]:\n" -" | invalid_elif_stmt\n" -" | 'elif' a=named_expression ':' b=block c=elif_stmt {\n" -" _PyAST_If(a, b, CHECK(asdl_stmt_seq*, _PyPegen_singleton_seq(p, c)), " -"EXTRA) }\n" -" | 'elif' a=named_expression ':' b=block c=[else_block] { _PyAST_If(a, b, " -"c, EXTRA) }\n" -"else_block[asdl_stmt_seq*]:\n" -" | invalid_else_stmt\n" -" | 'else' &&':' b=block { b }\n" -"\n" -"# While statement\n" -"# ---------------\n" -"\n" -"while_stmt[stmt_ty]:\n" -" | invalid_while_stmt\n" -" | 'while' a=named_expression ':' b=block c=[else_block] " -"{ _PyAST_While(a, b, c, EXTRA) }\n" -"\n" -"# For statement\n" -"# -------------\n" -"\n" -"for_stmt[stmt_ty]:\n" -" | invalid_for_stmt\n" -" | 'for' t=star_targets 'in' ~ ex=star_expressions ':' tc=[TYPE_COMMENT] " -"b=block el=[else_block] {\n" -" _PyAST_For(t, ex, b, el, NEW_TYPE_COMMENT(p, tc), EXTRA) }\n" -" | 'async' 'for' t=star_targets 'in' ~ ex=star_expressions ':' " -"tc=[TYPE_COMMENT] b=block el=[else_block] {\n" -" CHECK_VERSION(stmt_ty, 5, \"Async for loops are\", " -"_PyAST_AsyncFor(t, ex, b, el, NEW_TYPE_COMMENT(p, tc), EXTRA)) }\n" -" | invalid_for_target\n" -"\n" -"# With statement\n" -"# --------------\n" -"\n" -"with_stmt[stmt_ty]:\n" -" | invalid_with_stmt_indent\n" -" | 'with' '(' a[asdl_withitem_seq*]=','.with_item+ ','? 
')' ':' " -"tc=[TYPE_COMMENT] b=block {\n" -" _PyAST_With(a, b, NEW_TYPE_COMMENT(p, tc), EXTRA) }\n" -" | 'with' a[asdl_withitem_seq*]=','.with_item+ ':' tc=[TYPE_COMMENT] " -"b=block {\n" -" _PyAST_With(a, b, NEW_TYPE_COMMENT(p, tc), EXTRA) }\n" -" | 'async' 'with' '(' a[asdl_withitem_seq*]=','.with_item+ ','? ')' ':' " -"b=block {\n" -" CHECK_VERSION(stmt_ty, 5, \"Async with statements are\", " -"_PyAST_AsyncWith(a, b, NULL, EXTRA)) }\n" -" | 'async' 'with' a[asdl_withitem_seq*]=','.with_item+ ':' " -"tc=[TYPE_COMMENT] b=block {\n" -" CHECK_VERSION(stmt_ty, 5, \"Async with statements are\", " -"_PyAST_AsyncWith(a, b, NEW_TYPE_COMMENT(p, tc), EXTRA)) }\n" -" | invalid_with_stmt\n" -"\n" -"with_item[withitem_ty]:\n" -" | e=expression 'as' t=star_target &(',' | ')' | ':') " -"{ _PyAST_withitem(e, t, p->arena) }\n" -" | invalid_with_item\n" -" | e=expression { _PyAST_withitem(e, NULL, p->arena) }\n" -"\n" -"# Try statement\n" -"# -------------\n" -"\n" -"try_stmt[stmt_ty]:\n" -" | invalid_try_stmt\n" -" | 'try' &&':' b=block f=finally_block { _PyAST_Try(b, NULL, NULL, f, " -"EXTRA) }\n" -" | 'try' &&':' b=block ex[asdl_excepthandler_seq*]=except_block+ " -"el=[else_block] f=[finally_block] { _PyAST_Try(b, ex, el, f, EXTRA) }\n" -" | 'try' &&':' b=block ex[asdl_excepthandler_seq*]=except_star_block+ " -"el=[else_block] f=[finally_block] {\n" -" CHECK_VERSION(stmt_ty, 11, \"Exception groups are\",\n" -" _PyAST_TryStar(b, ex, el, f, EXTRA)) }\n" -"\n" -"\n" -"# Except statement\n" -"# ----------------\n" -"\n" -"except_block[excepthandler_ty]:\n" -" | invalid_except_stmt_indent\n" -" | 'except' e=expression ':' b=block {\n" -" _PyAST_ExceptHandler(e, NULL, b, EXTRA) }\n" -" | 'except' e=expression 'as' t=NAME ':' b=block {\n" -" _PyAST_ExceptHandler(e, ((expr_ty) t)->v.Name.id, b, EXTRA) }\n" -" | 'except' e=expressions ':' b=block {\n" -" CHECK_VERSION(\n" -" excepthandler_ty, \n" -" 14, \n" -" \"except expressions without parentheses are\", \n" -" 
_PyAST_ExceptHandler(e, NULL, b, EXTRA)) }\n" -" | 'except' ':' b=block { _PyAST_ExceptHandler(NULL, NULL, b, EXTRA) }\n" -" | invalid_except_stmt\n" -"except_star_block[excepthandler_ty]:\n" -" | invalid_except_star_stmt_indent\n" -" | 'except' '*' e=expression ':' b=block {\n" -" _PyAST_ExceptHandler(e, NULL, b, EXTRA) }\n" -" | 'except' '*' e=expression 'as' t=NAME ':' b=block {\n" -" _PyAST_ExceptHandler(e, ((expr_ty) t)->v.Name.id, b, EXTRA) }\n" -" | 'except' '*' e=expressions ':' b=block {\n" -" CHECK_VERSION(\n" -" excepthandler_ty, \n" -" 14, \n" -" \"except expressions without parentheses are\", \n" -" _PyAST_ExceptHandler(e, NULL, b, EXTRA)) }\n" -" | invalid_except_star_stmt\n" -"finally_block[asdl_stmt_seq*]:\n" -" | invalid_finally_stmt\n" -" | 'finally' &&':' a=block { a }\n" -"\n" -"# Match statement\n" -"# ---------------\n" -"\n" -"match_stmt[stmt_ty]:\n" -" | \"match\" subject=subject_expr ':' NEWLINE INDENT " -"cases[asdl_match_case_seq*]=case_block+ DEDENT {\n" -" CHECK_VERSION(stmt_ty, 10, \"Pattern matching is\", " -"_PyAST_Match(subject, cases, EXTRA)) }\n" -" | invalid_match_stmt\n" -"\n" -"subject_expr[expr_ty]:\n" -" | value=star_named_expression ',' values=star_named_expressions? {\n" -" _PyAST_Tuple(CHECK(asdl_expr_seq*, _PyPegen_seq_insert_in_front(p, " -"value, values)), Load, EXTRA) }\n" -" | named_expression\n" -"\n" -"case_block[match_case_ty]:\n" -" | invalid_case_block\n" -" | \"case\" pattern=patterns guard=guard? 
':' body=block {\n" -" _PyAST_match_case(pattern, guard, body, p->arena) }\n" -"\n" -"guard[expr_ty]: 'if' guard=named_expression { guard }\n" -"\n" -"patterns[pattern_ty]:\n" -" | patterns[asdl_pattern_seq*]=open_sequence_pattern {\n" -" _PyAST_MatchSequence(patterns, EXTRA) }\n" -" | pattern\n" -"\n" -"pattern[pattern_ty]:\n" -" | as_pattern\n" -" | or_pattern\n" -"\n" -"as_pattern[pattern_ty]:\n" -" | pattern=or_pattern 'as' target=pattern_capture_target {\n" -" _PyAST_MatchAs(pattern, target->v.Name.id, EXTRA) }\n" -" | invalid_as_pattern\n" -"\n" -"or_pattern[pattern_ty]:\n" -" | patterns[asdl_pattern_seq*]='|'.closed_pattern+ {\n" -" asdl_seq_LEN(patterns) == 1 ? asdl_seq_GET(patterns, 0) : " -"_PyAST_MatchOr(patterns, EXTRA) }\n" -"\n" -"closed_pattern[pattern_ty] (memo):\n" -" | literal_pattern\n" -" | capture_pattern\n" -" | wildcard_pattern\n" -" | value_pattern\n" -" | group_pattern\n" -" | sequence_pattern\n" -" | mapping_pattern\n" -" | class_pattern\n" -"\n" -"# Literal patterns are used for equality and identity constraints\n" -"literal_pattern[pattern_ty]:\n" -" | value=signed_number !('+' | '-') { _PyAST_MatchValue(value, EXTRA) }\n" -" | value=complex_number { _PyAST_MatchValue(value, EXTRA) }\n" -" | value=strings { _PyAST_MatchValue(value, EXTRA) }\n" -" | 'None' { _PyAST_MatchSingleton(Py_None, EXTRA) }\n" -" | 'True' { _PyAST_MatchSingleton(Py_True, EXTRA) }\n" -" | 'False' { _PyAST_MatchSingleton(Py_False, EXTRA) }\n" -"\n" -"# Literal expressions are used to restrict permitted mapping pattern keys\n" -"literal_expr[expr_ty]:\n" -" | signed_number !('+' | '-')\n" -" | complex_number\n" -" | &(STRING|FSTRING_START|TSTRING_START) strings\n" -" | 'None' { _PyAST_Constant(Py_None, NULL, EXTRA) }\n" -" | 'True' { _PyAST_Constant(Py_True, NULL, EXTRA) }\n" -" | 'False' { _PyAST_Constant(Py_False, NULL, EXTRA) }\n" -"\n" -"complex_number[expr_ty]:\n" -" | real=signed_real_number '+' imag=imaginary_number {\n" -" _PyAST_BinOp(real, Add, imag, EXTRA) 
}\n" -" | real=signed_real_number '-' imag=imaginary_number {\n" -" _PyAST_BinOp(real, Sub, imag, EXTRA) }\n" -"\n" -"signed_number[expr_ty]:\n" -" | NUMBER\n" -" | '-' number=NUMBER { _PyAST_UnaryOp(USub, number, EXTRA) }\n" -"\n" -"signed_real_number[expr_ty]:\n" -" | real_number\n" -" | '-' real=real_number { _PyAST_UnaryOp(USub, real, EXTRA) }\n" -"\n" -"real_number[expr_ty]:\n" -" | real=NUMBER { _PyPegen_ensure_real(p, real) }\n" -"\n" -"imaginary_number[expr_ty]:\n" -" | imag=NUMBER { _PyPegen_ensure_imaginary(p, imag) }\n" -"\n" -"capture_pattern[pattern_ty]:\n" -" | target=pattern_capture_target { _PyAST_MatchAs(NULL, target->v.Name." -"id, EXTRA) }\n" -"\n" -"pattern_capture_target[expr_ty]:\n" -" | !\"_\" name=NAME !('.' | '(' | '=') {\n" -" _PyPegen_set_expr_context(p, name, Store) }\n" -"\n" -"wildcard_pattern[pattern_ty]:\n" -" | \"_\" { _PyAST_MatchAs(NULL, NULL, EXTRA) }\n" -"\n" -"value_pattern[pattern_ty]:\n" -" | attr=attr !('.' | '(' | '=') { _PyAST_MatchValue(attr, EXTRA) }\n" -"\n" -"attr[expr_ty]:\n" -" | value=name_or_attr '.' attr=NAME {\n" -" _PyAST_Attribute(value, attr->v.Name.id, Load, EXTRA) }\n" -"\n" -"name_or_attr[expr_ty]:\n" -" | attr\n" -" | NAME\n" -"\n" -"group_pattern[pattern_ty]:\n" -" | '(' pattern=pattern ')' { pattern }\n" -"\n" -"sequence_pattern[pattern_ty]:\n" -" | '[' patterns=maybe_sequence_pattern? " -"']' { _PyAST_MatchSequence(patterns, EXTRA) }\n" -" | '(' patterns=open_sequence_pattern? " -"')' { _PyAST_MatchSequence(patterns, EXTRA) }\n" -"\n" -"open_sequence_pattern[asdl_seq*]:\n" -" | pattern=maybe_star_pattern ',' patterns=maybe_sequence_pattern? {\n" -" _PyPegen_seq_insert_in_front(p, pattern, patterns) }\n" -"\n" -"maybe_sequence_pattern[asdl_seq*]:\n" -" | patterns=','.maybe_star_pattern+ ','? 
{ patterns }\n" -"\n" -"maybe_star_pattern[pattern_ty]:\n" -" | star_pattern\n" -" | pattern\n" -"\n" -"star_pattern[pattern_ty] (memo):\n" -" | '*' target=pattern_capture_target {\n" -" _PyAST_MatchStar(target->v.Name.id, EXTRA) }\n" -" | '*' wildcard_pattern {\n" -" _PyAST_MatchStar(NULL, EXTRA) }\n" -"\n" -"mapping_pattern[pattern_ty]:\n" -" | '{' '}' {\n" -" _PyAST_MatchMapping(NULL, NULL, NULL, EXTRA) }\n" -" | '{' rest=double_star_pattern ','? '}' {\n" -" _PyAST_MatchMapping(NULL, NULL, rest->v.Name.id, EXTRA) }\n" -" | '{' items=items_pattern ',' rest=double_star_pattern ','? '}' {\n" -" _PyAST_MatchMapping(\n" -" CHECK(asdl_expr_seq*, _PyPegen_get_pattern_keys(p, items)),\n" -" CHECK(asdl_pattern_seq*, _PyPegen_get_patterns(p, items)),\n" -" rest->v.Name.id,\n" -" EXTRA) }\n" -" | '{' items=items_pattern ','? '}' {\n" -" _PyAST_MatchMapping(\n" -" CHECK(asdl_expr_seq*, _PyPegen_get_pattern_keys(p, items)),\n" -" CHECK(asdl_pattern_seq*, _PyPegen_get_patterns(p, items)),\n" -" NULL,\n" -" EXTRA) }\n" -"\n" -"items_pattern[asdl_seq*]:\n" -" | ','.key_value_pattern+\n" -"\n" -"key_value_pattern[KeyPatternPair*]:\n" -" | key=(literal_expr | attr) ':' pattern=pattern {\n" -" _PyPegen_key_pattern_pair(p, key, pattern) }\n" -"\n" -"double_star_pattern[expr_ty]:\n" -" | '**' target=pattern_capture_target { target }\n" -"\n" -"class_pattern[pattern_ty]:\n" -" | cls=name_or_attr '(' ')' {\n" -" _PyAST_MatchClass(cls, NULL, NULL, NULL, EXTRA) }\n" -" | cls=name_or_attr '(' patterns=positional_patterns ','? ')' {\n" -" _PyAST_MatchClass(cls, patterns, NULL, NULL, EXTRA) }\n" -" | cls=name_or_attr '(' keywords=keyword_patterns ','? 
')' {\n" -" _PyAST_MatchClass(\n" -" cls, NULL,\n" -" CHECK(asdl_identifier_seq*, _PyPegen_map_names_to_ids(p,\n" -" CHECK(asdl_expr_seq*, _PyPegen_get_pattern_keys(p, " -"keywords)))),\n" -" CHECK(asdl_pattern_seq*, _PyPegen_get_patterns(p, keywords)),\n" -" EXTRA) }\n" -" | cls=name_or_attr '(' patterns=positional_patterns ',' " -"keywords=keyword_patterns ','? ')' {\n" -" _PyAST_MatchClass(\n" -" cls,\n" -" patterns,\n" -" CHECK(asdl_identifier_seq*, _PyPegen_map_names_to_ids(p,\n" -" CHECK(asdl_expr_seq*, _PyPegen_get_pattern_keys(p, " -"keywords)))),\n" -" CHECK(asdl_pattern_seq*, _PyPegen_get_patterns(p, keywords)),\n" -" EXTRA) }\n" -" | invalid_class_pattern\n" -"\n" -"positional_patterns[asdl_pattern_seq*]:\n" -" | args[asdl_pattern_seq*]=','.pattern+ { args }\n" -"\n" -"keyword_patterns[asdl_seq*]:\n" -" | ','.keyword_pattern+\n" -"\n" -"keyword_pattern[KeyPatternPair*]:\n" -" | arg=NAME '=' value=pattern { _PyPegen_key_pattern_pair(p, arg, " -"value) }\n" -"\n" -"# Type statement\n" -"# ---------------\n" -"\n" -"type_alias[stmt_ty]:\n" -" | \"type\" n=NAME t=[type_params] '=' b=expression {\n" -" CHECK_VERSION(stmt_ty, 12, \"Type statement is\",\n" -" _PyAST_TypeAlias(CHECK(expr_ty, _PyPegen_set_expr_context(p, n, " -"Store)), t, b, EXTRA)) }\n" -"\n" -"# Type parameter declaration\n" -"# --------------------------\n" -"\n" -"type_params[asdl_type_param_seq*]:\n" -" | invalid_type_params\n" -" | '[' t=type_param_seq ']' {\n" -" CHECK_VERSION(asdl_type_param_seq *, 12, \"Type parameter lists " -"are\", t) }\n" -"\n" -"type_param_seq[asdl_type_param_seq*]: a[asdl_type_param_seq*]=','." -"type_param+ [','] { a }\n" -"\n" -"type_param[type_param_ty] (memo):\n" -" | a=NAME b=[type_param_bound] c=[type_param_default] { _PyAST_TypeVar(a-" -">v.Name.id, b, c, EXTRA) }\n" -" | invalid_type_param\n" -" | '*' a=NAME b=[type_param_starred_default] { _PyAST_TypeVarTuple(a->v." 
-"Name.id, b, EXTRA) }\n" -" | '**' a=NAME b=[type_param_default] { _PyAST_ParamSpec(a->v.Name.id, b, " -"EXTRA) }\n" -"\n" -"type_param_bound[expr_ty]: ':' e=expression { e }\n" -"type_param_default[expr_ty]: '=' e=expression {\n" -" CHECK_VERSION(expr_ty, 13, \"Type parameter defaults are\", e) }\n" -"type_param_starred_default[expr_ty]: '=' e=star_expression {\n" -" CHECK_VERSION(expr_ty, 13, \"Type parameter defaults are\", e) }\n" -"\n" -"# EXPRESSIONS\n" -"# -----------\n" -"\n" -"expressions[expr_ty]:\n" -" | a=expression b=(',' c=expression { c })+ [','] {\n" -" _PyAST_Tuple(CHECK(asdl_expr_seq*, _PyPegen_seq_insert_in_front(p, " -"a, b)), Load, EXTRA) }\n" -" | a=expression ',' { _PyAST_Tuple(CHECK(asdl_expr_seq*, " -"_PyPegen_singleton_seq(p, a)), Load, EXTRA) }\n" -" | expression\n" -"\n" -"expression[expr_ty] (memo):\n" -" | invalid_expression\n" -" | invalid_legacy_expression\n" -" | a=disjunction 'if' b=disjunction 'else' c=expression { _PyAST_IfExp(b, " -"a, c, EXTRA) }\n" -" | disjunction\n" -" | lambdef\n" -"\n" -"yield_expr[expr_ty]:\n" -" | 'yield' 'from' a=expression { _PyAST_YieldFrom(a, EXTRA) }\n" -" | 'yield' a=[star_expressions] { _PyAST_Yield(a, EXTRA) }\n" -"\n" -"star_expressions[expr_ty]:\n" -" | a=star_expression b=(',' c=star_expression { c })+ [','] {\n" -" _PyAST_Tuple(CHECK(asdl_expr_seq*, _PyPegen_seq_insert_in_front(p, " -"a, b)), Load, EXTRA) }\n" -" | a=star_expression ',' { _PyAST_Tuple(CHECK(asdl_expr_seq*, " -"_PyPegen_singleton_seq(p, a)), Load, EXTRA) }\n" -" | star_expression\n" -"\n" -"star_expression[expr_ty] (memo):\n" -" | '*' a=bitwise_or { _PyAST_Starred(a, Load, EXTRA) }\n" -" | expression\n" -"\n" -"star_named_expressions[asdl_expr_seq*]: a[asdl_expr_seq*]=','." 
-"star_named_expression+ [','] { a }\n" -"\n" -"star_named_expression[expr_ty]:\n" -" | '*' a=bitwise_or { _PyAST_Starred(a, Load, EXTRA) }\n" -" | named_expression\n" -"\n" -"assignment_expression[expr_ty]:\n" -" | a=NAME ':=' ~ b=expression {\n" -" CHECK_VERSION(expr_ty, 8, \"Assignment expressions are\",\n" -" _PyAST_NamedExpr(CHECK(expr_ty, _PyPegen_set_expr_context(p, a, " -"Store)), b, EXTRA)) }\n" -"\n" -"named_expression[expr_ty]:\n" -" | assignment_expression\n" -" | invalid_named_expression\n" -" | expression !':='\n" -"\n" -"disjunction[expr_ty] (memo):\n" -" | a=conjunction b=('or' c=conjunction { c })+ { _PyAST_BoolOp(\n" -" Or,\n" -" CHECK(asdl_expr_seq*, _PyPegen_seq_insert_in_front(p, a, b)),\n" -" EXTRA) }\n" -" | conjunction\n" -"\n" -"conjunction[expr_ty] (memo):\n" -" | a=inversion b=('and' c=inversion { c })+ { _PyAST_BoolOp(\n" -" And,\n" -" CHECK(asdl_expr_seq*, _PyPegen_seq_insert_in_front(p, a, b)),\n" -" EXTRA) }\n" -" | inversion\n" -"\n" -"inversion[expr_ty] (memo):\n" -" | 'not' a=inversion { _PyAST_UnaryOp(Not, a, EXTRA) }\n" -" | comparison\n" -"\n" -"# Comparison operators\n" -"# --------------------\n" -"\n" -"comparison[expr_ty]:\n" -" | a=bitwise_or b=compare_op_bitwise_or_pair+ {\n" -" _PyAST_Compare(\n" -" a,\n" -" CHECK(asdl_int_seq*, _PyPegen_get_cmpops(p, b)),\n" -" CHECK(asdl_expr_seq*, _PyPegen_get_exprs(p, b)),\n" -" EXTRA) }\n" -" | bitwise_or\n" -"\n" -"compare_op_bitwise_or_pair[CmpopExprPair*]:\n" -" | eq_bitwise_or\n" -" | noteq_bitwise_or\n" -" | lte_bitwise_or\n" -" | lt_bitwise_or\n" -" | gte_bitwise_or\n" -" | gt_bitwise_or\n" -" | notin_bitwise_or\n" -" | in_bitwise_or\n" -" | isnot_bitwise_or\n" -" | is_bitwise_or\n" -"\n" -"eq_bitwise_or[CmpopExprPair*]: '==' a=bitwise_or " -"{ _PyPegen_cmpop_expr_pair(p, Eq, a) }\n" -"noteq_bitwise_or[CmpopExprPair*]:\n" -" | (tok='!=' { _PyPegen_check_barry_as_flufl(p, tok) ? 
NULL : tok}) " -"a=bitwise_or {_PyPegen_cmpop_expr_pair(p, NotEq, a) }\n" -"lte_bitwise_or[CmpopExprPair*]: '<=' a=bitwise_or " -"{ _PyPegen_cmpop_expr_pair(p, LtE, a) }\n" -"lt_bitwise_or[CmpopExprPair*]: '<' a=bitwise_or " -"{ _PyPegen_cmpop_expr_pair(p, Lt, a) }\n" -"gte_bitwise_or[CmpopExprPair*]: '>=' a=bitwise_or " -"{ _PyPegen_cmpop_expr_pair(p, GtE, a) }\n" -"gt_bitwise_or[CmpopExprPair*]: '>' a=bitwise_or " -"{ _PyPegen_cmpop_expr_pair(p, Gt, a) }\n" -"notin_bitwise_or[CmpopExprPair*]: 'not' 'in' a=bitwise_or " -"{ _PyPegen_cmpop_expr_pair(p, NotIn, a) }\n" -"in_bitwise_or[CmpopExprPair*]: 'in' a=bitwise_or " -"{ _PyPegen_cmpop_expr_pair(p, In, a) }\n" -"isnot_bitwise_or[CmpopExprPair*]: 'is' 'not' a=bitwise_or " -"{ _PyPegen_cmpop_expr_pair(p, IsNot, a) }\n" -"is_bitwise_or[CmpopExprPair*]: 'is' a=bitwise_or " -"{ _PyPegen_cmpop_expr_pair(p, Is, a) }\n" -"\n" -"# Bitwise operators\n" -"# -----------------\n" -"\n" -"bitwise_or[expr_ty]:\n" -" | a=bitwise_or '|' b=bitwise_xor { _PyAST_BinOp(a, BitOr, b, EXTRA) }\n" -" | bitwise_xor\n" -"\n" -"bitwise_xor[expr_ty]:\n" -" | a=bitwise_xor '^' b=bitwise_and { _PyAST_BinOp(a, BitXor, b, EXTRA) }\n" -" | bitwise_and\n" -"\n" -"bitwise_and[expr_ty]:\n" -" | a=bitwise_and '&' b=shift_expr { _PyAST_BinOp(a, BitAnd, b, EXTRA) }\n" -" | shift_expr\n" -"\n" -"shift_expr[expr_ty]:\n" -" | a=shift_expr '<<' b=sum { _PyAST_BinOp(a, LShift, b, EXTRA) }\n" -" | a=shift_expr '>>' b=sum { _PyAST_BinOp(a, RShift, b, EXTRA) }\n" -" | invalid_arithmetic\n" -" | sum\n" -"\n" -"# Arithmetic operators\n" -"# --------------------\n" -"\n" -"sum[expr_ty]:\n" -" | a=sum '+' b=term { _PyAST_BinOp(a, Add, b, EXTRA) }\n" -" | a=sum '-' b=term { _PyAST_BinOp(a, Sub, b, EXTRA) }\n" -" | term\n" -"\n" -"term[expr_ty]:\n" -" | a=term '*' b=factor { _PyAST_BinOp(a, Mult, b, EXTRA) }\n" -" | a=term '/' b=factor { _PyAST_BinOp(a, Div, b, EXTRA) }\n" -" | a=term '//' b=factor { _PyAST_BinOp(a, FloorDiv, b, EXTRA) }\n" -" | a=term '%' b=factor { 
_PyAST_BinOp(a, Mod, b, EXTRA) }\n" -" | a=term '@' b=factor { CHECK_VERSION(expr_ty, 5, \"The '@' operator " -"is\", _PyAST_BinOp(a, MatMult, b, EXTRA)) }\n" -" | invalid_factor\n" -" | factor\n" -"\n" -"factor[expr_ty] (memo):\n" -" | '+' a=factor { _PyAST_UnaryOp(UAdd, a, EXTRA) }\n" -" | '-' a=factor { _PyAST_UnaryOp(USub, a, EXTRA) }\n" -" | '~' a=factor { _PyAST_UnaryOp(Invert, a, EXTRA) }\n" -" | power\n" -"\n" -"power[expr_ty]:\n" -" | a=await_primary '**' b=factor { _PyAST_BinOp(a, Pow, b, EXTRA) }\n" -" | await_primary\n" -"\n" -"# Primary elements\n" -"# ----------------\n" -"\n" -"# Primary elements are things like \"obj.something.something\", " -"\"obj[something]\", \"obj(something)\", \"obj\" ...\n" -"\n" -"await_primary[expr_ty] (memo):\n" -" | 'await' a=primary { CHECK_VERSION(expr_ty, 5, \"Await expressions " -"are\", _PyAST_Await(a, EXTRA)) }\n" -" | primary\n" -"\n" -"primary[expr_ty]:\n" -" | a=primary '.' b=NAME { _PyAST_Attribute(a, b->v.Name.id, Load, " -"EXTRA) }\n" -" | a=primary b=genexp { _PyAST_Call(a, CHECK(asdl_expr_seq*, " -"(asdl_expr_seq*)_PyPegen_singleton_seq(p, b)), NULL, EXTRA) }\n" -" | a=primary '(' b=[arguments] ')' {\n" -" _PyAST_Call(a,\n" -" (b) ? ((expr_ty) b)->v.Call.args : NULL,\n" -" (b) ? 
((expr_ty) b)->v.Call.keywords : NULL,\n" -" EXTRA) }\n" -" | a=primary '[' b=slices ']' { _PyAST_Subscript(a, b, Load, EXTRA) }\n" -" | atom\n" -"\n" -"slices[expr_ty]:\n" -" | a=slice !',' { a }\n" -" | a[asdl_expr_seq*]=','.(slice | starred_expression)+ [','] " -"{ _PyAST_Tuple(a, Load, EXTRA) }\n" -"\n" -"slice[expr_ty]:\n" -" | a=[expression] ':' b=[expression] c=[':' d=[expression] { d }] " -"{ _PyAST_Slice(a, b, c, EXTRA) }\n" -" | a=named_expression { a }\n" -"\n" -"atom[expr_ty]:\n" -" | NAME\n" -" | 'True' { _PyAST_Constant(Py_True, NULL, EXTRA) }\n" -" | 'False' { _PyAST_Constant(Py_False, NULL, EXTRA) }\n" -" | 'None' { _PyAST_Constant(Py_None, NULL, EXTRA) }\n" -" | &(STRING|FSTRING_START|TSTRING_START) strings\n" -" | NUMBER\n" -" | &'(' (tuple | group | genexp)\n" -" | &'[' (list | listcomp)\n" -" | &'{' (dict | set | dictcomp | setcomp)\n" -" | '...' { _PyAST_Constant(Py_Ellipsis, NULL, EXTRA) }\n" -"\n" -"group[expr_ty]:\n" -" | '(' a=(yield_expr | named_expression) ')' { a }\n" -" | invalid_group\n" -"\n" -"# Lambda functions\n" -"# ----------------\n" -"\n" -"lambdef[expr_ty]:\n" -" | 'lambda' a=[lambda_params] ':' b=expression {\n" -" _PyAST_Lambda((a) ? a : CHECK(arguments_ty, " -"_PyPegen_empty_arguments(p)), b, EXTRA) }\n" -"\n" -"lambda_params[arguments_ty]:\n" -" | invalid_lambda_parameters\n" -" | lambda_parameters\n" -"\n" -"# lambda_parameters etc. duplicates parameters but without annotations\n" -"# or type comments, and if there's no comma after a parameter, we expect\n" -"# a colon, not a close parenthesis. 
(For more, see parameters above.)\n" -"#\n" -"lambda_parameters[arguments_ty]:\n" -" | a=lambda_slash_no_default b[asdl_arg_seq*]=lambda_param_no_default* " -"c=lambda_param_with_default* d=[lambda_star_etc] {\n" -" CHECK_VERSION(arguments_ty, 8, \"Positional-only parameters are\", " -"_PyPegen_make_arguments(p, a, NULL, b, c, d)) }\n" -" | a=lambda_slash_with_default b=lambda_param_with_default* " -"c=[lambda_star_etc] {\n" -" CHECK_VERSION(arguments_ty, 8, \"Positional-only parameters are\", " -"_PyPegen_make_arguments(p, NULL, a, NULL, b, c)) }\n" -" | a[asdl_arg_seq*]=lambda_param_no_default+ b=lambda_param_with_default* " -"c=[lambda_star_etc] {\n" -" _PyPegen_make_arguments(p, NULL, NULL, a, b, c) }\n" -" | a=lambda_param_with_default+ b=[lambda_star_etc] " -"{ _PyPegen_make_arguments(p, NULL, NULL, NULL, a, b)}\n" -" | a=lambda_star_etc { _PyPegen_make_arguments(p, NULL, NULL, NULL, NULL, " -"a) }\n" -"\n" -"lambda_slash_no_default[asdl_arg_seq*]:\n" -" | a[asdl_arg_seq*]=lambda_param_no_default+ '/' ',' { a }\n" -" | a[asdl_arg_seq*]=lambda_param_no_default+ '/' &':' { a }\n" -"\n" -"lambda_slash_with_default[SlashWithDefault*]:\n" -" | a=lambda_param_no_default* b=lambda_param_with_default+ '/' " -"',' { _PyPegen_slash_with_default(p, (asdl_arg_seq *)a, b) }\n" -" | a=lambda_param_no_default* b=lambda_param_with_default+ '/' " -"&':' { _PyPegen_slash_with_default(p, (asdl_arg_seq *)a, b) }\n" -"\n" -"lambda_star_etc[StarEtc*]:\n" -" | invalid_lambda_star_etc\n" -" | '*' a=lambda_param_no_default b=lambda_param_maybe_default* " -"c=[lambda_kwds] {\n" -" _PyPegen_star_etc(p, a, b, c) }\n" -" | '*' ',' b=lambda_param_maybe_default+ c=[lambda_kwds] {\n" -" _PyPegen_star_etc(p, NULL, b, c) }\n" -" | a=lambda_kwds { _PyPegen_star_etc(p, NULL, NULL, a) }\n" -"\n" -"lambda_kwds[arg_ty]:\n" -" | invalid_lambda_kwds\n" -" | '**' a=lambda_param_no_default { a }\n" -"\n" -"lambda_param_no_default[arg_ty]:\n" -" | a=lambda_param ',' { a }\n" -" | a=lambda_param &':' { 
a }\n" -"lambda_param_with_default[NameDefaultPair*]:\n" -" | a=lambda_param c=default ',' { _PyPegen_name_default_pair(p, a, c, " -"NULL) }\n" -" | a=lambda_param c=default &':' { _PyPegen_name_default_pair(p, a, c, " -"NULL) }\n" -"lambda_param_maybe_default[NameDefaultPair*]:\n" -" | a=lambda_param c=default? ',' { _PyPegen_name_default_pair(p, a, c, " -"NULL) }\n" -" | a=lambda_param c=default? &':' { _PyPegen_name_default_pair(p, a, c, " -"NULL) }\n" -"lambda_param[arg_ty]: a=NAME { _PyAST_arg(a->v.Name.id, NULL, NULL, " -"EXTRA) }\n" -"\n" -"# LITERALS\n" -"# ========\n" -"\n" -"fstring_middle[expr_ty]:\n" -" | fstring_replacement_field\n" -" | t=FSTRING_MIDDLE { _PyPegen_constant_from_token(p, t) }\n" -"fstring_replacement_field[expr_ty]:\n" -" | '{' a=annotated_rhs debug_expr='='? conversion=[fstring_conversion] " -"format=[fstring_full_format_spec] rbrace='}' {\n" -" _PyPegen_formatted_value(p, a, debug_expr, conversion, format, " -"rbrace, EXTRA) }\n" -" | invalid_fstring_replacement_field\n" -"fstring_conversion[ResultTokenWithMetadata*]:\n" -" | conv_token=\"!\" conv=NAME { _PyPegen_check_fstring_conversion(p, " -"conv_token, conv) }\n" -"fstring_full_format_spec[ResultTokenWithMetadata*]:\n" -" | colon=':' spec=fstring_format_spec* " -"{ _PyPegen_setup_full_format_spec(p, colon, (asdl_expr_seq *) spec, " -"EXTRA) }\n" -"fstring_format_spec[expr_ty]:\n" -" | t=FSTRING_MIDDLE { _PyPegen_decoded_constant_from_token(p, t) }\n" -" | fstring_replacement_field\n" -"fstring[expr_ty]:\n" -" | a=FSTRING_START b=fstring_middle* c=FSTRING_END " -"{ _PyPegen_joined_str(p, a, (asdl_expr_seq*)b, c) }\n" -"\n" -"tstring_format_spec_replacement_field[expr_ty]:\n" -" | '{' a=annotated_rhs debug_expr='='? 
conversion=[fstring_conversion] " -"format=[tstring_full_format_spec] rbrace='}' {\n" -" _PyPegen_formatted_value(p, a, debug_expr, conversion, format, " -"rbrace, EXTRA) }\n" -" | invalid_tstring_replacement_field\n" -"tstring_format_spec[expr_ty]:\n" -" | t=TSTRING_MIDDLE { _PyPegen_decoded_constant_from_token(p, t) }\n" -" | tstring_format_spec_replacement_field\n" -"tstring_full_format_spec[ResultTokenWithMetadata*]:\n" -" | colon=':' spec=tstring_format_spec* " -"{ _PyPegen_setup_full_format_spec(p, colon, (asdl_expr_seq *) spec, " -"EXTRA) }\n" -"tstring_replacement_field[expr_ty]:\n" -" | '{' a=annotated_rhs debug_expr='='? conversion=[fstring_conversion] " -"format=[tstring_full_format_spec] rbrace='}' {\n" -" _PyPegen_interpolation(p, a, debug_expr, conversion, format, rbrace, " -"EXTRA) }\n" -" | invalid_tstring_replacement_field\n" -"tstring_middle[expr_ty]:\n" -" | tstring_replacement_field\n" -" | t=TSTRING_MIDDLE { _PyPegen_constant_from_token(p, t) }\n" -"tstring[expr_ty] (memo):\n" -" | a=TSTRING_START b=tstring_middle* c=TSTRING_END { \n" -" CHECK_VERSION(\n" -" expr_ty, \n" -" 14, \n" -" \"t-strings are\", \n" -" _PyPegen_template_str(p, a, (asdl_expr_seq*)b, c)) }\n" -"\n" -"string[expr_ty]: s[Token*]=STRING { _PyPegen_constant_from_string(p, s) }\n" -"strings[expr_ty] (memo): a[asdl_expr_seq*]=(fstring|string|tstring)+ " -"{ _PyPegen_concatenate_strings(p, a, EXTRA) }\n" -"\n" -"list[expr_ty]:\n" -" | '[' a=[star_named_expressions] ']' { _PyAST_List(a, Load, EXTRA) }\n" -"\n" -"tuple[expr_ty]:\n" -" | '(' a=[y=star_named_expression ',' z=[star_named_expressions] " -"{ _PyPegen_seq_insert_in_front(p, y, z) } ] ')' {\n" -" _PyAST_Tuple(a, Load, EXTRA) }\n" -"\n" -"set[expr_ty]: '{' a=star_named_expressions '}' { _PyAST_Set(a, EXTRA) }\n" -"\n" -"# Dicts\n" -"# -----\n" -"\n" -"dict[expr_ty]:\n" -" | '{' a=[double_starred_kvpairs] '}' {\n" -" _PyAST_Dict(\n" -" CHECK(asdl_expr_seq*, _PyPegen_get_keys(p, a)),\n" -" CHECK(asdl_expr_seq*, 
_PyPegen_get_values(p, a)),\n" -" EXTRA) }\n" -" | '{' invalid_double_starred_kvpairs '}'\n" -"\n" -"double_starred_kvpairs[asdl_seq*]: a=','.double_starred_kvpair+ [','] { a }\n" -"\n" -"double_starred_kvpair[KeyValuePair*]:\n" -" | '**' a=bitwise_or { _PyPegen_key_value_pair(p, NULL, a) }\n" -" | kvpair\n" -"\n" -"kvpair[KeyValuePair*]: a=expression ':' b=expression " -"{ _PyPegen_key_value_pair(p, a, b) }\n" -"\n" -"# Comprehensions & Generators\n" -"# ---------------------------\n" -"\n" -"for_if_clauses[asdl_comprehension_seq*]:\n" -" | a[asdl_comprehension_seq*]=for_if_clause+ { a }\n" -"\n" -"for_if_clause[comprehension_ty]:\n" -" | 'async' 'for' a=star_targets 'in' ~ b=disjunction " -"c[asdl_expr_seq*]=('if' z=disjunction { z })* {\n" -" CHECK_VERSION(comprehension_ty, 6, \"Async comprehensions are\", " -"_PyAST_comprehension(a, b, c, 1, p->arena)) }\n" -" | 'for' a=star_targets 'in' ~ b=disjunction c[asdl_expr_seq*]=('if' " -"z=disjunction { z })* {\n" -" _PyAST_comprehension(a, b, c, 0, p->arena) }\n" -" | invalid_for_if_clause\n" -" | invalid_for_target\n" -"\n" -"listcomp[expr_ty]:\n" -" | '[' a=named_expression b=for_if_clauses ']' { _PyAST_ListComp(a, b, " -"EXTRA) }\n" -" | invalid_comprehension\n" -"\n" -"setcomp[expr_ty]:\n" -" | '{' a=named_expression b=for_if_clauses '}' { _PyAST_SetComp(a, b, " -"EXTRA) }\n" -" | invalid_comprehension\n" -"\n" -"genexp[expr_ty]:\n" -" | '(' a=( assignment_expression | expression !':=') b=for_if_clauses " -"')' { _PyAST_GeneratorExp(a, b, EXTRA) }\n" -" | invalid_comprehension\n" -"\n" -"dictcomp[expr_ty]:\n" -" | '{' a=kvpair b=for_if_clauses '}' { _PyAST_DictComp(a->key, a->value, " -"b, EXTRA) }\n" -" | invalid_dict_comprehension\n" -"\n" -"# FUNCTION CALL ARGUMENTS\n" -"# =======================\n" -"\n" -"arguments[expr_ty] (memo):\n" -" | a=args [','] &')' { a }\n" -" | invalid_arguments\n" -"\n" -"args[expr_ty]:\n" -" | a[asdl_expr_seq*]=','.(starred_expression | ( assignment_expression | " -"expression 
!':=') !'=')+ b=[',' k=kwargs {k}] {\n" -" _PyPegen_collect_call_seqs(p, a, b, EXTRA) }\n" -" | a=kwargs { _PyAST_Call(_PyPegen_dummy_name(p),\n" -" CHECK_NULL_ALLOWED(asdl_expr_seq*, " -"_PyPegen_seq_extract_starred_exprs(p, a)),\n" -" CHECK_NULL_ALLOWED(asdl_keyword_seq*, " -"_PyPegen_seq_delete_starred_exprs(p, a)),\n" -" EXTRA) }\n" -"\n" -"kwargs[asdl_seq*]:\n" -" | a=','.kwarg_or_starred+ ',' b=','.kwarg_or_double_starred+ " -"{ _PyPegen_join_sequences(p, a, b) }\n" -" | ','.kwarg_or_starred+\n" -" | ','.kwarg_or_double_starred+\n" -"\n" -"starred_expression[expr_ty]:\n" -" | invalid_starred_expression_unpacking\n" -" | '*' a=expression { _PyAST_Starred(a, Load, EXTRA) }\n" -" | invalid_starred_expression\n" -"\n" -"kwarg_or_starred[KeywordOrStarred*]:\n" -" | invalid_kwarg\n" -" | a=NAME '=' b=expression {\n" -" _PyPegen_keyword_or_starred(p, CHECK(keyword_ty, _PyAST_keyword(a->v." -"Name.id, b, EXTRA)), 1) }\n" -" | a=starred_expression { _PyPegen_keyword_or_starred(p, a, 0) }\n" -"\n" -"kwarg_or_double_starred[KeywordOrStarred*]:\n" -" | invalid_kwarg\n" -" | a=NAME '=' b=expression {\n" -" _PyPegen_keyword_or_starred(p, CHECK(keyword_ty, _PyAST_keyword(a->v." 
-"Name.id, b, EXTRA)), 1) }\n" -" | '**' a=expression { _PyPegen_keyword_or_starred(p, CHECK(keyword_ty, " -"_PyAST_keyword(NULL, a, EXTRA)), 1) }\n" -"\n" -"# ASSIGNMENT TARGETS\n" -"# ==================\n" -"\n" -"# Generic targets\n" -"# ---------------\n" -"\n" -"# NOTE: star_targets may contain *bitwise_or, targets may not.\n" -"star_targets[expr_ty]:\n" -" | a=star_target !',' { a }\n" -" | a=star_target b=(',' c=star_target { c })* [','] {\n" -" _PyAST_Tuple(CHECK(asdl_expr_seq*, _PyPegen_seq_insert_in_front(p, " -"a, b)), Store, EXTRA) }\n" -"\n" -"star_targets_list_seq[asdl_expr_seq*]: a[asdl_expr_seq*]=','.star_target+ " -"[','] { a }\n" -"\n" -"star_targets_tuple_seq[asdl_expr_seq*]:\n" -" | a=star_target b=(',' c=star_target { c })+ [','] { (asdl_expr_seq*) " -"_PyPegen_seq_insert_in_front(p, a, b) }\n" -" | a=star_target ',' { (asdl_expr_seq*) _PyPegen_singleton_seq(p, a) }\n" -"\n" -"star_target[expr_ty] (memo):\n" -" | '*' a=(!'*' star_target) {\n" -" _PyAST_Starred(CHECK(expr_ty, _PyPegen_set_expr_context(p, a, " -"Store)), Store, EXTRA) }\n" -" | target_with_star_atom\n" -"\n" -"target_with_star_atom[expr_ty] (memo):\n" -" | a=t_primary '.' b=NAME !t_lookahead { _PyAST_Attribute(a, b->v.Name." -"id, Store, EXTRA) }\n" -" | a=t_primary '[' b=slices ']' !t_lookahead { _PyAST_Subscript(a, b, " -"Store, EXTRA) }\n" -" | star_atom\n" -"\n" -"star_atom[expr_ty]:\n" -" | a=NAME { _PyPegen_set_expr_context(p, a, Store) }\n" -" | '(' a=target_with_star_atom ')' { _PyPegen_set_expr_context(p, a, " -"Store) }\n" -" | '(' a=[star_targets_tuple_seq] ')' { _PyAST_Tuple(a, Store, EXTRA) }\n" -" | '[' a=[star_targets_list_seq] ']' { _PyAST_List(a, Store, EXTRA) }\n" -"\n" -"single_target[expr_ty]:\n" -" | single_subscript_attribute_target\n" -" | a=NAME { _PyPegen_set_expr_context(p, a, Store) }\n" -" | '(' a=single_target ')' { a }\n" -"\n" -"single_subscript_attribute_target[expr_ty]:\n" -" | a=t_primary '.' b=NAME !t_lookahead { _PyAST_Attribute(a, b->v.Name." 
-"id, Store, EXTRA) }\n" -" | a=t_primary '[' b=slices ']' !t_lookahead { _PyAST_Subscript(a, b, " -"Store, EXTRA) }\n" -"\n" -"t_primary[expr_ty]:\n" -" | a=t_primary '.' b=NAME &t_lookahead { _PyAST_Attribute(a, b->v.Name." -"id, Load, EXTRA) }\n" -" | a=t_primary '[' b=slices ']' &t_lookahead { _PyAST_Subscript(a, b, " -"Load, EXTRA) }\n" -" | a=t_primary b=genexp &t_lookahead {\n" -" _PyAST_Call(a, CHECK(asdl_expr_seq*, " -"(asdl_expr_seq*)_PyPegen_singleton_seq(p, b)), NULL, EXTRA) }\n" -" | a=t_primary '(' b=[arguments] ')' &t_lookahead {\n" -" _PyAST_Call(a,\n" -" (b) ? ((expr_ty) b)->v.Call.args : NULL,\n" -" (b) ? ((expr_ty) b)->v.Call.keywords : NULL,\n" -" EXTRA) }\n" -" | a=atom &t_lookahead { a }\n" -"\n" -"t_lookahead: '(' | '[' | '.'\n" -"\n" -"# Targets for del statements\n" -"# --------------------------\n" -"\n" -"del_targets[asdl_expr_seq*]: a[asdl_expr_seq*]=','.del_target+ [','] { a }\n" -"\n" -"del_target[expr_ty] (memo):\n" -" | a=t_primary '.' b=NAME !t_lookahead { _PyAST_Attribute(a, b->v.Name." 
-"id, Del, EXTRA) }\n" -" | a=t_primary '[' b=slices ']' !t_lookahead { _PyAST_Subscript(a, b, " -"Del, EXTRA) }\n" -" | del_t_atom\n" -"\n" -"del_t_atom[expr_ty]:\n" -" | a=NAME { _PyPegen_set_expr_context(p, a, Del) }\n" -" | '(' a=del_target ')' { _PyPegen_set_expr_context(p, a, Del) }\n" -" | '(' a=[del_targets] ')' { _PyAST_Tuple(a, Del, EXTRA) }\n" -" | '[' a=[del_targets] ']' { _PyAST_List(a, Del, EXTRA) }\n" -"\n" -"# TYPING ELEMENTS\n" -"# ---------------\n" -"\n" -"# type_expressions allow */** but ignore them\n" -"type_expressions[asdl_expr_seq*]:\n" -" | a=','.expression+ ',' '*' b=expression ',' '**' c=expression {\n" -" (asdl_expr_seq*)_PyPegen_seq_append_to_end(\n" -" p,\n" -" CHECK(asdl_seq*, _PyPegen_seq_append_to_end(p, a, b)),\n" -" c) }\n" -" | a=','.expression+ ',' '*' b=expression " -"{ (asdl_expr_seq*)_PyPegen_seq_append_to_end(p, a, b) }\n" -" | a=','.expression+ ',' '**' b=expression " -"{ (asdl_expr_seq*)_PyPegen_seq_append_to_end(p, a, b) }\n" -" | '*' a=expression ',' '**' b=expression {\n" -" (asdl_expr_seq*)_PyPegen_seq_append_to_end(\n" -" p,\n" -" CHECK(asdl_seq*, _PyPegen_singleton_seq(p, a)),\n" -" b) }\n" -" | '*' a=expression { (asdl_expr_seq*)_PyPegen_singleton_seq(p, a) }\n" -" | '**' a=expression { (asdl_expr_seq*)_PyPegen_singleton_seq(p, a) }\n" -" | a[asdl_expr_seq*]=','.expression+ {a}\n" -"\n" -"func_type_comment[Token*]:\n" -" | NEWLINE t=TYPE_COMMENT &(NEWLINE INDENT) { t } # Must be followed by " -"indented block\n" -" | invalid_double_type_comments\n" -" | TYPE_COMMENT\n" -"\n" -"# ========================= END OF THE GRAMMAR ===========================\n" -"\n" -"\n" -"\n" -"# ========================= START OF INVALID RULES =======================\n" -"\n" -"# From here on, there are rules for invalid syntax with specialised error " -"messages\n" -"invalid_arguments:\n" -" | ((','.(starred_expression | ( assignment_expression | expression !':" -"=') !'=')+ ',' kwargs) | kwargs) a=',' ','.(starred_expression !'=')+ 
{\n" -" RAISE_SYNTAX_ERROR_STARTING_FROM(a, \"iterable argument unpacking " -"follows keyword argument unpacking\") }\n" -" | a=expression b=for_if_clauses ',' [args | expression for_if_clauses] " -"{\n" -" RAISE_SYNTAX_ERROR_KNOWN_RANGE(a, " -"_PyPegen_get_last_comprehension_item(PyPegen_last_item(b, " -"comprehension_ty)), \"Generator expression must be parenthesized\") }\n" -" | a=NAME b='=' expression for_if_clauses {\n" -" RAISE_SYNTAX_ERROR_KNOWN_RANGE(a, b, \"invalid syntax. Maybe you " -"meant '==' or ':=' instead of '='?\")}\n" -" | (args ',')? a=NAME b='=' &(',' | ')') {\n" -" RAISE_SYNTAX_ERROR_KNOWN_RANGE(a, b, \"expected argument value " -"expression\")}\n" -" | a=args b=for_if_clauses { _PyPegen_nonparen_genexp_in_call(p, a, b) }\n" -" | args ',' a=expression b=for_if_clauses {\n" -" RAISE_SYNTAX_ERROR_KNOWN_RANGE(a, " -"_PyPegen_get_last_comprehension_item(PyPegen_last_item(b, " -"comprehension_ty)), \"Generator expression must be parenthesized\") }\n" -" | a=args ',' args { _PyPegen_arguments_parsing_error(p, a) }\n" -"invalid_kwarg:\n" -" | a[Token*]=('True'|'False'|'None') b='=' {\n" -" RAISE_SYNTAX_ERROR_KNOWN_RANGE(a, b, \"cannot assign to %s\", " -"PyBytes_AS_STRING(a->bytes)) }\n" -" | a=NAME b='=' expression for_if_clauses {\n" -" RAISE_SYNTAX_ERROR_KNOWN_RANGE(a, b, \"invalid syntax. 
Maybe you " -"meant '==' or ':=' instead of '='?\")}\n" -" | !(NAME '=') a=expression b='=' {\n" -" RAISE_SYNTAX_ERROR_KNOWN_RANGE(\n" -" a, b, \"expression cannot contain assignment, perhaps you meant " -"\\\"==\\\"?\") }\n" -" | a='**' expression '=' b=expression {\n" -" RAISE_SYNTAX_ERROR_KNOWN_RANGE(a, b, \"cannot assign to keyword " -"argument unpacking\") }\n" -"\n" -"# IMPORTANT: Note that the \"_without_invalid\" suffix causes the rule to " -"not call invalid rules under it\n" -"expression_without_invalid[expr_ty]:\n" -" | a=disjunction 'if' b=disjunction 'else' c=expression { _PyAST_IfExp(b, " -"a, c, EXTRA) }\n" -" | disjunction\n" -" | lambdef\n" -"invalid_legacy_expression:\n" -" | a=NAME !'(' b=star_expressions {\n" -" _PyPegen_check_legacy_stmt(p, a) ? RAISE_SYNTAX_ERROR_KNOWN_RANGE(a, " -"b,\n" -" \"Missing parentheses in call to '%U'. Did you mean %U(...)?\", " -"a->v.Name.id, a->v.Name.id) : NULL}\n" -"\n" -"invalid_type_param:\n" -" | '*' a=NAME colon=':' e=expression {\n" -" RAISE_SYNTAX_ERROR_STARTING_FROM(colon, e->kind == Tuple_kind\n" -" ? \"cannot use constraints with TypeVarTuple\"\n" -" : \"cannot use bound with TypeVarTuple\")\n" -" }\n" -" | '**' a=NAME colon=':' e=expression {\n" -" RAISE_SYNTAX_ERROR_STARTING_FROM(colon, e->kind == Tuple_kind\n" -" ? \"cannot use constraints with ParamSpec\"\n" -" : \"cannot use bound with ParamSpec\")\n" -" }\n" -"\n" -"invalid_expression:\n" -" | STRING a=(!STRING expression_without_invalid)+ STRING {\n" -" RAISE_SYNTAX_ERROR_KNOWN_RANGE( PyPegen_first_item(a, expr_ty), " -"PyPegen_last_item(a, expr_ty),\n" -" \"invalid syntax. 
Is this intended to be part of the string?\") }\n" -" # !(NAME STRING) is not matched so we don't show this error with some " -"invalid string prefixes like: kf\"dsfsdf\"\n" -" # Soft keywords need to also be ignored because they can be parsed as " -"NAME NAME\n" -" | !(NAME STRING | SOFT_KEYWORD) a=disjunction " -"b=expression_without_invalid {\n" -" _PyPegen_check_legacy_stmt(p, a) ? NULL : p->tokens[p->mark-1]-" -">level == 0 ? NULL :\n" -" RAISE_SYNTAX_ERROR_KNOWN_RANGE(a, b, \"invalid syntax. Perhaps you " -"forgot a comma?\") }\n" -" | a=disjunction 'if' b=disjunction !('else'|':') " -"{ RAISE_SYNTAX_ERROR_KNOWN_RANGE(a, b, \"expected 'else' after 'if' " -"expression\") }\n" -" | a=disjunction 'if' b=disjunction 'else' !expression {\n" -" RAISE_SYNTAX_ERROR_ON_NEXT_TOKEN(\"expected expression after 'else', " -"but statement is given\") }\n" -" | a[stmt_ty]=(pass_stmt|break_stmt|continue_stmt) 'if' b=disjunction " -"'else' c=simple_stmt {\n" -" RAISE_SYNTAX_ERROR_KNOWN_LOCATION (a, \"expected expression before " -"'if', but statement is given\") }\n" -" | a='lambda' [lambda_params] b=':' &FSTRING_MIDDLE {\n" -" RAISE_SYNTAX_ERROR_KNOWN_RANGE(a, b, \"f-string: lambda expressions " -"are not allowed without parentheses\") }\n" -" | a='lambda' [lambda_params] b=':' &TSTRING_MIDDLE {\n" -" RAISE_SYNTAX_ERROR_KNOWN_RANGE(a, b, \"t-string: lambda expressions " -"are not allowed without parentheses\") }\n" -"\n" -"invalid_named_expression(memo):\n" -" | a=expression ':=' expression {\n" -" RAISE_SYNTAX_ERROR_KNOWN_LOCATION(\n" -" a, \"cannot use assignment expressions with %s\", " -"_PyPegen_get_expr_name(a)) }\n" -" | a=NAME '=' b=bitwise_or !('='|':=') {\n" -" RAISE_SYNTAX_ERROR_KNOWN_RANGE(a, b, \"invalid syntax. Maybe you " -"meant '==' or ':=' instead of '='?\") }\n" -" | !(list|tuple|genexp|'True'|'None'|'False') a=bitwise_or b='=' " -"bitwise_or !('='|':=') {\n" -" RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, \"cannot assign to %s here. 
" -"Maybe you meant '==' instead of '='?\",\n" -" _PyPegen_get_expr_name(a)) }\n" -"\n" -"invalid_assignment:\n" -" | a=invalid_ann_assign_target ':' expression {\n" -" RAISE_SYNTAX_ERROR_KNOWN_LOCATION(\n" -" a,\n" -" \"only single target (not %s) can be annotated\",\n" -" _PyPegen_get_expr_name(a)\n" -" )}\n" -" | a=star_named_expression ',' star_named_expressions* ':' expression {\n" -" RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, \"only single target (not " -"tuple) can be annotated\") }\n" -" | a=expression ':' expression {\n" -" RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, \"illegal target for " -"annotation\") }\n" -" | (star_targets '=')* a=star_expressions '=' {\n" -" RAISE_SYNTAX_ERROR_INVALID_TARGET(STAR_TARGETS, a) }\n" -" | (star_targets '=')* a=yield_expr " -"'=' { RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, \"assignment to yield expression " -"not possible\") }\n" -" | a=star_expressions augassign annotated_rhs {\n" -" RAISE_SYNTAX_ERROR_KNOWN_LOCATION(\n" -" a,\n" -" \"'%s' is an illegal expression for augmented assignment\",\n" -" _PyPegen_get_expr_name(a)\n" -" )}\n" -"invalid_ann_assign_target[expr_ty]:\n" -" | list\n" -" | tuple\n" -" | '(' a=invalid_ann_assign_target ')' { a }\n" -"invalid_del_stmt:\n" -" | 'del' a=star_expressions {\n" -" RAISE_SYNTAX_ERROR_INVALID_TARGET(DEL_TARGETS, a) }\n" -"invalid_block:\n" -" | NEWLINE !INDENT { RAISE_INDENTATION_ERROR(\"expected an indented " -"block\") }\n" -"invalid_comprehension:\n" -" | ('[' | '(' | '{') a=starred_expression for_if_clauses {\n" -" RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, \"iterable unpacking cannot be " -"used in comprehension\") }\n" -" | ('[' | '{') a=star_named_expression ',' b=star_named_expressions " -"for_if_clauses {\n" -" RAISE_SYNTAX_ERROR_KNOWN_RANGE(a, PyPegen_last_item(b, expr_ty),\n" -" \"did you forget parentheses around the comprehension target?\") }\n" -" | ('[' | '{') a=star_named_expression b=',' for_if_clauses {\n" -" RAISE_SYNTAX_ERROR_KNOWN_RANGE(a, b, \"did you forget parentheses " 
-"around the comprehension target?\") }\n" -"invalid_dict_comprehension:\n" -" | '{' a='**' bitwise_or for_if_clauses '}' {\n" -" RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, \"dict unpacking cannot be used " -"in dict comprehension\") }\n" -"invalid_parameters:\n" -" | a=\"/\" ',' {\n" -" RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, \"at least one argument must " -"precede /\") }\n" -" | (slash_no_default | slash_with_default) param_maybe_default* a='/' {\n" -" RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, \"/ may appear only once\") }\n" -" | slash_no_default? param_no_default* invalid_parameters_helper " -"a=param_no_default {\n" -" RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, \"parameter without a default " -"follows parameter with a default\") }\n" -" | param_no_default* a='(' param_no_default+ ','? b=')' {\n" -" RAISE_SYNTAX_ERROR_KNOWN_RANGE(a, b, \"Function parameters cannot be " -"parenthesized\") }\n" -" | (slash_no_default | slash_with_default)? param_maybe_default* '*' (',' " -"| param_no_default) param_maybe_default* a='/' {\n" -" RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, \"/ must be ahead of *\") }\n" -" | param_maybe_default+ '/' a='*' {\n" -" RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, \"expected comma between / and " -"*\") }\n" -"invalid_default:\n" -" | a='=' &(')'|',') { RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, \"expected " -"default value expression\") }\n" -"invalid_star_etc:\n" -" | a='*' (')' | ',' (')' | '**')) { RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, " -"\"named arguments must follow bare *\") }\n" -" | '*' ',' TYPE_COMMENT { RAISE_SYNTAX_ERROR(\"bare * has associated type " -"comment\") }\n" -" | '*' param a='=' { RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, \"var-" -"positional argument cannot have default value\") }\n" -" | '*' (param_no_default | ',') param_maybe_default* " -"a='*' (param_no_default | ',') {\n" -" RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, \"* argument may appear only " -"once\") }\n" -"invalid_kwds:\n" -" | '**' param a='=' { RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, \"var-keyword " 
-"argument cannot have default value\") }\n" -" | '**' param ',' a=param { RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, " -"\"arguments cannot follow var-keyword argument\") }\n" -" | '**' param ',' a[Token*]=('*'|'**'|'/') " -"{ RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, \"arguments cannot follow var-keyword " -"argument\") }\n" -"invalid_parameters_helper: # This is only there to avoid type errors\n" -" | a=slash_with_default { _PyPegen_singleton_seq(p, a) }\n" -" | param_with_default+\n" -"invalid_lambda_parameters:\n" -" | a=\"/\" ',' {\n" -" RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, \"at least one argument must " -"precede /\") }\n" -" | (lambda_slash_no_default | lambda_slash_with_default) " -"lambda_param_maybe_default* a='/' {\n" -" RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, \"/ may appear only once\") }\n" -" | lambda_slash_no_default? lambda_param_no_default* " -"invalid_lambda_parameters_helper a=lambda_param_no_default {\n" -" RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, \"parameter without a default " -"follows parameter with a default\") }\n" -" | lambda_param_no_default* a='(' ','.lambda_param+ ','? b=')' {\n" -" RAISE_SYNTAX_ERROR_KNOWN_RANGE(a, b, \"Lambda expression parameters " -"cannot be parenthesized\") }\n" -" | (lambda_slash_no_default | lambda_slash_with_default)? 
" -"lambda_param_maybe_default* '*' (',' | lambda_param_no_default) " -"lambda_param_maybe_default* a='/' {\n" -" RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, \"/ must be ahead of *\") }\n" -" | lambda_param_maybe_default+ '/' a='*' {\n" -" RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, \"expected comma between / and " -"*\") }\n" -"invalid_lambda_parameters_helper:\n" -" | a=lambda_slash_with_default { _PyPegen_singleton_seq(p, a) }\n" -" | lambda_param_with_default+\n" -"invalid_lambda_star_etc:\n" -" | '*' (':' | ',' (':' | '**')) { RAISE_SYNTAX_ERROR(\"named arguments " -"must follow bare *\") }\n" -" | '*' lambda_param a='=' { RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, \"var-" -"positional argument cannot have default value\") }\n" -" | '*' (lambda_param_no_default | ',') lambda_param_maybe_default* " -"a='*' (lambda_param_no_default | ',') {\n" -" RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, \"* argument may appear only " -"once\") }\n" -"invalid_lambda_kwds:\n" -" | '**' lambda_param a='=' { RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, \"var-" -"keyword argument cannot have default value\") }\n" -" | '**' lambda_param ',' a=lambda_param " -"{ RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, \"arguments cannot follow var-keyword " -"argument\") }\n" -" | '**' lambda_param ',' a[Token*]=('*'|'**'|'/') " -"{ RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, \"arguments cannot follow var-keyword " -"argument\") }\n" -"invalid_double_type_comments:\n" -" | TYPE_COMMENT NEWLINE TYPE_COMMENT NEWLINE INDENT {\n" -" RAISE_SYNTAX_ERROR(\"Cannot have two type comments on def\") }\n" -"invalid_with_item:\n" -" | expression 'as' a=expression &(',' | ')' | ':') {\n" -" RAISE_SYNTAX_ERROR_INVALID_TARGET(STAR_TARGETS, a) }\n" -"\n" -"invalid_for_if_clause:\n" -" | 'async'? 'for' (bitwise_or (',' bitwise_or)* [',']) !'in' {\n" -" RAISE_SYNTAX_ERROR(\"'in' expected after for-loop variables\") }\n" -"\n" -"invalid_for_target:\n" -" | 'async'? 
'for' a=star_expressions {\n" -" RAISE_SYNTAX_ERROR_INVALID_TARGET(FOR_TARGETS, a) }\n" -"\n" -"invalid_group:\n" -" | '(' a=starred_expression ')' {\n" -" RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, \"cannot use starred expression " -"here\") }\n" -" | '(' a='**' expression ')' {\n" -" RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, \"cannot use double starred " -"expression here\") }\n" -"invalid_import:\n" -" | a='import' ','.dotted_name+ 'from' dotted_name {\n" -" RAISE_SYNTAX_ERROR_STARTING_FROM(a, \"Did you mean to use 'from ... " -"import ...' instead?\") }\n" -" | 'import' token=NEWLINE {\n" -" RAISE_SYNTAX_ERROR_STARTING_FROM(token, \"Expected one or more names " -"after 'import'\") }\n" -"invalid_dotted_as_name:\n" -" | dotted_name 'as' !(NAME (',' | ')' | NEWLINE)) a=expression {\n" -" RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, \n" -" \"cannot use %s as import target\", " -"_PyPegen_get_expr_name(a)) }\n" -"invalid_import_from_as_name:\n" -" | NAME 'as' !(NAME (',' | ')' | NEWLINE)) a=expression {\n" -" RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, \n" -" \"cannot use %s as import target\", " -"_PyPegen_get_expr_name(a)) }\n" -"\n" -"invalid_import_from_targets:\n" -" | import_from_as_names ',' NEWLINE {\n" -" RAISE_SYNTAX_ERROR(\"trailing comma not allowed without surrounding " -"parentheses\") }\n" -" | token=NEWLINE {\n" -" RAISE_SYNTAX_ERROR_STARTING_FROM(token, \"Expected one or more names " -"after 'import'\") }\n" -"\n" -"invalid_with_stmt:\n" -" | ['async'] 'with' ','.(expression ['as' star_target])+ NEWLINE " -"{ RAISE_SYNTAX_ERROR(\"expected ':'\") }\n" -" | ['async'] 'with' '(' ','.(expressions ['as' star_target])+ ','? ')' " -"NEWLINE { RAISE_SYNTAX_ERROR(\"expected ':'\") }\n" -"invalid_with_stmt_indent:\n" -" | ['async'] a='with' ','.(expression ['as' star_target])+ ':' NEWLINE !" 
-"INDENT {\n" -" RAISE_INDENTATION_ERROR(\"expected an indented block after 'with' " -"statement on line %d\", a->lineno) }\n" -" | ['async'] a='with' '(' ','.(expressions ['as' star_target])+ ','? ')' " -"':' NEWLINE !INDENT {\n" -" RAISE_INDENTATION_ERROR(\"expected an indented block after 'with' " -"statement on line %d\", a->lineno) }\n" -"\n" -"invalid_try_stmt:\n" -" | a='try' ':' NEWLINE !INDENT {\n" -" RAISE_INDENTATION_ERROR(\"expected an indented block after 'try' " -"statement on line %d\", a->lineno) }\n" -" | 'try' ':' block !('except' | 'finally') " -"{ RAISE_SYNTAX_ERROR(\"expected 'except' or 'finally' block\") }\n" -" | 'try' ':' block* except_block+ a='except' b='*' expression ['as' NAME] " -"':' {\n" -" RAISE_SYNTAX_ERROR_KNOWN_RANGE(a, b, \"cannot have both 'except' and " -"'except*' on the same 'try'\") }\n" -" | 'try' ':' block* except_star_block+ a='except' [expression ['as' " -"NAME]] ':' {\n" -" RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, \"cannot have both 'except' and " -"'except*' on the same 'try'\") }\n" -"invalid_except_stmt:\n" -" | 'except' a=expression ',' expressions 'as' NAME ':' {\n" -" RAISE_SYNTAX_ERROR_STARTING_FROM(a, \"multiple exception types must " -"be parenthesized when using 'as'\") }\n" -" | a='except' expression ['as' NAME ] NEWLINE " -"{ RAISE_SYNTAX_ERROR(\"expected ':'\") }\n" -" | a='except' NEWLINE { RAISE_SYNTAX_ERROR(\"expected ':'\") }\n" -" | 'except' expression 'as' a=expression {\n" -" RAISE_SYNTAX_ERROR_KNOWN_LOCATION(\n" -" a, \"cannot use except statement with %s\", " -"_PyPegen_get_expr_name(a)) }\n" -"invalid_except_star_stmt:\n" -" | 'except' '*' a=expression ',' expressions 'as' NAME ':' {\n" -" RAISE_SYNTAX_ERROR_STARTING_FROM(a, \"multiple exception types must " -"be parenthesized when using 'as'\") }\n" -" | a='except' '*' expression ['as' NAME ] NEWLINE " -"{ RAISE_SYNTAX_ERROR(\"expected ':'\") }\n" -" | a='except' '*' (NEWLINE | ':') { RAISE_SYNTAX_ERROR(\"expected one or " -"more exception types\") 
}\n" -" | 'except' '*' expression 'as' a=expression {\n" -" RAISE_SYNTAX_ERROR_KNOWN_LOCATION(\n" -" a, \"cannot use except* statement with %s\", " -"_PyPegen_get_expr_name(a)) }\n" -"invalid_finally_stmt:\n" -" | a='finally' ':' NEWLINE !INDENT {\n" -" RAISE_INDENTATION_ERROR(\"expected an indented block after 'finally' " -"statement on line %d\", a->lineno) }\n" -"invalid_except_stmt_indent:\n" -" | a='except' expression ['as' NAME ] ':' NEWLINE !INDENT {\n" -" RAISE_INDENTATION_ERROR(\"expected an indented block after 'except' " -"statement on line %d\", a->lineno) }\n" -" | a='except' ':' NEWLINE !INDENT { RAISE_INDENTATION_ERROR(\"expected an " -"indented block after 'except' statement on line %d\", a->lineno) }\n" -"invalid_except_star_stmt_indent:\n" -" | a='except' '*' expression ['as' NAME ] ':' NEWLINE !INDENT {\n" -" RAISE_INDENTATION_ERROR(\"expected an indented block after 'except*' " -"statement on line %d\", a->lineno) }\n" -"invalid_match_stmt:\n" -" | \"match\" subject_expr NEWLINE { CHECK_VERSION(void*, 10, \"Pattern " -"matching is\", RAISE_SYNTAX_ERROR(\"expected ':'\") ) }\n" -" | a=\"match\" subject=subject_expr ':' NEWLINE !INDENT {\n" -" RAISE_INDENTATION_ERROR(\"expected an indented block after 'match' " -"statement on line %d\", a->lineno) }\n" -"invalid_case_block:\n" -" | \"case\" patterns guard? NEWLINE { RAISE_SYNTAX_ERROR(\"expected " -"':'\") }\n" -" | a=\"case\" patterns guard? 
':' NEWLINE !INDENT {\n" -" RAISE_INDENTATION_ERROR(\"expected an indented block after 'case' " -"statement on line %d\", a->lineno) }\n" -"invalid_as_pattern:\n" -" | or_pattern 'as' a=\"_\" { RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, " -"\"cannot use '_' as a target\") }\n" -" | or_pattern 'as' a=expression {\n" -" RAISE_SYNTAX_ERROR_KNOWN_LOCATION(\n" -" a, \"cannot use %s as pattern target\", " -"_PyPegen_get_expr_name(a)) }\n" -"invalid_class_pattern:\n" -" | name_or_attr '(' a=invalid_class_argument_pattern " -"{ RAISE_SYNTAX_ERROR_KNOWN_RANGE(\n" -" PyPegen_first_item(a, pattern_ty),\n" -" PyPegen_last_item(a, pattern_ty),\n" -" \"positional patterns follow keyword patterns\") }\n" -"invalid_class_argument_pattern[asdl_pattern_seq*]:\n" -" | [positional_patterns ','] keyword_patterns ',' a=positional_patterns " -"{ a }\n" -"invalid_if_stmt:\n" -" | 'if' named_expression NEWLINE { RAISE_SYNTAX_ERROR(\"expected " -"':'\") }\n" -" | a='if' a=named_expression ':' NEWLINE !INDENT {\n" -" RAISE_INDENTATION_ERROR(\"expected an indented block after 'if' " -"statement on line %d\", a->lineno) }\n" -"invalid_elif_stmt:\n" -" | 'elif' named_expression NEWLINE { RAISE_SYNTAX_ERROR(\"expected " -"':'\") }\n" -" | a='elif' named_expression ':' NEWLINE !INDENT {\n" -" RAISE_INDENTATION_ERROR(\"expected an indented block after 'elif' " -"statement on line %d\", a->lineno) }\n" -"invalid_else_stmt:\n" -" | a='else' ':' NEWLINE !INDENT {\n" -" RAISE_INDENTATION_ERROR(\"expected an indented block after 'else' " -"statement on line %d\", a->lineno) }\n" -" | 'else' ':' block 'elif' { RAISE_SYNTAX_ERROR(\"'elif' block follows an " -"'else' block\")}\n" -"invalid_while_stmt:\n" -" | 'while' named_expression NEWLINE { RAISE_SYNTAX_ERROR(\"expected " -"':'\") }\n" -" | a='while' named_expression ':' NEWLINE !INDENT {\n" -" RAISE_INDENTATION_ERROR(\"expected an indented block after 'while' " -"statement on line %d\", a->lineno) }\n" -"invalid_for_stmt:\n" -" | ['async'] 'for' 
star_targets 'in' star_expressions NEWLINE " -"{ RAISE_SYNTAX_ERROR(\"expected ':'\") }\n" -" | ['async'] a='for' star_targets 'in' star_expressions ':' NEWLINE !" -"INDENT {\n" -" RAISE_INDENTATION_ERROR(\"expected an indented block after 'for' " -"statement on line %d\", a->lineno) }\n" -"invalid_def_raw:\n" -" | ['async'] a='def' NAME [type_params] '(' [params] ')' ['->' " -"expression] ':' NEWLINE !INDENT {\n" -" RAISE_INDENTATION_ERROR(\"expected an indented block after function " -"definition on line %d\", a->lineno) }\n" -" | ['async'] 'def' NAME [type_params] &&'(' [params] ')' ['->' " -"expression] &&':' [func_type_comment] block\n" -"invalid_class_def_raw:\n" -" | 'class' NAME [type_params] ['(' [arguments] ')'] NEWLINE " -"{ RAISE_SYNTAX_ERROR(\"expected ':'\") }\n" -" | a='class' NAME [type_params] ['(' [arguments] ')'] ':' NEWLINE !INDENT " -"{\n" -" RAISE_INDENTATION_ERROR(\"expected an indented block after class " -"definition on line %d\", a->lineno) }\n" -"\n" -"invalid_double_starred_kvpairs:\n" -" | ','.double_starred_kvpair+ ',' invalid_kvpair\n" -" | expression ':' a='*' bitwise_or { RAISE_SYNTAX_ERROR_STARTING_FROM(a, " -"\"cannot use a starred expression in a dictionary value\") }\n" -" | expression a=':' &('}'|',') { RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, " -"\"expression expected after dictionary key and ':'\") }\n" -"invalid_kvpair:\n" -" | a=expression !(':') {\n" -" RAISE_ERROR_KNOWN_LOCATION(p, PyExc_SyntaxError, a->lineno, a-" -">end_col_offset - 1, a->end_lineno, -1, \"':' expected after dictionary " -"key\") }\n" -" | expression ':' a='*' bitwise_or { RAISE_SYNTAX_ERROR_STARTING_FROM(a, " -"\"cannot use a starred expression in a dictionary value\") }\n" -" | expression a=':' &('}'|',') {RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, " -"\"expression expected after dictionary key and ':'\") }\n" -"invalid_starred_expression_unpacking:\n" -" | a='*' expression '=' b=expression { RAISE_SYNTAX_ERROR_KNOWN_RANGE(a, " -"b, \"cannot assign to iterable 
argument unpacking\") }\n" -"invalid_starred_expression:\n" -" | '*' { RAISE_SYNTAX_ERROR(\"Invalid star expression\") }\n" -"\n" -"invalid_fstring_replacement_field:\n" -" | '{' a='=' { RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, \"f-string: valid " -"expression required before '='\") }\n" -" | '{' a='!' { RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, \"f-string: valid " -"expression required before '!'\") }\n" -" | '{' a=':' { RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, \"f-string: valid " -"expression required before ':'\") }\n" -" | '{' a='}' { RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, \"f-string: valid " -"expression required before '}'\") }\n" -" | '{' !annotated_rhs { RAISE_SYNTAX_ERROR_ON_NEXT_TOKEN(\"f-string: " -"expecting a valid expression after '{'\") }\n" -" | '{' annotated_rhs !('=' | '!' | ':' | '}') {\n" -" PyErr_Occurred() ? NULL : RAISE_SYNTAX_ERROR_ON_NEXT_TOKEN(\"f-" -"string: expecting '=', or '!', or ':', or '}'\") }\n" -" | '{' annotated_rhs '=' !('!' | ':' | '}') {\n" -" PyErr_Occurred() ? NULL : RAISE_SYNTAX_ERROR_ON_NEXT_TOKEN(\"f-" -"string: expecting '!', or ':', or '}'\") }\n" -" | '{' annotated_rhs '='? invalid_fstring_conversion_character\n" -" | '{' annotated_rhs '='? ['!' NAME] !(':' | '}') {\n" -" PyErr_Occurred() ? NULL : RAISE_SYNTAX_ERROR_ON_NEXT_TOKEN(\"f-" -"string: expecting ':' or '}'\") }\n" -" | '{' annotated_rhs '='? ['!' NAME] ':' fstring_format_spec* !'}' {\n" -" PyErr_Occurred() ? NULL : RAISE_SYNTAX_ERROR_ON_NEXT_TOKEN(\"f-" -"string: expecting '}', or format specs\") }\n" -" | '{' annotated_rhs '='? ['!' NAME] !'}' {\n" -" PyErr_Occurred() ? NULL : RAISE_SYNTAX_ERROR_ON_NEXT_TOKEN(\"f-" -"string: expecting '}'\") }\n" -"\n" -"invalid_fstring_conversion_character:\n" -" | '!' &(':' | '}') { RAISE_SYNTAX_ERROR_ON_NEXT_TOKEN(\"f-string: " -"missing conversion character\") }\n" -" | '!' 
!NAME { RAISE_SYNTAX_ERROR_ON_NEXT_TOKEN(\"f-string: invalid " -"conversion character\") }\n" -"\n" -"invalid_tstring_replacement_field:\n" -" | '{' a='=' { RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, \"t-string: valid " -"expression required before '='\") }\n" -" | '{' a='!' { RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, \"t-string: valid " -"expression required before '!'\") }\n" -" | '{' a=':' { RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, \"t-string: valid " -"expression required before ':'\") }\n" -" | '{' a='}' { RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, \"t-string: valid " -"expression required before '}'\") }\n" -" | '{' !annotated_rhs { RAISE_SYNTAX_ERROR_ON_NEXT_TOKEN(\"t-string: " -"expecting a valid expression after '{'\") }\n" -" | '{' annotated_rhs !('=' | '!' | ':' | '}') {\n" -" PyErr_Occurred() ? NULL : RAISE_SYNTAX_ERROR_ON_NEXT_TOKEN(\"t-" -"string: expecting '=', or '!', or ':', or '}'\") }\n" -" | '{' annotated_rhs '=' !('!' | ':' | '}') {\n" -" PyErr_Occurred() ? NULL : RAISE_SYNTAX_ERROR_ON_NEXT_TOKEN(\"t-" -"string: expecting '!', or ':', or '}'\") }\n" -" | '{' annotated_rhs '='? invalid_tstring_conversion_character\n" -" | '{' annotated_rhs '='? ['!' NAME] !(':' | '}') {\n" -" PyErr_Occurred() ? NULL : RAISE_SYNTAX_ERROR_ON_NEXT_TOKEN(\"t-" -"string: expecting ':' or '}'\") }\n" -" | '{' annotated_rhs '='? ['!' NAME] ':' fstring_format_spec* !'}' {\n" -" PyErr_Occurred() ? NULL : RAISE_SYNTAX_ERROR_ON_NEXT_TOKEN(\"t-" -"string: expecting '}', or format specs\") }\n" -" | '{' annotated_rhs '='? ['!' NAME] !'}' {\n" -" PyErr_Occurred() ? NULL : RAISE_SYNTAX_ERROR_ON_NEXT_TOKEN(\"t-" -"string: expecting '}'\") }\n" -"\n" -"invalid_tstring_conversion_character:\n" -" | '!' &(':' | '}') { RAISE_SYNTAX_ERROR_ON_NEXT_TOKEN(\"t-string: " -"missing conversion character\") }\n" -" | '!' 
!NAME { RAISE_SYNTAX_ERROR_ON_NEXT_TOKEN(\"t-string: invalid " -"conversion character\") }\n" -"\n" -"invalid_arithmetic:\n" -" | sum ('+'|'-'|'*'|'/'|'%'|'//'|'@') a='not' b=inversion " -"{ RAISE_SYNTAX_ERROR_KNOWN_RANGE(a, b, \"'not' after an operator must be " -"parenthesized\") }\n" -"invalid_factor:\n" -" | ('+' | '-' | '~') a='not' b=factor { RAISE_SYNTAX_ERROR_KNOWN_RANGE(a, " -"b, \"'not' after an operator must be parenthesized\") }\n" -"\n" -"invalid_type_params:\n" -" | '[' token=']' {\n" -" RAISE_SYNTAX_ERROR_STARTING_FROM(\n" -" token,\n" -" \"Type parameter list cannot be empty\")}\n" diff --git a/using/windows.po b/using/windows.po index 31b3be0fc..a60e748c0 100644 --- a/using/windows.po +++ b/using/windows.po @@ -14,7 +14,7 @@ msgid "" msgstr "" "Project-Id-Version: Python 3.14\n" "Report-Msgid-Bugs-To: \n" -"POT-Creation-Date: 2025-05-09 14:19+0000\n" +"POT-Creation-Date: 2025-05-16 14:19+0000\n" "PO-Revision-Date: 2021-06-28 01:51+0000\n" "Last-Translator: Stan Ulbrych, 2025\n" "Language-Team: Polish (https://app.transifex.com/python-doc/teams/5390/pl/)\n" @@ -112,17 +112,10 @@ msgstr "" msgid "" "After installation, the ``python``, ``py``, and ``pymanager`` commands " -"should be available. If they are not, click Start and search for \"Manage " -"app execution aliases\". This settings page will let you enable the relevant " -"commands. They will be labelled \"Python (default)\", \"Python (default " -"windowed)\", and \"Python install manager\"." -msgstr "" - -msgid "" -"If you have existing installations of Python, or you have modified your :" -"envvar:`PATH` variable, you may need to remove them or undo the " -"modifications in order for the commands to work. Old versions of Python can " -"be reinstalled using the Python install manager." +"should be available. If you have existing installations of Python, or you " +"have modified your :envvar:`PATH` variable, you may need to remove them or " +"undo the modifications. 
See :ref:`pymanager-troubleshoot` for more help with " +"fixing non-working commands." msgstr "" msgid "" @@ -130,9 +123,9 @@ msgid "" "directory to your :envvar:`PATH`. This is optional, if you prefer to use the " "``py`` command, but is offered for those who prefer the full range of " "aliases (such as ``python3.14.exe``) to be available. The directory will be :" -"file:`%LocalAppData%\\Python\\bin` by default, but may be customized by an " -"administrator. Click Start and search for \"Edit environment variables for " -"your account\" for the system settings page to add the path." +"file:`%LocalAppData%\\\\Python\\\\bin` by default, but may be customized by " +"an administrator. Click Start and search for \"Edit environment variables " +"for your account\" for the system settings page to add the path." msgstr "" msgid "" @@ -148,8 +141,9 @@ msgstr "" msgid "" "If you are not able to install an MSIX in your context, for example, you are " -"using automated deployment software that does not support it, please see :" -"ref:`pymanager-advancedinstall` below for more information." +"using automated deployment software that does not support it, or are " +"targeting Windows Server 2019, please see :ref:`pymanager-advancedinstall` " +"below for more information." msgstr "" msgid "Basic Use" @@ -207,6 +201,12 @@ msgid "" "..." msgstr "" +msgid "" +"The default runtime can be overridden with the :envvar:" +"`PYTHON_MANAGER_DEFAULT` environment variable, or a configuration file. See :" +"ref:`pymanager-config` for information about configuration settings." +msgstr "" + msgid "" "To launch a specific runtime, the ``py`` command accepts a ``-V:`` " "option. This option must be specified before any others. The tag is part or " @@ -680,6 +680,12 @@ msgid "" "to select a particular runtime." msgstr "" +msgid "" +"If no runtimes are installed, or if automatic installation is enabled, the " +"requested runtime will be installed if necessary. 
See :ref:`pymanager-" +"config` for information about configuration settings." +msgstr "" + msgid "" "The ``/usr/bin/env`` form of shebang line will also search the :envvar:" "`PATH` environment variable for unrecognized commands. This corresponds to " @@ -713,6 +719,11 @@ msgid "" "configuration." msgstr "" +msgid "" +"Windows Server 2019 is the only version of Windows that CPython supports " +"that does not support MSIX. For Windows Server 2019, you should use the MSI." +msgstr "" + msgid "" "Be aware that the MSI package does not bundle any runtimes, and so is not " "suitable for installs into offline environments without also creating an " @@ -741,11 +752,38 @@ msgid "" msgstr "" msgid "" -"To programmatically install or uninstall the MSIX without using your " -"distribution platform's native support, the `Add-AppxPackage `_ and `Remove-" -"AppxPackage `_ PowerShell cmdlets are simplest to use:" +"To programmatically install the Python install manager, it is easiest to use " +"WinGet, which is included with all supported versions of Windows:" +msgstr "" + +msgid "" +"$> winget install 9NQ7512CXL7T -e --accept-package-agreements --disable-" +"interactivity\n" +"\n" +"# Optionally run the configuration checker and accept all changes\n" +"$> py install --configure -y" +msgstr "" + +msgid "" +"To download the Python install manager and install on another machine, the " +"following WinGet command will download the required files from the Store to " +"your Downloads directory (add ``-d `` to customize the output " +"location). This also generates a YAML file that appears to be unnecessary, " +"as the downloaded MSIX can be installed by launching or using the commands " +"below." 
+msgstr "" + +msgid "" +"$> winget download 9NQ7512CXL7T -e --skip-license --accept-package-" +"agreements --disable-interactivity" +msgstr "" + +msgid "" +"To programmatically install or uninstall an MSIX using only PowerShell, the " +"`Add-AppxPackage `_ and `Remove-AppxPackage `_ PowerShell cmdlets are " +"recommended:" msgstr "" msgid "" @@ -766,6 +804,13 @@ msgid "" "those installs without being a logged in user." msgstr "" +msgid "" +"Note that the MSIX downloadable from the Store and from the Python website " +"are subtly different and cannot be installed at the same time. Wherever " +"possible, we suggest using the above commands to download the package from " +"the Store to reduce the risk of setting up conflicting installs." +msgstr "" + msgid "Administrative Configuration" msgstr "" diff --git a/whatsnew/3.12.po b/whatsnew/3.12.po index fdc1f9fd0..7b74b92c5 100644 --- a/whatsnew/3.12.po +++ b/whatsnew/3.12.po @@ -14,7 +14,7 @@ msgid "" msgstr "" "Project-Id-Version: Python 3.14\n" "Report-Msgid-Bugs-To: \n" -"POT-Creation-Date: 2025-05-09 14:19+0000\n" +"POT-Creation-Date: 2025-05-16 14:19+0000\n" "PO-Revision-Date: 2023-05-24 13:08+0000\n" "Last-Translator: Stan Ulbrych, 2025\n" "Language-Team: Polish (https://app.transifex.com/python-doc/teams/5390/pl/)\n" @@ -2000,6 +2000,14 @@ msgid "" "or the functional syntax instead." msgstr "" +msgid "" +"When using the functional syntax of :class:`~typing.TypedDict`\\s, failing " +"to pass a value to the *fields* parameter (``TD = TypedDict(\"TD\")``) or " +"passing ``None`` (``TD = TypedDict(\"TD\", None)``) has been deprecated " +"since Python 3.13. Use ``class TD(TypedDict): pass`` or ``TD = " +"TypedDict(\"TD\", {})`` to create a TypedDict with zero field." +msgstr "" + msgid "" "The :func:`typing.no_type_check_decorator` decorator function has been " "deprecated since Python 3.13. 
After eight years in the :mod:`typing` module, " diff --git a/whatsnew/3.13.po b/whatsnew/3.13.po index 4d4c92c1d..453b3dc4d 100644 --- a/whatsnew/3.13.po +++ b/whatsnew/3.13.po @@ -14,7 +14,7 @@ msgid "" msgstr "" "Project-Id-Version: Python 3.14\n" "Report-Msgid-Bugs-To: \n" -"POT-Creation-Date: 2025-05-09 14:19+0000\n" +"POT-Creation-Date: 2025-05-16 14:19+0000\n" "PO-Revision-Date: 2024-05-11 01:09+0000\n" "Last-Translator: Stan Ulbrych, 2025\n" "Language-Team: Polish (https://app.transifex.com/python-doc/teams/5390/pl/)\n" @@ -2747,6 +2747,14 @@ msgid "" "or the functional syntax instead." msgstr "" +msgid "" +"When using the functional syntax of :class:`~typing.TypedDict`\\s, failing " +"to pass a value to the *fields* parameter (``TD = TypedDict(\"TD\")``) or " +"passing ``None`` (``TD = TypedDict(\"TD\", None)``) has been deprecated " +"since Python 3.13. Use ``class TD(TypedDict): pass`` or ``TD = " +"TypedDict(\"TD\", {})`` to create a TypedDict with zero field." +msgstr "" + msgid "" "The :func:`typing.no_type_check_decorator` decorator function has been " "deprecated since Python 3.13. After eight years in the :mod:`typing` module, " diff --git a/whatsnew/3.14.po b/whatsnew/3.14.po index f7ed8423f..9de857fb6 100644 --- a/whatsnew/3.14.po +++ b/whatsnew/3.14.po @@ -14,7 +14,7 @@ msgid "" msgstr "" "Project-Id-Version: Python 3.14\n" "Report-Msgid-Bugs-To: \n" -"POT-Creation-Date: 2025-05-08 02:53-0300\n" +"POT-Creation-Date: 2025-05-16 14:19+0000\n" "PO-Revision-Date: 2025-05-08 06:05+0000\n" "Last-Translator: Stan Ulbrych, 2025\n" "Language-Team: Polish (https://app.transifex.com/python-doc/teams/5390/pl/)\n" @@ -74,7 +74,9 @@ msgid "" "and improvements in user-friendliness and correctness." 
msgstr "" -msgid ":ref:`PEP 649: deferred evaluation of annotations `" +msgid "" +":ref:`PEP 649 and 749: deferred evaluation of annotations `" msgstr "" msgid ":ref:`PEP 741: Python Configuration C API `" @@ -96,6 +98,10 @@ msgid "" "`" msgstr "" +msgid "" +":ref:`Free-threaded mode improvements `" +msgstr "" + msgid "" ":ref:`PEP 768: Safe external debugger interface for CPython `" @@ -186,18 +192,18 @@ msgstr "" msgid "" "attributes = {\"src\": \"shrubbery.jpg\", \"alt\": \"looks nice\"}\n" -"template = t\"\"\n" +"template = t\"\"\n" "assert html(template) == '\"looks'" +"class=\"looks-nice\">'" msgstr "" msgid "" -"Unlike f-strings, the ``html`` function has access to template attributes " -"containing the original information: static strings, interpolations, and " -"values from the original scope. Unlike existing templating approaches, t-" -"strings build from the well-known f-string syntax and rules. Template " -"systems thus benefit from Python tooling as they are much closer to the " -"Python language, syntax, scoping, and more." +"Compared to using an f-string, the ``html`` function has access to template " +"attributes containing the original information: static strings, " +"interpolations, and values from the original scope. Unlike existing " +"templating approaches, t-strings build from the well-known f-string syntax " +"and rules. Template systems thus benefit from Python tooling as they are " +"much closer to the Python language, syntax, scoping, and more." msgstr "" msgid "Writing template handlers is straightforward:" @@ -437,22 +443,23 @@ msgstr "" msgid ":pep:`758`." msgstr "" -msgid "PEP 649: deferred evaluation of annotations" +msgid "PEP 649 and 749: deferred evaluation of annotations" msgstr "" msgid "" "The :term:`annotations ` on functions, classes, and modules are " "no longer evaluated eagerly. Instead, annotations are stored in special-" "purpose :term:`annotate functions ` and evaluated only " -"when necessary. 
This is specified in :pep:`649` and :pep:`749`." +"when necessary (except if ``from __future__ import annotations`` is used). " +"This is specified in :pep:`649` and :pep:`749`." msgstr "" msgid "" "This change is designed to make annotations in Python more performant and " "more usable in most circumstances. The runtime cost for defining annotations " "is minimized, but it remains possible to introspect annotations at runtime. " -"It is usually no longer necessary to enclose annotations in strings if they " -"contain forward references." +"It is no longer necessary to enclose annotations in strings if they contain " +"forward references." msgstr "" msgid "" @@ -495,9 +502,10 @@ msgid "" "You will likely be able to remove quoted strings in annotations, which are " "frequently used for forward references. Similarly, if you use ``from " "__future__ import annotations`` to avoid having to write strings in " -"annotations, you may well be able to remove that import. However, if you " -"rely on third-party libraries that read annotations, those libraries may " -"need changes to support unquoted annotations before they work as expected." +"annotations, you may well be able to remove that import once you support " +"only Python 3.14 and newer. However, if you rely on third-party libraries " +"that read annotations, those libraries may need changes to support unquoted " +"annotations before they work as expected." msgstr "" msgid "Implications for readers of ``__annotations__``" @@ -511,6 +519,14 @@ msgid "" "FORWARDREF` format, as the :mod:`dataclasses` module now does." msgstr "" +msgid "" +"The external :pypi:`typing_extensions` package provides partial backports of " +"some of the functionality of the :mod:`annotationlib` module, such as the :" +"class:`~annotationlib.Format` enum and the :func:`~annotationlib." +"get_annotations` function. These can be used to write cross-version code " +"that takes advantage of the new behavior in Python 3.14." 
+msgstr "" + msgid "Related changes" msgstr "" @@ -524,6 +540,13 @@ msgid "" "use only the documented functionality of the :mod:`annotationlib` module." msgstr "" +msgid "" +"In particular, do not read annotations directly from the namespace " +"dictionary attribute of type objects. Use :func:`annotationlib." +"get_annotate_from_class_namespace` during class construction and :func:" +"`annotationlib.get_annotations` afterwards." +msgstr "" + msgid "``from __future__ import annotations``" msgstr "" @@ -537,6 +560,14 @@ msgid "" "of code using ``from __future__ import annotations`` is unchanged." msgstr "" +msgid "" +"(Contributed by Jelle Zijlstra in :gh:`119180`; :pep:`649` was written by " +"Larry Hastings.)" +msgstr "" + +msgid ":pep:`649` and :pep:`749`." +msgstr "" + msgid "Improved error messages" msgstr "" @@ -699,7 +730,28 @@ msgid "" "SyntaxError: cannot use subscript as import target" msgstr "" -msgid ":pep:`649`." +msgid "" +"Improved error message when trying to add an instance of an unhashable type " +"to a :class:`dict` or :class:`set`. (Contributed by CF Bolz-Tereick and " +"Victor Stinner in :gh:`132828`.)" +msgstr "" + +msgid "" +">>> s = set()\n" +">>> s.add({'pages': 12, 'grade': 'A'})\n" +"Traceback (most recent call last):\n" +" File \"\", line 1, in \n" +" s.add({'pages': 12, 'grade': 'A'})\n" +" ~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n" +"TypeError: cannot use 'dict' as a set element (unhashable type: 'dict')\n" +">>> d = {}\n" +">>> l = [1, 2, 3]\n" +">>> d[l] = 12\n" +"Traceback (most recent call last):\n" +" File \"\", line 1, in \n" +" d[l] = 12\n" +" ~^^^\n" +"TypeError: cannot use 'list' as a dict key (unhashable type: 'list')" msgstr "" msgid "PEP 741: Python Configuration C API" @@ -922,6 +974,32 @@ msgid "" "in CPython by Mark Shannon, Garrett Gu, Haoran Xu, and Josh Haberman.)" msgstr "" +msgid "Free-threaded mode" +msgstr "" + +msgid "" +"Free-threaded mode (:pep:`703`), initially added in 3.13, has been " +"significantly improved. 
The implementation described in PEP 703 was " +"finished, including C API changes, and temporary workarounds in the " +"interpreter were replaced with more permanent solutions. The specializing " +"adaptive interpreter (:pep:`659`) is now enabled in free-threaded mode, " +"which along with many other optimizations greatly improves its performance. " +"The performance penalty on single-threaded code in free-threaded mode is now " +"roughly 5-10%, depending on platform and C compiler used." +msgstr "" + +msgid "" +"This work was done by many contributors: Sam Gross, Matt Page, Neil " +"Schemenauer, Thomas Wouters, Donghee Na, Kirill Podoprigora, Ken Jin, Itamar " +"Oren, Brett Simmers, Dino Viehland, Nathan Goldbaum, Ralf Gommers, Lysandros " +"Nikolaou, Kumar Aditya, Edgar Margffoy, and many others." +msgstr "" + +msgid "" +"Some of these contributors are employed by Meta, which has continued to " +"provide significant engineering resources to support this project." +msgstr "" + msgid "Syntax highlighting in PyREPL" msgstr "" @@ -2783,6 +2861,14 @@ msgid "" "or the functional syntax instead." msgstr "" +msgid "" +"When using the functional syntax of :class:`~typing.TypedDict`\\s, failing " +"to pass a value to the *fields* parameter (``TD = TypedDict(\"TD\")``) or " +"passing ``None`` (``TD = TypedDict(\"TD\", None)``) has been deprecated " +"since Python 3.13. Use ``class TD(TypedDict): pass`` or ``TD = " +"TypedDict(\"TD\", {})`` to create a TypedDict with zero field." +msgstr "" + msgid "" "The :func:`typing.no_type_check_decorator` decorator function has been " "deprecated since Python 3.13. After eight years in the :mod:`typing` module, "