Merge branch 'main' into main

Todd Sankey 2023-01-02 07:32:52 -08:00 committed by GitHub
commit 20390f635f
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
63 changed files with 770 additions and 1012 deletions

View File

@ -11,11 +11,6 @@ on:
schedule:
- cron: 0 0 * * MON # Run every Monday at 00:00 UTC
env:
# The "FORCE_COLOR" variable, when set to 1,
# tells Nox to colorize itself.
FORCE_COLOR: "1"
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.sha }}
cancel-in-progress: true

View File

@ -0,0 +1,28 @@
name: Update documentation redirects
on:
push:
branches: [main]
schedule:
- cron: 0 0 * * MON # Run every Monday at 00:00 UTC
env:
FORCE_COLOR: "1"
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.sha }}
cancel-in-progress: true
jobs:
update-rtd-redirects:
runs-on: ubuntu-latest
environment: RTD Deploys
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: "3.11"
- run: pip install httpx pyyaml rich
- run: python tools/update-rtd-redirects.py
env:
RTD_API_TOKEN: ${{ secrets.RTD_API_TOKEN }}
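The workflow above installs `httpx`, `pyyaml` and `rich`, then runs `tools/update-rtd-redirects.py` against the Read the Docs API. A minimal sketch of what such a script could look like follows; it assumes the Read the Docs v3 redirects endpoint and the `.readthedocs-custom-redirects.yml` mapping added later in this commit, and is an illustration rather than pip's actual tool.

```python
# Hypothetical sketch of tools/update-rtd-redirects.py, not pip's actual script.
# Assumes the Read the Docs v3 redirects API and a YAML file mapping source
# paths to destination URLs (see .readthedocs-custom-redirects.yml below).
import os

import httpx
import yaml
from rich import print

RTD_API = "https://readthedocs.org/api/v3/projects/pip/redirects/"  # assumed endpoint
HEADERS = {"Authorization": f"Token {os.environ['RTD_API_TOKEN']}"}

with open(".readthedocs-custom-redirects.yml") as f:
    redirects = yaml.safe_load(f)  # e.g. {"/dev/news-entry-failure": "https://..."}

for from_url, to_url in redirects.items():
    print(f"Ensuring redirect: {from_url} -> {to_url}")
    response = httpx.post(
        RTD_API,
        headers=HEADERS,
        json={"from_url": from_url, "to_url": to_url, "type": "exact"},
    )
    response.raise_for_status()
```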

View File

@ -52,6 +52,7 @@ repos:
'types-setuptools==57.4.14',
'types-freezegun==1.1.9',
'types-six==1.16.15',
'types-pyyaml==6.0.12.2',
]
- repo: https://github.com/pre-commit/pygrep-hooks

View File

@ -0,0 +1,15 @@
# This file is read by tools/update-rtd-redirects.py.
# It is related to Read the Docs, but is not a file processed by the platform.
/dev/news-entry-failure: >-
https://pip.pypa.io/en/stable/development/contributing/#news-entries
/errors/resolution-impossible: >-
https://pip.pypa.io/en/latest/topics/dependency-resolution/#dealing-with-dependency-conflicts
/surveys/backtracking: >-
https://forms.gle/LkZP95S4CfqBAU1N6
/warnings/backtracking: >-
https://pip.pypa.io/en/stable/topics/dependency-resolution/#possible-ways-to-reduce-backtracking
/warnings/enable-long-paths: >-
https://docs.microsoft.com/en-us/windows/win32/fileio/maximum-file-path-limitation?tabs=cmd#enable-long-paths-in-windows-10-version-1607-and-later
/warnings/venv: >-
https://docs.python.org/3/tutorial/venv.html

View File

@ -18,6 +18,7 @@ exclude .mailmap
exclude .appveyor.yml
exclude .readthedocs.yml
exclude .pre-commit-config.yaml
exclude .readthedocs-custom-redirects.yml
exclude tox.ini
exclude noxfile.py

View File

@ -130,18 +130,19 @@ VCS source will not overwrite it without an `--upgrade` flag. Further, pip
looks at the package version at the target revision to determine what action to
take on the VCS requirement (not the commit itself).
The {ref}`pip freeze` subcommand will record the VCS requirement specifier
(referencing a specific commit) only if the install is done with the editable
option.
## URL fragments
pip looks at 2 fragments for VCS URLs:
pip looks at the `subdirectory` fragment of VCS URLs for specifying the path to the
Python package when it is not in the root of the VCS directory, e.g. `pkg_dir`.
- `egg`: For specifying the "project name" for use in pip's dependency
resolution logic. eg: `egg=project_name`
- `subdirectory`: For specifying the path to the Python package, when it is not
in the root of the VCS directory. eg: `pkg_dir`
pip also looks at the `egg` fragment specifying the "project name". In practice the
`egg` fragment is only required to help pip determine the VCS clone location in editable
mode. In all other circumstances, the `egg` fragment is not necessary and its use is
discouraged.
The `egg` fragment **should** be a bare
[PEP 508](https://peps.python.org/pep-0508/) project name. Anything else
is not guaranteed to work.
````{admonition} Example
If your repository layout is:
@ -157,6 +158,12 @@ some_other_file
Then, to install from this repository, the syntax would be:
```{pip-cli}
$ pip install "pkg @ vcs+protocol://repo_url/#subdirectory=pkg_dir"
```
or:
```{pip-cli}
$ pip install -e "vcs+protocol://repo_url/#egg=pkg&subdirectory=pkg_dir"
```

3
news/11617.bugfix.rst Normal file
View File

@ -0,0 +1,3 @@
Deprecated a historical ambiguity in how ``egg`` fragments in URL-style
requirements are formatted and handled. ``egg`` fragments that do not look
like PEP 508 names now produce a deprecation warning.

2
news/11675.doc.rst Normal file
View File

@ -0,0 +1,2 @@
Remove mention that editable installs are necessary for pip freeze to report the VCS
URL.

2
news/11676.doc.rst Normal file
View File

@ -0,0 +1,2 @@
Clarify that the ``egg`` URL fragment is only needed for editable VCS installs, and
is otherwise no longer necessary.

1
news/pep517.vendor.rst Normal file
View File

@ -0,0 +1 @@
Remove pep517 from vendored packages

View File

@ -0,0 +1 @@
Add pyproject-hooks 1.0.0

1
news/rich.vendor.rst Normal file
View File

@ -0,0 +1 @@
Upgrade rich to 12.6.0

View File

@ -48,7 +48,7 @@ def create_vendor_txt_map() -> Dict[str, str]:
def get_module_from_module_name(module_name: str) -> ModuleType:
# Module name can be uppercase in vendor.txt for some reason...
module_name = module_name.lower()
module_name = module_name.lower().replace("-", "_")
# PATCH: setuptools is actually only pkg_resources.
if module_name == "setuptools":
module_name = "pkg_resources"

View File

@ -18,6 +18,7 @@ from typing import (
Union,
)
from pip._internal.utils.deprecation import deprecated
from pip._internal.utils.filetypes import WHEEL_EXTENSION
from pip._internal.utils.hashes import Hashes
from pip._internal.utils.misc import (
@ -166,6 +167,7 @@ class Link(KeyBasedCompareMixin):
"dist_info_metadata",
"link_hash",
"cache_link_parsing",
"egg_fragment",
]
def __init__(
@ -229,6 +231,7 @@ class Link(KeyBasedCompareMixin):
super().__init__(key=url, defining_class=Link)
self.cache_link_parsing = cache_link_parsing
self.egg_fragment = self._egg_fragment()
@classmethod
def from_json(
@ -358,12 +361,28 @@ class Link(KeyBasedCompareMixin):
_egg_fragment_re = re.compile(r"[#&]egg=([^&]*)")
@property
def egg_fragment(self) -> Optional[str]:
# Per PEP 508.
_project_name_re = re.compile(
r"^([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])$", re.IGNORECASE
)
def _egg_fragment(self) -> Optional[str]:
match = self._egg_fragment_re.search(self._url)
if not match:
return None
return match.group(1)
# An egg fragment looks like a PEP 508 project name, along with
# an optional extras specifier. Anything else is invalid.
project_name = match.group(1)
if not self._project_name_re.match(project_name):
deprecated(
reason=f"{self} contains an egg fragment with a non-PEP 508 name",
replacement="to use the req @ url syntax, and remove the egg fragment",
gone_in="25.0",
issue=11617,
)
return project_name
_subdirectory_fragment_re = re.compile(r"[#&]subdirectory=([^&]*)")
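For illustration, the new PEP 508 name check above accepts a bare project name in the `egg` fragment and flags anything else for deprecation. The following standalone snippet mirrors the two regexes from this hunk (it is not pip's own code or tests):

```python
# Standalone illustration of the egg-fragment name check; mirrors the regexes
# in the hunk above but is not part of pip.
import re

_egg_fragment_re = re.compile(r"[#&]egg=([^&]*)")
_project_name_re = re.compile(
    r"^([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])$", re.IGNORECASE
)

for url in (
    "git+https://example.com/repo.git#egg=pkg_name",         # bare PEP 508 name: kept
    "git+https://example.com/repo.git#egg=pkg_name[extra]",  # extras: fails the name check
    "git+https://example.com/repo.git#egg=pkg_name==1.0",    # version: fails the name check
):
    name = _egg_fragment_re.search(url).group(1)
    if _project_name_re.match(name):
        print(f"{name!r}: valid egg fragment")
    else:
        print(f"{name!r}: would emit the new deprecation warning")
```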

View File

@ -3,7 +3,7 @@
import os
from pip._vendor.pep517.wrappers import Pep517HookCaller
from pip._vendor.pyproject_hooks import BuildBackendHookCaller
from pip._internal.build_env import BuildEnvironment
from pip._internal.exceptions import (
@ -15,7 +15,7 @@ from pip._internal.utils.temp_dir import TempDirectory
def generate_metadata(
build_env: BuildEnvironment, backend: Pep517HookCaller, details: str
build_env: BuildEnvironment, backend: BuildBackendHookCaller, details: str
) -> str:
"""Generate metadata using mechanisms described in PEP 517.
@ -26,7 +26,7 @@ def generate_metadata(
metadata_dir = metadata_tmpdir.path
with build_env:
# Note that Pep517HookCaller implements a fallback for
# Note that BuildBackendHookCaller implements a fallback for
# prepare_metadata_for_build_wheel, so we don't have to
# consider the possibility that this hook doesn't exist.
runner = runner_with_spinner_message("Preparing metadata (pyproject.toml)")

View File

@ -3,7 +3,7 @@
import os
from pip._vendor.pep517.wrappers import Pep517HookCaller
from pip._vendor.pyproject_hooks import BuildBackendHookCaller
from pip._internal.build_env import BuildEnvironment
from pip._internal.exceptions import (
@ -15,7 +15,7 @@ from pip._internal.utils.temp_dir import TempDirectory
def generate_editable_metadata(
build_env: BuildEnvironment, backend: Pep517HookCaller, details: str
build_env: BuildEnvironment, backend: BuildBackendHookCaller, details: str
) -> str:
"""Generate metadata using mechanisms described in PEP 660.
@ -26,7 +26,7 @@ def generate_editable_metadata(
metadata_dir = metadata_tmpdir.path
with build_env:
# Note that Pep517HookCaller implements a fallback for
# Note that BuildBackendHookCaller implements a fallback for
# prepare_metadata_for_build_wheel/editable, so we don't have to
# consider the possibility that this hook doesn't exist.
runner = runner_with_spinner_message(

View File

@ -2,7 +2,7 @@ import logging
import os
from typing import Optional
from pip._vendor.pep517.wrappers import Pep517HookCaller
from pip._vendor.pyproject_hooks import BuildBackendHookCaller
from pip._internal.utils.subprocess import runner_with_spinner_message
@ -11,7 +11,7 @@ logger = logging.getLogger(__name__)
def build_wheel_pep517(
name: str,
backend: Pep517HookCaller,
backend: BuildBackendHookCaller,
metadata_directory: str,
tempd: str,
) -> Optional[str]:

View File

@ -2,7 +2,7 @@ import logging
import os
from typing import Optional
from pip._vendor.pep517.wrappers import HookMissing, Pep517HookCaller
from pip._vendor.pyproject_hooks import BuildBackendHookCaller, HookMissing
from pip._internal.utils.subprocess import runner_with_spinner_message
@ -11,7 +11,7 @@ logger = logging.getLogger(__name__)
def build_wheel_editable(
name: str,
backend: Pep517HookCaller,
backend: BuildBackendHookCaller,
metadata_directory: str,
tempd: str,
) -> Optional[str]:

View File

@ -18,7 +18,7 @@ from pip._vendor.packaging.specifiers import SpecifierSet
from pip._vendor.packaging.utils import canonicalize_name
from pip._vendor.packaging.version import Version
from pip._vendor.packaging.version import parse as parse_version
from pip._vendor.pep517.wrappers import Pep517HookCaller
from pip._vendor.pyproject_hooks import BuildBackendHookCaller
from pip._internal.build_env import BuildEnvironment, NoOpBuildEnvironment
from pip._internal.exceptions import InstallationError, LegacyInstallFailure
@ -51,7 +51,7 @@ from pip._internal.utils.direct_url_helpers import (
)
from pip._internal.utils.hashes import Hashes
from pip._internal.utils.misc import (
ConfiguredPep517HookCaller,
ConfiguredBuildBackendHookCaller,
ask_path_exists,
backup_dir,
display_path,
@ -173,7 +173,7 @@ class InstallRequirement:
self.requirements_to_check: List[str] = []
# The PEP 517 backend we should use to build the project
self.pep517_backend: Optional[Pep517HookCaller] = None
self.pep517_backend: Optional[BuildBackendHookCaller] = None
# Are we using PEP 517 for this requirement?
# After pyproject.toml has been loaded, the only valid values are True
@ -482,7 +482,7 @@ class InstallRequirement:
requires, backend, check, backend_path = pyproject_toml_data
self.requirements_to_check = check
self.pyproject_requires = requires
self.pep517_backend = ConfiguredPep517HookCaller(
self.pep517_backend = ConfiguredBuildBackendHookCaller(
self,
self.unpacked_source_directory,
backend,

View File

@ -34,7 +34,7 @@ from typing import (
cast,
)
from pip._vendor.pep517 import Pep517HookCaller
from pip._vendor.pyproject_hooks import BuildBackendHookCaller
from pip._vendor.tenacity import retry, stop_after_delay, wait_fixed
from pip import __version__
@ -57,7 +57,7 @@ __all__ = [
"captured_stdout",
"ensure_dir",
"remove_auth_from_url",
"ConfiguredPep517HookCaller",
"ConfiguredBuildBackendHookCaller",
]
@ -635,7 +635,7 @@ def partition(
return filterfalse(pred, t1), filter(pred, t2)
class ConfiguredPep517HookCaller(Pep517HookCaller):
class ConfiguredBuildBackendHookCaller(BuildBackendHookCaller):
def __init__(
self,
config_holder: Any,

View File

@ -239,8 +239,8 @@ def call_subprocess(
def runner_with_spinner_message(message: str) -> Callable[..., None]:
"""Provide a subprocess_runner that shows a spinner message.
Intended for use with for pep517's Pep517HookCaller. Thus, the runner has
an API that matches what's expected by Pep517HookCaller.subprocess_runner.
Intended for use with BuildBackendHookCaller. Thus, the runner has
an API that matches what's expected by BuildBackendHookCaller.subprocess_runner.
"""
def runner(

View File

@ -1 +0,0 @@
from pep517 import *

View File

@ -1,6 +0,0 @@
"""Wrappers to build Python packages using PEP 517 hooks
"""
__version__ = '0.13.0'
from .wrappers import * # noqa: F401, F403

View File

@ -1,126 +0,0 @@
"""Build a project using PEP 517 hooks.
"""
import argparse
import logging
import os
import shutil
import tempfile
from ._compat import tomllib
from .envbuild import BuildEnvironment
from .wrappers import Pep517HookCaller
log = logging.getLogger(__name__)
def validate_system(system):
"""
Ensure build system has the requisite fields.
"""
required = {'requires', 'build-backend'}
if not (required <= set(system)):
message = "Missing required fields: {missing}".format(
missing=required-set(system),
)
raise ValueError(message)
def load_system(source_dir):
"""
Load the build system from a source dir (pyproject.toml).
"""
pyproject = os.path.join(source_dir, 'pyproject.toml')
with open(pyproject, 'rb') as f:
pyproject_data = tomllib.load(f)
return pyproject_data['build-system']
def compat_system(source_dir):
"""
Given a source dir, attempt to get a build system backend
and requirements from pyproject.toml. Fallback to
setuptools but only if the file was not found or a build
system was not indicated.
"""
try:
system = load_system(source_dir)
except (FileNotFoundError, KeyError):
system = {}
system.setdefault(
'build-backend',
'setuptools.build_meta:__legacy__',
)
system.setdefault('requires', ['setuptools', 'wheel'])
return system
def _do_build(hooks, env, dist, dest):
get_requires_name = 'get_requires_for_build_{dist}'.format(**locals())
get_requires = getattr(hooks, get_requires_name)
reqs = get_requires({})
log.info('Got build requires: %s', reqs)
env.pip_install(reqs)
log.info('Installed dynamic build dependencies')
with tempfile.TemporaryDirectory() as td:
log.info('Trying to build %s in %s', dist, td)
build_name = 'build_{dist}'.format(**locals())
build = getattr(hooks, build_name)
filename = build(td, {})
source = os.path.join(td, filename)
shutil.move(source, os.path.join(dest, os.path.basename(filename)))
def build(source_dir, dist, dest=None, system=None):
system = system or load_system(source_dir)
dest = os.path.join(source_dir, dest or 'dist')
os.makedirs(dest, exist_ok=True)
validate_system(system)
hooks = Pep517HookCaller(
source_dir, system['build-backend'], system.get('backend-path')
)
with BuildEnvironment() as env:
env.pip_install(system['requires'])
_do_build(hooks, env, dist, dest)
parser = argparse.ArgumentParser()
parser.add_argument(
'source_dir',
help="A directory containing pyproject.toml",
)
parser.add_argument(
'--binary', '-b',
action='store_true',
default=False,
)
parser.add_argument(
'--source', '-s',
action='store_true',
default=False,
)
parser.add_argument(
'--out-dir', '-o',
help="Destination in which to save the builds relative to source dir",
)
def main(args):
log.warning('pep517.build is deprecated. '
'Consider switching to https://pypi.org/project/build/')
# determine which dists to build
dists = list(filter(None, (
'sdist' if args.source or not args.binary else None,
'wheel' if args.binary or not args.source else None,
)))
for dist in dists:
build(args.source_dir, dist, args.out_dir)
if __name__ == '__main__':
main(parser.parse_args())

View File

@ -1,207 +0,0 @@
"""Check a project and backend by attempting to build using PEP 517 hooks.
"""
import argparse
import logging
import os
import shutil
import sys
import tarfile
import zipfile
from os.path import isfile
from os.path import join as pjoin
from subprocess import CalledProcessError
from tempfile import mkdtemp
from ._compat import tomllib
from .colorlog import enable_colourful_output
from .envbuild import BuildEnvironment
from .wrappers import Pep517HookCaller
log = logging.getLogger(__name__)
def check_build_sdist(hooks, build_sys_requires):
with BuildEnvironment() as env:
try:
env.pip_install(build_sys_requires)
log.info('Installed static build dependencies')
except CalledProcessError:
log.error('Failed to install static build dependencies')
return False
try:
reqs = hooks.get_requires_for_build_sdist({})
log.info('Got build requires: %s', reqs)
except Exception:
log.error('Failure in get_requires_for_build_sdist', exc_info=True)
return False
try:
env.pip_install(reqs)
log.info('Installed dynamic build dependencies')
except CalledProcessError:
log.error('Failed to install dynamic build dependencies')
return False
td = mkdtemp()
log.info('Trying to build sdist in %s', td)
try:
try:
filename = hooks.build_sdist(td, {})
log.info('build_sdist returned %r', filename)
except Exception:
log.info('Failure in build_sdist', exc_info=True)
return False
if not filename.endswith('.tar.gz'):
log.error(
"Filename %s doesn't have .tar.gz extension", filename)
return False
path = pjoin(td, filename)
if isfile(path):
log.info("Output file %s exists", path)
else:
log.error("Output file %s does not exist", path)
return False
if tarfile.is_tarfile(path):
log.info("Output file is a tar file")
else:
log.error("Output file is not a tar file")
return False
finally:
shutil.rmtree(td)
return True
def check_build_wheel(hooks, build_sys_requires):
with BuildEnvironment() as env:
try:
env.pip_install(build_sys_requires)
log.info('Installed static build dependencies')
except CalledProcessError:
log.error('Failed to install static build dependencies')
return False
try:
reqs = hooks.get_requires_for_build_wheel({})
log.info('Got build requires: %s', reqs)
except Exception:
log.error('Failure in get_requires_for_build_sdist', exc_info=True)
return False
try:
env.pip_install(reqs)
log.info('Installed dynamic build dependencies')
except CalledProcessError:
log.error('Failed to install dynamic build dependencies')
return False
td = mkdtemp()
log.info('Trying to build wheel in %s', td)
try:
try:
filename = hooks.build_wheel(td, {})
log.info('build_wheel returned %r', filename)
except Exception:
log.info('Failure in build_wheel', exc_info=True)
return False
if not filename.endswith('.whl'):
log.error("Filename %s doesn't have .whl extension", filename)
return False
path = pjoin(td, filename)
if isfile(path):
log.info("Output file %s exists", path)
else:
log.error("Output file %s does not exist", path)
return False
if zipfile.is_zipfile(path):
log.info("Output file is a zip file")
else:
log.error("Output file is not a zip file")
return False
finally:
shutil.rmtree(td)
return True
def check(source_dir):
pyproject = pjoin(source_dir, 'pyproject.toml')
if isfile(pyproject):
log.info('Found pyproject.toml')
else:
log.error('Missing pyproject.toml')
return False
try:
with open(pyproject, 'rb') as f:
pyproject_data = tomllib.load(f)
# Ensure the mandatory data can be loaded
buildsys = pyproject_data['build-system']
requires = buildsys['requires']
backend = buildsys['build-backend']
backend_path = buildsys.get('backend-path')
log.info('Loaded pyproject.toml')
except (tomllib.TOMLDecodeError, KeyError):
log.error("Invalid pyproject.toml", exc_info=True)
return False
hooks = Pep517HookCaller(source_dir, backend, backend_path)
sdist_ok = check_build_sdist(hooks, requires)
wheel_ok = check_build_wheel(hooks, requires)
if not sdist_ok:
log.warning('Sdist checks failed; scroll up to see')
if not wheel_ok:
log.warning('Wheel checks failed')
return sdist_ok
def main(argv=None):
log.warning('pep517.check is deprecated. '
'Consider switching to https://pypi.org/project/build/')
ap = argparse.ArgumentParser()
ap.add_argument(
'source_dir',
help="A directory containing pyproject.toml")
args = ap.parse_args(argv)
enable_colourful_output()
ok = check(args.source_dir)
if ok:
print(ansi('Checks passed', 'green'))
else:
print(ansi('Checks failed', 'red'))
sys.exit(1)
ansi_codes = {
'reset': '\x1b[0m',
'bold': '\x1b[1m',
'red': '\x1b[31m',
'green': '\x1b[32m',
}
def ansi(s, attr):
if os.name != 'nt' and sys.stdout.isatty():
return ansi_codes[attr] + str(s) + ansi_codes['reset']
else:
return str(s)
if __name__ == '__main__':
main()

View File

@ -1,113 +0,0 @@
"""Nicer log formatting with colours.
Code copied from Tornado, Apache licensed.
"""
# Copyright 2012 Facebook
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
import sys
try:
import curses
except ImportError:
curses = None
def _stderr_supports_color():
color = False
if curses and hasattr(sys.stderr, 'isatty') and sys.stderr.isatty():
try:
curses.setupterm()
if curses.tigetnum("colors") > 0:
color = True
except Exception:
pass
return color
class LogFormatter(logging.Formatter):
"""Log formatter with colour support
"""
DEFAULT_COLORS = {
logging.INFO: 2, # Green
logging.WARNING: 3, # Yellow
logging.ERROR: 1, # Red
logging.CRITICAL: 1,
}
def __init__(self, color=True, datefmt=None):
r"""
:arg bool color: Enables color support.
:arg string fmt: Log message format.
It will be applied to the attributes dict of log records. The
text between ``%(color)s`` and ``%(end_color)s`` will be colored
depending on the level if color support is on.
:arg dict colors: color mappings from logging level to terminal color
code
:arg string datefmt: Datetime format.
Used for formatting ``(asctime)`` placeholder in ``prefix_fmt``.
.. versionchanged:: 3.2
Added ``fmt`` and ``datefmt`` arguments.
"""
logging.Formatter.__init__(self, datefmt=datefmt)
self._colors = {}
if color and _stderr_supports_color():
# The curses module has some str/bytes confusion in
# python3. Until version 3.2.3, most methods return
# bytes, but only accept strings. In addition, we want to
# output these strings with the logging module, which
# works with unicode strings. The explicit calls to
# unicode() below are harmless in python2 but will do the
# right conversion in python 3.
fg_color = (curses.tigetstr("setaf") or
curses.tigetstr("setf") or "")
for levelno, code in self.DEFAULT_COLORS.items():
self._colors[levelno] = str(
curses.tparm(fg_color, code), "ascii")
self._normal = str(curses.tigetstr("sgr0"), "ascii")
scr = curses.initscr()
self.termwidth = scr.getmaxyx()[1]
curses.endwin()
else:
self._normal = ''
# Default width is usually 80, but too wide is
# worse than too narrow
self.termwidth = 70
def formatMessage(self, record):
mlen = len(record.message)
right_text = '{initial}-{name}'.format(initial=record.levelname[0],
name=record.name)
if mlen + len(right_text) < self.termwidth:
space = ' ' * (self.termwidth - (mlen + len(right_text)))
else:
space = ' '
if record.levelno in self._colors:
start_color = self._colors[record.levelno]
end_color = self._normal
else:
start_color = end_color = ''
return record.message + space + start_color + right_text + end_color
def enable_colourful_output(level=logging.INFO):
handler = logging.StreamHandler()
handler.setFormatter(LogFormatter())
logging.root.addHandler(handler)
logging.root.setLevel(level)

View File

@ -1,19 +0,0 @@
import io
import os
import zipfile
def dir_to_zipfile(root):
"""Construct an in-memory zip file for a directory."""
buffer = io.BytesIO()
zip_file = zipfile.ZipFile(buffer, 'w')
for root, dirs, files in os.walk(root):
for path in dirs:
fs_path = os.path.join(root, path)
rel_path = os.path.relpath(fs_path, root)
zip_file.writestr(rel_path + '/', '')
for path in files:
fs_path = os.path.join(root, path)
rel_path = os.path.relpath(fs_path, root)
zip_file.write(fs_path, rel_path)
return zip_file

View File

@ -1,170 +0,0 @@
"""Build wheels/sdists by installing build deps to a temporary environment.
"""
import logging
import os
import shutil
import sys
from subprocess import check_call
from sysconfig import get_paths
from tempfile import mkdtemp
from ._compat import tomllib
from .wrappers import LoggerWrapper, Pep517HookCaller
log = logging.getLogger(__name__)
def _load_pyproject(source_dir):
with open(
os.path.join(source_dir, 'pyproject.toml'),
'rb',
) as f:
pyproject_data = tomllib.load(f)
buildsys = pyproject_data['build-system']
return (
buildsys['requires'],
buildsys['build-backend'],
buildsys.get('backend-path'),
)
class BuildEnvironment:
"""Context manager to install build deps in a simple temporary environment
Based on code I wrote for pip, which is MIT licensed.
"""
# Copyright (c) 2008-2016 The pip developers (see AUTHORS.txt file)
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
path = None
def __init__(self, cleanup=True):
self._cleanup = cleanup
def __enter__(self):
self.path = mkdtemp(prefix='pep517-build-env-')
log.info('Temporary build environment: %s', self.path)
self.save_path = os.environ.get('PATH', None)
self.save_pythonpath = os.environ.get('PYTHONPATH', None)
install_scheme = 'nt' if (os.name == 'nt') else 'posix_prefix'
install_dirs = get_paths(install_scheme, vars={
'base': self.path,
'platbase': self.path,
})
scripts = install_dirs['scripts']
if self.save_path:
os.environ['PATH'] = scripts + os.pathsep + self.save_path
else:
os.environ['PATH'] = scripts + os.pathsep + os.defpath
if install_dirs['purelib'] == install_dirs['platlib']:
lib_dirs = install_dirs['purelib']
else:
lib_dirs = install_dirs['purelib'] + os.pathsep + \
install_dirs['platlib']
if self.save_pythonpath:
os.environ['PYTHONPATH'] = lib_dirs + os.pathsep + \
self.save_pythonpath
else:
os.environ['PYTHONPATH'] = lib_dirs
return self
def pip_install(self, reqs):
"""Install dependencies into this env by calling pip in a subprocess"""
if not reqs:
return
log.info('Calling pip to install %s', reqs)
cmd = [
sys.executable, '-m', 'pip', 'install', '--ignore-installed',
'--prefix', self.path] + list(reqs)
check_call(
cmd,
stdout=LoggerWrapper(log, logging.INFO),
stderr=LoggerWrapper(log, logging.ERROR),
)
def __exit__(self, exc_type, exc_val, exc_tb):
needs_cleanup = (
self._cleanup and
self.path is not None and
os.path.isdir(self.path)
)
if needs_cleanup:
shutil.rmtree(self.path)
if self.save_path is None:
os.environ.pop('PATH', None)
else:
os.environ['PATH'] = self.save_path
if self.save_pythonpath is None:
os.environ.pop('PYTHONPATH', None)
else:
os.environ['PYTHONPATH'] = self.save_pythonpath
def build_wheel(source_dir, wheel_dir, config_settings=None):
"""Build a wheel from a source directory using PEP 517 hooks.
:param str source_dir: Source directory containing pyproject.toml
:param str wheel_dir: Target directory to create wheel in
:param dict config_settings: Options to pass to build backend
This is a blocking function which will run pip in a subprocess to install
build requirements.
"""
if config_settings is None:
config_settings = {}
requires, backend, backend_path = _load_pyproject(source_dir)
hooks = Pep517HookCaller(source_dir, backend, backend_path)
with BuildEnvironment() as env:
env.pip_install(requires)
reqs = hooks.get_requires_for_build_wheel(config_settings)
env.pip_install(reqs)
return hooks.build_wheel(wheel_dir, config_settings)
def build_sdist(source_dir, sdist_dir, config_settings=None):
"""Build an sdist from a source directory using PEP 517 hooks.
:param str source_dir: Source directory containing pyproject.toml
:param str sdist_dir: Target directory to place sdist in
:param dict config_settings: Options to pass to build backend
This is a blocking function which will run pip in a subprocess to install
build requirements.
"""
if config_settings is None:
config_settings = {}
requires, backend, backend_path = _load_pyproject(source_dir)
hooks = Pep517HookCaller(source_dir, backend, backend_path)
with BuildEnvironment() as env:
env.pip_install(requires)
reqs = hooks.get_requires_for_build_sdist(config_settings)
env.pip_install(reqs)
return hooks.build_sdist(sdist_dir, config_settings)

View File

@ -1,26 +0,0 @@
"""This is a subpackage because the directory is on sys.path for _in_process.py
The subpackage should stay as empty as possible to avoid shadowing modules that
the backend might import.
"""
from contextlib import contextmanager
from os.path import abspath, dirname
from os.path import join as pjoin
try:
import importlib.resources as resources
try:
resources.files
except AttributeError:
# Python 3.8 compatibility
def _in_proc_script_path():
return resources.path(__package__, '_in_process.py')
else:
def _in_proc_script_path():
return resources.as_file(
resources.files(__package__).joinpath('_in_process.py'))
except ImportError:
# Python 3.6 compatibility
@contextmanager
def _in_proc_script_path():
yield pjoin(dirname(abspath(__file__)), '_in_process.py')

View File

@ -1,93 +0,0 @@
"""Build metadata for a project using PEP 517 hooks.
"""
import argparse
import functools
import logging
import os
import shutil
import tempfile
try:
import importlib.metadata as imp_meta
except ImportError:
import importlib_metadata as imp_meta
try:
from zipfile import Path
except ImportError:
from zipp import Path
from .build import compat_system, load_system, validate_system
from .dirtools import dir_to_zipfile
from .envbuild import BuildEnvironment
from .wrappers import Pep517HookCaller, quiet_subprocess_runner
log = logging.getLogger(__name__)
def _prep_meta(hooks, env, dest):
reqs = hooks.get_requires_for_build_wheel({})
log.info('Got build requires: %s', reqs)
env.pip_install(reqs)
log.info('Installed dynamic build dependencies')
with tempfile.TemporaryDirectory() as td:
log.info('Trying to build metadata in %s', td)
filename = hooks.prepare_metadata_for_build_wheel(td, {})
source = os.path.join(td, filename)
shutil.move(source, os.path.join(dest, os.path.basename(filename)))
def build(source_dir='.', dest=None, system=None):
system = system or load_system(source_dir)
dest = os.path.join(source_dir, dest or 'dist')
os.makedirs(dest, exist_ok=True)
validate_system(system)
hooks = Pep517HookCaller(
source_dir, system['build-backend'], system.get('backend-path')
)
with hooks.subprocess_runner(quiet_subprocess_runner):
with BuildEnvironment() as env:
env.pip_install(system['requires'])
_prep_meta(hooks, env, dest)
def build_as_zip(builder=build):
with tempfile.TemporaryDirectory() as out_dir:
builder(dest=out_dir)
return dir_to_zipfile(out_dir)
def load(root):
"""
Given a source directory (root) of a package,
return an importlib.metadata.Distribution object
with metadata build from that package.
"""
root = os.path.expanduser(root)
system = compat_system(root)
builder = functools.partial(build, source_dir=root, system=system)
path = Path(build_as_zip(builder))
return imp_meta.PathDistribution(path)
parser = argparse.ArgumentParser()
parser.add_argument(
'source_dir',
help="A directory containing pyproject.toml",
)
parser.add_argument(
'--out-dir', '-o',
help="Destination in which to save the builds relative to source dir",
)
def main():
args = parser.parse_args()
build(args.source_dir, args.out_dir)
if __name__ == '__main__':
main()

View File

@ -0,0 +1 @@
from pyproject_hooks import *

View File

@ -0,0 +1,23 @@
"""Wrappers to call pyproject.toml-based build backend hooks.
"""
from ._impl import (
BackendInvalid,
BackendUnavailable,
BuildBackendHookCaller,
HookMissing,
UnsupportedOperation,
default_subprocess_runner,
quiet_subprocess_runner,
)
__version__ = '1.0.0'
__all__ = [
'BackendUnavailable',
'BackendInvalid',
'HookMissing',
'UnsupportedOperation',
'default_subprocess_runner',
'quiet_subprocess_runner',
'BuildBackendHookCaller',
]
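`BuildBackendHookCaller` is the renamed replacement for `Pep517HookCaller` that pip's internals switch to throughout this commit. A hedged usage sketch, using only the constructor and hooks shown in this diff (the project path and backend spec below are illustrative):

```python
# Minimal usage sketch of the renamed API; values below are illustrative only.
import os

from pip._vendor.pyproject_hooks import BuildBackendHookCaller, quiet_subprocess_runner

hooks = BuildBackendHookCaller(
    source_dir="path/to/project",           # directory containing pyproject.toml
    build_backend="setuptools.build_meta",  # build-backend value from pyproject.toml
)

os.makedirs("dist", exist_ok=True)

# Temporarily silence backend subprocess output via the context manager
# documented further down in this commit.
with hooks.subprocess_runner(quiet_subprocess_runner):
    # pip would install these into an isolated build environment first.
    build_requires = hooks.get_requires_for_build_wheel()
    wheel_name = hooks.build_wheel("dist")
```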

View File

@ -2,23 +2,12 @@ import json
import os
import sys
import tempfile
import threading
from contextlib import contextmanager
from os.path import abspath
from os.path import join as pjoin
from subprocess import STDOUT, check_call, check_output
from .in_process import _in_proc_script_path
__all__ = [
'BackendUnavailable',
'BackendInvalid',
'HookMissing',
'UnsupportedOperation',
'default_subprocess_runner',
'quiet_subprocess_runner',
'Pep517HookCaller',
]
from ._in_process import _in_proc_script_path
def write_json(obj, path, **kwargs):
@ -40,13 +29,13 @@ class BackendUnavailable(Exception):
class BackendInvalid(Exception):
"""Will be raised if the backend is invalid."""
def __init__(self, backend_name, backend_path, message):
super().__init__(message)
self.backend_name = backend_name
self.backend_path = backend_path
self.message = message
class HookMissing(Exception):
"""Will be raised on missing hooks."""
"""Will be raised on missing hooks (if a fallback can't be used)."""
def __init__(self, hook_name):
super().__init__(hook_name)
self.hook_name = hook_name
@ -59,7 +48,10 @@ class UnsupportedOperation(Exception):
def default_subprocess_runner(cmd, cwd=None, extra_environ=None):
"""The default method of calling the wrapper subprocess."""
"""The default method of calling the wrapper subprocess.
This uses :func:`subprocess.check_call` under the hood.
"""
env = os.environ.copy()
if extra_environ:
env.update(extra_environ)
@ -68,7 +60,10 @@ def default_subprocess_runner(cmd, cwd=None, extra_environ=None):
def quiet_subprocess_runner(cmd, cwd=None, extra_environ=None):
"""A method of calling the wrapper subprocess while suppressing output."""
"""Call the subprocess while suppressing output.
This uses :func:`subprocess.check_output` under the hood.
"""
env = os.environ.copy()
if extra_environ:
env.update(extra_environ)
@ -100,26 +95,10 @@ def norm_and_check(source_tree, requested):
return abs_requested
class Pep517HookCaller:
"""A wrapper around a source directory to be built with a PEP 517 backend.
:param source_dir: The path to the source directory, containing
pyproject.toml.
:param build_backend: The build backend spec, as per PEP 517, from
pyproject.toml.
:param backend_path: The backend path, as per PEP 517, from pyproject.toml.
:param runner: A callable that invokes the wrapper subprocess.
:param python_executable: The Python executable used to invoke the backend
The 'runner', if provided, must expect the following:
- cmd: a list of strings representing the command and arguments to
execute, as would be passed to e.g. 'subprocess.check_call'.
- cwd: a string representing the working directory that must be
used for the subprocess. Corresponds to the provided source_dir.
- extra_environ: a dict mapping environment variable names to values
which must be set for the subprocess execution.
class BuildBackendHookCaller:
"""A wrapper to call the build backend hooks for a source directory.
"""
def __init__(
self,
source_dir,
@ -128,6 +107,14 @@ class Pep517HookCaller:
runner=None,
python_executable=None,
):
"""
:param source_dir: The source directory to invoke the build backend for
:param build_backend: The build backend spec
:param backend_path: Additional path entries for the build backend spec
:param runner: The :ref:`subprocess runner <Subprocess Runners>` to use
:param python_executable:
The Python executable used to invoke the build backend
"""
if runner is None:
runner = default_subprocess_runner
@ -145,8 +132,14 @@ class Pep517HookCaller:
@contextmanager
def subprocess_runner(self, runner):
"""A context manager for temporarily overriding the default subprocess
runner.
"""A context manager for temporarily overriding the default
:ref:`subprocess runner <Subprocess Runners>`.
.. code-block:: python
hook_caller = BuildBackendHookCaller(...)
with hook_caller.subprocess_runner(quiet_subprocess_runner):
...
"""
prev = self._subprocess_runner
self._subprocess_runner = runner
@ -160,15 +153,15 @@ class Pep517HookCaller:
return self._call_hook('_supported_features', {})
def get_requires_for_build_wheel(self, config_settings=None):
"""Identify packages required for building a wheel
"""Get additional dependencies required for building a wheel.
Returns a list of dependency specifications, e.g.::
:returns: A list of :pep:`dependency specifiers <508>`.
:rtype: list[str]
["wheel >= 0.25", "setuptools"]
.. admonition:: Fallback
This does not include requirements specified in pyproject.toml.
It returns the result of calling the equivalently named hook in a
subprocess.
If the build backend does not define a hook with this name, an
empty list will be returned.
"""
return self._call_hook('get_requires_for_build_wheel', {
'config_settings': config_settings
@ -179,12 +172,16 @@ class Pep517HookCaller:
_allow_fallback=True):
"""Prepare a ``*.dist-info`` folder with metadata for this project.
Returns the name of the newly created folder.
:returns: Name of the newly created subfolder within
``metadata_directory``, containing the metadata.
:rtype: str
If the build backend defines a hook with this name, it will be called
in a subprocess. If not, the backend will be asked to build a wheel,
and the dist-info extracted from that (unless _allow_fallback is
False).
.. admonition:: Fallback
If the build backend does not define a hook with this name and
``_allow_fallback`` is truthy, the backend will be asked to build a
wheel via the ``build_wheel`` hook and the dist-info extracted from
that will be returned.
"""
return self._call_hook('prepare_metadata_for_build_wheel', {
'metadata_directory': abspath(metadata_directory),
@ -197,12 +194,15 @@ class Pep517HookCaller:
metadata_directory=None):
"""Build a wheel from this project.
Returns the name of the newly created file.
:returns:
The name of the newly created wheel within ``wheel_directory``.
In general, this will call the 'build_wheel' hook in the backend.
However, if that was previously called by
'prepare_metadata_for_build_wheel', and the same metadata_directory is
used, the previously built wheel will be copied to wheel_directory.
.. admonition:: Interaction with fallback
If the ``build_wheel`` hook was called in the fallback for
:meth:`prepare_metadata_for_build_wheel`, the build backend would
not be invoked. Instead, the previously built wheel will be copied
to ``wheel_directory`` and the name of that file will be returned.
"""
if metadata_directory is not None:
metadata_directory = abspath(metadata_directory)
@ -213,15 +213,15 @@ class Pep517HookCaller:
})
def get_requires_for_build_editable(self, config_settings=None):
"""Identify packages required for building an editable wheel
"""Get additional dependencies required for building an editable wheel.
Returns a list of dependency specifications, e.g.::
:returns: A list of :pep:`dependency specifiers <508>`.
:rtype: list[str]
["wheel >= 0.25", "setuptools"]
.. admonition:: Fallback
This does not include requirements specified in pyproject.toml.
It returns the result of calling the equivalently named hook in a
subprocess.
If the build backend does not define a hook with this name, an
empty list will be returned.
"""
return self._call_hook('get_requires_for_build_editable', {
'config_settings': config_settings
@ -232,12 +232,16 @@ class Pep517HookCaller:
_allow_fallback=True):
"""Prepare a ``*.dist-info`` folder with metadata for this project.
Returns the name of the newly created folder.
:returns: Name of the newly created subfolder within
``metadata_directory``, containing the metadata.
:rtype: str
If the build backend defines a hook with this name, it will be called
in a subprocess. If not, the backend will be asked to build an editable
wheel, and the dist-info extracted from that (unless _allow_fallback is
False).
.. admonition:: Fallback
If the build backend does not define a hook with this name and
``_allow_fallback`` is truthy, the backend will be asked to build a
wheel via the ``build_editable`` hook and the dist-info
extracted from that will be returned.
"""
return self._call_hook('prepare_metadata_for_build_editable', {
'metadata_directory': abspath(metadata_directory),
@ -250,12 +254,16 @@ class Pep517HookCaller:
metadata_directory=None):
"""Build an editable wheel from this project.
Returns the name of the newly created file.
:returns:
The name of the newly created wheel within ``wheel_directory``.
In general, this will call the 'build_editable' hook in the backend.
However, if that was previously called by
'prepare_metadata_for_build_editable', and the same metadata_directory
is used, the previously built wheel will be copied to wheel_directory.
.. admonition:: Interaction with fallback
If the ``build_editable`` hook was called in the fallback for
:meth:`prepare_metadata_for_build_editable`, the build backend
would not be invoked. Instead, the previously built wheel will be
copied to ``wheel_directory`` and the name of that file will be
returned.
"""
if metadata_directory is not None:
metadata_directory = abspath(metadata_directory)
@ -266,15 +274,10 @@ class Pep517HookCaller:
})
def get_requires_for_build_sdist(self, config_settings=None):
"""Identify packages required for building a wheel
"""Get additional dependencies required for building an sdist.
Returns a list of dependency specifications, e.g.::
["setuptools >= 26"]
This does not include requirements specified in pyproject.toml.
It returns the result of calling the equivalently named hook in a
subprocess.
:returns: A list of :pep:`dependency specifiers <508>`.
:rtype: list[str]
"""
return self._call_hook('get_requires_for_build_sdist', {
'config_settings': config_settings
@ -283,9 +286,8 @@ class Pep517HookCaller:
def build_sdist(self, sdist_directory, config_settings=None):
"""Build an sdist from this project.
Returns the name of the newly created file.
This calls the 'build_sdist' backend hook in a subprocess.
:returns:
The name of the newly created sdist within ``sdist_directory``.
"""
return self._call_hook('build_sdist', {
'sdist_directory': abspath(sdist_directory),
@ -326,37 +328,3 @@ class Pep517HookCaller:
if data.get('hook_missing'):
raise HookMissing(data.get('missing_hook_name') or hook_name)
return data['return_val']
class LoggerWrapper(threading.Thread):
"""
Read messages from a pipe and redirect them
to a logger (see python's logging module).
"""
def __init__(self, logger, level):
threading.Thread.__init__(self)
self.daemon = True
self.logger = logger
self.level = level
# create the pipe and reader
self.fd_read, self.fd_write = os.pipe()
self.reader = os.fdopen(self.fd_read)
self.start()
def fileno(self):
return self.fd_write
@staticmethod
def remove_newline(msg):
return msg[:-1] if msg.endswith(os.linesep) else msg
def run(self):
for line in self.reader:
self._write(self.remove_newline(line))
def _write(self, message):
self.logger.log(self.level, message)

View File

@ -0,0 +1,18 @@
"""This is a subpackage because the directory is on sys.path for _in_process.py
The subpackage should stay as empty as possible to avoid shadowing modules that
the backend might import.
"""
import importlib.resources as resources
try:
resources.files
except AttributeError:
# Python 3.8 compatibility
def _in_proc_script_path():
return resources.path(__package__, '_in_process.py')
else:
def _in_proc_script_path():
return resources.as_file(
resources.files(__package__).joinpath('_in_process.py'))

View File

@ -145,11 +145,13 @@ def prepare_metadata_for_build_wheel(
except AttributeError:
if not _allow_fallback:
raise HookMissing()
whl_basename = backend.build_wheel(metadata_directory, config_settings)
return _get_wheel_metadata_from_wheel(whl_basename, metadata_directory,
config_settings)
else:
return hook(metadata_directory, config_settings)
# fallback to build_wheel outside the try block to avoid exception chaining
# which can be confusing to users and is not relevant
whl_basename = backend.build_wheel(metadata_directory, config_settings)
return _get_wheel_metadata_from_wheel(whl_basename, metadata_directory,
config_settings)
def prepare_metadata_for_build_editable(
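The comment in the hunk above notes that the `build_wheel` fallback now runs outside the `try` block so users do not see the original `AttributeError` chained onto any failure in the fallback. A generic illustration of the difference (not pip code):

```python
# Generic illustration of exception chaining, not pip code: raising from inside
# an except handler prints "During handling of the above exception, another
# exception occurred", which is the noise the change above avoids.
def fallback():
    raise RuntimeError("fallback failed")

def chained():
    try:
        raise AttributeError("backend has no such hook")
    except AttributeError:
        fallback()  # traceback also drags in the swallowed AttributeError

def unchained():
    hook = getattr(object(), "no_such_hook", None)
    if hook is None:
        fallback()  # traceback shows only the fallback's own error
```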

View File

@ -5,7 +5,7 @@ from typing import IO, TYPE_CHECKING, Any, Callable, Optional, Union
from ._extension import load_ipython_extension # noqa: F401
__all__ = ["get_console", "reconfigure", "print", "inspect"]
__all__ = ["get_console", "reconfigure", "print", "inspect", "print_json"]
if TYPE_CHECKING:
from .console import Console
@ -40,7 +40,8 @@ def reconfigure(*args: Any, **kwargs: Any) -> None:
"""Reconfigures the global console by replacing it with another.
Args:
console (Console): Replacement console instance.
*args (Any): Positional arguments for the replacement :class:`~rich.console.Console`.
**kwargs (Any): Keyword arguments for the replacement :class:`~rich.console.Console`.
"""
from pip._vendor.rich.console import Console
@ -80,7 +81,7 @@ def print_json(
indent: Union[None, int, str] = 2,
highlight: bool = True,
skip_keys: bool = False,
ensure_ascii: bool = True,
ensure_ascii: bool = False,
check_circular: bool = True,
allow_nan: bool = True,
default: Optional[Callable[[Any], Any]] = None,

View File

@ -227,10 +227,6 @@ if __name__ == "__main__": # pragma: no cover
c = Console(record=True)
c.print(test_card)
# c.save_svg(
# path="/Users/darrenburns/Library/Application Support/JetBrains/PyCharm2021.3/scratches/svg_export.svg",
# title="Rich can export to SVG",
# )
print(f"rendered in {pre_cache_taken}ms (cold cache)")
print(f"rendered in {taken}ms (warm cache)")
@ -247,10 +243,6 @@ if __name__ == "__main__": # pragma: no cover
"Textualize",
"[u blue link=https://github.com/textualize]https://github.com/textualize",
)
sponsor_message.add_row(
"Buy devs a :coffee:",
"[u blue link=https://ko-fi.com/textualize]https://ko-fi.com/textualize",
)
sponsor_message.add_row(
"Twitter",
"[u blue link=https://twitter.com/willmcgugan]https://twitter.com/willmcgugan",

View File

@ -0,0 +1,83 @@
from types import TracebackType
from typing import IO, Iterable, Iterator, List, Optional, Type
class NullFile(IO[str]):
# TODO: "mode", "name" and "closed" are only required for Python 3.6.
@property
def mode(self) -> str:
return ""
@property
def name(self) -> str:
return "NullFile"
def closed(self) -> bool:
return False
def close(self) -> None:
pass
def isatty(self) -> bool:
return False
def read(self, __n: int = 1) -> str:
return ""
def readable(self) -> bool:
return False
def readline(self, __limit: int = 1) -> str:
return ""
def readlines(self, __hint: int = 1) -> List[str]:
return []
def seek(self, __offset: int, __whence: int = 1) -> int:
return 0
def seekable(self) -> bool:
return False
def tell(self) -> int:
return 0
def truncate(self, __size: Optional[int] = 1) -> int:
return 0
def writable(self) -> bool:
return False
def writelines(self, __lines: Iterable[str]) -> None:
pass
def __next__(self) -> str:
return ""
def __iter__(self) -> Iterator[str]:
return iter([""])
def __enter__(self) -> IO[str]:
pass
def __exit__(
self,
__t: Optional[Type[BaseException]],
__value: Optional[BaseException],
__traceback: Optional[TracebackType],
) -> None:
pass
def write(self, text: str) -> int:
return 0
def flush(self) -> None:
pass
def fileno(self) -> int:
return -1
NULL_FILE = NullFile()

View File

@ -120,7 +120,7 @@ class AnsiDecoder:
self.style = Style.null()
def decode(self, terminal_text: str) -> Iterable[Text]:
"""Decode ANSI codes in an interable of lines.
"""Decode ANSI codes in an iterable of lines.
Args:
lines (Iterable[str]): An iterable of lines of terminal output.

View File

@ -514,4 +514,4 @@ if __name__ == "__main__": # pragma: no cover
columns.add_renderable(table)
console.print(columns)
# console.save_html("box.html", inline_styles=True)
# console.save_svg("box.svg")

View File

@ -29,6 +29,9 @@ class ColorSystem(IntEnum):
def __repr__(self) -> str:
return f"ColorSystem.{self.name}"
def __str__(self) -> str:
return repr(self)
class ColorType(IntEnum):
"""Type of color stored in Color class."""
@ -310,7 +313,7 @@ class Color(NamedTuple):
"""A triplet of color components, if an RGB color."""
def __rich__(self) -> "Text":
"""Dispays the actual color if Rich printed."""
"""Displays the actual color if Rich printed."""
from .style import Style
from .text import Text

View File

@ -34,6 +34,8 @@ from typing import (
cast,
)
from pip._vendor.rich._null_file import NULL_FILE
if sys.version_info >= (3, 8):
from typing import Literal, Protocol, runtime_checkable
else:
@ -104,7 +106,11 @@ _STD_STREAMS = (_STDIN_FILENO, _STDOUT_FILENO, _STDERR_FILENO)
_STD_STREAMS_OUTPUT = (_STDOUT_FILENO, _STDERR_FILENO)
_TERM_COLORS = {"256color": ColorSystem.EIGHT_BIT, "16color": ColorSystem.STANDARD}
_TERM_COLORS = {
"kitty": ColorSystem.EIGHT_BIT,
"256color": ColorSystem.EIGHT_BIT,
"16color": ColorSystem.STANDARD,
}
class ConsoleDimensions(NamedTuple):
@ -516,7 +522,11 @@ def _is_jupyter() -> bool: # pragma: no cover
return False
ipython = get_ipython() # type: ignore[name-defined]
shell = ipython.__class__.__name__
if "google.colab" in str(ipython.__class__) or shell == "ZMQInteractiveShell":
if (
"google.colab" in str(ipython.__class__)
or os.getenv("DATABRICKS_RUNTIME_VERSION")
or shell == "ZMQInteractiveShell"
):
return True # Jupyter notebook or qtconsole
elif shell == "TerminalInteractiveShell":
return False # Terminal running IPython
@ -697,7 +707,16 @@ class Console:
self._height = height
self._color_system: Optional[ColorSystem]
self._force_terminal = force_terminal
self._force_terminal = None
if force_terminal is not None:
self._force_terminal = force_terminal
else:
# If FORCE_COLOR env var has any value at all, we force terminal.
force_color = self._environ.get("FORCE_COLOR")
if force_color is not None:
self._force_terminal = True
self._file = file
self.quiet = quiet
self.stderr = stderr
@ -746,6 +765,8 @@ class Console:
"""Get the file object to write to."""
file = self._file or (sys.stderr if self.stderr else sys.stdout)
file = getattr(file, "rich_proxied_file", file)
if file is None:
file = NULL_FILE
return file
@file.setter
@ -1701,7 +1722,7 @@ class Console:
indent: Union[None, int, str] = 2,
highlight: bool = True,
skip_keys: bool = False,
ensure_ascii: bool = True,
ensure_ascii: bool = False,
check_circular: bool = True,
allow_nan: bool = True,
default: Optional[Callable[[Any], Any]] = None,
@ -1996,9 +2017,11 @@ class Console:
from pip._vendor.rich._win32_console import LegacyWindowsTerm
from pip._vendor.rich._windows_renderer import legacy_windows_render
legacy_windows_render(
self._buffer[:], LegacyWindowsTerm(self.file)
)
buffer = self._buffer[:]
if self.no_color and self._color_system:
buffer = list(Segment.remove_color(buffer))
legacy_windows_render(buffer, LegacyWindowsTerm(self.file))
else:
# Either a non-std stream on legacy Windows, or modern Windows.
text = self._render_buffer(self._buffer[:])
@ -2238,18 +2261,24 @@ class Console:
theme: Optional[TerminalTheme] = None,
clear: bool = True,
code_format: str = CONSOLE_SVG_FORMAT,
font_aspect_ratio: float = 0.61,
unique_id: Optional[str] = None,
) -> str:
"""
Generate an SVG from the console contents (requires record=True in Console constructor).
Args:
path (str): The path to write the SVG to.
title (str): The title of the tab in the output image
title (str, optional): The title of the tab in the output image
theme (TerminalTheme, optional): The ``TerminalTheme`` object to use to style the terminal
clear (bool, optional): Clear record buffer after exporting. Defaults to ``True``
code_format (str): Format string used to generate the SVG. Rich will inject a number of variables
code_format (str, optional): Format string used to generate the SVG. Rich will inject a number of variables
into the string in order to form the final SVG output. The default template used and the variables
injected by Rich can be found by inspecting the ``console.CONSOLE_SVG_FORMAT`` variable.
font_aspect_ratio (float, optional): The width to height ratio of the font used in the ``code_format``
string. Defaults to 0.61, which is the width to height ratio of Fira Code (the default font).
If you aren't specifying a different font inside ``code_format``, you probably don't need this.
unique_id (str, optional): unique id that is used as the prefix for various elements (CSS styles, node
ids). If not set, this defaults to a computed value based on the recorded content.
"""
from pip._vendor.rich.cells import cell_len
@ -2293,7 +2322,7 @@ class Console:
width = self.width
char_height = 20
char_width = char_height * 0.61
char_width = char_height * font_aspect_ratio
line_height = char_height * 1.22
margin_top = 1
@ -2345,14 +2374,16 @@ class Console:
if clear:
self._record_buffer.clear()
unique_id = "terminal-" + str(
zlib.adler32(
("".join(segment.text for segment in segments)).encode(
"utf-8", "ignore"
if unique_id is None:
unique_id = "terminal-" + str(
zlib.adler32(
("".join(repr(segment) for segment in segments)).encode(
"utf-8",
"ignore",
)
+ title.encode("utf-8", "ignore")
)
+ title.encode("utf-8", "ignore")
)
)
y = 0
for y, line in enumerate(Segment.split_and_crop_lines(segments, length=width)):
x = 0
@ -2482,23 +2513,32 @@ class Console:
theme: Optional[TerminalTheme] = None,
clear: bool = True,
code_format: str = CONSOLE_SVG_FORMAT,
font_aspect_ratio: float = 0.61,
unique_id: Optional[str] = None,
) -> None:
"""Generate an SVG file from the console contents (requires record=True in Console constructor).
Args:
path (str): The path to write the SVG to.
title (str): The title of the tab in the output image
title (str, optional): The title of the tab in the output image
theme (TerminalTheme, optional): The ``TerminalTheme`` object to use to style the terminal
clear (bool, optional): Clear record buffer after exporting. Defaults to ``True``
code_format (str): Format string used to generate the SVG. Rich will inject a number of variables
code_format (str, optional): Format string used to generate the SVG. Rich will inject a number of variables
into the string in order to form the final SVG output. The default template used and the variables
injected by Rich can be found by inspecting the ``console.CONSOLE_SVG_FORMAT`` variable.
font_aspect_ratio (float, optional): The width to height ratio of the font used in the ``code_format``
string. Defaults to 0.61, which is the width to height ratio of Fira Code (the default font).
If you aren't specifying a different font inside ``code_format``, you probably don't need this.
unique_id (str, optional): unique id that is used as the prefix for various elements (CSS styles, node
ids). If not set, this defaults to a computed value based on the recorded content.
"""
svg = self.export_svg(
title=title,
theme=theme,
clear=clear,
code_format=code_format,
font_aspect_ratio=font_aspect_ratio,
unique_id=unique_id,
)
with open(path, "wt", encoding="utf-8") as write_file:
write_file.write(svg)

View File

@ -2,7 +2,7 @@
"""Functions for reporting filesizes. Borrowed from https://github.com/PyFilesystem/pyfilesystem2
The functions declared in this module should cover the different
usecases needed to generate a string representation of a file size
use cases needed to generate a string representation of a file size
using several different units. Since there are many standards regarding
file size units, three different functions have been implemented.

View File

@ -27,7 +27,7 @@ class JSON:
indent: Union[None, int, str] = 2,
highlight: bool = True,
skip_keys: bool = False,
ensure_ascii: bool = True,
ensure_ascii: bool = False,
check_circular: bool = True,
allow_nan: bool = True,
default: Optional[Callable[[Any], Any]] = None,
@ -56,7 +56,7 @@ class JSON:
indent: Union[None, int, str] = 2,
highlight: bool = True,
skip_keys: bool = False,
ensure_ascii: bool = True,
ensure_ascii: bool = False,
check_circular: bool = True,
allow_nan: bool = True,
default: Optional[Callable[[Any], Any]] = None,

View File

@ -20,8 +20,8 @@ from .console import Console, ConsoleOptions, RenderableType, RenderResult
from .highlighter import ReprHighlighter
from .panel import Panel
from .pretty import Pretty
from .repr import rich_repr, Result
from .region import Region
from .repr import Result, rich_repr
from .segment import Segment
from .style import StyleType
@ -162,7 +162,6 @@ class Layout:
minimum_size: int = 1,
ratio: int = 1,
visible: bool = True,
height: Optional[int] = None,
) -> None:
self._renderable = renderable or _Placeholder(self)
self.size = size
@ -170,7 +169,6 @@ class Layout:
self.ratio = ratio
self.name = name
self.visible = visible
self.height = height
self.splitter: Splitter = self.splitters["column"]()
self._children: List[Layout] = []
self._render_map: RenderMap = {}

View File

@ -3,10 +3,12 @@ from datetime import datetime
from logging import Handler, LogRecord
from pathlib import Path
from types import ModuleType
from typing import ClassVar, List, Optional, Iterable, Type, Union
from typing import ClassVar, Iterable, List, Optional, Type, Union
from pip._vendor.rich._null_file import NullFile
from . import get_console
from ._log_render import LogRender, FormatTimeCallable
from ._log_render import FormatTimeCallable, LogRender
from .console import Console, ConsoleRenderable
from .highlighter import Highlighter, ReprHighlighter
from .text import Text
@ -158,16 +160,23 @@ class RichHandler(Handler):
log_renderable = self.render(
record=record, traceback=traceback, message_renderable=message_renderable
)
try:
self.console.print(log_renderable)
except Exception:
if isinstance(self.console.file, NullFile):
# Handles pythonw, where stdout/stderr are null, and we return NullFile
# instance from Console.file. In this case, we still want to make a log record
# even though we won't be writing anything to a file.
self.handleError(record)
else:
try:
self.console.print(log_renderable)
except Exception:
self.handleError(record)
def render_message(self, record: LogRecord, message: str) -> "ConsoleRenderable":
"""Render message text in to Text.
record (LogRecord): logging Record.
message (str): String containing log message.
Args:
record (LogRecord): logging Record.
message (str): String containing log message.
Returns:
ConsoleRenderable: Renderable to display log message.
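For context, a minimal sketch of the handler this change guards: under pythonw, stdout/stderr are absent, Console.file becomes a NullFile, and the handler now falls back to logging's handleError instead of attempting to print.

import logging

from rich.logging import RichHandler

# Ordinary setup; the NullFile branch above only matters when there is no
# usable stdout/stderr (e.g. pythonw on Windows).
logging.basicConfig(level="INFO", format="%(message)s", handlers=[RichHandler()])
logging.getLogger(__name__).info("hello from RichHandler")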

View File

@ -2,11 +2,12 @@ from typing import TYPE_CHECKING, Optional
from .align import AlignMethod
from .box import ROUNDED, Box
from .cells import cell_len
from .jupyter import JupyterMixin
from .measure import Measurement, measure_renderables
from .padding import Padding, PaddingDimensions
from .segment import Segment
from .style import StyleType
from .style import Style, StyleType
from .text import Text, TextType
if TYPE_CHECKING:
@ -149,9 +150,53 @@ class Panel(JupyterMixin):
safe_box: bool = console.safe_box if self.safe_box is None else self.safe_box
box = self.box.substitute(options, safe=safe_box)
def align_text(
text: Text, width: int, align: str, character: str, style: Style
) -> Text:
"""Gets new aligned text.
Args:
text (Text): Title or subtitle text.
width (int): Desired width.
align (str): Alignment.
character (str): Character for alignment.
style (Style): Border style
Returns:
Text: New text instance
"""
text = text.copy()
text.truncate(width)
excess_space = width - cell_len(text.plain)
if excess_space:
if align == "left":
return Text.assemble(
text,
(character * excess_space, style),
no_wrap=True,
end="",
)
elif align == "center":
left = excess_space // 2
return Text.assemble(
(character * left, style),
text,
(character * (excess_space - left), style),
no_wrap=True,
end="",
)
else:
return Text.assemble(
(character * excess_space, style),
text,
no_wrap=True,
end="",
)
return text
title_text = self._title
if title_text is not None:
title_text.style = border_style
title_text.stylize_before(border_style)
child_width = (
width - 2
@ -180,7 +225,13 @@ class Panel(JupyterMixin):
if title_text is None or width <= 4:
yield Segment(box.get_top([width - 2]), border_style)
else:
title_text.align(self.title_align, width - 4, character=box.top)
title_text = align_text(
title_text,
width - 4,
self.title_align,
box.top,
border_style,
)
yield Segment(box.top_left + box.top, border_style)
yield from console.render(title_text, child_options.update_width(width - 4))
yield Segment(box.top + box.top_right, border_style)
@ -194,12 +245,18 @@ class Panel(JupyterMixin):
subtitle_text = self._subtitle
if subtitle_text is not None:
subtitle_text.style = border_style
subtitle_text.stylize_before(border_style)
if subtitle_text is None or width <= 4:
yield Segment(box.get_bottom([width - 2]), border_style)
else:
subtitle_text.align(self.subtitle_align, width - 4, character=box.bottom)
subtitle_text = align_text(
subtitle_text,
width - 4,
self.subtitle_align,
box.bottom,
border_style,
)
yield Segment(box.bottom_left + box.bottom, border_style)
yield from console.render(
subtitle_text, child_options.update_width(width - 4)
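A short, hedged example of the rendering path touched here: panel titles and subtitles are now padded with the box's top/bottom characters by the local align_text helper, with the border style layered underneath any style already on the text (the strings and alignments are arbitrary).

from rich import print
from rich.panel import Panel

print(
    Panel(
        "body",
        title="Title",
        subtitle="Subtitle",
        title_align="left",
        subtitle_align="right",
    )
)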

View File

@ -120,6 +120,7 @@ def _ipy_display_hook(
indent_guides: bool = False,
max_length: Optional[int] = None,
max_string: Optional[int] = None,
max_depth: Optional[int] = None,
expand_all: bool = False,
) -> None:
# needed here to prevent circular import:
@ -177,6 +178,7 @@ def _ipy_display_hook(
indent_guides=indent_guides,
max_length=max_length,
max_string=max_string,
max_depth=max_depth,
expand_all=expand_all,
margin=12,
),
@ -202,6 +204,7 @@ def install(
indent_guides: bool = False,
max_length: Optional[int] = None,
max_string: Optional[int] = None,
max_depth: Optional[int] = None,
expand_all: bool = False,
) -> None:
"""Install automatic pretty printing in the Python REPL.
@ -214,6 +217,7 @@ def install(
max_length (int, optional): Maximum length of containers before abbreviating, or None for no abbreviation.
Defaults to None.
max_string (int, optional): Maximum length of string before truncating, or None to disable. Defaults to None.
max_depth (int, optional): Maximum depth of nested data structures, or None for no maximum. Defaults to None.
expand_all (bool, optional): Expand all containers. Defaults to False.
max_frames (int): Maximum number of frames to show in a traceback, 0 for no maximum. Defaults to 100.
"""
@ -236,6 +240,7 @@ def install(
indent_guides=indent_guides,
max_length=max_length,
max_string=max_string,
max_depth=max_depth,
expand_all=expand_all,
),
crop=crop,
@ -258,6 +263,7 @@ def install(
indent_guides=indent_guides,
max_length=max_length,
max_string=max_string,
max_depth=max_depth,
expand_all=expand_all,
)
else:
@ -333,7 +339,7 @@ class Pretty(JupyterMixin):
max_depth=self.max_depth,
expand_all=self.expand_all,
)
pretty_text = Text(
pretty_text = Text.from_ansi(
pretty_str,
justify=self.justify or options.justify,
overflow=self.overflow or options.overflow,
@ -630,6 +636,11 @@ def traverse(
def _traverse(obj: Any, root: bool = False, depth: int = 0) -> Node:
"""Walk the object depth first."""
obj_id = id(obj)
if obj_id in visited_ids:
# Recursion detected
return Node(value_repr="...")
obj_type = type(obj)
py_version = (sys.version_info.major, sys.version_info.minor)
children: List[Node]
@ -667,6 +678,7 @@ def traverse(
pass
if rich_repr_result is not None:
push_visited(obj_id)
angular = getattr(obj.__rich_repr__, "angular", False)
args = list(iter_rich_args(rich_repr_result))
class_name = obj.__class__.__name__
@ -676,7 +688,10 @@ def traverse(
append = children.append
if reached_max_depth:
node = Node(value_repr=f"...")
if angular:
node = Node(value_repr=f"<{class_name}...>")
else:
node = Node(value_repr=f"{class_name}(...)")
else:
if angular:
node = Node(
@ -711,14 +726,16 @@ def traverse(
children=[],
last=root,
)
pop_visited(obj_id)
elif _is_attr_object(obj) and not fake_attributes:
push_visited(obj_id)
children = []
append = children.append
attr_fields = _get_attr_fields(obj)
if attr_fields:
if reached_max_depth:
node = Node(value_repr=f"...")
node = Node(value_repr=f"{obj.__class__.__name__}(...)")
else:
node = Node(
open_brace=f"{obj.__class__.__name__}(",
@ -758,23 +775,18 @@ def traverse(
node = Node(
value_repr=f"{obj.__class__.__name__}()", children=[], last=root
)
pop_visited(obj_id)
elif (
is_dataclass(obj)
and not _safe_isinstance(obj, type)
and not fake_attributes
and (_is_dataclass_repr(obj) or py_version == (3, 6))
):
obj_id = id(obj)
if obj_id in visited_ids:
# Recursion detected
return Node(value_repr="...")
push_visited(obj_id)
children = []
append = children.append
if reached_max_depth:
node = Node(value_repr=f"...")
node = Node(value_repr=f"{obj.__class__.__name__}(...)")
else:
node = Node(
open_brace=f"{obj.__class__.__name__}(",
@ -792,42 +804,43 @@ def traverse(
child_node.key_separator = "="
append(child_node)
pop_visited(obj_id)
pop_visited(obj_id)
elif _is_namedtuple(obj) and _has_default_namedtuple_repr(obj):
push_visited(obj_id)
class_name = obj.__class__.__name__
if reached_max_depth:
node = Node(value_repr="...")
# If we've reached the max depth, we still show the class name, but not its contents
node = Node(
value_repr=f"{class_name}(...)",
)
else:
children = []
class_name = obj.__class__.__name__
append = children.append
node = Node(
open_brace=f"{class_name}(",
close_brace=")",
children=children,
empty=f"{class_name}()",
)
append = children.append
for last, (key, value) in loop_last(obj._asdict().items()):
child_node = _traverse(value, depth=depth + 1)
child_node.key_repr = key
child_node.last = last
child_node.key_separator = "="
append(child_node)
pop_visited(obj_id)
elif _safe_isinstance(obj, _CONTAINERS):
for container_type in _CONTAINERS:
if _safe_isinstance(obj, container_type):
obj_type = container_type
break
obj_id = id(obj)
if obj_id in visited_ids:
# Recursion detected
return Node(value_repr="...")
push_visited(obj_id)
open_brace, close_brace, empty = _BRACES[obj_type](obj)
if reached_max_depth:
node = Node(value_repr=f"...", last=root)
node = Node(value_repr=f"{open_brace}...{close_brace}")
elif obj_type.__repr__ != type(obj).__repr__:
node = Node(value_repr=to_repr(obj), last=root)
elif obj:
@ -1007,4 +1020,10 @@ if __name__ == "__main__": # pragma: no cover
from pip._vendor.rich import print
print(Pretty(data, indent_guides=True, max_string=20))
# print(Pretty(data, indent_guides=True, max_string=20))
class Thing:
def __repr__(self) -> str:
return "Hello\x1b[38;5;239m World!"
print(Pretty(Thing()))
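A small, hedged illustration of the max_depth plumbing added above: containers and reprs beyond the limit collapse to placeholders such as {...} or ClassName(...), and self-referencing structures short-circuit to "..." instead of recursing forever.

from rich import print
from rich.pretty import Pretty

# Arbitrary nested data; with max_depth=2, deeper nesting is elided.
data = {"a": {"b": {"c": [1, 2, 3]}}}
print(Pretty(data, max_depth=2))

# Recursion is detected rather than looping forever.
loop = []
loop.append(loop)
print(Pretty(loop))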

View File

@ -129,7 +129,7 @@ def track(
refresh_per_second (float): Number of times per second to refresh the progress information. Defaults to 10.
style (StyleType, optional): Style for the bar background. Defaults to "bar.back".
complete_style (StyleType, optional): Style for the completed bar. Defaults to "bar.complete".
finished_style (StyleType, optional): Style for a finished bar. Defaults to "bar.done".
finished_style (StyleType, optional): Style for a finished bar. Defaults to "bar.finished".
pulse_style (StyleType, optional): Style for pulsing bars. Defaults to "bar.pulse".
update_period (float, optional): Minimum time (in seconds) between calls to update(). Defaults to 0.1.
disable (bool, optional): Disable display of progress.
@ -216,6 +216,10 @@ class _Reader(RawIOBase, BinaryIO):
def isatty(self) -> bool:
return self.handle.isatty()
@property
def mode(self) -> str:
return self.handle.mode
@property
def name(self) -> str:
return self.handle.name
@ -315,7 +319,7 @@ def wrap_file(
refresh_per_second (float): Number of times per second to refresh the progress information. Defaults to 10.
style (StyleType, optional): Style for the bar background. Defaults to "bar.back".
complete_style (StyleType, optional): Style for the completed bar. Defaults to "bar.complete".
finished_style (StyleType, optional): Style for a finished bar. Defaults to "bar.done".
finished_style (StyleType, optional): Style for a finished bar. Defaults to "bar.finished".
pulse_style (StyleType, optional): Style for pulsing bars. Defaults to "bar.pulse".
disable (bool, optional): Disable display of progress.
Returns:
@ -440,7 +444,7 @@ def open(
refresh_per_second (float): Number of times per second to refresh the progress information. Defaults to 10.
style (StyleType, optional): Style for the bar background. Defaults to "bar.back".
complete_style (StyleType, optional): Style for the completed bar. Defaults to "bar.complete".
finished_style (StyleType, optional): Style for a finished bar. Defaults to "bar.done".
finished_style (StyleType, optional): Style for a finished bar. Defaults to "bar.finished".
pulse_style (StyleType, optional): Style for pulsing bars. Defaults to "bar.pulse".
disable (bool, optional): Disable display of progress.
encoding (str, optional): The encoding to use when reading in text mode.
@ -634,7 +638,7 @@ class BarColumn(ProgressColumn):
bar_width (Optional[int], optional): Width of bar or None for full width. Defaults to 40.
style (StyleType, optional): Style for the bar background. Defaults to "bar.back".
complete_style (StyleType, optional): Style for the completed bar. Defaults to "bar.complete".
finished_style (StyleType, optional): Style for a finished bar. Defaults to "bar.done".
finished_style (StyleType, optional): Style for a finished bar. Defaults to "bar.finished".
pulse_style (StyleType, optional): Style for pulsing bars. Defaults to "bar.pulse".
"""

View File

@ -25,7 +25,7 @@ class ProgressBar(JupyterMixin):
pulse (bool, optional): Enable pulse effect. Defaults to False. Will pulse if a None total was passed.
style (StyleType, optional): Style for the bar background. Defaults to "bar.back".
complete_style (StyleType, optional): Style for the completed bar. Defaults to "bar.complete".
finished_style (StyleType, optional): Style for a finished bar. Defaults to "bar.done".
finished_style (StyleType, optional): Style for a finished bar. Defaults to "bar.finished".
pulse_style (StyleType, optional): Style for pulsing bars. Defaults to "bar.pulse".
animation_time (Optional[float], optional): Time in seconds to use for animation, or None to use system time.
"""

View File

@ -1,21 +1,18 @@
from functools import partial
import inspect
import sys
from functools import partial
from typing import (
Any,
Callable,
Iterable,
List,
Optional,
overload,
Union,
Tuple,
Type,
TypeVar,
Union,
overload,
)
T = TypeVar("T")

View File

@ -26,7 +26,7 @@ def render_scope(
scope (Mapping): A mapping containing variable names and values.
title (str, optional): Optional title. Defaults to None.
sort_keys (bool, optional): Enable sorting of items. Defaults to True.
indent_guides (bool, optional): Enable indentaton guides. Defaults to False.
indent_guides (bool, optional): Enable indentation guides. Defaults to False.
max_length (int, optional): Maximum length of containers before abbreviating, or None for no abbreviation.
Defaults to None.
max_string (int, optional): Maximum length of string before truncating, or None to disable. Defaults to None.

View File

@ -188,8 +188,10 @@ class Style:
)
self._link = link
self._link_id = f"{randint(0, 999999)}" if link else ""
self._meta = None if meta is None else dumps(meta)
self._link_id = (
f"{randint(0, 999999)}{hash(self._meta)}" if (link or meta) else ""
)
self._hash: Optional[int] = None
self._null = not (self._set_attributes or color or bgcolor or link or meta)
@ -237,8 +239,8 @@ class Style:
style._set_attributes = 0
style._attributes = 0
style._link = None
style._link_id = ""
style._meta = dumps(meta)
style._link_id = f"{randint(0, 999999)}{hash(style._meta)}"
style._hash = None
style._null = not (meta)
return style
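A rough illustration, poking at a private attribute only because the change is about it: a style carrying only meta now receives a link_id, previously reserved for hyperlinks (the meta key is arbitrary).

from rich.style import Style

style = Style(meta={"@click": "app.bell()"})
print(bool(style._link_id))  # expected to be truthy after this change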

View File

@ -40,6 +40,7 @@ from pip._vendor.rich.containers import Lines
from pip._vendor.rich.padding import Padding, PaddingDimensions
from ._loop import loop_first
from .cells import cell_len
from .color import Color, blend_rgb
from .console import Console, ConsoleOptions, JustifyMethod, RenderResult
from .jupyter import JupyterMixin
@ -586,11 +587,21 @@ class Syntax(JupyterMixin):
def __rich_measure__(
self, console: "Console", options: "ConsoleOptions"
) -> "Measurement":
_, right, _, left = Padding.unpack(self.padding)
padding = left + right
if self.code_width is not None:
width = self.code_width + self._numbers_column_width + right + left
width = self.code_width + self._numbers_column_width + padding + 1
return Measurement(self._numbers_column_width, width)
return Measurement(self._numbers_column_width, options.max_width)
lines = self.code.splitlines()
width = (
self._numbers_column_width
+ padding
+ (max(cell_len(line) for line in lines) if lines else 0)
)
if self.line_numbers:
width += 1
return Measurement(self._numbers_column_width, width)
def __rich_console__(
self, console: Console, options: ConsoleOptions
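A hedged check of the measurement change: the maximum width now tracks the longest code line plus gutter and padding instead of defaulting to the console width (the snippet measured is arbitrary).

from rich.console import Console
from rich.measure import Measurement
from rich.syntax import Syntax

console = Console()
syntax = Syntax("print('hi')\nvalue = 1 + 2", "python", line_numbers=True)
print(Measurement.get(console, console.options, syntax))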

View File

@ -462,6 +462,12 @@ class Table(JupyterMixin):
)
self.rows.append(Row(style=style, end_section=end_section))
def add_section(self) -> None:
"""Add a new section (draw a line after current row)."""
if self.rows:
self.rows[-1].end_section = True
def __rich_console__(
self, console: "Console", options: "ConsoleOptions"
) -> "RenderResult":

View File

@ -450,7 +450,6 @@ class Text(JupyterMixin):
style (Union[str, Style]): Style instance or style definition to apply.
start (int): Start offset (negative indexing is supported). Defaults to 0.
end (Optional[int], optional): End offset (negative indexing is supported), or None for end of text. Defaults to None.
"""
if style:
length = len(self)
@ -465,6 +464,32 @@ class Text(JupyterMixin):
return
self._spans.append(Span(start, min(length, end), style))
def stylize_before(
self,
style: Union[str, Style],
start: int = 0,
end: Optional[int] = None,
) -> None:
"""Apply a style to the text, or a portion of the text. Styles will be applied before other styles already present.
Args:
style (Union[str, Style]): Style instance or style definition to apply.
start (int): Start offset (negative indexing is supported). Defaults to 0.
end (Optional[int], optional): End offset (negative indexing is supported), or None for end of text. Defaults to None.
"""
if style:
length = len(self)
if start < 0:
start = length + start
if end is None:
end = length
if end < 0:
end = length + end
if start >= length or end <= start:
# Span not in text or not valid
return
self._spans.insert(0, Span(start, min(length, end), style))
def apply_meta(
self, meta: Dict[str, Any], start: int = 0, end: Optional[int] = None
) -> None:
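A minimal sketch of stylize_before in use: the span is inserted beneath existing spans, so styles applied later still win where they overlap (the styles chosen here are arbitrary).

from rich.console import Console
from rich.text import Text

text = Text("border title")
text.stylize("bold", 0, 6)        # applied first, stays on top
text.stylize_before("dim cyan")   # whole string, lowest priority
Console().print(text)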

View File

@ -337,7 +337,7 @@ class Traceback:
from pip._vendor.rich import _IMPORT_CWD
def safe_str(_object: Any) -> str:
"""Don't allow exceptions from __str__ to propegate."""
"""Don't allow exceptions from __str__ to propagate."""
try:
return str(_object)
except Exception:
@ -389,19 +389,17 @@ class Traceback:
del stack.frames[:]
cause = getattr(exc_value, "__cause__", None)
if cause and cause.__traceback__:
if cause:
exc_type = cause.__class__
exc_value = cause
# __traceback__ can be None, e.g. for exceptions raised by the
# 'multiprocessing' module
traceback = cause.__traceback__
is_cause = True
continue
cause = exc_value.__context__
if (
cause
and cause.__traceback__
and not getattr(exc_value, "__suppress_context__", False)
):
if cause and not getattr(exc_value, "__suppress_context__", False):
exc_type = cause.__class__
exc_value = cause
traceback = cause.__traceback__
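A tiny, hedged repro of the case covered above: an explicit cause that was never raised has __traceback__ set to None (as also happens for exceptions re-raised out of multiprocessing), and the chain is now still rendered.

from rich.console import Console

try:
    raise RuntimeError("outer") from ValueError("inner, never raised")
except RuntimeError:
    Console().print_exception()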

View File

@ -4,15 +4,15 @@ distlib==0.3.6
distro==1.8.0
msgpack==1.0.4
packaging==21.3
pep517==0.13.0
platformdirs==2.5.3
pyparsing==3.0.9
pyproject-hooks==1.0.0
requests==2.28.1
certifi==2022.09.24
chardet==5.0.0
idna==3.4
urllib3==1.26.12
rich==12.5.1
rich==12.6.0
pygments==2.13.0
typing_extensions==4.4.0
resolvelib==0.8.1

View File

@ -80,6 +80,37 @@ class TestLink:
assert "eggname" == Link(url).egg_fragment
assert "subdir" == Link(url).subdirectory_fragment
# Extras are supported and preserved in the egg fragment,
# even the empty extras specifier.
# This behavior is deprecated and will change in pip 25.
url = "git+https://example.com/package#egg=eggname[extra]"
assert "eggname[extra]" == Link(url).egg_fragment
assert None is Link(url).subdirectory_fragment
url = "git+https://example.com/package#egg=eggname[extra1,extra2]"
assert "eggname[extra1,extra2]" == Link(url).egg_fragment
assert None is Link(url).subdirectory_fragment
url = "git+https://example.com/package#egg=eggname[]"
assert "eggname[]" == Link(url).egg_fragment
assert None is Link(url).subdirectory_fragment
@pytest.mark.xfail(reason="Behavior change scheduled for 25.0", strict=True)
@pytest.mark.parametrize(
"fragment",
[
# Package names in egg fragments must be in PEP 508 form.
"~invalid~package~name~",
# Version specifiers are not valid in egg fragments.
"eggname==1.2.3",
"eggname>=1.2.3",
# The extras specifier must be in PEP 508 form.
"eggname[!]",
],
)
def test_invalid_egg_fragments(self, fragment: str) -> None:
url = f"git+https://example.com/package#egg={fragment}"
with pytest.raises(Exception):
Link(url)
@pytest.mark.parametrize(
"yanked_reason, expected",
[

View File

@ -0,0 +1,155 @@
"""Update the 'exact' redirects on Read the Docs to match an in-tree file's contents.
Relevant API reference: https://docs.readthedocs.io/en/stable/api/v3.html#redirects
"""
import operator
import os
import sys
from pathlib import Path
import httpx
import rich
import yaml
try:
_TOKEN = os.environ["RTD_API_TOKEN"]
except KeyError:
rich.print(
"[bold]error[/]: [red]No API token provided. Please set `RTD_API_TOKEN`.[/]",
file=sys.stderr,
)
sys.exit(1)
RTD_API_HEADERS = {"Authorization": f"token {_TOKEN}"}
RTD_API_BASE_URL = "https://readthedocs.org/api/v3/projects/pip/"
REPO_ROOT = Path(__file__).resolve().parent.parent
# --------------------------------------------------------------------------------------
# Helpers
# --------------------------------------------------------------------------------------
def next_step(msg: str) -> None:
rich.print(f"> [blue]{msg}[/]")
def log_response(response: httpx.Response) -> None:
request = response.request
rich.print(f"[bold magenta]{request.method}[/] {request.url} -> {response}")
def get_rtd_api() -> httpx.Client:
return httpx.Client(
headers=RTD_API_HEADERS,
base_url=RTD_API_BASE_URL,
event_hooks={"response": [log_response]},
)
# --------------------------------------------------------------------------------------
# Actual logic
# --------------------------------------------------------------------------------------
next_step("Loading local redirects from the yaml file.")
with open(REPO_ROOT / ".readthedocs-custom-redirects.yml") as f:
local_redirects = yaml.safe_load(f)
rich.print("Loaded local redirects!")
for src, dst in sorted(local_redirects.items()):
rich.print(f" [yellow]{src}[/] --> {dst}")
rich.print(f"{len(local_redirects)} entries.")
next_step("Fetch redirects configured on RTD.")
with get_rtd_api() as rtd_api:
response = rtd_api.get("redirects/")
response.raise_for_status()
rtd_redirects = response.json()
for redirect in sorted(
rtd_redirects["results"], key=operator.itemgetter("type", "from_url", "to_url")
):
if redirect["type"] != "exact":
rich.print(f" [magenta]{redirect['type']}[/]")
continue
pk = redirect["pk"]
src = redirect["from_url"]
dst = redirect["to_url"]
rich.print(f" [yellow]{src}[/] -({pk:^5})-> {dst}")
rich.print(f"{rtd_redirects['count']} entries.")
next_step("Compare and determine modifications.")
redirects_to_remove: list[int] = []
redirects_to_add: dict[str, str] = {}
for redirect in rtd_redirects["results"]:
if redirect["type"] != "exact":
continue
rtd_src = redirect["from_url"]
rtd_dst = redirect["to_url"]
redirect_id = redirect["pk"]
if rtd_src not in local_redirects:
redirects_to_remove.append(redirect_id)
continue
local_dst = local_redirects[rtd_src]
if local_dst != rtd_dst:
redirects_to_remove.append(redirect_id)
redirects_to_add[rtd_src] = local_dst
del local_redirects[rtd_src]
for src, dst in sorted(local_redirects.items()):
redirects_to_add[src] = dst
del local_redirects[src]
assert not local_redirects
if not redirects_to_remove:
rich.print("Nothing to remove.")
else:
rich.print(f"To remove: ({len(redirects_to_remove)} entries)")
for redirect_id in redirects_to_remove:
rich.print(" ", redirect_id)
if not redirects_to_add:
rich.print("Nothing to add.")
else:
rich.print(f"To add: ({len(redirects_to_add)} entries)")
for src, dst in redirects_to_add.items():
rich.print(f" {src} --> {dst}")
next_step("Update the RTD redirects.")
if not (redirects_to_add or redirects_to_remove):
rich.print("[green]Nothing to do![/]")
sys.exit(0)
exit_code = 0
with get_rtd_api() as rtd_api:
for redirect_id in redirects_to_remove:
response = rtd_api.delete(f"redirects/{redirect_id}/")
response.raise_for_status()
if response.status_code != 204:
rich.print("[red]This might not have been removed correctly.[/]")
exit_code = 1
for src, dst in redirects_to_add.items():
response = rtd_api.post(
"redirects/",
json={"from_url": src, "to_url": dst, "type": "exact"},
)
response.raise_for_status()
if response.status_code != 201:
rich.print("[red]This might not have been added correctly.[/]")
exit_code = 1
sys.exit(exit_code)
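For reference, a hedged sketch of invoking the script locally: it takes no command-line arguments and only needs RTD_API_TOKEN in the environment (the token below is a placeholder).

import os
import subprocess

env = dict(os.environ, RTD_API_TOKEN="<your-rtd-token>")  # placeholder token
subprocess.run(
    ["python", "tools/update-rtd-redirects.py"],
    check=True,
    env=env,
)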