-rw-r--r--  .github/workflows/doc.yml        |   6
-rw-r--r--  .github/workflows/release.yml    |  16
-rw-r--r--  .github/workflows/test.yml       |   9
-rw-r--r--  MANIFEST.in                      |   9
-rw-r--r--  Makefile                         |  15
-rw-r--r--  data/lib/pkgcore/ebd/Makefile    |  47
-rw-r--r--  doc/conf.py                      |  32
-rw-r--r--  py_build.py                      |  99
-rw-r--r--  pyproject.toml                   |  82
-rw-r--r--  pytest.ini                       |   3
-rw-r--r--  requirements/ci.txt              |   1
-rw-r--r--  requirements/dev.txt             |   2
-rw-r--r--  requirements/dist.txt            |   4
-rw-r--r--  requirements/docs.txt            |   1
-rw-r--r--  requirements/install.txt         |   2
-rw-r--r--  requirements/pyproject.toml      |   3
-rw-r--r--  requirements/test.txt            |   1
-rw-r--r--  requirements/tox.txt             |   3
-rw-r--r--  setup.cfg                        |   4
-rwxr-xr-x  setup.py                         | 253
-rw-r--r--  src/pkgcore/const.py             |   7
-rw-r--r--  src/pkgcore/ebuild/const.py      |   3
-rwxr-xr-x  src/pkgcore/scripts/__init__.py  |   8
23 files changed, 282 insertions, 328 deletions
diff --git a/.github/workflows/doc.yml b/.github/workflows/doc.yml
index 7a290586..776871b9 100644
--- a/.github/workflows/doc.yml
+++ b/.github/workflows/doc.yml
@@ -26,17 +26,15 @@ jobs:
         uses: actions/setup-python@v4
         with:
           python-version: '3.10'
-          cache: 'pip'
-          cache-dependency-path: requirements/*.txt
 
       - name: Install dependencies
         run: |
           python -m pip install --upgrade pip
-          pip install -r requirements/dev.txt -r requirements/docs.txt
+          pip install ".[doc]"
 
       - name: Build sphinx documentation
         run: |
-          python setup.py build_docs
+          make html
 
           # notify github this isn't a jekyll site
           touch build/sphinx/html/.nojekyll
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
index 6935f452..af0b9d08 100644
--- a/.github/workflows/release.yml
+++ b/.github/workflows/release.yml
@@ -4,6 +4,7 @@ on:
   push:
     branches: [deploy]
     tags: [v*]
+  workflow_dispatch:
 
 jobs:
   build-and-deploy:
@@ -13,31 +14,30 @@ jobs:
         uses: actions/checkout@v3
 
       - name: Set up Python 3.10
-        uses: actions/setup-python@v2
+        uses: actions/setup-python@v4
         with:
           python-version: "3.10"
           cache: 'pip'
-          cache-dependency-path: |
-            requirements/dist.txt
-            requirements/test.txt
+          cache-dependency-path: pyproject.toml
 
       - name: Install dependencies
         run: |
           python -m pip install --upgrade pip
-          pip install -r requirements/dist.txt -r requirements/test.txt
+          pip install build ".[test,doc]"
 
       - name: Test with pytest
         env:
           PY_COLORS: 1 # forcibly enable pytest colors
-        run: python setup.py test
+        run: pytest
 
       - name: Build sdist
         run: |
          git clean -fxd
-          python setup.py sdist
+          make man
+          make sdist
 
       - name: Build wheel
-        run: python setup.py bdist_wheel
+        run: make wheel
 
       - name: Output dist file info
         run: |
diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index c928b30f..ee1e52ee 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -37,16 +37,14 @@ jobs:
 
       - name: Pin dependencies to minimal versions
         if: ${{ matrix.deps == 'minimal-deps' }}
-        run: |
-          sed -e 's:~=:==:' -i requirements/*
-          mv requirements/install.txt requirements/dev.txt
+        run: sed -e '/dependencies/,$s/~=/==/' -i pyproject.toml
 
       - name: Set up Python ${{ matrix.python-version }}
         uses: actions/setup-python@v4
         with:
           python-version: ${{ matrix.python-version }}
           cache: 'pip'
-          cache-dependency-path: requirements/*.txt
+          cache-dependency-path: pyproject.toml
 
       # experimental targets generally lack lxml wheels
       - name: Install libxml2 and libxslt development packages
@@ -61,8 +59,7 @@ jobs:
       - name: Install dependencies
         run: |
           python -m pip install --upgrade pip
-          pip install -r requirements/test.txt -r requirements/ci.txt
-          pip install .
+          pip install ".[test]"
 
       - name: Test with pytest
         env:
diff --git a/MANIFEST.in b/MANIFEST.in
deleted file mode 100644
index 27ad4921..00000000
--- a/MANIFEST.in
+++ /dev/null
@@ -1,9 +0,0 @@
-include LICENSE *.py *.rst
-include pytest.ini tox.ini pyproject.toml .coveragerc
-recursive-include bin *
-recursive-include data *
-recursive-include doc *
-recursive-include examples *
-recursive-include requirements *
-recursive-include tests *
-global-exclude *.pyc *.pyo __pycache__
diff --git a/Makefile b/Makefile
new file mode 100644
index 00000000..be7a5b69
--- /dev/null
+++ b/Makefile
@@ -0,0 +1,15 @@
+PYTHON ?= python
+SPHINX_BUILD ?= sphinx-build
+
+.PHONY: man html
+man html:
+	$(SPHINX_BUILD) -a -b $@ doc build/sphinx/$@
+
+.PHONY: sdist wheel
+sdist wheel:
+	$(PYTHON) -m build --$@
+
+.PHONY: clean
+clean:
+	$(RM) -r build/sphinx doc/api dist
+	$(MAKE) -C data/lib/pkgcore/ebd clean
diff --git a/data/lib/pkgcore/ebd/Makefile b/data/lib/pkgcore/ebd/Makefile
new file mode 100644
index 00000000..ec6280a3
--- /dev/null
+++ b/data/lib/pkgcore/ebd/Makefile
@@ -0,0 +1,47 @@
+PYTHON ?= python
+REPO_SRC = ../../../../src
+PYTHON_CALL = PYTHONPATH=$(REPO_SRC) PYTHONDONTWRITEBYTECODE=1 $(PYTHON)
+
+TARGET = .generated
+KNOWN_EAPI = 0 1 2 3 4 5 6 7 8
+
+all: funcs cmds libs
+
+clean:
+	$(RM) -r $(TARGET)
+
+
+.PHONY: funcs
+funcs: $(TARGET)/funcs/global $(addprefix $(TARGET)/funcs/,$(KNOWN_EAPI))
+
+$(TARGET)/funcs/%:
+	@mkdir -p $(dir $@)
+	./generate_eapi_func_list $* > $@
+$(TARGET)/funcs/global:
+	@mkdir -p $(dir $@)
+	./generate_global_func_list > $@
+
+
+.PHONY: cmds
+cmds: $(addprefix $(TARGET)/cmds/,$(KNOWN_EAPI))
+$(TARGET)/cmds/%:
+	@mkdir -p $@
+	./generate_eapi_cmd_list -b $* > $@/banned
+	./generate_eapi_cmd_list -d $* > $@/deprecated
+	./generate_eapi_cmd_list -i $* > $@/internal
+
+
+.PHONY: libs
+libs: $(addprefix $(TARGET)/libs/,$(KNOWN_EAPI))
+
+.SECONDARY: $(TARGET)/libs/phases-%
+$(TARGET)/libs/phases-%:
+	@mkdir -p $(dir $@)
+	$(PYTHON_CALL) -c "from pkgcore.ebuild import eapi; print('\n'.join(eapi.eapi$*.phases.values()))" > $@
+
+$(TARGET)/libs/%: $(TARGET)/libs/phases-%
+	@mkdir -p $@
+	./generate_eapi_lib $* > $@/global
+	for phase in $$(cat $<); do \
+		./generate_eapi_lib -s $${phase} $* > $@/$${phase}; \
+	done
diff --git a/doc/conf.py b/doc/conf.py
index 7166728f..6165ae91 100644
--- a/doc/conf.py
+++ b/doc/conf.py
@@ -18,12 +18,11 @@ from importlib import import_module
 # If extensions (or modules to document with autodoc) are in another directory,
 # add these directories to sys.path here. If the directory is relative to the
 # documentation root, use os.path.abspath to make it absolute, like shown here.
-libdir = os.path.abspath(os.path.join('..', 'build', 'lib'))
-if os.path.exists(libdir):
-    sys.path.insert(0, libdir)
+sys.path.insert(0, os.path.abspath('../src/'))
 os.environ['PKGDIST_REPODIR'] = os.path.abspath('..')
 
 from snakeoil.dist import distutils_extensions as pkgdist
+from snakeoil.dist.generate_docs import generate_man, generate_html
 
 on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
 
@@ -36,7 +35,7 @@ on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
 # Add any Sphinx extension module names here, as strings. They can be extensions
 # coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
 extensions = [
-    'sphinx.ext.extlinks',
+    'sphinx.ext.extlinks', 'sphinx.ext.autosummary',
     'sphinx.ext.autodoc', 'sphinx.ext.doctest', 'sphinx.ext.intersphinx',
     'sphinx.ext.todo', 'sphinx.ext.coverage', 'sphinx.ext.ifconfig',
     'sphinx.ext.viewcode',
@@ -55,7 +54,7 @@ source_suffix = '.rst'
 master_doc = 'index'
 
 # General information about the project.
-project = pkgdist.MODULE_NAME
+project = 'pkgcore'
 authors = ''
 copyright = '2006-2022, pkgcore contributors'
@@ -227,25 +226,32 @@ latex_documents = [
 
 # -- Options for manual page output --------------------------------------------
 
+generate_man(os.path.abspath('..'), '', 'pkgcore')
+generate_html(os.path.abspath('..'), os.path.abspath('../src'), 'pkgcore')
 
-bin_path = os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(__file__))), 'bin')
-scripts = os.listdir(bin_path)
+if sys.version_info >= (3, 11):
+    import tomllib
+else:
+    import tomli as tomllib
+
+with open('../pyproject.toml', 'rb') as file:
+    pyproj = tomllib.load(file)
 
-# Note that filter-env is specially specified, since the command is installed
-# as 'filter-env', but due to python namespace contraints, it uses a '_'
-# instead.
 generated_man_pages = [
-    ('%s.scripts.' % (project,) + s.replace('-', '_'), s) for s in scripts
+    (entry.split(':')[0], name) for name, entry in pyproj['project']['scripts'].items()
+]
+authors_list = [
+    f'{author["name"]} <{author["email"]}>' for author in pyproj['project']['authors']
 ]
 
 # One entry per manual page. List of tuples
 # (source start file, name, description, authors, manual section).
 man_pages = [
-    ('man/%s' % script, script, import_module(module).__doc__.strip().split('\n', 1)[0], [], 1)
+    (f'man/{script}', script, import_module(module).__doc__.strip().split('\n', 1)[0], authors_list, 1)
     for module, script in generated_man_pages
 ]
 
-man_pages.append(('man/pkgcore', 'pkgcore', 'a framework for package management', [], 5))
+man_pages.append(('man/pkgcore', 'pkgcore', 'a framework for package management', authors_list, 5))
 
 
 # -- Options for Epub output ---------------------------------------------------
diff --git a/py_build.py b/py_build.py
new file mode 100644
index 00000000..f844b1c2
--- /dev/null
+++ b/py_build.py
@@ -0,0 +1,99 @@
+import os
+import subprocess
+import sys
+import textwrap
+from contextlib import contextmanager
+from functools import partial
+from pathlib import Path
+
+from flit_core import buildapi
+
+
+@contextmanager
+def sys_path():
+    orig_path = sys.path[:]
+    sys.path.insert(0, str(Path.cwd() / 'src'))
+    try:
+        yield
+    finally:
+        sys.path = orig_path
+
+
+def write_pkgcore_lookup_configs(cleanup_files):
+    """Generate file of install path constants."""
+    cleanup_files.append(path := Path.cwd() / "src/pkgcore/_const.py")
+    path.parent.mkdir(parents=True, exist_ok=True)
+    print(f"writing lookup config to {path}")
+
+    with open(path, "w") as f:
+        os.chmod(path, 0o644)
+        f.write(textwrap.dedent("""\
+            from os.path import join, abspath
+            import sys
+
+            from snakeoil import process
+
+            INSTALL_PREFIX = abspath(sys.prefix)
+            DATA_PATH = join(INSTALL_PREFIX, 'share/pkgcore')
+            CONFIG_PATH = join(DATA_PATH, 'config')
+            LIBDIR_PATH = join(INSTALL_PREFIX, 'lib/pkgcore')
+            EBD_PATH = join(LIBDIR_PATH, 'ebd')
+            INJECTED_BIN_PATH = ()
+        """))
+
+
+def write_verinfo(cleanup_files):
+    cleanup_files.append(path := Path.cwd() / "src/pkgcore/_verinfo.py")
+    path.parent.mkdir(parents=True, exist_ok=True)
+    print(f"generating version info: {path}")
+    from snakeoil.version import get_git_version
+    path.write_text(f"version_info={get_git_version(Path.cwd())!r}")
+
+
+def prepare_pkgcore(callback, consts: bool, plugincache: bool):
+    cleanup_files = []
+    try:
+        with sys_path():
+            write_verinfo(cleanup_files)
+
+            # Install module plugincache
+            if plugincache:
+                from pkgcore import plugin, plugins
+                print('Generating plugin cache')
+                cleanup_files.append(path := Path.cwd() / "src/pkgcore/plugins")
+                plugin.initialize_cache(plugins, force=True, cache_dir=path)
+
+            # Install configuration data so pkgcore knows where to find its content,
+            # rather than assuming it is running from a tarball/git repo.
+            if consts:
+                write_pkgcore_lookup_configs(cleanup_files)
+
+            # generate function lists so they don't need to be created on install
+            if subprocess.call(['make', f'PYTHON={sys.executable}'], cwd=Path.cwd() / 'data/lib/pkgcore/ebd'):
+                raise Exception("Running makefile failed")
+
+        return callback()
+    finally:
+        for path in cleanup_files:
+            try:
+                path.unlink()
+            except OSError:
+                pass
+
+
+def build_wheel(wheel_directory, config_settings=None, metadata_directory=None):
+    """Builds a wheel, places it in wheel_directory"""
+    callback = partial(buildapi.build_wheel, wheel_directory, config_settings, metadata_directory)
+    return prepare_pkgcore(callback, consts=True, plugincache=True)
+
+
+def build_editable(wheel_directory, config_settings=None, metadata_directory=None):
+    """Builds an "editable" wheel, places it in wheel_directory"""
+    callback = partial(buildapi.build_editable, wheel_directory, config_settings, metadata_directory)
+    return prepare_pkgcore(callback, consts=False, plugincache=True)
+
+
+def build_sdist(sdist_directory, config_settings=None):
+    """Builds an sdist, places it in sdist_directory"""
+    callback = partial(buildapi.build_sdist, sdist_directory, config_settings)
+    return prepare_pkgcore(callback, consts=False, plugincache=False)
diff --git a/pyproject.toml b/pyproject.toml
index 1006a99c..020991b5 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,11 +1,85 @@
 [build-system]
 requires = [
-    "wheel", "setuptools",
-    "snakeoil @ https://github.com/pkgcore/snakeoil/archive/master.tar.gz",
+    "flit_core >=3.8,<4",
+    "snakeoil",
 ]
-build-backend = "setuptools.build_meta"
+build-backend = "py_build"
+backend-path = ["."]
+
+[project]
+name = "pkgcore"
+description = "package managing framework"
+readme = "README.rst"
+license = {file = "LICENSE"}
+requires-python = "~=3.9"
+authors = [
+    {name = "Tim Harder", email = "radhermit@gmail.com"},
+    {name = "Arthur Zamarin", email = "arthurzam@gentoo.org"},
+]
+maintainers = [
+    {name = "Arthur Zamarin", email = "arthurzam@gentoo.org"},
+]
+classifiers = [
+    "License :: OSI Approved :: BSD License",
+    "Programming Language :: Python :: 3.9",
+    "Programming Language :: Python :: 3.10",
+    "Programming Language :: Python :: 3.11",
+]
+dynamic = ["version"]
+
+dependencies = [
+    "snakeoil~=0.10.0",
+    "lxml",
+]
+
+[project.optional-dependencies]
+test = [
+    "pytest>=6.0",
+    "pytest-cov",
+]
+doc = [
+    "sphinx",
+    "tomli; python_version < '3.11'"
+]
+
+[project.urls]
+Homepage = "https://github.com/pkgcore/pkgcore"
+Documentation = "https://pkgcore.github.io/pkgcore/"
+Source = "https://github.com/pkgcore/pkgcore"
+
+[project.scripts]
+patom = "pkgcore.scripts.__init__:main"
+pclean = "pkgcore.scripts.__init__:main"
+pclonecache = "pkgcore.scripts.__init__:main"
+pconfig = "pkgcore.scripts.__init__:main"
+pebuild = "pkgcore.scripts.__init__:main"
+pinspect = "pkgcore.scripts.__init__:main"
+pmaint = "pkgcore.scripts.__init__:main"
+pmerge = "pkgcore.scripts.__init__:main"
= "pkgcore.scripts.__init__:main" +pplugincache = "pkgcore.scripts.__init__:main" +pquery = "pkgcore.scripts.__init__:main" + +[tool.flit.external-data] +directory = "data" + +[tool.flit.sdist] +include = [ + "tox.ini", ".coveragerc", "Makefile", "py_build.py", + "NEWS.rst", "doc", "tests", "examples", "bin", + "build/sphinx/man/*.1", "build/sphinx/man/*.5", +] +exclude = [ + ".github/", ".gitignore", + "doc/api/", "doc/generated/", +] + +[project.entry-points.pytest11] +pkgcore = "pkgcore.pytest.plugin" [tool.pytest.ini_options] minversion = "6.0" -addopts = "-ra" +addopts = "-vv -ra -l" testpaths = ["tests"] +markers = [ + "net: tests that require network access (must enable --network option to run)", +] diff --git a/pytest.ini b/pytest.ini deleted file mode 100644 index 1ee85793..00000000 --- a/pytest.ini +++ /dev/null @@ -1,3 +0,0 @@ -[pytest] -markers = - net: tests that require network access (must enable --network option to run) diff --git a/requirements/ci.txt b/requirements/ci.txt deleted file mode 100644 index c75c448b..00000000 --- a/requirements/ci.txt +++ /dev/null @@ -1 +0,0 @@ -pytest-cov diff --git a/requirements/dev.txt b/requirements/dev.txt deleted file mode 100644 index f5a4d304..00000000 --- a/requirements/dev.txt +++ /dev/null @@ -1,2 +0,0 @@ -lxml -snakeoil @ https://github.com/pkgcore/snakeoil/archive/master.tar.gz diff --git a/requirements/dist.txt b/requirements/dist.txt deleted file mode 100644 index ec031163..00000000 --- a/requirements/dist.txt +++ /dev/null @@ -1,4 +0,0 @@ -# deps for building sdist/wheels for pypi --r install.txt --r docs.txt -wheel diff --git a/requirements/docs.txt b/requirements/docs.txt deleted file mode 100644 index 6966869c..00000000 --- a/requirements/docs.txt +++ /dev/null @@ -1 +0,0 @@ -sphinx diff --git a/requirements/install.txt b/requirements/install.txt deleted file mode 100644 index 6ec8511e..00000000 --- a/requirements/install.txt +++ /dev/null @@ -1,2 +0,0 @@ -lxml -snakeoil~=0.10.0 diff --git a/requirements/pyproject.toml b/requirements/pyproject.toml deleted file mode 100644 index 8d102c81..00000000 --- a/requirements/pyproject.toml +++ /dev/null @@ -1,3 +0,0 @@ -[build-system] -requires = ["wheel", "setuptools", "snakeoil~=0.10.0"] -build-backend = "setuptools.build_meta" diff --git a/requirements/test.txt b/requirements/test.txt deleted file mode 100644 index e079f8a6..00000000 --- a/requirements/test.txt +++ /dev/null @@ -1 +0,0 @@ -pytest diff --git a/requirements/tox.txt b/requirements/tox.txt deleted file mode 100644 index 4e49649d..00000000 --- a/requirements/tox.txt +++ /dev/null @@ -1,3 +0,0 @@ --r dev.txt --r test.txt --r ci.txt diff --git a/setup.cfg b/setup.cfg deleted file mode 100644 index e02618b3..00000000 --- a/setup.cfg +++ /dev/null @@ -1,4 +0,0 @@ -[build_sphinx] -source-dir = doc -build-dir = build/sphinx -all_files = 1 diff --git a/setup.py b/setup.py deleted file mode 100755 index 73916099..00000000 --- a/setup.py +++ /dev/null @@ -1,253 +0,0 @@ -#!/usr/bin/env python3 - -import os -import subprocess -import sys - -from setuptools import setup -from setuptools._distutils import log -from setuptools._distutils.errors import DistutilsExecError -from setuptools._distutils.util import byte_compile -from snakeoil.dist import distutils_extensions as pkgdist - -pkgdist_setup, pkgdist_cmds = pkgdist.setup() - -# These offsets control where we install the pkgcore config files and the EBD -# bits relative to the install-data path given to the install subcmd. 
-DATA_INSTALL_OFFSET = 'share/pkgcore'
-CONFIG_INSTALL_OFFSET = os.path.join(DATA_INSTALL_OFFSET, 'config')
-LIBDIR_INSTALL_OFFSET = 'lib/pkgcore'
-EBD_INSTALL_OFFSET = os.path.join(LIBDIR_INSTALL_OFFSET, 'ebd')
-
-EBD_SCRIPTS_DIR = os.path.join(pkgdist.REPODIR, 'data/lib/pkgcore/ebd')
-
-
-class sdist(pkgdist.sdist):
-    """sdist wrapper to bundle generated files for release."""
-
-    def make_release_tree(self, base_dir, files):
-        """Generate bash function lists for releases."""
-        import shutil
-
-        # generate function lists so they don't need to be created on install
-        write_pkgcore_ebd_funclists(root='/', target='data/lib/pkgcore/ebd/.generated')
-        write_pkgcore_ebd_cmdlists(root='/', target='data/lib/pkgcore/ebd/.generated')
-        write_pkgcore_ebd_eapi_libs(root='/', target='data/lib/pkgcore/ebd/.generated')
-        shutil.copytree(
-            os.path.join(pkgdist.REPODIR, 'data/lib/pkgcore/ebd/.generated'),
-            os.path.join(base_dir, 'data/lib/pkgcore/ebd/.generated'))
-
-        pkgdist.sdist.make_release_tree(self, base_dir, files)
-
-
-class install(pkgdist.install):
-    """Install wrapper to generate and install pkgcore-related files."""
-
-    def run(self):
-        super().run()
-        target = self.install_data
-        root = self.root or '/'
-        if target.startswith(root):
-            target = os.path.join('/', os.path.relpath(target, root))
-        target = os.path.abspath(target)
-        if not self.dry_run:
-            # Install module plugincache
-            # TODO: move this to pkgdist once plugin support is moved to snakeoil
-            with pkgdist.syspath(pkgdist.PACKAGEDIR):
-                from pkgcore import plugin, plugins
-                log.info('Generating plugin cache')
-                path = os.path.join(self.install_purelib, 'pkgcore', 'plugins')
-                plugin.initialize_cache(plugins, force=True, cache_dir=path)
-
-            # Install configuration data so pkgcore knows where to find its content,
-            # rather than assuming it is running from a tarball/git repo.
-            write_pkgcore_lookup_configs(self.install_purelib, target)
-
-            # Generate ebd libs when not running from release tarballs that
-            # contain pre-generated files.
-            if not os.path.exists(os.path.join(pkgdist.REPODIR, 'man')):
-                generated_target = os.path.join(target, EBD_INSTALL_OFFSET, '.generated')
-                write_pkgcore_ebd_funclists(root=root, target=generated_target)
-                write_pkgcore_ebd_cmdlists(root=root, target=generated_target)
-                write_pkgcore_ebd_eapi_libs(root=root, target=generated_target)
-
-
-def write_pkgcore_ebd_funclists(root, target):
-    "Generate bash function lists from ebd implementation for env filtering."""
-    ebd_dir = target
-    if root != '/':
-        ebd_dir = os.path.join(root, target.lstrip('/'))
-    os.makedirs(os.path.join(ebd_dir, 'funcs'), exist_ok=True)
-
-    # generate global function list
-    path = os.path.join(ebd_dir, 'funcs', 'global')
-    log.info(f'writing ebd global function list: {path!r}')
-    with open(path, 'w') as f:
-        if subprocess.call(
-                [os.path.join(EBD_SCRIPTS_DIR, 'generate_global_func_list')],
-                cwd=ebd_dir, stdout=f):
-            raise DistutilsExecError("generating global function list failed")
-
-    # generate EAPI specific function lists
-    with pkgdist.syspath(pkgdist.PACKAGEDIR):
-        from pkgcore.ebuild.eapi import EAPI
-        for eapi_obj in EAPI.known_eapis.values():
-            eapi = str(eapi_obj)
-            path = os.path.join(ebd_dir, 'funcs', eapi)
-            log.info(f'writing EAPI {eapi} function list: {path!r}')
-            with open(path, 'w') as f:
-                if subprocess.call(
-                        [os.path.join(EBD_SCRIPTS_DIR, 'generate_eapi_func_list'), eapi],
-                        cwd=ebd_dir, stdout=f):
-                    raise DistutilsExecError(f"generating EAPI {eapi} function list failed")
-
-
-def write_pkgcore_ebd_cmdlists(root, target):
-    "Generate bash function lists from ebd implementation for env filtering."""
-    ebd_dir = target
-    if root != '/':
-        ebd_dir = os.path.join(root, target.lstrip('/'))
-    os.makedirs(os.path.join(ebd_dir, 'cmds'), exist_ok=True)
-
-    # generate EAPI specific command lists
-    script = os.path.join(EBD_SCRIPTS_DIR, 'generate_eapi_cmd_list')
-    with pkgdist.syspath(pkgdist.PACKAGEDIR):
-        from pkgcore.ebuild.eapi import EAPI
-        for eapi_obj in EAPI.known_eapis.values():
-            eapi = str(eapi_obj)
-            os.makedirs(os.path.join(ebd_dir, 'cmds', eapi), exist_ok=True)
-
-            path = os.path.join(ebd_dir, 'cmds', eapi, 'banned')
-            log.info(f'writing EAPI {eapi} banned command list: {path!r}')
-            with open(path, 'w') as f:
-                if subprocess.call(
-                        [script, '-b', eapi],
-                        cwd=ebd_dir, stdout=f):
-                    raise DistutilsExecError(f'generating EAPI {eapi} banned command list failed')
-
-            path = os.path.join(ebd_dir, 'cmds', eapi, 'deprecated')
-            log.info(f'writing EAPI {eapi} deprecated command list: {path!r}')
-            with open(path, 'w') as f:
-                if subprocess.call(
-                        [script, '-d', eapi],
-                        cwd=ebd_dir, stdout=f):
-                    raise DistutilsExecError(f'generating EAPI {eapi} deprecated command list failed')
-
-            path = os.path.join(ebd_dir, 'cmds', eapi, 'internal')
-            log.info(f'writing EAPI {eapi} internal command list: {path!r}')
-            with open(path, 'w') as f:
-                if subprocess.call(
-                        [script, '-i', eapi],
-                        cwd=ebd_dir, stdout=f):
-                    raise DistutilsExecError(f'generating EAPI {eapi} internal command list failed')
-
-
-def write_pkgcore_ebd_eapi_libs(root, target):
-    "Generate bash EAPI scope libs for sourcing."""
-    ebd_dir = target
-    if root != '/':
-        ebd_dir = os.path.join(root, target.lstrip('/'))
-
-    script = os.path.join(EBD_SCRIPTS_DIR, 'generate_eapi_lib')
-    with pkgdist.syspath(pkgdist.PACKAGEDIR):
-        from pkgcore.ebuild.eapi import EAPI
-        for eapi_obj in EAPI.known_eapis.values():
-            eapi = str(eapi_obj)
-            os.makedirs(os.path.join(ebd_dir, 'libs', eapi), exist_ok=True)
-
-            # generate global scope lib
-            path = os.path.join(ebd_dir, 'libs', eapi, 'global')
-            log.info(f'writing global EAPI {eapi} lib: {path!r}')
-            with open(path, 'w') as f:
-                if subprocess.call([script, eapi], cwd=ebd_dir, stdout=f):
-                    raise DistutilsExecError(
-                        f"generating global scope EAPI {eapi} lib failed")
-
-            for phase in eapi_obj.phases.values():
-                # generate phase scope lib
-                path = os.path.join(ebd_dir, 'libs', eapi, phase)
-                log.info(f'writing EAPI {eapi} {phase} phase lib: {path!r}')
-                with open(path, 'w') as f:
-                    if subprocess.call([script, '-s', phase, eapi], cwd=ebd_dir, stdout=f):
-                        raise DistutilsExecError(
-                            f"generating {phase} phase scope EAPI {eapi} lib failed")
-
-
-def write_pkgcore_lookup_configs(python_base, install_prefix, injected_bin_path=()):
-    """Generate file of install path constants."""
-    path = os.path.join(python_base, "pkgcore", "_const.py")
-    os.makedirs(os.path.dirname(path), exist_ok=True)
-    log.info("writing lookup config to %r", path)
-
-    wheel_install = (
-        install_prefix != os.path.abspath(sys.prefix)
-        and not install_prefix.startswith(pkgdist.REPODIR)
-    )
-
-    with open(path, "w") as f:
-        os.chmod(path, 0o644)
-        # write more dynamic _const file for wheel installs
-        if wheel_install:
-            import textwrap
-            f.write(textwrap.dedent(f"""\
-                import os.path as osp
-                import sys
-
-                from snakeoil import process
-
-                INSTALL_PREFIX = osp.abspath(sys.prefix)
-                DATA_PATH = osp.join(INSTALL_PREFIX, {DATA_INSTALL_OFFSET!r})
-                CONFIG_PATH = osp.join(INSTALL_PREFIX, {CONFIG_INSTALL_OFFSET!r})
-                LIBDIR_PATH = osp.join(INSTALL_PREFIX, {LIBDIR_INSTALL_OFFSET!r})
-                EBD_PATH = osp.join(INSTALL_PREFIX, {EBD_INSTALL_OFFSET!r})
-                INJECTED_BIN_PATH = ()
-                CP_BINARY = process.find_binary('cp')
-            """))
-        else:
-            f.write(f"INSTALL_PREFIX={install_prefix!r}\n")
-            f.write(f"DATA_PATH={os.path.join(install_prefix, DATA_INSTALL_OFFSET)!r}\n")
-            f.write(f"CONFIG_PATH={os.path.join(install_prefix, CONFIG_INSTALL_OFFSET)!r}\n")
-            f.write(f"LIBDIR_PATH={os.path.join(install_prefix, LIBDIR_INSTALL_OFFSET)!r}\n")
-            f.write(f"EBD_PATH={os.path.join(install_prefix, EBD_INSTALL_OFFSET)!r}\n")
-
-            # This is added to suppress the default behaviour of looking
-            # within the repo for a bin subdir.
-            f.write(f"INJECTED_BIN_PATH={tuple(injected_bin_path)!r}\n")
-
-            # Static paths for various utilities.
-            from snakeoil import process
-            required_progs = ('cp',)
-            try:
-                for prog in required_progs:
-                    prog_path = process.find_binary(prog)
-                    f.write(f"{prog.upper()}_BINARY={prog_path!r}\n")
-            except process.CommandNotFound:
-                raise DistutilsExecError(
-                    f"generating lookup config failed: required utility {prog!r} missing from PATH")
-
-        f.close()
-        byte_compile([path], prefix=python_base)
-        byte_compile([path], optimize=1, prefix=python_base)
-        byte_compile([path], optimize=2, prefix=python_base)
-
-
-setup(**dict(
-    pkgdist_setup,
-    description='package managing framework',
-    url='https://github.com/pkgcore/pkgcore',
-    license='BSD',
-    author='Tim Harder',
-    author_email='radhermit@gmail.com',
-    entry_points={'pytest11': ['pkgcore = pkgcore.pytest.plugin']},
-    data_files=list(pkgdist.data_mapping('.', 'data')),
-    cmdclass=dict(
-        pkgdist_cmds,
-        sdist=sdist,
-        install=install,
-    ),
-    classifiers=[
-        'License :: OSI Approved :: BSD License',
-        'Programming Language :: Python :: 3.9',
-        'Programming Language :: Python :: 3.10',
-        'Programming Language :: Python :: 3.11',
-    ],
-))
diff --git a/src/pkgcore/const.py b/src/pkgcore/const.py
index f5dde16a..6f231523 100644
--- a/src/pkgcore/const.py
+++ b/src/pkgcore/const.py
@@ -5,7 +5,7 @@ Internal constants.
 import os
 import sys
 
-from snakeoil import mappings
+from snakeoil import mappings, process
 
 from . import __title__
 
@@ -50,7 +50,8 @@ SYSTEM_CACHE_PATH = '/var/cache/pkgcore'
 REPO_PATH = _GET_CONST('REPO_PATH', _reporoot)
 DATA_PATH = _GET_CONST('DATA_PATH', '%(REPO_PATH)s/data/share/pkgcore')
-LIBDIR_PATH = _GET_CONST('LIBDIR_PATH', '%(REPO_PATH)s')
+LIBDIR_PATH = _GET_CONST('LIBDIR_PATH', '%(REPO_PATH)s/data/lib/pkgcore')
+EBD_PATH = _GET_CONST('EBD_PATH', '%(LIBDIR_PATH)s/ebd')
 CONFIG_PATH = _GET_CONST('CONFIG_PATH', '%(DATA_PATH)s/config')
 PATH_FORCED_PREPEND = _GET_CONST('INJECTED_BIN_PATH', ('%(REPO_PATH)s/bin',))
-CP_BINARY = _GET_CONST('CP_BINARY', '/bin/cp')
+CP_BINARY = process.find_binary('cp', fallback='/bin/cp')
diff --git a/src/pkgcore/ebuild/const.py b/src/pkgcore/ebuild/const.py
index c32fa9ba..53bcdfbc 100644
--- a/src/pkgcore/ebuild/const.py
+++ b/src/pkgcore/ebuild/const.py
@@ -4,7 +4,7 @@ ebuild internal constants
 
 from snakeoil.osutils import pjoin
 
-from .. import const
+from ..const import EBD_PATH
 
 incrementals = (
     "ACCEPT_KEYWORDS", "ACCEPT_LICENSE", "CONFIG_PROTECT",
@@ -25,7 +25,6 @@ metadata_keys = (
 
 WORLD_FILE = '/var/lib/portage/world'
 
-EBD_PATH = const._GET_CONST('EBD_PATH', '%(REPO_PATH)s/data/lib/pkgcore/ebd')
 EBUILD_DAEMON_PATH = pjoin(EBD_PATH, "ebuild-daemon.bash")
 EBUILD_HELPERS_PATH = pjoin(EBD_PATH, "helpers")
diff --git a/src/pkgcore/scripts/__init__.py b/src/pkgcore/scripts/__init__.py
index 24cd6dfd..a2c9414d 100755
--- a/src/pkgcore/scripts/__init__.py
+++ b/src/pkgcore/scripts/__init__.py
@@ -32,9 +32,13 @@ def run(script_name):
         sys.exit(tool())
 
 
-if __name__ == '__main__':
+def main():
     # We're in a git repo or tarball so add the src dir to the system path.
     # Note that this assumes a certain module layout.
     src_dir = os.path.realpath(__file__).rsplit(os.path.sep, 3)[0]
     sys.path.insert(0, src_dir)
-    run(os.path.basename(__file__))
+    run(os.path.basename(sys.argv[0]))
+
+
+if __name__ == '__main__':
+    main()
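
Usage sketch (not part of the patch): after this change the PEP 517 hooks in py_build.py, declared via build-backend = "py_build" and backend-path = ["."], drive builds through flit_core, and the new top-level Makefile wraps the same invocation. Assuming the "build" frontend is installed, a local build roughly looks like:

    # equivalent of `make sdist` / `make wheel`; runs py_build.build_sdist / build_wheel via flit_core
    python -m build --sdist --wheel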