author     John Helmert III <ajak@gentoo.org>        2022-08-14 19:39:48 -0500
committer  Sam James <sam@gentoo.org>                2022-12-03 01:38:22 +0000
commit     88ce0e6fc46ebd3bad583790ceb19f9be218d0ca (patch)
tree       59b39ce1ffa6c577a4108dac779dea9f5e41905e
parent     bintree: Fix breaking GPKG structure on updates (diff)
download   portage-88ce0e6fc46ebd3bad583790ceb19f9be218d0ca.tar.gz
           portage-88ce0e6fc46ebd3bad583790ceb19f9be218d0ca.tar.bz2
           portage-88ce0e6fc46ebd3bad583790ceb19f9be218d0ca.zip
pyupgrade everything
Signed-off-by: John Helmert III <ajak@gentoo.org>
Signed-off-by: Sam James <sam@gentoo.org>
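
Since the commit message is terse, the following is a small before/after sketch of the idiom rewrites this diff applies repository-wide. The names in it (read_config_old, Task, and so on) are hypothetical illustrations, not code taken from Portage; they only mirror the transformations visible in the hunks below.

# Illustrative only: hypothetical names, mirroring the rewrites seen in the
# hunks (%-interpolation -> str.format(), io.open -> open, IOError and
# EnvironmentError -> OSError, set([...])/dict(...) -> literals and
# comprehensions, two-argument super() -> zero-argument super()).

import io  # only needed for the "before" spelling


def read_config_old(path):
    # Before: Python 2 era spellings.
    try:
        f = io.open(path, mode="r", encoding="utf-8")
    except (IOError, EnvironmentError) as e:
        return "failed for '%s': %s" % (path, e)
    with f:
        return f.read()


def read_config_new(path):
    # After: the builtin open(); IOError and EnvironmentError are aliases of
    # OSError on Python 3, so catching OSError alone is equivalent.
    try:
        with open(path, encoding="utf-8") as f:
            return f.read()
    except OSError as e:
        return "failed for '{}': {}".format(path, e)


phases = {"setup", "compile"}               # set literal instead of set([...])
options = {name: True for name in phases}   # dict comprehension instead of dict(...)


class Task:
    def _wait(self):
        return 0


class SubTask(Task):
    def _wait(self):
        return super()._wait()              # zero-argument super()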
-rwxr-xr-x  bin/archive-conf | 6
-rwxr-xr-x  bin/binhost-snapshot | 11
-rwxr-xr-x  bin/chpathtool.py | 10
-rwxr-xr-x  bin/clean_locks | 4
-rwxr-xr-x  bin/dispatch-conf | 4
-rw-r--r--  bin/doins.py | 8
-rwxr-xr-x  bin/ebuild | 49
-rwxr-xr-x  bin/ebuild-ipc.py | 6
-rwxr-xr-x  bin/egencache | 98
-rwxr-xr-x  bin/emaint | 2
-rwxr-xr-x  bin/emerge | 2
-rwxr-xr-x  bin/env-update | 2
-rwxr-xr-x  bin/glsa-check | 16
-rw-r--r--  bin/pid-ns-init | 6
-rwxr-xr-x  bin/portageq | 22
-rwxr-xr-x  bin/quickpkg | 10
-rwxr-xr-x  bin/regenworld | 8
-rw-r--r--  doc/api/conf.py | 6
-rw-r--r--  lib/_emerge/AbstractEbuildProcess.py | 10
-rw-r--r--  lib/_emerge/AbstractPollTask.py | 4
-rw-r--r--  lib/_emerge/AsynchronousLock.py | 4
-rw-r--r--  lib/_emerge/AsynchronousTask.py | 4
-rw-r--r--  lib/_emerge/Binpkg.py | 27
-rw-r--r--  lib/_emerge/BinpkgVerifier.py | 4
-rw-r--r--  lib/_emerge/BlockerCache.py | 4
-rw-r--r--  lib/_emerge/BlockerDB.py | 7
-rw-r--r--  lib/_emerge/CompositeTask.py | 2
-rw-r--r--  lib/_emerge/DependencyArg.py | 2
-rw-r--r--  lib/_emerge/EbuildBuild.py | 22
-rw-r--r--  lib/_emerge/EbuildBuildDir.py | 4
-rw-r--r--  lib/_emerge/EbuildExecuter.py | 4
-rw-r--r--  lib/_emerge/EbuildFetcher.py | 12
-rw-r--r--  lib/_emerge/EbuildFetchonly.py | 2
-rw-r--r--  lib/_emerge/EbuildIpcDaemon.py | 2
-rw-r--r--  lib/_emerge/EbuildMerge.py | 2
-rw-r--r--  lib/_emerge/EbuildMetadataPhase.py | 3
-rw-r--r--  lib/_emerge/EbuildPhase.py | 8
-rw-r--r--  lib/_emerge/FakeVartree.py | 4
-rw-r--r--  lib/_emerge/JobStatusDisplay.py | 10
-rw-r--r--  lib/_emerge/MergeListItem.py | 4
-rw-r--r--  lib/_emerge/MetadataRegen.py | 15
-rw-r--r--  lib/_emerge/Package.py | 18
-rw-r--r--  lib/_emerge/PackageMerge.py | 6
-rw-r--r--  lib/_emerge/PackagePhase.py | 5
-rw-r--r--  lib/_emerge/PackageUninstall.py | 10
-rw-r--r--  lib/_emerge/Scheduler.py | 48
-rw-r--r--  lib/_emerge/SpawnProcess.py | 8
-rw-r--r--  lib/_emerge/SubProcess.py | 6
-rw-r--r--  lib/_emerge/Task.py | 6
-rw-r--r--  lib/_emerge/UseFlagDisplay.py | 2
-rw-r--r--  lib/_emerge/actions.py | 110
-rw-r--r--  lib/_emerge/create_world_atom.py | 4
-rw-r--r--  lib/_emerge/depgraph.py | 187
-rw-r--r--  lib/_emerge/emergelog.py | 8
-rw-r--r--  lib/_emerge/getloadavg.py | 2
-rw-r--r--  lib/_emerge/resolver/backtracking.py | 2
-rw-r--r--  lib/_emerge/resolver/circular_dependency.py | 2
-rw-r--r--  lib/_emerge/resolver/output.py | 48
-rw-r--r--  lib/_emerge/resolver/output_helpers.py | 8
-rw-r--r--  lib/_emerge/resolver/package_tracker.py | 5
-rw-r--r--  lib/_emerge/resolver/slot_collision.py | 28
-rw-r--r--  lib/_emerge/search.py | 9
-rw-r--r--  lib/_emerge/show_invalid_depstring_notice.py | 2
-rw-r--r--  lib/_emerge/unmerge.py | 10
-rw-r--r--  lib/portage/__init__.py | 8
-rw-r--r--  lib/portage/_compat_upgrade/binpkg_compression.py | 2
-rw-r--r--  lib/portage/_compat_upgrade/binpkg_multi_instance.py | 2
-rw-r--r--  lib/portage/_compat_upgrade/default_locations.py | 4
-rw-r--r--  lib/portage/_emirrordist/Config.py | 4
-rw-r--r--  lib/portage/_emirrordist/DeletionIterator.py | 8
-rw-r--r--  lib/portage/_emirrordist/DeletionTask.py | 8
-rw-r--r--  lib/portage/_emirrordist/FetchIterator.py | 19
-rw-r--r--  lib/portage/_emirrordist/FetchTask.py | 58
-rw-r--r--  lib/portage/_emirrordist/MirrorDistTask.py | 4
-rw-r--r--  lib/portage/_emirrordist/main.py | 2
-rw-r--r--  lib/portage/_global_updates.py | 2
-rw-r--r--  lib/portage/_sets/ProfilePackageSet.py | 4
-rw-r--r--  lib/portage/_sets/base.py | 16
-rw-r--r--  lib/portage/_sets/dbapi.py | 36
-rw-r--r--  lib/portage/_sets/files.py | 27
-rw-r--r--  lib/portage/_sets/libs.py | 6
-rw-r--r--  lib/portage/_sets/profiles.py | 10
-rw-r--r--  lib/portage/_sets/security.py | 4
-rw-r--r--  lib/portage/_sets/shell.py | 2
-rw-r--r--  lib/portage/cache/anydbm.py | 4
-rw-r--r--  lib/portage/cache/cache_errors.py | 10
-rw-r--r--  lib/portage/cache/ebuild_xattr.py | 15
-rw-r--r--  lib/portage/cache/flat_hash.py | 19
-rw-r--r--  lib/portage/cache/fs_template.py | 6
-rw-r--r--  lib/portage/cache/index/pkg_desc_index.py | 4
-rw-r--r--  lib/portage/cache/metadata.py | 20
-rw-r--r--  lib/portage/cache/sql_template.py | 6
-rw-r--r--  lib/portage/cache/sqlite.py | 6
-rw-r--r--  lib/portage/cache/template.py | 14
-rw-r--r--  lib/portage/cache/volatile.py | 2
-rw-r--r--  lib/portage/checksum.py | 4
-rw-r--r--  lib/portage/cvstree.py | 11
-rw-r--r--  lib/portage/dbapi/IndexedPortdb.py | 4
-rw-r--r--  lib/portage/dbapi/_ContentsCaseSensitivityManager.py | 8
-rw-r--r--  lib/portage/dbapi/_MergeProcess.py | 10
-rw-r--r--  lib/portage/dbapi/_VdbMetadataDelta.py | 5
-rw-r--r--  lib/portage/dbapi/__init__.py | 6
-rw-r--r--  lib/portage/dbapi/bintree.py | 127
-rw-r--r--  lib/portage/dbapi/porttree.py | 55
-rw-r--r--  lib/portage/dbapi/vartree.py | 179
-rw-r--r--  lib/portage/debug.py | 2
-rw-r--r--  lib/portage/dep/__init__.py | 18
-rw-r--r--  lib/portage/dep/_dnf.py | 12
-rw-r--r--  lib/portage/dep/_slot_operator.py | 5
-rw-r--r--  lib/portage/dep/dep_check.py | 24
-rw-r--r--  lib/portage/dep/soname/SonameAtom.py | 6
-rw-r--r--  lib/portage/dep/soname/multilib_category.py | 2
-rw-r--r--  lib/portage/dispatch_conf.py | 6
-rw-r--r--  lib/portage/elog/__init__.py | 2
-rw-r--r--  lib/portage/elog/messages.py | 3
-rw-r--r--  lib/portage/elog/mod_mail_summary.py | 2
-rw-r--r--  lib/portage/elog/mod_save.py | 4
-rw-r--r--  lib/portage/elog/mod_save_summary.py | 4
-rw-r--r--  lib/portage/elog/mod_syslog.py | 2
-rw-r--r--  lib/portage/emaint/main.py | 12
-rw-r--r--  lib/portage/emaint/modules/merges/merges.py | 8
-rw-r--r--  lib/portage/emaint/modules/move/move.py | 6
-rw-r--r--  lib/portage/emaint/modules/sync/sync.py | 2
-rw-r--r--  lib/portage/env/config.py | 16
-rw-r--r--  lib/portage/env/loaders.py | 7
-rw-r--r--  lib/portage/exception.py | 2
-rw-r--r--  lib/portage/getbinpkg.py | 4
-rw-r--r--  lib/portage/glsa.py | 6
-rw-r--r--  lib/portage/gpkg.py | 2
-rw-r--r--  lib/portage/locks.py | 18
-rw-r--r--  lib/portage/mail.py | 2
-rw-r--r--  lib/portage/manifest.py | 19
-rw-r--r--  lib/portage/module.py | 16
-rw-r--r--  lib/portage/news.py | 3
-rw-r--r--  lib/portage/output.py | 15
-rw-r--r--  lib/portage/package/ebuild/_config/LocationsManager.py | 9
-rw-r--r--  lib/portage/package/ebuild/_ipc/QueryCommand.py | 8
-rw-r--r--  lib/portage/package/ebuild/_parallel_manifest/ManifestScheduler.py | 3
-rw-r--r--  lib/portage/package/ebuild/_parallel_manifest/ManifestTask.py | 2
-rw-r--r--  lib/portage/package/ebuild/config.py | 24
-rw-r--r--  lib/portage/package/ebuild/deprecated_profile_check.py | 3
-rw-r--r--  lib/portage/package/ebuild/digestgen.py | 2
-rw-r--r--  lib/portage/package/ebuild/doebuild.py | 77
-rw-r--r--  lib/portage/package/ebuild/fetch.py | 33
-rw-r--r--  lib/portage/package/ebuild/getmaskingstatus.py | 6
-rw-r--r--  lib/portage/package/ebuild/prepare_build_dirs.py | 17
-rw-r--r--  lib/portage/process.py | 8
-rw-r--r--  lib/portage/proxy/lazyimport.py | 2
-rw-r--r--  lib/portage/repository/config.py | 25
-rw-r--r--  lib/portage/sync/controller.py | 6
-rw-r--r--  lib/portage/sync/modules/git/git.py | 42
-rw-r--r--  lib/portage/sync/modules/mercurial/mercurial.py | 30
-rw-r--r--  lib/portage/sync/modules/rsync/rsync.py | 18
-rw-r--r--  lib/portage/sync/modules/svn/svn.py | 4
-rw-r--r--  lib/portage/sync/modules/webrsync/webrsync.py | 8
-rw-r--r--  lib/portage/sync/old_tree_timestamp.py | 4
-rw-r--r--  lib/portage/sync/syncbase.py | 12
-rw-r--r--  lib/portage/tests/__init__.py | 10
-rw-r--r--  lib/portage/tests/bin/test_eapi7_ver_funcs.py | 22
-rw-r--r--  lib/portage/tests/dbapi/test_fakedbapi.py | 4
-rw-r--r--  lib/portage/tests/dbapi/test_portdb_cache.py | 2
-rw-r--r--  lib/portage/tests/dep/testAtom.py | 22
-rw-r--r--  lib/portage/tests/dep/testStandalone.py | 4
-rw-r--r--  lib/portage/tests/dep/test_dep_getusedeps.py | 2
-rw-r--r--  lib/portage/tests/dep/test_get_operator.py | 2
-rw-r--r--  lib/portage/tests/dep/test_get_required_use_flags.py | 2
-rw-r--r--  lib/portage/tests/dep/test_isvalidatom.py | 2
-rw-r--r--  lib/portage/tests/dep/test_match_from_list.py | 2
-rw-r--r--  lib/portage/tests/dep/test_use_reduce.py | 2
-rw-r--r--  lib/portage/tests/ebuild/test_array_fromfile_eof.py | 2
-rw-r--r--  lib/portage/tests/ebuild/test_config.py | 6
-rw-r--r--  lib/portage/tests/ebuild/test_fetch.py | 24
-rw-r--r--  lib/portage/tests/ebuild/test_spawn.py | 5
-rw-r--r--  lib/portage/tests/ebuild/test_use_expand_incremental.py | 4
-rw-r--r--  lib/portage/tests/emerge/test_config_protect.py | 4
-rw-r--r--  lib/portage/tests/emerge/test_emerge_blocker_file_collision.py | 4
-rw-r--r--  lib/portage/tests/emerge/test_emerge_slot_abi.py | 4
-rw-r--r--  lib/portage/tests/emerge/test_simple.py | 6
-rw-r--r--  lib/portage/tests/env/config/test_PackageKeywordsFile.py | 2
-rw-r--r--  lib/portage/tests/env/config/test_PackageUseFile.py | 2
-rw-r--r--  lib/portage/tests/env/config/test_PortageModulesFile.py | 2
-rw-r--r--  lib/portage/tests/glsa/test_security_set.py | 2
-rw-r--r--  lib/portage/tests/gpkg/test_gpkg_path.py | 1
-rw-r--r--  lib/portage/tests/lint/test_compile_modules.py | 2
-rw-r--r--  lib/portage/tests/lint/test_import_modules.py | 2
-rw-r--r--  lib/portage/tests/process/test_PipeLogger.py | 2
-rw-r--r--  lib/portage/tests/process/test_PopenProcess.py | 4
-rw-r--r--  lib/portage/tests/process/test_PopenProcessBlockingIO.py | 2
-rw-r--r--  lib/portage/tests/process/test_poll.py | 8
-rw-r--r--  lib/portage/tests/resolver/ResolverPlayground.py | 28
-rw-r--r--  lib/portage/tests/resolver/binpkg_multi_instance/test_build_id_profile_format.py | 1
-rw-r--r--  lib/portage/tests/resolver/binpkg_multi_instance/test_rebuilt_binaries.py | 1
-rw-r--r--  lib/portage/tests/resolver/soname/test_autounmask.py | 1
-rw-r--r--  lib/portage/tests/resolver/soname/test_downgrade.py | 1
-rw-r--r--  lib/portage/tests/resolver/soname/test_or_choices.py | 1
-rw-r--r--  lib/portage/tests/resolver/soname/test_reinstall.py | 1
-rw-r--r--  lib/portage/tests/resolver/soname/test_skip_update.py | 1
-rw-r--r--  lib/portage/tests/resolver/soname/test_slot_conflict_reinstall.py | 1
-rw-r--r--  lib/portage/tests/resolver/soname/test_slot_conflict_update.py | 1
-rw-r--r--  lib/portage/tests/resolver/soname/test_soname_provided.py | 1
-rw-r--r--  lib/portage/tests/resolver/soname/test_unsatisfiable.py | 1
-rw-r--r--  lib/portage/tests/resolver/soname/test_unsatisfied.py | 1
-rw-r--r--  lib/portage/tests/resolver/test_autounmask.py | 22
-rw-r--r--  lib/portage/tests/resolver/test_autounmask_binpkg_use.py | 1
-rw-r--r--  lib/portage/tests/resolver/test_bdeps.py | 1
-rw-r--r--  lib/portage/tests/resolver/test_binary_pkg_ebuild_visibility.py | 1
-rw-r--r--  lib/portage/tests/resolver/test_changed_deps.py | 1
-rw-r--r--  lib/portage/tests/resolver/test_complete_if_new_subslot_without_revbump.py | 1
-rw-r--r--  lib/portage/tests/resolver/test_disjunctive_depend_order.py | 1
-rw-r--r--  lib/portage/tests/resolver/test_multirepo.py | 1
-rw-r--r--  lib/portage/tests/resolver/test_package_tracker.py | 2
-rw-r--r--  lib/portage/tests/resolver/test_profile_default_eapi.py | 2
-rw-r--r--  lib/portage/tests/resolver/test_profile_package_set.py | 2
-rw-r--r--  lib/portage/tests/resolver/test_regular_slot_change_without_revbump.py | 1
-rw-r--r--  lib/portage/tests/resolver/test_simple.py | 1
-rw-r--r--  lib/portage/tests/resolver/test_slot_abi.py | 3
-rw-r--r--  lib/portage/tests/resolver/test_slot_abi_downgrade.py | 3
-rw-r--r--  lib/portage/tests/resolver/test_slot_change_without_revbump.py | 1
-rw-r--r--  lib/portage/tests/resolver/test_slot_operator_autounmask.py | 3
-rw-r--r--  lib/portage/tests/resolver/test_slot_operator_rebuild.py | 1
-rw-r--r--  lib/portage/tests/resolver/test_slot_operator_unsolved.py | 3
-rw-r--r--  lib/portage/tests/resolver/test_useflags.py | 1
-rw-r--r--  lib/portage/tests/sets/base/testInternalPackageSet.py | 4
-rw-r--r--  lib/portage/tests/sync/test_sync_local.py | 8
-rw-r--r--  lib/portage/tests/unicode/test_string_format.py | 12
-rw-r--r--  lib/portage/tests/update/test_move_ent.py | 1
-rw-r--r--  lib/portage/tests/update/test_move_slot_ent.py | 1
-rw-r--r--  lib/portage/tests/update/test_update_dbentry.py | 1
-rw-r--r--  lib/portage/tests/util/futures/asyncio/test_pipe_closed.py | 4
-rw-r--r--  lib/portage/tests/util/futures/test_retry.py | 2
-rw-r--r--  lib/portage/tests/util/test_digraph.py | 60
-rw-r--r--  lib/portage/tests/util/test_getconfig.py | 4
-rw-r--r--  lib/portage/tests/util/test_socks5.py | 2
-rw-r--r--  lib/portage/tests/util/test_xattr.py | 2
-rw-r--r--  lib/portage/tests/versions/test_vercmp.py | 8
-rw-r--r--  lib/portage/update.py | 17
-rw-r--r--  lib/portage/util/ExtractKernelVersion.py | 9
-rw-r--r--  lib/portage/util/__init__.py | 34
-rw-r--r--  lib/portage/util/_async/AsyncScheduler.py | 4
-rw-r--r--  lib/portage/util/_async/BuildLogger.py | 2
-rw-r--r--  lib/portage/util/_async/FileCopier.py | 2
-rw-r--r--  lib/portage/util/_async/ForkProcess.py | 8
-rw-r--r--  lib/portage/util/_async/SchedulerInterface.py | 2
-rw-r--r--  lib/portage/util/_dyn_libs/LinkageMapELF.py | 26
-rw-r--r--  lib/portage/util/_dyn_libs/PreservedLibsRegistry.py | 6
-rw-r--r--  lib/portage/util/_dyn_libs/display_preserved_libs.py | 9
-rw-r--r--  lib/portage/util/_dyn_libs/soname_deps_qa.py | 8
-rw-r--r--  lib/portage/util/_info_files.py | 6
-rw-r--r--  lib/portage/util/_pty.py | 2
-rw-r--r--  lib/portage/util/_xattr.py | 6
-rw-r--r--  lib/portage/util/backoff.py | 2
-rw-r--r--  lib/portage/util/bin_entry_point.py | 2
-rw-r--r--  lib/portage/util/compression_probe.py | 2
-rw-r--r--  lib/portage/util/configparser.py | 9
-rw-r--r--  lib/portage/util/digraph.py | 4
-rw-r--r--  lib/portage/util/env_update.py | 57
-rw-r--r--  lib/portage/util/futures/_asyncio/streams.py | 2
-rw-r--r--  lib/portage/util/futures/extendedfutures.py | 10
-rw-r--r--  lib/portage/util/futures/iter_completed.py | 3
-rw-r--r--  lib/portage/util/futures/unix_events.py | 4
-rw-r--r--  lib/portage/util/hooks.py | 2
-rw-r--r--  lib/portage/util/listdir.py | 6
-rw-r--r--  lib/portage/util/locale.py | 5
-rw-r--r--  lib/portage/util/movefile.py | 24
-rw-r--r--  lib/portage/util/mtimedb.py | 4
-rw-r--r--  lib/portage/util/netlink.py | 2
-rw-r--r--  lib/portage/util/socks5.py | 2
-rw-r--r--  lib/portage/util/whirlpool.py | 2
-rw-r--r--  lib/portage/util/writeable_check.py | 6
-rw-r--r--  lib/portage/versions.py | 3
-rw-r--r--  lib/portage/xml/metadata.py | 12
-rw-r--r--  lib/portage/xpak.py | 4
-rwxr-xr-x  runtests | 8
-rwxr-xr-x  setup.py | 6
274 files changed, 1462 insertions, 1503 deletions
diff --git a/bin/archive-conf b/bin/archive-conf
index 3f7d186fe..b068212b4 100755
--- a/bin/archive-conf
+++ b/bin/archive-conf
@@ -59,10 +59,10 @@ def archive_conf():
for filename in content_files:
filename = filename.rstrip()
try:
- contents = open(filename, "r")
- except IOError as e:
+ contents = open(filename)
+ except OSError as e:
print(
- "archive-conf: Unable to open %s: %s" % (filename, e),
+ "archive-conf: Unable to open {}: {}".format(filename, e),
file=sys.stderr,
)
sys.exit(1)
diff --git a/bin/binhost-snapshot b/bin/binhost-snapshot
index 2076f2e53..0f28920be 100755
--- a/bin/binhost-snapshot
+++ b/bin/binhost-snapshot
@@ -79,17 +79,17 @@ def main(argv):
src_pkgs_index = os.path.join(src_pkg_dir, "Packages")
if not os.path.isdir(src_pkg_dir):
- parser.error("src_pkg_dir is not a directory: '%s'" % (src_pkg_dir,))
+ parser.error("src_pkg_dir is not a directory: '{}'".format(src_pkg_dir))
if not os.path.isfile(src_pkgs_index):
parser.error(
"src_pkg_dir does not contain a "
- + "'Packages' index: '%s'" % (src_pkg_dir,)
+ + "'Packages' index: '{}'".format(src_pkg_dir)
)
parse_result = urlparse(snapshot_uri)
if not (parse_result.scheme and parse_result.netloc and parse_result.path):
- parser.error("snapshot_uri is not a valid URI: '%s'" % (snapshot_uri,))
+ parser.error("snapshot_uri is not a valid URI: '{}'".format(snapshot_uri))
if os.path.isdir(snapshot_dir):
parser.error("snapshot_dir already exists: '%s'" % snapshot_dir)
@@ -117,7 +117,7 @@ def main(argv):
else:
cp_opts += "l"
- cp_cmd = "cp -%s %s %s" % (
+ cp_cmd = "cp -{} {} {}".format(
cp_opts,
portage._shell_quote(src_pkg_dir),
portage._shell_quote(snapshot_dir),
@@ -127,11 +127,10 @@ def main(argv):
if not (os.WIFEXITED(ret) and os.WEXITSTATUS(ret) == os.EX_OK):
return 1
- infile = io.open(
+ infile = open(
portage._unicode_encode(
src_pkgs_index, encoding=portage._encodings["fs"], errors="strict"
),
- mode="r",
encoding=portage._encodings["repo.content"],
errors="strict",
)
diff --git a/bin/chpathtool.py b/bin/chpathtool.py
index de47b097f..352fed6d0 100755
--- a/bin/chpathtool.py
+++ b/bin/chpathtool.py
@@ -52,7 +52,7 @@ class IsTextFile:
def _is_text_encoding(self, filename):
try:
- for line in io.open(filename, mode="r", encoding=self._encoding):
+ for line in open(filename, encoding=self._encoding):
pass
except UnicodeDecodeError:
return False
@@ -67,17 +67,17 @@ def chpath_inplace(filename, is_text_file, old, new):
modified = False
orig_stat = os.lstat(filename)
try:
- f = io.open(filename, buffering=0, mode="r+b")
- except IOError:
+ f = open(filename, buffering=0, mode="r+b")
+ except OSError:
try:
orig_mode = stat.S_IMODE(os.lstat(filename).st_mode)
except OSError as e:
- sys.stderr.write("%s: %s\n" % (e, filename))
+ sys.stderr.write("{}: {}\n".format(e, filename))
return
temp_mode = 0o200 | orig_mode
os.chmod(filename, temp_mode)
try:
- f = io.open(filename, buffering=0, mode="r+b")
+ f = open(filename, buffering=0, mode="r+b")
finally:
os.chmod(filename, orig_mode)
diff --git a/bin/clean_locks b/bin/clean_locks
index b80213911..2a5e4ff0f 100755
--- a/bin/clean_locks
+++ b/bin/clean_locks
@@ -23,8 +23,8 @@ if not sys.argv[1:] or "--help" in sys.argv or "-h" in sys.argv:
print("of the locks, even if we can't establish if they are in use.")
print("Please attempt cleaning without force first.")
print()
- print("%s %s/.locks" % (sys.argv[0], portage.settings["DISTDIR"]))
- print("%s --force %s/.locks" % (sys.argv[0], portage.settings["DISTDIR"]))
+ print("{} {}/.locks".format(sys.argv[0], portage.settings["DISTDIR"]))
+ print("{} --force {}/.locks".format(sys.argv[0], portage.settings["DISTDIR"]))
print()
sys.exit(1)
diff --git a/bin/dispatch-conf b/bin/dispatch-conf
index e7634eab6..1ef3c422d 100755
--- a/bin/dispatch-conf
+++ b/bin/dispatch-conf
@@ -433,7 +433,7 @@ class dispatch:
the diff of what changed into the configured log file."""
if "log-file" in self.options:
status, output = diff(curconf, newconf)
- with io.open(
+ with open(
self.options["log-file"], mode="a", encoding=_encodings["stdio"]
) as f:
f.write(output + "\n")
@@ -442,7 +442,7 @@ class dispatch:
try:
os.rename(newconf, curconf)
- except (IOError, os.error) as why:
+ except (OSError, os.error) as why:
writemsg(
"dispatch-conf: Error renaming %s to %s: %s; fatal\n"
% (newconf, curconf, str(why)),
diff --git a/bin/doins.py b/bin/doins.py
index 77f9fe021..fba8d7e55 100644
--- a/bin/doins.py
+++ b/bin/doins.py
@@ -36,7 +36,7 @@ def _warn(helper, msg):
helper: helper executable name.
msg: Message to be output.
"""
- print("!!! %s: %s\n" % (helper, msg), file=sys.stderr)
+ print("!!! {}: {}\n".format(helper, msg), file=sys.stderr)
def _parse_group(group):
@@ -111,7 +111,7 @@ def _parse_install_options(
# Because parsing '--mode' option is partially supported. If unknown
# arg for --mode is passed, namespace.mode is set to None.
if remaining or namespace.mode is None:
- _warn(helper, "Unknown install options: %s, %r" % (options, remaining))
+ _warn(helper, "Unknown install options: {}, {!r}".format(options, remaining))
if is_strict:
sys.exit(1)
_warn(
@@ -253,7 +253,7 @@ class _InsInProcessInstallRunner:
):
return True
- _warn(self._helper, "%s and %s are same file." % (source, dest))
+ _warn(self._helper, "{} and {} are same file.".format(source, dest))
return False
@@ -534,7 +534,7 @@ def _install_dir(opts, install_runner, source):
"""
if not opts.recursive:
if opts.helper == "dodoc":
- _warn(opts.helper, "%s is a directory" % (source,))
+ _warn(opts.helper, "{} is a directory".format(source))
return False
# Neither success nor fail. Return None to indicate skipped.
return None
diff --git a/bin/ebuild b/bin/ebuild
index 5368d030b..f7e509e82 100755
--- a/bin/ebuild
+++ b/bin/ebuild
@@ -96,7 +96,7 @@ opts, pargs = parser.parse_known_args(args=sys.argv[1:])
def err(txt):
- portage.writemsg("ebuild: %s\n" % (txt,), noiselevel=-1)
+ portage.writemsg("ebuild: {}\n".format(txt), noiselevel=-1)
sys.exit(1)
@@ -147,7 +147,7 @@ if ebuild.endswith(".ebuild"):
pf = os.path.basename(ebuild)[:-7]
if pf is None:
- err("%s: does not end with '.ebuild'" % (ebuild,))
+ err("{}: does not end with '.ebuild'".format(ebuild))
if not os.path.isabs(ebuild):
mycwd = os.getcwd()
@@ -180,14 +180,13 @@ if ebuild_portdir != vdb_path:
myrepo = portage.portdb.getRepositoryName(ebuild_portdir)
if not os.path.exists(ebuild):
- err("%s: does not exist" % (ebuild,))
+ err("{}: does not exist".format(ebuild))
ebuild_split = ebuild.split("/")
-cpv = "%s/%s" % (ebuild_split[-3], pf)
+cpv = "{}/{}".format(ebuild_split[-3], pf)
-with io.open(
+with open(
_unicode_encode(ebuild, encoding=_encodings["fs"], errors="strict"),
- mode="r",
encoding=_encodings["repo.content"],
errors="replace",
) as f:
@@ -195,7 +194,7 @@ with io.open(
if eapi is None:
eapi = "0"
if not portage.catpkgsplit(cpv, eapi=eapi):
- err("%s: %s: does not follow correct package syntax" % (ebuild, cpv))
+ err("{}: {}: does not follow correct package syntax".format(ebuild, cpv))
if ebuild.startswith(vdb_path):
mytree = "vartree"
@@ -204,7 +203,7 @@ if ebuild.startswith(vdb_path):
portage_ebuild = portage.db[portage.root][mytree].dbapi.findname(cpv, myrepo=myrepo)
if os.path.realpath(portage_ebuild) != ebuild:
- err("Portage seems to think that %s is at %s" % (cpv, portage_ebuild))
+ err("Portage seems to think that {} is at {}".format(cpv, portage_ebuild))
else:
mytree = "porttree"
@@ -213,7 +212,7 @@ else:
portage_ebuild = portage.portdb.findname(cpv, myrepo=myrepo)
if not portage_ebuild or portage_ebuild != ebuild:
- err("%s: does not seem to have a valid PORTDIR structure" % (ebuild,))
+ err("{}: does not seem to have a valid PORTDIR structure".format(ebuild))
if len(pargs) > 1 and "config" in pargs:
other_phases = set(pargs)
@@ -255,21 +254,19 @@ def discard_digests(myebuild, mysettings, mydbapi):
portage.settings.validate() # generate warning messages if necessary
-build_dir_phases = set(
- [
- "setup",
- "unpack",
- "prepare",
- "configure",
- "compile",
- "test",
- "install",
- "package",
- "rpm",
- "merge",
- "qmerge",
- ]
-)
+build_dir_phases = {
+ "setup",
+ "unpack",
+ "prepare",
+ "configure",
+ "compile",
+ "test",
+ "install",
+ "package",
+ "rpm",
+ "merge",
+ "qmerge",
+}
# If the current metadata is invalid then force the ebuild to be
# sourced again even if ${T}/environment already exists.
@@ -424,10 +421,10 @@ for arg in pargs:
portage.writemsg("!!! %s\n" % x, noiselevel=-1)
a = 1
except PortagePackageException as e:
- portage.writemsg("!!! %s\n" % (e,), noiselevel=-1)
+ portage.writemsg("!!! {}\n".format(e), noiselevel=-1)
a = 1
except PermissionDenied as e:
- portage.writemsg("!!! Permission Denied: %s\n" % (e,), noiselevel=-1)
+ portage.writemsg("!!! Permission Denied: {}\n".format(e), noiselevel=-1)
a = 1
if a is None:
print("Could not run the required binary?")
diff --git a/bin/ebuild-ipc.py b/bin/ebuild-ipc.py
index c0b24c542..d0d902aff 100755
--- a/bin/ebuild-ipc.py
+++ b/bin/ebuild-ipc.py
@@ -104,7 +104,7 @@ class FifoWriter(AbstractPollTask):
while buf:
try:
buf = buf[os.write(fd, buf) :]
- except EnvironmentError:
+ except OSError:
self.returncode = RETURNCODE_WRITE_FAILED
self._async_wait()
return
@@ -236,7 +236,7 @@ class EbuildIpc:
if not buf:
portage.util.writemsg_level(
- "ebuild-ipc: %s\n" % (portage.localization._("read failed"),),
+ "ebuild-ipc: {}\n".format(portage.localization._("read failed")),
level=logging.ERROR,
noiselevel=-1,
)
@@ -251,7 +251,7 @@ class EbuildIpc:
# The pickle module can raise practically
# any exception when given corrupt data.
portage.util.writemsg_level(
- "ebuild-ipc: %s\n" % (e,), level=logging.ERROR, noiselevel=-1
+ "ebuild-ipc: {}\n".format(e), level=logging.ERROR, noiselevel=-1
)
else:
diff --git a/bin/egencache b/bin/egencache
index f5e7fb7b6..987abe073 100755
--- a/bin/egencache
+++ b/bin/egencache
@@ -232,7 +232,7 @@ def parse_args(args):
jobs = -1
if jobs < 1:
- parser.error("Invalid: --jobs='%s'" % (options.jobs,))
+ parser.error("Invalid: --jobs='{}'".format(options.jobs))
options.jobs = jobs
@@ -246,7 +246,7 @@ def parse_args(args):
load_average = 0.0
if load_average <= 0.0:
- parser.error("Invalid: --load-average='%s'" % (options.load_average,))
+ parser.error("Invalid: --load-average='{}'".format(options.load_average))
options.load_average = load_average
@@ -255,22 +255,24 @@ def parse_args(args):
options.config_root = options.portage_configroot
if options.config_root is not None and not os.path.isdir(options.config_root):
- parser.error("Not a directory: --config-root='%s'" % (options.config_root,))
+ parser.error("Not a directory: --config-root='{}'".format(options.config_root))
if options.cache_dir is not None:
if not os.path.isdir(options.cache_dir):
- parser.error("Not a directory: --cache-dir='%s'" % (options.cache_dir,))
+ parser.error("Not a directory: --cache-dir='{}'".format(options.cache_dir))
if not os.access(options.cache_dir, os.W_OK):
- parser.error("Write access denied: --cache-dir='%s'" % (options.cache_dir,))
+ parser.error(
+ "Write access denied: --cache-dir='{}'".format(options.cache_dir)
+ )
for atom in args:
try:
atom = portage.dep.Atom(atom)
except portage.exception.InvalidAtom:
- parser.error("Invalid atom: %s" % (atom,))
+ parser.error("Invalid atom: {}".format(atom))
if not isjustname(atom):
- parser.error("Atom is too specific: %s" % (atom,))
+ parser.error("Atom is too specific: {}".format(atom))
if options.update_use_local_desc:
try:
@@ -415,7 +417,7 @@ class GenCache:
except OSError as e:
self.returncode |= 1
writemsg_level(
- "%s writing target: %s\n" % (cpv, e),
+ "{} writing target: {}\n".format(cpv, e),
level=logging.ERROR,
noiselevel=-1,
)
@@ -428,7 +430,7 @@ class GenCache:
except CacheError as ce:
self.returncode |= 1
writemsg_level(
- "%s writing target: %s\n" % (cpv, ce),
+ "{} writing target: {}\n".format(cpv, ce),
level=logging.ERROR,
noiselevel=-1,
)
@@ -453,7 +455,7 @@ class GenCache:
if cp is None:
self.returncode |= 1
writemsg_level(
- "Unable to parse cp for '%s'\n" % (cpv,),
+ "Unable to parse cp for '{}'\n".format(cpv),
level=logging.ERROR,
noiselevel=-1,
)
@@ -463,7 +465,7 @@ class GenCache:
self.returncode |= 1
writemsg_level(
"Error listing cache entries for "
- + "'%s': %s, continuing...\n" % (trg_cache.location, ce),
+ + "'{}': {}, continuing...\n".format(trg_cache.location, ce),
level=logging.ERROR,
noiselevel=-1,
)
@@ -476,7 +478,7 @@ class GenCache:
if cp is None:
self.returncode |= 1
writemsg_level(
- "Unable to parse cp for '%s'\n" % (cpv,),
+ "Unable to parse cp for '{}'\n".format(cpv),
level=logging.ERROR,
noiselevel=-1,
)
@@ -488,7 +490,7 @@ class GenCache:
self.returncode |= 1
writemsg_level(
"Error listing cache entries for "
- + "'%s': %s, continuing...\n" % (trg_cache.location, ce),
+ + "'{}': {}, continuing...\n".format(trg_cache.location, ce),
level=logging.ERROR,
noiselevel=-1,
)
@@ -497,7 +499,7 @@ class GenCache:
self.returncode |= 1
for cp in sorted(cp_missing):
writemsg_level(
- "No ebuilds or cache entries found for '%s'\n" % (cp,),
+ "No ebuilds or cache entries found for '{}'\n".format(cp),
level=logging.ERROR,
noiselevel=-1,
)
@@ -512,7 +514,7 @@ class GenCache:
except CacheError as ce:
self.returncode |= 1
writemsg_level(
- "%s deleting stale cache: %s\n" % (k, ce),
+ "{} deleting stale cache: {}\n".format(k, ce),
level=logging.ERROR,
noiselevel=-1,
)
@@ -523,7 +525,7 @@ class GenCache:
except CacheError as ce:
self.returncode |= 1
writemsg_level(
- "committing target: %s\n" % (ce,),
+ "committing target: {}\n".format(ce),
level=logging.ERROR,
noiselevel=-1,
)
@@ -547,9 +549,7 @@ class GenPkgDescIndex:
new = {}
if display_updates:
try:
- with open(
- self._output_file, "rt", encoding=_encodings["repo.content"]
- ) as f:
+ with open(self._output_file, encoding=_encodings["repo.content"]) as f:
for line in f:
pkg_desc = pkg_desc_index_line_read(line)
old[pkg_desc.cp] = pkg_desc
@@ -666,7 +666,7 @@ class GenUseLocalDesc:
"r+b",
)
else:
- output = io.open(
+ output = open(
_unicode_encode(
desc_path, encoding=_encodings["fs"], errors="strict"
),
@@ -674,10 +674,12 @@ class GenUseLocalDesc:
encoding=_encodings["repo.content"],
errors="backslashreplace",
)
- except IOError as e:
+ except OSError as e:
if not self._preserve_comments or os.path.isfile(desc_path):
writemsg_level(
- "ERROR: failed to open output file %s: %s\n" % (desc_path, e),
+ "ERROR: failed to open output file {}: {}\n".format(
+ desc_path, e
+ ),
level=logging.ERROR,
noiselevel=-1,
)
@@ -689,13 +691,13 @@ class GenUseLocalDesc:
# preserve_comments mode now.
writemsg_level(
"WARNING: --preserve-comments enabled, but "
- + "output file not found: %s\n" % (desc_path,),
+ + "output file not found: {}\n".format(desc_path),
level=logging.WARNING,
noiselevel=-1,
)
self._preserve_comments = False
try:
- output = io.open(
+ output = open(
_unicode_encode(
desc_path, encoding=_encodings["fs"], errors="strict"
),
@@ -703,9 +705,11 @@ class GenUseLocalDesc:
encoding=_encodings["repo.content"],
errors="backslashreplace",
)
- except IOError as e:
+ except OSError as e:
writemsg_level(
- "ERROR: failed to open output file %s: %s\n" % (desc_path, e),
+ "ERROR: failed to open output file {}: {}\n".format(
+ desc_path, e
+ ),
level=logging.ERROR,
noiselevel=-1,
)
@@ -725,7 +729,7 @@ class GenUseLocalDesc:
# Finished probing comments in binary mode, now append
# in text mode.
- output = io.open(
+ output = open(
_unicode_encode(desc_path, encoding=_encodings["fs"], errors="strict"),
mode="a",
encoding=_encodings["repo.content"],
@@ -777,11 +781,11 @@ class GenUseLocalDesc:
),
parser=ElementTree.XMLParser(target=_MetadataTreeBuilder()),
)
- except IOError:
+ except OSError:
pass
- except (ExpatError, EnvironmentError) as e:
+ except (ExpatError, OSError) as e:
writemsg_level(
- "ERROR: failed parsing %s/metadata.xml: %s\n" % (cp, e),
+ "ERROR: failed parsing {}/metadata.xml: {}\n".format(cp, e),
level=logging.ERROR,
noiselevel=-1,
)
@@ -791,7 +795,7 @@ class GenUseLocalDesc:
usedict = parse_metadata_use(metadata)
except portage.exception.ParseError as e:
writemsg_level(
- "ERROR: failed parsing %s/metadata.xml: %s\n" % (cp, e),
+ "ERROR: failed parsing {}/metadata.xml: {}\n".format(cp, e),
level=logging.ERROR,
noiselevel=-1,
)
@@ -826,7 +830,7 @@ class GenUseLocalDesc:
resdesc = next(iter(resdict.items()))[1]
else:
try:
- reskeys = dict((_Atom(k), k) for k in resdict)
+ reskeys = {_Atom(k): k for k in resdict}
except portage.exception.InvalidAtom as e:
writemsg_level(
"ERROR: failed parsing %s/metadata.xml: %s\n"
@@ -840,7 +844,7 @@ class GenUseLocalDesc:
resatoms = sorted(reskeys, key=cmp_sort_key(atomcmp))
resdesc = resdict[reskeys[resatoms[-1]]]
- output.write("%s:%s - %s\n" % (cp, flag, resdesc))
+ output.write("{}:{} - {}\n".format(cp, flag, resdesc))
output.close()
if prev_mtime is not None and prev_md5 == portage.checksum.perform_md5(
@@ -910,13 +914,13 @@ class GenChangeLogs:
return
try:
- output = io.open(
+ output = open(
self._changelog_output,
mode="w",
encoding=_encodings["repo.content"],
errors="backslashreplace",
)
- except IOError as e:
+ except OSError as e:
writemsg_level(
"ERROR: failed to open ChangeLog for %s: %s\n"
% (
@@ -968,7 +972,7 @@ class GenChangeLogs:
"--no-renames",
"--format=%ct %cN <%cE>%n%B",
"--root",
- "--relative=%s" % (cp,),
+ "--relative={}".format(cp),
"-r",
c,
"--",
@@ -1030,7 +1034,7 @@ class GenChangeLogs:
# Reverse the sort order for headers.
for c in reversed(changed):
if c.startswith("+") and c.endswith(".ebuild"):
- output.write("*%s (%s)\n" % (c[1:-7], date))
+ output.write("*{} ({})\n".format(c[1:-7], date))
wroteheader = True
if wroteheader:
output.write("\n")
@@ -1057,7 +1061,9 @@ class GenChangeLogs:
# don't break filenames on hyphens
self._wrapper.break_on_hyphens = False
output.write(
- self._wrapper.fill("%s; %s %s:" % (date, author, ", ".join(changed)))
+ self._wrapper.fill(
+ "{}; {} {}:".format(date, author, ", ".join(changed))
+ )
)
# but feel free to break commit messages there
self._wrapper.break_on_hyphens = True
@@ -1153,7 +1159,7 @@ def egencache_main(args):
repo_path = settings.repositories.treemap.get(options.repo)
if repo_path is None:
- parser.error("Unable to locate repository named '%s'" % (options.repo,))
+ parser.error("Unable to locate repository named '{}'".format(options.repo))
return 1
repo_config = settings.repositories.get_repo_for_location(repo_path)
@@ -1193,7 +1199,9 @@ def egencache_main(args):
if not os.access(settings["PORTAGE_DEPCACHEDIR"], os.W_OK):
writemsg_level(
"ecachegen: error: "
- + "write access denied: %s\n" % (settings["PORTAGE_DEPCACHEDIR"],),
+ + "write access denied: {}\n".format(
+ settings["PORTAGE_DEPCACHEDIR"]
+ ),
level=logging.ERROR,
noiselevel=-1,
)
@@ -1311,8 +1319,12 @@ def egencache_main(args):
)
if not options.external_cache_only:
msg = [
- "WARNING: Repository is not writable: %s" % (repo_config.location,),
- " Using cache directory instead: %s" % (writable_location,),
+ "WARNING: Repository is not writable: {}".format(
+ repo_config.location
+ ),
+ " Using cache directory instead: {}".format(
+ writable_location
+ ),
]
msg = "".join(line + "\n" for line in msg)
writemsg_level(msg, level=logging.WARNING, noiselevel=-1)
@@ -1381,7 +1393,7 @@ def egencache_main(args):
portage.util.write_atomic(
timestamp_path, time.strftime("%s\n" % TIMESTAMP_FORMAT, time.gmtime())
)
- except (EnvironmentError, portage.exception.PortageException):
+ except (OSError, portage.exception.PortageException):
ret.append(os.EX_IOERR)
else:
ret.append(os.EX_OK)
diff --git a/bin/emaint b/bin/emaint
index b9a129ed0..103dc2571 100755
--- a/bin/emaint
+++ b/bin/emaint
@@ -40,7 +40,7 @@ from portage.util._eventloop.global_event_loop import global_event_loop
try:
emaint_main(sys.argv[1:])
-except IOError as e:
+except OSError as e:
if e.errno == errno.EACCES:
print("\nemaint: Need superuser access")
sys.exit(1)
diff --git a/bin/emerge b/bin/emerge
index 459db2c1f..d90a73c34 100755
--- a/bin/emerge
+++ b/bin/emerge
@@ -80,7 +80,7 @@ try:
sys.exit(retval)
except KeyboardInterrupt:
- sys.stderr.write("\n\nExiting on signal %(signal)s\n" % {"signal": signal.SIGINT})
+ sys.stderr.write("\n\nExiting on signal {signal}\n".format(signal=signal.SIGINT))
sys.stderr.flush()
sys.exit(128 + signal.SIGINT)
finally:
diff --git a/bin/env-update b/bin/env-update
index 8e597b03d..6ba80c3ef 100755
--- a/bin/env-update
+++ b/bin/env-update
@@ -39,7 +39,7 @@ portage._internal_caller = True
try:
portage.env_update(makelinks)
-except IOError as e:
+except OSError as e:
if e.errno == errno.EACCES:
print("env-update: Need superuser access")
sys.exit(1)
diff --git a/bin/glsa-check b/bin/glsa-check
index 431590cf8..753c13891 100755
--- a/bin/glsa-check
+++ b/bin/glsa-check
@@ -211,7 +211,7 @@ if "affected" in params:
except (GlsaTypeException, GlsaFormatException) as e:
if verbose:
sys.stderr.write(
- ("invalid GLSA: %s (error message was: %s)\n" % (x, e))
+ "invalid GLSA: {} (error message was: {})\n".format(x, e)
)
continue
if myglsa.isVulnerable():
@@ -222,10 +222,8 @@ if "affected" in params:
for p in params[:]:
if not (p in completelist or os.path.exists(p)):
sys.stderr.write(
- (
- "(removing %s from parameter list as it isn't a valid GLSA specification)\n"
- % p
- )
+ "(removing %s from parameter list as it isn't a valid GLSA specification)\n"
+ % p
)
params.remove(p)
@@ -252,7 +250,7 @@ def summarylist(myglsalist, fd1=sys.stdout, fd2=sys.stderr, encoding="utf-8"):
myglsa = Glsa(myid, portage.settings, vardb, portdb)
except (GlsaTypeException, GlsaFormatException) as e:
if verbose:
- fd2.write(("invalid GLSA: %s (error message was: %s)\n" % (myid, e)))
+ fd2.write("invalid GLSA: {} (error message was: {})\n".format(myid, e))
continue
if myglsa.isInjected():
status = "[A]"
@@ -322,7 +320,7 @@ if mode in ["dump", "fix", "inject", "pretend"]:
except (GlsaTypeException, GlsaFormatException) as e:
if verbose:
sys.stderr.write(
- ("invalid GLSA: %s (error message was: %s)\n" % (myid, e))
+ "invalid GLSA: {} (error message was: {})\n".format(myid, e)
)
continue
if mode == "dump":
@@ -416,7 +414,7 @@ if mode == "test":
except (GlsaTypeException, GlsaFormatException) as e:
if verbose:
sys.stderr.write(
- ("invalid GLSA: %s (error message was: %s)\n" % (myid, e))
+ "invalid GLSA: {} (error message was: {})\n".format(myid, e)
)
continue
if myglsa.isVulnerable():
@@ -471,7 +469,7 @@ if mode == "mail":
except (GlsaTypeException, GlsaFormatException) as e:
if verbose:
sys.stderr.write(
- ("invalid GLSA: %s (error message was: %s)\n" % (myid, e))
+ "invalid GLSA: {} (error message was: {})\n".format(myid, e)
)
continue
myfd = BytesIO()
diff --git a/bin/pid-ns-init b/bin/pid-ns-init
index 6bac5961c..4ea234d3a 100644
--- a/bin/pid-ns-init
+++ b/bin/pid-ns-init
@@ -115,7 +115,7 @@ def main(argv):
# since we created a new session with os.setsid() above.
try:
Path("/proc/self/autogroup").write_text(str(nice_value))
- except EnvironmentError as e:
+ except OSError as e:
# The process is likely not allowed to set the autogroup
# value (Linux employs a rate limiting for unprivileged
# changes to the autogroup value) or autogroups are not
@@ -125,7 +125,7 @@ def main(argv):
if sys.stdout.isatty():
try:
fcntl.ioctl(sys.stdout, termios.TIOCSCTTY, 0)
- except EnvironmentError as e:
+ except OSError as e:
if e.errno == errno.EPERM:
# This means that stdout refers to the controlling terminal
# of the parent process, and in this case we do not want to
@@ -155,7 +155,7 @@ def main(argv):
while True:
try:
pid, status = os.wait()
- except EnvironmentError as e:
+ except OSError as e:
if e.errno == errno.EINTR:
continue
raise
diff --git a/bin/portageq b/bin/portageq
index 70c5699bb..dca249a7b 100755
--- a/bin/portageq
+++ b/bin/portageq
@@ -139,7 +139,7 @@ def has_version(argv):
try:
atom = portage.dep.Atom(argv[1], allow_repo=allow_repo, eapi=eapi)
except portage.exception.InvalidAtom as e:
- warnings.append("QA Notice: %s: %s" % ("has_version", e))
+ warnings.append("QA Notice: {}: {}".format("has_version", e))
atom = eval_atom_use(atom)
if warnings:
@@ -188,7 +188,7 @@ def best_version(argv):
try:
atom = portage.dep.Atom(argv[1], allow_repo=allow_repo, eapi=eapi)
except portage.exception.InvalidAtom as e:
- warnings.append("QA Notice: %s: %s" % ("best_version", e))
+ warnings.append("QA Notice: {}: {}".format("best_version", e))
atom = eval_atom_use(atom)
if warnings:
@@ -217,7 +217,7 @@ def mass_best_version(argv):
try:
for pack in argv[1:]:
mylist = portage.db[argv[0]]["vartree"].dbapi.match(pack)
- print("%s:%s" % (pack, portage.best(mylist)))
+ print("{}:{}".format(pack, portage.best(mylist)))
except KeyError:
return 1
@@ -346,7 +346,7 @@ def owners(argv):
msg.append("%s\n" % cpv)
for f in sorted(owned_files):
f_abs = os.path.join(root, f.lstrip(os.path.sep))
- msg.append("\t%s\n" % (f_abs,))
+ msg.append("\t{}\n".format(f_abs))
orphan_abs_paths.discard(f_abs)
if orphan_basenames:
orphan_basenames.discard(os.path.basename(f_abs))
@@ -361,7 +361,7 @@ def owners(argv):
msg = []
msg.append("None of the installed packages claim these files:\n")
for f in orphans:
- msg.append("\t%s\n" % (f,))
+ msg.append("\t{}\n".format(f))
sys.stderr.write("".join(msg))
sys.stderr.flush()
@@ -576,7 +576,7 @@ def best_visible(argv):
continue
if pkg.visible:
- writemsg_stdout("%s\n" % (pkg.cpv,), noiselevel=-1)
+ writemsg_stdout("{}\n".format(pkg.cpv), noiselevel=-1)
return os.EX_OK
# No package found, write out an empty line.
@@ -717,7 +717,7 @@ def expand_virtual(argv):
results.sort()
for x in results:
if not x.blocker:
- writemsg_stdout("%s\n" % (x,))
+ writemsg_stdout("{}\n".format(x))
return os.EX_OK
@@ -1290,7 +1290,7 @@ def pquery(parser, opts, args):
metadata_xml_path = os.path.join(repo.location, cp, "metadata.xml")
try:
metadata_xml = MetaDataXML(metadata_xml_path, None)
- except (EnvironmentError, SyntaxError):
+ except (OSError, SyntaxError):
match = False
else:
for matcher in xml_matchers:
@@ -1344,12 +1344,12 @@ def pquery(parser, opts, args):
continue
if no_version:
- writemsg_stdout("%s\n" % (cp,), noiselevel=-1)
+ writemsg_stdout("{}\n".format(cp), noiselevel=-1)
else:
matches = list(set(matches))
portdb._cpv_sort_ascending(matches)
for cpv in matches:
- writemsg_stdout("%s\n" % (cpv,), noiselevel=-1)
+ writemsg_stdout("{}\n".format(cpv), noiselevel=-1)
return os.EX_OK
@@ -1510,7 +1510,7 @@ if atom_validate_strict:
def elog(elog_funcname, lines):
cmd = "source '%s/isolated-functions.sh' ; " % os.environ["PORTAGE_BIN_PATH"]
for line in lines:
- cmd += "%s %s ; " % (elog_funcname, portage._shell_quote(line))
+ cmd += "{} {} ; ".format(elog_funcname, portage._shell_quote(line))
subprocess.call([portage.const.BASH_BINARY, "-c", cmd])
else:
diff --git a/bin/quickpkg b/bin/quickpkg
index 44e3a7123..6aeabba93 100755
--- a/bin/quickpkg
+++ b/bin/quickpkg
@@ -67,12 +67,12 @@ def quickpkg_atom(options, infos, arg, eout):
infos["missing"].append(arg)
return 1
except (InvalidAtom, InvalidData):
- eout.eerror("Invalid atom: %s" % (arg,))
+ eout.eerror("Invalid atom: {}".format(arg))
infos["missing"].append(arg)
return 1
if atom[:1] == "=" and arg[:1] != "=":
# dep_expand() allows missing '=' but it's really invalid
- eout.eerror("Invalid atom: %s" % (arg,))
+ eout.eerror("Invalid atom: {}".format(arg))
infos["missing"].append(arg)
return 1
@@ -103,7 +103,7 @@ def quickpkg_atom(options, infos, arg, eout):
except InvalidDependString as e:
eout.eerror(
"Invalid RESTRICT metadata "
- + "for '%s': %s; skipping" % (cpv, str(e))
+ + "for '{}': {}; skipping".format(cpv, str(e))
)
del e
continue
@@ -271,7 +271,7 @@ def quickpkg_set(options, infos, arg, eout):
set_name = arg[1:]
if not set_name in sets:
- eout.eerror("Package set not found: '%s'; skipping" % (arg,))
+ eout.eerror("Package set not found: '{}'; skipping".format(arg))
infos["missing"].append(arg)
return 1
@@ -389,7 +389,7 @@ def quickpkg_main(options, args, eout):
size_str += unit
else:
size_str = str(size)
- eout.einfo("%s: %s" % (cpv, size_str))
+ eout.einfo("{}: {}".format(cpv, size_str))
if infos["config_files_excluded"]:
print()
eout.ewarn("Excluded config files: %d" % infos["config_files_excluded"])
diff --git a/bin/regenworld b/bin/regenworld
index 7927dd237..f07cfffe8 100755
--- a/bin/regenworld
+++ b/bin/regenworld
@@ -109,10 +109,14 @@ for mykey in biglist:
print("* ignoring broken log entry for %s (likely injected)" % mykey)
except ValueError as e:
try:
- print("* %s is an ambiguous package name, candidates are:\n%s" % (mykey, e))
+ print(
+ "* {} is an ambiguous package name, candidates are:\n{}".format(
+ mykey, e
+ )
+ )
except AttributeError:
# FIXME: Find out what causes this (bug #344845).
- print("* %s is an ambiguous package name" % (mykey,))
+ print("* {} is an ambiguous package name".format(mykey))
continue
if mylist:
# print "mylist:",mylist
diff --git a/doc/api/conf.py b/doc/api/conf.py
index ed3aeb214..c732054bd 100644
--- a/doc/api/conf.py
+++ b/doc/api/conf.py
@@ -64,7 +64,7 @@ html_theme = "sphinxdoc"
# so a file named "default.css" will overwrite the builtin "default.css".
# html_static_path = []
-autodoc_default_options = dict(
- (opt, True)
+autodoc_default_options = {
+ opt: True
for opt in filter(None, os.environ.get("SPHINX_APIDOC_OPTIONS", "").split(","))
-)
+}
diff --git a/lib/_emerge/AbstractEbuildProcess.py b/lib/_emerge/AbstractEbuildProcess.py
index 8712b8ea1..5b8b7a3b8 100644
--- a/lib/_emerge/AbstractEbuildProcess.py
+++ b/lib/_emerge/AbstractEbuildProcess.py
@@ -143,7 +143,7 @@ class AbstractEbuildProcess(SpawnProcess):
try:
with open(release_agent) as f:
release_agent_path = f.readline().rstrip("\n")
- except EnvironmentError:
+ except OSError:
release_agent_path = None
if release_agent_path is None or not os.path.exists(
@@ -159,7 +159,9 @@ class AbstractEbuildProcess(SpawnProcess):
cgroup_path = tempfile.mkdtemp(
dir=cgroup_portage,
- prefix="%s:%s." % (self.settings["CATEGORY"], self.settings["PF"]),
+ prefix="{}:{}.".format(
+ self.settings["CATEGORY"], self.settings["PF"]
+ ),
)
except (subprocess.CalledProcessError, OSError):
pass
@@ -451,7 +453,9 @@ class AbstractEbuildProcess(SpawnProcess):
SpawnProcess._async_wait(self)
elif self._build_dir_unlock is None:
if self.returncode is None:
- raise asyncio.InvalidStateError("Result is not ready for %s" % (self,))
+ raise asyncio.InvalidStateError(
+ "Result is not ready for {}".format(self)
+ )
self._async_unlock_builddir(returncode=self.returncode)
def _async_unlock_builddir(self, returncode=None):
diff --git a/lib/_emerge/AbstractPollTask.py b/lib/_emerge/AbstractPollTask.py
index eaf2c02e3..c665cb516 100644
--- a/lib/_emerge/AbstractPollTask.py
+++ b/lib/_emerge/AbstractPollTask.py
@@ -34,7 +34,7 @@ class AbstractPollTask(AsynchronousTask):
# Python 3.2:
# TypeError: read() didn't return bytes
pass
- except IOError as e:
+ except OSError as e:
# EIO happens with pty on Linux after the
# slave end of the pty has been closed.
if e.errno == errno.EIO:
@@ -93,7 +93,7 @@ class AbstractPollTask(AsynchronousTask):
def _async_wait(self):
self._unregister()
- super(AbstractPollTask, self)._async_wait()
+ super()._async_wait()
def _unregister(self):
self._registered = False
diff --git a/lib/_emerge/AsynchronousLock.py b/lib/_emerge/AsynchronousLock.py
index b55add737..ad8570559 100644
--- a/lib/_emerge/AsynchronousLock.py
+++ b/lib/_emerge/AsynchronousLock.py
@@ -262,7 +262,7 @@ class _LockProcess(AbstractPollTask):
# only safe to ignore if either the cancel() or
# unlock() methods have been previously called.
raise AssertionError(
- "lock process failed with returncode %s" % (proc.returncode,)
+ "lock process failed with returncode {}".format(proc.returncode)
)
if self._unlock_future is not None:
@@ -306,7 +306,7 @@ class _LockProcess(AbstractPollTask):
raise AssertionError("lock not acquired yet")
if self.returncode != os.EX_OK:
raise AssertionError(
- "lock process failed with returncode %s" % (self.returncode,)
+ "lock process failed with returncode {}".format(self.returncode)
)
if self._unlock_future is not None:
raise AssertionError("already unlocked")
diff --git a/lib/_emerge/AsynchronousTask.py b/lib/_emerge/AsynchronousTask.py
index c2c3e740e..2d1af8b7d 100644
--- a/lib/_emerge/AsynchronousTask.py
+++ b/lib/_emerge/AsynchronousTask.py
@@ -88,7 +88,9 @@ class AsynchronousTask(SlotObject):
"""
if self.returncode is None:
if self.scheduler.is_running():
- raise asyncio.InvalidStateError("Result is not ready for %s" % (self,))
+ raise asyncio.InvalidStateError(
+ "Result is not ready for {}".format(self)
+ )
self.scheduler.run_until_complete(self.async_wait())
self._wait_hook()
return self.returncode
diff --git a/lib/_emerge/Binpkg.py b/lib/_emerge/Binpkg.py
index 6c1c0666a..2b162f3f5 100644
--- a/lib/_emerge/Binpkg.py
+++ b/lib/_emerge/Binpkg.py
@@ -87,7 +87,7 @@ class Binpkg(CompositeTask):
)
if dir_path != self.settings["PORTAGE_BUILDDIR"]:
raise AssertionError(
- "'%s' != '%s'" % (dir_path, self.settings["PORTAGE_BUILDDIR"])
+ "'{}' != '{}'".format(dir_path, self.settings["PORTAGE_BUILDDIR"])
)
self._build_dir = EbuildBuildDir(scheduler=self.scheduler, settings=settings)
settings.configdict["pkg"]["EMERGE_FROM"] = "binary"
@@ -96,11 +96,11 @@ class Binpkg(CompositeTask):
if eapi_exports_replace_vars(settings["EAPI"]):
vardb = self.pkg.root_config.trees["vartree"].dbapi
settings["REPLACING_VERSIONS"] = " ".join(
- set(
+ {
portage.versions.cpv_getversion(x)
for x in vardb.match(self.pkg.slot_atom)
+ vardb.match("=" + self.pkg.cpv)
- )
+ }
)
# The prefetcher has already completed or it
@@ -179,13 +179,13 @@ class Binpkg(CompositeTask):
scheduler=self.scheduler,
)
- msg = " --- (%s of %s) Fetching Binary (%s::%s)" % (
+ msg = " --- ({} of {}) Fetching Binary ({}::{})".format(
pkg_count.curval,
pkg_count.maxval,
pkg.cpv,
fetcher.pkg_path,
)
- short_msg = "emerge: (%s of %s) %s Fetch" % (
+ short_msg = "emerge: ({} of {}) {} Fetch".format(
pkg_count.curval,
pkg_count.maxval,
pkg.cpv,
@@ -278,13 +278,13 @@ class Binpkg(CompositeTask):
self.wait()
return
- msg = " === (%s of %s) Merging Binary (%s::%s)" % (
+ msg = " === ({} of {}) Merging Binary ({}::{})".format(
pkg_count.curval,
pkg_count.maxval,
pkg.cpv,
pkg_path,
)
- short_msg = "emerge: (%s of %s) %s Merge Binary" % (
+ short_msg = "emerge: ({} of {}) {} Merge Binary".format(
pkg_count.curval,
pkg_count.maxval,
pkg.cpv,
@@ -350,7 +350,7 @@ class Binpkg(CompositeTask):
else:
continue
- f = io.open(
+ f = open(
_unicode_encode(
os.path.join(infloc, k), encoding=_encodings["fs"], errors="strict"
),
@@ -368,7 +368,7 @@ class Binpkg(CompositeTask):
(md5sum,) = self._bintree.dbapi.aux_get(self.pkg.cpv, ["MD5"])
if not md5sum:
md5sum = portage.checksum.perform_md5(pkg_path)
- with io.open(
+ with open(
_unicode_encode(
os.path.join(infloc, "BINPKGMD5"),
encoding=_encodings["fs"],
@@ -461,18 +461,17 @@ class Binpkg(CompositeTask):
)
try:
- with io.open(
+ with open(
_unicode_encode(
os.path.join(self._infloc, "EPREFIX"),
encoding=_encodings["fs"],
errors="strict",
),
- mode="r",
encoding=_encodings["repo.content"],
errors="replace",
) as f:
self._build_prefix = f.read().rstrip("\n")
- except IOError:
+ except OSError:
self._build_prefix = ""
if self._build_prefix == self.settings["EPREFIX"]:
@@ -503,7 +502,7 @@ class Binpkg(CompositeTask):
def _chpathtool_exit(self, chpathtool):
if self._final_exit(chpathtool) != os.EX_OK:
self._writemsg_level(
- "!!! Error Adjusting Prefix to %s\n" % (self.settings["EPREFIX"],),
+ "!!! Error Adjusting Prefix to {}\n".format(self.settings["EPREFIX"]),
noiselevel=-1,
level=logging.ERROR,
)
@@ -511,7 +510,7 @@ class Binpkg(CompositeTask):
return
# We want to install in "our" prefix, not the binary one
- with io.open(
+ with open(
_unicode_encode(
os.path.join(self._infloc, "EPREFIX"),
encoding=_encodings["fs"],
diff --git a/lib/_emerge/BinpkgVerifier.py b/lib/_emerge/BinpkgVerifier.py
index 6d8efb9c4..830b0c000 100644
--- a/lib/_emerge/BinpkgVerifier.py
+++ b/lib/_emerge/BinpkgVerifier.py
@@ -105,7 +105,9 @@ class BinpkgVerifier(CompositeTask):
path = path[: -len(".partial")]
eout = EOutput()
eout.ebegin(
- "%s %s ;-)" % (os.path.basename(path), " ".join(sorted(self._digests)))
+ "{} {} ;-)".format(
+ os.path.basename(path), " ".join(sorted(self._digests))
+ )
)
eout.eend(0)
diff --git a/lib/_emerge/BlockerCache.py b/lib/_emerge/BlockerCache.py
index 5aad581f4..f0d1a1563 100644
--- a/lib/_emerge/BlockerCache.py
+++ b/lib/_emerge/BlockerCache.py
@@ -62,7 +62,7 @@ class BlockerCache(portage.cache.mappings.MutableMapping):
pass
else:
writemsg(
- "!!! Error loading '%s': %s\n" % (self._cache_filename, str(e)),
+ "!!! Error loading '{}': {}\n".format(self._cache_filename, str(e)),
noiselevel=-1,
)
del e
@@ -145,7 +145,7 @@ class BlockerCache(portage.cache.mappings.MutableMapping):
portage.util.apply_secpass_permissions(
self._cache_filename, gid=portage.portage_gid, mode=0o644
)
- except (IOError, OSError):
+ except OSError:
pass
self._modified.clear()
diff --git a/lib/_emerge/BlockerDB.py b/lib/_emerge/BlockerDB.py
index b6542157c..340076b99 100644
--- a/lib/_emerge/BlockerDB.py
+++ b/lib/_emerge/BlockerDB.py
@@ -71,7 +71,8 @@ class BlockerDB:
inst_pkg.root, portage.VDB_PATH, inst_pkg.category, inst_pkg.pf
)
portage.writemsg(
- "!!! %s/*DEPEND: %s\n" % (pkg_location, atoms), noiselevel=-1
+ "!!! {}/*DEPEND: {}\n".format(pkg_location, atoms),
+ noiselevel=-1,
)
continue
@@ -127,7 +128,9 @@ class BlockerDB:
def discardBlocker(self, pkg):
"""Discard a package from the list of potential blockers.
This will match any package(s) with identical cpv or cp:slot."""
- for cpv_match in self._fake_vartree.dbapi.match_pkgs(Atom("=%s" % (pkg.cpv,))):
+ for cpv_match in self._fake_vartree.dbapi.match_pkgs(
+ Atom("={}".format(pkg.cpv))
+ ):
if cpv_match.cp == pkg.cp:
self._fake_vartree.cpv_discard(cpv_match)
for slot_match in self._fake_vartree.dbapi.match_pkgs(pkg.slot_atom):
diff --git a/lib/_emerge/CompositeTask.py b/lib/_emerge/CompositeTask.py
index 6284816bc..ca2fab1fb 100644
--- a/lib/_emerge/CompositeTask.py
+++ b/lib/_emerge/CompositeTask.py
@@ -54,7 +54,7 @@ class CompositeTask(AsynchronousTask):
for detecting bugs.
"""
if task is not self._current_task:
- raise AssertionError("Unrecognized task: %s" % (task,))
+ raise AssertionError("Unrecognized task: {}".format(task))
def _default_exit(self, task):
"""
diff --git a/lib/_emerge/DependencyArg.py b/lib/_emerge/DependencyArg.py
index ea83cdb8c..32e0369b8 100644
--- a/lib/_emerge/DependencyArg.py
+++ b/lib/_emerge/DependencyArg.py
@@ -33,4 +33,4 @@ class DependencyArg:
return hash((self.arg, self.root_config.root))
def __str__(self):
- return "%s" % (self.arg,)
+ return "{}".format(self.arg)
diff --git a/lib/_emerge/EbuildBuild.py b/lib/_emerge/EbuildBuild.py
index 6d290e116..0e3dffe66 100644
--- a/lib/_emerge/EbuildBuild.py
+++ b/lib/_emerge/EbuildBuild.py
@@ -219,13 +219,13 @@ class EbuildBuild(CompositeTask):
lock_task.future.result()
# Cleaning needs to happen before fetch, since the build dir
# is used for log handling.
- msg = " === (%s of %s) Cleaning (%s::%s)" % (
+ msg = " === ({} of {}) Cleaning ({}::{})".format(
self.pkg_count.curval,
self.pkg_count.maxval,
self.pkg.cpv,
self._ebuild_path,
)
- short_msg = "emerge: (%s of %s) %s Clean" % (
+ short_msg = "emerge: ({} of {}) {} Clean".format(
self.pkg_count.curval,
self.pkg_count.maxval,
self.pkg.cpv,
@@ -298,7 +298,9 @@ class EbuildBuild(CompositeTask):
already_fetched = already_fetched_task.future.result()
except portage.exception.InvalidDependString as e:
msg_lines = []
- msg = "Fetch failed for '%s' due to invalid SRC_URI: %s" % (self.pkg.cpv, e)
+ msg = "Fetch failed for '{}' due to invalid SRC_URI: {}".format(
+ self.pkg.cpv, e
+ )
msg_lines.append(msg)
fetcher._eerror(msg_lines)
portage.elog.elog_process(self.pkg.cpv, self.settings)
@@ -364,13 +366,13 @@ class EbuildBuild(CompositeTask):
self._buildpkg = True
- msg = " === (%s of %s) Compiling/Packaging (%s::%s)" % (
+ msg = " === ({} of {}) Compiling/Packaging ({}::{})".format(
pkg_count.curval,
pkg_count.maxval,
pkg.cpv,
ebuild_path,
)
- short_msg = "emerge: (%s of %s) %s Compile" % (
+ short_msg = "emerge: ({} of {}) {} Compile".format(
pkg_count.curval,
pkg_count.maxval,
pkg.cpv,
@@ -378,13 +380,13 @@ class EbuildBuild(CompositeTask):
logger.log(msg, short_msg=short_msg)
else:
- msg = " === (%s of %s) Compiling/Merging (%s::%s)" % (
+ msg = " === ({} of {}) Compiling/Merging ({}::{})".format(
pkg_count.curval,
pkg_count.maxval,
pkg.cpv,
ebuild_path,
)
- short_msg = "emerge: (%s of %s) %s Compile" % (
+ short_msg = "emerge: ({} of {}) {} Compile".format(
pkg_count.curval,
pkg_count.maxval,
pkg.cpv,
@@ -563,7 +565,7 @@ class EbuildBuild(CompositeTask):
if pkg.build_id is not None:
info["BUILD_ID"] = "%s\n" % pkg.build_id
for k, v in info.items():
- with io.open(
+ with open(
_unicode_encode(
os.path.join(infoloc, k), encoding=_encodings["fs"], errors="strict"
),
@@ -623,13 +625,13 @@ class EbuildBuild(CompositeTask):
world_atom=world_atom,
)
- msg = " === (%s of %s) Merging (%s::%s)" % (
+ msg = " === ({} of {}) Merging ({}::{})".format(
pkg_count.curval,
pkg_count.maxval,
pkg.cpv,
ebuild_path,
)
- short_msg = "emerge: (%s of %s) %s Merge" % (
+ short_msg = "emerge: ({} of {}) {} Merge".format(
pkg_count.curval,
pkg_count.maxval,
pkg.cpv,
diff --git a/lib/_emerge/EbuildBuildDir.py b/lib/_emerge/EbuildBuildDir.py
index 78f98d2b5..b35ea5e10 100644
--- a/lib/_emerge/EbuildBuildDir.py
+++ b/lib/_emerge/EbuildBuildDir.py
@@ -23,7 +23,9 @@ class EbuildBuildDir(SlotObject):
if async_lock.returncode != os.EX_OK:
# TODO: create a better way to propagate this error to the caller
raise AssertionError(
- "AsynchronousLock failed with returncode %s" % (async_lock.returncode,)
+ "AsynchronousLock failed with returncode {}".format(
+ async_lock.returncode
+ )
)
def clean_log(self):
diff --git a/lib/_emerge/EbuildExecuter.py b/lib/_emerge/EbuildExecuter.py
index 450e53594..758afa8fc 100644
--- a/lib/_emerge/EbuildExecuter.py
+++ b/lib/_emerge/EbuildExecuter.py
@@ -28,10 +28,10 @@ class EbuildExecuter(CompositeTask):
if eapi_exports_replace_vars(settings["EAPI"]):
vardb = pkg.root_config.trees["vartree"].dbapi
settings["REPLACING_VERSIONS"] = " ".join(
- set(
+ {
portage.versions.cpv_getversion(match)
for match in vardb.match(pkg.slot_atom) + vardb.match("=" + pkg.cpv)
- )
+ }
)
setup_phase = EbuildPhase(
diff --git a/lib/_emerge/EbuildFetcher.py b/lib/_emerge/EbuildFetcher.py
index d20c6d50c..5becde176 100644
--- a/lib/_emerge/EbuildFetcher.py
+++ b/lib/_emerge/EbuildFetcher.py
@@ -69,7 +69,9 @@ class EbuildFetcher(CompositeTask):
uri_map = uri_map_task.future.result()
except portage.exception.InvalidDependString as e:
msg_lines = []
- msg = "Fetch failed for '%s' due to invalid SRC_URI: %s" % (self.pkg.cpv, e)
+ msg = "Fetch failed for '{}' due to invalid SRC_URI: {}".format(
+ self.pkg.cpv, e
+ )
msg_lines.append(msg)
self._fetcher_proc._eerror(msg_lines)
self._current_task = None
@@ -362,7 +364,7 @@ class _EbuildFetcherProcess(ForkProcess):
# fetch code will be skipped, so we need to generate equivalent
# output here.
if self.logfile is not None:
- f = io.open(
+ f = open(
_unicode_encode(
self.logfile, encoding=_encodings["fs"], errors="strict"
),
@@ -407,11 +409,11 @@ class _EbuildFetcherProcess(ForkProcess):
"""
if not self.prefetch and not future.cancelled() and proc.exitcode != os.EX_OK:
msg_lines = []
- msg = "Fetch failed for '%s'" % (self.pkg.cpv,)
+ msg = "Fetch failed for '{}'".format(self.pkg.cpv)
if self.logfile is not None:
msg += ", Log file:"
msg_lines.append(msg)
if self.logfile is not None:
- msg_lines.append(" '%s'" % (self.logfile,))
+ msg_lines.append(" '{}'".format(self.logfile))
self._eerror(msg_lines)
- super(_EbuildFetcherProcess, self)._proc_join_done(proc, future)
+ super()._proc_join_done(proc, future)
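
The last line of the EbuildFetcher hunk shows the super() cleanup that recurs throughout this commit: the Python 2 style super(Class, self) call becomes the zero-argument form. A minimal sketch with hypothetical classes:

    class Base:
        def close(self):
            pass

    # Before: explicit class and instance arguments.
    class OldChild(Base):
        def close(self):
            super(OldChild, self).close()

    # After: zero-argument super(), equivalent on Python 3.
    class NewChild(Base):
        def close(self):
            super().close()
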
diff --git a/lib/_emerge/EbuildFetchonly.py b/lib/_emerge/EbuildFetchonly.py
index 9028272ae..380c19e70 100644
--- a/lib/_emerge/EbuildFetchonly.py
+++ b/lib/_emerge/EbuildFetchonly.py
@@ -37,7 +37,7 @@ class EbuildFetchonly(SlotObject):
# and the unsuccessful return value is used to trigger
# a call to the pkg_nofetch phase.
if rval != os.EX_OK and not self.pretend:
- msg = "Fetch failed for '%s'" % (pkg.cpv,)
+ msg = "Fetch failed for '{}'".format(pkg.cpv)
eerror(msg, phase="unpack", key=pkg.cpv)
return rval
diff --git a/lib/_emerge/EbuildIpcDaemon.py b/lib/_emerge/EbuildIpcDaemon.py
index 78594ff0a..8e5e747d9 100644
--- a/lib/_emerge/EbuildIpcDaemon.py
+++ b/lib/_emerge/EbuildIpcDaemon.py
@@ -111,7 +111,7 @@ class EbuildIpcDaemon(FifoIpcDaemon):
# This probably means that the client has been killed,
# which causes open to fail with ENXIO.
writemsg_level(
- "!!! EbuildIpcDaemon %s: %s\n" % (_("failed to send reply"), e),
+ "!!! EbuildIpcDaemon {}: {}\n".format(_("failed to send reply"), e),
level=logging.ERROR,
noiselevel=-1,
)
diff --git a/lib/_emerge/EbuildMerge.py b/lib/_emerge/EbuildMerge.py
index cf43a3098..fefe737d5 100644
--- a/lib/_emerge/EbuildMerge.py
+++ b/lib/_emerge/EbuildMerge.py
@@ -70,7 +70,7 @@ class EbuildMerge(CompositeTask):
pkg_path = self.pkg_path
logger = self.logger
if "noclean" not in self.settings.features:
- short_msg = "emerge: (%s of %s) %s Clean Post" % (
+ short_msg = "emerge: ({} of {}) {} Clean Post".format(
pkg_count.curval,
pkg_count.maxval,
pkg.cpv,
diff --git a/lib/_emerge/EbuildMetadataPhase.py b/lib/_emerge/EbuildMetadataPhase.py
index 4b6add973..c52642bc5 100644
--- a/lib/_emerge/EbuildMetadataPhase.py
+++ b/lib/_emerge/EbuildMetadataPhase.py
@@ -48,9 +48,8 @@ class EbuildMetadataPhase(SubProcess):
def _start(self):
ebuild_path = self.ebuild_hash.location
- with io.open(
+ with open(
_unicode_encode(ebuild_path, encoding=_encodings["fs"], errors="strict"),
- mode="r",
encoding=_encodings["repo.content"],
errors="replace",
) as f:
diff --git a/lib/_emerge/EbuildPhase.py b/lib/_emerge/EbuildPhase.py
index a1eaf86c1..3be322f3e 100644
--- a/lib/_emerge/EbuildPhase.py
+++ b/lib/_emerge/EbuildPhase.py
@@ -136,7 +136,7 @@ class EbuildPhase(CompositeTask):
msg = []
msg.append(
- "Package: %s:%s" % (self.settings.mycpv, self.settings["SLOT"])
+ "Package: {}:{}".format(self.settings.mycpv, self.settings["SLOT"])
)
if self.settings.get("PORTAGE_REPO_NAME"):
msg.append("Repository: %s" % self.settings["PORTAGE_REPO_NAME"])
@@ -560,9 +560,7 @@ class _PostPhaseCommands(CompositeTask):
tasks = TaskSequence()
for kwargs, commands in cmds:
# Select args intended for MiscFunctionsProcess.
- kwargs = dict(
- (k, v) for k, v in kwargs.items() if k in ("ld_preload_sandbox",)
- )
+ kwargs = {k: v for k, v in kwargs.items() if k in ("ld_preload_sandbox",)}
tasks.add(
MiscFunctionsProcess(
background=self.background,
@@ -627,7 +625,7 @@ class _PostPhaseCommands(CompositeTask):
qa_msg = ["QA Notice: Unresolved soname dependencies:"]
qa_msg.append("")
qa_msg.extend(
- "\t%s: %s" % (filename, " ".join(sorted(soname_deps)))
+ "\t{}: {}".format(filename, " ".join(sorted(soname_deps)))
for filename, soname_deps in unresolved
)
qa_msg.append("")
diff --git a/lib/_emerge/FakeVartree.py b/lib/_emerge/FakeVartree.py
index a93d0cf65..e6c8a4215 100644
--- a/lib/_emerge/FakeVartree.py
+++ b/lib/_emerge/FakeVartree.py
@@ -193,7 +193,7 @@ class FakeVartree(vartree):
def dynamic_deps_preload(self, pkg, metadata):
if metadata is not None:
- metadata = dict((k, metadata.get(k, "")) for k in self._portdb_keys)
+ metadata = {k: metadata.get(k, "") for k in self._portdb_keys}
self._apply_dynamic_deps(pkg, metadata)
self._aux_get_history.add(pkg.cpv)
@@ -332,7 +332,7 @@ def perform_global_updates(mycpv, aux_dict, mydb, myupdates):
pkg = _pkg_str(mycpv, metadata=aux_dict, settings=mydb.settings)
except InvalidData:
return
- aux_dict = dict((k, aux_dict[k]) for k in Package._dep_keys)
+ aux_dict = {k: aux_dict[k] for k in Package._dep_keys}
try:
mycommands = myupdates[pkg.repo]
except KeyError:
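
Both FakeVartree hunks turn dict() over a generator of (key, value) pairs into a dict comprehension. A standalone sketch with a hypothetical metadata mapping:

    # Hypothetical metadata and key list.
    metadata = {"DEPEND": "dev-libs/foo", "RDEPEND": ""}
    keys = ("DEPEND", "RDEPEND", "PDEPEND")

    # Before: dict() over a generator of pairs.
    trimmed = dict((k, metadata.get(k, "")) for k in keys)

    # After: the equivalent dict comprehension.
    trimmed = {k: metadata.get(k, "") for k in keys}
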
diff --git a/lib/_emerge/JobStatusDisplay.py b/lib/_emerge/JobStatusDisplay.py
index e3cb2ff0f..02600c052 100644
--- a/lib/_emerge/JobStatusDisplay.py
+++ b/lib/_emerge/JobStatusDisplay.py
@@ -228,10 +228,10 @@ class JobStatusDisplay:
def _display_status(self):
# Don't use len(self._completed_tasks) here since that also
# can include uninstall tasks.
- curval_str = "%s" % (self.curval,)
- maxval_str = "%s" % (self.maxval,)
- running_str = "%s" % (self.running,)
- failed_str = "%s" % (self.failed,)
+ curval_str = "{}".format(self.curval)
+ maxval_str = "{}".format(self.maxval)
+ running_str = "{}".format(self.running)
+ failed_str = "{}".format(self.failed)
load_avg_str = self._load_avg_str()
color_output = io.StringIO()
@@ -292,5 +292,5 @@ class JobStatusDisplay:
title_str = " ".join(plain_output.split())
hostname = os.environ.get("HOSTNAME")
if hostname is not None:
- title_str = "%s: %s" % (hostname, title_str)
+ title_str = "{}: {}".format(hostname, title_str)
xtermTitle(title_str)
diff --git a/lib/_emerge/MergeListItem.py b/lib/_emerge/MergeListItem.py
index 87767c153..b80d1cb1d 100644
--- a/lib/_emerge/MergeListItem.py
+++ b/lib/_emerge/MergeListItem.py
@@ -68,7 +68,7 @@ class MergeListItem(CompositeTask):
if build_opts.fetchonly:
action_desc = "Fetching"
- msg = "%s (%s of %s) %s" % (
+ msg = "{} ({} of {}) {}".format(
action_desc,
colorize("MERGE_LIST_PROGRESS", str(pkg_count.curval)),
colorize("MERGE_LIST_PROGRESS", str(pkg_count.maxval)),
@@ -76,7 +76,7 @@ class MergeListItem(CompositeTask):
)
if pkg.root_config.settings["ROOT"] != "/":
- msg += " %s %s" % (preposition, pkg.root)
+ msg += " {} {}".format(preposition, pkg.root)
if not build_opts.pretend:
self.statusMessage(msg)
diff --git a/lib/_emerge/MetadataRegen.py b/lib/_emerge/MetadataRegen.py
index 484c5f43f..b272009b9 100644
--- a/lib/_emerge/MetadataRegen.py
+++ b/lib/_emerge/MetadataRegen.py
@@ -37,8 +37,7 @@ class MetadataRegen(AsyncScheduler):
# and in order to reduce latency in case of a signal interrupt.
cp_all = self._portdb.cp_all
for category in sorted(self._portdb.categories):
- for cp in cp_all(categories=(category,)):
- yield cp
+ yield from cp_all(categories=(category,))
def _iter_metadata_processes(self):
portdb = self._portdb
@@ -85,7 +84,7 @@ class MetadataRegen(AsyncScheduler):
)
def _cleanup(self):
- super(MetadataRegen, self)._cleanup()
+ super()._cleanup()
portdb = self._portdb
dead_nodes = {}
@@ -101,7 +100,7 @@ class MetadataRegen(AsyncScheduler):
except CacheError as e:
portage.writemsg(
"Error listing cache entries for "
- + "'%s': %s, continuing...\n" % (mytree, e),
+ + "'{}': {}, continuing...\n".format(mytree, e),
noiselevel=-1,
)
del e
@@ -112,13 +111,13 @@ class MetadataRegen(AsyncScheduler):
cpv_getkey = portage.cpv_getkey
for mytree in portdb.porttrees:
try:
- dead_nodes[mytree] = set(
+ dead_nodes[mytree] = {
cpv for cpv in portdb.auxdb[mytree] if cpv_getkey(cpv) in cp_set
- )
+ }
except CacheError as e:
portage.writemsg(
"Error listing cache entries for "
- + "'%s': %s, continuing...\n" % (mytree, e),
+ + "'{}': {}, continuing...\n".format(mytree, e),
noiselevel=-1,
)
del e
@@ -147,7 +146,7 @@ class MetadataRegen(AsyncScheduler):
self._valid_pkgs.discard(metadata_process.cpv)
if not self._terminated_tasks:
portage.writemsg(
- "Error processing %s, continuing...\n" % (metadata_process.cpv,),
+ "Error processing {}, continuing...\n".format(metadata_process.cpv),
noiselevel=-1,
)
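
The first MetadataRegen hunk collapses a loop that re-yields each item into a single yield from. A sketch of the same delegation, with a hypothetical cp_all callable:

    def iter_cp_old(cp_all, categories):
        for category in sorted(categories):
            for cp in cp_all(categories=(category,)):
                yield cp

    def iter_cp_new(cp_all, categories):
        for category in sorted(categories):
            # yield from delegates to the inner iterable directly.
            yield from cp_all(categories=(category,))
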
diff --git a/lib/_emerge/Package.py b/lib/_emerge/Package.py
index afb69024e..8e741592d 100644
--- a/lib/_emerge/Package.py
+++ b/lib/_emerge/Package.py
@@ -131,7 +131,7 @@ class Package(Task):
self.version = self.cpv.version
self.slot = self.cpv.slot
self.sub_slot = self.cpv.sub_slot
- self.slot_atom = Atom("%s%s%s" % (self.cp, _slot_separator, self.slot))
+ self.slot_atom = Atom("{}{}{}".format(self.cp, _slot_separator, self.slot))
# sync metadata with validated repo (may be UNKNOWN_REPO)
self._metadata["repository"] = self.cpv.repo
@@ -355,7 +355,7 @@ class Package(Task):
self._metadata_exception(k, e)
self._validated_atoms = tuple(
- set(atom for atom in validated_atoms if isinstance(atom, Atom))
+ {atom for atom in validated_atoms if isinstance(atom, Atom)}
)
for k in self._use_conditional_misc_keys:
@@ -381,7 +381,7 @@ class Package(Task):
try:
check_required_use(v, (), self.iuse.is_valid_flag, eapi=eapi)
except InvalidDependString as e:
- self._invalid_metadata(k + ".syntax", "%s: %s" % (k, e))
+ self._invalid_metadata(k + ".syntax", "{}: {}".format(k, e))
k = "SRC_URI"
v = self._metadata.get(k)
@@ -403,13 +403,13 @@ class Package(Task):
try:
self._provides = frozenset(parse_soname_deps(self._metadata[k]))
except InvalidData as e:
- self._invalid_metadata(k + ".syntax", "%s: %s" % (k, e))
+ self._invalid_metadata(k + ".syntax", "{}: {}".format(k, e))
k = "REQUIRES"
try:
self._requires = frozenset(parse_soname_deps(self._metadata[k]))
except InvalidData as e:
- self._invalid_metadata(k + ".syntax", "%s: %s" % (k, e))
+ self._invalid_metadata(k + ".syntax", "{}: {}".format(k, e))
def copy(self):
return Package(
@@ -546,17 +546,17 @@ class Package(Task):
if getattr(error, "category", None) is None:
continue
categorized_error = True
- self._invalid_metadata(error.category, "%s: %s" % (k, error))
+ self._invalid_metadata(error.category, "{}: {}".format(k, error))
if not categorized_error:
- self._invalid_metadata(qacat, "%s: %s" % (k, e))
+ self._invalid_metadata(qacat, "{}: {}".format(k, e))
else:
# For installed packages, show the path of the file
# containing the invalid metadata, since the user may
# want to fix the deps by hand.
vardb = self.root_config.trees["vartree"].dbapi
path = vardb.getpath(self.cpv, filename=k)
- self._invalid_metadata(qacat, "%s: %s in '%s'" % (k, e, path))
+ self._invalid_metadata(qacat, "{}: {} in '{}'".format(k, e, path))
def _invalid_metadata(self, msg_type, msg):
if self._invalid is None:
@@ -582,7 +582,7 @@ class Package(Task):
if isinstance(self.cpv.build_id, int) and self.cpv.build_id > 0:
build_id_str = "-%s" % self.cpv.build_id
- s = "(%s, %s" % (
+ s = "({}, {}".format(
portage.output.colorize(
cpv_color,
self.cpv
diff --git a/lib/_emerge/PackageMerge.py b/lib/_emerge/PackageMerge.py
index 80a20690d..0fd02be7b 100644
--- a/lib/_emerge/PackageMerge.py
+++ b/lib/_emerge/PackageMerge.py
@@ -25,19 +25,19 @@ class PackageMerge(CompositeTask):
else:
action_desc = "Installing"
preposition = "to"
- counter_str = "(%s of %s) " % (
+ counter_str = "({} of {}) ".format(
colorize("MERGE_LIST_PROGRESS", str(pkg_count.curval)),
colorize("MERGE_LIST_PROGRESS", str(pkg_count.maxval)),
)
- msg = "%s %s%s" % (
+ msg = "{} {}{}".format(
action_desc,
counter_str,
colorize(pkg_color, pkg.cpv + _repo_separator + pkg.repo),
)
if pkg.root_config.settings["ROOT"] != "/":
- msg += " %s %s" % (preposition, pkg.root)
+ msg += " {} {}".format(preposition, pkg.root)
if (
not self.merge.build_opts.fetchonly
diff --git a/lib/_emerge/PackagePhase.py b/lib/_emerge/PackagePhase.py
index 17a858f5e..63d08e797 100644
--- a/lib/_emerge/PackagePhase.py
+++ b/lib/_emerge/PackagePhase.py
@@ -33,7 +33,7 @@ class PackagePhase(CompositeTask):
def _start(self):
try:
- with io.open(
+ with open(
_unicode_encode(
os.path.join(
self.settings["PORTAGE_BUILDDIR"],
@@ -43,12 +43,11 @@ class PackagePhase(CompositeTask):
encoding=_encodings["fs"],
errors="strict",
),
- mode="r",
encoding=_encodings["repo.content"],
errors="replace",
) as f:
self._pkg_install_mask = InstallMask(f.read())
- except EnvironmentError:
+ except OSError:
self._pkg_install_mask = None
if self._pkg_install_mask:
self._proot = os.path.join(self.settings["T"], "packaging")
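
The except clause change in PackagePhase relies on EnvironmentError and IOError being plain aliases of OSError since Python 3.3, so a single OSError handler covers the same failures. A minimal sketch (the path below is hypothetical):

    try:
        with open("/hypothetical/missing/pkg_install_mask") as f:
            data = f.read()
    except OSError:
        # Previously written as `except EnvironmentError:`; same behaviour.
        data = None
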
diff --git a/lib/_emerge/PackageUninstall.py b/lib/_emerge/PackageUninstall.py
index 468db3030..1bd70b8ca 100644
--- a/lib/_emerge/PackageUninstall.py
+++ b/lib/_emerge/PackageUninstall.py
@@ -93,8 +93,10 @@ class PackageUninstall(CompositeTask):
self._async_unlock_builddir(returncode=retval)
return
- self._writemsg_level(">>> Unmerging %s...\n" % (self.pkg.cpv,), noiselevel=-1)
- self._emergelog("=== Unmerging... (%s)" % (self.pkg.cpv,))
+ self._writemsg_level(
+ ">>> Unmerging {}...\n".format(self.pkg.cpv), noiselevel=-1
+ )
+ self._emergelog("=== Unmerging... ({})".format(self.pkg.cpv))
cat, pf = portage.catsplit(self.pkg.cpv)
unmerge_task = MergeProcess(
@@ -115,9 +117,9 @@ class PackageUninstall(CompositeTask):
def _unmerge_exit(self, unmerge_task):
if self._final_exit(unmerge_task) != os.EX_OK:
- self._emergelog(" !!! unmerge FAILURE: %s" % (self.pkg.cpv,))
+ self._emergelog(" !!! unmerge FAILURE: {}".format(self.pkg.cpv))
else:
- self._emergelog(" >>> unmerge success: %s" % (self.pkg.cpv,))
+ self._emergelog(" >>> unmerge success: {}".format(self.pkg.cpv))
self.world_atom(self.pkg)
self._async_unlock_builddir(returncode=self.returncode)
diff --git a/lib/_emerge/Scheduler.py b/lib/_emerge/Scheduler.py
index 356d6ce1f..f92573bb7 100644
--- a/lib/_emerge/Scheduler.py
+++ b/lib/_emerge/Scheduler.py
@@ -326,7 +326,7 @@ class Scheduler(PollScheduler):
# clear out existing fetch log if it exists
try:
open(self._fetch_log, "w").close()
- except EnvironmentError:
+ except OSError:
pass
self._running_portage = None
@@ -455,7 +455,7 @@ class Scheduler(PollScheduler):
msg.append(pkg_str)
msg.append("")
writemsg_level(
- "".join("%s\n" % (l,) for l in msg),
+ "".join("{}\n".format(l) for l in msg),
level=logging.INFO,
noiselevel=-1,
)
@@ -1011,10 +1011,10 @@ class Scheduler(PollScheduler):
vardb = root_config.trees["vartree"].dbapi
settings["REPLACING_VERSIONS"] = " ".join(
- set(
+ {
portage.versions.cpv_getversion(match)
for match in vardb.match(x.slot_atom) + vardb.match("=" + x.cpv)
- )
+ }
)
pretend_phase = EbuildPhase(
phase="pretend", scheduler=sched_iface, settings=settings
@@ -1133,7 +1133,7 @@ class Scheduler(PollScheduler):
signal.signal(signal.SIGINT, signal.SIG_IGN)
signal.signal(signal.SIGTERM, signal.SIG_IGN)
portage.util.writemsg(
- "\n\nExiting on signal %(signal)s\n" % {"signal": signum}
+ "\n\nExiting on signal {signal}\n".format(signal=signum)
)
self.terminate()
received_signal.append(128 + signum)
@@ -1237,7 +1237,7 @@ class Scheduler(PollScheduler):
),
mode="rb",
)
- except IOError:
+ except OSError:
pass
else:
if log_path.endswith(".gz"):
@@ -1251,7 +1251,7 @@ class Scheduler(PollScheduler):
for line in log_file:
writemsg_level(line, noiselevel=-1)
except zlib.error as e:
- writemsg_level("%s\n" % (e,), level=logging.ERROR, noiselevel=-1)
+ writemsg_level("{}\n".format(e), level=logging.ERROR, noiselevel=-1)
finally:
log_file.close()
if log_file_real is not None:
@@ -1311,7 +1311,7 @@ class Scheduler(PollScheduler):
printer.eerror(line)
printer.eerror("")
for failed_pkg in self._failed_pkgs_all:
- msg = " %s" % (failed_pkg.pkg,)
+ msg = " {}".format(failed_pkg.pkg)
if failed_pkg.postinst_failure:
msg += " (postinst failed)"
log_path = self._locate_failure_log(failed_pkg)
@@ -1700,7 +1700,7 @@ class Scheduler(PollScheduler):
completed_tasks = self._completed_tasks
dependent = False
- traversed_nodes = set([pkg])
+ traversed_nodes = {pkg}
direct_deps = graph.child_nodes(pkg)
node_stack = direct_deps
direct_deps = frozenset(direct_deps)
@@ -2024,9 +2024,9 @@ class Scheduler(PollScheduler):
def _failed_pkg_msg(self, failed_pkg, action, preposition):
pkg = failed_pkg.pkg
- msg = "%s to %s %s" % (bad("Failed"), action, colorize("INFORM", pkg.cpv))
+ msg = "{} to {} {}".format(bad("Failed"), action, colorize("INFORM", pkg.cpv))
if pkg.root_config.settings["ROOT"] != "/":
- msg += " %s %s" % (preposition, pkg.root)
+ msg += " {} {}".format(preposition, pkg.root)
log_path = self._locate_failure_log(failed_pkg)
if log_path is not None:
@@ -2034,7 +2034,7 @@ class Scheduler(PollScheduler):
self._status_msg(msg)
if log_path is not None:
- self._status_msg(" '%s'" % (colorize("INFORM", log_path),))
+ self._status_msg(" '{}'".format(colorize("INFORM", log_path)))
def _status_msg(self, msg):
"""
@@ -2162,9 +2162,9 @@ class Scheduler(PollScheduler):
if not (isinstance(task, Package) and task.operation == "merge"):
continue
pkg = task
- msg = "emerge --keep-going:" + " %s" % (pkg.cpv,)
+ msg = "emerge --keep-going:" + " {}".format(pkg.cpv)
if pkg.root_config.settings["ROOT"] != "/":
- msg += " for %s" % (pkg.root,)
+ msg += " for {}".format(pkg.root)
if not atoms:
msg += " dropped because it is masked or unavailable"
else:
@@ -2195,16 +2195,14 @@ class Scheduler(PollScheduler):
it's supposed to be added or removed. Otherwise, do nothing.
"""
- if set(
- (
- "--buildpkgonly",
- "--fetchonly",
- "--fetch-all-uri",
- "--oneshot",
- "--onlydeps",
- "--pretend",
- )
- ).intersection(self.myopts):
+ if {
+ "--buildpkgonly",
+ "--fetchonly",
+ "--fetch-all-uri",
+ "--oneshot",
+ "--onlydeps",
+ "--pretend",
+ }.intersection(self.myopts):
return
if pkg.root != self.target_root:
@@ -2254,7 +2252,7 @@ class Scheduler(PollScheduler):
world_set.add(atom)
else:
writemsg_level(
- '\n!!! Unable to record %s in "world"\n' % (atom,),
+ '\n!!! Unable to record {} in "world"\n'.format(atom),
level=logging.WARN,
noiselevel=-1,
)
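
The signal-handler hunk in Scheduler shows the named-placeholder variant of the same formatting rewrite: a % interpolation against a dict becomes str.format() with a keyword argument. A standalone sketch with a hypothetical signal number:

    signum = 15  # hypothetical SIGTERM

    # Before: %-interpolation against a mapping.
    msg = "\n\nExiting on signal %(signal)s\n" % {"signal": signum}

    # After: named replacement field with a keyword argument.
    msg = "\n\nExiting on signal {signal}\n".format(signal=signum)
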
diff --git a/lib/_emerge/SpawnProcess.py b/lib/_emerge/SpawnProcess.py
index ed5724c91..5ecf477b4 100644
--- a/lib/_emerge/SpawnProcess.py
+++ b/lib/_emerge/SpawnProcess.py
@@ -203,12 +203,12 @@ class SpawnProcess(SubProcess):
def _async_wait(self):
# Allow _main_task to exit normally rather than via cancellation.
if self._main_task is None:
- super(SpawnProcess, self)._async_wait()
+ super()._async_wait()
def _async_waitpid(self):
# Allow _main_task to exit normally rather than via cancellation.
if self._main_task is None:
- super(SpawnProcess, self)._async_waitpid()
+ super()._async_waitpid()
def _can_log(self, slave_fd):
return True
@@ -258,9 +258,9 @@ class SpawnProcess(SubProcess):
def get_pids(cgroup):
try:
- with open(os.path.join(cgroup, "cgroup.procs"), "r") as f:
+ with open(os.path.join(cgroup, "cgroup.procs")) as f:
return [int(p) for p in f.read().split()]
- except EnvironmentError:
+ except OSError:
# removed by cgroup-release-agent
return []
diff --git a/lib/_emerge/SubProcess.py b/lib/_emerge/SubProcess.py
index e2d2e61be..39b5ece0b 100644
--- a/lib/_emerge/SubProcess.py
+++ b/lib/_emerge/SubProcess.py
@@ -40,11 +40,11 @@ class SubProcess(AbstractPollTask):
def _async_wait(self):
if self.returncode is None:
- raise asyncio.InvalidStateError("Result is not ready for %s" % (self,))
+ raise asyncio.InvalidStateError("Result is not ready for {}".format(self))
else:
# This calls _unregister, so don't call it until pid status
# is available.
- super(SubProcess, self)._async_wait()
+ super()._async_wait()
def _async_waitpid(self):
"""
@@ -66,7 +66,7 @@ class SubProcess(AbstractPollTask):
def _async_waitpid_cb(self, pid, returncode):
if pid != self.pid:
- raise AssertionError("expected pid %s, got %s" % (self.pid, pid))
+ raise AssertionError("expected pid {}, got {}".format(self.pid, pid))
self.returncode = returncode
self._async_wait()
diff --git a/lib/_emerge/Task.py b/lib/_emerge/Task.py
index d99232134..e3faec087 100644
--- a/lib/_emerge/Task.py
+++ b/lib/_emerge/Task.py
@@ -41,13 +41,13 @@ class Task(SlotObject):
Emulate tuple.__repr__, but don't show 'foo' as u'foo' for unicode
strings.
"""
- return "(%s)" % ", ".join(("'%s'" % x for x in self._hash_key))
+ return "(%s)" % ", ".join("'%s'" % x for x in self._hash_key)
def __repr__(self):
if self._hash_key is None:
# triggered by python-trace
return SlotObject.__repr__(self)
- return "<%s (%s)>" % (
+ return "<{} ({})>".format(
self.__class__.__name__,
- ", ".join(("'%s'" % x for x in self._hash_key)),
+ ", ".join("'%s'" % x for x in self._hash_key),
)
diff --git a/lib/_emerge/UseFlagDisplay.py b/lib/_emerge/UseFlagDisplay.py
index 0487d3ac9..e79bbc83c 100644
--- a/lib/_emerge/UseFlagDisplay.py
+++ b/lib/_emerge/UseFlagDisplay.py
@@ -111,7 +111,7 @@ def pkg_use_display(pkg, opts, modified_use=None):
else:
flags.sort(key=UseFlagDisplay.sort_separated)
flag_displays.append(
- '%s="%s"' % (varname, " ".join("%s" % (f,) for f in flags))
+ '{}="{}"'.format(varname, " ".join("{}".format(f) for f in flags))
)
return " ".join(flag_displays)
diff --git a/lib/_emerge/actions.py b/lib/_emerge/actions.py
index 073e3ae7e..e93202b7f 100644
--- a/lib/_emerge/actions.py
+++ b/lib/_emerge/actions.py
@@ -226,7 +226,7 @@ def action_build(
# "myopts" is a list for backward compatibility.
resume_opts = mtimedb["resume"].get("myopts", [])
if isinstance(resume_opts, list):
- resume_opts = dict((k, True) for k in resume_opts)
+ resume_opts = {k: True for k in resume_opts}
for opt in ("--ask", "--color", "--skipfirst", "--tree"):
resume_opts.pop(opt, None)
@@ -236,7 +236,7 @@ def action_build(
myopts.update(resume_opts)
if "--debug" in myopts:
- writemsg_level("myopts %s\n" % (myopts,))
+ writemsg_level("myopts {}\n".format(myopts))
# Adjust config according to options of the command being resumed.
for myroot in trees:
@@ -285,7 +285,7 @@ def action_build(
prefix = bad(" * ")
writemsg(prefix + "\n")
for line in textwrap.wrap(msg, 72):
- writemsg("%s%s\n" % (prefix, line))
+ writemsg("{}{}\n".format(prefix, line))
writemsg(prefix + "\n")
if resume:
@@ -379,11 +379,12 @@ def action_build(
for task, atoms in dropped_tasks.items():
if not atoms:
writemsg(
- " %s is masked or unavailable\n" % (task,), noiselevel=-1
+ " {} is masked or unavailable\n".format(task),
+ noiselevel=-1,
)
else:
writemsg(
- " %s requires %s\n" % (task, ", ".join(atoms)),
+ " {} requires {}\n".format(task, ", ".join(atoms)),
noiselevel=-1,
)
@@ -1133,12 +1134,12 @@ def _calc_depclean(settings, trees, ldpath_mtimes, myopts, action, args_set, spi
msg.append("Broken soname dependencies found:")
msg.append("")
for atom, parent in soname_deps:
- msg.append(" %s required by:" % (atom,))
- msg.append(" %s" % (parent,))
+ msg.append(" {} required by:".format(atom))
+ msg.append(" {}".format(parent))
msg.append("")
writemsg_level(
- "".join("%s%s\n" % (prefix, line) for line in msg),
+ "".join("{}{}\n".format(prefix, line) for line in msg),
level=logging.WARNING,
noiselevel=-1,
)
@@ -1180,11 +1181,11 @@ def _calc_depclean(settings, trees, ldpath_mtimes, myopts, action, args_set, spi
and vardb.match(Atom(str(atom)))
):
msg.append(
- " %s (%s) pulled in by:" % (atom.unevaluated_atom, atom)
+ " {} ({}) pulled in by:".format(atom.unevaluated_atom, atom)
)
else:
- msg.append(" %s pulled in by:" % (atom,))
- msg.append(" %s" % (parent,))
+ msg.append(" {} pulled in by:".format(atom))
+ msg.append(" {}".format(parent))
msg.append("")
msg.extend(
textwrap.wrap(
@@ -1224,7 +1225,7 @@ def _calc_depclean(settings, trees, ldpath_mtimes, myopts, action, args_set, spi
+ "dependencies then use %s." % good("--nodeps")
)
writemsg_level(
- "".join("%s%s\n" % (prefix, line) for line in msg),
+ "".join("{}{}\n".format(prefix, line) for line in msg),
level=logging.ERROR,
noiselevel=-1,
)
@@ -1276,9 +1277,9 @@ def _calc_depclean(settings, trees, ldpath_mtimes, myopts, action, args_set, spi
)
parent_strs.sort()
msg = []
- msg.append(" %s pulled in by:\n" % (child_node.cpv,))
+ msg.append(" {} pulled in by:\n".format(child_node.cpv))
for parent_str in parent_strs:
- msg.append(" %s\n" % (parent_str,))
+ msg.append(" {}\n".format(parent_str))
msg.append("\n")
portage.writemsg_stdout("".join(msg), noiselevel=-1)
@@ -1533,10 +1534,12 @@ def _calc_depclean(settings, trees, ldpath_mtimes, myopts, action, args_set, spi
consumer.mycpv for consumer in unique_consumers
)
msg.append("")
- msg.append(" %s pulled in by:" % (pkg.cpv,))
+ msg.append(" {} pulled in by:".format(pkg.cpv))
for consumer in unique_consumers:
libs = consumer_libs[consumer]
- msg.append(" %s needs %s" % (consumer, ", ".join(sorted(libs))))
+ msg.append(
+ " {} needs {}".format(consumer, ", ".join(sorted(libs)))
+ )
msg.append("")
writemsg_level(
"".join(prefix + "%s\n" % line for line in msg),
@@ -1618,17 +1621,17 @@ def _calc_depclean(settings, trees, ldpath_mtimes, myopts, action, args_set, spi
if debug:
writemsg_level(
- "\nParent: %s\n" % (node,),
+ "\nParent: {}\n".format(node),
noiselevel=-1,
level=logging.DEBUG,
)
writemsg_level(
- "Depstring: %s\n" % (depstr,),
+ "Depstring: {}\n".format(depstr),
noiselevel=-1,
level=logging.DEBUG,
)
writemsg_level(
- "Priority: %s\n" % (priority,),
+ "Priority: {}\n".format(priority),
noiselevel=-1,
level=logging.DEBUG,
)
@@ -1648,7 +1651,8 @@ def _calc_depclean(settings, trees, ldpath_mtimes, myopts, action, args_set, spi
if debug:
writemsg_level(
- "Candidates: [%s]\n" % ", ".join("'%s'" % (x,) for x in atoms),
+ "Candidates: [%s]\n"
+ % ", ".join("'{}'".format(x) for x in atoms),
noiselevel=-1,
level=logging.DEBUG,
)
@@ -1768,7 +1772,7 @@ def action_deselect(settings, trees, opts, atoms):
for cpv in vardb.match(atom):
pkg = vardb._pkg_str(cpv, None)
- expanded_atoms.add(Atom("%s:%s" % (pkg.cp, pkg.slot)))
+ expanded_atoms.add(Atom("{}:{}".format(pkg.cp, pkg.slot)))
discard_atoms = set()
for atom in world_set:
@@ -1896,7 +1900,7 @@ def action_info(settings, trees, myopts, myfiles):
# Discard null/ from failed cpv_expand category expansion.
xinfo = xinfo.replace("null/", "")
if settings["ROOT"] != "/":
- xinfo = "%s for %s" % (xinfo, eroot)
+ xinfo = "{} for {}".format(xinfo, eroot)
writemsg(
"\nemerge: there are no ebuilds to satisfy %s.\n"
% colorize("INFORM", xinfo),
@@ -1954,7 +1958,7 @@ def action_info(settings, trees, myopts, myfiles):
append(header_width * "=")
append(header_title.rjust(int(header_width / 2 + len(header_title) / 2)))
append(header_width * "=")
- append("System uname: %s" % (platform.platform(aliased=1),))
+ append("System uname: {}".format(platform.platform(aliased=1)))
vm_info = get_vm_info()
if "ram.total" in vm_info:
@@ -1974,7 +1978,7 @@ def action_info(settings, trees, myopts, myfiles):
)
head_commit = None
if last_sync:
- append("Timestamp of repository %s: %s" % (repo.name, last_sync[0]))
+ append("Timestamp of repository {}: {}".format(repo.name, last_sync[0]))
if repo.sync_type:
sync = portage.sync.module_controller.get_class(repo.sync_type)()
options = {"repo": repo}
@@ -1983,7 +1987,7 @@ def action_info(settings, trees, myopts, myfiles):
except NotImplementedError:
head_commit = (1, False)
if head_commit and head_commit[0] == os.EX_OK:
- append("Head commit of repository %s: %s" % (repo.name, head_commit[1]))
+ append("Head commit of repository {}: {}".format(repo.name, head_commit[1]))
# Searching contents for the /bin/sh provider is somewhat
# slow. Therefore, use the basename of the symlink target
@@ -2021,7 +2025,7 @@ def action_info(settings, trees, myopts, myfiles):
# Omit app-shells category from the output.
if name.startswith("app-shells/"):
name = name[len("app-shells/") :]
- sh_str = "%s %s" % (name, version)
+ sh_str = "{} {}".format(name, version)
else:
sh_str = basename
@@ -2130,14 +2134,14 @@ def action_info(settings, trees, myopts, myfiles):
if matched_cp == orig_atom.cp:
provide_suffix = ""
else:
- provide_suffix = " (%s)" % (orig_atom,)
+ provide_suffix = " ({})".format(orig_atom)
ver_map[ver] = _info_pkgs_ver(ver, repo_suffix, provide_suffix)
for cp in sorted(cp_map):
versions = sorted(cp_map[cp].values())
versions = ", ".join(ver.toString() for ver in versions)
- append("%s %s" % ((cp + ":").ljust(cp_max_len + 1), versions))
+ append("{} {}".format((cp + ":").ljust(cp_max_len + 1), versions))
append("Repositories:\n")
for repo in repos:
@@ -2217,7 +2221,7 @@ def action_info(settings, trees, myopts, myfiles):
v = _hide_url_passwd(v)
- append('%s="%s"' % (k, v))
+ append('{}="{}"'.format(k, v))
else:
use = set(v.split())
for varname in use_expand:
@@ -2231,7 +2235,7 @@ def action_info(settings, trees, myopts, myfiles):
for varname in use_expand:
myval = settings.get(varname)
if myval:
- use.append('%s="%s"' % (varname, myval))
+ use.append('{}="{}"'.format(varname, myval))
append(" ".join(use))
else:
unset_vars.append(k)
@@ -2305,7 +2309,7 @@ def action_info(settings, trees, myopts, myfiles):
if pkg_type == "installed":
for myvar in mydesiredvars:
if metadata[myvar].split() != settings.get(myvar, "").split():
- append('%s="%s"' % (myvar, metadata[myvar]))
+ append('{}="{}"'.format(myvar, metadata[myvar]))
append("")
append("")
writemsg_stdout("\n".join(output_buffer), noiselevel=-1)
@@ -2419,7 +2423,9 @@ def action_search(root_config, myopts, myfiles, spinner):
searchinstance.execute(mysearch)
except re.error as comment:
print(
- '\n!!! Regular expression error in "%s": %s' % (mysearch, comment)
+ '\n!!! Regular expression error in "{}": {}'.format(
+ mysearch, comment
+ )
)
sys.exit(1)
searchinstance.output()
@@ -2456,7 +2462,7 @@ def action_sync(
print_results(msgs)
elif msgs and not success:
writemsg_level(
- "".join("%s\n" % (line,) for line in msgs),
+ "".join("{}\n".format(line) for line in msgs),
level=logging.ERROR,
noiselevel=-1,
)
@@ -2492,7 +2498,7 @@ def action_uninstall(settings, trees, ldpath_mtimes, opts, action, files, spinne
)
for line in textwrap.wrap(msg, 70):
writemsg_level(
- "!!! %s\n" % (line,), level=logging.ERROR, noiselevel=-1
+ "!!! {}\n".format(line), level=logging.ERROR, noiselevel=-1
)
for i in e.args[0]:
writemsg_level(
@@ -2540,7 +2546,7 @@ def action_uninstall(settings, trees, ldpath_mtimes, opts, action, files, spinne
ext_atom = Atom(x, allow_repo=True, allow_wildcard=True)
except InvalidAtom:
msg = []
- msg.append("'%s' is not a valid package atom." % (x,))
+ msg.append("'{}' is not a valid package atom.".format(x))
msg.append("Please check ebuild(5) for full details.")
writemsg_level(
"".join("!!! %s\n" % line for line in msg),
@@ -2575,7 +2581,7 @@ def action_uninstall(settings, trees, ldpath_mtimes, opts, action, files, spinne
else:
msg = []
- msg.append("'%s' is not a valid package atom." % (x,))
+ msg.append("'{}' is not a valid package atom.".format(x))
msg.append("Please check ebuild(5) for full details.")
writemsg_level(
"".join("!!! %s\n" % line for line in msg),
@@ -2604,7 +2610,7 @@ def action_uninstall(settings, trees, ldpath_mtimes, opts, action, files, spinne
if owners:
for cpv in owners:
pkg = vardb._pkg_str(cpv, None)
- atom = "%s:%s" % (pkg.cp, pkg.slot)
+ atom = "{}:{}".format(pkg.cp, pkg.slot)
valid_atoms.append(portage.dep.Atom(atom))
else:
writemsg_level(
@@ -2896,7 +2902,7 @@ def getportageversion(portdir, _unused, profile, chost, vardb):
gccver = getgccversion(chost)
unameout = platform.release() + " " + platform.machine()
- return "Portage %s (%s, %s, %s, %s, %s)" % (
+ return "Portage {} ({}, {}, {}, {}, {})".format(
portage.VERSION,
pythonver,
profilever,
@@ -3361,7 +3367,7 @@ def expand_set_arguments(myfiles, myaction, root_config):
newargs.extend(set_atoms)
for error_msg in sets[s].errors:
writemsg_level(
- "%s\n" % (error_msg,), level=logging.ERROR, noiselevel=-1
+ "{}\n".format(error_msg), level=logging.ERROR, noiselevel=-1
)
else:
newargs.append(a)
@@ -3390,7 +3396,7 @@ def repo_name_check(trees):
)
msg.append("")
for p in missing_repo_names:
- msg.append("\t%s/profiles/repo_name" % (p,))
+ msg.append("\t{}/profiles/repo_name".format(p))
msg.append("")
msg.extend(
textwrap.wrap(
@@ -3428,7 +3434,7 @@ def repo_name_duplicate_check(trees):
for k in sorted(ignored_repos):
msg.append(" %s overrides" % ", ".join(k))
for path in ignored_repos[k]:
- msg.append(" %s" % (path,))
+ msg.append(" {}".format(path))
msg.append("")
msg.extend(
" " + x
@@ -3525,7 +3531,7 @@ def run_action(emerge_config):
)
except ParseError as e:
writemsg(
- "\n\n!!!%s.\nSee make.conf(5) for more info.\n" % (e,),
+ "\n\n!!!{}.\nSee make.conf(5) for more info.\n".format(e),
noiselevel=-1,
)
return 1
@@ -3763,7 +3769,7 @@ def run_action(emerge_config):
# access is required but the user is not in the portage group.
if "--ask" in emerge_config.opts:
writemsg_stdout(
- "This action requires %s access...\n" % (access_desc,),
+ "This action requires {} access...\n".format(access_desc),
noiselevel=-1,
)
if portage.data.secpass < 1 and not need_superuser:
@@ -3801,10 +3807,10 @@ def run_action(emerge_config):
log_dir = emerge_log_dir if emerge_log_dir else default_log_dir
disable_emergelog = not all(
os.access(logfile, os.W_OK)
- for logfile in set(
+ for logfile in {
first_existing(os.path.join(log_dir, logfile))
for logfile in ("emerge.log", "emerge-fetch.log")
- )
+ }
)
break
else:
@@ -3829,7 +3835,7 @@ def run_action(emerge_config):
except portage.exception.PortageException as e:
writemsg_level(
"!!! Error creating directory for "
- + "EMERGE_LOG_DIR='%s':\n!!! %s\n" % (emerge_log_dir, e),
+ + "EMERGE_LOG_DIR='{}':\n!!! {}\n".format(emerge_log_dir, e),
noiselevel=-1,
level=logging.ERROR,
)
@@ -3858,9 +3864,9 @@ def run_action(emerge_config):
elif isinstance(arg, list):
# arguments like --exclude that use 'append' action
for x in arg:
- opt_list.append("%s=%s" % (opt, x))
+ opt_list.append("{}={}".format(opt, x))
else:
- opt_list.append("%s=%s" % (opt, arg))
+ opt_list.append("{}={}".format(opt, arg))
myelogstr = " ".join(opt_list)
if emerge_config.action:
myelogstr += " --" + emerge_config.action
@@ -3872,7 +3878,7 @@ def run_action(emerge_config):
def emergeexitsig(signum, frame):
signal.signal(signal.SIGTERM, signal.SIG_IGN)
- portage.util.writemsg("\n\nExiting on signal %(signal)s\n" % {"signal": signum})
+ portage.util.writemsg("\n\nExiting on signal {signal}\n".format(signal=signum))
sys.exit(128 + signum)
signal.signal(signal.SIGTERM, emergeexitsig)
@@ -3994,7 +4000,7 @@ def run_action(emerge_config):
)
for line in textwrap.wrap(msg, 70):
writemsg_level(
- "!!! %s\n" % (line,), level=logging.ERROR, noiselevel=-1
+ "!!! {}\n".format(line), level=logging.ERROR, noiselevel=-1
)
for i in e.args[0]:
writemsg_level(
@@ -4006,7 +4012,7 @@ def run_action(emerge_config):
return 1
continue
msg = []
- msg.append("'%s' is not a valid package atom." % (x,))
+ msg.append("'{}' is not a valid package atom.".format(x))
msg.append("Please check ebuild(5) for full details.")
writemsg_level(
"".join("!!! %s\n" % line for line in msg),
@@ -4037,7 +4043,7 @@ def run_action(emerge_config):
except OSError:
pass
msg = []
- msg.append("'%s' is not a valid package atom." % (x,))
+ msg.append("'{}' is not a valid package atom.".format(x))
msg.append("Please check ebuild(5) for full details.")
writemsg_level(
"".join("!!! %s\n" % line for line in msg),
diff --git a/lib/_emerge/create_world_atom.py b/lib/_emerge/create_world_atom.py
index bf45b2098..8cab94288 100644
--- a/lib/_emerge/create_world_atom.py
+++ b/lib/_emerge/create_world_atom.py
@@ -47,9 +47,9 @@ def create_world_atom(pkg, args_set, root_config, before_install=False):
)
if not slotted:
# check the vdb in case this is multislot
- available_slots = set(
+ available_slots = {
vardb._pkg_str(cpv, None).slot for cpv in vardb.match(Atom(cp))
- )
+ }
slotted = len(available_slots) > 1 or (
len(available_slots) == 1 and "0" not in available_slots
)
diff --git a/lib/_emerge/depgraph.py b/lib/_emerge/depgraph.py
index d0ea92ad9..bbd077098 100644
--- a/lib/_emerge/depgraph.py
+++ b/lib/_emerge/depgraph.py
@@ -961,7 +961,7 @@ class depgraph:
slot_atom,
), deps in self._dynamic_config._slot_operator_deps.items():
writemsg_level(
- " (%s, %s)\n" % (root, slot_atom),
+ " ({}, {})\n".format(root, slot_atom),
level=logging.DEBUG,
noiselevel=-1,
)
@@ -972,7 +972,7 @@ class depgraph:
noiselevel=-1,
)
writemsg_level(
- " child: %s (%s)\n" % (dep.child, dep.priority),
+ " child: {} ({})\n".format(dep.child, dep.priority),
level=logging.DEBUG,
noiselevel=-1,
)
@@ -1010,9 +1010,11 @@ class depgraph:
for root in self._forced_rebuilds:
for child in self._forced_rebuilds[root]:
- writemsg_stdout(" %s causes rebuilds for:\n" % (child,), noiselevel=-1)
+ writemsg_stdout(
+ " {} causes rebuilds for:\n".format(child), noiselevel=-1
+ )
for parent in self._forced_rebuilds[root][child]:
- writemsg_stdout(" %s\n" % (parent,), noiselevel=-1)
+ writemsg_stdout(" {}\n".format(parent), noiselevel=-1)
def _eliminate_ignored_binaries(self):
"""
@@ -1153,9 +1155,9 @@ class depgraph:
)
for pkg, ebuild in report_pkgs:
- writemsg(" %s::%s" % (pkg.cpv, pkg.repo), noiselevel=-1)
+ writemsg(" {}::{}".format(pkg.cpv, pkg.repo), noiselevel=-1)
if pkg.root_config.settings["ROOT"] != "/":
- writemsg(" for %s" % (pkg.root,), noiselevel=-1)
+ writemsg(" for {}".format(pkg.root), noiselevel=-1)
writemsg("\n", noiselevel=-1)
msg = []
@@ -1243,9 +1245,9 @@ class depgraph:
flag_display.append(flag)
flag_display = " ".join(flag_display)
# The user can paste this line into package.use
- writemsg(" =%s %s" % (pkg.cpv, flag_display), noiselevel=-1)
+ writemsg(" ={} {}".format(pkg.cpv, flag_display), noiselevel=-1)
if pkg.root_config.settings["ROOT"] != "/":
- writemsg(" # for %s" % (pkg.root,), noiselevel=-1)
+ writemsg(" # for {}".format(pkg.root), noiselevel=-1)
writemsg("\n", noiselevel=-1)
msg = [
@@ -1269,7 +1271,7 @@ class depgraph:
)
for pkg in changed_deps:
- msg = " %s%s%s" % (pkg.cpv, _repo_separator, pkg.repo)
+ msg = " {}{}{}".format(pkg.cpv, _repo_separator, pkg.repo)
if pkg.root_config.settings["ROOT"] != "/":
msg += " for %s" % pkg.root
writemsg("%s\n" % msg, noiselevel=-1)
@@ -1378,7 +1380,7 @@ class depgraph:
writemsg(str(pkg.slot_atom), noiselevel=-1)
if pkg.root_config.settings["ROOT"] != "/":
- writemsg(" for %s" % (pkg.root,), noiselevel=-1)
+ writemsg(" for {}".format(pkg.root), noiselevel=-1)
writemsg("\n\n", noiselevel=-1)
selected_pkg = next(
@@ -1386,9 +1388,9 @@ class depgraph:
None,
)
- writemsg(" selected: %s\n" % (selected_pkg,), noiselevel=-1)
+ writemsg(" selected: {}\n".format(selected_pkg), noiselevel=-1)
writemsg(
- " skipped: %s (see unsatisfied dependency below)\n" % (pkg,),
+ " skipped: {} (see unsatisfied dependency below)\n".format(pkg),
noiselevel=-1,
)
@@ -1408,7 +1410,7 @@ class depgraph:
for pkg, parent_atoms in backtrack_masked:
writemsg(str(pkg.slot_atom), noiselevel=-1)
if pkg.root_config.settings["ROOT"] != "/":
- writemsg(" for %s" % (pkg.root,), noiselevel=-1)
+ writemsg(" for {}".format(pkg.root), noiselevel=-1)
writemsg("\n", noiselevel=-1)
def _show_missed_update_slot_conflicts(self, missed_updates):
@@ -1426,7 +1428,7 @@ class depgraph:
for pkg, parent_atoms in missed_updates:
msg.append(str(pkg.slot_atom))
if pkg.root_config.settings["ROOT"] != "/":
- msg.append(" for %s" % (pkg.root,))
+ msg.append(" for {}".format(pkg.root))
msg.append("\n\n")
msg.append(indent)
@@ -1467,7 +1469,9 @@ class depgraph:
use_display = ""
msg.append(2 * indent)
- msg.append("%s required by %s %s\n" % (atom, parent, use_display))
+ msg.append(
+ "{} required by {} {}\n".format(atom, parent, use_display)
+ )
msg.append(2 * indent)
msg.append(marker)
msg.append("\n")
@@ -1804,7 +1808,7 @@ class depgraph:
)
for conflict in conflicts:
writemsg_level(
- " Conflict: (%s, %s)\n" % (conflict.root, conflict.atom),
+ " Conflict: ({}, {})\n".format(conflict.root, conflict.atom),
level=logging.DEBUG,
noiselevel=-1,
)
@@ -1993,11 +1997,11 @@ class depgraph:
# atoms matched to_be_selected that did not
# match to_be_masked.
parent_atoms = self._dynamic_config._parent_atoms.get(to_be_masked, set())
- conflict_atoms = set(
+ conflict_atoms = {
parent_atom
for parent_atom in all_parents
if parent_atom not in parent_atoms
- )
+ }
similar_pkgs = []
if conflict_atoms:
@@ -2040,7 +2044,9 @@ class depgraph:
" package(s) to mask: %s" % str(to_be_masked),
" slot: %s" % slot_atom,
" parents: %s"
- % ", ".join("(%s, '%s')" % (ppkg, atom) for ppkg, atom in all_parents),
+ % ", ".join(
+ "({}, '{}')".format(ppkg, atom) for ppkg, atom in all_parents
+ ),
"",
]
writemsg_level(
@@ -2105,7 +2111,7 @@ class depgraph:
for unbuilt_child in chain(
matches,
self._iter_match_pkgs(
- root_config, "ebuild", Atom("=%s" % (dep.child.cpv,))
+ root_config, "ebuild", Atom("={}".format(dep.child.cpv))
),
):
if unbuilt_child in self._dynamic_config._runtime_pkg_mask:
@@ -2136,7 +2142,7 @@ class depgraph:
"",
"backtracking due to slot/sub-slot change:",
" child package: %s" % child,
- " child slot: %s/%s" % (child.slot, child.sub_slot),
+ " child slot: {}/{}".format(child.slot, child.sub_slot),
" new child: %s" % new_child_slot,
" new child slot: %s/%s"
% (new_child_slot.slot, new_child_slot.sub_slot),
@@ -3346,7 +3352,7 @@ class depgraph:
# For PackageArg and AtomArg types, it's
# redundant to display the atom attribute.
writemsg_level(
- "%s%s\n" % ("Parent Dep:".ljust(15), myparent),
+ "{}{}\n".format("Parent Dep:".ljust(15), myparent),
level=logging.DEBUG,
noiselevel=-1,
)
@@ -3359,7 +3365,7 @@ class depgraph:
and dep.atom.package
and dep.atom is not dep.atom.unevaluated_atom
):
- uneval = " (%s)" % (dep.atom.unevaluated_atom,)
+ uneval = " ({})".format(dep.atom.unevaluated_atom)
writemsg_level(
"%s%s%s required by %s\n"
% ("Parent Dep:".ljust(15), dep.atom, uneval, myparent),
@@ -3654,11 +3660,11 @@ class depgraph:
for child in children:
try:
- self._dynamic_config._parent_atoms[child] = set(
+ self._dynamic_config._parent_atoms[child] = {
(parent, atom)
for (parent, atom) in self._dynamic_config._parent_atoms[child]
if parent is not pkg
- )
+ }
except KeyError:
pass
@@ -3847,15 +3853,15 @@ class depgraph:
continue
if debug:
writemsg_level(
- "\nParent: %s\n" % (pkg,), noiselevel=-1, level=logging.DEBUG
+ "\nParent: {}\n".format(pkg), noiselevel=-1, level=logging.DEBUG
)
writemsg_level(
- "Depstring: %s\n" % (dep_string,),
+ "Depstring: {}\n".format(dep_string),
noiselevel=-1,
level=logging.DEBUG,
)
writemsg_level(
- "Priority: %s\n" % (dep_priority,),
+ "Priority: {}\n".format(dep_priority),
noiselevel=-1,
level=logging.DEBUG,
)
@@ -4023,16 +4029,18 @@ class depgraph:
if debug:
writemsg_level(
- "\nParent: %s\n" % (pkg,), noiselevel=-1, level=logging.DEBUG
+ "\nParent: {}\n".format(pkg), noiselevel=-1, level=logging.DEBUG
)
dep_repr = portage.dep.paren_enclose(
dep_string, unevaluated_atom=True, opconvert=True
)
writemsg_level(
- "Depstring: %s\n" % (dep_repr,), noiselevel=-1, level=logging.DEBUG
+ "Depstring: {}\n".format(dep_repr), noiselevel=-1, level=logging.DEBUG
)
writemsg_level(
- "Priority: %s\n" % (dep_priority,), noiselevel=-1, level=logging.DEBUG
+ "Priority: {}\n".format(dep_priority),
+ noiselevel=-1,
+ level=logging.DEBUG,
)
try:
@@ -4054,7 +4062,7 @@ class depgraph:
if debug:
writemsg_level(
- "Candidates: %s\n" % ([str(x) for x in selected_atoms[pkg]],),
+ "Candidates: {}\n".format([str(x) for x in selected_atoms[pkg]]),
noiselevel=-1,
level=logging.DEBUG,
)
@@ -4170,7 +4178,9 @@ class depgraph:
if debug:
writemsg_level(
- "\nCandidates: %s: %s\n" % (virt_pkg.cpv, [str(x) for x in atoms]),
+ "\nCandidates: {}: {}\n".format(
+ virt_pkg.cpv, [str(x) for x in atoms]
+ ),
noiselevel=-1,
level=logging.DEBUG,
)
@@ -4275,7 +4285,7 @@ class depgraph:
if debug:
writemsg_level(
- "\nExiting... %s\n" % (pkg,), noiselevel=-1, level=logging.DEBUG
+ "\nExiting... {}\n".format(pkg), noiselevel=-1, level=logging.DEBUG
)
return 1
@@ -4303,8 +4313,7 @@ class depgraph:
atom_pkg_map[atom] = dep_pkg
if len(atom_pkg_map) < 2:
- for item in atom_pkg_map.items():
- yield item
+ yield from atom_pkg_map.items()
return
cp_pkg_map = {}
@@ -4403,14 +4412,13 @@ class depgraph:
if x and x[0] == "||":
disjunctions.append(x)
else:
- for y in self._queue_disjunctive_deps(
+ yield from self._queue_disjunctive_deps(
pkg,
dep_root,
dep_priority,
x,
_disjunctions_recursive=disjunctions,
- ):
- yield y
+ )
else:
# Note: Eventually this will check for PROPERTIES=virtual
# or whatever other metadata gets implemented for this
@@ -4470,7 +4478,7 @@ class depgraph:
categories = set()
for db, pkg_type, built, installed, db_keys in dbs:
for cat in db.categories:
- if db.cp_list("%s/%s" % (cat, atom_pn)):
+ if db.cp_list("{}/{}".format(cat, atom_pn)):
categories.add(cat)
deps = []
@@ -4757,7 +4765,7 @@ class depgraph:
for pset in list(depgraph_sets.sets.values()) + [sets[s]]:
for error_msg in pset.errors:
writemsg_level(
- "%s\n" % (error_msg,),
+ "{}\n".format(error_msg),
level=logging.ERROR,
noiselevel=-1,
)
@@ -4918,7 +4926,7 @@ class depgraph:
for cpv in owners:
pkg = vardb._pkg_str(cpv, None)
- atom = Atom("%s:%s" % (pkg.cp, pkg.slot))
+ atom = Atom("{}:{}".format(pkg.cp, pkg.slot))
args.append(AtomArg(arg=atom, atom=atom, root_config=root_config))
if "--update" in self._frozen_config.myopts:
@@ -5055,7 +5063,7 @@ class depgraph:
continue
if debug:
writemsg_level(
- "\n Arg: %s\n Atom: %s\n" % (arg, atom),
+ "\n Arg: {}\n Atom: {}\n".format(arg, atom),
noiselevel=-1,
level=logging.DEBUG,
)
@@ -5167,7 +5175,9 @@ class depgraph:
"\n\n!!! Problem in '%s' dependencies.\n" % atom, noiselevel=-1
)
writemsg(
- "!!! %s %s\n" % (str(e), str(getattr(e, "__module__", None)))
+ "!!! {} {}\n".format(
+ str(e), str(getattr(e, "__module__", None))
+ )
)
raise
@@ -5429,7 +5439,7 @@ class depgraph:
slots.remove(highest_pkg.slot)
while slots:
slot = slots.pop()
- slot_atom = portage.dep.Atom("%s:%s" % (highest_pkg.cp, slot))
+ slot_atom = portage.dep.Atom("{}:{}".format(highest_pkg.cp, slot))
pkg, in_graph = self._select_package(root_config.root, slot_atom)
if pkg is not None and pkg.cp == highest_pkg.cp and pkg < highest_pkg:
greedy_pkgs.append(pkg)
@@ -5694,7 +5704,7 @@ class depgraph:
except InvalidDependString as e:
writemsg_level(
"!!! Invalid RDEPEND in "
- + "'%svar/db/pkg/%s/RDEPEND': %s\n" % (pkg.root, pkg.cpv, e),
+ + "'{}var/db/pkg/{}/RDEPEND': {}\n".format(pkg.root, pkg.cpv, e),
noiselevel=-1,
level=logging.ERROR,
)
@@ -5733,7 +5743,7 @@ class depgraph:
raise
writemsg_level(
"!!! Invalid RDEPEND in "
- + "'%svar/db/pkg/%s/RDEPEND': %s\n" % (pkg.root, pkg.cpv, e),
+ + "'{}var/db/pkg/{}/RDEPEND': {}\n".format(pkg.root, pkg.cpv, e),
noiselevel=-1,
level=logging.ERROR,
)
@@ -5768,7 +5778,7 @@ class depgraph:
graph = self._dynamic_config.digraph
def format_pkg(pkg):
- pkg_name = "%s%s%s" % (pkg.cpv, _repo_separator, pkg.repo)
+ pkg_name = "{}{}{}".format(pkg.cpv, _repo_separator, pkg.repo)
return pkg_name
if target_atom is not None and isinstance(node, Package):
@@ -5847,7 +5857,7 @@ class depgraph:
node_type = "set"
else:
node_type = "argument"
- dep_chain.append(("%s" % (node,), node_type))
+ dep_chain.append(("{}".format(node), node_type))
elif node is not start_node:
for ppkg, patom in all_parents[child]:
@@ -5943,7 +5953,7 @@ class depgraph:
if self._dynamic_config.digraph.parent_nodes(parent_arg):
selected_parent = parent_arg
else:
- dep_chain.append(("%s" % (parent_arg,), "argument"))
+ dep_chain.append(("{}".format(parent_arg), "argument"))
selected_parent = None
node = selected_parent
@@ -5991,11 +6001,11 @@ class depgraph:
if arg:
xinfo = '"%s"' % arg
if isinstance(myparent, AtomArg):
- xinfo = '"%s"' % (myparent,)
+ xinfo = '"{}"'.format(myparent)
# Discard null/ from failed cpv_expand category expansion.
xinfo = xinfo.replace("null/", "")
if root != self._frozen_config._running_root.root:
- xinfo = "%s for %s" % (xinfo, root)
+ xinfo = "{} for {}".format(xinfo, root)
masked_packages = []
missing_use = []
missing_use_adjustable = set()
@@ -6120,7 +6130,9 @@ class depgraph:
except InvalidAtom:
writemsg(
"violated_conditionals raised "
- + "InvalidAtom: '%s' parent: %s" % (atom, myparent),
+ + "InvalidAtom: '{}' parent: {}".format(
+ atom, myparent
+ ),
noiselevel=-1,
)
raise
@@ -6430,7 +6442,7 @@ class depgraph:
noiselevel=-1,
)
use_display = pkg_use_display(pkg, self._frozen_config.myopts)
- writemsg("- %s %s\n" % (output_cpv, use_display), noiselevel=-1)
+ writemsg("- {} {}\n".format(output_cpv, use_display), noiselevel=-1)
writemsg(
"\n The following REQUIRED_USE flag constraints "
+ "are unsatisfied:\n",
@@ -6586,10 +6598,9 @@ class depgraph:
installed,
db_keys,
) in self._dynamic_config._filtered_trees[root_config.root]["dbs"]:
- for pkg in self._iter_match_pkgs(
+ yield from self._iter_match_pkgs(
root_config, pkg_type, atom, onlydeps=onlydeps
- ):
- yield pkg
+ )
def _iter_match_pkgs(self, root_config, pkg_type, atom, onlydeps=False):
if atom.package:
@@ -6892,7 +6903,7 @@ class depgraph:
except portage.exception.PackageNotFound:
return next(
self._iter_match_pkgs(
- pkg.root_config, "ebuild", Atom("=%s" % (pkg.cpv,))
+ pkg.root_config, "ebuild", Atom("={}".format(pkg.cpv))
),
None,
)
@@ -6903,7 +6914,7 @@ class depgraph:
except portage.exception.PackageNotFound:
pkg_eb_visible = False
for pkg_eb in self._iter_match_pkgs(
- pkg.root_config, "ebuild", Atom("=%s" % (pkg.cpv,))
+ pkg.root_config, "ebuild", Atom("={}".format(pkg.cpv))
):
if self._pkg_visibility_check(pkg_eb, autounmask_level):
pkg_eb_visible = True
@@ -8424,7 +8435,7 @@ class depgraph:
or blocker.priority.runtime_post
]
if blockers is not None:
- blockers = set(blocker.atom for blocker in blockers)
+ blockers = {blocker.atom for blocker in blockers}
# If this node has any blockers, create a "nomerge"
# node for it so that they can be enforced.
@@ -8475,7 +8486,7 @@ class depgraph:
# is thrown from cpv_expand due to multiple
# matches (this can happen if an atom lacks a
# category).
- show_invalid_depstring_notice(pkg, "%s" % (e,))
+ show_invalid_depstring_notice(pkg, "{}".format(e))
del e
raise
if not success:
@@ -8513,7 +8524,7 @@ class depgraph:
except portage.exception.InvalidAtom as e:
depstr = " ".join(vardb.aux_get(pkg.cpv, dep_keys))
show_invalid_depstring_notice(
- pkg, "Invalid Atom: %s" % (e,)
+ pkg, "Invalid Atom: {}".format(e)
)
return False
for cpv in stale_cache:
@@ -9304,7 +9315,7 @@ class depgraph:
if leaves:
writemsg(
- "runtime cycle leaf: %s\n\n" % (selected_nodes[0],),
+ "runtime cycle leaf: {}\n\n".format(selected_nodes[0]),
noiselevel=-1,
)
@@ -9746,7 +9757,7 @@ class depgraph:
]
for node in retlist:
if isinstance(node, Package) and node.operation == "uninstall":
- msg.append("\t%s" % (node,))
+ msg.append("\t{}".format(node))
writemsg_level(
"\n%s\n" % "".join("%s\n" % line for line in msg),
level=logging.DEBUG,
@@ -9971,7 +9982,7 @@ class depgraph:
)
else:
msg.append(
- "%s required by %s %s" % (atom, parent, use_display)
+ "{} required by {} {}".format(atom, parent, use_display)
)
msg.append("\n")
@@ -10082,19 +10093,19 @@ class depgraph:
if autounmask_unrestricted_atoms:
if is_latest:
unstable_keyword_msg[root].append(
- ">=%s %s\n" % (pkg.cpv, keyword)
+ ">={} {}\n".format(pkg.cpv, keyword)
)
elif is_latest_in_slot:
unstable_keyword_msg[root].append(
- ">=%s:%s %s\n" % (pkg.cpv, pkg.slot, keyword)
+ ">={}:{} {}\n".format(pkg.cpv, pkg.slot, keyword)
)
else:
unstable_keyword_msg[root].append(
- "=%s %s\n" % (pkg.cpv, keyword)
+ "={} {}\n".format(pkg.cpv, keyword)
)
else:
unstable_keyword_msg[root].append(
- "=%s %s\n" % (pkg.cpv, keyword)
+ "={} {}\n".format(pkg.cpv, keyword)
)
p_mask_change_msg = {}
@@ -10135,7 +10146,7 @@ class depgraph:
p_mask_change_msg[root].append(">=%s\n" % pkg.cpv)
elif is_latest_in_slot:
p_mask_change_msg[root].append(
- ">=%s:%s\n" % (pkg.cpv, pkg.slot)
+ ">={}:{}\n".format(pkg.cpv, pkg.slot)
)
else:
p_mask_change_msg[root].append("=%s\n" % pkg.cpv)
@@ -10172,15 +10183,15 @@ class depgraph:
)
if is_latest:
use_changes_msg[root].append(
- ">=%s %s\n" % (pkg.cpv, " ".join(adjustments))
+ ">={} {}\n".format(pkg.cpv, " ".join(adjustments))
)
elif is_latest_in_slot:
use_changes_msg[root].append(
- ">=%s:%s %s\n" % (pkg.cpv, pkg.slot, " ".join(adjustments))
+ ">={}:{} {}\n".format(pkg.cpv, pkg.slot, " ".join(adjustments))
)
else:
use_changes_msg[root].append(
- "=%s %s\n" % (pkg.cpv, " ".join(adjustments))
+ "={} {}\n".format(pkg.cpv, " ".join(adjustments))
)
license_msg = {}
@@ -10198,7 +10209,7 @@ class depgraph:
license_msg[root].append(self._get_dep_chain_as_comment(pkg))
if is_latest:
license_msg[root].append(
- ">=%s %s\n" % (pkg.cpv, " ".join(sorted(missing_licenses)))
+ ">={} {}\n".format(pkg.cpv, " ".join(sorted(missing_licenses)))
)
elif is_latest_in_slot:
license_msg[root].append(
@@ -10207,7 +10218,7 @@ class depgraph:
)
else:
license_msg[root].append(
- "=%s %s\n" % (pkg.cpv, " ".join(sorted(missing_licenses)))
+ "={} {}\n".format(pkg.cpv, " ".join(sorted(missing_licenses)))
)
def find_config_file(abs_user_config, file_name):
@@ -10370,21 +10381,20 @@ class depgraph:
def write_changes(root, changes, file_to_write_to):
file_contents = None
try:
- with io.open(
+ with open(
_unicode_encode(
file_to_write_to, encoding=_encodings["fs"], errors="strict"
),
- mode="r",
encoding=_encodings["content"],
errors="replace",
) as f:
file_contents = f.readlines()
- except IOError as e:
+ except OSError as e:
if e.errno == errno.ENOENT:
file_contents = []
else:
problems.append(
- "!!! Failed to read '%s': %s\n" % (file_to_write_to, e)
+ "!!! Failed to read '{}': {}\n".format(file_to_write_to, e)
)
if file_contents is not None:
file_contents.extend(changes)
@@ -10485,7 +10495,7 @@ class depgraph:
]
writemsg("\n", noiselevel=-1)
for line in msg:
- writemsg(" %s %s\n" % (colorize("WARN", "*"), line), noiselevel=-1)
+ writemsg(" {} {}\n".format(colorize("WARN", "*"), line), noiselevel=-1)
def display_problems(self):
"""
@@ -10529,7 +10539,7 @@ class depgraph:
for pset in depgraph_sets.sets.values():
for error_msg in pset.errors:
writemsg_level(
- "%s\n" % (error_msg,), level=logging.ERROR, noiselevel=-1
+ "{}\n".format(error_msg), level=logging.ERROR, noiselevel=-1
)
# TODO: Add generic support for "set problem" handlers so that
@@ -10609,7 +10619,7 @@ class depgraph:
refs.sort()
ref_string = ", ".join(["'%s'" % name for name in refs])
ref_string = " pulled in by " + ref_string
- msg.append(" %s%s\n" % (colorize("INFORM", str(arg)), ref_string))
+ msg.append(" {}{}\n".format(colorize("INFORM", str(arg)), ref_string))
msg.append("\n")
if "selected" in problems_sets or "world" in problems_sets:
msg.append(
@@ -10735,7 +10745,8 @@ class depgraph:
added_favorites.add(myfavkey)
except portage.exception.InvalidDependString as e:
writemsg(
- "\n\n!!! '%s' has invalid PROVIDE: %s\n" % (x.cpv, e), noiselevel=-1
+ "\n\n!!! '{}' has invalid PROVIDE: {}\n".format(x.cpv, e),
+ noiselevel=-1,
)
writemsg(
"!!! see '%s'\n\n"
@@ -10767,7 +10778,7 @@ class depgraph:
writemsg_stdout("\n", noiselevel=-1)
for a in all_added:
writemsg_stdout(
- " %s %s\n" % (colorize("GOOD", "*"), a), noiselevel=-1
+ " {} {}\n".format(colorize("GOOD", "*"), a), noiselevel=-1
)
writemsg_stdout("\n", noiselevel=-1)
prompt = (
@@ -11252,7 +11263,7 @@ class _dep_check_composite_db(dbapi):
while sub_slots:
slot, sub_slot = sub_slots.pop()
- slot_atom = atom.with_slot("%s/%s" % (slot, sub_slot))
+ slot_atom = atom.with_slot("{}/{}".format(slot, sub_slot))
pkg, existing = self._depgraph._select_package(self._root, slot_atom)
if not pkg:
continue
@@ -11424,7 +11435,7 @@ def ambiguous_package_name(arg, atoms, root_config, spinner, myopts):
"!!! one of the following fully-qualified ebuild names instead:\n\n",
noiselevel=-1,
)
- for cp in sorted(set(portage.dep_getkey(atom) for atom in atoms)):
+ for cp in sorted({portage.dep_getkey(atom) for atom in atoms}):
writemsg(" " + colorize("INFORM", cp) + "\n", noiselevel=-1)
return
@@ -11440,7 +11451,7 @@ def ambiguous_package_name(arg, atoms, root_config, spinner, myopts):
null_cp = portage.dep_getkey(insert_category_into_atom(arg, "null"))
cat, atom_pn = portage.catsplit(null_cp)
s.searchkey = atom_pn
- for cp in sorted(set(portage.dep_getkey(atom) for atom in atoms)):
+ for cp in sorted({portage.dep_getkey(atom) for atom in atoms}):
s.addCP(cp)
s.output()
writemsg(
@@ -11894,7 +11905,7 @@ def _get_masking_status(pkg, pkgsettings, root_config, myrepo=None, use=None):
if pkg.invalid:
for msgs in pkg.invalid.values():
for msg in msgs:
- mreasons.append(_MaskReason("invalid", "invalid: %s" % (msg,)))
+ mreasons.append(_MaskReason("invalid", "invalid: {}".format(msg)))
if not pkg._metadata["SLOT"]:
mreasons.append(_MaskReason("invalid", "SLOT: undefined"))
diff --git a/lib/_emerge/emergelog.py b/lib/_emerge/emergelog.py
index 14439da6e..4485aa3ed 100644
--- a/lib/_emerge/emergelog.py
+++ b/lib/_emerge/emergelog.py
@@ -35,7 +35,7 @@ def emergelog(xterm_titles, mystr, short_msg=None):
try:
file_path = os.path.join(_emerge_log_dir, "emerge.log")
existing_log = os.path.exists(file_path)
- mylogfile = io.open(
+ mylogfile = open(
_unicode_encode(file_path, encoding=_encodings["fs"], errors="strict"),
mode="a",
encoding=_encodings["content"],
@@ -47,10 +47,10 @@ def emergelog(xterm_titles, mystr, short_msg=None):
)
mylock = portage.locks.lockfile(file_path)
try:
- mylogfile.write("%.0f: %s\n" % (time.time(), mystr))
+ mylogfile.write("{:.0f}: {}\n".format(time.time(), mystr))
mylogfile.close()
finally:
portage.locks.unlockfile(mylock)
- except (IOError, OSError, portage.exception.PortageException) as e:
+ except (OSError, portage.exception.PortageException) as e:
if secpass >= 1:
- portage.util.writemsg("emergelog(): %s\n" % (e,), noiselevel=-1)
+ portage.util.writemsg("emergelog(): {}\n".format(e), noiselevel=-1)
diff --git a/lib/_emerge/getloadavg.py b/lib/_emerge/getloadavg.py
index 433f3ba21..f36d57e0e 100644
--- a/lib/_emerge/getloadavg.py
+++ b/lib/_emerge/getloadavg.py
@@ -14,7 +14,7 @@ if getloadavg is None:
try:
with open("/proc/loadavg") as f:
loadavg_str = f.readline()
- except IOError:
+ except OSError:
# getloadavg() is only supposed to raise OSError, so convert
raise OSError("unknown")
loadavg_split = loadavg_str.split()
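[Illustrative aside, not part of the commit] The emergelog and getloadavg changes rely on Python 3's exception and I/O cleanup: since PEP 3151 (Python 3.3), IOError and EnvironmentError are plain aliases of OSError, and the builtin open() is io.open(), so the narrower spellings add nothing. A small check:

import io
assert IOError is OSError and EnvironmentError is OSError   # merged by PEP 3151
assert open is io.open                                      # builtin open() is io.open()
try:
    open("/nonexistent/emerge.log")
except OSError:   # also catches what older code caught as IOError
    pass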
diff --git a/lib/_emerge/resolver/backtracking.py b/lib/_emerge/resolver/backtracking.py
index 84d461663..d281afc5c 100644
--- a/lib/_emerge/resolver/backtracking.py
+++ b/lib/_emerge/resolver/backtracking.py
@@ -206,7 +206,7 @@ class Backtracker:
new_node.parameter.runtime_pkg_mask.setdefault(dep.parent, {})[
"missing dependency"
- ] = set([(dep.parent, dep.root, dep.atom)])
+ ] = {(dep.parent, dep.root, dep.atom)}
self._add(new_node)
diff --git a/lib/_emerge/resolver/circular_dependency.py b/lib/_emerge/resolver/circular_dependency.py
index 969be0f7a..1f5242346 100644
--- a/lib/_emerge/resolver/circular_dependency.py
+++ b/lib/_emerge/resolver/circular_dependency.py
@@ -292,7 +292,7 @@ class circular_dependency_handler:
changes.append(colorize("red", "+" + flag))
else:
changes.append(colorize("blue", "-" + flag))
- msg = "- %s (Change USE: %s)\n" % (parent.cpv, " ".join(changes))
+ msg = "- {} (Change USE: {})\n".format(parent.cpv, " ".join(changes))
if followup_change:
msg += (
" (This change might require USE changes on parent packages.)"
diff --git a/lib/_emerge/resolver/output.py b/lib/_emerge/resolver/output.py
index 6e569ea48..30fb8ce43 100644
--- a/lib/_emerge/resolver/output.py
+++ b/lib/_emerge/resolver/output.py
@@ -85,10 +85,10 @@ class Display:
"""
if blocker.satisfied:
self.blocker_style = "PKG_BLOCKER_SATISFIED"
- addl = "%s " % (colorize(self.blocker_style, "b"),)
+ addl = "{} ".format(colorize(self.blocker_style, "b"))
else:
self.blocker_style = "PKG_BLOCKER"
- addl = "%s " % (colorize(self.blocker_style, "B"),)
+ addl = "{} ".format(colorize(self.blocker_style, "B"))
addl += self.empty_space_in_brackets()
self.resolved = dep_expand(
str(blocker.atom).lstrip("!"), mydb=self.vardb, settings=self.pkgsettings
@@ -96,14 +96,14 @@ class Display:
if self.conf.columns and self.conf.quiet:
addl += " " + colorize(self.blocker_style, str(self.resolved))
else:
- addl = "[%s %s] %s%s" % (
+ addl = "[{} {}] {}{}".format(
colorize(self.blocker_style, "blocks"),
addl,
self.indent,
colorize(self.blocker_style, str(self.resolved)),
)
block_parents = self.conf.blocker_parents.parent_nodes(blocker)
- block_parents = set(str(pnode.cpv) for pnode in block_parents)
+ block_parents = {str(pnode.cpv) for pnode in block_parents}
block_parents = ", ".join(block_parents)
if blocker.atom.blocker.overlap.forbid:
blocking_desc = "hard blocking"
@@ -117,7 +117,7 @@ class Display:
)
else:
addl += colorize(
- self.blocker_style, " (is %s %s)" % (blocking_desc, block_parents)
+ self.blocker_style, " (is {} {})".format(blocking_desc, block_parents)
)
if blocker.satisfied:
if not self.conf.columns:
@@ -356,7 +356,7 @@ class Display:
pkg_info.repo_path_real
)
else:
- self.repoadd = "%s=>%s" % (
+ self.repoadd = "{}=>{}".format(
self.conf.repo_display.repoStr(repo_path_prev),
self.conf.repo_display.repoStr(pkg_info.repo_path_real),
)
@@ -471,13 +471,13 @@ class Display:
self.verboseadd = None
else:
if not pkg_info.merge:
- myprint = "[%s] %s%s" % (
+ myprint = "[{}] {}{}".format(
self.pkgprint(pkg_info.operation.ljust(13), pkg_info),
self.indent,
self.pkgprint(pkg.cp, pkg_info),
)
else:
- myprint = "[%s %s] %s%s" % (
+ myprint = "[{} {}] {}{}".format(
self.pkgprint(pkg.type_name, pkg_info),
pkg_info.attr_display,
self.indent,
@@ -517,14 +517,14 @@ class Display:
else:
if not pkg_info.merge:
addl = self.empty_space_in_brackets()
- myprint = "[%s%s] %s%s" % (
+ myprint = "[{}{}] {}{}".format(
self.pkgprint(pkg_info.operation.ljust(13), pkg_info),
addl,
self.indent,
self.pkgprint(pkg.cp, pkg_info),
)
else:
- myprint = "[%s %s] %s%s" % (
+ myprint = "[{} {}] {}{}".format(
self.pkgprint(pkg.type_name, pkg_info),
pkg_info.attr_display,
self.indent,
@@ -551,7 +551,7 @@ class Display:
pkg_str = self._append_repository(pkg_str, pkg, pkg_info)
if not pkg_info.merge:
addl = self.empty_space_in_brackets()
- myprint = "[%s%s] %s%s %s" % (
+ myprint = "[{}{}] {}{} {}".format(
self.pkgprint(pkg_info.operation.ljust(13), pkg_info),
addl,
self.indent,
@@ -559,7 +559,7 @@ class Display:
pkg_info.oldbest,
)
else:
- myprint = "[%s %s] %s%s %s" % (
+ myprint = "[{} {}] {}{} {}".format(
self.pkgprint(pkg.type_name, pkg_info),
pkg_info.attr_display,
self.indent,
@@ -576,30 +576,30 @@ class Display:
"""
for msg in self.print_msg:
if isinstance(msg, str):
- writemsg_stdout("%s\n" % (msg,), noiselevel=-1)
+ writemsg_stdout("{}\n".format(msg), noiselevel=-1)
continue
myprint, self.verboseadd, repoadd = msg
if self.verboseadd:
myprint += " " + self.verboseadd
if show_repos and repoadd:
myprint += " " + teal("[%s]" % repoadd)
- writemsg_stdout("%s\n" % (myprint,), noiselevel=-1)
+ writemsg_stdout("{}\n".format(myprint), noiselevel=-1)
def print_blockers(self):
"""Performs the actual output printing of the pre-formatted
blocker messages
"""
for pkg in self.blockers:
- writemsg_stdout("%s\n" % (pkg,), noiselevel=-1)
+ writemsg_stdout("{}\n".format(pkg), noiselevel=-1)
def print_verbose(self, show_repos):
"""Prints the verbose output to std_out
@param show_repos: bool.
"""
- writemsg_stdout("\n%s\n" % (self.counters,), noiselevel=-1)
+ writemsg_stdout("\n{}\n".format(self.counters), noiselevel=-1)
if show_repos:
- writemsg_stdout("%s" % (self.conf.repo_display,), noiselevel=-1)
+ writemsg_stdout("{}".format(self.conf.repo_display), noiselevel=-1)
def get_display_list(self, mylist):
"""Determines the display list to process
@@ -887,12 +887,12 @@ class Display:
pkg_str = self._append_repository(pkg_str, pkg, pkg_info)
if not pkg_info.merge:
addl = self.empty_space_in_brackets()
- myprint = "[%s%s] " % (
+ myprint = "[{}{}] ".format(
self.pkgprint(pkg_info.operation.ljust(13), pkg_info),
addl,
)
else:
- myprint = "[%s %s] " % (
+ myprint = "[{} {}] ".format(
self.pkgprint(pkg.type_name, pkg_info),
pkg_info.attr_display,
)
@@ -916,9 +916,7 @@ class Display:
else:
self.print_msg.append((myprint, self.verboseadd, None))
- show_repos = (
- self.quiet_repo_display and repoadd_set and repoadd_set != set(["0"])
- )
+ show_repos = self.quiet_repo_display and repoadd_set and repoadd_set != {"0"}
# now finally print out the messages
self.print_messages(show_repos)
@@ -927,7 +925,7 @@ class Display:
self.print_verbose(show_repos)
for pkg, pkg_info in self.restrict_fetch_list.items():
writemsg_stdout(
- "\nFetch instructions for %s:\n" % (pkg.cpv,), noiselevel=-1
+ "\nFetch instructions for {}:\n".format(pkg.cpv), noiselevel=-1
)
spawn_nofetch(
self.conf.trees[pkg.root]["porttree"].dbapi, pkg_info.ebuild_path
@@ -953,7 +951,7 @@ def format_unmatched_atom(pkg, atom, pkg_use_enabled):
# 5. USE
if atom.soname:
- return "%s" % (atom,), ""
+ return "{}".format(atom), ""
highlight = set()
@@ -1017,7 +1015,7 @@ def format_unmatched_atom(pkg, atom, pkg_use_enabled):
highlight_use = set()
if atom.use:
- use_atom = "%s[%s]" % (atom.cp, str(atom.use))
+ use_atom = "{}[{}]".format(atom.cp, str(atom.use))
use_atom_set = InternalPackageSet(initial_atoms=(use_atom,))
if not use_atom_set.findAtomForPackage(pkg, modified_use=pkg_use_enabled(pkg)):
missing_iuse = pkg.iuse.get_missing_iuse(atom.unevaluated_atom.use.required)
diff --git a/lib/_emerge/resolver/output_helpers.py b/lib/_emerge/resolver/output_helpers.py
index 6ce812189..366de2429 100644
--- a/lib/_emerge/resolver/output_helpers.py
+++ b/lib/_emerge/resolver/output_helpers.py
@@ -140,7 +140,7 @@ class _PackageCounters:
details[-1] += "s"
if self.interactive > 0:
details.append(
- "%s %s" % (self.interactive, colorize("WARN", "interactive"))
+ "{} {}".format(self.interactive, colorize("WARN", "interactive"))
)
myoutput.append(", ".join(details))
if total_installs != 0:
@@ -339,7 +339,7 @@ def _create_use_string(
else:
ret = " ".join(enabled + disabled + removed)
if ret:
- ret = '%s="%s" ' % (name, ret)
+ ret = '{}="{}" '.format(name, ret)
return ret
@@ -349,11 +349,11 @@ def _tree_display(conf, mylist):
# corresponding blockers to the digraph.
mygraph = conf.digraph.copy()
- executed_uninstalls = set(
+ executed_uninstalls = {
node
for node in mylist
if isinstance(node, Package) and node.operation == "unmerge"
- )
+ }
for uninstall in conf.blocker_uninstalls.leaf_nodes():
uninstall_parents = conf.blocker_uninstalls.parent_nodes(uninstall)
diff --git a/lib/_emerge/resolver/package_tracker.py b/lib/_emerge/resolver/package_tracker.py
index 8631b78d6..03562334b 100644
--- a/lib/_emerge/resolver/package_tracker.py
+++ b/lib/_emerge/resolver/package_tracker.py
@@ -298,7 +298,7 @@ class PackageTracker:
if len(cpv_pkgs) > 1:
# Make sure this cpv conflict is not a slot conflict at the same time.
# Ignore it if it is.
- slots = set(pkg.slot for pkg in cpv_pkgs)
+ slots = {pkg.slot for pkg in cpv_pkgs}
if len(slots) > 1:
self._conflicts_cache.append(
PackageConflict(
@@ -332,8 +332,7 @@ class PackageTracker:
"""
for cp_key in self._cp_pkg_map:
if cp_key[0] == root:
- for pkg in self._cp_pkg_map[cp_key]:
- yield pkg
+ yield from self._cp_pkg_map[cp_key]
for cp_key in self._cp_vdb_pkg_map:
if cp_key[0] == root:
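[Illustrative aside, not part of the commit] The PackageTracker hunk replaces a loop that re-yields each item with "yield from", which delegates to the inner iterable and produces the same sequence. A hypothetical helper shaped like the code above:

def pkgs_for_root(cp_pkg_map, root):
    # yield every tracked package whose (root, cp) key matches the given root
    for cp_key, pkgs in cp_pkg_map.items():
        if cp_key[0] == root:
            yield from pkgs   # equivalent to: for pkg in pkgs: yield pkg

assert list(pkgs_for_root({("r1", "dev-lang/python"): ["a", "b"]}, "r1")) == ["a", "b"]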
diff --git a/lib/_emerge/resolver/slot_collision.py b/lib/_emerge/resolver/slot_collision.py
index 17d6fa395..88c28464b 100644
--- a/lib/_emerge/resolver/slot_collision.py
+++ b/lib/_emerge/resolver/slot_collision.py
@@ -146,7 +146,7 @@ class slot_conflict_handler:
if self.debug:
writemsg("\nNew configuration:\n", noiselevel=-1)
for pkg in config:
- writemsg(" %s\n" % (pkg,), noiselevel=-1)
+ writemsg(" {}\n".format(pkg), noiselevel=-1)
writemsg("\n", noiselevel=-1)
new_solutions = self._check_configuration(
@@ -258,9 +258,9 @@ class slot_conflict_handler:
)
for root, slot_atom, pkgs in self.all_conflicts:
- msg.append("%s" % (slot_atom,))
+ msg.append("{}".format(slot_atom))
if root != self.depgraph._frozen_config._running_root.root:
- msg.append(" for %s" % (root,))
+ msg.append(" for {}".format(root))
msg.append("\n\n")
for pkg in pkgs:
@@ -523,7 +523,7 @@ class slot_conflict_handler:
def highlight_violations(atom, version, use, slot_violated):
"""Colorize parts of an atom"""
- atom_str = "%s" % (atom,)
+ atom_str = "{}".format(atom)
colored_idx = set()
if version:
op = atom.operator
@@ -614,7 +614,7 @@ class slot_conflict_handler:
atom_str = (
atom_str[:use_part_start]
- + "[%s]" % (",".join(new_tokens),)
+ + "[{}]".format(",".join(new_tokens))
+ atom_str[use_part_end + 1 :]
)
@@ -639,15 +639,17 @@ class slot_conflict_handler:
use_display = ""
if atom.soname:
msg.append(
- "%s required by %s %s\n" % (atom, parent, use_display)
+ "{} required by {} {}\n".format(
+ atom, parent, use_display
+ )
)
elif isinstance(parent, PackageArg):
# For PackageArg it's
# redundant to display the atom attribute.
- msg.append("%s\n" % (parent,))
+ msg.append("{}\n".format(parent))
elif isinstance(parent, AtomArg):
msg.append(2 * indent)
- msg.append("%s (Argument)\n" % (atom,))
+ msg.append("{} (Argument)\n".format(atom))
else:
# Display the specific atom from SetArg or
# Package types.
@@ -675,7 +677,7 @@ class slot_conflict_handler:
if version_violated or slot_violated:
self.is_a_version_conflict = True
- cur_line = "%s required by %s %s\n" % (
+ cur_line = "{} required by {} {}\n".format(
atom_str,
parent,
use_display,
@@ -736,7 +738,7 @@ class slot_conflict_handler:
)
msg.append("!!! package(s) cannot be rebuilt for the reason(s) shown:\n\n")
for ppkg, reason in need_rebuild.items():
- msg.append("%s%s: %s\n" % (indent, ppkg, reason))
+ msg.append("{}{}: {}\n".format(indent, ppkg, reason))
msg.append("\n")
msg.append("\n")
@@ -975,7 +977,7 @@ class slot_conflict_handler:
if self.debug:
writemsg("All involved flags:\n", noiselevel=-1)
for idx, involved_flags in enumerate(all_involved_flags):
- writemsg(" %s\n" % (config[idx],), noiselevel=-1)
+ writemsg(" {}\n".format(config[idx]), noiselevel=-1)
for flag, state in involved_flags.items():
writemsg(" " + flag + ": " + state + "\n", noiselevel=-1)
@@ -1066,7 +1068,7 @@ class slot_conflict_handler:
inner_first = False
else:
msg += ", "
- msg += flag + ": %s" % (state,)
+ msg += flag + ": {}".format(state)
msg += "}"
msg += "]\n"
writemsg(msg, noiselevel=-1)
@@ -1281,7 +1283,7 @@ class _solution_candidate_generator:
return self.value == other.value
def __str__(self):
- return "%s" % (self.value,)
+ return "{}".format(self.value)
def __init__(self, all_involved_flags):
# A copy of all_involved_flags with all "cond" values
diff --git a/lib/_emerge/search.py b/lib/_emerge/search.py
index 4989ed787..a20d432ad 100644
--- a/lib/_emerge/search.py
+++ b/lib/_emerge/search.py
@@ -475,14 +475,17 @@ class search:
try:
uri_map = _parse_uri_map(mycpv, metadata, use=pkg.use.enabled)
except portage.exception.InvalidDependString as e:
- file_size_str = "Unknown (%s)" % (e,)
+ file_size_str = "Unknown ({})".format(e)
del e
else:
try:
mysum[0] = mf.getDistfilesSize(uri_map)
except KeyError as e:
- file_size_str = "Unknown (missing " + "digest for %s)" % (
- e,
+ file_size_str = (
+ "Unknown (missing "
+ + "digest for {})".format(
+ e,
+ )
)
del e
diff --git a/lib/_emerge/show_invalid_depstring_notice.py b/lib/_emerge/show_invalid_depstring_notice.py
index 0b01d157c..0ae788a93 100644
--- a/lib/_emerge/show_invalid_depstring_notice.py
+++ b/lib/_emerge/show_invalid_depstring_notice.py
@@ -12,7 +12,7 @@ def show_invalid_depstring_notice(parent_node, error_msg):
msg1 = (
"\n\n!!! Invalid or corrupt dependency specification: "
- + "\n\n%s\n\n%s\n\n" % (error_msg, parent_node)
+ + "\n\n{}\n\n{}\n\n".format(error_msg, parent_node)
)
p_key = parent_node.cpv
p_status = parent_node.operation
diff --git a/lib/_emerge/unmerge.py b/lib/_emerge/unmerge.py
index 81541cb4a..5466e8346 100644
--- a/lib/_emerge/unmerge.py
+++ b/lib/_emerge/unmerge.py
@@ -506,13 +506,13 @@ def _unmerge_display(
if not (pkgmap[x]["protected"] or pkgmap[x]["omitted"]) and cp in syslist:
virt_cp = sys_virt_map.get(cp)
if virt_cp is None:
- cp_info = "'%s'" % (cp,)
+ cp_info = "'{}'".format(cp)
else:
- cp_info = "'%s' (%s)" % (cp, virt_cp)
+ cp_info = "'{}' ({})".format(cp, virt_cp)
writemsg_level(
colorize(
"BAD",
- "\n\n!!! " + "%s is part of your system profile.\n" % (cp_info,),
+ "\n\n!!! " + "{} is part of your system profile.\n".format(cp_info),
),
level=logging.WARNING,
noiselevel=-1,
@@ -525,7 +525,7 @@ def _unmerge_display(
noiselevel=-1,
)
if not quiet:
- writemsg_level("\n %s\n" % (bold(cp),), noiselevel=-1)
+ writemsg_level("\n {}\n".format(bold(cp)), noiselevel=-1)
else:
writemsg_level(bold(cp) + ": ", noiselevel=-1)
for mytype in ["selected", "protected", "omitted"]:
@@ -653,7 +653,7 @@ def unmerge(
for x in range(len(pkgmap)):
for y in pkgmap[x]["selected"]:
emergelog(xterm_titles, "=== Unmerging... (" + y + ")")
- message = ">>> Unmerging ({0} of {1}) {2}...\n".format(
+ message = ">>> Unmerging ({} of {}) {}...\n".format(
colorize("MERGE_LIST_PROGRESS", str(curval)),
colorize("MERGE_LIST_PROGRESS", str(maxval)),
y,
diff --git a/lib/portage/__init__.py b/lib/portage/__init__.py
index eb42ec13b..3ff27a1a2 100644
--- a/lib/portage/__init__.py
+++ b/lib/portage/__init__.py
@@ -265,10 +265,10 @@ class _unicode_func_wrapper:
_unicode_encode(x, encoding=encoding, errors="strict") for x in args
]
if kwargs:
- wrapped_kwargs = dict(
- (k, _unicode_encode(v, encoding=encoding, errors="strict"))
+ wrapped_kwargs = {
+ k: _unicode_encode(v, encoding=encoding, errors="strict")
for k, v in kwargs.items()
- )
+ }
else:
wrapped_kwargs = {}
@@ -362,7 +362,7 @@ class _eintr_func_wrapper:
try:
rval = self._func(*args, **kwargs)
break
- except EnvironmentError as e:
+ except OSError as e:
if e.errno != errno.EINTR:
raise
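[Illustrative aside, not part of the commit] In the _unicode_func_wrapper hunk, dict() over a generator of key/value tuples becomes a dict comprehension, and the many set(...) call sites elsewhere in this diff become set comprehensions or set literals the same way. Both forms build identical objects:

kwargs = {"path": "/tmp/x", "mode": "r"}                   # hypothetical input
wrapped_old = dict((k, v.upper()) for k, v in kwargs.items())
wrapped_new = {k: v.upper() for k, v in kwargs.items()}    # comprehension form
assert wrapped_old == wrapped_new
assert set(["0"]) == {"0"}                                 # set() call vs. set literal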
diff --git a/lib/portage/_compat_upgrade/binpkg_compression.py b/lib/portage/_compat_upgrade/binpkg_compression.py
index 58f995485..6cb117e4e 100644
--- a/lib/portage/_compat_upgrade/binpkg_compression.py
+++ b/lib/portage/_compat_upgrade/binpkg_compression.py
@@ -43,7 +43,7 @@ def main():
content = re.sub(
"^BINPKG_COMPRESS=.*$", compat_setting, content, flags=re.MULTILINE
)
- with open(config_path, "wt") as f:
+ with open(config_path, "w") as f:
f.write(content)
diff --git a/lib/portage/_compat_upgrade/binpkg_multi_instance.py b/lib/portage/_compat_upgrade/binpkg_multi_instance.py
index ce504c54b..a99e4dc54 100644
--- a/lib/portage/_compat_upgrade/binpkg_multi_instance.py
+++ b/lib/portage/_compat_upgrade/binpkg_multi_instance.py
@@ -31,7 +31,7 @@ def main():
config_path = os.path.join(
os.environ["ED"], GLOBAL_CONFIG_PATH.lstrip(os.sep), "make.globals"
)
- with open(config_path, "at") as f:
+ with open(config_path, "a") as f:
f.write("{}\n".format(COMPAT_FEATURES))
diff --git a/lib/portage/_compat_upgrade/default_locations.py b/lib/portage/_compat_upgrade/default_locations.py
index f4a24985b..d09b5858b 100644
--- a/lib/portage/_compat_upgrade/default_locations.py
+++ b/lib/portage/_compat_upgrade/default_locations.py
@@ -97,7 +97,7 @@ def main():
content = re.sub(
"^RPMDIR=.*$", compat_setting, content, flags=re.MULTILINE
)
- with open(config_path, "wt") as f:
+ with open(config_path, "w") as f:
f.write(content)
if do_main_repo:
@@ -115,7 +115,7 @@ def main():
content = re.sub(
"^location =.*$", compat_setting, content, flags=re.MULTILINE
)
- with open(config_path, "wt") as f:
+ with open(config_path, "w") as f:
f.write(content)
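[Illustrative aside, not part of the commit] The _compat_upgrade hunks drop the explicit "t" from open() modes because text mode is already the default, so "wt" and "at" behave exactly like "w" and "a". A sketch using a throwaway path:

import os, tempfile
path = os.path.join(tempfile.mkdtemp(), "make.globals.example")  # hypothetical file
with open(path, "w") as f:    # identical behavior to mode="wt"
    f.write('BINPKG_COMPRESS="bzip2"\n')
with open(path, "a") as f:    # identical behavior to mode="at"
    f.write("# appended, still text mode\n")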
diff --git a/lib/portage/_emirrordist/Config.py b/lib/portage/_emirrordist/Config.py
index 734313c07..8bda1d2ab 100644
--- a/lib/portage/_emirrordist/Config.py
+++ b/lib/portage/_emirrordist/Config.py
@@ -97,7 +97,7 @@ class Config:
if log_path is not None:
logger.warning("dry-run: %s log redirected to logging.info" % log_desc)
else:
- self._open_files.append(io.open(log_path, mode=mode, encoding="utf_8"))
+ self._open_files.append(open(log_path, mode=mode, encoding="utf_8"))
line_format = "%s\n"
log_func = self._open_files[-1].write
@@ -139,7 +139,7 @@ class Config:
if dry_run:
logger.warning("dry-run: %s db opened in readonly mode" % db_desc)
if not isinstance(db, dict):
- volatile_db = dict((k, db[k]) for k in db)
+ volatile_db = {k: db[k] for k in db}
db.close()
db = volatile_db
else:
diff --git a/lib/portage/_emirrordist/DeletionIterator.py b/lib/portage/_emirrordist/DeletionIterator.py
index 6273ad281..636a34a81 100644
--- a/lib/portage/_emirrordist/DeletionIterator.py
+++ b/lib/portage/_emirrordist/DeletionIterator.py
@@ -36,11 +36,9 @@ class DeletionIterator:
)
if self._config.content_db is None
else itertools.chain.from_iterable(
- (
- self._config.content_db.get_filenames_translate(filename)
- for filename in itertools.chain.from_iterable(
- layout.get_filenames(distdir) for layout in self._config.layouts
- )
+ self._config.content_db.get_filenames_translate(filename)
+ for filename in itertools.chain.from_iterable(
+ layout.get_filenames(distdir) for layout in self._config.layouts
)
)
)
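[Illustrative aside, not part of the commit] The DeletionIterator change only removes a redundant pair of parentheses: a generator expression that is the sole argument of a call needs no parentheses of its own. Both spellings are the same expression:

import itertools
groups = [["a.tar.gz"], ["b.tar.gz", "c.tar.gz"]]           # hypothetical filename groups
flat_old = list(itertools.chain.from_iterable((names for names in groups)))
flat_new = list(itertools.chain.from_iterable(names for names in groups))
assert flat_old == flat_new == ["a.tar.gz", "b.tar.gz", "c.tar.gz"]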
diff --git a/lib/portage/_emirrordist/DeletionTask.py b/lib/portage/_emirrordist/DeletionTask.py
index a5d96a9d9..65ba0462d 100644
--- a/lib/portage/_emirrordist/DeletionTask.py
+++ b/lib/portage/_emirrordist/DeletionTask.py
@@ -60,7 +60,7 @@ class DeletionTask(CompositeTask):
except OSError as e:
if e.errno not in (errno.ENOENT, errno.ESTALE):
logger.error(
- "%s unlink failed in distfiles: %s" % (self.distfile, e)
+ "{} unlink failed in distfiles: {}".format(self.distfile, e)
)
success = False
@@ -86,7 +86,7 @@ class DeletionTask(CompositeTask):
except OSError as e:
if e.errno not in (errno.ENOENT, errno.ESTALE):
logger.error(
- "%s unlink failed in distfiles: %s" % (self.distfile, e)
+ "{} unlink failed in distfiles: {}".format(self.distfile, e)
)
success = False
@@ -119,7 +119,7 @@ class DeletionTask(CompositeTask):
except OSError as e:
if e.errno not in (errno.ENOENT, errno.ESTALE):
logger.error(
- "%s unlink failed in distfiles: %s" % (self.distfile, e)
+ "{} unlink failed in distfiles: {}".format(self.distfile, e)
)
success = False
@@ -136,7 +136,7 @@ class DeletionTask(CompositeTask):
cpv = self.config.distfiles_db.get(self.distfile, cpv)
self.config.delete_count += 1
- self.config.log_success("%s\t%s\tremoved" % (cpv, self.distfile))
+ self.config.log_success("{}\t{}\tremoved".format(cpv, self.distfile))
if self.config.distfiles_db is not None:
try:
diff --git a/lib/portage/_emirrordist/FetchIterator.py b/lib/portage/_emirrordist/FetchIterator.py
index 79c460f79..de90d23bd 100644
--- a/lib/portage/_emirrordist/FetchIterator.py
+++ b/lib/portage/_emirrordist/FetchIterator.py
@@ -41,8 +41,7 @@ class FetchIterator:
# and in order to reduce latency in case of a signal interrupt.
cp_all = self._config.portdb.cp_all
for category in sorted(self._config.portdb.categories):
- for cp in cp_all(categories=(category,)):
- yield cp
+ yield from cp_all(categories=(category,))
def __iter__(self):
@@ -163,7 +162,7 @@ def _async_fetch_tasks(config, hash_filter, repo_config, digests_future, cpv, lo
try:
(restrict,) = aux_get_result.result()
except (PortageKeyError, PortageException) as e:
- config.log_failure("%s\t\taux_get exception %s" % (cpv, e))
+ config.log_failure("{}\t\taux_get exception {}".format(cpv, e))
result.set_result(fetch_tasks)
return
@@ -173,14 +172,14 @@ def _async_fetch_tasks(config, hash_filter, repo_config, digests_future, cpv, lo
try:
restrict = frozenset(use_reduce(restrict, flat=True, matchnone=True))
except PortageException as e:
- config.log_failure("%s\t\tuse_reduce exception %s" % (cpv, e))
+ config.log_failure("{}\t\tuse_reduce exception {}".format(cpv, e))
result.set_result(fetch_tasks)
return
try:
uri_map = fetch_map_result.result()
except PortageException as e:
- config.log_failure("%s\t\tgetFetchMap exception %s" % (cpv, e))
+ config.log_failure("{}\t\tgetFetchMap exception {}".format(cpv, e))
result.set_result(fetch_tasks)
return
@@ -235,10 +234,12 @@ def _async_fetch_tasks(config, hash_filter, repo_config, digests_future, cpv, lo
digests = repo_config.load_manifest(
os.path.join(repo_config.location, cpv.cp)
).getTypeDigests("DIST")
- except (EnvironmentError, PortageException) as e:
+ except (OSError, PortageException) as e:
digests_future.done() or digests_future.set_exception(e)
for filename in new_uri_map:
- config.log_failure("%s\t%s\tManifest exception %s" % (cpv, filename, e))
+ config.log_failure(
+ "{}\t{}\tManifest exception {}".format(cpv, filename, e)
+ )
config.file_failures[filename] = cpv
result.set_result(fetch_tasks)
return
@@ -247,7 +248,7 @@ def _async_fetch_tasks(config, hash_filter, repo_config, digests_future, cpv, lo
if not digests:
for filename in new_uri_map:
- config.log_failure("%s\t%s\tdigest entry missing" % (cpv, filename))
+ config.log_failure("{}\t{}\tdigest entry missing".format(cpv, filename))
config.file_failures[filename] = cpv
result.set_result(fetch_tasks)
return
@@ -255,7 +256,7 @@ def _async_fetch_tasks(config, hash_filter, repo_config, digests_future, cpv, lo
for filename, uri_tuple in new_uri_map.items():
file_digests = digests.get(filename)
if file_digests is None:
- config.log_failure("%s\t%s\tdigest entry missing" % (cpv, filename))
+ config.log_failure("{}\t{}\tdigest entry missing".format(cpv, filename))
config.file_failures[filename] = cpv
continue
if filename in config.file_owners:
diff --git a/lib/portage/_emirrordist/FetchTask.py b/lib/portage/_emirrordist/FetchTask.py
index 89fd1657e..a85411b6d 100644
--- a/lib/portage/_emirrordist/FetchTask.py
+++ b/lib/portage/_emirrordist/FetchTask.py
@@ -74,7 +74,7 @@ class FetchTask(CompositeTask):
self.scheduler.output(
msg, background=self.background, log_path=self._log_path
)
- self.config.log_failure("%s\t%s\t%s" % (self.cpv, self.distfile, msg))
+ self.config.log_failure("{}\t{}\t{}".format(self.cpv, self.distfile, msg))
self.config.file_failures[self.distfile] = self.cpv
self.returncode = os.EX_OK
self._async_wait()
@@ -89,7 +89,9 @@ class FetchTask(CompositeTask):
st = os.stat(distfile_path)
except OSError as e:
if e.errno not in (errno.ENOENT, errno.ESTALE):
- msg = "%s stat failed in %s: %s" % (self.distfile, "distfiles", e)
+ msg = "{} stat failed in {}: {}".format(
+ self.distfile, "distfiles", e
+ )
self.scheduler.output(
msg + "\n", background=True, log_path=self._log_path
)
@@ -174,14 +176,16 @@ class FetchTask(CompositeTask):
if self.config.options.dry_run:
if os.path.exists(recycle_file):
- logger.info("dry-run: delete '%s' from recycle" % (self.distfile,))
+ logger.info(
+ "dry-run: delete '{}' from recycle".format(self.distfile)
+ )
else:
try:
os.unlink(recycle_file)
except OSError:
pass
else:
- logger.debug("delete '%s' from recycle" % (self.distfile,))
+ logger.debug("delete '{}' from recycle".format(self.distfile))
def _distfiles_digester_exit(self, digester):
@@ -195,10 +199,10 @@ class FetchTask(CompositeTask):
# is a bad situation which normally does not occur, so
# skip this file and report it, in order to draw attention
# from the administrator.
- msg = "%s distfiles digester failed unexpectedly" % (self.distfile,)
+ msg = "{} distfiles digester failed unexpectedly".format(self.distfile)
self.scheduler.output(msg + "\n", background=True, log_path=self._log_path)
logger.error(msg)
- self.config.log_failure("%s\t%s\t%s" % (self.cpv, self.distfile, msg))
+ self.config.log_failure("{}\t{}\t{}".format(self.cpv, self.distfile, msg))
self.config.file_failures[self.distfile] = self.cpv
self.wait()
return
@@ -271,7 +275,7 @@ class FetchTask(CompositeTask):
else:
msg = "no fetchable uris"
- self.config.log_failure("%s\t%s\t%s" % (self.cpv, self.distfile, msg))
+ self.config.log_failure("{}\t{}\t{}".format(self.cpv, self.distfile, msg))
self.config.file_failures[self.distfile] = self.cpv
self.returncode = os.EX_OK
self.wait()
@@ -310,7 +314,9 @@ class FetchTask(CompositeTask):
st = os.stat(file_path)
except OSError as e:
if e.errno not in (errno.ENOENT, errno.ESTALE):
- msg = "%s stat failed in %s: %s" % (self.distfile, mirror_info.name, e)
+ msg = "{} stat failed in {}: {}".format(
+ self.distfile, mirror_info.name, e
+ )
self.scheduler.output(
msg + "\n", background=True, log_path=self._log_path
)
@@ -342,7 +348,7 @@ class FetchTask(CompositeTask):
current_mirror = self._current_mirror
if digester.returncode != os.EX_OK:
- msg = "%s %s digester failed unexpectedly" % (
+ msg = "{} {} digester failed unexpectedly".format(
self.distfile,
current_mirror.name,
)
@@ -351,7 +357,7 @@ class FetchTask(CompositeTask):
else:
bad_digest = self._find_bad_digest(digester.digests)
if bad_digest is not None:
- msg = "%s %s has bad %s digest: expected %s, got %s" % (
+ msg = "{} {} has bad {} digest: expected {}, got {}".format(
self.distfile,
current_mirror.name,
bad_digest,
@@ -387,7 +393,7 @@ class FetchTask(CompositeTask):
self.config.layouts[0].get_path(self.distfile),
)
if self._hardlink_atomic(
- src, dest, "%s to %s" % (current_mirror.name, "distfiles")
+ src, dest, "{} to {}".format(current_mirror.name, "distfiles")
):
logger.debug(
"hardlink '%s' from %s to distfiles"
@@ -420,7 +426,7 @@ class FetchTask(CompositeTask):
current_mirror = self._current_mirror
if copier.returncode != os.EX_OK:
- msg = "%s %s copy failed unexpectedly: %s" % (
+ msg = "{} {} copy failed unexpectedly: {}".format(
self.distfile,
current_mirror.name,
copier.future.exception(),
@@ -430,7 +436,9 @@ class FetchTask(CompositeTask):
else:
logger.debug(
- "copy '%s' from %s to distfiles" % (self.distfile, current_mirror.name)
+ "copy '{}' from {} to distfiles".format(
+ self.distfile, current_mirror.name
+ )
)
# Apply the timestamp from the source file, but
@@ -441,7 +449,7 @@ class FetchTask(CompositeTask):
ns=(self._current_stat.st_mtime_ns, self._current_stat.st_mtime_ns),
)
except OSError as e:
- msg = "%s %s utime failed unexpectedly: %s" % (
+ msg = "{} {} utime failed unexpectedly: {}".format(
self.distfile,
current_mirror.name,
e,
@@ -462,7 +470,7 @@ class FetchTask(CompositeTask):
if self.config.options.dry_run:
# Simply report success.
- logger.info("dry-run: fetch '%s' from '%s'" % (self.distfile, uri))
+ logger.info("dry-run: fetch '{}' from '{}'".format(self.distfile, uri))
self._success()
self.returncode = os.EX_OK
self._async_wait()
@@ -540,7 +548,7 @@ class FetchTask(CompositeTask):
return
if digester.returncode != os.EX_OK:
- msg = "%s %s digester failed unexpectedly" % (
+ msg = "{} {} digester failed unexpectedly".format(
self.distfile,
self._fetch_tmp_dir_info,
)
@@ -549,7 +557,7 @@ class FetchTask(CompositeTask):
else:
bad_digest = self._find_bad_digest(digester.digests)
if bad_digest is not None:
- msg = "%s has bad %s digest: expected %s, got %s" % (
+ msg = "{} has bad {} digest: expected {}, got {}".format(
self.distfile,
bad_digest,
self.digests[bad_digest],
@@ -604,14 +612,14 @@ class FetchTask(CompositeTask):
self._make_layout_links()
else:
# out of space?
- msg = "%s %s copy failed unexpectedly: %s" % (
+ msg = "{} {} copy failed unexpectedly: {}".format(
self.distfile,
self._fetch_tmp_dir_info,
copier.future.exception(),
)
self.scheduler.output(msg + "\n", background=True, log_path=self._log_path)
logger.error(msg)
- self.config.log_failure("%s\t%s\t%s" % (self.cpv, self.distfile, msg))
+ self.config.log_failure("{}\t{}\t{}".format(self.cpv, self.distfile, msg))
self.config.file_failures[self.distfile] = self.cpv
self.returncode = 1
self.wait()
@@ -636,7 +644,7 @@ class FetchTask(CompositeTask):
if not self._hardlink_atomic(
src_path,
link_path,
- "%s -> %s" % (link_path, src_path),
+ "{} -> {}".format(link_path, src_path),
self.config.options.symlinks,
):
success = False
@@ -649,7 +657,7 @@ class FetchTask(CompositeTask):
msg = "failed to create distfiles layout {}".format(
"symlink" if self.config.options.symlinks else "hardlink"
)
- self.config.log_failure("%s\t%s\t%s" % (self.cpv, self.distfile, msg))
+ self.config.log_failure("{}\t{}\t{}".format(self.cpv, self.distfile, msg))
self.config.file_failures[self.distfile] = self.cpv
self.returncode = 1
@@ -660,7 +668,7 @@ class FetchTask(CompositeTask):
os.unlink(file_path)
except OSError as e:
if e.errno not in (errno.ENOENT, errno.ESTALE):
- msg = "unlink '%s' failed in %s: %s" % (self.distfile, dir_info, e)
+ msg = "unlink '{}' failed in {}: {}".format(self.distfile, dir_info, e)
self.scheduler.output(
msg + "\n", background=True, log_path=self._log_path
)
@@ -703,7 +711,7 @@ class FetchTask(CompositeTask):
head, tail = os.path.split(dest)
hardlink_tmp = os.path.join(
- head, ".%s._mirrordist_hardlink_.%s" % (tail, portage.getpid())
+ head, ".{}._mirrordist_hardlink_.{}".format(tail, portage.getpid())
)
try:
@@ -714,7 +722,7 @@ class FetchTask(CompositeTask):
os.link(src, hardlink_tmp)
except OSError as e:
if e.errno != errno.EXDEV:
- msg = "hardlink %s from %s failed: %s" % (
+ msg = "hardlink {} from {} failed: {}".format(
self.distfile,
dir_info,
e,
@@ -728,7 +736,7 @@ class FetchTask(CompositeTask):
try:
os.rename(hardlink_tmp, dest)
except OSError as e:
- msg = "hardlink rename '%s' from %s failed: %s" % (
+ msg = "hardlink rename '{}' from {} failed: {}".format(
self.distfile,
dir_info,
e,
diff --git a/lib/portage/_emirrordist/MirrorDistTask.py b/lib/portage/_emirrordist/MirrorDistTask.py
index 9423125df..303911cba 100644
--- a/lib/portage/_emirrordist/MirrorDistTask.py
+++ b/lib/portage/_emirrordist/MirrorDistTask.py
@@ -187,7 +187,7 @@ class MirrorDistTask(CompositeTask):
cpv = "unknown"
if distfiles_db is not None:
cpv = distfiles_db.get(filename, cpv)
- lines.append("\t%s\t%s\n" % (filename, cpv))
+ lines.append("\t{}\t{}\n".format(filename, cpv))
if not dry_run:
portage.util.write_atomic(
@@ -250,4 +250,4 @@ class MirrorDistTask(CompositeTask):
Override _async_wait to call self._cleanup().
"""
self._cleanup()
- super(MirrorDistTask, self)._async_wait()
+ super()._async_wait()
diff --git a/lib/portage/_emirrordist/main.py b/lib/portage/_emirrordist/main.py
index f6a4f2e43..9a143c221 100644
--- a/lib/portage/_emirrordist/main.py
+++ b/lib/portage/_emirrordist/main.py
@@ -275,7 +275,7 @@ def emirrordist_main(args):
repo_path = settings.repositories.treemap.get(options.repo)
if repo_path is None:
- parser.error("Unable to locate repository named '%s'" % (options.repo,))
+ parser.error("Unable to locate repository named '{}'".format(options.repo))
if options.jobs is not None:
options.jobs = int(options.jobs)
diff --git a/lib/portage/_global_updates.py b/lib/portage/_global_updates.py
index feb87c3d1..136f6874a 100644
--- a/lib/portage/_global_updates.py
+++ b/lib/portage/_global_updates.py
@@ -195,7 +195,7 @@ def _do_global_updates(trees, prev_mtimes, quiet=False, if_mtime_changed=True):
if world_modified:
world_list.sort()
- write_atomic(world_file, "".join("%s\n" % (x,) for x in world_list))
+ write_atomic(world_file, "".join("{}\n".format(x) for x in world_list))
if world_warnings:
# XXX: print warning that we've updated world entries
# and the old name still matches something (from an overlay)?
diff --git a/lib/portage/_sets/ProfilePackageSet.py b/lib/portage/_sets/ProfilePackageSet.py
index 1b5c6eae7..f7855eb7f 100644
--- a/lib/portage/_sets/ProfilePackageSet.py
+++ b/lib/portage/_sets/ProfilePackageSet.py
@@ -11,9 +11,7 @@ class ProfilePackageSet(PackageSet):
_operations = ["merge"]
def __init__(self, profiles, debug=False):
- super(ProfilePackageSet, self).__init__(
- allow_repo=any(allow_profile_repo_deps(y) for y in profiles)
- )
+ super().__init__(allow_repo=any(allow_profile_repo_deps(y) for y in profiles))
self._profiles = profiles
if profiles:
desc_profile = profiles[-1]
diff --git a/lib/portage/_sets/base.py b/lib/portage/_sets/base.py
index 23e8e795e..537ad4510 100644
--- a/lib/portage/_sets/base.py
+++ b/lib/portage/_sets/base.py
@@ -34,10 +34,8 @@ class PackageSet:
def __iter__(self):
self._load()
- for x in self._atoms:
- yield x
- for x in self._nonatoms:
- yield x
+ yield from self._atoms
+ yield from self._nonatoms
def __bool__(self):
self._load()
@@ -160,9 +158,7 @@ class PackageSet:
class EditablePackageSet(PackageSet):
def __init__(self, allow_wildcard=False, allow_repo=False):
- super(EditablePackageSet, self).__init__(
- allow_wildcard=allow_wildcard, allow_repo=allow_repo
- )
+ super().__init__(allow_wildcard=allow_wildcard, allow_repo=allow_repo)
def update(self, atoms):
self._load()
@@ -223,9 +219,7 @@ class InternalPackageSet(EditablePackageSet):
functions default to allow_repo=False, which is sufficient to ensure
that repo atoms are prohibited when necessary.
"""
- super(InternalPackageSet, self).__init__(
- allow_wildcard=allow_wildcard, allow_repo=allow_repo
- )
+ super().__init__(allow_wildcard=allow_wildcard, allow_repo=allow_repo)
if initial_atoms is not None:
self.update(initial_atoms)
@@ -242,7 +236,7 @@ class InternalPackageSet(EditablePackageSet):
class DummyPackageSet(PackageSet):
def __init__(self, atoms=None):
- super(DummyPackageSet, self).__init__()
+ super().__init__()
if atoms:
self._setAtoms(atoms)
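[Illustrative aside, not part of the commit] The _sets classes switch to the zero-argument super(), which inside a method body resolves to the same class/instance pair the old two-argument spelling named explicitly. Minimal sketch with hypothetical stand-in classes:

class PackageSetSketch:
    def __init__(self, allow_repo=False):
        self.allow_repo = allow_repo

class DummySketch(PackageSetSketch):
    def __init__(self):
        super().__init__(allow_repo=True)   # same as super(DummySketch, self).__init__(...)

assert DummySketch().allow_repo is True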
diff --git a/lib/portage/_sets/dbapi.py b/lib/portage/_sets/dbapi.py
index 0c0bdb317..8f92602f5 100644
--- a/lib/portage/_sets/dbapi.py
+++ b/lib/portage/_sets/dbapi.py
@@ -33,7 +33,7 @@ class EverythingSet(PackageSet):
_filter = None
def __init__(self, vdbapi, **kwargs):
- super(EverythingSet, self).__init__()
+ super().__init__()
self._db = vdbapi
def load(self):
@@ -47,7 +47,7 @@ class EverythingSet(PackageSet):
# SLOT installed, in order to avoid the possibility
# of unwanted upgrades as reported in bug #338959.
pkg = pkg_str(cpv, None)
- atom = Atom("%s:%s" % (pkg.cp, pkg.slot))
+ atom = Atom("{}:{}".format(pkg.cp, pkg.slot))
if self._filter:
if self._filter(atom):
myatoms.append(atom)
@@ -71,7 +71,7 @@ class OwnerSet(PackageSet):
)
def __init__(self, vardb=None, exclude_files=None, files=None):
- super(OwnerSet, self).__init__()
+ super().__init__()
self._db = vardb
self._exclude_files = exclude_files
self._files = files
@@ -105,7 +105,7 @@ class OwnerSet(PackageSet):
if not exclude_paths:
for link, p in vardb._owners.iter_owners(paths):
pkg = pkg_str(link.mycpv, None)
- rValue.add("%s:%s" % (pkg.cp, pkg.slot))
+ rValue.add("{}:{}".format(pkg.cp, pkg.slot))
else:
all_paths = set()
all_paths.update(paths)
@@ -113,7 +113,7 @@ class OwnerSet(PackageSet):
exclude_atoms = set()
for link, p in vardb._owners.iter_owners(all_paths):
pkg = pkg_str(link.mycpv, None)
- atom = "%s:%s" % (pkg.cp, pkg.slot)
+ atom = "{}:{}".format(pkg.cp, pkg.slot)
rValue.add(atom)
# Returned paths are relative to ROOT and do not have
# a leading slash.
@@ -156,7 +156,7 @@ class VariableSet(EverythingSet):
def __init__(
self, vardb, metadatadb=None, variable=None, includes=None, excludes=None
):
- super(VariableSet, self).__init__(vardb)
+ super().__init__(vardb)
self._metadatadb = metadatadb
self._variable = variable
self._includes = includes
@@ -231,7 +231,7 @@ class SubslotChangedSet(PackageSet):
)
def __init__(self, portdb=None, vardb=None):
- super(SubslotChangedSet, self).__init__()
+ super().__init__()
self._portdb = portdb
self._vardb = vardb
@@ -242,7 +242,7 @@ class SubslotChangedSet(PackageSet):
cp_list = self._vardb.cp_list
for cp in self._vardb.cp_all():
for pkg in cp_list(cp):
- slot_atom = "%s:%s" % (pkg.cp, pkg.slot)
+ slot_atom = "{}:{}".format(pkg.cp, pkg.slot)
ebuild = xmatch(xmatch_level, slot_atom)
if not ebuild:
continue
@@ -268,7 +268,7 @@ class DowngradeSet(PackageSet):
)
def __init__(self, portdb=None, vardb=None):
- super(DowngradeSet, self).__init__()
+ super().__init__()
self._portdb = portdb
self._vardb = vardb
@@ -281,7 +281,7 @@ class DowngradeSet(PackageSet):
for cp in self._vardb.cp_all():
for cpv in cp_list(cp):
pkg = pkg_str(cpv, None)
- slot_atom = "%s:%s" % (pkg.cp, pkg.slot)
+ slot_atom = "{}:{}".format(pkg.cp, pkg.slot)
ebuild = xmatch(xmatch_level, slot_atom)
if not ebuild:
continue
@@ -307,7 +307,7 @@ class UnavailableSet(EverythingSet):
)
def __init__(self, vardb, metadatadb=None):
- super(UnavailableSet, self).__init__(vardb)
+ super().__init__(vardb)
self._metadatadb = metadatadb
def _filter(self, atom):
@@ -340,7 +340,7 @@ class UnavailableBinaries(EverythingSet):
)
def __init__(self, vardb, metadatadb=None):
- super(UnavailableBinaries, self).__init__(vardb)
+ super().__init__(vardb)
self._metadatadb = metadatadb
def _filter(self, atom):
@@ -367,7 +367,7 @@ class CategorySet(PackageSet):
_operations = ["merge", "unmerge"]
def __init__(self, category, dbapi, only_visible=True):
- super(CategorySet, self).__init__()
+ super().__init__()
self._db = dbapi
self._category = category
self._check = only_visible
@@ -375,7 +375,7 @@ class CategorySet(PackageSet):
s = "visible"
else:
s = "all"
- self.description = "Package set containing %s packages of category %s" % (
+ self.description = "Package set containing {} packages of category {}".format(
s,
self._category,
)
@@ -455,7 +455,7 @@ class AgeSet(EverythingSet):
_aux_keys = ("BUILD_TIME",)
def __init__(self, vardb, mode="older", age=7):
- super(AgeSet, self).__init__(vardb)
+ super().__init__(vardb)
self._mode = mode
self._age = age
@@ -494,7 +494,7 @@ class DateSet(EverythingSet):
_aux_keys = ("BUILD_TIME",)
def __init__(self, vardb, date, mode="older"):
- super(DateSet, self).__init__(vardb)
+ super().__init__(vardb)
self._mode = mode
self._date = date
@@ -589,7 +589,7 @@ class RebuiltBinaries(EverythingSet):
_aux_keys = ("BUILD_TIME",)
def __init__(self, vardb, bindb=None):
- super(RebuiltBinaries, self).__init__(vardb, bindb=bindb)
+ super().__init__(vardb, bindb=bindb)
self._bindb = bindb
def _filter(self, atom):
@@ -618,7 +618,7 @@ class ChangedDepsSet(PackageSet):
)
def __init__(self, portdb=None, vardb=None):
- super(ChangedDepsSet, self).__init__()
+ super().__init__()
self._portdb = portdb
self._vardb = vardb
diff --git a/lib/portage/_sets/files.py b/lib/portage/_sets/files.py
index 21e6933fa..97220d841 100644
--- a/lib/portage/_sets/files.py
+++ b/lib/portage/_sets/files.py
@@ -35,7 +35,7 @@ class StaticFileSet(EditablePackageSet):
_repopath_sub = re.compile(r"\$\{repository:(?P<reponame>.+)\}")
def __init__(self, filename, greedy=False, dbapi=None):
- super(StaticFileSet, self).__init__(allow_repo=True)
+ super().__init__(allow_repo=True)
self._filename = filename
self._mtime = None
self.description = "Package set loaded from file %s" % self._filename
@@ -77,14 +77,15 @@ class StaticFileSet(EditablePackageSet):
write_atomic(
self._filename,
"".join(
- "%s\n" % (atom,) for atom in sorted(chain(self._atoms, self._nonatoms))
+ "{}\n".format(atom)
+ for atom in sorted(chain(self._atoms, self._nonatoms))
),
)
def load(self):
try:
mtime = os.stat(self._filename).st_mtime
- except (OSError, IOError):
+ except OSError:
mtime = None
if not self._loaded or self._mtime != mtime:
try:
@@ -92,7 +93,7 @@ class StaticFileSet(EditablePackageSet):
for fname in errors:
for e in errors[fname]:
self.errors.append(fname + ": " + e)
- except EnvironmentError as e:
+ except OSError as e:
if e.errno != errno.ENOENT:
raise
del e
@@ -103,7 +104,7 @@ class StaticFileSet(EditablePackageSet):
matches = self.dbapi.match(a)
for cpv in matches:
pkg = self.dbapi._pkg_str(cpv, None)
- atoms.append("%s:%s" % (pkg.cp, pkg.slot))
+ atoms.append("{}:{}".format(pkg.cp, pkg.slot))
# In addition to any installed slots, also try to pull
# in the latest new slot that may be available.
atoms.append(a)
@@ -213,7 +214,7 @@ class StaticFileSet(EditablePackageSet):
class ConfigFileSet(PackageSet):
def __init__(self, filename):
- super(ConfigFileSet, self).__init__()
+ super().__init__()
self._filename = filename
self.description = "Package set generated from %s" % self._filename
self.loader = KeyListFileLoader(self._filename, ValidAtomValidator)
@@ -250,7 +251,7 @@ class WorldSelectedSet(EditablePackageSet):
description = "Set of packages and subsets that were directly installed by the user"
def __init__(self, eroot):
- super(WorldSelectedSet, self).__init__(allow_repo=True)
+ super().__init__(allow_repo=True)
self._pkgset = WorldSelectedPackagesSet(eroot)
self._setset = WorldSelectedSetsSet(eroot)
@@ -288,7 +289,7 @@ class WorldSelectedPackagesSet(EditablePackageSet):
description = "Set of packages that were directly installed by the user"
def __init__(self, eroot):
- super(WorldSelectedPackagesSet, self).__init__(allow_repo=True)
+ super().__init__(allow_repo=True)
self._lock = None
self._filename = os.path.join(eroot, WORLD_FILE)
self.loader = ItemFileLoader(self._filename, self._validate)
@@ -305,7 +306,7 @@ class WorldSelectedPackagesSet(EditablePackageSet):
atoms_changed = False
try:
mtime = os.stat(self._filename).st_mtime
- except (OSError, IOError):
+ except OSError:
mtime = None
if not self._loaded or self._mtime != mtime:
try:
@@ -313,7 +314,7 @@ class WorldSelectedPackagesSet(EditablePackageSet):
for fname in errors:
for e in errors[fname]:
self.errors.append(fname + ": " + e)
- except EnvironmentError as e:
+ except OSError as e:
if e.errno != errno.ENOENT:
raise
del e
@@ -384,7 +385,7 @@ class WorldSelectedSetsSet(EditablePackageSet):
description = "Set of sets that were directly installed by the user"
def __init__(self, eroot):
- super(WorldSelectedSetsSet, self).__init__(allow_repo=True)
+ super().__init__(allow_repo=True)
self._lock = None
self._filename = os.path.join(eroot, WORLD_SETS_FILE)
self.loader = ItemFileLoader(self._filename, self._validate)
@@ -402,7 +403,7 @@ class WorldSelectedSetsSet(EditablePackageSet):
atoms_changed = False
try:
mtime = os.stat(self._filename).st_mtime
- except (OSError, IOError):
+ except OSError:
mtime = None
if not self._loaded or self._mtime != mtime:
try:
@@ -410,7 +411,7 @@ class WorldSelectedSetsSet(EditablePackageSet):
for fname in errors:
for e in errors[fname]:
self.errors.append(fname + ": " + e)
- except EnvironmentError as e:
+ except OSError as e:
if e.errno != errno.ENOENT:
raise
del e
diff --git a/lib/portage/_sets/libs.py b/lib/portage/_sets/libs.py
index 683cc0823..a99f621d8 100644
--- a/lib/portage/_sets/libs.py
+++ b/lib/portage/_sets/libs.py
@@ -12,7 +12,7 @@ class LibraryConsumerSet(PackageSet):
_operations = ["merge", "unmerge"]
def __init__(self, vardbapi, debug=False):
- super(LibraryConsumerSet, self).__init__()
+ super().__init__()
self.dbapi = vardbapi
self.debug = debug
@@ -28,7 +28,7 @@ class LibraryConsumerSet(PackageSet):
# without replacement.
pass
else:
- rValue.add("%s:%s" % (pkg.cp, pkg.slot))
+ rValue.add("{}:{}".format(pkg.cp, pkg.slot))
return rValue
@@ -45,7 +45,7 @@ class LibraryFileConsumerSet(LibraryConsumerSet):
)
def __init__(self, vardbapi, files, **kargs):
- super(LibraryFileConsumerSet, self).__init__(vardbapi, **kargs)
+ super().__init__(vardbapi, **kargs)
self.files = files
def load(self):
diff --git a/lib/portage/_sets/profiles.py b/lib/portage/_sets/profiles.py
index 289a93218..94f67cb88 100644
--- a/lib/portage/_sets/profiles.py
+++ b/lib/portage/_sets/profiles.py
@@ -17,9 +17,7 @@ class PackagesSystemSet(PackageSet):
_operations = ["merge"]
def __init__(self, profiles, debug=False):
- super(PackagesSystemSet, self).__init__(
- allow_repo=any(allow_profile_repo_deps(x) for x in profiles)
- )
+ super().__init__(allow_repo=any(allow_profile_repo_deps(x) for x in profiles))
self._profiles = profiles
self._debug = debug
if profiles:
@@ -35,7 +33,7 @@ class PackagesSystemSet(PackageSet):
debug = self._debug
if debug:
writemsg_level(
- "\nPackagesSystemSet: profiles: %s\n" % (self._profiles,),
+ "\nPackagesSystemSet: profiles: {}\n".format(self._profiles),
level=logging.DEBUG,
noiselevel=-1,
)
@@ -54,7 +52,7 @@ class PackagesSystemSet(PackageSet):
if debug:
writemsg_level(
- "\nPackagesSystemSet: raw packages: %s\n" % (mylist,),
+ "\nPackagesSystemSet: raw packages: {}\n".format(mylist),
level=logging.DEBUG,
noiselevel=-1,
)
@@ -63,7 +61,7 @@ class PackagesSystemSet(PackageSet):
if debug:
writemsg_level(
- "\nPackagesSystemSet: stacked packages: %s\n" % (mylist,),
+ "\nPackagesSystemSet: stacked packages: {}\n".format(mylist),
level=logging.DEBUG,
noiselevel=-1,
)
diff --git a/lib/portage/_sets/security.py b/lib/portage/_sets/security.py
index 5e8bc89e3..fff81c46b 100644
--- a/lib/portage/_sets/security.py
+++ b/lib/portage/_sets/security.py
@@ -16,7 +16,7 @@ class SecuritySet(PackageSet):
description = "package set that includes all packages possibly affected by a GLSA"
def __init__(self, settings, vardbapi, portdbapi, least_change=True):
- super(SecuritySet, self).__init__()
+ super().__init__()
self._settings = settings
self._vardbapi = vardbapi
self._portdbapi = portdbapi
@@ -49,7 +49,7 @@ class SecuritySet(PackageSet):
for atom in atomlist[:]:
cpv = self._portdbapi.xmatch("match-all", atom)[0]
pkg = self._portdbapi._pkg_str(cpv, None)
- cps = "%s:%s" % (pkg.cp, pkg.slot)
+ cps = "{}:{}".format(pkg.cp, pkg.slot)
if not cps in mydict:
mydict[cps] = (atom, cpv)
else:
diff --git a/lib/portage/_sets/shell.py b/lib/portage/_sets/shell.py
index 249e1fb05..6999c6d65 100644
--- a/lib/portage/_sets/shell.py
+++ b/lib/portage/_sets/shell.py
@@ -29,7 +29,7 @@ class CommandOutputSet(PackageSet):
_operations = ["merge", "unmerge"]
def __init__(self, command):
- super(CommandOutputSet, self).__init__()
+ super().__init__()
self._command = command
self.description = "Package set generated from output of '%s'" % self._command
diff --git a/lib/portage/cache/anydbm.py b/lib/portage/cache/anydbm.py
index cf5f3fa4b..243116081 100644
--- a/lib/portage/cache/anydbm.py
+++ b/lib/portage/cache/anydbm.py
@@ -27,7 +27,7 @@ class database(fs_template.FsBased):
serialize_eclasses = False
def __init__(self, *args, **config):
- super(database, self).__init__(*args, **config)
+ super().__init__(*args, **config)
default_db = config.get("dbtype", "anydbm")
if not default_db.startswith("."):
@@ -50,7 +50,7 @@ class database(fs_template.FsBased):
try:
self._ensure_dirs()
self._ensure_dirs(self._db_path)
- except (OSError, IOError) as e:
+ except OSError as e:
raise cache_errors.InitializationError(self.__class__, e)
# try again if failed
diff --git a/lib/portage/cache/cache_errors.py b/lib/portage/cache/cache_errors.py
index bc8114ec1..075f79350 100644
--- a/lib/portage/cache/cache_errors.py
+++ b/lib/portage/cache/cache_errors.py
@@ -12,7 +12,7 @@ class InitializationError(CacheError):
self.error, self.class_name = error, class_name
def __str__(self):
- return "Creation of instance %s failed due to %s" % (
+ return "Creation of instance {} failed due to {}".format(
self.class_name,
str(self.error),
)
@@ -23,7 +23,7 @@ class CacheCorruption(CacheError):
self.key, self.ex = key, ex
def __str__(self):
- return "%s is corrupt: %s" % (self.key, str(self.ex))
+ return "{} is corrupt: {}".format(self.key, str(self.ex))
class GeneralCacheCorruption(CacheError):
@@ -41,7 +41,7 @@ class InvalidRestriction(CacheError):
self.key, self.restriction, self.ex = key, restriction, ex
def __str__(self):
- return "%s:%s is not valid: %s" % (self.key, self.restriction, str(self.ex))
+ return "{}:{} is not valid: {}".format(self.key, self.restriction, str(self.ex))
class ReadOnlyRestriction(CacheError):
@@ -67,14 +67,14 @@ class StatCollision(CacheError):
self.size = size
def __str__(self):
- return "%s has stat collision with size %s and mtime %s" % (
+ return "{} has stat collision with size {} and mtime {}".format(
self.key,
self.size,
self.mtime,
)
def __repr__(self):
- return "portage.cache.cache_errors.StatCollision(%s)" % (
+ return "portage.cache.cache_errors.StatCollision({})".format(
", ".join(
(repr(self.key), repr(self.filename), repr(self.mtime), repr(self.size))
),
diff --git a/lib/portage/cache/ebuild_xattr.py b/lib/portage/cache/ebuild_xattr.py
index 587466589..35aa0b40d 100644
--- a/lib/portage/cache/ebuild_xattr.py
+++ b/lib/portage/cache/ebuild_xattr.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright: 2009-2020 Gentoo Authors
# Author(s): Petteri Räty (betelgeuse@gentoo.org)
# License: GPL2
@@ -25,7 +24,7 @@ class database(fs_template.FsBased):
autocommits = True
def __init__(self, *args, **config):
- super(database, self).__init__(*args, **config)
+ super().__init__(*args, **config)
self.portdir = self.label
self.ns = xattr.NS_USER + ".gentoo.cache"
self.keys = set(self._known_keys)
@@ -57,7 +56,7 @@ class database(fs_template.FsBased):
while True:
self.__set(path, "test_max", s)
s += hundred
- except IOError as e:
+ except OSError as e:
# ext based give wrong errno
# https://bugzilla.kernel.org/show_bug.cgi?id=12793
if e.errno in (errno.E2BIG, errno.ENOSPC):
@@ -67,7 +66,7 @@ class database(fs_template.FsBased):
try:
self.__remove(path, "test_max")
- except IOError as e:
+ except OSError as e:
if e.errno != errno.ENODATA:
raise
@@ -88,7 +87,7 @@ class database(fs_template.FsBased):
def __get(self, path, key, default=None):
try:
return xattr.get(path, key, namespace=self.ns)
- except IOError as e:
+ except OSError as e:
if not default is None and errno.ENODATA == e.errno:
return default
raise NoValueException()
@@ -135,7 +134,7 @@ class database(fs_template.FsBased):
parts += 1
# Only the first entry carries the number of parts
- self.__set(path, key, "%s:%s" % (parts, s[0:max_len]))
+ self.__set(path, key, "{}:{}".format(parts, s[0:max_len]))
# Write out the rest
for i in range(1, parts):
@@ -143,7 +142,7 @@ class database(fs_template.FsBased):
val = s[start : start + max_len]
self.__set(path, key + str(i), val)
else:
- self.__set(path, key, "%s:%s" % (1, s))
+ self.__set(path, key, "{}:{}".format(1, s))
def _delitem(self, cpv):
pass # Will be gone with the ebuild
@@ -166,4 +165,4 @@ class database(fs_template.FsBased):
pn_pv = file[:-7]
path = os.path.join(root, file)
if self.__has_cache(path):
- yield "%s/%s/%s" % (cat, os.path.basename(root), file[:-7])
+ yield "{}/{}/{}".format(cat, os.path.basename(root), file[:-7])
diff --git a/lib/portage/cache/flat_hash.py b/lib/portage/cache/flat_hash.py
index d3f4dad4c..1331b47c0 100644
--- a/lib/portage/cache/flat_hash.py
+++ b/lib/portage/cache/flat_hash.py
@@ -21,13 +21,13 @@ class database(fs_template.FsBased):
autocommits = True
def __init__(self, *args, **config):
- super(database, self).__init__(*args, **config)
+ super().__init__(*args, **config)
self.location = os.path.join(
self.location, self.label.lstrip(os.path.sep).rstrip(os.path.sep)
)
write_keys = set(self._known_keys)
write_keys.add("_eclasses_")
- write_keys.add("_%s_" % (self.validation_chf,))
+ write_keys.add("_{}_".format(self.validation_chf))
self._write_keys = sorted(write_keys)
if not self.readonly and not os.path.exists(self.location):
self._ensure_dirs()
@@ -36,9 +36,8 @@ class database(fs_template.FsBased):
# Don't use os.path.join, for better performance.
fp = self.location + _os.sep + cpv
try:
- with io.open(
+ with open(
_unicode_encode(fp, encoding=_encodings["fs"], errors="strict"),
- mode="r",
encoding=_encodings["repo.content"],
errors="replace",
) as myf:
@@ -51,7 +50,7 @@ class database(fs_template.FsBased):
# that uses mtime mangling.
d["_mtime_"] = _os.fstat(myf.fileno())[stat.ST_MTIME]
return d
- except (IOError, OSError) as e:
+ except OSError as e:
if e.errno != errno.ENOENT:
raise cache_errors.CacheCorruption(cpv, e)
raise KeyError(cpv, e)
@@ -66,17 +65,17 @@ class database(fs_template.FsBased):
def _setitem(self, cpv, values):
try:
fd, fp = tempfile.mkstemp(dir=self.location)
- except EnvironmentError as e:
+ except OSError as e:
raise cache_errors.CacheCorruption(cpv, e)
- with io.open(
+ with open(
fd, mode="w", encoding=_encodings["repo.content"], errors="backslashreplace"
) as myf:
for k in self._write_keys:
v = values.get(k)
if not v:
continue
- myf.write("%s=%s\n" % (k, v))
+ myf.write("{}={}\n".format(k, v))
self._ensure_access(fp)
@@ -85,7 +84,7 @@ class database(fs_template.FsBased):
new_fp = os.path.join(self.location, cpv)
try:
os.rename(fp, new_fp)
- except EnvironmentError as e:
+ except OSError as e:
success = False
try:
if errno.ENOENT == e.errno:
@@ -93,7 +92,7 @@ class database(fs_template.FsBased):
self._ensure_dirs(cpv)
os.rename(fp, new_fp)
success = True
- except EnvironmentError as e:
+ except OSError as e:
raise cache_errors.CacheCorruption(cpv, e)
else:
raise cache_errors.CacheCorruption(cpv, e)
diff --git a/lib/portage/cache/fs_template.py b/lib/portage/cache/fs_template.py
index a3f803740..2ca295197 100644
--- a/lib/portage/cache/fs_template.py
+++ b/lib/portage/cache/fs_template.py
@@ -29,7 +29,7 @@ class FsBased(template.database):
del config[x]
else:
setattr(self, "_" + x, y)
- super(FsBased, self).__init__(*args, **config)
+ super().__init__(*args, **config)
if self.label.startswith(os.path.sep):
# normpath.
@@ -43,7 +43,7 @@ class FsBased(template.database):
if mtime != -1:
mtime = int(mtime)
os.utime(path, (mtime, mtime))
- except (PortageException, EnvironmentError):
+ except (PortageException, OSError):
return False
return True
@@ -87,4 +87,4 @@ def gen_label(base, label):
label = label.strip('"').strip("'")
label = os.path.join(*(label.rstrip(os.path.sep).split(os.path.sep)))
tail = os.path.split(label)[1]
- return "%s-%X" % (tail, abs(label.__hash__()))
+ return "{}-{:X}".format(tail, abs(label.__hash__()))
diff --git a/lib/portage/cache/index/pkg_desc_index.py b/lib/portage/cache/index/pkg_desc_index.py
index be81b9bb9..847f10af7 100644
--- a/lib/portage/cache/index/pkg_desc_index.py
+++ b/lib/portage/cache/index/pkg_desc_index.py
@@ -33,7 +33,9 @@ class pkg_node(str):
def pkg_desc_index_line_format(cp, pkgs, desc):
- return "%s %s: %s\n" % (cp, " ".join(_pkg_str(cpv).version for cpv in pkgs), desc)
+ return "{} {}: {}\n".format(
+ cp, " ".join(_pkg_str(cpv).version for cpv in pkgs), desc
+ )
def pkg_desc_index_line_read(line, repo=None):
diff --git a/lib/portage/cache/metadata.py b/lib/portage/cache/metadata.py
index 02d8385e0..791ad8344 100644
--- a/lib/portage/cache/metadata.py
+++ b/lib/portage/cache/metadata.py
@@ -53,7 +53,7 @@ class database(flat_hash.database):
def __init__(self, location, *args, **config):
loc = location
- super(database, self).__init__(location, *args, **config)
+ super().__init__(location, *args, **config)
self.location = os.path.join(loc, "metadata", "cache")
self.ec = None
self.raise_stat_collision = False
@@ -83,9 +83,9 @@ class database(flat_hash.database):
getter = attrgetter(self.validation_chf)
try:
ec_data = self.ec.get_eclass_data(d["INHERITED"].split())
- d["_eclasses_"] = dict(
- (k, (v.eclass_dir, getter(v))) for k, v in ec_data.items()
- )
+ d["_eclasses_"] = {
+ k: (v.eclass_dir, getter(v)) for k, v in ec_data.items()
+ }
except KeyError as e:
# INHERITED contains a non-existent eclass.
raise cache_errors.CacheCorruption(cpv, e)
@@ -120,7 +120,7 @@ class database(flat_hash.database):
_unicode_encode(new_fp, encoding=_encodings["fs"], errors="strict"),
"rb",
)
- except EnvironmentError:
+ except OSError:
pass
else:
try:
@@ -129,7 +129,7 @@ class database(flat_hash.database):
existing_content = f.read()
finally:
f.close()
- except EnvironmentError:
+ except OSError:
pass
else:
existing_mtime = existing_st[stat.ST_MTIME]
@@ -156,7 +156,7 @@ class database(flat_hash.database):
myf = open(
_unicode_encode(fp, encoding=_encodings["fs"], errors="strict"), "wb"
)
- except EnvironmentError as e:
+ except OSError as e:
if errno.ENOENT == e.errno:
try:
self._ensure_dirs(cpv)
@@ -164,7 +164,7 @@ class database(flat_hash.database):
_unicode_encode(fp, encoding=_encodings["fs"], errors="strict"),
"wb",
)
- except EnvironmentError as e:
+ except OSError as e:
raise cache_errors.CacheCorruption(cpv, e)
else:
raise cache_errors.CacheCorruption(cpv, e)
@@ -177,9 +177,9 @@ class database(flat_hash.database):
try:
os.rename(fp, new_fp)
- except EnvironmentError as e:
+ except OSError as e:
try:
os.unlink(fp)
- except EnvironmentError:
+ except OSError:
pass
raise cache_errors.CacheCorruption(cpv, e)
diff --git a/lib/portage/cache/sql_template.py b/lib/portage/cache/sql_template.py
index f8ce72d5b..e705a891b 100644
--- a/lib/portage/cache/sql_template.py
+++ b/lib/portage/cache/sql_template.py
@@ -53,7 +53,7 @@ class SQLDatabase(template.database):
"""initialize the instance.
derived classes shouldn't need to override this"""
- super(SQLDatabase, self).__init__(location, label, auxdbkeys, *args, **config)
+ super().__init__(location, label, auxdbkeys, *args, **config)
config.setdefault("host", "127.0.0.1")
config.setdefault("autocommit", self.autocommits)
@@ -122,7 +122,7 @@ class SQLDatabase(template.database):
if len(rows) == 0:
raise KeyError(cpv)
- vals = dict([(k, "") for k in self._known_keys])
+ vals = {k: "" for k in self._known_keys}
vals.update(dict(rows))
return vals
@@ -309,7 +309,7 @@ class SQLDatabase(template.database):
v = v.replace("%", "\\%")
v = v.replace(".*", "%")
query_list.append(
- "(key=%s AND value LIKE %s)" % (self._sfilter(k), self._sfilter(v))
+ "(key={} AND value LIKE {})".format(self._sfilter(k), self._sfilter(v))
)
if len(query_list):
diff --git a/lib/portage/cache/sqlite.py b/lib/portage/cache/sqlite.py
index 23a775e65..fd05fe406 100644
--- a/lib/portage/cache/sqlite.py
+++ b/lib/portage/cache/sqlite.py
@@ -30,7 +30,7 @@ class database(fs_template.FsBased):
)
def __init__(self, *args, **config):
- super(database, self).__init__(*args, **config)
+ super().__init__(*args, **config)
self._import_sqlite()
self._allowed_keys = ["_eclasses_"]
self._allowed_keys.extend(self._known_keys)
@@ -202,7 +202,7 @@ class database(fs_template.FsBased):
if m is None:
return False, missing_keys
- unique_constraints = set([self._db_table["packages"]["package_key"]])
+ unique_constraints = {self._db_table["packages"]["package_key"]}
missing_keys = set(self._allowed_keys)
unique_re = re.compile(r"^\s*UNIQUE\s*\(\s*(\w*)\s*\)\s*$")
column_re = re.compile(r"^\s*(\w*)\s*TEXT\s*$")
@@ -309,7 +309,7 @@ class database(fs_template.FsBased):
s = " ".join(update_statement)
cursor.execute(s)
except self._db_error as e:
- writemsg("%s: %s\n" % (cpv, str(e)))
+ writemsg("{}: {}\n".format(cpv, str(e)))
raise
def commit(self):
diff --git a/lib/portage/cache/template.py b/lib/portage/cache/template.py
index 3677dfa74..843ca2e5f 100644
--- a/lib/portage/cache/template.py
+++ b/lib/portage/cache/template.py
@@ -89,7 +89,7 @@ class database:
mtime = int(mtime)
except ValueError:
raise cache_errors.CacheCorruption(
- cpv, "_mtime_ conversion to int failed: %s" % (mtime,)
+ cpv, "_mtime_ conversion to int failed: {}".format(mtime)
)
d["_mtime_"] = mtime
return d
@@ -111,11 +111,11 @@ class database:
return extern_ec_dict
chf_getter = operator.attrgetter(chf_type)
if paths:
- intern_ec_dict = dict(
- (k, (v.eclass_dir, chf_getter(v))) for k, v in extern_ec_dict.items()
- )
+ intern_ec_dict = {
+ k: (v.eclass_dir, chf_getter(v)) for k, v in extern_ec_dict.items()
+ }
else:
- intern_ec_dict = dict((k, chf_getter(v)) for k, v in extern_ec_dict.items())
+ intern_ec_dict = {k: chf_getter(v) for k, v in extern_ec_dict.items()}
return intern_ec_dict
def __setitem__(self, cpv, values):
@@ -311,11 +311,11 @@ def serialize_eclasses(eclass_dict, chf_type="mtime", paths=True):
getter = operator.attrgetter(chf_type)
if paths:
return "\t".join(
- "%s\t%s\t%s" % (k, v.eclass_dir, getter(v))
+ "{}\t{}\t{}".format(k, v.eclass_dir, getter(v))
for k, v in sorted(eclass_dict.items(), key=_keysorter)
)
return "\t".join(
- "%s\t%s" % (k, getter(v))
+ "{}\t{}".format(k, getter(v))
for k, v in sorted(eclass_dict.items(), key=_keysorter)
)
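
The dict(...) generator to dict-comprehension rewrites in template.py produce identical mappings; the comprehension is just the direct spelling. A sketch with a stand-in for the eclass cache entries (the real objects only need an eclass_dir and a checksum attribute here; values are illustrative):

    from collections import namedtuple
    from operator import attrgetter

    Eclass = namedtuple("Eclass", ["eclass_dir", "mtime"])
    extern_ec_dict = {"eutils": Eclass("/var/db/repos/gentoo/eclass", 1234567890)}

    chf_getter = attrgetter("mtime")
    old_style = dict((k, (v.eclass_dir, chf_getter(v))) for k, v in extern_ec_dict.items())
    new_style = {k: (v.eclass_dir, chf_getter(v)) for k, v in extern_ec_dict.items()}
    assert old_style == new_style
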
diff --git a/lib/portage/cache/volatile.py b/lib/portage/cache/volatile.py
index 67afd20e7..9ee44605c 100644
--- a/lib/portage/cache/volatile.py
+++ b/lib/portage/cache/volatile.py
@@ -14,7 +14,7 @@ class database(template.database):
def __init__(self, *args, **config):
config.pop("gid", None)
config.pop("perms", None)
- super(database, self).__init__(*args, **config)
+ super().__init__(*args, **config)
self._data = {}
self._delitem = self._data.__delitem__
diff --git a/lib/portage/checksum.py b/lib/portage/checksum.py
index 7421b1c98..f23897d91 100644
--- a/lib/portage/checksum.py
+++ b/lib/portage/checksum.py
@@ -43,7 +43,7 @@ def _open_file(filename):
return open(
_unicode_encode(filename, encoding=_encodings["fs"], errors="strict"), "rb"
)
- except IOError as e:
+ except OSError as e:
func_call = f"open('{_unicode_decode(filename)}')"
if e.errno == errno.EPERM:
raise portage.exception.OperationNotPermitted(func_call)
@@ -574,7 +574,7 @@ def perform_checksum(filename, hashname="MD5", calc_prelink=0):
f"{hashname} hash function not available (needs dev-python/pycrypto)"
)
myhash, mysize = hashfunc_map[hashname].checksum_file(myfilename)
- except (OSError, IOError) as e:
+ except OSError as e:
if e.errno in (errno.ENOENT, errno.ESTALE):
raise portage.exception.FileNotFound(myfilename)
elif e.errno == portage.exception.PermissionDenied.errno:
diff --git a/lib/portage/cvstree.py b/lib/portage/cvstree.py
index 38652db33..5cc254ccf 100644
--- a/lib/portage/cvstree.py
+++ b/lib/portage/cvstree.py
@@ -48,17 +48,16 @@ def isadded(entries, path):
filename = os.path.basename(path)
try:
- myfile = io.open(
+ myfile = open(
_unicode_encode(
os.path.join(basedir, "CVS", "Entries"),
encoding=_encodings["fs"],
errors="strict",
),
- mode="r",
encoding=_encodings["content"],
errors="strict",
)
- except IOError:
+ except OSError:
return 0
mylines = myfile.readlines()
myfile.close()
@@ -107,8 +106,7 @@ def findoption(entries, pattern, recursive=0, basedir=""):
if recursive:
for mydir, mydata in entries["dirs"].items():
- for x in findoption(mydata, pattern, recursive, basedir + mydir):
- yield x
+ yield from findoption(mydata, pattern, recursive, basedir + mydir)
def findchanged(entries, recursive=0, basedir=""):
@@ -235,9 +233,8 @@ def getentries(mydir, recursive=0):
if not os.path.exists(mydir):
return entries
try:
- myfile = io.open(
+ myfile = open(
_unicode_encode(myfn, encoding=_encodings["fs"], errors="strict"),
- mode="r",
encoding=_encodings["content"],
errors="strict",
)
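
Two patterns from cvstree.py recur throughout this commit: on CPython 3 the builtin open is the same function as io.open (and "r" is its default mode, so both can be dropped), and yield from replaces the manual re-yield loop. A small equivalence sketch with toy data:

    import io

    assert open is io.open  # same function object on CPython 3

    def iter_old(nested):
        for sub in nested:
            for x in sub:
                yield x

    def iter_new(nested):
        for sub in nested:
            yield from sub  # delegates directly to the sub-iterable

    data = [["-kb", "-ko"], ["-kv"]]
    assert list(iter_old(data)) == list(iter_new(data)) == ["-kb", "-ko", "-kv"]
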
diff --git a/lib/portage/dbapi/IndexedPortdb.py b/lib/portage/dbapi/IndexedPortdb.py
index 013806b57..ef25352fc 100644
--- a/lib/portage/dbapi/IndexedPortdb.py
+++ b/lib/portage/dbapi/IndexedPortdb.py
@@ -77,8 +77,8 @@ class IndexedPortdb:
f = None
for filename in filenames:
try:
- f = io.open(filename, encoding=_encodings["repo.content"])
- except IOError as e:
+ f = open(filename, encoding=_encodings["repo.content"])
+ except OSError as e:
if e.errno not in (errno.ENOENT, errno.ESTALE):
raise
else:
diff --git a/lib/portage/dbapi/_ContentsCaseSensitivityManager.py b/lib/portage/dbapi/_ContentsCaseSensitivityManager.py
index f9cefecf6..63862ee4a 100644
--- a/lib/portage/dbapi/_ContentsCaseSensitivityManager.py
+++ b/lib/portage/dbapi/_ContentsCaseSensitivityManager.py
@@ -67,10 +67,10 @@ class ContentsCaseSensitivityManager:
"""
Initialize data structures for case-insensitive support.
"""
- self._contents_insensitive = dict(
- (k.lower(), v) for k, v in self.getcontents().items()
- )
- self._reverse_key_map = dict((k.lower(), k) for k in self.getcontents())
+ self._contents_insensitive = {
+ k.lower(): v for k, v in self.getcontents().items()
+ }
+ self._reverse_key_map = {k.lower(): k for k in self.getcontents()}
def _keys_case_insensitive(self):
if self._contents_insensitive is None:
diff --git a/lib/portage/dbapi/_MergeProcess.py b/lib/portage/dbapi/_MergeProcess.py
index 197c48a7e..b9f58e1ea 100644
--- a/lib/portage/dbapi/_MergeProcess.py
+++ b/lib/portage/dbapi/_MergeProcess.py
@@ -50,7 +50,7 @@ class MergeProcess(ForkProcess):
# since closing of file descriptors in the subprocess
# can prevent access to open database connections such
# as that used by the sqlite metadata cache module.
- cpv = "%s/%s" % (self.mycat, self.mypkg)
+ cpv = "{}/{}".format(self.mycat, self.mypkg)
settings = self.settings
if cpv != settings.mycpv or "EAPI" not in settings.configdict["pkg"]:
settings.reload()
@@ -72,7 +72,7 @@ class MergeProcess(ForkProcess):
self.fd_pipes.setdefault(0, portage._get_stdin().fileno())
self.log_filter_file = self.settings.get("PORTAGE_LOG_FILTER_FILE_CMD")
- super(MergeProcess, self)._start()
+ super()._start()
def _lock_vdb(self):
"""
@@ -179,7 +179,7 @@ class MergeProcess(ForkProcess):
self._dblink = mylink
self._elog_reader_fd = elog_reader_fd
- pids = super(MergeProcess, self)._spawn(args, fd_pipes, **kwargs)
+ pids = super()._spawn(args, fd_pipes, **kwargs)
os.close(elog_writer_fd)
mtime_writer.close()
self._buf = ""
@@ -254,7 +254,7 @@ class MergeProcess(ForkProcess):
):
self.postinst_failure = True
self.returncode = os.EX_OK
- super(MergeProcess, self)._proc_join_done(proc, future)
+ super()._proc_join_done(proc, future)
def _unregister(self):
"""
@@ -281,4 +281,4 @@ class MergeProcess(ForkProcess):
)
self._elog_keys = None
- super(MergeProcess, self)._unregister()
+ super()._unregister()
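
The super(MergeProcess, self) -> super() simplification is purely syntactic on Python 3: the zero-argument form resolves the same class and instance from the enclosing scope. A sketch with stand-in classes, not the real ForkProcess hierarchy:

    class Base:
        def _start(self):
            return "started"

    class OldStyle(Base):
        def _start(self):
            return super(OldStyle, self)._start()  # explicit two-argument form

    class NewStyle(Base):
        def _start(self):
            return super()._start()  # zero-argument form, same MRO lookup

    assert OldStyle()._start() == NewStyle()._start() == "started"
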
diff --git a/lib/portage/dbapi/_VdbMetadataDelta.py b/lib/portage/dbapi/_VdbMetadataDelta.py
index ebf5fe7cf..acdc0c525 100644
--- a/lib/portage/dbapi/_VdbMetadataDelta.py
+++ b/lib/portage/dbapi/_VdbMetadataDelta.py
@@ -39,14 +39,13 @@ class VdbMetadataDelta:
return None
try:
- with io.open(
+ with open(
self._vardb._cache_delta_filename,
- "r",
encoding=_encodings["repo.content"],
errors="strict",
) as f:
cache_obj = json.load(f)
- except EnvironmentError as e:
+ except OSError as e:
if e.errno not in (errno.ENOENT, errno.ESTALE):
raise
except (SystemExit, KeyboardInterrupt):
diff --git a/lib/portage/dbapi/__init__.py b/lib/portage/dbapi/__init__.py
index f52329c90..99f21a8c3 100644
--- a/lib/portage/dbapi/__init__.py
+++ b/lib/portage/dbapi/__init__.py
@@ -46,7 +46,7 @@ class dbapi:
"""
if self._categories is not None:
return self._categories
- self._categories = tuple(sorted(set(catsplit(x)[0] for x in self.cp_all())))
+ self._categories = tuple(sorted({catsplit(x)[0] for x in self.cp_all()}))
return self._categories
def close_caches(self):
@@ -392,7 +392,7 @@ class dbapi:
pkg = _pkg_str(cpv, metadata=metadata, settings=self.settings)
except InvalidData:
continue
- metadata = dict((k, metadata[k]) for k in update_keys)
+ metadata = {k: metadata[k] for k in update_keys}
if repo_dict is None:
updates_list = updates
else:
@@ -454,7 +454,7 @@ class dbapi:
and mycpv.sub_slot
and mycpv.sub_slot not in (mycpv.slot, newslot)
):
- newslot = "%s/%s" % (newslot, mycpv.sub_slot)
+ newslot = "{}/{}".format(newslot, mycpv.sub_slot)
mydata = {"SLOT": newslot + "\n"}
self.aux_update(mycpv, mydata)
return moves
diff --git a/lib/portage/dbapi/bintree.py b/lib/portage/dbapi/bintree.py
index 12f82bdfd..61aa610df 100644
--- a/lib/portage/dbapi/bintree.py
+++ b/lib/portage/dbapi/bintree.py
@@ -104,33 +104,31 @@ class bindbapi(fakedbapi):
self.bintree = mybintree
self.move_ent = mybintree.move_ent
# Selectively cache metadata in order to optimize dep matching.
- self._aux_cache_keys = set(
- [
- "BDEPEND",
- "BUILD_ID",
- "BUILD_TIME",
- "CHOST",
- "DEFINED_PHASES",
- "DEPEND",
- "EAPI",
- "IDEPEND",
- "IUSE",
- "KEYWORDS",
- "LICENSE",
- "MD5",
- "PDEPEND",
- "PROPERTIES",
- "PROVIDES",
- "RDEPEND",
- "repository",
- "REQUIRES",
- "RESTRICT",
- "SIZE",
- "SLOT",
- "USE",
- "_mtime_",
- ]
- )
+ self._aux_cache_keys = {
+ "BDEPEND",
+ "BUILD_ID",
+ "BUILD_TIME",
+ "CHOST",
+ "DEFINED_PHASES",
+ "DEPEND",
+ "EAPI",
+ "IDEPEND",
+ "IUSE",
+ "KEYWORDS",
+ "LICENSE",
+ "MD5",
+ "PDEPEND",
+ "PROPERTIES",
+ "PROVIDES",
+ "RDEPEND",
+ "repository",
+ "REQUIRES",
+ "RESTRICT",
+ "SIZE",
+ "SLOT",
+ "USE",
+ "_mtime_",
+ }
self._aux_cache_slot_dict = slot_dict_class(self._aux_cache_keys)
self._aux_cache = {}
@@ -525,7 +523,6 @@ class binarytree:
"SLOT",
"USE",
]
- self._pkgindex_aux_keys = list(self._pkgindex_aux_keys)
self._pkgindex_use_evaluated_keys = (
"BDEPEND",
"DEPEND",
@@ -537,26 +534,24 @@ class binarytree:
"RESTRICT",
)
self._pkgindex_header = None
- self._pkgindex_header_keys = set(
- [
- "ACCEPT_KEYWORDS",
- "ACCEPT_LICENSE",
- "ACCEPT_PROPERTIES",
- "ACCEPT_RESTRICT",
- "CBUILD",
- "CONFIG_PROTECT",
- "CONFIG_PROTECT_MASK",
- "FEATURES",
- "GENTOO_MIRRORS",
- "INSTALL_MASK",
- "IUSE_IMPLICIT",
- "USE",
- "USE_EXPAND",
- "USE_EXPAND_HIDDEN",
- "USE_EXPAND_IMPLICIT",
- "USE_EXPAND_UNPREFIXED",
- ]
- )
+ self._pkgindex_header_keys = {
+ "ACCEPT_KEYWORDS",
+ "ACCEPT_LICENSE",
+ "ACCEPT_PROPERTIES",
+ "ACCEPT_RESTRICT",
+ "CBUILD",
+ "CONFIG_PROTECT",
+ "CONFIG_PROTECT_MASK",
+ "FEATURES",
+ "GENTOO_MIRRORS",
+ "INSTALL_MASK",
+ "IUSE_IMPLICIT",
+ "USE",
+ "USE_EXPAND",
+ "USE_EXPAND_HIDDEN",
+ "USE_EXPAND_IMPLICIT",
+ "USE_EXPAND_UNPREFIXED",
+ }
self._pkgindex_default_pkg_data = {
"BDEPEND": "",
"BUILD_ID": "",
@@ -1097,7 +1092,7 @@ class binarytree:
build_id = self._parse_build_id(myfile)
if build_id < 1:
invalid_name = True
- elif myfile != "%s-%s.xpak" % (mypf, build_id):
+ elif myfile != "{}-{}.xpak".format(mypf, build_id):
invalid_name = True
else:
mypkg = mypkg[: -len(str(build_id)) - 1]
@@ -1105,7 +1100,7 @@ class binarytree:
build_id = self._parse_build_id(myfile)
if build_id > 0:
multi_instance = True
- if myfile != "%s-%s.gpkg.tar" % (mypf, build_id):
+ if myfile != "{}-{}.gpkg.tar".format(mypf, build_id):
invalid_name = True
else:
mypkg = mypkg[: -len(str(build_id)) - 1]
@@ -1136,7 +1131,7 @@ class binarytree:
build_id = None
if multi_instance:
- name_split = catpkgsplit("%s/%s" % (mycat, mypf))
+ name_split = catpkgsplit("{}/{}".format(mycat, mypf))
if (
name_split is None
or tuple(catsplit(mydir)) != name_split[:2]
@@ -1256,7 +1251,7 @@ class binarytree:
user, passwd = user.split(":", 1)
if port is not None:
- port_str = ":%s" % (port,)
+ port_str = ":{}".format(port)
if host.endswith(port_str):
host = host[: -len(port_str)]
pkgindex_file = os.path.join(
@@ -1269,11 +1264,10 @@ class binarytree:
)
pkgindex = self._new_pkgindex()
try:
- f = io.open(
+ f = open(
_unicode_encode(
pkgindex_file, encoding=_encodings["fs"], errors="strict"
),
- mode="r",
encoding=_encodings["repo.content"],
errors="replace",
)
@@ -1281,7 +1275,7 @@ class binarytree:
pkgindex.read(f)
finally:
f.close()
- except EnvironmentError as e:
+ except OSError as e:
if e.errno != errno.ENOENT:
raise
local_timestamp = pkgindex.header.get("TIMESTAMP", None)
@@ -1333,7 +1327,7 @@ class binarytree:
)
if hasattr(f, "headers") and f.headers.get("timestamp", ""):
remote_timestamp = f.headers.get("timestamp")
- except IOError as err:
+ except OSError as err:
if (
hasattr(err, "code") and err.code == 304
): # not modified (since local_timestamp)
@@ -1362,7 +1356,7 @@ class binarytree:
# matches that of the cached Packages file.
ssh_args = ["ssh"]
if port is not None:
- ssh_args.append("-p%s" % (port,))
+ ssh_args.append("-p{}".format(port))
# NOTE: shlex evaluates embedded quotes
ssh_args.extend(
portage.util.shlex_split(
@@ -1383,7 +1377,7 @@ class binarytree:
if not fcmd:
fcmd = self.settings.get("FETCHCOMMAND")
if not fcmd:
- raise EnvironmentError("FETCHCOMMAND is unset")
+ raise OSError("FETCHCOMMAND is unset")
else:
fcmd = repo.fetchcommand
@@ -1406,7 +1400,7 @@ class binarytree:
fcmd=fcmd, fcmd_vars=fcmd_vars
)
if not success:
- raise EnvironmentError("%s failed" % (setting,))
+ raise OSError("{} failed".format(setting))
f = open(tmp_filename, "rb")
f_dec = codecs.iterdecode(
@@ -1467,7 +1461,7 @@ class binarytree:
+ "\n"
)
rmt_idx = pkgindex
- except EnvironmentError as e:
+ except OSError as e:
# This includes URLError which is raised for SSL
# certificate errors when PEP 476 is supported.
writemsg(
@@ -1502,7 +1496,7 @@ class binarytree:
f = atomic_ofstream(pkgindex_file)
pkgindex.write(f)
f.close()
- except (IOError, PortageException):
+ except (OSError, PortageException):
if os.access(os.path.dirname(pkgindex_file), os.W_OK):
raise
# The current user doesn't have permission to cache the
@@ -1983,7 +1977,7 @@ class binarytree:
deps = use_reduce(deps, uselist=use, token_class=token_class)
deps = paren_enclose(deps)
except portage.exception.InvalidDependString as e:
- writemsg("%s: %s\n" % (k, e), noiselevel=-1)
+ writemsg("{}: {}\n".format(k, e), noiselevel=-1)
raise
metadata[k] = deps
@@ -2065,7 +2059,7 @@ class binarytree:
elif binpkg_format == "xpak":
if self._multi_instance:
pf = catsplit(cpv)[1]
- filename = "%s-%s.xpak" % (
+ filename = "{}-{}.xpak".format(
os.path.join(self.pkgdir, cpv.cp, pf),
"1",
)
@@ -2074,7 +2068,7 @@ class binarytree:
elif binpkg_format == "gpkg":
if self._multi_instance:
pf = catsplit(cpv)[1]
- filename = "%s-%s.gpkg.tar" % (
+ filename = "{}-{}.gpkg.tar".format(
os.path.join(self.pkgdir, cpv.cp, pf),
"1",
)
@@ -2257,15 +2251,14 @@ class binarytree:
def _load_pkgindex(self):
pkgindex = self._new_pkgindex()
try:
- f = io.open(
+ f = open(
_unicode_encode(
self._pkgindex_file, encoding=_encodings["fs"], errors="strict"
),
- mode="r",
encoding=_encodings["repo.content"],
errors="replace",
)
- except EnvironmentError:
+ except OSError:
pass
else:
try:
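
The large set([...]) -> {...} rewrites in bintree.py (and below in porttree.py and vartree.py) change only the spelling: the literal builds the same set without the intermediate list. A one-line check with a few of the keys involved:

    keys_old = set(["BDEPEND", "DEPEND", "EAPI", "SLOT"])  # old form, builds a list first
    keys_new = {"BDEPEND", "DEPEND", "EAPI", "SLOT"}       # set literal, identical contents
    assert keys_old == keys_new
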
diff --git a/lib/portage/dbapi/porttree.py b/lib/portage/dbapi/porttree.py
index 126e7161b..15e1fd6ff 100644
--- a/lib/portage/dbapi/porttree.py
+++ b/lib/portage/dbapi/porttree.py
@@ -161,7 +161,7 @@ class _better_cache:
continue
for p in pkg_list:
try:
- atom = Atom("%s/%s" % (cat, p))
+ atom = Atom("{}/{}".format(cat, p))
except InvalidAtom:
continue
if atom != atom.cp:
@@ -332,26 +332,24 @@ class portdbapi(dbapi):
if cache is not None:
self._pregen_auxdb[x] = cache
# Selectively cache metadata in order to optimize dep matching.
- self._aux_cache_keys = set(
- [
- "BDEPEND",
- "DEPEND",
- "EAPI",
- "IDEPEND",
- "INHERITED",
- "IUSE",
- "KEYWORDS",
- "LICENSE",
- "PDEPEND",
- "PROPERTIES",
- "RDEPEND",
- "repository",
- "RESTRICT",
- "SLOT",
- "DEFINED_PHASES",
- "REQUIRED_USE",
- ]
- )
+ self._aux_cache_keys = {
+ "BDEPEND",
+ "DEPEND",
+ "EAPI",
+ "IDEPEND",
+ "INHERITED",
+ "IUSE",
+ "KEYWORDS",
+ "LICENSE",
+ "PDEPEND",
+ "PROPERTIES",
+ "RDEPEND",
+ "repository",
+ "RESTRICT",
+ "SLOT",
+ "DEFINED_PHASES",
+ "REQUIRED_USE",
+ }
self._aux_cache = {}
self._better_cache = None
@@ -888,7 +886,9 @@ class portdbapi(dbapi):
# since callers already handle it.
result.set_exception(
portage.exception.InvalidDependString(
- "getFetchMap(): '%s' has unsupported EAPI: '%s'" % (mypkg, eapi)
+ "getFetchMap(): '{}' has unsupported EAPI: '{}'".format(
+ mypkg, eapi
+ )
)
)
return
@@ -1065,7 +1065,7 @@ class portdbapi(dbapi):
oroot + "/" + x, EmptyOnError=1, ignorecvs=1, dirsonly=1
):
try:
- atom = Atom("%s/%s" % (x, y))
+ atom = Atom("{}/{}".format(x, y))
except InvalidAtom:
continue
if atom != atom.cp:
@@ -1436,10 +1436,10 @@ class portdbapi(dbapi):
continue
except PortageException as e:
writemsg(
- "!!! Error: aux_get('%s', %s)\n" % (mycpv, aux_keys),
+ "!!! Error: aux_get('{}', {})\n".format(mycpv, aux_keys),
noiselevel=-1,
)
- writemsg("!!! %s\n" % (e,), noiselevel=-1)
+ writemsg("!!! {}\n".format(e), noiselevel=-1)
del e
continue
@@ -1705,10 +1705,7 @@ def _async_manifest_fetchlist(
return
if e is None:
result.set_result(
- dict(
- (k, list(v.result()))
- for k, v in zip(cpv_list, gather_result.result())
- )
+ {k: list(v.result()) for k, v in zip(cpv_list, gather_result.result())}
)
else:
result.set_exception(e)
diff --git a/lib/portage/dbapi/vartree.py b/lib/portage/dbapi/vartree.py
index 139424c0a..86c2f2034 100644
--- a/lib/portage/dbapi/vartree.py
+++ b/lib/portage/dbapi/vartree.py
@@ -191,33 +191,31 @@ class vardbapi(dbapi):
if vartree is None:
vartree = portage.db[settings["EROOT"]]["vartree"]
self.vartree = vartree
- self._aux_cache_keys = set(
- [
- "BDEPEND",
- "BUILD_TIME",
- "CHOST",
- "COUNTER",
- "DEPEND",
- "DESCRIPTION",
- "EAPI",
- "HOMEPAGE",
- "BUILD_ID",
- "IDEPEND",
- "IUSE",
- "KEYWORDS",
- "LICENSE",
- "PDEPEND",
- "PROPERTIES",
- "RDEPEND",
- "repository",
- "RESTRICT",
- "SLOT",
- "USE",
- "DEFINED_PHASES",
- "PROVIDES",
- "REQUIRES",
- ]
- )
+ self._aux_cache_keys = {
+ "BDEPEND",
+ "BUILD_TIME",
+ "CHOST",
+ "COUNTER",
+ "DEPEND",
+ "DESCRIPTION",
+ "EAPI",
+ "HOMEPAGE",
+ "BUILD_ID",
+ "IDEPEND",
+ "IUSE",
+ "KEYWORDS",
+ "LICENSE",
+ "PDEPEND",
+ "PROPERTIES",
+ "RDEPEND",
+ "repository",
+ "RESTRICT",
+ "SLOT",
+ "USE",
+ "DEFINED_PHASES",
+ "PROVIDES",
+ "REQUIRES",
+ }
self._aux_cache_obj = None
self._aux_cache_filename = os.path.join(
self._eroot, CACHE_PATH, "vdb_metadata.pickle"
@@ -341,7 +339,7 @@ class vardbapi(dbapi):
"""
lock, counter = self._slot_locks.get(slot_atom, (None, 0))
if lock is None:
- lock_path = self.getpath("%s:%s" % (slot_atom.cp, slot_atom.slot))
+ lock_path = self.getpath("{}:{}".format(slot_atom.cp, slot_atom.slot))
ensure_dirs(os.path.dirname(lock_path))
lock = lockfile(lock_path, wantnewlockfile=True)
self._slot_locks[slot_atom] = (lock, counter + 1)
@@ -460,7 +458,7 @@ class vardbapi(dbapi):
os.path.join(newpath, old_pf + ".ebuild"),
os.path.join(newpath, new_pf + ".ebuild"),
)
- except EnvironmentError as e:
+ except OSError as e:
if e.errno != errno.ENOENT:
raise
del e
@@ -484,7 +482,7 @@ class vardbapi(dbapi):
cat_dir = self.getpath(mysplit[0])
try:
dir_list = os.listdir(cat_dir)
- except EnvironmentError as e:
+ except OSError as e:
if e.errno == PermissionDenied.errno:
raise PermissionDenied(cat_dir)
del e
@@ -500,7 +498,7 @@ class vardbapi(dbapi):
continue
if len(mysplit) > 1:
if ps[0] == mysplit[1]:
- cpv = "%s/%s" % (mysplit[0], x)
+ cpv = "{}/{}".format(mysplit[0], x)
metadata = dict(
zip(
self._aux_cache_keys,
@@ -541,7 +539,7 @@ class vardbapi(dbapi):
return [
x for x in os.listdir(p) if os.path.isdir(os.path.join(p, x))
]
- except EnvironmentError as e:
+ except OSError as e:
if e.errno == PermissionDenied.errno:
raise PermissionDenied(p)
del e
@@ -634,7 +632,7 @@ class vardbapi(dbapi):
)
try:
curmtime = os.stat(os.path.join(self._eroot, VDB_PATH, mycat)).st_mtime_ns
- except (IOError, OSError):
+ except OSError:
curmtime = 0
if mycat not in self.matchcache or self.mtdircache[mycat] != curmtime:
@@ -876,18 +874,17 @@ class vardbapi(dbapi):
results[x] = st[stat.ST_MTIME]
continue
try:
- with io.open(
+ with open(
_unicode_encode(
os.path.join(mydir, x),
encoding=_encodings["fs"],
errors="strict",
),
- mode="r",
encoding=_encodings["repo.content"],
errors="replace",
) as f:
myd = f.read()
- except IOError:
+ except OSError:
if (
x not in self._aux_cache_keys
and self._aux_cache_keys_re.match(x) is None
@@ -941,7 +938,7 @@ class vardbapi(dbapi):
args = bunzip2_cmd + ["-c", env_file]
try:
proc = subprocess.Popen(args, stdout=subprocess.PIPE)
- except EnvironmentError as e:
+ except OSError as e:
if e.errno != errno.ENOENT:
raise
raise portage.exception.CommandNotFound(args[0])
@@ -1004,7 +1001,7 @@ class vardbapi(dbapi):
else:
try:
os.unlink(os.path.join(self.getpath(cpv), k))
- except EnvironmentError:
+ except OSError:
pass
self._bump_mtime(cpv)
@@ -1177,11 +1174,10 @@ class vardbapi(dbapi):
del myroot
counter = -1
try:
- with io.open(
+ with open(
_unicode_encode(
self._counter_path, encoding=_encodings["fs"], errors="strict"
),
- mode="r",
encoding=_encodings["repo.content"],
errors="replace",
) as f:
@@ -1192,8 +1188,8 @@ class vardbapi(dbapi):
_("!!! COUNTER file is corrupt: '%s'\n") % self._counter_path,
noiselevel=-1,
)
- writemsg("!!! %s\n" % (e,), noiselevel=-1)
- except EnvironmentError as e:
+ writemsg("!!! {}\n".format(e), noiselevel=-1)
+ except OSError as e:
# Silently allow ENOENT since files under
# /var/cache/ are allowed to disappear.
if e.errno != errno.ENOENT:
@@ -1313,16 +1309,15 @@ class vardbapi(dbapi):
needed_filename = os.path.join(pkg.dbdir, LinkageMap._needed_aux_key)
new_needed = None
try:
- with io.open(
+ with open(
_unicode_encode(
needed_filename, encoding=_encodings["fs"], errors="strict"
),
- mode="r",
encoding=_encodings["repo.content"],
errors="replace",
) as f:
needed_lines = f.readlines()
- except IOError as e:
+ except OSError as e:
if e.errno not in (errno.ENOENT, errno.ESTALE):
raise
else:
@@ -1335,7 +1330,7 @@ class vardbapi(dbapi):
entry = NeededEntry.parse(needed_filename, l)
except InvalidData as e:
writemsg_level(
- "\n%s\n\n" % (e,), level=logging.ERROR, noiselevel=-1
+ "\n{}\n\n".format(e), level=logging.ERROR, noiselevel=-1
)
continue
@@ -1594,8 +1589,7 @@ class vardbapi(dbapi):
del owners[:]
dblink_cache.clear()
gc.collect()
- for x in self._iter_owners_low_mem(path_iter):
- yield x
+ yield from self._iter_owners_low_mem(path_iter)
return
else:
for cpv, p in owners:
@@ -1649,8 +1643,7 @@ class vardbapi(dbapi):
search_future = event_loop.create_future()
event_loop.call_soon(search_pkg, cpv, search_future)
event_loop.run_until_complete(search_future)
- for result in search_future.result():
- yield result
+ yield from search_future.result()
class vartree:
@@ -1936,7 +1929,7 @@ class dblink:
(slot,) = db.aux_get(self.mycpv, ["SLOT"])
slot = slot.partition("/")[0]
- slot_atoms.append(portage.dep.Atom("%s:%s" % (self.mycpv.cp, slot)))
+ slot_atoms.append(portage.dep.Atom("{}:{}".format(self.mycpv.cp, slot)))
for blocker in self._blockers or []:
slot_atoms.append(blocker.slot_atom)
@@ -2031,16 +2024,15 @@ class dblink:
contents_file = os.path.join(self.dbdir, "CONTENTS")
pkgfiles = {}
try:
- with io.open(
+ with open(
_unicode_encode(
contents_file, encoding=_encodings["fs"], errors="strict"
),
- mode="r",
encoding=_encodings["repo.content"],
errors="replace",
) as f:
mylines = f.readlines()
- except EnvironmentError as e:
+ except OSError as e:
if e.errno != errno.ENOENT:
raise
del e
@@ -2358,7 +2350,7 @@ class dblink:
if others_in_slot is None:
slot = self.vartree.dbapi._pkg_str(self.mycpv, None).slot
slot_matches = self.vartree.dbapi.match(
- "%s:%s" % (portage.cpv_getkey(self.mycpv), slot)
+ "{}:{}".format(portage.cpv_getkey(self.mycpv), slot)
)
others_in_slot = []
for cur_cpv in slot_matches:
@@ -2459,7 +2451,7 @@ class dblink:
noiselevel=-1,
)
showMessage(
- "%s\n" % (eapi_unsupported,), level=logging.ERROR, noiselevel=-1
+ "{}\n".format(eapi_unsupported), level=logging.ERROR, noiselevel=-1
)
elif os.path.isfile(myebuildpath):
phase = EbuildPhase(
@@ -2658,7 +2650,7 @@ class dblink:
def _show_unmerge(self, zing, desc, file_type, file_name):
self._display_merge(
- "%s %s %s %s\n" % (zing, desc.ljust(8), file_type, file_name)
+ "{} {} {} {}\n".format(zing, desc.ljust(8), file_type, file_name)
)
def _unmerge_pkgfiles(self, pkgfiles, others_in_slot):
@@ -2689,7 +2681,7 @@ class dblink:
others_in_slot = []
slot = self.vartree.dbapi._pkg_str(self.mycpv, None).slot
slot_matches = self.vartree.dbapi.match(
- "%s:%s" % (portage.cpv_getkey(self.mycpv), slot)
+ "{}:{}".format(portage.cpv_getkey(self.mycpv), slot)
)
for cur_cpv in slot_matches:
if cur_cpv == self.mycpv:
@@ -2752,7 +2744,7 @@ class dblink:
# administrative and pkg_postinst stuff.
self._eerror(
"postrm",
- ["Could not chmod or unlink '%s': %s" % (file_name, ose)],
+ ["Could not chmod or unlink '{}': {}".format(file_name, ose)],
)
else:
@@ -2919,7 +2911,7 @@ class dblink:
):
try:
unlink(obj, lstatobj)
- except EnvironmentError as e:
+ except OSError as e:
if e.errno not in ignored_unlink_errnos:
raise
del e
@@ -3008,7 +3000,7 @@ class dblink:
try:
unlink(obj, lstatobj)
show_unmerge("<<<", "", file_type, obj)
- except (OSError, IOError) as e:
+ except OSError as e:
if e.errno not in ignored_unlink_errnos:
raise
del e
@@ -3032,7 +3024,7 @@ class dblink:
continue
try:
unlink(obj, lstatobj)
- except (OSError, IOError) as e:
+ except OSError as e:
if e.errno not in ignored_unlink_errnos:
raise
del e
@@ -3173,7 +3165,7 @@ class dblink:
try:
unlink(obj, os.lstat(obj))
show_unmerge("<<<", "", "sym", obj)
- except (OSError, IOError) as e:
+ except OSError as e:
if e.errno not in ignored_unlink_errnos:
raise
del e
@@ -3224,7 +3216,7 @@ class dblink:
if stat.S_ISREG(lstatobj.st_mode):
unlink(child, lstatobj)
show_unmerge("<<<", "", "obj", child)
- except EnvironmentError as e:
+ except OSError as e:
if e.errno not in ignored_unlink_errnos:
raise
del e
@@ -3258,7 +3250,7 @@ class dblink:
self._merged_path(os.path.realpath(parent_name), parent_stat)
show_unmerge("<<<", "", "dir", obj)
- except EnvironmentError as e:
+ except OSError as e:
if e.errno not in ignored_rmdir_errnos:
raise
if e.errno != errno.ENOENT:
@@ -3290,7 +3282,7 @@ class dblink:
try:
unlink(obj, os.lstat(obj))
show_unmerge("<<<", "", "sym", obj)
- except (OSError, IOError) as e:
+ except OSError as e:
if e.errno not in ignored_unlink_errnos:
raise
del e
@@ -3427,9 +3419,9 @@ class dblink:
else:
os = portage.os
- self._contents_basenames = set(
+ self._contents_basenames = {
os.path.basename(x) for x in self._contents.keys()
- )
+ }
if basename not in self._contents_basenames:
# This is a shortcut that, in most cases, allows us to
# eliminate this package as an owner without the need
@@ -3441,7 +3433,7 @@ class dblink:
parent_path = os_filename_arg.path.dirname(destfile)
try:
parent_stat = os_filename_arg.stat(parent_path)
- except EnvironmentError as e:
+ except OSError as e:
if e.errno != errno.ENOENT:
raise
del e
@@ -3982,7 +3974,7 @@ class dblink:
try:
dest_lstat = os.lstat(dest_path)
- except EnvironmentError as e:
+ except OSError as e:
if e.errno == errno.ENOENT:
del e
continue
@@ -3997,7 +3989,7 @@ class dblink:
try:
dest_lstat = os.lstat(parent_path)
break
- except EnvironmentError as e:
+ except OSError as e:
if e.errno != errno.ENOTDIR:
raise
del e
@@ -4343,18 +4335,17 @@ class dblink:
slot = ""
for var_name in ("CHOST", "SLOT"):
try:
- with io.open(
+ with open(
_unicode_encode(
os.path.join(inforoot, var_name),
encoding=_encodings["fs"],
errors="strict",
),
- mode="r",
encoding=_encodings["repo.content"],
errors="replace",
) as f:
val = f.readline().strip()
- except EnvironmentError as e:
+ except OSError as e:
if e.errno != errno.ENOENT:
raise
del e
@@ -4405,7 +4396,7 @@ class dblink:
# Use _pkg_str discard the sub-slot part if necessary.
slot = _pkg_str(self.mycpv, slot=slot).slot
cp = self.mysplit[0]
- slot_atom = "%s:%s" % (cp, slot)
+ slot_atom = "{}:{}".format(cp, slot)
self.lockdb()
try:
@@ -4486,18 +4477,17 @@ class dblink:
phase.start()
phase.wait()
try:
- with io.open(
+ with open(
_unicode_encode(
os.path.join(inforoot, "INSTALL_MASK"),
encoding=_encodings["fs"],
errors="strict",
),
- mode="r",
encoding=_encodings["repo.content"],
errors="replace",
) as f:
install_mask = InstallMask(f.read())
- except EnvironmentError:
+ except OSError:
install_mask = None
if install_mask:
@@ -4882,9 +4872,9 @@ class dblink:
for pkg in owners:
pkg = self.vartree.dbapi._pkg_str(pkg.mycpv, None)
- pkg_info_str = "%s%s%s" % (pkg, _slot_separator, pkg.slot)
+ pkg_info_str = "{}{}{}".format(pkg, _slot_separator, pkg.slot)
if pkg.repo != _unknown_repo:
- pkg_info_str += "%s%s" % (_repo_separator, pkg.repo)
+ pkg_info_str += "{}{}".format(_repo_separator, pkg.repo)
pkg_info_strs[pkg] = pkg_info_str
finally:
@@ -5014,7 +5004,7 @@ class dblink:
# write local package counter for recording
if counter is None:
counter = self.vartree.dbapi.counter_tick(mycpv=self.mycpv)
- with io.open(
+ with open(
_unicode_encode(
os.path.join(self.dbtmpdir, "COUNTER"),
encoding=_encodings["fs"],
@@ -5664,7 +5654,7 @@ class dblink:
],
)
- showMessage("%s %s -> %s\n" % (zing, mydest, myto))
+ showMessage("{} {} -> {}\n".format(zing, mydest, myto))
outfile.write(
self._format_contents_line(
node_type="sym",
@@ -5680,7 +5670,7 @@ class dblink:
noiselevel=-1,
)
showMessage(
- "!!! %s -> %s\n" % (mydest, myto),
+ "!!! {} -> {}\n".format(mydest, myto),
level=logging.ERROR,
noiselevel=-1,
)
@@ -5876,7 +5866,7 @@ class dblink:
mtime_ns=mymtime,
)
)
- showMessage("%s %s\n" % (zing, mydest))
+ showMessage("{} {}\n".format(zing, mydest))
else:
# we are merging a fifo or device node
zing = "!!!"
@@ -6037,7 +6027,7 @@ class dblink:
if returncode is None or returncode != os.EX_OK:
try:
proc = subprocess.Popen(["sync"])
- except EnvironmentError:
+ except OSError:
pass
else:
proc.wait()
@@ -6134,13 +6124,12 @@ class dblink:
"returns contents of a file with whitespace converted to spaces"
if not os.path.exists(self.dbdir + "/" + name):
return ""
- with io.open(
+ with open(
_unicode_encode(
os.path.join(self.dbdir, name),
encoding=_encodings["fs"],
errors="strict",
),
- mode="r",
encoding=_encodings["repo.content"],
errors="replace",
) as f:
@@ -6153,13 +6142,12 @@ class dblink:
def getfile(self, fname):
if not os.path.exists(self.dbdir + "/" + fname):
return ""
- with io.open(
+ with open(
_unicode_encode(
os.path.join(self.dbdir, fname),
encoding=_encodings["fs"],
errors="strict",
),
- mode="r",
encoding=_encodings["repo.content"],
errors="replace",
) as f:
@@ -6177,13 +6165,12 @@ class dblink:
def getelements(self, ename):
if not os.path.exists(self.dbdir + "/" + ename):
return []
- with io.open(
+ with open(
_unicode_encode(
os.path.join(self.dbdir, ename),
encoding=_encodings["fs"],
errors="strict",
),
- mode="r",
encoding=_encodings["repo.content"],
errors="replace",
) as f:
@@ -6195,7 +6182,7 @@ class dblink:
return myreturn
def setelements(self, mylist, ename):
- with io.open(
+ with open(
_unicode_encode(
os.path.join(self.dbdir, ename),
encoding=_encodings["fs"],
@@ -6282,7 +6269,7 @@ class dblink:
args=[
portage._python_interpreter,
quickpkg_binary,
- "=%s" % (backup_dblink.mycpv,),
+ "={}".format(backup_dblink.mycpv),
],
background=background,
env=env,
@@ -6409,12 +6396,12 @@ def write_contents(contents, root, f):
relative_filename = filename[root_len:]
if entry_type == "obj":
entry_type, mtime, md5sum = entry_data
- line = "%s %s %s %s\n" % (entry_type, relative_filename, md5sum, mtime)
+ line = "{} {} {} {}\n".format(entry_type, relative_filename, md5sum, mtime)
elif entry_type == "sym":
entry_type, mtime, link = entry_data
- line = "%s %s -> %s %s\n" % (entry_type, relative_filename, link, mtime)
+ line = "{} {} -> {} {}\n".format(entry_type, relative_filename, link, mtime)
else: # dir, dev, fif
- line = "%s %s\n" % (entry_type, relative_filename)
+ line = "{} {}\n".format(entry_type, relative_filename)
f.write(line)
diff --git a/lib/portage/debug.py b/lib/portage/debug.py
index 59aae437e..ed47e72b5 100644
--- a/lib/portage/debug.py
+++ b/lib/portage/debug.py
@@ -74,7 +74,7 @@ class trace_handler:
my_repr = repr(arg[1])
if len(my_repr) > self.max_repr_length:
my_repr = "'omitted'"
- return "type=%s value=%s " % (arg[0], my_repr)
+ return "type={} value={} ".format(arg[0], my_repr)
return ""
diff --git a/lib/portage/dep/__init__.py b/lib/portage/dep/__init__.py
index 8ae726fed..16cbaf262 100644
--- a/lib/portage/dep/__init__.py
+++ b/lib/portage/dep/__init__.py
@@ -462,7 +462,7 @@ def paren_enclose(mylist, unevaluated_atom=False, opconvert=False):
for x in mylist:
if isinstance(x, list):
if opconvert and x and x[0] == "||":
- mystrparts.append("%s ( %s )" % (x[0], paren_enclose(x[1:])))
+ mystrparts.append("{} ( {} )".format(x[0], paren_enclose(x[1:])))
else:
mystrparts.append("( %s )" % paren_enclose(x))
else:
@@ -1164,7 +1164,7 @@ class _use_dep:
def __str__(self):
if not self.tokens:
return ""
- return "[%s]" % (",".join(self.tokens),)
+ return "[{}]".format(",".join(self.tokens))
def __repr__(self):
return "portage.dep._use_dep(%s)" % repr(self.tokens)
@@ -1795,7 +1795,7 @@ class Atom(str):
False otherwise.
"""
if not isinstance(other, Atom):
- raise TypeError("expected %s, got %s" % (Atom, type(other)))
+ raise TypeError("expected {}, got {}".format(Atom, type(other)))
if self == other:
return True
@@ -1957,17 +1957,13 @@ class ExtendedAtomDict(portage.cache.mappings.MutableMapping):
return result
def __iter__(self):
- for k in self._normal:
- yield k
- for k in self._extended:
- yield k
+ yield from self._normal
+ yield from self._extended
def iteritems(self):
try:
- for item in self._normal.items():
- yield item
- for item in self._extended.items():
- yield item
+ yield from self._normal.items()
+ yield from self._extended.items()
except AttributeError:
pass # FEATURES=python-trace
diff --git a/lib/portage/dep/_dnf.py b/lib/portage/dep/_dnf.py
index c83efed8a..21de5344d 100644
--- a/lib/portage/dep/_dnf.py
+++ b/lib/portage/dep/_dnf.py
@@ -24,7 +24,7 @@ def dnf_convert(dep_struct):
if isinstance(x, list):
assert (
x and x[0] == "||"
- ), "Normalization error, nested conjunction found in %s" % (dep_struct,)
+ ), "Normalization error, nested conjunction found in {}".format(dep_struct)
if any(isinstance(element, list) for element in x):
x_dnf = ["||"]
for element in x[1:]:
@@ -34,10 +34,14 @@ def dnf_convert(dep_struct):
# must be a conjunction.
assert (
element
- ), "Normalization error, empty conjunction found in %s" % (x,)
+ ), "Normalization error, empty conjunction found in {}".format(
+ x
+ )
assert (
element[0] != "||"
- ), "Normalization error, nested disjunction found in %s" % (x,)
+ ), "Normalization error, nested disjunction found in {}".format(
+ x
+ )
element = dnf_convert(element)
if contains_disjunction(element):
assert (
@@ -89,7 +93,7 @@ def contains_disjunction(dep_struct):
is_disjunction = dep_struct and dep_struct[0] == "||"
for x in dep_struct:
if isinstance(x, list):
- assert x, "Normalization error, empty conjunction found in %s" % (
+ assert x, "Normalization error, empty conjunction found in {}".format(
dep_struct,
)
if x[0] == "||":
diff --git a/lib/portage/dep/_slot_operator.py b/lib/portage/dep/_slot_operator.py
index bdaf5f328..6e59554a8 100644
--- a/lib/portage/dep/_slot_operator.py
+++ b/lib/portage/dep/_slot_operator.py
@@ -41,8 +41,7 @@ def find_built_slot_operator_atoms(pkg):
def _find_built_slot_operator(dep_struct):
for x in dep_struct:
if isinstance(x, list):
- for atom in _find_built_slot_operator(x):
- yield atom
+ yield from _find_built_slot_operator(x)
elif isinstance(x, Atom) and x.slot_operator_built:
yield x
@@ -107,7 +106,7 @@ def _eval_deps(dep_struct, vardbs):
except (KeyError, InvalidData):
pass
else:
- slot_part = "%s/%s=" % (
+ slot_part = "{}/{}=".format(
best_version.slot,
best_version.sub_slot,
)
diff --git a/lib/portage/dep/dep_check.py b/lib/portage/dep/dep_check.py
index 5a82374f3..d8ecc2e6a 100644
--- a/lib/portage/dep/dep_check.py
+++ b/lib/portage/dep/dep_check.py
@@ -78,15 +78,17 @@ def _expand_new_virtuals(
newsplit.append(x)
continue
elif isinstance(x, list):
- assert x, "Normalization error, empty conjunction found in %s" % (mysplit,)
+ assert x, "Normalization error, empty conjunction found in {}".format(
+ mysplit
+ )
if is_disjunction:
assert (
x[0] != "||"
- ), "Normalization error, nested disjunction found in %s" % (mysplit,)
+ ), "Normalization error, nested disjunction found in {}".format(mysplit)
else:
assert (
x[0] == "||"
- ), "Normalization error, nested conjunction found in %s" % (mysplit,)
+ ), "Normalization error, nested conjunction found in {}".format(mysplit)
x_exp = _expand_new_virtuals(
x,
edebug,
@@ -107,7 +109,7 @@ def _expand_new_virtuals(
# must be a disjunction.
assert (
x and x[0] == "||"
- ), "Normalization error, nested conjunction found in %s" % (
+ ), "Normalization error, nested conjunction found in {}".format(
x_exp,
)
newsplit.extend(x[1:])
@@ -253,7 +255,7 @@ def _expand_new_virtuals(
del mytrees["virt_parent"]
if not mycheck[0]:
- raise ParseError("%s: %s '%s'" % (pkg, mycheck[1], depstring))
+ raise ParseError("{}: {} '{}'".format(pkg, mycheck[1], depstring))
# Replace the original atom "x" with "virt_atom" which refers
# to the specific version of the virtual whose deps we're
@@ -472,7 +474,7 @@ def dep_zapdeps(
avail_pkg = [replacing]
if avail_pkg:
avail_pkg = avail_pkg[-1] # highest (ascending order)
- avail_slot = Atom("%s:%s" % (atom.cp, avail_pkg.slot))
+ avail_slot = Atom("{}:{}".format(atom.cp, avail_pkg.slot))
if not avail_pkg:
all_available = False
all_use_satisfied = False
@@ -527,7 +529,7 @@ def dep_zapdeps(
avail_pkg_use = avail_pkg_use[-1]
if avail_pkg_use != avail_pkg:
avail_pkg = avail_pkg_use
- avail_slot = Atom("%s:%s" % (atom.cp, avail_pkg.slot))
+ avail_slot = Atom("{}:{}".format(atom.cp, avail_pkg.slot))
if not replacing and downgrade_probe is not None and graph is not None:
highest_in_slot = mydbapi_match_pkgs(avail_slot)
@@ -602,7 +604,7 @@ def dep_zapdeps(
# If any version of a package is already in the graph then we
# assume that it is preferred over other possible packages choices.
all_installed = True
- for atom in set(Atom(atom.cp) for atom in atoms if not atom.blocker):
+ for atom in {Atom(atom.cp) for atom in atoms if not atom.blocker}:
# New-style virtuals have zero cost to install.
if not vardb.match(atom) and not atom.startswith("virtual/"):
all_installed = False
@@ -906,7 +908,7 @@ def dep_check(
eapi=eapi,
)
except InvalidDependString as e:
- return [0, "%s" % (e,)]
+ return [0, "{}".format(e)]
if mysplit == []:
# dependencies were reduced to nothing
@@ -931,7 +933,7 @@ def dep_check(
trees=trees,
)
except ParseError as e:
- return [0, "%s" % (e,)]
+ return [0, "{}".format(e)]
dnf = False
if mysettings.local_config: # if not repoman
@@ -988,7 +990,7 @@ def _overlap_dnf(dep_struct):
if isinstance(x, list):
assert (
x and x[0] == "||"
- ), "Normalization error, nested conjunction found in %s" % (dep_struct,)
+ ), "Normalization error, nested conjunction found in {}".format(dep_struct)
order_map[id(x)] = i
prev_cp = None
for atom in _iter_flatten(x):
diff --git a/lib/portage/dep/soname/SonameAtom.py b/lib/portage/dep/soname/SonameAtom.py
index 2308b31fd..af651f7ef 100644
--- a/lib/portage/dep/soname/SonameAtom.py
+++ b/lib/portage/dep/soname/SonameAtom.py
@@ -21,7 +21,7 @@ class SonameAtom:
)
def __getstate__(self):
- return dict((k, getattr(self, k)) for k in self.__slots__)
+ return {k: getattr(self, k) for k in self.__slots__}
def __setstate__(self, state):
for k, v in state.items():
@@ -43,14 +43,14 @@ class SonameAtom:
return True
def __repr__(self):
- return "%s('%s', '%s')" % (
+ return "{}('{}', '{}')".format(
self.__class__.__name__,
self.multilib_category,
self.soname,
)
def __str__(self):
- return "%s: %s" % (self.multilib_category, self.soname)
+ return "{}: {}".format(self.multilib_category, self.soname)
def match(self, pkg):
"""
diff --git a/lib/portage/dep/soname/multilib_category.py b/lib/portage/dep/soname/multilib_category.py
index 567b1d32e..bdb6c8c40 100644
--- a/lib/portage/dep/soname/multilib_category.py
+++ b/lib/portage/dep/soname/multilib_category.py
@@ -197,6 +197,6 @@ def compute_multilib_category(elf_header):
if prefix is None or suffix is None:
category = None
else:
- category = "%s_%s" % (prefix, suffix)
+ category = "{}_{}".format(prefix, suffix)
return category
diff --git a/lib/portage/dispatch_conf.py b/lib/portage/dispatch_conf.py
index ec2bc84d3..45890f61b 100644
--- a/lib/portage/dispatch_conf.py
+++ b/lib/portage/dispatch_conf.py
@@ -84,9 +84,7 @@ def diff_mixed(func, file1, file2):
content = f"FIF: {file1}\n"
else:
content = f"DEV: {file1}\n"
- with io.open(
- diff_files[i], mode="w", encoding=_encodings["stdio"]
- ) as f:
+ with open(diff_files[i], mode="w", encoding=_encodings["stdio"]) as f:
f.write(content)
return func(diff_files[0], diff_files[1])
@@ -187,7 +185,7 @@ def _archive_copy(src_st, src_path, dest_path):
os.symlink(os.readlink(src_path), dest_path)
else:
shutil.copy2(src_path, dest_path)
- except EnvironmentError as e:
+ except OSError as e:
portage.util.writemsg(
f"dispatch-conf: Error copying {src_path} to {dest_path}: {e}\n",
noiselevel=-1,
diff --git a/lib/portage/elog/__init__.py b/lib/portage/elog/__init__.py
index 48e2a39de..a1433a23e 100644
--- a/lib/portage/elog/__init__.py
+++ b/lib/portage/elog/__init__.py
@@ -59,7 +59,7 @@ def _combine_logentries(logentries):
for msgtype, msgcontent in logentries[phase]:
if previous_type != msgtype:
previous_type = msgtype
- rValue.append("%s: %s" % (msgtype, phase))
+ rValue.append("{}: {}".format(msgtype, phase))
if isinstance(msgcontent, str):
rValue.append(msgcontent.rstrip("\n"))
else:
diff --git a/lib/portage/elog/messages.py b/lib/portage/elog/messages.py
index 6a67a45de..9e49161d1 100644
--- a/lib/portage/elog/messages.py
+++ b/lib/portage/elog/messages.py
@@ -57,9 +57,8 @@ def collect_ebuild_messages(path):
logentries[msgfunction] = []
lastmsgtype = None
msgcontent = []
- f = io.open(
+ f = open(
_unicode_encode(filename, encoding=_encodings["fs"], errors="strict"),
- mode="r",
encoding=_encodings["repo.content"],
errors="replace",
)
diff --git a/lib/portage/elog/mod_mail_summary.py b/lib/portage/elog/mod_mail_summary.py
index a695290ef..17c123d4e 100644
--- a/lib/portage/elog/mod_mail_summary.py
+++ b/lib/portage/elog/mod_mail_summary.py
@@ -97,6 +97,6 @@ def _finalize(mysettings, items):
"Timeout in finalize() for elog system 'mail_summary'\n", noiselevel=-1
)
except PortageException as e:
- writemsg("%s\n" % (e,), noiselevel=-1)
+ writemsg("{}\n".format(e), noiselevel=-1)
return
diff --git a/lib/portage/elog/mod_save.py b/lib/portage/elog/mod_save.py
index aedfd0a38..f4274ccc8 100644
--- a/lib/portage/elog/mod_save.py
+++ b/lib/portage/elog/mod_save.py
@@ -57,14 +57,14 @@ def process(mysettings, key, logentries, fulltext):
_ensure_log_subdirs(logdir, log_subdir)
try:
- with io.open(
+ with open(
_unicode_encode(elogfilename, encoding=_encodings["fs"], errors="strict"),
mode="w",
encoding=_encodings["content"],
errors="backslashreplace",
) as elogfile:
elogfile.write(_unicode_decode(fulltext))
- except IOError as e:
+ except OSError as e:
func_call = "open('%s', 'w')" % elogfilename
if e.errno == errno.EACCES:
raise portage.exception.PermissionDenied(func_call)
diff --git a/lib/portage/elog/mod_save_summary.py b/lib/portage/elog/mod_save_summary.py
index 939198fdc..dcdb4942d 100644
--- a/lib/portage/elog/mod_save_summary.py
+++ b/lib/portage/elog/mod_save_summary.py
@@ -41,13 +41,13 @@ def process(mysettings, key, logentries, fulltext):
# TODO: Locking
elogfilename = elogdir + "/summary.log"
try:
- elogfile = io.open(
+ elogfile = open(
_unicode_encode(elogfilename, encoding=_encodings["fs"], errors="strict"),
mode="a",
encoding=_encodings["content"],
errors="backslashreplace",
)
- except IOError as e:
+ except OSError as e:
func_call = "open('%s', 'a')" % elogfilename
if e.errno == errno.EACCES:
raise portage.exception.PermissionDenied(func_call)
diff --git a/lib/portage/elog/mod_syslog.py b/lib/portage/elog/mod_syslog.py
index e34bd3a92..6e69a946a 100644
--- a/lib/portage/elog/mod_syslog.py
+++ b/lib/portage/elog/mod_syslog.py
@@ -27,6 +27,6 @@ def process(mysettings, key, logentries, fulltext):
if isinstance(msgcontent, str):
msgcontent = [msgcontent]
for line in msgcontent:
- line = "%s: %s: %s" % (key, phase, line)
+ line = "{}: {}: {}".format(key, phase, line)
syslog.syslog(_pri[msgtype], line.rstrip("\n"))
syslog.closelog()
diff --git a/lib/portage/emaint/main.py b/lib/portage/emaint/main.py
index 778b0d145..a25701303 100644
--- a/lib/portage/emaint/main.py
+++ b/lib/portage/emaint/main.py
@@ -78,9 +78,9 @@ def usage(module_controller):
textwrap.subsequent_indent = " ".ljust(17)
for mod in module_controller.module_names:
desc = textwrap.wrap(module_controller.get_description(mod), 65)
- _usage += " %s%s\n" % (mod.ljust(15), desc[0])
+ _usage += " {}{}\n".format(mod.ljust(15), desc[0])
for d in desc[1:]:
- _usage += " %s%s\n" % (" ".ljust(15), d)
+ _usage += " {}{}\n".format(" ".ljust(15), d)
return _usage
@@ -92,10 +92,10 @@ def module_opts(module_controller, module):
for opt in sorted(opts):
optd = opts[opt]
if "short" in optd:
- opto = " %s, %s" % (optd["short"], optd["long"])
+ opto = " {}, {}".format(optd["short"], optd["long"])
else:
- opto = " %s" % (optd["long"],)
- _usage += "%s %s\n" % (opto.ljust(15), optd["help"])
+ opto = " {}".format(optd["long"])
+ _usage += "{} {}\n".format(opto.ljust(15), optd["help"])
_usage += "\n"
return _usage
@@ -206,7 +206,7 @@ def emaint_main(myargv):
if opt.status and getattr(options, opt.target, False):
if long_action is not None:
parser.error(
- "--%s and %s are exclusive options" % (long_action, opt.long)
+ "--{} and {} are exclusive options".format(long_action, opt.long)
)
status = opt.status
func = opt.func
diff --git a/lib/portage/emaint/modules/merges/merges.py b/lib/portage/emaint/modules/merges/merges.py
index b607da23b..a45314cce 100644
--- a/lib/portage/emaint/modules/merges/merges.py
+++ b/lib/portage/emaint/modules/merges/merges.py
@@ -32,7 +32,7 @@ class TrackingFile:
@type failed_pkgs: dict
"""
tracking_path = self._tracking_path
- lines = ["%s %s" % (pkg, mtime) for pkg, mtime in failed_pkgs.items()]
+ lines = ["{} {}".format(pkg, mtime) for pkg, mtime in failed_pkgs.items()]
portage.util.write_atomic(tracking_path, "\n".join(lines))
def load(self):
@@ -46,7 +46,7 @@ class TrackingFile:
if not self.exists():
return {}
failed_pkgs = {}
- with open(tracking_path, "r") as tracking_file:
+ with open(tracking_path) as tracking_file:
for failed_merge in tracking_file:
pkg, mtime = failed_merge.strip().split()
failed_pkgs[pkg] = mtime
@@ -231,7 +231,7 @@ class MergesHandler:
errors = []
for pkg, mtime in failed_pkgs.items():
mtime_str = time.ctime(int(mtime))
- errors.append("'%s' failed to merge on '%s'" % (pkg, mtime_str))
+ errors.append("'{}' failed to merge on '{}'".format(pkg, mtime_str))
if errors:
return (False, errors)
return (True, None)
@@ -251,7 +251,7 @@ class MergesHandler:
try:
self._tracking_file.save(failed_pkgs)
- except IOError as ex:
+ except OSError as ex:
errors = ["Unable to save failed merges to tracking file: %s\n" % str(ex)]
errors.append(", ".join(sorted(failed_pkgs)))
return (False, errors)
diff --git a/lib/portage/emaint/modules/move/move.py b/lib/portage/emaint/modules/move/move.py
index 305cc2e81..5c2db5ffa 100644
--- a/lib/portage/emaint/modules/move/move.py
+++ b/lib/portage/emaint/modules/move/move.py
@@ -87,7 +87,9 @@ class MoveHandler:
if maybe_applied.build_time == build_time:
break
else:
- errors.append("'%s' moved to '%s'" % (cpv, newcp))
+ errors.append(
+ "'{}' moved to '{}'".format(cpv, newcp)
+ )
elif update_cmd[0] == "slotmove":
pkg, origslot, newslot = update_cmd[1:]
atom = pkg.with_slot(origslot)
@@ -121,7 +123,7 @@ class MoveHandler:
pkg = _pkg_str(cpv, metadata=metadata, settings=settings)
except InvalidData:
continue
- metadata = dict((k, metadata[k]) for k in self._update_keys)
+ metadata = {k: metadata[k] for k in self._update_keys}
try:
updates = allupdates[pkg.repo]
except KeyError:
diff --git a/lib/portage/emaint/modules/sync/sync.py b/lib/portage/emaint/modules/sync/sync.py
index 50a37ec61..735e7aa00 100644
--- a/lib/portage/emaint/modules/sync/sync.py
+++ b/lib/portage/emaint/modules/sync/sync.py
@@ -427,7 +427,7 @@ class SyncScheduler(AsyncScheduler):
self._update_leaf_nodes()
if hooks_enabled:
self._hooks_repos.add(repo)
- super(SyncScheduler, self)._task_exit(self)
+ super()._task_exit(self)
def _master_hooks(self, repo_name):
"""
diff --git a/lib/portage/env/config.py b/lib/portage/env/config.py
index af996ab13..93ca162d8 100644
--- a/lib/portage/env/config.py
+++ b/lib/portage/env/config.py
@@ -74,9 +74,7 @@ class PackageKeywordsFile(ConfigLoaderKlass):
default_loader = KeyListFileLoader
def __init__(self, filename):
- super(PackageKeywordsFile, self).__init__(
- self.default_loader(filename, validator=None)
- )
+ super().__init__(self.default_loader(filename, validator=None))
class PackageUseFile(ConfigLoaderKlass):
@@ -87,9 +85,7 @@ class PackageUseFile(ConfigLoaderKlass):
default_loader = KeyListFileLoader
def __init__(self, filename):
- super(PackageUseFile, self).__init__(
- self.default_loader(filename, validator=None)
- )
+ super().__init__(self.default_loader(filename, validator=None))
class PackageMaskFile(ConfigLoaderKlass):
@@ -107,9 +103,7 @@ class PackageMaskFile(ConfigLoaderKlass):
default_loader = ItemFileLoader
def __init__(self, filename):
- super(PackageMaskFile, self).__init__(
- self.default_loader(filename, validator=None)
- )
+ super().__init__(self.default_loader(filename, validator=None))
class PortageModulesFile(ConfigLoaderKlass):
@@ -120,6 +114,4 @@ class PortageModulesFile(ConfigLoaderKlass):
default_loader = KeyValuePairFileLoader
def __init__(self, filename):
- super(PortageModulesFile, self).__init__(
- self.default_loader(filename, validator=None)
- )
+ super().__init__(self.default_loader(filename, validator=None))
diff --git a/lib/portage/env/loaders.py b/lib/portage/env/loaders.py
index 284c311a1..924d27ef8 100644
--- a/lib/portage/env/loaders.py
+++ b/lib/portage/env/loaders.py
@@ -31,7 +31,7 @@ class LoaderError(Exception):
self.error_msg = error_msg
def __str__(self):
- return "Failed while loading resource: %s, error was: %s" % (
+ return "Failed while loading resource: {}, error was: {}".format(
self.resource,
self.error_msg,
)
@@ -159,14 +159,13 @@ class FileLoader(DataLoader):
func = self.lineParser
for fn in RecursiveFileLoader(self.fname):
try:
- with io.open(
+ with open(
_unicode_encode(fn, encoding=_encodings["fs"], errors="strict"),
- mode="r",
encoding=_encodings["content"],
errors="replace",
) as f:
lines = f.readlines()
- except EnvironmentError as e:
+ except OSError as e:
if e.errno == errno.EACCES:
writemsg(_("Permission denied: '%s'\n") % fn, noiselevel=-1)
del e
diff --git a/lib/portage/exception.py b/lib/portage/exception.py
index 360febcc8..505e920de 100644
--- a/lib/portage/exception.py
+++ b/lib/portage/exception.py
@@ -159,7 +159,7 @@ class AmbiguousPackageName(ValueError, PortageException):
def __init__(self, *args, **kwargs):
self.args = args
- super(AmbiguousPackageName, self).__init__(*args, **kwargs)
+ super().__init__(*args, **kwargs)
def __str__(self):
return ValueError.__str__(self)
diff --git a/lib/portage/getbinpkg.py b/lib/portage/getbinpkg.py
index 8d06ad862..c35a8fa5a 100644
--- a/lib/portage/getbinpkg.py
+++ b/lib/portage/getbinpkg.py
@@ -616,7 +616,7 @@ def dir_get_metadata(
except tuple(_all_errors) as e:
# ftplib.FTP(host) can raise errors like this:
# socket.error: (111, 'Connection refused')
- sys.stderr.write("!!! %s\n" % (e,))
+ sys.stderr.write("!!! {}\n".format(e))
return {}
out = sys.stdout
@@ -891,7 +891,7 @@ class PackageIndex:
self._read_translation_map = {}
if translated_keys:
self._write_translation_map.update(translated_keys)
- self._read_translation_map.update(((y, x) for (x, y) in translated_keys))
+ self._read_translation_map.update((y, x) for (x, y) in translated_keys)
self.header = {}
if self._default_header_data:
self.header.update(self._default_header_data)
diff --git a/lib/portage/glsa.py b/lib/portage/glsa.py
index d61ad7e59..329e22f88 100644
--- a/lib/portage/glsa.py
+++ b/lib/portage/glsa.py
@@ -778,7 +778,7 @@ class Glsa:
@return: None
"""
if not self.isInjected():
- checkfile = io.open(
+ checkfile = open(
_unicode_encode(
os.path.join(self.config["EROOT"], PRIVATE_PATH, "glsa_injected"),
encoding=_encodings["fs"],
@@ -804,11 +804,11 @@ class Glsa:
@return: list of package-versions that have to be merged
"""
return list(
- set(
+ {
update
for (vuln, update) in self.getAffectionTable(least_change)
if update
- )
+ }
)
def getAffectionTable(self, least_change=True):
diff --git a/lib/portage/gpkg.py b/lib/portage/gpkg.py
index a82f8bd49..34c9bf845 100644
--- a/lib/portage/gpkg.py
+++ b/lib/portage/gpkg.py
@@ -707,7 +707,7 @@ class tar_safe_extract:
them to the dest_dir after sanity check.
"""
if self.closed:
- raise IOError("Tar file is closed.")
+ raise OSError("Tar file is closed.")
temp_dir = tempfile.TemporaryDirectory(dir=dest_dir)
try:
while True:
diff --git a/lib/portage/locks.py b/lib/portage/locks.py
index 67541a84d..baf17d7c6 100644
--- a/lib/portage/locks.py
+++ b/lib/portage/locks.py
@@ -105,7 +105,7 @@ def _test_lock_fn(
try:
with open(lock_path, "a") as f:
lock_fn(lock_path, f.fileno(), fcntl.LOCK_EX | fcntl.LOCK_NB)
- except (TryAgain, EnvironmentError) as e:
+ except (TryAgain, OSError) as e:
if isinstance(e, TryAgain) or e.errno == errno.EAGAIN:
# Parent process holds lock, as expected.
sys.exit(0)
@@ -118,7 +118,7 @@ def _test_lock_fn(
try:
try:
unlock_fn = lock_fn(lock_path, fd, fcntl.LOCK_EX)
- except (TryAgain, EnvironmentError):
+ except (TryAgain, OSError):
pass
else:
_lock_manager(fd, os.fstat(fd), lock_path)
@@ -297,9 +297,9 @@ def _lockfile_iteration(
locking_method = portage._eintr_func_wrapper(_get_lock_fn())
try:
if "__PORTAGE_TEST_HARDLINK_LOCKS" in os.environ:
- raise IOError(errno.ENOSYS, "Function not implemented")
+ raise OSError(errno.ENOSYS, "Function not implemented")
locking_method(myfd, fcntl.LOCK_EX | fcntl.LOCK_NB)
- except IOError as e:
+ except OSError as e:
if not hasattr(e, "errno"):
raise
if e.errno in (errno.EACCES, errno.EAGAIN, errno.ENOLCK):
@@ -325,7 +325,7 @@ def _lockfile_iteration(
while True:
try:
locking_method(myfd, fcntl.LOCK_EX)
- except EnvironmentError as e:
+ except OSError as e:
if e.errno == errno.ENOLCK:
# This is known to occur on Solaris NFS (see
# bug #462694). Assume that the error is due
@@ -343,7 +343,7 @@ def _lockfile_iteration(
% lockfilename
)
writemsg(
- "\n!!! %s: %s\n" % (context_desc, e), noiselevel=-1
+ "\n!!! {}: {}\n".format(context_desc, e), noiselevel=-1
)
time.sleep(_HARDLINK_POLL_LATENCY)
@@ -494,7 +494,7 @@ def _fstat_nlink(fd):
"""
try:
return os.fstat(fd).st_nlink
- except EnvironmentError as e:
+ except OSError as e:
if e.errno in (errno.ENOENT, errno.ESTALE):
# Some filesystems such as CIFS return
# ENOENT which means st_nlink == 0.
@@ -532,7 +532,7 @@ def unlockfile(mytuple):
except OSError:
if isinstance(lockfilename, str):
_open_fds[myfd].close()
- raise IOError(_("Failed to unlock file '%s'\n") % lockfilename)
+ raise OSError(_("Failed to unlock file '%s'\n") % lockfilename)
try:
# This sleep call was added to allow other processes that are
@@ -686,7 +686,7 @@ def hardlink_lockfile(
try:
os.link(lockfilename, myhardlock)
except OSError as e:
- func_call = "link('%s', '%s')" % (lockfilename, myhardlock)
+ func_call = "link('{}', '{}')".format(lockfilename, myhardlock)
if e.errno == OperationNotPermitted.errno:
raise OperationNotPermitted(func_call)
elif e.errno == PermissionDenied.errno:
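
Every exception rewrite in locks.py relies on the PEP 3151 hierarchy: since Python 3.3, IOError, EnvironmentError, and socket.error are plain aliases of OSError, so raising or catching OSError is behaviour-preserving. A quick illustration:

    import socket

    # All of these names refer to the same class on Python 3.3 and later.
    assert IOError is OSError
    assert EnvironmentError is OSError
    assert socket.error is OSError

    try:
        raise IOError(13, "Permission denied")
    except OSError as e:
        assert e.errno == 13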
diff --git a/lib/portage/mail.py b/lib/portage/mail.py
index 40d417808..ce9f4435b 100644
--- a/lib/portage/mail.py
+++ b/lib/portage/mail.py
@@ -150,7 +150,7 @@ def send_mail(mysettings, message):
raise portage.exception.PortageException(
_(f"!!! An error occurred while trying to send logmail:\n{e}")
)
- except socket.error as e:
+ except OSError as e:
raise portage.exception.PortageException(
_(
f"!!! A network error occurred while trying to send logmail:\n{e}\nSure you configured PORTAGE_ELOG_MAILURI correctly?"
diff --git a/lib/portage/manifest.py b/lib/portage/manifest.py
index 6c0968415..92b4ad85d 100644
--- a/lib/portage/manifest.py
+++ b/lib/portage/manifest.py
@@ -221,9 +221,8 @@ class Manifest:
"""Parse a manifest. If myhashdict is given then data will be added too it.
Otherwise, a new dict will be created and returned."""
try:
- with io.open(
+ with open(
_unicode_encode(file_path, encoding=_encodings["fs"], errors="strict"),
- mode="r",
encoding=_encodings["repo.content"],
errors="replace",
) as f:
@@ -231,7 +230,7 @@ class Manifest:
myhashdict = {}
self._parseDigests(f, myhashdict=myhashdict, **kwargs)
return myhashdict
- except (OSError, IOError) as e:
+ except OSError as e:
if e.errno == errno.ENOENT:
raise FileNotFound(file_path)
else:
@@ -322,13 +321,12 @@ class Manifest:
preserved_stats = {self.pkgdir.rstrip(os.sep): os.stat(self.pkgdir)}
if myentries and not force:
try:
- with io.open(
+ with open(
_unicode_encode(
self.getFullname(),
encoding=_encodings["fs"],
errors="strict",
),
- mode="r",
encoding=_encodings["repo.content"],
errors="replace",
) as f:
@@ -340,7 +338,7 @@ class Manifest:
if oldentry != myentry:
update_manifest = True
break
- except (IOError, OSError) as e:
+ except OSError as e:
if e.errno == errno.ENOENT:
pass
else:
@@ -370,7 +368,7 @@ class Manifest:
if sign:
self.sign()
- except (IOError, OSError) as e:
+ except OSError as e:
if e.errno == errno.EACCES:
raise PermissionDenied(str(e))
raise
@@ -539,9 +537,9 @@ class Manifest:
os.path.basename(self.pkgdir.rstrip(os.path.sep)),
self.pkgdir,
)
- distlist = set(
+ distlist = {
distfile for cpv in cpvlist for distfile in self._getCpvDistfiles(cpv)
- )
+ }
if requiredDistfiles is None:
# This allows us to force removal of stale digests for the
@@ -796,9 +794,8 @@ class Manifest:
mfname = self.getFullname()
if not os.path.exists(mfname):
return []
- with io.open(
+ with open(
_unicode_encode(mfname, encoding=_encodings["fs"], errors="strict"),
- mode="r",
encoding=_encodings["repo.content"],
errors="replace",
) as myfile:
diff --git a/lib/portage/module.py b/lib/portage/module.py
index 8e63cd545..cd9e2a7e1 100644
--- a/lib/portage/module.py
+++ b/lib/portage/module.py
@@ -68,10 +68,8 @@ class Module:
def get_class(self, name):
if not name or name not in self.kids_names:
raise InvalidModuleName(
- (
- f"Module name '{name}' is invalid or not"
- f"part of the module '{self.name}'"
- )
+ f"Module name '{name}' is invalid or not"
+ f"part of the module '{self.name}'"
)
kid = self.kids[name]
if kid["is_imported"]:
@@ -117,7 +115,7 @@ class Modules:
# test for statinfo to ensure it is a real module
# it will bail if it errors
os.lstat(os.path.join(module_dir, entry, "__init__.py"))
- except EnvironmentError:
+ except OSError:
return False
return True
@@ -238,9 +236,7 @@ class Modules:
if self.compat_versions:
if not module.module_spec["version"] in self.compat_versions:
raise ModuleVersionError(
- (
- f"Error loading '{self._namepath}' plugin module: {module.module_spec['name']}, version: {module.module_spec['version']}\n"
- "Module is not compatible with the current application version\n"
- f"Compatible module API versions are: {self.compat_versions}"
- )
+ f"Error loading '{self._namepath}' plugin module: {module.module_spec['name']}, version: {module.module_spec['version']}\n"
+ "Module is not compatible with the current application version\n"
+ f"Compatible module API versions are: {self.compat_versions}"
)
diff --git a/lib/portage/news.py b/lib/portage/news.py
index 7964f74d3..3886b1126 100644
--- a/lib/portage/news.py
+++ b/lib/portage/news.py
@@ -294,9 +294,8 @@ class NewsItem:
return self._valid
def parse(self):
- with io.open(
+ with open(
_unicode_encode(self.path, encoding=_encodings["fs"], errors="strict"),
- mode="r",
encoding=_encodings["content"],
errors="replace",
) as f:
diff --git a/lib/portage/output.py b/lib/portage/output.py
index 439cc4fac..54335b999 100644
--- a/lib/portage/output.py
+++ b/lib/portage/output.py
@@ -188,9 +188,8 @@ def _parse_color_map(config_root="/", onerror=None):
return token
try:
- with io.open(
+ with open(
_unicode_encode(myfile, encoding=_encodings["fs"], errors="strict"),
- mode="r",
encoding=_encodings["content"],
errors="replace",
) as f:
@@ -251,7 +250,7 @@ def _parse_color_map(config_root="/", onerror=None):
_styles[k] = tuple(code_list)
elif k in codes:
codes[k] = "".join(code_list)
- except (IOError, OSError) as e:
+ except OSError as e:
if e.errno == errno.ENOENT:
raise FileNotFound(myfile)
elif e.errno == errno.EACCES:
@@ -336,7 +335,7 @@ def xtermTitleReset():
home = os.environ.get("HOME", "")
if home != "" and pwd.startswith(home):
pwd = "~" + pwd[len(home) :]
- default_xterm_title = "\x1b]0;%s@%s:%s\x07" % (
+ default_xterm_title = "\x1b]0;{}@{}:{}\x07".format(
os.environ.get("LOGNAME", ""),
os.environ.get("HOSTNAME", "").split(".", 1)[0],
pwd,
@@ -534,7 +533,7 @@ def get_term_size(fd=None):
try:
proc = subprocess.Popen(["stty", "size"], stdout=subprocess.PIPE, stderr=fd)
- except EnvironmentError as e:
+ except OSError as e:
if e.errno != errno.ENOENT:
raise
# stty command not found
@@ -808,7 +807,7 @@ class ProgressBar:
self._set_desc()
def _set_desc(self):
- self._desc = "%s%s" % (
+ self._desc = "{}{}".format(
"%s: " % self._title if self._title else "",
"%s" % self._label if self._label else "",
)
@@ -900,7 +899,7 @@ class TermProgressBar(ProgressBar):
position = 0.5
self._position = position
bar_width = int(offset * max_bar_width)
- image = "%s%s%s" % (
+ image = "{}{}{}".format(
self._desc,
_percent,
"["
@@ -914,7 +913,7 @@ class TermProgressBar(ProgressBar):
percentage = 100 * curval // maxval
max_bar_width = bar_space - 1
_percent = ("%d%% " % percentage).rjust(percentage_str_width)
- image = "%s%s" % (self._desc, _percent)
+ image = "{}{}".format(self._desc, _percent)
if cols < min_columns:
return image
diff --git a/lib/portage/package/ebuild/_config/LocationsManager.py b/lib/portage/package/ebuild/_config/LocationsManager.py
index a92407dbd..21b371a97 100644
--- a/lib/portage/package/ebuild/_config/LocationsManager.py
+++ b/lib/portage/package/ebuild/_config/LocationsManager.py
@@ -98,9 +98,7 @@ class LocationsManager:
self.broot = portage.const.EPREFIX
def load_profiles(self, repositories, known_repository_paths):
- known_repository_paths = set(
- os.path.realpath(x) for x in known_repository_paths
- )
+ known_repository_paths = {os.path.realpath(x) for x in known_repository_paths}
known_repos = []
for x in known_repository_paths:
@@ -224,14 +222,13 @@ class LocationsManager:
eapi = eapi or "0"
f = None
try:
- f = io.open(
+ f = open(
_unicode_encode(eapi_file, encoding=_encodings["fs"], errors="strict"),
- mode="r",
encoding=_encodings["content"],
errors="replace",
)
eapi = f.readline().strip()
- except IOError:
+ except OSError:
pass
else:
if not eapi_is_supported(eapi):
diff --git a/lib/portage/package/ebuild/_ipc/QueryCommand.py b/lib/portage/package/ebuild/_ipc/QueryCommand.py
index f8f464516..def66c0be 100644
--- a/lib/portage/package/ebuild/_ipc/QueryCommand.py
+++ b/lib/portage/package/ebuild/_ipc/QueryCommand.py
@@ -53,7 +53,7 @@ class QueryCommand(IpcCommand):
root = normalize_path(root or os.sep).rstrip(os.sep) + os.sep
if root not in db:
- return ("", "%s: Invalid ROOT: %s\n" % (cmd, root), 3)
+ return ("", "{}: Invalid ROOT: {}\n".format(cmd, root), 3)
portdb = db[root]["porttree"].dbapi
vardb = db[root]["vartree"].dbapi
@@ -63,12 +63,12 @@ class QueryCommand(IpcCommand):
try:
atom = Atom(args[0], allow_repo=allow_repo)
except InvalidAtom:
- return ("", "%s: Invalid atom: %s\n" % (cmd, args[0]), 2)
+ return ("", "{}: Invalid atom: {}\n".format(cmd, args[0]), 2)
try:
atom = Atom(args[0], allow_repo=allow_repo, eapi=eapi)
except InvalidAtom as e:
- warnings.append("QA Notice: %s: %s" % (cmd, e))
+ warnings.append("QA Notice: {}: {}".format(cmd, e))
use = self.settings.get("PORTAGE_BUILT_USE")
if use is None:
@@ -98,7 +98,7 @@ class QueryCommand(IpcCommand):
):
repo = _repo_name_re.match(args[0])
if repo is None:
- return ("", "%s: Invalid repository: %s\n" % (cmd, args[0]), 2)
+ return ("", "{}: Invalid repository: {}\n".format(cmd, args[0]), 2)
try:
repo = portdb.repositories[args[0]]
except KeyError:
diff --git a/lib/portage/package/ebuild/_parallel_manifest/ManifestScheduler.py b/lib/portage/package/ebuild/_parallel_manifest/ManifestScheduler.py
index 4599e2d50..de5e92805 100644
--- a/lib/portage/package/ebuild/_parallel_manifest/ManifestScheduler.py
+++ b/lib/portage/package/ebuild/_parallel_manifest/ManifestScheduler.py
@@ -41,8 +41,7 @@ class ManifestScheduler(AsyncScheduler):
# and in order to reduce latency in case of a signal interrupt.
cp_all = self._portdb.cp_all
for category in sorted(self._portdb.categories):
- for cp in cp_all(categories=(category,)):
- yield cp
+ yield from cp_all(categories=(category,))
def _iter_tasks(self):
portdb = self._portdb
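
The yield from form above delegates straight to the inner iterable instead of re-yielding each item in a loop. A tiny sketch of the equivalence; the category data and the cp_all stand-in are invented, not Portage's real portdbapi:

    def cp_all(categories):
        # stand-in for portdbapi.cp_all(); returns package names per category
        data = {"app-misc": ["app-misc/foo"], "dev-libs": ["dev-libs/bar"]}
        return data.get(categories[0], [])

    def iter_cp(categories):
        for category in sorted(categories):
            # before: for cp in cp_all(categories=(category,)): yield cp
            yield from cp_all(categories=(category,))

    assert list(iter_cp(["dev-libs", "app-misc"])) == ["app-misc/foo", "dev-libs/bar"]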
diff --git a/lib/portage/package/ebuild/_parallel_manifest/ManifestTask.py b/lib/portage/package/ebuild/_parallel_manifest/ManifestTask.py
index df279dab6..76ed36b45 100644
--- a/lib/portage/package/ebuild/_parallel_manifest/ManifestTask.py
+++ b/lib/portage/package/ebuild/_parallel_manifest/ManifestTask.py
@@ -233,7 +233,7 @@ class ManifestTask(CompositeTask):
"rb",
) as f:
return self._PGP_HEADER not in f.readline()
- except IOError as e:
+ except OSError as e:
if e.errno in (errno.ENOENT, errno.ESTALE):
return False
raise
diff --git a/lib/portage/package/ebuild/config.py b/lib/portage/package/ebuild/config.py
index a37b373cf..5e59932cf 100644
--- a/lib/portage/package/ebuild/config.py
+++ b/lib/portage/package/ebuild/config.py
@@ -553,7 +553,7 @@ class config:
and user_auxdbmodule in self._module_aliases
):
warnings.warn(
- "'%s' is deprecated: %s" % (user_auxdbmodule, modules_file)
+ "'{}' is deprecated: {}".format(user_auxdbmodule, modules_file)
)
self.modules["default"] = {
@@ -587,9 +587,9 @@ class config:
env = os.environ
# Avoid potential UnicodeDecodeError exceptions later.
- env_unicode = dict(
- (_unicode_decode(k), _unicode_decode(v)) for k, v in env.items()
- )
+ env_unicode = {
+ _unicode_decode(k): _unicode_decode(v) for k, v in env.items()
+ }
self.backupenv = env_unicode
@@ -705,7 +705,7 @@ class config:
)
for x in profiles_complex
]
- except EnvironmentError as e:
+ except OSError as e:
_raise_exc(e)
self.packages = tuple(stack_lists(packages_list, incremental=1))
@@ -1744,9 +1744,9 @@ class config:
def __getitem__(self, key):
prefix = key.lower() + "_"
prefix_len = len(prefix)
- expand_flags = set(
+ expand_flags = {
x[prefix_len:] for x in self._use if x[:prefix_len] == prefix
- )
+ }
var_split = self._use_expand_dict.get(key, "").split()
# Preserve the order of var_split because it can matter for things
# like LINGUAS.
@@ -2224,7 +2224,7 @@ class config:
# Use the calculated USE flags to regenerate the USE_EXPAND flags so
# that they are consistent. For optimal performance, use slice
# comparison instead of startswith().
- use_expand_split = set(x.lower() for x in self.get("USE_EXPAND", "").split())
+ use_expand_split = {x.lower() for x in self.get("USE_EXPAND", "").split()}
lazy_use_expand = self._lazy_use_expand(
self,
unfiltered_use,
@@ -2235,7 +2235,7 @@ class config:
self._use_expand_dict,
)
- use_expand_iuses = dict((k, set()) for k in use_expand_split)
+ use_expand_iuses = {k: set() for k in use_expand_split}
for x in portage_iuse:
x_split = x.split("_")
if len(x_split) == 1:
@@ -2300,7 +2300,7 @@ class config:
if k in protected_keys or k in non_user_variables:
writemsg(
"!!! Illegal variable "
- + "'%s' assigned in '%s'\n" % (k, penvfile),
+ + "'{}' assigned in '{}'\n".format(k, penvfile),
noiselevel=-1,
)
elif k in incrementals:
@@ -3050,9 +3050,9 @@ class config:
for k in use_expand:
prefix = k.lower() + "_"
prefix_len = len(prefix)
- expand_flags = set(
+ expand_flags = {
x[prefix_len:] for x in myflags if x[:prefix_len] == prefix
- )
+ }
var_split = use_expand_dict.get(k, "").split()
var_split = [x for x in var_split if x in expand_flags]
var_split.extend(sorted(expand_flags.difference(var_split)))
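
The comprehension rewrites in config.py are the dict-shaped version of the set changes above: dict((k, v) for ...) becomes {k: v for ...} with identical results. A sketch with made-up environment data, where lowercasing stands in for the real _unicode_decode calls:

    env = {"LANG": "C.UTF-8", "USE": "ssl -X"}
    # before: dict((k.lower(), v) for k, v in env.items())
    env_lower = {k.lower(): v for k, v in env.items()}
    assert env_lower == {"lang": "C.UTF-8", "use": "ssl -X"}

    use_expand = ["python_targets", "abi_x86"]
    # before: dict((k, set()) for k in use_expand)
    use_expand_iuses = {k: set() for k in use_expand}
    assert use_expand_iuses["abi_x86"] == set()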
diff --git a/lib/portage/package/ebuild/deprecated_profile_check.py b/lib/portage/package/ebuild/deprecated_profile_check.py
index 19bea1903..90b79a9c7 100644
--- a/lib/portage/package/ebuild/deprecated_profile_check.py
+++ b/lib/portage/package/ebuild/deprecated_profile_check.py
@@ -39,11 +39,10 @@ def deprecated_profile_check(settings=None):
if not os.access(deprecated_profile_file, os.R_OK):
return
- with io.open(
+ with open(
_unicode_encode(
deprecated_profile_file, encoding=_encodings["fs"], errors="strict"
),
- mode="r",
encoding=_encodings["content"],
errors="replace",
) as f:
diff --git a/lib/portage/package/ebuild/digestgen.py b/lib/portage/package/ebuild/digestgen.py
index 3a3c92a3a..7d2f28c96 100644
--- a/lib/portage/package/ebuild/digestgen.py
+++ b/lib/portage/package/ebuild/digestgen.py
@@ -227,7 +227,7 @@ def digestgen(myarchives=None, mysettings=None, myportdb=None):
pv = pkg_key.split("/")[1]
for filename in auto_assumed:
if filename in fetchlist:
- writemsg_stdout(" %s::%s\n" % (pv, filename))
+ writemsg_stdout(" {}::{}\n".format(pv, filename))
return 1
finally:
portage._doebuild_manifest_exempt_depend -= 1
diff --git a/lib/portage/package/ebuild/doebuild.py b/lib/portage/package/ebuild/doebuild.py
index e9f172d22..cbe14b792 100644
--- a/lib/portage/package/ebuild/doebuild.py
+++ b/lib/portage/package/ebuild/doebuild.py
@@ -228,7 +228,7 @@ def _doebuild_spawn(phase, settings, actionmap=None, **kwargs):
else:
ebuild_sh_arg = phase
- cmd = "%s %s" % (
+ cmd = "{} {}".format(
_shell_quote(
os.path.join(
settings["PORTAGE_BIN_PATH"], os.path.basename(EBUILD_SH_BINARY)
@@ -903,7 +903,7 @@ def doebuild(
if mydo not in clean_phases and not os.path.exists(myebuild):
writemsg(
- "!!! doebuild: %s not found for %s\n" % (myebuild, mydo), noiselevel=-1
+ "!!! doebuild: {} not found for {}\n".format(myebuild, mydo), noiselevel=-1
)
return 1
@@ -1247,12 +1247,14 @@ def doebuild(
else:
vardb = vartree.dbapi
cpv = mysettings.mycpv
- cpv_slot = "%s%s%s" % (cpv.cp, portage.dep._slot_separator, cpv.slot)
+ cpv_slot = "{}{}{}".format(
+ cpv.cp, portage.dep._slot_separator, cpv.slot
+ )
mysettings["REPLACING_VERSIONS"] = " ".join(
- set(
+ {
portage.versions.cpv_getversion(match)
for match in vardb.match(cpv_slot) + vardb.match("=" + cpv)
- )
+ }
)
# if any of these are being called, handle them -- running them out of
@@ -1495,7 +1497,7 @@ def doebuild(
if pkg.build_id is not None:
build_info["BUILD_ID"] = "%s\n" % pkg.build_id
for k, v in build_info.items():
- with io.open(
+ with open(
_unicode_encode(
os.path.join(infoloc, k),
encoding=_encodings["fs"],
@@ -1808,7 +1810,7 @@ def _spawn_actionmap(settings):
def _validate_deps(mysettings, myroot, mydo, mydbapi):
- invalid_dep_exempt_phases = set(["clean", "cleanrm", "help", "prerm", "postrm"])
+ invalid_dep_exempt_phases = {"clean", "cleanrm", "help", "prerm", "postrm"}
all_keys = set(Package.metadata_keys)
all_keys.add("SRC_URI")
all_keys = tuple(all_keys)
@@ -1848,7 +1850,7 @@ def _validate_deps(mysettings, myroot, mydo, mydbapi):
if pkg.invalid:
for k, v in pkg.invalid.items():
for msg in v:
- msgs.append(" %s\n" % (msg,))
+ msgs.append(" {}\n".format(msg))
if msgs:
portage.util.writemsg_level(
@@ -2093,7 +2095,7 @@ def spawn(
if mysettings.mycpv is not None:
keywords["opt_name"] = "[%s]" % mysettings.mycpv
else:
- keywords["opt_name"] = "[%s/%s]" % (
+ keywords["opt_name"] = "[{}/{}]".format(
mysettings.get("CATEGORY", ""),
mysettings.get("PF", ""),
)
@@ -2286,7 +2288,7 @@ def _check_build_log(mysettings, out=None):
_unicode_encode(logfile, encoding=_encodings["fs"], errors="strict"),
mode="rb",
)
- except EnvironmentError:
+ except OSError:
return
f_real = None
@@ -2310,7 +2312,7 @@ def _check_build_log(mysettings, out=None):
qa_configure_opts = ""
try:
- with io.open(
+ with open(
_unicode_encode(
os.path.join(
mysettings["PORTAGE_BUILDDIR"], "build-info", "QA_CONFIGURE_OPTIONS"
@@ -2318,12 +2320,11 @@ def _check_build_log(mysettings, out=None):
encoding=_encodings["fs"],
errors="strict",
),
- mode="r",
encoding=_encodings["repo.content"],
errors="replace",
) as qa_configure_opts_f:
qa_configure_opts = qa_configure_opts_f.read()
- except IOError as e:
+ except OSError as e:
if e.errno not in (errno.ENOENT, errno.ESTALE):
raise
@@ -2338,7 +2339,7 @@ def _check_build_log(mysettings, out=None):
qa_am_maintainer_mode = []
try:
- with io.open(
+ with open(
_unicode_encode(
os.path.join(
mysettings["PORTAGE_BUILDDIR"],
@@ -2348,14 +2349,13 @@ def _check_build_log(mysettings, out=None):
encoding=_encodings["fs"],
errors="strict",
),
- mode="r",
encoding=_encodings["repo.content"],
errors="replace",
) as qa_am_maintainer_mode_f:
qa_am_maintainer_mode = [
x for x in qa_am_maintainer_mode_f.read().splitlines() if x
]
- except IOError as e:
+ except OSError as e:
if e.errno not in (errno.ENOENT, errno.ESTALE):
raise
@@ -2435,7 +2435,7 @@ def _check_build_log(mysettings, out=None):
except (EOFError, zlib.error) as e:
_eerror(
[
- "portage encountered a zlib error: '%s'" % (e,),
+ "portage encountered a zlib error: '{}'".format(e),
"while reading the log file: '%s'" % logfile,
]
)
@@ -2532,7 +2532,7 @@ def _post_src_install_write_metadata(settings):
if v is not None:
write_atomic(os.path.join(build_info_dir, k), v + "\n")
- with io.open(
+ with open(
_unicode_encode(
os.path.join(build_info_dir, "BUILD_TIME"),
encoding=_encodings["fs"],
@@ -2542,7 +2542,7 @@ def _post_src_install_write_metadata(settings):
encoding=_encodings["repo.content"],
errors="strict",
) as f:
- f.write("%.0f\n" % (time.time(),))
+ f.write("{:.0f}\n".format(time.time()))
use = frozenset(settings["PORTAGE_USE"].split())
for k in _vdb_use_conditional_keys:
@@ -2570,7 +2570,7 @@ def _post_src_install_write_metadata(settings):
except OSError:
pass
continue
- with io.open(
+ with open(
_unicode_encode(
os.path.join(build_info_dir, k),
encoding=_encodings["fs"],
@@ -2592,7 +2592,7 @@ def _post_src_install_write_metadata(settings):
except OSError:
pass
continue
- with io.open(
+ with open(
_unicode_encode(
os.path.join(build_info_dir, k),
encoding=_encodings["fs"],
@@ -2666,7 +2666,7 @@ def _post_src_install_uid_fix(mysettings, out):
qa_desktop_file = ""
try:
- with io.open(
+ with open(
_unicode_encode(
os.path.join(
mysettings["PORTAGE_BUILDDIR"], "build-info", "QA_DESKTOP_FILE"
@@ -2674,12 +2674,11 @@ def _post_src_install_uid_fix(mysettings, out):
encoding=_encodings["fs"],
errors="strict",
),
- mode="r",
encoding=_encodings["repo.content"],
errors="replace",
) as f:
qa_desktop_file = f.read()
- except IOError as e:
+ except OSError as e:
if e.errno not in (errno.ENOENT, errno.ESTALE):
raise
@@ -2790,7 +2789,7 @@ def _post_src_install_uid_fix(mysettings, out):
" %s is not a valid libtool archive, skipping\n"
% fpath[len(destdir) :]
)
- qa_msg = "QA Notice: invalid .la file found: %s, %s" % (
+ qa_msg = "QA Notice: invalid .la file found: {}, {}".format(
fpath[len(destdir) :],
e,
)
@@ -2852,7 +2851,7 @@ def _post_src_install_uid_fix(mysettings, out):
build_info_dir = os.path.join(mysettings["PORTAGE_BUILDDIR"], "build-info")
- f = io.open(
+ f = open(
_unicode_encode(
os.path.join(build_info_dir, "SIZE"),
encoding=_encodings["fs"],
@@ -2897,16 +2896,15 @@ def _post_src_install_soname_symlinks(mysettings, out):
f = None
try:
- f = io.open(
+ f = open(
_unicode_encode(
needed_filename, encoding=_encodings["fs"], errors="strict"
),
- mode="r",
encoding=_encodings["repo.content"],
errors="replace",
)
lines = f.readlines()
- except IOError as e:
+ except OSError as e:
if e.errno not in (errno.ENOENT, errno.ESTALE):
raise
return
@@ -2917,18 +2915,17 @@ def _post_src_install_soname_symlinks(mysettings, out):
metadata = {}
for k in ("QA_PREBUILT", "QA_SONAME_NO_SYMLINK"):
try:
- with io.open(
+ with open(
_unicode_encode(
os.path.join(mysettings["PORTAGE_BUILDDIR"], "build-info", k),
encoding=_encodings["fs"],
errors="strict",
),
- mode="r",
encoding=_encodings["repo.content"],
errors="replace",
) as f:
v = f.read()
- except IOError as e:
+ except OSError as e:
if e.errno not in (errno.ENOENT, errno.ESTALE):
raise
else:
@@ -2989,35 +2986,33 @@ def _post_src_install_soname_symlinks(mysettings, out):
build_info_dir = os.path.join(mysettings["PORTAGE_BUILDDIR"], "build-info")
try:
- with io.open(
+ with open(
_unicode_encode(
os.path.join(build_info_dir, "PROVIDES_EXCLUDE"),
encoding=_encodings["fs"],
errors="strict",
),
- mode="r",
encoding=_encodings["repo.content"],
errors="replace",
) as f:
provides_exclude = f.read()
- except IOError as e:
+ except OSError as e:
if e.errno not in (errno.ENOENT, errno.ESTALE):
raise
provides_exclude = ""
try:
- with io.open(
+ with open(
_unicode_encode(
os.path.join(build_info_dir, "REQUIRES_EXCLUDE"),
encoding=_encodings["fs"],
errors="strict",
),
- mode="r",
encoding=_encodings["repo.content"],
errors="replace",
) as f:
requires_exclude = f.read()
- except IOError as e:
+ except OSError as e:
if e.errno not in (errno.ENOENT, errno.ESTALE):
raise
requires_exclude = ""
@@ -3040,7 +3035,7 @@ def _post_src_install_soname_symlinks(mysettings, out):
entry = NeededEntry.parse(needed_filename, l)
except InvalidData as e:
portage.util.writemsg_level(
- "\n%s\n\n" % (e,), level=logging.ERROR, noiselevel=-1
+ "\n{}\n\n".format(e), level=logging.ERROR, noiselevel=-1
)
continue
@@ -3094,7 +3089,7 @@ def _post_src_install_soname_symlinks(mysettings, out):
needed_file.close()
if soname_deps.requires is not None:
- with io.open(
+ with open(
_unicode_encode(
os.path.join(build_info_dir, "REQUIRES"),
encoding=_encodings["fs"],
@@ -3107,7 +3102,7 @@ def _post_src_install_soname_symlinks(mysettings, out):
f.write(soname_deps.requires)
if soname_deps.provides is not None:
- with io.open(
+ with open(
_unicode_encode(
os.path.join(build_info_dir, "PROVIDES"),
encoding=_encodings["fs"],
diff --git a/lib/portage/package/ebuild/fetch.py b/lib/portage/package/ebuild/fetch.py
index 76e12dbd6..575f3256c 100644
--- a/lib/portage/package/ebuild/fetch.py
+++ b/lib/portage/package/ebuild/fetch.py
@@ -372,7 +372,9 @@ def _check_distfile(filename, digests, eout, show_errors=1, hash_filter=None):
digests = _apply_hash_filter(digests, hash_filter)
if _check_digests(filename, digests, show_errors=show_errors):
eout.ebegin(
- "%s %s ;-)" % (os.path.basename(filename), " ".join(sorted(digests)))
+ "{} {} ;-)".format(
+ os.path.basename(filename), " ".join(sorted(digests))
+ )
)
eout.eend(0)
else:
@@ -579,7 +581,7 @@ class ContentHashLayout(FilenameHashLayout):
to a digest value for self.algo, and which can be compared to
other DistfileName instances with their digests_equal method.
"""
- for filename in super(ContentHashLayout, self).get_filenames(distdir):
+ for filename in super().get_filenames(distdir):
yield DistfileName(filename, digests=dict([(self.algo, filename)]))
@staticmethod
@@ -702,9 +704,9 @@ def get_mirror_url(mirror_url, filename, mysettings, cache_path=None):
cache = {}
if cache_path is not None:
try:
- with open(cache_path, "r") as f:
+ with open(cache_path) as f:
cache = json.load(f)
- except (IOError, ValueError):
+ except (OSError, ValueError):
pass
ts, data = cache.get(mirror_url, (0, None))
@@ -726,8 +728,8 @@ def get_mirror_url(mirror_url, filename, mysettings, cache_path=None):
tmpfile = os.path.join(mysettings["DISTDIR"], tmpfile)
mirror_conf.read_from_file(tmpfile)
else:
- raise IOError()
- except (ConfigParserError, IOError, UnicodeDecodeError):
+ raise OSError()
+ except (ConfigParserError, OSError, UnicodeDecodeError):
pass
else:
cache[mirror_url] = (time.time(), mirror_conf.serialize())
@@ -1217,7 +1219,7 @@ def fetch(
vfs_stat = os.statvfs(mysettings["DISTDIR"])
except OSError as e:
writemsg_level(
- "!!! statvfs('%s'): %s\n" % (mysettings["DISTDIR"], e),
+ "!!! statvfs('{}'): {}\n".format(mysettings["DISTDIR"], e),
noiselevel=-1,
level=logging.ERROR,
)
@@ -1444,7 +1446,7 @@ def fetch(
shutil.copyfile(mirror_file, download_path)
writemsg(_("Local mirror has file: %s\n") % myfile)
break
- except (IOError, OSError) as e:
+ except OSError as e:
if e.errno not in (errno.ENOENT, errno.ESTALE):
raise
del e
@@ -1482,7 +1484,7 @@ def fetch(
if distdir_writable:
try:
os.unlink(download_path)
- except EnvironmentError:
+ except OSError:
pass
elif not orig_digests:
# We don't have a digest, and the temporary file exists.
@@ -1503,7 +1505,7 @@ def fetch(
):
eout = EOutput()
eout.quiet = mysettings.get("PORTAGE_QUIET") == "1"
- eout.ebegin("%s size ;-)" % (myfile,))
+ eout.ebegin("{} size ;-)".format(myfile))
eout.eend(0)
continue
else:
@@ -1555,7 +1557,7 @@ def fetch(
digests = list(digests)
digests.sort()
eout.ebegin(
- "%s %s ;-)" % (myfile, " ".join(digests))
+ "{} {} ;-)".format(myfile, " ".join(digests))
)
eout.eend(0)
continue # fetch any remaining files
@@ -1735,7 +1737,7 @@ def fetch(
try:
variables["DIGESTS"] = " ".join(
[
- "%s:%s" % (k.lower(), v)
+ "{}:{}".format(k.lower(), v)
for k, v in mydigests[myfile].items()
if k != "size"
]
@@ -1784,7 +1786,7 @@ def fetch(
os.unlink(download_path)
fetched = 0
continue
- except EnvironmentError:
+ except OSError:
pass
if mydigests is not None and myfile in mydigests:
@@ -1850,13 +1852,12 @@ def fetch(
"<title>.*(not found|404).*</title>",
re.I | re.M,
)
- with io.open(
+ with open(
_unicode_encode(
download_path,
encoding=_encodings["fs"],
errors="strict",
),
- mode="r",
encoding=_encodings["content"],
errors="replace",
) as f:
@@ -1870,7 +1871,7 @@ def fetch(
)
fetched = 0
continue
- except (IOError, OSError):
+ except OSError:
pass
fetched = 1
continue
diff --git a/lib/portage/package/ebuild/getmaskingstatus.py b/lib/portage/package/ebuild/getmaskingstatus.py
index b47dd8c50..3a24a37b0 100644
--- a/lib/portage/package/ebuild/getmaskingstatus.py
+++ b/lib/portage/package/ebuild/getmaskingstatus.py
@@ -149,7 +149,7 @@ def _getmaskingstatus(mycpv, settings, portdb, myrepo=None):
try:
missing_licenses = settings._getMissingLicenses(mycpv, metadata)
if missing_licenses:
- allowed_tokens = set(["||", "(", ")"])
+ allowed_tokens = {"||", "(", ")"}
allowed_tokens.update(missing_licenses)
license_split = licenses.split()
license_split = [x for x in license_split if x in allowed_tokens]
@@ -168,7 +168,7 @@ def _getmaskingstatus(mycpv, settings, portdb, myrepo=None):
try:
missing_properties = settings._getMissingProperties(mycpv, metadata)
if missing_properties:
- allowed_tokens = set(["||", "(", ")"])
+ allowed_tokens = {"||", "(", ")"}
allowed_tokens.update(missing_properties)
properties_split = properties.split()
properties_split = [x for x in properties_split if x in allowed_tokens]
@@ -185,7 +185,7 @@ def _getmaskingstatus(mycpv, settings, portdb, myrepo=None):
msg.append("in RESTRICT")
rValue.append(_MaskReason("RESTRICT", " ".join(msg)))
except InvalidDependString as e:
- rValue.append(_MaskReason("invalid", "RESTRICT: %s" % (e,)))
+ rValue.append(_MaskReason("invalid", "RESTRICT: {}".format(e)))
# Only show KEYWORDS masks for installed packages
# if they're not masked for any other reason.
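
Here set([...]) calls become set literals; the braces build the set directly without constructing a throwaway list first. An illustrative equivalent, using a placeholder license name rather than anything from the tree:

    # before: allowed_tokens = set(["||", "(", ")"])
    allowed_tokens = {"||", "(", ")"}
    allowed_tokens.update(["some-missing-license"])  # placeholder, not a real license
    license_split = [
        x for x in "|| ( some-missing-license )".split() if x in allowed_tokens
    ]
    assert license_split == ["||", "(", "some-missing-license", ")"]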
diff --git a/lib/portage/package/ebuild/prepare_build_dirs.py b/lib/portage/package/ebuild/prepare_build_dirs.py
index 32a770c99..ad73141c5 100644
--- a/lib/portage/package/ebuild/prepare_build_dirs.py
+++ b/lib/portage/package/ebuild/prepare_build_dirs.py
@@ -151,7 +151,7 @@ def _adjust_perms_msg(settings, msg):
mode="ab",
)
log_file_real = log_file
- except IOError:
+ except OSError:
def write(msg):
pass
@@ -237,11 +237,9 @@ def _prepare_features_dirs(mysettings):
except OSError:
continue
if subdir_st.st_gid != portage_gid or (
- (
- stat.S_ISDIR(subdir_st.st_mode)
- and not dirmode
- == (stat.S_IMODE(subdir_st.st_mode) & dirmode)
- )
+ stat.S_ISDIR(subdir_st.st_mode)
+ and not dirmode
+ == (stat.S_IMODE(subdir_st.st_mode) & dirmode)
):
droppriv_fix = True
break
@@ -396,7 +394,7 @@ def _prepare_workdir(mysettings):
log_subdir = os.path.join(logdir, "build", mysettings["CATEGORY"])
mysettings["PORTAGE_LOG_FILE"] = os.path.join(
log_subdir,
- "%s:%s.log%s" % (mysettings["PF"], logid_time, compress_log_ext),
+ "{}:{}.log{}".format(mysettings["PF"], logid_time, compress_log_ext),
)
else:
log_subdir = logdir
@@ -417,13 +415,14 @@ def _prepare_workdir(mysettings):
try:
_ensure_log_subdirs(logdir, log_subdir)
except PortageException as e:
- writemsg("!!! %s\n" % (e,), noiselevel=-1)
+ writemsg("!!! {}\n".format(e), noiselevel=-1)
if os.access(log_subdir, os.W_OK):
logdir_subdir_ok = True
else:
writemsg(
- "!!! %s: %s\n" % (_("Permission Denied"), log_subdir), noiselevel=-1
+ "!!! {}: {}\n".format(_("Permission Denied"), log_subdir),
+ noiselevel=-1,
)
tmpdir_log_path = os.path.join(mysettings["T"], "build.log%s" % compress_log_ext)
diff --git a/lib/portage/process.py b/lib/portage/process.py
index 84e09f8ec..40636ee94 100644
--- a/lib/portage/process.py
+++ b/lib/portage/process.py
@@ -442,7 +442,7 @@ def spawn(
# We need to catch _any_ exception so that it doesn't
# propagate out of this function and cause exiting
# with anything other than os._exit()
- writemsg("%s:\n %s\n" % (e, " ".join(mycommand)), noiselevel=-1)
+ writemsg("{}:\n {}\n".format(e, " ".join(mycommand)), noiselevel=-1)
traceback.print_exc()
sys.stderr.flush()
@@ -458,7 +458,7 @@ def spawn(
os._exit(1)
if not isinstance(pid, int):
- raise AssertionError("fork returned non-integer: %s" % (repr(pid),))
+ raise AssertionError("fork returned non-integer: {}".format(repr(pid)))
# Add the pid to our local and the global pid lists.
mypids.append(pid)
@@ -529,7 +529,7 @@ def _has_ipv6():
# [Errno 99] Cannot assign requested address.
sock = socket.socket(socket.AF_INET6, socket.SOCK_DGRAM)
sock.bind(("::1", 0))
- except EnvironmentError:
+ except OSError:
__has_ipv6 = False
else:
__has_ipv6 = True
@@ -572,7 +572,7 @@ def _configure_loopback_interface():
rtnl.add_address(ifindex, socket.AF_INET, "10.0.0.1", 8)
if _has_ipv6():
rtnl.add_address(ifindex, socket.AF_INET6, "fd::1", 8)
- except EnvironmentError as e:
+ except OSError as e:
writemsg(
"Unable to configure loopback interface: %s\n" % e.strerror, noiselevel=-1
)
diff --git a/lib/portage/proxy/lazyimport.py b/lib/portage/proxy/lazyimport.py
index c04251f29..89d7d3abe 100644
--- a/lib/portage/proxy/lazyimport.py
+++ b/lib/portage/proxy/lazyimport.py
@@ -136,7 +136,7 @@ class _LazyImportFrom(_LazyImport):
except AttributeError:
# Try to import it as a submodule
try:
- __import__("%s.%s" % (name, attr_name))
+ __import__("{}.{}".format(name, attr_name))
except ImportError:
pass
# If it's a submodule, this will succeed. Otherwise, it may
diff --git a/lib/portage/repository/config.py b/lib/portage/repository/config.py
index 0b591d94f..1dc88668c 100644
--- a/lib/portage/repository/config.py
+++ b/lib/portage/repository/config.py
@@ -531,16 +531,15 @@ class RepoConfig:
repo_name_path = os.path.join(repo_path, REPO_NAME_LOC)
f = None
try:
- f = io.open(
+ f = open(
_unicode_encode(
repo_name_path, encoding=_encodings["fs"], errors="strict"
),
- mode="r",
encoding=_encodings["repo.content"],
errors="replace",
)
return f.readline().strip(), False
- except EnvironmentError:
+ except OSError:
return "x-" + os.path.basename(repo_path), True
finally:
if f is not None:
@@ -589,16 +588,18 @@ class RepoConfig:
return "\n".join(repo_msg)
def __repr__(self):
- return "<portage.repository.config.RepoConfig(name=%r, location=%r)>" % (
- self.name,
- _unicode_decode(self.location),
+ return (
+ "<portage.repository.config.RepoConfig(name={!r}, location={!r})>".format(
+ self.name,
+ _unicode_decode(self.location),
+ )
)
def __str__(self):
d = {}
for k in self.__slots__:
d[k] = getattr(self, k, None)
- return "%s" % (d,)
+ return "{}".format(d)
class RepoConfigLoader:
@@ -1258,27 +1259,27 @@ class RepoConfigLoader:
continue
if getattr(repo, key) is not None:
if key in bool_keys:
- config_string += "%s = %s\n" % (
+ config_string += "{} = {}\n".format(
key.replace("_", "-"),
"true" if getattr(repo, key) else "false",
)
elif key in str_or_int_keys:
- config_string += "%s = %s\n" % (
+ config_string += "{} = {}\n".format(
key.replace("_", "-"),
getattr(repo, key),
)
elif key in str_tuple_keys:
- config_string += "%s = %s\n" % (
+ config_string += "{} = {}\n".format(
key.replace("_", "-"),
" ".join(getattr(repo, key)),
)
elif key in repo_config_tuple_keys:
- config_string += "%s = %s\n" % (
+ config_string += "{} = {}\n".format(
key.replace("_", "-"),
" ".join(x.name for x in getattr(repo, key)),
)
for o, v in repo.module_specific_options.items():
- config_string += "%s = %s\n" % (o, v)
+ config_string += "{} = {}\n".format(o, v)
return config_string.lstrip("\n")
diff --git a/lib/portage/sync/controller.py b/lib/portage/sync/controller.py
index 987aa5481..79dfb19db 100644
--- a/lib/portage/sync/controller.py
+++ b/lib/portage/sync/controller.py
@@ -137,7 +137,7 @@ class SyncManager:
if repo.sync_type in self.module_names:
tasks = [self.module_controller.get_class(repo.sync_type)]
else:
- msg = "\n%s: Sync module '%s' is not an installed/known type'\n" % (
+ msg = "\n{}: Sync module '{}' is not an installed/known type'\n".format(
bad("ERROR"),
repo.sync_type,
)
@@ -212,7 +212,9 @@ class SyncManager:
return succeeded
def pre_sync(self, repo):
- msg = ">>> Syncing repository '%s' into '%s'..." % (repo.name, repo.location)
+ msg = ">>> Syncing repository '{}' into '{}'...".format(
+ repo.name, repo.location
+ )
self.logger(self.xterm_titles, msg)
writemsg_level(msg + "\n")
try:
diff --git a/lib/portage/sync/modules/git/git.py b/lib/portage/sync/modules/git/git.py
index 8bc5a3811..7f044214c 100644
--- a/lib/portage/sync/modules/git/git.py
+++ b/lib/portage/sync/modules/git/git.py
@@ -52,7 +52,7 @@ class GitSync(NewBase):
self.logger(
self.xterm_titles, "Created new directory %s" % self.repo.location
)
- except IOError:
+ except OSError:
return (1, False)
sync_uri = self.repo.sync_uri
@@ -62,22 +62,22 @@ class GitSync(NewBase):
git_cmd_opts = ""
if self.repo.module_specific_options.get("sync-git-env"):
shlexed_env = shlex_split(self.repo.module_specific_options["sync-git-env"])
- env = dict(
- (k, v)
+ env = {
+ k: v
for k, _, v in (assignment.partition("=") for assignment in shlexed_env)
if k
- )
+ }
self.spawn_kwargs["env"].update(env)
if self.repo.module_specific_options.get("sync-git-clone-env"):
shlexed_env = shlex_split(
self.repo.module_specific_options["sync-git-clone-env"]
)
- clone_env = dict(
- (k, v)
+ clone_env = {
+ k: v
for k, _, v in (assignment.partition("=") for assignment in shlexed_env)
if k
- )
+ }
self.spawn_kwargs["env"].update(clone_env)
if self.settings.get("PORTAGE_QUIET") == "1":
@@ -93,7 +93,7 @@ class GitSync(NewBase):
git_cmd_opts += (
" %s" % self.repo.module_specific_options["sync-git-clone-extra-opts"]
)
- git_cmd = "%s clone%s %s ." % (
+ git_cmd = "{} clone{} {} .".format(
self.bin_command,
git_cmd_opts,
portage._shell_quote(sync_uri),
@@ -101,7 +101,7 @@ class GitSync(NewBase):
writemsg_level(git_cmd + "\n")
exitcode = portage.process.spawn_bash(
- "cd %s ; exec %s" % (portage._shell_quote(self.repo.location), git_cmd),
+ "cd {} ; exec {}".format(portage._shell_quote(self.repo.location), git_cmd),
**self.spawn_kwargs,
)
if exitcode != os.EX_OK:
@@ -129,22 +129,22 @@ class GitSync(NewBase):
quiet = self.settings.get("PORTAGE_QUIET") == "1"
if self.repo.module_specific_options.get("sync-git-env"):
shlexed_env = shlex_split(self.repo.module_specific_options["sync-git-env"])
- env = dict(
- (k, v)
+ env = {
+ k: v
for k, _, v in (assignment.partition("=") for assignment in shlexed_env)
if k
- )
+ }
self.spawn_kwargs["env"].update(env)
if self.repo.module_specific_options.get("sync-git-pull-env"):
shlexed_env = shlex_split(
self.repo.module_specific_options["sync-git-pull-env"]
)
- pull_env = dict(
- (k, v)
+ pull_env = {
+ k: v
for k, _, v in (assignment.partition("=") for assignment in shlexed_env)
if k
- )
+ }
self.spawn_kwargs["env"].update(pull_env)
if self.settings.get("PORTAGE_QUIET") == "1":
@@ -201,7 +201,7 @@ class GitSync(NewBase):
writemsg_level(msg + "\n", level=logging.ERROR, noiselevel=-1)
return (exitcode, False)
- git_cmd = "%s fetch %s%s" % (
+ git_cmd = "{} fetch {}{}".format(
self.bin_command,
remote_branch.partition("/")[0],
git_cmd_opts,
@@ -215,7 +215,7 @@ class GitSync(NewBase):
)
exitcode = portage.process.spawn_bash(
- "cd %s ; exec %s" % (portage._shell_quote(self.repo.location), git_cmd),
+ "cd {} ; exec {}".format(portage._shell_quote(self.repo.location), git_cmd),
**self.spawn_kwargs,
)
@@ -300,8 +300,10 @@ class GitSync(NewBase):
env = None
if openpgp_env is not None and self.repo.sync_openpgp_key_path is not None:
try:
- out.einfo("Using keys from %s" % (self.repo.sync_openpgp_key_path,))
- with io.open(self.repo.sync_openpgp_key_path, "rb") as f:
+ out.einfo(
+ "Using keys from {}".format(self.repo.sync_openpgp_key_path)
+ )
+ with open(self.repo.sync_openpgp_key_path, "rb") as f:
openpgp_env.import_key(f)
self._refresh_keys(openpgp_env)
except (GematoException, asyncio.TimeoutError) as e:
@@ -348,7 +350,7 @@ class GitSync(NewBase):
expl = "no signature"
else:
expl = "unknown issue"
- out.eerror("No valid signature found: %s" % (expl,))
+ out.eerror("No valid signature found: {}".format(expl))
return False
finally:
if openpgp_env is not None:
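
The repeated env-parsing blocks in this module now use a dict comprehension over shlex-split KEY=VALUE assignments. A compact sketch of what that comprehension does; the sample option string is invented and stdlib shlex.split stands in for Portage's shlex_split:

    import shlex

    shlexed_env = shlex.split("GIT_SSL_NO_VERIFY=1 GIT_TRACE= LONE_WORD")
    # before: dict((k, v) for k, _, v in (a.partition("=") for a in shlexed_env) if k)
    env = {
        k: v
        for k, _, v in (assignment.partition("=") for assignment in shlexed_env)
        if k
    }
    # "LONE_WORD" has no "=", so partition() gives ("LONE_WORD", "", "") and the
    # entry keeps an empty value; only entries with an empty key are dropped.
    assert env == {"GIT_SSL_NO_VERIFY": "1", "GIT_TRACE": "", "LONE_WORD": ""}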
diff --git a/lib/portage/sync/modules/mercurial/mercurial.py b/lib/portage/sync/modules/mercurial/mercurial.py
index 486b4fdd6..bd8135c05 100644
--- a/lib/portage/sync/modules/mercurial/mercurial.py
+++ b/lib/portage/sync/modules/mercurial/mercurial.py
@@ -37,7 +37,7 @@ class MercurialSync(NewBase):
self.logger(
self.xterm_titles, "Created new directory %s" % self.repo.location
)
- except IOError:
+ except OSError:
return (1, False)
sync_uri = self.repo.sync_uri
@@ -49,22 +49,22 @@ class MercurialSync(NewBase):
shlexed_env = shlex_split(
self.repo.module_specific_options["sync-mercurial-env"]
)
- env = dict(
- (k, v)
+ env = {
+ k: v
for k, _, v in (assignment.partition("=") for assignment in shlexed_env)
if k
- )
+ }
self.spawn_kwargs["env"].update(env)
if self.repo.module_specific_options.get("sync-mercurial-clone-env"):
shlexed_env = shlex_split(
self.repo.module_specific_options["sync-mercurial-clone-env"]
)
- clone_env = dict(
- (k, v)
+ clone_env = {
+ k: v
for k, _, v in (assignment.partition("=") for assignment in shlexed_env)
if k
- )
+ }
self.spawn_kwargs["env"].update(clone_env)
if self.settings.get("PORTAGE_QUIET") == "1":
@@ -74,7 +74,7 @@ class MercurialSync(NewBase):
" %s"
% self.repo.module_specific_options["sync-mercurial-clone-extra-opts"]
)
- hg_cmd = "%s clone%s %s ." % (
+ hg_cmd = "{} clone{} {} .".format(
self.bin_command,
hg_cmd_opts,
portage._shell_quote(sync_uri),
@@ -105,22 +105,22 @@ class MercurialSync(NewBase):
shlexed_env = shlex_split(
self.repo.module_specific_options["sync-mercurial-env"]
)
- env = dict(
- (k, v)
+ env = {
+ k: v
for k, _, v in (assignment.partition("=") for assignment in shlexed_env)
if k
- )
+ }
self.spawn_kwargs["env"].update(env)
if self.repo.module_specific_options.get("sync-mercurial-pull-env"):
shlexed_env = shlex_split(
self.repo.module_specific_options["sync-mercurial-pull-env"]
)
- pull_env = dict(
- (k, v)
+ pull_env = {
+ k: v
for k, _, v in (assignment.partition("=") for assignment in shlexed_env)
if k
- )
+ }
self.spawn_kwargs["env"].update(pull_env)
if self.settings.get("PORTAGE_QUIET") == "1":
@@ -130,7 +130,7 @@ class MercurialSync(NewBase):
" %s"
% self.repo.module_specific_options["sync-mercurial-pull-extra-opts"]
)
- hg_cmd = "%s pull -u%s" % (self.bin_command, hg_cmd_opts)
+ hg_cmd = "{} pull -u{}".format(self.bin_command, hg_cmd_opts)
writemsg_level(hg_cmd + "\n")
rev_cmd = [self.bin_command, "id", "--id", "--rev", "tip"]
diff --git a/lib/portage/sync/modules/rsync/rsync.py b/lib/portage/sync/modules/rsync/rsync.py
index 5f4cf1aeb..90c74a730 100644
--- a/lib/portage/sync/modules/rsync/rsync.py
+++ b/lib/portage/sync/modules/rsync/rsync.py
@@ -160,8 +160,10 @@ class RsyncSync(NewBase):
# so we may as well bail out before actual rsync happens.
if openpgp_env is not None and self.repo.sync_openpgp_key_path is not None:
try:
- out.einfo("Using keys from %s" % (self.repo.sync_openpgp_key_path,))
- with io.open(self.repo.sync_openpgp_key_path, "rb") as f:
+ out.einfo(
+ "Using keys from {}".format(self.repo.sync_openpgp_key_path)
+ )
+ with open(self.repo.sync_openpgp_key_path, "rb") as f:
openpgp_env.import_key(f)
self._refresh_keys(openpgp_env)
except (GematoException, asyncio.TimeoutError) as e:
@@ -264,7 +266,7 @@ class RsyncSync(NewBase):
getaddrinfo_host, None, family, socket.SOCK_STREAM
)
)
- except socket.error as e:
+ except OSError as e:
writemsg_level(
"!!! getaddrinfo failed for '%s': %s\n"
% (_unicode_decode(hostname), str(e)),
@@ -446,11 +448,11 @@ class RsyncSync(NewBase):
out.ewarn(
"You may want to try using another mirror and/or reporting this one:"
)
- out.ewarn(" %s" % (dosyncuri,))
+ out.ewarn(" {}".format(dosyncuri))
out.ewarn("")
out.quiet = quiet
- out.einfo("Manifest timestamp: %s UTC" % (ts.ts,))
+ out.einfo("Manifest timestamp: {} UTC".format(ts.ts))
out.einfo("Valid OpenPGP signature found:")
out.einfo(
"- primary key: %s"
@@ -464,12 +466,12 @@ class RsyncSync(NewBase):
# if nothing has changed, skip the actual Manifest
# verification
if not local_state_unchanged:
- out.ebegin("Verifying %s" % (download_dir,))
+ out.ebegin("Verifying {}".format(download_dir))
m.assert_directory_verifies()
out.eend(0)
except GematoException as e:
writemsg_level(
- "!!! Manifest verification failed:\n%s\n" % (e,),
+ "!!! Manifest verification failed:\n{}\n".format(e),
level=logging.ERROR,
noiselevel=-1,
)
@@ -548,7 +550,7 @@ class RsyncSync(NewBase):
self.self.xterm_titles,
"Created New Directory %s " % self.repo.location,
)
- except IOError:
+ except OSError:
return (1, False)
return self.update()
diff --git a/lib/portage/sync/modules/svn/svn.py b/lib/portage/sync/modules/svn/svn.py
index 788e394cd..35ce3b75e 100644
--- a/lib/portage/sync/modules/svn/svn.py
+++ b/lib/portage/sync/modules/svn/svn.py
@@ -59,7 +59,7 @@ class SVNSync(NewBase):
# svn update
exitcode = portage.process.spawn_bash(
- "cd %s; exec svn update" % (portage._shell_quote(self.repo.location),),
+ "cd {}; exec svn update".format(portage._shell_quote(self.repo.location)),
**self.spawn_kwargs
)
if exitcode != os.EX_OK:
@@ -77,7 +77,7 @@ class SVNSync(NewBase):
@rtype: (int, bool)
"""
exitcode = portage.process.spawn_bash(
- "cd %s; exec svn upgrade" % (portage._shell_quote(self.repo.location),),
+ "cd {}; exec svn upgrade".format(portage._shell_quote(self.repo.location)),
**self.spawn_kwargs
)
if exitcode != os.EX_OK:
diff --git a/lib/portage/sync/modules/webrsync/webrsync.py b/lib/portage/sync/modules/webrsync/webrsync.py
index 0e2f63472..db77c2b8a 100644
--- a/lib/portage/sync/modules/webrsync/webrsync.py
+++ b/lib/portage/sync/modules/webrsync/webrsync.py
@@ -46,7 +46,7 @@ class WebRsync(SyncBase):
self.bin_command = portage.process.find_binary(self._bin_command)
self.bin_pkg = ">=app-portage/emerge-delta-webrsync-3.7.5"
- return super(WebRsync, self).has_bin
+ return super().has_bin
def sync(self, **kwargs):
"""Sync the repository"""
@@ -97,8 +97,10 @@ class WebRsync(SyncBase):
out = portage.output.EOutput(quiet=quiet)
try:
- out.einfo("Using keys from %s" % (self.repo.sync_openpgp_key_path,))
- with io.open(self.repo.sync_openpgp_key_path, "rb") as f:
+ out.einfo(
+ "Using keys from {}".format(self.repo.sync_openpgp_key_path)
+ )
+ with open(self.repo.sync_openpgp_key_path, "rb") as f:
openpgp_env.import_key(f)
self._refresh_keys(openpgp_env)
self.spawn_kwargs["env"]["PORTAGE_GPG_DIR"] = openpgp_env.home
diff --git a/lib/portage/sync/old_tree_timestamp.py b/lib/portage/sync/old_tree_timestamp.py
index 3558a25ad..44fe16728 100644
--- a/lib/portage/sync/old_tree_timestamp.py
+++ b/lib/portage/sync/old_tree_timestamp.py
@@ -82,7 +82,9 @@ def old_tree_timestamp_warn(portdir, settings):
warnsync = float(settings.get(var_name, default_warnsync))
except ValueError:
writemsg_level(
- "!!! %s contains non-numeric value: %s\n" % (var_name, settings[var_name]),
+ "!!! {} contains non-numeric value: {}\n".format(
+ var_name, settings[var_name]
+ ),
level=logging.ERROR,
noiselevel=-1,
)
diff --git a/lib/portage/sync/syncbase.py b/lib/portage/sync/syncbase.py
index 94c873e1f..bd12b875e 100644
--- a/lib/portage/sync/syncbase.py
+++ b/lib/portage/sync/syncbase.py
@@ -73,7 +73,7 @@ class SyncBase:
try:
self.repo_storage
except RepoStorageException as e:
- writemsg_level("!!! %s\n" % (e,), level=logging.ERROR, noiselevel=-1)
+ writemsg_level("!!! {}\n".format(e), level=logging.ERROR, noiselevel=-1)
return False
return True
@@ -293,11 +293,9 @@ class SyncBase:
out.ebegin(
"Refreshing keys from keyserver{}".format(
- (
- ""
- if self.repo.sync_openpgp_keyserver is None
- else " " + self.repo.sync_openpgp_keyserver
- )
+ ""
+ if self.repo.sync_openpgp_keyserver is None
+ else " " + self.repo.sync_openpgp_keyserver
)
)
retry_decorator = self._key_refresh_retry_decorator()
@@ -317,7 +315,7 @@ class SyncBase:
keyserver=self.repo.sync_openpgp_keyserver
)
except Exception as e:
- writemsg_level("%s\n" % (e,), level=logging.ERROR, noiselevel=-1)
+ writemsg_level("{}\n".format(e), level=logging.ERROR, noiselevel=-1)
raise # retry
# The ThreadPoolExecutor that asyncio uses by default
diff --git a/lib/portage/tests/__init__.py b/lib/portage/tests/__init__.py
index f74f992d7..e28c4af3b 100644
--- a/lib/portage/tests/__init__.py
+++ b/lib/portage/tests/__init__.py
@@ -89,7 +89,7 @@ def main():
for mydir in getTestDirs(basedir):
testsubdir = mydir.name
for name in getTestNames(mydir):
- print("%s/%s/%s.py" % (testdir, testsubdir, name))
+ print("{}/{}/{}.py".format(testdir, testsubdir, name))
return os.EX_OK
if len(options.tests) > 1:
@@ -181,7 +181,7 @@ class TextTestResult(_TextTestResult):
"""
def __init__(self, stream, descriptions, verbosity):
- super(TextTestResult, self).__init__(stream, descriptions, verbosity)
+ super().__init__(stream, descriptions, verbosity)
self.todoed = []
self.portage_skipped = []
@@ -273,14 +273,14 @@ class TestCase(unittest.TestCase):
testMethod()
ok = True
except unittest.SkipTest as e:
- result.addPortageSkip(self, "%s: SKIP: %s" % (testMethod, str(e)))
+ result.addPortageSkip(self, "{}: SKIP: {}".format(testMethod, str(e)))
except self.failureException:
if self.portage_skip is not None:
if self.portage_skip is True:
result.addPortageSkip(self, "%s: SKIP" % testMethod)
else:
result.addPortageSkip(
- self, "%s: SKIP: %s" % (testMethod, self.portage_skip)
+ self, "{}: SKIP: {}".format(testMethod, self.portage_skip)
)
elif self.todo:
result.addTodo(self, "%s: TODO" % testMethod)
@@ -322,7 +322,7 @@ class TestCase(unittest.TestCase):
excName = excClass.__name__
else:
excName = str(excClass)
- raise self.failureException("%s not raised: %s" % (excName, msg))
+ raise self.failureException("{} not raised: {}".format(excName, msg))
def assertNotExists(self, path):
"""Make sure |path| does not exist"""
diff --git a/lib/portage/tests/bin/test_eapi7_ver_funcs.py b/lib/portage/tests/bin/test_eapi7_ver_funcs.py
index a01901e27..c71d90913 100644
--- a/lib/portage/tests/bin/test_eapi7_ver_funcs.py
+++ b/lib/portage/tests/bin/test_eapi7_ver_funcs.py
@@ -14,9 +14,11 @@ class TestEAPI7VerFuncs(TestCase):
Test that commands in test_cases produce expected output.
"""
with tempfile.NamedTemporaryFile("w") as test_script:
- test_script.write('source "%s"/eapi7-ver-funcs.sh\n' % (PORTAGE_BIN_PATH,))
+ test_script.write(
+ 'source "{}"/eapi7-ver-funcs.sh\n'.format(PORTAGE_BIN_PATH)
+ )
for cmd, exp in test_cases:
- test_script.write("%s\n" % (cmd,))
+ test_script.write("{}\n".format(cmd))
test_script.flush()
s = subprocess.Popen(
@@ -30,7 +32,7 @@ class TestEAPI7VerFuncs(TestCase):
for test_case, result in zip(test_cases, sout.decode().splitlines()):
cmd, exp = test_case
self.assertEqual(
- result, exp, "%s -> %s; expected: %s" % (cmd, result, exp)
+ result, exp, "{} -> {}; expected: {}".format(cmd, result, exp)
)
def _test_return(self, test_cases):
@@ -38,9 +40,11 @@ class TestEAPI7VerFuncs(TestCase):
Test that commands in test_cases give appropriate exit codes.
"""
with tempfile.NamedTemporaryFile("w+") as test_script:
- test_script.write('source "%s"/eapi7-ver-funcs.sh\n' % (PORTAGE_BIN_PATH,))
+ test_script.write(
+ 'source "{}"/eapi7-ver-funcs.sh\n'.format(PORTAGE_BIN_PATH)
+ )
for cmd, exp in test_cases:
- test_script.write("%s; echo $?\n" % (cmd,))
+ test_script.write("{}; echo $?\n".format(cmd))
test_script.flush()
s = subprocess.Popen(
@@ -54,7 +58,7 @@ class TestEAPI7VerFuncs(TestCase):
for test_case, result in zip(test_cases, sout.decode().splitlines()):
cmd, exp = test_case
self.assertEqual(
- result, exp, "%s -> %s; expected: %s" % (cmd, result, exp)
+ result, exp, "{} -> {}; expected: {}".format(cmd, result, exp)
)
def _test_fail(self, test_cases):
@@ -64,9 +68,9 @@ class TestEAPI7VerFuncs(TestCase):
for cmd in test_cases:
test = """
-source "%s"/eapi7-ver-funcs.sh
-die() { exit 1; }
-%s""" % (
+source "{}"/eapi7-ver-funcs.sh
+die() {{ exit 1; }}
+{}""".format(
PORTAGE_BIN_PATH,
cmd,
)
diff --git a/lib/portage/tests/dbapi/test_fakedbapi.py b/lib/portage/tests/dbapi/test_fakedbapi.py
index 08bffbe02..c5fe96e37 100644
--- a/lib/portage/tests/dbapi/test_fakedbapi.py
+++ b/lib/portage/tests/dbapi/test_fakedbapi.py
@@ -101,7 +101,9 @@ class TestFakedbapi(TestCase):
self.assertEqual(
fakedb.match(atom),
expected_result,
- "fakedb.match('%s') = %s != %s" % (atom, result, expected_result),
+ "fakedb.match('{}') = {} != {}".format(
+ atom, result, expected_result
+ ),
)
finally:
shutil.rmtree(tempdir)
diff --git a/lib/portage/tests/dbapi/test_portdb_cache.py b/lib/portage/tests/dbapi/test_portdb_cache.py
index ad97d82ba..a782853d6 100644
--- a/lib/portage/tests/dbapi/test_portdb_cache.py
+++ b/lib/portage/tests/dbapi/test_portdb_cache.py
@@ -231,7 +231,7 @@ class PortdbCacheTestCase(TestCase):
for i, args in enumerate(test_commands):
if hasattr(args[0], "__call__"):
- self.assertTrue(args[0](), "callable at index %s failed" % (i,))
+ self.assertTrue(args[0](), "callable at index {} failed".format(i))
continue
proc = subprocess.Popen(args, env=env, stdout=stdout)
diff --git a/lib/portage/tests/dep/testAtom.py b/lib/portage/tests/dep/testAtom.py
index a6fd92dc3..54f364b1f 100644
--- a/lib/portage/tests/dep/testAtom.py
+++ b/lib/portage/tests/dep/testAtom.py
@@ -218,23 +218,27 @@ class TestAtom(TestCase):
self.assertEqual(
op,
a.operator,
- msg="Atom('%s').operator = %s == '%s'" % (atom, a.operator, op),
+ msg="Atom('{}').operator = {} == '{}'".format(atom, a.operator, op),
)
self.assertEqual(
- cp, a.cp, msg="Atom('%s').cp = %s == '%s'" % (atom, a.cp, cp)
+ cp, a.cp, msg="Atom('{}').cp = {} == '{}'".format(atom, a.cp, cp)
)
if ver is not None:
- cpv = "%s-%s" % (cp, ver)
+ cpv = "{}-{}".format(cp, ver)
else:
cpv = cp
self.assertEqual(
- cpv, a.cpv, msg="Atom('%s').cpv = %s == '%s'" % (atom, a.cpv, cpv)
+ cpv, a.cpv, msg="Atom('{}').cpv = {} == '{}'".format(atom, a.cpv, cpv)
)
self.assertEqual(
- slot, a.slot, msg="Atom('%s').slot = %s == '%s'" % (atom, a.slot, slot)
+ slot,
+ a.slot,
+ msg="Atom('{}').slot = {} == '{}'".format(atom, a.slot, slot),
)
self.assertEqual(
- repo, a.repo, msg="Atom('%s').repo == %s == '%s'" % (atom, a.repo, repo)
+ repo,
+ a.repo,
+ msg="Atom('{}').repo == {} == '{}'".format(atom, a.repo, repo),
)
if a.use:
@@ -244,7 +248,7 @@ class TestAtom(TestCase):
self.assertEqual(
use,
returned_use,
- msg="Atom('%s').use = %s == '%s'" % (atom, returned_use, use),
+ msg="Atom('{}').use = {} == '{}'".format(atom, returned_use, use),
)
for atom, allow_wildcard, allow_repo in tests_xfail:
@@ -302,7 +306,7 @@ class TestAtom(TestCase):
self.assertEqual(
v,
getattr(a, k),
- msg="Atom('%s').%s = %s == '%s'" % (atom, k, getattr(a, k), v),
+ msg="Atom('{}').{} = {} == '{}'".format(atom, k, getattr(a, k), v),
)
def test_intersects(self):
@@ -323,7 +327,7 @@ class TestAtom(TestCase):
self.assertEqual(
Atom(atom).intersects(Atom(other)),
expected_result,
- "%s and %s should intersect: %s" % (atom, other, expected_result),
+ "{} and {} should intersect: {}".format(atom, other, expected_result),
)
def test_violated_conditionals(self):
diff --git a/lib/portage/tests/dep/testStandalone.py b/lib/portage/tests/dep/testStandalone.py
index fa8ffc99c..b11e2fc38 100644
--- a/lib/portage/tests/dep/testStandalone.py
+++ b/lib/portage/tests/dep/testStandalone.py
@@ -32,12 +32,12 @@ class TestStandalone(TestCase):
self.assertEqual(
cpvequal(cpv1, cpv2),
expected_result,
- "cpvequal('%s', '%s') != %s" % (cpv1, cpv2, expected_result),
+ "cpvequal('{}', '{}') != {}".format(cpv1, cpv2, expected_result),
)
for cpv1, cpv2 in test_cases_xfail:
self.assertRaisesMsg(
- "cpvequal(%s, %s)" % (cpv1, cpv2),
+ "cpvequal({}, {})".format(cpv1, cpv2),
PortageException,
cpvequal,
cpv1,
diff --git a/lib/portage/tests/dep/test_dep_getusedeps.py b/lib/portage/tests/dep/test_dep_getusedeps.py
index 3a4ada7fa..8bd9c2c1e 100644
--- a/lib/portage/tests/dep/test_dep_getusedeps.py
+++ b/lib/portage/tests/dep/test_dep_getusedeps.py
@@ -23,7 +23,7 @@ class DepGetUseDeps(TestCase):
if slot:
cpv += ":" + slot
if isinstance(use, tuple):
- cpv += "[%s]" % (",".join(use),)
+ cpv += "[{}]".format(",".join(use))
self.assertEqual(dep_getusedeps(cpv), use)
else:
if len(use):
diff --git a/lib/portage/tests/dep/test_get_operator.py b/lib/portage/tests/dep/test_get_operator.py
index 7815961a0..c2fc0a397 100644
--- a/lib/portage/tests/dep/test_get_operator.py
+++ b/lib/portage/tests/dep/test_get_operator.py
@@ -30,7 +30,7 @@ class GetOperator(TestCase):
self.assertEqual(
result,
test[1],
- msg="get_operator(%s) != %s" % (test[0] + atom, test[1]),
+ msg="get_operator({}) != {}".format(test[0] + atom, test[1]),
)
result = get_operator("sys-apps/portage")
diff --git a/lib/portage/tests/dep/test_get_required_use_flags.py b/lib/portage/tests/dep/test_get_required_use_flags.py
index f9c39d530..02650b5be 100644
--- a/lib/portage/tests/dep/test_get_required_use_flags.py
+++ b/lib/portage/tests/dep/test_get_required_use_flags.py
@@ -41,7 +41,7 @@ class TestCheckRequiredUse(TestCase):
for required_use in test_cases_xfail:
self.assertRaisesMsg(
- "REQUIRED_USE: '%s'" % (required_use,),
+ "REQUIRED_USE: '{}'".format(required_use),
InvalidDependString,
get_required_use_flags,
required_use,
diff --git a/lib/portage/tests/dep/test_isvalidatom.py b/lib/portage/tests/dep/test_isvalidatom.py
index c21fb6376..100d9209c 100644
--- a/lib/portage/tests/dep/test_isvalidatom.py
+++ b/lib/portage/tests/dep/test_isvalidatom.py
@@ -230,5 +230,5 @@ class IsValidAtom(TestCase):
)
),
test_case.expected,
- msg="isvalidatom(%s) != %s" % (test_case.atom, test_case.expected),
+ msg="isvalidatom({}) != {}".format(test_case.atom, test_case.expected),
)
diff --git a/lib/portage/tests/dep/test_match_from_list.py b/lib/portage/tests/dep/test_match_from_list.py
index 7d5257719..5a405d89f 100644
--- a/lib/portage/tests/dep/test_match_from_list.py
+++ b/lib/portage/tests/dep/test_match_from_list.py
@@ -16,7 +16,7 @@ class Package:
self.cp = atom.cp
slot = atom.slot
if atom.sub_slot:
- slot = "%s/%s" % (slot, atom.sub_slot)
+ slot = "{}/{}".format(slot, atom.sub_slot)
if not slot:
slot = "0"
self.cpv = _pkg_str(atom.cpv, slot=slot, repo=atom.repo)
diff --git a/lib/portage/tests/dep/test_use_reduce.py b/lib/portage/tests/dep/test_use_reduce.py
index c4a24e8c6..e17675eda 100644
--- a/lib/portage/tests/dep/test_use_reduce.py
+++ b/lib/portage/tests/dep/test_use_reduce.py
@@ -54,7 +54,7 @@ class UseReduceTestCase:
subset=self.subset,
)
except InvalidDependString as e:
- raise InvalidDependString("%s: %s" % (e, self.deparray))
+ raise InvalidDependString("{}: {}".format(e, self.deparray))
class UseReduce(TestCase):
diff --git a/lib/portage/tests/ebuild/test_array_fromfile_eof.py b/lib/portage/tests/ebuild/test_array_fromfile_eof.py
index 2054eee42..282a613be 100644
--- a/lib/portage/tests/ebuild/test_array_fromfile_eof.py
+++ b/lib/portage/tests/ebuild/test_array_fromfile_eof.py
@@ -27,7 +27,7 @@ class ArrayFromfileEofTestCase(TestCase):
a = array.array("B")
try:
a.fromfile(f, len(input_bytes) + 1)
- except (EOFError, IOError):
+ except (EOFError, OSError):
# python-3.0 lost data here
eof = True
diff --git a/lib/portage/tests/ebuild/test_config.py b/lib/portage/tests/ebuild/test_config.py
index d123d9abb..a11d271d9 100644
--- a/lib/portage/tests/ebuild/test_config.py
+++ b/lib/portage/tests/ebuild/test_config.py
@@ -112,7 +112,7 @@ class ConfigTestCase(TestCase):
self.assertEqual(lic_man._accept_license_str, None)
self.assertEqual(lic_man._accept_license, None)
self.assertEqual(lic_man._license_groups, {"EULA": frozenset(["TEST"])})
- self.assertEqual(lic_man._undef_lic_groups, set(["TEST"]))
+ self.assertEqual(lic_man._undef_lic_groups, {"TEST"})
self.assertEqual(lic_man.extract_global_changes(), "TEST TEST2")
self.assertEqual(lic_man.extract_global_changes(), "")
@@ -370,7 +370,7 @@ class ConfigTestCase(TestCase):
user_config_dir = os.path.join(eprefix, USER_CONFIG_PATH)
os.makedirs(user_config_dir)
- with io.open(
+ with open(
os.path.join(user_config_dir, "package.env"),
mode="w",
encoding=_encodings["content"],
@@ -381,7 +381,7 @@ class ConfigTestCase(TestCase):
env_dir = os.path.join(user_config_dir, "env")
os.makedirs(env_dir)
for k, v in env_files.items():
- with io.open(
+ with open(
os.path.join(env_dir, k), mode="w", encoding=_encodings["content"]
) as f:
for line in v:
diff --git a/lib/portage/tests/ebuild/test_fetch.py b/lib/portage/tests/ebuild/test_fetch.py
index cfca0d4e1..859b14fad 100644
--- a/lib/portage/tests/ebuild/test_fetch.py
+++ b/lib/portage/tests/ebuild/test_fetch.py
@@ -171,10 +171,10 @@ class EbuildFetchTestCase(TestCase):
for k, v in orig_distfiles.items():
filename = DistfileName(
k,
- digests=dict(
- (algo, checksum_str(v, hashname=algo))
+ digests={
+ algo: checksum_str(v, hashname=algo)
for algo in MANIFEST2_HASH_DEFAULTS
- ),
+ },
)
distfiles[filename] = v
@@ -186,7 +186,7 @@ class EbuildFetchTestCase(TestCase):
shutil.rmtree(settings["DISTDIR"])
os.makedirs(settings["DISTDIR"])
- with open(os.path.join(settings["DISTDIR"], "layout.conf"), "wt") as f:
+ with open(os.path.join(settings["DISTDIR"], "layout.conf"), "w") as f:
f.write(layout_data)
if any(isinstance(layout, ContentHashLayout) for layout in layouts):
@@ -254,7 +254,9 @@ class EbuildFetchTestCase(TestCase):
"""
% orig_fetchcommand.replace("${FILE}", "${FILE}.__download__")
)
- settings["FETCHCOMMAND"] = '"%s" "%s" "${URI}" "${DISTDIR}" "${FILE}"' % (
+ settings[
+ "FETCHCOMMAND"
+ ] = '"{}" "{}" "${{URI}}" "${{DISTDIR}}" "${{FILE}}"'.format(
BASH_BINARY,
temp_fetchcommand,
)
@@ -744,10 +746,10 @@ class EbuildFetchTestCase(TestCase):
filename = DistfileName(
"foo-1.tar.gz",
- digests=dict(
- (algo, checksum_str(b"", hashname=algo))
+ digests={
+ algo: checksum_str(b"", hashname=algo)
for algo in MANIFEST2_HASH_DEFAULTS
- ),
+ },
)
# Raise KeyError for a hash algorithm SHA1 which is not in MANIFEST2_HASH_DEFAULTS.
@@ -851,10 +853,10 @@ class EbuildFetchTestCase(TestCase):
def test_filename_hash_layout_get_filenames(self):
filename = DistfileName(
"foo-1.tar.gz",
- digests=dict(
- (algo, checksum_str(b"", hashname=algo))
+ digests={
+ algo: checksum_str(b"", hashname=algo)
for algo in MANIFEST2_HASH_DEFAULTS
- ),
+ },
)
layouts = (
FlatLayout(),
diff --git a/lib/portage/tests/ebuild/test_spawn.py b/lib/portage/tests/ebuild/test_spawn.py
index ad8e121db..8cbd7220f 100644
--- a/lib/portage/tests/ebuild/test_spawn.py
+++ b/lib/portage/tests/ebuild/test_spawn.py
@@ -32,9 +32,8 @@ class SpawnTestCase(TestCase):
proc.start()
os.close(null_fd)
self.assertEqual(proc.wait(), os.EX_OK)
- f = io.open(
+ f = open(
_unicode_encode(logfile, encoding=_encodings["fs"], errors="strict"),
- mode="r",
encoding=_encodings["content"],
errors="strict",
)
@@ -49,7 +48,7 @@ class SpawnTestCase(TestCase):
if logfile:
try:
os.unlink(logfile)
- except EnvironmentError as e:
+ except OSError as e:
if e.errno != errno.ENOENT:
raise
del e
diff --git a/lib/portage/tests/ebuild/test_use_expand_incremental.py b/lib/portage/tests/ebuild/test_use_expand_incremental.py
index 23c8d17b3..3b1c432fd 100644
--- a/lib/portage/tests/ebuild/test_use_expand_incremental.py
+++ b/lib/portage/tests/ebuild/test_use_expand_incremental.py
@@ -100,7 +100,7 @@ class UseExpandIncrementalTestCase(TestCase):
prof_path = os.path.join(profile_root, p)
ensure_dirs(prof_path)
for k, v in data.items():
- with io.open(
+ with open(
os.path.join(prof_path, k),
mode="w",
encoding=_encodings["repo.content"],
@@ -122,7 +122,7 @@ class UseExpandIncrementalTestCase(TestCase):
settings.setcpv(pkg)
expected = frozenset(expected_use)
got = frozenset(settings["PORTAGE_USE"].split())
- self.assertEqual(got, expected, "%s != %s" % (got, expected))
+ self.assertEqual(got, expected, "{} != {}".format(got, expected))
finally:
playground.cleanup()
diff --git a/lib/portage/tests/emerge/test_config_protect.py b/lib/portage/tests/emerge/test_config_protect.py
index b60d0c495..b4d109145 100644
--- a/lib/portage/tests/emerge/test_config_protect.py
+++ b/lib/portage/tests/emerge/test_config_protect.py
@@ -129,7 +129,7 @@ src_install() {
path = os.path.join(dir_path, name)
st = os.lstat(path)
if stat.S_ISREG(st.st_mode):
- with io.open(path, mode="a", encoding=_encodings["stdio"]) as f:
+ with open(path, mode="a", encoding=_encodings["stdio"]) as f:
f.write("modified at %d\n" % time.time())
elif stat.S_ISLNK(st.st_mode):
old_dest = os.readlink(path)
@@ -288,7 +288,7 @@ src_install() {
sys.stderr.write(_unicode_decode(line))
self.assertEqual(
- os.EX_OK, proc.returncode, "emerge failed with args %s" % (args,)
+ os.EX_OK, proc.returncode, "emerge failed with args {}".format(args)
)
finally:
playground.cleanup()
diff --git a/lib/portage/tests/emerge/test_emerge_blocker_file_collision.py b/lib/portage/tests/emerge/test_emerge_blocker_file_collision.py
index 785bf50cb..7b07ce0b6 100644
--- a/lib/portage/tests/emerge/test_emerge_blocker_file_collision.py
+++ b/lib/portage/tests/emerge/test_emerge_blocker_file_collision.py
@@ -162,7 +162,7 @@ src_install() {
for i, args in enumerate(test_commands):
if hasattr(args[0], "__call__"):
- self.assertTrue(args[0](), "callable at index %s failed" % (i,))
+ self.assertTrue(args[0](), "callable at index {} failed".format(i))
continue
if isinstance(args[0], dict):
@@ -185,7 +185,7 @@ src_install() {
sys.stderr.write(_unicode_decode(line))
self.assertEqual(
- os.EX_OK, proc.returncode, "emerge failed with args %s" % (args,)
+ os.EX_OK, proc.returncode, "emerge failed with args {}".format(args)
)
finally:
playground.debug = False
diff --git a/lib/portage/tests/emerge/test_emerge_slot_abi.py b/lib/portage/tests/emerge/test_emerge_slot_abi.py
index 3c3a8b582..303259049 100644
--- a/lib/portage/tests/emerge/test_emerge_slot_abi.py
+++ b/lib/portage/tests/emerge/test_emerge_slot_abi.py
@@ -164,7 +164,7 @@ class SlotAbiEmergeTestCase(TestCase):
for i, args in enumerate(test_commands):
if hasattr(args[0], "__call__"):
- self.assertTrue(args[0](), "callable at index %s failed" % (i,))
+ self.assertTrue(args[0](), "callable at index {} failed".format(i))
continue
proc = subprocess.Popen(args, env=env, stdout=stdout)
@@ -180,7 +180,7 @@ class SlotAbiEmergeTestCase(TestCase):
sys.stderr.write(_unicode_decode(line))
self.assertEqual(
- os.EX_OK, proc.returncode, "emerge failed with args %s" % (args,)
+ os.EX_OK, proc.returncode, "emerge failed with args {}".format(args)
)
finally:
playground.cleanup()
diff --git a/lib/portage/tests/emerge/test_simple.py b/lib/portage/tests/emerge/test_simple.py
index 6d0dae0dd..650da9a60 100644
--- a/lib/portage/tests/emerge/test_simple.py
+++ b/lib/portage/tests/emerge/test_simple.py
@@ -39,7 +39,7 @@ class BinhostContentMap(Mapping):
try:
with open(local_path, "rb") as f:
return f.read()
- except EnvironmentError:
+ except OSError:
raise KeyError(request_path)
@@ -564,7 +564,7 @@ call_has_and_best_version() {
# Test binhost support if FETCHCOMMAND is available.
binrepos_conf_file = os.path.join(os.sep, eprefix, BINREPOS_CONF_FILE)
- with open(binrepos_conf_file, "wt") as f:
+ with open(binrepos_conf_file, "w") as f:
f.write("[test-binhost]\n")
f.write("sync-uri = {}\n".format(binhost_uri))
fetchcommand = portage.util.shlex_split(playground.settings["FETCHCOMMAND"])
@@ -720,7 +720,7 @@ move dev-util/git dev-vcs/git
portage.writemsg(output)
self.assertEqual(
- os.EX_OK, proc.returncode, "emerge failed with args %s" % (args,)
+ os.EX_OK, proc.returncode, "emerge failed with args {}".format(args)
)
finally:
binhost_server.__exit__(None, None, None)
diff --git a/lib/portage/tests/env/config/test_PackageKeywordsFile.py b/lib/portage/tests/env/config/test_PackageKeywordsFile.py
index e4a1fcaaf..8f777795a 100644
--- a/lib/portage/tests/env/config/test_PackageKeywordsFile.py
+++ b/lib/portage/tests/env/config/test_PackageKeywordsFile.py
@@ -34,7 +34,7 @@ class PackageKeywordsFileTestCase(TestCase):
fd, self.fname = mkstemp()
f = os.fdopen(fd, "w")
for c in self.cpv:
- f.write("%s %s\n" % (c, " ".join(self.keywords)))
+ f.write("{} {}\n".format(c, " ".join(self.keywords)))
f.close()
def NukeFile(self):
diff --git a/lib/portage/tests/env/config/test_PackageUseFile.py b/lib/portage/tests/env/config/test_PackageUseFile.py
index e8773e4c3..2126de0a8 100644
--- a/lib/portage/tests/env/config/test_PackageUseFile.py
+++ b/lib/portage/tests/env/config/test_PackageUseFile.py
@@ -30,7 +30,7 @@ class PackageUseFileTestCase(TestCase):
def BuildFile(self):
fd, self.fname = mkstemp()
f = os.fdopen(fd, "w")
- f.write("%s %s" % (self.cpv, " ".join(self.useflags)))
+ f.write("{} {}".format(self.cpv, " ".join(self.useflags)))
f.close()
def NukeFile(self):
diff --git a/lib/portage/tests/env/config/test_PortageModulesFile.py b/lib/portage/tests/env/config/test_PortageModulesFile.py
index 3ca6aa240..fffe87ec9 100644
--- a/lib/portage/tests/env/config/test_PortageModulesFile.py
+++ b/lib/portage/tests/env/config/test_PortageModulesFile.py
@@ -32,7 +32,7 @@ class PortageModulesFileTestCase(TestCase):
fd, self.fname = mkstemp()
f = os.fdopen(fd, "w")
for k, v in self.items.items():
- f.write("%s=%s\n" % (k, v))
+ f.write("{}={}\n".format(k, v))
f.close()
def NukeFile(self):
diff --git a/lib/portage/tests/glsa/test_security_set.py b/lib/portage/tests/glsa/test_security_set.py
index 68265913e..9babf4b33 100644
--- a/lib/portage/tests/glsa/test_security_set.py
+++ b/lib/portage/tests/glsa/test_security_set.py
@@ -128,7 +128,7 @@ class SecuritySetTestCase(TestCase):
)
portage.util.ensure_dirs(glsa_dir)
for glsa in glsas:
- with io.open(
+ with open(
os.path.join(glsa_dir, "glsa-" + glsa["glsa_id"] + ".xml"),
encoding=_encodings["repo.content"],
mode="w",
diff --git a/lib/portage/tests/gpkg/test_gpkg_path.py b/lib/portage/tests/gpkg/test_gpkg_path.py
index 61b060ef2..64275919b 100644
--- a/lib/portage/tests/gpkg/test_gpkg_path.py
+++ b/lib/portage/tests/gpkg/test_gpkg_path.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copright Gentoo Foundation 2006
# Portage Unit Testing Functionality
diff --git a/lib/portage/tests/lint/test_compile_modules.py b/lib/portage/tests/lint/test_compile_modules.py
index 7cdaa8bcc..e3941bad5 100644
--- a/lib/portage/tests/lint/test_compile_modules.py
+++ b/lib/portage/tests/lint/test_compile_modules.py
@@ -60,7 +60,7 @@ class CompileModulesTestCase(TestCase):
encoding=_encodings["content"],
errors="replace",
)
- except IOError as e:
+ except OSError as e:
# Some tests create files that are unreadable by the
# user (by design), so ignore EACCES issues.
if e.errno != errno.EACCES:
diff --git a/lib/portage/tests/lint/test_import_modules.py b/lib/portage/tests/lint/test_import_modules.py
index 5522e02d1..1cf277812 100644
--- a/lib/portage/tests/lint/test_import_modules.py
+++ b/lib/portage/tests/lint/test_import_modules.py
@@ -23,7 +23,7 @@ class ImportModulesTestCase(TestCase):
__import__(mod)
except ImportError as e:
if mod not in expected_failures:
- self.assertTrue(False, "failed to import '%s': %s" % (mod, e))
+ self.assertTrue(False, "failed to import '{}': {}".format(mod, e))
del e
def _iter_modules(self, base_dir):
diff --git a/lib/portage/tests/process/test_PipeLogger.py b/lib/portage/tests/process/test_PipeLogger.py
index eb0bf053b..eb578758e 100644
--- a/lib/portage/tests/process/test_PipeLogger.py
+++ b/lib/portage/tests/process/test_PipeLogger.py
@@ -72,5 +72,5 @@ class PipeLoggerTestCase(TestCase):
self._testPipeLoggerToPipe(test_string, loop)
)
self.assertEqual(
- test_string, output, "x = %s, len(output) = %s" % (x, len(output))
+ test_string, output, "x = {}, len(output) = {}".format(x, len(output))
)
diff --git a/lib/portage/tests/process/test_PopenProcess.py b/lib/portage/tests/process/test_PopenProcess.py
index 770053fa5..f8cc8fda4 100644
--- a/lib/portage/tests/process/test_PopenProcess.py
+++ b/lib/portage/tests/process/test_PopenProcess.py
@@ -93,10 +93,10 @@ class PopenPipeTestCase(TestCase):
test_string = x * "a"
output = self._testPipeReader(test_string)
self.assertEqual(
- test_string, output, "x = %s, len(output) = %s" % (x, len(output))
+ test_string, output, "x = {}, len(output) = {}".format(x, len(output))
)
output = self._testPipeLogger(test_string)
self.assertEqual(
- test_string, output, "x = %s, len(output) = %s" % (x, len(output))
+ test_string, output, "x = {}, len(output) = {}".format(x, len(output))
)
diff --git a/lib/portage/tests/process/test_PopenProcessBlockingIO.py b/lib/portage/tests/process/test_PopenProcessBlockingIO.py
index 0e9d2dfed..cf30856cd 100644
--- a/lib/portage/tests/process/test_PopenProcessBlockingIO.py
+++ b/lib/portage/tests/process/test_PopenProcessBlockingIO.py
@@ -69,5 +69,5 @@ class PopenPipeBlockingIOTestCase(TestCase):
test_string = x * "a"
output = self._testPipeReader(test_string)
self.assertEqual(
- test_string, output, "x = %s, len(output) = %s" % (x, len(output))
+ test_string, output, "x = {}, len(output) = {}".format(x, len(output))
)
diff --git a/lib/portage/tests/process/test_poll.py b/lib/portage/tests/process/test_poll.py
index 627157211..c4c330793 100644
--- a/lib/portage/tests/process/test_poll.py
+++ b/lib/portage/tests/process/test_poll.py
@@ -29,7 +29,7 @@ class PipeReaderTestCase(TestCase):
def make_pipes():
try:
return pty.openpty(), None
- except EnvironmentError:
+ except OSError:
self.skipTest("pty not available")
self._do_test(make_pipes)
@@ -101,7 +101,9 @@ class PipeReaderTestCase(TestCase):
try:
output = self._testPipeReader(read_end, write_end, test_string)
self.assertEqual(
- test_string, output, "x = %s, len(output) = %s" % (x, len(output))
+ test_string,
+ output,
+ "x = {}, len(output) = {}".format(x, len(output)),
)
finally:
if cleanup is not None:
@@ -115,7 +117,7 @@ class PipeReaderArrayTestCase(PipeReaderTestCase):
_echo_cmd = "sleep 0.1 ; echo -n '%s'"
def __init__(self, *args, **kwargs):
- super(PipeReaderArrayTestCase, self).__init__(*args, **kwargs)
+ super().__init__(*args, **kwargs)
# https://bugs.python.org/issue5380
# https://bugs.pypy.org/issue956
self.todo = True
diff --git a/lib/portage/tests/resolver/ResolverPlayground.py b/lib/portage/tests/resolver/ResolverPlayground.py
index 361de16af..e43b12dd3 100644
--- a/lib/portage/tests/resolver/ResolverPlayground.py
+++ b/lib/portage/tests/resolver/ResolverPlayground.py
@@ -312,7 +312,7 @@ class ResolverPlayground:
f.write(copyright_header)
f.write('EAPI="%s"\n' % eapi)
for k, v in metadata.items():
- f.write('%s="%s"\n' % (k, v))
+ f.write('{}="{}"\n'.format(k, v))
if misc_content is not None:
f.write(misc_content)
@@ -369,11 +369,13 @@ class ResolverPlayground:
if "BUILD_ID" in metadata:
if binpkg_format == "xpak":
binpkg_path = os.path.join(
- category_dir, pn, "%s-%s.xpak" % (pf, metadata["BUILD_ID"])
+ category_dir, pn, "{}-{}.xpak".format(pf, metadata["BUILD_ID"])
)
elif binpkg_format == "gpkg":
binpkg_path = os.path.join(
- category_dir, pn, "%s-%s.gpkg.tar" % (pf, metadata["BUILD_ID"])
+ category_dir,
+ pn,
+ "{}-{}.gpkg.tar".format(pf, metadata["BUILD_ID"]),
)
else:
raise InvalidBinaryPackageFormat(binpkg_format)
@@ -437,7 +439,7 @@ class ResolverPlayground:
with open(ebuild_path, "w") as f:
f.write('EAPI="%s"\n' % metadata.pop("EAPI", "0"))
for k, v in metadata.items():
- f.write('%s="%s"\n' % (k, v))
+ f.write('{}="{}"\n'.format(k, v))
env_path = os.path.join(vdb_pkg_dir, "environment.bz2")
with bz2.BZ2File(env_path, mode="w") as f:
@@ -517,7 +519,7 @@ class ResolverPlayground:
for eclass_name, eclass_content in eclasses.items():
with open(
- os.path.join(eclass_dir, "{}.eclass".format(eclass_name)), "wt"
+ os.path.join(eclass_dir, "{}.eclass".format(eclass_name)), "w"
) as f:
if isinstance(eclass_content, str):
eclass_content = [eclass_content]
@@ -688,7 +690,7 @@ class ResolverPlayground:
"[%s]\n%s"
% (
repo_name,
- "\n".join("%s = %s" % (k, v) for k, v in repo_config.items()),
+ "\n".join("{} = {}".format(k, v) for k, v in repo_config.items()),
)
for repo_name, repo_config in self._repositories.items()
)
@@ -942,7 +944,7 @@ class ResolverPlaygroundTestCase:
expected = set(expected)
elif key == "forced_rebuilds" and expected is not None:
- expected = dict((k, set(v)) for k, v in expected.items())
+ expected = {k: set(v) for k, v in expected.items()}
if got != expected:
fail_msgs.append(
@@ -980,7 +982,7 @@ def _mergelist_str(x, depgraph):
desc = x.type_name
else:
desc = x.operation
- mergelist_str = "[%s]%s" % (desc, mergelist_str)
+ mergelist_str = "[{}]{}".format(desc, mergelist_str)
if x.root != depgraph._frozen_config._running_root.root:
mergelist_str += "{targetroot}"
return mergelist_str
@@ -1080,17 +1082,17 @@ class ResolverPlaygroundResult:
)
if self.depgraph._dynamic_config._unsatisfied_deps_for_display:
- self.unsatisfied_deps = set(
+ self.unsatisfied_deps = {
dep_info[0][1]
for dep_info in self.depgraph._dynamic_config._unsatisfied_deps_for_display
- )
+ }
if self.depgraph._forced_rebuilds:
- self.forced_rebuilds = dict(
- (child.cpv, set(parent.cpv for parent in parents))
+ self.forced_rebuilds = {
+ child.cpv: {parent.cpv for parent in parents}
for child_dict in self.depgraph._forced_rebuilds.values()
for child, parents in child_dict.items()
- )
+ }
required_use_unsatisfied = []
for (
diff --git a/lib/portage/tests/resolver/binpkg_multi_instance/test_build_id_profile_format.py b/lib/portage/tests/resolver/binpkg_multi_instance/test_build_id_profile_format.py
index 4dc83e843..77fdac425 100644
--- a/lib/portage/tests/resolver/binpkg_multi_instance/test_build_id_profile_format.py
+++ b/lib/portage/tests/resolver/binpkg_multi_instance/test_build_id_profile_format.py
@@ -1,7 +1,6 @@
# Copyright 2015-2021 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
-from __future__ import print_function
import sys
from portage.const import SUPPORTED_GENTOO_BINPKG_FORMATS
diff --git a/lib/portage/tests/resolver/binpkg_multi_instance/test_rebuilt_binaries.py b/lib/portage/tests/resolver/binpkg_multi_instance/test_rebuilt_binaries.py
index 854dee458..d7d70fd5b 100644
--- a/lib/portage/tests/resolver/binpkg_multi_instance/test_rebuilt_binaries.py
+++ b/lib/portage/tests/resolver/binpkg_multi_instance/test_rebuilt_binaries.py
@@ -1,7 +1,6 @@
# Copyright 2015 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
-from __future__ import print_function
import sys
from portage.const import SUPPORTED_GENTOO_BINPKG_FORMATS
diff --git a/lib/portage/tests/resolver/soname/test_autounmask.py b/lib/portage/tests/resolver/soname/test_autounmask.py
index e324c9392..ecf4b61cb 100644
--- a/lib/portage/tests/resolver/soname/test_autounmask.py
+++ b/lib/portage/tests/resolver/soname/test_autounmask.py
@@ -1,7 +1,6 @@
# Copyright 2015 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
-from __future__ import print_function
import sys
from portage.const import SUPPORTED_GENTOO_BINPKG_FORMATS
diff --git a/lib/portage/tests/resolver/soname/test_downgrade.py b/lib/portage/tests/resolver/soname/test_downgrade.py
index c601b6381..d5a951694 100644
--- a/lib/portage/tests/resolver/soname/test_downgrade.py
+++ b/lib/portage/tests/resolver/soname/test_downgrade.py
@@ -1,7 +1,6 @@
# Copyright 2015 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
-from __future__ import print_function
import sys
from portage.const import SUPPORTED_GENTOO_BINPKG_FORMATS
diff --git a/lib/portage/tests/resolver/soname/test_or_choices.py b/lib/portage/tests/resolver/soname/test_or_choices.py
index dcdcf57e3..5c8f35295 100644
--- a/lib/portage/tests/resolver/soname/test_or_choices.py
+++ b/lib/portage/tests/resolver/soname/test_or_choices.py
@@ -1,7 +1,6 @@
# Copyright 2015 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
-from __future__ import print_function
import sys
from portage.const import SUPPORTED_GENTOO_BINPKG_FORMATS
diff --git a/lib/portage/tests/resolver/soname/test_reinstall.py b/lib/portage/tests/resolver/soname/test_reinstall.py
index 68c842af1..b50105688 100644
--- a/lib/portage/tests/resolver/soname/test_reinstall.py
+++ b/lib/portage/tests/resolver/soname/test_reinstall.py
@@ -1,7 +1,6 @@
# Copyright 2015 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
-from __future__ import print_function
import sys
from portage.const import SUPPORTED_GENTOO_BINPKG_FORMATS
diff --git a/lib/portage/tests/resolver/soname/test_skip_update.py b/lib/portage/tests/resolver/soname/test_skip_update.py
index a515a5252..dabbb781a 100644
--- a/lib/portage/tests/resolver/soname/test_skip_update.py
+++ b/lib/portage/tests/resolver/soname/test_skip_update.py
@@ -1,7 +1,6 @@
# Copyright 2015 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
-from __future__ import print_function
import sys
from portage.const import SUPPORTED_GENTOO_BINPKG_FORMATS
diff --git a/lib/portage/tests/resolver/soname/test_slot_conflict_reinstall.py b/lib/portage/tests/resolver/soname/test_slot_conflict_reinstall.py
index 027cadc83..05ffc41db 100644
--- a/lib/portage/tests/resolver/soname/test_slot_conflict_reinstall.py
+++ b/lib/portage/tests/resolver/soname/test_slot_conflict_reinstall.py
@@ -1,7 +1,6 @@
# Copyright 2015 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
-from __future__ import print_function
import sys
from portage.const import SUPPORTED_GENTOO_BINPKG_FORMATS
diff --git a/lib/portage/tests/resolver/soname/test_slot_conflict_update.py b/lib/portage/tests/resolver/soname/test_slot_conflict_update.py
index dd763caef..0b581cb2d 100644
--- a/lib/portage/tests/resolver/soname/test_slot_conflict_update.py
+++ b/lib/portage/tests/resolver/soname/test_slot_conflict_update.py
@@ -1,7 +1,6 @@
# Copyright 2015 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
-from __future__ import print_function
import sys
from portage.const import SUPPORTED_GENTOO_BINPKG_FORMATS
diff --git a/lib/portage/tests/resolver/soname/test_soname_provided.py b/lib/portage/tests/resolver/soname/test_soname_provided.py
index 6a9ee76ba..c2ead9bbb 100644
--- a/lib/portage/tests/resolver/soname/test_soname_provided.py
+++ b/lib/portage/tests/resolver/soname/test_soname_provided.py
@@ -1,7 +1,6 @@
# Copyright 2015 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
-from __future__ import print_function
import sys
from portage.const import SUPPORTED_GENTOO_BINPKG_FORMATS
diff --git a/lib/portage/tests/resolver/soname/test_unsatisfiable.py b/lib/portage/tests/resolver/soname/test_unsatisfiable.py
index 75d50c10f..2b6e07f4a 100644
--- a/lib/portage/tests/resolver/soname/test_unsatisfiable.py
+++ b/lib/portage/tests/resolver/soname/test_unsatisfiable.py
@@ -1,7 +1,6 @@
# Copyright 2015 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
-from __future__ import print_function
import sys
from portage.const import SUPPORTED_GENTOO_BINPKG_FORMATS
diff --git a/lib/portage/tests/resolver/soname/test_unsatisfied.py b/lib/portage/tests/resolver/soname/test_unsatisfied.py
index 2e0fe6e7f..392db4c9e 100644
--- a/lib/portage/tests/resolver/soname/test_unsatisfied.py
+++ b/lib/portage/tests/resolver/soname/test_unsatisfied.py
@@ -1,7 +1,6 @@
# Copyright 2015 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
-from __future__ import print_function
import sys
from portage.const import SUPPORTED_GENTOO_BINPKG_FORMATS
diff --git a/lib/portage/tests/resolver/test_autounmask.py b/lib/portage/tests/resolver/test_autounmask.py
index 4e98e5bfc..077d147d3 100644
--- a/lib/portage/tests/resolver/test_autounmask.py
+++ b/lib/portage/tests/resolver/test_autounmask.py
@@ -474,7 +474,7 @@ class AutounmaskTestCase(TestCase):
options={"--autounmask-license": "y"},
success=False,
mergelist=["dev-libs/A-1"],
- license_changes={"dev-libs/A-1": set(["TEST"])},
+ license_changes={"dev-libs/A-1": {"TEST"}},
),
# Test that --autounmask enables --autounmask-license
ResolverPlaygroundTestCase(
@@ -482,7 +482,7 @@ class AutounmaskTestCase(TestCase):
options={"--autounmask": True},
success=False,
mergelist=["dev-libs/A-1"],
- license_changes={"dev-libs/A-1": set(["TEST"])},
+ license_changes={"dev-libs/A-1": {"TEST"}},
),
# Test that --autounmask-license is not enabled by default
ResolverPlaygroundTestCase(
@@ -510,7 +510,7 @@ class AutounmaskTestCase(TestCase):
options={"--autounmask": True},
success=False,
mergelist=["dev-libs/B-1", "dev-libs/C-1"],
- license_changes={"dev-libs/B-1": set(["TEST"])},
+ license_changes={"dev-libs/B-1": {"TEST"}},
unstable_keywords=["dev-libs/B-1"],
use_changes={"dev-libs/B-1": {"foo": True}},
),
@@ -521,10 +521,10 @@ class AutounmaskTestCase(TestCase):
success=False,
mergelist=["dev-libs/E-1", "dev-libs/F-1", "dev-libs/D-1"],
license_changes={
- "dev-libs/D-1": set(["TEST"]),
- "dev-libs/E-1": set(["TEST"]),
- "dev-libs/E-2": set(["TEST"]),
- "dev-libs/F-1": set(["TEST"]),
+ "dev-libs/D-1": {"TEST"},
+ "dev-libs/E-1": {"TEST"},
+ "dev-libs/E-2": {"TEST"},
+ "dev-libs/F-1": {"TEST"},
},
),
# Test license only for bug #420847
@@ -533,7 +533,7 @@ class AutounmaskTestCase(TestCase):
options={"--autounmask": True},
success=False,
mergelist=["dev-java/sun-jdk-1.6.0.31"],
- license_changes={"dev-java/sun-jdk-1.6.0.31": set(["TEST"])},
+ license_changes={"dev-java/sun-jdk-1.6.0.31": {"TEST"}},
),
)
@@ -682,15 +682,15 @@ class AutounmaskTestCase(TestCase):
success=False,
options={"--autounmask": True},
mergelist=["dev-libs/A-2", "dev-libs/B-1"],
- needed_p_mask_changes=set(["dev-libs/A-2"]),
+ needed_p_mask_changes={"dev-libs/A-2"},
),
ResolverPlaygroundTestCase(
["dev-libs/C"],
success=False,
options={"--autounmask": True},
mergelist=["dev-libs/A-9999", "dev-libs/C-1"],
- unstable_keywords=set(["dev-libs/A-9999"]),
- needed_p_mask_changes=set(["dev-libs/A-9999"]),
+ unstable_keywords={"dev-libs/A-9999"},
+ needed_p_mask_changes={"dev-libs/A-9999"},
),
)
diff --git a/lib/portage/tests/resolver/test_autounmask_binpkg_use.py b/lib/portage/tests/resolver/test_autounmask_binpkg_use.py
index 682732611..043ca7cea 100644
--- a/lib/portage/tests/resolver/test_autounmask_binpkg_use.py
+++ b/lib/portage/tests/resolver/test_autounmask_binpkg_use.py
@@ -1,7 +1,6 @@
# Copyright 2017 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
-from __future__ import print_function
import sys
from portage.const import SUPPORTED_GENTOO_BINPKG_FORMATS
diff --git a/lib/portage/tests/resolver/test_bdeps.py b/lib/portage/tests/resolver/test_bdeps.py
index a1b987eca..ded50fc51 100644
--- a/lib/portage/tests/resolver/test_bdeps.py
+++ b/lib/portage/tests/resolver/test_bdeps.py
@@ -1,7 +1,6 @@
# Copyright 2017 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
-from __future__ import print_function
import sys
from portage.tests import TestCase
diff --git a/lib/portage/tests/resolver/test_binary_pkg_ebuild_visibility.py b/lib/portage/tests/resolver/test_binary_pkg_ebuild_visibility.py
index 10a292abd..835c93310 100644
--- a/lib/portage/tests/resolver/test_binary_pkg_ebuild_visibility.py
+++ b/lib/portage/tests/resolver/test_binary_pkg_ebuild_visibility.py
@@ -1,7 +1,6 @@
# Copyright 2017 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
-from __future__ import print_function
import sys
from portage.const import SUPPORTED_GENTOO_BINPKG_FORMATS
diff --git a/lib/portage/tests/resolver/test_changed_deps.py b/lib/portage/tests/resolver/test_changed_deps.py
index 0c9b34d4b..f845d4bba 100644
--- a/lib/portage/tests/resolver/test_changed_deps.py
+++ b/lib/portage/tests/resolver/test_changed_deps.py
@@ -1,7 +1,6 @@
# Copyright 2014 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
-from __future__ import print_function
import sys
from portage.const import SUPPORTED_GENTOO_BINPKG_FORMATS
diff --git a/lib/portage/tests/resolver/test_complete_if_new_subslot_without_revbump.py b/lib/portage/tests/resolver/test_complete_if_new_subslot_without_revbump.py
index 6b061ca2a..ce8eb3b64 100644
--- a/lib/portage/tests/resolver/test_complete_if_new_subslot_without_revbump.py
+++ b/lib/portage/tests/resolver/test_complete_if_new_subslot_without_revbump.py
@@ -1,7 +1,6 @@
# Copyright 2013 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
-from __future__ import print_function
import sys
from portage.const import SUPPORTED_GENTOO_BINPKG_FORMATS
diff --git a/lib/portage/tests/resolver/test_disjunctive_depend_order.py b/lib/portage/tests/resolver/test_disjunctive_depend_order.py
index 4471bc605..1f94386c7 100644
--- a/lib/portage/tests/resolver/test_disjunctive_depend_order.py
+++ b/lib/portage/tests/resolver/test_disjunctive_depend_order.py
@@ -1,7 +1,6 @@
# Copyright 2017 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
-from __future__ import print_function
import sys
from portage.const import SUPPORTED_GENTOO_BINPKG_FORMATS
diff --git a/lib/portage/tests/resolver/test_multirepo.py b/lib/portage/tests/resolver/test_multirepo.py
index 8132439b5..1c0002b27 100644
--- a/lib/portage/tests/resolver/test_multirepo.py
+++ b/lib/portage/tests/resolver/test_multirepo.py
@@ -1,7 +1,6 @@
# Copyright 2010-2020 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
-from __future__ import print_function
import sys
from portage.const import SUPPORTED_GENTOO_BINPKG_FORMATS
diff --git a/lib/portage/tests/resolver/test_package_tracker.py b/lib/portage/tests/resolver/test_package_tracker.py
index c343589f9..5502c688f 100644
--- a/lib/portage/tests/resolver/test_package_tracker.py
+++ b/lib/portage/tests/resolver/test_package_tracker.py
@@ -20,7 +20,7 @@ class PackageTrackerTestCase(TestCase):
def make_pkg(self, root, atom, repo="test_repo"):
atom = Atom(atom)
- slot_atom = Atom("%s:%s" % (atom.cp, atom.slot))
+ slot_atom = Atom("{}:{}".format(atom.cp, atom.slot))
slot = atom.slot
return self.FakePackage(
diff --git a/lib/portage/tests/resolver/test_profile_default_eapi.py b/lib/portage/tests/resolver/test_profile_default_eapi.py
index 45b8c41a6..76f03465e 100644
--- a/lib/portage/tests/resolver/test_profile_default_eapi.py
+++ b/lib/portage/tests/resolver/test_profile_default_eapi.py
@@ -108,7 +108,7 @@ class ProfileDefaultEAPITestCase(TestCase):
for prof_path, data in profile_info:
ensure_dirs(prof_path)
for k, v in data.items():
- with io.open(
+ with open(
os.path.join(prof_path, k),
mode="w",
encoding=_encodings["repo.content"],
diff --git a/lib/portage/tests/resolver/test_profile_package_set.py b/lib/portage/tests/resolver/test_profile_package_set.py
index 6b64dcdae..508bca1f4 100644
--- a/lib/portage/tests/resolver/test_profile_package_set.py
+++ b/lib/portage/tests/resolver/test_profile_package_set.py
@@ -98,7 +98,7 @@ class ProfilePackageSetTestCase(TestCase):
prof_path = os.path.join(profile_root, p)
ensure_dirs(prof_path)
for k, v in data.items():
- with io.open(
+ with open(
os.path.join(prof_path, k),
mode="w",
encoding=_encodings["repo.content"],
diff --git a/lib/portage/tests/resolver/test_regular_slot_change_without_revbump.py b/lib/portage/tests/resolver/test_regular_slot_change_without_revbump.py
index 127a175d1..db8cdcb0c 100644
--- a/lib/portage/tests/resolver/test_regular_slot_change_without_revbump.py
+++ b/lib/portage/tests/resolver/test_regular_slot_change_without_revbump.py
@@ -1,7 +1,6 @@
# Copyright 2013 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
-from __future__ import print_function
import sys
from portage.const import SUPPORTED_GENTOO_BINPKG_FORMATS
diff --git a/lib/portage/tests/resolver/test_simple.py b/lib/portage/tests/resolver/test_simple.py
index b2bfd5fdf..854cf31e0 100644
--- a/lib/portage/tests/resolver/test_simple.py
+++ b/lib/portage/tests/resolver/test_simple.py
@@ -1,7 +1,6 @@
# Copyright 2010-2020 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
-from __future__ import print_function
import sys
from portage.const import SUPPORTED_GENTOO_BINPKG_FORMATS
diff --git a/lib/portage/tests/resolver/test_slot_abi.py b/lib/portage/tests/resolver/test_slot_abi.py
index 2d99fb676..7fa917762 100644
--- a/lib/portage/tests/resolver/test_slot_abi.py
+++ b/lib/portage/tests/resolver/test_slot_abi.py
@@ -1,7 +1,6 @@
# Copyright 2012-2019 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
-from __future__ import print_function
import sys
from portage.const import SUPPORTED_GENTOO_BINPKG_FORMATS
@@ -15,7 +14,7 @@ from portage.output import colorize
class SlotAbiTestCase(TestCase):
def __init__(self, *args, **kwargs):
- super(SlotAbiTestCase, self).__init__(*args, **kwargs)
+ super().__init__(*args, **kwargs)
def testSubSlot(self):
ebuilds = {
diff --git a/lib/portage/tests/resolver/test_slot_abi_downgrade.py b/lib/portage/tests/resolver/test_slot_abi_downgrade.py
index 15c6a7bfc..ca4ce50cf 100644
--- a/lib/portage/tests/resolver/test_slot_abi_downgrade.py
+++ b/lib/portage/tests/resolver/test_slot_abi_downgrade.py
@@ -1,7 +1,6 @@
# Copyright 2012-2019 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
-from __future__ import print_function
import sys
from portage.const import SUPPORTED_GENTOO_BINPKG_FORMATS
@@ -15,7 +14,7 @@ from portage.output import colorize
class SlotAbiDowngradeTestCase(TestCase):
def __init__(self, *args, **kwargs):
- super(SlotAbiDowngradeTestCase, self).__init__(*args, **kwargs)
+ super().__init__(*args, **kwargs)
def testSubSlot(self):
ebuilds = {
diff --git a/lib/portage/tests/resolver/test_slot_change_without_revbump.py b/lib/portage/tests/resolver/test_slot_change_without_revbump.py
index c1c727caf..d324ec3d1 100644
--- a/lib/portage/tests/resolver/test_slot_change_without_revbump.py
+++ b/lib/portage/tests/resolver/test_slot_change_without_revbump.py
@@ -1,7 +1,6 @@
# Copyright 2013 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
-from __future__ import print_function
import sys
from portage.const import SUPPORTED_GENTOO_BINPKG_FORMATS
diff --git a/lib/portage/tests/resolver/test_slot_operator_autounmask.py b/lib/portage/tests/resolver/test_slot_operator_autounmask.py
index b8b502a68..77ba7e2c4 100644
--- a/lib/portage/tests/resolver/test_slot_operator_autounmask.py
+++ b/lib/portage/tests/resolver/test_slot_operator_autounmask.py
@@ -1,7 +1,6 @@
# Copyright 2013-2019 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
-from __future__ import print_function
import sys
from portage.const import SUPPORTED_GENTOO_BINPKG_FORMATS
@@ -15,7 +14,7 @@ from portage.output import colorize
class SlotOperatorAutoUnmaskTestCase(TestCase):
def __init__(self, *args, **kwargs):
- super(SlotOperatorAutoUnmaskTestCase, self).__init__(*args, **kwargs)
+ super().__init__(*args, **kwargs)
def testSubSlot(self):
ebuilds = {
diff --git a/lib/portage/tests/resolver/test_slot_operator_rebuild.py b/lib/portage/tests/resolver/test_slot_operator_rebuild.py
index 9ad2bc7ab..3bf9cc497 100644
--- a/lib/portage/tests/resolver/test_slot_operator_rebuild.py
+++ b/lib/portage/tests/resolver/test_slot_operator_rebuild.py
@@ -1,7 +1,6 @@
# Copyright 2014-2018 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
-from __future__ import print_function
import sys
from portage.const import SUPPORTED_GENTOO_BINPKG_FORMATS
diff --git a/lib/portage/tests/resolver/test_slot_operator_unsolved.py b/lib/portage/tests/resolver/test_slot_operator_unsolved.py
index 945e34ccf..4576eb13b 100644
--- a/lib/portage/tests/resolver/test_slot_operator_unsolved.py
+++ b/lib/portage/tests/resolver/test_slot_operator_unsolved.py
@@ -1,7 +1,6 @@
# Copyright 2013 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
-from __future__ import print_function
import sys
from portage.const import SUPPORTED_GENTOO_BINPKG_FORMATS
@@ -20,7 +19,7 @@ class SlotOperatorUnsolvedTestCase(TestCase):
"""
def __init__(self, *args, **kwargs):
- super(SlotOperatorUnsolvedTestCase, self).__init__(*args, **kwargs)
+ super().__init__(*args, **kwargs)
def testSlotOperatorUnsolved(self):
ebuilds = {
diff --git a/lib/portage/tests/resolver/test_useflags.py b/lib/portage/tests/resolver/test_useflags.py
index 340ac5de2..2da63a681 100644
--- a/lib/portage/tests/resolver/test_useflags.py
+++ b/lib/portage/tests/resolver/test_useflags.py
@@ -1,7 +1,6 @@
# Copyright 2014 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
-from __future__ import print_function
import sys
from portage.const import SUPPORTED_GENTOO_BINPKG_FORMATS
diff --git a/lib/portage/tests/sets/base/testInternalPackageSet.py b/lib/portage/tests/sets/base/testInternalPackageSet.py
index 0793df3fb..77934cab2 100644
--- a/lib/portage/tests/sets/base/testInternalPackageSet.py
+++ b/lib/portage/tests/sets/base/testInternalPackageSet.py
@@ -12,8 +12,8 @@ class InternalPackageSetTestCase(TestCase):
"""Simple Test Case for InternalPackageSet"""
def testInternalPackageSet(self):
- i1_atoms = set(("dev-libs/A", ">=dev-libs/A-1", "dev-libs/B"))
- i2_atoms = set(("dev-libs/A", "dev-libs/*", "dev-libs/C"))
+ i1_atoms = {"dev-libs/A", ">=dev-libs/A-1", "dev-libs/B"}
+ i2_atoms = {"dev-libs/A", "dev-libs/*", "dev-libs/C"}
i1 = InternalPackageSet(initial_atoms=i1_atoms)
i2 = InternalPackageSet(initial_atoms=i2_atoms, allow_wildcard=True)
diff --git a/lib/portage/tests/sync/test_sync_local.py b/lib/portage/tests/sync/test_sync_local.py
index a5fc069c3..914014216 100644
--- a/lib/portage/tests/sync/test_sync_local.py
+++ b/lib/portage/tests/sync/test_sync_local.py
@@ -94,7 +94,9 @@ class SyncLocalTestCase(TestCase):
break
else:
raise AssertionError(
- "%s binary not found in %s or %s" % (cmd, self.bindir, self.sbindir)
+ "{} binary not found in {} or {}".format(
+ cmd, self.bindir, self.sbindir
+ )
)
git_binary = find_binary("git")
@@ -320,7 +322,7 @@ class SyncLocalTestCase(TestCase):
)
def hg_init_global_config():
- with open(os.path.join(homedir, ".hgrc"), "wt") as f:
+ with open(os.path.join(homedir, ".hgrc"), "w") as f:
f.write(
"[ui]\nusername = {} <{}>\n".format(committer_name, committer_email)
)
@@ -335,7 +337,7 @@ class SyncLocalTestCase(TestCase):
sync_type_mercurial = ((homedir, lambda: repos_set_conf("mercurial")),)
def append_newline(path):
- with open(path, "at") as f:
+ with open(path, "a") as f:
f.write("\n")
upstream_hg_commit = (
diff --git a/lib/portage/tests/unicode/test_string_format.py b/lib/portage/tests/unicode/test_string_format.py
index 65d3c1905..142ad17a2 100644
--- a/lib/portage/tests/unicode/test_string_format.py
+++ b/lib/portage/tests/unicode/test_string_format.py
@@ -28,11 +28,11 @@ class StringFormatTestCase(TestCase):
arg_bytes = _unicode_encode(arg_unicode, encoding=_encodings["content"])
dependency_arg = DependencyArg(arg=arg_unicode)
- formatted_str = "%s" % (dependency_arg,)
+ formatted_str = "{}".format(dependency_arg)
self.assertEqual(formatted_str, arg_unicode)
# Test the __str__ method which returns unicode in python3
- formatted_str = "%s" % (dependency_arg,)
+ formatted_str = "{}".format(dependency_arg)
self.assertEqual(formatted_str, arg_unicode)
def testPortageException(self):
@@ -43,11 +43,11 @@ class StringFormatTestCase(TestCase):
arg_bytes = _unicode_encode(arg_unicode, encoding=_encodings["content"])
e = PortageException(arg_unicode)
- formatted_str = "%s" % (e,)
+ formatted_str = "{}".format(e)
self.assertEqual(formatted_str, arg_unicode)
# Test the __str__ method which returns unicode in python3
- formatted_str = "%s" % (e,)
+ formatted_str = "{}".format(e)
self.assertEqual(formatted_str, arg_unicode)
def testUseFlagDisplay(self):
@@ -59,9 +59,9 @@ class StringFormatTestCase(TestCase):
for arg_unicode in self.unicode_strings:
e = UseFlagDisplay(arg_unicode, enabled, forced)
- formatted_str = "%s" % (e,)
+ formatted_str = "{}".format(e)
self.assertEqual(isinstance(formatted_str, str), True)
# Test the __str__ method which returns unicode in python3
- formatted_str = "%s" % (e,)
+ formatted_str = "{}".format(e)
self.assertEqual(isinstance(formatted_str, str), True)
diff --git a/lib/portage/tests/update/test_move_ent.py b/lib/portage/tests/update/test_move_ent.py
index cb9bb5243..d026a82f8 100644
--- a/lib/portage/tests/update/test_move_ent.py
+++ b/lib/portage/tests/update/test_move_ent.py
@@ -1,7 +1,6 @@
# Copyright 2012-2021 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
-from __future__ import print_function
import sys
import textwrap
diff --git a/lib/portage/tests/update/test_move_slot_ent.py b/lib/portage/tests/update/test_move_slot_ent.py
index 27d51fbb1..baa169bc3 100644
--- a/lib/portage/tests/update/test_move_slot_ent.py
+++ b/lib/portage/tests/update/test_move_slot_ent.py
@@ -1,7 +1,6 @@
# Copyright 2012-2019 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
-from __future__ import print_function
import sys
import textwrap
diff --git a/lib/portage/tests/update/test_update_dbentry.py b/lib/portage/tests/update/test_update_dbentry.py
index 25cab198d..695a246a2 100644
--- a/lib/portage/tests/update/test_update_dbentry.py
+++ b/lib/portage/tests/update/test_update_dbentry.py
@@ -1,7 +1,6 @@
# Copyright 2012-2013 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
-from __future__ import print_function
import sys
import re
import textwrap
diff --git a/lib/portage/tests/util/futures/asyncio/test_pipe_closed.py b/lib/portage/tests/util/futures/asyncio/test_pipe_closed.py
index 972f8863a..6920f4365 100644
--- a/lib/portage/tests/util/futures/asyncio/test_pipe_closed.py
+++ b/lib/portage/tests/util/futures/asyncio/test_pipe_closed.py
@@ -25,7 +25,7 @@ class _PipeClosedTestCase:
def test_pty_device(self):
try:
read_end, write_end = pty.openpty()
- except EnvironmentError:
+ except OSError:
self.skipTest("pty not available")
self._do_test(read_end, write_end)
@@ -123,7 +123,7 @@ class WriterPipeClosedTestCase(_PipeClosedTestCase, TestCase):
while True:
try:
os.write(write_end.fileno(), 512 * b"0")
- except EnvironmentError as e:
+ except OSError as e:
if e.errno != errno.EAGAIN:
raise
break
diff --git a/lib/portage/tests/util/futures/test_retry.py b/lib/portage/tests/util/futures/test_retry.py
index cea3e83f5..20249a3a2 100644
--- a/lib/portage/tests/util/futures/test_retry.py
+++ b/lib/portage/tests/util/futures/test_retry.py
@@ -213,7 +213,7 @@ class RetryForkExecutorTestCase(RetryTestCase):
"""
def __init__(self, *pargs, **kwargs):
- super(RetryForkExecutorTestCase, self).__init__(*pargs, **kwargs)
+ super().__init__(*pargs, **kwargs)
self._executor = None
def _setUpExecutor(self):
diff --git a/lib/portage/tests/util/test_digraph.py b/lib/portage/tests/util/test_digraph.py
index b48948f24..ce162c2b0 100644
--- a/lib/portage/tests/util/test_digraph.py
+++ b/lib/portage/tests/util/test_digraph.py
@@ -118,17 +118,15 @@ class DigraphTest(TestCase):
self.assertEqual(
x.shortest_path("D", "A", ignore_priority=-2), ["D", "C", "B", "A"]
)
- cycles = set(tuple(y) for y in x.get_cycles())
+ cycles = {tuple(y) for y in x.get_cycles()}
self.assertEqual(
cycles,
- set(
- [
- ("D", "C", "B", "A"),
- ("C", "B", "A", "D"),
- ("B", "A", "D", "C"),
- ("A", "D", "C", "B"),
- ]
- ),
+ {
+ ("D", "C", "B", "A"),
+ ("C", "B", "A", "D"),
+ ("B", "A", "D", "C"),
+ ("A", "D", "C", "B"),
+ },
)
x.remove_edge("A", "B")
self.assertEqual(x.get_cycles(), [])
@@ -154,17 +152,15 @@ class DigraphTest(TestCase):
self.assertEqual(x.firstzero(), "B")
self.assertRaises(KeyError, x.remove, "Z")
x.delnode("Z")
- self.assertEqual(set(x), set(["A", "B", "C", "D", "E"]))
+ self.assertEqual(set(x), {"A", "B", "C", "D", "E"})
self.assertEqual(x.get("A"), "A")
self.assertEqual(x.get("A", "default"), "A")
- self.assertEqual(set(x.all_nodes()), set(["A", "B", "C", "D", "E"]))
- self.assertEqual(set(x.leaf_nodes()), set(["B", "D", "E"]))
- self.assertEqual(
- set(x.leaf_nodes(ignore_priority=0)), set(["A", "B", "D", "E"])
- )
+ self.assertEqual(set(x.all_nodes()), {"A", "B", "C", "D", "E"})
+ self.assertEqual(set(x.leaf_nodes()), {"B", "D", "E"})
+ self.assertEqual(set(x.leaf_nodes(ignore_priority=0)), {"A", "B", "D", "E"})
self.assertEqual(x.root_nodes(), ["A"])
- self.assertEqual(set(x.root_nodes(ignore_priority=0)), set(["A", "B", "C"]))
- self.assertEqual(set(x.child_nodes("A")), set(["B", "C"]))
+ self.assertEqual(set(x.root_nodes(ignore_priority=0)), {"A", "B", "C"})
+ self.assertEqual(set(x.child_nodes("A")), {"B", "C"})
self.assertEqual(x.child_nodes("A", ignore_priority=2), [])
self.assertEqual(x.parent_nodes("B"), ["A"])
self.assertEqual(x.parent_nodes("B", ignore_priority=-2), ["A"])
@@ -177,12 +173,12 @@ class DigraphTest(TestCase):
self.assertEqual(x.shortest_path("A", "D"), ["A", "C", "D"])
self.assertEqual(x.shortest_path("D", "A"), None)
self.assertEqual(x.shortest_path("A", "D", ignore_priority=2), None)
- cycles = set(tuple(y) for y in x.get_cycles())
+ cycles = {tuple(y) for y in x.get_cycles()}
self.assertEqual(cycles, set())
x.remove("D")
- self.assertEqual(set(x.all_nodes()), set(["A", "B", "C", "E"]))
+ self.assertEqual(set(x.all_nodes()), {"A", "B", "C", "E"})
x.remove("C")
- self.assertEqual(set(x.all_nodes()), set(["A", "B", "E"]))
+ self.assertEqual(set(x.all_nodes()), {"A", "B", "E"})
portage.util.noiselimit = -2
x.debug_print()
portage.util.noiselimit = 0
@@ -210,9 +206,9 @@ class DigraphTest(TestCase):
self.assertEqual(x.all_nodes(), ["A", "B", "C"])
self.assertEqual(x.leaf_nodes(), [])
self.assertEqual(x.root_nodes(), [])
- self.assertEqual(set(x.child_nodes("A")), set(["B", "C"]))
+ self.assertEqual(set(x.child_nodes("A")), {"B", "C"})
self.assertEqual(x.child_nodes("A", ignore_priority=0), ["B"])
- self.assertEqual(set(x.parent_nodes("A")), set(["B", "C"]))
+ self.assertEqual(set(x.parent_nodes("A")), {"B", "C"})
self.assertEqual(x.parent_nodes("A", ignore_priority=0), ["C"])
self.assertEqual(x.parent_nodes("A", ignore_priority=1), [])
self.assertEqual(x.hasallzeros(), False)
@@ -223,22 +219,18 @@ class DigraphTest(TestCase):
x.shortest_path("A", "C", ignore_priority=0), ["A", "B", "C"]
)
self.assertEqual(x.shortest_path("C", "A", ignore_priority=0), ["C", "A"])
- cycles = set(frozenset(y) for y in x.get_cycles())
+ cycles = {frozenset(y) for y in x.get_cycles()}
self.assertEqual(
cycles,
- set(
- [
- frozenset(["A", "B"]),
- frozenset(["A", "C"]),
- frozenset(["B", "C"]),
- ]
- ),
+ {
+ frozenset(["A", "B"]),
+ frozenset(["A", "C"]),
+ frozenset(["B", "C"]),
+ },
)
x.remove_edge("A", "B")
- cycles = set(frozenset(y) for y in x.get_cycles())
- self.assertEqual(
- cycles, set([frozenset(["A", "C"]), frozenset(["C", "B"])])
- )
+ cycles = {frozenset(y) for y in x.get_cycles()}
+ self.assertEqual(cycles, {frozenset(["A", "C"]), frozenset(["C", "B"])})
x.difference_update(["C"])
self.assertEqual(x.all_nodes(), ["A", "B"])
portage.util.noiselimit = -2
diff --git a/lib/portage/tests/util/test_getconfig.py b/lib/portage/tests/util/test_getconfig.py
index 14fe145eb..8486938f6 100644
--- a/lib/portage/tests/util/test_getconfig.py
+++ b/lib/portage/tests/util/test_getconfig.py
@@ -71,9 +71,9 @@ class GetConfigTestCase(TestCase):
# Format like env_update formats /etc/profile.env.
for k, v in cases.items():
if v.startswith("$") and not v.startswith("${"):
- line = "export %s=$'%s'\n" % (k, v[1:])
+ line = "export {}=$'{}'\n".format(k, v[1:])
else:
- line = "export %s='%s'\n" % (k, v)
+ line = "export {}='{}'\n".format(k, v)
f.write(_unicode_encode(line))
f.flush()
diff --git a/lib/portage/tests/util/test_socks5.py b/lib/portage/tests/util/test_socks5.py
index 18b8d4db8..fcbc1f6b5 100644
--- a/lib/portage/tests/util/test_socks5.py
+++ b/lib/portage/tests/util/test_socks5.py
@@ -124,7 +124,7 @@ class _socket_file_wrapper(portage.proxy.objectproxy.ObjectProxy):
def __getattribute__(self, attr):
if attr == "close":
return object.__getattribute__(self, "close")
- return super(_socket_file_wrapper, self).__getattribute__(attr)
+ return super().__getattribute__(attr)
def __enter__(self):
return self
diff --git a/lib/portage/tests/util/test_xattr.py b/lib/portage/tests/util/test_xattr.py
index a10dd194e..49d7b756e 100644
--- a/lib/portage/tests/util/test_xattr.py
+++ b/lib/portage/tests/util/test_xattr.py
@@ -167,4 +167,4 @@ class StandardTest(TestCase):
"""Make sure the exported API matches"""
for mod in self.MODULES:
for f in self.FUNCS:
- self.assertTrue(hasattr(mod, f), "%s func missing in %s" % (f, mod))
+ self.assertTrue(hasattr(mod, f), "{} func missing in {}".format(f, mod))
diff --git a/lib/portage/tests/versions/test_vercmp.py b/lib/portage/tests/versions/test_vercmp.py
index 5b204658e..d8f12a833 100644
--- a/lib/portage/tests/versions/test_vercmp.py
+++ b/lib/portage/tests/versions/test_vercmp.py
@@ -27,7 +27,7 @@ class VerCmpTestCase(TestCase):
for test in tests:
self.assertFalse(
vercmp(test[0], test[1]) <= 0,
- msg="%s < %s? Wrong!" % (test[0], test[1]),
+ msg="{} < {}? Wrong!".format(test[0], test[1]),
)
def testVerCmpLess(self):
@@ -57,7 +57,7 @@ class VerCmpTestCase(TestCase):
for test in tests:
self.assertFalse(
vercmp(test[0], test[1]) >= 0,
- msg="%s > %s? Wrong!" % (test[0], test[1]),
+ msg="{} > {}? Wrong!".format(test[0], test[1]),
)
def testVerCmpEqual(self):
@@ -73,7 +73,7 @@ class VerCmpTestCase(TestCase):
for test in tests:
self.assertFalse(
vercmp(test[0], test[1]) != 0,
- msg="%s != %s? Wrong!" % (test[0], test[1]),
+ msg="{} != {}? Wrong!".format(test[0], test[1]),
)
def testVerNotEqual(self):
@@ -96,5 +96,5 @@ class VerCmpTestCase(TestCase):
for test in tests:
self.assertFalse(
vercmp(test[0], test[1]) == 0,
- msg="%s == %s? Wrong!" % (test[0], test[1]),
+ msg="{} == {}? Wrong!".format(test[0], test[1]),
)
diff --git a/lib/portage/update.py b/lib/portage/update.py
index 0ccca6e26..325618881 100644
--- a/lib/portage/update.py
+++ b/lib/portage/update.py
@@ -157,9 +157,8 @@ def fixdbentries(update_iter, dbdir, eapi=None, parent=None):
mydata = {}
for myfile in [f for f in os.listdir(dbdir) if f not in ignored_dbentries]:
file_path = os.path.join(dbdir, myfile)
- with io.open(
+ with open(
_unicode_encode(file_path, encoding=_encodings["fs"], errors="strict"),
- mode="r",
encoding=_encodings["repo.content"],
errors="replace",
) as f:
@@ -200,9 +199,8 @@ def grab_updates(updpath, prev_mtimes=None):
if not stat.S_ISREG(mystat.st_mode):
continue
if int(prev_mtimes.get(file_path, -1)) != mystat[stat.ST_MTIME]:
- f = io.open(
+ f = open(
_unicode_encode(file_path, encoding=_encodings["fs"], errors="strict"),
- mode="r",
encoding=_encodings["repo.content"],
errors="replace",
)
@@ -382,18 +380,17 @@ def update_config_files(
for x in myxfiles:
f = None
try:
- f = io.open(
+ f = open(
_unicode_encode(
os.path.join(abs_user_config, x),
encoding=_encodings["fs"],
errors="strict",
),
- mode="r",
encoding=_encodings["content"],
errors="replace",
)
file_contents[x] = f.readlines()
- except IOError:
+ except OSError:
continue
finally:
if f is not None:
@@ -429,11 +426,13 @@ def update_config_files(
# add a comment with the update command, so
# the user can clearly see what happened
contents[pos] = "# %s\n" % " ".join(
- "%s" % (x,) for x in update_cmd
+ "{}".format(x) for x in update_cmd
)
contents.insert(
pos + 1,
- line.replace("%s" % (atom,), "%s" % (new_atom,), 1),
+ line.replace(
+ "{}".format(atom), "{}".format(new_atom), 1
+ ),
)
# we've inserted an additional line, so we need to
# skip it when it's reached in the next iteration
diff --git a/lib/portage/util/ExtractKernelVersion.py b/lib/portage/util/ExtractKernelVersion.py
index 41a8a6eb0..544e63ffe 100644
--- a/lib/portage/util/ExtractKernelVersion.py
+++ b/lib/portage/util/ExtractKernelVersion.py
@@ -25,15 +25,14 @@ def ExtractKernelVersion(base_dir):
lines = []
pathname = os.path.join(base_dir, "Makefile")
try:
- f = io.open(
+ f = open(
_unicode_encode(pathname, encoding=_encodings["fs"], errors="strict"),
- mode="r",
encoding=_encodings["content"],
errors="replace",
)
except OSError as details:
return (None, str(details))
- except IOError as details:
+ except OSError as details:
return (None, str(details))
try:
@@ -41,7 +40,7 @@ def ExtractKernelVersion(base_dir):
lines.append(f.readline())
except OSError as details:
return (None, str(details))
- except IOError as details:
+ except OSError as details:
return (None, str(details))
finally:
f.close()
@@ -81,7 +80,7 @@ def ExtractKernelVersion(base_dir):
for file_path, file_errors in loader_errors.items():
for error_str in file_errors:
writemsg_level(
- "%s: %s\n" % (file_path, error_str),
+ "{}: {}\n".format(file_path, error_str),
level=logging.ERROR,
noiselevel=-1,
)
diff --git a/lib/portage/util/__init__.py b/lib/portage/util/__init__.py
index 5ade7f660..b6ced6a4f 100644
--- a/lib/portage/util/__init__.py
+++ b/lib/portage/util/__init__.py
@@ -472,9 +472,8 @@ def read_corresponding_eapi_file(filename, default="0"):
eapi = None
try:
- with io.open(
+ with open(
_unicode_encode(eapi_file, encoding=_encodings["fs"], errors="strict"),
- mode="r",
encoding=_encodings["repo.content"],
errors="replace",
) as f:
@@ -487,7 +486,7 @@ def read_corresponding_eapi_file(filename, default="0"):
% (eapi_file),
noiselevel=-1,
)
- except IOError:
+ except OSError:
pass
_eapi_cache[eapi_file] = eapi
@@ -679,9 +678,8 @@ def grablines(myfilename, recursive=0, remember_source_file=False):
else:
try:
- with io.open(
+ with open(
_unicode_encode(myfilename, encoding=_encodings["fs"], errors="strict"),
- mode="r",
encoding=_encodings["content"],
errors="replace",
) as myfile:
@@ -689,7 +687,7 @@ def grablines(myfilename, recursive=0, remember_source_file=False):
mylines = [(line, myfilename) for line in myfile.readlines()]
else:
mylines = myfile.readlines()
- except IOError as e:
+ except OSError as e:
if e.errno == PermissionDenied.errno:
raise PermissionDenied(myfilename)
elif e.errno in (errno.ENOENT, errno.ESTALE):
@@ -708,7 +706,7 @@ def writedict(mydict, myfilename, writekey=True):
lines.append(v + "\n")
else:
for k, v in mydict.items():
- lines.append("%s %s\n" % (k, " ".join(v)))
+ lines.append("{} {}\n".format(k, " ".join(v)))
write_atomic(myfilename, "".join(lines))
@@ -734,11 +732,11 @@ class _getconfig_shlex(shlex.shlex):
try:
newfile = varexpand(newfile, self.var_expand_map)
return shlex.shlex.sourcehook(self, newfile)
- except EnvironmentError as e:
+ except OSError as e:
if e.errno == PermissionDenied.errno:
raise PermissionDenied(newfile)
if e.errno not in (errno.ENOENT, errno.ENOTDIR):
- writemsg("open('%s', 'r'): %s\n" % (newfile, e), noiselevel=-1)
+ writemsg("open('{}', 'r'): {}\n".format(newfile, e), noiselevel=-1)
raise
msg = self.error_leader()
@@ -795,16 +793,15 @@ def getconfig(
try:
f = open(
_unicode_encode(mycfg, encoding=_encodings["fs"], errors="strict"),
- mode="r",
encoding=_encodings["content"],
errors="replace",
)
content = f.read()
- except IOError as e:
+ except OSError as e:
if e.errno == PermissionDenied.errno:
raise PermissionDenied(mycfg)
if e.errno != errno.ENOENT:
- writemsg("open('%s', 'r'): %s\n" % (mycfg, e), noiselevel=-1)
+ writemsg("open('{}', 'r'): {}\n".format(mycfg, e), noiselevel=-1)
if e.errno not in (errno.EISDIR,):
raise
return None
@@ -908,7 +905,7 @@ def getconfig(
except Exception as e:
if isinstance(e, ParseError) or lex is None:
raise
- msg = "%s%s" % (lex.error_leader(), e)
+ msg = "{}{}".format(lex.error_leader(), e)
writemsg("%s\n" % msg, noiselevel=-1)
raise
@@ -1128,7 +1125,7 @@ class cmp_sort_key:
def __lt__(self, other):
if other.__class__ is not self.__class__:
raise TypeError(
- "Expected type %s, got %s" % (self.__class__, other.__class__)
+ "Expected type {}, got {}".format(self.__class__, other.__class__)
)
return self._cmp_func(self._obj, other._obj) < 0
@@ -1290,7 +1287,7 @@ def apply_permissions(
os.chmod(filename, new_mode)
modified = True
except OSError as oe:
- func_call = "chmod('%s', %s)" % (filename, oct(new_mode))
+ func_call = "chmod('{}', {})".format(filename, oct(new_mode))
if oe.errno == errno.EPERM:
raise OperationNotPermitted(func_call)
elif oe.errno == errno.EACCES:
@@ -1467,7 +1464,7 @@ class atomic_ofstream(AbstractContextManager, ObjectProxy):
),
)
return
- except IOError as e:
+ except OSError as e:
if canonical_path == filename:
raise
# Ignore this error, since it's irrelevant
@@ -1560,7 +1557,7 @@ def write_atomic(file_path, content, **kwargs):
f = atomic_ofstream(file_path, **kwargs)
f.write(content)
f.close()
- except (IOError, OSError) as e:
+ except OSError as e:
if f:
f.abort()
func_call = "write_atomic('%s')" % file_path
@@ -2005,8 +2002,7 @@ def getlibpaths(root, env=None):
if include_match is not None:
subpath = os.path.join(os.path.dirname(path), include_match.group(1))
for p in glob.glob(subpath):
- for r in read_ld_so_conf(p):
- yield r
+ yield from read_ld_so_conf(p)
else:
yield l
diff --git a/lib/portage/util/_async/AsyncScheduler.py b/lib/portage/util/_async/AsyncScheduler.py
index b8ed31f8c..07782b478 100644
--- a/lib/portage/util/_async/AsyncScheduler.py
+++ b/lib/portage/util/_async/AsyncScheduler.py
@@ -94,7 +94,7 @@ class AsyncScheduler(AsynchronousTask, PollScheduler):
self._schedule()
def _cleanup(self):
- super(AsyncScheduler, self)._cleanup()
+ super()._cleanup()
if self._loadavg_check_id is not None:
self._loadavg_check_id.cancel()
self._loadavg_check_id = None
@@ -104,4 +104,4 @@ class AsyncScheduler(AsynchronousTask, PollScheduler):
Override _async_wait to call self._cleanup().
"""
self._cleanup()
- super(AsyncScheduler, self)._async_wait()
+ super()._async_wait()
diff --git a/lib/portage/util/_async/BuildLogger.py b/lib/portage/util/_async/BuildLogger.py
index cbed2d811..502b3390e 100644
--- a/lib/portage/util/_async/BuildLogger.py
+++ b/lib/portage/util/_async/BuildLogger.py
@@ -60,7 +60,7 @@ class BuildLogger(AsynchronousTask):
scheduler=self.scheduler,
)
filter_proc.start()
- except EnvironmentError:
+ except OSError:
# Maybe the command is missing or broken somehow...
os.close(filter_input)
os.close(stdin)
diff --git a/lib/portage/util/_async/FileCopier.py b/lib/portage/util/_async/FileCopier.py
index 3cd0fe98b..da0e85ee4 100644
--- a/lib/portage/util/_async/FileCopier.py
+++ b/lib/portage/util/_async/FileCopier.py
@@ -22,7 +22,7 @@ class FileCopier(AsyncTaskFuture):
self.future = asyncio.ensure_future(
self.scheduler.run_in_executor(ForkExecutor(loop=self.scheduler), self._run)
)
- super(FileCopier, self)._start()
+ super()._start()
def _run(self):
src_path = _unicode_encode(
diff --git a/lib/portage/util/_async/ForkProcess.py b/lib/portage/util/_async/ForkProcess.py
index e70238705..ea8ea3a5b 100644
--- a/lib/portage/util/_async/ForkProcess.py
+++ b/lib/portage/util/_async/ForkProcess.py
@@ -60,17 +60,17 @@ class ForkProcess(SpawnProcess):
def _cancel(self):
if self._proc is None:
- super(ForkProcess, self)._cancel()
+ super()._cancel()
else:
self._proc.terminate()
def _async_wait(self):
if self._proc_join_task is None:
- super(ForkProcess, self)._async_wait()
+ super()._async_wait()
def _async_waitpid(self):
if self._proc_join_task is None:
- super(ForkProcess, self)._async_waitpid()
+ super()._async_waitpid()
async def _proc_join(self, proc, loop=None):
sentinel_reader = self.scheduler.create_future()
@@ -114,7 +114,7 @@ class ForkProcess(SpawnProcess):
self._async_wait()
def _unregister(self):
- super(ForkProcess, self)._unregister()
+ super()._unregister()
if self._proc is not None:
if self._proc.is_alive():
self._proc.terminate()
diff --git a/lib/portage/util/_async/SchedulerInterface.py b/lib/portage/util/_async/SchedulerInterface.py
index a83e1e015..c397fa1b2 100644
--- a/lib/portage/util/_async/SchedulerInterface.py
+++ b/lib/portage/util/_async/SchedulerInterface.py
@@ -112,7 +112,7 @@ class SchedulerInterface(SlotObject):
mode="ab",
)
f_real = f
- except IOError as e:
+ except OSError as e:
if e.errno not in (errno.ENOENT, errno.ESTALE):
raise
if not msg_shown:
diff --git a/lib/portage/util/_dyn_libs/LinkageMapELF.py b/lib/portage/util/_dyn_libs/LinkageMapELF.py
index 22b057973..2845e494d 100644
--- a/lib/portage/util/_dyn_libs/LinkageMapELF.py
+++ b/lib/portage/util/_dyn_libs/LinkageMapELF.py
@@ -279,7 +279,7 @@ class LinkageMapELF:
args.extend(os.path.join(root, x.lstrip("." + os.sep)) for x in plibs)
try:
proc = subprocess.Popen(args, stdout=subprocess.PIPE)
- except EnvironmentError as e:
+ except OSError as e:
if e.errno != errno.ENOENT:
raise
raise CommandNotFound(args[0])
@@ -309,7 +309,7 @@ class LinkageMapELF:
entry = NeededEntry.parse("scanelf", l)
except InvalidData as e:
writemsg_level(
- "\n%s\n\n" % (e,), level=logging.ERROR, noiselevel=-1
+ "\n{}\n\n".format(e), level=logging.ERROR, noiselevel=-1
)
continue
try:
@@ -322,7 +322,7 @@ class LinkageMapELF:
"rb",
) as f:
elf_header = ELFHeader.read(f)
- except EnvironmentError as e:
+ except OSError as e:
if e.errno != errno.ENOENT:
raise
# File removed concurrently.
@@ -344,7 +344,7 @@ class LinkageMapELF:
)
out, err = proc.communicate()
proc.wait()
- except EnvironmentError:
+ except OSError:
pass
else:
if b"SB shared object" in out:
@@ -392,7 +392,7 @@ class LinkageMapELF:
try:
entry = NeededEntry.parse(location, l)
except InvalidData as e:
- writemsg_level("\n%s\n\n" % (e,), level=logging.ERROR, noiselevel=-1)
+ writemsg_level("\n{}\n\n".format(e), level=logging.ERROR, noiselevel=-1)
continue
# If NEEDED.ELF.2 contains the new multilib category field,
@@ -662,7 +662,7 @@ class LinkageMapELF:
if debug:
if not os.path.isfile(lib):
writemsg_level(
- _("Missing library:") + " %s\n" % (lib,),
+ _("Missing library:") + " {}\n".format(lib),
level=logging.DEBUG,
noiselevel=-1,
)
@@ -719,7 +719,7 @@ class LinkageMapELF:
os = _os_merge
obj_key = self._obj_key(obj)
if obj_key not in self._obj_properties:
- raise KeyError("%s (%s) not in object list" % (obj_key, obj))
+ raise KeyError("{} ({}) not in object list".format(obj_key, obj))
basename = os.path.basename(obj)
soname = self._obj_properties[obj_key].soname
return (
@@ -835,13 +835,13 @@ class LinkageMapELF:
else:
obj_key = self._obj_key(obj)
if obj_key not in self._obj_properties:
- raise KeyError("%s (%s) not in object list" % (obj_key, obj))
+ raise KeyError("{} ({}) not in object list".format(obj_key, obj))
obj_props = self._obj_properties[obj_key]
arch = obj_props.arch
needed = obj_props.needed
path = obj_props.runpaths
- path_keys = set(self._path_key(x) for x in path.union(self._defpath))
+ path_keys = {self._path_key(x) for x in path.union(self._defpath)}
for soname in needed:
rValue[soname] = set()
if arch not in self._libs or soname not in self._libs[arch]:
@@ -913,10 +913,10 @@ class LinkageMapELF:
raise KeyError("%s not in object list" % obj_key)
objs = self._obj_properties[obj_key].alt_paths
else:
- objs = set([obj])
+ objs = {obj}
obj_key = self._obj_key(obj)
if obj_key not in self._obj_properties:
- raise KeyError("%s (%s) not in object list" % (obj_key, obj))
+ raise KeyError("{} ({}) not in object list".format(obj_key, obj))
# If there is another version of this lib with the
# same soname and the soname symlink points to that
@@ -949,7 +949,7 @@ class LinkageMapELF:
if arch_map is not None:
soname_node = arch_map.get(soname)
- defpath_keys = set(self._path_key(x) for x in self._defpath)
+ defpath_keys = {self._path_key(x) for x in self._defpath}
satisfied_consumer_keys = set()
if soname_node is not None:
if exclude_providers is not None or not greedy:
@@ -984,7 +984,7 @@ class LinkageMapELF:
if soname_node is not None:
# For each potential consumer, add it to rValue if an object from the
# arguments resides in the consumer's runpath.
- objs_dir_keys = set(self._path_key(os.path.dirname(x)) for x in objs)
+ objs_dir_keys = {self._path_key(os.path.dirname(x)) for x in objs}
for consumer_key in soname_node.consumers:
if consumer_key in satisfied_consumer_keys:
continue
diff --git a/lib/portage/util/_dyn_libs/PreservedLibsRegistry.py b/lib/portage/util/_dyn_libs/PreservedLibsRegistry.py
index fd5c97362..c60b52156 100644
--- a/lib/portage/util/_dyn_libs/PreservedLibsRegistry.py
+++ b/lib/portage/util/_dyn_libs/PreservedLibsRegistry.py
@@ -71,7 +71,7 @@ class PreservedLibsRegistry:
"rb",
)
content = f.read()
- except EnvironmentError as e:
+ except OSError as e:
if not hasattr(e, "errno"):
raise
elif e.errno == errno.ENOENT:
@@ -144,10 +144,10 @@ class PreservedLibsRegistry:
else:
pickle.dump(self._data, f, protocol=2)
f.close()
- except EnvironmentError as e:
+ except OSError as e:
if e.errno != PermissionDenied.errno:
writemsg_level(
- "!!! %s %s\n" % (e, self._filename),
+ "!!! {} {}\n".format(e, self._filename),
level=logging.ERROR,
noiselevel=-1,
)
diff --git a/lib/portage/util/_dyn_libs/display_preserved_libs.py b/lib/portage/util/_dyn_libs/display_preserved_libs.py
index e81ca369c..31e2c6285 100644
--- a/lib/portage/util/_dyn_libs/display_preserved_libs.py
+++ b/lib/portage/util/_dyn_libs/display_preserved_libs.py
@@ -20,12 +20,12 @@ def display_preserved_libs(vardb, verbose=False):
linkmap.rebuild()
except portage.exception.CommandNotFound as e:
portage.util.writemsg_level(
- "!!! Command Not Found: %s\n" % (e,), level=logging.ERROR, noiselevel=-1
+ "!!! Command Not Found: {}\n".format(e), level=logging.ERROR, noiselevel=-1
)
else:
search_for_owners = set()
for cpv in plibdata:
- internal_plib_keys = set(linkmap._obj_key(f) for f in plibdata[cpv])
+ internal_plib_keys = {linkmap._obj_key(f) for f in plibdata[cpv]}
for f in plibdata[cpv]:
if f in consumer_map:
continue
@@ -67,7 +67,7 @@ def display_preserved_libs(vardb, verbose=False):
for alt_paths in samefile_map.values():
alt_paths = sorted(alt_paths)
for p in alt_paths:
- print(colorize("WARN", " * ") + " - %s" % (p,))
+ print(colorize("WARN", " * ") + " - {}".format(p))
f = alt_paths[0]
consumers = consumer_map.get(f, [])
consumers_non_preserved = [c for c in consumers if c not in all_preserved]
@@ -93,7 +93,8 @@ def display_preserved_libs(vardb, verbose=False):
else:
owners_desc = ", ".join(x.mycpv for x in owners.get(c, []))
print(
- colorize("WARN", " * ") + " used by %s (%s)" % (c, owners_desc)
+ colorize("WARN", " * ")
+ + " used by {} ({})".format(c, owners_desc)
)
if not verbose and len(consumers) > max_display:
print(
diff --git a/lib/portage/util/_dyn_libs/soname_deps_qa.py b/lib/portage/util/_dyn_libs/soname_deps_qa.py
index 532c7bbab..5535b1137 100644
--- a/lib/portage/util/_dyn_libs/soname_deps_qa.py
+++ b/lib/portage/util/_dyn_libs/soname_deps_qa.py
@@ -60,18 +60,17 @@ def _get_unresolved_soname_deps(metadata_dir, all_provides):
@return: list of tuple(filename, tuple(unresolved sonames))
"""
try:
- with io.open(
+ with open(
_unicode_encode(
os.path.join(metadata_dir, "REQUIRES"),
encoding=_encodings["fs"],
errors="strict",
),
- mode="rt",
encoding=_encodings["repo.content"],
errors="strict",
) as f:
requires = frozenset(parse_soname_deps(f.read()))
- except EnvironmentError:
+ except OSError:
return []
unresolved_by_category = {}
@@ -82,9 +81,8 @@ def _get_unresolved_soname_deps(metadata_dir, all_provides):
)
needed_filename = os.path.join(metadata_dir, "NEEDED.ELF.2")
- with io.open(
+ with open(
_unicode_encode(needed_filename, encoding=_encodings["fs"], errors="strict"),
- mode="rt",
encoding=_encodings["repo.content"],
errors="strict",
) as f:
diff --git a/lib/portage/util/_info_files.py b/lib/portage/util/_info_files.py
index 4cea4f657..b20906f58 100644
--- a/lib/portage/util/_info_files.py
+++ b/lib/portage/util/_info_files.py
@@ -68,7 +68,7 @@ def chk_updated_info_files(root, infodirs, prev_mtimes):
try:
os.rename(dir_file + ext, dir_file + ext + ".old")
moved_old_dir = True
- except EnvironmentError as e:
+ except OSError as e:
if e.errno != errno.ENOENT:
raise
del e
@@ -114,7 +114,7 @@ def chk_updated_info_files(root, infodirs, prev_mtimes):
for ext in dir_extensions:
try:
os.rename(dir_file + ext + ".old", dir_file + ext)
- except EnvironmentError as e:
+ except OSError as e:
if e.errno != errno.ENOENT:
raise
del e
@@ -124,7 +124,7 @@ def chk_updated_info_files(root, infodirs, prev_mtimes):
for ext in dir_extensions:
try:
os.unlink(dir_file + ext + ".old")
- except EnvironmentError as e:
+ except OSError as e:
if e.errno != errno.ENOENT:
raise
del e
diff --git a/lib/portage/util/_pty.py b/lib/portage/util/_pty.py
index e58f95e0a..c70da8511 100644
--- a/lib/portage/util/_pty.py
+++ b/lib/portage/util/_pty.py
@@ -56,7 +56,7 @@ def _create_pty_or_pipe(copy_term_size=None):
try:
master_fd, slave_fd = pty.openpty()
got_pty = True
- except EnvironmentError as e:
+ except OSError as e:
_disable_openpty = True
writemsg("openpty failed: '%s'\n" % str(e), noiselevel=-1)
del e
diff --git a/lib/portage/util/_xattr.py b/lib/portage/util/_xattr.py
index f4d3ad733..89eb4a366 100644
--- a/lib/portage/util/_xattr.py
+++ b/lib/portage/util/_xattr.py
@@ -59,7 +59,7 @@ class _XattrSystemCommands(_XattrGetAll):
@classmethod
def get(cls, item, name, nofollow=False, namespace=None):
if namespace:
- name = "%s.%s" % (namespace, name)
+ name = "{}.{}".format(namespace, name)
cmd = ["getfattr", "--absolute-names", "-n", name, item]
if nofollow:
cmd += ["-h"]
@@ -75,14 +75,14 @@ class _XattrSystemCommands(_XattrGetAll):
@classmethod
def set(cls, item, name, value, _flags=0, namespace=None):
if namespace:
- name = "%s.%s" % (namespace, name)
+ name = "{}.{}".format(namespace, name)
cmd = ["setfattr", "-n", name, "-v", value, item]
cls._call(cmd)
@classmethod
def remove(cls, item, name, nofollow=False, namespace=None):
if namespace:
- name = "%s.%s" % (namespace, name)
+ name = "{}.{}".format(namespace, name)
cmd = ["setfattr", "-x", name, item]
if nofollow:
cmd += ["-h"]
diff --git a/lib/portage/util/backoff.py b/lib/portage/util/backoff.py
index f4a9bfc33..b5714dfe5 100644
--- a/lib/portage/util/backoff.py
+++ b/lib/portage/util/backoff.py
@@ -52,4 +52,4 @@ class RandomExponentialBackoff(ExponentialBackoff):
"""
def __call__(self, tries):
- return random.random() * super(RandomExponentialBackoff, self).__call__(tries)
+ return random.random() * super().__call__(tries)
diff --git a/lib/portage/util/bin_entry_point.py b/lib/portage/util/bin_entry_point.py
index acc16d544..bb012b6b7 100644
--- a/lib/portage/util/bin_entry_point.py
+++ b/lib/portage/util/bin_entry_point.py
@@ -18,7 +18,7 @@ def bin_entry_point():
"""
script_path = os.path.join(PORTAGE_BIN_PATH, os.path.basename(sys.argv[0]))
if os.access(script_path, os.X_OK):
- with open(script_path, "rt") as f:
+ with open(script_path) as f:
shebang = f.readline()
python_match = re.search(r"/python[\d\.]*\s+([^/]*)\s+$", shebang)
if python_match:
diff --git a/lib/portage/util/compression_probe.py b/lib/portage/util/compression_probe.py
index 66c7aeb47..312f2d368 100644
--- a/lib/portage/util/compression_probe.py
+++ b/lib/portage/util/compression_probe.py
@@ -95,7 +95,7 @@ def compression_probe(f):
_unicode_encode(f, encoding=_encodings["fs"], errors="strict"),
mode="rb",
)
- except IOError as e:
+ except OSError as e:
if e.errno == PermissionDenied.errno:
raise PermissionDenied(f)
elif e.errno in (errno.ENOENT, errno.ESTALE):
diff --git a/lib/portage/util/configparser.py b/lib/portage/util/configparser.py
index 703fad408..9f39dffe1 100644
--- a/lib/portage/util/configparser.py
+++ b/lib/portage/util/configparser.py
@@ -50,13 +50,12 @@ def read_configs(parser, paths):
if isinstance(p, str):
f = None
try:
- f = io.open(
+ f = open(
_unicode_encode(p, encoding=_encodings["fs"], errors="strict"),
- mode="r",
encoding=_encodings["repo.content"],
errors="replace",
)
- except EnvironmentError:
+ except OSError:
pass
else:
# The 'source' keyword argument is needed since otherwise
@@ -73,5 +72,7 @@ def read_configs(parser, paths):
read_file(p, **kwargs)
else:
raise TypeError(
- "Unsupported type %r of element %r of 'paths' argument" % (type(p), p)
+ "Unsupported type {!r} of element {!r} of 'paths' argument".format(
+ type(p), p
+ )
)
diff --git a/lib/portage/util/digraph.py b/lib/portage/util/digraph.py
index 360d22335..0a8307ce4 100644
--- a/lib/portage/util/digraph.py
+++ b/lib/portage/util/digraph.py
@@ -312,7 +312,7 @@ class digraph:
writemsg(s, noiselevel=-1)
for node in self.nodes:
- output("%s " % (node,))
+ output("{} ".format(node))
if self.nodes[node][0]:
output("depends on\n")
else:
@@ -330,7 +330,7 @@ class digraph:
if start not in self:
raise KeyError(start)
- queue, enqueued = deque([(None, start)]), set([start])
+ queue, enqueued = deque([(None, start)]), {start}
while queue:
parent, n = queue.popleft()
yield parent, n
diff --git a/lib/portage/util/env_update.py b/lib/portage/util/env_update.py
index bac5b6e7a..1507dbe6c 100644
--- a/lib/portage/util/env_update.py
+++ b/lib/portage/util/env_update.py
@@ -119,25 +119,23 @@ def _env_update(makelinks, target_root, prev_mtimes, contents, env, writemsg_lev
fns = templist
del templist
- space_separated = set(["CONFIG_PROTECT", "CONFIG_PROTECT_MASK"])
- colon_separated = set(
- [
- "ADA_INCLUDE_PATH",
- "ADA_OBJECTS_PATH",
- "CLASSPATH",
- "INFODIR",
- "INFOPATH",
- "KDEDIRS",
- "LDPATH",
- "MANPATH",
- "PATH",
- "PKG_CONFIG_PATH",
- "PRELINK_PATH",
- "PRELINK_PATH_MASK",
- "PYTHONPATH",
- "ROOTPATH",
- ]
- )
+ space_separated = {"CONFIG_PROTECT", "CONFIG_PROTECT_MASK"}
+ colon_separated = {
+ "ADA_INCLUDE_PATH",
+ "ADA_OBJECTS_PATH",
+ "CLASSPATH",
+ "INFODIR",
+ "INFOPATH",
+ "KDEDIRS",
+ "LDPATH",
+ "MANPATH",
+ "PATH",
+ "PKG_CONFIG_PATH",
+ "PRELINK_PATH",
+ "PRELINK_PATH_MASK",
+ "PYTHONPATH",
+ "ROOTPATH",
+ }
config_list = []
@@ -195,9 +193,8 @@ def _env_update(makelinks, target_root, prev_mtimes, contents, env, writemsg_lev
ldsoconf_path = os.path.join(eroot, "etc", "ld.so.conf")
try:
- myld = io.open(
+ myld = open(
_unicode_encode(ldsoconf_path, encoding=_encodings["fs"], errors="strict"),
- mode="r",
encoding=_encodings["content"],
errors="replace",
)
@@ -209,7 +206,7 @@ def _env_update(makelinks, target_root, prev_mtimes, contents, env, writemsg_lev
if x[:1] == "#":
continue
oldld.append(x[:-1])
- except (IOError, OSError) as e:
+ except OSError as e:
if e.errno != errno.ENOENT:
raise
oldld = None
@@ -248,7 +245,7 @@ def _env_update(makelinks, target_root, prev_mtimes, contents, env, writemsg_lev
newprelink.write("# contents of /etc/env.d directory\n")
for x in sorted(potential_lib_dirs) + ["bin", "sbin"]:
- newprelink.write("-l /%s\n" % (x,))
+ newprelink.write("-l /{}\n".format(x))
prelink_paths = set()
prelink_paths |= set(specials.get("LDPATH", []))
prelink_paths |= set(specials.get("PATH", []))
@@ -269,9 +266,9 @@ def _env_update(makelinks, target_root, prev_mtimes, contents, env, writemsg_lev
plmasked = 1
break
if not plmasked:
- newprelink.write("-h %s\n" % (x,))
+ newprelink.write("-h {}\n".format(x))
for x in prelink_path_mask:
- newprelink.write("-b %s\n" % (x,))
+ newprelink.write("-b {}\n".format(x))
newprelink.close()
# Migration code path. If /etc/prelink.conf was generated by us, then
@@ -291,7 +288,7 @@ def _env_update(makelinks, target_root, prev_mtimes, contents, env, writemsg_lev
f = atomic_ofstream(prelink_conf)
f.write("-c /etc/prelink.conf.d/*.conf\n")
f.close()
- except IOError as e:
+ except OSError as e:
if e.errno != errno.ENOENT:
raise
@@ -367,7 +364,7 @@ def _env_update(makelinks, target_root, prev_mtimes, contents, env, writemsg_lev
writemsg_level(
_(">>> Regenerating %setc/ld.so.cache...\n") % (target_root,)
)
- os.system("cd / ; %s -X -r '%s'" % (ldconfig, target_root))
+ os.system("cd / ; {} -X -r '{}'".format(ldconfig, target_root))
elif ostype in ("FreeBSD", "DragonFly"):
writemsg_level(
_(">>> Regenerating %svar/run/ld-elf.so.hints...\n") % target_root
@@ -399,9 +396,9 @@ def _env_update(makelinks, target_root, prev_mtimes, contents, env, writemsg_lev
for k in env_keys:
v = env[k]
if v.startswith("$") and not v.startswith("${"):
- outfile.write("export %s=$'%s'\n" % (k, v[1:]))
+ outfile.write("export {}=$'{}'\n".format(k, v[1:]))
else:
- outfile.write("export %s='%s'\n" % (k, v))
+ outfile.write("export {}='{}'\n".format(k, v))
# Create the systemd user environment configuration file
# /etc/environment.d/10-gentoo-env.conf with the
@@ -437,5 +434,5 @@ def _env_update(makelinks, target_root, prev_mtimes, contents, env, writemsg_lev
outfile = atomic_ofstream(os.path.join(eroot, "etc", "csh.env"))
outfile.write(cenvnotice)
for x in env_keys:
- outfile.write("setenv %s '%s'\n" % (x, env[x]))
+ outfile.write("setenv {} '{}'\n".format(x, env[x]))
outfile.close()
diff --git a/lib/portage/util/futures/_asyncio/streams.py b/lib/portage/util/futures/_asyncio/streams.py
index 95a4244a6..6b902975c 100644
--- a/lib/portage/util/futures/_asyncio/streams.py
+++ b/lib/portage/util/futures/_asyncio/streams.py
@@ -76,7 +76,7 @@ async def _writer(output_file, content, loop=DeprecationWarning):
while content:
try:
content = content[os.write(fd, content) :]
- except EnvironmentError as e:
+ except OSError as e:
if e.errno != errno.EAGAIN:
raise
waiter = loop.create_future()
diff --git a/lib/portage/util/futures/extendedfutures.py b/lib/portage/util/futures/extendedfutures.py
index c23feafb5..d7e3d3736 100644
--- a/lib/portage/util/futures/extendedfutures.py
+++ b/lib/portage/util/futures/extendedfutures.py
@@ -37,7 +37,7 @@ class ExtendedFuture(Future):
set.
"""
self.default_result = default_result
- super(ExtendedFuture, self).__init__()
+ super().__init__()
self.set = self.set_result
def set_result(self, data, ignore_InvalidState=False):
@@ -48,11 +48,11 @@ class ExtendedFuture(Future):
"""
if ignore_InvalidState:
try:
- super(ExtendedFuture, self).set_result(data)
+ super().set_result(data)
except InvalidStateError:
pass
else:
- super(ExtendedFuture, self).set_result(data)
+ super().set_result(data)
def get(self, default=UNSET_CONST.result()):
"""Convienience function to wrap result() but adds an optional
@@ -77,12 +77,12 @@ class ExtendedFuture(Future):
def exception(self):
try:
- return super(ExtendedFuture, self).exception(timeout=0)
+ return super().exception(timeout=0)
except concurrent.futures.TimeoutError:
raise InvalidStateError
def result(self):
try:
- return super(ExtendedFuture, self).result(timeout=0)
+ return super().result(timeout=0)
except concurrent.futures.TimeoutError:
raise InvalidStateError
diff --git a/lib/portage/util/futures/iter_completed.py b/lib/portage/util/futures/iter_completed.py
index f4b4e5e0b..5ee0b48c7 100644
--- a/lib/portage/util/futures/iter_completed.py
+++ b/lib/portage/util/futures/iter_completed.py
@@ -34,8 +34,7 @@ def iter_completed(futures, max_jobs=None, max_load=None, loop=None):
for future_done_set in async_iter_completed(
futures, max_jobs=max_jobs, max_load=max_load, loop=loop
):
- for future in loop.run_until_complete(future_done_set):
- yield future
+ yield from loop.run_until_complete(future_done_set)
def async_iter_completed(futures, max_jobs=None, max_load=None, loop=None):
diff --git a/lib/portage/util/futures/unix_events.py b/lib/portage/util/futures/unix_events.py
index c9855aefb..374497010 100644
--- a/lib/portage/util/futures/unix_events.py
+++ b/lib/portage/util/futures/unix_events.py
@@ -70,11 +70,11 @@ class _AsyncioEventLoopPolicy(_PortageEventLoopPolicy):
def get_event_loop(self):
self._check_recursion()
- return super(_AsyncioEventLoopPolicy, self).get_event_loop()
+ return super().get_event_loop()
def get_child_watcher(self):
self._check_recursion()
- return super(_AsyncioEventLoopPolicy, self).get_child_watcher()
+ return super().get_child_watcher()
DefaultEventLoopPolicy = _AsyncioEventLoopPolicy
diff --git a/lib/portage/util/hooks.py b/lib/portage/util/hooks.py
index 204ad4122..c6367118f 100644
--- a/lib/portage/util/hooks.py
+++ b/lib/portage/util/hooks.py
@@ -46,7 +46,7 @@ def perform_hooks(rel_directory, *argv, prefix="/"):
if retval != portage.os.EX_OK:
writemsg_level(
- " %s Spawn failed for: %s, %s\n" % (bad("*"), name, filepath),
+ " {} Spawn failed for: {}, {}\n".format(bad("*"), name, filepath),
level=logging.ERROR,
noiselevel=-1,
)
diff --git a/lib/portage/util/listdir.py b/lib/portage/util/listdir.py
index e7c436282..5d1765ced 100644
--- a/lib/portage/util/listdir.py
+++ b/lib/portage/util/listdir.py
@@ -29,7 +29,7 @@ def cacheddir(
pathstat = os.stat(mypath)
if not stat.S_ISDIR(pathstat.st_mode):
raise DirectoryNotFound(mypath)
- except EnvironmentError as e:
+ except OSError as e:
if e.errno == PermissionDenied.errno:
raise PermissionDenied(mypath)
del e
@@ -39,7 +39,7 @@ def cacheddir(
else:
try:
fpaths = os.listdir(mypath)
- except EnvironmentError as e:
+ except OSError as e:
if e.errno != errno.EACCES:
raise
del e
@@ -60,7 +60,7 @@ def cacheddir(
ftype.append(2)
else:
ftype.append(3)
- except (IOError, OSError):
+ except OSError:
ftype.append(3)
if ignorelist or ignorecvs:
diff --git a/lib/portage/util/locale.py b/lib/portage/util/locale.py
index 8fb6cb6eb..54b1e11a6 100644
--- a/lib/portage/util/locale.py
+++ b/lib/portage/util/locale.py
@@ -1,4 +1,3 @@
-# -*- coding:utf-8 -*-
# Copyright 2015-2020 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
@@ -78,14 +77,14 @@ def _check_locale(silent):
if uc != ruc:
msg.extend(
[
- " %s -> %s" % (chars(lc), chars(ruc)),
+ " {} -> {}".format(chars(lc), chars(ruc)),
" %28s: %s" % ("expected", chars(uc)),
]
)
if lc != rlc:
msg.extend(
[
- " %s -> %s" % (chars(uc), chars(rlc)),
+ " {} -> {}".format(chars(uc), chars(rlc)),
" %28s: %s" % ("expected", chars(lc)),
]
)
diff --git a/lib/portage/util/movefile.py b/lib/portage/util/movefile.py
index 4dc08af26..b3f186eef 100644
--- a/lib/portage/util/movefile.py
+++ b/lib/portage/util/movefile.py
@@ -81,7 +81,7 @@ def _copyxattr(src, dest, exclude=None):
"""Copy the extended attributes from |src| to |dest|"""
try:
attrs = xattr.list(src)
- except (OSError, IOError) as e:
+ except OSError as e:
if e.errno != OperationNotSupported.errno:
raise
attrs = ()
@@ -97,7 +97,7 @@ def _copyxattr(src, dest, exclude=None):
try:
xattr.set(dest, attr, xattr.get(src, attr))
raise_exception = False
- except (OSError, IOError):
+ except OSError:
raise_exception = True
if raise_exception:
raise OperationNotSupported(
@@ -151,13 +151,13 @@ def movefile(
writemsg(
"!!! %s\n" % _("Stating source file failed... movefile()"), noiselevel=-1
)
- writemsg("!!! %s\n" % (e,), noiselevel=-1)
+ writemsg("!!! {}\n".format(e), noiselevel=-1)
return None
destexists = 1
try:
dstat = os.lstat(dest)
- except (OSError, IOError):
+ except OSError:
dstat = os.lstat(os.path.dirname(dest))
destexists = 0
@@ -235,8 +235,8 @@ def movefile(
writemsg(
"!!! %s\n" % _("failed to properly create symlink:"), noiselevel=-1
)
- writemsg("!!! %s -> %s\n" % (dest, target), noiselevel=-1)
- writemsg("!!! %s\n" % (e,), noiselevel=-1)
+ writemsg("!!! {} -> {}\n".format(dest, target), noiselevel=-1)
+ writemsg("!!! {}\n".format(e), noiselevel=-1)
return None
hardlinked = False
@@ -247,7 +247,7 @@ def movefile(
if hardlink_candidates:
head, tail = os.path.split(dest)
hardlink_tmp = os.path.join(
- head, ".%s._portage_merge_.%s" % (tail, portage.getpid())
+ head, ".{}._portage_merge_.{}".format(tail, portage.getpid())
)
try:
os.unlink(hardlink_tmp)
@@ -258,7 +258,7 @@ def movefile(
% (hardlink_tmp,),
noiselevel=-1,
)
- writemsg("!!! %s\n" % (e,), noiselevel=-1)
+ writemsg("!!! {}\n".format(e), noiselevel=-1)
return None
del e
for hardlink_src in hardlink_candidates:
@@ -274,7 +274,7 @@ def movefile(
_("!!! Failed to rename %s to %s\n") % (hardlink_tmp, dest),
noiselevel=-1,
)
- writemsg("!!! %s\n" % (e,), noiselevel=-1)
+ writemsg("!!! {}\n".format(e), noiselevel=-1)
return None
hardlinked = True
try:
@@ -302,7 +302,7 @@ def movefile(
% {"src": src, "dest": dest},
noiselevel=-1,
)
- writemsg("!!! %s\n" % (e,), noiselevel=-1)
+ writemsg("!!! {}\n".format(e), noiselevel=-1)
return None
# Invalid cross-device-link 'bind' mounted or actually Cross-Device
if renamefailed:
@@ -332,7 +332,7 @@ def movefile(
)
msg = textwrap.wrap(msg, 65)
for line in msg:
- writemsg("!!! %s\n" % (line,), noiselevel=-1)
+ writemsg("!!! {}\n".format(line), noiselevel=-1)
raise
_rename(dest_tmp_bytes, dest_bytes)
_os.unlink(src_bytes)
@@ -344,7 +344,7 @@ def movefile(
% {"src": src, "dest": dest},
noiselevel=-1,
)
- writemsg("!!! %s\n" % (e,), noiselevel=-1)
+ writemsg("!!! {}\n".format(e), noiselevel=-1)
return None
finally:
if not success:
diff --git a/lib/portage/util/mtimedb.py b/lib/portage/util/mtimedb.py
index 098350526..c18b74e26 100644
--- a/lib/portage/util/mtimedb.py
+++ b/lib/portage/util/mtimedb.py
@@ -67,7 +67,7 @@ class MtimeDB(dict):
try:
f = open(_unicode_encode(filename), "rb")
content = f.read()
- except EnvironmentError as e:
+ except OSError as e:
if getattr(e, "errno", None) in (errno.ENOENT, errno.EACCES):
pass
else:
@@ -131,7 +131,7 @@ class MtimeDB(dict):
d["version"] = str(portage.VERSION)
try:
f = atomic_ofstream(self.filename, mode="wb")
- except EnvironmentError:
+ except OSError:
pass
else:
if self._json_write:
diff --git a/lib/portage/util/netlink.py b/lib/portage/util/netlink.py
index b32010654..508c92676 100644
--- a/lib/portage/util/netlink.py
+++ b/lib/portage/util/netlink.py
@@ -65,7 +65,7 @@ class RtNetlink:
self.addr = (0, 0)
try:
self.sock.bind(self.addr)
- except socket.error:
+ except OSError:
self.sock.close()
raise
diff --git a/lib/portage/util/socks5.py b/lib/portage/util/socks5.py
index 820240571..147e37054 100644
--- a/lib/portage/util/socks5.py
+++ b/lib/portage/util/socks5.py
@@ -95,7 +95,7 @@ class ProxyManager:
try:
s = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
s.connect(self.socket_path)
- except EnvironmentError as e:
+ except OSError as e:
if e.errno != errno.ENOENT:
raise
await asyncio.sleep(0.2)
diff --git a/lib/portage/util/whirlpool.py b/lib/portage/util/whirlpool.py
index 9178d70c7..e478922ef 100644
--- a/lib/portage/util/whirlpool.py
+++ b/lib/portage/util/whirlpool.py
@@ -2180,7 +2180,7 @@ def WhirlpoolInit(ctx):
def WhirlpoolAdd(source, sourceBits, ctx):
if not isinstance(source, bytes):
- raise TypeError("Expected %s, got %s" % (bytes, type(source)))
+ raise TypeError("Expected {}, got {}".format(bytes, type(source)))
if sourceBits == 0:
return
diff --git a/lib/portage/util/writeable_check.py b/lib/portage/util/writeable_check.py
index be73745a1..247e64721 100644
--- a/lib/portage/util/writeable_check.py
+++ b/lib/portage/util/writeable_check.py
@@ -1,4 +1,3 @@
-# -*- coding:utf-8 -*-
# Copyright 2014-2015 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
"""
@@ -45,9 +44,8 @@ def linux_ro_checker(dir_list):
invalids = []
try:
- with io.open(
+ with open(
"/proc/self/mountinfo",
- mode="r",
encoding=_encodings["content"],
errors="replace",
) as f:
@@ -86,7 +84,7 @@ def linux_ro_checker(dir_list):
# If /proc/self/mountinfo can't be read, assume that there are no RO
# filesystems and return.
- except EnvironmentError:
+ except OSError:
writemsg_level(
_("!!! /proc/self/mountinfo cannot be read"),
level=logging.WARNING,
diff --git a/lib/portage/versions.py b/lib/portage/versions.py
index e1f798bdf..c7a247b40 100644
--- a/lib/portage/versions.py
+++ b/lib/portage/versions.py
@@ -520,7 +520,8 @@ def cpv_getkey(mycpv, eapi=None):
return mysplit[0] + "/" + mysplit[1]
warnings.warn(
- "portage.versions.cpv_getkey() " + "called with invalid cpv: '%s'" % (mycpv,),
+ "portage.versions.cpv_getkey() "
+ + "called with invalid cpv: '{}'".format(mycpv),
DeprecationWarning,
stacklevel=2,
)
diff --git a/lib/portage/xml/metadata.py b/lib/portage/xml/metadata.py
index 8eaa37a3f..df3ce8121 100644
--- a/lib/portage/xml/metadata.py
+++ b/lib/portage/xml/metadata.py
@@ -83,7 +83,7 @@ class _Maintainer:
setattr(self, attr.tag, attr.text)
def __repr__(self):
- return "<%s %r>" % (self.__class__.__name__, self.email)
+ return "<{} {!r}>".format(self.__class__.__name__, self.email)
class _Useflag:
@@ -113,7 +113,7 @@ class _Useflag:
self.description = re.sub(r"\s+", " ", _desc)
def __repr__(self):
- return "<%s %r>" % (self.__class__.__name__, self.name)
+ return "<{} {!r}>".format(self.__class__.__name__, self.name)
class _Upstream:
@@ -145,7 +145,7 @@ class _Upstream:
self.remoteids = self.upstream_remoteids()
def __repr__(self):
- return "<%s %r>" % (self.__class__.__name__, self.__dict__)
+ return "<{} {!r}>".format(self.__class__.__name__, self.__dict__)
def upstream_bugtrackers(self):
"""Retrieve upstream bugtracker location from xml node."""
@@ -200,7 +200,7 @@ class MetaDataXML:
except ImportError:
pass
except ExpatError as e:
- raise SyntaxError("%s" % (e,))
+ raise SyntaxError("{}".format(e))
if isinstance(herds, etree.ElementTree):
herds_etree = herds
@@ -219,7 +219,7 @@ class MetaDataXML:
self._upstream = None
def __repr__(self):
- return "<%s %r>" % (self.__class__.__name__, self.metadata_xml_path)
+ return "<{} {!r}>".format(self.__class__.__name__, self.metadata_xml_path)
def _get_herd_email(self, herd):
"""Get a herd's email address.
@@ -239,7 +239,7 @@ class MetaDataXML:
),
parser=etree.XMLParser(target=_MetadataTreeBuilder()),
)
- except (ImportError, IOError, SyntaxError):
+ except (ImportError, OSError, SyntaxError):
return None
# Some special herds are not listed in herds.xml
diff --git a/lib/portage/xpak.py b/lib/portage/xpak.py
index b586c0be8..e2bb862fe 100644
--- a/lib/portage/xpak.py
+++ b/lib/portage/xpak.py
@@ -342,7 +342,7 @@ class tbz2:
the directory provided. Raises IOError if scan() fails.
        Returns result of unpackinfo()."""
if not self.scan():
- raise IOError
+ raise OSError
if cleanup:
self.cleanup(datadir)
if not os.path.exists(datadir):
@@ -390,7 +390,7 @@ class tbz2:
"ab+",
)
if not myfile:
- raise IOError
+ raise OSError
myfile.seek(-self.xpaksize, 2) # 0,2 or -0,2 just mean EOF.
myfile.truncate()
myfile.write(xpdata + encodeint(len(xpdata)) + b"STOP")
diff --git a/runtests b/runtests
index b4e6fc2af..503c9b713 100755
--- a/runtests
+++ b/runtests
@@ -138,7 +138,9 @@ def main(argv):
cmd = [prog, "-b", "-Wd", "lib/portage/tests/runTests.py"] + args
if os.access(prog, os.X_OK):
print(
- "%sTesting with Python %s...%s" % (colors.GOOD, ver, colors.NORMAL)
+ "{}Testing with Python {}...{}".format(
+ colors.GOOD, ver, colors.NORMAL
+ )
)
statuses.append((ver, subprocess.call(cmd)))
elif not ignore_missing:
@@ -148,7 +150,7 @@ def main(argv):
)
statuses.append((ver, 1))
else:
- print("%sSkip Python %s...%s" % (colors.WARN, ver, colors.NORMAL))
+ print("{}Skip Python {}...{}".format(colors.WARN, ver, colors.NORMAL))
print()
finally:
if tempdir is not None:
@@ -162,7 +164,7 @@ def main(argv):
print("\nSummary:\n")
width = 10
header = "| %-*s | %s" % (width, "Version", "Status")
- print("%s\n|%s" % (header, "-" * (len(header) - 1)))
+ print("{}\n|{}".format(header, "-" * (len(header) - 1)))
exit_status = 0
for ver, status in statuses:
exit_status += status
diff --git a/setup.py b/setup.py
index 58823ab3b..07f72fc68 100755
--- a/setup.py
+++ b/setup.py
@@ -47,7 +47,7 @@ autodetect_pip = os.path.basename(os.environ.get("_", "")) == "pip" or os.path.b
).startswith("pip-")
venv_prefix = "" if sys.prefix == sys.base_prefix else sys.prefix
create_entry_points = bool(autodetect_pip or venv_prefix)
-with open(os.path.join(os.path.dirname(__file__), "README.md"), "rt") as f:
+with open(os.path.join(os.path.dirname(__file__), "README.md")) as f:
long_description = f.read()
# TODO:
@@ -129,7 +129,7 @@ class build_man(Command):
if not newer(source, target) and not newer(__file__, target):
continue
- print("copying and updating %s -> %s" % (source, target))
+ print("copying and updating {} -> {}".format(source, target))
with codecs.open(source, "r", "utf8") as f:
data = f.readlines()
@@ -679,7 +679,7 @@ class build_tests(x_build_scripts_custom):
)
os.unlink(conf_dir)
conf_src = os.path.relpath("cnf", self.top_dir)
- print("Symlinking %s -> %s" % (conf_dir, conf_src))
+ print("Symlinking {} -> {}".format(conf_dir, conf_src))
os.symlink(conf_src, conf_dir)
source_path = os.path.realpath(__file__)