Revert "meta: replace os.popen with subprocess.Popen"
This reverts commit e83d8e58a6b107eea87df0ec233a1bc932b2c6e as the conversion
is not correct. It replaces readlines() calls, which generate a list of lines,
with what are effectively single strings. split("\n") calls are missing in many
cases, so this needs to be reverted until it gets fixed.
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
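
As a side note (not part of the commit): a minimal Python sketch, using a made-up objdump-style output string, of why the conversion broke these call sites. os.popen(cmd).readlines() returns a list of lines, while bb.process.run(cmd)[0] returns the whole output as one string, so iterating over it without an explicit split("\n") walks over characters rather than lines.

    # Hypothetical output, standing in for what objdump/git/svn would print.
    output = "  NEEDED   libc.so.6\n  SONAME   libfoo.so.1\n"

    # os.popen(cmd).readlines(): one string per line, as the per-line regexes expect.
    for l in ["  NEEDED   libc.so.6\n", "  SONAME   libfoo.so.1\n"]:
        print(repr(l))            # '  NEEDED   libc.so.6\n', '  SONAME   libfoo.so.1\n'

    # bb.process.run(cmd)[0]: a single string; iterating it yields characters.
    for l in output:
        print(repr(l))            # ' ', ' ', 'N', 'E', ...

    # The conversion is only equivalent with an explicit split:
    for l in output.split("\n"):
        print(repr(l))            # '  NEEDED   libc.so.6', '  SONAME   libfoo.so.1', ''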
parent bc386b8934
commit 0d9e893711
@@ -60,14 +60,10 @@ python debian_package_name_hook () {
         for f in files:
             if so_re.match(f):
                 fp = os.path.join(root, f)
-                cmd = (d.getVar('BUILD_PREFIX', True) or "") + "objdump -p " + fp
-                try:
-                    lines = ""
-                    lines = bb.process.run(cmd)[0]
-                # Some ".so" maybe ascii text, e.g: /usr/lib64/libpthread.so,
-                # ingore those errors.
-                except Exception:
-                    sys.exc_clear()
+                cmd = (d.getVar('BUILD_PREFIX', True) or "") + "objdump -p " + fp + " 2>/dev/null"
+                fd = os.popen(cmd)
+                lines = fd.readlines()
+                fd.close()
                 for l in lines:
                     m = re.match("\s+SONAME\s+([^\s]*)", l)
                     if m and not m.group(1) in sonames:
@@ -564,10 +564,10 @@ python do_checkpkg() {
                 gitproto = parm['protocol']
             else:
                 gitproto = "git"
-            gitcmd = "git ls-remote %s://%s%s%s *tag*" % (gitproto, gituser, host, path)
-            gitcmd2 = "git ls-remote %s://%s%s%s HEAD" % (gitproto, gituser, host, path)
-            tmp = bb.process.run(gitcmd)[0]
-            tmp2 = bb.process.run(gitcmd2)[0]
+            gitcmd = "git ls-remote %s://%s%s%s *tag* 2>&1" % (gitproto, gituser, host, path)
+            gitcmd2 = "git ls-remote %s://%s%s%s HEAD 2>&1" % (gitproto, gituser, host, path)
+            tmp = os.popen(gitcmd).read()
+            tmp2 = os.popen(gitcmd2).read()
             #This is for those repo have tag like: refs/tags/1.2.2
             if tmp:
                 tmpline = tmp.split("\n")
@@ -613,9 +613,9 @@ python do_checkpkg() {
             if 'rev' in parm:
                 pcurver = parm['rev']

-            svncmd = "svn info %s %s://%s%s/%s/" % (" ".join(options), svnproto, host, path, parm["module"])
+            svncmd = "svn info %s %s://%s%s/%s/ 2>&1" % (" ".join(options), svnproto, host, path, parm["module"])
             print svncmd
-            svninfo = bb.process.run(svncmd)[0]
+            svninfo = os.popen(svncmd).read()
             for line in svninfo.split("\n"):
                 if re.search("^Last Changed Rev:", line):
                     pupver = line.split(" ")[-1]
@@ -54,7 +54,7 @@ def create_path(compilers, bb, d):
         staging += "-kernel"

     #check if the icecc path is set by the user
-    icecc = d.getVar('ICECC_PATH') or bb.process.run("which icecc")[0][:-1]
+    icecc = d.getVar('ICECC_PATH') or os.popen("which icecc").read()[:-1]

     # Create the dir if necessary
     try:
@@ -151,9 +151,9 @@ def icc_path(bb,d):

 def icc_get_tool(bb, d, tool):
     if icc_is_native(bb, d):
-        return bb.process.run("which %s" % tool)[0][:-1]
+        return os.popen("which %s" % tool).read()[:-1]
     elif icc_is_kernel(bb, d):
-        return bb.process.run("which %s" % get_cross_kernel_cc(bb, d))[0][:-1]
+        return os.popen("which %s" % get_cross_kernel_cc(bb, d)).read()[:-1]
     else:
         ice_dir = d.expand('${STAGING_BINDIR_TOOLCHAIN}')
         target_sys = d.expand('${TARGET_SYS}')
@@ -154,29 +154,14 @@ def package_qa_check_rpath(file,name, d, elf, messages):
     if not bad_dirs[0] in d.getVar('WORKDIR', True):
         bb.fatal("This class assumed that WORKDIR is ${TMPDIR}/work... Not doing any check")

-    output, errors = bb.process.run("%s -B -F%%r#F '%s'" % (scanelf,file))
-    txt = output.split()
+    output = os.popen("%s -B -F%%r#F '%s'" % (scanelf,file))
+    txt = output.readline().split()
     for line in txt:
         for dir in bad_dirs:
             if dir in line:
                 messages.append("package %s contains bad RPATH %s in file %s" % (name, line, file))

 QAPATHTEST[useless-rpaths] = "package_qa_check_useless_rpaths"
-
-def package_qa_get_objdump(d, path):
-    """
-    Get the result of objdump, ignore the errors since not all files can be objdumped
-    """
-    env_path = d.getVar('PATH', True)
-    objdump = d.getVar('OBJDUMP', True)
-
-    try:
-        lines = ""
-        lines = bb.process.run("LC_ALL=C PATH=%s %s -p '%s'" % (env_path, objdump, path))[0]
-    except Exception:
-        sys.exc_clear()
-    return lines
-
 def package_qa_check_useless_rpaths(file, name, d, elf, messages):
     """
     Check for RPATHs that are useless but not dangerous
@@ -184,12 +169,15 @@ def package_qa_check_useless_rpaths(file, name, d, elf, messages):
     if not elf:
         return

+    objdump = d.getVar('OBJDUMP', True)
+    env_path = d.getVar('PATH', True)
+
     libdir = d.getVar("libdir", True)
     base_libdir = d.getVar("base_libdir", True)

     import re
     rpath_re = re.compile("\s+RPATH\s+(.*)")
-    for line in package_qa_get_objdump(d, file):
+    for line in os.popen("LC_ALL=C PATH=%s %s -p '%s' 2> /dev/null" % (env_path, objdump, file), "r"):
         m = rpath_re.match(line)
         if m:
             rpath = m.group(1)
@@ -381,7 +369,7 @@ def package_qa_check_desktop(path, name, d, elf, messages):
     """
     if path.endswith(".desktop"):
         desktop_file_validate = os.path.join(d.getVar('STAGING_BINDIR_NATIVE',True),'desktop-file-validate')
-        output, errors = bb.process.run("%s %s" % (desktop_file_validate, path))
+        output = os.popen("%s %s" % (desktop_file_validate, path))
         # This only produces output on errors
         for l in output:
             messages.append("Desktop file issue: " + l.strip())
@@ -404,11 +392,14 @@ def package_qa_hash_style(path, name, d, elf, messages):
     if not gnu_hash:
         return

+    objdump = d.getVar('OBJDUMP', True)
+    env_path = d.getVar('PATH', True)
+
     sane = False
     has_syms = False

     # If this binary has symbols, we expect it to have GNU_HASH too.
-    for line in package_qa_get_objdump(d, path):
+    for line in os.popen("LC_ALL=C PATH=%s %s -p '%s' 2> /dev/null" % (env_path, objdump, path), "r"):
         if "SYMTAB" in line:
             has_syms = True
         if "GNU_HASH" in line:
@@ -349,7 +349,7 @@ python populate_packages_prepend () {
         path = d.getVar("PATH", True)

         cmd = "PATH=\"%s\" depmod -n -a -b %s -F %s/boot/System.map-%s %s" % (path, dvar, dvar, kernelver, kernelver_stripped)
-        f = bb.process.Popen(cmd, shell=True).stdout
+        f = os.popen(cmd, 'r')

         deps = {}
         pattern0 = "^(.*\.k?o):..*$"
@@ -60,16 +60,18 @@ def base_get_metadata_svn_revision(path, d):
     return revision

 def base_get_metadata_git_branch(path, d):
-    branch = bb.process.run('cd %s; git branch | grep "^* " | tr -d "* "' % path)[0]
+    branch = os.popen('cd %s; git branch 2>&1 | grep "^* " | tr -d "* "' % path).read()

     if len(branch) != 0:
         return branch
     return "<unknown>"

 def base_get_metadata_git_revision(path, d):
-    rev = bb.process.run("cd %s; git log -n 1 --pretty=oneline" % path)[0]
-    if len(rev) != 0:
-        rev = rev.split(" ")[0]
-        return rev
+    f = os.popen("cd %s; git log -n 1 --pretty=oneline -- 2>&1" % path)
+    data = f.read()
+    if f.close() is None:
+        rev = data.split(" ")[0]
+        if len(rev) != 0:
+            return rev
     return "<unknown>"

@@ -1061,7 +1061,7 @@ python emit_pkgdata() {

     def get_directory_size(dir):
         if os.listdir(dir):
-            size = int(bb.process.run('du -sk %s' % dir)[0].split('\t')[0])
+            size = int(os.popen('du -sk %s' % dir).readlines()[0].split('\t')[0])
         else:
             size = 0
         return size
@@ -1221,7 +1221,7 @@ python package_do_filedeps() {
                 rpfiles.append(os.path.join(root, file))

         for files in chunks(rpfiles, 100):
-            dep_pipe = bb.process.Popen(rpmdeps + " " + " ".join(files), shell=True).stdout
+            dep_pipe = os.popen(rpmdeps + " " + " ".join(files))

             process_deps(dep_pipe, pkg, provides_files, requires_files)

@@ -1263,15 +1263,11 @@ python package_do_shlibs() {

     def linux_so(root, path, file):
         needs_ldconfig = False
-        cmd = d.getVar('OBJDUMP', True) + " -p " + pipes.quote(os.path.join(root, file))
+        cmd = d.getVar('OBJDUMP', True) + " -p " + pipes.quote(os.path.join(root, file)) + " 2>/dev/null"
         cmd = "PATH=\"%s\" %s" % (d.getVar('PATH', True), cmd)
-        try:
-            lines = ""
-            lines = bb.process.run(cmd)[0]
-        # Some ".so" maybe ascii text, e.g: /usr/lib64/libpthread.so,
-        # ingore those errors.
-        except Exception:
-            sys.exc_clear()
+        fd = os.popen(cmd)
+        lines = fd.readlines()
+        fd.close()
         for l in lines:
            m = re.match("\s+NEEDED\s+([^\s]*)", l)
            if m: