bitbake: lib: amend code to use proper singleton comparisons where possible
Amend the code to handle singleton comparisons properly, so that it only checks whether the operands refer to the same object rather than comparing their values.

(Bitbake rev: b809a6812aa15a8a9af97bc382cc4b19571e6bfc)

Signed-off-by: Frazer Clews <frazer.clews@codethink.co.uk>
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
parent 0ac5174c7d
commit fa5524890e
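For context on the pattern applied throughout this diff: Python's == and != invoke an object's __eq__/__ne__, which a class may override, whereas is and is not compare object identity. PEP 8 therefore recommends identity checks against singletons such as None, and plain truth-value tests instead of comparisons against True or False. A minimal standalone sketch of the difference (the AlwaysEqual class is purely illustrative and not taken from BitBake):

    class AlwaysEqual:
        # Illustrative only: __eq__ claims equality with everything.
        def __eq__(self, other):
            return True

    obj = AlwaysEqual()
    print(obj == None)   # True  -- value comparison can be fooled by __eq__
    print(obj is None)   # False -- identity comparison is reliable
    print(None is None)  # True  -- there is exactly one None object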
@@ -65,7 +65,7 @@ class Command:
             # Can run synchronous commands straight away
             command_method = getattr(self.cmds_sync, command)
             if ro_only:
-                if not hasattr(command_method, 'readonly') or False == getattr(command_method, 'readonly'):
+                if not hasattr(command_method, 'readonly') or not getattr(command_method, 'readonly'):
                     return None, "Not able to execute not readonly commands in readonly mode"
             try:
                 self.cooker.process_inotify_updates()
@@ -1204,7 +1204,7 @@ class BBCooker:
         for c in collection_list:
             calc_layer_priority(c)
             regex = self.data.getVar("BBFILE_PATTERN_%s" % c)
-            if regex == None:
+            if regex is None:
                 parselog.error("BBFILE_PATTERN_%s not defined" % c)
                 errors = True
                 continue
@@ -1310,7 +1310,7 @@ class BBCooker:
         self.parseConfiguration()
 
         # If we are told to do the None task then query the default task
-        if (task == None):
+        if task is None:
             task = self.configuration.cmd
         if not task.startswith("do_"):
             task = "do_%s" % task
@@ -1454,7 +1454,7 @@ class BBCooker:
         self.buildSetVars()
 
         # If we are told to do the None task then query the default task
-        if (task == None):
+        if task is None:
             task = self.configuration.cmd
 
         if not task.startswith("do_"):
@@ -1687,7 +1687,7 @@ class CookerCollectFiles(object):
     def calc_bbfile_priority( self, filename, matched = None ):
         for _, _, regex, pri in self.bbfile_config_priorities:
             if regex.match(filename):
-                if matched != None:
+                if matched is not None:
                     if not regex in matched:
                         matched.add(regex)
                 return pri
@@ -79,7 +79,7 @@ def expand(s, d, varname = None):
     return d.expand(s, varname)
 
 def expandKeys(alterdata, readdata = None):
-    if readdata == None:
+    if readdata is None:
         readdata = alterdata
 
     todolist = {}
@@ -346,7 +346,7 @@ def set_UIHmask(handlerNum, level, debug_domains, mask):
 
 def getName(e):
     """Returns the name of a class or class instance"""
-    if getattr(e, "__name__", None) == None:
+    if getattr(e, "__name__", None) is None:
         return e.__class__.__name__
     else:
         return e.__name__
@@ -1081,7 +1081,7 @@ def try_mirrors(fetch, d, origud, mirrors, check = False):
 
     for index, uri in enumerate(uris):
         ret = try_mirror_url(fetch, origud, uds[index], ld, check)
-        if ret != False:
+        if ret:
             return ret
     return None
 
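The try_mirrors change above, like the similar "ret != True" rewrites in the UI code further down, replaces an explicit comparison against a boolean singleton with a plain truth-value test. The two styles agree for real booleans and for typical return values, but they can differ for falsy non-booleans; a short standalone sketch (not taken from the patch) of where they diverge:

    def old_style(ret):
        return ret != False   # compare against the False singleton by value

    def new_style(ret):
        return bool(ret)      # plain truthiness, as used by "if ret:"

    for value in (True, False, 1, 0, "", None, "some result"):
        print(repr(value), old_style(value), new_style(value))
    # Booleans and typical results behave the same; "" and None are != False
    # yet not truthy, so only the new style skips the success branch for them.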
@@ -1351,7 +1351,7 @@ class FetchMethod(object):
         """
 
         # We cannot compute checksums for directories
-        if os.path.isdir(urldata.localpath) == True:
+        if os.path.isdir(urldata.localpath):
             return False
         if urldata.localpath.find("*") != -1:
             return False
 
@@ -671,7 +671,7 @@ class Git(FetchMethod):
 
             # search for version in the line
             tag = tagregex.search(tag_head)
-            if tag == None:
+            if tag is None:
                 continue
 
             tag = tag.group('pver')
@@ -41,7 +41,7 @@ class Osc(FetchMethod):
         else:
             pv = d.getVar("PV", False)
             rev = bb.fetch2.srcrev_internal_helper(ud, d)
-            if rev and rev != True:
+            if rev:
                 ud.revision = rev
             else:
                 ud.revision = ""
@@ -104,7 +104,7 @@ class Perforce(FetchMethod):
         if command == 'changes':
             p4cmd = '%s%s changes -m 1 //%s' % (ud.basecmd, p4opt, pathnrev)
         elif command == 'print':
-            if depot_filename != None:
+            if depot_filename is not None:
                 p4cmd = '%s%s print -o "p4/%s" "%s"' % (ud.basecmd, p4opt, filename, depot_filename)
             else:
                 raise FetchError('No depot file name provided to p4 %s' % command, ud.url)
@@ -58,7 +58,7 @@ class SSH(FetchMethod):
     '''Class to fetch a module or modules via Secure Shell'''
 
     def supports(self, urldata, d):
-        return __pattern__.match(urldata.url) != None
+        return __pattern__.match(urldata.url) is not None
 
     def supports_checksum(self, urldata):
         return False
@@ -89,7 +89,7 @@ class DataNode(AstNode):
         self.groupd = groupd
 
     def getFunc(self, key, data):
-        if 'flag' in self.groupd and self.groupd['flag'] != None:
+        if 'flag' in self.groupd and self.groupd['flag'] is not None:
             return data.getVarFlag(key, self.groupd['flag'], expand=False, noweakdefault=True)
         else:
             return data.getVar(key, False, noweakdefault=True, parsing=True)
@@ -102,36 +102,36 @@ class DataNode(AstNode):
             'file': self.filename,
             'line': self.lineno,
         }
-        if "exp" in groupd and groupd["exp"] != None:
+        if "exp" in groupd and groupd["exp"] is not None:
             data.setVarFlag(key, "export", 1, op = 'exported', **loginfo)
 
         op = "set"
-        if "ques" in groupd and groupd["ques"] != None:
+        if "ques" in groupd and groupd["ques"] is not None:
             val = self.getFunc(key, data)
             op = "set?"
-            if val == None:
+            if val is None:
                 val = groupd["value"]
-        elif "colon" in groupd and groupd["colon"] != None:
+        elif "colon" in groupd and groupd["colon"] is not None:
             e = data.createCopy()
             op = "immediate"
             val = e.expand(groupd["value"], key + "[:=]")
-        elif "append" in groupd and groupd["append"] != None:
+        elif "append" in groupd and groupd["append"] is not None:
             op = "append"
             val = "%s %s" % ((self.getFunc(key, data) or ""), groupd["value"])
-        elif "prepend" in groupd and groupd["prepend"] != None:
+        elif "prepend" in groupd and groupd["prepend"] is not None:
             op = "prepend"
             val = "%s %s" % (groupd["value"], (self.getFunc(key, data) or ""))
-        elif "postdot" in groupd and groupd["postdot"] != None:
+        elif "postdot" in groupd and groupd["postdot"] is not None:
             op = "postdot"
             val = "%s%s" % ((self.getFunc(key, data) or ""), groupd["value"])
-        elif "predot" in groupd and groupd["predot"] != None:
+        elif "predot" in groupd and groupd["predot"] is not None:
             op = "predot"
             val = "%s%s" % (groupd["value"], (self.getFunc(key, data) or ""))
         else:
             val = groupd["value"]
 
         flag = None
-        if 'flag' in groupd and groupd['flag'] != None:
+        if 'flag' in groupd and groupd['flag'] is not None:
             flag = groupd['flag']
         elif groupd["lazyques"]:
             flag = "_defaultval"
@@ -92,11 +92,11 @@ def preferredVersionMatch(pe, pv, pr, preferred_e, preferred_v, preferred_r):
     Check if the version pe,pv,pr is the preferred one.
     If there is preferred version defined and ends with '%', then pv has to start with that version after removing the '%'
     """
-    if (pr == preferred_r or preferred_r == None):
-        if (pe == preferred_e or preferred_e == None):
+    if pr == preferred_r or preferred_r is None:
+        if pe == preferred_e or preferred_e is None:
             if preferred_v == pv:
                 return True
-            if preferred_v != None and preferred_v.endswith('%') and pv.startswith(preferred_v[:len(preferred_v)-1]):
+            if preferred_v is not None and preferred_v.endswith('%') and pv.startswith(preferred_v[:len(preferred_v)-1]):
                 return True
     return False
 
@@ -362,7 +362,7 @@ class TaskData:
             bb.event.fire(bb.event.NoProvider(item, dependees=self.get_dependees(item), reasons=["No eligible PROVIDERs exist for '%s'" % item]), cfgData)
             raise bb.providers.NoProvider(item)
 
-        if len(eligible) > 1 and foundUnique == False:
+        if len(eligible) > 1 and not foundUnique:
             if item not in self.consider_msgs_cache:
                 providers_list = []
                 for fn in eligible:
@@ -935,7 +935,7 @@ class BuildInfoHelper(object):
 
         # only reset the build name if the one on the server is actually
         # a valid value for the build_name field
-        if build_name != None:
+        if build_name is not None:
             build_info['build_name'] = build_name
             changed = True
 
@@ -1194,7 +1194,7 @@ class BuildInfoHelper(object):
         evdata = BuildInfoHelper._get_data_from_event(event)
 
         for t in self.internal_state['targets']:
-            if t.is_image == True:
+            if t.is_image:
                 output_files = list(evdata.keys())
                 for output in output_files:
                     if t.target in output and 'rootfs' in output and not output.endswith(".manifest"):
@@ -1236,7 +1236,7 @@ class BuildInfoHelper(object):
             task_information['outcome'] = Task.OUTCOME_PREBUILT
         else:
             task_information['task_executed'] = True
-            if 'noexec' in vars(event) and event.noexec == True:
+            if 'noexec' in vars(event) and event.noexec:
                 task_information['task_executed'] = False
                 task_information['outcome'] = Task.OUTCOME_EMPTY
                 task_information['script_type'] = Task.CODING_NA
@@ -1776,7 +1776,7 @@ class BuildInfoHelper(object):
         image_file_extensions_unique = {}
         image_fstypes = self.server.runCommand(
             ['getVariable', 'IMAGE_FSTYPES'])[0]
-        if image_fstypes != None:
+        if image_fstypes is not None:
             image_types_str = image_fstypes.strip()
             image_file_extensions = re.sub(r' {2,}', ' ', image_types_str)
             image_file_extensions_unique = set(image_file_extensions.split(' '))
@@ -447,7 +447,7 @@ def main(server, eventHandler, params, tf = TerminalFilter):
             if error:
                 logger.error("Command '%s' failed: %s" % (cmdline, error))
                 return 1
-            elif ret != True:
+            elif not ret:
                 logger.error("Command '%s' failed: returned %s" % (cmdline, ret))
                 return 1
 
@@ -238,7 +238,7 @@ class NCursesUI:
                 if error:
                     print("Error running command '%s': %s" % (cmdline, error))
                     return
-                elif ret != True:
+                elif not ret:
                     print("Couldn't get default commandlind! %s" % ret)
                     return
             except xmlrpc.client.Fault as x:
@@ -200,7 +200,7 @@ def main(server, eventHandler, params):
         if error:
             print("Error running command '%s': %s" % (cmdline, error))
             return 1
-        elif ret != True:
+        elif not ret:
            print("Error running command '%s': returned %s" % (cmdline, ret))
            return 1
     except client.Fault as x:
@@ -176,7 +176,7 @@ def main(server, eventHandler, params):
             if error:
                 logger.error("Command '%s' failed: %s" % (cmdline, error))
                 return 1
-            elif ret != True:
+            elif not ret:
                 logger.error("Command '%s' failed: returned %s" % (cmdline, ret))
                 return 1
 
@@ -46,7 +46,7 @@ class BBUIEventQueue:
                 self.EventHandle = ret
                 error = ""
 
-                if self.EventHandle != None:
+                if self.EventHandle is not None:
                     break
 
             errmsg = "Could not register UI event handler. Error: %s, host %s, "\
@@ -427,7 +427,7 @@ class BeautifulSoup(Tag):
         if self.is_xml:
             # Print the XML declaration
             encoding_part = ''
-            if eventual_encoding != None:
+            if eventual_encoding is not None:
                 encoding_part = ' encoding="%s"' % eventual_encoding
             prefix = '<?xml version="1.0"%s?>\n' % encoding_part
         else:
@@ -321,7 +321,7 @@ class Element(treebuildersbase.Node):
         return self.element.contents
 
     def getNameTuple(self):
-        if self.namespace == None:
+        if self.namespace is None:
             return namespaces["html"], self.name
         else:
             return self.namespace, self.name
@@ -488,7 +488,7 @@ class LRParser:
                     # --! DEBUG
                     return result
 
-            if t == None:
+            if t is None:
 
                 # --! DEBUG
                 debug.error('Error : %s',
@@ -766,7 +766,7 @@ class LRParser:
                         n = symstack[-1]
                         return getattr(n,"value",None)
 
-            if t == None:
+            if t is None:
 
                 # We have some kind of parsing error here. To handle
                 # this, we are going to push the current token onto
@@ -1021,7 +1021,7 @@ class LRParser:
                         n = symstack[-1]
                         return getattr(n,"value",None)
 
-            if t == None:
+            if t is None:
 
                 # We have some kind of parsing error here. To handle
                 # this, we are going to push the current token onto
@@ -71,7 +71,7 @@ class PRTable(object):
         data=self._execute("SELECT value FROM %s WHERE version=? AND pkgarch=? AND checksum=?;" % self.table,
                            (version, pkgarch, checksum))
         row=data.fetchone()
-        if row != None:
+        if row is not None:
             return row[0]
         else:
             #no value found, try to insert
@@ -87,7 +87,7 @@ class PRTable(object):
         data=self._execute("SELECT value FROM %s WHERE version=? AND pkgarch=? AND checksum=?;" % self.table,
                            (version, pkgarch, checksum))
         row=data.fetchone()
-        if row != None:
+        if row is not None:
             return row[0]
         else:
             raise prserv.NotFoundError
@@ -99,7 +99,7 @@ class PRTable(object):
                            % (self.table, self.table),
                            (version, pkgarch, checksum, version, pkgarch))
         row=data.fetchone()
-        if row != None:
+        if row is not None:
             return row[0]
         else:
             #no value found, try to insert
@@ -116,7 +116,7 @@ class PRTable(object):
         data=self._execute("SELECT value FROM %s WHERE version=? AND pkgarch=? AND checksum=?;" % self.table,
                            (version, pkgarch, checksum))
         row=data.fetchone()
-        if row != None:
+        if row is not None:
             return row[0]
         else:
             raise prserv.NotFoundError
@@ -132,7 +132,7 @@ class PRTable(object):
         data = self._execute("SELECT value FROM %s WHERE version=? AND pkgarch=? AND checksum=?;" % self.table,
                              (version, pkgarch, checksum))
         row = data.fetchone()
-        if row != None:
+        if row is not None:
             val=row[0]
         else:
             #no value found, try to insert
@@ -147,7 +147,7 @@ class PRTable(object):
         data = self._execute("SELECT value FROM %s WHERE version=? AND pkgarch=? AND checksum=?;" % self.table,
                              (version, pkgarch, checksum))
         row = data.fetchone()
-        if row != None:
+        if row is not None:
             val = row[0]
         return val
 
@@ -170,7 +170,7 @@ class PRTable(object):
         data = self._execute("SELECT value FROM %s WHERE version=? AND pkgarch=? AND checksum=? AND value>=?;" % self.table,
                              (version,pkgarch,checksum,value))
         row=data.fetchone()
-        if row != None:
+        if row is not None:
             return row[0]
         else:
             return None
@@ -1274,7 +1274,7 @@ class Notifier:
             basename = os.path.basename(sys.argv[0]) or 'pyinotify'
             pid_file = os.path.join(dirname, basename + '.pid')
 
-        if pid_file != False and os.path.lexists(pid_file):
+        if pid_file and os.path.lexists(pid_file):
             err = 'Cannot daemonize: pid file %s already exists.' % pid_file
             raise NotifierError(err)
 
@@ -1308,7 +1308,7 @@ class Notifier:
         fork_daemon()
 
         # Write pid
-        if pid_file != False:
+        if pid_file:
             flags = os.O_WRONLY|os.O_CREAT|os.O_NOFOLLOW|os.O_EXCL
             fd_pid = os.open(pid_file, flags, 0o0600)
             os.write(fd_pid, bytes(str(os.getpid()) + '\n',
@@ -78,7 +78,7 @@ class Command(BaseCommand):
         template_conf = os.environ.get("TEMPLATECONF", "")
         custom_xml_only = os.environ.get("CUSTOM_XML_ONLY")
 
-        if ToasterSetting.objects.filter(name='CUSTOM_XML_ONLY').count() > 0 or (not custom_xml_only == None):
+        if ToasterSetting.objects.filter(name='CUSTOM_XML_ONLY').count() > 0 or custom_xml_only is not None:
             # only use the custom settings
             pass
         elif "poky" in template_conf:
@@ -1647,14 +1647,14 @@ class CustomImageRecipe(Recipe):
         """
         # Check if we're aldready up-to-date or not
         target = self.get_last_successful_built_target()
-        if target == None:
+        if target is None:
             # So we've never actually built this Custom recipe but what about
             # the recipe it's based on?
             target = \
                 Target.objects.filter(Q(build__outcome=Build.SUCCEEDED) &
                                       Q(build__project=self.project) &
                                       Q(target=self.base_recipe.name)).last()
-            if target == None:
+            if target is None:
                 return
 
         if target.build.completed_on == self.last_updated:
@@ -212,7 +212,7 @@ def filtered_installedsize(size, installed_size):
     """If package.installed_size not null and not empty return it,
     else return package.size
     """
-    return size if (installed_size == 0) or (installed_size == "") or (installed_size == None) else installed_size
+    return size if (installed_size == 0) or (installed_size == "") or (installed_size is None) else installed_size
 
 @register.filter
 def filtered_packageversion(version, revision):
@@ -228,7 +228,7 @@ def filter_sizeovertotal(package_object, total_size):
     formatted nicely.
     """
     size = package_object.installed_size
-    if size == None or size == '':
+    if size is None or size == '':
         size = package_object.size
 
     return '{:.1%}'.format(float(size)/float(total_size))
@@ -51,7 +51,7 @@ class MimeTypeFinder(object):
     def get_mimetype(self, path):
         guess = mimetypes.guess_type(path, self._strict)
         guessed_type = guess[0]
-        if guessed_type == None:
+        if guessed_type is None:
            guessed_type = 'application/octet-stream'
        return guessed_type
 
@@ -126,7 +126,7 @@ def _lv_to_dict(prj, x = None):
     return {"id": x.pk,
             "name": x.layer.name,
             "tooltip": "%s | %s" % (x.layer.vcs_url,x.get_vcs_reference()),
-            "detail": "(%s" % x.layer.vcs_url + (")" if x.release == None else " | "+x.get_vcs_reference()+")"),
+            "detail": "(%s" % x.layer.vcs_url + (")" if x.release is None else " | "+x.get_vcs_reference()+")"),
             "giturl": x.layer.vcs_url,
             "layerdetailurl" : reverse('layerdetails', args=(prj.id,x.pk)),
             "revision" : x.get_vcs_reference(),
@@ -718,7 +718,7 @@ def _get_dir_entries(build_id, target_id, start):
                 resolved_id = o.sym_target_id
                 resolved_path = o.path
                 if target_packages.count():
-                    while resolved_id != "" and resolved_id != None:
+                    while resolved_id != "" and resolved_id is not None:
                         tf = Target_File.objects.get(pk=resolved_id)
                         resolved_path = tf.path
                         resolved_id = tf.sym_target_id
@@ -730,10 +730,10 @@ def _get_dir_entries(build_id, target_id, start):
                 entry['package_id'] = str(p.id)
                 entry['package'] = p.name
             # don't use resolved path from above, show immediate link-to
-            if o.sym_target_id != "" and o.sym_target_id != None:
+            if o.sym_target_id != "" and o.sym_target_id is not None:
                 entry['link_to'] = Target_File.objects.get(pk=o.sym_target_id).path
             entry['size'] = filtered_filesizeformat(o.size)
-            if entry['link_to'] != None:
+            if entry['link_to'] is not None:
                 entry['permission'] = node_str[o.inodetype] + o.permission
             else:
                 entry['permission'] = node_str[o.inodetype] + o.permission
@@ -755,7 +755,7 @@ def dirinfo(request, build_id, target_id, file_path=None):
         objects = _get_dir_entries(build_id, target_id, '/')
         packages_sum = Package.objects.filter(id__in=Target_Installed_Package.objects.filter(target_id=target_id).values('package_id')).aggregate(Sum('installed_size'))
         dir_list = None
-        if file_path != None:
+        if file_path is not None:
             """
             Link from the included package detail file list page and is
             requesting opening the dir info to a specific file path.
@@ -1029,15 +1029,15 @@ def _get_package_dependency_count(package, target_id, is_installed):
 
 def _get_package_alias(package):
     alias = package.installed_name
-    if alias != None and alias != '' and alias != package.name:
+    if alias is not None and alias != '' and alias != package.name:
         return alias
     else:
         return ''
 
 def _get_fullpackagespec(package):
     r = package.name
-    version_good = package.version != None and package.version != ''
-    revision_good = package.revision != None and package.revision != ''
+    version_good = package.version is not None and package.version != ''
+    revision_good = package.revision is not None and package.revision != ''
     if version_good or revision_good:
         r += '_'
         if version_good:
@@ -505,7 +505,7 @@ class Command(BaseCommand):
             default_release = Release.objects.get(id=1)
 
         # SANITY: if 'reconfig' but project does not exist (deleted externally), switch to 'import'
-        if ("reconfigure" == options['command']) and (None == project):
+        if ("reconfigure" == options['command']) and project is None:
             options['command'] = 'import'
 
         # 'Configure':