Merge "dict_object.keys() is not required for *in* operator"

This commit is contained in:
Zuul 2019-01-07 10:27:39 +00:00 committed by Gerrit Code Review
commit 6bb3f9d9ea
8 changed files with 15 additions and 15 deletions

View File

@@ -257,7 +257,7 @@ class FakeGerritChange(object):
'type': 'ADDED'},
{'file': 'README',
'type': 'MODIFIED'}]
for f in files.keys():
for f in files:
ps_files.append({'file': f, 'type': 'ADDED'})
d = {'approvals': [],
'createdOn': time.time(),
@@ -416,7 +416,7 @@ class FakeGerritChange(object):
def getSubmitRecords(self):
status = {}
for cat in self.categories.keys():
for cat in self.categories:
status[cat] = 0
for a in self.patchsets[-1]['approvals']:
@@ -695,7 +695,7 @@ class FakeGerritConnection(gerritconnection.GerritConnection):
# happens they can add their own verified event into the queue.
# Nevertheless, we can update change with the new review in gerrit.
for cat in action.keys():
for cat in action:
if cat != 'submit':
change.addApproval(cat, action[cat], username=self.user)
@@ -1155,7 +1155,7 @@ class FakeGithubConnection(githubconnection.GithubConnection):
# simulate one installation per org
orgs = {}
latest_inst_id = 0
for repo in self.github_data.repos.keys():
for repo in self.github_data.repos:
inst_id = orgs.get(repo[0])
if not inst_id:
latest_inst_id += 1

View File

@@ -402,10 +402,10 @@ class CallbackModule(default.CallbackModule):
msg=result_dict['module_stderr'])
elif result._task.action == 'debug':
# this is a debug statement, handle it special
for key in [k for k in result_dict.keys()
for key in [k for k in result_dict
if k.startswith('_ansible')]:
del result_dict[key]
if 'changed' in result_dict.keys():
if 'changed' in result_dict:
del result_dict['changed']
keyname = next(iter(result_dict.keys()))
# If it has msg, that means it was like:

View File

@@ -308,7 +308,7 @@ class Client(zuul.cmd.ZuulApp):
fields = all_fields.keys()
else:
fields = [f.strip().lower() for f in self.args.columns.split(',')
if f.strip().lower() in all_fields.keys()]
if f.strip().lower() in all_fields]
table = prettytable.PrettyTable(
field_names=[all_fields[f]['title'] for f in fields])

View File

@@ -182,7 +182,7 @@ def merge_project_dict(project_dicts, name, project):
return
old = project_dicts[name]
for key in project.keys():
for key in project:
if key not in old:
old[key] = project[key]
elif isinstance(old[key], list):
@@ -212,7 +212,7 @@ def normalize_project_expansions():
# Second, find out which projects need to expand a given template
for job_name, project in copy.deepcopy(JOBS_FOR_EXPAND).items():
# There is a job-level expansion for this one
if job_name in JOB_MATCHERS.keys():
if job_name in JOB_MATCHERS:
continue
for project_name, expansion in project.items():
TEMPLATES_TO_EXPAND[project_name] = []
@@ -416,7 +416,7 @@ def expandYamlForTemplateJob(self, project, template, jobs_glob=None):
and self.config.getboolean(
'job_builder', 'allow_empty_variables')
for key in template.keys():
for key in template:
if key not in params:
params[key] = template[key]
@@ -1054,7 +1054,7 @@ class JobMapping:
def _expandVars(self, info, match_dict):
job_vars = info['vars'].copy()
for key in job_vars.keys():
for key in job_vars:
job_vars[key] = job_vars[key].format(**match_dict)
return job_vars

View File

@@ -1871,7 +1871,7 @@ class TenantParser(object):
layout.getJob(job.name)
job.validateReferences(layout)
for project_name in layout.project_configs.keys():
for project_name in layout.project_configs:
for project_config in layout.project_configs[project_name]:
with reference_exceptions(
'project', project_config, layout.loading_errors):

View File

@@ -798,7 +798,7 @@ class GerritConnection(BaseConnection):
return branches
refs = self.getInfoRefs(project)
heads = [str(k[len('refs/heads/'):]) for k in refs.keys()
heads = [str(k[len('refs/heads/'):]) for k in refs
if k.startswith('refs/heads/') and
GerritConnection._checkRefFormat(k)]
self._project_branch_cache[project.name] = heads

View File

@@ -320,7 +320,7 @@ class ExecutorClient(object):
_fname = '%s:%s' % (
function_name,
executor_zone)
if _fname in functions.keys():
if _fname in functions:
function_name = _fname
else:
self.log.warning(

View File

@@ -50,7 +50,7 @@ def get_statsd(config, extra_keys=None):
format_keys = copy.copy(keys)
# we need to normalize all keys which go into the metric name
for key in format_keys.keys():
for key in format_keys:
normalized_value = normalize_statsd_name(format_keys[key])
format_keys[key] = normalized_value