Feature: retry on a subset of a job's hosts

This commit is contained in:
AlanCoding
2017-10-16 09:58:56 -04:00
parent f1813c35ed
commit 0ae9283fba
7 changed files with 176 additions and 4 deletions
+27 -1
View File
@@ -23,6 +23,9 @@ from django.utils.timezone import utc
from django.utils.translation import ugettext_lazy as _
from django.core.exceptions import ValidationError
# REST Framework
from rest_framework.exceptions import ParseError
# AWX
from awx.api.versioning import reverse
from awx.main.models.base import * # noqa
@@ -588,10 +591,33 @@ class Job(UnifiedJob, JobOptions, SurveyJobMixin, JobNotificationMixin, TaskMana
return self.passwords_needed_to_start
def _get_hosts(self, **kwargs):
    """
    Return a queryset of Host objects that participated in this job,
    optionally narrowed by extra ORM filter kwargs.

    The caller's kwargs are ORM lookups on Host (e.g.
    ``job_host_summaries__failed=True``); this method always adds the
    constraint that the host has a JobHostSummary belonging to this job.
    """
    # Resolve the Host model through the JobHostSummary FK instead of
    # importing awx.main.models.inventory directly — the direct import
    # was shadowed by this lookup anyway and risks a circular import.
    Host = JobHostSummary._meta.get_field('host').related_model
    kwargs['job_host_summaries__job__pk'] = self.pk
    return Host.objects.filter(**kwargs)
def retry_qs(self, status):
    """
    Build the Host queryset used to produce the `limit` field when
    relaunching this job against a subset of its hosts.

    Valid `status` values: 'all', 'failed', 'ok', 'changed',
    'unreachable'. Any other value raises a DRF ParseError.
    """
    filter_kwargs = {}
    if status == 'failed':
        # Special case for parity with Ansible .retry files
        filter_kwargs['job_host_summaries__failed'] = True
    elif status in ('ok', 'changed', 'unreachable'):
        # Unreachable hosts are recorded under the `dark` counter
        field_name = 'dark' if status == 'unreachable' else status
        filter_kwargs['job_host_summaries__{}__gt'.format(field_name)] = 0
    elif status != 'all':
        raise ParseError(_(
            '{status_value} is not a valid status option.'
        ).format(status_value=status))
    return self._get_hosts(**filter_kwargs)
@property
def task_impact(self):
# NOTE: We sorta have to assume the host count matches and that forks default to 5
+3 -1
View File
@@ -734,7 +734,7 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique
pass
super(UnifiedJob, self).delete()
def copy_unified_job(self):
def copy_unified_job(self, limit=None):
'''
Returns saved object, including related fields.
Create a copy of this unified job for the purpose of relaunch
@@ -746,6 +746,8 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique
fields = unified_jt_class._get_unified_job_field_names() + [parent_field_name]
unified_job = copy_model_by_class(self, unified_job_class, fields, {})
unified_job.launch_type = 'relaunch'
if limit:
unified_job.limit = limit
unified_job.save()
# Labels copied here
+28
View File
@@ -37,3 +37,31 @@ def test_job_relaunch_permission_denied_response(
r = post(reverse('api:job_relaunch', kwargs={'pk':job.pk}), {}, jt_user, expect=403)
assert 'launched with prompted fields' in r.data['detail']
assert 'do not have permission' in r.data['detail']
@pytest.mark.django_db
@pytest.mark.parametrize("status,hosts", [
    ('all', 'host1,host2,host3'),
    ('failed', 'host3'),
])
def test_job_relaunch_on_failed_hosts(post, inventory, project, machine_credential, admin_user, status, hosts):
    """
    Relaunching a job with a `hosts` status filter should set the new
    job's `limit` to the comma-joined names of the matching hosts.
    """
    h1 = inventory.hosts.create(name='host1')  # no-op
    h2 = inventory.hosts.create(name='host2')  # changed host
    h3 = inventory.hosts.create(name='host3')  # failed host
    jt = JobTemplate.objects.create(
        name='testjt', inventory=inventory,
        project=project, credential=machine_credential
    )
    job = jt.create_unified_job(_eager_fields={'status': 'failed', 'limit': 'host1,host2,host3'})
    # A playbook_on_stats event marks the job as having finished host
    # processing, which relaunch-by-status presumably requires.
    job.job_events.create(event='playbook_on_stats')
    # NOTE(review): all three summaries pass failed=False, yet the
    # 'failed' case expects host3 to match — this assumes
    # JobHostSummary.save() recomputes `failed` from the failures
    # counter; confirm against the model definition.
    job.job_host_summaries.create(host=h1, failed=False, ok=1, changed=0, failures=0, host_name=h1.name)
    job.job_host_summaries.create(host=h2, failed=False, ok=0, changed=1, failures=0, host_name=h2.name)
    job.job_host_summaries.create(host=h3, failed=False, ok=0, changed=0, failures=1, host_name=h3.name)
    r = post(
        url=reverse('api:job_relaunch', kwargs={'pk': job.pk}),
        data={'hosts': status},
        user=admin_user,
        expect=201
    )
    assert r.data.get('limit') == hosts