mirror of
https://github.com/ZwareBear/awx.git
synced 2026-04-30 03:41:50 -05:00
Merge branch 'rbac' of github.com:ansible/ansible-tower into rbac
This commit is contained in:
@@ -1291,6 +1291,31 @@ class ScheduleAccess(BaseAccess):
|
||||
else:
|
||||
return False
|
||||
|
||||
class NotifierAccess(BaseAccess):
|
||||
'''
|
||||
I can see/use a notifier if I have permission to
|
||||
'''
|
||||
model = Notifier
|
||||
|
||||
def get_queryset(self):
|
||||
qs = self.model.objects.filter(active=True).distinct()
|
||||
if self.user.is_superuser:
|
||||
return qs
|
||||
return qs
|
||||
|
||||
class NotificationAccess(BaseAccess):
|
||||
'''
|
||||
I can see/use a notification if I have permission to
|
||||
'''
|
||||
model = Notification
|
||||
|
||||
def get_queryset(self):
|
||||
qs = self.model.objects.distinct()
|
||||
if self.user.is_superuser:
|
||||
return qs
|
||||
return qs
|
||||
|
||||
|
||||
class ActivityStreamAccess(BaseAccess):
|
||||
'''
|
||||
I can see activity stream events only when I have permission on all objects included in the event
|
||||
@@ -1475,23 +1500,31 @@ class RoleAccess(BaseAccess):
|
||||
def get_queryset(self):
|
||||
if self.user.is_superuser:
|
||||
return self.model.objects.all()
|
||||
return self.model.objects.none()
|
||||
return self.model.visible_roles(self.user)
|
||||
|
||||
def can_change(self, obj, data):
|
||||
return self.user.is_superuser
|
||||
|
||||
def can_add(self, obj, data):
|
||||
return self.user.is_superuser
|
||||
# Unsupported for now
|
||||
return False
|
||||
|
||||
def can_attach(self, obj, sub_obj, relationship, data,
|
||||
skip_sub_obj_read_check=False):
|
||||
return self.user.is_superuser
|
||||
return self.can_unattach(obj, sub_obj, relationship)
|
||||
|
||||
def can_unattach(self, obj, sub_obj, relationship):
|
||||
return self.user.is_superuser
|
||||
if self.user.is_superuser:
|
||||
return True
|
||||
if obj.object_id and \
|
||||
isinstance(obj.content_object, ResourceMixin) and \
|
||||
obj.content_object.accessible_by(self.user, {'write': True}):
|
||||
return True
|
||||
return False
|
||||
|
||||
def can_delete(self, obj):
|
||||
return self.user.is_superuser
|
||||
# Unsupported for now
|
||||
return False
|
||||
|
||||
|
||||
class ResourceAccess(BaseAccess):
|
||||
@@ -1550,3 +1583,5 @@ register_access(CustomInventoryScript, CustomInventoryScriptAccess)
|
||||
register_access(TowerSettings, TowerSettingsAccess)
|
||||
register_access(Role, RoleAccess)
|
||||
register_access(Resource, ResourceAccess)
|
||||
register_access(Notifier, NotifierAccess)
|
||||
register_access(Notification, NotificationAccess)
|
||||
|
||||
@@ -134,8 +134,9 @@ def resolve_role_field(obj, field):
|
||||
class ImplicitRoleDescriptor(ReverseSingleRelatedObjectDescriptor):
|
||||
"""Descriptor Implict Role Fields. Auto-creates the appropriate role entry on first access"""
|
||||
|
||||
def __init__(self, role_name, permissions, parent_role, *args, **kwargs):
|
||||
def __init__(self, role_name, role_description, permissions, parent_role, *args, **kwargs):
|
||||
self.role_name = role_name
|
||||
self.role_description = role_description if role_description else ""
|
||||
self.permissions = permissions
|
||||
self.parent_role = parent_role
|
||||
|
||||
@@ -152,7 +153,7 @@ class ImplicitRoleDescriptor(ReverseSingleRelatedObjectDescriptor):
|
||||
if connection.needs_rollback:
|
||||
raise TransactionManagementError('Current transaction has failed, cannot create implicit role')
|
||||
|
||||
role = Role.objects.create(name=self.role_name, content_object=instance)
|
||||
role = Role.objects.create(name=self.role_name, description=self.role_description, content_object=instance)
|
||||
if self.parent_role:
|
||||
|
||||
# Add all non-null parent roles as parents
|
||||
@@ -195,8 +196,9 @@ class ImplicitRoleDescriptor(ReverseSingleRelatedObjectDescriptor):
|
||||
class ImplicitRoleField(models.ForeignKey):
|
||||
"""Implicitly creates a role entry for a resource"""
|
||||
|
||||
def __init__(self, role_name=None, permissions=None, parent_role=None, *args, **kwargs):
|
||||
def __init__(self, role_name=None, role_description=None, permissions=None, parent_role=None, *args, **kwargs):
|
||||
self.role_name = role_name
|
||||
self.role_description = role_description
|
||||
self.permissions = permissions
|
||||
self.parent_role = parent_role
|
||||
|
||||
@@ -211,6 +213,7 @@ class ImplicitRoleField(models.ForeignKey):
|
||||
self.name,
|
||||
ImplicitRoleDescriptor(
|
||||
self.role_name,
|
||||
self.role_description,
|
||||
self.permissions,
|
||||
self.parent_role,
|
||||
self
|
||||
|
||||
@@ -12,7 +12,7 @@ from django.db import transaction
|
||||
from django.utils.timezone import now
|
||||
|
||||
# AWX
|
||||
from awx.fact.models.fact import * # noqa
|
||||
from awx.main.models.fact import Fact
|
||||
from awx.api.license import feature_enabled
|
||||
|
||||
OLDER_THAN = 'older_than'
|
||||
@@ -31,7 +31,7 @@ class CleanupFacts(object):
|
||||
# pivot -= granularity
|
||||
# group by host
|
||||
def cleanup(self, older_than_abs, granularity, module=None):
|
||||
fact_oldest = FactVersion.objects.all().order_by('timestamp').first()
|
||||
fact_oldest = Fact.objects.all().order_by('timestamp').first()
|
||||
if not fact_oldest:
|
||||
return 0
|
||||
|
||||
@@ -44,7 +44,10 @@ class CleanupFacts(object):
|
||||
# Special case, granularity=0x where x is d, w, or y
|
||||
# The intent is to delete all facts < older_than_abs
|
||||
if granularity == relativedelta():
|
||||
return FactVersion.objects.filter(**kv).order_by('-timestamp').delete()
|
||||
qs = Fact.objects.filter(**kv)
|
||||
count = qs.count()
|
||||
qs.delete()
|
||||
return count
|
||||
|
||||
total = 0
|
||||
|
||||
@@ -61,18 +64,17 @@ class CleanupFacts(object):
|
||||
kv['module'] = module
|
||||
|
||||
|
||||
fact_version_objs = FactVersion.objects.filter(**kv).order_by('-timestamp').limit(1)
|
||||
if fact_version_objs:
|
||||
fact_version_obj = fact_version_objs[0]
|
||||
fact_version_obj = Fact.objects.filter(**kv).order_by('-timestamp').first()
|
||||
if fact_version_obj:
|
||||
kv = {
|
||||
'timestamp__lt': fact_version_obj.timestamp,
|
||||
'timestamp__gt': date_pivot_next
|
||||
}
|
||||
if module:
|
||||
kv['module'] = module
|
||||
count = FactVersion.objects.filter(**kv).delete()
|
||||
# FIXME: These two deletes should be a transaction
|
||||
count = Fact.objects.filter(**kv).delete()
|
||||
qs = Fact.objects.filter(**kv)
|
||||
count = qs.count()
|
||||
qs.delete()
|
||||
total += count
|
||||
|
||||
date_pivot = date_pivot_next
|
||||
|
||||
@@ -9,9 +9,11 @@ from datetime import datetime
|
||||
# Django
|
||||
from django.core.management.base import NoArgsCommand
|
||||
from django.conf import settings
|
||||
#from django.core.exceptions import Does
|
||||
|
||||
# AWX
|
||||
from awx.fact.models.fact import * # noqa
|
||||
from awx.main.models.fact import Fact
|
||||
from awx.main.models.inventory import Host
|
||||
from awx.main.socket import Socket
|
||||
|
||||
logger = logging.getLogger('awx.main.commands.run_fact_cache_receiver')
|
||||
@@ -47,35 +49,34 @@ class FactCacheReceiver(object):
|
||||
# ansible v2 will not emit this message. Thus, this can be removed at that time.
|
||||
if 'module_setup' in facts_data and len(facts_data) == 1:
|
||||
logger.info('Received module_setup message')
|
||||
return
|
||||
return None
|
||||
|
||||
try:
|
||||
host = FactHost.objects.get(hostname=hostname, inventory_id=inventory_id)
|
||||
except FactHost.DoesNotExist:
|
||||
logger.info('Creating new host <hostname, inventory_id> <%s, %s>' % (hostname, inventory_id))
|
||||
host = FactHost(hostname=hostname, inventory_id=inventory_id)
|
||||
host.save()
|
||||
logger.info('Created new host <%s>' % (host.id))
|
||||
except FactHost.MultipleObjectsReturned:
|
||||
query = "db['fact_host'].find(hostname=%s, inventory_id=%s)" % (hostname, inventory_id)
|
||||
logger.warn('Database inconsistent. Multiple FactHost "%s" exist. Try the query %s to find the records.' % (hostname, query))
|
||||
host_obj = Host.objects.get(name=hostname, inventory__id=inventory_id)
|
||||
except Fact.DoesNotExist:
|
||||
logger.warn('Failed to intake fact. Host does not exist <hostname, inventory_id> <%s, %s>' % (hostname, inventory_id))
|
||||
return
|
||||
except Fact.MultipleObjectsReturned:
|
||||
logger.warn('Database inconsistent. Multiple Hosts found for <hostname, inventory_id> <%s, %s>.' % (hostname, inventory_id))
|
||||
return None
|
||||
except Exception, e:
|
||||
logger.error("Exception communicating with Fact Cache Database: %s" % str(e))
|
||||
return
|
||||
return None
|
||||
|
||||
(module, facts) = self.process_facts(facts_data)
|
||||
(module_name, facts) = self.process_facts(facts_data)
|
||||
self.timestamp = datetime.fromtimestamp(date_key, None)
|
||||
|
||||
try:
|
||||
# Update existing Fact entry
|
||||
version_obj = FactVersion.objects.get(timestamp=self.timestamp, host=host, module=module)
|
||||
Fact.objects(id=version_obj.fact.id).update_one(fact=facts)
|
||||
logger.info('Updated existing fact <%s>' % (version_obj.fact.id))
|
||||
except FactVersion.DoesNotExist:
|
||||
# Update existing Fact entry
|
||||
fact_obj = Fact.objects.filter(host__id=host_obj.id, module=module_name, timestamp=self.timestamp)
|
||||
if fact_obj:
|
||||
fact_obj.facts = facts
|
||||
fact_obj.save()
|
||||
logger.info('Updated existing fact <%s>' % (fact_obj.id))
|
||||
else:
|
||||
# Create new Fact entry
|
||||
(fact_obj, version_obj) = Fact.add_fact(self.timestamp, facts, host, module)
|
||||
logger.info('Created new fact <fact, fact_version> <%s, %s>' % (fact_obj.id, version_obj.id))
|
||||
fact_obj = Fact.add_fact(host_obj.id, module_name, self.timestamp, facts)
|
||||
logger.info('Created new fact <fact_id, module> <%s, %s>' % (fact_obj.id, module_name))
|
||||
return fact_obj
|
||||
|
||||
def run_receiver(self, use_processing_threads=True):
|
||||
with Socket('fact_cache', 'r') as facts:
|
||||
|
||||
@@ -15,7 +15,7 @@ from django.core.management.base import NoArgsCommand
|
||||
# AWX
|
||||
from awx.main.models import * # noqa
|
||||
from awx.main.queue import FifoQueue
|
||||
from awx.main.tasks import handle_work_error
|
||||
from awx.main.tasks import handle_work_error, handle_work_success
|
||||
from awx.main.utils import get_system_task_capacity
|
||||
|
||||
# Celery
|
||||
@@ -265,14 +265,15 @@ def process_graph(graph, task_capacity):
|
||||
[{'type': graph.get_node_type(n['node_object']),
|
||||
'id': n['node_object'].id} for n in node_dependencies]
|
||||
error_handler = handle_work_error.s(subtasks=dependent_nodes)
|
||||
start_status = node_obj.start(error_callback=error_handler)
|
||||
success_handler = handle_work_success.s(task_actual={'type': graph.get_node_type(node_obj),
|
||||
'id': node_obj.id})
|
||||
start_status = node_obj.start(error_callback=error_handler, success_callback=success_handler)
|
||||
if not start_status:
|
||||
node_obj.status = 'failed'
|
||||
if node_obj.job_explanation:
|
||||
node_obj.job_explanation += ' '
|
||||
node_obj.job_explanation += 'Task failed pre-start check.'
|
||||
node_obj.save()
|
||||
# TODO: Run error handler
|
||||
continue
|
||||
remaining_volume -= impact
|
||||
running_impact += impact
|
||||
|
||||
@@ -43,7 +43,7 @@ class Migration(migrations.Migration):
|
||||
('created', models.DateTimeField(default=None, editable=False)),
|
||||
('modified', models.DateTimeField(default=None, editable=False)),
|
||||
('host_name', models.CharField(default=b'', max_length=1024, editable=False)),
|
||||
('event', models.CharField(max_length=100, choices=[(b'runner_on_failed', 'Host Failed'), (b'runner_on_ok', 'Host OK'), (b'runner_on_unreachable', 'Host Unreachable')])),
|
||||
('event', models.CharField(max_length=100, choices=[(b'runner_on_failed', 'Host Failed'), (b'runner_on_ok', 'Host OK'), (b'runner_on_unreachable', 'Host Unreachable'), (b'runner_on_skipped', 'Host Skipped')])),
|
||||
('event_data', jsonfield.fields.JSONField(default={}, blank=True)),
|
||||
('failed', models.BooleanField(default=False, editable=False)),
|
||||
('changed', models.BooleanField(default=False, editable=False)),
|
||||
|
||||
105
awx/main/migrations/0003_v300_changes.py
Normal file
105
awx/main/migrations/0003_v300_changes.py
Normal file
@@ -0,0 +1,105 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
from __future__ import unicode_literals
|
||||
|
||||
from django.db import migrations, models
|
||||
import jsonfield.fields
|
||||
import django.db.models.deletion
|
||||
from django.conf import settings
|
||||
import taggit.managers
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('taggit', '0002_auto_20150616_2121'),
|
||||
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
|
||||
('main', '0002_v300_changes'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name='Notification',
|
||||
fields=[
|
||||
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
|
||||
('created', models.DateTimeField(default=None, editable=False)),
|
||||
('modified', models.DateTimeField(default=None, editable=False)),
|
||||
('status', models.CharField(default=b'pending', max_length=20, editable=False, choices=[(b'pending', 'Pending'), (b'successful', 'Successful'), (b'failed', 'Failed')])),
|
||||
('error', models.TextField(default=b'', editable=False, blank=True)),
|
||||
('notifications_sent', models.IntegerField(default=0, editable=False)),
|
||||
('notification_type', models.CharField(max_length=32, choices=[(b'email', 'Email'), (b'slack', 'Slack'), (b'twilio', 'Twilio'), (b'pagerduty', 'Pagerduty'), (b'hipchat', 'HipChat'), (b'webhook', 'Webhook'), (b'irc', 'IRC')])),
|
||||
('recipients', models.TextField(default=b'', editable=False, blank=True)),
|
||||
('subject', models.TextField(default=b'', editable=False, blank=True)),
|
||||
('body', jsonfield.fields.JSONField(default=dict, blank=True)),
|
||||
],
|
||||
options={
|
||||
'ordering': ('pk',),
|
||||
},
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='Notifier',
|
||||
fields=[
|
||||
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
|
||||
('created', models.DateTimeField(default=None, editable=False)),
|
||||
('modified', models.DateTimeField(default=None, editable=False)),
|
||||
('description', models.TextField(default=b'', blank=True)),
|
||||
('active', models.BooleanField(default=True, editable=False)),
|
||||
('name', models.CharField(unique=True, max_length=512)),
|
||||
('notification_type', models.CharField(max_length=32, choices=[(b'email', 'Email'), (b'slack', 'Slack'), (b'twilio', 'Twilio'), (b'pagerduty', 'Pagerduty'), (b'hipchat', 'HipChat'), (b'webhook', 'Webhook'), (b'irc', 'IRC')])),
|
||||
('notification_configuration', jsonfield.fields.JSONField(default=dict)),
|
||||
('created_by', models.ForeignKey(related_name="{u'class': 'notifier', u'app_label': 'main'}(class)s_created+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True)),
|
||||
('modified_by', models.ForeignKey(related_name="{u'class': 'notifier', u'app_label': 'main'}(class)s_modified+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True)),
|
||||
('organization', models.ForeignKey(related_name='notifiers', on_delete=django.db.models.deletion.SET_NULL, to='main.Organization', null=True)),
|
||||
('tags', taggit.managers.TaggableManager(to='taggit.Tag', through='taggit.TaggedItem', blank=True, help_text='A comma-separated list of tags.', verbose_name='Tags')),
|
||||
],
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='notification',
|
||||
name='notifier',
|
||||
field=models.ForeignKey(related_name='notifications', editable=False, to='main.Notifier'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='activitystream',
|
||||
name='notification',
|
||||
field=models.ManyToManyField(to='main.Notification', blank=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='activitystream',
|
||||
name='notifier',
|
||||
field=models.ManyToManyField(to='main.Notifier', blank=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='organization',
|
||||
name='notifiers_any',
|
||||
field=models.ManyToManyField(related_name='organization_notifiers_for_any', to='main.Notifier', blank=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='organization',
|
||||
name='notifiers_error',
|
||||
field=models.ManyToManyField(related_name='organization_notifiers_for_errors', to='main.Notifier', blank=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='organization',
|
||||
name='notifiers_success',
|
||||
field=models.ManyToManyField(related_name='organization_notifiers_for_success', to='main.Notifier', blank=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='unifiedjob',
|
||||
name='notifications',
|
||||
field=models.ManyToManyField(related_name='unifiedjob_notifications', editable=False, to='main.Notification'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='unifiedjobtemplate',
|
||||
name='notifiers_any',
|
||||
field=models.ManyToManyField(related_name='unifiedjobtemplate_notifiers_for_any', to='main.Notifier', blank=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='unifiedjobtemplate',
|
||||
name='notifiers_error',
|
||||
field=models.ManyToManyField(related_name='unifiedjobtemplate_notifiers_for_errors', to='main.Notifier', blank=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='unifiedjobtemplate',
|
||||
name='notifiers_success',
|
||||
field=models.ManyToManyField(related_name='unifiedjobtemplate_notifiers_for_success', to='main.Notifier', blank=True),
|
||||
),
|
||||
]
|
||||
29
awx/main/migrations/0004_v300_changes.py
Normal file
29
awx/main/migrations/0004_v300_changes.py
Normal file
@@ -0,0 +1,29 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
from __future__ import unicode_literals
|
||||
|
||||
from django.db import migrations, models
|
||||
import jsonbfield.fields
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('main', '0003_v300_changes'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name='Fact',
|
||||
fields=[
|
||||
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
|
||||
('timestamp', models.DateTimeField(default=None, help_text='Date and time of the corresponding fact scan gathering time.', editable=False)),
|
||||
('module', models.CharField(max_length=128)),
|
||||
('facts', jsonbfield.fields.JSONField(default={}, help_text='Arbitrary JSON structure of module facts captured at timestamp for a single host.', blank=True)),
|
||||
('host', models.ForeignKey(related_name='facts', to='main.Host', help_text='Host for the facts that the fact scan captured.')),
|
||||
],
|
||||
),
|
||||
migrations.AlterIndexTogether(
|
||||
name='fact',
|
||||
index_together=set([('timestamp', 'module', 'host')]),
|
||||
),
|
||||
]
|
||||
16
awx/main/migrations/0005_v300_active_field_changes.py
Normal file
16
awx/main/migrations/0005_v300_active_field_changes.py
Normal file
@@ -0,0 +1,16 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
from __future__ import unicode_literals
|
||||
|
||||
from awx.main.migrations import _rbac as rbac
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('main', '0004_v300_changes'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
# This is a placeholder for our future active flag removal work
|
||||
]
|
||||
@@ -14,7 +14,7 @@ class Migration(migrations.Migration):
|
||||
('taggit', '0002_auto_20150616_2121'),
|
||||
('contenttypes', '0002_remove_content_type_name'),
|
||||
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
|
||||
('main', '0002_v300_changes'),
|
||||
('main', '0005_v300_active_field_changes'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
@@ -8,7 +8,7 @@ from django.db import migrations
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('main', '0003_rbac_changes'),
|
||||
('main', '0006_v300_rbac_changes'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
@@ -20,6 +20,8 @@ from awx.main.models.configuration import * # noqa
|
||||
from awx.main.models.rbac import * # noqa
|
||||
from awx.main.models.user import * # noqa
|
||||
from awx.main.models.mixins import * # noqa
|
||||
from awx.main.models.notifications import * # noqa
|
||||
from awx.main.models.fact import * # noqa
|
||||
|
||||
# Monkeypatch Django serializer to ignore django-taggit fields (which break
|
||||
# the dumpdata command; see https://github.com/alex/django-taggit/issues/155).
|
||||
@@ -62,3 +64,5 @@ activity_stream_registrar.connect(AdHocCommand)
|
||||
activity_stream_registrar.connect(Schedule)
|
||||
activity_stream_registrar.connect(CustomInventoryScript)
|
||||
activity_stream_registrar.connect(TowerSettings)
|
||||
activity_stream_registrar.connect(Notifier)
|
||||
activity_stream_registrar.connect(Notification)
|
||||
|
||||
@@ -53,6 +53,8 @@ class ActivityStream(models.Model):
|
||||
ad_hoc_command = models.ManyToManyField("AdHocCommand", blank=True)
|
||||
schedule = models.ManyToManyField("Schedule", blank=True)
|
||||
custom_inventory_script = models.ManyToManyField("CustomInventoryScript", blank=True)
|
||||
notifier = models.ManyToManyField("Notifier", blank=True)
|
||||
notification = models.ManyToManyField("Notification", blank=True)
|
||||
|
||||
def get_absolute_url(self):
|
||||
return reverse('api:activity_stream_detail', args=(self.pk,))
|
||||
|
||||
@@ -5,6 +5,7 @@
|
||||
import hmac
|
||||
import json
|
||||
import logging
|
||||
from urlparse import urljoin
|
||||
|
||||
# Django
|
||||
from django.conf import settings
|
||||
@@ -139,6 +140,9 @@ class AdHocCommand(UnifiedJob):
|
||||
def get_absolute_url(self):
|
||||
return reverse('api:ad_hoc_command_detail', args=(self.pk,))
|
||||
|
||||
def get_ui_url(self):
|
||||
return urljoin(tower_settings.TOWER_URL_BASE, "/#/ad_hoc_commands/{}".format(self.pk))
|
||||
|
||||
@property
|
||||
def task_auth_token(self):
|
||||
'''Return temporary auth token used for task requests via API.'''
|
||||
@@ -221,8 +225,9 @@ class AdHocCommandEvent(CreatedModifiedModel):
|
||||
('runner_on_unreachable', _('Host Unreachable'), True),
|
||||
# Tower won't see no_hosts (check is done earlier without callback).
|
||||
#('runner_on_no_hosts', _('No Hosts Matched'), False),
|
||||
# Tower should probably never see skipped (no conditionals).
|
||||
#('runner_on_skipped', _('Host Skipped'), False),
|
||||
# Tower will see skipped (when running in check mode for a module that
|
||||
# does not support check mode).
|
||||
('runner_on_skipped', _('Host Skipped'), False),
|
||||
# Tower does not support async for ad hoc commands.
|
||||
#('runner_on_async_poll', _('Host Polling'), False),
|
||||
#('runner_on_async_ok', _('Host Async OK'), False),
|
||||
|
||||
@@ -25,7 +25,7 @@ from awx.main.utils import encrypt_field
|
||||
|
||||
__all__ = ['VarsDictProperty', 'BaseModel', 'CreatedModifiedModel',
|
||||
'PasswordFieldsModel', 'PrimordialModel', 'CommonModel',
|
||||
'CommonModelNameNotUnique',
|
||||
'CommonModelNameNotUnique', 'NotificationFieldsModel',
|
||||
'PERM_INVENTORY_ADMIN', 'PERM_INVENTORY_READ',
|
||||
'PERM_INVENTORY_WRITE', 'PERM_INVENTORY_DEPLOY', 'PERM_INVENTORY_SCAN',
|
||||
'PERM_INVENTORY_CHECK', 'PERM_JOBTEMPLATE_CREATE', 'JOB_TYPE_CHOICES',
|
||||
@@ -337,3 +337,26 @@ class CommonModelNameNotUnique(PrimordialModel):
|
||||
max_length=512,
|
||||
unique=False,
|
||||
)
|
||||
|
||||
class NotificationFieldsModel(BaseModel):
|
||||
|
||||
class Meta:
|
||||
abstract = True
|
||||
|
||||
notifiers_error = models.ManyToManyField(
|
||||
"Notifier",
|
||||
blank=True,
|
||||
related_name='%(class)s_notifiers_for_errors'
|
||||
)
|
||||
|
||||
notifiers_success = models.ManyToManyField(
|
||||
"Notifier",
|
||||
blank=True,
|
||||
related_name='%(class)s_notifiers_for_success'
|
||||
)
|
||||
|
||||
notifiers_any = models.ManyToManyField(
|
||||
"Notifier",
|
||||
blank=True,
|
||||
related_name='%(class)s_notifiers_for_any'
|
||||
)
|
||||
|
||||
@@ -157,11 +157,13 @@ class Credential(PasswordFieldsModel, CommonModelNameNotUnique, ResourceMixin):
|
||||
)
|
||||
owner_role = ImplicitRoleField(
|
||||
role_name='Credential Owner',
|
||||
role_description='Owner of the credential',
|
||||
parent_role='team.admin_role',
|
||||
permissions = {'all': True}
|
||||
)
|
||||
usage_role = ImplicitRoleField(
|
||||
role_name='Credential User',
|
||||
role_description='May use this credential, but not read sensitive portions or modify it',
|
||||
parent_role= 'team.member_role',
|
||||
permissions = {'use': True}
|
||||
)
|
||||
|
||||
64
awx/main/models/fact.py
Normal file
64
awx/main/models/fact.py
Normal file
@@ -0,0 +1,64 @@
|
||||
# Copyright (c) 2016 Ansible, Inc.
|
||||
# All Rights Reserved.
|
||||
|
||||
from django.db import models
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
|
||||
from jsonbfield.fields import JSONField
|
||||
|
||||
__all__ = ('Fact', )
|
||||
|
||||
class Fact(models.Model):
|
||||
"""A model representing a fact returned from Ansible.
|
||||
Facts are stored as JSON dictionaries.
|
||||
"""
|
||||
host = models.ForeignKey(
|
||||
'Host',
|
||||
related_name='facts',
|
||||
db_index=True,
|
||||
on_delete=models.CASCADE,
|
||||
help_text=_('Host for the facts that the fact scan captured.'),
|
||||
)
|
||||
timestamp = models.DateTimeField(
|
||||
default=None,
|
||||
editable=False,
|
||||
help_text=_('Date and time of the corresponding fact scan gathering time.')
|
||||
)
|
||||
module = models.CharField(max_length=128)
|
||||
facts = JSONField(blank=True, default={}, help_text=_('Arbitrary JSON structure of module facts captured at timestamp for a single host.'))
|
||||
|
||||
class Meta:
|
||||
app_label = 'main'
|
||||
index_together = [
|
||||
["timestamp", "module", "host"],
|
||||
]
|
||||
|
||||
@staticmethod
|
||||
def get_host_fact(host_id, module, timestamp):
|
||||
qs = Fact.objects.filter(host__id=host_id, module=module, timestamp__lte=timestamp).order_by('-timestamp')
|
||||
if qs:
|
||||
return qs[0]
|
||||
else:
|
||||
return None
|
||||
|
||||
@staticmethod
|
||||
def get_timeline(host_id, module=None, ts_from=None, ts_to=None):
|
||||
kwargs = {
|
||||
'host__id': host_id,
|
||||
}
|
||||
if module:
|
||||
kwargs['module'] = module
|
||||
if ts_from and ts_to and ts_from == ts_to:
|
||||
kwargs['timestamp'] = ts_from
|
||||
else:
|
||||
if ts_from:
|
||||
kwargs['timestamp__gt'] = ts_from
|
||||
if ts_to:
|
||||
kwargs['timestamp__lte'] = ts_to
|
||||
return Fact.objects.filter(**kwargs).order_by('-timestamp').only('timestamp', 'module').order_by('-timestamp', 'module')
|
||||
|
||||
@staticmethod
|
||||
def add_fact(host_id, module, timestamp, facts):
|
||||
fact_obj = Fact.objects.create(host_id=host_id, module=module, timestamp=timestamp, facts=facts)
|
||||
fact_obj.save()
|
||||
return fact_obj
|
||||
@@ -6,6 +6,7 @@ import datetime
|
||||
import logging
|
||||
import re
|
||||
import copy
|
||||
from urlparse import urljoin
|
||||
|
||||
# Django
|
||||
from django.conf import settings
|
||||
@@ -24,7 +25,9 @@ from awx.main.models.base import * # noqa
|
||||
from awx.main.models.jobs import Job
|
||||
from awx.main.models.unified_jobs import * # noqa
|
||||
from awx.main.models.mixins import ResourceMixin
|
||||
from awx.main.models.notifications import Notifier
|
||||
from awx.main.utils import ignore_inventory_computed_fields, _inventory_updates
|
||||
from awx.main.conf import tower_settings
|
||||
|
||||
__all__ = ['Inventory', 'Host', 'Group', 'InventorySource', 'InventoryUpdate', 'CustomInventoryScript']
|
||||
|
||||
@@ -95,19 +98,23 @@ class Inventory(CommonModel, ResourceMixin):
|
||||
)
|
||||
admin_role = ImplicitRoleField(
|
||||
role_name='Inventory Administrator',
|
||||
role_description='May manage this inventory',
|
||||
parent_role='organization.admin_role',
|
||||
permissions = {'all': True}
|
||||
)
|
||||
auditor_role = ImplicitRoleField(
|
||||
role_name='Inventory Auditor',
|
||||
role_description='May view but not modify this inventory',
|
||||
parent_role='organization.auditor_role',
|
||||
permissions = {'read': True}
|
||||
)
|
||||
updater_role = ImplicitRoleField(
|
||||
role_name='Inventory Updater',
|
||||
role_description='May update the inventory',
|
||||
)
|
||||
executor_role = ImplicitRoleField(
|
||||
role_name='Inventory Executor',
|
||||
role_description='May execute jobs against this inventory',
|
||||
)
|
||||
|
||||
def get_absolute_url(self):
|
||||
@@ -1217,6 +1224,14 @@ class InventorySource(UnifiedJobTemplate, InventorySourceOptions, ResourceMixin)
|
||||
return True
|
||||
return False
|
||||
|
||||
@property
|
||||
def notifiers(self):
|
||||
base_notifiers = Notifier.objects.filter(active=True)
|
||||
error_notifiers = list(base_notifiers.filter(organization_notifiers_for_errors=self.inventory.organization))
|
||||
success_notifiers = list(base_notifiers.filter(organization_notifiers_for_success=self.inventory.organization))
|
||||
any_notifiers = list(base_notifiers.filter(organization_notifiers_for_any=self.inventory.organization))
|
||||
return dict(error=error_notifiers, success=success_notifiers, any=any_notifiers)
|
||||
|
||||
def clean_source(self):
|
||||
source = self.source
|
||||
if source and self.group:
|
||||
@@ -1276,6 +1291,9 @@ class InventoryUpdate(UnifiedJob, InventorySourceOptions):
|
||||
def get_absolute_url(self):
|
||||
return reverse('api:inventory_update_detail', args=(self.pk,))
|
||||
|
||||
def get_ui_url(self):
|
||||
return urljoin(tower_settings.TOWER_URL_BASE, "/#/inventory_sync/{}".format(self.pk))
|
||||
|
||||
def is_blocked_by(self, obj):
|
||||
if type(obj) == InventoryUpdate:
|
||||
if self.inventory_source.inventory == obj.inventory_source.inventory:
|
||||
|
||||
@@ -6,6 +6,7 @@ import hmac
|
||||
import json
|
||||
import yaml
|
||||
import logging
|
||||
from urlparse import urljoin
|
||||
|
||||
# Django
|
||||
from django.conf import settings
|
||||
@@ -22,6 +23,7 @@ from jsonfield import JSONField
|
||||
from awx.main.constants import CLOUD_PROVIDERS
|
||||
from awx.main.models.base import * # noqa
|
||||
from awx.main.models.unified_jobs import * # noqa
|
||||
from awx.main.models.notifications import Notifier
|
||||
from awx.main.utils import decrypt_field, ignore_inventory_computed_fields
|
||||
from awx.main.utils import emit_websocket_notification
|
||||
from awx.main.redact import PlainTextCleaner
|
||||
@@ -183,16 +185,19 @@ class JobTemplate(UnifiedJobTemplate, JobOptions, ResourceMixin):
|
||||
)
|
||||
admin_role = ImplicitRoleField(
|
||||
role_name='Job Template Administrator',
|
||||
role_description='Full access to all settings',
|
||||
parent_role='project.admin_role',
|
||||
permissions = {'all': True}
|
||||
)
|
||||
auditor_role = ImplicitRoleField(
|
||||
role_name='Job Template Auditor',
|
||||
role_description='Read-only access to all settings',
|
||||
parent_role='project.auditor_role',
|
||||
permissions = {'read': True}
|
||||
)
|
||||
executor_role = ImplicitRoleField(
|
||||
role_name='Job Template Executor',
|
||||
role_name='Job Template Runner',
|
||||
role_description='May run the job template',
|
||||
permissions = {'read': True, 'execute': True}
|
||||
)
|
||||
|
||||
@@ -347,6 +352,20 @@ class JobTemplate(UnifiedJobTemplate, JobOptions, ResourceMixin):
|
||||
def _can_update(self):
|
||||
return self.can_start_without_user_input()
|
||||
|
||||
@property
|
||||
def notifiers(self):
|
||||
# Return all notifiers defined on the Job Template, on the Project, and on the Organization for each trigger type
|
||||
# TODO: Currently there is no org fk on project so this will need to be added once that is
|
||||
# available after the rbac pr
|
||||
base_notifiers = Notifier.objects.filter(active=True)
|
||||
error_notifiers = list(base_notifiers.filter(unifiedjobtemplate_notifiers_for_errors__in=[self, self.project]))
|
||||
success_notifiers = list(base_notifiers.filter(unifiedjobtemplate_notifiers_for_success__in=[self, self.project]))
|
||||
any_notifiers = list(base_notifiers.filter(unifiedjobtemplate_notifiers_for_any__in=[self, self.project]))
|
||||
# Get Organization Notifiers
|
||||
error_notifiers = set(error_notifiers + list(base_notifiers.filter(organization_notifiers_for_errors__in=self.project.organizations.all())))
|
||||
success_notifiers = set(success_notifiers + list(base_notifiers.filter(organization_notifiers_for_success__in=self.project.organizations.all())))
|
||||
any_notifiers = set(any_notifiers + list(base_notifiers.filter(organization_notifiers_for_any__in=self.project.organizations.all())))
|
||||
return dict(error=list(error_notifiers), success=list(success_notifiers), any=list(any_notifiers))
|
||||
|
||||
class Job(UnifiedJob, JobOptions):
|
||||
'''
|
||||
@@ -386,6 +405,9 @@ class Job(UnifiedJob, JobOptions):
|
||||
def get_absolute_url(self):
|
||||
return reverse('api:job_detail', args=(self.pk,))
|
||||
|
||||
def get_ui_url(self):
|
||||
return urljoin(tower_settings.TOWER_URL_BASE, "/#/jobs/{}".format(self.pk))
|
||||
|
||||
@property
|
||||
def task_auth_token(self):
|
||||
'''Return temporary auth token used for task requests via API.'''
|
||||
@@ -502,6 +524,26 @@ class Job(UnifiedJob, JobOptions):
|
||||
dependencies.append(source.create_inventory_update(launch_type='dependency'))
|
||||
return dependencies
|
||||
|
||||
def notification_data(self):
|
||||
data = super(Job, self).notification_data()
|
||||
all_hosts = {}
|
||||
for h in self.job_host_summaries.all():
|
||||
all_hosts[h.host.name] = dict(failed=h.failed,
|
||||
changed=h.changed,
|
||||
dark=h.dark,
|
||||
failures=h.failures,
|
||||
ok=h.ok,
|
||||
processed=h.processed,
|
||||
skipped=h.skipped)
|
||||
data.update(dict(inventory=self.inventory.name,
|
||||
project=self.project.name,
|
||||
playbook=self.playbook,
|
||||
credential=self.credential.name,
|
||||
limit=self.limit,
|
||||
extra_vars=self.extra_vars,
|
||||
hosts=all_hosts))
|
||||
return data
|
||||
|
||||
def handle_extra_data(self, extra_data):
|
||||
extra_vars = {}
|
||||
if isinstance(extra_data, dict):
|
||||
@@ -1082,6 +1124,9 @@ class SystemJob(UnifiedJob, SystemJobOptions):
|
||||
def get_absolute_url(self):
|
||||
return reverse('api:system_job_detail', args=(self.pk,))
|
||||
|
||||
def get_ui_url(self):
|
||||
return urljoin(tower_settings.TOWER_URL_BASE, "/#/management_jobs/{}".format(self.pk))
|
||||
|
||||
def is_blocked_by(self, obj):
|
||||
return True
|
||||
|
||||
|
||||
172
awx/main/models/notifications.py
Normal file
172
awx/main/models/notifications.py
Normal file
@@ -0,0 +1,172 @@
|
||||
# Copyright (c) 2016 Ansible, Inc.
|
||||
# All Rights Reserved.
|
||||
|
||||
import logging
|
||||
|
||||
from django.db import models
|
||||
from django.core.urlresolvers import reverse
|
||||
from django.core.mail.message import EmailMessage
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
from django.utils.encoding import smart_str
|
||||
|
||||
from awx.main.models.base import * # noqa
|
||||
from awx.main.utils import encrypt_field, decrypt_field
|
||||
from awx.main.notifications.email_backend import CustomEmailBackend
|
||||
from awx.main.notifications.slack_backend import SlackBackend
|
||||
from awx.main.notifications.twilio_backend import TwilioBackend
|
||||
from awx.main.notifications.pagerduty_backend import PagerDutyBackend
|
||||
from awx.main.notifications.hipchat_backend import HipChatBackend
|
||||
from awx.main.notifications.webhook_backend import WebhookBackend
|
||||
from awx.main.notifications.irc_backend import IrcBackend
|
||||
|
||||
# Django-JSONField
|
||||
from jsonfield import JSONField
|
||||
|
||||
logger = logging.getLogger('awx.main.models.notifications')
|
||||
|
||||
__all__ = ['Notifier', 'Notification']
|
||||
|
||||
class Notifier(CommonModel):
|
||||
|
||||
NOTIFICATION_TYPES = [('email', _('Email'), CustomEmailBackend),
|
||||
('slack', _('Slack'), SlackBackend),
|
||||
('twilio', _('Twilio'), TwilioBackend),
|
||||
('pagerduty', _('Pagerduty'), PagerDutyBackend),
|
||||
('hipchat', _('HipChat'), HipChatBackend),
|
||||
('webhook', _('Webhook'), WebhookBackend),
|
||||
('irc', _('IRC'), IrcBackend)]
|
||||
NOTIFICATION_TYPE_CHOICES = [(x[0], x[1]) for x in NOTIFICATION_TYPES]
|
||||
CLASS_FOR_NOTIFICATION_TYPE = dict([(x[0], x[2]) for x in NOTIFICATION_TYPES])
|
||||
|
||||
class Meta:
|
||||
app_label = 'main'
|
||||
|
||||
organization = models.ForeignKey(
|
||||
'Organization',
|
||||
blank=False,
|
||||
null=True,
|
||||
on_delete=models.SET_NULL,
|
||||
related_name='notifiers',
|
||||
)
|
||||
|
||||
notification_type = models.CharField(
|
||||
max_length = 32,
|
||||
choices=NOTIFICATION_TYPE_CHOICES,
|
||||
)
|
||||
|
||||
notification_configuration = JSONField(blank=False)
|
||||
|
||||
def get_absolute_url(self):
|
||||
return reverse('api:notifier_detail', args=(self.pk,))
|
||||
|
||||
@property
|
||||
def notification_class(self):
|
||||
return self.CLASS_FOR_NOTIFICATION_TYPE[self.notification_type]
|
||||
|
||||
def save(self, *args, **kwargs):
|
||||
new_instance = not bool(self.pk)
|
||||
update_fields = kwargs.get('update_fields', [])
|
||||
for field in filter(lambda x: self.notification_class.init_parameters[x]['type'] == "password",
|
||||
self.notification_class.init_parameters):
|
||||
if new_instance:
|
||||
value = self.notification_configuration[field]
|
||||
setattr(self, '_saved_{}_{}'.format("config", field), value)
|
||||
self.notification_configuration[field] = ''
|
||||
else:
|
||||
encrypted = encrypt_field(self, 'notification_configuration', subfield=field)
|
||||
self.notification_configuration[field] = encrypted
|
||||
if 'notification_configuration' not in update_fields:
|
||||
update_fields.append('notification_configuration')
|
||||
super(Notifier, self).save(*args, **kwargs)
|
||||
if new_instance:
|
||||
update_fields = []
|
||||
for field in filter(lambda x: self.notification_class.init_parameters[x]['type'] == "password",
|
||||
self.notification_class.init_parameters):
|
||||
saved_value = getattr(self, '_saved_{}_{}'.format("config", field), '')
|
||||
self.notification_configuration[field] = saved_value
|
||||
#setattr(self.notification_configuration, field, saved_value)
|
||||
if 'notification_configuration' not in update_fields:
|
||||
update_fields.append('notification_configuration')
|
||||
self.save(update_fields=update_fields)
|
||||
|
||||
@property
|
||||
def recipients(self):
|
||||
return self.notification_configuration[self.notification_class.recipient_parameter]
|
||||
|
||||
def generate_notification(self, subject, message):
|
||||
notification = Notification(notifier=self,
|
||||
notification_type=self.notification_type,
|
||||
recipients=smart_str(self.recipients),
|
||||
subject=subject,
|
||||
body=message)
|
||||
notification.save()
|
||||
return notification
|
||||
|
||||
def send(self, subject, body):
|
||||
for field in filter(lambda x: self.notification_class.init_parameters[x]['type'] == "password",
|
||||
self.notification_class.init_parameters):
|
||||
self.notification_configuration[field] = decrypt_field(self,
|
||||
'notification_configuration',
|
||||
subfield=field)
|
||||
recipients = self.notification_configuration.pop(self.notification_class.recipient_parameter)
|
||||
if not isinstance(recipients, list):
|
||||
recipients = [recipients]
|
||||
sender = self.notification_configuration.pop(self.notification_class.sender_parameter, None)
|
||||
backend_obj = self.notification_class(**self.notification_configuration)
|
||||
notification_obj = EmailMessage(subject, backend_obj.format_body(body), sender, recipients)
|
||||
return backend_obj.send_messages([notification_obj])
|
||||
|
||||
class Notification(CreatedModifiedModel):
|
||||
'''
|
||||
A notification event emitted when a Notifier is run
|
||||
'''
|
||||
|
||||
NOTIFICATION_STATE_CHOICES = [
|
||||
('pending', _('Pending')),
|
||||
('successful', _('Successful')),
|
||||
('failed', _('Failed')),
|
||||
]
|
||||
|
||||
class Meta:
|
||||
app_label = 'main'
|
||||
ordering = ('pk',)
|
||||
|
||||
notifier = models.ForeignKey(
|
||||
'Notifier',
|
||||
related_name='notifications',
|
||||
on_delete=models.CASCADE,
|
||||
editable=False
|
||||
)
|
||||
status = models.CharField(
|
||||
max_length=20,
|
||||
choices=NOTIFICATION_STATE_CHOICES,
|
||||
default='pending',
|
||||
editable=False,
|
||||
)
|
||||
error = models.TextField(
|
||||
blank=True,
|
||||
default='',
|
||||
editable=False,
|
||||
)
|
||||
notifications_sent = models.IntegerField(
|
||||
default=0,
|
||||
editable=False,
|
||||
)
|
||||
notification_type = models.CharField(
|
||||
max_length = 32,
|
||||
choices=Notifier.NOTIFICATION_TYPE_CHOICES,
|
||||
)
|
||||
recipients = models.TextField(
|
||||
blank=True,
|
||||
default='',
|
||||
editable=False,
|
||||
)
|
||||
subject = models.TextField(
|
||||
blank=True,
|
||||
default='',
|
||||
editable=False,
|
||||
)
|
||||
body = JSONField(blank=True)
|
||||
|
||||
def get_absolute_url(self):
|
||||
return reverse('api:notification_detail', args=(self.pk,))
|
||||
@@ -29,7 +29,7 @@ from awx.main.conf import tower_settings
|
||||
__all__ = ['Organization', 'Team', 'Permission', 'Profile', 'AuthToken']
|
||||
|
||||
|
||||
class Organization(CommonModel, ResourceMixin):
|
||||
class Organization(CommonModel, NotificationFieldsModel, ResourceMixin):
|
||||
'''
|
||||
An organization is the basic unit of multi-tenancy divisions
|
||||
'''
|
||||
@@ -55,16 +55,20 @@ class Organization(CommonModel, ResourceMixin):
|
||||
)
|
||||
admin_role = ImplicitRoleField(
|
||||
role_name='Organization Administrator',
|
||||
role_description='May manage all aspects of this organization',
|
||||
parent_role='singleton:' + ROLE_SINGLETON_SYSTEM_ADMINISTRATOR,
|
||||
permissions = ALL_PERMISSIONS,
|
||||
)
|
||||
auditor_role = ImplicitRoleField(
|
||||
role_name='Organization Auditor',
|
||||
role_description='May read all settings associated with this organization',
|
||||
parent_role='singleton:' + ROLE_SINGLETON_SYSTEM_AUDITOR,
|
||||
permissions = {'read': True}
|
||||
)
|
||||
member_role = ImplicitRoleField(
|
||||
role_name='Organization Member',
|
||||
role_description='A member of this organization',
|
||||
parent_role='admin_role',
|
||||
permissions = {'read': True}
|
||||
)
|
||||
|
||||
@@ -111,16 +115,19 @@ class Team(CommonModelNameNotUnique, ResourceMixin):
|
||||
)
|
||||
admin_role = ImplicitRoleField(
|
||||
role_name='Team Administrator',
|
||||
role_description='May manage this team',
|
||||
parent_role='organization.admin_role',
|
||||
permissions = ALL_PERMISSIONS,
|
||||
)
|
||||
auditor_role = ImplicitRoleField(
|
||||
role_name='Team Auditor',
|
||||
role_description='May read all settings associated with this team',
|
||||
parent_role='organization.auditor_role',
|
||||
permissions = {'read': True}
|
||||
)
|
||||
member_role = ImplicitRoleField(
|
||||
role_name='Team Member',
|
||||
role_description='A member of this team',
|
||||
parent_role='admin_role',
|
||||
permissions = {'read':True},
|
||||
)
|
||||
|
||||
@@ -20,10 +20,12 @@ from django.utils.timezone import now, make_aware, get_default_timezone
|
||||
from awx.lib.compat import slugify
|
||||
from awx.main.models.base import * # noqa
|
||||
from awx.main.models.jobs import Job
|
||||
from awx.main.models.notifications import Notifier
|
||||
from awx.main.models.unified_jobs import * # noqa
|
||||
from awx.main.models.mixins import ResourceMixin
|
||||
from awx.main.utils import update_scm_url
|
||||
from awx.main.fields import ImplicitRoleField
|
||||
from awx.main.conf import tower_settings
|
||||
|
||||
__all__ = ['Project', 'ProjectUpdate']
|
||||
|
||||
@@ -209,20 +211,24 @@ class Project(UnifiedJobTemplate, ProjectOptions, ResourceMixin):
|
||||
)
|
||||
admin_role = ImplicitRoleField(
|
||||
role_name='Project Administrator',
|
||||
role_description='May manage this project',
|
||||
parent_role='organizations.admin_role',
|
||||
permissions = {'all': True}
|
||||
)
|
||||
auditor_role = ImplicitRoleField(
|
||||
role_name='Project Auditor',
|
||||
role_description='May read all settings associated with this project',
|
||||
parent_role='organizations.auditor_role',
|
||||
permissions = {'read': True}
|
||||
)
|
||||
member_role = ImplicitRoleField(
|
||||
role_name='Project Member',
|
||||
role_description='Implies membership within this project',
|
||||
permissions = {'read': True}
|
||||
)
|
||||
scm_update_role = ImplicitRoleField(
|
||||
role_name='Project Updater',
|
||||
role_description='May update this project from the source control management system',
|
||||
parent_role='admin_role',
|
||||
permissions = {'scm_update': True}
|
||||
)
|
||||
@@ -330,6 +336,18 @@ class Project(UnifiedJobTemplate, ProjectOptions, ResourceMixin):
|
||||
return True
|
||||
return False
|
||||
|
||||
@property
|
||||
def notifiers(self):
|
||||
base_notifiers = Notifier.objects.filter(active=True)
|
||||
error_notifiers = list(base_notifiers.filter(unifiedjobtemplate_notifiers_for_errors=self))
|
||||
success_notifiers = list(base_notifiers.filter(unifiedjobtemplate_notifiers_for_success=self))
|
||||
any_notifiers = list(base_notifiers.filter(unifiedjobtemplate_notifiers_for_any=self))
|
||||
# Get Organization Notifiers
|
||||
error_notifiers = set(error_notifiers + list(base_notifiers.filter(organization_notifiers_for_errors__in=self.organizations.all())))
|
||||
success_notifiers = set(success_notifiers + list(base_notifiers.filter(organization_notifiers_for_success__in=self.organizations.all())))
|
||||
any_notifiers = set(any_notifiers + list(base_notifiers.filter(organization_notifiers_for_any__in=self.organizations.all())))
|
||||
return dict(error=list(error_notifiers), success=list(success_notifiers), any=list(any_notifiers))
|
||||
|
||||
def get_absolute_url(self):
|
||||
return reverse('api:project_detail', args=(self.pk,))
|
||||
|
||||
@@ -391,6 +409,9 @@ class ProjectUpdate(UnifiedJob, ProjectOptions):
|
||||
def get_absolute_url(self):
|
||||
return reverse('api:project_update_detail', args=(self.pk,))
|
||||
|
||||
def get_ui_url(self):
|
||||
return urlparse.urljoin(tower_settings.TOWER_URL_BASE, "/#/scm_update/{}".format(self.pk))
|
||||
|
||||
def _update_parent_instance(self):
|
||||
parent_instance = self._get_parent_instance()
|
||||
if parent_instance:
|
||||
|
||||
@@ -6,6 +6,7 @@ import logging
|
||||
|
||||
# Django
|
||||
from django.db import models
|
||||
from django.db.models import Q
|
||||
from django.db.models.aggregates import Max
|
||||
from django.core.urlresolvers import reverse
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
@@ -128,6 +129,10 @@ class Role(CommonModelNameNotUnique):
|
||||
setattr(permission, k, int(permissions[k]))
|
||||
permission.save()
|
||||
|
||||
@staticmethod
|
||||
def visible_roles(user):
|
||||
return Role.objects.filter(Q(descendents__in=user.roles.filter()) | Q(ancestors__in=user.roles.filter()))
|
||||
|
||||
@staticmethod
|
||||
def singleton(name):
|
||||
try:
|
||||
|
||||
@@ -17,6 +17,7 @@ from django.db import models
|
||||
from django.core.exceptions import NON_FIELD_ERRORS
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
from django.utils.timezone import now
|
||||
from django.utils.encoding import smart_text
|
||||
|
||||
# Django-JSONField
|
||||
from jsonfield import JSONField
|
||||
@@ -40,7 +41,7 @@ logger = logging.getLogger('awx.main.models.unified_jobs')
|
||||
CAN_CANCEL = ('new', 'pending', 'waiting', 'running')
|
||||
|
||||
|
||||
class UnifiedJobTemplate(PolymorphicModel, CommonModelNameNotUnique):
|
||||
class UnifiedJobTemplate(PolymorphicModel, CommonModelNameNotUnique, NotificationFieldsModel):
|
||||
'''
|
||||
Concrete base class for unified job templates.
|
||||
'''
|
||||
@@ -297,6 +298,14 @@ class UnifiedJobTemplate(PolymorphicModel, CommonModelNameNotUnique):
|
||||
'''
|
||||
return kwargs # Override if needed in subclass.
|
||||
|
||||
@property
|
||||
def notifiers(self):
|
||||
'''
|
||||
Return notifiers relevant to this Unified Job Template
|
||||
'''
|
||||
# NOTE: Derived classes should implement
|
||||
return Notifier.objects.none()
|
||||
|
||||
def create_unified_job(self, **kwargs):
|
||||
'''
|
||||
Create a new unified job based on this unified job template.
|
||||
@@ -385,6 +394,11 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique
|
||||
editable=False,
|
||||
related_name='%(class)s_blocked_jobs+',
|
||||
)
|
||||
notifications = models.ManyToManyField(
|
||||
'Notification',
|
||||
editable=False,
|
||||
related_name='%(class)s_notifications',
|
||||
)
|
||||
cancel_flag = models.BooleanField(
|
||||
blank=True,
|
||||
default=False,
|
||||
@@ -470,6 +484,13 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique
|
||||
else:
|
||||
return ''
|
||||
|
||||
def get_ui_url(self):
|
||||
real_instance = self.get_real_instance()
|
||||
if real_instance != self:
|
||||
return real_instance.get_ui_url()
|
||||
else:
|
||||
return ''
|
||||
|
||||
@classmethod
|
||||
def _get_task_class(cls):
|
||||
raise NotImplementedError # Implement in subclasses.
|
||||
@@ -717,7 +738,17 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique
|
||||
tasks that might preclude creating one'''
|
||||
return []
|
||||
|
||||
def start(self, error_callback, **kwargs):
|
||||
def notification_data(self):
|
||||
return dict(id=self.id,
|
||||
name=self.name,
|
||||
url=self.get_ui_url(),
|
||||
created_by=smart_text(self.created_by),
|
||||
started=self.started.isoformat(),
|
||||
finished=self.finished.isoformat(),
|
||||
status=self.status,
|
||||
traceback=self.result_traceback)
|
||||
|
||||
def start(self, error_callback, success_callback, **kwargs):
|
||||
'''
|
||||
Start the task running via Celery.
|
||||
'''
|
||||
@@ -743,7 +774,7 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique
|
||||
# if field not in needed])
|
||||
if 'extra_vars' in kwargs:
|
||||
self.handle_extra_data(kwargs['extra_vars'])
|
||||
task_class().apply_async((self.pk,), opts, link_error=error_callback)
|
||||
task_class().apply_async((self.pk,), opts, link_error=error_callback, link=success_callback)
|
||||
return True
|
||||
|
||||
def signal_start(self, **kwargs):
|
||||
@@ -765,7 +796,7 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique
|
||||
|
||||
# Sanity check: If we are running unit tests, then run synchronously.
|
||||
if getattr(settings, 'CELERY_UNIT_TEST', False):
|
||||
return self.start(None, **kwargs)
|
||||
return self.start(None, None, **kwargs)
|
||||
|
||||
# Save the pending status, and inform the SocketIO listener.
|
||||
self.update_fields(start_args=json.dumps(kwargs), status='pending')
|
||||
|
||||
@@ -26,5 +26,6 @@ class UserResource(CommonModelNameNotUnique, ResourceMixin):
|
||||
|
||||
admin_role = ImplicitRoleField(
|
||||
role_name='User Administrator',
|
||||
role_description='May manage this user',
|
||||
permissions = {'all': True},
|
||||
)
|
||||
|
||||
0
awx/main/notifications/__init__.py
Normal file
0
awx/main/notifications/__init__.py
Normal file
20
awx/main/notifications/base.py
Normal file
20
awx/main/notifications/base.py
Normal file
@@ -0,0 +1,20 @@
|
||||
# Copyright (c) 2016 Ansible, Inc.
|
||||
# All Rights Reserved.
|
||||
|
||||
import pprint
|
||||
|
||||
from django.utils.encoding import smart_text
|
||||
from django.core.mail.backends.base import BaseEmailBackend
|
||||
|
||||
class TowerBaseEmailBackend(BaseEmailBackend):
|
||||
|
||||
def format_body(self, body):
|
||||
if "body" in body:
|
||||
body_actual = body['body']
|
||||
else:
|
||||
body_actual = smart_text("{} #{} had status {} on Ansible Tower, view details at {}\n\n".format(body['friendly_name'],
|
||||
body['id'],
|
||||
body['status'],
|
||||
body['url']))
|
||||
body_actual += pprint.pformat(body, indent=4)
|
||||
return body_actual
|
||||
28
awx/main/notifications/email_backend.py
Normal file
28
awx/main/notifications/email_backend.py
Normal file
@@ -0,0 +1,28 @@
|
||||
# Copyright (c) 2016 Ansible, Inc.
|
||||
# All Rights Reserved.
|
||||
|
||||
import pprint
|
||||
|
||||
from django.utils.encoding import smart_text
|
||||
from django.core.mail.backends.smtp import EmailBackend
|
||||
|
||||
class CustomEmailBackend(EmailBackend):
|
||||
|
||||
init_parameters = {"host": {"label": "Host", "type": "string"},
|
||||
"port": {"label": "Port", "type": "int"},
|
||||
"username": {"label": "Username", "type": "string"},
|
||||
"password": {"label": "Password", "type": "password"},
|
||||
"use_tls": {"label": "Use TLS", "type": "bool"},
|
||||
"use_ssl": {"label": "Use SSL", "type": "bool"},
|
||||
"sender": {"label": "Sender Email", "type": "string"},
|
||||
"recipients": {"label": "Recipient List", "type": "list"}}
|
||||
recipient_parameter = "recipients"
|
||||
sender_parameter = "sender"
|
||||
|
||||
def format_body(self, body):
|
||||
body_actual = smart_text("{} #{} had status {} on Ansible Tower, view details at {}\n\n".format(body['friendly_name'],
|
||||
body['id'],
|
||||
body['status'],
|
||||
body['url']))
|
||||
body_actual += pprint.pformat(body, indent=4)
|
||||
return body_actual
|
||||
49
awx/main/notifications/hipchat_backend.py
Normal file
49
awx/main/notifications/hipchat_backend.py
Normal file
@@ -0,0 +1,49 @@
|
||||
# Copyright (c) 2016 Ansible, Inc.
|
||||
# All Rights Reserved.
|
||||
|
||||
import logging
|
||||
|
||||
import requests
|
||||
|
||||
from django.utils.encoding import smart_text
|
||||
|
||||
from awx.main.notifications.base import TowerBaseEmailBackend
|
||||
|
||||
logger = logging.getLogger('awx.main.notifications.hipchat_backend')
|
||||
|
||||
class HipChatBackend(TowerBaseEmailBackend):
|
||||
|
||||
init_parameters = {"token": {"label": "Token", "type": "password"},
|
||||
"channels": {"label": "Destination Channels", "type": "list"},
|
||||
"color": {"label": "Notification Color", "type": "string"},
|
||||
"api_url": {"label": "API Url (e.g: https://mycompany.hipchat.com)", "type": "string"},
|
||||
"notify": {"label": "Notify channel", "type": "bool"},
|
||||
"message_from": {"label": "Label to be shown with notification", "type": "string"}}
|
||||
recipient_parameter = "channels"
|
||||
sender_parameter = "message_from"
|
||||
|
||||
def __init__(self, token, color, api_url, notify, fail_silently=False, **kwargs):
|
||||
super(HipChatBackend, self).__init__(fail_silently=fail_silently)
|
||||
self.token = token
|
||||
self.color = color
|
||||
self.api_url = api_url
|
||||
self.notify = notify
|
||||
|
||||
def send_messages(self, messages):
|
||||
sent_messages = 0
|
||||
|
||||
for m in messages:
|
||||
for rcp in m.recipients():
|
||||
r = requests.post("{}/v2/room/{}/notification".format(self.api_url, rcp),
|
||||
params={"auth_token": self.token},
|
||||
json={"color": self.color,
|
||||
"message": m.subject,
|
||||
"notify": self.notify,
|
||||
"from": m.from_email,
|
||||
"message_format": "text"})
|
||||
if r.status_code != 204:
|
||||
logger.error(smart_text("Error sending messages: {}".format(r.text)))
|
||||
if not self.fail_silently:
|
||||
raise Exception(smart_text("Error sending message to hipchat: {}".format(r.text)))
|
||||
sent_messages += 1
|
||||
return sent_messages
|
||||
95
awx/main/notifications/irc_backend.py
Normal file
95
awx/main/notifications/irc_backend.py
Normal file
@@ -0,0 +1,95 @@
|
||||
# Copyright (c) 2016 Ansible, Inc.
|
||||
# All Rights Reserved.
|
||||
|
||||
import time
|
||||
import ssl
|
||||
import logging
|
||||
|
||||
import irc.client
|
||||
|
||||
from django.utils.encoding import smart_text
|
||||
|
||||
from awx.main.notifications.base import TowerBaseEmailBackend
|
||||
|
||||
logger = logging.getLogger('awx.main.notifications.irc_backend')
|
||||
|
||||
class IrcBackend(TowerBaseEmailBackend):
|
||||
|
||||
init_parameters = {"server": {"label": "IRC Server Address", "type": "string"},
|
||||
"port": {"label": "IRC Server Port", "type": "int"},
|
||||
"nickname": {"label": "IRC Nick", "type": "string"},
|
||||
"password": {"label": "IRC Server Password", "type": "password"},
|
||||
"use_ssl": {"label": "SSL Connection", "type": "bool"},
|
||||
"targets": {"label": "Destination Channels or Users", "type": "list"}}
|
||||
recipient_parameter = "targets"
|
||||
sender_parameter = None
|
||||
|
||||
def __init__(self, server, port, nickname, password, use_ssl, fail_silently=False, **kwargs):
|
||||
super(IrcBackend, self).__init__(fail_silently=fail_silently)
|
||||
self.server = server
|
||||
self.port = port
|
||||
self.nickname = nickname
|
||||
self.password = password if password != "" else None
|
||||
self.use_ssl = use_ssl
|
||||
self.connection = None
|
||||
|
||||
def open(self):
|
||||
if self.connection is not None:
|
||||
return False
|
||||
if self.use_ssl:
|
||||
connection_factory = irc.connection.Factory(wrapper=ssl.wrap_socket)
|
||||
else:
|
||||
connection_factory = irc.connection.Factory()
|
||||
try:
|
||||
self.reactor = irc.client.Reactor()
|
||||
self.connection = self.reactor.server().connect(
|
||||
self.server,
|
||||
self.port,
|
||||
self.nickname,
|
||||
password=self.password,
|
||||
connect_factory=connection_factory,
|
||||
)
|
||||
except irc.client.ServerConnectionError as e:
|
||||
logger.error(smart_text("Exception connecting to irc server: {}".format(e)))
|
||||
if not self.fail_silently:
|
||||
raise
|
||||
return True
|
||||
|
||||
def close(self):
|
||||
if self.connection is None:
|
||||
return
|
||||
self.connection = None
|
||||
|
||||
def on_connect(self, connection, event):
|
||||
for c in self.channels:
|
||||
if irc.client.is_channel(c):
|
||||
connection.join(c)
|
||||
else:
|
||||
for m in self.channels[c]:
|
||||
connection.privmsg(c, m.subject)
|
||||
self.channels_sent += 1
|
||||
|
||||
def on_join(self, connection, event):
|
||||
for m in self.channels[event.target]:
|
||||
connection.privmsg(event.target, m.subject)
|
||||
self.channels_sent += 1
|
||||
|
||||
def send_messages(self, messages):
|
||||
if self.connection is None:
|
||||
self.open()
|
||||
self.channels = {}
|
||||
self.channels_sent = 0
|
||||
for m in messages:
|
||||
for r in m.recipients():
|
||||
if r not in self.channels:
|
||||
self.channels[r] = []
|
||||
self.channels[r].append(m)
|
||||
self.connection.add_global_handler("welcome", self.on_connect)
|
||||
self.connection.add_global_handler("join", self.on_join)
|
||||
start_time = time.time()
|
||||
process_time = time.time()
|
||||
while self.channels_sent < len(self.channels) and (process_time - start_time) < 60:
|
||||
self.reactor.process_once(0.1)
|
||||
process_time = time.time()
|
||||
self.reactor.disconnect_all()
|
||||
return self.channels_sent
|
||||
49
awx/main/notifications/pagerduty_backend.py
Normal file
49
awx/main/notifications/pagerduty_backend.py
Normal file
@@ -0,0 +1,49 @@
|
||||
# Copyright (c) 2016 Ansible, Inc.
|
||||
# All Rights Reserved.
|
||||
|
||||
import logging
|
||||
import pygerduty
|
||||
|
||||
from django.utils.encoding import smart_text
|
||||
|
||||
from awx.main.notifications.base import TowerBaseEmailBackend
|
||||
|
||||
logger = logging.getLogger('awx.main.notifications.pagerduty_backend')
|
||||
|
||||
class PagerDutyBackend(TowerBaseEmailBackend):
|
||||
|
||||
init_parameters = {"subdomain": {"label": "Pagerduty subdomain", "type": "string"},
|
||||
"token": {"label": "API Token", "type": "password"},
|
||||
"service_key": {"label": "API Service/Integration Key", "type": "string"},
|
||||
"client_name": {"label": "Client Identifier", "type": "string"}}
|
||||
recipient_parameter = "service_key"
|
||||
sender_parameter = "client_name"
|
||||
|
||||
def __init__(self, subdomain, token, fail_silently=False, **kwargs):
|
||||
super(PagerDutyBackend, self).__init__(fail_silently=fail_silently)
|
||||
self.subdomain = subdomain
|
||||
self.token = token
|
||||
|
||||
def format_body(self, body):
|
||||
return body
|
||||
|
||||
def send_messages(self, messages):
|
||||
sent_messages = 0
|
||||
|
||||
try:
|
||||
pager = pygerduty.PagerDuty(self.subdomain, self.token)
|
||||
except Exception as e:
|
||||
if not self.fail_silently:
|
||||
raise
|
||||
logger.error(smart_text("Exception connecting to PagerDuty: {}".format(e)))
|
||||
for m in messages:
|
||||
try:
|
||||
pager.trigger_incident(m.recipients()[0],
|
||||
description=m.subject,
|
||||
details=m.body,
|
||||
client=m.from_email)
|
||||
except Exception as e:
|
||||
logger.error(smart_text("Exception sending messages: {}".format(e)))
|
||||
if not self.fail_silently:
|
||||
raise
|
||||
return sent_messages
|
||||
52
awx/main/notifications/slack_backend.py
Normal file
52
awx/main/notifications/slack_backend.py
Normal file
@@ -0,0 +1,52 @@
|
||||
# Copyright (c) 2016 Ansible, Inc.
|
||||
# All Rights Reserved.
|
||||
|
||||
import logging
|
||||
from slackclient import SlackClient
|
||||
|
||||
from django.utils.encoding import smart_text
|
||||
|
||||
from awx.main.notifications.base import TowerBaseEmailBackend
|
||||
|
||||
logger = logging.getLogger('awx.main.notifications.slack_backend')
|
||||
|
||||
class SlackBackend(TowerBaseEmailBackend):
|
||||
|
||||
init_parameters = {"token": {"label": "Token", "type": "password"},
|
||||
"channels": {"label": "Destination Channels", "type": "list"}}
|
||||
recipient_parameter = "channels"
|
||||
sender_parameter = None
|
||||
|
||||
def __init__(self, token, fail_silently=False, **kwargs):
|
||||
super(SlackBackend, self).__init__(fail_silently=fail_silently)
|
||||
self.token = token
|
||||
self.connection = None
|
||||
|
||||
def open(self):
|
||||
if self.connection is not None:
|
||||
return False
|
||||
self.connection = SlackClient(self.token)
|
||||
if not self.connection.rtm_connect():
|
||||
if not self.fail_silently:
|
||||
raise Exception("Slack Notification Token is invalid")
|
||||
return True
|
||||
|
||||
def close(self):
|
||||
if self.connection is None:
|
||||
return
|
||||
self.connection = None
|
||||
|
||||
def send_messages(self, messages):
|
||||
if self.connection is None:
|
||||
self.open()
|
||||
sent_messages = 0
|
||||
for m in messages:
|
||||
try:
|
||||
for r in m.recipients():
|
||||
self.connection.rtm_send_message(r, m.subject)
|
||||
sent_messages += 1
|
||||
except Exception as e:
|
||||
logger.error(smart_text("Exception sending messages: {}".format(e)))
|
||||
if not self.fail_silently:
|
||||
raise
|
||||
return sent_messages
|
||||
48
awx/main/notifications/twilio_backend.py
Normal file
48
awx/main/notifications/twilio_backend.py
Normal file
@@ -0,0 +1,48 @@
|
||||
# Copyright (c) 2016 Ansible, Inc.
|
||||
# All Rights Reserved.
|
||||
|
||||
import logging
|
||||
|
||||
from twilio.rest import TwilioRestClient
|
||||
|
||||
from django.utils.encoding import smart_text
|
||||
|
||||
from awx.main.notifications.base import TowerBaseEmailBackend
|
||||
|
||||
logger = logging.getLogger('awx.main.notifications.twilio_backend')
|
||||
|
||||
class TwilioBackend(TowerBaseEmailBackend):
|
||||
|
||||
init_parameters = {"account_sid": {"label": "Account SID", "type": "string"},
|
||||
"account_token": {"label": "Account Token", "type": "password"},
|
||||
"from_number": {"label": "Source Phone Number", "type": "string"},
|
||||
"to_numbers": {"label": "Destination SMS Numbers", "type": "list"}}
|
||||
recipient_parameter = "to_numbers"
|
||||
sender_parameter = "from_number"
|
||||
|
||||
def __init__(self, account_sid, account_token, fail_silently=False, **kwargs):
|
||||
super(TwilioBackend, self).__init__(fail_silently=fail_silently)
|
||||
self.account_sid = account_sid
|
||||
self.account_token = account_token
|
||||
|
||||
def send_messages(self, messages):
|
||||
sent_messages = 0
|
||||
try:
|
||||
connection = TwilioRestClient(self.account_sid, self.account_token)
|
||||
except Exception as e:
|
||||
if not self.fail_silently:
|
||||
raise
|
||||
logger.error(smart_text("Exception connecting to Twilio: {}".format(e)))
|
||||
|
||||
for m in messages:
|
||||
try:
|
||||
connection.messages.create(
|
||||
to=m.to,
|
||||
from_=m.from_email,
|
||||
body=m.subject)
|
||||
sent_messages += 1
|
||||
except Exception as e:
|
||||
logger.error(smart_text("Exception sending messages: {}".format(e)))
|
||||
if not self.fail_silently:
|
||||
raise
|
||||
return sent_messages
|
||||
39
awx/main/notifications/webhook_backend.py
Normal file
39
awx/main/notifications/webhook_backend.py
Normal file
@@ -0,0 +1,39 @@
|
||||
# Copyright (c) 2016 Ansible, Inc.
|
||||
# All Rights Reserved.
|
||||
|
||||
import logging
|
||||
import requests
|
||||
import json
|
||||
|
||||
from django.utils.encoding import smart_text
|
||||
|
||||
from awx.main.notifications.base import TowerBaseEmailBackend
|
||||
|
||||
logger = logging.getLogger('awx.main.notifications.webhook_backend')
|
||||
|
||||
class WebhookBackend(TowerBaseEmailBackend):
|
||||
|
||||
init_parameters = {"url": {"label": "Target URL", "type": "string"},
|
||||
"headers": {"label": "HTTP Headers", "type": "object"}}
|
||||
recipient_parameter = "url"
|
||||
sender_parameter = None
|
||||
|
||||
def __init__(self, headers, fail_silently=False, **kwargs):
|
||||
self.headers = headers
|
||||
super(WebhookBackend, self).__init__(fail_silently=fail_silently)
|
||||
|
||||
def format_body(self, body):
|
||||
return body
|
||||
|
||||
def send_messages(self, messages):
|
||||
sent_messages = 0
|
||||
for m in messages:
|
||||
r = requests.post("{}".format(m.recipients()[0]),
|
||||
data=json.dumps(m.body),
|
||||
headers=self.headers)
|
||||
if r.status_code >= 400:
|
||||
logger.error(smart_text("Error sending notification webhook: {}".format(r.text)))
|
||||
if not self.fail_silently:
|
||||
raise Exception(smart_text("Error sending notification webhook: {}".format(r.text)))
|
||||
sent_messages += 1
|
||||
return sent_messages
|
||||
@@ -332,6 +332,8 @@ model_serializer_mapping = {
|
||||
Job: JobSerializer,
|
||||
AdHocCommand: AdHocCommandSerializer,
|
||||
TowerSettings: TowerSettingsSerializer,
|
||||
Notifier: NotifierSerializer,
|
||||
Notification: NotificationSerializer,
|
||||
}
|
||||
|
||||
def activity_stream_create(sender, instance, created, **kwargs):
|
||||
|
||||
@@ -39,6 +39,9 @@ from celery import Task, task
|
||||
from django.conf import settings
|
||||
from django.db import transaction, DatabaseError
|
||||
from django.utils.timezone import now
|
||||
from django.utils.encoding import smart_text
|
||||
from django.core.mail import send_mail
|
||||
from django.contrib.auth.models import User
|
||||
|
||||
# AWX
|
||||
from awx.lib.metrics import task_timer
|
||||
@@ -46,13 +49,14 @@ from awx.main.constants import CLOUD_PROVIDERS
|
||||
from awx.main.models import * # noqa
|
||||
from awx.main.queue import FifoQueue
|
||||
from awx.main.conf import tower_settings
|
||||
from awx.main.task_engine import TaskSerializer, TASK_TIMEOUT_INTERVAL
|
||||
from awx.main.utils import (get_ansible_version, get_ssh_version, decrypt_field, update_scm_url,
|
||||
ignore_inventory_computed_fields, emit_websocket_notification,
|
||||
check_proot_installed, build_proot_temp_dir, wrap_args_with_proot)
|
||||
from awx.fact.utils.connection import test_mongo_connection
|
||||
|
||||
__all__ = ['RunJob', 'RunSystemJob', 'RunProjectUpdate', 'RunInventoryUpdate',
|
||||
'RunAdHocCommand', 'handle_work_error', 'update_inventory_computed_fields']
|
||||
'RunAdHocCommand', 'handle_work_error', 'handle_work_success',
|
||||
'update_inventory_computed_fields', 'send_notifications', 'run_administrative_checks']
|
||||
|
||||
HIDDEN_PASSWORD = '**********'
|
||||
|
||||
@@ -64,6 +68,48 @@ Try upgrading OpenSSH or providing your private key in an different format. \
|
||||
|
||||
logger = logging.getLogger('awx.main.tasks')
|
||||
|
||||
@task()
|
||||
def send_notifications(notification_list, job_id=None):
|
||||
if not isinstance(notification_list, list):
|
||||
raise TypeError("notification_list should be of type list")
|
||||
if job_id is not None:
|
||||
job_actual = UnifiedJob.objects.get(id=job_id)
|
||||
for notification_id in notification_list:
|
||||
notification = Notification.objects.get(id=notification_id)
|
||||
try:
|
||||
sent = notification.notifier.send(notification.subject, notification.body)
|
||||
notification.status = "successful"
|
||||
notification.notifications_sent = sent
|
||||
except Exception as e:
|
||||
logger.error("Send Notification Failed {}".format(e))
|
||||
notification.status = "failed"
|
||||
notification.error = smart_text(e)
|
||||
finally:
|
||||
notification.save()
|
||||
if job_id is not None:
|
||||
job_actual.notifications.add(notification)
|
||||
|
||||
@task(bind=True)
|
||||
def run_administrative_checks(self):
|
||||
if not tower_settings.TOWER_ADMIN_ALERTS:
|
||||
return
|
||||
reader = TaskSerializer()
|
||||
validation_info = reader.from_database()
|
||||
if validation_info.get('instance_count', 0) < 1:
|
||||
return
|
||||
used_percentage = float(validation_info.get('current_instances', 0)) / float(validation_info.get('instance_count', 100))
|
||||
tower_admin_emails = User.objects.filter(is_superuser=True).values_list('email', flat=True)
|
||||
if (used_percentage * 100) > 90:
|
||||
send_mail("Ansible Tower host usage over 90%",
|
||||
"Ansible Tower host usage over 90%",
|
||||
tower_admin_emails,
|
||||
fail_silently=True)
|
||||
if validation_info.get('time_remaining', 0) < TASK_TIMEOUT_INTERVAL:
|
||||
send_mail("Ansible Tower license will expire soon",
|
||||
"Ansible Tower license will expire soon",
|
||||
tower_admin_emails,
|
||||
fail_silently=True)
|
||||
|
||||
@task()
|
||||
def bulk_inventory_element_delete(inventory, hosts=[], groups=[]):
|
||||
from awx.main.signals import disable_activity_stream
|
||||
@@ -134,7 +180,6 @@ def notify_task_runner(metadata_dict):
|
||||
queue = FifoQueue('tower_task_manager')
|
||||
queue.push(metadata_dict)
|
||||
|
||||
|
||||
@task()
|
||||
def mongodb_control(cmd):
|
||||
# Sanity check: Do not send arbitrary commands.
|
||||
@@ -159,6 +204,39 @@ def mongodb_control(cmd):
|
||||
p = subprocess.Popen('sudo mongod --shutdown -f /etc/mongod.conf', shell=True)
|
||||
p.wait()
|
||||
|
||||
@task(bind=True)
|
||||
def handle_work_success(self, result, task_actual):
|
||||
if task_actual['type'] == 'project_update':
|
||||
instance = ProjectUpdate.objects.get(id=task_actual['id'])
|
||||
instance_name = instance.name
|
||||
notifiers = instance.project.notifiers
|
||||
friendly_name = "Project Update"
|
||||
elif task_actual['type'] == 'inventory_update':
|
||||
instance = InventoryUpdate.objects.get(id=task_actual['id'])
|
||||
instance_name = instance.name
|
||||
notifiers = instance.inventory_source.notifiers
|
||||
friendly_name = "Inventory Update"
|
||||
elif task_actual['type'] == 'job':
|
||||
instance = Job.objects.get(id=task_actual['id'])
|
||||
instance_name = instance.job_template.name
|
||||
notifiers = instance.job_template.notifiers
|
||||
friendly_name = "Job"
|
||||
elif task_actual['type'] == 'ad_hoc_command':
|
||||
instance = AdHocCommand.objects.get(id=task_actual['id'])
|
||||
instance_name = instance.module_name
|
||||
notifiers = [] # TODO: Ad-hoc commands need to notify someone
|
||||
friendly_name = "AdHoc Command"
|
||||
else:
|
||||
return
|
||||
notification_body = instance.notification_data()
|
||||
notification_subject = "{} #{} '{}' succeeded on Ansible Tower: {}".format(friendly_name,
|
||||
task_actual['id'],
|
||||
instance_name,
|
||||
notification_body['url'])
|
||||
send_notifications.delay([n.generate_notification(notification_subject, notification_body)
|
||||
for n in set(notifiers.get('success', []) + notifiers.get('any', []))],
|
||||
job_id=task_actual['id'])
|
||||
|
||||
@task(bind=True)
|
||||
def handle_work_error(self, task_id, subtasks=None):
|
||||
print('Executing error task id %s, subtasks: %s' %
|
||||
@@ -173,15 +251,23 @@ def handle_work_error(self, task_id, subtasks=None):
|
||||
if each_task['type'] == 'project_update':
|
||||
instance = ProjectUpdate.objects.get(id=each_task['id'])
|
||||
instance_name = instance.name
|
||||
notifiers = instance.project.notifiers
|
||||
friendly_name = "Project Update"
|
||||
elif each_task['type'] == 'inventory_update':
|
||||
instance = InventoryUpdate.objects.get(id=each_task['id'])
|
||||
instance_name = instance.name
|
||||
notifiers = instance.inventory_source.notifiers
|
||||
friendly_name = "Inventory Update"
|
||||
elif each_task['type'] == 'job':
|
||||
instance = Job.objects.get(id=each_task['id'])
|
||||
instance_name = instance.job_template.name
|
||||
notifiers = instance.job_template.notifiers
|
||||
friendly_name = "Job"
|
||||
elif each_task['type'] == 'ad_hoc_command':
|
||||
instance = AdHocCommand.objects.get(id=each_task['id'])
|
||||
instance_name = instance.module_name
|
||||
notifiers = []
|
||||
friendly_name = "AdHoc Command"
|
||||
else:
|
||||
# Unknown task type
|
||||
break
|
||||
@@ -190,6 +276,7 @@ def handle_work_error(self, task_id, subtasks=None):
|
||||
first_task_id = instance.id
|
||||
first_task_type = each_task['type']
|
||||
first_task_name = instance_name
|
||||
first_task_friendly_name = friendly_name
|
||||
if instance.celery_task_id != task_id:
|
||||
instance.status = 'failed'
|
||||
instance.failed = True
|
||||
@@ -197,6 +284,16 @@ def handle_work_error(self, task_id, subtasks=None):
|
||||
(first_task_type, first_task_name, first_task_id)
|
||||
instance.save()
|
||||
instance.socketio_emit_status("failed")
|
||||
notification_body = first_task.notification_data()
|
||||
notification_subject = "{} #{} '{}' failed on Ansible Tower: {}".format(first_task_friendly_name,
|
||||
first_task_id,
|
||||
first_task_name,
|
||||
notification_body['url'])
|
||||
notification_body['friendly_name'] = first_task_friendly_name
|
||||
send_notifications.delay([n.generate_notification(notification_subject, notification_body).id
|
||||
for n in set(notifiers.get('error', []) + notifiers.get('any', []))],
|
||||
job_id=first_task_id)
|
||||
|
||||
|
||||
@task()
|
||||
def update_inventory_computed_fields(inventory_id, should_update_hosts=True):
|
||||
@@ -861,11 +958,6 @@ class RunJob(BaseTask):
|
||||
'''
|
||||
return getattr(tower_settings, 'AWX_PROOT_ENABLED', False)
|
||||
|
||||
def pre_run_hook(self, job, **kwargs):
|
||||
if job.job_type == PERM_INVENTORY_SCAN:
|
||||
if not test_mongo_connection():
|
||||
raise RuntimeError("Fact Scan Database is offline")
|
||||
|
||||
def post_run_hook(self, job, **kwargs):
|
||||
'''
|
||||
Hook for actions to run after job/task has completed.
|
||||
|
||||
283
awx/main/tests/functional/ansible.json
Normal file
283
awx/main/tests/functional/ansible.json
Normal file
@@ -0,0 +1,283 @@
|
||||
{
|
||||
"ansible_all_ipv4_addresses": [
|
||||
"172.17.0.7"
|
||||
],
|
||||
"ansible_all_ipv6_addresses": [
|
||||
"fe80::42:acff:fe11:7"
|
||||
],
|
||||
"ansible_architecture": "x86_64",
|
||||
"ansible_bios_date": "12/01/2006",
|
||||
"ansible_bios_version": "VirtualBox",
|
||||
"ansible_cmdline": {
|
||||
"BOOT_IMAGE": "/boot/vmlinuz64",
|
||||
"base": true,
|
||||
"console": "tty0",
|
||||
"initrd": "/boot/initrd.img",
|
||||
"loglevel": "3",
|
||||
"noembed": true,
|
||||
"nomodeset": true,
|
||||
"norestore": true,
|
||||
"user": "docker",
|
||||
"waitusb": "10:LABEL=boot2docker-data"
|
||||
},
|
||||
"ansible_date_time": {
|
||||
"date": "2016-02-02",
|
||||
"day": "02",
|
||||
"epoch": "1454424257",
|
||||
"hour": "14",
|
||||
"iso8601": "2016-02-02T14:44:17Z",
|
||||
"iso8601_basic": "20160202T144417348424",
|
||||
"iso8601_basic_short": "20160202T144417",
|
||||
"iso8601_micro": "2016-02-02T14:44:17.348496Z",
|
||||
"minute": "44",
|
||||
"month": "02",
|
||||
"second": "17",
|
||||
"time": "14:44:17",
|
||||
"tz": "UTC",
|
||||
"tz_offset": "+0000",
|
||||
"weekday": "Tuesday",
|
||||
"weekday_number": "2",
|
||||
"weeknumber": "05",
|
||||
"year": "2016"
|
||||
},
|
||||
"ansible_default_ipv4": {
|
||||
"address": "172.17.0.7",
|
||||
"alias": "eth0",
|
||||
"broadcast": "global",
|
||||
"gateway": "172.17.0.1",
|
||||
"interface": "eth0",
|
||||
"macaddress": "02:42:ac:11:00:07",
|
||||
"mtu": 1500,
|
||||
"netmask": "255.255.0.0",
|
||||
"network": "172.17.0.0",
|
||||
"type": "ether"
|
||||
},
|
||||
"ansible_default_ipv6": {},
|
||||
"ansible_devices": {
|
||||
"sda": {
|
||||
"holders": [],
|
||||
"host": "",
|
||||
"model": "VBOX HARDDISK",
|
||||
"partitions": {
|
||||
"sda1": {
|
||||
"sectors": "510015555",
|
||||
"sectorsize": 512,
|
||||
"size": "243.19 GB",
|
||||
"start": "1975995"
|
||||
},
|
||||
"sda2": {
|
||||
"sectors": "1975932",
|
||||
"sectorsize": 512,
|
||||
"size": "964.81 MB",
|
||||
"start": "63"
|
||||
}
|
||||
},
|
||||
"removable": "0",
|
||||
"rotational": "0",
|
||||
"scheduler_mode": "deadline",
|
||||
"sectors": "512000000",
|
||||
"sectorsize": "512",
|
||||
"size": "244.14 GB",
|
||||
"support_discard": "0",
|
||||
"vendor": "ATA"
|
||||
},
|
||||
"sr0": {
|
||||
"holders": [],
|
||||
"host": "",
|
||||
"model": "CD-ROM",
|
||||
"partitions": {},
|
||||
"removable": "1",
|
||||
"rotational": "1",
|
||||
"scheduler_mode": "deadline",
|
||||
"sectors": "61440",
|
||||
"sectorsize": "2048",
|
||||
"size": "120.00 MB",
|
||||
"support_discard": "0",
|
||||
"vendor": "VBOX"
|
||||
}
|
||||
},
|
||||
"ansible_distribution": "Ubuntu",
|
||||
"ansible_distribution_major_version": "14",
|
||||
"ansible_distribution_release": "trusty",
|
||||
"ansible_distribution_version": "14.04",
|
||||
"ansible_dns": {
|
||||
"nameservers": [
|
||||
"8.8.8.8"
|
||||
]
|
||||
},
|
||||
"ansible_domain": "",
|
||||
"ansible_env": {
|
||||
"HOME": "/root",
|
||||
"HOSTNAME": "ede894599989",
|
||||
"LANG": "en_US.UTF-8",
|
||||
"LC_ALL": "en_US.UTF-8",
|
||||
"LC_MESSAGES": "en_US.UTF-8",
|
||||
"LESSCLOSE": "/usr/bin/lesspipe %s %s",
|
||||
"LESSOPEN": "| /usr/bin/lesspipe %s",
|
||||
"LS_COLORS": "",
|
||||
"OLDPWD": "/ansible",
|
||||
"PATH": "/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin",
|
||||
"PWD": "/ansible/examples",
|
||||
"SHLVL": "1",
|
||||
"_": "/usr/local/bin/ansible",
|
||||
"container": "docker"
|
||||
},
|
||||
"ansible_eth0": {
|
||||
"active": true,
|
||||
"device": "eth0",
|
||||
"ipv4": {
|
||||
"address": "172.17.0.7",
|
||||
"broadcast": "global",
|
||||
"netmask": "255.255.0.0",
|
||||
"network": "172.17.0.0"
|
||||
},
|
||||
"ipv6": [
|
||||
{
|
||||
"address": "fe80::42:acff:fe11:7",
|
||||
"prefix": "64",
|
||||
"scope": "link"
|
||||
}
|
||||
],
|
||||
"macaddress": "02:42:ac:11:00:07",
|
||||
"mtu": 1500,
|
||||
"promisc": false,
|
||||
"type": "ether"
|
||||
},
|
||||
"ansible_fips": false,
|
||||
"ansible_form_factor": "Other",
|
||||
"ansible_fqdn": "ede894599989",
|
||||
"ansible_hostname": "ede894599989",
|
||||
"ansible_interfaces": [
|
||||
"lo",
|
||||
"eth0"
|
||||
],
|
||||
"ansible_kernel": "4.1.12-boot2docker",
|
||||
"ansible_lo": {
|
||||
"active": true,
|
||||
"device": "lo",
|
||||
"ipv4": {
|
||||
"address": "127.0.0.1",
|
||||
"broadcast": "host",
|
||||
"netmask": "255.0.0.0",
|
||||
"network": "127.0.0.0"
|
||||
},
|
||||
"ipv6": [
|
||||
{
|
||||
"address": "::1",
|
||||
"prefix": "128",
|
||||
"scope": "host"
|
||||
}
|
||||
],
|
||||
"mtu": 65536,
|
||||
"promisc": false,
|
||||
"type": "loopback"
|
||||
},
|
||||
"ansible_lsb": {
|
||||
"codename": "trusty",
|
||||
"description": "Ubuntu 14.04.3 LTS",
|
||||
"id": "Ubuntu",
|
||||
"major_release": "14",
|
||||
"release": "14.04"
|
||||
},
|
||||
"ansible_machine": "x86_64",
|
||||
"ansible_memfree_mb": 3746,
|
||||
"ansible_memory_mb": {
|
||||
"nocache": {
|
||||
"free": 8896,
|
||||
"used": 3638
|
||||
},
|
||||
"real": {
|
||||
"free": 3746,
|
||||
"total": 12534,
|
||||
"used": 8788
|
||||
},
|
||||
"swap": {
|
||||
"cached": 0,
|
||||
"free": 4048,
|
||||
"total": 4048,
|
||||
"used": 0
|
||||
}
|
||||
},
|
||||
"ansible_memtotal_mb": 12534,
|
||||
"ansible_mounts": [
|
||||
{
|
||||
"device": "/dev/sda1",
|
||||
"fstype": "ext4",
|
||||
"mount": "/etc/resolv.conf",
|
||||
"options": "rw,relatime,data=ordered",
|
||||
"size_available": 201281392640,
|
||||
"size_total": 256895700992,
|
||||
"uuid": "NA"
|
||||
},
|
||||
{
|
||||
"device": "/dev/sda1",
|
||||
"fstype": "ext4",
|
||||
"mount": "/etc/hostname",
|
||||
"options": "rw,relatime,data=ordered",
|
||||
"size_available": 201281392640,
|
||||
"size_total": 256895700992,
|
||||
"uuid": "NA"
|
||||
},
|
||||
{
|
||||
"device": "/dev/sda1",
|
||||
"fstype": "ext4",
|
||||
"mount": "/etc/hosts",
|
||||
"options": "rw,relatime,data=ordered",
|
||||
"size_available": 201281392640,
|
||||
"size_total": 256895700992,
|
||||
"uuid": "NA"
|
||||
}
|
||||
],
|
||||
"ansible_nodename": "ede894599989",
|
||||
"ansible_os_family": "Debian",
|
||||
"ansible_pkg_mgr": "apt",
|
||||
"ansible_processor": [
|
||||
"GenuineIntel",
|
||||
"Intel(R) Core(TM) i7-4870HQ CPU @ 2.50GHz",
|
||||
"GenuineIntel",
|
||||
"Intel(R) Core(TM) i7-4870HQ CPU @ 2.50GHz",
|
||||
"GenuineIntel",
|
||||
"Intel(R) Core(TM) i7-4870HQ CPU @ 2.50GHz",
|
||||
"GenuineIntel",
|
||||
"Intel(R) Core(TM) i7-4870HQ CPU @ 2.50GHz",
|
||||
"GenuineIntel",
|
||||
"Intel(R) Core(TM) i7-4870HQ CPU @ 2.50GHz",
|
||||
"GenuineIntel",
|
||||
"Intel(R) Core(TM) i7-4870HQ CPU @ 2.50GHz",
|
||||
"GenuineIntel",
|
||||
"Intel(R) Core(TM) i7-4870HQ CPU @ 2.50GHz",
|
||||
"GenuineIntel",
|
||||
"Intel(R) Core(TM) i7-4870HQ CPU @ 2.50GHz"
|
||||
],
|
||||
"ansible_processor_cores": 8,
|
||||
"ansible_processor_count": 1,
|
||||
"ansible_processor_threads_per_core": 1,
|
||||
"ansible_processor_vcpus": 8,
|
||||
"ansible_product_name": "VirtualBox",
|
||||
"ansible_product_serial": "0",
|
||||
"ansible_product_uuid": "25C5EA5A-1DF1-48D9-A2C6-81227DA153C0",
|
||||
"ansible_product_version": "1.2",
|
||||
"ansible_python_version": "2.7.6",
|
||||
"ansible_selinux": false,
|
||||
"ansible_service_mgr": "upstart",
|
||||
"ansible_ssh_host_key_dsa_public": "AAAAB3NzaC1kc3MAAACBALF0xsM8UMXgSKiWNw4t19wxbxLnxQX742t/dIM0O8YLx+/lIP+Q69Dv5uoVt0zKV39eFziRlCh96qj2KYkGEJ6XfVZFnhpculL2Pv2CPpSwKuQ1vTbDO/xxUrvY+bHpfNJf9Rh69bFEE2pTsjomFPCgp8M0qGaFtwg6czSaeBONAAAAFQCGEfVtj97JiexTVRqgQITYlFp/eQAAAIEAg+S9qWn+AIb3amwVoLL/usQYOPCmZY9RVPzpkjJ6OG+HI4B7cXeauPtNTJwT0f9vGEqzf4mPpmS+aCShj6iwdmJ+cOwR5+SJlNalab3CMBoXKVLbT1J2XWFlK0szKKnoReP96IDbkAkGQ3fkm4jz0z6Wy0u6wOQVNcd4G5cwLZ4AAACAFvBm+H1LwNrwWBjWio+ayhglZ4Y25mLMEn2+dqBz0gLK5szEbft1HMPOWIVHvl6vi3v34pAJHKpxXpkLlNliTn8iw9BzCOrgP4V8sp2/85mxEuCdI1w/QERj9cHu5iS2pZ0cUwDE3pfuuGBB3IEliaJyaapowdrM8lN12jQl11E=",
|
||||
"ansible_ssh_host_key_ecdsa_public": "AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBHiYp4e9RfXpxDcEWpK4EuXPHW9++xcFI9hiB0TYAZgxEF9RIgwfucpPawFk7HIFoNc7EXQMlryilLSbg155KWM=",
|
||||
"ansible_ssh_host_key_ed25519_public": "AAAAC3NzaC1lZDI1NTE5AAAAILclD2JaC654azEsAfcHRIOA2Ig9/Qk6MX80i/VCEdSH",
|
||||
"ansible_ssh_host_key_rsa_public": "AAAAB3NzaC1yc2EAAAADAQABAAABAQDeSUGxZaZsgBsezld0mj3HcbAwx6aykGnejceBjcs6lVwSGMHevofzSXIQDPYBhZoyWNl0PYAHv6AsQ8+3khd2SitUMJAuHSz1ZjgHCCGQP9ijXTKHn+lWCKA8rhLG/dwYwiouoOPZfn1G+erbKO6XiVbELrrf2RadnMGuMinESIOKVj3IunXsaGRMsDOQferOnUf7MvH7xpQnoySyQ1+p4rGruaohWG+Y2cDo7+B2FylPVbrpRDDJkfbt4J96WHx0KOdD0qzOicQP8JqDflqQPJJCWcgrvjQOSe4gXdPB6GZDtBl2qgQRwt1IgizPMm+b7Bwbd2VDe1TeWV2gT/7H",
|
||||
"ansible_swapfree_mb": 4048,
|
||||
"ansible_swaptotal_mb": 4048,
|
||||
"ansible_system": "Linux",
|
||||
"ansible_system_vendor": "innotek GmbH",
|
||||
"ansible_uptime_seconds": 178398,
|
||||
"ansible_user_dir": "/root",
|
||||
"ansible_user_gecos": "root",
|
||||
"ansible_user_gid": 0,
|
||||
"ansible_user_id": "root",
|
||||
"ansible_user_shell": "/bin/bash",
|
||||
"ansible_user_uid": 0,
|
||||
"ansible_userspace_architecture": "x86_64",
|
||||
"ansible_userspace_bits": "64",
|
||||
"ansible_virtualization_role": "guest",
|
||||
"ansible_virtualization_type": "docker",
|
||||
"module_setup": true
|
||||
}
|
||||
255
awx/main/tests/functional/api/test_fact_versions.py
Normal file
255
awx/main/tests/functional/api/test_fact_versions.py
Normal file
@@ -0,0 +1,255 @@
|
||||
# Python
|
||||
import mock
|
||||
import pytest
|
||||
from datetime import timedelta
|
||||
import urlparse
|
||||
import urllib
|
||||
|
||||
# AWX
|
||||
from awx.main.models.fact import Fact
|
||||
from awx.main.utils import timestamp_apiformat
|
||||
|
||||
# Django
|
||||
from django.core.urlresolvers import reverse
|
||||
from django.utils import timezone
|
||||
|
||||
def mock_feature_enabled(feature, bypass_database=None):
|
||||
return True
|
||||
|
||||
def mock_feature_disabled(feature, bypass_database=None):
|
||||
return False
|
||||
|
||||
def setup_common(hosts, fact_scans, get, user, epoch=timezone.now(), get_params={}, host_count=1):
|
||||
hosts = hosts(host_count=host_count)
|
||||
fact_scans(fact_scans=3, timestamp_epoch=epoch)
|
||||
|
||||
url = reverse('api:host_fact_versions_list', args=(hosts[0].pk,))
|
||||
response = get(url, user('admin', True), data=get_params)
|
||||
|
||||
return (hosts[0], response)
|
||||
|
||||
def check_url(url1_full, fact_known, module):
|
||||
url1_split = urlparse.urlsplit(url1_full)
|
||||
url1 = url1_split.path
|
||||
url1_params = urlparse.parse_qsl(url1_split.query)
|
||||
|
||||
url2 = reverse('api:host_fact_compare_view', args=(fact_known.host.pk,))
|
||||
url2_params = [('module', module), ('datetime', timestamp_apiformat(fact_known.timestamp))]
|
||||
|
||||
assert url1 == url2
|
||||
assert urllib.urlencode(url1_params) == urllib.urlencode(url2_params)
|
||||
|
||||
def check_response_facts(facts_known, response):
|
||||
for i, fact_known in enumerate(facts_known):
|
||||
assert fact_known.module == response.data['results'][i]['module']
|
||||
assert timestamp_apiformat(fact_known.timestamp) == response.data['results'][i]['timestamp']
|
||||
check_url(response.data['results'][i]['related']['fact_view'], fact_known, fact_known.module)
|
||||
|
||||
def check_system_tracking_feature_forbidden(response):
|
||||
assert 402 == response.status_code
|
||||
assert 'Your license does not permit use of system tracking.' == response.data['detail']
|
||||
|
||||
@mock.patch('awx.api.views.feature_enabled', new=mock_feature_disabled)
|
||||
@pytest.mark.django_db
|
||||
@pytest.mark.license_feature
|
||||
def test_system_tracking_license_get(hosts, get, user):
|
||||
hosts = hosts(host_count=1)
|
||||
url = reverse('api:host_fact_versions_list', args=(hosts[0].pk,))
|
||||
response = get(url, user('admin', True))
|
||||
|
||||
check_system_tracking_feature_forbidden(response)
|
||||
|
||||
@mock.patch('awx.api.views.feature_enabled', new=mock_feature_disabled)
|
||||
@pytest.mark.django_db
|
||||
@pytest.mark.license_feature
|
||||
def test_system_tracking_license_options(hosts, options, user):
|
||||
hosts = hosts(host_count=1)
|
||||
url = reverse('api:host_fact_versions_list', args=(hosts[0].pk,))
|
||||
response = options(url, None, user('admin', True))
|
||||
|
||||
check_system_tracking_feature_forbidden(response)
|
||||
|
||||
@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
|
||||
@pytest.mark.django_db
|
||||
@pytest.mark.license_feature
|
||||
def test_no_facts_db(hosts, get, user):
|
||||
hosts = hosts(host_count=1)
|
||||
url = reverse('api:host_fact_versions_list', args=(hosts[0].pk,))
|
||||
response = get(url, user('admin', True))
|
||||
|
||||
response_expected = {
|
||||
'results': []
|
||||
}
|
||||
assert response_expected == response.data
|
||||
|
||||
@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
|
||||
@pytest.mark.django_db
|
||||
def test_basic_fields(hosts, fact_scans, get, user):
|
||||
epoch = timezone.now()
|
||||
search = {
|
||||
'from': epoch,
|
||||
'to': epoch,
|
||||
}
|
||||
|
||||
(host, response) = setup_common(hosts, fact_scans, get, user, epoch=epoch, get_params=search)
|
||||
|
||||
results = response.data['results']
|
||||
assert 'related' in results[0]
|
||||
assert 'timestamp' in results[0]
|
||||
assert 'module' in results[0]
|
||||
|
||||
@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
|
||||
@pytest.mark.django_db
|
||||
@pytest.mark.license_feature
|
||||
def test_basic_options_fields(hosts, fact_scans, options, user):
|
||||
hosts = hosts(host_count=1)
|
||||
fact_scans(fact_scans=1)
|
||||
|
||||
url = reverse('api:host_fact_versions_list', args=(hosts[0].pk,))
|
||||
response = options(url, None, user('admin', True), pk=hosts[0].id)
|
||||
|
||||
assert 'related' in response.data['actions']['GET']
|
||||
assert 'module' in response.data['actions']['GET']
|
||||
assert ("ansible", "Ansible") in response.data['actions']['GET']['module']['choices']
|
||||
assert ("services", "Services") in response.data['actions']['GET']['module']['choices']
|
||||
assert ("packages", "Packages") in response.data['actions']['GET']['module']['choices']
|
||||
|
||||
@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
|
||||
@pytest.mark.django_db
|
||||
def test_related_fact_view(hosts, fact_scans, get, user):
|
||||
epoch = timezone.now()
|
||||
|
||||
(host, response) = setup_common(hosts, fact_scans, get, user, epoch=epoch)
|
||||
facts_known = Fact.get_timeline(host.id)
|
||||
assert 9 == len(facts_known)
|
||||
assert 9 == len(response.data['results'])
|
||||
|
||||
for i, fact_known in enumerate(facts_known):
|
||||
check_url(response.data['results'][i]['related']['fact_view'], fact_known, fact_known.module)
|
||||
|
||||
@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
|
||||
@pytest.mark.django_db
|
||||
def test_multiple_hosts(hosts, fact_scans, get, user):
|
||||
epoch = timezone.now()
|
||||
|
||||
(host, response) = setup_common(hosts, fact_scans, get, user, epoch=epoch, host_count=3)
|
||||
facts_known = Fact.get_timeline(host.id)
|
||||
assert 9 == len(facts_known)
|
||||
assert 9 == len(response.data['results'])
|
||||
|
||||
for i, fact_known in enumerate(facts_known):
|
||||
check_url(response.data['results'][i]['related']['fact_view'], fact_known, fact_known.module)
|
||||
|
||||
@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
|
||||
@pytest.mark.django_db
|
||||
def test_param_to_from(hosts, fact_scans, get, user):
|
||||
epoch = timezone.now()
|
||||
search = {
|
||||
'from': epoch - timedelta(days=10),
|
||||
'to': epoch + timedelta(days=10),
|
||||
}
|
||||
|
||||
(host, response) = setup_common(hosts, fact_scans, get, user, epoch=epoch, get_params=search)
|
||||
facts_known = Fact.get_timeline(host.id, ts_from=search['from'], ts_to=search['to'])
|
||||
assert 9 == len(facts_known)
|
||||
assert 9 == len(response.data['results'])
|
||||
|
||||
check_response_facts(facts_known, response)
|
||||
|
||||
@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
|
||||
@pytest.mark.django_db
|
||||
def test_param_module(hosts, fact_scans, get, user):
|
||||
epoch = timezone.now()
|
||||
search = {
|
||||
'module': 'packages',
|
||||
}
|
||||
|
||||
(host, response) = setup_common(hosts, fact_scans, get, user, epoch=epoch, get_params=search)
|
||||
facts_known = Fact.get_timeline(host.id, module=search['module'])
|
||||
assert 3 == len(facts_known)
|
||||
assert 3 == len(response.data['results'])
|
||||
|
||||
check_response_facts(facts_known, response)
|
||||
|
||||
@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
|
||||
@pytest.mark.django_db
|
||||
def test_param_from(hosts, fact_scans, get, user):
|
||||
epoch = timezone.now()
|
||||
search = {
|
||||
'from': epoch + timedelta(days=1),
|
||||
}
|
||||
|
||||
(host, response) = setup_common(hosts, fact_scans, get, user, epoch=epoch, get_params=search)
|
||||
facts_known = Fact.get_timeline(host.id, ts_from=search['from'])
|
||||
assert 3 == len(facts_known)
|
||||
assert 3 == len(response.data['results'])
|
||||
|
||||
check_response_facts(facts_known, response)
|
||||
|
||||
@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
|
||||
@pytest.mark.django_db
|
||||
def test_param_to(hosts, fact_scans, get, user):
|
||||
epoch = timezone.now()
|
||||
search = {
|
||||
'to': epoch + timedelta(days=1),
|
||||
}
|
||||
|
||||
(host, response) = setup_common(hosts, fact_scans, get, user, epoch=epoch, get_params=search)
|
||||
facts_known = Fact.get_timeline(host.id, ts_to=search['to'])
|
||||
assert 6 == len(facts_known)
|
||||
assert 6 == len(response.data['results'])
|
||||
|
||||
check_response_facts(facts_known, response)
|
||||
|
||||
def _test_user_access_control(hosts, fact_scans, get, user_obj, team_obj):
|
||||
hosts = hosts(host_count=1)
|
||||
fact_scans(fact_scans=1)
|
||||
|
||||
team_obj.users.add(user_obj)
|
||||
|
||||
url = reverse('api:host_fact_versions_list', args=(hosts[0].pk,))
|
||||
response = get(url, user_obj)
|
||||
return response
|
||||
|
||||
@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
|
||||
@pytest.mark.ac
|
||||
@pytest.mark.django_db
|
||||
def test_normal_user_403(hosts, fact_scans, get, user, team):
|
||||
user_bob = user('bob', False)
|
||||
response = _test_user_access_control(hosts, fact_scans, get, user_bob, team)
|
||||
|
||||
assert 403 == response.status_code
|
||||
assert "You do not have permission to perform this action." == response.data['detail']
|
||||
|
||||
@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
|
||||
@pytest.mark.ac
|
||||
@pytest.mark.django_db
|
||||
def test_super_user_ok(hosts, fact_scans, get, user, team):
|
||||
user_super = user('bob', True)
|
||||
response = _test_user_access_control(hosts, fact_scans, get, user_super, team)
|
||||
|
||||
assert 200 == response.status_code
|
||||
|
||||
@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
|
||||
@pytest.mark.ac
|
||||
@pytest.mark.django_db
|
||||
def test_user_admin_ok(organization, hosts, fact_scans, get, user, team):
|
||||
user_admin = user('johnson', False)
|
||||
organization.admins.add(user_admin)
|
||||
|
||||
response = _test_user_access_control(hosts, fact_scans, get, user_admin, team)
|
||||
|
||||
assert 200 == response.status_code
|
||||
|
||||
@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
|
||||
@pytest.mark.ac
|
||||
@pytest.mark.django_db
|
||||
def test_user_admin_403(organization, organizations, hosts, fact_scans, get, user, team):
|
||||
user_admin = user('johnson', False)
|
||||
org2 = organizations(1)
|
||||
org2[0].admins.add(user_admin)
|
||||
|
||||
response = _test_user_access_control(hosts, fact_scans, get, user_admin, team)
|
||||
|
||||
assert 403 == response.status_code
|
||||
|
||||
182
awx/main/tests/functional/api/test_fact_view.py
Normal file
182
awx/main/tests/functional/api/test_fact_view.py
Normal file
@@ -0,0 +1,182 @@
|
||||
import mock
|
||||
import pytest
|
||||
import json
|
||||
|
||||
from awx.main.utils import timestamp_apiformat
|
||||
from django.core.urlresolvers import reverse
|
||||
from django.utils import timezone
|
||||
|
||||
def mock_feature_enabled(feature, bypass_database=None):
|
||||
return True
|
||||
|
||||
def mock_feature_disabled(feature, bypass_database=None):
|
||||
return False
|
||||
|
||||
# TODO: Consider making the fact_scan() fixture a Class, instead of a function, and move this method into it
def find_fact(facts, host_id, module_name, timestamp):
    """Return the fact in `facts` matching (host_id, module_name, timestamp).

    Raises RuntimeError when no matching fact exists.
    """
    for f in facts:
        if f.host_id == host_id and f.module == module_name and f.timestamp == timestamp:
            return f
    # The original passed the format string and the args tuple as two separate
    # RuntimeError arguments, so the message was never interpolated; use '%'.
    raise RuntimeError('fact <%s, %s, %s> not found in %s' %
                       (host_id, module_name, timestamp, facts))
|
||||
|
||||
def setup_common(hosts, fact_scans, get, user, epoch=None, module_name='ansible', get_params=None):
    """Create one host with one fact scan, GET the compare view as admin.

    Returns (known_fact, response) so callers can assert against both.
    """
    # Resolve defaults at call time: a def-time timezone.now() freezes the
    # epoch at import, and a shared {} default leaks state between calls.
    if epoch is None:
        epoch = timezone.now()
    if get_params is None:
        get_params = {}
    hosts = hosts(host_count=1)
    facts = fact_scans(fact_scans=1, timestamp_epoch=epoch)

    url = reverse('api:host_fact_compare_view', args=(hosts[0].pk,))
    response = get(url, user('admin', True), data=get_params)

    fact_known = find_fact(facts, hosts[0].id, module_name, epoch)
    return (fact_known, response)
|
||||
|
||||
def check_system_tracking_feature_forbidden(response):
    """Assert that `response` is the 402 license-denial for system tracking."""
    expected_detail = 'Your license does not permit use of system tracking.'
    assert response.status_code == 402
    assert response.data['detail'] == expected_detail
|
||||
|
||||
@mock.patch('awx.api.views.feature_enabled', new=mock_feature_disabled)
@pytest.mark.django_db
@pytest.mark.license_feature
def test_system_tracking_license_get(hosts, get, user):
    """GET on the fact compare view is rejected when the feature is unlicensed."""
    host_list = hosts(host_count=1)
    url = reverse('api:host_fact_compare_view', args=(host_list[0].pk,))
    check_system_tracking_feature_forbidden(get(url, user('admin', True)))
|
||||
|
||||
@mock.patch('awx.api.views.feature_enabled', new=mock_feature_disabled)
@pytest.mark.django_db
@pytest.mark.license_feature
def test_system_tracking_license_options(hosts, options, user):
    """OPTIONS on the fact compare view is rejected when the feature is unlicensed."""
    host_list = hosts(host_count=1)
    url = reverse('api:host_fact_compare_view', args=(host_list[0].pk,))
    check_system_tracking_feature_forbidden(options(url, None, user('admin', True)))
|
||||
|
||||
@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
@pytest.mark.django_db
def test_no_fact_found(hosts, get, user):
    """404 with a 'Fact not found' detail when the host has no scans."""
    host_list = hosts(host_count=1)
    url = reverse('api:host_fact_compare_view', args=(host_list[0].pk,))
    response = get(url, user('admin', True))

    assert 404 == response.status_code
    assert {"detail": "Fact not found"} == response.data
|
||||
|
||||
@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
@pytest.mark.django_db
def test_basic_fields(hosts, fact_scans, get, user):
    """The compare view payload exposes the expected top-level fields."""
    host_list = hosts(host_count=1)
    fact_scans(fact_scans=1)

    url = reverse('api:host_fact_compare_view', args=(host_list[0].pk,))
    response = get(url, user('admin', True))

    for field in ('related', 'id', 'facts', 'module', 'host', 'summary_fields'):
        assert field in response.data
    assert isinstance(response.data['host'], int)
    assert 'host' in response.data['summary_fields']
    for field in ('name', 'description'):
        assert field in response.data['summary_fields']['host']
    assert 'host' in response.data['related']
    assert reverse('api:host_detail', args=(host_list[0].pk,)) == response.data['related']['host']
|
||||
|
||||
@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
@pytest.mark.django_db
def test_content(hosts, fact_scans, get, user, fact_ansible_json):
    """The compare view returns the stored ansible fact verbatim."""
    (fact_known, response) = setup_common(hosts, fact_scans, get, user)
    payload = response.data

    assert fact_known.host_id == payload['host']
    assert json.loads(payload['facts']) == fact_ansible_json
    assert payload['timestamp'] == timestamp_apiformat(fact_known.timestamp)
    assert payload['module'] == fact_known.module
|
||||
|
||||
def _test_search_by_module(hosts, fact_scans, get, user, fact_json, module_name):
    """Shared body: filter the compare view by module and verify the payload."""
    (fact_known, response) = setup_common(
        hosts, fact_scans, get, user,
        module_name=module_name, get_params={'module': module_name})

    assert json.loads(response.data['facts']) == fact_json
    assert response.data['timestamp'] == timestamp_apiformat(fact_known.timestamp)
    assert response.data['module'] == module_name
|
||||
|
||||
@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
@pytest.mark.django_db
def test_search_by_module_packages(hosts, fact_scans, get, user, fact_packages_json):
    """Module filter 'packages' returns the packages scan."""
    _test_search_by_module(hosts, fact_scans, get, user, fact_packages_json, 'packages')


@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
@pytest.mark.django_db
def test_search_by_module_services(hosts, fact_scans, get, user, fact_services_json):
    """Module filter 'services' returns the services scan."""
    _test_search_by_module(hosts, fact_scans, get, user, fact_services_json, 'services')
|
||||
|
||||
@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
@pytest.mark.django_db
def test_search_by_timestamp_and_module(hosts, fact_scans, get, user, fact_packages_json):
    """Filtering by both module and datetime pins down a single fact."""
    scan_time = timezone.now()
    module_name = 'packages'

    (fact_known, response) = setup_common(
        hosts, fact_scans, get, user,
        module_name=module_name, epoch=scan_time,
        get_params=dict(module=module_name, datetime=scan_time))

    assert fact_known.id == response.data['id']
|
||||
|
||||
def _test_user_access_control(hosts, fact_scans, get, user_obj, team_obj):
    """Shared body: request the compare view as `user_obj` (added to `team_obj`)."""
    host_list = hosts(host_count=1)
    fact_scans(fact_scans=1)

    team_obj.users.add(user_obj)

    url = reverse('api:host_fact_compare_view', args=(host_list[0].pk,))
    return get(url, user_obj)
|
||||
|
||||
@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
@pytest.mark.ac
@pytest.mark.django_db
def test_normal_user_403(hosts, fact_scans, get, user, team):
    """A plain team member may not read system tracking data."""
    plain_user = user('bob', False)
    response = _test_user_access_control(hosts, fact_scans, get, plain_user, team)

    assert 403 == response.status_code
    assert response.data['detail'] == "You do not have permission to perform this action."
|
||||
|
||||
@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
@pytest.mark.ac
@pytest.mark.django_db
def test_super_user_ok(hosts, fact_scans, get, user, team):
    """Superusers can always read system tracking data."""
    superuser = user('bob', True)
    response = _test_user_access_control(hosts, fact_scans, get, superuser, team)

    assert 200 == response.status_code
|
||||
|
||||
@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
@pytest.mark.ac
@pytest.mark.django_db
def test_user_admin_ok(organization, hosts, fact_scans, get, user, team):
    """An admin of the owning organization can read system tracking data."""
    admin = user('johnson', False)
    organization.admins.add(admin)

    response = _test_user_access_control(hosts, fact_scans, get, admin, team)

    assert 200 == response.status_code
|
||||
|
||||
@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
@pytest.mark.ac
@pytest.mark.django_db
def test_user_admin_403(organization, organizations, hosts, fact_scans, get, user, team):
    """Admin rights in an unrelated organization grant no access."""
    admin = user('johnson', False)
    other_orgs = organizations(1)
    other_orgs[0].admins.add(admin)

    response = _test_user_access_control(hosts, fact_scans, get, admin, team)

    assert 403 == response.status_code
|
||||
|
||||
17
awx/main/tests/functional/api/test_host_detail.py
Normal file
17
awx/main/tests/functional/api/test_host_detail.py
Normal file
@@ -0,0 +1,17 @@
|
||||
# TODO: As of writing this our only concern is ensuring that the fact feature is reflected in the Host endpoint.
|
||||
# Other host tests should live here to make this test suite more complete.
|
||||
import pytest
|
||||
|
||||
from django.core.urlresolvers import reverse
|
||||
|
||||
@pytest.mark.django_db
def test_basic_fields(hosts, fact_scans, get, user):
    """The host detail payload links to the host's fact_versions endpoint."""
    host_list = hosts(host_count=1)

    url = reverse('api:host_detail', args=(host_list[0].pk,))
    response = get(url, user('admin', True))

    assert 'related' in response.data
    assert 'fact_versions' in response.data['related']
    assert reverse('api:host_fact_versions_list', args=(host_list[0].pk,)) == response.data['related']['fact_versions']
|
||||
|
||||
0
awx/main/tests/functional/commands/__init__.py
Normal file
0
awx/main/tests/functional/commands/__init__.py
Normal file
109
awx/main/tests/functional/commands/conftest.py
Normal file
109
awx/main/tests/functional/commands/conftest.py
Normal file
@@ -0,0 +1,109 @@
|
||||
import pytest
|
||||
import time
|
||||
|
||||
from datetime import datetime
|
||||
|
||||
@pytest.fixture
def fact_msg_base(inventory, hosts):
    """Skeleton fact-cache message for one host with an empty facts payload."""
    host_objs = hosts(1)
    return {
        'host': host_objs[0].name,
        'date_key': time.mktime(datetime.utcnow().timetuple()),
        'facts': {},
        'inventory_id': inventory.id,
    }
|
||||
|
||||
@pytest.fixture
def fact_msg_small(fact_msg_base):
    """fact_msg_base populated with a small packages/services/ansible payload."""
    packages = {
        "accountsservice": [
            {
                "architecture": "amd64",
                "name": "accountsservice",
                "source": "apt",
                "version": "0.6.35-0ubuntu7.1"
            }
        ],
        "acpid": [
            {
                "architecture": "amd64",
                "name": "acpid",
                "source": "apt",
                "version": "1:2.0.21-1ubuntu2"
            }
        ],
        "adduser": [
            {
                "architecture": "all",
                "name": "adduser",
                "source": "apt",
                "version": "3.113+nmu3ubuntu3"
            }
        ],
    }
    services = [
        {"name": "acpid", "source": "sysv", "state": "running"},
        {"name": "apparmor", "source": "sysv", "state": "stopped"},
        {"name": "atd", "source": "sysv", "state": "running"},
        {"name": "cron", "source": "sysv", "state": "running"},
    ]
    ansible = {
        'ansible_fact_simple': 'hello world',
        'ansible_fact_complex': {
            'foo': 'bar',
            'hello': [
                'scooby',
                'dooby',
                'doo'
            ]
        },
    }
    fact_msg_base['facts'] = {
        'packages': packages,
        'services': services,
        'ansible': ansible,
    }
    return fact_msg_base
|
||||
|
||||
|
||||
'''
|
||||
Facts sent from ansible to our fact cache receiver.
|
||||
The fact module type is implicit i.e
|
||||
|
||||
Note: The 'ansible' module is an exception to this rule.
|
||||
It is NOT nested in a dict, and thus does NOT contain a first-level
|
||||
key of 'ansible'
|
||||
|
||||
{
|
||||
'fact_module_name': { ... },
|
||||
}
|
||||
'''
|
||||
|
||||
@pytest.fixture
def fact_msg_ansible(fact_msg_base, fact_ansible_json):
    """Message whose facts payload is the raw (un-nested) ansible scan."""
    fact_msg_base['facts'] = fact_ansible_json
    return fact_msg_base


@pytest.fixture
def fact_msg_packages(fact_msg_base, fact_packages_json):
    """Message carrying a 'packages' scan nested under its module key."""
    fact_msg_base['facts']['packages'] = fact_packages_json
    return fact_msg_base


@pytest.fixture
def fact_msg_services(fact_msg_base, fact_services_json):
    """Message carrying a 'services' scan nested under its module key."""
    fact_msg_base['facts']['services'] = fact_services_json
    return fact_msg_base
|
||||
|
||||
200
awx/main/tests/functional/commands/test_cleanup_facts.py
Normal file
200
awx/main/tests/functional/commands/test_cleanup_facts.py
Normal file
@@ -0,0 +1,200 @@
|
||||
# Copyright (c) 2016 Ansible, Inc.
|
||||
# All Rights Reserved
|
||||
|
||||
# Python
|
||||
import pytest
|
||||
import mock
|
||||
from dateutil.relativedelta import relativedelta
|
||||
from datetime import timedelta
|
||||
|
||||
# Django
|
||||
from django.utils import timezone
|
||||
from django.core.management.base import CommandError
|
||||
|
||||
# AWX
|
||||
from awx.main.management.commands.cleanup_facts import CleanupFacts, Command
|
||||
from awx.main.models.fact import Fact
|
||||
from awx.main.models.inventory import Host
|
||||
|
||||
def mock_feature_enabled(feature, bypass_database=None):
    """Stand-in for feature_enabled() that reports every feature licensed."""
    return True


def mock_feature_disabled(feature, bypass_database=None):
    """Stand-in for feature_enabled() that reports every feature unlicensed."""
    return False
|
||||
|
||||
@pytest.mark.django_db
def test_cleanup_granularity(fact_scans, hosts):
    """A 2-day granularity over 10 daily scans for 5 hosts prunes 60 facts."""
    epoch = timezone.now()
    hosts(5)
    fact_scans(10, timestamp_epoch=epoch)
    newest = Fact.objects.all().order_by('-timestamp').first()
    future = newest.timestamp + timedelta(days=365)

    deleted = CleanupFacts().cleanup(future, relativedelta(days=2))
    assert 60 == deleted
|
||||
|
||||
'''
Delete half of the scans
'''
@pytest.mark.django_db
def test_cleanup_older_than(fact_scans, hosts):
    """Cleaning everything older than the median timestamp deletes half the facts."""
    epoch = timezone.now()
    hosts(5)
    fact_scans(28, timestamp_epoch=epoch)
    qs = Fact.objects.all().order_by('-timestamp')
    # Floor division: '/' on two ints floors only on Python 2; a queryset
    # index must be an integer, so '//' keeps this working under Python 3.
    fact_middle = qs[qs.count() // 2]
    granularity = relativedelta()

    cleanup_facts = CleanupFacts()
    deleted_count = cleanup_facts.cleanup(fact_middle.timestamp, granularity)
    assert 210 == deleted_count
|
||||
|
||||
@pytest.mark.django_db
def test_cleanup_older_than_granularity_module(fact_scans, hosts):
    """Restricting cleanup to one module only prunes that module's facts."""
    epoch = timezone.now()
    hosts(5)
    fact_scans(10, timestamp_epoch=epoch)
    newest = Fact.objects.all().order_by('-timestamp').first()
    future = newest.timestamp + timedelta(days=365)

    deleted = CleanupFacts().cleanup(future, relativedelta(days=2), module='ansible')
    assert 20 == deleted
|
||||
|
||||
|
||||
'''
Reduce the granularity of half of the facts scans, by half.
'''
@pytest.mark.django_db
def test_cleanup_logic(fact_scans, hosts):
    """After cleanup, each host keeps 15 'ansible' facts spaced `granularity` apart."""
    epoch = timezone.now()
    hosts = hosts(5)
    fact_scans(60, timestamp_epoch=epoch)
    timestamp_middle = epoch + timedelta(days=30)
    granularity = relativedelta(days=2)
    module = 'ansible'

    cleanup_facts = CleanupFacts()
    cleanup_facts.cleanup(timestamp_middle, granularity, module=module)

    host_ids = Host.objects.all().values_list('id', flat=True)
    host_facts = {}
    for host_id in host_ids:
        facts = Fact.objects.filter(host__id=host_id, module=module, timestamp__lt=timestamp_middle).order_by('-timestamp')
        host_facts[host_id] = facts

    # dict.items() works on both Python 2 and 3; iteritems() is Python 2 only.
    for host_id, facts in host_facts.items():
        assert 15 == len(facts)

        timestamp_pivot = timestamp_middle
        for fact in facts:
            timestamp_pivot -= granularity
            assert fact.timestamp == timestamp_pivot
|
||||
|
||||
@mock.patch('awx.main.management.commands.cleanup_facts.feature_enabled', new=mock_feature_disabled)
@pytest.mark.django_db
@pytest.mark.license_feature
def test_system_tracking_feature_disabled(mocker):
    """cleanup_facts refuses to run when system tracking is unlicensed."""
    cmd = Command()
    with pytest.raises(CommandError) as err:
        cmd.handle(None)
    # `'x' in <exception>` is not a substring test; match against the
    # exception's string form so this works on Python 2 and 3 alike.
    assert 'The System Tracking feature is not enabled for your Tower instance' in str(err.value)
|
||||
|
||||
@mock.patch('awx.main.management.commands.cleanup_facts.feature_enabled', new=mock_feature_enabled)
@pytest.mark.django_db
def test_parameters_ok(mocker):
    """Valid CLI options are parsed and forwarded to CleanupFacts.run()."""
    run_mock = mocker.patch('awx.main.management.commands.cleanup_facts.CleanupFacts.run')
    cli_options = {
        'older_than': '1d',
        'granularity': '1d',
        'module': None,
    }
    Command().handle(None, **cli_options)
    run_mock.assert_called_once_with(relativedelta(days=1), relativedelta(days=1), module=None)
|
||||
|
||||
@pytest.mark.django_db
def test_string_time_to_timestamp_ok():
    """Valid '<n><unit>' strings map to the matching relativedelta."""
    kvs = [
        {
            'time': '2w',
            'timestamp': relativedelta(weeks=2),
            'msg': '2 weeks',
        },
        {
            'time': '23d',
            'timestamp': relativedelta(days=23),
            'msg': '23 days',
        },
        {
            'time': '11m',
            'timestamp': relativedelta(months=11),
            'msg': '11 months',
        },
        {
            'time': '14y',
            'timestamp': relativedelta(years=14),
            'msg': '14 years',
        },
    ]
    for kv in kvs:
        cmd = Command()
        res = cmd.string_time_to_timestamp(kv['time'])
        # The 'msg' field was dead data before; surface it on failure.
        assert kv['timestamp'] == res, kv['msg']
|
||||
|
||||
@pytest.mark.django_db
def test_string_time_to_timestamp_invalid():
    """Malformed or missing time strings parse to None."""
    kvs = [
        {
            'time': '2weeks',
            'msg': 'weeks instead of w',
        },
        {
            'time': '2days',
            'msg': 'days instead of d',
        },
        {
            'time': '23',
            'msg': 'no unit specified',
        },
        {
            'time': None,
            'msg': 'no value specified',
        },
        {
            'time': 'zigzag',
            'msg': 'random string specified',
        },
    ]
    for kv in kvs:
        cmd = Command()
        res = cmd.string_time_to_timestamp(kv['time'])
        # The 'msg' field was dead data before; surface it on failure.
        assert res is None, kv['msg']
|
||||
|
||||
@mock.patch('awx.main.management.commands.cleanup_facts.feature_enabled', new=mock_feature_enabled)
@pytest.mark.django_db
def test_parameters_fail(mocker):
    """Invalid --older_than / --granularity values raise CommandError."""
    # Mock run() just in case, but it should never get called because an error should be thrown
    mocker.patch('awx.main.management.commands.cleanup_facts.CleanupFacts.run')
    kvs = [
        {
            'older_than': '1week',
            'granularity': '1d',
            'msg': '--older_than invalid value "1week"',
        },
        {
            'older_than': '1d',
            'granularity': '1year',
            'msg': '--granularity invalid value "1year"',
        }
    ]
    for kv in kvs:
        cmd = Command()
        with pytest.raises(CommandError) as err:
            cmd.handle(None, older_than=kv['older_than'], granularity=kv['granularity'])
        # `in <exception>` is not a substring test; compare against str() of it.
        assert kv['msg'] in str(err.value)
|
||||
|
||||
@@ -0,0 +1,95 @@
|
||||
# Copyright (c) 2015 Ansible, Inc.
|
||||
# All Rights Reserved
|
||||
|
||||
# Python
|
||||
import pytest
|
||||
from datetime import datetime
|
||||
import json
|
||||
|
||||
# Django
|
||||
|
||||
# AWX
|
||||
from awx.main.management.commands.run_fact_cache_receiver import FactCacheReceiver
|
||||
from awx.main.models.fact import Fact
|
||||
from awx.main.models.inventory import Host
|
||||
|
||||
# TODO: Check that timestamp and other attributes are as expected
def check_process_fact_message_module(fact_returned, data, module_name):
    """Assert exactly one Fact was stored and that it matches the message `data`."""
    expected_ts = datetime.fromtimestamp(data['date_key'], None)

    # Ensure 1, and only 1, fact created
    assert 1 == Fact.objects.all().count()

    host_obj = Host.objects.get(name=data['host'], inventory__id=data['inventory_id'])
    assert host_obj is not None
    fact_known = Fact.get_host_fact(host_obj.id, module_name, expected_ts)
    assert fact_known is not None
    assert fact_known == fact_returned

    assert host_obj == fact_returned.host
    # 'ansible' payloads are stored un-nested; every other module is keyed.
    if module_name == 'ansible':
        assert data['facts'] == fact_returned.facts
    else:
        assert data['facts'][module_name] == fact_returned.facts
    assert expected_ts == fact_returned.timestamp
    assert module_name == fact_returned.module
|
||||
|
||||
@pytest.mark.django_db
def test_process_fact_message_ansible(fact_msg_ansible):
    """An 'ansible' message is stored and retrievable as a single Fact."""
    stored = FactCacheReceiver().process_fact_message(fact_msg_ansible)
    check_process_fact_message_module(stored, fact_msg_ansible, 'ansible')


@pytest.mark.django_db
def test_process_fact_message_packages(fact_msg_packages):
    """A 'packages' message is stored and retrievable as a single Fact."""
    stored = FactCacheReceiver().process_fact_message(fact_msg_packages)
    check_process_fact_message_module(stored, fact_msg_packages, 'packages')


@pytest.mark.django_db
def test_process_fact_message_services(fact_msg_services):
    """A 'services' message is stored and retrievable as a single Fact."""
    stored = FactCacheReceiver().process_fact_message(fact_msg_services)
    check_process_fact_message_module(stored, fact_msg_services, 'services')
|
||||
|
||||
'''
We pickypack our fact sending onto the Ansible fact interface.
The interface is <hostname, facts>. Where facts is a json blob of all the facts.
This makes it hard to decipher what facts are new/changed.
Because of this, we handle the same fact module data being sent multiple times
and just keep the newest version.
'''
@pytest.mark.django_db
def test_process_facts_message_ansible_overwrite(fact_scans, fact_msg_ansible):
    """Re-sending the same scan keeps a single Fact, updated with the new key."""
    epoch = datetime.fromtimestamp(fact_msg_ansible['date_key'])
    fact_scans(fact_scans=1, timestamp_epoch=epoch)
    overwrite_key = 'ansible.overwrite'
    overwrite_value = 'hello world'

    receiver = FactCacheReceiver()
    receiver.process_fact_message(fact_msg_ansible)

    fact_msg_ansible['facts'][overwrite_key] = overwrite_value
    fact_returned = receiver.process_fact_message(fact_msg_ansible)

    fact_obj = Fact.objects.get(id=fact_returned.id)
    stored_facts = json.loads(fact_obj.facts)
    assert overwrite_key in fact_obj.facts
    assert stored_facts == fact_msg_ansible['facts']
    assert overwrite_value == stored_facts[overwrite_key]
|
||||
|
||||
# Ensure that the message flows from the socket through to process_fact_message()
@pytest.mark.django_db
def test_run_receiver(mocker, fact_msg_ansible):
    """run_receiver hands each socket message to process_fact_message."""
    mocker.patch("awx.main.socket.Socket.listen", return_value=[fact_msg_ansible])

    receiver = FactCacheReceiver()
    mocker.patch.object(receiver, 'process_fact_message', return_value=None)
    receiver.run_receiver(use_processing_threads=False)

    receiver.process_fact_message.assert_called_once_with(fact_msg_ansible)
|
||||
@@ -1,8 +1,23 @@
|
||||
import pytest
|
||||
|
||||
# Python
|
||||
import pytest
|
||||
import mock
|
||||
import json
|
||||
import os
|
||||
from datetime import timedelta
|
||||
|
||||
# Django
|
||||
from django.core.urlresolvers import resolve
|
||||
from django.utils.six.moves.urllib.parse import urlparse
|
||||
from django.utils import timezone
|
||||
from django.contrib.auth.models import User
|
||||
from django.conf import settings
|
||||
|
||||
# AWX
|
||||
from awx.main.models.projects import Project
|
||||
from awx.main.models.base import PERM_INVENTORY_READ
|
||||
from awx.main.models.ha import Instance
|
||||
from awx.main.models.fact import Fact
|
||||
|
||||
from rest_framework.test import (
|
||||
APIRequestFactory,
|
||||
@@ -10,20 +25,34 @@ from rest_framework.test import (
|
||||
)
|
||||
|
||||
from awx.main.models.credential import Credential
|
||||
from awx.main.models.projects import Project
|
||||
from awx.main.models.jobs import JobTemplate
|
||||
from awx.main.models.ha import Instance
|
||||
from awx.main.models.inventory import (
|
||||
Inventory,
|
||||
Group,
|
||||
)
|
||||
from awx.main.models.organization import (
|
||||
Organization,
|
||||
Team,
|
||||
Permission,
|
||||
)
|
||||
|
||||
from awx.main.models.rbac import Role
|
||||
|
||||
'''
Disable all django model signals.
'''
@pytest.fixture(scope="session", autouse=False)
def disable_signals():
    """Patch Signal.send for the whole session so no model signals fire."""
    patcher = mock.patch('django.dispatch.Signal.send', autospec=True)
    patcher.start()
|
||||
|
||||
'''
FIXME: Not sure how "far" just setting the BROKER_URL will get us.
We may need to incluence CELERY's configuration like we do in the old unit tests (see base.py)

Allows django signal code to execute without the need for redis
'''
@pytest.fixture(scope="session", autouse=True)
def celery_memory_broker():
    """Point Celery at an in-memory broker for the test session."""
    settings.BROKER_URL = 'memory://localhost/'
|
||||
|
||||
@pytest.fixture
|
||||
def user():
|
||||
@@ -60,11 +89,15 @@ def deploy_jobtemplate(project, inventory, credential):
|
||||
|
||||
@pytest.fixture
def team(organization):
    """A Team created inside the shared organization fixture."""
    return organization.teams.create(name='test-team')
|
||||
|
||||
@pytest.fixture
@mock.patch.object(Project, "update", lambda self, **kwargs: None)
def project(instance, organization):
    """A git-backed Project in `organization`; Project.update is stubbed so no SCM fetch runs."""
    proj = Project.objects.create(name="test-proj",
                                  description="test-proj-desc",
                                  scm_type="git",
                                  scm_url="https://github.com/jlaska/ansible-playbooks")
    proj.organizations.add(organization)
    return proj
|
||||
|
||||
@@ -87,7 +120,7 @@ def credential():
|
||||
|
||||
@pytest.fixture
|
||||
def inventory(organization):
|
||||
return Inventory.objects.create(name="test-inventory", organization=organization)
|
||||
return organization.inventories.create(name="test-inv")
|
||||
|
||||
@pytest.fixture
|
||||
def role():
|
||||
@@ -105,12 +138,43 @@ def alice(user):
|
||||
def bob(user):
|
||||
return user('bob', False)
|
||||
|
||||
@pytest.fixture
def organizations(instance):
    """Factory fixture: build N throwaway Organization rows."""
    def factory(organization_count=1):
        created = []
        for idx in xrange(0, organization_count):
            created.append(Organization.objects.create(name="test-org-%d" % idx, description="test-org-desc"))
        return created
    return factory
|
||||
|
||||
@pytest.fixture
def group(inventory):
    def g(name):
        """Get the named Group in `inventory`, creating it on first use."""
        # Catch only DoesNotExist: the original bare `except:` would also
        # swallow database errors and even KeyboardInterrupt.
        try:
            return Group.objects.get(name=name, inventory=inventory)
        except Group.DoesNotExist:
            return Group.objects.create(inventory=inventory, name=name)
    return g
|
||||
|
||||
@pytest.fixture
def hosts(group):
    """Factory fixture: get-or-create N hosts inside 'group-1'."""
    group1 = group('group-1')

    def factory(host_count=1):
        created = []
        for idx in xrange(0, host_count):
            host_name = '%s-host-%s' % (group1.name, idx)
            (host, was_created) = group1.inventory.hosts.get_or_create(name=host_name)
            if was_created:
                group1.hosts.add(host)
            created.append(host)
        return created
    return factory
|
||||
|
||||
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def permissions():
|
||||
return {
|
||||
@@ -244,7 +308,48 @@ def options():
|
||||
return response
|
||||
return rf
|
||||
|
||||
@pytest.fixture(scope="session", autouse=True)
def celery_memory_broker():
    """Point Celery at an in-memory broker so signal code needs no redis."""
    from django.conf import settings
    settings.BROKER_URL = 'memory://localhost/'
|
||||
|
||||
|
||||
@pytest.fixture
def fact_scans(group, fact_ansible_json, fact_packages_json, fact_services_json):
    """Factory fixture: create N daily fact scans (3 modules) per host in 'group-1'."""
    group1 = group('group-1')

    def rf(fact_scans=1, timestamp_epoch=None):
        # A timezone.now() default is evaluated once at import time, freezing
        # the epoch for the whole test session; resolve it per call instead.
        if timestamp_epoch is None:
            timestamp_epoch = timezone.now()
        facts_json = {
            'ansible': fact_ansible_json,
            'packages': fact_packages_json,
            'services': fact_services_json,
        }
        facts = []
        module_names = ['ansible', 'services', 'packages']
        timestamp_current = timestamp_epoch

        for i in xrange(0, fact_scans):
            for host in group1.hosts.all():
                for module_name in module_names:
                    facts.append(Fact.objects.create(host=host, timestamp=timestamp_current, module=module_name, facts=facts_json[module_name]))
            timestamp_current += timedelta(days=1)
        return facts
    return rf
|
||||
|
||||
def _fact_json(module_name):
    """Load the canned '<module_name>.json' payload stored beside this file."""
    here = os.path.dirname(os.path.realpath(__file__))
    with open('%s/%s.json' % (here, module_name)) as f:
        return json.load(f)
|
||||
|
||||
@pytest.fixture
def fact_ansible_json():
    """Canned 'ansible' module fact payload."""
    return _fact_json('ansible')


@pytest.fixture
def fact_packages_json():
    """Canned 'packages' module fact payload."""
    return _fact_json('packages')


@pytest.fixture
def fact_services_json():
    """Canned 'services' module fact payload."""
    return _fact_json('services')
|
||||
|
||||
@pytest.fixture
def permission_inv_read(organization, inventory, team):
    """Grant `team` read permission on `inventory`."""
    return Permission.objects.create(
        inventory=inventory, team=team, permission_type=PERM_INVENTORY_READ)
|
||||
|
||||
|
||||
111
awx/main/tests/functional/models/fact/test_get_host_fact.py
Normal file
111
awx/main/tests/functional/models/fact/test_get_host_fact.py
Normal file
@@ -0,0 +1,111 @@
|
||||
import pytest
|
||||
|
||||
from datetime import timedelta
|
||||
from django.utils import timezone
|
||||
|
||||
from awx.main.models import Fact
|
||||
|
||||
@pytest.mark.django_db
def test_newest_scan_exact(hosts, fact_scans):
    """Querying with an exact scan timestamp returns that scan's fact."""
    epoch = timezone.now()
    host_list = hosts(host_count=2)
    all_facts = fact_scans(fact_scans=3, timestamp_epoch=epoch)

    expected = None
    for candidate in all_facts:
        if (candidate.host_id == host_list[0].id and
                candidate.module == 'ansible' and
                candidate.timestamp == epoch):
            expected = candidate
            break

    assert Fact.get_host_fact(host_list[0].id, 'ansible', epoch) == expected
|
||||
|
||||
'''
Show me the most recent state of the sytem at any point of time.
or, said differently
For any timestamp, get the first scan that is <= the timestamp.
'''

'''
Ensure most recent scan run is the scan returned.
Query by future date.
'''
@pytest.mark.django_db
def test_newest_scan_less_than(hosts, fact_scans):
    """A future timestamp resolves to the newest scan on record."""
    epoch = timezone.now()
    timestamp_future = epoch + timedelta(days=10)
    host_list = hosts(host_count=2)
    all_facts = fact_scans(fact_scans=3, timestamp_epoch=epoch)

    expected = None
    for candidate in all_facts:
        if (candidate.host_id == host_list[0].id and
                candidate.module == 'ansible' and
                candidate.timestamp == epoch + timedelta(days=2)):
            expected = candidate
            break
    assert expected is not None

    assert Fact.get_host_fact(host_list[0].id, 'ansible', timestamp_future) == expected
|
||||
|
||||
'''
Tests query Fact that is in the middle of the fact scan timeline, but not an exact timestamp.
'''
@pytest.mark.django_db
def test_query_middle_of_timeline(hosts, fact_scans):
    """A mid-timeline timestamp resolves to the nearest scan at or before it."""
    epoch = timezone.now()
    midpoint = epoch + timedelta(days=1, hours=3)
    host_list = hosts(host_count=2)
    all_facts = fact_scans(fact_scans=3, timestamp_epoch=epoch)

    expected = None
    for candidate in all_facts:
        if (candidate.host_id == host_list[0].id and
                candidate.module == 'ansible' and
                candidate.timestamp == epoch + timedelta(days=1)):
            expected = candidate
            break
    assert expected is not None

    assert Fact.get_host_fact(host_list[0].id, 'ansible', midpoint) == expected
|
||||
|
||||
'''
Query time less than any fact scan. Should return None
'''
@pytest.mark.django_db
def test_query_result_empty(hosts, fact_scans):
    """A timestamp earlier than every scan yields no fact."""
    epoch = timezone.now()
    before_any_scan = epoch - timedelta(days=1)
    host_list = hosts(host_count=2)
    fact_scans(fact_scans=3, timestamp_epoch=epoch)

    assert Fact.get_host_fact(host_list[0].id, 'ansible', before_any_scan) is None
|
||||
|
||||
'''
Query by fact module other than 'ansible'
'''
@pytest.mark.django_db
def test_by_module(hosts, fact_scans):
    """Lookups keyed by 'services' and 'packages' return that module's scan."""
    epoch = timezone.now()
    host_list = hosts(host_count=2)
    all_facts = fact_scans(fact_scans=3, timestamp_epoch=epoch)

    expected = {'services': None, 'packages': None}
    for candidate in all_facts:
        if candidate.host_id == host_list[0].id and candidate.timestamp == epoch:
            if candidate.module in expected:
                expected[candidate.module] = candidate
    assert expected['services'] is not None
    assert expected['packages'] is not None

    assert Fact.get_host_fact(host_list[0].id, 'services', epoch) == expected['services']
    assert Fact.get_host_fact(host_list[0].id, 'packages', epoch) == expected['packages']
|
||||
|
||||
129
awx/main/tests/functional/models/fact/test_get_timeline.py
Normal file
129
awx/main/tests/functional/models/fact/test_get_timeline.py
Normal file
@@ -0,0 +1,129 @@
|
||||
import pytest
|
||||
|
||||
from datetime import timedelta
|
||||
from django.utils import timezone
|
||||
|
||||
from awx.main.models import Fact
|
||||
|
||||
def setup_common(hosts, fact_scans, ts_from=None, ts_to=None, epoch=timezone.now(), module_name='ansible', ts_known=None):
|
||||
hosts = hosts(host_count=2)
|
||||
facts = fact_scans(fact_scans=3, timestamp_epoch=epoch)
|
||||
|
||||
facts_known = []
|
||||
for f in facts:
|
||||
if f.host.id == hosts[0].id:
|
||||
if module_name and f.module != module_name:
|
||||
continue
|
||||
if ts_known and f.timestamp != ts_known:
|
||||
continue
|
||||
facts_known.append(f)
|
||||
fact_objs = Fact.get_timeline(hosts[0].id, module=module_name, ts_from=ts_from, ts_to=ts_to)
|
||||
return (facts_known, fact_objs)
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_all(hosts, fact_scans):
|
||||
epoch = timezone.now()
|
||||
ts_from = epoch - timedelta(days=1)
|
||||
ts_to = epoch + timedelta(days=10)
|
||||
|
||||
(facts_known, fact_objs) = setup_common(hosts, fact_scans, ts_from, ts_to, module_name=None, epoch=epoch)
|
||||
assert 9 == len(facts_known)
|
||||
assert 9 == len(fact_objs)
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_all_ansible(hosts, fact_scans):
|
||||
epoch = timezone.now()
|
||||
ts_from = epoch - timedelta(days=1)
|
||||
ts_to = epoch + timedelta(days=10)
|
||||
|
||||
(facts_known, fact_objs) = setup_common(hosts, fact_scans, ts_from, ts_to, epoch=epoch)
|
||||
assert 3 == len(facts_known)
|
||||
assert 3 == len(fact_objs)
|
||||
|
||||
for i in xrange(len(facts_known) - 1, 0):
|
||||
assert facts_known[i].id == fact_objs[i].id
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_empty_db(hosts, fact_scans):
|
||||
hosts = hosts(host_count=2)
|
||||
epoch = timezone.now()
|
||||
ts_from = epoch - timedelta(days=1)
|
||||
ts_to = epoch + timedelta(days=10)
|
||||
|
||||
fact_objs = Fact.get_timeline(hosts[0].id, 'ansible', ts_from, ts_to)
|
||||
|
||||
assert 0 == len(fact_objs)
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_no_results(hosts, fact_scans):
|
||||
epoch = timezone.now()
|
||||
ts_from = epoch - timedelta(days=100)
|
||||
ts_to = epoch - timedelta(days=50)
|
||||
|
||||
(facts_known, fact_objs) = setup_common(hosts, fact_scans, ts_from, ts_to, epoch=epoch)
|
||||
assert 0 == len(fact_objs)
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_exact_same_equal(hosts, fact_scans):
|
||||
epoch = timezone.now()
|
||||
ts_to = ts_from = epoch + timedelta(days=1)
|
||||
|
||||
(facts_known, fact_objs) = setup_common(hosts, fact_scans, ts_from, ts_to, ts_known=ts_to, epoch=epoch)
|
||||
assert 1 == len(facts_known)
|
||||
assert 1 == len(fact_objs)
|
||||
|
||||
assert facts_known[0].id == fact_objs[0].id
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_exact_from_exclusive_to_inclusive(hosts, fact_scans):
|
||||
epoch = timezone.now()
|
||||
ts_from = epoch + timedelta(days=1)
|
||||
ts_to = epoch + timedelta(days=2)
|
||||
|
||||
(facts_known, fact_objs) = setup_common(hosts, fact_scans, ts_from, ts_to, ts_known=ts_to, epoch=epoch)
|
||||
|
||||
assert 1 == len(facts_known)
|
||||
assert 1 == len(fact_objs)
|
||||
|
||||
assert facts_known[0].id == fact_objs[0].id
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_to_lte(hosts, fact_scans):
|
||||
epoch = timezone.now()
|
||||
ts_to = epoch + timedelta(days=1)
|
||||
|
||||
(facts_known, fact_objs) = setup_common(hosts, fact_scans, ts_from=None, ts_to=ts_to, epoch=epoch)
|
||||
facts_known_subset = filter(lambda x: x.timestamp <= ts_to, facts_known)
|
||||
|
||||
assert 2 == len(facts_known_subset)
|
||||
assert 2 == len(fact_objs)
|
||||
|
||||
for i in xrange(0, len(fact_objs)):
|
||||
assert facts_known_subset[len(facts_known_subset) - i - 1].id == fact_objs[i].id
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_from_gt(hosts, fact_scans):
|
||||
epoch = timezone.now()
|
||||
ts_from = epoch
|
||||
|
||||
(facts_known, fact_objs) = setup_common(hosts, fact_scans, ts_from=ts_from, ts_to=None, epoch=epoch)
|
||||
facts_known_subset = filter(lambda x: x.timestamp > ts_from, facts_known)
|
||||
|
||||
assert 2 == len(facts_known_subset)
|
||||
assert 2 == len(fact_objs)
|
||||
|
||||
for i in xrange(0, len(fact_objs)):
|
||||
assert facts_known_subset[len(facts_known_subset) - i - 1].id == fact_objs[i].id
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_no_ts(hosts, fact_scans):
|
||||
epoch = timezone.now()
|
||||
|
||||
(facts_known, fact_objs) = setup_common(hosts, fact_scans, ts_from=None, ts_to=None, epoch=epoch)
|
||||
assert 3 == len(facts_known)
|
||||
assert 3 == len(fact_objs)
|
||||
|
||||
for i in xrange(len(facts_known) - 1, 0):
|
||||
assert facts_known[i].id == fact_objs[i].id
|
||||
|
||||
|
||||
2922
awx/main/tests/functional/packages.json
Normal file
2922
awx/main/tests/functional/packages.json
Normal file
File diff suppressed because it is too large
Load Diff
697
awx/main/tests/functional/services.json
Normal file
697
awx/main/tests/functional/services.json
Normal file
@@ -0,0 +1,697 @@
|
||||
[
|
||||
{
|
||||
"source": "sysv",
|
||||
"state": "running",
|
||||
"name": "iprdump"
|
||||
},
|
||||
{
|
||||
"source": "sysv",
|
||||
"state": "running",
|
||||
"name": "iprinit"
|
||||
},
|
||||
{
|
||||
"source": "sysv",
|
||||
"state": "running",
|
||||
"name": "iprupdate"
|
||||
},
|
||||
{
|
||||
"source": "sysv",
|
||||
"state": "stopped",
|
||||
"name": "netconsole"
|
||||
},
|
||||
{
|
||||
"source": "sysv",
|
||||
"state": "running",
|
||||
"name": "network"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "arp-ethers.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "running",
|
||||
"name": "auditd.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "autovt@.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "running",
|
||||
"name": "avahi-daemon.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "blk-availability.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "brandbot.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "console-getty.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "console-shell.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "cpupower.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "running",
|
||||
"name": "crond.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "running",
|
||||
"name": "dbus-org.fedoraproject.FirewallD1.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "running",
|
||||
"name": "dbus-org.freedesktop.Avahi.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "dbus-org.freedesktop.hostname1.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "dbus-org.freedesktop.locale1.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "dbus-org.freedesktop.login1.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "dbus-org.freedesktop.machine1.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "running",
|
||||
"name": "dbus-org.freedesktop.NetworkManager.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "running",
|
||||
"name": "dbus-org.freedesktop.nm-dispatcher.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "dbus-org.freedesktop.timedate1.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "dbus.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "debug-shell.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "running",
|
||||
"name": "dhcpd.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "dhcpd6.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "dhcrelay.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "dm-event.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "dnsmasq.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "dracut-cmdline.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "dracut-initqueue.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "dracut-mount.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "dracut-pre-mount.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "dracut-pre-pivot.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "dracut-pre-trigger.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "dracut-pre-udev.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "dracut-shutdown.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "ebtables.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "emergency.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "running",
|
||||
"name": "firewalld.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "running",
|
||||
"name": "getty@.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "halt-local.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "initrd-cleanup.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "initrd-parse-etc.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "initrd-switch-root.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "initrd-udevadm-cleanup-db.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "running",
|
||||
"name": "irqbalance.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "running",
|
||||
"name": "kdump.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "kmod-static-nodes.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "lvm2-lvmetad.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "running",
|
||||
"name": "lvm2-monitor.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "lvm2-pvscan@.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "messagebus.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "running",
|
||||
"name": "microcode.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "named-setup-rndc.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "running",
|
||||
"name": "named.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "running",
|
||||
"name": "NetworkManager-dispatcher.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "NetworkManager-wait-online.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "running",
|
||||
"name": "NetworkManager.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "running",
|
||||
"name": "ntpd.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "ntpdate.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "running",
|
||||
"name": "openvpn@.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "plymouth-halt.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "plymouth-kexec.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "plymouth-poweroff.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "plymouth-quit-wait.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "plymouth-quit.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "plymouth-read-write.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "plymouth-reboot.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "plymouth-start.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "plymouth-switch-root.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "polkit.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "running",
|
||||
"name": "postfix.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "quotaon.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "rc-local.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "rdisc.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "rescue.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "rhel-autorelabel-mark.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "rhel-autorelabel.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "rhel-configure.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "rhel-dmesg.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "rhel-domainname.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "rhel-import-state.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "rhel-loadmodules.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "rhel-readonly.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "running",
|
||||
"name": "rsyslog.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "serial-getty@.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "sshd-keygen.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "running",
|
||||
"name": "sshd.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "sshd@.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "systemd-ask-password-console.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "systemd-ask-password-plymouth.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "systemd-ask-password-wall.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "systemd-backlight@.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "systemd-binfmt.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "systemd-fsck-root.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "systemd-fsck@.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "systemd-halt.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "systemd-hibernate.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "systemd-hostnamed.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "systemd-hybrid-sleep.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "systemd-initctl.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "systemd-journal-flush.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "systemd-journald.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "systemd-kexec.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "systemd-localed.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "systemd-logind.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "systemd-machined.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "systemd-modules-load.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "systemd-nspawn@.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "systemd-poweroff.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "systemd-quotacheck.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "systemd-random-seed.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "running",
|
||||
"name": "systemd-readahead-collect.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "systemd-readahead-done.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "running",
|
||||
"name": "systemd-readahead-drop.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "running",
|
||||
"name": "systemd-readahead-replay.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "systemd-reboot.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "systemd-remount-fs.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "systemd-shutdownd.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "systemd-suspend.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "systemd-sysctl.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "systemd-timedated.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "systemd-tmpfiles-clean.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "systemd-tmpfiles-setup-dev.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "systemd-tmpfiles-setup.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "systemd-udev-settle.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "systemd-udev-trigger.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "systemd-udevd.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "systemd-update-utmp-runlevel.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "systemd-update-utmp.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "systemd-user-sessions.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "systemd-vconsole-setup.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "teamd@.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "running",
|
||||
"name": "tuned.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "running",
|
||||
"name": "vmtoolsd.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "wpa_supplicant.service"
|
||||
}
|
||||
]
|
||||
115
awx/main/tests/functional/test_notifications.py
Normal file
115
awx/main/tests/functional/test_notifications.py
Normal file
@@ -0,0 +1,115 @@
|
||||
import mock
|
||||
import pytest
|
||||
|
||||
from awx.main.models.notifications import Notifier
|
||||
from awx.main.models.inventory import Inventory, Group
|
||||
from awx.main.models.jobs import JobTemplate
|
||||
|
||||
from django.core.urlresolvers import reverse
|
||||
|
||||
@pytest.fixture
|
||||
def notifier():
|
||||
return Notifier.objects.create(name="test-notification",
|
||||
notification_type="webhook",
|
||||
notification_configuration=dict(url="http://localhost",
|
||||
headers={"Test": "Header"}))
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_get_notifier_list(get, user, notifier):
|
||||
url = reverse('api:notifier_list')
|
||||
response = get(url, user('admin', True))
|
||||
assert response.status_code == 200
|
||||
assert len(response.data['results']) == 1
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_basic_parameterization(get, post, user, organization):
|
||||
u = user('admin-poster', True)
|
||||
url = reverse('api:notifier_list')
|
||||
response = post(url,
|
||||
dict(name="test-webhook",
|
||||
description="test webhook",
|
||||
organization=1,
|
||||
notification_type="webhook",
|
||||
notification_configuration=dict(url="http://localhost",
|
||||
headers={"Test": "Header"})),
|
||||
u)
|
||||
assert response.status_code == 201
|
||||
url = reverse('api:notifier_detail', args=(response.data['id'],))
|
||||
response = get(url, u)
|
||||
assert 'related' in response.data
|
||||
assert 'organization' in response.data['related']
|
||||
assert 'summary_fields' in response.data
|
||||
assert 'organization' in response.data['summary_fields']
|
||||
assert 'notifications' in response.data['related']
|
||||
assert 'notification_configuration' in response.data
|
||||
assert 'url' in response.data['notification_configuration']
|
||||
assert 'headers' in response.data['notification_configuration']
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_encrypted_subfields(get, post, user, organization):
|
||||
def assert_send(self, messages):
|
||||
assert self.account_token == "shouldhide"
|
||||
return 1
|
||||
u = user('admin-poster', True)
|
||||
url = reverse('api:notifier_list')
|
||||
response = post(url,
|
||||
dict(name="test-twilio",
|
||||
description="test twilio",
|
||||
organization=organization.id,
|
||||
notification_type="twilio",
|
||||
notification_configuration=dict(account_sid="dummy",
|
||||
account_token="shouldhide",
|
||||
from_number="+19999999999",
|
||||
to_numbers=["9998887777"])),
|
||||
u)
|
||||
assert response.status_code == 201
|
||||
notifier_actual = Notifier.objects.get(id=response.data['id'])
|
||||
url = reverse('api:notifier_detail', args=(response.data['id'],))
|
||||
response = get(url, u)
|
||||
assert response.data['notification_configuration']['account_token'] == "$encrypted$"
|
||||
with mock.patch.object(notifier_actual.notification_class, "send_messages", assert_send):
|
||||
notifier_actual.send("Test", {'body': "Test"})
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_inherited_notifiers(get, post, user, organization, project):
|
||||
u = user('admin-poster', True)
|
||||
url = reverse('api:notifier_list')
|
||||
notifiers = []
|
||||
for nfiers in xrange(3):
|
||||
response = post(url,
|
||||
dict(name="test-webhook-{}".format(nfiers),
|
||||
description="test webhook {}".format(nfiers),
|
||||
organization=1,
|
||||
notification_type="webhook",
|
||||
notification_configuration=dict(url="http://localhost",
|
||||
headers={"Test": "Header"})),
|
||||
u)
|
||||
assert response.status_code == 201
|
||||
notifiers.append(response.data['id'])
|
||||
organization.projects.add(project)
|
||||
i = Inventory.objects.create(name='test', organization=organization)
|
||||
i.save()
|
||||
g = Group.objects.create(name='test', inventory=i)
|
||||
g.save()
|
||||
jt = JobTemplate.objects.create(name='test', inventory=i, project=project, playbook='debug.yml')
|
||||
jt.save()
|
||||
url = reverse('api:organization_notifiers_any_list', args=(organization.id,))
|
||||
response = post(url, dict(id=notifiers[0]), u)
|
||||
assert response.status_code == 204
|
||||
url = reverse('api:project_notifiers_any_list', args=(project.id,))
|
||||
response = post(url, dict(id=notifiers[1]), u)
|
||||
assert response.status_code == 204
|
||||
url = reverse('api:job_template_notifiers_any_list', args=(jt.id,))
|
||||
response = post(url, dict(id=notifiers[2]), u)
|
||||
assert response.status_code == 204
|
||||
assert len(jt.notifiers['any']) == 3
|
||||
assert len(project.notifiers['any']) == 2
|
||||
assert len(g.inventory_source.notifiers['any']) == 1
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_notifier_merging(get, post, user, organization, project, notifier):
|
||||
user('admin-poster', True)
|
||||
organization.projects.add(project)
|
||||
organization.notifiers_any.add(notifier)
|
||||
project.notifiers_any.add(notifier)
|
||||
assert len(project.notifiers['any']) == 1
|
||||
@@ -2,7 +2,7 @@ import mock # noqa
|
||||
import pytest
|
||||
|
||||
from django.core.urlresolvers import reverse
|
||||
from awx.main.models.rbac import Role
|
||||
from awx.main.models.rbac import Role, ROLE_SINGLETON_SYSTEM_ADMINISTRATOR
|
||||
|
||||
def mock_feature_enabled(feature, bypass_database=None):
|
||||
return True
|
||||
@@ -24,39 +24,55 @@ def test_get_roles_list_admin(organization, get, admin):
|
||||
assert roles['count'] > 0
|
||||
|
||||
@pytest.mark.django_db
|
||||
@pytest.mark.skipif(True, reason='Unimplemented')
|
||||
def test_get_roles_list_user(organization, get, user):
|
||||
def test_get_roles_list_user(organization, inventory, team, get, user):
|
||||
'Users can see all roles they have access to, but not all roles'
|
||||
assert False
|
||||
this_user = user('user-test_get_roles_list_user')
|
||||
organization.member_role.members.add(this_user)
|
||||
custom_role = Role.objects.create(name='custom_role-test_get_roles_list_user')
|
||||
organization.member_role.children.add(custom_role)
|
||||
|
||||
url = reverse('api:role_list')
|
||||
response = get(url, this_user)
|
||||
assert response.status_code == 200
|
||||
roles = response.data
|
||||
assert roles['count'] > 0
|
||||
assert roles['count'] == len(roles['results']) # just to make sure the tests below are valid
|
||||
|
||||
role_hash = {}
|
||||
|
||||
for r in roles['results']:
|
||||
role_hash[r['id']] = r
|
||||
|
||||
assert Role.singleton(ROLE_SINGLETON_SYSTEM_ADMINISTRATOR).id in role_hash
|
||||
assert organization.admin_role.id in role_hash
|
||||
assert organization.member_role.id in role_hash
|
||||
assert this_user.resource.admin_role.id in role_hash
|
||||
assert custom_role.id in role_hash
|
||||
|
||||
assert inventory.admin_role.id not in role_hash
|
||||
assert team.member_role.id not in role_hash
|
||||
|
||||
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@pytest.mark.skipif(True, reason='Waiting on custom role requirements')
|
||||
def test_create_role(post, admin):
|
||||
'Admins can create new roles'
|
||||
#u = user('admin', True)
|
||||
def test_cant_create_role(post, admin):
|
||||
"Ensure we can't create new roles through the api"
|
||||
# Some day we might want to do this, but until that is speced out, lets
|
||||
# ensure we don't slip up and allow this implicitly through some helper or
|
||||
# another
|
||||
response = post(reverse('api:role_list'), {'name': 'New Role'}, admin)
|
||||
assert response.status_code == 201
|
||||
assert response.status_code == 405
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@pytest.mark.skipif(True, reason='Waiting on custom role requirements')
|
||||
def test_delete_role(post, admin):
|
||||
'Admins can delete a custom role'
|
||||
assert False
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@pytest.mark.skipif(True, reason='Waiting on custom role requirements')
|
||||
def test_user_create_role(organization, get, user):
|
||||
'User can create custom roles'
|
||||
assert False
|
||||
|
||||
@pytest.mark.django_db
|
||||
@pytest.mark.skipif(True, reason='Waiting on custom role requirements')
|
||||
def test_user_delete_role(organization, get, user):
|
||||
'User can delete their custom roles, but not any old row'
|
||||
assert False
|
||||
def test_cant_delete_role(delete, admin):
|
||||
"Ensure we can't delete roles through the api"
|
||||
# Some day we might want to do this, but until that is speced out, lets
|
||||
# ensure we don't slip up and allow this implicitly through some helper or
|
||||
# another
|
||||
response = delete(reverse('api:role_detail', args=(admin.resource.admin_role.id,)), admin)
|
||||
assert response.status_code == 405
|
||||
|
||||
|
||||
|
||||
@@ -72,6 +88,53 @@ def test_get_user_roles_list(get, admin):
|
||||
roles = response.data
|
||||
assert roles['count'] > 0 # 'System Administrator' role if nothing else
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_user_view_other_user_roles(organization, inventory, team, get, alice, bob):
|
||||
'Users can see roles for other users, but only the roles that that user has access to see as well'
|
||||
organization.member_role.members.add(alice)
|
||||
organization.admins.add(bob)
|
||||
custom_role = Role.objects.create(name='custom_role-test_user_view_admin_roles_list')
|
||||
organization.member_role.children.add(custom_role)
|
||||
team.users.add(bob)
|
||||
|
||||
# alice and bob are in the same org and can see some child role of that org.
|
||||
# Bob is an org admin, alice can see this.
|
||||
# Bob is in a team that alice is not, alice cannot see that bob is a member of that team.
|
||||
|
||||
url = reverse('api:user_roles_list', args=(bob.id,))
|
||||
response = get(url, alice)
|
||||
assert response.status_code == 200
|
||||
roles = response.data
|
||||
assert roles['count'] > 0
|
||||
assert roles['count'] == len(roles['results']) # just to make sure the tests below are valid
|
||||
|
||||
role_hash = {}
|
||||
for r in roles['results']:
|
||||
role_hash[r['id']] = r['name']
|
||||
|
||||
assert organization.admin_role.id in role_hash
|
||||
assert custom_role.id not in role_hash # doesn't show up in the user roles list, not an explicit grant
|
||||
assert Role.singleton(ROLE_SINGLETON_SYSTEM_ADMINISTRATOR).id not in role_hash
|
||||
assert inventory.admin_role.id not in role_hash
|
||||
assert team.member_role.id not in role_hash # alice can't see this
|
||||
|
||||
# again but this time alice is part of the team, and should be able to see the team role
|
||||
team.users.add(alice)
|
||||
response = get(url, alice)
|
||||
assert response.status_code == 200
|
||||
roles = response.data
|
||||
assert roles['count'] > 0
|
||||
assert roles['count'] == len(roles['results']) # just to make sure the tests below are valid
|
||||
|
||||
role_hash = {}
|
||||
for r in roles['results']:
|
||||
role_hash[r['id']] = r['name']
|
||||
|
||||
assert team.member_role.id in role_hash # Alice can now see this
|
||||
|
||||
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_add_role_to_user(role, post, admin):
|
||||
assert admin.roles.filter(id=role.id).count() == 0
|
||||
@@ -165,15 +228,15 @@ def test_get_role(get, admin, role):
|
||||
def test_put_role(put, admin, role):
|
||||
url = reverse('api:role_detail', args=(role.id,))
|
||||
response = put(url, {'name': 'Some new name'}, admin)
|
||||
assert response.status_code == 200
|
||||
r = Role.objects.get(id=role.id)
|
||||
assert r.name == 'Some new name'
|
||||
assert response.status_code == 405
|
||||
#r = Role.objects.get(id=role.id)
|
||||
#assert r.name == 'Some new name'
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_put_role_access_denied(put, alice, admin, role):
|
||||
url = reverse('api:role_detail', args=(role.id,))
|
||||
response = put(url, {'name': 'Some new name'}, alice)
|
||||
assert response.status_code == 403
|
||||
assert response.status_code == 403 or response.status_code == 405
|
||||
|
||||
|
||||
#
|
||||
@@ -204,6 +267,67 @@ def test_remove_user_to_role(post, admin, role):
|
||||
post(url, {'disassociate': True, 'id': admin.id}, admin)
|
||||
assert role.members.filter(id=admin.id).count() == 0
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_org_admin_add_user_to_job_template(post, organization, check_jobtemplate, user):
|
||||
'Tests that a user with permissions to assign/revoke membership to a particular role can do so'
|
||||
org_admin = user('org-admin')
|
||||
joe = user('joe')
|
||||
organization.admins.add(org_admin)
|
||||
|
||||
assert check_jobtemplate.accessible_by(org_admin, {'write': True}) is True
|
||||
assert check_jobtemplate.accessible_by(joe, {'execute': True}) is False
|
||||
|
||||
post(reverse('api:role_users_list', args=(check_jobtemplate.executor_role.id,)), {'id': joe.id}, org_admin)
|
||||
|
||||
assert check_jobtemplate.accessible_by(joe, {'execute': True}) is True
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_org_admin_remove_user_to_job_template(post, organization, check_jobtemplate, user):
|
||||
'Tests that a user with permissions to assign/revoke membership to a particular role can do so'
|
||||
org_admin = user('org-admin')
|
||||
joe = user('joe')
|
||||
organization.admins.add(org_admin)
|
||||
check_jobtemplate.executor_role.members.add(joe)
|
||||
|
||||
assert check_jobtemplate.accessible_by(org_admin, {'write': True}) is True
|
||||
assert check_jobtemplate.accessible_by(joe, {'execute': True}) is True
|
||||
|
||||
post(reverse('api:role_users_list', args=(check_jobtemplate.executor_role.id,)), {'disassociate': True, 'id': joe.id}, org_admin)
|
||||
|
||||
assert check_jobtemplate.accessible_by(joe, {'execute': True}) is False
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_user_fail_to_add_user_to_job_template(post, organization, check_jobtemplate, user):
|
||||
'Tests that a user without permissions to assign/revoke membership to a particular role cannot do so'
|
||||
rando = user('rando')
|
||||
joe = user('joe')
|
||||
|
||||
assert check_jobtemplate.accessible_by(rando, {'write': True}) is False
|
||||
assert check_jobtemplate.accessible_by(joe, {'execute': True}) is False
|
||||
|
||||
res = post(reverse('api:role_users_list', args=(check_jobtemplate.executor_role.id,)), {'id': joe.id}, rando)
|
||||
assert res.status_code == 403
|
||||
|
||||
assert check_jobtemplate.accessible_by(joe, {'execute': True}) is False
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_user_fail_to_remove_user_to_job_template(post, organization, check_jobtemplate, user):
|
||||
'Tests that a user without permissions to assign/revoke membership to a particular role cannot do so'
|
||||
rando = user('rando')
|
||||
joe = user('joe')
|
||||
check_jobtemplate.executor_role.members.add(joe)
|
||||
|
||||
assert check_jobtemplate.accessible_by(rando, {'write': True}) is False
|
||||
assert check_jobtemplate.accessible_by(joe, {'execute': True}) is True
|
||||
|
||||
res = post(reverse('api:role_users_list', args=(check_jobtemplate.executor_role.id,)), {'disassociate': True, 'id': joe.id}, rando)
|
||||
assert res.status_code == 403
|
||||
|
||||
assert check_jobtemplate.accessible_by(joe, {'execute': True}) is True
|
||||
|
||||
|
||||
#
|
||||
# /roles/<id>/teams/
|
||||
#
|
||||
@@ -252,22 +376,6 @@ def test_role_parents(get, team, admin, role):
|
||||
assert response.data['count'] == 1
|
||||
assert response.data['results'][0]['id'] == team.member_role.id
|
||||
|
||||
@pytest.mark.django_db
|
||||
@pytest.mark.skipif(True, reason='Waiting on custom role requirements')
|
||||
def test_role_add_parent(post, team, admin, role):
|
||||
assert role.parents.count() == 0
|
||||
url = reverse('api:role_parents_list', args=(role.id,))
|
||||
post(url, {'id': team.member_role.id}, admin)
|
||||
assert role.parents.count() == 1
|
||||
|
||||
@pytest.mark.django_db
|
||||
@pytest.mark.skipif(True, reason='Waiting on custom role requirements')
|
||||
def test_role_remove_parent(post, team, admin, role):
|
||||
role.parents.add(team.member_role)
|
||||
assert role.parents.count() == 1
|
||||
url = reverse('api:role_parents_list', args=(role.id,))
|
||||
post(url, {'disassociate': True, 'id': team.member_role.id}, admin)
|
||||
assert role.parents.count() == 0
|
||||
|
||||
#
|
||||
# /roles/<id>/children/
|
||||
@@ -282,22 +390,6 @@ def test_role_children(get, team, admin, role):
|
||||
assert response.data['count'] == 1
|
||||
assert response.data['results'][0]['id'] == role.id
|
||||
|
||||
@pytest.mark.django_db
|
||||
@pytest.mark.skipif(True, reason='Waiting on custom role requirements')
|
||||
def test_role_add_children(post, team, admin, role):
|
||||
assert role.children.count() == 0
|
||||
url = reverse('api:role_children_list', args=(role.id,))
|
||||
post(url, {'id': team.member_role.id}, admin)
|
||||
assert role.children.count() == 1
|
||||
|
||||
@pytest.mark.django_db
|
||||
@pytest.mark.skipif(True, reason='Waiting on custom role requirements')
|
||||
def test_role_remove_children(post, team, admin, role):
|
||||
role.children.add(team.member_role)
|
||||
assert role.children.count() == 1
|
||||
url = reverse('api:role_children_list', args=(role.id,))
|
||||
post(url, {'disassociate': True, 'id': team.member_role.id}, admin)
|
||||
assert role.children.count() == 0
|
||||
|
||||
|
||||
|
||||
|
||||
@@ -138,3 +138,32 @@ def test_content_object(user):
|
||||
assert org.resource.content_object.id == org.id
|
||||
assert org.admin_role.content_object.id == org.id
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_hierarchy_rebuilding():
|
||||
'Tests some subdtle cases around role hierarchy rebuilding'
|
||||
|
||||
X = Role.objects.create(name='X')
|
||||
A = Role.objects.create(name='A')
|
||||
B = Role.objects.create(name='B')
|
||||
C = Role.objects.create(name='C')
|
||||
D = Role.objects.create(name='D')
|
||||
|
||||
A.children.add(B)
|
||||
A.children.add(D)
|
||||
B.children.add(C)
|
||||
C.children.add(D)
|
||||
|
||||
assert A.is_ancestor_of(D)
|
||||
assert X.is_ancestor_of(D) is False
|
||||
|
||||
X.children.add(A)
|
||||
|
||||
assert X.is_ancestor_of(D) is True
|
||||
|
||||
X.children.remove(A)
|
||||
|
||||
# This can be the stickler, the rebuilder needs to ensure that D's role
|
||||
# hierarchy is built after both A and C are updated.
|
||||
assert X.is_ancestor_of(D) is False
|
||||
|
||||
|
||||
|
||||
@@ -128,8 +128,8 @@ class RunAdHocCommandTest(BaseAdHocCommandTest):
|
||||
self.assertFalse(ad_hoc_command.passwords_needed_to_start)
|
||||
self.assertTrue(ad_hoc_command.signal_start())
|
||||
ad_hoc_command = AdHocCommand.objects.get(pk=ad_hoc_command.pk)
|
||||
self.check_job_result(ad_hoc_command, 'failed')
|
||||
self.check_ad_hoc_command_events(ad_hoc_command, 'unreachable')
|
||||
self.check_job_result(ad_hoc_command, 'successful')
|
||||
self.check_ad_hoc_command_events(ad_hoc_command, 'skipped')
|
||||
|
||||
@mock.patch('awx.main.tasks.BaseTask.run_pexpect', return_value=('canceled', 0))
|
||||
def test_cancel_ad_hoc_command(self, ignore):
|
||||
|
||||
@@ -1,238 +0,0 @@
|
||||
# Copyright (c) 2015 Ansible, Inc.
|
||||
# All Rights Reserved
|
||||
|
||||
# Python
|
||||
from datetime import datetime
|
||||
from dateutil.relativedelta import relativedelta
|
||||
import mock
|
||||
|
||||
#Django
|
||||
from django.core.management.base import CommandError
|
||||
|
||||
# AWX
|
||||
from awx.main.tests.base import BaseTest
|
||||
from awx.fact.tests.base import MongoDBRequired, FactScanBuilder, TEST_FACT_PACKAGES, TEST_FACT_ANSIBLE, TEST_FACT_SERVICES
|
||||
from command_base import BaseCommandMixin
|
||||
from awx.main.management.commands.cleanup_facts import Command, CleanupFacts
|
||||
from awx.fact.models.fact import * # noqa
|
||||
|
||||
__all__ = ['CommandTest','CleanupFactsUnitTest', 'CleanupFactsCommandFunctionalTest']
|
||||
|
||||
class CleanupFactsCommandFunctionalTest(BaseCommandMixin, BaseTest, MongoDBRequired):
|
||||
def setUp(self):
|
||||
super(CleanupFactsCommandFunctionalTest, self).setUp()
|
||||
self.create_test_license_file()
|
||||
self.builder = FactScanBuilder()
|
||||
self.builder.add_fact('ansible', TEST_FACT_ANSIBLE)
|
||||
|
||||
def test_invoke_zero_ok(self):
|
||||
self.builder.set_epoch(datetime(year=2015, day=2, month=1, microsecond=0))
|
||||
self.builder.build(scan_count=20, host_count=10)
|
||||
|
||||
result, stdout, stderr = self.run_command('cleanup_facts', granularity='2y', older_than='1d')
|
||||
self.assertEqual(stdout, 'Deleted %s facts.\n' % ((200 / 2)))
|
||||
|
||||
def test_invoke_zero_deleted(self):
|
||||
result, stdout, stderr = self.run_command('cleanup_facts', granularity='1w',older_than='5d')
|
||||
self.assertEqual(stdout, 'Deleted 0 facts.\n')
|
||||
|
||||
def test_invoke_all_deleted(self):
|
||||
self.builder.build(scan_count=20, host_count=10)
|
||||
|
||||
result, stdout, stderr = self.run_command('cleanup_facts', granularity='0d', older_than='0d')
|
||||
self.assertEqual(stdout, 'Deleted 200 facts.\n')
|
||||
|
||||
def test_invoke_params_required(self):
|
||||
result, stdout, stderr = self.run_command('cleanup_facts')
|
||||
self.assertIsInstance(result, CommandError)
|
||||
self.assertEqual(str(result), 'Both --granularity and --older_than are required.')
|
||||
|
||||
def test_module(self):
|
||||
self.builder.add_fact('packages', TEST_FACT_PACKAGES)
|
||||
self.builder.add_fact('services', TEST_FACT_SERVICES)
|
||||
self.builder.build(scan_count=5, host_count=5)
|
||||
|
||||
result, stdout, stderr = self.run_command('cleanup_facts', granularity='0d', older_than='0d', module='packages')
|
||||
self.assertEqual(stdout, 'Deleted 25 facts.\n')
|
||||
|
||||
class CommandTest(BaseTest):
|
||||
def setUp(self):
|
||||
super(CommandTest, self).setUp()
|
||||
self.create_test_license_file()
|
||||
|
||||
@mock.patch('awx.main.management.commands.cleanup_facts.CleanupFacts.run')
|
||||
def test_parameters_ok(self, run):
|
||||
|
||||
kv = {
|
||||
'older_than': '1d',
|
||||
'granularity': '1d',
|
||||
'module': None,
|
||||
}
|
||||
cmd = Command()
|
||||
cmd.handle(None, **kv)
|
||||
run.assert_called_once_with(relativedelta(days=1), relativedelta(days=1), module=None)
|
||||
|
||||
def test_string_time_to_timestamp_ok(self):
|
||||
kvs = [
|
||||
{
|
||||
'time': '2w',
|
||||
'timestamp': relativedelta(weeks=2),
|
||||
'msg': '2 weeks',
|
||||
},
|
||||
{
|
||||
'time': '23d',
|
||||
'timestamp': relativedelta(days=23),
|
||||
'msg': '23 days',
|
||||
},
|
||||
{
|
||||
'time': '11m',
|
||||
'timestamp': relativedelta(months=11),
|
||||
'msg': '11 months',
|
||||
},
|
||||
{
|
||||
'time': '14y',
|
||||
'timestamp': relativedelta(years=14),
|
||||
'msg': '14 years',
|
||||
},
|
||||
]
|
||||
for kv in kvs:
|
||||
cmd = Command()
|
||||
res = cmd.string_time_to_timestamp(kv['time'])
|
||||
self.assertEqual(kv['timestamp'], res, "%s should convert to %s" % (kv['time'], kv['msg']))
|
||||
|
||||
def test_string_time_to_timestamp_invalid(self):
|
||||
kvs = [
|
||||
{
|
||||
'time': '2weeks',
|
||||
'msg': 'weeks instead of w',
|
||||
},
|
||||
{
|
||||
'time': '2days',
|
||||
'msg': 'days instead of d',
|
||||
},
|
||||
{
|
||||
'time': '23',
|
||||
'msg': 'no unit specified',
|
||||
},
|
||||
{
|
||||
'time': None,
|
||||
'msg': 'no value specified',
|
||||
},
|
||||
{
|
||||
'time': 'zigzag',
|
||||
'msg': 'random string specified',
|
||||
},
|
||||
]
|
||||
for kv in kvs:
|
||||
cmd = Command()
|
||||
res = cmd.string_time_to_timestamp(kv['time'])
|
||||
self.assertIsNone(res, kv['msg'])
|
||||
|
||||
# Mock run() just in case, but it should never get called because an error should be thrown
|
||||
@mock.patch('awx.main.management.commands.cleanup_facts.CleanupFacts.run')
|
||||
def test_parameters_fail(self, run):
|
||||
kvs = [
|
||||
{
|
||||
'older_than': '1week',
|
||||
'granularity': '1d',
|
||||
'msg': 'Invalid older_than param value',
|
||||
},
|
||||
{
|
||||
'older_than': '1d',
|
||||
'granularity': '1year',
|
||||
'msg': 'Invalid granularity param value',
|
||||
}
|
||||
]
|
||||
for kv in kvs:
|
||||
cmd = Command()
|
||||
with self.assertRaises(CommandError):
|
||||
cmd.handle(None, older_than=kv['older_than'], granularity=kv['granularity'])
|
||||
|
||||
class CleanupFactsUnitTest(BaseCommandMixin, BaseTest, MongoDBRequired):
|
||||
def setUp(self):
|
||||
super(CleanupFactsUnitTest, self).setUp()
|
||||
|
||||
self.builder = FactScanBuilder()
|
||||
self.builder.add_fact('ansible', TEST_FACT_ANSIBLE)
|
||||
self.builder.add_fact('packages', TEST_FACT_PACKAGES)
|
||||
self.builder.build(scan_count=20, host_count=10)
|
||||
|
||||
'''
|
||||
Create 10 hosts with 40 facts each. After cleanup, there should be 20 facts for each host.
|
||||
Then ensure the correct facts are deleted.
|
||||
'''
|
||||
def test_cleanup_logic(self):
|
||||
cleanup_facts = CleanupFacts()
|
||||
fact_oldest = FactVersion.objects.all().order_by('timestamp').first()
|
||||
granularity = relativedelta(years=2)
|
||||
|
||||
deleted_count = cleanup_facts.cleanup(self.builder.get_timestamp(0), granularity)
|
||||
self.assertEqual(deleted_count, 2 * (self.builder.get_scan_count() * self.builder.get_host_count()) / 2)
|
||||
|
||||
# Check the number of facts per host
|
||||
for host in self.builder.get_hosts():
|
||||
count = FactVersion.objects.filter(host=host).count()
|
||||
scan_count = (2 * self.builder.get_scan_count()) / 2
|
||||
self.assertEqual(count, scan_count)
|
||||
|
||||
count = Fact.objects.filter(host=host).count()
|
||||
self.assertEqual(count, scan_count)
|
||||
|
||||
# Ensure that only 2 facts (ansible and packages) exists per granularity time
|
||||
date_pivot = self.builder.get_timestamp(0)
|
||||
for host in self.builder.get_hosts():
|
||||
while date_pivot > fact_oldest.timestamp:
|
||||
date_pivot_next = date_pivot - granularity
|
||||
kv = {
|
||||
'timestamp__lte': date_pivot,
|
||||
'timestamp__gt': date_pivot_next,
|
||||
'host': host,
|
||||
}
|
||||
count = FactVersion.objects.filter(**kv).count()
|
||||
self.assertEqual(count, 2, "should only be 2 FactVersion per the 2 year granularity")
|
||||
count = Fact.objects.filter(**kv).count()
|
||||
self.assertEqual(count, 2, "should only be 2 Fact per the 2 year granularity")
|
||||
date_pivot = date_pivot_next
|
||||
|
||||
'''
|
||||
Create 10 hosts with 40 facts each. After cleanup, there should be 30 facts for each host.
|
||||
Then ensure the correct facts are deleted.
|
||||
'''
|
||||
def test_cleanup_module(self):
|
||||
cleanup_facts = CleanupFacts()
|
||||
fact_oldest = FactVersion.objects.all().order_by('timestamp').first()
|
||||
granularity = relativedelta(years=2)
|
||||
|
||||
deleted_count = cleanup_facts.cleanup(self.builder.get_timestamp(0), granularity, module='ansible')
|
||||
self.assertEqual(deleted_count, (self.builder.get_scan_count() * self.builder.get_host_count()) / 2)
|
||||
|
||||
# Check the number of facts per host
|
||||
for host in self.builder.get_hosts():
|
||||
count = FactVersion.objects.filter(host=host).count()
|
||||
self.assertEqual(count, 30)
|
||||
|
||||
count = Fact.objects.filter(host=host).count()
|
||||
self.assertEqual(count, 30)
|
||||
|
||||
# Ensure that only 1 ansible fact exists per granularity time
|
||||
date_pivot = self.builder.get_timestamp(0)
|
||||
for host in self.builder.get_hosts():
|
||||
while date_pivot > fact_oldest.timestamp:
|
||||
date_pivot_next = date_pivot - granularity
|
||||
kv = {
|
||||
'timestamp__lte': date_pivot,
|
||||
'timestamp__gt': date_pivot_next,
|
||||
'host': host,
|
||||
'module': 'ansible',
|
||||
}
|
||||
count = FactVersion.objects.filter(**kv).count()
|
||||
self.assertEqual(count, 1)
|
||||
count = Fact.objects.filter(**kv).count()
|
||||
self.assertEqual(count, 1)
|
||||
date_pivot = date_pivot_next
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
File diff suppressed because one or more lines are too long
@@ -1,242 +0,0 @@
|
||||
# Copyright (c) 2015 Ansible, Inc.
|
||||
# All Rights Reserved
|
||||
|
||||
# Python
|
||||
import unittest2 as unittest
|
||||
|
||||
# Django
|
||||
from django.core.urlresolvers import reverse
|
||||
|
||||
# AWX
|
||||
from awx.main.utils import timestamp_apiformat
|
||||
from awx.main.models import * # noqa
|
||||
from awx.main.tests.base import BaseLiveServerTest
|
||||
from awx.fact.models import * # noqa
|
||||
from awx.fact.tests.base import BaseFactTestMixin, FactScanBuilder, TEST_FACT_ANSIBLE, TEST_FACT_PACKAGES, TEST_FACT_SERVICES
|
||||
from awx.main.utils import build_url
|
||||
|
||||
__all__ = ['FactVersionApiTest', 'FactViewApiTest', 'SingleFactApiTest',]
|
||||
|
||||
class FactApiBaseTest(BaseLiveServerTest, BaseFactTestMixin):
|
||||
def setUp(self):
|
||||
super(FactApiBaseTest, self).setUp()
|
||||
self.create_test_license_file()
|
||||
self.setup_instances()
|
||||
self.setup_users()
|
||||
self.organization = self.make_organization(self.super_django_user)
|
||||
self.organization.admins.add(self.normal_django_user)
|
||||
self.inventory = self.organization.inventories.create(name='test-inventory', description='description for test-inventory')
|
||||
self.host = self.inventory.hosts.create(name='host.example.com')
|
||||
self.host2 = self.inventory.hosts.create(name='host2.example.com')
|
||||
self.host3 = self.inventory.hosts.create(name='host3.example.com')
|
||||
|
||||
def setup_facts(self, scan_count):
|
||||
self.builder = FactScanBuilder()
|
||||
self.builder.set_inventory_id(self.inventory.pk)
|
||||
self.builder.add_fact('ansible', TEST_FACT_ANSIBLE)
|
||||
self.builder.add_fact('packages', TEST_FACT_PACKAGES)
|
||||
self.builder.add_fact('services', TEST_FACT_SERVICES)
|
||||
self.builder.add_hostname('host.example.com')
|
||||
self.builder.add_hostname('host2.example.com')
|
||||
self.builder.add_hostname('host3.example.com')
|
||||
self.builder.build(scan_count=scan_count, host_count=3)
|
||||
|
||||
self.fact_host = FactHost.objects.get(hostname=self.host.name)
|
||||
|
||||
class FactVersionApiTest(FactApiBaseTest):
|
||||
def check_equal(self, fact_versions, results):
|
||||
def find(element, set1):
|
||||
for e in set1:
|
||||
if all([ e.get(field) == element.get(field) for field in element.keys()]):
|
||||
return e
|
||||
return None
|
||||
|
||||
self.assertEqual(len(results), len(fact_versions))
|
||||
for v in fact_versions:
|
||||
v_dict = {
|
||||
'timestamp': timestamp_apiformat(v.timestamp),
|
||||
'module': v.module
|
||||
}
|
||||
e = find(v_dict, results)
|
||||
self.assertIsNotNone(e, "%s not found in %s" % (v_dict, results))
|
||||
|
||||
def get_list(self, fact_versions, params=None):
|
||||
url = build_url('api:host_fact_versions_list', args=(self.host.pk,), get=params)
|
||||
with self.current_user(self.super_django_user):
|
||||
response = self.get(url, expect=200)
|
||||
|
||||
self.check_equal(fact_versions, response['results'])
|
||||
return response
|
||||
|
||||
def test_permission_list(self):
|
||||
url = reverse('api:host_fact_versions_list', args=(self.host.pk,))
|
||||
with self.current_user('admin'):
|
||||
self.get(url, expect=200)
|
||||
with self.current_user('normal'):
|
||||
self.get(url, expect=200)
|
||||
with self.current_user('other'):
|
||||
self.get(url, expect=403)
|
||||
with self.current_user('nobody'):
|
||||
self.get(url, expect=403)
|
||||
with self.current_user(None):
|
||||
self.get(url, expect=401)
|
||||
|
||||
def test_list_empty(self):
|
||||
url = reverse('api:host_fact_versions_list', args=(self.host.pk,))
|
||||
with self.current_user(self.super_django_user):
|
||||
response = self.get(url, expect=200)
|
||||
self.assertIn('results', response)
|
||||
self.assertIsInstance(response['results'], list)
|
||||
self.assertEqual(len(response['results']), 0)
|
||||
|
||||
def test_list_related_fact_view(self):
|
||||
self.setup_facts(2)
|
||||
url = reverse('api:host_fact_versions_list', args=(self.host.pk,))
|
||||
with self.current_user(self.super_django_user):
|
||||
response = self.get(url, expect=200)
|
||||
for entry in response['results']:
|
||||
self.assertIn('fact_view', entry['related'])
|
||||
self.get(entry['related']['fact_view'], expect=200)
|
||||
|
||||
def test_list(self):
|
||||
self.setup_facts(2)
|
||||
self.get_list(FactVersion.objects.filter(host=self.fact_host))
|
||||
|
||||
def test_list_module(self):
|
||||
self.setup_facts(10)
|
||||
self.get_list(FactVersion.objects.filter(host=self.fact_host, module='packages'), dict(module='packages'))
|
||||
|
||||
def test_list_time_from(self):
|
||||
self.setup_facts(10)
|
||||
|
||||
params = {
|
||||
'from': timestamp_apiformat(self.builder.get_timestamp(1)),
|
||||
}
|
||||
# 'to': timestamp_apiformat(self.builder.get_timestamp(3))
|
||||
fact_versions = FactVersion.objects.filter(host=self.fact_host, timestamp__gt=params['from'])
|
||||
self.get_list(fact_versions, params)
|
||||
|
||||
def test_list_time_to(self):
|
||||
self.setup_facts(10)
|
||||
|
||||
params = {
|
||||
'to': timestamp_apiformat(self.builder.get_timestamp(3))
|
||||
}
|
||||
fact_versions = FactVersion.objects.filter(host=self.fact_host, timestamp__lte=params['to'])
|
||||
self.get_list(fact_versions, params)
|
||||
|
||||
def test_list_time_from_to(self):
|
||||
self.setup_facts(10)
|
||||
|
||||
params = {
|
||||
'from': timestamp_apiformat(self.builder.get_timestamp(1)),
|
||||
'to': timestamp_apiformat(self.builder.get_timestamp(3))
|
||||
}
|
||||
fact_versions = FactVersion.objects.filter(host=self.fact_host, timestamp__gt=params['from'], timestamp__lte=params['to'])
|
||||
self.get_list(fact_versions, params)
|
||||
|
||||
|
||||
class FactViewApiTest(FactApiBaseTest):
|
||||
def check_equal(self, fact_obj, results):
|
||||
fact_dict = {
|
||||
'timestamp': timestamp_apiformat(fact_obj.timestamp),
|
||||
'module': fact_obj.module,
|
||||
'host': {
|
||||
'hostname': fact_obj.host.hostname,
|
||||
'inventory_id': fact_obj.host.inventory_id,
|
||||
'id': str(fact_obj.host.id)
|
||||
},
|
||||
'fact': fact_obj.fact
|
||||
}
|
||||
self.assertEqual(fact_dict, results)
|
||||
|
||||
def test_permission_view(self):
|
||||
url = reverse('api:host_fact_compare_view', args=(self.host.pk,))
|
||||
with self.current_user('admin'):
|
||||
self.get(url, expect=200)
|
||||
with self.current_user('normal'):
|
||||
self.get(url, expect=200)
|
||||
with self.current_user('other'):
|
||||
self.get(url, expect=403)
|
||||
with self.current_user('nobody'):
|
||||
self.get(url, expect=403)
|
||||
with self.current_user(None):
|
||||
self.get(url, expect=401)
|
||||
|
||||
def get_fact(self, fact_obj, params=None):
|
||||
url = build_url('api:host_fact_compare_view', args=(self.host.pk,), get=params)
|
||||
with self.current_user(self.super_django_user):
|
||||
response = self.get(url, expect=200)
|
||||
|
||||
self.check_equal(fact_obj, response)
|
||||
|
||||
def test_view(self):
|
||||
self.setup_facts(2)
|
||||
self.get_fact(Fact.objects.filter(host=self.fact_host, module='ansible').order_by('-timestamp')[0])
|
||||
|
||||
def test_view_module_filter(self):
|
||||
self.setup_facts(2)
|
||||
self.get_fact(Fact.objects.filter(host=self.fact_host, module='services').order_by('-timestamp')[0], dict(module='services'))
|
||||
|
||||
def test_view_time_filter(self):
|
||||
self.setup_facts(6)
|
||||
ts = self.builder.get_timestamp(3)
|
||||
self.get_fact(Fact.objects.filter(host=self.fact_host, module='ansible', timestamp__lte=ts).order_by('-timestamp')[0],
|
||||
dict(datetime=ts))
|
||||
|
||||
|
||||
@unittest.skip("single fact query needs to be updated to use inventory_id attribute on host document")
|
||||
class SingleFactApiTest(FactApiBaseTest):
|
||||
def setUp(self):
|
||||
super(SingleFactApiTest, self).setUp()
|
||||
|
||||
self.group = self.inventory.groups.create(name='test-group')
|
||||
self.group.hosts.add(self.host, self.host2, self.host3)
|
||||
|
||||
def test_permission_list(self):
|
||||
url = reverse('api:host_fact_versions_list', args=(self.host.pk,))
|
||||
with self.current_user('admin'):
|
||||
self.get(url, expect=200)
|
||||
with self.current_user('normal'):
|
||||
self.get(url, expect=200)
|
||||
with self.current_user('other'):
|
||||
self.get(url, expect=403)
|
||||
with self.current_user('nobody'):
|
||||
self.get(url, expect=403)
|
||||
with self.current_user(None):
|
||||
self.get(url, expect=401)
|
||||
|
||||
def _test_related(self, url):
|
||||
with self.current_user(self.super_django_user):
|
||||
response = self.get(url, expect=200)
|
||||
self.assertTrue(len(response['results']) > 0)
|
||||
for entry in response['results']:
|
||||
self.assertIn('single_fact', entry['related'])
|
||||
# Requires fields
|
||||
self.get(entry['related']['single_fact'], expect=400)
|
||||
|
||||
def test_related_host_list(self):
|
||||
self.setup_facts(2)
|
||||
self._test_related(reverse('api:host_list'))
|
||||
|
||||
def test_related_group_list(self):
|
||||
self.setup_facts(2)
|
||||
self._test_related(reverse('api:group_list'))
|
||||
|
||||
def test_related_inventory_list(self):
|
||||
self.setup_facts(2)
|
||||
self._test_related(reverse('api:inventory_list'))
|
||||
|
||||
def test_params(self):
|
||||
self.setup_facts(2)
|
||||
params = {
|
||||
'module': 'packages',
|
||||
'fact_key': 'name',
|
||||
'fact_value': 'acpid',
|
||||
}
|
||||
url = build_url('api:inventory_single_fact_view', args=(self.inventory.pk,), get=params)
|
||||
with self.current_user(self.super_django_user):
|
||||
response = self.get(url, expect=200)
|
||||
self.assertEqual(len(response['results']), 3)
|
||||
for entry in response['results']:
|
||||
self.assertEqual(entry['fact'][0]['name'], 'acpid')
|
||||
@@ -139,12 +139,13 @@ def get_encryption_key(instance, field_name):
|
||||
h.update(field_name)
|
||||
return h.digest()[:16]
|
||||
|
||||
|
||||
def encrypt_field(instance, field_name, ask=False):
|
||||
def encrypt_field(instance, field_name, ask=False, subfield=None):
|
||||
'''
|
||||
Return content of the given instance and field name encrypted.
|
||||
'''
|
||||
value = getattr(instance, field_name)
|
||||
if isinstance(value, dict) and subfield is not None:
|
||||
value = value[subfield]
|
||||
if not value or value.startswith('$encrypted$') or (ask and value == 'ASK'):
|
||||
return value
|
||||
value = smart_str(value)
|
||||
@@ -157,11 +158,13 @@ def encrypt_field(instance, field_name, ask=False):
|
||||
return '$encrypted$%s$%s' % ('AES', b64data)
|
||||
|
||||
|
||||
def decrypt_field(instance, field_name):
|
||||
def decrypt_field(instance, field_name, subfield=None):
|
||||
'''
|
||||
Return content of the given instance and field name decrypted.
|
||||
'''
|
||||
value = getattr(instance, field_name)
|
||||
if isinstance(value, dict) and subfield is not None:
|
||||
value = value[subfield]
|
||||
if not value or not value.startswith('$encrypted$'):
|
||||
return value
|
||||
algo, b64data = value[len('$encrypted$'):].split('$', 1)
|
||||
|
||||
Reference in New Issue
Block a user