diff --git a/.github/triage_replies.md b/.github/triage_replies.md index 6df2046a2f..f4cd6e9c9b 100644 --- a/.github/triage_replies.md +++ b/.github/triage_replies.md @@ -106,6 +106,13 @@ The Ansible Community is looking at building an EE that corresponds to all of th ### Oracle AWX We'd be happy to help if you can reproduce this with AWX since we do not have Oracle's Linux Automation Manager. If you need help with this specific version of Oracles Linux Automation Manager you will need to contact your Oracle for support. +### Community Resolved +Hi, + +We are happy to see that it appears a fix has been provided for your issue, so we will go ahead and close this ticket. Please feel free to reopen if any other problems arise. + + thanks so much for taking the time to write a thoughtful and helpful response to this issue! + ### AWX Release Subject: Announcing AWX Xa.Ya.za and AWX-Operator Xb.Yb.zb diff --git a/.github/workflows/pr_body_check.yml b/.github/workflows/pr_body_check.yml index 90520c7e92..7ddcabd3d6 100644 --- a/.github/workflows/pr_body_check.yml +++ b/.github/workflows/pr_body_check.yml @@ -17,9 +17,9 @@ jobs: env: PR_BODY: ${{ github.event.pull_request.body }} run: | - echo $PR_BODY | grep "Bug, Docs Fix or other nominal change" > Z - echo $PR_BODY | grep "New or Enhanced Feature" > Y - echo $PR_BODY | grep "Breaking Change" > X + echo "$PR_BODY" | grep "Bug, Docs Fix or other nominal change" > Z + echo "$PR_BODY" | grep "New or Enhanced Feature" > Y + echo "$PR_BODY" | grep "Breaking Change" > X exit 0 # We exit 0 and set the shell to prevent the returns from the greps from failing this step # See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#exit-codes-and-error-action-preference diff --git a/.github/workflows/promote.yml b/.github/workflows/promote.yml index 820494d303..6bd36f0102 100644 --- a/.github/workflows/promote.yml +++ b/.github/workflows/promote.yml @@ -38,9 +38,13 @@ jobs: - name: Build collection and 
publish to galaxy run: | COLLECTION_TEMPLATE_VERSION=true COLLECTION_NAMESPACE=${{ env.collection_namespace }} make build_collection - ansible-galaxy collection publish \ - --token=${{ secrets.GALAXY_TOKEN }} \ - awx_collection_build/${{ env.collection_namespace }}-awx-${{ github.event.release.tag_name }}.tar.gz + if [ "$(curl --head -sw '%{http_code}' https://galaxy.ansible.com/download/${{ env.collection_namespace }}-awx-${{ github.event.release.tag_name }}.tar.gz | tail -1)" == "302" ] ; then \ + echo "Galaxy release already done"; \ + else \ + ansible-galaxy collection publish \ + --token=${{ secrets.GALAXY_TOKEN }} \ + awx_collection_build/${{ env.collection_namespace }}-awx-${{ github.event.release.tag_name }}.tar.gz; \ + fi - name: Set official pypi info run: echo pypi_repo=pypi >> $GITHUB_ENV @@ -52,6 +56,7 @@ jobs: - name: Build awxkit and upload to pypi run: | + git reset --hard cd awxkit && python3 setup.py bdist_wheel twine upload \ -r ${{ env.pypi_repo }} \ @@ -74,4 +79,6 @@ jobs: docker tag ghcr.io/${{ github.repository }}:${{ github.event.release.tag_name }} quay.io/${{ github.repository }}:latest docker push quay.io/${{ github.repository }}:${{ github.event.release.tag_name }} docker push quay.io/${{ github.repository }}:latest - + docker pull ghcr.io/${{ github.repository_owner }}/awx-ee:${{ github.event.release.tag_name }} + docker tag ghcr.io/${{ github.repository_owner }}/awx-ee:${{ github.event.release.tag_name }} quay.io/${{ github.repository_owner }}/awx-ee:${{ github.event.release.tag_name }} + docker push quay.io/${{ github.repository_owner }}/awx-ee:${{ github.event.release.tag_name }} diff --git a/.github/workflows/stage.yml b/.github/workflows/stage.yml index 042b6b7b0d..306fe3834c 100644 --- a/.github/workflows/stage.yml +++ b/.github/workflows/stage.yml @@ -84,6 +84,20 @@ jobs: -e push=yes \ -e awx_official=yes + - name: Log in to GHCR + run: | + echo ${{ secrets.GITHUB_TOKEN }} | docker login ghcr.io -u ${{ github.actor }} 
--password-stdin + + - name: Log in to Quay + run: | + echo ${{ secrets.QUAY_TOKEN }} | docker login quay.io -u ${{ secrets.QUAY_USER }} --password-stdin + + - name: tag awx-ee:latest with version input + run: | + docker pull quay.io/ansible/awx-ee:latest + docker tag quay.io/ansible/awx-ee:latest ghcr.io/${{ github.repository_owner }}/awx-ee:${{ github.event.inputs.version }} + docker push ghcr.io/${{ github.repository_owner }}/awx-ee:${{ github.event.inputs.version }} + - name: Build and stage awx-operator working-directory: awx-operator run: | @@ -103,6 +117,7 @@ jobs: env: AWX_TEST_IMAGE: ${{ github.repository }} AWX_TEST_VERSION: ${{ github.event.inputs.version }} + AWX_EE_TEST_IMAGE: ghcr.io/${{ github.repository_owner }}/awx-ee:${{ github.event.inputs.version }} - name: Create draft release for AWX working-directory: awx diff --git a/Makefile b/Makefile index fc9a6dd2d6..e28fafd15c 100644 --- a/Makefile +++ b/Makefile @@ -2,6 +2,7 @@ PYTHON ?= python3.9 OFFICIAL ?= no NODE ?= node NPM_BIN ?= npm +KIND_BIN ?= $(shell which kind) CHROMIUM_BIN=/tmp/chrome-linux/chrome GIT_BRANCH ?= $(shell git rev-parse --abbrev-ref HEAD) MANAGEMENT_COMMAND ?= awx-manage @@ -232,11 +233,17 @@ daphne: fi; \ daphne -b 127.0.0.1 -p 8051 awx.asgi:channel_layer -wsbroadcast: +wsrelay: @if [ "$(VENV_BASE)" ]; then \ . $(VENV_BASE)/awx/bin/activate; \ fi; \ - $(PYTHON) manage.py run_wsbroadcast + $(PYTHON) manage.py run_wsrelay + +heartbeet: + @if [ "$(VENV_BASE)" ]; then \ + . $(VENV_BASE)/awx/bin/activate; \ + fi; \ + $(PYTHON) manage.py run_heartbeet ## Run to start the background task dispatcher for development. dispatcher: @@ -606,6 +613,9 @@ awx-kube-build: Dockerfile --build-arg HEADLESS=$(HEADLESS) \ -t $(DEV_DOCKER_TAG_BASE)/awx:$(COMPOSE_TAG) . 
+kind-dev-load: awx-kube-dev-build + $(KIND_BIN) load docker-image $(DEV_DOCKER_TAG_BASE)/awx_kube_devel:$(COMPOSE_TAG) + # Translation TASKS # -------------------------------------- diff --git a/awx/api/conf.py b/awx/api/conf.py index fd1467cdde..b9c2ee701a 100644 --- a/awx/api/conf.py +++ b/awx/api/conf.py @@ -96,6 +96,15 @@ register( category=_('Authentication'), category_slug='authentication', ) +register( + 'ALLOW_METRICS_FOR_ANONYMOUS_USERS', + field_class=fields.BooleanField, + default=False, + label=_('Allow anonymous users to poll metrics'), + help_text=_('If true, anonymous users are allowed to poll metrics.'), + category=_('Authentication'), + category_slug='authentication', +) def authentication_validate(serializer, attrs): diff --git a/awx/api/views/metrics.py b/awx/api/views/metrics.py index 1634293cab..4c05819f13 100644 --- a/awx/api/views/metrics.py +++ b/awx/api/views/metrics.py @@ -5,9 +5,11 @@ import logging # Django +from django.conf import settings from django.utils.translation import gettext_lazy as _ # Django REST Framework +from rest_framework.permissions import AllowAny from rest_framework.response import Response from rest_framework.exceptions import PermissionDenied @@ -31,9 +33,14 @@ class MetricsView(APIView): renderer_classes = [renderers.PlainTextRenderer, renderers.PrometheusJSONRenderer, renderers.BrowsableAPIRenderer] + def initialize_request(self, request, *args, **kwargs): + if settings.ALLOW_METRICS_FOR_ANONYMOUS_USERS: + self.permission_classes = (AllowAny,) + return super(APIView, self).initialize_request(request, *args, **kwargs) + def get(self, request): '''Show Metrics Details''' - if request.user.is_superuser or request.user.is_system_auditor: + if settings.ALLOW_METRICS_FOR_ANONYMOUS_USERS or request.user.is_superuser or request.user.is_system_auditor: metrics_to_show = '' if not request.query_params.get('subsystemonly', "0") == "1": metrics_to_show += metrics().decode('UTF-8') diff --git a/awx/conf/settings.py 
b/awx/conf/settings.py index 70e40fadcc..e3859fb2b0 100644 --- a/awx/conf/settings.py +++ b/awx/conf/settings.py @@ -5,6 +5,8 @@ import threading import time import os +from concurrent.futures import ThreadPoolExecutor + # Django from django.conf import LazySettings from django.conf import settings, UserSettingsHolder @@ -158,7 +160,7 @@ class EncryptedCacheProxy(object): obj_id = self.cache.get(Setting.get_cache_id_key(key), default=empty) if obj_id is empty: logger.info('Efficiency notice: Corresponding id not stored in cache %s', Setting.get_cache_id_key(key)) - obj_id = getattr(self._get_setting_from_db(key), 'pk', None) + obj_id = getattr(_get_setting_from_db(self.registry, key), 'pk', None) elif obj_id == SETTING_CACHE_NONE: obj_id = None return method(TransientSetting(pk=obj_id, value=value), 'value') @@ -167,11 +169,6 @@ class EncryptedCacheProxy(object): # a no-op; it just returns the provided value return value - def _get_setting_from_db(self, key): - field = self.registry.get_setting_field(key) - if not field.read_only: - return Setting.objects.filter(key=key, user__isnull=True).order_by('pk').first() - def __getattr__(self, name): return getattr(self.cache, name) @@ -187,6 +184,18 @@ def get_settings_to_cache(registry): return dict([(key, SETTING_CACHE_NOTSET) for key in get_writeable_settings(registry)]) +# HACK: runs in thread in order to work in an asyncio context +def _get_setting_from_db(registry, key): + def wrapped(registry, key): + field = registry.get_setting_field(key) + if not field.read_only: + return Setting.objects.filter(key=key, user__isnull=True).order_by('pk').first() + + with ThreadPoolExecutor(max_workers=1) as executor: + future = executor.submit(wrapped, registry, key) + return future.result() + + def get_cache_value(value): """Returns the proper special cache setting for a value based on instance type. 
@@ -346,7 +355,7 @@ class SettingsWrapper(UserSettingsHolder): setting_id = None # this value is read-only, however we *do* want to fetch its value from the database if not field.read_only or name == 'INSTALL_UUID': - setting = Setting.objects.filter(key=name, user__isnull=True).order_by('pk').first() + setting = _get_setting_from_db(self.registry, name) if setting: if getattr(field, 'encrypted', False): value = decrypt_field(setting, 'value') diff --git a/awx/main/analytics/broadcast_websocket.py b/awx/main/analytics/broadcast_websocket.py index 37b64361d3..21567402f3 100644 --- a/awx/main/analytics/broadcast_websocket.py +++ b/awx/main/analytics/broadcast_websocket.py @@ -65,7 +65,7 @@ class FixedSlidingWindow: return sum(self.buckets.values()) or 0 -class BroadcastWebsocketStatsManager: +class RelayWebsocketStatsManager: def __init__(self, event_loop, local_hostname): self._local_hostname = local_hostname @@ -74,7 +74,7 @@ class BroadcastWebsocketStatsManager: self._redis_key = BROADCAST_WEBSOCKET_REDIS_KEY_NAME def new_remote_host_stats(self, remote_hostname): - self._stats[remote_hostname] = BroadcastWebsocketStats(self._local_hostname, remote_hostname) + self._stats[remote_hostname] = RelayWebsocketStats(self._local_hostname, remote_hostname) return self._stats[remote_hostname] def delete_remote_host_stats(self, remote_hostname): @@ -107,7 +107,7 @@ class BroadcastWebsocketStatsManager: return parser.text_string_to_metric_families(stats_str.decode('UTF-8')) -class BroadcastWebsocketStats: +class RelayWebsocketStats: def __init__(self, local_hostname, remote_hostname): self._local_hostname = local_hostname self._remote_hostname = remote_hostname diff --git a/awx/main/analytics/subsystem_metrics.py b/awx/main/analytics/subsystem_metrics.py index 4b023db315..d8836b332f 100644 --- a/awx/main/analytics/subsystem_metrics.py +++ b/awx/main/analytics/subsystem_metrics.py @@ -9,7 +9,7 @@ from django.apps import apps from awx.main.consumers import 
emit_channel_notification from awx.main.utils import is_testing -root_key = 'awx_metrics' +root_key = settings.SUBSYSTEM_METRICS_REDIS_KEY_PREFIX logger = logging.getLogger('awx.main.analytics') @@ -264,13 +264,6 @@ class Metrics: data[field] = self.METRICS[field].decode(self.conn) return data - def store_metrics(self, data_json): - # called when receiving metrics from other instances - data = json.loads(data_json) - if self.instance_name != data['instance']: - logger.debug(f"{self.instance_name} received subsystem metrics from {data['instance']}") - self.conn.set(root_key + "_instance_" + data['instance'], data['metrics']) - def should_pipe_execute(self): if self.metrics_have_changed is False: return False @@ -309,9 +302,9 @@ class Metrics: 'instance': self.instance_name, 'metrics': self.serialize_local_metrics(), } - # store a local copy as well - self.store_metrics(json.dumps(payload)) + emit_channel_notification("metrics", payload) + self.previous_send_metrics.set(current_time) self.previous_send_metrics.store_value(self.conn) finally: diff --git a/awx/main/consumers.py b/awx/main/consumers.py index ad1740c362..f856ca915e 100644 --- a/awx/main/consumers.py +++ b/awx/main/consumers.py @@ -3,6 +3,7 @@ import logging import time import hmac import asyncio +import redis from django.core.serializers.json import DjangoJSONEncoder from django.conf import settings @@ -80,7 +81,7 @@ class WebsocketSecretAuthHelper: WebsocketSecretAuthHelper.verify_secret(secret) -class BroadcastConsumer(AsyncJsonWebsocketConsumer): +class RelayConsumer(AsyncJsonWebsocketConsumer): async def connect(self): try: WebsocketSecretAuthHelper.is_authorized(self.scope) @@ -100,6 +101,21 @@ class BroadcastConsumer(AsyncJsonWebsocketConsumer): async def internal_message(self, event): await self.send(event['text']) + async def receive_json(self, data): + (group, message) = unwrap_broadcast_msg(data) + if group == "metrics": + message = json.loads(message['text']) + conn = 
redis.Redis.from_url(settings.BROKER_URL) + conn.set(settings.SUBSYSTEM_METRICS_REDIS_KEY_PREFIX + "_instance_" + message['instance'], message['metrics']) + else: + await self.channel_layer.group_send(group, message) + + async def consumer_subscribe(self, event): + await self.send_json(event) + + async def consumer_unsubscribe(self, event): + await self.send_json(event) + class EventConsumer(AsyncJsonWebsocketConsumer): async def connect(self): @@ -128,6 +144,11 @@ class EventConsumer(AsyncJsonWebsocketConsumer): self.channel_name, ) + await self.channel_layer.group_send( + settings.BROADCAST_WEBSOCKET_GROUP_NAME, + {"type": "consumer.unsubscribe", "groups": list(current_groups), "origin_channel": self.channel_name}, + ) + @database_sync_to_async def user_can_see_object_id(self, user_access, oid): # At this point user is a channels.auth.UserLazyObject object @@ -176,9 +197,20 @@ class EventConsumer(AsyncJsonWebsocketConsumer): self.channel_name, ) + if len(old_groups): + await self.channel_layer.group_send( + settings.BROADCAST_WEBSOCKET_GROUP_NAME, + {"type": "consumer.unsubscribe", "groups": list(old_groups), "origin_channel": self.channel_name}, + ) + new_groups_exclusive = new_groups - current_groups for group_name in new_groups_exclusive: await self.channel_layer.group_add(group_name, self.channel_name) + + await self.channel_layer.group_send( + settings.BROADCAST_WEBSOCKET_GROUP_NAME, + {"type": "consumer.subscribe", "groups": list(new_groups), "origin_channel": self.channel_name}, + ) self.scope['session']['groups'] = new_groups await self.send_json({"groups_current": list(new_groups), "groups_left": list(old_groups), "groups_joined": list(new_groups_exclusive)}) @@ -200,9 +232,11 @@ def _dump_payload(payload): return None -def emit_channel_notification(group, payload): - from awx.main.wsbroadcast import wrap_broadcast_msg # noqa +def unwrap_broadcast_msg(payload: dict): + return (payload['group'], payload['message']) + +def emit_channel_notification(group, 
payload): payload_dumped = _dump_payload(payload) if payload_dumped is None: return @@ -212,16 +246,6 @@ def emit_channel_notification(group, payload): run_sync( channel_layer.group_send( group, - {"type": "internal.message", "text": payload_dumped}, - ) - ) - - run_sync( - channel_layer.group_send( - settings.BROADCAST_WEBSOCKET_GROUP_NAME, - { - "type": "internal.message", - "text": wrap_broadcast_msg(group, payload_dumped), - }, + {"type": "internal.message", "text": payload_dumped, "needs_relay": True}, ) ) diff --git a/awx/main/credential_plugins/hashivault.py b/awx/main/credential_plugins/hashivault.py index 1a636bdbf9..0a2b9171b9 100644 --- a/awx/main/credential_plugins/hashivault.py +++ b/awx/main/credential_plugins/hashivault.py @@ -1,6 +1,7 @@ import copy import os import pathlib +import time from urllib.parse import urljoin from .plugin import CredentialPlugin, CertFiles, raise_for_status @@ -247,7 +248,15 @@ def kv_backend(**kwargs): request_url = urljoin(url, '/'.join(['v1'] + path_segments)).rstrip('/') with CertFiles(cacert) as cert: request_kwargs['verify'] = cert - response = sess.get(request_url, **request_kwargs) + request_retries = 0 + while request_retries < 5: + response = sess.get(request_url, **request_kwargs) + # https://developer.hashicorp.com/vault/docs/enterprise/consistency + if response.status_code == 412: + request_retries += 1 + time.sleep(1) + else: + break raise_for_status(response) json = response.json() @@ -289,8 +298,15 @@ def ssh_backend(**kwargs): with CertFiles(cacert) as cert: request_kwargs['verify'] = cert - resp = sess.post(request_url, **request_kwargs) - + request_retries = 0 + while request_retries < 5: + resp = sess.post(request_url, **request_kwargs) + # https://developer.hashicorp.com/vault/docs/enterprise/consistency + if resp.status_code == 412: + request_retries += 1 + time.sleep(1) + else: + break raise_for_status(resp) return resp.json()['data']['signed_key'] diff --git a/awx/main/db/profiled_pg/base.py 
b/awx/main/db/profiled_pg/base.py index 5df1341428..583c12ff53 100644 --- a/awx/main/db/profiled_pg/base.py +++ b/awx/main/db/profiled_pg/base.py @@ -63,7 +63,7 @@ class RecordedQueryLog(object): if not os.path.isdir(self.dest): os.makedirs(self.dest) progname = ' '.join(sys.argv) - for match in ('uwsgi', 'dispatcher', 'callback_receiver', 'wsbroadcast'): + for match in ('uwsgi', 'dispatcher', 'callback_receiver', 'wsrelay'): if match in progname: progname = match break diff --git a/awx/main/dispatch/worker/callback.py b/awx/main/dispatch/worker/callback.py index 0578a4ff97..b0588265a4 100644 --- a/awx/main/dispatch/worker/callback.py +++ b/awx/main/dispatch/worker/callback.py @@ -3,14 +3,12 @@ import logging import os import signal import time -import traceback import datetime from django.conf import settings from django.utils.functional import cached_property from django.utils.timezone import now as tz_now -from django.db import DatabaseError, OperationalError, transaction, connection as django_connection -from django.db.utils import InterfaceError, InternalError +from django.db import transaction, connection as django_connection from django_guid import set_guid import psutil @@ -64,6 +62,7 @@ class CallbackBrokerWorker(BaseWorker): """ MAX_RETRIES = 2 + INDIVIDUAL_EVENT_RETRIES = 3 last_stats = time.time() last_flush = time.time() total = 0 @@ -164,38 +163,48 @@ class CallbackBrokerWorker(BaseWorker): else: # only calculate the seconds if the created time already has been set metrics_total_job_event_processing_seconds += e.modified - e.created metrics_duration_to_save = time.perf_counter() + saved_events = [] try: cls.objects.bulk_create(events) metrics_bulk_events_saved += len(events) + saved_events = events + self.buff[cls] = [] except Exception as exc: - logger.warning(f'Error in events bulk_create, will try indiviually up to 5 errors, error {str(exc)}') + # If the database is flaking, let ensure_connection throw a general exception + # will be caught by the 
outer loop, which goes into a proper sleep and retry loop + django_connection.ensure_connection() + logger.warning(f'Error in events bulk_create, will try individually, error: {str(exc)}') # if an exception occurs, we should re-attempt to save the # events one-by-one, because something in the list is # broken/stale - consecutive_errors = 0 - events_saved = 0 metrics_events_batch_save_errors += 1 - for e in events: + for e in events.copy(): try: e.save() - events_saved += 1 - consecutive_errors = 0 + metrics_singular_events_saved += 1 + events.remove(e) + saved_events.append(e) # Importantly, remove successfully saved events from the buffer except Exception as exc_indv: - consecutive_errors += 1 - logger.info(f'Database Error Saving individual Job Event, error {str(exc_indv)}') - if consecutive_errors >= 5: - raise - metrics_singular_events_saved += events_saved - if events_saved == 0: - raise + retry_count = getattr(e, '_retry_count', 0) + 1 + e._retry_count = retry_count + + # special sanitization logic for postgres treatment of NUL 0x00 char + if (retry_count == 1) and isinstance(exc_indv, ValueError) and ("\x00" in e.stdout): + e.stdout = e.stdout.replace("\x00", "") + + if retry_count >= self.INDIVIDUAL_EVENT_RETRIES: + logger.error(f'Hit max retries ({retry_count}) saving individual Event error: {str(exc_indv)}\ndata:\n{e.__dict__}') + events.remove(e) + else: + logger.info(f'Database Error Saving individual Event uuid={e.uuid} try={retry_count}, error: {str(exc_indv)}') + metrics_duration_to_save = time.perf_counter() - metrics_duration_to_save - for e in events: + for e in saved_events: if not getattr(e, '_skip_websocket_message', False): metrics_events_broadcast += 1 emit_event_detail(e) if getattr(e, '_notification_trigger_event', False): job_stats_wrapup(getattr(e, e.JOB_REFERENCE), event=e) - self.buff = {} self.last_flush = time.time() # only update metrics if we saved events if (metrics_bulk_events_saved + metrics_singular_events_saved) > 0: @@ -267,20 
+276,16 @@ class CallbackBrokerWorker(BaseWorker): try: self.flush(force=flush) break - except (OperationalError, InterfaceError, InternalError) as exc: + except Exception as exc: + # Aside from bugs, exceptions here are assumed to be due to database flake if retries >= self.MAX_RETRIES: logger.exception('Worker could not re-establish database connectivity, giving up on one or more events.') + self.buff = {} return delay = 60 * retries logger.warning(f'Database Error Flushing Job Events, retry #{retries + 1} in {delay} seconds: {str(exc)}') django_connection.close() time.sleep(delay) retries += 1 - except DatabaseError: - logger.exception('Database Error Flushing Job Events') - django_connection.close() - break - except Exception as exc: - tb = traceback.format_exc() - logger.error('Callback Task Processor Raised Exception: %r', exc) - logger.error('Detail: {}'.format(tb)) + except Exception: + logger.exception(f'Callback Task Processor Raised Unexpected Exception processing event data:\n{body}') diff --git a/awx/main/management/commands/run_heartbeet.py b/awx/main/management/commands/run_heartbeet.py new file mode 100644 index 0000000000..199412dfab --- /dev/null +++ b/awx/main/management/commands/run_heartbeet.py @@ -0,0 +1,67 @@ +import json +import logging +import os +import time + +from django.core.management.base import BaseCommand +from django.conf import settings + +from awx.main.dispatch import pg_bus_conn + +logger = logging.getLogger('awx.main.commands.run_heartbeet') + + +class Command(BaseCommand): + help = 'Launch the web server beacon (heartbeet)' + + def print_banner(self): + heartbeet = """ + ********** ********** + ************* ************* +***************************** + ***********HEART*********** + ************************* + ******************* + *************** _._ + *********** /`._ `'. 
__ + ******* \ .\| \ _'` `) + *** (``_) \| ).'` /`- / + * `\ `;\_ `\\//`-'` / + \ `'.'.| / __/` + `'--v_|/`'` + __||-._ + /'` `-`` `'\\ + / .'` ) + \ BEET ' ) + \. / + '. /'` + `) | + // + '(. + `\`. + ``""" + print(heartbeet) + + def construct_payload(self, action='online'): + payload = { + 'hostname': settings.CLUSTER_HOST_ID, + 'ip': os.environ.get('MY_POD_IP'), + 'action': action, + } + return json.dumps(payload) + + def do_hearbeat_loop(self): + with pg_bus_conn(new_connection=True) as conn: + while True: + logger.debug('Sending heartbeat') + conn.notify('web_heartbeet', self.construct_payload()) + time.sleep(settings.BROADCAST_WEBSOCKET_BEACON_FROM_WEB_RATE_SECONDS) + + # TODO: Send a message with action=offline if we notice a SIGTERM or SIGINT + # (wsrelay can use this to remove the node quicker) + def handle(self, *arg, **options): + self.print_banner() + + # Note: We don't really try any reconnect logic to pg_notify here, + # just let supervisor restart if we fail. + self.do_hearbeat_loop() diff --git a/awx/main/management/commands/run_wsbroadcast.py b/awx/main/management/commands/run_wsrelay.py similarity index 87% rename from awx/main/management/commands/run_wsbroadcast.py rename to awx/main/management/commands/run_wsrelay.py index cb2b7efcdb..8bdf6ea0a3 100644 --- a/awx/main/management/commands/run_wsbroadcast.py +++ b/awx/main/management/commands/run_wsrelay.py @@ -13,13 +13,13 @@ from django.db import connection from django.db.migrations.executor import MigrationExecutor from awx.main.analytics.broadcast_websocket import ( - BroadcastWebsocketStatsManager, + RelayWebsocketStatsManager, safe_name, ) -from awx.main.wsbroadcast import BroadcastWebsocketManager +from awx.main.wsrelay import WebSocketRelayManager -logger = logging.getLogger('awx.main.wsbroadcast') +logger = logging.getLogger('awx.main.wsrelay') class Command(BaseCommand): @@ -99,7 +99,7 @@ class Command(BaseCommand): executor = MigrationExecutor(connection) migrating = 
bool(executor.migration_plan(executor.loader.graph.leaf_nodes())) except Exception as exc: - logger.info(f'Error on startup of run_wsbroadcast (error: {exc}), retry in 10s...') + logger.warning(f'Error on startup of run_wsrelay (error: {exc}), retry in 10s...') time.sleep(10) return @@ -130,9 +130,9 @@ class Command(BaseCommand): if options.get('status'): try: - stats_all = BroadcastWebsocketStatsManager.get_stats_sync() + stats_all = RelayWebsocketStatsManager.get_stats_sync() except redis.exceptions.ConnectionError as e: - print(f"Unable to get Broadcast Websocket Status. Failed to connect to redis {e}") + print(f"Unable to get Relay Websocket Status. Failed to connect to redis {e}") return data = {} @@ -151,22 +151,19 @@ class Command(BaseCommand): host_stats = Command.get_connection_status(hostnames, data) lines = Command._format_lines(host_stats) - print(f'Broadcast websocket connection status from "{my_hostname}" to:') + print(f'Relay websocket connection status from "{my_hostname}" to:') print('\n'.join(lines)) host_stats = Command.get_connection_stats(hostnames, data) lines = Command._format_lines(host_stats) - print(f'\nBroadcast websocket connection stats from "{my_hostname}" to:') + print(f'\nRelay websocket connection stats from "{my_hostname}" to:') print('\n'.join(lines)) return try: - broadcast_websocket_mgr = BroadcastWebsocketManager() - task = broadcast_websocket_mgr.start() - - loop = asyncio.get_event_loop() - loop.run_until_complete(task) + websocket_relay_manager = WebSocketRelayManager() + asyncio.run(websocket_relay_manager.run()) except KeyboardInterrupt: - logger.debug('Terminating Websocket Broadcaster') + logger.info('Terminating Websocket Relayer') diff --git a/awx/main/routing.py b/awx/main/routing.py index 100347f64e..9625b23176 100644 --- a/awx/main/routing.py +++ b/awx/main/routing.py @@ -28,7 +28,7 @@ class AWXProtocolTypeRouter(ProtocolTypeRouter): websocket_urlpatterns = [ re_path(r'websocket/$', 
consumers.EventConsumer.as_asgi()), - re_path(r'websocket/broadcast/$', consumers.BroadcastConsumer.as_asgi()), + re_path(r'websocket/relay/$', consumers.RelayConsumer.as_asgi()), ] application = AWXProtocolTypeRouter( diff --git a/awx/main/tasks/jobs.py b/awx/main/tasks/jobs.py index c6eaa36fec..a726a418c0 100644 --- a/awx/main/tasks/jobs.py +++ b/awx/main/tasks/jobs.py @@ -390,6 +390,7 @@ class BaseTask(object): logger.error("I/O error({0}) while trying to open lock file [{1}]: {2}".format(e.errno, lock_path, e.strerror)) raise + emitted_lockfile_log = False start_time = time.time() while True: try: @@ -401,6 +402,9 @@ class BaseTask(object): logger.error("I/O error({0}) while trying to aquire lock on file [{1}]: {2}".format(e.errno, lock_path, e.strerror)) raise else: + if not emitted_lockfile_log: + logger.info(f"exception acquiring lock {lock_path}: {e}") + emitted_lockfile_log = True time.sleep(1.0) self.instance.refresh_from_db(fields=['cancel_flag']) if self.instance.cancel_flag or signal_callback(): diff --git a/awx/main/tests/functional/commands/test_callback_receiver.py b/awx/main/tests/functional/commands/test_callback_receiver.py index 389edf6ffb..234392fb44 100644 --- a/awx/main/tests/functional/commands/test_callback_receiver.py +++ b/awx/main/tests/functional/commands/test_callback_receiver.py @@ -1,7 +1,15 @@ import pytest +import time +from unittest import mock +from uuid import uuid4 + +from django.test import TransactionTestCase + +from awx.main.dispatch.worker.callback import job_stats_wrapup, CallbackBrokerWorker -from awx.main.dispatch.worker.callback import job_stats_wrapup from awx.main.models.jobs import Job +from awx.main.models.inventory import InventoryUpdate, InventorySource +from awx.main.models.events import InventoryUpdateEvent @pytest.mark.django_db @@ -24,3 +32,108 @@ def test_wrapup_does_send_notifications(mocker): job.refresh_from_db() assert job.host_status_counts == {} mock.assert_called_once_with('succeeded') + + +class 
FakeRedis: + def keys(self, *args, **kwargs): + return [] + + def set(self): + pass + + def get(self): + return None + + @classmethod + def from_url(cls, *args, **kwargs): + return cls() + + def pipeline(self): + return self + + +class TestCallbackBrokerWorker(TransactionTestCase): + @pytest.fixture(autouse=True) + def turn_off_websockets(self): + with mock.patch('awx.main.dispatch.worker.callback.emit_event_detail', lambda *a, **kw: None): + yield + + def get_worker(self): + with mock.patch('redis.Redis', new=FakeRedis): # turn off redis stuff + return CallbackBrokerWorker() + + def event_create_kwargs(self): + inventory_update = InventoryUpdate.objects.create(source='file', inventory_source=InventorySource.objects.create(source='file')) + return dict(inventory_update=inventory_update, created=inventory_update.created) + + def test_flush_with_valid_event(self): + worker = self.get_worker() + events = [InventoryUpdateEvent(uuid=str(uuid4()), **self.event_create_kwargs())] + worker.buff = {InventoryUpdateEvent: events} + worker.flush() + assert worker.buff.get(InventoryUpdateEvent, []) == [] + assert InventoryUpdateEvent.objects.filter(uuid=events[0].uuid).count() == 1 + + def test_flush_with_invalid_event(self): + worker = self.get_worker() + kwargs = self.event_create_kwargs() + events = [ + InventoryUpdateEvent(uuid=str(uuid4()), stdout='good1', **kwargs), + InventoryUpdateEvent(uuid=str(uuid4()), stdout='bad', counter=-2, **kwargs), + InventoryUpdateEvent(uuid=str(uuid4()), stdout='good2', **kwargs), + ] + worker.buff = {InventoryUpdateEvent: events.copy()} + worker.flush() + assert InventoryUpdateEvent.objects.filter(uuid=events[0].uuid).count() == 1 + assert InventoryUpdateEvent.objects.filter(uuid=events[1].uuid).count() == 0 + assert InventoryUpdateEvent.objects.filter(uuid=events[2].uuid).count() == 1 + assert worker.buff == {InventoryUpdateEvent: [events[1]]} + + def test_duplicate_key_not_saved_twice(self): + worker = self.get_worker() + events = 
[InventoryUpdateEvent(uuid=str(uuid4()), **self.event_create_kwargs())] + worker.buff = {InventoryUpdateEvent: events.copy()} + worker.flush() + + # put current saved event in buffer (error case) + worker.buff = {InventoryUpdateEvent: [InventoryUpdateEvent.objects.get(uuid=events[0].uuid)]} + worker.last_flush = time.time() - 2.0 + # here, the bulk_create will fail with UNIQUE constraint violation, but individual saves should resolve it + worker.flush() + assert InventoryUpdateEvent.objects.filter(uuid=events[0].uuid).count() == 1 + assert worker.buff.get(InventoryUpdateEvent, []) == [] + + def test_give_up_on_bad_event(self): + worker = self.get_worker() + events = [InventoryUpdateEvent(uuid=str(uuid4()), counter=-2, **self.event_create_kwargs())] + worker.buff = {InventoryUpdateEvent: events.copy()} + + for i in range(5): + worker.last_flush = time.time() - 2.0 + worker.flush() + + # Could not save, should be logged, and buffer should be cleared + assert worker.buff.get(InventoryUpdateEvent, []) == [] + assert InventoryUpdateEvent.objects.filter(uuid=events[0].uuid).count() == 0 # sanity + + def test_postgres_invalid_NUL_char(self): + # In postgres, text fields reject NUL character, 0x00 + # tests use sqlite3 which will not raise an error + # but we can still test that it is sanitized before saving + worker = self.get_worker() + kwargs = self.event_create_kwargs() + events = [InventoryUpdateEvent(uuid=str(uuid4()), stdout="\x00", **kwargs)] + assert "\x00" in events[0].stdout # sanity + worker.buff = {InventoryUpdateEvent: events.copy()} + + with mock.patch.object(InventoryUpdateEvent.objects, 'bulk_create', side_effect=ValueError): + with mock.patch.object(events[0], 'save', side_effect=ValueError): + worker.flush() + + assert "\x00" not in events[0].stdout + + worker.last_flush = time.time() - 2.0 + worker.flush() + + event = InventoryUpdateEvent.objects.get(uuid=events[0].uuid) + assert "\x00" not in event.stdout diff --git a/awx/main/utils/handlers.py 
b/awx/main/utils/handlers.py index 1740a4c8f6..7f7116d78b 100644 --- a/awx/main/utils/handlers.py +++ b/awx/main/utils/handlers.py @@ -103,6 +103,10 @@ ColorHandler = logging.StreamHandler if settings.COLOR_LOGS is True: try: from logutils.colorize import ColorizingStreamHandler + import colorama + + colorama.deinit() + colorama.init(wrap=False, convert=False, strip=False) class ColorHandler(ColorizingStreamHandler): def colorize(self, line, record): diff --git a/awx/main/wsbroadcast.py b/awx/main/wsbroadcast.py deleted file mode 100644 index c4ed0fc21b..0000000000 --- a/awx/main/wsbroadcast.py +++ /dev/null @@ -1,209 +0,0 @@ -import json -import logging -import asyncio - -import aiohttp -from aiohttp import client_exceptions -from asgiref.sync import sync_to_async - -from channels.layers import get_channel_layer - -from django.conf import settings -from django.apps import apps -from django.core.serializers.json import DjangoJSONEncoder - -from awx.main.analytics.broadcast_websocket import ( - BroadcastWebsocketStats, - BroadcastWebsocketStatsManager, -) -import awx.main.analytics.subsystem_metrics as s_metrics - -logger = logging.getLogger('awx.main.wsbroadcast') - - -def wrap_broadcast_msg(group, message: str): - # TODO: Maybe wrap as "group","message" so that we don't need to - # encode/decode as json. 
- return json.dumps(dict(group=group, message=message), cls=DjangoJSONEncoder) - - -def unwrap_broadcast_msg(payload: dict): - return (payload['group'], payload['message']) - - -@sync_to_async -def get_broadcast_hosts(): - Instance = apps.get_model('main', 'Instance') - instances = ( - Instance.objects.exclude(hostname=Instance.objects.my_hostname()) - .exclude(node_type='execution') - .exclude(node_type='hop') - .order_by('hostname') - .values('hostname', 'ip_address') - .distinct() - ) - return {i['hostname']: i['ip_address'] or i['hostname'] for i in instances} - - -def get_local_host(): - Instance = apps.get_model('main', 'Instance') - return Instance.objects.my_hostname() - - -class WebsocketTask: - def __init__( - self, - name, - event_loop, - stats: BroadcastWebsocketStats, - remote_host: str, - remote_port: int = settings.BROADCAST_WEBSOCKET_PORT, - protocol: str = settings.BROADCAST_WEBSOCKET_PROTOCOL, - verify_ssl: bool = settings.BROADCAST_WEBSOCKET_VERIFY_CERT, - endpoint: str = 'broadcast', - ): - self.name = name - self.event_loop = event_loop - self.stats = stats - self.remote_host = remote_host - self.remote_port = remote_port - self.endpoint = endpoint - self.protocol = protocol - self.verify_ssl = verify_ssl - self.channel_layer = None - self.subsystem_metrics = s_metrics.Metrics(instance_name=name) - - async def run_loop(self, websocket: aiohttp.ClientWebSocketResponse): - raise RuntimeError("Implement me") - - async def connect(self, attempt): - from awx.main.consumers import WebsocketSecretAuthHelper # noqa - - logger.debug(f"Connection from {self.name} to {self.remote_host} attempt number {attempt}.") - - ''' - Can not put get_channel_layer() in the init code because it is in the init - path of channel layers i.e. RedisChannelLayer() calls our init code. 
- ''' - if not self.channel_layer: - self.channel_layer = get_channel_layer() - - try: - if attempt > 0: - await asyncio.sleep(settings.BROADCAST_WEBSOCKET_RECONNECT_RETRY_RATE_SECONDS) - except asyncio.CancelledError: - logger.warning(f"Connection from {self.name} to {self.remote_host} cancelled") - raise - - uri = f"{self.protocol}://{self.remote_host}:{self.remote_port}/websocket/{self.endpoint}/" - timeout = aiohttp.ClientTimeout(total=10) - - secret_val = WebsocketSecretAuthHelper.construct_secret() - try: - async with aiohttp.ClientSession(headers={'secret': secret_val}, timeout=timeout) as session: - async with session.ws_connect(uri, ssl=self.verify_ssl, heartbeat=20) as websocket: - logger.info(f"Connection from {self.name} to {self.remote_host} established.") - self.stats.record_connection_established() - attempt = 0 - await self.run_loop(websocket) - except asyncio.CancelledError: - # TODO: Check if connected and disconnect - # Possibly use run_until_complete() if disconnect is async - logger.warning(f"Connection from {self.name} to {self.remote_host} cancelled.") - self.stats.record_connection_lost() - raise - except client_exceptions.ClientConnectorError as e: - logger.warning(f"Connection from {self.name} to {self.remote_host} failed: '{e}'.") - except asyncio.TimeoutError: - logger.warning(f"Connection from {self.name} to {self.remote_host} timed out.") - except Exception as e: - # Early on, this is our canary. I'm not sure what exceptions we can really encounter. 
- logger.exception(f"Connection from {self.name} to {self.remote_host} failed for unknown reason: '{e}'.") - else: - logger.warning(f"Connection from {self.name} to {self.remote_host} list.") - - self.stats.record_connection_lost() - self.start(attempt=attempt + 1) - - def start(self, attempt=0): - self.async_task = self.event_loop.create_task(self.connect(attempt=attempt)) - - def cancel(self): - self.async_task.cancel() - - -class BroadcastWebsocketTask(WebsocketTask): - async def run_loop(self, websocket: aiohttp.ClientWebSocketResponse): - async for msg in websocket: - self.stats.record_message_received() - - if msg.type == aiohttp.WSMsgType.ERROR: - break - elif msg.type == aiohttp.WSMsgType.TEXT: - try: - payload = json.loads(msg.data) - except json.JSONDecodeError: - logmsg = "Failed to decode broadcast message" - if logger.isEnabledFor(logging.DEBUG): - logmsg = "{} {}".format(logmsg, payload) - logger.warning(logmsg) - continue - (group, message) = unwrap_broadcast_msg(payload) - if group == "metrics": - self.subsystem_metrics.store_metrics(message) - continue - await self.channel_layer.group_send(group, {"type": "internal.message", "text": message}) - - -class BroadcastWebsocketManager(object): - def __init__(self): - self.event_loop = asyncio.get_event_loop() - ''' - { - 'hostname1': BroadcastWebsocketTask(), - 'hostname2': BroadcastWebsocketTask(), - 'hostname3': BroadcastWebsocketTask(), - } - ''' - self.broadcast_tasks = dict() - self.local_hostname = get_local_host() - self.stats_mgr = BroadcastWebsocketStatsManager(self.event_loop, self.local_hostname) - - async def run_per_host_websocket(self): - - while True: - known_hosts = await get_broadcast_hosts() - future_remote_hosts = known_hosts.keys() - current_remote_hosts = self.broadcast_tasks.keys() - deleted_remote_hosts = set(current_remote_hosts) - set(future_remote_hosts) - new_remote_hosts = set(future_remote_hosts) - set(current_remote_hosts) - - remote_addresses = {k: v.remote_host for k, v in 
self.broadcast_tasks.items()} - for hostname, address in known_hosts.items(): - if hostname in self.broadcast_tasks and address != remote_addresses[hostname]: - deleted_remote_hosts.add(hostname) - new_remote_hosts.add(hostname) - - if deleted_remote_hosts: - logger.warning(f"Removing {deleted_remote_hosts} from websocket broadcast list") - if new_remote_hosts: - logger.warning(f"Adding {new_remote_hosts} to websocket broadcast list") - - for h in deleted_remote_hosts: - self.broadcast_tasks[h].cancel() - del self.broadcast_tasks[h] - self.stats_mgr.delete_remote_host_stats(h) - - for h in new_remote_hosts: - stats = self.stats_mgr.new_remote_host_stats(h) - broadcast_task = BroadcastWebsocketTask(name=self.local_hostname, event_loop=self.event_loop, stats=stats, remote_host=known_hosts[h]) - broadcast_task.start() - self.broadcast_tasks[h] = broadcast_task - - await asyncio.sleep(settings.BROADCAST_WEBSOCKET_NEW_INSTANCE_POLL_RATE_SECONDS) - - def start(self): - self.stats_mgr.start() - - self.async_task = self.event_loop.create_task(self.run_per_host_websocket()) - return self.async_task diff --git a/awx/main/wsrelay.py b/awx/main/wsrelay.py new file mode 100644 index 0000000000..77b2c8a393 --- /dev/null +++ b/awx/main/wsrelay.py @@ -0,0 +1,307 @@ +import json +import logging +import asyncio + +import aiohttp +from aiohttp import client_exceptions +from asgiref.sync import sync_to_async + +from channels.layers import get_channel_layer +from channels.db import database_sync_to_async + +from django.conf import settings +from django.apps import apps + +import psycopg + +from awx.main.analytics.broadcast_websocket import ( + RelayWebsocketStats, + RelayWebsocketStatsManager, +) +import awx.main.analytics.subsystem_metrics as s_metrics + +logger = logging.getLogger('awx.main.wsrelay') + + +def wrap_broadcast_msg(group, message: str): + # TODO: Maybe wrap as "group","message" so that we don't need to + # encode/decode as json. 
+ return dict(group=group, message=message) + + +def get_local_host(): + Instance = apps.get_model('main', 'Instance') + return Instance.objects.my_hostname() + + +class WebsocketRelayConnection: + def __init__( + self, + name, + stats: RelayWebsocketStats, + remote_host: str, + remote_port: int = settings.BROADCAST_WEBSOCKET_PORT, + protocol: str = settings.BROADCAST_WEBSOCKET_PROTOCOL, + verify_ssl: bool = settings.BROADCAST_WEBSOCKET_VERIFY_CERT, + ): + self.name = name + self.event_loop = asyncio.get_event_loop() + self.stats = stats + self.remote_host = remote_host + self.remote_port = remote_port + self.protocol = protocol + self.verify_ssl = verify_ssl + self.channel_layer = None + self.subsystem_metrics = s_metrics.Metrics(instance_name=name) + self.producers = dict() + self.connected = False + + async def run_loop(self, websocket: aiohttp.ClientWebSocketResponse): + raise RuntimeError("Implement me") + + async def connect(self): + from awx.main.consumers import WebsocketSecretAuthHelper # noqa + + logger.debug(f"Connection attempt from {self.name} to {self.remote_host}") + + ''' + Can not put get_channel_layer() in the init code because it is in the init + path of channel layers i.e. RedisChannelLayer() calls our init code. 
+ ''' + if not self.channel_layer: + self.channel_layer = get_channel_layer() + + uri = f"{self.protocol}://{self.remote_host}:{self.remote_port}/websocket/relay/" + timeout = aiohttp.ClientTimeout(total=10) + + secret_val = WebsocketSecretAuthHelper.construct_secret() + try: + async with aiohttp.ClientSession(headers={'secret': secret_val}, timeout=timeout) as session: + async with session.ws_connect(uri, ssl=self.verify_ssl, heartbeat=20) as websocket: + logger.info(f"Connection from {self.name} to {self.remote_host} established.") + self.stats.record_connection_established() + self.connected = True + await self.run_connection(websocket) + except asyncio.CancelledError: + # TODO: Check if connected and disconnect + # Possibly use run_until_complete() if disconnect is async + logger.warning(f"Connection from {self.name} to {self.remote_host} cancelled.") + except client_exceptions.ClientConnectorError as e: + logger.warning(f"Connection from {self.name} to {self.remote_host} failed: '{e}'.", exc_info=True) + except asyncio.TimeoutError: + logger.warning(f"Connection from {self.name} to {self.remote_host} timed out.") + except Exception as e: + # Early on, this is our canary. I'm not sure what exceptions we can really encounter. 
+ logger.warning(f"Connection from {self.name} to {self.remote_host} failed for unknown reason: '{e}'.", exc_info=True) + else: + logger.debug(f"Connection from {self.name} to {self.remote_host} lost, but no exception was raised.") + finally: + self.connected = False + self.stats.record_connection_lost() + + def start(self): + self.async_task = self.event_loop.create_task(self.connect()) + return self.async_task + + def cancel(self): + self.async_task.cancel() + + async def run_connection(self, websocket: aiohttp.ClientWebSocketResponse): + # create a dedicated subsystem metric producer to handle local subsystem + # metrics messages + # the "metrics" group is not subscribed to in the typical fashion, so we + # just explicitly create it + producer = self.event_loop.create_task(self.run_producer("metrics", websocket, "metrics")) + self.producers["metrics"] = {"task": producer, "subscriptions": {"metrics"}} + async for msg in websocket: + self.stats.record_message_received() + + if msg.type == aiohttp.WSMsgType.ERROR: + break + elif msg.type == aiohttp.WSMsgType.TEXT: + try: + payload = json.loads(msg.data) + except json.JSONDecodeError: + logmsg = "Failed to decode message from web node" + if logger.isEnabledFor(logging.DEBUG): + logmsg = "{} {}".format(logmsg, payload) + logger.warning(logmsg) + continue + + if payload.get("type") == "consumer.subscribe": + for group in payload['groups']: + name = f"{self.remote_host}-{group}" + origin_channel = payload['origin_channel'] + if not self.producers.get(name): + producer = self.event_loop.create_task(self.run_producer(name, websocket, group)) + self.producers[name] = {"task": producer, "subscriptions": {origin_channel}} + logger.debug(f"Producer {name} started.") + else: + self.producers[name]["subscriptions"].add(origin_channel) + logger.debug(f"Connection from {self.name} to {self.remote_host} added subscription to {group}.") + + if payload.get("type") == "consumer.unsubscribe": + for group in payload['groups']: + name 
= f"{self.remote_host}-{group}" + origin_channel = payload['origin_channel'] + try: + self.producers[name]["subscriptions"].remove(origin_channel) + logger.debug(f"Unsubscribed {origin_channel} from {name}") + except KeyError: + logger.warning(f"Producer {name} not found.") + + async def run_producer(self, name, websocket, group): + try: + logger.info(f"Starting producer for {name}") + + consumer_channel = await self.channel_layer.new_channel() + await self.channel_layer.group_add(group, consumer_channel) + logger.debug(f"Producer {name} added to group {group} and is now awaiting messages.") + + while True: + try: + msg = await asyncio.wait_for(self.channel_layer.receive(consumer_channel), timeout=10) + if not msg.get("needs_relay"): + # This is added in by emit_channel_notification(). It prevents us from looping + # in the event that we are sharing a redis with a web instance. We'll see the + # message once (it'll have needs_relay=True), we'll delete that, and then forward + # the message along. The web instance will add it back to the same channels group, + # but it won't have needs_relay=True, so we'll ignore it. + continue + + # We need to copy the message because we're going to delete the needs_relay key + # and we don't want to modify the original message because other producers may + # still need to act on it. It seems weird, but it's necessary. + msg = dict(msg) + del msg["needs_relay"] + except asyncio.TimeoutError: + current_subscriptions = self.producers[name]["subscriptions"] + if len(current_subscriptions) == 0: + logger.info(f"Producer {name} has no subscribers, shutting down.") + return + + continue + + await websocket.send_json(wrap_broadcast_msg(group, msg)) + except ConnectionResetError: + # This can be hit when a web node is scaling down and we try to write to it. + # There's really nothing to do in this case and it's a fairly typical thing to happen. + # We'll log it as debug, but it's not really a problem. 
+ logger.debug(f"Producer {name} connection reset.") + pass + except Exception: + # Note, this is very intentional and important since we do not otherwise + # ever check the result of this future. Without this line you will not see an error if + # something goes wrong in here. + logger.exception(f"Event relay producer {name} crashed") + finally: + await self.channel_layer.group_discard(group, consumer_channel) + del self.producers[name] + + +class WebSocketRelayManager(object): + def __init__(self): + + self.local_hostname = get_local_host() + self.relay_connections = dict() + # hostname -> ip + self.known_hosts: Dict[str, str] = dict() + + async def pg_consumer(self, conn): + try: + await conn.execute("LISTEN web_heartbeet") + async for notif in conn.notifies(): + if notif is not None and notif.channel == "web_heartbeet": + try: + payload = json.loads(notif.payload) + except json.JSONDecodeError: + logmsg = "Failed to decode message from pg_notify channel `web_heartbeet`" + if logger.isEnabledFor(logging.DEBUG): + logmsg = "{} {}".format(logmsg, payload) + logger.warning(logmsg) + continue + + # Skip if the message comes from the same host we are running on + # In this case, we'll be sharing a redis, no need to relay. + if payload.get("hostname") == self.local_hostname: + continue + + if payload.get("action") == "online": + hostname = payload["hostname"] + ip = payload["ip"] + if ip is None: + # If we don't get an IP, just try the hostname, maybe it resolves + ip = hostname + self.known_hosts[hostname] = ip + logger.debug(f"Web host {hostname} ({ip}) online heartbeat received.") + elif payload.get("action") == "offline": + hostname = payload["hostname"] + del self.known_hosts[hostname] + logger.debug(f"Web host {hostname} ({ip}) offline heartbeat received.") + except Exception as e: + # This catch-all is the same as the one above. asyncio will eat the exception + # but we want to know about it. 
+ logger.exception(f"pg_consumer exception") + + async def run(self): + event_loop = asyncio.get_running_loop() + + stats_mgr = RelayWebsocketStatsManager(event_loop, self.local_hostname) + stats_mgr.start() + + # Set up a pg_notify consumer for allowing web nodes to "provision" and "deprovision" themselves gracefully. + database_conf = settings.DATABASES['default'] + async_conn = await psycopg.AsyncConnection.connect( + dbname=database_conf['NAME'], + host=database_conf['HOST'], + user=database_conf['USER'], + password=database_conf['PASSWORD'], + port=database_conf['PORT'], + **database_conf.get("OPTIONS", {}), + ) + await async_conn.set_autocommit(True) + event_loop.create_task(self.pg_consumer(async_conn)) + + # Establishes a websocket connection to /websocket/relay on all API servers + while True: + # logger.info("Current known hosts: {}".format(self.known_hosts)) + future_remote_hosts = self.known_hosts.keys() + current_remote_hosts = self.relay_connections.keys() + deleted_remote_hosts = set(current_remote_hosts) - set(future_remote_hosts) + new_remote_hosts = set(future_remote_hosts) - set(current_remote_hosts) + + # This loop handles if we get an advertisement from a host we already know about but + # the advertisement has a different IP than we are currently connected to. + for hostname, address in self.known_hosts.items(): + if hostname not in self.relay_connections: + # We've picked up a new hostname that we don't know about yet. 
+ continue + + if address != self.relay_connections[hostname].remote_host: + deleted_remote_hosts.add(hostname) + new_remote_hosts.add(hostname) + + # Delete any hosts with closed connections + for hostname, relay_conn in self.relay_connections.items(): + if not relay_conn.connected: + deleted_remote_hosts.add(hostname) + + if deleted_remote_hosts: + logger.info(f"Removing {deleted_remote_hosts} from websocket broadcast list") + + if new_remote_hosts: + logger.info(f"Adding {new_remote_hosts} to websocket broadcast list") + + for h in deleted_remote_hosts: + self.relay_connections[h].cancel() + del self.relay_connections[h] + del self.known_hosts[h] + stats_mgr.delete_remote_host_stats(h) + + for h in new_remote_hosts: + stats = stats_mgr.new_remote_host_stats(h) + relay_connection = WebsocketRelayConnection(name=self.local_hostname, stats=stats, remote_host=self.known_hosts[h]) + relay_connection.start() + self.relay_connections[h] = relay_connection + + await asyncio.sleep(settings.BROADCAST_WEBSOCKET_NEW_INSTANCE_POLL_RATE_SECONDS) diff --git a/awx/settings/defaults.py b/awx/settings/defaults.py index a1c07524f7..271eb4db8d 100644 --- a/awx/settings/defaults.py +++ b/awx/settings/defaults.py @@ -215,6 +215,9 @@ JOB_EVENT_MAX_QUEUE_SIZE = 10000 # The number of job events to migrate per-transaction when moving from int -> bigint JOB_EVENT_MIGRATION_CHUNK_SIZE = 1000000 +# The prefix of the redis key that stores metrics +SUBSYSTEM_METRICS_REDIS_KEY_PREFIX = "awx_metrics" + # Histogram buckets for the callback_receiver_batch_events_insert_db metric SUBSYSTEM_METRICS_BATCH_INSERT_BUCKETS = [10, 50, 150, 350, 650, 2000] @@ -418,6 +421,9 @@ AUTH_BASIC_ENABLED = True # when trying to access a UI page that requries authentication. LOGIN_REDIRECT_OVERRIDE = '' +# Note: This setting may be overridden by database settings. 
+ALLOW_METRICS_FOR_ANONYMOUS_USERS = False + DEVSERVER_DEFAULT_ADDR = '0.0.0.0' DEVSERVER_DEFAULT_PORT = '8013' @@ -470,21 +476,15 @@ _SOCIAL_AUTH_PIPELINE_BASE = ( 'social_core.pipeline.user.get_username', 'social_core.pipeline.social_auth.associate_by_email', 'social_core.pipeline.user.create_user', - 'awx.sso.pipeline.check_user_found_or_created', + 'awx.sso.social_base_pipeline.check_user_found_or_created', 'social_core.pipeline.social_auth.associate_user', 'social_core.pipeline.social_auth.load_extra_data', - 'awx.sso.pipeline.set_is_active_for_new_user', + 'awx.sso.social_base_pipeline.set_is_active_for_new_user', 'social_core.pipeline.user.user_details', - 'awx.sso.pipeline.prevent_inactive_login', -) -SOCIAL_AUTH_PIPELINE = _SOCIAL_AUTH_PIPELINE_BASE + ('awx.sso.pipeline.update_user_orgs', 'awx.sso.pipeline.update_user_teams') -SOCIAL_AUTH_SAML_PIPELINE = _SOCIAL_AUTH_PIPELINE_BASE + ( - 'awx.sso.pipeline.update_user_orgs_by_saml_attr', - 'awx.sso.pipeline.update_user_teams_by_saml_attr', - 'awx.sso.pipeline.update_user_orgs', - 'awx.sso.pipeline.update_user_teams', - 'awx.sso.pipeline.update_user_flags', + 'awx.sso.social_base_pipeline.prevent_inactive_login', ) +SOCIAL_AUTH_PIPELINE = _SOCIAL_AUTH_PIPELINE_BASE + ('awx.sso.social_pipeline.update_user_orgs', 'awx.sso.social_pipeline.update_user_teams') +SOCIAL_AUTH_SAML_PIPELINE = _SOCIAL_AUTH_PIPELINE_BASE + ('awx.sso.saml_pipeline.populate_user', 'awx.sso.saml_pipeline.update_user_flags') SAML_AUTO_CREATE_OBJECTS = True SOCIAL_AUTH_LOGIN_URL = '/' @@ -844,7 +844,7 @@ LOGGING = { 'awx.main.commands.run_callback_receiver': {'handlers': ['callback_receiver']}, # level handled by dynamic_level_filter 'awx.main.dispatch': {'handlers': ['dispatcher']}, 'awx.main.consumers': {'handlers': ['console', 'file', 'tower_warnings'], 'level': 'INFO'}, - 'awx.main.wsbroadcast': {'handlers': ['wsbroadcast']}, + 'awx.main.wsrelay': {'handlers': ['wsrelay']}, 'awx.main.rsyslog_configurer': {'handlers': 
['rsyslog_configurer']}, 'awx.main.cache_clear': {'handlers': ['cache_clear']}, 'awx.main.commands.inventory_import': {'handlers': ['inventory_import'], 'propagate': False}, @@ -855,7 +855,7 @@ LOGGING = { 'awx.main.signals': {'level': 'INFO'}, # very verbose debug-level logs 'awx.api.permissions': {'level': 'INFO'}, # very verbose debug-level logs 'awx.analytics': {'handlers': ['external_logger'], 'level': 'INFO', 'propagate': False}, - 'awx.analytics.broadcast_websocket': {'handlers': ['console', 'file', 'wsbroadcast', 'external_logger'], 'level': 'INFO', 'propagate': False}, + 'awx.analytics.broadcast_websocket': {'handlers': ['console', 'file', 'wsrelay', 'external_logger'], 'level': 'INFO', 'propagate': False}, 'awx.analytics.performance': {'handlers': ['console', 'file', 'tower_warnings', 'external_logger'], 'level': 'DEBUG', 'propagate': False}, 'awx.analytics.job_lifecycle': {'handlers': ['console', 'job_lifecycle'], 'level': 'DEBUG', 'propagate': False}, 'django_auth_ldap': {'handlers': ['console', 'file', 'tower_warnings'], 'level': 'DEBUG'}, @@ -873,7 +873,7 @@ handler_config = { 'tower_warnings': {'filename': 'tower.log'}, 'callback_receiver': {'filename': 'callback_receiver.log'}, 'dispatcher': {'filename': 'dispatcher.log', 'formatter': 'dispatcher'}, - 'wsbroadcast': {'filename': 'wsbroadcast.log'}, + 'wsrelay': {'filename': 'wsrelay.log'}, 'task_system': {'filename': 'task_system.log'}, 'rbac_migrations': {'filename': 'tower_rbac_migrations.log'}, 'job_lifecycle': {'filename': 'job_lifecycle.log', 'formatter': 'job_lifecycle'}, @@ -984,6 +984,9 @@ BROADCAST_WEBSOCKET_NEW_INSTANCE_POLL_RATE_SECONDS = 10 # How often websocket process will generate stats BROADCAST_WEBSOCKET_STATS_POLL_RATE_SECONDS = 5 +# How often should web instances advertise themselves? 
+BROADCAST_WEBSOCKET_BEACON_FROM_WEB_RATE_SECONDS = 15 + DJANGO_GUID = {'GUID_HEADER_NAME': 'X-API-Request-Id'} # Name of the default task queue diff --git a/awx/sso/backends.py b/awx/sso/backends.py index 5daa621165..06f2a6c671 100644 --- a/awx/sso/backends.py +++ b/awx/sso/backends.py @@ -11,11 +11,9 @@ import ldap # Django from django.dispatch import receiver from django.contrib.auth.models import User -from django.contrib.contenttypes.models import ContentType from django.conf import settings as django_settings from django.core.signals import setting_changed from django.utils.encoding import force_str -from django.db.utils import IntegrityError # django-auth-ldap from django_auth_ldap.backend import LDAPSettings as BaseLDAPSettings @@ -36,6 +34,7 @@ from social_core.backends.saml import SAMLIdentityProvider as BaseSAMLIdentityPr # Ansible Tower from awx.sso.models import UserEnterpriseAuth +from awx.sso.common import create_org_and_teams, reconcile_users_org_team_mappings logger = logging.getLogger('awx.sso.backends') @@ -354,7 +353,9 @@ def _update_m2m_from_groups(ldap_user, opts, remove=True): continue if ldap_user._get_groups().is_member_of(group_dn): return True - return False + if remove: + return False + return None @receiver(populate_user, dispatch_uid='populate-ldap-user') @@ -363,8 +364,6 @@ def on_populate_user(sender, **kwargs): Handle signal from LDAP backend to populate the user object. Update user organization/team memberships according to their LDAP groups. 
""" - from awx.main.models import Organization, Team - user = kwargs['user'] ldap_user = kwargs['ldap_user'] backend = ldap_user.backend @@ -388,43 +387,16 @@ def on_populate_user(sender, **kwargs): org_map = getattr(backend.settings, 'ORGANIZATION_MAP', {}) team_map = getattr(backend.settings, 'TEAM_MAP', {}) - - # Move this junk into save of the settings for performance later, there is no need to do that here - # with maybe the exception of someone defining this in settings before the server is started? - # ============================================================================================================== - - # Get all of the IDs and names of orgs in the DB and create any new org defined in LDAP that does not exist in the DB - existing_orgs = {} - for (org_id, org_name) in Organization.objects.all().values_list('id', 'name'): - existing_orgs[org_name] = org_id - - # Create any orgs (if needed) for all entries in the org and team maps - for org_name in set(list(org_map.keys()) + [item.get('organization', None) for item in team_map.values()]): - if org_name and org_name not in existing_orgs: - logger.info("LDAP adapter is creating org {}".format(org_name)) - try: - new_org = Organization.objects.create(name=org_name) - except IntegrityError: - # Another thread must have created this org before we did so now we need to get it - new_org = Organization.objects.get(name=org_name) - # Add the org name to the existing orgs since we created it and we may need it to build the teams below - existing_orgs[org_name] = new_org.id - - # Do the same for teams - existing_team_names = list(Team.objects.all().values_list('name', flat=True)) + orgs_list = list(org_map.keys()) + team_map = {} for team_name, team_opts in team_map.items(): if not team_opts.get('organization', None): # You can't save the LDAP config in the UI w/o an org (or '' or null as the org) so if we somehow got this condition its an error logger.error("Team named {} in LDAP team map settings is invalid 
due to missing organization".format(team_name)) continue - if team_name not in existing_team_names: - try: - Team.objects.create(name=team_name, organization_id=existing_orgs[team_opts['organization']]) - except IntegrityError: - # If another process got here before us that is ok because we don't need the ID from this team or anything - pass - # End move some day - # ============================================================================================================== + team_map[team_name] = team_opts['organization'] + + create_org_and_teams(orgs_list, team_map, 'LDAP') # Compute in memory what the state is of the different LDAP orgs org_roles_and_ldap_attributes = {'admin_role': 'admins', 'auditor_role': 'auditors', 'member_role': 'users'} @@ -452,7 +424,10 @@ def on_populate_user(sender, **kwargs): remove = bool(team_opts.get('remove', True)) state = _update_m2m_from_groups(ldap_user, users_opts, remove) if state is not None: - desired_team_states[team_name] = {'member_role': state} + organization = team_opts['organization'] + if organization not in desired_team_states: + desired_team_states[organization] = {} + desired_team_states[organization][team_name] = {'member_role': state} # Check if user.profile is available, otherwise force user.save() try: @@ -470,60 +445,3 @@ def on_populate_user(sender, **kwargs): profile.save() reconcile_users_org_team_mappings(user, desired_org_states, desired_team_states, 'LDAP') - - -def reconcile_users_org_team_mappings(user, desired_org_states, desired_team_states, source): - from awx.main.models import Organization, Team - - content_types = [] - reconcile_items = [] - if desired_org_states: - content_types.append(ContentType.objects.get_for_model(Organization)) - reconcile_items.append(('organization', desired_org_states, Organization)) - if desired_team_states: - content_types.append(ContentType.objects.get_for_model(Team)) - reconcile_items.append(('team', desired_team_states, Team)) - - if not content_types: - # If 
both desired states were empty we can simply return because there is nothing to reconcile - return - - # users_roles is a flat set of IDs - users_roles = set(user.roles.filter(content_type__in=content_types).values_list('pk', flat=True)) - - for object_type, desired_states, model in reconcile_items: - # Get all of the roles in the desired states for efficient DB extraction - roles = [] - for sub_dict in desired_states.values(): - for role_name in sub_dict: - if sub_dict[role_name] is None: - continue - if role_name not in roles: - roles.append(role_name) - - # Get a set of named tuples for the org/team name plus all of the roles we got above - model_roles = model.objects.filter(name__in=desired_states.keys()).values_list('name', *roles, named=True) - for row in model_roles: - for role_name in roles: - desired_state = desired_states.get(row.name, {}) - if desired_state[role_name] is None: - # The mapping was not defined for this [org/team]/role so we can just pass - pass - - # If somehow the auth adapter knows about an items role but that role is not defined in the DB we are going to print a pretty error - # This is your classic safety net that we should never hit; but here you are reading this comment... good luck and Godspeed. 
- role_id = getattr(row, role_name, None) - if role_id is None: - logger.error("{} adapter wanted to manage role {} of {} {} but that role is not defined".format(source, role_name, object_type, row.name)) - continue - - if desired_state[role_name]: - # The desired state was the user mapped into the object_type, if the user was not mapped in map them in - if role_id not in users_roles: - logger.debug("{} adapter adding user {} to {} {} as {}".format(source, user.username, object_type, row.name, role_name)) - user.roles.add(role_id) - else: - # The desired state was the user was not mapped into the org, if the user has the permission remove it - if role_id in users_roles: - logger.debug("{} adapter removing user {} permission of {} from {} {}".format(source, user.username, role_name, object_type, row.name)) - user.roles.remove(role_id) diff --git a/awx/sso/common.py b/awx/sso/common.py new file mode 100644 index 0000000000..a80b519f13 --- /dev/null +++ b/awx/sso/common.py @@ -0,0 +1,171 @@ +# Copyright (c) 2022 Ansible, Inc. +# All Rights Reserved. 
+ +import logging + +from django.contrib.contenttypes.models import ContentType +from django.db.utils import IntegrityError +from awx.main.models import Organization, Team + +logger = logging.getLogger('awx.sso.common') + + +def get_orgs_by_ids(): +    existing_orgs = {} +    for (org_id, org_name) in Organization.objects.all().values_list('id', 'name'): +        existing_orgs[org_name] = org_id +    return existing_orgs + + +def reconcile_users_org_team_mappings(user, desired_org_states, desired_team_states, source): +    # +    # Arguments: +    # user - a user object +    # desired_org_states: { '<org name>': { '<role name>': <True/False> or None } } +    # desired_team_states: { '<org name>': { '<team name>': { '<role name>': <True/False> or None } } } +    # source - a text label indicating the "authentication adapter" for debug messages +    # +    # This function will load the users existing roles and then based on the desired states modify the users roles +    # True indicates the user needs to be a member of the role +    # False indicates the user should not be a member of the role +    # None means this function should not change the users membership of a role +    # + +    content_types = [] +    reconcile_items = [] +    if desired_org_states: +        content_types.append(ContentType.objects.get_for_model(Organization)) +        reconcile_items.append(('organization', desired_org_states)) +    if desired_team_states: +        content_types.append(ContentType.objects.get_for_model(Team)) +        reconcile_items.append(('team', desired_team_states)) + +    if not content_types: +        # If both desired states were empty we can simply return because there is nothing to reconcile +        return + +    # users_roles is a flat set of IDs +    users_roles = set(user.roles.filter(content_type__in=content_types).values_list('pk', flat=True)) + +    for object_type, desired_states in reconcile_items: +        roles = [] +        # Get a set of named tuples for the org/team name plus all of the roles we got above +        if object_type == 'organization': +            for sub_dict in desired_states.values(): +                for role_name in sub_dict: +                    if sub_dict[role_name] is None: +                        continue + 
if role_name not in roles: + roles.append(role_name) + model_roles = Organization.objects.filter(name__in=desired_states.keys()).values_list('name', *roles, named=True) + else: + team_names = [] + for teams_dict in desired_states.values(): + team_names.extend(teams_dict.keys()) + for sub_dict in teams_dict.values(): + for role_name in sub_dict: + if sub_dict[role_name] is None: + continue + if role_name not in roles: + roles.append(role_name) + model_roles = Team.objects.filter(name__in=team_names).values_list('name', 'organization__name', *roles, named=True) + + for row in model_roles: + for role_name in roles: + if object_type == 'organization': + desired_state = desired_states.get(row.name, {}) + else: + desired_state = desired_states.get(row.organization__name, {}).get(row.name, {}) + + if desired_state.get(role_name, None) is None: + # The mapping was not defined for this [org/team]/role so we can just pass + continue + + # If somehow the auth adapter knows about an items role but that role is not defined in the DB we are going to print a pretty error + # This is your classic safety net that we should never hit; but here you are reading this comment... good luck and Godspeed. 
+ role_id = getattr(row, role_name, None) + if role_id is None: + logger.error("{} adapter wanted to manage role {} of {} {} but that role is not defined".format(source, role_name, object_type, row.name)) + continue + + if desired_state[role_name]: + # The desired state was the user mapped into the object_type, if the user was not mapped in map them in + if role_id not in users_roles: + logger.debug("{} adapter adding user {} to {} {} as {}".format(source, user.username, object_type, row.name, role_name)) + user.roles.add(role_id) + else: + # The desired state was the user was not mapped into the org, if the user has the permission remove it + if role_id in users_roles: + logger.debug("{} adapter removing user {} permission of {} from {} {}".format(source, user.username, role_name, object_type, row.name)) + user.roles.remove(role_id) + + +def create_org_and_teams(org_list, team_map, adapter, can_create=True): + # + # org_list is a set of organization names + # team_map is a dict of {: } + # + # Move this junk into save of the settings for performance later, there is no need to do that here + # with maybe the exception of someone defining this in settings before the server is started? 
+ # ============================================================================================================== + + if not can_create: + logger.debug(f"Adapter {adapter} is not allowed to create orgs/teams") + return + + # Get all of the IDs and names of orgs in the DB and create any new org defined in LDAP that does not exist in the DB + existing_orgs = get_orgs_by_ids() + + # Parse through orgs and teams provided and create a list of unique items we care about creating + all_orgs = list(set(org_list)) + all_teams = [] + for team_name in team_map: + org_name = team_map[team_name] + if org_name: + if org_name not in all_orgs: + all_orgs.append(org_name) + # We don't have to test if this is in all_teams because team_map is already a hash + all_teams.append(team_name) + else: + # The UI should prevent this condition so this is just a double check to prevent a stack trace.... + # although the rest of the login process might stack later on + logger.error("{} adapter is attempting to create a team {} but it does not have an org".format(adapter, team_name)) + + for org_name in all_orgs: + if org_name and org_name not in existing_orgs: + logger.info("{} adapter is creating org {}".format(adapter, org_name)) + try: + new_org = get_or_create_org_with_default_galaxy_cred(name=org_name) + except IntegrityError: + # Another thread must have created this org before we did so now we need to get it + new_org = get_or_create_org_with_default_galaxy_cred(name=org_name) + # Add the org name to the existing orgs since we created it and we may need it to build the teams below + existing_orgs[org_name] = new_org.id + + # Do the same for teams + existing_team_names = list(Team.objects.all().values_list('name', flat=True)) + for team_name in all_teams: + if team_name not in existing_team_names: + logger.info("{} adapter is creating team {} in org {}".format(adapter, team_name, team_map[team_name])) + try: + Team.objects.create(name=team_name, 
organization_id=existing_orgs[team_map[team_name]]) + except IntegrityError: + # If another process got here before us that is ok because we don't need the ID from this team or anything + pass + # End move some day + # ============================================================================================================== + + +def get_or_create_org_with_default_galaxy_cred(**kwargs): + from awx.main.models import Organization, Credential + + (org, org_created) = Organization.objects.get_or_create(**kwargs) + if org_created: + logger.debug("Created org {} (id {}) from {}".format(org.name, org.id, kwargs)) + public_galaxy_credential = Credential.objects.filter(managed=True, name='Ansible Galaxy').first() + if public_galaxy_credential is not None: + org.galaxy_credentials.add(public_galaxy_credential) + logger.debug("Added default Ansible Galaxy credential to org") + else: + logger.debug("Could not find default Ansible Galaxy credential to add to org") + return org diff --git a/awx/sso/pipeline.py b/awx/sso/saml_pipeline.py similarity index 51% rename from awx/sso/pipeline.py rename to awx/sso/saml_pipeline.py index 348396b1c9..a0060e13e8 100644 --- a/awx/sso/pipeline.py +++ b/awx/sso/saml_pipeline.py @@ -5,59 +5,43 @@ import re import logging - -# Python Social Auth -from social_core.exceptions import AuthException - # Django -from django.core.exceptions import ObjectDoesNotExist -from django.utils.translation import gettext_lazy as _ -from django.db.models import Q +from django.conf import settings + +from awx.main.models import Team +from awx.sso.common import create_org_and_teams, reconcile_users_org_team_mappings, get_orgs_by_ids + +logger = logging.getLogger('awx.sso.saml_pipeline') -logger = logging.getLogger('awx.sso.pipeline') - - -class AuthNotFound(AuthException): - def __init__(self, backend, email_or_uid, *args, **kwargs): - self.email_or_uid = email_or_uid - super(AuthNotFound, self).__init__(backend, *args, **kwargs) - - def __str__(self): - 
return _('An account cannot be found for {0}').format(self.email_or_uid) - - -class AuthInactive(AuthException): - def __str__(self): - return _('Your account is inactive') - - -def check_user_found_or_created(backend, details, user=None, *args, **kwargs): +def populate_user(backend, details, user=None, *args, **kwargs): if not user: - email_or_uid = details.get('email') or kwargs.get('email') or kwargs.get('uid') or '???' - raise AuthNotFound(backend, email_or_uid) + return + + # Build the in-memory settings for how this user should be modeled + desired_org_state = {} + desired_team_state = {} + orgs_to_create = [] + teams_to_create = {} + _update_user_orgs_by_saml_attr(backend, desired_org_state, orgs_to_create, **kwargs) + _update_user_teams_by_saml_attr(desired_team_state, teams_to_create, **kwargs) + _update_user_orgs(backend, desired_org_state, orgs_to_create, user) + _update_user_teams(backend, desired_team_state, teams_to_create, user) + + # If the SAML adapter is allowed to create objects, lets do that first + create_org_and_teams(orgs_to_create, teams_to_create, 'SAML', settings.SAML_AUTO_CREATE_OBJECTS) + + # Finally reconcile the user + reconcile_users_org_team_mappings(user, desired_org_state, desired_team_state, 'SAML') -def set_is_active_for_new_user(strategy, details, user=None, *args, **kwargs): - if kwargs.get('is_new', False): - details['is_active'] = True - return {'details': details} - - -def prevent_inactive_login(backend, details, user=None, *args, **kwargs): - if user and not user.is_active: - raise AuthInactive(backend) - - -def _update_m2m_from_expression(user, related, expr, remove=True): +def _update_m2m_from_expression(user, expr, remove=True): """ Helper function to update m2m relationship based on user matching one or more expressions. 
""" should_add = False - if expr is None: - return - elif not expr: + if expr is None or not expr: pass elif expr is True: should_add = True @@ -72,70 +56,18 @@ def _update_m2m_from_expression(user, related, expr, remove=True): if ex.match(user.username) or ex.match(user.email): should_add = True if should_add: - related.add(user) + return True elif remove: - related.remove(user) + return False + else: + return None -def get_or_create_with_default_galaxy_cred(**kwargs): - from awx.main.models import Organization, Credential - - (org, org_created) = Organization.objects.get_or_create(**kwargs) - if org_created: - logger.debug("Created org {} (id {}) from {}".format(org.name, org.id, kwargs)) - public_galaxy_credential = Credential.objects.filter(managed=True, name='Ansible Galaxy').first() - if public_galaxy_credential is not None: - org.galaxy_credentials.add(public_galaxy_credential) - logger.debug("Added default Ansible Galaxy credential to org") - else: - logger.debug("Could not find default Ansible Galaxy credential to add to org") - return org - - -def _update_org_from_attr(user, related, attr, remove, remove_admins, remove_auditors, backend): - from awx.main.models import Organization - from django.conf import settings - - org_ids = [] - - for org_name in attr: - try: - if settings.SAML_AUTO_CREATE_OBJECTS: - try: - organization_alias = backend.setting('ORGANIZATION_MAP').get(org_name).get('organization_alias') - if organization_alias is not None: - organization_name = organization_alias - else: - organization_name = org_name - except Exception: - organization_name = org_name - org = get_or_create_with_default_galaxy_cred(name=organization_name) - else: - org = Organization.objects.get(name=org_name) - except ObjectDoesNotExist: - continue - - org_ids.append(org.id) - getattr(org, related).members.add(user) - - if remove: - [o.member_role.members.remove(user) for o in Organization.objects.filter(Q(member_role__members=user) & ~Q(id__in=org_ids))] - - if 
remove_admins: - [o.admin_role.members.remove(user) for o in Organization.objects.filter(Q(admin_role__members=user) & ~Q(id__in=org_ids))] - - if remove_auditors: - [o.auditor_role.members.remove(user) for o in Organization.objects.filter(Q(auditor_role__members=user) & ~Q(id__in=org_ids))] - - -def update_user_orgs(backend, details, user=None, *args, **kwargs): +def _update_user_orgs(backend, desired_org_state, orgs_to_create, user=None): """ Update organization memberships for the given user based on mapping rules defined in settings. """ - if not user: - return - org_map = backend.setting('ORGANIZATION_MAP') or {} for org_name, org_opts in org_map.items(): organization_alias = org_opts.get('organization_alias') @@ -143,78 +75,108 @@ def update_user_orgs(backend, details, user=None, *args, **kwargs): organization_name = organization_alias else: organization_name = org_name - org = get_or_create_with_default_galaxy_cred(name=organization_name) + if organization_name not in orgs_to_create: + orgs_to_create.append(organization_name) - # Update org admins from expression(s). remove = bool(org_opts.get('remove', True)) - admins_expr = org_opts.get('admins', None) - remove_admins = bool(org_opts.get('remove_admins', remove)) - _update_m2m_from_expression(user, org.admin_role.members, admins_expr, remove_admins) - # Update org users from expression(s). 
- users_expr = org_opts.get('users', None) - remove_users = bool(org_opts.get('remove_users', remove)) - _update_m2m_from_expression(user, org.member_role.members, users_expr, remove_users) + if organization_name not in desired_org_state: + desired_org_state[organization_name] = {} + + for role_name, user_type in (('admin_role', 'admins'), ('member_role', 'users'), ('auditor_role', 'auditors')): + is_member_expression = org_opts.get(user_type, None) + remove_members = bool(org_opts.get('remove_{}'.format(user_type), remove)) + has_role = _update_m2m_from_expression(user, is_member_expression, remove_members) + desired_org_state[organization_name][role_name] = has_role -def update_user_teams(backend, details, user=None, *args, **kwargs): +def _update_user_teams(backend, desired_team_state, teams_to_create, user=None): """ Update team memberships for the given user based on mapping rules defined in settings. """ - if not user: - return - from awx.main.models import Team team_map = backend.setting('TEAM_MAP') or {} for team_name, team_opts in team_map.items(): # Get or create the org to update. if 'organization' not in team_opts: continue - org = get_or_create_with_default_galaxy_cred(name=team_opts['organization']) - - # Update team members from expression(s). 
- team = Team.objects.get_or_create(name=team_name, organization=org)[0] + teams_to_create[team_name] = team_opts['organization'] users_expr = team_opts.get('users', None) remove = bool(team_opts.get('remove', True)) - _update_m2m_from_expression(user, team.member_role.members, users_expr, remove) + add_or_remove = _update_m2m_from_expression(user, users_expr, remove) + if add_or_remove is not None: + org_name = team_opts['organization'] + if org_name not in desired_team_state: + desired_team_state[org_name] = {} + desired_team_state[org_name][team_name] = {'member_role': add_or_remove} -def update_user_orgs_by_saml_attr(backend, details, user=None, *args, **kwargs): - if not user: - return - from django.conf import settings - +def _update_user_orgs_by_saml_attr(backend, desired_org_state, orgs_to_create, **kwargs): org_map = settings.SOCIAL_AUTH_SAML_ORGANIZATION_ATTR - if org_map.get('saml_attr') is None and org_map.get('saml_admin_attr') is None and org_map.get('saml_auditor_attr') is None: - return + roles_and_flags = ( + ('member_role', 'remove', 'saml_attr'), + ('admin_role', 'remove_admins', 'saml_admin_attr'), + ('auditor_role', 'remove_auditors', 'saml_auditor_attr'), + ) - remove = bool(org_map.get('remove', True)) - remove_admins = bool(org_map.get('remove_admins', True)) - remove_auditors = bool(org_map.get('remove_auditors', True)) + # If the remove_flag was present we need to load all of the orgs and remove the user from the role + all_orgs = None + for role, remove_flag, _ in roles_and_flags: + remove = bool(org_map.get(remove_flag, True)) + if remove: + # Only get the all orgs once, and only if needed + if all_orgs is None: + all_orgs = get_orgs_by_ids() + for org_name in all_orgs.keys(): + if org_name not in desired_org_state: + desired_org_state[org_name] = {} + desired_org_state[org_name][role] = False - attr_values = kwargs.get('response', {}).get('attributes', {}).get(org_map.get('saml_attr'), []) - attr_admin_values = kwargs.get('response', 
{}).get('attributes', {}).get(org_map.get('saml_admin_attr'), []) - attr_auditor_values = kwargs.get('response', {}).get('attributes', {}).get(org_map.get('saml_auditor_attr'), []) - - _update_org_from_attr(user, "member_role", attr_values, remove, False, False, backend) - _update_org_from_attr(user, "admin_role", attr_admin_values, False, remove_admins, False, backend) - _update_org_from_attr(user, "auditor_role", attr_auditor_values, False, False, remove_auditors, backend) + # Now we can add the user as a member/admin/auditor for any orgs they have specified + for role, _, attr_flag in roles_and_flags: + if org_map.get(attr_flag) is None: + continue + saml_attr_values = kwargs.get('response', {}).get('attributes', {}).get(org_map.get(attr_flag), []) + for org_name in saml_attr_values: + try: + organization_alias = backend.setting('ORGANIZATION_MAP').get(org_name).get('organization_alias') + if organization_alias is not None: + organization_name = organization_alias + else: + organization_name = org_name + except Exception: + organization_name = org_name + if organization_name not in orgs_to_create: + orgs_to_create.append(organization_name) + if organization_name not in desired_org_state: + desired_org_state[organization_name] = {} + desired_org_state[organization_name][role] = True -def update_user_teams_by_saml_attr(backend, details, user=None, *args, **kwargs): - if not user: - return - from awx.main.models import Organization, Team - from django.conf import settings - +def _update_user_teams_by_saml_attr(desired_team_state, teams_to_create, **kwargs): + # + # Map users into organizations based on SOCIAL_AUTH_SAML_TEAM_ATTR setting + # team_map = settings.SOCIAL_AUTH_SAML_TEAM_ATTR if team_map.get('saml_attr') is None: return + all_teams = None + # The role and flag is hard coded here but intended to be flexible in case we ever wanted to add another team type + for role, remove_flag in [('member_role', 'remove')]: + remove = bool(team_map.get(remove_flag, 
True)) + if remove: + # Only get the all orgs once, and only if needed + if all_teams is None: + all_teams = Team.objects.all().values_list('name', 'organization__name') + for (team_name, organization_name) in all_teams: + if organization_name not in desired_team_state: + desired_team_state[organization_name] = {} + desired_team_state[organization_name][team_name] = {role: False} + saml_team_names = set(kwargs.get('response', {}).get('attributes', {}).get(team_map['saml_attr'], [])) - team_ids = [] for team_name_map in team_map.get('team_org_map', []): team_name = team_name_map.get('team', None) team_alias = team_name_map.get('team_alias', None) @@ -225,29 +187,17 @@ def update_user_teams_by_saml_attr(backend, details, user=None, *args, **kwargs) logger.error("organization name invalid for team {}".format(team_name)) continue - try: - if settings.SAML_AUTO_CREATE_OBJECTS: - org = get_or_create_with_default_galaxy_cred(name=organization_name) - else: - org = Organization.objects.get(name=organization_name) - except ObjectDoesNotExist: - continue - if team_alias: team_name = team_alias - try: - if settings.SAML_AUTO_CREATE_OBJECTS: - team = Team.objects.get_or_create(name=team_name, organization=org)[0] - else: - team = Team.objects.get(name=team_name, organization=org) - except ObjectDoesNotExist: - continue - team_ids.append(team.id) - team.member_role.members.add(user) + teams_to_create[team_name] = organization_name + user_is_member_of_team = True + else: + user_is_member_of_team = False - if team_map.get('remove', True): - [t.member_role.members.remove(user) for t in Team.objects.filter(Q(member_role__members=user) & ~Q(id__in=team_ids))] + if organization_name not in desired_team_state: + desired_team_state[organization_name] = {} + desired_team_state[organization_name][team_name] = {'member_role': user_is_member_of_team} def _get_matches(list1, list2): @@ -329,11 +279,6 @@ def _check_flag(user, flag, attributes, user_flags_settings): def 
update_user_flags(backend, details, user=None, *args, **kwargs): - if not user: - return - - from django.conf import settings - user_flags_settings = settings.SOCIAL_AUTH_SAML_USER_FLAGS_BY_ATTR attributes = kwargs.get('response', {}).get('attributes', {}) diff --git a/awx/sso/social_base_pipeline.py b/awx/sso/social_base_pipeline.py new file mode 100644 index 0000000000..ccdaf1d200 --- /dev/null +++ b/awx/sso/social_base_pipeline.py @@ -0,0 +1,39 @@ +# Copyright (c) 2015 Ansible, Inc. +# All Rights Reserved. + +# Python Social Auth +from social_core.exceptions import AuthException + +# Django +from django.utils.translation import gettext_lazy as _ + + +class AuthNotFound(AuthException): + def __init__(self, backend, email_or_uid, *args, **kwargs): + self.email_or_uid = email_or_uid + super(AuthNotFound, self).__init__(backend, *args, **kwargs) + + def __str__(self): + return _('An account cannot be found for {0}').format(self.email_or_uid) + + +class AuthInactive(AuthException): + def __str__(self): + return _('Your account is inactive') + + +def check_user_found_or_created(backend, details, user=None, *args, **kwargs): + if not user: + email_or_uid = details.get('email') or kwargs.get('email') or kwargs.get('uid') or '???' + raise AuthNotFound(backend, email_or_uid) + + +def set_is_active_for_new_user(strategy, details, user=None, *args, **kwargs): + if kwargs.get('is_new', False): + details['is_active'] = True + return {'details': details} + + +def prevent_inactive_login(backend, details, user=None, *args, **kwargs): + if user and not user.is_active: + raise AuthInactive(backend) diff --git a/awx/sso/social_pipeline.py b/awx/sso/social_pipeline.py new file mode 100644 index 0000000000..b4fb4c1fe3 --- /dev/null +++ b/awx/sso/social_pipeline.py @@ -0,0 +1,90 @@ +# Copyright (c) 2015 Ansible, Inc. +# All Rights Reserved. 
+ +# Python +import re +import logging + +from awx.sso.common import get_or_create_org_with_default_galaxy_cred + +logger = logging.getLogger('awx.sso.social_pipeline') + + +def _update_m2m_from_expression(user, related, expr, remove=True): + """ + Helper function to update m2m relationship based on user matching one or + more expressions. + """ + should_add = False + if expr is None: + return + elif not expr: + pass + elif expr is True: + should_add = True + else: + if isinstance(expr, (str, type(re.compile('')))): + expr = [expr] + for ex in expr: + if isinstance(ex, str): + if user.username == ex or user.email == ex: + should_add = True + elif isinstance(ex, type(re.compile(''))): + if ex.match(user.username) or ex.match(user.email): + should_add = True + if should_add: + related.add(user) + elif remove: + related.remove(user) + + +def update_user_orgs(backend, details, user=None, *args, **kwargs): + """ + Update organization memberships for the given user based on mapping rules + defined in settings. + """ + if not user: + return + + org_map = backend.setting('ORGANIZATION_MAP') or {} + for org_name, org_opts in org_map.items(): + organization_alias = org_opts.get('organization_alias') + if organization_alias: + organization_name = organization_alias + else: + organization_name = org_name + org = get_or_create_org_with_default_galaxy_cred(name=organization_name) + + # Update org admins from expression(s). + remove = bool(org_opts.get('remove', True)) + admins_expr = org_opts.get('admins', None) + remove_admins = bool(org_opts.get('remove_admins', remove)) + _update_m2m_from_expression(user, org.admin_role.members, admins_expr, remove_admins) + + # Update org users from expression(s). 
+ users_expr = org_opts.get('users', None) + remove_users = bool(org_opts.get('remove_users', remove)) + _update_m2m_from_expression(user, org.member_role.members, users_expr, remove_users) + + +def update_user_teams(backend, details, user=None, *args, **kwargs): + """ + Update team memberships for the given user based on mapping rules defined + in settings. + """ + if not user: + return + from awx.main.models import Team + + team_map = backend.setting('TEAM_MAP') or {} + for team_name, team_opts in team_map.items(): + # Get or create the org to update. + if 'organization' not in team_opts: + continue + org = get_or_create_org_with_default_galaxy_cred(name=team_opts['organization']) + + # Update team members from expression(s). + team = Team.objects.get_or_create(name=team_name, organization=org)[0] + users_expr = team_opts.get('users', None) + remove = bool(team_opts.get('remove', True)) + _update_m2m_from_expression(user, team.member_role.members, users_expr, remove) diff --git a/awx/sso/tests/functional/test_common.py b/awx/sso/tests/functional/test_common.py new file mode 100644 index 0000000000..4fc3edd841 --- /dev/null +++ b/awx/sso/tests/functional/test_common.py @@ -0,0 +1,280 @@ +import pytest +from collections import Counter +from django.core.exceptions import FieldError +from django.utils.timezone import now + +from awx.main.models import Credential, CredentialType, Organization, Team, User +from awx.sso.common import get_orgs_by_ids, reconcile_users_org_team_mappings, create_org_and_teams, get_or_create_org_with_default_galaxy_cred + + +@pytest.mark.django_db +class TestCommonFunctions: + @pytest.fixture + def orgs(self): + o1 = Organization.objects.create(name='Default1') + o2 = Organization.objects.create(name='Default2') + o3 = Organization.objects.create(name='Default3') + return (o1, o2, o3) + + @pytest.fixture + def galaxy_credential(self): + galaxy_type = CredentialType.objects.create(kind='galaxy') + cred = Credential( + created=now(), 
modified=now(), name='Ansible Galaxy', managed=True, credential_type=galaxy_type, inputs={'url': 'https://galaxy.ansible.com/'} + ) + cred.save() + + def test_get_orgs_by_ids(self, orgs): + orgs_and_ids = get_orgs_by_ids() + o1, o2, o3 = orgs + assert Counter(orgs_and_ids.keys()) == Counter([o1.name, o2.name, o3.name]) + assert Counter(orgs_and_ids.values()) == Counter([o1.id, o2.id, o3.id]) + + def test_reconcile_users_org_team_mappings(self): + # Create objects for us to play with + user = User.objects.create(username='user1@foo.com', last_name='foo', first_name='bar', email='user1@foo.com', is_active=True) + org1 = Organization.objects.create(name='Default1') + org2 = Organization.objects.create(name='Default2') + team1 = Team.objects.create(name='Team1', organization=org1) + team2 = Team.objects.create(name='Team1', organization=org2) + + # Try adding nothing + reconcile_users_org_team_mappings(user, {}, {}, 'Nada') + assert list(user.roles.all()) == [] + + # Add a user to an org that does not exist (should have no affect) + reconcile_users_org_team_mappings( + user, + { + 'junk': {'member_role': True}, + }, + {}, + 'Nada', + ) + assert list(user.roles.all()) == [] + + # Remove a user to an org that does not exist (should have no affect) + reconcile_users_org_team_mappings( + user, + { + 'junk': {'member_role': False}, + }, + {}, + 'Nada', + ) + assert list(user.roles.all()) == [] + + # Add the user to the orgs + reconcile_users_org_team_mappings(user, {org1.name: {'member_role': True}, org2.name: {'member_role': True}}, {}, 'Nada') + assert len(user.roles.all()) == 2 + assert user in org1.member_role + assert user in org2.member_role + + # Remove the user from the orgs + reconcile_users_org_team_mappings(user, {org1.name: {'member_role': False}, org2.name: {'member_role': False}}, {}, 'Nada') + assert list(user.roles.all()) == [] + assert user not in org1.member_role + assert user not in org2.member_role + + # Remove the user from the orgs (again, should have 
no affect) + reconcile_users_org_team_mappings(user, {org1.name: {'member_role': False}, org2.name: {'member_role': False}}, {}, 'Nada') + assert list(user.roles.all()) == [] + assert user not in org1.member_role + assert user not in org2.member_role + + # Add a user back to the member role + reconcile_users_org_team_mappings( + user, + { + org1.name: { + 'member_role': True, + }, + }, + {}, + 'Nada', + ) + users_roles = set(user.roles.values_list('pk', flat=True)) + assert len(users_roles) == 1 + assert user in org1.member_role + + # Add the user to additional roles + reconcile_users_org_team_mappings( + user, + { + org1.name: {'admin_role': True, 'auditor_role': True}, + }, + {}, + 'Nada', + ) + assert len(user.roles.all()) == 3 + assert user in org1.member_role + assert user in org1.admin_role + assert user in org1.auditor_role + + # Add a user to a non-existent role (results in FieldError exception) + with pytest.raises(FieldError): + reconcile_users_org_team_mappings( + user, + { + org1.name: { + 'dne_role': True, + }, + }, + {}, + 'Nada', + ) + + # Try adding a user to a role that should not exist on an org (technically this works at this time) + reconcile_users_org_team_mappings( + user, + { + org1.name: { + 'read_role_id': True, + }, + }, + {}, + 'Nada', + ) + assert len(user.roles.all()) == 4 + assert user in org1.member_role + assert user in org1.admin_role + assert user in org1.auditor_role + + # Remove all of the org perms to test team perms + reconcile_users_org_team_mappings( + user, + { + org1.name: { + 'read_role_id': False, + 'member_role': False, + 'admin_role': False, + 'auditor_role': False, + }, + }, + {}, + 'Nada', + ) + assert list(user.roles.all()) == [] + + # Add the user as a member to one of the teams + reconcile_users_org_team_mappings(user, {}, {org1.name: {team1.name: {'member_role': True}}}, 'Nada') + assert len(user.roles.all()) == 1 + assert user in team1.member_role + # Validate that the user did not become a member of a team with 
the same name in a different org + assert user not in team2.member_role + + # Remove the user from the team + reconcile_users_org_team_mappings(user, {}, {org1.name: {team1.name: {'member_role': False}}}, 'Nada') + assert list(user.roles.all()) == [] + assert user not in team1.member_role + + # Remove the user from the team again + reconcile_users_org_team_mappings(user, {}, {org1.name: {team1.name: {'member_role': False}}}, 'Nada') + assert list(user.roles.all()) == [] + + # Add the user to a team that does not exist (should have no affect) + reconcile_users_org_team_mappings(user, {}, {org1.name: {'junk': {'member_role': True}}}, 'Nada') + assert list(user.roles.all()) == [] + + # Remove the user from a team that does not exist (should have no affect) + reconcile_users_org_team_mappings(user, {}, {org1.name: {'junk': {'member_role': False}}}, 'Nada') + assert list(user.roles.all()) == [] + + # Test a None setting + reconcile_users_org_team_mappings(user, {}, {org1.name: {'junk': {'member_role': None}}}, 'Nada') + assert list(user.roles.all()) == [] + + # Add the user multiple teams in different orgs + reconcile_users_org_team_mappings(user, {}, {org1.name: {team1.name: {'member_role': True}}, org2.name: {team2.name: {'member_role': True}}}, 'Nada') + assert len(user.roles.all()) == 2 + assert user in team1.member_role + assert user in team2.member_role + + # Remove the user from just one of the teams + reconcile_users_org_team_mappings(user, {}, {org2.name: {team2.name: {'member_role': False}}}, 'Nada') + assert len(user.roles.all()) == 1 + assert user in team1.member_role + assert user not in team2.member_role + + @pytest.mark.parametrize( + "org_list, team_map, can_create, org_count, team_count", + [ + # In this case we will only pass in organizations + ( + ["org1", "org2"], + {}, + True, + 2, + 0, + ), + # In this case we will only pass in teams but the orgs will be created from the teams + ( + [], + {"team1": "org1", "team2": "org2"}, + True, + 2, + 2, + ), + 
# In this case we will reuse an org + ( + ["org1"], + {"team1": "org1", "team2": "org1"}, + True, + 1, + 2, + ), + # In this case we have a combination of orgs, orgs reused and an org created by a team + ( + ["org1", "org2", "org3"], + {"team1": "org1", "team2": "org4"}, + True, + 4, + 2, + ), + # In this case we will test a case that the UI should prevent and have a team with no Org + # This should create org1/2 but only team1 + ( + ["org1"], + {"team1": "org2", "team2": None}, + True, + 2, + 1, + ), + # Block any creation with the can_create flag + ( + ["org1"], + {"team1": "org2", "team2": None}, + False, + 0, + 0, + ), + ], + ) + def test_create_org_and_teams(self, galaxy_credential, org_list, team_map, can_create, org_count, team_count): + create_org_and_teams(org_list, team_map, 'py.test', can_create=can_create) + assert Organization.objects.count() == org_count + assert Team.objects.count() == team_count + + def test_get_or_create_org_with_default_galaxy_cred_add_galaxy_cred(self, galaxy_credential): + # If this method creates the org it should get the default galaxy credential + num_orgs = 4 + for number in range(1, (num_orgs + 1)): + get_or_create_org_with_default_galaxy_cred(name=f"Default {number}") + + assert Organization.objects.count() == 4 + + for o in Organization.objects.all(): + assert o.galaxy_credentials.count() == 1 + assert o.galaxy_credentials.first().name == 'Ansible Galaxy' + + def test_get_or_create_org_with_default_galaxy_cred_no_galaxy_cred(self, galaxy_credential): + # If the org is pre-created, we should not add the galaxy_credential + num_orgs = 4 + for number in range(1, (num_orgs + 1)): + Organization.objects.create(name=f"Default {number}") + get_or_create_org_with_default_galaxy_cred(name=f"Default {number}") + + assert Organization.objects.count() == 4 + + for o in Organization.objects.all(): + assert o.galaxy_credentials.count() == 0 diff --git a/awx/sso/tests/functional/test_pipeline.py 
b/awx/sso/tests/functional/test_pipeline.py deleted file mode 100644 index 6bf034b68a..0000000000 --- a/awx/sso/tests/functional/test_pipeline.py +++ /dev/null @@ -1,566 +0,0 @@ -import pytest -import re -from unittest import mock - -from django.utils.timezone import now - -from awx.conf.registry import settings_registry -from awx.sso.pipeline import update_user_orgs, update_user_teams, update_user_orgs_by_saml_attr, update_user_teams_by_saml_attr, _check_flag -from awx.main.models import User, Team, Organization, Credential, CredentialType - - -@pytest.fixture -def galaxy_credential(): - galaxy_type = CredentialType.objects.create(kind='galaxy') - cred = Credential( - created=now(), modified=now(), name='Ansible Galaxy', managed=True, credential_type=galaxy_type, inputs={'url': 'https://galaxy.ansible.com/'} - ) - cred.save() - - -@pytest.fixture -def users(): - u1 = User.objects.create(username='user1@foo.com', last_name='foo', first_name='bar', email='user1@foo.com') - u2 = User.objects.create(username='user2@foo.com', last_name='foo', first_name='bar', email='user2@foo.com') - u3 = User.objects.create(username='user3@foo.com', last_name='foo', first_name='bar', email='user3@foo.com') - return (u1, u2, u3) - - -@pytest.mark.django_db -class TestSAMLMap: - @pytest.fixture - def backend(self): - class Backend: - s = { - 'ORGANIZATION_MAP': { - 'Default': { - 'remove': True, - 'admins': 'foobar', - 'remove_admins': True, - 'users': 'foo', - 'remove_users': True, - 'organization_alias': '', - } - }, - 'TEAM_MAP': {'Blue': {'organization': 'Default', 'remove': True, 'users': ''}, 'Red': {'organization': 'Default', 'remove': True, 'users': ''}}, - } - - def setting(self, key): - return self.s[key] - - return Backend() - - @pytest.fixture - def org(self): - return Organization.objects.create(name="Default") - - def test_update_user_orgs(self, org, backend, users, galaxy_credential): - u1, u2, u3 = users - - # Test user membership logic with regular expressions - 
backend.setting('ORGANIZATION_MAP')['Default']['admins'] = re.compile('.*') - backend.setting('ORGANIZATION_MAP')['Default']['users'] = re.compile('.*') - - update_user_orgs(backend, None, u1) - update_user_orgs(backend, None, u2) - update_user_orgs(backend, None, u3) - - assert org.admin_role.members.count() == 3 - assert org.member_role.members.count() == 3 - - # Test remove feature enabled - backend.setting('ORGANIZATION_MAP')['Default']['admins'] = '' - backend.setting('ORGANIZATION_MAP')['Default']['users'] = '' - backend.setting('ORGANIZATION_MAP')['Default']['remove_admins'] = True - backend.setting('ORGANIZATION_MAP')['Default']['remove_users'] = True - update_user_orgs(backend, None, u1) - - assert org.admin_role.members.count() == 2 - assert org.member_role.members.count() == 2 - - # Test remove feature disabled - backend.setting('ORGANIZATION_MAP')['Default']['remove_admins'] = False - backend.setting('ORGANIZATION_MAP')['Default']['remove_users'] = False - update_user_orgs(backend, None, u2) - - assert org.admin_role.members.count() == 2 - assert org.member_role.members.count() == 2 - - # Test organization alias feature - backend.setting('ORGANIZATION_MAP')['Default']['organization_alias'] = 'Default_Alias' - update_user_orgs(backend, None, u1) - assert Organization.objects.get(name="Default_Alias") is not None - - for o in Organization.objects.all(): - if o.name == 'Default': - # The default org was already created and should not have a galaxy credential - assert o.galaxy_credentials.count() == 0 - else: - # The Default_Alias was created by SAML and should get the galaxy credential - assert o.galaxy_credentials.count() == 1 - assert o.galaxy_credentials.first().name == 'Ansible Galaxy' - - def test_update_user_teams(self, backend, users, galaxy_credential): - u1, u2, u3 = users - - # Test user membership logic with regular expressions - backend.setting('TEAM_MAP')['Blue']['users'] = re.compile('.*') - backend.setting('TEAM_MAP')['Red']['users'] = 
re.compile('.*') - - update_user_teams(backend, None, u1) - update_user_teams(backend, None, u2) - update_user_teams(backend, None, u3) - - assert Team.objects.get(name="Red").member_role.members.count() == 3 - assert Team.objects.get(name="Blue").member_role.members.count() == 3 - - # Test remove feature enabled - backend.setting('TEAM_MAP')['Blue']['remove'] = True - backend.setting('TEAM_MAP')['Red']['remove'] = True - backend.setting('TEAM_MAP')['Blue']['users'] = '' - backend.setting('TEAM_MAP')['Red']['users'] = '' - - update_user_teams(backend, None, u1) - - assert Team.objects.get(name="Red").member_role.members.count() == 2 - assert Team.objects.get(name="Blue").member_role.members.count() == 2 - - # Test remove feature disabled - backend.setting('TEAM_MAP')['Blue']['remove'] = False - backend.setting('TEAM_MAP')['Red']['remove'] = False - - update_user_teams(backend, None, u2) - - assert Team.objects.get(name="Red").member_role.members.count() == 2 - assert Team.objects.get(name="Blue").member_role.members.count() == 2 - - for o in Organization.objects.all(): - assert o.galaxy_credentials.count() == 1 - assert o.galaxy_credentials.first().name == 'Ansible Galaxy' - - -@pytest.mark.django_db -class TestSAMLAttr: - @pytest.fixture - def kwargs(self): - return { - 'username': u'cmeyers@redhat.com', - 'uid': 'idp:cmeyers@redhat.com', - 'request': {u'SAMLResponse': [], u'RelayState': [u'idp']}, - 'is_new': False, - 'response': { - 'session_index': '_0728f0e0-b766-0135-75fa-02842b07c044', - 'idp_name': u'idp', - 'attributes': { - 'memberOf': ['Default1', 'Default2'], - 'admins': ['Default3'], - 'auditors': ['Default4'], - 'groups': ['Blue', 'Red'], - 'User.email': ['cmeyers@redhat.com'], - 'User.LastName': ['Meyers'], - 'name_id': 'cmeyers@redhat.com', - 'User.FirstName': ['Chris'], - 'PersonImmutableID': [], - }, - }, - # 'social': , - 'social': None, - # 'strategy': , - 'strategy': None, - 'new_association': False, - } - - @pytest.fixture - def orgs(self): - 
o1 = Organization.objects.create(name='Default1') - o2 = Organization.objects.create(name='Default2') - o3 = Organization.objects.create(name='Default3') - return (o1, o2, o3) - - @pytest.fixture - def mock_settings(self, request): - fixture_args = request.node.get_closest_marker('fixture_args') - if fixture_args and 'autocreate' in fixture_args.kwargs: - autocreate = fixture_args.kwargs['autocreate'] - else: - autocreate = True - - class MockSettings: - SAML_AUTO_CREATE_OBJECTS = autocreate - SOCIAL_AUTH_SAML_ORGANIZATION_ATTR = { - 'saml_attr': 'memberOf', - 'saml_admin_attr': 'admins', - 'saml_auditor_attr': 'auditors', - 'remove': True, - 'remove_admins': True, - } - SOCIAL_AUTH_SAML_TEAM_ATTR = { - 'saml_attr': 'groups', - 'remove': True, - 'team_org_map': [ - {'team': 'Blue', 'organization': 'Default1'}, - {'team': 'Blue', 'organization': 'Default2'}, - {'team': 'Blue', 'organization': 'Default3'}, - {'team': 'Red', 'organization': 'Default1'}, - {'team': 'Green', 'organization': 'Default1'}, - {'team': 'Green', 'organization': 'Default3'}, - {'team': 'Yellow', 'team_alias': 'Yellow_Alias', 'organization': 'Default4', 'organization_alias': 'Default4_Alias'}, - ], - } - - mock_settings_obj = MockSettings() - for key in settings_registry.get_registered_settings(category_slug='logging'): - value = settings_registry.get_setting_field(key).get_default() - setattr(mock_settings_obj, key, value) - setattr(mock_settings_obj, 'DEBUG', True) - - return mock_settings_obj - - @pytest.fixture - def backend(self): - class Backend: - s = { - 'ORGANIZATION_MAP': { - 'Default1': { - 'remove': True, - 'admins': 'foobar', - 'remove_admins': True, - 'users': 'foo', - 'remove_users': True, - 'organization_alias': 'o1_alias', - } - } - } - - def setting(self, key): - return self.s[key] - - return Backend() - - def test_update_user_orgs_by_saml_attr(self, orgs, users, galaxy_credential, kwargs, mock_settings, backend): - with mock.patch('django.conf.settings', mock_settings): - o1, 
o2, o3 = orgs - u1, u2, u3 = users - - # Test getting orgs from attribute - update_user_orgs_by_saml_attr(None, None, u1, **kwargs) - update_user_orgs_by_saml_attr(None, None, u2, **kwargs) - update_user_orgs_by_saml_attr(None, None, u3, **kwargs) - - assert o1.member_role.members.count() == 3 - assert o2.member_role.members.count() == 3 - assert o3.member_role.members.count() == 0 - - # Test remove logic enabled - kwargs['response']['attributes']['memberOf'] = ['Default3'] - - update_user_orgs_by_saml_attr(None, None, u1, **kwargs) - - assert o1.member_role.members.count() == 2 - assert o2.member_role.members.count() == 2 - assert o3.member_role.members.count() == 1 - - # Test remove logic disabled - mock_settings.SOCIAL_AUTH_SAML_ORGANIZATION_ATTR['remove'] = False - kwargs['response']['attributes']['memberOf'] = ['Default1', 'Default2'] - - update_user_orgs_by_saml_attr(None, None, u1, **kwargs) - - assert o1.member_role.members.count() == 3 - assert o2.member_role.members.count() == 3 - assert o3.member_role.members.count() == 1 - - update_user_orgs_by_saml_attr(backend, None, u1, **kwargs) - assert Organization.objects.get(name="o1_alias").member_role.members.count() == 1 - - for o in Organization.objects.all(): - if o.id in [o1.id, o2.id, o3.id]: - # o[123] were created without a default galaxy cred - assert o.galaxy_credentials.count() == 0 - else: - # anything else created should have a default galaxy cred - assert o.galaxy_credentials.count() == 1 - assert o.galaxy_credentials.first().name == 'Ansible Galaxy' - - def test_update_user_teams_by_saml_attr(self, orgs, users, galaxy_credential, kwargs, mock_settings): - with mock.patch('django.conf.settings', mock_settings): - o1, o2, o3 = orgs - u1, u2, u3 = users - - # Test getting teams from attribute with team->org mapping - - kwargs['response']['attributes']['groups'] = ['Blue', 'Red', 'Green'] - - # Ensure basic functionality - update_user_teams_by_saml_attr(None, None, u1, **kwargs) - 
update_user_teams_by_saml_attr(None, None, u2, **kwargs) - update_user_teams_by_saml_attr(None, None, u3, **kwargs) - - assert Team.objects.get(name='Blue', organization__name='Default1').member_role.members.count() == 3 - assert Team.objects.get(name='Blue', organization__name='Default2').member_role.members.count() == 3 - assert Team.objects.get(name='Blue', organization__name='Default3').member_role.members.count() == 3 - - assert Team.objects.get(name='Red', organization__name='Default1').member_role.members.count() == 3 - - assert Team.objects.get(name='Green', organization__name='Default1').member_role.members.count() == 3 - assert Team.objects.get(name='Green', organization__name='Default3').member_role.members.count() == 3 - - # Test remove logic - kwargs['response']['attributes']['groups'] = ['Green'] - update_user_teams_by_saml_attr(None, None, u1, **kwargs) - update_user_teams_by_saml_attr(None, None, u2, **kwargs) - update_user_teams_by_saml_attr(None, None, u3, **kwargs) - - assert Team.objects.get(name='Blue', organization__name='Default1').member_role.members.count() == 0 - assert Team.objects.get(name='Blue', organization__name='Default2').member_role.members.count() == 0 - assert Team.objects.get(name='Blue', organization__name='Default3').member_role.members.count() == 0 - - assert Team.objects.get(name='Red', organization__name='Default1').member_role.members.count() == 0 - - assert Team.objects.get(name='Green', organization__name='Default1').member_role.members.count() == 3 - assert Team.objects.get(name='Green', organization__name='Default3').member_role.members.count() == 3 - - # Test remove logic disabled - mock_settings.SOCIAL_AUTH_SAML_TEAM_ATTR['remove'] = False - kwargs['response']['attributes']['groups'] = ['Blue'] - - update_user_teams_by_saml_attr(None, None, u1, **kwargs) - update_user_teams_by_saml_attr(None, None, u2, **kwargs) - update_user_teams_by_saml_attr(None, None, u3, **kwargs) - - assert Team.objects.get(name='Blue', 
organization__name='Default1').member_role.members.count() == 3 - assert Team.objects.get(name='Blue', organization__name='Default2').member_role.members.count() == 3 - assert Team.objects.get(name='Blue', organization__name='Default3').member_role.members.count() == 3 - - assert Team.objects.get(name='Red', organization__name='Default1').member_role.members.count() == 0 - - assert Team.objects.get(name='Green', organization__name='Default1').member_role.members.count() == 3 - assert Team.objects.get(name='Green', organization__name='Default3').member_role.members.count() == 3 - - for o in Organization.objects.all(): - if o.id in [o1.id, o2.id, o3.id]: - # o[123] were created without a default galaxy cred - assert o.galaxy_credentials.count() == 0 - else: - # anything else created should have a default galaxy cred - assert o.galaxy_credentials.count() == 1 - assert o.galaxy_credentials.first().name == 'Ansible Galaxy' - - def test_update_user_teams_alias_by_saml_attr(self, orgs, users, galaxy_credential, kwargs, mock_settings): - with mock.patch('django.conf.settings', mock_settings): - u1 = users[0] - - # Test getting teams from attribute with team->org mapping - kwargs['response']['attributes']['groups'] = ['Yellow'] - - # Ensure team and org will be created - update_user_teams_by_saml_attr(None, None, u1, **kwargs) - - assert Team.objects.filter(name='Yellow', organization__name='Default4').count() == 0 - assert Team.objects.filter(name='Yellow_Alias', organization__name='Default4').count() == 1 - assert Team.objects.get(name='Yellow_Alias', organization__name='Default4').member_role.members.count() == 1 - - # only Org 4 got created/updated - org = Organization.objects.get(name='Default4') - assert org.galaxy_credentials.count() == 1 - assert org.galaxy_credentials.first().name == 'Ansible Galaxy' - - @pytest.mark.fixture_args(autocreate=False) - def test_autocreate_disabled(self, users, kwargs, mock_settings): - kwargs['response']['attributes']['memberOf'] = 
['Default1', 'Default2', 'Default3'] - kwargs['response']['attributes']['groups'] = ['Blue', 'Red', 'Green'] - with mock.patch('django.conf.settings', mock_settings): - for u in users: - update_user_orgs_by_saml_attr(None, None, u, **kwargs) - update_user_teams_by_saml_attr(None, None, u, **kwargs) - assert Organization.objects.count() == 0 - assert Team.objects.count() == 0 - - # precreate everything - o1 = Organization.objects.create(name='Default1') - o2 = Organization.objects.create(name='Default2') - o3 = Organization.objects.create(name='Default3') - Team.objects.create(name='Blue', organization_id=o1.id) - Team.objects.create(name='Blue', organization_id=o2.id) - Team.objects.create(name='Blue', organization_id=o3.id) - Team.objects.create(name='Red', organization_id=o1.id) - Team.objects.create(name='Green', organization_id=o1.id) - Team.objects.create(name='Green', organization_id=o3.id) - - for u in users: - update_user_orgs_by_saml_attr(None, None, u, **kwargs) - update_user_teams_by_saml_attr(None, None, u, **kwargs) - - assert o1.member_role.members.count() == 3 - assert o2.member_role.members.count() == 3 - assert o3.member_role.members.count() == 3 - - assert Team.objects.get(name='Blue', organization__name='Default1').member_role.members.count() == 3 - assert Team.objects.get(name='Blue', organization__name='Default2').member_role.members.count() == 3 - assert Team.objects.get(name='Blue', organization__name='Default3').member_role.members.count() == 3 - - assert Team.objects.get(name='Red', organization__name='Default1').member_role.members.count() == 3 - - assert Team.objects.get(name='Green', organization__name='Default1').member_role.members.count() == 3 - assert Team.objects.get(name='Green', organization__name='Default3').member_role.members.count() == 3 - - def test_galaxy_credential_auto_assign(self, users, kwargs, galaxy_credential, mock_settings): - kwargs['response']['attributes']['memberOf'] = ['Default1', 'Default2', 'Default3'] - 
kwargs['response']['attributes']['groups'] = ['Blue', 'Red', 'Green'] - with mock.patch('django.conf.settings', mock_settings): - for u in users: - update_user_orgs_by_saml_attr(None, None, u, **kwargs) - update_user_teams_by_saml_attr(None, None, u, **kwargs) - - assert Organization.objects.count() == 4 - for o in Organization.objects.all(): - assert o.galaxy_credentials.count() == 1 - assert o.galaxy_credentials.first().name == 'Ansible Galaxy' - - def test_galaxy_credential_no_auto_assign(self, users, kwargs, galaxy_credential, mock_settings): - # A Galaxy credential should not be added to an existing org - o = Organization.objects.create(name='Default1') - o = Organization.objects.create(name='Default2') - o = Organization.objects.create(name='Default3') - o = Organization.objects.create(name='Default4') - kwargs['response']['attributes']['memberOf'] = ['Default1'] - kwargs['response']['attributes']['groups'] = ['Blue'] - with mock.patch('django.conf.settings', mock_settings): - for u in users: - update_user_orgs_by_saml_attr(None, None, u, **kwargs) - update_user_teams_by_saml_attr(None, None, u, **kwargs) - - assert Organization.objects.count() == 4 - for o in Organization.objects.all(): - assert o.galaxy_credentials.count() == 0 - - -@pytest.mark.django_db -class TestSAMLUserFlags: - @pytest.mark.parametrize( - "user_flags_settings, expected, is_superuser", - [ - # In this case we will pass no user flags so new_flag should be false and changed will def be false - ( - {}, - (False, False), - False, - ), - # NOTE: The first handful of tests test role/value as string instead of lists. - # This was from the initial implementation of these fields but the code should be able to handle this - # There are a couple tests at the end of this which will validate arrays in these values. 
- # - # In this case we will give the user a group to make them an admin - ( - {'is_superuser_role': 'test-role-1'}, - (True, True), - False, - ), - # In this case we will give the user a flag that will make then an admin - ( - {'is_superuser_attr': 'is_superuser'}, - (True, True), - False, - ), - # In this case we will give the user a flag but the wrong value - ( - {'is_superuser_attr': 'is_superuser', 'is_superuser_value': 'junk'}, - (False, False), - False, - ), - # In this case we will give the user a flag and the right value - ( - {'is_superuser_attr': 'is_superuser', 'is_superuser_value': 'true'}, - (True, True), - False, - ), - # In this case we will give the user a proper role and an is_superuser_attr role that they dont have, this should make them an admin - ( - {'is_superuser_role': 'test-role-1', 'is_superuser_attr': 'gibberish', 'is_superuser_value': 'true'}, - (True, True), - False, - ), - # In this case we will give the user a proper role and an is_superuser_attr role that they have, this should make them an admin - ( - {'is_superuser_role': 'test-role-1', 'is_superuser_attr': 'test-role-1'}, - (True, True), - False, - ), - # In this case we will give the user a proper role and an is_superuser_attr role that they have but a bad value, this should make them an admin - ( - {'is_superuser_role': 'test-role-1', 'is_superuser_attr': 'is_superuser', 'is_superuser_value': 'junk'}, - (False, False), - False, - ), - # In this case we will give the user everything - ( - {'is_superuser_role': 'test-role-1', 'is_superuser_attr': 'is_superuser', 'is_superuser_value': 'true'}, - (True, True), - False, - ), - # In this test case we will validate that a single attribute (instead of a list) still works - ( - {'is_superuser_attr': 'name_id', 'is_superuser_value': 'test_id'}, - (True, True), - False, - ), - # This will be a negative test for a single atrribute - ( - {'is_superuser_attr': 'name_id', 'is_superuser_value': 'junk'}, - (False, False), - False, - ), - # The 
user is already a superuser so we should remove them - ( - {'is_superuser_attr': 'name_id', 'is_superuser_value': 'junk', 'remove_superusers': True}, - (False, True), - True, - ), - # The user is already a superuser but we don't have a remove field - ( - {'is_superuser_attr': 'name_id', 'is_superuser_value': 'junk', 'remove_superusers': False}, - (True, False), - True, - ), - # Positive test for multiple values for is_superuser_value - ( - {'is_superuser_attr': 'is_superuser', 'is_superuser_value': ['junk', 'junk2', 'else', 'junk']}, - (True, True), - False, - ), - # Negative test for multiple values for is_superuser_value - ( - {'is_superuser_attr': 'is_superuser', 'is_superuser_value': ['junk', 'junk2', 'junk']}, - (False, True), - True, - ), - # Positive test for multiple values of is_superuser_role - ( - {'is_superuser_role': ['junk', 'junk2', 'something', 'junk']}, - (True, True), - False, - ), - # Negative test for multiple values of is_superuser_role - ( - {'is_superuser_role': ['junk', 'junk2', 'junk']}, - (False, True), - True, - ), - ], - ) - def test__check_flag(self, user_flags_settings, expected, is_superuser): - user = User() - user.username = 'John' - user.is_superuser = is_superuser - - attributes = { - 'email': ['noone@nowhere.com'], - 'last_name': ['Westcott'], - 'is_superuser': ['something', 'else', 'true'], - 'username': ['test_id'], - 'first_name': ['John'], - 'Role': ['test-role-1', 'something', 'different'], - 'name_id': 'test_id', - } - - assert expected == _check_flag(user, 'superuser', attributes, user_flags_settings) diff --git a/awx/sso/tests/functional/test_saml_pipeline.py b/awx/sso/tests/functional/test_saml_pipeline.py new file mode 100644 index 0000000000..628d793d4e --- /dev/null +++ b/awx/sso/tests/functional/test_saml_pipeline.py @@ -0,0 +1,639 @@ +import pytest +import re + +from django.test.utils import override_settings +from awx.main.models import User, Organization, Team +from awx.sso.saml_pipeline import ( + 
_update_m2m_from_expression, + _update_user_orgs, + _update_user_teams, + _update_user_orgs_by_saml_attr, + _update_user_teams_by_saml_attr, + _check_flag, +) + +# from unittest import mock +# from django.utils.timezone import now +# , Credential, CredentialType + + +@pytest.fixture +def users(): + u1 = User.objects.create(username='user1@foo.com', last_name='foo', first_name='bar', email='user1@foo.com') + u2 = User.objects.create(username='user2@foo.com', last_name='foo', first_name='bar', email='user2@foo.com') + u3 = User.objects.create(username='user3@foo.com', last_name='foo', first_name='bar', email='user3@foo.com') + return (u1, u2, u3) + + +@pytest.mark.django_db +class TestSAMLPopulateUser: + # The main populate_user does not need to be tested since its just a conglomeration of other functions that we test + # This test is here in case someone alters the code in the future in a way that does require testing + def test_populate_user(self): + assert True + + +@pytest.mark.django_db +class TestSAMLSimpleMaps: + # This tests __update_user_orgs and __update_user_teams + @pytest.fixture + def backend(self): + class Backend: + s = { + 'ORGANIZATION_MAP': { + 'Default': { + 'remove': True, + 'admins': 'foobar', + 'remove_admins': True, + 'users': 'foo', + 'remove_users': True, + 'organization_alias': '', + } + }, + 'TEAM_MAP': {'Blue': {'organization': 'Default', 'remove': True, 'users': ''}, 'Red': {'organization': 'Default', 'remove': True, 'users': ''}}, + } + + def setting(self, key): + return self.s[key] + + return Backend() + + def test__update_user_orgs(self, backend, users): + u1, u2, u3 = users + + # Test user membership logic with regular expressions + backend.setting('ORGANIZATION_MAP')['Default']['admins'] = re.compile('.*') + backend.setting('ORGANIZATION_MAP')['Default']['users'] = re.compile('.*') + + desired_org_state = {} + orgs_to_create = [] + _update_user_orgs(backend, desired_org_state, orgs_to_create, u1) + _update_user_orgs(backend, 
desired_org_state, orgs_to_create, u2) + _update_user_orgs(backend, desired_org_state, orgs_to_create, u3) + + assert desired_org_state == {'Default': {'member_role': True, 'admin_role': True, 'auditor_role': False}} + assert orgs_to_create == ['Default'] + + # Test remove feature enabled + backend.setting('ORGANIZATION_MAP')['Default']['admins'] = '' + backend.setting('ORGANIZATION_MAP')['Default']['users'] = '' + backend.setting('ORGANIZATION_MAP')['Default']['remove_admins'] = True + backend.setting('ORGANIZATION_MAP')['Default']['remove_users'] = True + desired_org_state = {} + orgs_to_create = [] + _update_user_orgs(backend, desired_org_state, orgs_to_create, u1) + assert desired_org_state == {'Default': {'member_role': False, 'admin_role': False, 'auditor_role': False}} + assert orgs_to_create == ['Default'] + + # Test remove feature disabled + backend.setting('ORGANIZATION_MAP')['Default']['remove_admins'] = False + backend.setting('ORGANIZATION_MAP')['Default']['remove_users'] = False + desired_org_state = {} + orgs_to_create = [] + _update_user_orgs(backend, desired_org_state, orgs_to_create, u2) + + assert desired_org_state == {'Default': {'member_role': None, 'admin_role': None, 'auditor_role': False}} + assert orgs_to_create == ['Default'] + + # Test organization alias feature + backend.setting('ORGANIZATION_MAP')['Default']['organization_alias'] = 'Default_Alias' + orgs_to_create = [] + _update_user_orgs(backend, {}, orgs_to_create, u1) + assert orgs_to_create == ['Default_Alias'] + + def test__update_user_teams(self, backend, users): + u1, u2, u3 = users + + # Test user membership logic with regular expressions + backend.setting('TEAM_MAP')['Blue']['users'] = re.compile('.*') + backend.setting('TEAM_MAP')['Red']['users'] = re.compile('.*') + + desired_team_state = {} + teams_to_create = {} + _update_user_teams(backend, desired_team_state, teams_to_create, u1) + assert teams_to_create == {'Red': 'Default', 'Blue': 'Default'} + assert desired_team_state 
== {'Default': {'Blue': {'member_role': True}, 'Red': {'member_role': True}}} + + # Test remove feature enabled + backend.setting('TEAM_MAP')['Blue']['remove'] = True + backend.setting('TEAM_MAP')['Red']['remove'] = True + backend.setting('TEAM_MAP')['Blue']['users'] = '' + backend.setting('TEAM_MAP')['Red']['users'] = '' + + desired_team_state = {} + teams_to_create = {} + _update_user_teams(backend, desired_team_state, teams_to_create, u1) + assert teams_to_create == {'Red': 'Default', 'Blue': 'Default'} + assert desired_team_state == {'Default': {'Blue': {'member_role': False}, 'Red': {'member_role': False}}} + + # Test remove feature disabled + backend.setting('TEAM_MAP')['Blue']['remove'] = False + backend.setting('TEAM_MAP')['Red']['remove'] = False + + desired_team_state = {} + teams_to_create = {} + _update_user_teams(backend, desired_team_state, teams_to_create, u2) + assert teams_to_create == {'Red': 'Default', 'Blue': 'Default'} + # If we don't care about team memberships we just don't add them to the hash so this would be an empty hash + assert desired_team_state == {} + + +@pytest.mark.django_db +class TestSAMLM2M: + @pytest.mark.parametrize( + "expression, remove, expected_return", + [ + # No expression with no remove + (None, False, None), + ("", False, None), + # No expression with remove + (None, True, False), + # True expression with and without remove + (True, False, True), + (True, True, True), + # Single string matching the user name + ("user1", False, True), + # Single string matching the user email + ("user1@foo.com", False, True), + # Single string not matching username or email, no remove + ("user27", False, None), + # Single string not matching username or email, with remove + ("user27", True, False), + # Same tests with arrays instead of strings + (["user1"], False, True), + (["user1@foo.com"], False, True), + (["user27"], False, None), + (["user27"], True, False), + # Arrays with nothing matching + (["user27", "user28"], False, None), + 
(["user27", "user28"], True, False), + # Arrays with all matches + (["user1", "user1@foo.com"], False, True), + # Arrays with some match, some not + (["user1", "user28", "user27"], False, True), + # + # Note: For RE's, usually settings takes care of the compilation for us, so we have to do it manually for testing. + # we also need to remove any / or flags for the compile to happen + # + # Matching username regex non-array + (re.compile("^user.*"), False, True), + (re.compile("^user.*"), True, True), + # Matching email regex non-array + (re.compile(".*@foo.com$"), False, True), + (re.compile(".*@foo.com$"), True, True), + # Non-array not matching username or email + (re.compile("^$"), False, None), + (re.compile("^$"), True, False), + # All re tests just in array form + ([re.compile("^user.*")], False, True), + ([re.compile("^user.*")], True, True), + ([re.compile(".*@foo.com$")], False, True), + ([re.compile(".*@foo.com$")], True, True), + ([re.compile("^$")], False, None), + ([re.compile("^$")], True, False), + # An re with username matching but not email + ([re.compile("^user.*"), re.compile(".*@bar.com$")], False, True), + # An re with email matching but not username + ([re.compile("^user27$"), re.compile(".*@foo.com$")], False, True), + # An re array with no matching + ([re.compile("^user27$"), re.compile(".*@bar.com$")], False, None), + ([re.compile("^user27$"), re.compile(".*@bar.com$")], True, False), + # + # A mix of re and strings + # + # String matches, re does not + (["user1", re.compile(".*@bar.com$")], False, True), + # String does not match, re does + (["user27", re.compile(".*@foo.com$")], False, True), + # Nothing matches + (["user27", re.compile(".*@bar.com$")], False, None), + (["user27", re.compile(".*@bar.com$")], True, False), + ], + ) + def test__update_m2m_from_expression(self, expression, remove, expected_return): + user = User.objects.create(username='user1', last_name='foo', first_name='bar', email='user1@foo.com') + return_val = 
_update_m2m_from_expression(user, expression, remove) + assert return_val == expected_return + + +@pytest.mark.django_db +class TestSAMLAttrMaps: + @pytest.fixture + def backend(self): + class Backend: + s = { + 'ORGANIZATION_MAP': { + 'Default1': { + 'remove': True, + 'admins': 'foobar', + 'remove_admins': True, + 'users': 'foo', + 'remove_users': True, + 'organization_alias': 'o1_alias', + } + } + } + + def setting(self, key): + return self.s[key] + + return Backend() + + @pytest.mark.parametrize( + "setting, expected_state, expected_orgs_to_create, kwargs_member_of_mods", + [ + ( + # Default test, make sure that our roles get applied and removed as specified (with an alias) + { + 'saml_attr': 'memberOf', + 'saml_admin_attr': 'admins', + 'saml_auditor_attr': 'auditors', + 'remove': True, + 'remove_admins': True, + }, + { + 'Default2': {'member_role': True}, + 'Default3': {'admin_role': True}, + 'Default4': {'auditor_role': True}, + 'o1_alias': {'member_role': True}, + 'Rando1': {'admin_role': False, 'auditor_role': False, 'member_role': False}, + }, + [ + 'o1_alias', + 'Default2', + 'Default3', + 'Default4', + ], + None, + ), + ( + # Similar test, we are just going to override the values "coming from the IdP" to limit the teams + { + 'saml_attr': 'memberOf', + 'saml_admin_attr': 'admins', + 'saml_auditor_attr': 'auditors', + 'remove': True, + 'remove_admins': True, + }, + { + 'Default3': {'admin_role': True, 'member_role': True}, + 'Default4': {'auditor_role': True}, + 'Rando1': {'admin_role': False, 'auditor_role': False, 'member_role': False}, + }, + [ + 'Default3', + 'Default4', + ], + ['Default3'], + ), + ( + # Test to make sure the remove logic is working + { + 'saml_attr': 'memberOf', + 'saml_admin_attr': 'admins', + 'saml_auditor_attr': 'auditors', + 'remove': False, + 'remove_admins': False, + 'remove_auditors': False, + }, + { + 'Default2': {'member_role': True}, + 'Default3': {'admin_role': True}, + 'Default4': {'auditor_role': True}, + 'o1_alias': 
{'member_role': True}, + }, + [ + 'o1_alias', + 'Default2', + 'Default3', + 'Default4', + ], + ['Default1', 'Default2'], + ), + ], + ) + def test__update_user_orgs_by_saml_attr(self, backend, setting, expected_state, expected_orgs_to_create, kwargs_member_of_mods): + kwargs = { + 'username': u'cmeyers@redhat.com', + 'uid': 'idp:cmeyers@redhat.com', + 'request': {u'SAMLResponse': [], u'RelayState': [u'idp']}, + 'is_new': False, + 'response': { + 'session_index': '_0728f0e0-b766-0135-75fa-02842b07c044', + 'idp_name': u'idp', + 'attributes': { + 'memberOf': ['Default1', 'Default2'], + 'admins': ['Default3'], + 'auditors': ['Default4'], + 'groups': ['Blue', 'Red'], + 'User.email': ['cmeyers@redhat.com'], + 'User.LastName': ['Meyers'], + 'name_id': 'cmeyers@redhat.com', + 'User.FirstName': ['Chris'], + 'PersonImmutableID': [], + }, + }, + 'social': None, + 'strategy': None, + 'new_association': False, + } + if kwargs_member_of_mods: + kwargs['response']['attributes']['memberOf'] = kwargs_member_of_mods + + # Create a random organization in the database for testing + Organization.objects.create(name='Rando1') + + with override_settings(SOCIAL_AUTH_SAML_ORGANIZATION_ATTR=setting): + desired_org_state = {} + orgs_to_create = [] + _update_user_orgs_by_saml_attr(backend, desired_org_state, orgs_to_create, **kwargs) + assert desired_org_state == expected_state + assert orgs_to_create == expected_orgs_to_create + + @pytest.mark.parametrize( + "setting, expected_team_state, expected_teams_to_create, kwargs_group_override", + [ + ( + { + 'saml_attr': 'groups', + 'remove': False, + 'team_org_map': [ + {'team': 'Blue', 'organization': 'Default1'}, + {'team': 'Blue', 'organization': 'Default2'}, + {'team': 'Blue', 'organization': 'Default3'}, + {'team': 'Red', 'organization': 'Default1'}, + {'team': 'Green', 'organization': 'Default1'}, + {'team': 'Green', 'organization': 'Default3'}, + {'team': 'Yellow', 'team_alias': 'Yellow_Alias', 'organization': 'Default4', 
'organization_alias': 'Default4_Alias'}, + ], + }, + { + 'Default1': { + 'Blue': {'member_role': True}, + 'Green': {'member_role': False}, + 'Red': {'member_role': True}, + }, + 'Default2': { + 'Blue': {'member_role': True}, + }, + 'Default3': { + 'Blue': {'member_role': True}, + 'Green': {'member_role': False}, + }, + 'Default4': { + 'Yellow': {'member_role': False}, + }, + }, + { + 'Blue': 'Default3', + 'Red': 'Default1', + }, + None, + ), + ( + { + 'saml_attr': 'groups', + 'remove': False, + 'team_org_map': [ + {'team': 'Blue', 'organization': 'Default1'}, + {'team': 'Blue', 'organization': 'Default2'}, + {'team': 'Blue', 'organization': 'Default3'}, + {'team': 'Red', 'organization': 'Default1'}, + {'team': 'Green', 'organization': 'Default1'}, + {'team': 'Green', 'organization': 'Default3'}, + {'team': 'Yellow', 'team_alias': 'Yellow_Alias', 'organization': 'Default4', 'organization_alias': 'Default4_Alias'}, + ], + }, + { + 'Default1': { + 'Blue': {'member_role': True}, + 'Green': {'member_role': True}, + 'Red': {'member_role': True}, + }, + 'Default2': { + 'Blue': {'member_role': True}, + }, + 'Default3': { + 'Blue': {'member_role': True}, + 'Green': {'member_role': True}, + }, + 'Default4': { + 'Yellow': {'member_role': False}, + }, + }, + { + 'Blue': 'Default3', + 'Red': 'Default1', + 'Green': 'Default3', + }, + ['Blue', 'Red', 'Green'], + ), + ( + { + 'saml_attr': 'groups', + 'remove': True, + 'team_org_map': [ + {'team': 'Blue', 'organization': 'Default1'}, + {'team': 'Blue', 'organization': 'Default2'}, + {'team': 'Blue', 'organization': 'Default3'}, + {'team': 'Red', 'organization': 'Default1'}, + {'team': 'Green', 'organization': 'Default1'}, + {'team': 'Green', 'organization': 'Default3'}, + {'team': 'Yellow', 'team_alias': 'Yellow_Alias', 'organization': 'Default4', 'organization_alias': 'Default4_Alias'}, + ], + }, + { + 'Default1': { + 'Blue': {'member_role': False}, + 'Green': {'member_role': True}, + 'Red': {'member_role': False}, + }, + 
'Default2': { + 'Blue': {'member_role': False}, + }, + 'Default3': { + 'Blue': {'member_role': False}, + 'Green': {'member_role': True}, + }, + 'Default4': { + 'Yellow': {'member_role': False}, + }, + 'Rando1': { + 'Rando1': {'member_role': False}, + }, + }, + { + 'Green': 'Default3', + }, + ['Green'], + ), + ], + ) + def test__update_user_teams_by_saml_attr(self, setting, expected_team_state, expected_teams_to_create, kwargs_group_override): + kwargs = { + 'username': u'cmeyers@redhat.com', + 'uid': 'idp:cmeyers@redhat.com', + 'request': {u'SAMLResponse': [], u'RelayState': [u'idp']}, + 'is_new': False, + 'response': { + 'session_index': '_0728f0e0-b766-0135-75fa-02842b07c044', + 'idp_name': u'idp', + 'attributes': { + 'memberOf': ['Default1', 'Default2'], + 'admins': ['Default3'], + 'auditors': ['Default4'], + 'groups': ['Blue', 'Red'], + 'User.email': ['cmeyers@redhat.com'], + 'User.LastName': ['Meyers'], + 'name_id': 'cmeyers@redhat.com', + 'User.FirstName': ['Chris'], + 'PersonImmutableID': [], + }, + }, + 'social': None, + 'strategy': None, + 'new_association': False, + } + if kwargs_group_override: + kwargs['response']['attributes']['groups'] = kwargs_group_override + + o = Organization.objects.create(name='Rando1') + Team.objects.create(name='Rando1', organization_id=o.id) + + with override_settings(SOCIAL_AUTH_SAML_TEAM_ATTR=setting): + desired_team_state = {} + teams_to_create = {} + _update_user_teams_by_saml_attr(desired_team_state, teams_to_create, **kwargs) + assert desired_team_state == expected_team_state + assert teams_to_create == expected_teams_to_create + + +@pytest.mark.django_db +class TestSAMLUserFlags: + @pytest.mark.parametrize( + "user_flags_settings, expected, is_superuser", + [ + # In this case we will pass no user flags so new_flag should be false and changed will def be false + ( + {}, + (False, False), + False, + ), + # NOTE: The first handful of tests test role/value as string instead of lists. 
+ # This was from the initial implementation of these fields but the code should be able to handle this + # There are a couple tests at the end of this which will validate arrays in these values. + # + # In this case we will give the user a group to make them an admin + ( + {'is_superuser_role': 'test-role-1'}, + (True, True), + False, + ), + # In this case we will give the user a flag that will make then an admin + ( + {'is_superuser_attr': 'is_superuser'}, + (True, True), + False, + ), + # In this case we will give the user a flag but the wrong value + ( + {'is_superuser_attr': 'is_superuser', 'is_superuser_value': 'junk'}, + (False, False), + False, + ), + # In this case we will give the user a flag and the right value + ( + {'is_superuser_attr': 'is_superuser', 'is_superuser_value': 'true'}, + (True, True), + False, + ), + # In this case we will give the user a proper role and an is_superuser_attr role that they don't have, this should make them an admin + ( + {'is_superuser_role': 'test-role-1', 'is_superuser_attr': 'gibberish', 'is_superuser_value': 'true'}, + (True, True), + False, + ), + # In this case we will give the user a proper role and an is_superuser_attr role that they have, this should make them an admin + ( + {'is_superuser_role': 'test-role-1', 'is_superuser_attr': 'test-role-1'}, + (True, True), + False, + ), + # In this case we will give the user a proper role and an is_superuser_attr role that they have but a bad value, this should make them an admin + ( + {'is_superuser_role': 'test-role-1', 'is_superuser_attr': 'is_superuser', 'is_superuser_value': 'junk'}, + (False, False), + False, + ), + # In this case we will give the user everything + ( + {'is_superuser_role': 'test-role-1', 'is_superuser_attr': 'is_superuser', 'is_superuser_value': 'true'}, + (True, True), + False, + ), + # In this test case we will validate that a single attribute (instead of a list) still works + ( + {'is_superuser_attr': 'name_id', 'is_superuser_value': 'test_id'}, 
+ (True, True), + False, + ), + # This will be a negative test for a single attribute + ( + {'is_superuser_attr': 'name_id', 'is_superuser_value': 'junk'}, + (False, False), + False, + ), + # The user is already a superuser so we should remove them + ( + {'is_superuser_attr': 'name_id', 'is_superuser_value': 'junk', 'remove_superusers': True}, + (False, True), + True, + ), + # The user is already a superuser but we don't have a remove field + ( + {'is_superuser_attr': 'name_id', 'is_superuser_value': 'junk', 'remove_superusers': False}, + (True, False), + True, + ), + # Positive test for multiple values for is_superuser_value + ( + {'is_superuser_attr': 'is_superuser', 'is_superuser_value': ['junk', 'junk2', 'else', 'junk']}, + (True, True), + False, + ), + # Negative test for multiple values for is_superuser_value + ( + {'is_superuser_attr': 'is_superuser', 'is_superuser_value': ['junk', 'junk2', 'junk']}, + (False, True), + True, + ), + # Positive test for multiple values of is_superuser_role + ( + {'is_superuser_role': ['junk', 'junk2', 'something', 'junk']}, + (True, True), + False, + ), + # Negative test for multiple values of is_superuser_role + ( + {'is_superuser_role': ['junk', 'junk2', 'junk']}, + (False, True), + True, + ), + ], + ) + def test__check_flag(self, user_flags_settings, expected, is_superuser): + user = User() + user.username = 'John' + user.is_superuser = is_superuser + + attributes = { + 'email': ['noone@nowhere.com'], + 'last_name': ['Westcott'], + 'is_superuser': ['something', 'else', 'true'], + 'username': ['test_id'], + 'first_name': ['John'], + 'Role': ['test-role-1', 'something', 'different'], + 'name_id': 'test_id', + } + + assert expected == _check_flag(user, 'superuser', attributes, user_flags_settings) diff --git a/awx/sso/tests/functional/test_social_base_pipeline.py b/awx/sso/tests/functional/test_social_base_pipeline.py new file mode 100644 index 0000000000..38a49e15f3 --- /dev/null +++ 
b/awx/sso/tests/functional/test_social_base_pipeline.py @@ -0,0 +1,76 @@ +import pytest + +from awx.main.models import User +from awx.sso.social_base_pipeline import AuthNotFound, check_user_found_or_created, set_is_active_for_new_user, prevent_inactive_login, AuthInactive + + +@pytest.mark.django_db +class TestSocialBasePipeline: + def test_check_user_found_or_created_no_exception(self): + # If we have a user (the True param, we should not get an exception) + try: + check_user_found_or_created(None, {}, True) + except AuthNotFound: + assert False, 'check_user_found_or_created should not have raised an exception with a user' + + @pytest.mark.parametrize( + "details, kwargs, expected_id", + [ + ( + {}, + {}, + '???', + ), + ( + {}, + {'uid': 'kwargs_uid'}, + 'kwargs_uid', + ), + ( + {}, + {'uid': 'kwargs_uid', 'email': 'kwargs_email'}, + 'kwargs_email', + ), + ( + {'email': 'details_email'}, + {'uid': 'kwargs_uid', 'email': 'kwargs_email'}, + 'details_email', + ), + ], + ) + def test_check_user_found_or_created_exceptions(self, details, expected_id, kwargs): + with pytest.raises(AuthNotFound) as e: + check_user_found_or_created(None, details, False, None, **kwargs) + assert f'An account cannot be found for {expected_id}' == str(e.value) + + @pytest.mark.parametrize( + "kwargs, expected_details, expected_response", + [ + ({}, {}, None), + ({'is_new': False}, {}, None), + ({'is_new': True}, {'is_active': True}, {'details': {'is_active': True}}), + ], + ) + def test_set_is_active_for_new_user(self, kwargs, expected_details, expected_response): + details = {} + response = set_is_active_for_new_user(None, details, None, None, **kwargs) + assert details == expected_details + assert response == expected_response + + def test_prevent_inactive_login_no_exception_no_user(self): + try: + prevent_inactive_login(None, None, None, None, None) + except AuthInactive: + assert False, 'prevent_inactive_login should not have raised an exception with no user' + + def 
test_prevent_inactive_login_no_exception_active_user(self): + user = User.objects.create(username='user1@foo.com', last_name='foo', first_name='bar', email='user1@foo.com', is_active=True) + try: + prevent_inactive_login(None, None, user, None, None) + except AuthInactive: + assert False, 'prevent_inactive_login should not have raised an exception with an active user' + + def test_prevent_inactive_login_no_exception_inactive_user(self): + user = User.objects.create(username='user1@foo.com', last_name='foo', first_name='bar', email='user1@foo.com', is_active=False) + with pytest.raises(AuthInactive): + prevent_inactive_login(None, None, user, None, None) diff --git a/awx/sso/tests/functional/test_social_pipeline.py b/awx/sso/tests/functional/test_social_pipeline.py new file mode 100644 index 0000000000..f26886e719 --- /dev/null +++ b/awx/sso/tests/functional/test_social_pipeline.py @@ -0,0 +1,113 @@ +import pytest +import re + +from awx.sso.social_pipeline import update_user_orgs, update_user_teams +from awx.main.models import User, Team, Organization + + +@pytest.fixture +def users(): + u1 = User.objects.create(username='user1@foo.com', last_name='foo', first_name='bar', email='user1@foo.com') + u2 = User.objects.create(username='user2@foo.com', last_name='foo', first_name='bar', email='user2@foo.com') + u3 = User.objects.create(username='user3@foo.com', last_name='foo', first_name='bar', email='user3@foo.com') + return (u1, u2, u3) + + +@pytest.mark.django_db +class TestSocialPipeline: + @pytest.fixture + def backend(self): + class Backend: + s = { + 'ORGANIZATION_MAP': { + 'Default': { + 'remove': True, + 'admins': 'foobar', + 'remove_admins': True, + 'users': 'foo', + 'remove_users': True, + 'organization_alias': '', + } + }, + 'TEAM_MAP': {'Blue': {'organization': 'Default', 'remove': True, 'users': ''}, 'Red': {'organization': 'Default', 'remove': True, 'users': ''}}, + } + + def setting(self, key): + return self.s[key] + + return Backend() + + @pytest.fixture 
+ def org(self): + return Organization.objects.create(name="Default") + + def test_update_user_orgs(self, org, backend, users): + u1, u2, u3 = users + + # Test user membership logic with regular expressions + backend.setting('ORGANIZATION_MAP')['Default']['admins'] = re.compile('.*') + backend.setting('ORGANIZATION_MAP')['Default']['users'] = re.compile('.*') + + update_user_orgs(backend, None, u1) + update_user_orgs(backend, None, u2) + update_user_orgs(backend, None, u3) + + assert org.admin_role.members.count() == 3 + assert org.member_role.members.count() == 3 + + # Test remove feature enabled + backend.setting('ORGANIZATION_MAP')['Default']['admins'] = '' + backend.setting('ORGANIZATION_MAP')['Default']['users'] = '' + backend.setting('ORGANIZATION_MAP')['Default']['remove_admins'] = True + backend.setting('ORGANIZATION_MAP')['Default']['remove_users'] = True + update_user_orgs(backend, None, u1) + + assert org.admin_role.members.count() == 2 + assert org.member_role.members.count() == 2 + + # Test remove feature disabled + backend.setting('ORGANIZATION_MAP')['Default']['remove_admins'] = False + backend.setting('ORGANIZATION_MAP')['Default']['remove_users'] = False + update_user_orgs(backend, None, u2) + + assert org.admin_role.members.count() == 2 + assert org.member_role.members.count() == 2 + + # Test organization alias feature + backend.setting('ORGANIZATION_MAP')['Default']['organization_alias'] = 'Default_Alias' + update_user_orgs(backend, None, u1) + assert Organization.objects.get(name="Default_Alias") is not None + + def test_update_user_teams(self, backend, users): + u1, u2, u3 = users + + # Test user membership logic with regular expressions + backend.setting('TEAM_MAP')['Blue']['users'] = re.compile('.*') + backend.setting('TEAM_MAP')['Red']['users'] = re.compile('.*') + + update_user_teams(backend, None, u1) + update_user_teams(backend, None, u2) + update_user_teams(backend, None, u3) + + assert 
Team.objects.get(name="Red").member_role.members.count() == 3 + assert Team.objects.get(name="Blue").member_role.members.count() == 3 + + # Test remove feature enabled + backend.setting('TEAM_MAP')['Blue']['remove'] = True + backend.setting('TEAM_MAP')['Red']['remove'] = True + backend.setting('TEAM_MAP')['Blue']['users'] = '' + backend.setting('TEAM_MAP')['Red']['users'] = '' + + update_user_teams(backend, None, u1) + + assert Team.objects.get(name="Red").member_role.members.count() == 2 + assert Team.objects.get(name="Blue").member_role.members.count() == 2 + + # Test remove feature disabled + backend.setting('TEAM_MAP')['Blue']['remove'] = False + backend.setting('TEAM_MAP')['Red']['remove'] = False + + update_user_teams(backend, None, u2) + + assert Team.objects.get(name="Red").member_role.members.count() == 2 + assert Team.objects.get(name="Blue").member_role.members.count() == 2 diff --git a/awx/sso/tests/unit/test_pipeline.py b/awx/sso/tests/unit/test_pipeline.py deleted file mode 100644 index 8e1dd4e92f..0000000000 --- a/awx/sso/tests/unit/test_pipeline.py +++ /dev/null @@ -1,2 +0,0 @@ -def test_module_loads(): - from awx.sso import pipeline # noqa diff --git a/awx/sso/tests/unit/test_pipelines.py b/awx/sso/tests/unit/test_pipelines.py new file mode 100644 index 0000000000..94a1111187 --- /dev/null +++ b/awx/sso/tests/unit/test_pipelines.py @@ -0,0 +1,12 @@ +import pytest + + +@pytest.mark.parametrize( + "lib", + [ + ("saml_pipeline"), + ("social_pipeline"), + ], +) +def test_module_loads(lib): + module = __import__("awx.sso." 
+ lib) # noqa diff --git a/awx/ui/src/components/RelatedTemplateList/RelatedTemplateList.js b/awx/ui/src/components/RelatedTemplateList/RelatedTemplateList.js index f13208e4f0..69ed14eb93 100644 --- a/awx/ui/src/components/RelatedTemplateList/RelatedTemplateList.js +++ b/awx/ui/src/components/RelatedTemplateList/RelatedTemplateList.js @@ -34,8 +34,14 @@ const QS_CONFIG = getQSConfig('template', { order_by: 'name', }); -function RelatedTemplateList({ searchParams, projectName = null }) { - const { id: projectId } = useParams(); +const resources = { + projects: 'project', + inventories: 'inventory', + credentials: 'credentials', +}; + +function RelatedTemplateList({ searchParams, resourceName = null }) { + const { id } = useParams(); const location = useLocation(); const { addToast, Toast, toastProps } = useToast(); @@ -129,12 +135,19 @@ function RelatedTemplateList({ searchParams, projectName = null }) { actions && Object.prototype.hasOwnProperty.call(actions, 'POST'); let linkTo = ''; - - if (projectName) { - const qs = encodeQueryString({ - project_id: projectId, - project_name: projectName, - }); + if (resourceName) { + const queryString = { + resource_id: id, + resource_name: resourceName, + resource_type: resources[location.pathname.split('/')[1]], + resource_kind: null, + }; + if (Array.isArray(resourceName)) { + const [name, kind] = resourceName; + queryString.resource_name = name; + queryString.resource_kind = kind; + } + const qs = encodeQueryString(queryString); linkTo = `/templates/job_template/add/?${qs}`; } else { linkTo = '/templates/job_template/add'; diff --git a/awx/ui/src/components/RelatedTemplateList/relatedTemplateHelpers.js b/awx/ui/src/components/RelatedTemplateList/relatedTemplateHelpers.js new file mode 100644 index 0000000000..95ecb0ce85 --- /dev/null +++ b/awx/ui/src/components/RelatedTemplateList/relatedTemplateHelpers.js @@ -0,0 +1 @@ +/* eslint-disable import/prefer-default-export */ diff --git 
a/awx/ui/src/components/Search/AdvancedSearch.test.js b/awx/ui/src/components/Search/AdvancedSearch.test.js index 5050ff63af..8258ef6812 100644 --- a/awx/ui/src/components/Search/AdvancedSearch.test.js +++ b/awx/ui/src/components/Search/AdvancedSearch.test.js @@ -420,7 +420,7 @@ describe('', () => { const selectOptions = wrapper.find( 'Select[aria-label="Related search type"] SelectOption' ); - expect(selectOptions).toHaveLength(2); + expect(selectOptions).toHaveLength(3); expect( selectOptions.find('SelectOption[id="name-option-select"]').prop('value') ).toBe('name__icontains'); diff --git a/awx/ui/src/components/Search/RelatedLookupTypeInput.js b/awx/ui/src/components/Search/RelatedLookupTypeInput.js index effbc4199a..008c83164b 100644 --- a/awx/ui/src/components/Search/RelatedLookupTypeInput.js +++ b/awx/ui/src/components/Search/RelatedLookupTypeInput.js @@ -31,6 +31,12 @@ function RelatedLookupTypeInput({ value="name__icontains" description={t`Fuzzy search on name field.`} /> + , diff --git a/awx/ui/src/screens/Credential/Credential.test.js b/awx/ui/src/screens/Credential/Credential.test.js index b66619c877..2df3162205 100644 --- a/awx/ui/src/screens/Credential/Credential.test.js +++ b/awx/ui/src/screens/Credential/Credential.test.js @@ -6,7 +6,8 @@ import { mountWithContexts, waitForElement, } from '../../../testUtils/enzymeHelpers'; -import mockCredential from './shared/data.scmCredential.json'; +import mockMachineCredential from './shared/data.machineCredential.json'; +import mockSCMCredential from './shared/data.scmCredential.json'; import Credential from './Credential'; jest.mock('../../api'); @@ -21,13 +22,10 @@ jest.mock('react-router-dom', () => ({ describe('', () => { let wrapper; - beforeEach(() => { + test('initially renders user-based machine credential successfully', async () => { CredentialsAPI.readDetail.mockResolvedValueOnce({ - data: mockCredential, + data: mockMachineCredential, }); - }); - - test('initially renders user-based credential 
successfully', async () => { await act(async () => { wrapper = mountWithContexts( {}} />); }); @@ -36,6 +34,18 @@ describe('', () => { expect(wrapper.find('RoutedTabs li').length).toBe(4); }); + test('initially renders user-based SCM credential successfully', async () => { + CredentialsAPI.readDetail.mockResolvedValueOnce({ + data: mockSCMCredential, + }); + await act(async () => { + wrapper = mountWithContexts( {}} />); + }); + wrapper.update(); + expect(wrapper.find('Credential').length).toBe(1); + expect(wrapper.find('RoutedTabs li').length).toBe(3); + }); + test('should render expected tabs', async () => { const expectedTabs = [ 'Back to Credentials', diff --git a/awx/ui/src/screens/InstanceGroup/InstanceDetails/InstanceDetails.js b/awx/ui/src/screens/InstanceGroup/InstanceDetails/InstanceDetails.js index b4ab4dc81f..e0dd8a09fa 100644 --- a/awx/ui/src/screens/InstanceGroup/InstanceDetails/InstanceDetails.js +++ b/awx/ui/src/screens/InstanceGroup/InstanceDetails/InstanceDetails.js @@ -81,35 +81,30 @@ function InstanceDetails({ setBreadcrumb, instanceGroup }) { const { data: { results }, } = await InstanceGroupsAPI.readInstances(instanceGroup.id); - let instanceDetails; const isAssociated = results.some( ({ id: instId }) => instId === parseInt(instanceId, 10) ); if (isAssociated) { - const [{ data: details }, { data: healthCheckData }] = - await Promise.all([ - InstancesAPI.readDetail(instanceId), - InstancesAPI.readHealthCheckDetail(instanceId), - ]); - - instanceDetails = details; - setHealthCheck(healthCheckData); - } else { - throw new Error( - `This instance is not associated with this instance group` + const { data: details } = await InstancesAPI.readDetail(instanceId); + if (details.node_type === 'execution') { + const { data: healthCheckData } = + await InstancesAPI.readHealthCheckDetail(instanceId); + setHealthCheck(healthCheckData); + } + setBreadcrumb(instanceGroup, details); + setForks( + computeForks( + details.mem_capacity, + details.cpu_capacity, + 
details.capacity_adjustment + ) ); + return { instance: details }; } - - setBreadcrumb(instanceGroup, instanceDetails); - setForks( - computeForks( - instanceDetails.mem_capacity, - instanceDetails.cpu_capacity, - instanceDetails.capacity_adjustment - ) + throw new Error( + `This instance is not associated with this instance group` ); - return { instance: instanceDetails }; }, [instanceId, setBreadcrumb, instanceGroup]), { instance: {}, isLoading: true } ); diff --git a/awx/ui/src/screens/Inventory/Inventory.js b/awx/ui/src/screens/Inventory/Inventory.js index d26c0fc5ce..53da122cd6 100644 --- a/awx/ui/src/screens/Inventory/Inventory.js +++ b/awx/ui/src/screens/Inventory/Inventory.js @@ -181,6 +181,7 @@ function Inventory({ setBreadcrumb }) { > , diff --git a/awx/ui/src/screens/Job/JobOutput/JobOutput.js b/awx/ui/src/screens/Job/JobOutput/JobOutput.js index c4949d1ffe..12d541ffad 100644 --- a/awx/ui/src/screens/Job/JobOutput/JobOutput.js +++ b/awx/ui/src/screens/Job/JobOutput/JobOutput.js @@ -187,7 +187,9 @@ function JobOutput({ job, eventRelatedSearchableKeys, eventSearchableKeys }) { useEffect(() => { const pendingRequests = Object.values(eventByUuidRequests.current || {}); setHasContentLoading(true); // prevents "no content found" screen from flashing - setIsFollowModeEnabled(false); + if (location.search) { + setIsFollowModeEnabled(false); + } Promise.allSettled(pendingRequests).then(() => { setRemoteRowCount(0); clearLoadedEvents(); @@ -251,6 +253,9 @@ function JobOutput({ job, eventRelatedSearchableKeys, eventSearchableKeys }) { }); const updated = oldWsEvents.concat(newEvents); jobSocketCounter.current = updated.length; + if (!oldWsEvents.length && min > remoteRowCount + 1) { + loadJobEvents(min); + } return updated.sort((a, b) => a.counter - b.counter); }); setCssMap((prevCssMap) => ({ @@ -358,7 +363,7 @@ function JobOutput({ job, eventRelatedSearchableKeys, eventSearchableKeys }) { } }; - const loadJobEvents = async () => { + const loadJobEvents = async 
(firstWsCounter = null) => { const [params, loadRange] = getEventRequestParams(job, 50, [1, 50]); if (isMounted.current) { @@ -371,6 +376,9 @@ function JobOutput({ job, eventRelatedSearchableKeys, eventSearchableKeys }) { if (isFlatMode) { params.not__stdout = ''; } + if (firstWsCounter) { + params.counter__lt = firstWsCounter; + } const qsParams = parseQueryString(QS_CONFIG, location.search); const eventPromise = getJobModel(job.type).readEvents(job.id, { ...params, @@ -435,7 +443,7 @@ function JobOutput({ job, eventRelatedSearchableKeys, eventSearchableKeys }) { if (getEvent(counter)) { return true; } - if (index > remoteRowCount && index < remoteRowCount + wsEvents.length) { + if (index >= remoteRowCount && index < remoteRowCount + wsEvents.length) { return true; } return currentlyLoading.includes(counter); @@ -462,7 +470,7 @@ function JobOutput({ job, eventRelatedSearchableKeys, eventSearchableKeys }) { } if ( !event && - index > remoteRowCount && + index >= remoteRowCount && index < remoteRowCount + wsEvents.length ) { event = wsEvents[index - remoteRowCount]; @@ -629,10 +637,14 @@ function JobOutput({ job, eventRelatedSearchableKeys, eventSearchableKeys }) { setIsFollowModeEnabled(false); }; - const scrollToEnd = () => { + const scrollToEnd = useCallback(() => { scrollToRow(-1); - setTimeout(() => scrollToRow(-1), 100); - }; + let timeout; + if (isFollowModeEnabled) { + setTimeout(() => scrollToRow(-1), 100); + } + return () => clearTimeout(timeout); + }, [isFollowModeEnabled]); const handleScrollLast = () => { scrollToEnd(); diff --git a/awx/ui/src/screens/Project/Project.js b/awx/ui/src/screens/Project/Project.js index f3500d3eda..6ec4bd9558 100644 --- a/awx/ui/src/screens/Project/Project.js +++ b/awx/ui/src/screens/Project/Project.js @@ -179,7 +179,7 @@ function Project({ setBreadcrumb }) { searchParams={{ project__id: project.id, }} - projectName={project.name} + resourceName={project.name} /> {project?.scm_type && project.scm_type !== '' && ( diff --git 
a/awx/ui/src/screens/Setting/Jobs/JobsEdit/JobsEdit.js b/awx/ui/src/screens/Setting/Jobs/JobsEdit/JobsEdit.js index 6ae68c1c8d..52e216e41e 100644 --- a/awx/ui/src/screens/Setting/Jobs/JobsEdit/JobsEdit.js +++ b/awx/ui/src/screens/Setting/Jobs/JobsEdit/JobsEdit.js @@ -141,14 +141,14 @@ function JobsEdit() { ', () => { await waitForElement(wrapper, 'ContentLoading', (el) => el.length === 0); expect(wrapper.find('ContentError').length).toBe(1); }); + + test('Form input fields that are invisible (due to being set manually via a settings file) should not prevent submitting the form', async () => { + const mockOptions = Object.assign({}, mockAllOptions); + // If AWX_ISOLATION_BASE_PATH has been set in a settings file it will be absent in the PUT options + delete mockOptions['actions']['PUT']['AWX_ISOLATION_BASE_PATH']; + await act(async () => { + wrapper = mountWithContexts( + + + + ); + }); + await waitForElement(wrapper, 'ContentLoading', (el) => el.length === 0); + await act(async () => { + wrapper.find('Form').invoke('onSubmit')(); + }); + expect(SettingsAPI.updateAll).toHaveBeenCalledTimes(1); + }); }); diff --git a/awx/ui/src/screens/Setting/shared/SharedFields.js b/awx/ui/src/screens/Setting/shared/SharedFields.js index 9fd1817bb8..06851e3b9e 100644 --- a/awx/ui/src/screens/Setting/shared/SharedFields.js +++ b/awx/ui/src/screens/Setting/shared/SharedFields.js @@ -397,7 +397,10 @@ const InputField = ({ name, config, type = 'text', isRequired = false }) => { }; InputField.propTypes = { name: string.isRequired, - config: shape({}).isRequired, + config: shape({}), +}; +InputField.defaultProps = { + config: null, }; const TextAreaField = ({ name, config, isRequired = false }) => { diff --git a/awx/ui/src/screens/Template/JobTemplateAdd/JobTemplateAdd.js b/awx/ui/src/screens/Template/JobTemplateAdd/JobTemplateAdd.js index f4d0f4f49a..ebb171bb1e 100644 --- a/awx/ui/src/screens/Template/JobTemplateAdd/JobTemplateAdd.js +++ 
b/awx/ui/src/screens/Template/JobTemplateAdd/JobTemplateAdd.js @@ -9,29 +9,31 @@ function JobTemplateAdd() { const [formSubmitError, setFormSubmitError] = useState(null); const history = useHistory(); - const projectParams = { - project_id: null, - project_name: null, + const resourceParams = { + resource_id: null, + resource_name: null, + resource_type: null, + resource_kind: null, }; history.location.search .replace(/^\?/, '') .split('&') .map((s) => s.split('=')) .forEach(([key, val]) => { - if (!(key in projectParams)) { + if (!(key in resourceParams)) { return; } - projectParams[key] = decodeURIComponent(val); + resourceParams[key] = decodeURIComponent(val); }); - let projectValues = null; + let resourceValues = null; - if ( - Object.values(projectParams).filter((item) => item !== null).length === 2 - ) { - projectValues = { - id: projectParams.project_id, - name: projectParams.project_name, + if (history.location.search.includes('resource_id' && 'resource_name')) { + resourceValues = { + id: resourceParams.resource_id, + name: resourceParams.resource_name, + type: resourceParams.resource_type, + kind: resourceParams.resource_kind, // refers to credential kind }; } @@ -122,7 +124,7 @@ function JobTemplateAdd() { handleCancel={handleCancel} handleSubmit={handleSubmit} submitError={formSubmitError} - projectValues={projectValues} + resourceValues={resourceValues} isOverrideDisabledLookup /> diff --git a/awx/ui/src/screens/Template/JobTemplateAdd/JobTemplateAdd.test.js b/awx/ui/src/screens/Template/JobTemplateAdd/JobTemplateAdd.test.js index 3fd63394dc..e50b91d8c8 100644 --- a/awx/ui/src/screens/Template/JobTemplateAdd/JobTemplateAdd.test.js +++ b/awx/ui/src/screens/Template/JobTemplateAdd/JobTemplateAdd.test.js @@ -274,9 +274,14 @@ describe('', () => { test('should parse and pre-fill project field from query params', async () => { const history = createMemoryHistory({ initialEntries: [ - '/templates/job_template/add/add?project_id=6&project_name=Demo%20Project', 
+ '/templates/job_template/add?resource_id=6&resource_name=Demo%20Project&resource_type=project', ], }); + ProjectsAPI.read.mockResolvedValueOnce({ + count: 1, + results: [{ name: 'foo', id: 1, allow_override: true, organization: 1 }], + }); + ProjectsAPI.readOptions.mockResolvedValueOnce({}); let wrapper; await act(async () => { wrapper = mountWithContexts(, { @@ -284,8 +289,9 @@ describe('', () => { }); }); await waitForElement(wrapper, 'EmptyStateBody', (el) => el.length === 0); + expect(wrapper.find('input#project').prop('value')).toEqual('Demo Project'); - expect(ProjectsAPI.readPlaybooks).toBeCalledWith('6'); + expect(ProjectsAPI.readPlaybooks).toBeCalledWith(6); }); test('should not call ProjectsAPI.readPlaybooks if there is no project', async () => { diff --git a/awx/ui/src/screens/Template/shared/JobTemplateForm.js b/awx/ui/src/screens/Template/shared/JobTemplateForm.js index 7621601e9e..dcdfd0d956 100644 --- a/awx/ui/src/screens/Template/shared/JobTemplateForm.js +++ b/awx/ui/src/screens/Template/shared/JobTemplateForm.js @@ -690,7 +690,7 @@ JobTemplateForm.defaultProps = { }; const FormikApp = withFormik({ - mapPropsToValues({ projectValues = {}, template = {} }) { + mapPropsToValues({ resourceValues = null, template = {} }) { const { summary_fields = { labels: { results: [] }, @@ -698,7 +698,7 @@ const FormikApp = withFormik({ }, } = template; - return { + const initialValues = { allow_callbacks: template.allow_callbacks || false, allow_simultaneous: template.allow_simultaneous || false, ask_credential_on_launch: template.ask_credential_on_launch || false, @@ -739,7 +739,7 @@ const FormikApp = withFormik({ playbook: template.playbook || '', prevent_instance_group_fallback: template.prevent_instance_group_fallback || false, - project: summary_fields?.project || projectValues || null, + project: summary_fields?.project || null, scm_branch: template.scm_branch || '', skip_tags: template.skip_tags || '', timeout: template.timeout || 0, @@ -756,6 +756,24 @@ 
const FormikApp = withFormik({ execution_environment: template.summary_fields?.execution_environment || null, }; + if (resourceValues !== null) { + if (resourceValues.type === 'credentials') { + initialValues[resourceValues.type] = [ + { + id: parseInt(resourceValues.id, 10), + name: resourceValues.name, + kind: resourceValues.kind, + }, + ]; + } else { + initialValues[resourceValues.type] = { + id: parseInt(resourceValues.id, 10), + name: resourceValues.name, + }; + } + } + + return initialValues; }, handleSubmit: async (values, { props, setErrors }) => { try { diff --git a/awx/ui/src/screens/WorkflowApproval/WorkflowApprovalList/WorkflowApprovalList.js b/awx/ui/src/screens/WorkflowApproval/WorkflowApprovalList/WorkflowApprovalList.js index 52edf9e865..dcfef81448 100644 --- a/awx/ui/src/screens/WorkflowApproval/WorkflowApprovalList/WorkflowApprovalList.js +++ b/awx/ui/src/screens/WorkflowApproval/WorkflowApprovalList/WorkflowApprovalList.js @@ -12,11 +12,16 @@ import PaginatedTable, { import AlertModal from 'components/AlertModal'; import ErrorDetail from 'components/ErrorDetail'; import DataListToolbar from 'components/DataListToolbar'; -import useRequest, { useDeleteItems } from 'hooks/useRequest'; +import useRequest, { + useDeleteItems, + useDismissableError, +} from 'hooks/useRequest'; import useSelected from 'hooks/useSelected'; import { getQSConfig, parseQueryString } from 'util/qs'; import WorkflowApprovalListItem from './WorkflowApprovalListItem'; import useWsWorkflowApprovals from './useWsWorkflowApprovals'; +import WorkflowApprovalListApproveButton from './WorkflowApprovalListApproveButton'; +import WorkflowApprovalListDenyButton from './WorkflowApprovalListDenyButton'; const QS_CONFIG = getQSConfig('workflow_approvals', { page: 1, @@ -104,7 +109,50 @@ function WorkflowApprovalsList() { clearSelected(); }; - const isLoading = isWorkflowApprovalsLoading || isDeleteLoading; + const { + error: approveApprovalError, + isLoading: isApproveLoading, + request: 
approveWorkflowApprovals, + } = useRequest( + useCallback( + async () => + Promise.all(selected.map(({ id }) => WorkflowApprovalsAPI.approve(id))), + [selected] + ), + {} + ); + + const handleApprove = async () => { + await approveWorkflowApprovals(); + clearSelected(); + }; + + const { + error: denyApprovalError, + isLoading: isDenyLoading, + request: denyWorkflowApprovals, + } = useRequest( + useCallback( + async () => + Promise.all(selected.map(({ id }) => WorkflowApprovalsAPI.deny(id))), + [selected] + ), + {} + ); + + const handleDeny = async () => { + await denyWorkflowApprovals(); + clearSelected(); + }; + + const { error: actionError, dismissError: dismissActionError } = + useDismissableError(approveApprovalError || denyApprovalError); + + const isLoading = + isWorkflowApprovalsLoading || + isDeleteLoading || + isApproveLoading || + isDenyLoading; return ( <> @@ -138,6 +186,16 @@ function WorkflowApprovalsList() { onSelectAll={selectAll} qsConfig={QS_CONFIG} additionalControls={[ + , + , )} + {actionError && ( + + {approveApprovalError + ? 
t`Failed to approve one or more workflow approval.` + : t`Failed to deny one or more workflow approval.`} + + + )} ); } diff --git a/awx/ui/src/screens/WorkflowApproval/WorkflowApprovalList/WorkflowApprovalListApproveButton.js b/awx/ui/src/screens/WorkflowApproval/WorkflowApprovalList/WorkflowApprovalListApproveButton.js new file mode 100644 index 0000000000..e9c32927ac --- /dev/null +++ b/awx/ui/src/screens/WorkflowApproval/WorkflowApprovalList/WorkflowApprovalListApproveButton.js @@ -0,0 +1,76 @@ +import React, { useContext } from 'react'; + +import { t } from '@lingui/macro'; +import PropTypes from 'prop-types'; +import { Button, DropdownItem, Tooltip } from '@patternfly/react-core'; +import { KebabifiedContext } from 'contexts/Kebabified'; +import { WorkflowApproval } from 'types'; + +function cannotApprove(item) { + return !item.can_approve_or_deny; +} + +function WorkflowApprovalListApproveButton({ onApprove, selectedItems }) { + const { isKebabified } = useContext(KebabifiedContext); + + const renderTooltip = () => { + if (selectedItems.length === 0) { + return t`Select a row to approve`; + } + + const itemsUnableToApprove = selectedItems + .filter(cannotApprove) + .map((item) => item.name) + .join(', '); + + if (selectedItems.some(cannotApprove)) { + return t`You are unable to act on the following workflow approvals: ${itemsUnableToApprove}`; + } + + return t`Approve`; + }; + + const isDisabled = + selectedItems.length === 0 || selectedItems.some(cannotApprove); + + return ( + /* eslint-disable-next-line react/jsx-no-useless-fragment */ + <> + {isKebabified ? ( + + {t`Approve`} + + ) : ( + +
+ +
+
+ )} + + ); +} + +WorkflowApprovalListApproveButton.propTypes = { + onApprove: PropTypes.func.isRequired, + selectedItems: PropTypes.arrayOf(WorkflowApproval), +}; + +WorkflowApprovalListApproveButton.defaultProps = { + selectedItems: [], +}; + +export default WorkflowApprovalListApproveButton; diff --git a/awx/ui/src/screens/WorkflowApproval/WorkflowApprovalList/WorkflowApprovalListApproveButton.test.js b/awx/ui/src/screens/WorkflowApproval/WorkflowApprovalList/WorkflowApprovalListApproveButton.test.js new file mode 100644 index 0000000000..949c07ec10 --- /dev/null +++ b/awx/ui/src/screens/WorkflowApproval/WorkflowApprovalList/WorkflowApprovalListApproveButton.test.js @@ -0,0 +1,56 @@ +import React from 'react'; +import { mountWithContexts } from '../../../../testUtils/enzymeHelpers'; +import WorkflowApprovalListApproveButton from './WorkflowApprovalListApproveButton'; + +const workflowApproval = { + id: 1, + name: 'Foo', + can_approve_or_deny: true, + url: '/api/v2/workflow_approvals/218/', +}; + +describe('', () => { + test('should render button', () => { + const wrapper = mountWithContexts( + {}} + selectedItems={[]} + /> + ); + expect(wrapper.find('button')).toHaveLength(1); + }); + + test('should invoke onApprove prop', () => { + const onApprove = jest.fn(); + const wrapper = mountWithContexts( + + ); + wrapper.find('button').simulate('click'); + wrapper.update(); + expect(onApprove).toHaveBeenCalled(); + }); + + test('should disable button when no approve/deny permissions', () => { + const wrapper = mountWithContexts( + {}} + selectedItems={[{ ...workflowApproval, can_approve_or_deny: false }]} + /> + ); + expect(wrapper.find('button[disabled]')).toHaveLength(1); + }); + + test('should render tooltip', () => { + const wrapper = mountWithContexts( + {}} + selectedItems={[workflowApproval]} + /> + ); + expect(wrapper.find('Tooltip')).toHaveLength(1); + expect(wrapper.find('Tooltip').prop('content')).toEqual('Approve'); + }); +}); diff --git 
a/awx/ui/src/screens/WorkflowApproval/WorkflowApprovalList/WorkflowApprovalListDenyButton.js b/awx/ui/src/screens/WorkflowApproval/WorkflowApprovalList/WorkflowApprovalListDenyButton.js new file mode 100644 index 0000000000..a3cff0c231 --- /dev/null +++ b/awx/ui/src/screens/WorkflowApproval/WorkflowApprovalList/WorkflowApprovalListDenyButton.js @@ -0,0 +1,76 @@ +import React, { useContext } from 'react'; + +import { t } from '@lingui/macro'; +import PropTypes from 'prop-types'; +import { Button, DropdownItem, Tooltip } from '@patternfly/react-core'; +import { KebabifiedContext } from 'contexts/Kebabified'; +import { WorkflowApproval } from 'types'; + +function cannotDeny(item) { + return !item.can_approve_or_deny; +} + +function WorkflowApprovalListDenyButton({ onDeny, selectedItems }) { + const { isKebabified } = useContext(KebabifiedContext); + + const renderTooltip = () => { + if (selectedItems.length === 0) { + return t`Select a row to deny`; + } + + const itemsUnableToDeny = selectedItems + .filter(cannotDeny) + .map((item) => item.name) + .join(', '); + + if (selectedItems.some(cannotDeny)) { + return t`You are unable to act on the following workflow approvals: ${itemsUnableToDeny}`; + } + + return t`Deny`; + }; + + const isDisabled = + selectedItems.length === 0 || selectedItems.some(cannotDeny); + + return ( + /* eslint-disable-next-line react/jsx-no-useless-fragment */ + <> + {isKebabified ? ( + + {t`Deny`} + + ) : ( + +
+ +
+
+ )} + + ); +} + +WorkflowApprovalListDenyButton.propTypes = { + onDeny: PropTypes.func.isRequired, + selectedItems: PropTypes.arrayOf(WorkflowApproval), +}; + +WorkflowApprovalListDenyButton.defaultProps = { + selectedItems: [], +}; + +export default WorkflowApprovalListDenyButton; diff --git a/awx/ui/src/screens/WorkflowApproval/WorkflowApprovalList/WorkflowApprovalListDenyButton.test.js b/awx/ui/src/screens/WorkflowApproval/WorkflowApprovalList/WorkflowApprovalListDenyButton.test.js new file mode 100644 index 0000000000..a799ecf208 --- /dev/null +++ b/awx/ui/src/screens/WorkflowApproval/WorkflowApprovalList/WorkflowApprovalListDenyButton.test.js @@ -0,0 +1,53 @@ +import React from 'react'; +import { mountWithContexts } from '../../../../testUtils/enzymeHelpers'; +import WorkflowApprovalListDenyButton from './WorkflowApprovalListDenyButton'; + +const workflowApproval = { + id: 1, + name: 'Foo', + can_approve_or_deny: true, + url: '/api/v2/workflow_approvals/218/', +}; + +describe('', () => { + test('should render button', () => { + const wrapper = mountWithContexts( + {}} selectedItems={[]} /> + ); + expect(wrapper.find('button')).toHaveLength(1); + }); + + test('should invoke onDeny prop', () => { + const onDeny = jest.fn(); + const wrapper = mountWithContexts( + + ); + wrapper.find('button').simulate('click'); + wrapper.update(); + expect(onDeny).toHaveBeenCalled(); + }); + + test('should disable button when no approve/deny permissions', () => { + const wrapper = mountWithContexts( + {}} + selectedItems={[{ ...workflowApproval, can_approve_or_deny: false }]} + /> + ); + expect(wrapper.find('button[disabled]')).toHaveLength(1); + }); + + test('should render tooltip', () => { + const wrapper = mountWithContexts( + {}} + selectedItems={[workflowApproval]} + /> + ); + expect(wrapper.find('Tooltip')).toHaveLength(1); + expect(wrapper.find('Tooltip').prop('content')).toEqual('Deny'); + }); +}); diff --git a/awx_collection/meta/runtime.yml 
b/awx_collection/meta/runtime.yml index b23d5b87e2..e59becd4ea 100644 --- a/awx_collection/meta/runtime.yml +++ b/awx_collection/meta/runtime.yml @@ -46,90 +46,216 @@ action_groups: plugin_routing: inventory: tower: + deprecation: + removal_date: '2022-01-23' + warning_text: The tower_* plugins have been deprecated, use awx.awx.controller instead. redirect: awx.awx.controller lookup: tower_api: + deprecation: + removal_date: '2022-01-23' + warning_text: The tower_* plugins have been deprecated, use awx.awx.controller_api instead. redirect: awx.awx.controller_api tower_schedule_rrule: + deprecation: + removal_date: '2022-01-23' + warning_text: The tower_* plugins have been deprecated, use awx.awx.schedule_rrule instead. redirect: awx.awx.schedule_rrule modules: tower_ad_hoc_command_cancel: + deprecation: + removal_date: '2022-01-23' + warning_text: The tower_* modules have been deprecated, use awx.awx.ad_hoc_command_cancel instead. redirect: awx.awx.ad_hoc_command_cancel tower_ad_hoc_command_wait: + deprecation: + removal_date: '2022-01-23' + warning_text: The tower_* modules have been deprecated, use awx.awx.ad_hoc_command_wait instead. redirect: awx.awx.ad_hoc_command_wait tower_ad_hoc_command: + deprecation: + removal_date: '2022-01-23' + warning_text: The tower_* modules have been deprecated, use awx.awx.ad_hoc_command instead. redirect: awx.awx.ad_hoc_command tower_application: + deprecation: + removal_date: '2022-01-23' + warning_text: The tower_* modules have been deprecated, use awx.awx.application instead. redirect: awx.awx.application tower_meta: + deprecation: + removal_date: '2022-01-23' + warning_text: The tower_* modules have been deprecated, use awx.awx.controller_meta instead. redirect: awx.awx.controller_meta tower_credential_input_source: + deprecation: + removal_date: '2022-01-23' + warning_text: The tower_* modules have been deprecated, use awx.awx.credential_input_source instead. 
redirect: awx.awx.credential_input_source tower_credential_type: + deprecation: + removal_date: '2022-01-23' + warning_text: The tower_* modules have been deprecated, use awx.awx.credential_type instead. redirect: awx.awx.credential_type tower_credential: + deprecation: + removal_date: '2022-01-23' + warning_text: The tower_* modules have been deprecated, use awx.awx.credential instead. redirect: awx.awx.credential tower_execution_environment: + deprecation: + removal_date: '2022-01-23' + warning_text: The tower_* modules have been deprecated, use awx.awx.execution_environment instead. redirect: awx.awx.execution_environment tower_export: + deprecation: + removal_date: '2022-01-23' + warning_text: The tower_* modules have been deprecated, use awx.awx.export instead. redirect: awx.awx.export tower_group: + deprecation: + removal_date: '2022-01-23' + warning_text: The tower_* modules have been deprecated, use awx.awx.group instead. redirect: awx.awx.group tower_host: + deprecation: + removal_date: '2022-01-23' + warning_text: The tower_* modules have been deprecated, use awx.awx.host instead. redirect: awx.awx.host tower_import: + deprecation: + removal_date: '2022-01-23' + warning_text: The tower_* modules have been deprecated, use awx.awx.import instead. redirect: awx.awx.import tower_instance_group: + deprecation: + removal_date: '2022-01-23' + warning_text: The tower_* modules have been deprecated, use awx.awx.instance_group instead. redirect: awx.awx.instance_group tower_inventory_source_update: + deprecation: + removal_date: '2022-01-23' + warning_text: The tower_* modules have been deprecated, use awx.awx.inventory_source_update instead. redirect: awx.awx.inventory_source_update tower_inventory_source: + deprecation: + removal_date: '2022-01-23' + warning_text: The tower_* modules have been deprecated, use awx.awx.inventory_source instead. 
redirect: awx.awx.inventory_source tower_inventory: + deprecation: + removal_date: '2022-01-23' + warning_text: The tower_* modules have been deprecated, use awx.awx.inventory instead. redirect: awx.awx.inventory tower_job_cancel: + deprecation: + removal_date: '2022-01-23' + warning_text: The tower_* modules have been deprecated, use awx.awx.job_cancel instead. redirect: awx.awx.job_cancel tower_job_launch: + deprecation: + removal_date: '2022-01-23' + warning_text: The tower_* modules have been deprecated, use awx.awx.job_launch instead. redirect: awx.awx.job_launch tower_job_list: + deprecation: + removal_date: '2022-01-23' + warning_text: The tower_* modules have been deprecated, use awx.awx.job_list instead. redirect: awx.awx.job_list tower_job_template: + deprecation: + removal_date: '2022-01-23' + warning_text: The tower_* modules have been deprecated, use awx.awx.job_template instead. redirect: awx.awx.job_template tower_job_wait: + deprecation: + removal_date: '2022-01-23' + warning_text: The tower_* modules have been deprecated, use awx.awx.job_wait instead. redirect: awx.awx.job_wait tower_label: + deprecation: + removal_date: '2022-01-23' + warning_text: The tower_* modules have been deprecated, use awx.awx.label instead. redirect: awx.awx.label tower_license: + deprecation: + removal_date: '2022-01-23' + warning_text: The tower_* modules have been deprecated, use awx.awx.license instead. redirect: awx.awx.license tower_notification_template: + deprecation: + removal_date: '2022-01-23' + warning_text: The tower_* modules have been deprecated, use awx.awx.notification_template instead. redirect: awx.awx.notification_template tower_notification: redirect: awx.awx.notification_template tower_organization: + deprecation: + removal_date: '2022-01-23' + warning_text: The tower_* modules have been deprecated, use awx.awx.organization instead. 
redirect: awx.awx.organization tower_project_update: + deprecation: + removal_date: '2022-01-23' + warning_text: The tower_* modules have been deprecated, use awx.awx.project_update instead. redirect: awx.awx.project_update tower_project: + deprecation: + removal_date: '2022-01-23' + warning_text: The tower_* modules have been deprecated, use awx.awx.project instead. redirect: awx.awx.project tower_role: + deprecation: + removal_date: '2022-01-23' + warning_text: The tower_* modules have been deprecated, use awx.awx.role instead. redirect: awx.awx.role tower_schedule: + deprecation: + removal_date: '2022-01-23' + warning_text: The tower_* modules have been deprecated, use awx.awx.schedule instead. redirect: awx.awx.schedule tower_settings: + deprecation: + removal_date: '2022-01-23' + warning_text: The tower_* modules have been deprecated, use awx.awx.settings instead. redirect: awx.awx.settings tower_team: + deprecation: + removal_date: '2022-01-23' + warning_text: The tower_* modules have been deprecated, use awx.awx.team instead. redirect: awx.awx.team tower_token: + deprecation: + removal_date: '2022-01-23' + warning_text: The tower_* modules have been deprecated, use awx.awx.token instead. redirect: awx.awx.token tower_user: + deprecation: + removal_date: '2022-01-23' + warning_text: The tower_* modules have been deprecated, use awx.awx.user instead. redirect: awx.awx.user tower_workflow_approval: + deprecation: + removal_date: '2022-01-23' + warning_text: The tower_* modules have been deprecated, use awx.awx.workflow_approval instead. redirect: awx.awx.workflow_approval tower_workflow_job_template_node: + deprecation: + removal_date: '2022-01-23' + warning_text: The tower_* modules have been deprecated, use awx.awx.workflow_job_template_node instead. 
redirect: awx.awx.workflow_job_template_node tower_workflow_job_template: + deprecation: + removal_date: '2022-01-23' + warning_text: The tower_* modules have been deprecated, use awx.awx.workflow_job_template instead. redirect: awx.awx.workflow_job_template tower_workflow_launch: + deprecation: + removal_date: '2022-01-23' + warning_text: The tower_* modules have been deprecated, use awx.awx.workflow_launch instead. redirect: awx.awx.workflow_launch tower_workflow_node_wait: + deprecation: + removal_date: '2022-01-23' + warning_text: The tower_* modules have been deprecated, use awx.awx.workflow_node_wait instead. redirect: awx.awx.workflow_node_wait diff --git a/awx_collection/plugins/modules/group.py b/awx_collection/plugins/modules/group.py index 973fb7744d..c91bf164d9 100644 --- a/awx_collection/plugins/modules/group.py +++ b/awx_collection/plugins/modules/group.py @@ -128,7 +128,7 @@ def main(): description = module.params.get('description') state = module.params.pop('state') preserve_existing_hosts = module.params.get('preserve_existing_hosts') - preserve_existing_children = module.params.get('preserve_existing_groups') + preserve_existing_children = module.params.get('preserve_existing_children') variables = module.params.get('variables') # Attempt to look up the related items the user specified (these will fail the module if not found) diff --git a/awx_collection/plugins/modules/tower_ad_hoc_command.py b/awx_collection/plugins/modules/tower_ad_hoc_command.py deleted file mode 120000 index 1b02428042..0000000000 --- a/awx_collection/plugins/modules/tower_ad_hoc_command.py +++ /dev/null @@ -1 +0,0 @@ -ad_hoc_command.py \ No newline at end of file diff --git a/awx_collection/plugins/modules/tower_ad_hoc_command_cancel.py b/awx_collection/plugins/modules/tower_ad_hoc_command_cancel.py deleted file mode 120000 index 1d9c64563b..0000000000 --- a/awx_collection/plugins/modules/tower_ad_hoc_command_cancel.py +++ /dev/null @@ -1 +0,0 @@ -ad_hoc_command_cancel.py \ 
No newline at end of file diff --git a/awx_collection/plugins/modules/tower_ad_hoc_command_wait.py b/awx_collection/plugins/modules/tower_ad_hoc_command_wait.py deleted file mode 120000 index 50cc9f6eab..0000000000 --- a/awx_collection/plugins/modules/tower_ad_hoc_command_wait.py +++ /dev/null @@ -1 +0,0 @@ -ad_hoc_command_wait.py \ No newline at end of file diff --git a/awx_collection/plugins/modules/tower_application.py b/awx_collection/plugins/modules/tower_application.py deleted file mode 120000 index cc28a46af5..0000000000 --- a/awx_collection/plugins/modules/tower_application.py +++ /dev/null @@ -1 +0,0 @@ -application.py \ No newline at end of file diff --git a/awx_collection/plugins/modules/tower_controller_meta.py b/awx_collection/plugins/modules/tower_controller_meta.py deleted file mode 120000 index 603f9fa251..0000000000 --- a/awx_collection/plugins/modules/tower_controller_meta.py +++ /dev/null @@ -1 +0,0 @@ -controller_meta.py \ No newline at end of file diff --git a/awx_collection/plugins/modules/tower_credential.py b/awx_collection/plugins/modules/tower_credential.py deleted file mode 120000 index 76fc468892..0000000000 --- a/awx_collection/plugins/modules/tower_credential.py +++ /dev/null @@ -1 +0,0 @@ -credential.py \ No newline at end of file diff --git a/awx_collection/plugins/modules/tower_credential_input_source.py b/awx_collection/plugins/modules/tower_credential_input_source.py deleted file mode 120000 index b6824f7983..0000000000 --- a/awx_collection/plugins/modules/tower_credential_input_source.py +++ /dev/null @@ -1 +0,0 @@ -credential_input_source.py \ No newline at end of file diff --git a/awx_collection/plugins/modules/tower_credential_type.py b/awx_collection/plugins/modules/tower_credential_type.py deleted file mode 120000 index 3ef2c5aaa1..0000000000 --- a/awx_collection/plugins/modules/tower_credential_type.py +++ /dev/null @@ -1 +0,0 @@ -credential_type.py \ No newline at end of file diff --git 
a/awx_collection/plugins/modules/tower_execution_environment.py b/awx_collection/plugins/modules/tower_execution_environment.py deleted file mode 120000 index 0436ddac1d..0000000000 --- a/awx_collection/plugins/modules/tower_execution_environment.py +++ /dev/null @@ -1 +0,0 @@ -execution_environment.py \ No newline at end of file diff --git a/awx_collection/plugins/modules/tower_export.py b/awx_collection/plugins/modules/tower_export.py deleted file mode 120000 index b9ead459dc..0000000000 --- a/awx_collection/plugins/modules/tower_export.py +++ /dev/null @@ -1 +0,0 @@ -export.py \ No newline at end of file diff --git a/awx_collection/plugins/modules/tower_group.py b/awx_collection/plugins/modules/tower_group.py deleted file mode 120000 index 0d50916a64..0000000000 --- a/awx_collection/plugins/modules/tower_group.py +++ /dev/null @@ -1 +0,0 @@ -group.py \ No newline at end of file diff --git a/awx_collection/plugins/modules/tower_host.py b/awx_collection/plugins/modules/tower_host.py deleted file mode 120000 index 36a0bc2c59..0000000000 --- a/awx_collection/plugins/modules/tower_host.py +++ /dev/null @@ -1 +0,0 @@ -host.py \ No newline at end of file diff --git a/awx_collection/plugins/modules/tower_import.py b/awx_collection/plugins/modules/tower_import.py deleted file mode 120000 index b0354fac74..0000000000 --- a/awx_collection/plugins/modules/tower_import.py +++ /dev/null @@ -1 +0,0 @@ -import.py \ No newline at end of file diff --git a/awx_collection/plugins/modules/tower_instance_group.py b/awx_collection/plugins/modules/tower_instance_group.py deleted file mode 120000 index f7f770d778..0000000000 --- a/awx_collection/plugins/modules/tower_instance_group.py +++ /dev/null @@ -1 +0,0 @@ -instance_group.py \ No newline at end of file diff --git a/awx_collection/plugins/modules/tower_inventory.py b/awx_collection/plugins/modules/tower_inventory.py deleted file mode 120000 index f3f0a4990c..0000000000 --- a/awx_collection/plugins/modules/tower_inventory.py +++ 
/dev/null @@ -1 +0,0 @@ -inventory.py \ No newline at end of file diff --git a/awx_collection/plugins/modules/tower_inventory_source.py b/awx_collection/plugins/modules/tower_inventory_source.py deleted file mode 120000 index 462d6066fd..0000000000 --- a/awx_collection/plugins/modules/tower_inventory_source.py +++ /dev/null @@ -1 +0,0 @@ -inventory_source.py \ No newline at end of file diff --git a/awx_collection/plugins/modules/tower_inventory_source_update.py b/awx_collection/plugins/modules/tower_inventory_source_update.py deleted file mode 120000 index a283dfccb1..0000000000 --- a/awx_collection/plugins/modules/tower_inventory_source_update.py +++ /dev/null @@ -1 +0,0 @@ -inventory_source_update.py \ No newline at end of file diff --git a/awx_collection/plugins/modules/tower_job_cancel.py b/awx_collection/plugins/modules/tower_job_cancel.py deleted file mode 120000 index 6298f026cb..0000000000 --- a/awx_collection/plugins/modules/tower_job_cancel.py +++ /dev/null @@ -1 +0,0 @@ -job_cancel.py \ No newline at end of file diff --git a/awx_collection/plugins/modules/tower_job_launch.py b/awx_collection/plugins/modules/tower_job_launch.py deleted file mode 120000 index dbcb7048bc..0000000000 --- a/awx_collection/plugins/modules/tower_job_launch.py +++ /dev/null @@ -1 +0,0 @@ -job_launch.py \ No newline at end of file diff --git a/awx_collection/plugins/modules/tower_job_list.py b/awx_collection/plugins/modules/tower_job_list.py deleted file mode 120000 index 45e0ea6fe4..0000000000 --- a/awx_collection/plugins/modules/tower_job_list.py +++ /dev/null @@ -1 +0,0 @@ -job_list.py \ No newline at end of file diff --git a/awx_collection/plugins/modules/tower_job_template.py b/awx_collection/plugins/modules/tower_job_template.py deleted file mode 120000 index 4561927af7..0000000000 --- a/awx_collection/plugins/modules/tower_job_template.py +++ /dev/null @@ -1 +0,0 @@ -job_template.py \ No newline at end of file diff --git a/awx_collection/plugins/modules/tower_job_wait.py 
b/awx_collection/plugins/modules/tower_job_wait.py deleted file mode 120000 index 488cb4683d..0000000000 --- a/awx_collection/plugins/modules/tower_job_wait.py +++ /dev/null @@ -1 +0,0 @@ -job_wait.py \ No newline at end of file diff --git a/awx_collection/plugins/modules/tower_label.py b/awx_collection/plugins/modules/tower_label.py deleted file mode 120000 index 593aec76b8..0000000000 --- a/awx_collection/plugins/modules/tower_label.py +++ /dev/null @@ -1 +0,0 @@ -label.py \ No newline at end of file diff --git a/awx_collection/plugins/modules/tower_license.py b/awx_collection/plugins/modules/tower_license.py deleted file mode 120000 index 467811cd59..0000000000 --- a/awx_collection/plugins/modules/tower_license.py +++ /dev/null @@ -1 +0,0 @@ -license.py \ No newline at end of file diff --git a/awx_collection/plugins/modules/tower_notification_template.py b/awx_collection/plugins/modules/tower_notification_template.py deleted file mode 120000 index f24d407167..0000000000 --- a/awx_collection/plugins/modules/tower_notification_template.py +++ /dev/null @@ -1 +0,0 @@ -notification_template.py \ No newline at end of file diff --git a/awx_collection/plugins/modules/tower_organization.py b/awx_collection/plugins/modules/tower_organization.py deleted file mode 120000 index 2da5304b4a..0000000000 --- a/awx_collection/plugins/modules/tower_organization.py +++ /dev/null @@ -1 +0,0 @@ -organization.py \ No newline at end of file diff --git a/awx_collection/plugins/modules/tower_project.py b/awx_collection/plugins/modules/tower_project.py deleted file mode 120000 index 41d1fa306a..0000000000 --- a/awx_collection/plugins/modules/tower_project.py +++ /dev/null @@ -1 +0,0 @@ -project.py \ No newline at end of file diff --git a/awx_collection/plugins/modules/tower_project_update.py b/awx_collection/plugins/modules/tower_project_update.py deleted file mode 120000 index 6f22e2a4b1..0000000000 --- a/awx_collection/plugins/modules/tower_project_update.py +++ /dev/null @@ -1 +0,0 @@ 
-project_update.py \ No newline at end of file diff --git a/awx_collection/plugins/modules/tower_role.py b/awx_collection/plugins/modules/tower_role.py deleted file mode 120000 index 0a520b759b..0000000000 --- a/awx_collection/plugins/modules/tower_role.py +++ /dev/null @@ -1 +0,0 @@ -role.py \ No newline at end of file diff --git a/awx_collection/plugins/modules/tower_schedule.py b/awx_collection/plugins/modules/tower_schedule.py deleted file mode 120000 index a21a88885c..0000000000 --- a/awx_collection/plugins/modules/tower_schedule.py +++ /dev/null @@ -1 +0,0 @@ -schedule.py \ No newline at end of file diff --git a/awx_collection/plugins/modules/tower_settings.py b/awx_collection/plugins/modules/tower_settings.py deleted file mode 120000 index fff7c2ed4a..0000000000 --- a/awx_collection/plugins/modules/tower_settings.py +++ /dev/null @@ -1 +0,0 @@ -settings.py \ No newline at end of file diff --git a/awx_collection/plugins/modules/tower_team.py b/awx_collection/plugins/modules/tower_team.py deleted file mode 120000 index 320689b4cd..0000000000 --- a/awx_collection/plugins/modules/tower_team.py +++ /dev/null @@ -1 +0,0 @@ -team.py \ No newline at end of file diff --git a/awx_collection/plugins/modules/tower_token.py b/awx_collection/plugins/modules/tower_token.py deleted file mode 120000 index 0c41c0d586..0000000000 --- a/awx_collection/plugins/modules/tower_token.py +++ /dev/null @@ -1 +0,0 @@ -token.py \ No newline at end of file diff --git a/awx_collection/plugins/modules/tower_user.py b/awx_collection/plugins/modules/tower_user.py deleted file mode 120000 index 576f943a25..0000000000 --- a/awx_collection/plugins/modules/tower_user.py +++ /dev/null @@ -1 +0,0 @@ -user.py \ No newline at end of file diff --git a/awx_collection/plugins/modules/tower_workflow_approval.py b/awx_collection/plugins/modules/tower_workflow_approval.py deleted file mode 120000 index 76ba8f3be2..0000000000 --- a/awx_collection/plugins/modules/tower_workflow_approval.py +++ /dev/null @@ 
-1 +0,0 @@ -workflow_approval.py \ No newline at end of file diff --git a/awx_collection/plugins/modules/tower_workflow_job_template.py b/awx_collection/plugins/modules/tower_workflow_job_template.py deleted file mode 120000 index 914891e32a..0000000000 --- a/awx_collection/plugins/modules/tower_workflow_job_template.py +++ /dev/null @@ -1 +0,0 @@ -workflow_job_template.py \ No newline at end of file diff --git a/awx_collection/plugins/modules/tower_workflow_job_template_node.py b/awx_collection/plugins/modules/tower_workflow_job_template_node.py deleted file mode 120000 index 406b3cec5b..0000000000 --- a/awx_collection/plugins/modules/tower_workflow_job_template_node.py +++ /dev/null @@ -1 +0,0 @@ -workflow_job_template_node.py \ No newline at end of file diff --git a/awx_collection/plugins/modules/tower_workflow_launch.py b/awx_collection/plugins/modules/tower_workflow_launch.py deleted file mode 120000 index d0a93529d8..0000000000 --- a/awx_collection/plugins/modules/tower_workflow_launch.py +++ /dev/null @@ -1 +0,0 @@ -workflow_launch.py \ No newline at end of file diff --git a/awx_collection/plugins/modules/tower_workflow_node_wait.py b/awx_collection/plugins/modules/tower_workflow_node_wait.py deleted file mode 120000 index 25bf1d0a87..0000000000 --- a/awx_collection/plugins/modules/tower_workflow_node_wait.py +++ /dev/null @@ -1 +0,0 @@ -workflow_node_wait.py \ No newline at end of file diff --git a/awx_collection/plugins/modules/workflow_job_template.py b/awx_collection/plugins/modules/workflow_job_template.py index ba4d5cf102..41c5b66573 100644 --- a/awx_collection/plugins/modules/workflow_job_template.py +++ b/awx_collection/plugins/modules/workflow_job_template.py @@ -19,7 +19,6 @@ author: "John Westcott IV (@john-westcott-iv)" short_description: create, update, or destroy Automation Platform Controller workflow job templates. description: - Create, update, or destroy Automation Platform Controller workflow job templates. 
- - Replaces the deprecated tower_workflow_template module. - Use workflow_job_template_node after this, or use the workflow_nodes parameter to build the workflow's graph options: name: @@ -614,6 +613,10 @@ def create_workflow_nodes(module, response, workflow_nodes, workflow_id): if workflow_node['unified_job_template']['type'] != 'workflow_approval': module.fail_json(msg="Unable to Find unified_job_template: {0}".format(search_fields)) + inventory = workflow_node.get('inventory') + if inventory: + workflow_node_fields['inventory'] = module.resolve_name_to_id('inventories', inventory) + # Lookup Values for other fields for field_name in ( diff --git a/awx_collection/plugins/modules/workflow_job_template_node.py b/awx_collection/plugins/modules/workflow_job_template_node.py index 61f713f843..a59cd33f9c 100644 --- a/awx_collection/plugins/modules/workflow_job_template_node.py +++ b/awx_collection/plugins/modules/workflow_job_template_node.py @@ -20,7 +20,6 @@ short_description: create, update, or destroy Automation Platform Controller wor description: - Create, update, or destroy Automation Platform Controller workflow job template nodes. - Use this to build a graph for a workflow, which dictates what the workflow runs. - - Replaces the deprecated tower_workflow_template module schema command. - You can create nodes first, and link them afterwards, and not worry about ordering. For failsafe referencing of a node, specify identifier, WFJT, and organization. With those specified, you can choose to modify or not modify any other parameter. 
diff --git a/awx_collection/tools/roles/template_galaxy/templates/README.md.j2 b/awx_collection/tools/roles/template_galaxy/templates/README.md.j2 index 246897f1b4..e860c43f8c 100644 --- a/awx_collection/tools/roles/template_galaxy/templates/README.md.j2 +++ b/awx_collection/tools/roles/template_galaxy/templates/README.md.j2 @@ -74,6 +74,7 @@ Notable releases of the `{{ collection_namespace }}.{{ collection_package }}` co - 7.0.0 is intended to be identical to the content prior to the migration, aside from changes necessary to function as a collection. - 11.0.0 has no non-deprecated modules that depend on the deprecated `tower-cli` [PyPI](https://pypi.org/project/ansible-tower-cli/). - 19.2.1 large renaming purged "tower" names (like options and module names), adding redirects for old names + - 21.11.0 "tower" modules deprecated and symlinks removed. - 0.0.1-devel is the version you should see if installing from source, which is intended for development and expected to be unstable. {% else %} - 3.7.0 initial release diff --git a/awxkit/README.md b/awxkit/README.md index e07d1622e5..6fdf56f132 100644 --- a/awxkit/README.md +++ b/awxkit/README.md @@ -1,6 +1,10 @@ awxkit ====== -Python library that backs the provided `awx` command line client. +A Python library that backs the provided `awx` command line client. -For more information on installing the CLI and building the docs on how to use it, look [here](./awxkit/cli/docs). +It can be installed by running `pip install awxkit`. + +The PyPI repository can be found [here](https://pypi.org/project/awxkit/). + +For more information on installing the CLI and building the docs on how to use it, look [here](./awxkit/cli/docs). 
\ No newline at end of file diff --git a/docs/auth/saml.md b/docs/auth/saml.md index 49400e7b50..56d07f6c96 100644 --- a/docs/auth/saml.md +++ b/docs/auth/saml.md @@ -107,12 +107,12 @@ Below is an example of a SAML attribute that contains admin attributes: These properties can be defined either by a role or an attribute with the following configuration options: ``` { - "is_superuser_role": "awx_admins", + "is_superuser_role": ["awx_admins"], "is_superuser_attr": "is_superuser", - "is_superuser_value": "IT-Superadmin", - "is_system_auditor_role": "awx_auditors", + "is_superuser_value": ["IT-Superadmin"], + "is_system_auditor_role": ["awx_auditors"], "is_system_auditor_attr": "is_system_auditor", - "is_system_auditor_value": "Auditor" + "is_system_auditor_value": ["Auditor"] } ``` diff --git a/docs/websockets.md b/docs/websockets.md index a9fcd43926..84e1226594 100644 --- a/docs/websockets.md +++ b/docs/websockets.md @@ -10,13 +10,13 @@ To communicate between our different services we use websockets. Every AWX node Inside AWX we use the `emit_channel_notification` function which places messages onto the queue. The messages are given an explicit event group and event type which we later use in our wire protocol to control message delivery to the client. -### Broadcast Backplane +### Relay Backplane -Previously, AWX leveraged RabbitMQ to deliver Ansible events that emanated from one AWX node to all other AWX nodes so that any client listening and subscribed to the Websockets could get events from any running playbook. We are since moved off of RabbitMQ and onto a per-node local Redis instance. To maintain the requirement that any Websocket connection can receive events from any playbook running on any AWX node we still need to deliver every event to every AWX node. AWX does this via a fully connected Websocket backplane. 
+Previously, AWX leveraged RabbitMQ to deliver Ansible events that emanated from one AWX node to all other AWX nodes so that any client listening and subscribed to the Websockets could get events from any running playbook. We are since moved off of RabbitMQ and onto a per-node local Redis instance. To maintain the requirement that any Websocket connection can receive events from any playbook running on any AWX node we still need to deliver every event to every AWX node. AWX does this via a fully connected Websocket backplane. -#### Broadcast Backplane Token +#### Relay Backplane Token -AWX node(s) connect to every other node via the Websocket backplane. The backplane websockets initiate from the `wsbroadcast` process and connect to other nodes via the same nginx process that serves webpage websocket connections and marshalls incoming web/API requests. If you have configured AWX to run with an ssl terminated connection in front of nginx then you likely will have nginx configured to handle http traffic and thus the websocket connection will flow unencrypted over http. If you have nginx configured with ssl enabled, then the websocket traffic will flow encrypted. +AWX node(s) connect to every other node via the Websocket backplane. The backplane websockets initiate from the `wsrelay` process and connect to other nodes via the same nginx process that serves webpage websocket connections and marshalls incoming web/API requests. If you have configured AWX to run with an ssl terminated connection in front of nginx then you likely will have nginx configured to handle http traffic and thus the websocket connection will flow unencrypted over http. If you have nginx configured with ssl enabled, then the websocket traffic will flow encrypted. Authentication is accomplished via a shared secret that is generated and set at playbook install time. The shared secret is used to derive a payload that is exchanged via the http(s) header `secret`. 
The shared secret payload consists of a a `secret`, containing the shared secret, and a `nonce` which is used to mitigate replay attack windows. @@ -65,14 +65,14 @@ This section will specifically discuss deployment in the context of websockets a | `nginx` | listens on ports 80/443, handles HTTPS proxying, serves static assets, routes requests for `daphne` and `uwsgi` | | `uwsgi` | listens on port 8050, handles API requests | | `daphne` | listens on port 8051, handles websocket requests | -| `wsbroadcast` | no listening port, forwards all group messages to all cluster nodes | +| `wsrelay` | no listening port, forwards all group messages to all cluster nodes | | `supervisord` | (production-only) handles the process management of all the services except `nginx` | When a request comes in to `nginx` and has the `Upgrade` header and is for the path `/websocket`, then `nginx` knows that it should be routing that request to our `daphne` service. `daphne` handles websocket connections proxied by nginx. -`wsbroadcast` fully connects all cluster nodes via the `/websocket/broadcast/` endpoint to every other cluster nodes. Sends a copy of all group websocket messages to all other cluster nodes (i.e. job event type messages). +`wsrelay` fully connects all cluster nodes via the `/websocket/broadcast/` endpoint to every other cluster node. Sends a copy of all group websocket messages to all other cluster nodes (i.e. job event type messages). 
### Development - `nginx` listens on 8013/8043 instead of 80/443 diff --git a/requirements/requirements.in b/requirements/requirements.in index f58baf032e..78c0f5f20a 100644 --- a/requirements/requirements.in +++ b/requirements/requirements.in @@ -36,6 +36,7 @@ openshift pexpect==4.7.0 # see library notes prometheus_client psycopg2 +psycopg # psycopg3 is used to listen for pg_notify messages from web servers in awx.main.wsrelay where asyncio is used psutil pygerduty pyparsing==2.4.6 # Upgrading to v3 of pyparsing introduce errors on smart host filtering: Expected 'or' term, found 'or' (at char 15), (line:1, col:16) diff --git a/requirements/requirements.txt b/requirements/requirements.txt index 7293ebd046..26efb2784b 100644 --- a/requirements/requirements.txt +++ b/requirements/requirements.txt @@ -265,6 +265,8 @@ prometheus-client==0.15.0 # via -r /awx_devel/requirements/requirements.in psutil==5.9.4 # via -r /awx_devel/requirements/requirements.in +psycopg==3.1.4 + # via -r /awx_devel/requirements/requirements.in psycopg2==2.9.5 # via -r /awx_devel/requirements/requirements.in ptyprocess==0.7.0 @@ -425,7 +427,7 @@ txaio==22.2.1 typing-extensions==4.4.0 # via # azure-core - # pydantic + # psycopg # setuptools-rust # setuptools-scm # twisted diff --git a/tools/ansible/roles/dockerfile/templates/supervisor.conf.j2 b/tools/ansible/roles/dockerfile/templates/supervisor.conf.j2 index baf024fd66..3347513bb8 100644 --- a/tools/ansible/roles/dockerfile/templates/supervisor.conf.j2 +++ b/tools/ansible/roles/dockerfile/templates/supervisor.conf.j2 @@ -22,12 +22,11 @@ stderr_logfile=/dev/stderr stderr_logfile_maxbytes=0 [program:uwsgi] - {% if kube_dev | bool %} command = make uwsgi directory = /awx_devel environment = - DEV_RELOAD_COMMAND='supervisorctl -c /etc/supervisord_task.conf restart all; supervisorctl restart tower-processes:daphne tower-processes:wsbroadcast' + DEV_RELOAD_COMMAND='supervisorctl -c /etc/supervisord_task.conf restart all; supervisorctl restart 
tower-processes:daphne' {% else %} command = /var/lib/awx/venv/awx/bin/uwsgi /etc/tower/uwsgi.ini directory = /var/lib/awx @@ -58,12 +57,12 @@ stdout_logfile_maxbytes=0 stderr_logfile=/dev/stderr stderr_logfile_maxbytes=0 -[program:wsbroadcast] +[program:heartbeet] {% if kube_dev | bool %} -command = make wsbroadcast +command = make heartbeet directory = /awx_devel {% else %} -command = awx-manage run_wsbroadcast +command = awx-manage run_heartbeet directory = /var/lib/awx {% endif %} autorestart = true diff --git a/tools/ansible/roles/dockerfile/templates/supervisor_task.conf.j2 b/tools/ansible/roles/dockerfile/templates/supervisor_task.conf.j2 index a2f2bd5298..8c3a46342e 100644 --- a/tools/ansible/roles/dockerfile/templates/supervisor_task.conf.j2 +++ b/tools/ansible/roles/dockerfile/templates/supervisor_task.conf.j2 @@ -22,6 +22,23 @@ stdout_logfile_maxbytes=0 stderr_logfile=/dev/stderr stderr_logfile_maxbytes=0 +[program:wsrelay] +{% if kube_dev | bool %} +command = make wsrelay +directory = /awx_devel +{% else %} +command = awx-manage run_wsrelay +directory = /var/lib/awx +{% endif %} +autorestart = true +startsecs = 30 +stopasgroup=true +killasgroup=true +stdout_logfile=/dev/stdout +stdout_logfile_maxbytes=0 +stderr_logfile=/dev/stderr +stderr_logfile_maxbytes=0 + [program:callback-receiver] {% if kube_dev | bool %} command = make receiver @@ -40,7 +57,7 @@ stderr_logfile=/dev/stderr stderr_logfile_maxbytes=0 [group:tower-processes] -programs=dispatcher,callback-receiver +programs=dispatcher,callback-receiver,wsrelay priority=5 [eventlistener:superwatcher] diff --git a/tools/docker-compose/README.md b/tools/docker-compose/README.md index dd66c41450..f8e58dc043 100644 --- a/tools/docker-compose/README.md +++ b/tools/docker-compose/README.md @@ -295,7 +295,7 @@ Certain features or bugs are only applicable when running a cluster of AWX nodes `CONTROL_PLANE_NODE_COUNT` is configurable and defaults to 1, effectively a non-clustered AWX. 
-Note that you may see multiple messages of the form `2021-03-04 20:11:47,666 WARNING [-] awx.main.wsbroadcast Connection from awx_2 to awx_5 failed: 'Cannot connect to host awx_5:8013 ssl:False [Name or service not known]'.`. This can happen when you bring up a cluster of many nodes, say 10, then you bring up a cluster of less nodes, say 3. In this example, there will be 7 `Instance` records in the database that represent AWX instances. The AWX development environment mimics the VM deployment (vs. kubernetes) and expects the missing nodes to be brought back to healthy by the admin. The warning message you are seeing is all of the AWX nodes trying to connect the websocket backplane. You can manually delete the `Instance` records from the database i.e. `Instance.objects.get(hostname='awx_9').delete()` to stop the warnings. +Note that you may see multiple messages of the form `2021-03-04 20:11:47,666 WARNING [-] awx.main.wsrelay Connection from awx_2 to awx_5 failed: 'Cannot connect to host awx_5:8013 ssl:False [Name or service not known]'.`. This can happen when you bring up a cluster of many nodes, say 10, then you bring up a cluster of less nodes, say 3. In this example, there will be 7 `Instance` records in the database that represent AWX instances. The AWX development environment mimics the VM deployment (vs. kubernetes) and expects the missing nodes to be brought back to healthy by the admin. The warning message you are seeing is all of the AWX nodes trying to connect the websocket backplane. You can manually delete the `Instance` records from the database i.e. `Instance.objects.get(hostname='awx_9').delete()` to stop the warnings. 
### Start with Minikube diff --git a/tools/docker-compose/ansible/roles/sources/templates/haproxy.cfg.j2 b/tools/docker-compose/ansible/roles/sources/templates/haproxy.cfg.j2 index fab09ffc8e..f2aa3b4ec2 100644 --- a/tools/docker-compose/ansible/roles/sources/templates/haproxy.cfg.j2 +++ b/tools/docker-compose/ansible/roles/sources/templates/haproxy.cfg.j2 @@ -32,7 +32,7 @@ backend nodes option httpchk HEAD / HTTP/1.1\r\nHost:localhost {% for i in range(control_plane_node_count|int) %} {% set container_postfix = loop.index %} - server tools_awx_{{ container_postfix }} tools_awx_{{ container_postfix }}:8013 check + server tools_awx_{{ container_postfix }} tools_awx_{{ container_postfix }}:8013 check inter 10s {% endfor %} backend nodes_ssl @@ -40,7 +40,7 @@ backend nodes_ssl balance roundrobin {% for i in range(control_plane_node_count|int) %} {% set container_postfix = loop.index %} - server tools_awx_{{ container_postfix }} tools_awx_{{ container_postfix }}:8043 check + server tools_awx_{{ container_postfix }} tools_awx_{{ container_postfix }}:8043 check inter 10s {% endfor %} listen stats diff --git a/tools/docker-compose/supervisor.conf b/tools/docker-compose/supervisor.conf index 76d7d0dba4..17165ae4af 100644 --- a/tools/docker-compose/supervisor.conf +++ b/tools/docker-compose/supervisor.conf @@ -8,31 +8,34 @@ command = make dispatcher autorestart = true stopasgroup=true killasgroup=true -stdout_logfile=/dev/stdout -stdout_logfile_maxbytes=0 -stderr_logfile=/dev/stderr -stderr_logfile_maxbytes=0 +stdout_events_enabled = true +stderr_events_enabled = true [program:awx-receiver] command = make receiver autorestart = true stopasgroup=true killasgroup=true -stdout_logfile=/dev/stdout -stdout_logfile_maxbytes=0 -stderr_logfile=/dev/stderr -stderr_logfile_maxbytes=0 +stdout_events_enabled = true +stderr_events_enabled = true -[program:awx-wsbroadcast] -command = make wsbroadcast +[program:awx-wsrelay] +command = make wsrelay autorestart = true autorestart = true 
stopasgroup=true killasgroup=true -stdout_logfile=/dev/stdout -stdout_logfile_maxbytes=0 -stderr_logfile=/dev/stderr -stderr_logfile_maxbytes=0 +stdout_events_enabled = true +stderr_events_enabled = true + +[program:awx-heartbeet] +command = make heartbeet +autorestart = true +autorestart = true +stopasgroup=true +killasgroup=true +stdout_events_enabled = true +stderr_events_enabled = true [program:awx-rsyslog-configurer] command = make rsyslog-configurer @@ -61,30 +64,24 @@ stopwaitsecs = 1 stopsignal=KILL stopasgroup=true killasgroup=true -stdout_logfile=/dev/stdout -stdout_logfile_maxbytes=0 -stderr_logfile=/dev/stderr -stderr_logfile_maxbytes=0 +stdout_events_enabled = true +stderr_events_enabled = true [program:awx-daphne] command = make daphne autorestart = true stopasgroup=true killasgroup=true -stdout_logfile=/dev/stdout -stdout_logfile_maxbytes=0 -stderr_logfile=/dev/stderr -stderr_logfile_maxbytes=0 +stdout_events_enabled = true +stderr_events_enabled = true [program:awx-nginx] command = make nginx autorestart = true stopasgroup=true killasgroup=true -stdout_logfile=/dev/stdout -stdout_logfile_maxbytes=0 -stderr_logfile=/dev/stderr -stderr_logfile_maxbytes=0 +stdout_events_enabled = true +stderr_events_enabled = true [program:awx-rsyslogd] command = rsyslogd -n -i /var/run/awx-rsyslog/rsyslog.pid -f /var/lib/awx/rsyslog/rsyslog.conf @@ -100,13 +97,11 @@ command = receptor --config /etc/receptor/receptor.conf autorestart = true stopasgroup=true killasgroup=true -stdout_logfile=/dev/stdout -stdout_logfile_maxbytes=0 -stderr_logfile=/dev/stderr -stderr_logfile_maxbytes=0 +stdout_events_enabled = true +stderr_events_enabled = true [group:tower-processes] -programs=awx-dispatcher,awx-receiver,awx-uwsgi,awx-daphne,awx-nginx,awx-wsbroadcast,awx-rsyslogd, awx-cache-clear +programs=awx-dispatcher,awx-receiver,awx-uwsgi,awx-daphne,awx-nginx,awx-wsrelay,awx-rsyslogd,awx-heartbeet,awx-cache-clear priority=5 [program:awx-autoreload] @@ -115,10 +110,6 @@ autostart = 
true autorestart = true stopasgroup=true killasgroup=true -stdout_logfile=/dev/stdout -stdout_logfile_maxbytes=0 -stderr_logfile=/dev/stderr -stderr_logfile_maxbytes=0 stdout_events_enabled = true stderr_events_enabled = true @@ -127,9 +118,6 @@ command=stop-supervisor events=PROCESS_STATE_FATAL autorestart = true stderr_logfile=/dev/stdout -stderr_logfile_maxbytes=0 -stdout_logfile=/dev/stdout -stdout_logfile_maxbytes=0 [unix_http_server] file=/var/run/supervisor/supervisor.sock