From 4630757f5f75c886bbfd7c55cf0cecfab3588b2f Mon Sep 17 00:00:00 2001 From: npithonDR <123001064+npithonDR@users.noreply.github.com> Date: Thu, 9 Feb 2023 09:34:10 +0100 Subject: [PATCH 01/25] Fix error for byweekday in schedule_rruleset Fix error: ``` fatal: [localhost]: FAILED! => { "msg": "An unhandled exception occurred while running the lookup plugin 'awx.awx.schedule_rruleset'. Error was a , original message: In rule 1 byweekday must only contain values in monday, tuesday, wednesday, thursday, friday, saturday, sunday. In rule 1 byweekday must only contain values in monday, tuesday, wednesday, thursday, friday, saturday, sunday" } ``` with: ``` - name: Build a complex schedule for every monday using the rruleset plugin awx.awx.schedule: name: "Test build complex schedule" state: present unified_job_template: "template name" rrule: "{{ query('awx.awx.schedule_rruleset', '2030-04-30 10:30:45', rules=rrules, timezone='Europe/Paris' ) }}" vars: rrules: - frequency: 'day' interval: 1 byweekday: 'monday' ``` --- awx_collection/plugins/lookup/schedule_rruleset.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/awx_collection/plugins/lookup/schedule_rruleset.py b/awx_collection/plugins/lookup/schedule_rruleset.py index 6aefde48d0..d5850582b0 100644 --- a/awx_collection/plugins/lookup/schedule_rruleset.py +++ b/awx_collection/plugins/lookup/schedule_rruleset.py @@ -210,8 +210,7 @@ class LookupModule(LookupBase): def process_list(self, field_name, rule, valid_list, rule_number): return_values = [] - if isinstance(rule[field_name], list): - rule[field_name] = rule[field_name].split(',') + rule[field_name] = rule[field_name].split(',') for value in rule[field_name]: value = value.strip() if value not in valid_list: From 951eee944cf7417c4c8a98e81862579f86540727 Mon Sep 17 00:00:00 2001 From: npithonDR <123001064+npithonDR@users.noreply.github.com> Date: Mon, 13 Feb 2023 09:50:11 +0100 Subject: [PATCH 02/25] Add additional rruleset tests --- 
.../targets/lookup_rruleset/tasks/main.yml | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/awx_collection/tests/integration/targets/lookup_rruleset/tasks/main.yml b/awx_collection/tests/integration/targets/lookup_rruleset/tasks/main.yml index fe5b3f755d..7f9f0271b2 100644 --- a/awx_collection/tests/integration/targets/lookup_rruleset/tasks/main.yml +++ b/awx_collection/tests/integration/targets/lookup_rruleset/tasks/main.yml @@ -95,6 +95,22 @@ - results is failed - "'In rule 2 end_on must either be an integer or in the format YYYY-MM-DD [HH:MM:SS]' in results.msg" +- name: Every Mondays + set_fact: + complex_rule: "{{ query(ruleset_plugin_name, '2022-04-30 10:30:45', rules=rrules, timezone='UTC' ) }}" + ignore_errors: True + register: results + vars: + rrules: + - frequency: 'day' + interval: 1 + byweekday: 'monday' + +- assert: + that: + - results is success + - "'DTSTART;TZID=UTC:20220430T103045 RRULE:FREQ=DAILY;BYDAY=MO;INTERVAL=1' == complex_rule" + - name: call rruleset with an invalid byweekday set_fact: From ce7f597c7e79d9f78db6ccbc7fb88d603c98a654 Mon Sep 17 00:00:00 2001 From: Tom Siewert Date: Wed, 15 Feb 2023 12:51:36 +0100 Subject: [PATCH 03/25] Makefile: Make docker-compose command configurable docker-compose v1 is EOL since April 2022 and hasn't received any updates since May 2021. docker compose v2 is a complete rewrite in Go which acts as a plugin for the main docker application. The syntax is the same, but only the `compose` command differs. This commit adds the ability to override the default `docker-compose` command using `make DOCKER_COMPOSE='docker compose'`. 
Signed-off-by: Tom Siewert --- Makefile | 17 +++++++++-------- tools/docker-compose/README.md | 2 -- 2 files changed, 9 insertions(+), 10 deletions(-) diff --git a/Makefile b/Makefile index 896dcf2c6c..0a1bc58f71 100644 --- a/Makefile +++ b/Makefile @@ -1,4 +1,5 @@ PYTHON ?= python3.9 +DOCKER_COMPOSE ?= docker-compose OFFICIAL ?= no NODE ?= node NPM_BIN ?= npm @@ -509,20 +510,20 @@ docker-compose-sources: .git/hooks/pre-commit docker-compose: awx/projects docker-compose-sources - docker-compose -f tools/docker-compose/_sources/docker-compose.yml $(COMPOSE_OPTS) up $(COMPOSE_UP_OPTS) --remove-orphans + $(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml $(COMPOSE_OPTS) up $(COMPOSE_UP_OPTS) --remove-orphans docker-compose-credential-plugins: awx/projects docker-compose-sources echo -e "\033[0;31mTo generate a CyberArk Conjur API key: docker exec -it tools_conjur_1 conjurctl account create quick-start\033[0m" - docker-compose -f tools/docker-compose/_sources/docker-compose.yml -f tools/docker-credential-plugins-override.yml up --no-recreate awx_1 --remove-orphans + $(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml -f tools/docker-credential-plugins-override.yml up --no-recreate awx_1 --remove-orphans docker-compose-test: awx/projects docker-compose-sources - docker-compose -f tools/docker-compose/_sources/docker-compose.yml run --rm --service-ports awx_1 /bin/bash + $(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml run --rm --service-ports awx_1 /bin/bash docker-compose-runtest: awx/projects docker-compose-sources - docker-compose -f tools/docker-compose/_sources/docker-compose.yml run --rm --service-ports awx_1 /start_tests.sh + $(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml run --rm --service-ports awx_1 /start_tests.sh docker-compose-build-swagger: awx/projects docker-compose-sources - docker-compose -f tools/docker-compose/_sources/docker-compose.yml run --rm --service-ports 
--no-deps awx_1 /start_tests.sh swagger + $(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml run --rm --service-ports --no-deps awx_1 /start_tests.sh swagger SCHEMA_DIFF_BASE_BRANCH ?= devel detect-schema-change: genschema @@ -531,7 +532,7 @@ detect-schema-change: genschema diff -u -b reference-schema.json schema.json docker-compose-clean: awx/projects - docker-compose -f tools/docker-compose/_sources/docker-compose.yml rm -sf + $(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml rm -sf docker-compose-container-group-clean: @if [ -f "tools/docker-compose-minikube/_sources/minikube" ]; then \ @@ -559,10 +560,10 @@ docker-refresh: docker-clean docker-compose ## Docker Development Environment with Elastic Stack Connected docker-compose-elk: awx/projects docker-compose-sources - docker-compose -f tools/docker-compose/_sources/docker-compose.yml -f tools/elastic/docker-compose.logstash-link.yml -f tools/elastic/docker-compose.elastic-override.yml up --no-recreate + $(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml -f tools/elastic/docker-compose.logstash-link.yml -f tools/elastic/docker-compose.elastic-override.yml up --no-recreate docker-compose-cluster-elk: awx/projects docker-compose-sources - docker-compose -f tools/docker-compose/_sources/docker-compose.yml -f tools/elastic/docker-compose.logstash-link-cluster.yml -f tools/elastic/docker-compose.elastic-override.yml up --no-recreate + $(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml -f tools/elastic/docker-compose.logstash-link-cluster.yml -f tools/elastic/docker-compose.elastic-override.yml up --no-recreate docker-compose-container-group: MINIKUBE_CONTAINER_GROUP=true make docker-compose diff --git a/tools/docker-compose/README.md b/tools/docker-compose/README.md index dd66c41450..e2018b9adc 100644 --- a/tools/docker-compose/README.md +++ b/tools/docker-compose/README.md @@ -33,8 +33,6 @@ Notable files: ### Prerequisites - 
[Docker](https://docs.docker.com/engine/installation/) on the host where AWX will be deployed. After installing Docker, the Docker service must be started (depending on your OS, you may have to add the local user that uses Docker to the `docker` group, refer to the documentation for details) -- [docker-compose](https://pypi.org/project/docker-compose/) Python module. - - This also installs the `docker` Python module, which is incompatible with [`docker-py`](https://pypi.org/project/docker-py/). If you have previously installed `docker-py`, please uninstall it. - [Docker Compose](https://docs.docker.com/compose/install/). - [Ansible](https://docs.ansible.com/ansible/latest/installation_guide/intro_installation.html) will need to be installed as we use it to template files needed for the docker-compose. - OpenSSL. From b4803ca894e8d0016a54641f18f4cf2918937a86 Mon Sep 17 00:00:00 2001 From: Stanislav Zaprudskiy Date: Fri, 13 Jan 2023 10:22:53 +0100 Subject: [PATCH 04/25] Add disable_instance management command Signed-off-by: Stanislav Zaprudskiy --- .../management/commands/disable_instance.py | 153 ++++++++++++++++++ 1 file changed, 153 insertions(+) create mode 100644 awx/main/management/commands/disable_instance.py diff --git a/awx/main/management/commands/disable_instance.py b/awx/main/management/commands/disable_instance.py new file mode 100644 index 0000000000..3689154229 --- /dev/null +++ b/awx/main/management/commands/disable_instance.py @@ -0,0 +1,153 @@ +import socket +import time +from urllib.parse import urljoin + +from argparse import ArgumentTypeError + +from django.conf import settings +from django.core.management.base import BaseCommand, CommandError +from django.db.models import Q +from django.utils.timezone import now + +from awx.main.models import Instance, UnifiedJob + + +class AWXInstance(): + def __init__(self, **filter): + self.filter = filter + self.get_instance() + + def get_instance(self): + filter = self.filter if self.filter is not None else 
dict(hostname=socket.gethostname()) + qs = Instance.objects.filter(**filter) + if not qs.exists(): + raise ValueError(f"No AWX instance found with {filter} "\ + "parameters") + self.instance = qs.first() + + def disable(self): + if self.instance.enabled: + self.instance.enabled = False + self.instance.save() + return True + + def enable(self): + if not self.instance.enabled: + self.instance.enabled = True + self.instance.save() + return True + + def jobs(self): + return UnifiedJob.objects.filter( + Q(controller_node=self.instance.hostname) | Q(execution_node=self.instance.hostname), + status__in=("running", "waiting") + ) + + def jobs_pretty(self): + jobs = [] + for j in self.jobs(): + # similar calculation of `elapsed` as the corresponding serializer + # does + td = now() - j.started + elapsed = (td.microseconds + (td.seconds + td.days * 24 * 3600) * 10**6) / (10**6 * 1.0) + elapsed = float(elapsed) + details = dict( + name = j.name, + url = j.get_ui_url(), + elapsed = elapsed, + ) + jobs.append(details) + + jobs = sorted(jobs, reverse=True, key=lambda j: j["elapsed"]) + + return ", ".join( + [f"[\"{j['name']}\"]({j['url']})" for j in jobs] + ) + + def instance_pretty(self): + instance = ( + self.instance.hostname, + urljoin(settings.TOWER_URL_BASE, f"/#/instances/{self.instance.pk}/details"), + ) + return f"[\"{instance[0]}\"]({instance[1]})" + + +class Command(BaseCommand): + help = "Disable instance, optionally waiting for all its managed jobs " \ + "to finish." + + @staticmethod + def int_positive(arg): + int_arg = int(arg) + if int_arg < 1: + raise ArgumentTypeError(f"The value must be a positive number >= 1. 
Provided: \"{arg}\"") + return int_arg + + def add_arguments(self, parser): + filter_group = parser.add_mutually_exclusive_group() + + filter_group.add_argument("--hostname", type=str, + default=socket.gethostname(), + help=f"{Instance.hostname.field.help_text} Defaults to the " \ + "hostname of the machine where the Python interpreter is " \ + "currently executing".strip() + ) + filter_group.add_argument("--id", type=self.int_positive, + help=Instance.id.field.help_text + ) + + parser.add_argument("--wait", action="store_true", + help="Wait for jobs managed by the instance to finish. With " \ + "default retry arguments waits for about 3h", + ) + + parser.add_argument("--retry", type=self.int_positive, default=360, + help="Number of retries when waiting for jobs to finish. " \ + "Default: 360", + ) + + parser.add_argument("--retry_sleep", type=self.int_positive, default=30, + help="Number of seconds to sleep before consequtive retries " \ + "when waiting. Default: 30", + ) + + def handle(self, *args, **options): + try: + filter = dict(id=options["id"]) if options["id"] is not None else dict(hostname=options["hostname"]) + instance = AWXInstance(**filter) + except ValueError as e: + raise CommandError(e) + + if instance.disable(): + self.stdout.write(self.style.SUCCESS( + f"Instance {instance.instance_pretty()} has been disabled" + )) + else: + self.stdout.write( + f"Instance {instance.instance_pretty()} has already been disabled" + ) + + if not options["wait"]: + return + + rc = 1 + while instance.jobs().count() > 0: + if rc < options["retry"]: + self.stdout.write( + f"{rc}/{options['retry']}: " \ + f"Waiting {options['retry_sleep']}s before the next " \ + "attempt to see if the following instance' managed jobs " \ + f"have finished: {instance.jobs_pretty()}" + ) + rc += 1 + time.sleep(options["retry_sleep"]) + else: + raise CommandError( + f"{rc}/{options['retry']}: " \ + "No more retry attempts left, but the instance still " \ + f"has associated managed jobs: 
{instance.jobs_pretty()}" + ) + else: + self.stdout.write(self.style.SUCCESS( + "Done waiting for instance' managed jobs to finish!" + )) From d1c608a2819fd322bbc87e3a6d673e6ab5f6b48c Mon Sep 17 00:00:00 2001 From: Stanislav Zaprudskiy Date: Fri, 13 Jan 2023 11:38:01 +0100 Subject: [PATCH 05/25] Reformat with black Signed-off-by: Stanislav Zaprudskiy --- .../management/commands/disable_instance.py | 79 ++++++++----------- 1 file changed, 33 insertions(+), 46 deletions(-) diff --git a/awx/main/management/commands/disable_instance.py b/awx/main/management/commands/disable_instance.py index 3689154229..f17c72446c 100644 --- a/awx/main/management/commands/disable_instance.py +++ b/awx/main/management/commands/disable_instance.py @@ -12,7 +12,7 @@ from django.utils.timezone import now from awx.main.models import Instance, UnifiedJob -class AWXInstance(): +class AWXInstance: def __init__(self, **filter): self.filter = filter self.get_instance() @@ -21,8 +21,7 @@ class AWXInstance(): filter = self.filter if self.filter is not None else dict(hostname=socket.gethostname()) qs = Instance.objects.filter(**filter) if not qs.exists(): - raise ValueError(f"No AWX instance found with {filter} "\ - "parameters") + raise ValueError(f"No AWX instance found with {filter} parameters") self.instance = qs.first() def disable(self): @@ -39,8 +38,7 @@ class AWXInstance(): def jobs(self): return UnifiedJob.objects.filter( - Q(controller_node=self.instance.hostname) | Q(execution_node=self.instance.hostname), - status__in=("running", "waiting") + Q(controller_node=self.instance.hostname) | Q(execution_node=self.instance.hostname), status__in=("running", "waiting") ) def jobs_pretty(self): @@ -52,17 +50,15 @@ class AWXInstance(): elapsed = (td.microseconds + (td.seconds + td.days * 24 * 3600) * 10**6) / (10**6 * 1.0) elapsed = float(elapsed) details = dict( - name = j.name, - url = j.get_ui_url(), - elapsed = elapsed, + name=j.name, + url=j.get_ui_url(), + elapsed=elapsed, ) 
jobs.append(details) jobs = sorted(jobs, reverse=True, key=lambda j: j["elapsed"]) - return ", ".join( - [f"[\"{j['name']}\"]({j['url']})" for j in jobs] - ) + return ", ".join([f"[\"{j['name']}\"]({j['url']})" for j in jobs]) def instance_pretty(self): instance = ( @@ -73,8 +69,7 @@ class AWXInstance(): class Command(BaseCommand): - help = "Disable instance, optionally waiting for all its managed jobs " \ - "to finish." + help = "Disable instance, optionally waiting for all its managed jobs to finish." @staticmethod def int_positive(arg): @@ -86,29 +81,32 @@ class Command(BaseCommand): def add_arguments(self, parser): filter_group = parser.add_mutually_exclusive_group() - filter_group.add_argument("--hostname", type=str, + filter_group.add_argument( + "--hostname", + type=str, default=socket.gethostname(), - help=f"{Instance.hostname.field.help_text} Defaults to the " \ - "hostname of the machine where the Python interpreter is " \ - "currently executing".strip() + help=f"{Instance.hostname.field.help_text} Defaults to the hostname of the machine where the Python interpreter is currently executing".strip(), ) - filter_group.add_argument("--id", type=self.int_positive, - help=Instance.id.field.help_text + filter_group.add_argument("--id", type=self.int_positive, help=Instance.id.field.help_text) + + parser.add_argument( + "--wait", + action="store_true", + help="Wait for jobs managed by the instance to finish. With default retry arguments waits for about 3h", ) - parser.add_argument("--wait", action="store_true", - help="Wait for jobs managed by the instance to finish. With " \ - "default retry arguments waits for about 3h", + parser.add_argument( + "--retry", + type=self.int_positive, + default=360, + help="Number of retries when waiting for jobs to finish. Default: 360", ) - parser.add_argument("--retry", type=self.int_positive, default=360, - help="Number of retries when waiting for jobs to finish. 
" \ - "Default: 360", - ) - - parser.add_argument("--retry_sleep", type=self.int_positive, default=30, - help="Number of seconds to sleep before consequtive retries " \ - "when waiting. Default: 30", + parser.add_argument( + "--retry_sleep", + type=self.int_positive, + default=30, + help="Number of seconds to sleep before consequtive retries when waiting. Default: 30", ) def handle(self, *args, **options): @@ -119,13 +117,9 @@ class Command(BaseCommand): raise CommandError(e) if instance.disable(): - self.stdout.write(self.style.SUCCESS( - f"Instance {instance.instance_pretty()} has been disabled" - )) + self.stdout.write(self.style.SUCCESS(f"Instance {instance.instance_pretty()} has been disabled")) else: - self.stdout.write( - f"Instance {instance.instance_pretty()} has already been disabled" - ) + self.stdout.write(f"Instance {instance.instance_pretty()} has already been disabled") if not options["wait"]: return @@ -134,20 +128,13 @@ class Command(BaseCommand): while instance.jobs().count() > 0: if rc < options["retry"]: self.stdout.write( - f"{rc}/{options['retry']}: " \ - f"Waiting {options['retry_sleep']}s before the next " \ - "attempt to see if the following instance' managed jobs " \ - f"have finished: {instance.jobs_pretty()}" + f"{rc}/{options['retry']}: Waiting {options['retry_sleep']}s before the next attempt to see if the following instance' managed jobs have finished: {instance.jobs_pretty()}" ) rc += 1 time.sleep(options["retry_sleep"]) else: raise CommandError( - f"{rc}/{options['retry']}: " \ - "No more retry attempts left, but the instance still " \ - f"has associated managed jobs: {instance.jobs_pretty()}" + f"{rc}/{options['retry']}: No more retry attempts left, but the instance still has associated managed jobs: {instance.jobs_pretty()}" ) else: - self.stdout.write(self.style.SUCCESS( - "Done waiting for instance' managed jobs to finish!" 
- )) + self.stdout.write(self.style.SUCCESS("Done waiting for instance' managed jobs to finish!")) From 166b586591fc1e3b5247a94d7900b1a1145189c4 Mon Sep 17 00:00:00 2001 From: Stanislav Zaprudskiy Date: Fri, 13 Jan 2023 19:42:59 +0100 Subject: [PATCH 06/25] Support indefinitely waiting for jobs to finish Signed-off-by: Stanislav Zaprudskiy --- .../management/commands/disable_instance.py | 19 ++++++++++++------- 1 file changed, 12 insertions(+), 7 deletions(-) diff --git a/awx/main/management/commands/disable_instance.py b/awx/main/management/commands/disable_instance.py index f17c72446c..6e54912928 100644 --- a/awx/main/management/commands/disable_instance.py +++ b/awx/main/management/commands/disable_instance.py @@ -44,6 +44,8 @@ class AWXInstance: def jobs_pretty(self): jobs = [] for j in self.jobs(): + if not j.started: + continue # similar calculation of `elapsed` as the corresponding serializer # does td = now() - j.started @@ -72,7 +74,10 @@ class Command(BaseCommand): help = "Disable instance, optionally waiting for all its managed jobs to finish." @staticmethod - def int_positive(arg): + def ge_1(arg): + if arg == "inf": + return float(arg) + int_arg = int(arg) if int_arg < 1: raise ArgumentTypeError(f"The value must be a positive number >= 1. Provided: \"{arg}\"") @@ -87,24 +92,24 @@ class Command(BaseCommand): default=socket.gethostname(), help=f"{Instance.hostname.field.help_text} Defaults to the hostname of the machine where the Python interpreter is currently executing".strip(), ) - filter_group.add_argument("--id", type=self.int_positive, help=Instance.id.field.help_text) + filter_group.add_argument("--id", type=self.ge_1, help=Instance.id.field.help_text) parser.add_argument( "--wait", action="store_true", - help="Wait for jobs managed by the instance to finish. With default retry arguments waits for about 3h", + help="Wait for jobs managed by the instance to finish. 
With default retry arguments waits ~1h", ) parser.add_argument( "--retry", - type=self.int_positive, - default=360, - help="Number of retries when waiting for jobs to finish. Default: 360", + type=self.ge_1, + default=120, + help="Number of retries when waiting for jobs to finish. Default: 120. Also accepts \"inf\" to wait indefinitely", ) parser.add_argument( "--retry_sleep", - type=self.int_positive, + type=self.ge_1, default=30, help="Number of seconds to sleep before consequtive retries when waiting. Default: 30", ) From f2ab8d637cd1921c7d383032b36a89043de8ed1e Mon Sep 17 00:00:00 2001 From: Stanislav Zaprudskiy Date: Tue, 17 Jan 2023 13:28:34 +0100 Subject: [PATCH 07/25] Do not discard jobs w/ .started=None --- awx/main/management/commands/disable_instance.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/awx/main/management/commands/disable_instance.py b/awx/main/management/commands/disable_instance.py index 6e54912928..a0f7bfef10 100644 --- a/awx/main/management/commands/disable_instance.py +++ b/awx/main/management/commands/disable_instance.py @@ -44,11 +44,10 @@ class AWXInstance: def jobs_pretty(self): jobs = [] for j in self.jobs(): - if not j.started: - continue + job_started = j.started if j.started else now() # similar calculation of `elapsed` as the corresponding serializer # does - td = now() - j.started + td = now() - job_started elapsed = (td.microseconds + (td.seconds + td.days * 24 * 3600) * 10**6) / (10**6 * 1.0) elapsed = float(elapsed) details = dict( @@ -76,7 +75,7 @@ class Command(BaseCommand): @staticmethod def ge_1(arg): if arg == "inf": - return float(arg) + return float("inf") int_arg = int(arg) if int_arg < 1: From 35fbb94aa67e7358584756d68d6de92ef9577884 Mon Sep 17 00:00:00 2001 From: Stanislav Zaprudskiy Date: Fri, 17 Feb 2023 17:33:45 +0100 Subject: [PATCH 08/25] Use `CLUSTER_HOST_ID` as default hostname argument value Incorporates feedback from https://github.com/ansible/awx/pull/13445/files#r1106012308 
Signed-off-by: Stanislav Zaprudskiy --- awx/main/management/commands/disable_instance.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/awx/main/management/commands/disable_instance.py b/awx/main/management/commands/disable_instance.py index a0f7bfef10..054e47974b 100644 --- a/awx/main/management/commands/disable_instance.py +++ b/awx/main/management/commands/disable_instance.py @@ -1,4 +1,3 @@ -import socket import time from urllib.parse import urljoin @@ -18,7 +17,7 @@ class AWXInstance: self.get_instance() def get_instance(self): - filter = self.filter if self.filter is not None else dict(hostname=socket.gethostname()) + filter = self.filter if self.filter is not None else dict(hostname=settings.CLUSTER_HOST_ID) qs = Instance.objects.filter(**filter) if not qs.exists(): raise ValueError(f"No AWX instance found with {filter} parameters") @@ -88,7 +87,7 @@ class Command(BaseCommand): filter_group.add_argument( "--hostname", type=str, - default=socket.gethostname(), + default=settings.CLUSTER_HOST_ID, help=f"{Instance.hostname.field.help_text} Defaults to the hostname of the machine where the Python interpreter is currently executing".strip(), ) filter_group.add_argument("--id", type=self.ge_1, help=Instance.id.field.help_text) From 811ecb86735fc952de2907ed71a2f84049b07138 Mon Sep 17 00:00:00 2001 From: Alan Rominger Date: Thu, 23 Feb 2023 12:05:21 -0500 Subject: [PATCH 09/25] Follow suggestion from comment, split if NOT list --- awx_collection/plugins/lookup/schedule_rruleset.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/awx_collection/plugins/lookup/schedule_rruleset.py b/awx_collection/plugins/lookup/schedule_rruleset.py index d5850582b0..4e9732f0f2 100644 --- a/awx_collection/plugins/lookup/schedule_rruleset.py +++ b/awx_collection/plugins/lookup/schedule_rruleset.py @@ -196,7 +196,7 @@ class LookupModule(LookupBase): if isinstance(rule[field_name], int): rule[field_name] = [rule[field_name]] # If its not a 
list, we need to split it into a list - if isinstance(rule[field_name], list): + if not isinstance(rule[field_name], list): rule[field_name] = rule[field_name].split(',') for value in rule[field_name]: # If they have a list of strs we want to strip the str incase its space delineated @@ -210,7 +210,8 @@ class LookupModule(LookupBase): def process_list(self, field_name, rule, valid_list, rule_number): return_values = [] - rule[field_name] = rule[field_name].split(',') + if not isinstance(rule[field_name], list): + rule[field_name] = rule[field_name].split(',') for value in rule[field_name]: value = value.strip() if value not in valid_list: From 3051384f9570d5f35f96c25ecabd4e5cf158bc9f Mon Sep 17 00:00:00 2001 From: Alan Rominger Date: Thu, 23 Feb 2023 12:05:32 -0500 Subject: [PATCH 10/25] Follow suggestion from comment, split if NOT list --- awx_collection/plugins/lookup/schedule_rruleset.py | 1 + 1 file changed, 1 insertion(+) diff --git a/awx_collection/plugins/lookup/schedule_rruleset.py b/awx_collection/plugins/lookup/schedule_rruleset.py index 4e9732f0f2..b45d861db3 100644 --- a/awx_collection/plugins/lookup/schedule_rruleset.py +++ b/awx_collection/plugins/lookup/schedule_rruleset.py @@ -210,6 +210,7 @@ class LookupModule(LookupBase): def process_list(self, field_name, rule, valid_list, rule_number): return_values = [] + # If its not a list, we need to split it into a list if not isinstance(rule[field_name], list): rule[field_name] = rule[field_name].split(',') for value in rule[field_name]: From 327352feaff445fadb6dbf0253c71008b1db959f Mon Sep 17 00:00:00 2001 From: Joe Garcia Date: Mon, 27 Feb 2023 11:26:52 -0500 Subject: [PATCH 11/25] Add default value to webservice_id kwarg --- awx/main/credential_plugins/aim.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/awx/main/credential_plugins/aim.py b/awx/main/credential_plugins/aim.py index 96487dcced..4cf65a36a9 100644 --- a/awx/main/credential_plugins/aim.py +++ 
b/awx/main/credential_plugins/aim.py @@ -70,7 +70,7 @@ def aim_backend(**kwargs): client_cert = kwargs.get('client_cert', None) client_key = kwargs.get('client_key', None) verify = kwargs['verify'] - webservice_id = kwargs['webservice_id'] + webservice_id = kwargs['webservice_id', ''] app_id = kwargs['app_id'] object_query = kwargs['object_query'] object_query_format = kwargs['object_query_format'] From 98b2f51c189561626c95c6b71a720538f83dee9d Mon Sep 17 00:00:00 2001 From: Joe Garcia Date: Mon, 27 Feb 2023 11:52:44 -0500 Subject: [PATCH 12/25] fix kwargs[] to kwargs.get() --- awx/main/credential_plugins/aim.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/awx/main/credential_plugins/aim.py b/awx/main/credential_plugins/aim.py index 4cf65a36a9..e72fe285f4 100644 --- a/awx/main/credential_plugins/aim.py +++ b/awx/main/credential_plugins/aim.py @@ -70,7 +70,7 @@ def aim_backend(**kwargs): client_cert = kwargs.get('client_cert', None) client_key = kwargs.get('client_key', None) verify = kwargs['verify'] - webservice_id = kwargs['webservice_id', ''] + webservice_id = kwargs.get('webservice_id', '') app_id = kwargs['app_id'] object_query = kwargs['object_query'] object_query_format = kwargs['object_query_format'] From cf21eab7f475509c5196bc523d83f6f3ce91b4a6 Mon Sep 17 00:00:00 2001 From: Hao Liu Date: Mon, 27 Feb 2023 18:32:10 -0500 Subject: [PATCH 13/25] [chore] update project_update playbook to be compliant with ansible-lint reshaving the yak Co-Authored-By: Gabriel Muniz --- awx/api/serializers.py | 2 +- awx/main/tasks/callback.py | 4 +- awx/playbooks/project_update.yml | 189 ++++++++++++++++--------------- 3 files changed, 102 insertions(+), 93 deletions(-) diff --git a/awx/api/serializers.py b/awx/api/serializers.py index be87a50a82..9e617c3a14 100644 --- a/awx/api/serializers.py +++ b/awx/api/serializers.py @@ -4028,7 +4028,7 @@ class ProjectUpdateEventSerializer(JobEventSerializer): # raw SCM URLs in their stdout (which *could* contain 
passwords) # attempt to detect and filter HTTP basic auth passwords in the stdout # of these types of events - if obj.event_data.get('task_action') in ('git', 'svn'): + if obj.event_data.get('task_action') in ('git', 'svn', 'ansible.builtin.git', 'ansible.builtin.svn'): try: return json.loads(UriCleaner.remove_sensitive(json.dumps(obj.event_data))) except Exception: diff --git a/awx/main/tasks/callback.py b/awx/main/tasks/callback.py index 92bfc40368..d93e56fed5 100644 --- a/awx/main/tasks/callback.py +++ b/awx/main/tasks/callback.py @@ -116,7 +116,7 @@ class RunnerCallback: # so it *should* have a negligible performance impact task = event_data.get('event_data', {}).get('task_action') try: - if task in ('git', 'svn'): + if task in ('git', 'svn', 'ansible.builtin.git', 'ansible.builtin.svn'): event_data_json = json.dumps(event_data) event_data_json = UriCleaner.remove_sensitive(event_data_json) event_data = json.loads(event_data_json) @@ -219,7 +219,7 @@ class RunnerCallbackForProjectUpdate(RunnerCallback): def event_handler(self, event_data): super_return_value = super(RunnerCallbackForProjectUpdate, self).event_handler(event_data) returned_data = event_data.get('event_data', {}) - if returned_data.get('task_action', '') == 'set_fact': + if returned_data.get('task_action', '') in ('set_fact', 'ansible.builtin.set_fact'): returned_facts = returned_data.get('res', {}).get('ansible_facts', {}) if 'scm_version' in returned_facts: self.playbook_new_revision = returned_facts['scm_version'] diff --git a/awx/playbooks/project_update.yml b/awx/playbooks/project_update.yml index 2067e76043..125b5ef312 100644 --- a/awx/playbooks/project_update.yml +++ b/awx/playbooks/project_update.yml @@ -25,42 +25,47 @@ connection: local name: Update source tree if necessary tasks: - - - name: delete project directory before update - command: "find -delete" # volume mounted, cannot delete folder itself + - name: Delete project directory before update + ansible.builtin.shell: set -o 
pipefail && find . -delete -print | tail -2 # volume mounted, cannot delete folder itself + register: reg + changed_when: reg.stdout_lines | length > 1 args: chdir: "{{ project_path }}" tags: - delete - - block: - - name: update project using git - git: - dest: "{{project_path|quote}}" - repo: "{{scm_url}}" - version: "{{scm_branch|quote}}" - refspec: "{{scm_refspec|default(omit)}}" - force: "{{scm_clean}}" - track_submodules: "{{scm_track_submodules|default(omit)}}" - accept_hostkey: "{{scm_accept_hostkey|default(omit)}}" + - name: Update project using git + tags: + - update_git + block: + - name: Update project using git + ansible.builtin.git: + dest: "{{ project_path | quote }}" + repo: "{{ scm_url }}" + version: "{{ scm_branch | quote }}" + refspec: "{{ scm_refspec | default(omit) }}" + force: "{{ scm_clean }}" + track_submodules: "{{ scm_track_submodules | default(omit) }}" + accept_hostkey: "{{ scm_accept_hostkey | default(omit) }}" register: git_result - name: Set the git repository version - set_fact: + ansible.builtin.set_fact: scm_version: "{{ git_result['after'] }}" when: "'after' in git_result" - tags: - - update_git - - block: - - name: update project using svn - subversion: - dest: "{{project_path|quote}}" - repo: "{{scm_url|quote}}" - revision: "{{scm_branch|quote}}" - force: "{{scm_clean}}" - username: "{{scm_username|default(omit)}}" - password: "{{scm_password|default(omit)}}" + - name: Update project using svn + tags: + - update_svn + block: + - name: Update project using svn + ansible.builtin.subversion: + dest: "{{ project_path | quote }}" + repo: "{{ scm_url | quote }}" + revision: "{{ scm_branch | quote }}" + force: "{{ scm_clean }}" + username: "{{ scm_username | default(omit) }}" + password: "{{ scm_password | default(omit) }}" # must be in_place because folder pre-existing, because it is mounted in_place: true environment: @@ -68,85 +73,90 @@ register: svn_result - name: Set the svn repository version - set_fact: + 
ansible.builtin.set_fact: scm_version: "{{ svn_result['after'] }}" when: "'after' in svn_result" - - name: parse subversion version string properly - set_fact: - scm_version: "{{scm_version|regex_replace('^.*Revision: ([0-9]+).*$', '\\1')}}" - tags: - - update_svn + - name: Parse subversion version string properly + ansible.builtin.set_fact: + scm_version: "{{ scm_version | regex_replace('^.*Revision: ([0-9]+).*$', '\\1') }}" - - block: + + - name: Project update for Insights + tags: + - update_insights + block: - name: Ensure the project directory is present - file: - dest: "{{project_path|quote}}" + ansible.builtin.file: + dest: "{{ project_path | quote }}" state: directory + mode: '0755' - name: Fetch Insights Playbook(s) insights: - insights_url: "{{insights_url}}" - username: "{{scm_username}}" - password: "{{scm_password}}" - project_path: "{{project_path}}" - awx_license_type: "{{awx_license_type}}" - awx_version: "{{awx_version}}" + insights_url: "{{ insights_url }}" + username: "{{ scm_username }}" + password: "{{ scm_password }}" + project_path: "{{ project_path }}" + awx_license_type: "{{ awx_license_type }}" + awx_version: "{{ awx_version }}" register: results - name: Save Insights Version - set_fact: - scm_version: "{{results.version}}" + ansible.builtin.set_fact: + scm_version: "{{ results.version }}" when: results is defined - tags: - - update_insights - - block: + + - name: Update project using archive + tags: + - update_archive + block: - name: Ensure the project archive directory is present - file: - dest: "{{ project_path|quote }}/.archive" + ansible.builtin.file: + dest: "{{ project_path | quote }}/.archive" state: directory + mode: '0755' - name: Get archive from url - get_url: - url: "{{ scm_url|quote }}" - dest: "{{ project_path|quote }}/.archive/" - url_username: "{{ scm_username|default(omit) }}" - url_password: "{{ scm_password|default(omit) }}" + ansible.builtin.get_url: + url: "{{ scm_url | quote }}" + dest: "{{ project_path | quote 
}}/.archive/" + url_username: "{{ scm_username | default(omit) }}" + url_password: "{{ scm_password | default(omit) }}" force_basic_auth: true + mode: '0755' register: get_archive - name: Unpack archive project_archive: src: "{{ get_archive.dest }}" - project_path: "{{ project_path|quote }}" + project_path: "{{ project_path | quote }}" force: "{{ scm_clean }}" when: get_archive.changed or scm_clean register: unarchived - name: Find previous archives - find: - paths: "{{ project_path|quote }}/.archive/" + ansible.builtin.find: + paths: "{{ project_path | quote }}/.archive/" excludes: - - "{{ get_archive.dest|basename }}" + - "{{ get_archive.dest | basename }}" when: unarchived.changed register: previous_archive - name: Remove previous archives - file: + ansible.builtin.file: path: "{{ item.path }}" state: absent loop: "{{ previous_archive.files }}" - when: previous_archive.files|default([]) + when: previous_archive.files | default([]) - name: Set scm_version to archive sha1 checksum - set_fact: + ansible.builtin.set_fact: scm_version: "{{ get_archive.checksum_src }}" - tags: - - update_archive - name: Repository Version - debug: + ansible.builtin.debug: msg: "Repository Version {{ scm_version }}" tags: - update_git @@ -183,60 +193,59 @@ additional_collections_env: # These environment variables are used for installing collections, in addition to galaxy_task_env # setting the collections paths silences warnings - ANSIBLE_COLLECTIONS_PATHS: "{{projects_root}}/.__awx_cache/{{local_path}}/stage/requirements_collections" + ANSIBLE_COLLECTIONS_PATHS: "{{ projects_root }}/.__awx_cache/{{ local_path }}/stage/requirements_collections" # Put the local tmp directory in same volume as collection destination # otherwise, files cannot be moved accross volumes and will cause error - ANSIBLE_LOCAL_TEMP: "{{projects_root}}/.__awx_cache/{{local_path}}/stage/tmp" + ANSIBLE_LOCAL_TEMP: "{{ projects_root }}/.__awx_cache/{{ local_path }}/stage/tmp" tasks: - - name: Check content sync 
settings - block: - - debug: - msg: > - Collection and role syncing disabled. Check the AWX_ROLES_ENABLED and - AWX_COLLECTIONS_ENABLED settings and Galaxy credentials on the project's organization. - - - meta: end_play - - when: not roles_enabled|bool and not collections_enabled|bool + when: not roles_enabled | bool and not collections_enabled | bool tags: - install_roles - install_collections + block: + - name: Warn about disabled content sync + ansible.builtin.debug: + msg: > + Collection and role syncing disabled. Check the AWX_ROLES_ENABLED and + AWX_COLLECTIONS_ENABLED settings and Galaxy credentials on the project's organization. + - name: End play due to disabled content sync + ansible.builtin.meta: end_play - - name: fetch galaxy roles from requirements.(yml/yaml) - command: > + - name: Fetch galaxy roles from requirements.(yml/yaml) + ansible.builtin.command: > ansible-galaxy role install -r {{ item }} - --roles-path {{projects_root}}/.__awx_cache/{{local_path}}/stage/requirements_roles + --roles-path {{ projects_root }}/.__awx_cache/{{ local_path }}/stage/requirements_roles {{ ' -' + 'v' * ansible_verbosity if ansible_verbosity else '' }} args: - chdir: "{{project_path|quote}}" + chdir: "{{ project_path | quote }}" register: galaxy_result with_fileglob: - - "{{project_path|quote}}/roles/requirements.yaml" - - "{{project_path|quote}}/roles/requirements.yml" + - "{{ project_path | quote }}/roles/requirements.yaml" + - "{{ project_path | quote }}/roles/requirements.yml" changed_when: "'was installed successfully' in galaxy_result.stdout" environment: "{{ galaxy_task_env }}" - when: roles_enabled|bool + when: roles_enabled | bool tags: - install_roles - - name: fetch galaxy collections from collections/requirements.(yml/yaml) - command: > + - name: Fetch galaxy collections from collections/requirements.(yml/yaml) + ansible.builtin.command: > ansible-galaxy collection install -r {{ item }} - --collections-path 
{{projects_root}}/.__awx_cache/{{local_path}}/stage/requirements_collections + --collections-path {{ projects_root }}/.__awx_cache/{{ local_path }}/stage/requirements_collections {{ ' -' + 'v' * ansible_verbosity if ansible_verbosity else '' }} args: - chdir: "{{project_path|quote}}" + chdir: "{{ project_path | quote }}" register: galaxy_collection_result with_fileglob: - - "{{project_path|quote}}/collections/requirements.yaml" - - "{{project_path|quote}}/collections/requirements.yml" - - "{{project_path|quote}}/requirements.yaml" - - "{{project_path|quote}}/requirements.yml" + - "{{ project_path | quote }}/collections/requirements.yaml" + - "{{ project_path | quote }}/collections/requirements.yml" + - "{{ project_path | quote }}/requirements.yaml" + - "{{ project_path | quote }}/requirements.yml" changed_when: "'Installing ' in galaxy_collection_result.stdout" environment: "{{ additional_collections_env | combine(galaxy_task_env) }}" when: - "ansible_version.full is version_compare('2.9', '>=')" - - collections_enabled|bool + - collections_enabled | bool tags: - install_collections From 7cca39d0691d1760e8903825a6e5970069b72e11 Mon Sep 17 00:00:00 2001 From: Hao Liu Date: Mon, 27 Feb 2023 20:53:07 -0500 Subject: [PATCH 14/25] change make Dockerfile to phony awx-kube-build and docker-compose-build share the same Dockerfile if u run awx-kube-build than docker-compose-build in succession the second command wont run the Dockerfile target and cause the image to be built with the incorrect Dockerfile --- Makefile | 1 + 1 file changed, 1 insertion(+) diff --git a/Makefile b/Makefile index 4e402125b2..c90430d67e 100644 --- a/Makefile +++ b/Makefile @@ -572,6 +572,7 @@ VERSION: PYTHON_VERSION: @echo "$(PYTHON)" | sed 's:python::' +.PHONY: Dockerfile Dockerfile: tools/ansible/roles/dockerfile/templates/Dockerfile.j2 ansible-playbook tools/ansible/dockerfile.yml -e receptor_image=$(RECEPTOR_IMAGE) From d5de1f9d114808ef415e799c687efec2ea0f56f0 Mon Sep 17 00:00:00 2001 From: 
Alan Rominger Date: Mon, 20 Feb 2023 09:56:38 -0500 Subject: [PATCH 15/25] Make use of new keepalive messages from ansible-runner Make setting API configurable and process keepalive events when seen in the event callback Use env var in pod spec and make it specific to K8S --- awx/main/conf.py | 10 ++++++++++ awx/main/tasks/callback.py | 2 ++ awx/main/tasks/receptor.py | 4 ++++ awx/settings/defaults.py | 5 +++++ 4 files changed, 21 insertions(+) diff --git a/awx/main/conf.py b/awx/main/conf.py index dab0543a1a..99b995b113 100644 --- a/awx/main/conf.py +++ b/awx/main/conf.py @@ -282,6 +282,16 @@ register( placeholder={'HTTP_PROXY': 'myproxy.local:8080'}, ) +register( + 'AWX_RUNNER_KEEPALIVE_SECONDS', + field_class=fields.IntegerField, + label=_('K8S Ansible Runner Keep-Alive Message Interval'), + help_text=_('Only applies to K8S deployments and container_group jobs. If not 0, send a message every so-many seconds to keep connection open.'), + category=_('Jobs'), + category_slug='jobs', + placeholder=240, # intended to be under common 5 minute idle timeout +) + register( 'GALAXY_TASK_ENV', field_class=fields.KeyValueField, diff --git a/awx/main/tasks/callback.py b/awx/main/tasks/callback.py index 92bfc40368..0046d07d82 100644 --- a/awx/main/tasks/callback.py +++ b/awx/main/tasks/callback.py @@ -85,6 +85,8 @@ class RunnerCallback: # which generate job events from two 'streams': # ansible-inventory and the awx.main.commands.inventory_import # logger + if event_data.get('event') == 'keepalive': + return if event_data.get(self.event_data_key, None): if self.event_data_key != 'job_id': diff --git a/awx/main/tasks/receptor.py b/awx/main/tasks/receptor.py index 006c805943..9cb4d49efe 100644 --- a/awx/main/tasks/receptor.py +++ b/awx/main/tasks/receptor.py @@ -526,6 +526,10 @@ class AWXReceptorJob: pod_spec['spec']['containers'][0]['image'] = ee.image pod_spec['spec']['containers'][0]['args'] = ['ansible-runner', 'worker', '--private-data-dir=/runner'] + if 
settings.AWX_RUNNER_KEEPALIVE_SECONDS: + pod_spec['spec']['containers'][0].setdefault('env', []) + pod_spec['spec']['containers'][0]['env'].append({'name': 'ANSIBLE_RUNNER_KEEPALIVE_SECONDS', 'value': str(settings.AWX_RUNNER_KEEPALIVE_SECONDS)}) + # Enforce EE Pull Policy pull_options = {"always": "Always", "missing": "IfNotPresent", "never": "Never"} if self.task and self.task.instance.execution_environment: diff --git a/awx/settings/defaults.py b/awx/settings/defaults.py index 4d18540bcd..74a36b3e2d 100644 --- a/awx/settings/defaults.py +++ b/awx/settings/defaults.py @@ -929,6 +929,11 @@ AWX_RUNNER_OMIT_ENV_FILES = True # Allow ansible-runner to save ansible output (may cause performance issues) AWX_RUNNER_SUPPRESS_OUTPUT_FILE = True +# https://github.com/ansible/ansible-runner/pull/1191/files +# Interval in seconds between the last message and keep-alive messages that +# ansible-runner will send +AWX_RUNNER_KEEPALIVE_SECONDS = 0 + # Delete completed work units in receptor RECEPTOR_RELEASE_WORK = True From 6fa22f5be29262d52e9ee95bc37dc133ccb54de9 Mon Sep 17 00:00:00 2001 From: Alan Rominger Date: Tue, 21 Feb 2023 16:17:48 -0500 Subject: [PATCH 16/25] Add UI for the new setting --- .../screens/Setting/Jobs/JobsEdit/JobsEdit.js | 5 +++++ .../Setting/Jobs/JobsEdit/JobsEdit.test.js | 1 + .../shared/data.allSettingOptions.json | 19 +++++++++++++++++++ .../Setting/shared/data.allSettings.json | 1 + .../Setting/shared/data.jobSettings.json | 1 + 5 files changed, 27 insertions(+) diff --git a/awx/ui/src/screens/Setting/Jobs/JobsEdit/JobsEdit.js b/awx/ui/src/screens/Setting/Jobs/JobsEdit/JobsEdit.js index 52e216e41e..d258fdec46 100644 --- a/awx/ui/src/screens/Setting/Jobs/JobsEdit/JobsEdit.js +++ b/awx/ui/src/screens/Setting/Jobs/JobsEdit/JobsEdit.js @@ -150,6 +150,11 @@ function JobsEdit() { type={options?.SCHEDULE_MAX_JOBS ? 
'number' : undefined} isRequired={Boolean(options?.SCHEDULE_MAX_JOBS)} /> + ', () => { const { EVENT_STDOUT_MAX_BYTES_DISPLAY, STDOUT_MAX_BYTES_DISPLAY, + AWX_RUNNER_KEEPALIVE_SECONDS, ...jobRequest } = mockJobSettings; expect(SettingsAPI.updateAll).toHaveBeenCalledWith(jobRequest); diff --git a/awx/ui/src/screens/Setting/shared/data.allSettingOptions.json b/awx/ui/src/screens/Setting/shared/data.allSettingOptions.json index b654d1bd90..3eaf93eff0 100644 --- a/awx/ui/src/screens/Setting/shared/data.allSettingOptions.json +++ b/awx/ui/src/screens/Setting/shared/data.allSettingOptions.json @@ -344,6 +344,16 @@ "category_slug": "jobs", "default": 10 }, + "AWX_RUNNER_KEEPALIVE_SECONDS": { + "type": "integer", + "required": true, + "label": "K8S Ansible Runner Keep-Alive Message Interval", + "help_text": "Only applies to K8S deployments and container_group jobs. If not 0, send a message every so-many seconds to keep connection open.", + "category": "Jobs", + "category_slug": "jobs", + "placeholder": 240, + "default": 0 + }, "AWX_ANSIBLE_CALLBACK_PLUGINS": { "type": "list", "required": false, @@ -4098,6 +4108,15 @@ "category_slug": "jobs", "defined_in_file": false }, + "AWX_RUNNER_KEEPALIVE_SECONDS": { + "type": "integer", + "label": "K8S Ansible Runner Keep-Alive Message Interval", + "help_text": "Only applies to K8S deployments and container_group jobs. 
If not 0, send a message every so-many seconds to keep connection open.", + "category": "Jobs", + "category_slug": "jobs", + "placeholder": 240, + "default": 0 + }, "AWX_ANSIBLE_CALLBACK_PLUGINS": { "type": "list", "label": "Ansible Callback Plugins", diff --git a/awx/ui/src/screens/Setting/shared/data.allSettings.json b/awx/ui/src/screens/Setting/shared/data.allSettings.json index e5136f4b58..b2eaea2a12 100644 --- a/awx/ui/src/screens/Setting/shared/data.allSettings.json +++ b/awx/ui/src/screens/Setting/shared/data.allSettings.json @@ -51,6 +51,7 @@ "STDOUT_MAX_BYTES_DISPLAY":1048576, "EVENT_STDOUT_MAX_BYTES_DISPLAY":1024, "SCHEDULE_MAX_JOBS":10, + "AWX_RUNNER_KEEPALIVE_SECONDS": 0, "AWX_ANSIBLE_CALLBACK_PLUGINS":[], "DEFAULT_JOB_TIMEOUT":0, "DEFAULT_JOB_IDLE_TIMEOUT":0, diff --git a/awx/ui/src/screens/Setting/shared/data.jobSettings.json b/awx/ui/src/screens/Setting/shared/data.jobSettings.json index 29567a8f8c..6c001cca3f 100644 --- a/awx/ui/src/screens/Setting/shared/data.jobSettings.json +++ b/awx/ui/src/screens/Setting/shared/data.jobSettings.json @@ -19,6 +19,7 @@ "STDOUT_MAX_BYTES_DISPLAY": 1048576, "EVENT_STDOUT_MAX_BYTES_DISPLAY": 1024, "SCHEDULE_MAX_JOBS": 10, + "AWX_RUNNER_KEEPALIVE_SECONDS": 0, "AWX_ANSIBLE_CALLBACK_PLUGINS": [], "DEFAULT_JOB_TIMEOUT": 0, "DEFAULT_JOB_IDLE_TIMEOUT": 0, From b143df31834fe98afb2059999d07be81727dfa21 Mon Sep 17 00:00:00 2001 From: Michael Abashian Date: Tue, 21 Feb 2023 17:05:14 -0500 Subject: [PATCH 17/25] Fix broken UI test --- awx/ui/src/screens/Setting/Jobs/JobsEdit/JobsEdit.test.js | 1 - 1 file changed, 1 deletion(-) diff --git a/awx/ui/src/screens/Setting/Jobs/JobsEdit/JobsEdit.test.js b/awx/ui/src/screens/Setting/Jobs/JobsEdit/JobsEdit.test.js index 0d9d2a9ca2..3f8b6dd220 100644 --- a/awx/ui/src/screens/Setting/Jobs/JobsEdit/JobsEdit.test.js +++ b/awx/ui/src/screens/Setting/Jobs/JobsEdit/JobsEdit.test.js @@ -79,7 +79,6 @@ describe('', () => { const { EVENT_STDOUT_MAX_BYTES_DISPLAY, STDOUT_MAX_BYTES_DISPLAY, - 
AWX_RUNNER_KEEPALIVE_SECONDS, ...jobRequest } = mockJobSettings; expect(SettingsAPI.updateAll).toHaveBeenCalledWith(jobRequest); From 90f54b98cd0183091228b78c46d289f175f94e93 Mon Sep 17 00:00:00 2001 From: Alan Rominger Date: Wed, 22 Feb 2023 14:32:30 -0500 Subject: [PATCH 18/25] Update keepalive setting help_text to be more direct Co-authored-by: Shane McDonald --- awx/main/conf.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/awx/main/conf.py b/awx/main/conf.py index 99b995b113..2dbf5e127e 100644 --- a/awx/main/conf.py +++ b/awx/main/conf.py @@ -286,7 +286,7 @@ register( 'AWX_RUNNER_KEEPALIVE_SECONDS', field_class=fields.IntegerField, label=_('K8S Ansible Runner Keep-Alive Message Interval'), - help_text=_('Only applies to K8S deployments and container_group jobs. If not 0, send a message every so-many seconds to keep connection open.'), + help_text=_('Only applies to jobs running in a Container Group. If not 0, send a message every so-many seconds to keep connection open.'), category=_('Jobs'), category_slug='jobs', placeholder=240, # intended to be under common 5 minute idle timeout From 52d46c88e47fe3078e64b11420a8ca64e8106c59 Mon Sep 17 00:00:00 2001 From: Christian Adams Date: Tue, 28 Feb 2023 13:44:34 -0500 Subject: [PATCH 19/25] External users should not be able to change their password (#13491) * Azure AD users should not be able to change their password * Multiple auth changes Moving get_external_user function into awx.sso.common Altering get_external_user to not look at current config, just user object values Altering how api/conf.py detects external auth config (and making reusable function in awx.sso.common) Altering logic in api.serializers in _update_pasword to use awx.sso.common * Adding unit tests --------- Co-authored-by: John Westcott IV --- awx/api/conf.py | 17 +---- awx/api/serializers.py | 22 +----- awx/main/models/oauth.py | 2 +- awx/main/utils/common.py | 24 ------ awx/sso/common.py | 42 +++++++++++ 
awx/sso/tests/functional/test_common.py | 99 ++++++++++++++++++++++++- 6 files changed, 148 insertions(+), 58 deletions(-) diff --git a/awx/api/conf.py b/awx/api/conf.py index b9c2ee701a..0697f40c56 100644 --- a/awx/api/conf.py +++ b/awx/api/conf.py @@ -1,5 +1,4 @@ # Django -from django.conf import settings from django.utils.translation import gettext_lazy as _ # Django REST Framework @@ -9,6 +8,7 @@ from rest_framework import serializers from awx.conf import fields, register, register_validate from awx.api.fields import OAuth2ProviderField from oauth2_provider.settings import oauth2_settings +from awx.sso.common import is_remote_auth_enabled register( @@ -108,19 +108,8 @@ register( def authentication_validate(serializer, attrs): - remote_auth_settings = [ - 'AUTH_LDAP_SERVER_URI', - 'SOCIAL_AUTH_GOOGLE_OAUTH2_KEY', - 'SOCIAL_AUTH_GITHUB_KEY', - 'SOCIAL_AUTH_GITHUB_ORG_KEY', - 'SOCIAL_AUTH_GITHUB_TEAM_KEY', - 'SOCIAL_AUTH_SAML_ENABLED_IDPS', - 'RADIUS_SERVER', - 'TACACSPLUS_HOST', - ] - if attrs.get('DISABLE_LOCAL_AUTH', False): - if not any(getattr(settings, s, None) for s in remote_auth_settings): - raise serializers.ValidationError(_("There are no remote authentication systems configured.")) + if attrs.get('DISABLE_LOCAL_AUTH', False) and not is_remote_auth_enabled(): + raise serializers.ValidationError(_("There are no remote authentication systems configured.")) return attrs diff --git a/awx/api/serializers.py b/awx/api/serializers.py index 9e617c3a14..7dad4459fa 100644 --- a/awx/api/serializers.py +++ b/awx/api/serializers.py @@ -108,7 +108,6 @@ from awx.main.utils import ( extract_ansible_vars, encrypt_dict, prefetch_page_capabilities, - get_external_account, truncate_stdout, ) from awx.main.utils.filters import SmartFilter @@ -124,6 +123,8 @@ from awx.api.fields import BooleanNullField, CharNullField, ChoiceNullField, Ver # AWX Utils from awx.api.validators import HostnameRegexValidator +from awx.sso.common import get_external_account + logger = 
logging.getLogger('awx.api.serializers') # Fields that should be summarized regardless of object type. @@ -987,23 +988,8 @@ class UserSerializer(BaseSerializer): def _update_password(self, obj, new_password): # For now we're not raising an error, just not saving password for # users managed by LDAP who already have an unusable password set. - if getattr(settings, 'AUTH_LDAP_SERVER_URI', None): - try: - if obj.pk and obj.profile.ldap_dn and not obj.has_usable_password(): - new_password = None - except AttributeError: - pass - if ( - getattr(settings, 'SOCIAL_AUTH_GOOGLE_OAUTH2_KEY', None) - or getattr(settings, 'SOCIAL_AUTH_GITHUB_KEY', None) - or getattr(settings, 'SOCIAL_AUTH_GITHUB_ORG_KEY', None) - or getattr(settings, 'SOCIAL_AUTH_GITHUB_TEAM_KEY', None) - or getattr(settings, 'SOCIAL_AUTH_SAML_ENABLED_IDPS', None) - ) and obj.social_auth.all(): - new_password = None - if (getattr(settings, 'RADIUS_SERVER', None) or getattr(settings, 'TACACSPLUS_HOST', None)) and obj.enterprise_auth.all(): - new_password = None - if new_password: + # Get external password will return something like ldap or enterprise or None if the user isn't external. 
We only want to allow a password update for a None option + if new_password and not self.get_external_account(obj): obj.set_password(new_password) obj.save(update_fields=['password']) diff --git a/awx/main/models/oauth.py b/awx/main/models/oauth.py index c9927f78bd..fbd7772119 100644 --- a/awx/main/models/oauth.py +++ b/awx/main/models/oauth.py @@ -14,7 +14,7 @@ from oauth2_provider.models import AbstractApplication, AbstractAccessToken from oauth2_provider.generators import generate_client_secret from oauthlib import oauth2 -from awx.main.utils import get_external_account +from awx.sso.common import get_external_account from awx.main.fields import OAuth2ClientSecretField diff --git a/awx/main/utils/common.py b/awx/main/utils/common.py index dedd02f995..26920535dc 100644 --- a/awx/main/utils/common.py +++ b/awx/main/utils/common.py @@ -80,7 +80,6 @@ __all__ = [ 'set_environ', 'IllegalArgumentError', 'get_custom_venv_choices', - 'get_external_account', 'ScheduleTaskManager', 'ScheduleDependencyManager', 'ScheduleWorkflowManager', @@ -1089,29 +1088,6 @@ def has_model_field_prefetched(model_obj, field_name): return getattr(getattr(model_obj, field_name, None), 'prefetch_cache_name', '') in getattr(model_obj, '_prefetched_objects_cache', {}) -def get_external_account(user): - from django.conf import settings - - account_type = None - if getattr(settings, 'AUTH_LDAP_SERVER_URI', None): - try: - if user.pk and user.profile.ldap_dn and not user.has_usable_password(): - account_type = "ldap" - except AttributeError: - pass - if ( - getattr(settings, 'SOCIAL_AUTH_GOOGLE_OAUTH2_KEY', None) - or getattr(settings, 'SOCIAL_AUTH_GITHUB_KEY', None) - or getattr(settings, 'SOCIAL_AUTH_GITHUB_ORG_KEY', None) - or getattr(settings, 'SOCIAL_AUTH_GITHUB_TEAM_KEY', None) - or getattr(settings, 'SOCIAL_AUTH_SAML_ENABLED_IDPS', None) - ) and user.social_auth.all(): - account_type = "social" - if (getattr(settings, 'RADIUS_SERVER', None) or getattr(settings, 'TACACSPLUS_HOST', None)) and 
user.enterprise_auth.all(): - account_type = "enterprise" - return account_type - - class classproperty: def __init__(self, fget=None, fset=None, fdel=None, doc=None): self.fget = fget diff --git a/awx/sso/common.py b/awx/sso/common.py index 4d601bb22e..99abc51d5a 100644 --- a/awx/sso/common.py +++ b/awx/sso/common.py @@ -169,3 +169,45 @@ def get_or_create_org_with_default_galaxy_cred(**kwargs): else: logger.debug("Could not find default Ansible Galaxy credential to add to org") return org + + +def get_external_account(user): + account_type = None + + # Previously this method also checked for active configuration which meant that if a user logged in from LDAP + # and then LDAP was no longer configured it would "convert" the user from an LDAP account_type to none. + # This did have one benefit that if a login type was removed intentionally the user could be given a username password. + # But it had a limitation that the user would have to have an active session (or an admin would have to go set a temp password). + # It also lead to the side affect that if LDAP was ever reconfigured the user would convert back to LDAP but still have a local password. + # That local password could then be used to bypass LDAP authentication. 
+ try: + if user.pk and user.profile.ldap_dn and not user.has_usable_password(): + account_type = "ldap" + except AttributeError: + pass + + if user.social_auth.all(): + account_type = "social" + + if user.enterprise_auth.all(): + account_type = "enterprise" + + return account_type + + +def is_remote_auth_enabled(): + from django.conf import settings + + # Append LDAP, Radius, TACACS+ and SAML options + settings_that_turn_on_remote_auth = [ + 'AUTH_LDAP_SERVER_URI', + 'SOCIAL_AUTH_SAML_ENABLED_IDPS', + 'RADIUS_SERVER', + 'TACACSPLUS_HOST', + ] + # Also include any SOCAIL_AUTH_*KEY (except SAML) + for social_auth_key in dir(settings): + if social_auth_key.startswith('SOCIAL_AUTH_') and social_auth_key.endswith('_KEY') and 'SAML' not in social_auth_key: + settings_that_turn_on_remote_auth.append(social_auth_key) + + return any(getattr(settings, s, None) for s in settings_that_turn_on_remote_auth) diff --git a/awx/sso/tests/functional/test_common.py b/awx/sso/tests/functional/test_common.py index 4fc3edd841..f2b3e5781d 100644 --- a/awx/sso/tests/functional/test_common.py +++ b/awx/sso/tests/functional/test_common.py @@ -2,9 +2,22 @@ import pytest from collections import Counter from django.core.exceptions import FieldError from django.utils.timezone import now +from django.test.utils import override_settings from awx.main.models import Credential, CredentialType, Organization, Team, User -from awx.sso.common import get_orgs_by_ids, reconcile_users_org_team_mappings, create_org_and_teams, get_or_create_org_with_default_galaxy_cred +from awx.sso.common import ( + get_orgs_by_ids, + reconcile_users_org_team_mappings, + create_org_and_teams, + get_or_create_org_with_default_galaxy_cred, + is_remote_auth_enabled, + get_external_account, +) + + +class MicroMockObject(object): + def all(self): + return True @pytest.mark.django_db @@ -278,3 +291,87 @@ class TestCommonFunctions: for o in Organization.objects.all(): assert o.galaxy_credentials.count() == 0 + + 
@pytest.mark.parametrize( + "enable_ldap, enable_social, enable_enterprise, expected_results", + [ + (False, False, False, None), + (True, False, False, 'ldap'), + (True, True, False, 'social'), + (True, True, True, 'enterprise'), + (False, True, True, 'enterprise'), + (False, False, True, 'enterprise'), + (False, True, False, 'social'), + ], + ) + def test_get_external_account(self, enable_ldap, enable_social, enable_enterprise, expected_results): + try: + user = User.objects.get(username="external_tester") + except User.DoesNotExist: + user = User(username="external_tester") + user.set_unusable_password() + user.save() + + if enable_ldap: + user.profile.ldap_dn = 'test.dn' + if enable_social: + from social_django.models import UserSocialAuth + + social_auth, _ = UserSocialAuth.objects.get_or_create( + uid='667ec049-cdf3-45d0-a4dc-0465f7505954', + provider='oidc', + extra_data={}, + user_id=user.id, + ) + user.social_auth.set([social_auth]) + if enable_enterprise: + from awx.sso.models import UserEnterpriseAuth + + enterprise_auth = UserEnterpriseAuth(user=user, provider='tacacs+') + enterprise_auth.save() + + assert get_external_account(user) == expected_results + + @pytest.mark.parametrize( + "setting, expected", + [ + # Set none of the social auth settings + ('JUNK_SETTING', False), + # Set the hard coded settings + ('AUTH_LDAP_SERVER_URI', True), + ('SOCIAL_AUTH_SAML_ENABLED_IDPS', True), + ('RADIUS_SERVER', True), + ('TACACSPLUS_HOST', True), + # Set some SOCIAL_SOCIAL_AUTH_OIDC_KEYAUTH_*_KEY settings + ('SOCIAL_AUTH_AZUREAD_OAUTH2_KEY', True), + ('SOCIAL_AUTH_GITHUB_ENTERPRISE_KEY', True), + ('SOCIAL_AUTH_GITHUB_ENTERPRISE_ORG_KEY', True), + ('SOCIAL_AUTH_GITHUB_ENTERPRISE_TEAM_KEY', True), + ('SOCIAL_AUTH_GITHUB_KEY', True), + ('SOCIAL_AUTH_GITHUB_ORG_KEY', True), + ('SOCIAL_AUTH_GITHUB_TEAM_KEY', True), + ('SOCIAL_AUTH_GOOGLE_OAUTH2_KEY', True), + ('SOCIAL_AUTH_OIDC_KEY', True), + # Try a hypothetical future one + ('SOCIAL_AUTH_GIBBERISH_KEY', True), + # 
Do a SAML one + ('SOCIAL_AUTH_SAML_SP_PRIVATE_KEY', False), + ], + ) + def test_is_remote_auth_enabled(self, setting, expected): + with override_settings(**{setting: True}): + assert is_remote_auth_enabled() == expected + + @pytest.mark.parametrize( + "key_one, key_one_value, key_two, key_two_value, expected", + [ + ('JUNK_SETTING', True, 'JUNK2_SETTING', True, False), + ('AUTH_LDAP_SERVER_URI', True, 'SOCIAL_AUTH_AZUREAD_OAUTH2_KEY', True, True), + ('JUNK_SETTING', True, 'SOCIAL_AUTH_AZUREAD_OAUTH2_KEY', True, True), + ('AUTH_LDAP_SERVER_URI', False, 'SOCIAL_AUTH_AZUREAD_OAUTH2_KEY', False, False), + ], + ) + def test_is_remote_auth_enabled_multiple_keys(self, key_one, key_one_value, key_two, key_two_value, expected): + with override_settings(**{key_one: key_one_value}): + with override_settings(**{key_two: key_two_value}): + assert is_remote_auth_enabled() == expected From 39ee4285ce2427ab9278e2249fd11e06c5a88901 Mon Sep 17 00:00:00 2001 From: Lila Yasin Date: Wed, 1 Mar 2023 08:19:00 -0500 Subject: [PATCH 20/25] Working on running spellcheck on everything ahead of merging the shellcheck/code check CI addition. 
(#13453) --- awx/api/filters.py | 4 ++-- awx/api/generics.py | 2 +- awx/api/renderers.py | 2 +- awx/api/serializers.py | 8 ++++---- .../api/dashboard_inventory_graph_view.md | 4 ++-- awx/api/templates/api/dashboard_jobs_graph_view.md | 2 +- .../api/inventory_inventory_sources_update.md | 2 +- awx/api/views/__init__.py | 12 ++++++------ awx/conf/fields.py | 2 +- awx/conf/views.py | 2 +- awx/locale/en-us/LC_MESSAGES/django.po | 14 +++++++------- awx/locale/es/LC_MESSAGES/django.po | 10 +++++----- 12 files changed, 32 insertions(+), 32 deletions(-) diff --git a/awx/api/filters.py b/awx/api/filters.py index a40006d670..1a6e3eb90e 100644 --- a/awx/api/filters.py +++ b/awx/api/filters.py @@ -155,7 +155,7 @@ class FieldLookupBackend(BaseFilterBackend): 'search', ) - # A list of fields that we know can be filtered on without the possiblity + # A list of fields that we know can be filtered on without the possibility # of introducing duplicates NO_DUPLICATES_ALLOW_LIST = (CharField, IntegerField, BooleanField, TextField) @@ -268,7 +268,7 @@ class FieldLookupBackend(BaseFilterBackend): continue # HACK: make `created` available via API for the Django User ORM model - # so it keep compatiblity with other objects which exposes the `created` attr. + # so it keep compatibility with other objects which exposes the `created` attr. if queryset.model._meta.object_name == 'User' and key.startswith('created'): key = key.replace('created', 'date_joined') diff --git a/awx/api/generics.py b/awx/api/generics.py index 361506c605..c86639ab95 100644 --- a/awx/api/generics.py +++ b/awx/api/generics.py @@ -674,7 +674,7 @@ class SubListCreateAttachDetachAPIView(SubListCreateAPIView): location = None created = True - # Retrive the sub object (whether created or by ID). + # Retrieve the sub object (whether created or by ID). sub = get_object_or_400(self.model, pk=sub_id) # Verify we have permission to attach. 
diff --git a/awx/api/renderers.py b/awx/api/renderers.py index 613d2c9fda..b6078c189c 100644 --- a/awx/api/renderers.py +++ b/awx/api/renderers.py @@ -60,7 +60,7 @@ class BrowsableAPIRenderer(renderers.BrowsableAPIRenderer): delattr(renderer_context['view'], '_request') def get_raw_data_form(self, data, view, method, request): - # Set a flag on the view to indiciate to the view/serializer that we're + # Set a flag on the view to indicate to the view/serializer that we're # creating a raw data form for the browsable API. Store the original # request method to determine how to populate the raw data form. if request.method in {'OPTIONS', 'DELETE'}: diff --git a/awx/api/serializers.py b/awx/api/serializers.py index 7dad4459fa..6e23f3298f 100644 --- a/awx/api/serializers.py +++ b/awx/api/serializers.py @@ -537,7 +537,7 @@ class BaseSerializer(serializers.ModelSerializer, metaclass=BaseSerializerMetacl # # This logic is to force rendering choice's on an uneditable field. # Note: Consider expanding this rendering for more than just choices fields - # Note: This logic works in conjuction with + # Note: This logic works in conjunction with if hasattr(model_field, 'choices') and model_field.choices: was_editable = model_field.editable model_field.editable = True @@ -3983,7 +3983,7 @@ class JobEventSerializer(BaseSerializer): # Show full stdout for playbook_on_* events. 
if obj and obj.event.startswith('playbook_on'): return data - # If the view logic says to not trunctate (request was to the detail view or a param was used) + # If the view logic says to not truncate (request was to the detail view or a param was used) if self.context.get('no_truncate', False): return data max_bytes = settings.EVENT_STDOUT_MAX_BYTES_DISPLAY @@ -4058,7 +4058,7 @@ class AdHocCommandEventSerializer(BaseSerializer): def to_representation(self, obj): data = super(AdHocCommandEventSerializer, self).to_representation(obj) - # If the view logic says to not trunctate (request was to the detail view or a param was used) + # If the view logic says to not truncate (request was to the detail view or a param was used) if self.context.get('no_truncate', False): return data max_bytes = settings.EVENT_STDOUT_MAX_BYTES_DISPLAY @@ -4751,7 +4751,7 @@ class ScheduleSerializer(LaunchConfigurationBaseSerializer, SchedulePreviewSeria ), ) until = serializers.SerializerMethodField( - help_text=_('The date this schedule will end. This field is computed from the RRULE. If the schedule does not end an emptry string will be returned'), + help_text=_('The date this schedule will end. This field is computed from the RRULE. If the schedule does not end an empty string will be returned'), ) class Meta: diff --git a/awx/api/templates/api/dashboard_inventory_graph_view.md b/awx/api/templates/api/dashboard_inventory_graph_view.md index 7353bf5006..aff5c06325 100644 --- a/awx/api/templates/api/dashboard_inventory_graph_view.md +++ b/awx/api/templates/api/dashboard_inventory_graph_view.md @@ -3,7 +3,7 @@ Make a GET request to this resource to retrieve aggregate statistics about inven Including fetching the number of total hosts tracked by Tower over an amount of time and the current success or failed status of hosts which have run jobs within an Inventory. 
-## Parmeters and Filtering +## Parameters and Filtering The `period` of the data can be adjusted with: @@ -24,7 +24,7 @@ Data about the number of hosts will be returned in the following format: Each element contains an epoch timestamp represented in seconds and a numerical value indicating the number of hosts that exist at a given moment -Data about failed and successfull hosts by inventory will be given as: +Data about failed and successful hosts by inventory will be given as: { "sources": [ diff --git a/awx/api/templates/api/dashboard_jobs_graph_view.md b/awx/api/templates/api/dashboard_jobs_graph_view.md index baadd4d561..05437c17e1 100644 --- a/awx/api/templates/api/dashboard_jobs_graph_view.md +++ b/awx/api/templates/api/dashboard_jobs_graph_view.md @@ -2,7 +2,7 @@ Make a GET request to this resource to retrieve aggregate statistics about job runs suitable for graphing. -## Parmeters and Filtering +## Parameters and Filtering The `period` of the data can be adjusted with: diff --git a/awx/api/templates/api/inventory_inventory_sources_update.md b/awx/api/templates/api/inventory_inventory_sources_update.md index edf17a27ce..3ca3e0f328 100644 --- a/awx/api/templates/api/inventory_inventory_sources_update.md +++ b/awx/api/templates/api/inventory_inventory_sources_update.md @@ -18,7 +18,7 @@ inventory sources: * `inventory_update`: ID of the inventory update job that was started. (integer, read-only) * `project_update`: ID of the project update job that was started if this inventory source is an SCM source. - (interger, read-only, optional) + (integer, read-only, optional) Note: All manual inventory sources (source="") will be ignored by the update_inventory_sources endpoint. This endpoint will not update inventory sources for Smart Inventories. 
diff --git a/awx/api/views/__init__.py b/awx/api/views/__init__.py index 609e88e155..4d5f98d2c2 100644 --- a/awx/api/views/__init__.py +++ b/awx/api/views/__init__.py @@ -152,7 +152,7 @@ def api_exception_handler(exc, context): if 'awx.named_url_rewritten' in req.environ and not str(getattr(exc, 'status_code', 0)).startswith('2'): # if the URL was rewritten, and it's not a 2xx level status code, # revert the request.path to its original value to avoid leaking - # any context about the existance of resources + # any context about the existence of resources req.path = req.environ['awx.named_url_rewritten'] if exc.status_code == 403: exc = NotFound(detail=_('Not found.')) @@ -172,7 +172,7 @@ class DashboardView(APIView): user_inventory = get_user_queryset(request.user, models.Inventory) inventory_with_failed_hosts = user_inventory.filter(hosts_with_active_failures__gt=0) user_inventory_external = user_inventory.filter(has_inventory_sources=True) - # if there are *zero* inventories, this aggregrate query will be None, fall back to 0 + # if there are *zero* inventories, this aggregate query will be None, fall back to 0 failed_inventory = user_inventory.aggregate(Sum('inventory_sources_with_failures'))['inventory_sources_with_failures__sum'] or 0 data['inventories'] = { 'url': reverse('api:inventory_list', request=request), @@ -1667,7 +1667,7 @@ class GroupList(ListCreateAPIView): class EnforceParentRelationshipMixin(object): """ - Useful when you have a self-refering ManyToManyRelationship. + Useful when you have a self-referring ManyToManyRelationship. * Tower uses a shallow (2-deep only) url pattern. For example: When an object hangs off of a parent object you would have the url of the @@ -2415,7 +2415,7 @@ class JobTemplateSurveySpec(GenericAPIView): status=status.HTTP_400_BAD_REQUEST, ) # if it's a multiselect or multiple choice, it must have coices listed - # choices and defualts must come in as strings seperated by /n characters. 
+ # choices and defaults must come in as strings separated by /n characters. if qtype == 'multiselect' or qtype == 'multiplechoice': if 'choices' in survey_item: if isinstance(survey_item['choices'], str): @@ -3430,7 +3430,7 @@ class JobCreateSchedule(RetrieveAPIView): config = obj.launch_config - # Make up a name for the schedule, guarentee that it is unique + # Make up a name for the schedule, guarantee that it is unique name = 'Auto-generated schedule from job {}'.format(obj.id) existing_names = models.Schedule.objects.filter(name__startswith=name).values_list('name', flat=True) if name in existing_names: @@ -3621,7 +3621,7 @@ class JobJobEventsChildrenSummary(APIView): # key is counter of meta events (i.e. verbose), value is uuid of the assigned parent map_meta_counter_nested_uuid = {} - # collapsable tree view in the UI only makes sense for tree-like + # collapsible tree view in the UI only makes sense for tree-like # hierarchy. If ansible is ran with a strategy like free or host_pinned, then # events can be out of sequential order, and no longer follow a tree structure # E1 diff --git a/awx/conf/fields.py b/awx/conf/fields.py index 0073e473fd..fee4573745 100644 --- a/awx/conf/fields.py +++ b/awx/conf/fields.py @@ -21,7 +21,7 @@ logger = logging.getLogger('awx.conf.fields') # Use DRF fields to convert/validate settings: # - to_representation(obj) should convert a native Python object to a primitive # serializable type. This primitive type will be what is presented in the API -# and stored in the JSON field in the datbase. +# and stored in the JSON field in the database. # - to_internal_value(data) should convert the primitive type back into the # appropriate Python type to be used in settings. 
diff --git a/awx/conf/views.py b/awx/conf/views.py index f7190737fa..5231cd86bd 100644 --- a/awx/conf/views.py +++ b/awx/conf/views.py @@ -180,7 +180,7 @@ class SettingLoggingTest(GenericAPIView): if not port: return Response({'error': 'Port required for ' + protocol}, status=status.HTTP_400_BAD_REQUEST) else: - # if http/https by this point, domain is reacheable + # if http/https by this point, domain is reachable return Response(status=status.HTTP_202_ACCEPTED) if protocol == 'udp': diff --git a/awx/locale/en-us/LC_MESSAGES/django.po b/awx/locale/en-us/LC_MESSAGES/django.po index 97dd6807a8..8dc82f230a 100644 --- a/awx/locale/en-us/LC_MESSAGES/django.po +++ b/awx/locale/en-us/LC_MESSAGES/django.po @@ -1972,7 +1972,7 @@ msgid "" "HTTP headers and meta keys to search to determine remote host name or IP. " "Add additional items to this list, such as \"HTTP_X_FORWARDED_FOR\", if " "behind a reverse proxy. See the \"Proxy Support\" section of the " -"Adminstrator guide for more details." +"Administrator guide for more details." msgstr "" #: awx/main/conf.py:85 @@ -2457,7 +2457,7 @@ msgid "" msgstr "" #: awx/main/conf.py:631 -msgid "Maximum disk persistance for external log aggregation (in GB)" +msgid "Maximum disk persistence for external log aggregation (in GB)" msgstr "" #: awx/main/conf.py:633 @@ -4801,7 +4801,7 @@ msgstr "" #: awx/main/models/workflow.py:251 msgid "" -"An identifier coresponding to the workflow job template node that this node " +"An identifier corresponding to the workflow job template node that this node " "was created from." msgstr "" @@ -5521,7 +5521,7 @@ msgstr "" #: awx/sso/conf.py:606 msgid "" "Extra arguments for Google OAuth2 login. 
You can restrict it to only allow a " -"single domain to authenticate, even if the user is logged in with multple " +"single domain to authenticate, even if the user is logged in with multiple " "Google accounts. Refer to the documentation for more detail." msgstr "" @@ -5905,7 +5905,7 @@ msgstr "" #: awx/sso/conf.py:1290 msgid "" -"Create a keypair to use as a service provider (SP) and include the " +"Create a key pair to use as a service provider (SP) and include the " "certificate content here." msgstr "" @@ -5915,7 +5915,7 @@ msgstr "" #: awx/sso/conf.py:1302 msgid "" -"Create a keypair to use as a service provider (SP) and include the private " +"Create a key pair to use as a service provider (SP) and include the private " "key content here." msgstr "" diff --git a/awx/locale/es/LC_MESSAGES/django.po b/awx/locale/es/LC_MESSAGES/django.po index ec51ee8347..cd47f1e581 100644 --- a/awx/locale/es/LC_MESSAGES/django.po +++ b/awx/locale/es/LC_MESSAGES/django.po @@ -1971,7 +1971,7 @@ msgid "" "HTTP headers and meta keys to search to determine remote host name or IP. " "Add additional items to this list, such as \"HTTP_X_FORWARDED_FOR\", if " "behind a reverse proxy. See the \"Proxy Support\" section of the " -"Adminstrator guide for more details." +"Administrator guide for more details." msgstr "Los encabezados HTTP y las llaves de activación para buscar y determinar el nombre de host remoto o IP. Añada elementos adicionales a esta lista, como \"HTTP_X_FORWARDED_FOR\", si está detrás de un proxy inverso. Consulte la sección \"Soporte de proxy\" de la guía del adminstrador para obtener más información." #: awx/main/conf.py:85 @@ -4804,7 +4804,7 @@ msgstr "Indica que un trabajo no se creará cuando es sea True. La semántica de #: awx/main/models/workflow.py:251 msgid "" -"An identifier coresponding to the workflow job template node that this node " +"An identifier corresponding to the workflow job template node that this node " "was created from." 
msgstr "Un identificador que corresponde al nodo de plantilla de tarea del flujo de trabajo a partir del cual se creó este nodo." @@ -5526,7 +5526,7 @@ msgstr "Argumentos adicionales para Google OAuth2" #: awx/sso/conf.py:606 msgid "" "Extra arguments for Google OAuth2 login. You can restrict it to only allow a " -"single domain to authenticate, even if the user is logged in with multple " +"single domain to authenticate, even if the user is logged in with multiple " "Google accounts. Refer to the documentation for more detail." msgstr "Argumentos adicionales para el inicio de sesión en Google OAuth2. Puede limitarlo para permitir la autenticación de un solo dominio, incluso si el usuario ha iniciado sesión con varias cuentas de Google. Consulte la documentación para obtener información detallada." @@ -5910,7 +5910,7 @@ msgstr "Certificado público del proveedor de servicio SAML" #: awx/sso/conf.py:1290 msgid "" -"Create a keypair to use as a service provider (SP) and include the " +"Create a key pair to use as a service provider (SP) and include the " "certificate content here." msgstr "Crear un par de claves para usar como proveedor de servicio (SP) e incluir el contenido del certificado aquí." @@ -5920,7 +5920,7 @@ msgstr "Clave privada del proveedor de servicio SAML" #: awx/sso/conf.py:1302 msgid "" -"Create a keypair to use as a service provider (SP) and include the private " +"Create a key pair to use as a service provider (SP) and include the private " "key content here." msgstr "Crear un par de claves para usar como proveedor de servicio (SP) e incluir el contenido de la clave privada aquí." 
From 2fe1ea94bd0d2f708103e640ad640b843baec178 Mon Sep 17 00:00:00 2001 From: Alex Corey Date: Wed, 1 Feb 2023 16:00:28 -0500 Subject: [PATCH 21/25] Conditionally applies the job templates tab to credentials that can be on a JT --- awx/ui/src/screens/Credential/Credential.js | 30 ++++++++++++------- .../src/screens/Credential/Credential.test.js | 20 +++++++++++++ 2 files changed, 40 insertions(+), 10 deletions(-) diff --git a/awx/ui/src/screens/Credential/Credential.js b/awx/ui/src/screens/Credential/Credential.js index ce0509146d..79c766a2d8 100644 --- a/awx/ui/src/screens/Credential/Credential.js +++ b/awx/ui/src/screens/Credential/Credential.js @@ -22,14 +22,19 @@ import { CredentialsAPI } from 'api'; import CredentialDetail from './CredentialDetail'; import CredentialEdit from './CredentialEdit'; -const jobTemplateCredentialTypes = [ - 'machine', - 'cloud', - 'net', - 'ssh', - 'vault', - 'kubernetes', - 'cryptography', +const unacceptableCredentialTypes = [ + 'centrify_vault_kv', + 'aim', + 'conjur', + 'hashivault_kv', + 'hashivault_ssh', + 'azure_kv', + 'thycotic_dsv', + 'thycotic_tss', + 'galaxy_api_token', + 'insights', + 'registry', + 'scm', ]; function Credential({ setBreadcrumb }) { @@ -86,7 +91,10 @@ function Credential({ setBreadcrumb }) { id: 1, }, ]; - if (jobTemplateCredentialTypes.includes(credential?.kind)) { + if ( + !unacceptableCredentialTypes.includes(credential?.kind) && + credential !== null + ) { tabsArray.push({ name: t`Job Templates`, link: `/credentials/${id}/job_templates`, @@ -115,12 +123,14 @@ function Credential({ setBreadcrumb }) { ); } + if (hasContentLoading) { + return ; + } return ( {showCardHeader && } - {hasContentLoading && } {!hasContentLoading && credential && ( ({ describe('', () => { let wrapper; + afterEach(() => { + jest.clearAllMocks(); + + wrapper.unmount(); + }); test('initially renders user-based machine credential successfully', async () => { CredentialsAPI.readDetail.mockResolvedValueOnce({ @@ -61,6 +67,19 @@ 
describe('', () => { }); }); + test('should not render job template tab', async () => { + CredentialsAPI.readDetail.mockResolvedValueOnce({ + data: { ...mockCyberArkCredential, kind: 'registry' }, + }); + const expectedTabs = ['Back to Credentials', 'Details', 'Access']; + await act(async () => { + wrapper = mountWithContexts( {}} />); + }); + wrapper.find('RoutedTabs li').forEach((tab, index) => { + expect(tab.text()).toEqual(expectedTabs[index]); + }); + }); + test('should show content error when user attempts to navigate to erroneous route', async () => { const history = createMemoryHistory({ initialEntries: ['/credentials/2/foobar'], @@ -85,3 +104,4 @@ describe('', () => { await waitForElement(wrapper, 'ContentError', (el) => el.length === 1); }); }); +describe(' should not show job template tab', () => {}); From 217dc57c24b07717eca6bf178f41c713a969a100 Mon Sep 17 00:00:00 2001 From: Jesse Wattenbarger Date: Tue, 14 Feb 2023 10:06:15 -0500 Subject: [PATCH 22/25] Change docker-clean build rule in Makefile - Use a make foreach macro and rmi instead of grep and xargs. --- Makefile | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/Makefile b/Makefile index 896dcf2c6c..e6da7db8da 100644 --- a/Makefile +++ b/Makefile @@ -547,10 +547,8 @@ docker-compose-build: --cache-from=$(DEV_DOCKER_TAG_BASE)/awx_devel:$(COMPOSE_TAG) . 
docker-clean: - $(foreach container_id,$(shell docker ps -f name=tools_awx -aq && docker ps -f name=tools_receptor -aq),docker stop $(container_id); docker rm -f $(container_id);) - if [ "$(shell docker images | grep awx_devel)" ]; then \ - docker images | grep awx_devel | awk '{print $$3}' | xargs docker rmi --force; \ - fi + -$(foreach container_id,$(shell docker ps -f name=tools_awx -aq && docker ps -f name=tools_receptor -aq),docker stop $(container_id); docker rm -f $(container_id);) + -$(foreach image_id,$(shell docker images --filter=reference='*awx_devel*' -aq),docker rmi --force $(image_id);) docker-clean-volumes: docker-compose-clean docker-compose-container-group-clean docker volume rm -f tools_awx_db tools_grafana_storage tools_prometheus_storage $(docker volume ls --filter name=tools_redis_socket_ -q) From 2ca0b7bc01a3bf3a8dd32e1d570c992d00091dc1 Mon Sep 17 00:00:00 2001 From: Hao Liu Date: Thu, 2 Mar 2023 21:14:53 -0500 Subject: [PATCH 23/25] Revert "Remove trailing $ from websocket_urlpatterns to work with custom path to fix #12241" This reverts commit 5e28f5dca162ec01d6fd32207404f66fa2276604. 
--- awx/main/routing.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/awx/main/routing.py b/awx/main/routing.py index c96505b7e1..100347f64e 100644 --- a/awx/main/routing.py +++ b/awx/main/routing.py @@ -27,8 +27,8 @@ class AWXProtocolTypeRouter(ProtocolTypeRouter): websocket_urlpatterns = [ - re_path(r'websocket/', consumers.EventConsumer.as_asgi()), - re_path(r'websocket/broadcast/', consumers.BroadcastConsumer.as_asgi()), + re_path(r'websocket/$', consumers.EventConsumer.as_asgi()), + re_path(r'websocket/broadcast/$', consumers.BroadcastConsumer.as_asgi()), ] application = AWXProtocolTypeRouter( From 0a2f1622f60518746dec532603049ccaa4758102 Mon Sep 17 00:00:00 2001 From: Martin Slemr Date: Tue, 7 Mar 2023 18:24:04 +0100 Subject: [PATCH 24/25] Analytics: instance_info.json v1.3 (#13408) --- awx/main/analytics/collectors.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/awx/main/analytics/collectors.py b/awx/main/analytics/collectors.py index 30bf730784..14d54c7ac2 100644 --- a/awx/main/analytics/collectors.py +++ b/awx/main/analytics/collectors.py @@ -233,7 +233,7 @@ def projects_by_scm_type(since, **kwargs): return counts -@register('instance_info', '1.2', description=_('Cluster topology and capacity')) +@register('instance_info', '1.3', description=_('Cluster topology and capacity')) def instance_info(since, include_hostnames=False, **kwargs): info = {} # Use same method that the TaskManager does to compute consumed capacity without querying all running jobs for each Instance From 3945db60eb396aefe6a60aba789648aab0d03ec0 Mon Sep 17 00:00:00 2001 From: Hao Liu Date: Tue, 7 Mar 2023 14:09:34 -0500 Subject: [PATCH 25/25] Automatically build image for feature branch - also will now publish awx image for devel --- .github/workflows/devel_images.yml | 22 ++++++++++++++++------ 1 file changed, 16 insertions(+), 6 deletions(-) diff --git a/.github/workflows/devel_images.yml b/.github/workflows/devel_images.yml index 
dbc1a4937b..6f2bdf74c4 100644 --- a/.github/workflows/devel_images.yml +++ b/.github/workflows/devel_images.yml @@ -7,6 +7,7 @@ on: branches: - devel - release_* + - feature_* jobs: push: if: endsWith(github.repository, '/awx') || startsWith(github.ref, 'refs/heads/release_') @@ -20,6 +21,12 @@ jobs: - name: Get python version from Makefile run: echo py_version=`make PYTHON_VERSION` >> $GITHUB_ENV + - name: Set lower case owner name + run: | + echo "OWNER_LC=${OWNER,,}" >>${GITHUB_ENV} + env: + OWNER: '${{ github.repository_owner }}' + - name: Install python ${{ env.py_version }} uses: actions/setup-python@v2 with: @@ -31,15 +38,18 @@ jobs: - name: Pre-pull image to warm build cache run: | - docker pull ghcr.io/${{ github.repository_owner }}/awx_devel:${GITHUB_REF##*/} || : - docker pull ghcr.io/${{ github.repository_owner }}/awx_kube_devel:${GITHUB_REF##*/} || : + docker pull ghcr.io/${OWNER_LC}/awx_devel:${GITHUB_REF##*/} || : + docker pull ghcr.io/${OWNER_LC}/awx_kube_devel:${GITHUB_REF##*/} || : + docker pull ghcr.io/${OWNER_LC}/awx:${GITHUB_REF##*/} || : - name: Build images run: | - DEV_DOCKER_TAG_BASE=ghcr.io/${{ github.repository_owner }} COMPOSE_TAG=${GITHUB_REF##*/} make docker-compose-build - DEV_DOCKER_TAG_BASE=ghcr.io/${{ github.repository_owner }} COMPOSE_TAG=${GITHUB_REF##*/} make awx-kube-dev-build + DEV_DOCKER_TAG_BASE=ghcr.io/${OWNER_LC} COMPOSE_TAG=${GITHUB_REF##*/} make docker-compose-build + DEV_DOCKER_TAG_BASE=ghcr.io/${OWNER_LC} COMPOSE_TAG=${GITHUB_REF##*/} make awx-kube-dev-build + DEV_DOCKER_TAG_BASE=ghcr.io/${OWNER_LC} COMPOSE_TAG=${GITHUB_REF##*/} make awx-kube-build - name: Push image run: | - docker push ghcr.io/${{ github.repository_owner }}/awx_devel:${GITHUB_REF##*/} - docker push ghcr.io/${{ github.repository_owner }}/awx_kube_devel:${GITHUB_REF##*/} + docker push ghcr.io/${OWNER_LC}/awx_devel:${GITHUB_REF##*/} + docker push ghcr.io/${OWNER_LC}/awx_kube_devel:${GITHUB_REF##*/} + docker push ghcr.io/${OWNER_LC}/awx:${GITHUB_REF##*/}