Merge branch 'devel' into cyberark-ccp-branding-webserviceid
@@ -0,0 +1,24 @@
+---
+name: Feature branch deletion cleanup
+on:
+  delete:
+    branches:
+      - feature_**
+jobs:
+  push:
+    runs-on: ubuntu-latest
+    permissions:
+      packages: write
+      contents: read
+    steps:
+      - name: Delete API Schema
+        env:
+          AWS_ACCESS_KEY: ${{ secrets.AWS_ACCESS_KEY }}
+          AWS_SECRET_KEY: ${{ secrets.AWS_SECRET_KEY }}
+          AWS_REGION: 'us-east-1'
+        run: |
+          ansible localhost -c local -m command -a "{{ ansible_python_interpreter + ' -m pip install boto3'}}"
+          ansible localhost -c local -m aws_s3 \
+            -a "bucket=awx-public-ci-files object=${GITHUB_REF##*/}/schema.json mode=delete permission=public-read"
+
+
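A note on the last run line: in POSIX shells, `${GITHUB_REF##*/}` strips everything up to and including the final `/`, so the S3 object key is built from the final component of the triggering ref. A quick Python equivalent (the sample refs are illustrative, not taken from this commit):

```python
def branch_from_ref(ref: str) -> str:
    """Mimic bash ${ref##*/}: drop everything up to the last '/'."""
    return ref.rsplit("/", 1)[-1]

assert branch_from_ref("refs/heads/feature_123") == "feature_123"
assert branch_from_ref("feature_123") == "feature_123"  # no '/' leaves the ref unchanged
```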
@@ -13,21 +13,13 @@ jobs:
       packages: write
       contents: read
     steps:
-      - name: Write PR body to a file
-        run: |
-          cat >> pr.body << __SOME_RANDOM_PR_EOF__
-          ${{ github.event.pull_request.body }}
-          __SOME_RANDOM_PR_EOF__
-
-      - name: Display the received body for troubleshooting
-        run: cat pr.body
-
-      # We want to write these out individually just incase the options were joined on a single line
       - name: Check for each of the lines
+        env:
+          PR_BODY: ${{ github.event.pull_request.body }}
         run: |
-          grep "Bug, Docs Fix or other nominal change" pr.body > Z
-          grep "New or Enhanced Feature" pr.body > Y
-          grep "Breaking Change" pr.body > X
+          echo $PR_BODY | grep "Bug, Docs Fix or other nominal change" > Z
+          echo $PR_BODY | grep "New or Enhanced Feature" > Y
+          echo $PR_BODY | grep "Breaking Change" > X
           exit 0
           # We exit 0 and set the shell to prevent the returns from the greps from failing this step
           # See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#exit-codes-and-error-action-preference
@@ -5,6 +5,7 @@ on:
     branches:
       - devel
       - release_**
+      - feature_**
 jobs:
   push:
     runs-on: ubuntu-latest
@@ -85,6 +85,7 @@ clean: clean-ui clean-api clean-awxkit clean-dist

 clean-api:
 	rm -rf build $(NAME)-$(VERSION) *.egg-info
+	rm -rf .tox
 	find . -type f -regex ".*\.py[co]$$" -delete
 	find . -type d -name "__pycache__" -delete
 	rm -f awx/awx_test.sqlite3*
@@ -181,7 +182,7 @@ collectstatic:
 	@if [ "$(VENV_BASE)" ]; then \
 		. $(VENV_BASE)/awx/bin/activate; \
 	fi; \
-	mkdir -p awx/public/static && $(PYTHON) manage.py collectstatic --clear --noinput > /dev/null 2>&1
+	$(PYTHON) manage.py collectstatic --clear --noinput > /dev/null 2>&1

 DEV_RELOAD_COMMAND ?= supervisorctl restart tower-processes:*

@@ -377,6 +378,8 @@ clean-ui:
 	rm -rf awx/ui/build
 	rm -rf awx/ui/src/locales/_build
 	rm -rf $(UI_BUILD_FLAG_FILE)
+	# the collectstatic command doesn't like it if this dir doesn't exist.
+	mkdir -p awx/ui/build/static

 awx/ui/node_modules:
 	NODE_OPTIONS=--max-old-space-size=6144 $(NPM_BIN) --prefix awx/ui --loglevel warn --force ci
@@ -386,16 +389,14 @@ $(UI_BUILD_FLAG_FILE):
 	$(PYTHON) tools/scripts/compilemessages.py
 	$(NPM_BIN) --prefix awx/ui --loglevel warn run compile-strings
 	$(NPM_BIN) --prefix awx/ui --loglevel warn run build
-	mkdir -p awx/public/static/css
-	mkdir -p awx/public/static/js
-	mkdir -p awx/public/static/media
-	cp -r awx/ui/build/static/css/* awx/public/static/css
-	cp -r awx/ui/build/static/js/* awx/public/static/js
-	cp -r awx/ui/build/static/media/* awx/public/static/media
+	mkdir -p /var/lib/awx/public/static/css
+	mkdir -p /var/lib/awx/public/static/js
+	mkdir -p /var/lib/awx/public/static/media
+	cp -r awx/ui/build/static/css/* /var/lib/awx/public/static/css
+	cp -r awx/ui/build/static/js/* /var/lib/awx/public/static/js
+	cp -r awx/ui/build/static/media/* /var/lib/awx/public/static/media
 	touch $@

 ui-release: $(UI_BUILD_FLAG_FILE)

 ui-devel: awx/ui/node_modules
@@ -453,6 +454,7 @@ COMPOSE_OPTS ?=
 CONTROL_PLANE_NODE_COUNT ?= 1
 EXECUTION_NODE_COUNT ?= 2
 MINIKUBE_CONTAINER_GROUP ?= false
+MINIKUBE_SETUP ?= false # if false, run minikube separately
 EXTRA_SOURCES_ANSIBLE_OPTS ?=

 ifneq ($(ADMIN_PASSWORD),)
@@ -461,7 +463,7 @@ endif

 docker-compose-sources: .git/hooks/pre-commit
 	@if [ $(MINIKUBE_CONTAINER_GROUP) = true ]; then\
-	    ansible-playbook -i tools/docker-compose/inventory tools/docker-compose-minikube/deploy.yml; \
+	    ansible-playbook -i tools/docker-compose/inventory -e minikube_setup=$(MINIKUBE_SETUP) tools/docker-compose-minikube/deploy.yml; \
 	fi;

 	ansible-playbook -i tools/docker-compose/inventory tools/docker-compose/ansible/sources.yml \
@@ -635,4 +637,4 @@ help/generate:
 	} \
 	} \
 	{ lastLine = $$0 }' $(MAKEFILE_LIST) | sort -u
 	@printf "\n"
+14 -12
@@ -2221,6 +2221,15 @@ class InventorySourceUpdateSerializer(InventorySourceSerializer):
     class Meta:
         fields = ('can_update',)

+    def validate(self, attrs):
+        project = self.instance.source_project
+        if project:
+            failed_reason = project.get_reason_if_failed()
+            if failed_reason:
+                raise serializers.ValidationError(failed_reason)
+
+        return super(InventorySourceUpdateSerializer, self).validate(attrs)
+

 class InventoryUpdateSerializer(UnifiedJobSerializer, InventorySourceOptionsSerializer):

@@ -4272,17 +4281,10 @@ class JobLaunchSerializer(BaseSerializer):
         # Basic validation - cannot run a playbook without a playbook
         if not template.project:
             errors['project'] = _("A project is required to run a job.")
-        elif template.project.status in ('error', 'failed'):
-            errors['playbook'] = _("Missing a revision to run due to failed project update.")
-            latest_update = template.project.project_updates.last()
-            if latest_update is not None and latest_update.failed:
-                failed_validation_tasks = latest_update.project_update_events.filter(
-                    event='runner_on_failed',
-                    play="Perform project signature/checksum verification",
-                )
-                if failed_validation_tasks:
-                    errors['playbook'] = _("Last project update failed due to signature validation failure.")
+        else:
+            failure_reason = template.project.get_reason_if_failed()
+            if failure_reason:
+                errors['playbook'] = failure_reason

         # cannot run a playbook without an inventory
         if template.inventory and template.inventory.pending_deletion is True:
@@ -4952,7 +4954,7 @@ class InstanceSerializer(BaseSerializer):
         res['install_bundle'] = self.reverse('api:instance_install_bundle', kwargs={'pk': obj.pk})
         res['peers'] = self.reverse('api:instance_peers_list', kwargs={"pk": obj.pk})
         if self.context['request'].user.is_superuser or self.context['request'].user.is_system_auditor:
-            if obj.node_type != 'hop':
+            if obj.node_type == 'execution':
                 res['health_check'] = self.reverse('api:instance_health_check', kwargs={'pk': obj.pk})
         return res
@@ -1,5 +1,5 @@
 Launch a Job Template:
+{% ifmeth GET %}
 Make a GET request to this resource to determine if the job_template can be
 launched and whether any passwords are required to launch the job_template.
 The response will include the following fields:
@@ -29,8 +29,8 @@ The response will include the following fields:
 * `inventory_needed_to_start`: Flag indicating the presence of an inventory
   associated with the job template. If not then one should be supplied when
   launching the job (boolean, read-only)
-Make a POST request to this resource to launch the job_template. If any
+{% endifmeth %}
+{% ifmeth POST %}Make a POST request to this resource to launch the job_template. If any
 passwords, inventory, or extra variables (extra_vars) are required, they must
 be passed via POST data, with extra_vars given as a YAML or JSON string and
 escaped parentheses. If the `inventory_needed_to_start` is `True` then the
@@ -41,3 +41,4 @@ are not provided, a 400 status code will be returned. If the job cannot be
 launched, a 405 status code will be returned. If the provided credential or
 inventory are not allowed to be used by the user, then a 403 status code will
 be returned.
+{% endifmeth %}
@@ -392,8 +392,8 @@ class InstanceHealthCheck(GenericAPIView):
     permission_classes = (IsSystemAdminOrAuditor,)

     def get_queryset(self):
+        return super().get_queryset().filter(node_type='execution')
         # FIXME: For now, we don't have a good way of checking the health of a hop node.
-        return super().get_queryset().exclude(node_type='hop')

     def get(self, request, *args, **kwargs):
         obj = self.get_object()
@@ -413,9 +413,10 @@ class InstanceHealthCheck(GenericAPIView):

             execution_node_health_check.apply_async([obj.hostname])
         else:
-            from awx.main.tasks.system import cluster_node_health_check
-
-            cluster_node_health_check.apply_async([obj.hostname], queue=obj.hostname)
+            return Response(
+                {"error": f"Cannot run a health check on instances of type {obj.node_type}. Health checks can only be run on execution nodes."},
+                status=status.HTTP_400_BAD_REQUEST,
+            )
         return Response({'msg': f"Health check is running for {obj.hostname}."}, status=status.HTTP_200_OK)
@@ -2220,6 +2221,8 @@ class InventorySourceUpdateView(RetrieveAPIView):

     def post(self, request, *args, **kwargs):
         obj = self.get_object()
+        serializer = self.get_serializer(instance=obj, data=request.data)
+        serializer.is_valid(raise_exception=True)
         if obj.can_update:
             update = obj.update()
             if not update:
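The API-visible effect of the InstanceHealthCheck change: health checks are accepted only for execution nodes, and other node types now get an explanatory 400 instead of a queued cluster_node_health_check. A hypothetical client call; the host, credentials, and instance id are placeholders, and the path simply follows the api:instance_health_check route named in the diff:

```python
import requests

# Placeholder deployment details; illustrates the new 400 response for
# non-execution nodes rather than any real AWX instance.
resp = requests.post(
    "https://awx.example.com/api/v2/instances/3/health_check/",
    auth=("admin", "password"),
)
if resp.status_code == 400:
    # e.g. "Cannot run a health check on instances of type hop. ..."
    print(resp.json()["error"])
```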
[Twelve file diffs suppressed because they are too large.]
@@ -5,7 +5,9 @@ import logging

 from django.conf import settings
 from django.apps import apps

 from awx.main.consumers import emit_channel_notification
+from awx.main.utils import is_testing

 root_key = 'awx_metrics'
 logger = logging.getLogger('awx.main.analytics')
@@ -163,7 +165,7 @@ class Metrics:
         Instance = apps.get_model('main', 'Instance')
         if instance_name:
             self.instance_name = instance_name
-        elif settings.IS_TESTING():
+        elif is_testing():
             self.instance_name = "awx_testing"
         else:
             self.instance_name = Instance.objects.my_hostname()
@@ -1,6 +1,5 @@
 from .plugin import CredentialPlugin, CertFiles, raise_for_status

-import base64
 from urllib.parse import urljoin, quote

 from django.utils.translation import gettext_lazy as _
@@ -61,7 +60,7 @@ def conjur_backend(**kwargs):
     cacert = kwargs.get('cacert', None)

     auth_kwargs = {
-        'headers': {'Content-Type': 'text/plain'},
+        'headers': {'Content-Type': 'text/plain', 'Accept-Encoding': 'base64'},
         'data': api_key,
         'allow_redirects': False,
     }
@@ -69,9 +68,9 @@ def conjur_backend(**kwargs):
     with CertFiles(cacert) as cert:
         # https://www.conjur.org/api.html#authentication-authenticate-post
         auth_kwargs['verify'] = cert
-        resp = requests.post(urljoin(url, '/'.join(['authn', account, username, 'authenticate'])), **auth_kwargs)
+        resp = requests.post(urljoin(url, '/'.join(['api', 'authn', account, username, 'authenticate'])), **auth_kwargs)
     raise_for_status(resp)
-    token = base64.b64encode(resp.content).decode('utf-8')
+    token = resp.content.decode('utf-8')

     lookup_kwargs = {
         'headers': {'Authorization': 'Token token="{}"'.format(token)},
@@ -79,9 +78,10 @@ def conjur_backend(**kwargs):
     }

     # https://www.conjur.org/api.html#secrets-retrieve-a-secret-get
-    path = urljoin(url, '/'.join(['secrets', account, 'variable', secret_path]))
+    path = urljoin(url, '/'.join(['api', 'secrets', account, 'variable', secret_path]))
     if version:
-        path = '?'.join([path, version])
+        ver = "version={}".format(version)
+        path = '?'.join([path, ver])

     with CertFiles(cacert) as cert:
         lookup_kwargs['verify'] = cert
@@ -90,4 +90,4 @@ def conjur_backend(**kwargs):
     return resp.text


-conjur_plugin = CredentialPlugin('CyberArk Conjur Secret Lookup', inputs=conjur_inputs, backend=conjur_backend)
+conjur_plugin = CredentialPlugin('CyberArk Conjur Secrets Manager Lookup', inputs=conjur_inputs, backend=conjur_backend)
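Taken together, the conjur.py changes move the plugin onto the URL layout and token encoding used by CyberArk's hosted Conjur service: an `api/` prefix on both endpoints, `Accept-Encoding: base64` so the authenticate response is already base64 text, and a real `version=N` query parameter. A minimal standalone sketch of the same two-request flow; the hostname, account, identity, API key, and secret path are placeholders:

```python
import requests
from urllib.parse import urljoin

# Placeholder inputs; in the plugin these come from the credential's fields,
# with path segments URL-encoded via quote().
url, account, username, api_key = 'https://conjur.example.com', 'myaccount', 'host%2Fdemo', 'example-key'

# 1) Authenticate. 'Accept-Encoding: base64' asks Conjur to return the
#    short-lived token already base64-encoded, so the old
#    base64.b64encode(resp.content) step becomes unnecessary.
resp = requests.post(
    urljoin(url, '/'.join(['api', 'authn', account, username, 'authenticate'])),
    headers={'Content-Type': 'text/plain', 'Accept-Encoding': 'base64'},
    data=api_key,
    allow_redirects=False,
)
resp.raise_for_status()
token = resp.content.decode('utf-8')

# 2) Retrieve a secret, pinning a version with a proper key=value parameter.
path = urljoin(url, '/'.join(['api', 'secrets', account, 'variable', 'demo%2Fpassword']))
lookup = requests.get(path, headers={'Authorization': 'Token token="{}"'.format(token)}, params={'version': 2})
lookup.raise_for_status()
print(lookup.text)
```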
@@ -1,14 +1,13 @@
 import inspect
 import logging
-import sys
 import json
 import time
 from uuid import uuid4

-from django.conf import settings
 from django_guid import get_guid

 from . import pg_bus_conn
+from awx.main.utils import is_testing

 logger = logging.getLogger('awx.main.dispatch')

@@ -93,7 +92,7 @@ class task:
         obj.update(**kw)
         if callable(queue):
             queue = queue()
-        if not settings.IS_TESTING(sys.argv):
+        if not is_testing():
             with pg_bus_conn() as conn:
                 conn.notify(queue, json.dumps(obj))
         return (obj, queue)
@@ -233,11 +233,12 @@ class Instance(HasPolicyEditsMixin, BaseModel):
         if not isinstance(vargs.get('grace_period'), int):
             vargs['grace_period'] = 60  # grace period of 60 minutes, need to set because CLI default will not take effect
         if 'exclude_strings' not in vargs and vargs.get('file_pattern'):
-            active_pks = list(
-                UnifiedJob.objects.filter(
-                    (models.Q(execution_node=self.hostname) | models.Q(controller_node=self.hostname)) & models.Q(status__in=('running', 'waiting'))
-                ).values_list('pk', flat=True)
-            )
+            active_job_qs = UnifiedJob.objects.filter(status__in=('running', 'waiting'))
+            if self.node_type == 'execution':
+                active_job_qs = active_job_qs.filter(execution_node=self.hostname)
+            else:
+                active_job_qs = active_job_qs.filter(controller_node=self.hostname)
+            active_pks = list(active_job_qs.values_list('pk', flat=True))
             if active_pks:
                 vargs['exclude_strings'] = [JOB_FOLDER_PREFIX % job_id for job_id in active_pks]
         if 'remove_images' in vargs or 'image_prune' in vargs:
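The exclusion format this produces is pinned down by the unit test added later in this commit, which expects `exclude_strings == [f'awx_{job.pk}_']`, i.e. `JOB_FOLDER_PREFIX` renders as `awx_<pk>_`. In miniature:

```python
JOB_FOLDER_PREFIX = 'awx_%s_'  # format inferred from the test assertion in this same diff
active_pks = [42, 43]
exclude_strings = [JOB_FOLDER_PREFIX % job_id for job_id in active_pks]
assert exclude_strings == ['awx_42_', 'awx_43_']
```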
@@ -247,6 +247,19 @@ class Inventory(CommonModelNameNotUnique, ResourceMixin, RelatedJobsMixin):
         return (number, step)

     def get_sliced_hosts(self, host_queryset, slice_number, slice_count):
+        """
+        Returns a slice of Hosts given a slice number and total slice count, or
+        the original queryset if slicing is not requested.
+
+        NOTE: If slicing is performed, this will return a List[Host] with the
+        resulting slice. If slicing is not performed it will return the
+        original queryset (not evaluating it or forcing it to a list). This
+        puts the burden on the caller to check the resulting type. This is
+        non-ideal because it's easy to get wrong, but I think the only way
+        around it is to force the queryset which has memory implications for
+        large inventories.
+        """
+
         if slice_count > 1 and slice_number > 0:
             offset = slice_number - 1
             host_queryset = host_queryset[offset::slice_count]
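A quick illustration of the step-slice in the last line, which is what the new docstring warns about: `[offset::slice_count]` deals hosts out round-robin, and slicing yields a list rather than the original queryset. Hostnames here are made up:

```python
hosts = ['h1', 'h2', 'h3', 'h4', 'h5', 'h6', 'h7']
slice_count = 3
for slice_number in range(1, slice_count + 1):
    offset = slice_number - 1
    print(slice_number, hosts[offset::slice_count])
# 1 ['h1', 'h4', 'h7']
# 2 ['h2', 'h5']
# 3 ['h3', 'h6']
```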
+13 -4
@@ -15,6 +15,7 @@ from urllib.parse import urljoin
 from django.conf import settings
 from django.core.exceptions import ValidationError
 from django.db import models
+from django.db.models.query import QuerySet

 # from django.core.cache import cache
 from django.utils.encoding import smart_str
@@ -844,22 +845,30 @@ class Job(UnifiedJob, JobOptions, SurveyJobMixin, JobNotificationMixin, TaskMana
     def get_notification_friendly_name(self):
         return "Job"

-    def _get_inventory_hosts(self, only=['name', 'ansible_facts', 'ansible_facts_modified', 'modified', 'inventory_id']):
+    def _get_inventory_hosts(self, only=('name', 'ansible_facts', 'ansible_facts_modified', 'modified', 'inventory_id'), **filters):
+        """Return value is an iterable for the relevant hosts for this job"""
         if not self.inventory:
             return []
         host_queryset = self.inventory.hosts.only(*only)
-        return self.inventory.get_sliced_hosts(host_queryset, self.job_slice_number, self.job_slice_count)
+        if filters:
+            host_queryset = host_queryset.filter(**filters)
+        host_queryset = self.inventory.get_sliced_hosts(host_queryset, self.job_slice_number, self.job_slice_count)
+        if isinstance(host_queryset, QuerySet):
+            return host_queryset.iterator()
+        return host_queryset

     def start_job_fact_cache(self, destination, modification_times, timeout=None):
         self.log_lifecycle("start_job_fact_cache")
         os.makedirs(destination, mode=0o700)
-        hosts = self._get_inventory_hosts()
         if timeout is None:
             timeout = settings.ANSIBLE_FACT_CACHE_TIMEOUT
         if timeout > 0:
             # exclude hosts with fact data older than `settings.ANSIBLE_FACT_CACHE_TIMEOUT seconds`
             timeout = now() - datetime.timedelta(seconds=timeout)
-            hosts = hosts.filter(ansible_facts_modified__gte=timeout)
+            hosts = self._get_inventory_hosts(ansible_facts_modified__gte=timeout)
+        else:
+            hosts = self._get_inventory_hosts()
         for host in hosts:
             filepath = os.sep.join(map(str, [destination, host.name]))
             if not os.path.realpath(filepath).startswith(destination):
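The start_job_fact_cache rework pushes the age filter into `_get_inventory_hosts(**filters)` instead of filtering the returned value, which would no longer work once an iterator comes back. The cutoff arithmetic itself, in isolation:

```python
import datetime

now = datetime.datetime.now(datetime.timezone.utc)
timeout = 3600  # seconds; stand-in for settings.ANSIBLE_FACT_CACHE_TIMEOUT
cutoff = now - datetime.timedelta(seconds=timeout)

facts_modified = now - datetime.timedelta(minutes=30)
assert facts_modified >= cutoff  # fresh enough: this host's cached facts are kept
```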
@@ -471,6 +471,29 @@ class Project(UnifiedJobTemplate, ProjectOptions, ResourceMixin, CustomVirtualEn
     def get_absolute_url(self, request=None):
         return reverse('api:project_detail', kwargs={'pk': self.pk}, request=request)

+    def get_reason_if_failed(self):
+        """
+        If the project is in a failed or errored state, return a human-readable
+        error message explaining why. Otherwise return None.
+
+        This is used during validation in the serializer and also by
+        RunProjectUpdate/RunInventoryUpdate.
+        """
+
+        if self.status not in ('error', 'failed'):
+            return None
+
+        latest_update = self.project_updates.last()
+        if latest_update is not None and latest_update.failed:
+            failed_validation_tasks = latest_update.project_update_events.filter(
+                event='runner_on_failed',
+                play="Perform project signature/checksum verification",
+            )
+            if failed_validation_tasks:
+                return _("Last project update failed due to signature validation failure.")
+
+        return _("Missing a revision to run due to failed project update.")
+
     '''
     RelatedJobsMixin
     '''
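Both the serializer and task-layer changes in this commit now funnel through `get_reason_if_failed()`, so its decision tree is worth spelling out. A standalone mirror, with the messages copied from the diff; the project_update_events query that detects signature failures is reduced to a plain boolean here:

```python
def reason_if_failed(status, signature_validation_failed):
    if status not in ('error', 'failed'):
        return None  # healthy project: nothing to report
    if signature_validation_failed:
        return "Last project update failed due to signature validation failure."
    return "Missing a revision to run due to failed project update."

assert reason_if_failed('successful', False) is None
assert reason_if_failed('failed', False) == "Missing a revision to run due to failed project update."
assert reason_if_failed('failed', True).startswith("Last project update failed")
```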
@@ -5,9 +5,6 @@ import json
 import logging
 import requests

-from django.utils.encoding import smart_str
-from django.utils.translation import gettext_lazy as _
-
 from awx.main.notifications.base import AWXBaseEmailBackend
 from awx.main.utils import get_awx_http_client_headers
 from awx.main.notifications.custom_notification_base import CustomNotificationBase
@@ -17,6 +14,8 @@ logger = logging.getLogger('awx.main.notifications.webhook_backend')

 class WebhookBackend(AWXBaseEmailBackend, CustomNotificationBase):
+    MAX_RETRIES = 5
+
     init_parameters = {
         "url": {"label": "Target URL", "type": "string"},
         "http_method": {"label": "HTTP Method", "type": "string", "default": "POST"},
@@ -64,20 +63,67 @@ class WebhookBackend(AWXBaseEmailBackend, CustomNotificationBase):
         if self.http_method.lower() not in ['put', 'post']:
             raise ValueError("HTTP method must be either 'POST' or 'PUT'.")
         chosen_method = getattr(requests, self.http_method.lower(), None)

         for m in messages:
             auth = None
             if self.username or self.password:
                 auth = (self.username, self.password)
-            r = chosen_method(
-                "{}".format(m.recipients()[0]),
-                auth=auth,
-                data=json.dumps(m.body, ensure_ascii=False).encode('utf-8'),
-                headers=dict(list(get_awx_http_client_headers().items()) + list((self.headers or {}).items())),
-                verify=(not self.disable_ssl_verification),
-            )
-            if r.status_code >= 400:
-                logger.error(smart_str(_("Error sending notification webhook: {}").format(r.status_code)))
-                if not self.fail_silently:
-                    raise Exception(smart_str(_("Error sending notification webhook: {}").format(r.status_code)))
-            sent_messages += 1
+
+            # the constructor for EmailMessage - https://docs.djangoproject.com/en/4.1/_modules/django/core/mail/message will turn an empty dictionary to an empty string
+            # sometimes an empty dict is intentional and we added this conditional to enforce that
+            if not m.body:
+                m.body = {}
+
+            url = str(m.recipients()[0])
+            data = json.dumps(m.body, ensure_ascii=False).encode('utf-8')
+            headers = {**(get_awx_http_client_headers()), **(self.headers or {})}
+
+            err = None
+
+            for retries in range(self.MAX_RETRIES):
+                # Sometimes we hit redirect URLs. We must account for this. We still extract the redirect URL from the response headers and try again. Max retires == 5
+                resp = chosen_method(
+                    url=url,
+                    auth=auth,
+                    data=data,
+                    headers=headers,
+                    verify=(not self.disable_ssl_verification),
+                    allow_redirects=False,  # override default behaviour for redirects
+                )
+
+                # either success or error reached if this conditional fires
+                if resp.status_code not in [301, 307]:
+                    break
+
+                # we've hit a redirect. extract the redirect URL out of the first response header and try again
+                logger.warning(
+                    f"Received a {resp.status_code} from {url}, trying to reach redirect url {resp.headers.get('Location', None)}; attempt #{retries+1}"
+                )
+
+                # take the first redirect URL in the response header and try that
+                url = resp.headers.get("Location", None)
+
+                if url is None:
+                    err = f"Webhook notification received redirect to a blank URL from {url}. Response headers={resp.headers}"
+                    break
+            else:
+                # no break condition in the loop encountered; therefore we have hit the maximum number of retries
+                err = f"Webhook notification max number of retries [{self.MAX_RETRIES}] exceeded. Failed to send webhook notification to {url}"

+            if resp.status_code >= 400:
+                err = f"Error sending webhook notification: {resp.status_code}"
+
+            # log error message
+            if err:
+                logger.error(err)
+                if not self.fail_silently:
+                    raise Exception(err)
+
+            # no errors were encountered therefore we successfully sent off the notification webhook
+            if resp.status_code in range(200, 299):
+                logger.debug(f"Notification webhook successfully sent to {url}. Received {resp.status_code}")
+                sent_messages += 1

         return sent_messages
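Why the loop bothers with `allow_redirects=False`: by default, requests re-issues a 301/302-redirected POST as a GET (browser-compatible behavior), which would silently drop the notification body. Following the Location header by hand keeps the method and payload intact. A standalone sketch of the same pattern, with a placeholder URL:

```python
import requests

def post_following_redirects(url, data, max_retries=5):
    """Re-issue a POST manually on 301/307 so the method and body survive."""
    for _ in range(max_retries):
        resp = requests.post(url, data=data, allow_redirects=False)
        if resp.status_code not in (301, 307):
            return resp  # success or a genuine error; the caller decides
        url = resp.headers.get("Location")
        if url is None:
            raise RuntimeError("redirect response carried no Location header")
    raise RuntimeError(f"gave up after {max_retries} redirects")

# resp = post_following_redirects("http://webhook.example.invalid/notify", b"{}")
```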
@@ -3,6 +3,8 @@

 from django.db.models.signals import pre_save, post_save, pre_delete, m2m_changed

+from taggit.managers import TaggableManager
+

 class ActivityStreamRegistrar(object):
     def __init__(self):
@@ -19,6 +21,8 @@ class ActivityStreamRegistrar(object):
             pre_delete.connect(activity_stream_delete, sender=model, dispatch_uid=str(self.__class__) + str(model) + "_delete")

             for m2mfield in model._meta.many_to_many:
+                if isinstance(m2mfield, TaggableManager):
+                    continue  # Special case for taggit app
                 try:
                     m2m_attr = getattr(model, m2mfield.name)
                     m2m_changed.connect(
@@ -39,7 +39,7 @@ from awx.main.utils import (
     ScheduleTaskManager,
     ScheduleWorkflowManager,
 )
-from awx.main.utils.common import task_manager_bulk_reschedule
+from awx.main.utils.common import task_manager_bulk_reschedule, is_testing
 from awx.main.signals import disable_activity_stream
 from awx.main.constants import ACTIVE_STATES
 from awx.main.scheduler.dependency_graph import DependencyGraph
@@ -97,7 +97,7 @@ class TaskBase:
         self.all_tasks = [t for t in qs]

     def record_aggregate_metrics(self, *args):
-        if not settings.IS_TESTING():
+        if not is_testing():
             # increment task_manager_schedule_calls regardless if the other
             # metrics are recorded
             s_metrics.Metrics(auto_pipe_execute=True).inc(f"{self.prefix}__schedule_calls", 1)
@@ -2,8 +2,6 @@ import json
 import time
 import logging
 from collections import deque
-import os
-import stat

 # Django
 from django.conf import settings
@@ -206,21 +204,6 @@ class RunnerCallback:
             self.instance = self.update_model(self.instance.pk, job_args=json.dumps(runner_config.command), job_cwd=runner_config.cwd, job_env=job_env)
             # We opened a connection just for that save, close it here now
             connections.close_all()
-        elif status_data['status'] == 'failed':
-            # For encrypted ssh_key_data, ansible-runner worker will open and write the
-            # ssh_key_data to a named pipe. Then, once the podman container starts, ssh-agent will
-            # read from this named pipe so that the key can be used in ansible-playbook.
-            # Once the podman container exits, the named pipe is deleted.
-            # However, if the podman container fails to start in the first place, e.g. the image
-            # name is incorrect, then this pipe is not cleaned up. Eventually ansible-runner
-            # processor will attempt to write artifacts to the private data dir via unstream_dir, requiring
-            # that it open this named pipe. This leads to a hang. Thus, before any artifacts
-            # are written by the processor, it's important to remove this ssh_key_data pipe.
-            private_data_dir = self.instance.job_env.get('AWX_PRIVATE_DATA_DIR', None)
-            if private_data_dir:
-                key_data_file = os.path.join(private_data_dir, 'artifacts', str(self.instance.id), 'ssh_key_data')
-                if os.path.exists(key_data_file) and stat.S_ISFIFO(os.stat(key_data_file).st_mode):
-                    os.remove(key_data_file)
         elif status_data['status'] == 'error':
             result_traceback = status_data.get('result_traceback', None)
             if result_traceback:
@@ -767,6 +767,10 @@ class SourceControlMixin(BaseTask):

         try:
             original_branch = None
+            failed_reason = project.get_reason_if_failed()
+            if failed_reason:
+                self.update_model(self.instance.pk, status='failed', job_explanation=failed_reason)
+                raise RuntimeError(failed_reason)
             project_path = project.get_project_path(check_if_exists=False)
             if project.scm_type == 'git' and (scm_branch and scm_branch != project.scm_branch):
                 if os.path.exists(project_path):
@@ -1056,10 +1060,6 @@ class RunJob(SourceControlMixin, BaseTask):
             error = _('Job could not start because no Execution Environment could be found.')
             self.update_model(job.pk, status='error', job_explanation=error)
             raise RuntimeError(error)
-        elif job.project.status in ('error', 'failed'):
-            msg = _('The project revision for this job template is unknown due to a failed update.')
-            job = self.update_model(job.pk, status='failed', job_explanation=msg)
-            raise RuntimeError(msg)

         if job.inventory.kind == 'smart':
             # cache smart inventory memberships so that the host_filter query is not
@@ -208,7 +208,10 @@ def run_until_complete(node, timing_data=None, **kwargs):
             if state_name.lower() == 'failed':
                 work_detail = status.get('Detail', '')
                 if work_detail:
-                    raise RemoteJobError(f'Receptor error from {node}, detail:\n{work_detail}')
+                    if stdout:
+                        raise RemoteJobError(f'Receptor error from {node}, detail:\n{work_detail}\nstdout:\n{stdout}')
+                    else:
+                        raise RemoteJobError(f'Receptor error from {node}, detail:\n{work_detail}')
                 else:
                     raise RemoteJobError(f'Unknown ansible-runner error on node {node}, stdout:\n{stdout}')
@@ -7,7 +7,7 @@ from awx.main.models.ha import Instance
 from django.test.utils import override_settings


-INSTANCE_KWARGS = dict(hostname='example-host', cpu=6, memory=36000000000, cpu_capacity=6, mem_capacity=42)
+INSTANCE_KWARGS = dict(hostname='example-host', cpu=6, node_type='execution', memory=36000000000, cpu_capacity=6, mem_capacity=42)


 @pytest.mark.django_db
@@ -1,7 +1,7 @@
 import pytest
 from unittest import mock

-from awx.main.models import AdHocCommand, InventoryUpdate, JobTemplate
+from awx.main.models import AdHocCommand, InventoryUpdate, JobTemplate, Job
 from awx.main.models.activity_stream import ActivityStream
 from awx.main.models.ha import Instance, InstanceGroup
 from awx.main.tasks.system import apply_cluster_membership_policies
@@ -15,6 +15,24 @@ def test_default_tower_instance_group(default_instance_group, job_factory):
     assert default_instance_group in job_factory().preferred_instance_groups


+@pytest.mark.django_db
+@pytest.mark.parametrize('node_type', ('execution', 'control'))
+@pytest.mark.parametrize('active', (True, False))
+def test_get_cleanup_task_kwargs_active_jobs(node_type, active):
+    instance = Instance.objects.create(hostname='foobar', node_type=node_type)
+    job_kwargs = dict()
+    job_kwargs['controller_node' if node_type == 'control' else 'execution_node'] = instance.hostname
+    job_kwargs['status'] = 'running' if active else 'successful'
+
+    job = Job.objects.create(**job_kwargs)
+    kwargs = instance.get_cleanup_task_kwargs()
+
+    if active:
+        assert kwargs['exclude_strings'] == [f'awx_{job.pk}_']
+    else:
+        assert 'exclude_strings' not in kwargs
+
+
 @pytest.mark.django_db
 class TestPolicyTaskScheduling:
     """Tests make assertions about when the policy task gets scheduled"""
@@ -75,6 +75,7 @@ def test_encrypted_subfields(get, post, user, organization):
     url = reverse('api:notification_template_detail', kwargs={'pk': response.data['id']})
     response = get(url, u)
     assert response.data['notification_configuration']['account_token'] == "$encrypted$"
+
     with mock.patch.object(notification_template_actual.notification_class, "send_messages", assert_send):
         notification_template_actual.send("Test", {'body': "Test"})
@@ -175,3 +176,46 @@ def test_custom_environment_injection(post, user, organization):

     fake_send.side_effect = _send_side_effect
     template.send('subject', 'message')


+def mock_post(*args, **kwargs):
+    class MockGoodResponse:
+        def __init__(self):
+            self.status_code = 200
+
+    class MockRedirectResponse:
+        def __init__(self):
+            self.status_code = 301
+            self.headers = {"Location": "http://goodendpoint"}
+
+    if kwargs['url'] == "http://goodendpoint":
+        return MockGoodResponse()
+    else:
+        return MockRedirectResponse()
+
+
+@pytest.mark.django_db
+@mock.patch('requests.post', side_effect=mock_post)
+def test_webhook_notification_pointed_to_a_redirect_launch_endpoint(post, admin, organization):
+
+    n1 = NotificationTemplate.objects.create(
+        name="test-webhook",
+        description="test webhook",
+        organization=organization,
+        notification_type="webhook",
+        notification_configuration=dict(
+            url="http://some.fake.url",
+            disable_ssl_verification=True,
+            http_method="POST",
+            headers={
+                "Content-Type": "application/json",
+            },
+            username=admin.username,
+            password=admin.password,
+        ),
+        messages={
+            "success": {"message": "", "body": "{}"},
+        },
+    )
+
+    assert n1.send("", n1.messages.get("success").get("body")) == 1
@@ -27,11 +27,12 @@ def test_send_messages_as_POST():
         ]
     )
     requests_mock.post.assert_called_once_with(
-        'http://example.com',
+        url='http://example.com',
         auth=None,
         data=json.dumps({'text': 'test body'}, ensure_ascii=False).encode('utf-8'),
         headers={'Content-Type': 'application/json', 'User-Agent': 'AWX 0.0.1.dev (open)'},
         verify=True,
+        allow_redirects=False,
     )
     assert sent_messages == 1
@@ -57,11 +58,12 @@ def test_send_messages_as_PUT():
         ]
     )
     requests_mock.put.assert_called_once_with(
-        'http://example.com',
+        url='http://example.com',
         auth=None,
         data=json.dumps({'text': 'test body 2'}, ensure_ascii=False).encode('utf-8'),
         headers={'Content-Type': 'application/json', 'User-Agent': 'AWX 0.0.1.dev (open)'},
         verify=True,
+        allow_redirects=False,
     )
     assert sent_messages == 1
@@ -87,11 +89,12 @@ def test_send_messages_with_username():
         ]
     )
     requests_mock.post.assert_called_once_with(
-        'http://example.com',
+        url='http://example.com',
         auth=('userstring', None),
         data=json.dumps({'text': 'test body'}, ensure_ascii=False).encode('utf-8'),
         headers={'Content-Type': 'application/json', 'User-Agent': 'AWX 0.0.1.dev (open)'},
         verify=True,
+        allow_redirects=False,
    )
    assert sent_messages == 1
@@ -117,11 +120,12 @@ def test_send_messages_with_password():
         ]
     )
     requests_mock.post.assert_called_once_with(
-        'http://example.com',
+        url='http://example.com',
         auth=(None, 'passwordstring'),
         data=json.dumps({'text': 'test body'}, ensure_ascii=False).encode('utf-8'),
         headers={'Content-Type': 'application/json', 'User-Agent': 'AWX 0.0.1.dev (open)'},
         verify=True,
+        allow_redirects=False,
     )
     assert sent_messages == 1
@@ -147,11 +151,12 @@ def test_send_messages_with_username_and_password():
         ]
     )
     requests_mock.post.assert_called_once_with(
-        'http://example.com',
+        url='http://example.com',
         auth=('userstring', 'passwordstring'),
         data=json.dumps({'text': 'test body'}, ensure_ascii=False).encode('utf-8'),
         headers={'Content-Type': 'application/json', 'User-Agent': 'AWX 0.0.1.dev (open)'},
         verify=True,
+        allow_redirects=False,
     )
     assert sent_messages == 1
@@ -177,11 +182,12 @@ def test_send_messages_with_no_verify_ssl():
         ]
     )
     requests_mock.post.assert_called_once_with(
-        'http://example.com',
+        url='http://example.com',
         auth=None,
         data=json.dumps({'text': 'test body'}, ensure_ascii=False).encode('utf-8'),
         headers={'Content-Type': 'application/json', 'User-Agent': 'AWX 0.0.1.dev (open)'},
         verify=False,
+        allow_redirects=False,
     )
     assert sent_messages == 1
@@ -207,7 +213,7 @@ def test_send_messages_with_additional_headers():
         ]
     )
     requests_mock.post.assert_called_once_with(
-        'http://example.com',
+        url='http://example.com',
         auth=None,
         data=json.dumps({'text': 'test body'}, ensure_ascii=False).encode('utf-8'),
         headers={
@@ -217,5 +223,6 @@ def test_send_messages_with_additional_headers():
             'X-Test-Header2': 'test-content-2',
         },
         verify=True,
+        allow_redirects=False,
     )
     assert sent_messages == 1
@@ -11,11 +11,12 @@ import os
 import subprocess
 import re
 import stat
+import sys
 import urllib.parse
 import threading
 import contextlib
 import tempfile
-from functools import reduce, wraps
+import functools

 # Django
 from django.core.exceptions import ObjectDoesNotExist, FieldDoesNotExist
@@ -73,6 +74,7 @@ __all__ = [
     'NullablePromptPseudoField',
     'model_instance_diff',
     'parse_yaml_or_json',
+    'is_testing',
     'RequireDebugTrueOrTest',
     'has_model_field_prefetched',
     'set_environ',
@@ -144,6 +146,19 @@ def underscore_to_camelcase(s):
     return ''.join(x.capitalize() or '_' for x in s.split('_'))


+@functools.cache
+def is_testing(argv=None):
+    '''Return True if running django or py.test unit tests.'''
+    if 'PYTEST_CURRENT_TEST' in os.environ.keys():
+        return True
+    argv = sys.argv if argv is None else argv
+    if len(argv) >= 1 and ('py.test' in argv[0] or 'py/test.py' in argv[0]):
+        return True
+    elif len(argv) >= 2 and argv[1] == 'test':
+        return True
+    return False
+
+
 class RequireDebugTrueOrTest(logging.Filter):
     """
     Logging filter to output when in DEBUG mode or running tests.
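One subtlety of the relocated helper: `@functools.cache` (Python 3.9+) memoizes per distinct argument tuple, so the argv scan runs at most once per process for each way the function is called. A minimal demonstration:

```python
import functools

@functools.cache
def expensive_flag(key=None):
    print("computed for", key)  # printed only on the first call per argument
    return True

expensive_flag()        # computed for None
expensive_flag()        # cached; nothing printed
expensive_flag("argv")  # computed for argv (different argument -> new cache entry)
```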
@@ -152,7 +167,7 @@ class RequireDebugTrueOrTest(logging.Filter):
     def filter(self, record):
         from django.conf import settings

-        return settings.DEBUG or settings.IS_TESTING()
+        return settings.DEBUG or is_testing()


 class IllegalArgumentError(ValueError):
@@ -174,7 +189,7 @@ def memoize(ttl=60, cache_key=None, track_function=False, cache=None):
     cache = cache or get_memoize_cache()

     def memoize_decorator(f):
-        @wraps(f)
+        @functools.wraps(f)
         def _memoizer(*args, **kwargs):
             if track_function:
                 cache_dict_key = slugify('%r %r' % (args, kwargs))
@@ -992,7 +1007,7 @@ def getattrd(obj, name, default=NoDefaultProvided):
     """

     try:
-        return reduce(getattr, name.split("."), obj)
+        return functools.reduce(getattr, name.split("."), obj)
     except AttributeError:
         if default != NoDefaultProvided:
             return default
@@ -1188,7 +1203,7 @@ def cleanup_new_process(func):
     Cleanup django connection, cache connection, before executing new thread or processes entry point, func.
     """

-    @wraps(func)
+    @functools.wraps(func)
     def wrapper_cleanup_new_process(*args, **kwargs):
         from awx.conf.settings import SettingsWrapper  # noqa
@@ -1202,7 +1217,7 @@ def cleanup_new_process(func):

 def log_excess_runtime(func_logger, cutoff=5.0):
     def log_excess_runtime_decorator(func):
-        @wraps(func)
+        @functools.wraps(func)
         def _new_func(*args, **kwargs):
             start_time = time.time()
             return_value = func(*args, **kwargs)
@@ -110,7 +110,7 @@ if settings.COLOR_LOGS is True:
|
|||||||
# logs rendered with cyan text
|
# logs rendered with cyan text
|
||||||
previous_level_map = self.level_map.copy()
|
previous_level_map = self.level_map.copy()
|
||||||
if record.name == "awx.analytics.job_lifecycle":
|
if record.name == "awx.analytics.job_lifecycle":
|
||||||
self.level_map[logging.DEBUG] = (None, 'cyan', True)
|
self.level_map[logging.INFO] = (None, 'cyan', True)
|
||||||
msg = super(ColorHandler, self).colorize(line, record)
|
msg = super(ColorHandler, self).colorize(line, record)
|
||||||
self.level_map = previous_level_map
|
self.level_map = previous_level_map
|
||||||
return msg
|
return msg
|
||||||
|
+13 -34
@@ -10,28 +10,6 @@ import socket
 from datetime import timedelta


-# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
-BASE_DIR = os.path.dirname(os.path.dirname(__file__))
-
-
-def is_testing(argv=None):
-    import sys
-
-    '''Return True if running django or py.test unit tests.'''
-    if 'PYTEST_CURRENT_TEST' in os.environ.keys():
-        return True
-    argv = sys.argv if argv is None else argv
-    if len(argv) >= 1 and ('py.test' in argv[0] or 'py/test.py' in argv[0]):
-        return True
-    elif len(argv) >= 2 and argv[1] == 'test':
-        return True
-    return False
-
-
-def IS_TESTING(argv=None):
-    return is_testing(argv)
-
-
 if "pytest" in sys.modules:
     from unittest import mock

@@ -40,9 +18,13 @@ if "pytest" in sys.modules:
 else:
     import ldap


 DEBUG = True
 SQL_DEBUG = DEBUG

+# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
+BASE_DIR = os.path.dirname(os.path.dirname(__file__))
+
 # FIXME: it would be nice to cycle back around and allow this to be
 # BigAutoField going forward, but we'd have to be explicit about our
 # existing models.
@@ -101,7 +83,7 @@ USE_L10N = True

 USE_TZ = True

-STATICFILES_DIRS = (os.path.join(BASE_DIR, 'ui', 'build', 'static'), os.path.join(BASE_DIR, 'static'))
+STATICFILES_DIRS = [os.path.join(BASE_DIR, 'ui', 'build', 'static'), os.path.join(BASE_DIR, 'static')]

 # Absolute filesystem path to the directory where static file are collected via
 # the collectstatic command.
@@ -254,6 +236,14 @@ START_TASK_LIMIT = 100
 TASK_MANAGER_TIMEOUT = 300
 TASK_MANAGER_TIMEOUT_GRACE_PERIOD = 60

+# Number of seconds _in addition to_ the task manager timeout a job can stay
+# in waiting without being reaped
+JOB_WAITING_GRACE_PERIOD = 60
+
+# Number of seconds after a container group job finished time to wait
+# before the awx_k8s_reaper task will tear down the pods
+K8S_POD_REAPER_GRACE_PERIOD = 60
+
 # Disallow sending session cookies over insecure connections
 SESSION_COOKIE_SECURE = True

@@ -1004,16 +994,5 @@ DEFAULT_CONTAINER_RUN_OPTIONS = ['--network', 'slirp4netns:enable_ipv6=true']
 # Mount exposed paths as hostPath resource in k8s/ocp
 AWX_MOUNT_ISOLATED_PATHS_ON_K8S = False

-# Time out task managers if they take longer than this many seconds
-TASK_MANAGER_TIMEOUT = 300
-
-# Number of seconds _in addition to_ the task manager timeout a job can stay
-# in waiting without being reaped
-JOB_WAITING_GRACE_PERIOD = 60
-
-# Number of seconds after a container group job finished time to wait
-# before the awx_k8s_reaper task will tear down the pods
-K8S_POD_REAPER_GRACE_PERIOD = 60
-
 # This is overridden downstream via /etc/tower/conf.d/cluster_host_id.py
 CLUSTER_HOST_ID = socket.gethostname()
Generated +43 -43
@@ -7,9 +7,9 @@
     "name": "ui",
     "dependencies": {
       "@lingui/react": "3.14.0",
-      "@patternfly/patternfly": "4.210.2",
-      "@patternfly/react-core": "^4.239.0",
-      "@patternfly/react-icons": "4.90.0",
+      "@patternfly/patternfly": "4.217.1",
+      "@patternfly/react-core": "^4.250.1",
+      "@patternfly/react-icons": "4.92.10",
       "@patternfly/react-table": "4.108.0",
       "ace-builds": "^1.10.1",
       "ansi-to-html": "0.7.2",
@@ -3747,26 +3747,26 @@
       "dev": true
     },
     "node_modules/@patternfly/patternfly": {
-      "version": "4.210.2",
-      "resolved": "https://registry.npmjs.org/@patternfly/patternfly/-/patternfly-4.210.2.tgz",
-      "integrity": "sha512-aZiW24Bxi6uVmk5RyNTp+6q6ThtlJZotNRJfWVeGuwu1UlbBuV4DFa1bpjA6jfTZpfEpX2YL5+R+4ZVSCFAVdw=="
+      "version": "4.217.1",
+      "resolved": "https://registry.npmjs.org/@patternfly/patternfly/-/patternfly-4.217.1.tgz",
+      "integrity": "sha512-uN7JgfQsyR16YHkuGRCTIcBcnyKIqKjGkB2SGk9x1XXH3yYGenL83kpAavX9Xtozqp17KppOlybJuzcKvZMrgw=="
     },
     "node_modules/@patternfly/react-core": {
-      "version": "4.239.0",
-      "resolved": "https://registry.npmjs.org/@patternfly/react-core/-/react-core-4.239.0.tgz",
-      "integrity": "sha512-6CmYABCJLUXTlzCk6C3WouMNZpS0BCT+aHU8CvYpFQ/NrpYp3MJaDsYbqgCRWV42rmIO5iXun/4WhXBJzJEoQg==",
+      "version": "4.250.1",
+      "resolved": "https://registry.npmjs.org/@patternfly/react-core/-/react-core-4.250.1.tgz",
+      "integrity": "sha512-vAOZPQdZzYXl/vkHnHMIt1eC3nrPDdsuuErPatkNPwmSvilXuXmWP5wxoJ36FbSNRRURkprFwx52zMmWS3iHJA==",
       "dependencies": {
-        "@patternfly/react-icons": "^4.90.0",
-        "@patternfly/react-styles": "^4.89.0",
-        "@patternfly/react-tokens": "^4.91.0",
+        "@patternfly/react-icons": "^4.92.6",
+        "@patternfly/react-styles": "^4.91.6",
+        "@patternfly/react-tokens": "^4.93.6",
         "focus-trap": "6.9.2",
         "react-dropzone": "9.0.0",
         "tippy.js": "5.1.2",
         "tslib": "^2.0.0"
       },
       "peerDependencies": {
-        "react": "^16.8.0 || ^17.0.0",
-        "react-dom": "^16.8.0 || ^17.0.0"
+        "react": "^16.8 || ^17 || ^18",
+        "react-dom": "^16.8 || ^17 || ^18"
       }
     },
     "node_modules/@patternfly/react-core/node_modules/tslib": {
@@ -3775,18 +3775,18 @@
       "integrity": "sha512-77EbyPPpMz+FRFRuAFlWMtmgUWGe9UOG2Z25NqCwiIjRhOf5iKGuzSe5P2w1laq+FkRy4p+PCuVkJSGkzTEKVw=="
     },
     "node_modules/@patternfly/react-icons": {
-      "version": "4.90.0",
-      "resolved": "https://registry.npmjs.org/@patternfly/react-icons/-/react-icons-4.90.0.tgz",
-      "integrity": "sha512-qEnQKbxbUgyiosiKSkeKEBwmhgJwWEqniIAFyoxj+kpzAdeu7ueWe5iBbqo06mvDOedecFiM5mIE1N0MXwk8Yw==",
+      "version": "4.92.10",
+      "resolved": "https://registry.npmjs.org/@patternfly/react-icons/-/react-icons-4.92.10.tgz",
+      "integrity": "sha512-vwCy7b+OyyuvLDSLqLUG2DkJZgMDogjld8tJTdAaG8HiEhC1sJPZac+5wD7AuS3ym/sQolS4vYtNiVDnMEORxA==",
       "peerDependencies": {
-        "react": "^16.8.0 || ^17.0.0",
-        "react-dom": "^16.8.0 || ^17.0.0"
+        "react": "^16.8 || ^17 || ^18",
+        "react-dom": "^16.8 || ^17 || ^18"
       }
     },
     "node_modules/@patternfly/react-styles": {
-      "version": "4.89.0",
-      "resolved": "https://registry.npmjs.org/@patternfly/react-styles/-/react-styles-4.89.0.tgz",
-      "integrity": "sha512-SkT+qx3Xqu70T5s+i/AUT2hI2sKAPDX4ffeiJIUDu/oyWiFdk+/9DEivnLSyJMruroXXN33zKibvzb5rH7DKTQ=="
+      "version": "4.91.10",
+      "resolved": "https://registry.npmjs.org/@patternfly/react-styles/-/react-styles-4.91.10.tgz",
+      "integrity": "sha512-fAG4Vjp63ohiR92F4e/Gkw5q1DSSckHKqdnEF75KUpSSBORzYP0EKMpupSd6ItpQFJw3iWs3MJi3/KIAAfU1Jw=="
     },
     "node_modules/@patternfly/react-table": {
       "version": "4.108.0",
@@ -3811,9 +3811,9 @@
       "integrity": "sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ=="
     },
     "node_modules/@patternfly/react-tokens": {
-      "version": "4.91.0",
-      "resolved": "https://registry.npmjs.org/@patternfly/react-tokens/-/react-tokens-4.91.0.tgz",
-      "integrity": "sha512-QeQCy8o8E/16fAr8mxqXIYRmpTsjCHJXi5p5jmgEDFmYMesN6Pqfv6N5D0FHb+CIaNOZWRps7GkWvlIMIE81sw=="
+      "version": "4.93.10",
+      "resolved": "https://registry.npmjs.org/@patternfly/react-tokens/-/react-tokens-4.93.10.tgz",
+      "integrity": "sha512-F+j1irDc9M6zvY6qNtDryhbpnHz3R8ymHRdGelNHQzPTIK88YSWEnT1c9iUI+uM/iuZol7sJmO5STtg2aPIDRQ=="
     },
     "node_modules/@pmmmwh/react-refresh-webpack-plugin": {
       "version": "0.5.4",
@@ -25089,18 +25089,18 @@
       "dev": true
     },
     "@patternfly/patternfly": {
-      "version": "4.210.2",
-      "resolved": "https://registry.npmjs.org/@patternfly/patternfly/-/patternfly-4.210.2.tgz",
-      "integrity": "sha512-aZiW24Bxi6uVmk5RyNTp+6q6ThtlJZotNRJfWVeGuwu1UlbBuV4DFa1bpjA6jfTZpfEpX2YL5+R+4ZVSCFAVdw=="
+      "version": "4.217.1",
+      "resolved": "https://registry.npmjs.org/@patternfly/patternfly/-/patternfly-4.217.1.tgz",
+      "integrity": "sha512-uN7JgfQsyR16YHkuGRCTIcBcnyKIqKjGkB2SGk9x1XXH3yYGenL83kpAavX9Xtozqp17KppOlybJuzcKvZMrgw=="
     },
     "@patternfly/react-core": {
-      "version": "4.239.0",
-      "resolved": "https://registry.npmjs.org/@patternfly/react-core/-/react-core-4.239.0.tgz",
-      "integrity": "sha512-6CmYABCJLUXTlzCk6C3WouMNZpS0BCT+aHU8CvYpFQ/NrpYp3MJaDsYbqgCRWV42rmIO5iXun/4WhXBJzJEoQg==",
+      "version": "4.250.1",
+      "resolved": "https://registry.npmjs.org/@patternfly/react-core/-/react-core-4.250.1.tgz",
+      "integrity": "sha512-vAOZPQdZzYXl/vkHnHMIt1eC3nrPDdsuuErPatkNPwmSvilXuXmWP5wxoJ36FbSNRRURkprFwx52zMmWS3iHJA==",
       "requires": {
-        "@patternfly/react-icons": "^4.90.0",
-        "@patternfly/react-styles": "^4.89.0",
-        "@patternfly/react-tokens": "^4.91.0",
+        "@patternfly/react-icons": "^4.92.6",
+        "@patternfly/react-styles": "^4.91.6",
+        "@patternfly/react-tokens": "^4.93.6",
         "focus-trap": "6.9.2",
         "react-dropzone": "9.0.0",
         "tippy.js": "5.1.2",
@@ -25115,15 +25115,15 @@
       }
     },
     "@patternfly/react-icons": {
-      "version": "4.90.0",
-      "resolved": "https://registry.npmjs.org/@patternfly/react-icons/-/react-icons-4.90.0.tgz",
-      "integrity": "sha512-qEnQKbxbUgyiosiKSkeKEBwmhgJwWEqniIAFyoxj+kpzAdeu7ueWe5iBbqo06mvDOedecFiM5mIE1N0MXwk8Yw==",
+      "version": "4.92.10",
+      "resolved": "https://registry.npmjs.org/@patternfly/react-icons/-/react-icons-4.92.10.tgz",
+      "integrity": "sha512-vwCy7b+OyyuvLDSLqLUG2DkJZgMDogjld8tJTdAaG8HiEhC1sJPZac+5wD7AuS3ym/sQolS4vYtNiVDnMEORxA==",
       "requires": {}
     },
     "@patternfly/react-styles": {
-      "version": "4.89.0",
-      "resolved": "https://registry.npmjs.org/@patternfly/react-styles/-/react-styles-4.89.0.tgz",
-      "integrity": "sha512-SkT+qx3Xqu70T5s+i/AUT2hI2sKAPDX4ffeiJIUDu/oyWiFdk+/9DEivnLSyJMruroXXN33zKibvzb5rH7DKTQ=="
+      "version": "4.91.10",
+      "resolved": "https://registry.npmjs.org/@patternfly/react-styles/-/react-styles-4.91.10.tgz",
+      "integrity": "sha512-fAG4Vjp63ohiR92F4e/Gkw5q1DSSckHKqdnEF75KUpSSBORzYP0EKMpupSd6ItpQFJw3iWs3MJi3/KIAAfU1Jw=="
     },
     "@patternfly/react-table": {
       "version": "4.108.0",
@@ -25146,9 +25146,9 @@
       }
     },
     "@patternfly/react-tokens": {
-      "version": "4.91.0",
-      "resolved": "https://registry.npmjs.org/@patternfly/react-tokens/-/react-tokens-4.91.0.tgz",
-      "integrity": "sha512-QeQCy8o8E/16fAr8mxqXIYRmpTsjCHJXi5p5jmgEDFmYMesN6Pqfv6N5D0FHb+CIaNOZWRps7GkWvlIMIE81sw=="
+      "version": "4.93.10",
+      "resolved": "https://registry.npmjs.org/@patternfly/react-tokens/-/react-tokens-4.93.10.tgz",
+      "integrity": "sha512-F+j1irDc9M6zvY6qNtDryhbpnHz3R8ymHRdGelNHQzPTIK88YSWEnT1c9iUI+uM/iuZol7sJmO5STtg2aPIDRQ=="
     },
     "@pmmmwh/react-refresh-webpack-plugin": {
       "version": "0.5.4",
+3 -3
@@ -7,9 +7,9 @@
   },
   "dependencies": {
     "@lingui/react": "3.14.0",
-    "@patternfly/patternfly": "4.210.2",
-    "@patternfly/react-core": "^4.239.0",
-    "@patternfly/react-icons": "4.90.0",
+    "@patternfly/patternfly": "4.217.1",
+    "@patternfly/react-core": "^4.250.1",
+    "@patternfly/react-icons": "4.92.10",
     "@patternfly/react-table": "4.108.0",
     "ace-builds": "^1.10.1",
     "ansi-to-html": "0.7.2",
@@ -416,8 +416,14 @@ function ScheduleForm({

   if (options.end === 'onDate') {
     if (
-      DateTime.fromISO(values.startDate) >=
-      DateTime.fromISO(options.endDate)
+      DateTime.fromFormat(
+        `${values.startDate} ${values.startTime}`,
+        'yyyy-LL-dd h:mm a'
+      ).toMillis() >=
+      DateTime.fromFormat(
+        `${options.endDate} ${options.endTime}`,
+        'yyyy-LL-dd h:mm a'
+      ).toMillis()
     ) {
       freqErrors.endDate = t`Please select an end date/time that comes after the start date/time.`;
     }
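The validation fix above compares full start and end instants instead of bare dates, so an end time later on the same day is accepted. A Python analogue of the before/after comparison (the UI code uses Luxon; this sketch only illustrates the bug class, with made-up sample values):

```python
from datetime import datetime

start_date, start_time = '2022-09-01', '1:00 PM'
end_date, end_time = '2022-09-01', '2:00 PM'

# Old behavior: date-only comparison flags same-day schedules as invalid.
invalid_before = (
    datetime.strptime(start_date, '%Y-%m-%d')
    >= datetime.strptime(end_date, '%Y-%m-%d')
)
print(invalid_before)  # True -> error shown even though the end time is later

# New behavior: date+time comparison accepts a later time on the same day.
fmt = '%Y-%m-%d %I:%M %p'
invalid_after = (
    datetime.strptime(f'{start_date} {start_time}', fmt)
    >= datetime.strptime(f'{end_date} {end_time}', fmt)
)
print(invalid_after)  # False -> no error
```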
@@ -900,6 +900,36 @@ describe('<ScheduleForm />', () => {
     );
   });

+  test('should create schedule with the same start and end date provided that the end date is at a later time', async () => {
+    const today = DateTime.now().toFormat('yyyy-LL-dd');
+    const laterTime = DateTime.now().plus({ hours: 1 }).toFormat('h:mm a');
+    await act(async () => {
+      wrapper.find('DatePicker[aria-label="End date"]').prop('onChange')(
+        today,
+        new Date(today)
+      );
+    });
+    wrapper.update();
+    expect(
+      wrapper
+        .find('FormGroup[data-cy="schedule-End date/time"]')
+        .prop('helperTextInvalid')
+    ).toBe(
+      'Please select an end date/time that comes after the start date/time.'
+    );
+    await act(async () => {
+      wrapper.find('TimePicker[aria-label="End time"]').prop('onChange')(
+        laterTime
+      );
+    });
+    wrapper.update();
+    expect(
+      wrapper
+        .find('FormGroup[data-cy="schedule-End date/time"]')
+        .prop('helperTextInvalid')
+    ).toBe(undefined);
+  });
+
   test('error shown when on day number is not between 1 and 31', async () => {
     await act(async () => {
       wrapper.find('FrequencySelect#schedule-frequency').invoke('onChange')([
12 file diffs suppressed because they are too large
@@ -282,7 +282,7 @@ const mockInputSources = {
       summary_fields: {
         source_credential: {
           id: 20,
-          name: 'CyberArk Conjur Secret Lookup',
+          name: 'CyberArk Conjur Secrets Manager Lookup',
           description: '',
           kind: 'conjur',
           cloud: false,
@@ -301,7 +301,7 @@ const mockInputSources = {
       summary_fields: {
         source_credential: {
           id: 20,
-          name: 'CyberArk Conjur Secret Lookup',
+          name: 'CyberArk Conjur Secrets Manager Lookup',
           description: '',
           kind: 'conjur',
           cloud: false,
+2 -2
@@ -36,14 +36,14 @@ const mockCredentialTypeDetail = {
   url: '/api/v2/credential_types/20/',
   related: {
     named_url:
-      '/api/v2/credential_types/CyberArk Conjur Secret Lookup+external/',
+      '/api/v2/credential_types/CyberArk Conjur Secrets Manager Lookup+external/',
     credentials: '/api/v2/credential_types/20/credentials/',
     activity_stream: '/api/v2/credential_types/20/activity_stream/',
   },
   summary_fields: { user_capabilities: { edit: false, delete: false } },
   created: '2020-05-18T21:53:35.398260Z',
   modified: '2020-05-18T21:54:05.451444Z',
-  name: 'CyberArk Conjur Secret Lookup',
+  name: 'CyberArk Conjur Secrets Manager Lookup',
   description: '',
   kind: 'external',
   namespace: 'conjur',
@@ -546,7 +546,7 @@
   },
   "created": "2020-05-18T21:53:35.398260Z",
   "modified": "2020-05-18T21:54:05.451444Z",
-  "name": "CyberArk Conjur Secret Lookup",
+  "name": "CyberArk Conjur Secrets Manager Lookup",
   "description": "",
   "kind": "external",
   "namespace": "conjur",
@@ -3,7 +3,7 @@
   "type": "credential",
   "url": "/api/v2/credentials/1/",
   "related": {
-    "named_url": "/api/v2/credentials/CyberArk Conjur Secret Lookup++CyberArk Conjur Secret Lookup+external++/",
+    "named_url": "/api/v2/credentials/CyberArk Conjur Secrets Manager Lookup+external++/",
    "created_by": "/api/v2/users/1/",
    "modified_by": "/api/v2/users/1/",
    "activity_stream": "/api/v2/credentials/1/activity_stream/",
@@ -19,7 +19,7 @@
   "summary_fields": {
     "credential_type": {
       "id": 20,
-      "name": "CyberArk Conjur Secret Lookup",
+      "name": "CyberArk Conjur Secrets Manager Lookup",
       "description": ""
     },
     "created_by": {
@@ -69,7 +69,7 @@
   },
   "created": "2020-05-19T12:51:36.956029Z",
   "modified": "2020-05-19T12:51:36.956086Z",
-  "name": "CyberArk Conjur Secret Lookup",
+  "name": "CyberArk Conjur Secrets Manager Lookup",
   "description": "",
   "organization": null,
   "credential_type": 20,
@@ -62,7 +62,7 @@ const getStdOutValue = (hostEvent) => {
   ) {
     stdOut = res.results.join('\n');
   } else if (res?.stdout) {
-    stdOut = res.stdout;
+    stdOut = Array.isArray(res.stdout) ? res.stdout.join(' ') : res.stdout;
   }
   return stdOut;
 };
@@ -70,7 +70,6 @@ const getStdOutValue = (hostEvent) => {
 function HostEventModal({ onClose, hostEvent = {}, isOpen = false }) {
   const [hostStatus, setHostStatus] = useState(null);
   const [activeTabKey, setActiveTabKey] = useState(0);
-
   useEffect(() => {
     setHostStatus(processEventStatus(hostEvent));
   }, [setHostStatus, hostEvent]);
@@ -108,11 +107,11 @@ function HostEventModal({ onClose, hostEvent = {}, isOpen = false }) {
             style={{ alignItems: 'center', marginTop: '20px' }}
             gutter="sm"
           >
-            <Detail label={t`Host`} value={hostEvent.host_name} />
-            {hostEvent.summary_fields.host?.description ? (
+            <Detail label={t`Host`} value={hostEvent.event_data?.host} />
+            {hostEvent.summary_fields?.host?.description ? (
               <Detail
                 label={t`Description`}
-                value={hostEvent.summary_fields.host.description}
+                value={hostEvent.summary_fields?.host?.description}
               />
             ) : null}
             {hostStatus ? (
@@ -125,12 +124,9 @@ function HostEventModal({ onClose, hostEvent = {}, isOpen = false }) {
             <Detail label={t`Task`} value={hostEvent.task} />
             <Detail
               label={t`Module`}
-              value={hostEvent.event_data.task_action || t`No result found`}
-            />
-            <Detail
-              label={t`Command`}
-              value={hostEvent?.event_data?.res?.cmd}
+              value={hostEvent.event_data?.task_action || t`No result found`}
             />
+            <Detail label={t`Command`} value={hostEvent.event_data?.res?.cmd} />
           </DetailList>
         </Tab>
         <Tab
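The `getStdOutValue` change guards against modules that report `stdout` as a list of strings rather than a single string (see the cisco.ios example cited in the test file below). A language-neutral sketch of that normalization:

```python
# Sketch of the stdout normalization: join list-valued stdout before display.
def get_stdout(res):
    stdout = res.get('stdout')
    if isinstance(stdout, list):
        return ' '.join(stdout)
    return stdout or ''


print(get_stdout({'stdout': ['Mem: 7973', 'Swap: 1023']}))  # 'Mem: 7973 Swap: 1023'
print(get_stdout({'stdout': 'plain string'}))               # 'plain string'
print(get_stdout({}))                                       # ''
```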
@@ -52,6 +52,47 @@ const hostEvent = {
   },
 };

+const partialHostEvent = {
+  changed: true,
+  event: 'runner_on_ok',
+  event_data: {
+    host: 'foo',
+    play: 'all',
+    playbook: 'run_command.yml',
+    res: {
+      ansible_loop_var: 'item',
+      changed: true,
+      item: '1',
+      msg: 'This is a debug message: 1',
+      stdout:
+        ' total used free shared buff/cache available\nMem: 7973 3005 960 30 4007 4582\nSwap: 1023 0 1023',
+      stderr: 'problems',
+      cmd: ['free', '-m'],
+      stderr_lines: [],
+      stdout_lines: [
+        ' total used free shared buff/cache available',
+        'Mem: 7973 3005 960 30 4007 4582',
+        'Swap: 1023 0 1023',
+      ],
+    },
+    task: 'command',
+    task_action: 'command',
+  },
+  event_display: 'Host OK',
+  event_level: 3,
+  failed: false,
+  host: 1,
+  id: 123,
+  job: 4,
+  play: 'all',
+  playbook: 'run_command.yml',
+  stdout: `stdout: "[0;33mchanged: [localhost] => {"changed": true, "cmd": ["free", "-m"], "delta": "0:00:01.479609", "end": "2019-09-10 14:21:45.469533", "rc": 0, "start": "2019-09-10 14:21:43.989924", "stderr": "", "stderr_lines": [], "stdout": " total used free shared buff/cache available\nMem: 7973 3005 960 30 4007 4582\nSwap: 1023 0 1023", "stdout_lines": [" total used free shared buff/cache available", "Mem: 7973 3005 960 30 4007 4582", "Swap: 1023 0 1023"]}[0m"
+`,
+  task: 'command',
+  type: 'job_event',
+  url: '/api/v2/job_events/123/',
+};
+
 /*
   Some libraries return a list of string in stdout
   Example: https://github.com/ansible-collections/cisco.ios/blob/main/plugins/modules/ios_command.py#L124-L128
@@ -134,6 +175,13 @@ describe('HostEventModal', () => {
     expect(wrapper).toHaveLength(1);
   });

+  test('renders successfully with partial data', () => {
+    const wrapper = shallow(
+      <HostEventModal hostEvent={partialHostEvent} onClose={() => {}} />
+    );
+    expect(wrapper).toHaveLength(1);
+  });
+
   test('should render all tabs', () => {
     const wrapper = shallow(
       <HostEventModal hostEvent={hostEvent} onClose={() => {}} isOpen />
@@ -114,7 +114,12 @@ def main():
     # Update the project
     result = module.post_endpoint(project['related']['update'])

-    if result['status_code'] != 202:
+    if result['status_code'] == 405:
+        module.fail_json(
+            msg="Unable to trigger a project update because the project scm_type ({0}) does not support it.".format(project['scm_type']),
+            response=result
+        )
+    elif result['status_code'] != 202:
         module.fail_json(msg="Failed to update project, see response for details", response=result)

     module.json_output['changed'] = True
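The module change above distinguishes a 405 (the update endpoint rejects the POST, e.g. for a project whose `scm_type` does not support updates) from other non-202 failures. A stand-in sketch of the control flow (`module.fail_json` and `post_endpoint` belong to the AWX collection's module utils; this sketch uses plain exceptions instead):

```python
# Illustrative control flow only; not the module's actual API surface.
def handle_update_response(result, scm_type):
    if result['status_code'] == 405:
        raise RuntimeError(
            'Unable to trigger a project update because the project '
            'scm_type ({0}) does not support it.'.format(scm_type)
        )
    elif result['status_code'] != 202:
        raise RuntimeError('Failed to update project, see response for details')
    return True


print(handle_update_response({'status_code': 202}, 'git'))  # True
```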
@@ -275,7 +275,13 @@ class ApiV2(base.Base):
                 # When creating a project, we need to wait for its
                 # first project update to finish so that associated
                 # JTs have valid options for playbook names
-                _page.wait_until_completed()
+                try:
+                    _page.wait_until_completed(timeout=300)
+                except AssertionError:
+                    # If the project update times out, try to
+                    # carry on in the hopes that it will
+                    # finish before it is needed.
+                    pass
             else:
                 # If we are an existing project and our scm_tpye is not changing don't try and import the local_path setting
                 if asset['natural_key']['type'] == 'project' and 'local_path' in post_data and _page['scm_type'] == post_data['scm_type']:
@@ -1,6 +1,7 @@
 from contextlib import suppress

-from awxkit.api.pages import SystemJobTemplate
+from awxkit.api.pages import JobTemplate, SystemJobTemplate, Project, InventorySource
+from awxkit.api.pages.workflow_job_templates import WorkflowJobTemplate
 from awxkit.api.mixins import HasCreate
 from awxkit.api.resources import resources
 from awxkit.config import config
@@ -11,7 +12,7 @@ from . import base


 class Schedule(HasCreate, base.Base):
-    dependencies = [SystemJobTemplate]
+    dependencies = [JobTemplate, SystemJobTemplate, Project, InventorySource, WorkflowJobTemplate]
     NATURAL_KEY = ('unified_job_template', 'name')

     def silent_delete(self):
@@ -52,6 +52,7 @@ html_static_path = ['_static']

 rst_epilog = '''
 .. |prog| replace:: awx
-.. |at| replace:: Ansible Tower
-.. |RHAT| replace:: Red Hat Ansible Tower
+.. |at| replace:: automation controller
+.. |At| replace:: Automation controller
+.. |RHAT| replace:: Red Hat Ansible Automation Platform controller
 '''
@@ -197,8 +197,10 @@ def parse_resource(client, skip_deprecated=False):

     if hasattr(client, 'v2'):
         for k in client.v2.json.keys():
-            if k in ('dashboard',):
-                # the Dashboard API is deprecated and not supported
+            if k in ('dashboard', 'config'):
+                # - the Dashboard API is deprecated and not supported
+                # - the Config command is already dealt with by the
+                #   CustomCommand section above
                 continue

             # argparse aliases are *only* supported in Python3 (not 2.7)
@@ -1,25 +1,22 @@
 # Dependency Management

-The `requirements.txt` file is generated from `requirements.in`, using `pip-tools` `pip-compile`.
+The `requirements.txt` file is generated from `requirements.in` and `requirements_git.txt`, using `pip-tools` and `pip-compile`.

 ## How To Use

-Commands should be run from inside the `./requirements` directory of the awx repository.
+Commands should be run in the awx container from inside the `./requirements` directory of the awx repository.

 ### Upgrading or Adding Select Libraries

 If you need to add or upgrade one targeted library, then modify `requirements.in`,
 then run the script:

-`./updater.sh`
+`./updater.sh run`

-NOTE: `./updater.sh` uses /usr/bin/python3.6, to match the current python version
-(3.6) used to build releases.
-
 #### Upgrading Unpinned Dependency

 If you require a new version of a dependency that does not have a pinned version
-for a fix or feature, pin a minimum version and run `./updater.sh`. For example,
+for a fix or feature, pin a minimum version in `requirements.in` and run `./updater.sh run`. For example,
 replace the line `asgi-amqp` with `asgi-amqp>=1.1.4`, and consider leaving a
 note.
+38 -2
@@ -33,11 +33,47 @@ generate_requirements() {

 main() {
     base_dir=$(pwd)
-    _tmp="$(mktemp -d --suffix .awx-requirements XXXX -p /tmp)"
+
+    _tmp=$(python -c "import tempfile; print(tempfile.mkdtemp(suffix='.awx-requirements', dir='/tmp'))")

     trap _cleanup INT TERM EXIT

-    if [ "$1" = "upgrade" ]; then
+    case $1 in
+        "run")
+            NEEDS_HELP=0
+        ;;
+        "upgrade")
+            NEEDS_HELP=0
             pip_compile="${pip_compile} --upgrade"
+        ;;
+        "help")
+            NEEDS_HELP=1
+        ;;
+        *)
+            echo ""
+            echo "ERROR: Parameter $1 not valid"
+            echo ""
+            NEEDS_HELP=1
+        ;;
+    esac
+
+    if [[ "$NEEDS_HELP" == "1" ]] ; then
+        echo "This script generates requirements.txt from requirements.in and requirements_git.in"
+        echo "It should be run from within the awx container"
+        echo ""
+        echo "Usage: $0 [run|upgrade]"
+        echo ""
+        echo "Commands:"
+        echo "help      Print this message"
+        echo "run       Run the process only upgrading pinned libraries from requirements.in"
+        echo "upgrade   Upgrade all libraries to latest while respecting pinnings"
+        echo ""
+        exit
+    fi
+
+    if [[ ! -d /awx_devel ]] ; then
+        echo "This script should be run inside the awx container"
+        exit
     fi

     cp -vf requirements.txt "${_tmp}"
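The `updater.sh` hunk swaps GNU `mktemp` for Python's `tempfile`, presumably for portability across base images (that rationale is an inference; the commit does not state it). The equivalent standalone call:

```python
# Equivalent of the new scratch-directory creation in updater.sh.
import tempfile

tmp = tempfile.mkdtemp(suffix='.awx-requirements', dir='/tmp')
print(tmp)  # e.g. /tmp/tmpab12cd34.awx-requirements
```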
@@ -9,4 +9,4 @@ template_dest: '_build'
 receptor_image: quay.io/ansible/receptor:devel

 # Helper vars to construct the proper download URL for the current architecture
-image_architecture: '{{ { "x86_64": "amd64", "aarch64": "arm64", "armv7": "arm", "ppc64le": "ppc64le" }[ansible_facts.architecture] }}'
+image_architecture: '{{ { "x86_64": "amd64", "aarch64": "arm64", "armv7": "arm", "arm64": "arm64", "ppc64le": "ppc64le" }[ansible_facts.architecture] }}'
@@ -9,8 +9,8 @@ addons:
 minikube_url_linux: 'https://storage.googleapis.com/minikube/releases/latest/minikube-linux-amd64'
 minikube_url_macos: 'https://storage.googleapis.com/minikube/releases/latest/minikube-darwin-amd64'

-kubectl_url_linux: 'https://dl.k8s.io/release/v1.21.0/bin/linux/amd64/kubectl'
-kubectl_url_macos: 'https://dl.k8s.io/release/v1.21.0/bin/darwin/amd64/kubectl'
+kubectl_url_linux: 'https://dl.k8s.io/release/v1.25.0/bin/linux/amd64/kubectl'
+kubectl_url_macos: 'https://dl.k8s.io/release/v1.25.0/bin/darwin/amd64/kubectl'

 # Service Account Name
 minikube_service_account_name: 'awx-devel'
@@ -8,6 +8,10 @@
     state: 'directory'
     mode: '0700'

+- name: debug minikube_setup
+  debug:
+    var: minikube_setup
+
 # Linux block
 - block:
     - name: Download Minikube
@@ -24,6 +28,7 @@
   when:
     - ansible_architecture == "x86_64"
     - ansible_system == "Linux"
+    - minikube_setup | default(False) | bool

 # MacOS block
 - block:
@@ -41,25 +46,29 @@
   when:
     - ansible_architecture == "x86_64"
     - ansible_system == "Darwin"
+    - minikube_setup | default(False) | bool

-- name: Starting Minikube
-  shell: "{{ sources_dest }}/minikube start --driver={{ driver }} --install-addons=true --addons={{ addons | join(',') }}"
-  register: minikube_stdout
+- block:
+    - name: Starting Minikube
+      shell: "{{ sources_dest }}/minikube start --driver={{ driver }} --install-addons=true --addons={{ addons | join(',') }}"
+      register: minikube_stdout

-- name: Enable Ingress Controller on Minikube
-  shell: "{{ sources_dest }}/minikube addons enable ingress"
-  when:
-    - minikube_stdout.rc == 0
-  register: _minikube_ingress
-  ignore_errors: true
+    - name: Enable Ingress Controller on Minikube
+      shell: "{{ sources_dest }}/minikube addons enable ingress"
+      when:
+        - minikube_stdout.rc == 0
+      register: _minikube_ingress
+      ignore_errors: true

-- name: Show Minikube Ingress known-issue 7332 warning
-  pause:
-    seconds: 5
-    prompt: "The Minikube Ingress addon has been disabled since it looks like you are hitting https://github.com/kubernetes/minikube/issues/7332"
-  when:
-    - '"minikube/issues/7332" in _minikube_ingress.stderr'
-    - ansible_system == "Darwin"
+    - name: Show Minikube Ingress known-issue 7332 warning
+      pause:
+        seconds: 5
+        prompt: "The Minikube Ingress addon has been disabled since it looks like you are hitting https://github.com/kubernetes/minikube/issues/7332"
+      when:
+        - '"minikube/issues/7332" in _minikube_ingress.stderr'
+        - ansible_system == "Darwin"
+  when:
+    - minikube_setup | default(False) | bool

 - name: Create ServiceAccount and clusterRoleBinding
   k8s:
@@ -28,7 +28,7 @@ Here are the main `make` targets:
 Notable files:

 - `tools/docker-compose/inventory` file - used to configure the AWX development environment.
-- `migrate.yml` - playbook for migrating data from Local Docker to the Development Environment
+- `tools/docker-compose/ansible/migrate.yml` - playbook for migrating data from Local Docker to the Development Environment

 ### Prerequisites

@@ -301,11 +301,19 @@ Note that you may see multiple messages of the form `2021-03-04 20:11:47,666 WAR

 To bring up a 1 node AWX + minikube that is accessible from AWX run the following.

+Start minikube
+
+```bash
+(host)$ minikube start --cpus=4 --memory=8g --addons=ingress
+```
+
+Start AWX
+
 ```bash
 (host)$ make docker-compose-container-group
 ```

-Alternatively, you can set the env var `MINIKUBE_CONTAINER_GROUP=true` to use the default dev env bring up. his way you can use other env flags like the cluster node count.
+Alternatively, you can set the env var `MINIKUBE_CONTAINER_GROUP=true` to use the default dev env bring up. This way you can use other env flags like the cluster node count. Set `MINIKUBE_SETUP=true` to make the roles download, install and run minikube for you, but if you run into issues with this just start minikube yourself.

 ```bash
 (host)$ MINIKUBE_CONTAINER_GROUP=true make docker-compose
@@ -19,6 +19,9 @@ else
   wait-for-migrations
 fi

+# Make sure that the UI static file directory exists, Django complains otherwise.
+mkdir -p /awx_devel/awx/ui/build/static
+
 if output=$(awx-manage createsuperuser --noinput --username=admin --email=admin@localhost 2> /dev/null); then
   echo $output
 fi
@@ -27,10 +30,6 @@ echo "Admin password: ${DJANGO_SUPERUSER_PASSWORD}"
 awx-manage create_preload_data
 awx-manage register_default_execution_environments

-mkdir -p /awx_devel/awx/public/static
-mkdir -p /awx_devel/awx/ui/static
-mkdir -p /awx_devel/awx/ui/build/static
-
 awx-manage provision_instance --hostname="$(hostname)" --node_type="$MAIN_NODE_TYPE"
 awx-manage register_queue --queuename=controlplane --instance_percent=100
 awx-manage register_queue --queuename=default --instance_percent=100