diff --git a/awx/api/serializers.py b/awx/api/serializers.py index fdddc9ba22..0bbdfa35c6 100644 --- a/awx/api/serializers.py +++ b/awx/api/serializers.py @@ -1285,8 +1285,8 @@ class OrganizationSerializer(BaseSerializer): class ProjectOptionsSerializer(BaseSerializer): class Meta: - fields = ('*', 'local_path', 'scm_type', 'scm_url', 'scm_branch', - 'scm_clean', 'scm_delete_on_update', 'credential', 'timeout',) + fields = ('*', 'local_path', 'scm_type', 'scm_url', 'scm_branch', 'scm_refspec', + 'scm_clean', 'scm_delete_on_update', 'credential', 'timeout', 'scm_revision') def get_related(self, obj): res = super(ProjectOptionsSerializer, self).get_related(obj) @@ -1311,6 +1311,8 @@ class ProjectOptionsSerializer(BaseSerializer): attrs.pop('local_path', None) if 'local_path' in attrs and attrs['local_path'] not in valid_local_paths: errors['local_path'] = _('This path is already being used by another manual project.') + if attrs.get('scm_refspec') and scm_type != 'git': + errors['scm_refspec'] = _('SCM refspec can only be used with git projects.') if errors: raise serializers.ValidationError(errors) @@ -1338,7 +1340,7 @@ class ProjectSerializer(UnifiedJobTemplateSerializer, ProjectOptionsSerializer): class Meta: model = Project fields = ('*', 'organization', 'scm_update_on_launch', - 'scm_update_cache_timeout', 'scm_revision', 'custom_virtualenv',) + \ + 'scm_update_cache_timeout', 'allow_override', 'custom_virtualenv',) + \ ('last_update_failed', 'last_updated') # Backwards compatibility def get_related(self, obj): @@ -1388,6 +1390,21 @@ class ProjectSerializer(UnifiedJobTemplateSerializer, ProjectOptionsSerializer): elif self.instance: organization = self.instance.organization + if 'allow_override' in attrs and self.instance: + # case where user is turning off this project setting + if self.instance.allow_override and not attrs['allow_override']: + used_by = set( + JobTemplate.objects.filter( + models.Q(project=self.instance), + 
models.Q(ask_scm_branch_on_launch=True) | ~models.Q(scm_branch="") + ).values_list('pk', flat=True) + ) + if used_by: + raise serializers.ValidationError({ + 'allow_override': _('One or more job templates depend on branch override behavior for this project (ids: {}).').format( + ' '.join([str(pk) for pk in used_by]) + )}) + view = self.context.get('view', None) if not organization and not view.request.user.is_superuser: # Only allow super users to create orgless projects @@ -2701,7 +2718,7 @@ class LabelsListMixin(object): class JobOptionsSerializer(LabelsListMixin, BaseSerializer): class Meta: - fields = ('*', 'job_type', 'inventory', 'project', 'playbook', + fields = ('*', 'job_type', 'inventory', 'project', 'playbook', 'scm_branch', 'forks', 'limit', 'verbosity', 'extra_vars', 'job_tags', 'force_handlers', 'skip_tags', 'start_at_task', 'timeout', 'use_fact_cache',) @@ -2748,16 +2765,28 @@ class JobOptionsSerializer(LabelsListMixin, BaseSerializer): def validate(self, attrs): if 'project' in self.fields and 'playbook' in self.fields: - project = attrs.get('project', self.instance and self.instance.project or None) + project = attrs.get('project', self.instance.project if self.instance else None) playbook = attrs.get('playbook', self.instance and self.instance.playbook or '') + scm_branch = attrs.get('scm_branch', self.instance.scm_branch if self.instance else None) + ask_scm_branch_on_launch = attrs.get( + 'ask_scm_branch_on_launch', self.instance.ask_scm_branch_on_launch if self.instance else None) if not project: raise serializers.ValidationError({'project': _('This field is required.')}) - if project and project.scm_type and playbook and force_text(playbook) not in project.playbook_files: - raise serializers.ValidationError({'playbook': _('Playbook not found for project.')}) - if project and not project.scm_type and playbook and force_text(playbook) not in project.playbooks: + playbook_not_found = bool( + ( + project and project.scm_type and (not 
project.allow_override) and + playbook and force_text(playbook) not in project.playbook_files + ) or + (project and not project.scm_type and playbook and force_text(playbook) not in project.playbooks) # manual + ) + if playbook_not_found: raise serializers.ValidationError({'playbook': _('Playbook not found for project.')}) if project and not playbook: raise serializers.ValidationError({'playbook': _('Must select playbook for project.')}) + if scm_branch and not project.allow_override: + raise serializers.ValidationError({'scm_branch': _('Project does not allow overriding branch.')}) + if ask_scm_branch_on_launch and not project.allow_override: + raise serializers.ValidationError({'ask_scm_branch_on_launch': _('Project does not allow overriding branch.')}) ret = super(JobOptionsSerializer, self).validate(attrs) return ret @@ -2799,7 +2828,8 @@ class JobTemplateSerializer(JobTemplateMixin, UnifiedJobTemplateSerializer, JobO class Meta: model = JobTemplate - fields = ('*', 'host_config_key', 'ask_diff_mode_on_launch', 'ask_variables_on_launch', 'ask_limit_on_launch', 'ask_tags_on_launch', + fields = ('*', 'host_config_key', 'ask_scm_branch_on_launch', 'ask_diff_mode_on_launch', 'ask_variables_on_launch', + 'ask_limit_on_launch', 'ask_tags_on_launch', 'ask_skip_tags_on_launch', 'ask_job_type_on_launch', 'ask_verbosity_on_launch', 'ask_inventory_on_launch', 'ask_credential_on_launch', 'survey_enabled', 'become_enabled', 'diff_mode', 'allow_simultaneous', 'custom_virtualenv', 'job_slice_count') @@ -3365,6 +3395,7 @@ class WorkflowJobCancelSerializer(WorkflowJobSerializer): class LaunchConfigurationBaseSerializer(BaseSerializer): + scm_branch = serializers.CharField(allow_blank=True, allow_null=True, required=False, default=None) job_type = serializers.ChoiceField(allow_blank=True, allow_null=True, required=False, default=None, choices=NEW_JOB_TYPE_CHOICES) job_tags = serializers.CharField(allow_blank=True, allow_null=True, required=False, default=None) @@ -3377,7 +3408,7 
@@ class LaunchConfigurationBaseSerializer(BaseSerializer): class Meta: fields = ('*', 'extra_data', 'inventory', # Saved launch-time config fields - 'job_type', 'job_tags', 'skip_tags', 'limit', 'skip_tags', 'diff_mode', 'verbosity') + 'scm_branch', 'job_type', 'job_tags', 'skip_tags', 'limit', 'diff_mode', 'verbosity') def get_related(self, obj): res = super(LaunchConfigurationBaseSerializer, self).get_related(obj) @@ -3960,6 +3991,7 @@ class JobLaunchSerializer(BaseSerializer): required=False, write_only=True ) credential_passwords = VerbatimField(required=False, write_only=True) + scm_branch = serializers.CharField(required=False, write_only=True, allow_blank=True) diff_mode = serializers.BooleanField(required=False, write_only=True) job_tags = serializers.CharField(required=False, write_only=True, allow_blank=True) job_type = serializers.ChoiceField(required=False, choices=NEW_JOB_TYPE_CHOICES, write_only=True) @@ -3970,13 +4002,15 @@ class JobLaunchSerializer(BaseSerializer): class Meta: model = JobTemplate fields = ('can_start_without_user_input', 'passwords_needed_to_start', - 'extra_vars', 'inventory', 'limit', 'job_tags', 'skip_tags', 'job_type', 'verbosity', 'diff_mode', - 'credentials', 'credential_passwords', 'ask_variables_on_launch', 'ask_tags_on_launch', + 'extra_vars', 'inventory', 'scm_branch', 'limit', 'job_tags', 'skip_tags', 'job_type', 'verbosity', 'diff_mode', + 'credentials', 'credential_passwords', + 'ask_scm_branch_on_launch', 'ask_variables_on_launch', 'ask_tags_on_launch', 'ask_diff_mode_on_launch', 'ask_skip_tags_on_launch', 'ask_job_type_on_launch', 'ask_limit_on_launch', 'ask_verbosity_on_launch', 'ask_inventory_on_launch', 'ask_credential_on_launch', 'survey_enabled', 'variables_needed_to_start', 'credential_needed_to_start', 'inventory_needed_to_start', 'job_template_data', 'defaults', 'verbosity') read_only_fields = ( + 'ask_scm_branch_on_launch', 'ask_diff_mode_on_launch', 'ask_variables_on_launch', 
'ask_limit_on_launch', 'ask_tags_on_launch', 'ask_skip_tags_on_launch', 'ask_job_type_on_launch', 'ask_verbosity_on_launch', 'ask_inventory_on_launch', 'ask_credential_on_launch',) diff --git a/awx/main/conf.py b/awx/main/conf.py index 7db0737acf..18f6e4026a 100644 --- a/awx/main/conf.py +++ b/awx/main/conf.py @@ -328,6 +328,16 @@ register( category_slug='jobs', ) +register( + 'AWX_COLLECTIONS_ENABLED', + field_class=fields.BooleanField, + default=True, + label=_('Enable Collection(s) Download'), + help_text=_('Allows collections to be dynamically downloaded from a requirements.yml file for SCM projects.'), + category=_('Jobs'), + category_slug='jobs', +) + register( 'STDOUT_MAX_BYTES_DISPLAY', field_class=fields.IntegerField, diff --git a/awx/main/migrations/0083_v360_job_branch_overrirde.py b/awx/main/migrations/0083_v360_job_branch_overrirde.py new file mode 100644 index 0000000000..4e1b00b4a7 --- /dev/null +++ b/awx/main/migrations/0083_v360_job_branch_overrirde.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Generated by Django 1.11.20 on 2019-06-14 15:08 +from __future__ import unicode_literals + +import awx.main.fields +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('main', '0082_v360_webhook_http_method'), + ] + + operations = [ + # Add fields for user-provided project refspec + migrations.AddField( + model_name='project', + name='scm_refspec', + field=models.CharField(blank=True, default='', help_text='For git projects, an additional refspec to fetch.', max_length=1024, verbose_name='SCM refspec'), + ), + migrations.AddField( + model_name='projectupdate', + name='scm_refspec', + field=models.CharField(blank=True, default='', help_text='For git projects, an additional refspec to fetch.', max_length=1024, verbose_name='SCM refspec'), + ), + # Add fields for job specification of project branch + migrations.AddField( + model_name='job', + name='scm_branch', + field=models.CharField(blank=True, 
default='', help_text='Branch to use in job run. Project default used if blank. Only allowed if project allow_override field is set to true.', max_length=1024), + ), + migrations.AddField( + model_name='jobtemplate', + name='ask_scm_branch_on_launch', + field=awx.main.fields.AskForField(blank=True, default=False), + ), + migrations.AddField( + model_name='jobtemplate', + name='scm_branch', + field=models.CharField(blank=True, default='', help_text='Branch to use in job run. Project default used if blank. Only allowed if project allow_override field is set to true.', max_length=1024), + ), + migrations.AddField( + model_name='project', + name='allow_override', + field=models.BooleanField(default=False, help_text='Allow changing the SCM branch or revision in a job template that uses this project.'), + ), + # Fix typo in help_text + migrations.AlterField( + model_name='project', + name='scm_update_cache_timeout', + field=models.PositiveIntegerField(blank=True, default=0, help_text='The number of seconds after the last project update ran that a new project update will be launched as a job dependency.'), + ), + # Start tracking the fetched revision on project update model + migrations.AddField( + model_name='projectupdate', + name='scm_revision', + field=models.CharField(blank=True, default='', editable=False, help_text='The SCM Revision discovered by this update for the given project and branch.', max_length=1024, verbose_name='SCM Revision'), + ), + ] diff --git a/awx/main/models/jobs.py b/awx/main/models/jobs.py index 12c691d195..ceff1c8cb7 100644 --- a/awx/main/models/jobs.py +++ b/awx/main/models/jobs.py @@ -96,6 +96,13 @@ class JobOptions(BaseModel): default='', blank=True, ) + scm_branch = models.CharField( + max_length=1024, + default='', + blank=True, + help_text=_('Branch to use in job run. Project default used if blank. 
' + 'Only allowed if project allow_override field is set to true.'), + ) forks = models.PositiveIntegerField( blank=True, default=0, @@ -234,6 +241,11 @@ class JobTemplate(UnifiedJobTemplate, JobOptions, SurveyJobTemplateMixin, Resour default=False, allows_field='credentials' ) + ask_scm_branch_on_launch = AskForField( + blank=True, + default=False, + allows_field='scm_branch' + ) job_slice_count = models.PositiveIntegerField( blank=True, default=1, @@ -387,7 +399,21 @@ class JobTemplate(UnifiedJobTemplate, JobOptions, SurveyJobTemplateMixin, Resour # no-op case: Fields the same as template's value # counted as neither accepted or ignored continue + elif field_name == 'scm_branch' and old_value == '' and self.project and new_value == self.project.scm_branch: + # special case of "not provided" for branches + # job template does not provide branch, runs with default branch + continue elif getattr(self, ask_field_name): + # Special case where prompts can be rejected based on project setting + if field_name == 'scm_branch': + if not self.project: + rejected_data[field_name] = new_value + errors_dict[field_name] = _('Project is missing.') + continue + if kwargs['scm_branch'] != self.project.scm_branch and not self.project.allow_override: + rejected_data[field_name] = new_value + errors_dict[field_name] = _('Project does not allow override of branch.') + continue # accepted prompt prompted_data[field_name] = new_value else: @@ -396,7 +422,7 @@ class JobTemplate(UnifiedJobTemplate, JobOptions, SurveyJobTemplateMixin, Resour # Not considered an error for manual launch, to support old # behavior of putting them in ignored_fields and launching anyway if 'prompts' not in exclude_errors: - errors_dict[field_name] = _('Field is not configured to prompt on launch.').format(field_name=field_name) + errors_dict[field_name] = _('Field is not configured to prompt on launch.') if ('prompts' not in exclude_errors and (not getattr(self, 'ask_credential_on_launch', False)) and diff 
--git a/awx/main/models/projects.py b/awx/main/models/projects.py index c86b08421d..afd61e8faa 100644 --- a/awx/main/models/projects.py +++ b/awx/main/models/projects.py @@ -106,6 +106,13 @@ class ProjectOptions(models.Model): verbose_name=_('SCM Branch'), help_text=_('Specific branch, tag or commit to checkout.'), ) + scm_refspec = models.CharField( + max_length=1024, + blank=True, + default='', + verbose_name=_('SCM refspec'), + help_text=_('For git projects, an additional refspec to fetch.'), + ) scm_clean = models.BooleanField( default=False, help_text=_('Discard any local changes before syncing the project.'), @@ -241,7 +248,7 @@ class Project(UnifiedJobTemplate, ProjectOptions, ResourceMixin, CustomVirtualEn SOFT_UNIQUE_TOGETHER = [('polymorphic_ctype', 'name', 'organization')] FIELDS_TO_PRESERVE_AT_COPY = ['labels', 'instance_groups', 'credentials'] FIELDS_TO_DISCARD_AT_COPY = ['local_path'] - FIELDS_TRIGGER_UPDATE = frozenset(['scm_url', 'scm_branch', 'scm_type']) + FIELDS_TRIGGER_UPDATE = frozenset(['scm_url', 'scm_branch', 'scm_type', 'scm_refspec']) class Meta: app_label = 'main' @@ -261,9 +268,14 @@ class Project(UnifiedJobTemplate, ProjectOptions, ResourceMixin, CustomVirtualEn scm_update_cache_timeout = models.PositiveIntegerField( default=0, blank=True, - help_text=_('The number of seconds after the last project update ran that a new' + help_text=_('The number of seconds after the last project update ran that a new ' 'project update will be launched as a job dependency.'), ) + allow_override = models.BooleanField( + default=False, + help_text=_('Allow changing the SCM branch or revision in a job template ' + 'that uses this project.'), + ) scm_revision = models.CharField( max_length=1024, @@ -471,6 +483,14 @@ class ProjectUpdate(UnifiedJob, ProjectOptions, JobNotificationMixin, TaskManage choices=PROJECT_UPDATE_JOB_TYPE_CHOICES, default='check', ) + scm_revision = models.CharField( + max_length=1024, + blank=True, + default='', + editable=False, + 
verbose_name=_('SCM Revision'), + help_text=_('The SCM Revision discovered by this update for the given project and branch.'), + ) def _get_parent_field_name(self): return 'project' diff --git a/awx/main/tasks.py b/awx/main/tasks.py index 0a41380d03..e8f671ee9e 100644 --- a/awx/main/tasks.py +++ b/awx/main/tasks.py @@ -20,6 +20,8 @@ from distutils.dir_util import copy_tree from distutils.version import LooseVersion as Version import yaml import fcntl +from pathlib import Path +from uuid import uuid4 try: import psutil except Exception: @@ -41,6 +43,10 @@ from django.core.exceptions import ObjectDoesNotExist # Django-CRUM from crum import impersonate +# GitPython +import git +from gitdb.exc import BadName as BadGitName + # Runner import ansible_runner @@ -67,7 +73,7 @@ from awx.main.utils import (get_ssh_version, update_scm_url, ignore_inventory_computed_fields, ignore_inventory_group_removal, extract_ansible_vars, schedule_task_manager, get_awx_version) -from awx.main.utils.common import _get_ansible_version, get_custom_venv_choices +from awx.main.utils.common import get_ansible_version, _get_ansible_version, get_custom_venv_choices from awx.main.utils.safe_yaml import safe_dump, sanitize_jinja from awx.main.utils.reload import stop_local_services from awx.main.utils.pglock import advisory_lock @@ -694,9 +700,11 @@ class BaseTask(object): model = None event_model = None abstract = True - cleanup_paths = [] proot_show_paths = [] + def __init__(self): + self.cleanup_paths = [] + def update_model(self, pk, _attempt=0, **updates): """Reload the model instance from the database and update the given fields. @@ -769,9 +777,11 @@ class BaseTask(object): os.chmod(path, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR) if settings.AWX_CLEANUP_PATHS: self.cleanup_paths.append(path) - # Ansible Runner requires that this directory exists. 
- # Specifically, when using process isolation - os.mkdir(os.path.join(path, 'project')) + runner_project_folder = os.path.join(path, 'project') + if not os.path.exists(runner_project_folder): + # Ansible Runner requires that this directory exists. + # Specifically, when using process isolation + os.mkdir(runner_project_folder) return path def build_private_data_files(self, instance, private_data_dir): @@ -860,7 +870,10 @@ class BaseTask(object): ''' process_isolation_params = dict() if self.should_use_proot(instance): - show_paths = self.proot_show_paths + [private_data_dir, cwd] + \ + local_paths = [private_data_dir] + if cwd != private_data_dir and Path(private_data_dir) not in Path(cwd).parents: + local_paths.append(cwd) + show_paths = self.proot_show_paths + local_paths + \ settings.AWX_PROOT_SHOW_PATHS # Help the user out by including the collections path inside the bubblewrap environment @@ -1030,7 +1043,7 @@ class BaseTask(object): expect_passwords[k] = passwords.get(v, '') or '' return expect_passwords - def pre_run_hook(self, instance): + def pre_run_hook(self, instance, private_data_dir): ''' Hook for any steps to run before the job/task starts ''' @@ -1157,7 +1170,8 @@ class BaseTask(object): try: isolated = self.instance.is_isolated() self.instance.send_notification_templates("running") - self.pre_run_hook(self.instance) + private_data_dir = self.build_private_data_dir(self.instance) + self.pre_run_hook(self.instance, private_data_dir) if self.instance.cancel_flag: self.instance = self.update_model(self.instance.pk, status='canceled') if self.instance.status != 'running': @@ -1173,7 +1187,6 @@ class BaseTask(object): # store a record of the venv used at runtime if hasattr(self.instance, 'custom_virtualenv'): self.update_model(pk, custom_virtualenv=getattr(self.instance, 'ansible_virtualenv_path', settings.ANSIBLE_VENV_PATH)) - private_data_dir = self.build_private_data_dir(self.instance) # Fetch "cached" fact data from prior runs and put on the disk # 
where ansible expects to find it @@ -1256,9 +1269,6 @@ class BaseTask(object): module_args = ansible_runner.utils.args2cmdline( params.get('module_args'), ) - else: - # otherwise, it's a playbook, so copy the project dir - copy_tree(cwd, os.path.join(private_data_dir, 'project')) shutil.move( params.pop('inventory'), os.path.join(private_data_dir, 'inventory') @@ -1464,6 +1474,15 @@ class RunJob(BaseTask): if authorize: env['ANSIBLE_NET_AUTH_PASS'] = network_cred.get_input('authorize_password', default='') + for env_key, folder in ( + ('ANSIBLE_COLLECTIONS_PATHS', 'requirements_collections'), + ('ANSIBLE_ROLES_PATH', 'requirements_roles')): + paths = [] + if env_key in env: + paths.append(env[env_key]) + paths.append(os.path.join(private_data_dir, folder)) + env[env_key] = os.pathsep.join(paths) + return env def build_args(self, job, private_data_dir, passwords): @@ -1532,15 +1551,10 @@ class RunJob(BaseTask): return args def build_cwd(self, job, private_data_dir): - cwd = job.project.get_project_path() - if not cwd: - root = settings.PROJECTS_ROOT - raise RuntimeError('project local_path %s cannot be found in %s' % - (job.project.local_path, root)) - return cwd + return os.path.join(private_data_dir, 'project') def build_playbook_path_relative_to_cwd(self, job, private_data_dir): - return os.path.join(job.playbook) + return job.playbook def build_extra_vars_file(self, job, private_data_dir): # Define special extra_vars for AWX, combine with job.extra_vars. 
@@ -1587,39 +1601,86 @@ class RunJob(BaseTask): ''' return getattr(settings, 'AWX_PROOT_ENABLED', False) - def pre_run_hook(self, job): + def pre_run_hook(self, job, private_data_dir): if job.inventory is None: error = _('Job could not start because it does not have a valid inventory.') self.update_model(job.pk, status='failed', job_explanation=error) raise RuntimeError(error) - if job.project and job.project.scm_type: + elif job.project is None: + error = _('Job could not start because it does not have a valid project.') + self.update_model(job.pk, status='failed', job_explanation=error) + raise RuntimeError(error) + elif job.project.status in ('error', 'failed'): + msg = _( + 'The project revision for this job template is unknown due to a failed update.' + ) + job = self.update_model(job.pk, status='failed', job_explanation=msg) + raise RuntimeError(msg) + + project_path = job.project.get_project_path(check_if_exists=False) + job_revision = job.project.scm_revision + needs_sync = True + if not job.project.scm_type: + # manual projects are not synced, user has responsibility for that + needs_sync = False + elif not os.path.exists(project_path): + logger.debug('Performing fresh clone of {} on this instance.'.format(job.project)) + elif not job.project.scm_revision: + logger.debug('Revision not known for {}, will sync with remote'.format(job.project)) + elif job.project.scm_type == 'git': + git_repo = git.Repo(project_path) + try: + desired_revision = job.project.scm_revision + if job.scm_branch and job.scm_branch != job.project.scm_branch: + desired_revision = job.scm_branch # could be commit or not, but will try as commit + current_revision = git_repo.head.commit.hexsha + if desired_revision == current_revision: + job_revision = desired_revision + logger.info('Skipping project sync for {} because commit is locally available'.format(job.log_format)) + needs_sync = False + except (ValueError, BadGitName): + logger.debug('Needed commit for {} not in local source 
tree, will sync with remote'.format(job.log_format)) + # Galaxy requirements are not supported for manual projects + if not needs_sync and job.project.scm_type: + # see if we need a sync because of presence of roles + galaxy_req_path = os.path.join(project_path, 'roles', 'requirements.yml') + if os.path.exists(galaxy_req_path): + logger.debug('Running project sync for {} because of galaxy role requirements.'.format(job.log_format)) + needs_sync = True + + galaxy_collections_req_path = os.path.join(project_path, 'collections', 'requirements.yml') + if os.path.exists(galaxy_collections_req_path): + logger.debug('Running project sync for {} because of galaxy collections requirements.'.format(job.log_format)) + needs_sync = True + + if needs_sync: pu_ig = job.instance_group pu_en = job.execution_node if job.is_isolated() is True: pu_ig = pu_ig.controller pu_en = settings.CLUSTER_HOST_ID - if job.project.status in ('error', 'failed'): - msg = _( - 'The project revision for this job template is unknown due to a failed update.' 
- ) - job = self.update_model(job.pk, status='failed', job_explanation=msg) - raise RuntimeError(msg) - local_project_sync = job.project.create_project_update( - _eager_fields=dict( - launch_type="sync", - job_type='run', - status='running', - instance_group = pu_ig, - execution_node=pu_en, - celery_task_id=job.celery_task_id)) + sync_metafields = dict( + launch_type="sync", + job_type='run', + status='running', + instance_group = pu_ig, + execution_node=pu_en, + celery_task_id=job.celery_task_id + ) + if job.scm_branch and job.scm_branch != job.project.scm_branch: + sync_metafields['scm_branch'] = job.scm_branch + local_project_sync = job.project.create_project_update(_eager_fields=sync_metafields) # save the associated job before calling run() so that a # cancel() call on the job can cancel the project update job = self.update_model(job.pk, project_update=local_project_sync) project_update_task = local_project_sync._get_task_class() try: - project_update_task().run(local_project_sync.id) - job = self.update_model(job.pk, scm_revision=job.project.scm_revision) + # the job private_data_dir is passed so sync can download roles and collections there + sync_task = project_update_task(job_private_data_dir=private_data_dir) + sync_task.run(local_project_sync.id) + local_project_sync.refresh_from_db() + job = self.update_model(job.pk, scm_revision=local_project_sync.scm_revision) except Exception: local_project_sync.refresh_from_db() if local_project_sync.status != 'canceled': @@ -1627,6 +1688,38 @@ class RunJob(BaseTask): job_explanation=('Previous Task Failed: {"job_type": "%s", "job_name": "%s", "job_id": "%s"}' % ('project_update', local_project_sync.name, local_project_sync.id))) raise + job.refresh_from_db() + if job.cancel_flag: + return + else: + # Case where a local sync is not needed, meaning that local tree is + # up-to-date with project, job is running project current version + if job_revision: + job = self.update_model(job.pk, scm_revision=job_revision) + + 
# copy the project directory + runner_project_folder = os.path.join(private_data_dir, 'project') + if job.project.scm_type == 'git': + git_repo = git.Repo(project_path) + if not os.path.exists(runner_project_folder): + os.mkdir(runner_project_folder) + tmp_branch_name = 'awx_internal/{}'.format(uuid4()) + # always clone based on specific job revision + if not job.scm_revision: + raise RuntimeError('Unexpectedly could not determine a revision to run from project.') + source_branch = git_repo.create_head(tmp_branch_name, job.scm_revision) + # git clone must take file:// syntax for source repo or else options like depth will be ignored + source_as_uri = Path(project_path).as_uri() + git.Repo.clone_from( + source_as_uri, runner_project_folder, branch=source_branch, + depth=1, single_branch=True, # shallow, do not copy full history + recursive=True # include submodules + ) + # force option is necessary because remote refs are not counted, although no information is lost + git_repo.delete_head(tmp_branch_name, force=True) + else: + copy_tree(project_path, runner_project_folder) + if job.inventory.kind == 'smart': # cache smart inventory memberships so that the host_filter query is not # ran inside of the event saving code @@ -1663,7 +1756,24 @@ class RunProjectUpdate(BaseTask): @property def proot_show_paths(self): - return [settings.PROJECTS_ROOT] + show_paths = [settings.PROJECTS_ROOT] + if self.job_private_data_dir: + show_paths.append(self.job_private_data_dir) + return show_paths + + def __init__(self, *args, job_private_data_dir=None, **kwargs): + super(RunProjectUpdate, self).__init__(*args, **kwargs) + self.playbook_new_revision = None + self.original_branch = None + self.job_private_data_dir = job_private_data_dir + + def event_handler(self, event_data): + super(RunProjectUpdate, self).event_handler(event_data) + returned_data = event_data.get('event_data', {}) + if returned_data.get('task_action', '') == 'set_fact': + returned_facts = returned_data.get('res', 
{}).get('ansible_facts', {}) + if 'scm_version' in returned_facts: + self.playbook_new_revision = returned_facts['scm_version'] def build_private_data(self, project_update, private_data_dir): ''' @@ -1678,14 +1788,17 @@ class RunProjectUpdate(BaseTask): } } ''' - handle, self.revision_path = tempfile.mkstemp(dir=settings.PROJECTS_ROOT) - if settings.AWX_CLEANUP_PATHS: - self.cleanup_paths.append(self.revision_path) private_data = {'credentials': {}} if project_update.credential: credential = project_update.credential if credential.has_input('ssh_key_data'): private_data['credentials'][credential] = credential.get_input('ssh_key_data', default='') + + # Create dir where collections will live for the job run + if project_update.job_type != 'check' and getattr(self, 'job_private_data_dir'): + for folder_name in ('requirements_collections', 'requirements_roles'): + folder_path = os.path.join(self.job_private_data_dir, folder_name) + os.mkdir(folder_path, stat.S_IREAD | stat.S_IWRITE | stat.S_IEXEC) return private_data def build_passwords(self, project_update, runtime_passwords): @@ -1781,10 +1894,21 @@ class RunProjectUpdate(BaseTask): scm_url, extra_vars_new = self._build_scm_url_extra_vars(project_update) extra_vars.update(extra_vars_new) - if project_update.project.scm_revision and project_update.job_type == 'run': + scm_branch = project_update.scm_branch + branch_override = bool(project_update.scm_branch != project_update.project.scm_branch) + if project_update.job_type == 'run' and scm_branch and (not branch_override): scm_branch = project_update.project.scm_revision + elif not scm_branch: + scm_branch = {'hg': 'tip'}.get(project_update.scm_type, 'HEAD') + if project_update.job_type == 'check': + roles_enabled = False + collections_enabled = False else: - scm_branch = project_update.scm_branch or {'hg': 'tip'}.get(project_update.scm_type, 'HEAD') + roles_enabled = getattr(settings, 'AWX_ROLES_ENABLED', True) + collections_enabled = getattr(settings, 
'AWX_COLLECTIONS_ENABLED', True) + # collections were introduced in Ansible version 2.8 + if Version(get_ansible_version()) <= Version('2.8'): + collections_enabled = False extra_vars.update({ 'project_path': project_update.get_project_path(check_if_exists=False), 'insights_url': settings.INSIGHTS_URL_BASE, @@ -1796,17 +1920,24 @@ class RunProjectUpdate(BaseTask): 'scm_clean': project_update.scm_clean, 'scm_delete_on_update': project_update.scm_delete_on_update if project_update.job_type == 'check' else False, 'scm_full_checkout': True if project_update.job_type == 'run' else False, - 'scm_revision_output': self.revision_path, - 'scm_revision': project_update.project.scm_revision, - 'roles_enabled': getattr(settings, 'AWX_ROLES_ENABLED', True) + 'roles_enabled': roles_enabled, + 'collections_enabled': collections_enabled, }) + if project_update.job_type != 'check' and self.job_private_data_dir: + extra_vars['collections_destination'] = os.path.join(self.job_private_data_dir, 'requirements_collections') + extra_vars['roles_destination'] = os.path.join(self.job_private_data_dir, 'requirements_roles') + # apply custom refspec from user for PR refs and the like + if project_update.scm_refspec: + extra_vars['scm_refspec'] = project_update.scm_refspec + elif project_update.project.allow_override: + # If branch is override-able, do extra fetch for all branches + extra_vars['scm_refspec'] = 'refs/heads/*:refs/remotes/origin/*' self._write_extra_vars_file(private_data_dir, extra_vars) def build_cwd(self, project_update, private_data_dir): return self.get_path_to('..', 'playbooks') def build_playbook_path_relative_to_cwd(self, project_update, private_data_dir): - self.build_cwd(project_update, private_data_dir) return os.path.join('project_update.yml') def get_password_prompts(self, passwords={}): @@ -1920,25 +2051,42 @@ class RunProjectUpdate(BaseTask): '{} spent {} waiting to acquire lock for local source tree ' 'for path {}.'.format(instance.log_format, waiting_time, 
lock_path)) - def pre_run_hook(self, instance): + def pre_run_hook(self, instance, private_data_dir): # re-create root project folder if a natural disaster has destroyed it if not os.path.exists(settings.PROJECTS_ROOT): os.mkdir(settings.PROJECTS_ROOT) self.acquire_lock(instance) + self.original_branch = None + if (instance.scm_type == 'git' and instance.job_type == 'run' and instance.project and + instance.scm_branch != instance.project.scm_branch): + project_path = instance.project.get_project_path(check_if_exists=False) + if os.path.exists(project_path): + git_repo = git.Repo(project_path) + self.original_branch = git_repo.active_branch def post_run_hook(self, instance, status): + if self.original_branch: + # for git project syncs, non-default branches can be problems + # restore to branch the repo was on before this run + try: + self.original_branch.checkout() + except Exception: + # this could have failed due to dirty tree, but difficult to predict all cases + logger.exception('Failed to restore project repo to prior state after {}'.format(instance.log_format)) self.release_lock(instance) p = instance.project + if self.playbook_new_revision: + instance.scm_revision = self.playbook_new_revision + instance.save(update_fields=['scm_revision']) if instance.job_type == 'check' and status not in ('failed', 'canceled',): - fd = open(self.revision_path, 'r') - lines = fd.readlines() - if lines: - p.scm_revision = lines[0].strip() + if self.playbook_new_revision: + p.scm_revision = self.playbook_new_revision else: - logger.info("{} Could not find scm revision in check".format(instance.log_format)) + if status == 'successful': + logger.error("{} Could not find scm revision in check".format(instance.log_format)) p.playbook_files = p.playbooks p.inventory_files = p.inventories - p.save() + p.save(update_fields=['scm_revision', 'playbook_files', 'inventory_files']) # Update any inventories that depend on this project dependent_inventory_sources = 
p.scm_inventory_sources.filter(update_on_project_update=True) @@ -2159,11 +2307,12 @@ class RunInventoryUpdate(BaseTask): # All credentials not used by inventory source injector return inventory_update.get_extra_credentials() - def pre_run_hook(self, inventory_update): + def pre_run_hook(self, inventory_update, private_data_dir): source_project = None if inventory_update.inventory_source: source_project = inventory_update.inventory_source.source_project if (inventory_update.source=='scm' and inventory_update.launch_type!='scm' and source_project): + # In project sync, pulling galaxy roles is not needed local_project_sync = source_project.create_project_update( _eager_fields=dict( launch_type="sync", diff --git a/awx/main/tests/functional/api/test_job_runtime_params.py b/awx/main/tests/functional/api/test_job_runtime_params.py index d90f63e180..c2cd279bab 100644 --- a/awx/main/tests/functional/api/test_job_runtime_params.py +++ b/awx/main/tests/functional/api/test_job_runtime_params.py @@ -516,6 +516,25 @@ def test_job_launch_JT_with_credentials(machine_credential, credential, net_cred assert machine_credential in creds +@pytest.mark.django_db +def test_job_branch_rejected_and_accepted(deploy_jobtemplate): + deploy_jobtemplate.ask_scm_branch_on_launch = True + deploy_jobtemplate.save() + prompted_fields, ignored_fields, errors = deploy_jobtemplate._accept_or_ignore_job_kwargs( + scm_branch='foobar' + ) + assert 'scm_branch' in ignored_fields + assert 'does not allow override of branch' in errors['scm_branch'] + + deploy_jobtemplate.project.allow_override = True + deploy_jobtemplate.project.save() + prompted_fields, ignored_fields, errors = deploy_jobtemplate._accept_or_ignore_job_kwargs( + scm_branch='foobar' + ) + assert not ignored_fields + assert prompted_fields['scm_branch'] == 'foobar' + + @pytest.mark.django_db @pytest.mark.job_runtime_vars def test_job_launch_unprompted_vars_with_survey(mocker, survey_spec_factory, job_template_prompts, post, admin_user): 
diff --git a/awx/main/tests/functional/api/test_job_template.py b/awx/main/tests/functional/api/test_job_template.py index 10c978eb06..9691553195 100644 --- a/awx/main/tests/functional/api/test_job_template.py +++ b/awx/main/tests/functional/api/test_job_template.py @@ -505,3 +505,37 @@ def test_callback_disallowed_null_inventory(project): with pytest.raises(ValidationError) as exc: serializer.validate({'host_config_key': 'asdfbasecfeee'}) assert 'Cannot enable provisioning callback without an inventory set' in str(exc) + + +@pytest.mark.django_db +def test_job_template_branch_error(project, inventory, post, admin_user): + r = post( + url=reverse('api:job_template_list'), + data={ + "name": "fooo", + "inventory": inventory.pk, + "project": project.pk, + "playbook": "helloworld.yml", + "scm_branch": "foobar" + }, + user=admin_user, + expect=400 + ) + assert 'Project does not allow overriding branch' in str(r.data['scm_branch']) + + +@pytest.mark.django_db +def test_job_template_branch_prompt_error(project, inventory, post, admin_user): + r = post( + url=reverse('api:job_template_list'), + data={ + "name": "fooo", + "inventory": inventory.pk, + "project": project.pk, + "playbook": "helloworld.yml", + "ask_scm_branch_on_launch": True + }, + user=admin_user, + expect=400 + ) + assert 'Project does not allow overriding branch' in str(r.data['ask_scm_branch_on_launch']) diff --git a/awx/main/tests/functional/api/test_project.py b/awx/main/tests/functional/api/test_project.py index b66835431f..571d0624fb 100644 --- a/awx/main/tests/functional/api/test_project.py +++ b/awx/main/tests/functional/api/test_project.py @@ -5,17 +5,18 @@ from django.conf import settings import pytest from awx.api.versioning import reverse +from awx.main.models import Project, JobTemplate @pytest.mark.django_db class TestInsightsCredential: def test_insights_credential(self, patch, insights_project, admin_user, insights_credential): - patch(insights_project.get_absolute_url(), + 
patch(insights_project.get_absolute_url(), {'credential': insights_credential.id}, admin_user, expect=200) def test_non_insights_credential(self, patch, insights_project, admin_user, scm_credential): - patch(insights_project.get_absolute_url(), + patch(insights_project.get_absolute_url(), {'credential': scm_credential.id}, admin_user, expect=400) @@ -44,3 +45,52 @@ def test_project_unset_custom_virtualenv(get, patch, project, admin, value): url = reverse('api:project_detail', kwargs={'pk': project.id}) resp = patch(url, {'custom_virtualenv': value}, user=admin, expect=200) assert resp.data['custom_virtualenv'] is None + + +@pytest.mark.django_db +def test_no_changing_overwrite_behavior_if_used(post, patch, organization, admin_user): + r1 = post( + url=reverse('api:project_list'), + data={ + 'name': 'fooo', + 'organization': organization.id, + 'allow_override': True + }, + user=admin_user, + expect=201 + ) + JobTemplate.objects.create( + name='provides branch', project_id=r1.data['id'], + playbook='helloworld.yml', + scm_branch='foobar' + ) + r2 = patch( + url=reverse('api:project_detail', kwargs={'pk': r1.data['id']}), + data={'allow_override': False}, + user=admin_user, + expect=400 + ) + assert 'job templates depend on branch override behavior for this project' in str(r2.data['allow_override']) + assert 'ids: 2' in str(r2.data['allow_override']) + assert Project.objects.get(pk=r1.data['id']).allow_override is True + + +@pytest.mark.django_db +def test_changing_overwrite_behavior_okay_if_not_used(post, patch, organization, admin_user): + r1 = post( + url=reverse('api:project_list'), + data={ + 'name': 'fooo', + 'organization': organization.id, + 'allow_override': True + }, + user=admin_user, + expect=201 + ) + patch( + url=reverse('api:project_detail', kwargs={'pk': r1.data['id']}), + data={'allow_override': False}, + user=admin_user, + expect=200 + ) + assert Project.objects.get(pk=r1.data['id']).allow_override is False diff --git 
a/awx/main/tests/unit/test_tasks.py b/awx/main/tests/unit/test_tasks.py index 75d616ac3b..b2ff7d7fe4 100644 --- a/awx/main/tests/unit/test_tasks.py +++ b/awx/main/tests/unit/test_tasks.py @@ -256,7 +256,7 @@ class TestExtraVarSanitation(TestJobExecution): def test_vars_unsafe_by_default(self, job, private_data_dir): job.created_by = User(pk=123, username='angry-spud') - job.inventory = Inventory(pk=123, name='example-inv') + job.inventory = Inventory(pk=123, name='example-inv') task = tasks.RunJob() task.build_extra_vars_file(job, private_data_dir) @@ -361,15 +361,16 @@ class TestExtraVarSanitation(TestJobExecution): class TestGenericRun(): def test_generic_failure(self, patch_Job): - job = Job(status='running', inventory=Inventory()) + job = Job(status='running', inventory=Inventory(), project=Project()) job.websocket_emit_status = mock.Mock() task = tasks.RunJob() task.update_model = mock.Mock(return_value=job) task.build_private_data_files = mock.Mock(side_effect=OSError()) - with pytest.raises(Exception): - task.run(1) + with mock.patch('awx.main.tasks.copy_tree'): + with pytest.raises(Exception): + task.run(1) update_model_call = task.update_model.call_args[1] assert 'OSError' in update_model_call['result_traceback'] @@ -386,8 +387,9 @@ class TestGenericRun(): task.update_model = mock.Mock(wraps=update_model_wrapper) task.build_private_data_files = mock.Mock() - with pytest.raises(Exception): - task.run(1) + with mock.patch('awx.main.tasks.copy_tree'): + with pytest.raises(Exception): + task.run(1) for c in [ mock.call(1, status='running', start_args=''), @@ -524,7 +526,10 @@ class TestGenericRun(): with mock.patch('awx.main.tasks.settings.AWX_ANSIBLE_COLLECTIONS_PATHS', ['/AWX_COLLECTION_PATH']): with mock.patch('awx.main.tasks.settings.AWX_TASK_ENV', {'ANSIBLE_COLLECTIONS_PATHS': '/MY_COLLECTION1:/MY_COLLECTION2'}): env = task.build_env(job, private_data_dir) - assert env['ANSIBLE_COLLECTIONS_PATHS'] == '/MY_COLLECTION1:/MY_COLLECTION2:/AWX_COLLECTION_PATH' 
+ used_paths = env['ANSIBLE_COLLECTIONS_PATHS'].split(':') + assert used_paths[-1].endswith('/requirements_collections') + used_paths.pop() + assert used_paths == ['/MY_COLLECTION1', '/MY_COLLECTION2', '/AWX_COLLECTION_PATH'] def test_valid_custom_virtualenv(self, patch_Job, private_data_dir): job = Job(project=Project(), inventory=Inventory()) @@ -1720,8 +1725,6 @@ class TestProjectUpdateCredentials(TestJobExecution): call_args, _ = task._write_extra_vars_file.call_args_list[0] _, extra_vars = call_args - assert extra_vars["scm_revision_output"] == 'foobar' - def test_username_and_password_auth(self, project_update, scm_type): task = tasks.RunProjectUpdate() ssh = CredentialType.defaults['ssh']() diff --git a/awx/playbooks/project_update.yml b/awx/playbooks/project_update.yml index 25cbef5951..51f90e5afc 100644 --- a/awx/playbooks/project_update.yml +++ b/awx/playbooks/project_update.yml @@ -11,9 +11,9 @@ # scm_username: username (only for svn/insights) # scm_password: password (only for svn/insights) # scm_accept_hostkey: true/false (only for git, set automatically) -# scm_revision: current revision in tower -# scm_revision_output: where to store gathered revision (temporary file) +# scm_refspec: a refspec to fetch in addition to obtaining version # roles_enabled: Allow us to pull roles from a requirements.yml file +# roles_destination: Path to save roles from galaxy to # awx_version: Current running version of the awx or tower as a string # awx_license_type: "open" for AWX; else presume Tower @@ -29,27 +29,12 @@ delegate_to: localhost - block: - - name: check repo using git - git: - dest: "{{project_path|quote}}" - repo: "{{scm_url}}" - version: "{{scm_branch|quote}}" - force: "{{scm_clean}}" - update: false - clone: false - register: repo_check - when: scm_full_checkout|default('') - ignore_errors: true - - - name: break if already checked out - meta: end_play - when: scm_full_checkout|default('') and repo_check is succeeded and repo_check.before == scm_branch 
- - name: update project using git git: dest: "{{project_path|quote}}" repo: "{{scm_url}}" version: "{{scm_branch|quote}}" + refspec: "{{scm_refspec|default(omit)}}" force: "{{scm_clean}}" accept_hostkey: "{{scm_accept_hostkey|default(omit)}}" register: git_result @@ -131,13 +116,6 @@ debug: msg="Repository Version {{ scm_version }}" when: scm_version is defined - - name: Write Repository Version - copy: - dest: "{{ scm_revision_output }}" - content: "{{ scm_version }}" - when: scm_version is defined and scm_revision_output is defined - delegate_to: localhost - - hosts: all gather_facts: false tasks: @@ -148,18 +126,28 @@ register: doesRequirementsExist - name: fetch galaxy roles from requirements.yml - command: ansible-galaxy install -r requirements.yml -p {{project_path|quote}}/roles/ + command: ansible-galaxy install -r requirements.yml -p {{roles_destination|quote}} args: chdir: "{{project_path|quote}}/roles" register: galaxy_result - when: doesRequirementsExist.stat.exists and (scm_version is undefined or (git_result is not skipped and git_result['before'] == git_result['after'])) + when: doesRequirementsExist.stat.exists changed_when: "'was installed successfully' in galaxy_result.stdout" - - name: fetch galaxy roles from requirements.yml (forced update) - command: ansible-galaxy install -r requirements.yml -p {{project_path|quote}}/roles/ --force - args: - chdir: "{{project_path|quote}}/roles" - when: doesRequirementsExist.stat.exists and galaxy_result is skipped - when: roles_enabled|bool delegate_to: localhost + + - block: + - name: detect collections/requirements.yml + stat: path={{project_path|quote}}/collections/requirements.yml + register: doesCollectionRequirementsExist + + - name: fetch galaxy collections from collections/requirements.yml + command: ansible-galaxy collection install -r requirements.yml -p {{collections_destination|quote}} + args: + chdir: "{{project_path|quote}}/collections" + register: galaxy_collection_result + when: 
doesCollectionRequirementsExist.stat.exists + changed_when: "'Installing ' in galaxy_collection_result.stdout" + + when: collections_enabled|bool + delegate_to: localhost diff --git a/awx/settings/defaults.py b/awx/settings/defaults.py index 0ea3dd6746..b9274c62f4 100644 --- a/awx/settings/defaults.py +++ b/awx/settings/defaults.py @@ -604,6 +604,11 @@ ALLOW_JINJA_IN_EXTRA_VARS = 'template' # Note: This setting may be overridden by database settings. AWX_ROLES_ENABLED = True +# Enable dynamically pulling collections from a requirement.yml file +# when updating SCM projects +# Note: This setting may be overridden by database settings. +AWX_COLLECTIONS_ENABLED = True + # Enable bubblewrap support for running jobs (playbook runs only). # Note: This setting may be overridden by database settings. AWX_PROOT_ENABLED = True diff --git a/awx/ui/client/features/output/details.component.js b/awx/ui/client/features/output/details.component.js index 9bddc202f7..23aa5c2a6d 100644 --- a/awx/ui/client/features/output/details.component.js +++ b/awx/ui/client/features/output/details.component.js @@ -343,6 +343,28 @@ function getProjectUpdateDetails (updateId) { return { link, tooltip }; } +function getSCMBranchDetails (scmBranch) { + const label = strings.get('labels.SCM_BRANCH'); + const value = scmBranch || resource.model.get('scm_branch'); + + if (!value) { + return null; + } + + return { label, value }; +} + +function getSCMRefspecDetails (scmRefspec) { + const label = strings.get('labels.SCM_REFSPEC'); + const value = scmRefspec || resource.model.get('scm_refspec'); + + if (!value) { + return null; + } + + return { label, value }; +} + function getInventoryScmDetails (updateId, updateStatus) { const projectId = resource.model.get('summary_fields.source_project.id'); const projectName = resource.model.get('summary_fields.source_project.name'); @@ -800,6 +822,8 @@ function JobDetailsController ( vm.project = getProjectDetails(); vm.projectUpdate = getProjectUpdateDetails(); 
vm.projectStatus = getProjectStatusDetails(); + vm.scmBranch = getSCMBranchDetails(); + vm.scmRefspec = getSCMRefspecDetails(); vm.scmRevision = getSCMRevisionDetails(); vm.inventoryScm = getInventoryScmDetails(); vm.playbook = getPlaybookDetails(); @@ -840,6 +864,8 @@ function JobDetailsController ( started, finished, scm, + scmBranch, + scmRefspec, inventoryScm, scmRevision, instanceGroup, @@ -851,6 +877,8 @@ function JobDetailsController ( vm.finished = getFinishDetails(finished); vm.projectUpdate = getProjectUpdateDetails(scm.id); vm.projectStatus = getProjectStatusDetails(scm.status); + vm.scmBranch = getSCMBranchDetails(scmBranch); + vm.scmRefspec = getSCMRefspecDetails(scmRefspec); vm.environment = getEnvironmentDetails(environment); vm.artifacts = getArtifactsDetails(artifacts); vm.executionNode = getExecutionNodeDetails(executionNode); diff --git a/awx/ui/client/features/output/details.partial.html b/awx/ui/client/features/output/details.partial.html index 5ad6dde62e..14bf856269 100644 --- a/awx/ui/client/features/output/details.partial.html +++ b/awx/ui/client/features/output/details.partial.html @@ -218,6 +218,18 @@ + +
' + i18n._("Branch to checkout. In addition to branches, you can input tags, commit hashes, and arbitrary refs. Some commit hashes and refs may not be available unless you also provide a custom refspec.") + '
', + dataTitle: i18n._('SCM Branch'), + subForm: 'sourceSubForm', + }, + scm_refspec: { + labelBind: "scmRefspecLabel", + type: 'text', + ngShow: "scm_type && scm_type.value === 'git'", + ngDisabled: '!(project_obj.summary_fields.user_capabilities.edit || canAdd)', + awPopOver: '' + i18n._('A refspec to fetch (passed to the Ansible git module). This parameter allows access to references via the branch field not otherwise available.') + '
' + + '' + i18n._('NOTE: This field assumes the remote name is "origin".') + '
' + + '' + i18n._('Examples include:') + '
' + + '' + i18n._('The first fetches all references. The second fetches the Github pull request number 62, in this example the branch needs to be `pull/62/head`.') + + '
' + + '' + i18n._('For more information, refer to the') + ' ' + i18n._('Ansible Tower Documentation') + '.
', + dataTitle: i18n._('SCM Refspec'), subForm: 'sourceSubForm', }, credential: { @@ -183,6 +201,18 @@ export default ['i18n', 'NotificationsList', 'TemplateList', dataPlacement: 'right', labelClass: 'checkbox-options stack-inline', ngDisabled: '!(project_obj.summary_fields.user_capabilities.edit || canAdd)' + }, + { + name: 'allow_override', + label: i18n._('Allow branch override'), + type: 'checkbox', + awPopOver: '' + i18n._('Allow changing the SCM branch or revision in a job template that uses this project.') + '
', + dataTitle: i18n._('Allow branch override'), + dataContainer: 'body', + dataPlacement: 'right', + labelClass: 'checkbox-options stack-inline', + ngDisabled: '!(project_obj.summary_fields.user_capabilities.edit || canAdd)', + ngShow: "scm_type && scm_type.value !== 'insights'", }] }, scm_update_cache_timeout: { diff --git a/awx/ui/client/src/scheduler/schedulerAdd.controller.js b/awx/ui/client/src/scheduler/schedulerAdd.controller.js index 3d6642d6c4..7c2a45dfd6 100644 --- a/awx/ui/client/src/scheduler/schedulerAdd.controller.js +++ b/awx/ui/client/src/scheduler/schedulerAdd.controller.js @@ -20,6 +20,20 @@ export default ['$filter', '$state', '$stateParams', '$http', 'Wait', scheduler, job_type; + const shouldShowPromptButton = (launchConf) => launchConf.survey_enabled || + launchConf.ask_inventory_on_launch || + launchConf.ask_credential_on_launch || + launchConf.ask_verbosity_on_launch || + launchConf.ask_job_type_on_launch || + launchConf.ask_limit_on_launch || + launchConf.ask_tags_on_launch || + launchConf.ask_skip_tags_on_launch || + launchConf.ask_diff_mode_on_launch || + launchConf.credential_needed_to_start || + launchConf.ask_variables_on_launch || + launchConf.ask_scm_branch_on_launch || + launchConf.variables_needed_to_start.length !== 0; + var schedule_url = ParentObject.related.schedules || `${ParentObject.related.inventory_source}schedules`; if (ParentObject){ $scope.parentObject = ParentObject; @@ -152,19 +166,7 @@ export default ['$filter', '$state', '$stateParams', '$http', 'Wait', $scope.noVars = true; } - if (!launchConf.survey_enabled && - !launchConf.ask_inventory_on_launch && - !launchConf.ask_credential_on_launch && - !launchConf.ask_verbosity_on_launch && - !launchConf.ask_job_type_on_launch && - !launchConf.ask_limit_on_launch && - !launchConf.ask_tags_on_launch && - !launchConf.ask_skip_tags_on_launch && - !launchConf.ask_diff_mode_on_launch && - !launchConf.survey_enabled && - !launchConf.credential_needed_to_start && - 
!launchConf.inventory_needed_to_start && - launchConf.variables_needed_to_start.length === 0) { + if (!shouldShowPromptButton(launchConf)) { $scope.showPromptButton = false; } else { $scope.showPromptButton = true; @@ -239,20 +241,8 @@ export default ['$filter', '$state', '$stateParams', '$http', 'Wait', }); }; - if (!launchConf.survey_enabled && - !launchConf.ask_inventory_on_launch && - !launchConf.ask_credential_on_launch && - !launchConf.ask_verbosity_on_launch && - !launchConf.ask_job_type_on_launch && - !launchConf.ask_limit_on_launch && - !launchConf.ask_tags_on_launch && - !launchConf.ask_skip_tags_on_launch && - !launchConf.ask_diff_mode_on_launch && - !launchConf.survey_enabled && - !launchConf.credential_needed_to_start && - !launchConf.inventory_needed_to_start && - launchConf.variables_needed_to_start.length === 0) { - $scope.showPromptButton = false; + if (!shouldShowPromptButton(launchConf)) { + $scope.showPromptButton = false; } else { $scope.showPromptButton = true; diff --git a/awx/ui/client/src/scheduler/schedulerEdit.controller.js b/awx/ui/client/src/scheduler/schedulerEdit.controller.js index 75ed397e7b..3caeb926e5 100644 --- a/awx/ui/client/src/scheduler/schedulerEdit.controller.js +++ b/awx/ui/client/src/scheduler/schedulerEdit.controller.js @@ -10,6 +10,21 @@ function($filter, $state, $stateParams, Wait, $scope, moment, let schedule, scheduler, scheduleCredentials = []; + const shouldShowPromptButton = (launchConf) => launchConf.survey_enabled || + launchConf.ask_inventory_on_launch || + launchConf.ask_credential_on_launch || + launchConf.ask_verbosity_on_launch || + launchConf.ask_job_type_on_launch || + launchConf.ask_limit_on_launch || + launchConf.ask_tags_on_launch || + launchConf.ask_skip_tags_on_launch || + launchConf.ask_diff_mode_on_launch || + launchConf.credential_needed_to_start || + launchConf.ask_variables_on_launch || + launchConf.ask_scm_branch_on_launch || + launchConf.passwords_needed_to_start.length !== 0 || + 
launchConf.variables_needed_to_start.length !== 0; + $scope.preventCredsWithPasswords = true; // initial end @ midnight values @@ -326,20 +341,7 @@ function($filter, $state, $stateParams, Wait, $scope, moment, // ask_variables_on_launch = true $scope.noVars = !launchConf.ask_variables_on_launch; - if (!launchConf.survey_enabled && - !launchConf.ask_inventory_on_launch && - !launchConf.ask_credential_on_launch && - !launchConf.ask_verbosity_on_launch && - !launchConf.ask_job_type_on_launch && - !launchConf.ask_limit_on_launch && - !launchConf.ask_tags_on_launch && - !launchConf.ask_skip_tags_on_launch && - !launchConf.ask_diff_mode_on_launch && - !launchConf.survey_enabled && - !launchConf.credential_needed_to_start && - !launchConf.inventory_needed_to_start && - launchConf.passwords_needed_to_start.length === 0 && - launchConf.variables_needed_to_start.length === 0) { + if (!shouldShowPromptButton(launchConf)) { $scope.showPromptButton = false; if (launchConf.ask_variables_on_launch) { @@ -424,20 +426,7 @@ function($filter, $state, $stateParams, Wait, $scope, moment, currentValues: scheduleResolve }); - if (!launchConf.survey_enabled && - !launchConf.ask_inventory_on_launch && - !launchConf.ask_credential_on_launch && - !launchConf.ask_verbosity_on_launch && - !launchConf.ask_job_type_on_launch && - !launchConf.ask_limit_on_launch && - !launchConf.ask_tags_on_launch && - !launchConf.ask_skip_tags_on_launch && - !launchConf.ask_diff_mode_on_launch && - !launchConf.survey_enabled && - !launchConf.credential_needed_to_start && - !launchConf.inventory_needed_to_start && - launchConf.passwords_needed_to_start.length === 0 && - launchConf.variables_needed_to_start.length === 0) { + if (!shouldShowPromptButton(launchConf)) { $scope.showPromptButton = false; } else { $scope.showPromptButton = true; diff --git a/awx/ui/client/src/shared/Utilities.js b/awx/ui/client/src/shared/Utilities.js index 456616ce93..8c3a2b9d7b 100644 --- a/awx/ui/client/src/shared/Utilities.js +++ 
b/awx/ui/client/src/shared/Utilities.js @@ -182,41 +182,52 @@ angular.module('Utilities', ['RestServices', 'Utilities']) } } else if (form) { //if no error code is detected it begins to loop through to see where the api threw an error fieldErrors = false; - for (field in form.fields) { - if (data[field] && form.fields[field].tab) { + + const addApiErrors = (field, fld) => { + if (data[fld] && field.tab) { // If the form is part of a tab group, activate the tab - $('#' + form.name + "_tabs a[href=\"#" + form.fields[field].tab + '"]').tab('show'); + $('#' + form.name + "_tabs a[href=\"#" + field.tab + '"]').tab('show'); } - if (form.fields[field].realName) { - if (data[form.fields[field].realName]) { - scope[field + '_api_error'] = data[form.fields[field].realName][0]; + if (field.realName) { + if (field.realName) { + scope[fld + '_api_error'] = data[field.realName][0]; //scope[form.name + '_form'][form.fields[field].realName].$setValidity('apiError', false); - $('[name="' + form.fields[field].realName + '"]').addClass('ng-invalid'); - $('html, body').animate({scrollTop: $('[name="' + form.fields[field].realName + '"]').offset().top}, 0); + $('[name="' + field.realName + '"]').addClass('ng-invalid'); + $('html, body').animate({scrollTop: $('[name="' + field.realName + '"]').offset().top}, 0); fieldErrors = true; } } - if (form.fields[field].sourceModel) { - if (data[field]) { - scope[form.fields[field].sourceModel + '_' + form.fields[field].sourceField + '_api_error'] = - data[field][0]; + if (field.sourceModel) { + if (data[fld]) { + scope[field.sourceModel + '_' + field.sourceField + '_api_error'] = + data[fld][0]; //scope[form.name + '_form'][form.fields[field].sourceModel + '_' + form.fields[field].sourceField].$setValidity('apiError', false); - $('[name="' + form.fields[field].sourceModel + '_' + form.fields[field].sourceField + '"]').addClass('ng-invalid'); - $('[name="' + form.fields[field].sourceModel + '_' + form.fields[field].sourceField + '"]').ScrollTo({ 
"onlyIfOutside": true, "offsetTop": 100 }); + $('[name="' + field.sourceModel + '_' + field.sourceField + '"]').addClass('ng-invalid'); + $('[name="' + field.sourceModel + '_' + field.sourceField + '"]').ScrollTo({ "onlyIfOutside": true, "offsetTop": 100 }); fieldErrors = true; } } else { - if (data[field]) { - scope[field + '_api_error'] = data[field][0]; - $('[name="' + field + '"]').addClass('ng-invalid'); - $('label[for="' + field + '"] span').addClass('error-color'); - $('html, body').animate({scrollTop: $('[name="' + field + '"]').offset().top}, 0); + if (data[fld]) { + scope[fld + '_api_error'] = data[fld][0]; + $('[name="' + fld + '"]').addClass('ng-invalid'); + $('label[for="' + fld + '"] span').addClass('error-color'); + $('html, body').animate({scrollTop: $('[name="' + fld + '"]').offset().top}, 0); fieldErrors = true; - if(form.fields[field].codeMirror){ - $(`#cm-${field}-container .CodeMirror`).addClass('error-border'); + if(field.codeMirror){ + $(`#cm-${fld}-container .CodeMirror`).addClass('error-border'); } } } + }; + + for (field in form.fields) { + if (form.fields[field].type === "checkbox_group") { + form.fields[field].fields.forEach(fld => { + addApiErrors(fld, fld.name); + }); + } else { + addApiErrors(form.fields[field], field); + } } if (defaultMsg) { Alert(defaultMsg.hdr, defaultMsg.msg); diff --git a/awx/ui/client/src/shared/form-generator.js b/awx/ui/client/src/shared/form-generator.js index 994ec53a89..ba26bc6223 100644 --- a/awx/ui/client/src/shared/form-generator.js +++ b/awx/ui/client/src/shared/form-generator.js @@ -1156,6 +1156,9 @@ angular.module('FormGenerator', [GeneratorHelpers.name, 'Utilities', listGenerat field.max + "" + i18n._("Select the playbook to be executed by this job.") + "
", + awPopOver: "" + i18n._("Select the playbook to be executed by this job." + + "You can select from the dropdown or enter a file within the input.") + "
", dataTitle: i18n._('Playbook'), dataPlacement: 'right', dataContainer: "body", diff --git a/awx/ui/client/src/templates/prompt/prompt.controller.js b/awx/ui/client/src/templates/prompt/prompt.controller.js index 60da76a641..d97b91681b 100644 --- a/awx/ui/client/src/templates/prompt/prompt.controller.js +++ b/awx/ui/client/src/templates/prompt/prompt.controller.js @@ -157,7 +157,7 @@ export default [ 'ProcessErrors', 'CredentialTypeModel', 'TemplatesStrings', '$f activeTab = activeTab || vm.steps.credential.tab; order++; } - if(vm.promptDataClone.launchConf.ask_verbosity_on_launch || vm.promptDataClone.launchConf.ask_job_type_on_launch || vm.promptDataClone.launchConf.ask_limit_on_launch || vm.promptDataClone.launchConf.ask_tags_on_launch || vm.promptDataClone.launchConf.ask_skip_tags_on_launch || (vm.promptDataClone.launchConf.ask_variables_on_launch && !vm.promptDataClone.launchConf.ignore_ask_variables) || vm.promptDataClone.launchConf.ask_diff_mode_on_launch) { + if(vm.promptDataClone.launchConf.ask_verbosity_on_launch || vm.promptDataClone.launchConf.ask_job_type_on_launch || vm.promptDataClone.launchConf.ask_limit_on_launch || vm.promptDataClone.launchConf.ask_tags_on_launch || vm.promptDataClone.launchConf.ask_skip_tags_on_launch || (vm.promptDataClone.launchConf.ask_variables_on_launch && !vm.promptDataClone.launchConf.ignore_ask_variables) || vm.promptDataClone.launchConf.ask_diff_mode_on_launch || vm.promptDataClone.launchConf.ask_scm_branch_on_launch) { vm.steps.other_prompts.includeStep = true; vm.steps.other_prompts.tab = { _active: order === 1 ? 
true : false, diff --git a/awx/ui/client/src/templates/prompt/prompt.service.js b/awx/ui/client/src/templates/prompt/prompt.service.js index d7fbe60519..338a3ff128 100644 --- a/awx/ui/client/src/templates/prompt/prompt.service.js +++ b/awx/ui/client/src/templates/prompt/prompt.service.js @@ -10,7 +10,8 @@ function PromptService (Empty, $filter) { limit: {}, tags: {}, skipTags: {}, - diffMode: {} + diffMode: {}, + scmBranch: {} }; prompts.credentials.value = _.has(params, 'launchConf.defaults.credentials') ? _.cloneDeep(params.launchConf.defaults.credentials) : []; @@ -41,7 +42,7 @@ function PromptService (Empty, $filter) { prompts.tags.value = (jobTags && jobTags !== "") ? jobTags.split(',').map((i) => ({name: i, label: i, value: i})) : []; prompts.skipTags.value = (skipTags && skipTags !== "") ? skipTags.split(',').map((i) => ({name: i, label: i, value: i})) : []; prompts.diffMode.value = _.has(params, 'currentValues.diff_mode') && typeof params.currentValues.diff_mode === 'boolean' ? params.currentValues.diff_mode : (_.has(params, 'launchConf.defaults.diff_mode') ? params.launchConf.defaults.diff_mode : null); - + prompts.scmBranch.value = _.has(params, 'currentValues.scm_branch') && params.currentValues.scm_branch ? params.currentValues.scm_branch : (_.has(params, 'launchConf.defaults.scm_branch') ? 
params.launchConf.defaults.scm_branch : ""); return prompts; }; @@ -163,6 +164,9 @@ function PromptService (Empty, $filter) { if (promptData.launchConf.ask_diff_mode_on_launch && _.has(promptData, 'prompts.diffMode.value')) { launchData.diff_mode = promptData.prompts.diffMode.value; } + if (promptData.launchConf.ask_scm_branch_on_launch && _.has(promptData, 'prompts.scmBranch.value')) { + launchData.scm_branch = promptData.prompts.scmBranch.value; + } if (promptData.prompts.credentials.passwords) { _.forOwn(promptData.prompts.credentials.passwords, (val, key) => { if (!launchData.credential_passwords) { @@ -277,7 +281,9 @@ function PromptService (Empty, $filter) { if(_.has(params, 'promptData.prompts.diffMode.value') && _.get(params, 'promptData.launchConf.ask_diff_mode_on_launch')){ promptDataToSave.diff_mode = launchConfDefaults.diff_mode && launchConfDefaults.diff_mode === params.promptData.prompts.diffMode.value ? null : params.promptData.prompts.diffMode.value; } - + if(_.has(params, 'promptData.prompts.scmBranch.value') && _.get(params, 'promptData.launchConf.ask_scm_branch_on_launch')){ + promptDataToSave.scm_branch = launchConfDefaults.scm_branch && launchConfDefaults.scm_branch === params.promptData.prompts.scmBranch.value ? null : params.promptData.prompts.scmBranch.value; + } return promptDataToSave; }; } diff --git a/awx/ui/client/src/templates/prompt/steps/other-prompts/prompt-other-prompts.partial.html b/awx/ui/client/src/templates/prompt/steps/other-prompts/prompt-other-prompts.partial.html index ca57599137..941582ee7e 100644 --- a/awx/ui/client/src/templates/prompt/steps/other-prompts/prompt-other-prompts.partial.html +++ b/awx/ui/client/src/templates/prompt/steps/other-prompts/prompt-other-prompts.partial.html @@ -22,6 +22,22 @@ +