mirror of
https://github.com/ZwareBear/JFrog-Cloud-Installers.git
synced 2026-01-21 21:06:58 -06:00
Checking in code for rt 7.27.3 version
@@ -0,0 +1,22 @@
# Copyright 2020-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Amazon Software License (the "License"). You may not use this file except in compliance with the License.
# A copy of the License is located at
#
#     http://aws.amazon.com/asl/
#
# or in the "license" file accompanying this file.
# This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, express or implied.
# See the License for the specific language governing permissions and limitations under the License.

FROM asciidoctor/docker-asciidoctor

RUN apk add --no-cache \
    aws-cli \
    py3-pip \
    python3
RUN wget https://raw.githubusercontent.com/aws-quickstart/quickstart-documentation-base-common/main/.utils/requirements.txt -O /tmp/req.txt
RUN ln -sf /usr/bin/pip3 /usr/bin/pip
RUN ln -sf /usr/bin/python3 /usr/bin/python
RUN pip3 install -r /tmp/req.txt
ENTRYPOINT ["dockerd-entrypoint.sh"]
@@ -0,0 +1,14 @@
#!/bin/bash
set -e
ASCIIDOC_ATTRIBUTES=""
GITHUB_REPO_OWNER=$(echo ${GITHUB_REPOSITORY} | cut -d '/' -f 1)
if [ -d docs/images ]; then
  mv docs/images images
fi
if [ "${GITHUB_REPO_OWNER}" == "aws-quickstart" ]; then
  cp docs/boilerplate/.css/AWS-Logo.svg images/
  if [ "${GITHUB_REF}" == "refs/heads/master" ] || [ "${GITHUB_REF}" == "refs/heads/main" ]; then
    ASCIIDOC_ATTRIBUTES="-a production_build"
  fi
fi
asciidoctor --base-dir docs/ --backend=html5 -o ../index.html -w --failure-level ERROR --doctype=book -a toc2 ${ASCIIDOC_ATTRIBUTES} docs/boilerplate/index.adoc
@@ -0,0 +1,23 @@
#!/bin/bash
set -e
CFNLINT_ARGS=""  # default to no extra args when no custom ruleset is present
if [[ -d "${GITHUB_WORKSPACE}/team_custom_rules" ]]; then
  # Install requirements for custom rules, plus the rules themselves.
  cd ${GITHUB_WORKSPACE}/team_custom_rules
  pip install -r requirements.txt
  python setup.py install
  cd ${GITHUB_WORKSPACE}
  # back to normal
  CFNLINT_ARGS="-a ${GITHUB_WORKSPACE}/team_custom_rules/qs_cfn_lint_rules"
  echo "Using custom ruleset"
else
  echo "NOT using custom ruleset"

fi

CFNLINT_JSON_OUT=$(mktemp)
set +e
cfn-lint ${CFNLINT_ARGS} -i W --templates templates/* --format json > ${CFNLINT_JSON_OUT}
CFNLINT_EXIT_CODE=$?
set -e
python docs/boilerplate/.utils/pretty_cfnlint_output.py ${CFNLINT_JSON_OUT}
exit ${CFNLINT_EXIT_CODE}
@@ -0,0 +1,24 @@
#!/bin/bash

set -eu

repo_uri="https://x-access-token:${GITHUB_TOKEN}@github.com/${GITHUB_REPOSITORY}.git"

remote_name="doc-upstream"
main_branch=$(basename "$(git symbolic-ref --short refs/remotes/origin/HEAD)")
target_branch="gh-pages"

cd "$GITHUB_WORKSPACE"
git config --local user.email "action@github.com"
git config --local user.name "GitHub Action"
git add -A
git add images
git add index.html
# With `set -e`, a failed commit would abort the script before a `$?` check
# could run, so guard the commit directly instead of testing $? afterwards.
if ! git commit -a -m "Updating documentation"; then
  echo "nothing to commit"
  exit 0
fi

git remote set-url origin ${repo_uri}
git push origin HEAD:${target_branch} --force
@@ -0,0 +1,11 @@
#!/bin/bash
exit 0
# set -e
# curl -fsSL https://github.com/github/hub/raw/master/script/get | bash -s 2.14.1
# sudo apt-get install jq -y
# PAGES_STATUS=$(bin/hub api repos/${GITHUB_REPOSITORY}/pages | jq '.status' | sed -e 's/"//g')
# if [ "${PAGES_STATUS}" != "null" ]; then
#   exit 0
# fi

# bin/hub api -H Accept:application/vnd.github.switcheroo-preview+json repos/${GITHUB_REPOSITORY}/pages -f {"source":{"branch":"gh-pages"}}
@@ -0,0 +1,32 @@
#!/bin/bash -e
set -x
git remote update
git fetch
set +e
git remote set-head origin --auto
default_branch=$(basename "$(git symbolic-ref --short refs/remotes/origin/HEAD)")
# Parse the boilerplate submodule's pinned commit id out of `git submodule` output.
doc_commit_id=$(git submodule | grep docs/boilerplate | cut -d - -f 2 | cut -f 1 -d " ")
git rev-parse --verify origin/gh-pages
CHECK_BRANCH=$?
set -e
if [[ $CHECK_BRANCH -ne 0 ]]; then
  git checkout -b gh-pages
  git push origin gh-pages
else
  git checkout gh-pages
  # git checkout --track origin/gh-pages
fi
git rm -rf .
touch .gitmodules
git restore -s origin/${default_branch} docs
set +e
git rm -r docs/boilerplate
rm -rf docs/boilerplate
set -e
git restore -s origin/${default_branch} templates
git submodule add https://github.com/aws-quickstart/quickstart-documentation-base-common.git docs/boilerplate
cd docs/boilerplate
git checkout "${doc_commit_id}"
cd ../../
rm configure_git_env.sh
mv docs/images images
@@ -0,0 +1,34 @@
#!/bin/bash -e
# # Work in progress.
# exit 1

BOILERPLATE_DIR="docs/boilerplate"
GENERATED_DIR="docs/generated"
SPECIFIC_DIR="docs/partner_editable"
# Creating directories.
mkdir -p ${GENERATED_DIR}/parameters
mkdir -p ${GENERATED_DIR}/regions
mkdir -p ${GENERATED_DIR}/services
mkdir -p ${SPECIFIC_DIR}
mkdir -p docs/images
mkdir -p .github/workflows

# Copying content.
rsync -avP ${BOILERPLATE_DIR}/.images/ docs/images/
rsync -avP ${BOILERPLATE_DIR}/.specific/ ${SPECIFIC_DIR}

# Enabling workflow.
cp ${BOILERPLATE_DIR}/.actions/main-docs-build.yml .github/workflows/


# Creating placeholders.
echo "// placeholder" > ${GENERATED_DIR}/parameters/index.adoc
echo "// placeholder" > ${GENERATED_DIR}/regions/index.adoc
echo "// placeholder" > ${GENERATED_DIR}/services/index.adoc
echo "// placeholder" > ${GENERATED_DIR}/services/metadata.adoc

touch .nojekyll

git add -A docs/
git add .github/
git add .nojekyll
@@ -0,0 +1,44 @@
#!/bin/bash -e
# This file is meant to be the functional equivalent of the GitHub Actions workflow.
#
# 7 env vars are required to use this:
# - DOCBUILD_BOILERPLATE_S3_BUCKET
#     This defines the S3 bucket where a zipped copy of *this repo* is located.
#     Example value: "my-bucket-name-here"
# - DOCBUILD_BOILERPLATE_S3_KEY
#     This defines the S3 object key for the above-mentioned ZIP file.
#     Example value: /path/to/my/file.zip
# - DOCBUILD_CONTENT_S3_BUCKET
#     This defines the S3 bucket where a zipped copy of the repo to build is located.
#     (Can be the same bucket.)
#     Example value: "my-bucket-name-here"
# - DOCBUILD_CONTENT_S3_KEY
#     This is the key where a ZIP of your content repo is located.
#     Example value: "/path/to/my/other_file.zip"
# - DOCBUILD_DESTINATION_S3_BUCKET
#     Bucket to upload the generated content to.
# - DOCBUILD_DESTINATION_S3_KEY
#     S3 key prefix for the generated content.
# - GITHUB_REPOSITORY
#     Easy identifier of the project that documentation is being built for.
#     Example: jim-jimmerson/foobar
#
# Structure:
#   <project repo>   -- Content repo is unzipped here.
#   docs/boilerplate -- Boilerplate repo is unzipped here.
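# Example invocation (a sketch; the bucket names, keys, and script filename
# below are hypothetical placeholders, not values defined by this repo):
#
#   export DOCBUILD_BOILERPLATE_S3_BUCKET="my-bucket-name-here"
#   export DOCBUILD_BOILERPLATE_S3_KEY="path/to/boilerplate.zip"
#   export DOCBUILD_CONTENT_S3_BUCKET="my-bucket-name-here"
#   export DOCBUILD_CONTENT_S3_KEY="path/to/content.zip"
#   export DOCBUILD_DESTINATION_S3_BUCKET="my-docs-bucket"
#   export DOCBUILD_DESTINATION_S3_KEY="my-project/docs"
#   export GITHUB_REPOSITORY="jim-jimmerson/foobar"
#   ./local_docbuild.sh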
DL_DIR=$(mktemp -d)
WORKING_DIR=$(mktemp -d)
aws s3 cp s3://${DOCBUILD_BOILERPLATE_S3_BUCKET}/${DOCBUILD_BOILERPLATE_S3_KEY} ${DL_DIR}/boilerplate.zip
aws s3 cp s3://${DOCBUILD_CONTENT_S3_BUCKET}/${DOCBUILD_CONTENT_S3_KEY} ${DL_DIR}/content.zip

unzip ${DL_DIR}/content.zip -d ${WORKING_DIR}
rm -rf ${WORKING_DIR}/docs/boilerplate
unzip ${DL_DIR}/boilerplate.zip -d ${WORKING_DIR}/docs/boilerplate

cd ${WORKING_DIR}
./docs/boilerplate/.utils/generate_dynamic_content.sh
./docs/boilerplate/.utils/build_docs.sh

aws s3 sync ${WORKING_DIR} s3://${DOCBUILD_DESTINATION_S3_BUCKET}/${DOCBUILD_DESTINATION_S3_KEY}/ --cache-control max-age=0,no-cache,no-store,must-revalidate --acl public-read
@@ -0,0 +1,9 @@
#!/bin/bash
# Wrapper to generate parameter tables within the asciidoc workflow.
set -e
#sudo apt-get install pandoc -y
pip3 install -r docs/boilerplate/.utils/requirements.txt
echo "Gen tables"
python docs/boilerplate/.utils/generate_parameter_tables.py
echo "Gen metadata"
python docs/boilerplate/.utils/generate_metadata_attributes.py
@@ -0,0 +1,28 @@
#!/usr/bin/env python
import cfnlint.decode  # import the submodule explicitly so cfnlint.decode.decode resolves
from pathlib import Path


def get_cfn(filename):
    _decoded = cfnlint.decode.decode(filename, False)[0]
    return _decoded


def fetch_metadata():
    metadata_attributes = set()
    for yaml_cfn_file in Path('./templates').glob('*.template*'):
        template = get_cfn(Path(yaml_cfn_file))
        if not template:
            raise Exception(f"cfn-lint failed to load {yaml_cfn_file} without errors. Failure")
        _resources = template['Resources']
        for _resource in _resources.values():
            _type = _resource['Type'].lower()
            metadata_attributes.add(_type.split('::')[1])
            metadata_attributes.add(_type.replace('::', '_'))
    with open('docs/generated/services/metadata.adoc', 'w') as f:
        f.write('\n')
        for attr in sorted(metadata_attributes):
            f.write(f":template_{attr}:\n")


if __name__ == '__main__':
    fetch_metadata()
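For illustration (an assumed example, not output captured from this repo): per the split/replace logic above, a template whose Resources include an AWS::S3::Bucket would contribute these attribute lines to docs/generated/services/metadata.adoc:

:template_s3:
:template_aws_s3_bucket: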
@@ -0,0 +1,116 @@
#!/usr/bin/env python
import cfnlint.decode  # import the submodule explicitly so cfnlint.decode.decode resolves
from pathlib import Path


def get_cfn(filename):
    _decoded = cfnlint.decode.decode(filename, False)[0]
    return _decoded


def _generate_table_name_and_header(label_name):
    data = []
    data.append(f"\n.{label_name}")
    data.append('[width="100%",cols="16%,11%,73%",options="header",]')
    data.append("|===")
    data.append("|Parameter label (name) |Default value|Description")
    return "\n".join(data)


def _generate_per_label_table_entry(label, param, default, description):
    data = []
    if not label:
        label = "**NO_LABEL**"
    data.append(f"|{label}")
    data.append(f"(`{param}`)|`{default}`|{description}")
    return '\n'.join(data)


def just_pass():
    template_entrypoints = {}
    template_order = {}
    found_files_with_glob_pattern = False
    for yaml_cfn_file in Path('./templates').glob('*.template*'):
        found_files_with_glob_pattern = True
        print(f"Working on {yaml_cfn_file}")
        template = get_cfn(Path(yaml_cfn_file))
        if not template:
            raise Exception(f"cfn-lint failed to load {yaml_cfn_file} without errors. Failure")
        entrypoint = template.get('Metadata', {}).get('QuickStartDocumentation', {}).get('EntrypointName')
        if not entrypoint:
            print("- No documentation entrypoint found. Continuing.")
            continue
        order = template.get('Metadata', {}).get('QuickStartDocumentation', {}).get('Order')
        if not order:
            print("- No documentation order found. Assigning x.")
            order = 'x'
        _pf = Path(yaml_cfn_file).stem + ".adoc"
        p_file = f"docs/generated/parameters/{_pf}"
        template_entrypoints[p_file.split('/')[-1]] = entrypoint
        template_order[p_file.split('/')[-1]] = str(order)

        label_mappings = {}
        reverse_label_mappings = {}
        parameter_mappings = {}
        parameter_labels = {}
        no_groups = {}

        def determine_optional_value(param):
            optional = template['Metadata'].get('QuickStartDocumentation', {}).get('OptionalParameters')
            if optional and (param in optional):
                return '__Optional__'
            return '**__Requires input__**'

        for label in template['Metadata']['AWS::CloudFormation::Interface']['ParameterGroups']:
            label_name = label['Label']['default']
            label_params = label['Parameters']
            label_mappings[label_name] = label_params
            for ln in label_params:
                reverse_label_mappings[ln] = label_name

        for label_name, label_data in template['Metadata']['AWS::CloudFormation::Interface']['ParameterLabels'].items():
            parameter_labels[label_name] = label_data.get('default')

        for param_name, param_data in template['Parameters'].items():
            if param_data.get('Default') == '':
                param_data['Default'] = '**__Blank string__**'
            parameter_mappings[param_name] = param_data
            if not reverse_label_mappings.get(param_name):
                no_groups[param_name] = param_data

        adoc_data = ""
        for label_name, label_params in label_mappings.items():
            header = _generate_table_name_and_header(label_name)
            adoc_data += header

            for lparam in label_params:
                param_data = _generate_per_label_table_entry(
                    parameter_labels.get(lparam, ''),
                    lparam,
                    parameter_mappings[lparam].get('Default', determine_optional_value(lparam)),
                    parameter_mappings[lparam].get('Description', 'NO_DESCRIPTION')
                )
                adoc_data += param_data
            adoc_data += "\n|==="

        print(f"- Generating: {p_file}")
        with open(p_file, 'w') as p:
            p.write(adoc_data)
    if not found_files_with_glob_pattern:
        raise Exception("No files matching templates/*.template* were found. Unable to build documentation. Exiting.")
    if not template_entrypoints:
        raise Exception("No documentation entrypoints (Metadata => QuickStartDocumentation => EntrypointName) were found. Unable to build documentation. Exiting.")
    with open('docs/generated/parameters/index.adoc', 'w') as f:
        for template_file, order in sorted(template_order.items(), key=lambda x: x[1]):
            print(f"Index - {order} - {template_entrypoints.get(template_file)} - {template_file}")
            f.write(f"\n=== {template_entrypoints.get(template_file)}\n")
            f.write(f"include::{template_file}[]\n")


if __name__ == '__main__':
    print("---")
    print("> Milton, don't be greedy. Let's pass it along and make sure everyone gets a piece.")
    print("> Can I keep a piece, because last time I was told that...")
    print("> Just pass.")
    print("---")
    just_pass()
    print("---")
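For context, a sketch of the template metadata this script reads. The key names (QuickStartDocumentation, EntrypointName, Order, OptionalParameters, and the AWS::CloudFormation::Interface block) come from the lookups in the code above; the values are illustrative only:

Metadata:
  QuickStartDocumentation:
    EntrypointName: "Launch into a new VPC"
    Order: 1
    OptionalParameters:
      - KeyPairName
  AWS::CloudFormation::Interface:
    ParameterGroups:
      - Label:
          default: Network configuration
        Parameters:
          - KeyPairName
    ParameterLabels:
      KeyPairName:
        default: Key pair name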
@@ -0,0 +1,32 @@
#!/usr/bin/env python
import sys
import json

fn = sys.argv[1]

severity_to_icon = {
    'Warning': u'\u270B',
    'Error': u'\u274C'
}

results = {}

with open(fn) as f:
    x = json.load(f)

for rule_match in x:
    _fn = rule_match['Filename']
    _sl = rule_match['Location']['Start']['LineNumber']
    _el = rule_match['Location']['End']['LineNumber']
    _msg = rule_match['Message']
    _lvl = rule_match['Level']
    _rid = rule_match['Rule']['Id']
    try:
        results[_fn][_sl] = (_lvl, _rid, _msg)
    except KeyError:
        results[_fn] = {_sl: (_lvl, _rid, _msg)}

for k in sorted(results.keys()):
    print('\n{}'.format(k))
    for l, v in results[k].items():
        print("- {} [{}] ({}) | Line: {} - {}".format(severity_to_icon.get(v[0]), v[0].upper(), v[1], l, v[2]))
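The input file is cfn-lint's --format json output; a minimal sketch of one match record, trimmed to just the fields this script reads (filename, rule id, and values are illustrative):

[
  {
    "Filename": "templates/workload.template.yaml",
    "Level": "Warning",
    "Message": "Example finding message",
    "Rule": {"Id": "W1001"},
    "Location": {
      "Start": {"LineNumber": 12},
      "End": {"LineNumber": 12}
    }
  }
]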
@@ -0,0 +1,5 @@
requests
cfn-lint
pathlib
datetime
ruamel.yaml