Checking in code for rt 7.27.10 and xray 3.35.0 versions

This commit is contained in:
Giridharan Ramasamy
2021-12-02 21:29:06 +05:30
parent dfb3ee0eda
commit 57390ace44
389 changed files with 29988 additions and 0 deletions

View File

@@ -0,0 +1,24 @@
# Copyright 2020-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Amazon Software License (the "License"). You may not use this file except in compliance with the License.
# A copy of the License is located at
#
# http://aws.amazon.com/asl/
#
# or in the "license" file accompanying this file.
# This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, express or implied.
# See the License for the specific language governing permissions and limitations under the License.
FROM asciidoctor/docker-asciidoctor

# Tooling needed by the docs-build scripts: pip/python for the generator
# scripts, zip for gh-pages archives, rsync for copying images.
RUN apk add --no-cache \
        py3-pip \
        python3 \
        zip \
        rsync

# NOTE(review): REPO/BRANCH look like placeholders — confirm they are
# substituted with real values before this image is built.
RUN wget https://raw.githubusercontent.com/REPO/BRANCH/.utils/requirements.txt -O /tmp/req.txt

# Point the unversioned `pip`/`python` names at the py3 binaries, then
# install the AWS CLI and the docs-build requirements. Combined into one
# RUN to avoid four separate image layers.
RUN ln -sf /usr/bin/pip3 /usr/bin/pip \
    && ln -sf /usr/bin/python3 /usr/bin/python \
    && pip3 install awscli \
    && pip3 install -r /tmp/req.txt

ENTRYPOINT ["dockerd-entrypoint.sh"]

View File

@@ -0,0 +1,58 @@
#!/bin/bash
set -e

# Render docs/languages/docs-<lang>/index.adoc into index-<lang>.html at the
# repo root, once for every language directory under docs/languages/.
function build_language_docs(){
    local lang_dir lang
    for lang_dir in docs/languages/*/
    do
        # Trim the trailing slash, then take the text after the '-' in the
        # directory name (docs-en -> en).
        lang_dir=${lang_dir%*/}
        lang=$(echo ${lang_dir%*/} | awk -F'[-]' '{print $2}')
        asciidoctor --base-dir docs/languages/docs-${lang}/ --backend=html5 -o ../../../index-${lang}.html -w --failure-level ERROR --doctype=book -a toc2 ${ASCIIDOC_ATTRIBUTES} docs/languages/docs-${lang}/index.adoc
    done
}
# Switch the asciidoctor invocation into production mode by exporting the
# "-a production_build" attribute flag.
function _set_prod_asciidoc_attributes(){
    ASCIIDOC_ATTRIBUTES="-a production_build"
    export ASCIIDOC_ATTRIBUTES
}
# Render docs/boilerplate/index.adoc to ${HTML_FILE} (default index.html)
# one directory above the docs/ base dir, passing along any attributes
# collected in ${ASCIIDOC_ATTRIBUTES} (deliberately unquoted so "-a attr"
# splits into two arguments).
function build_docs_with_asciidoc_attributes(){
# Suppress command tracing while the long asciidoctor command runs.
set +x
asciidoctor --base-dir docs/ --backend=html5 -o ../${HTML_FILE:-index.html} -w --failure-level ERROR --doctype=book -a toc2 ${ASCIIDOC_ATTRIBUTES} docs/boilerplate/index.adoc
# NOTE(review): this re-enables xtrace unconditionally, even when the caller
# never turned it on — confirm that is intended.
set -x
}
# Build a production-rendered example document (prod_example.html) so that
# preview builds also show what the production output will look like.
function build_prod_example_docs(){
    HTML_FILE="prod_example.html"
    export HTML_FILE
    _set_prod_asciidoc_attributes
    build_docs_with_asciidoc_attributes
}
# --- main -------------------------------------------------------------------
# No extra attributes by default; production builds add "-a production_build".
ASCIIDOC_ATTRIBUTES=""
# Owner half of "owner/repo" from the GitHub-provided GITHUB_REPOSITORY.
GITHUB_REPO_OWNER=$(echo ${GITHUB_REPOSITORY} | cut -d '/' -f 1)
# Stage repo-provided images next to the generated HTML.
if [ -d docs/images ]; then
    rsync -avP docs/images/ images/
fi
# Remove any stale, previously generated output.
if [ -f docs/index.html ]; then
    rm docs/index.html
fi
# Only builds in the aws-quickstart org can be production builds; pushes to
# master/main are production, anything else is marked a preview build.
if [ "${GITHUB_REPO_OWNER}" == "aws-quickstart" ]; then
    cp docs/boilerplate/.css/AWS-Logo.svg images/
    if [ "${GITHUB_REF}" == "refs/heads/master" ] || [ "${GITHUB_REF}" == "refs/heads/main" ]; then
        _set_prod_asciidoc_attributes
    else
        PREVIEW_BUILD="true"
    fi
fi
build_docs_with_asciidoc_attributes
# Per-language builds only when translation directories exist.
if [ -d docs/languages ]; then
    build_language_docs
fi
# Preview builds additionally render a production-styled example document.
if [ "${PREVIEW_BUILD}" == "true" ]; then
    build_prod_example_docs
fi

View File

@@ -0,0 +1,23 @@
#!/bin/bash
set -e
# Run cfn-lint over templates/, optionally extended with a repo-local custom
# rule set, pretty-print the JSON findings, and exit with cfn-lint's own
# exit code so CI still fails on findings.
if [[ -d "${GITHUB_WORKSPACE}/team_custom_rules" ]]; then
    # Install requirements for custom rules, plus the rules themselves.
    cd ${GITHUB_WORKSPACE}/team_custom_rules
    pip install -r requirements.txt
    python setup.py install
    cd ${GITHUB_WORKSPACE}
    # back to normal
    CFNLINT_ARGS="-a ${GITHUB_WORKSPACE}/team_custom_rules/qs_cfn_lint_rules"
    echo "Using custom ruleset"
else
    echo "NOT using custom ruleset"
fi
CFNLINT_JSON_OUT=$(mktemp)
# Capture cfn-lint's exit code without tripping `set -e`. Warnings are
# ignored (-i W); output goes to a temp file for pretty-printing below.
set +e
cfn-lint ${CFNLINT_ARGS} -i W --templates templates/* --format json > ${CFNLINT_JSON_OUT}
CFNLINT_EXIT_CODE=$?
set -e
# Human-readable rendering of the JSON findings.
python docs/boilerplate/.utils/pretty_cfnlint_output.py ${CFNLINT_JSON_OUT}
exit ${CFNLINT_EXIT_CODE}

View File

@@ -0,0 +1,39 @@
#!/bin/bash
set -eu
# Stage the generated documentation for publishing: add everything (images
# and the built index.html), drop the templates/ directory from the
# published tree, and commit.
function common_steps(){
    git add -A
    git add images
    git add index.html
    git rm -r --force templates
    git commit -a -m "Updating documentation"
    git status
}
# Commit the generated docs and force-push them to the gh-pages branch of
# the repo named by GITHUB_REPOSITORY, authenticating via GITHUB_TOKEN.
function github_actions_prod(){
    repo_uri="https://x-access-token:${GITHUB_TOKEN}@github.com/${GITHUB_REPOSITORY}.git"
    # NOTE(review): remote_name and main_branch are computed but not used
    # below — possibly leftovers from an earlier revision.
    remote_name="doc-upstream"
    main_branch=$(basename "$(git symbolic-ref --short refs/remotes/origin/HEAD)")
    target_branch="gh-pages"
    cd "$GITHUB_WORKSPACE"
    ls -lah
    git config --local user.email "action@github.com"
    git config --local user.name "GitHub Action"
    common_steps
    git remote set-url origin ${repo_uri}
    # Push only when there is actually something new to publish.
    git status | grep "nothing to commit, working tree clean" || git push origin HEAD:${target_branch} --force
}
# Dead code retained from an earlier revision:
#if [ $? -ne 0 ]; then
#  echo "nothing to commit"
#  exit 0
#fi
# Dispatch: DOCBUILD_PROD=true means a local/prod build that only stages and
# commits; anything else runs the full GitHub Actions publish path.
if [ "${DOCBUILD_PROD:-x}" == "true" ]; then
    common_steps
else
    github_actions_prod
    # Fix: repo_uri/target_branch are only defined inside github_actions_prod.
    # With `set -eu` active, referencing them unconditionally after the
    # if/else aborted the DOCBUILD_PROD=true path, so the retry push now
    # lives on this branch only.
    git remote set-url origin "${repo_uri}"
    git status | grep "Your branch is up to date" || git push origin HEAD:"${target_branch}" --force
fi

View File

@@ -0,0 +1,11 @@
#!/bin/bash
# This script is intentionally disabled: it exits immediately. The original
# implementation (enabling GitHub Pages on the repo via the hub CLI) is kept
# below, commented out, for reference.
exit 0
# set -e
# curl -fsSL https://github.com/github/hub/raw/master/script/get | bash -s 2.14.1
# sudo apt-get install jq -y
# PAGES_STATUS=$(bin/hub api repos/${GITHUB_REPOSITORY}/pages | jq '.status' | sed -e 's/"//g')
# if [ "${PAGES_STATUS}" != "null" ]; then
# exit 0
# fi
# bin/hub api -H Accept:application/vnd.github.switcheroo-preview+json repos/${GITHUB_REPOSITORY}/pages -f {"source":{"branch":"gh-pages"}}

View File

@@ -0,0 +1,32 @@
#!/bin/bash -e
# Build (or rebuild) the gh-pages branch: start from the default branch's
# docs/ and templates/, re-add the documentation-base submodule at the commit
# the default branch pins, and leave the tree ready for publishing.
set -x
git remote update
git fetch
set +e
git remote set-head origin --auto
default_branch=$(basename "$(git symbolic-ref --short refs/remotes/origin/HEAD)")
# Submodule commit that the current branch pins for docs/boilerplate.
doc_commit_id=$(git submodule | grep docs/boilerplate | cut -d - -f 2 | cut -f 1 -d " ")
# Does a gh-pages branch already exist on origin?
git rev-parse --verify origin/gh-pages
CHECK_BRANCH=$?
set -e
if [[ $CHECK_BRANCH -ne 0 ]];then
    git checkout -b gh-pages
    git push origin gh-pages
else
    git checkout gh-pages
    # git checkout --track origin/gh-pages
fi
# Empty the branch, then restore docs/ from the default branch.
git rm -rf .
touch .gitmodules
git restore -s origin/${default_branch} docs
set +e
# Drop any checked-in copy of the boilerplate before re-adding it as a
# submodule. (Fixed: the -r flag was previously passed twice.)
git rm -r docs/boilerplate
rm -rf docs/boilerplate
set -e
git restore -s origin/${default_branch} templates
git submodule add https://github.com/aws-quickstart/quickstart-documentation-base-common.git docs/boilerplate
cd docs/boilerplate
git checkout "${doc_commit_id}"
cd ../../
rm configure_git_env.sh
mv docs/images images

View File

@@ -0,0 +1,88 @@
#!/bin/bash -e
# # Work in progress.
# exit 1
# Adds Help and Second Language options (-h | -l)
while getopts hl option
do
    case "${option}" in
        h )
            # Fixed "langauge" typos in the help text.
            echo "Usage:"
            echo "Run './create_repo_structure.sh' with no options for English language only."
            echo "Run './create_repo_structure.sh -l' to add files for second language."
            echo " "
            echo "(-h) Show usage and brief help"
            echo "(-l) Use to add files for second language for translation"
            exit 0
            ;;
        l )
            CREATESECONDLANG="create_second_lang";;
        * )
            # Fixed garbled wording (was: 'this is in an invalid flag ... see
            # "-h" ...').
            echo "Invalid flag. Please see -h for help on valid flags"
            # NOTE(review): exiting 0 on an invalid flag hides the error from
            # callers — consider exit 1.
            exit 0
            ;;
    esac
done
#Creates Standard English directory structure to the repo.

# Lay down the standard English-only documentation skeleton and copy the
# boilerplate starter content into it. The *_DIR variables stay global on
# purpose: create_second_lang reuses BOILERPLATE_DIR afterwards.
function create_repo() {
    BOILERPLATE_DIR="docs/boilerplate"
    GENERATED_DIR="docs/generated"
    SPECIFIC_DIR="docs/partner_editable"

    # Directory skeleton.
    local subdir
    for subdir in parameters regions services; do
        mkdir -p "${GENERATED_DIR}/${subdir}"
    done
    mkdir -p ${SPECIFIC_DIR}
    mkdir -p docs/images
    mkdir -p .github/workflows

    # Starter content from the boilerplate submodule.
    rsync -avP ${BOILERPLATE_DIR}/.images/ docs/images/
    rsync -avP ${BOILERPLATE_DIR}/.specific/ ${SPECIFIC_DIR}

    # Enable the docs-build workflow.
    cp ${BOILERPLATE_DIR}/.actions/main-docs-build.yml .github/workflows/

    # Placeholder files so the generated-content includes resolve.
    local placeholder
    for placeholder in parameters/index.adoc regions/index.adoc services/index.adoc services/metadata.adoc; do
        echo "// placeholder" > "${GENERATED_DIR}/${placeholder}"
    done
}
#Creates standard English and second language directory structures to the repo.

# Create the English skeleton plus a docs-<lang> tree holding a translatable
# copy of the boilerplate. Prompts interactively for a 2-character language
# code. Relies on create_repo having set BOILERPLATE_DIR.
function create_second_lang() {
    # Fixed duplicated word in the prompt ("enter enter").
    read -p "Please enter 2 character language code: " LANG_CODE
    create_repo
    LANG_DIR="docs/languages"
    SPECIFIC_LANG_DIR="docs/languages/docs-${LANG_CODE}"
    TRANSLATE_ONLY="docs/languages/docs-${LANG_CODE}/translate-only"
    LANG_FOLDER="docs-${LANG_CODE}"
    mkdir -p ${LANG_DIR}
    mkdir -p ${SPECIFIC_LANG_DIR}
    mkdir -p ${TRANSLATE_ONLY}
    # Editable content plus the boilerplate pages that need translation; the
    # .lang.adoc variants replace their English counterparts.
    rsync -avP ${BOILERPLATE_DIR}/.specific/ ${SPECIFIC_LANG_DIR}/partner_editable
    rsync -avP ${BOILERPLATE_DIR}/*.adoc ${TRANSLATE_ONLY} --exclude *.lang.adoc --exclude index.adoc --exclude _layout_cfn.adoc --exclude planning_deployment.adoc
    rsync -avP ${BOILERPLATE_DIR}/_layout_cfn.lang.adoc ${SPECIFIC_LANG_DIR}/_layout_cfn.adoc
    rsync -avP ${BOILERPLATE_DIR}/index.lang.adoc ${SPECIFIC_LANG_DIR}/index.adoc
    rsync -avP ${BOILERPLATE_DIR}/planning_deployment.lang.adoc ${TRANSLATE_ONLY}/planning_deployment.adoc
    rsync -avP ${BOILERPLATE_DIR}/index-docinfo-footer.html ${TRANSLATE_ONLY}
    rsync -avP ${BOILERPLATE_DIR}/LICENSE ${TRANSLATE_ONLY}
    # NOTE(review): `sed -i ""` is the BSD/macOS form; GNU sed would treat ""
    # as an input filename. Confirm this script only runs on macOS.
    sed -i "" "s/docs-lang-code/${LANG_FOLDER}/g" ${SPECIFIC_LANG_DIR}/index.adoc
}
# Runs exactly once: the loop always reaches the unconditional `exit`.
while true
do
    #clear
    # OPTIND == 1 means no flags were consumed by getopts above, so build the
    # English-only skeleton directly.
    if [ $OPTIND -eq 1 ]; then create_repo; fi
    shift $((OPTIND-1))
    #printf "$# non-option arguments"
    # Expands to `create_second_lang` when -l was given, otherwise empty
    # (deliberately unquoted so an empty value is a no-op).
    $CREATESECONDLANG
    touch .nojekyll
    # Stage everything the skeleton created.
    git add -A docs/
    git add .github/
    git add .nojekyll
    exit
done

View File

@@ -0,0 +1,84 @@
#!/bin/bash -e
# This file is meant to be the functional equalivent of the github actions workflow.
#
# // The following env vars are required to use this.
# - DOCBUILD_BOILERPLATE_S3_BUCKET
# This defines the S3 bucket where a zip'd copy of *this repo* is located.
# Example Value: "my-bucket-name-here"
# - DOCBUILD_BOILERPLATE_S3_KEY
# This defines the S3 Object key for the above-mentioned ZIP file.
# Example Value: /path/to/my/file.zip
# - DOCBUILD_CONTENT_S3_BUCKET
# This defines the S3 bucket where a zip'd copy of repo to build is located.
# (can be the same bucket)
# Example value: "my-bucket-name-here"
# - DOCBUILD_CONTENT_S3_KEY
# This is the key where a ZIP of your content repo is located.
# Example Value: "/path/to/my/other_file.zip"
# - DOCBUILD_DESTINATION_S3_BUCKET
# Bucket to upload the generated content to.
# - DOCBUILD_DESTINATION_S3_KEY
# S3 Key prefix for the generated content
# - GITHUB_REPOSITORY
# Easy identifier of the project that documentation is being built for.
# - EX: jim-jimmerson/foobar
#
#
#
# Structure
# <project repo> --- Content repo is unzipped.
# docs/boilerplate -- Boilerplate repo is unzipped here.
# Publish a preview: mirror the assembled site to the destination S3 prefix,
# deleting stale objects and disabling caching so reviewers always see the
# latest build.
function upload_preview_content(){
    aws s3 sync --delete --cache-control max-age=0,no-cache,no-store,must-revalidate --acl bucket-owner-full-control "${WORKING_DIR}" "${DOCBUILD_DESTINATION_S3}"
}
# Zip the current working tree and upload it as gh-pages.zip for the
# production publishing flow.
function create_upload_ghpages_branch_archive(){
    local archive="${DL_DIR}/gh-pages.zip"
    zip "${archive}" -r .
    aws s3 cp "${archive}" "${DOCBUILD_DESTINATION_S3}"
}
# --- main -------------------------------------------------------------------
# Scratch areas: downloads, and the assembled site.
DL_DIR=$(mktemp -d)
WORKING_DIR=$(mktemp -d)
echo "${DOCBUILD_BOILERPLATE_S3}"
echo "${DOCBUILD_CONTENT_S3}"
# Fetch both archives: the content repo and the boilerplate repo.
aws s3 cp ${DOCBUILD_BOILERPLATE_S3} ${DL_DIR}/boilerplate.zip
aws s3 cp ${DOCBUILD_CONTENT_S3} ${DL_DIR}/content.zip
unzip ${DL_DIR}/content.zip -d ${WORKING_DIR}
# Replace any checked-in boilerplate with the downloaded copy.
rm -rf ${WORKING_DIR}/docs/boilerplate
unzip ${DL_DIR}/boilerplate.zip -d ${WORKING_DIR}/docs/boilerplate || exit 150
cd ${WORKING_DIR}
# Submodule commit the content repo pins for docs/boilerplate; the leading
# '+'/'-' status markers from `git submodule` are stripped.
doc_commit_id=$(git submodule | grep docs/boilerplate | awk '{print $1}' | sed -e 's/^+//g' -e 's/^-//g')
echo "${doc_commit_id}"
if [ -z "${doc_commit_id}" ]; then
    echo "docs/boilerplate submodule not found. exiting"
    exit 150
fi
cd docs/boilerplate
echo "Checking out boilerplate at commit ID: ${doc_commit_id}"
git checkout "${doc_commit_id}"
cd ../../
# Generate parameter tables/metadata and build the HTML when templates exist.
if [ -d templates/ ]; then
    ./docs/boilerplate/.utils/generate_dynamic_content.sh
    set -x
    ./docs/boilerplate/.utils/build_docs.sh
    set +x
fi
if [ ! -f index.html ]; then
    exit 1
fi
# Append build provenance (repo commit + submodule state) to the generated
# page as an HTML comment.
tmpfile=$(mktemp)
echo -e "repo commit:\n$(git -P log -1 | grep 'commit' | awk '{print $2}')\n\nsubmodule config:" >> ${tmpfile}
git submodule >> ${tmpfile}
echo -e "\n<!--\n$(cat ${tmpfile})\n-->" >> index.html
# Production: archive the site for gh-pages; otherwise sync it as a preview.
if [ "${DOCBUILD_PROD}" == "true" ]; then
    create_upload_ghpages_branch_archive
else
    upload_preview_content
fi

View File

@@ -0,0 +1,14 @@
#!/bin/bash
# Wrapper to generate parameter tables within asciidoc workflow.
set -e
#sudo apt-get install pandoc -y

# Dependencies for the generator scripts invoked below.
pip3 install -r docs/boilerplate/.utils/requirements.txt

# Repos can opt out of parameter-table generation by declaring the
# `:no_parameters:` attribute in their settings file. Capture grep's exit
# code without tripping `set -e`. (egrep is obsolescent; use grep -E.)
set +e
grep -E -qi '^:no_parameters:$' docs/partner_editable/_settings.adoc; EC=$?
set -e
if [ ${EC} -ne 0 ]; then
    echo "Gen tables"
    python docs/boilerplate/.utils/generate_parameter_tables.py
fi

# CDK quick starts (`:cdk_qs:`) skip metadata generation.
grep -E -qi '^:cdk_qs:$' docs/partner_editable/_settings.adoc || ( echo "Gen metadata"; python docs/boilerplate/.utils/generate_metadata_attributes.py )

View File

@@ -0,0 +1,42 @@
#!/usr/bin/env python
import io
import cfnlint
import sys
from pathlib import Path
# Extra documentation attributes keyed by attribute name. An attribute is
# emitted when any of its listed resource types (lower-cased, with '::'
# replaced by '_') was seen in a template — see fetch_metadata().
custom_attributes = {
    'deterministic_ec2_instances':[
        'aws_ec2_instance',
        'aws_ec2_host',
        'aws_ec2fleet',
        'aws_autoscaling_autoscalinggroup'
    ]
}
def get_cfn(filename):
    """Decode a CloudFormation template via cfn-lint's decoder.

    Raises:
        Exception: when the template cannot be decoded without errors.
    """
    template, _issues = cfnlint.decode.decode(filename)
    if not template:
        raise Exception(f"cfn-lint failed to load {filename} without errors. Failure")
    return template
def fetch_metadata():
    """Scan templates/*.template* and write docs/generated/services/metadata.adoc.

    For every resource type two entries are recorded: the middle segment of
    the lower-cased type (e.g. 'aws::ec2::instance' -> 'ec2') and the full
    lower-cased type with '::' replaced by '_'. custom_attributes entries are
    added when any of their qualifying resource types was seen.
    """
    metadata_attributes = set()
    for yaml_cfn_file in Path('./templates').glob('*.template*'):
        template = get_cfn(Path(yaml_cfn_file))
        _resources = template['Resources']
        for _resource in _resources.values():
            _type = _resource['Type'].lower()
            metadata_attributes.add(_type.split('::')[1])
            metadata_attributes.add(_type.replace('::','_'))
    for attribute, qualifying_conditions in custom_attributes.items():
        for qc in qualifying_conditions:
            if qc in metadata_attributes:
                metadata_attributes.add(attribute)
                break
    # Emit one ':template_<attr>:' asciidoc attribute per line, sorted.
    with open('docs/generated/services/metadata.adoc', 'w') as f:
        f.write('\n')
        for attr in sorted(metadata_attributes):
            f.write(f":template_{attr}:\n")
# Script entrypoint: regenerate the metadata attribute file.
if __name__ == '__main__':
    fetch_metadata()

View File

@@ -0,0 +1,132 @@
#!/usr/bin/env python
import io
import cfnlint
import datetime
import sys
import os
from pathlib import Path, PosixPath
def get_cfn(filename):
    """Decode a CloudFormation template with cfn-lint; exit on parse failure.

    Args:
        filename: path of the template file to decode.
    Returns:
        The decoded template mapping.
    """
    _decoded, _issues = cfnlint.decode.decode(filename)
    if not _decoded:
        # Fix: include the failing template's path in the message (the
        # f-string previously contained no placeholder and printed
        # "(unknown)").
        print(f"Template: {filename} has an error. Run cfn-lint to determine the issue")
        sys.exit(1)
    return _decoded
def _generate_table_name_and_header(label_name):
data = []
data.append(f"\n.{label_name}")
data.append('[width="100%",cols="16%,11%,73%",options="header",]')
data.append("|===")
data.append("|Parameter label (name) |Default value|Description")
return "\n".join(data)
def _generate_per_label_table_entry(label, param, default, description):
data = []
if not label:
label = "**NO_LABEL**"
data.append(f"|{label}")
data.append(f"(`{param}`)|`{default}`|{description}")
return '\n'.join(data)
def _determine_file_list():
template_files = set()
if os.path.exists('./templates/.filename_standard_exception.txt'):
with open('./templates/.filename_standard_exception.txt') as f:
data = f.readlines()
for fn in data:
template_files.add(PosixPath(f"templates/{fn.strip()}"))
for yaml_cfn_file in Path('./templates').glob('*.template*'):
template_files.add(yaml_cfn_file)
return template_files
def just_pass():
    """Generate one asciidoc parameter-table file per template, plus an index.

    For each template from _determine_file_list() that declares
    Metadata => QuickStartDocumentation => EntrypointName, writes
    docs/generated/parameters/<stem>.adoc containing one table per
    parameter group, then writes an index.adoc that includes them in the
    order declared under QuickStartDocumentation => Order.
    """
    template_entrypoints = {}
    template_order = {}
    found_files_with_glob_pattern=False
    for yaml_cfn_file in _determine_file_list():
        found_files_with_glob_pattern=True
        print(f"Working on {yaml_cfn_file}")
        template = get_cfn(Path(yaml_cfn_file))
        if not template:
            raise Exception(f"cfn-lint failed to load {yaml_cfn_file} without errors. Failure")
        # Templates without an entrypoint name are skipped entirely.
        entrypoint = template.get('Metadata', {}).get('QuickStartDocumentation', {}).get('EntrypointName')
        if not entrypoint:
            print(f"- No documentation entrypoint found. Continuing.")
            continue
        # 'x' sorts after digit-leading order strings, pushing unordered
        # templates to the end of the index.
        order = template.get('Metadata',{}).get('QuickStartDocumentation',{}).get('Order')
        if not order:
            print(f"- No documentation order found. Assigning x.")
            order = 'x'
        _pf = Path(yaml_cfn_file).stem + ".adoc"
        p_file = f"docs/generated/parameters/{_pf}"
        template_entrypoints[p_file.split('/')[-1]] = entrypoint
        template_order[p_file.split('/')[-1]] = str(order)
        label_mappings = {}
        reverse_label_mappings = {}
        parameter_mappings = {}
        parameter_labels = {}
        no_groups = {}
        def determine_optional_value(param):
            # Parameters listed under OptionalParameters render as optional;
            # all other parameters without a Default require input.
            optional = template['Metadata'].get('QuickStartDocumentation', {}).get('OptionalParameters')
            if optional and (param in optional):
                return '__Optional__'
            return '**__Requires input__**'
        # Map each parameter-group label to its parameters, and each
        # parameter back to its group label.
        for label in template['Metadata']['AWS::CloudFormation::Interface']['ParameterGroups']:
            label_name = label['Label']['default']
            label_params = label['Parameters']
            label_mappings[label_name] = label_params
            for ln in label_params:
                reverse_label_mappings[ln] = label_name
        for label_name, label_data in template['Metadata']['AWS::CloudFormation::Interface']['ParameterLabels'].items():
            parameter_labels[label_name] = label_data.get('default')
        for param_name, param_data in template['Parameters'].items():
            # Render an explicitly-empty default visibly.
            if param_data.get('Default') == '':
                param_data['Default'] = '**__Blank string__**'
            parameter_mappings[param_name] = param_data
            if not reverse_label_mappings.get(param_name):
                # Parameters not assigned to any group (collected but not
                # rendered below).
                no_groups[param_name] = param_data
        # Emit one table per parameter group.
        adoc_data = ""
        for label_name, label_params in label_mappings.items():
            header = _generate_table_name_and_header(label_name)
            adoc_data += header
            for lparam in label_params:
                param_data = _generate_per_label_table_entry(
                    parameter_labels.get(lparam, ''),
                    lparam,
                    parameter_mappings[lparam].get('Default', determine_optional_value(lparam)),
                    parameter_mappings[lparam].get('Description', 'NO_DESCRIPTION')
                )
                adoc_data += param_data
            adoc_data += "\n|==="
        print(f"- Generating: {p_file}")
        with open (p_file, 'w') as p:
            p.write(adoc_data)
    if not found_files_with_glob_pattern:
        raise Exception("No files matching templates/*.template.(json|yaml|yml) were found. Unable to build documentation. Exiting.")
    if not template_entrypoints:
        raise Exception("No documentation entrypoints (Metadata => QuickStartDocumentation => EntrypointName) were found. Unable to build documentation. Exiting.")
    # Index file, sorted by the order strings recorded above.
    with open('docs/generated/parameters/index.adoc', 'w') as f:
        for template_file, order in sorted(template_order.items(), key=lambda x: x[1]):
            print (f"Index - {order} - {template_entrypoints.get(template_file)} - {template_file}")
            f.write(f"\n=== {template_entrypoints.get(template_file)}\n")
            f.write(f"include::{template_file}[]\n")
# Script entrypoint. (The quoted dialogue is an Office Space joke that
# matches the just_pass() function name.)
if __name__ == '__main__':
    print("---")
    print("> Milton, don't be greedy. Let's pass it along and make sure everyone gets a piece.")
    print("> Can I keep a piece, because last time I was told that...")
    print("> Just pass.")
    print("---")
    just_pass()
    print("---")

View File

@@ -0,0 +1,32 @@
#!/usr/bin/env python
"""Pretty-print `cfn-lint --format json` output, grouped by template file.

Usage: pretty_cfnlint_output.py <cfn-lint-json-file>
"""
import sys
import json

# JSON results file produced by cfn-lint (first CLI argument).
fn = sys.argv[1]
# Unicode icon shown per severity level.
severity_to_icon = {
    'Warning':u'\u270B',
    'Error':u'\u274C'
}
# filename -> {start line -> (level, rule id, message)}
results = {}
with open(fn) as f:
    x = json.load(f)
for rule_match in x:
    _fn = rule_match['Filename']
    _sl = rule_match['Location']['Start']['LineNumber']
    _el = rule_match['Location']['End']['LineNumber']  # read but currently unused
    _msg = rule_match['Message']
    _lvl = rule_match['Level']
    _rid = rule_match['Rule']['Id']
    # setdefault replaces the try/except KeyError idiom; note that matches
    # sharing a file and start line still overwrite each other, as before.
    results.setdefault(_fn, {})[_sl] = (_lvl, _rid, _msg)
for k in sorted(results.keys()):
    print('\n{}'.format(k))
    for l, v in results[k].items():
        print("- {} [{}] ({}) | Line: {} - {}".format(severity_to_icon.get(v[0]), v[0].upper(), v[1], l, v[2]))

View File

@@ -0,0 +1,5 @@
requests
cfn-lint >= 0.39.0
pathlib
datetime
ruamel.yaml