cf.py
import os
import sys

import hcl2


def update_modules_in_file(file_path, new_source_base):
    """Rewrite every module block in a .tf file to point at new_source_base."""
    with open(file_path, "r") as f:
        original_content = f.read()
        f.seek(0)
        # Parse with python-hcl2 first so we fail fast on files that are not valid HCL
        hcl2.load(f)

    new_content = ""
    lines = original_content.splitlines()
    inside_module = False
    buffer = []

    for line in lines:
        if line.strip().startswith("module "):
            inside_module = True
            buffer = [line]
        elif inside_module:
            buffer.append(line)
            if "}" in line:
                inside_module = False
                original_block = "\n".join(buffer)
                modified_block = transform_module_block(original_block, new_source_base)
                new_content += modified_block + "\n"
        else:
            new_content += line + "\n"

    with open(file_path, "w") as f:
        f.write(new_content.strip() + "\n")


def transform_module_block(block, new_source_base):
    """Point the block's source at new_source_base, folding the version into the URL."""
    source_line = ""
    version_line = ""
    new_lines = []

    for line in block.splitlines():
        stripped = line.strip()
        if stripped.startswith("source"):
            source_line = stripped.split("=")[1].strip().strip('"')
            new_lines.append(line)  # keep the line in place so it can be replaced below
        elif stripped.startswith("version"):
            version_line = stripped.split("=")[1].strip().strip('"')  # dropped; folded into source
        else:
            new_lines.append(line)

    if source_line and version_line:
        updated_source = f'  source = "{new_source_base}?version={version_line}"'
    elif source_line:
        updated_source = f'  source = "{new_source_base}"'
    else:
        return block  # no source attribute, nothing to change

    # Replace the original source line with the rewritten one
    new_lines = [updated_source if l.strip().startswith("source") else l for l in new_lines]
    return "\n".join(new_lines)


def process_directory(path, new_source_base):
    """Walk path and rewrite module sources in every .tf file found."""
    for root, _, files in os.walk(path):
        for file in files:
            if file.endswith(".tf"):
                full_path = os.path.join(root, file)
                print(f"Processing: {full_path}")
                update_modules_in_file(full_path, new_source_base)


if __name__ == "__main__":
    if len(sys.argv) != 3:
        print("Usage: python transform_modules.py <directory_path> <new_source_base>")
        sys.exit(1)
    directory = sys.argv[1]
    new_source = sys.argv[2]
    process_directory(directory, new_source)
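A minimal sketch of what the rewrite does to a single module block, assuming transform_module_block from cf.py above is in scope; the module name and registry address are illustrative placeholders, not values from the original gist.

sample_block = '''module "network" {
  source  = "terraform-aws-modules/vpc/aws"
  version = "5.0.0"
}'''

print(transform_module_block(sample_block, "registry.example.com/org/network/aws"))
# Expected output:
# module "network" {
#   source = "registry.example.com/org/network/aws?version=5.0.0"
# }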
# List pods whose containers were last terminated with reason OOMKilled
kubectl get pods --all-namespaces -o json | jq '.items[] | select(.status.containerStatuses[]?.lastState.terminated.reason == "OOMKilled") | {namespace: .metadata.namespace, pod: .metadata.name, reason: .status.containerStatuses[].lastState.terminated.reason}'
#!/bin/bash

# Function to update the kubeconfig file with the new token.
# The token sits on its own line under users[].user, so a single line-oriented
# sed expression cannot match across the name/user/token lines; yq edits the
# YAML structure directly instead.
update_kubeconfig() {
  local cluster_name="$1"
  local kubeconfig_file="$2"
  local new_token="$3"

  yq eval "
  (.users[] | select(.name == \"$cluster_name\").user.token) = \"$new_token\"
  " -i "$kubeconfig_file"

  echo "Token updated for cluster '$cluster_name' in the kubeconfig file."
}

# Ensure AWS CLI, jq, and yq are installed
if ! command -v aws &>/dev/null || ! command -v jq &>/dev/null || ! command -v yq &>/dev/null; then
  echo "Error: AWS CLI, jq, and yq must be installed to run this script."
  exit 1
fi

# Variables
CLUSTER_NAME="<clustername>"  # Replace with your cluster name
KUBECONFIG_FILE="config"      # Path to your kubeconfig file (adjust as needed)

# Retrieve the token using AWS CLI and jq
TOKEN=$(aws eks get-token --cluster-name "$CLUSTER_NAME" --output json | jq -r '.status.token')
if [[ -z "$TOKEN" ]]; then
  echo "Error: Failed to retrieve token for cluster '$CLUSTER_NAME'."
  exit 1
fi

# Update the kubeconfig file
update_kubeconfig "$CLUSTER_NAME" "$KUBECONFIG_FILE" "$TOKEN"
----
# Bold changelog lines that start with "- Merge"
sed -i '/^-\sMerge/s/^-\s\(Merge.*\)/- **\1**/' "$file"
#!/bin/bash

# Desired new tag
new_tag="v4.1.0"

# Combine the existing tags with the new tag, sort them semver-aware, and pick
# the tag that comes immediately before the new one (a plain string comparison
# would mis-order tags such as v4.10.0 vs v4.2.0).
previous_tag=$({ git tag; echo "$new_tag"; } | sort -Vu | grep -Fx -B1 -- "$new_tag" | head -n 1)
[ "$previous_tag" = "$new_tag" ] && previous_tag=""

echo "Previous version before $new_tag is: $previous_tag"
name: Update Release Notes
on:
  push:
    branches:
      - main

jobs:
  update-release-notes:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout repository
        uses: actions/checkout@v3

      - name: Create release.md for testing
        run: |
          cat <<EOF > /tmp/release.md
          - Merge pull some changes here
          - chore: IP-1221 some data here
          - fix: ENG-3111 some other data here
          EOF

      - name: Process release.md
        run: |
          file="/tmp/release.md"
          # Bold the first line if it starts with "- Merge"
          sed -i '1{s/^-\sMerge\(.*\)/- **Merge\1**/}' "$file"
          # Convert {PROJECT}-{number} patterns into Markdown links
          sed -i -E 's/([A-Z]+-[0-9]+)/[\1](https:\/\/jira.com\/issue\/\1)/g' "$file"

      - name: Display updated release.md
        run: cat /tmp/release.md
# Print the nearest parent branch of the current branch
git show-branch | grep '*' | grep -v "$(git rev-parse --abbrev-ref HEAD)" | head -n1 | sed 's/.*\[\(.*\)\].*/\1/' | sed 's/[~^].*//'
- name: Find Parent Branch
  run: |
    echo "Current branch: $GITHUB_REF_NAME"
    BRANCH=$(git branch -r --contains $(git merge-base HEAD origin/main origin/develop) | grep -v "$GITHUB_REF_NAME" | head -n 1 | sed 's/origin\///')
    echo "Parent branch: $BRANCH"
    echo "PARENT_BRANCH=$BRANCH" >> $GITHUB_ENV
name: Create Release
on:
  push:
    branches:
      - 'release/*'   # Only runs on release/x.y branches

jobs:
  create-release:
    runs-on: ubuntu-latest
    steps:
      # 1. Check out the repository
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          fetch-depth: 0   # Needed so all tags are available

      # 2. Collect commit messages since the last tag
      - name: Get commit messages since last tag
        id: commit_messages
        run: |
          BRANCH_VERSION=$(git rev-parse --abbrev-ref HEAD | grep -oP 'release/\K[0-9]+\.[0-9]+')
          echo "Branch version: ${BRANCH_VERSION}"
          LATEST_TAG=$(git tag --list "v${BRANCH_VERSION}.*" | sort -V | tail -n 1)
          echo "Latest tag: ${LATEST_TAG}"
          LAST_TAG=$(git describe --tags --abbrev=0)
          echo "Last tag found: ${LAST_TAG}"
          MESSAGES=$(git log ${LAST_TAG}..HEAD --pretty=format:"- %s")
          echo "${MESSAGES}" > commit_messages.md
          echo "messages<<EOF" >> $GITHUB_ENV
          echo "${MESSAGES}" >> $GITHUB_ENV
          echo "EOF" >> $GITHUB_ENV

      # 3. Create the release with the commit messages
      - name: Create GitHub Release
        uses: ncipollo/release-action@v1
        with:
          tag: ${{ github.ref_name }}
          name: "Release ${{ github.ref_name }}"
          body: |
            🚀 **New changes in this version:**
            ${{ env.messages }}
          draft: false
          prerelease: false
---
name: Detect Parent Branch and Skip First Push
on:
  push:
    branches:
      - release/*

jobs:
  detect-parent-branch:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v3

      - name: Detect parent branch dynamically and check for first push
        run: |
          # Fetch all branches for comparison
          git fetch --all

          # Get the current branch name
          current_branch=$(git rev-parse --abbrev-ref HEAD)
          echo "Current branch: $current_branch"

          # Find the closest parent branch dynamically: for each other remote branch,
          # record the timestamp of its merge base with the current branch and keep
          # the branch whose merge base is the most recent commit.
          parent_branch=$(git for-each-ref --format='%(refname:short)' refs/remotes/ | grep -vE "/(HEAD|${current_branch})$" | while read branch; do
            merge_base=$(git merge-base $current_branch $branch || true)
            if [ -n "$merge_base" ]; then
              echo "$(git show -s --format=%ct $merge_base) $branch"
            fi
          done | sort -n | tail -n1 | awk '{print $2}')
          echo "Detected parent branch: $parent_branch"

          # Get the commit hash for the base and current commits
          parent_commit=$(git merge-base $current_branch $parent_branch)
          current_commit=$(git rev-parse HEAD)
          echo "Parent commit: $parent_commit"
          echo "Current commit: $current_commit"

          # Compare the current commit with the parent branch commit
          if [ "$parent_commit" = "$current_commit" ]; then
            echo "This is the first push for the branch. Skipping workflow..."
            exit 0
          else
            echo "Not the first push. Proceeding with the workflow."
          fi
input_string="release/dasdasdasdasd/1.1"
pattern1="^release/[0-9]+\.[0-9]+$"
pattern2="^release/[a-zA-Z0-9-]+/[0-9]+\.[0-9]+$"
if ! [[ "$input_string" =~ $pattern1 || "$input_string" =~ $pattern2 ]]; then
echo "Invalid pattern: $input_string"
exit 1
fi
echo "Valid pattern: $input_string"
#!/bin/bash

# Check if a version argument was provided
if [ -z "$1" ]; then
  echo "Please provide a version (e.g., ./get_changelog.sh v1.0.0)"
  exit 1
fi

# The version to search for
VERSION="$1"
CHANGELOG_FILE="CHANGELOG.md"

# Check if CHANGELOG.md exists
if [ ! -f "$CHANGELOG_FILE" ]; then
  echo "CHANGELOG.md file not found!"
  exit 1
fi

# Initialize flags and variables
found_version=0
output=""

# Read the file line by line
while IFS= read -r line; do
  # Check if the line contains the specified version header
  if [[ "$line" == "## [$VERSION]"* ]]; then
    found_version=1
    continue
  fi

  # Stop if we reach the next version header after finding the target version
  if [[ $found_version -eq 1 && "$line" == "## ["* ]]; then
    break
  fi

  # Collect lines after finding the target version
  if [[ $found_version -eq 1 ]]; then
    output+="$line"$'\n'
  fi
done < "$CHANGELOG_FILE"

# Check if any output was collected
if [ -z "$output" ]; then
  echo "No version found"
else
  echo "$output"
fi
# Changelog
All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/), and this project adheres to [Semantic Versioning](https://semver.org/).
## [Unreleased]
### Added
- Support for multiple languages.
- Option to export reports as PDF.
### Fixed
- Resolved an issue with user authentication timing out unexpectedly.
- Fixed a UI bug on the settings page in dark mode.
## [v1.3.0] - 2024-11-01
### Added
- New dashboard with customizable widgets.
- Integration with third-party analytics tools.
- Support for batch file uploads.
### Changed
- Improved performance on the data processing pipeline.
- Updated the logo and branding colors to align with the new company style guide.
### Fixed
- Fixed a bug where notifications were not marked as read.
- Corrected a spelling error in the user profile section.
### Security
- Addressed a vulnerability related to session management.
## [v1.2.2] - 2024-09-20
### Fixed
- Resolved an issue with file uploads failing intermittently.
- Fixed alignment issues on the login page for mobile devices.
## [v1.2.1] - 2024-09-10
### Fixed
- Fixed a bug that caused the app to crash on certain iOS devices.
### Security
- Patched a security flaw in the authentication module.
## [v1.2.0] - 2024-08-15
### Added
- Initial release of the reporting feature.
- User roles and permissions system.
### Deprecated
- Deprecated support for older JSON format. Please use the new XML format.
### Removed
- Removed the legacy support for outdated API endpoints.
## [v1.1.0] - 2024-07-01
### Added
- Multi-factor authentication for enhanced security.
- New user activity log.
### Fixed
- Addressed memory leak issues reported by users.
- Fixed an issue with session expiration not working correctly.
## [v1.0.0] - 2024-06-01
### Added
- Initial release of the application with core functionalities:
  - User registration and login
  - Basic data analytics and visualization
  - Real-time notifications
'''
# Logging request and response bodies
# Note: $request_body is only populated when nginx reads the body (e.g. for
# proxied requests), and $resp_body is not a built-in nginx variable; it is
# typically set by a Lua/OpenResty body filter.
log_format custom '$remote_addr - $remote_user [$time_local] "$request" '
                  'status: $status, body_bytes_sent: $body_bytes_sent '
                  '"$http_referer" "$http_user_agent" '
                  'req_body: $request_body, res_body: $resp_body';

access_log /var/log/nginx/custom_demo.log custom;
'''
import argparse
import re

import yaml

# git log --since="1 year ago" --pretty=format:"%ad" --name-only --date=short | awk '/^[0-9]{4}-[0-9]{2}-[0-9]{2}/ {date=$1} /^[^ ]/ {print "- " $0 " (Last updated: " date ")"}' | sort -u
# git log --since="1 year ago" --pretty="" --name-only --diff-filter=A | sort | uniq | xargs -I{} git ls-tree -r master --name-only | sort | uniq

# Values substituted for pseudo parameters and imported values before parsing
parameters = {
    'AWS::AccountId': '192838191',
    'ImportValue:codename': 'imported-value',
    'ImportValue:foo': 'domo',
    'Tier': 'dev',
}

# Property that holds the human-readable name for each supported resource type
resource_name_properties = {
    'AWS::S3::Bucket': 'BucketName',
    'AWS::EKS::Cluster': 'Name',
    'AWS::EC2::Instance': 'InstanceId',
    'AWS::IAM::Role': 'RoleName',
    'AWS::SecretsManager::Secret': 'Name',
    'AWS::ElasticLoadBalancing::LoadBalancer': 'LoadBalancerName'
}


class CloudFormationLoader(yaml.SafeLoader):
    """SafeLoader that tolerates CloudFormation intrinsic-function tags."""
    pass


def ref_constructor(loader, node):
    return f"!Ref {loader.construct_scalar(node)}"


def join_constructor(loader, node):
    return f"!Join {loader.construct_sequence(node)}"


def importvalue_constructor(loader, node):
    return f"Fn::ImportValue {loader.construct_scalar(node)}"


def sub_constructor(loader, node):
    return f"!Sub {loader.construct_scalar(node)}"


CloudFormationLoader.add_constructor('!Ref', ref_constructor)
CloudFormationLoader.add_constructor('!Join', join_constructor)
CloudFormationLoader.add_constructor('Fn::ImportValue', importvalue_constructor)
CloudFormationLoader.add_constructor('!Sub', sub_constructor)
CloudFormationLoader.add_constructor('Fn::Sub', sub_constructor)


def process_resource_name(resource_type, properties):
    """Return the resource's name property as a plain string, if one is defined."""
    resource_property_key = resource_name_properties.get(resource_type)
    if not resource_property_key:
        return None
    name = properties.get(resource_property_key)
    if isinstance(name, str):
        return name
    # A stripped !Join is left as [delimiter, [part, ...]]; join it manually
    elif isinstance(name, list) and len(name) == 2 and isinstance(name[0], str) and isinstance(name[1], list):
        join_char = name[0]
        joined_name = join_char.join(name[1])
        return joined_name
    return None


def process_resources(template):
    """Build a Markdown table of resource types and their resolved names."""
    output_lines = ["| AWS Resource Type | Resource Name |", "|-------------------|---------------|"]
    resources = template.get('Resources', {})
    for resource_name, resource_data in resources.items():
        resource_type = resource_data.get('Type')
        properties = resource_data.get('Properties', {})
        resource_name_value = process_resource_name(resource_type, properties)
        if resource_name_value:
            output_lines.append(f"| {resource_type} | {resource_name_value} |")
    return "\n".join(output_lines)


def process_cloudformation_template(content, parameters):
    """Textually resolve a few intrinsics and imports before YAML parsing."""
    content = re.sub(r':\s*!Join', ':', content)
    content = re.sub(r'!Ref\s*AWS::AccountId', f'"{parameters["AWS::AccountId"]}"', content)
    content = re.sub(r'!Ref\s*Tier', f'"{parameters["Tier"]}"', content)
    content = re.sub(r'Fn::ImportValue:\s*codename', f'"{parameters["ImportValue:codename"]}"', content)
    content = re.sub(r'Fn::ImportValue:\s*foo', f'"{parameters["ImportValue:foo"]}"', content)
    return content


def main():
    parser = argparse.ArgumentParser(description='Process CloudFormation YAML file.')
    parser.add_argument('-t', '--template', required=True, help='Path to the CloudFormation template file')
    parser.add_argument('-o', '--output', required=True, help='Output markdown file path')
    args = parser.parse_args()

    with open(args.template, 'r') as file:
        content = file.read()

    processed_content = process_cloudformation_template(content, parameters)
    template = yaml.load(processed_content, Loader=CloudFormationLoader)
    output_content = process_resources(template)

    with open(args.output, 'w') as file:
        file.write(output_content)

    print(f"Processed CloudFormation template and wrote output to {args.output}")


if __name__ == '__main__':
    main()
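A quick, self-contained check of the table generation, calling process_resources on an in-memory template; the resource names and values below are made up for illustration.

sample_template = {
    "Resources": {
        "AppBucket": {
            "Type": "AWS::S3::Bucket",
            "Properties": {"BucketName": "my-app-bucket-dev"},
        },
        "AppRole": {
            "Type": "AWS::IAM::Role",
            # Shape left behind once a !Join tag is stripped: [delimiter, [parts]]
            "Properties": {"RoleName": ["-", ["my-app", "dev", "role"]]},
        },
    }
}

print(process_resources(sample_template))
# | AWS Resource Type | Resource Name |
# |-------------------|---------------|
# | AWS::S3::Bucket | my-app-bucket-dev |
# | AWS::IAM::Role | my-app-dev-role |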