AWS Cloud Automation with Python & Boto3

Initial Tasks:
✅ Task 1: Install boto3 (pip install boto3) and configure AWS credentials (aws configure).
pip install boto3
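A quick way to confirm that boto3 can see the configured credentials is an STS identity call (a minimal sanity-check sketch; it works with any configured profile or environment credentials):
import boto3
# Ask STS who we are authenticated as; fails loudly if credentials are missing
sts = boto3.client("sts")
identity = sts.get_caller_identity()
print("Authenticated as:", identity["Arn"])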
Challenge 1: Write a Python script that provisions an EC2 instance, a new security group, and a key pair. The same script should then connect to the EC2 instance to verify that everything works. (The key pair should be generated by the Python script and used for the EC2 SSH connection.)
Solution
import boto3
import paramiko
import time
# Initialize AWS EC2 client
ec2 = boto3.client('ec2', region_name='us-east-1')
# Step 1: Create a new key pair
key_pair_name = "boto3-keypair"
key_pair = ec2.create_key_pair(KeyName=key_pair_name)
private_key = key_pair['KeyMaterial']
# Save the private key to a file
with open(f"{key_pair_name}.pem", "w") as key_file:
    key_file.write(private_key)
print(f"Key pair {key_pair_name} created and saved.")
# Step 2: Create a security group
sg_name = "boto3-sg"
sg_description = "Security group for Boto3 EC2"
security_group = ec2.create_security_group(
    GroupName=sg_name,
    Description=sg_description
)
security_group_id = security_group['GroupId']
# Allow SSH access
ec2.authorize_security_group_ingress(
    GroupId=security_group_id,
    IpPermissions=[
        {'IpProtocol': 'tcp', 'FromPort': 22, 'ToPort': 22, 'IpRanges': [{'CidrIp': '0.0.0.0/0'}]}
    ]
)
print(f"Security Group {sg_name} created.")
# Step 3: Launch an EC2 instance
instance = ec2.run_instances(
    ImageId='ami-0c55b159cbfafe1f0',  # Replace with the latest AMI ID for your region (see the lookup sketch below)
    InstanceType='t2.micro',
    KeyName=key_pair_name,
    SecurityGroupIds=[security_group_id],
    MinCount=1,
    MaxCount=1
)['Instances'][0]
instance_id = instance['InstanceId']
print(f"EC2 instance {instance_id} launched.")
# Wait for instance to be running
ec2_resource = boto3.resource('ec2', region_name='us-east-1')
instance = ec2_resource.Instance(instance_id)
instance.wait_until_running()
instance.load()
print(f"Instance {instance_id} is now running. Public IP: {instance.public_ip_address}")
# Step 4: Connect to the instance using SSH
ssh = paramiko.SSHClient()
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
# Wait for instance SSH service to be ready
time.sleep(60) # Delay for SSH service readiness
ssh.connect(
    hostname=instance.public_ip_address,
    username='ec2-user',
    key_filename=f"{key_pair_name}.pem"
)
stdin, stdout, stderr = ssh.exec_command("echo 'Connected to EC2 instance via SSH'")
print(stdout.read().decode())
ssh.close()
Explanation
Create a key pair and save it as a .pem file.
Create a security group and allow SSH access.
Launch an EC2 instance using the created key pair and security group.
Wait for the instance to start and retrieve its public IP.
Connect via SSH using Paramiko and verify connectivity.
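The launch step above hardcodes an AMI ID, which varies by region and goes stale. One way to avoid that is to resolve the latest Amazon Linux image through the public SSM parameter store (a minimal sketch, assuming the Amazon Linux 2023 parameter path):
import boto3
ssm = boto3.client('ssm', region_name='us-east-1')
# Public SSM parameter that tracks the latest Amazon Linux 2023 AMI
param = ssm.get_parameter(Name='/aws/service/ami-amazon-linux-latest/al2023-ami-kernel-default-x86_64')
ami_id = param['Parameter']['Value']
print("Latest AMI:", ami_id)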
Challenge 2: Automate S3 lifecycle policies using boto3 (e.g., move files to Glacier after 30 days).
Solution
import boto3
s3 = boto3.client('s3')
bucket_name = "your-bucket-name"
# Define lifecycle policy
lifecycle_policy = {
    "Rules": [
        {
            "ID": "MoveToGlacier",
            "Filter": {"Prefix": ""},  # Empty prefix applies the rule to all objects
            "Status": "Enabled",
            "Transitions": [
                {"Days": 30, "StorageClass": "GLACIER"}
            ]
        }
    ]
}
# Apply policy to bucket
s3.put_bucket_lifecycle_configuration(
    Bucket=bucket_name,
    LifecycleConfiguration=lifecycle_policy
)
print(f"Lifecycle policy applied to {bucket_name}")
Explanation
Defines a policy that moves objects to Glacier after 30 days.
Uses put_bucket_lifecycle_configuration() to apply it to the S3 bucket.
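To confirm the rule actually took effect, the matching get call reads the configuration back (a small verification sketch):
import boto3
s3 = boto3.client('s3')
# Read the lifecycle rules back to verify they were applied
response = s3.get_bucket_lifecycle_configuration(Bucket="your-bucket-name")
for rule in response['Rules']:
    print(rule['ID'], rule['Status'], rule.get('Transitions'))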
Challenge 3: Create a script that starts or stops all EC2 instances in a specific AWS region.
Solution
import boto3
ec2 = boto3.client('ec2', region_name='us-east-1')
def manage_instances(action):
    # Find instances in the opposite state of the requested action
    state = "running" if action == "stop" else "stopped"
    instances = ec2.describe_instances(
        Filters=[{"Name": "instance-state-name", "Values": [state]}]
    )
    instance_ids = [i['InstanceId'] for res in instances['Reservations'] for i in res['Instances']]
    if instance_ids:
        if action == "start":
            ec2.start_instances(InstanceIds=instance_ids)
            print("Started instances:", instance_ids)
        else:
            ec2.stop_instances(InstanceIds=instance_ids)
            print("Stopped instances:", instance_ids)
    else:
        print(f"No instances to {action}.")
manage_instances("stop") # Use "start" to start instances
Explanation
Fetches running/stopped instances.
Starts or stops them based on the action provided.
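If the script should block until the instances actually reach the target state, boto3's built-in waiters help. A small sketch for the stop case (the instance IDs are placeholders; in practice you'd reuse the list from manage_instances):
import boto3
ec2 = boto3.client('ec2', region_name='us-east-1')
instance_ids = ['i-0123456789abcdef0']  # Hypothetical IDs for illustration
ec2.stop_instances(InstanceIds=instance_ids)
# Block until every instance reports the 'stopped' state
waiter = ec2.get_waiter('instance_stopped')
waiter.wait(InstanceIds=instance_ids)
print("All instances stopped.")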
Challenge 4: Write a Python program that checks for unused IAM users and disables them.
Solution
import boto3
from datetime import datetime, timedelta, timezone
# Initialize IAM client
iam = boto3.client("iam")
# Define inactivity period (e.g., 90 days)
inactive_days = 90
time_threshold = datetime.now(timezone.utc) - timedelta(days=inactive_days)
# Get all IAM users
users = iam.list_users()["Users"]
for user in users:
    username = user["UserName"]
    # PasswordLastUsed is only present if the user has ever signed in to the console
    last_activity = user.get("PasswordLastUsed", user["CreateDate"])
    if last_activity < time_threshold:
        print(f"Disabling user: {username}")
        try:
            iam.delete_login_profile(UserName=username)  # Remove console access
        except iam.exceptions.NoSuchEntityException:
            pass  # User has no console password
        iam.update_user(UserName=username, NewPath="/disabled/")
Explanation
Fetches IAM users.
Checks last activity and disables unused accounts.
Moves inactive users to a /disabled/ path.
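Note that deleting the login profile only removes console access; any long-lived access keys still work. A follow-up sketch that also deactivates a disabled user's access keys (the user name is a placeholder):
import boto3
iam = boto3.client("iam")
username = "some-inactive-user"  # Hypothetical user name for illustration
# Deactivate every access key so API access is cut off as well
for key in iam.list_access_keys(UserName=username)["AccessKeyMetadata"]:
    iam.update_access_key(UserName=username, AccessKeyId=key["AccessKeyId"], Status="Inactive")
    print(f"Deactivated key {key['AccessKeyId']} for {username}")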
Challenge 5: Implement a log monitoring system that scans EC2 instances' /var/log for error messages and sends alerts via email (AWS SES) and Slack.
Solution
import boto3
import os
from slack_sdk import WebClient
# AWS and Slack configuration
region = "us-east-1"
ses = boto3.client("ses", region_name=region)
# Define email and Slack details
slack_token = "your-slack-token"
slack_channel = "#alerts"
ses_email = "alert@example.com"  # Must be a verified SES identity
# Function to scan local logs and send alerts (run this on the instance itself)
def scan_logs():
    for file in os.listdir("/var/log"):
        if file.endswith(".log"):
            with open(f"/var/log/{file}") as log_file:
                for line in log_file:
                    if "ERROR" in line:
                        send_alert(line)
# Send alerts via Slack and SES
def send_alert(message):
    client = WebClient(token=slack_token)
    client.chat_postMessage(channel=slack_channel, text=message)
    ses.send_email(
        Source=ses_email,
        Destination={"ToAddresses": [ses_email]},
        Message={
            "Subject": {"Data": "Log error alert"},
            "Body": {"Text": {"Data": message}},
        },
    )
scan_logs()
Explanation
Scans the instance's local log files for error messages (the script runs on the EC2 instance itself).
Sends alerts to Slack and to email via SES.
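Since the scanner reads the local /var/log, one way to run it across a fleet is AWS Systems Manager Run Command. A minimal sketch (the instance ID and script path are placeholders; the instances need the SSM agent and an appropriate instance profile):
import boto3
ssm = boto3.client("ssm", region_name="us-east-1")
response = ssm.send_command(
    InstanceIds=["i-0123456789abcdef0"],  # Hypothetical instance ID
    DocumentName="AWS-RunShellScript",
    Parameters={"commands": ["python3 /opt/scripts/scan_logs.py"]},  # Hypothetical script path
)
print("Command ID:", response["Command"]["CommandId"])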
Challenge 6: Automate DNS record updates in AWS Route 53 using Python.
Solution
import boto3
route53 = boto3.client('route53')
route53.change_resource_record_sets(
    HostedZoneId='ZXXXXXXXXXXXXX',
    ChangeBatch={
        'Changes': [
            {
                'Action': 'UPSERT',
                'ResourceRecordSet': {
                    'Name': 'subdomain.example.com',
                    'Type': 'A',
                    'TTL': 300,
                    'ResourceRecords': [{'Value': '192.168.1.1'}]
                }
            }
        ]
    }
)
Explanation
Updates Route 53 DNS records dynamically: UPSERT creates the record if it doesn't exist and updates it otherwise.
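The change call returns a ChangeInfo block whose Id can be polled until the record has propagated (a small sketch; the change ID shown is a placeholder taken from that response):
import boto3
route53 = boto3.client('route53')
change_id = "C2682N5HXP0BZ4"  # Hypothetical ID from the ChangeInfo in the response above
status = route53.get_change(Id=change_id)['ChangeInfo']['Status']
print("Change status:", status)  # PENDING until the record is live, then INSYNC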
Challenge 7: Write a script that triggers an AWS Lambda function using boto3.
Solution
import boto3
import json
lambda_client = boto3.client('lambda', region_name='us-east-1')
LAMBDA_FUNCTION_NAME = "your-lambda-function-name"
payload = json.dumps({"message": "Hello from Boto3!"})
response = lambda_client.invoke(
    FunctionName=LAMBDA_FUNCTION_NAME,
    InvocationType='RequestResponse',  # Use 'Event' for async
    Payload=payload
)
print("Lambda Response:", json.loads(response['Payload'].read()))
Explanation
Initialize Boto3 Lambda client (set the AWS region).
Define function name & payload (modify as needed).
Invoke Lambda (sync with RequestResponse, async with Event).
Print response from Lambda execution.
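For completeness, a minimal handler on the Lambda side that echoes the payload back to the caller could look like this (a sketch; the actual function body is whatever you deployed):
def lambda_handler(event, context):
    # Echo the incoming payload back so the invoking script can print it
    return {"received": event.get("message"), "status": "ok"}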
Challenge 8: Fetch AWS billing data and generate a PDF report.
Solution
from reportlab.pdfgen import canvas
import boto3
ce = boto3.client('ce')
cost_data = ce.get_cost_and_usage(
    TimePeriod={'Start': '2025-02-01', 'End': '2025-02-28'},  # End date is exclusive
    Granularity='MONTHLY',
    Metrics=['BlendedCost']
)
pdf = canvas.Canvas("AWS_Billing_Report.pdf")
pdf.drawString(100, 750, str(cost_data))
pdf.save()
Explanation
Fetches AWS billing data.
Saves it as a PDF using ReportLab.
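Dumping the raw response into the PDF is hard to read; the actual amount lives under ResultsByTime. A small sketch that pulls the monthly total out before writing it (this would replace the drawString call above, before pdf.save()):
# Extract the blended cost amount from the Cost Explorer response
result = cost_data['ResultsByTime'][0]
amount = result['Total']['BlendedCost']['Amount']
unit = result['Total']['BlendedCost']['Unit']
pdf.drawString(100, 730, f"Blended cost for February: {float(amount):.2f} {unit}")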