Loading...
Loading...
Comprehensive AWS cloud services skill covering S3, Lambda, DynamoDB, EC2, RDS, IAM, CloudFormation, and enterprise cloud architecture patterns with AWS SDK
npx skill4agent add manutej/luxor-claude-marketplace aws-cloud-services

// v2 (monolithic)
const AWS = require('aws-sdk');
const s3 = new AWS.S3();
// v3 (modular)
import { S3Client, PutObjectCommand } from '@aws-sdk/client-s3';
const client = new S3Client({ region: 'us-east-1' });{
"Version": "2012-10-17",
"Statement": [
{
"Effect": "Allow",
"Action": [
"s3:GetObject",
"s3:PutObject"
],
"Resource": "arn:aws:s3:::my-bucket/*",
"Condition": {
"IpAddress": {
"aws:SourceIp": "203.0.113.0/24"
}
}
}
]
}

import { S3Client } from '@aws-sdk/client-s3';
// Specify region explicitly
const client = new S3Client({
region: 'us-west-2',
endpoint: 'https://s3.us-west-2.amazonaws.com' // Optional custom endpoint
});
// Use default region from environment/config
const defaultClient = new S3Client({}); // Uses AWS_REGION or default region

import { S3Client, PutObjectCommand } from '@aws-sdk/client-s3';
import { readFileSync } from 'fs';
const client = new S3Client({ region: 'us-east-1' });
// Simple upload
// Upload a local file to S3 with custom metadata and server-side encryption.
const uploadFile = async (bucketName, key, filePath) => {
  const body = readFileSync(filePath);
  const putCommand = new PutObjectCommand({
    Bucket: bucketName,
    Key: key,
    Body: body,
    ContentType: 'image/jpeg', // Optional
    // Optional custom metadata stored alongside the object
    Metadata: {
      'uploaded-by': 'user-123',
      'upload-date': new Date().toISOString()
    },
    ServerSideEncryption: 'AES256', // Enable encryption
    ACL: 'private' // Access control
  });
  return client.send(putCommand);
};
import { GetObjectCommand } from '@aws-sdk/client-s3';
import { writeFileSync } from 'fs';
// Fetch an object from S3, write it to a local file, and return the
// object's user-defined metadata.
const downloadFile = async (bucketName, key, destinationPath) => {
  const response = await client.send(new GetObjectCommand({
    Bucket: bucketName,
    Key: key
  }));
  // response.Body is an async-iterable stream; collect it into one buffer
  const parts = [];
  for await (const part of response.Body) {
    parts.push(part);
  }
  writeFileSync(destinationPath, Buffer.concat(parts));
  return response.Metadata;
};
import { ListObjectsV2Command } from '@aws-sdk/client-s3';
// List one page of objects under a prefix, treating '/' as a folder delimiter.
const listObjects = async (bucketName, prefix = '') => {
  const response = await client.send(new ListObjectsV2Command({
    Bucket: bucketName,
    Prefix: prefix, // Filter by prefix
    MaxKeys: 1000, // Max 1000 per request
    Delimiter: '/' // Treat / as folder separator
  }));
  return response.Contents; // Array of objects
};
// Pagination for large buckets
// Walk every page of the bucket listing and accumulate all objects.
const listAllObjects = async (bucketName) => {
  const allObjects = [];
  let continuationToken;
  do {
    const response = await client.send(new ListObjectsV2Command({
      Bucket: bucketName,
      ContinuationToken: continuationToken
    }));
    allObjects.push(...(response.Contents || []));
    continuationToken = response.NextContinuationToken;
  } while (continuationToken);
  return allObjects;
};
import { DeleteObjectCommand, DeleteObjectsCommand } from '@aws-sdk/client-s3';
// Delete single object
// Remove a single object from the bucket.
const deleteObject = async (bucketName, key) => {
  const request = new DeleteObjectCommand({ Bucket: bucketName, Key: key });
  await client.send(request);
};
// Delete multiple objects (up to 1000 at once)
// Remove a batch of objects in one request (DeleteObjects caps at 1000 keys).
const deleteMultipleObjects = async (bucketName, keys) => {
  const response = await client.send(new DeleteObjectsCommand({
    Bucket: bucketName,
    Delete: {
      Objects: keys.map((k) => ({ Key: k })),
      Quiet: false // Return list of deleted objects
    }
  }));
  return response.Deleted;
};
import { getSignedUrl } from '@aws-sdk/s3-request-presigner';
import { PutObjectCommand, GetObjectCommand } from '@aws-sdk/client-s3';
// Presigned URL for upload
// Presign a PUT so a caller can upload directly without AWS credentials.
const createUploadUrl = async (bucketName, key, expiresIn = 3600) => {
  const putCommand = new PutObjectCommand({
    Bucket: bucketName,
    Key: key,
    ContentType: 'image/jpeg'
  });
  return getSignedUrl(client, putCommand, { expiresIn }); // Client can PUT to this URL
};
// Presigned URL for download
// Presign a GET so a caller can download directly without AWS credentials.
const createDownloadUrl = async (bucketName, key, expiresIn = 3600) => {
  const getCommand = new GetObjectCommand({ Bucket: bucketName, Key: key });
  return getSignedUrl(client, getCommand, { expiresIn }); // Client can GET from this URL
};
import {
  CreateMultipartUploadCommand,
  UploadPartCommand,
  CompleteMultipartUploadCommand,
  AbortMultipartUploadCommand
} from '@aws-sdk/client-s3';
/**
 * Upload a buffer to S3 via the multipart API.
 * Parts are uploaded sequentially; on any failure the upload is aborted so
 * incomplete parts do not accrue storage charges.
 *
 * FIXES vs. original:
 * - `Buffer.slice` (deprecated) replaced with `subarray` (same zero-copy view).
 * - The abort call in the catch block is now guarded so a failing abort does
 *   not mask the original upload error.
 *
 * @param {string} bucketName destination bucket
 * @param {string} key destination object key
 * @param {Buffer} fileBuffer full object contents
 * @param {number} [partSize] part size in bytes (default 5 MiB)
 * @returns {Promise<object>} CompleteMultipartUpload response
 */
const multipartUpload = async (bucketName, key, fileBuffer, partSize = 5 * 1024 * 1024) => {
  // 1. Initiate multipart upload
  const { UploadId } = await client.send(new CreateMultipartUploadCommand({
    Bucket: bucketName,
    Key: key
  }));
  try {
    // 2. Upload parts (PartNumber is 1-based)
    const parts = [];
    const numParts = Math.ceil(fileBuffer.length / partSize);
    for (let i = 0; i < numParts; i++) {
      const start = i * partSize;
      const end = Math.min(start + partSize, fileBuffer.length);
      const { ETag } = await client.send(new UploadPartCommand({
        Bucket: bucketName,
        Key: key,
        UploadId,
        PartNumber: i + 1,
        Body: fileBuffer.subarray(start, end)
      }));
      parts.push({ PartNumber: i + 1, ETag });
    }
    // 3. Complete multipart upload
    return await client.send(new CompleteMultipartUploadCommand({
      Bucket: bucketName,
      Key: key,
      UploadId,
      MultipartUpload: { Parts: parts }
    }));
  } catch (error) {
    // Abort on error to avoid storage charges for incomplete uploads.
    // If the abort itself fails, log it but rethrow the ORIGINAL error.
    try {
      await client.send(new AbortMultipartUploadCommand({
        Bucket: bucketName,
        Key: key,
        UploadId
      }));
    } catch (abortError) {
      console.error('Failed to abort multipart upload:', abortError);
    }
    throw error;
  }
};
// Lambda handler signature
// Generic Lambda entry point: logs the invocation, delegates to processEvent,
// and wraps the result as a JSON HTTP response.
export const handler = async (event, context) => {
  // event: Input data (API request, S3 event, etc.)
  // context: Runtime information (request ID, remaining time, etc.)
  console.log('Event:', JSON.stringify(event, null, 2));
  console.log('Request ID:', context.requestId);
  console.log('Remaining time:', context.getRemainingTimeInMillis());
  const result = await processEvent(event);
  return {
    statusCode: 200,
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify(result)
  };
};
// Using AWS SDK to create/update Lambda function
import {
LambdaClient,
CreateFunctionCommand,
UpdateFunctionCodeCommand,
UpdateFunctionConfigurationCommand
} from '@aws-sdk/client-lambda';
const lambdaClient = new LambdaClient({ region: 'us-east-1' });
// Create a Lambda function and return its ARN.
// NOTE(review): `zipBuffer` is not defined in this snippet — the deployment
// package must be supplied (e.g. read from disk) before this runs; confirm.
const createFunction = async () => {
const command = new CreateFunctionCommand({
FunctionName: 'myFunction',
Runtime: 'nodejs20.x',
// IAM role the function assumes at runtime
Role: 'arn:aws:iam::123456789012:role/lambda-execution-role',
Handler: 'index.handler',
Code: {
ZipFile: zipBuffer // Or S3Bucket/S3Key for S3-stored code
},
// Values exposed to the function via process.env
Environment: {
Variables: {
'BUCKET_NAME': 'my-bucket',
'TABLE_NAME': 'my-table'
}
},
MemorySize: 512, // MB
Timeout: 30, // seconds
Tags: {
'Environment': 'production',
'Team': 'backend'
}
});
const response = await lambdaClient.send(command);
return response.FunctionArn;
};
// Lambda function for API Gateway
// API Gateway (Lambda proxy) handler with minimal path/method routing.
// FIX vs. original: `JSON.parse(body)` was unguarded, so a malformed client
// payload crashed the invocation (surfacing as a 502/500). Malformed JSON is
// a client error and now returns 400.
export const handler = async (event) => {
  const { httpMethod, path, body } = event;
  // Parse the request body defensively
  let requestBody = null;
  if (body) {
    try {
      requestBody = JSON.parse(body);
    } catch {
      return {
        statusCode: 400,
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({ message: 'Invalid JSON body' })
      };
    }
  }
  // Route based on HTTP method and path
  if (httpMethod === 'GET' && path === '/users') {
    const users = await getUsers();
    return {
      statusCode: 200,
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify(users)
    };
  }
  if (httpMethod === 'POST' && path === '/users') {
    const newUser = await createUser(requestBody);
    return {
      statusCode: 201,
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify(newUser)
    };
  }
  // Not found
  return {
    statusCode: 404,
    body: JSON.stringify({ message: 'Not found' })
  };
};
// Lambda function triggered by S3 events
// S3 event handler: dispatches each record to a create or delete processor.
export const handler = async (event) => {
  for (const record of event.Records) {
    const bucket = record.s3.bucket.name;
    // Keys arrive URL-encoded with '+' standing in for spaces; decode first
    const key = decodeURIComponent(record.s3.object.key.replace(/\+/g, ' '));
    const eventName = record.eventName;
    console.log(`Event: ${eventName}, Bucket: ${bucket}, Key: ${key}`);
    if (eventName.startsWith('ObjectCreated:')) {
      await processNewFile(bucket, key);
    } else if (eventName.startsWith('ObjectRemoved:')) {
      await handleFileDeleted(bucket, key);
    }
  }
  return { statusCode: 200 };
};
// Lambda function for DynamoDB Streams
export const handler = async (event) => {
for (const record of event.Records) {
const { eventName, dynamodb } = record;
// INSERT, MODIFY, REMOVE
console.log(`Event: ${eventName}`);
if (eventName === 'INSERT') {
const newItem = AWS.DynamoDB.Converter.unmarshall(dynamodb.NewImage);
await handleNewItem(newItem);
}
if (eventName === 'MODIFY') {
const oldItem = AWS.DynamoDB.Converter.unmarshall(dynamodb.OldImage);
const newItem = AWS.DynamoDB.Converter.unmarshall(dynamodb.NewImage);
await handleItemUpdate(oldItem, newItem);
}
if (eventName === 'REMOVE') {
const oldItem = AWS.DynamoDB.Converter.unmarshall(dynamodb.OldImage);
await handleItemDeleted(oldItem);
}
}
};export const handler = async (event) => {
try {
// Process event
const result = await processEvent(event);
return { statusCode: 200, body: JSON.stringify(result) };
} catch (error) {
console.error('Error processing event:', error);
// Log to CloudWatch
console.error('Error details:', {
message: error.message,
stack: error.stack,
event
});
// Return error response
return {
statusCode: 500,
body: JSON.stringify({
error: 'Internal server error',
requestId: context.requestId
})
};
}
};// Access environment variables
const BUCKET_NAME = process.env.BUCKET_NAME;
const TABLE_NAME = process.env.TABLE_NAME;
const API_KEY = process.env.API_KEY; // Use Secrets Manager for sensitive data

User Table:
- userId (Partition Key) -> "user-123"
- name -> "John Doe"
- email -> "john@example.com"

Order Table:
- userId (Partition Key) -> "user-123"
- orderId (Sort Key) -> "order-456"
- total -> 99.99
- status -> "shipped"

import { DynamoDBClient } from '@aws-sdk/client-dynamodb';
import { DynamoDBDocumentClient, PutCommand } from '@aws-sdk/lib-dynamodb';
const client = new DynamoDBClient({ region: 'us-east-1' });
const docClient = DynamoDBDocumentClient.from(client);
// Insert an item, refusing to overwrite an existing userId.
const putItem = async (tableName, item) => {
  const command = new PutCommand({
    TableName: tableName,
    Item: item,
    ConditionExpression: 'attribute_not_exists(userId)', // Prevent overwrite
    ReturnValues: 'ALL_OLD' // Return previous item if existed
  });
  try {
    return await docClient.send(command);
  } catch (error) {
    // Condition failure means the item already exists; log, then surface it
    if (error.name === 'ConditionalCheckFailedException') {
      console.log('Item already exists');
    }
    throw error;
  }
};
// Example usage
await putItem('Users', {
userId: 'user-123',
name: 'John Doe',
email: 'john@example.com',
createdAt: new Date().toISOString(),
preferences: {
theme: 'dark',
notifications: true
}
});

import { GetCommand } from '@aws-sdk/lib-dynamodb';
// Read one item with strong consistency, projecting a subset of attributes.
const getItem = async (tableName, key) => {
  const response = await docClient.send(new GetCommand({
    TableName: tableName,
    Key: key,
    ConsistentRead: true, // Strong consistency (default: false)
    ProjectionExpression: 'userId, #n, email', // Return specific attributes
    ExpressionAttributeNames: {
      '#n': 'name' // name is reserved word, use placeholder
    }
  }));
  return response.Item;
};
// Example usage
const user = await getItem('Users', { userId: 'user-123' });

import { UpdateCommand } from '@aws-sdk/lib-dynamodb';
// Overwrite name/email on an existing item and stamp updatedAt.
const updateItem = async (tableName, key, updates) => {
  const response = await docClient.send(new UpdateCommand({
    TableName: tableName,
    Key: key,
    UpdateExpression: 'SET #n = :name, email = :email, updatedAt = :now',
    ExpressionAttributeNames: {
      '#n': 'name'
    },
    ExpressionAttributeValues: {
      ':name': updates.name,
      ':email': updates.email,
      ':now': new Date().toISOString()
    },
    ConditionExpression: 'attribute_exists(userId)', // Only update if exists
    ReturnValues: 'ALL_NEW' // Return updated item
  }));
  return response.Attributes;
};
// Atomic counter increment
// Atomically add 1 to a numeric attribute and return its new value.
const incrementCounter = async (tableName, key, counterAttribute) => {
  const response = await docClient.send(new UpdateCommand({
    TableName: tableName,
    Key: key,
    UpdateExpression: 'ADD #counter :inc',
    ExpressionAttributeNames: { '#counter': counterAttribute },
    ExpressionAttributeValues: { ':inc': 1 },
    ReturnValues: 'UPDATED_NEW'
  }));
  return response.Attributes[counterAttribute];
};
import { QueryCommand } from '@aws-sdk/lib-dynamodb';
// Query a partition's orders in a key range, filtered to completed orders.
// FIX vs. original: the object literal declared `ExpressionAttributeValues`
// twice; the second declaration silently replaced the first, dropping the
// :userId/:start/:end values the KeyConditionExpression requires (the query
// would fail validation at runtime). All values now live in a single map.
const queryItems = async (tableName, partitionKeyValue) => {
  const command = new QueryCommand({
    TableName: tableName,
    KeyConditionExpression: 'userId = :userId AND orderId BETWEEN :start AND :end',
    FilterExpression: 'orderStatus = :status', // Filter results (applied after query)
    ExpressionAttributeValues: {
      ':userId': partitionKeyValue,
      ':start': 'order-100',
      ':end': 'order-200',
      ':status': 'completed'
    },
    Limit: 100, // Max items to return
    ScanIndexForward: false // Sort descending (default: ascending)
  });
  const response = await docClient.send(command);
  return response.Items;
};
// Pagination
// Page through every item under one partition key.
const queryAllItems = async (tableName, partitionKeyValue) => {
  const collected = [];
  let lastEvaluatedKey;
  do {
    const response = await docClient.send(new QueryCommand({
      TableName: tableName,
      KeyConditionExpression: 'userId = :userId',
      ExpressionAttributeValues: { ':userId': partitionKeyValue },
      ExclusiveStartKey: lastEvaluatedKey
    }));
    collected.push(...response.Items);
    lastEvaluatedKey = response.LastEvaluatedKey;
  } while (lastEvaluatedKey);
  return collected;
};
import { ScanCommand } from '@aws-sdk/lib-dynamodb';
/**
 * Scan a table with a filter expression.
 * FIX vs. original: the `filterExpression` parameter was accepted but
 * ignored — the scan always used a hard-coded 'age > :minAge' filter. Both
 * the expression and its value map are now real parameters whose defaults
 * reproduce the old behavior, so existing callers are unaffected.
 */
const scanTable = async (
  tableName,
  filterExpression = 'age > :minAge',
  expressionAttributeValues = { ':minAge': 18 }
) => {
  const command = new ScanCommand({
    TableName: tableName,
    FilterExpression: filterExpression,
    ExpressionAttributeValues: expressionAttributeValues,
    Limit: 1000
  });
  const response = await docClient.send(command);
  return response.Items;
};
// Parallel scan for performance
// Scan all table segments concurrently and merge the results.
const parallelScan = async (tableName, totalSegments = 4) => {
  // One scan worker per segment
  const scanSegment = async (segmentIndex) => {
    const response = await docClient.send(new ScanCommand({
      TableName: tableName,
      Segment: segmentIndex,
      TotalSegments: totalSegments
    }));
    return response.Items;
  };
  const segmentResults = await Promise.all(
    Array.from({ length: totalSegments }, (_, i) => scanSegment(i))
  );
  return segmentResults.flat();
};
import { DeleteCommand } from '@aws-sdk/lib-dynamodb';
// Delete an item only if it exists, returning the removed attributes.
const deleteItem = async (tableName, key) => {
  const response = await docClient.send(new DeleteCommand({
    TableName: tableName,
    Key: key,
    ConditionExpression: 'attribute_exists(userId)', // Only delete if exists
    ReturnValues: 'ALL_OLD' // Return deleted item
  }));
  return response.Attributes;
};
import { BatchGetCommand, BatchWriteCommand } from '@aws-sdk/lib-dynamodb';
// Batch get (up to 100 items)
// Batch get (up to 100 keys per request).
// FIX vs. original: BatchGet may return UnprocessedKeys (throttling or
// response-size limits); those items were silently dropped. Unprocessed
// keys are now re-requested until none remain.
const batchGetItems = async (tableName, keys) => {
  const items = [];
  let requestItems = { [tableName]: { Keys: keys } };
  while (requestItems) {
    const response = await docClient.send(new BatchGetCommand({
      RequestItems: requestItems
    }));
    items.push(...(response.Responses?.[tableName] ?? []));
    const unprocessed = response.UnprocessedKeys;
    requestItems = unprocessed && Object.keys(unprocessed).length > 0 ? unprocessed : null;
  }
  return items;
};
// Batch write (up to 25 items)
// Batch write (up to 25 put requests per call).
// FIX vs. original: UnprocessedItems returned by BatchWrite were silently
// discarded; they are now resubmitted until none remain.
const batchWriteItems = async (tableName, items) => {
  let requestItems = {
    [tableName]: items.map((item) => ({ PutRequest: { Item: item } }))
  };
  while (requestItems) {
    const response = await docClient.send(new BatchWriteCommand({ RequestItems: requestItems }));
    const unprocessed = response.UnprocessedItems;
    requestItems = unprocessed && Object.keys(unprocessed).length > 0 ? unprocessed : null;
  }
};
// Batch delete
// Batch delete (up to 25 delete requests per call).
// FIX vs. original: UnprocessedItems returned by BatchWrite were silently
// discarded; they are now resubmitted until none remain.
const batchDeleteItems = async (tableName, keys) => {
  let requestItems = {
    [tableName]: keys.map((key) => ({ DeleteRequest: { Key: key } }))
  };
  while (requestItems) {
    const response = await docClient.send(new BatchWriteCommand({ RequestItems: requestItems }));
    const unprocessed = response.UnprocessedItems;
    requestItems = unprocessed && Object.keys(unprocessed).length > 0 ? unprocessed : null;
  }
};
// User entity
{
PK: "USER#user-123",
SK: "METADATA",
type: "user",
name: "John Doe",
email: "john@example.com"
}
// User's order
{
PK: "USER#user-123",
SK: "ORDER#order-456",
type: "order",
total: 99.99,
status: "shipped"
}
// Access patterns:
// 1. Get user: PK = "USER#user-123", SK = "METADATA"
// 2. Get all user's orders: PK = "USER#user-123", SK begins_with "ORDER#"
// 3. Get specific order: PK = "USER#user-123", SK = "ORDER#order-456"

import {
EC2Client,
RunInstancesCommand,
DescribeInstancesCommand,
StartInstancesCommand,
StopInstancesCommand,
TerminateInstancesCommand
} from '@aws-sdk/client-ec2';
const ec2Client = new EC2Client({ region: 'us-east-1' });
// Launch instance
// Launch one EC2 instance and return its instance ID.
// NOTE(review): AMI, security group, and subnet IDs are placeholders and
// region-specific — substitute real values before running.
const launchInstance = async () => {
const command = new RunInstancesCommand({
ImageId: 'ami-0c55b159cbfafe1f0', // Amazon Linux 2 AMI
InstanceType: 't3.micro',
MinCount: 1,
MaxCount: 1,
KeyName: 'my-key-pair',
SecurityGroupIds: ['sg-0123456789abcdef0'],
SubnetId: 'subnet-0123456789abcdef0',
// Instance profile grants the instance its IAM permissions
IamInstanceProfile: {
Name: 'ec2-instance-profile'
},
// Base64-encoded bootstrap script: installs and starts Apache with a
// placeholder index page
UserData: Buffer.from(`#!/bin/bash
yum update -y
yum install -y httpd
systemctl start httpd
systemctl enable httpd
echo "Hello from EC2" > /var/www/html/index.html
`).toString('base64'),
TagSpecifications: [{
ResourceType: 'instance',
Tags: [
{ Key: 'Name', Value: 'WebServer' },
{ Key: 'Environment', Value: 'production' }
]
}]
});
const response = await ec2Client.send(command);
return response.Instances[0].InstanceId;
};
// Describe instances
// Describe the given instances, restricted to ones currently running.
const describeInstances = async (instanceIds) => {
  const response = await ec2Client.send(new DescribeInstancesCommand({
    InstanceIds: instanceIds,
    Filters: [
      { Name: 'instance-state-name', Values: ['running'] }
    ]
  }));
  // Instances are grouped by reservation; flatten to a single list
  return response.Reservations.flatMap((reservation) => reservation.Instances);
};
// Stop instance
// Stop (but do not terminate) one instance.
const stopInstance = async (instanceId) => {
  await ec2Client.send(new StopInstancesCommand({ InstanceIds: [instanceId] }));
};
// Terminate instance
// Permanently terminate one instance.
const terminateInstance = async (instanceId) => {
  await ec2Client.send(new TerminateInstancesCommand({ InstanceIds: [instanceId] }));
};
import {
  RDSClient,
  CreateDBInstanceCommand,
  DescribeDBInstancesCommand,
  ModifyDBInstanceCommand,
  DeleteDBInstanceCommand
} from '@aws-sdk/client-rds';
const rdsClient = new RDSClient({ region: 'us-east-1' });
// Create database instance
// Create a Multi-AZ PostgreSQL instance with encrypted gp3 storage.
// SECURITY NOTE(review): the master password is hard-coded below; load it
// from AWS Secrets Manager or an environment variable before using this.
const createDatabase = async () => {
const command = new CreateDBInstanceCommand({
DBInstanceIdentifier: 'mydb',
DBInstanceClass: 'db.t3.micro',
Engine: 'postgres',
EngineVersion: '15.3',
MasterUsername: 'admin',
MasterUserPassword: 'SecurePassword123!',
AllocatedStorage: 20, // GB
StorageType: 'gp3',
BackupRetentionPeriod: 7, // days
MultiAZ: true, // High availability
PubliclyAccessible: false,
VpcSecurityGroupIds: ['sg-0123456789abcdef0'],
DBSubnetGroupName: 'my-db-subnet-group',
StorageEncrypted: true,
Tags: [
{ Key: 'Environment', Value: 'production' },
{ Key: 'Application', Value: 'api' }
]
});
const response = await rdsClient.send(command);
return response.DBInstance;
};
// Describe database
// Fetch details for a single RDS instance by identifier.
const describeDatabase = async (dbInstanceId) => {
  const response = await rdsClient.send(new DescribeDBInstancesCommand({
    DBInstanceIdentifier: dbInstanceId
  }));
  return response.DBInstances[0];
};
AWSTemplateFormatVersion: '2010-09-09'
Description: 'Full-stack web application infrastructure'
Parameters:
Environment:
Type: String
Default: production
AllowedValues:
- development
- staging
- production
Resources:
# S3 Bucket for static assets
AssetsBucket:
Type: AWS::S3::Bucket
Properties:
BucketName: !Sub '${AWS::StackName}-assets-${Environment}'
VersioningConfiguration:
Status: Enabled
PublicAccessBlockConfiguration:
BlockPublicAcls: true
BlockPublicPolicy: true
IgnorePublicAcls: true
RestrictPublicBuckets: true
BucketEncryption:
ServerSideEncryptionConfiguration:
- ServerSideEncryptionByDefault:
SSEAlgorithm: AES256
# DynamoDB Table
UsersTable:
Type: AWS::DynamoDB::Table
Properties:
TableName: !Sub '${AWS::StackName}-users-${Environment}'
BillingMode: PAY_PER_REQUEST
AttributeDefinitions:
- AttributeName: userId
AttributeType: S
- AttributeName: email
AttributeType: S
KeySchema:
- AttributeName: userId
KeyType: HASH
GlobalSecondaryIndexes:
- IndexName: EmailIndex
KeySchema:
- AttributeName: email
KeyType: HASH
Projection:
ProjectionType: ALL
StreamSpecification:
StreamViewType: NEW_AND_OLD_IMAGES
# Lambda Execution Role
LambdaExecutionRole:
Type: AWS::IAM::Role
Properties:
AssumeRolePolicyDocument:
Version: '2012-10-17'
Statement:
- Effect: Allow
Principal:
Service: lambda.amazonaws.com
Action: sts:AssumeRole
ManagedPolicyArns:
- arn:aws:iam::aws:policy/service-role/AWSLambdaBasicExecutionRole
Policies:
- PolicyName: DynamoDBAccess
PolicyDocument:
Version: '2012-10-17'
Statement:
- Effect: Allow
Action:
- dynamodb:GetItem
- dynamodb:PutItem
- dynamodb:UpdateItem
- dynamodb:Query
Resource: !GetAtt UsersTable.Arn
# Lambda Function
ApiFunction:
Type: AWS::Lambda::Function
Properties:
FunctionName: !Sub '${AWS::StackName}-api-${Environment}'
Runtime: nodejs20.x
Handler: index.handler
Role: !GetAtt LambdaExecutionRole.Arn
Code:
ZipFile: |
exports.handler = async (event) => {
return {
statusCode: 200,
body: JSON.stringify({ message: 'Hello from Lambda!' })
};
};
Environment:
Variables:
TABLE_NAME: !Ref UsersTable
BUCKET_NAME: !Ref AssetsBucket
ENVIRONMENT: !Ref Environment
Timeout: 30
MemorySize: 512
# API Gateway
RestApi:
Type: AWS::ApiGateway::RestApi
Properties:
Name: !Sub '${AWS::StackName}-api-${Environment}'
Description: REST API for application
ApiResource:
Type: AWS::ApiGateway::Resource
Properties:
RestApiId: !Ref RestApi
ParentId: !GetAtt RestApi.RootResourceId
PathPart: users
ApiMethod:
Type: AWS::ApiGateway::Method
Properties:
RestApiId: !Ref RestApi
ResourceId: !Ref ApiResource
HttpMethod: GET
AuthorizationType: NONE
Integration:
Type: AWS_PROXY
IntegrationHttpMethod: POST
Uri: !Sub 'arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${ApiFunction.Arn}/invocations'
ApiDeployment:
Type: AWS::ApiGateway::Deployment
DependsOn: ApiMethod
Properties:
RestApiId: !Ref RestApi
StageName: !Ref Environment
LambdaApiPermission:
Type: AWS::Lambda::Permission
Properties:
FunctionName: !Ref ApiFunction
Action: lambda:InvokeFunction
Principal: apigateway.amazonaws.com
SourceArn: !Sub 'arn:aws:execute-api:${AWS::Region}:${AWS::AccountId}:${RestApi}/*'
Outputs:
ApiUrl:
Description: API Gateway URL
Value: !Sub 'https://${RestApi}.execute-api.${AWS::Region}.amazonaws.com/${Environment}'
Export:
Name: !Sub '${AWS::StackName}-api-url'
BucketName:
Description: S3 Bucket Name
Value: !Ref AssetsBucket
Export:
Name: !Sub '${AWS::StackName}-bucket-name'
TableName:
Description: DynamoDB Table Name
Value: !Ref UsersTable
Export:
Name: !Sub '${AWS::StackName}-table-name'

import {
CloudFormationClient,
CreateStackCommand,
DescribeStacksCommand,
UpdateStackCommand,
DeleteStackCommand
} from '@aws-sdk/client-cloudformation';
import { readFileSync } from 'fs';
const cfClient = new CloudFormationClient({ region: 'us-east-1' });
// Create stack
// Create a CloudFormation stack from a local template file.
const createStack = async (stackName, templatePath, parameters = {}) => {
  const templateBody = readFileSync(templatePath, 'utf8');
  const stackParameters = Object.entries(parameters).map(([name, value]) => ({
    ParameterKey: name,
    ParameterValue: value
  }));
  const response = await cfClient.send(new CreateStackCommand({
    StackName: stackName,
    TemplateBody: templateBody,
    Parameters: stackParameters,
    Capabilities: ['CAPABILITY_IAM'], // Required when the template creates IAM resources
    Tags: [
      { Key: 'ManagedBy', Value: 'CloudFormation' },
      { Key: 'Application', Value: 'MyApp' }
    ]
  }));
  return response.StackId;
};
// Get stack status
// Return the status and outputs of a stack.
const getStackStatus = async (stackName) => {
  const response = await cfClient.send(new DescribeStacksCommand({
    StackName: stackName
  }));
  const [stack] = response.Stacks;
  return {
    status: stack.StackStatus,
    outputs: stack.Outputs || []
  };
};