Loading...
Loading...
Google Cloud Platform (GCP) development best practices for Cloud Functions, Cloud Run, Firestore, BigQuery, and Infrastructure as Code.
npx skill4agent add mindrally/skills gcp-development

infrastructure/
├── main.tf # Primary resources
├── variables.tf # Input variables
├── outputs.tf # Output values
├── versions.tf # Provider versions
├── terraform.tfvars # Variable values
└── modules/
├── compute/
├── storage/
└── networking/

src/
├── functions/ # Cloud Functions
├── services/ # Cloud Run services
├── shared/ # Shared utilities
└── tests/ # Test files

import { HttpFunction } from '@google-cloud/functions-framework';
// HTTP-triggered Cloud Function entry point: accepts only POST requests,
// delegates to processRequest (assumed defined elsewhere in the project),
// and responds with JSON.
export const helloWorld: HttpFunction = async (req, res) => {
  try {
    // Validate request: anything other than POST is rejected up front
    if (req.method !== 'POST') {
      res.status(405).send('Method Not Allowed');
      return;
    }
    // Business logic
    const result = await processRequest(req.body);
    res.status(200).json(result);
  } catch (error) {
    // Log full details server-side; return a generic 500 so internal
    // error information is not leaked to the client
    console.error('Function error:', error);
    res.status(500).json({ error: 'Internal Server Error' });
  }
};

FROM node:20-slim AS builder
WORKDIR /app
COPY package*.json ./
# Install production dependencies only. --omit=dev is the current npm
# spelling; --only=production is deprecated in npm >= 8.
RUN npm ci --omit=dev

# Distroless runtime: no shell or package manager, smaller attack surface
FROM gcr.io/distroless/nodejs20-debian12
WORKDIR /app
# BUG FIX: --from=builder was missing, so COPY read /app/node_modules from
# the build context (where it does not exist) instead of the builder stage.
COPY --from=builder /app/node_modules ./node_modules
# NOTE(review): this copies raw sources, but CMD runs dist/index.js and no
# step above produces dist/ — confirm a compile/build step exists upstream.
COPY . .
CMD ["dist/index.js"]

# service.yaml
# Knative Service manifest as deployed by Cloud Run (fully managed)
apiVersion: serving.knative.dev/v1
kind: Service
metadata:
  name: my-service
spec:
  template:
    spec:
      containers:
        - image: gcr.io/PROJECT_ID/my-service # PROJECT_ID is a placeholder
          resources:
            # Explicit limits keep scheduling and billing predictable
            limits:
              memory: 512Mi
              cpu: '1' # quoted so YAML treats it as a string
          env:
            - name: NODE_ENV
              value: production

import { Firestore } from '@google-cloud/firestore';
// BUG FIX: FieldValue is a named export of '@google-cloud/firestore';
// Firestore.FieldValue is not part of the Node.js client's typed API, so
// the original Firestore.FieldValue.serverTimestamp() does not compile.
import { FieldValue } from '@google-cloud/firestore';

const db = new Firestore();

// Use a transaction so the existence check and the update are atomic.
await db.runTransaction(async (transaction) => {
  const docRef = db.collection('users').doc(userId);
  // Transactions require all reads to happen before any writes.
  const doc = await transaction.get(docRef);
  if (!doc.exists) {
    throw new Error('User not found');
  }
  transaction.update(docRef, {
    // Server-side timestamp avoids trusting the client clock.
    lastLogin: FieldValue.serverTimestamp(),
  });
});

import { Storage } from '@google-cloud/storage';
// Client picks up Application Default Credentials from the environment
const storage = new Storage();
const bucket = storage.bucket('my-bucket');
// V4 signed URL: lets a client upload the object directly to GCS without
// ever holding service-account credentials
const [url] = await bucket.file('uploads/file.pdf').getSignedUrl({
  version: 'v4',
  action: 'write', // grants an upload (PUT), not a download
  expires: Date.now() + 15 * 60 * 1000, // 15 minutes
  contentType: 'application/pdf', // the upload must send this exact Content-Type
});

terraform {
# Minimum Terraform CLI version this configuration supports
required_version = ">= 1.0"
required_providers {
google = {
source = "hashicorp/google"
# Any 5.x release of the Google provider; major upgrades are explicit
version = "~> 5.0"
}
}
# Remote state in a GCS bucket: shared state and locking for the team
backend "gcs" {
bucket = "my-terraform-state"
prefix = "terraform/state"
}
}
# Credentials come from Application Default Credentials; project and
# region are supplied via input variables (variables.tf)
provider "google" {
project = var.project_id
region = var.region
}

import { SecretManagerServiceClient } from '@google-cloud/secret-manager';
const client = new SecretManagerServiceClient();

/**
 * Fetches the latest version of a secret from Secret Manager.
 *
 * Throws when the payload is missing instead of silently returning '',
 * so a failed lookup can never be mistaken for an empty secret value.
 *
 * NOTE(review): PROJECT_ID is a literal placeholder here — resolve the
 * real project (e.g. process.env.GOOGLE_CLOUD_PROJECT) in production.
 */
async function getSecret(secretName: string): Promise<string> {
  const [version] = await client.accessSecretVersion({
    // 'latest' resolves to the most recently added enabled version
    name: `projects/PROJECT_ID/secrets/${secretName}/versions/latest`,
  });
  const data = version.payload?.data;
  if (!data) {
    throw new Error(`Secret ${secretName} has no payload`);
  }
  return data.toString();
}

# cloudbuild.yaml
steps:
  # Install exact dependency versions from package-lock.json
  - name: 'node:20'
    entrypoint: npm
    args: ['ci']
  # Gate the pipeline on the test suite before building an image
  - name: 'node:20'
    entrypoint: npm
    args: ['test']
  # Build the container image; $PROJECT_ID is substituted by Cloud Build
  - name: 'gcr.io/cloud-builders/docker'
    args: ['build', '-t', 'gcr.io/$PROJECT_ID/my-service', '.']
  # Push so Cloud Run can pull the image at deploy time
  - name: 'gcr.io/cloud-builders/docker'
    args: ['push', 'gcr.io/$PROJECT_ID/my-service']
  # Deploy the pushed image to Cloud Run
  - name: 'gcr.io/google.com/cloudsdktool/cloud-sdk'
    entrypoint: gcloud
    args:
      - 'run'
      - 'deploy'
      - 'my-service'
      - '--image=gcr.io/$PROJECT_ID/my-service'
      - '--region=us-central1'

import { TraceExporter } from '@google-cloud/opentelemetry-cloud-trace-exporter';
// BUG FIX: BatchSpanProcessor was used below but never imported;
// @opentelemetry/sdk-trace-node re-exports it from sdk-trace-base.
import { BatchSpanProcessor, NodeTracerProvider } from '@opentelemetry/sdk-trace-node';

const provider = new NodeTracerProvider();
// Batch spans in memory and export asynchronously to Cloud Trace,
// rather than making one network call per span.
provider.addSpanProcessor(
  new BatchSpanProcessor(new TraceExporter())
);
// Install this provider as the global tracer provider for @opentelemetry/api.
provider.register();