GCP Cloud Functions Deployment Guide
Status: Complete Implementation Guide
Version: 1.0
Purpose: Comprehensive guide for deploying Cloud Functions Gen2 with modern patterns
Applicable To: Serverless applications, APIs, microservices, and event-driven architectures
Overview
This guide provides comprehensive procedures for deploying Google Cloud Functions Gen2 with modern architectural patterns. Based on production experience with unified API architectures, it covers everything from simple function deployment to complex multi-endpoint APIs with authentication, rate limiting, and monitoring.
Key Benefits
- Gen2 Advantages: Built on Cloud Run, with longer request timeouts, larger memory/CPU allocations, and concurrent request handling per instance
- Unified API Pattern: Single function handling multiple endpoints
- Built-in Security: Authentication, rate limiting, CORS handling
- Production Ready: Monitoring, logging, error handling
- Cost Optimized: Efficient resource usage and scaling
Architecture Decision Framework
Deployment Patterns Comparison
| Pattern | Use Case | Pros | Cons | Best For |
|---|---|---|---|---|
| Single Function | Simple operations | Easy deployment, low overhead | Limited functionality | Webhooks, simple APIs |
| Multiple Functions | Microservices | Independent scaling, clear separation | Domain mapping issues, more overhead | Complex systems |
| Unified API | REST API with multiple endpoints | Single domain, shared middleware, cost efficient | Monolithic deployment | SaaS platforms, APIs |
| Event-Driven | Async processing | Scalable, resilient | Complex debugging | Data processing |
When to Use Each Pattern
Use Unified API When:
- Building REST APIs with multiple endpoints
- Need shared authentication/middleware
- Want single domain mapping
- Cost optimization is important
Use Multiple Functions When:
- Distinct business domains
- Different scaling requirements
- Independent team ownership
- Different runtime requirements
Use Single Function When (a minimal sketch follows this list):
- Simple, focused functionality
- Webhook endpoints
- Proof of concept
- Event handlers
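For reference, the single-function pattern needs only one file and the Functions Framework. A minimal sketch (the function name helloWebhook and its echo behavior are illustrative, not part of the unified API shown later):
// index.js - minimal single-function HTTP handler (illustrative sketch)
const functions = require('@google-cloud/functions-framework');
// Register an HTTP handler; the registered name is what you pass to --entry-point at deploy time
functions.http('helloWebhook', (req, res) => {
  if (req.method !== 'POST') {
    return res.status(405).json({ error: 'Method not allowed' });
  }
  // Echo the payload back - replace with real webhook handling
  res.json({ received: req.body, timestamp: new Date().toISOString() });
});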
Unified API Architecture Pattern
Step 1: Project Structure
# Recommended project structure for unified API
your-api/
├── index.js              # Main entry point
├── package.json          # Dependencies and configuration
├── .gcloudignore         # Deployment exclusions
├── middleware/
│   ├── auth.js           # Authentication middleware
│   ├── cors.js           # CORS handling
│   ├── rateLimit.js      # Rate limiting
│   ├── logging.js        # Request logging
│   └── errorHandler.js   # Error handling
├── routes/
│   ├── v1/
│   │   ├── health.js     # Health check endpoint
│   │   ├── users.js      # User management
│   │   ├── data.js       # Data operations
│   │   └── admin.js      # Admin functions
│   └── webhooks/
│       ├── stripe.js     # Payment webhooks
│       └── github.js     # Git webhooks
├── lib/
│   ├── database.js       # Database connections
│   ├── storage.js        # Cloud Storage
│   ├── auth.js           # Authentication logic
│   └── utils.js          # Utility functions
├── config/
│   ├── development.js    # Dev configuration
│   ├── production.js     # Prod configuration
│   └── index.js          # Config loader
└── test/
    ├── unit/             # Unit tests
    ├── integration/      # Integration tests
    └── fixtures/         # Test data
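A matching package.json is required for the Node.js runtime to install dependencies at deploy time. The sketch below lists the packages used by the entry point in Step 2; the version ranges are placeholders, so pin them to whatever your project actually tests against:
{
  "name": "your-api",
  "version": "1.0.0",
  "main": "index.js",
  "engines": { "node": ">=20" },
  "scripts": {
    "start": "node index.js"
  },
  "dependencies": {
    "@google-cloud/functions-framework": "^3.0.0",
    "express": "^4.18.0",
    "cors": "^2.8.5",
    "helmet": "^7.0.0",
    "firebase-admin": "^12.0.0",
    "jsonwebtoken": "^9.0.0"
  }
}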
Step 2: Main Entry Point
// index.js - Unified API main entry point
const express = require('express');
const cors = require('cors');
const helmet = require('helmet');
// Middleware
const { authMiddleware } = require('./middleware/auth');
const rateLimitMiddleware = require('./middleware/rateLimit');
const loggingMiddleware = require('./middleware/logging');
const errorHandler = require('./middleware/errorHandler');
// Routes
const healthRoutes = require('./routes/v1/health');
const userRoutes = require('./routes/v1/users');
const dataRoutes = require('./routes/v1/data');
const webhookRoutes = require('./routes/webhooks');
// Configuration
const config = require('./config');
// Create Express app
const app = express();
// Security middleware
app.use(helmet({
contentSecurityPolicy: {
directives: {
defaultSrc: ["'self'"],
styleSrc: ["'self'", "'unsafe-inline'"],
scriptSrc: ["'self'"],
imgSrc: ["'self'", "data:", "https:"],
},
},
hsts: {
maxAge: 31536000,
includeSubDomains: true,
preload: true
}
}));
// CORS configuration
app.use(cors({
origin: function (origin, callback) {
// Allow requests with no origin (mobile apps, Postman, etc.)
if (!origin) return callback(null, true);
// Check if origin is in allowed list
if (config.allowedOrigins.indexOf(origin) !== -1) {
return callback(null, true);
}
// For development, allow localhost
if (config.environment === 'development' && origin.includes('localhost')) {
return callback(null, true);
}
return callback(new Error('Not allowed by CORS'));
},
credentials: true,
methods: ['GET', 'POST', 'PUT', 'DELETE', 'OPTIONS'],
allowedHeaders: ['Content-Type', 'Authorization', 'X-API-Key']
}));
// Body parsing middleware
app.use(express.json({ limit: '10mb' }));
app.use(express.urlencoded({ extended: true, limit: '10mb' }));
// Request logging
app.use(loggingMiddleware);
// Rate limiting (before auth to prevent auth bypass)
app.use('/v1', rateLimitMiddleware);
// Health check (no auth required)
app.use('/v1/health', healthRoutes);
// API documentation (no auth required)
app.get('/v1', (req, res) => {
res.json({
name: config.serviceName,
version: config.version,
environment: config.environment,
endpoints: {
health: '/v1/health',
users: '/v1/users',
data: '/v1/data',
webhooks: '/webhooks'
},
documentation: config.documentationUrl,
support: config.supportEmail
});
});
// Authenticated routes
app.use('/v1/users', authMiddleware, userRoutes);
app.use('/v1/data', authMiddleware, dataRoutes);
// Webhook routes (different auth)
app.use('/webhooks', webhookRoutes);
// 404 handler
app.use('*', (req, res) => {
res.status(404).json({
error: {
code: 'NOT_FOUND',
message: 'Endpoint not found',
path: req.originalUrl
},
documentation: config.documentationUrl
});
});
// Error handling middleware (must be last)
app.use(errorHandler);
// Export for Cloud Functions
exports.api = app;
// For local development
if (require.main === module) {
const PORT = process.env.PORT || 8080;
app.listen(PORT, () => {
console.log(`Server running on port ${PORT}`);
console.log(`Documentation: http://localhost:${PORT}/v1`);
console.log(`Health check: http://localhost:${PORT}/v1/health`);
});
}
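The health route mounted above is not shown elsewhere in this guide; a minimal sketch that returns the status: "healthy" payload expected by the test suite later in this guide could look like this:
// routes/v1/health.js - health check endpoint (illustrative sketch)
const express = require('express');
const router = express.Router();
// GET /v1/health - lightweight liveness probe, no auth required
router.get('/', (req, res) => {
  res.json({
    status: 'healthy',
    uptime: process.uptime(), // seconds since this instance started
    timestamp: new Date().toISOString()
  });
});
module.exports = router;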
Step 3: Configuration Management
// config/index.js - Environment-aware configuration
const path = require('path');
const environment = process.env.NODE_ENV || 'development';
// Base configuration
const baseConfig = {
serviceName: process.env.SERVICE_NAME || 'Your API',
version: process.env.npm_package_version || '1.0.0',
environment: environment,
// Google Cloud
projectId: process.env.GCP_PROJECT_ID || process.env.GOOGLE_CLOUD_PROJECT,
region: process.env.GCP_REGION || 'us-central1',
// Database
database: {
projectId: process.env.FIRESTORE_PROJECT_ID || process.env.GOOGLE_CLOUD_PROJECT,
databaseId: process.env.FIRESTORE_DATABASE || '(default)',
},
// Authentication
auth: {
jwtSecret: process.env.JWT_SECRET,
apiKeyHeader: process.env.API_KEY_HEADER || 'X-API-Key',
tokenExpiry: process.env.TOKEN_EXPIRY || '24h'
},
// Rate limiting
rateLimit: {
windowMs: parseInt(process.env.RATE_LIMIT_WINDOW_MS) || 15 * 60 * 1000, // 15 minutes
maxRequests: parseInt(process.env.RATE_LIMIT_MAX_REQUESTS) || 100,
skipSuccessfulRequests: false,
skipFailedRequests: false
},
// CORS
allowedOrigins: (process.env.ALLOWED_ORIGINS || '').split(',').filter(Boolean),
// External services
supportEmail: process.env.SUPPORT_EMAIL || 'support@example.com',
documentationUrl: process.env.DOCUMENTATION_URL || 'https://docs.example.com',
// Logging
logging: {
level: process.env.LOG_LEVEL || 'info',
format: process.env.LOG_FORMAT || 'json'
}
};
// Environment-specific configurations
const environments = {
development: {
...baseConfig,
allowedOrigins: ['http://localhost:3000', 'http://localhost:8080'],
rateLimit: {
...baseConfig.rateLimit,
maxRequests: 1000 // More lenient for development
},
logging: {
level: 'debug',
format: 'simple'
}
},
production: {
...baseConfig,
allowedOrigins: process.env.ALLOWED_ORIGINS ?
process.env.ALLOWED_ORIGINS.split(',') :
['https://yourdomain.com'],
rateLimit: {
...baseConfig.rateLimit,
maxRequests: 100 // Strict for production
}
},
staging: {
...baseConfig,
allowedOrigins: ['https://staging.yourdomain.com'],
rateLimit: {
...baseConfig.rateLimit,
maxRequests: 200 // Moderate for staging
}
}
};
module.exports = environments[environment] || environments.development;
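Secrets such as JWT_SECRET should come from Secret Manager rather than plain environment variables. A hedged example of wiring this up at deploy time (the secret name jwt-secret is illustrative; the deployment script in the next section exposes the same option via --secret-env-vars):
# Create the secret once (illustrative name and value)
echo -n "$(openssl rand -hex 32)" | gcloud secrets create jwt-secret --data-file=-
# Reference it at deploy time; non-secret settings go through --set-env-vars
gcloud functions deploy your-api --gen2 --runtime=nodejs20 --region=us-central1 \
  --source=. --entry-point=api --trigger-http --allow-unauthenticated \
  --set-env-vars=NODE_ENV=production,ALLOWED_ORIGINS=https://yourdomain.com \
  --set-secrets=JWT_SECRET=jwt-secret:latest
The function's runtime service account also needs the Secret Manager Secret Accessor role on that secret.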
Step 4: Authentication Middleware
// middleware/auth.js - Flexible authentication middleware
const admin = require('firebase-admin');
const jwt = require('jsonwebtoken');
const config = require('../config');
// Initialize Firebase Admin (if not already initialized)
if (!admin.apps.length) {
admin.initializeApp({
projectId: config.projectId
});
}
const db = admin.firestore();
// API Key authentication
async function validateApiKey(apiKey) {
try {
// Hash the API key for database lookup
const crypto = require('crypto');
const hashedKey = crypto.createHash('sha256').update(apiKey).digest('hex');
// Look up API key in Firestore
const keyDoc = await db.collection('api_keys').doc(hashedKey).get();
if (!keyDoc.exists) {
return { valid: false, error: 'Invalid API key' };
}
const keyData = keyDoc.data();
// Check if key is active
if (!keyData.active) {
return { valid: false, error: 'API key is disabled' };
}
// Check expiration
if (keyData.expiresAt && keyData.expiresAt.toDate() < new Date()) {
return { valid: false, error: 'API key has expired' };
}
// Check usage limits
if (keyData.usageLimit && keyData.usageCount >= keyData.usageLimit) {
return { valid: false, error: 'API key usage limit exceeded' };
}
return {
valid: true,
user: keyData.userId,
plan: keyData.plan || 'free',
metadata: keyData.metadata || {}
};
} catch (error) {
console.error('API key validation error:', error);
return { valid: false, error: 'Authentication service error' };
}
}
// JWT token authentication
async function validateJwtToken(token) {
try {
const decoded = jwt.verify(token, config.auth.jwtSecret);
// Optional: Check if user still exists and is active
const userDoc = await db.collection('users').doc(decoded.sub).get();
if (!userDoc.exists) {
return { valid: false, error: 'User not found' };
}
const userData = userDoc.data();
if (!userData.active) {
return { valid: false, error: 'User account is disabled' };
}
return {
valid: true,
user: decoded.sub,
plan: userData.plan || 'free',
metadata: userData.metadata || {}
};
} catch (error) {
if (error.name === 'TokenExpiredError') {
return { valid: false, error: 'Token has expired' };
}
if (error.name === 'JsonWebTokenError') {
return { valid: false, error: 'Invalid token' };
}
console.error('JWT validation error:', error);
return { valid: false, error: 'Authentication service error' };
}
}
// Main authentication middleware
async function authMiddleware(req, res, next) {
try {
let authResult = null;
// Check for API key in headers
const apiKey = req.headers[config.auth.apiKeyHeader.toLowerCase()] ||
req.headers['authorization']?.replace('Bearer ', '');
if (apiKey) {
// Determine if it's an API key or JWT token
if (apiKey.startsWith('eyJ')) {
// Looks like a JWT token
authResult = await validateJwtToken(apiKey);
} else {
// Treat as API key
authResult = await validateApiKey(apiKey);
}
}
// Check authorization header for JWT
if (!authResult && req.headers.authorization) {
const token = req.headers.authorization.replace('Bearer ', '');
if (token.startsWith('eyJ')) {
authResult = await validateJwtToken(token);
}
}
if (!authResult) {
return res.status(401).json({
error: {
code: 'AUTH_001',
message: 'Authentication required',
details: {
headers: [config.auth.apiKeyHeader, 'Authorization']
}
},
help: `Include your API key in ${config.auth.apiKeyHeader} header or Authorization: Bearer {token}`
});
}
if (!authResult.valid) {
return res.status(401).json({
error: {
code: 'AUTH_002',
message: authResult.error
}
});
}
// Attach user info to request
req.user = {
id: authResult.user,
plan: authResult.plan,
metadata: authResult.metadata
};
next();
} catch (error) {
console.error('Authentication middleware error:', error);
res.status(500).json({
error: {
code: 'AUTH_003',
message: 'Authentication service unavailable'
}
});
}
}
// Optional: Role-based authorization
function requireRole(roles) {
return (req, res, next) => {
if (!req.user) {
return res.status(401).json({
error: {
code: 'AUTH_004',
message: 'Authentication required'
}
});
}
const userRoles = req.user.metadata.roles || [];
const hasRole = roles.some(role => userRoles.includes(role));
if (!hasRole) {
return res.status(403).json({
error: {
code: 'AUTH_005',
message: 'Insufficient permissions',
details: {
required: roles,
user: userRoles
}
}
});
}
next();
};
}
module.exports = {
authMiddleware,
requireRole,
validateApiKey,
validateJwtToken
};
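The exported requireRole helper composes with authMiddleware per route; for example, the admin routes listed in the project structure could be mounted in index.js as follows (path and role name are illustrative):
// index.js - role-gated admin routes (illustrative)
const adminRoutes = require('./routes/v1/admin');
const { authMiddleware, requireRole } = require('./middleware/auth');
app.use('/v1/admin', authMiddleware, requireRole(['admin']), adminRoutes);
The rate-limiting middleware referenced in index.js is also not shown above. A minimal fixed-window sketch driven by the config values from Step 3 follows; because it runs before authentication it keys on the client IP, and because the counters live in instance memory the limit applies per function instance (back it with Firestore or Redis if you need a global limit):
// middleware/rateLimit.js - per-instance fixed-window limiter (illustrative sketch)
const config = require('../config');
const hits = new Map(); // ip -> { count, windowStart }
module.exports = function rateLimitMiddleware(req, res, next) {
  // Behind Cloud Functions/Cloud Run, set app.set('trust proxy', true) so req.ip reflects X-Forwarded-For
  const key = req.ip;
  const now = Date.now();
  const { windowMs, maxRequests } = config.rateLimit;
  const entry = hits.get(key);
  if (!entry || now - entry.windowStart >= windowMs) {
    hits.set(key, { count: 1, windowStart: now });
    return next();
  }
  entry.count += 1;
  if (entry.count > maxRequests) {
    const retryAfter = Math.ceil((entry.windowStart + windowMs - now) / 1000);
    res.set('Retry-After', String(retryAfter));
    return res.status(429).json({
      error: { code: 'RATE_001', message: 'Rate limit exceeded' },
      retryAfter
    });
  }
  next();
};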
Deployment Scripts
Step 1: Universal Deployment Script
#!/bin/bash
# deploy-function.sh - Universal Cloud Functions deployment script
set -e
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
CONFIG_FILE="${SCRIPT_DIR}/deployment.env"
# Default values
FUNCTION_NAME=""
SOURCE_DIR="."
ENTRY_POINT="api"
RUNTIME="nodejs20"
MEMORY="1GB"
TIMEOUT="60s"
MAX_INSTANCES="100"
MIN_INSTANCES="0"
TRIGGER_TYPE="http"
ALLOW_UNAUTHENTICATED="true"
ENV_VARS=""
SECRET_ENV_VARS=""
# Usage function
usage() {
cat << EOF
Usage: $0 [OPTIONS]
Deploy Google Cloud Functions with comprehensive configuration.
OPTIONS:
-n, --name NAME Function name (required)
-s, --source DIR Source directory (default: .)
-e, --entry-point ENTRY Entry point function (default: api)
-r, --runtime RUNTIME Runtime (default: nodejs20)
-m, --memory MEMORY Memory allocation (default: 1GB)
-t, --timeout TIMEOUT Timeout (default: 60s)
--max-instances MAX Maximum instances (default: 100)
--min-instances MIN Minimum instances (default: 0)
--trigger TYPE Trigger type: http, pubsub, storage (default: http)
--require-auth Require authentication (HTTP functions default to --allow-unauthenticated)
--env-vars VARS Environment variables (KEY1=value1,KEY2=value2)
--secret-env-vars SECRETS Secret environment variables
--vpc-connector CONNECTOR VPC connector name
--service-account SA Service account email
-h, --help Show this help
EXAMPLES:
# Simple HTTP function
$0 -n my-api -s ./src
# Function with custom configuration
$0 -n my-api -s ./api -m 2GB -t 120s --max-instances 50
# Function with environment variables
$0 -n my-api --env-vars NODE_ENV=production,DEBUG=false
# PubSub triggered function (topic is read from the PUBSUB_TOPIC environment variable)
PUBSUB_TOPIC=my-topic $0 -n process-data --trigger pubsub
EOF
}
# Parse command line arguments
parse_arguments() {
while [[ $# -gt 0 ]]; do
case $1 in
-n|--name)
FUNCTION_NAME="$2"
shift 2
;;
-s|--source)
SOURCE_DIR="$2"
shift 2
;;
-e|--entry-point)
ENTRY_POINT="$2"
shift 2
;;
-r|--runtime)
RUNTIME="$2"
shift 2
;;
-m|--memory)
MEMORY="$2"
shift 2
;;
-t|--timeout)
TIMEOUT="$2"
shift 2
;;
--max-instances)
MAX_INSTANCES="$2"
shift 2
;;
--min-instances)
MIN_INSTANCES="$2"
shift 2
;;
--trigger)
TRIGGER_TYPE="$2"
shift 2
;;
--require-auth)
ALLOW_UNAUTHENTICATED="false"
shift
;;
--env-vars)
ENV_VARS="$2"
shift 2
;;
--secret-env-vars)
SECRET_ENV_VARS="$2"
shift 2
;;
--vpc-connector)
VPC_CONNECTOR="$2"
shift 2
;;
--service-account)
SERVICE_ACCOUNT="$2"
shift 2
;;
-h|--help)
usage
exit 0
;;
*)
echo "Unknown option: $1"
usage
exit 1
;;
esac
done
# Validate required arguments
if [ -z "$FUNCTION_NAME" ]; then
echo "Error: Function name is required"
usage
exit 1
fi
}
# Load configuration
load_config() {
if [ -f "$CONFIG_FILE" ]; then
source "$CONFIG_FILE"
echo "β
Configuration loaded from: $CONFIG_FILE"
else
echo "β οΈ No configuration file found at: $CONFIG_FILE"
echo "Using default values and environment variables"
fi
}
# Validate source directory and files
validate_source() {
if [ ! -d "$SOURCE_DIR" ]; then
echo "β Source directory not found: $SOURCE_DIR"
exit 1
fi
# Check for required files based on runtime
case $RUNTIME in
nodejs*)
if [ ! -f "$SOURCE_DIR/package.json" ]; then
echo "β package.json not found in source directory"
exit 1
fi
echo "β
Node.js project structure validated"
;;
python*)
if [ ! -f "$SOURCE_DIR/requirements.txt" ] && [ ! -f "$SOURCE_DIR/main.py" ]; then
echo "β οΈ No requirements.txt or main.py found"
fi
echo "β
Python project structure validated"
;;
go*)
if [ ! -f "$SOURCE_DIR/go.mod" ]; then
echo "β οΈ No go.mod found"
fi
echo "β
Go project structure validated"
;;
esac
}
# Create .gcloudignore if it doesn't exist
create_gcloudignore() {
local ignore_file="$SOURCE_DIR/.gcloudignore"
if [ ! -f "$ignore_file" ]; then
echo "Creating .gcloudignore file..."
cat > "$ignore_file" << 'EOF'
# Git
.git
.gitignore
# Node.js
node_modules/
npm-debug.log
yarn-error.log
.npm
.yarn
# Environment files
.env*
!.env.example
# IDE and editors
.vscode/
.idea/
*.swp
*.swo
*~
# OS files
.DS_Store
Thumbs.db
# Logs
*.log
logs/
# Testing
coverage/
.nyc_output/
test/
tests/
__tests__/
*.test.js
*.spec.js
# Documentation
docs/
*.md
!README.md
# Build artifacts
dist/
build/
.next/
.nuxt/
# Temporary files
tmp/
temp/
*.tmp
# Database
*.db
*.sqlite
# Archives
*.zip
*.tar.gz
*.rar
# Python
__pycache__/
*.pyc
*.pyo
*.pyd
.Python
env/
venv/
.venv/
pip-log.txt
pip-delete-this-directory.txt
# Go
*.exe
*.exe~
*.dll
*.so
*.dylib
vendor/
EOF
echo "β
Created .gcloudignore file"
fi
}
# Build deployment command
build_deploy_command() {
local cmd="gcloud functions deploy $FUNCTION_NAME"
cmd="$cmd --gen2"
cmd="$cmd --runtime=$RUNTIME"
cmd="$cmd --region=${REGION:-us-central1}"
cmd="$cmd --source=$SOURCE_DIR"
cmd="$cmd --entry-point=$ENTRY_POINT"
cmd="$cmd --memory=$MEMORY"
cmd="$cmd --timeout=$TIMEOUT"
cmd="$cmd --max-instances=$MAX_INSTANCES"
cmd="$cmd --min-instances=$MIN_INSTANCES"
# Add trigger configuration
case $TRIGGER_TYPE in
http)
cmd="$cmd --trigger-http"
if [ "$ALLOW_UNAUTHENTICATED" = "true" ]; then
cmd="$cmd --allow-unauthenticated"
fi
;;
pubsub)
if [ -n "$PUBSUB_TOPIC" ]; then
cmd="$cmd --trigger-topic=$PUBSUB_TOPIC"
else
echo "β PUBSUB_TOPIC required for pubsub trigger"
exit 1
fi
;;
storage)
if [ -n "$STORAGE_BUCKET" ]; then
cmd="$cmd --trigger-bucket=$STORAGE_BUCKET"
else
echo "β STORAGE_BUCKET required for storage trigger"
exit 1
fi
;;
esac
# Add environment variables
if [ -n "$ENV_VARS" ]; then
cmd="$cmd --set-env-vars=$ENV_VARS"
fi
# Add secret environment variables
if [ -n "$SECRET_ENV_VARS" ]; then
cmd="$cmd --set-secrets=$SECRET_ENV_VARS"
fi
# Add VPC connector
if [ -n "$VPC_CONNECTOR" ]; then
cmd="$cmd --vpc-connector=$VPC_CONNECTOR"
fi
# Add service account
if [ -n "$SERVICE_ACCOUNT" ]; then
cmd="$cmd --service-account=$SERVICE_ACCOUNT"
fi
# Add project ID
if [ -n "$PROJECT_ID" ]; then
cmd="$cmd --project=$PROJECT_ID"
fi
echo "$cmd"
}
# Deploy function
deploy_function() {
echo
echo "==========================================="
echo "βοΈ Deploying Cloud Function"
echo "Function: $FUNCTION_NAME"
echo "Source: $SOURCE_DIR"
echo "Runtime: $RUNTIME"
echo "Memory: $MEMORY"
echo "Timeout: $TIMEOUT"
echo "==========================================="
local deploy_cmd=$(build_deploy_command)
echo "Deployment command:"
echo "$deploy_cmd"
echo
# Execute deployment
if eval "$deploy_cmd"; then
echo "β
Function deployed successfully!"
# Get function URL for HTTP triggers
if [ "$TRIGGER_TYPE" = "http" ]; then
local function_url=$(gcloud functions describe "$FUNCTION_NAME" \
--gen2 \
--region="${REGION:-us-central1}" \
--project="$PROJECT_ID" \
--format="value(serviceConfig.uri)" 2>/dev/null)
if [ -n "$function_url" ]; then
echo
echo "Function URL: $function_url"
# Save URL to file
echo "$FUNCTION_NAME: $function_url" >> "${SCRIPT_DIR}/function-urls.txt"
echo "URL saved to: ${SCRIPT_DIR}/function-urls.txt"
fi
fi
return 0
else
echo "β Function deployment failed!"
return 1
fi
}
# Test function after deployment
test_function() {
if [ "$TRIGGER_TYPE" = "http" ]; then
local function_url=$(gcloud functions describe "$FUNCTION_NAME" \
--gen2 \
--region="${REGION:-us-central1}" \
--project="$PROJECT_ID" \
--format="value(serviceConfig.uri)" 2>/dev/null)
if [ -n "$function_url" ]; then
echo
echo "Testing function..."
# Basic health check
if curl -s -f "$function_url/v1/health" > /dev/null; then
echo "✅ Function is responding"
else
echo "⚠️ Function may not be responding correctly"
echo " Test manually: $function_url"
fi
fi
fi
}
# Main execution
main() {
parse_arguments "$@"
load_config
validate_source
create_gcloudignore
deploy_function && test_function
echo
echo "β
Deployment complete!"
echo
echo "π Function Details:"
gcloud functions describe "$FUNCTION_NAME" \
--region="${REGION:-us-central1}" \
--format="table(name,status,trigger)" \
--project="$PROJECT_ID"
}
main "$@"
Step 2: Multi-Function Deployment
#!/bin/bash
# deploy-multiple-functions.sh - Deploy multiple functions with dependencies
set -e
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
CONFIG_FILE="${SCRIPT_DIR}/deployment.env"
# Load configuration
source "$CONFIG_FILE"
# Function definitions
declare -A FUNCTIONS
# Define your functions here
FUNCTIONS[api-gateway]="source=./api-gateway,entry=gateway,memory=512MB,timeout=60s"
FUNCTIONS[user-service]="source=./services/users,entry=userHandler,memory=256MB,timeout=30s"
FUNCTIONS[data-processor]="source=./services/processor,entry=processData,memory=1GB,timeout=300s,trigger=pubsub,topic=data-events"
FUNCTIONS[webhook-handler]="source=./webhooks,entry=handleWebhook,memory=256MB,timeout=30s"
# Deploy functions in dependency order
deploy_functions() {
echo "==========================================="
echo "π¦ Multi-Function Deployment"
echo "Project: $PROJECT_ID"
echo "Region: $REGION"
echo "==========================================="
local deployment_order=(
"user-service" # Deploy backend services first
"data-processor" # Then data processing
"webhook-handler" # Then webhook handlers
"api-gateway" # Finally the gateway
)
for function_name in "${deployment_order[@]}"; do
if [ -n "${FUNCTIONS[$function_name]}" ]; then
echo
echo "Deploying: $function_name"
echo "Config: ${FUNCTIONS[$function_name]}"
# Parse function configuration
local config="${FUNCTIONS[$function_name]}"
local source_dir=$(echo "$config" | sed -n 's/.*source=\([^,]*\).*/\1/p')
local entry_point=$(echo "$config" | sed -n 's/.*entry=\([^,]*\).*/\1/p')
local memory=$(echo "$config" | sed -n 's/.*memory=\([^,]*\).*/\1/p')
local timeout=$(echo "$config" | sed -n 's/.*timeout=\([^,]*\).*/\1/p')
local trigger=$(echo "$config" | sed -n 's/.*trigger=\([^,]*\).*/\1/p')
local topic=$(echo "$config" | sed -n 's/.*topic=\([^,]*\).*/\1/p')
# Build deployment command
local deploy_args="-n $function_name -s $source_dir"
[ -n "$entry_point" ] && deploy_args="$deploy_args -e $entry_point"
[ -n "$memory" ] && deploy_args="$deploy_args -m $memory"
[ -n "$timeout" ] && deploy_args="$deploy_args -t $timeout"
[ -n "$trigger" ] && deploy_args="$deploy_args --trigger $trigger"
# Set environment variables for this function
local env_vars="NODE_ENV=production,FUNCTION_NAME=$function_name"
[ -n "$topic" ] && env_vars="$env_vars,PUBSUB_TOPIC=$topic"
deploy_args="$deploy_args --env-vars $env_vars"
# Deploy function (PUBSUB_TOPIC is passed through so deploy-function.sh can build the --trigger-topic flag)
if PUBSUB_TOPIC="$topic" ./deploy-function.sh $deploy_args; then
echo "β
$function_name deployed successfully"
else
echo "β Failed to deploy $function_name"
exit 1
fi
# Wait between deployments to avoid rate limits
sleep 5
fi
done
}
# Update function URLs and create service map
create_service_map() {
echo
echo "Creating service map..."
cat > "${SCRIPT_DIR}/service-map.json" << EOF
{
"deployment_timestamp": "$(date -u +"%Y-%m-%d %H:%M:%S UTC")",
"project_id": "$PROJECT_ID",
"region": "$REGION",
"functions": {
EOF
local first=true
for function_name in "${!FUNCTIONS[@]}"; do
local function_url=$(gcloud functions describe "$function_name" \
--gen2 \
--region="$REGION" \
--project="$PROJECT_ID" \
--format="value(serviceConfig.uri)" 2>/dev/null || echo "")
if [ -n "$function_url" ]; then
[ "$first" = true ] && first=false || echo "," >> "${SCRIPT_DIR}/service-map.json"
cat >> "${SCRIPT_DIR}/service-map.json" << EOF
"$function_name": {
"url": "$function_url",
"config": "${FUNCTIONS[$function_name]}"
}
EOF
fi
done
cat >> "${SCRIPT_DIR}/service-map.json" << EOF
}
}
EOF
echo "β
Service map created: ${SCRIPT_DIR}/service-map.json"
}
# Main execution
main() {
deploy_functions
create_service_map
echo
echo "β
Multi-function deployment complete!"
echo
echo "π Deployed Functions:"
gcloud functions list --project="$PROJECT_ID" --format="table(name,status,trigger)"
}
main "$@"
Testing & Validation
Comprehensive Testing Framework
#!/bin/bash
# test-functions.sh - Comprehensive function testing
set -e
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
CONFIG_FILE="${SCRIPT_DIR}/deployment.env"
# Load configuration
source "$CONFIG_FILE"
# Test configuration
TEST_API_KEY="${TEST_API_KEY:-}"
TEST_USER_TOKEN="${TEST_USER_TOKEN:-}"
PARALLEL_TESTS="${PARALLEL_TESTS:-false}"
# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m' # No Color
# Test results
declare -A TEST_RESULTS
TOTAL_TESTS=0
PASSED_TESTS=0
FAILED_TESTS=0
# Utility functions
print_test_header() {
echo -e "${BLUE}==========================================="
echo -e "π§ͺ $1"
echo -e "===========================================${NC}"
}
print_test_result() {
local test_name="$1"
local result="$2"
local message="$3"
TOTAL_TESTS=$((TOTAL_TESTS + 1))
TEST_RESULTS["$test_name"]="$result"
if [ "$result" = "PASS" ]; then
echo -e "${GREEN}β
$test_name: $message${NC}"
PASSED_TESTS=$((PASSED_TESTS + 1))
else
echo -e "${RED}β $test_name: $message${NC}"
FAILED_TESTS=$((FAILED_TESTS + 1))
fi
}
# Get function URL
get_function_url() {
local function_name="$1"
gcloud functions describe "$function_name" \
--gen2 \
--region="$REGION" \
--project="$PROJECT_ID" \
--format="value(serviceConfig.uri)" 2>/dev/null || echo ""
}
# Basic connectivity tests
test_function_connectivity() {
local function_name="$1"
local function_url=$(get_function_url "$function_name")
if [ -z "$function_url" ]; then
print_test_result "connectivity-$function_name" "FAIL" "Function URL not found"
return 1
fi
# Test basic connectivity
if curl -s -f "$function_url" -m 10 > /dev/null 2>&1; then
print_test_result "connectivity-$function_name" "PASS" "Function is reachable"
return 0
else
print_test_result "connectivity-$function_name" "FAIL" "Function is not reachable"
return 1
fi
}
# Health check tests
test_health_endpoints() {
local function_name="$1"
local function_url=$(get_function_url "$function_name")
if [ -z "$function_url" ]; then
print_test_result "health-$function_name" "FAIL" "Function URL not found"
return 1
fi
# Test health endpoint
local health_response=$(curl -s "$function_url/v1/health" -m 10 2>/dev/null)
local health_status=$(echo "$health_response" | jq -r '.status' 2>/dev/null || echo "")
if [ "$health_status" = "healthy" ]; then
print_test_result "health-$function_name" "PASS" "Health check passed"
return 0
else
print_test_result "health-$function_name" "FAIL" "Health check failed: $health_response"
return 1
fi
}
# API documentation tests
test_api_documentation() {
local function_name="$1"
local function_url=$(get_function_url "$function_name")
if [ -z "$function_url" ]; then
print_test_result "docs-$function_name" "FAIL" "Function URL not found"
return 1
fi
# Test API documentation endpoint
local docs_response=$(curl -s "$function_url/v1" -m 10 2>/dev/null)
local service_name=$(echo "$docs_response" | jq -r '.name' 2>/dev/null || echo "")
if [ -n "$service_name" ]; then
print_test_result "docs-$function_name" "PASS" "API documentation available"
return 0
else
print_test_result "docs-$function_name" "FAIL" "API documentation not available"
return 1
fi
}
# Authentication tests
test_authentication() {
local function_name="$1"
local function_url=$(get_function_url "$function_name")
if [ -z "$function_url" ]; then
print_test_result "auth-$function_name" "FAIL" "Function URL not found"
return 1
fi
# Test unauthenticated request to protected endpoint
local unauth_response=$(curl -s "$function_url/v1/users" -w "%{http_code}" -o /dev/null -m 10 2>/dev/null)
if [ "$unauth_response" = "401" ]; then
print_test_result "auth-unauth-$function_name" "PASS" "Unauthenticated request properly rejected"
else
print_test_result "auth-unauth-$function_name" "FAIL" "Expected 401, got $unauth_response"
fi
# Test with API key (if available)
if [ -n "$TEST_API_KEY" ]; then
local auth_response=$(curl -s "$function_url/v1/users" \
-H "X-API-Key: $TEST_API_KEY" \
-w "%{http_code}" -o /dev/null -m 10 2>/dev/null)
if [ "$auth_response" = "200" ] || [ "$auth_response" = "404" ]; then
print_test_result "auth-apikey-$function_name" "PASS" "API key authentication working"
else
print_test_result "auth-apikey-$function_name" "FAIL" "API key authentication failed: $auth_response"
fi
fi
}
# CORS tests
test_cors_configuration() {
local function_name="$1"
local function_url=$(get_function_url "$function_name")
if [ -z "$function_url" ]; then
print_test_result "cors-$function_name" "FAIL" "Function URL not found"
return 1
fi
# Test CORS preflight
local cors_response=$(curl -s "$function_url/v1/health" \
-H "Origin: https://example.com" \
-H "Access-Control-Request-Method: GET" \
-X OPTIONS \
-w "%{http_code}" -o /dev/null -m 10 2>/dev/null)
if [ "$cors_response" = "200" ] || [ "$cors_response" = "204" ]; then
print_test_result "cors-$function_name" "PASS" "CORS preflight working"
else
print_test_result "cors-$function_name" "FAIL" "CORS preflight failed: $cors_response"
fi
}
# Performance tests
test_function_performance() {
local function_name="$1"
local function_url=$(get_function_url "$function_name")
if [ -z "$function_url" ]; then
print_test_result "perf-$function_name" "FAIL" "Function URL not found"
return 1
fi
# Test response time
local start_time=$(date +%s%N)
curl -s "$function_url/v1/health" -m 10 > /dev/null 2>&1
local end_time=$(date +%s%N)
local response_time=$(((end_time - start_time) / 1000000)) # Convert to milliseconds
if [ "$response_time" -lt 5000 ]; then # Less than 5 seconds
print_test_result "perf-$function_name" "PASS" "Response time: ${response_time}ms"
else
print_test_result "perf-$function_name" "FAIL" "Slow response time: ${response_time}ms"
fi
}
# Load tests
test_concurrent_requests() {
local function_name="$1"
local function_url=$(get_function_url "$function_name")
if [ -z "$function_url" ]; then
print_test_result "load-$function_name" "FAIL" "Function URL not found"
return 1
fi
echo "Running concurrent request test..."
# Run 10 concurrent requests and count the successes
local success_count=0
local pids=()
for i in {1..10}; do
curl -s -f "$function_url/v1/health" -m 10 > /dev/null 2>&1 &
pids+=($!)
done
# Wait for each background request; wait returns that request's exit status
for pid in "${pids[@]}"; do
wait "$pid" && success_count=$((success_count + 1))
done
if [ "$success_count" -eq 10 ]; then
print_test_result "load-$function_name" "PASS" "All 10 concurrent requests succeeded"
else
print_test_result "load-$function_name" "FAIL" "Only $success_count/10 concurrent requests succeeded"
fi
}
# Test specific function
test_function() {
local function_name="$1"
print_test_header "Testing Function: $function_name"
test_function_connectivity "$function_name"
test_health_endpoints "$function_name"
test_api_documentation "$function_name"
test_authentication "$function_name"
test_cors_configuration "$function_name"
test_function_performance "$function_name"
if [ "$PARALLEL_TESTS" = "false" ]; then
test_concurrent_requests "$function_name"
fi
}
# Test all deployed functions
test_all_functions() {
print_test_header "Cloud Functions Testing Suite"
# Get list of deployed functions
local functions=$(gcloud functions list --project="$PROJECT_ID" \
--regions="$REGION" --format="value(name.basename())")
if [ -z "$functions" ]; then
echo -e "${YELLOW}β οΈ No functions found in region $REGION${NC}"
return 1
fi
echo "Found functions: $functions"
echo
# Test each function
for function_name in $functions; do
test_function "$function_name"
echo
done
}
# Generate test report
generate_test_report() {
print_test_header "Test Results Summary"
echo "π Test Statistics:"
echo " Total Tests: $TOTAL_TESTS"
echo " Passed: $PASSED_TESTS"
echo " Failed: $FAILED_TESTS"
echo " Success Rate: $((PASSED_TESTS * 100 / TOTAL_TESTS))%"
echo
if [ $FAILED_TESTS -gt 0 ]; then
echo -e "${RED}β Failed Tests:${NC}"
for test_name in "${!TEST_RESULTS[@]}"; do
if [ "${TEST_RESULTS[$test_name]}" = "FAIL" ]; then
echo " β’ $test_name"
fi
done
echo
fi
# Save results to file
cat > "${SCRIPT_DIR}/test-results.json" << EOF
{
"timestamp": "$(date -u +"%Y-%m-%d %H:%M:%S UTC")",
"project_id": "$PROJECT_ID",
"region": "$REGION",
"summary": {
"total_tests": $TOTAL_TESTS,
"passed_tests": $PASSED_TESTS,
"failed_tests": $FAILED_TESTS,
"success_rate": $((PASSED_TESTS * 100 / TOTAL_TESTS))
},
"results": {
EOF
local first=true
for test_name in "${!TEST_RESULTS[@]}"; do
[ "$first" = true ] && first=false || echo "," >> "${SCRIPT_DIR}/test-results.json"
echo " \"$test_name\": \"${TEST_RESULTS[$test_name]}\"" >> "${SCRIPT_DIR}/test-results.json"
done
cat >> "${SCRIPT_DIR}/test-results.json" << EOF
}
}
EOF
echo "π Test results saved to: ${SCRIPT_DIR}/test-results.json"
}
# Main execution
main() {
if [ $# -gt 0 ]; then
# Test specific function
test_function "$1"
else
# Test all functions
test_all_functions
fi
generate_test_report
if [ $FAILED_TESTS -gt 0 ]; then
echo -e "${RED}β Some tests failed${NC}"
exit 1
else
echo -e "${GREEN}β
All tests passed${NC}"
fi
}
main "$@"
This comprehensive Cloud Functions deployment guide provides everything needed to deploy modern, production-ready serverless applications on Google Cloud Platform. The unified API pattern solves common architectural challenges while the deployment scripts ensure consistent, repeatable deployments with proper testing and validation.