#!/bin/bash

# Cloud Function Diagnostic Script
# Usage: ./diagnose_cloud_function.sh <project_id>

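# Abort as soon as any unguarded command fails; gcloud/gsutil calls that are allowed to
# fail are wrapped in if-conditions or given a "|| ..." fallback below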
set -e

PROJECT_ID=$1

if [ -z "$PROJECT_ID" ]; then
    echo "❌ Error: Project ID is required as first argument"
    echo "Usage: $0 <project_id>"
    exit 1
fi

echo "🔍 Cloud Function Diagnostic Report"
echo "Project: $PROJECT_ID"
echo "Date: $(date)"
echo "======================================="
echo ""

# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
CYAN='\033[0;36m'
NC='\033[0m' # No Color

# Helper functions
print_success() { echo -e "${GREEN}✅ $1${NC}"; }
print_error() { echo -e "${RED}❌ $1${NC}"; }
print_warning() { echo -e "${YELLOW}⚠️ $1${NC}"; }
print_info() { echo -e "${CYAN}ℹ️ $1${NC}"; }
print_step() { echo -e "${BLUE}🔄 $1${NC}"; }

# 1. Check if Cloud Function exists
print_step "1. Checking Cloud Function deployment status..."
if gcloud functions describe titanic-data-loader --region=us-central1 --project="$PROJECT_ID" --quiet >/dev/null 2>&1; then
    print_success "Cloud Function 'titanic-data-loader' exists"

    # Get detailed function info
    echo ""
    print_info "Function Details:"
    gcloud functions describe titanic-data-loader --region=us-central1 --project="$PROJECT_ID" --format="table(name,status,runtime,timeout,availableMemoryMb,serviceAccountEmail,eventTrigger.eventType,eventTrigger.resource)"

    # Check function status
    FUNCTION_STATUS=$(gcloud functions describe titanic-data-loader --region=us-central1 --project="$PROJECT_ID" --format="value(status)")
    if [ "$FUNCTION_STATUS" = "ACTIVE" ]; then
        print_success "Function status: ACTIVE"
    else
        print_error "Function status: $FUNCTION_STATUS (Expected: ACTIVE)"
    fi

else
    print_error "Cloud Function 'titanic-data-loader' NOT FOUND"
    echo ""
    print_info "Available functions in project:"
    gcloud functions list --project="$PROJECT_ID" --format="table(name,status,trigger.eventTrigger.eventType)"
    echo ""
    print_warning "This indicates the Terraform deployment may have failed or the function wasn't created"
    exit 1
fi

echo ""

# 2. Check service account
print_step "2. Checking Cloud Function service account..."
FUNCTION_SA=$(gcloud functions describe titanic-data-loader --region=us-central1 --project="$PROJECT_ID" --format="value(serviceAccountEmail)")
if [ -n "$FUNCTION_SA" ]; then
    print_success "Service Account: $FUNCTION_SA"

    # Check if service account exists
    if gcloud iam service-accounts describe "$FUNCTION_SA" --project="$PROJECT_ID" >/dev/null 2>&1; then
        print_success "Service account exists and is accessible"
    else
        print_error "Service account $FUNCTION_SA does not exist or is not accessible"
    fi
else
    print_warning "No service account configured (using default)"
fi

echo ""

# 3. Check trigger configuration
print_step "3. Checking trigger configuration..."
TRIGGER_BUCKET=$(gcloud functions describe titanic-data-loader --region=us-central1 --project="$PROJECT_ID" --format="value(eventTrigger.resource)")
TRIGGER_EVENT=$(gcloud functions describe titanic-data-loader --region=us-central1 --project="$PROJECT_ID" --format="value(eventTrigger.eventType)")

if [ -n "$TRIGGER_BUCKET" ]; then
    print_success "Trigger configured: $TRIGGER_EVENT on $TRIGGER_BUCKET"

    # eventTrigger.resource is usually "projects/_/buckets/<bucket>"; normalize it to a
    # gs:// URL so gsutil can resolve it
    TRIGGER_BUCKET_URL="gs://${TRIGGER_BUCKET#projects/_/buckets/}"

    # Check if trigger bucket exists
    if gsutil ls "$TRIGGER_BUCKET_URL" >/dev/null 2>&1; then
        print_success "Trigger bucket exists and is accessible"

        # Check bucket contents
        BUCKET_CONTENTS=$(gsutil ls "$TRIGGER_BUCKET_URL/**" 2>/dev/null || echo "")
        if [ -n "$BUCKET_CONTENTS" ]; then
            print_info "Bucket contents:"
            echo "$BUCKET_CONTENTS"
        else
            print_info "Bucket is empty"
        fi
    else
        print_error "Trigger bucket $TRIGGER_BUCKET does not exist or is not accessible"
    fi
else
    print_error "No trigger configuration found"
fi

echo ""

# 4. Check function source code
print_step "4. Checking function source code..."
SOURCE_BUCKET=$(gcloud functions describe titanic-data-loader --region=us-central1 --project="$PROJECT_ID" --format="value(sourceArchiveUrl)" | cut -d'/' -f3)
SOURCE_OBJECT=$(gcloud functions describe titanic-data-loader --region=us-central1 --project="$PROJECT_ID" --format="value(sourceArchiveUrl)" | cut -d'/' -f4-)

if [ -n "$SOURCE_BUCKET" ] && [ -n "$SOURCE_OBJECT" ]; then
    print_success "Source code: gs://$SOURCE_BUCKET/$SOURCE_OBJECT"

    if gsutil ls "gs://$SOURCE_BUCKET/$SOURCE_OBJECT" >/dev/null 2>&1; then
        print_success "Source code file exists"

        # Get source file info
        # Read the size field from the object's line ("gsutil ls -l" appends a TOTAL summary as the last line)
        SOURCE_SIZE=$(gsutil ls -l "gs://$SOURCE_BUCKET/$SOURCE_OBJECT" | awk 'NR==1 {print $1}')
        print_info "Source code size: $SOURCE_SIZE bytes"
    else
        print_error "Source code file does not exist"
    fi
else
    print_warning "Could not determine source code location"
fi

echo ""

# 5. Check required APIs
print_step "5. Checking required APIs..."
REQUIRED_APIS=(
    "cloudfunctions.googleapis.com"
    "cloudbuild.googleapis.com"
    "eventarc.googleapis.com"
    "bigquery.googleapis.com"
    "storage.googleapis.com"
)

for api in "${REQUIRED_APIS[@]}"; do
    # "gcloud services list" exits 0 even when nothing matches the filter, so check for
    # non-empty output rather than the command's exit status
    if [ -n "$(gcloud services list --enabled --filter="name:$api" --format="value(name)" --project="$PROJECT_ID" 2>/dev/null)" ]; then
        print_success "API enabled: $api"
    else
        print_error "API NOT enabled: $api"
    fi
done

echo ""

# 6. Check function logs
print_step "6. Checking function execution logs..."
print_info "Searching for logs from the last 24 hours..."

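# Note: "gcloud logging read" defaults to a freshness window of about one day, which is
# what the "last 24 hours" message above relies on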
LOGS=$(gcloud logging read "resource.type=cloud_function AND resource.labels.function_name=titanic-data-loader" --project="$PROJECT_ID" --limit=10 --format="table(timestamp,severity,textPayload)" 2>/dev/null || echo "")

if [ -n "$LOGS" ]; then
    print_success "Found recent logs:"
    echo "$LOGS"
else
    print_warning "No logs found in the last 24 hours"
    print_info "This could mean:"
    echo "  - Function has never been triggered"
    echo "  - Function deployment failed"
    echo "  - Logs are not being generated properly"
fi

echo ""

# 7. Check IAM permissions
print_step "7. Checking IAM permissions..."
if [ -n "$FUNCTION_SA" ]; then
    print_info "Checking permissions for service account: $FUNCTION_SA"

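    # These checks only inspect project-level IAM bindings; roles granted directly on a
    # specific dataset or bucket will not show up here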
    # Check BigQuery permissions
    if gcloud projects get-iam-policy "$PROJECT_ID" --flatten="bindings[].members" --filter="bindings.members:$FUNCTION_SA AND bindings.role:roles/bigquery.dataEditor" --format="value(bindings.role)" | grep -q "bigquery.dataEditor"; then
        print_success "Has BigQuery dataEditor permission"
    else
        print_error "Missing BigQuery dataEditor permission"
    fi

    # Check Storage permissions
    if gcloud projects get-iam-policy "$PROJECT_ID" --flatten="bindings[].members" --filter="bindings.members:$FUNCTION_SA AND bindings.role:roles/storage.objectViewer" --format="value(bindings.role)" | grep -q "storage.objectViewer"; then
        print_success "Has Storage objectViewer permission"
    else
        print_error "Missing Storage objectViewer permission"
    fi
else
    print_info "Using default service account - checking default permissions..."
fi

echo ""

# 8. Test trigger manually
print_step "8. Testing function trigger manually..."
TEMP_BUCKET="$PROJECT_ID-temp-bucket"

print_info "Creating test file..."
cat > test_trigger.csv << EOF
PassengerId,Survived,Pclass,Name,Sex,Age
1,0,3,"Test Passenger",male,22
EOF

print_info "Uploading test file to trigger bucket..."
if gsutil cp test_trigger.csv "gs://$TEMP_BUCKET/titanic.csv"; then
    print_success "Test file uploaded successfully"

    print_info "Waiting 15 seconds for function execution..."
    sleep 15

    # Check for new logs
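    # "date -d '1 minute ago'" is GNU date syntax; on BSD/macOS, "date -u -v-1M" is the
    # rough equivalent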
    NEW_LOGS=$(gcloud logging read "resource.type=cloud_function AND resource.labels.function_name=titanic-data-loader AND timestamp>=\"$(date -u -d '1 minute ago' +%Y-%m-%dT%H:%M:%SZ)\"" --project="$PROJECT_ID" --limit=5 --format="table(timestamp,severity,textPayload)" 2>/dev/null || echo "")

    if [ -n "$NEW_LOGS" ]; then
        print_success "Function execution detected after test upload!"
        echo "$NEW_LOGS"
    else
        print_error "No function execution detected after test upload"
        print_warning "This indicates the trigger is not working properly"
    fi

    # Clean up test file
    rm -f test_trigger.csv
    print_info "Local test file cleaned up"
else
    print_error "Failed to upload test file"
fi

echo ""

# 9. Summary and recommendations
print_step "9. Diagnostic Summary"
echo ""
print_info "DIAGNOSIS COMPLETE"
echo ""

# Provide recommendations based on findings
if [ "$FUNCTION_STATUS" != "ACTIVE" ]; then
    print_error "CRITICAL: Function is not in ACTIVE state"
    echo "Recommendation: Redeploy the function using Terraform"
fi

if [ -z "$TRIGGER_BUCKET" ]; then
    print_error "CRITICAL: No trigger configured"
    echo "Recommendation: Check Terraform configuration for event trigger"
fi

if [ -z "$LOGS" ]; then
    print_warning "WARNING: No execution logs found"
    echo "Recommendation: Function may never have been triggered or has permission issues"
fi

echo ""
print_info "💡 Next steps to fix the Cloud Function:"
echo "1. Check Terraform apply logs for any deployment errors"
echo "2. Verify all required APIs are enabled"
echo "3. Check IAM permissions for the function service account"
echo "4. Try redeploying the function with: terraform apply -target=google_cloudfunctions_function.titanic_data_loader"
echo "5. Monitor logs in real-time: gcloud logging tail 'resource.type=cloud_function AND resource.labels.function_name=titanic-data-loader' --project=$PROJECT_ID"

echo ""
print_info "🔧 Useful debugging commands:"
echo "# Check function deployment"
echo "gcloud functions describe titanic-data-loader --region=us-central1 --project=$PROJECT_ID"
echo ""
echo "# Monitor real-time logs"
echo "gcloud logging tail 'resource.type=cloud_function' --project=$PROJECT_ID"
echo ""
echo "# Test manual trigger"
echo "gsutil cp your_file.csv gs://$PROJECT_ID-temp-bucket/titanic.csv"