
Commit b923773

1 parent 38d51d1 commit b923773

File tree

6 files changed (+512, -11 lines)


dockerize/Caddyfile

Lines changed: 57 additions & 0 deletions
@@ -0,0 +1,57 @@
+# Caddyfile for MONAI Breast Density Classification
+# Simple reverse proxy to auth-gateway service
+
+# Main site configuration
+:23434 {
+    # TLS configuration using provided certificates
+    tls /etc/ssl/certs/fullchain.pem /etc/ssl/private/privkey.pem
+
+    # Reverse proxy all requests to auth-gateway
+    reverse_proxy auth-gateway:8090 {
+        # Health check for upstream
+        health_uri /health
+        health_interval 30s
+        health_timeout 10s
+
+        # Forward original client information
+        header_up Host {host}
+        header_up X-Real-IP {remote_host}
+        header_up X-Forwarded-For {remote_host}
+        header_up X-Forwarded-Proto {scheme}
+        header_up X-Forwarded-Port {server_port}
+    }
+
+    # Request/response logging for debugging (optional)
+    log {
+        output stdout
+        format console
+        level INFO
+    }
+
+    # Security headers
+    header {
+        # Remove server identification
+        -Server
+        # Basic security headers
+        X-Content-Type-Options nosniff
+        X-Frame-Options DENY
+        X-XSS-Protection "1; mode=block"
+        Referrer-Policy strict-origin-when-cross-origin
+    }
+
+    # Handle specific endpoints with better error pages
+    handle_errors {
+        @502 expression {http.error.status_code} == 502
+        @503 expression {http.error.status_code} == 503
+        @504 expression {http.error.status_code} == 504
+
+        respond @502 "Service temporarily unavailable - auth-gateway not ready" 502
+        respond @503 "Service temporarily unavailable - please try again" 503
+        respond @504 "Service timeout - request took too long" 504
+    }
+}
+
+# Optional: Redirect HTTP to HTTPS if needed
+# :80 {
+#     redir https://{host}:23434{uri} permanent
+# }
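A quick way to sanity-check this configuration before bringing the stack up is Caddy's built-in validator, run from the same image the compose file uses. A minimal sketch, assuming the Caddyfile sits in the current directory and the certificates live under /mnt/secure/cert as in docker-compose.yaml (validation provisions the TLS module, so the referenced certificate files need to be present):

```bash
# Validate the Caddyfile with the same image used by the compose stack.
# The certificate mounts mirror docker-compose.yaml; adjust paths if yours differ.
docker run --rm \
  -v "$PWD/Caddyfile:/etc/caddy/Caddyfile:ro" \
  -v /mnt/secure/cert:/etc/ssl/certs:ro \
  -v /mnt/secure/cert:/etc/ssl/private:ro \
  caddy:2.7-alpine \
  caddy validate --config /etc/caddy/Caddyfile
```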

dockerize/docker-compose copy.yaml

Lines changed: 138 additions & 0 deletions
@@ -0,0 +1,138 @@
+services:
+  torchserve:
+    # build:
+    #   context: .
+    #   dockerfile: Dockerfile
+    #   target: production
+    image: monai-breast-density:latest
+    container_name: torchserve
+    privileged: true
+
+    # GPU support
+    deploy:
+      resources:
+        reservations:
+          devices:
+            - driver: nvidia
+              count: all
+              capabilities: [gpu]
+        limits:
+          memory: 32G
+
+    runtime: nvidia
+    environment:
+      - NVIDIA_VISIBLE_DEVICES=all
+      - NVIDIA_DRIVER_CAPABILITIES=compute,utility
+      - LOCAL_EXECUTION=true
+      - PYTHONUNBUFFERED=1
+
+    # Port mappings (bind to localhost for security)
+    ports:
+      - "127.0.0.1:8085:8085"  # Inference API
+      - "127.0.0.1:8086:8086"  # Management API
+      - "127.0.0.1:8082:8082"  # Metrics API
+      - "127.0.0.1:7070:7070"  # gRPC Inference
+      - "127.0.0.1:7071:7071"  # gRPC Management
+
+    # Volume mounts
+    volumes:
+      # Mount the specific MAR file directly into the model store
+      # This assumes the MAR file is available on the host at the specified location
+      - /mnt/secure/.fetch/model-core/monai-breast-density-classification_model-store_monai-breast-density-classification.mar:/home/model-server/model-store/monai-breast-density-classification_model-store_monai-breast-density-classification.mar:ro
+      # Logs
+      - ./logs:/home/model-server/logs
+      # Temporary files
+      - ./temp:/home/model-server/temp
+      # Config overrides (optional)
+      - ./config/config-docker.properties:/home/model-server/config/config-docker.properties:ro
+      # Share point for keys
+      - torchserve-tokens:/home/model-server
+      # Input/Output exchange
+      - shared-workspace:/home/model-server/workspace:rw
+
+    # Memory and shared memory settings for deep learning
+    shm_size: 2gb
+    ulimits:
+      memlock: -1
+      stack: 67108864
+
+    # Token-based health check - checks for key_file.json presence
+    healthcheck:
+      test: ["CMD", "test", "-f", "/home/model-server/key_file.json"]
+      interval: 30s
+      timeout: 10s
+      start_period: 120s
+      retries: 3
+
+    # Restart policy
+    restart: unless-stopped
+
+    # Logging configuration
+    logging:
+      driver: "json-file"
+      options:
+        max-size: "100m"
+        max-file: "5"
+
+    networks:
+      - monai-network
+
+  # FastAPI Authentication & Gateway Service
+  auth-gateway:
+    image: monai-secretai:dev
+    # build:
+    #   context: ./secretai
+    #   dockerfile: Dockerfile
+    container_name: auth-gateway
+
+    environment:
+      - TORCHSERVE_URL=http://torchserve:8085
+      - PYTHONUNBUFFERED=1
+
+    # External ports (exposed to host)
+    ports:
+      - "127.0.0.1:8090:8090"  # Auth/Gateway API
+
+    # Wait for TorchServe to be healthy before starting
+    depends_on:
+      torchserve:
+        condition: service_healthy
+
+    # Health check
+    healthcheck:
+      test: ["CMD", "curl", "-f", "http://localhost:8090/health"]
+      interval: 30s
+      timeout: 10s
+      start_period: 30s
+      retries: 3
+
+    # Restart policy
+    restart: unless-stopped
+
+    # Logging configuration
+    logging:
+      driver: "json-file"
+      options:
+        max-size: "50m"
+        max-file: "3"
+
+    networks:
+      - monai-network
+
+    volumes:
+      - torchserve-tokens:/app/tokens:ro
+      - shared-workspace:/app/workspace:rw
+
+# Networks
+networks:
+  monai-network:
+    driver: bridge
+
+# Volumes for persistent data
+volumes:
+  temp:
+    driver: local
+  torchserve-tokens:
+    driver: local
+  shared-workspace:
+    driver: local
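Because the torchserve health check above looks for key_file.json on disk rather than probing an HTTP endpoint, the easiest way to follow startup is to watch the container's health state directly. A minimal sketch, assuming the file is used under its committed name docker-compose copy.yaml:

```bash
# Bring the stack up and poll the torchserve health status.
docker compose -f "docker-compose copy.yaml" up -d
docker inspect --format '{{.State.Health.Status}}' torchserve

# auth-gateway only starts once torchserve reports "healthy"
# (depends_on: condition: service_healthy), so follow its logs afterwards.
docker compose -f "docker-compose copy.yaml" logs -f auth-gateway
```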

dockerize/docker-compose.yaml

Lines changed: 57 additions & 11 deletions
@@ -1,4 +1,51 @@
 services:
+  # Caddy Reverse Proxy - Single Entry Point
+  caddy:
+    image: caddy:2.7-alpine
+    container_name: caddy-proxy
+
+    # Only port exposed to host
+    ports:
+      - "23434:23434"  # HTTPS entry point
+
+    # Volume mounts for certificates and configuration
+    volumes:
+      # SSL certificates from host
+      - /mnt/secure/cert:/etc/ssl/certs:ro
+      - /mnt/secure/cert:/etc/ssl/private:ro
+      # Caddyfile configuration
+      - ./Caddyfile:/etc/caddy/Caddyfile:ro
+      # Caddy data directory for automatic HTTPS (if needed)
+      - caddy-data:/data
+      - caddy-config:/config
+
+    # Wait for auth-gateway to be healthy before starting
+    depends_on:
+      auth-gateway:
+        condition: service_healthy
+
+    # Health check
+    healthcheck:
+      test: ["CMD", "wget", "--no-verbose", "--tries=1", "--spider", "http://localhost:23434/health"]
+      interval: 30s
+      timeout: 10s
+      start_period: 15s
+      retries: 3
+
+    # Restart policy
+    restart: unless-stopped
+
+    # Logging configuration
+    logging:
+      driver: "json-file"
+      options:
+        max-size: "50m"
+        max-file: "3"
+
+    # Network configuration - Caddy on internal network only
+    networks:
+      - monai-network
+
   torchserve:
     # build:
     #   context: .
@@ -26,13 +73,8 @@ services:
       - LOCAL_EXECUTION=true
       - PYTHONUNBUFFERED=1
 
-    # Port mappings (bind to localhost for security)
-    ports:
-      - "127.0.0.1:8085:8085"  # Inference API
-      - "127.0.0.1:8086:8086"  # Management API
-      - "127.0.0.1:8082:8082"  # Metrics API
-      - "127.0.0.1:7070:7070"  # gRPC Inference
-      - "127.0.0.1:7071:7071"  # gRPC Management
+    # NO EXTERNAL PORTS - Internal network only
+    # ports: removed - all communication goes through Caddy
 
     # Volume mounts
     volumes:
@@ -77,7 +119,7 @@ services:
     networks:
       - monai-network
 
-  # FastAPI Authentication & Gateway Service
+  # FastAPI Gateway Service (No Authentication)
   auth-gateway:
     image: monai-secretai:dev
     # build:
@@ -89,9 +131,8 @@ services:
       - TORCHSERVE_URL=http://torchserve:8085
      - PYTHONUNBUFFERED=1
 
-    # External ports (exposed to host)
-    ports:
-      - "127.0.0.1:8090:8090"  # Auth/Gateway API
+    # NO EXTERNAL PORTS - Internal network only
+    # ports: removed - all communication goes through Caddy
 
     # Wait for TorchServe to be healthy before starting
     depends_on:
@@ -135,4 +176,9 @@ volumes:
   torchserve-tokens:
     driver: local
   shared-workspace:
+    driver: local
+  # Caddy volumes for automatic HTTPS and configuration
+  caddy-data:
+    driver: local
+  caddy-config:
     driver: local
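With this change Caddy is the only listener published on the host (port 23434); TorchServe and the gateway are reachable only over the internal monai-network. A short smoke test, assuming the stack is running and that /health is proxied through to auth-gateway as the health checks above suggest:

```bash
# From the host: only the Caddy entry point should answer.
curl -k https://localhost:23434/health

# From inside the network: probe TorchServe via the gateway container,
# which ships curl (its own health check relies on it).
docker compose exec auth-gateway curl -sf http://torchserve:8085/ping
```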

dockerize/howto.md

Lines changed: 7 additions & 0 deletions
@@ -0,0 +1,7 @@
+
+
+# Run an example
+
+```bash
+curl -k -X POST -F "file=@sample_A1.jpg" https://fetch.scrtlabs.com:23434/predict/breast-density --output report.pdf
+```
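If the request succeeds, the response body is saved as report.pdf in the current directory; a quick way to confirm the gateway returned an actual PDF rather than an error message:

```bash
# Verify the downloaded file is a PDF and check its size.
file report.pdf
ls -lh report.pdf
```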

dockerize/report.pdf

4.05 KB
Binary file not shown.
