I am having a problem where Google is indexing my code-server instance. This is not ideal: even though it is password protected, I still don't want people finding it and brute-forcing the login. I was hoping there could be some way to add a robots.txt file to the default server configuration to stop search bots from indexing the code server (a rough sketch of what I mean is below, after my configs).
My docker-compose.yml:

```yaml
version: "2"
services:
  code-server:
    image: linuxserver/code-server
    container_name: code-server
    environment:
      - PUID=1000
      - PGID=1000
      - TZ=Europe/London
      - PASSWORD=__
      - SUDO_PASSWORD=__
    volumes:
      - ./config:/config
    ports:
      - 8443:8443
    restart: unless-stopped
```
I know I am using the linuxserver/code-server image, but I think this is irrelevant to the issue.
My nginx vhost file:
```nginx
server {
    server_name code.domain.com;

    location / {
        proxy_set_header Host $host;
        proxy_set_header X-Real-IP $remote_addr;
        proxy_set_header X-Forwarded-Host $host:$server_port;
        proxy_set_header X-Forwarded-Server $host;
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
        proxy_set_header Upgrade $http_upgrade;
        proxy_set_header Connection upgrade;
        proxy_set_header Accept-Encoding gzip;
        proxy_pass http://localhost:8443/;
    }

    listen [::]:443 ssl; # managed by Certbot
    listen 443 ssl; # managed by Certbot
    ssl_certificate /etc/letsencrypt/live/domain.com/fullchain.pem; # managed by Certbot
    ssl_certificate_key /etc/letsencrypt/live/domain.com/privkey.pem; # managed by Certbot
    include /etc/letsencrypt/options-ssl-nginx.conf; # managed by Certbot
    ssl_dhparam /etc/letsencrypt/ssl-dhparams.pem; # managed by Certbot
}

server {
    if ($host = code.domain.com) {
        return 301 https://$host$request_uri;
    } # managed by Certbot

    server_name code.domain.com;
    listen 80;
    listen [::]:80;
    return 404; # managed by Certbot
}
```
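For reference, this is roughly what I had in mind: an untested sketch of a location block that could be added inside the HTTPS server block above, before `location /`, so nginx answers robots.txt itself instead of proxying the request to code-server. The deny-all body is just an example policy.

```nginx
# Untested sketch: serve a deny-all robots.txt directly from nginx so the
# request never reaches the code-server container behind the proxy.
location = /robots.txt {
    add_header Content-Type text/plain;
    return 200 "User-agent: *\nDisallow: /\n";
}
```

Alternatively (or in addition), something like `add_header X-Robots-Tag "noindex, nofollow" always;` in the server block would ask crawlers not to index any response, which should also cover bots that fetch pages without checking robots.txt first.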