diff --git a/Makefile b/Makefile
index 041b03d..be2aeec 100644
--- a/Makefile
+++ b/Makefile
@@ -6,10 +6,10 @@ init:
 format:
 	terraform fmt --recursive ./terraform
 
-dryrun:
+plan:
 	terraform -chdir=./terraform plan
 
-deploy:
+apply:
 	terraform -chdir=./terraform apply
 
 destroy:
diff --git a/ansible/ansible.cfg b/ansible/ansible.cfg
new file mode 100644
index 0000000..5d77b4c
--- /dev/null
+++ b/ansible/ansible.cfg
@@ -0,0 +1,12 @@
+[defaults]
+ask_vault_pass = true
+forks = 20
+internal_poll_interval = 0.001
+interpreter_python = auto
+inventory = inventory
+private_key_file = ~/.ssh/id_ed25519_infra
+remote_user = root
+
+[ssh_connection]
+pipelining = true
+ssh_args = -o ControlMaster=auto -o ControlPersist=60s
diff --git a/ansible/host_vars/offworld.ppanda.org.yml b/ansible/host_vars/offworld.ppanda.org.yml
new file mode 100644
index 0000000..860778f
--- /dev/null
+++ b/ansible/host_vars/offworld.ppanda.org.yml
@@ -0,0 +1,21 @@
+$ANSIBLE_VAULT;1.1;AES256
+33656639666361623365616662353265656561663339633666656561646661623365393734363431
+6265633666643033393236353330333235663066333233320a343764393562623637643866623865
+34363239343835353835656363626439326231663565616166363739623536653436613435313561
+6338326363366264640a623737383434323438336164363064663038386634626139393166383430
+37353738643834653034303465653739643265363430313962343736363838323963376539353366
+64333135363461353234313837656134376235306135383936323362643330343733323836663135
+34396134376532306134306236646632386235623333346337623237316134653437303532666262
+30663637366532623438363937373962336163386336373162643330643866353931636631376631
+31336164663234333962653166613731373431633132313537363332393061323439313734376535
+30336166323661313730393963626230663765633434633132323839383763326665353163383064
+33323936373565376136656565623039323336376134303030626530343736383634393434393464
+64326465616337326264663137626235643038626663303439336336363662646537623064656161
+37626332313737616264386435373238613234323864346431343931323031313033623463656131
+39356663626537343562633234393631363837383139633638323937313366653132336336613162
+61626362316664633034323437656564373561343862616432316562616638303761366139336465
+63663634646262353035323362663062303466376439363863326539623166373461646235653036
+65626365396365643264333335623762393935633363303133323936366432376633653236366236
+64373632656136356333643863636165363162656535353032363462626637396261653530363034
+34613066613766376265383331323931653737663363336335653666333934653236643562656330
+61353662343665333161
diff --git a/ansible/inventory b/ansible/inventory
new file mode 100644
index 0000000..d26b50f
--- /dev/null
+++ b/ansible/inventory
@@ -0,0 +1 @@
+offworld.ppanda.org
diff --git a/ansible/main.yml b/ansible/main.yml
new file mode 100644
index 0000000..d2f72bb
--- /dev/null
+++ b/ansible/main.yml
@@ -0,0 +1,9 @@
+---
+- name: Set up offworld vm
+  hosts: offworld.ppanda.org
+  become: true
+  gather_facts: no
+  roles:
+    - docker
+    - tailscale
+    - containers
diff --git a/docker/caddy/Caddyfile b/ansible/roles/containers/files/caddy/Caddyfile
similarity index 73%
rename from docker/caddy/Caddyfile
rename to ansible/roles/containers/files/caddy/Caddyfile
index 810b143..6984472 100644
--- a/docker/caddy/Caddyfile
+++ b/ansible/roles/containers/files/caddy/Caddyfile
@@ -9,6 +9,12 @@ nas.ppanda.org:443 {
 	}
 }
 
+sync.ppanda.org:5984 {
+	reverse_proxy http://nexus-boron.scorpion-galaxy.ts.net:5984 {
+		header_up Host {upstream_hostport}
+	}
+}
+
 status.ppanda.org:443 {
 	reverse_proxy uptime-kuma:3001
 }
diff --git a/docker/caddy/docker-compose.yml b/ansible/roles/containers/files/caddy/docker-compose.yml
similarity index 100%
rename from docker/caddy/docker-compose.yml
rename to ansible/roles/containers/files/caddy/docker-compose.yml
diff --git a/docker/uptime-kuma/docker-compose.yml b/ansible/roles/containers/files/uptime-kuma/docker-compose.yml
similarity index 100%
rename from docker/uptime-kuma/docker-compose.yml
rename to ansible/roles/containers/files/uptime-kuma/docker-compose.yml
diff --git a/ansible/roles/containers/tasks/main.yml b/ansible/roles/containers/tasks/main.yml
new file mode 100644
index 0000000..66dfb05
--- /dev/null
+++ b/ansible/roles/containers/tasks/main.yml
@@ -0,0 +1,26 @@
+---
+- name: Copy caddy to the server
+  ansible.builtin.copy:
+    src: "./files/caddy"
+    dest: "/root/app"
+    mode: "0644"
+
+- name: Copy uptime-kuma to the server
+  ansible.builtin.copy:
+    src: "./files/uptime-kuma"
+    dest: "/root/app"
+    mode: "0644"
+
+- name: Create docker network
+  ansible.builtin.command: >
+    docker network create proxy-network
+  failed_when: false
+  become: true
+
+- name: Run docker compose up for Caddy and Uptime Kuma
+  ansible.builtin.command: >
+    docker compose
+    -f "/root/app/caddy/docker-compose.yml"
+    -f "/root/app/uptime-kuma/docker-compose.yml"
+    up -d
+  become: true
diff --git a/ansible/roles/docker/defaults/main.yml b/ansible/roles/docker/defaults/main.yml
new file mode 100644
index 0000000..0e7aaec
--- /dev/null
+++ b/ansible/roles/docker/defaults/main.yml
@@ -0,0 +1,2 @@
+---
+gvisor_src_base_url: "https://storage.googleapis.com/gvisor/releases/release/latest/x86_64"
diff --git a/ansible/roles/docker/files/docker-daemon.json b/ansible/roles/docker/files/docker-daemon.json
new file mode 100644
index 0000000..e679def
--- /dev/null
+++ b/ansible/roles/docker/files/docker-daemon.json
@@ -0,0 +1,7 @@
+{
+  "runtimes": {
+    "runsc": {
+      "path": "/usr/local/bin/runsc"
+    }
+  }
+}
diff --git a/ansible/roles/docker/handlers/main.yml b/ansible/roles/docker/handlers/main.yml
new file mode 100644
index 0000000..0b7071a
--- /dev/null
+++ b/ansible/roles/docker/handlers/main.yml
@@ -0,0 +1,5 @@
+---
+- name: Restart Docker service
+  ansible.builtin.service:
+    name: docker
+    state: restarted
diff --git a/ansible/roles/docker/tasks/gvisor.yml b/ansible/roles/docker/tasks/gvisor.yml
new file mode 100644
index 0000000..5475abf
--- /dev/null
+++ b/ansible/roles/docker/tasks/gvisor.yml
@@ -0,0 +1,31 @@
+---
+- name: Ensure gvisor is downloaded
+  ansible.builtin.get_url:
+    url: "{{ gvisor_src_base_url }}/runsc"
+    dest: /usr/local/bin/runsc
+    checksum: "sha512:{{ gvisor_src_base_url }}/runsc.sha512"
+    mode: "0744"
+
+- name: Ensure containerd-shim is downloaded
+  ansible.builtin.get_url:
+    url: "{{ gvisor_src_base_url }}/containerd-shim-runsc-v1"
+    dest: /usr/local/bin/containerd-shim-runsc-v1
+    checksum: "sha512:{{ gvisor_src_base_url }}/containerd-shim-runsc-v1.sha512"
+    mode: "0744"
+
+- name: Ensure /etc/docker exists
+  ansible.builtin.file:
+    dest: /etc/docker
+    state: directory
+    owner: root
+    group: root
+    mode: "0755"
+
+- name: Ensure gvisor is installed
+  ansible.builtin.copy:
+    src: docker-daemon.json
+    dest: /etc/docker/daemon.json
+    owner: root
+    group: root
+    mode: "0644"
+  notify: "Restart Docker service"
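Note: the daemon.json above only registers the runsc runtime with Docker; nothing in this patch actually selects it, so containers still run under the default runc. A quick way to confirm the wiring once the role has run — this is gVisor's own documented smoke test, shown here as a sketch:

    # dmesg inside a sandboxed container prints gVisor's boot banner
    # instead of the host kernel's ring buffer
    docker run --rm --runtime=runsc alpine dmesg

Individual services could opt in by adding a runtime: runsc key to their docker-compose service definitions.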
diff --git a/ansible/roles/docker/tasks/install.yml b/ansible/roles/docker/tasks/install.yml
new file mode 100644
index 0000000..eddad43
--- /dev/null
+++ b/ansible/roles/docker/tasks/install.yml
@@ -0,0 +1,10 @@
+---
+- name: Ensure Docker and related packages are installed
+  ansible.builtin.package:
+    name:
+      - docker
+      - docker-cli-compose
+      - linux-virt
+      - linux-virt-dev
+    state: latest
+    update_cache: true
diff --git a/ansible/roles/docker/tasks/main.yml b/ansible/roles/docker/tasks/main.yml
new file mode 100644
index 0000000..ffd55ca
--- /dev/null
+++ b/ansible/roles/docker/tasks/main.yml
@@ -0,0 +1,4 @@
+---
+- include_tasks: install.yml
+- include_tasks: service.yml
+- include_tasks: gvisor.yml
diff --git a/ansible/roles/docker/tasks/service.yml b/ansible/roles/docker/tasks/service.yml
new file mode 100644
index 0000000..e3a112f
--- /dev/null
+++ b/ansible/roles/docker/tasks/service.yml
@@ -0,0 +1,6 @@
+---
+- name: Ensure Docker service is started
+  ansible.builtin.service:
+    name: docker
+    enabled: true
+    state: started
diff --git a/ansible/roles/tailscale/defaults/main.yml b/ansible/roles/tailscale/defaults/main.yml
new file mode 100644
index 0000000..24bee35
--- /dev/null
+++ b/ansible/roles/tailscale/defaults/main.yml
@@ -0,0 +1,17 @@
+---
+# Installation options
+tailscale_uninstall: false
+tailscale_authkey: ""
+tailscale_tags: []
+
+# Certificate options
+tailscale_cert_enabled: false
+tailscale_cert_caddy_user_permission: false
+tailscale_cert_domain: ""
+tailscale_cert_dir: "/usr/local/etc/ssl/certs"
+tailscale_cert_filename: "{{ tailscale_cert_domain }}.crt"
+tailscale_cert_private_key_dir: "/usr/local/etc/ssl/private"
+tailscale_cert_private_key_filename: "{{ tailscale_cert_domain }}.key"
+
+# Debug
+insecurely_log_authkey: false
diff --git a/ansible/roles/tailscale/handlers/main.yml b/ansible/roles/tailscale/handlers/main.yml
new file mode 100644
index 0000000..72a8e73
--- /dev/null
+++ b/ansible/roles/tailscale/handlers/main.yml
@@ -0,0 +1,5 @@
+---
+- name: Restart tailscale
+  ansible.builtin.service:
+    name: tailscale
+    state: restarted
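Note: the vaulted host_vars file earlier in this patch is opaque, but given the variables the tailscale role consumes, its decrypted shape is presumably something like the sketch below. The keys come from the role defaults above; every value is a hypothetical placeholder, not the actual vault contents:

    # ansible/host_vars/offworld.ppanda.org.yml — hypothetical plaintext shape
    tailscale_authkey: "tskey-auth-PLACEHOLDER"  # asserted non-empty by service.yml
    tailscale_tags:
      - infra
    tailscale_cert_enabled: true
    tailscale_cert_caddy_user_permission: true
    tailscale_cert_domain: "offworld.scorpion-galaxy.ts.net"  # guessed from the Caddyfile's tailnet name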
diff --git a/ansible/roles/tailscale/tasks/certificates.yml b/ansible/roles/tailscale/tasks/certificates.yml
new file mode 100644
index 0000000..2bec898
--- /dev/null
+++ b/ansible/roles/tailscale/tasks/certificates.yml
@@ -0,0 +1,68 @@
+---
+- name: Assert that tailscale_cert_domain is not empty
+  ansible.builtin.assert:
+    that:
+      - tailscale_cert_domain | length > 0
+    quiet: true
+
+- name: Assert that tailscale_cert_dir is not empty
+  ansible.builtin.assert:
+    that:
+      - tailscale_cert_dir | length > 0
+    quiet: true
+
+- name: Assert that tailscale_cert_filename is not empty
+  ansible.builtin.assert:
+    that:
+      - tailscale_cert_filename | length > 0
+    quiet: true
+
+- name: Assert that tailscale_cert_private_key_dir is not empty
+  ansible.builtin.assert:
+    that:
+      - tailscale_cert_private_key_dir | length > 0
+    quiet: true
+
+- name: Assert that tailscale_cert_private_key_filename is not empty
+  ansible.builtin.assert:
+    that:
+      - tailscale_cert_private_key_filename | length > 0
+    quiet: true
+
+- name: Ensure {{ tailscale_cert_dir }} exists
+  ansible.builtin.file:
+    path: "{{ tailscale_cert_dir }}"
+    state: directory
+
+- name: Ensure {{ tailscale_cert_private_key_dir }} exists
+  ansible.builtin.file:
+    path: "{{ tailscale_cert_private_key_dir }}"
+    state: directory
+
+- name: Check that certificates exist
+  ansible.builtin.stat:
+    path: "{{ item }}"
+  loop:
+    - "{{ tailscale_cert_dir }}/{{ tailscale_cert_filename }}"
+    - "{{ tailscale_cert_private_key_dir }}/{{ tailscale_cert_private_key_filename }}"
+  register: stat_certs
+
+- name: Run tailscale cert and generate cert
+  ansible.builtin.command: >
+    tailscale cert
+    --cert-file="{{ tailscale_cert_dir }}/{{ tailscale_cert_filename }}"
+    --key-file="{{ tailscale_cert_private_key_dir }}/{{ tailscale_cert_private_key_filename }}"
+    "{{ tailscale_cert_domain }}"
+  when: not (stat_certs.results | map(attribute='stat.exists')) is all
+
+- name: Allow caddy uid in /etc/default/tailscaled
+  ansible.builtin.lineinfile:
+    create: true
+    dest: /etc/default/tailscaled
+    regexp: "^#?TS_PERMIT_CERT_UID"
+    line: "TS_PERMIT_CERT_UID=\"caddy\""
+    insertafter: EOF
+    state: present
+  notify:
+    - Restart tailscale
+  when: tailscale_cert_caddy_user_permission | bool
diff --git a/ansible/roles/tailscale/tasks/install.yml b/ansible/roles/tailscale/tasks/install.yml
new file mode 100644
index 0000000..63f7876
--- /dev/null
+++ b/ansible/roles/tailscale/tasks/install.yml
@@ -0,0 +1,15 @@
+---
+- name: Add community tailscale source for alpine
+  ansible.builtin.lineinfile:
+    dest: /etc/apk/repositories
+    regexp: "^http://dl-2\\.alpinelinux\\.org/alpine/edge/community.*"
+    line: "http://dl-2.alpinelinux.org/alpine/edge/community"
+    state: present
+
+- name: Install dhclient and tailscale
+  ansible.builtin.package:
+    name:
+      - dhclient # https://tailscale.com/kb/1188/linux-dns#dhcp-dhclient-overwriting-etcresolvconf
+      - tailscale
+    state: latest
+    update_cache: yes
diff --git a/ansible/roles/tailscale/tasks/main.yml b/ansible/roles/tailscale/tasks/main.yml
new file mode 100644
index 0000000..158f040
--- /dev/null
+++ b/ansible/roles/tailscale/tasks/main.yml
@@ -0,0 +1,21 @@
+---
+- name: Include uninstall.yml
+  include_tasks: uninstall.yml
+  when: tailscale_uninstall | bool
+
+- name: Include install.yml
+  include_tasks: install.yml
+  when: not tailscale_uninstall | bool
+
+- name: Include service.yml
+  include_tasks: service.yml
+  when: not tailscale_uninstall | bool
+
+# If /etc/default/tailscaled was modified, run the 'Restart tailscale' handler
+# before the `tailscale cert` command runs
+- name: Flush handlers
+  meta: flush_handlers
+
+- name: Include certificates.yml
+  include_tasks: certificates.yml
+  when: not tailscale_uninstall | bool and tailscale_cert_enabled | bool
diff --git a/ansible/roles/tailscale/tasks/service.yml b/ansible/roles/tailscale/tasks/service.yml
new file mode 100644
index 0000000..e12cb3f
--- /dev/null
+++ b/ansible/roles/tailscale/tasks/service.yml
@@ -0,0 +1,31 @@
+---
+- name: Enable tailscale daemon
+  become: true
+  ansible.builtin.service:
+    name: tailscale
+    state: started
+    enabled: true
+
+- name: Assert that tailscale_authkey is not empty
+  ansible.builtin.assert:
+    that:
+      - tailscale_authkey | length > 0
+    quiet: true
+
+- name: Prepend 'tag:' to each item in the list
+  ansible.builtin.set_fact:
+    tailscale_prepared_tags: "{{ tailscale_tags | map('regex_replace', '^', 'tag:') | list }}"
+
+- name: Build tailscale tag args
+  ansible.builtin.set_fact:
+    tailscale_tags_string: >-
+      {% if tailscale_tags | length > 0 %}
+      --advertise-tags={{ tailscale_prepared_tags | join(',') }}
+      {% endif %}
+
+- name: Authenticate tailscale
+  ansible.builtin.command: >
+    tailscale up
+    {{ tailscale_tags_string | trim if tailscale_tags_string is not none else '' }}
+    --authkey="{{ tailscale_authkey }}"
+  no_log: "{{ not (insecurely_log_authkey | bool) }}"
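Note: to make the two set_fact steps in service.yml concrete — with tailscale_tags: [infra, web], tailscale_prepared_tags becomes ["tag:infra", "tag:web"] and the final command renders roughly as below; with the default empty list the flag is omitted entirely. (Tailscale only accepts --advertise-tags when the auth key is permitted to assign those tags.)

    tailscale up --advertise-tags=tag:infra,tag:web --authkey="tskey-auth-PLACEHOLDER"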
diff --git a/ansible/roles/tailscale/tasks/uninstall.yml b/ansible/roles/tailscale/tasks/uninstall.yml
new file mode 100644
index 0000000..5322062
--- /dev/null
+++ b/ansible/roles/tailscale/tasks/uninstall.yml
@@ -0,0 +1,36 @@
+---
+- name: Check if tailscale is connected
+  ansible.builtin.command: tailscale status
+  changed_when: false
+  failed_when: false
+  register: tailscale_status
+
+- name: De-register tailscale node
+  become: true
+  # Hack to get correct changed/ok status
+  ansible.builtin.shell: tailscale status; tailscale logout
+  register: tailscale_logout
+  changed_when: "'Logged out.' not in tailscale_status.stdout and 'not logged in' not in tailscale_status.stdout"
+  when:
+    # [Errno 2] No such file or directory: 'tailscale'
+    - tailscale_status.rc != 2
+    # "bash: tailscale: command not found"
+    - tailscale_status.rc != 127
+
+- name: Disable tailscale service
+  become: true
+  ansible.builtin.service:
+    name: tailscale
+    state: stopped
+    enabled: false
+
+- name: Remove tailscale state and logs
+  become: true
+  ansible.builtin.file:
+    path: "/var/lib/tailscale"
+    state: absent
+
+- name: Uninstall tailscale package
+  ansible.builtin.package:
+    name: tailscale
+    state: absent
diff --git a/docker/setup.sh b/docker/setup.sh
deleted file mode 100644
index 776a24d..0000000
--- a/docker/setup.sh
+++ /dev/null
@@ -1,22 +0,0 @@
-#!/bin/sh
-# Author: Yuzhou "Joe" Mo (@yuzhoumo)
-# License: MIT
-
-# upgrade packages and install docker
-apk update && apk upgrade
-apk add docker docker-compose linux-virt linux-virt-dev
-
-# create docker service
-rc-update add docker boot
-service docker start
-
-# install tailscale and authenticate
-curl -fsSL https://tailscale.com/install.sh | sh
-tailscale up --accept-routes --auth-key "${TS_AUTHKEY}"
-
-# bring up docker containers
-docker pull caddy
-docker compose \
-  -f caddy/docker-compose.yml \
-  -f uptime-kuma/docker-compose.yml \
-  up -d
"vultr_dns_record" "gh_ipv6_d" { + domain = vultr_dns_domain.root.id + name = "@" + type = "AAAA" + data = "2606:50c0:8003::153" + ttl = 300 +} + +resource "vultr_dns_record" "gh_pages_challenge" { + domain = vultr_dns_domain.root.id + name = "_github-pages-challenge-perpetualpanda" + type = "TXT" + data = "000dcdf2686daed53910f406c6a20f" + ttl = 300 +} + +#################### +# DNS - ProtonMail # +#################### + +resource "vultr_dns_record" "proton_domainkey1" { + domain = vultr_dns_domain.root.id + name = "protonmail._domainkey" + type = "CNAME" + data = "protonmail.domainkey.d3wxy5ytci2hzufheo7wcxxinbvkv4cso2g5ddycqj7gqiuta7n2q.domains.proton.ch" + ttl = 300 +} + +resource "vultr_dns_record" "proton_domainkey2" { + domain = vultr_dns_domain.root.id + name = "protonmail2._domainkey" + type = "CNAME" + data = "protonmail2.domainkey.d3wxy5ytci2hzufheo7wcxxinbvkv4cso2g5ddycqj7gqiuta7n2q.domains.proton.ch" + ttl = 300 +} + +resource "vultr_dns_record" "proton_domainkey3" { + domain = vultr_dns_domain.root.id + name = "protonmail3._domainkey" + type = "CNAME" + data = "protonmail3.domainkey.d3wxy5ytci2hzufheo7wcxxinbvkv4cso2g5ddycqj7gqiuta7n2q.domains.proton.ch" + ttl = 300 +} + +resource "vultr_dns_record" "proton_mx1" { + domain = vultr_dns_domain.root.id + name = "@" + type = "MX" + data = "mail.protonmail.ch" + priority = 10 + ttl = 300 +} + +resource "vultr_dns_record" "proton_mx2" { + domain = vultr_dns_domain.root.id + name = "@" + type = "MX" + data = "mailsec.protonmail.ch" + priority = 20 + ttl = 300 +} + +resource "vultr_dns_record" "proton_dmarc" { + domain = vultr_dns_domain.root.id + name = "_dmarc" + type = "TXT" + data = "v=DMARC1; p=quarantine" + ttl = 300 +} + +resource "vultr_dns_record" "proton_spf" { + domain = vultr_dns_domain.root.id + name = "@" + type = "TXT" + data = "v=spf1 include:_spf.protonmail.ch mx ~all" + ttl = 300 +} + +resource "vultr_dns_record" "proton_verification" { + domain = vultr_dns_domain.root.id + name = "@" + type = "TXT" + data = "protonmail-verification=b93e1e9a06d1776b86ad3a484de0c2968f25c1ab" + ttl = 300 +} diff --git a/terraform/main.tf b/terraform/main.tf index 6f161a2..8a854bc 100644 --- a/terraform/main.tf +++ b/terraform/main.tf @@ -3,210 +3,24 @@ ################### locals { - root_domain = "ppanda.org" - offworld_hostname = "offworld.${local.root_domain}" + root_domain = "ppanda.org" } -#################### -# Virtual Machines # -#################### +######### +# Hosts # +######### -module "vultr_instance_offworld" { - source = "github.com/perpetualpanda/tf-modules/vultr-vm" +module "host_offworld" { + source = "./modules/vultr-vm" - vm_hostname = local.offworld_hostname + vm_hostname = "offworld.${local.root_domain}" vm_os_id = "2076" # alpine linux vm_plan = "vc2-1c-0.5gb" # low spec plan vm_region = "ewr" # plan available in new jersey region - block_storage_label = "offworld-block-storage" - block_storage_gb = 1 - ssh_admin_key_label = "offworld-admin-ssh-key" ssh_admin_key = var.admin_ssh_key vultr_dns_domain_id = vultr_dns_domain.root.id vultr_api_key = var.vultr_api_key } - -##################### -# DNS - Root Domain # -##################### - -resource "vultr_dns_domain" "root" { - domain = local.root_domain -} - -################################## -# DNS - Offworld (Reverse Proxy) # -################################## - -resource "vultr_dns_record" "nas" { - domain = vultr_dns_domain.root.id - name = "nas" - type = "CNAME" - data = local.offworld_hostname - ttl = 300 -} - -resource "vultr_dns_record" "status" { - 
diff --git a/terraform/main.tf b/terraform/main.tf
index 6f161a2..8a854bc 100644
--- a/terraform/main.tf
+++ b/terraform/main.tf
@@ -3,210 +3,24 @@
 ###################
 
 locals {
-  root_domain       = "ppanda.org"
-  offworld_hostname = "offworld.${local.root_domain}"
+  root_domain = "ppanda.org"
 }
 
-####################
-# Virtual Machines #
-####################
+#########
+# Hosts #
+#########
 
-module "vultr_instance_offworld" {
-  source = "github.com/perpetualpanda/tf-modules/vultr-vm"
+module "host_offworld" {
+  source = "./modules/vultr-vm"
 
-  vm_hostname = local.offworld_hostname
+  vm_hostname = "offworld.${local.root_domain}"
   vm_os_id    = "2076"          # alpine linux
   vm_plan     = "vc2-1c-0.5gb"  # low spec plan
   vm_region   = "ewr"           # plan available in new jersey region
 
-  block_storage_label = "offworld-block-storage"
-  block_storage_gb    = 1
-
   ssh_admin_key_label = "offworld-admin-ssh-key"
   ssh_admin_key       = var.admin_ssh_key
 
   vultr_dns_domain_id = vultr_dns_domain.root.id
   vultr_api_key       = var.vultr_api_key
 }
-
-#####################
-# DNS - Root Domain #
-#####################
-
-resource "vultr_dns_domain" "root" {
-  domain = local.root_domain
-}
-
-##################################
-# DNS - Offworld (Reverse Proxy) #
-##################################
-
-resource "vultr_dns_record" "nas" {
-  domain = vultr_dns_domain.root.id
-  name   = "nas"
-  type   = "CNAME"
-  data   = local.offworld_hostname
-  ttl    = 300
-}
-
-resource "vultr_dns_record" "status" {
-  domain = vultr_dns_domain.root.id
-  name   = "status"
-  type   = "CNAME"
-  data   = local.offworld_hostname
-  ttl    = 300
-}
-
-resource "vultr_dns_record" "sync" {
-  domain = vultr_dns_domain.root.id
-  name   = "sync"
-  type   = "CNAME"
-  data   = local.offworld_hostname
-  ttl    = 300
-}
-
-######################
-# DNS - GitHub Pages #
-######################
-
-resource "vultr_dns_record" "gh_ipv4_a" {
-  domain = vultr_dns_domain.root.id
-  name   = "@"
-  type   = "A"
-  data   = "185.199.108.153"
-  ttl    = 300
-}
-
-resource "vultr_dns_record" "gh_ipv4_b" {
-  domain = vultr_dns_domain.root.id
-  name   = "@"
-  type   = "A"
-  data   = "185.199.109.153"
-  ttl    = 300
-}
-
-resource "vultr_dns_record" "gh_ipv4_c" {
-  domain = vultr_dns_domain.root.id
-  name   = "@"
-  type   = "A"
-  data   = "185.199.110.153"
-  ttl    = 300
-}
-
-resource "vultr_dns_record" "gh_ipv4_d" {
-  domain = vultr_dns_domain.root.id
-  name   = "@"
-  type   = "A"
-  data   = "185.199.111.153"
-  ttl    = 300
-}
-
-resource "vultr_dns_record" "gh_ipv6_a" {
-  domain = vultr_dns_domain.root.id
-  name   = "@"
-  type   = "AAAA"
-  data   = "2606:50c0:8000::153"
-  ttl    = 300
-}
-
-resource "vultr_dns_record" "gh_ipv6_b" {
-  domain = vultr_dns_domain.root.id
-  name   = "@"
-  type   = "AAAA"
-  data   = "2606:50c0:8001::153"
-  ttl    = 300
-}
-
-resource "vultr_dns_record" "gh_ipv6_c" {
-  domain = vultr_dns_domain.root.id
-  name   = "@"
-  type   = "AAAA"
-  data   = "2606:50c0:8002::153"
-  ttl    = 300
-}
-
-resource "vultr_dns_record" "gh_ipv6_d" {
-  domain = vultr_dns_domain.root.id
-  name   = "@"
-  type   = "AAAA"
-  data   = "2606:50c0:8003::153"
-  ttl    = 300
-}
-
-resource "vultr_dns_record" "gh_pages_challenge" {
-  domain = vultr_dns_domain.root.id
-  name   = "_github-pages-challenge-perpetualpanda"
-  type   = "TXT"
-  data   = "000dcdf2686daed53910f406c6a20f"
-  ttl    = 300
-}
-
-####################
-# DNS - ProtonMail #
-####################
-
-resource "vultr_dns_record" "proton_domainkey1" {
-  domain = vultr_dns_domain.root.id
-  name   = "protonmail._domainkey"
-  type   = "CNAME"
-  data   = "protonmail.domainkey.d3wxy5ytci2hzufheo7wcxxinbvkv4cso2g5ddycqj7gqiuta7n2q.domains.proton.ch"
-  ttl    = 300
-}
-
-resource "vultr_dns_record" "proton_domainkey2" {
-  domain = vultr_dns_domain.root.id
-  name   = "protonmail2._domainkey"
-  type   = "CNAME"
-  data   = "protonmail2.domainkey.d3wxy5ytci2hzufheo7wcxxinbvkv4cso2g5ddycqj7gqiuta7n2q.domains.proton.ch"
-  ttl    = 300
-}
-
-resource "vultr_dns_record" "proton_domainkey3" {
-  domain = vultr_dns_domain.root.id
-  name   = "protonmail3._domainkey"
-  type   = "CNAME"
-  data   = "protonmail3.domainkey.d3wxy5ytci2hzufheo7wcxxinbvkv4cso2g5ddycqj7gqiuta7n2q.domains.proton.ch"
-  ttl    = 300
-}
-
-resource "vultr_dns_record" "proton_mx1" {
-  domain   = vultr_dns_domain.root.id
-  name     = "@"
-  type     = "MX"
-  data     = "mail.protonmail.ch"
-  priority = 10
-  ttl      = 300
-}
-
-resource "vultr_dns_record" "proton_mx2" {
-  domain   = vultr_dns_domain.root.id
-  name     = "@"
-  type     = "MX"
-  data     = "mailsec.protonmail.ch"
-  priority = 20
-  ttl      = 300
-}
-
-resource "vultr_dns_record" "proton_dmarc" {
-  domain = vultr_dns_domain.root.id
-  name   = "_dmarc"
-  type   = "TXT"
-  data   = "v=DMARC1; p=quarantine"
-  ttl    = 300
-}
-
-resource "vultr_dns_record" "proton_spf" {
-  domain = vultr_dns_domain.root.id
-  name   = "@"
-  type   = "TXT"
-  data   = "v=spf1 include:_spf.protonmail.ch mx ~all"
-  ttl    = 300
-}
-
-resource "vultr_dns_record" "proton_verification" {
-  domain = vultr_dns_domain.root.id
-  name   = "@"
-  type   = "TXT"
-  data   = "protonmail-verification=b93e1e9a06d1776b86ad3a484de0c2968f25c1ab"
-  ttl    = 300
-}
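Note: renaming the module from vultr_instance_offworld (remote tf-modules source) to host_offworld (local ./modules/vultr-vm) changes every resource address under it, so a plain apply would destroy and recreate the VM. A sketch of the state moves that would preserve it, assuming the old remote module used the same internal resource names (service_vm, admin_key) as the local copy added below:

    terraform -chdir=./terraform state mv \
      'module.vultr_instance_offworld.vultr_instance.service_vm' \
      'module.host_offworld.vultr_instance.service_vm'
    terraform -chdir=./terraform state mv \
      'module.vultr_instance_offworld.vultr_ssh_key.admin_key' \
      'module.host_offworld.vultr_ssh_key.admin_key'

The DNS resources that merely moved from main.tf to dns.tf keep their addresses and need no state surgery; the nas/status/sync records switch from CNAME to A at the same time, so those three change regardless.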
diff --git a/terraform/modules/vultr-vm/main.tf b/terraform/modules/vultr-vm/main.tf
new file mode 100644
index 0000000..5ce23e5
--- /dev/null
+++ b/terraform/modules/vultr-vm/main.tf
@@ -0,0 +1,31 @@
+terraform {
+  required_version = ">= 1.10"
+}
+
+locals {
+  subdomain = regex("(.*)\\.[a-z]*\\.[a-z]*", var.vm_hostname)[0]
+}
+
+resource "vultr_ssh_key" "admin_key" {
+  name    = var.ssh_admin_key_label
+  ssh_key = var.ssh_admin_key
+}
+
+resource "vultr_instance" "service_vm" {
+  backups     = "disabled"
+  enable_ipv6 = false
+  hostname    = var.vm_hostname
+  label       = var.vm_hostname
+  os_id       = var.vm_os_id
+  plan        = var.vm_plan
+  region      = var.vm_region
+  ssh_key_ids = [vultr_ssh_key.admin_key.id]
+}
+
+resource "vultr_dns_record" "vm_hostname_record" {
+  domain = var.vultr_dns_domain_id
+  name   = local.subdomain
+  data   = vultr_instance.service_vm.main_ip
+  ttl    = 300
+  type   = "A"
+}
diff --git a/terraform/modules/vultr-vm/outputs.tf b/terraform/modules/vultr-vm/outputs.tf
new file mode 100644
index 0000000..f32ed55
--- /dev/null
+++ b/terraform/modules/vultr-vm/outputs.tf
@@ -0,0 +1,3 @@
+output "main_ip" {
+  value = vultr_instance.service_vm.main_ip
+}
diff --git a/terraform/modules/vultr-vm/providers.tf b/terraform/modules/vultr-vm/providers.tf
new file mode 100644
index 0000000..9147502
--- /dev/null
+++ b/terraform/modules/vultr-vm/providers.tf
@@ -0,0 +1,3 @@
+provider "vultr" {
+  api_key = var.vultr_api_key
+}
diff --git a/terraform/modules/vultr-vm/variables.tf b/terraform/modules/vultr-vm/variables.tf
new file mode 100644
index 0000000..105176b
--- /dev/null
+++ b/terraform/modules/vultr-vm/variables.tf
@@ -0,0 +1,39 @@
+variable "ssh_admin_key" {
+  type        = string
+  description = "public ssh key to add to the instance"
+}
+
+variable "ssh_admin_key_label" {
+  type        = string
+  description = "vultr label for the added ssh key"
+}
+
+variable "vultr_api_key" {
+  type        = string
+  description = "vultr api authentication key"
+}
+
+variable "vultr_dns_domain_id" {
+  type        = string
+  description = "vultr id of the dns domain"
+}
+
+variable "vm_hostname" {
+  type        = string
+  description = "hostname for the vultr instance"
+}
+
+variable "vm_os_id" {
+  type        = string
+  description = "vultr id for the os to install on the instance"
+}
+
+variable "vm_plan" {
+  type        = string
+  description = "vultr vm plan id for the instance"
+}
+
+variable "vm_region" {
+  type        = string
+  description = "vultr vm region id for the instance"
+}
diff --git a/terraform/modules/vultr-vm/versions.tf b/terraform/modules/vultr-vm/versions.tf
new file mode 100644
index 0000000..f128e49
--- /dev/null
+++ b/terraform/modules/vultr-vm/versions.tf
@@ -0,0 +1,8 @@
+terraform {
+  required_providers {
+    vultr = {
+      source  = "vultr/vultr"
+      version = "~> 2.23.1"
+    }
+  }
+}
diff --git a/terraform/versions.tf b/terraform/versions.tf
index 7bc27f9..e4596d7 100644
--- a/terraform/versions.tf
+++ b/terraform/versions.tf
@@ -3,16 +3,15 @@ terraform {
   required_providers {
     ansible = {
-      version = "1.3.0"
+      version = "~> 1.3.0"
       source  = "ansible/ansible"
     }
 
     vultr = {
-      version = "2.23.1"
+      version = "~> 2.23.1"
       source  = "vultr/vultr"
     }
   }
 
-
   backend "remote" {
     organization = "perpetualpanda"
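Note: on the subdomain local in modules/vultr-vm/main.tf — Terraform's regex() returns the list of captured substrings when the pattern contains unnamed capture groups, so [0] is the leading label. Checked in terraform console:

    > regex("(.*)\\.[a-z]*\\.[a-z]*", "offworld.ppanda.org")[0]
    "offworld"

The pattern assumes a two-label, all-lowercase root domain, which holds for ppanda.org.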