From c4ea88000b6022a2e4b5391486b51414653cb7d7 Mon Sep 17 00:00:00 2001 From: Jack Ivanov <17044561+jackivanov@users.noreply.github.com> Date: Mon, 8 Apr 2019 23:20:34 +0300 Subject: [PATCH] Refactoring to support roles inclusion (#1365) --- ansible.cfg | 3 +- cloud.yml | 40 +--- config.cfg | 22 +++ input.yml | 195 +++++++++---------- playbooks/rescue.yml | 5 + roles/cloud-azure/tasks/main.yml | 78 ++++---- roles/cloud-digitalocean/tasks/main.yml | 183 +++++++++--------- roles/cloud-ec2/tasks/main.yml | 77 ++++---- roles/cloud-gce/tasks/main.yml | 107 +++++------ roles/cloud-lightsail/tasks/main.yml | 88 ++++----- roles/cloud-openstack/tasks/main.yml | 141 +++++++------- roles/cloud-scaleway/tasks/main.yml | 243 ++++++++++++------------ roles/cloud-vultr/tasks/main.yml | 6 +- roles/common/tasks/main.yml | 48 ++--- roles/common/tasks/ubuntu.yml | 4 +- roles/dns_adblocking/handlers/main.yml | 4 + roles/dns_adblocking/tasks/main.yml | 78 ++++---- roles/local/tasks/main.yml | 11 +- roles/ssh_tunneling/tasks/main.yml | 210 ++++++++++---------- roles/strongswan/defaults/main.yml | 2 - roles/strongswan/tasks/main.yml | 56 +++--- roles/wireguard/defaults/main.yml | 14 -- roles/wireguard/tasks/main.yml | 1 - server.yml | 152 ++++++++------- users.yml | 54 +++--- 25 files changed, 866 insertions(+), 956 deletions(-) create mode 100644 playbooks/rescue.yml diff --git a/ansible.cfg b/ansible.cfg index aef4084..f6b2978 100644 --- a/ansible.cfg +++ b/ansible.cfg @@ -4,7 +4,8 @@ pipelining = True retry_files_enabled = False host_key_checking = False timeout = 60 -stdout_callback = full_skip +stdout_callback = default +display_skipped_hosts = no [paramiko_connection] record_host_keys = False diff --git a/cloud.yml b/cloud.yml index 671c776..310bf23 100644 --- a/cloud.yml +++ b/cloud.yml @@ -2,48 +2,20 @@ - name: Provision the server hosts: localhost tags: always + become: false vars_files: - config.cfg - pre_tasks: + tasks: - block: - name: Local pre-tasks import_tasks: playbooks/cloud-pre.yml - tags: always - rescue: - - debug: var=fail_hint - tags: always - - fail: - tags: always - roles: - - role: cloud-digitalocean - when: algo_provider == "digitalocean" - - role: cloud-ec2 - when: algo_provider == "ec2" - - role: cloud-vultr - when: algo_provider == "vultr" - - role: cloud-gce - when: algo_provider == "gce" - - role: cloud-azure - when: algo_provider == "azure" - - role: cloud-lightsail - when: algo_provider == "lightsail" - - role: cloud-scaleway - when: algo_provider == "scaleway" - - role: cloud-openstack - when: algo_provider == "openstack" - - role: local - when: algo_provider == "local" + - name: Include a provisioning role + include_role: + name: "{{ 'local' if algo_provider == 'local' else 'cloud-' + algo_provider }}" - post_tasks: - - block: - name: Local post-tasks import_tasks: playbooks/cloud-post.yml - become: false - tags: cloud rescue: - - debug: var=fail_hint - tags: always - - fail: - tags: always + - include_tasks: playbooks/rescue.yml diff --git a/config.cfg b/config.cfg index bfcaac6..16411cf 100644 --- a/config.cfg +++ b/config.cfg @@ -25,6 +25,12 @@ ipsec_enabled: true # https://wiki.strongswan.org/projects/strongswan/wiki/LoggerConfiguration strongswan_log_level: 2 +# rightsourceip for ipsec +# ipv4 +strongswan_network: 10.19.48.0/24 +# ipv6 +strongswan_network_ipv6: 'fd9d:bc11:4020::/48' + # Deploy WireGuard wireguard_enabled: true wireguard_port: 51820 @@ -33,6 +39,22 @@ wireguard_port: 51820 # See: https://www.wireguard.com/quickstart/#nat-and-firewall-traversal-persistence 
wireguard_PersistentKeepalive: 0 +# WireGuard network configuration +_wireguard_network_ipv4: + subnet: 10.19.49.0 + prefix: 24 + gateway: 10.19.49.1 + clients_range: 10.19.49 + clients_start: 2 +_wireguard_network_ipv6: + subnet: 'fd9d:bc11:4021::' + prefix: 48 + gateway: 'fd9d:bc11:4021::1' + clients_range: 'fd9d:bc11:4021::' + clients_start: 2 +wireguard_network_ipv4: "{{ _wireguard_network_ipv4['subnet'] }}/{{ _wireguard_network_ipv4['prefix'] }}" +wireguard_network_ipv6: "{{ _wireguard_network_ipv6['subnet'] }}/{{ _wireguard_network_ipv6['prefix'] }}" + # Reduce the MTU of the VPN tunnel # Some cloud and internet providers use a smaller MTU (Maximum Transmission # Unit) than the normal value of 1500 and if you don't reduce the MTU of your diff --git a/input.yml b/input.yml index 5cc6017..f4b155b 100644 --- a/input.yml +++ b/input.yml @@ -25,115 +25,118 @@ - config.cfg tasks: - - pause: - prompt: | - What provider would you like to use? - {% for p in providers_map %} - {{ loop.index }}. {{ p['name']}} - {% endfor %} - - Enter the number of your desired provider - register: _algo_provider - when: provider is undefined - - - name: Set facts based on the input - set_fact: - algo_provider: "{{ provider | default(providers_map[_algo_provider.user_input|default(omit)|int - 1]['alias']) }}" - - - pause: - prompt: | - Name the vpn server - [algo] - register: _algo_server_name - when: - - server_name is undefined - - algo_provider != "local" - block: - pause: prompt: | - Do you want macOS/iOS IPsec clients to enable "Connect On Demand" when connected to cellular networks? - [y/N] - register: _ondemand_cellular - when: ondemand_cellular is undefined + What provider would you like to use? + {% for p in providers_map %} + {{ loop.index }}. {{ p['name']}} + {% endfor %} + + Enter the number of your desired provider + register: _algo_provider + when: provider is undefined + + - name: Set facts based on the input + set_fact: + algo_provider: "{{ provider | default(providers_map[_algo_provider.user_input|default(omit)|int - 1]['alias']) }}" - pause: prompt: | - Do you want macOS/iOS IPsec clients to enable "Connect On Demand" when connected to Wi-Fi? - [y/N] - register: _ondemand_wifi - when: ondemand_wifi is undefined - - - pause: - prompt: | - List the names of any trusted Wi-Fi networks where macOS/iOS IPsec clients should not use "Connect On Demand" - (e.g., your home network. Comma-separated value, e.g., HomeNet,OfficeWifi,AlgoWiFi) - register: _ondemand_wifi_exclude + Name the vpn server + [algo] + register: _algo_server_name when: - - ondemand_wifi_exclude is undefined - - (ondemand_wifi|default(false)|bool) or - (booleans_map[_ondemand_wifi.user_input|default(omit)]|default(false)) + - server_name is undefined + - algo_provider != "local" + - block: + - pause: + prompt: | + Do you want macOS/iOS IPsec clients to enable "Connect On Demand" when connected to cellular networks? + [y/N] + register: _ondemand_cellular + when: ondemand_cellular is undefined + + - pause: + prompt: | + Do you want macOS/iOS IPsec clients to enable "Connect On Demand" when connected to Wi-Fi? + [y/N] + register: _ondemand_wifi + when: ondemand_wifi is undefined + + - pause: + prompt: | + List the names of any trusted Wi-Fi networks where macOS/iOS IPsec clients should not use "Connect On Demand" + (e.g., your home network. 
Comma-separated value, e.g., HomeNet,OfficeWifi,AlgoWiFi) + register: _ondemand_wifi_exclude + when: + - ondemand_wifi_exclude is undefined + - (ondemand_wifi|default(false)|bool) or + (booleans_map[_ondemand_wifi.user_input|default(omit)]|default(false)) + + - pause: + prompt: | + Do you want the VPN to support Windows 10 or Linux Desktop clients? (enables compatible ciphers and key exchange, less secure) + [y/N] + register: _windows + when: windows is undefined + + - pause: + prompt: | + Do you want to retain the CA key? (required to add users in the future, but less secure) + [y/N] + register: _store_cakey + when: store_cakey is undefined + when: ipsec_enabled - pause: prompt: | - Do you want the VPN to support Windows 10 or Linux Desktop clients? (enables compatible ciphers and key exchange, less secure) + Do you want to install an ad blocking DNS resolver on this VPN server? [y/N] - register: _windows - when: windows is undefined + register: _local_dns + when: local_dns is undefined - pause: prompt: | - Do you want to retain the CA key? (required to add users in the future, but less secure) + Do you want each user to have their own account for SSH tunneling? [y/N] - register: _store_cakey - when: store_cakey is undefined - when: ipsec_enabled + register: _ssh_tunneling + when: ssh_tunneling is undefined - - pause: - prompt: | - Do you want to install an ad blocking DNS resolver on this VPN server? - [y/N] - register: _local_dns - when: local_dns is undefined - - - pause: - prompt: | - Do you want each user to have their own account for SSH tunneling? - [y/N] - register: _ssh_tunneling - when: ssh_tunneling is undefined - - - name: Set facts based on the input - set_fact: - algo_server_name: >- - {% if server_name is defined %}{% set _server = server_name %} - {%- elif _algo_server_name.user_input is defined and _algo_server_name.user_input != "" %}{% set _server = _algo_server_name.user_input %} - {%- else %}{% set _server = defaults['server_name'] %}{% endif -%} - {{ _server | regex_replace('(?!\.)(\W|_)', '-') }} - algo_ondemand_cellular: >- - {% if ondemand_cellular is defined %}{{ ondemand_cellular | bool }} - {%- elif _ondemand_cellular.user_input is defined and _ondemand_cellular.user_input != "" %}{{ booleans_map[_ondemand_cellular.user_input] | default(defaults['ondemand_cellular']) }} - {%- else %}false{% endif %} - algo_ondemand_wifi: >- - {% if ondemand_wifi is defined %}{{ ondemand_wifi | bool }} - {%- elif _ondemand_wifi.user_input is defined and _ondemand_wifi.user_input != "" %}{{ booleans_map[_ondemand_wifi.user_input] | default(defaults['ondemand_wifi']) }} - {%- else %}false{% endif %} - algo_ondemand_wifi_exclude: >- - {% if ondemand_wifi_exclude is defined %}{{ ondemand_wifi_exclude | b64encode }} - {%- elif _ondemand_wifi_exclude.user_input is defined and _ondemand_wifi_exclude.user_input != "" %}{{ _ondemand_wifi_exclude.user_input | b64encode }} - {%- else %}{{ '_null' | b64encode }}{% endif %} - algo_local_dns: >- - {% if local_dns is defined %}{{ local_dns | bool }} - {%- elif _local_dns.user_input is defined and _local_dns.user_input != "" %}{{ booleans_map[_local_dns.user_input] | default(defaults['local_dns']) }} - {%- else %}false{% endif %} - algo_ssh_tunneling: >- - {% if ssh_tunneling is defined %}{{ ssh_tunneling | bool }} - {%- elif _ssh_tunneling.user_input is defined and _ssh_tunneling.user_input != "" %}{{ booleans_map[_ssh_tunneling.user_input] | default(defaults['ssh_tunneling']) }} - {%- else %}false{% endif %} - algo_windows: >- - {% if windows 
is defined %}{{ windows | bool }} - {%- elif _windows.user_input is defined and _windows.user_input != "" %}{{ booleans_map[_windows.user_input] | default(defaults['windows']) }} - {%- else %}false{% endif %} - algo_store_cakey: >- - {% if store_cakey is defined %}{{ store_cakey | bool }} - {%- elif _store_cakey.user_input is defined and _store_cakey.user_input != "" %}{{ booleans_map[_store_cakey.user_input] | default(defaults['store_cakey']) }} - {%- else %}false{% endif %} + - name: Set facts based on the input + set_fact: + algo_server_name: >- + {% if server_name is defined %}{% set _server = server_name %} + {%- elif _algo_server_name.user_input is defined and _algo_server_name.user_input != "" %}{% set _server = _algo_server_name.user_input %} + {%- else %}{% set _server = defaults['server_name'] %}{% endif -%} + {{ _server | regex_replace('(?!\.)(\W|_)', '-') }} + algo_ondemand_cellular: >- + {% if ondemand_cellular is defined %}{{ ondemand_cellular | bool }} + {%- elif _ondemand_cellular.user_input is defined and _ondemand_cellular.user_input != "" %}{{ booleans_map[_ondemand_cellular.user_input] | default(defaults['ondemand_cellular']) }} + {%- else %}false{% endif %} + algo_ondemand_wifi: >- + {% if ondemand_wifi is defined %}{{ ondemand_wifi | bool }} + {%- elif _ondemand_wifi.user_input is defined and _ondemand_wifi.user_input != "" %}{{ booleans_map[_ondemand_wifi.user_input] | default(defaults['ondemand_wifi']) }} + {%- else %}false{% endif %} + algo_ondemand_wifi_exclude: >- + {% if ondemand_wifi_exclude is defined %}{{ ondemand_wifi_exclude | b64encode }} + {%- elif _ondemand_wifi_exclude.user_input is defined and _ondemand_wifi_exclude.user_input != "" %}{{ _ondemand_wifi_exclude.user_input | b64encode }} + {%- else %}{{ '_null' | b64encode }}{% endif %} + algo_local_dns: >- + {% if local_dns is defined %}{{ local_dns | bool }} + {%- elif _local_dns.user_input is defined and _local_dns.user_input != "" %}{{ booleans_map[_local_dns.user_input] | default(defaults['local_dns']) }} + {%- else %}false{% endif %} + algo_ssh_tunneling: >- + {% if ssh_tunneling is defined %}{{ ssh_tunneling | bool }} + {%- elif _ssh_tunneling.user_input is defined and _ssh_tunneling.user_input != "" %}{{ booleans_map[_ssh_tunneling.user_input] | default(defaults['ssh_tunneling']) }} + {%- else %}false{% endif %} + algo_windows: >- + {% if windows is defined %}{{ windows | bool }} + {%- elif _windows.user_input is defined and _windows.user_input != "" %}{{ booleans_map[_windows.user_input] | default(defaults['windows']) }} + {%- else %}false{% endif %} + algo_store_cakey: >- + {% if store_cakey is defined %}{{ store_cakey | bool }} + {%- elif _store_cakey.user_input is defined and _store_cakey.user_input != "" %}{{ booleans_map[_store_cakey.user_input] | default(defaults['store_cakey']) }} + {%- else %}false{% endif %} + rescue: + - include_tasks: playbooks/rescue.yml diff --git a/playbooks/rescue.yml b/playbooks/rescue.yml new file mode 100644 index 0000000..4c090ce --- /dev/null +++ b/playbooks/rescue.yml @@ -0,0 +1,5 @@ +--- +- debug: + var: fail_hint + +- fail: diff --git a/roles/cloud-azure/tasks/main.yml b/roles/cloud-azure/tasks/main.yml index 113352c..2c44809 100644 --- a/roles/cloud-azure/tasks/main.yml +++ b/roles/cloud-azure/tasks/main.yml @@ -1,47 +1,41 @@ --- +- name: Build python virtual environment + import_tasks: venv.yml + - block: - - name: Build python virtual environment - import_tasks: venv.yml + - name: Include prompts + import_tasks: prompts.yml - - block: - - name: Include 
prompts - import_tasks: prompts.yml + - set_fact: + algo_region: >- + {% if region is defined %}{{ region }} + {%- elif _algo_region.user_input is defined and _algo_region.user_input != "" %}{{ azure_regions[_algo_region.user_input | int -1 ]['name'] }} + {%- else %}{{ azure_regions[default_region | int - 1]['name'] }}{% endif %} - - set_fact: - algo_region: >- - {% if region is defined %}{{ region }} - {%- elif _algo_region.user_input is defined and _algo_region.user_input != "" %}{{ azure_regions[_algo_region.user_input | int -1 ]['name'] }} - {%- else %}{{ azure_regions[default_region | int - 1]['name'] }}{% endif %} + - name: Create AlgoVPN Server + azure_rm_deployment: + state: present + deployment_name: "{{ algo_server_name }}" + template: "{{ lookup('file', 'deployment.json') }}" + secret: "{{ secret }}" + tenant: "{{ tenant }}" + client_id: "{{ client_id }}" + subscription_id: "{{ subscription_id }}" + resource_group_name: "{{ algo_server_name }}" + location: "{{ algo_region }}" + parameters: + sshKeyData: + value: "{{ lookup('file', '{{ SSH_keys.public }}') }}" + WireGuardPort: + value: "{{ wireguard_port }}" + vmSize: + value: "{{ cloud_providers.azure.size }}" + imageReferenceSku: + value: "{{ cloud_providers.azure.image }}" + register: azure_rm_deployment - - name: Create AlgoVPN Server - azure_rm_deployment: - state: present - deployment_name: "{{ algo_server_name }}" - template: "{{ lookup('file', 'deployment.json') }}" - secret: "{{ secret }}" - tenant: "{{ tenant }}" - client_id: "{{ client_id }}" - subscription_id: "{{ subscription_id }}" - resource_group_name: "{{ algo_server_name }}" - location: "{{ algo_region }}" - parameters: - sshKeyData: - value: "{{ lookup('file', '{{ SSH_keys.public }}') }}" - WireGuardPort: - value: "{{ wireguard_port }}" - vmSize: - value: "{{ cloud_providers.azure.size }}" - imageReferenceSku: - value: "{{ cloud_providers.azure.image }}" - register: azure_rm_deployment - - - set_fact: - cloud_instance_ip: "{{ azure_rm_deployment.deployment.outputs.publicIPAddresses.value }}" - ansible_ssh_user: ubuntu - environment: - PYTHONPATH: "{{ azure_venv }}/lib/python2.7/site-packages/" - rescue: - - debug: var=fail_hint - tags: always - - fail: - tags: always + - set_fact: + cloud_instance_ip: "{{ azure_rm_deployment.deployment.outputs.publicIPAddresses.value }}" + ansible_ssh_user: ubuntu + environment: + PYTHONPATH: "{{ azure_venv }}/lib/python2.7/site-packages/" diff --git a/roles/cloud-digitalocean/tasks/main.yml b/roles/cloud-digitalocean/tasks/main.yml index 488ea2d..93baefe 100644 --- a/roles/cloud-digitalocean/tasks/main.yml +++ b/roles/cloud-digitalocean/tasks/main.yml @@ -1,110 +1,105 @@ +--- +- name: Build python virtual environment + import_tasks: venv.yml + - block: - - name: Build python virtual environment - import_tasks: venv.yml + - name: Include prompts + import_tasks: prompts.yml - - block: - - name: Include prompts - import_tasks: prompts.yml + - name: Set additional facts + set_fact: + algo_do_region: >- + {% if region is defined %}{{ region }} + {%- elif _algo_region.user_input is defined and _algo_region.user_input != "" %}{{ do_regions[_algo_region.user_input | int -1 ]['slug'] }} + {%- else %}{{ do_regions[default_region | int - 1]['slug'] }}{% endif %} + public_key: "{{ lookup('file', '{{ SSH_keys.public }}') }}" - - name: Set additional facts - set_fact: - algo_do_region: >- - {% if region is defined %}{{ region }} - {%- elif _algo_region.user_input is defined and _algo_region.user_input != "" %}{{ 
do_regions[_algo_region.user_input | int -1 ]['slug'] }} - {%- else %}{{ do_regions[default_region | int - 1]['slug'] }}{% endif %} - public_key: "{{ lookup('file', '{{ SSH_keys.public }}') }}" - - - block: - - name: "Delete the existing Algo SSH keys" - digital_ocean: - state: absent - command: ssh - api_token: "{{ algo_do_token }}" - name: "{{ SSH_keys.comment }}" - register: ssh_keys - until: ssh_keys.changed != true - retries: 10 - delay: 1 - - rescue: - - name: Collect the fail error - digital_ocean: - state: absent - command: ssh - api_token: "{{ algo_do_token }}" - name: "{{ SSH_keys.comment }}" - register: ssh_keys - ignore_errors: yes - - - debug: var=ssh_keys - - - fail: - msg: "Please, ensure that your API token is not read-only." - - - name: "Upload the SSH key" + - block: + - name: "Delete the existing Algo SSH keys" digital_ocean: - state: present + state: absent command: ssh - ssh_pub_key: "{{ public_key }}" api_token: "{{ algo_do_token }}" name: "{{ SSH_keys.comment }}" - register: do_ssh_key + register: ssh_keys + until: ssh_keys.changed != true + retries: 10 + delay: 1 - - name: "Creating a droplet..." + rescue: + - name: Collect the fail error digital_ocean: - state: present - command: droplet - name: "{{ algo_server_name }}" - region_id: "{{ algo_do_region }}" - size_id: "{{ cloud_providers.digitalocean.size }}" - image_id: "{{ cloud_providers.digitalocean.image }}" - ssh_key_ids: "{{ do_ssh_key.ssh_key.id }}" - unique_name: yes + state: absent + command: ssh api_token: "{{ algo_do_token }}" - ipv6: yes - register: do + name: "{{ SSH_keys.comment }}" + register: ssh_keys + ignore_errors: yes - - set_fact: - cloud_instance_ip: "{{ do.droplet.ip_address }}" - ansible_ssh_user: root + - debug: var=ssh_keys - - name: Tag the droplet - digital_ocean_tag: - name: "Environment:Algo" - resource_id: "{{ do.droplet.id }}" + - fail: + msg: "Please, ensure that your API token is not read-only." + + - name: "Upload the SSH key" + digital_ocean: + state: present + command: ssh + ssh_pub_key: "{{ public_key }}" + api_token: "{{ algo_do_token }}" + name: "{{ SSH_keys.comment }}" + register: do_ssh_key + + - name: "Creating a droplet..." 
+ digital_ocean: + state: present + command: droplet + name: "{{ algo_server_name }}" + region_id: "{{ algo_do_region }}" + size_id: "{{ cloud_providers.digitalocean.size }}" + image_id: "{{ cloud_providers.digitalocean.image }}" + ssh_key_ids: "{{ do_ssh_key.ssh_key.id }}" + unique_name: yes + api_token: "{{ algo_do_token }}" + ipv6: yes + register: do + + - set_fact: + cloud_instance_ip: "{{ do.droplet.ip_address }}" + ansible_ssh_user: root + + - name: Tag the droplet + digital_ocean_tag: + name: "Environment:Algo" + resource_id: "{{ do.droplet.id }}" + api_token: "{{ algo_do_token }}" + state: present + + - block: + - name: "Delete the new Algo SSH key" + digital_ocean: + state: absent + command: ssh api_token: "{{ algo_do_token }}" - state: present + name: "{{ SSH_keys.comment }}" + register: ssh_keys + until: ssh_keys.changed != true + retries: 10 + delay: 1 - - block: - - name: "Delete the new Algo SSH key" - digital_ocean: - state: absent - command: ssh - api_token: "{{ algo_do_token }}" - name: "{{ SSH_keys.comment }}" - register: ssh_keys - until: ssh_keys.changed != true - retries: 10 - delay: 1 + rescue: + - name: Collect the fail error + digital_ocean: + state: absent + command: ssh + api_token: "{{ algo_do_token }}" + name: "{{ SSH_keys.comment }}" + register: ssh_keys + ignore_errors: yes - rescue: - - name: Collect the fail error - digital_ocean: - state: absent - command: ssh - api_token: "{{ algo_do_token }}" - name: "{{ SSH_keys.comment }}" - register: ssh_keys - ignore_errors: yes + - debug: var=ssh_keys - - debug: var=ssh_keys - - - fail: - msg: "Please, ensure that your API token is not read-only." - environment: - PYTHONPATH: "{{ digitalocean_venv }}/lib/python2.7/site-packages/" - rescue: - - debug: var=fail_hint - tags: always - - fail: - tags: always + - fail: + msg: "Please, ensure that your API token is not read-only." 
+ environment: + PYTHONPATH: "{{ digitalocean_venv }}/lib/python2.7/site-packages/" diff --git a/roles/cloud-ec2/tasks/main.yml b/roles/cloud-ec2/tasks/main.yml index ea3a67a..ce6532b 100644 --- a/roles/cloud-ec2/tasks/main.yml +++ b/roles/cloud-ec2/tasks/main.yml @@ -1,48 +1,43 @@ +--- +- name: Build python virtual environment + import_tasks: venv.yml + - block: - - name: Build python virtual environment - import_tasks: venv.yml + - name: Include prompts + import_tasks: prompts.yml - - block: - - name: Include prompts - import_tasks: prompts.yml + - set_fact: + algo_region: >- + {% if region is defined %}{{ region }} + {%- elif _algo_region.user_input is defined and _algo_region.user_input != "" %}{{ aws_regions[_algo_region.user_input | int -1 ]['region_name'] }} + {%- else %}{{ aws_regions[default_region | int - 1]['region_name'] }}{% endif %} + stack_name: "{{ algo_server_name | replace('.', '-') }}" - - set_fact: - algo_region: >- - {% if region is defined %}{{ region }} - {%- elif _algo_region.user_input is defined and _algo_region.user_input != "" %}{{ aws_regions[_algo_region.user_input | int -1 ]['region_name'] }} - {%- else %}{{ aws_regions[default_region | int - 1]['region_name'] }}{% endif %} - stack_name: "{{ algo_server_name | replace('.', '-') }}" + - name: Locate official AMI for region + ec2_ami_facts: + aws_access_key: "{{ access_key }}" + aws_secret_key: "{{ secret_key }}" + owners: "{{ cloud_providers.ec2.image.owner }}" + region: "{{ algo_region }}" + filters: + name: "ubuntu/images/hvm-ssd/{{ cloud_providers.ec2.image.name }}-amd64-server-*" + register: ami_search - - name: Locate official AMI for region - ec2_ami_facts: - aws_access_key: "{{ access_key }}" - aws_secret_key: "{{ secret_key }}" - owners: "{{ cloud_providers.ec2.image.owner }}" - region: "{{ algo_region }}" - filters: - name: "ubuntu/images/hvm-ssd/{{ cloud_providers.ec2.image.name }}-amd64-server-*" - register: ami_search + - import_tasks: encrypt_image.yml + when: encrypted - - import_tasks: encrypt_image.yml - when: encrypted + - name: Set the ami id as a fact + set_fact: + ami_image: >- + {% if ami_search_encrypted.image_id is defined %}{{ ami_search_encrypted.image_id }} + {%- elif search_crypt.images is defined and search_crypt.images|length >= 1 %}{{ (search_crypt.images | sort(attribute='creation_date') | last)['image_id'] }} + {%- else %}{{ (ami_search.images | sort(attribute='creation_date') | last)['image_id'] }}{% endif %} - - name: Set the ami id as a fact - set_fact: - ami_image: >- - {% if ami_search_encrypted.image_id is defined %}{{ ami_search_encrypted.image_id }} - {%- elif search_crypt.images is defined and search_crypt.images|length >= 1 %}{{ (search_crypt.images | sort(attribute='creation_date') | last)['image_id'] }} - {%- else %}{{ (ami_search.images | sort(attribute='creation_date') | last)['image_id'] }}{% endif %} + - name: Deploy the stack + import_tasks: cloudformation.yml - - name: Deploy the stack - import_tasks: cloudformation.yml - - - set_fact: - cloud_instance_ip: "{{ stack.stack_outputs.ElasticIP }}" - ansible_ssh_user: ubuntu - environment: - PYTHONPATH: "{{ ec2_venv }}/lib/python2.7/site-packages/" - rescue: - - debug: var=fail_hint - tags: always - - fail: - tags: always + - set_fact: + cloud_instance_ip: "{{ stack.stack_outputs.ElasticIP }}" + ansible_ssh_user: ubuntu + environment: + PYTHONPATH: "{{ ec2_venv }}/lib/python2.7/site-packages/" diff --git a/roles/cloud-gce/tasks/main.yml b/roles/cloud-gce/tasks/main.yml index baa5f46..eb211ee 100644 --- 
a/roles/cloud-gce/tasks/main.yml +++ b/roles/cloud-gce/tasks/main.yml @@ -1,62 +1,57 @@ +--- +- name: Build python virtual environment + import_tasks: venv.yml + - block: - - name: Build python virtual environment - import_tasks: venv.yml + - name: Include prompts + import_tasks: prompts.yml - - block: - - name: Include prompts - import_tasks: prompts.yml + - name: Network configured + gce_net: + name: "{{ algo_server_name }}" + fwname: "{{ algo_server_name }}-fw" + allowed: "udp:500,4500,{{ wireguard_port }};tcp:22" + state: "present" + mode: auto + src_range: 0.0.0.0/0 + service_account_email: "{{ service_account_email }}" + credentials_file: "{{ credentials_file_path }}" + project_id: "{{ project_id }}" - - name: Network configured - gce_net: - name: "{{ algo_server_name }}" - fwname: "{{ algo_server_name }}-fw" - allowed: "udp:500,4500,{{ wireguard_port }};tcp:22" - state: "present" - mode: auto - src_range: 0.0.0.0/0 - service_account_email: "{{ service_account_email }}" - credentials_file: "{{ credentials_file_path }}" - project_id: "{{ project_id }}" + - block: + - name: External IP allocated + gce_eip: + service_account_email: "{{ service_account_email }}" + credentials_file: "{{ credentials_file_path }}" + project_id: "{{ project_id }}" + name: "{{ algo_server_name }}" + region: "{{ algo_region.split('-')[0:2] | join('-') }}" + state: present + register: gce_eip - - block: - - name: External IP allocated - gce_eip: - service_account_email: "{{ service_account_email }}" - credentials_file: "{{ credentials_file_path }}" - project_id: "{{ project_id }}" - name: "{{ algo_server_name }}" - region: "{{ algo_region.split('-')[0:2] | join('-') }}" - state: present - register: gce_eip + - name: Set External IP as a fact + set_fact: + external_ip: "{{ gce_eip.address }}" + when: cloud_providers.gce.external_static_ip - - name: Set External IP as a fact - set_fact: - external_ip: "{{ gce_eip.address }}" - when: cloud_providers.gce.external_static_ip + - name: "Creating a new instance..." + gce: + instance_names: "{{ algo_server_name }}" + zone: "{{ algo_region }}" + external_ip: "{{ external_ip | default('ephemeral') }}" + machine_type: "{{ cloud_providers.gce.size }}" + image: "{{ cloud_providers.gce.image }}" + service_account_email: "{{ service_account_email }}" + credentials_file: "{{ credentials_file_path }}" + project_id: "{{ project_id }}" + metadata: '{"ssh-keys":"ubuntu:{{ ssh_public_key_lookup }}"}' + network: "{{ algo_server_name }}" + tags: + - "environment-algo" + register: google_vm - - name: "Creating a new instance..." 
- gce: - instance_names: "{{ algo_server_name }}" - zone: "{{ algo_region }}" - external_ip: "{{ external_ip | default('ephemeral') }}" - machine_type: "{{ cloud_providers.gce.size }}" - image: "{{ cloud_providers.gce.image }}" - service_account_email: "{{ service_account_email }}" - credentials_file: "{{ credentials_file_path }}" - project_id: "{{ project_id }}" - metadata: '{"ssh-keys":"ubuntu:{{ ssh_public_key_lookup }}"}' - network: "{{ algo_server_name }}" - tags: - - "environment-algo" - register: google_vm - - - set_fact: - cloud_instance_ip: "{{ google_vm.instance_data[0].public_ip }}" - ansible_ssh_user: ubuntu - environment: - PYTHONPATH: "{{ gce_venv }}/lib/python2.7/site-packages/" - rescue: - - debug: var=fail_hint - tags: always - - fail: - tags: always + - set_fact: + cloud_instance_ip: "{{ google_vm.instance_data[0].public_ip }}" + ansible_ssh_user: ubuntu + environment: + PYTHONPATH: "{{ gce_venv }}/lib/python2.7/site-packages/" diff --git a/roles/cloud-lightsail/tasks/main.yml b/roles/cloud-lightsail/tasks/main.yml index 21e3d45..c152e5e 100644 --- a/roles/cloud-lightsail/tasks/main.yml +++ b/roles/cloud-lightsail/tasks/main.yml @@ -1,50 +1,44 @@ +--- +- name: Build python virtual environment + import_tasks: venv.yml + - block: - - name: Build python virtual environment - import_tasks: venv.yml + - name: Include prompts + import_tasks: prompts.yml - - block: - - name: Include prompts - import_tasks: prompts.yml + - name: Create an instance + lightsail: + aws_access_key: "{{ access_key }}" + aws_secret_key: "{{ secret_key }}" + name: "{{ algo_server_name }}" + state: present + region: "{{ algo_region }}" + zone: "{{ algo_region }}a" + blueprint_id: "{{ cloud_providers.lightsail.image }}" + bundle_id: "{{ cloud_providers.lightsail.size }}" + wait_timeout: 300 + open_ports: + - from_port: 4500 + to_port: 4500 + protocol: udp + - from_port: 500 + to_port: 500 + protocol: udp + - from_port: "{{ wireguard_port }}" + to_port: "{{ wireguard_port }}" + protocol: udp + user_data: | + #!/bin/bash + mkdir -p /home/ubuntu/.ssh/ + echo "{{ lookup('file', '{{ SSH_keys.public }}') }}" >> /home/ubuntu/.ssh/authorized_keys + chown -R ubuntu: /home/ubuntu/.ssh/ + chmod 0700 /home/ubuntu/.ssh/ + chmod 0600 /home/ubuntu/.ssh/* + test + register: algo_instance - - name: Create an instance - lightsail: - aws_access_key: "{{ access_key }}" - aws_secret_key: "{{ secret_key }}" - name: "{{ algo_server_name }}" - state: present - region: "{{ algo_region }}" - zone: "{{ algo_region }}a" - blueprint_id: "{{ cloud_providers.lightsail.image }}" - bundle_id: "{{ cloud_providers.lightsail.size }}" - wait_timeout: 300 - open_ports: - - from_port: 4500 - to_port: 4500 - protocol: udp - - from_port: 500 - to_port: 500 - protocol: udp - - from_port: "{{ wireguard_port }}" - to_port: "{{ wireguard_port }}" - protocol: udp - user_data: | - #!/bin/bash - mkdir -p /home/ubuntu/.ssh/ - echo "{{ lookup('file', '{{ SSH_keys.public }}') }}" >> /home/ubuntu/.ssh/authorized_keys - chown -R ubuntu: /home/ubuntu/.ssh/ - chmod 0700 /home/ubuntu/.ssh/ - chmod 0600 /home/ubuntu/.ssh/* - test - register: algo_instance - - - set_fact: - cloud_instance_ip: "{{ algo_instance['instance']['public_ip_address'] }}" - ansible_ssh_user: ubuntu - environment: - PYTHONPATH: "{{ lightsail_venv }}/lib/python2.7/site-packages/" - - rescue: - - debug: var=fail_hint - tags: always - - fail: - tags: always + - set_fact: + cloud_instance_ip: "{{ algo_instance['instance']['public_ip_address'] }}" + ansible_ssh_user: ubuntu + environment: + 
PYTHONPATH: "{{ lightsail_venv }}/lib/python2.7/site-packages/" diff --git a/roles/cloud-openstack/tasks/main.yml b/roles/cloud-openstack/tasks/main.yml index 75b3db6..b8c1181 100644 --- a/roles/cloud-openstack/tasks/main.yml +++ b/roles/cloud-openstack/tasks/main.yml @@ -3,87 +3,80 @@ msg: "OpenStack credentials are not set. Download it from the OpenStack dashboard->Compute->API Access and source it in the shell (eg: source /tmp/dhc-openrc.sh)" when: lookup('env', 'OS_AUTH_URL') == "" +- name: Build python virtual environment + import_tasks: venv.yml + - block: - - name: Build python virtual environment - import_tasks: venv.yml + - name: Security group created + os_security_group: + state: "{{ state|default('present') }}" + name: "{{ algo_server_name }}-security_group" + description: AlgoVPN security group + register: os_security_group - - block: - - name: Security group created - os_security_group: - state: "{{ state|default('present') }}" - name: "{{ algo_server_name }}-security_group" - description: AlgoVPN security group - register: os_security_group + - name: Security rules created + os_security_group_rule: + state: "{{ state|default('present') }}" + security_group: "{{ os_security_group.id }}" + protocol: "{{ item.proto }}" + port_range_min: "{{ item.port_min }}" + port_range_max: "{{ item.port_max }}" + remote_ip_prefix: "{{ item.range }}" + with_items: + - { proto: tcp, port_min: 22, port_max: 22, range: 0.0.0.0/0 } + - { proto: icmp, port_min: -1, port_max: -1, range: 0.0.0.0/0 } + - { proto: udp, port_min: 4500, port_max: 4500, range: 0.0.0.0/0 } + - { proto: udp, port_min: 500, port_max: 500, range: 0.0.0.0/0 } + - { proto: udp, port_min: "{{ wireguard_port }}", port_max: "{{ wireguard_port }}", range: 0.0.0.0/0 } - - name: Security rules created - os_security_group_rule: - state: "{{ state|default('present') }}" - security_group: "{{ os_security_group.id }}" - protocol: "{{ item.proto }}" - port_range_min: "{{ item.port_min }}" - port_range_max: "{{ item.port_max }}" - remote_ip_prefix: "{{ item.range }}" - with_items: - - { proto: tcp, port_min: 22, port_max: 22, range: 0.0.0.0/0 } - - { proto: icmp, port_min: -1, port_max: -1, range: 0.0.0.0/0 } - - { proto: udp, port_min: 4500, port_max: 4500, range: 0.0.0.0/0 } - - { proto: udp, port_min: 500, port_max: 500, range: 0.0.0.0/0 } - - { proto: udp, port_min: "{{ wireguard_port }}", port_max: "{{ wireguard_port }}", range: 0.0.0.0/0 } + - name: Keypair created + os_keypair: + state: "{{ state|default('present') }}" + name: "{{ SSH_keys.comment|regex_replace('@', '_') }}" + public_key_file: "{{ SSH_keys.public }}" + register: os_keypair - - name: Keypair created - os_keypair: - state: "{{ state|default('present') }}" - name: "{{ SSH_keys.comment|regex_replace('@', '_') }}" - public_key_file: "{{ SSH_keys.public }}" - register: os_keypair + - name: Gather facts about flavors + os_flavor_facts: + ram: "{{ cloud_providers.openstack.flavor_ram }}" - - name: Gather facts about flavors - os_flavor_facts: - ram: "{{ cloud_providers.openstack.flavor_ram }}" + - name: Gather facts about images + os_image_facts: + image: "{{ cloud_providers.openstack.image }}" - - name: Gather facts about images - os_image_facts: - image: "{{ cloud_providers.openstack.image }}" + - name: Gather facts about public networks + os_networks_facts: - - name: Gather facts about public networks - os_networks_facts: + - name: Set the network as a fact + set_fact: + public_network_id: "{{ item.id }}" + when: + - item['router:external']|default(omit) + - 
item['admin_state_up']|default(omit) + - item['status'] == 'ACTIVE' + with_items: "{{ openstack_networks }}" - - name: Set the network as a fact - set_fact: - public_network_id: "{{ item.id }}" - when: - - item['router:external']|default(omit) - - item['admin_state_up']|default(omit) - - item['status'] == 'ACTIVE' - with_items: "{{ openstack_networks }}" + - name: Set facts + set_fact: + flavor_id: "{{ (openstack_flavors | sort(attribute='ram'))[0]['id'] }}" + image_id: "{{ openstack_image['id'] }}" + keypair_name: "{{ os_keypair.key.name }}" + security_group_name: "{{ os_security_group['secgroup']['name'] }}" - - name: Set facts - set_fact: - flavor_id: "{{ (openstack_flavors | sort(attribute='ram'))[0]['id'] }}" - image_id: "{{ openstack_image['id'] }}" - keypair_name: "{{ os_keypair.key.name }}" - security_group_name: "{{ os_security_group['secgroup']['name'] }}" + - name: Server created + os_server: + state: "{{ state|default('present') }}" + name: "{{ algo_server_name }}" + image: "{{ image_id }}" + flavor: "{{ flavor_id }}" + key_name: "{{ keypair_name }}" + security_groups: "{{ security_group_name }}" + nics: + - net-id: "{{ public_network_id }}" + register: os_server - - name: Server created - os_server: - state: "{{ state|default('present') }}" - name: "{{ algo_server_name }}" - image: "{{ image_id }}" - flavor: "{{ flavor_id }}" - key_name: "{{ keypair_name }}" - security_groups: "{{ security_group_name }}" - nics: - - net-id: "{{ public_network_id }}" - register: os_server - - - set_fact: - cloud_instance_ip: "{{ os_server['openstack']['public_v4'] }}" - ansible_ssh_user: ubuntu - environment: - PYTHONPATH: "{{ openstack_venv }}/lib/python2.7/site-packages/" - - rescue: - - debug: var=fail_hint - tags: always - - fail: - tags: always + - set_fact: + cloud_instance_ip: "{{ os_server['openstack']['public_v4'] }}" + ansible_ssh_user: ubuntu + environment: + PYTHONPATH: "{{ openstack_venv }}/lib/python2.7/site-packages/" diff --git a/roles/cloud-scaleway/tasks/main.yml b/roles/cloud-scaleway/tasks/main.yml index 87ec1d7..9ff5124 100644 --- a/roles/cloud-scaleway/tasks/main.yml +++ b/roles/cloud-scaleway/tasks/main.yml @@ -1,140 +1,133 @@ -- block: - - name: Include prompts - import_tasks: prompts.yml +- name: Include prompts + import_tasks: prompts.yml - - name: Set disk size - set_fact: - server_disk_size: 50000000000 +- name: Set disk size + set_fact: + server_disk_size: 50000000000 - - name: Check server size - set_fact: - server_disk_size: 25000000000 - when: cloud_providers.scaleway.size == "START1-XS" +- name: Check server size + set_fact: + server_disk_size: 25000000000 + when: cloud_providers.scaleway.size == "START1-XS" - - name: Check if server exists +- name: Check if server exists + uri: + url: "https://cp-{{ algo_region }}.scaleway.com/servers" + method: GET + headers: + Content-Type: 'application/json' + X-Auth-Token: "{{ algo_scaleway_token }}" + status_code: 200 + register: scaleway_servers + +- name: Set server id as a fact + set_fact: + server_id: "{{ item.id }}" + no_log: true + when: algo_server_name == item.name + with_items: "{{ scaleway_servers.json.servers }}" + +- name: Create a server if it doesn't exist + block: + - name: Get the organization id uri: - url: "https://cp-{{ algo_region }}.scaleway.com/servers" + url: https://account.cloud.online.net/organizations method: GET headers: Content-Type: 'application/json' X-Auth-Token: "{{ algo_scaleway_token }}" status_code: 200 - register: scaleway_servers + register: scaleway_organizations + + - name: Set 
organization id as a fact + set_fact: + organization_id: "{{ item.id }}" + no_log: true + when: algo_scaleway_org == item.name + with_items: "{{ scaleway_organizations.json.organizations }}" + + - name: Get total count of images + uri: + url: "https://cp-{{ algo_region }}.scaleway.com/images" + method: GET + headers: + Content-Type: 'application/json' + X-Auth-Token: "{{ algo_scaleway_token }}" + status_code: 200 + register: scaleway_pages + + - name: Get images + uri: + url: "https://cp-{{ algo_region }}.scaleway.com/images?per_page=100&page={{ item }}" + method: GET + headers: + Content-Type: 'application/json' + X-Auth-Token: "{{ algo_scaleway_token }}" + status_code: 200 + register: scaleway_images + with_sequence: start=1 end={{ ((scaleway_pages.x_total_count|int / 100)| round )|int }} + + - name: Set image id as a fact + include_tasks: image_facts.yml + with_items: "{{ scaleway_images['results'] }}" + loop_control: + loop_var: outer_item + + - name: Create a server + uri: + url: "https://cp-{{ algo_region }}.scaleway.com/servers/" + method: POST + headers: + Content-Type: 'application/json' + X-Auth-Token: "{{ algo_scaleway_token }}" + body: + organization: "{{ organization_id }}" + name: "{{ algo_server_name }}" + image: "{{ image_id }}" + commercial_type: "{{cloud_providers.scaleway.size }}" + enable_ipv6: true + boot_type: local + tags: + - Environment:Algo + - AUTHORIZED_KEY={{ lookup('file', SSH_keys.public)|regex_replace(' ', '_') }} + status_code: 201 + body_format: json + register: algo_instance - name: Set server id as a fact set_fact: - server_id: "{{ item.id }}" - no_log: true - when: algo_server_name == item.name - with_items: "{{ scaleway_servers.json.servers }}" + server_id: "{{ algo_instance.json.server.id }}" + when: server_id is not defined - - name: Create a server if it doesn't exist - block: - - name: Get the organization id - uri: - url: https://account.cloud.online.net/organizations - method: GET - headers: - Content-Type: 'application/json' - X-Auth-Token: "{{ algo_scaleway_token }}" - status_code: 200 - register: scaleway_organizations +- name: Power on the server + uri: + url: https://cp-{{ algo_region }}.scaleway.com/servers/{{ server_id }}/action + method: POST + headers: + Content-Type: application/json + X-Auth-Token: "{{ algo_scaleway_token }}" + body: + action: poweron + status_code: 202 + body_format: json + ignore_errors: true + no_log: true - - name: Set organization id as a fact - set_fact: - organization_id: "{{ item.id }}" - no_log: true - when: algo_scaleway_org == item.name - with_items: "{{ scaleway_organizations.json.organizations }}" +- name: Wait for the server to become running + uri: + url: "https://cp-{{ algo_region }}.scaleway.com/servers/{{ server_id }}" + method: GET + headers: + Content-Type: 'application/json' + X-Auth-Token: "{{ algo_scaleway_token }}" + status_code: 200 + until: + - algo_instance.json.server.state is defined + - algo_instance.json.server.state == "running" + retries: 20 + delay: 30 + register: algo_instance - - name: Get total count of images - uri: - url: "https://cp-{{ algo_region }}.scaleway.com/images" - method: GET - headers: - Content-Type: 'application/json' - X-Auth-Token: "{{ algo_scaleway_token }}" - status_code: 200 - register: scaleway_pages - - - name: Get images - uri: - url: "https://cp-{{ algo_region }}.scaleway.com/images?per_page=100&page={{ item }}" - method: GET - headers: - Content-Type: 'application/json' - X-Auth-Token: "{{ algo_scaleway_token }}" - status_code: 200 - register: 
scaleway_images - with_sequence: start=1 end={{ ((scaleway_pages.x_total_count|int / 100)| round )|int }} - - - name: Set image id as a fact - include_tasks: image_facts.yml - with_items: "{{ scaleway_images['results'] }}" - loop_control: - loop_var: outer_item - - - name: Create a server - uri: - url: "https://cp-{{ algo_region }}.scaleway.com/servers/" - method: POST - headers: - Content-Type: 'application/json' - X-Auth-Token: "{{ algo_scaleway_token }}" - body: - organization: "{{ organization_id }}" - name: "{{ algo_server_name }}" - image: "{{ image_id }}" - commercial_type: "{{cloud_providers.scaleway.size }}" - enable_ipv6: true - boot_type: local - tags: - - Environment:Algo - - AUTHORIZED_KEY={{ lookup('file', SSH_keys.public)|regex_replace(' ', '_') }} - status_code: 201 - body_format: json - register: algo_instance - - - name: Set server id as a fact - set_fact: - server_id: "{{ algo_instance.json.server.id }}" - when: server_id is not defined - - - name: Power on the server - uri: - url: https://cp-{{ algo_region }}.scaleway.com/servers/{{ server_id }}/action - method: POST - headers: - Content-Type: application/json - X-Auth-Token: "{{ algo_scaleway_token }}" - body: - action: poweron - status_code: 202 - body_format: json - ignore_errors: true - no_log: true - - - name: Wait for the server to become running - uri: - url: "https://cp-{{ algo_region }}.scaleway.com/servers/{{ server_id }}" - method: GET - headers: - Content-Type: 'application/json' - X-Auth-Token: "{{ algo_scaleway_token }}" - status_code: 200 - until: - - algo_instance.json.server.state is defined - - algo_instance.json.server.state == "running" - retries: 20 - delay: 30 - register: algo_instance - - - set_fact: - cloud_instance_ip: "{{ algo_instance['json']['server']['public_ip']['address'] }}" - ansible_ssh_user: root - - rescue: - - debug: var=fail_hint - tags: always - - fail: - tags: always +- set_fact: + cloud_instance_ip: "{{ algo_instance['json']['server']['public_ip']['address'] }}" + ansible_ssh_user: root diff --git a/roles/cloud-vultr/tasks/main.yml b/roles/cloud-vultr/tasks/main.yml index 78e514d..a1dfa90 100644 --- a/roles/cloud-vultr/tasks/main.yml +++ b/roles/cloud-vultr/tasks/main.yml @@ -1,3 +1,4 @@ +--- - block: - name: Include prompts import_tasks: prompts.yml @@ -29,8 +30,3 @@ environment: VULTR_API_CONFIG: "{{ algo_vultr_config }}" - rescue: - - debug: var=fail_hint - tags: always - - fail: - tags: always diff --git a/roles/common/tasks/main.yml b/roles/common/tasks/main.yml index fcb5af1..4a40088 100644 --- a/roles/common/tasks/main.yml +++ b/roles/common/tasks/main.yml @@ -1,32 +1,26 @@ --- -- block: - - name: Check the system - raw: uname -a - register: OS - tags: - - update-users +- name: Check the system + raw: uname -a + register: OS + tags: + - update-users - - include_tasks: ubuntu.yml - when: '"Ubuntu" in OS.stdout or "Linux" in OS.stdout' - tags: - - update-users +- include_tasks: ubuntu.yml + when: '"Ubuntu" in OS.stdout or "Linux" in OS.stdout' + tags: + - update-users - - include_tasks: freebsd.yml - when: '"FreeBSD" in OS.stdout' - tags: - - update-users +- include_tasks: freebsd.yml + when: '"FreeBSD" in OS.stdout' + tags: + - update-users - - name: Sysctl tuning - sysctl: name="{{ item.item }}" value="{{ item.value }}" - when: item.item != "" - with_items: - - "{{ sysctl|default([]) }}" - tags: - - always +- name: Sysctl tuning + sysctl: name="{{ item.item }}" value="{{ item.value }}" + when: item.item != "" + with_items: + - "{{ sysctl|default([]) }}" + tags: + - always 
- - meta: flush_handlers - rescue: - - debug: var=fail_hint - tags: always - - fail: - tags: always +- meta: flush_handlers diff --git a/roles/common/tasks/ubuntu.yml b/roles/common/tasks/ubuntu.yml index c49b892..08d37a3 100644 --- a/roles/common/tasks/ubuntu.yml +++ b/roles/common/tasks/ubuntu.yml @@ -97,11 +97,9 @@ - name: Install tools apt: - name: "{{ item }}" + name: "{{ tools|default([]) }}" state: present update_cache: true - with_items: - - "{{ tools|default([]) }}" - name: Install headers apt: diff --git a/roles/dns_adblocking/handlers/main.yml b/roles/dns_adblocking/handlers/main.yml index 98278ce..85cbe32 100644 --- a/roles/dns_adblocking/handlers/main.yml +++ b/roles/dns_adblocking/handlers/main.yml @@ -3,3 +3,7 @@ - name: restart apparmor service: name=apparmor state=restarted + +- name: daemon-reload + systemd: + daemon_reload: true diff --git a/roles/dns_adblocking/tasks/main.yml b/roles/dns_adblocking/tasks/main.yml index 6a44dbe..cd1a54a 100644 --- a/roles/dns_adblocking/tasks/main.yml +++ b/roles/dns_adblocking/tasks/main.yml @@ -1,52 +1,46 @@ --- -- block: - - name: Dnsmasq installed - package: name=dnsmasq +- name: Dnsmasq installed + package: name=dnsmasq - - name: The dnsmasq directory created - file: dest=/var/lib/dnsmasq state=directory mode=0755 owner=dnsmasq group=nogroup +- name: The dnsmasq directory created + file: dest=/var/lib/dnsmasq state=directory mode=0755 owner=dnsmasq group=nogroup - - include_tasks: ubuntu.yml - when: ansible_distribution == 'Debian' or ansible_distribution == 'Ubuntu' +- include_tasks: ubuntu.yml + when: ansible_distribution == 'Debian' or ansible_distribution == 'Ubuntu' - - include_tasks: freebsd.yml - when: ansible_distribution == 'FreeBSD' +- include_tasks: freebsd.yml + when: ansible_distribution == 'FreeBSD' - - name: Dnsmasq configured - template: - src: dnsmasq.conf.j2 - dest: "{{ config_prefix|default('/') }}etc/dnsmasq.conf" - notify: - - restart dnsmasq +- name: Dnsmasq configured + template: + src: dnsmasq.conf.j2 + dest: "{{ config_prefix|default('/') }}etc/dnsmasq.conf" + notify: + - restart dnsmasq - - name: Adblock script created - template: - src: adblock.sh.j2 - dest: /usr/local/sbin/adblock.sh - owner: root - group: "{{ root_group|default('root') }}" - mode: 0755 +- name: Adblock script created + template: + src: adblock.sh.j2 + dest: /usr/local/sbin/adblock.sh + owner: root + group: "{{ root_group|default('root') }}" + mode: 0755 - - name: Adblock script added to cron - cron: - name: Adblock hosts update - minute: "{{ range(0, 60) | random }}" - hour: "{{ range(0, 24) | random }}" - job: /usr/local/sbin/adblock.sh - user: root +- name: Adblock script added to cron + cron: + name: Adblock hosts update + minute: "{{ range(0, 60) | random }}" + hour: "{{ range(0, 24) | random }}" + job: /usr/local/sbin/adblock.sh + user: root - - name: Update adblock hosts - command: /usr/local/sbin/adblock.sh +- name: Update adblock hosts + command: /usr/local/sbin/adblock.sh - - meta: flush_handlers +- meta: flush_handlers - - name: Dnsmasq enabled and started - service: - name: dnsmasq - state: started - enabled: yes - rescue: - - debug: var=fail_hint - tags: always - - fail: - tags: always +- name: Dnsmasq enabled and started + service: + name: dnsmasq + state: started + enabled: yes diff --git a/roles/local/tasks/main.yml b/roles/local/tasks/main.yml index 5803cff..b690b6b 100644 --- a/roles/local/tasks/main.yml +++ b/roles/local/tasks/main.yml @@ -1,10 +1,3 @@ --- -- block: - - name: Include prompts - import_tasks: prompts.yml 
- - rescue: - - debug: var=fail_hint - tags: always - - fail: - tags: always +- name: Include prompts + import_tasks: prompts.yml diff --git a/roles/ssh_tunneling/tasks/main.yml b/roles/ssh_tunneling/tasks/main.yml index c52840f..4ea4680 100644 --- a/roles/ssh_tunneling/tasks/main.yml +++ b/roles/ssh_tunneling/tasks/main.yml @@ -1,120 +1,114 @@ --- +- name: Ensure that the sshd_config file has desired options + blockinfile: + dest: /etc/ssh/sshd_config + marker: '# {mark} ANSIBLE MANAGED BLOCK ssh_tunneling_role' + block: | + Match Group algo + AllowTcpForwarding local + AllowAgentForwarding no + AllowStreamLocalForwarding no + PermitTunnel no + X11Forwarding no + notify: + - restart ssh + +- name: Ensure that the algo group exist + group: name=algo state=present + +- name: Ensure that the jail directory exist + file: + path: /var/jail/ + state: directory + mode: 0755 + owner: root + group: "{{ root_group|default('root') }}" + - block: - - name: Ensure that the sshd_config file has desired options - blockinfile: - dest: /etc/ssh/sshd_config - marker: '# {mark} ANSIBLE MANAGED BLOCK ssh_tunneling_role' - block: | - Match Group algo - AllowTcpForwarding local - AllowAgentForwarding no - AllowStreamLocalForwarding no - PermitTunnel no - X11Forwarding no - notify: - - restart ssh + - name: Ensure that the SSH users exist + user: + name: "{{ item }}" + groups: algo + home: '/var/jail/{{ item }}' + createhome: yes + generate_ssh_key: false + shell: /bin/false + state: present + append: yes + with_items: "{{ users }}" - - name: Ensure that the algo group exist - group: name=algo state=present - - - name: Ensure that the jail directory exist + - block: + - name: Clean up the ssh-tunnel directory file: - path: /var/jail/ + dest: "{{ ssh_tunnels_config_path }}" + state: absent + when: keys_clean_all|bool == True + + - name: Ensure the config directories exist + file: + dest: "{{ ssh_tunnels_config_path }}" state: directory - mode: 0755 - owner: root - group: "{{ root_group|default('root') }}" + recurse: yes + mode: '0700' - - block: - - name: Ensure that the SSH users exist - user: - name: "{{ item }}" - groups: algo - home: '/var/jail/{{ item }}' - createhome: yes - generate_ssh_key: false - shell: /bin/false - state: present - append: yes - with_items: "{{ users }}" + - name: Check if the private keys exist + stat: + path: "{{ ssh_tunnels_config_path }}/{{ item }}.pem" + register: privatekey + with_items: "{{ users }}" - - block: - - name: Clean up the ssh-tunnel directory - file: - dest: "{{ ssh_tunnels_config_path }}" - state: absent - when: keys_clean_all|bool == True + - name: Build ssh private keys + openssl_privatekey: + path: "{{ ssh_tunnels_config_path }}/{{ item.item }}.pem" + passphrase: "{{ p12_export_password }}" + cipher: aes256 + force: false + no_log: true + when: not item.stat.exists + with_items: "{{ privatekey.results }}" + register: openssl_privatekey - - name: Ensure the config directories exist - file: - dest: "{{ ssh_tunnels_config_path }}" - state: directory - recurse: yes - mode: '0700' + - name: Build ssh public keys + openssl_publickey: + path: "{{ ssh_tunnels_config_path }}/{{ item.item.item }}.pub" + privatekey_path: "{{ ssh_tunnels_config_path }}/{{ item.item.item }}.pem" + privatekey_passphrase: "{{ p12_export_password }}" + format: OpenSSH + force: true + no_log: true + when: item.changed + with_items: "{{ openssl_privatekey.results }}" - - name: Check if the private keys exist - stat: - path: "{{ ssh_tunnels_config_path }}/{{ item }}.pem" - register: privatekey - 
with_items: "{{ users }}" + - name: Build the client ssh config + template: + src: ssh_config.j2 + dest: "{{ ssh_tunnels_config_path }}/{{ item }}.ssh_config" + mode: 0700 + with_items: "{{ users }}" + delegate_to: localhost + become: false - - name: Build ssh private keys - openssl_privatekey: - path: "{{ ssh_tunnels_config_path }}/{{ item.item }}.pem" - passphrase: "{{ p12_export_password }}" - cipher: aes256 - force: false - no_log: true - when: not item.stat.exists - with_items: "{{ privatekey.results }}" - register: openssl_privatekey + - name: The authorized keys file created + authorized_key: + user: "{{ item }}" + key: "{{ lookup('file', ssh_tunnels_config_path + '/' + item + '.pub') }}" + state: present + manage_dir: true + exclusive: true + with_items: "{{ users }}" - - name: Build ssh public keys - openssl_publickey: - path: "{{ ssh_tunnels_config_path }}/{{ item.item.item }}.pub" - privatekey_path: "{{ ssh_tunnels_config_path }}/{{ item.item.item }}.pem" - privatekey_passphrase: "{{ p12_export_password }}" - format: OpenSSH - force: true - no_log: true - when: item.changed - with_items: "{{ openssl_privatekey.results }}" + - name: Get active users + getent: + database: group + key: algo + split: ':' - - name: Build the client ssh config - template: - src: ssh_config.j2 - dest: "{{ ssh_tunnels_config_path }}/{{ item }}.ssh_config" - mode: 0700 - with_items: "{{ users }}" - delegate_to: localhost - become: false - - - name: The authorized keys file created - authorized_key: - user: "{{ item }}" - key: "{{ lookup('file', ssh_tunnels_config_path + '/' + item + '.pub') }}" - state: present - manage_dir: true - exclusive: true - with_items: "{{ users }}" - - - name: Get active users - getent: - database: group - key: algo - split: ':' - - - name: Delete non-existing users - user: - name: "{{ item }}" - state: absent - remove: yes - force: yes - when: item not in users - with_items: "{{ getent_group['algo'][2].split(',') }}" - tags: update-users - rescue: - - debug: var=fail_hint - tags: always - - fail: - tags: always + - name: Delete non-existing users + user: + name: "{{ item }}" + state: absent + remove: yes + force: yes + when: item not in users + with_items: "{{ getent_group['algo'][2].split(',') }}" + tags: update-users diff --git a/roles/strongswan/defaults/main.yml b/roles/strongswan/defaults/main.yml index b896933..de25120 100644 --- a/roles/strongswan/defaults/main.yml +++ b/roles/strongswan/defaults/main.yml @@ -1,8 +1,6 @@ --- ipsec_config_path: "configs/{{ IP_subject_alt_name }}/ipsec/" ipsec_pki_path: "{{ ipsec_config_path }}/.pki/" -strongswan_network: 10.19.48.0/24 -strongswan_network_ipv6: 'fd9d:bc11:4020::/48' strongswan_shell: /usr/sbin/nologin strongswan_home: /var/lib/strongswan BetweenClients_DROP: true diff --git a/roles/strongswan/tasks/main.yml b/roles/strongswan/tasks/main.yml index 6b9699e..e59295d 100644 --- a/roles/strongswan/tasks/main.yml +++ b/roles/strongswan/tasks/main.yml @@ -1,37 +1,31 @@ --- -- block: - - include_tasks: ubuntu.yml - when: ansible_distribution == 'Debian' or ansible_distribution == 'Ubuntu' +- include_tasks: ubuntu.yml + when: ansible_distribution == 'Debian' or ansible_distribution == 'Ubuntu' - - name: Ensure that the strongswan user exist - user: - name: strongswan - group: nogroup - shell: "{{ strongswan_shell }}" - home: "{{ strongswan_home }}" - state: present +- name: Ensure that the strongswan user exist + user: + name: strongswan + group: nogroup + shell: "{{ strongswan_shell }}" + home: "{{ strongswan_home }}" + state: 
present - - name: Install strongSwan - package: name=strongswan state=present +- name: Install strongSwan + package: name=strongswan state=present - - import_tasks: ipsec_configuration.yml - - import_tasks: openssl.yml - tags: update-users - - import_tasks: distribute_keys.yml - - import_tasks: client_configs.yml - delegate_to: localhost - become: no - tags: update-users +- import_tasks: ipsec_configuration.yml +- import_tasks: openssl.yml + tags: update-users +- import_tasks: distribute_keys.yml +- import_tasks: client_configs.yml + delegate_to: localhost + become: no + tags: update-users - - name: strongSwan started - service: - name: strongswan - state: started - enabled: true +- name: strongSwan started + service: + name: strongswan + state: started + enabled: true - - meta: flush_handlers - rescue: - - debug: var=fail_hint - tags: always - - fail: - tags: always +- meta: flush_handlers diff --git a/roles/wireguard/defaults/main.yml b/roles/wireguard/defaults/main.yml index d008336..e61c778 100644 --- a/roles/wireguard/defaults/main.yml +++ b/roles/wireguard/defaults/main.yml @@ -3,20 +3,6 @@ wireguard_PersistentKeepalive: 0 wireguard_config_path: "configs/{{ IP_subject_alt_name }}/wireguard/" wireguard_pki_path: "{{ wireguard_config_path }}/.pki/" wireguard_interface: wg0 -_wireguard_network_ipv4: - subnet: 10.19.49.0 - prefix: 24 - gateway: 10.19.49.1 - clients_range: 10.19.49 - clients_start: 2 -_wireguard_network_ipv6: - subnet: 'fd9d:bc11:4021::' - prefix: 48 - gateway: 'fd9d:bc11:4021::1' - clients_range: 'fd9d:bc11:4021::' - clients_start: 2 -wireguard_network_ipv4: "{{ _wireguard_network_ipv4['subnet'] }}/{{ _wireguard_network_ipv4['prefix'] }}" -wireguard_network_ipv6: "{{ _wireguard_network_ipv6['subnet'] }}/{{ _wireguard_network_ipv6['prefix'] }}" keys_clean_all: false wireguard_dns_servers: >- {% if local_dns|default(false)|bool or dns_encryption|default(false)|bool == true %} diff --git a/roles/wireguard/tasks/main.yml b/roles/wireguard/tasks/main.yml index 235eaa4..4434d09 100644 --- a/roles/wireguard/tasks/main.yml +++ b/roles/wireguard/tasks/main.yml @@ -75,7 +75,6 @@ notify: restart wireguard tags: update-users - - name: WireGuard enabled and started service: name: "{{ service_name }}" diff --git a/server.yml b/server.yml index 1ab3834..4032683 100644 --- a/server.yml +++ b/server.yml @@ -2,84 +2,90 @@ - name: Configure the server and install required software hosts: vpn-host gather_facts: false - tags: algo become: true vars_files: - config.cfg - - roles: - - role: common - tags: common - - role: dns_encryption - when: dns_encryption - tags: dns_encryption - - role: dns_adblocking - when: algo_local_dns - tags: dns_adblocking - - role: wireguard - when: wireguard_enabled - tags: wireguard - - role: strongswan - when: ipsec_enabled - tags: ipsec - - role: ssh_tunneling - when: algo_ssh_tunneling - tags: ssh_tunneling - - post_tasks: + tasks: - block: - - name: Delete the CA key - local_action: - module: file - path: "{{ ipsec_pki_path }}/private/cakey.pem" - state: absent - become: false - when: - - ipsec_enabled - - not algo_store_cakey + - import_role: + name: common + tags: common - - name: Dump the configuration - local_action: - module: copy - dest: "configs/{{ IP_subject_alt_name }}/.config.yml" - content: | - server: {{ 'localhost' if inventory_hostname == 'localhost' else inventory_hostname }} - server_user: {{ ansible_ssh_user }} - {% if algo_provider != "local" %} - ansible_ssh_private_key_file: {{ ansible_ssh_private_key_file|default(SSH_keys.private) }} - 
-            algo_provider: {{ algo_provider }}
-            algo_server_name: {{ algo_server_name }}
-            algo_ondemand_cellular: {{ algo_ondemand_cellular }}
-            algo_ondemand_wifi: {{ algo_ondemand_wifi }}
-            algo_ondemand_wifi_exclude: {{ algo_ondemand_wifi_exclude }}
-            algo_local_dns: {{ algo_local_dns }}
-            algo_ssh_tunneling: {{ algo_ssh_tunneling }}
-            algo_windows: {{ algo_windows }}
-            algo_store_cakey: {{ algo_store_cakey }}
-            IP_subject_alt_name: {{ IP_subject_alt_name }}
-            ipsec_enabled: {{ ipsec_enabled }}
-            wireguard_enabled: {{ wireguard_enabled }}
-            {% if tests|default(false)|bool %}ca_password: {{ CA_password }}{% endif %}
-        become: false
+      - import_role:
+          name: dns_encryption
+        when: dns_encryption
+        tags: dns_encryption

-      - name: Create a symlink if deploying to localhost
-        file:
-          src: "{{ IP_subject_alt_name }}"
-          dest: configs/localhost
-          state: link
-          force: true
-        when: inventory_hostname == 'localhost'
+      - import_role:
+          name: dns_adblocking
+        when: algo_local_dns
+        tags: dns_adblocking

-      - debug:
-          msg:
-            - "{{ congrats.common.split('\n') }}"
-            - " {{ congrats.p12_pass if algo_ssh_tunneling or ipsec_enabled else '' }}"
-            - " {{ congrats.ca_key_pass if algo_store_cakey and ipsec_enabled else '' }}"
-            - " {{ congrats.ssh_access if algo_provider != 'local' else ''}}"
-        tags: always
+      - import_role:
+          name: wireguard
+        when: wireguard_enabled
+        tags: wireguard
+
+      - import_role:
+          name: strongswan
+        when: ipsec_enabled
+        tags: ipsec
+
+      - import_role:
+          name: ssh_tunneling
+        when: algo_ssh_tunneling
+        tags: ssh_tunneling
+
+      - block:
+        - name: Delete the CA key
+          local_action:
+            module: file
+            path: "{{ ipsec_pki_path }}/private/cakey.pem"
+            state: absent
+          become: false
+          when:
+            - ipsec_enabled
+            - not algo_store_cakey
+
+        - name: Dump the configuration
+          local_action:
+            module: copy
+            dest: "configs/{{ IP_subject_alt_name }}/.config.yml"
+            content: |
+              server: {{ 'localhost' if inventory_hostname == 'localhost' else inventory_hostname }}
+              server_user: {{ ansible_ssh_user }}
+              {% if algo_provider != "local" %}
+              ansible_ssh_private_key_file: {{ ansible_ssh_private_key_file|default(SSH_keys.private) }}
+              {% endif %}
+              algo_provider: {{ algo_provider }}
+              algo_server_name: {{ algo_server_name }}
+              algo_ondemand_cellular: {{ algo_ondemand_cellular }}
+              algo_ondemand_wifi: {{ algo_ondemand_wifi }}
+              algo_ondemand_wifi_exclude: {{ algo_ondemand_wifi_exclude }}
+              algo_local_dns: {{ algo_local_dns }}
+              algo_ssh_tunneling: {{ algo_ssh_tunneling }}
+              algo_windows: {{ algo_windows }}
+              algo_store_cakey: {{ algo_store_cakey }}
+              IP_subject_alt_name: {{ IP_subject_alt_name }}
+              ipsec_enabled: {{ ipsec_enabled }}
+              wireguard_enabled: {{ wireguard_enabled }}
+              {% if tests|default(false)|bool %}ca_password: {{ CA_password }}{% endif %}
+          become: false
+
+        - name: Create a symlink if deploying to localhost
+          file:
+            src: "{{ IP_subject_alt_name }}"
+            dest: configs/localhost
+            state: link
+            force: true
+          when: inventory_hostname == 'localhost'
+
+        - debug:
+            msg:
+              - "{{ congrats.common.split('\n') }}"
+              - " {{ congrats.p12_pass if algo_ssh_tunneling or ipsec_enabled else '' }}"
+              - " {{ congrats.ca_key_pass if algo_store_cakey and ipsec_enabled else '' }}"
+              - " {{ congrats.ssh_access if algo_provider != 'local' else ''}}"
+          tags: always
       rescue:
-        - debug: var=fail_hint
-          tags: always
-        - fail:
-          tags: always
+        - include_tasks: playbooks/rescue.yml
diff --git a/users.yml b/users.yml
index 3f74294..4347325 100644
--- a/users.yml
+++ b/users.yml
@@ -47,10 +47,7 @@
           ansible_python_interpreter: "/usr/bin/python3"
           CA_password: "{{ CA_password }}"
       rescue:
-        - debug: var=fail_hint
-          tags: always
-        - fail:
-          tags: always
+        - include_tasks: playbooks/rescue.yml

 - name: User management
   hosts: vpn-host
@@ -60,37 +57,32 @@
     - config.cfg
     - "configs/{{ inventory_hostname }}/.config.yml"

-  pre_tasks:
+  tasks:
     - block:
       - name: Local pre-tasks
         import_tasks: playbooks/cloud-pre.yml
         become: false
-      rescue:
-        - debug: var=fail_hint
-          tags: always
-        - fail:
-          tags: always
-  roles:
-    - role: common
-    - role: wireguard
-      tags: [ 'vpn', 'wireguard' ]
-      when: wireguard_enabled
-    - role: strongswan
-      when: ipsec_enabled
-      tags: ipsec
-    - role: ssh_tunneling
-      when: algo_ssh_tunneling
+      - import_role:
+          name: common

-  post_tasks:
-    - block:
-      - debug:
-          msg:
-            - "{{ congrats.common.split('\n') }}"
-            - " {% if p12.changed %}{{ congrats.p12_pass }}{% endif %}"
-        tags: always
+      - import_role:
+          name: wireguard
+        when: wireguard_enabled
+
+      - import_role:
+          name: strongswan
+        when: ipsec_enabled
+        tags: ipsec
+
+      - import_role:
+          name: ssh_tunneling
+        when: algo_ssh_tunneling
+
+      - debug:
+          msg:
+            - "{{ congrats.common.split('\n') }}"
+            - " {% if p12.changed %}{{ congrats.p12_pass }}{% endif %}"
+        tags: always
       rescue:
-        - debug: var=fail_hint
-          tags: always
-        - fail:
-          tags: always
+        - include_tasks: playbooks/rescue.yml