Nomad install migrated to zip+systemd
aurelienmaury committed Jul 26, 2024
1 parent 65fae60 commit f17562f
Showing 29 changed files with 388 additions and 283 deletions.
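The Nomad role changes that give this commit its title are among the diffs not rendered on this page. For orientation, a zip+systemd install generally means fetching the official release archive, unpacking the binary, and managing the agent through a systemd unit. The sketch below is a hedged illustration only: the hs_nomad_version variable, the /etc/nomad.d config path, and the unit contents are assumptions, not the role's actual interface.

- name: "Download the Nomad release archive (illustrative)"
  ansible.builtin.get_url:
    url: "https://releases.hashicorp.com/nomad/{{ hs_nomad_version }}/nomad_{{ hs_nomad_version }}_linux_amd64.zip"
    dest: "/tmp/nomad_{{ hs_nomad_version }}.zip"
    mode: "0644"

- name: "Unpack the Nomad binary into /usr/local/bin (illustrative)"
  ansible.builtin.unarchive:
    src: "/tmp/nomad_{{ hs_nomad_version }}.zip"
    dest: "/usr/local/bin"
    remote_src: true

- name: "Install a systemd unit for Nomad (illustrative)"
  ansible.builtin.copy:
    dest: "/etc/systemd/system/nomad.service"
    mode: "0644"
    content: |
      [Unit]
      Description=HashiCorp Nomad
      Wants=network-online.target
      After=network-online.target

      [Service]
      ExecStart=/usr/local/bin/nomad agent -config /etc/nomad.d
      Restart=on-failure

      [Install]
      WantedBy=multi-user.target

- name: "Enable and start the Nomad service (illustrative)"
  ansible.builtin.systemd:
    name: "nomad"
    enabled: true
    state: "started"
    daemon_reload: true
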
2 changes: 2 additions & 0 deletions TODO.md
@@ -19,9 +19,11 @@ ROADMAP SEPTEMBRE:
  * complete offline tests - August
  * decide on a stable version between September 2024 and January 2025

* playbook: links to the GitHub repo to dive into the code

ROADMAP 1.0 (wood-dragon):

* drop skopeo
* backup/restore playbooks at each tier.
* version-upgrade playbooks that preserve data.
* test coverage including multi, mono, and offline.
16 changes: 16 additions & 0 deletions playbooks/14_core_letsencrypt.yml
@@ -1,4 +1,20 @@
---
- name: "Load collection vars"
hosts: "hashistack_sre[0]"
become: true
gather_facts: true

tasks:
- name: "Load common_vars role variables"
import_role:
name: "common_vars"
tags:
- always

- debug:
msg: "{{ acme_domains }}"


- name: "Import rtnp.galaxie_clans playbook for letsencrypt management"
import_playbook: rtnp.galaxie_clans.acme_rotate_certificates
vars:
27 changes: 12 additions & 15 deletions playbooks/init.yml
@@ -23,7 +23,6 @@
- hs_workspace | length > 1
- hs_parent_domain is defined
- hs_parent_domain | length > 1
- hs_archi in ['multi', 'mono']

- name: Variable cooking
set_fact:
@@ -39,31 +38,28 @@
- "{{ _output_dir }}/group_vars/hashistack"
- "{{ _output_dir }}/group_vars/hashistack/secrets"
- "{{ _output_dir }}/group_vars/hashistack_sre"
- "{{ _output_dir }}/host_vars"
loop_control:
loop_var: _current_dir

- name: "[MONO] Create directory for inventory"
file:
path: "{{ _output_dir }}/host_vars/{{ hs_workspace }}-{{ hs_archi }}"
state: directory
recurse: true
when:
- hs_archi == 'mono'

- name: Create global variables
copy:
dest: "{{ _output_dir }}/group_vars/all.yml"
mode: 0640
content: |-
hs_archi: "{{ hs_archi }}"
hs_workspace: "{{ hs_workspace }}"
hs_parent_domain: "{{ hs_parent_domain }}"
{%- raw %}
hs_stage0_archi: "{{ hs_archi }}"
hs_stage0_instance_type: "PLAY2-MICRO"
hs_stage0_instance_image: "debian_bookworm"
# hs_stage0_instance_image: "rockylinux_9"
hs_public_domain: >-
{{ hs_workspace | regex_replace('_', '-') }}.{{ hs_parent_domain }}
public_domain: "{{ hs_public_domain }}"
# Atomic configuration variables for scw_one flavor.
hs_infra_scw_one_instance_type_master: "DEV1-S"
hs_infra_scw_one_instance_count_master: 3
hs_infra_scw_one_instance_type_minion: "DEV1-S"
hs_infra_scw_one_instance_count_minion: 0
hs_infra_scw_one_instance_image_all: "debian_bookworm"
# hs_infra_scw_one_instance_image_all: "rockylinux_9"
{% endraw -%}
- name: Create inventory files
@@ -115,6 +111,7 @@
content: |-
[defaults]
stdout_callback = community.general.unixy
# stdout_callback = ansible.posix.debug
display_skipped_hosts = false
display_ok_hosts = false
# callbacks_enabled = ansible.posix.profile_tasks,ansible.posix.profile_tasks
1 change: 1 addition & 0 deletions requirements.txt
@@ -21,6 +21,7 @@ cryptography==42.0.4
distro==1.8.0
docker==6.1.3
docutils==0.19
dnspython
enrich==1.2.7
filelock==3.9.0
furo==2022.12.7
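
dnspython enters the requirements as a new runtime dependency. One common consumer in Ansible content is the community.general.dig lookup, which refuses to run without it; the task below is purely illustrative and is not necessarily how this collection uses the library.

- name: "Resolve the workspace public domain (illustrative)"
  ansible.builtin.debug:
    msg: "{{ lookup('community.general.dig', hs_public_domain, 'qtype=A') }}"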
2 changes: 2 additions & 0 deletions roles/common_vars/defaults/main.yml
@@ -39,6 +39,8 @@ hs_workspace_ssh_private_key_file_relative_path: "{{ hs_workspace_secrets_dir_re
hs_workspace_ssh_public_key_file: "{{ hs_workspace_ssh_private_key_file }}.pub"
hs_workspace_ssh_public_key_file_relative_path: "{{ hs_workspace_ssh_private_key_file_relative_path }}.pub"

hs_local_cache_dir: "{{ hs_workspace_root }}"

hs_workspace_group: "hashistack"
hs_workspace_cluster_group: "hashistack_cluster"
hs_workspace_masters_group: "hashistack_masters"
27 changes: 9 additions & 18 deletions roles/consul/vars/main.yml
@@ -1,31 +1,22 @@
---

__hs_consul_precedence_marks:
- "{{ ansible_distribution | lower }}_{{ ansible_distribution_major_version }}_{{ ansible_architecture | lower }}"
- "{{ ansible_distribution | lower }}_{{ ansible_architecture | lower }}"
- "{{ ansible_distribution | lower }}_{{ ansible_distribution_major_version }}"
- "{{ ansible_distribution | lower }}"
- "{{ ansible_os_family | lower }}"

__hs_role_configure_precedence:
- "{{ __hs_consul_precedence_marks[0] }}/_configure.yml"
- "{{ __hs_consul_precedence_marks[1] }}/_configure.yml"
- "{{ __hs_consul_precedence_marks[2] }}/_configure.yml"
- "{{ __hs_consul_precedence_marks[3] }}/_configure.yml"
- "{{ __hs_consul_precedence_marks[4] }}/_configure.yml"

__hs_consul_is_master: >-
{{
inventory_hostname in groups[hs_consul_inventory_masters_group]
hs_consul_inventory_masters_group is defined
and groups[hs_consul_inventory_masters_group] is defined
and inventory_hostname in groups[hs_consul_inventory_masters_group]
}}
__hs_consul_is_minion: >-
{{
inventory_hostname in groups[hs_consul_inventory_minions_group]
hs_consul_inventory_minions_group is defined
and groups[hs_consul_inventory_minions_group] is defined
and inventory_hostname in groups[hs_consul_inventory_minions_group]
}}
__hs_consul_is_multi_nodes: >-
{{
groups[hs_consul_inventory_masters_group] | length > 1
hs_consul_inventory_masters_group is defined
and groups[hs_consul_inventory_masters_group] is defined
and (groups[hs_consul_inventory_masters_group] | length > 1)
}}
__hs_consul_tls_dir: "/etc/ssl/private"
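
The point of the added "is defined" guards is that these boolean facts stay evaluable even when the masters or minions group is absent from the inventory (for example in a mono layout). A hedged sketch of how such facts are typically consumed downstream; the task file names are invented for illustration, and the "| bool" cast matters because the folded Jinja expression renders to a string:

- name: "Configure Consul server agents (illustrative)"
  ansible.builtin.include_tasks: "server.yml"
  when: __hs_consul_is_master | bool

- name: "Configure Consul client agents (illustrative)"
  ansible.builtin.include_tasks: "client.yml"
  when: __hs_consul_is_minion | bool
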
8 changes: 7 additions & 1 deletion roles/infra/defaults/main.yml
@@ -25,14 +25,20 @@ hs_infra_local_expected_dirs:
- "{{ hs_infra_local_secrets_dir }}"
- "{{ hs_infra_local_hs_group_vars_dir }}"

# Atomic configuration variables for outscale_one flavor.
# Atomic configuration variables for scw_one flavor.
hs_infra_scw_one_instance_type_master: "DEV1-S"
hs_infra_scw_one_instance_count_master: 3
hs_infra_scw_one_instance_type_minion: "DEV1-S"
hs_infra_scw_one_instance_count_minion: 0
hs_infra_scw_one_instance_image_all: "debian_bookworm"

hs_infra_flavor_params:
scw_one:
parent_domain: "{{ hs_parent_domain }}"
ssh_public_key_file: "{{ hs_infra_private_key_file }}.pub"
instance_type_master: "{{ hs_infra_scw_one_instance_type_master }}"
instance_count_master: "{{ hs_infra_scw_one_instance_count_master }}"
instance_count_minion: "{{ hs_infra_scw_one_instance_count_minion }}"
instance_type_minion: "{{ hs_infra_scw_one_instance_type_minion }}"
instance_image_all: "{{ hs_infra_scw_one_instance_image_all }}"
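
Because the scw_one flavor now reads these atomic variables, a workspace can resize its footprint from its own group_vars without touching the flavor mapping. An illustrative override (the values are examples only):

# group_vars/all.yml (illustrative values)
hs_infra_scw_one_instance_type_master: "DEV1-M"
hs_infra_scw_one_instance_count_minion: 2
hs_infra_scw_one_instance_image_all: "debian_bookworm"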

55 changes: 17 additions & 38 deletions roles/infra/files/scw_one/hosts.tf
@@ -3,53 +3,32 @@ resource "scaleway_account_ssh_key" "default" {
public_key = trimspace(file(local.ssh_public_key_file))
}

resource "scaleway_instance_server" "sre" {
name = "${local.name_prefix}-sre"
type = local.instance_type_master
image = local.instance_image_all
routed_ip_enabled = true
}
resource "scaleway_instance_private_nic" "sre" {
server_id = scaleway_instance_server.sre.id
private_network_id = scaleway_vpc_private_network.internal.id
}

# --------
module "sre" {
source = "./node"

resource "scaleway_instance_server" "master_1" {
name = "${local.name_prefix}-master-1"
type = local.instance_type_master
image = local.instance_image_all
routed_ip_enabled = true
}
resource "scaleway_instance_private_nic" "master_1" {
server_id = scaleway_instance_server.master_1.id
node_name = "${local.name_prefix}-sre"
node_type = local.instance_type_master
node_image = local.instance_image_all
private_network_id = scaleway_vpc_private_network.internal.id
}

# --------
module "masters" {
source = "./node"
count = var.instance_count_master

resource "scaleway_instance_server" "master_2" {
name = "${local.name_prefix}-master-2"
type = local.instance_type_master
image = local.instance_image_all
routed_ip_enabled = true
}
resource "scaleway_instance_private_nic" "master_2" {
server_id = scaleway_instance_server.master_2.id
node_name = "${local.name_prefix}-master-${count.index + 1}"
node_type = local.instance_type_master
node_image = local.instance_image_all
private_network_id = scaleway_vpc_private_network.internal.id
}

# --------
module "minions" {
source = "./node"
count = var.instance_count_minion

resource "scaleway_instance_server" "master_3" {
name = "${local.name_prefix}-master-3"
type = local.instance_type_master
image = local.instance_image_all
routed_ip_enabled = true
}
resource "scaleway_instance_private_nic" "master_3" {
server_id = scaleway_instance_server.master_3.id
node_name = "${local.name_prefix}-minion-${count.index + 1}"
node_type = local.instance_type_minion
node_image = local.instance_image_all
private_network_id = scaleway_vpc_private_network.internal.id
}

5 changes: 1 addition & 4 deletions roles/infra/files/scw_one/main.tf
Original file line number Diff line number Diff line change
@@ -1,17 +1,14 @@
locals {
name_prefix = terraform.workspace
instance_type_master = var.instance_type_master
instance_type_minion = var.instance_type_minion
instance_image_all = var.instance_image_all
ssh_public_key_name = "${local.name_prefix}-service"
ssh_public_key_file = var.ssh_public_key_file
instance_default_ssh_user = var.instance_default_ssh_user
internal_cidr = "10.42.42.0/24"

edge_ip = "10.42.42.2"
sre_ip = "10.42.42.2"
master_1_ip = "10.42.42.3"
master_2_ip = "10.42.42.4"
master_3_ip = "10.42.42.5"
public_gw_type = "VPC-GW-S"
private_network_name = "${local.name_prefix}.hs"
parent_domain = var.parent_domain
57 changes: 57 additions & 0 deletions roles/infra/files/scw_one/node/main.tf
@@ -0,0 +1,57 @@
terraform {
required_providers {
scaleway = {
source = "scaleway/scaleway"
version = "2.41.3"
}
}
required_version = ">= 1.7"
}

# ----

variable "node_name" {
type = string
}

variable "node_type" {
type = string
}

variable "node_image" {
type = string
}

variable "private_network_id" {
type = string
}

# ----

resource "scaleway_instance_server" "node" {
name = var.node_name
type = var.node_type
image = var.node_image
routed_ip_enabled = true
}

resource "scaleway_instance_private_nic" "node" {
server_id = scaleway_instance_server.node.id
private_network_id = var.private_network_id
}

# ----

data "scaleway_ipam_ip" "node" {
private_network_id = var.private_network_id
type = "ipv4"
resource {
type = "instance_private_nic"
id = scaleway_instance_private_nic.node.id
}
}

output "node_ipv4" {
value = data.scaleway_ipam_ip.node.address
}

41 changes: 4 additions & 37 deletions roles/infra/files/scw_one/output.tf
@@ -1,27 +1,3 @@
data "scaleway_ipam_ip" "master_1" {
private_network_id = scaleway_vpc_private_network.internal.id
type = "ipv4"
resource {
type = "instance_private_nic"
id = scaleway_instance_private_nic.master_1.id
}
}
data "scaleway_ipam_ip" "master_2" {
private_network_id = scaleway_vpc_private_network.internal.id
type = "ipv4"
resource {
type = "instance_private_nic"
id = scaleway_instance_private_nic.master_2.id
}
}
data "scaleway_ipam_ip" "master_3" {
private_network_id = scaleway_vpc_private_network.internal.id
type = "ipv4"
resource {
type = "instance_private_nic"
id = scaleway_instance_private_nic.master_3.id
}
}

output "default_ssh_user" {
value = local.instance_default_ssh_user
@@ -44,24 +20,15 @@ output "private_network_cidr" {
}

output "masters_ipv4" {
value = [
data.scaleway_ipam_ip.master_1.address,
data.scaleway_ipam_ip.master_2.address,
data.scaleway_ipam_ip.master_3.address,
]
value = module.masters.*.node_ipv4
}

data "scaleway_ipam_ip" "sre" {
private_network_id = scaleway_vpc_private_network.internal.id
type = "ipv4"
resource {
type = "instance_private_nic"
id = scaleway_instance_private_nic.sre.id
}
output "minions_ipv4" {
value = module.minions.*.node_ipv4
}

output "sre_ipv4" {
value = data.scaleway_ipam_ip.sre.address
value = module.sre.node_ipv4
}

data "scaleway_ipam_ip" "edge" {