Compare commits
8 Commits
ADDBlackbo
...
monitoring
| Author | SHA1 | Date | |
|---|---|---|---|
| 44519e842b | |||
| 27e692c1ed | |||
| 338e0b0f19 | |||
| 83178d9a0d | |||
| 0d85bd53aa | |||
| 3392c84c65 | |||
| 0dca30868b | |||
| d22bbd3dba |
35
playbooks/add-postgres-to-prometheus.yml
Normal file
35
playbooks/add-postgres-to-prometheus.yml
Normal file
@ -0,0 +1,35 @@
|
|||||||
|
---
# Adds a postgres_exporter scrape job to the Prometheus server's config.
# NOTE(review): the monitoring host is a hard-coded IP — consider an
# inventory group instead.
- name: Add PostgreSQL exporter to Prometheus
  hosts: 192.168.0.105
  become: true

  tasks:
    # NOTE(review): the first metric_relabel rule below overwrites the static
    # instance: 'app2' label with the raw host:port address — confirm intended.
    # "|2" pins a two-space indent on the block's first line so the inserted
    # job nests under scrape_configs in prometheus.yml.
    - name: Add postgres_exporter scrape config
      blockinfile:
        path: /etc/prometheus/prometheus.yml
        insertafter: '  # Nginx metrics via nginx-prometheus-exporter'
        block: |2
            # PostgreSQL metrics via postgres_exporter
            - job_name: 'postgres-app2'
              scrape_interval: 15s
              scrape_timeout: 10s
              static_configs:
                - targets: ['192.168.0.111:9187']
                  labels:
                    instance: 'app2'
                    service: 'postgresql'
                    job: 'postgres'
              metric_relabel_configs:
                - source_labels: [__address__]
                  target_label: instance
                - source_labels: [__address__]
                  # BUGFIX: was '\\d+' — Prometheus (RE2) would read that as a
                  # literal backslash followed by "d"; '\d+' matches the port digits.
                  regex: '([^:]+):\d+'
                  replacement: '${1}'
                  target_label: host
        marker: "# {mark} ANSIBLE MANAGED BLOCK - postgres_exporter"
        backup: true

    # Reload (not restart) so in-flight scrapes are not interrupted; the
    # prometheus unit must define ExecReload for this to apply the new config.
    - name: Reload Prometheus
      systemd:
        name: prometheus
        state: reloaded
|
||||||
6
playbooks/deploy-alertmanager.yml
Normal file
6
playbooks/deploy-alertmanager.yml
Normal file
@ -0,0 +1,6 @@
|
|||||||
|
---
# Thin wrapper play: applies the alertmanager role to the monitoring node.
- name: Deploy Alertmanager
  hosts: 192.168.0.112  # app3
  become: true
  roles:
    - alertmanager
|
||||||
6
playbooks/deploy-cadvisor.yml
Normal file
6
playbooks/deploy-cadvisor.yml
Normal file
@ -0,0 +1,6 @@
|
|||||||
|
---
# Thin wrapper play: applies the cadvisor role to one specific host.
- name: Deploy cAdvisor on App3
  hosts: 192.168.0.112  # target this specific host
  become: true
  roles:
    - cadvisor
|
||||||
6
playbooks/deploy-loki.yml
Normal file
6
playbooks/deploy-loki.yml
Normal file
@ -0,0 +1,6 @@
|
|||||||
|
---
# Thin wrapper play: applies the loki role to the log-aggregation node.
- name: Deploy Loki
  hosts: 192.168.0.112  # app3
  become: true
  roles:
    - loki
|
||||||
6
playbooks/deploy-node-red.yml
Normal file
6
playbooks/deploy-node-red.yml
Normal file
@ -0,0 +1,6 @@
|
|||||||
|
---
# Thin wrapper play: applies the node-red role to the automation node.
- name: Deploy Node-RED
  hosts: 192.168.0.112  # app3
  become: true
  roles:
    - node-red
|
||||||
12
playbooks/deploy-postgres-app2.yml
Normal file
12
playbooks/deploy-postgres-app2.yml
Normal file
@ -0,0 +1,12 @@
|
|||||||
|
---
# Installs PostgreSQL and then the postgres_exporter metrics agent on app2.
# Role order matters: the exporter connects to the database the first role
# creates.
- name: Deploy PostgreSQL and Postgres Exporter on App2
  hosts: 192.168.0.111
  become: true        # was "yes" — canonical YAML booleans (yamllint truthy)
  gather_facts: true

  roles:
    - role: postgresql
      tags: postgresql

    - role: postgres_exporter
      tags: postgres_exporter
|
||||||
6
playbooks/deploy-promtail.yml
Normal file
6
playbooks/deploy-promtail.yml
Normal file
@ -0,0 +1,6 @@
|
|||||||
|
---
# Thin wrapper play: Promtail goes on every host so all logs are shipped.
- name: Deploy Promtail on all nodes
  hosts: all  # install Promtail on every host to collect logs
  become: true
  roles:
    - promtail
|
||||||
12
roles/alertmanager/defaults/main.yml
Normal file
12
roles/alertmanager/defaults/main.yml
Normal file
@ -0,0 +1,12 @@
|
|||||||
|
---
# Alertmanager settings
alertmanager_port: 9093
alertmanager_config_path: /etc/alertmanager

# Email notifications (to be filled in later)
smtp_host: localhost
smtp_from: alertmanager@example.com
smtp_to: admin@example.com

# Webhook for testing
webhook_url: "http://localhost:9099"
|
||||||
33
roles/alertmanager/tasks/main.yml
Normal file
33
roles/alertmanager/tasks/main.yml
Normal file
@ -0,0 +1,33 @@
|
|||||||
|
---
# Deploys Alertmanager as a Docker container with a templated config.
- name: Create Alertmanager directories
  file:
    path: "{{ item }}"
    state: directory
    owner: root
    group: root
    mode: '0755'
  loop:
    - "{{ alertmanager_config_path }}"
    - /var/lib/alertmanager
  tags: alertmanager

- name: Deploy Alertmanager configuration
  template:
    src: alertmanager.yml.j2
    dest: "{{ alertmanager_config_path }}/alertmanager.yml"
    owner: root
    group: root
    mode: '0644'
  # Capture whether the config changed so the container is restarted below;
  # otherwise a running container keeps serving the old configuration.
  register: alertmanager_config
  tags: alertmanager

- name: Run Alertmanager container
  docker_container:
    name: alertmanager
    # NOTE(review): "latest" is unpinned — consider a fixed version tag.
    image: prom/alertmanager:latest
    state: started
    restart_policy: always
    # BUGFIX: force a restart when the templated config changed.
    restart: "{{ alertmanager_config.changed | default(false) }}"
    ports:
      - "{{ alertmanager_port }}:9093"
    volumes:
      # Hardening: the config is only read by Alertmanager — mount read-only.
      - "{{ alertmanager_config_path }}/alertmanager.yml:/etc/alertmanager/alertmanager.yml:ro"
      - /var/lib/alertmanager:/alertmanager
    command: --config.file=/etc/alertmanager/alertmanager.yml --storage.path=/alertmanager
  tags: alertmanager
|
||||||
52
roles/alertmanager/templates/alertmanager.yml.j2
Normal file
52
roles/alertmanager/templates/alertmanager.yml.j2
Normal file
@ -0,0 +1,52 @@
|
|||||||
|
global:
  # Notification settings (can be configured later)
  # smtp_smarthost: 'smtp.gmail.com:587'
  # smtp_from: 'alertmanager@example.com'
  # smtp_auth_username: 'user@gmail.com'
  # smtp_auth_password: 'password'
  # smtp_require_tls: true

route:
  # Root route — every alert is delivered to Node-RED
  receiver: 'node-red-webhook'
  group_by: ['alertname', 'severity']
  group_wait: 10s
  group_interval: 10s
  repeat_interval: 1h

  # Nested routes
  routes:
    - match:
        severity: critical
      receiver: 'node-red-critical'
      group_wait: 5s
      repeat_interval: 10m

    - match:
        severity: warning
      receiver: 'node-red-warning'
      group_wait: 30s
      repeat_interval: 2h

receivers:
  - name: 'node-red-webhook'
    webhook_configs:
      - url: 'http://node-red:1880/webhook/alertmanager'
        send_resolved: true

  - name: 'node-red-critical'
    webhook_configs:
      - url: 'http://node-red:1880/webhook/critical'
        send_resolved: true

  - name: 'node-red-warning'
    webhook_configs:
      - url: 'http://node-red:1880/webhook/warning'
        send_resolved: true

# NOTE(review): match / source_match / target_match are deprecated in recent
# Alertmanager releases in favour of "matchers" — confirm the deployed version.
inhibit_rules:
  - source_match:
      severity: 'critical'
    target_match:
      severity: 'warning'
    equal: ['alertname', 'instance']
|
||||||
@ -1,6 +1,9 @@
|
|||||||
---
# Default port for cAdvisor
cadvisor_port: 8081

# Network configuration
# Alternative: use host networking to avoid host-port conflicts.
cadvisor_network_mode: "host"

# Alternative: use different port if default is busy
cadvisor_fallback_ports: [8081, 8082, 8083, 8084]
# NOTE(review): the role's tasks probe for a free port themselves and do not
# reference cadvisor_port, cadvisor_network_mode, or cadvisor_fallback_ports
# as shown — confirm these variables are still needed.
|
||||||
|
|||||||
@ -1,51 +1,43 @@
|
|||||||
---
# Deploys cAdvisor in Docker, picking the first free host port in 8080-8085.
# NOTE(review): the probe is racy (TOCTOU) — another process may take the
# port between the check and the container start.
- name: Check for available port for cAdvisor
  shell: |
    for port in 8080 8081 8082 8083 8084 8085; do
      if ! ss -tulpn | grep -q ":${port} "; then
        echo "${port}"
        break
      fi
    done
  args:
    executable: /bin/bash
  register: available_port
  changed_when: false
  tags: cadvisor

- name: Ensure Docker container for cAdvisor is running
  docker_container:
    name: cadvisor
    image: gcr.io/cadvisor/cadvisor:latest
    state: started
    restart_policy: always
    ports:
      # BUGFIX: default('8084') only covers an *undefined* variable; when no
      # free port is found stdout is "" and the mapping became ":8080".
      # The boolean second argument makes the default cover empty strings too.
      - "{{ available_port.stdout | default('8084', true) }}:8080"
    volumes:
      - "/:/rootfs:ro"
      - "/var/run:/var/run:ro"
      - "/sys:/sys:ro"
      - "/var/lib/docker/:/var/lib/docker:ro"
      - "/dev/disk/:/dev/disk:ro"
      - "/var/run/docker.sock:/var/run/docker.sock:ro"
    privileged: true
    devices:
      - "/dev/kmsg:/dev/kmsg"
    cgroup_parent: "docker.slice"
  tags: cadvisor

- name: Display cAdvisor access info
  debug:
    msg: |
      cAdvisor is available at:
      - Web UI: http://{{ inventory_hostname }}:{{ available_port.stdout | default('8084', true) }}
      - Metrics: http://{{ inventory_hostname }}:{{ available_port.stdout | default('8084', true) }}/metrics
  tags: cadvisor
|
||||||
|
|||||||
9
roles/loki/defaults/main.yml
Normal file
9
roles/loki/defaults/main.yml
Normal file
@ -0,0 +1,9 @@
|
|||||||
|
---
# Default port for Loki
loki_port: 3100

# Storage configuration
loki_storage_path: /var/lib/loki

# Retention period
loki_retention_period: 720h  # 30 days
# NOTE(review): loki_retention_period is not referenced by the role's config
# template as shown — confirm retention is actually enforced.
|
||||||
33
roles/loki/tasks/main.yml
Normal file
33
roles/loki/tasks/main.yml
Normal file
@ -0,0 +1,33 @@
|
|||||||
|
---
# Deploys Loki as a Docker container with a templated config file.
- name: Create Loki directories
  file:
    path: "{{ item }}"
    state: directory
    owner: root
    group: root
    mode: '0755'
  loop:
    - /etc/loki
    # CONSISTENCY FIX: use the role default instead of a hard-coded path
    # (default value is /var/lib/loki, so behavior is unchanged).
    - "{{ loki_storage_path }}"
  tags: loki

- name: Deploy Loki configuration
  template:
    src: loki-config.yml.j2
    dest: /etc/loki/loki-config.yml
    owner: root
    group: root
    mode: '0644'
  tags: loki

- name: Run Loki container
  docker_container:
    name: loki
    image: grafana/loki:latest
    state: started
    restart_policy: always
    ports:
      # CONSISTENCY FIX: host port from the role default (was hard-coded 3100).
      - "{{ loki_port }}:3100"
    volumes:
      # Hardening: the config is only read by Loki — mount read-only.
      - /etc/loki/loki-config.yml:/etc/loki/loki-config.yml:ro
      - "{{ loki_storage_path }}:/loki"
    command: -config.file=/etc/loki/loki-config.yml
  tags: loki
|
||||||
33
roles/loki/templates/loki-config.yml.j2
Normal file
33
roles/loki/templates/loki-config.yml.j2
Normal file
@ -0,0 +1,33 @@
|
|||||||
|
auth_enabled: false

server:
  http_listen_port: 3100
  grpc_listen_port: 9096

common:
  path_prefix: /tmp/loki  # path switched to /tmp for testing
  # NOTE(review): /tmp/loki lives inside the container, but the role mounts
  # the host's loki storage dir at /loki — data under /tmp is lost whenever
  # the container is recreated. Confirm whether /loki should be used instead.
  storage:
    filesystem:
      chunks_directory: /tmp/loki/chunks
      rules_directory: /tmp/loki/rules
  replication_factor: 1
  ring:
    instance_addr: 127.0.0.1
    kvstore:
      store: inmemory

limits_config:
  allow_structured_metadata: false

schema_config:
  configs:
    - from: 2020-10-24
      store: boltdb-shipper
      object_store: filesystem
      schema: v11
      index:
        prefix: index_
        period: 24h

ruler:
  alertmanager_url: http://alertmanager:9093
|
||||||
9
roles/node-red/defaults/main.yml
Normal file
9
roles/node-red/defaults/main.yml
Normal file
@ -0,0 +1,9 @@
|
|||||||
|
---
# Node-RED settings
node_red_port: 1880
node_red_data_dir: /var/lib/node-red
node_red_image: nodered/node-red:latest

# Persistence settings
node_red_persist_flows: true
# NOTE(review): node_red_persist_flows is not referenced by the role's tasks
# as shown — confirm it is still needed.
node_red_enable_projects: false
|
||||||
32
roles/node-red/tasks/main.yml
Normal file
32
roles/node-red/tasks/main.yml
Normal file
@ -0,0 +1,32 @@
|
|||||||
|
---
# Deploys Node-RED as a Docker container with a persistent /data volume.
- name: Create Node-RED data directory with correct permissions
  file:
    path: "{{ node_red_data_dir }}"
    state: directory
    owner: 1000  # the Node-RED container runs as UID 1000
    group: 1000
    mode: '0755'

- name: Run Node-RED container
  docker_container:
    name: node-red
    image: "{{ node_red_image }}"
    state: started
    restart_policy: always
    ports:
      - "{{ node_red_port }}:1880"
    volumes:
      - "{{ node_red_data_dir }}:/data"
    user: "1000:1000"  # run as the same user that owns the data directory
    env:
      NODE_RED_ENABLE_PROJECTS: "{{ 'true' if node_red_enable_projects else 'false' }}"
      TZ: "UTC"
  tags: node-red

- name: Display Node-RED access info
  debug:
    msg: |
      Node-RED is available at:
      - Web UI: http://{{ inventory_hostname }}:{{ node_red_port }}
      - API: http://{{ inventory_hostname }}:{{ node_red_port }}/red/api
  tags: node-red
|
||||||
12
roles/postgres_exporter/defaults/main.yml
Normal file
12
roles/postgres_exporter/defaults/main.yml
Normal file
@ -0,0 +1,12 @@
|
|||||||
|
---
# Postgres Exporter
postgres_exporter_version: "0.15.0"
postgres_exporter_port: 9187
postgres_exporter_user: "postgres_exporter"
# NOTE(review): plaintext credential committed to VCS — move to Ansible Vault.
postgres_exporter_password: "exporterpassword123"

# Connection settings
postgres_exporter_data_source_name: "user={{ postgres_exporter_user }} password={{ postgres_exporter_password }} host=localhost port=5432 dbname=postgres sslmode=disable"

# Systemd service
postgres_exporter_service_name: "postgres_exporter"
|
||||||
94
roles/postgres_exporter/tasks/main.yml
Normal file
94
roles/postgres_exporter/tasks/main.yml
Normal file
@ -0,0 +1,94 @@
|
|||||||
|
---
# Installs postgres_exporter from the official release tarball and runs it
# as a hardened systemd service.
- name: Install required packages
  apt:
    name:
      - wget
      - tar
    state: present
    update_cache: true
  tags: postgres_exporter

- name: Create postgres_exporter user
  user:
    name: postgres_exporter
    system: true
    shell: /bin/false
    home: /nonexistent
    comment: "Postgres Exporter Service User"
  tags: postgres_exporter

- name: Download Postgres Exporter
  get_url:
    url: "https://github.com/prometheus-community/postgres_exporter/releases/download/v{{ postgres_exporter_version }}/postgres_exporter-{{ postgres_exporter_version }}.linux-amd64.tar.gz"
    dest: "/tmp/postgres_exporter-{{ postgres_exporter_version }}.tar.gz"
    timeout: 30
    # SECURITY FIX: removed "validate_certs: no" — never disable TLS
    # verification when downloading an executable.
  tags: postgres_exporter

- name: Extract Postgres Exporter
  unarchive:
    src: "/tmp/postgres_exporter-{{ postgres_exporter_version }}.tar.gz"
    dest: "/tmp/"
    remote_src: true
    creates: "/tmp/postgres_exporter-{{ postgres_exporter_version }}.linux-amd64"
  tags: postgres_exporter

- name: Install Postgres Exporter binary
  copy:
    src: "/tmp/postgres_exporter-{{ postgres_exporter_version }}.linux-amd64/postgres_exporter"
    dest: "/usr/local/bin/postgres_exporter"
    owner: postgres_exporter
    group: postgres_exporter
    mode: '0755'
    remote_src: true
  tags: postgres_exporter

- name: Create systemd service
  template:
    src: postgres_exporter.service.j2
    dest: /etc/systemd/system/{{ postgres_exporter_service_name }}.service
    owner: root
    group: root
    mode: '0644'
  # Capture whether the unit changed so a running service gets restarted below.
  register: exporter_unit
  tags: postgres_exporter

- name: Clean up temp files
  file:
    path: "/tmp/postgres_exporter-{{ postgres_exporter_version }}.tar.gz"
    state: absent
  tags: postgres_exporter

- name: Clean up extracted directory
  file:
    path: "/tmp/postgres_exporter-{{ postgres_exporter_version }}.linux-amd64"
    state: absent
  tags: postgres_exporter

- name: Reload systemd
  systemd:
    daemon_reload: true
  tags: postgres_exporter

- name: Enable and start Postgres Exporter
  systemd:
    name: "{{ postgres_exporter_service_name }}"
    enabled: true
    state: started
    daemon_reload: true
  tags: postgres_exporter

# BUGFIX: "state: started" is a no-op on an already-running service, so unit
# file changes were never applied — restart explicitly when the unit changed.
- name: Restart Postgres Exporter if the unit file changed
  systemd:
    name: "{{ postgres_exporter_service_name }}"
    state: restarted
  when: exporter_unit.changed
  tags: postgres_exporter

- name: Configure UFW for Postgres Exporter
  ufw:
    rule: allow
    port: "{{ postgres_exporter_port }}"
    proto: tcp
    comment: "Postgres Exporter metrics"
  tags: postgres_exporter

- name: Verify Postgres Exporter is running
  wait_for:
    port: "{{ postgres_exporter_port }}"
    host: "{{ ansible_host }}"
    delay: 3
    timeout: 60
  tags: postgres_exporter
|
||||||
@ -0,0 +1,16 @@
|
|||||||
|
[Unit]
Description=Postgres Exporter
# Start after the database; "Wants" keeps the exporter alive even if
# postgresql.service is absent or fails.
After=network.target postgresql.service
Wants=postgresql.service

[Service]
Type=simple
User=postgres_exporter
Group=postgres_exporter
# systemd strips the surrounding quotes; they keep the space-separated DSN
# intact as a single value.
Environment=DATA_SOURCE_NAME="{{ postgres_exporter_data_source_name }}"
ExecStart=/usr/local/bin/postgres_exporter --web.listen-address=:{{ postgres_exporter_port }}
Restart=always
RestartSec=5

[Install]
WantedBy=multi-user.target
|
||||||
21
roles/postgresql/defaults/main.yml
Normal file
21
roles/postgresql/defaults/main.yml
Normal file
@ -0,0 +1,21 @@
|
|||||||
|
---
# PostgreSQL
postgresql_version: "17"
postgresql_port: 5432
postgresql_listen_addresses: "*"
postgresql_data_dir: "/var/lib/postgresql/{{ postgresql_version }}/main"

# Database configuration
postgresql_databases:
  - name: testdb
    owner: testuser

postgresql_users:
  - name: testuser
    # NOTE(review): plaintext credential committed to VCS — move to Ansible Vault.
    password: "testpassword123"
    databases: [testdb]
    privileges: ["ALL"]

# Postgres exporter user (for metrics collection)
postgres_exporter_user: "postgres_exporter"
postgres_exporter_password: "exporterpassword123"  # NOTE(review): vault this too
|
||||||
121
roles/postgresql/tasks/main.yml
Normal file
121
roles/postgresql/tasks/main.yml
Normal file
@ -0,0 +1,121 @@
|
|||||||
|
---
# Installs PostgreSQL from the PGDG repository, opens it to the LAN, and
# creates the application and monitoring users.
- name: Install required packages for PostgreSQL installation
  apt:
    name:
      - ca-certificates
      - curl
      - gnupg
      - lsb-release
    state: present
    update_cache: true
  tags: postgresql

- name: Create PostgreSQL repository keyring directory
  file:
    path: /etc/apt/keyrings
    state: directory
    mode: '0755'
  tags: postgresql

- name: Download and install PostgreSQL GPG key
  shell: |
    curl -fsSL https://www.postgresql.org/media/keys/ACCC4CF8.asc | gpg --dearmor -o /etc/apt/keyrings/postgresql.gpg
    chmod 644 /etc/apt/keyrings/postgresql.gpg
  args:
    creates: /etc/apt/keyrings/postgresql.gpg
  tags: postgresql

- name: Add PostgreSQL repository
  apt_repository:
    repo: "deb [signed-by=/etc/apt/keyrings/postgresql.gpg] http://apt.postgresql.org/pub/repos/apt {{ ansible_distribution_release }}-pgdg main"
    state: present
    update_cache: true
  tags: postgresql

- name: Install PostgreSQL
  apt:
    name:
      - postgresql-{{ postgresql_version }}
      - postgresql-contrib-{{ postgresql_version }}
      - postgresql-client-{{ postgresql_version }}
    state: present
    update_cache: true
  tags: postgresql

- name: Ensure PostgreSQL service is started and enabled
  service:
    # BUGFIX: was hard-coded "postgresql@17-main" — derive from the role var.
    name: "postgresql@{{ postgresql_version }}-main"
    state: started
    enabled: true
  tags: postgresql

- name: Configure PostgreSQL listen addresses
  lineinfile:
    path: "/etc/postgresql/{{ postgresql_version }}/main/postgresql.conf"
    # BUGFIX: was "^listen_addresses[[:space:]]*=" — POSIX bracket classes are
    # not supported by Python's re module (lineinfile), so the pattern never
    # matched and the line was always appended at EOF.
    regexp: '^listen_addresses\s*='
    line: "listen_addresses = '{{ postgresql_listen_addresses }}'"
    backup: true
  register: pg_conf_change
  tags: postgresql

- name: Configure PostgreSQL authentication
  lineinfile:
    path: "/etc/postgresql/{{ postgresql_version }}/main/pg_hba.conf"
    line: "host all all 192.168.0.0/24 md5"
    insertafter: "^# IPv4 local connections:"
    backup: true
  register: pg_hba_change
  tags: postgresql

# BUGFIX: the original task had a duplicate "name:" key (invalid YAML; most
# parsers silently keep the last value) and used "state: reloaded" —
# listen_addresses only takes effect on a full restart. Restart only when one
# of the config files actually changed.
- name: Restart PostgreSQL to apply configuration changes
  service:
    name: "postgresql@{{ postgresql_version }}-main"
    state: restarted
  when: pg_conf_change.changed or pg_hba_change.changed
  tags: postgresql

- name: Create PostgreSQL users and databases
  become: true
  become_user: postgres
  community.postgresql.postgresql_user:
    name: "{{ item.name }}"
    password: "{{ item.password }}"
    state: present
  loop: "{{ postgresql_users }}"
  tags: postgresql

- name: Create PostgreSQL databases
  become: true
  become_user: postgres
  community.postgresql.postgresql_db:
    name: "{{ item.name }}"
    owner: "{{ item.owner }}"
    state: present
  loop: "{{ postgresql_databases }}"
  tags: postgresql

- name: Create postgres_exporter user for monitoring
  become: true
  become_user: postgres
  community.postgresql.postgresql_user:
    name: "{{ postgres_exporter_user }}"
    password: "{{ postgres_exporter_password }}"
    state: present
  tags: postgresql

- name: Grant permissions to postgres_exporter user
  become: true
  become_user: postgres
  community.postgresql.postgresql_privs:
    database: postgres
    state: present
    privs: CONNECT
    type: database
    roles: "{{ postgres_exporter_user }}"
  tags: postgresql

- name: Configure UFW for PostgreSQL
  ufw:
    rule: allow
    port: "{{ postgresql_port }}"
    proto: tcp
    comment: "PostgreSQL"
  tags: postgresql
|
||||||
7
roles/promtail/defaults/main.yml
Normal file
7
roles/promtail/defaults/main.yml
Normal file
@ -0,0 +1,7 @@
|
|||||||
|
---
# Loki connection
loki_host: 192.168.0.112
loki_port: 3100

# Promtail settings
promtail_port: 9080
|
||||||
31
roles/promtail/tasks/main.yml
Normal file
31
roles/promtail/tasks/main.yml
Normal file
@ -0,0 +1,31 @@
|
|||||||
|
---
# Deploys Promtail in Docker with host networking so it can reach local
# services and read host logs.
- name: Create Promtail directories
  file:
    path: /etc/promtail
    state: directory
    owner: root
    group: root
    mode: '0755'

- name: Deploy Promtail configuration
  template:
    src: promtail-config.yml.j2
    dest: /etc/promtail/promtail-config.yml
    owner: root
    group: root
    mode: '0644'

- name: Run Promtail container (using host network)
  docker_container:
    name: promtail
    image: grafana/promtail:latest
    state: started
    restart_policy: always
    network_mode: host  # key change: share the host network namespace
    volumes:
      - /var/log:/var/log:ro
      - /var/lib/docker/containers:/var/lib/docker/containers:ro
      # Hardening: Promtail only reads its config — mount read-only.
      - /etc/promtail/promtail-config.yml:/etc/promtail/config.yml:ro
    command: -config.file=/etc/promtail/config.yml
    pid_mode: host
  tags: promtail
|
||||||
28
roles/promtail/templates/promtail-config.yml.j2
Normal file
28
roles/promtail/templates/promtail-config.yml.j2
Normal file
@ -0,0 +1,28 @@
|
|||||||
|
server:
  http_listen_port: 9080
  grpc_listen_port: 0

positions:
  # NOTE(review): /tmp is wiped on container recreate, so positions reset and
  # old log lines are re-shipped — consider a persistent path.
  filename: /tmp/positions.yaml

clients:
  # BUGFIX: was hard-coded http://localhost:3100 — the promtail role is
  # deployed to ALL hosts, so on every host other than the Loki node the push
  # target was dead. Use the role defaults (loki_host / loki_port) instead;
  # on the Loki node itself the rendered URL is equivalent.
  - url: http://{{ loki_host }}:{{ loki_port }}/loki/api/v1/push

scrape_configs:
  - job_name: system
    static_configs:
      - targets:
          - localhost
        labels:
          job: varlogs
          __path__: /var/log/*log
          host: "{{ inventory_hostname }}"

  - job_name: docker
    static_configs:
      - targets:
          - localhost
        labels:
          job: docker
          __path__: /var/lib/docker/containers/*/*log
          host: "{{ inventory_hostname }}"
|
||||||
Reference in New Issue
Block a user