Enable Promtail
parent 36c46f5d07
commit d86c834b37

homelab/promtail.yml (new file, 134 lines)
@@ -0,0 +1,134 @@
+--- # Daemonset.yaml
+apiVersion: apps/v1
+kind: DaemonSet
+metadata:
+  name: promtail-daemonset
+spec:
+  selector:
+    matchLabels:
+      name: promtail
+  template:
+    metadata:
+      labels:
+        name: promtail
+    spec:
+      serviceAccount: promtail-serviceaccount
+      containers:
+      - name: promtail-container
+        image: grafana/promtail
+        args:
+        - -config.file=/etc/promtail/promtail.yaml
+        env:
+        - name: 'HOSTNAME' # needed when using kubernetes_sd_configs
+          valueFrom:
+            fieldRef:
+              fieldPath: 'spec.nodeName'
+        volumeMounts:
+        - name: logs
+          mountPath: /var/log
+        - name: promtail-config
+          mountPath: /etc/promtail
+        - mountPath: /var/lib/docker/containers
+          name: varlibdockercontainers
+          readOnly: true
+      volumes:
+      - name: logs
+        hostPath:
+          path: /var/log
+      - name: varlibdockercontainers
+        hostPath:
+          path: /var/lib/docker/containers
+      - name: promtail-config
+        configMap:
+          name: promtail-config
+--- # configmap.yaml
+apiVersion: v1
+kind: ConfigMap
+metadata:
+  name: promtail-config
+data:
+  promtail.yaml: |
+    server:
+      http_listen_port: 9080
+      grpc_listen_port: 0
+
+    clients:
+    - url: http://monitoring:3030/loki/api/v1/push
+
+    positions:
+      filename: /tmp/positions.yaml
+    target_config:
+      sync_period: 10s
+    scrape_configs:
+    - job_name: pod-logs
+      kubernetes_sd_configs:
+      - role: pod
+      pipeline_stages:
+      - docker: {}
+      relabel_configs:
+      - source_labels:
+        - __meta_kubernetes_pod_node_name
+        target_label: __host__
+      - action: labelmap
+        regex: __meta_kubernetes_pod_label_(.+)
+      - action: replace
+        replacement: $1
+        separator: /
+        source_labels:
+        - __meta_kubernetes_namespace
+        - __meta_kubernetes_pod_name
+        target_label: job
+      - action: replace
+        source_labels:
+        - __meta_kubernetes_namespace
+        target_label: namespace
+      - action: replace
+        source_labels:
+        - __meta_kubernetes_pod_name
+        target_label: pod
+      - action: replace
+        source_labels:
+        - __meta_kubernetes_pod_container_name
+        target_label: container
+      - replacement: /var/log/pods/*$1/*.log
+        separator: /
+        source_labels:
+        - __meta_kubernetes_pod_uid
+        - __meta_kubernetes_pod_container_name
+        target_label: __path__
+
+--- # Clusterrole.yaml
+apiVersion: rbac.authorization.k8s.io/v1
+kind: ClusterRole
+metadata:
+  name: promtail-clusterrole
+rules:
+  - apiGroups: [""]
+    resources:
+    - nodes
+    - services
+    - pods
+    verbs:
+    - get
+    - watch
+    - list
+
+--- # ServiceAccount.yaml
+apiVersion: v1
+kind: ServiceAccount
+metadata:
+  name: promtail-serviceaccount
+
+--- # Rolebinding.yaml
+apiVersion: rbac.authorization.k8s.io/v1
+kind: ClusterRoleBinding
+metadata:
+  name: promtail-clusterrolebinding
+subjects:
+  - kind: ServiceAccount
+    name: promtail-serviceaccount
+    namespace: default
+roleRef:
+  kind: ClusterRole
+  name: promtail-clusterrole
+  apiGroup: rbac.authorization.k8s.io
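(Note on the pod-logs job above: the final relabel rule joins __meta_kubernetes_pod_uid and __meta_kubernetes_pod_container_name with "/" and substitutes the result into /var/log/pods/*$1/*.log. For an illustrative pod UID abc-123 running a container named app, Promtail would tail /var/log/pods/*abc-123/app/*.log.)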
@@ -71,6 +71,35 @@
   i18n.defaultLocale = "en_GB.utf8";

   services = {
+    promtail = {
+      enable = true;
+      configuration = {
+        server = {
+          http_listen_port = 3031;
+          grpc_listen_port = 0;
+        };
+        positions = {
+          filename = "/tmp/positions.yaml";
+        };
+        clients = [{
+          url = "http://monitoring:3030/loki/api/v1/push";
+        }];
+        scrape_configs = [{
+          job_name = "journal";
+          journal = {
+            max_age = "12h";
+            labels = {
+              job = "systemd-journal";
+              host = "london";
+            };
+          };
+          relabel_configs = [{
+            source_labels = [ "__journal__systemd_unit" ];
+            target_label = "unit";
+          }];
+        }];
+      };
+    };
     fwupd.enable = true;
     syncthing = {
       enable = true;
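The push URL above assumes the monitoring host accepts traffic on Loki's port 3030, which the next hunk configures. If the monitoring host runs the NixOS firewall, a minimal sketch like the following would be needed in its configuration; it is not part of this commit:

  # Hypothetical: allow other hosts' promtail to reach Loki's HTTP port.
  networking.firewall.allowedTCPPorts = [ 3030 ];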
@@ -98,6 +98,109 @@
       http_addr = "127.0.0.1";
     };
   };
+  services.loki = {
+    enable = true;
+    configuration = {
+      server.http_listen_port = 3030;
+      auth_enabled = false;
+
+      ingester = {
+        lifecycler = {
+          address = "127.0.0.1";
+          ring = {
+            kvstore = {
+              store = "inmemory";
+            };
+            replication_factor = 1;
+          };
+        };
+        chunk_idle_period = "1h";
+        max_chunk_age = "1h";
+        chunk_target_size = 999999;
+        chunk_retain_period = "30s";
+        max_transfer_retries = 0;
+      };
+
+      schema_config = {
+        configs = [{
+          from = "2022-06-06";
+          store = "boltdb-shipper";
+          object_store = "filesystem";
+          schema = "v11";
+          index = {
+            prefix = "index_";
+            period = "24h";
+          };
+        }];
+      };
+
+      storage_config = {
+        boltdb_shipper = {
+          active_index_directory = "/var/lib/loki/boltdb-shipper-active";
+          cache_location = "/var/lib/loki/boltdb-shipper-cache";
+          cache_ttl = "24h";
+          shared_store = "filesystem";
+        };
+
+        filesystem = {
+          directory = "/var/lib/loki/chunks";
+        };
+      };
+
+      limits_config = {
+        reject_old_samples = true;
+        reject_old_samples_max_age = "168h";
+      };
+
+      chunk_store_config = {
+        max_look_back_period = "0s";
+      };
+
+      table_manager = {
+        retention_deletes_enabled = false;
+        retention_period = "0s";
+      };
+
+      compactor = {
+        working_directory = "/var/lib/loki";
+        shared_store = "filesystem";
+        compactor_ring = {
+          kvstore = {
+            store = "inmemory";
+          };
+        };
+      };
+    };
+  };
+  services.promtail = {
+    enable = true;
+    configuration = {
+      server = {
+        http_listen_port = 3031;
+        grpc_listen_port = 0;
+      };
+      positions = {
+        filename = "/tmp/positions.yaml";
+      };
+      clients = [{
+        url = "http://127.0.0.1:${toString config.services.loki.configuration.server.http_listen_port}/loki/api/v1/push";
+      }];
+      scrape_configs = [{
+        job_name = "journal";
+        journal = {
+          max_age = "12h";
+          labels = {
+            job = "systemd-journal";
+            host = "monitoring";
+          };
+        };
+        relabel_configs = [{
+          source_labels = [ "__journal__systemd_unit" ];
+          target_label = "unit";
+        }];
+      }];
+    };
+  };
   services.alertmanager-ntfy = {
     enable = true;
     settings = {
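The journal scrape blocks on london and vancouver differ only in the host label. A hypothetical refactor, not part of this commit (file name and layout are illustrative), could factor them into one parameterized module:

# Hypothetical modules/promtail.nix; used per host as:
#   imports = [ (import ./modules/promtail.nix { host = "vancouver"; }) ];
{ host }:
{
  services.promtail = {
    enable = true;
    configuration = {
      server = {
        http_listen_port = 3031;
        grpc_listen_port = 0;
      };
      positions.filename = "/tmp/positions.yaml";
      clients = [{ url = "http://monitoring:3030/loki/api/v1/push"; }];
      scrape_configs = [{
        job_name = "journal";
        journal = {
          max_age = "12h";
          # `inherit host` sets the host label from the module argument.
          labels = { job = "systemd-journal"; inherit host; };
        };
        relabel_configs = [{
          source_labels = [ "__journal__systemd_unit" ];
          target_label = "unit";
        }];
      }];
    };
  };
}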
@@ -47,6 +47,35 @@
   };

   services = {
+    promtail = {
+      enable = true;
+      configuration = {
+        server = {
+          http_listen_port = 3031;
+          grpc_listen_port = 0;
+        };
+        positions = {
+          filename = "/tmp/positions.yaml";
+        };
+        clients = [{
+          url = "http://monitoring:3030/loki/api/v1/push";
+        }];
+        scrape_configs = [{
+          job_name = "journal";
+          journal = {
+            max_age = "12h";
+            labels = {
+              job = "systemd-journal";
+              host = "vancouver";
+            };
+          };
+          relabel_configs = [{
+            source_labels = [ "__journal__systemd_unit" ];
+            target_label = "unit";
+          }];
+        }];
+      };
+    };
     restic = {
       backups = {
         "gsimmer" = {