Update scrape intervals

parent 41d8f18563
commit b9e7228520

7 changed files with 10 additions and 8 deletions
@@ -109,7 +109,7 @@ in
         {"__address__" = "127.0.0.1:${toString value.port}", "instance" = constants.hostname, "job" = "${name}"},
       ]
 
-      scrape_interval = "1m"
+      scrape_interval = "15s"
 
      forward_to = [otelcol.receiver.prometheus.default.receiver]
    }
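The forward_to line above points at an OTLP conversion stage that is not part of this diff. A minimal sketch of what that receiving side could look like, assuming the "default" component labels and a local Prometheus OTLP endpoint (both are assumptions, not shown in this commit):

    // Sketch only: converts the scraped Prometheus samples to OTLP metrics.
    otelcol.receiver.prometheus "default" {
      output {
        metrics = [otelcol.exporter.otlphttp.default.input]
      }
    }

    otelcol.exporter.otlphttp "default" {
      client {
        // Assumed target: Prometheus ingests OTLP under /api/v1/otlp when
        // started with --web.enable-otlp-receiver (the flag added further below).
        endpoint = "http://127.0.0.1:9090/api/v1/otlp"
      }
    }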
@@ -50,7 +50,7 @@ prometheus.exporter.unix "integrations_node_exporter" {
 
 // Define how to scrape metrics from the node_exporter
 prometheus.scrape "integrations_node_exporter" {
-  scrape_interval = "1m"
+  scrape_interval = "15s"
   // Use the targets with labels from the discovery.relabel component
   targets = discovery.relabel.integrations_node_exporter.output
   // Send the scraped metrics to the relabeling component
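The targets line above consumes a discovery.relabel stage that sits outside this hunk. A minimal sketch of that stage, assuming a single illustrative rule that attaches the host name as the instance label (the rule itself is an assumption):

    // Sketch only: feeds relabeled node_exporter targets to the scrape above.
    discovery.relabel "integrations_node_exporter" {
      targets = prometheus.exporter.unix.integrations_node_exporter.targets

      rule {
        // Illustrative rule: label each target with the local host name.
        target_label = "instance"
        replacement  = constants.hostname
      }
    }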
@@ -185,7 +185,7 @@ in
     environment.etc."alloy/forgejo_prometheus.alloy" = {
       text = ''
         prometheus.scrape "forgejo_exporter" {
-          scrape_interval = "1m"
+          scrape_interval = "15s"
           targets = [
             {
               "__address__" = "127.0.0.1:${toString config.services.forgejo.settings.server.HTTP_PORT}",
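For the forgejo_exporter scrape above to return data, Forgejo's built-in metrics endpoint has to be switched on; this diff only touches the scrape side. A sketch of the assumed companion setting in the same NixOS config:

    # Sketch only: enables Forgejo's /metrics endpoint on HTTP_PORT;
    # this setting is an assumption and does not appear in this commit.
    services.forgejo.settings.metrics.ENABLED = true;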
@@ -91,7 +91,7 @@ in
     environment.etc."alloy/dovecot_prometheus.alloy" = {
       text = ''
         prometheus.scrape "dovecot_exporter" {
-          scrape_interval = "1m"
+          scrape_interval = "15s"
           targets = [
             {
               "__address__" = "127.0.0.1:${toString dovecotPromPort}",
@@ -17,4 +17,4 @@
   labels:
     severity: critical
   annotations:
-    summary: "Nginx VHost {{ $labels.vhost }} on {{ $labels.instance }} is generating many internal server errors over 1 hour"
+    summary: "Nginx VHost {{ $labels.vhost }} on {{ $labels.instance }} is generating many internal server errors over 30 minutes"
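Only the summary string changes in this hunk; presumably the rule's evaluation window moved from 1h to 30m to match. A hedged sketch of the full rule around these lines — the alert name, expression, metric, and threshold are all assumptions, only the labels/annotations lines come from this commit:

    - alert: NginxVHostServerErrors
      # Sketch: metric name and threshold are illustrative assumptions.
      expr: sum by (vhost, instance) (increase(nginx_http_response_count_total{status=~"5.."}[30m])) > 10
      labels:
        severity: critical
      annotations:
        summary: "Nginx VHost {{ $labels.vhost }} on {{ $labels.instance }} is generating many internal server errors over 30 minutes"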
@@ -161,7 +161,10 @@ in
     services.prometheus = {
       enable = true;
       listenAddress = "127.0.0.1";
-      extraFlags = [ "--web.enable-otlp-receiver" ];
+      extraFlags = [
+        "--web.enable-otlp-receiver"
+        "--storage.tsdb.retention.time=15d"
+      ];
       # alertmanager.enable = true;
       rules = [
         ''
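The retention flag added here can also be expressed through the NixOS module itself; a sketch of the assumed equivalent, using the services.prometheus.retentionTime option, which renders the same --storage.tsdb.retention.time flag:

    services.prometheus = {
      enable = true;
      listenAddress = "127.0.0.1";
      # Assumed equivalent of --storage.tsdb.retention.time=15d, via the
      # module option rather than a raw flag.
      retentionTime = "15d";
      extraFlags = [ "--web.enable-otlp-receiver" ];
    };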
@@ -224,7 +227,7 @@ in
       compactor = {
         retention_enabled = true;
         compaction_interval = "24h";
-        retention_delete_delay = "24h";
+        retention_delete_delay = "15d";
         delete_request_store = "filesystem";
         working_directory = "${config.services.loki.dataDir}/retention";
       };
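In Loki, retention_delete_delay only defers the physical deletion of chunks the compactor has already marked; the retention window itself is normally governed by limits_config.retention_period. A sketch of that companion setting, assuming it sits in the same services.loki.configuration (the 15d value mirrors the Prometheus retention above and is an assumption):

    services.loki.configuration.limits_config = {
      # Sketch only: the actual retention window enforced by the compactor;
      # not part of this commit, value assumed to mirror Prometheus' 15d.
      retention_period = "15d";
    };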
@@ -1,7 +1,6 @@
 {
   pkgs,
   lib,
-  config,
   ...
 }:
 {