Skip to content

Commit

Permalink
Merge branch 'main' into cmp_clarify_job
Browse files Browse the repository at this point in the history
  • Loading branch information
clayton-cornell authored Nov 1, 2023
2 parents 82405c3 + 59b7b69 commit 0877ea3
Show file tree
Hide file tree
Showing 15 changed files with 264 additions and 55 deletions.
6 changes: 3 additions & 3 deletions .github/workflows/update-make-docs.yml
Original file line number Diff line number Diff line change
Expand Up @@ -12,16 +12,16 @@ jobs:
- name: Update procedure
if: github.repository != 'grafana/writers-toolkit'
run: |
BRANCH=update-make-docs
git checkout -b "${BRANCH}"
curl -s -Lo docs/docs.mk https://raw.githubusercontent.com/grafana/writers-toolkit/main/docs/docs.mk
curl -s -Lo docs/make-docs https://raw.githubusercontent.com/grafana/writers-toolkit/main/docs/make-docs
if git diff --exit-code; then exit 0; fi
BRANCH="$(date +%Y-%m-%d)/update-make-docs"
git checkout -b "${BRANCH}"
git add .
git config --local user.email "[email protected]"
git config --local user.name "grafanabot"
git commit -m "Update \`make docs\` procedure"
git push -v origin "refs/heads/${BRANCH}"
gh pr create --fill
gh pr create --fill || true
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
2 changes: 2 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -54,6 +54,8 @@ Main (unreleased)

- Added support for python profiling to `pyroscope.ebpf` component. (@korniltsev)

- Windows Flow Installer: Add `/CONFIG`, `/DISABLEPROFILING`, and `/DISABLEREPORTING` flags. (@jkroepke)

- Add queueing logs remote write client for `loki.write` when WAL is enabled. (@thepalbi)

- New Grafana Agent Flow components:
Expand Down
2 changes: 1 addition & 1 deletion Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -137,7 +137,7 @@ PROPAGATE_VARS := \

GO_ENV := GOOS=$(GOOS) GOARCH=$(GOARCH) GOARM=$(GOARM) CGO_ENABLED=$(CGO_ENABLED)

VERSION ?= $(shell ./tools/image-tag)
VERSION ?= $(shell bash ./tools/image-tag)
GIT_REVISION := $(shell git rev-parse --short HEAD)
GIT_BRANCH := $(shell git rev-parse --abbrev-ref HEAD)
VPREFIX := github.com/grafana/agent/pkg/build
Expand Down
22 changes: 11 additions & 11 deletions converter/internal/staticconvert/internal/build/agent_exporter.go
Original file line number Diff line number Diff line change
@@ -1,26 +1,26 @@
package build

import (
"fmt"

"github.com/grafana/agent/component/discovery"
"github.com/grafana/agent/component/prometheus/exporter/agent"
"github.com/grafana/agent/converter/internal/common"
agent_exporter "github.com/grafana/agent/pkg/integrations/agent"
agent_exporter_v2 "github.com/grafana/agent/pkg/integrations/v2/agent"
)

// appendAgentExporter converts a v1 agent integration config into a
// `prometheus.exporter.agent` block and returns discovery exports that
// reference the component's targets.
//
// Block emission and the targets-expression construction are delegated to
// appendExporterBlock so the v1 and v2 agent integrations stay consistent;
// the previous inline AppendBlock/NewDiscoveryExports code was an
// unreachable duplicate of that helper and has been removed.
func (b *IntegrationsConfigBuilder) appendAgentExporter(config *agent_exporter.Config) discovery.Exports {
	args := toAgentExporter(config)
	return b.appendExporterBlock(args, config.Name(), "agent")
}

// toAgentExporter maps a v1 agent integration config onto Flow
// `prometheus.exporter.agent` arguments. The component takes no
// arguments, so nothing from the input config is translated.
func toAgentExporter(config *agent_exporter.Config) *agent.Arguments {
	var args agent.Arguments
	return &args
}

// appendAgentExporterV2 converts a v2 agent integration config into a
// `prometheus.exporter.agent` block and returns discovery exports that
// reference the component's targets.
func (b *IntegrationsConfigBuilder) appendAgentExporterV2(config *agent_exporter_v2.Config) discovery.Exports {
	exporterArgs := toAgentExporterV2(config)
	return b.appendExporterBlock(exporterArgs, config.Name(), "agent")
}

// toAgentExporterV2 maps a v2 agent integration config onto Flow
// `prometheus.exporter.agent` arguments. The component takes no
// arguments, so nothing from the input config is translated.
func toAgentExporterV2(config *agent_exporter_v2.Config) *agent.Arguments {
	var args agent.Arguments
	return &args
}
101 changes: 97 additions & 4 deletions converter/internal/staticconvert/internal/build/builder.go
Original file line number Diff line number Diff line change
Expand Up @@ -4,8 +4,11 @@ import (
"fmt"
"strings"

"github.com/grafana/agent/component"
"github.com/grafana/agent/component/discovery"
"github.com/grafana/agent/component/prometheus/remotewrite"
"github.com/grafana/agent/converter/diag"
"github.com/grafana/agent/converter/internal/common"
"github.com/grafana/agent/converter/internal/prometheusconvert"
"github.com/grafana/agent/pkg/config"
agent_exporter "github.com/grafana/agent/pkg/integrations/agent"
Expand Down Expand Up @@ -34,10 +37,14 @@ import (
"github.com/grafana/agent/pkg/integrations/snowflake_exporter"
"github.com/grafana/agent/pkg/integrations/squid_exporter"
"github.com/grafana/agent/pkg/integrations/statsd_exporter"
agent_exporter_v2 "github.com/grafana/agent/pkg/integrations/v2/agent"
common_v2 "github.com/grafana/agent/pkg/integrations/v2/common"
"github.com/grafana/agent/pkg/integrations/windows_exporter"
"github.com/grafana/river/scanner"
"github.com/grafana/river/token/builder"
"github.com/prometheus/common/model"
prom_config "github.com/prometheus/prometheus/config"
"github.com/prometheus/prometheus/model/relabel"
)

type IntegrationsConfigBuilder struct {
Expand Down Expand Up @@ -151,10 +158,6 @@ func (b *IntegrationsConfigBuilder) appendV1Integrations() {
}
}

func (b *IntegrationsConfigBuilder) appendV2Integrations() {

}

func (b *IntegrationsConfigBuilder) appendExporter(commonConfig *int_config.Common, name string, extraTargets []discovery.Target) {
scrapeConfig := prom_config.DefaultScrapeConfig
scrapeConfig.JobName = fmt.Sprintf("integrations/%s", name)
Expand Down Expand Up @@ -193,10 +196,100 @@ func (b *IntegrationsConfigBuilder) appendExporter(commonConfig *int_config.Comm
b.globalCtx.InitializeRemoteWriteExports()
}

// appendV2Integrations walks the integrations-next (v2) configs and emits
// the Flow components for each supported integration. Integrations that
// produce no targets are skipped.
func (b *IntegrationsConfigBuilder) appendV2Integrations() {
	for _, integration := range b.cfg.Integrations.ConfigV2.Configs {
		var (
			exports      discovery.Exports
			commonConfig common_v2.MetricsConfig
		)

		switch itg := integration.(type) {
		case *agent_exporter_v2.Config:
			exports = b.appendAgentExporterV2(itg)
			commonConfig = itg.Common
		}

		// Only wire up scraping when the integration actually exported targets.
		if len(exports.Targets) == 0 {
			continue
		}
		b.appendExporterV2(&commonConfig, integration.Name(), exports.Targets)
	}
}

// appendExporterV2 appends the scrape configuration for a v2 integration's
// exporter targets, forwarding samples to the remote write component of the
// metrics instance named by the integration's autoscrape config.
//
// extraTargets are the discovery targets exported by the integration's
// exporter component.
func (b *IntegrationsConfigBuilder) appendExporterV2(commonConfig *common_v2.MetricsConfig, name string, extraTargets []discovery.Target) {
	var relabelConfigs []*relabel.Config

	// Translate each extra_labels entry into a relabel rule that stamps a
	// fixed label value onto every scraped target.
	for _, extraLabel := range commonConfig.ExtraLabels {
		// Copy the package default so we never mutate relabel.DefaultRelabelConfig.
		defaultConfig := relabel.DefaultRelabelConfig
		relabelConfig := &defaultConfig
		relabelConfig.SourceLabels = []model.LabelName{"__address__"}
		relabelConfig.TargetLabel = extraLabel.Name
		relabelConfig.Replacement = extraLabel.Value

		relabelConfigs = append(relabelConfigs, relabelConfig)
	}

	commonConfig.ApplyDefaults(b.cfg.Integrations.ConfigV2.Metrics.Autoscrape)

	scrapeConfig := prom_config.DefaultScrapeConfig
	scrapeConfig.JobName = fmt.Sprintf("integrations/%s", name)
	// BUG FIX: the previous code assigned Autoscrape.RelabelConfigs and then
	// immediately overwrote the field with the extra_labels rules, silently
	// dropping any user-supplied autoscrape relabel rules. Keep both sets:
	// extra-label rules first, then the user's rules so they can act on the
	// labels added above. relabelConfigs is locally owned, so appending to it
	// cannot clobber caller-visible state.
	scrapeConfig.RelabelConfigs = append(relabelConfigs, commonConfig.Autoscrape.RelabelConfigs...)
	scrapeConfig.MetricRelabelConfigs = commonConfig.Autoscrape.MetricRelabelConfigs
	scrapeConfig.ScrapeInterval = commonConfig.Autoscrape.ScrapeInterval
	scrapeConfig.ScrapeTimeout = commonConfig.Autoscrape.ScrapeTimeout

	scrapeConfigs := []*prom_config.ScrapeConfig{&scrapeConfig}

	// Find the metrics instance this integration autoscrapes into and build a
	// reference to its remote write component.
	var remoteWriteExports *remotewrite.Exports
	for _, metrics := range b.cfg.Metrics.Configs {
		if metrics.Name == commonConfig.Autoscrape.MetricsInstance {
			// This must match the name of the existing remote write config in the metrics config:
			label, err := scanner.SanitizeIdentifier("metrics_" + metrics.Name)
			if err != nil {
				b.diags.Add(diag.SeverityLevelCritical, fmt.Sprintf("failed to sanitize job name: %s", err))
			}

			remoteWriteExports = &remotewrite.Exports{
				Receiver: common.ConvertAppendable{Expr: "prometheus.remote_write." + label + ".receiver"},
			}
			break
		}
	}

	if remoteWriteExports == nil {
		// NOTE(review): after reporting this critical diagnostic we still call
		// AppendAllNested below with a nil remote write reference — confirm the
		// converter pipeline tolerates that (the critical diag presumably aborts
		// output generation).
		b.diags.Add(diag.SeverityLevelCritical, fmt.Sprintf("integration %s is looking for an undefined metrics config: %s", name, commonConfig.Autoscrape.MetricsInstance))
	}

	promConfig := &prom_config.Config{
		GlobalConfig:  b.cfg.Metrics.Global.Prometheus,
		ScrapeConfigs: scrapeConfigs,
	}

	// Map "integrations/<name>" job names onto component labels, falling back
	// to the bare prefix when there is no suffix.
	jobNameToCompLabelsFunc := func(jobName string) string {
		labelSuffix := strings.TrimPrefix(jobName, "integrations/")
		if labelSuffix == "" {
			return b.globalCtx.LabelPrefix
		}

		return fmt.Sprintf("%s_%s", b.globalCtx.LabelPrefix, labelSuffix)
	}

	// Need to pass in the remote write reference from the metrics config here:
	b.diags.AddAll(prometheusconvert.AppendAllNested(b.f, promConfig, jobNameToCompLabelsFunc, extraTargets, remoteWriteExports))
}

// splitByCommaNullOnEmpty splits s on commas. Unlike strings.Split, an
// empty input yields nil rather than a one-element slice containing the
// empty string, letting callers distinguish "no values" cleanly.
func splitByCommaNullOnEmpty(s string) []string {
	switch {
	case len(s) == 0:
		return nil
	default:
		return strings.Split(s, ",")
	}
}

// appendExporterBlock emits a `prometheus.exporter.<exporterName>` block
// with the given arguments and returns discovery exports referencing that
// component's targets.
func (b *IntegrationsConfigBuilder) appendExporterBlock(args component.Arguments, name string, exporterName string) discovery.Exports {
	label := common.LabelForParts(b.globalCtx.LabelPrefix, name)

	block := common.NewBlockWithOverride(
		[]string{"prometheus", "exporter", exporterName},
		label,
		args,
	)
	b.f.Body().AppendBlock(block)

	targetsExpr := fmt.Sprintf("prometheus.exporter.%s.%s.targets", exporterName, label)
	return common.NewDiscoveryExports(targetsExpr)
}
4 changes: 0 additions & 4 deletions converter/internal/staticconvert/staticconvert.go
Original file line number Diff line number Diff line change
Expand Up @@ -29,10 +29,6 @@ import (
// as enabling integrations-next.
func Convert(in []byte, extraArgs []string) ([]byte, diag.Diagnostics) {
var diags diag.Diagnostics
// diags := validateExtraArgs(extraArgs)
// if len(diags) > 0 {
// return nil, diags
// }

fs := flag.NewFlagSet("convert", flag.ContinueOnError)
args := []string{"-config.file", "convert", "-config.expand-env"}
Expand Down
34 changes: 34 additions & 0 deletions converter/internal/staticconvert/testdata-v2/integrations_v2.river
Original file line number Diff line number Diff line change
@@ -0,0 +1,34 @@
prometheus.remote_write "metrics_default" {
endpoint {
name = "default-8be96f"
url = "http://localhost:9009/api/prom/push"

queue_config { }

metadata_config { }
}
}

prometheus.exporter.agent "integrations_agent" { }

discovery.relabel "integrations_agent" {
targets = prometheus.exporter.agent.integrations_agent.targets

rule {
source_labels = ["__address__"]
target_label = "test_label"
replacement = "test_label_value"
}

rule {
source_labels = ["__address__"]
target_label = "test_label_2"
replacement = "test_label_value_2"
}
}

prometheus.scrape "integrations_agent" {
targets = discovery.relabel.integrations_agent.output
forward_to = [prometheus.remote_write.metrics_default.receiver]
job_name = "integrations/agent"
}
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,14 @@ metrics:
global:
remote_write:
- url: http://localhost:9009/api/prom/push
configs:
- name: default

integrations:
agent:
instance: "agent"
instance: "default"
autoscrape:
metrics_instance: "default"
extra_labels:
test_label: test_label_value
test_label_2: test_label_value_2
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
(Critical) integration agent is looking for an undefined metrics config: not_default
(Warning) Please review your agent command line flags and ensure they are set in your Flow mode config file where necessary.
Original file line number Diff line number Diff line change
@@ -0,0 +1,12 @@
metrics:
global:
remote_write:
- url: http://localhost:9009/api/prom/push
configs:
- name: default

integrations:
agent:
instance: "default"
autoscrape:
metrics_instance: "not_default"
9 changes: 8 additions & 1 deletion converter/internal/staticconvert/validate.go
Original file line number Diff line number Diff line change
Expand Up @@ -33,6 +33,7 @@ import (
"github.com/grafana/agent/pkg/integrations/squid_exporter"
"github.com/grafana/agent/pkg/integrations/statsd_exporter"
v2 "github.com/grafana/agent/pkg/integrations/v2"
agent_exporter_v2 "github.com/grafana/agent/pkg/integrations/v2/agent"
"github.com/grafana/agent/pkg/integrations/windows_exporter"
"github.com/grafana/agent/pkg/logs"
"github.com/grafana/agent/pkg/metrics"
Expand Down Expand Up @@ -157,7 +158,13 @@ func validateIntegrationsV1(integrationsConfig *v1.ManagerConfig) diag.Diagnosti
// validateIntegrationsV2 reports a diagnostic for every integrations-next
// (v2) integration that the converter does not yet know how to convert.
func validateIntegrationsV2(integrationsConfig *v2.SubsystemOptions) diag.Diagnostics {
	var diags diag.Diagnostics

	for _, cfg := range integrationsConfig.Configs {
		switch c := cfg.(type) {
		case *agent_exporter_v2.Config:
			// Supported by the converter; nothing to report.
		default:
			diags.Add(diag.SeverityLevelError, fmt.Sprintf("The converter does not support converting the provided %s integration.", c.Name()))
		}
	}

	return diags
}
Expand Down
12 changes: 12 additions & 0 deletions docs/make-docs
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,12 @@
# [Semantic versioning](https://semver.org/) is used to help the reader identify the significance of changes.
# Changes are relevant to this script and the support docs.mk GNU Make interface.
#
# ## 5.1.1 (2023-10-30)
#
# ### Added
#
# - Support for Datadog and Oracle data source plugins repositories.
#
# ## 5.1.0 (2023-10-20)
#
# ### Added
Expand Down Expand Up @@ -249,8 +255,10 @@ SOURCES_grafana_cloud_frontend_observability_faro_web_sdk='faro-web-sdk'
SOURCES_helm_charts_mimir_distributed='mimir'
SOURCES_helm_charts_tempo_distributed='tempo'
SOURCES_opentelemetry='opentelemetry-docs'
SOURCES_plugins_grafana_datadog_datasource='datadog-datasource'
SOURCES_plugins_grafana_jira_datasource='jira-datasource'
SOURCES_plugins_grafana_mongodb_datasource='mongodb-datasource'
SOURCES_plugins_grafana_oracle_datasource='oracle-datasource'
SOURCES_plugins_grafana_splunk_datasource='splunk-datasource'

VERSIONS_as_code='UNVERSIONED'
Expand All @@ -261,8 +269,10 @@ VERSIONS_grafana_cloud_k6='UNVERSIONED'
VERSIONS_grafana_cloud_data_configuration_integrations='UNVERSIONED'
VERSIONS_grafana_cloud_frontend_observability_faro_web_sdk='UNVERSIONED'
VERSIONS_opentelemetry='UNVERSIONED'
VERSIONS_plugins_grafana_datadog_datasource='latest'
VERSIONS_plugins_grafana_jira_datasource='latest'
VERSIONS_plugins_grafana_mongodb_datasource='latest'
VERSIONS_plugins_grafana_oracle_datasource='latest'
VERSIONS_plugins_grafana_splunk_datasource='latest'
VERSIONS_technical_documentation='UNVERSIONED'
VERSIONS_website='UNVERSIONED'
Expand All @@ -272,8 +282,10 @@ PATHS_grafana_cloud='content/docs/grafana-cloud'
PATHS_helm_charts_mimir_distributed='docs/sources/helm-charts/mimir-distributed'
PATHS_helm_charts_tempo_distributed='docs/sources/helm-charts/tempo-distributed'
PATHS_mimir='docs/sources/mimir'
PATHS_plugins_grafana_datadog_datasource='docs/sources'
PATHS_plugins_grafana_jira_datasource='docs/sources'
PATHS_plugins_grafana_mongodb_datasource='docs/sources'
PATHS_plugins_grafana_oracle_datasource='docs/sources'
PATHS_plugins_grafana_splunk_datasource='docs/sources'
PATHS_tempo='docs/sources/tempo'
PATHS_website='content'
Expand Down
8 changes: 8 additions & 0 deletions docs/sources/flow/setup/install/windows.md
Original file line number Diff line number Diff line change
Expand Up @@ -51,6 +51,12 @@ To do a silent install of Grafana Agent on Windows, perform the following steps.

Replace `PATH_TO_INSTALLER` with the path where the unzipped installer executable is located.

### Silent install options

* `/CONFIG=<path>` Path to the configuration file. Default: `$INSTDIR\config.river`
* `/DISABLEREPORTING=<yes|no>` Disable [data collection][]. Default: `no`
* `/DISABLEPROFILING=<yes|no>` Disable profiling endpoint. Default: `no`

## Uninstall

You can uninstall Grafana Agent with Windows Remove Programs or `C:\Program Files\Grafana Agent\uninstaller.exe`. Uninstalling Grafana Agent stops the service and removes it from disk. This includes any configuration files in the installation directory.
Expand All @@ -69,4 +75,6 @@ Grafana Agent can also be silently uninstalled by running `uninstall.exe /S` as
[Start Grafana Agent]: "/docs/grafana-cloud/ -> /docs/grafana-cloud/monitor-infrastructure/agent/flow/setup/start-agent.md#windows"
[Configure Grafana Agent]: "/docs/agent/ -> /docs/agent/<AGENT_VERSION>/flow/setup/configure/configure-windows.md"
[Configure Grafana Agent]: "/docs/grafana-cloud/ -> /docs/grafana-cloud/monitor-infrastructure/agent/flow/setup/configure/configure-windows.md"
[data collection]: "/docs/agent/ -> /docs/agent/<AGENT_VERSION>/data-collection.md"
[data collection]: "/docs/grafana-cloud/ -> /docs/grafana-cloud/monitor-infrastructure/agent/data-collection.md"
{{% /docs/reference %}}
Loading

0 comments on commit 0877ea3

Please sign in to comment.