Skip to content

Commit

Permalink
Add support for new source scrape auth fields. (#22)
Browse files Browse the repository at this point in the history
  • Loading branch information
alistairjevans authored Sep 25, 2024
1 parent 8bd0c1b commit 6ac9eb9
Show file tree
Hide file tree
Showing 11 changed files with 337 additions and 18 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/test.yml
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,7 @@ jobs:
strategy:
matrix:
terraform-version: ["0.13", "1.0", "1.8", "latest"]
config: [examples/basic, examples/advanced]
config: [examples/basic, examples/advanced, examples/scrape]
fail-fast: false
runs-on: ubuntu-latest
steps:
Expand Down
2 changes: 1 addition & 1 deletion Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@ GOLANGCI_LINT := golangci-lint run --disable-all \
-E typecheck \
-E unused \
-E varcheck
VERSION := 0.2.0
VERSION := 0.3.1
.PHONY: test build

help:
Expand Down
3 changes: 3 additions & 0 deletions docs/data-sources/source.md
Original file line number Diff line number Diff line change
Expand Up @@ -74,6 +74,9 @@ This Data Source allows you to look up existing Logtail Sources using their tabl
- `vector`
- `vercel_integration`
- **scrape_frequency_secs** (Number) For scrape platform types, how often to scrape the URLs.
- **scrape_request_basic_auth_password** (String, Sensitive) Basic auth password for scraping.
- **scrape_request_basic_auth_user** (String) Basic auth username for scraping.
- **scrape_request_headers** (List of Map of String) An array of request headers, each containing `name` and `value` fields.
- **scrape_urls** (List of String) For scrape platform types, the set of urls to scrape.
- **team_name** (String) Used to specify the team the resource should be created in when using global tokens.
- **token** (String) The token of this source. This token is used to identify and route the data you will send to Logtail.
Expand Down
3 changes: 3 additions & 0 deletions docs/resources/source.md
Original file line number Diff line number Diff line change
Expand Up @@ -71,6 +71,9 @@ This resource allows you to create, modify, and delete Logtail Sources. For more
- **logs_retention** (Number) Data retention for logs in days. There might be additional charges for longer retention.
- **metrics_retention** (Number) Data retention for metrics in days. There might be additional charges for longer retention.
- **scrape_frequency_secs** (Number) For scrape platform types, how often to scrape the URLs.
- **scrape_request_basic_auth_password** (String, Sensitive) Basic auth password for scraping.
- **scrape_request_basic_auth_user** (String) Basic auth username for scraping.
- **scrape_request_headers** (List of Map of String) An array of request headers, each containing `name` and `value` fields.
- **scrape_urls** (List of String) For scrape platform types, the set of urls to scrape.
- **team_name** (String) Used to specify the team the resource should be created in when using global tokens.

Expand Down
18 changes: 18 additions & 0 deletions examples/scrape/main.tf
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
# Configure the Logtail provider. The API token may also be supplied through
# the LOGTAIL_API_TOKEN environment variable (see variables.tf).
provider "logtail" {
api_token = var.logtail_api_token
}

# Example scrape source: for scrape platform types, Logtail fetches the given
# URLs on the configured interval (scrape_frequency_secs, in seconds).
resource "logtail_source" "this" {
name = "Terraform Scrape Source"
platform = "prometheus_scrape"
scrape_urls = ["https://myserver.example.com/metrics"]
scrape_frequency_secs = 30
# Extra request headers; each entry must contain exactly `name` and `value`.
scrape_request_headers = [
{
name = "User-Agent"
value = "My Scraper"
}
]
# HTTP basic auth credentials sent with each scrape request
# (the password attribute is marked sensitive in the provider schema).
scrape_request_basic_auth_user = "foo"
scrape_request_basic_auth_password = "bar"
}
3 changes: 3 additions & 0 deletions examples/scrape/outputs.tf
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
# The token of the created source; it is used to identify and route the data
# you will send to Logtail.
output "logtail_source_token" {
value = logtail_source.this.token
}
9 changes: 9 additions & 0 deletions examples/scrape/variables.tf
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
# API token used to authenticate against the Logtail API.
variable "logtail_api_token" {
type = string
description = <<EOF
Logtail API Token
(https://docs.logtail.com/api/getting-started#obtaining-an-api-token)
EOF
# The value can be omitted if the LOGTAIL_API_TOKEN env var is set.
default = null
}
9 changes: 9 additions & 0 deletions examples/scrape/versions.tf
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
terraform {
# The provider's CI matrix tests Terraform 0.13 and newer.
required_version = ">= 0.13"
required_providers {
logtail = {
source = "BetterStackHQ/logtail"
# The scrape auth fields used in this example first shipped in 0.3.1.
version = ">= 0.3.1"
}
}
}
16 changes: 15 additions & 1 deletion internal/provider/ptr.go
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ import (
"github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema"
)

//nolint
// nolint
func load(d *schema.ResourceData, key string, receiver interface{}) {
switch x := receiver.(type) {
case **string:
Expand All @@ -32,6 +32,20 @@ func load(d *schema.ResourceData, key string, receiver interface{}) {
}
*x = &t
}
case **[]map[string]interface{}:
if v, ok := d.GetOkExists(key); ok {
var t []map[string]interface{}
for _, v := range v.([]interface{}) {
entry := v.(map[string]interface{})
newEntry := map[string]interface{}{}
for mapKey, mapValue := range entry {
newEntry[mapKey] = mapValue
}
t = append(t, newEntry)
}
*x = &t
}

default:
panic(fmt.Errorf("unexpected type %T", receiver))
}
Expand Down
95 changes: 80 additions & 15 deletions internal/provider/resource_source.go
Original file line number Diff line number Diff line change
Expand Up @@ -165,6 +165,28 @@ var sourceSchema = map[string]*schema.Schema{
Type: schema.TypeInt,
Optional: true,
},
"scrape_request_headers": {
Description: "An array of request headers, each containing `name` and `value` fields.",
Type: schema.TypeList,
Optional: true,
Elem: &schema.Schema{
Type: schema.TypeMap,
Elem: &schema.Schema{
Type: schema.TypeString,
},
},
},
"scrape_request_basic_auth_user": {
Description: "Basic auth username for scraping.",
Type: schema.TypeString,
Optional: true,
},
"scrape_request_basic_auth_password": {
Description: "Basic auth password for scraping.",
Type: schema.TypeString,
Optional: true,
Sensitive: true,
},
}

func newSourceResource() *schema.Resource {
Expand All @@ -176,25 +198,29 @@ func newSourceResource() *schema.Resource {
Importer: &schema.ResourceImporter{
StateContext: schema.ImportStatePassthroughContext,
},
Description: "This resource allows you to create, modify, and delete Logtail Sources. For more information about the Sources API check https://docs.logtail.com/api/sources-api",
Schema: sourceSchema,
CustomizeDiff: validateRequestHeaders,
Description: "This resource allows you to create, modify, and delete Logtail Sources. For more information about the Sources API check https://docs.logtail.com/api/sources-api",
Schema: sourceSchema,
}
}

type source struct {
Name *string `json:"name,omitempty"`
Token *string `json:"token,omitempty"`
TableName *string `json:"table_name,omitempty"`
Platform *string `json:"platform,omitempty"`
IngestingPaused *bool `json:"ingesting_paused,omitempty"`
LogsRetention *int `json:"logs_retention,omitempty"`
MetricsRetention *int `json:"metrics_retention,omitempty"`
LiveTailPattern *string `json:"live_tail_pattern,omitempty"`
CreatedAt *string `json:"created_at,omitempty"`
UpdatedAt *string `json:"updated_at,omitempty"`
TeamName *string `json:"team_name,omitempty"`
ScrapeURLs *[]string `json:"scrape_urls,omitempty"`
ScrapeFrequencySecs *int `json:"scrape_frequency_secs,omitempty"`
Name *string `json:"name,omitempty"`
Token *string `json:"token,omitempty"`
TableName *string `json:"table_name,omitempty"`
Platform *string `json:"platform,omitempty"`
IngestingPaused *bool `json:"ingesting_paused,omitempty"`
LogsRetention *int `json:"logs_retention,omitempty"`
MetricsRetention *int `json:"metrics_retention,omitempty"`
LiveTailPattern *string `json:"live_tail_pattern,omitempty"`
CreatedAt *string `json:"created_at,omitempty"`
UpdatedAt *string `json:"updated_at,omitempty"`
TeamName *string `json:"team_name,omitempty"`
ScrapeURLs *[]string `json:"scrape_urls,omitempty"`
ScrapeFrequencySecs *int `json:"scrape_frequency_secs,omitempty"`
ScrapeRequestHeaders *[]map[string]interface{} `json:"scrape_request_headers,omitempty"`
ScrapeRequestBasicAuthUser *string `json:"scrape_request_basic_auth_user,omitempty"`
ScrapeRequestBasicAuthPassword *string `json:"scrape_request_basic_auth_password,omitempty"`
}

type sourceHTTPResponse struct {
Expand Down Expand Up @@ -224,6 +250,9 @@ func sourceRef(in *source) []struct {
{k: "updated_at", v: &in.UpdatedAt},
{k: "scrape_urls", v: &in.ScrapeURLs},
{k: "scrape_frequency_secs", v: &in.ScrapeFrequencySecs},
{k: "scrape_request_headers", v: &in.ScrapeRequestHeaders},
{k: "scrape_request_basic_auth_user", v: &in.ScrapeRequestBasicAuthUser},
{k: "scrape_request_basic_auth_password", v: &in.ScrapeRequestBasicAuthPassword},
}
}

Expand Down Expand Up @@ -275,3 +304,39 @@ func sourceUpdate(ctx context.Context, d *schema.ResourceData, meta interface{})
// sourceDelete removes the source identified by the resource's ID through the
// Logtail sources API.
func sourceDelete(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics {
	endpoint := fmt.Sprintf("/api/v1/sources/%s", url.PathEscape(d.Id()))
	return resourceDelete(ctx, meta, endpoint)
}

// validateRequestHeaders is a CustomizeDiff hook that checks every entry of
// the scrape_request_headers list before the plan is applied. The ctx and v
// parameters are unused but required by the CustomizeDiffFunc signature.
func validateRequestHeaders(ctx context.Context, diff *schema.ResourceDiff, v interface{}) error {
	headers, ok := diff.GetOk("scrape_request_headers")
	if !ok {
		// Attribute absent (or empty/zero): nothing to validate.
		return nil
	}
	for _, header := range headers.([]interface{}) {
		headerMap := header.(map[string]interface{})
		if err := validateRequestHeader(headerMap); err != nil {
			// Lowercase message per Go error-string convention; %w keeps the
			// underlying validation error inspectable via errors.Is/As.
			return fmt.Errorf("invalid request header %v: %w", headerMap, err)
		}
	}
	return nil
}

// validateRequestHeader checks that a single scrape request header entry is a
// map containing exactly two keys, "name" and "value", each holding a
// non-empty string. It returns nil for a valid entry and a descriptive error
// otherwise.
func validateRequestHeader(header map[string]interface{}) error {
	// Headers with calculated fields that are not known at the time will be
	// passed as empty maps; those are deliberately accepted.
	if len(header) == 0 {
		return nil
	}

	if name, ok := header["name"].(string); !ok || name == "" {
		return fmt.Errorf("must contain 'name' key with a non-empty string value")
	}
	if value, ok := header["value"].(string); !ok || value == "" {
		return fmt.Errorf("must contain 'value' key with a non-empty string value")
	}
	// Both required keys are present; any additional key is rejected.
	if len(header) != 2 {
		return fmt.Errorf("must only contain 'name' and 'value' keys")
	}
	return nil
}
Loading

0 comments on commit 6ac9eb9

Please sign in to comment.