Documentation
¶
Index ¶
- type Config
- type Discoverer
- type Logger
- func (l *Logger) Debug(v ...interface{})
- func (l *Logger) Debugf(s string, v ...interface{})
- func (l *Logger) Debugln(v ...interface{})
- func (l *Logger) Error(v ...interface{})
- func (l *Logger) Errorf(s string, v ...interface{})
- func (l *Logger) Errorln(v ...interface{})
- func (l *Logger) Fatal(v ...interface{})
- func (l *Logger) Fatalf(s string, v ...interface{})
- func (l *Logger) Fatalln(v ...interface{})
- func (l *Logger) Info(v ...interface{})
- func (l *Logger) Infof(s string, v ...interface{})
- func (l *Logger) Infoln(v ...interface{})
- func (l *Logger) Warn(v ...interface{})
- func (l *Logger) Warnf(s string, v ...interface{})
- func (l *Logger) Warnln(v ...interface{})
- func (l *Logger) With(key string, value interface{}) plog.Logger
- type Pair
- type Registry
- type Service
- func (s *Service) AddDiscoverer(discoverer Discoverer)
- func (s *Service) AddScrapers(scrapers []Config)
- func (s *Service) Append(sample *model.Sample) error
- func (s *Service) Close() error
- func (s *Service) Commit() error
- func (s *Service) NeedsThrottling() bool
- func (s *Service) Open() error
- func (s *Service) Pairs() []Pair
- func (s *Service) RemoveDiscoverer(rm Discoverer)
- func (s *Service) RemoveScrapers(scrapers []Config)
- func (s *Service) Test(options interface{}) error
- func (s *Service) TestOptions() interface{}
- func (s *Service) Update(newConfigs []interface{}) error
Constants ¶
This section is empty.
Variables ¶
This section is empty.
Functions ¶
This section is empty.
Types ¶
type Config ¶
type Config struct {
Enabled bool `toml:"enabled" override:"enabled"`
// The job name to which the job label is set by default.
Name string `toml:"name" override:"name"`
// Type of the scraper
Type string `toml:"type" override:"type"`
// Database this data will be associated with
Database string `toml:"db" override:"db"`
// RetentionPolicy this data will be associated with
RetentionPolicy string `toml:"rp" override:"rp"`
// The URL scheme with which to fetch metrics from targets.
Scheme string `toml:"scheme" override:"scheme"`
// The HTTP resource path on which to fetch metrics from targets.
MetricsPath string `toml:"metrics-path" override:"metrics-path"`
// A set of query parameters with which the target is scraped.
Params url.Values `toml:"params" override:"params"`
// How frequently to scrape the targets of this scrape config.
ScrapeInterval toml.Duration `toml:"scrape-interval" override:"scrape-interval"`
// The timeout for scraping targets of this config.
ScrapeTimeout toml.Duration `toml:"scrape-timeout" override:"scrape-timeout"`
// The HTTP basic authentication credentials for the targets.
Username string `toml:"username" override:"username"`
Password string `toml:"password" override:"password,redact"`
// Path to CA file
SSLCA string `toml:"ssl-ca" override:"ssl-ca"`
// Path to host cert file
SSLCert string `toml:"ssl-cert" override:"ssl-cert"`
// Path to cert key file
SSLKey string `toml:"ssl-key" override:"ssl-key"`
// SSLServerName is used to verify the hostname for the targets.
SSLServerName string `toml:"ssl-server-name" override:"ssl-server-name"`
// Use SSL but skip chain & host verification
InsecureSkipVerify bool `toml:"insecure-skip-verify" override:"insecure-skip-verify"`
// The bearer token for the targets.
BearerToken string `toml:"bearer-token" override:"bearer-token,redact"`
// HTTP proxy server to use to connect to the targets.
ProxyURL *url.URL `toml:"proxy-url" override:"proxy-url"`
// DiscoverID is the id of the discoverer that generates hosts for the scraper
DiscoverID string `toml:"discoverer-id" override:"discoverer-id"`
// DiscoverService is the type of the discoverer that generates hosts for the scraper
DiscoverService string `toml:"discoverer-service" override:"discoverer-service"`
// Blacklist is a list of hosts to ignore and not scrape
Blacklist []string `toml:"blacklist" override:"blacklist"`
}
Config is the scraper configuration
func (*Config) Prom ¶
func (c *Config) Prom() *config.ScrapeConfig
Prom generates the prometheus configuration for the scraper
type Discoverer ¶
type Discoverer interface {
// Service returns the service type of the Discoverer
Service() string
// ServiceID returns the unique ID of this specific discoverer
ServiceID() string
// Prom creates a prometheus scrape configuration.
// TODO: replace when reimplement TargetManager
Prom(c *config.ScrapeConfig)
}
Discoverer represents a service that discovers hosts to scrape
type Logger ¶
Logger wraps kapacitor logging for prometheus
func (*Logger) Debug ¶
func (l *Logger) Debug(v ...interface{})
Debug logs a message at level Debug on the standard logger.
func (*Logger) Debugln ¶
func (l *Logger) Debugln(v ...interface{})
Debugln logs a message at level Debug on the standard logger.
func (*Logger) Error ¶
func (l *Logger) Error(v ...interface{})
Error logs a message at level Error on the standard logger.
func (*Logger) Errorln ¶
func (l *Logger) Errorln(v ...interface{})
Errorln logs a message at level Error on the standard logger.
func (*Logger) Fatal ¶
func (l *Logger) Fatal(v ...interface{})
Fatal logs a message at level Fatal on the standard logger.
func (*Logger) Fatalln ¶
func (l *Logger) Fatalln(v ...interface{})
Fatalln logs a message at level Fatal on the standard logger.
func (*Logger) Info ¶
func (l *Logger) Info(v ...interface{})
Info logs a message at level Info on the standard logger.
func (*Logger) Infoln ¶
func (l *Logger) Infoln(v ...interface{})
Infoln logs a message at level Info on the standard logger.
func (*Logger) Warn ¶
func (l *Logger) Warn(v ...interface{})
Warn logs a message at level Warn on the standard logger.
type Pair ¶
type Pair struct {
Discoverer Discoverer
Scraper Config
}
Pair is the linked discovery/scraper pair
type Registry ¶
type Registry interface {
// Commit finishes the update to the registry configuration
Commit() error
// AddDiscoverer adds discoverers to the registry
AddDiscoverer(Discoverer)
// RemoveDiscoverer removes discoverers from the registry
RemoveDiscoverer(Discoverer)
// AddScrapers adds scrapers to the registry
AddScrapers([]Config)
// RemoveScrapers removes scrapers from the registry
RemoveScrapers([]Config)
// Pairs returns the linked scraper/discovery combinations
Pairs() []Pair
}
Registry represents the combined configuration state of discoverers and scrapers
type Service ¶
type Service struct {
PointsWriter interface {
WriteKapacitorPoint(models.Point) error
}
// contains filtered or unexported fields
}
Service represents the scraper manager
func NewService ¶
NewService creates a new scraper service
func (*Service) AddDiscoverer ¶
func (s *Service) AddDiscoverer(discoverer Discoverer)
AddDiscoverer adds discoverer to the registry
func (*Service) AddScrapers ¶
AddScrapers adds scrapers to the registry
func (*Service) Append ¶
Append transforms prometheus samples and inserts data into the tasks pipeline
func (*Service) NeedsThrottling ¶
NeedsThrottling conforms to SampleAppender and never returns true currently.
func (*Service) Pairs ¶
Pairs returns all named pairs of scrapers and discoverers from the registry; the registry must be locked before calling.
func (*Service) RemoveDiscoverer ¶
func (s *Service) RemoveDiscoverer(rm Discoverer)
RemoveDiscoverer removes discoverer from the registry
func (*Service) RemoveScrapers ¶
RemoveScrapers removes scrapers from the registry
func (*Service) TestOptions ¶
func (s *Service) TestOptions() interface{}
TestOptions returns options that are allowed for the Test