[receiver/elasticsearch]: add metrics related to GET operations (open-telemetry#14793)

feat: add metrics related to GET operations
aboguszewski-sumo committed Nov 3, 2022
1 parent b8ed96c commit 6de7397
Showing 8 changed files with 278 additions and 2 deletions.
5 changes: 5 additions & 0 deletions .chloggen/elasticsearch-receiver-get-ops.yaml
@@ -0,0 +1,5 @@
change_type: enhancement
component: elasticsearchreceiver
note: Add metrics related to GET operations
issues: [14635]

3 changes: 3 additions & 0 deletions receiver/elasticsearchreceiver/documentation.md
@@ -53,6 +53,8 @@ These are the metrics available for this scraper.
| **elasticsearch.node.ingest.operations.failed** | Total number of failed ingest operations during the lifetime of this node. | {operation} | Sum(Int) | <ul> </ul> |
| **elasticsearch.node.open_files** | The number of open file descriptors held by the node. | {files} | Sum(Int) | <ul> </ul> |
| **elasticsearch.node.operations.completed** | The number of operations completed by a node. | {operations} | Sum(Int) | <ul> <li>operation</li> </ul> |
| elasticsearch.node.operations.get.completed | The number of hits and misses resulting from GET operations. | {operations} | Sum(Int) | <ul> <li>get_result</li> </ul> |
| elasticsearch.node.operations.get.time | The time spent on hits and misses resulting from GET operations. | ms | Sum(Int) | <ul> <li>get_result</li> </ul> |
| **elasticsearch.node.operations.time** | Time spent on operations by a node. | ms | Sum(Int) | <ul> <li>operation</li> </ul> |
| **elasticsearch.node.pipeline.ingest.documents.current** | Total number of documents currently being ingested by a pipeline. | {documents} | Sum(Int) | <ul> <li>ingest_pipeline_name</li> </ul> |
| **elasticsearch.node.pipeline.ingest.documents.preprocessed** | Number of documents preprocessed by the ingest pipeline. | {documents} | Sum(Int) | <ul> <li>ingest_pipeline_name</li> </ul> |
@@ -117,6 +119,7 @@ metrics:
| direction | The direction of network data. | received, sent |
| document_state (state) | The state of the document. | active, deleted |
| fs_direction (direction) | The direction of filesystem IO. | read, write |
| get_result (result) | Result of get operation | hit, miss |
| health_status (status) | The health status of the cluster. | green, yellow, red |
| index_aggregation_type (aggregation) | Type of shard aggregation for index statistics | primary_shards, total |
| indexing_memory_state (state) | State of the indexing memory | current, total |
156 changes: 156 additions & 0 deletions receiver/elasticsearchreceiver/internal/metadata/generated_metrics.go

Some generated files are not rendered by default.

8 changes: 6 additions & 2 deletions receiver/elasticsearchreceiver/internal/model/nodestats.go
@@ -219,8 +219,12 @@ type IndexingOperations struct {
}

type GetOperation struct {
Total int64 `json:"total"`
TotalTimeInMs int64 `json:"time_in_millis"`
Total int64 `json:"total"`
TotalTimeInMs int64 `json:"time_in_millis"`
Exists int64 `json:"exists_total"`
ExistsTimeInMs int64 `json:"exists_time_in_millis"`
Missing int64 `json:"missing_total"`
MissingTimeInMs int64 `json:"missing_time_in_millis"`
}

type SearchOperations struct {
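For reference, the new fields map onto the `get` section of the Elasticsearch node-stats response through their `json` tags. Below is a minimal sketch of that fragment, rendered as YAML; the nesting and the numbers are illustrative only (the hit/miss values were chosen to match the expected_metrics fixture further down).

```yaml
# Illustrative node-stats fragment feeding model.GetOperation.
# Field names come from the json tags above; values are examples.
indices:
  get:
    total: 1024                  # -> Total
    time_in_millis: 333          # -> TotalTimeInMs
    exists_total: 512            # -> Exists  (recorded with get_result=hit)
    exists_time_in_millis: 209   # -> ExistsTimeInMs
    missing_total: 512           # -> Missing (recorded with get_result=miss)
    missing_time_in_millis: 124  # -> MissingTimeInMs
```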
24 changes: 24 additions & 0 deletions receiver/elasticsearchreceiver/metadata.yaml
@@ -154,6 +154,12 @@ attributes:
- doc_value
- index_writer
- fixed_bit_set
get_result:
value: result
description: Result of get operation
enum:
- hit
- miss

metrics:
# these metrics are from /_nodes/stats, and are node level metrics
@@ -309,6 +315,24 @@ metrics:
value_type: int
attributes: [operation]
enabled: true
elasticsearch.node.operations.get.completed:
description: The number of hits and misses resulting from GET operations.
unit: "{operations}"
sum:
monotonic: true
aggregation: cumulative
value_type: int
attributes: [get_result]
enabled: false
elasticsearch.node.operations.get.time:
description: The time spent on hits and misses resulting from GET operations.
unit: ms
sum:
monotonic: true
aggregation: cumulative
value_type: int
attributes: [get_result]
enabled: false
elasticsearch.node.shards.size:
description: The size of the shards assigned to this node.
unit: By
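Both new metrics are declared with `enabled: false`, so the receiver does not emit them unless they are switched on in the collector configuration. A minimal sketch, assuming a locally reachable cluster (the endpoint value is a placeholder):

```yaml
receivers:
  elasticsearch:
    endpoint: http://localhost:9200   # placeholder endpoint
    metrics:
      elasticsearch.node.operations.get.completed:
        enabled: true
      elasticsearch.node.operations.get.time:
        enabled: true
```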
6 changes: 6 additions & 0 deletions receiver/elasticsearchreceiver/scraper.go
@@ -160,6 +160,12 @@ func (r *elasticsearchScraper) scrapeNodeMetrics(ctx context.Context, now pcommo
r.mb.RecordElasticsearchNodeOperationsTimeDataPoint(now, info.Indices.FlushOperations.TotalTimeInMs, metadata.AttributeOperationFlush)
r.mb.RecordElasticsearchNodeOperationsTimeDataPoint(now, info.Indices.WarmerOperations.TotalTimeInMs, metadata.AttributeOperationWarmer)

r.mb.RecordElasticsearchNodeOperationsGetCompletedDataPoint(now, info.Indices.GetOperation.Exists, metadata.AttributeGetResultHit)
r.mb.RecordElasticsearchNodeOperationsGetCompletedDataPoint(now, info.Indices.GetOperation.Missing, metadata.AttributeGetResultMiss)

r.mb.RecordElasticsearchNodeOperationsGetTimeDataPoint(now, info.Indices.GetOperation.ExistsTimeInMs, metadata.AttributeGetResultHit)
r.mb.RecordElasticsearchNodeOperationsGetTimeDataPoint(now, info.Indices.GetOperation.MissingTimeInMs, metadata.AttributeGetResultMiss)

r.mb.RecordElasticsearchNodeShardsSizeDataPoint(now, info.Indices.StoreInfo.SizeInBy)

// Elasticsearch version 7.13+ is required to collect `elasticsearch.node.shards.data_set.size`.
4 changes: 4 additions & 0 deletions receiver/elasticsearchreceiver/scraper_test.go
@@ -42,6 +42,10 @@ func TestScraper(t *testing.T) {
t.Parallel()

config := createDefaultConfig().(*Config)

config.Metrics.ElasticsearchNodeOperationsGetCompleted.Enabled = true
config.Metrics.ElasticsearchNodeOperationsGetTime.Enabled = true

config.Metrics.ElasticsearchIndexOperationsMergeSize.Enabled = true
config.Metrics.ElasticsearchIndexOperationsMergeDocsCount.Enabled = true
config.Metrics.ElasticsearchIndexSegmentsCount.Enabled = true
74 changes: 74 additions & 0 deletions receiver/elasticsearchreceiver/testdata/expected_metrics/full.json
@@ -1450,6 +1450,80 @@
},
"unit": "ms"
},
{
"description": "The number of hits and misses resulting from GET operations.",
"sum": {
"aggregationTemporality": "AGGREGATION_TEMPORALITY_CUMULATIVE",
"dataPoints": [
{
"asInt": "512",
"attributes": [
{
"key": "result",
"value": {
"stringValue": "hit"
}
}
],
"startTimeUnixNano": "1661811026803971000",
"timeUnixNano": "1661811026805343000"
},
{
"asInt": "512",
"attributes": [
{
"key": "result",
"value": {
"stringValue": "miss"
}
}
],
"startTimeUnixNano": "1661811026803971000",
"timeUnixNano": "1661811026805343000"
}
],
"isMonotonic": true
},
"name": "elasticsearch.node.operations.get.completed",
"unit": "{operations}"
},
{
"description": "The time spent on hits and misses resulting from GET operations.",
"sum": {
"aggregationTemporality": "AGGREGATION_TEMPORALITY_CUMULATIVE",
"dataPoints": [
{
"asInt": "209",
"attributes": [
{
"key": "result",
"value": {
"stringValue": "hit"
}
}
],
"startTimeUnixNano": "1661811026803971000",
"timeUnixNano": "1661811026805343000"
},
{
"asInt": "124",
"attributes": [
{
"key": "result",
"value": {
"stringValue": "miss"
}
}
],
"startTimeUnixNano": "1661811026803971000",
"timeUnixNano": "1661811026805343000"
}
],
"isMonotonic": true
},
"name": "elasticsearch.node.operations.get.time",
"unit": "ms"
},
{
"description": "Total number of documents currently being ingested by a pipeline.",
"name": "elasticsearch.node.pipeline.ingest.documents.current",
