_node/stats summary tool
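The script below summarizes ingest pipeline statistics from a saved copy of the Elasticsearch nodes stats API response (GET /_nodes/stats; the ingest metric can be requested on its own). As a minimal sketch of producing that input file, assuming a cluster reachable at http://localhost:9200 with no auth and the filename the script expects (adjust both for your environment):

import urllib.request

# Assumptions: local cluster, no auth, and the filename main() reads below.
STATS_URL = "http://localhost:9200/_nodes/stats/ingest"
OUTPUT_FILE = "NodesStats_3Feb"

# Save the raw JSON response to disk for the summary tool to read.
with urllib.request.urlopen(STATS_URL) as response, open(OUTPUT_FILE, "wb") as out:
    out.write(response.read())

The summary tool itself: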
#!/usr/local/bin/python3
"""Summarize ingest pipeline stats from a saved nodes stats response."""

import json


def fetch_nodes(stats_file: str):
    """
    Return node data from _node/stats.
    """
    with open(stats_file) as raw_fd:
        raw_json = json.load(raw_fd)
    return raw_json["nodes"]


def get_ingest_nodes(nodes: dict):
    """
    Return the IDs of the ingest nodes.
    """
    ingest_nodes = []
    for node in nodes:
        if "ingest" in nodes[node]["roles"]:
            ingest_nodes.append(node)
    return ingest_nodes


def sum_processors(pipeline: dict):
    """
    Sum all the processor stats in a given pipeline.
    """
    processors = {}
    for processor_pipeline in pipeline["processors"]:
        for processor in processor_pipeline:
            if processor not in processors:
                processors[processor] = {
                    "count": 0, "time_in_millis": 0, "current": 0, "failed": 0}
            stats = processor_pipeline[processor]["stats"]
            processors[processor]["count"] += stats["count"]
            processors[processor]["time_in_millis"] += stats["time_in_millis"]
            processors[processor]["current"] += stats["current"]
            processors[processor]["failed"] += stats["failed"]
    return processors


def print_processor_summary(processors: dict, skip_zero: bool, skip_no_error: bool):
    """
    Print a per-processor summary, optionally skipping processors
    with no documents or no failures.
    """
    for processor in processors:
        if skip_zero and processors[processor]["count"] == 0:
            continue
        if skip_no_error and processors[processor]["failed"] == 0:
            continue
        print("\tProcessor {}".format(processor))
        for stat in processors[processor]:
            print("\t\t{}: {}".format(stat, processors[processor][stat]))


def print_summary(node_stats: dict, pipeline_stats: dict):
    """
    Print summary stats for the nodes and pipelines.
    """
    print("Summary stats for all ingest nodes:")
    for node in node_stats:
        print("{}: count: {}, time_in_millis: {}, current: {}, failed: {}".format(
            node, node_stats[node]["count"], node_stats[node]["time_in_millis"],
            node_stats[node]["current"], node_stats[node]["failed"]))
    print("Summary stats for all ingest pipelines:")
    for pipeline in pipeline_stats:
        print("{}: count: {}, time_in_millis: {}, current: {}, failed: {}".format(
            pipeline, pipeline_stats[pipeline]["count"], pipeline_stats[pipeline]["time_in_millis"],
            pipeline_stats[pipeline]["current"], pipeline_stats[pipeline]["failed"]))


def main():
    # Only report pipelines whose name contains this substring.
    processor_glob = "filebeat-7.10.2"
    # Saved output of the nodes stats API.
    node_stats_file = "NodesStats_3Feb"
    nodes = fetch_nodes(node_stats_file)
    ingest_nodes = get_ingest_nodes(nodes)
    # For tracking summary data
    pipeline_stats = {}
    node_stats = {}
    for node in ingest_nodes:
        for pipeline in nodes[node]["ingest"]["pipelines"]:
            total_pipeline = nodes[node]["ingest"]["pipelines"][pipeline]
            if total_pipeline["count"] == 0 or processor_glob not in pipeline:
                continue
            # Print the pipeline we're on
            print("In node {} for pipeline '{}' got:".format(node, pipeline))
            print("\tCount: {}, \n\ttime_in_millis: {}, \n\tcurrent: {}, \n\tfailed: {}".format(
                total_pipeline["count"], total_pipeline["time_in_millis"],
                total_pipeline["current"], total_pipeline["failed"]))
            # Gather per-node totals
            if node not in node_stats:
                node_stats[node] = {
                    "count": 0, "time_in_millis": 0, "current": 0, "failed": 0}
            node_stats[node]["count"] += total_pipeline["count"]
            node_stats[node]["time_in_millis"] += total_pipeline["time_in_millis"]
            node_stats[node]["current"] += total_pipeline["current"]
            node_stats[node]["failed"] += total_pipeline["failed"]
            # Gather per-pipeline totals
            if pipeline not in pipeline_stats:
                pipeline_stats[pipeline] = {
                    "count": 0, "time_in_millis": 0, "current": 0, "failed": 0}
            pipeline_stats[pipeline]["count"] += total_pipeline["count"]
            pipeline_stats[pipeline]["time_in_millis"] += total_pipeline["time_in_millis"]
            pipeline_stats[pipeline]["current"] += total_pipeline["current"]
            pipeline_stats[pipeline]["failed"] += total_pipeline["failed"]
            # Per-processor breakdown for this pipeline
            processors = sum_processors(total_pipeline)
            print_processor_summary(processors, True, False)
    # Print summary stats
    print_summary(node_stats, pipeline_stats)


if __name__ == "__main__":
    main()
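For reference, a minimal sketch of the input document as the functions above read it; the node ID, pipeline name, and processor type are placeholders, and a real nodes stats response carries many more fields:

# Shape of the expected input, derived from the keys the script accesses.
EXAMPLE_STATS = {
    "nodes": {
        "node-id-1": {                      # placeholder node ID
            "roles": ["ingest"],
            "ingest": {
                "pipelines": {
                    "filebeat-7.10.2-example": {   # placeholder pipeline name
                        "count": 0,
                        "time_in_millis": 0,
                        "current": 0,
                        "failed": 0,
                        "processors": [
                            {"grok": {             # placeholder processor type
                                "stats": {"count": 0, "time_in_millis": 0,
                                          "current": 0, "failed": 0}}}
                        ],
                    }
                }
            },
        }
    }
}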