Last active
August 30, 2022 09:20
-
-
Save rinov/b1e221142177eac2baa820b64a949b72 to your computer and use it in GitHub Desktop.
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
""" | |
Network sequence geneator from network session | |
""" | |
import pandas as pd | |
import numpy as np | |
from urllib.parse import urlparse | |
csv_file_path = "xxxx.csv" | |
output_filename = "output-uml.txt" | |
client_addess = '/192.168.x.y' | |
# Column layout of the session CSV; the file's own header row is skipped.
_COLUMN_NAMES = ['URL', 'Status', 'ResponseCode', 'Protocol', 'Method', 'ContentType', 'ClientAddress', 'ClientPort', 'RemoteAddress', 'RemotePort', 'Exception', 'RequestStartTime', 'RequestEndTime', 'ResponseStartTime', 'ResponseEndTime', 'Duration', 'DNSDuration', 'ConnectDuration', 'SSLDuration', 'RequestDuration', 'ResponseDuration', 'Latency', 'Speed', 'RequestSpeed', 'ResponseSpeed', 'RequestHandshakeSize', 'RequestHeaderSize', 'RequestBodySize', 'ResponseHandshakeSize', 'ResponseHeaderSize', 'ResponseBodySize', 'RequestCompression', 'ResponseCompression']

data = pd.read_csv(csv_file_path, header=None, names=_COLUMN_NAMES, skiprows=1)
data = data.fillna(value=0)
# Keep only the traffic that originated from the client under analysis.
data = data[data['ClientAddress'] == client_addess]
# Parse both timestamp columns used later for overlap detection.
for _ts_column in ('RequestStartTime', 'ResponseEndTime'):
    data[_ts_column] = pd.to_datetime(data[_ts_column])
# Keep only the part of RemoteAddress before the first '/'.
data['RemoteAddress'] = data['RemoteAddress'].apply(lambda addr: addr.split('/')[0])
data.index = data['RequestStartTime']
# Distinct remote hosts, one document section each.
domain_groups = list(data.groupby('RemoteAddress').groups.keys())
document = "" | |
for domain in domain_groups: | |
target = data[data['RemoteAddress'] == domain] | |
document += f"\r## {domain}" | |
completed_request = target[target['Status'] == 'COMPLETE'] | |
availability = completed_request[completed_request['ResponseCode'] < 500].shape[0] / (1 if completed_request.shape[0] <= 0 else 0 + completed_request.shape[0]) * 100 | |
avg_latency = np.mean(completed_request['Duration']) | |
avg_request_size = np.mean(completed_request['RequestHeaderSize'] + completed_request['RequestBodySize'] + completed_request['RequestHandshakeSize']) | |
avg_response_size = np.mean(completed_request['ResponseHeaderSize'] + completed_request['ResponseBodySize'] + completed_request['ResponseHandshakeSize']) | |
document += f""" | |
\r<details><summary>シーケンスを表示</summary>\n | |
\rAvailability: `{round(availability, 3)} %` | |
\rAverage Latency: `{round(avg_latency, 1)} ms` | |
\rAverage Request Size: `{round(avg_request_size, 1)} bytes` | |
\rAverage Response Size: `{round(avg_response_size, 1)} bytes`\n | |
\r```uml | |
\rskinparam monochrome false | |
""" | |
prev_record = None | |
prev_concurent = False | |
records = target.to_dict('records') | |
records_size = len(records) | |
if records_size < 2: | |
continue | |
for index, record in enumerate(records): | |
if record['Status'] != 'COMPLETE' or record['Protocol'] != 'https': | |
continue | |
# 前回リクエストが完了する前に今回リクエストが開始されている場合は少なくとも直列ではない | |
if prev_record: | |
is_concurrent = (record['RequestStartTime'] <= prev_record['ResponseEndTime']) or (False if index >= records_size - 1 else records[index + 1]['RequestStartTime'] <= record['ResponseEndTime']) | |
else: | |
is_concurrent = records[index+1]['RequestStartTime'] <= record['ResponseEndTime'] | |
header = "group Parallel\n" if not prev_concurent and is_concurrent else "" | |
footer = "end" if prev_concurent and not is_concurrent else "" | |
prev_record = record | |
prev_concurent = is_concurrent | |
document += f''' | |
{header} | |
"Client" -> "{domain}": {urlparse(record['URL']).path} {record['Method']} | |
Note over of "{domain}": {record['RequestStartTime']} takes {record['Duration']} ms | |
"Client" <-- "{domain}": {record['ResponseCode']} | |
{footer} | |
''' | |
document += "\r```\n</details>\n\n" | |
with open(output_filename, "w") as f: | |
f.write(document) | |
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment