Add stress-test scripts run_stress_code_js1.sh and stress_code_js1.py for concurrent load testing of the code_js1 API; each run generates a stress-test results JSON file containing request statistics, latency, and throughput metrics.
156 lines · 5.9 KiB · Python
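A typical invocation might look like this (the script path, endpoint URL, and payload below are assumed placeholders, not taken from the repository):

python3 scripts/stress_code_js1.py \
  --url http://localhost:8080/api/code_js1 \
  --total 2000 \
  --concurrency 100 \
  --body '{"input": "example"}' \
  --out-prefix scripts/results/code_js1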
#!/usr/bin/env python3
"""Concurrent stress-test runner for the code_js1 API: fires a batch of HTTP
requests through a thread pool and writes a JSON summary of request counts,
latency percentiles, and throughput."""

import argparse, json, time, os, concurrent.futures, statistics
from urllib import request
from urllib.error import HTTPError, URLError


def parse_headers(hstr: str):
    """Parse a JSON headers string into a dict; fall back to {} on bad input."""
    if not hstr:
        return {}
    try:
        return json.loads(hstr)
    except Exception:
        return {}
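
# Example (illustrative): parse_headers('{"X-Token": "abc"}') returns
# {'X-Token': 'abc'}; malformed input such as 'not-json' falls back to {}.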


def has_data(payload):
    """Return True if the response payload carries a non-empty 'data' field."""
    if 'data' not in payload or payload['data'] is None:
        return False
    v = payload['data']
    if isinstance(v, (list, dict)):
        return len(v) > 0
    if isinstance(v, str):
        return len(v.strip()) > 0
    return True
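
# Illustrative outcomes, derived from the checks above:
#   {'data': None} -> False      {'data': []} -> False
#   {'data': '  '} -> False      {'data': {'x': 1}} -> True
#   {'data': 0}    -> True  (values of any other type count as data)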


def one_request(idx, url, method, headers, body_bytes, timeout):
    """Issue one HTTP request; return a result dict with timing and status."""
    start = time.perf_counter()
    try:
        req = request.Request(url, data=body_bytes, headers=headers, method=method)
        with request.urlopen(req, timeout=timeout) as resp:
            body = resp.read()
        elapsed = (time.perf_counter() - start) * 1000.0
        try:
            payload = json.loads(body)
            return {
                'idx': idx,
                'ok': True,
                'elapsed_ms': elapsed,
                'has_data': has_data(payload),
                'status': resp.status,
                'size': len(body),
            }
        except Exception as je:
            return {
                'idx': idx,
                'ok': False,
                'elapsed_ms': elapsed,
                'error': f'invalid_json: {je}',
            }
    except HTTPError as e:
        elapsed = (time.perf_counter() - start) * 1000.0
        return {'idx': idx, 'ok': False, 'elapsed_ms': elapsed, 'error': f'HTTP {e.code}: {e.reason}'}
    except URLError as e:
        elapsed = (time.perf_counter() - start) * 1000.0
        return {'idx': idx, 'ok': False, 'elapsed_ms': elapsed, 'error': f'URL error: {e.reason}'}
    except Exception as e:
        elapsed = (time.perf_counter() - start) * 1000.0
        return {'idx': idx, 'ok': False, 'elapsed_ms': elapsed, 'error': str(e)}
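
# For reference, one_request returns dicts of roughly this shape (values illustrative):
#   success: {'idx': 1, 'ok': True, 'elapsed_ms': 12.3, 'has_data': True, 'status': 200, 'size': 512}
#   failure: {'idx': 2, 'ok': False, 'elapsed_ms': 5001.4, 'error': 'HTTP 502: Bad Gateway'}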


def main():
    parser = argparse.ArgumentParser(description='Simple concurrent stress test runner for code_js1 API')
    parser.add_argument('--url', required=True, help='Target URL')
    parser.add_argument('--method', default='POST', help='HTTP method, default POST')
    parser.add_argument('--total', type=int, default=2000, help='Total requests')
    parser.add_argument('--concurrency', type=int, default=100, help='Concurrent workers')
    parser.add_argument('--timeout', type=float, default=5.0, help='Per-request timeout in seconds')
    parser.add_argument('--headers', default='{"Content-Type":"application/json"}', help='JSON headers string')
    parser.add_argument('--body', default='{}', help='JSON body string (for POST)')
    parser.add_argument('--out-prefix', default='scripts/results/code_js1', help='Output file prefix (without extension)')

    args = parser.parse_args()

    headers = parse_headers(args.headers)
    try:
        body_obj = json.loads(args.body) if args.body else {}
    except Exception:
        body_obj = {}
    # GET requests carry no body; every other method gets the JSON payload.
    body_bytes = json.dumps(body_obj).encode('utf-8') if args.method.upper() != 'GET' else None

    print(f"Starting stress: url={args.url} method={args.method} total={args.total} concurrency={args.concurrency}")

    start_all = time.perf_counter()
    latencies = []       # per-request latency in ms, successful requests only
    errors = []          # failed requests with their error messages
    ok_count = 0
    has_data_count = 0
    resp_sizes = []
    statuses = {}        # HTTP status code -> count

    with concurrent.futures.ThreadPoolExecutor(max_workers=args.concurrency) as ex:
        futures = [ex.submit(one_request, i + 1, args.url, args.method, headers, body_bytes, args.timeout)
                   for i in range(args.total)]
        for fut in concurrent.futures.as_completed(futures):
            r = fut.result()
            if r.get('ok'):
                ok_count += 1
                latencies.append(r['elapsed_ms'])
                if r.get('has_data'):
                    has_data_count += 1
                resp_sizes.append(r.get('size', 0))
                statuses[r.get('status', 0)] = statuses.get(r.get('status', 0), 0) + 1
            else:
                errors.append({'idx': r['idx'], 'error': r.get('error'), 'elapsed_ms': r.get('elapsed_ms')})
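
    # Note: as_completed yields results in completion order, not submission
    # order; the 'idx' field preserves each request's original number.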

    dur_ms = (time.perf_counter() - start_all) * 1000.0
    # Throughput counts successful requests only, e.g. 1980 ok in 12500 ms -> 158.4 rps.
    throughput = ok_count / (dur_ms / 1000.0) if dur_ms > 0 else 0.0

    result = {
        'url': args.url,
        'method': args.method,
        'total': args.total,
        'concurrency': args.concurrency,
        'timeout_sec': args.timeout,
        'ok': ok_count,
        'errors': len(errors),
        'has_data': has_data_count,
        'duration_ms': dur_ms,
        'throughput_rps': throughput,
        'status_counts': statuses,
    }

    if latencies:
        lat_sorted = sorted(latencies)

        def pct(p):
            # Approximate nearest-rank percentile over the sorted latencies.
            idx = max(min(int(p * len(lat_sorted)) - 1, len(lat_sorted) - 1), 0)
            return lat_sorted[idx]
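
        # Worked example (illustrative): with 10 samples, pct(0.95) computes
        # int(0.95 * 10) - 1 = 8, i.e. the 9th sorted value, which is slightly
        # conservative versus strict nearest-rank when p * n is not an integer.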

        result['latency_ms'] = {
            'min': lat_sorted[0],
            'avg': statistics.mean(latencies),
            'max': lat_sorted[-1],
            'p50': pct(0.50),
            'p90': pct(0.90),
            'p95': pct(0.95),
            'p99': pct(0.99),
        }

    # Response-size stats are individually guarded, so this is safe even when
    # there were no successful requests.
    result['resp_size_bytes'] = {
        'avg': statistics.mean(resp_sizes) if resp_sizes else 0,
        'min': min(resp_sizes) if resp_sizes else 0,
        'max': max(resp_sizes) if resp_sizes else 0,
    }

    # Ensure the results directory exists before writing.
    out_dir = os.path.dirname(args.out_prefix)
    if out_dir and not os.path.exists(out_dir):
        os.makedirs(out_dir, exist_ok=True)

    # Timestamped output path, e.g. scripts/results/code_js1_20240101_120000.json
    ts = time.strftime('%Y%m%d_%H%M%S')
    json_path = f"{args.out_prefix}_{ts}.json"

    # Save the JSON summary only.
    with open(json_path, 'w', encoding='utf-8') as f:
        json.dump(result, f, ensure_ascii=False, indent=2)

    # Print the summary to stdout as well.
    print("\n=== Stress Test Summary ===")
    print(json.dumps(result, ensure_ascii=False, indent=2))
    print(f"Results saved to: {json_path}")


if __name__ == '__main__':
    main()
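
For reference, a run produces a summary JSON of roughly this shape (all values illustrative, not actual measurements; the URL is the same assumed placeholder as above):

{
  "url": "http://localhost:8080/api/code_js1",
  "method": "POST",
  "total": 2000,
  "concurrency": 100,
  "timeout_sec": 5.0,
  "ok": 1992,
  "errors": 8,
  "has_data": 1990,
  "duration_ms": 12873.4,
  "throughput_rps": 154.7,
  "status_counts": {"200": 1992},
  "latency_ms": {"min": 8.1, "avg": 62.3, "max": 941.0, "p50": 48.2, "p90": 120.5, "p95": 168.9, "p99": 402.7},
  "resp_size_bytes": {"avg": 512.4, "min": 128, "max": 2048}
}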