toulouse-maelis: [tools] connector benchmark script (#81399)

Nicolas Roche 2023-09-23 15:00:22 +02:00 committed by Nicolas Roche
parent bac28e933c
commit 441ac49c58
1 changed file with 167 additions and 0 deletions


@@ -0,0 +1,167 @@
#!/usr/bin/python3
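"""Benchmark a toulouse-maelis connector instance with concurrent requests.

Worker processes repeatedly call the web service selected with --test and the
main process reports error counts, requests per second and latency statistics.

Example invocation (script name and identifiers are illustrative):
    ./benchmark.py -n 300 -c 100 -t read-family 322423
"""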
import argparse
import copy
import functools
import random
import statistics
import threading
import time
from multiprocessing import Lock, Pool, Process, Queue
from multiprocessing.sharedctypes import Value
import requests
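
# Target connector instance and default identifiers used by the endpoints below.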
# CONN = 'https://parsifal-passerelle.dev.publik.love/toulouse-maelis/integ-toulouse'
CONN = 'https://passerelle-parsifal.test.entrouvert.org/toulouse-maelis/test'
APIKEY = 'nicolas'
FAMILY_ID = '322423' # NICO TEST / UDAVE INTEG
PERSON_ID = '176658' # INTEG
duis = [str(i) for i in range(330120, 33151)]  # unused; note the range is empty (start > stop)
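

# Build the URL (and JSON payload for POST endpoints) of the web service selected with --test.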
def get_endpoint(args):
    payload = None
    if args.test == 'read-family':
        url = args.conn + '/read-family?family_id=%s' % args.family
    elif args.test == 'search-family':
        url = args.conn + '/search-family?q=%s' % args.query
    elif args.test == 'update-family':
        payload = {
            'category': 'BI',
            'situation': 'VIEM',
            'nbChild': '3',
            'nbTotalChild': '4',
            'nbAES': '1',
        }
        url = args.conn + '/update-family?family_id=%s' % args.family
    elif args.test == 'person-catalog':
        url = (
            args.conn
            + '/get-person-activity-list?family_id=%s&person_id=%s&start_date=2022-09-01&end_date=2023-08-31'
            % (
                args.family,
                args.person,
            )
        )
    elif args.test == 'global-catalog':
        url = args.conn + '/read-activity-list?ref_date=2023-01-01'
    else:
        raise Exception('unknown test')
    url += '&apikey=%s' % APIKEY
    return url, payload
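

# Perform one request against the selected endpoint and raise on transport
# errors or on a connector-level error (non-zero 'err' in the JSON response).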
def check(i, args):
    url, payload = get_endpoint(args)
    if args.test == 'read-family':
        resp = requests.get(url)
    elif args.test == 'search-family':
        resp = requests.get(url)
    elif args.test == 'update-family':
        resp = requests.post(url, json=payload)
    elif args.test == 'person-catalog':
        resp = requests.get(url)
    elif args.test == 'global-catalog':
        resp = requests.get(url)
    else:
        raise Exception('unknown test')
    resp.raise_for_status()
    res = resp.json()
    if res['err']:
        raise Exception('API error: %s' % res['err_desc'])
    return res
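

# Command-line entry point: parse options, run the benchmark and print summary statistics.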
if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument('--verbose', '-v', type=int, default=2, help='display errors')
    parser.add_argument('--conn', '-e', default=CONN, help='URL of the local instance of the maelis connector')
    parser.add_argument('--number', '-n', type=int, default=300, help='number of requests')
    parser.add_argument('--concurrency', '-c', type=int, default=100, help='number of parallel processes')
    parser.add_argument('--test', '-t', default='read-family', help='WS to test')
    parser.add_argument('family', help='family id', nargs='?', default=FAMILY_ID)
    parser.add_argument(
        'query', help='full-text search query (more or less)', nargs='?', default='SIMP'
    )
    parser.add_argument('--person', '-P', default=PERSON_ID, help='person id')
    args = parser.parse_args()

    done = 0
    count = args.number
    concurrency = args.concurrency
    errors = 0
    error_types = set()
    durations = []

    barrier = threading.Barrier(concurrency + 1)
    done_lock = threading.Lock()
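
    # Thread-based worker; defined but never called below (the process-based
    # 'target' worker is used instead).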
    def f(i):
        global done, durations, errors
        barrier.wait()
        while done < count:
            with done_lock:
                if done >= count:
                    break
                current_done = done
                done += 1
            try:
                start = time.time()
                check(i, args)
                duration = time.time() - start
                durations.append(duration)
            except Exception as e:
                error_types.add(repr(e))
                errors += 1
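
    # Shared state for the worker processes: a locked counter of claimed requests
    # and a queue on which each worker reports a (success, duration or error) tuple.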
    done_value = Value('i', 0, lock=True)
    result_queue = Queue(count)
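
    # Worker run in each subprocess: claim the next request under the counter's lock,
    # time one call to check() and push the outcome on the result queue; it relies on
    # the 'fork' start method so that 'i', 'args' and 'count' are inherited from the parent.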
    def target(result_queue, done_value):
        while done_value.value < count:
            with done_value.get_lock():
                if done_value.value >= count:
                    break
                done_value.value += 1
            try:
                start = time.time()
                check(i, args)
                duration = time.time() - start
                result_queue.put((True, duration))
            except Exception as e:
                result_queue.put((False, repr(e)))
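
    # Launch the workers, then drain the result queue until every request is accounted for.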
    begin = time.time()
    processes = []
    for i in range(concurrency):
        processes.append(Process(target=target, args=(result_queue, done_value)))
        processes[-1].start()

    while done < count:
        ok, value = result_queue.get()
        done += 1
        print('Done %05d' % done, end='\r')
        if ok:
            durations.append(value)
        else:
            errors += 1
            error_types.add(value)
    print('Done %05d' % done, end='\r')
    print()

    for process in processes:
        process.join()
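
    # Summary: error count, throughput and latency distribution of successful requests.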
    print('Number of requests', count)
    print('Concurrency', concurrency)
    print('Errors', errors, 'on', count, 'types: ', list(error_types))
    print('RPS', float(count - errors) / (time.time() - begin))
    print('Min', min(durations))
    print('Max', max(durations))
    print('Average', statistics.fmean(durations))
    print('Quantiles', statistics.quantiles(durations, n=10))