toulouse-maelis: [tools] read_family benchmark script (#81399)
This commit is contained in:
parent
898a14f821
commit
b497988bf5
|
@ -0,0 +1,154 @@
|
|||
#!/usr/bin/python3
|
||||
|
||||
import argparse
|
||||
import copy
|
||||
import functools
|
||||
import random
|
||||
import statistics
|
||||
import threading
|
||||
import time
|
||||
from multiprocessing import Lock, Pool, Process, Queue
|
||||
from multiprocessing.sharedctypes import Value
|
||||
|
||||
import utils
|
||||
|
||||
# Dossier used by the benchmark when no family number is given on the
# command line (see the positional `family` argument in __main__).
FAMILY_ID = '322423'  # NICO

# NOTE(review): range(330120, 33151) is empty (start > stop), so `duis`
# is always [] — the upper bound looks like a dropped digit (331510?);
# confirm before relying on this list. It is currently unused.
duis = list(map(str, range(330120, 33151)))

# Shared zeep client placeholder (populated per-worker at run time).
client = None

# utils.configure_logging(0)

# Module-wide client built once in __main__ when --reuse is passed.
_client = None
||||
def check(client, i, args):
    """Issue one readFamily SOAP request for args.family and return the response.

    `client` may be None/falsy, in which case a fresh zeep client is built
    for this single call (this is what the no---reuse benchmark measures);
    `i` is the worker index and is unused here.
    """
    if not client:
        client = utils.get_client(args.env, 'Family')
    return client.service.readFamily(
        dossierNumber=args.family,
        # schoolYear=
        # incomeYear=2020,  # <-- needed to get the quotients
        # referenceYear=2020,
    )
|
||||
|
||||
|
||||
if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument('--verbose', '-v', type=int, default=2, help='display errors')
    parser.add_argument('--env', '-e', default='integ', help='dev, test, integ, prod')
    parser.add_argument('--reuse', '-r', default=False, help='reuse zeep client', action='store_true')
    parser.add_argument('--number', '-n', type=int, default=300, help='number of requests')
    parser.add_argument('--concurrency', '-c', type=int, default=100, help='number of parallel processes')
    parser.add_argument('family', help=FAMILY_ID, nargs='?', default=FAMILY_ID)
    args = parser.parse_args()

    # Build one shared zeep client up front when --reuse is on; otherwise
    # each worker (or each request) builds its own.
    if args.reuse:
        _client = utils.get_client(args.env, 'Family')

    done = 0
    count = args.number
    concurrency = args.concurrency
    errors = 0
    error_types = set()
    durations = []

    # Used by the thread-based runner only: release all workers at once so
    # the timing window starts when every thread is ready.
    barrier = threading.Barrier(concurrency + 1)
    done_lock = threading.Lock()

    def f(i):
        """Thread worker: claim request slots until `count` requests are done,
        recording per-request durations and error reprs in the shared lists."""
        global done, durations, errors

        __client = None
        if args.reuse:
            __client = _client or utils.get_client(args.env, 'Family')
        barrier.wait()

        while done < count:
            # Re-check under the lock: several threads can race past the
            # unlocked `while` test, and we must not overshoot `count`.
            with done_lock:
                if done >= count:
                    break
                done += 1

            try:
                start = time.time()
                check(__client, i, args)
                durations.append(time.time() - start)
            except Exception as e:
                error_types.add(repr(e))
                errors += 1

    # True: one OS process per worker (the default). False: the older
    # thread-based runner above, kept for comparison.
    use_processes = True

    if use_processes:
        done_value = Value('i', 0, lock=True)
        result_queue = Queue(count)

        def target(result_queue, done_value):
            """Process worker: claim request slots from the shared counter and
            push (ok, duration-or-error-repr) tuples onto the result queue."""
            _client = None
            if args.reuse:
                _client = utils.get_client(args.env, 'Family')
            while done_value.value < count:
                # Same claim-then-recheck pattern as the thread worker, but
                # on the cross-process shared counter.
                with done_value.get_lock():
                    if done_value.value >= count:
                        break
                    done_value.value += 1
                try:
                    start = time.time()
                    # Without --reuse a fresh client is built per request, so
                    # its construction cost is part of the measured duration.
                    check(_client or utils.get_client(args.env, 'Family'), i, args)
                    result_queue.put((True, time.time() - start))
                except Exception as e:
                    result_queue.put((False, repr(e)))

        begin = time.time()
        processes = []
        for i in range(concurrency):
            processes.append(Process(target=target, args=(result_queue, done_value)))
            processes[-1].start()

        # Drain one result per completed request; the parent aggregates.
        while done < count:
            ok, value = result_queue.get()
            done += 1
            print('Done %05d' % done, end='\r')
            if ok:
                durations.append(value)
            else:
                errors += 1
                error_types.add(value)
        print('Done %05d' % done, end='\r')
        print()
        for process in processes:
            process.join()

    else:
        threads = [threading.Thread(target=f, args=(i,)) for i in range(concurrency)]

        for thread in threads:
            thread.start()

        # Release every worker simultaneously, then start the clock.
        barrier.wait()
        begin = time.time()

        while done != count:
            print('Done %05d' % done, end='\r')
            time.sleep(0.5)
        print('Done %05d' % done, end='\r')

        for thread in threads:
            thread.join()

        print()

    print('Number of requests', count)
    print('Concurrency', concurrency)
    print('Errors', errors, 'on', count, 'types: ', list(error_types))
    print('RPS', float(count - errors) / (time.time() - begin))
    # Guard: when every request failed there are no timings to summarise
    # (min/max/fmean raise on an empty sequence, quantiles needs >= 2 points).
    if durations:
        print('Min', min(durations))
        print('Max', max(durations))
        print('Average', statistics.fmean(durations))
        if len(durations) > 1:
            print('Quantiles', statistics.quantiles(durations, n=10))
|
|
@ -8,6 +8,7 @@ import requests
|
|||
import zeep
|
||||
from django.core.serializers.json import DjangoJSONEncoder
|
||||
from lxml import etree
|
||||
from zeep.cache import InMemoryCache
|
||||
from zeep.transports import Transport
|
||||
from zeep.wsse.username import UsernameToken
|
||||
|
||||
|
@ -105,6 +106,9 @@ def get_wsdl_url(env, service):
|
|||
return config['url'] + 'services/' + service.title() + 'Service?wsdl'
|
||||
|
||||
|
||||
zeep_cache = InMemoryCache()
|
||||
|
||||
|
||||
def get_client(env, service):
|
||||
config = load_config(env)
|
||||
settings = zeep.Settings(strict=False, xsd_ignore_sequence_order=True)
|
||||
|
@ -112,9 +116,8 @@ def get_client(env, service):
|
|||
session = requests.Session()
|
||||
session.verify = config['session_verify']
|
||||
wsdl_url = get_wsdl_url(env, service)
|
||||
print(wsdl_url)
|
||||
|
||||
transport = Transport(session=session)
|
||||
transport = Transport(session=session, cache=zeep_cache)
|
||||
client = zeep.Client(wsdl_url, transport=transport, wsse=wsse, settings=settings)
|
||||
return client
|
||||
|
||||
|
|
Loading…
Reference in New Issue