misc: fix consider-using-with pylint error (#62099)

Lauréline Guérin 2022-03-17 17:56:30 +01:00
parent 1e90f91e99
commit 3dbee7bc63
19 changed files with 383 additions and 346 deletions
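
Pylint's consider-using-with check flags calls such as open() or zipfile.ZipFile() whose result is never tied to a with statement, since the underlying handle can stay open (or only be closed by the garbage collector) if an exception interrupts the code before an explicit close(). Every hunk below applies the same fix; a minimal sketch of the pattern, with an illustrative file name rather than one from the codebase:

    import json

    # before: the file object is closed only whenever the garbage collector runs
    # payload = json.load(open('formdata.json'))

    # after: the with statement closes the file even if json.load() raises
    with open('formdata.json') as fd:
        payload = json.load(fd)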

View File

@@ -799,7 +799,7 @@ class AbstractCartaDSCS(BaseResource):
     def pack(self, dossier_id):
         dossier = CartaDSDossier.objects.get(id=dossier_id)
         zip_filename = os.path.join(default_storage.path('cartads_cs'), '%s.zip' % dossier.tracking_code)
-        zip_file = zipfile.ZipFile(zip_filename, mode='w')
+        with zipfile.ZipFile(zip_filename, mode='w') as zip_file:
             liste_pdf = self.liste_pdf(None, dossier.type_dossier_id)
             cerfa_id = liste_pdf['data'][0]['id']
             for cerfa in liste_pdf['data']:
@@ -808,7 +808,9 @@ class AbstractCartaDSCS(BaseResource):
                     cerfa_id = cerfa['id']
                     break
             cerfa_id = cerfa_id.replace('*', '-')
-        pieces = self.pieces(None, dossier.type_dossier_id, dossier.objet_demande_id, dossier.tracking_code)
+            pieces = self.pieces(
+                None, dossier.type_dossier_id, dossier.objet_demande_id, dossier.tracking_code
+            )
             for piece in pieces['data']:
                 cnt = 1
                 for file in piece['files']:
@@ -834,7 +836,6 @@ class AbstractCartaDSCS(BaseResource):
                         ),
                     )
                     cnt += 1
-        zip_file.close()
         dossier.zip_ready = True
         dossier.save()
         self.add_job('send_to_cartads', dossier_id=dossier.id)
@@ -843,7 +844,8 @@ class AbstractCartaDSCS(BaseResource):
         ftp = FTP(self.ftp_server)
         ftp.login(self.ftp_username, self.ftp_password)
         ftp.cwd(self.ftp_client_name)
-        ftp.storbinary('STOR %s' % os.path.basename(zip_filename), open(zip_filename, 'rb'))
+        with open(zip_filename, 'rb') as fd:
+            ftp.storbinary('STOR %s' % os.path.basename(zip_filename), fd)
         ftp.quit()

     def send_to_cartads(self, dossier_id):

View File

@@ -214,9 +214,11 @@ class Cryptor(BaseResource):
         if not os.path.exists(metadata_filename):
             raise APIError('unknown uuid', http_status=404)

-        content = read_decrypt(open(content_filename, 'rb'), self.private_key)
+        with open(content_filename, 'rb') as fd:
+            content = read_decrypt(fd, self.private_key)

-        metadata = json.load(open(metadata_filename, 'r'))
+        with open(metadata_filename, 'r') as fd:
+            metadata = json.load(fd)

         filename = metadata.get('filename')
         content_type = metadata.get('content_type')

View File

@@ -50,14 +50,15 @@ class Command(BaseCommand):
         storage = DefaultStorage()
         lock_filename = storage.path('family-%s/import-orleans-data.lock' % connector.id)
         try:
-            fd = open(lock_filename, 'w')
+            fd = open(lock_filename, 'w')  # pylint: disable=consider-using-with
             fcntl.lockf(fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
         except IOError:
             raise CommandError('Command already running.')

         try:
             archive_name = os.path.basename(options['archive_file'])
-            connector.archive.save(archive_name, File(open(options['archive_file'], 'rb')))
+            with open(options['archive_file'], 'rb') as archive_fd:
+                connector.archive.save(archive_name, File(archive_fd))
         except Exception as e:
             raise CommandError('Error occured: %s' % e)
         finally:
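
The lock file above is the one spot where the warning is silenced rather than fixed: fcntl.lockf() keeps the lock only while that descriptor stays open, so wrapping the open() in a narrow with block would release the lock before the import finishes. A rough sketch of that pattern, using an illustrative path rather than the connector's real lock file:

    import fcntl

    # keep the descriptor open on purpose: closing it would release the lock
    lock_fd = open('/tmp/import-data.lock', 'w')  # pylint: disable=consider-using-with
    try:
        fcntl.lockf(lock_fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
    except IOError:
        raise SystemExit('Command already running.')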

View File

@@ -174,14 +174,14 @@ class GenericFamily(BaseResource):
     def clean(self):
         if self.archive:
             try:
-                archive = zipfile.ZipFile(self.archive)
-            except zipfile.BadZipfile:
-                raise ValidationError(_('Invalid zip file.'))
+                with zipfile.ZipFile(self.archive) as archive:
                     if self.file_format != 'native':
                         modname = 'passerelle.apps.family.loaders.%s' % self.file_format
                         __import__(modname)
                         module = sys.modules[modname]
                         module.Loader(self).clean(archive)
+            except zipfile.BadZipfile:
+                raise ValidationError(_('Invalid zip file.'))
         return super().clean()
@@ -195,7 +195,7 @@ class GenericFamily(BaseResource):
         if not os.path.exists(invoices_dir):
             os.makedirs(invoices_dir)

-        archive = zipfile.ZipFile(self.archive.path)
+        with zipfile.ZipFile(self.archive.path) as archive:
             if self.file_format != 'native':
                 modname = 'passerelle.apps.family.loaders.%s' % self.file_format
                 __import__(modname)
@@ -526,7 +526,8 @@ class Invoice(models.Model):
         if not self.has_pdf:
             raise Http404(_('PDF file not found'))

-        response = HttpResponse(open(self.pdf_filename(), 'rb').read(), content_type='application/pdf')
+        with open(self.pdf_filename(), 'rb') as fd:
+            response = HttpResponse(fd.read(), content_type='application/pdf')
         response['Content-Disposition'] = 'attachment; filename=%s.pdf' % self.external_id
         return response

View File

@@ -232,9 +232,7 @@ class Resource(BaseResource):
     def process(self, fd):
         try:
-            archive = zipfile.ZipFile(fd)
-        except Exception:
-            return False, 'could not load zipfile'
+            with zipfile.ZipFile(fd) as archive:
                 # sort files
                 doc_files = []
                 ent_files = []
@@ -262,7 +260,9 @@ class Resource(BaseResource):
                     fichier = FICHIER_DONNEES_XPATH(doc_node)[0].text
                     attachments.setdefault(code, []).append(fichier)

-        doc_files = [value for l in attachments.values() for value in l if value.lower().endswith('.xml')]
+                doc_files = [
+                    value for l in attachments.values() for value in l if value.lower().endswith('.xml')
+                ]
                 if len(doc_files) != 1:
                     return False, 'too many/few doc files found: %s' % doc_files
@@ -302,6 +302,8 @@ class Resource(BaseResource):
                 data.update(self.extract_data(document))
                 if hasattr(self, 'update_data_%s' % self.procedure):
                     getattr(self, 'update_data_%s' % self.procedure)(data)
+        except zipfile.BadZipfile:
+            return False, 'could not load zipfile'
         return data, None

     def transfer(self, data):

View File

@@ -17,7 +17,8 @@ class Command(BaseCommand):
     def handle(self, *args, **options):
         if options['output']:
-            output = open(options['output'], 'w')
-        else:
+            with open(options['output'], 'w') as output:
+                json.dump(export_site(slugs=options['slugs']), output, indent=4)
+            return
         output = sys.stdout
         json.dump(export_site(slugs=options['slugs']), output, indent=4)

View File

@@ -244,8 +244,6 @@ class ZipTemplate(object):

 def diff_zip(one, two):
-    differences = []
-
     def compute_diff(one, two, fd_one, fd_two):
         content_one = fd_one.read()
         content_two = fd_two.read()
@@ -257,14 +255,9 @@ def diff_zip(one, two):
             return ['File %s differs' % one] + diff
         return 'File %s differs' % one

-    if not hasattr(one, 'read'):
-        one = open(one, mode='rb')
-    with one:
-        if not hasattr(two, 'read'):
-            two = open(two, 'rb')
-        with two:
-            with zipfile.ZipFile(one) as one_zip:
-                with zipfile.ZipFile(two) as two_zip:
+    def run(one, two):
+        differences = []
+        with zipfile.ZipFile(one) as one_zip, zipfile.ZipFile(two) as two_zip:
             one_nl = set(one_zip.namelist())
             two_nl = set(two_zip.namelist())
             for name in one_nl - two_nl:
@@ -284,5 +277,18 @@ def diff_zip(one, two):
             two_zip_files = [zi.filename for zi in two_zip.infolist()]
             if one_zip_files != two_zip_files:
                 differences.append('Files are not in the same order')
         return differences

+    if not hasattr(one, 'read'):
+        with open(one, mode='rb') as one:
+            if not hasattr(two, 'read'):
+                with open(two, 'rb') as two:
+                    return run(one, two)
+            with two:
+                return run(one, two)
+    with one:
+        if not hasattr(two, 'read'):
+            with open(two, 'rb') as two:
+                return run(one, two)
+        with two:
+            return run(one, two)

View File

@@ -20,9 +20,8 @@ def genesys(db):


 @pytest.fixture
 def mock_codifications_ok():
-    response = open(
-        os.path.join(os.path.dirname(__file__), 'data', 'genesys_select_codifications.xml')
-    ).read()
+    with open(os.path.join(os.path.dirname(__file__), 'data', 'genesys_select_codifications.xml')) as fd:
+        response = fd.read()
     with tests.utils.mock_url(FAKE_URL, response) as mock:
         yield mock
@@ -143,9 +142,8 @@ def test_ws_link_created(app, genesys):
     assert Link.objects.count() == 0


-RESPONSE_SELECT_USAGER = open(
-    os.path.join(os.path.dirname(__file__), 'data', 'genesys_select_usager.xml')
-).read()
+with open(os.path.join(os.path.dirname(__file__), 'data', 'genesys_select_usager.xml')) as fd:
+    RESPONSE_SELECT_USAGER = fd.read()


 def test_ws_dossiers(app, genesys):

View File

@@ -290,11 +290,13 @@ def test_pieces_management(connector, app, cached_data):
     resp = app.post(data[0]['files'][0]['url'], upload_files=[('files[]', 'test.pdf', b'%PDF...')])
     assert resp.json == [{'error': 'The CERFA should be a PDF file.'}]

-    pdf_contents = open(os.path.join(os.path.dirname(__file__), 'data', 'minimal.pdf'), 'rb').read()
+    with open(os.path.join(os.path.dirname(__file__), 'data', 'minimal.pdf'), 'rb') as fd:
+        pdf_contents = fd.read()
     resp = app.post(data[0]['files'][0]['url'], upload_files=[('files[]', 'test.pdf', pdf_contents)])
     assert resp.json == [{'error': 'The CERFA should not be a scanned document.'}]

-    pdf_contents = open(os.path.join(os.path.dirname(__file__), 'data', 'pdf-form.pdf'), 'rb').read()
+    with open(os.path.join(os.path.dirname(__file__), 'data', 'pdf-form.pdf'), 'rb') as fd:
+        pdf_contents = fd.read()
     resp = app.post(data[0]['files'][0]['url'], upload_files=[('files[]', 'test.pdf', pdf_contents)])
     cerfa_token = resp.json[0]['token']
@@ -554,7 +556,8 @@ def test_doc_pieces_management(connector, app, cached_data):
     assert len(data[0]['files']) == 1
     assert list(data[0]['files'][0].keys()) == ['url']

-    pdf_contents = open(os.path.join(os.path.dirname(__file__), 'data', 'pdf-form.pdf'), 'rb').read()
+    with open(os.path.join(os.path.dirname(__file__), 'data', 'pdf-form.pdf'), 'rb') as fd:
+        pdf_contents = fd.read()
     resp = app.post(data[0]['files'][0]['url'], upload_files=[('files[]', 'test.pdf', pdf_contents)])
     assert resp.json[0]['token']
     assert CartaDSFile.objects.filter(tracking_code=dossier.tracking_code, sent_to_cartads=None).count() == 1
@@ -594,12 +597,14 @@ def test_daact_pieces_management(connector, app, cached_data):
         assert len(piece['files']) == 1
         assert list(piece['files'][0].keys()) == ['url']

-    pdf_contents = open(os.path.join(os.path.dirname(__file__), 'data', 'pdf-form.pdf'), 'rb').read()
+    with open(os.path.join(os.path.dirname(__file__), 'data', 'pdf-form.pdf'), 'rb') as fd:
+        pdf_contents = fd.read()
     resp = app.post(data[0]['files'][0]['url'], upload_files=[('files[]', 'test.pdf', pdf_contents)])
     assert resp.json[0]['token']
     assert CartaDSFile.objects.filter(tracking_code=dossier.tracking_code, sent_to_cartads=None).count() == 1

-    pdf_contents = open(os.path.join(os.path.dirname(__file__), 'data', 'pdf-form.pdf'), 'rb').read()
+    with open(os.path.join(os.path.dirname(__file__), 'data', 'pdf-form.pdf'), 'rb') as fd:
+        pdf_contents = fd.read()
     resp = app.post(data[1]['files'][0]['url'], upload_files=[('files[]', 'test.pdf', pdf_contents)])
     assert resp.json[0]['token']
     assert CartaDSFile.objects.filter(tracking_code=dossier.tracking_code, sent_to_cartads=None).count() == 2

View File

@@ -60,9 +60,11 @@ def payload(request):


 def assert_xml_doc(filename, assertions):
-    schema = etree.XMLSchema(etree.parse(open(os.path.join(get_test_base_dir('cityweb'), 'cityweb.xsd'))))
+    with open(os.path.join(get_test_base_dir('cityweb'), 'cityweb.xsd')) as fd:
+        schema = etree.XMLSchema(etree.parse(fd))

-    content = open(filename).read()
+    with open(filename) as fd:
+        content = fd.read()
     xml_content = etree.fromstring(content)

     assert len(xml_content.nsmap) == 1
     assert xml_content.nsmap['xs'] == "http://tempuri.org/XMLSchema.xsd"

View File

@@ -529,15 +529,18 @@ def test_raw_uploadfile(mocked_request, app, setup, debug, caplog):
         """simulate the 3 (ordered) HTTP queries involved"""
         response = {'status': '200'}
         if method == 'GET' and uri == 'http://example.com/cmisatom':
-            content = open('%s/tests/data/cmis/cmis1.out.xml' % os.getcwd(), 'rb').read()
+            with open('%s/tests/data/cmis/cmis1.out.xml' % os.getcwd(), 'rb') as fd:
+                content = fd.read()
         elif (
             method == 'GET'
             and uri
             == 'http://example.com/cmisatom/test/path?path=/test-eo&filter=&includeAllowableActions=false&includeACL=false&includePolicyIds=false&includeRelationships=&renditionFilter='
         ):
-            content = open('%s/tests/data/cmis/cmis2.out.xml' % os.getcwd(), 'rb').read()
+            with open('%s/tests/data/cmis/cmis2.out.xml' % os.getcwd(), 'rb') as fd:
+                content = fd.read()
         elif method == 'POST' and uri == 'http://example.com/cmisatom/test/children?id=L3Rlc3QtZW8%3D':
-            expected_input = open('%s/tests/data/cmis/cmis3.in.xml' % os.getcwd(), 'r').read()
+            with open('%s/tests/data/cmis/cmis3.in.xml' % os.getcwd(), 'r') as fd:
+                expected_input = fd.read()
             expected_input = expected_input.replace('\n', '')
             expected_input = re.sub('> *<', '><', expected_input)
             input1 = ET.tostring(ET.XML(expected_input))
@@ -556,7 +559,8 @@ def test_raw_uploadfile(mocked_request, app, setup, debug, caplog):
             if input1 != input2:
                 raise Exception('expect [[%s]] but get [[%s]]' % (body, expected_input))

-            content = open('%s/tests/data/cmis/cmis3.out.xml' % os.getcwd(), 'rb').read()
+            with open('%s/tests/data/cmis/cmis3.out.xml' % os.getcwd(), 'rb') as fd:
+                content = fd.read()
         else:
             raise Exception('my fault error, url is not yet mocked: %s' % uri)
         return (response, content)

View File

@@ -375,7 +375,8 @@ def test_fondettes_concerto_validation():
         resource.clean()

     filepath = os.path.join(os.path.dirname(__file__), 'data', 'family_data.zip')
-    resource.archive = File(open(filepath), 'family_data.zip')
+    with open(filepath) as fd:
+        resource.archive = File(fd, 'family_data.zip')
     with pytest.raises(ValidationError):
         resource.clean()
@@ -384,18 +385,21 @@ def test_orleans_concerto_loader():
     # all related objects will also be deleted
     Family.objects.all().delete()
     filepath = os.path.join(os.path.dirname(__file__), 'data', 'orleans', 'family_data_orleans.zip')
+    with open(filepath, 'rb') as fd:
         resource = GenericFamily(
             title='test orleans',
             slug='test-orleans',
-        archive=File(open(filepath, 'rb'), 'family_data_orleans.zip'),
+            archive=File(fd, 'family_data_orleans.zip'),
             file_format='concerto_orleans',
         )

     from passerelle.apps.family.loaders.concerto_orleans import Loader

     loader = Loader(resource)
-    loader.archive = zipfile.ZipFile(filepath)
+    with zipfile.ZipFile(filepath) as z:
+        loader.archive = z
         families = loader.build_families()

     assert len(families) == 18
     for family in families.values():
         assert family['external_id']
@@ -510,10 +514,11 @@ def test_family_pending_invoices_by_nameid():

 def test_incorrect_orleans_data(caplog):
     filepath = os.path.join(os.path.dirname(__file__), 'data', 'family_incorrect_data_orleans.zip')
+    with open(filepath, 'rb') as fd:
         GenericFamily.objects.create(
             title='test orleans',
             slug='test-orleans',
-        archive=File(open(filepath, 'rb'), 'family_incorrect_data_orleans.zip'),
+            archive=File(fd, 'family_incorrect_data_orleans.zip'),
             file_format='concerto_orleans',
         )
     for record in caplog.records:

View File

@@ -56,7 +56,8 @@ DEMAND_STATUS = {'closed': True, 'status': 'accepted', 'comment': 'dossier trait
 @mock.patch('passerelle.apps.mdel.models.Demand.create_zip', lambda x, y: '1-14-ILE-LA')
 def test_generic_payload_logging(caplog, app, mdel):
     filename = os.path.join(os.path.dirname(__file__), 'data', 'mdel', 'formdata.json')
-    payload = json.load(open(filename))
+    with open(filename) as fd:
+        payload = json.load(fd)

     resp = app.post_json('/mdel/test/create', params=payload, status=200)
     assert resp.json['data']['demand_id'] == '1-14-ILE-LA'
@@ -82,7 +83,8 @@ def test_generic_payload_logging(caplog, app, mdel):

 @mock.patch('passerelle.utils.Request.get')
 def test_proxy_logger(mocked_get, caplog, app, arcgis):
-    payload = open(os.path.join(os.path.dirname(__file__), 'data', 'nancy_arcgis', 'sigresponse.json')).read()
+    with open(os.path.join(os.path.dirname(__file__), 'data', 'nancy_arcgis', 'sigresponse.json')) as fd:
+        payload = fd.read()
     mocked_get.return_value = tests.utils.FakedResponse(content=payload, status_code=200)

     # simple logger
@@ -160,7 +162,8 @@ def test_proxy_logger(mocked_get, caplog, app, arcgis):

 @mock.patch('requests.Session.send')
 def test_proxy_logger_transaction_id(mocked_send, app, arcgis):
-    payload = open(os.path.join(os.path.dirname(__file__), 'data', 'nancy_arcgis', 'sigresponse.json')).read()
+    with open(os.path.join(os.path.dirname(__file__), 'data', 'nancy_arcgis', 'sigresponse.json')) as fd:
+        payload = fd.read()
     mocked_send.return_value = tests.utils.FakedResponse(content=payload, status_code=200)
     arcgis.log_evel = 'DEBUG'
     arcgis.base_url = 'https://example.net/'
@@ -936,7 +939,8 @@ def test_generic_up_in_endpoints_infos(db, app, connector_class, expected):
 def test_generic_endpoint_superuser_access(db, app, admin_user, simple_user):
     MDEL.objects.create(slug='test')
     filename = os.path.join(os.path.dirname(__file__), 'data', 'mdel', 'formdata.json')
-    payload = json.load(open(filename))
+    with open(filename) as fd:
+        payload = json.load(fd)

     app = login(app, username='user', password='user')
     resp = app.post_json('/mdel/test/create', params=payload, status=403)

View File

@@ -127,7 +127,8 @@ def test_contact_mode_typologies_list_with_invalid_xml(app, setup):
     with mock.patch('passerelle.utils.Request.post') as request_post:
         response = mock.Mock()
         types_filename = os.path.join(os.path.dirname(__file__), 'data', 'grenoble_gru_typologies.xml')
-        types = open(types_filename).read()
+        with open(types_filename) as fd:
+            types = fd.read()
         response.content = types.replace('Courrier', 'Courrier & autres')
         request_post.return_value = response
         endpoint = reverse(
@@ -145,7 +146,8 @@ def test_contact_mode_typologies_list(app, setup):
     with mock.patch('passerelle.utils.Request.post') as request_post:
         response = mock.Mock()
         types_filename = os.path.join(os.path.dirname(__file__), 'data', 'grenoble_gru_typologies.xml')
-        response.content = open(types_filename).read()
+        with open(types_filename) as fd:
+            response.content = fd.read()
         request_post.return_value = response
         endpoint = reverse(
             'generic-endpoint',
@@ -171,7 +173,8 @@ def test_contact_mode_typologies_list(app, setup):

 def get_typo_response():
     types_filename = os.path.join(os.path.dirname(__file__), 'data', 'grenoble_gru_typologies.xml')
-    types = open(types_filename).read()
+    with open(types_filename) as fd:
+        types = fd.read()
     typo_response = mock.Mock()
     typo_response.content = types
     return typo_response
@@ -357,9 +360,8 @@ def test_get_pavs(app, setup):
     with mock.patch('passerelle.utils.Request.post') as request_post:
         response = mock.Mock()
         json_response = mock.Mock()
-        json_response.return_value = json.load(
-            open(os.path.join(os.path.dirname(__file__), 'data', 'grenoble_gru_pavs.json'))
-        )
+        with open(os.path.join(os.path.dirname(__file__), 'data', 'grenoble_gru_pavs.json')) as fd:
+            json_response.return_value = json.load(fd)
         response.json = json_response
         request_post.return_value = response
         response = app.get(

View File

@@ -200,7 +200,8 @@ def test_export_to_file(app, setup, filetype):
     assert Bdp.objects.count() == 1
     bdp.delete()
     assert Bdp.objects.count() == 0
-    import_site(json.load(open(f.name)), overwrite=True)
+    with open(f.name) as fd:
+        import_site(json.load(fd), overwrite=True)
     assert Bdp.objects.count() == 1

View File

@@ -73,7 +73,8 @@ def iph_mocked_get(url, params=None, **kwargs):
     else:
         raise Exception('my fault error, url is not yet mocked: %s' % url)

-    response._content = open(target_file, 'rb').read()
+    with open(target_file, 'rb') as fd:
+        response._content = fd.read()
     response.status_code = 200
     return response
@@ -346,9 +347,8 @@ def test_get_file(mocked_post, mocked_get, app, conn):
     response = Response()
     response.status_code = 200
-    soap_response = open(
-        os.path.join(os.path.dirname(__file__), 'data/iparapheur_get_file_response.xml'), 'rb'
-    ).read()
+    with open(os.path.join(os.path.dirname(__file__), 'data/iparapheur_get_file_response.xml'), 'rb') as fd:
+        soap_response = fd.read()
     response._content = force_bytes(soap_response)
     mocked_post.return_value = response
@@ -398,9 +398,8 @@ def test_get_file_invalid_appendix(mocked_post, mocked_get, app, conn):
     response = Response()
     response.status_code = 200
-    soap_response = open(
-        os.path.join(os.path.dirname(__file__), 'data/iparapheur_get_file_response.xml'), 'rb'
-    ).read()
+    with open(os.path.join(os.path.dirname(__file__), 'data/iparapheur_get_file_response.xml'), 'rb') as fd:
+        soap_response = fd.read()
     response._content = soap_response
     mocked_post.return_value = response
@@ -421,9 +420,8 @@ def test_get_file_not_found_appendix(mocked_post, mocked_get, app, conn):
     response = Response()
     response.status_code = 200
-    soap_response = open(
-        os.path.join(os.path.dirname(__file__), 'data/iparapheur_get_file_response.xml'), 'rb'
-    ).read()
+    with open(os.path.join(os.path.dirname(__file__), 'data/iparapheur_get_file_response.xml'), 'rb') as fd:
+        soap_response = fd.read()
     response._content = soap_response
     mocked_post.return_value = response
@@ -444,9 +442,8 @@ def test_get_file_appendix(mocked_post, mocked_get, app, conn):
     response = Response()
     response.status_code = 200
-    soap_response = open(
-        os.path.join(os.path.dirname(__file__), 'data/iparapheur_get_file_response.xml'), 'rb'
-    ).read()
+    with open(os.path.join(os.path.dirname(__file__), 'data/iparapheur_get_file_response.xml'), 'rb') as fd:
+        soap_response = fd.read()
     response._content = soap_response
     mocked_post.return_value = response
@@ -521,7 +518,8 @@ def test_call_wsdl(mocked_get, app, conn):
     )
     resp = app.get(url)
     assert resp.headers['content-type'] == 'text/xml'
-    assert resp.content == open(wsdl_file(), 'rb').read()
+    with open(wsdl_file(), 'rb') as fd:
+        assert resp.content == fd.read()


 @mock.patch('passerelle.utils.Request.get', side_effect=ConnectionError('mocked error'))
@@ -548,12 +546,14 @@ def test_no_auth_on_wsdl_imports(mocked_post, mocked_load, mocked_get, app, conn
     """
     response_xmlmime, response_post = Response(), Response()
     response_xmlmime.status_code, response_post.status_code = 200, 200
-    response_xmlmime._content = open(xmlmime(), 'rb').read()
+    with open(xmlmime(), 'rb') as fd:
+        response_xmlmime._content = fd.read()
     response_post._content = force_bytes(
         """<?xml version='1.0' encoding='UTF-8'?><S:Envelope xmlns:S="http://schemas.xmlsoap.org/soap/envelope/"><S:Body><echoResponse xmlns="http://www.adullact.org/spring-ws/iparapheur/1.0" xmlns:xmime="http://www.w3.org/2005/05/xmlmime">[publik_test] m'a dit: &quot;ping&quot;!</echoResponse></S:Body></S:Envelope>
     """
     )
-    mocked_load.return_value = open(wsdl_file(), 'rb').read()
+    with open(wsdl_file(), 'rb') as fd:
+        mocked_load.return_value = fd.read()
     mocked_get.return_value = response_xmlmime
     mocked_post.return_value = response_post
     url = reverse(

View File

@@ -57,7 +57,7 @@ def validate_schema(doc, xsd):

 def check_zip_file(zipdir, expected_files):
     # check files order in zip
-    zipres = zipfile.ZipFile(zipdir, 'r')
+    with zipfile.ZipFile(zipdir, 'r') as zipres:
         files = [f.filename for f in zipres.infolist()]
     assert sorted(files[: len(expected_files)]) == sorted(expected_files)

View File

@@ -4,9 +4,12 @@ from passerelle.utils.conversion import to_pdf


 def test_pdf_to_pdf_do_nothing():
-    pdf = open(os.path.join(os.path.dirname(__file__), 'data', 'minimal.pdf'), 'rb').read()
+    with open(os.path.join(os.path.dirname(__file__), 'data', 'minimal.pdf'), 'rb') as fd:
+        pdf = fd.read()
     assert to_pdf(pdf) == pdf
-    pdf = open(os.path.join(os.path.dirname(__file__), 'data', 'minimal_bom.pdf'), 'rb').read()
+    with open(os.path.join(os.path.dirname(__file__), 'data', 'minimal_bom.pdf'), 'rb') as fd:
+        pdf = fd.read()
     assert to_pdf(pdf) == pdf
-    pdf = open(os.path.join(os.path.dirname(__file__), 'data', 'minimal_bomutf8.pdf'), 'rb').read()
+    with open(os.path.join(os.path.dirname(__file__), 'data', 'minimal_bomutf8.pdf'), 'rb') as fd:
+        pdf = fd.read()
     assert to_pdf(pdf) == pdf

View File

@@ -146,20 +146,18 @@ def test_with_parts(tpl_builder, dest):
     with full_path.open('rb') as fd:
         with zipfile.ZipFile(fd) as zi:
             assert zi.namelist() == ['coucou-10-part1.xml', 'coucou-10-dôc.xml']
-            assert (
-                zi.open('coucou-10-part1.xml').read().decode('utf-8')
-                == '<?xml version="1.0"?><body>blabla</body>'
-            )
-            assert zi.open('coucou-10-dôc.xml').read().decode('utf-8') == '<a>Héllo World!</a>'
+            with zi.open('coucou-10-part1.xml') as zfd:
+                assert zfd.read().decode('utf-8') == '<?xml version="1.0"?><body>blabla</body>'
+            with zi.open('coucou-10-dôc.xml') as zfd:
+                assert zfd.read().decode('utf-8') == '<a>Héllo World!</a>'

     with io.BytesIO(z.render_to_bytes()) as fd:
         with zipfile.ZipFile(fd) as zi:
             assert zi.namelist() == ['coucou-10-part1.xml', 'coucou-10-dôc.xml']
-            assert (
-                zi.open('coucou-10-part1.xml').read().decode('utf-8')
-                == '<?xml version="1.0"?><body>blabla</body>'
-            )
-            assert zi.open('coucou-10-dôc.xml').read().decode('utf-8') == '<a>Héllo World!</a>'
+            with zi.open('coucou-10-part1.xml') as zfd:
+                assert zfd.read().decode('utf-8') == '<?xml version="1.0"?><body>blabla</body>'
+            with zi.open('coucou-10-dôc.xml') as zfd:
+                assert zfd.read().decode('utf-8') == '<a>Héllo World!</a>'


 def test_xml_error(tpl_builder, dest):