Commit d79f072a authored by oco-odoo

[FIX] l10n_fr_fec: alleviate memory error when exporting too many move lines


We now split the export of the FEC files into batches of move lines. This was necessary because the fetchall() run on the cursor to retrieve the results could return a huge amount of data on big databases, saturating the RAM.

This fix is not exhaustive: memory consumption could be lowered further by removing the need to convert the whole file to base64 and write it as an attachment. Those changes are not doable in stable; further improvements will come in master.
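For reference, the batching boils down to paging the SQL query with LIMIT/OFFSET and fetching one extra row per batch to know whether another batch follows. A minimal, self-contained sketch of that pattern, assuming a psycopg2-style cursor as exposed by self._cr (the table, columns and function names below are illustrative, not the module's actual query):

def export_in_batches(cr, write_row, batch_size=500000):
    # Illustrative query; the real FEC query selects and formats many more columns.
    query = '''
        SELECT aml.id, aml.name
        FROM account_move_line aml
        ORDER BY aml.id
        LIMIT %s OFFSET %s
    '''
    offset = 0
    has_more = True
    while has_more:
        # Ask for one row more than the batch size: receiving it means another batch follows.
        cr.execute(query, (batch_size + 1, offset))
        rows = cr.fetchall()
        has_more = len(rows) > batch_size
        for row in rows[:batch_size]:
            write_row(row)
        offset += batch_size

The actual fix additionally avoids appending the line terminator to the very last row, since the FEC file is not allowed to end with an empty line.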

OPW 3442579

closes odoo/odoo#130247

Signed-off-by: de Wouters de Bouchout Jean-Benoît (jbw) <jbw@odoo.com>
parent e4e2a28c
@@ -302,7 +302,8 @@ class AccountFrFec(models.TransientModel):
rows_to_write.append(listrow)
# LINES
sql_query = '''
query_limit = int(self.env['ir.config_parameter'].sudo().get_param('l10n_fr_fec.batch_size', 500000)) # To prevent memory errors when fetching the results
sql_query = f'''
SELECT
REGEXP_REPLACE(replace(aj.code, '|', '/'), '[\\t\\r\\n]', ' ', 'g') AS JournalCode,
REGEXP_REPLACE(replace(COALESCE(aj__name.value, aj.name), '|', '/'), '[\\t\\r\\n]', ' ', 'g') AS JournalLib,
@@ -360,35 +361,54 @@
am.date >= %s
AND am.date <= %s
AND am.company_id = %s
'''
# For official report: only use posted entries
if self.export_type == "official":
sql_query += '''
AND am.state = 'posted'
'''
sql_query += '''
{"AND am.state = 'posted'" if self.export_type == 'official' else ""}
ORDER BY
am.date,
am.name,
aml.id
LIMIT %s
OFFSET %s
'''
lang = self.env.user.lang or get_lang(self.env).code
self._cr.execute(
sql_query, (lang, self.date_from, self.date_to, company.id))
for row in self._cr.fetchall():
rows_to_write.append(list(row))
with io.BytesIO() as fecfile:
csv_writer = pycompat.csv_writer(fecfile, delimiter='|', lineterminator='')
# Write header and initial balances
for initial_row in rows_to_write:
initial_row = list(initial_row)
# We don't skip \n at the end of the file if there are only initial balances, for simplicity. An empty period export shouldn't happen IRL.
initial_row[-1] += u'\r\n'
csv_writer.writerow(initial_row)
# Write current period's data
query_offset = 0
has_more_results = True
while has_more_results:
self._cr.execute(
sql_query,
(lang, self.date_from, self.date_to, company.id, query_limit + 1, query_offset)
)
query_offset += query_limit
has_more_results = self._cr.rowcount > query_limit # we load one more result than the limit to check if there is more
query_results = self._cr.fetchall()
for i, row in enumerate(query_results[:query_limit]):
if i < len(query_results) - 1:
# The file is not allowed to end with an empty line, so we can't use lineterminator on the writer
row = list(row)
row[-1] += u'\r\n'
csv_writer.writerow(row)
base64_result = base64.encodebytes(fecfile.getvalue())
fecvalue = self._csv_write_rows(rows_to_write)
end_date = fields.Date.to_string(self.date_to).replace('-', '')
suffix = ''
if self.export_type == "nonofficial":
suffix = '-NONOFFICIAL'
self.write({
'fec_data': base64.encodebytes(fecvalue),
'fec_data': base64_result,
# Filename = <siren>FECYYYYMMDD where YYYYMMDD is the closing date
'filename': '%sFEC%s%s.csv' % (company_legal_data, end_date, suffix),
})
@@ -404,7 +424,7 @@ class AccountFrFec(models.TransientModel):
'target': 'self',
}
def _csv_write_rows(self, rows, lineterminator=u'\r\n'):
def _csv_write_rows(self, rows, lineterminator=u'\r\n'):  # DEPRECATED; will disappear in master
"""
Write FEC rows into a file
It seems that Bercy's bureaucracy is not too happy about the
......
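Note that the batch size is driven by the l10n_fr_fec.batch_size system parameter introduced above, with a default of 500000 rows. Should that still be too large for a given server, it can be lowered per database, for instance from an Odoo shell (a sketch; set_param stores the value as a string, which the export code then converts with int()):

env['ir.config_parameter'].sudo().set_param('l10n_fr_fec.batch_size', '200000')  # smaller batches, lower peak RAM
env.cr.commit()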