Skip to content

Commit

Permalink
[IMP] core: cutoff imports after some number of errors
Browse files Browse the repository at this point in the history
Importing 100 records and getting 100 errors (often the same every
time because a required field was not mapped) is not super useful and
spams the logs a lot.

Cutting the import off once an error threshold is reached seems useful.

Note: integrates warnings-related fix from odoo#47972 reported by Grzegorz
Marczyński in odoo#47936.

closes odoo#48072

X-original-commit: 36c2c7f
Signed-off-by: Xavier Morel (xmo) <[email protected]>
  • Loading branch information
xmo-odoo committed Mar 20, 2020
1 parent 47952dd commit 6b05f1d
Show file tree
Hide file tree
Showing 2 changed files with 24 additions and 1 deletion.
14 changes: 14 additions & 0 deletions odoo/addons/test_impex/tests/test_load.py
Original file line number Diff line number Diff line change
Expand Up @@ -399,6 +399,20 @@ def test_not_provided(self):
u"Missing required value for the field 'Value' (value)")])
self.assertIs(result['ids'], False)

@mute_logger('odoo.sql_db', 'odoo.models')
def test_ignore_excess_messages(self):
    """Check the error cutoff on imports: a run producing many identical
    errors must be interrupted after 10 of them, with a single trailing
    warning explaining the interruption (10 errors + 1 warning = 11
    messages), and the import as a whole must fail (``ids`` is False).
    """
    # 100 rows mapping only 'const', so the required 'value' field is
    # missing on every single row -> every row raises the same error.
    rows = [[str(index)] for index in range(100)]
    outcome = self.import_(['const'], rows)
    self.assertIs(outcome['ids'], False)

    messages = outcome['messages']
    self.assertEqual(len(messages), 11)

    # All but the last message are the repeated per-row error.
    *errors, cutoff_warning = messages
    for error in errors:
        self.assertEqual(error['type'], 'error')
        self.assertEqual(error['message'], u"Missing required value for the field 'Value' (value)")

    # The final message is the cutoff notice added when the loop bails out.
    self.assertEqual(cutoff_warning['type'], 'warning')
    self.assertEqual(
        cutoff_warning['message'],
        u"Found more than 10 errors and more than one error per 10 records, interrupted to avoid showing too many errors."
    )

class test_text(ImporterCase):
model_name = 'export.text'
Expand Down
11 changes: 10 additions & 1 deletion odoo/models.py
Original file line number Diff line number Diff line change
Expand Up @@ -970,8 +970,9 @@ def flush(xml_id=None):
except Exception:
pass

errors = 0
# try again, this time record by record
for rec_data in data_list:
for i, rec_data in enumerate(data_list, 1):
try:
with cr.savepoint():
rec = self._load_records([rec_data], mode == 'update')
Expand All @@ -984,6 +985,7 @@ def flush(xml_id=None):
messages.append(dict(info, type='error', **PGERROR_TO_OE[e.pgcode](self, fg, info, e)))
# Failed to write, log to messages, rollback savepoint (to
# avoid broken transaction) and keep going
errors += 1
except Exception as e:
_logger.debug("Error while loading record", exc_info=True)
info = rec_data['info']
Expand All @@ -992,6 +994,13 @@ def flush(xml_id=None):
messages.append(dict(info, type='error', message=message, moreinfo=moreinfo))
# Failed for some reason, perhaps due to invalid data supplied,
# rollback savepoint and keep going
errors += 1
if errors >= 10 and (errors >= i / 10):
messages.append({
'type': 'warning',
'message': _(u"Found more than 10 errors and more than one error per 10 records, interrupted to avoid showing too many errors.")
})
break

# make 'flush' available to the methods below, in the case where XMLID
# resolution fails, for instance
Expand Down

0 comments on commit 6b05f1d

Please sign in to comment.