Skip to content

Commit b60de0d

Browse files
jat-odoo authored and xmo-odoo committed
[IMP] import/export UIs
Task 40692 Various changes to import/export (mainly) UIs: * default to excel & "full" (non-import-compatible) export * auto-detect encoding of CSV using chardet * remember column -> field mapping after having imported a file (useful for repeated imports where auto-matching failed) * better handle localised booleans & column names * automatically select source list view's fields when exporting * better integrate import templates feature and add a number of templates
1 parent 57676dc commit b60de0d

37 files changed

+440
-266
lines changed

addons/base_import/models/base_import.py

Lines changed: 104 additions & 29 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,7 @@
11
# -*- coding: utf-8 -*-
22
# Part of Odoo. See LICENSE file for full copyright and licensing details.
33

4+
import chardet
45
import datetime
56
import io
67
import itertools
@@ -45,6 +46,37 @@
4546
for mime, (ext, handler, req) in FILE_TYPE_DICT.items()
4647
}
4748

49+
class Base(models.AbstractModel):
    _inherit = 'base'

    @api.model
    def get_import_templates(self):
        """ Hook returning the import templates available for this model.

        Models wanting to advertise downloadable import templates in the
        import UI override this and return one entry per template.

        :return: a list(dict) containing label and template path
                 like ``[{'label': 'foo', 'template': 'path'}]``
        """
        return []
61+
62+
class ImportMapping(models.Model):
    """ Saved column-name -> field-name selections from previous imports.

    When users repeatedly import files produced by a third-party system,
    the external column headers may match neither Odoo's field names nor
    their labels. Each record stores, per model, which field a given
    column name was mapped to, so subsequent imports of files from the
    same source can pre-fill the mapping instead of forcing the user to
    redo it every time.
    """
    _name = 'base_import.mapping'

    # model the mapping applies to; indexed since lookups filter on it
    res_model = fields.Char(index=True)
    # external column header as it appears in the imported file
    column_name = fields.Char()
    # Odoo field (possibly a '/'-separated subfield path) it maps to
    field_name = fields.Char()
79+
4880

4981
class Import(models.TransientModel):
5082

@@ -251,15 +283,10 @@ def _read_ods(self, options):
251283
def _read_csv(self, options):
252284
""" Returns a CSV-parsed iterator of all non-empty lines in the file
253285
:throws csv.Error: if an error is detected during CSV parsing
254-
:throws UnicodeDecodeError: if ``options.encoding`` is incorrect
255286
"""
256287
csv_data = self.file
257-
258-
# TODO: guess encoding with chardet? Or https://github.com/aadsm/jschardet
259-
encoding = options.get('encoding', 'utf-8')
260-
if encoding != 'utf-8':
261-
# csv module expect utf-8, see http://docs.python.org/2/library/csv.html
262-
csv_data = csv_data.decode(encoding).encode('utf-8')
288+
encoding = chardet.detect(csv_data)['encoding']
289+
csv_data = csv_data.decode(encoding).encode('utf-8')
263290

264291
csv_iterator = pycompat.csv_reader(
265292
io.BytesIO(csv_data),
@@ -376,7 +403,7 @@ def check_patterns(patterns, preview_values):
376403

377404
if results:
378405
return results
379-
return ['id', 'text', 'char', 'datetime', 'selection', 'many2one', 'one2many', 'many2many', 'html']
406+
return ['id', 'text', 'boolean', 'char', 'datetime', 'selection', 'many2one', 'one2many', 'many2many', 'html']
380407

381408
@api.model
382409
def _find_type_from_preview(self, options, preview):
@@ -400,6 +427,7 @@ def _match_header(self, header, fields, options):
400427
:rtype: list(Field)
401428
"""
402429
string_match = None
430+
IrTranslation = self.env['ir.translation']
403431
for field in fields:
404432
# FIXME: should match all translations & original
405433
# TODO: use string distance (levenshtein? hamming?)
@@ -409,6 +437,9 @@ def _match_header(self, header, fields, options):
409437
# matching string are not reliable way because
410438
# strings have no unique constraint
411439
string_match = field
440+
translated_header = IrTranslation._get_source('ir.model.fields,field_description', 'model', self.env.lang, header).lower()
441+
if translated_header == field['string'].lower():
442+
string_match = field
412443
if string_match:
413444
# this behavior is only applied if there is no matching field['name']
414445
return [string_match]
@@ -440,23 +471,31 @@ def _match_headers(self, rows, fields, options):
440471
441472
Will consume the first line of the ``rows`` iterator.
442473
443-
Returns a pair of (None, None) if headers were not requested
444-
or the list of headers and a dict mapping cell indices
445-
to key paths in the ``fields`` tree
474+
Returns the list of headers and a dict mapping cell indices
475+
to key paths in the ``fields`` tree. If headers were not
476+
requested, both collections are empty.
446477
447478
:param Iterator rows:
448479
:param dict fields:
449480
:param dict options:
450-
:rtype: (None, None) | (list(str), dict(int: list(str)))
481+
:rtype: (list(str), dict(int: list(str)))
451482
"""
452483
if not options.get('headers'):
453484
return [], {}
454485

455486
headers = next(rows)
456-
return headers, {
457-
index: [field['name'] for field in self._match_header(header, fields, options)] or None
458-
for index, header in enumerate(headers)
459-
}
487+
matches = {}
488+
mapping_records = self.env['base_import.mapping'].search_read([('res_model', '=', self.res_model)], ['column_name', 'field_name'])
489+
mapping_fields = {rec['column_name']: rec['field_name'] for rec in mapping_records}
490+
for index, header in enumerate(headers):
491+
match_field = []
492+
mapping_field_name = mapping_fields.get(header.lower())
493+
if mapping_field_name:
494+
match_field = mapping_field_name.split('/')
495+
if not match_field:
496+
match_field = [field['name'] for field in self._match_header(header, fields, options)]
497+
matches[index] = match_field or None
498+
return headers, matches
460499

461500
@api.multi
462501
def parse_preview(self, options, count=10):
@@ -469,7 +508,7 @@ def parse_preview(self, options, count=10):
469508
470509
:param int count: number of preview lines to generate
471510
:param options: format-specific options.
472-
CSV: {encoding, quoting, separator, headers}
511+
CSV: {quoting, separator, headers}
473512
:type options: {str, str, str, bool}
474513
:returns: {fields, matches, headers, preview} | {error, preview}
475514
:rtype: {dict(str: dict(...)), dict(int, list(str)), list(str), list(list(str))} | {str, str}
@@ -482,21 +521,31 @@ def parse_preview(self, options, count=10):
482521
# Match should have consumed the first row (iif headers), get
483522
# the ``count`` next rows for preview
484523
preview = list(itertools.islice(rows, count))
485-
assert preview, "CSV file seems to have no content"
524+
assert preview, "file seems to have no content"
486525
header_types = self._find_type_from_preview(options, preview)
487-
if options.get('keep_matches', False) and len(options.get('fields', [])):
526+
if options.get('keep_matches') and len(options.get('fields', [])):
488527
matches = {}
489528
for index, match in enumerate(options.get('fields')):
490529
if match:
491530
matches[index] = match.split('/')
492531

532+
if options.get('keep_matches'):
533+
advanced_mode = options.get('advanced')
534+
else:
535+
# Check is label contain relational field
536+
has_relational_header = any(len(models.fix_import_export_id_paths(col)) > 1 for col in headers)
537+
# Check is matches fields have relational field
538+
has_relational_match = any(len(match) > 1 for field, match in matches.items() if match)
539+
advanced_mode = has_relational_header or has_relational_match
540+
493541
return {
494542
'fields': fields,
495543
'matches': matches or False,
496544
'headers': headers or False,
497545
'headers_type': header_types or False,
498546
'preview': preview,
499547
'options': options,
548+
'advanced_mode': advanced_mode,
500549
'debug': self.user_has_groups('base.group_no_one'),
501550
}
502551
except Exception as error:
@@ -505,7 +554,7 @@ def parse_preview(self, options, count=10):
505554
# preview to a list in the return.
506555
_logger.debug("Error during parsing preview", exc_info=True)
507556
preview = None
508-
if self.file_type == 'text/csv':
557+
if self.file_type == 'text/csv' and self.file:
509558
preview = self.file[:ERROR_PREVIEW_BYTES].decode('iso-8859-1')
510559
return {
511560
'error': str(error),
@@ -627,7 +676,13 @@ def _parse_import_data_recursive(self, model, prefix, data, import_fields, optio
627676
try:
628677
line[index] = dt.strftime(dt.strptime(pycompat.to_native(line[index]), user_format), server_format)
629678
except ValueError as e:
630-
raise ValueError(_("Column %s contains incorrect values. Error in line %d: %s") % (name, num + 1, e))
679+
try:
680+
# Allow to import date in datetime fields
681+
if field['type'] == 'datetime':
682+
user_format = pycompat.to_native(options.get('date_format'))
683+
line[index] = dt.strftime(dt.strptime(pycompat.to_native(line[index]), user_format), server_format)
684+
except ValueError as e:
685+
raise ValueError(_("Column %s contains incorrect values. Error in line %d: %s") % (name, num + 1, e))
631686
except Exception as e:
632687
raise ValueError(_("Error Parsing Date [%s:L%d]: %s") % (name, num + 1, e))
633688
# Check if the field is in import_field and is a relational (followed by /)
@@ -643,12 +698,14 @@ def _parse_import_data_recursive(self, model, prefix, data, import_fields, optio
643698
return data
644699

645700
@api.multi
646-
def do(self, fields, options, dryrun=False):
701+
def do(self, fields, columns, options, dryrun=False):
647702
""" Actual execution of the import
648703
649704
:param fields: import mapping: maps each column to a field,
650705
``False`` for the columns to ignore
651706
:type fields: list(str|bool)
707+
:param columns: columns label
708+
:type columns: list(str|bool)
652709
:param dict options:
653710
:param bool dryrun: performs all import operations (and
654711
validations) but rollbacks writes, allows
@@ -661,7 +718,7 @@ def do(self, fields, options, dryrun=False):
661718
error message associated with the error (a string)
662719
and ``record`` the data which failed to import (or
663720
``false`` if that data isn't available or provided)
664-
:rtype: list({type, message, record})
721+
:rtype: dict(ids: list(int), messages: list({type, message, record}))
665722
"""
666723
self.ensure_one()
667724
self._cr.execute('SAVEPOINT import')
@@ -671,11 +728,13 @@ def do(self, fields, options, dryrun=False):
671728
# Parse date and float field
672729
data = self._parse_import_data(data, import_fields, options)
673730
except ValueError as error:
674-
return [{
675-
'type': 'error',
676-
'message': pycompat.text_type(error),
677-
'record': False,
678-
}]
731+
return {
732+
'messages': [{
733+
'type': 'error',
734+
'message': pycompat.text_type(error),
735+
'record': False,
736+
}]
737+
}
679738

680739
_logger.info('importing %d rows...', len(data))
681740

@@ -698,4 +757,20 @@ def do(self, fields, options, dryrun=False):
698757
except psycopg2.InternalError:
699758
pass
700759

701-
return import_result['messages']
760+
# Insert/Update mapping columns when import complete successfully
761+
if import_result['ids'] and options.get('headers'):
762+
BaseImportMapping = self.env['base_import.mapping']
763+
for index, column_name in enumerate(columns):
764+
if column_name:
765+
# Update to latest selected field
766+
exist_records = BaseImportMapping.search([('res_model', '=', self.res_model), ('column_name', '=', column_name)])
767+
if exist_records:
768+
exist_records.write({'field_name': fields[index]})
769+
else:
770+
BaseImportMapping.create({
771+
'res_model': self.res_model,
772+
'column_name': column_name,
773+
'field_name': fields[index]
774+
})
775+
776+
return import_result

addons/base_import/security/ir.model.access.csv

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -12,4 +12,5 @@ access_base_import_tests_models_m2o_required_related,base.import.tests.models.m2
1212
access_base_import_tests_models_o2m,base.import.tests.models.o2m,model_base_import_tests_models_o2m,base.group_user,1,1,1,1
1313
access_base_import_tests_models_o2m_child,base.import.tests.models.o2m.child,model_base_import_tests_models_o2m_child,base.group_user,1,1,1,1
1414
access_base_import_tests_models_preview,base.import.tests.models.preview,model_base_import_tests_models_preview,base.group_user,1,1,1,1
15+
access_base_import_mapping,base.import.mapping,model_base_import_mapping,base.group_user,1,1,1,1
1516

addons/base_import/static/csv/res.partner.csv

Lines changed: 0 additions & 7 deletions
This file was deleted.

0 commit comments

Comments (0)