# trytond-csv_import/csv_import.py
# This file is part of csv_import module for Tryton.
# The COPYRIGHT file at the top level of this repository contains
# the full copyright notices and license terms.
2020-10-28 15:22:05 +01:00
import os
import re
import unicodedata
import string
import csv
from io import StringIO
2013-09-12 16:03:10 +02:00
from datetime import datetime
2014-11-07 14:21:01 +01:00
from trytond.config import config
2013-09-12 16:03:10 +02:00
from trytond.model import ModelSQL, ModelView, fields, Workflow
from trytond.pool import Pool, PoolMeta
2020-10-28 15:22:05 +01:00
from trytond.pyson import Eval
2013-09-12 16:03:10 +02:00
from trytond.transaction import Transaction
2020-10-28 15:22:05 +01:00
from trytond.i18n import gettext
from trytond.exceptions import UserError
2013-09-12 16:03:10 +02:00
__all__ = ['BaseExternalMapping',
'CSVProfile', 'CSVProfileBaseExternalMapping', 'CSVArchive']
2013-11-28 14:42:20 +01:00
def slugify(value):
    """Return *value* lowercased, ASCII-folded, with runs of whitespace
    and hyphens collapsed to single hyphens (safe for file names)."""
    value = unicodedata.normalize('NFKD', value).encode('ascii', 'ignore')
    # Raw strings: '\w' / '\s' in plain literals are invalid escape
    # sequences (SyntaxWarning, and an error in future Python versions).
    value = re.sub(r'[^\w\s-]', '', value.decode('utf-8')).strip().lower()
    return re.sub(r'[-\s]+', '-', value)
2013-09-12 16:03:10 +02:00
2020-10-28 15:22:05 +01:00
class BaseExternalMapping(metaclass=PoolMeta):
    # Extension of base.external.mapping: links used by the CSV import.
    __name__ = 'base.external.mapping'
    # Parent mapping this one hangs from (base record vs. child lines).
    csv_mapping = fields.Many2One('base.external.mapping', 'CSV Mapping')
    # Field on the base model that receives the records this mapping builds
    # (read as child.csv_rel_field.name during import_csv).
    csv_rel_field = fields.Many2One('ir.model.field', 'CSV Field related')
2013-09-12 16:03:10 +02:00
class CSVProfile(ModelSQL, ModelView):
    'CSV Profile'
    __name__ = 'csv.profile'
    name = fields.Char('Name', required=True)
    archives = fields.One2Many('csv.archive', 'profile',
        'Archives')
    # Base (target) model the CSV rows are imported into.
    model = fields.Many2One('ir.model', 'Model', required=True)
    mappings = fields.Many2Many('csv.profile-base.external.mapping',
        'profile', 'mapping', 'Mappings', required=True)
    # Match pair used when updating existing records:
    # code_internal is the Tryton field searched on, code_external the
    # 0-based CSV column index holding the matching value.
    code_internal = fields.Many2One('ir.model.field', 'Tryton Code Field',
        domain=[('model', '=', Eval('model'))],
        states={
            'invisible': ~Eval('update_record', True),
            'required': Eval('update_record', True),
        }, depends=['model', 'update_record'],
        help='Code field in Tryton.')
    code_external = fields.Integer("CSV Code Field",
        states={
            'invisible': ~Eval('update_record', True),
            'required': Eval('update_record', True),
        }, depends=['model', 'update_record'],
        help='Code field in CSV column.')
    create_record = fields.Boolean('Create', help='Create record from CSV')
    update_record = fields.Boolean('Update', help='Update record from CSV')
    # Dry-run flag: when set, import_csv neither creates nor updates.
    testing = fields.Boolean('Testing', help='Not create or update records')
    active = fields.Boolean('Active')
    csv_header = fields.Boolean('Header',
        help='Header (field names) on archives')
    csv_archive_separator = fields.Selection([
            (',', 'Comma'),
            (';', 'Semicolon'),
            ('tab', 'Tabulator'),
            ('|', '|'),
            ], 'CSV Separator', help="Archive CSV Separator",
        required=True)
    csv_quote = fields.Char('Quote', required=True,
        help='Character to use as quote')
    note = fields.Text('Notes')

    @staticmethod
    def default_active():
        return True

    @staticmethod
    def default_create_record():
        return True

    @staticmethod
    def default_update_record():
        return False

    @staticmethod
    def default_csv_header():
        return True

    @staticmethod
    def default_csv_archive_separator():
        return ","

    @staticmethod
    def default_csv_quote():
        return '"'

    @staticmethod
    def default_code_external():
        return 0
2013-09-12 16:03:10 +02:00
class CSVProfileBaseExternalMapping(ModelSQL):
    'CSV Profile - Base External Mapping'
    __name__ = 'csv.profile-base.external.mapping'
    _table = 'csv_profile_mapping_rel'
    profile = fields.Many2One('csv.profile', 'Profile',
        ondelete='CASCADE', select=True, required=True)
    # RESTRICT: a mapping cannot be deleted while a profile references it.
    mapping = fields.Many2One('base.external.mapping', 'Mapping',
        ondelete='RESTRICT', required=True)
2013-09-12 16:03:10 +02:00
class CSVArchive(Workflow, ModelSQL, ModelView):
    'CSV Archive'
    __name__ = 'csv.archive'
    _rec_name = 'archive_name'
    profile = fields.Many2One('csv.profile', 'CSV Profile', ondelete='CASCADE',
        required=True)
    date_archive = fields.DateTime('Date', required=True)
    # The CSV payload is not stored in the database: get_data/set_data
    # read and write a file under <database path>/<db name>/csv_import.
    data = fields.Function(fields.Binary('Archive', filename='archive_name',
        required=True), 'get_data', setter='set_data')
    archive_name = fields.Char('Archive Name')
    logs = fields.Text("Logs", readonly=True)
    state = fields.Selection([
            ('draft', 'Draft'),
            ('done', 'Done'),
            ('canceled', 'Canceled'),
            ], 'State', required=True, readonly=True)
@classmethod
def __setup__(cls):
    super(CSVArchive, cls).__setup__()
    # Newest archives first.
    cls._order = [
        ('date_archive', 'DESC'),
        ('id', 'DESC'),
        ]
    # Workflow: draft -> done (import_csv), draft <-> canceled.
    cls._transitions |= set((
        ('draft', 'done'),
        ('draft', 'canceled'),
        ('canceled', 'draft'),
        ))
    cls._buttons.update({
        'cancel': {
            'invisible': Eval('state') != 'draft',
            'depends': ['state'],
            },
        'draft': {
            'invisible': Eval('state') != 'canceled',
            'depends': ['state'],
            },
        'import_csv': {
            'invisible': Eval('state') != 'draft',
            'depends': ['state'],
            },
        })
def get_data(self, name):
    """Getter for the ``data`` function field.

    Return the archive file content as binary, or None when the file
    is missing/unreadable (best-effort read of the on-disk copy).
    """
    path = os.path.join(config.get('database', 'path'),
        Transaction().database.name, 'csv_import')
    # os.path.join instead of '%s/%s' for portable path building.
    archive = os.path.join(path, self.archive_name.replace(' ', '_'))
    try:
        with open(archive, 'rb') as f:
            return fields.Binary.cast(f.read())
    except IOError:
        # Deliberate best-effort: absent file simply yields no data.
        return None
@classmethod
def set_data(cls, archives, name, value):
    """Setter for the ``data`` function field.

    Write ``value`` to one file per archive under
    <database path>/<db name>/csv_import, creating the directory on
    first use. Raises UserError when a file cannot be written.
    """
    path = os.path.join(config.get('database', 'path'),
        Transaction().database.name, 'csv_import')
    if not os.path.exists(path):
        # NOTE(review): 0o777 makes the directory world-writable —
        # confirm this permissive mode is intended.
        os.makedirs(path, mode=0o777)
    for archive in archives:
        # os.path.join instead of '%s/%s'; separate name so the loop
        # variable keeps referring to the record.
        filename = os.path.join(path,
            archive.archive_name.replace(' ', '_'))
        try:
            with open(filename, 'wb') as f:
                f.write(value)
        except IOError:
            raise UserError(gettext('csv_import.msg_error'))
2020-10-28 15:22:05 +01:00
@fields.depends('profile', '_parent_profile.rec_name')
def on_change_profile(self):
    """Propose a unique archive name: <date>_<seq>_<profile-slug>.csv."""
    if not self.profile:
        self.archive_name = None
        return
    today = Pool().get('ir.date').today()
    slug = slugify(self.profile.rec_name)
    # Sequence number = how many archives already share date and slug.
    existing = len(self.search([
        ('archive_name', 'like', '%s_%s_%s.csv' % (today, '%', slug)),
        ]))
    self.archive_name = '%s_%s_%s.csv' % (today, existing, slug)
2013-09-12 16:03:10 +02:00
@staticmethod
def default_date_archive():
    # New archives are stamped with the current time.
    return datetime.now()

@staticmethod
def default_state():
    return 'draft'

@staticmethod
def default_profile():
    # Preselect the profile when exactly one is defined; otherwise
    # fall through (implicit None) and let the user choose.
    CSVProfile = Pool().get('csv.profile')
    csv_profiles = CSVProfile.search([])
    if len(csv_profiles) == 1:
        return csv_profiles[0].id
2020-10-28 19:08:31 +01:00
@classmethod
def _import_data_sale(cls, record, values, parent_values=None):
    '''
    Sale and Sale Line data

    Module-specific hook called by _import_data for models whose name
    starts with "sale": completes a sale.sale / sale.line instance from
    the mapped CSV ``values`` before the generic setattr pass.
    '''
    pool = Pool()
    Sale = pool.get('sale.sale')
    SaleLine = pool.get('sale.line')
    Party = pool.get('party.party')
    record_name = record.__name__
    if record_name == 'sale.sale':
        party = values.get('party')
        if party:
            party = Party(party)
        if not record.id:
            # Fresh (unsaved) sale: seed it from the party defaults.
            record = Sale.get_sale_data(values.get('party'))
            if hasattr(record, 'shop') and not getattr(record, 'shop'):
                # NOTE(review): tuple unpacking assumes the search finds
                # a sale.shop — raises ValueError otherwise. Confirm.
                shop, = pool.get('sale.shop').search([], limit=1)
                record.shop = shop
        # Addresses are only taken over when they belong to the party.
        # NOTE(review): if 'party' is absent from values, party.addresses
        # would raise — presumably the CSV always carries a party; verify.
        if values.get('invoice_address') \
                and values.get('invoice_address') in party.addresses:
            record.invoice_address = values.get('invoice_address')
        if values.get('shipment_address') \
                and values.get('shipment_address') in party.addresses:
            record.shipment_address = values.get('shipment_address')
        if values.get('customer_reference'):
            record.customer_reference = values.get('customer_reference')
        if values.get('lines'):
            record.lines = values.get('lines')
        return record
    if record_name == 'sale.line':
        if values.get('product') and values.get('quantity'):
            # Build the line against a sale derived from the parent
            # (base) row's party so on_change_product has context.
            sale = Sale.get_sale_data(parent_values.get('party'))
            line = SaleLine.get_sale_line_data(
                sale,
                values.get('product'),
                values.get('quantity')
                )
            line.on_change_product()
            return line
    # Fallback: unknown model or incomplete line values — unchanged.
    return record
@classmethod
def _import_data_purchase(cls, record, values, parent_values=None):
    '''
    Purchase and Purchase Line data

    Module-specific hook called by _import_data for models whose name
    starts with "purchase": completes a purchase.purchase /
    purchase.line instance from the mapped CSV ``values``.
    '''
    pool = Pool()
    Purchase = pool.get('purchase.purchase')
    Party = pool.get('party.party')
    record_name = record.__name__
    if record_name == 'purchase.purchase':
        party = values.get('party')
        if party:
            party = Party(party)
        if not record.id:
            # Fresh purchase: apply model defaults, then the party.
            default_values = record.default_get(record._fields.keys(),
                with_rec_name=False)
            for key, value in default_values.items():
                setattr(record, key, value)
            record.party = party
            record.on_change_party()
        # NOTE(review): as in _import_data_sale, 'party' may be falsy
        # here and party.addresses would raise — verify CSV guarantees.
        if values.get('invoice_address') \
                and values.get('invoice_address') in party.addresses:
            record.invoice_address = values.get('invoice_address')
        if values.get('lines'):
            record.lines = values.get('lines')
        return record
    if record_name == 'purchase.line':
        if values.get('product') and values.get('quantity'):
            # Build an unsaved purchase with defaults plus the parent
            # row's party; it becomes the line's purchase so that
            # on_change_product() sees the right context.
            purchase = Purchase()
            default_values = Purchase.default_get(Purchase._fields.keys(),
                with_rec_name=False)
            for key, value in default_values.items():
                setattr(purchase, key, value)
            purchase.party = parent_values.get('party')
            purchase.on_change_party()
            record.purchase = purchase
            record.product = values.get('product')
            record.quantity = values.get('quantity')
            record.on_change_product()
            return record
    # Fallback: unknown model or incomplete line values — unchanged.
    return record
2013-09-12 16:03:10 +02:00
@classmethod
def _import_data(cls, record, values, parent_values=None):
    '''Load _import_data_<module> hook (if any), then setattr from dict'''
    # Dispatch on the first dotted component of the model name,
    # e.g. 'sale.line' -> _import_data_sale.
    method_data = '_import_data_%s' % record.__name__.split('.')[0]
    if hasattr(cls, method_data):
        import_data = getattr(cls, method_data)
        record = import_data(record, values, parent_values)
    # Generic pass: copy every mapped value onto the (possibly
    # hook-replaced) record.
    for k, v in values.items():
        setattr(record, k, v)
    return record
2013-09-12 16:03:10 +02:00
@classmethod
def post_import(cls, profile, records):
    """Hook executed after the records of an archive were imported.

    Made to be overridden; the default implementation does nothing.
    @param profile: csv.profile record the import ran with.
    @param records: list of ids of the created/updated records.
    """
    pass
2013-09-12 16:03:10 +02:00
@classmethod
def _read_csv_file(cls, archive):
    '''Read CSV data from archive.

    Returns (reader, headers): a csv.reader positioned after the header
    row, and the cleaned header names (or None when the profile has no
    header row). On reader-construction failure the error is written to
    the archive's logs and a bare None is returned instead.
    '''
    headers = None
    profile = archive.profile
    separator = profile.csv_archive_separator
    if separator == "tab":
        separator = '\t'
    quote = profile.csv_quote
    header = profile.csv_header
    # NOTE(review): decoding as ASCII with errors='replace' mangles any
    # non-ASCII content — confirm UTF-8 would not be more appropriate.
    data = StringIO(archive.data.decode('ascii', errors='replace'))
    try:
        reader = csv.reader(data, delimiter=str(separator),
            quotechar=str(quote))
    except TypeError:
        cls.write([archive], {'logs': 'Error - %s' % (
            gettext('csv_import.msg_read_error',
                filename=archive.archive_name.replace(' ', '_'))
            )})
        # NOTE(review): single None here vs. 2-tuple on success —
        # callers must check before unpacking.
        return
    if header:
        # TODO. Know why some header columns get ""
        # Strip quotes and any non-printable characters from headers.
        headers = ["".join(list(filter(lambda x: x in string.printable,
            x.replace('"', '')))) for x in next(reader)]
    return reader, headers
2013-09-12 16:03:10 +02:00
@classmethod
@ModelView.button
@Workflow.transition('done')
def import_csv(cls, archives):
    '''
    Process archives to import data from CSV files.

    base: base model, e.g: party
    childs: new lines related to a base row, e.g: addresses — child
    rows are recognized by an empty first CSV column on the next row.

    Per-row results and errors are accumulated and written to the
    archive's ``logs`` field.
    '''
    pool = Pool()
    ExternalMapping = pool.get('base.external.mapping')
    logs = []
    for archive in archives:
        profile = archive.profile
        # Nothing to do: no create/update allowed, or no file content.
        # (Parentheses make the original and/or precedence explicit.)
        if ((not profile.create_record and not profile.update_record)
                or not archive.data):
            continue

        # _read_csv_file returns a bare None when the reader could not
        # be built (the error is already logged on the archive); guard
        # before unpacking to avoid a TypeError.
        result = cls._read_csv_file(archive)
        if result is None:
            continue
        reader, headers = result

        base_model = profile.model.model
        # Initialize so a profile whose mappings never match the base
        # model reports "no mapping" instead of raising NameError.
        base_mapping = None
        child_mappings = []
        for mapping in profile.mappings:
            if not mapping.model.model == base_model:
                child_mappings.append(mapping)
            else:
                base_mapping = mapping
        if not base_mapping:
            logs.append(gettext('csv_import.msg_not_mapping',
                profile=profile.rec_name))
            continue

        new_records = []
        new_lines = []
        rows = list(reader)
        Base = pool.get(base_model)
        for i in range(len(rows)):
            row = rows[i]
            if not row:
                continue
            # join header and row to convert a list to dict {header: value}
            vals = dict(zip(headers, row))
            # get values base model
            if not new_lines:
                base_values = ExternalMapping.map_external_to_tryton(
                    base_mapping.name, vals)
                if not list(base_values.values()):
                    continue
                if not list(base_values.values())[0] == '':
                    new_lines = []
            # get values child models
            child_values = None
            child_rel_field = None
            for child in child_mappings:
                if not child.csv_rel_field:
                    logs.append(gettext(
                        'csv_import.msg_missing_rel_field',
                        mapping=child.rec_name))
                    continue
                child_rel_field = child.csv_rel_field.name
                child_values = ExternalMapping.map_external_to_tryton(
                    child.name, vals)
                Child = pool.get(child.model.model)
                child = Child()
                # get default values in child model
                child_values = cls._import_data(child, child_values,
                    base_values)
                new_lines.append(child_values)
            if child_rel_field:
                base_values[child_rel_field] = new_lines
            # next row has empty first value -> it is a child line of
            # this base row; postpone saving until lines are gathered
            if i < len(rows) - 1:
                if rows[i + 1]:
                    if rows[i + 1][0] == '':
                        continue
                else:
                    new_lines = []
            # create object or get existing object
            record = None
            records = None
            if profile.update_record:
                val = row[profile.code_external]
                records = Base.search([
                    (profile.code_internal.name, '=', val)
                    ])
                if records:
                    record = Base(records[0])
            if profile.create_record and not records:
                record = Base()
            if not record:
                logs.append(gettext('csv_import.msg_not_create_update',
                    line=i + 1))
                continue
            # get default values from base model
            record = cls._import_data(record, base_values)
            # save - not testing
            if not profile.testing:
                try:
                    record.save()  # save or update
                except (UserError, ValueError) as e:
                    raise UserError(e.__str__())
                logs.append(gettext('csv_import.msg_record_saved',
                    record=record.id))
                new_records.append(record.id)
        if profile.testing:
            logs.append(gettext('csv_import.msg_success_simulation'))
        cls.post_import(profile, new_records)
        cls.write([archive], {'logs': '\n'.join(logs)})
2013-09-12 16:03:10 +02:00
2014-01-23 12:12:28 +01:00
@classmethod
def copy(cls, archives, default=None):
    """Duplicate archives, always clearing ``logs`` on the copies."""
    default = {} if default is None else default.copy()
    default['logs'] = None
    return super(CSVArchive, cls).copy(archives, default=default)
2013-09-12 16:03:10 +02:00
@classmethod
@ModelView.button
@Workflow.transition('draft')
def draft(cls, archives):
    # State change only; handled by the Workflow.transition decorator.
    pass

@classmethod
@ModelView.button
@Workflow.transition('canceled')
def cancel(cls, archives):
    # State change only; handled by the Workflow.transition decorator.
    pass