# This file is part of csv_import module for Tryton.
# The COPYRIGHT file at the top level of this repository contains
# the full copyright notices and license terms.
from StringIO import StringIO
from csv import reader
from datetime import datetime
from email.mime.text import MIMEText
from trytond.config import CONFIG
from trytond.exceptions import UserError
from trytond.model import ModelSQL, ModelView, fields, Workflow
from trytond.pool import Pool, PoolMeta
from trytond.pyson import Eval, If
from trytond.tools import get_smtp_server
from trytond.transaction import Transaction
import logging
import os
import re
import psycopg2
import unicodedata
import string

__all__ = ['BaseExternalMapping',
    'CSVProfile', 'CSVProfileBaseExternalMapping', 'CSVArchive']
__metaclass__ = PoolMeta
_slugify_strip_re = re.compile(r'[^\w\s-]')
_slugify_hyphenate_re = re.compile(r'[-\s]+')


def slugify(value):
    if not isinstance(value, unicode):
        value = unicode(value)
    value = unicodedata.normalize('NFKD', value).encode('ascii', 'ignore')
    value = unicode(_slugify_strip_re.sub('', value).strip().lower())
    return _slugify_hyphenate_re.sub('-', value)
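
# Illustrative only; the value below is an assumed example, not data from
# this module:
#   slugify(u'Party Import, 2013') -> u'party-import-2013'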


class BaseExternalMapping:
    __name__ = 'base.external.mapping'
    csv_mapping = fields.Many2One('base.external.mapping', 'CSV Mapping')
    csv_rel_field = fields.Many2One('ir.model.field', 'CSV Field related')


class CSVProfile(ModelSQL, ModelView):
    ' CSV Profile'
    __name__ = 'csv.profile'
    name = fields.Char('Name', required=True)
    archives = fields.One2Many('csv.archive', 'profile',
        'Archives')
    model = fields.Many2One('ir.model', 'Model', required=True)
    mappings = fields.Many2Many('csv.profile-base.external.mapping',
        'profile', 'mapping', 'Mappings', required=True)
    code_internal = fields.Many2One('ir.model.field', 'Tryton Code Field',
        domain=[('model', '=', Eval('model'))],
        states={
            'invisible': ~Eval('update_record', True),
            'required': Eval('update_record', True),
            }, depends=['model', 'update_record'],
        help='Code field in Tryton.')
    code_external = fields.Integer("CSV Code Field",
        states={
            'invisible': ~Eval('update_record', True),
            'required': Eval('update_record', True),
            }, depends=['model', 'update_record'],
        help='Code field in CSV column.')
    create_record = fields.Boolean('Create', help='Create record from CSV')
    update_record = fields.Boolean('Update', help='Update record from CSV')
    testing = fields.Boolean('Testing',
        help='Do not create or update records')
    active = fields.Boolean('Active')
    csv_header = fields.Boolean('Header', readonly=True,
        help='Header (field names) on archives')
    csv_archive_separator = fields.Selection([
            (',', 'Comma'),
            (';', 'Semicolon'),
            ('tab', 'Tabulator'),
            ('|', '|'),
            ], 'CSV Separator', help="Archive CSV Separator",
        required=True)
    csv_quote = fields.Char('Quote', required=True,
        help='Character to use as quote')
    note = fields.Text('Notes')

    @staticmethod
    def default_active():
        return True

    @staticmethod
    def default_create_record():
        return True

    @staticmethod
    def default_update_record():
        return False

    @staticmethod
    def default_csv_header():
        return True

    @staticmethod
    def default_csv_archive_separator():
        return ","

    @staticmethod
    def default_csv_quote():
        return '"'

    @staticmethod
    def default_code_external():
        return 0


class CSVProfileBaseExternalMapping(ModelSQL):
    'CSV Profile - Base External Mapping'
    __name__ = 'csv.profile-base.external.mapping'
    _table = 'csv_profile_mapping_rel'
    profile = fields.Many2One('csv.profile', 'Profile',
        ondelete='CASCADE', select=True, required=True)
    mapping = fields.Many2One('base.external.mapping', 'Mapping',
        ondelete='RESTRICT', required=True)


class CSVArchive(Workflow, ModelSQL, ModelView):
    ' CSV Archive'
    __name__ = 'csv.archive'
    _rec_name = 'archive_name'
    profile = fields.Many2One('csv.profile', 'CSV Profile',
        ondelete='CASCADE', required=True, on_change=['profile'])
    date_archive = fields.DateTime('Date', required=True)
    data = fields.Function(fields.Binary('Archive', required=True),
        'get_data', setter='set_data')
    archive_name = fields.Char('Archive Name')
    logs = fields.Text("Logs", readonly=True)
    state = fields.Selection([
            ('draft', 'Draft'),
            ('done', 'Done'),
            ('canceled', 'Canceled'),
            ], 'State', required=True, readonly=True)

    @classmethod
    def __setup__(cls):
        super(CSVArchive, cls).__setup__()
        cls._order.insert(0, ('date_archive', 'DESC'))
        cls._order.insert(1, ('id', 'DESC'))
        cls._transitions |= set((
                ('draft', 'done'),
                ('draft', 'canceled'),
                ('canceled', 'draft'),
                ))
        cls._buttons.update({
                'cancel': {
                    'invisible': Eval('state') != 'draft',
                    },
                'draft': {
                    'invisible': Eval('state') != 'canceled',
                    'icon': If(Eval('state') == 'canceled', 'tryton-clear',
                        'tryton-go-previous'),
                    },
                'import_csv': {
                    'invisible': Eval('state') != 'draft',
                    },
                })
        cls._error_messages.update({
                'error': 'CSV Import Error!',
                'reading_error': 'Error reading file %s.',
                'read_error': 'Error reading file: %s.\nError %s.',
                'save_error': 'Error saving file: %s.',
                'success_simulation': 'Simulation finished successfully.',
                'record_saved': 'Record %s saved successfully!',
                'record_error': 'Error saving records.',
                })
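
    # Archives are stored on the filesystem, not in the database: with the
    # defaults used below the file path is
    # <data_path>/<database name>/csv_import/<archive_name>, e.g. under
    # /var/lib/trytond/ (path layout taken from get_data()/set_data()).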

    def get_data(self, name):
        cursor = Transaction().cursor
        path = os.path.join(CONFIG.get('data_path', '/var/lib/trytond'),
            cursor.database_name, 'csv_import')
        archive = '%s/%s' % (path, self.archive_name.replace(' ', '_'))
        try:
            with open(archive, 'r') as f:
                return buffer(f.read())
        except IOError:
            self.raise_user_error('error',
                error_description='reading_error',
                error_description_args=(self.archive_name.replace(' ', '_'),),
                raise_exception=True)

    @classmethod
    def set_data(cls, archives, name, value):
        cursor = Transaction().cursor
        path = os.path.join(CONFIG.get('data_path', '/var/lib/trytond'),
            cursor.database_name, 'csv_import')
        if not os.path.exists(path):
            os.makedirs(path, mode=0777)
        for archive in archives:
            archive = '%s/%s' % (path, archive.archive_name.replace(' ', '_'))
            try:
                with open(archive, 'w') as f:
                    f.write(value)
            except IOError, e:
                cls.raise_user_error('error',
                    error_description='save_error',
                    error_description_args=(e,),
                    raise_exception=True)

    def on_change_profile(self):
        if not self.profile:
            return {'archive_name': None}
        today = Pool().get('ir.date').today()
        files = len(self.search([
                    ('archive_name', 'like', '%s_%s_%s.csv' %
                        (today, '%', slugify(self.profile.rec_name))),
                    ]))
        return {
            'archive_name': ('%s_%s_%s.csv' %
                (today, files, slugify(self.profile.rec_name))),
            }
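
    # Naming sketch (date and profile name below are assumed examples): the
    # first archive created on 2013-12-02 for a profile named 'Party Import'
    # would get archive_name '2013-12-02_0_party-import.csv'.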

    @staticmethod
    def default_date_archive():
        return datetime.now()

    @staticmethod
    def default_state():
        return 'draft'

    @staticmethod
    def default_profile():
        CSVProfile = Pool().get('csv.profile')
        csv_profiles = CSVProfile.search([])
        if len(csv_profiles) == 1:
            return csv_profiles[0].id

    @classmethod
    def _add_default_values(cls, csv_model, values, parent_values=None):
        """ This method is designed to be overridden; it computes the default
        values of the model.
        """
        return values
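
    # A downstream module could override _add_default_values() to inject
    # defaults into the imported values; a minimal sketch (the 'company'
    # field and its value are assumptions, not part of this module):
    #
    #     @classmethod
    #     def _add_default_values(cls, csv_model, values, parent_values=None):
    #         values = super(CSVArchive, cls)._add_default_values(
    #             csv_model, values, parent_values)
    #         values.setdefault('company',
    #             Transaction().context.get('company'))
    #         return values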

    @classmethod
    def post_import(cls, model, records):
        """ This method is designed to be overridden to run extra logic on
        the imported records once they have been saved.
        @param model: the base model class
        @param records: list of imported record ids
        """
        pass
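
    # Example override in a downstream module (hypothetical; the logger name
    # is an assumption):
    #
    #     @classmethod
    #     def post_import(cls, model, records):
    #         super(CSVArchive, cls).post_import(model, records)
    #         logging.getLogger('csv_import').info(
    #             'Imported %s record(s) of %s' % (len(records),
    #                 model.__name__))
    #
    # import_csv() below calls this hook with the base model class and the
    # ids of the records it created or updated.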

    @classmethod
    def _read_csv_file(cls, archive):
        '''Read CSV data from archive'''
        headers = None
        profile = archive.profile

        separator = profile.csv_archive_separator
        if separator == "tab":
            separator = '\t'
        quote = profile.csv_quote
        header = profile.csv_header

        data = StringIO(archive.data)
        try:
            rows = reader(data, delimiter=str(separator),
                quotechar=str(quote))
        except TypeError, e:
            cls.write([archive], {'logs': 'Error - %s' % (
                        cls.raise_user_error('error',
                            error_description='read_error',
                            error_description_args=(archive.archive_name, e),
                            raise_exception=False),
                        )})
            return None, None

        if header:
            # TODO: find out why some header columns come wrapped in extra
            # '"' characters
            headers = [filter(lambda x: x in string.printable, x).replace(
                    '"', '') for x in next(rows)]
        return rows, headers
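
    # Note: _read_csv_file() returns (rows, headers), where 'rows' is the
    # csv.reader iterator already positioned after the header row and
    # 'headers' is the list of cleaned column names (None when the profile
    # has no header row). On a read error it logs the problem on the archive
    # and returns (None, None).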

    @classmethod
    @ModelView.button
    @Workflow.transition('done')
    def import_csv(cls, archives):
        '''
        Process archives to import data from CSV files
        base: the base model, e.g. party
        childs: extra lines related to a base record, e.g. addresses
        '''
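        # Illustrative CSV layout (column names are assumed, not taken from
        # this module): one row carries the base record values plus its first
        # child line, and the following rows, whose leading column(s) are
        # empty, carry the remaining child lines, e.g.:
        #
        #   name,street,city
        #   Party A,Main Street,Barcelona
        #   ,Second Street,Girona
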
        pool = Pool()
        ExternalMapping = pool.get('base.external.mapping')
        User = pool.get('res.user')

        logs = []
        context = {}

        for archive in archives:
            profile = archive.profile

            if not profile.create_record and not profile.update_record:
                continue

            data, headers = cls._read_csv_file(archive)
            if data is None:
                continue

            base_model = profile.model.model

            child_mappings = []
            for mapping in profile.mappings:
                if not mapping.model.model == base_model:
                    child_mappings.append(mapping)
                else:
                    base_mapping = mapping

            new_records = []
            new_lines = []
            rows = list(data)
            Base = pool.get(base_model)
            for i in range(len(rows)):
                row = rows[i]
                if not row:
                    continue

                # join header and row to convert the list into a
                # {header: value} dict
                vals = dict(zip(headers, row))

                # get the values of the base model
                if not new_lines:
                    base_values = ExternalMapping.map_external_to_tryton(
                        base_mapping.name, vals)
                    if base_values.values()[0] == '':
                        new_line = True
                    else:
                        new_line = None
                        new_lines = []

                # get the values of the child models
                child_values = None
                for child in child_mappings:
                    child_rel_field = child.csv_rel_field.name
                    child_values = ExternalMapping.map_external_to_tryton(
                        child.name, vals)
                    Child = pool.get(child.model.model)
                    # get default values in the child model
                    child_values = cls._add_default_values(Child,
                        child_values, base_values)
                    new_lines.append(child_values)

                    base_values[child_rel_field] = new_lines

                # if the next row has an empty first value, it is another
                # child line of the same record; keep accumulating
                if i < len(rows) - 1:
                    if rows[i + 1]:
                        if rows[i + 1][0] == '':
                            continue
                        else:
                            new_lines = []

                # create a new record or get the existing one
                records = None
                if profile.update_record:
                    val = row[profile.code_external]
                    records = Base.search([
                            (profile.code_internal.name, '=', val),
                            ])
                    if records:
                        base = Base(records[0])
                if profile.create_record and not records:
                    base = Base()

                # get default values from the base model
                vals = cls._add_default_values(base, base_values)

                # assign each key, value on the instance: base.key = value
                for key, value in vals.iteritems():
                    setattr(base, key, value)

                # save (create or update), unless the profile is in testing
                # mode
                if not profile.testing:
                    try:
                        base.save()
                        logs.append(cls.raise_user_error('record_saved',
                                error_args=(base.id,), raise_exception=False))
                        new_records.append(base.id)
                    except:
                        logs.append(cls.raise_user_error('record_error',
                                raise_exception=False))

            if profile.testing:
                logs.append(cls.raise_user_error('success_simulation',
                        raise_exception=False))

            cls.post_import(Base, new_records)
            cls.write([archive], {'logs': '\n'.join(logs)})

    @classmethod
    @ModelView.button
    @Workflow.transition('draft')
    def draft(cls, archives):
        pass

    @classmethod
    @ModelView.button
    @Workflow.transition('canceled')
    def cancel(cls, archives):
        pass