# trytond-csv_import/csv_import.py
# This file is part of csv_import module for Tryton.
# The COPYRIGHT file at the top level of this repository contains
# the full copyright notices and license terms.
from StringIO import StringIO
from csv import reader
from datetime import datetime
import os
import re
import string
import unicodedata

from trytond.config import CONFIG
from trytond.model import ModelSQL, ModelView, fields, Workflow
from trytond.pool import Pool, PoolMeta
from trytond.pyson import Eval, If
from trytond.transaction import Transaction

__all__ = ['BaseExternalMapping',
    'CSVProfile', 'CSVProfileBaseExternalMapping', 'CSVArchive']
__metaclass__ = PoolMeta
_slugify_strip_re = re.compile(r'[^\w\s-]')
_slugify_hyphenate_re = re.compile(r'[-\s]+')


def slugify(value):
    """Normalize *value* into a filename-safe slug.

    The value is coerced to unicode, decomposed to ASCII (dropping any
    non-ASCII marks), stripped of every character that is not a word
    character, whitespace or hyphen, lowercased, and finally all runs of
    whitespace/hyphens are collapsed to single hyphens.
    """
    if not isinstance(value, unicode):
        value = unicode(value)
    value = unicodedata.normalize('NFKD', value).encode('ascii', 'ignore')
    value = unicode(_slugify_strip_re.sub('', value).strip().lower())
    return _slugify_hyphenate_re.sub('-', value)
class BaseExternalMapping:
    # Extends the existing base.external.mapping model (via the module-level
    # __metaclass__ = PoolMeta) with the links the CSV import needs:
    # a parent mapping and the field that relates child rows to their base.
    __name__ = 'base.external.mapping'
    csv_mapping = fields.Many2One('base.external.mapping', 'CSV Mapping')
    csv_rel_field = fields.Many2One('ir.model.field', 'CSV Field related')
class CSVProfile(ModelSQL, ModelView):
    ' CSV Profile'
    __name__ = 'csv.profile'
    name = fields.Char('Name', required=True)
    archives = fields.One2Many('csv.archive', 'profile',
        'Archives')
    model = fields.Many2One('ir.model', 'Model', required=True)
    mappings = fields.Many2Many('csv.profile-base.external.mapping',
        'profile', 'mapping', 'Mappings', required=True)
    # Pair of fields that identify an existing record when updating:
    # code_internal is the Tryton field, code_external the CSV column index.
    code_internal = fields.Many2One('ir.model.field', 'Tryton Code Field',
        domain=[('model', '=', Eval('model'))],
        states={
            'invisible': ~Eval('update_record', True),
            'required': Eval('update_record', True),
            }, depends=['model', 'update_record'],
        help='Code field in Tryton.')
    code_external = fields.Integer("CSV Code Field",
        states={
            'invisible': ~Eval('update_record', True),
            'required': Eval('update_record', True),
            }, depends=['model', 'update_record'],
        help='Code field in CSV column.')
    create_record = fields.Boolean('Create', help='Create record from CSV')
    update_record = fields.Boolean('Update', help='Update record from CSV')
    testing = fields.Boolean('Testing', help='Not create or update records')
    active = fields.Boolean('Active')
    csv_header = fields.Boolean('Header', readonly=True,
        help='Header (field names) on archives')
    csv_archive_separator = fields.Selection([
            (',', 'Comma'),
            (';', 'Semicolon'),
            ('tab', 'Tabulator'),
            ('|', '|'),
            ], 'CSV Separator', help="Archive CSV Separator",
        required=True)
    csv_quote = fields.Char('Quote', required=True,
        help='Character to use as quote')
    note = fields.Text('Notes')

    @staticmethod
    def default_active():
        return True

    @staticmethod
    def default_create_record():
        return True

    @staticmethod
    def default_update_record():
        return False

    @staticmethod
    def default_csv_header():
        return True

    @staticmethod
    def default_csv_archive_separator():
        return ","

    @staticmethod
    def default_csv_quote():
        return '"'

    @staticmethod
    def default_code_external():
        return 0
class CSVProfileBaseExternalMapping(ModelSQL):
    'CSV Profile - Base External Mapping'
    # Relation table between csv.profile and base.external.mapping.
    __name__ = 'csv.profile-base.external.mapping'
    _table = 'csv_profile_mapping_rel'
    profile = fields.Many2One('csv.profile', 'Profile',
        ondelete='CASCADE', select=True, required=True)
    # RESTRICT: a mapping still referenced by a profile cannot be deleted.
    mapping = fields.Many2One('base.external.mapping', 'Mapping',
        ondelete='RESTRICT', required=True)
class CSVArchive(Workflow, ModelSQL, ModelView):
' CSV Archive'
__name__ = 'csv.archive'
_rec_name = 'archive_name'
profile = fields.Many2One('csv.profile', 'CSV Profile', ondelete='CASCADE',
required=True, on_change=['profile'])
date_archive = fields.DateTime('Date', required=True)
data = fields.Function(fields.Binary('Archive', required=True),
'get_data', setter='set_data')
archive_name = fields.Char('Archive Name')
logs = fields.Text("Logs", readonly=True)
2013-09-12 16:03:10 +02:00
state = fields.Selection([
('draft', 'Draft'),
('done', 'Done'),
('canceled', 'Canceled'),
], 'State', required=True, readonly=True)
@classmethod
def __setup__(cls):
super(CSVArchive, cls).__setup__()
2013-10-18 13:38:49 +02:00
cls._order.insert(0, ('date_archive', 'DESC'))
cls._order.insert(1, ('id', 'DESC'))
2013-09-12 16:03:10 +02:00
cls._transitions |= set((
('draft', 'done'),
('draft', 'canceled'),
('canceled', 'draft'),
))
cls._buttons.update({
'cancel': {
'invisible': Eval('state') != 'draft',
},
'draft': {
'invisible': Eval('state') != 'canceled',
'icon': If(Eval('state') == 'canceled', 'tryton-clear',
'tryton-go-previous'),
},
'import_csv': {
'invisible': Eval('state') != 'draft',
},
})
cls._error_messages.update({
'error': 'CSV Import Error!',
'reading_error': 'Error reading file %s.',
'read_error': 'Error reading file: %s.\nError %s.',
'success_simulation': 'Simulation successfully.',
'record_saved': 'Record %s saved successfully!',
'record_error': 'Error saving records.',
2013-09-12 16:03:10 +02:00
})
def get_data(self, name):
2013-09-20 10:07:36 +02:00
cursor = Transaction().cursor
path = os.path.join(CONFIG.get('data_path', '/var/lib/trytond'),
cursor.database_name, 'csv_import')
archive = '%s/%s' % (path, self.archive_name.replace(' ', '_'))
2013-09-12 16:03:10 +02:00
try:
with open(archive, 'r') as f:
return buffer(f.read())
except IOError:
self.raise_user_error('error',
error_description='reading_error',
error_description_args=(self.archive_name.replace(' ', '_'),),
2013-09-12 16:03:10 +02:00
raise_exception=True)
2013-09-19 10:20:43 +02:00
2013-09-12 16:03:10 +02:00
@classmethod
def set_data(cls, archives, name, value):
cursor = Transaction().cursor
path = os.path.join(CONFIG.get('data_path', '/var/lib/trytond'),
cursor.database_name, 'csv_import')
if not os.path.exists(path):
os.makedirs(path, mode=0777)
2013-09-12 16:03:10 +02:00
for archive in archives:
archive = '%s/%s' % (path, archive.archive_name.replace(' ', '_'))
2013-09-12 16:03:10 +02:00
try:
with open(archive, 'w') as f:
f.write(value)
except IOError, e:
cls.raise_user_error('error',
error_description='save_error',
error_description_args=(e,),
raise_exception=True)
def on_change_profile(self):
2013-11-28 14:42:20 +01:00
if not self.profile:
return {'archive_name': None}
2013-09-12 16:03:10 +02:00
today = Pool().get('ir.date').today()
files = len(self.search([
('archive_name', 'like', '%s_%s_%s.csv' %
2013-11-28 14:42:20 +01:00
(today, '%', slugify(self.profile.rec_name))),
2013-09-12 16:03:10 +02:00
]))
return {
'archive_name': ('%s_%s_%s.csv' %
2013-11-28 14:42:20 +01:00
(today, files, slugify(self.profile.rec_name))),
2013-09-12 16:03:10 +02:00
}
@staticmethod
def default_date_archive():
return datetime.now()
@staticmethod
def default_state():
return 'draft'
@staticmethod
def default_profile():
CSVProfile = Pool().get('csv.profile')
csv_profiles = CSVProfile.search([])
if len(csv_profiles) == 1:
return csv_profiles[0].id
2013-09-12 16:03:10 +02:00
@classmethod
def _add_default_values(cls, csv_model, values, parent_values=None):
2013-09-12 16:03:10 +02:00
""" This method is to be overridden and compute the default values
of the model
"""
return values
2013-09-12 16:03:10 +02:00
@classmethod
def post_import(cls, profile, records):
""" This method is made to be overridden and execute something with
imported records after import them. At the end of the inherited
@param profile: profile object
@param records: List of id records.
2013-09-12 16:03:10 +02:00
"""
pass
2013-09-12 16:03:10 +02:00
@classmethod
def _read_csv_file(cls, archive):
'''Read CSV data from archive'''
headers = None
profile = archive.profile
separator = profile.csv_archive_separator
if separator == "tab":
separator = '\t'
quote = profile.csv_quote
header = profile.csv_header
data = StringIO(archive.data)
try:
rows = reader(data, delimiter=str(separator),
2013-12-02 15:06:44 +01:00
quotechar=str(quote))
except TypeError, e:
cls.write([archive], {'logs': 'Error - %s' % (
cls.raise_user_error('error',
error_description='read_error',
error_description_args=(archive.archive_name, e),
raise_exception=False),
)})
return
2013-09-12 16:03:10 +02:00
if header:
headers = [filter(lambda x: x in string.printable, x).replace('"','')
for x in next(rows)] #TODO. Know why some header columns get ""
return rows, headers
2013-09-12 16:03:10 +02:00
@classmethod
@ModelView.button
2013-09-12 16:03:10 +02:00
@Workflow.transition('done')
def import_csv(cls, archives):
'''
Process archives to import data from CSV files
base: base model, e.g: party
childs: new lines related a base, e.g: addresses
'''
2013-09-12 16:03:10 +02:00
pool = Pool()
ExternalMapping = pool.get('base.external.mapping')
2013-10-18 14:58:11 +02:00
logs = []
2013-09-12 16:03:10 +02:00
for archive in archives:
profile = archive.profile
if not profile.create_record and not profile.update_record:
continue
2013-10-18 14:58:11 +02:00
data, headers = cls._read_csv_file(archive)
2013-09-18 08:35:31 +02:00
base_model = profile.model.model
child_mappings = []
for mapping in profile.mappings:
if not mapping.model.model == base_model:
child_mappings.append(mapping)
else:
base_mapping = mapping
2013-10-18 14:58:11 +02:00
2013-09-12 16:03:10 +02:00
new_records = []
new_lines = []
rows = list(data)
Base = pool.get(base_model)
for i in range(len(rows)):
row = rows[i]
2013-09-12 16:03:10 +02:00
if not row:
continue
#join header and row to convert a list to dict {header: value}
vals = dict(zip(headers, row))
#get values base model
if not new_lines:
base_values = ExternalMapping.map_external_to_tryton(
base_mapping.name, vals)
2013-12-02 15:06:44 +01:00
if not base_values.values()[0] == '':
new_lines = []
#get values child models
child_values = None
for child in child_mappings:
child_rel_field = child.csv_rel_field.name
child_values = ExternalMapping.map_external_to_tryton(
child.name, vals)
Child = pool.get(child.model.model)
# get default values in child model
child_values = cls._add_default_values(Child, child_values, base_values)
new_lines.append(child_values)
base_values[child_rel_field] = new_lines
#next row is empty first value, is a new line. Continue
if i < len(rows)-1:
if rows[i+1]:
if rows[i+1][0] == '':
continue
else:
new_lines = []
#create object or get object exist
records = None
if profile.update_record:
val = row[profile.code_external]
records = Base.search([(profile.code_internal.name, '=', val)])
if records:
base = Base(records[0])
if profile.create_record and not records:
base = Base()
#get default values from base model+
2013-12-02 19:23:03 +01:00
record_vals = cls._add_default_values(base, base_values)
#assign key, value in object class
#base.key = value
2013-12-02 19:23:03 +01:00
for key, value in record_vals.iteritems():
setattr(base, key, value)
#save - not testing
if not profile.testing:
base.save() #save or update
logs.append(cls.raise_user_error('record_saved',
error_args=(base.id,), raise_exception=False))
new_records.append(base.id)
if profile.testing:
logs.append(cls.raise_user_error('success_simulation',
raise_exception=False))
cls.post_import(profile, new_records)
cls.write([archive], {'logs': '\n'.join(logs)})
2013-09-12 16:03:10 +02:00
@classmethod
@ModelView.button
@Workflow.transition('draft')
def draft(cls, archives):
pass
@classmethod
@ModelView.button
@Workflow.transition('canceled')
def cancel(cls, archives):
pass