# -*- coding: utf-8 -*-
##############################################################################
#
#    OpenERP, Open Source Management Solution
#    Copyright (C) 2011 TeMPO Consulting, MSF. All Rights Reserved
#    Developer: Olivier DOSSMANN
#
#    This program is free software: you can redistribute it and/or modify
#    it under the terms of the GNU Affero General Public License as
#    published by the Free Software Foundation, either version 3 of the
#    License, or (at your option) any later version.
#
#    This program is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#    GNU Affero General Public License for more details.
#
#    You should have received a copy of the GNU Affero General Public License
#    along with this program.  If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################

from osv import osv
from osv import fields
from tools.translate import _
from time import strftime
from tempfile import NamedTemporaryFile
from base64 import decodestring
from spreadsheet_xml.spreadsheet_xml import SpreadsheetXML
from csv import DictReader
import threading
import pooler

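
# This wizard imports accounting entries from an XML/XLS spreadsheet file: each row
# is checked and stored as a temporary line ('msf.doc.import.accounting.lines'),
# then one journal entry is created per currency/period found among those lines
# (see create_entries below).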
class msf_doc_import_accounting(osv.osv_memory):
    _name = 'msf.doc.import.accounting'

    _columns = {
        'date': fields.date(string="Migration date", required=True),
        'file': fields.binary(string="File", filters='*.xml, *.xls', required=True),
        'filename': fields.char(string="Imported filename", size=256),
        'progression': fields.float(string="Progression", readonly=True),
        'message': fields.char(string="Message", size=256, readonly=True),
        'state': fields.selection([('draft', 'Created'), ('inprogress', 'In Progress'), ('error', 'Error'), ('done', 'Done')], string="State", readonly=True, required=True),
        'error_ids': fields.one2many('msf.doc.import.accounting.errors', 'wizard_id', "Errors", readonly=True),
    }

    _defaults = {
        'date': lambda *a: strftime('%Y-%m-%d'),
        'progression': lambda *a: 0.0,
        'state': lambda *a: 'draft',
        'message': lambda *a: _('Initialization…'),
    }
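
    # create_entries() turns the temporary imported lines attached to this wizard
    # into journal entries: one account.move per (currency, period) couple, with one
    # journal item per imported line and, for analytic-addicted accounts, an analytic
    # distribution on the line's Cost Centre / Destination (the funding pool defaults
    # to the MSF Private Fund).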
    def create_entries(self, cr, uid, ids, context=None):
        if context is None:
            context = {}
        # Search the migration journal of the current instance
        journal_ids = self.pool.get('account.journal').search(cr, uid, [('type', '=', 'migration'), ('is_current_instance', '=', True)])
        if not journal_ids:
            raise osv.except_osv(_('Warning'), _('No migration journal found!'))
        journal_id = journal_ids[0]
        # Fetch default funding pool: MSF Private Fund
        msf_fp_id = self.pool.get('ir.model.data').get_object_reference(cr, uid, 'analytic_distribution', 'analytic_account_msf_private_funds')[1]
        # Browse all given wizards
        for w in self.browse(cr, uid, ids):
            # Fetch the temporary lines attached to this wizard
            entries = self.pool.get('msf.doc.import.accounting.lines').search(cr, uid, [('wizard_id', '=', w.id)])
            if not entries:
                raise osv.except_osv(_('Error'), _('No lines…'))
            b_entries = self.pool.get('msf.doc.import.accounting.lines').browse(cr, uid, entries)
            # Update wizard
            self.write(cr, uid, [w.id], {'message': _('Grouping by currencies…'), 'progression': 10.0})
            # Search all currencies (to create moves)
            available_currencies = {}
            for entry in b_entries:
                if (entry.currency_id.id, entry.period_id.id) not in available_currencies:
                    available_currencies[(entry.currency_id.id, entry.period_id.id)] = []
                available_currencies[(entry.currency_id.id, entry.period_id.id)].append(entry)
            # Update wizard
            self.write(cr, uid, ids, {'message': _('Writing a move for each currency…'), 'progression': 20.0})
            # Prepare progression bookkeeping
            nb_currencies = float(len(available_currencies))
            current_percent = 20.0
            remaining_percent = 80.0
            step = float(remaining_percent / nb_currencies)
            num = 1
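            # The remaining 80% of the progress bar is split evenly between the
            # currency/period groups ('step' per group); within a group the bar also
            # advances as its lines are processed.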
            for c_id, p_id in available_currencies:
                # Create the journal entry (account.move) for this currency/period couple
                move_vals = {
                    'manual_currency_id': c_id,
                    'journal_id': journal_id,
                    'document_date': w.date,
                    'date': w.date,
                    'period_id': p_id,
                }
                move_id = self.pool.get('account.move').create(cr, uid, move_vals, context)
                for l_num, l in enumerate(available_currencies[(c_id, p_id)]):
                    # Update wizard
                    progression = 20.0 + ((float(l_num) / float(len(b_entries))) * step) + (float(num - 1) * step)
                    self.write(cr, uid, [w.id], {'progression': progression})
                    distrib_id = False
                    # Create analytic distribution
                    if l.account_id.is_analytic_addicted:
                        distrib_id = self.pool.get('analytic.distribution').create(cr, uid, {}, context)
                        common_vals = {
                            'distribution_id': distrib_id,
                            'currency_id': c_id,
                            'percentage': 100.0,
                            'date': l.date,
                            'source_date': l.date,
                            'destination_id': l.destination_id.id,
                        }
                        common_vals.update({'analytic_id': l.cost_center_id.id,})
                        cc_res = self.pool.get('cost.center.distribution.line').create(cr, uid, common_vals)
                        common_vals.update({'analytic_id': msf_fp_id, 'cost_center_id': l.cost_center_id.id,})
                        fp_res = self.pool.get('funding.pool.distribution.line').create(cr, uid, common_vals)
                    # Create the journal item attached to the move
                    move_line_vals = {
                        'name': l.description,
                        'ref': l.ref,
                        'account_id': l.account_id.id,
                        'move_id': move_id,
                        'document_date': l.document_date,
                        'date': l.date,
                        'journal_id': journal_id,
                        'debit_currency': l.debit,
                        'credit_currency': l.credit,
                        'currency_id': c_id,
                        'analytic_distribution_id': distrib_id,
                        'partner_id': l.partner_id and l.partner_id.id or False,
                        'employee_id': l.employee_id and l.employee_id.id or False,
                    }
                    self.pool.get('account.move.line').create(cr, uid, move_line_vals, context, check=False)
                # Validate the Journal Entry for lines to be valid (if possible)
                self.write(cr, uid, [w.id], {'message': _('Validating journal entry…')})
                self.pool.get('account.move').validate(cr, uid, [move_id], context=context)
                # Update wizard
                progression = 20.0 + (float(num) * step)
                self.write(cr, uid, [w.id], {'progression': progression})
                num += 1
        return True
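
    # _import() does the whole treatment in two phases: it first validates the
    # spreadsheet row by row (collecting human-readable messages in 'errors' and
    # per-currency totals in 'money'), and only if no error was found does it call
    # create_entries() to write the journal entries. It is meant to be run in a
    # background thread (see button_validate), which is why it receives a database
    # name and opens its own cursor.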
    def _import(self, dbname, uid, ids, context=None):
        """
        Do treatment before validation:
        - check data from wizard
        - check that file exists and that data are inside
        - check integrity of data in files
        """
        if context is None:
            context = {}
        # Prepare some values
        errors = []
        money = {}
        from_yml = False
        if context.get('from_yml', False):
            from_yml = context.get('from_yml')
            # Do changes because of YAML tests: the test framework passes its cursor directly as 'dbname'
            cr = dbname
        else:
            cr = pooler.get_db(dbname).cursor()
        try:
            # Update wizard
            self.write(cr, uid, ids, {'message': _('Cleaning up old imports…'), 'progression': 1.00})
            # Clean up old temporary imported lines
            old_lines_ids = self.pool.get('msf.doc.import.accounting.lines').search(cr, uid, [])
            self.pool.get('msf.doc.import.accounting.lines').unlink(cr, uid, old_lines_ids)
            # Browse all given wizards
            for wiz in self.browse(cr, uid, ids):
                # Update wizard
                self.write(cr, uid, [wiz.id], {'message': _('Checking file…'), 'progression': 2.00})
                # UF-2045: Check that the given date is in an open period
                wiz_period_ids = self.pool.get('account.period').get_period_from_date(cr, uid, wiz.date, context)
                if not wiz_period_ids:
                    raise osv.except_osv(_('Warning'), _('No period found!'))
                period = self.pool.get('account.period').browse(cr, uid, wiz_period_ids[0], context)
                if not period or period.state in ['created', 'done']:
                    raise osv.except_osv(_('Warning'), _('Period for migration is not open!'))
                date = wiz.date or False
                # Check that a file was given
                if not wiz.file:
                    raise osv.except_osv(_('Error'), _('Nothing to import.'))
                # Update wizard
                self.write(cr, uid, [wiz.id], {'message': _('Copying file…'), 'progression': 3.00})
                # Copy the imported file content into a temporary file on disk
                fileobj = NamedTemporaryFile('w+b', delete=False)
                fileobj.write(decodestring(wiz.file))
                fileobj.close()
                content = SpreadsheetXML(xmlfile=fileobj.name)
                if not content:
                    raise osv.except_osv(_('Warning'), _('No content'))
                # Update wizard
                self.write(cr, uid, [wiz.id], {'message': _('Processing line…'), 'progression': 4.00})
                rows = content.getRows()
                nb_rows = len([x for x in content.getRows()])
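                # NB: 'rows' is expected to behave as an iterator here: the loop below
                # only reads the first (header) row to map column names to indices, and
                # the data loop further down continues from the second row. nb_rows is
                # therefore counted on a fresh getRows() call.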
                # Update wizard
                self.write(cr, uid, [wiz.id], {'message': _('Reading headers…'), 'progression': 5.00})
                # Use the first row to find which column to use
                cols = {}
                col_names = ['Description', 'Reference', 'Document Date', 'Posting Date', 'G/L Account', 'Third party', 'Destination', 'Cost Centre', 'Booking Debit', 'Booking Credit', 'Booking Currency']
                for num, r in enumerate(rows):
                    header = [x and x.data for x in r.iter_cells()]
                    for el in col_names:
                        if el in header:
                            cols[el] = header.index(el)
                    # Only the header row is read
                    break
                # Number of line to bypass in line's count
                base_num = 2
                # All expected columns must have been found in the header
                for el in col_names:
                    if el not in cols:
                        raise osv.except_osv(_('Error'), _("'%s' column not found in file.") % (el or '',))
                # Update wizard
                self.write(cr, uid, [wiz.id], {'message': _('Reading lines…'), 'progression': 6.00})
                # Check file's content
                for num, r in enumerate(rows):
                    # Update wizard with the current progression
                    percent = (float(num+1) / float(nb_rows+1)) * 100.0
                    progression = ((float(num+1) * 94) / float(nb_rows)) + 6
                    self.write(cr, uid, [wiz.id], {'message': _('Checking file…'), 'progression': progression})
                    # Prepare some values
                    r_debit = 0.0
                    r_credit = 0.0
                    r_currency = False
                    r_partner = False
                    r_account = False
                    r_destination = False
                    r_cc = False
                    # UTP-766: Do not use Document date column, but wizard's one as document date for each line
                    #r_document_date = False
                    current_line_num = num + base_num
                    # Fetch all XML row values
                    line = self.pool.get('import.cell.data').get_line_values(cr, uid, ids, r)
                    # Bypass this line if NO debit AND NO credit
                    try:
                        bd = line[cols['Booking Debit']]
                    except IndexError, e:
                        bd = ''
                    try:
                        bc = line[cols['Booking Credit']]
                    except IndexError, e:
                        bc = ''
                    if not line[cols['Booking Debit']] and not line[cols['Booking Credit']]:
                        continue
                    # Check that currency is active
                    if not line[cols['Booking Currency']]:
                        errors.append(_('Line %s. No currency specified!') % (current_line_num,))
                        continue
                    curr_ids = self.pool.get('res.currency').search(cr, uid, [('name', '=', line[cols['Booking Currency']])])
                    if not curr_ids:
                        errors.append(_('Line %s. Currency not found: %s') % (current_line_num, line[cols['Booking Currency']],))
                        continue
                    for c in self.pool.get('res.currency').browse(cr, uid, curr_ids):
                        if not c.active:
                            errors.append(_('Line %s. Currency is not active: %s') % (current_line_num, line[cols['Booking Currency']],))
                    r_currency = curr_ids[0]
                    # Accumulate per-currency debit/credit totals for the balance check
                    if not line[cols['Booking Currency']] in money:
                        money[line[cols['Booking Currency']]] = {}
                    if not 'debit' in money[line[cols['Booking Currency']]]:
                        money[line[cols['Booking Currency']]]['debit'] = 0
                    if not 'credit' in money[line[cols['Booking Currency']]]:
                        money[line[cols['Booking Currency']]]['credit'] = 0
                    if not 'name' in money[line[cols['Booking Currency']]]:
                        money[line[cols['Booking Currency']]]['name'] = line[cols['Booking Currency']]
                    # Increment global debit/credit
                    if line[cols['Booking Debit']]:
                        money[line[cols['Booking Currency']]]['debit'] += line[cols['Booking Debit']]
                        r_debit = line[cols['Booking Debit']]
                    if line[cols['Booking Credit']]:
                        money[line[cols['Booking Currency']]]['credit'] += line[cols['Booking Credit']]
                        r_credit = line[cols['Booking Credit']]
                    # Check document/posting dates
                    # UTP-766: Do not use Document date column, but wizard's one
                    #if not line[cols['Document Date']]:
                    #    errors.append(_('Line %s. No document date specified!') % (current_line_num,))
                    # UTP-766: Do not use Posting date column, but wizard's one
                    #if line[cols['Document Date']] > date:
                    #    errors.append(_("Line %s. Document date '%s' should be inferior or equal to given Posting date '%s'.") % (current_line_num, line[cols['Document Date']], date,))
                    # Fetch document date
                    #r_document_date = line[cols['Document Date']].strftime('%Y-%m-%d')
                    # Check G/L account
                    if not line[cols['G/L Account']]:
                        errors.append(_('Line %s. No G/L account specified!') % (current_line_num,))
                        continue
                    account_ids = self.pool.get('account.account').search(cr, uid, [('code', '=', line[cols['G/L Account']])])
                    if not account_ids:
                        errors.append(_('Line %s. G/L account %s not found!') % (current_line_num, line[cols['G/L Account']],))
                        continue
                    r_account = account_ids[0]
                    account = self.pool.get('account.account').browse(cr, uid, r_account)
                    # Check that Third party exists (if not empty)
                    tp_label = _('Partner')
                    if line[cols['Third party']]:
                        if account.type_for_register == 'advance':
                            tp_ids = self.pool.get('hr.employee').search(cr, uid, [('name', '=', line[cols['Third party']])])
                            tp_label = _('Employee')
                        else:
                            tp_ids = self.pool.get('res.partner').search(cr, uid, [('name', '=', line[cols['Third party']])])
                        if not tp_ids:
                            errors.append(_('Line %s. %s not found: %s') % (current_line_num, tp_label, line[cols['Third party']],))
                            continue
                        r_partner = tp_ids[0]
                    # Check analytic axis only if G/L account is analytic-a-holic
                    if account.is_analytic_addicted:
                        # Check Destination
                        if not line[cols['Destination']]:
                            errors.append(_('Line %s. No destination specified!') % (current_line_num,))
                        else:
                            destination_ids = self.pool.get('account.analytic.account').search(cr, uid, [('category', '=', 'DEST'), '|', ('name', '=', line[cols['Destination']]), ('code', '=', line[cols['Destination']])])
                            if not destination_ids:
                                errors.append(_('Line %s. Destination %s not found!') % (current_line_num, line[cols['Destination']],))
                            else:
                                r_destination = destination_ids[0]
                        # Check Cost Centre
                        if not line[cols['Cost Centre']]:
                            errors.append(_('Line %s. No cost center specified!') % (current_line_num,))
                        else:
                            cc_ids = self.pool.get('account.analytic.account').search(cr, uid, [('category', '=', 'OC'), '|', ('name', '=', line[cols['Cost Centre']]), ('code', '=', line[cols['Cost Centre']])])
                            if not cc_ids:
                                errors.append(_('Line %s. Cost Center %s not found!') % (current_line_num, line[cols['Cost Centre']]))
                            else:
                                r_cc = cc_ids[0]
                    # NOTE: There is no need to check G/L account, Cost Center and Destination regarding document/posting date because this check is already done at Journal Entries validation.
                    # Register the checked values as a temporary import line
                    vals = {
                        'description': line[cols['Description']] or '',
                        'ref': line[cols['Reference']] or '',
                        'account_id': r_account or False,
                        'debit': r_debit or 0.0,
                        'credit': r_credit or 0.0,
                        'cost_center_id': r_cc or False,
                        'destination_id': r_destination or False,
                        'document_date': date or False, #r_document_date or False,
                        'date': date or False,
                        'currency_id': r_currency or False,
                        'wizard_id': wiz.id,
                        'period_id': period and period.id or False,
                    }
                    if account.type_for_register == 'advance':
                        vals.update({'employee_id': r_partner,})
                    else:
                        vals.update({'partner_id': r_partner,})
                    line_res = self.pool.get('msf.doc.import.accounting.lines').create(cr, uid, vals, context)
                    if not line_res:
                        errors.append(_('Line %s. A problem occurred for line registration. Please contact an Administrator.') % (current_line_num,))
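                # After all rows have been read, each currency must be balanced: the
                # check below raises as soon as debits exceed credits by 0.01 (one
                # cent) or more in any currency, which aborts the whole import.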
                # Check if all is ok for the file
                ## The lines should be balanced for each currency
                for c in money:
                    if (money[c]['debit'] - money[c]['credit']) >= 10**-2:
                        raise osv.except_osv(_('Error'), _('Currency %s is not balanced: %s') % (money[c]['name'], (money[c]['debit'] - money[c]['credit']),))
                # Update wizard
                self.write(cr, uid, ids, {'message': _('Check complete. Reading potential errors or writing needed changes.'), 'progression': 100.0})
                wiz_state = 'done'
                # If errors, cancel probable modifications
                if errors:
                    cr.rollback()
                    wiz_state = 'error'
                    message = _('Import FAILED.')
                    # Delete old errors
                    error_ids = self.pool.get('msf.doc.import.accounting.errors').search(cr, uid, [], context)
                    if error_ids:
                        self.pool.get('msf.doc.import.accounting.errors').unlink(cr, uid, error_ids, context)
                    # create errors lines
                    for e in errors:
                        self.pool.get('msf.doc.import.accounting.errors').create(cr, uid, {'wizard_id': wiz.id, 'name': e}, context)
                else:
                    # Update wizard
                    self.write(cr, uid, ids, {'message': _('Writing changes…'), 'progression': 0.0})
                    # Create all journal entries
                    self.create_entries(cr, uid, ids, context)
                    message = _('Import successful.')
                # Update wizard
                self.write(cr, uid, ids, {'message': message, 'state': wiz_state, 'progression': 100.0})
        except osv.except_osv as osv_error:
            cr.rollback()
            self.write(cr, uid, ids, {'message': _("An error occurred. %s: %s") % (osv_error.name, osv_error.value,), 'state': 'done', 'progression': 100.0})
        except Exception as e:
            cr.rollback()
            self.write(cr, uid, ids, {'message': _("An error occurred: %s") % (e.args and e.args[0] or '',), 'state': 'done', 'progression': 100.0})
        if not from_yml:
            cr.commit()
            cr.close()
        return True
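
    # button_validate() runs the import in a background thread so the web client is
    # not blocked: the thread receives the database name and _import() opens its own
    # cursor. When called from YAML tests ('from_yml' in context) the import runs
    # synchronously on the current cursor instead.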
    def button_validate(self, cr, uid, ids, context=None):
        """
        Launch process in a thread and return a wizard
        """
        if context is None:
            context = {}
        if context.get('from_yml', False):
            res = self.write(cr, uid, ids, {'state': 'inprogress'}, context=context)
            self._import(cr, uid, ids, context=context)
        else:
            # Launch a thread if we come from web
            thread = threading.Thread(target=self._import, args=(cr.dbname, uid, ids, context))
            thread.start()
            res = self.write(cr, uid, ids, {'state': 'inprogress'}, context=context)

    def button_update(self, cr, uid, ids, context=None):
        """
        Refresh the view so that the user sees the current progression
        """
        return False

msf_doc_import_accounting()


class msf_doc_import_accounting_lines(osv.osv):
    _name = 'msf.doc.import.accounting.lines'

    _columns = {
        'description': fields.text("Description", required=False, readonly=True),
        'ref': fields.text("Reference", required=False, readonly=True),
        'document_date': fields.date("Document date", required=True, readonly=True),
        'date': fields.date("Posting date", required=True, readonly=True),
        'account_id': fields.many2one('account.account', "G/L Account", required=True, readonly=True),
        'destination_id': fields.many2one('account.analytic.account', "Destination", required=False, readonly=True),
        'cost_center_id': fields.many2one('account.analytic.account', "Cost Center", required=False, readonly=True),
        'debit': fields.float("Debit", required=False, readonly=True),
        'credit': fields.float("Credit", required=False, readonly=True),
        'currency_id': fields.many2one('res.currency', "Currency", required=True, readonly=True),
        'partner_id': fields.many2one('res.partner', "Partner", required=False, readonly=True),
        'employee_id': fields.many2one('hr.employee', "Employee", required=False, readonly=True),
        'period_id': fields.many2one('account.period', "Period", required=True, readonly=True),
        'wizard_id': fields.integer("Wizard", required=True, readonly=True),
    }

    _defaults = {
        'description': lambda *a: '',
        'ref': lambda *a: '',
        'document_date': lambda *a: strftime('%Y-%m-%d'),
        'date': lambda *a: strftime('%Y-%m-%d'),
        'debit': lambda *a: 0.0,
        'credit': lambda *a: 0.0,
    }

msf_doc_import_accounting_lines()


class msf_doc_import_accounting_errors(osv.osv_memory):
    _name = 'msf.doc.import.accounting.errors'

    _columns = {
        'name': fields.text("Description", readonly=True, required=True),
        'wizard_id': fields.many2one('msf.doc.import.accounting', "Wizard", required=True, readonly=True),
    }

msf_doc_import_accounting_errors()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: