diff --git a/account_move_csv_import/__init__.py b/account_move_csv_import/__init__.py
index 9809b86..7749576 100644
--- a/account_move_csv_import/__init__.py
+++ b/account_move_csv_import/__init__.py
@@ -1,3 +1,4 @@
 # -*- encoding: utf-8 -*-
+from . import models
 from . import wizard
diff --git a/account_move_csv_import/models/__init__.py b/account_move_csv_import/models/__init__.py
new file mode 100644
index 0000000..5665f63
--- /dev/null
+++ b/account_move_csv_import/models/__init__.py
@@ -0,0 +1,4 @@
+# -*- coding: utf-8 -*-
+# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
+
+from . import account
diff --git a/account_move_csv_import/models/account.py b/account_move_csv_import/models/account.py
new file mode 100644
index 0000000..20989f1
--- /dev/null
+++ b/account_move_csv_import/models/account.py
@@ -0,0 +1,13 @@
+# -*- coding: utf-8 -*-
+# © 2018 Akretion
+# @author Mourad EL HADJ MIMOUNE
+# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
+
+from odoo import fields, models
+
+
+class AccountMoveLine(models.Model):
+    _inherit = "account.move.line"
+
+    import_reconcile = fields.Char()
+    import_external_id = fields.Char()
\ No newline at end of file
diff --git a/account_move_csv_import/wizard/import_move.py b/account_move_csv_import/wizard/import_move.py
index 5f4bb94..8469723 100644
--- a/account_move_csv_import/wizard/import_move.py
+++ b/account_move_csv_import/wizard/import_move.py
@@ -1,6 +1,7 @@
 # -*- coding: utf-8 -*-
-# © 2012-2017 Akretion (http://www.akretion.com)
+# © 2012-2018 Akretion (http://www.akretion.com)
 # @author Alexis de Lattre
+# @author Mourad EL HADJ MIMOUNE
 # License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
 
 from odoo import models, fields, api, _
@@ -30,12 +31,31 @@ class AccountMoveImport(models.TransientModel):
         ('meilleuregestion', 'MeilleureGestion (Prisme)'),
         ('quadra', 'Quadra (without analytic)'),
         ('extenso', 'In Extenso'),
+        ('fec_txt', 'FEC (text)'),
         ('payfit', 'Payfit'),
         ], string='File Format', required=True,
         help="Select the type of file you are importing.")
     post_move = fields.Boolean(
         string='Post Journal Entry',
         help="If True, the journal entry will be posted after the import.")
+    date_format = fields.Char(
+        default='%d/%m/%Y',
+        required=True,
+        help='Date format of the date column. Only used for the Generic '
+             'CSV format, e.g. "%d%m%Y".')
+    move_ref_unique = fields.Boolean(
+        string='Unique Move Ref?',
+        help="If True, the ref column is used to detect a new journal "
+             "entry in the file.")
+    date_by_move_line = fields.Boolean(
+        string='Date by Move Line?',
+        help="If True, the date is not used to detect the lines belonging "
+             "to the same journal entry (in Odoo, the date is stored on the "
+             "journal entry, not on its lines).")
+    file_with_header = fields.Boolean(
+        help="Indicate whether the file contains a header line.")
+    create_missing_partner = fields.Boolean(
+        help='If True, a new partner is created for every missing partner. '
+             'These partners contain the text "Created by import move" in '
+             'their note (comment field), so you can filter on that text to '
+             'complete them later.')
     force_journal_id = fields.Many2one(
         'account.journal', string="Force Journal",
         help="Journal in which the journal entry will be created, "
@@ -71,6 +91,8 @@ def file_format_change(self):
     # 'ref': '9804',  # optional
     # 'journal': {'code': 'VT'},
     # 'date': '2017-02-15',  # also accepted in datetime format
+    # 'reconcile_ref': 'A1242',  # will be written in import_reconcile
+    #                            # and processed after move line creation
     # 'line': 2,  # Line number for error messages.
     #             # Must be the line number including headers
     # },
@@ -78,32 +100,49 @@ def file_format_change(self):
     # 3rd line...
     # ]
 
-    def file2pivot(self, fileobj, filestr):
+    def file2pivot(
+            self, fileobj, filestr, file_with_header=False, fieldnames=False):
         file_format = self.file_format
         if file_format == 'meilleuregestion':
-            return self.meilleuregestion2pivot(fileobj)
+            return self.meilleuregestion2pivot(
+                fileobj, file_with_header=file_with_header)
         elif file_format == 'genericcsv':
-            return self.genericcsv2pivot(fileobj)
+            return self.genericcsv2pivot(
+                fileobj, file_with_header=file_with_header,
+                date_format=self.date_format,
+                fieldnames=fieldnames)
         elif file_format == 'quadra':
-            return self.quadra2pivot(filestr)
+            return self.quadra2pivot(
+                fileobj, file_with_header=file_with_header)
        elif file_format == 'extenso':
-            return self.extenso2pivot(fileobj)
+            return self.extenso2pivot(
+                fileobj, file_with_header=file_with_header)
         elif file_format == 'payfit':
-            return self.payfit2pivot(filestr)
+            return self.payfit2pivot(
+                fileobj, file_with_header=file_with_header)
+        elif file_format == 'fec_txt':
+            return self.fectxt2pivot(
+                fileobj, file_with_header=file_with_header)
         else:
             raise UserError(_("You must select a file format."))
 
-    def run_import(self):
+    def run_import(self, fieldnames=False):
         self.ensure_one()
         fileobj = TemporaryFile('w+')
         filestr = self.file_to_import.decode('base64')
         fileobj.write(filestr)
         fileobj.seek(0)  # We must start reading from the beginning !
-        pivot = self.file2pivot(fileobj, filestr)
+        pivot = self.file2pivot(
+            fileobj, filestr, file_with_header=self.file_with_header,
+            fieldnames=fieldnames)
         fileobj.close()
         logger.debug('pivot before update: %s', pivot)
         self.update_pivot(pivot)
-        moves = self.create_moves_from_pivot(pivot, post=self.post_move)
+        moves = self.create_moves_from_pivot(
+            pivot, post=self.post_move, move_ref_unique=self.move_ref_unique,
+            date_by_move_line=self.date_by_move_line,
+            create_missing_partner=self.create_missing_partner)
+        self.reconcile_move_lines(moves)
         action = {
             'name': _('Imported Journal Entries'),
             'res_model': 'account.move',
@@ -145,7 +184,7 @@ def update_pivot(self, pivot):
             if not l['debit']:
                 l['debit'] = 0.0
 
-    def extenso2pivot(self, fileobj):
+    def extenso2pivot(self, fileobj, file_with_header=False):
         fieldnames = [
             'journal', 'date', False, 'account', False, False, False, False,
             'debit', 'credit']
@@ -159,6 +198,8 @@ def extenso2pivot(self, fileobj):
         i = 0
         for l in reader:
             i += 1
+            if i == 1 and file_with_header:
+                continue
             l['credit'] = l['credit'] or '0'
             l['debit'] = l['debit'] or '0'
             vals = {
@@ -172,36 +213,84 @@ def extenso2pivot(self, fileobj):
             res.append(vals)
         return res
 
-    def genericcsv2pivot(self, fileobj):
-        # Prisme
+    def fectxt2pivot(self, fileobj, file_with_header=False):
         fieldnames = [
-            'date', 'journal', 'account', 'partner',
-            'analytic', 'name', 'debit', 'credit',
-            ]
+            'journal', False, False, 'date', 'account', False,
+            False, False,  # CompAuxNum|CompAuxLib
+            'ref', False, 'name', 'debit', 'credit',
+            'reconcile_ref', False, False, False, False]
         reader = unicodecsv.DictReader(
             fileobj,
             fieldnames=fieldnames,
-            delimiter=',',
-            quotechar='"',
-            quoting=unicodecsv.QUOTE_MINIMAL,
+            delimiter='\t',
+            quoting=False,
             encoding='utf-8')
         res = []
         i = 0
         for l in reader:
             i += 1
+            if i == 1 and file_with_header:
+                continue
+            l['credit'] = l['credit'] or '0'
+            l['debit'] = l['debit'] or '0'
             vals = {
                 'journal': {'code': l['journal']},
                 'account': {'code': l['account']},
-                'credit': float(l['credit'] or 0),
-                'debit': float(l['debit'] or 0),
-                'date': datetime.strptime(l['date'], '%d/%m/%Y'),
+                # 'partner': {'ref': '1242'},
+                'credit': float(l['credit'].replace(',', '.')),
+                'debit': float(l['debit'].replace(',', '.')),
+                'date': datetime.strptime(l['date'], '%d%m%Y'),
                 'name': l['name'],
+                'reconcile_ref': l['reconcile_ref'],
                 'line': i,
-                }
-            if l['analytic']:
-                vals['analytic'] = {'code': l['analytic']}
-            if l['partner']:
-                vals['partner'] = {'ref': l['partner']}
+                }
+            res.append(vals)
+        return res
+
+    def genericcsv2pivot(self, fileobj, file_with_header=False,
+                         date_format='%d/%m/%Y', fieldnames=False):
+        # Generic CSV
+        if not fieldnames:
+            fieldnames = [
+                'date', 'journal', 'account', 'partner',
+                'ref', 'name', 'debit', 'credit', 'reconcile_ref', 'analytic'
+                ]
+        reader = unicodecsv.DictReader(
+            fileobj,
+            fieldnames=fieldnames,
+            delimiter=',',
+            quotechar='"',
+            quoting=unicodecsv.QUOTE_MINIMAL,
+            encoding='utf-8')
+        res = []
+        i = 0
+        for ln in reader:
+            i += 1
+            if i == 1 and file_with_header:
+                continue
+            date_str = ln['date']
+            # complete the date if the format is ddmmyy and the leading zero
+            # of the day is missing, e.g. 10118 ==> 010118 (for 01/01/2018)
+            if date_format == '%d%m%y' and\
+                    len(ln['date']) == 5:
+                date_str = '0' + date_str
+            date = datetime.strptime(date_str, date_format)
+            vals = {
+                'journal': {'code': ln['journal']},
+                'account': {'code': ln['account']},
+                'credit': float(ln['credit'].replace(',', '.') or 0),
+                'debit': float(ln['debit'].replace(',', '.') or 0),
+                'date': date,
+                'name': ln['name'],
+                'reconcile_ref': ln['reconcile_ref'],
+                'line': i,
+                }
+            if ln['analytic']:
+                vals['analytic'] = {'code': ln['analytic']}
+            if ln['partner']:
+                vals['partner'] = {'ref': ln['partner']}
+            if ln.get('id'):
+                vals['id'] = ln['id']
             res.append(vals)
         return res
 
@@ -296,48 +385,77 @@ def payfit2pivot(self, filestr):
             res.append(vals)
         return res
 
-    def create_moves_from_pivot(self, pivot, post=False):
+    def create_moves_from_pivot(
+            self, pivot, post=False, move_ref_unique=False,
+            date_by_move_line=False, create_missing_partner=False):
         logger.debug('Final pivot: %s', pivot)
         bdio = self.env['business.document.import']
         amo = self.env['account.move']
+        amol = self.env['account.move.line']
+        part_obj = self.env['res.partner']
         acc_speed_dict = bdio._prepare_account_speed_dict()
         aacc_speed_dict = bdio._prepare_analytic_account_speed_dict()
         journal_speed_dict = bdio._prepare_journal_speed_dict()
         chatter_msg = []
         # MATCH what needs to be matched... + CHECKS
-        for l in pivot:
-            assert l.get('line') and isinstance(l.get('line'), int),\
+        for ln in pivot:
+            assert ln.get('line') and isinstance(ln.get('line'), int),\
                 'missing line number'
-            error_prefix = _('Line %d:') % l['line']
+            error_prefix = _('Line %d:') % ln['line']
             bdiop = bdio.with_context(error_prefix=error_prefix)
             account = bdiop._match_account(
-                l['account'], chatter_msg, acc_speed_dict)
-            l['account_id'] = account.id
-            if l.get('partner'):
-                partner = bdiop._match_partner(
-                    l['partner'], chatter_msg, partner_type=False)
-                l['partner_id'] = partner.commercial_partner_id.id
-            if l.get('analytic'):
+                ln['account'], chatter_msg, acc_speed_dict)
+            ln['account_id'] = account.id
+            if ln.get('partner'):
+                partner = False
+                try:
+                    partner = bdiop._match_partner(
+                        ln['partner'], chatter_msg, partner_type=False)
+                except Exception as e:
+                    if create_missing_partner:
+                        pass
+                    else:
+                        raise e
+                if create_missing_partner and not partner:
+                    name_field = 'name'
+                    # handle the case where the partner_firstname module
+                    # is installed
+                    if 'firstname' in part_obj._fields:
+                        name_field = 'firstname'
+                    partner = part_obj.create({
+                        name_field: ln['partner']['ref'],
+                        'ref': ln['partner']['ref'],
+                        'comment': 'Created by import move',
+                        'is_company': True,
+                        })
+                    logger.info(
+                        'Created missing partner %s', ln['partner'])
+                ln['partner_id'] = partner.commercial_partner_id.id
+            if ln.get('analytic'):
                 analytic = bdiop._match_analytic_account(
-                    l['analytic'], chatter_msg, aacc_speed_dict)
-                l['analytic_account_id'] = analytic.id
+                    ln['analytic'], chatter_msg, aacc_speed_dict)
+                ln['analytic_account_id'] = analytic.id
             journal = bdiop._match_journal(
-                l['journal'], chatter_msg, journal_speed_dict)
-            l['journal_id'] = journal.id
-            if not l.get('name'):
+                ln['journal'], chatter_msg, journal_speed_dict)
+            ln['journal_id'] = journal.id
+            # check if this line was already imported
+            if ln.get('id'):
+                if amol.search([('import_external_id', '=', ln.get('id'))]):
+                    raise UserError(_(
+                        'Line %d: already imported.') % ln['line'])
+            if not ln.get('name'):
                 raise UserError(_(
-                    'Line %d: missing label.') % l['line'])
-            if not l.get('date'):
+                    'Line %d: missing label.') % ln['line'])
+            if not ln.get('date'):
                 raise UserError(_(
-                    'Line %d: missing date.') % l['line'])
-            if not isinstance(l.get('credit'), float):
+                    'Line %d: missing date.') % ln['line'])
+            if not isinstance(ln.get('credit'), float):
                 raise UserError(_(
                     'Line %d: bad value for credit (%s).')
-                    % (l['line'], l['credit']))
-            if not isinstance(l.get('debit'), float):
+                    % (ln['line'], ln['credit']))
+            if not isinstance(ln.get('debit'), float):
                 raise UserError(_(
                     'Line %d: bad value for debit (%s).')
-                    % (l['line'], l['debit']))
+                    % (ln['line'], ln['debit']))
             # test that they don't have both a value
         # EXTRACT MOVES
         moves = []
@@ -347,15 +465,21 @@ def create_moves_from_pivot(self, pivot, post=False):
         cur_balance = 0.0
         prec = self.env.user.company_id.currency_id.rounding
         cur_move = {}
-        for l in pivot:
-            ref = l.get('ref', False)
-            if (
-                    cur_ref == ref and
-                    cur_journal_id == l['journal_id'] and
-                    cur_date == l['date'] and
-                    not float_is_zero(cur_balance, precision_rounding=prec)):
+        for ln in pivot:
+            ref = ln.get('ref', False)
+            detect_new_move = True
+            if move_ref_unique:
+                detect_new_move = (cur_ref == ref)
+            if not date_by_move_line:
+                detect_new_move = detect_new_move and\
+                    (cur_date == ln['date'])
+            detect_new_move = detect_new_move and (
+                cur_journal_id == ln['journal_id']) and\
+                not float_is_zero(cur_balance, precision_rounding=prec)
+            if (detect_new_move):
                 # append to current move
-                cur_move['line_ids'].append((0, 0, self._prepare_move_line(l)))
+                cur_move['line_ids'].append(
+                    (0, 0, self._prepare_move_line(ln)))
             else:
                 # new move
                 if moves and not float_is_zero(
@@ -363,17 +487,19 @@ def create_moves_from_pivot(self, pivot, post=False):
                     raise UserError(_(
                         "The journal entry that ends on line %d is not "
                         "balanced (balance is %s).")
-                        % (l['line'] - 1, cur_balance))
+                        % (ln['line'] - 1, cur_balance))
                 if cur_move:
-                    assert len(cur_move['line_ids']) > 1,\
-                        'move should have more than 1 line'
+                    if len(cur_move['line_ids']) <= 1:
+                        raise UserError(_(
+                            "The journal entry before line %d has only "
+                            "one line (data: %s).")
+                            % (ln['line'], cur_move['line_ids']))
                     moves.append(cur_move)
-                cur_move = self._prepare_move(l)
-                cur_move['line_ids'] = [(0, 0, self._prepare_move_line(l))]
-                cur_date = l['date']
+                cur_move = self._prepare_move(ln)
+                cur_move['line_ids'] = [(0, 0, self._prepare_move_line(ln))]
+                cur_date = ln['date']
                 cur_ref = ref
-                cur_journal_id = l['journal_id']
-                cur_balance += l['credit'] - l['debit']
+                cur_journal_id = ln['journal_id']
+                cur_balance += ln['credit'] - ln['debit']
         if cur_move:
             moves.append(cur_move)
         if not float_is_zero(cur_balance, precision_rounding=prec):
@@ -382,7 +508,8 @@ def create_moves_from_pivot(self, pivot, post=False):
                 "balanced (balance is %s).") % cur_balance)
         rmoves = self.env['account.move']
         for move in moves:
-            rmoves += amo.create(move)
+            rmoves |= amo.create(move)
+
         logger.info(
             'Account moves IDs %s created via file import' % rmoves.ids)
         if post:
@@ -405,5 +532,63 @@ def _prepare_move_line(self, pivot_line):
             'partner_id': pivot_line.get('partner_id'),
             'account_id': pivot_line['account_id'],
             'analytic_account_id': pivot_line.get('analytic_account_id'),
+            'import_reconcile': pivot_line.get('reconcile_ref') or False,
             }
+        if pivot_line.get('id'):
+            vals.update({'import_external_id': pivot_line.get('id')})
         return vals
+
+    def reconcile_move_lines(self, moves):
+        prec = self.env.user.company_id.currency_id.rounding
+        logger.info('Start to reconcile imported moves')
+        lines = self.env['account.move.line'].search([
+            ('move_id', 'in', moves.ids),
+            ('import_reconcile', '!=', False),
+            ])
+        torec = {}  # key = reconcile mark, value = move lines recordset
+        for line in lines:
+            if line.import_reconcile in torec:
+                torec[line.import_reconcile] += line
+            else:
+                torec[line.import_reconcile] = line
+        for rec_ref, lines_to_rec in torec.iteritems():
+            if len(lines_to_rec) < 2:
+                logger.warning(
+                    "Skip reconcile of ref '%s' because "
+                    "this ref is only on 1 move line", rec_ref)
+                continue
+            total = 0.0
+            accounts = {}
+            partners = {}
+            for line in lines_to_rec:
+                total += line.credit
+                total -= line.debit
+                accounts[line.account_id] = True
+                partners[line.partner_id.id or False] = True
+            if not float_is_zero(total, precision_rounding=prec):
+                logger.warning(
+                    "Skip reconcile of ref '%s' because the lines with "
+                    "this ref are not balanced (%s)", rec_ref, total)
+                continue
+            if len(accounts) > 1:
+                logger.warning(
+                    "Skip reconcile of ref '%s' because the lines with "
+                    "this ref have different accounts (%s)",
+                    rec_ref, ', '.join([acc.code for acc in accounts.keys()]))
+                continue
+            if not accounts.keys()[0].reconcile:
+                logger.warning(
+                    "Skip reconcile of ref '%s' because the account '%s' "
+                    "is not configured with 'Allow Reconciliation'",
+                    rec_ref, accounts.keys()[0].display_name)
+                continue
+            if len(partners) > 1:
+                logger.warning(
+                    "Skip reconcile of ref '%s' because the lines with "
+                    "this ref have different partners (IDs %s)",
+                    rec_ref, ', '.join([str(key) for key in partners.keys()]))
+                continue
+            lines_to_rec.reconcile()
+            # clear the mark on the lines that have been matched
+            lines_to_rec.write({'import_reconcile': False})
+        logger.info('Reconcile imported moves finished')
diff --git a/account_move_csv_import/wizard/import_move_view.xml b/account_move_csv_import/wizard/import_move_view.xml
index 4422c40..9979c5c 100644
--- a/account_move_csv_import/wizard/import_move_view.xml
+++ b/account_move_csv_import/wizard/import_move_view.xml
@@ -14,29 +14,40 @@
[wizard form view hunk: the field tags were lost; it updates the field list and adds the new options date_format, move_ref_unique, date_by_move_line, file_with_header and create_missing_partner to the form]

Information about the Generic CSV format

  • Columns:
      1. Date: by default DD/MM/YYYY (see the Date Format option)
      2. Journal code
      3. Legal account
      4. Partner internal reference
      5. Account move ref (not line)
      6. Label
      7. Debit
      8. Credit
      9. Reconcile ref
      10. Analytic account
  • Encoding: UTF-8
  • Field separator: , (comma)
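For illustration, here is a minimal sketch of a balanced two-line Generic CSV file following the default column order above, written with unicodecsv (the library the importer itself uses); the file path, journal code, account codes, partner ref, labels and amounts are all invented:

    # sketch only: build a two-line Generic CSV file for the import wizard
    import unicodecsv

    with open('/tmp/sample_moves.csv', 'wb') as f:
        writer = unicodecsv.writer(
            f, delimiter=',', quotechar='"',
            quoting=unicodecsv.QUOTE_MINIMAL, encoding='utf-8')
        # date, journal, account, partner ref, move ref, label,
        # debit, credit, reconcile ref, analytic account
        writer.writerow(['15/02/2018', 'VT', '411100', 'C0042', 'INV42',
                         'Customer invoice 42', '120.00', '0.00', 'A1242', ''])
        writer.writerow(['15/02/2018', 'VT', '707000', '', 'INV42',
                         'Customer invoice 42', '0.00', '120.00', '', ''])

The reconcile ref 'A1242' is only matched once another imported line (for instance the payment) carries the same ref. Note also that run_import() now accepts a fieldnames argument, so a caller can pass a different column order for the Generic CSV format.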
@@ -45,6 +56,12 @@

If exporting from LibreOffice, set Language to English on numeric cells.


If the imported file contains reconcile refs, you can use the following filter to find the lines that could not be matched: Matching Number "is not set", Import Reconcile "is set", grouped by the Import Reconcile field. Lines cannot be matched when they have different partners or when sum(debit) != sum(credit).
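As a rough sketch, that filter corresponds to a search like the one below, assuming the "Matching Number" field on account.move.line is full_reconcile_id (as in Odoo 10) and that env is an Odoo environment, e.g. from an odoo shell:

    # sketch: group the imported journal items whose reconcile ref
    # could not be matched
    unmatched = env['account.move.line'].read_group(
        [('full_reconcile_id', '=', False),
         ('import_reconcile', '!=', False)],
        fields=['import_reconcile'],
        groupby=['import_reconcile'])
    for group in unmatched:
        print group['import_reconcile'], group['import_reconcile_count']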