diff --git a/addons/account/account.py b/addons/account/account.py index 98ebcb54224089fe2dca573a3935139836895519..eeb915bddb0bd7ee83951fa601576b4d5dca728f 100644 --- a/addons/account/account.py +++ b/addons/account/account.py @@ -26,7 +26,7 @@ from operator import itemgetter import time import openerp -from openerp import SUPERUSER_ID +from openerp import SUPERUSER_ID, api from openerp import tools from openerp.osv import fields, osv, expression from openerp.tools.translate import _ @@ -62,7 +62,7 @@ class account_payment_term(osv.osv): 'name': fields.char('Payment Term', translate=True, required=True), 'active': fields.boolean('Active', help="If the active field is set to False, it will allow you to hide the payment term without removing it."), 'note': fields.text('Description', translate=True), - 'line_ids': fields.one2many('account.payment.term.line', 'payment_id', 'Terms'), + 'line_ids': fields.one2many('account.payment.term.line', 'payment_id', 'Terms', copy=True), } _defaults = { 'active': 1, @@ -213,9 +213,6 @@ def _code_get(self, cr, uid, context=None): # Accounts #---------------------------------------------------------- -class account_tax(osv.osv): - _name = 'account.tax' - class account_account(osv.osv): _order = "parent_left" _parent_order = "code" @@ -640,16 +637,16 @@ class account_account(osv.osv): def _check_moves(self, cr, uid, ids, method, context=None): line_obj = self.pool.get('account.move.line') - account_ids = self.search(cr, uid, [('id', 'child_of', ids)]) + account_ids = self.search(cr, uid, [('id', 'child_of', ids)], context=context) - if line_obj.search(cr, uid, [('account_id', 'in', account_ids)]): + if line_obj.search(cr, uid, [('account_id', 'in', account_ids)], context=context): if method == 'write': raise osv.except_osv(_('Error!'), _('You cannot deactivate an account that contains journal items.')) elif method == 'unlink': raise osv.except_osv(_('Error!'), _('You cannot remove an account that contains journal items.')) #Checking 
whether the account is set as a property to any Partner or not - value = 'account.account,' + str(ids[0]) - partner_prop_acc = self.pool.get('ir.property').search(cr, uid, [('value_reference','=',value)], context=context) + values = ['account.account,%s' % (account_id,) for account_id in ids] + partner_prop_acc = self.pool.get('ir.property').search(cr, uid, [('value_reference','in', values)], context=context) if partner_prop_acc: raise osv.except_osv(_('Warning!'), _('You cannot remove/deactivate an account which is set on a customer or supplier.')) return True @@ -691,10 +688,10 @@ class account_account(osv.osv): # Dont allow changing the company_id when account_move_line already exist if 'company_id' in vals: - move_lines = self.pool.get('account.move.line').search(cr, uid, [('account_id', 'in', ids)]) + move_lines = self.pool.get('account.move.line').search(cr, uid, [('account_id', 'in', ids)], context=context) if move_lines: # Allow the write if the value is the same - for i in [i['company_id'][0] for i in self.read(cr,uid,ids,['company_id'])]: + for i in [i['company_id'][0] for i in self.read(cr,uid,ids,['company_id'], context=context)]: if vals['company_id']!=i: raise osv.except_osv(_('Warning!'), _('You cannot change the owner company of an account that already contains journal items.')) if 'active' in vals and not vals['active']: @@ -730,7 +727,7 @@ class account_journal(osv.osv): 'centralisation': fields.boolean('Centralized Counterpart', help="Check this box to determine that each entry of this journal won't create a new counterpart but will share the same counterpart. 
This is used in fiscal year closing."), 'update_posted': fields.boolean('Allow Cancelling Entries', help="Check this box if you want to allow the cancellation the entries related to this journal or of the invoice related to this journal"), 'group_invoice_lines': fields.boolean('Group Invoice Lines', help="If this box is checked, the system will try to group the accounting lines when generating them from invoices."), - 'sequence_id': fields.many2one('ir.sequence', 'Entry Sequence', help="This field contains the information related to the numbering of the journal entries of this journal.", required=True), + 'sequence_id': fields.many2one('ir.sequence', 'Entry Sequence', help="This field contains the information related to the numbering of the journal entries of this journal.", required=True, copy=False), 'user_id': fields.many2one('res.users', 'User', help="The user responsible for this journal"), 'groups_id': fields.many2many('res.groups', 'account_journal_group_rel', 'journal_id', 'group_id', 'Groups'), 'currency': fields.many2one('res.currency', 'Currency', help='The currency used to enter statement'), @@ -769,15 +766,12 @@ class account_journal(osv.osv): (_check_currency, 'Configuration error!\nThe currency chosen should be shared by the default accounts too.', ['currency','default_debit_account_id','default_credit_account_id']), ] - def copy(self, cr, uid, id, default=None, context=None, done_list=None, local=False): - default = {} if default is None else default.copy() - if done_list is None: - done_list = [] + def copy(self, cr, uid, id, default=None, context=None): + default = dict(context or {}) journal = self.browse(cr, uid, id, context=context) default.update( code=_("%s (copy)") % (journal['code'] or ''), - name=_("%s (copy)") % (journal['name'] or ''), - sequence_id=False) + name=_("%s (copy)") % (journal['name'] or '')) return super(account_journal, self).copy(cr, uid, id, default, context=context) def write(self, cr, uid, ids, vals, context=None): @@ 
-865,7 +859,10 @@ class account_fiscalyear(osv.osv): 'date_start': fields.date('Start Date', required=True), 'date_stop': fields.date('End Date', required=True), 'period_ids': fields.one2many('account.period', 'fiscalyear_id', 'Periods'), - 'state': fields.selection([('draft','Open'), ('done','Closed')], 'Status', readonly=True), + 'state': fields.selection([('draft','Open'), ('done','Closed')], 'Status', readonly=True, copy=False), + 'end_journal_period_id': fields.many2one( + 'account.journal.period', 'End of Year Entries Journal', + readonly=True, copy=False), } _defaults = { 'state': 'draft', @@ -960,7 +957,7 @@ class account_period(osv.osv): 'date_start': fields.date('Start of Period', required=True, states={'done':[('readonly',True)]}), 'date_stop': fields.date('End of Period', required=True, states={'done':[('readonly',True)]}), 'fiscalyear_id': fields.many2one('account.fiscalyear', 'Fiscal Year', required=True, states={'done':[('readonly',True)]}, select=True), - 'state': fields.selection([('draft','Open'), ('done','Closed')], 'Status', readonly=True, + 'state': fields.selection([('draft','Open'), ('done','Closed')], 'Status', readonly=True, copy=False, help='When monthly periods are created. The status is \'Draft\'. At the end of monthly period it is in \'Done\' status.'), 'company_id': fields.related('fiscalyear_id', 'company_id', type='many2one', relation='res.company', string='Company', store=True, readonly=True) } @@ -1000,12 +997,14 @@ class account_period(osv.osv): (_check_year_limit, 'Error!\nThe period is invalid. 
Either some periods are overlapping or the period\'s dates are not matching the scope of the fiscal year.', ['date_stop']) ] + @api.returns('self') def next(self, cr, uid, period, step, context=None): ids = self.search(cr, uid, [('date_start','>',period.date_start)]) if len(ids)>=step: return ids[step-1] return False + @api.returns('self') def find(self, cr, uid, dt=None, context=None): if context is None: context = {} if not dt: @@ -1028,13 +1027,14 @@ class account_period(osv.osv): raise openerp.exceptions.RedirectWarning(msg, action_id, _('Go to the configuration panel')) return result - def action_draft(self, cr, uid, ids, *args): + def action_draft(self, cr, uid, ids, context=None): mode = 'draft' for period in self.browse(cr, uid, ids): if period.fiscalyear_id.state == 'done': raise osv.except_osv(_('Warning!'), _('You can not re-open a period which belongs to closed fiscal year')) cr.execute('update account_journal_period set state=%s where period_id in %s', (mode, tuple(ids),)) cr.execute('update account_period set state=%s where id in %s', (mode, tuple(ids),)) + self.invalidate_cache(cr, uid, context=context) return True def name_search(self, cr, user, name, args=None, operator='ilike', context=None, limit=100): @@ -1132,22 +1132,6 @@ class account_journal_period(osv.osv): } _order = "period_id" - -class account_fiscalyear(osv.osv): - _inherit = "account.fiscalyear" - _description = "Fiscal Year" - _columns = { - 'end_journal_period_id':fields.many2one('account.journal.period','End of Year Entries Journal', readonly=True), - } - - def copy(self, cr, uid, id, default=None, context=None): - default = {} if default is None else default.copy() - default.update({ - 'period_ids': [], - 'end_journal_period_id': False - }) - return super(account_fiscalyear, self).copy(cr, uid, id, default=default, context=context) - #---------------------------------------------------------- # Entries #---------------------------------------------------------- @@ -1235,13 +1219,21 
@@ class account_move(osv.osv): return [line.move_id.id for line in line_obj.browse(cr, uid, ids, context=context)] _columns = { - 'name': fields.char('Number', required=True), - 'ref': fields.char('Reference'), + 'name': fields.char('Number', required=True, copy=False), + 'ref': fields.char('Reference', copy=False), 'period_id': fields.many2one('account.period', 'Period', required=True, states={'posted':[('readonly',True)]}), 'journal_id': fields.many2one('account.journal', 'Journal', required=True, states={'posted':[('readonly',True)]}), - 'state': fields.selection([('draft','Unposted'), ('posted','Posted')], 'Status', required=True, readonly=True, - help='All manually created new journal entries are usually in the status \'Unposted\', but you can set the option to skip that status on the related journal. In that case, they will behave as journal entries automatically created by the system on document validation (invoices, bank statements...) and will be created in \'Posted\' status.'), - 'line_id': fields.one2many('account.move.line', 'move_id', 'Entries', states={'posted':[('readonly',True)]}), + 'state': fields.selection( + [('draft','Unposted'), ('posted','Posted')], 'Status', + required=True, readonly=True, copy=False, + help='All manually created new journal entries are usually in the status \'Unposted\', ' + 'but you can set the option to skip that status on the related journal. ' + 'In that case, they will behave as journal entries automatically created by the ' + 'system on document validation (invoices, bank statements...) 
and will be created ' + 'in \'Posted\' status.'), + 'line_id': fields.one2many('account.move.line', 'move_id', 'Entries', + states={'posted':[('readonly',True)]}, + copy=True), 'to_check': fields.boolean('To Review', help='Check this box if you are unsure of that journal entry and if you want to note it as \'to be reviewed\' by an accounting expert.'), 'partner_id': fields.related('line_id', 'partner_id', type="many2one", relation="res.partner", string="Partner", store={ _name: (lambda self, cr,uid,ids,c: ids, ['line_id'], 10), @@ -1309,6 +1301,7 @@ class account_move(osv.osv): 'SET state=%s '\ 'WHERE id IN %s', ('posted', tuple(valid_moves),)) + self.invalidate_cache(cr, uid, context=context) return True def button_validate(self, cursor, user, ids, context=None): @@ -1335,6 +1328,7 @@ class account_move(osv.osv): cr.execute('UPDATE account_move '\ 'SET state=%s '\ 'WHERE id IN %s', ('draft', tuple(ids),)) + self.invalidate_cache(cr, uid, context=context) return True def write(self, cr, uid, ids, vals, context=None): @@ -1350,23 +1344,9 @@ class account_move(osv.osv): # TODO: Check if period is closed ! 
# def create(self, cr, uid, vals, context=None): - if context is None: - context = {} - if 'line_id' in vals and context.get('copy'): - for l in vals['line_id']: - if not l[0]: - l[2].update({ - 'reconcile_id':False, - 'reconcile_partial_id':False, - 'analytic_lines':False, - 'invoice':False, - 'ref':False, - 'balance':False, - 'account_tax_id':False, - 'statement_id': False, - }) - - if 'journal_id' in vals and vals.get('journal_id', False): + context = dict(context or {}) + if vals.get('line_id'): + if vals.get('journal_id'): for l in vals['line_id']: if not l[0]: l[2]['journal_id'] = vals['journal_id'] @@ -1383,7 +1363,6 @@ class account_move(osv.osv): l[2]['period_id'] = default_period context['period_id'] = default_period - if vals.get('line_id', False): c = context.copy() c['novalidate'] = True c['period_id'] = vals['period_id'] if 'period_id' in vals else self._get_period(cr, uid, context) @@ -1398,22 +1377,8 @@ class account_move(osv.osv): result = super(account_move, self).create(cr, uid, vals, context) return result - def copy(self, cr, uid, id, default=None, context=None): - default = {} if default is None else default.copy() - context = {} if context is None else context.copy() - default.update({ - 'state':'draft', - 'ref': False, - 'name':'/', - }) - context.update({ - 'copy':True - }) - return super(account_move, self).copy(cr, uid, id, default, context) - def unlink(self, cr, uid, ids, context=None, check=True): - if context is None: - context = {} + context = dict(context or {}) if isinstance(ids, (int, long)): ids = [ids] toremove = [] @@ -1447,8 +1412,8 @@ class account_move(osv.osv): def _centralise(self, cr, uid, move, mode, context=None): assert mode in ('debit', 'credit'), 'Invalid Mode' #to prevent sql injection currency_obj = self.pool.get('res.currency') - if context is None: - context = {} + account_move_line_obj = self.pool.get('account.move.line') + context = dict(context or {}) if mode=='credit': account_id = 
move.journal_id.default_debit_account_id.id @@ -1473,7 +1438,7 @@ class account_move(osv.osv): line_id = res[0] else: context.update({'journal_id': move.journal_id.id, 'period_id': move.period_id.id}) - line_id = self.pool.get('account.move.line').create(cr, uid, { + line_id = account_move_line_obj.create(cr, uid, { 'name': _(mode.capitalize()+' Centralisation'), 'centralisation': mode, 'partner_id': False, @@ -1498,6 +1463,7 @@ class account_move(osv.osv): cr.execute('SELECT SUM(%s) FROM account_move_line WHERE move_id=%%s AND id!=%%s' % (mode,), (move.id, line_id2)) result = cr.fetchone()[0] or 0.0 cr.execute('update account_move_line set '+mode2+'=%s where id=%s', (result, line_id)) + account_move_line_obj.invalidate_cache(cr, uid, [mode2], [line_id], context=context) #adjust also the amount in currency if needed cr.execute("select currency_id, sum(amount_currency) as amount_currency from account_move_line where move_id = %s and currency_id is not null group by currency_id", (move.id,)) @@ -1510,9 +1476,10 @@ class account_move(osv.osv): res = cr.fetchone() if res: cr.execute('update account_move_line set amount_currency=%s , account_id=%s where id=%s', (amount_currency, account_id, res[0])) + account_move_line_obj.invalidate_cache(cr, uid, ['amount_currency', 'account_id'], [res[0]], context=context) else: context.update({'journal_id': move.journal_id.id, 'period_id': move.period_id.id}) - line_id = self.pool.get('account.move.line').create(cr, uid, { + line_id = account_move_line_obj.create(cr, uid, { 'name': _('Currency Adjustment'), 'centralisation': 'currency', 'partner_id': False, @@ -1818,7 +1785,7 @@ class account_tax_code(osv.osv): return [] if isinstance(ids, (int, long)): ids = [ids] - reads = self.read(cr, uid, ids, ['name','code'], context, load='_classic_write') + reads = self.read(cr, uid, ids, ['name','code'], context=context, load='_classic_write') return [(x['id'], (x['code'] and (x['code'] + ' - ') or '') + x['name']) \ for x in reads] @@ 
-1827,19 +1794,13 @@ class account_tax_code(osv.osv): if user.company_id: return user.company_id.id return self.pool.get('res.company').search(cr, uid, [('parent_id', '=', False)])[0] + _defaults = { 'company_id': _default_company, 'sign': 1.0, 'notprintable': False, } - def copy(self, cr, uid, id, default=None, context=None): - if default is None: - default = {} - default = default.copy() - default.update({'line_ids': []}) - return super(account_tax_code, self).copy(cr, uid, id, default, context) - _check_recursion = check_cycle _constraints = [ (_check_recursion, 'Error!\nYou cannot create recursive accounts.', ['parent_id']) @@ -1868,10 +1829,9 @@ class account_tax(osv.osv): def copy_data(self, cr, uid, id, default=None, context=None): if default is None: default = {} - name = self.read(cr, uid, id, ['name'], context=context)['name'] - default = default.copy() - default.update({'name': name + _(' (Copy)')}) - return super(account_tax, self).copy_data(cr, uid, id, default=default, context=context) + this = self.browse(cr, uid, id, context=context) + tmp_default = dict(default, name=_("%s (Copy)") % this.name) + return super(account_tax, self).copy_data(cr, uid, id, default=tmp_default, context=context) _name = 'account.tax' _description = 'Tax' @@ -2087,6 +2047,7 @@ class account_tax(osv.osv): tax = self.browse(cr, uid, tax_id, context=context) return self.compute_all(cr, uid, [tax], amount, 1) # TOCHECK may use force_exclude parameter + @api.v7 def compute_all(self, cr, uid, taxes, price_unit, quantity, product=None, partner=None, force_excluded=False): """ :param force_excluded: boolean used to say that we don't want to consider the value of field price_include of @@ -2137,6 +2098,12 @@ class account_tax(osv.osv): 'taxes': tin + tex } + @api.v8 + def compute_all(self, price_unit, quantity, product=None, partner=None, force_excluded=False): + return self._model.compute_all( + self._cr, self._uid, self, price_unit, quantity, + product=product, partner=partner, 
force_excluded=force_excluded) + def compute(self, cr, uid, taxes, price_unit, quantity, product=None, partner=None): _logger.warning("Deprecated, use compute_all(...)['taxes'] instead of compute(...) to manage prices with tax included.") return self._compute(cr, uid, taxes, price_unit, quantity, product, partner) @@ -2270,7 +2237,7 @@ class account_model(osv.osv): 'name': fields.char('Model Name', required=True, help="This is a model for recurring accounting entries"), 'journal_id': fields.many2one('account.journal', 'Journal', required=True), 'company_id': fields.related('journal_id', 'company_id', type='many2one', relation='res.company', string='Company', store=True, readonly=True), - 'lines_id': fields.one2many('account.model.line', 'model_id', 'Model Entries'), + 'lines_id': fields.one2many('account.model.line', 'model_id', 'Model Entries', copy=True), 'legend': fields.text('Legend', readonly=True, size=100), } @@ -2292,6 +2259,7 @@ class account_model(osv.osv): context = {} if data.get('date', False): + context = dict(context) context.update({'date': data['date']}) move_date = context.get('date', time.strftime('%Y-%m-%d')) @@ -2409,8 +2377,8 @@ class account_subscription(osv.osv): 'period_total': fields.integer('Number of Periods', required=True), 'period_nbr': fields.integer('Period', required=True), 'period_type': fields.selection([('day','days'),('month','month'),('year','year')], 'Period Type', required=True), - 'state': fields.selection([('draft','Draft'),('running','Running'),('done','Done')], 'Status', required=True, readonly=True), - 'lines_id': fields.one2many('account.subscription.line', 'subscription_id', 'Subscription Lines') + 'state': fields.selection([('draft','Draft'),('running','Running'),('done','Done')], 'Status', required=True, readonly=True, copy=False), + 'lines_id': fields.one2many('account.subscription.line', 'subscription_id', 'Subscription Lines', copy=True) } _defaults = { 'date_start': fields.date.context_today, @@ -2742,7 +2710,7 
@@ class account_tax_code_template(osv.osv): return [] if isinstance(ids, (int, long)): ids = [ids] - reads = self.read(cr, uid, ids, ['name','code'], context, load='_classic_write') + reads = self.read(cr, uid, ids, ['name','code'], context=context, load='_classic_write') return [(x['id'], (x['code'] and x['code'] + ' - ' or '') + x['name']) \ for x in reads] diff --git a/addons/account/account_bank_statement.py b/addons/account/account_bank_statement.py index 3320ee8316257c88891b43e6fdef4338cd269c36..98e80f895f4fe4bce5e02d5369ec616ad0dd0e28 100644 --- a/addons/account/account_bank_statement.py +++ b/addons/account/account_bank_statement.py @@ -69,8 +69,7 @@ class account_bank_statement(osv.osv): return False def _compute_default_statement_name(self, cr, uid, journal_id, context=None): - if context is None: - context = {} + context = dict(context or {}) obj_seq = self.pool.get('ir.sequence') period = self.pool.get('account.period').browse(cr, uid, self._get_period(cr, uid, context=context), context=context) context['fiscalyear_id'] = period.fiscalyear_id.id @@ -114,8 +113,16 @@ class account_bank_statement(osv.osv): _description = "Bank Statement" _inherit = ['mail.thread'] _columns = { - 'name': fields.char('Reference', states={'draft': [('readonly', False)]}, readonly=True, help='if you give the Name other then /, its created Accounting Entries Move will be with same name as statement name. This allows the statement entries to have the same references than the statement itself'), # readonly for account_cash_statement - 'date': fields.date('Date', required=True, states={'confirm': [('readonly', True)]}, select=True), + 'name': fields.char( + 'Reference', states={'draft': [('readonly', False)]}, + readonly=True, # readonly for account_cash_statement + copy=False, + help='if you give the Name other then /, its created Accounting Entries Move ' + 'will be with same name as statement name. 
' + 'This allows the statement entries to have the same references than the ' + 'statement itself'), + 'date': fields.date('Date', required=True, states={'confirm': [('readonly', True)]}, + select=True, copy=False), 'journal_id': fields.many2one('account.journal', 'Journal', required=True, readonly=True, states={'draft':[('readonly',False)]}), 'period_id': fields.many2one('account.period', 'Period', required=True, @@ -132,14 +139,15 @@ class account_bank_statement(osv.osv): string="Computed Balance", help='Balance as calculated based on Opening Balance and transaction lines'), 'company_id': fields.related('journal_id', 'company_id', type='many2one', relation='res.company', string='Company', store=True, readonly=True), 'line_ids': fields.one2many('account.bank.statement.line', - 'statement_id', 'Statement lines', - states={'confirm':[('readonly', True)]}), + 'statement_id', 'Statement lines', + states={'confirm':[('readonly', True)]}, copy=True), 'move_line_ids': fields.one2many('account.move.line', 'statement_id', - 'Entry lines', states={'confirm':[('readonly',True)]}), + 'Entry lines', states={'confirm':[('readonly',True)]}), 'state': fields.selection([('draft', 'New'), ('open','Open'), # used by cash statements ('confirm', 'Closed')], 'Status', required=True, readonly="1", + copy=False, help='When new statement is created the status will be \'Draft\'.\n' 'And after getting confirmation from the bank it will be in \'Confirmed\' status.'), 'currency': fields.function(_currency, string='Currency', @@ -182,7 +190,7 @@ class account_bank_statement(osv.osv): pids = period_pool.find(cr, uid, dt=date, context=ctx) if pids: res.update({'period_id': pids[0]}) - context.update({'period_id': pids[0]}) + context = dict(context, period_id=pids[0]) return { 'value':res, @@ -363,24 +371,13 @@ class account_bank_statement(osv.osv): return {'value': res} def unlink(self, cr, uid, ids, context=None): - stat = self.read(cr, uid, ids, ['state'], context=context) - unlink_ids = [] - 
for t in stat: - if t['state'] in ('draft'): - unlink_ids.append(t['id']) - else: - raise osv.except_osv(_('Invalid Action!'), _('In order to delete a bank statement, you must first cancel it to delete related journal items.')) - osv.osv.unlink(self, cr, uid, unlink_ids, context=context) - return True - - def copy(self, cr, uid, id, default=None, context=None): - if default is None: - default = {} - if context is None: - context = {} - default = default.copy() - default['move_line_ids'] = [] - return super(account_bank_statement, self).copy(cr, uid, id, default, context=context) + for item in self.browse(cr, uid, ids, context=context): + if item.state != 'draft': + raise osv.except_osv( + _('Invalid Action!'), + _('In order to delete a bank statement, you must first cancel it to delete related journal items.') + ) + return super(account_bank_statement, self).unlink(cr, uid, ids, context=context) def button_journal_entries(self, cr, uid, ids, context=None): ctx = (context or {}).copy() @@ -806,8 +803,8 @@ class account_bank_statement_line(osv.osv): _description = "Bank Statement Line" _inherit = ['ir.needaction_mixin'] _columns = { - 'name': fields.char('Description', required=True), - 'date': fields.date('Date', required=True), + 'name': fields.char('Description', required=True, copy=False), + 'date': fields.date('Date', required=True, copy=False), 'amount': fields.float('Amount', digits_compute=dp.get_precision('Account')), 'partner_id': fields.many2one('res.partner', 'Partner'), 'bank_account_id': fields.many2one('res.partner.bank','Bank Account'), diff --git a/addons/account/account_cash_statement.py b/addons/account/account_cash_statement.py index c1c5265d8c22a4e82de2458eb561c2a91882df94..5c6e4ef4292b6378d4c1de87a5083a14e93864ef 100644 --- a/addons/account/account_cash_statement.py +++ b/addons/account/account_cash_statement.py @@ -179,7 +179,7 @@ class account_cash_statement(osv.osv): }, help="Total of cash transaction lines."), 'closing_date': 
fields.datetime("Closed On"), - 'details_ids' : fields.one2many('account.cashbox.line', 'bank_statement_id', string='CashBox Lines'), + 'details_ids' : fields.one2many('account.cashbox.line', 'bank_statement_id', string='CashBox Lines', copy=True), 'opening_details_ids' : fields.one2many('account.cashbox.line', 'bank_statement_id', string='Opening Cashbox Lines'), 'closing_details_ids' : fields.one2many('account.cashbox.line', 'bank_statement_id', string='Closing Cashbox Lines'), 'user_id': fields.many2one('res.users', 'Responsible', required=False), @@ -337,7 +337,7 @@ class account_journal(osv.osv): return result _columns = { - 'cashbox_line_ids' : fields.one2many('account.journal.cashbox.line', 'journal_id', 'CashBox'), + 'cashbox_line_ids' : fields.one2many('account.journal.cashbox.line', 'journal_id', 'CashBox', copy=True), } _defaults = { diff --git a/addons/account/account_invoice.py b/addons/account/account_invoice.py index 18773648e14881a1ac5fd25414b1d6b2ba8e6665..ad99ae400581c7b0cc929e8f1f6cc03a8b29956f 100644 --- a/addons/account/account_invoice.py +++ b/addons/account/account_invoice.py @@ -19,203 +19,36 @@ # ############################################################################## -import time +import itertools from lxml import etree -import openerp.addons.decimal_precision as dp -import openerp.exceptions - -from openerp.osv import fields, osv -from openerp.tools.translate import _ - -class account_invoice(osv.osv): - def _amount_all(self, cr, uid, ids, name, args, context=None): - res = {} - for invoice in self.browse(cr, uid, ids, context=context): - res[invoice.id] = { - 'amount_untaxed': 0.0, - 'amount_tax': 0.0, - 'amount_total': 0.0 - } - for line in invoice.invoice_line: - res[invoice.id]['amount_untaxed'] += line.price_subtotal - for line in invoice.tax_line: - res[invoice.id]['amount_tax'] += line.amount - res[invoice.id]['amount_total'] = res[invoice.id]['amount_tax'] + res[invoice.id]['amount_untaxed'] - return res - - def 
_get_journal(self, cr, uid, context=None): - if context is None: - context = {} - type_inv = context.get('type', 'out_invoice') - user = self.pool.get('res.users').browse(cr, uid, uid, context=context) - company_id = context.get('company_id', user.company_id.id) - type2journal = {'out_invoice': 'sale', 'in_invoice': 'purchase', 'out_refund': 'sale_refund', 'in_refund': 'purchase_refund'} - journal_obj = self.pool.get('account.journal') - domain = [('company_id', '=', company_id)] - if isinstance(type_inv, list): - domain.append(('type', 'in', [type2journal.get(type) for type in type_inv if type2journal.get(type)])) - else: - domain.append(('type', '=', type2journal.get(type_inv, 'sale'))) - res = journal_obj.search(cr, uid, domain, limit=1) - return res and res[0] or False - - def _get_currency(self, cr, uid, context=None): - res = False - journal_id = self._get_journal(cr, uid, context=context) - if journal_id: - journal = self.pool.get('account.journal').browse(cr, uid, journal_id, context=context) - res = journal.currency and journal.currency.id or journal.company_id.currency_id.id - return res - - def _get_journal_analytic(self, cr, uid, type_inv, context=None): - type2journal = {'out_invoice': 'sale', 'in_invoice': 'purchase', 'out_refund': 'sale', 'in_refund': 'purchase'} - tt = type2journal.get(type_inv, 'sale') - result = self.pool.get('account.analytic.journal').search(cr, uid, [('type','=',tt)], context=context) - if not result: - raise osv.except_osv(_('No Analytic Journal!'),_("You must define an analytic journal of type '%s'!") % (tt,)) - return result[0] - - def _get_type(self, cr, uid, context=None): - if context is None: - context = {} - return context.get('type', 'out_invoice') - - def _reconciled(self, cr, uid, ids, name, args, context=None): - res = {} - for inv in self.browse(cr, uid, ids, context=context): - res[inv.id] = self.test_paid(cr, uid, [inv.id]) - if not res[inv.id] and inv.state == 'paid': - self.signal_open_test(cr, uid, [inv.id]) - 
return res - - def _get_reference_type(self, cr, uid, context=None): - return [('none', _('Free Reference'))] - def _amount_residual(self, cr, uid, ids, name, args, context=None): - """Function of the field residua. It computes the residual amount (balance) for each invoice""" - if context is None: - context = {} - ctx = context.copy() - result = {} - currency_obj = self.pool.get('res.currency') - for invoice in self.browse(cr, uid, ids, context=context): - nb_inv_in_partial_rec = max_invoice_id = 0 - result[invoice.id] = 0.0 - if invoice.move_id: - for aml in invoice.move_id.line_id: - if aml.account_id.type in ('receivable','payable'): - if aml.currency_id and aml.currency_id.id == invoice.currency_id.id: - result[invoice.id] += aml.amount_residual_currency - else: - ctx['date'] = aml.date - result[invoice.id] += currency_obj.compute(cr, uid, aml.company_id.currency_id.id, invoice.currency_id.id, aml.amount_residual, context=ctx) - - if aml.reconcile_partial_id.line_partial_ids: - #we check if the invoice is partially reconciled and if there are other invoices - #involved in this partial reconciliation (and we sum these invoices) - for line in aml.reconcile_partial_id.line_partial_ids: - if line.invoice and invoice.type == line.invoice.type: - nb_inv_in_partial_rec += 1 - #store the max invoice id as for this invoice we will make a balance instead of a simple division - max_invoice_id = max(max_invoice_id, line.invoice.id) - if nb_inv_in_partial_rec: - #if there are several invoices in a partial reconciliation, we split the residual by the number - #of invoice to have a sum of residual amounts that matches the partner balance - new_value = currency_obj.round(cr, uid, invoice.currency_id, result[invoice.id] / nb_inv_in_partial_rec) - if invoice.id == max_invoice_id: - #if it's the last the invoice of the bunch of invoices partially reconciled together, we make a - #balance to avoid rounding errors - result[invoice.id] = result[invoice.id] - ((nb_inv_in_partial_rec 
- 1) * new_value) - else: - result[invoice.id] = new_value +from openerp import models, fields, api, _ +from openerp.exceptions import except_orm, Warning, RedirectWarning +import openerp.addons.decimal_precision as dp - #prevent the residual amount on the invoice to be less than 0 - result[invoice.id] = max(result[invoice.id], 0.0) - return result +# mapping invoice type to journal type +TYPE2JOURNAL = { + 'out_invoice': 'sale', + 'in_invoice': 'purchase', + 'out_refund': 'sale_refund', + 'in_refund': 'purchase_refund', +} - # Give Journal Items related to the payment reconciled to this invoice - # Return ids of partial and total payments related to the selected invoices - def _get_lines(self, cr, uid, ids, name, arg, context=None): - res = {} - for invoice in self.browse(cr, uid, ids, context=context): - id = invoice.id - res[id] = [] - if not invoice.move_id: - continue - data_lines = [x for x in invoice.move_id.line_id if x.account_id.id == invoice.account_id.id] - partial_ids = [] - for line in data_lines: - ids_line = [] - if line.reconcile_id: - ids_line = line.reconcile_id.line_id - elif line.reconcile_partial_id: - ids_line = line.reconcile_partial_id.line_partial_ids - l = map(lambda x: x.id, ids_line) - partial_ids.append(line.id) - res[id] =[x for x in l if x <> line.id and x not in partial_ids] - return res +# mapping invoice type to refund type +TYPE2REFUND = { + 'out_invoice': 'out_refund', # Customer Invoice + 'in_invoice': 'in_refund', # Supplier Invoice + 'out_refund': 'out_invoice', # Customer Refund + 'in_refund': 'in_invoice', # Supplier Refund +} - def _get_invoice_line(self, cr, uid, ids, context=None): - result = {} - for line in self.pool.get('account.invoice.line').browse(cr, uid, ids, context=context): - result[line.invoice_id.id] = True - return result.keys() - - def _get_invoice_tax(self, cr, uid, ids, context=None): - result = {} - for tax in self.pool.get('account.invoice.tax').browse(cr, uid, ids, context=context): - 
result[tax.invoice_id.id] = True - return result.keys() - - def _compute_lines(self, cr, uid, ids, name, args, context=None): - result = {} - for invoice in self.browse(cr, uid, ids, context=context): - src = [] - lines = [] - if invoice.move_id: - for m in invoice.move_id.line_id: - temp_lines = [] - if m.reconcile_id: - temp_lines = map(lambda x: x.id, m.reconcile_id.line_id) - elif m.reconcile_partial_id: - temp_lines = map(lambda x: x.id, m.reconcile_partial_id.line_partial_ids) - lines += [x for x in temp_lines if x not in lines] - src.append(m.id) - - lines = filter(lambda x: x not in src, lines) - result[invoice.id] = lines - return result +MAGIC_COLUMNS = ('id', 'create_uid', 'create_date', 'write_uid', 'write_date') - def _get_invoice_from_line(self, cr, uid, ids, context=None): - move = {} - for line in self.pool.get('account.move.line').browse(cr, uid, ids, context=context): - if line.reconcile_partial_id: - for line2 in line.reconcile_partial_id.line_partial_ids: - move[line2.move_id.id] = True - if line.reconcile_id: - for line2 in line.reconcile_id.line_id: - move[line2.move_id.id] = True - invoice_ids = [] - if move: - invoice_ids = self.pool.get('account.invoice').search(cr, uid, [('move_id','in',move.keys())], context=context) - return invoice_ids - - def _get_invoice_from_reconcile(self, cr, uid, ids, context=None): - move = {} - for r in self.pool.get('account.move.reconcile').browse(cr, uid, ids, context=context): - for line in r.line_partial_ids: - move[line.move_id.id] = True - for line in r.line_id: - move[line.move_id.id] = True - - invoice_ids = [] - if move: - invoice_ids = self.pool.get('account.invoice').search(cr, uid, [('move_id','in',move.keys())], context=context) - return invoice_ids +class account_invoice(models.Model): _name = "account.invoice" _inherit = ['mail.thread'] - _description = 'Invoice' + _description = "Invoice" _order = "number desc, id desc" _track = { 'type': { @@ -225,670 +58,713 @@ class account_invoice(osv.osv): 
'account.mt_invoice_validated': lambda self, cr, uid, obj, ctx=None: obj.state == 'open' and obj.type in ('out_invoice', 'out_refund'), }, } - _columns = { - 'name': fields.char('Reference/Description', select=True, readonly=True, states={'draft':[('readonly',False)]}), - 'origin': fields.char('Source Document', help="Reference of the document that produced this invoice.", readonly=True, states={'draft':[('readonly',False)]}), - 'supplier_invoice_number': fields.char('Supplier Invoice Number', size=64, help="The reference of this invoice as provided by the supplier.", readonly=True, states={'draft':[('readonly',False)]}), - 'type': fields.selection([ + + @api.one + @api.depends('invoice_line.price_subtotal', 'tax_line.amount') + def _compute_amount(self): + self.amount_untaxed = sum(line.price_subtotal for line in self.invoice_line) + self.amount_tax = sum(line.amount for line in self.tax_line) + self.amount_total = self.amount_untaxed + self.amount_tax + + @api.model + def _default_journal(self): + inv_type = self._context.get('type', 'out_invoice') + inv_types = inv_type if isinstance(inv_type, list) else [inv_type] + company_id = self._context.get('company_id', self.env.user.company_id.id) + domain = [ + ('type', 'in', filter(None, map(TYPE2JOURNAL.get, inv_types))), + ('company_id', '=', company_id), + ] + return self.env['account.journal'].search(domain, limit=1) + + @api.model + def _default_currency(self): + journal = self._default_journal() + return journal.currency or journal.company_id.currency_id + + @api.model + @api.returns('account.analytic.journal') + def _get_journal_analytic(self, inv_type): + """ Return the analytic journal corresponding to the given invoice type. 
""" + journal_type = TYPE2JOURNAL.get(inv_type, 'sale') + journal = self.env['account.analytic.journal'].search([('type', '=', journal_type)], limit=1) + if not journal: + raise except_orm(_('No Analytic Journal!'), + _("You must define an analytic journal of type '%s'!") % (journal_type,)) + return journal + + @api.one + @api.depends('account_id', 'move_id.line_id.account_id', 'move_id.line_id.reconcile_id') + def _compute_reconciled(self): + self.reconciled = self.test_paid() + if not self.reconciled and self.state == 'paid': + self.signal_open_test() + + @api.model + def _get_reference_type(self): + return [('none', _('Free Reference'))] + + @api.one + @api.depends( + 'state', 'currency_id', 'invoice_line.price_subtotal', + 'move_id.line_id.account_id.type', + 'move_id.line_id.amount_residual', + 'move_id.line_id.amount_residual_currency', + 'move_id.line_id.currency_id', + 'move_id.line_id.reconcile_partial_id.line_partial_ids.invoice.type', + ) + def _compute_residual(self): + nb_inv_in_partial_rec = max_invoice_id = 0 + self.residual = 0.0 + for line in self.move_id.line_id: + if line.account_id.type in ('receivable', 'payable'): + if line.currency_id == self.currency_id: + self.residual += line.amount_residual_currency + else: + # ahem, shouldn't we use line.currency_id here? 
+ from_currency = line.company_id.currency_id.with_context(date=line.date) + self.residual += from_currency.compute(line.amount_residual, self.currency_id) + # we check if the invoice is partially reconciled and if there + # are other invoices involved in this partial reconciliation + for pline in line.reconcile_partial_id.line_partial_ids: + if pline.invoice and self.type == pline.invoice.type: + nb_inv_in_partial_rec += 1 + # store the max invoice id as for this invoice we will + # make a balance instead of a simple division + max_invoice_id = max(max_invoice_id, pline.invoice.id) + if nb_inv_in_partial_rec: + # if there are several invoices in a partial reconciliation, we + # split the residual by the number of invoices to have a sum of + # residual amounts that matches the partner balance + new_value = self.currency_id.round(self.residual / nb_inv_in_partial_rec) + if self.id == max_invoice_id: + # if it's the last the invoice of the bunch of invoices + # partially reconciled together, we make a balance to avoid + # rounding errors + self.residual = self.residual - ((nb_inv_in_partial_rec - 1) * new_value) + else: + self.residual = new_value + # prevent the residual amount on the invoice to be less than 0 + self.residual = max(self.residual, 0.0) + + @api.one + @api.depends( + 'move_id.line_id.account_id', + 'move_id.line_id.reconcile_id.line_id', + 'move_id.line_id.reconcile_partial_id.line_partial_ids', + ) + def _compute_move_lines(self): + # Give Journal Items related to the payment reconciled to this invoice. + # Return partial and total payments related to the selected invoice. 
+ self.move_lines = self.env['account.move.line'] + if not self.move_id: + return + data_lines = self.move_id.line_id.filtered(lambda l: l.account_id == self.account_id) + partial_lines = self.env['account.move.line'] + for data_line in data_lines: + if data_line.reconcile_id: + lines = data_line.reconcile_id.line_id + elif data_line.reconcile_partial_id: + lines = data_line.reconcile_partial_id.line_partial_ids + else: + lines = self.env['account_move_line'] + partial_lines += data_line + self.move_lines = lines - partial_lines + + @api.one + @api.depends( + 'move_id.line_id.reconcile_id.line_id', + 'move_id.line_id.reconcile_partial_id.line_partial_ids', + ) + def _compute_payments(self): + partial_lines = lines = self.env['account.move.line'] + for line in self.move_id.line_id: + if line.reconcile_id: + lines |= line.reconcile_id.line_id + elif line.reconcile_partial_id: + lines |= line.reconcile_partial_id.line_partial_ids + partial_lines += line + self.payment_ids = (lines - partial_lines).sorted() + + name = fields.Char(string='Reference/Description', index=True, + readonly=True, states={'draft': [('readonly', False)]}) + origin = fields.Char(string='Source Document', + help="Reference of the document that produced this invoice.", + readonly=True, states={'draft': [('readonly', False)]}) + supplier_invoice_number = fields.Char(string='Supplier Invoice Number', + help="The reference of this invoice as provided by the supplier.", + readonly=True, states={'draft': [('readonly', False)]}) + type = fields.Selection([ ('out_invoice','Customer Invoice'), ('in_invoice','Supplier Invoice'), ('out_refund','Customer Refund'), ('in_refund','Supplier Refund'), - ],'Type', readonly=True, select=True, change_default=True, track_visibility='always'), - - 'number': fields.related('move_id','name', type='char', readonly=True, size=64, relation='account.move', store=True, string='Number'), - 'internal_number': fields.char('Invoice Number', readonly=True, help="Unique number of 
the invoice, computed automatically when the invoice is created."), - 'reference': fields.char('Invoice Reference', help="The partner reference of this invoice."), - 'reference_type': fields.selection(_get_reference_type, 'Payment Reference', - required=True, readonly=True, states={'draft':[('readonly',False)]}), - 'comment': fields.text('Additional Information'), - - 'state': fields.selection([ + ], string='Type', readonly=True, index=True, change_default=True, + default=lambda self: self._context.get('type', 'out_invoice'), + track_visibility='always') + + number = fields.Char(related='move_id.name', store=True, readonly=True, copy=False) + internal_number = fields.Char(string='Invoice Number', readonly=True, + default=False, copy=False, + help="Unique number of the invoice, computed automatically when the invoice is created.") + reference = fields.Char(string='Invoice Reference', + help="The partner reference of this invoice.") + reference_type = fields.Selection('_get_reference_type', string='Payment Reference', + required=True, readonly=True, states={'draft': [('readonly', False)]}, + default='none') + comment = fields.Text('Additional Information') + + state = fields.Selection([ ('draft','Draft'), ('proforma','Pro-forma'), ('proforma2','Pro-forma'), ('open','Open'), ('paid','Paid'), ('cancel','Cancelled'), - ],'Status', select=True, readonly=True, track_visibility='onchange', - help=' * The \'Draft\' status is used when a user is encoding a new and unconfirmed Invoice. \ - \n* The \'Pro-forma\' when invoice is in Pro-forma status,invoice does not have an invoice number. \ - \n* The \'Open\' status is used when user create invoice,a invoice number is generated.Its in open status till user does not pay invoice. \ - \n* The \'Paid\' status is set automatically when the invoice is paid. Its related journal entries may or may not be reconciled. 
\ - \n* The \'Cancelled\' status is used when user cancel invoice.'), - 'sent': fields.boolean('Sent', readonly=True, help="It indicates that the invoice has been sent."), - 'date_invoice': fields.date('Invoice Date', readonly=True, states={'draft':[('readonly',False)]}, select=True, help="Keep empty to use the current date"), - 'date_due': fields.date('Due Date', readonly=True, states={'draft':[('readonly',False)]}, select=True, - help="If you use payment terms, the due date will be computed automatically at the generation "\ - "of accounting entries. The payment term may compute several due dates, for example 50% now and 50% in one month, but if you want to force a due date, make sure that the payment term is not set on the invoice. If you keep the payment term and the due date empty, it means direct payment."), - 'partner_id': fields.many2one('res.partner', 'Partner', change_default=True, readonly=True, required=True, states={'draft':[('readonly',False)]}, track_visibility='always'), - 'payment_term': fields.many2one('account.payment.term', 'Payment Terms',readonly=True, states={'draft':[('readonly',False)]}, - help="If you use payment terms, the due date will be computed automatically at the generation "\ - "of accounting entries. If you keep the payment term and the due date empty, it means direct payment. 
"\ - "The payment term may compute several due dates, for example 50% now, 50% in one month."), - 'period_id': fields.many2one('account.period', 'Force Period', domain=[('state','<>','done')], help="Keep empty to use the period of the validation(invoice) date.", readonly=True, states={'draft':[('readonly',False)]}), - - 'account_id': fields.many2one('account.account', 'Account', required=True, readonly=True, states={'draft':[('readonly',False)]}, help="The partner account used for this invoice."), - 'invoice_line': fields.one2many('account.invoice.line', 'invoice_id', 'Invoice Lines', readonly=True, states={'draft':[('readonly',False)]}), - 'tax_line': fields.one2many('account.invoice.tax', 'invoice_id', 'Tax Lines', readonly=True, states={'draft':[('readonly',False)]}), - - 'move_id': fields.many2one('account.move', 'Journal Entry', readonly=True, select=1, ondelete='restrict', help="Link to the automatically generated Journal Items."), - 'amount_untaxed': fields.function(_amount_all, digits_compute=dp.get_precision('Account'), string='Subtotal', track_visibility='always', - store={ - 'account.invoice': (lambda self, cr, uid, ids, c={}: ids, ['invoice_line'], 20), - 'account.invoice.tax': (_get_invoice_tax, None, 20), - 'account.invoice.line': (_get_invoice_line, ['price_unit','invoice_line_tax_id','quantity','discount','invoice_id'], 20), - }, - multi='all'), - 'amount_tax': fields.function(_amount_all, digits_compute=dp.get_precision('Account'), string='Tax', - store={ - 'account.invoice': (lambda self, cr, uid, ids, c={}: ids, ['invoice_line'], 20), - 'account.invoice.tax': (_get_invoice_tax, None, 20), - 'account.invoice.line': (_get_invoice_line, ['price_unit','invoice_line_tax_id','quantity','discount','invoice_id'], 20), - }, - multi='all'), - 'amount_total': fields.function(_amount_all, digits_compute=dp.get_precision('Account'), string='Total', - store={ - 'account.invoice': (lambda self, cr, uid, ids, c={}: ids, ['invoice_line'], 20), - 
'account.invoice.tax': (_get_invoice_tax, None, 20), - 'account.invoice.line': (_get_invoice_line, ['price_unit','invoice_line_tax_id','quantity','discount','invoice_id'], 20), - }, - multi='all'), - 'currency_id': fields.many2one('res.currency', 'Currency', required=True, readonly=True, states={'draft':[('readonly',False)]}, track_visibility='always'), - 'journal_id': fields.many2one('account.journal', 'Journal', required=True, readonly=True, states={'draft':[('readonly',False)]}, - domain="[('type', 'in', {'out_invoice': ['sale'], 'out_refund': ['sale_refund'], 'in_refund': ['purchase_refund'], 'in_invoice': ['purchase']}.get(type, [])), ('company_id', '=', company_id)]"), - 'company_id': fields.many2one('res.company', 'Company', required=True, change_default=True, readonly=True, states={'draft':[('readonly',False)]}), - 'check_total': fields.float('Verification Total', digits_compute=dp.get_precision('Account'), readonly=True, states={'draft':[('readonly',False)]}), - 'reconciled': fields.function(_reconciled, string='Paid/Reconciled', type='boolean', - store={ - 'account.invoice': (lambda self, cr, uid, ids, c={}: ids, None, 50), # Check if we can remove ? - 'account.move.line': (_get_invoice_from_line, None, 50), - 'account.move.reconcile': (_get_invoice_from_reconcile, None, 50), - }, help="It indicates that the invoice has been paid and the journal entry of the invoice has been reconciled with one or several journal entries of payment."), - 'partner_bank_id': fields.many2one('res.partner.bank', 'Bank Account', - help='Bank Account Number to which the invoice will be paid. 
A Company bank account if this is a Customer Invoice or Supplier Refund, otherwise a Partner bank account number.', readonly=True, states={'draft':[('readonly',False)]}), - 'move_lines':fields.function(_get_lines, type='many2many', relation='account.move.line', string='Entry Lines'), - 'residual': fields.function(_amount_residual, digits_compute=dp.get_precision('Account'), string='Balance', - store={ - 'account.invoice': (lambda self, cr, uid, ids, c={}: ids, ['invoice_line','move_id'], 50), - 'account.invoice.tax': (_get_invoice_tax, None, 50), - 'account.invoice.line': (_get_invoice_line, ['price_unit','invoice_line_tax_id','quantity','discount','invoice_id'], 50), - 'account.move.line': (_get_invoice_from_line, None, 50), - 'account.move.reconcile': (_get_invoice_from_reconcile, None, 50), - }, - help="Remaining amount due."), - 'payment_ids': fields.function(_compute_lines, relation='account.move.line', type="many2many", string='Payments'), - 'move_name': fields.char('Journal Entry', readonly=True, states={'draft':[('readonly',False)]}), - 'user_id': fields.many2one('res.users', 'Salesperson', readonly=True, track_visibility='onchange', states={'draft':[('readonly',False)]}), - 'fiscal_position': fields.many2one('account.fiscal.position', 'Fiscal Position', readonly=True, states={'draft':[('readonly',False)]}), - 'commercial_partner_id': fields.related('partner_id', 'commercial_partner_id', string='Commercial Entity', type='many2one', - relation='res.partner', store=True, readonly=True, - help="The commercial entity that will be used on Journal Entries for this invoice") - } - _defaults = { - 'type': _get_type, - 'state': 'draft', - 'journal_id': _get_journal, - 'currency_id': _get_currency, - 'company_id': lambda self,cr,uid,c: self.pool.get('res.company')._company_default_get(cr, uid, 'account.invoice', context=c), - 'reference_type': 'none', - 'check_total': 0.0, - 'internal_number': False, - 'user_id': lambda s, cr, u, c: u, - 'sent': False, - } + ], 
string='Status', index=True, readonly=True, default='draft', + track_visibility='onchange', copy=False, + help=" * The 'Draft' status is used when a user is encoding a new and unconfirmed Invoice.\n" + " * The 'Pro-forma' when invoice is in Pro-forma status,invoice does not have an invoice number.\n" + " * The 'Open' status is used when user create invoice,a invoice number is generated.Its in open status till user does not pay invoice.\n" + " * The 'Paid' status is set automatically when the invoice is paid. Its related journal entries may or may not be reconciled.\n" + " * The 'Cancelled' status is used when user cancel invoice.") + sent = fields.Boolean(readonly=True, default=False, copy=False, + help="It indicates that the invoice has been sent.") + date_invoice = fields.Date(string='Invoice Date', + readonly=True, states={'draft': [('readonly', False)]}, index=True, + help="Keep empty to use the current date", copy=False) + date_due = fields.Date(string='Due Date', + readonly=True, states={'draft': [('readonly', False)]}, index=True, copy=False, + help="If you use payment terms, the due date will be computed automatically at the generation " + "of accounting entries. The payment term may compute several due dates, for example 50% " + "now and 50% in one month, but if you want to force a due date, make sure that the payment " + "term is not set on the invoice. If you keep the payment term and the due date empty, it " + "means direct payment.") + partner_id = fields.Many2one('res.partner', string='Partner', change_default=True, + required=True, readonly=True, states={'draft': [('readonly', False)]}, + track_visibility='always') + payment_term = fields.Many2one('account.payment.term', string='Payment Terms', + readonly=True, states={'draft': [('readonly', False)]}, + help="If you use payment terms, the due date will be computed automatically at the generation " + "of accounting entries. If you keep the payment term and the due date empty, it means direct payment. 
" + "The payment term may compute several due dates, for example 50% now, 50% in one month.") + period_id = fields.Many2one('account.period', string='Force Period', + domain=[('state', '!=', 'done')], copy=False, + help="Keep empty to use the period of the validation(invoice) date.", + readonly=True, states={'draft': [('readonly', False)]}) + + account_id = fields.Many2one('account.account', string='Account', + required=True, readonly=True, states={'draft': [('readonly', False)]}, + help="The partner account used for this invoice.") + invoice_line = fields.One2many('account.invoice.line', 'invoice_id', string='Invoice Lines', + readonly=True, states={'draft': [('readonly', False)]}, copy=True) + tax_line = fields.One2many('account.invoice.tax', 'invoice_id', string='Tax Lines', + readonly=True, states={'draft': [('readonly', False)]}, copy=True) + move_id = fields.Many2one('account.move', string='Journal Entry', + readonly=True, index=True, ondelete='restrict', copy=False, + help="Link to the automatically generated Journal Items.") + + amount_untaxed = fields.Float(string='Subtotal', digits=dp.get_precision('Account'), + store=True, readonly=True, compute='_compute_amount', track_visibility='always') + amount_tax = fields.Float(string='Tax', digits=dp.get_precision('Account'), + store=True, readonly=True, compute='_compute_amount') + amount_total = fields.Float(string='Total', digits=dp.get_precision('Account'), + store=True, readonly=True, compute='_compute_amount') + + currency_id = fields.Many2one('res.currency', string='Currency', + required=True, readonly=True, states={'draft': [('readonly', False)]}, + default=_default_currency, track_visibility='always') + journal_id = fields.Many2one('account.journal', string='Journal', + required=True, readonly=True, states={'draft': [('readonly', False)]}, + default=_default_journal, + domain="[('type', 'in', {'out_invoice': ['sale'], 'out_refund': ['sale_refund'], 'in_refund': ['purchase_refund'], 'in_invoice': 
['purchase']}.get(type, [])), ('company_id', '=', company_id)]") + company_id = fields.Many2one('res.company', string='Company', change_default=True, + required=True, readonly=True, states={'draft': [('readonly', False)]}, + default=lambda self: self.env['res.company']._company_default_get('account.invoice')) + check_total = fields.Float(string='Verification Total', digits=dp.get_precision('Account'), + readonly=True, states={'draft': [('readonly', False)]}, default=0.0) + + reconciled = fields.Boolean(string='Paid/Reconciled', + store=True, readonly=True, compute='_compute_reconciled', + help="It indicates that the invoice has been paid and the journal entry of the invoice has been reconciled with one or several journal entries of payment.") + partner_bank_id = fields.Many2one('res.partner.bank', string='Bank Account', + help='Bank Account Number to which the invoice will be paid. A Company bank account if this is a Customer Invoice or Supplier Refund, otherwise a Partner bank account number.', + readonly=True, states={'draft': [('readonly', False)]}) + + move_lines = fields.Many2many('account.move.line', string='Entry Lines', + compute='_compute_move_lines') + residual = fields.Float(string='Balance', digits=dp.get_precision('Account'), + compute='_compute_residual', store=True, + help="Remaining amount due.") + payment_ids = fields.Many2many('account.move.line', string='Payments', + compute='_compute_payments') + move_name = fields.Char(string='Journal Entry', readonly=True, + states={'draft': [('readonly', False)]}, copy=False) + user_id = fields.Many2one('res.users', string='Salesperson', track_visibility='onchange', + readonly=True, states={'draft': [('readonly', False)]}, + default=lambda self: self.env.user) + fiscal_position = fields.Many2one('account.fiscal.position', string='Fiscal Position', + readonly=True, states={'draft': [('readonly', False)]}) + commercial_partner_id = fields.Many2one('res.partner', string='Commercial Entity', + 
related='partner_id.commercial_partner_id', store=True, readonly=True, + help="The commercial entity that will be used on Journal Entries for this invoice") + _sql_constraints = [ - ('number_uniq', 'unique(number, company_id, journal_id, type)', 'Invoice Number must be unique per Company!'), + ('number_uniq', 'unique(number, company_id, journal_id, type)', + 'Invoice Number must be unique per Company!'), ] - - - - def fields_view_get(self, cr, uid, view_id=None, view_type=False, context=None, toolbar=False, submenu=False): - journal_obj = self.pool.get('account.journal') - if context is None: - context = {} - - if context.get('active_model', '') in ['res.partner'] and context.get('active_ids', False) and context['active_ids']: - partner = self.pool[context['active_model']].read(cr, uid, context['active_ids'], ['supplier','customer'])[0] + @api.model + def fields_view_get(self, view_id=None, view_type=False, toolbar=False, submenu=False): + context = self._context + if context.get('active_model') == 'res.partner' and context.get('active_ids'): + partner = self.env['res.partner'].browse(context['active_ids'])[0] if not view_type: - view_id = self.pool.get('ir.ui.view').search(cr, uid, [('name', '=', 'account.invoice.tree')]) + view_id = self.env['ir.ui.view'].search([('name', '=', 'account.invoice.tree')]).id view_type = 'tree' - if view_type == 'form': - if partner['supplier'] and not partner['customer']: - view_id = self.pool.get('ir.ui.view').search(cr,uid,[('name', '=', 'account.invoice.supplier.form')]) - elif partner['customer'] and not partner['supplier']: - view_id = self.pool.get('ir.ui.view').search(cr,uid,[('name', '=', 'account.invoice.form')]) - if view_id and isinstance(view_id, (list, tuple)): - view_id = view_id[0] - res = super(account_invoice,self).fields_view_get(cr, uid, view_id=view_id, view_type=view_type, context=context, toolbar=toolbar, submenu=submenu) - - type = context.get('journal_type', False) + elif view_type == 'form': + if 
partner.supplier and not partner.customer: + view_id = self.env['ir.ui.view'].search([('name', '=', 'account.invoice.supplier.form')]).id + elif partner.customer and not partner.supplier: + view_id = self.env['ir.ui.view'].search([('name', '=', 'account.invoice.form')]).id + + res = super(account_invoice, self).fields_view_get(view_id=view_id, view_type=view_type, toolbar=toolbar, submenu=submenu) + + # adapt selection of field journal_id for field in res['fields']: if field == 'journal_id' and type: - journal_select = journal_obj._name_search(cr, uid, '', [('type', '=', type)], context=context, limit=None, name_get_uid=1) + journal_select = self.env['account.journal']._name_search('', [('type', '=', type)], name_get_uid=1) res['fields'][field]['selection'] = journal_select doc = etree.XML(res['arch']) - if context.get('type', False): + if context.get('type'): for node in doc.xpath("//field[@name='partner_bank_id']"): if context['type'] == 'in_refund': node.set('domain', "[('partner_id.ref_companies', 'in', [company_id])]") elif context['type'] == 'out_refund': node.set('domain', "[('partner_id', '=', partner_id)]") - res['arch'] = etree.tostring(doc) if view_type == 'search': - if context.get('type', 'in_invoice') in ('out_invoice', 'out_refund'): + if context.get('type') in ('out_invoice', 'out_refund'): for node in doc.xpath("//group[@name='extended filter']"): doc.remove(node) - res['arch'] = etree.tostring(doc) if view_type == 'tree': partner_string = _('Customer') - if context.get('type', 'out_invoice') in ('in_invoice', 'in_refund'): + if context.get('type') in ('in_invoice', 'in_refund'): partner_string = _('Supplier') for node in doc.xpath("//field[@name='reference']"): node.set('invisible', '0') for node in doc.xpath("//field[@name='partner_id']"): node.set('string', partner_string) - res['arch'] = etree.tostring(doc) - return res - def get_log_context(self, cr, uid, context=None): - if context is None: - context = {} - res = 
self.pool.get('ir.model.data').get_object_reference(cr, uid, 'account', 'invoice_form') - view_id = res and res[1] or False - context['view_id'] = view_id - return context - - def invoice_print(self, cr, uid, ids, context=None): - ''' - This function prints the invoice and mark it as sent, so that we can see more easily the next step of the workflow - ''' - assert len(ids) == 1, 'This option should only be used for a single id at a time.' - self.write(cr, uid, ids, {'sent': True}, context=context) - return self.pool['report'].get_action(cr, uid, ids, 'account.report_invoice', context=context) + res['arch'] = etree.tostring(doc) + return res - def action_invoice_sent(self, cr, uid, ids, context=None): - ''' - This function opens a window to compose an email, with the edi invoice template message loaded by default - ''' - assert len(ids) == 1, 'This option should only be used for a single id at a time.' - ir_model_data = self.pool.get('ir.model.data') - try: - template_id = ir_model_data.get_object_reference(cr, uid, 'account', 'email_template_edi_invoice')[1] - except ValueError: - template_id = False - try: - compose_form_id = ir_model_data.get_object_reference(cr, uid, 'mail', 'email_compose_message_wizard_form')[1] - except ValueError: - compose_form_id = False - ctx = dict(context) - ctx.update({ - 'default_model': 'account.invoice', - 'default_res_id': ids[0], - 'default_use_template': bool(template_id), - 'default_template_id': template_id, - 'default_composition_mode': 'comment', - 'mark_invoice_as_sent': True, - }) + @api.multi + def invoice_print(self): + """ Print the invoice and mark it as sent, so that we can see more + easily the next step of the workflow + """ + assert len(self) == 1, 'This option should only be used for a single id at a time.' 
+ self.sent = True + return self.env['report'].get_action(self, 'account.report_invoice') + + @api.multi + def action_invoice_sent(self): + """ Open a window to compose an email, with the edi invoice template + message loaded by default + """ + assert len(self) == 1, 'This option should only be used for a single id at a time.' + template = self.env.ref('account.email_template_edi_invoice', False) + compose_form = self.env.ref('mail.email_compose_message_wizard_form', False) + ctx = dict(self._context, + default_model='account.invoice', + default_res_id=self.id, + default_use_template=bool(template), + default_template_id=template.id, + default_composition_mode='comment', + mark_invoice_as_sent=True, + ) return { 'name': _('Compose Email'), 'type': 'ir.actions.act_window', 'view_type': 'form', 'view_mode': 'form', 'res_model': 'mail.compose.message', - 'views': [(compose_form_id, 'form')], - 'view_id': compose_form_id, + 'views': [(compose_form.id, 'form')], + 'view_id': compose_form.id, 'target': 'new', 'context': ctx, } - def confirm_paid(self, cr, uid, ids, context=None): - if context is None: - context = {} - self.write(cr, uid, ids, {'state':'paid'}, context=context) - return True - - def unlink(self, cr, uid, ids, context=None): - if context is None: - context = {} - invoices = self.read(cr, uid, ids, ['state','internal_number'], context=context) - unlink_ids = [] - - for t in invoices: - if t['state'] not in ('draft', 'cancel'): - raise openerp.exceptions.Warning(_('You cannot delete an invoice which is not draft or cancelled. You should refund it instead.')) - elif t['internal_number']: - raise openerp.exceptions.Warning(_('You cannot delete an invoice after it has been validated (and received a number). 
You can set it back to "Draft" state and modify its content, then re-confirm it.')) - else: - unlink_ids.append(t['id']) - - osv.osv.unlink(self, cr, uid, unlink_ids, context=context) - return True - - def onchange_partner_id(self, cr, uid, ids, type, partner_id, - date_invoice=False, payment_term=False, - partner_bank_id=False, company_id=False, - context=None): - partner_payment_term = False - acc_id = False - bank_id = False + @api.multi + def confirm_paid(self): + return self.write({'state': 'paid'}) + + @api.multi + def unlink(self): + for invoice in self: + if invoice.state not in ('draft', 'cancel'): + raise Warning(_('You cannot delete an invoice which is not draft or cancelled. You should refund it instead.')) + elif invoice.internal_number: + raise Warning(_('You cannot delete an invoice after it has been validated (and received a number). You can set it back to "Draft" state and modify its content, then re-confirm it.')) + return super(account_invoice, self).unlink() + + @api.multi + def onchange_partner_id(self, type, partner_id, date_invoice=False, + payment_term=False, partner_bank_id=False, company_id=False): + account_id = False + payment_term_id = False fiscal_position = False + bank_id = False - opt = [('uid', str(uid))] if partner_id: - - opt.insert(0, ('id', partner_id)) - p = self.pool.get('res.partner').browse(cr, uid, partner_id, - context=context) + p = self.env['res.partner'].browse(partner_id) + rec_account = p.property_account_receivable + pay_account = p.property_account_payable if company_id: - if (p.property_account_receivable.company_id and (p.property_account_receivable.company_id.id != company_id)) and (p.property_account_payable.company_id and (p.property_account_payable.company_id.id != company_id)): - property_obj = self.pool.get('ir.property') - rec_pro_id = property_obj.search(cr,uid,[('name','=','property_account_receivable'),('res_id','=','res.partner,'+str(partner_id)+''),('company_id','=',company_id)]) - pay_pro_id = 
property_obj.search(cr,uid,[('name','=','property_account_payable'),('res_id','=','res.partner,'+str(partner_id)+''),('company_id','=',company_id)]) - if not rec_pro_id: - rec_pro_id = property_obj.search(cr,uid,[('name','=','property_account_receivable'),('company_id','=',company_id)]) - if not pay_pro_id: - pay_pro_id = property_obj.search(cr,uid,[('name','=','property_account_payable'),('company_id','=',company_id)]) - rec_line_data = property_obj.read(cr,uid,rec_pro_id,['name','value_reference','res_id']) - pay_line_data = property_obj.read(cr,uid,pay_pro_id,['name','value_reference','res_id']) - rec_res_id = rec_line_data and rec_line_data[0].get('value_reference',False) and int(rec_line_data[0]['value_reference'].split(',')[1]) or False - pay_res_id = pay_line_data and pay_line_data[0].get('value_reference',False) and int(pay_line_data[0]['value_reference'].split(',')[1]) or False - if not rec_res_id and not pay_res_id: - model, action_id = self.pool['ir.model.data'].get_object_reference(cr, uid, 'account', 'action_account_config') + if p.property_account_receivable.company_id and \ + p.property_account_receivable.company_id.id != company_id and \ + p.property_account_payable.company_id and \ + p.property_account_payable.company_id.id != company_id: + prop = self.env['ir.property'] + rec_dom = [('name', '=', 'property_account_receivable'), ('company_id', '=', company_id)] + pay_dom = [('name', '=', 'property_account_payable'), ('company_id', '=', company_id)] + res_dom = [('res_id', '=', 'res.partner,%s' % partner_id)] + rec_prop = prop.search(rec_dom + res_dom) or prop.search(rec_dom) + pay_prop = prop.search(pay_dom + res_dom) or prop.search(pay_dom) + rec_account = rec_prop.get_by_record(rec_prop) + pay_account = pay_prop.get_by_record(pay_prop) + if not rec_account and not pay_account: + action = self.env.ref('account.action_account_config') msg = _('Cannot find a chart of accounts for this company, You should configure it. 
\nPlease go to Account Configuration.') - raise openerp.exceptions.RedirectWarning(msg, action_id, _('Go to the configuration panel')) - account_obj = self.pool.get('account.account') - rec_obj_acc = account_obj.browse(cr, uid, [rec_res_id], - context=context) - pay_obj_acc = account_obj.browse(cr, uid, [pay_res_id], - context=context) - p.property_account_receivable = rec_obj_acc[0] - p.property_account_payable = pay_obj_acc[0] + raise RedirectWarning(msg, action.id, _('Go to the configuration panel')) if type in ('out_invoice', 'out_refund'): - acc_id = p.property_account_receivable.id - partner_payment_term = p.property_payment_term and p.property_payment_term.id or False + account_id = rec_account.id + payment_term_id = p.property_payment_term.id else: - acc_id = p.property_account_payable.id - partner_payment_term = p.property_supplier_payment_term and p.property_supplier_payment_term.id or False - fiscal_position = p.property_account_position and p.property_account_position.id or False - if p.bank_ids: - bank_id = p.bank_ids[0].id + account_id = pay_account.id + payment_term_id = p.property_supplier_payment_term.id + fiscal_position = p.property_account_position.id + bank_id = p.bank_ids.id result = {'value': { - 'account_id': acc_id, - 'payment_term': partner_payment_term, - 'fiscal_position': fiscal_position - } - } + 'account_id': account_id, + 'payment_term': payment_term_id, + 'fiscal_position': fiscal_position, + }} if type in ('in_invoice', 'in_refund'): result['value']['partner_bank_id'] = bank_id - if payment_term != partner_payment_term: - if partner_payment_term: - to_update = self.onchange_payment_term_date_invoice( - cr, uid, ids, partner_payment_term, date_invoice) - result['value'].update(to_update['value']) + if payment_term != payment_term_id: + if payment_term_id: + to_update = self.onchange_payment_term_date_invoice(payment_term_id, date_invoice) + result['value'].update(to_update.get('value', {})) else: result['value']['date_due'] = False 
if partner_bank_id != bank_id: - to_update = self.onchange_partner_bank(cr, uid, ids, bank_id) - result['value'].update(to_update['value']) + to_update = self.onchange_partner_bank(bank_id) + result['value'].update(to_update.get('value', {})) + return result - def onchange_journal_id(self, cr, uid, ids, journal_id=False, context=None): - result = {} + @api.multi + def onchange_journal_id(self, journal_id=False): if journal_id: - journal = self.pool.get('account.journal').browse(cr, uid, journal_id, context=context) - currency_id = journal.currency and journal.currency.id or journal.company_id.currency_id.id - company_id = journal.company_id.id - result = {'value': { - 'currency_id': currency_id, - 'company_id': company_id, - } + journal = self.env['account.journal'].browse(journal_id) + return { + 'value': { + 'currency_id': journal.currency.id or journal.company_id.currency_id.id, + 'company_id': journal.company_id.id, } - return result + } + return {} - def onchange_payment_term_date_invoice(self, cr, uid, ids, payment_term_id, date_invoice): - res = {} - if isinstance(ids, (int, long)): - ids = [ids] + @api.multi + def onchange_payment_term_date_invoice(self, payment_term_id, date_invoice): if not date_invoice: - date_invoice = time.strftime('%Y-%m-%d') + date_invoice = fields.Date.today() if not payment_term_id: - inv = self.browse(cr, uid, ids[0]) - #To make sure the invoice due date should contain due date which is entered by user when there is no payment term defined - return {'value':{'date_due': inv.date_due and inv.date_due or date_invoice}} - pterm_list = self.pool.get('account.payment.term').compute(cr, uid, payment_term_id, value=1, date_ref=date_invoice) + # To make sure the invoice due date should contain due date which is + # entered by user when there is no payment term defined + return {'value': {'date_due': self.date_due or date_invoice}} + pterm = self.env['account.payment.term'].browse(payment_term_id) + pterm_list = pterm.compute(value=1, 
date_ref=date_invoice)[0] if pterm_list: - pterm_list = [line[0] for line in pterm_list] - pterm_list.sort() - res = {'value':{'date_due': pterm_list[-1]}} + return {'value': {'date_due': max(line[0] for line in pterm_list)}} else: - raise osv.except_osv(_('Insufficient Data!'), _('The payment term of supplier does not have a payment term line.')) - return res + raise except_orm(_('Insufficient Data!'), + _('The payment term of supplier does not have a payment term line.')) - def onchange_invoice_line(self, cr, uid, ids, lines): + @api.multi + def onchange_invoice_line(self, lines): return {} - def onchange_partner_bank(self, cursor, user, ids, partner_bank_id=False): + @api.multi + def onchange_partner_bank(self, partner_bank_id=False): return {'value': {}} - def onchange_company_id(self, cr, uid, ids, company_id, part_id, type, invoice_line, currency_id, context=None): - #TODO: add the missing context parameter when forward-porting in trunk so we can remove - # this hack! - context = self.pool['res.users'].context_get(cr, uid) + @api.multi + def onchange_company_id(self, company_id, part_id, type, invoice_line, currency_id): + # TODO: add the missing context parameter when forward-porting in trunk + # so we can remove this hack! 
+ self = self.with_context(self.env['res.users'].context_get()) - val = {} - dom = {} - obj_journal = self.pool.get('account.journal') - account_obj = self.pool.get('account.account') - inv_line_obj = self.pool.get('account.invoice.line') + values = {} + domain = {} if company_id and part_id and type: - acc_id = False - partner_obj = self.pool.get('res.partner').browse(cr, uid, part_id, context=context) - - if partner_obj.property_account_payable and partner_obj.property_account_receivable: - if partner_obj.property_account_payable.company_id.id != company_id and partner_obj.property_account_receivable.company_id.id != company_id: - property_obj = self.pool.get('ir.property') - rec_pro_id = property_obj.search(cr, uid, [('name','=','property_account_receivable'),('res_id','=','res.partner,'+str(part_id)+''),('company_id','=',company_id)]) - pay_pro_id = property_obj.search(cr, uid, [('name','=','property_account_payable'),('res_id','=','res.partner,'+str(part_id)+''),('company_id','=',company_id)]) - - if not rec_pro_id: - rec_pro_id = property_obj.search(cr, uid, [('name','=','property_account_receivable'),('company_id','=',company_id)]) - if not pay_pro_id: - pay_pro_id = property_obj.search(cr, uid, [('name','=','property_account_payable'),('company_id','=',company_id)]) - - rec_line_data = property_obj.read(cr, uid, rec_pro_id, ['name','value_reference','res_id']) - pay_line_data = property_obj.read(cr, uid, pay_pro_id, ['name','value_reference','res_id']) - rec_res_id = rec_line_data and rec_line_data[0].get('value_reference',False) and int(rec_line_data[0]['value_reference'].split(',')[1]) or False - pay_res_id = pay_line_data and pay_line_data[0].get('value_reference',False) and int(pay_line_data[0]['value_reference'].split(',')[1]) or False - - if not rec_res_id and not pay_res_id: - model, action_id = self.pool['ir.model.data'].get_object_reference(cr, uid, 'account', 'action_account_config') - msg = _('Cannot find a chart of accounts for this company, You 
should configure it. \nPlease go to Account Configuration.') - raise openerp.exceptions.RedirectWarning(msg, action_id, _('Go to the configuration panel')) - if type in ('out_invoice', 'out_refund'): - acc_id = rec_res_id - else: - acc_id = pay_res_id + p = self.env['res.partner'].browse(part_id) + if p.property_account_payable and p.property_account_receivable and \ + p.property_account_payable.company_id.id != company_id and \ + p.property_account_receivable.company_id.id != company_id: + prop = self.env['ir.property'] + rec_dom = [('name', '=', 'property_account_receivable'), ('company_id', '=', company_id)] + pay_dom = [('name', '=', 'property_account_payable'), ('company_id', '=', company_id)] + res_dom = [('res_id', '=', 'res.partner,%s' % part_id)] + rec_prop = prop.search(rec_dom + res_dom) or prop.search(rec_dom) + pay_prop = prop.search(pay_dom + res_dom) or prop.search(pay_dom) + rec_account = rec_prop.get_by_record(rec_prop) + pay_account = pay_prop.get_by_record(pay_prop) + if not rec_account and not pay_account: + action = self.env.ref('account.action_account_config') + msg = _('Cannot find a chart of accounts for this company, You should configure it. \nPlease go to Account Configuration.') + raise RedirectWarning(msg, action.id, _('Go to the configuration panel')) + + if type in ('out_invoice', 'out_refund'): + acc_id = rec_account.id + else: + acc_id = pay_account.id + values= {'account_id': acc_id} - val= {'account_id': acc_id} - if ids: + if self: if company_id: - inv_obj = self.browse(cr,uid,ids) - for line in inv_obj[0].invoice_line: - if line.account_id: - if line.account_id.company_id.id != company_id: - result_id = account_obj.search(cr, uid, [('name','=',line.account_id.name),('company_id','=',company_id)]) - if not result_id: - model, action_id = self.pool['ir.model.data'].get_object_reference(cr, uid, 'account', 'action_account_config') - msg = _('Cannot find a chart of accounts for this company, You should configure it. 
\nPlease go to Account Configuration.') - raise openerp.exceptions.RedirectWarning(msg, action_id, _('Go to the configuration panel')) - inv_line_obj.write(cr, uid, [line.id], {'account_id': result_id[-1]}) - else: - if invoice_line: - for inv_line in invoice_line: - obj_l = account_obj.browse(cr, uid, inv_line[2]['account_id']) - if obj_l.company_id.id != company_id: - raise osv.except_osv(_('Configuration Error!'), - _('Invoice line account\'s company and invoice\'s company does not match.')) - else: + for line in self.invoice_line: + if not line.account_id: continue - if company_id and type: - journal_mapping = { - 'out_invoice': 'sale', - 'out_refund': 'sale_refund', - 'in_refund': 'purchase_refund', - 'in_invoice': 'purchase', - } - journal_type = journal_mapping[type] - journal_ids = obj_journal.search(cr, uid, [('company_id','=',company_id), ('type', '=', journal_type)]) - if journal_ids: - val['journal_id'] = journal_ids[0] - ir_values_obj = self.pool.get('ir.values') - res_journal_default = ir_values_obj.get(cr, uid, 'default', 'type=%s' % (type), ['account.invoice']) - for r in res_journal_default: - if r[1] == 'journal_id' and r[2] in journal_ids: - val['journal_id'] = r[2] - if not val.get('journal_id', False): - journal_type_map = dict(obj_journal._columns['type'].selection) - journal_type_label = self.pool['ir.translation']._get_source(cr, uid, None, ('code','selection'), - context.get('lang'), - journal_type_map.get(journal_type)) - model, action_id = self.pool['ir.model.data'].get_object_reference(cr, uid, 'account', 'action_account_journal_form') - msg = _("""Cannot find any account journal of type "%s" for this company, You should create one.\n Please go to Journal Configuration""") % journal_type_label - raise openerp.exceptions.RedirectWarning(msg, action_id, _('Go to the configuration panel')) - dom = {'journal_id': [('id', 'in', journal_ids)]} - else: - journal_ids = obj_journal.search(cr, uid, []) - - return {'value': val, 'domain': dom} + if 
line.account_id.company_id.id == company_id: + continue + accounts = self.env['account.account'].search([('name', '=', line.account_id.name), ('company_id', '=', company_id)]) + if not accounts: + action = self.env.ref('account.action_account_config') + msg = _('Cannot find a chart of accounts for this company, You should configure it. \nPlease go to Account Configuration.') + raise RedirectWarning(msg, action.id, _('Go to the configuration panel')) + line.write({'account_id': accounts[-1].id}) + else: + for line_cmd in invoice_line or []: + if len(line_cmd) >= 3 and isinstance(line_cmd[2], dict): + line = self.env['account.account'].browse(line_cmd[2]['account_id']) + if line.company_id.id != company_id: + raise except_orm( + _('Configuration Error!'), + _("Invoice line account's company and invoice's company does not match.") + ) - # go from canceled state to draft state - def action_cancel_draft(self, cr, uid, ids, *args): - self.write(cr, uid, ids, {'state':'draft'}) - self.delete_workflow(cr, uid, ids) - self.create_workflow(cr, uid, ids) + if company_id and type: + journal_type = TYPE2JOURNAL[type] + journals = self.env['account.journal'].search([('type', '=', journal_type), ('company_id', '=', company_id)]) + if journals: + values['journal_id'] = journals[0].id + journal_defaults = self.env['ir.values'].get_defaults_dict('account.invoice', 'type=%s' % type) + if 'journal_id' in journal_defaults: + values['journal_id'] = journal_defaults['journal_id'] + if not values.get('journal_id'): + field_desc = journals.fields_get(['journal_id']) + type_label = next(t for t, label in field_desc['journal_id']['selection'] if t == journal_type) + action = self.env.ref('account.action_account_journal_form') + msg = _('Cannot find any account journal of type "%s" for this company, You should create one.\n Please go to Journal Configuration') % type_label + raise RedirectWarning(msg, action.id, _('Go to the configuration panel')) + domain = {'journal_id': [('id', 'in', 
journals.ids)]} + + return {'value': values, 'domain': domain} + + @api.multi + def action_cancel_draft(self): + # go from canceled state to draft state + self.write({'state': 'draft'}) + self.delete_workflow() + self.create_workflow() return True - def get_formview_id(self, cr, uid, id, context=None): + @api.one + @api.returns('ir.ui.view') + def get_formview_id(self): """ Update form view id of action to open the invoice """ - obj = self.browse(cr, uid, id, context=context) - if obj.type == 'in_invoice': - model, view_id = self.pool.get('ir.model.data').get_object_reference(cr, uid, 'account', 'invoice_supplier_form') + if self.type == 'in_invoice': + return self.env.ref('account.invoice_supplier_form') else: - model, view_id = self.pool.get('ir.model.data').get_object_reference(cr, uid, 'account', 'invoice_form') - return view_id - - # Workflow stuff - ################# - - # return the ids of the move lines which has the same account than the invoice - # whose id is in ids - def move_line_id_payment_get(self, cr, uid, ids, *args): - if not ids: return [] - result = self.move_line_id_payment_gets(cr, uid, ids, *args) - return result.get(ids[0], []) - - def move_line_id_payment_gets(self, cr, uid, ids, *args): - res = {} - if not ids: return res - cr.execute('SELECT i.id, l.id '\ - 'FROM account_move_line l '\ - 'LEFT JOIN account_invoice i ON (i.move_id=l.move_id) '\ - 'WHERE i.id IN %s '\ - 'AND l.account_id=i.account_id', - (tuple(ids),)) - for r in cr.fetchall(): - res.setdefault(r[0], []) - res[r[0]].append( r[1] ) - return res - - def copy(self, cr, uid, id, default=None, context=None): - default = default or {} - default.update({ - 'state':'draft', - 'number':False, - 'move_id':False, - 'move_name':False, - 'internal_number': False, - 'period_id': False, - 'sent': False, - }) - if 'date_invoice' not in default: - default.update({ - 'date_invoice':False - }) - if 'date_due' not in default: - default.update({ - 'date_due':False - }) - return 
super(account_invoice, self).copy(cr, uid, id, default, context) + return self.env.ref('account.invoice_form') - def test_paid(self, cr, uid, ids, *args): - res = self.move_line_id_payment_get(cr, uid, ids) - if not res: + @api.multi + def move_line_id_payment_get(self): + # return the move line ids with the same account as the invoice self + if not self.id: + return [] + query = """ SELECT l.id + FROM account_move_line l, account_invoice i + WHERE i.id = %s AND l.move_id = i.move_id AND l.account_id = i.account_id + """ + self._cr.execute(query, (self.id,)) + return [row[0] for row in self._cr.fetchall()] + + @api.multi + def test_paid(self): + # check whether all corresponding account move lines are reconciled + line_ids = self.move_line_id_payment_get() + if not line_ids: return False - ok = True - for id in res: - cr.execute('select reconcile_id from account_move_line where id=%s', (id,)) - ok = ok and bool(cr.fetchone()[0]) - return ok - - def button_reset_taxes(self, cr, uid, ids, context=None): - if context is None: - context = {} - ctx = context.copy() - ait_obj = self.pool.get('account.invoice.tax') - for id in ids: - cr.execute("DELETE FROM account_invoice_tax WHERE invoice_id=%s AND manual is False", (id,)) - partner = self.browse(cr, uid, id, context=ctx).partner_id + query = "SELECT reconcile_id FROM account_move_line WHERE id IN %s" + self._cr.execute(query, (tuple(line_ids),)) + return all(row[0] for row in self._cr.fetchall()) + + @api.multi + def button_reset_taxes(self): + account_invoice_tax = self.env['account.invoice.tax'] + ctx = dict(self._context) + for invoice in self: + self._cr.execute("DELETE FROM account_invoice_tax WHERE invoice_id=%s AND manual is False", (invoice.id,)) + self.invalidate_cache() + partner = invoice.partner_id if partner.lang: - ctx.update({'lang': partner.lang}) - for taxe in ait_obj.compute(cr, uid, id, context=ctx).values(): - ait_obj.create(cr, uid, taxe) - # Update the stored value (fields.function), so we write 
to trigger recompute - self.pool.get('account.invoice').write(cr, uid, ids, {'invoice_line':[]}, context=ctx) - return True - - def button_compute(self, cr, uid, ids, context=None, set_total=False): - self.button_reset_taxes(cr, uid, ids, context) - for inv in self.browse(cr, uid, ids, context=context): + ctx['lang'] = partner.lang + for taxe in account_invoice_tax.compute(invoice).values(): + account_invoice_tax.create(taxe) + # dummy write on self to trigger recomputations + return self.with_context(ctx).write({'invoice_line': []}) + + @api.multi + def button_compute(self, set_total=False): + self.button_reset_taxes() + for invoice in self: if set_total: - self.pool.get('account.invoice').write(cr, uid, [inv.id], {'check_total': inv.amount_total}) + invoice.check_total = invoice.amount_total return True - def _convert_ref(self, cr, uid, ref): + @staticmethod + def _convert_ref(ref): return (ref or '').replace('/','') - def _get_analytic_lines(self, cr, uid, id, context=None): - if context is None: - context = {} - inv = self.browse(cr, uid, id) - cur_obj = self.pool.get('res.currency') - - company_currency = self.pool['res.company'].browse(cr, uid, inv.company_id.id).currency_id.id - if inv.type in ('out_invoice', 'in_refund'): - sign = 1 - else: - sign = -1 + @api.multi + def _get_analytic_lines(self): + """ Return a list of dict for creating analytic lines for self[0] """ + company_currency = self.company_id.currency_id + sign = 1 if self.type in ('out_invoice', 'in_refund') else -1 - iml = self.pool.get('account.invoice.line').move_line_get(cr, uid, inv.id, context=context) + iml = self.env['account.invoice.line'].move_line_get(self.id) for il in iml: if il['account_analytic_id']: - if inv.type in ('in_invoice', 'in_refund'): - ref = inv.reference + if self.type in ('in_invoice', 'in_refund'): + ref = self.reference else: - ref = self._convert_ref(cr, uid, inv.number) - if not inv.journal_id.analytic_journal_id: - raise osv.except_osv(_('No Analytic 
Journal!'),_("You have to define an analytic journal on the '%s' journal!") % (inv.journal_id.name,)) + ref = self._convert_ref(self.number) + if not self.journal_id.analytic_journal_id: + raise except_orm(_('No Analytic Journal!'), + _("You have to define an analytic journal on the '%s' journal!") % (self.journal_id.name,)) + currency = self.currency_id.with_context(date=self.date_invoice) il['analytic_lines'] = [(0,0, { 'name': il['name'], - 'date': inv['date_invoice'], + 'date': self.date_invoice, 'account_id': il['account_analytic_id'], 'unit_amount': il['quantity'], - 'amount': cur_obj.compute(cr, uid, inv.currency_id.id, company_currency, il['price'], context={'date': inv.date_invoice}) * sign, + 'amount': currency.compute(il['price'], company_currency) * sign, 'product_id': il['product_id'], 'product_uom_id': il['uos_id'], 'general_account_id': il['account_id'], - 'journal_id': inv.journal_id.analytic_journal_id.id, + 'journal_id': self.journal_id.analytic_journal_id.id, 'ref': ref, })] return iml - def action_date_assign(self, cr, uid, ids, *args): - for inv in self.browse(cr, uid, ids): - res = self.onchange_payment_term_date_invoice(cr, uid, inv.id, inv.payment_term.id, inv.date_invoice) - if res and res['value']: - self.write(cr, uid, [inv.id], res['value']) + @api.multi + def action_date_assign(self): + for inv in self: + res = inv.onchange_payment_term_date_invoice(inv.payment_term.id, inv.date_invoice) + if res and res.get('value'): + inv.write(res['value']) return True - def finalize_invoice_move_lines(self, cr, uid, invoice_browse, move_lines): - """finalize_invoice_move_lines(cr, uid, invoice, move_lines) -> move_lines - Hook method to be overridden in additional modules to verify and possibly alter the - move lines to be created by an invoice, for special cases. 
- :param invoice_browse: browsable record of the invoice that is generating the move lines - :param move_lines: list of dictionaries with the account.move.lines (as for create()) - :return: the (possibly updated) final move_lines to create for this invoice + @api.multi + def finalize_invoice_move_lines(self, move_lines): + """ finalize_invoice_move_lines(move_lines) -> move_lines + + Hook method to be overridden in additional modules to verify and + possibly alter the move lines to be created by an invoice, for + special cases. + :param move_lines: list of dictionaries with the account.move.lines (as for create()) + :return: the (possibly updated) final move_lines to create for this invoice """ return move_lines - def check_tax_lines(self, cr, uid, inv, compute_taxes, ait_obj): - company_currency = self.pool['res.company'].browse(cr, uid, inv.company_id.id).currency_id - if not inv.tax_line: + @api.multi + def check_tax_lines(self, compute_taxes): + account_invoice_tax = self.env['account.invoice.tax'] + company_currency = self.company_id.currency_id + if not self.tax_line: for tax in compute_taxes.values(): - ait_obj.create(cr, uid, tax) + account_invoice_tax.create(tax) else: tax_key = [] - for tax in inv.tax_line: + for tax in self.tax_line: if tax.manual: continue key = (tax.tax_code_id.id, tax.base_code_id.id, tax.account_id.id, tax.account_analytic_id.id) tax_key.append(key) - if not key in compute_taxes: - raise osv.except_osv(_('Warning!'), _('Global taxes defined, but they are not in invoice lines !')) + if key not in compute_taxes: + raise except_orm(_('Warning!'), _('Global taxes defined, but they are not in invoice lines !')) base = compute_taxes[key]['base'] if abs(base - tax.base) > company_currency.rounding: - raise osv.except_osv(_('Warning!'), _('Tax base different!\nClick on compute to update the tax base.')) + raise except_orm(_('Warning!'), _('Tax base different!\nClick on compute to update the tax base.')) for key in compute_taxes: - if not key 
in tax_key: - raise osv.except_osv(_('Warning!'), _('Taxes are missing!\nClick on compute button.')) + if key not in tax_key: + raise except_orm(_('Warning!'), _('Taxes are missing!\nClick on compute button.')) - def compute_invoice_totals(self, cr, uid, inv, company_currency, ref, invoice_move_lines, context=None): - if context is None: - context={} + @api.multi + def compute_invoice_totals(self, company_currency, ref, invoice_move_lines): total = 0 total_currency = 0 - cur_obj = self.pool.get('res.currency') - for i in invoice_move_lines: - if inv.currency_id.id != company_currency: - context.update({'date': inv.date_invoice or time.strftime('%Y-%m-%d')}) - i['currency_id'] = inv.currency_id.id - i['amount_currency'] = i['price'] - i['price'] = cur_obj.compute(cr, uid, inv.currency_id.id, - company_currency, i['price'], - context=context) + for line in invoice_move_lines: + if self.currency_id != company_currency: + currency = self.currency_id.with_context(date=self.date_invoice or fields.Date.today()) + line['currency_id'] = currency.id + line['amount_currency'] = line['price'] + line['price'] = currency.compute(line['price'], company_currency) else: - i['amount_currency'] = False - i['currency_id'] = False - i['ref'] = ref - if inv.type in ('out_invoice','in_refund'): - total += i['price'] - total_currency += i['amount_currency'] or i['price'] - i['price'] = - i['price'] + line['currency_id'] = False + line['amount_currency'] = False + line['ref'] = ref + if self.type in ('out_invoice','in_refund'): + total += line['price'] + total_currency += line['amount_currency'] or line['price'] + line['price'] = - line['price'] else: - total -= i['price'] - total_currency -= i['amount_currency'] or i['price'] + total -= line['price'] + total_currency -= line['amount_currency'] or line['price'] return total, total_currency, invoice_move_lines - def inv_line_characteristic_hashcode(self, invoice, invoice_line): + def inv_line_characteristic_hashcode(self, invoice_line): 
"""Overridable hashcode generation for invoice lines. Lines having the same hashcode will be grouped together if the journal has the 'group line' option. Of course a module can add fields to invoice lines that would need to be tested too before merging lines or not.""" - return "%s-%s-%s-%s-%s"%( + return "%s-%s-%s-%s-%s" % ( invoice_line['account_id'], - invoice_line.get('tax_code_id',"False"), - invoice_line.get('product_id',"False"), - invoice_line.get('analytic_account_id',"False"), - invoice_line.get('date_maturity',"False")) + invoice_line.get('tax_code_id', 'False'), + invoice_line.get('product_id', 'False'), + invoice_line.get('analytic_account_id', 'False'), + invoice_line.get('date_maturity', 'False'), + ) - def group_lines(self, cr, uid, iml, line, inv): + def group_lines(self, iml, line): """Merge account move lines (and hence analytic lines) if invoice line hashcodes are equals""" - if inv.journal_id.group_invoice_lines: + if self.journal_id.group_invoice_lines: line2 = {} for x, y, l in line: - tmp = self.inv_line_characteristic_hashcode(inv, l) - + tmp = self.inv_line_characteristic_hashcode(l) if tmp in line2: am = line2[tmp]['debit'] - line2[tmp]['credit'] + (l['debit'] - l['credit']) line2[tmp]['debit'] = (am > 0) and am or 0.0 @@ -902,43 +778,35 @@ class account_invoice(osv.osv): line.append((0,0,val)) return line - def action_move_create(self, cr, uid, ids, context=None): - """Creates invoice related analytics and financial move lines""" - ait_obj = self.pool.get('account.invoice.tax') - cur_obj = self.pool.get('res.currency') - period_obj = self.pool.get('account.period') - payment_term_obj = self.pool.get('account.payment.term') - journal_obj = self.pool.get('account.journal') - move_obj = self.pool.get('account.move') - if context is None: - context = {} - inv_date = {} - for inv in self.browse(cr, uid, ids, context=context): + @api.multi + def action_move_create(self): + """ Creates invoice related analytics and financial move lines """ + 
account_invoice_tax = self.env['account.invoice.tax'] + account_move = self.env['account.move'] + + for inv in self: if not inv.journal_id.sequence_id: - raise osv.except_osv(_('Error!'), _('Please define sequence on the journal related to this invoice.')) + raise except_orm(_('Error!'), _('Please define sequence on the journal related to this invoice.')) if not inv.invoice_line: - raise osv.except_osv(_('No Invoice Lines!'), _('Please create some invoice lines.')) + raise except_orm(_('No Invoice Lines!'), _('Please create some invoice lines.')) if inv.move_id: continue - ctx = context.copy() - ctx.update({'lang': inv.partner_id.lang}) - date_invoice = inv.date_invoice or fields.date.context_today(self,cr,uid,context=context) - inv_date = {'date_invoice': date_invoice} - company_currency = self.pool['res.company'].browse(cr, uid, inv.company_id.id).currency_id.id - # create the analytical lines - # one move line per invoice line - iml = self._get_analytic_lines(cr, uid, inv.id, context=ctx) + ctx = dict(self._context, lang=inv.partner_id.lang) + date_invoice = inv.date_invoice or fields.Date.context_today(self) + + company_currency = inv.company_id.currency_id + # create the analytical lines, one move line per invoice line + iml = inv._get_analytic_lines() # check if taxes are all computed - compute_taxes = ait_obj.compute(cr, uid, inv.id, context=ctx) - self.check_tax_lines(cr, uid, inv, compute_taxes, ait_obj) + compute_taxes = account_invoice_tax.compute(inv) + inv.check_tax_lines(compute_taxes) # I disabled the check_total feature - group_check_total_id = self.pool.get('ir.model.data').get_object_reference(cr, uid, 'account', 'group_supplier_inv_check_total')[1] - group_check_total = self.pool.get('res.groups').browse(cr, uid, group_check_total_id, context=context) - if group_check_total and uid in [x.id for x in group_check_total.users]: - if (inv.type in ('in_invoice', 'in_refund') and abs(inv.check_total - inv.amount_total) >= 
(inv.currency_id.rounding/2.0)): - raise osv.except_osv(_('Bad Total!'), _('Please verify the price of the invoice!\nThe encoded total does not match the computed total.')) + group_check_total = self.env.ref('account.group_supplier_inv_check_total') + if self.env.user in group_check_total.users: + if inv.type in ('in_invoice', 'in_refund') and abs(inv.check_total - inv.amount_total) >= (inv.currency_id.rounding / 2.0): + raise except_orm(_('Bad Total!'), _('Please verify the price of the invoice!\nThe encoded total does not match the computed total.')) if inv.payment_term: total_fixed = total_percent = 0 @@ -949,54 +817,46 @@ class account_invoice(osv.osv): total_percent += line.value_amount total_fixed = (total_fixed * 100) / (inv.amount_total or 1.0) if (total_fixed + total_percent) > 100: - raise osv.except_osv(_('Error!'), _("Cannot create the invoice.\nThe related payment term is probably misconfigured as it gives a computed amount greater than the total invoiced amount. In order to avoid rounding issues, the latest line of your payment term must be of type 'balance'.")) + raise except_orm(_('Error!'), _("Cannot create the invoice.\nThe related payment term is probably misconfigured as it gives a computed amount greater than the total invoiced amount. 
In order to avoid rounding issues, the latest line of your payment term must be of type 'balance'.")) # one move line per tax line - iml += ait_obj.move_line_get(cr, uid, inv.id) + iml += account_invoice_tax.move_line_get(inv.id) if inv.type in ('in_invoice', 'in_refund'): ref = inv.reference else: - ref = self._convert_ref(cr, uid, inv.number) + ref = self._convert_ref(inv.number) - diff_currency_p = inv.currency_id.id <> company_currency + diff_currency = inv.currency_id != company_currency # create one move line for the total and possibly adjust the other lines amount - total = 0 - total_currency = 0 - total, total_currency, iml = self.compute_invoice_totals(cr, uid, inv, company_currency, ref, iml, context=ctx) - acc_id = inv.account_id.id + total, total_currency, iml = inv.with_context(ctx).compute_invoice_totals(company_currency, ref, iml) - name = inv['name'] or inv['supplier_invoice_number'] or '/' - totlines = False + name = inv.name or inv.supplier_invoice_number or '/' + totlines = [] if inv.payment_term: - totlines = payment_term_obj.compute(cr, - uid, inv.payment_term.id, total, date_invoice or False, context=ctx) + totlines = inv.with_context(ctx).payment_term.compute(total, date_invoice)[0] if totlines: res_amount_currency = total_currency - i = 0 - ctx.update({'date': date_invoice}) - for t in totlines: - if inv.currency_id.id != company_currency: - amount_currency = cur_obj.compute(cr, uid, company_currency, inv.currency_id.id, t[1], context=ctx) + ctx['date'] = date_invoice + for i, t in enumerate(totlines): + if inv.currency_id != company_currency: + amount_currency = company_currency.with_context(ctx).compute(t[1], inv.currency_id) else: amount_currency = False - # last line add the diff + # last line: add the diff res_amount_currency -= amount_currency or 0 - i += 1 - if i == len(totlines): + if i + 1 == len(totlines): amount_currency += res_amount_currency iml.append({ 'type': 'dest', 'name': name, 'price': t[1], - 'account_id': acc_id, + 
'account_id': inv.account_id.id, 'date_maturity': t[0], - 'amount_currency': diff_currency_p \ - and amount_currency or False, - 'currency_id': diff_currency_p \ - and inv.currency_id.id or False, + 'amount_currency': diff_currency and amount_currency, + 'currency_id': diff_currency and inv.currency_id.id, 'ref': ref, }) else: @@ -1004,579 +864,542 @@ class account_invoice(osv.osv): 'type': 'dest', 'name': name, 'price': total, - 'account_id': acc_id, - 'date_maturity': inv.date_due or False, - 'amount_currency': diff_currency_p \ - and total_currency or False, - 'currency_id': diff_currency_p \ - and inv.currency_id.id or False, + 'account_id': inv.account_id.id, + 'date_maturity': inv.date_due, + 'amount_currency': diff_currency and total_currency, + 'currency_id': diff_currency and inv.currency_id.id, 'ref': ref - }) + }) - date = date_invoice or time.strftime('%Y-%m-%d') + date = date_invoice - part = self.pool.get("res.partner")._find_accounting_partner(inv.partner_id) + part = self.env['res.partner']._find_accounting_partner(inv.partner_id) - line = map(lambda x:(0,0,self.line_get_convert(cr, uid, x, part.id, date, context=ctx)),iml) + line = [(0, 0, self.line_get_convert(l, part.id, date)) for l in iml] + line = inv.group_lines(iml, line) - line = self.group_lines(cr, uid, iml, line, inv) - - journal_id = inv.journal_id.id - journal = journal_obj.browse(cr, uid, journal_id, context=ctx) + journal = inv.journal_id.with_context(ctx) if journal.centralisation: - raise osv.except_osv(_('User Error!'), + raise except_orm(_('User Error!'), _('You cannot create an invoice on a centralized journal. 
Uncheck the centralized counterpart box in the related journal from the configuration menu.')) - line = self.finalize_invoice_move_lines(cr, uid, inv, line) + line = inv.finalize_invoice_move_lines(line) - move = { - 'ref': inv.reference and inv.reference or inv.name, + move_vals = { + 'ref': inv.reference or inv.name, 'line_id': line, - 'journal_id': journal_id, + 'journal_id': journal.id, 'date': date, 'narration': inv.comment, 'company_id': inv.company_id.id, } - period_id = inv.period_id and inv.period_id.id or False - ctx.update(company_id=inv.company_id.id) - if not period_id: - period_ids = period_obj.find(cr, uid, date_invoice, context=ctx) - period_id = period_ids and period_ids[0] or False - if period_id: - move['period_id'] = period_id + ctx['company_id'] = inv.company_id.id + period = inv.period_id + if not period: + period = period.with_context(ctx).find(date_invoice)[:1] + if period: + move_vals['period_id'] = period.id for i in line: - i[2]['period_id'] = period_id + i[2]['period_id'] = period.id - ctx.update(invoice=inv) - move_id = move_obj.create(cr, uid, move, context=ctx) - new_move_name = move_obj.browse(cr, uid, move_id, context=ctx).name + ctx['invoice'] = inv + move = account_move.with_context(ctx).create(move_vals) # make the invoice point to that move - vals = inv_date - vals.update(move_id=move_id, period_id=period_id, move_name=new_move_name) - self.write(cr, uid, [inv.id], vals, context=ctx) + vals = { + 'date_invoice': date_invoice, + 'move_id': move.id, + 'period_id': period.id, + 'move_name': move.name, + } + inv.with_context(ctx).write(vals) # Pass invoice in context in method post: used if you want to get the same # account move reference when creating the same invoice after a cancelled one: - move_obj.post(cr, uid, [move_id], context=ctx) - self._log_event(cr, uid, ids) + move.post() + self._log_event() return True - def invoice_validate(self, cr, uid, ids, context=None): - self.write(cr, uid, ids, {'state':'open'}, 
context=context) - return True + @api.multi + def invoice_validate(self): + return self.write({'state': 'open'}) - def line_get_convert(self, cr, uid, x, part, date, context=None): + @api.model + def line_get_convert(self, line, part, date): return { - 'date_maturity': x.get('date_maturity', False), + 'date_maturity': line.get('date_maturity', False), 'partner_id': part, - 'name': x['name'][:64], + 'name': line['name'][:64], 'date': date, - 'debit': x['price']>0 and x['price'], - 'credit': x['price']<0 and -x['price'], - 'account_id': x['account_id'], - 'analytic_lines': x.get('analytic_lines', []), - 'amount_currency': x['price']>0 and abs(x.get('amount_currency', False)) or -abs(x.get('amount_currency', False)), - 'currency_id': x.get('currency_id', False), - 'tax_code_id': x.get('tax_code_id', False), - 'tax_amount': x.get('tax_amount', False), - 'ref': x.get('ref', False), - 'quantity': x.get('quantity',1.00), - 'product_id': x.get('product_id', False), - 'product_uom_id': x.get('uos_id', False), - 'analytic_account_id': x.get('account_analytic_id', False), + 'debit': line['price']>0 and line['price'], + 'credit': line['price']<0 and -line['price'], + 'account_id': line['account_id'], + 'analytic_lines': line.get('analytic_lines', []), + 'amount_currency': line['price']>0 and abs(line.get('amount_currency', False)) or -abs(line.get('amount_currency', False)), + 'currency_id': line.get('currency_id', False), + 'tax_code_id': line.get('tax_code_id', False), + 'tax_amount': line.get('tax_amount', False), + 'ref': line.get('ref', False), + 'quantity': line.get('quantity',1.00), + 'product_id': line.get('product_id', False), + 'product_uom_id': line.get('uos_id', False), + 'analytic_account_id': line.get('account_analytic_id', False), } - def action_number(self, cr, uid, ids, context=None): - if context is None: - context = {} - #TODO: not correct fix but required a frech values before reading it. 
- self.write(cr, uid, ids, {}) - - for obj_inv in self.browse(cr, uid, ids, context=context): - invtype = obj_inv.type - number = obj_inv.number - move_id = obj_inv.move_id and obj_inv.move_id.id or False - reference = obj_inv.reference or '' + @api.multi + def action_number(self): + #TODO: not correct fix but required a fresh values before reading it. + self.write({}) - self.write(cr, uid, ids, {'internal_number': number}) + for inv in self: + self.write({'internal_number': inv.number}) - if invtype in ('in_invoice', 'in_refund'): - if not reference: - ref = self._convert_ref(cr, uid, number) + if inv.type in ('in_invoice', 'in_refund'): + if not inv.reference: + ref = self._convert_ref(inv.number) else: - ref = reference + ref = inv.reference else: - ref = self._convert_ref(cr, uid, number) - - cr.execute('UPDATE account_move SET ref=%s ' \ - 'WHERE id=%s AND (ref is null OR ref = \'\')', - (ref, move_id)) - cr.execute('UPDATE account_move_line SET ref=%s ' \ - 'WHERE move_id=%s AND (ref is null OR ref = \'\')', - (ref, move_id)) - cr.execute('UPDATE account_analytic_line SET ref=%s ' \ - 'FROM account_move_line ' \ - 'WHERE account_move_line.move_id = %s ' \ - 'AND account_analytic_line.move_id = account_move_line.id', - (ref, move_id)) + ref = self._convert_ref(inv.number) + + self._cr.execute(""" UPDATE account_move SET ref=%s + WHERE id=%s AND (ref IS NULL OR ref = '')""", + (ref, inv.move_id.id)) + self._cr.execute(""" UPDATE account_move_line SET ref=%s + WHERE move_id=%s AND (ref IS NULL OR ref = '')""", + (ref, inv.move_id.id)) + self._cr.execute(""" UPDATE account_analytic_line SET ref=%s + FROM account_move_line + WHERE account_move_line.move_id = %s AND + account_analytic_line.move_id = account_move_line.id""", + (ref, inv.move_id.id)) + self.invalidate_cache() + return True - def action_cancel(self, cr, uid, ids, context=None): - if context is None: - context = {} - account_move_obj = self.pool.get('account.move') - invoices = self.read(cr, uid, ids, 
['move_id', 'payment_ids']) - move_ids = [] # ones that we will need to remove - for i in invoices: - if i['move_id']: - move_ids.append(i['move_id'][0]) - if i['payment_ids']: - account_move_line_obj = self.pool.get('account.move.line') - pay_ids = account_move_line_obj.browse(cr, uid, i['payment_ids']) - for move_line in pay_ids: - if move_line.reconcile_partial_id and move_line.reconcile_partial_id.line_partial_ids: - raise osv.except_osv(_('Error!'), _('You cannot cancel an invoice which is partially paid. You need to unreconcile related payment entries first.')) + @api.multi + def action_cancel(self): + moves = self.env['account.move'] + for inv in self: + if inv.move_id: + moves += inv.move_id + if inv.payment_ids: + for move_line in inv.payment_ids: + if move_line.reconcile_partial_id.line_partial_ids: + raise except_orm(_('Error!'), _('You cannot cancel an invoice which is partially paid. You need to unreconcile related payment entries first.')) # First, set the invoices as cancelled and detach the move ids - self.write(cr, uid, ids, {'state':'cancel', 'move_id':False}) - if move_ids: + self.write({'state': 'cancel', 'move_id': False}) + if moves: # second, invalidate the move(s) - account_move_obj.button_cancel(cr, uid, move_ids, context=context) + moves.button_cancel() # delete the move this invoice was pointing to # Note that the corresponding move_lines and move_reconciles # will be automatically deleted too - account_move_obj.unlink(cr, uid, move_ids, context=context) - self._log_event(cr, uid, ids, -1.0, 'Cancel Invoice') + moves.unlink() + self._log_event(-1.0, 'Cancel Invoice') return True ################### - def list_distinct_taxes(self, cr, uid, ids): - invoices = self.browse(cr, uid, ids) - taxes = {} - for inv in invoices: - for tax in inv.tax_line: - if not tax['name'] in taxes: - taxes[tax['name']] = {'name': tax['name']} - return taxes.values() - - def _log_event(self, cr, uid, ids, factor=1.0, name='Open Invoice'): + @api.multi + def 
_log_event(self, factor=1.0, name='Open Invoice'): #TODO: implement messages system return True - def name_get(self, cr, uid, ids, context=None): - if not ids: - return [] - types = { - 'out_invoice': _('Invoice'), - 'in_invoice': _('Supplier Invoice'), - 'out_refund': _('Refund'), - 'in_refund': _('Supplier Refund'), - } - return [(r['id'], '%s %s' % (r['number'] or types[r['type']], r['name'] or '')) for r in self.read(cr, uid, ids, ['type', 'number', 'name'], context, load='_classic_write')] - - def name_search(self, cr, user, name, args=None, operator='ilike', context=None, limit=100): - if not args: - args = [] - if context is None: - context = {} - ids = [] + @api.one + def _compute_display_name(self): + TYPES = { + 'out_invoice': _('Invoice'), + 'in_invoice': _('Supplier Invoice'), + 'out_refund': _('Refund'), + 'in_refund': _('Supplier Refund'), + } + self.display_name = "%s %s" % (self.number or TYPES[self.type], self.name or '') + + @api.model + def name_search(self, name, args=None, operator='ilike', limit=100): + args = args or [] + recs = self.browse() if name: - ids = self.search(cr, user, [('number','=',name)] + args, limit=limit, context=context) - if not ids: - ids = self.search(cr, user, [('name',operator,name)] + args, limit=limit, context=context) - return self.name_get(cr, user, ids, context) + recs = self.search([('number', '=', name)] + args, limit=limit) + if not recs: + recs = self.search([('name', operator, name)] + args, limit=limit) + return recs.name_get() - def _refund_cleanup_lines(self, cr, uid, lines, context=None): - """Convert records to dict of values suitable for one2many line creation + @api.model + def _refund_cleanup_lines(self, lines): + """ Convert records to dict of values suitable for one2many line creation - :param list(browse_record) lines: records to convert + :param recordset lines: records to convert :return: list of command tuple for one2many line creation [(0, 0, dict of valueis), ...] 
""" - clean_lines = [] + result = [] for line in lines: - clean_line = {} - for field in line._all_columns.keys(): - if line._all_columns[field].column._type == 'many2one': - clean_line[field] = line[field].id - elif line._all_columns[field].column._type not in ['many2many','one2many']: - clean_line[field] = line[field] - elif field == 'invoice_line_tax_id': - tax_list = [] - for tax in line[field]: - tax_list.append(tax.id) - clean_line[field] = [(6,0, tax_list)] - clean_lines.append(clean_line) - return map(lambda x: (0,0,x), clean_lines) - - def _prepare_refund(self, cr, uid, invoice, date=None, period_id=None, description=None, journal_id=None, context=None): - """Prepare the dict of values to create the new refund from the invoice. + values = {} + for name, field in line._fields.iteritems(): + if name in MAGIC_COLUMNS: + continue + elif field.type == 'many2one': + values[name] = line[name].id + elif field.type not in ['many2many', 'one2many']: + values[name] = line[name] + elif name == 'invoice_line_tax_id': + values[name] = [(6, 0, line[name].ids)] + result.append((0, 0, values)) + return result + + @api.model + def _prepare_refund(self, invoice, date=None, period_id=None, description=None, journal_id=None): + """ Prepare the dict of values to create the new refund from the invoice. This method may be overridden to implement custom refund generation (making sure to call super() to establish a clean extension chain). 
- :param integer invoice_id: id of the invoice to refund - :param dict invoice: read of the invoice to refund + :param record invoice: invoice to refund :param string date: refund creation date from the wizard :param integer period_id: force account.period from the wizard :param string description: description of the refund from the wizard :param integer journal_id: account.journal from the wizard :return: dict of value to create() the refund """ - obj_journal = self.pool.get('account.journal') - - type_dict = { - 'out_invoice': 'out_refund', # Customer Invoice - 'in_invoice': 'in_refund', # Supplier Invoice - 'out_refund': 'out_invoice', # Customer Refund - 'in_refund': 'in_invoice', # Supplier Refund - } - invoice_data = {} + values = {} for field in ['name', 'reference', 'comment', 'date_due', 'partner_id', 'company_id', 'account_id', 'currency_id', 'payment_term', 'user_id', 'fiscal_position']: - if invoice._all_columns[field].column._type == 'many2one': - invoice_data[field] = invoice[field].id + if invoice._fields[field].type == 'many2one': + values[field] = invoice[field].id else: - invoice_data[field] = invoice[field] if invoice[field] else False + values[field] = invoice[field] or False - invoice_lines = self._refund_cleanup_lines(cr, uid, invoice.invoice_line, context=context) + values['invoice_line'] = self._refund_cleanup_lines(invoice.invoice_line) + + tax_lines = filter(lambda l: l.manual, invoice.tax_line) + values['tax_line'] = self._refund_cleanup_lines(tax_lines) - tax_lines = filter(lambda l: l['manual'], invoice.tax_line) - tax_lines = self._refund_cleanup_lines(cr, uid, tax_lines, context=context) if journal_id: - refund_journal_ids = [journal_id] + journal = self.env['account.journal'].browse(journal_id) elif invoice['type'] == 'in_invoice': - refund_journal_ids = obj_journal.search(cr, uid, [('type','=','purchase_refund')], context=context) + journal = self.env['account.journal'].search([('type', '=', 'purchase_refund')], limit=1) else: - 
refund_journal_ids = obj_journal.search(cr, uid, [('type','=','sale_refund')], context=context) - - if not date: - date = time.strftime('%Y-%m-%d') - invoice_data.update({ - 'type': type_dict[invoice['type']], - 'date_invoice': date, - 'state': 'draft', - 'number': False, - 'invoice_line': invoice_lines, - 'tax_line': tax_lines, - 'journal_id': refund_journal_ids and refund_journal_ids[0] or False, - }) + journal = self.env['account.journal'].search([('type', '=', 'sale_refund')], limit=1) + values['journal_id'] = journal.id + + values['type'] = TYPE2REFUND[invoice['type']] + values['date_invoice'] = date or fields.Date.today() + values['state'] = 'draft' + values['number'] = False + if period_id: - invoice_data['period_id'] = period_id + values['period_id'] = period_id if description: - invoice_data['name'] = description - return invoice_data - - def refund(self, cr, uid, ids, date=None, period_id=None, description=None, journal_id=None, context=None): - new_ids = [] - for invoice in self.browse(cr, uid, ids, context=context): - invoice = self._prepare_refund(cr, uid, invoice, - date=date, - period_id=period_id, - description=description, - journal_id=journal_id, - context=context) + values['name'] = description + return values + + @api.multi + @api.returns('self') + def refund(self, date=None, period_id=None, description=None, journal_id=None): + new_invoices = self.browse() + for invoice in self: # create the new invoice - new_ids.append(self.create(cr, uid, invoice, context=context)) - - return new_ids - - def pay_and_reconcile(self, cr, uid, ids, pay_amount, pay_account_id, period_id, pay_journal_id, writeoff_acc_id, writeoff_period_id, writeoff_journal_id, context=None, name=''): - if context is None: - context = {} - #TODO check if we can use different period for payment and the writeoff line - assert len(ids)==1, "Can only pay one invoice at a time." 
- invoice = self.browse(cr, uid, ids[0], context=context) - src_account_id = invoice.account_id.id + values = self._prepare_refund(invoice, date=date, period_id=period_id, + description=description, journal_id=journal_id) + new_invoices += self.create(values) + return new_invoices + + @api.v8 + def pay_and_reconcile(self, pay_amount, pay_account_id, period_id, pay_journal_id, + writeoff_acc_id, writeoff_period_id, writeoff_journal_id, name=''): + # TODO check if we can use different period for payment and the writeoff line + assert len(self)==1, "Can only pay one invoice at a time." # Take the seq as name for move - types = {'out_invoice': -1, 'in_invoice': 1, 'out_refund': 1, 'in_refund': -1} - direction = types[invoice.type] - #take the choosen date - if 'date_p' in context and context['date_p']: - date=context['date_p'] - else: - date=time.strftime('%Y-%m-%d') + SIGN = {'out_invoice': -1, 'in_invoice': 1, 'out_refund': 1, 'in_refund': -1} + direction = SIGN[self.type] + # take the chosen date + date = self._context.get('date_p') or fields.Date.today() # Take the amount in currency and the currency of the payment - if 'amount_currency' in context and context['amount_currency'] and 'currency_id' in context and context['currency_id']: - amount_currency = context['amount_currency'] - currency_id = context['currency_id'] + if self._context.get('amount_currency') and self._context.get('currency_id'): + amount_currency = self._context['amount_currency'] + currency_id = self._context['currency_id'] else: amount_currency = False currency_id = False - if invoice.type in ('in_invoice', 'in_refund'): - ref = invoice.reference + pay_journal = self.env['account.journal'].browse(pay_journal_id) + if self.type in ('in_invoice', 'in_refund'): + ref = self.reference else: - ref = self._convert_ref(cr, uid, invoice.number) - partner = self.pool['res.partner']._find_accounting_partner(invoice.partner_id) + ref = self._convert_ref(self.number) + partner = 
self.partner_id._find_accounting_partner(self.partner_id) + name = name or self.invoice_line.name or self.number # Pay attention to the sign for both debit/credit AND amount_currency l1 = { - 'debit': direction * pay_amount>0 and direction * pay_amount, - 'credit': direction * pay_amount<0 and - direction * pay_amount, - 'account_id': src_account_id, + 'name': name, + 'debit': direction * pay_amount > 0 and direction * pay_amount, + 'credit': direction * pay_amount < 0 and -direction * pay_amount, + 'account_id': self.account_id.id, 'partner_id': partner.id, - 'ref':ref, + 'ref': ref, 'date': date, - 'currency_id':currency_id, - 'amount_currency':amount_currency and direction * amount_currency or 0.0, - 'company_id': invoice.company_id.id, + 'currency_id': currency_id, + 'amount_currency': direction * (amount_currency or 0.0), + 'company_id': self.company_id.id, } l2 = { - 'debit': direction * pay_amount<0 and - direction * pay_amount, - 'credit': direction * pay_amount>0 and direction * pay_amount, + 'name': name, + 'debit': direction * pay_amount < 0 and -direction * pay_amount, + 'credit': direction * pay_amount > 0 and direction * pay_amount, 'account_id': pay_account_id, 'partner_id': partner.id, - 'ref':ref, + 'ref': ref, 'date': date, - 'currency_id':currency_id, - 'amount_currency':amount_currency and - direction * amount_currency or 0.0, - 'company_id': invoice.company_id.id, + 'currency_id': currency_id, + 'amount_currency': -direction * (amount_currency or 0.0), + 'company_id': self.company_id.id, } + move = self.env['account.move'].create({ + 'ref': ref, + 'line_id': [(0, 0, l1), (0, 0, l2)], + 'journal_id': pay_journal_id, + 'period_id': period_id, + 'date': date, + }) - if not name: - name = invoice.invoice_line and invoice.invoice_line[0].name or invoice.number - l1['name'] = name - l2['name'] = name - - lines = [(0, 0, l1), (0, 0, l2)] - move = {'ref': ref, 'line_id': lines, 'journal_id': pay_journal_id, 'period_id': period_id, 'date': date} - 
move_id = self.pool.get('account.move').create(cr, uid, move, context=context) - - line_ids = [] + move_ids = (move | self.move_id).ids + self._cr.execute("SELECT id FROM account_move_line WHERE move_id IN %s", + (tuple(move_ids),)) + lines = self.env['account.move.line'].browse([r[0] for r in self._cr.fetchall()]) + lines2rec = lines.browse() total = 0.0 - line = self.pool.get('account.move.line') - move_ids = [move_id,] - if invoice.move_id: - move_ids.append(invoice.move_id.id) - cr.execute('SELECT id FROM account_move_line '\ - 'WHERE move_id IN %s', - ((move_id, invoice.move_id.id),)) - lines = line.browse(cr, uid, map(lambda x: x[0], cr.fetchall()) ) - for l in lines+invoice.payment_ids: - if l.account_id.id == src_account_id: - line_ids.append(l.id) - total += (l.debit or 0.0) - (l.credit or 0.0) - - inv_id, name = self.name_get(cr, uid, [invoice.id], context=context)[0] - if (not round(total,self.pool.get('decimal.precision').precision_get(cr, uid, 'Account'))) or writeoff_acc_id: - self.pool.get('account.move.line').reconcile(cr, uid, line_ids, 'manual', writeoff_acc_id, writeoff_period_id, writeoff_journal_id, context) + for line in itertools.chain(lines, self.payment_ids): + if line.account_id == self.account_id: + lines2rec += line + total += (line.debit or 0.0) - (line.credit or 0.0) + + inv_id, name = self.name_get()[0] + if not round(total, self.env['decimal.precision'].precision_get('Account')) or writeoff_acc_id: + lines2rec.reconcile('manual', writeoff_acc_id, writeoff_period_id, writeoff_journal_id) else: - code = invoice.currency_id.symbol + code = self.currency_id.symbol # TODO: use currency's formatting function msg = _("Invoice partially paid: %s%s of %s%s (%s%s remaining).") % \ - (pay_amount, code, invoice.amount_total, code, total, code) - self.message_post(cr, uid, [inv_id], body=msg, context=context) - self.pool.get('account.move.line').reconcile_partial(cr, uid, line_ids, 'manual', context) + (pay_amount, code, self.amount_total, code, 
total, code) + self.message_post(body=msg) + lines2rec.reconcile_partial('manual') # Update the stored value (fields.function), so we write to trigger recompute - self.pool.get('account.invoice').write(cr, uid, ids, {}, context=context) - return True - - -class account_invoice_line(osv.osv): - - def _amount_line(self, cr, uid, ids, prop, unknow_none, unknow_dict): - res = {} - tax_obj = self.pool.get('account.tax') - cur_obj = self.pool.get('res.currency') - for line in self.browse(cr, uid, ids): - price = line.price_unit * (1-(line.discount or 0.0)/100.0) - taxes = tax_obj.compute_all(cr, uid, line.invoice_line_tax_id, price, line.quantity, product=line.product_id, partner=line.invoice_id.partner_id) - res[line.id] = taxes['total'] - if line.invoice_id: - cur = line.invoice_id.currency_id - res[line.id] = cur_obj.round(cr, uid, cur, res[line.id]) - return res + return self.write({}) - def _price_unit_default(self, cr, uid, context=None): - if context is None: - context = {} - if context.get('check_total', False): - t = context['check_total'] - for l in context.get('invoice_line', {}): - if isinstance(l, (list, tuple)) and len(l) >= 3 and l[2]: - tax_obj = self.pool.get('account.tax') - p = l[2].get('price_unit', 0) * (1-l[2].get('discount', 0)/100.0) - t = t - (p * l[2].get('quantity')) - taxes = l[2].get('invoice_line_tax_id') - if len(taxes[0]) >= 3 and taxes[0][2]: - taxes = tax_obj.browse(cr, uid, list(taxes[0][2])) - for tax in tax_obj.compute_all(cr, uid, taxes, p,l[2].get('quantity'), l[2].get('product_id', False), context.get('partner_id', False))['taxes']: - t = t - tax['amount'] - return t - return 0 + @api.v7 + def pay_and_reconcile(self, cr, uid, ids, pay_amount, pay_account_id, period_id, pay_journal_id, + writeoff_acc_id, writeoff_period_id, writeoff_journal_id, context=None, name=''): + recs = self.browse(cr, uid, ids, context) + return recs.pay_and_reconcile(pay_amount, pay_account_id, period_id, pay_journal_id, + writeoff_acc_id, 
writeoff_period_id, writeoff_journal_id, name=name) +class account_invoice_line(models.Model): _name = "account.invoice.line" _description = "Invoice Line" _order = "invoice_id,sequence,id" - _columns = { - 'name': fields.text('Description', required=True), - 'origin': fields.char('Source Document', help="Reference of the document that produced this invoice."), - 'sequence': fields.integer('Sequence', help="Gives the sequence of this line when displaying the invoice."), - 'invoice_id': fields.many2one('account.invoice', 'Invoice Reference', ondelete='cascade', select=True), - 'uos_id': fields.many2one('product.uom', 'Unit of Measure', ondelete='set null', select=True), - 'product_id': fields.many2one('product.product', 'Product', ondelete='set null', select=True), - 'account_id': fields.many2one('account.account', 'Account', required=True, domain=[('type','<>','view'), ('type', '<>', 'closed')], help="The income or expense account related to the selected product."), - 'price_unit': fields.float('Unit Price', required=True, digits_compute= dp.get_precision('Product Price')), - 'price_subtotal': fields.function(_amount_line, string='Amount', type="float", - digits_compute= dp.get_precision('Account'), store=True), - 'quantity': fields.float('Quantity', digits_compute= dp.get_precision('Product Unit of Measure'), required=True), - 'discount': fields.float('Discount (%)', digits_compute= dp.get_precision('Discount')), - 'invoice_line_tax_id': fields.many2many('account.tax', 'account_invoice_line_tax', 'invoice_line_id', 'tax_id', 'Taxes', domain=[('parent_id','=',False)]), - 'account_analytic_id': fields.many2one('account.analytic.account', 'Analytic Account'), - 'company_id': fields.related('invoice_id','company_id',type='many2one',relation='res.company',string='Company', store=True, readonly=True), - 'partner_id': fields.related('invoice_id','partner_id',type='many2one',relation='res.partner',string='Partner',store=True) - } - def _default_account_id(self, cr, uid, 
context=None): + @api.one + @api.depends('price_unit', 'discount', 'invoice_line_tax_id', 'quantity', + 'product_id', 'invoice_id.partner_id', 'invoice_id.currency_id') + def _compute_price(self): + price = self.price_unit * (1 - (self.discount or 0.0) / 100.0) + taxes = self.invoice_line_tax_id.compute_all(price, self.quantity, product=self.product_id, partner=self.invoice_id.partner_id) + self.price_subtotal = taxes['total'] + if self.invoice_id: + self.price_subtotal = self.invoice_id.currency_id.round(self.price_subtotal) + + @api.model + def _default_price_unit(self): + if not self._context.get('check_total'): + return 0 + total = self._context['check_total'] + for l in self._context.get('invoice_line', []): + if isinstance(l, (list, tuple)) and len(l) >= 3 and l[2]: + vals = l[2] + price = vals.get('price_unit', 0) * (1 - vals.get('discount', 0) / 100.0) + total = total - (price * vals.get('quantity')) + taxes = vals.get('invoice_line_tax_id') + if taxes and len(taxes[0]) >= 3 and taxes[0][2]: + taxes = self.env['account.tax'].browse(taxes[0][2]) + tax_res = taxes.compute_all(price, vals.get('quantity'), + product=vals.get('product_id'), partner=self._context.get('partner_id')) + for tax in tax_res['taxes']: + total = total - tax['amount'] + return total + + @api.model + def _default_account(self): # XXX this gets the default account for the user's company, # it should get the default account for the invoice's company # however, the invoice's company does not reach this point - if context is None: - context = {} - if context.get('type') in ('out_invoice','out_refund'): - prop = self.pool.get('ir.property').get(cr, uid, 'property_account_income_categ', 'product.category', context=context) + if self._context.get('type') in ('out_invoice', 'out_refund'): + return self.env['ir.property'].get('property_account_income_categ', 'product.category') else: - prop = self.pool.get('ir.property').get(cr, uid, 'property_account_expense_categ', 'product.category', 
context=context) - return prop and prop.id or False - - _defaults = { - 'quantity': 1, - 'discount': 0.0, - 'price_unit': _price_unit_default, - 'account_id': _default_account_id, - 'sequence': 10, - } - - def fields_view_get(self, cr, uid, view_id=None, view_type='form', context=None, toolbar=False, submenu=False): - if context is None: - context = {} - res = super(account_invoice_line,self).fields_view_get(cr, uid, view_id=view_id, view_type=view_type, context=context, toolbar=toolbar, submenu=submenu) - if context.get('type', False): + return self.env['ir.property'].get('property_account_expense_categ', 'product.category') + + name = fields.Text(string='Description', required=True) + origin = fields.Char(string='Source Document', + help="Reference of the document that produced this invoice.") + sequence = fields.Integer(string='Sequence', default=10, + help="Gives the sequence of this line when displaying the invoice.") + invoice_id = fields.Many2one('account.invoice', string='Invoice Reference', + ondelete='cascade', index=True) + uos_id = fields.Many2one('product.uom', string='Unit of Measure', + ondelete='set null', index=True) + product_id = fields.Many2one('product.product', string='Product', + ondelete='set null', index=True) + account_id = fields.Many2one('account.account', string='Account', + required=True, domain=[('type', 'not in', ['view', 'closed'])], + default=_default_account, + help="The income or expense account related to the selected product.") + price_unit = fields.Float(string='Unit Price', required=True, + digits= dp.get_precision('Product Price'), + default=_default_price_unit) + price_subtotal = fields.Float(string='Amount', digits= dp.get_precision('Account'), + store=True, readonly=True, compute='_compute_price') + quantity = fields.Float(string='Quantity', digits= dp.get_precision('Product Unit of Measure'), + required=True, default=1) + discount = fields.Float(string='Discount (%)', digits= dp.get_precision('Discount'), + default=0.0) 
+ invoice_line_tax_id = fields.Many2many('account.tax', + 'account_invoice_line_tax', 'invoice_line_id', 'tax_id', + string='Taxes', domain=[('parent_id', '=', False)]) + account_analytic_id = fields.Many2one('account.analytic.account', + string='Analytic Account') + company_id = fields.Many2one('res.company', string='Company', + related='invoice_id.company_id', store=True, readonly=True) + partner_id = fields.Many2one('res.partner', string='Partner', + related='invoice_id.partner_id', store=True, readonly=True) + + @api.model + def fields_view_get(self, view_id=None, view_type='form', toolbar=False, submenu=False): + res = super(account_invoice_line, self).fields_view_get( + view_id=view_id, view_type=view_type, toolbar=toolbar, submenu=submenu) + if self._context.get('type'): doc = etree.XML(res['arch']) for node in doc.xpath("//field[@name='product_id']"): - if context['type'] in ('in_invoice', 'in_refund'): + if self._context['type'] in ('in_invoice', 'in_refund'): node.set('domain', "[('purchase_ok', '=', True)]") else: node.set('domain', "[('sale_ok', '=', True)]") res['arch'] = etree.tostring(doc) return res - def product_id_change(self, cr, uid, ids, product, uom_id, qty=0, name='', type='out_invoice', partner_id=False, fposition_id=False, price_unit=False, currency_id=False, context=None, company_id=None): - if context is None: - context = {} - company_id = company_id if company_id != None else context.get('company_id',False) - context = dict(context) - context.update({'company_id': company_id, 'force_company': company_id}) + @api.multi + def product_id_change(self, product, uom_id, qty=0, name='', type='out_invoice', + partner_id=False, fposition_id=False, price_unit=False, currency_id=False, + context=None, company_id=None): + context = context or {} + company_id = company_id if company_id is not None else context.get('company_id', False) + self = self.with_context(company_id=company_id, force_company=company_id) + if not partner_id: - raise 
osv.except_osv(_('No Partner Defined!'),_("You must first select a partner!") ) + raise except_orm(_('No Partner Defined!'), _("You must first select a partner!")) if not product: if type in ('in_invoice', 'in_refund'): - return {'value': {}, 'domain':{'product_uom':[]}} + return {'value': {}, 'domain': {'product_uom': []}} else: - return {'value': {'price_unit': 0.0}, 'domain':{'product_uom':[]}} - part = self.pool.get('res.partner').browse(cr, uid, partner_id, context=context) - fpos_obj = self.pool.get('account.fiscal.position') - fpos = fposition_id and fpos_obj.browse(cr, uid, fposition_id, context=context) or False + return {'value': {'price_unit': 0.0}, 'domain': {'product_uom': []}} + + values = {} + + part = self.env['res.partner'].browse(partner_id) + fpos = self.env['account.fiscal.position'].browse(fposition_id) if part.lang: - context.update({'lang': part.lang}) - result = {} - res = self.pool.get('product.product').browse(cr, uid, product, context=context) - - result['name'] = res.partner_ref - if type in ('out_invoice','out_refund'): - a = res.property_account_income.id - if not a: - a = res.categ_id.property_account_income_categ.id + self = self.with_context(lang=part.lang) + product = self.env['product.product'].browse(product) + + values['name'] = product.partner_ref + if type in ('out_invoice', 'out_refund'): + account = product.property_account_income or product.categ_id.property_account_income_categ else: - a = res.property_account_expense.id - if not a: - a = res.categ_id.property_account_expense_categ.id - a = fpos_obj.map_account(cr, uid, fpos, a) - if a: - result['account_id'] = a + account = product.property_account_expense or product.categ_id.property_account_expense_categ + account = fpos.map_account(account) + if account: + values['account_id'] = account.id if type in ('out_invoice', 'out_refund'): - taxes = res.taxes_id and res.taxes_id or (a and self.pool.get('account.account').browse(cr, uid, a, context=context).tax_ids or False) - 
if res.description_sale: - result['name'] += '\n'+res.description_sale + taxes = product.taxes_id or account.tax_ids + if product.description_sale: + values['name'] += '\n' + product.description_sale else: - taxes = res.supplier_taxes_id and res.supplier_taxes_id or (a and self.pool.get('account.account').browse(cr, uid, a, context=context).tax_ids or False) - if res.description_purchase: - result['name'] += '\n'+res.description_purchase + taxes = product.supplier_taxes_id or account.tax_ids + if product.description_purchase: + values['name'] += '\n' + product.description_purchase - tax_id = fpos_obj.map_tax(cr, uid, fpos, taxes) + taxes = fpos.map_tax(taxes) + values['invoice_line_tax_id'] = taxes.ids if type in ('in_invoice', 'in_refund'): - result.update( {'price_unit': price_unit or res.standard_price,'invoice_line_tax_id': tax_id} ) + values['price_unit'] = price_unit or product.standard_price else: - result.update({'price_unit': res.list_price, 'invoice_line_tax_id': tax_id}) + values['price_unit'] = product.list_price - result['uos_id'] = uom_id or res.uom_id.id + values['uos_id'] = uom_id or product.uom_id.id + domain = {'uos_id': [('category_id', '=', product.uom_id.category_id.id)]} - domain = {'uos_id':[('category_id','=',res.uom_id.category_id.id)]} + company = self.env['res.company'].browse(company_id) + currency = self.env['res.currency'].browse(currency_id) - res_final = {'value':result, 'domain':domain} + if company and currency: + if company.currency_id != currency: + if type in ('in_invoice', 'in_refund'): + values['price_unit'] = product.standard_price + values['price_unit'] = values['price_unit'] * currency.rate - if not company_id or not currency_id: - return res_final + if values['uos_id'] and values['uos_id'] != product.uom_id.id: + values['price_unit'] = self.env['product.uom']._compute_price( + product.uom_id.id, values['price_unit'], values['uos_id']) - company = self.pool.get('res.company').browse(cr, uid, company_id, context=context) - 
currency = self.pool.get('res.currency').browse(cr, uid, currency_id, context=context) + return {'value': values, 'domain': domain} - if company.currency_id.id != currency.id: - if type in ('in_invoice', 'in_refund'): - res_final['value']['price_unit'] = res.standard_price - new_price = res_final['value']['price_unit'] * currency.rate - res_final['value']['price_unit'] = new_price - - if result['uos_id'] and result['uos_id'] != res.uom_id.id: - new_price = self.pool.get('product.uom')._compute_price(cr, uid, res.uom_id.id, res_final['value']['price_unit'], result['uos_id']) - res_final['value']['price_unit'] = new_price - return res_final - - def uos_id_change(self, cr, uid, ids, product, uom, qty=0, name='', type='out_invoice', partner_id=False, fposition_id=False, price_unit=False, currency_id=False, context=None, company_id=None): - if context is None: - context = {} - company_id = company_id if company_id != None else context.get('company_id',False) - context = dict(context) - context.update({'company_id': company_id}) + @api.multi + def uos_id_change(self, product, uom, qty=0, name='', type='out_invoice', partner_id=False, + fposition_id=False, price_unit=False, currency_id=False, context=None, company_id=None): + context = context or {} + company_id = company_id if company_id != None else context.get('company_id', False) + self = self.with_context(company_id=company_id) + + result = self.product_id_change( + product, uom, qty, name, type, partner_id, fposition_id, price_unit, + currency_id, context=context, company_id=company_id, + ) warning = {} - res = self.product_id_change(cr, uid, ids, product, uom, qty, name, type, partner_id, fposition_id, price_unit, currency_id, context=context, company_id=company_id) if not uom: - res['value']['price_unit'] = 0.0 + result['value']['price_unit'] = 0.0 if product and uom: - prod = self.pool.get('product.product').browse(cr, uid, product, context=context) - prod_uom = self.pool.get('product.uom').browse(cr, uid, uom, 
context=context) - if prod.uom_id.category_id.id != prod_uom.category_id.id: + prod = self.env['product.product'].browse(product) + prod_uom = self.env['product.uom'].browse(uom) + if prod.uom_id.category_id != prod_uom.category_id: warning = { 'title': _('Warning!'), - 'message': _('The selected unit of measure is not compatible with the unit of measure of the product.') + 'message': _('The selected unit of measure is not compatible with the unit of measure of the product.'), } - res['value'].update({'uos_id': prod.uom_id.id}) - return {'value': res['value'], 'warning': warning} - return res + result['value']['uos_id'] = prod.uom_id.id + if warning: + result['warning'] = warning + return result + + @api.model + def move_line_get(self, invoice_id): + inv = self.env['account.invoice'].browse(invoice_id) + currency = inv.currency_id.with_context(date=inv.date_invoice) + company_currency = inv.company_id.currency_id - def move_line_get(self, cr, uid, invoice_id, context=None): res = [] - tax_obj = self.pool.get('account.tax') - cur_obj = self.pool.get('res.currency') - if context is None: - context = {} - inv = self.pool.get('account.invoice').browse(cr, uid, invoice_id, context=context) - company_currency = self.pool['res.company'].browse(cr, uid, inv.company_id.id).currency_id.id for line in inv.invoice_line: - mres = self.move_line_get_item(cr, uid, line, context) + mres = self.move_line_get_item(line) if not mres: continue res.append(mres) - tax_code_found= False - for tax in tax_obj.compute_all(cr, uid, line.invoice_line_tax_id, - (line.price_unit * (1.0 - (line['discount'] or 0.0) / 100.0)), - line.quantity, line.product_id, - inv.partner_id)['taxes']: - + tax_code_found = False + taxes = line.invoice_line_tax_id.compute_all( + (line.price_unit * (1.0 - (line.discount or 0.0) / 100.0)), + line.quantity, line.product_id, inv.partner_id)['taxes'] + for tax in taxes: if inv.type in ('out_invoice', 'in_invoice'): tax_code_id = tax['base_code_id'] tax_amount = 
line.price_subtotal * tax['base_sign'] @@ -1587,7 +1410,7 @@ class account_invoice_line(osv.osv): if tax_code_found: if not tax_code_id: continue - res.append(self.move_line_get_item(cr, uid, line, context)) + res.append(dict(mres)) res[-1]['price'] = 0.0 res[-1]['account_analytic_id'] = False elif not tax_code_id: @@ -1595,143 +1418,134 @@ class account_invoice_line(osv.osv): tax_code_found = True res[-1]['tax_code_id'] = tax_code_id - res[-1]['tax_amount'] = cur_obj.compute(cr, uid, inv.currency_id.id, company_currency, tax_amount, context={'date': inv.date_invoice}) + res[-1]['tax_amount'] = currency.compute(tax_amount, company_currency) + return res - def move_line_get_item(self, cr, uid, line, context=None): + @api.model + def move_line_get_item(self, line): return { - 'type':'src', + 'type': 'src', 'name': line.name.split('\n')[0][:64], - 'price_unit':line.price_unit, - 'quantity':line.quantity, - 'price':line.price_subtotal, - 'account_id':line.account_id.id, - 'product_id':line.product_id.id, - 'uos_id':line.uos_id.id, - 'account_analytic_id':line.account_analytic_id.id, - 'taxes':line.invoice_line_tax_id, + 'price_unit': line.price_unit, + 'quantity': line.quantity, + 'price': line.price_subtotal, + 'account_id': line.account_id.id, + 'product_id': line.product_id.id, + 'uos_id': line.uos_id.id, + 'account_analytic_id': line.account_analytic_id.id, + 'taxes': line.invoice_line_tax_id, } + # # Set the tax field according to the account and the fiscal position # - def onchange_account_id(self, cr, uid, ids, product_id, partner_id, inv_type, fposition_id, account_id): + @api.multi + def onchange_account_id(self, product_id, partner_id, inv_type, fposition_id, account_id): if not account_id: return {} unique_tax_ids = [] - fpos = fposition_id and self.pool.get('account.fiscal.position').browse(cr, uid, fposition_id) or False - account = self.pool.get('account.account').browse(cr, uid, account_id) + account = self.env['account.account'].browse(account_id) if 
not product_id: - taxes = account.tax_ids - unique_tax_ids = self.pool.get('account.fiscal.position').map_tax(cr, uid, fpos, taxes) + fpos = self.env['account.fiscal.position'].browse(fposition_id) + unique_tax_ids = fpos.map_tax(account.tax_ids).ids else: - product_change_result = self.product_id_change(cr, uid, ids, product_id, False, type=inv_type, - partner_id=partner_id, fposition_id=fposition_id, - company_id=account.company_id.id) - if product_change_result and 'value' in product_change_result and 'invoice_line_tax_id' in product_change_result['value']: + product_change_result = self.product_id_change(product_id, False, type=inv_type, + partner_id=partner_id, fposition_id=fposition_id, company_id=account.company_id.id) + if 'invoice_line_tax_id' in product_change_result.get('value', {}): unique_tax_ids = product_change_result['value']['invoice_line_tax_id'] - return {'value':{'invoice_line_tax_id': unique_tax_ids}} + return {'value': {'invoice_line_tax_id': unique_tax_ids}} -class account_invoice_tax(osv.osv): +class account_invoice_tax(models.Model): _name = "account.invoice.tax" _description = "Invoice Tax" + _order = 'sequence' - def _count_factor(self, cr, uid, ids, name, args, context=None): - res = {} - for invoice_tax in self.browse(cr, uid, ids, context=context): - res[invoice_tax.id] = { - 'factor_base': 1.0, - 'factor_tax': 1.0, - } - if invoice_tax.amount <> 0.0: - factor_tax = invoice_tax.tax_amount / invoice_tax.amount - res[invoice_tax.id]['factor_tax'] = factor_tax - - if invoice_tax.base <> 0.0: - factor_base = invoice_tax.base_amount / invoice_tax.base - res[invoice_tax.id]['factor_base'] = factor_base - - return res - - _columns = { - 'invoice_id': fields.many2one('account.invoice', 'Invoice Line', ondelete='cascade', select=True), - 'name': fields.char('Tax Description', required=True), - 'account_id': fields.many2one('account.account', 'Tax Account', required=True, domain=[('type','<>','view'),('type','<>','income'), ('type', '<>', 
'closed')]), - 'account_analytic_id': fields.many2one('account.analytic.account', 'Analytic account'), - 'base': fields.float('Base', digits_compute=dp.get_precision('Account')), - 'amount': fields.float('Amount', digits_compute=dp.get_precision('Account')), - 'manual': fields.boolean('Manual'), - 'sequence': fields.integer('Sequence', help="Gives the sequence order when displaying a list of invoice tax."), - 'base_code_id': fields.many2one('account.tax.code', 'Base Code', help="The account basis of the tax declaration."), - 'base_amount': fields.float('Base Code Amount', digits_compute=dp.get_precision('Account')), - 'tax_code_id': fields.many2one('account.tax.code', 'Tax Code', help="The tax basis of the tax declaration."), - 'tax_amount': fields.float('Tax Code Amount', digits_compute=dp.get_precision('Account')), - 'company_id': fields.related('account_id', 'company_id', type='many2one', relation='res.company', string='Company', store=True, readonly=True), - 'factor_base': fields.function(_count_factor, string='Multipication factor for Base code', type='float', multi="all"), - 'factor_tax': fields.function(_count_factor, string='Multipication factor Tax code', type='float', multi="all") - } - - def base_change(self, cr, uid, ids, base, currency_id=False, company_id=False, date_invoice=False): - cur_obj = self.pool.get('res.currency') - company_obj = self.pool.get('res.company') - company_currency = False - factor = 1 - if ids: - factor = self.read(cr, uid, ids[0], ['factor_base'])['factor_base'] - if company_id: - company_currency = company_obj.read(cr, uid, [company_id], ['currency_id'])[0]['currency_id'][0] - if currency_id and company_currency: - base = cur_obj.compute(cr, uid, currency_id, company_currency, base*factor, context={'date': date_invoice or time.strftime('%Y-%m-%d')}, round=False) - return {'value': {'base_amount':base}} - - def amount_change(self, cr, uid, ids, amount, currency_id=False, company_id=False, date_invoice=False): - cur_obj = 
self.pool.get('res.currency') - company_obj = self.pool.get('res.company') - company_currency = False - factor = 1 - if ids: - factor = self.read(cr, uid, ids[0], ['factor_tax'])['factor_tax'] - if company_id: - company_currency = company_obj.read(cr, uid, [company_id], ['currency_id'])[0]['currency_id'][0] - if currency_id and company_currency: - amount = cur_obj.compute(cr, uid, currency_id, company_currency, amount*factor, context={'date': date_invoice or time.strftime('%Y-%m-%d')}, round=False) + @api.one + @api.depends('base', 'base_amount', 'amount', 'tax_amount') + def _compute_factors(self): + self.factor_base = self.base_amount / self.base if self.base else 1.0 + self.factor_tax = self.tax_amount / self.amount if self.amount else 1.0 + + invoice_id = fields.Many2one('account.invoice', string='Invoice Line', + ondelete='cascade', index=True) + name = fields.Char(string='Tax Description', + required=True) + account_id = fields.Many2one('account.account', string='Tax Account', + required=True, domain=[('type', 'not in', ['view', 'income', 'closed'])]) + account_analytic_id = fields.Many2one('account.analytic.account', string='Analytic account') + base = fields.Float(string='Base', digits=dp.get_precision('Account')) + amount = fields.Float(string='Amount', digits=dp.get_precision('Account')) + manual = fields.Boolean(string='Manual', default=True) + sequence = fields.Integer(string='Sequence', + help="Gives the sequence order when displaying a list of invoice tax.") + base_code_id = fields.Many2one('account.tax.code', string='Base Code', + help="The account basis of the tax declaration.") + base_amount = fields.Float(string='Base Code Amount', digits=dp.get_precision('Account'), + default=0.0) + tax_code_id = fields.Many2one('account.tax.code', string='Tax Code', + help="The tax basis of the tax declaration.") + tax_amount = fields.Float(string='Tax Code Amount', digits=dp.get_precision('Account'), + default=0.0) + + company_id = 
fields.Many2one('res.company', string='Company', + related='account_id.company_id', store=True, readonly=True) + factor_base = fields.Float(string='Multipication factor for Base code', + compute='_compute_factors') + factor_tax = fields.Float(string='Multipication factor Tax code', + compute='_compute_factors') + + @api.multi + def base_change(self, base, currency_id=False, company_id=False, date_invoice=False): + factor = self.factor_base if self else 1 + company = self.env['res.company'].browse(company_id) + if currency_id and company.currency_id: + currency = self.env['res.currency'].browse(currency_id) + currency = currency.with_context(date=date_invoice or fields.Date.today()) + base = currency.compute(base * factor, company.currency_id, round=False) + return {'value': {'base_amount': base}} + + @api.multi + def amount_change(self, amount, currency_id=False, company_id=False, date_invoice=False): + factor = self.factor_tax if self else 1 + company = self.env['res.company'].browse(company_id) + if currency_id and company.currency_id: + currency = self.env['res.currency'].browse(currency_id) + currency = currency.with_context(date=date_invoice or fields.Date.today()) + amount = currency.compute(amount * factor, company.currency_id, round=False) return {'value': {'tax_amount': amount}} - _order = 'sequence' - _defaults = { - 'manual': 1, - 'base_amount': 0.0, - 'tax_amount': 0.0, - } - def compute(self, cr, uid, invoice_id, context=None): + @api.v8 + def compute(self, invoice): tax_grouped = {} - tax_obj = self.pool.get('account.tax') - cur_obj = self.pool.get('res.currency') - inv = self.pool.get('account.invoice').browse(cr, uid, invoice_id, context=context) - cur = inv.currency_id - company_currency = self.pool['res.company'].browse(cr, uid, inv.company_id.id).currency_id.id - for line in inv.invoice_line: - for tax in tax_obj.compute_all(cr, uid, line.invoice_line_tax_id, (line.price_unit* (1-(line.discount or 0.0)/100.0)), line.quantity, line.product_id, 
inv.partner_id)['taxes']: - val={} - val['invoice_id'] = inv.id - val['name'] = tax['name'] - val['amount'] = tax['amount'] - val['manual'] = False - val['sequence'] = tax['sequence'] - val['base'] = cur_obj.round(cr, uid, cur, tax['price_unit'] * line['quantity']) - - if inv.type in ('out_invoice','in_invoice'): + currency = invoice.currency_id.with_context(date=invoice.date_invoice or fields.Date.today()) + company_currency = invoice.company_id.currency_id + for line in invoice.invoice_line: + taxes = line.invoice_line_tax_id.compute_all( + (line.price_unit * (1 - (line.discount or 0.0) / 100.0)), + line.quantity, line.product_id, invoice.partner_id)['taxes'] + for tax in taxes: + val = { + 'invoice_id': invoice.id, + 'name': tax['name'], + 'amount': tax['amount'], + 'manual': False, + 'sequence': tax['sequence'], + 'base': currency.round(tax['price_unit'] * line['quantity']), + } + if invoice.type in ('out_invoice','in_invoice'): val['base_code_id'] = tax['base_code_id'] val['tax_code_id'] = tax['tax_code_id'] - val['base_amount'] = cur_obj.compute(cr, uid, inv.currency_id.id, company_currency, val['base'] * tax['base_sign'], context={'date': inv.date_invoice or time.strftime('%Y-%m-%d')}, round=False) - val['tax_amount'] = cur_obj.compute(cr, uid, inv.currency_id.id, company_currency, val['amount'] * tax['tax_sign'], context={'date': inv.date_invoice or time.strftime('%Y-%m-%d')}, round=False) + val['base_amount'] = currency.compute(val['base'] * tax['base_sign'], company_currency, round=False) + val['tax_amount'] = currency.compute(val['amount'] * tax['tax_sign'], company_currency, round=False) val['account_id'] = tax['account_collected_id'] or line.account_id.id val['account_analytic_id'] = tax['account_analytic_collected_id'] else: val['base_code_id'] = tax['ref_base_code_id'] val['tax_code_id'] = tax['ref_tax_code_id'] - val['base_amount'] = cur_obj.compute(cr, uid, inv.currency_id.id, company_currency, val['base'] * tax['ref_base_sign'], context={'date': 
inv.date_invoice or time.strftime('%Y-%m-%d')}, round=False) - val['tax_amount'] = cur_obj.compute(cr, uid, inv.currency_id.id, company_currency, val['amount'] * tax['ref_tax_sign'], context={'date': inv.date_invoice or time.strftime('%Y-%m-%d')}, round=False) + val['base_amount'] = currency.compute(val['base'] * tax['ref_base_sign'], company_currency, round=False) + val['tax_amount'] = currency.compute(val['amount'] * tax['ref_tax_sign'], company_currency, round=False) val['account_id'] = tax['account_paid_id'] or line.account_id.id val['account_analytic_id'] = tax['account_analytic_paid_id'] @@ -1739,46 +1553,55 @@ class account_invoice_tax(osv.osv): if not key in tax_grouped: tax_grouped[key] = val else: - tax_grouped[key]['amount'] += val['amount'] tax_grouped[key]['base'] += val['base'] + tax_grouped[key]['amount'] += val['amount'] tax_grouped[key]['base_amount'] += val['base_amount'] tax_grouped[key]['tax_amount'] += val['tax_amount'] for t in tax_grouped.values(): - t['base'] = cur_obj.round(cr, uid, cur, t['base']) - t['amount'] = cur_obj.round(cr, uid, cur, t['amount']) - t['base_amount'] = cur_obj.round(cr, uid, cur, t['base_amount']) - t['tax_amount'] = cur_obj.round(cr, uid, cur, t['tax_amount']) + t['base'] = currency.round(t['base']) + t['amount'] = currency.round(t['amount']) + t['base_amount'] = currency.round(t['base_amount']) + t['tax_amount'] = currency.round(t['tax_amount']) + return tax_grouped - def move_line_get(self, cr, uid, invoice_id): + @api.v7 + def compute(self, cr, uid, invoice_id, context=None): + recs = self.browse(cr, uid, [], context) + invoice = recs.env['account.invoice'].browse(invoice_id) + return recs.compute(invoice) + + @api.model + def move_line_get(self, invoice_id): res = [] - cr.execute('SELECT * FROM account_invoice_tax WHERE invoice_id=%s', (invoice_id,)) - for t in cr.dictfetchall(): - if not t['amount'] \ - and not t['tax_code_id'] \ - and not t['tax_amount']: + self._cr.execute( + 'SELECT * FROM account_invoice_tax 
WHERE invoice_id = %s', + (invoice_id,) + ) + for row in self._cr.dictfetchall(): + if not (row['amount'] or row['tax_code_id'] or row['tax_amount']): continue res.append({ - 'type':'tax', - 'name':t['name'], - 'price_unit': t['amount'], + 'type': 'tax', + 'name': row['name'], + 'price_unit': row['amount'], 'quantity': 1, - 'price': t['amount'] or 0.0, - 'account_id': t['account_id'], - 'tax_code_id': t['tax_code_id'], - 'tax_amount': t['tax_amount'], - 'account_analytic_id': t['account_analytic_id'], + 'price': row['amount'] or 0.0, + 'account_id': row['account_id'], + 'tax_code_id': row['tax_code_id'], + 'tax_amount': row['tax_amount'], + 'account_analytic_id': row['account_analytic_id'], }) return res -class res_partner(osv.osv): - """ Inherits partner and adds invoice information in the partner form """ +class res_partner(models.Model): + # Inherits partner and adds invoice information in the partner form _inherit = 'res.partner' - _columns = { - 'invoice_ids': fields.one2many('account.invoice', 'partner_id', 'Invoices', readonly=True), - } + + invoice_ids = fields.One2many('account.invoice', 'partner_id', string='Invoices', + readonly=True) def _find_accounting_partner(self, partner): ''' @@ -1786,21 +1609,18 @@ class res_partner(osv.osv): ''' return partner.commercial_partner_id - def copy(self, cr, uid, id, default=None, context=None): - default = default or {} - default.update({'invoice_ids' : []}) - return super(res_partner, self).copy(cr, uid, id, default, context) - - -class mail_compose_message(osv.Model): +class mail_compose_message(models.Model): _inherit = 'mail.compose.message' - def send_mail(self, cr, uid, ids, context=None): - context = context or {} - if context.get('default_model') == 'account.invoice' and context.get('default_res_id') and context.get('mark_invoice_as_sent'): - context = dict(context, mail_post_autofollow=True) - self.pool.get('account.invoice').write(cr, uid, [context['default_res_id']], {'sent': True}, context=context) - 
self.pool.get('account.invoice').message_post(cr, uid, [context['default_res_id']], body=_("Invoice sent"), context=context) - return super(mail_compose_message, self).send_mail(cr, uid, ids, context=context) + @api.multi + def send_mail(self): + context = self._context + if context.get('default_model') == 'account.invoice' and \ + context.get('default_res_id') and context.get('mark_invoice_as_sent'): + invoice = self.env['account.invoice'].browse(context['default_res_id']) + invoice = invoice.with_context(mail_post_autofollow=True) + self.write({'sent': True}) + self.message_post(body=_("Invoice sent")) + return super(mail_compose_message, self).send_mail() # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: diff --git a/addons/account/account_invoice_view.xml b/addons/account/account_invoice_view.xml index ef4edef7e149b913e05b30c6061682a4e9639e8e..64159dab18dc65c0ce31d37cee6e51c7e66baf7c 100644 --- a/addons/account/account_invoice_view.xml +++ b/addons/account/account_invoice_view.xml @@ -183,7 +183,7 @@ <field domain="[('company_id', '=', company_id), ('type', '=', 'payable')]" name="account_id" groups="account.group_account_user"/> <field name="journal_id" groups="account.group_account_user" - on_change="onchange_journal_id(journal_id, context)" options="{'no_create': True}"/> + on_change="onchange_journal_id(journal_id)" options="{'no_create': True}"/> <field name="currency_id" groups="base.group_multi_currency"/> <field name="check_total" groups="account.group_supplier_inv_check_total"/> </group> @@ -258,7 +258,7 @@ <group> <field name="move_id" groups="account.group_account_user"/> <field name="period_id" domain="[('state', '=', 'draft'), ('company_id', '=', company_id)]" groups="account.group_account_user"/> - <field name="company_id" on_change="onchange_company_id(company_id,partner_id,type,invoice_line,currency_id,context)" widget="selection" groups="base.group_multi_company"/> + <field name="company_id" 
on_change="onchange_company_id(company_id,partner_id,type,invoice_line,currency_id)" widget="selection" groups="base.group_multi_company"/> </group> </group> </page> @@ -329,7 +329,7 @@ <group> <field name="date_invoice"/> <field name="journal_id" groups="account.group_account_user" - on_change="onchange_journal_id(journal_id, context)" options="{'no_create': True}"/> + on_change="onchange_journal_id(journal_id)" options="{'no_create': True}"/> <field domain="[('company_id', '=', company_id),('type','=', 'receivable')]" name="account_id" groups="account.group_account_user"/> @@ -393,7 +393,7 @@ <page string="Other Info"> <group col="4"> <group> - <field name="company_id" on_change="onchange_company_id(company_id,partner_id,type,invoice_line,currency_id,context)" widget="selection" groups="base.group_multi_company"/> + <field name="company_id" on_change="onchange_company_id(company_id,partner_id,type,invoice_line,currency_id)" widget="selection" groups="base.group_multi_company"/> <field name="user_id" groups="base.group_user" context="{'default_groups_ref': ['base.group_user', 'base.group_partner_manager', 'account.group_account_invoice']}"/> <field domain="[('partner_id.ref_companies', 'in', [company_id])]" name="partner_bank_id"/> <field name="period_id" domain="[('state', '=', 'draft'), ('company_id', '=', company_id)]" diff --git a/addons/account/account_move_line.py b/addons/account/account_move_line.py index 444e14a715a110a228ae8d6df5e24a3b6e42d82c..cdcf0190f468b554406b82395da970f71a04baf5 100644 --- a/addons/account/account_move_line.py +++ b/addons/account/account_move_line.py @@ -19,15 +19,12 @@ # ############################################################################## -import sys import time from datetime import datetime -from operator import itemgetter -from lxml import etree from openerp import workflow -from openerp.osv import fields, osv, orm +from openerp.osv import fields, osv from openerp.tools.translate import _ import 
openerp.addons.decimal_precision as dp from openerp import tools @@ -42,8 +39,7 @@ class account_move_line(osv.osv): fiscalperiod_obj = self.pool.get('account.period') account_obj = self.pool.get('account.account') fiscalyear_ids = [] - if context is None: - context = {} + context = dict(context or {}) initial_bal = context.get('initial_bal', False) company_clause = " " if context.get('company_id', False): @@ -217,17 +213,14 @@ class account_move_line(osv.osv): period_id = context.get('period_id') if type(period_id) == str: ids = period_obj.search(cr, uid, [('name', 'ilike', period_id)]) - context.update({ - 'period_id': ids and ids[0] or False - }) + context = dict(context, period_id=ids and ids[0] or False) return context def _default_get(self, cr, uid, fields, context=None): #default_get should only do the following: # -propose the next amount in debit/credit in order to balance the move # -propose the next account from the journal (default debit/credit account) accordingly - if context is None: - context = {} + context = dict(context or {}) account_obj = self.pool.get('account.account') period_obj = self.pool.get('account.period') journal_obj = self.pool.get('account.journal') @@ -456,10 +449,10 @@ class account_move_line(osv.osv): 'move_id': fields.many2one('account.move', 'Journal Entry', ondelete="cascade", help="The move of this entry line.", select=2, required=True), 'narration': fields.related('move_id','narration', type='text', relation='account.move', string='Internal Note'), 'ref': fields.related('move_id', 'ref', string='Reference', type='char', store=True), - 'statement_id': fields.many2one('account.bank.statement', 'Statement', help="The bank statement used for bank reconciliation", select=1), - 'reconcile_id': fields.many2one('account.move.reconcile', 'Reconcile', readonly=True, ondelete='set null', select=2), - 'reconcile_partial_id': fields.many2one('account.move.reconcile', 'Partial Reconcile', readonly=True, ondelete='set null', select=2), - 
'reconcile': fields.function(_get_reconcile, type='char', string='Reconcile Ref', store={ + 'statement_id': fields.many2one('account.bank.statement', 'Statement', help="The bank statement used for bank reconciliation", select=1, copy=False), + 'reconcile_id': fields.many2one('account.move.reconcile', 'Reconcile', readonly=True, ondelete='set null', select=2, copy=False), + 'reconcile_partial_id': fields.many2one('account.move.reconcile', 'Partial Reconcile', readonly=True, ondelete='set null', select=2, copy=False), + 'reconcile_ref': fields.function(_get_reconcile, type='char', string='Reconcile Ref', oldname='reconcile', store={ 'account.move.line': (lambda self, cr, uid, ids, c={}: ids, ['reconcile_id','reconcile_partial_id'], 50),'account.move.reconcile': (_get_move_from_reconcile, None, 50)}), 'amount_currency': fields.float('Amount Currency', help="The amount expressed in an optional other currency if it is a multi-currency entry.", digits_compute=dp.get_precision('Account')), 'amount_residual_currency': fields.function(_amount_residual, string='Residual Amount in Currency', multi="residual", help="The residual amount on a receivable or payable of a journal entry expressed in its currency (maybe different of the company currency)."), @@ -484,13 +477,13 @@ class account_move_line(osv.osv): 'analytic_lines': fields.one2many('account.analytic.line', 'move_id', 'Analytic lines'), 'centralisation': fields.selection([('normal','Normal'),('credit','Credit Centralisation'),('debit','Debit Centralisation'),('currency','Currency Adjustment')], 'Centralisation', size=8), 'balance': fields.function(_balance, fnct_search=_balance_search, string='Balance'), - 'state': fields.selection([('draft','Unbalanced'), ('valid','Balanced')], 'Status', readonly=True), + 'state': fields.selection([('draft','Unbalanced'), ('valid','Balanced')], 'Status', readonly=True, copy=False), 'tax_code_id': fields.many2one('account.tax.code', 'Tax Account', help="The Account can either be a base 
tax code or a tax code account."), 'tax_amount': fields.float('Tax/Base Amount', digits_compute=dp.get_precision('Account'), select=True, help="If the Tax account is a tax code account, this field will contain the taxed amount.If the tax account is base tax code, "\ "this field will contain the basic amount(without tax)."), 'invoice': fields.function(_invoice, string='Invoice', type='many2one', relation='account.invoice', fnct_search=_invoice_search), - 'account_tax_id':fields.many2one('account.tax', 'Tax'), + 'account_tax_id':fields.many2one('account.tax', 'Tax', copy=False), 'analytic_account_id': fields.many2one('account.analytic.account', 'Analytic Account'), 'company_id': fields.related('account_id', 'company_id', type='many2one', relation='res.company', string='Company', store=True, readonly=True) @@ -666,6 +659,7 @@ class account_move_line(osv.osv): if (amount>0) and journal: x = journal_obj.browse(cr, uid, journal).default_credit_account_id if x: acc = x + context = dict(context) context.update({ 'date': date, 'res.currency.compute.account': acc, @@ -1006,12 +1000,8 @@ class account_move_line(osv.osv): period_pool = self.pool.get('account.period') pids = period_pool.find(cr, user, date, context=context) if pids: - res.update({ - 'period_id':pids[0] - }) - context.update({ - 'period_id':pids[0] - }) + res.update({'period_id':pids[0]}) + context = dict(context, period_id=pids[0]) return { 'value':res, 'context':context, @@ -1158,15 +1148,14 @@ class account_move_line(osv.osv): move_obj = self.pool.get('account.move') cur_obj = self.pool.get('res.currency') journal_obj = self.pool.get('account.journal') - if context is None: - context = {} + context = dict(context or {}) if vals.get('move_id', False): move = self.pool.get('account.move').browse(cr, uid, vals['move_id'], context=context) if move.company_id: vals['company_id'] = move.company_id.id if move.date and not vals.get('date'): vals['date'] = move.date - if ('account_id' in vals) and not 
account_obj.read(cr, uid, vals['account_id'], ['active'])['active']: + if ('account_id' in vals) and not account_obj.read(cr, uid, [vals['account_id']], ['active'])[0]['active']: raise osv.except_osv(_('Bad Account!'), _('You cannot use an inactive account.')) if 'journal_id' in vals and vals['journal_id']: context['journal_id'] = vals['journal_id'] diff --git a/addons/account/account_view.xml b/addons/account/account_view.xml index c447576b34865eb3c7c93ab901b0e0037b975fd7..5e80bfdadfe63759ef0b8fb2b042cfc724b202c9 100644 --- a/addons/account/account_view.xml +++ b/addons/account/account_view.xml @@ -1209,7 +1209,7 @@ <field name="debit" sum="Total Debit"/> <field name="credit" sum="Total Credit"/> <field name="date_maturity" invisible="context.get('journal_type', False) not in ['sale','sale_refund','purchase','purchase_refund']"/> - <field name="reconcile"/> + <field name="reconcile_ref"/> <field name="invoice" invisible="1"/> <field name="amount_currency" readonly="True" invisible="not context.get('currency',False)"/> <field name="currency_id" readonly="True" invisible="not context.get('currency',False)" /> diff --git a/addons/account/edi/invoice.py b/addons/account/edi/invoice.py index c82d041ef85eb77e729fae4032a5a4e0a4478007..ad3a5bdfd5826fc799783b0b36a50c9bd4f42085 100644 --- a/addons/account/edi/invoice.py +++ b/addons/account/edi/invoice.py @@ -208,7 +208,7 @@ class account_invoice(osv.osv, EDIMixin): edi_document.pop('partner_ref', None) # journal_id: should be selected based on type: simply put the 'type' in the context when calling create(), will be selected correctly - context.update(type=invoice_type) + context = dict(context, type=invoice_type) # for invoice lines, the account_id value should be taken from the product's default, i.e. from the default category, as it will not be provided. 
for edi_invoice_line in edi_document['invoice_line']: diff --git a/addons/account/ir_sequence.py b/addons/account/ir_sequence.py index d3615a847b7d5616f39ba9ccd174ac3b90232e5e..19d7ee0cf8779df9a0236c942ad3215fff54ec48 100644 --- a/addons/account/ir_sequence.py +++ b/addons/account/ir_sequence.py @@ -18,7 +18,7 @@ # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## - +from openerp import api from openerp.osv import fields, osv class ir_sequence_fiscalyear(osv.osv): @@ -43,9 +43,10 @@ class ir_sequence(osv.osv): _inherit = 'ir.sequence' _columns = { 'fiscal_ids': fields.one2many('account.sequence.fiscalyear', - 'sequence_main_id', 'Sequences') + 'sequence_main_id', 'Sequences', copy=True) } + @api.cr_uid_ids_context def _next(self, cr, uid, seq_ids, context=None): if context is None: context = {} diff --git a/addons/account/partner.py b/addons/account/partner.py index 70f5280b86190d16e4d472f63eff963d990e60e4..afbd9114c107f574fb31ac2e9416eaefb31e166c 100644 --- a/addons/account/partner.py +++ b/addons/account/partner.py @@ -23,6 +23,7 @@ from operator import itemgetter import time from openerp.osv import fields, osv +from openerp import api class account_fiscal_position(osv.osv): _name = 'account.fiscal.position' @@ -33,8 +34,8 @@ class account_fiscal_position(osv.osv): 'name': fields.char('Fiscal Position', required=True), 'active': fields.boolean('Active', help="By unchecking the active field, you may hide a fiscal position without deleting it."), 'company_id': fields.many2one('res.company', 'Company'), - 'account_ids': fields.one2many('account.fiscal.position.account', 'position_id', 'Account Mapping'), - 'tax_ids': fields.one2many('account.fiscal.position.tax', 'position_id', 'Tax Mapping'), + 'account_ids': fields.one2many('account.fiscal.position.account', 'position_id', 'Account Mapping', copy=True), + 'tax_ids': fields.one2many('account.fiscal.position.tax', 
'position_id', 'Tax Mapping', copy=True), 'note': fields.text('Notes'), 'auto_apply': fields.boolean('Automatic', help="Apply automatically this fiscal position."), 'vat_required': fields.boolean('VAT required', help="Apply only if partner has a VAT number."), @@ -46,6 +47,7 @@ class account_fiscal_position(osv.osv): 'active': True, } + @api.v7 def map_tax(self, cr, uid, fposition_id, taxes, context=None): if not taxes: return [] @@ -63,6 +65,20 @@ class account_fiscal_position(osv.osv): result.add(t.id) return list(result) + @api.v8 + def map_tax(self, taxes): + result = taxes.browse() + for tax in taxes: + found = False + for t in self.tax_ids: + if t.tax_src_id == tax: + result |= t.tax_dest_id + found = True + if not found: + result |= tax + return result + + @api.v7 def map_account(self, cr, uid, fposition_id, account_id, context=None): if not fposition_id: return account_id @@ -72,6 +88,13 @@ class account_fiscal_position(osv.osv): break return account_id + @api.v8 + def map_account(self, account): + for pos in self.account_ids: + if pos.account_src_id == account: + return pos.account_dest_id + return account + def get_fiscal_position(self, cr, uid, company_id, partner_id, delivery_id=None, context=None): if not partner_id: return False @@ -279,7 +302,14 @@ class res_partner(osv.osv): help="This payment term will be used instead of the default one for purchase orders and supplier invoices"), 'ref_companies': fields.one2many('res.company', 'partner_id', 'Companies that refers to partner'), - 'last_reconciliation_date': fields.datetime('Latest Full Reconciliation Date', help='Date on which the partner accounting entries were fully reconciled last time. It differs from the last date where a reconciliation has been made for this partner, as here we depict the fact that nothing more was to be reconciled at this date. 
This can be achieved in 2 different ways: either the last unreconciled debit/credit entry of this partner was reconciled, either the user pressed the button "Nothing more to reconcile" during the manual reconciliation process.') + 'last_reconciliation_date': fields.datetime( + 'Latest Full Reconciliation Date', copy=False, + help='Date on which the partner accounting entries were fully reconciled last time. ' + 'It differs from the last date where a reconciliation has been made for this partner, ' + 'as here we depict the fact that nothing more was to be reconciled at this date. ' + 'This can be achieved in 2 different ways: either the last unreconciled debit/credit ' + 'entry of this partner was reconciled, either the user pressed the button ' + '"Nothing more to reconcile" during the manual reconciliation process.') } def _commercial_fields(self, cr, uid, context=None): diff --git a/addons/account/report/account_journal.py b/addons/account/report/account_journal.py index 13dfd538f4a02292060903544294173ef6916877..1854e6601aba93c3130aae09b29b2d48f815e9be 100644 --- a/addons/account/report/account_journal.py +++ b/addons/account/report/account_journal.py @@ -156,6 +156,7 @@ class journal_print(report_sxw.rml_parse, common_report_header): journal_id = [journal_id] obj_mline = self.pool.get('account.move.line') self.cr.execute('update account_journal_period set state=%s where journal_id IN %s and period_id=%s and state=%s', ('printed', self.journal_ids, period_id, 'draft')) + self.pool.get('account.journal.period').invalidate_cache(self.cr, self.uid, ['state'], context=self.context) move_state = ['draft','posted'] if self.target_move == 'posted': diff --git a/addons/account/report/account_report.py b/addons/account/report/account_report.py index fbe23d2c6dcc7d3e3d41932d45243b51c0d04c3a..8b65071915151d43744b5b16db0dd2b614a02072 100644 --- a/addons/account/report/account_report.py +++ b/addons/account/report/account_report.py @@ -20,7 +20,7 @@ 
############################################################################## import time -from datetime import datetime +import datetime from dateutil.relativedelta import relativedelta from openerp import tools @@ -91,7 +91,7 @@ class report_aged_receivable(osv.osv): """ if context is None:context = {} if not self.called: - self.init(cr, user) + self._init(cr, user) self.called = True # To make sure that init doesn't get called multiple times res = super(report_aged_receivable, self).fields_view_get(cr, user, view_id, view_type, context, toolbar=toolbar, submenu=submenu) @@ -116,29 +116,37 @@ class report_aged_receivable(osv.osv): 'balance': fields.function(_calc_bal, string='Balance', readonly=True), } - def init(self, cr, uid=1): + def init(self, cr): + return self._init(cr, 1) + + def _init(self, cr, uid): """ This view will be used in dashboard The reason writing this code here is, we need to check date range from today to first date of fiscal year. """ pool_obj_fy = self.pool['account.fiscalyear'] - today = time.strftime('%Y-%m-%d') + current_date = datetime.date.today() fy_id = pool_obj_fy.find(cr, uid, exception=False) - LIST_RANGES = [] + names = [] + + def add(names, start_on, stop_on): + names.append(start_on.strftime("%Y-%m-%d") + ' to ' + stop_on.strftime('%Y-%m-%d')) + return names + if fy_id: - fy_start_date = pool_obj_fy.read(cr, uid, fy_id, ['date_start'])['date_start'] - fy_start_date = datetime.strptime(fy_start_date, '%Y-%m-%d') - last_month_date = datetime.strptime(today, '%Y-%m-%d') - relativedelta(months=1) + fiscal_year = pool_obj_fy.browse(cr, uid, fy_id) + fy_start_date = datetime.datetime.strptime(fiscal_year.date_start, '%Y-%m-%d').date() + last_month_date = current_date - relativedelta(months=1) while (last_month_date > fy_start_date): - LIST_RANGES.append(today + " to " + last_month_date.strftime('%Y-%m-%d')) - today = (last_month_date- relativedelta(days=1)).strftime('%Y-%m-%d') - last_month_date = datetime.strptime(today, 
'%Y-%m-%d') - relativedelta(months=1) + add(names, current_date, last_month_date) + current_date = last_month_date - relativedelta(days=1) + last_month_date = current_date - relativedelta(months=1) - LIST_RANGES.append(today +" to " + fy_start_date.strftime('%Y-%m-%d')) + add(names, current_date, fy_start_date) cr.execute('delete from temp_range') - for range in LIST_RANGES: - self.pool['temp.range'].create(cr, uid, {'name':range}) + for name in names: + self.pool['temp.range'].create(cr, uid, {'name':name}) cr.execute(""" create or replace view report_aged_receivable as ( diff --git a/addons/account/wizard/account_automatic_reconcile.py b/addons/account/wizard/account_automatic_reconcile.py index 2ad701a9bf5227420e869a5f5225cc7e1dc5f968..3ddad1c050c5368b66d442e2ab0e91811750dfe5 100644 --- a/addons/account/wizard/account_automatic_reconcile.py +++ b/addons/account/wizard/account_automatic_reconcile.py @@ -239,7 +239,7 @@ class account_automatic_reconcile(osv.osv_memory): (account_id.id,)) additional_unrec = cr.fetchone()[0] unreconciled = unreconciled + additional_unrec - context.update({'reconciled': reconciled, 'unreconciled': unreconciled}) + context = dict(context, reconciled=reconciled, unreconciled=unreconciled) model_data_ids = obj_model.search(cr,uid,[('model','=','ir.ui.view'),('name','=','account_automatic_reconcile_view1')]) resource_id = obj_model.read(cr, uid, model_data_ids, fields=['res_id'])[0]['res_id'] return { diff --git a/addons/account/wizard/account_chart.py b/addons/account/wizard/account_chart.py index de652a947f09ae93e6fa65d296804f2027012fe4..73be9040d22c1fa898f723c040acd7cf6f37d20d 100644 --- a/addons/account/wizard/account_chart.py +++ b/addons/account/wizard/account_chart.py @@ -85,7 +85,7 @@ class account_chart(osv.osv_memory): fy_obj = self.pool.get('account.fiscalyear') if context is None: context = {} - data = self.read(cr, uid, ids, [], context=context)[0] + data = self.read(cr, uid, ids, context=context)[0] result = 
mod_obj.get_object_reference(cr, uid, 'account', 'action_account_tree') id = result and result[1] or False result = act_obj.read(cr, uid, [id], context=context)[0] diff --git a/addons/account/wizard/account_fiscalyear_close.py b/addons/account/wizard/account_fiscalyear_close.py index 3e3bbb5fd9308167abc297a814fdb3c2ce545e21..9ad646df32cc16d623576e344ac358de9a47bfc7 100644 --- a/addons/account/wizard/account_fiscalyear_close.py +++ b/addons/account/wizard/account_fiscalyear_close.py @@ -62,6 +62,7 @@ class account_fiscalyear_close(osv.osv_memory): raise osv.except_osv(_('Warning!'), _('The entries to reconcile should belong to the same company.')) r_id = self.pool.get('account.move.reconcile').create(cr, uid, {'type': 'auto', 'opening_reconciliation': True}) cr.execute('update account_move_line set reconcile_id = %s where id in %s',(r_id, tuple(ids),)) + obj_acc_move_line.invalidate_cache(cr, uid, ['reconcile_id'], ids, context=context) return r_id obj_acc_period = self.pool.get('account.period') @@ -175,6 +176,7 @@ class account_fiscalyear_close(osv.osv_memory): AND b.reconcile_id IN (SELECT DISTINCT(reconcile_id) FROM account_move_line a WHERE a.period_id IN ('''+fy2_period_set+''')))''', (new_journal.id, period.id, period.date_start, move_id, tuple(account_ids),)) + self.invalidate_cache(cr, uid, context=context) #2. report of the accounts with defferal method == 'detail' cr.execute(''' @@ -203,7 +205,7 @@ class account_fiscalyear_close(osv.osv_memory): WHERE account_id IN %s AND ''' + query_line + ''') ''', (new_journal.id, period.id, period.date_start, move_id, tuple(account_ids),)) - + self.invalidate_cache(cr, uid, context=context) #3. 
report of the accounts with defferal method == 'balance' cr.execute(''' @@ -243,6 +245,7 @@ class account_fiscalyear_close(osv.osv_memory): 'draft') if query_2nd_part: cr.execute(query_1st_part + query_2nd_part, tuple(query_2nd_part_args)) + self.invalidate_cache(cr, uid, context=context) #validate and centralize the opening move obj_acc_move.validate(cr, uid, [move_id], context=context) @@ -267,6 +270,7 @@ class account_fiscalyear_close(osv.osv_memory): cr.execute('UPDATE account_fiscalyear ' \ 'SET end_journal_period_id = %s ' \ 'WHERE id = %s', (ids[0], old_fyear.id)) + obj_acc_fiscalyear.invalidate_cache(cr, uid, ['end_journal_period_id'], [old_fyear.id], context=context) return {'type': 'ir.actions.act_window_close'} diff --git a/addons/account/wizard/account_fiscalyear_close_state.py b/addons/account/wizard/account_fiscalyear_close_state.py index ed84ab65184d11dc61790fe8d8ca1d64209fa5b1..de2aa2134ee18f297d87be15f17139295d14cb71 100644 --- a/addons/account/wizard/account_fiscalyear_close_state.py +++ b/addons/account/wizard/account_fiscalyear_close_state.py @@ -41,6 +41,10 @@ class account_fiscalyear_close_state(osv.osv_memory): @param ids: List of Account fiscalyear close state’s IDs """ + journal_period_obj = self.pool.get('account.journal.period') + period_obj = self.pool.get('account.period') + fiscalyear_obj = self.pool.get('account.fiscalyear') + for data in self.read(cr, uid, ids, context=context): fy_id = data['fy_id'][0] @@ -53,6 +57,7 @@ class account_fiscalyear_close_state(osv.osv_memory): 'WHERE fiscalyear_id = %s', ('done', fy_id)) cr.execute('UPDATE account_fiscalyear ' \ 'SET state = %s WHERE id = %s', ('done', fy_id)) + self.invalidate_cache(cr, uid, context=context) return {'type': 'ir.actions.act_window_close'} diff --git a/addons/account/wizard/account_invoice_refund.py b/addons/account/wizard/account_invoice_refund.py index d2d408dce6811e31bcb0c2ad6e832866610e9c57..0d6d7391ccf6c96087adb6d152502d64247ca476 100644 --- 
a/addons/account/wizard/account_invoice_refund.py +++ b/addons/account/wizard/account_invoice_refund.py @@ -68,10 +68,10 @@ class account_invoice_refund(osv.osv_memory): } def fields_view_get(self, cr, uid, view_id=None, view_type=False, context=None, toolbar=False, submenu=False): - if context is None:context = {} journal_obj = self.pool.get('account.journal') user_obj = self.pool.get('res.users') # remove the entry with key 'form_view_ref', otherwise fields_view_get crashes + context = dict(context or {}) context.pop('form_view_ref', None) res = super(account_invoice_refund,self).fields_view_get(cr, uid, view_id=view_id, view_type=view_type, context=context, toolbar=toolbar, submenu=submenu) type = context.get('type', 'out_invoice') @@ -168,7 +168,7 @@ class account_invoice_refund(osv.osv_memory): to_reconcile_ids.setdefault(line.account_id.id, []).append(line.id) if line.reconcile_id: line.reconcile_id.unlink() - inv_obj.signal_invoice_open(cr, uid, [refund.id]) + refund.signal_workflow('invoice_open') refund = inv_obj.browse(cr, uid, refund_id[0], context=context) for tmpline in refund.move_id.line_id: if tmpline.account_id.id == inv.account_id.id: @@ -212,13 +212,15 @@ class account_invoice_refund(osv.osv_memory): if 'value' in data and data['value']: inv_obj.write(cr, uid, [inv_id], data['value']) created_inv.append(inv_id) + xml_id = (inv.type == 'out_refund') and 'action_invoice_tree1' or \ (inv.type == 'in_refund') and 'action_invoice_tree2' or \ (inv.type == 'out_invoice') and 'action_invoice_tree3' or \ (inv.type == 'in_invoice') and 'action_invoice_tree4' result = mod_obj.get_object_reference(cr, uid, 'account', xml_id) id = result and result[1] or False - result = act_obj.read(cr, uid, id, context=context) + + result = act_obj.read(cr, uid, [id], context=context)[0] invoice_domain = eval(result['domain']) invoice_domain.append(('id', 'in', created_inv)) result['domain'] = invoice_domain diff --git a/addons/account/wizard/account_invoice_state.py 
b/addons/account/wizard/account_invoice_state.py index 1aed2c6c4775bc8e3f716d3d0ea64c25a5922cd5..75b4802bd5809a7b00da693eaf09bd22c7aac5c2 100644 --- a/addons/account/wizard/account_invoice_state.py +++ b/addons/account/wizard/account_invoice_state.py @@ -33,12 +33,13 @@ class account_invoice_confirm(osv.osv_memory): def invoice_confirm(self, cr, uid, ids, context=None): if context is None: context = {} - account_invoice_obj = self.pool['account.invoice'] - data_inv = account_invoice_obj.read(cr, uid, context['active_ids'], ['state'], context=context) - for record in data_inv: - if record['state'] not in ('draft','proforma','proforma2'): + active_ids = context.get('active_ids', []) or [] + + proxy = self.pool['account.invoice'] + for record in proxy.browse(cr, uid, active_ids, context=context): + if record.state not in ('draft', 'proforma', 'proforma2'): raise osv.except_osv(_('Warning!'), _("Selected invoice(s) cannot be confirmed as they are not in 'Draft' or 'Pro-Forma' state.")) - account_invoice_obj.signal_invoice_open(cr, uid, [ record['id'] ]) + record.signal_workflow('invoice_open') return {'type': 'ir.actions.act_window_close'} @@ -55,12 +56,13 @@ class account_invoice_cancel(osv.osv_memory): def invoice_cancel(self, cr, uid, ids, context=None): if context is None: context = {} - account_invoice_obj = self.pool['account.invoice'] - data_inv = account_invoice_obj.read(cr, uid, context['active_ids'], ['state'], context=context) - for record in data_inv: - if record['state'] in ('cancel','paid'): + proxy = self.pool['account.invoice'] + active_ids = context.get('active_ids', []) or [] + + for record in proxy.browse(cr, uid, active_ids, context=context): + if record.state in ('cancel','paid'): raise osv.except_osv(_('Warning!'), _("Selected invoice(s) cannot be cancelled as they are already in 'Cancelled' or 'Done' state.")) - account_invoice_obj.signal_invoice_cancel(cr , uid, [record['id']]) + record.signal_workflow('invoice_cancel') return {'type': 
'ir.actions.act_window_close'} diff --git a/addons/account/wizard/account_open_closed_fiscalyear.py b/addons/account/wizard/account_open_closed_fiscalyear.py index f4e90ae9f2f3f8ffa05a25ad1cdbaed1a5f5cab1..c142dcc3e212bfb75cf6d67fbdc8e35c4223d1d8 100644 --- a/addons/account/wizard/account_open_closed_fiscalyear.py +++ b/addons/account/wizard/account_open_closed_fiscalyear.py @@ -41,6 +41,7 @@ class account_open_closed_fiscalyear(osv.osv_memory): ids_move = move_obj.search(cr, uid, [('journal_id','=',period_journal.journal_id.id),('period_id','=',period_journal.period_id.id)]) if ids_move: cr.execute('delete from account_move where id IN %s', (tuple(ids_move),)) + self.invalidate_cache(cr, uid, context=context) return {'type': 'ir.actions.act_window_close'} diff --git a/addons/account/wizard/account_period_close.py b/addons/account/wizard/account_period_close.py index a50861c65ef47694e4e73c8822cd909b4c5095c5..ada52c389ef79c1f700077dd4401f7fa3d538ea3 100644 --- a/addons/account/wizard/account_period_close.py +++ b/addons/account/wizard/account_period_close.py @@ -39,6 +39,7 @@ class account_period_close(osv.osv_memory): @param uid: the current user’s ID for security checks, @param ids: account period close’s ID or list of IDs """ + journal_period_pool = self.pool.get('account.journal.period') period_pool = self.pool.get('account.period') account_move_obj = self.pool.get('account.move') @@ -52,6 +53,7 @@ class account_period_close(osv.osv_memory): cr.execute('update account_journal_period set state=%s where period_id=%s', (mode, id)) cr.execute('update account_period set state=%s where id=%s', (mode, id)) + self.invalidate_cache(cr, uid, context=context) return {'type': 'ir.actions.act_window_close'} diff --git a/addons/account/wizard/account_reconcile.py b/addons/account/wizard/account_reconcile.py index 804003c975a65977de0489d9a8485bb4e86a9b5e..656661a34b90c57211f48fd3d5156384de62f3eb 100644 --- a/addons/account/wizard/account_reconcile.py +++ 
b/addons/account/wizard/account_reconcile.py @@ -140,6 +140,7 @@ class account_move_line_reconcile_writeoff(osv.osv_memory): return {'type': 'ir.actions.act_window_close'} def trans_rec_reconcile(self, cr, uid, ids, context=None): + context = dict(context or {}) account_move_line_obj = self.pool.get('account.move.line') period_obj = self.pool.get('account.period') if context is None: diff --git a/addons/account/wizard/account_state_open.py b/addons/account/wizard/account_state_open.py index 1950a139983d685d6f00e02699e425f2e314d068..e622a1ea55bb6a11d15194ad6d0253b81ef35961 100644 --- a/addons/account/wizard/account_state_open.py +++ b/addons/account/wizard/account_state_open.py @@ -27,14 +27,16 @@ class account_state_open(osv.osv_memory): _description = 'Account State Open' def change_inv_state(self, cr, uid, ids, context=None): - obj_invoice = self.pool.get('account.invoice') + proxy = self.pool.get('account.invoice') if context is None: context = {} - if 'active_ids' in context: - data_inv = obj_invoice.browse(cr, uid, context['active_ids'][0], context=context) - if data_inv.reconciled: + + active_ids = context.get('active_ids') + if isinstance(active_ids, list): + invoice = proxy.browse(cr, uid, active_ids[0], context=context) + if invoice.reconciled: raise osv.except_osv(_('Warning!'), _('Invoice is already reconciled.')) - obj_invoice.signal_open_test(cr, uid, context['active_ids'][0]) + invoice.signal_workflow('open_test') return {'type': 'ir.actions.act_window_close'} diff --git a/addons/account/wizard/account_statement_from_invoice.py b/addons/account/wizard/account_statement_from_invoice.py index 285b8e336b36ff03e1047ab94e5cee46847def99..de2d44499fd3c0b7c7624dee738c51667d7dfecd 100644 --- a/addons/account/wizard/account_statement_from_invoice.py +++ b/addons/account/wizard/account_statement_from_invoice.py @@ -34,8 +34,7 @@ class account_statement_from_invoice_lines(osv.osv_memory): } def populate_statement(self, cr, uid, ids, context=None): - if context is 
None: - context = {} + context = dict(context or {}) statement_id = context.get('statement_id', False) if not statement_id: return {'type': 'ir.actions.act_window_close'} diff --git a/addons/account/wizard/account_use_model.py b/addons/account/wizard/account_use_model.py index 06f02719065437f5fa2ac15361adade5d11c6c2a..be9944f3db7eaec72a391dea02699a31d7ecdc94 100644 --- a/addons/account/wizard/account_use_model.py +++ b/addons/account/wizard/account_use_model.py @@ -58,7 +58,7 @@ class account_use_model(osv.osv_memory): model_ids = context['active_ids'] move_ids = account_model_obj.generate(cr, uid, model_ids, context=context) - context.update({'move_ids':move_ids}) + context = dict(context, move_ids=move_ids) model_data_ids = mod_obj.search(cr, uid,[('model','=','ir.ui.view'),('name','=','view_move_form')], context=context) resource_id = mod_obj.read(cr, uid, model_data_ids, fields=['res_id'], context=context)[0]['res_id'] return { diff --git a/addons/account_analytic_analysis/account_analytic_analysis.py b/addons/account_analytic_analysis/account_analytic_analysis.py index 9550f77bcab5de52d9cb4858e6580cf18d3c26de..3b5819968a3b27e74640eec850c22d4a791c51f4 100644 --- a/addons/account_analytic_analysis/account_analytic_analysis.py +++ b/addons/account_analytic_analysis/account_analytic_analysis.py @@ -24,7 +24,6 @@ import logging import time from openerp.osv import osv, fields -from openerp.osv.orm import intersect, except_orm import openerp.tools from openerp.tools.translate import _ @@ -518,7 +517,7 @@ class account_analytic_account(osv.osv): 'invoiced_total' : fields.function(_sum_of_fields, type="float",multi="sum_of_all", string="Total Invoiced"), 'remaining_total' : fields.function(_sum_of_fields, type="float",multi="sum_of_all", string="Total Remaining", help="Expectation of remaining income for this contract. 
Computed as the sum of remaining subtotals which, in turn, are computed as the maximum between '(Estimation - Invoiced)' and 'To Invoice' amounts"), 'toinvoice_total' : fields.function(_sum_of_fields, type="float",multi="sum_of_all", string="Total to Invoice", help=" Sum of everything that could be invoiced for this contract."), - 'recurring_invoice_line_ids': fields.one2many('account.analytic.invoice.line', 'analytic_account_id', 'Invoice Lines'), + 'recurring_invoice_line_ids': fields.one2many('account.analytic.invoice.line', 'analytic_account_id', 'Invoice Lines', copy=True), 'recurring_invoices' : fields.boolean('Generate recurring invoices automatically'), 'recurring_rule_type': fields.selection([ ('daily', 'Day(s)'), @@ -595,8 +594,7 @@ class account_analytic_account(osv.osv): return value def cron_account_analytic_account(self, cr, uid, context=None): - if context is None: - context = {} + context = dict(context or {}) remind = {} def fill_remind(key, domain, write_pending=False): @@ -612,7 +610,7 @@ class account_analytic_account(osv.osv): accounts = self.browse(cr, uid, accounts_ids, context=context) for account in accounts: if write_pending: - account.write({'state' : 'pending'}, context=context) + account.write({'state' : 'pending'}) remind_user = remind.setdefault(account.manager_id.id, {}) remind_type = remind_user.setdefault(key, {}) remind_partner = remind_type.setdefault(account.partner_id, []).append(account) diff --git a/addons/account_analytic_plans/account_analytic_plans.py b/addons/account_analytic_plans/account_analytic_plans.py index a8cb69b0de0b65757406b3bace6c5cc92a000fae..510632321d5faab5e41d67f2bc02c2df9e68f1aa 100644 --- a/addons/account_analytic_plans/account_analytic_plans.py +++ b/addons/account_analytic_plans/account_analytic_plans.py @@ -43,8 +43,15 @@ class one2many_mod2(fields.one2many): ids2 = obj.pool[self._obj].search(cr, user, [(self._fields_id,'in',ids),('analytic_account_id','child_of',[acc_id])], limit=self._limit) if ids2 
is None: ids2 = obj.pool[self._obj].search(cr, user, [(self._fields_id,'in',ids)], limit=self._limit) - for r in obj.pool[self._obj]._read_flat(cr, user, ids2, [self._fields_id], context=context, load='_classic_write'): - res[r[self._fields_id]].append( r['id'] ) + + for r in obj.pool[self._obj].read(cr, user, ids2, [self._fields_id], context=context, load='_classic_write'): + key = r[self._fields_id] + if isinstance(key, tuple): + # Read return a tuple in the case where the field is a many2one + # but we want to get the id of this field. + key = key[0] + + res[key].append( r['id'] ) return res class account_analytic_line(osv.osv): @@ -71,7 +78,7 @@ class account_analytic_plan(osv.osv): _description = "Analytic Plan" _columns = { 'name': fields.char('Analytic Plan', required=True, select=True), - 'plan_ids': fields.one2many('account.analytic.plan.line', 'plan_id', 'Analytic Plans'), + 'plan_ids': fields.one2many('account.analytic.plan.line', 'plan_id', 'Analytic Plans', copy=True), } @@ -100,7 +107,7 @@ class account_analytic_plan_instance(osv.osv): 'name': fields.char('Analytic Distribution'), 'code': fields.char('Distribution Code', size=16), 'journal_id': fields.many2one('account.analytic.journal', 'Analytic Journal' ), - 'account_ids': fields.one2many('account.analytic.plan.instance.line', 'plan_id', 'Account Id'), + 'account_ids': fields.one2many('account.analytic.plan.instance.line', 'plan_id', 'Account Id', copy=True), 'account1_ids': one2many_mod2('account.analytic.plan.instance.line', 'plan_id', 'Account1 Id'), 'account2_ids': one2many_mod2('account.analytic.plan.instance.line', 'plan_id', 'Account2 Id'), 'account3_ids': one2many_mod2('account.analytic.plan.instance.line', 'plan_id', 'Account3 Id'), @@ -124,13 +131,6 @@ class account_analytic_plan_instance(osv.osv): context=context, count=count) return res - def copy(self, cr, uid, id, default=None, context=None): - if not default: - default = {} - default.update({'account1_ids':False, 
'account2_ids':False, 'account3_ids':False, - 'account4_ids':False, 'account5_ids':False, 'account6_ids':False}) - return super(account_analytic_plan_instance, self).copy(cr, uid, id, default, context=context) - def _default_journal(self, cr, uid, context=None): if context is None: context = {} @@ -373,8 +373,8 @@ class account_invoice(osv.osv): res['analytics_id'] = x.get('analytics_id', False) return res - def _get_analytic_lines(self, cr, uid, id, context=None): - inv = self.browse(cr, uid, [id])[0] + def _get_analytic_lines(self, cr, uid, ids, context=None): + inv = self.browse(cr, uid, ids)[0] cur_obj = self.pool.get('res.currency') invoice_line_obj = self.pool.get('account.invoice.line') acct_ins_obj = self.pool.get('account.analytic.plan.instance') diff --git a/addons/account_analytic_plans/wizard/account_crossovered_analytic.py b/addons/account_analytic_plans/wizard/account_crossovered_analytic.py index 1bdc91f2995824f8bf16878026c373b21f10ff42..699c263322e10c880807fe27e3a78773d79e6fb2 100644 --- a/addons/account_analytic_plans/wizard/account_crossovered_analytic.py +++ b/addons/account_analytic_plans/wizard/account_crossovered_analytic.py @@ -45,7 +45,7 @@ class account_crossovered_analytic(osv.osv_memory): res = cr.fetchall() acc_ids = [x[0] for x in res] - data = self.read(cr, uid, ids, [], context=context)[0] + data = self.read(cr, uid, ids, context=context)[0] data['ref'] = data['ref'][0] obj_acc = self.pool.get('account.analytic.account').browse(cr, uid, data['ref'], context=context) diff --git a/addons/account_anglo_saxon/test/anglo_saxon.yml b/addons/account_anglo_saxon/test/anglo_saxon.yml index 9c953482f5387286f963e886bb1ffabf3f2ae278..c98de7c47db9d43675c5d7dfba8b333fae71c93f 100644 --- a/addons/account_anglo_saxon/test/anglo_saxon.yml +++ b/addons/account_anglo_saxon/test/anglo_saxon.yml @@ -127,8 +127,8 @@ Reception is ready for process so now done the reception. 
- !python {model: stock.picking}: | - picking_id = self.pool.get('purchase.order').browse(cr, uid, ref("purchase_order_001")).picking_ids[0] - picking_id.do_transfer(context=context) + picking_id = self.pool.get('purchase.order').browse(cr, uid, ref("purchase_order_001"), context=context).picking_ids[0] + picking_id.do_transfer() - I check the Stock Interim account (Received) is credited successfully. - @@ -144,7 +144,7 @@ - !python {model: purchase.order}: | invoice_ids = [x.id for x in self.browse(cr, uid, ref("purchase_order_001")).invoice_ids] - self.pool.get('account.invoice').signal_invoice_open(cr, uid, invoice_ids) + self.pool.get('account.invoice').signal_workflow(cr, uid, invoice_ids, 'invoice_open') - I check the Stock Interim account (Received) is debited sucessfully when Invoice validated. - @@ -166,7 +166,7 @@ !python {model: purchase.order}: | po = self.browse(cr, uid, ref("purchase_order_001")) for invoice in po.invoice_ids: - self.pool.get('account.invoice').signal_invoice_open(cr, uid, [invoice.id]) + invoice.signal_workflow('invoice_open') - I pay the invoice. - @@ -220,8 +220,8 @@ I process the delivery. - !python {model: stock.picking}: | - picking = self.pool.get('stock.picking').browse(cr, uid, ref("stock_picking_out001")) - picking.do_transfer(context=context) + picking = self.pool.get('stock.picking').browse(cr, uid, ref("stock_picking_out001"), context=context) + picking.do_transfer() - I check Stock Interim account (Delivery) is debited successfully. - @@ -252,7 +252,7 @@ !python {model: stock.picking}: | move_name = self.pool.get('stock.picking').browse(cr, uid, ref('stock_picking_out001')).name account_invoice = self.pool.get('account.invoice').search(cr, uid, [('origin', '=', move_name)]) - self.pool.get('account.invoice').signal_invoice_open(cr, uid, account_invoice) + self.pool.get('account.invoice').signal_workflow(cr, uid, account_invoice, 'invoice_open') - I check Income Account is Credited sucessfully when Invoice validated. 
- diff --git a/addons/account_anglo_saxon/test/anglo_saxon_avg_fifo.yml b/addons/account_anglo_saxon/test/anglo_saxon_avg_fifo.yml index b9d290c89e33ffa2eaa493b413f8e171a3f86c70..034422602950cc85f2fc8c243fbd1c38a25527b0 100644 --- a/addons/account_anglo_saxon/test/anglo_saxon_avg_fifo.yml +++ b/addons/account_anglo_saxon/test/anglo_saxon_avg_fifo.yml @@ -135,8 +135,8 @@ Reception is ready for process so now done the reception. - !python {model: stock.picking}: | - picking_id = self.pool.get('purchase.order').browse(cr, uid, ref("purchase_order_001_fifo")).picking_ids[0] - picking_id.do_transfer(context=context) + picking_id = self.pool.get('purchase.order').browse(cr, uid, ref("purchase_order_001_fifo"), context=context).picking_ids[0] + picking_id.do_transfer() - I check the Stock Interim account (Received) is credit successfully. - @@ -154,7 +154,7 @@ invoice_ids = [x.id for x in self.browse(cr, uid, ref("purchase_order_001_fifo")).invoice_ids] line_ids = self.pool.get('account.invoice.line').search(cr, uid, [('invoice_id', 'in', invoice_ids)]) self.pool.get('account.invoice.line').write(cr, uid, line_ids, {'price_unit': 10}) - self.pool.get('account.invoice').signal_invoice_open(cr, uid, invoice_ids) + self.pool.get('account.invoice').signal_workflow(cr, uid, invoice_ids, 'invoice_open') - I check the Stock Interim account (Received) is debited sucessfully when Invoice validated. - @@ -220,8 +220,8 @@ I process the delivery. - !python {model: stock.picking}: | - picking = self.pool.get('stock.picking').browse(cr, uid, ref("stock_picking_out001_fifo")) - picking.do_transfer(context=context) + picking = self.pool.get('stock.picking').browse(cr, uid, ref("stock_picking_out001_fifo"), context=context) + picking.do_transfer() - I check Stock Interim account (Delivery) is debited successfully. 
- @@ -257,7 +257,7 @@ account_invoice_line = self.pool.get('account.invoice.line').search(cr, uid, [('invoice_id', 'in', account_invoice)]) self.pool.get('account.invoice.line').write(cr, uid, account_invoice_line, {'invoice_line_tax_id': [(6, 0, [])]}) self.pool.get('account.invoice').button_reset_taxes(cr, uid, account_invoice) - self.pool.get('account.invoice').signal_invoice_open(cr, uid, account_invoice) + self.pool.get('account.invoice').signal_workflow(cr, uid, account_invoice, 'invoice_open') - I check Income Account is Credited sucessfully when Invoice validated. - diff --git a/addons/account_asset/account_asset.py b/addons/account_asset/account_asset.py index 36ed9424d5a6b3aa3d8b914a0547357e3f519b98..be6ffdce3e556d7d3139691c78ece0e390c2289b 100644 --- a/addons/account_asset/account_asset.py +++ b/addons/account_asset/account_asset.py @@ -254,9 +254,9 @@ class account_asset_asset(osv.osv): 'note': fields.text('Note'), 'category_id': fields.many2one('account.asset.category', 'Asset Category', required=True, change_default=True, readonly=True, states={'draft':[('readonly',False)]}), 'parent_id': fields.many2one('account.asset.asset', 'Parent Asset', readonly=True, states={'draft':[('readonly',False)]}), - 'child_ids': fields.one2many('account.asset.asset', 'parent_id', 'Children Assets'), + 'child_ids': fields.one2many('account.asset.asset', 'parent_id', 'Children Assets', copy=True), 'purchase_date': fields.date('Purchase Date', required=True, readonly=True, states={'draft':[('readonly',False)]}), - 'state': fields.selection([('draft','Draft'),('open','Running'),('close','Close')], 'Status', required=True, + 'state': fields.selection([('draft','Draft'),('open','Running'),('close','Close')], 'Status', required=True, copy=False, help="When an asset is created, the status is 'Draft'.\n" \ "If the asset is confirmed, the status goes in 'Running' and the depreciation lines can be posted in the accounting.\n" \ "You can manually close an asset when the 
depreciation is over. If the last line of depreciation is posted, the asset automatically goes in that status."), @@ -329,23 +329,13 @@ class account_asset_asset(osv.osv): res['value'] = {'prorata': False} return res - def copy(self, cr, uid, id, default=None, context=None): - if default is None: - default = {} - if context is None: - context = {} - default.update({'depreciation_line_ids': [], 'account_move_line_ids': [], 'history_ids': [], 'state': 'draft'}) - return super(account_asset_asset, self).copy(cr, uid, id, default, context=context) - def _compute_entries(self, cr, uid, ids, period_id, context=None): result = [] period_obj = self.pool.get('account.period') depreciation_obj = self.pool.get('account.asset.depreciation.line') period = period_obj.browse(cr, uid, period_id, context=context) depreciation_ids = depreciation_obj.search(cr, uid, [('asset_id', 'in', ids), ('depreciation_date', '<=', period.date_stop), ('depreciation_date', '>=', period.date_start), ('move_check', '=', False)], context=context) - if context is None: - context = {} - context.update({'depreciation_date':period.date_stop}) + context = dict(context or {}, depreciation_date=period.date_stop) return depreciation_obj.create_move(cr, uid, depreciation_ids, context=context) def create(self, cr, uid, vals, context=None): @@ -354,9 +344,7 @@ class account_asset_asset(osv.osv): return asset_id def open_entries(self, cr, uid, ids, context=None): - if context is None: - context = {} - context.update({'search_default_asset_id': ids, 'default_asset_id': ids}) + context = dict(context or {}, search_default_asset_id=ids, default_asset_id=ids) return { 'name': _('Journal Items'), 'view_type': 'form', @@ -392,9 +380,8 @@ class account_asset_depreciation_line(osv.osv): } def create_move(self, cr, uid, ids, context=None): + context = dict(context or {}) can_close = False - if context is None: - context = {} asset_obj = self.pool.get('account.asset.asset') period_obj = self.pool.get('account.period') 
move_obj = self.pool.get('account.move') @@ -466,8 +453,6 @@ _inherit = 'account.move.line' _columns = { 'asset_id': fields.many2one('account.asset.asset', 'Asset', ondelete="restrict"), - 'entry_ids': fields.one2many('account.move.line', 'asset_id', 'Entries', readonly=True, states={'draft':[('readonly',False)]}), - } class account_asset_history(osv.osv): diff --git a/addons/account_bank_statement_extensions/account_bank_statement.py b/addons/account_bank_statement_extensions/account_bank_statement.py index 0b0386ecf8e82240893eefe8aee1560345edf6d9..38cd68fbc2f362f04f37f673a53e9d4f4855fe3b 100644 --- a/addons/account_bank_statement_extensions/account_bank_statement.py +++ b/addons/account_bank_statement_extensions/account_bank_statement.py @@ -39,21 +39,27 @@ class account_bank_statement(osv.osv): return res def button_confirm_bank(self, cr, uid, ids, context=None): + bank_statement_line_obj = self.pool.get('account.bank.statement.line') super(account_bank_statement, self).button_confirm_bank(cr, uid, ids, context=context) for st in self.browse(cr, uid, ids, context=context): if st.line_ids: + line_ids = [l.id for l in st.line_ids] cr.execute("UPDATE account_bank_statement_line \ SET state='confirm' WHERE id in %s ", - (tuple([x.id for x in st.line_ids]),)) + (tuple(line_ids),)) + bank_statement_line_obj.invalidate_cache(cr, uid, ['state'], line_ids, context=context) return True def button_cancel(self, cr, uid, ids, context=None): + bank_statement_line_obj = self.pool.get('account.bank.statement.line') super(account_bank_statement, self).button_cancel(cr, uid, ids, context=context) for st in self.browse(cr, uid, ids, context=context): if st.line_ids: + line_ids = [l.id for l in st.line_ids] cr.execute("UPDATE account_bank_statement_line \ SET state='draft' WHERE id in %s ", - (tuple([x.id for x in st.line_ids]),)) + (tuple(line_ids),)) + bank_statement_line_obj.invalidate_cache(cr, uid, ['state'], line_ids, context=context) 
return True @@ -65,7 +71,7 @@ class account_bank_statement_line_global(osv.osv): 'name': fields.char('OBI', required=True, help="Originator to Beneficiary Information"), 'code': fields.char('Code', size=64, required=True), 'parent_id': fields.many2one('account.bank.statement.line.global', 'Parent Code', ondelete='cascade'), - 'child_ids': fields.one2many('account.bank.statement.line.global', 'parent_id', 'Child Codes'), + 'child_ids': fields.one2many('account.bank.statement.line.global', 'parent_id', 'Child Codes', copy=True), 'type': fields.selection([ ('iso20022', 'ISO 20022'), ('coda', 'CODA'), @@ -110,7 +116,7 @@ class account_bank_statement_line(osv.osv): 'globalisation_amount': fields.related('globalisation_id', 'amount', type='float', relation='account.bank.statement.line.global', string='Glob. Amount', readonly=True), 'state': fields.selection([('draft', 'Draft'), ('confirm', 'Confirmed')], - 'Status', required=True, readonly=True), + 'Status', required=True, readonly=True, copy=False), 'counterparty_name': fields.char('Counterparty Name', size=35), 'counterparty_bic': fields.char('Counterparty BIC', size=11), 'counterparty_number': fields.char('Counterparty Number', size=34), diff --git a/addons/account_budget/account_budget.py b/addons/account_budget/account_budget.py index bed9085875ead0ed55114b5053d285eab1c7469a..d41e25e0f04c8c904bdb982d0ec2dddb62d3c680 100644 --- a/addons/account_budget/account_budget.py +++ b/addons/account_budget/account_budget.py @@ -62,8 +62,8 @@ class crossovered_budget(osv.osv): 'validating_user_id': fields.many2one('res.users', 'Validate User', readonly=True), 'date_from': fields.date('Start Date', required=True, states={'done':[('readonly',True)]}), 'date_to': fields.date('End Date', required=True, states={'done':[('readonly',True)]}), - 'state' : fields.selection([('draft','Draft'),('cancel', 'Cancelled'),('confirm','Confirmed'),('validate','Validated'),('done','Done')], 'Status', select=True, required=True, readonly=True), - 
'crossovered_budget_line': fields.one2many('crossovered.budget.lines', 'crossovered_budget_id', 'Budget Lines', states={'done':[('readonly',True)]}), + 'state' : fields.selection([('draft','Draft'),('cancel', 'Cancelled'),('confirm','Confirmed'),('validate','Validated'),('done','Done')], 'Status', select=True, required=True, readonly=True, copy=False), + 'crossovered_budget_line': fields.one2many('crossovered.budget.lines', 'crossovered_budget_id', 'Budget Lines', states={'done':[('readonly',True)]}, copy=True), 'company_id': fields.many2one('res.company', 'Company', required=True), } diff --git a/addons/account_check_writing/wizard/account_check_batch_printing.py b/addons/account_check_writing/wizard/account_check_batch_printing.py index bf59557d9044fa099a308e7f211adac92d5797e1..aab8ca277f247cdd90abc66e8cbc48910a43e949 100644 --- a/addons/account_check_writing/wizard/account_check_batch_printing.py +++ b/addons/account_check_writing/wizard/account_check_batch_printing.py @@ -33,7 +33,7 @@ class account_check_write(osv.osv_memory): def _get_next_number(self, cr, uid, context=None): dummy, sequence_id = self.pool.get('ir.model.data').get_object_reference(cr, uid, 'account_check_writing', 'sequence_check_number') - return self.pool.get('ir.sequence').read(cr, uid, sequence_id, ['number_next'])['number_next'] + return self.pool.get('ir.sequence').read(cr, uid, [sequence_id], ['number_next'])[0]['number_next'] _defaults = { 'check_number': _get_next_number, @@ -47,7 +47,7 @@ class account_check_write(osv.osv_memory): #update the sequence to number the checks from the value encoded in the wizard dummy, sequence_id = self.pool.get('ir.model.data').get_object_reference(cr, uid, 'account_check_writing', 'sequence_check_number') - increment = ir_sequence_obj.read(cr, uid, sequence_id, ['number_increment'])['number_increment'] + increment = ir_sequence_obj.read(cr, uid, [sequence_id], ['number_increment'])[0]['number_increment'] new_value = self.browse(cr, uid, ids[0], 
context=context).check_number ir_sequence_obj.write(cr, uid, sequence_id, {'number_next': new_value}) diff --git a/addons/account_followup/account_followup.py b/addons/account_followup/account_followup.py index dc00119c0bf29c6f98c3216580b78c8418f417d9..23fbeb598b8fbf9e158e4476da6deaf0707eb136 100644 --- a/addons/account_followup/account_followup.py +++ b/addons/account_followup/account_followup.py @@ -19,6 +19,7 @@ # ############################################################################## +from openerp import api from openerp.osv import fields, osv from lxml import etree from openerp.tools.translate import _ @@ -28,7 +29,7 @@ class followup(osv.osv): _description = 'Account Follow-up' _rec_name = 'name' _columns = { - 'followup_line': fields.one2many('account_followup.followup.line', 'followup_id', 'Follow-up'), + 'followup_line': fields.one2many('account_followup.followup.line', 'followup_id', 'Follow-up', copy=True), 'company_id': fields.many2one('res.company', 'Company', required=True), 'name': fields.related('company_id', 'name', string = "Name", readonly=True, type="char"), } @@ -154,6 +155,7 @@ class res_partner(osv.osv): 'latest_followup_level_id_without_lit': latest_level_without_lit} return res + @api.cr_uid_ids_context def do_partner_manual_action(self, cr, uid, partner_ids, context=None): #partner_ids -> res.partner for partner in self.browse(cr, uid, partner_ids, context=context): @@ -190,6 +192,7 @@ class res_partner(osv.osv): } return self.pool['report'].get_action(cr, uid, [], 'account_followup.report_followup', data=datas, context=context) + @api.cr_uid_ids_context def do_partner_mail(self, cr, uid, partner_ids, context=None): if context is None: context = {} @@ -268,7 +271,7 @@ class res_partner(osv.osv): if date <= current_date and aml['balance'] > 0: strbegin = "<TD><B>" strend = "</B></TD>" - followup_table +="<TR>" + strbegin + str(aml['date']) + strend + strbegin + aml['name'] + strend + strbegin + aml['ref'] + strend + strbegin + 
str(date) + strend + strbegin + str(aml['balance']) + strend + strbegin + block + strend + "</TR>" + followup_table +="<TR>" + strbegin + str(aml['date']) + strend + strbegin + aml['name'] + strend + strbegin + (aml['ref'] or '') + strend + strbegin + str(date) + strend + strbegin + str(aml['balance']) + strend + strbegin + block + strend + "</TR>" total = reduce(lambda x, y: x+y['balance'], currency_dict['line'], 0.00) @@ -434,15 +437,17 @@ class res_partner(osv.osv): _columns = { 'payment_responsible_id':fields.many2one('res.users', ondelete='set null', string='Follow-up Responsible', help="Optionally you can assign a user to this field, which will make him responsible for the action.", - track_visibility="onchange"), - 'payment_note':fields.text('Customer Payment Promise', help="Payment Note", track_visibility="onchange"), - 'payment_next_action':fields.text('Next Action', + track_visibility="onchange", copy=False), + 'payment_note':fields.text('Customer Payment Promise', help="Payment Note", track_visibility="onchange", copy=False), + 'payment_next_action':fields.text('Next Action', copy=False, help="This is the next action to be taken. It will automatically be set when the partner gets a follow-up level that requires a manual action. ", track_visibility="onchange"), - 'payment_next_action_date':fields.date('Next Action Date', - help="This is when the manual follow-up is needed. " \ - "The date will be set to the current date when the partner gets a follow-up level that requires a manual action. "\ - "Can be practical to set manually e.g. to see if he keeps his promises."), + 'payment_next_action_date': fields.date('Next Action Date', copy=False, + help="This is when the manual follow-up is needed. " + "The date will be set to the current date when the partner " + "gets a follow-up level that requires a manual action. " + "Can be practical to set manually e.g. 
to see if he keeps " + "his promises."), 'unreconciled_aml_ids':fields.one2many('account.move.line', 'partner_id', domain=['&', ('reconcile_id', '=', False), '&', ('account_id.active','=', True), '&', ('account_id.type', '=', 'receivable'), ('state', '!=', 'draft')]), 'latest_followup_date':fields.function(_get_latest, method=True, type='date', string="Latest Follow-up Date", diff --git a/addons/account_followup/report/account_followup_print.py b/addons/account_followup/report/account_followup_print.py index 4e8568539153a050cb07930ec0f9d4cee4076ae2..bf20527d3fa47221521f7f10e2cbd9da39f05152 100644 --- a/addons/account_followup/report/account_followup_print.py +++ b/addons/account_followup/report/account_followup_print.py @@ -75,9 +75,7 @@ class report_rappel(report_sxw.rml_parse): return [{'line': lines} for lines in lines_per_currency.values()] def _get_text(self, stat_line, followup_id, context=None): - if context is None: - context = {} - context.update({'lang': stat_line.partner_id.lang}) + context = dict(context or {}, lang=stat_line.partner_id.lang) fp_obj = self.pool['account_followup.followup'] fp_line = fp_obj.browse(self.cr, self.uid, followup_id, context=context).followup_line if not fp_line: diff --git a/addons/account_followup/tests/test_account_followup.py b/addons/account_followup/tests/test_account_followup.py index b3025021278258a0559543403375f11cedb1d8ea..697fb433362555099c67ab509bc6c1fce187a826 100644 --- a/addons/account_followup/tests/test_account_followup.py +++ b/addons/account_followup/tests/test_account_followup.py @@ -42,7 +42,7 @@ class TestAccountFollowup(TransactionCase): 'quantity': 5, 'price_unit':200 })]}) - self.registry('account.invoice').signal_invoice_open(cr, uid, [self.invoice_id]) + self.registry('account.invoice').signal_workflow(cr, uid, [self.invoice_id], 'invoice_open') self.voucher = self.registry("account.voucher") @@ -112,8 +112,7 @@ class TestAccountFollowup(TransactionCase): partner_rec = self.partner.browse(cr, uid, 
self.partner_id) self.run_wizard_three_times() self.partner.action_done(cr, uid, self.partner_id) - self.assertEqual(partner_rec.payment_next_action, - "", "Manual action not emptied") + self.assertFalse(partner_rec.payment_next_action, "Manual action not emptied") self.assertFalse(partner_rec.payment_responsible_id) self.assertFalse(partner_rec.payment_next_action_date) diff --git a/addons/account_followup/wizard/account_followup_print.py b/addons/account_followup/wizard/account_followup_print.py index 7b7cb8fc77edcd85c6e4f41a2a1d9a28846a9a56..83100723edb0e3ca9f62b96b967d8e90813ee6bc 100644 --- a/addons/account_followup/wizard/account_followup_print.py +++ b/addons/account_followup/wizard/account_followup_print.py @@ -204,15 +204,14 @@ class account_followup_print(osv.osv_memory): return len(partners_to_clear) def do_process(self, cr, uid, ids, context=None): - if context is None: - context = {} + context = dict(context or {}) #Get partners tmp = self._get_partners_followp(cr, uid, ids, context=context) partner_list = tmp['partner_ids'] to_update = tmp['to_update'] date = self.browse(cr, uid, ids, context=context)[0].date - data = self.read(cr, uid, ids, [], context=context)[0] + data = self.read(cr, uid, ids, context=context)[0] data['followup_id'] = data['followup_id'][0] #Update partners diff --git a/addons/account_payment/account_payment.py b/addons/account_payment/account_payment.py index d2b2786e9958bda08447f6d60772227026bb13fc..de72960a9c68a808ab9a3769e94875e604f230e6 100644 --- a/addons/account_payment/account_payment.py +++ b/addons/account_payment/account_payment.py @@ -87,13 +87,13 @@ class payment_order(osv.osv): _columns = { 'date_scheduled': fields.date('Scheduled Date', states={'done':[('readonly', True)]}, help='Select a date if you have chosen Preferred Date to be fixed.'), - 'reference': fields.char('Reference', required=1, states={'done': [('readonly', True)]}), + 'reference': fields.char('Reference', required=1, states={'done': [('readonly', 
True)]}, copy=False), 'mode': fields.many2one('payment.mode', 'Payment Mode', select=True, required=1, states={'done': [('readonly', True)]}, help='Select the Payment Mode to be applied.'), 'state': fields.selection([ ('draft', 'Draft'), ('cancel', 'Cancelled'), ('open', 'Confirmed'), - ('done', 'Done')], 'Status', select=True, + ('done', 'Done')], 'Status', select=True, copy=False, help='When an order is placed the status is \'Draft\'.\n Once the bank is confirmed the status is set to \'Confirmed\'.\n Then the order is paid the status is \'Done\'.'), 'line_ids': fields.one2many('payment.line', 'order_id', 'Payment lines', states={'done': [('readonly', True)]}), 'total': fields.function(_total, string="Total", type='float'), @@ -132,19 +132,9 @@ class payment_order(osv.osv): def set_done(self, cr, uid, ids, *args): self.write(cr, uid, ids, {'date_done': time.strftime('%Y-%m-%d')}) - self.signal_done(cr, uid, [ids[0]]) + self.signal_workflow(cr, uid, ids, 'done') return True - def copy(self, cr, uid, id, default=None, context=None): - if default is None: - default = {} - default.update({ - 'state': 'draft', - 'line_ids': [], - 'reference': self.pool.get('ir.sequence').get(cr, uid, 'payment.order') - }) - return super(payment_order, self).copy(cr, uid, id, default, context=context) - def write(self, cr, uid, ids, vals, context=None): if context is None: context = {} @@ -179,7 +169,7 @@ class payment_line(osv.osv): "due_date": "date_maturity", "reference": "ref"}.get(orig, orig) - def info_owner(self, cr, uid, ids, name=None, args=None, context=None): + def _info_owner(self, cr, uid, ids, name=None, args=None, context=None): result = {} for line in self.browse(cr, uid, ids, context=context): owner = line.order_id.mode.bank_id.partner_id @@ -197,7 +187,7 @@ class payment_line(osv.osv): cntry = partner_record.country_id and partner_record.country_id.name or '' return partner_record.name + "\n" + st + " " + st1 + "\n" + zip_city + "\n" +cntry - def info_partner(self, cr, 
uid, ids, name=None, args=None, context=None): + def _info_partner(self, cr, uid, ids, name=None, args=None, context=None): result = {} for line in self.browse(cr, uid, ids, context=context): result[line.id] = False @@ -322,8 +312,8 @@ class payment_line(osv.osv): type='date', help="Invoice Effective Date"), 'ml_maturity_date': fields.function(_get_ml_maturity_date, type='date', string='Due Date'), 'ml_inv_ref': fields.function(_get_ml_inv_ref, type='many2one', relation='account.invoice', string='Invoice Ref.'), - 'info_owner': fields.function(info_owner, string="Owner Account", type="text", help='Address of the Main Partner'), - 'info_partner': fields.function(info_partner, string="Destination Account", type="text", help='Address of the Ordering Customer.'), + 'info_owner': fields.function(_info_owner, string="Owner Account", type="text", help='Address of the Main Partner'), + 'info_partner': fields.function(_info_partner, string="Destination Account", type="text", help='Address of the Ordering Customer.'), 'date': fields.date('Payment Date', help="If no payment date is specified, the bank will treat this payment line directly"), 'create_date': fields.datetime('Created', readonly=True), 'state': fields.selection([('normal','Free'), ('structured','Structured')], 'Communication Type', required=True), diff --git a/addons/account_payment/test/payment_order_process.yml b/addons/account_payment/test/payment_order_process.yml index 2bb04807a6449c2c17c34fd0079c45aea26dc162..5aaf97b9e7118c664fec9d2e7197d02418d0a41e 100644 --- a/addons/account_payment/test/payment_order_process.yml +++ b/addons/account_payment/test/payment_order_process.yml @@ -79,7 +79,7 @@ - I create a bank statement. 
- - !record {model: account.bank.statement, id: account_bank_statement_1}: + !record {model: account.bank.statement, id: account_bank_statement_1, view: False}: balance_end_real: 0.0 balance_start: 0.0 date: !eval time.strftime('%Y-%m-%d') diff --git a/addons/account_payment/wizard/account_payment_order.py b/addons/account_payment/wizard/account_payment_order.py index 8d5df048fe023560766b7047d7721398e9723521..78b634d44fc84dc1d9c3a46764d86cff63c1a697 100644 --- a/addons/account_payment/wizard/account_payment_order.py +++ b/addons/account_payment/wizard/account_payment_order.py @@ -107,7 +107,7 @@ class payment_order_create(osv.osv_memory): domain = [('reconcile_id', '=', False), ('account_id.type', '=', 'payable'), ('credit', '>', 0), ('account_id.reconcile', '=', True)] domain = domain + ['|', ('date_maturity', '<=', search_due_date), ('date_maturity', '=', False)] line_ids = line_obj.search(cr, uid, domain, context=context) - context.update({'line_ids': line_ids}) + context = dict(context, line_ids=line_ids) model_data_ids = mod_obj.search(cr, uid,[('model', '=', 'ir.ui.view'), ('name', '=', 'view_create_payment_order_lines')], context=context) resource_id = mod_obj.read(cr, uid, model_data_ids, fields=['res_id'], context=context)[0]['res_id'] return {'name': _('Entry Lines'), diff --git a/addons/account_payment/wizard/account_payment_populate_statement.py b/addons/account_payment/wizard/account_payment_populate_statement.py index ef068a55b2c6876cef2e939e5cd4c14219d71b34..6a049ff78a91de998072b637d1639d25a433a194 100644 --- a/addons/account_payment/wizard/account_payment_populate_statement.py +++ b/addons/account_payment/wizard/account_payment_populate_statement.py @@ -62,7 +62,7 @@ class account_payment_populate_statement(osv.osv_memory): if context is None: context = {} - data = self.read(cr, uid, ids, [], context=context)[0] + data = self.read(cr, uid, ids, context=context)[0] line_ids = data['lines'] if not line_ids: return {'type': 
'ir.actions.act_window_close'} @@ -77,7 +77,7 @@ class account_payment_populate_statement(osv.osv_memory): if not line.move_line_id.id: continue - context.update({'move_line_ids': [line.move_line_id.id]}) + context = dict(context, move_line_ids=[line.move_line_id.id]) result = voucher_obj.onchange_partner_id(cr, uid, [], partner_id=line.partner_id.id, journal_id=statement.journal_id.id, amount=abs(amount), currency_id= statement.currency.id, ttype='payment', date=line.ml_maturity_date, context=context) if line.move_line_id: diff --git a/addons/account_sequence/account_sequence.py b/addons/account_sequence/account_sequence.py index 1b60e51e1240bc621354d81cb89931e261010d49..c85320d92c32197c2991765e7da38cca6e4142d3 100644 --- a/addons/account_sequence/account_sequence.py +++ b/addons/account_sequence/account_sequence.py @@ -26,7 +26,9 @@ class account_move(osv.osv): _inherit = 'account.move' _columns = { - 'internal_sequence_number': fields.char('Internal Number', readonly=True, help='Internal Sequence Number'), + 'internal_sequence_number': fields.char('Internal Number', + readonly=True, copy=False, + help='Internal Sequence Number'), } def post(self, cr, uid, ids, context=None): diff --git a/addons/account_voucher/account_voucher.py b/addons/account_voucher/account_voucher.py index 9fde90231b2cdbf09f3517a695a09c22a67129a6..275e002695e2f12aab380f9646888242cd41baff 100644 --- a/addons/account_voucher/account_voucher.py +++ b/addons/account_voucher/account_voucher.py @@ -335,10 +335,13 @@ class account_voucher(osv.osv): ('receipt','Receipt'), ],'Default Type', readonly=True, states={'draft':[('readonly',False)]}), 'name':fields.char('Memo', readonly=True, states={'draft':[('readonly',False)]}), - 'date':fields.date('Date', readonly=True, select=True, states={'draft':[('readonly',False)]}, help="Effective date for accounting entries"), + 'date':fields.date('Date', readonly=True, select=True, states={'draft':[('readonly',False)]}, + help="Effective date for accounting 
entries", copy=False), 'journal_id':fields.many2one('account.journal', 'Journal', required=True, readonly=True, states={'draft':[('readonly',False)]}), 'account_id':fields.many2one('account.account', 'Account', required=True, readonly=True, states={'draft':[('readonly',False)]}), - 'line_ids':fields.one2many('account.voucher.line','voucher_id','Voucher Lines', readonly=True, states={'draft':[('readonly',False)]}), + 'line_ids':fields.one2many('account.voucher.line', 'voucher_id', 'Voucher Lines', + readonly=True, copy=True, + states={'draft':[('readonly',False)]}), 'line_cr_ids':fields.one2many('account.voucher.line','voucher_id','Credits', domain=[('type','=','cr')], context={'default_type':'cr'}, readonly=True, states={'draft':[('readonly',False)]}), 'line_dr_ids':fields.one2many('account.voucher.line','voucher_id','Debits', @@ -352,16 +355,17 @@ class account_voucher(osv.osv): ('cancel','Cancelled'), ('proforma','Pro-forma'), ('posted','Posted') - ], 'Status', readonly=True, track_visibility='onchange', + ], 'Status', readonly=True, track_visibility='onchange', copy=False, help=' * The \'Draft\' status is used when a user is encoding a new and unconfirmed Voucher. \ \n* The \'Pro-forma\' when voucher is in Pro-forma status,voucher does not have an voucher number. 
\ \n* The \'Posted\' status is used when user create voucher,a voucher number is generated and voucher entries are created in account \ \n* The \'Cancelled\' status is used when user cancel voucher.'), 'amount': fields.float('Total', digits_compute=dp.get_precision('Account'), required=True, readonly=True, states={'draft':[('readonly',False)]}), 'tax_amount':fields.float('Tax Amount', digits_compute=dp.get_precision('Account'), readonly=True, states={'draft':[('readonly',False)]}), - 'reference': fields.char('Ref #', readonly=True, states={'draft':[('readonly',False)]}, help="Transaction reference number."), - 'number': fields.char('Number', readonly=True,), - 'move_id':fields.many2one('account.move', 'Account Entry'), + 'reference': fields.char('Ref #', readonly=True, states={'draft':[('readonly',False)]}, + help="Transaction reference number.", copy=False), + 'number': fields.char('Number', readonly=True, copy=False), + 'move_id':fields.many2one('account.move', 'Account Entry', copy=False), 'move_ids': fields.related('move_id','line_id', type='one2many', relation='account.move.line', string='Journal Items', readonly=True), 'partner_id':fields.many2one('res.partner', 'Partner', change_default=1, readonly=True, states={'draft':[('readonly',False)]}), 'audit': fields.related('move_id','to_check', type='boolean', help='Check this box if you are unsure of that journal entry and if you want to note it as \'to be reviewed\' by an accounting expert.', relation='account.move', string='To Review'), @@ -573,7 +577,7 @@ class account_voucher(osv.osv): ctx.update({'date': date}) #read the voucher rate with the right date in the context currency_id = currency_id or self.pool.get('res.company').browse(cr, uid, company_id, context=ctx).currency_id.id - voucher_rate = self.pool.get('res.currency').read(cr, uid, currency_id, ['rate'], context=ctx)['rate'] + voucher_rate = self.pool.get('res.currency').read(cr, uid, [currency_id], ['rate'], context=ctx)[0]['rate'] ctx.update({ 
'voucher_special_currency': payment_rate_currency_id, 'voucher_special_currency_rate': rate * voucher_rate}) @@ -615,7 +619,7 @@ class account_voucher(osv.osv): 'payment_rate_currency_id': payment_rate_currency_id }) #read the voucher rate with the right date in the context - voucher_rate = self.pool.get('res.currency').read(cr, uid, currency_id, ['rate'], context=ctx)['rate'] + voucher_rate = self.pool.get('res.currency').read(cr, uid, [currency_id], ['rate'], context=ctx)[0]['rate'] ctx.update({ 'voucher_special_currency_rate': payment_rate * voucher_rate, 'voucher_special_currency': payment_rate_currency_id}) @@ -847,7 +851,7 @@ class account_voucher(osv.osv): ctx = context.copy() ctx.update({'date': date}) #read the voucher rate with the right date in the context - voucher_rate = self.pool.get('res.currency').read(cr, uid, currency_id, ['rate'], context=ctx)['rate'] + voucher_rate = self.pool.get('res.currency').read(cr, uid, [currency_id], ['rate'], context=ctx)[0]['rate'] ctx.update({ 'voucher_special_currency_rate': payment_rate * voucher_rate, 'voucher_special_currency': payment_rate_currency_id}) @@ -922,7 +926,7 @@ class account_voucher(osv.osv): return vals def button_proforma_voucher(self, cr, uid, ids, context=None): - self.signal_proforma_voucher(cr, uid, ids) + self.signal_workflow(cr, uid, ids, 'proforma_voucher') return {'type': 'ir.actions.act_window_close'} def proforma_voucher(self, cr, uid, ids, context=None): @@ -1182,7 +1186,7 @@ class account_voucher(osv.osv): tot_line = line_total rec_lst_ids = [] - date = self.read(cr, uid, voucher_id, ['date'], context=context)['date'] + date = self.read(cr, uid, [voucher_id], ['date'], context=context)[0]['date'] ctx = context.copy() ctx.update({'date': date}) voucher = self.pool.get('account.voucher').browse(cr, uid, voucher_id, context=ctx) @@ -1419,22 +1423,6 @@ class account_voucher(osv.osv): reconcile = move_line_pool.reconcile_partial(cr, uid, rec_ids, writeoff_acc_id=voucher.writeoff_acc_id.id, 
writeoff_period_id=voucher.period_id.id, writeoff_journal_id=voucher.journal_id.id) return True - def copy(self, cr, uid, id, default=None, context=None): - if default is None: - default = {} - default.update({ - 'state': 'draft', - 'number': False, - 'move_id': False, - 'line_cr_ids': False, - 'line_dr_ids': False, - 'reference': False - }) - if 'date' not in default: - default['date'] = time.strftime('%Y-%m-%d') - return super(account_voucher, self).copy(cr, uid, id, default, context) - - class account_voucher_line(osv.osv): _name = 'account.voucher.line' _description = 'Voucher Lines' @@ -1495,7 +1483,7 @@ class account_voucher_line(osv.osv): 'reconcile': fields.boolean('Full Reconcile'), 'type':fields.selection([('dr','Debit'),('cr','Credit')], 'Dr/Cr'), 'account_analytic_id': fields.many2one('account.analytic.account', 'Analytic Account'), - 'move_line_id': fields.many2one('account.move.line', 'Journal Item'), + 'move_line_id': fields.many2one('account.move.line', 'Journal Item', copy=False), 'date_original': fields.related('move_line_id','date', type='date', relation='account.move.line', string='Date', readonly=1), 'date_due': fields.related('move_line_id','date_maturity', type='date', relation='account.move.line', string='Due Date', readonly=1), 'amount_original': fields.function(_compute_balance, multi='dc', type='float', string='Original Amount', store=True, digits_compute=dp.get_precision('Account')), diff --git a/addons/account_voucher/test/case1_usd_usd.yml b/addons/account_voucher/test/case1_usd_usd.yml index 44269104370d89bad6a2c3b60f30b7f0d64a1bb1..84ae118ab16b6f2ee7c752028bf4803e3cf62846 100644 --- a/addons/account_voucher/test/case1_usd_usd.yml +++ b/addons/account_voucher/test/case1_usd_usd.yml @@ -182,7 +182,7 @@ - !python {model: account.voucher}: | voucher = self.search(cr, uid, [('name', '=', 'First payment: Case 1 USD/USD'), ('partner_id', '=', ref('base.res_partner_19'))]) - self.signal_proforma_voucher(cr, uid, voucher) + 
self.signal_workflow(cr, uid, voucher, 'proforma_voucher') - I check that the move of my first voucher is valid - @@ -276,7 +276,7 @@ - !python {model: account.voucher}: | voucher = self.search(cr, uid, [('name', '=', 'Second payment: Case 1'), ('partner_id', '=', ref('base.res_partner_19'))]) - self.signal_proforma_voucher(cr, uid, voucher) + self.signal_workflow(cr, uid, voucher, 'proforma_voucher') - I check that the move of my second voucher is valid - diff --git a/addons/account_voucher/test/case1_usd_usd_payment_rate.yml b/addons/account_voucher/test/case1_usd_usd_payment_rate.yml index 037fc1497ec545806a4f8419ca26e43e22e3da1d..c0aa617783ca6d468047151ac48a3bb8af534ed6 100644 --- a/addons/account_voucher/test/case1_usd_usd_payment_rate.yml +++ b/addons/account_voucher/test/case1_usd_usd_payment_rate.yml @@ -191,7 +191,7 @@ - !python {model: account.voucher}: | voucher = ref('account_voucher_1_case1_payment_rate') - self.signal_proforma_voucher(cr, uid, [voucher]) + self.signal_workflow(cr, uid, [voucher], 'proforma_voucher') - I check that the move of my first voucher is valid - diff --git a/addons/account_voucher/test/case2_suppl_usd_eur.yml b/addons/account_voucher/test/case2_suppl_usd_eur.yml index c13bfcbd5d44d09258004d40515098c64e7944d2..836bee35d30bee60ea722272929e25f05d3138a3 100644 --- a/addons/account_voucher/test/case2_suppl_usd_eur.yml +++ b/addons/account_voucher/test/case2_suppl_usd_eur.yml @@ -161,7 +161,7 @@ - !python {model: account.voucher}: | voucher = self.search(cr, uid, [('name', '=', 'First payment: Case 2 SUPPL USD/EUR'), ('partner_id', '=', ref('base.res_partner_19'))]) - self.signal_proforma_voucher(cr, uid, voucher) + self.signal_workflow(cr, uid, voucher, 'proforma_voucher') - I check that the move of my voucher is valid - @@ -263,7 +263,7 @@ - !python {model: account.voucher}: | voucher = self.search(cr, uid, [('name', '=', 'Second payment: Case 2 SUPPL USD/EUR'), ('partner_id', '=', ref('base.res_partner_19'))]) - 
self.signal_proforma_voucher(cr, uid, voucher) + self.signal_workflow(cr, uid, voucher, 'proforma_voucher') - I check that my voucher state is posted - diff --git a/addons/account_voucher/test/case2_usd_eur_debtor_in_eur.yml b/addons/account_voucher/test/case2_usd_eur_debtor_in_eur.yml index f523e312933516717f62574b8bc405101f7eb658..45c86eb7ad05529dbb57ad496ea3d147afb2e2bc 100644 --- a/addons/account_voucher/test/case2_usd_eur_debtor_in_eur.yml +++ b/addons/account_voucher/test/case2_usd_eur_debtor_in_eur.yml @@ -182,7 +182,7 @@ !python {model: account.voucher}: | from openerp import netsvc voucher = self.search(cr, uid, [('name', '=', 'First payment: Case 2 USD/EUR DR EUR'), ('partner_id', '=', ref('base.res_partner_19'))]) - self.signal_proforma_voucher(cr, uid, voucher) + self.signal_workflow(cr, uid, voucher, 'proforma_voucher') - I check that the move of my voucher is valid - @@ -257,7 +257,7 @@ !python {model: account.voucher}: | from openerp import netsvc voucher = self.search(cr, uid, [('name', '=', 'Second payment: Case 2 SUPPL USD/EUR DR EUR'), ('partner_id', '=', ref('base.res_partner_19'))]) - self.signal_proforma_voucher(cr, uid, voucher) + self.signal_workflow(cr, uid, voucher, 'proforma_voucher') - I check that my voucher state is posted - diff --git a/addons/account_voucher/test/case2_usd_eur_debtor_in_usd.yml b/addons/account_voucher/test/case2_usd_eur_debtor_in_usd.yml index 52c416e06cce779b3504f0864badd753557416a7..f353cd8c6fc64a0640781b2a4f28063bb4e9b6a9 100644 --- a/addons/account_voucher/test/case2_usd_eur_debtor_in_usd.yml +++ b/addons/account_voucher/test/case2_usd_eur_debtor_in_usd.yml @@ -179,7 +179,7 @@ - !python {model: account.voucher}: | voucher = self.search(cr, uid, [('name', '=', 'First payment: Case 2 USD/EUR DR USD'), ('partner_id', '=', ref('base.res_partner_19'))]) - self.signal_proforma_voucher(cr, uid, voucher) + self.signal_workflow(cr, uid, voucher, 'proforma_voucher') - I check that the move of my voucher is valid - @@ 
-266,7 +266,7 @@ - !python {model: account.voucher}: | voucher = self.search(cr, uid, [('name', '=', 'Second payment: Case 2 SUPPL USD/EUR DR USD'), ('partner_id', '=', ref('base.res_partner_19'))]) - self.signal_proforma_voucher(cr, uid, voucher) + self.signal_workflow(cr, uid, voucher, 'proforma_voucher') - I check that my voucher state is posted - diff --git a/addons/account_voucher/test/case3_eur_eur.yml b/addons/account_voucher/test/case3_eur_eur.yml index 99153ceeca41df8456b3dfae32aa48c8a01f8b85..064b79ed30536c5392b4cf343b84e8781a67d5a4 100644 --- a/addons/account_voucher/test/case3_eur_eur.yml +++ b/addons/account_voucher/test/case3_eur_eur.yml @@ -141,7 +141,7 @@ - !python {model: account.voucher}: | voucher = self.search(cr, uid, [('name', '=', 'First payment: Case 3'),('partner_id', '=', ref('base.res_partner_19'))]) - self.signal_proforma_voucher(cr, uid, voucher) + self.signal_workflow(cr, uid, voucher, 'proforma_voucher') - I check that the move of my first voucher is valid - @@ -228,7 +228,8 @@ - !python {model: account.voucher}: | voucher = self.search(cr, uid, [('name', '=', 'Second payment: Case 3'), ('partner_id', '=', ref('base.res_partner_19'))]) - self.signal_proforma_voucher(cr, uid, voucher) + self.signal_workflow(cr, uid, voucher, 'proforma_voucher') + - I check that the move of my second voucher is valid - diff --git a/addons/account_voucher/test/case4_cad_chf.yml b/addons/account_voucher/test/case4_cad_chf.yml index 90c36e371bf2ed69db35deeadb422d5700fb5889..476a22c3f1546c86071bff872e958e7a03369ec2 100644 --- a/addons/account_voucher/test/case4_cad_chf.yml +++ b/addons/account_voucher/test/case4_cad_chf.yml @@ -88,12 +88,13 @@ I check that first invoice move is correct for debtor account (debit - credit == 149.39) - !python {model: account.invoice}: | + from openerp.tools import float_compare invoice_id = self.browse(cr, uid, ref("account_first_invoice_jan_cad")) assert invoice_id.move_id, "Move not created for open invoice" move_line_obj = 
self.pool.get('account.move.line') move_lines = move_line_obj.search(cr, uid, [('move_id', '=', invoice_id.move_id.id), ('account_id', '=', invoice_id.account_id.id)]) move_line = move_line_obj.browse(cr, uid, move_lines[0]) - assert (move_line.debit - move_line.credit == 149.39), "Invoice move is incorrect for debtors account" + assert float_compare(move_line.debit - move_line.credit, 149.39, 2) == 0, "Invoice move is incorrect for debtors account" - I set the context that will be used for the encoding of all the vouchers of this file - @@ -142,9 +143,9 @@ I confirm the voucher - !python {model: account.voucher}: | - from openerp import netsvc voucher = self.search(cr, uid, [('name', '=', 'First payment: Case 4'), ('partner_id', '=', ref('base.res_partner_19'))]) - self.signal_proforma_voucher(cr, uid, voucher) + self.signal_workflow(cr, uid, voucher, 'proforma_voucher') + - I check that the move of my voucher is valid - @@ -166,15 +167,17 @@ I check that my writeoff is correct. 11.05 credit and -13.26 amount_currency - !python {model: account.voucher}: | + from openerp.tools import float_compare voucher = self.search(cr, uid, [('name', '=', 'First payment: Case 4'), ('partner_id', '=', ref('base.res_partner_19'))]) voucher_id = self.browse(cr, uid, voucher[0]) move_line_obj = self.pool.get('account.move.line') move_lines = move_line_obj.search(cr, uid, [('move_id', '=', voucher_id.move_id.id)]) + assert move_lines, "Voucher move has no lines" for move_line in move_line_obj.browse(cr, uid, move_lines): - if move_line.amount_currency == 200: - assert move_line.debit == 160.00, "Bank account has wrong entry." - elif move_line.amount_currency == -298.78: - assert move_line.credit == 149.39, "Debtor account has wrong entry." + if float_compare(move_line.amount_currency, 200, 2) == 0: + assert float_compare(move_line.debit, 160.00, 2) == 0, "Bank account has wrong entry." 
+ elif float_compare(move_line.amount_currency, -298.78, 2) == 0: + assert float_compare(move_line.credit, 149.39, 2) == 0, "Debtor account has wrong entry." elif move_line.debit == 0.00 and move_line.credit == 0.00: assert move_line.amount_currency == 98.78, "Incorrect Currency Difference, got %s as amount_currency (expected 98.78)." % (move_line.amount_currency) assert move_line.currency_id.id == ref('base.CAD'), "Incorrect Currency Difference, got %s (expected 'CAD')" % (move_line.currency_id.name) diff --git a/addons/account_voucher/test/case5_suppl_usd_usd.yml b/addons/account_voucher/test/case5_suppl_usd_usd.yml index 917e2366f56b48505e103a8d6367e213921cce40..c20d07a81926ba42014f00332b60cf20e491fa69 100644 --- a/addons/account_voucher/test/case5_suppl_usd_usd.yml +++ b/addons/account_voucher/test/case5_suppl_usd_usd.yml @@ -16,14 +16,14 @@ - !record {model: res.currency.rate, id: nov_usd}: currency_id: base.USD - name: !eval "'%s-11-01' %(datetime.now().year)" + name: !eval "'%s-11-01 00:00:00' %(datetime.now().year)" rate: 1.8 - I create currency USD in OpenERP for December of 1.5 Rate - !record {model: res.currency.rate, id: dec_usd}: currency_id: base.USD - name: !eval "'%s-12-01' %(datetime.now().year)" + name: !eval "'%s-12-01 00:00:00' %(datetime.now().year)" rate: 1.5 - I set the income and expense currency accounts on the main company @@ -92,13 +92,16 @@ I check that first invoice move is correct for creditor account(debit - credit == -555.56) - !python {model: account.invoice}: | + from openerp.tools import float_compare invoice_id = self.browse(cr, uid, ref("account_supplier_invoice_november")) assert invoice_id.move_id, "Move not created for open invoice" move_line_obj = self.pool.get('account.move.line') move_lines = move_line_obj.search(cr, uid, [('move_id', '=', invoice_id.move_id.id), ('account_id', '=', invoice_id.account_id.id)]) move_line = move_line_obj.browse(cr, uid, move_lines[0]) - assert (move_line.debit - move_line.credit == -555.56), 
"Invoice move is incorrect for creditor account" - assert (move_line.amount_currency == -1000), "Amount currency is incorrect for creditor account" + assert float_compare(move_line.debit - move_line.credit, -555.56, 2) == 0, \ + "Invoice move is incorrect for creditor account" + assert float_compare(move_line.amount_currency, -1000, 2) == 0, \ + "Amount currency is incorrect for creditor account" - I set the context that will be used for the encoding of all the vouchers of this file - diff --git a/addons/account_voucher/test/case_eur_usd.yml b/addons/account_voucher/test/case_eur_usd.yml index 825fae1d1afe977ebd13169b9b2f9714129a518b..69304c342c9e4bd7b48d961f890a4c783de282c6 100644 --- a/addons/account_voucher/test/case_eur_usd.yml +++ b/addons/account_voucher/test/case_eur_usd.yml @@ -107,6 +107,7 @@ for item in voucher.line_cr_ids: if item.amount_unreconciled == 1400: data += [(item.id, 1400)] + assert data, "Credit line not found" for line_id, amount in data: self.pool.get('account.voucher.line').write(cr, uid, [line_id], {'amount': amount}) assert (voucher.state=='draft'), "Voucher is not in draft state" @@ -170,5 +171,6 @@ move_line_obj = self.pool.get('account.move.line') move_lines = move_line_obj.search(cr, uid, [('move_id', '=', invoice_id.move_id.id), ('invoice', '=', invoice_id.id), ('account_id', '=', invoice_id.account_id.id)]) move_line = move_line_obj.browse(cr, uid, move_lines[0]) - assert (move_line.amount_residual_currency == 0.0 and move_line.amount_residual == 0.0 and invoice_id.state == 'paid') , "Residual amount is not correct for Invoice" - + assert move_line.amount_residual_currency == 0.0, "Residual amount is not correct for Invoice" + assert move_line.amount_residual == 0.0, "Residual amount is not correct for Invoice" + assert invoice_id.state == 'paid', "Invoice is not paid" diff --git a/addons/account_voucher/test/sales_payment.yml b/addons/account_voucher/test/sales_payment.yml index 
ed9305a320c876305cbb500d0ec2f182153f676a..7bd0e18a5c4ea0d0cdd47fcb7e3734cf1e329fd5 100644 --- a/addons/account_voucher/test/sales_payment.yml +++ b/addons/account_voucher/test/sales_payment.yml @@ -68,7 +68,7 @@ voucher_id = self.browse(cr, uid, id) assert (voucher_id.writeoff_amount == 0.0), "Writeoff amount is not 0.0" assert (voucher_id.state=='draft'), "Voucher is not in draft state" - self.signal_proforma_voucher(cr, uid, [voucher_id.id]) + voucher_id.signal_workflow('proforma_voucher') - Finally i will Confirm the state of the invoice is paid diff --git a/addons/account_voucher/test/sales_receipt.yml b/addons/account_voucher/test/sales_receipt.yml index e75eda2aac00da48e7a6d65225c79266c91dbf1f..9bae2c3549bfad5e451a3d2ab88f9b329963e0a3 100644 --- a/addons/account_voucher/test/sales_receipt.yml +++ b/addons/account_voucher/test/sales_receipt.yml @@ -69,7 +69,7 @@ id = self.create(cr, uid, vals) voucher_id = self.browse(cr, uid, id) assert (voucher_id.state=='draft'), "Voucher is not in draft state" - self.signal_proforma_voucher(cr, uid, [voucher_id.id]) + voucher_id.signal_workflow('proforma_voucher') - I check that move lines are reconciled meaning voucher is paid - diff --git a/addons/analytic/analytic.py b/addons/analytic/analytic.py index 1d728ba1e7ca28ad163863e74d95a0df8eb7c679..e31efa0eb2a93c2ecbd7e29561abbd9fa3c554cd 100644 --- a/addons/analytic/analytic.py +++ b/addons/analytic/analytic.py @@ -159,7 +159,8 @@ class account_analytic_account(osv.osv): if account.company_id.currency_id.id != value: raise osv.except_osv(_('Error!'), _("If you set a company, the currency selected has to be the same as it's currency. \nYou can remove the company belonging, and thus change the currency, only on analytic account of type 'view'. 
This can be really useful for consolidation purposes of several companies charts with different currencies, for example.")) if value: - return cr.execute("""update account_analytic_account set currency_id=%s where id=%s""", (value, account.id, )) + cr.execute("""update account_analytic_account set currency_id=%s where id=%s""", (value, account.id)) + self.invalidate_cache(cr, uid, ['currency_id'], [account.id], context=context) def _currency(self, cr, uid, ids, field_name, arg, context=None): result = {} @@ -173,7 +174,7 @@ class account_analytic_account(osv.osv): _columns = { 'name': fields.char('Account/Contract Name', required=True, track_visibility='onchange'), 'complete_name': fields.function(_get_full_name, type='char', string='Full Name'), - 'code': fields.char('Reference', select=True, track_visibility='onchange'), + 'code': fields.char('Reference', select=True, track_visibility='onchange', copy=False), 'type': fields.selection([('view','Analytic View'), ('normal','Analytic Account'),('contract','Contract or Project'),('template','Template of Contract')], 'Type of Account', required=True, help="If you select the View Type, it means you won\'t allow to create journal entries using that account.\n"\ "The type 'Analytic account' stands for usual accounts that you only want to use in accounting.\n"\ @@ -196,7 +197,14 @@ class account_analytic_account(osv.osv): 'date_start': fields.date('Start Date'), 'date': fields.date('Expiration Date', select=True, track_visibility='onchange'), 'company_id': fields.many2one('res.company', 'Company', required=False), #not required because we want to allow different companies to use the same chart of account, except for leaf accounts. 
- 'state': fields.selection([('template', 'Template'),('draft','New'),('open','In Progress'),('pending','To Renew'),('close','Closed'),('cancelled', 'Cancelled')], 'Status', required=True, track_visibility='onchange'), + 'state': fields.selection([('template', 'Template'), + ('draft','New'), + ('open','In Progress'), + ('pending','To Renew'), + ('close','Closed'), + ('cancelled', 'Cancelled')], + 'Status', required=True, + track_visibility='onchange', copy=False), 'currency_id': fields.function(_currency, fnct_inv=_set_company_currency, #the currency_id field is readonly except if it's a view account and if there is no company store = { 'res.company': (_get_analytic_account, ['currency_id'], 10), @@ -266,10 +274,7 @@ class account_analytic_account(osv.osv): if not default: default = {} analytic = self.browse(cr, uid, id, context=context) - default.update( - code=False, - line_ids=[], - name=_("%s (copy)") % (analytic['name'])) + default['name'] = _("%s (copy)") % analytic['name'] return super(account_analytic_account, self).copy(cr, uid, id, default, context=context) def on_change_company(self, cr, uid, id, company_id): diff --git a/addons/analytic_contract_hr_expense/analytic_contract_hr_expense.py b/addons/analytic_contract_hr_expense/analytic_contract_hr_expense.py index 3682755b9c4817cd5ea428520d831eb924ac6fb2..5c251708d91aeab974f78bf0ecf61100feca8258 100644 --- a/addons/analytic_contract_hr_expense/analytic_contract_hr_expense.py +++ b/addons/analytic_contract_hr_expense/analytic_contract_hr_expense.py @@ -19,7 +19,6 @@ # ############################################################################## from openerp.osv import fields, osv -from openerp.osv.orm import intersect from openerp.tools.translate import _ from openerp.addons.decimal_precision import decimal_precision as dp @@ -128,7 +127,7 @@ class account_analytic_account(osv.osv): act_obj = self.pool.get('ir.actions.act_window') dummy, act_window_id = mod_obj.get_object_reference(cr, uid, 'hr_expense', 
'expense_all') - result = act_obj.read(cr, uid, act_window_id, context=context) + result = act_obj.read(cr, uid, [act_window_id], context=context)[0] line_ids = self.pool.get('hr.expense.line').search(cr,uid,[('analytic_account', 'in', ids)]) result['domain'] = [('line_ids', 'in', line_ids)] diff --git a/addons/analytic_user_function/analytic_user_function.py b/addons/analytic_user_function/analytic_user_function.py index 5a17ac91e5330d80ff112e6e3c9d26dbf1169fe0..42530bfbb7b7aee86e7cf5806dd51a5806b81a61 100644 --- a/addons/analytic_user_function/analytic_user_function.py +++ b/addons/analytic_user_function/analytic_user_function.py @@ -59,7 +59,7 @@ class analytic_user_funct_grid(osv.osv): class account_analytic_account(osv.osv): _inherit = "account.analytic.account" _columns = { - 'user_product_ids': fields.one2many('analytic.user.funct.grid', 'account_id', 'Users/Products Rel.'), + 'user_product_ids': fields.one2many('analytic.user.funct.grid', 'account_id', 'Users/Products Rel.', copy=True), } diff --git a/addons/auth_crypt/auth_crypt.py b/addons/auth_crypt/auth_crypt.py index 6c9deb51e92a5467078492603be3316cb03c5589..384e7960cbe65858dfa28ba3bf887db3ba51080d 100644 --- a/addons/auth_crypt/auth_crypt.py +++ b/addons/auth_crypt/auth_crypt.py @@ -32,6 +32,7 @@ class res_users(osv.osv): def set_pw(self, cr, uid, id, name, value, args, context): if value: self._set_password(cr, uid, id, value, context=context) + self.invalidate_cache(cr, uid, context=context) def get_pw( self, cr, uid, ids, name, args, context ): cr.execute('select id, password from res_users where id in %s', (tuple(map(int, ids)),)) @@ -39,7 +40,7 @@ class res_users(osv.osv): _columns = { 'password': fields.function(get_pw, fnct_inv=set_pw, type='char', string='Password', invisible=True, store=True), - 'password_crypt': fields.char(string='Encrypted Password', invisible=True), + 'password_crypt': fields.char(string='Encrypted Password', invisible=True, copy=False), } def check_credentials(self, cr, 
uid, password): @@ -50,6 +51,7 @@ class res_users(osv.osv): stored, encrypted = cr.fetchone() if stored and not encrypted: self._set_password(cr, uid, uid, stored) + self.invalidate_cache(cr, uid) try: return super(res_users, self).check_credentials(cr, uid, password) except openerp.exceptions.AccessDenied: diff --git a/addons/auth_crypt/i18n/base_crypt.pot b/addons/auth_crypt/i18n/base_crypt.pot deleted file mode 100644 index 4aa6bbcdc54c20bb44220a5f3388a0adfe284e09..0000000000000000000000000000000000000000 --- a/addons/auth_crypt/i18n/base_crypt.pot +++ /dev/null @@ -1,22 +0,0 @@ -# Translation of OpenERP Server. -# This file contains the translation of the following modules: -# * base_crypt -# -msgid "" -msgstr "" -"Project-Id-Version: OpenERP Server 7.0alpha\n" -"Report-Msgid-Bugs-To: \n" -"POT-Creation-Date: 2012-12-03 16:03+0000\n" -"PO-Revision-Date: 2012-12-03 16:03+0000\n" -"Last-Translator: <>\n" -"Language-Team: \n" -"MIME-Version: 1.0\n" -"Content-Type: text/plain; charset=UTF-8\n" -"Content-Transfer-Encoding: \n" -"Plural-Forms: \n" - -#. module: base_crypt -#: model:ir.model,name:base_crypt.model_res_users -msgid "Users" -msgstr "" - diff --git a/addons/auth_crypt/i18n/bg.po b/addons/auth_crypt/i18n/bg.po deleted file mode 100644 index 24e74238284100de960b89182f6b0ff9ba858338..0000000000000000000000000000000000000000 --- a/addons/auth_crypt/i18n/bg.po +++ /dev/null @@ -1,76 +0,0 @@ -# Bulgarian translation for openobject-addons -# Copyright (c) 2011 Rosetta Contributors and Canonical Ltd 2011 -# This file is distributed under the same license as the openobject-addons package. -# FIRST AUTHOR <EMAIL@ADDRESS>, 2011. 
-# -msgid "" -msgstr "" -"Project-Id-Version: openobject-addons\n" -"Report-Msgid-Bugs-To: FULL NAME <EMAIL@ADDRESS>\n" -"POT-Creation-Date: 2012-12-03 16:03+0000\n" -"PO-Revision-Date: 2011-02-18 09:47+0000\n" -"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n" -"Language-Team: Bulgarian <bg@li.org>\n" -"MIME-Version: 1.0\n" -"Content-Type: text/plain; charset=UTF-8\n" -"Content-Transfer-Encoding: 8bit\n" -"X-Launchpad-Export-Date: 2012-12-04 05:53+0000\n" -"X-Generator: Launchpad (build 16335)\n" - -#. module: base_crypt -#: model:ir.model,name:base_crypt.model_res_users -msgid "Users" -msgstr "" - -#~ msgid "" -#~ "This module replaces the cleartext password in the database with a password " -#~ "hash,\n" -#~ "preventing anyone from reading the original password.\n" -#~ "For your existing user base, the removal of the cleartext passwords occurs " -#~ "the first time\n" -#~ "a user logs into the database, after installing base_crypt.\n" -#~ "After installing this module it won't be possible to recover a forgotten " -#~ "password for your\n" -#~ "users, the only solution is for an admin to set a new password.\n" -#~ "\n" -#~ "Note: installing this module does not mean you can ignore basic security " -#~ "measures,\n" -#~ "as the password is still transmitted unencrypted on the network (by the " -#~ "client),\n" -#~ "unless you are using a secure protocol such as XML-RPCS.\n" -#~ " " -#~ msgstr "" -#~ "Този модул Ð·Ð°Ð¼ÐµÐ½Ñ Ð¿Ð°Ñ€Ð¾Ð»Ð¸Ñ‚Ðµ в чиÑÑ‚ текÑÑ‚ в базата данни Ñ Ñ…ÐµÑˆÐ¸Ñ€Ð°Ð½Ð¸ такива,\n" -#~ " за предотвратÑване прочита на оригиналната парола.\n" -#~ " За ÑъщеÑтвуващата потребителÑка база, когато премахването на паролите в " -#~ "чиÑÑ‚ текÑÑ‚ Ñе Ñлучва за първи път,\n" -#~ " влизане на потребител Ñтава, Ñлед инÑталиране на base_crypt.\n" -#~ " След като инÑталирате този модул нÑма да бъде възможно да Ñе възÑтанови " -#~ "забравена парола за\n" -#~ " потребители, единÑтвеното решение е админиÑтратор, да зададе нова парола.\n" -#~ "\n" -#~ " Забележка: 
инÑталиране на този модул не значи, че може да пренебрегне " -#~ "оÑновните мерки за ÑигурноÑÑ‚,\n" -#~ " като парола вÑе още е изпратена в прав текÑÑ‚ в мрежата (от клиента),\n" -#~ " оÑвен ако не използвате защитен протокол, като XML-RPCS.\n" -#~ " " - -#, python-format -#~ msgid "Error" -#~ msgstr "Грешка" - -#~ msgid "Base - Password Encryption" -#~ msgstr "База - Криптиране на пароли" - -#, python-format -#~ msgid "Please specify the password !" -#~ msgstr "ÐœÐ¾Ð»Ñ Ð¸Ð·Ð±ÐµÑ€ÐµÑ‚Ðµ парола!" - -#~ msgid "The chosen company is not in the allowed companies for this user" -#~ msgstr "Избраната фирма не е измежду разрешените фирми за този потребител" - -#~ msgid "res.users" -#~ msgstr "res.users" - -#~ msgid "You can not have two users with the same login !" -#~ msgstr "Ðе може да има двама потребители Ñ ÐµÐ´Ð¸Ð½ и Ñъщ \"логин\"!" diff --git a/addons/auth_crypt/i18n/ca.po b/addons/auth_crypt/i18n/ca.po deleted file mode 100644 index 2d08f1ab47472f07f27a419f51db3394231e029f..0000000000000000000000000000000000000000 --- a/addons/auth_crypt/i18n/ca.po +++ /dev/null @@ -1,78 +0,0 @@ -# Catalan translation for openobject-addons -# Copyright (c) 2011 Rosetta Contributors and Canonical Ltd 2011 -# This file is distributed under the same license as the openobject-addons package. -# FIRST AUTHOR <EMAIL@ADDRESS>, 2011. -# -msgid "" -msgstr "" -"Project-Id-Version: openobject-addons\n" -"Report-Msgid-Bugs-To: FULL NAME <EMAIL@ADDRESS>\n" -"POT-Creation-Date: 2012-12-03 16:03+0000\n" -"PO-Revision-Date: 2011-03-26 18:08+0000\n" -"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n" -"Language-Team: Catalan <ca@li.org>\n" -"MIME-Version: 1.0\n" -"Content-Type: text/plain; charset=UTF-8\n" -"Content-Transfer-Encoding: 8bit\n" -"X-Launchpad-Export-Date: 2012-12-04 05:53+0000\n" -"X-Generator: Launchpad (build 16335)\n" - -#. 
module: base_crypt -#: model:ir.model,name:base_crypt.model_res_users -msgid "Users" -msgstr "" - -#~ msgid "" -#~ "This module replaces the cleartext password in the database with a password " -#~ "hash,\n" -#~ "preventing anyone from reading the original password.\n" -#~ "For your existing user base, the removal of the cleartext passwords occurs " -#~ "the first time\n" -#~ "a user logs into the database, after installing base_crypt.\n" -#~ "After installing this module it won't be possible to recover a forgotten " -#~ "password for your\n" -#~ "users, the only solution is for an admin to set a new password.\n" -#~ "\n" -#~ "Note: installing this module does not mean you can ignore basic security " -#~ "measures,\n" -#~ "as the password is still transmitted unencrypted on the network (by the " -#~ "client),\n" -#~ "unless you are using a secure protocol such as XML-RPCS.\n" -#~ " " -#~ msgstr "" -#~ "Aquest mòdul substitueix la contrasenya en text pla per un hash codificat,\n" -#~ "prevenint que algú pugui llegir la contrasenya original.\n" -#~ "Per a un usuari existent, l'esborrat de la contrasenya en text pla es " -#~ "realitza la primera\n" -#~ "vegada que l'usuari es connecta després d'instal·lar base_crypt.\n" -#~ "Després d'instal·lar aquest mòdul, els usuaris no podran recuperar la seva " -#~ "contrasenya,\n" -#~ "un administrador haurà d'introduir una nova contrasenya.\n" -#~ "\n" -#~ "Nota: Instal·lar aquest mòdul no significa que podeu ignorar les mesures " -#~ "bà siques de seguretat,\n" -#~ "perquè la contrasenya és enviada sense codificar pel client,\n" -#~ "a menys que utilitzeu un protocol segur com XML-RPCS.\n" -#~ " " - -#, python-format -#~ msgid "Please specify the password !" -#~ msgstr "Si us plau, escriviu una contrasenya!" 
- -#~ msgid "The chosen company is not in the allowed companies for this user" -#~ msgstr "" -#~ "La companyia seleccionada no està en les companyies permeses per aquest " -#~ "usuari" - -#~ msgid "res.users" -#~ msgstr "res.usuaris" - -#, python-format -#~ msgid "Error" -#~ msgstr "Error" - -#~ msgid "Base - Password Encryption" -#~ msgstr "Base - Encriptació de la Contrasenya" - -#~ msgid "You can not have two users with the same login !" -#~ msgstr "No podeu tenir dos usuaris amb el mateix identificador d'usuari!" diff --git a/addons/auth_crypt/i18n/el.po b/addons/auth_crypt/i18n/el.po deleted file mode 100644 index 4adc016c3e1f4884f701ab681553cef7b9a103c6..0000000000000000000000000000000000000000 --- a/addons/auth_crypt/i18n/el.po +++ /dev/null @@ -1,24 +0,0 @@ -# Translation of OpenERP Server. -# This file contains the translation of the following modules: -# * base_crypt -# -# Copyright (C) 2008,2009 P. Christeas <p_christ@hol.gr> -# <> <>, 2009. -msgid "" -msgstr "" -"Project-Id-Version: OpenERP Server 5.0.0\n" -"Report-Msgid-Bugs-To: support@openerp.com\n" -"POT-Creation-Date: 2012-12-03 16:03+0000\n" -"PO-Revision-Date: 2011-02-15 15:37+0000\n" -"Last-Translator: <> <>\n" -"Language-Team: <>\n" -"MIME-Version: 1.0\n" -"Content-Type: text/plain; charset=UTF-8\n" -"Content-Transfer-Encoding: 8bit\n" -"X-Launchpad-Export-Date: 2012-12-04 05:53+0000\n" -"X-Generator: Launchpad (build 16335)\n" - -#. module: base_crypt -#: model:ir.model,name:base_crypt.model_res_users -msgid "Users" -msgstr "" diff --git a/addons/auth_crypt/i18n/es_CL.po b/addons/auth_crypt/i18n/es_CL.po deleted file mode 100644 index 3f53b8b8b883474643d2d8e4c165afee5489fe62..0000000000000000000000000000000000000000 --- a/addons/auth_crypt/i18n/es_CL.po +++ /dev/null @@ -1,80 +0,0 @@ -# Spanish (Chile) translation for openobject-addons -# Copyright (c) 2011 Rosetta Contributors and Canonical Ltd 2011 -# This file is distributed under the same license as the openobject-addons package. 
-# FIRST AUTHOR <EMAIL@ADDRESS>, 2011. -# -msgid "" -msgstr "" -"Project-Id-Version: openobject-addons\n" -"Report-Msgid-Bugs-To: FULL NAME <EMAIL@ADDRESS>\n" -"POT-Creation-Date: 2012-12-03 16:03+0000\n" -"PO-Revision-Date: 2011-10-03 17:17+0000\n" -"Last-Translator: David Acevedo Toledo <Unknown>\n" -"Language-Team: Spanish (Chile) <es_CL@li.org>\n" -"MIME-Version: 1.0\n" -"Content-Type: text/plain; charset=UTF-8\n" -"Content-Transfer-Encoding: 8bit\n" -"X-Launchpad-Export-Date: 2012-12-04 05:53+0000\n" -"X-Generator: Launchpad (build 16335)\n" - -#. module: base_crypt -#: model:ir.model,name:base_crypt.model_res_users -msgid "Users" -msgstr "" - -#, python-format -#~ msgid "Error" -#~ msgstr "Error!" - -#~ msgid "res.users" -#~ msgstr "res.users" - -#~ msgid "Base - Password Encryption" -#~ msgstr "Base - Encriptación de la Contraseña" - -#, python-format -#~ msgid "Please specify the password !" -#~ msgstr "¡Por favor, escriba una contraseña!" - -#~ msgid "The chosen company is not in the allowed companies for this user" -#~ msgstr "" -#~ "La compañÃa seleccionada no está dentro de las compañÃas autorizadas para " -#~ "este usuario" - -#~ msgid "You can not have two users with the same login !" -#~ msgstr "¡No puede tener dos usuarios con el mismo nombre!" 
- -#~ msgid "" -#~ "This module replaces the cleartext password in the database with a password " -#~ "hash,\n" -#~ "preventing anyone from reading the original password.\n" -#~ "For your existing user base, the removal of the cleartext passwords occurs " -#~ "the first time\n" -#~ "a user logs into the database, after installing base_crypt.\n" -#~ "After installing this module it won't be possible to recover a forgotten " -#~ "password for your\n" -#~ "users, the only solution is for an admin to set a new password.\n" -#~ "\n" -#~ "Note: installing this module does not mean you can ignore basic security " -#~ "measures,\n" -#~ "as the password is still transmitted unencrypted on the network (by the " -#~ "client),\n" -#~ "unless you are using a secure protocol such as XML-RPCS.\n" -#~ " " -#~ msgstr "" -#~ "Este módulo sustituye la contraseña escrita en texto plano por una " -#~ "codificada,\n" -#~ "previniendo que alguien pueda leer la contraseña original.\n" -#~ "Para un usuario existente, el sustitución de la contraseña en texto plano se " -#~ "realiza la primera vez\n" -#~ "que el usuario se conecte después de instalar base_crypt.\n" -#~ "Después de instalar este módulo los usuarios no podrán recuperar su " -#~ "contraseña olvidada,\n" -#~ "un administrador tendrá que cambiarla por una nueva.\n" -#~ "\n" -#~ "Nota: instalar este módulo no significa que pueda ignorar las medidas " -#~ "básicas de seguridad,\n" -#~ "como la contraseña que es enviada por el cliente que sigue sin ser " -#~ "codificada en la red,\n" -#~ "a menos que utilice un protocolo seguro como XML-RPCS.\n" -#~ " " diff --git a/addons/auth_crypt/i18n/es_CR.po b/addons/auth_crypt/i18n/es_CR.po deleted file mode 100644 index 35f8372f4c71b9784bd5314529bf6ae63cbd0dbf..0000000000000000000000000000000000000000 --- a/addons/auth_crypt/i18n/es_CR.po +++ /dev/null @@ -1,79 +0,0 @@ -# Spanish translation for openobject-addons -# Copyright (c) 2011 Rosetta Contributors and Canonical Ltd 2011 -# This file 
is distributed under the same license as the openobject-addons package. -# FIRST AUTHOR <EMAIL@ADDRESS>, 2011. -# -msgid "" -msgstr "" -"Project-Id-Version: openobject-addons\n" -"Report-Msgid-Bugs-To: \n" -"POT-Creation-Date: 2012-12-03 16:03+0000\n" -"PO-Revision-Date: 2012-02-13 19:04+0000\n" -"Last-Translator: Carlos Vásquez (CLEARCORP) " -"<carlos.vasquez@clearcorp.co.cr>\n" -"Language-Team: Spanish <es@li.org>\n" -"MIME-Version: 1.0\n" -"Content-Type: text/plain; charset=UTF-8\n" -"Content-Transfer-Encoding: 8bit\n" -"X-Launchpad-Export-Date: 2012-12-04 05:53+0000\n" -"X-Generator: Launchpad (build 16335)\n" -"Language: es\n" - -#. module: base_crypt -#: model:ir.model,name:base_crypt.model_res_users -msgid "Users" -msgstr "" - -#~ msgid "res.users" -#~ msgstr "res.usuarios" - -#~ msgid "You can not have two users with the same login !" -#~ msgstr "¡No puede tener dos usuarios con el mismo identificador de usuario!" - -#~ msgid "The chosen company is not in the allowed companies for this user" -#~ msgstr "" -#~ "La compañÃa seleccionada no está autorizada como compañÃa para este usuario" - -#, python-format -#~ msgid "Please specify the password !" -#~ msgstr "¡Por favor, escriba una contraseña!" 
- -#, python-format -#~ msgid "Error" -#~ msgstr "Error" - -#~ msgid "" -#~ "This module replaces the cleartext password in the database with a password " -#~ "hash,\n" -#~ "preventing anyone from reading the original password.\n" -#~ "For your existing user base, the removal of the cleartext passwords occurs " -#~ "the first time\n" -#~ "a user logs into the database, after installing base_crypt.\n" -#~ "After installing this module it won't be possible to recover a forgotten " -#~ "password for your\n" -#~ "users, the only solution is for an admin to set a new password.\n" -#~ "\n" -#~ "Note: installing this module does not mean you can ignore basic security " -#~ "measures,\n" -#~ "as the password is still transmitted unencrypted on the network (by the " -#~ "client),\n" -#~ "unless you are using a secure protocol such as XML-RPCS.\n" -#~ " " -#~ msgstr "" -#~ "Este módulo sustituye la contraseña en texto plano por un hash codificado,\n" -#~ "previniendo que alguien pueda leer la contraseña original.\n" -#~ "Para un usuario existente, el borrado de la contraseña en texto plano se " -#~ "realiza la primera vez\n" -#~ "que el usuario se conecte después de instalar base_crypt.\n" -#~ "Después de instalar este módulo los usuarios no podrán recuperar su " -#~ "contraseña,\n" -#~ "un administrador tendrá que introducir una nueva contraseña.\n" -#~ "\n" -#~ "Nota: instalar este módulo no significa que pueda ignorar las medidas " -#~ "básicas de seguridad,\n" -#~ "porque la contraseña es enviada sin codificar por el cliente,\n" -#~ "a menos que utilice un protocolo seguro como XML-RPCS.\n" -#~ " " - -#~ msgid "Base - Password Encryption" -#~ msgstr "Base - Encriptación de la Contraseña" diff --git a/addons/auth_crypt/i18n/es_MX.po b/addons/auth_crypt/i18n/es_MX.po deleted file mode 100644 index be7c1499288f5e06caf140bea2cbdb35d24ea9cd..0000000000000000000000000000000000000000 --- a/addons/auth_crypt/i18n/es_MX.po +++ /dev/null @@ -1,86 +0,0 @@ -# Spanish translation for 
openobject-addons -# Copyright (c) 2011 Rosetta Contributors and Canonical Ltd 2011 -# This file is distributed under the same license as the openobject-addons package. -# FIRST AUTHOR <EMAIL@ADDRESS>, 2011. -# -msgid "" -msgstr "" -"Project-Id-Version: openobject-addons\n" -"Report-Msgid-Bugs-To: FULL NAME <EMAIL@ADDRESS>\n" -"POT-Creation-Date: 2011-01-11 11:14+0000\n" -"PO-Revision-Date: 2011-02-15 15:37+0000\n" -"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n" -"Language-Team: Spanish <es@li.org>\n" -"MIME-Version: 1.0\n" -"Content-Type: text/plain; charset=UTF-8\n" -"Content-Transfer-Encoding: 8bit\n" -"X-Launchpad-Export-Date: 2011-09-05 05:58+0000\n" -"X-Generator: Launchpad (build 13830)\n" - -#. module: base_crypt -#: sql_constraint:res.users:0 -msgid "You can not have two users with the same login !" -msgstr "¡No puede tener dos usuarios con el mismo identificador de usuario!" - -#. module: base_crypt -#: model:ir.model,name:base_crypt.model_res_users -msgid "res.users" -msgstr "res.usuarios" - -#. module: base_crypt -#: constraint:res.users:0 -msgid "The chosen company is not in the allowed companies for this user" -msgstr "" -"La compañÃa seleccionada no está autorizada como compañÃa para este usuario" - -#. module: base_crypt -#: code:addons/base_crypt/crypt.py:132 -#, python-format -msgid "Please specify the password !" -msgstr "¡Por favor, escriba una contraseña!" - -#. module: base_crypt -#: model:ir.module.module,shortdesc:base_crypt.module_meta_information -msgid "Base - Password Encryption" -msgstr "Base - Encriptación de la Contraseña" - -#. module: base_crypt -#: code:addons/base_crypt/crypt.py:132 -#, python-format -msgid "Error" -msgstr "Error" - -#. 
module: base_crypt -#: model:ir.module.module,description:base_crypt.module_meta_information -msgid "" -"This module replaces the cleartext password in the database with a password " -"hash,\n" -"preventing anyone from reading the original password.\n" -"For your existing user base, the removal of the cleartext passwords occurs " -"the first time\n" -"a user logs into the database, after installing base_crypt.\n" -"After installing this module it won't be possible to recover a forgotten " -"password for your\n" -"users, the only solution is for an admin to set a new password.\n" -"\n" -"Note: installing this module does not mean you can ignore basic security " -"measures,\n" -"as the password is still transmitted unencrypted on the network (by the " -"client),\n" -"unless you are using a secure protocol such as XML-RPCS.\n" -" " -msgstr "" -"Este módulo sustituye la contraseña en texto plano por un hash codificado,\n" -"previniendo que alguien pueda leer la contraseña original.\n" -"Para un usuario existente, el borrado de la contraseña en texto plano se " -"realiza la primera vez\n" -"que el usuario se conecte después de instalar base_crypt.\n" -"Después de instalar este módulo los usuarios no podrán recuperar su " -"contraseña,\n" -"un administrador tendrá que introducir una nueva contraseña.\n" -"\n" -"Nota: instalar este módulo no significa que pueda ignorar las medidas " -"básicas de seguridad,\n" -"porque la contraseña es enviada sin codificar por el cliente,\n" -"a menos que utilice un protocolo seguro como XML-RPCS.\n" -" " diff --git a/addons/auth_crypt/i18n/es_PY.po b/addons/auth_crypt/i18n/es_PY.po deleted file mode 100644 index 8e5cbcbee86f232244e9c90f26f262ca19913b5f..0000000000000000000000000000000000000000 --- a/addons/auth_crypt/i18n/es_PY.po +++ /dev/null @@ -1,78 +0,0 @@ -# Spanish (Paraguay) translation for openobject-addons -# Copyright (c) 2011 Rosetta Contributors and Canonical Ltd 2011 -# This file is distributed under the same license as the 
openobject-addons package. -# FIRST AUTHOR <EMAIL@ADDRESS>, 2011. -# -msgid "" -msgstr "" -"Project-Id-Version: openobject-addons\n" -"Report-Msgid-Bugs-To: FULL NAME <EMAIL@ADDRESS>\n" -"POT-Creation-Date: 2012-12-03 16:03+0000\n" -"PO-Revision-Date: 2011-03-08 17:36+0000\n" -"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n" -"Language-Team: Spanish (Paraguay) <es_PY@li.org>\n" -"MIME-Version: 1.0\n" -"Content-Type: text/plain; charset=UTF-8\n" -"Content-Transfer-Encoding: 8bit\n" -"X-Launchpad-Export-Date: 2012-12-04 05:53+0000\n" -"X-Generator: Launchpad (build 16335)\n" - -#. module: base_crypt -#: model:ir.model,name:base_crypt.model_res_users -msgid "Users" -msgstr "" - -#~ msgid "" -#~ "This module replaces the cleartext password in the database with a password " -#~ "hash,\n" -#~ "preventing anyone from reading the original password.\n" -#~ "For your existing user base, the removal of the cleartext passwords occurs " -#~ "the first time\n" -#~ "a user logs into the database, after installing base_crypt.\n" -#~ "After installing this module it won't be possible to recover a forgotten " -#~ "password for your\n" -#~ "users, the only solution is for an admin to set a new password.\n" -#~ "\n" -#~ "Note: installing this module does not mean you can ignore basic security " -#~ "measures,\n" -#~ "as the password is still transmitted unencrypted on the network (by the " -#~ "client),\n" -#~ "unless you are using a secure protocol such as XML-RPCS.\n" -#~ " " -#~ msgstr "" -#~ "Este módulo sustituye la contraseña en texto plano por un hash codificado,\n" -#~ "previniendo que alguien pueda leer la contraseña original.\n" -#~ "Para un usuario existente, el borrado de la contraseña en texto plano se " -#~ "realiza la primera vez\n" -#~ "que el usuario se conecte después de instalar base_crypt.\n" -#~ "Después de instalar este módulo los usuarios no podrán recuperar su " -#~ "contraseña,\n" -#~ "un administrador tendrá que introducir una nueva contraseña.\n" -#~ "\n" -#~ 
"Nota: instalar este módulo no significa que pueda ignorar las medidas " -#~ "básicas de seguridad,\n" -#~ "porque la contraseña es enviada sin codificar por el cliente,\n" -#~ "a menos que utilice un protocolo seguro como XML-RPCS.\n" -#~ " " - -#, python-format -#~ msgid "Error" -#~ msgstr "Error!" - -#~ msgid "Base - Password Encryption" -#~ msgstr "Base - Encriptación de la Contraseña" - -#, python-format -#~ msgid "Please specify the password !" -#~ msgstr "¡Por favor, escriba una contraseña!" - -#~ msgid "The chosen company is not in the allowed companies for this user" -#~ msgstr "" -#~ "La compañÃa seleccionada no está en las compañÃas permitidas para este " -#~ "usuario" - -#~ msgid "res.users" -#~ msgstr "res.users" - -#~ msgid "You can not have two users with the same login !" -#~ msgstr "¡No puede tener dos usuarios con el mismo identificador de usuario!" diff --git a/addons/auth_crypt/i18n/es_VE.po b/addons/auth_crypt/i18n/es_VE.po deleted file mode 100644 index be7c1499288f5e06caf140bea2cbdb35d24ea9cd..0000000000000000000000000000000000000000 --- a/addons/auth_crypt/i18n/es_VE.po +++ /dev/null @@ -1,86 +0,0 @@ -# Spanish translation for openobject-addons -# Copyright (c) 2011 Rosetta Contributors and Canonical Ltd 2011 -# This file is distributed under the same license as the openobject-addons package. -# FIRST AUTHOR <EMAIL@ADDRESS>, 2011. -# -msgid "" -msgstr "" -"Project-Id-Version: openobject-addons\n" -"Report-Msgid-Bugs-To: FULL NAME <EMAIL@ADDRESS>\n" -"POT-Creation-Date: 2011-01-11 11:14+0000\n" -"PO-Revision-Date: 2011-02-15 15:37+0000\n" -"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n" -"Language-Team: Spanish <es@li.org>\n" -"MIME-Version: 1.0\n" -"Content-Type: text/plain; charset=UTF-8\n" -"Content-Transfer-Encoding: 8bit\n" -"X-Launchpad-Export-Date: 2011-09-05 05:58+0000\n" -"X-Generator: Launchpad (build 13830)\n" - -#. module: base_crypt -#: sql_constraint:res.users:0 -msgid "You can not have two users with the same login !" 
-msgstr "¡No puede tener dos usuarios con el mismo identificador de usuario!" - -#. module: base_crypt -#: model:ir.model,name:base_crypt.model_res_users -msgid "res.users" -msgstr "res.usuarios" - -#. module: base_crypt -#: constraint:res.users:0 -msgid "The chosen company is not in the allowed companies for this user" -msgstr "" -"La compañÃa seleccionada no está autorizada como compañÃa para este usuario" - -#. module: base_crypt -#: code:addons/base_crypt/crypt.py:132 -#, python-format -msgid "Please specify the password !" -msgstr "¡Por favor, escriba una contraseña!" - -#. module: base_crypt -#: model:ir.module.module,shortdesc:base_crypt.module_meta_information -msgid "Base - Password Encryption" -msgstr "Base - Encriptación de la Contraseña" - -#. module: base_crypt -#: code:addons/base_crypt/crypt.py:132 -#, python-format -msgid "Error" -msgstr "Error" - -#. module: base_crypt -#: model:ir.module.module,description:base_crypt.module_meta_information -msgid "" -"This module replaces the cleartext password in the database with a password " -"hash,\n" -"preventing anyone from reading the original password.\n" -"For your existing user base, the removal of the cleartext passwords occurs " -"the first time\n" -"a user logs into the database, after installing base_crypt.\n" -"After installing this module it won't be possible to recover a forgotten " -"password for your\n" -"users, the only solution is for an admin to set a new password.\n" -"\n" -"Note: installing this module does not mean you can ignore basic security " -"measures,\n" -"as the password is still transmitted unencrypted on the network (by the " -"client),\n" -"unless you are using a secure protocol such as XML-RPCS.\n" -" " -msgstr "" -"Este módulo sustituye la contraseña en texto plano por un hash codificado,\n" -"previniendo que alguien pueda leer la contraseña original.\n" -"Para un usuario existente, el borrado de la contraseña en texto plano se " -"realiza la primera vez\n" -"que el usuario 
se conecte después de instalar base_crypt.\n" -"Después de instalar este módulo los usuarios no podrán recuperar su " -"contraseña,\n" -"un administrador tendrá que introducir una nueva contraseña.\n" -"\n" -"Nota: instalar este módulo no significa que pueda ignorar las medidas " -"básicas de seguridad,\n" -"porque la contraseña es enviada sin codificar por el cliente,\n" -"a menos que utilice un protocolo seguro como XML-RPCS.\n" -" " diff --git a/addons/auth_crypt/i18n/fa.po b/addons/auth_crypt/i18n/fa.po deleted file mode 100644 index e4291629d6f06697d868b2ab299736f6a8b138db..0000000000000000000000000000000000000000 --- a/addons/auth_crypt/i18n/fa.po +++ /dev/null @@ -1,23 +0,0 @@ -# Persian translation for openobject-addons -# Copyright (c) 2011 Rosetta Contributors and Canonical Ltd 2011 -# This file is distributed under the same license as the openobject-addons package. -# FIRST AUTHOR <EMAIL@ADDRESS>, 2011. -# -msgid "" -msgstr "" -"Project-Id-Version: openobject-addons\n" -"Report-Msgid-Bugs-To: FULL NAME <EMAIL@ADDRESS>\n" -"POT-Creation-Date: 2012-12-03 16:03+0000\n" -"PO-Revision-Date: 2011-12-18 20:12+0000\n" -"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n" -"Language-Team: Persian <fa@li.org>\n" -"MIME-Version: 1.0\n" -"Content-Type: text/plain; charset=UTF-8\n" -"Content-Transfer-Encoding: 8bit\n" -"X-Launchpad-Export-Date: 2012-12-04 05:53+0000\n" -"X-Generator: Launchpad (build 16335)\n" - -#. module: base_crypt -#: model:ir.model,name:base_crypt.model_res_users -msgid "Users" -msgstr "" diff --git a/addons/auth_crypt/i18n/fi.po b/addons/auth_crypt/i18n/fi.po deleted file mode 100644 index 39221ce5d69c4d63eba60ab8bc42bd5cfb60d63a..0000000000000000000000000000000000000000 --- a/addons/auth_crypt/i18n/fi.po +++ /dev/null @@ -1,40 +0,0 @@ -# Finnish translation for openobject-addons -# Copyright (c) 2011 Rosetta Contributors and Canonical Ltd 2011 -# This file is distributed under the same license as the openobject-addons package. 
-# FIRST AUTHOR <EMAIL@ADDRESS>, 2011. -# -msgid "" -msgstr "" -"Project-Id-Version: openobject-addons\n" -"Report-Msgid-Bugs-To: FULL NAME <EMAIL@ADDRESS>\n" -"POT-Creation-Date: 2012-12-03 16:03+0000\n" -"PO-Revision-Date: 2011-06-08 10:57+0000\n" -"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n" -"Language-Team: Finnish <fi@li.org>\n" -"MIME-Version: 1.0\n" -"Content-Type: text/plain; charset=UTF-8\n" -"Content-Transfer-Encoding: 8bit\n" -"X-Launchpad-Export-Date: 2012-12-04 05:53+0000\n" -"X-Generator: Launchpad (build 16335)\n" - -#. module: base_crypt -#: model:ir.model,name:base_crypt.model_res_users -msgid "Users" -msgstr "" - -#, python-format -#~ msgid "Error" -#~ msgstr "Virhe" - -#~ msgid "Base - Password Encryption" -#~ msgstr "Base - Salasanan kryptaus" - -#, python-format -#~ msgid "Please specify the password !" -#~ msgstr "Määrittele salasana !" - -#~ msgid "The chosen company is not in the allowed companies for this user" -#~ msgstr "Valittu yritys ei ole sallittu tälle käyttäjälle" - -#~ msgid "You can not have two users with the same login !" -#~ msgstr "Kahdella eri käyttäjällä ei voi olla samaa käyttäjätunnusta!" diff --git a/addons/auth_crypt/i18n/gu.po b/addons/auth_crypt/i18n/gu.po deleted file mode 100644 index 6d506cb357326eadbf91cfca3671cb46dfdc66a5..0000000000000000000000000000000000000000 --- a/addons/auth_crypt/i18n/gu.po +++ /dev/null @@ -1,37 +0,0 @@ -# Gujarati translation for openobject-addons -# Copyright (c) 2012 Rosetta Contributors and Canonical Ltd 2012 -# This file is distributed under the same license as the openobject-addons package. -# FIRST AUTHOR <EMAIL@ADDRESS>, 2012. 
-# -msgid "" -msgstr "" -"Project-Id-Version: openobject-addons\n" -"Report-Msgid-Bugs-To: FULL NAME <EMAIL@ADDRESS>\n" -"POT-Creation-Date: 2012-12-03 16:03+0000\n" -"PO-Revision-Date: 2012-04-19 08:51+0000\n" -"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n" -"Language-Team: Gujarati <gu@li.org>\n" -"MIME-Version: 1.0\n" -"Content-Type: text/plain; charset=UTF-8\n" -"Content-Transfer-Encoding: 8bit\n" -"X-Launchpad-Export-Date: 2012-12-04 05:53+0000\n" -"X-Generator: Launchpad (build 16335)\n" - -#. module: base_crypt -#: model:ir.model,name:base_crypt.model_res_users -msgid "Users" -msgstr "" - -#~ msgid "You can not have two users with the same login !" -#~ msgstr "તમે બે વપરાશકરà«àª¤àª¾àª“ને àªàª• જ લોગીન ન કરી શકો!" - -#, python-format -#~ msgid "Please specify the password !" -#~ msgstr "પાસવરà«àª¡ સà«àªªàª·à«àªŸ કરો!" - -#~ msgid "The chosen company is not in the allowed companies for this user" -#~ msgstr "પસંદ કરેલ કંપની માનà«àª¯ કંપનીઓમાં આ વપરાશકરà«àª¤àª¾ માટે નથી" - -#, python-format -#~ msgid "Error" -#~ msgstr "àªà«‚લ" diff --git a/addons/auth_crypt/i18n/id.po b/addons/auth_crypt/i18n/id.po deleted file mode 100644 index 035480b7a849008716990608808817688d7b282a..0000000000000000000000000000000000000000 --- a/addons/auth_crypt/i18n/id.po +++ /dev/null @@ -1,23 +0,0 @@ -# Indonesian translation for openobject-addons -# Copyright (c) 2012 Rosetta Contributors and Canonical Ltd 2012 -# This file is distributed under the same license as the openobject-addons package. -# FIRST AUTHOR <EMAIL@ADDRESS>, 2012. 
-# -msgid "" -msgstr "" -"Project-Id-Version: openobject-addons\n" -"Report-Msgid-Bugs-To: FULL NAME <EMAIL@ADDRESS>\n" -"POT-Creation-Date: 2012-12-03 16:03+0000\n" -"PO-Revision-Date: 2012-12-09 08:46+0000\n" -"Last-Translator: Budhi Hartono <klikmaxima@gmail.com>\n" -"Language-Team: Indonesian <id@li.org>\n" -"MIME-Version: 1.0\n" -"Content-Type: text/plain; charset=UTF-8\n" -"Content-Transfer-Encoding: 8bit\n" -"X-Launchpad-Export-Date: 2012-12-10 04:39+0000\n" -"X-Generator: Launchpad (build 16341)\n" - -#. module: base_crypt -#: model:ir.model,name:base_crypt.model_res_users -msgid "Users" -msgstr "Pengguna" diff --git a/addons/auth_crypt/i18n/ja.po b/addons/auth_crypt/i18n/ja.po deleted file mode 100644 index d92df144a2c3a827449b03cb80e3d11c4e3a6e86..0000000000000000000000000000000000000000 --- a/addons/auth_crypt/i18n/ja.po +++ /dev/null @@ -1,40 +0,0 @@ -# Japanese translation for openobject-addons -# Copyright (c) 2012 Rosetta Contributors and Canonical Ltd 2012 -# This file is distributed under the same license as the openobject-addons package. -# FIRST AUTHOR <EMAIL@ADDRESS>, 2012. -# -msgid "" -msgstr "" -"Project-Id-Version: openobject-addons\n" -"Report-Msgid-Bugs-To: FULL NAME <EMAIL@ADDRESS>\n" -"POT-Creation-Date: 2012-12-03 16:03+0000\n" -"PO-Revision-Date: 2012-04-01 06:05+0000\n" -"Last-Translator: Masaki Yamaya <Unknown>\n" -"Language-Team: Japanese <ja@li.org>\n" -"MIME-Version: 1.0\n" -"Content-Type: text/plain; charset=UTF-8\n" -"Content-Transfer-Encoding: 8bit\n" -"X-Launchpad-Export-Date: 2012-12-04 05:53+0000\n" -"X-Generator: Launchpad (build 16335)\n" - -#. module: base_crypt -#: model:ir.model,name:base_crypt.model_res_users -msgid "Users" -msgstr "" - -#~ msgid "You can not have two users with the same login !" 
-#~ msgstr "åŒä¸€ã®ãƒã‚°ã‚¤ãƒ³ã«ï¼’ã¤ã®ãƒ¦ãƒ¼ã‚¶ã‚’æŒã¤ã“ã¨ã¯ã§ãã¾ã›ã‚“ï¼" - -#, python-format -#~ msgid "Error" -#~ msgstr "エラー" - -#~ msgid "The chosen company is not in the allowed companies for this user" -#~ msgstr "é¸æŠžã—ãŸä¼šç¤¾ã¯ã€ã“ã®ãƒ¦ãƒ¼ã‚¶ã«è¨±ã•ã‚ŒãŸä¼šç¤¾ã§ã¯ã‚ã‚Šã¾ã›ã‚“。" - -#~ msgid "res.users" -#~ msgstr "res.users" - -#, python-format -#~ msgid "Please specify the password !" -#~ msgstr "パスワードを指定ã—ã¦ãã ã•ã„ï¼" diff --git a/addons/auth_crypt/i18n/lv.po b/addons/auth_crypt/i18n/lv.po deleted file mode 100644 index 4d34fec70a99abdedebb857822639953600ba50e..0000000000000000000000000000000000000000 --- a/addons/auth_crypt/i18n/lv.po +++ /dev/null @@ -1,40 +0,0 @@ -# Latvian translation for openobject-addons -# Copyright (c) 2012 Rosetta Contributors and Canonical Ltd 2012 -# This file is distributed under the same license as the openobject-addons package. -# FIRST AUTHOR <EMAIL@ADDRESS>, 2012. -# -msgid "" -msgstr "" -"Project-Id-Version: openobject-addons\n" -"Report-Msgid-Bugs-To: FULL NAME <EMAIL@ADDRESS>\n" -"POT-Creation-Date: 2012-12-03 16:03+0000\n" -"PO-Revision-Date: 2012-10-16 16:11+0000\n" -"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n" -"Language-Team: Latvian <lv@li.org>\n" -"MIME-Version: 1.0\n" -"Content-Type: text/plain; charset=UTF-8\n" -"Content-Transfer-Encoding: 8bit\n" -"X-Launchpad-Export-Date: 2012-12-04 05:53+0000\n" -"X-Generator: Launchpad (build 16335)\n" - -#. module: base_crypt -#: model:ir.model,name:base_crypt.model_res_users -msgid "Users" -msgstr "" - -#~ msgid "You can not have two users with the same login !" -#~ msgstr "Nevar bÅ«t divi lietotÄji ar vienÄdu pieteikuma vÄrdu!" - -#, python-format -#~ msgid "Error" -#~ msgstr "Kļūda" - -#~ msgid "The chosen company is not in the allowed companies for this user" -#~ msgstr "IzvÄ“lÄ“tais uzņēmums nav Å¡im lietotÄjam atļauto uzņēmumu sarakstÄ" - -#~ msgid "res.users" -#~ msgstr "res.users" - -#, python-format -#~ msgid "Please specify the password !" 
-#~ msgstr "LÅ«dzu norÄdiet paroli!" diff --git a/addons/auth_crypt/i18n/nb.po b/addons/auth_crypt/i18n/nb.po deleted file mode 100644 index c9e2d8709d1904acadd205de93fdc9c9fa12a049..0000000000000000000000000000000000000000 --- a/addons/auth_crypt/i18n/nb.po +++ /dev/null @@ -1,44 +0,0 @@ -# Norwegian Bokmal translation for openobject-addons -# Copyright (c) 2011 Rosetta Contributors and Canonical Ltd 2011 -# This file is distributed under the same license as the openobject-addons package. -# FIRST AUTHOR <EMAIL@ADDRESS>, 2011. -# -msgid "" -msgstr "" -"Project-Id-Version: openobject-addons\n" -"Report-Msgid-Bugs-To: FULL NAME <EMAIL@ADDRESS>\n" -"POT-Creation-Date: 2012-12-03 16:03+0000\n" -"PO-Revision-Date: 2012-12-04 20:38+0000\n" -"Last-Translator: Kaare Pettersen <Unknown>\n" -"Language-Team: Norwegian Bokmal <nb@li.org>\n" -"MIME-Version: 1.0\n" -"Content-Type: text/plain; charset=UTF-8\n" -"Content-Transfer-Encoding: 8bit\n" -"X-Launchpad-Export-Date: 2012-12-05 05:20+0000\n" -"X-Generator: Launchpad (build 16335)\n" - -#. module: base_crypt -#: model:ir.model,name:base_crypt.model_res_users -msgid "Users" -msgstr "Brukere." - -#, python-format -#~ msgid "Error" -#~ msgstr "Feil" - -#~ msgid "Base - Password Encryption" -#~ msgstr "Basis - Passord kryptering" - -#, python-format -#~ msgid "Please specify the password !" -#~ msgstr "Vennligst angi passordet !" - -#~ msgid "The chosen company is not in the allowed companies for this user" -#~ msgstr "" -#~ "Det valgte firmaet er ikke i listen over tillatte firmaer for denne brukeren" - -#~ msgid "res.users" -#~ msgstr "res.users" - -#~ msgid "You can not have two users with the same login !" -#~ msgstr "Du kan ikke ha to brukere med samme login !" 
diff --git a/addons/auth_crypt/i18n/oc.po b/addons/auth_crypt/i18n/oc.po deleted file mode 100644 index a6cd3aa45a12b89730b9d3e257deda5a5f5c112b..0000000000000000000000000000000000000000 --- a/addons/auth_crypt/i18n/oc.po +++ /dev/null @@ -1,45 +0,0 @@ -# Occitan (post 1500) translation for openobject-addons -# Copyright (c) 2011 Rosetta Contributors and Canonical Ltd 2011 -# This file is distributed under the same license as the openobject-addons package. -# FIRST AUTHOR <EMAIL@ADDRESS>, 2011. -# -msgid "" -msgstr "" -"Project-Id-Version: openobject-addons\n" -"Report-Msgid-Bugs-To: FULL NAME <EMAIL@ADDRESS>\n" -"POT-Creation-Date: 2012-12-03 16:03+0000\n" -"PO-Revision-Date: 2011-11-20 09:17+0000\n" -"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n" -"Language-Team: Occitan (post 1500) <oc@li.org>\n" -"MIME-Version: 1.0\n" -"Content-Type: text/plain; charset=UTF-8\n" -"Content-Transfer-Encoding: 8bit\n" -"X-Launchpad-Export-Date: 2012-12-04 05:53+0000\n" -"X-Generator: Launchpad (build 16335)\n" - -#. module: base_crypt -#: model:ir.model,name:base_crypt.model_res_users -msgid "Users" -msgstr "" - -#, python-format -#~ msgid "Error" -#~ msgstr "Error" - -#~ msgid "Base - Password Encryption" -#~ msgstr "Basa - Chifratge del senhal" - -#, python-format -#~ msgid "Please specify the password !" -#~ msgstr "Entratz un senhal" - -#~ msgid "The chosen company is not in the allowed companies for this user" -#~ msgstr "" -#~ "La societat causida fa pas partida de las societats autorizadas per aqueste " -#~ "utilizaire" - -#~ msgid "res.users" -#~ msgstr "res.users" - -#~ msgid "You can not have two users with the same login !" -#~ msgstr "Podètz pas aver dos utilizaires amb lo meteis identificant !" 
diff --git a/addons/auth_crypt/i18n/sk.po b/addons/auth_crypt/i18n/sk.po deleted file mode 100644 index b082277e875688ba1281dc295a78f554c154b785..0000000000000000000000000000000000000000 --- a/addons/auth_crypt/i18n/sk.po +++ /dev/null @@ -1,41 +0,0 @@ -# Slovak translation for openobject-addons -# Copyright (c) 2011 Rosetta Contributors and Canonical Ltd 2011 -# This file is distributed under the same license as the openobject-addons package. -# FIRST AUTHOR <EMAIL@ADDRESS>, 2011. -# -msgid "" -msgstr "" -"Project-Id-Version: openobject-addons\n" -"Report-Msgid-Bugs-To: FULL NAME <EMAIL@ADDRESS>\n" -"POT-Creation-Date: 2012-12-03 16:03+0000\n" -"PO-Revision-Date: 2011-02-21 08:14+0000\n" -"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n" -"Language-Team: Slovak <sk@li.org>\n" -"MIME-Version: 1.0\n" -"Content-Type: text/plain; charset=UTF-8\n" -"Content-Transfer-Encoding: 8bit\n" -"X-Launchpad-Export-Date: 2012-12-04 05:53+0000\n" -"X-Generator: Launchpad (build 16335)\n" - -#. module: base_crypt -#: model:ir.model,name:base_crypt.model_res_users -msgid "Users" -msgstr "" - -#, python-format -#~ msgid "Error" -#~ msgstr "Chyba" - -#, python-format -#~ msgid "Please specify the password !" -#~ msgstr "ProsÃm, zadajte heslo!" - -#~ msgid "The chosen company is not in the allowed companies for this user" -#~ msgstr "" -#~ "Vybraná spoloÄnosÅ¥ nie je medzi schválenými spoloÄnosÅ¥ami tohto použÃvateľa" - -#~ msgid "res.users" -#~ msgstr "res.users" - -#~ msgid "You can not have two users with the same login !" -#~ msgstr "Nemôžte maÅ¥ dvoch použÃvateľov s rovnakým pristúpovým menom!" 
diff --git a/addons/auth_crypt/i18n/sq.po b/addons/auth_crypt/i18n/sq.po deleted file mode 100644 index c57af2d287b56584dff881b669c5e691526eef9a..0000000000000000000000000000000000000000 --- a/addons/auth_crypt/i18n/sq.po +++ /dev/null @@ -1,23 +0,0 @@ -# Albanian translation for openobject-addons -# Copyright (c) 2011 Rosetta Contributors and Canonical Ltd 2011 -# This file is distributed under the same license as the openobject-addons package. -# FIRST AUTHOR <EMAIL@ADDRESS>, 2011. -# -msgid "" -msgstr "" -"Project-Id-Version: openobject-addons\n" -"Report-Msgid-Bugs-To: FULL NAME <EMAIL@ADDRESS>\n" -"POT-Creation-Date: 2012-12-03 16:03+0000\n" -"PO-Revision-Date: 2011-03-28 15:26+0000\n" -"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n" -"Language-Team: Albanian <sq@li.org>\n" -"MIME-Version: 1.0\n" -"Content-Type: text/plain; charset=UTF-8\n" -"Content-Transfer-Encoding: 8bit\n" -"X-Launchpad-Export-Date: 2012-12-04 05:53+0000\n" -"X-Generator: Launchpad (build 16335)\n" - -#. module: base_crypt -#: model:ir.model,name:base_crypt.model_res_users -msgid "Users" -msgstr "" diff --git a/addons/auth_crypt/i18n/sr@latin.po b/addons/auth_crypt/i18n/sr@latin.po deleted file mode 100644 index 9e9547f154bd3d30dc6470b464594c4e597f565c..0000000000000000000000000000000000000000 --- a/addons/auth_crypt/i18n/sr@latin.po +++ /dev/null @@ -1,76 +0,0 @@ -# Serbian Latin translation for openobject-addons -# Copyright (c) 2011 Rosetta Contributors and Canonical Ltd 2011 -# This file is distributed under the same license as the openobject-addons package. -# FIRST AUTHOR <EMAIL@ADDRESS>, 2011. 
-# -msgid "" -msgstr "" -"Project-Id-Version: openobject-addons\n" -"Report-Msgid-Bugs-To: FULL NAME <EMAIL@ADDRESS>\n" -"POT-Creation-Date: 2012-12-03 16:03+0000\n" -"PO-Revision-Date: 2011-10-05 13:25+0000\n" -"Last-Translator: Milan Milosevic <Unknown>\n" -"Language-Team: Serbian Latin <sr@latin@li.org>\n" -"MIME-Version: 1.0\n" -"Content-Type: text/plain; charset=UTF-8\n" -"Content-Transfer-Encoding: 8bit\n" -"X-Launchpad-Export-Date: 2012-12-04 05:53+0000\n" -"X-Generator: Launchpad (build 16335)\n" - -#. module: base_crypt -#: model:ir.model,name:base_crypt.model_res_users -msgid "Users" -msgstr "" - -#, python-format -#~ msgid "Error" -#~ msgstr "GreÅ¡ka" - -#~ msgid "Base - Password Encryption" -#~ msgstr "Baza - Å ifrovanje lozinke" - -#, python-format -#~ msgid "Please specify the password !" -#~ msgstr "Molimo odredite lozinku" - -#~ msgid "res.users" -#~ msgstr "res.users" - -#~ msgid "You can not have two users with the same login !" -#~ msgstr "Ne možete imati dva korisnika sa istom prijavom!" 
- -#~ msgid "" -#~ "This module replaces the cleartext password in the database with a password " -#~ "hash,\n" -#~ "preventing anyone from reading the original password.\n" -#~ "For your existing user base, the removal of the cleartext passwords occurs " -#~ "the first time\n" -#~ "a user logs into the database, after installing base_crypt.\n" -#~ "After installing this module it won't be possible to recover a forgotten " -#~ "password for your\n" -#~ "users, the only solution is for an admin to set a new password.\n" -#~ "\n" -#~ "Note: installing this module does not mean you can ignore basic security " -#~ "measures,\n" -#~ "as the password is still transmitted unencrypted on the network (by the " -#~ "client),\n" -#~ "unless you are using a secure protocol such as XML-RPCS.\n" -#~ " " -#~ msgstr "" -#~ "Ovaj modul zamenjuje vidljivu lozinku u bazi podataka znacima,\n" -#~ "Å¡to onemogućava bilo koga da proÄita originalnu lozinku.\n" -#~ "Å to se tiÄe VaÅ¡e baze podataka, ukidanje vidljive lozinke deÅ¡ava se prvi put " -#~ "poÅ¡to se korisnik prijavi u bazu podataka, po instalaciji base_crypt.\n" -#~ "Po instalaciji ovog modula neće biti moguće izmeniti zaboravljenu lozinku za " -#~ "VaÅ¡e korisnike, jedino reÅ¡enje će biti da im administrator postavi novu " -#~ "lozinku.\n" -#~ "\n" -#~ "BeleÅ¡ka: Instaliranje ovog modula ne znaÄi da možete ignorisati osnovne " -#~ "sigurnosne mere,\n" -#~ "budući da se lozinka i dalje prenosi neÅ¡ifrovana u mreži (od strane " -#~ "klijenta),\n" -#~ "ukoliko ne koristite siguran protokol kao XML-RPCS.\n" -#~ " " - -#~ msgid "The chosen company is not in the allowed companies for this user" -#~ msgstr "Odabrano preduzeće nije u dozvoljenim preduzećima za ovog korisnioka" diff --git a/addons/auth_crypt/i18n/vi.po b/addons/auth_crypt/i18n/vi.po deleted file mode 100644 index 4ad1a22fdaf833e6fe098509bec4e58879948d55..0000000000000000000000000000000000000000 --- a/addons/auth_crypt/i18n/vi.po +++ /dev/null @@ -1,80 +0,0 @@ -# 
Vietnamese translation for openobject-addons -# Copyright (c) 2011 Rosetta Contributors and Canonical Ltd 2011 -# This file is distributed under the same license as the openobject-addons package. -# FIRST AUTHOR <EMAIL@ADDRESS>, 2011. -# -msgid "" -msgstr "" -"Project-Id-Version: openobject-addons\n" -"Report-Msgid-Bugs-To: FULL NAME <EMAIL@ADDRESS>\n" -"POT-Creation-Date: 2012-12-03 16:03+0000\n" -"PO-Revision-Date: 2011-07-20 09:55+0000\n" -"Last-Translator: OpenBMS JSC <Unknown>\n" -"Language-Team: Vietnamese <vi@li.org>\n" -"MIME-Version: 1.0\n" -"Content-Type: text/plain; charset=UTF-8\n" -"Content-Transfer-Encoding: 8bit\n" -"X-Launchpad-Export-Date: 2012-12-04 05:53+0000\n" -"X-Generator: Launchpad (build 16335)\n" - -#. module: base_crypt -#: model:ir.model,name:base_crypt.model_res_users -msgid "Users" -msgstr "" - -#, python-format -#~ msgid "Error" -#~ msgstr "Lá»—i" - -#~ msgid "Base - Password Encryption" -#~ msgstr "CÆ¡ sở - Mã hóa Máºt khẩu" - -#, python-format -#~ msgid "Please specify the password !" -#~ msgstr "Vui lòng xác định máºt khẩu !" - -#~ msgid "The chosen company is not in the allowed companies for this user" -#~ msgstr "" -#~ "Công ty được lá»±a chá»n không nằm trong các công ty mà ngÆ°á»i sá» dụng nà y được " -#~ "phép" - -#~ msgid "res.users" -#~ msgstr "res.users" - -#~ msgid "You can not have two users with the same login !" -#~ msgstr "Bạn không thể có hai ngÆ°á»i sá» dụng vá»›i cùng tên đăng nháºp !" 
- -#~ msgid "" -#~ "This module replaces the cleartext password in the database with a password " -#~ "hash,\n" -#~ "preventing anyone from reading the original password.\n" -#~ "For your existing user base, the removal of the cleartext passwords occurs " -#~ "the first time\n" -#~ "a user logs into the database, after installing base_crypt.\n" -#~ "After installing this module it won't be possible to recover a forgotten " -#~ "password for your\n" -#~ "users, the only solution is for an admin to set a new password.\n" -#~ "\n" -#~ "Note: installing this module does not mean you can ignore basic security " -#~ "measures,\n" -#~ "as the password is still transmitted unencrypted on the network (by the " -#~ "client),\n" -#~ "unless you are using a secure protocol such as XML-RPCS.\n" -#~ " " -#~ msgstr "" -#~ "Mô-Ä‘un nà y thay thế máºt khẩu dạng tÆ°á»ng minh (cleartext) trong cÆ¡ sở dữ liệu " -#~ "vá»›i má»™t máºt khẩu băm (hash),\n" -#~ "ngăn chặn bất cứ ai Ä‘á»c các máºt khẩu ban đầu.\n" -#~ "Äối vá»›i ngÆ°á»i dùng hiện tại của bạn, việc loại bá» các máºt khẩu tÆ°á»ng minh " -#~ "xảy ra lần đầu tiên\n" -#~ "ngÆ°á»i dùng đăng nháºp và o cÆ¡ sở dữ liệu, sau khi cà i đặt base_crypt.\n" -#~ "Sau khi cà i đặt mô-Ä‘un nà y, sẽ không thể khôi phục lại má»™t máºt khẩu bị lãng " -#~ "quên cho\n" -#~ "ngÆ°á»i sá» dụng của bạn, giải pháp duy nhất là để má»™t quản trị viên thiết láºp " -#~ "má»™t máºt khẩu má»›i.\n" -#~ "\n" -#~ "LÆ°u ý: cà i đặt mô-Ä‘un nà y không có nghÄ©a là bạn có thể bá» qua các biện pháp " -#~ "bảo máºt cÆ¡ bản,\n" -#~ "nhÆ° máºt khẩu vẫn được truyá»n không mã hóa trên mạng (từ máy khách),\n" -#~ "trừ khi bạn sá» dụng má»™t giao thức an toà n chẳng hạn nhÆ° XML-RPCS.\n" -#~ " " diff --git a/addons/auth_ldap/users_ldap.py b/addons/auth_ldap/users_ldap.py index e6a7f0f47167c9fd377efa05b15bd30c646d9acd..9ddc3d6ed456baf91eb015a7b7d8857f4cd93df7 100644 --- a/addons/auth_ldap/users_ldap.py +++ b/addons/auth_ldap/users_ldap.py @@ -237,14 +237,14 @@ class 
res_company(osv.osv): _inherit = "res.company" _columns = { 'ldaps': fields.one2many( - 'res.company.ldap', 'company', 'LDAP Parameters'), + 'res.company.ldap', 'company', 'LDAP Parameters', copy=True), } class users(osv.osv): _inherit = "res.users" - def login(self, db, login, password): - user_id = super(users, self).login(db, login, password) + def _login(self, db, login, password): + user_id = super(users, self)._login(db, login, password) if user_id: return user_id registry = RegistryManager.get(db) diff --git a/addons/auth_oauth/res_users.py b/addons/auth_oauth/res_users.py index 8d57a23f7028206562552277e211c7234e5a89fb..d2aed236f758fcd122e047ccfd4e75967882431b 100644 --- a/addons/auth_oauth/res_users.py +++ b/addons/auth_oauth/res_users.py @@ -17,8 +17,8 @@ class res_users(osv.Model): _columns = { 'oauth_provider_id': fields.many2one('auth.oauth.provider', 'OAuth Provider'), - 'oauth_uid': fields.char('OAuth User ID', help="Oauth Provider user_id"), - 'oauth_access_token': fields.char('OAuth Access Token', readonly=True), + 'oauth_uid': fields.char('OAuth User ID', help="Oauth Provider user_id", copy=False), + 'oauth_access_token': fields.char('OAuth Access Token', readonly=True, copy=False), } _sql_constraints = [ diff --git a/addons/auth_openid/res_users.py b/addons/auth_openid/res_users.py index 897dea079823f59cc570a658a440e7a8b726be0d..1c6c635118ac1d2a0b8f6b6d4e29d0b22d994379 100644 --- a/addons/auth_openid/res_users.py +++ b/addons/auth_openid/res_users.py @@ -30,11 +30,11 @@ class res_users(osv.osv): # TODO create helper fields for autofill openid_url and openid_email -> http://pad.openerp.com/web-openid _columns = { - 'openid_url': fields.char('OpenID URL', size=1024), - 'openid_email': fields.char('OpenID Email', size=256, + 'openid_url': fields.char('OpenID URL', size=1024, copy=False), + 'openid_email': fields.char('OpenID Email', size=256, copy=False, help="Used for disambiguation in case of a shared OpenID URL"), 'openid_key': fields.char('OpenID 
Key', size=utils.KEY_LENGTH, - readonly=True), + readonly=True, copy=False), } def _check_openid_url_email(self, cr, uid, ids, context=None): @@ -48,19 +48,8 @@ class res_users(osv.osv): (_check_openid_url_email, lambda self, *a, **kw: self._check_openid_url_email_msg(*a, **kw), ['active', 'openid_url', 'openid_email']), ] - def copy(self, cr, uid, rid, defaults=None, context=None): - reset_fields = 'openid_url openid_email'.split() - reset_values = dict.fromkeys(reset_fields, False) - if defaults is None: - defaults = reset_values - else: - defaults = dict(reset_values, **defaults) - - defaults['openid_key'] = False - return super(res_users, self).copy(cr, uid, rid, defaults, context) - - def login(self, db, login, password): - result = super(res_users, self).login(db, login, password) + def _login(self, db, login, password): + result = super(res_users, self)._login(db, login, password) if result: return result else: @@ -69,6 +58,7 @@ class res_users(osv.osv): SET login_date=now() AT TIME ZONE 'UTC' WHERE login=%s AND openid_key=%s AND active=%s RETURNING id""", (tools.ustr(login), tools.ustr(password), True)) + # beware: record cache may be invalid res = cr.fetchone() cr.commit() return res[0] if res else False diff --git a/addons/auth_signup/res_users.py b/addons/auth_signup/res_users.py index 576a778be453ee23a252deb6b7d8a82151c46513..97bcea52bc1eea5b0ee76dcac043c4d8067c1edf 100644 --- a/addons/auth_signup/res_users.py +++ b/addons/auth_signup/res_users.py @@ -104,9 +104,9 @@ class res_partner(osv.Model): return self._get_signup_url_for_action(cr, uid, ids, context=context) _columns = { - 'signup_token': fields.char('Signup Token'), - 'signup_type': fields.char('Signup Token Type'), - 'signup_expiration': fields.datetime('Signup Expiration'), + 'signup_token': fields.char('Signup Token', copy=False), + 'signup_type': fields.char('Signup Token Type', copy=False), + 'signup_expiration': fields.datetime('Signup Expiration', copy=False), 'signup_valid': 
fields.function(_get_signup_valid, type='boolean', string='Signup Token is Valid'), 'signup_url': fields.function(_get_signup_url, type='char', string='Signup URL'), } @@ -302,7 +302,7 @@ class res_users(osv.Model): user_id = super(res_users, self).create(cr, uid, values, context=context) user = self.browse(cr, uid, user_id, context=context) if user.email and not context.get('no_reset_password'): - context.update({'create_user': True}) + context = dict(context, create_user=True) try: self.action_reset_password(cr, uid, [user.id], context=context) except MailDeliveryException: diff --git a/addons/auth_signup/signup.xml b/addons/auth_signup/signup.xml deleted file mode 100644 index d01ee58f2f182d636db249ad9738c03e8d571154..0000000000000000000000000000000000000000 --- a/addons/auth_signup/signup.xml +++ /dev/null @@ -1,42 +0,0 @@ -<?xml version="1.0" encoding="UTF-8"?> -<openerp> - <data> - - <record id="signup_form_view" model="ir.ui.view"> - <field name="name">signup.signup.form</field> - <field name="model">signup.signup</field> - <field name="type">form</field> - <field name="arch" type="xml"> - <form string="Signup"> - <field name="state" invisible="1"/> - <group colspan="4" states="draft,missmatch"> - <field name="name" required="1"/> - <field name="email" required="1"/> - <field name="password" required='1' on_change="onchange_pw(password,password_confirmation)"/> - <field name="password_confirmation" required='1' on_change="onchange_pw(password,password_confirmation)"/> - <group colspan="4" states="missmatch"> - <div>Passwords missmatch</div> - </group> - <group colspan="2" col="1"> - <button string="Sign Up" name="signup" attrs="{'readonly': [('state', '=', 'missmatch')]}" type="object"/> - </group> - </group> - <group colspan="4" states="done" col="1"> - <div>You can now login.</div> - <button special="cancel" string="Close"/> - </group> - </form> - </field> - </record> - - <record id="signup_action" model="ir.actions.act_window"> - <field 
name="name">signup.signup</field> - <field name="type">ir.actions.act_window</field> - <field name="res_model">signup.signup</field> - <field name="view_type">form</field> - <field name="view_mode">form</field> - <field name="target">new</field> - </record> - - </data> -</openerp> diff --git a/addons/base_action_rule/base_action_rule.py b/addons/base_action_rule/base_action_rule.py index 1f5fcc58ccf00bbc875bd36aab17edab1f336f90..b743603f805eb03ad1c7154d6d24f14909c1a632 100644 --- a/addons/base_action_rule/base_action_rule.py +++ b/addons/base_action_rule/base_action_rule.py @@ -57,7 +57,7 @@ class base_action_rule(osv.osv): 'name': fields.char('Rule Name', required=True), 'model_id': fields.many2one('ir.model', 'Related Document Model', required=True, domain=[('osv_memory', '=', False)]), - 'model': fields.related('model_id', 'model', type="char", size=256, string='Model'), + 'model': fields.related('model_id', 'model', type="char", string='Model'), 'create_date': fields.datetime('Create Date', readonly=1), 'active': fields.boolean('Active', help="When unchecked, the rule is hidden and will not be executed."), @@ -96,7 +96,7 @@ class base_action_rule(osv.osv): ondelete='restrict', domain="[('model_id', '=', model_id.model)]", help="If present, this condition must be satisfied before executing the action rule."), - 'last_run': fields.datetime('Last Run', readonly=1), + 'last_run': fields.datetime('Last Run', readonly=1, copy=False), } _defaults = { @@ -152,64 +152,6 @@ class base_action_rule(osv.osv): return True - def _wrap_create(self, old_create, model): - """ Return a wrapper around `old_create` calling both `old_create` and - `_process`, in that order. 
- """ - def create(cr, uid, vals, context=None, **kwargs): - # avoid loops or cascading actions - if context and context.get('action'): - return old_create(cr, uid, vals, context=context) - - context = dict(context or {}, action=True) - new_id = old_create(cr, uid, vals, context=context, **kwargs) - - # retrieve the action rules to run on creation - action_dom = [('model', '=', model), ('kind', 'in', ['on_create', 'on_create_or_write'])] - action_ids = self.search(cr, uid, action_dom, context=context) - - # check postconditions, and execute actions on the records that satisfy them - for action in self.browse(cr, uid, action_ids, context=context): - if self._filter(cr, uid, action, action.filter_id, [new_id], context=context): - self._process(cr, uid, action, [new_id], context=context) - return new_id - - return create - - def _wrap_write(self, old_write, model): - """ Return a wrapper around `old_write` calling both `old_write` and - `_process`, in that order. - """ - def write(cr, uid, ids, vals, context=None, **kwargs): - # avoid loops or cascading actions - if context and context.get('action'): - return old_write(cr, uid, ids, vals, context=context, **kwargs) - - context = dict(context or {}, action=True) - ids = [ids] if isinstance(ids, (int, long, str)) else ids - - # retrieve the action rules to run on update - action_dom = [('model', '=', model), ('kind', 'in', ['on_write', 'on_create_or_write'])] - action_ids = self.search(cr, uid, action_dom, context=context) - actions = self.browse(cr, uid, action_ids, context=context) - - # check preconditions - pre_ids = {} - for action in actions: - pre_ids[action] = self._filter(cr, uid, action, action.filter_pre_id, ids, context=context) - - # execute write - old_write(cr, uid, ids, vals, context=context, **kwargs) - - # check postconditions, and execute actions on the records that satisfy them - for action in actions: - post_ids = self._filter(cr, uid, action, action.filter_id, pre_ids[action], context=context) - if 
post_ids: - self._process(cr, uid, action, post_ids, context=context) - return True - - return write - def _register_hook(self, cr, ids=None): """ Wrap the methods `create` and `write` of the models specified by the rules given by `ids` (or all existing rules if `ids` is `None`.) @@ -221,10 +163,65 @@ class base_action_rule(osv.osv): model = action_rule.model_id.model model_obj = self.pool[model] if not hasattr(model_obj, 'base_action_ruled'): - model_obj.create = self._wrap_create(model_obj.create, model) - model_obj.write = self._wrap_write(model_obj.write, model) + # monkey-patch methods create and write + + def create(self, cr, uid, vals, context=None, **kwargs): + # avoid loops or cascading actions + if context and context.get('action'): + return create.origin(self, cr, uid, vals, context=context) + + # call original method with a modified context + context = dict(context or {}, action=True) + new_id = create.origin(self, cr, uid, vals, context=context, **kwargs) + + # as it is a new record, we do not consider the actions that have a prefilter + action_model = self.pool.get('base.action.rule') + action_dom = [('model', '=', self._name), + ('kind', 'in', ['on_create', 'on_create_or_write'])] + action_ids = action_model.search(cr, uid, action_dom, context=context) + + # check postconditions, and execute actions on the records that satisfy them + for action in action_model.browse(cr, uid, action_ids, context=context): + if action_model._filter(cr, uid, action, action.filter_id, [new_id], context=context): + action_model._process(cr, uid, action, [new_id], context=context) + return new_id + + def write(self, cr, uid, ids, vals, context=None, **kwargs): + # avoid loops or cascading actions + if context and context.get('action'): + return write.origin(self, cr, uid, ids, vals, context=context) + + # modify context + context = dict(context or {}, action=True) + ids = [ids] if isinstance(ids, (int, long, str)) else ids + + # retrieve the action rules to possibly 
execute + action_model = self.pool.get('base.action.rule') + action_dom = [('model', '=', self._name), + ('kind', 'in', ['on_write', 'on_create_or_write'])] + action_ids = action_model.search(cr, uid, action_dom, context=context) + actions = action_model.browse(cr, uid, action_ids, context=context) + + # check preconditions + pre_ids = {} + for action in actions: + pre_ids[action] = action_model._filter(cr, uid, action, action.filter_pre_id, ids, context=context) + + # call original method + write.origin(self, cr, uid, ids, vals, context=context, **kwargs) + + # check postconditions, and execute actions on the records that satisfy them + for action in actions: + post_ids = action_model._filter(cr, uid, action, action.filter_id, pre_ids[action], context=context) + if post_ids: + action_model._process(cr, uid, action, post_ids, context=context) + return True + + model_obj._patch_method('create', create) + model_obj._patch_method('write', write) model_obj.base_action_ruled = True updated = True + return updated def create(self, cr, uid, vals, context=None): @@ -283,7 +280,7 @@ class base_action_rule(osv.osv): if 'lang' not in ctx: # Filters might be language-sensitive, attempt to reuse creator lang # as we are usually running this as super-user in background - [filter_meta] = action.filter_id.perm_read() + [filter_meta] = action.filter_id.get_metadata() user_id = filter_meta['write_uid'] and filter_meta['write_uid'][0] or \ filter_meta['create_uid'][0] ctx['lang'] = self.pool['res.users'].browse(cr, uid, user_id).lang diff --git a/addons/base_gengo/res_company.py b/addons/base_gengo/res_company.py index 1b083ecf5a2a422dfb8624f94d331e8547a9b49c..8400db8f943f015d0febbf054b86f71b558a80f5 100644 --- a/addons/base_gengo/res_company.py +++ b/addons/base_gengo/res_company.py @@ -26,8 +26,8 @@ class res_company(osv.Model): _name = "res.company" _inherit = "res.company" _columns = { - "gengo_private_key": fields.text("Gengo Private Key"), - "gengo_public_key": 
fields.text("Gengo Public Key"), + "gengo_private_key": fields.text("Gengo Private Key", copy=False), + "gengo_public_key": fields.text("Gengo Public Key", copy=False), "gengo_comment": fields.text("Comments", help="This comment will be automatically be enclosed in each an every request sent to Gengo"), "gengo_auto_approve": fields.boolean("Auto Approve Translation ?", help="Jobs are Automatically Approved by Gengo."), "gengo_sandbox": fields.boolean("Sandbox Mode", help="Check this box if you're using the sandbox mode of Gengo, mainly used for testing purpose."), diff --git a/addons/base_import/models.py b/addons/base_import/models.py index 2b683bd8927bad158a08108659be4e5e2e692771..bc4bb0a268f81f495997d94ba6b117dcbde36348 100644 --- a/addons/base_import/models.py +++ b/addons/base_import/models.py @@ -74,6 +74,7 @@ class ir_import(orm.TransientModel): :param str model: name of the model to get fields form :param int landing: depth of recursion into o2m fields """ + model_obj = self.pool[model] fields = [{ 'id': 'id', 'name': 'id', @@ -81,8 +82,11 @@ class ir_import(orm.TransientModel): 'required': False, 'fields': [], }] - fields_got = self.pool[model].fields_get(cr, uid, context=context) + fields_got = model_obj.fields_get(cr, uid, context=context) + blacklist = orm.MAGIC_COLUMNS + [model_obj.CONCURRENCY_CHECK_FIELD] for name, field in fields_got.iteritems(): + if name in blacklist: + continue # an empty string means the field is deprecated, @deprecated must # be absent or False to mean not-deprecated if field.get('deprecated', False) is not False: diff --git a/addons/base_import/tests/test_cases.py b/addons/base_import/tests/test_cases.py index 5479ae944284d4b54c5f534b75d6985bfb263616..ac5a4440523fd2f82a6d21f78b843ebef3b665c2 100644 --- a/addons/base_import/tests/test_cases.py +++ b/addons/base_import/tests/test_cases.py @@ -4,48 +4,72 @@ from openerp.tests.common import TransactionCase from .. 
import models -ID_FIELD = {'id': 'id', 'name': 'id', 'string': "External ID", 'required': False, 'fields': []} +ID_FIELD = { + 'id': 'id', + 'name': 'id', + 'string': "External ID", + 'required': False, + 'fields': [], +} +DISPLAY_NAME_FIELD = { + 'id': 'display_name', + 'name': 'display_name', + 'string': "Name", + 'required': False, + 'fields': [], +} + def make_field(name='value', string='unknown', required=False, fields=[]): return [ ID_FIELD, + DISPLAY_NAME_FIELD, {'id': name, 'name': name, 'string': string, 'required': required, 'fields': fields}, ] -class test_basic_fields(TransactionCase): +def sorted_fields(fields): + """ recursively sort field lists to ease comparison """ + recursed = [dict(field, fields=sorted_fields(field['fields'])) for field in fields] + return sorted(recursed, key=lambda field: field['id']) + +class BaseImportCase(TransactionCase): + def assertEqualFields(self, fields1, fields2): + self.assertEqual(sorted_fields(fields1), sorted_fields(fields2)) + +class test_basic_fields(BaseImportCase): def get_fields(self, field): return self.registry('base_import.import')\ .get_fields(self.cr, self.uid, 'base_import.tests.models.' 
+ field) def test_base(self): """ A basic field is not required """ - self.assertEqual(self.get_fields('char'), make_field()) + self.assertEqualFields(self.get_fields('char'), make_field()) def test_required(self): """ Required fields should be flagged (so they can be fill-required) """ - self.assertEqual(self.get_fields('char.required'), make_field(required=True)) + self.assertEqualFields(self.get_fields('char.required'), make_field(required=True)) def test_readonly(self): """ Readonly fields should be filtered out""" - self.assertEqual(self.get_fields('char.readonly'), [ID_FIELD]) + self.assertEqualFields(self.get_fields('char.readonly'), [ID_FIELD, DISPLAY_NAME_FIELD]) def test_readonly_states(self): """ Readonly fields with states should not be filtered out""" - self.assertEqual(self.get_fields('char.states'), make_field()) + self.assertEqualFields(self.get_fields('char.states'), make_field()) def test_readonly_states_noreadonly(self): """ Readonly fields with states having nothing to do with readonly should still be filtered out""" - self.assertEqual(self.get_fields('char.noreadonly'), [ID_FIELD]) + self.assertEqualFields(self.get_fields('char.noreadonly'), [ID_FIELD, DISPLAY_NAME_FIELD]) def test_readonly_states_stillreadonly(self): """ Readonly fields with readonly states leaving them readonly always... 
filtered out""" - self.assertEqual(self.get_fields('char.stillreadonly'), [ID_FIELD]) + self.assertEqualFields(self.get_fields('char.stillreadonly'), [ID_FIELD, DISPLAY_NAME_FIELD]) def test_m2o(self): """ M2O fields should allow import of themselves (name_get), their id and their xid""" - self.assertEqual(self.get_fields('m2o'), make_field(fields=[ + self.assertEqualFields(self.get_fields('m2o'), make_field(fields=[ {'id': 'value', 'name': 'id', 'string': 'External ID', 'required': False, 'fields': []}, {'id': 'value', 'name': '.id', 'string': 'Database ID', 'required': False, 'fields': []}, ])) @@ -55,19 +79,20 @@ class test_basic_fields(TransactionCase): required as well (the client has to handle that: requiredness is id-based) """ - self.assertEqual(self.get_fields('m2o.required'), make_field(required=True, fields=[ + self.assertEqualFields(self.get_fields('m2o.required'), make_field(required=True, fields=[ {'id': 'value', 'name': 'id', 'string': 'External ID', 'required': True, 'fields': []}, {'id': 'value', 'name': '.id', 'string': 'Database ID', 'required': True, 'fields': []}, ])) -class test_o2m(TransactionCase): +class test_o2m(BaseImportCase): def get_fields(self, field): return self.registry('base_import.import')\ .get_fields(self.cr, self.uid, 'base_import.tests.models.' + field) def test_shallow(self): - self.assertEqual(self.get_fields('o2m'), make_field(fields=[ - {'id': 'id', 'name': 'id', 'string': 'External ID', 'required': False, 'fields': []}, + self.assertEqualFields(self.get_fields('o2m'), make_field(fields=[ + ID_FIELD, + DISPLAY_NAME_FIELD, # FIXME: should reverse field be ignored? 
{'id': 'parent_id', 'name': 'parent_id', 'string': 'unknown', 'required': False, 'fields': [ {'id': 'parent_id', 'name': 'id', 'string': 'External ID', 'required': False, 'fields': []}, @@ -224,7 +249,8 @@ class test_preview(TransactionCase): self.assertEqual(result['headers'], ['name', 'Some Value', 'Counter']) # Order depends on iteration order of fields_get self.assertItemsEqual(result['fields'], [ - {'id': 'id', 'name': 'id', 'string': 'External ID', 'required':False, 'fields': []}, + ID_FIELD, + DISPLAY_NAME_FIELD, {'id': 'name', 'name': 'name', 'string': 'Name', 'required':False, 'fields': []}, {'id': 'somevalue', 'name': 'somevalue', 'string': 'Some Value', 'required':True, 'fields': []}, {'id': 'othervalue', 'name': 'othervalue', 'string': 'Other Variable', 'required':False, 'fields': []}, diff --git a/addons/base_report_designer/base_report_designer.py b/addons/base_report_designer/base_report_designer.py index bc7959a17dfa5f4160495c1b64a5425d86d86cd1..a7289f42d4ea3fc3a172ba4ca8d825f3691f8864 100644 --- a/addons/base_report_designer/base_report_designer.py +++ b/addons/base_report_designer/base_report_designer.py @@ -59,10 +59,8 @@ class report_xml(osv.osv): return True def report_get(self, cr, uid, report_id, context=None): - if context is None: - context = {} # skip osv.fields.sanitize_binary_value() because we want the raw bytes in all cases - context.update(bin_raw=True) + context = dict(context or {}, bin_raw=True) report = self.browse(cr, uid, report_id, context=context) sxw_data = report.report_sxw_content rml_data = report.report_rml_content diff --git a/addons/base_report_designer/wizard/base_report_designer_modify.py b/addons/base_report_designer/wizard/base_report_designer_modify.py index e8ed45e7ce173f8ab733f74a5ab301f1dd9c056e..a28c562ad147e7bb76919c27e2323cbacfbf988a 100644 --- a/addons/base_report_designer/wizard/base_report_designer_modify.py +++ b/addons/base_report_designer/wizard/base_report_designer_modify.py @@ -133,16 +133,19 @@ class 
base_report_rml_save(osv.osv_memory): """ res = super(base_report_rml_save, self).default_get(cr, uid, fields, context=context) - report_id = self.pool['base.report.sxw'].search(cr,uid,[]) - data = self.pool['base.report.file.sxw'].read(cr, uid, report_id, context=context)[0] + report_ids = self.pool['base.report.sxw'].search(cr,uid,[], context=context) + + data = self.pool['base.report.file.sxw'].read(cr, uid, report_ids, context=context)[0] + report = self.pool['ir.actions.report.xml'].browse(cr, uid, data['report_id'], context=context) if 'file_rml' in fields: res['file_rml'] = base64.encodestring(report.report_rml_content) return res + _columns = { - 'file_rml':fields.binary('Save As'), - } + 'file_rml':fields.binary('Save As'), + } # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: diff --git a/addons/board/controllers.py b/addons/board/controllers.py index 936346273529eed4081215d56b9a554005bc187a..6ab5d7035c094c87fedf7b831c6fda7ac325536b 100644 --- a/addons/board/controllers.py +++ b/addons/board/controllers.py @@ -2,7 +2,7 @@ from xml.etree import ElementTree from openerp.addons.web.controllers.main import load_actions_from_ir_values -from openerp.addons.web.http import Controller, route, request +from openerp.http import Controller, route, request class Board(Controller): @route('/board/add_to_dashboard', type='json', auth='user') diff --git a/addons/calendar/base_calendar_view.xml b/addons/calendar/base_calendar_view.xml deleted file mode 100644 index 7fa20a848fdc8be7d098f2e90e52a80226a3eca8..0000000000000000000000000000000000000000 --- a/addons/calendar/base_calendar_view.xml +++ /dev/null @@ -1,291 +0,0 @@ -<?xml version="1.0" encoding="utf-8"?> -<openerp> - <data> - - <!-- Calendar's menu --> - <menuitem id="base.menu_calendar_configuration" name="Calendar" - parent="base.menu_base_config" sequence="50" groups="base.group_no_one"/> - - <!-- Alarm form view --> - <record id="res_alarm_form_view" model="ir.ui.view"> - <field 
name="name">res.alarm.form</field> - <field name="model">res.alarm</field> - <field name="arch" type="xml"> - <form string="Reminder details"> - <group col="4"> - <field name="name"/> - <field name="active"/> - <separator string="Reminder Details" colspan="4"/> - <field name="trigger_duration"/> - <field name="trigger_interval"/> - <field name="trigger_occurs"/> - <field name="trigger_related"/> - </group> - </form> - </field> - </record> - - <!-- Alarm list view --> - <record id="res_alarm_tree_view" model="ir.ui.view"> - <field name="name">res.alarm.tree</field> - <field name="model">res.alarm</field> - <field name="arch" type="xml"> - <tree string="Reminder details"> - <field name="name"/> - <field name="trigger_interval"/> - <field name="trigger_duration"/> - <field name="trigger_occurs"/> - <field name="trigger_related"/> - </tree> - </field> - </record> - <record id="action_res_alarm_view" model="ir.actions.act_window"> - <field name="name">Alarms</field> - <field name="type">ir.actions.act_window</field> - <field name="res_model">res.alarm</field> - <field name="view_type">form</field> - <field name="view_mode">tree,form</field> - <field name="help" type="html"> - <p class="oe_view_nocontent_create"> - Click to setup a new alarm type. - </p><p> - You can define a customized type of calendar alarm that may be - assigned to calendar events or meetings. 
- </p> - </field> - </record> - - <!-- Alarms menu --> - <menuitem id="menu_crm_meeting_avail_alarm" - groups="base.group_no_one" - action="base_calendar.action_res_alarm_view" - parent="base.menu_calendar_configuration" sequence="5"/> - - <!-- Event form view --> - <record model="ir.ui.view" id="event_form_view"> - <field name="name">Event Form</field> - <field name="model">calendar.event</field> - <field name="arch" type="xml"> - <form string="Events"> - <header> - <button name="do_confirm" string="Confirm" states="tentative,cancelled" type="object" class="oe_highlight"/> - <button name="do_tentative" states="confirmed,cancelled" string="Uncertain" type="object" class="oe_highlight"/> - <button name="do_cancel" string="Cancel Event" states="tentative,confirmed" type="object"/> - <field name="state" widget="statusbar" - statusbar_visible="tentative,confirmed" statusbar_colors='{"proforma":"blue"}'/> - </header> - <sheet> - <group col="6"> - <field name="name" string="Summary" - colspan="4" required="1"/> - <field name="allday" colspan="2" on_change="onchange_dates(date,False,False,allday)"/> - <newline/> - <field name="date" string="Start Date" required="1" - on_change="onchange_dates(date,duration,False,allday)"/> - <field name="duration" widget="float_time" - on_change="onchange_dates(date,duration,False,allday)" attrs="{'invisible': [('allday', '=', True)]}"/> - <field name="date_deadline" string="End Date" required="1" - on_change="onchange_dates(date,False,date_deadline)"/> - <field name="location"/> - <field name="alarm_id" string="Reminder" - widget="selection"/> - <group colspan="2" col="4" attrs="{'readonly': [('state','=','done')]}"> - <field name="recurrency"/> - </group> - </group> - <notebook> - <page string="Event"> - <group col="6" colspan="4"> - <separator colspan="6" string="Visibility"/> - <field name="user_id" string="Responsible User"/> - <field name="show_as" string="Show Time as"/> - <field name="class" string="Privacy"/> - <field 
name="recurrent_id_date" invisible="1"/> - <field name="recurrent_id" invisible="1"/> - </group> - <separator string="Description"/> - <field name="description"/> - </page> - <page string="Invitation Detail"> - <field name="attendee_ids" colspan="4" - nolabel="1" widget="one2many" mode="tree"> - <tree string="Invitation details" editable="top"> - <field name="sent_by_uid" string="From"/> - <field name="user_id" string="To"/> - <field name="email"/> - <field name="role" width="200"/> - <field name="state"/> - <button name="do_tentative" - states="needs-action,declined,accepted" - string="Uncertain" type="object" - icon="terp-crm"/> - <button name="do_accept" string="Accept" - states="needs-action,tentative,declined" - type="object" icon="gtk-apply"/> - <button name="do_decline" string="Decline" - states="needs-action,tentative,accepted" - type="object" icon="gtk-cancel"/> - </tree> - <form string="Invitation details"> - <notebook colspan="4"> - <page string="Details"> - <group col="4"> - <field name="email"/> - <field name="rsvp"/> - <field name="cutype"/> - <field name="role"/> - </group> - <group col="4"> - <field name="state"/> - <button name="do_tentative" - states="needs-action,declined,accepted" - string="Uncertain" - type="object" - icon="terp-crm"/> - <button name="do_accept" - string="Accept" - states="needs-action,tentative,declined" - type="object" - icon="gtk-apply"/> - <button name="do_decline" - string="Decline" - states="needs-action,tentative,accepted" - type="object" - icon="gtk-cancel"/> - </group> - </page> - </notebook> - </form> - </field> - </page> - <page string="Recurrency Option" attrs="{'invisible': [('recurrency','=',False)]}"> - <group col="4" colspan="4" name="rrule"> - <group col="4" colspan="4"> - <field name="rrule_type" string="Recurrency period" - attrs="{'readonly':[('recurrent_id','!=',False)]}"/> - <field name="interval"/> - <separator string="End of Recurrence" colspan="4"/> - <field name="end_type"/> - <label string=" " 
colspan="2"/> - <newline/> - <field name="count" attrs="{'invisible' : [('end_type', '!=', 'count')] }"/> - <label string=" " colspan="2"/> - <newline/> - <field name="end_date" attrs="{'invisible' : [('end_type', '!=', 'end_date')] }"/> - <newline/> - - - </group> - <group col="8" colspan="4" name="Select weekdays" attrs="{'invisible' :[('rrule_type','not in', ['weekly'])]}"> - <separator string="Choose day where repeat the meeting" colspan="8"/> - <field name="mo" colspan="1"/> - <field name="tu" colspan="1"/> - <field name="we" colspan="1"/> - <field name="th" colspan="1"/> - <newline/> - <field name="fr" colspan="1"/> - <field name="sa" colspan="1" /> - <field name="su" colspan="1" /> - <newline/> - </group> - <group col="10" colspan="4" - attrs="{'invisible' : [('rrule_type','!=','monthly')]}"> - <separator string="Choose day in the month where repeat the meeting" colspan="12"/> - <group col="2" colspan="1"> - <field name="select1"/> - </group> - <group col="2" colspan="1"> - <field name="day" - attrs="{'required' : [('select1','=','date'), ('rrule_type','=','monthly')], - 'invisible' : ['|', ('select1','=','day'), ('rrule_type','!=','monthly')]}"/> - </group> - <group col="3" colspan="1" - attrs="{'invisible' : ['|', ('select1','=','date'), ('rrule_type','!=','monthly')]}"> - <field name="byday" string="The" - attrs="{'required' : [('select1','=','day'), ('rrule_type','=','monthly')]}"/> - <field name="week_list" nolabel="1" - attrs="{'required' : [('select1','=','day'), ('rrule_type','=','monthly')]}"/> - </group> - </group> - </group> - - </page> - </notebook> - </sheet> - </form> - </field> - </record> - - <!-- Event list view --> - <record model="ir.ui.view" id="event_tree_view"> - <field name="name">Event Tree</field> - <field name="model">calendar.event</field> - <field name="arch" type="xml"> - <tree string="Events"> - <field name="name" string="Subject"/> - <field name="date" string="Event Date"/> - <field name="location"/> - <field name="show_as"/> - 
<field name="class" string="Privacy"/> - <field name="user_id" invisible="1"/> - <field name="state" invisible="1"/> - </tree> - </field> - </record> - - <!-- Event calendar view --> - <record model="ir.ui.view" id="event_calendar_view"> - <field name="name">Events Calendar</field> - <field name="model">calendar.event</field> - <field name="priority" eval="2"/> - <field name="arch" type="xml"> - <calendar string="Events" date_start="date" color="show_as" date_delay="duration"> - <field name="name"/> - <field name="class"/> - <field name="show_as"/> - </calendar> - </field> - </record> - - <!-- Event search view --> - <record id="view_calendar_event_filter" model="ir.ui.view"> - <field name="name">Calendar Events Search</field> - <field name="model">calendar.event</field> - <field name="arch" type="xml"> - <search string="Search Events"> - <field name="name" filter_domain="['|',('name','ilike',self),('location','ilike',self)]" string="Event"/> - <field name="show_as"/> - <field name="class" string="Privacy"/> - <filter icon="terp-go-today" string="My Events" domain="[('user_id','=',uid)]" help="My Events"/> - <separator/> - <filter icon="terp-check" string="Confirmed" domain="[('state','=','confirmed')]" help="Confirmed Events"/> - <field name="user_id"/> - <group expand="0" string="Group By"> - <filter string="Responsible" icon="terp-personal" domain="[]" context="{'group_by':'user_id'}"/> - <filter string="Availability" icon="terp-camera_test" domain="[]" context="{'group_by':'show_as'}"/> - <filter string="Privacy" icon="terp-locked" domain="[]" context="{'group_by':'class'}"/> - <filter string="Status" icon="terp-stock_effects-object-colorize" domain="[]" context="{'group_by':'state'}"/> - <filter string="Event Month" icon="terp-go-month" domain="[]" context="{'group_by':'date'}" help="Start Date of Event by Month"/> - </group> - </search> - </field> - </record> - - - <!-- Event action --> - <record id="action_view_event" model="ir.actions.act_window"> - <field 
name="name">Events</field> - <field name="type">ir.actions.act_window</field> - <field name="res_model">calendar.event</field> - <field name="view_type">form</field> - <field name="view_mode">calendar,tree,form</field> - <field name="search_view_id" ref="view_calendar_event_filter"/> - </record> - - <!-- Event menu --> - <menuitem id="menu_events" - name="Events" parent="base.menu_calendar_configuration" - sequence="15" action="action_view_event"/> - - </data> - </openerp> diff --git a/addons/calendar/calendar.py b/addons/calendar/calendar.py index 7108a2d1d734355eb1645bd5f7f338a71d2dcf1c..a77eb5ea6f6ff2dd99b7b32f35e207dbc09008d0 100644 --- a/addons/calendar/calendar.py +++ b/addons/calendar/calendar.py @@ -297,7 +297,7 @@ class res_partner(osv.Model): Used by web_calendar.js : Many2ManyAttendee """ datas = [] - meeting = False + meeting = None if meeting_id: meeting = self.pool['calendar.event'].browse(cr, uid, get_real_ids(meeting_id), context=context) for partner in self.browse(cr, uid, ids, context=context): @@ -309,7 +309,7 @@ class res_partner(osv.Model): datas.append(data) return datas - def calendar_last_notif_ack(self, cr, uid, context=None): + def _set_calendar_last_notif_ack(self, cr, uid, context=None): partner = self.pool['res.users'].browse(cr, uid, uid, context=context).partner_id self.write(cr, uid, partner.id, {'calendar_last_notif_ack': datetime.now()}, context=context) return @@ -708,6 +708,7 @@ class calendar_event(osv.Model): return (format_date, format_time) def get_display_time_tz(self, cr, uid, ids, tz=False, context=None): + context = dict(context or {}) if tz: context["tz"] = tz ev = self.browse(cr, uid, ids, context=context)[0] @@ -720,8 +721,7 @@ class calendar_event(osv.Model): 1) if user add duration for 2 hours, return : August-23-2013 at (04-30 To 06-30) (Europe/Brussels) 2) if event all day ,return : AllDay, July-31-2013 """ - if context is None: - context = {} + context = dict(context or {}) tz = context.get('tz', False) if not tz: 
# tz can have a value False, so dont do it in the default value of get ! @@ -782,7 +782,7 @@ class calendar_event(osv.Model): if data.count and data.count <= 0: raise osv.except_osv(_('Warning!'), _('Count cannot be negative or 0.')) - data = self.read(cr, uid, id, ['id', 'byday', 'recurrency', 'month_list', 'final_date', 'rrule_type', 'month_by', 'interval', 'count', 'end_type', 'mo', 'tu', 'we', 'th', 'fr', 'sa', 'su', 'day', 'week_list'], context=context) + data = self.read(cr, uid, id, ['id', 'byday', 'recurrency', 'final_date', 'rrule_type', 'month_by', 'interval', 'count', 'end_type', 'mo', 'tu', 'we', 'th', 'fr', 'sa', 'su', 'day', 'week_list'], context=context) event = data['id'] if data['recurrency']: result[event] = self.compute_rule_string(data) @@ -900,7 +900,7 @@ class calendar_event(osv.Model): 'categ_ids': fields.many2many('calendar.event.type', 'meeting_category_rel', 'event_id', 'type_id', 'Tags'), 'attendee_ids': fields.one2many('calendar.attendee', 'event_id', 'Attendees', ondelete='cascade'), 'partner_ids': fields.many2many('res.partner', 'calendar_event_res_partner_rel', string='Attendees', states={'done': [('readonly', True)]}), - 'alarm_ids': fields.many2many('calendar.alarm', 'calendar_alarm_calendar_event_rel', string='Reminders', ondelete="restrict"), + 'alarm_ids': fields.many2many('calendar.alarm', 'calendar_alarm_calendar_event_rel', string='Reminders', ondelete="restrict", copy=False), } _defaults = { 'end_type': 'count', @@ -1404,16 +1404,9 @@ class calendar_event(osv.Model): return res def copy(self, cr, uid, id, default=None, context=None): - if context is None: - context = {} - default = default or {} - self._set_date(cr, uid, default, id=default.get('id'), context=context) - default['attendee_ids'] = False - - res = super(calendar_event, self).copy(cr, uid, calendar_id2real_id(id), default, context) - return res + return super(calendar_event, self).copy(cr, uid, calendar_id2real_id(id), default, context) def 
_detach_one_event(self, cr, uid, id, values=dict(), context=None): real_event_id = calendar_id2real_id(id) @@ -1547,8 +1540,7 @@ class calendar_event(osv.Model): return res def read_group(self, cr, uid, domain, fields, groupby, offset=0, limit=None, context=None, orderby=False, lazy=True): - if not context: - context = {} + context = dict(context or {}) if 'date' in groupby: raise osv.except_osv(_('Warning!'), _('Group by date is not supported, use the calendar view instead.')) diff --git a/addons/calendar/controllers/main.py b/addons/calendar/controllers/main.py index 7480c85d6292a9b1d6e620a848bcea7916514243..d0fdb47ab0b9c1028e2a118ac66a82eb29fb1677 100644 --- a/addons/calendar/controllers/main.py +++ b/addons/calendar/controllers/main.py @@ -1,7 +1,7 @@ import simplejson import openerp -import openerp.addons.web.http as http -from openerp.addons.web.http import request +import openerp.http as http +from openerp.http import request import openerp.addons.web.controllers.main as webmain import json @@ -67,5 +67,5 @@ class meeting_invitation(http.Controller): uid = request.session.uid context = request.session.context with registry.cursor() as cr: - res = registry.get("res.partner").calendar_last_notif_ack(cr, uid, context=context) + res = registry.get("res.partner")._set_calendar_last_notif_ack(cr, uid, context=context) return res diff --git a/addons/calendar/crm_meeting.py b/addons/calendar/crm_meeting.py deleted file mode 100644 index 66823e0b8f4b6b3c442b6cf2f1e32e8c77d2c994..0000000000000000000000000000000000000000 --- a/addons/calendar/crm_meeting.py +++ /dev/null @@ -1,176 +0,0 @@ -# -*- coding: utf-8 -*- -############################################################################## -# -# OpenERP, Open Source Management Solution -# Copyright (C) 2004-today OpenERP SA (<http://www.openerp.com>) -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU Affero General Public License as -# published by the Free 
Software Foundation, either version 3 of the -# License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Affero General Public License for more details. -# -# You should have received a copy of the GNU Affero General Public License -# along with this program. If not, see <http://www.gnu.org/licenses/>. -# -############################################################################## - -import time - -from openerp.osv import fields, osv -from openerp.tools import DEFAULT_SERVER_DATE_FORMAT -from openerp.tools.translate import _ -from base_calendar import get_real_ids, base_calendar_id2real_id -# -# crm.meeting is defined here so that it may be used by modules other than crm, -# without forcing the installation of crm. -# - -class crm_meeting_type(osv.Model): - _name = 'crm.meeting.type' - _description = 'Meeting Type' - _columns = { - 'name': fields.char('Name', size=64, required=True, translate=True), - } - -class crm_meeting(osv.Model): - """ Model for CRM meetings """ - _name = 'crm.meeting' - _description = "Meeting" - _order = "id desc" - _inherit = ["calendar.event", "mail.thread", "ir.needaction_mixin"] - _columns = { - 'create_date': fields.datetime('Creation Date', readonly=True), - 'write_date': fields.datetime('Write Date', readonly=True), - 'date_open': fields.datetime('Confirmed', readonly=True), - 'date_closed': fields.datetime('Closed', readonly=True), - 'partner_ids': fields.many2many('res.partner', 'crm_meeting_partner_rel', 'meeting_id', 'partner_id', - string='Attendees', states={'done': [('readonly', True)]}), - 'state': fields.selection( - [('draft', 'Unconfirmed'), ('open', 'Confirmed')], - string='Status', size=16, readonly=True, track_visibility='onchange'), - # Meeting fields - 'name': fields.char('Meeting Subject', size=128, required=True, 
states={'done': [('readonly', True)]}), - 'categ_ids': fields.many2many('crm.meeting.type', 'meeting_category_rel', - 'event_id', 'type_id', 'Tags'), - 'attendee_ids': fields.many2many('calendar.attendee', 'meeting_attendee_rel',\ - 'event_id', 'attendee_id', 'Invited People', states={'done': [('readonly', True)]}), - } - _defaults = { - 'state': 'open', - } - - def message_get_subscription_data(self, cr, uid, ids, context=None): - res = {} - for virtual_id in ids: - real_id = base_calendar_id2real_id(virtual_id) - result = super(crm_meeting, self).message_get_subscription_data(cr, uid, [real_id], context=context) - res[virtual_id] = result[real_id] - return res - - def copy(self, cr, uid, id, default=None, context=None): - default = default or {} - default['attendee_ids'] = False - return super(crm_meeting, self).copy(cr, uid, id, default, context) - - def write(self, cr, uid, ids, values, context=None): - """ Override to add case management: open/close dates """ - if values.get('state')and values.get('state') == 'open': - values['date_open'] = fields.datetime.now() - return super(crm_meeting, self).write(cr, uid, ids, values, context=context) - - def onchange_partner_ids(self, cr, uid, ids, value, context=None): - """ The basic purpose of this method is to check that destination partners - effectively have email addresses. Otherwise a warning is thrown. - :param value: value format: [[6, 0, [3, 4]]] - """ - res = {'value': {}} - if not value or not value[0] or not value[0][0] == 6: - return - res.update(self.check_partners_email(cr, uid, value[0][2], context=context)) - return res - - def check_partners_email(self, cr, uid, partner_ids, context=None): - """ Verify that selected partner_ids have an email_address defined. - Otherwise throw a warning. 
""" - partner_wo_email_lst = [] - for partner in self.pool.get('res.partner').browse(cr, uid, partner_ids, context=context): - if not partner.email: - partner_wo_email_lst.append(partner) - if not partner_wo_email_lst: - return {} - warning_msg = _('The following contacts have no email address :') - for partner in partner_wo_email_lst: - warning_msg += '\n- %s' % (partner.name) - return {'warning': { - 'title': _('Email addresses not found'), - 'message': warning_msg, - } - } - # ---------------------------------------- - # OpenChatter - # ---------------------------------------- - - # shows events of the day for this user - def _needaction_domain_get(self, cr, uid, context=None): - return [('date', '<=', time.strftime(DEFAULT_SERVER_DATE_FORMAT + ' 23:59:59')), ('date_deadline', '>=', time.strftime(DEFAULT_SERVER_DATE_FORMAT + ' 23:59:59')), ('user_id', '=', uid)] - - def message_post(self, cr, uid, thread_id, body='', subject=None, type='notification', - subtype=None, parent_id=False, attachments=None, context=None, **kwargs): - if isinstance(thread_id, str): - thread_id = get_real_ids(thread_id) - return super(crm_meeting, self).message_post(cr, uid, thread_id, body=body, subject=subject, type=type, subtype=subtype, parent_id=parent_id, attachments=attachments, context=context, **kwargs) - -class mail_message(osv.osv): - _inherit = "mail.message" - - def search(self, cr, uid, args, offset=0, limit=0, order=None, context=None, count=False): - ''' - convert the search on real ids in the case it was asked on virtual ids, then call super() - ''' - for index in range(len(args)): - if args[index][0] == "res_id" and isinstance(args[index][2], str): - args[index][2] = get_real_ids(args[index][2]) - return super(mail_message, self).search(cr, uid, args, offset=offset, limit=limit, order=order, context=context, count=count) - - def _find_allowed_model_wise(self, cr, uid, doc_model, doc_dict, context=None): - if doc_model == 'crm.meeting': - for virtual_id in 
self.pool[doc_model].get_recurrent_ids(cr, uid, doc_dict.keys(), [], context=context): - doc_dict.setdefault(virtual_id, doc_dict[get_real_ids(virtual_id)]) - return super(mail_message, self)._find_allowed_model_wise(cr, uid, doc_model, doc_dict, context=context) - -class ir_attachment(osv.osv): - _inherit = "ir.attachment" - - def search(self, cr, uid, args, offset=0, limit=0, order=None, context=None, count=False): - ''' - convert the search on real ids in the case it was asked on virtual ids, then call super() - ''' - for index in range(len(args)): - if args[index][0] == "res_id" and isinstance(args[index][2], str): - args[index][2] = get_real_ids(args[index][2]) - return super(ir_attachment, self).search(cr, uid, args, offset=offset, limit=limit, order=order, context=context, count=count) - - def write(self, cr, uid, ids, vals, context=None): - ''' - when posting an attachment (new or not), convert the virtual ids in real ids. - ''' - if isinstance(vals.get('res_id'), str): - vals['res_id'] = get_real_ids(vals.get('res_id')) - return super(ir_attachment, self).write(cr, uid, ids, vals, context=context) - -class invite_wizard(osv.osv_memory): - _inherit = 'mail.wizard.invite' - - def default_get(self, cr, uid, fields, context=None): - ''' - in case someone clicked on 'invite others' wizard in the followers widget, transform virtual ids in real ids - ''' - result = super(invite_wizard, self).default_get(cr, uid, fields, context=context) - if 'res_id' in result: - result['res_id'] = get_real_ids(result['res_id']) - return result diff --git a/addons/calendar/crm_meeting_data.xml b/addons/calendar/crm_meeting_data.xml deleted file mode 100644 index d1cffddc403813b687f98bd9d8c095397b32b3c1..0000000000000000000000000000000000000000 --- a/addons/calendar/crm_meeting_data.xml +++ /dev/null @@ -1,32 +0,0 @@ -<?xml version="1.0"?> -<openerp> - <data noupdate="1"> - - <!-- CASE CATEGORY(categ_id) --> - - <record model="crm.meeting.type" id="categ_meet1"> - <field 
name="name">Customer Meeting</field> - </record> - - <record model="crm.meeting.type" id="categ_meet2"> - <field name="name">Internal Meeting</field> - </record> - - <record model="crm.meeting.type" id="categ_meet3"> - <field name="name">Off-site Meeting</field> - </record> - - <record model="crm.meeting.type" id="categ_meet4"> - <field name="name">Open Discussion</field> - </record> - - <record model="crm.meeting.type" id="categ_meet5"> - <field name="name">Feedback Meeting</field> - </record> - - <record model="res.request.link" id="request_link_meeting"> - <field name="name">Meeting</field> - <field name="object">crm.meeting</field> - </record> - </data> -</openerp> diff --git a/addons/crm/base_partner_merge.py b/addons/crm/base_partner_merge.py index c07a32579f055f3dfddfbe375b63aaf02b9c2bed..03311704613df5e69ff97fd4e52ccc9e817d4448 100644 --- a/addons/crm/base_partner_merge.py +++ b/addons/crm/base_partner_merge.py @@ -281,7 +281,7 @@ class MergePartnerAutomatic(osv.TransientModel): except (osv.except_osv, orm.except_orm): _logger.info('Skip recursive partner hierarchies for parent_id %s of partner: %s', parent_id, dst_partner.id) - @mute_logger('openerp.osv.expression', 'openerp.osv.orm') + @mute_logger('openerp.osv.expression', 'openerp.models') def _merge(self, cr, uid, partner_ids, dst_partner=None, context=None): proxy = self.pool.get('res.partner') @@ -327,8 +327,7 @@ class MergePartnerAutomatic(osv.TransientModel): information of the previous one and will copy the new cleaned email into the email field. 
""" - if context is None: - context = {} + context = dict(context or {}) proxy_model = self.pool['ir.model.fields'] field_ids = proxy_model.search(cr, uid, [('model', '=', 'res.partner'), diff --git a/addons/crm/crm_lead.py b/addons/crm/crm_lead.py index 8a3bd5f2fbc3889004b2455338c3fbc4710127ff..7d53c1d51eb2b4272688c088fee6464e21e11a9b 100644 --- a/addons/crm/crm_lead.py +++ b/addons/crm/crm_lead.py @@ -28,7 +28,6 @@ from openerp import SUPERUSER_ID from openerp import tools from openerp.addons.base.res.res_partner import format_address from openerp.osv import fields, osv, orm -from openerp.tools import html2plaintext from openerp.tools.translate import _ CRM_LEAD_FIELDS_TO_MERGE = ['name', @@ -82,6 +81,7 @@ class crm_lead(format_address, osv.osv): _mail_mass_mailing = _('Leads / Opportunities') def get_empty_list_help(self, cr, uid, help, context=None): + context = dict(context or {}) if context.get('default_type') == 'lead': context['empty_list_help_model'] = 'crm.case.section' context['empty_list_help_id'] = context.get('default_section_id') @@ -224,7 +224,7 @@ class crm_lead(format_address, osv.osv): "Filter 'Available for Mass Mailing' allows users to filter the leads when performing mass mailing."), 'type': fields.selection([ ('lead','Lead'), ('opportunity','Opportunity'), ],'Type', select=True, help="Type is used to separate Leads and Opportunities"), 'priority': fields.selection(crm.AVAILABLE_PRIORITIES, 'Priority', select=True), - 'date_closed': fields.datetime('Closed', readonly=True), + 'date_closed': fields.datetime('Closed', readonly=True, copy=False), 'stage_id': fields.many2one('crm.case.stage', 'Stage', track_visibility='onchange', select=True, domain="['&', ('section_ids', '=', section_id), '|', ('type', '=', type), ('type', '=', 'both')]"), 'user_id': fields.many2one('res.users', 'Salesperson', select=True, track_visibility='onchange'), @@ -883,8 +883,7 @@ class crm_lead(format_address, osv.osv): return res def create(self, cr, uid, vals, 
context=None): - if context is None: - context = {} + context = dict(context or {}) if vals.get('type') and not context.get('default_type'): context['default_type'] = vals.get('type') if vals.get('section_id') and not context.get('default_section_id'): @@ -917,11 +916,10 @@ class crm_lead(format_address, osv.osv): default['date_open'] = fields.datetime.now() else: default['date_open'] = False - default['date_closed'] = False - default['stage_id'] = self._get_default_stage_id(cr, uid, local_context) - return super(crm_lead, self).copy(cr, uid, id, default, context=context) + return super(crm_lead, self).copy(cr, uid, id, default, context=local_context) def get_empty_list_help(self, cr, uid, help, context=None): + context = dict(context or {}) context['empty_list_help_model'] = 'crm.case.section' context['empty_list_help_id'] = context.get('default_section_id', None) context['empty_list_help_document_name'] = _("opportunity") diff --git a/addons/crm/crm_segmentation.py b/addons/crm/crm_segmentation.py index a2ba28b2d7e1a432bfaf70625ea852b3a21b843c..16c1588738fb131ae7f0be9de9e87a8c793469a5 100644 --- a/addons/crm/crm_segmentation.py +++ b/addons/crm/crm_segmentation.py @@ -41,7 +41,7 @@ added to partners that match the segmentation criterions after computation.'), ('running','Running')], 'Execution Status', readonly=True), 'partner_id': fields.integer('Max Partner ID processed'), 'segmentation_line': fields.one2many('crm.segmentation.line', \ - 'segmentation_id', 'Criteria', required=True), + 'segmentation_id', 'Criteria', required=True, copy=True), 'sales_purchase_active': fields.boolean('Use The Sales Purchase Rules', help='Check if you want to use this tab as part of the segmentation rule. 
If not checked, the criteria beneath will be ignored') } _defaults = { @@ -57,13 +57,13 @@ added to partners that match the segmentation criterions after computation.'), @param ids: List of Process continue’s IDs""" partner_obj = self.pool.get('res.partner') - categs = self.read(cr, uid, ids, ['categ_id', 'exclusif', 'partner_id',\ - 'sales_purchase_active', 'profiling_active']) + categs = self.read(cr, uid, ids, ['categ_id', 'exclusif', 'sales_purchase_active']) for categ in categs: if start: if categ['exclusif']: cr.execute('delete from res_partner_res_partner_category_rel \ where category_id=%s', (categ['categ_id'][0],)) + partner_obj.invalidate_cache(cr, uid, ['category_id']) id = categ['id'] @@ -86,6 +86,7 @@ added to partners that match the segmentation criterions after computation.'), if categ['categ_id'][0] not in category_ids: cr.execute('insert into res_partner_res_partner_category_rel (category_id,partner_id) \ values (%s,%s)', (categ['categ_id'][0], partner.id)) + partner_obj.invalidate_cache(cr, uid, ['category_id'], [partner.id]) self.write(cr, uid, [id], {'state':'not running', 'partner_id':0}) return True diff --git a/addons/crm/res_partner.py b/addons/crm/res_partner.py index bb216e2040e8377c0917632ff6ab601f4d5276ce..a8d5724acb18a3a4a074264019a9ab8934becfb0 100644 --- a/addons/crm/res_partner.py +++ b/addons/crm/res_partner.py @@ -53,14 +53,6 @@ class res_partner(osv.osv): 'phonecall_count': fields.function(_opportunity_meeting_phonecall_count, string="Phonecalls", type="integer", multi='opp_meet'), } - def copy(self, cr, uid, record_id, default=None, context=None): - if default is None: - default = {} - - default.update({'opportunity_ids': [], 'meeting_ids' : [], 'phonecall_ids' : []}) - - return super(res_partner, self).copy(cr, uid, record_id, default, context) - def redirect_partner_form(self, cr, uid, partner_id, context=None): search_view = self.pool.get('ir.model.data').get_object_reference(cr, uid, 'base', 'view_res_partner_filter') value = 
{ diff --git a/addons/crm/sales_team.py b/addons/crm/sales_team.py index cd75417816d643c3391afb5f4bba41ca127d210a..ce15e1791a9fd69f87db8145df2685f6ed62ab97 100644 --- a/addons/crm/sales_team.py +++ b/addons/crm/sales_team.py @@ -44,10 +44,10 @@ class crm_case_section(osv.Model): help="The first contact you get with a potential customer is a lead you qualify before converting it into a real business opportunity. Check this box to manage leads in this sales team."), 'use_opportunities': fields.boolean('Opportunities', help="Check this box to manage opportunities in this sales team."), 'monthly_open_leads': fields.function(_get_opportunities_data, - type="string", readonly=True, multi='_get_opportunities_data', + type="any", readonly=True, multi='_get_opportunities_data', string='Open Leads per Month'), 'monthly_planned_revenue': fields.function(_get_opportunities_data, - type="string", readonly=True, multi='_get_opportunities_data', + type="any", readonly=True, multi='_get_opportunities_data', string='Planned Revenue per Month'), 'alias_id': fields.many2one('mail.alias', 'Alias', ondelete="restrict", required=True, help="The email address associated with this team. 
New emails received will automatically create new leads assigned to the team."), } diff --git a/addons/crm/test/lead2opportunity_assign_salesmen.yml b/addons/crm/test/lead2opportunity_assign_salesmen.yml index 9ac3e954f88f0c182a91a1d794120bf8a5449522..2de9a2378378a493ddede8507c33460f7825f5c1 100644 --- a/addons/crm/test/lead2opportunity_assign_salesmen.yml +++ b/addons/crm/test/lead2opportunity_assign_salesmen.yml @@ -3,22 +3,22 @@ - !record {model: res.users, id: test_res_user_01}: name: 'Test user A' - login: 'tua' + login: 'tua@example.com' new_password: 'tua' - !record {model: res.users, id: test_res_user_02}: name: 'Test user B' - login: 'tub' + login: 'tub@example.com' new_password: 'tub' - !record {model: res.users, id: test_res_user_03}: name: 'Test user C' - login: 'tuc' + login: 'tuc@example.com' new_password: 'tuc' - !record {model: res.users, id: test_res_user_04}: name: 'Test user D' - login: 'tud' + login: 'tud@example.com' new_password: 'tud' - Salesman also creates lead so giving access rights of salesman. 
@@ -78,4 +78,4 @@ for opp in opps: assert opp.type == 'opportunity', 'Type mismatch: this should be an opp, not a lead' assert opp.user_id.id == salesmen_ids[i], 'Salesman mismatch: expected salesman %r, got %r' % (salesmen_ids[i], opp.user_id.id) - i = i+1 if (i < len(salesmen_ids) - 1) else 0 \ No newline at end of file + i = i+1 if (i < len(salesmen_ids) - 1) else 0 diff --git a/addons/crm/wizard/crm_lead_to_opportunity.py b/addons/crm/wizard/crm_lead_to_opportunity.py index 7964e4a4a3ae1a74cb135cf61b28bbaed907937e..e8372add327f2da1221108baacce05e7c968095d 100644 --- a/addons/crm/wizard/crm_lead_to_opportunity.py +++ b/addons/crm/wizard/crm_lead_to_opportunity.py @@ -158,7 +158,7 @@ class crm_lead2opportunity_partner(osv.osv_memory): lead_ids = [lead_id] lead = lead_obj.read(cr, uid, lead_id, ['type', 'user_id'], context=context) if lead['type'] == "lead": - context.update({'active_ids': lead_ids}) + context = dict(context, active_ids=lead_ids) self._convert_opportunity(cr, uid, ids, {'lead_ids': lead_ids, 'user_ids': [w.user_id.id], 'section_id': w.section_id.id}, context=context) elif not context.get('no_force_assignation') or not lead['user_id']: lead_obj.write(cr, uid, lead_id, {'user_id': w.user_id.id, 'section_id': w.section_id.id}, context=context) diff --git a/addons/crm/wizard/crm_merge_opportunities.py b/addons/crm/wizard/crm_merge_opportunities.py index 289266bfad33b28d8a50564c6565429d1e78096d..b1f62b91ebe2cb6afdbaab4bae973c1cfda9aed3 100644 --- a/addons/crm/wizard/crm_merge_opportunities.py +++ b/addons/crm/wizard/crm_merge_opportunities.py @@ -39,8 +39,7 @@ class crm_merge_opportunity(osv.osv_memory): } def action_merge(self, cr, uid, ids, context=None): - if context is None: - context = {} + context = dict(context or {}) lead_obj = self.pool.get('crm.lead') wizard = self.browse(cr, uid, ids[0], context=context) diff --git a/addons/crm_claim/crm_claim.py b/addons/crm_claim/crm_claim.py index 
cd4e6ad73b2a42517379f58234e35c22e36891f6..9cd255707042fd81671916f0a449a0a60b84af9f 100644 --- a/addons/crm_claim/crm_claim.py +++ b/addons/crm_claim/crm_claim.py @@ -153,8 +153,7 @@ class crm_claim(osv.osv): return {'value': {'email_from': address.email, 'partner_phone': address.phone}} def create(self, cr, uid, vals, context=None): - if context is None: - context = {} + context = dict(context or {}) if vals.get('section_id') and not context.get('default_section_id'): context['default_section_id'] = vals.get('section_id') diff --git a/addons/crm_partner_assign/crm_lead.py b/addons/crm_partner_assign/crm_lead.py index d7463355271018cbb80dfcc0ae9a411feed12263..c0dd061f511a15da24a564d2822143bc726600bb 100644 --- a/addons/crm_partner_assign/crm_lead.py +++ b/addons/crm_partner_assign/crm_lead.py @@ -31,7 +31,7 @@ class crm_lead(osv.osv): model, action_id = self.pool.get('ir.model.data').get_object_reference(cr, uid, 'crm_partner_assign', 'crm_lead_channel_interested_act') except ValueError: raise osv.except_osv(_('Error!'), _("The CRM Channel Interested Action is missing")) - action = self.pool[model].read(cr, uid, action_id, context=context) + action = self.pool[model].read(cr, uid, [action_id], context=context)[0] action_context = eval(action['context']) action_context['interested'] = interested action['context'] = str(action_context) diff --git a/addons/crm_profiling/crm_profiling.py b/addons/crm_profiling/crm_profiling.py index 5b1af306b5f68fd270b22472989a95005bd374ec..a8866e7639f228787794f46412b08d543f9aaf02 100644 --- a/addons/crm_profiling/crm_profiling.py +++ b/addons/crm_profiling/crm_profiling.py @@ -147,7 +147,7 @@ class question(osv.osv): _columns={ 'name': fields.char("Question", required=True), - 'answers_ids': fields.one2many("crm_profiling.answer","question_id","Avalaible Answers",), + 'answers_ids': fields.one2many("crm_profiling.answer", "question_id", "Available Answers", copy=True), } @@ -209,6 +209,7 @@ class partner(osv.osv): if 'answers_ids' in 
vals: vals['category_id']=[[6, 0, _recompute_categ(self, cr, uid, ids[0], vals['answers_ids'][0][2])]] + return super(partner, self).write(cr, uid, ids, vals, context=context) @@ -248,6 +249,7 @@ class crm_segmentation(osv.osv): if categ['exclusif']: cr.execute('delete from res_partner_res_partner_category_rel where \ category_id=%s', (categ['categ_id'][0],)) + partner_obj.invalidate_cache(cr, uid, ['category_id']) id = categ['id'] @@ -281,6 +283,7 @@ class crm_segmentation(osv.osv): category_ids = [categ_id.id for categ_id in partner.category_id] if categ['categ_id'][0] not in category_ids: cr.execute('insert into res_partner_res_partner_category_rel (category_id,partner_id) values (%s,%s)', (categ['categ_id'][0],partner.id)) + partner_obj.invalidate_cache(cr, uid, ['category_id'], [partner.id]) self.write(cr, uid, [id], {'state':'not running', 'partner_id':0}) return True diff --git a/addons/crm_profiling/wizard/open_questionnaire.py b/addons/crm_profiling/wizard/open_questionnaire.py index 979e9809d09b3df0bdbfeddcff1e4ff42a40fea6..b08939e3907b726948ac416b4fbcaf8d332f3bc8 100644 --- a/addons/crm_profiling/wizard/open_questionnaire.py +++ b/addons/crm_profiling/wizard/open_questionnaire.py @@ -70,7 +70,7 @@ class open_questionnaire(osv.osv_memory): result = models_data._get_id(cr, uid, 'crm_profiling', 'open_questionnaire_form') res_id = models_data.browse(cr, uid, result, context=context).res_id datas = self.browse(cr, uid, ids[0], context=context) - context.update({'questionnaire_id': datas.questionnaire_id.id}) + context = dict(context or {}, questionnaire_id=datas.questionnaire_id.id) return { 'name': _('Questionnaire'), diff --git a/addons/delivery/delivery.py b/addons/delivery/delivery.py index 2ab2a10a3de51e396015ee6f9ddb055fa3f824e2..33db829ac8017e10ed6528f22cec56e06e516785 100644 --- a/addons/delivery/delivery.py +++ b/addons/delivery/delivery.py @@ -178,7 +178,7 @@ class delivery_grid(osv.osv): 'state_ids': fields.many2many('res.country.state', 
'delivery_grid_state_rel', 'grid_id', 'state_id', 'States'), 'zip_from': fields.char('Start Zip', size=12), 'zip_to': fields.char('To Zip', size=12), - 'line_ids': fields.one2many('delivery.grid.line', 'grid_id', 'Grid Line'), + 'line_ids': fields.one2many('delivery.grid.line', 'grid_id', 'Grid Line', copy=True), 'active': fields.boolean('Active', help="If the active field is set to False, it will allow you to hide the delivery grid without removing it."), } _defaults = { diff --git a/addons/document/document.py b/addons/document/document.py index d611539a00a1a0685ed38864df72b106a8890347..96868d3d66b1dbb7452cd9554dfb68d7561eb50c 100644 --- a/addons/document/document.py +++ b/addons/document/document.py @@ -562,6 +562,7 @@ class document_storage(osv.osv): # to write the fname and size, and update them in the db concurrently. # We cannot use a write() here, because we are already in one. cr.execute('UPDATE ir_attachment SET file_size = %s, index_content = %s, file_type = %s WHERE id = %s', (filesize, icont_u, mime, file_node.file_id)) + self.pool.get('ir.attachment').invalidate_cache(cr, uid, ['file_size', 'index_content', 'file_type'], [file_node.file_id], context=context) file_node.content_length = filesize file_node.content_type = mime return True @@ -1114,7 +1115,7 @@ class node_dir(node_database): if not self.check_perms('u'): raise IOError(errno.EPERM,"Permission denied.") - if directory._table_name=='document.directory': + if directory._name == 'document.directory': if self.children(cr): raise OSError(39, 'Directory not empty.') res = self.context._dirobj.unlink(cr, uid, [directory.id]) @@ -1694,7 +1695,7 @@ class node_file(node_class): return False document = document_obj.browse(cr, uid, self.file_id, context=self.context.context) res = False - if document and document._table_name == 'ir.attachment': + if document and document._name == 'ir.attachment': res = document_obj.unlink(cr, uid, [document.id]) return res diff --git a/addons/edi/models/edi.py 
b/addons/edi/models/edi.py index 1b9e0c4df083be12e5fa4f969b6e006919ad811b..43c4dfcafb53ec2fc07196d808eaf72f97907f5b 100644 --- a/addons/edi/models/edi.py +++ b/addons/edi/models/edi.py @@ -2,7 +2,7 @@ ############################################################################## # # OpenERP, Open Source Business Applications -# Copyright (c) 2011-2012 OpenERP S.A. <http://openerp.com> +# Copyright (c) 2011-2014 OpenERP S.A. <http://openerp.com> # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as @@ -65,8 +65,8 @@ def last_update_for(record): """Returns the last update timestamp for the given record, if available, otherwise False """ - if record._model._log_access: - record_log = record.perm_read()[0] + if record._log_access: + record_log = record.get_metadata()[0] return record_log.get('write_date') or record_log.get('create_date') or False return False diff --git a/addons/email_template/email_template.py b/addons/email_template/email_template.py index 90695bed8298059917c3c2c76fd04b237be3cdd6..128e5928f70c1480d52b9035779cb3c689ad78e6 100644 --- a/addons/email_template/email_template.py +++ b/addons/email_template/email_template.py @@ -30,7 +30,7 @@ import urlparse import openerp from openerp import SUPERUSER_ID from openerp.osv import osv, fields -from openerp import tools +from openerp import tools, api from openerp.tools.translate import _ from urllib import urlencode, quote as quote @@ -151,6 +151,7 @@ class email_template(osv.osv): """ if context is None: context = {} + res_ids = filter(None, res_ids) # to avoid browsing [None] below results = dict.fromkeys(res_ids, u"") # try to load the template @@ -250,10 +251,10 @@ class email_template(osv.osv): help="Name to use for the generated report file (may contain placeholders)\n" "The extension can be omitted and will then come from the report type."), 'report_template': fields.many2one('ir.actions.report.xml', 'Optional report to 
print and attach'), - 'ref_ir_act_window': fields.many2one('ir.actions.act_window', 'Sidebar action', readonly=True, + 'ref_ir_act_window': fields.many2one('ir.actions.act_window', 'Sidebar action', readonly=True, copy=False, help="Sidebar action to make this template available on records " "of the related document model"), - 'ref_ir_value': fields.many2one('ir.values', 'Sidebar Button', readonly=True, + 'ref_ir_value': fields.many2one('ir.values', 'Sidebar Button', readonly=True, copy=False, help="Sidebar button to open the sidebar action"), 'attachment_ids': fields.many2many('ir.attachment', 'email_template_attachment_rel', 'email_template_id', 'attachment_id', 'Attachments', @@ -334,13 +335,8 @@ class email_template(osv.osv): def copy(self, cr, uid, id, default=None, context=None): template = self.browse(cr, uid, id, context=context) - if default is None: - default = {} - default = default.copy() - default.update( - name=_("%s (copy)") % (template.name), - ref_ir_act_window=False, - ref_ir_value=False) + default = dict(default or {}, + name=_("%s (copy)") % template.name) return super(email_template, self).copy(cr, uid, id, default, context) def build_expression(self, field_name, sub_field_name, null_value): @@ -505,6 +501,7 @@ class email_template(osv.osv): return results + @api.cr_uid_id_context def send_mail(self, cr, uid, template_id, res_id, force_send=False, raise_exception=False, context=None): """Generates a new mail message for the given template and record, and schedules it for delivery through the ``mail`` module's scheduler. 
@@ -541,6 +538,7 @@ class email_template(osv.osv): 'res_model': 'mail.message', 'res_id': mail.mail_message_id.id, } + context = dict(context) context.pop('default_type', None) attachment_ids.append(ir_attachment.create(cr, uid, attachment_data, context=context)) if attachment_ids: diff --git a/addons/email_template/ir_actions.py b/addons/email_template/ir_actions.py index ae2395e3b818b1922cd227fdfbd5203711545639..3e7d2c1c67a15a5eda8b8706ff6c5685ad30f219 100644 --- a/addons/email_template/ir_actions.py +++ b/addons/email_template/ir_actions.py @@ -63,7 +63,7 @@ class actions_server(osv.Model): """ Render the raw template in the server action fields. """ fields = ['subject', 'body_html', 'email_from', 'email_to', 'partner_to'] if template_id: - template_values = self.pool.get('email.template').read(cr, uid, template_id, fields, context) + template_values = self.pool.get('email.template').read(cr, uid, [template_id], fields, context)[0] values = dict((field, template_values[field]) for field in fields if template_values.get(field)) if not values.get('email_from'): return {'warning': {'title': 'Incomplete template', 'message': 'Your template should define email_from'}, 'value': values} diff --git a/addons/email_template/tests/test_mail.py b/addons/email_template/tests/test_mail.py index f88d11ec71da8259296dd39facea372d5ccfd3ba..f5a9913735153e978bf9162d7c1c35f70c8becbb 100644 --- a/addons/email_template/tests/test_mail.py +++ b/addons/email_template/tests/test_mail.py @@ -202,7 +202,7 @@ class test_message_compose(TestMail): mail_value = mail_compose.generate_email_for_composer(cr, uid, email_template_id, uid) self.assertEqual(set(mail_value['partner_ids']), set(send_to), 'mail.message partner_ids list created by template is incorrect') - @mute_logger('openerp.osv.orm', 'openerp.osv.orm') + @mute_logger('openerp.models') def test_10_email_templating(self): """ Tests designed for the mail.compose.message wizard updated by email_template. 
""" cr, uid, context = self.cr, self.uid, {} diff --git a/addons/email_template/wizard/mail_compose_message.py b/addons/email_template/wizard/mail_compose_message.py index 7d148c87e3c44ed58a5932081645c724e1f0618d..ebe66757b096c51a554893475b08e614c6b3bb9d 100644 --- a/addons/email_template/wizard/mail_compose_message.py +++ b/addons/email_template/wizard/mail_compose_message.py @@ -113,7 +113,7 @@ class mail_compose_message(osv.TransientModel): } values.setdefault('attachment_ids', list()).append(ir_attach_obj.create(cr, uid, data_attach, context=context)) else: - values = self.default_get(cr, uid, ['subject', 'body', 'email_from', 'email_to', 'email_cc', 'partner_to', 'reply_to', 'attachment_ids', 'mail_server_id'], context=context) + values = self.default_get(cr, uid, ['subject', 'body', 'email_from', 'reply_to', 'attachment_ids', 'mail_server_id'], context=context) if values.get('body_html'): values['body'] = values.pop('body_html') diff --git a/addons/event/event.py b/addons/event/event.py index 9d7e878688fcfb1c85aae5999ac437c9ff62e49c..2fb831b66f30d4f524c99bc8c5b861372eb4adca 100644 --- a/addons/event/event.py +++ b/addons/event/event.py @@ -18,403 +18,365 @@ # along with this program. If not, see <http://www.gnu.org/licenses/>. 
# ############################################################################## +from datetime import timedelta + import pytz -from openerp.tools import DEFAULT_SERVER_DATETIME_FORMAT -from datetime import datetime, timedelta -from openerp.osv import fields, osv -from openerp.tools.translate import _ -from openerp import SUPERUSER_ID -class event_type(osv.osv): +from openerp import models, fields, api, _ +from openerp.exceptions import Warning + +class event_type(models.Model): """ Event Type """ _name = 'event.type' - _description = __doc__ - _columns = { - 'name': fields.char('Event Type', required=True), - 'default_reply_to': fields.char('Default Reply-To', size=64, help="The email address of the organizer which is put in the 'Reply-To' of all emails sent automatically at event or registrations confirmation. You can also put your email address of your mail gateway if you use one." ), - 'default_email_event': fields.many2one('email.template','Event Confirmation Email', help="It will select this default confirmation event mail value when you choose this event"), - 'default_email_registration': fields.many2one('email.template','Registration Confirmation Email', help="It will select this default confirmation registration mail value when you choose this event"), - 'default_registration_min': fields.integer('Default Minimum Registration', help="It will select this default minimum value when you choose this event"), - 'default_registration_max': fields.integer('Default Maximum Registration', help="It will select this default maximum value when you choose this event"), - } - _defaults = { - 'default_registration_min': 0, - 'default_registration_max': 0, - } - -class event_event(osv.osv): + + name = fields.Char(string='Event Type', required=True) + default_reply_to = fields.Char(string='Default Reply-To', + help="The email address of the organizer which is put in the 'Reply-To' of all emails sent automatically at event or registrations confirmation. 
You can also put your email address of your mail gateway if you use one.") + default_email_event = fields.Many2one('email.template', string='Event Confirmation Email', + help="It will select this default confirmation event mail value when you choose this event") + default_email_registration = fields.Many2one('email.template', string='Registration Confirmation Email', + help="It will select this default confirmation registration mail value when you choose this event") + default_registration_min = fields.Integer(string='Default Minimum Registration', default=0, + help="It will select this default minimum value when you choose this event") + default_registration_max = fields.Integer(string='Default Maximum Registration', default=0, + help="It will select this default maximum value when you choose this event") + + +class event_event(models.Model): """Event""" _name = 'event.event' - _description = __doc__ - _order = 'date_begin' _inherit = ['mail.thread', 'ir.needaction_mixin'] + _order = 'date_begin' - def name_get(self, cr, uid, ids, context=None): - if not ids: - return [] - - if isinstance(ids, (long, int)): - ids = [ids] - - res = [] - for record in self.browse(cr, uid, ids, context=context): - date = record.date_begin.split(" ")[0] - date_end = record.date_end.split(" ")[0] - if date != date_end: - date += ' - ' + date_end - display_name = record.name + ' (' + date + ')' - res.append((record['id'], display_name)) - return res - - def copy(self, cr, uid, id, default=None, context=None): - """ Reset the state and the registrations while copying an event - """ - if not default: - default = {} - default.update({ - 'state': 'draft', - 'registration_ids': False, - }) - return super(event_event, self).copy(cr, uid, id, default=default, context=context) - - def button_draft(self, cr, uid, ids, context=None): - return self.write(cr, uid, ids, {'state': 'draft'}, context=context) - - def button_cancel(self, cr, uid, ids, context=None): - registration = 
self.pool.get('event.registration') - reg_ids = registration.search(cr, uid, [('event_id','in',ids)], context=context) - for event_reg in registration.browse(cr,uid,reg_ids,context=context): - if event_reg.state == 'done': - raise osv.except_osv(_('Error!'),_("You have already set a registration for this event as 'Attended'. Please reset it to draft if you want to cancel this event.") ) - registration.write(cr, uid, reg_ids, {'state': 'cancel'}, context=context) - return self.write(cr, uid, ids, {'state': 'cancel'}, context=context) - - def button_done(self, cr, uid, ids, context=None): - return self.write(cr, uid, ids, {'state': 'done'}, context=context) - - def confirm_event(self, cr, uid, ids, context=None): - register_pool = self.pool.get('event.registration') - for event in self.browse(cr, uid, ids, context=context): - if event.email_confirmation_id: - #send reminder that will confirm the event for all the people that were already confirmed - reg_ids = register_pool.search(cr, uid, [ - ('event_id', '=', event.id), - ('state', 'not in', ['draft', 'cancel'])], context=context) - register_pool.mail_user_confirm(cr, uid, reg_ids) - return self.write(cr, uid, ids, {'state': 'confirm'}, context=context) - - def button_confirm(self, cr, uid, ids, context=None): - """ Confirm Event and send confirmation email to all register peoples - """ - return self.confirm_event(cr, uid, isinstance(ids, (int, long)) and [ids] or ids, context=context) - - def _get_seats(self, cr, uid, ids, fields, args, context=None): - """Get reserved, available, reserved but unconfirmed and used seats. - @return: Dictionary of function field values. 
- """ - keys = {'draft': 'seats_unconfirmed', 'open':'seats_reserved', 'done': 'seats_used'} - res = {} - for event_id in ids: - res[event_id] = {key:0 for key in keys.values()} - query = "SELECT state, sum(nb_register) FROM event_registration WHERE event_id = %s AND state IN ('draft','open','done') GROUP BY state" - for event in self.pool.get('event.event').browse(cr, uid, ids, context=context): - cr.execute(query, (event.id,)) - reg_states = cr.fetchall() - for reg_state in reg_states: - res[event.id][keys[reg_state[0]]] = reg_state[1] - res[event.id]['seats_available'] = event.seats_max - \ - (res[event.id]['seats_reserved'] + res[event.id]['seats_used']) \ - if event.seats_max > 0 else None - return res - - def _get_events_from_registrations(self, cr, uid, ids, context=None): - """Get reserved, available, reserved but unconfirmed and used seats, of the event related to a registration. - @return: Dictionary of function field values. - """ - event_ids=set() - for registration in self.pool['event.registration'].browse(cr, uid, ids, context=context): - event_ids.add(registration.event_id.id) - return list(event_ids) - - def _subscribe_fnc(self, cr, uid, ids, fields, args, context=None): - """This functional fields compute if the current user (uid) is already subscribed or not to the event passed in parameter (ids) - """ - register_pool = self.pool.get('event.registration') - res = {} - for event in self.browse(cr, uid, ids, context=context): - res[event.id] = False - curr_reg_id = register_pool.search(cr, uid, [('user_id', '=', uid), ('event_id', '=' ,event.id)]) - if curr_reg_id: - for reg in register_pool.browse(cr, uid, curr_reg_id, context=context): - if reg.state in ('open','done'): - res[event.id]= True - continue - return res - - def _count_registrations(self, cr, uid, ids, field_name, arg, context=None): - return { - event.id: len(event.registration_ids) - for event in self.browse(cr, uid, ids, context=context) - } - - def _compute_date_tz(self, cr, uid, 
ids, fld, arg, context=None): - if context is None: - context = {} - res = {} - for event in self.browse(cr, uid, ids, context=context): - ctx = dict(context, tz=(event.date_tz or 'UTC')) - if fld == 'date_begin_located': - date_to_convert = event.date_begin - elif fld == 'date_end_located': - date_to_convert = event.date_end - res[event.id] = fields.datetime.context_timestamp(cr, uid, datetime.strptime(date_to_convert, DEFAULT_SERVER_DATETIME_FORMAT), context=ctx) - return res - - def _tz_get(self, cr, uid, context=None): + name = fields.Char(string='Event Name', translate=True, required=True, + readonly=False, states={'done': [('readonly', True)]}) + user_id = fields.Many2one('res.users', string='Responsible User', + default=lambda self: self.env.user, + readonly=False, states={'done': [('readonly', True)]}) + type = fields.Many2one('event.type', string='Type of Event', + readonly=False, states={'done': [('readonly', True)]}) + seats_max = fields.Integer(string='Maximum Avalaible Seats', oldname='register_max', + readonly=True, states={'draft': [('readonly', False)]}, + help="You can for each event define a maximum registration level. If you have too much registrations you are not able to confirm your event. (put 0 to ignore this rule )") + seats_min = fields.Integer(string='Minimum Reserved Seats', oldname='register_min', + readonly=True, states={'draft': [('readonly', False)]}, + help="You can for each event define a minimum registration level. If you do not enough registrations you are not able to confirm your event. 
(put 0 to ignore this rule )") + + seats_reserved = fields.Integer(oldname='register_current', string='Reserved Seats', + store=True, readonly=True, compute='_compute_seats') + seats_available = fields.Integer(oldname='register_avail', string='Available Seats', + store=True, readonly=True, compute='_compute_seats') + seats_unconfirmed = fields.Integer(oldname='register_prospect', string='Unconfirmed Seat Reservations', + store=True, readonly=True, compute='_compute_seats') + seats_used = fields.Integer(oldname='register_attended', string='Number of Participations', + store=True, readonly=True, compute='_compute_seats') + + @api.multi + @api.depends('seats_max', 'registration_ids.state', 'registration_ids.nb_register') + def _compute_seats(self): + """ Determine reserved, available, reserved but unconfirmed and used seats. """ + # initialize fields to 0 + for event in self: + event.seats_unconfirmed = event.seats_reserved = event.seats_used = 0 + # aggregate registrations by event and by state + if self.ids: + state_field = { + 'draft': 'seats_unconfirmed', + 'open':'seats_reserved', + 'done': 'seats_used', + } + query = """ SELECT event_id, state, sum(nb_register) + FROM event_registration + WHERE event_id IN %s AND state IN ('draft', 'open', 'done') + GROUP BY event_id, state + """ + self._cr.execute(query, (tuple(self.ids),)) + for event_id, state, num in self._cr.fetchall(): + event = self.browse(event_id) + event[state_field[state]] += num + # compute seats_available + for event in self: + event.seats_available = \ + event.seats_max - (event.seats_reserved + event.seats_used) \ + if event.seats_max > 0 else 0 + + registration_ids = fields.One2many('event.registration', 'event_id', string='Registrations', + readonly=False, states={'done': [('readonly', True)]}) + count_registrations = fields.Integer(string='Registrations', + compute='_count_registrations') + + date_begin = fields.Datetime(string='Start Date', required=True, + readonly=True, states={'draft': 
[('readonly', False)]}) + date_end = fields.Datetime(string='End Date', required=True, + readonly=True, states={'draft': [('readonly', False)]}) + + @api.model + def _tz_get(self): return [(x, x) for x in pytz.all_timezones] - _columns = { - 'name': fields.char('Event Name', required=True, translate=True, readonly=False, states={'done': [('readonly', True)]}), - 'user_id': fields.many2one('res.users', 'Responsible User', readonly=False, states={'done': [('readonly', True)]}), - 'type': fields.many2one('event.type', 'Type of Event', readonly=False, states={'done': [('readonly', True)]}), - 'seats_max': fields.integer('Maximum Avalaible Seats', oldname='register_max', help="You can for each event define a maximum registration level. If you have too much registrations you are not able to confirm your event. (put 0 to ignore this rule )", readonly=True, states={'draft': [('readonly', False)]}), - 'seats_min': fields.integer('Minimum Reserved Seats', oldname='register_min', help="You can for each event define a minimum registration level. If you do not enough registrations you are not able to confirm your event. 
(put 0 to ignore this rule )", readonly=True, states={'draft': [('readonly', False)]}), - 'seats_reserved': fields.function(_get_seats, oldname='register_current', string='Reserved Seats', type='integer', multi='seats_reserved', - store={'event.registration': (_get_events_from_registrations, ['state'], 10), - 'event.event': (lambda self, cr, uid, ids, c = {}: ids, ['seats_max', 'registration_ids'], 20)}), - 'seats_available': fields.function(_get_seats, oldname='register_avail', string='Available Seats', type='integer', multi='seats_reserved', - store={'event.registration': (_get_events_from_registrations, ['state'], 10), - 'event.event': (lambda self, cr, uid, ids, c = {}: ids, ['seats_max', 'registration_ids'], 20)}), - 'seats_unconfirmed': fields.function(_get_seats, oldname='register_prospect', string='Unconfirmed Seat Reservations', type='integer', multi='seats_reserved', - store={'event.registration': (_get_events_from_registrations, ['state'], 10), - 'event.event': (lambda self, cr, uid, ids, c = {}: ids, ['seats_max', 'registration_ids'], 20)}), - 'seats_used': fields.function(_get_seats, oldname='register_attended', string='Number of Participations', type='integer', multi='seats_reserved', - store={'event.registration': (_get_events_from_registrations, ['state'], 10), - 'event.event': (lambda self, cr, uid, ids, c = {}: ids, ['seats_max', 'registration_ids'], 20)}), - 'registration_ids': fields.one2many('event.registration', 'event_id', 'Registrations', readonly=False, states={'done': [('readonly', True)]}), - 'date_tz': fields.selection(_tz_get, string='Timezone'), - 'date_begin': fields.datetime('Start Date', required=True, readonly=True, states={'draft': [('readonly', False)]}), - 'date_end': fields.datetime('End Date', required=True, readonly=True, states={'draft': [('readonly', False)]}), - 'date_begin_located': fields.function(_compute_date_tz, string='Start Date Located', type="datetime"), - 'date_end_located': fields.function(_compute_date_tz, 
string='End Date Located', type="datetime"), - 'state': fields.selection([ + date_tz = fields.Selection('_tz_get', string='Timezone', + default=lambda self: self._context.get('tz', 'UTC')) + + @api.one + @api.depends('date_tz', 'date_begin') + def _compute_date_begin_tz(self): + if self.date_begin: + self_in_tz = self.with_context(tz=(self.date_tz or 'UTC')) + date_begin = fields.Datetime.from_string(self.date_begin) + self.date_begin_located = fields.Datetime.to_string(fields.Datetime.context_timestamp(self_in_tz, date_begin)) + else: + self.date_begin_located = False + + @api.one + @api.depends('date_tz', 'date_end') + def _compute_date_end_tz(self): + if self.date_end: + self_in_tz = self.with_context(tz=(self.date_tz or 'UTC')) + date_end = fields.Datetime.from_string(self.date_end) + self.date_end_located = fields.Datetime.to_string(fields.Datetime.context_timestamp(self_in_tz, date_end)) + else: + self.date_end_located = False + + date_begin_located = fields.Datetime(string='Start Date Located', compute='_compute_date_begin_tz') + date_end_located = fields.Datetime(string='End Date Located', compute='_compute_date_end_tz') + + state = fields.Selection([ ('draft', 'Unconfirmed'), ('cancel', 'Cancelled'), ('confirm', 'Confirmed'), - ('done', 'Done')], - 'Status', readonly=True, required=True, - help='If event is created, the status is \'Draft\'.If event is confirmed for the particular dates the status is set to \'Confirmed\'. 
If the event is over, the status is set to \'Done\'.If event is cancelled the status is set to \'Cancelled\'.'), - 'email_registration_id' : fields.many2one('email.template','Registration Confirmation Email', help='This field contains the template of the mail that will be automatically sent each time a registration for this event is confirmed.'), - 'email_confirmation_id' : fields.many2one('email.template','Event Confirmation Email', help="If you set an email template, each participant will receive this email announcing the confirmation of the event."), - 'reply_to': fields.char('Reply-To Email', size=64, readonly=False, states={'done': [('readonly', True)]}, help="The email address of the organizer is likely to be put here, with the effect to be in the 'Reply-To' of the mails sent automatically at event or registrations confirmation. You can also put the email address of your mail gateway if you use one."), - 'address_id': fields.many2one('res.partner','Location', readonly=False, states={'done': [('readonly', True)]}), - 'country_id': fields.related('address_id', 'country_id', - type='many2one', relation='res.country', string='Country', readonly=False, states={'done': [('readonly', True)]}, store=True), - 'description': fields.html( - 'Description', readonly=False, translate=True, - states={'done': [('readonly', True)]}, - oldname='note'), - 'company_id': fields.many2one('res.company', 'Company', required=False, change_default=True, readonly=False, states={'done': [('readonly', True)]}), - 'is_subscribed' : fields.function(_subscribe_fnc, type="boolean", string='Subscribed'), - 'organizer_id': fields.many2one('res.partner', "Organizer"), - 'count_registrations': fields.function(_count_registrations, type="integer", string="Registrations"), - } - _defaults = { - 'state': 'draft', - 'company_id': lambda self,cr,uid,c: self.pool.get('res.company')._company_default_get(cr, uid, 'event.event', context=c), - 'user_id': lambda obj, cr, uid, context: uid, - 
'organizer_id': lambda self, cr, uid, c: self.pool.get('res.users').browse(cr, uid, uid, context=c).company_id.partner_id.id, - 'address_id': lambda self, cr, uid, c: self.pool.get('res.users').browse(cr, uid, uid, context=c).company_id.partner_id.id, - 'date_tz': lambda self, cr, uid, ctx: ctx.get('tz', "UTC"), - } - - def _check_seats_limit(self, cr, uid, ids, context=None): - for event in self.browse(cr, uid, ids, context=context): - if event.seats_max and event.seats_available < 0: - return False - return True - - _constraints = [ - (_check_seats_limit, 'No more available seats.', ['registration_ids','seats_max']), - ] - - def subscribe_to_event(self, cr, uid, ids, context=None): - register_pool = self.pool.get('event.registration') - user_pool = self.pool.get('res.users') - num_of_seats = int(context.get('ticket', 1)) - user = user_pool.browse(cr, uid, uid, context=context) - curr_reg_ids = register_pool.search(cr, uid, [('user_id', '=', user.id), ('event_id', '=' , ids[0])]) - #the subscription is done with SUPERUSER_ID because in case we share the kanban view, we want anyone to be able to subscribe - if not curr_reg_ids: - curr_reg_ids = [register_pool.create(cr, SUPERUSER_ID, {'event_id': ids[0] ,'email': user.email, 'name':user.name, 'user_id': user.id, 'nb_register': num_of_seats})] + ('done', 'Done') + ], string='Status', default='draft', readonly=True, required=True, copy=False, + help="If event is created, the status is 'Draft'. If event is confirmed for the particular dates the status is set to 'Confirmed'. If the event is over, the status is set to 'Done'. 
If event is cancelled the status is set to 'Cancelled'.") + email_registration_id = fields.Many2one('email.template', string='Registration Confirmation Email', + help='This field contains the template of the mail that will be automatically sent each time a registration for this event is confirmed.') + email_confirmation_id = fields.Many2one('email.template', string='Event Confirmation Email', + help="If you set an email template, each participant will receive this email announcing the confirmation of the event.") + reply_to = fields.Char(string='Reply-To Email', + readonly=False, states={'done': [('readonly', True)]}, + help="The email address of the organizer is likely to be put here, with the effect to be in the 'Reply-To' of the mails sent automatically at event or registrations confirmation. You can also put the email address of your mail gateway if you use one.") + address_id = fields.Many2one('res.partner', string='Location', + default=lambda self: self.env.user.company_id.partner_id, + readonly=False, states={'done': [('readonly', True)]}) + country_id = fields.Many2one('res.country', string='Country', related='address_id.country_id', + store=True, readonly=False, states={'done': [('readonly', True)]}) + description = fields.Html(string='Description', oldname='note', translate=True, + readonly=False, states={'done': [('readonly', True)]}) + company_id = fields.Many2one('res.company', string='Company', change_default=True, + default=lambda self: self.env['res.company']._company_default_get('event.event'), + required=False, readonly=False, states={'done': [('readonly', True)]}) + organizer_id = fields.Many2one('res.partner', string='Organizer', + default=lambda self: self.env.user.company_id.partner_id) + + is_subscribed = fields.Boolean(string='Subscribed', + compute='_compute_subscribe') + + @api.one + @api.depends('registration_ids') + def _count_registrations(self): + self.count_registrations = len(self.registration_ids) + + @api.one + 
@api.depends('registration_ids.user_id', 'registration_ids.state') + def _compute_subscribe(self): + """ Determine whether the current user is already subscribed to any event in `self` """ + user = self.env.user + self.is_subscribed = any( + reg.user_id == user and reg.state in ('open', 'done') + for reg in self.registration_ids + ) + + @api.one + @api.depends('name', 'date_begin', 'date_end') + def _compute_display_name(self): + dates = [dt.split(' ')[0] for dt in [self.date_begin, self.date_end] if dt] + dates = sorted(set(dates)) + self.display_name = '%s (%s)' % (self.name, ' - '.join(dates)) + + @api.one + @api.constrains('seats_max', 'seats_available') + def _check_seats_limit(self): + if self.seats_max and self.seats_available < 0: + raise Warning(_('No more available seats.')) + + @api.one + @api.constrains('date_begin', 'date_end') + def _check_closing_date(self): + if self.date_end < self.date_begin: + raise Warning(_('Closing Date cannot be set before Beginning Date.')) + + @api.one + def button_draft(self): + self.state = 'draft' + + @api.one + def button_cancel(self): + for event_reg in self.registration_ids: + if event_reg.state == 'done': + raise Warning(_("You have already set a registration for this event as 'Attended'. 
Please reset it to draft if you want to cancel this event.")) + self.registration_ids.write({'state': 'cancel'}) + self.state = 'cancel' + + @api.one + def button_done(self): + self.state = 'done' + + @api.one + def confirm_event(self): + if self.email_confirmation_id: + # send reminder that will confirm the event for all the people that were already confirmed + regs = self.registration_ids.filtered(lambda reg: reg.state not in ('draft', 'cancel')) + regs.mail_user_confirm() + self.state = 'confirm' + + @api.one + def button_confirm(self): + """ Confirm Event and send confirmation email to all register peoples """ + self.confirm_event() + + @api.one + def subscribe_to_event(self): + """ Subscribe the current user to a given event """ + user = self.env.user + num_of_seats = int(self._context.get('ticket', 1)) + regs = self.registration_ids.filtered(lambda reg: reg.user_id == user) + # the subscription is done as SUPERUSER_ID because in case we share the + # kanban view, we want anyone to be able to subscribe + if not regs: + regs = regs.sudo().create({ + 'event_id': self.id, + 'email': user.email, + 'name':user.name, + 'user_id': user.id, + 'nb_register': num_of_seats, + }) else: - register_pool.write(cr, uid, curr_reg_ids, {'nb_register': num_of_seats}, context=context) - return register_pool.confirm_registration(cr, SUPERUSER_ID, curr_reg_ids, context=context) - - def unsubscribe_to_event(self, cr, uid, ids, context=None): - register_pool = self.pool.get('event.registration') - #the unsubscription is done with SUPERUSER_ID because in case we share the kanban view, we want anyone to be able to unsubscribe - curr_reg_ids = register_pool.search(cr, SUPERUSER_ID, [('user_id', '=', uid), ('event_id', '=', ids[0])]) - return register_pool.button_reg_cancel(cr, SUPERUSER_ID, curr_reg_ids, context=context) - - def _check_closing_date(self, cr, uid, ids, context=None): - for event in self.browse(cr, uid, ids, context=context): - if event.date_end < event.date_begin: - 
return False - return True - - _constraints = [ - (_check_closing_date, 'Error ! Closing Date cannot be set before Beginning Date.', ['date_end']), - ] - - def onchange_event_type(self, cr, uid, ids, type_event, context=None): - values = {} - if type_event: - type_info = self.pool.get('event.type').browse(cr,uid,type_event,context) - dic ={ - 'reply_to': type_info.default_reply_to, - 'email_registration_id': type_info.default_email_registration.id, - 'email_confirmation_id': type_info.default_email_event.id, - 'seats_min': type_info.default_registration_min, - 'seats_max': type_info.default_registration_max, - } - values.update(dic) - return values - - def onchange_start_date(self, cr, uid, ids, date_begin=False, date_end=False, context=None): - res = {'value':{}} - if date_end: - return res - if date_begin and isinstance(date_begin, str): - date_begin = datetime.strptime(date_begin, "%Y-%m-%d %H:%M:%S") - date_end = date_begin + timedelta(hours=1) - res['value'] = {'date_end': date_end.strftime("%Y-%m-%d %H:%M:%S")} - return res - - -class event_registration(osv.osv): + regs.write({'nb_register': num_of_seats}) + regs.sudo().confirm_registration() + + @api.one + def unsubscribe_to_event(self): + """ Unsubscribe the current user from a given event """ + # the unsubscription is done as SUPERUSER_ID because in case we share + # the kanban view, we want anyone to be able to unsubscribe + user = self.env.user + regs = self.sudo().registration_ids.filtered(lambda reg: reg.user_id == user) + regs.button_reg_cancel() + + @api.onchange('type') + def _onchange_type(self): + if self.type: + self.reply_to = self.type.default_reply_to + self.email_registration_id = self.type.default_email_registration + self.email_confirmation_id = self.type.default_email_event + self.seats_min = self.type.default_registration_min + self.seats_max = self.type.default_registration_max + + @api.onchange('date_begin') + def _onchange_date_begin(self): + if self.date_begin and not self.date_end: + 
date_begin = fields.Datetime.from_string(self.date_begin) + self.date_end = fields.Datetime.to_string(date_begin + timedelta(hours=1)) + + +class event_registration(models.Model): """Event Registration""" _name= 'event.registration' - _description = __doc__ _inherit = ['mail.thread', 'ir.needaction_mixin'] - _columns = { - 'id': fields.integer('ID'), - 'origin': fields.char('Source Document', readonly=True,help="Reference of the sales order which created the registration"), - 'nb_register': fields.integer('Number of Participants', required=True, readonly=True, states={'draft': [('readonly', False)]}), - 'event_id': fields.many2one('event.event', 'Event', required=True, readonly=True, states={'draft': [('readonly', False)]}), - 'partner_id': fields.many2one('res.partner', 'Partner', states={'done': [('readonly', True)]}), - 'create_date': fields.datetime('Creation Date' , readonly=True), - 'date_closed': fields.datetime('Attended Date', readonly=True), - 'date_open': fields.datetime('Registration Date', readonly=True), - 'reply_to': fields.related('event_id','reply_to',string='Reply-to Email', type='char', readonly=True,), - 'log_ids': fields.one2many('mail.message', 'res_id', 'Logs', domain=[('model','=',_name)]), - 'event_end_date': fields.related('event_id','date_end', type='datetime', string="Event End Date", readonly=True), - 'event_begin_date': fields.related('event_id', 'date_begin', type='datetime', string="Event Start Date", readonly=True), - 'user_id': fields.many2one('res.users', 'User', states={'done': [('readonly', True)]}), - 'company_id': fields.related('event_id', 'company_id', type='many2one', relation='res.company', string='Company', store=True, readonly=True, states={'draft':[('readonly',False)]}), - 'state': fields.selection([('draft', 'Unconfirmed'), - ('cancel', 'Cancelled'), - ('open', 'Confirmed'), - ('done', 'Attended')], 'Status', - readonly=True), - 'email': fields.char('Email', size=64), - 'phone': fields.char('Phone', size=64), - 'name': 
fields.char('Name', select=True), - } - _defaults = { - 'nb_register': 1, - 'state': 'draft', - } _order = 'name, create_date desc' + origin = fields.Char(string='Source Document', readonly=True, + help="Reference of the sales order which created the registration") + nb_register = fields.Integer(string='Number of Participants', required=True, default=1, + readonly=True, states={'draft': [('readonly', False)]}) + event_id = fields.Many2one('event.event', string='Event', required=True, + readonly=True, states={'draft': [('readonly', False)]}) + partner_id = fields.Many2one('res.partner', string='Partner', + states={'done': [('readonly', True)]}) + date_open = fields.Datetime(string='Registration Date', readonly=True) + date_closed = fields.Datetime(string='Attended Date', readonly=True) + reply_to = fields.Char(string='Reply-to Email', related='event_id.reply_to', + readonly=True) + log_ids = fields.One2many('mail.message', 'res_id', string='Logs', + domain=[('model', '=', _name)]) + event_begin_date = fields.Datetime(string="Event Start Date", related='event_id.date_begin', + readonly=True) + event_end_date = fields.Datetime(string="Event End Date", related='event_id.date_end', + readonly=True) + user_id = fields.Many2one('res.users', string='User', states={'done': [('readonly', True)]}) + company_id = fields.Many2one('res.company', string='Company', related='event_id.company_id', + store=True, readonly=True, states={'draft':[('readonly', False)]}) + state = fields.Selection([ + ('draft', 'Unconfirmed'), + ('cancel', 'Cancelled'), + ('open', 'Confirmed'), + ('done', 'Attended'), + ], string='Status', default='draft', readonly=True, copy=False) + email = fields.Char(string='Email') + phone = fields.Char(string='Phone') + name = fields.Char(string='Name', select=True) + + @api.one + @api.constrains('event_id', 'state', 'nb_register') + def _check_seats_limit(self): + if self.event_id.seats_max and \ + self.event_id.seats_available < (self.nb_register if self.state == 
'draft' else 0): + raise Warning(_('No more available seats.')) + + @api.one + def do_draft(self): + self.state = 'draft' + + @api.one + def confirm_registration(self): + self.event_id.message_post( + body=_('New registration confirmed: %s.') % (self.name or ''), + subtype="event.mt_event_registration") + self.message_post(body=_('Event Registration confirmed.')) + self.state = 'open' + + @api.one + def registration_open(self): + """ Open Registration """ + self.confirm_registration() + self.mail_user() + + @api.one + def button_reg_close(self): + """ Close Registration """ + today = fields.Datetime.now() + if self.event_id.date_begin <= today: + self.write({'state': 'done', 'date_closed': today}) + else: + raise Warning(_("You must wait for the starting day of the event to do this action.")) + + @api.one + def button_reg_cancel(self): + self.state = 'cancel' - def _check_seats_limit(self, cr, uid, ids, context=None): - for registration in self.browse(cr, uid, ids, context=context): - if registration.event_id.seats_max and \ - registration.event_id.seats_available < (registration.state == 'draft' and registration.nb_register or 0): - return False - return True - - _constraints = [ - (_check_seats_limit, 'No more available seats.', ['event_id','nb_register','state']), - ] - - def do_draft(self, cr, uid, ids, context=None): - return self.write(cr, uid, ids, {'state': 'draft'}, context=context) - - def confirm_registration(self, cr, uid, ids, context=None): - for reg in self.browse(cr, uid, ids, context=context or {}): - self.pool.get('event.event').message_post(cr, uid, [reg.event_id.id], body=_('New registration confirmed: %s.') % (reg.name or '', ),subtype="event.mt_event_registration", context=context) - self.message_post(cr, uid, reg.id, body=_('Event Registration confirmed.'), context=context) - return self.write(cr, uid, ids, {'state': 'open'}, context=context) - - def registration_open(self, cr, uid, ids, context=None): - """ Open Registration - """ - res = 
self.confirm_registration(cr, uid, ids, context=context) - self.mail_user(cr, uid, ids, context=context) - return res - - def button_reg_close(self, cr, uid, ids, context=None): - """ Close Registration - """ - if context is None: - context = {} - today = fields.datetime.now() - for registration in self.browse(cr, uid, ids, context=context): - if today >= registration.event_id.date_begin: - values = {'state': 'done', 'date_closed': today} - self.write(cr, uid, ids, values) - else: - raise osv.except_osv(_('Error!'), _("You must wait for the starting day of the event to do this action.")) - return True - - def button_reg_cancel(self, cr, uid, ids, context=None, *args): - return self.write(cr, uid, ids, {'state': 'cancel'}) - - def mail_user(self, cr, uid, ids, context=None): - """ - Send email to user with email_template when registration is done - """ - for registration in self.browse(cr, uid, ids, context=context): - if registration.event_id.state == 'confirm' and registration.event_id.email_confirmation_id.id: - self.mail_user_confirm(cr, uid, ids, context=context) - else: - template_id = registration.event_id.email_registration_id.id - if template_id: - mail_message = self.pool.get('email.template').send_mail(cr,uid,template_id,registration.id) - return True - - def mail_user_confirm(self, cr, uid, ids, context=None): - """ - Send email to user when the event is confirmed - """ - for registration in self.browse(cr, uid, ids, context=context): - template_id = registration.event_id.email_confirmation_id.id - if template_id: - mail_message = self.pool.get('email.template').send_mail(cr,uid,template_id,registration.id) - return True - - def onchange_contact_id(self, cr, uid, ids, contact, partner, context=None): - if not contact: - return {} - addr_obj = self.pool.get('res.partner') - contact_id = addr_obj.browse(cr, uid, contact, context=context) - return {'value': { - 'email':contact_id.email, - 'name':contact_id.name, - 'phone':contact_id.phone, - }} - - def 
onchange_partner_id(self, cr, uid, ids, part, context=None): - res_obj = self.pool.get('res.partner') - data = {} - if not part: - return {'value': data} - addr = res_obj.address_get(cr, uid, [part]).get('default', False) - if addr: - d = self.onchange_contact_id(cr, uid, ids, addr, part, context) - data.update(d['value']) - return {'value': data} + @api.one + def mail_user(self): + """Send email to user with email_template when registration is done """ + if self.event_id.state == 'confirm' and self.event_id.email_confirmation_id: + self.mail_user_confirm() + else: + template = self.event_id.email_registration_id + if template: + mail_message = template.send_mail(self.id) + + @api.one + def mail_user_confirm(self): + """Send email to user when the event is confirmed """ + template = self.event_id.email_confirmation_id + if template: + mail_message = template.send_mail(self.id) + + @api.onchange('partner_id') + def _onchange_partner(self): + if self.partner_id: + contact = self.partner_id.address_get().get('default', False) + if contact: + self.name = contact.name + self.email = contact.email + self.phone = contact.phone # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: diff --git a/addons/event/event_view.xml b/addons/event/event_view.xml index 1ae4d9c449e73295b9fe38a52246ac38789b3241..73cbb7b0d1bcb4dc83e1331b1e62b4a18d1a066c 100644 --- a/addons/event/event_view.xml +++ b/addons/event/event_view.xml @@ -86,8 +86,8 @@ </group> <group> <field name="user_id"/> - <field name="type" on_change="onchange_event_type(type,context)" /> - <field name="date_begin" on_change="onchange_start_date(date_begin,date_end)"/> + <field name="type"/> + <field name="date_begin"/> <field name="date_end"/> <field name="date_tz" /> </group> @@ -117,7 +117,7 @@ <button name="button_reg_cancel" string="Cancel Registration" states="draft,open" type="object" icon="gtk-cancel"/> </tree> <form string="Registration"> - <field name="partner_id" attrs="{'readonly':[('state','!=', 
'draft')]}" on_change="onchange_partner_id(partner_id, context)" /> + <field name="partner_id" attrs="{'readonly':[('state','!=', 'draft')]}"/> <field name="name"/> <group colspan="4" col="4"> <field name="email"/> @@ -366,7 +366,7 @@ </h1> <group> <group> - <field name="partner_id" attrs="{'readonly':[('state','!=', 'draft')]}" on_change="onchange_partner_id(partner_id, context)"/> + <field name="partner_id" attrs="{'readonly':[('state','!=', 'draft')]}"/> <field name="name"/> <field name="phone"/> <label for="email"/> diff --git a/addons/event/report/report_event_registration.py b/addons/event/report/report_event_registration.py index c4a4dfd6b2d4a5c514a07a16632fd8a8ad5b99e2..7eb1d122846ebf83ce89ef6785d977ab5dcbfee1 100644 --- a/addons/event/report/report_event_registration.py +++ b/addons/event/report/report_event_registration.py @@ -19,34 +19,32 @@ # ############################################################################## -from openerp.osv import fields, osv +from openerp import models, fields from openerp import tools -class report_event_registration(osv.osv): + +class report_event_registration(models.Model): + """Events Analysis""" _name = "report.event.registration" - _description = "Events Analysis" - _auto = False - _columns = { - 'event_date': fields.datetime('Event Date', readonly=True), - 'event_id': fields.many2one('event.event', 'Event', required=True), - 'draft_state': fields.integer(' # No of Draft Registrations', size=20), - 'confirm_state': fields.integer(' # No of Confirmed Registrations', size=20), - 'seats_max': fields.integer('Max Seats'), - 'nbevent': fields.integer('Number of Registrations'), - 'event_type': fields.many2one('event.type', 'Event Type'), - 'registration_state': fields.selection([('draft', 'Draft'), ('confirm', 'Confirmed'), ('done', 'Attended'), ('cancel', 'Cancelled')], 'Registration State', readonly=True, required=True), - 'event_state': fields.selection([('draft', 'Draft'), ('confirm', 'Confirmed'), ('done', 'Done'), 
('cancel', 'Cancelled')], 'Event State', readonly=True, required=True), - 'user_id': fields.many2one('res.users', 'Event Responsible', readonly=True), - 'user_id_registration': fields.many2one('res.users', 'Register', readonly=True), - 'name_registration': fields.char('Participant / Contact Name',size=45, readonly=True), - 'company_id': fields.many2one('res.company', 'Company', readonly=True), - } _order = 'event_date desc' + _auto = False + + event_date = fields.Datetime('Event Date', readonly=True) + event_id = fields.Many2one('event.event', 'Event', required=True) + draft_state = fields.Integer(' # No of Draft Registrations') + confirm_state = fields.Integer(' # No of Confirmed Registrations') + seats_max = fields.Integer('Max Seats') + nbevent = fields.Integer('Number of Registrations') + event_type = fields.Many2one('event.type', 'Event Type') + registration_state = fields.Selection([('draft', 'Draft'), ('confirm', 'Confirmed'), ('done', 'Attended'), ('cancel', 'Cancelled')], 'Registration State', readonly=True, required=True) + event_state = fields.Selection([('draft', 'Draft'), ('confirm', 'Confirmed'), ('done', 'Done'), ('cancel', 'Cancelled')], 'Event State', readonly=True, required=True) + user_id = fields.Many2one('res.users', 'Event Responsible', readonly=True) + user_id_registration = fields.Many2one('res.users', 'Register', readonly=True) + name_registration = fields.Char('Participant / Contact Name', readonly=True) + company_id = fields.Many2one('res.company', 'Company', readonly=True) def init(self, cr): - """ - Initialize the sql view for the event registration - """ + """Initialize the sql view for the event registration """ tools.drop_view_if_exists(cr, 'report_event_registration') # TOFIX this request won't select events that have no registration @@ -87,5 +85,4 @@ class report_event_registration(osv.osv): ) """) - # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: diff --git a/addons/event/res_partner.py 
b/addons/event/res_partner.py index b51e1fbb9d35eb0fe7a6a9cd86575b279c2accc6..c81569e0ad26df1b6a2ed92c797dd2fea67fa438 100644 --- a/addons/event/res_partner.py +++ b/addons/event/res_partner.py @@ -19,15 +19,12 @@ # ############################################################################## -from openerp.osv import fields, osv +from openerp import models, fields -class res_partner(osv.osv): +class res_partner(models.Model): _inherit = 'res.partner' - _columns = { - 'speaker': fields.boolean('Speaker', help="Check this box if this contact is a speaker."), - } - + speaker = fields.Boolean(help="Check this box if this contact is a speaker.") # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: diff --git a/addons/event/test/process/event_draft2done.yml b/addons/event/test/process/event_draft2done.yml index e1ad32ab8da26b6934067499ab909adabd34caf0..0cec9c3b16a139e6e9d3b11684d5c71f84d7af84 100644 --- a/addons/event/test/process/event_draft2done.yml +++ b/addons/event/test/process/event_draft2done.yml @@ -4,128 +4,126 @@ !context uid: 'res_users_eventuser' - - I want to organize an event, - into this conference I should create two registration. + I want to organize an event, into this conference I should create two registration. One confirmed and attended the event and the other cancelled. 
I create an event - !record {model: event.event, id: event_event}: - name: event - date_begin: 2012-01-01 19:05:15 - date_end: 2012-01-01 20:05:15 - seats_max: 10 + name: event + date_begin: 2012-01-01 19:05:15 + date_end: 2012-01-01 20:05:15 + seats_max: 10 - I create a registration for the event - !record {model: event.registration, id: event_registration1}: - event_id: event_event - name: test_confirm - nb_register: 10 + event_id: event_event + name: test_confirm + nb_register: 10 - I create a second registration for the same event - - !record {model: event.registration, id: event_registration2}: + !record {model: event.registration, id: event_registration2}: event_id: event_event name: test_cancel nb_register: 5 - I check if the number of draft registrations in the event view is correct - - !assert {model: event.event, id: event_event ,string: 'the number of draft registration is not correct'}: - - seats_unconfirmed == 15.00 + !assert {model: event.event, id: event_event, string: 'the number of draft registration is not correct'}: + - seats_unconfirmed == 15.00 - - I confirm the registration1 + I confirm the registration1 - - !python {model: event.registration}: | - event_pool = self.pool.get("event.event") - self.registration_open(cr, uid, [ref("event_registration1")],context=context) + !python {model: event.registration, id: event_registration1}: | + self.registration_open() - - I check that registration is "confirmed" + I check that registration is "confirmed" - - !assert {model: event.registration, id: event_registration1}: - - state == 'open', "registration should be confirmed." + !assert {model: event.registration, id: event_registration1}: + - state == 'open', "registration should be confirmed." 
- - I check if the number of confirmed registration is correct + I check if the number of confirmed registration is correct - - !assert {model: event.event,id: event_event,string: 'the number of confirmed registration is not correct'}: - - seats_reserved == 10.00 + !assert {model: event.event, id: event_event,string: 'the number of confirmed registration is not correct'}: + - seats_reserved == 10.00 - - I close the registration + I close the registration - - !python {model: event.registration}: | - self.button_reg_close(cr, uid, [ref("event_registration1")],context=context) + !python {model: event.registration, id: event_registration1}: | + self.button_reg_close() - - I check if registration is correctly finnished + I check if registration is correctly finnished - !assert {model: event.registration, id: event_registration1}: - - state == 'done' + - state == 'done' - - I check if attended registration is correct + I check if attended registration is correct - - !assert {model: event.event,id: event_event,string: 'the number of registration that attended the event is not correct'}: - - seats_used == 10.00 + !assert {model: event.event, id: event_event, string: 'the number of registration that attended the event is not correct'}: + - seats_used == 10.00 - - I cancel the second registration + I cancel the second registration - - !python {model: event.registration}: | - self.button_reg_cancel(cr, uid, [ref("event_registration2")],context=context) + !python {model: event.registration, id: event_registration2}: | + self.button_reg_cancel() - - I Check that registration is cancelled + I Check that registration is cancelled - !assert {model: event.registration, id: event_registration2}: - state == 'cancel', "Registration should be cancelled." 
- - I confirm the event + I confirm the event - - !python {model: event.event}: | - self.button_confirm(cr, uid, [ref("event_event")]) + !python {model: event.event, id: event_event}: | + self.button_confirm() - - I check that event is in "confirmed" state. + I check that event is in "confirmed" state. - !assert {model: event.event, id: event_event}: - state == 'confirm', "event should be confirmed." - - I close the event + I close the event - - !python {model: event.event}: | - self.button_done(cr, uid, [ref("event_event")]) + !python {model: event.event, id: event_event}: | + self.button_done() - - Check that event is in "close" state. + Check that event is in "close" state. - !assert {model: event.event, id: event_event}: - state == 'done', "Event should be Closed." - - I reset event to draft + I reset event to draft - - !python {model: event.event}: | - self.button_draft(cr, uid, [ref("event_event")]) + !python {model: event.event, id: event_event}: | + self.button_draft() - - In order to test the cancellation of the event, I create a second event + In order to test the cancellation of the event, I create a second event - !record {model: event.event, id: event_event1}: - name: event_cancel - date_begin: 2012-01-01 19:05:15 - date_end: 2012-01-01 20:05:15 + name: event_cancel + date_begin: 2012-01-01 19:05:15 + date_end: 2012-01-01 20:05:15 - I create a registration for the event - !record {model: event.registration, id: event_registration_cancel}: - event_id: event_event1 - name: test_confirm_again - nb_register: 5 + event_id: event_event1 + name: test_confirm_again + nb_register: 5 - - Now I cancel this event + Now I cancel this event - - !python {model: event.event}: | - self.button_cancel(cr, uid, [ref("event_event1")]) + !python {model: event.event, id: event_event1}: | + self.button_cancel() - - I check if the event is cancelled + I check if the event is cancelled - !assert {model: event.event, id: event_event1}: - - state == 'cancel', "Event should be 
cancelled." + - state == 'cancel', "Event should be cancelled." - - I check if its registrations are cancelled too + I check if its registrations are cancelled too - !assert {model: event.registration, id: event_registration_cancel}: - - state == 'cancel', "Registration should be cancelled." + - state == 'cancel', "Registration should be cancelled." diff --git a/addons/event/test/ui/demo_data.yml b/addons/event/test/ui/demo_data.yml index 1a6631c93a34a185ddf541385f2dbc06a6b05e68..19e8334f40c25b75fee21f6ca6a2d7df2dc2b81f 100644 --- a/addons/event/test/ui/demo_data.yml +++ b/addons/event/test/ui/demo_data.yml @@ -11,21 +11,14 @@ !record {model: event.event, id: event_2}: product_id: event_product_2 name: 'Conference on ERP Buisness' -- - I call onchange event from event registration wizard. -- - !python {model: partner.event.registration}: | - context.update({'active_id': ref("base.res_partner_5")}) - self.onchange_event_id(cr, uid, 1, ref("event_1"), context=context) - I confirm event from wizard. - - !python {model: event.confirm}: | - context.update({'event_ids': [ref("event_2")]}) - id = self.create(cr, uid , {}) - self.confirm(cr, uid, [id], context=context) + !python {model: event.confirm, id: False}: | + wizard = self.with_context(event_ids=[ref("event_2")]).create({}) + wizard.confirm() - I call close registration process. - - !python {model: event.registration}: | - self.button_reg_close(cr, uid, [ref("reg_0_2")]) + !python {model: event.registration, id: reg_0_2}: | + self.button_reg_close() diff --git a/addons/event/test/ui/duplicate_event.yml b/addons/event/test/ui/duplicate_event.yml index c1287353c67bfee81542e34f054acf934b8d6f34..5c2dd85c3ed17432692e774e3d336ed8a55c81c9 100644 --- a/addons/event/test/ui/duplicate_event.yml +++ b/addons/event/test/ui/duplicate_event.yml @@ -1,16 +1,10 @@ - Copy of event. 
- - !python {model: event.event}: | - try: - self.copy(cr, uid, ref("event_2")) - except: - pass + !python {model: event.event, id: event_2}: | + self.copy() - - Copy of event registarion. + Copy of event registration. - - !python {model: event.registration}: | - try: - self.copy(cr, uid, ref("reg_1_1")) - except: - pass \ No newline at end of file + !python {model: event.registration, id: reg_1_1}: | + self.copy() diff --git a/addons/event/wizard/event_confirm.py b/addons/event/wizard/event_confirm.py index 5fb2cd67c87096a64b6514ac61345196abc79f50..a54b1d205a0e5f6dcd6c032e09a0c63d3cc6153d 100644 --- a/addons/event/wizard/event_confirm.py +++ b/addons/event/wizard/event_confirm.py @@ -19,18 +19,17 @@ # ############################################################################## -from openerp.osv import osv +from openerp import models, api -class event_confirm(osv.osv_memory): - """ - Confirm Event - """ + +class event_confirm(models.TransientModel): + """Event Confirmation""" _name = "event.confirm" - _description = "Event Confirmation" - def confirm(self, cr, uid, ids, context=None): - self.pool.get('event.event').do_confirm(cr, uid, context.get('event_ids', []), context=context) + @api.multi + def confirm(self): + events = self.env['event.event'].browse(self._context.get('event_ids', [])) + events.do_confirm() return {'type': 'ir.actions.act_window_close'} - # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: diff --git a/addons/event_sale/event_sale.py b/addons/event_sale/event_sale.py index aeaee98306a6cc2e90fea3faad79285575b03298..a0940b5af2dac74df1e6dd5b34a9e2a6e7845733 100644 --- a/addons/event_sale/event_sale.py +++ b/addons/event_sale/event_sale.py @@ -19,7 +19,8 @@ # ############################################################################## -from openerp.addons.event.event import event_event as Event +from openerp import api +from openerp.fields import Integer, One2many, Html from openerp.osv import fields, osv from 
openerp.tools.translate import _ @@ -89,8 +90,7 @@ class sale_order_line(osv.osv): ''' create registration with sales order ''' - if context is None: - context = {} + context = dict(context or {}) registration_obj = self.pool.get('event.registration') for order_line in self.browse(cr, uid, ids, context=context): if order_line.event_id: @@ -123,62 +123,33 @@ class event_event(osv.osv): _inherit = 'event.event' - def _get_seats_max(self, cr, uid, ids, field_name, arg, context=None): - result = dict.fromkeys(ids, 0) - for rec in self.browse(cr, uid, ids, context=context): - result[rec.id] = sum([ticket.seats_max for ticket in rec.event_ticket_ids]) - return result + event_ticket_ids = One2many('event.event.ticket', 'event_id', string='Event Ticket', + default=lambda rec: rec._default_tickets()) + seats_max = Integer(string='Maximum Available Seats', + help="The maximum registration level is equal to the sum of the maximum registration of event ticket. " + + "If you have too much registrations you are not able to confirm your event. (0 to ignore this rule )", + store=True, readonly=True, compute='_compute_seats_max') + + badge_back = Html('Badge Back', translate=True, states={'done': [('readonly', True)]}) + badge_innerleft = Html('Badge Inner Left', translate=True, states={'done': [('readonly', True)]}) + badge_innerright = Html('Badge Inner Right', translate=True, states={'done': [('readonly', True)]}) - def _get_tickets(self, cr, uid, context={}): + @api.model + def _default_tickets(self): try: - product = self.pool.get('ir.model.data').get_object(cr, uid, 'event_sale', 'product_product_event') + product = self.env.ref('event_sale.product_product_event') return [{ 'name': _('Subscription'), 'product_id': product.id, 'price': 0, }] except ValueError: - pass - return [] - - def _get_ticket_events(self, cr, uid, ids, context=None): - # `self` is the event.event.ticket model when called by ORM!
- return list(set(ticket.event_id.id - for ticket in self.browse(cr, uid, ids, context))) + return self.env['event.event.ticket'] - # proxy method, can't import parent method directly as unbound_method: it would receive - # an invalid `self` <event_registration> when called by ORM - def _events_from_registrations(self, cr, uid, ids, context=None): - # `self` is the event.registration model when called by ORM - return self.pool['event.event']._get_events_from_registrations(cr, uid, ids, context=context) - - _columns = { - 'event_ticket_ids': fields.one2many('event.event.ticket', "event_id", "Event Ticket"), - 'seats_max': fields.function(_get_seats_max, - string='Maximum Avalaible Seats', - help="The maximum registration level is equal to the sum of the maximum registration of event ticket." + - "If you have too much registrations you are not able to confirm your event. (0 to ignore this rule )", - type='integer', - readonly=True, - store={ - 'event.event': (lambda self, cr, uid, ids, c = {}: ids, ['event_ticket_ids'], 20), - 'event.event.ticket': (_get_ticket_events, ['seats_max'], 10), - }), - 'seats_available': fields.function(Event._get_seats, oldname='register_avail', string='Available Seats', - type='integer', multi='seats_reserved', - store={ - 'event.registration': (_events_from_registrations, ['state'], 10), - 'event.event': (lambda self, cr, uid, ids, c = {}: ids, - ['seats_max', 'registration_ids'], 20), - 'event.event.ticket': (_get_ticket_events, ['seats_max'], 10), - }), - 'badge_back': fields.html('Badge Back', readonly=False, translate=True, states={'done': [('readonly', True)]}), - 'badge_innerleft': fields.html('Badge Innner Left', readonly=False, translate=True, states={'done': [('readonly', True)]}), - 'badge_innerright': fields.html('Badge Inner Right', readonly=False, translate=True, states={'done': [('readonly', True)]}), - } - _defaults = { - 'event_ticket_ids': _get_tickets - } + @api.one + @api.depends('event_ticket_ids.seats_max') + def 
_compute_seats_max(self): + self.seats_max = sum(ticket.seats_max for ticket in self.event_ticket_ids) class event_ticket(osv.osv): _name = 'event.event.ticket' diff --git a/addons/fetchmail/fetchmail.py b/addons/fetchmail/fetchmail.py index b4cac75b4a4f5efd8ba9188943edf9554602e5d3..55ac185a0cb0b19814c0b16b4069ee81ebba25d8 100644 --- a/addons/fetchmail/fetchmail.py +++ b/addons/fetchmail/fetchmail.py @@ -52,7 +52,7 @@ class fetchmail_server(osv.osv): 'state':fields.selection([ ('draft', 'Not Confirmed'), ('done', 'Confirmed'), - ], 'Status', select=True, readonly=True), + ], 'Status', select=True, readonly=True, copy=False), 'server' : fields.char('Server Name', readonly=True, help="Hostname or IP of the mail server", states={'draft':[('readonly', False)]}), 'port' : fields.integer('Port', readonly=True, states={'draft':[('readonly', False)]}), 'type':fields.selection([ @@ -179,8 +179,7 @@ openerp_mailgate: "|/path/to/openerp-mailgate.py --host=localhost -u %(uid)d -p def fetch_mail(self, cr, uid, ids, context=None): """WARNING: meant for cron usage only - will commit() after each email!""" - if context is None: - context = {} + context = dict(context or {}) context['fetchmail_cron_running'] = True mail_thread = self.pool.get('mail.thread') action_pool = self.pool.get('ir.actions.server') diff --git a/addons/fleet/fleet.py b/addons/fleet/fleet.py index 7dac086bb27cc47551825cc14b6cb33792c065ee..44dd9abc1f486d13562b3bccfdd6835fdc6780c8 100644 --- a/addons/fleet/fleet.py +++ b/addons/fleet/fleet.py @@ -338,7 +338,7 @@ class fleet_vehicle(osv.Model): 'name': fields.function(_vehicle_name_get_fnc, type="char", string='Name', store=True), 'company_id': fields.many2one('res.company', 'Company'), 'license_plate': fields.char('License Plate', required=True, help='License plate number of the vehicle (ie: plate number for a car)'), - 'vin_sn': fields.char('Chassis Number', help='Unique number written on the vehicle motor (VIN/SN number)'), + 'vin_sn': fields.char('Chassis 
Number', help='Unique number written on the vehicle motor (VIN/SN number)', copy=False), 'driver_id': fields.many2one('res.partner', 'Driver', help='Driver of the vehicle'), 'model_id': fields.many2one('fleet.vehicle.model', 'Model', required=True, help='Model of the vehicle'), 'log_fuel': fields.one2many('fleet.vehicle.log.fuel', 'vehicle_id', 'Fuel Logs'), @@ -355,7 +355,7 @@ class fleet_vehicle(osv.Model): 'location': fields.char('Location', help='Location of the vehicle (garage, ...)'), 'seats': fields.integer('Seats Number', help='Number of seats of the vehicle'), 'doors': fields.integer('Doors Number', help='Number of doors of the vehicle'), - 'tag_ids' :fields.many2many('fleet.vehicle.tag', 'fleet_vehicle_vehicle_tag_rel', 'vehicle_tag_id','tag_id', 'Tags'), + 'tag_ids' :fields.many2many('fleet.vehicle.tag', 'fleet_vehicle_vehicle_tag_rel', 'vehicle_tag_id','tag_id', 'Tags', copy=False), 'odometer': fields.function(_get_odometer, fnct_inv=_set_odometer, type='float', string='Last Odometer', help='Odometer measure of the vehicle at the moment of this log'), 'odometer_unit': fields.selection([('kilometers', 'Kilometers'),('miles','Miles')], 'Odometer Unit', help='Unit of the odometer ',required=True), 'transmission': fields.selection([('manual', 'Manual'), ('automatic', 'Automatic')], 'Transmission', help='Transmission Used by the vehicle'), @@ -380,18 +380,6 @@ class fleet_vehicle(osv.Model): 'state_id': _get_default_state, } - def copy(self, cr, uid, id, default=None, context=None): - if not default: - default = {} - default.update({ - 'log_fuel':[], - 'log_contracts':[], - 'log_services':[], - 'tag_ids':[], - 'vin_sn':'', - }) - return super(fleet_vehicle, self).copy(cr, uid, id, default, context=context) - def on_change_model(self, cr, uid, ids, model_id, context=None): if not model_id: return {} @@ -403,9 +391,7 @@ class fleet_vehicle(osv.Model): } def create(self, cr, uid, data, context=None): - if not context: - context = {} - 
context.update({'mail_create_nolog': True}) + context = dict(context or {}, mail_create_nolog=True) vehicle_id = super(fleet_vehicle, self).create(cr, uid, data, context=context) vehicle = self.browse(cr, uid, vehicle_id, context=context) self.message_post(cr, uid, [vehicle_id], body=_('%s %s has been added to the fleet!') % (vehicle.model_id.name,vehicle.license_plate), context=context) @@ -803,12 +789,14 @@ class fleet_vehicle_log_contract(osv.Model): 'days_left': fields.function(get_days_left, type='integer', string='Warning Date'), 'insurer_id' :fields.many2one('res.partner', 'Supplier'), 'purchaser_id': fields.many2one('res.partner', 'Contractor', help='Person to which the contract is signed for'), - 'ins_ref': fields.char('Contract Reference', size=64), - 'state': fields.selection([('open', 'In Progress'), ('toclose','To Close'), ('closed', 'Terminated')], 'Status', readonly=True, help='Choose wheter the contract is still valid or not'), - 'notes': fields.text('Terms and Conditions', help='Write here all supplementary informations relative to this contract'), + 'ins_ref': fields.char('Contract Reference', size=64, copy=False), + 'state': fields.selection([('open', 'In Progress'), ('toclose','To Close'), ('closed', 'Terminated')], + 'Status', readonly=True, help='Choose whether the contract is still valid or not', + copy=False), + 'notes': fields.text('Terms and Conditions', help='Write here all supplementary information relative to this contract', copy=False), 'cost_generated': fields.float('Recurring Cost Amount', help="Costs paid at regular intervals, depending on the cost frequency. 
If the cost frequency is set to unique, the cost will be logged at the start date"), 'cost_frequency': fields.selection([('no','No'), ('daily', 'Daily'), ('weekly','Weekly'), ('monthly','Monthly'), ('yearly','Yearly')], 'Recurring Cost Frequency', help='Frequency of the recuring cost', required=True), - 'generated_cost_ids': fields.one2many('fleet.vehicle.cost', 'contract_id', 'Generated Costs', ondelete='cascade'), + 'generated_cost_ids': fields.one2many('fleet.vehicle.cost', 'contract_id', 'Generated Costs'), 'sum_cost': fields.function(_get_sum_cost, type='float', string='Indicative Costs Total'), 'cost_id': fields.many2one('fleet.vehicle.cost', 'Cost', required=True, ondelete='cascade'), 'cost_amount': fields.related('cost_id', 'amount', string='Amount', type='float', store=True), #we need to keep this field as a related with store=True because the graph view doesn't support (1) to address fields from inherited table and (2) fields that aren't stored in database @@ -824,18 +812,6 @@ class fleet_vehicle_log_contract(osv.Model): 'cost_type': 'contract', } - def copy(self, cr, uid, id, default=None, context=None): - if default is None: - default = {} - today = fields.date.context_today(self, cr, uid, context=context) - default['date'] = today - default['start_date'] = today - default['expiration_date'] = self.compute_next_year_date(today) - default['ins_ref'] = '' - default['state'] = 'open' - default['notes'] = '' - return super(fleet_vehicle_log_contract, self).copy(cr, uid, id, default, context=context) - def contract_close(self, cr, uid, ids, context=None): return self.write(cr, uid, ids, {'state': 'closed'}, context=context) diff --git a/addons/gamification/models/challenge.py b/addons/gamification/models/challenge.py index faed62f8da92bac673535297e21307a2d977e703..51e58bdc71b0084c7f76daf1922bb8789f0633cf 100644 --- a/addons/gamification/models/challenge.py +++ b/addons/gamification/models/challenge.py @@ -124,7 +124,7 @@ class 
gamification_challenge(osv.Model): ('draft', 'Draft'), ('inprogress', 'In Progress'), ('done', 'Done'), - ], + ], copy=False, string='State', required=True, track_visibility='onchange'), 'manager_id': fields.many2one('res.users', string='Responsible', help="The user responsible for the challenge."), @@ -155,7 +155,7 @@ class gamification_challenge(osv.Model): 'line_ids': fields.one2many('gamification.challenge.line', 'challenge_id', string='Lines', help="List of goals that will be set", - required=True), + required=True, copy=True), 'reward_id': fields.many2one('gamification.badge', string="For Every Succeding User"), 'reward_first_id': fields.many2one('gamification.badge', string="For 1st user"), @@ -283,7 +283,7 @@ class gamification_challenge(osv.Model): # in cron mode, will do intermediate commits # TODO in trunk: replace by parameter - context.update({'commit_gamification': True}) + context = dict(context, commit_gamification=True) return self._update_all(cr, uid, ids, context=context) def _update_all(self, cr, uid, ids, context=None): diff --git a/addons/gamification/models/goal.py b/addons/gamification/models/goal.py index 041c6ae169c01cd021d9742f5721b1892bee6d47..ce5be513aef1f6ded1e7733ce0a2c58963b6eabe 100644 --- a/addons/gamification/models/goal.py +++ b/addons/gamification/models/goal.py @@ -424,8 +424,7 @@ class gamification_goal(osv.Model): def create(self, cr, uid, vals, context=None): """Overwrite the create method to add a 'no_remind_goal' field to True""" - if context is None: - context = {} + context = dict(context or {}) context['no_remind_goal'] = True return super(gamification_goal, self).create(cr, uid, vals, context=context) diff --git a/addons/google_account/controllers/main.py b/addons/google_account/controllers/main.py index 40843b6f4ba1bdaa9d1a030da6aaad5bf7147448..2a3a4ae586f44249302a90f98c159a797b290e99 100644 --- a/addons/google_account/controllers/main.py +++ b/addons/google_account/controllers/main.py @@ -1,8 +1,8 @@ import 
simplejson import urllib import openerp -import openerp.addons.web.http as http -from openerp.addons.web.http import request +from openerp import http +from openerp.http import request import openerp.addons.web.controllers.main as webmain from openerp.addons.web.http import SessionExpiredException from werkzeug.exceptions import BadRequest @@ -27,6 +27,6 @@ class google_auth(http.Controller): elif kw.get('error'): return werkzeug.utils.redirect("%s%s%s" % (url_return ,"?error=" , kw.get('error'))) else: - return werkzeug.utils.redirect("%s%s%s" % (url_return ,"?error=Unknown_error")) + return werkzeug.utils.redirect("%s%s" % (url_return ,"?error=Unknown_error")) diff --git a/addons/google_account/google_account.py b/addons/google_account/google_account.py index 4968925bd29ad5e405f905c5c01016071754d714..51c7e2c5bca742c9a38bc089331416d0425c6165 100644 --- a/addons/google_account/google_account.py +++ b/addons/google_account/google_account.py @@ -129,8 +129,7 @@ class google_service(osv.osv_memory): return res def _do_request(self, cr, uid, uri, params={}, headers={}, type='POST', preuri="https://www.googleapis.com", context=None): - if context is None: - context = {} + context = dict(context or {}) """ Return a tuple ('HTTP_CODE', 'HTTP_RESPONSE') """ _logger.debug("Uri: %s - Type : %s - Headers: %s - Params : %s !" 
% (uri, type, headers, werkzeug.url_encode(params) if type == 'GET' else params)) diff --git a/addons/google_calendar/google_calendar.py b/addons/google_calendar/google_calendar.py index dd1d45c05fe32519ea2877ca8d9c6757a189c92d..7abef0faf8cdb1970e841ad3683758d8f83e44f7 100644 --- a/addons/google_calendar/google_calendar.py +++ b/addons/google_calendar/google_calendar.py @@ -630,8 +630,7 @@ class google_calendar(osv.AbstractModel): return new_ids def update_events(self, cr, uid, lastSync=False, context=None): - if context is None: - context = {} + context = dict(context or {}) calendar_event = self.pool['calendar.event'] user_obj = self.pool['res.users'] @@ -937,7 +936,6 @@ class calendar_event(osv.Model): def copy(self, cr, uid, id, default=None, context=None): default = default or {} - default['attendee_ids'] = False if default.get('write_type', False): del default['write_type'] elif default.get('recurrent_id', False): diff --git a/addons/google_drive/google_drive.py b/addons/google_drive/google_drive.py index 779b02315407a2055468f208d5ba7f2ddb8aac73..036b815648bd986a9e887f03514f55e007ffa857 100644 --- a/addons/google_drive/google_drive.py +++ b/addons/google_drive/google_drive.py @@ -40,7 +40,7 @@ class config(osv.Model): config = self.browse(cr, SUPERUSER_ID, config_id, context=context) model = config.model_id filter_name = config.filter_id and config.filter_id.name or False - record = self.pool.get(model.model).read(cr, uid, res_id, [], context=context) + record = self.pool.get(model.model).read(cr, uid, [res_id], context=context)[0] record.update({'model': model.name, 'filter': filter_name}) name_gdocs = config.name_template try: diff --git a/addons/google_drive/static/src/xml/gdocs.xml b/addons/google_drive/static/src/xml/gdocs.xml deleted file mode 100644 index 0c8d9c3b01fee06af2a4427226ada9e4ab8d2714..0000000000000000000000000000000000000000 --- a/addons/google_drive/static/src/xml/gdocs.xml +++ /dev/null @@ -1,8 +0,0 @@ -<?xml version="1.0" 
encoding="UTF-8"?> -<!-- vim:fdl=1: ---> -<templates id="template" xml:space="preserve"> -<t t-name="AddGoogleDocumentItem"> - <li class="oe_sidebar_add_google_doc"><span><b>Add Google Doc...</b></span></li> -</t> -</templates> diff --git a/addons/google_spreadsheet/google_spreadsheet.py b/addons/google_spreadsheet/google_spreadsheet.py index 4c24a46a404a538b55365c2dab5d4d22ad664295..df63ac5f63255d433176384caf30a4d0af69e22e 100644 --- a/addons/google_spreadsheet/google_spreadsheet.py +++ b/addons/google_spreadsheet/google_spreadsheet.py @@ -58,7 +58,7 @@ class config(osv.osv): formula = '=oe_browse("%s";"%s";"%s")' % (model, fields, domain) url = self.pool.get('ir.config_parameter').get_param(cr, uid, 'web.base.url') dbname = cr.dbname - user = self.pool['res.users'].read(cr, uid, uid, ['login', 'password'], context=context) + user = self.pool['res.users'].read(cr, uid, [uid], ['login', 'password'], context=context)[0] username = user['login'] password = user['password'] if not password: diff --git a/addons/hr/hr.py b/addons/hr/hr.py index a0016de87a28da1b55d1f0e08be869846d103099..0d4ab54a98c58c8ecf810aab2625e311baa82d93 100644 --- a/addons/hr/hr.py +++ b/addons/hr/hr.py @@ -110,8 +110,10 @@ class hr_job(osv.Model): 'hr.employee': (_get_job_position, ['job_id'], 10), }, type='integer', multi='_get_nbr_employees'), - 'no_of_recruitment': fields.integer('Expected New Employees', help='Number of new employees you expect to recruit.'), - 'no_of_hired_employee': fields.integer('Hired Employees', help='Number of hired employees for this job position during recruitment phase.'), + 'no_of_recruitment': fields.integer('Expected New Employees', copy=False, + help='Number of new employees you expect to recruit.'), + 'no_of_hired_employee': fields.integer('Hired Employees', copy=False, + help='Number of hired employees for this job position during recruitment phase.'), 'employee_ids': fields.one2many('hr.employee', 'job_id', 'Employees', groups='base.group_user'), 
'description': fields.text('Job Description'), 'requirements': fields.text('Requirements'), @@ -119,7 +121,7 @@ class hr_job(osv.Model): 'company_id': fields.many2one('res.company', 'Company'), 'state': fields.selection([('open', 'Recruitment Closed'), ('recruit', 'Recruitment in Progress')], string='Status', readonly=True, required=True, - track_visibility='always', + track_visibility='always', copy=False, help="By default 'Closed', set it to 'In Recruitment' if recruitment process is going on for this job position."), 'write_date': fields.datetime('Update Date', readonly=True), } @@ -151,12 +153,7 @@ class hr_job(osv.Model): def copy(self, cr, uid, id, default=None, context=None): if default is None: default = {} - default.update({ - 'employee_ids': [], - 'no_of_recruitment': 0, - 'no_of_hired_employee': 0, - }) - if 'name' in default: + if 'name' not in default: job = self.browse(cr, uid, id, context=context) default['name'] = _("%s (copy)") % (job.name) return super(hr_job, self).copy(cr, uid, id, default=default, context=context) @@ -248,13 +245,7 @@ class hr_employee(osv.osv): 'image': _get_default_image, 'color': 0, } - - def copy_data(self, cr, uid, ids, default=None, context=None): - if default is None: - default = {} - default.update({'child_ids': False}) - return super(hr_employee, self).copy_data(cr, uid, ids, default, context=context) - + def _broadcast_welcome(self, cr, uid, employee_id, context=None): """ Broadcast the welcome message to all users in the employee company. 
""" employee = self.browse(cr, uid, employee_id, context=context) @@ -286,8 +277,7 @@ class hr_employee(osv.osv): return True def create(self, cr, uid, data, context=None): - if context is None: - context = {} + context = dict(context or {}) if context.get("mail_broadcast"): context['mail_create_nolog'] = True @@ -429,23 +419,10 @@ class hr_department(osv.osv): res.append((record['id'], name)) return res - def copy_data(self, cr, uid, ids, default=None, context=None): - if default is None: - default = {} - default['member_ids'] = [] - return super(hr_department, self).copy_data(cr, uid, ids, default, context=context) - class res_users(osv.osv): _name = 'res.users' _inherit = 'res.users' - - def copy_data(self, cr, uid, ids, default=None, context=None): - if default is None: - default = {} - default.update({'employee_ids': False}) - return super(res_users, self).copy_data(cr, uid, ids, default, context=context) - _columns = { 'employee_ids': fields.one2many('hr.employee', 'user_id', 'Related employees'), } diff --git a/addons/hr/images/photo.png b/addons/hr/images/photo.png deleted file mode 100644 index 1edfe4754fcceeedd9ddbcaf890283a81635c77c..0000000000000000000000000000000000000000 Binary files a/addons/hr/images/photo.png and /dev/null differ diff --git a/addons/hr/res_users.py b/addons/hr/res_users.py index d5bebe054461d2e56ddb020202f5f0b578a5ff3e..78c0b841255d9f024458dff910fa8deb1cc973cd 100644 --- a/addons/hr/res_users.py +++ b/addons/hr/res_users.py @@ -46,6 +46,7 @@ class res_users(osv.Model): def _message_post_get_eid(self, cr, uid, thread_id, context=None): assert thread_id, "res.users does not support posting global messages" if context and 'thread_model' in context: + context = dict(context or {}) context['thread_model'] = 'hr.employee' if isinstance(thread_id, (list, tuple)): thread_id = thread_id[0] diff --git a/addons/hr_evaluation/hr_evaluation.py b/addons/hr_evaluation/hr_evaluation.py index 
8c054f05726704f0bacdda7b144ead9480a8d2ba..068af5be0ee8bdc4d2c61ffaefece790a15105ed 100644 --- a/addons/hr_evaluation/hr_evaluation.py +++ b/addons/hr_evaluation/hr_evaluation.py @@ -35,7 +35,7 @@ class hr_evaluation_plan(osv.Model): _columns = { 'name': fields.char("Appraisal Plan", required=True), 'company_id': fields.many2one('res.company', 'Company', required=True), - 'phase_ids': fields.one2many('hr_evaluation.plan.phase', 'plan_id', 'Appraisal Phases'), + 'phase_ids': fields.one2many('hr_evaluation.plan.phase', 'plan_id', 'Appraisal Phases', copy=True), 'month_first': fields.integer('First Appraisal in (months)', help="This number of months will be used to schedule the first evaluation date of the employee when selecting an evaluation plan. "), 'month_next': fields.integer('Periodicity of Appraisal (months)', help="The number of month that depicts the delay between each evaluation of this plan (after the first one)."), 'active': fields.boolean('Active') @@ -157,7 +157,7 @@ class hr_evaluation(osv.Model): ('wait', 'Plan In Progress'), ('progress', 'Waiting Appreciation'), ('done', 'Done'), - ], 'Status', required=True, readonly=True), + ], 'Status', required=True, readonly=True, copy=False), 'date_close': fields.date('Ending Date', select=True), } _defaults = { @@ -254,15 +254,6 @@ class hr_evaluation(osv.Model): self.write(cr, uid, ids, {'state': 'draft'}, context=context) return True - def copy(self, cr, uid, id, default=None, context=None): - if default is None: - default = {} - if context is None: - context = {} - default = default.copy() - default['survey_request_ids'] = [] - return super(hr_evaluation, self).copy(cr, uid, id, default, context=context) - def write(self, cr, uid, ids, vals, context=None): if vals.get('employee_id'): employee_id = self.pool.get('hr.employee').browse(cr, uid, vals.get('employee_id'), context=context) @@ -292,7 +283,7 @@ class hr_evaluation_interview(osv.Model): ('waiting_answer', "In progress"), ('done', "Done"), ('cancel', 
"Cancelled")], - string="State", required=True), + string="State", required=True, copy=False), 'survey_id': fields.related('phase_id', 'survey_id', string="Appraisal Form", type="many2one", relation="survey.survey"), 'deadline': fields.related('request_id', 'deadline', type="datetime", string="Deadline"), } @@ -363,7 +354,7 @@ class hr_evaluation_interview(osv.Model): def action_print_survey(self, cr, uid, ids, context=None): """ If response is available then print this response otherwise print survey form (print template of the survey) """ - context = context if context else {} + context = dict(context or {}) interview = self.browse(cr, uid, ids, context=context)[0] survey_obj = self.pool.get('survey.survey') response_obj = self.pool.get('survey.user_input') @@ -372,7 +363,7 @@ class hr_evaluation_interview(osv.Model): return survey_obj.action_print_survey(cr, uid, [interview.survey_id.id], context=context) def action_start_survey(self, cr, uid, ids, context=None): - context = context if context else {} + context = dict(context or {}) interview = self.browse(cr, uid, ids, context=context)[0] survey_obj = self.pool.get('survey.survey') response_obj = self.pool.get('survey.user_input') diff --git a/addons/hr_expense/hr_expense.py b/addons/hr_expense/hr_expense.py index d134c0d51025a224b7b5be723ffa68049365c9f7..eb8a09678e7c74559cc61fe27198148521396865 100644 --- a/addons/hr_expense/hr_expense.py +++ b/addons/hr_expense/hr_expense.py @@ -71,11 +71,15 @@ class hr_expense_expense(osv.osv): 'journal_id': fields.many2one('account.journal', 'Force Journal', help = "The journal used when the expense is done."), 'employee_id': fields.many2one('hr.employee', "Employee", required=True, readonly=True, states={'draft':[('readonly',False)], 'confirm':[('readonly',False)]}), 'user_id': fields.many2one('res.users', 'User', required=True), - 'date_confirm': fields.date('Confirmation Date', select=True, help="Date of the confirmation of the sheet expense. 
It's filled when the button Confirm is pressed."), - 'date_valid': fields.date('Validation Date', select=True, help="Date of the acceptation of the sheet expense. It's filled when the button Accept is pressed."), - 'user_valid': fields.many2one('res.users', 'Validation By', readonly=True, states={'draft':[('readonly',False)], 'confirm':[('readonly',False)]}), - 'account_move_id': fields.many2one('account.move', 'Ledger Posting'), - 'line_ids': fields.one2many('hr.expense.line', 'expense_id', 'Expense Lines', readonly=True, states={'draft':[('readonly',False)]} ), + 'date_confirm': fields.date('Confirmation Date', select=True, copy=False, + help="Date of the confirmation of the sheet expense. It's filled when the button Confirm is pressed."), + 'date_valid': fields.date('Validation Date', select=True, copy=False, + help="Date of the acceptation of the sheet expense. It's filled when the button Accept is pressed."), + 'user_valid': fields.many2one('res.users', 'Validation By', readonly=True, copy=False, + states={'draft':[('readonly',False)], 'confirm':[('readonly',False)]}), + 'account_move_id': fields.many2one('account.move', 'Ledger Posting', copy=False), + 'line_ids': fields.one2many('hr.expense.line', 'expense_id', 'Expense Lines', copy=True, + readonly=True, states={'draft':[('readonly',False)]} ), 'note': fields.text('Note'), 'amount': fields.function(_amount, string='Total Amount', digits_compute=dp.get_precision('Account'), store={ @@ -92,7 +96,7 @@ class hr_expense_expense(osv.osv): ('done', 'Waiting Payment'), ('paid', 'Paid'), ], - 'Status', readonly=True, track_visibility='onchange', + 'Status', readonly=True, track_visibility='onchange', copy=False, help='When the expense request is created the status is \'Draft\'.\n It is confirmed by the user and request is sent to admin, the status is \'Waiting Confirmation\'.\ \nIf the admin accepts it, the status is \'Accepted\'.\n If the accounting entries are made for the expense request, the status is \'Waiting 
Payment\'.'), @@ -106,16 +110,6 @@ class hr_expense_expense(osv.osv): 'currency_id': _get_currency, } - def copy(self, cr, uid, id, default=None, context=None): - if default is None: - default = {} - default.update( - account_move_id=False, - date_confirm=False, - date_valid=False, - user_valid=False) - return super(hr_expense_expense, self).copy(cr, uid, id, default=default, context=context) - def unlink(self, cr, uid, ids, context=None): for rec in self.browse(cr, uid, ids, context=context): if rec.state != 'draft': @@ -209,9 +203,7 @@ class hr_expense_expense(osv.osv): c: account_move_lines potentially modified ''' cur_obj = self.pool.get('res.currency') - if context is None: - context={} - context.update({'date': exp.date_confirm or time.strftime('%Y-%m-%d')}) + context = dict(context or {}, date=exp.date_confirm or time.strftime('%Y-%m-%d')) total = 0.0 total_currency = 0.0 for i in account_move_lines: diff --git a/addons/hr_holidays/hr_holidays.py b/addons/hr_holidays/hr_holidays.py index d38cb70533902c0bbae3dfbca7699725453e1e99..21cbe50dee2d194a8d77871a193fc4c7ce72ad58 100644 --- a/addons/hr_holidays/hr_holidays.py +++ b/addons/hr_holidays/hr_holidays.py @@ -148,12 +148,17 @@ class hr_holidays(osv.osv): result[holiday.id] = True return result - def _check_date(self, cr, uid, ids): - for holiday in self.browse(cr, uid, ids): - holiday_ids = self.search(cr, uid, [('date_from', '<=', holiday.date_to), ('date_to', '>=', holiday.date_from), - ('employee_id', '=', holiday.employee_id.id), ('id', '<>', holiday.id), - ('state', 'not in', ['cancel', 'refuse'])]) - if holiday_ids: + def _check_date(self, cr, uid, ids, context=None): + for holiday in self.browse(cr, uid, ids, context=context): + domain = [ + ('date_from', '<=', holiday.date_to), + ('date_to', '>=', holiday.date_from), + ('employee_id', '=', holiday.employee_id.id), + ('id', '!=', holiday.id), + ('state', 'not in', ['cancel', 'refuse']), + ] + nholidays = self.search_count(cr, uid, domain, 
context=context) + if nholidays: return False return True @@ -162,19 +167,20 @@ class hr_holidays(osv.osv): _columns = { 'name': fields.char('Description', size=64), 'state': fields.selection([('draft', 'To Submit'), ('cancel', 'Cancelled'),('confirm', 'To Approve'), ('refuse', 'Refused'), ('validate1', 'Second Approval'), ('validate', 'Approved')], - 'Status', readonly=True, track_visibility='onchange', + 'Status', readonly=True, track_visibility='onchange', copy=False, help='The status is set to \'To Submit\', when a holiday request is created.\ \nThe status is \'To Approve\', when holiday request is confirmed by user.\ \nThe status is \'Refused\', when holiday request is refused by manager.\ \nThe status is \'Approved\', when holiday request is approved by manager.'), 'user_id':fields.related('employee_id', 'user_id', type='many2one', relation='res.users', string='User', store=True), - 'date_from': fields.datetime('Start Date', readonly=True, states={'draft':[('readonly',False)], 'confirm':[('readonly',False)]}, select=True), - 'date_to': fields.datetime('End Date', readonly=True, states={'draft':[('readonly',False)], 'confirm':[('readonly',False)]}), + 'date_from': fields.datetime('Start Date', readonly=True, states={'draft':[('readonly',False)], 'confirm':[('readonly',False)]}, select=True, copy=False), + 'date_to': fields.datetime('End Date', readonly=True, states={'draft':[('readonly',False)], 'confirm':[('readonly',False)]}, copy=False), 'holiday_status_id': fields.many2one("hr.holidays.status", "Leave Type", required=True,readonly=True, states={'draft':[('readonly',False)], 'confirm':[('readonly',False)]}), 'employee_id': fields.many2one('hr.employee', "Employee", select=True, invisible=False, readonly=True, states={'draft':[('readonly',False)], 'confirm':[('readonly',False)]}), - 'manager_id': fields.many2one('hr.employee', 'First Approval', invisible=False, readonly=True, help='This area is automatically filled by the user who validate the leave'), + 
'manager_id': fields.many2one('hr.employee', 'First Approval', invisible=False, readonly=True, copy=False, + help='This area is automatically filled by the user who validate the leave'), 'notes': fields.text('Reasons',readonly=True, states={'draft':[('readonly',False)], 'confirm':[('readonly',False)]}), - 'number_of_days_temp': fields.float('Allocation', readonly=True, states={'draft':[('readonly',False)], 'confirm':[('readonly',False)]}), + 'number_of_days_temp': fields.float('Allocation', readonly=True, states={'draft':[('readonly',False)], 'confirm':[('readonly',False)]}, copy=False), 'number_of_days': fields.function(_compute_number_of_days, string='Number of Days', store=True), 'meeting_id': fields.many2one('calendar.event', 'Meeting'), 'type': fields.selection([('remove','Leave Request'),('add','Allocation Request')], 'Request Type', required=True, readonly=True, states={'draft':[('readonly',False)], 'confirm':[('readonly',False)]}, help="Choose 'Leave Request' if someone wants to take an off-day. 
\nChoose 'Allocation Request' if you want to increase the number of leaves available for someone", select=True), @@ -183,7 +189,8 @@ class hr_holidays(osv.osv): 'department_id':fields.related('employee_id', 'department_id', string='Department', type='many2one', relation='hr.department', readonly=True, store=True), 'category_id': fields.many2one('hr.employee.category', "Employee Tag", help='Category of Employee', readonly=True, states={'draft':[('readonly',False)], 'confirm':[('readonly',False)]}), 'holiday_type': fields.selection([('employee','By Employee'),('category','By Employee Tag')], 'Allocation Mode', readonly=True, states={'draft':[('readonly',False)], 'confirm':[('readonly',False)]}, help='By Employee: Allocation/Request for individual Employee, By Employee Tag: Allocation/Request for group of employees in category', required=True), - 'manager_id2': fields.many2one('hr.employee', 'Second Approval', readonly=True, help='This area is automaticly filled by the user who validate the leave with second level (If Leave type need second validation)'), + 'manager_id2': fields.many2one('hr.employee', 'Second Approval', readonly=True, copy=False, + help='This area is automaticly filled by the user who validate the leave with second level (If Leave type need second validation)'), 'double_validation': fields.related('holiday_status_id', 'double_validation', type='boolean', relation='hr.holidays.status', string='Apply Double Validation'), 'can_reset': fields.function( _get_can_reset, @@ -207,16 +214,6 @@ class hr_holidays(osv.osv): ('date_check2', "CHECK ( (type='add') OR (date_from <= date_to))", "The start date must be anterior to the end date."), ('date_check', "CHECK ( number_of_days_temp >= 0 )", "The number of days must be greater than 0."), ] - - def copy(self, cr, uid, id, default=None, context=None): - if default is None: - default = {} - if context is None: - context = {} - default = default.copy() - default['date_from'] = False - default['date_to'] = False - 
return super(hr_holidays, self).copy(cr, uid, id, default, context=context) def _create_resource_leave(self, cr, uid, leaves, context=None): '''This method will create entry in resource calendar leave object at the time of holidays validated ''' @@ -404,9 +401,8 @@ class hr_holidays(osv.osv): leave_ids.append(self.create(cr, uid, vals, context=None)) for leave_id in leave_ids: # TODO is it necessary to interleave the calls? - self.signal_confirm(cr, uid, [leave_id]) - self.signal_validate(cr, uid, [leave_id]) - self.signal_second_validate(cr, uid, [leave_id]) + for sig in ('confirm', 'validate', 'second_validate'): + self.signal_workflow(cr, uid, [leave_id], sig) return True def holidays_confirm(self, cr, uid, ids, context=None): @@ -432,10 +428,10 @@ class hr_holidays(osv.osv): for record in self.browse(cr, uid, ids): # Delete the meeting if record.meeting_id: - meeting_obj.unlink(cr, uid, [record.meeting_id.id]) + record.meeting_id.unlink() # If a category that created several holidays, cancel all related - self.signal_refuse(cr, uid, map(attrgetter('id'), record.linked_request_ids or [])) + self.signal_workflow(cr, uid, map(attrgetter('id'), record.linked_request_ids or []), 'refuse') self._remove_resource_leave(cr, uid, ids, context=context) return True @@ -507,9 +503,8 @@ class hr_employee(osv.osv): leave_id = holiday_obj.create(cr, uid, {'name': _('Leave Request for %s') % employee.name, 'employee_id': employee.id, 'holiday_status_id': status_id, 'type': 'remove', 'holiday_type': 'employee', 'number_of_days_temp': abs(diff)}, context=context) else: return False - holiday_obj.signal_confirm(cr, uid, [leave_id]) - holiday_obj.signal_validate(cr, uid, [leave_id]) - holiday_obj.signal_second_validate(cr, uid, [leave_id]) + for sig in ('confirm', 'validate', 'second_validate'): + holiday_obj.signal_workflow(cr, uid, [leave_id], sig) return True def _get_remaining_days(self, cr, uid, ids, name, args, context=None): diff --git 
a/addons/hr_holidays/report/holidays_summary_report.py b/addons/hr_holidays/report/holidays_summary_report.py index adf09a63c855d8a5ab279767784176134d88ae65..a5889360774dfca689054a18efc53fb5ddc606d4 100644 --- a/addons/hr_holidays/report/holidays_summary_report.py +++ b/addons/hr_holidays/report/holidays_summary_report.py @@ -50,8 +50,8 @@ def emp_create_xml(self, cr, uid, dept, holiday_type, row_id, empid, name, som, if dept==0: count=0 registry = openerp.registry(cr.dbname) - p_id = registry['hr.holidays'].search(cr, uid, [('employee_id','in',[empid,False]), ('type', '=', 'remove')]) - ids_date = registry['hr.holidays'].read(cr, uid, p_id, ['date_from','date_to','holiday_status_id','state']) + holidays_ids = registry['hr.holidays'].search(cr, uid, [('employee_id','in',[empid,False]), ('type', '=', 'remove')]) + ids_date = registry['hr.holidays'].read(cr, uid, holidays_ids, ['date_from','date_to','holiday_status_id','state']) for index in range(1,61): diff=index-1 @@ -213,18 +213,14 @@ class report_custom(report_rml): emp_xml='' row_id=1 - if data['model']=='hr.employee': - for id in data['form']['emp']: - items = obj_emp.read(cr, uid, id, ['id','name']) - - emp_xml += emp_create_xml(self, cr, uid, 0, holiday_type, row_id, items['id'], items['name'], som, eom) - row_id = row_id +1 + if data['model'] == 'hr.employee': + for items in obj_emp.read(cr, uid, data['form']['emp'], ['id', 'name']): + emp_xml += emp_create_xml(self, cr, uid, 0, holiday_type, row_id, items['id'], items['name'], som, eom) + row_id = row_id +1 elif data['model']=='ir.ui.menu': - for id in data['form']['depts']: - dept = obj_dept.browse(cr, uid, id, context=context) - cr.execute("""SELECT id FROM hr_employee \ - WHERE department_id = %s""", (id,)) + for dept in obj_dept.browse(cr, uid, data['form']['depts'], context=context): + cr.execute("SELECT id FROM hr_employee WHERE department_id = %s", (dept.id,)) emp_ids = [x[0] for x in cr.fetchall()] if emp_ids==[]: continue diff --git 
a/addons/hr_holidays/test/test_hr_holiday.yml b/addons/hr_holidays/test/test_hr_holiday.yml index 7bedd3302e8650839c05846c02b077f860c71a34..1c00692c1f7bb600c0bf35da362f9e053a5b5fbd 100644 --- a/addons/hr_holidays/test/test_hr_holiday.yml +++ b/addons/hr_holidays/test/test_hr_holiday.yml @@ -19,7 +19,7 @@ - !python {model: hr.holidays}: | self.holidays_reset(cr, uid, [ref('hr_holidays_employee1_cl')]) - self.signal_confirm(cr, uid, [ref('hr_holidays_employee1_cl')]) + self.signal_workflow(cr, uid, [ref('hr_holidays_employee1_cl')], 'confirm') - I validate the holiday request by clicking on "To Approve" button. - diff --git a/addons/hr_holidays/tests/test_holidays_flow.py b/addons/hr_holidays/tests/test_holidays_flow.py index ce7da7647d05c195c5b158de35ad7233b62276f1..96ab20f6ae522c951d798165507e5c705b1edd7e 100644 --- a/addons/hr_holidays/tests/test_holidays_flow.py +++ b/addons/hr_holidays/tests/test_holidays_flow.py @@ -27,10 +27,9 @@ from openerp.exceptions import AccessError from openerp.osv.orm import except_orm from openerp.tools import mute_logger - class TestHolidaysFlow(TestHrHolidaysBase): - @mute_logger('openerp.addons.base.ir.ir_model', 'openerp.osv.orm') + @mute_logger('openerp.addons.base.ir.ir_model', 'openerp.models') def test_00_leave_request_flow(self): """ Testing leave request flow """ cr, uid = self.cr, self.uid @@ -82,6 +81,8 @@ class TestHolidaysFlow(TestHrHolidaysBase): 'date_to': datetime.today(), 'number_of_days_temp': 1, }) + ids = self.hr_holidays.search(cr, uid, [('name', '=', 'Hol10')]) + self.hr_holidays.unlink(cr, uid, ids) # Employee creates a leave request in a no-limit category hol1_id = self.hr_holidays.create(cr, self.user_employee_id, { @@ -96,12 +97,12 @@ class TestHolidaysFlow(TestHrHolidaysBase): self.assertEqual(hol1.state, 'confirm', 'hr_holidays: newly created leave request should be in confirm state') # Employee validates its leave request -> should not work - self.hr_holidays.signal_validate(cr, self.user_employee_id, 
[hol1_id]) + self.hr_holidays.signal_workflow(cr, self.user_employee_id, [hol1_id], 'validate') hol1.refresh() self.assertEqual(hol1.state, 'confirm', 'hr_holidays: employee should not be able to validate its own leave request') # HrUser validates the employee leave request - self.hr_holidays.signal_validate(cr, self.user_hrmanager_id, [hol1_id]) + self.hr_holidays.signal_workflow(cr, self.user_hrmanager_id, [hol1_id], 'validate') hol1.refresh() self.assertEqual(hol1.state, 'validate', 'hr_holidays: validates leave request should be in validate state') @@ -143,8 +144,8 @@ class TestHolidaysFlow(TestHrHolidaysBase): 'number_of_days_temp': 2, }) # HrUser validates the allocation request - self.hr_holidays.signal_validate(cr, self.user_hruser_id, [aloc1_id]) - self.hr_holidays.signal_second_validate(cr, self.user_hruser_id, [aloc1_id]) + self.hr_holidays.signal_workflow(cr, self.user_hruser_id, [aloc1_id], 'validate') + self.hr_holidays.signal_workflow(cr, self.user_hruser_id, [aloc1_id], 'second_validate') # Checks Employee has effectively some days left hol_status_2 = self.hr_holidays_status.browse(cr, self.user_employee_id, self.holidays_status_2) _check_holidays_status(hol_status_2, 2.0, 0.0, 2.0, 2.0) @@ -164,13 +165,13 @@ class TestHolidaysFlow(TestHrHolidaysBase): _check_holidays_status(hol_status_2, 2.0, 0.0, 2.0, 1.0) # HrUser validates the first step - self.hr_holidays.signal_validate(cr, self.user_hruser_id, [hol2_id]) + self.hr_holidays.signal_workflow(cr, self.user_hruser_id, [hol2_id], 'validate') hol2.refresh() self.assertEqual(hol2.state, 'validate1', 'hr_holidays: first validation should lead to validate1 state') # HrUser validates the second step - self.hr_holidays.signal_second_validate(cr, self.user_hruser_id, [hol2_id]) + self.hr_holidays.signal_workflow(cr, self.user_hruser_id, [hol2_id], 'second_validate') hol2.refresh() self.assertEqual(hol2.state, 'validate', 'hr_holidays: second validation should lead to validate state') @@ -179,7 +180,7 @@ 
class TestHolidaysFlow(TestHrHolidaysBase): _check_holidays_status(hol_status_2, 2.0, 1.0, 1.0, 1.0) # HrManager finds an error: he refuses the leave request - self.hr_holidays.signal_refuse(cr, self.user_hrmanager_id, [hol2_id]) + self.hr_holidays.signal_workflow(cr, self.user_hrmanager_id, [hol2_id], 'refuse') hol2.refresh() self.assertEqual(hol2.state, 'refuse', 'hr_holidays: refuse should lead to refuse state') @@ -188,12 +189,12 @@ class TestHolidaysFlow(TestHrHolidaysBase): _check_holidays_status(hol_status_2, 2.0, 0.0, 2.0, 2.0) # Annoyed, HrUser tries to fix its error and tries to reset the leave request -> does not work, only HrManager - self.hr_holidays.signal_reset(cr, self.user_hruser_id, [hol2_id]) + self.hr_holidays.signal_workflow(cr, self.user_hruser_id, [hol2_id], 'reset') self.assertEqual(hol2.state, 'refuse', 'hr_holidays: hr_user should not be able to reset a refused leave request') # HrManager resets the request - self.hr_holidays.signal_reset(cr, self.user_hrmanager_id, [hol2_id]) + self.hr_holidays.signal_workflow(cr, self.user_hrmanager_id, [hol2_id], 'reset') hol2.refresh() self.assertEqual(hol2.state, 'draft', 'hr_holidays: resetting should lead to draft state') @@ -205,4 +206,4 @@ class TestHolidaysFlow(TestHrHolidaysBase): 'number_of_days_temp': 4, }) with self.assertRaises(except_orm): - self.hr_holidays.signal_confirm(cr, self.user_hrmanager_id, [hol2_id]) + self.hr_holidays.signal_workflow(cr, self.user_hrmanager_id, [hol2_id], 'confirm') diff --git a/addons/hr_holidays/wizard/hr_holidays_summary_department.py b/addons/hr_holidays/wizard/hr_holidays_summary_department.py index 8a2043895725832f8c5cbf3555480072bdba1b47..ff7bacc3e13df55eea11b5e94327efa453f627af 100644 --- a/addons/hr_holidays/wizard/hr_holidays_summary_department.py +++ b/addons/hr_holidays/wizard/hr_holidays_summary_department.py @@ -39,7 +39,7 @@ class hr_holidays_summary_dept(osv.osv_memory): } def print_report(self, cr, uid, ids, context=None): - data = self.read(cr, 
uid, ids, [], context=context)[0] + data = self.read(cr, uid, ids, context=context)[0] if not data['depts']: raise osv.except_osv(_('Error!'), _('You have to select at least one Department. And try again.')) datas = { diff --git a/addons/hr_holidays/wizard/hr_holidays_summary_employees.py b/addons/hr_holidays/wizard/hr_holidays_summary_employees.py index 1ec08eb2c86c932daef40dcf1a3a0fb29d873d47..324a7f94240547553ce861a3e9dd1f0ab4ba3eb4 100644 --- a/addons/hr_holidays/wizard/hr_holidays_summary_employees.py +++ b/addons/hr_holidays/wizard/hr_holidays_summary_employees.py @@ -37,7 +37,7 @@ class hr_holidays_summary_employee(osv.osv_memory): } def print_report(self, cr, uid, ids, context=None): - data = self.read(cr, uid, ids, [], context=context)[0] + data = self.read(cr, uid, ids, context=context)[0] data['emp'] = context['active_ids'] datas = { 'ids': [], diff --git a/addons/hr_payroll/hr_payroll.py b/addons/hr_payroll/hr_payroll.py index 37b7e9eed3eaeeabd1aceffe73ede87e7329c8d8..4dea994a130c56c84b41e766ae4bb473f6505531 100644 --- a/addons/hr_payroll/hr_payroll.py +++ b/addons/hr_payroll/hr_payroll.py @@ -26,8 +26,8 @@ from datetime import datetime from datetime import timedelta from dateutil import relativedelta +from openerp import api, tools from openerp.osv import fields, osv -from openerp import tools from openerp.tools.translate import _ import openerp.addons.decimal_precision as dp @@ -46,10 +46,10 @@ class hr_payroll_structure(osv.osv): _columns = { 'name':fields.char('Name', required=True), 'code':fields.char('Reference', size=64, required=True), - 'company_id':fields.many2one('res.company', 'Company', required=True), + 'company_id':fields.many2one('res.company', 'Company', required=True, copy=False), 'note': fields.text('Description'), 'parent_id':fields.many2one('hr.payroll.structure', 'Parent'), - 'children_ids':fields.one2many('hr.payroll.structure', 'parent_id', 'Children'), + 'children_ids':fields.one2many('hr.payroll.structure', 'parent_id', 
'Children', copy=True), 'rule_ids':fields.many2many('hr.salary.rule', 'hr_structure_salary_rule_rel', 'struct_id', 'rule_id', 'Salary Rules'), } @@ -73,23 +73,11 @@ class hr_payroll_structure(osv.osv): ] def copy(self, cr, uid, id, default=None, context=None): - """ - Create a new record in hr_payroll_structure model from existing one - @param cr: cursor to database - @param user: id of current user - @param id: list of record ids on which copy method executes - @param default: dict type contains the values to be override during copy of object - @param context: context arguments, like lang, time zone - - @return: returns a id of newly created record - """ - if not default: - default = {} - default.update( - code=_("%s (copy)") % (self.browse(cr, uid, id, context=context).code), - company_id=self.pool.get('res.users').browse(cr, uid, uid, context=context).company_id.id) + default = dict(default or {}, + code=_("%s (copy)") % (self.browse(cr, uid, id, context=context).code)) return super(hr_payroll_structure, self).copy(cr, uid, id, default, context=context) + @api.cr_uid_ids_context def get_all_rules(self, cr, uid, structure_ids, context=None): """ @param structure_ids: list of structure @@ -101,6 +89,7 @@ class hr_payroll_structure(osv.osv): all_rules += self.pool.get('hr.salary.rule')._recursive_search_of_rules(cr, uid, struct.rule_ids, context=context) return all_rules + @api.cr_uid_ids_context def _get_parent_structure(self, cr, uid, struct_ids, context=None): if not struct_ids: return [] @@ -138,6 +127,7 @@ class hr_contract(osv.osv): 'schedule_pay': 'monthly', } + @api.cr_uid_ids_context def get_all_structures(self, cr, uid, contract_ids, context=None): """ @param contract_ids: list of contracts @@ -205,8 +195,14 @@ class one2many_mod2(fields.one2many): for id in ids: res[id] = [] ids2 = obj.pool[self._obj].search(cr, user, [(self._fields_id,'in',ids), ('appears_on_payslip', '=', True)], limit=self._limit) - for r in obj.pool[self._obj]._read_flat(cr, user, 
ids2, [self._fields_id], context=context, load='_classic_write'): - res[r[self._fields_id]].append( r['id'] ) + for r in obj.pool[self._obj].read(cr, user, ids2, [self._fields_id], context=context, load='_classic_write'): + key = r[self._fields_id] + if isinstance(key, tuple): + # Read return a tuple in the case where the field is a many2one + # but we want to get the id of this field. + key = key[0] + + res[key].append( r['id'] ) return res class hr_payslip_run(osv.osv): @@ -219,7 +215,7 @@ class hr_payslip_run(osv.osv): 'state': fields.selection([ ('draft', 'Draft'), ('close', 'Close'), - ], 'Status', select=True, readonly=True), + ], 'Status', select=True, readonly=True, copy=False), 'date_start': fields.date('Date From', required=True, readonly=True, states={'draft': [('readonly', False)]}), 'date_end': fields.date('Date To', required=True, readonly=True, states={'draft': [('readonly', False)]}), 'credit_note': fields.boolean('Credit Note', readonly=True, states={'draft': [('readonly', False)]}, help="If its checked, indicates that all payslips generated from here are refund payslips."), @@ -268,7 +264,7 @@ class hr_payslip(osv.osv): _columns = { 'struct_id': fields.many2one('hr.payroll.structure', 'Structure', readonly=True, states={'draft': [('readonly', False)]}, help='Defines the rules that have to be applied to this payslip, accordingly to the contract chosen. 
If you let empty the field contract, this field isn\'t mandatory anymore and thus the rules applied will be all the rules set on the structure of all contracts of the employee valid for the chosen period'), 'name': fields.char('Payslip Name', required=False, readonly=True, states={'draft': [('readonly', False)]}), - 'number': fields.char('Reference', required=False, readonly=True, states={'draft': [('readonly', False)]}), + 'number': fields.char('Reference', required=False, readonly=True, states={'draft': [('readonly', False)]}, copy=False), 'employee_id': fields.many2one('hr.employee', 'Employee', required=True, readonly=True, states={'draft': [('readonly', False)]}), 'date_from': fields.date('Date From', readonly=True, states={'draft': [('readonly', False)]}, required=True), 'date_to': fields.date('Date To', readonly=True, states={'draft': [('readonly', False)]}, required=True), @@ -277,21 +273,21 @@ class hr_payslip(osv.osv): ('verify', 'Waiting'), ('done', 'Done'), ('cancel', 'Rejected'), - ], 'Status', select=True, readonly=True, + ], 'Status', select=True, readonly=True, copy=False, help='* When the payslip is created the status is \'Draft\'.\ \n* If the payslip is under verification, the status is \'Waiting\'. 
\ \n* If the payslip is confirmed then status is set to \'Done\'.\ \n* When user cancel payslip the status is \'Rejected\'.'), 'line_ids': one2many_mod2('hr.payslip.line', 'slip_id', 'Payslip Lines', readonly=True, states={'draft':[('readonly',False)]}), - 'company_id': fields.many2one('res.company', 'Company', required=False, readonly=True, states={'draft': [('readonly', False)]}), + 'company_id': fields.many2one('res.company', 'Company', required=False, readonly=True, states={'draft': [('readonly', False)]}, copy=False), 'worked_days_line_ids': fields.one2many('hr.payslip.worked_days', 'payslip_id', 'Payslip Worked Days', required=False, readonly=True, states={'draft': [('readonly', False)]}), 'input_line_ids': fields.one2many('hr.payslip.input', 'payslip_id', 'Payslip Inputs', required=False, readonly=True, states={'draft': [('readonly', False)]}), - 'paid': fields.boolean('Made Payment Order ? ', required=False, readonly=True, states={'draft': [('readonly', False)]}), + 'paid': fields.boolean('Made Payment Order ? 
', required=False, readonly=True, states={'draft': [('readonly', False)]}, copy=False), 'note': fields.text('Internal Note', readonly=True, states={'draft':[('readonly',False)]}), 'contract_id': fields.many2one('hr.contract', 'Contract', required=False, readonly=True, states={'draft': [('readonly', False)]}), 'details_by_salary_rule_category': fields.function(_get_lines_salary_rule_category, method=True, type='one2many', relation='hr.payslip.line', string='Details by Salary Rule Category'), 'credit_note': fields.boolean('Credit Note', help="Indicates this payslip has a refund of another", readonly=True, states={'draft': [('readonly', False)]}), - 'payslip_run_id': fields.many2one('hr.payslip.run', 'Payslip Batches', readonly=True, states={'draft': [('readonly', False)]}), + 'payslip_run_id': fields.many2one('hr.payslip.run', 'Payslip Batches', readonly=True, states={'draft': [('readonly', False)]}, copy=False), 'payslip_count': fields.function(_count_detail_payslip, type='integer', string="Payslip Computation Details"), } _defaults = { @@ -312,19 +308,6 @@ class hr_payslip(osv.osv): _constraints = [(_check_dates, "Payslip 'Date From' must be before 'Date To'.", ['date_from', 'date_to'])] - def copy(self, cr, uid, id, default=None, context=None): - if not default: - default = {} - company_id = self.pool.get('res.users').browse(cr, uid, uid, context=context).company_id.id - default.update({ - 'line_ids': [], - 'company_id': company_id, - 'number': '', - 'payslip_run_id': False, - 'paid': False, - }) - return super(hr_payslip, self).copy(cr, uid, id, default, context=context) - def cancel_sheet(self, cr, uid, ids, context=None): return self.write(cr, uid, ids, {'state': 'cancel'}, context=context) @@ -340,8 +323,8 @@ class hr_payslip(osv.osv): for payslip in self.browse(cr, uid, ids, context=context): id_copy = self.copy(cr, uid, payslip.id, {'credit_note': True, 'name': _('Refund: ')+payslip.name}, context=context) self.compute_sheet(cr, uid, [id_copy], 
context=context) - self.signal_hr_verify_sheet(cr, uid, [id_copy]) - self.signal_process_sheet(cr, uid, [id_copy]) + self.signal_workflow(cr, uid, [id_copy], 'hr_verify_sheet') + self.signal_workflow(cr, uid, [id_copy], 'process_sheet') form_id = mod_obj.get_object_reference(cr, uid, 'hr_payroll', 'view_hr_payslip_form') form_res = form_id and form_id[1] or False @@ -710,14 +693,12 @@ class hr_payslip(osv.osv): def onchange_contract_id(self, cr, uid, ids, date_from, date_to, employee_id=False, contract_id=False, context=None): #TODO it seems to be the mess in the onchanges, we should have onchange_employee => onchange_contract => doing all the things - if context is None: - context = {} res = {'value':{ 'line_ids': [], 'name': '', } } - context.update({'contract': True}) + context = dict(context or {}, contract=True) if not contract_id: res['value'].update({'struct_id': False}) return self.onchange_employee_id(cr, uid, ids, date_from=date_from, date_to=date_to, employee_id=employee_id, contract_id=contract_id, context=context) @@ -792,10 +773,10 @@ class hr_salary_rule(osv.osv): 'amount_fix': fields.float('Fixed Amount', digits_compute=dp.get_precision('Payroll'),), 'amount_percentage': fields.float('Percentage (%)', digits_compute=dp.get_precision('Payroll Rate'), help='For example, enter 50.0 to apply a percentage of 50%'), 'amount_python_compute':fields.text('Python Code'), - 'amount_percentage_base':fields.char('Percentage based on', required=False, readonly=False, help='result will be affected to a variable'), - 'child_ids':fields.one2many('hr.salary.rule', 'parent_rule_id', 'Child Salary Rule'), + 'amount_percentage_base': fields.char('Percentage based on', required=False, readonly=False, help='result will be affected to a variable'), + 'child_ids':fields.one2many('hr.salary.rule', 'parent_rule_id', 'Child Salary Rule', copy=True), 'register_id':fields.many2one('hr.contribution.register', 'Contribution Register', help="Eventual third party involved in the 
salary payment of the employees."), - 'input_ids': fields.one2many('hr.rule.input', 'input_id', 'Inputs'), + 'input_ids': fields.one2many('hr.rule.input', 'input_id', 'Inputs', copy=True), 'note':fields.text('Description'), } _defaults = { @@ -842,6 +823,7 @@ result = rules.NET > categories.NET * 0.10''', 'quantity': '1.0', } + @api.cr_uid_ids_context def _recursive_search_of_rules(self, cr, uid, rule_ids, context=None): """ @param rule_ids: list of browse record diff --git a/addons/hr_payroll/wizard/hr_payroll_contribution_register_report.py b/addons/hr_payroll/wizard/hr_payroll_contribution_register_report.py index 20e3c0413aa42c29474e47721c9a1f773174b59b..a1c5a78a934e75070f330fc3cd9dbd9fd9cb263d 100644 --- a/addons/hr_payroll/wizard/hr_payroll_contribution_register_report.py +++ b/addons/hr_payroll/wizard/hr_payroll_contribution_register_report.py @@ -42,7 +42,7 @@ class payslip_lines_contribution_register(osv.osv_memory): datas = { 'ids': context.get('active_ids', []), 'model': 'hr.contribution.register', - 'form': self.read(cr, uid, ids, [], context=context)[0] + 'form': self.read(cr, uid, ids, context=context)[0] } return self.pool['report'].get_action( cr, uid, [], 'hr_payroll.report_contributionregister', data=datas, context=context diff --git a/addons/hr_payroll/wizard/hr_payroll_payslips_by_employees.py b/addons/hr_payroll/wizard/hr_payroll_payslips_by_employees.py index 3bf8b339300f8ae22c57aa7d237369fd378b1a63..3658cc835bc573548af565ce30f6fc015a6b4280 100644 --- a/addons/hr_payroll/wizard/hr_payroll_payslips_by_employees.py +++ b/addons/hr_payroll/wizard/hr_payroll_payslips_by_employees.py @@ -44,7 +44,7 @@ class hr_payslip_employees(osv.osv_memory): data = self.read(cr, uid, ids, context=context)[0] run_data = {} if context and context.get('active_id', False): - run_data = run_pool.read(cr, uid, context['active_id'], ['date_start', 'date_end', 'credit_note']) + run_data = run_pool.read(cr, uid, [context['active_id']], ['date_start', 'date_end', 
'credit_note'])[0] from_date = run_data.get('date_start', False) to_date = run_data.get('date_end', False) credit_note = run_data.get('credit_note', False) diff --git a/addons/hr_payroll_account/hr_payroll_account.py b/addons/hr_payroll_account/hr_payroll_account.py index ad8527fce43718e4bed516f7c224ec17ec81f8b2..879614f9e3eedf9db783ab18fb2cff729fca1591 100644 --- a/addons/hr_payroll_account/hr_payroll_account.py +++ b/addons/hr_payroll_account/hr_payroll_account.py @@ -36,7 +36,7 @@ class hr_payslip(osv.osv): _columns = { 'period_id': fields.many2one('account.period', 'Force Period',states={'draft': [('readonly', False)]}, readonly=True, domain=[('state','<>','done')], help="Keep empty to use the period of the validation(Payslip) date."), 'journal_id': fields.many2one('account.journal', 'Salary Journal',states={'draft': [('readonly', False)]}, readonly=True, required=True), - 'move_id': fields.many2one('account.move', 'Accounting Entry', readonly=True), + 'move_id': fields.many2one('account.move', 'Accounting Entry', readonly=True, copy=False), } def _get_default_journal(self, cr, uid, context=None): @@ -50,12 +50,6 @@ class hr_payslip(osv.osv): 'journal_id': _get_default_journal, } - def copy(self, cr, uid, id, default=None, context=None): - if default is None: - default = {} - default['move_id'] = False - return super(hr_payslip, self).copy(cr, uid, id, default, context=context) - def create(self, cr, uid, vals, context=None): if context is None: context = {} diff --git a/addons/hr_payroll_account/test/hr_payroll_account.yml b/addons/hr_payroll_account/test/hr_payroll_account.yml index c637b5f319a9b3e26162e1a36d5e605b39f97b05..0dda2f6d04b5c2c768046719713879ae7181b388 100644 --- a/addons/hr_payroll_account/test/hr_payroll_account.yml +++ b/addons/hr_payroll_account/test/hr_payroll_account.yml @@ -104,7 +104,7 @@ - !python {model: hr.payslip}: | self.cancel_sheet(cr, uid, [ref("hr_payslip_0")], None) - self.signal_draft(cr, uid, [ref("hr_payslip_0")]) + 
self.signal_workflow(cr, uid, [ref("hr_payslip_0")], 'draft') - Then I click on the "Confirm" button. - diff --git a/addons/hr_payroll_account/wizard/hr_payroll_payslips_by_employees.py b/addons/hr_payroll_account/wizard/hr_payroll_payslips_by_employees.py index 053fab16e8455f7f8c8560dbab76abb9412a3d37..2f04bfc676cac4179eb6caa9a96576218175587e 100644 --- a/addons/hr_payroll_account/wizard/hr_payroll_payslips_by_employees.py +++ b/addons/hr_payroll_account/wizard/hr_payroll_payslips_by_employees.py @@ -29,11 +29,12 @@ class hr_payslip_employees(osv.osv_memory): run_pool = self.pool.get('hr.payslip.run') if context is None: context = {} - if context and context.get('active_id', False): + if context.get('active_id'): run_data = run_pool.read(cr, uid, context['active_id'], ['journal_id']) - journal_id = run_data.get('journal_id', False) + journal_id = run_data.get('journal_id') journal_id = journal_id and journal_id[0] or False - if journal_id: context.update({'journal_id': journal_id}) + if journal_id: + context = dict(context, journal_id=journal_id) return super(hr_payslip_employees, self).compute_sheet(cr, uid, ids, context=context) diff --git a/addons/hr_recruitment/hr_recruitment.py b/addons/hr_recruitment/hr_recruitment.py index 832e2ad9e0eaeafe0f54fd5fe414a1f8de631d23..7df4e73c85f56364a34a9b077a9a84599f859e3b 100644 --- a/addons/hr_recruitment/hr_recruitment.py +++ b/addons/hr_recruitment/hr_recruitment.py @@ -324,7 +324,7 @@ class hr_applicant(osv.Model): return res def action_start_survey(self, cr, uid, ids, context=None): - context = context if context else {} + context = dict(context or {}) applicant = self.browse(cr, uid, ids, context=context)[0] survey_obj = self.pool.get('survey.survey') response_obj = self.pool.get('survey.user_input') @@ -341,7 +341,7 @@ class hr_applicant(osv.Model): def action_print_survey(self, cr, uid, ids, context=None): """ If response is available then print this response otherwise print survey form (print template of the survey) 
""" - context = context if context else {} + context = dict(context or {}) applicant = self.browse(cr, uid, ids, context=context)[0] survey_obj = self.pool.get('survey.survey') response_obj = self.pool.get('survey.user_input') @@ -397,8 +397,7 @@ class hr_applicant(osv.Model): return super(hr_applicant, self).message_new(cr, uid, msg, custom_values=defaults, context=context) def create(self, cr, uid, vals, context=None): - if context is None: - context = {} + context = dict(context or {}) context['mail_create_nolog'] = True if vals.get('department_id') and not context.get('default_department_id'): context['default_department_id'] = vals.get('department_id') @@ -500,13 +499,14 @@ class hr_applicant(osv.Model): raise osv.except_osv(_('Warning!'), _('You must define an Applied Job and a Contact Name for this applicant.')) action_model, action_id = model_data.get_object_reference(cr, uid, 'hr', 'open_view_employee_list') - dict_act_window = act_window.read(cr, uid, action_id, []) + dict_act_window = act_window.read(cr, uid, [action_id], [])[0] if emp_id: dict_act_window['res_id'] = emp_id dict_act_window['view_mode'] = 'form,tree' return dict_act_window def get_empty_list_help(self, cr, uid, help, context=None): + context = dict(context or {}) context['empty_list_help_model'] = 'hr.job' context['empty_list_help_id'] = context.get('default_job_id', None) context['empty_list_help_document_name'] = _("job applicants") diff --git a/addons/hr_recruitment/wizard/hr_recruitment_create_partner_job.py b/addons/hr_recruitment/wizard/hr_recruitment_create_partner_job.py index d2e778c38a4618231fb780fa1f5316a7e5324da6..752b65845d7bd6c4e83b79c4f509b2f066336d34 100644 --- a/addons/hr_recruitment/wizard/hr_recruitment_create_partner_job.py +++ b/addons/hr_recruitment/wizard/hr_recruitment_create_partner_job.py @@ -46,7 +46,7 @@ class hr_recruitment_partner_create(osv.osv_memory): if context is None: context = {} - data = self.read(cr, uid, ids, [], context=context)[0] + data = 
self.read(cr, uid, ids, context=context)[0] result = mod_obj._get_id(cr, uid, 'base', 'view_res_partner_filter') res = mod_obj.read(cr, uid, result, ['res_id'], context=context) @@ -76,7 +76,7 @@ class hr_recruitment_partner_create(osv.osv_memory): 'view_id': False, 'type': 'ir.actions.act_window', 'search_view_id': res['res_id'] - } + } # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: diff --git a/addons/hr_timesheet_invoice/hr_timesheet_invoice.py b/addons/hr_timesheet_invoice/hr_timesheet_invoice.py index a3d96b1cebac0485490c85ea29a2f1c476a41908..ec8c89dbe6454dc5d712694df27d993a6e11e2f9 100644 --- a/addons/hr_timesheet_invoice/hr_timesheet_invoice.py +++ b/addons/hr_timesheet_invoice/hr_timesheet_invoice.py @@ -99,7 +99,7 @@ class account_analytic_account(osv.osv): class account_analytic_line(osv.osv): _inherit = 'account.analytic.line' _columns = { - 'invoice_id': fields.many2one('account.invoice', 'Invoice', ondelete="set null"), + 'invoice_id': fields.many2one('account.invoice', 'Invoice', ondelete="set null", copy=False), 'to_invoice': fields.many2one('hr_timesheet_invoice.factor', 'Invoiceable', help="It allows to set the discount while making invoice, keep empty if the activities should not be invoiced."), } @@ -141,14 +141,6 @@ class account_analytic_line(osv.osv): _('You cannot modify an invoiced analytic line!')) return True - def copy(self, cursor, user, obj_id, default=None, context=None): - if default is None: - default = {} - default = default.copy() - default.update({'invoice_id': False}) - return super(account_analytic_line, self).copy(cursor, user, obj_id, - default, context=context) - def _get_invoice_price(self, cr, uid, account, product_id, user_id, qty, context = {}): pro_price_obj = self.pool.get('product.pricelist') if account.pricelist_id: @@ -291,6 +283,7 @@ class account_analytic_line(osv.osv): curr_line['name'] += "\n" + ("\n".join(map(lambda x: unicode(x) or '',note))) invoice_line_obj.create(cr, uid, curr_line, 
context=context) cr.execute("update account_analytic_line set invoice_id=%s WHERE account_id = %s and id IN %s", (last_invoice, account.id, tuple(ids))) + self.invalidate_cache(cr, uid, ['invoice_id'], ids, context=context) invoice_obj.button_reset_taxes(cr, uid, [last_invoice], context) return invoices @@ -314,23 +307,13 @@ class hr_analytic_timesheet(osv.osv): } return res - def copy(self, cursor, user, obj_id, default=None, context=None): - if default is None: - default = {} - default = default.copy() - default.update({'invoice_id': False}) - return super(hr_analytic_timesheet, self).copy(cursor, user, obj_id, - default, context=context) - - - class account_invoice(osv.osv): _inherit = "account.invoice" - def _get_analytic_lines(self, cr, uid, id, context=None): - iml = super(account_invoice, self)._get_analytic_lines(cr, uid, id, context=context) + def _get_analytic_lines(self, cr, uid, ids, context=None): + iml = super(account_invoice, self)._get_analytic_lines(cr, uid, ids, context=context) - inv = self.browse(cr, uid, [id], context=context)[0] + inv = self.browse(cr, uid, ids, context=context)[0] if inv.type == 'in_invoice': obj_analytic_account = self.pool.get('account.analytic.account') for il in iml: diff --git a/addons/hr_timesheet_invoice/test/test_hr_timesheet_invoice.yml b/addons/hr_timesheet_invoice/test/test_hr_timesheet_invoice.yml index 49243e501f2f37509403b83d8f587d3d9b757c3b..b6b52544cbcd6c39bfceeaf10cf842b93d34846b 100644 --- a/addons/hr_timesheet_invoice/test/test_hr_timesheet_invoice.yml +++ b/addons/hr_timesheet_invoice/test/test_hr_timesheet_invoice.yml @@ -61,7 +61,7 @@ - I set the account as property_account_income on the product and I set the tax on the product - - !record {model: product.product, id: product.product_product_consultant}: + !record {model: product.product, id: product.product_product_consultant, view: False}: property_account_income: account_income_i000 uom_id: product.product_uom_hour taxes_id: [tax10] diff --git 
a/addons/hr_timesheet_invoice/test/test_hr_timesheet_invoice_no_prod_tax.yml b/addons/hr_timesheet_invoice/test/test_hr_timesheet_invoice_no_prod_tax.yml index c3a5fae866cc9ded67de74e929e92a2201e9d30d..c5b7c9b5be85825afdeca6eac7eb12309d121d01 100644 --- a/addons/hr_timesheet_invoice/test/test_hr_timesheet_invoice_no_prod_tax.yml +++ b/addons/hr_timesheet_invoice/test/test_hr_timesheet_invoice_no_prod_tax.yml @@ -60,7 +60,7 @@ - I set the account as property_account_income on the product and I set no tax on the product (so default tax from the account will be used) - - !record {model: product.product, id: product.product_product_consultant}: + !record {model: product.product, id: product.product_product_consultant, view: False}: property_account_income: account_income_i000 uom_id: product.product_uom_hour taxes_id: [] diff --git a/addons/hr_timesheet_invoice/wizard/hr_timesheet_analytic_profit.py b/addons/hr_timesheet_invoice/wizard/hr_timesheet_analytic_profit.py index a78bc1c5bf5ce4dd18681230ec5e4c168ab2f2c1..03699eb35d6e6a7ab69c921bd0938101df641833 100644 --- a/addons/hr_timesheet_invoice/wizard/hr_timesheet_analytic_profit.py +++ b/addons/hr_timesheet_invoice/wizard/hr_timesheet_analytic_profit.py @@ -48,7 +48,7 @@ class account_analytic_profit(osv.osv_memory): def print_report(self, cr, uid, ids, context=None): line_obj = self.pool.get('account.analytic.line') data = {} - data['form'] = self.read(cr, uid , ids, [], context=context)[0] + data['form'] = self.read(cr, uid , ids, context=context)[0] ids_chk = line_obj.search(cr, uid, [ ('date', '>=', data['form']['date_from']), ('date', '<=', data['form']['date_to']), diff --git a/addons/hr_timesheet_invoice/wizard/hr_timesheet_final_invoice_create.py b/addons/hr_timesheet_invoice/wizard/hr_timesheet_final_invoice_create.py index 8d5f0c4067c2d1af71efe7cfe7f438fccaedd2d5..1a2d77c21b14329d8efe98a91d301dda8292a6ce 100644 --- a/addons/hr_timesheet_invoice/wizard/hr_timesheet_final_invoice_create.py +++ 
b/addons/hr_timesheet_invoice/wizard/hr_timesheet_final_invoice_create.py @@ -43,7 +43,7 @@ class final_invoice_create(osv.osv_memory): } def do_create(self, cr, uid, ids, context=None): - data = self.read(cr, uid, ids, [], context=context)[0] + data = self.read(cr, uid, ids, context=context)[0] # hack for fixing small issue (context should not propagate implicitly between actions) if 'default_type' in context: del context['default_type'] @@ -53,7 +53,7 @@ class final_invoice_create(osv.osv_memory): act_obj = self.pool.get('ir.actions.act_window') mod_ids = mod_obj.search(cr, uid, [('name', '=', 'action_invoice_tree1')], context=context)[0] res_id = mod_obj.read(cr, uid, mod_ids, ['res_id'], context=context)['res_id'] - act_win = act_obj.read(cr, uid, res_id, [], context=context) + act_win = act_obj.read(cr, uid, [res_id], context=context)[0] act_win['domain'] = [('id','in',invs),('type','=','out_invoice')] act_win['name'] = _('Invoices') return act_win diff --git a/addons/hr_timesheet_invoice/wizard/hr_timesheet_invoice_create.py b/addons/hr_timesheet_invoice/wizard/hr_timesheet_invoice_create.py index 874370fea22b1c885dc9e358e7b90dc529c9ab4a..344f219ca5f92341edfb663dfcbdd35506bd0159 100644 --- a/addons/hr_timesheet_invoice/wizard/hr_timesheet_invoice_create.py +++ b/addons/hr_timesheet_invoice/wizard/hr_timesheet_invoice_create.py @@ -56,14 +56,14 @@ class hr_timesheet_invoice_create(osv.osv_memory): raise osv.except_osv(_('Warning!'), _("Invoice is already linked to some of the analytic line(s)!")) def do_create(self, cr, uid, ids, context=None): - data = self.read(cr, uid, ids, [], context=context)[0] + data = self.read(cr, uid, ids, context=context)[0] # Create an invoice based on selected timesheet lines invs = self.pool.get('account.analytic.line').invoice_cost_create(cr, uid, context['active_ids'], data, context=context) mod_obj = self.pool.get('ir.model.data') act_obj = self.pool.get('ir.actions.act_window') - mod_ids = mod_obj.search(cr, uid, [('name', 
'=', 'action_invoice_tree1')], context=context)[0] - res_id = mod_obj.read(cr, uid, mod_ids, ['res_id'], context=context)['res_id'] - act_win = act_obj.read(cr, uid, res_id, [], context=context) + mod_ids = mod_obj.search(cr, uid, [('name', '=', 'action_invoice_tree1')], context=context) + res_id = mod_obj.read(cr, uid, mod_ids, ['res_id'], context=context)[0]['res_id'] + act_win = act_obj.read(cr, uid, [res_id], context=context)[0] act_win['domain'] = [('id','in',invs),('type','=','out_invoice')] act_win['name'] = _('Invoices') return act_win diff --git a/addons/hr_timesheet_sheet/hr_timesheet_sheet.py b/addons/hr_timesheet_sheet/hr_timesheet_sheet.py index 425f64062dc6b857366f3520e8e1bc139775c23e..a2845171212940681aa0787149994453d205795c 100644 --- a/addons/hr_timesheet_sheet/hr_timesheet_sheet.py +++ b/addons/hr_timesheet_sheet/hr_timesheet_sheet.py @@ -123,7 +123,7 @@ class hr_timesheet_sheet(osv.osv): self.check_employee_attendance_state(cr, uid, sheet.id, context=context) di = sheet.user_id.company_id.timesheet_max_difference if (abs(sheet.total_difference) < di) or not di: - self.signal_confirm(cr, uid, [sheet.id]) + sheet.signal_workflow('confirm') else: raise osv.except_osv(_('Warning!'), _('Please verify that the total difference of the sheet is lower than %.2f.') %(di,)) return True diff --git a/addons/hr_timesheet_sheet/test/test_hr_timesheet_sheet.yml b/addons/hr_timesheet_sheet/test/test_hr_timesheet_sheet.yml index 614e8b19af5d5393f29da6dac101cca529e2dcfe..c63133bd1544a774ca3640040be65085b108babd 100644 --- a/addons/hr_timesheet_sheet/test/test_hr_timesheet_sheet.yml +++ b/addons/hr_timesheet_sheet/test/test_hr_timesheet_sheet.yml @@ -9,7 +9,7 @@ - I assign this product(Service on Timesheet) and journal(Timesheet Journal) to employee "Gilles Gravie" - - !record {model: hr.employee, id: hr.employee_qdp}: + !record {model: hr.employee, id: hr.employee_qdp, view: False}: product_id: product.product_product_consultant journal_id: 
hr_timesheet.analytic_journal - @@ -27,7 +27,7 @@ !record {model: hr.attendance, id: hr_attendance_1}: action: sign_in employee_id: 'hr.employee_qdp' - name: !eval datetime.now().strftime('%Y-%m-%d 09:%M:%S') + name: !eval datetime.now().strftime('%Y-%m-%d 09:12:37') - I test that Gilles in signed in - diff --git a/addons/im_livechat/im_livechat.py b/addons/im_livechat/im_livechat.py index 23880a18986db71f18bdab6e6f9924b989119a94..ca7d1708bf530ae9f3f2e2a8557b2006bd5afa81 100644 --- a/addons/im_livechat/im_livechat.py +++ b/addons/im_livechat/im_livechat.py @@ -87,10 +87,10 @@ class im_livechat_channel(osv.Model): 'are_you_inside': fields.function(_are_you_inside, type='boolean', string='Are you inside the matrix?', store=False), 'script_internal': fields.function(_script_internal, type='text', string='Script (internal)', store=False), 'script_external': fields.function(_script_external, type='text', string='Script (external)', store=False), - 'web_page': fields.function(_web_page, type='url', string='Web Page', store=False, size="200"), - 'button_text': fields.char(string="Text of the Button", size=200), - 'input_placeholder': fields.char(string="Chat Input Placeholder", size=200), - 'default_message': fields.char(string="Welcome Message", size=200, help="This is an automated 'welcome' message that your visitor will see when they initiate a new chat session."), + 'web_page': fields.function(_web_page, type='char', string='Web Page', store=False), + 'button_text': fields.char(string="Text of the Button"), + 'input_placeholder': fields.char(string="Chat Input Placeholder"), + 'default_message': fields.char(string="Welcome Message", help="This is an automated 'welcome' message that your visitor will see when they initiate a new chat session."), # image: all image fields are base64 encoded and PIL-supported 'image': fields.binary("Photo", help="This field holds the image used as photo for the group, limited to 1024x1024px."), diff --git 
a/addons/im_livechat/views/im_livechat_view.xml b/addons/im_livechat/views/im_livechat_view.xml index 8f14a81efc7724048b1652f1e267b18ad0608641..fa18f4cab16168ddaa49abb306d908045744bdc3 100644 --- a/addons/im_livechat/views/im_livechat_view.xml +++ b/addons/im_livechat/views/im_livechat_view.xml @@ -29,7 +29,7 @@ <field name="arch" type="xml"> <kanban> <field name="name"/> - <field name="web_page"/> + <field name="web_page" widget="url"/> <field name="are_you_inside"/> <field name="user_ids"/> <templates> @@ -112,7 +112,7 @@ <p> or copy this url and send it by email to your customers or suppliers: </p> - <field name="web_page" readonly="1" class="oe_tag"/> + <field name="web_page" readonly="1" class="oe_tag" widget="url"/> <p>For website built with Odoo CMS, please install the website_livechat module. Then go to Settings > Website Settings and select the Live Chat Channel you want to add on your website.</p> </div> diff --git a/addons/l10n_be/wizard/l10n_be_account_vat_declaration.py b/addons/l10n_be/wizard/l10n_be_account_vat_declaration.py index 5734b7a17abd7b914c17c4656547a53fa792b3ec..0583d6787f0654008ae778b71e9f4bc4083f3e9b 100644 --- a/addons/l10n_be/wizard/l10n_be_account_vat_declaration.py +++ b/addons/l10n_be/wizard/l10n_be_account_vat_declaration.py @@ -196,6 +196,7 @@ class l10n_be_vat_declaration(osv.osv_memory): data_of_file += '\n\t</ns2:VATDeclaration> \n</ns2:VATConsignment>' model_data_ids = mod_obj.search(cr, uid,[('model','=','ir.ui.view'),('name','=','view_vat_save')], context=context) resource_id = mod_obj.read(cr, uid, model_data_ids, fields=['res_id'], context=context)[0]['res_id'] + context = dict(context or {}) context['file_save'] = data_of_file return { 'name': _('Save XML For Vat declaration'), diff --git a/addons/l10n_be/wizard/l10n_be_vat_intra.py b/addons/l10n_be/wizard/l10n_be_vat_intra.py index 0629f9fa2718e11c6feb8ec208ba2c22199ba179..386aa0906fd9d44b69f1ad5bc23a40dd487d5cc4 100644 --- a/addons/l10n_be/wizard/l10n_be_vat_intra.py 
+++ b/addons/l10n_be/wizard/l10n_be_vat_intra.py @@ -253,6 +253,7 @@ class partner_vat_intra(osv.osv_memory): data_decl = '\n\t<ns2:IntraListing SequenceNumber="1" ClientsNbr="%(clientnbr)s" DeclarantReference="%(dnum)s" AmountSum="%(amountsum).2f">' % (xml_data) data_file += data_head + data_decl + data_comp_period + data_clientinfo + '\n\t\t<ns2:Comment>%(comments)s</ns2:Comment>\n\t</ns2:IntraListing>\n</ns2:IntraConsignment>' % (xml_data) + context = dict(context or {}) context['file_save'] = data_file model_data_ids = mod_obj.search(cursor, user,[('model','=','ir.ui.view'),('name','=','view_vat_intra_save')], context=context) diff --git a/addons/l10n_be_invoice_bba/invoice.py b/addons/l10n_be_invoice_bba/invoice.py index 337c5a3cdb4e360e11b6f723766dd8bdb61f90e7..257347071e2e6464dad9fe7a36a567a7cddfcbff 100644 --- a/addons/l10n_be_invoice_bba/invoice.py +++ b/addons/l10n_be_invoice_bba/invoice.py @@ -21,6 +21,7 @@ ############################################################################## import re, time, random +from openerp import api from openerp.osv import fields, osv from openerp.tools.translate import _ import logging @@ -35,6 +36,7 @@ account.invoice object: class account_invoice(osv.osv): _inherit = 'account.invoice' + @api.cr_uid_context def _get_reference_type(self, cursor, user, context=None): """Add BBA Structured Communication Type and change labels from 'reference' into 'communication' """ res = super(account_invoice, self)._get_reference_type(cursor, user, diff --git a/addons/l10n_fr/l10n_fr.py b/addons/l10n_fr/l10n_fr.py index 1173751431fe52a10f38634b74e2f5e31d98e791..1f10e95b44ee53bd8f75c16abea8486485e48c24 100644 --- a/addons/l10n_fr/l10n_fr.py +++ b/addons/l10n_fr/l10n_fr.py @@ -27,7 +27,7 @@ class l10n_fr_report(osv.osv): _columns = { 'code': fields.char('Code', size=64), 'name': fields.char('Name'), - 'line_ids': fields.one2many('l10n.fr.line', 'report_id', 'Lines'), + 'line_ids': fields.one2many('l10n.fr.line', 'report_id', 'Lines', 
copy=True), } _sql_constraints = [ ('code_uniq', 'unique (code)','The code report must be unique !') diff --git a/addons/l10n_in_hr_payroll/l10n_in_hr_payroll.py b/addons/l10n_in_hr_payroll/l10n_in_hr_payroll.py index b18f2d1f1aeb60d320db8a0746b4b7eb215dc34c..0af3535bfd045bac537c1bc9251a48324d0b28ad 100644 --- a/addons/l10n_in_hr_payroll/l10n_in_hr_payroll.py +++ b/addons/l10n_in_hr_payroll/l10n_in_hr_payroll.py @@ -64,9 +64,9 @@ class payroll_advice(osv.osv): ('confirm', 'Confirmed'), ('cancel', 'Cancelled'), ], 'Status', select=True, readonly=True), - 'number':fields.char('Reference', readonly=True), - 'line_ids':fields.one2many('hr.payroll.advice.line', 'advice_id', 'Employee Salary', states={'draft': [('readonly', False)]}, readonly=True), - 'chaque_nos':fields.char('Cheque Numbers'), + 'number': fields.char('Reference', readonly=True), + 'line_ids': fields.one2many('hr.payroll.advice.line', 'advice_id', 'Employee Salary', states={'draft': [('readonly', False)]}, readonly=True, copy=True), + 'chaque_nos': fields.char('Cheque Numbers'), 'neft': fields.boolean('NEFT Transaction', help="Check this box if your company use online transfer for salary"), 'company_id':fields.many2one('res.company', 'Company', required=True, readonly=True, states={'draft': [('readonly', False)]}), 'bank_id':fields.many2one('res.bank', 'Bank', readonly=True, states={'draft': [('readonly', False)]}, help="Select the Bank from which the salary is going to be paid"), @@ -162,13 +162,10 @@ class hr_payslip_run(osv.osv): _inherit = 'hr.payslip.run' _description = 'Payslip Batches' _columns = { - 'available_advice': fields.boolean('Made Payment Advice?', help="If this box is checked which means that Payment Advice exists for current batch", readonly=False), + 'available_advice': fields.boolean('Made Payment Advice?', + help="If this box is checked which means that Payment Advice exists for current batch", + readonly=False, copy=False), } - def copy(self, cr, uid, id, default={}, context=None): 
- if not default: - default = {} - default.update({'available_advice': False}) - return super(hr_payslip_run, self).copy(cr, uid, id, default, context=context) def draft_payslip_run(self, cr, uid, ids, context=None): res = super(hr_payslip_run, self).draft_payslip_run(cr, uid, ids, context=context) @@ -195,8 +192,8 @@ class hr_payslip_run(osv.osv): slip_ids = [] for slip_id in run.slip_ids: # TODO is it necessary to interleave the calls ? - payslip_pool.signal_hr_verify_sheet(cr, uid, [slip_id.id]) - payslip_pool.signal_process_sheet(cr, uid, [slip_id.id]) + payslip_pool.signal_workflow(cr, uid, [slip_id.id], 'hr_verify_sheet') + payslip_pool.signal_workflow(cr, uid, [slip_id.id], 'process_sheet') slip_ids.append(slip_id.id) for slip in payslip_pool.browse(cr, uid, slip_ids, context=context): @@ -252,16 +249,9 @@ class hr_payslip(osv.osv): _inherit = 'hr.payslip' _description = 'Pay Slips' _columns = { - 'advice_id': fields.many2one('hr.payroll.advice', 'Bank Advice') + 'advice_id': fields.many2one('hr.payroll.advice', 'Bank Advice', copy=False) } - def copy(self, cr, uid, id, default={}, context=None): - if not default: - default = {} - default.update({'advice_id' : False}) - return super(hr_payslip, self).copy(cr, uid, id, default, context=context) - - class res_company(osv.osv): _inherit = 'res.company' diff --git a/addons/l10n_ma/l10n_ma.py b/addons/l10n_ma/l10n_ma.py index fe3fad6022fd264f28456a5bdf00a5e65dd074be..d937d2daadc2c58c7d6b4ba8d146152663434ce8 100644 --- a/addons/l10n_ma/l10n_ma.py +++ b/addons/l10n_ma/l10n_ma.py @@ -28,7 +28,7 @@ class l10n_ma_report(osv.osv): _columns = { 'code': fields.char('Code', size=64), 'name': fields.char('Name'), - 'line_ids': fields.one2many('l10n.ma.line', 'report_id', 'Lines'), + 'line_ids': fields.one2many('l10n.ma.line', 'report_id', 'Lines', copy=True), } _sql_constraints = [ ('code_uniq', 'unique (code)','The code report must be unique !') diff --git a/addons/l10n_uk/i18n/l10n_chart_uk_minimal.pot 
b/addons/l10n_uk/i18n/l10n_chart_uk_minimal.pot deleted file mode 100644 index 51fd5b8bd3f512800a72ce1fcda20cb57e6bca6a..0000000000000000000000000000000000000000 --- a/addons/l10n_uk/i18n/l10n_chart_uk_minimal.pot +++ /dev/null @@ -1,108 +0,0 @@ -# Translation of OpenERP Server. -# This file contains the translation of the following modules: -# * l10n_chart_uk_minimal -# -msgid "" -msgstr "" -"Project-Id-Version: OpenERP Server 6.0dev\n" -"Report-Msgid-Bugs-To: support@openerp.com\n" -"POT-Creation-Date: 2010-10-18 17:46:36+0000\n" -"PO-Revision-Date: 2010-10-18 17:46:36+0000\n" -"Last-Translator: <>\n" -"Language-Team: \n" -"MIME-Version: 1.0\n" -"Content-Type: text/plain; charset=UTF-8\n" -"Content-Transfer-Encoding: \n" -"Plural-Forms: \n" - -#. module: l10n_chart_uk_minimal -#: model:account.account.type,name:l10n_chart_uk_minimal.account_type_receivable -msgid "Receivable" -msgstr "" - -#. module: l10n_chart_uk_minimal -#: model:account.account.type,name:l10n_chart_uk_minimal.acct_type_asset_view -msgid "Asset View" -msgstr "" - -#. module: l10n_chart_uk_minimal -#: model:account.account.type,name:l10n_chart_uk_minimal.acct_type_expense_view -msgid "Expense View" -msgstr "" - -#. module: l10n_chart_uk_minimal -#: model:ir.actions.todo,note:l10n_chart_uk_minimal.config_call_account_template_uk_minimal -msgid "Generate Chart of Accounts from a Chart Template. You will be asked to pass the name of the company, the chart template to follow, the no. of digits to generate the code for your accounts and Bank account, currency to create Journals. Thus,the pure copy of chart Template is generated.\n" -" This is the same wizard that runs from Financial Management/Configuration/Financial Accounting/Financial Accounts/Generate Chart of Accounts from a Chart Template." -msgstr "" - -#. module: l10n_chart_uk_minimal -#: constraint:account.account.template:0 -msgid "Error ! You can not create recursive account templates." -msgstr "" - -#. 
module: l10n_chart_uk_minimal -#: model:account.account.type,name:l10n_chart_uk_minimal.acct_type_income_view -msgid "Income View" -msgstr "" - -#. module: l10n_chart_uk_minimal -#: model:account.account.type,name:l10n_chart_uk_minimal.account_type_income -msgid "Income" -msgstr "" - -#. module: l10n_chart_uk_minimal -#: model:account.account.type,name:l10n_chart_uk_minimal.account_type_tax -msgid "Tax" -msgstr "" - -#. module: l10n_chart_uk_minimal -#: model:account.account.type,name:l10n_chart_uk_minimal.account_type_cash -msgid "Cash" -msgstr "" - -#. module: l10n_chart_uk_minimal -#: model:account.account.type,name:l10n_chart_uk_minimal.account_type_liability -msgid "Liability" -msgstr "" - -#. module: l10n_chart_uk_minimal -#: model:account.account.type,name:l10n_chart_uk_minimal.account_type_payable -msgid "Payable" -msgstr "" - -#. module: l10n_chart_uk_minimal -#: model:ir.module.module,shortdesc:l10n_chart_uk_minimal.module_meta_information -msgid "United Kingdom - minimal" -msgstr "" - -#. module: l10n_chart_uk_minimal -#: model:ir.module.module,description:l10n_chart_uk_minimal.module_meta_information -msgid "This is the base module to manage the accounting chart for United Kingdom in OpenERP." -msgstr "" - -#. module: l10n_chart_uk_minimal -#: model:account.account.type,name:l10n_chart_uk_minimal.account_type_equity -msgid "Equity" -msgstr "" - -#. module: l10n_chart_uk_minimal -#: constraint:account.tax.code.template:0 -msgid "Error ! You can not create recursive Tax Codes." -msgstr "" - -#. module: l10n_chart_uk_minimal -#: model:account.account.type,name:l10n_chart_uk_minimal.acct_type_liability_view -msgid "Liability View" -msgstr "" - -#. module: l10n_chart_uk_minimal -#: model:account.account.type,name:l10n_chart_uk_minimal.account_type_expense -msgid "Expense" -msgstr "" - -#. 
module: l10n_chart_uk_minimal -#: model:account.account.type,name:l10n_chart_uk_minimal.account_type_view -msgid "View" -msgstr "" - diff --git a/addons/lunch/lunch.py b/addons/lunch/lunch.py index c976a6b4ed0d222ce3d2c801ada78e4342c69644..1d17b4468ac993e30fce75199b304f270c4d7262 100644 --- a/addons/lunch/lunch.py +++ b/addons/lunch/lunch.py @@ -299,7 +299,9 @@ class lunch_order(osv.Model): _columns = { 'user_id': fields.many2one('res.users', 'User Name', required=True, readonly=True, states={'new':[('readonly', False)]}), 'date': fields.date('Date', required=True, readonly=True, states={'new':[('readonly', False)]}), - 'order_line_ids': fields.one2many('lunch.order.line', 'order_id', 'Products', ondelete="cascade", readonly=True, states={'new':[('readonly', False)]}), + 'order_line_ids': fields.one2many('lunch.order.line', 'order_id', 'Products', + ondelete="cascade", readonly=True, states={'new':[('readonly', False)]}, + copy=True), 'total': fields.function(_price_get, string="Total", store={ 'lunch.order.line': (_fetch_orders_from_lines, ['product_id','order_id'], 20), }), @@ -307,7 +309,7 @@ class lunch_order(osv.Model): ('confirmed','Confirmed'), \ ('cancelled','Cancelled'), \ ('partially','Partially Confirmed')] \ - ,'Status', readonly=True, select=True), + ,'Status', readonly=True, select=True, copy=False), 'alerts': fields.function(_alerts_get, string="Alerts", type='text'), } @@ -336,8 +338,7 @@ class lunch_order_line(osv.Model): """ The order_line is ordered to the supplier but isn't received yet """ - for order_line in self.browse(cr, uid, ids, context=context): - order_line.write({'state': 'ordered'}, context=context) + self.write(cr, uid, ids, {'state': 'ordered'}, context=context) return self._update_order_lines(cr, uid, ids, context=context) def confirm(self, cr, uid, ids, context=None): @@ -356,7 +357,7 @@ class lunch_order_line(osv.Model): 'date': order_line.date, } cashmove_ref.create(cr, uid, values, context=context) - order_line.write({'state': 
'confirmed'}, context=context) + order_line.write({'state': 'confirmed'}) return self._update_order_lines(cr, uid, ids, context=context) def _update_order_lines(self, cr, uid, ids, context=None): @@ -384,8 +385,8 @@ class lunch_order_line(osv.Model): cancel one or more order.line, update order status and unlink existing cashmoves """ cashmove_ref = self.pool.get('lunch.cashmove') + self.write(cr, uid, ids, {'state':'cancelled'}, context=context) for order_line in self.browse(cr, uid, ids, context=context): - order_line.write({'state':'cancelled'}, context=context) cash_ids = [cash.id for cash in order_line.cashmove] cashmove_ref.unlink(cr, uid, cash_ids, context=context) return self._update_order_lines(cr, uid, ids, context=context) diff --git a/addons/lunch/tests/test_lunch.py b/addons/lunch/tests/test_lunch.py index 49e936848fe3e402ff93341f068639305c05479f..2a2a6aa0ed7aa6204d73952010c9b602f9bab59d 100644 --- a/addons/lunch/tests/test_lunch.py +++ b/addons/lunch/tests/test_lunch.py @@ -58,13 +58,13 @@ class Test_Lunch(common.TransactionCase): self.order_one = self.lunch_order_line.browse(cr,uid,self.new_id_order_line,context=None) #we check that our order_line is a 'new' one and that there are no cashmove linked to that order_line: self.assertEqual(self.order_one.state,'new') - self.assertEqual(self.order_one.cashmove, []) + self.assertEqual(list(self.order_one.cashmove), []) #we order that orderline so it's state will be 'ordered' self.order_one.order() self.order_one = self.lunch_order_line.browse(cr,uid,self.new_id_order_line,context=None) #we check that our order_line is a 'ordered' one and that there are no cashmove linked to that order_line: self.assertEqual(self.order_one.state,'ordered') - self.assertEqual(self.order_one.cashmove, []) + self.assertEqual(list(self.order_one.cashmove), []) def test_01_lunch_order(self): """Change the state of an order line from 'new' to 'ordered' then to 'confirmed'. 
Check that there is a cashmove linked to the order line""" @@ -76,7 +76,7 @@ class Test_Lunch(common.TransactionCase): self.order_one = self.lunch_order_line.browse(cr,uid,self.new_id_order_line,context=None) #we check that our order_line is a 'confirmed' one and that there are a cashmove linked to that order_line with an amount equals to the order line price: self.assertEqual(self.order_one.state,'confirmed') - self.assertTrue(self.order_one.cashmove!=[]) + self.assertTrue(self.order_one.cashmove) self.assertTrue(self.order_one.cashmove[0].amount==-self.order_one.price) def test_02_lunch_order(self): @@ -89,4 +89,4 @@ class Test_Lunch(common.TransactionCase): self.order_one = self.lunch_order_line.browse(cr,uid,self.new_id_order_line,context=None) #We check that the state is cancelled and that the cashmove has been deleted self.assertEqual(self.order_one.state,'cancelled') - self.assertTrue(self.order_one.cashmove==[]) \ No newline at end of file + self.assertFalse(self.order_one.cashmove) \ No newline at end of file diff --git a/addons/mail/mail_followers.py b/addons/mail/mail_followers.py index 2045353b0e5e4215522af651e9c7cf07a79a2b1b..42238e75553810653d45ad929a18b41da31ff928 100644 --- a/addons/mail/mail_followers.py +++ b/addons/mail/mail_followers.py @@ -48,6 +48,25 @@ class mail_followers(osv.Model): help="Message subtypes followed, meaning subtypes that will be pushed onto the user's Wall."), } + # + # Modifying followers change access rights to individual documents. As the + # cache may contain accessible/inaccessible data, one has to refresh it. 
+ # + def create(self, cr, uid, vals, context=None): + res = super(mail_followers, self).create(cr, uid, vals, context=context) + self.invalidate_cache(cr, uid, context=context) + return res + + def write(self, cr, uid, ids, vals, context=None): + res = super(mail_followers, self).write(cr, uid, ids, vals, context=context) + self.invalidate_cache(cr, uid, context=context) + return res + + def unlink(self, cr, uid, ids, context=None): + res = super(mail_followers, self).unlink(cr, uid, ids, context=context) + self.invalidate_cache(cr, uid, context=context) + return res + class mail_notification(osv.Model): """ Class holding notifications pushed to partners. Followers and partners @@ -60,7 +79,7 @@ class mail_notification(osv.Model): _columns = { 'partner_id': fields.many2one('res.partner', string='Contact', ondelete='cascade', required=True, select=1), - 'read': fields.boolean('Read', select=1), + 'is_read': fields.boolean('Read', select=1), 'starred': fields.boolean('Starred', select=1, help='Starred message that goes into the todo mailbox'), 'message_id': fields.many2one('mail.message', string='Message', @@ -68,14 +87,14 @@ class mail_notification(osv.Model): } _defaults = { - 'read': False, + 'is_read': False, 'starred': False, } def init(self, cr): cr.execute('SELECT indexname FROM pg_indexes WHERE indexname = %s', ('mail_notification_partner_id_read_starred_message_id',)) if not cr.fetchone(): - cr.execute('CREATE INDEX mail_notification_partner_id_read_starred_message_id ON mail_notification (partner_id, read, starred, message_id)') + cr.execute('CREATE INDEX mail_notification_partner_id_read_starred_message_id ON mail_notification (partner_id, is_read, starred, message_id)') def get_partners_to_email(self, cr, uid, ids, message, context=None): """ Return the list of partners to notify, based on their preferences. 
@@ -86,7 +105,7 @@ class mail_notification(osv.Model): """ notify_pids = [] for notification in self.browse(cr, uid, ids, context=context): - if notification.read: + if notification.is_read: continue partner = notification.partner_id # Do not send to partners without email address defined @@ -148,12 +167,12 @@ class mail_notification(osv.Model): existing_pids.add(notification.partner_id.id) # update existing notifications - self.write(cr, uid, ids, {'read': False}, context=context) + self.write(cr, uid, ids, {'is_read': False}, context=context) # create new notifications new_pids = set(partner_ids) - existing_pids for new_pid in new_pids: - new_notif_ids.append(self.create(cr, uid, {'message_id': message_id, 'partner_id': new_pid, 'read': False}, context=context)) + new_notif_ids.append(self.create(cr, uid, {'message_id': message_id, 'partner_id': new_pid, 'is_read': False}, context=context)) return new_notif_ids def _notify_email(self, cr, uid, ids, message_id, force_send=False, user_signature=True, context=None): diff --git a/addons/mail/mail_group.py b/addons/mail/mail_group.py index 9881e6422c69068dcf8e67ae4aa94f63510341df..eaca5c1182e091094ad85fed021faff86453764f 100644 --- a/addons/mail/mail_group.py +++ b/addons/mail/mail_group.py @@ -37,7 +37,7 @@ class mail_group(osv.Model): _inherits = {'mail.alias': 'alias_id'} def _get_image(self, cr, uid, ids, name, args, context=None): - result = dict.fromkeys(ids, False) + result = {} for obj in self.browse(cr, uid, ids, context=context): result[obj.id] = tools.image_get_resized_images(obj.image) return result @@ -163,15 +163,14 @@ class mail_group(osv.Model): def unlink(self, cr, uid, ids, context=None): groups = self.browse(cr, uid, ids, context=context) - # Cascade-delete mail aliases as well, as they should not exist without the mail group. 
- mail_alias = self.pool.get('mail.alias') alias_ids = [group.alias_id.id for group in groups if group.alias_id] + menu_ids = [group.menu_id.id for group in groups if group.menu_id] # Delete mail_group res = super(mail_group, self).unlink(cr, uid, ids, context=context) - # Delete alias - mail_alias.unlink(cr, SUPERUSER_ID, alias_ids, context=context) + # Cascade-delete mail aliases as well, as they should not exist without the mail group. + self.pool.get('mail.alias').unlink(cr, SUPERUSER_ID, alias_ids, context=context) # Cascade-delete menu entries as well - self.pool.get('ir.ui.menu').unlink(cr, SUPERUSER_ID, [group.menu_id.id for group in groups if group.menu_id], context=context) + self.pool.get('ir.ui.menu').unlink(cr, SUPERUSER_ID, menu_ids, context=context) return res def write(self, cr, uid, ids, vals, context=None): diff --git a/addons/mail/mail_group_menu.py b/addons/mail/mail_group_menu.py index 56d4b5dd5648f562d8aaabd76cb4bb87f797de39..3e849e62ac571a29ca475b7f9f9bccda93a8cea3 100644 --- a/addons/mail/mail_group_menu.py +++ b/addons/mail/mail_group_menu.py @@ -42,7 +42,7 @@ class ir_ui_menu(osv.osv): following. Access are done using SUPERUSER_ID to avoid access rights issues for an internal back-end algorithm. 
""" ids = super(ir_ui_menu, self).search(cr, uid, args, offset=offset, limit=limit, order=order, context=context, count=False) - partner_id = self.pool.get('res.users').read(cr, uid, uid, ['partner_id'], context=context)['partner_id'][0] + partner_id = self.pool.get('res.users').read(cr, uid, [uid], ['partner_id'], context=context)[0]['partner_id'][0] follower_obj = self.pool.get('mail.followers') for menu in self.browse(cr, uid, ids, context=context): if menu.mail_group_id: diff --git a/addons/mail/mail_mail.py b/addons/mail/mail_mail.py index 0a6a250bcaaca6ed5d50818d598125f4675d3a77..aeea2c0b1a5802326d52ce4a62d6cd92858d2b34 100644 --- a/addons/mail/mail_mail.py +++ b/addons/mail/mail_mail.py @@ -24,7 +24,7 @@ import logging import re from urlparse import urljoin -from openerp import tools +from openerp import api, tools from openerp import SUPERUSER_ID from openerp.addons.base.ir.ir_mail_server import MailDeliveryException from openerp.osv import fields, osv @@ -51,7 +51,7 @@ class mail_mail(osv.Model): ('received', 'Received'), ('exception', 'Delivery Failed'), ('cancel', 'Cancelled'), - ], 'Status', readonly=True), + ], 'Status', readonly=True, copy=False), 'auto_delete': fields.boolean('Auto Delete', help="Permanently delete this email after sending it, to save space"), 'references': fields.text('References', help='Message references, such as identifiers of previous messages', readonly=1), @@ -59,7 +59,7 @@ class mail_mail(osv.Model): 'recipient_ids': fields.many2many('res.partner', string='To (Partners)'), 'email_cc': fields.char('Cc', help='Carbon copy message recipients'), 'body_html': fields.text('Rich-text Contents', help="Rich-text/HTML message"), - 'headers': fields.text('Headers'), + 'headers': fields.text('Headers', copy=False), # Auto-detected based on create() - if 'mail_message_id' was passed then this mail is a notification # and during unlink() we will not cascade delete the parent and its attachments 'notification': fields.boolean('Is 
Notification', @@ -98,6 +98,7 @@ class mail_mail(osv.Model): def cancel(self, cr, uid, ids, context=None): return self.write(cr, uid, ids, {'state': 'cancel'}, context=context) + @api.cr_uid def process_email_queue(self, cr, uid, ids=None, context=None): """Send immediately queued messages, committing after each message is sent - this is not transactional and should @@ -226,8 +227,7 @@ class mail_mail(osv.Model): email sending process has failed :return: True """ - if context is None: - context = {} + context = dict(context or {}) ir_mail_server = self.pool.get('ir.mail_server') ir_attachment = self.pool['ir.attachment'] for mail in self.browse(cr, SUPERUSER_ID, ids, context=context): @@ -301,8 +301,9 @@ class mail_mail(osv.Model): # /!\ can't use mail.state here, as mail.refresh() will cause an error # see revid:odo@openerp.com-20120622152536-42b2s28lvdv3odyr in 6.1 + if mail_sent: + _logger.info('Mail with ID %r and Message-Id %r successfully sent', mail.id, mail.message_id) self._postprocess_sent_message(cr, uid, mail, context=context, mail_sent=mail_sent) - _logger.info('Mail with ID %r and Message-Id %r successfully sent', mail.id, mail.message_id) except MemoryError: # prevent catching transient MemoryErrors, bubble up to notify user or abort cron job # instead of marking the mail as failed diff --git a/addons/mail/mail_message.py b/addons/mail/mail_message.py index 42aaef80703086ffb6351e068174596ad5ba4200..d11495289f7bae5b105517fa79e605e894360e13 100644 --- a/addons/mail/mail_message.py +++ b/addons/mail/mail_message.py @@ -24,7 +24,7 @@ import logging from openerp import tools from email.header import decode_header -from openerp import SUPERUSER_ID +from openerp import SUPERUSER_ID, api from openerp.osv import osv, orm, fields from openerp.tools import html_email_clean from openerp.tools.translate import _ @@ -88,7 +88,7 @@ class mail_message(osv.Model): notif_ids = notif_obj.search(cr, uid, [ ('partner_id', 'in', [partner_id]), ('message_id', 'in', ids), - 
('read', '=', False), + ('is_read', '=', False), ], context=context) for notif in notif_obj.browse(cr, uid, notif_ids, context=context): res[notif.message_id.id] = True @@ -96,8 +96,8 @@ class mail_message(osv.Model): def _search_to_read(self, cr, uid, obj, name, domain, context=None): """ Search for messages to read by the current user. Condition is - inversed because we search unread message on a read column. """ - return ['&', ('notification_ids.partner_id.user_ids', 'in', [uid]), ('notification_ids.read', '=', not domain[0][2])] + inversed because we search unread message on a is_read column. """ + return ['&', ('notification_ids.partner_id.user_ids', 'in', [uid]), ('notification_ids.is_read', '=', not domain[0][2])] def _get_starred(self, cr, uid, ids, name, arg, context=None): """ Compute if the message is unread by the current user. """ @@ -114,8 +114,7 @@ class mail_message(osv.Model): return res def _search_starred(self, cr, uid, obj, name, domain, context=None): - """ Search for messages to read by the current user. Condition is - inversed because we search unread message on a read column. """ + """ Search for starred messages by the current user.""" return ['&', ('notification_ids.partner_id.user_ids', 'in', [uid]), ('notification_ids.starred', '=', domain[0][2])] _columns = { @@ -153,7 +152,7 @@ class mail_message(osv.Model): help='Technical field holding the message notifications. 
Use notified_partner_ids to access notified partners.'), 'subject': fields.char('Subject'), 'date': fields.datetime('Date'), - 'message_id': fields.char('Message-Id', help='Message unique identifier', select=1, readonly=1), + 'message_id': fields.char('Message-Id', help='Message unique identifier', select=1, readonly=1, copy=False), 'body': fields.html('Contents', help='Automatically sanitized HTML contents'), 'to_read': fields.function(_get_to_read, fnct_search=_search_to_read, type='boolean', string='To read', @@ -213,8 +212,9 @@ class mail_message(osv.Model): def download_attachment(self, cr, uid, id_message, attachment_id, context=None): """ Return the content of linked attachments. """ - message = self.browse(cr, uid, id_message, context=context) - if attachment_id in [attachment.id for attachment in message.attachment_ids]: + # this will fail if you cannot read the message + message_values = self.read(cr, uid, [id_message], ['attachment_ids'], context=context)[0] + if attachment_id in message_values['attachment_ids']: attachment = self.pool.get('ir.attachment').browse(cr, SUPERUSER_ID, attachment_id, context=context) if attachment.datas and attachment.datas_fname: return { @@ -227,6 +227,7 @@ class mail_message(osv.Model): # Notification API #------------------------------------------------------ + @api.cr_uid_ids_context def set_message_read(self, cr, uid, msg_ids, read, create_missing=True, context=None): """ Set messages as (un)read. Technically, the notifications related to uid are set to (un)read. 
If for some msg_ids there are missing @@ -243,22 +244,23 @@ class mail_message(osv.Model): user_pid = self.pool['res.users'].browse(cr, SUPERUSER_ID, uid, context=context).partner_id.id domain = [('partner_id', '=', user_pid), ('message_id', 'in', msg_ids)] if not create_missing: - domain += [('read', '=', not read)] + domain += [('is_read', '=', not read)] notif_ids = notification_obj.search(cr, uid, domain, context=context) # all message have notifications: already set them as (un)read if len(notif_ids) == len(msg_ids) or not create_missing: - notification_obj.write(cr, uid, notif_ids, {'read': read}, context=context) + notification_obj.write(cr, uid, notif_ids, {'is_read': read}, context=context) return len(notif_ids) # some messages do not have notifications: find which one, create notification, update read status notified_msg_ids = [notification.message_id.id for notification in notification_obj.browse(cr, uid, notif_ids, context=context)] to_create_msg_ids = list(set(msg_ids) - set(notified_msg_ids)) for msg_id in to_create_msg_ids: - notification_obj.create(cr, uid, {'partner_id': user_pid, 'read': read, 'message_id': msg_id}, context=context) - notification_obj.write(cr, uid, notif_ids, {'read': read}, context=context) + notification_obj.create(cr, uid, {'partner_id': user_pid, 'is_read': read, 'message_id': msg_id}, context=context) + notification_obj.write(cr, uid, notif_ids, {'is_read': read}, context=context) return len(notif_ids) + @api.cr_uid_ids_context def set_message_starred(self, cr, uid, msg_ids, starred, create_missing=True, context=None): """ Set messages as (un)starred. Technically, the notifications related to uid are set to (un)starred. 
@@ -276,7 +278,7 @@ class mail_message(osv.Model): 'starred': starred } if starred: - values['read'] = False + values['is_read'] = False notif_ids = notification_obj.search(cr, uid, domain, context=context) @@ -502,6 +504,7 @@ class mail_message(osv.Model): return True + @api.cr_uid_context def message_read(self, cr, uid, ids=None, domain=None, message_unload_ids=None, thread_level=0, context=None, parent_id=False, limit=None): """ Read messages from mail.message, and get back a list of structured @@ -671,7 +674,7 @@ class mail_message(osv.Model): - uid has write or create access on the related document if model, res_id - otherwise: raise """ - def _generate_model_record_ids(msg_val, msg_ids=[]): + def _generate_model_record_ids(msg_val, msg_ids): """ :param model_record_ids: {'model': {'res_id': (msg_id, msg_id)}, ... } :param message_values: {'msg_id': {'model': .., 'res_id': .., 'author_id': ..}} """ @@ -782,8 +785,7 @@ class mail_message(osv.Model): return message_id def create(self, cr, uid, values, context=None): - if context is None: - context = {} + context = dict(context or {}) default_starred = context.pop('default_starred', False) if 'email_from' not in values: # needed to compute reply_to @@ -830,13 +832,6 @@ class mail_message(osv.Model): self.pool.get('ir.attachment').unlink(cr, uid, attachments_to_delete, context=context) return super(mail_message, self).unlink(cr, uid, ids, context=context) - def copy(self, cr, uid, id, default=None, context=None): - """ Overridden to avoid duplicating fields that are unique to each email """ - if default is None: - default = {} - default.update(message_id=False, headers=False) - return super(mail_message, self).copy(cr, uid, id, default=default, context=context) - #------------------------------------------------------ # Messaging API #------------------------------------------------------ @@ -888,5 +883,5 @@ class mail_message(osv.Model): notification_obj.create(cr, uid, { 'message_id': message.parent_id.id, 
'partner_id': partner.id, - 'read': True, + 'is_read': True, }, context=context) diff --git a/addons/mail/mail_thread.py b/addons/mail/mail_thread.py index 54f2b8d8f8ba509f6f98ecec6193e11978c5c9aa..ca04059724056cdf7d14249778dafcfed91b10e1 100644 --- a/addons/mail/mail_thread.py +++ b/addons/mail/mail_thread.py @@ -39,11 +39,11 @@ import re from email.message import Message from urllib import urlencode -from openerp import tools +from openerp import api, tools from openerp import SUPERUSER_ID from openerp.addons.mail.mail_message import decode from openerp.osv import fields, osv, orm -from openerp.osv.orm import browse_record, browse_null +from openerp.osv.orm import BaseModel from openerp.tools.safe_eval import safe_eval as eval from openerp.tools.translate import _ @@ -154,12 +154,12 @@ class mail_thread(osv.AbstractModel): - message_unread: has uid unread message for the document - message_summary: html snippet summarizing the Chatter for kanban views """ res = dict((id, dict(message_unread=False, message_unread_count=0, message_summary=' ')) for id in ids) - user_pid = self.pool.get('res.users').read(cr, uid, uid, ['partner_id'], context=context)['partner_id'][0] + user_pid = self.pool.get('res.users').read(cr, uid, [uid], ['partner_id'], context=context)[0]['partner_id'][0] # search for unread messages, directly in SQL to improve performances cr.execute(""" SELECT m.res_id FROM mail_message m RIGHT JOIN mail_notification n - ON (n.message_id = m.id AND n.partner_id = %s AND (n.read = False or n.read IS NULL)) + ON (n.message_id = m.id AND n.partner_id = %s AND (n.is_read = False or n.is_read IS NULL)) WHERE m.model = %s AND m.res_id in %s""", (user_pid, self._name, tuple(ids),)) for result in cr.fetchall(): @@ -192,7 +192,7 @@ class mail_thread(osv.AbstractModel): available, which are followed if any """ res = dict((id, dict(message_subtype_data='')) for id in ids) if user_pid is None: - user_pid = self.pool.get('res.users').read(cr, uid, uid, ['partner_id'], 
context=context)['partner_id'][0] + user_pid = self.pool.get('res.users').read(cr, uid, [uid], ['partner_id'], context=context)[0]['partner_id'][0] # find current model subtypes, add them to a dictionary subtype_obj = self.pool.get('mail.message.subtype') @@ -232,7 +232,7 @@ class mail_thread(osv.AbstractModel): fol_obj = self.pool.get('mail.followers') fol_ids = fol_obj.search(cr, SUPERUSER_ID, [('res_model', '=', self._name), ('res_id', 'in', ids)]) res = dict((id, dict(message_follower_ids=[], message_is_follower=False)) for id in ids) - user_pid = self.pool.get('res.users').read(cr, uid, uid, ['partner_id'], context=context)['partner_id'][0] + user_pid = self.pool.get('res.users').read(cr, uid, [uid], ['partner_id'], context=context)[0]['partner_id'][0] for fol in fol_obj.browse(cr, SUPERUSER_ID, fol_ids): res[fol.res_id]['message_follower_ids'].append(fol.partner_id.id) if fol.partner_id.id == user_pid: @@ -448,10 +448,6 @@ class mail_thread(osv.AbstractModel): def copy_data(self, cr, uid, id, default=None, context=None): # avoid tracking multiple temporary changes during copy context = dict(context or {}, mail_notrack=True) - - default = default or {} - default['message_ids'] = [] - default['message_follower_ids'] = [] return super(mail_thread, self).copy_data(cr, uid, id, default=default, context=context) #------------------------------------------------------ @@ -621,7 +617,7 @@ class mail_thread(osv.AbstractModel): # default action is the Inbox action self.pool.get('res.users').browse(cr, SUPERUSER_ID, uid, context=context) act_model, act_id = self.pool.get('ir.model.data').get_object_reference(cr, uid, *self._get_inbox_action_xml_id(cr, uid, context=context)) - action = self.pool.get(act_model).read(cr, uid, act_id, []) + action = self.pool.get(act_model).read(cr, uid, [act_id], [])[0] params = context.get('params') msg_id = model = res_id = None @@ -1059,11 +1055,12 @@ class mail_thread(osv.AbstractModel): def message_route_process(self, cr, uid, 
message, message_dict, routes, context=None): # postpone setting message_dict.partner_ids after message_post, to avoid double notifications + context = dict(context or {}) partner_ids = message_dict.pop('partner_ids', []) thread_id = False for model, thread_id, custom_values, user_id, alias in routes: if self._name == 'mail.thread': - context.update({'thread_model': model}) + context['thread_model'] = model if model: model_pool = self.pool[model] if not (thread_id and hasattr(model_pool, 'message_update') or hasattr(model_pool, 'message_new')): @@ -1508,6 +1505,7 @@ class mail_thread(osv.AbstractModel): m2m_attachment_ids.append((0, 0, data_attach)) return m2m_attachment_ids + @api.cr_uid_ids_context def message_post(self, cr, uid, thread_id, body='', subject=None, type='notification', subtype=None, parent_id=False, attachments=None, context=None, content_subtype='html', **kwargs): @@ -1807,10 +1805,8 @@ class mail_thread(osv.AbstractModel): record = self.browse(cr, uid, ids[0], context=context) for updated_field in updated_fields: field_value = getattr(record, updated_field) - if isinstance(field_value, browse_record): + if isinstance(field_value, BaseModel): field_value = field_value.id - elif isinstance(field_value, browse_null): - field_value = False values[updated_field] = field_value # find followers of headers, update structure for new followers @@ -1870,11 +1866,12 @@ class mail_thread(osv.AbstractModel): partner_id = self.pool.get('res.users').browse(cr, uid, uid, context=context).partner_id.id cr.execute(''' UPDATE mail_notification SET - read=false + is_read=false WHERE message_id IN (SELECT id from mail_message where res_id=any(%s) and model=%s limit 1) and partner_id = %s ''', (ids, self._name, partner_id)) + self.pool.get('mail.notification').invalidate_cache(cr, uid, ['is_read'], context=context) return True def message_mark_as_read(self, cr, uid, ids, context=None): @@ -1882,11 +1879,12 @@ class mail_thread(osv.AbstractModel): partner_id = 
self.pool.get('res.users').browse(cr, uid, uid, context=context).partner_id.id cr.execute(''' UPDATE mail_notification SET - read=true + is_read=true WHERE message_id IN (SELECT id FROM mail_message WHERE res_id=ANY(%s) AND model=%s) AND partner_id = %s ''', (ids, self._name, partner_id)) + self.pool.get('mail.notification').invalidate_cache(cr, uid, ['is_read'], context=context) return True #------------------------------------------------------ diff --git a/addons/mail/res_users.py b/addons/mail/res_users.py index e218ea8592786997cfcd19304bc4562a2052eed5..a4b4e68942b1130f9d1c4a390df14148cc266a0f 100644 --- a/addons/mail/res_users.py +++ b/addons/mail/res_users.py @@ -39,7 +39,7 @@ class res_users(osv.Model): _columns = { 'alias_id': fields.many2one('mail.alias', 'Alias', ondelete="restrict", required=True, help="Email address internally associated with this user. Incoming "\ - "emails will appear in the user's notifications."), + "emails will appear in the user's notifications.", copy=False), 'display_groups_suggestions': fields.boolean("Display Groups Suggestions"), } diff --git a/addons/mail/static/scripts/__init__.py b/addons/mail/static/scripts/__init__.py deleted file mode 100644 index 3b6a8c7e4b09b278586f0915ea05c81c85527e3f..0000000000000000000000000000000000000000 --- a/addons/mail/static/scripts/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -############################################################################## -# -# OpenERP, Open Source Management Solution -# Copyright (C) 2009-2010 OpenERP SA (<http://www.openerp.com>). -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU Affero General Public License as -# published by the Free Software Foundation, either version 3 of the -# License, or (at your option) any later version. 
-# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Affero General Public License for more details. -# -# You should have received a copy of the GNU Affero General Public License -# along with this program. If not, see <http://www.gnu.org/licenses/>. -# -############################################################################## - -import openerp_mailgate \ No newline at end of file diff --git a/addons/mail/static/scripts/openerp_mailgate.py b/addons/mail/static/scripts/openerp_mailgate.py index 99ebc8f770b90772ed8e556c48f6d810c9068f21..e3dbdf4adc064b05d2f9b0e14c17ff14a1c394e0 100755 --- a/addons/mail/static/scripts/openerp_mailgate.py +++ b/addons/mail/static/scripts/openerp_mailgate.py @@ -30,10 +30,10 @@ import optparse import sys import xmlrpclib import smtplib -from email.MIMEMultipart import MIMEMultipart -from email.MIMEBase import MIMEBase -from email.MIMEText import MIMEText -from email.Utils import COMMASPACE, formatdate +from email.mime.multipart import MIMEMultipart +from email.mime.base import MIMEBase +from email.mime.text import MIMEText +from email.utils import COMMASPACE, formatdate from email import Encoders class DefaultConfig(object): diff --git a/addons/mail/static/scripts/openerp_mailgate/__init__.py b/addons/mail/static/scripts/openerp_mailgate/__init__.py deleted file mode 100644 index de2e2bf390d887bf17056c7bbf5bf5cfb5f7fde6..0000000000000000000000000000000000000000 --- a/addons/mail/static/scripts/openerp_mailgate/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -############################################################################## -# -# OpenERP, Open Source Management Solution -# Copyright (C) 2009-2010 OpenERP SA (<http://www.openerp.com>). 
-# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU Affero General Public License as -# published by the Free Software Foundation, either version 3 of the -# License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Affero General Public License for more details. -# -# You should have received a copy of the GNU Affero General Public License -# along with this program. If not, see <http://www.gnu.org/licenses/>. -# -############################################################################## - -import openerp_mailgate diff --git a/addons/mail/static/src/js/mail_followers.js b/addons/mail/static/src/js/mail_followers.js index bf83fe6d7c6b93e8396f7e1feb372218262f67da..bd84dac714d30a7c51c8675bbd72a760115d9d36 100644 --- a/addons/mail/static/src/js/mail_followers.js +++ b/addons/mail/static/src/js/mail_followers.js @@ -172,8 +172,8 @@ openerp_mail_followers = function(session, mail) { fetch_generic: function (error, event) { var self = this; event.preventDefault(); - return this.ds_users.call('read', [this.session.uid, ['partner_id']]).then(function (results) { - var pid = results['partner_id'][0]; + return this.ds_users.call('read', [[this.session.uid], ['partner_id']]).then(function (results) { + var pid = results[0]['partner_id'][0]; self.message_is_follower = (_.indexOf(self.value, pid) != -1); }).then(self.proxy('display_generic')); }, diff --git a/addons/mail/tests/common.py b/addons/mail/tests/common.py index d53a8078c96b1c698f9ffa5815a3e4c609ea4bcd..5a5c28f95d7bdb2f12d3fcc5b3fbe4da5f9130da 100644 --- a/addons/mail/tests/common.py +++ b/addons/mail/tests/common.py @@ -24,30 +24,28 @@ from openerp.tests import common class TestMail(common.TransactionCase): - def _mock_smtp_gateway(self, *args, **kwargs): - return 
args[2]['Message-Id'] - def _init_mock_build_email(self): self._build_email_args_list = [] self._build_email_kwargs_list = [] - def _mock_build_email(self, *args, **kwargs): - """ Mock build_email to be able to test its values. Store them into - some internal variable for latter processing. """ - self._build_email_args_list.append(args) - self._build_email_kwargs_list.append(kwargs) - return self._build_email(*args, **kwargs) - def setUp(self): super(TestMail, self).setUp() cr, uid = self.cr, self.uid # Install mock SMTP gateway + test = self + + def build_email(self, *args, **kwargs): + test._build_email_args_list.append(args) + test._build_email_kwargs_list.append(kwargs) + return build_email.origin(self, *args, **kwargs) + + def send_email(self, cr, uid, message, *args, **kwargs): + return message['Message-Id'] + self._init_mock_build_email() - self._build_email = self.registry('ir.mail_server').build_email - self.registry('ir.mail_server').build_email = self._mock_build_email - self._send_email = self.registry('ir.mail_server').send_email - self.registry('ir.mail_server').send_email = self._mock_smtp_gateway + self.registry('ir.mail_server')._patch_method('build_email', build_email) + self.registry('ir.mail_server')._patch_method('send_email', send_email) # Usefull models self.ir_model = self.registry('ir.model') @@ -129,6 +127,6 @@ class TestMail(common.TransactionCase): def tearDown(self): # Remove mocks - self.registry('ir.mail_server').build_email = self._build_email - self.registry('ir.mail_server').send_email = self._send_email + self.registry('ir.mail_server')._revert_method('build_email') + self.registry('ir.mail_server')._revert_method('send_email') super(TestMail, self).tearDown() diff --git a/addons/mail/tests/test_mail_features.py b/addons/mail/tests/test_mail_features.py index 2bf7bb912649a2dabfcb9016ae42d5de4cbfedbc..fdd87bbcfab5014d4a598854829e8774695a995b 100644 --- a/addons/mail/tests/test_mail_features.py +++ 
b/addons/mail/tests/test_mail_features.py @@ -280,7 +280,7 @@ class test_mail(TestMail): self.assertNotIn('res_id=%s' % group_pigs.id, url, 'notification email: link based on message should not contain res_id') - @mute_logger('openerp.addons.mail.mail_thread', 'openerp.osv.orm') + @mute_logger('openerp.addons.mail.mail_thread', 'openerp.models') def test_12_inbox_redirection(self): """ Tests designed to test the inbox redirection of emails notification URLs. """ cr, uid, user_admin, group_pigs = self.cr, self.uid, self.user_admin, self.group_pigs @@ -654,7 +654,7 @@ class test_mail(TestMail): { 'attachment_ids': [(0, 0, _attachments[0]), (0, 0, _attachments[1])] }, context={ - 'default_composition_mode': 'reply', + 'default_composition_mode': 'comment', 'default_res_id': self.group_pigs_id, 'default_parent_id': message.id }) @@ -772,14 +772,13 @@ class test_mail(TestMail): def test_30_needaction(self): """ Tests for mail.message needaction. """ cr, uid, user_admin, user_raoul, group_pigs = self.cr, self.uid, self.user_admin, self.user_raoul, self.group_pigs - group_pigs_demo = self.mail_group.browse(cr, self.user_raoul_id, self.group_pigs_id) na_admin_base = self.mail_message._needaction_count(cr, uid, domain=[]) na_demo_base = self.mail_message._needaction_count(cr, user_raoul.id, domain=[]) # Test: number of unread notification = needaction on mail.message notif_ids = self.mail_notification.search(cr, uid, [ ('partner_id', '=', user_admin.partner_id.id), - ('read', '=', False) + ('is_read', '=', False) ]) na_count = self.mail_message._needaction_count(cr, uid, domain=[]) self.assertEqual(len(notif_ids), na_count, 'unread notifications count does not match needaction count') @@ -787,13 +786,14 @@ class test_mail(TestMail): # Do: post 2 message on group_pigs as admin, 3 messages as demo user for dummy in range(2): group_pigs.message_post(body='My Body', subtype='mt_comment') + raoul_pigs = group_pigs.sudo(user_raoul) for dummy in range(3): - 
group_pigs_demo.message_post(body='My Demo Body', subtype='mt_comment') + raoul_pigs.message_post(body='My Demo Body', subtype='mt_comment') # Test: admin has 3 new notifications (from demo), and 3 new needaction notif_ids = self.mail_notification.search(cr, uid, [ ('partner_id', '=', user_admin.partner_id.id), - ('read', '=', False) + ('is_read', '=', False) ]) self.assertEqual(len(notif_ids), na_admin_base + 3, 'Admin should have 3 new unread notifications') na_admin = self.mail_message._needaction_count(cr, uid, domain=[]) @@ -803,7 +803,7 @@ class test_mail(TestMail): # Test: demo has 0 new notifications (not a follower, not receiving its own messages), and 0 new needaction notif_ids = self.mail_notification.search(cr, uid, [ ('partner_id', '=', user_raoul.partner_id.id), - ('read', '=', False) + ('is_read', '=', False) ]) self.assertEqual(len(notif_ids), na_demo_base + 0, 'Demo should have 0 new unread notifications') na_demo = self.mail_message._needaction_count(cr, user_raoul.id, domain=[]) diff --git a/addons/mail/tests/test_mail_gateway.py b/addons/mail/tests/test_mail_gateway.py index 3fbf3d40405eb035e3c2c6b047fa5ae35bcfc6a6..1b16f151cdfa625b5cbca3f49f47044a7d21e39d 100644 --- a/addons/mail/tests/test_mail_gateway.py +++ b/addons/mail/tests/test_mail_gateway.py @@ -164,7 +164,7 @@ class TestMailgateway(TestMail): self.assertIn('<div dir="ltr">Should create a multipart/mixed: from gmail, <b>bold</b>, with attachment.<br clear="all"><div><br></div>', res.get('body', ''), 'message_parse: html version should be in body after parsing multipart/mixed') - @mute_logger('openerp.addons.mail.mail_thread', 'openerp.osv.orm') + @mute_logger('openerp.addons.mail.mail_thread', 'openerp.models') def test_10_message_process(self): """ Testing incoming emails processing. 
""" cr, uid, user_raoul = self.cr, self.uid, self.user_raoul @@ -203,8 +203,8 @@ class TestMailgateway(TestMail): # Test: one group created by mailgateway administrator self.assertEqual(len(frog_groups), 1, 'message_process: a new mail.group should have been created') frog_group = self.mail_group.browse(cr, uid, frog_groups[0]) - res = self.mail_group.perm_read(cr, uid, [frog_group.id], details=False) - self.assertEqual(res[0].get('create_uid'), uid, + res = self.mail_group.get_metadata(cr, uid, [frog_group.id])[0].get('create_uid') or [None] + self.assertEqual(res[0], uid, 'message_process: group should have been created by uid as alias_user__id is False on the alias') # Test: one message that is the incoming email self.assertEqual(len(frog_group.message_ids), 1, @@ -271,8 +271,8 @@ class TestMailgateway(TestMail): # Test: one group created by Raoul self.assertEqual(len(frog_groups), 1, 'message_process: a new mail.group should have been created') frog_group = self.mail_group.browse(cr, uid, frog_groups[0]) - res = self.mail_group.perm_read(cr, uid, [frog_group.id], details=False) - self.assertEqual(res[0].get('create_uid'), self.user_raoul_id, + res = self.mail_group.get_metadata(cr, uid, [frog_group.id])[0].get('create_uid') or [None] + self.assertEqual(res[0], self.user_raoul_id, 'message_process: group should have been created by alias_user_id') # Test: one message that is the incoming email self.assertEqual(len(frog_group.message_ids), 1, @@ -536,7 +536,7 @@ class TestMailgateway(TestMail): self.assertIn('<pre>\nPlease call me as soon as possible this afternoon!\n\n--\nSylvie\n</pre>', msg.body, 'message_process: plaintext incoming email incorrectly parsed') - @mute_logger('openerp.addons.mail.mail_thread', 'openerp.osv.orm') + @mute_logger('openerp.addons.mail.mail_thread', 'openerp.models') def test_20_thread_parent_resolution(self): """ Testing parent/child relationships are correctly established when processing incoming mails """ cr, uid = self.cr, 
self.uid diff --git a/addons/mail/tests/test_mail_group.py b/addons/mail/tests/test_mail_group.py index 34c7f0c00e07763cbef8189a4e681719a696d018..c131ce0bd0b309b4a06c0258517dabed119d9ec2 100644 --- a/addons/mail/tests/test_mail_group.py +++ b/addons/mail/tests/test_mail_group.py @@ -27,7 +27,7 @@ from openerp.tools import mute_logger class TestMailGroup(TestMail): - @mute_logger('openerp.addons.base.ir.ir_model', 'openerp.osv.orm') + @mute_logger('openerp.addons.base.ir.ir_model', 'openerp.models') def test_00_mail_group_access_rights(self): """ Testing mail_group access rights and basic mail_thread features """ cr, uid, user_noone_id, user_employee_id = self.cr, self.uid, self.user_noone_id, self.user_employee_id diff --git a/addons/mail/tests/test_mail_message.py b/addons/mail/tests/test_mail_message.py index d4a7ae75df37a104e7d751653ed0bcc5e03fa77b..40d6786e2c930755794bb975aa012abb2d6a7e9a 100644 --- a/addons/mail/tests/test_mail_message.py +++ b/addons/mail/tests/test_mail_message.py @@ -161,7 +161,7 @@ class TestMailMessage(TestMail): self.assertEqual(mail.reply_to, 'someone@example.com', 'mail_mail: reply_to should equal the rpely_to given to create') - @mute_logger('openerp.addons.base.ir.ir_model', 'openerp.osv.orm') + @mute_logger('openerp.addons.base.ir.ir_model', 'openerp.models') def test_10_mail_message_search_access_rights(self): """ Testing mail_message.search() using specific _search implementation """ cr, uid, group_pigs_id = self.cr, self.uid, self.group_pigs_id @@ -196,7 +196,7 @@ class TestMailMessage(TestMail): msg_ids = self.mail_message.search(cr, uid, [('subject', 'like', '_Test')]) self.assertEqual(set([msg_id1, msg_id2, msg_id3, msg_id4, msg_id5, msg_id6, msg_id7, msg_id8]), set(msg_ids), 'mail_message search failed') - @mute_logger('openerp.addons.base.ir.ir_model', 'openerp.osv.orm') + @mute_logger('openerp.addons.base.ir.ir_model', 'openerp.models') def test_15_mail_message_check_access_rule(self): """ Testing 
mail_message.check_access_rule() """ cr, uid = self.cr, self.uid @@ -218,8 +218,8 @@ class TestMailMessage(TestMail): message_id = self.mail_message.create(cr, uid, {'body': 'My Body', 'attachment_ids': [(4, attachment_id)]}) # Test: Bert reads the message, crash because not notification/not in doc followers/not read on doc - self.assertRaises(except_orm, self.mail_message.read, - cr, user_bert_id, message_id) + with self.assertRaises(except_orm): + self.mail_message.read(cr, user_bert_id, message_id) # Do: message is pushed to Bert notif_id = self.mail_notification.create(cr, uid, {'message_id': message_id, 'partner_id': partner_bert_id}) # Test: Bert reads the message, ok because notification pushed @@ -229,11 +229,11 @@ class TestMailMessage(TestMail): # Do: remove notification self.mail_notification.unlink(cr, uid, notif_id) # Test: Bert reads the message, crash because not notification/not in doc followers/not read on doc - self.assertRaises(except_orm, self.mail_message.read, - cr, self.user_bert_id, message_id) + with self.assertRaises(except_orm): + self.mail_message.read(cr, self.user_bert_id, message_id) # Test: Bert downloads attachment, crash because he can't read message - self.assertRaises(except_orm, self.mail_message.download_attachment, - cr, user_bert_id, message_id, attachment_id) + with self.assertRaises(except_orm): + self.mail_message.download_attachment(cr, user_bert_id, message_id, attachment_id) # Do: Bert is now the author self.mail_message.write(cr, uid, [message_id], {'author_id': partner_bert_id}) # Test: Bert reads the message, ok because Bert is the author @@ -241,8 +241,8 @@ class TestMailMessage(TestMail): # Do: Bert is not the author anymore self.mail_message.write(cr, uid, [message_id], {'author_id': partner_raoul_id}) # Test: Bert reads the message, crash because not notification/not in doc followers/not read on doc - self.assertRaises(except_orm, self.mail_message.read, - cr, user_bert_id, message_id) + with 
self.assertRaises(except_orm): + self.mail_message.read(cr, user_bert_id, message_id) # Do: message is attached to a document Bert can read, Jobs self.mail_message.write(cr, uid, [message_id], {'model': 'mail.group', 'res_id': self.group_jobs_id}) # Test: Bert reads the message, ok because linked to a doc he is allowed to read @@ -250,33 +250,33 @@ class TestMailMessage(TestMail): # Do: message is attached to a document Bert cannot read, Pigs self.mail_message.write(cr, uid, [message_id], {'model': 'mail.group', 'res_id': self.group_pigs_id}) # Test: Bert reads the message, crash because not notification/not in doc followers/not read on doc - self.assertRaises(except_orm, self.mail_message.read, - cr, user_bert_id, message_id) + with self.assertRaises(except_orm): + self.mail_message.read(cr, user_bert_id, message_id) # ---------------------------------------- # CASE2: create # ---------------------------------------- # Do: Bert creates a message on Pigs -> ko, no creation rights - self.assertRaises(AccessError, self.mail_message.create, - cr, user_bert_id, {'model': 'mail.group', 'res_id': self.group_pigs_id, 'body': 'Test'}) + with self.assertRaises(AccessError): + self.mail_message.create(cr, user_bert_id, {'model': 'mail.group', 'res_id': self.group_pigs_id, 'body': 'Test'}) # Do: Bert create a message on Jobs -> ko, no creation rights - self.assertRaises(AccessError, self.mail_message.create, - cr, user_bert_id, {'model': 'mail.group', 'res_id': self.group_jobs_id, 'body': 'Test'}) + with self.assertRaises(AccessError): + self.mail_message.create(cr, user_bert_id, {'model': 'mail.group', 'res_id': self.group_jobs_id, 'body': 'Test'}) # Do: Bert create a private message -> ko, no creation rights - self.assertRaises(AccessError, self.mail_message.create, - cr, user_bert_id, {'body': 'Test'}) + with self.assertRaises(AccessError): + self.mail_message.create(cr, user_bert_id, {'body': 'Test'}) # Do: Raoul creates a message on Jobs -> ok, write access to the 
related document self.mail_message.create(cr, user_raoul_id, {'model': 'mail.group', 'res_id': self.group_jobs_id, 'body': 'Test'}) # Do: Raoul creates a message on Priv -> ko, no write access to the related document - self.assertRaises(except_orm, self.mail_message.create, - cr, user_raoul_id, {'model': 'mail.group', 'res_id': self.group_priv_id, 'body': 'Test'}) + with self.assertRaises(except_orm): + self.mail_message.create(cr, user_raoul_id, {'model': 'mail.group', 'res_id': self.group_priv_id, 'body': 'Test'}) # Do: Raoul creates a private message -> ok self.mail_message.create(cr, user_raoul_id, {'body': 'Test'}) # Do: Raoul creates a reply to a message on Priv -> ko - self.assertRaises(except_orm, self.mail_message.create, - cr, user_raoul_id, {'model': 'mail.group', 'res_id': self.group_priv_id, 'body': 'Test', 'parent_id': priv_msg_id}) + with self.assertRaises(except_orm): + self.mail_message.create(cr, user_raoul_id, {'model': 'mail.group', 'res_id': self.group_priv_id, 'body': 'Test', 'parent_id': priv_msg_id}) # Do: Raoul creates a reply to a message on Priv-> ok if has received parent self.mail_notification.create(cr, uid, {'message_id': priv_msg_id, 'partner_id': self.partner_raoul_id}) self.mail_message.create(cr, user_raoul_id, {'model': 'mail.group', 'res_id': self.group_priv_id, 'body': 'Test', 'parent_id': priv_msg_id}) @@ -335,7 +335,7 @@ class TestMailMessage(TestMail): self.assertEqual(len(notif_ids), 1, 'mail_message set_message_read: more than one notification created') # Test: notification read notif = self.mail_notification.browse(cr, uid, notif_ids[0]) - self.assertTrue(notif.read, 'mail_notification read failed') + self.assertTrue(notif['is_read'], 'mail_notification read failed') self.assertFalse(msg.to_read, 'mail_message read failed') # Do: Raoul reads msg @@ -346,7 +346,7 @@ class TestMailMessage(TestMail): self.assertEqual(len(notif_ids), 1, 'mail_message set_message_read: more than one notification created') # Test: notification 
read notif = self.mail_notification.browse(cr, uid, notif_ids[0]) - self.assertTrue(notif.read, 'mail_notification starred failed') + self.assertTrue(notif['is_read'], 'mail_notification starred failed') self.assertFalse(msg_raoul.to_read, 'mail_message starred failed') # Do: Admin unreads msg @@ -383,7 +383,7 @@ class TestMailMessage(TestMail): self.assertEqual(set(msg.vote_user_ids), set([self.user_raoul]), 'mail_message vote: after unvoting, Bert should be in the voter') self.assertEqual(set(msg_raoul.vote_user_ids), set([self.user_raoul]), 'mail_message vote: after unvoting, Bert should be in the voter') - @mute_logger('openerp.addons.base.ir.ir_model', 'openerp.osv.orm') + @mute_logger('openerp.addons.base.ir.ir_model', 'openerp.models') def test_50_mail_flow_access_rights(self): """ Test a Chatter-looks alike flow to test access rights """ cr, uid = self.cr, self.uid @@ -400,9 +400,9 @@ class TestMailMessage(TestMail): # ---------------------------------------- # Do: Bert reads Jobs basic fields, ok because public = read access on the group - self.mail_group.read(cr, user_bert_id, self.group_jobs_id, ['name', 'description']) + self.mail_group.read(cr, user_bert_id, [self.group_jobs_id], ['name', 'description']) # Do: Bert reads Jobs messages, ok because read access on the group => read access on its messages - jobs_message_ids = self.mail_group.read(cr, user_bert_id, self.group_jobs_id, ['message_ids'])['message_ids'] + jobs_message_ids = self.mail_group.read(cr, user_bert_id, [self.group_jobs_id], ['message_ids'])[0]['message_ids'] self.mail_message.read(cr, user_bert_id, jobs_message_ids) # Do: Bert browses Jobs, ok (no direct browse of partners), ok for messages, ko for followers (accessible to employees or partner manager) bert_jobs = self.mail_group.browse(cr, user_bert_id, self.group_jobs_id) @@ -413,9 +413,8 @@ class TestMailMessage(TestMail): with self.assertRaises(AccessError): trigger_read = partner.name # Do: Bert comments Jobs, ko because no 
creation right - self.assertRaises(AccessError, - self.mail_group.message_post, - cr, user_bert_id, self.group_jobs_id, body='I love Pigs') + with self.assertRaises(AccessError): + self.mail_group.message_post(cr, user_bert_id, self.group_jobs_id, body='I love Pigs') # Do: Bert writes on its own profile, ko because no message create access with self.assertRaises(AccessError): @@ -444,5 +443,5 @@ class TestMailMessage(TestMail): # Do: Raoul replies to a Jobs message using the composer compose_id = mail_compose.create(cr, user_raoul_id, {'subject': 'Subject', 'body': 'Body text'}, - {'default_composition_mode': 'reply', 'default_parent_id': pigs_msg_id}) + {'default_composition_mode': 'comment', 'default_parent_id': pigs_msg_id}) mail_compose.send_mail(cr, user_raoul_id, [compose_id]) diff --git a/addons/mail/tests/test_message_read.py b/addons/mail/tests/test_message_read.py index c02e9a322789870e1e433304d3a4cc25facad127..fbf30f2ed22e1881eec28a2d221869442fc7a186 100644 --- a/addons/mail/tests/test_message_read.py +++ b/addons/mail/tests/test_message_read.py @@ -46,7 +46,7 @@ class test_mail_access_rights(TestMail): ordered_msg_ids = [msg_id2, msg_id4, msg_id6, msg_id8, msg_id10, msg_id1, msg_id3, msg_id5, msg_id7, msg_id9, msg_id0] # Test: raoul received notifications - raoul_notification_ids = self.mail_notification.search(cr, user_raoul.id, [('read', '=', False), ('message_id', 'in', msg_ids), ('partner_id', '=', user_raoul.partner_id.id)]) + raoul_notification_ids = self.mail_notification.search(cr, user_raoul.id, [('is_read', '=', False), ('message_id', 'in', msg_ids), ('partner_id', '=', user_raoul.partner_id.id)]) self.assertEqual(len(raoul_notification_ids), 11, 'message_post: wrong number of produced notifications') # Test: read some specific ids diff --git a/addons/mail/update.py b/addons/mail/update.py index d2873f3a5bfc9f2328b7c1ddc455a9d218d1ba28..a76c54ff2da5c58f9c77db9c687c40468b41431f 100644 --- a/addons/mail/update.py +++ b/addons/mail/update.py @@ 
-59,7 +59,7 @@ def get_sys_logs(self, cr, uid): add_arg = {"timeout":30} if sys.version_info >= (2,6) else {} arguments = {'arg0': msg, "action": "update",} - arguments_raw = werkzeug.url_encode(arguments) + arguments_raw = werkzeug.urls.url_encode(arguments) url = config.get("publisher_warranty_url") diff --git a/addons/mail/wizard/mail_compose_message.py b/addons/mail/wizard/mail_compose_message.py index 0425cd1ee0e93598a76c9072f125ef3a389cc077..41dc4a9b4384caec83daa0286a19bdfd0d505a3f 100644 --- a/addons/mail/wizard/mail_compose_message.py +++ b/addons/mail/wizard/mail_compose_message.py @@ -196,8 +196,7 @@ class mail_compose_message(osv.TransientModel): def send_mail(self, cr, uid, ids, context=None): """ Process the wizard content and proceed with sending the related email(s), rendering any template patterns on the fly if needed. """ - if context is None: - context = {} + context = dict(context or {}) # clean the context (hint: mass mailing sets some default values that # could be wrongly interpreted by mail_mail) diff --git a/addons/marketing_campaign/marketing_campaign.py b/addons/marketing_campaign/marketing_campaign.py index 469750ddc5f7a91ca39180468c248cccbbc6f598..e8c2d68e6604e7183324a755f7002cc7d460560a 100644 --- a/addons/marketing_campaign/marketing_campaign.py +++ b/addons/marketing_campaign/marketing_campaign.py @@ -31,6 +31,7 @@ from openerp.tools.safe_eval import safe_eval as eval import re from openerp.addons.decimal_precision import decimal_precision as dp +from openerp import api from openerp.osv import fields, osv from openerp.report import render_report from openerp.tools.translate import _ @@ -44,43 +45,6 @@ _intervalTypes = { DT_FMT = '%Y-%m-%d %H:%M:%S' -def dict_map(f, d): - return dict((k, f(v)) for k,v in d.items()) - -def _find_fieldname(model, field): - inherit_columns = dict_map(itemgetter(2), model._inherit_fields) - all_columns = dict(inherit_columns, **model._columns) - for fn in all_columns: - if all_columns[fn] is field: - 
return fn - raise ValueError('Field not found: %r' % (field,)) - -class selection_converter(object): - """Format the selection in the browse record objects""" - def __init__(self, value): - self._value = value - self._str = value - - def set_value(self, cr, uid, _self_again, record, field, lang): - # this design is terrible - # search fieldname from the field - fieldname = _find_fieldname(record._table, field) - context = dict(lang=lang.code) - fg = record._table.fields_get(cr, uid, [fieldname], context=context) - selection = dict(fg[fieldname]['selection']) - self._str = selection[self.value] - - @property - def value(self): - return self._value - - def __str__(self): - return self._str - -translate_selections = { - 'selection': selection_converter, -} - class marketing_campaign(osv.osv): _name = "marketing.campaign" @@ -126,7 +90,7 @@ Normal - the campaign runs normally and automatically sends all emails and repor ('running', 'Running'), ('cancelled', 'Cancelled'), ('done', 'Done')], - 'Status'), + 'Status', copy=False), 'activity_ids': fields.one2many('marketing.campaign.activity', 'campaign_id', 'Activities'), 'fixed_cost': fields.float('Fixed Cost', help="Fixed cost for running this campaign. You may also specify variable cost and revenue on each campaign activity. 
Cost and Revenue statistics are included in Campaign Reporting.", digits_compute=dp.get_precision('Product Price')), @@ -186,7 +150,7 @@ Normal - the campaign runs normally and automatically sends all emails and repor raise ValueError('Signal cannot be False.') Workitems = self.pool.get('marketing.campaign.workitem') - domain = [('object_id.model', '=', record._table._name), + domain = [('object_id.model', '=', record._name), ('state', '=', 'running')] campaign_ids = self.search(cr, uid, domain, context=context) for campaign in self.browse(cr, uid, campaign_ids, context=context): @@ -282,7 +246,7 @@ class marketing_campaign_segment(osv.osv): ('cancelled', 'Cancelled'), ('running', 'Running'), ('done', 'Done')], - 'Status'), + 'Status', copy=False), 'date_run': fields.datetime('Launch Date', help="Initial start date of this segment."), 'date_done': fields.datetime('End Date', help="Date this segment was last closed or cancelled."), 'date_next_sync': fields.function(_get_next_sync, string='Next Synchronization', type='datetime', help="Next time the synchronization job is scheduled to run automatically"), @@ -344,6 +308,7 @@ class marketing_campaign_segment(osv.osv): self.process_segment(cr, uid, ids) return True + @api.cr_uid_ids_context def process_segment(self, cr, uid, segment_ids=None, context=None): Workitems = self.pool.get('marketing.campaign.workitem') Campaigns = self.pool.get('marketing.campaign') @@ -523,20 +488,30 @@ class marketing_campaign_transition(osv.osv): _description = "Campaign Transition" _interval_units = [ - ('hours', 'Hour(s)'), ('days', 'Day(s)'), - ('months', 'Month(s)'), ('years','Year(s)') + ('hours', 'Hour(s)'), + ('days', 'Day(s)'), + ('months', 'Month(s)'), + ('years', 'Year(s)'), ] def _get_name(self, cr, uid, ids, fn, args, context=None): - result = dict.fromkeys(ids, False) + # name formatters that depend on trigger formatters = { 'auto': _('Automatic transition'), 'time': _('After %(interval_nbr)d %(interval_type)s'), 'cosmetic': 
_('Cosmetic'), } - for tr in self.browse(cr, uid, ids, context=context, - fields_process=translate_selections): - result[tr.id] = formatters[tr.trigger.value] % tr + # get the translations of the values of selection field 'interval_type' + fields = self.fields_get(cr, uid, ['interval_type'], context=context) + interval_type_selection = dict(fields['interval_type']['selection']) + + result = dict.fromkeys(ids, False) + for trans in self.browse(cr, uid, ids, context=context): + values = { + 'interval_nbr': trans.interval_nbr, + 'interval_type': interval_type_selection.get(trans.interval_type, ''), + } + result[trans.id] = formatters[trans.trigger] % values return result @@ -656,7 +631,7 @@ class marketing_campaign_workitem(osv.osv): ('cancelled', 'Cancelled'), ('exception', 'Exception'), ('done', 'Done'), - ], 'Status', readonly=True), + ], 'Status', readonly=True, copy=False), 'error_msg' : fields.text('Error Message', readonly=True) } _defaults = { @@ -664,12 +639,14 @@ class marketing_campaign_workitem(osv.osv): 'date': False, } + @api.cr_uid_ids_context def button_draft(self, cr, uid, workitem_ids, context=None): for wi in self.browse(cr, uid, workitem_ids, context=context): if wi.state in ('exception', 'cancelled'): self.write(cr, uid, [wi.id], {'state':'todo'}, context=context) return True + @api.cr_uid_ids_context def button_cancel(self, cr, uid, workitem_ids, context=None): for wi in self.browse(cr, uid, workitem_ids, context=context): if wi.state in ('todo','exception'): @@ -698,9 +675,9 @@ class marketing_campaign_workitem(osv.osv): if condition: if not eval(condition, eval_context): if activity.keep_if_condition_not_met: - workitem.write({'state': 'cancelled'}, context=context) + workitem.write({'state': 'cancelled'}) else: - workitem.unlink(context=context) + workitem.unlink() return result = True if campaign_mode in ('manual', 'active'): @@ -711,11 +688,11 @@ class marketing_campaign_workitem(osv.osv): values = dict(state='done') if not workitem.date: 
values['date'] = datetime.now().strftime(DT_FMT) - workitem.write(values, context=context) + workitem.write(values) if result: # process _chain - workitem = workitem.browse(context=context)[0] # reload + workitem.refresh() # reload date = datetime.strptime(workitem.date, DT_FMT) for transition in activity.to_ids: @@ -760,9 +737,9 @@ class marketing_campaign_workitem(osv.osv): except Exception: tb = "".join(format_exception(*exc_info())) - workitem.write({'state': 'exception', 'error_msg': tb}, - context=context) + workitem.write({'state': 'exception', 'error_msg': tb}) + @api.cr_uid_ids_context def process(self, cr, uid, workitem_ids, context=None): for wi in self.browse(cr, uid, workitem_ids, context=context): self._process_one(cr, uid, wi, context=context) diff --git a/addons/mass_mailing/models/mass_mailing.py b/addons/mass_mailing/models/mass_mailing.py index a1077a3fffa6d218b46a6640a1ddae27b36d752c..33e66a2d12ce1fa4159fbc1d398bd607c1ea2301 100644 --- a/addons/mass_mailing/models/mass_mailing.py +++ b/addons/mass_mailing/models/mass_mailing.py @@ -343,7 +343,7 @@ class MassMailing(osv.Model): 'name': fields.char('Subject', required=True), 'email_from': fields.char('From', required=True), 'create_date': fields.datetime('Creation Date'), - 'sent_date': fields.datetime('Sent Date', oldname='date'), + 'sent_date': fields.datetime('Sent Date', oldname='date', copy=False), 'body_html': fields.html('Body'), 'attachment_ids': fields.many2many( 'ir.attachment', 'mass_mailing_ir_attachments_rel', @@ -355,7 +355,7 @@ class MassMailing(osv.Model): ), 'state': fields.selection( [('draft', 'Draft'), ('test', 'Tested'), ('done', 'Sent')], - string='Status', required=True, + string='Status', required=True, copy=False, ), 'color': fields.related( 'mass_mailing_campaign_id', 'color', @@ -460,15 +460,9 @@ class MassMailing(osv.Model): #------------------------------------------------------ def copy_data(self, cr, uid, id, default=None, context=None): - if default is None: - 
default = {} mailing = self.browse(cr, uid, id, context=context) - default.update({ - 'state': 'draft', - 'statistics_ids': [], - 'name': _('%s (duplicate)') % mailing.name, - 'sent_date': False, - }) + default = dict(default or {}, + name=_('%s (copy)') % mailing.name) return super(MassMailing, self).copy_data(cr, uid, id, default, context=context) def read_group(self, cr, uid, domain, fields, groupby, offset=0, limit=None, context=None, orderby=False, lazy=True): diff --git a/addons/membership/membership.py b/addons/membership/membership.py index 6cc1f0e0f107862d39156d4af9e683c21151ca1d..573c90269505d1034e96aa403ff0f0f4067643d9 100644 --- a/addons/membership/membership.py +++ b/addons/membership/membership.py @@ -286,21 +286,21 @@ class Partner(osv.osv): line_id = member_line_obj.search(cr, uid, [('partner', '=', partner_id),('date_cancel','=',False)], limit=1, order='date_from', context=context) if line_id: - res[partner.id]['membership_start'] = member_line_obj.read(cr, uid, line_id[0], - ['date_from'], context=context)['date_from'] + res[partner.id]['membership_start'] = member_line_obj.read(cr, uid, [line_id[0]], + ['date_from'], context=context)[0]['date_from'] if name == 'membership_stop': line_id1 = member_line_obj.search(cr, uid, [('partner', '=', partner_id),('date_cancel','=',False)], limit=1, order='date_to desc', context=context) if line_id1: - res[partner.id]['membership_stop'] = member_line_obj.read(cr, uid, line_id1[0], - ['date_to'], context=context)['date_to'] + res[partner.id]['membership_stop'] = member_line_obj.read(cr, uid, [line_id1[0]], + ['date_to'], context=context)[0]['date_to'] if name == 'membership_cancel': if partner.membership_state == 'canceled': line_id2 = member_line_obj.search(cr, uid, [('partner', '=', partner.id)], limit=1, order='date_cancel', context=context) if line_id2: - res[partner.id]['membership_cancel'] = member_line_obj.read(cr, uid, line_id2[0], ['date_cancel'], context=context)['date_cancel'] + 
res[partner.id]['membership_cancel'] = member_line_obj.read(cr, uid, [line_id2[0]], ['date_cancel'], context=context)[0]['date_cancel'] return res def _get_partners(self, cr, uid, ids, context=None): @@ -381,13 +381,6 @@ class Partner(osv.osv): (_check_recursion, 'Error ! You cannot create recursive associated members.', ['associate_member']) ] - def copy(self, cr, uid, id, default=None, context=None): - if default is None: - default = {} - default = default.copy() - default['member_lines'] = [] - return super(Partner, self).copy(cr, uid, id, default, context=context) - def create_membership_invoice(self, cr, uid, ids, product_id=None, datas=None, context=None): """ Create Customer Invoice of Membership for partners. @param datas: datas has dictionary value which consist Id of Membership product and Cost Amount of Membership. diff --git a/addons/membership/membership_demo.yml b/addons/membership/membership_demo.yml index e21d70f0fd532bbe399cb79da2138339afceb236..439666a4716ec09055561256df1e33b565a8cfd8 100644 --- a/addons/membership/membership_demo.yml +++ b/addons/membership/membership_demo.yml @@ -4,7 +4,7 @@ !python {model: res.partner}: | invoice_ids = self.create_membership_invoice(cr, uid, [ref("base.res_partner_1"),ref("base.res_partner_14"),ref("base.res_partner_24"),ref("base.res_partner_19"),ref("base.res_partner_8"),ref("base.res_partner_5"),ref("base.res_partner_21"),ref("base.res_partner_6"),ref("base.res_partner_16"),ref("base.res_partner_10")], product_id=ref("membership_1"), datas={"amount":80.00}) invoice_pool = self.pool.get('account.invoice') - invoice_pool.signal_invoice_open(cr, uid, invoice_ids) + invoice_pool.signal_workflow(cr, uid, invoice_ids, 'invoice_open') for id in invoice_ids[-4:]: pay = invoice_pool.pay_and_reconcile(cr, uid, [id], diff --git a/addons/membership/test/test_membership.yml b/addons/membership/test/test_membership.yml index 489b8b06afbf17466a6bd8d31dbbbcf5e409f889..49c4f645877bafdfae25c6f248d264411dcaa53b 100644 --- 
a/addons/membership/test/test_membership.yml +++ b/addons/membership/test/test_membership.yml @@ -40,7 +40,7 @@ membership_lines = membership_line_pool.browse(cr, uid, membership_line_ids) assert membership_lines, 'Membership is not registrated.' membership_line = membership_lines[0] - invoice_pool.signal_invoice_open(cr, uid, [membership_line.account_invoice_id.id]) + membership_line.account_invoice_id.signal_workflow('invoice_open') - | I'm checking "Current membership state" of "Seagate". It is an "Invoiced Member" or not. diff --git a/addons/mrp/mrp.py b/addons/mrp/mrp.py index d102e6720a71843f72a4a3fb6a306ac441b2d07b..73d0688a543fefda0bcd5a6a07bd783beb395814 100644 --- a/addons/mrp/mrp.py +++ b/addons/mrp/mrp.py @@ -108,7 +108,7 @@ class mrp_routing(osv.osv): 'code': fields.char('Code', size=8), 'note': fields.text('Description'), - 'workcenter_lines': fields.one2many('mrp.routing.workcenter', 'routing_id', 'Work Centers'), + 'workcenter_lines': fields.one2many('mrp.routing.workcenter', 'routing_id', 'Work Centers', copy=True), 'location_id': fields.many2one('stock.location', 'Production Location', help="Keep empty if you produce at the location where the finished products are needed." \ @@ -202,7 +202,7 @@ class mrp_bom(osv.osv): 'product_id': fields.many2one('product.product', 'Product Variant', domain="[('product_tmpl_id','=',product_tmpl_id)]", help="If a product variant is defined the BOM is available only for this product."), - 'bom_line_ids': fields.one2many('mrp.bom.line', 'bom_id', 'BoM Lines'), + 'bom_line_ids': fields.one2many('mrp.bom.line', 'bom_id', 'BoM Lines', copy=True), 'product_qty': fields.float('Product Quantity', required=True, digits_compute=dp.get_precision('Product Unit of Measure')), 'product_uom': fields.many2one('product.uom', 'Product Unit of Measure', required=True, help="Unit of Measure (Unit of Measure) is the unit of measurement for the inventory control"), 'date_start': fields.date('Valid From', help="Validity of this BoM. 
Keep empty if it's always valid."), @@ -510,9 +510,9 @@ class mrp_production(osv.osv): return res _columns = { - 'name': fields.char('Reference', required=True, readonly=True, states={'draft': [('readonly', False)]}), + 'name': fields.char('Reference', required=True, readonly=True, states={'draft': [('readonly', False)]}, copy=False), 'origin': fields.char('Source Document', readonly=True, states={'draft': [('readonly', False)]}, - help="Reference of the document that generated this production order request."), + help="Reference of the document that generated this production order request.", copy=False), 'priority': fields.selection([('0', 'Not urgent'), ('1', 'Normal'), ('2', 'Urgent'), ('3', 'Very Urgent')], 'Priority', select=True, readonly=True, states=dict.fromkeys(['draft', 'confirmed'], [('readonly', False)])), @@ -530,14 +530,14 @@ class mrp_production(osv.osv): 'location_dest_id': fields.many2one('stock.location', 'Finished Products Location', required=True, readonly=True, states={'draft': [('readonly', False)]}, help="Location where the system will stock the finished products."), - 'date_planned': fields.datetime('Scheduled Date', required=True, select=1, readonly=True, states={'draft': [('readonly', False)]}), - 'date_start': fields.datetime('Start Date', select=True, readonly=True), - 'date_finished': fields.datetime('End Date', select=True, readonly=True), + 'date_planned': fields.datetime('Scheduled Date', required=True, select=1, readonly=True, states={'draft': [('readonly', False)]}, copy=False), + 'date_start': fields.datetime('Start Date', select=True, readonly=True, copy=False), + 'date_finished': fields.datetime('End Date', select=True, readonly=True, copy=False), 'bom_id': fields.many2one('mrp.bom', 'Bill of Material', readonly=True, states={'draft': [('readonly', False)]}, help="Bill of Materials allow you to define the list of required raw materials to make a finished product."), 'routing_id': fields.many2one('mrp.routing', string='Routing', 
on_delete='set null', readonly=True, states={'draft': [('readonly', False)]}, help="The list of operations (list of work centers) to produce the finished product. The routing is mainly used to compute work center costs during operations and to plan future loads on work centers based on production plannification."), - 'move_prod_id': fields.many2one('stock.move', 'Product Move', readonly=True), + 'move_prod_id': fields.many2one('stock.move', 'Product Move', readonly=True, copy=False), 'move_lines': fields.one2many('stock.move', 'raw_material_production_id', 'Products to Consume', domain=[('state', 'not in', ('done', 'cancel'))], readonly=True, states={'draft': [('readonly', False)]}), 'move_lines2': fields.one2many('stock.move', 'raw_material_production_id', 'Consumed Products', @@ -554,7 +554,7 @@ class mrp_production(osv.osv): [('draft', 'New'), ('cancel', 'Cancelled'), ('confirmed', 'Awaiting Raw Materials'), ('ready', 'Ready to Produce'), ('in_production', 'Production Started'), ('done', 'Done')], string='Status', readonly=True, - track_visibility='onchange', + track_visibility='onchange', copy=False, help="When the production order is created the status is set to 'Draft'.\n\ If the order is confirmed the status is set to 'Waiting Goods'.\n\ If any exceptions are there, the status is set to 'Picking Exception'.\n\ @@ -602,20 +602,6 @@ class mrp_production(osv.osv): raise osv.except_osv(_('Invalid Action!'), _('Cannot delete a manufacturing order in state \'%s\'.') % production.state) return super(mrp_production, self).unlink(cr, uid, ids, context=context) - def copy(self, cr, uid, id, default=None, context=None): - if default is None: - default = {} - default.update({ - 'name': self.pool.get('ir.sequence').get(cr, uid, 'mrp.production'), - 'move_lines': [], - 'move_lines2': [], - 'move_created_ids': [], - 'move_created_ids2': [], - 'product_lines': [], - 'move_prod_id': False, - }) - return super(mrp_production, self).copy(cr, uid, id, default, context) - def 
location_id_change(self, cr, uid, ids, src, dest, context=None): """ Changes destination location if source location is changed. @param src: Source location id. diff --git a/addons/mrp/product.py b/addons/mrp/product.py index 2c3ae955f248237b9d2503b0e56c2f5f1368746c..1b9873d0af7d72e04541b13560e086ff3c829a1c 100644 --- a/addons/mrp/product.py +++ b/addons/mrp/product.py @@ -51,14 +51,6 @@ class product_template(osv.osv): _defaults = { "produce_delay": 1, } - - def copy(self, cr, uid, id, default=None, context=None): - if not default: - default = {} - default.update({ - 'bom_ids': [] - }) - return super(product_template, self).copy(cr, uid, id, default, context=context) class product_product(osv.osv): _inherit = "product.product" diff --git a/addons/mrp/stock.py b/addons/mrp/stock.py index 1ff7e7e7c8ca9b25008bd93a7b963316ec32a63b..e04c434a7413b2e4bd740ad851f88c5e8334d59e 100644 --- a/addons/mrp/stock.py +++ b/addons/mrp/stock.py @@ -31,17 +31,11 @@ class StockMove(osv.osv): _inherit = 'stock.move' _columns = { - 'production_id': fields.many2one('mrp.production', 'Production Order for Produced Products', select=True), + 'production_id': fields.many2one('mrp.production', 'Production Order for Produced Products', select=True, copy=False), 'raw_material_production_id': fields.many2one('mrp.production', 'Production Order for Raw Materials', select=True), 'consumed_for': fields.many2one('stock.move', 'Consumed for', help='Technical field used to make the traceability of produced products'), } - def copy(self, cr, uid, id, default=None, context=None): - if not default: - default = {} - default['production_id'] = False - return super(StockMove, self).copy(cr, uid, id, default, context=context) - def check_tracking(self, cr, uid, move, lot_id, context=None): super(StockMove, self).check_tracking(cr, uid, move, lot_id, context=context) if move.product_id.track_production and (move.location_id.usage == 'production' or move.location_dest_id.usage == 'production') and not lot_id: 
diff --git a/addons/mrp/tests/__init__.py b/addons/mrp/tests/__init__.py deleted file mode 100644 index 39ebd5e451fb81e60081f3bda946f9eee6e0d70e..0000000000000000000000000000000000000000 --- a/addons/mrp/tests/__init__.py +++ /dev/null @@ -1,28 +0,0 @@ -# -*- coding: utf-8 -*- -############################################################################## -# -# OpenERP, Open Source Business Applications -# Copyright (c) 2012-TODAY OpenERP S.A. <http://openerp.com> -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU Affero General Public License as -# published by the Free Software Foundation, either version 3 of the -# License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Affero General Public License for more details. -# -# You should have received a copy of the GNU Affero General Public License -# along with this program. If not, see <http://www.gnu.org/licenses/>. -# -############################################################################## - -from . 
import test_multicompany - -checks = [ - test_multicompany, -] - -# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: diff --git a/addons/mrp_byproduct/mrp_byproduct.py b/addons/mrp_byproduct/mrp_byproduct.py index fc6069fc8ad9011e3fc5be6f8b1e5b685b9b7a0b..5649604b57d398e345a1a2d8d69a461548a6d550 100644 --- a/addons/mrp_byproduct/mrp_byproduct.py +++ b/addons/mrp_byproduct/mrp_byproduct.py @@ -71,7 +71,7 @@ class mrp_bom(osv.osv): _inherit='mrp.bom' _columns={ - 'sub_products':fields.one2many('mrp.subproduct', 'bom_id', 'Byproducts'), + 'sub_products':fields.one2many('mrp.subproduct', 'bom_id', 'Byproducts', copy=True), } diff --git a/addons/mrp_operations/mrp_operations.py b/addons/mrp_operations/mrp_operations.py index f28a2d22ca9bc433532cff7791e75306d192ff07..c14b755070d23b2a64beaee2a407d96d2533a120 100644 --- a/addons/mrp_operations/mrp_operations.py +++ b/addons/mrp_operations/mrp_operations.py @@ -38,15 +38,6 @@ class stock_move(osv.osv): 'move_dest_id_lines': fields.one2many('stock.move','move_dest_id', 'Children Moves') } - def copy(self, cr, uid, id, default=None, context=None): - if default is None: - default = {} - default.update({ - 'move_dest_id_lines': [], - }) - return super(stock_move, self).copy(cr, uid, id, default, context) - - class mrp_production_workcenter_line(osv.osv): def _get_date_end(self, cr, uid, ids, field_name, arg, context=None): @@ -84,7 +75,7 @@ class mrp_production_workcenter_line(osv.osv): _order = "sequence, date_planned" _columns = { - 'state': fields.selection([('draft','Draft'),('cancel','Cancelled'),('pause','Pending'),('startworking', 'In Progress'),('done','Finished')],'Status', readonly=True, + 'state': fields.selection([('draft','Draft'),('cancel','Cancelled'),('pause','Pending'),('startworking', 'In Progress'),('done','Finished')],'Status', readonly=True, copy=False, help="* When a work order is created it is set in 'Draft' status.\n" \ "* When user sets work order in start mode that time it will be set in 
'In Progress' status.\n" \ "* When work order is in running mode, during that time if user wants to stop or to make changes in order then can set in 'Pending' status.\n" \ @@ -120,27 +111,24 @@ class mrp_production_workcenter_line(osv.osv): oper_obj = self.browse(cr, uid, ids)[0] prod_obj = oper_obj.production_id if action == 'start': - if prod_obj.state =='confirmed': - prod_obj_pool.force_production(cr, uid, [prod_obj.id]) - prod_obj_pool.signal_button_produce(cr, uid, [prod_obj.id]) - elif prod_obj.state =='ready': - prod_obj_pool.signal_button_produce(cr, uid, [prod_obj.id]) - elif prod_obj.state =='in_production': - return - else: - raise osv.except_osv(_('Error!'),_('Manufacturing order cannot be started in state "%s"!') % (prod_obj.state,)) + if prod_obj.state =='confirmed': + prod_obj_pool.force_production(cr, uid, [prod_obj.id]) + prod_obj_pool.signal_workflow(cr, uid, [prod_obj.id], 'button_produce') + elif prod_obj.state =='ready': + prod_obj_pool.signal_workflow(cr, uid, [prod_obj.id], 'button_produce') + elif prod_obj.state =='in_production': + return + else: + raise osv.except_osv(_('Error!'),_('Manufacturing order cannot be started in state "%s"!') % (prod_obj.state,)) else: - oper_ids = self.search(cr,uid,[('production_id','=',prod_obj.id)]) - obj = self.browse(cr,uid,oper_ids) - flag = True - for line in obj: - if line.state != 'done': - flag = False + open_count = self.search_count(cr,uid,[('production_id','=',prod_obj.id), ('state', '!=', 'done')]) + flag = not bool(open_count) + if flag: for production in prod_obj_pool.browse(cr, uid, [prod_obj.id], context= None): if production.move_lines or production.move_created_ids: prod_obj_pool.action_produce(cr,uid, production.id, production.product_qty, 'consume_produce', context = None) - prod_obj_pool.signal_button_produce_done(cr, uid, [oper_obj.production_id.id]) + prod_obj_pool.signal_workflow(cr, uid, [oper_obj.production_id.id], 'button_produce_done') return def write(self, cr, uid, ids, vals, 
context=None, update=True): @@ -228,8 +216,8 @@ class mrp_production(osv.osv): workcenter_pool = self.pool.get('mrp.production.workcenter.line') for workcenter_line in obj.workcenter_lines: if workcenter_line.state == 'draft': - workcenter_pool.signal_button_start_working(cr, uid, [workcenter_line.id]) - workcenter_pool.signal_button_done(cr, uid, [workcenter_line.id]) + workcenter_line.signal_workflow('button_start_working') + workcenter_line.signal_workflow('button_done') return super(mrp_production,self).action_production_end(cr, uid, ids) def action_in_production(self, cr, uid, ids): @@ -239,7 +227,7 @@ class mrp_production(osv.osv): workcenter_pool = self.pool.get('mrp.production.workcenter.line') for prod in self.browse(cr, uid, ids): if prod.workcenter_lines: - workcenter_pool.signal_button_start_working(cr, uid, [prod.workcenter_lines[0].id]) + workcenter_pool.signal_workflow(cr, uid, [prod.workcenter_lines[0].id], 'button_start_working') return super(mrp_production,self).action_in_production(cr, uid, ids) def action_cancel(self, cr, uid, ids, context=None): @@ -248,8 +236,7 @@ class mrp_production(osv.osv): """ workcenter_pool = self.pool.get('mrp.production.workcenter.line') obj = self.browse(cr, uid, ids,context=context)[0] - for workcenter_line in obj.workcenter_lines: - workcenter_pool.signal_button_cancel(cr, uid, [workcenter_line.id]) + workcenter_pool.signal_workflow(cr, uid, [record.id for record in obj.workcenter_lines], 'button_cancel') return super(mrp_production,self).action_cancel(cr,uid,ids,context=context) def _compute_planned_workcenter(self, cr, uid, ids, context=None, mini=False): @@ -507,24 +494,24 @@ class mrp_operations_operation(osv.osv): wc_op_id.append(workcenter_pool.create(cr,uid,{'production_id':vals['production_id'],'name':production_obj.product_id.name,'workcenter_id':vals['workcenter_id']})) if code.start_stop=='start': workcenter_pool.action_start_working(cr,uid,wc_op_id) - workcenter_pool.signal_button_start_working(cr, uid, 
[wc_op_id[0]]) + workcenter_pool.signal_workflow(cr, uid, [wc_op_id[0]], 'button_start_working') if code.start_stop=='done': workcenter_pool.action_done(cr,uid,wc_op_id) - workcenter_pool.signal_button_done(cr, uid, [wc_op_id[0]]) + workcenter_pool.signal_workflow(cr, uid, [wc_op_id[0]], 'button_done') self.pool.get('mrp.production').write(cr,uid,vals['production_id'],{'date_finished':datetime.now().strftime('%Y-%m-%d %H:%M:%S')}) if code.start_stop=='pause': workcenter_pool.action_pause(cr,uid,wc_op_id) - workcenter_pool.signal_button_pause(cr, uid, [wc_op_id[0]]) + workcenter_pool.signal_workflow(cr, uid, [wc_op_id[0]], 'button_pause') if code.start_stop=='resume': workcenter_pool.action_resume(cr,uid,wc_op_id) - workcenter_pool.signal_button_resume(cr, uid, [wc_op_id[0]]) + workcenter_pool.signal_workflow(cr, uid, [wc_op_id[0]], 'button_resume') if code.start_stop=='cancel': workcenter_pool.action_cancel(cr,uid,wc_op_id) - workcenter_pool.signal_button_cancel(cr, uid, [wc_op_id[0]]) + workcenter_pool.signal_workflow(cr, uid, [wc_op_id[0]], 'button_cancel') if not self.check_operation(cr, uid, vals): return @@ -560,4 +547,3 @@ class mrp_operations_operation(osv.osv): } # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: - diff --git a/addons/mrp_operations/test/workcenter_operations.yml b/addons/mrp_operations/test/workcenter_operations.yml index 6e9beafa1566bdc76fa03b465c145820b1571f65..a31c3fb05d283d61ad1307479d83db6acb25a0db 100644 --- a/addons/mrp_operations/test/workcenter_operations.yml +++ b/addons/mrp_operations/test/workcenter_operations.yml @@ -58,47 +58,47 @@ - !python {model: mrp.production}: | order = self.browse(cr, uid, ref("mrp.mrp_production_1"), context=context) - self.pool.get('mrp.production.workcenter.line').signal_button_start_working(cr, uid, [order.workcenter_lines[0].id]) + order.workcenter_lines[0].signal_workflow('button_start_working') - Now I pause first work operation due to technical fault of work center. 
- !python {model: mrp.production}: | order = self.browse(cr, uid, ref("mrp.mrp_production_1"), context=context) - self.pool.get('mrp.production.workcenter.line').signal_button_pause(cr, uid, [order.workcenter_lines[0].id]) + order.workcenter_lines[0].signal_workflow('button_pause') - I resume first work operation. - !python {model: mrp.production}: | order = self.browse(cr, uid, ref("mrp.mrp_production_1"), context=context) - self.pool.get('mrp.production.workcenter.line').signal_button_resume(cr, uid, [order.workcenter_lines[0].id]) + order.workcenter_lines[0].signal_workflow('button_resume') - I cancel first work operation. - !python {model: mrp.production}: | order = self.browse(cr, uid, ref("mrp.mrp_production_1"), context=context) - self.pool.get('mrp.production.workcenter.line').signal_button_cancel(cr, uid, [order.workcenter_lines[0].id]) + order.workcenter_lines[0].signal_workflow('button_cancel') - I reset first work operation and start after resolving techninal fault of work center. - !python {model: mrp.production}: | order = self.browse(cr, uid, ref("mrp.mrp_production_1"), context=context) - self.pool.get('mrp.production.workcenter.line').signal_button_draft(cr, uid, [order.workcenter_lines[0].id]) - self.pool.get('mrp.production.workcenter.line').signal_button_start_working(cr, uid, [order.workcenter_lines[0].id]) + order.workcenter_lines[0].signal_workflow('button_draft') + order.workcenter_lines[0].signal_workflow('button_start_working') - I close first work operation as this work center completed its process. - !python {model: mrp.production}: | order = self.browse(cr, uid, ref("mrp.mrp_production_1"), context=context) - self.pool.get('mrp.production.workcenter.line').signal_button_done(cr, uid, [order.workcenter_lines[0].id]) + order.workcenter_lines[0].signal_workflow('button_done') - Now I close other operations one by one which are in start state. 
- !python {model: mrp.production}: | order = self.browse(cr, uid, ref("mrp.mrp_production_1"), context=context) for work_line in order.workcenter_lines[1:]: - self.pool.get('mrp.production.workcenter.line').signal_button_start_working(cr, uid, [work_line.id]) - self.pool.get('mrp.production.workcenter.line').signal_button_done(cr, uid, [work_line.id]) + work_line.signal_workflow('button_start_working') + work_line.signal_workflow('button_done') - I check that the production order is now done. diff --git a/addons/mrp_repair/mrp_repair.py b/addons/mrp_repair/mrp_repair.py index 6ff39b79081ab96ed5b2e8ab56166bf01b5db882..a349c82ba2df07e3c2ade8e50206e5c0905eabf3 100644 --- a/addons/mrp_repair/mrp_repair.py +++ b/addons/mrp_repair/mrp_repair.py @@ -113,7 +113,7 @@ class mrp_repair(osv.osv): return self.pool['mrp.repair'].search(cr, uid, [('fees_lines', 'in', ids)], context=context) _columns = { - 'name': fields.char('Repair Reference', required=True, states={'confirmed': [('readonly', True)]}), + 'name': fields.char('Repair Reference', required=True, states={'confirmed': [('readonly', True)]}, copy=False), 'product_id': fields.many2one('product.product', string='Product to Repair', required=True, readonly=True, states={'draft': [('readonly', False)]}), 'product_qty': fields.float('Product Quantity', digits_compute=dp.get_precision('Product Unit of Measure'), required=True, readonly=True, states={'draft': [('readonly', False)]}), @@ -130,7 +130,7 @@ class mrp_repair(osv.osv): ('2binvoiced', 'To be Invoiced'), ('invoice_except', 'Invoice Exception'), ('done', 'Repaired') - ], 'Status', readonly=True, track_visibility='onchange', + ], 'Status', readonly=True, track_visibility='onchange', copy=False, help=' * The \'Draft\' status is used when a user is encoding a new and unconfirmed repair order. \ \n* The \'Confirmed\' status is used when a user confirms the repair order. 
\ \n* The \'Ready to Repair\' status is used to start to repairing, user can start repairing only after repair order is confirmed. \ @@ -141,7 +141,7 @@ class mrp_repair(osv.osv): 'location_dest_id': fields.many2one('stock.location', 'Delivery Location', readonly=True, required=True, states={'draft': [('readonly', False)], 'confirmed': [('readonly', True)]}), 'lot_id': fields.many2one('stock.production.lot', 'Repaired Lot', domain="[('product_id','=', product_id)]", help="Products repaired are all belonging to this lot"), 'guarantee_limit': fields.date('Warranty Expiration', help="The warranty expiration limit is computed as: last move date + warranty defined on selected product. If the current date is below the warranty expiration limit, each operation and fee you will add will be set as 'not to invoiced' by default. Note that you can change manually afterwards.", states={'confirmed': [('readonly', True)]}), - 'operations': fields.one2many('mrp.repair.line', 'repair_id', 'Operation Lines', readonly=True, states={'draft': [('readonly', False)]}), + 'operations': fields.one2many('mrp.repair.line', 'repair_id', 'Operation Lines', readonly=True, states={'draft': [('readonly', False)]}, copy=True), 'pricelist_id': fields.many2one('product.pricelist', 'Pricelist', help='Pricelist of the selected partner.'), 'partner_invoice_id': fields.many2one('res.partner', 'Invoicing Address'), 'invoice_method': fields.selection([ @@ -150,14 +150,14 @@ class mrp_repair(osv.osv): ("after_repair", "After Repair") ], "Invoice Method", select=True, required=True, states={'draft': [('readonly', False)]}, readonly=True, help='Selecting \'Before Repair\' or \'After Repair\' will allow you to generate invoice before or after the repair is done respectively. 
\'No invoice\' means you don\'t want to generate invoice for this repair order.'), - 'invoice_id': fields.many2one('account.invoice', 'Invoice', readonly=True, track_visibility="onchange"), - 'move_id': fields.many2one('stock.move', 'Move', readonly=True, help="Move created by the repair order", track_visibility="onchange"), - 'fees_lines': fields.one2many('mrp.repair.fee', 'repair_id', 'Fees', readonly=True, states={'draft': [('readonly', False)]}), + 'invoice_id': fields.many2one('account.invoice', 'Invoice', readonly=True, track_visibility="onchange", copy=False), + 'move_id': fields.many2one('stock.move', 'Move', readonly=True, help="Move created by the repair order", track_visibility="onchange", copy=False), + 'fees_lines': fields.one2many('mrp.repair.fee', 'repair_id', 'Fees', readonly=True, states={'draft': [('readonly', False)]}, copy=True), 'internal_notes': fields.text('Internal Notes'), 'quotation_notes': fields.text('Quotation Notes'), 'company_id': fields.many2one('res.company', 'Company'), - 'invoiced': fields.boolean('Invoiced', readonly=True), - 'repaired': fields.boolean('Repaired', readonly=True), + 'invoiced': fields.boolean('Invoiced', readonly=True, copy=False), + 'repaired': fields.boolean('Repaired', readonly=True, copy=False), 'amount_untaxed': fields.function(_amount_untaxed, string='Untaxed Amount', store={ 'mrp.repair': (lambda self, cr, uid, ids, c={}: ids, ['operations', 'fees_lines'], 10), @@ -199,19 +199,6 @@ class mrp_repair(osv.osv): ('name', 'unique (name)', 'The name of the Repair Order must be unique!'), ] - def copy(self, cr, uid, id, default=None, context=None): - if not default: - default = {} - default.update({ - 'state': 'draft', - 'repaired': False, - 'invoiced': False, - 'invoice_id': False, - 'move_id': False, - 'name': self.pool.get('ir.sequence').get(cr, uid, 'mrp.repair'), - }) - return super(mrp_repair, self).copy(cr, uid, id, default, context) - def onchange_product_id(self, cr, uid, ids, product_id=None): """ On 
change of product sets some values. @param product_id: Changed product @@ -562,12 +549,6 @@ class mrp_repair_line(osv.osv, ProductChangeMixin): _name = 'mrp.repair.line' _description = 'Repair Line' - def copy_data(self, cr, uid, id, default=None, context=None): - if not default: - default = {} - default.update({'invoice_line_id': False, 'move_id': False, 'invoiced': False, 'state': 'draft'}) - return super(mrp_repair_line, self).copy_data(cr, uid, id, default, context) - def _amount_line(self, cr, uid, ids, field_name, arg, context=None): """ Calculates amount. @param field_name: Name of field. @@ -588,22 +569,22 @@ class mrp_repair_line(osv.osv, ProductChangeMixin): 'type': fields.selection([('add', 'Add'), ('remove', 'Remove')], 'Type', required=True), 'to_invoice': fields.boolean('To Invoice'), 'product_id': fields.many2one('product.product', 'Product', required=True), - 'invoiced': fields.boolean('Invoiced', readonly=True), + 'invoiced': fields.boolean('Invoiced', readonly=True, copy=False), 'price_unit': fields.float('Unit Price', required=True, digits_compute=dp.get_precision('Product Price')), 'price_subtotal': fields.function(_amount_line, string='Subtotal', digits_compute=dp.get_precision('Account')), 'tax_id': fields.many2many('account.tax', 'repair_operation_line_tax', 'repair_operation_line_id', 'tax_id', 'Taxes'), 'product_uom_qty': fields.float('Quantity', digits_compute=dp.get_precision('Product Unit of Measure'), required=True), 'product_uom': fields.many2one('product.uom', 'Product Unit of Measure', required=True), - 'invoice_line_id': fields.many2one('account.invoice.line', 'Invoice Line', readonly=True), + 'invoice_line_id': fields.many2one('account.invoice.line', 'Invoice Line', readonly=True, copy=False), 'location_id': fields.many2one('stock.location', 'Source Location', required=True, select=True), 'location_dest_id': fields.many2one('stock.location', 'Dest. 
Location', required=True, select=True), - 'move_id': fields.many2one('stock.move', 'Inventory Move', readonly=True), + 'move_id': fields.many2one('stock.move', 'Inventory Move', readonly=True, copy=False), 'lot_id': fields.many2one('stock.production.lot', 'Lot'), 'state': fields.selection([ ('draft', 'Draft'), ('confirmed', 'Confirmed'), ('done', 'Done'), - ('cancel', 'Cancelled')], 'Status', required=True, readonly=True, + ('cancel', 'Cancelled')], 'Status', required=True, readonly=True, copy=False, help=' * The \'Draft\' status is set automatically as draft when repair order in draft status. \ \n* The \'Confirmed\' status is set automatically as confirm when repair order in confirm status. \ \n* The \'Done\' status is set automatically when repair order is completed.\ @@ -659,12 +640,6 @@ class mrp_repair_fee(osv.osv, ProductChangeMixin): _name = 'mrp.repair.fee' _description = 'Repair Fees Line' - def copy_data(self, cr, uid, id, default=None, context=None): - if not default: - default = {} - default.update({'invoice_line_id': False, 'invoiced': False}) - return super(mrp_repair_fee, self).copy_data(cr, uid, id, default, context) - def _amount_line(self, cr, uid, ids, field_name, arg, context=None): """ Calculates amount. @param field_name: Name of field. 
@@ -688,9 +663,9 @@ class mrp_repair_fee(osv.osv, ProductChangeMixin): 'product_uom': fields.many2one('product.uom', 'Product Unit of Measure', required=True), 'price_subtotal': fields.function(_amount_line, string='Subtotal', digits_compute=dp.get_precision('Account')), 'tax_id': fields.many2many('account.tax', 'repair_fee_line_tax', 'repair_fee_line_id', 'tax_id', 'Taxes'), - 'invoice_line_id': fields.many2one('account.invoice.line', 'Invoice Line', readonly=True), + 'invoice_line_id': fields.many2one('account.invoice.line', 'Invoice Line', readonly=True, copy=False), 'to_invoice': fields.boolean('To Invoice'), - 'invoiced': fields.boolean('Invoiced', readonly=True), + 'invoiced': fields.boolean('Invoiced', readonly=True, copy=False), } _defaults = { diff --git a/addons/mrp_repair/security/ir.model.access.csv b/addons/mrp_repair/security/ir.model.access.csv index 2ffbc89cf8de8a715b1b07253fe77d9ba097fab7..c6f026ac3563ceb0422a57ccefc50e5e81dfd340 100644 --- a/addons/mrp_repair/security/ir.model.access.csv +++ b/addons/mrp_repair/security/ir.model.access.csv @@ -23,5 +23,6 @@ access_account_invoice_user,account.invoice,account.model_account_invoice,mrp.gr access_account_invoice_manager,account.invoice manager,account.model_account_invoice,mrp.group_mrp_manager,1,0,0,0 access_account_invoice_line_user,account.invoice.line,account.model_account_invoice_line,mrp.group_mrp_user,1,1,1,1 access_account_invoice_line_manager,account.invoice.line manager,account.model_account_invoice_line,mrp.group_mrp_manager,1,0,0,0 +access_account_invoice_tax_user,account.invoice.tax,account.model_account_invoice_tax,mrp.group_mrp_user,1,0,0,0 access_account_journal_user,account.journal,account.model_account_journal,mrp.group_mrp_user,1,1,1,1 access_account_journal_manager,account.journal manager,account.model_account_journal,mrp.group_mrp_manager,1,0,0,0 diff --git a/addons/mrp_repair/wizard/make_invoice.py b/addons/mrp_repair/wizard/make_invoice.py index 
f9c84cb7bdbacf60129ba498a9b151f18cb1f5c7..e23d6a549a04b48aebf26b89654ee2ae5e4f9950 100644 --- a/addons/mrp_repair/wizard/make_invoice.py +++ b/addons/mrp_repair/wizard/make_invoice.py @@ -49,7 +49,7 @@ class make_invoice(osv.osv_memory): # We have to trigger the workflow of the given repairs, otherwise they remain 'to be invoiced'. # Note that the signal 'action_invoice_create' will trigger another call to the method 'action_invoice_create', # but that second call will not do anything, since the repairs are already invoiced. - order_obj.signal_action_invoice_create(cr, uid, context['active_ids']) + order_obj.signal_workflow(cr, uid, context['active_ids'], 'action_invoice_create') form_res = mod_obj.get_object_reference(cr, uid, 'account', 'invoice_form') form_id = form_res and form_res[1] or False diff --git a/addons/multi_company/multi_company_demo.xml b/addons/multi_company/multi_company_demo.xml index 3dad9fcaaf6d9cb224ae788d1103d07d9b7f4cec..d532c3e3e919f1814e740f42b1f01083b4453575 100644 --- a/addons/multi_company/multi_company_demo.xml +++ b/addons/multi_company/multi_company_demo.xml @@ -397,10 +397,19 @@ <field name="company_id" ref="res_company_oerp_be"/> <field name="currency_id" ref="base.EUR"/> </record> + + <!-- due to the way the new API works, we need to unset the company (automatically set + to the default company during module install) before changing the currency, to avoid + a mismatch between the currently set company currency and the new one + --> + <record id="project.project_project_3" model="project.project"> + <field name="company_id" eval="False"/> + </record> <record id="project.project_project_3" model="project.project"> + <field name="company_id" ref="res_company_oerp_us"/> <field name="currency_id" ref="base.USD"/> </record> + <record id="project.project_project_4" model="project.project"> <field name="company_id" ref="res_company_oerp_be"/> <field name="currency_id" ref="base.EUR"/> diff --git a/addons/payment/models/payment_acquirer.py 
b/addons/payment/models/payment_acquirer.py index 2d466219140fdb737ee1cc21e890017daba6deae..25ff5fd51941cba76e9aa1fa5e10523ad74d74ef 100644 --- a/addons/payment/models/payment_acquirer.py +++ b/addons/payment/models/payment_acquirer.py @@ -73,7 +73,7 @@ class PaymentAcquirer(osv.Model): [('test', 'Test'), ('prod', 'Production')], string='Environment', oldname='env'), 'website_published': fields.boolean( - 'Visible in Portal / Website', + 'Visible in Portal / Website', copy=False, help="Make this payment acquirer available (Customer invoices, etc.)"), # Fees 'fees_active': fields.boolean('Compute fees'), @@ -338,7 +338,7 @@ class PaymentTransaction(osv.Model): ('done', 'Done'), ('error', 'Error'), ('cancel', 'Canceled') ], 'Status', required=True, - track_visiblity='onchange'), + track_visiblity='onchange', copy=False), 'state_message': fields.text('Message', help='Field used to store error and/or validation messages for information'), # payment diff --git a/addons/point_of_sale/account_bank_statement.py b/addons/point_of_sale/account_bank_statement.py index 9275b975d469e54d0f00f516545f5562b6fc796a..e698041f975efa7896688fa99e13954812cdab24 100644 --- a/addons/point_of_sale/account_bank_statement.py +++ b/addons/point_of_sale/account_bank_statement.py @@ -38,7 +38,7 @@ class account_journal(osv.osv): class account_cash_statement(osv.osv): _inherit = 'account.bank.statement' _columns = { - 'pos_session_id' : fields.many2one('pos.session'), + 'pos_session_id' : fields.many2one('pos.session', copy=False), } diff --git a/addons/point_of_sale/point_of_sale.py b/addons/point_of_sale/point_of_sale.py index 6bef9f7724814e7f797ba1cc5ea665ffdadf568c..1eec3cb48d45ceb702c8fe3910ee704d6f6870f3 100644 --- a/addons/point_of_sale/point_of_sale.py +++ b/addons/point_of_sale/point_of_sale.py @@ -19,14 +19,9 @@ # ############################################################################## -from datetime import datetime -from dateutil.relativedelta import relativedelta -from decimal 
import Decimal import logging -import pdb import time -import openerp from openerp import tools from openerp.osv import fields, osv from openerp.tools.translate import _ @@ -81,10 +76,10 @@ class pos_config(osv.osv): 'receipt_footer': fields.text('Receipt Footer',help="A short text that will be inserted as a footer in the printed receipt"), 'proxy_ip': fields.char('IP Address', help='The hostname or ip address of the hardware proxy, Will be autodetected if left empty', size=45), - 'state' : fields.selection(POS_CONFIG_STATE, 'Status', required=True, readonly=True), + 'state' : fields.selection(POS_CONFIG_STATE, 'Status', required=True, readonly=True, copy=False), 'sequence_id' : fields.many2one('ir.sequence', 'Order IDs Sequence', readonly=True, help="This sequence is automatically created by OpenERP but you can change it "\ - "to customize the reference numbers of your orders."), + "to customize the reference numbers of your orders.", copy=False), 'session_ids': fields.one2many('pos.session', 'config_id', 'Sessions'), 'group_by' : fields.boolean('Group Journal Items', help="Check this if you want to group the Journal Items by Product while closing a Session"), 'pricelist_id': fields.many2one('product.pricelist','Pricelist', required=True), @@ -107,16 +102,6 @@ class pos_config(osv.osv): (_check_cash_control, "You cannot have two cash controls in one Point Of Sale !", ['journal_ids']), ] - def copy(self, cr, uid, id, default=None, context=None): - if not default: - default = {} - d = { - 'sequence_id' : False, - } - d.update(default) - return super(pos_config, self).copy(cr, uid, id, d, context=context) - - def name_get(self, cr, uid, ids, context=None): result = [] states = { @@ -252,7 +237,7 @@ class pos_session(osv.osv): 'state' : fields.selection(POS_SESSION_STATE, 'Status', required=True, readonly=True, - select=1), + select=1, copy=False), 'sequence_number': fields.integer('Order Sequence Number'), @@ -353,7 +338,7 @@ class pos_session(osv.osv): ] def 
create(self, cr, uid, values, context=None): - context = context or {} + context = dict(context or {}) config_id = values.get('config_id', False) or context.get('default_config_id', False) if not config_id: raise osv.except_osv( _('Error!'), @@ -440,9 +425,9 @@ class pos_session(osv.osv): if not record.start_at: values['start_at'] = time.strftime('%Y-%m-%d %H:%M:%S') values['state'] = 'opened' - record.write(values, context=context) + record.write(values) for st in record.statement_ids: - st.button_open(context=context) + st.button_open() return self.open_frontend_cb(cr, uid, ids, context=context) @@ -658,23 +643,8 @@ class pos_order(osv.osv): res[order.id]['amount_total'] = cur_obj.round(cr, uid, cur, val1) return res - def copy(self, cr, uid, id, default=None, context=None): - if not default: - default = {} - d = { - 'state': 'draft', - 'invoice_id': False, - 'account_move': False, - 'picking_id': False, - 'statement_ids': [], - 'nb_print': 0, - 'name': self.pool.get('ir.sequence').get(cr, uid, 'pos.order'), - } - d.update(default) - return super(pos_order, self).copy(cr, uid, id, d, context=context) - _columns = { - 'name': fields.char('Order Ref', required=True, readonly=True), + 'name': fields.char('Order Ref', required=True, readonly=True, copy=False), 'company_id':fields.many2one('res.company', 'Company', required=True, readonly=True), 'date_order': fields.datetime('Order Date', readonly=True, select=True), 'user_id': fields.many2one('res.users', 'Salesman', help="Person who uses the the cash register. 
It can be a reliever, a student or an interim employee."), @@ -682,7 +652,7 @@ class pos_order(osv.osv): 'amount_total': fields.function(_amount_all, string='Total', multi='all'), 'amount_paid': fields.function(_amount_all, string='Paid', states={'draft': [('readonly', False)]}, readonly=True, digits_compute=dp.get_precision('Account'), multi='all'), 'amount_return': fields.function(_amount_all, 'Returned', digits_compute=dp.get_precision('Account'), multi='all'), - 'lines': fields.one2many('pos.order.line', 'order_id', 'Order Lines', states={'draft': [('readonly', False)]}, readonly=True), + 'lines': fields.one2many('pos.order.line', 'order_id', 'Order Lines', states={'draft': [('readonly', False)]}, readonly=True, copy=True), 'statement_ids': fields.one2many('account.bank.statement.line', 'pos_statement_id', 'Payments', states={'draft': [('readonly', False)]}, readonly=True), 'pricelist_id': fields.many2one('product.pricelist', 'Pricelist', required=True, states={'draft': [('readonly', False)]}, readonly=True), 'partner_id': fields.many2one('res.partner', 'Customer', change_default=True, select=1, states={'draft': [('readonly', False)], 'paid': [('readonly', False)]}), @@ -700,16 +670,16 @@ class pos_order(osv.osv): ('paid', 'Paid'), ('done', 'Posted'), ('invoiced', 'Invoiced')], - 'Status', readonly=True), + 'Status', readonly=True, copy=False), - 'invoice_id': fields.many2one('account.invoice', 'Invoice'), - 'account_move': fields.many2one('account.move', 'Journal Entry', readonly=True), - 'picking_id': fields.many2one('stock.picking', 'Picking', readonly=True), + 'invoice_id': fields.many2one('account.invoice', 'Invoice', copy=False), + 'account_move': fields.many2one('account.move', 'Journal Entry', readonly=True, copy=False), + 'picking_id': fields.many2one('stock.picking', 'Picking', readonly=True, copy=False), 'picking_type_id': fields.related('session_id', 'config_id', 'picking_type_id', string="Picking Type", type='many2one', 
relation='stock.picking.type'), 'location_id': fields.related('session_id', 'config_id', 'stock_location_id', string="Location", type='many2one', store=True, relation='stock.location'), 'note': fields.text('Internal Notes'), - 'nb_print': fields.integer('Number of Print', readonly=True), - 'pos_reference': fields.char('Receipt Ref', readonly=True), + 'nb_print': fields.integer('Number of Print', readonly=True, copy=False), + 'pos_reference': fields.char('Receipt Ref', readonly=True, copy=False), 'sale_journal': fields.related('session_id', 'config_id', 'journal_id', relation='account.journal', type='many2one', string='Sale Journal', store=True, readonly=True), } @@ -831,8 +801,7 @@ class pos_order(osv.osv): def add_payment(self, cr, uid, order_id, data, context=None): """Create a new payment for the order""" - if not context: - context = {} + context = dict(context or {}) statement_line_obj = self.pool.get('account.bank.statement.line') property_obj = self.pool.get('ir.property') order = self.browse(cr, uid, order_id, context=context) @@ -1282,7 +1251,7 @@ class pos_order_line(osv.osv): _columns = { 'company_id': fields.many2one('res.company', 'Company', required=True), - 'name': fields.char('Line No', required=True), + 'name': fields.char('Line No', required=True, copy=False), 'notice': fields.char('Discount Notice'), 'product_id': fields.many2one('product.product', 'Product', domain=[('sale_ok', '=', True)], required=True, change_default=True), 'price_unit': fields.float(string='Unit Price', digits_compute=dp.get_precision('Account')), @@ -1301,16 +1270,6 @@ class pos_order_line(osv.osv): 'company_id': lambda self,cr,uid,c: self.pool.get('res.users').browse(cr, uid, uid, c).company_id.id, } - def copy_data(self, cr, uid, id, default=None, context=None): - if not default: - default = {} - default.update({ - 'name': self.pool.get('ir.sequence').get(cr, uid, 'pos.order.line') - }) - return super(pos_order_line, self).copy_data(cr, uid, id, default, context=context) 
- -import io, StringIO - class ean_wizard(osv.osv_memory): _name = 'pos.ean_wizard' _columns = { diff --git a/addons/point_of_sale/test/01_order_to_payment.yml b/addons/point_of_sale/test/01_order_to_payment.yml index bf201affc4ad0cdf087aa9d152dacb9c550b75d3..cdad0d1f30901dbd4cf7f843a9bb366da69f0159 100644 --- a/addons/point_of_sale/test/01_order_to_payment.yml +++ b/addons/point_of_sale/test/01_order_to_payment.yml @@ -14,7 +14,7 @@ - I assign this 10 percent tax on the [PCSC234] PC Assemble SC234 product as a sale tax - - !record {model: product.product, id: product.product_product_3}: + !record {model: product.product, id: product.product_product_3, view: False}: taxes_id: [account_tax_10_incl] - | I create a VAT tax of 5%, which is added to the public price @@ -42,7 +42,7 @@ - I assign those 5 percent taxes on the PCSC349 product as a sale taxes - - !record {model: product.product, id: product.product_product_4}: + !record {model: product.product, id: product.product_product_4, view: False}: taxes_id: [account_tax_05_incl, account_tax_05_incl_chicago] - I create a new session diff --git a/addons/point_of_sale/wizard/pos_confirm.py b/addons/point_of_sale/wizard/pos_confirm.py index d1ecdfb4e0eea8a7d76b9663b3b70f8654cd0a40..909463fa9960aecffcdcbb3cbdb19656c5a45576 100644 --- a/addons/point_of_sale/wizard/pos_confirm.py +++ b/addons/point_of_sale/wizard/pos_confirm.py @@ -36,7 +36,7 @@ class pos_confirm(osv.osv_memory): todo = False break if todo: - order_obj.signal_done(cr, uid, [order.id]) + order.signal_workflow('done') # Check if there is orders to reconcile their invoices ids = order_obj.search(cr, uid, [('state','=','invoiced'),('invoice_id.state','=','open')], context=context) diff --git a/addons/point_of_sale/wizard/pos_payment.py b/addons/point_of_sale/wizard/pos_payment.py index 567d5e57f946e8ee1838ae2c30a7228d597de34d..efd2682bc3086e3b6037342bf3b20b5ea3822d7c 100644 --- a/addons/point_of_sale/wizard/pos_payment.py +++ 
b/addons/point_of_sale/wizard/pos_payment.py @@ -65,9 +65,8 @@ class pos_make_payment(osv.osv_memory): order_obj.add_payment(cr, uid, active_id, data, context=context) if order_obj.test_paid(cr, uid, [active_id]): - order_obj.signal_paid(cr, uid, [active_id]) + order_obj.signal_workflow(cr, uid, [active_id], 'paid') return {'type' : 'ir.actions.act_window_close' } - ##self.print_report(cr, uid, ids, context=context) return self.launch_payment(cr, uid, ids, context=context) diff --git a/addons/point_of_sale/wizard/pos_session_opening.py b/addons/point_of_sale/wizard/pos_session_opening.py index ea717378a3bc9e62d6d9014f8d3f6fb860f8df83..5389fb3e1a099284941e05f92ea553a01c1b22f5 100644 --- a/addons/point_of_sale/wizard/pos_session_opening.py +++ b/addons/point_of_sale/wizard/pos_session_opening.py @@ -24,8 +24,8 @@ class pos_session_opening(osv.osv_memory): } def open_ui(self, cr, uid, ids, context=None): - context = context or {} data = self.browse(cr, uid, ids[0], context=context) + context = dict(context or {}) context['active_id'] = data.pos_session_id.id return { 'type' : 'ir.actions.act_url', @@ -35,7 +35,7 @@ class pos_session_opening(osv.osv_memory): def open_existing_session_cb_close(self, cr, uid, ids, context=None): wizard = self.browse(cr, uid, ids[0], context=context) - self.pool.get('pos.session').signal_cashbox_control(cr, uid, [wizard.pos_session_id.id]) + wizard.pos_session_id.signal_workflow('cashbox_control') return self.open_session_cb(cr, uid, ids, context) def open_session_cb(self, cr, uid, ids, context=None): diff --git a/addons/portal/tests/test_portal.py b/addons/portal/tests/test_portal.py index 2c4d7ecf966eeee55c5a3745f21d3c3f53ae9065..041222893164abf63b469b3170d5acfe359fd0db 100644 --- a/addons/portal/tests/test_portal.py +++ b/addons/portal/tests/test_portal.py @@ -48,7 +48,7 @@ class test_portal(TestMail): # Set an email address for the user running the tests, used as Sender for outgoing mails self.res_users.write(cr, uid, uid, {'email': 
'test@localhost'}) - @mute_logger('openerp.addons.base.ir.ir_model', 'openerp.osv.orm') + @mute_logger('openerp.addons.base.ir.ir_model', 'openerp.models') def test_00_mail_access_rights(self): """ Test basic mail_message and mail_group access rights for portal users. """ cr, uid = self.cr, self.uid @@ -90,7 +90,7 @@ class test_portal(TestMail): # Do: Chell replies to a Pigs message using the composer compose_id = mail_compose.create(cr, self.user_chell_id, {'subject': 'Subject', 'body': 'Body text'}, - {'default_composition_mode': 'reply', 'default_parent_id': pigs_msg_id}) + {'default_composition_mode': 'comment', 'default_parent_id': pigs_msg_id}) mail_compose.send_mail(cr, self.user_chell_id, [compose_id]) # Do: Chell browses PigsPortal -> ok because groups security, ko for partners (no read permission) @@ -167,7 +167,7 @@ class test_portal(TestMail): self.assertIn('login=%s' % partner_raoul.user_ids[0].login, url, 'notification email: link should contain the user login') - @mute_logger('openerp.addons.mail.mail_thread', 'openerp.osv.orm') + @mute_logger('openerp.addons.mail.mail_thread', 'openerp.models') def test_21_inbox_redirection(self): """ Tests designed to test the inbox redirection of emails notification URLs. 
""" cr, uid, user_admin, group_pigs = self.cr, self.uid, self.user_admin, self.group_pigs diff --git a/addons/portal_project/tests/test_access_rights.py b/addons/portal_project/tests/test_access_rights.py index 0cb2412ecea51fed6f188cbeb3b3e7ad1e950196..e36df90b65207b7b26f35e47c7c5bd005880bba7 100644 --- a/addons/portal_project/tests/test_access_rights.py +++ b/addons/portal_project/tests/test_access_rights.py @@ -78,7 +78,7 @@ class TestPortalProjectBase(TestProjectBase): class TestPortalProject(TestPortalProjectBase): - @mute_logger('openerp.addons.base.ir.ir_model', 'openerp.osv.orm') + @mute_logger('openerp.addons.base.ir.ir_model', 'openerp.models') def test_00_project_access_rights(self): """ Test basic project access rights, for project and portal_project """ cr, uid, pigs_id = self.cr, self.uid, self.project_pigs_id @@ -88,7 +88,7 @@ class TestPortalProject(TestPortalProjectBase): # ---------------------------------------- # Do: Alfred reads project -> ok (employee ok public) - self.project_project.read(cr, self.user_projectuser_id, pigs_id, ['name']) + self.project_project.read(cr, self.user_projectuser_id, [pigs_id], ['state']) # Test: all project tasks visible task_ids = self.project_task.search(cr, self.user_projectuser_id, [('project_id', '=', pigs_id)]) test_task_ids = set([self.task_1_id, self.task_2_id, self.task_3_id, self.task_4_id, self.task_5_id, self.task_6_id]) @@ -100,7 +100,7 @@ class TestPortalProject(TestPortalProjectBase): self.project_task.write(cr, self.user_projectuser_id, task_ids, {'description': 'TestDescription'}) # Do: Bert reads project -> crash, no group - self.assertRaises(AccessError, self.project_project.read, cr, self.user_none_id, pigs_id, ['name']) + self.assertRaises(AccessError, self.project_project.read, cr, self.user_none_id, [pigs_id], ['state']) # Test: no project task visible self.assertRaises(AccessError, self.project_task.search, cr, self.user_none_id, [('project_id', '=', pigs_id)]) # Test: no project task 
readable @@ -109,7 +109,7 @@ class TestPortalProject(TestPortalProjectBase): self.assertRaises(AccessError, self.project_task.write, cr, self.user_none_id, task_ids, {'description': 'TestDescription'}) # Do: Chell reads project -> ok (portal ok public) - self.project_project.read(cr, self.user_portal_id, pigs_id, ['name']) + self.project_project.read(cr, self.user_portal_id, [pigs_id], ['state']) # Test: all project tasks visible task_ids = self.project_task.search(cr, self.user_portal_id, [('project_id', '=', pigs_id)]) self.assertEqual(set(task_ids), test_task_ids, @@ -120,7 +120,7 @@ class TestPortalProject(TestPortalProjectBase): self.assertRaises(AccessError, self.project_task.write, cr, self.user_portal_id, task_ids, {'description': 'TestDescription'}) # Do: Donovan reads project -> ok (public) - self.project_project.read(cr, self.user_public_id, pigs_id, ['name']) + self.project_project.read(cr, self.user_public_id, [pigs_id], ['state']) # Test: all project tasks visible task_ids = self.project_task.search(cr, self.user_public_id, [('project_id', '=', pigs_id)]) self.assertEqual(set(task_ids), test_task_ids, @@ -134,16 +134,17 @@ class TestPortalProject(TestPortalProjectBase): # CASE2: portal project # ---------------------------------------- self.project_project.write(cr, uid, [pigs_id], {'privacy_visibility': 'portal'}) + self.project_project.invalidate_cache(cr, uid) # Do: Alfred reads project -> ok (employee ok public) - self.project_project.read(cr, self.user_projectuser_id, pigs_id, ['name']) + self.project_project.read(cr, self.user_projectuser_id, [pigs_id], ['state']) # Test: all project tasks visible task_ids = self.project_task.search(cr, self.user_projectuser_id, [('project_id', '=', pigs_id)]) self.assertEqual(set(task_ids), test_task_ids, 'access rights: project user cannot see all tasks of a portal project') # Do: Bert reads project -> crash, no group - self.assertRaises(AccessError, self.project_project.read, cr, self.user_none_id, pigs_id, 
['name']) + self.assertRaises(AccessError, self.project_project.read, cr, self.user_none_id, [pigs_id], ['state']) # Test: no project task searchable self.assertRaises(AccessError, self.project_task.search, cr, self.user_none_id, [('project_id', '=', pigs_id)]) @@ -151,7 +152,7 @@ class TestPortalProject(TestPortalProjectBase): self.project_task.message_subscribe_users(cr, self.user_projectuser_id, [self.task_1_id, self.task_3_id], [self.user_portal_id]) # Do: Chell reads project -> ok (portal ok public) - self.project_project.read(cr, self.user_portal_id, pigs_id, ['name']) + self.project_project.read(cr, self.user_portal_id, [pigs_id], ['state']) # Test: only followed project tasks visible + assigned task_ids = self.project_task.search(cr, self.user_portal_id, [('project_id', '=', pigs_id)]) test_task_ids = set([self.task_1_id, self.task_3_id, self.task_5_id]) @@ -159,7 +160,7 @@ class TestPortalProject(TestPortalProjectBase): 'access rights: portal user should see the followed tasks of a portal project') # Do: Donovan reads project -> ko (public ko portal) - self.assertRaises(except_orm, self.project_project.read, cr, self.user_public_id, pigs_id, ['name']) + self.assertRaises(except_orm, self.project_project.read, cr, self.user_public_id, [pigs_id], ['state']) # Test: no project task visible task_ids = self.project_task.search(cr, self.user_public_id, [('project_id', '=', pigs_id)]) self.assertFalse(task_ids, 'access rights: public user should not see tasks of a portal project') @@ -171,9 +172,10 @@ class TestPortalProject(TestPortalProjectBase): # CASE3: employee project # ---------------------------------------- self.project_project.write(cr, uid, [pigs_id], {'privacy_visibility': 'employees'}) + self.project_project.invalidate_cache(cr, uid) # Do: Alfred reads project -> ok (employee ok employee) - self.project_project.read(cr, self.user_projectuser_id, pigs_id, ['name']) + self.project_project.read(cr, self.user_projectuser_id, [pigs_id], ['state']) # Test: 
all project tasks visible task_ids = self.project_task.search(cr, self.user_projectuser_id, [('project_id', '=', pigs_id)]) test_task_ids = set([self.task_1_id, self.task_2_id, self.task_3_id, self.task_4_id, self.task_5_id, self.task_6_id]) @@ -181,16 +183,16 @@ class TestPortalProject(TestPortalProjectBase): 'access rights: project user cannot see all tasks of an employees project') # Do: Bert reads project -> crash, no group - self.assertRaises(AccessError, self.project_project.read, cr, self.user_none_id, pigs_id, ['name']) + self.assertRaises(AccessError, self.project_project.read, cr, self.user_none_id, [pigs_id], ['state']) # Do: Chell reads project -> ko (portal ko employee) - self.assertRaises(except_orm, self.project_project.read, cr, self.user_portal_id, pigs_id, ['name']) + self.assertRaises(except_orm, self.project_project.read, cr, self.user_portal_id, [pigs_id], ['state']) # Test: no project task visible + assigned task_ids = self.project_task.search(cr, self.user_portal_id, [('project_id', '=', pigs_id)]) self.assertFalse(task_ids, 'access rights: portal user should not see tasks of an employees project, even if assigned') # Do: Donovan reads project -> ko (public ko employee) - self.assertRaises(except_orm, self.project_project.read, cr, self.user_public_id, pigs_id, ['name']) + self.assertRaises(except_orm, self.project_project.read, cr, self.user_public_id, [pigs_id], ['state']) # Test: no project task visible task_ids = self.project_task.search(cr, self.user_public_id, [('project_id', '=', pigs_id)]) self.assertFalse(task_ids, 'access rights: public user should not see tasks of an employees project') @@ -199,9 +201,10 @@ class TestPortalProject(TestPortalProjectBase): # CASE4: followers project # ---------------------------------------- self.project_project.write(cr, uid, [pigs_id], {'privacy_visibility': 'followers'}) + self.project_project.invalidate_cache(cr, uid) # Do: Alfred reads project -> ko (employee ko followers) - 
self.assertRaises(except_orm, self.project_project.read, cr, self.user_projectuser_id, pigs_id, ['name']) + self.assertRaises(except_orm, self.project_project.read, cr, self.user_projectuser_id, [pigs_id], ['state']) # Test: no project task visible task_ids = self.project_task.search(cr, self.user_projectuser_id, [('project_id', '=', pigs_id)]) test_task_ids = set([self.task_4_id]) @@ -209,10 +212,10 @@ class TestPortalProject(TestPortalProjectBase): 'access rights: employee user should not see tasks of a not-followed followers project, only assigned') # Do: Bert reads project -> crash, no group - self.assertRaises(AccessError, self.project_project.read, cr, self.user_none_id, pigs_id, ['name']) + self.assertRaises(AccessError, self.project_project.read, cr, self.user_none_id, [pigs_id], ['state']) # Do: Chell reads project -> ko (portal ko employee) - self.assertRaises(except_orm, self.project_project.read, cr, self.user_portal_id, pigs_id, ['name']) + self.assertRaises(except_orm, self.project_project.read, cr, self.user_portal_id, [pigs_id], ['state']) # Test: no project task visible task_ids = self.project_task.search(cr, self.user_portal_id, [('project_id', '=', pigs_id)]) test_task_ids = set([self.task_5_id]) @@ -220,7 +223,7 @@ class TestPortalProject(TestPortalProjectBase): 'access rights: portal user should not see tasks of a not-followed followers project, only assigned') # Do: Donovan reads project -> ko (public ko employee) - self.assertRaises(except_orm, self.project_project.read, cr, self.user_public_id, pigs_id, ['name']) + self.assertRaises(except_orm, self.project_project.read, cr, self.user_public_id, [pigs_id], ['state']) # Test: no project task visible task_ids = self.project_task.search(cr, self.user_public_id, [('project_id', '=', pigs_id)]) self.assertFalse(task_ids, 'access rights: public user should not see tasks of a followers project') @@ -230,7 +233,7 @@ class TestPortalProject(TestPortalProjectBase): 
self.project_task.message_subscribe_users(cr, self.user_manager_id, [self.task_1_id, self.task_3_id], [self.user_portal_id, self.user_projectuser_id]) # Do: Alfred reads project -> ok (follower ok followers) - self.project_project.read(cr, self.user_projectuser_id, pigs_id, ['name']) + self.project_project.read(cr, self.user_projectuser_id, [pigs_id], ['state']) # Test: followed + assigned tasks visible task_ids = self.project_task.search(cr, self.user_projectuser_id, [('project_id', '=', pigs_id)]) test_task_ids = set([self.task_1_id, self.task_3_id, self.task_4_id]) @@ -238,7 +241,7 @@ class TestPortalProject(TestPortalProjectBase): 'access rights: employee user should not see followed + assigned tasks of a follower project') # Do: Chell reads project -> ok (follower ok follower) - self.project_project.read(cr, self.user_portal_id, pigs_id, ['name']) + self.project_project.read(cr, self.user_portal_id, [pigs_id], ['state']) # Test: followed + assigned tasks visible task_ids = self.project_task.search(cr, self.user_portal_id, [('project_id', '=', pigs_id)]) test_task_ids = set([self.task_1_id, self.task_3_id, self.task_5_id]) @@ -246,4 +249,4 @@ class TestPortalProject(TestPortalProjectBase): 'access rights: employee user should not see followed + assigned tasks of a follower project') # Do: Donovan reads project -> ko (public ko follower even if follower) - self.assertRaises(except_orm, self.project_project.read, cr, self.user_public_id, pigs_id, ['name']) + self.assertRaises(except_orm, self.project_project.read, cr, self.user_public_id, [pigs_id], ['state']) diff --git a/addons/portal_project_issue/tests/test_access_rights.py b/addons/portal_project_issue/tests/test_access_rights.py index 7133c029d1a108448407f5c45efd6043e00e72ae..6ca2883970d312b7292428379b3649c036dde3c3 100644 --- a/addons/portal_project_issue/tests/test_access_rights.py +++ b/addons/portal_project_issue/tests/test_access_rights.py @@ -50,7 +50,7 @@ class 
TestPortalProjectBase(TestPortalProjectBase): class TestPortalIssue(TestPortalProjectBase): - @mute_logger('openerp.addons.base.ir.ir_model', 'openerp.osv.orm') + @mute_logger('openerp.addons.base.ir.ir_model', 'openerp.models') def test_00_project_access_rights(self): """ Test basic project access rights, for project and portal_project """ cr, uid, pigs_id = self.cr, self.uid, self.project_pigs_id diff --git a/addons/procurement/procurement.py b/addons/procurement/procurement.py index 1fc25171e7fecd62753f2f1fbcdf4e32992cf397..df8534e703d65c4b666cef575335573c4d453760 100644 --- a/addons/procurement/procurement.py +++ b/addons/procurement/procurement.py @@ -134,7 +134,7 @@ class procurement_order(osv.osv): ('exception', 'Exception'), ('running', 'Running'), ('done', 'Done') - ], 'Status', required=True, track_visibility='onchange'), + ], 'Status', required=True, track_visibility='onchange', copy=False), } _defaults = { diff --git a/addons/procurement/wizard/schedulers_all.py b/addons/procurement/wizard/schedulers_all.py index 34d4758c5d5da4dda4a30af7af8e003b535adae8..47f4dac440b31d1a61d7959575a3d2bc39d24926 100644 --- a/addons/procurement/wizard/schedulers_all.py +++ b/addons/procurement/wizard/schedulers_all.py @@ -22,6 +22,7 @@ import threading from openerp.osv import osv +from openerp.api import Environment class procurement_compute_all(osv.osv_memory): _name = 'procurement.order.compute.all' @@ -35,17 +36,18 @@ class procurement_compute_all(osv.osv_memory): @param ids: List of IDs selected @param context: A standard dictionary """ - proc_obj = self.pool.get('procurement.order') - #As this function is in a new thread, i need to open a new cursor, because the old one may be closed - - new_cr = self.pool.cursor() - user = self.pool.get('res.users').browse(new_cr, uid, uid, context=context) - comps = [x.id for x in user.company_ids] - for comp in comps: - proc_obj.run_scheduler(new_cr, uid, use_new_cursor=new_cr.dbname, company_id = comp, context=context) - #close 
the new cursor - new_cr.close() - return {} + with Environment.manage(): + proc_obj = self.pool.get('procurement.order') + #As this function is in a new thread, i need to open a new cursor, because the old one may be closed + + new_cr = self.pool.cursor() + user = self.pool.get('res.users').browse(new_cr, uid, uid, context=context) + comps = [x.id for x in user.company_ids] + for comp in comps: + proc_obj.run_scheduler(new_cr, uid, use_new_cursor=new_cr.dbname, company_id = comp, context=context) + #close the new cursor + new_cr.close() + return {} def procure_calculation(self, cr, uid, ids, context=None): """ diff --git a/addons/procurement_jit/mrp_jit.xml b/addons/procurement_jit/mrp_jit.xml deleted file mode 100644 index c00393ac04d7729c2cfc66c4f49d1e6f76ffae54..0000000000000000000000000000000000000000 --- a/addons/procurement_jit/mrp_jit.xml +++ /dev/null @@ -1,14 +0,0 @@ -<?xml version="1.0"?> -<openerp> -<data> - - <record model="workflow.transition" id="trans_direct_confirm_to_wait"> - <!-- Duplicates the transition between the act_confirm and act_confirm_wait activites - but, this time, without the signal --> - <field name="act_from" ref="procurement.act_confirm"/> - <field name="act_to" ref="procurement.act_confirm_wait"/> - <field name="condition">check_conditions_confirm2wait()</field> - </record> - -</data> -</openerp> diff --git a/addons/product/pricelist.py b/addons/product/pricelist.py index 069c331c126cc77291413ee16973cce3fbac615f..3e00bd417efb48f35daa448dda0d2ade027c95a6 100644 --- a/addons/product/pricelist.py +++ b/addons/product/pricelist.py @@ -101,7 +101,7 @@ class product_pricelist(osv.osv): 'name': fields.char('Pricelist Name', required=True, translate=True), 'active': fields.boolean('Active', help="If unchecked, it will allow you to hide the pricelist without removing it."), 'type': fields.selection(_pricelist_type_get, 'Pricelist Type', required=True), - 'version_id': fields.one2many('product.pricelist.version', 'pricelist_id', 'Pricelist 
Versions'), + 'version_id': fields.one2many('product.pricelist.version', 'pricelist_id', 'Pricelist Versions', copy=True), 'currency_id': fields.many2one('res.currency', 'Currency', required=True), 'company_id': fields.many2one('res.company', 'Company'), } @@ -167,19 +167,19 @@ class product_pricelist(osv.osv): "currency_id": _get_currency } - def price_get_multi(self, cr, uid, pricelist_ids, products_by_qty_by_partner, context=None): + def price_get_multi(self, cr, uid, ids, products_by_qty_by_partner, context=None): """multi products 'price_get'. - @param pricelist_ids: + @param ids: @param products_by_qty: @param partner: @param context: { 'date': Date of the pricelist (%Y-%m-%d),} @return: a dict of dict with product_id as key and a dict 'price by pricelist' as value """ - if not pricelist_ids: - pricelist_ids = self.pool.get('product.pricelist').search(cr, uid, [], context=context) + if not ids: + ids = self.pool.get('product.pricelist').search(cr, uid, [], context=context) results = {} - for pricelist in self.browse(cr, uid, pricelist_ids, context=context): + for pricelist in self.browse(cr, uid, ids, context=context): subres = self._price_get_multi(cr, uid, pricelist, products_by_qty_by_partner, context=context) for product_id,price in subres.items(): results.setdefault(product_id, {}) @@ -247,14 +247,14 @@ class product_pricelist(osv.osv): if rule.min_quantity and qty<rule.min_quantity: continue if is_product_template: - if rule.product_tmpl_id and product.id<>rule.product_tmpl_id.id: + if rule.product_tmpl_id and product.id != rule.product_tmpl_id.id: continue if rule.product_id: continue else: - if rule.product_tmpl_id and product.product_tmpl_id.id<>rule.product_tmpl_id.id: + if rule.product_tmpl_id and product.product_tmpl_id.id != rule.product_tmpl_id.id: continue - if rule.product_id and product.id<>rule.product_id.id: + if rule.product_id and product.id != rule.product_id.id: continue if rule.categ_id: @@ -279,7 +279,7 @@ class 
product_pricelist(osv.osv): context=context) elif rule.base == -2: for seller in product.seller_ids: - if (not partner) or (seller.name.id<>partner): + if (not partner) or (seller.name.id != partner): continue qty_in_seller_uom = qty from_uom = context.get('uom') or product.uom_id.id @@ -325,7 +325,7 @@ class product_pricelist(osv.osv): def price_get(self, cr, uid, ids, prod_id, qty, partner=None, context=None): product = self.pool.get('product.product').browse(cr, uid, prod_id, context=context) - res_multi = self.price_get_multi(cr, uid, pricelist_ids=ids, products_by_qty_by_partner=[(product, qty, partner)], context=context) + res_multi = self.price_get_multi(cr, uid, ids, products_by_qty_by_partner=[(product, qty, partner)], context=context) res = res_multi[prod_id] return res @@ -340,9 +340,9 @@ class product_pricelist_version(osv.osv): 'active': fields.boolean('Active', help="When a version is duplicated it is set to non active, so that the " \ "dates do not overlaps with original version. 
You should change the dates " \ - "and reactivate the pricelist"), + "and reactivate the pricelist", copy=False), 'items_id': fields.one2many('product.pricelist.item', - 'price_version_id', 'Price List Items', required=True), + 'price_version_id', 'Price List Items', required=True, copy=True), 'date_start': fields.date('Start Date', help="First valid date for the version."), 'date_end': fields.date('End Date', help="Last valid date for the version."), 'company_id': fields.related('pricelist_id','company_id',type='many2one', @@ -352,12 +352,6 @@ class product_pricelist_version(osv.osv): 'active': lambda *a: 1, } - # We desactivate duplicated pricelists, so that dates do not overlap - def copy(self, cr, uid, id, default=None, context=None): - if not default: default= {} - default['active'] = False - return super(product_pricelist_version, self).copy(cr, uid, id, default, context) - def _check_date(self, cursor, user, ids, context=None): for pricelist_version in self.browse(cursor, user, ids, context=context): if not pricelist_version.active: diff --git a/addons/product/product.py b/addons/product/product.py index d90165ae0224d48c6c9ee0b426f25c4eebc0230c..3d4d2350d1a433c7b69bf2d57ccc0f50ab2c5ad2 100644 --- a/addons/product/product.py +++ b/addons/product/product.py @@ -327,7 +327,7 @@ class product_attribute(osv.osv): _description = "Product Attribute" _columns = { 'name': fields.char('Name', translate=True, required=True), - 'value_ids': fields.one2many('product.attribute.value', 'attribute_id', 'Values'), + 'value_ids': fields.one2many('product.attribute.value', 'attribute_id', 'Values', copy=True), } class product_attribute_value(osv.osv): @@ -414,9 +414,9 @@ class product_template(osv.osv): return self.write(cr, uid, [id], {'image': tools.image_resize_image_big(value)}, context=context) def _is_product_variant(self, cr, uid, ids, name, arg, context=None): - return self.is_product_variant(cr, uid, ids, name, arg, context=context) + return 
self._is_product_variant_impl(cr, uid, ids, name, arg, context=context) - def is_product_variant(self, cr, uid, ids, name, arg, context=None): + def _is_product_variant_impl(self, cr, uid, ids, name, arg, context=None): prod = self.pool.get('product.product') res = dict.fromkeys(ids, False) ctx = dict(context, active_test=True) @@ -848,7 +848,7 @@ class product_product(osv.osv): res[p.id] = (data['code'] and ('['+data['code']+'] ') or '') + (data['name'] or '') return res - def is_product_variant(self, cr, uid, ids, name, arg, context=None): + def _is_product_variant_impl(self, cr, uid, ids, name, arg, context=None): return dict.fromkeys(ids, True) def _get_name_template_ids(self, cr, uid, ids, context=None): @@ -1157,7 +1157,7 @@ class product_supplierinfo(osv.osv): 'qty': fields.function(_calc_qty, store=True, type='float', string='Quantity', multi="qty", help="This is a quantity which is converted into Default Unit of Measure."), 'product_tmpl_id' : fields.many2one('product.template', 'Product Template', required=True, ondelete='cascade', select=True, oldname='product_id'), 'delay' : fields.integer('Delivery Lead Time', required=True, help="Lead time in days between the confirmation of the purchase order and the reception of the products in your warehouse. 
Used by the scheduler for automatic computation of the purchase order planning."), - 'pricelist_ids': fields.one2many('pricelist.partnerinfo', 'suppinfo_id', 'Supplier Pricelist'), + 'pricelist_ids': fields.one2many('pricelist.partnerinfo', 'suppinfo_id', 'Supplier Pricelist', copy=True), 'company_id':fields.many2one('res.company','Company',select=1), } _defaults = { diff --git a/addons/product_expiry/product_expiry.py b/addons/product_expiry/product_expiry.py index 6979344bbccfbc9edab62018bec4021384482f97..392e574a0bb1d1e26f00c4e7f42235ed1a2a29fb 100644 --- a/addons/product_expiry/product_expiry.py +++ b/addons/product_expiry/product_expiry.py @@ -62,8 +62,7 @@ class stock_production_lot(osv.osv): for f in ('life_date', 'use_date', 'removal_date', 'alert_date'): if not getattr(obj, f): towrite.append(f) - if context is None: - context = {} + context = dict(context or {}) context['product_id'] = obj.product_id.id self.write(cr, uid, [obj.id], self.default_get(cr, uid, towrite, context=context)) return newid diff --git a/addons/product_margin/wizard/product_margin.py b/addons/product_margin/wizard/product_margin.py index 7a56ed2e13d9048004b4444d26c75848ca2c626c..c318d66c0ef14a2821d36ef3adf7ffcb264139ea 100644 --- a/addons/product_margin/wizard/product_margin.py +++ b/addons/product_margin/wizard/product_margin.py @@ -24,23 +24,26 @@ import time from openerp.osv import fields, osv from openerp.tools.translate import _ + class product_margin(osv.osv_memory): _name = 'product.margin' _description = 'Product Margin' _columns = { 'from_date': fields.date('From'), 'to_date': fields.date('To'), - 'invoice_state':fields.selection([ - ('paid','Paid'), - ('open_paid','Open and Paid'), - ('draft_open_paid','Draft, Open and Paid'), - ],'Invoice State', select=True, required=True), + 'invoice_state': fields.selection([ + ('paid', 'Paid'), + ('open_paid', 'Open and Paid'), + ('draft_open_paid', 'Draft, Open and Paid'), + ], 'Invoice State', select=True, required=True), } + 
_defaults = { 'from_date': time.strftime('%Y-01-01'), 'to_date': time.strftime('%Y-12-31'), 'invoice_state': "open_paid", } + def action_open_window(self, cr, uid, ids, context=None): """ @param cr: the current row, from the database cursor, @@ -49,36 +52,44 @@ class product_margin(osv.osv_memory): @return: """ - if context is None: - context = {} - mod_obj = self.pool.get('ir.model.data') - result = mod_obj._get_id(cr, uid, 'product', 'product_search_form_view') - id = mod_obj.read(cr, uid, result, ['res_id'], context=context) - cr.execute('select id,name from ir_ui_view where name=%s and type=%s', ('product.margin.graph', 'graph')) - view_res3 = cr.fetchone()[0] - cr.execute('select id,name from ir_ui_view where name=%s and type=%s', ('product.margin.form.inherit', 'form')) - view_res2 = cr.fetchone()[0] - cr.execute('select id,name from ir_ui_view where name=%s and type=%s', ('product.margin.tree', 'tree')) - view_res = cr.fetchone()[0] + context = dict(context or {}) + + def ref(module, xml_id): + proxy = self.pool.get('ir.model.data') + return proxy.get_object_reference(cr, uid, module, xml_id) + + model, search_view_id = ref('product', 'product_search_form_view') + model, graph_view_id = ref('product_margin', 'view_product_margin_graph') + model, form_view_id = ref('product_margin', 'view_product_margin_form') + model, tree_view_id = ref('product_margin', 'view_product_margin_tree') #get the current product.margin object to obtain the values from it - product_margin_obj = self.browse(cr, uid, ids, context=context)[0] + records = self.browse(cr, uid, ids, context=context) + record = records[0] + + context.update(invoice_state=record.invoice_state) + + if record.from_date: + context.update(date_from=record.from_date) + + if record.to_date: + context.update(date_to=record.to_date) - context.update(invoice_state = product_margin_obj.invoice_state) - if product_margin_obj.from_date: - context.update(date_from = product_margin_obj.from_date) - if 
product_margin_obj.to_date: - context.update(date_to = product_margin_obj.to_date) + views = [ + (tree_view_id, 'tree'), + (form_view_id, 'form'), + (graph_view_id, 'graph') + ] return { 'name': _('Product Margins'), 'context': context, 'view_type': 'form', "view_mode": 'tree,form,graph', - 'res_model':'product.product', + 'res_model': 'product.product', 'type': 'ir.actions.act_window', - 'views': [(view_res,'tree'), (view_res2,'form'), (view_res3,'graph')], + 'views': views, 'view_id': False, - 'search_view_id': id['res_id'] + 'search_view_id': search_view_id, } diff --git a/addons/product_visible_discount/product_visible_discount.py b/addons/product_visible_discount/product_visible_discount.py index 605a71e8b80b3d801cef8761264b07d4ffdca011..1c650e247daf1eb062c4676422637bb4714e1962 100644 --- a/addons/product_visible_discount/product_visible_discount.py +++ b/addons/product_visible_discount/product_visible_discount.py @@ -49,7 +49,7 @@ class sale_order_line(osv.osv): field_name = 'list_price' product = product_obj.browse(cr, uid, product_id, context) - product_read = product_obj.read(cr, uid, product_id, [field_name], context=context) + product_read = product_obj.read(cr, uid, [product_id], [field_name], context=context)[0] factor = 1.0 if uom and uom != product.uom_id.id: diff --git a/addons/project/project.py b/addons/project/project.py index f95f3e4517c242da6e846bb100a0a9528e4f1f45..78c3904ec07046e8797408654107e057d8c9a30e 100644 --- a/addons/project/project.py +++ b/addons/project/project.py @@ -273,7 +273,13 @@ class project(osv.osv): "- Employees Only: employees see all tasks or issues\n" "- Followers Only: employees see only the followed tasks or issues; if portal\n" " is activated, portal users see the followed tasks or issues."), - 'state': fields.selection([('template', 'Template'),('draft','New'),('open','In Progress'), ('cancelled', 'Cancelled'),('pending','Pending'),('close','Closed')], 'Status', required=True,), + 'state': 
fields.selection([('template', 'Template'), + ('draft','New'), + ('open','In Progress'), + ('cancelled', 'Cancelled'), + ('pending','Pending'), + ('close','Closed')], + 'Status', required=True, copy=False), 'doc_count': fields.function( _get_attached_docs, string="Number of documents attached", type='integer' ) @@ -332,36 +338,28 @@ class project(osv.osv): task_obj = self.pool.get('project.task') proj = self.browse(cr, uid, old_project_id, context=context) for task in proj.tasks: - map_task_id[task.id] = task_obj.copy(cr, uid, task.id, {}, context=context) + # preserve task name and stage, normally altered during copy + defaults = {'stage_id': task.stage_id.id, + 'name': task.name} + map_task_id[task.id] = task_obj.copy(cr, uid, task.id, defaults, context=context) self.write(cr, uid, [new_project_id], {'tasks':[(6,0, map_task_id.values())]}) task_obj.duplicate_task(cr, uid, map_task_id, context=context) return True def copy(self, cr, uid, id, default=None, context=None): - if context is None: - context = {} if default is None: default = {} - + context = dict(context or {}) context['active_test'] = False - default['state'] = 'open' - default['line_ids'] = [] - default['tasks'] = [] - - # Don't prepare (expensive) data to copy children (analytic accounts), - # they are discarded in analytic.copy(), and handled in duplicate_template() - default['child_ids'] = [] - proj = self.browse(cr, uid, id, context=context) - if not default.get('name', False): + if not default.get('name'): default.update(name=_("%s (copy)") % (proj.name)) res = super(project, self).copy(cr, uid, id, default, context) self.map_tasks(cr, uid, id, res, context=context) return res def duplicate_template(self, cr, uid, ids, context=None): - if context is None: - context = {} + context = dict(context or {}) data_obj = self.pool.get('ir.model.data') result = [] for proj in self.browse(cr, uid, ids, context=context): @@ -696,29 +694,11 @@ class task(osv.osv): def copy_data(self, cr, uid, id, 
default=None, context=None): if default is None: default = {} - default = default or {} - default.update({'work_ids':[], 'date_start': False, 'date_end': False, 'date_deadline': False}) - if not default.get('remaining_hours', False): - default['remaining_hours'] = float(self.read(cr, uid, id, ['planned_hours'])['planned_hours']) - default['active'] = True - if not default.get('name', False): - default['name'] = self.browse(cr, uid, id, context=context).name or '' - if not context.get('copy',False): - new_name = _("%s (copy)") % (default.get('name', '')) - default.update({'name':new_name}) + if not default.get('name'): + current = self.browse(cr, uid, id, context=context) + default['name'] = _("%s (copy)") % current.name return super(task, self).copy_data(cr, uid, id, default, context) - def copy(self, cr, uid, id, default=None, context=None): - if context is None: - context = {} - if default is None: - default = {} - if not context.get('copy', False): - stage = self._get_default_stage_id(cr, uid, context=context) - if stage: - default['stage_id'] = stage - return super(task, self).copy(cr, uid, id, default, context) - def _is_template(self, cr, uid, ids, field_name, arg, context=None): res = {} for task in self.browse(cr, uid, ids, context=context): @@ -741,7 +721,7 @@ class task(osv.osv): 'priority': fields.selection([('0','Low'), ('1','Normal'), ('2','High')], 'Priority', select=True), 'sequence': fields.integer('Sequence', select=True, help="Gives the sequence order when displaying a list of tasks."), 'stage_id': fields.many2one('project.task.type', 'Stage', track_visibility='onchange', select=True, - domain="[('project_ids', '=', project_id)]"), + domain="[('project_ids', '=', project_id)]", copy=False), 'categ_ids': fields.many2many('project.category', string='Tags'), 'kanban_state': fields.selection([('normal', 'In Progress'),('blocked', 'Blocked'),('done', 'Ready for next stage')], 'Kanban State', track_visibility='onchange', @@ -749,13 +729,13 @@ class 
task(osv.osv): " * Normal is the default situation\n" " * Blocked indicates something is preventing the progress of this task\n" " * Ready for next stage indicates the task is ready to be pulled to the next stage", - required=False), + required=False, copy=False), 'create_date': fields.datetime('Create Date', readonly=True, select=True), 'write_date': fields.datetime('Last Modification Date', readonly=True, select=True), #not displayed in the view but it might be useful with base_action_rule module (and it needs to be defined first for that) - 'date_start': fields.datetime('Starting Date',select=True), - 'date_end': fields.datetime('Ending Date',select=True), - 'date_deadline': fields.date('Deadline',select=True), - 'date_last_stage_update': fields.datetime('Last Stage Update', select=True), + 'date_start': fields.datetime('Starting Date', select=True, copy=False), + 'date_end': fields.datetime('Ending Date', select=True, copy=False), + 'date_deadline': fields.date('Deadline', select=True, copy=False), + 'date_last_stage_update': fields.datetime('Last Stage Update', select=True, copy=False), 'project_id': fields.many2one('project.project', 'Project', ondelete='set null', select=True, track_visibility='onchange', change_default=True), 'parent_ids': fields.many2many('project.task', 'project_task_parent_rel', 'task_id', 'parent_id', 'Parent Tasks'), 'child_ids': fields.many2many('project.task', 'project_task_parent_rel', 'parent_id', 'task_id', 'Delegated Tasks'), @@ -888,6 +868,7 @@ class task(osv.osv): return res def get_empty_list_help(self, cr, uid, help, context=None): + context = dict(context or {}) context['empty_list_help_id'] = context.get('default_project_id') context['empty_list_help_model'] = 'project.project' context['empty_list_help_document_name'] = _("tasks") @@ -1011,8 +992,7 @@ class task(osv.osv): # ------------------------------------------------ def create(self, cr, uid, vals, context=None): - if context is None: - context = {} + context = 
dict(context or {}) # for default stage if vals.get('project_id') and not context.get('default_project_id'): @@ -1044,7 +1024,7 @@ class task(osv.osv): new_stage = vals.get('stage_id') vals_reset_kstate = dict(vals, kanban_state='normal') for t in self.browse(cr, uid, ids, context=context): - write_vals = vals_reset_kstate if t.stage_id != new_stage else vals + write_vals = vals_reset_kstate if t.stage_id.id != new_stage else vals super(task, self).write(cr, uid, [t.id], write_vals, context=context) result = True else: @@ -1153,24 +1133,29 @@ class project_work(osv.osv): } _order = "date desc" - def create(self, cr, uid, vals, *args, **kwargs): + def create(self, cr, uid, vals, context=None): if 'hours' in vals and (not vals['hours']): vals['hours'] = 0.00 if 'task_id' in vals: cr.execute('update project_task set remaining_hours=remaining_hours - %s where id=%s', (vals.get('hours',0.0), vals['task_id'])) - return super(project_work,self).create(cr, uid, vals, *args, **kwargs) + self.pool.get('project.task').invalidate_cache(cr, uid, ['remaining_hours'], [vals['task_id']], context=context) + return super(project_work,self).create(cr, uid, vals, context=context) def write(self, cr, uid, ids, vals, context=None): if 'hours' in vals and (not vals['hours']): vals['hours'] = 0.00 if 'hours' in vals: + task_obj = self.pool.get('project.task') for work in self.browse(cr, uid, ids, context=context): cr.execute('update project_task set remaining_hours=remaining_hours - %s + (%s) where id=%s', (vals.get('hours',0.0), work.hours, work.task_id.id)) + task_obj.invalidate_cache(cr, uid, ['remaining_hours'], [work.task_id.id], context=context) return super(project_work,self).write(cr, uid, ids, vals, context) def unlink(self, cr, uid, ids, *args, **kwargs): + task_obj = self.pool.get('project.task') for work in self.browse(cr, uid, ids): cr.execute('update project_task set remaining_hours=remaining_hours + %s where id=%s', (work.hours, work.task_id.id)) + 
task_obj.invalidate_cache(cr, uid, ['remaining_hours'], [work.task_id.id], context=context) return super(project_work,self).unlink(cr, uid, ids,*args, **kwargs) diff --git a/addons/project/project_view.xml b/addons/project/project_view.xml index c151464fb5c5659898ec97ae4b739ea1bab8f984..41e100cec1f467adfcfe18622b21ef8ef2e9f2c3 100644 --- a/addons/project/project_view.xml +++ b/addons/project/project_view.xml @@ -8,8 +8,16 @@ groups="group_project_manager,group_project_user" sequence="40"/> - <menuitem id="menu_project_management" name="Project" parent="base.menu_main_pm" sequence="1"/> - <menuitem id="base.menu_definitions" name="Configuration" parent="base.menu_main_pm" sequence="60"/> + <menuitem name="Project" + id="menu_project_management" + parent="base.menu_main_pm" + sequence="1"/> + + <menuitem name="Configuration" + id="base.menu_definitions" + parent="base.menu_main_pm" + groups="group_project_manager" + sequence="60"/> <record id="view_task_search_form" model="ir.ui.view"> <field name="name">project.task.search.form</field> diff --git a/addons/project/tests/test_project_flow.py b/addons/project/tests/test_project_flow.py index ecbe66a0cead3fab730c175fbf852729c501b3ec..58263f6ea8a72cbb1da78a7408270cf241c11866 100644 --- a/addons/project/tests/test_project_flow.py +++ b/addons/project/tests/test_project_flow.py @@ -51,7 +51,7 @@ Integrator at Agrolait""" class TestProjectFlow(TestProjectBase): - @mute_logger('openerp.addons.base.ir.ir_model', 'openerp.osv.orm') + @mute_logger('openerp.addons.base.ir.ir_model', 'openerp.models') def test_00_project_process(self): """ Testing project management """ cr, uid, user_projectuser_id, user_projectmanager_id, project_pigs_id = self.cr, self.uid, self.user_projectuser_id, self.user_projectmanager_id, self.project_pigs_id @@ -131,8 +131,8 @@ class TestProjectFlow(TestProjectBase): # Test: one task created by mailgateway administrator self.assertEqual(len(frogs), 1, 'project: message_process: a new project.task should 
have been created') task = self.project_task.browse(cr, user_projectuser_id, frogs[0]) - res = self.project_task.perm_read(cr, uid, [task.id], details=False) - self.assertEqual(res[0].get('create_uid'), uid, + res = self.project_task.get_metadata(cr, uid, [task.id])[0].get('create_uid') or [None] + self.assertEqual(res[0], uid, 'project: message_process: task should have been created by uid as alias_user_id is False on the alias') # Test: messages self.assertEqual(len(task.message_ids), 3, diff --git a/addons/project/wizard/project_task_delegate.py b/addons/project/wizard/project_task_delegate.py index a0e137eda0dee70830ccdac96bdaa92408729e21..e3c935d4f5dd7ac53b6691fb71c6129f76ca96a1 100644 --- a/addons/project/wizard/project_task_delegate.py +++ b/addons/project/wizard/project_task_delegate.py @@ -125,7 +125,7 @@ class project_task_delegate(osv.osv_memory): action_model, action_id = models_data.get_object_reference(cr, uid, 'project', 'action_view_task') view_model, task_view_form_id = models_data.get_object_reference(cr, uid, 'project', 'view_task_form2') view_model, task_view_tree_id = models_data.get_object_reference(cr, uid, 'project', 'view_task_tree2') - action = self.pool[action_model].read(cr, uid, action_id, context=context) + action = self.pool[action_model].read(cr, uid, [action_id], context=context)[0] action['res_id'] = delegated_tasks[task_id] action['view_id'] = False action['views'] = [(task_view_form_id, 'form'), (task_view_tree_id, 'tree')] diff --git a/addons/project_issue/project_issue.py b/addons/project_issue/project_issue.py index 8b7c70e41e1de56a8454449256a3cb40aa7a1761..0c3ba5683fd81ee655bead72fde2677ff8b40743 100644 --- a/addons/project_issue/project_issue.py +++ b/addons/project_issue/project_issue.py @@ -264,7 +264,7 @@ class project_issue(osv.Model): 'version_id': fields.many2one('project.issue.version', 'Version'), 'stage_id': fields.many2one ('project.task.type', 'Stage', track_visibility='onchange', select=True, - 
domain="[('project_ids', '=', project_id)]"), + domain="[('project_ids', '=', project_id)]", copy=False), 'project_id': fields.many2one('project.project', 'Project', track_visibility='onchange', select=True), 'duration': fields.float('Duration'), 'task_id': fields.many2one('project.task', 'Task', domain="[('project_id','=',project_id)]"), @@ -306,7 +306,7 @@ class project_issue(osv.Model): } def copy(self, cr, uid, id, default=None, context=None): - issue = self.read(cr, uid, id, ['name'], context=context) + issue = self.read(cr, uid, [id], ['name'], context=context)[0] if not default: default = {} default = default.copy() @@ -315,8 +315,7 @@ class project_issue(osv.Model): context=context) def create(self, cr, uid, vals, context=None): - if context is None: - context = {} + context = dict(context or {}) if vals.get('project_id') and not context.get('default_project_id'): context['default_project_id'] = vals.get('project_id') @@ -353,6 +352,7 @@ class project_issue(osv.Model): return {'value': result} def get_empty_list_help(self, cr, uid, help, context=None): + context = dict(context or {}) context['empty_list_help_model'] = 'project.project' context['empty_list_help_id'] = context.get('default_project_id') context['empty_list_help_document_name'] = _("issues") @@ -437,9 +437,7 @@ class project_issue(osv.Model): """ if custom_values is None: custom_values = {} - if context is None: - context = {} - context['state_to'] = 'draft' + context = dict(context or {}, state_to='draft') defaults = { 'name': msg.get('subject') or _("No Subject"), 'email_from': msg.get('from'), diff --git a/addons/project_timesheet/project_timesheet.py b/addons/project_timesheet/project_timesheet.py index f6c2321d1f94ad9554107f87f25f83ef58e2484d..692249b3ff4d82c5d5a2e959265b6b677bfe6cb9 100644 --- a/addons/project_timesheet/project_timesheet.py +++ b/addons/project_timesheet/project_timesheet.py @@ -137,6 +137,7 @@ class project_work(osv.osv): amount = vals_line['unit_amount'] prod_id = 
vals_line['product_id'] unit = False + context = dict(context, no_store_function=False) timeline_id = timesheet_obj.create(cr, uid, vals_line, context=context) # Compute based on pricetype diff --git a/addons/purchase/purchase.py b/addons/purchase/purchase.py index 18f31c3bd58fcc35edd43662cf42ad9f3c914388..e8622de493d8421bc9c8e31255e845b9a59211d7 100644 --- a/addons/purchase/purchase.py +++ b/addons/purchase/purchase.py @@ -28,7 +28,7 @@ from openerp.tools.safe_eval import safe_eval as eval from openerp.osv import fields, osv from openerp.tools.translate import _ import openerp.addons.decimal_precision as dp -from openerp.osv.orm import browse_record, browse_null +from openerp.osv.orm import browse_record_list, browse_record, browse_null from openerp.tools import DEFAULT_SERVER_DATE_FORMAT, DEFAULT_SERVER_DATETIME_FORMAT, DATETIME_FORMATS_MAP class purchase_order(osv.osv): @@ -66,6 +66,7 @@ class purchase_order(osv.osv): (date_planned=%s or date_planned<%s)""", (value,po.id,po.minimum_planned_date,value)) cr.execute("""update purchase_order set minimum_planned_date=%s where id=%s""", (value, po.id)) + self.invalidate_cache(cr, uid, context=context) return True def _minimum_planned_date(self, cr, uid, ids, field_name, arg, context=None): @@ -194,14 +195,26 @@ class purchase_order(osv.osv): }, } _columns = { - 'name': fields.char('Order Reference', required=True, select=True, help="Unique number of the purchase order, computed automatically when the purchase order is created."), - 'origin': fields.char('Source Document', - help="Reference of the document that generated this purchase order request; a sales order or an internal procurement request." - ), - 'partner_ref': fields.char('Supplier Reference', states={'confirmed':[('readonly',True)], 'approved':[('readonly',True)],'done':[('readonly',True)]}, - help="Reference of the sales order or bid sent by your supplier. 
It's mainly used to do the matching when you receive the products as this reference is usually written on the delivery order sent by your supplier."), - 'date_order':fields.date('Order Date', required=True, states={'confirmed':[('readonly',True)], 'approved':[('readonly',True)]}, select=True, help="Date on which this document has been created."), - 'date_approve':fields.date('Date Approved', readonly=1, select=True, help="Date on which purchase order has been approved"), + 'name': fields.char('Order Reference', required=True, select=True, copy=False, + help="Unique number of the purchase order, " + "computed automatically when the purchase order is created."), + 'origin': fields.char('Source Document', copy=False, + help="Reference of the document that generated this purchase order " + "request; a sales order or an internal procurement request."), + 'partner_ref': fields.char('Supplier Reference', states={'confirmed':[('readonly',True)], + 'approved':[('readonly',True)], + 'done':[('readonly',True)]}, + copy=False, + help="Reference of the sales order or bid sent by your supplier. 
" + "It's mainly used to do the matching when you receive the " + "products as this reference is usually written on the " + "delivery order sent by your supplier."), + 'date_order':fields.date('Order Date', required=True, states={'confirmed':[('readonly',True)], + 'approved':[('readonly',True)]}, + select=True, help="Date on which this document has been created.", + copy=False), + 'date_approve':fields.date('Date Approved', readonly=1, select=True, copy=False, + help="Date on which purchase order has been approved"), 'partner_id':fields.many2one('res.partner', 'Supplier', required=True, states={'confirmed':[('readonly',True)], 'approved':[('readonly',True)],'done':[('readonly',True)]}, change_default=True, track_visibility='always'), 'dest_address_id':fields.many2one('res.partner', 'Customer Address (Direct Delivery)', @@ -212,15 +225,31 @@ class purchase_order(osv.osv): 'location_id': fields.many2one('stock.location', 'Destination', required=True, domain=[('usage','<>','view')], states={'confirmed':[('readonly',True)], 'approved':[('readonly',True)],'done':[('readonly',True)]} ), 'pricelist_id':fields.many2one('product.pricelist', 'Pricelist', required=True, states={'confirmed':[('readonly',True)], 'approved':[('readonly',True)],'done':[('readonly',True)]}, help="The pricelist sets the currency used for this purchase order. It also computes the supplier price for the selected products/quantities."), 'currency_id': fields.many2one('res.currency','Currency', readonly=True, required=True,states={'draft': [('readonly', False)],'sent': [('readonly', False)]}), - 'state': fields.selection(STATE_SELECTION, 'Status', readonly=True, help="The status of the purchase order or the quotation request. A request for quotation is a purchase order in a 'Draft' status. Then the order has to be confirmed by the user, the status switch to 'Confirmed'. Then the supplier must confirm the order to change the status to 'Approved'. 
When the purchase order is paid and received, the status becomes 'Done'. If a cancel action occurs in the invoice or in the reception of goods, the status becomes in exception.", select=True), - 'order_line': fields.one2many('purchase.order.line', 'order_id', 'Order Lines', states={'approved':[('readonly',True)],'done':[('readonly',True)]}), - 'validator' : fields.many2one('res.users', 'Validated by', readonly=True), + 'state': fields.selection(STATE_SELECTION, 'Status', readonly=True, + help="The status of the purchase order or the quotation request. " + "A request for quotation is a purchase order in a 'Draft' status. " + "Then the order has to be confirmed by the user, the status switch " + "to 'Confirmed'. Then the supplier must confirm the order to change " + "the status to 'Approved'. When the purchase order is paid and " + "received, the status becomes 'Done'. If a cancel action occurs in " + "the invoice or in the reception of goods, the status becomes " + "in exception.", + select=True, copy=False), + 'order_line': fields.one2many('purchase.order.line', 'order_id', 'Order Lines', + states={'approved':[('readonly',True)], + 'done':[('readonly',True)]}, + copy=True), + 'validator' : fields.many2one('res.users', 'Validated by', readonly=True, copy=False), 'notes': fields.text('Terms and Conditions'), - 'invoice_ids': fields.many2many('account.invoice', 'purchase_invoice_rel', 'purchase_id', 'invoice_id', 'Invoices', help="Invoices generated for a purchase order"), + 'invoice_ids': fields.many2many('account.invoice', 'purchase_invoice_rel', 'purchase_id', + 'invoice_id', 'Invoices', copy=False, + help="Invoices generated for a purchase order"), 'picking_ids': fields.function(_get_picking_ids, method=True, type='one2many', relation='stock.picking', string='Picking List', help="This is the list of reception operations that have been generated for this purchase order."), - 'shipped':fields.boolean('Received', readonly=True, select=True, help="It indicates that a 
picking has been done"), + 'shipped':fields.boolean('Received', readonly=True, select=True, copy=False, + help="It indicates that a picking has been done"), 'shipped_rate': fields.function(_shipped_rate, string='Received Ratio', type='float'), - 'invoiced': fields.function(_invoiced, string='Invoice Received', type='boolean', help="It indicates that an invoice has been validated"), + 'invoiced': fields.function(_invoiced, string='Invoice Received', type='boolean', copy=False, + help="It indicates that an invoice has been validated"), 'invoiced_rate': fields.function(_invoiced_rate, string='Invoiced', type='float'), 'invoice_method': fields.selection([('manual','Based on Purchase Order lines'),('order','Based on generated draft invoice'),('picking','Based on incoming shipments')], 'Invoicing Control', required=True, readonly=True, states={'draft':[('readonly',False)], 'sent':[('readonly',False)]}, @@ -284,9 +313,7 @@ class purchase_order(osv.osv): def create(self, cr, uid, vals, context=None): if vals.get('name','/')=='/': vals['name'] = self.pool.get('ir.sequence').get(cr, uid, 'purchase.order') or '/' - if context is None: - context = {} - context.update({'mail_create_nolog': True}) + context = dict(context or {}, mail_create_nolog=True) order = super(purchase_order, self).create(cr, uid, vals, context=context) self.message_post(cr, uid, [order], body=_("RFQ created"), context=context) return order @@ -388,6 +415,7 @@ class purchase_order(osv.osv): ''' This function returns an action that display existing invoices of given sales order ids. It can either be a in a list or in a form view, if there is only one invoice to show. ''' + context = dict(context or {}) mod_obj = self.pool.get('ir.model.data') wizard_obj = self.pool.get('purchase.order.line_invoice') #compute the number of invoices to display @@ -597,8 +625,7 @@ class purchase_order(osv.osv): :return: ID of created invoice. 
:rtype: int """ - if context is None: - context = {} + context = dict(context or {}) inv_obj = self.pool.get('account.invoice') inv_line_obj = self.pool.get('account.invoice.line') @@ -620,7 +647,7 @@ class purchase_order(osv.osv): inv_line_data = self._prepare_inv_line(cr, uid, acc_id, po_line, context=context) inv_line_id = inv_line_obj.create(cr, uid, inv_line_data, context=context) inv_lines.append(inv_line_id) - po_line.write({'invoice_lines': [(4, inv_line_id)]}, context=context) + po_line.write({'invoice_lines': [(4, inv_line_id)]}) # get invoice data and create invoice inv_data = self._prepare_invoice(cr, uid, order, inv_lines, context=context) @@ -630,7 +657,7 @@ class purchase_order(osv.osv): inv_obj.button_compute(cr, uid, [inv_id], context=context, set_total=True) # Link this new invoice to related purchase order - order.write({'invoice_ids': [(4, inv_id)]}, context=context) + order.write({'invoice_ids': [(4, inv_id)]}) res = inv_id return res @@ -808,20 +835,6 @@ class purchase_order(osv.osv): self.message_post(cr, uid, ids, body=_("Products received"), context=context) return True - def copy(self, cr, uid, id, default=None, context=None): - if not default: - default = {} - default.update({ - 'state':'draft', - 'shipped':False, - 'invoiced':False, - 'invoice_ids': [], - 'origin': '', - 'partner_ref': '', - 'name': self.pool.get('ir.sequence').get(cr, uid, 'purchase.order'), - }) - return super(purchase_order, self).copy(cr, uid, id, default, context) - def do_merge(self, cr, uid, ids, context=None): """ To merge similar type of purchase orders. 
@@ -853,14 +866,13 @@ class purchase_order(osv.osv): field_val = field_val.id elif isinstance(field_val, browse_null): field_val = False - elif isinstance(field_val, list): + elif isinstance(field_val, browse_record_list): field_val = ((6, 0, tuple([v.id for v in field_val])),) list_key.append((field, field_val)) list_key.sort() return tuple(list_key) - if context is None: - context = {} + context = dict(context or {}) # Compute what the new orders should contain new_orders = {} @@ -958,13 +970,16 @@ class purchase_order_line(osv.osv): 'order_id': fields.many2one('purchase.order', 'Order Reference', select=True, required=True, ondelete='cascade'), 'account_analytic_id':fields.many2one('account.analytic.account', 'Analytic Account',), 'company_id': fields.related('order_id','company_id',type='many2one',relation='res.company',string='Company', store=True, readonly=True), - 'state': fields.selection([('draft', 'Draft'), ('confirmed', 'Confirmed'), ('done', 'Done'), ('cancel', 'Cancelled')], 'Status', required=True, readonly=True, + 'state': fields.selection([('draft', 'Draft'), ('confirmed', 'Confirmed'), ('done', 'Done'), ('cancel', 'Cancelled')], + 'Status', required=True, readonly=True, copy=False, help=' * The \'Draft\' status is set automatically when purchase order in draft status. \ \n* The \'Confirmed\' status is set automatically as confirm when purchase order in confirm status. \ \n* The \'Done\' status is set automatically when purchase order is set as done. 
\ \n* The \'Cancelled\' status is set automatically when user cancel purchase order.'), - 'invoice_lines': fields.many2many('account.invoice.line', 'purchase_order_line_invoice_rel', 'order_line_id', 'invoice_id', 'Invoice Lines', readonly=True), - 'invoiced': fields.boolean('Invoiced', readonly=True), + 'invoice_lines': fields.many2many('account.invoice.line', 'purchase_order_line_invoice_rel', + 'order_line_id', 'invoice_id', 'Invoice Lines', + readonly=True, copy=False), + 'invoiced': fields.boolean('Invoiced', readonly=True, copy=False), 'partner_id': fields.related('order_id','partner_id',string='Partner',readonly=True,type="many2one", relation="res.partner", store=True), 'date_order': fields.related('order_id','date_order',string='Order Date',readonly=True,type="date"), 'procurement_ids': fields.one2many('procurement.order', 'purchase_line_id', string='Associated procurements'), @@ -979,12 +994,6 @@ class purchase_order_line(osv.osv): _name = 'purchase.order.line' _description = 'Purchase Order Line' - def copy_data(self, cr, uid, id, default=None, context=None): - if not default: - default = {} - default.update({'state':'draft', 'move_ids':[], 'invoiced':0, 'invoice_lines':[], 'procurement_ids': False}) - return super(purchase_order_line, self).copy_data(cr, uid, id, default, context) - def unlink(self, cr, uid, ids, context=None): for line in self.browse(cr, uid, ids, context=context): if line.state not in ['draft', 'cancel']: diff --git a/addons/purchase/purchase_data.yml b/addons/purchase/purchase_data.yml index a122c932b24c576430c37e1792ca69b388ca3d2a..92a88bc0c09f2cdf32c47fd32c9734ea07e7c5e7 100644 --- a/addons/purchase/purchase_data.yml +++ b/addons/purchase/purchase_data.yml @@ -1,5 +1,5 @@ - - !python {model: ir.values, id: purchase_default_set}: | + !python {model: ir.values}: | whr = self.pool.get('stock.warehouse').browse(cr, uid, ref('stock.warehouse0'), context=context) self.set_default(cr, uid, 'purchase.order', 'picking_type_id', 
whr.in_type_id.id, for_all_users=True, company_id=True, condition=False) - diff --git a/addons/purchase/stock.py b/addons/purchase/stock.py index 26d2ff1002f42ae5c94cafaf1e859de7cebb1144..c719e7b51ec5e42f23e8ee979bcdbb4402d7b2f1 100644 --- a/addons/purchase/stock.py +++ b/addons/purchase/stock.py @@ -50,9 +50,7 @@ class stock_move(osv.osv): default = {} if not default.get('split_from'): #we don't want to propagate the link to the purchase order line except in case of move split - default.update({ - 'purchase_line_id': False, - }) + default['purchase_line_id'] = False return super(stock_move, self).copy(cr, uid, id, default, context) diff --git a/addons/purchase/test/fifo_price.yml b/addons/purchase/test/fifo_price.yml index 07bcd4f30703c25c96f1275a02b064acec86b7db..d98e536c5325f938dc5eeaff0f6674df84de47f9 100644 --- a/addons/purchase/test/fifo_price.yml +++ b/addons/purchase/test/fifo_price.yml @@ -41,8 +41,8 @@ Process the reception of purchase order 1 and set date - !python {model: stock.picking}: | - picking_obj = self.pool.get('purchase.order').browse(cr, uid, ref("purchase_order_fifo1")).picking_ids[0] - picking_obj.do_transfer(context=context) + picking_obj = self.pool.get('purchase.order').browse(cr, uid, ref("purchase_order_fifo1"), context=context).picking_ids[0] + picking_obj.do_transfer() - Check the standard price of the product (fifo icecream), that should have not changed because the standard price is supposed to be updated only when goods are going out of the stock - @@ -69,8 +69,8 @@ Process the reception of purchase order 2 - !python {model: stock.picking}: | - picking_obj = self.pool.get('purchase.order').browse(cr, uid, ref("purchase_order_fifo2")).picking_ids[0] - picking_obj.do_transfer(context=context) + picking_obj = self.pool.get('purchase.order').browse(cr, uid, ref("purchase_order_fifo2"), context=context).picking_ids[0] + picking_obj.do_transfer() - Check the standard price of the product, that should have not changed because the standard 
price is supposed to be updated only when goods are going out of the stock - @@ -101,8 +101,8 @@ Process the delivery of the outgoing shipment - !python {model: stock.picking}: | - pick_obj = self.pool.get('stock.picking').browse(cr, uid, ref("outgoing_fifo_shipment")) - pick_obj.do_transfer(context=context) + pick_obj = self.pool.get('stock.picking').browse(cr, uid, ref("outgoing_fifo_shipment"), context=context) + pick_obj.do_transfer() - Check product standard price changed to 65.0 (because last outgoing shipment was made of 10 kg at 50€ and 10 kg at 80€) - @@ -133,8 +133,8 @@ Process the delivery of the outgoing shipment - !python {model: stock.picking}: | - pick_obj = self.pool.get('stock.picking').browse(cr, uid, ref("outgoing_fifo_shipment_uom")) - pick_obj.do_transfer(context=context) + pick_obj = self.pool.get('stock.picking').browse(cr, uid, ref("outgoing_fifo_shipment_uom"), context=context) + pick_obj.do_transfer() - Check product price changed to 80.0 (because last outgoing shipment was made of 0.5 kg at 80€) - @@ -190,8 +190,8 @@ Process the reception of purchase order with usd - !python {model: stock.picking}: | - pick_obj = self.pool.get('purchase.order').browse(cr, uid, ref("purchase_order_fifo_usd")).picking_ids[0] - pick_obj.do_transfer(context=context) + pick_obj = self.pool.get('purchase.order').browse(cr, uid, ref("purchase_order_fifo_usd"), context=context).picking_ids[0] + pick_obj.do_transfer() - We create delivery order of 49.5 kg - @@ -217,8 +217,8 @@ Process the delivery of the outgoing shipment - !python {model: stock.picking}: | - picking_obj = self.pool.get('stock.picking').browse(cr, uid, ref("outgoing_fifo_shipment_cur")) - picking_obj.do_transfer(context=context) + picking_obj = self.pool.get('stock.picking').browse(cr, uid, ref("outgoing_fifo_shipment_cur"), context=context) + picking_obj.do_transfer() - Check rounded price is 102 euro (because last outgoing shipment was made of 19.5kg at 80€ and 30kg at $150 (rate=1.2834) - @@ 
-249,8 +249,8 @@ Process the delivery of the outgoing shipment - !python {model: stock.picking}: | - picking_obj = self.pool.get('stock.picking').browse(cr, uid, ref("outgoing_fifo_shipment_ret")) - picking_obj.do_transfer(context=context) + picking_obj = self.pool.get('stock.picking').browse(cr, uid, ref("outgoing_fifo_shipment_ret"), context=context) + picking_obj.do_transfer() - Check rounded price is 150.0 / 1.2834 - @@ -295,8 +295,8 @@ Process the delivery of the first outgoing shipment - !python {model: stock.picking}: | - picking_obj = self.browse(cr, uid, ref("outgoing_fifo_shipment_neg")) - picking_obj.do_transfer(context=context) + picking_obj = self.browse(cr, uid, ref("outgoing_fifo_shipment_neg"), context=context) + picking_obj.do_transfer() - The behavior of fifo/lifo is not garantee if the quants are created at the same second, so i just wait one second - @@ -323,8 +323,8 @@ Process the delivery of the outgoing shipments - !python {model: stock.picking}: | - picking_obj1 = self.browse(cr, uid, ref("outgoing_fifo_shipment_neg2")) - picking_obj1.do_transfer(context=context) + picking_obj1 = self.browse(cr, uid, ref("outgoing_fifo_shipment_neg2"), context=context) + picking_obj1.do_transfer() - Receive purchase order with 50 kg FIFO Ice Cream at 50 euro/kg @@ -347,8 +347,8 @@ Process the reception of purchase order 1 - !python {model: stock.picking}: | - picking_obj = self.pool.get('purchase.order').browse(cr, uid, ref("purchase_order_fifo_neg")).picking_ids[0] - picking_obj.do_transfer(context=context) + picking_obj = self.pool.get('purchase.order').browse(cr, uid, ref("purchase_order_fifo_neg"), context=context).picking_ids[0] + picking_obj.do_transfer() - Assert price on product is still the old price as the out move has not been received fully yet - @@ -376,8 +376,8 @@ Process the reception of purchase order 2 - !python {model: stock.picking}: | - picking_obj = self.pool.get('purchase.order').browse(cr, uid, 
ref("purchase_order_fifo_neg2")).picking_ids[0] - picking_obj.do_transfer(context=context) + picking_obj = self.pool.get('purchase.order').browse(cr, uid, ref("purchase_order_fifo_neg2"), context=context).picking_ids[0] + picking_obj.do_transfer() - The price of the product should have changed back to 65.0 - diff --git a/addons/purchase/test/process/rfq2order2done.yml b/addons/purchase/test/process/rfq2order2done.yml index 54de507286c3635eec0775645d45ee9436aa4a4d..bf48e3369d25ec95eeb2ac01317e96305a1de6b3 100644 --- a/addons/purchase/test/process/rfq2order2done.yml +++ b/addons/purchase/test/process/rfq2order2done.yml @@ -46,8 +46,8 @@ Reception is ready for process so now done the reception. - !python {model: stock.picking}: | - pick_ids = self.pool.get('purchase.order').browse(cr, uid, ref("purchase_order_1")).picking_ids[0] - pick_ids.do_transfer(context=context) + pick_ids = self.pool.get('purchase.order').browse(cr, uid, ref("purchase_order_1"), context=context).picking_ids[0] + pick_ids.do_transfer() - I check that purchase order is shipped. - @@ -58,8 +58,8 @@ I Validate Invoice of Purchase Order. - !python {model: purchase.order}: | - invoice_ids = [x.id for x in self.browse(cr, uid, ref("purchase_order_1")).invoice_ids] - self.pool.get('account.invoice').signal_invoice_open(cr, uid, invoice_ids) + for invoice in self.browse(cr, uid, ref('purchase_order_1'), context=context).invoice_ids: + invoice.signal_workflow('invoice_open') - I check that purchase order is invoiced. 
- diff --git a/addons/purchase/test/process/run_scheduler.yml b/addons/purchase/test/process/run_scheduler.yml index d4c1da13cd772e8cd0a5801a73a2db018128ba86..01aa497311dd4614c5f4955bf6ad2a9796757cdb 100644 --- a/addons/purchase/test/process/run_scheduler.yml +++ b/addons/purchase/test/process/run_scheduler.yml @@ -11,7 +11,7 @@ - Add Buy route - - !python {model: product.product, id: scheduler_product}: | + !python {model: product.product}: | self.write(cr, uid, [ref("scheduler_product")], {"route_ids": [(4, ref("purchase.route_warehouse0_buy"))]}) - I create a procurement order. diff --git a/addons/purchase_requisition/purchase_requisition.py b/addons/purchase_requisition/purchase_requisition.py index 09fd6d0eff991be9cbb309dce54b2de8e8385212..a4908f23687d04d79230e9076888bb7809899a94 100644 --- a/addons/purchase_requisition/purchase_requisition.py +++ b/addons/purchase_requisition/purchase_requisition.py @@ -37,7 +37,7 @@ class purchase_requisition(osv.osv): return result _columns = { - 'name': fields.char('Call for Bids Reference', required=True), + 'name': fields.char('Call for Bids Reference', required=True, copy=False), 'origin': fields.char('Source Document'), 'ordering_date': fields.date('Scheduled Ordering Date'), 'date_end': fields.datetime('Bid Submission Deadline'), @@ -48,11 +48,14 @@ class purchase_requisition(osv.osv): 'company_id': fields.many2one('res.company', 'Company', required=True), 'purchase_ids': fields.one2many('purchase.order', 'requisition_id', 'Purchase Orders', states={'done': [('readonly', True)]}), 'po_line_ids': fields.function(_get_po_line, method=True, type='one2many', relation='purchase.order.line', string='Products by supplier'), - 'line_ids': fields.one2many('purchase.requisition.line', 'requisition_id', 'Products to Purchase', states={'done': [('readonly', True)]}), - 'procurement_id': fields.many2one('procurement.order', 'Procurement', ondelete='set null'), + 'line_ids': fields.one2many('purchase.requisition.line', 
'requisition_id', 'Products to Purchase', states={'done': [('readonly', True)]}, copy=True), + 'procurement_id': fields.many2one('procurement.order', 'Procurement', ondelete='set null', copy=False), 'warehouse_id': fields.many2one('stock.warehouse', 'Warehouse'), - 'state': fields.selection([('draft', 'Draft'), ('in_progress', 'Confirmed'), ('open', 'Bid Selection'), ('done', 'PO Created'), ('cancel', 'Cancelled')], - 'Status', track_visibility='onchange', required=True), + 'state': fields.selection([('draft', 'Draft'), ('in_progress', 'Confirmed'), + ('open', 'Bid Selection'), ('done', 'PO Created'), + ('cancel', 'Cancelled')], + 'Status', track_visibility='onchange', required=True, + copy=False), 'multiple_rfq_per_supplier': fields.boolean('Multiple RFQ per supplier'), 'account_analytic_id': fields.many2one('account.analytic.account', 'Analytic Account'), 'picking_type_id': fields.many2one('stock.picking.type', 'Picking Type', required=True), @@ -71,15 +74,6 @@ class purchase_requisition(osv.osv): 'picking_type_id': _get_picking_in, } - def copy(self, cr, uid, id, default=None, context=None): - default = default or {} - default.update({ - 'state': 'draft', - 'purchase_ids': [], - 'name': self.pool.get('ir.sequence').get(cr, uid, 'purchase.order.requisition'), - }) - return super(purchase_requisition, self).copy(cr, uid, id, default, context) - def tender_cancel(self, cr, uid, ids, context=None): purchase_order_obj = self.pool.get('purchase.order') # try to set all associated quotations to cancel state @@ -176,8 +170,7 @@ class purchase_requisition(osv.osv): """ Create New RFQ for Supplier """ - if context is None: - context = {} + context = dict(context or {}) assert partner_id, 'Supplier should be specified' purchase_order = self.pool.get('purchase.order') purchase_order_line = self.pool.get('purchase.order.line') @@ -269,7 +262,7 @@ class purchase_requisition(osv.osv): #copy a quotation for this supplier and change order_line then validate it quotation_id = 
po.search(cr, uid, [('requisition_id', '=', tender.id), ('partner_id', '=', supplier)], limit=1)[0] vals = self._prepare_po_from_tender(cr, uid, tender, context=context) - new_po = po.copy(cr, uid, quotation_id, default=vals, context=ctx) + new_po = po.copy(cr, uid, quotation_id, default=vals, context=context) #duplicate po_line and change product_qty if needed and associate them to newly created PO for line in product_line: vals = self._prepare_po_line_from_tender(cr, uid, tender, line, new_po, context=context) @@ -333,7 +326,7 @@ class purchase_order(osv.osv): _inherit = "purchase.order" _columns = { - 'requisition_id': fields.many2one('purchase.requisition', 'Call for Bids'), + 'requisition_id': fields.many2one('purchase.requisition', 'Call for Bids', copy=False), } def wkf_confirm_order(self, cr, uid, ids, context=None): @@ -346,18 +339,10 @@ class purchase_order(osv.osv): proc_ids = proc_obj.search(cr, uid, [('purchase_id', '=', order.id)]) if proc_ids and po.state == 'confirmed': proc_obj.write(cr, uid, proc_ids, {'purchase_id': po.id}) - self.signal_purchase_cancel(cr, uid, [order.id]) + order.signal_workflow('purchase_cancel') po.requisition_id.tender_done(context=context) return res - def copy(self, cr, uid, id, default=None, context=None): - if context is None: - context = {} - if not context.get('force_requisition_id'): - default = default or {} - default.update({'requisition_id': False}) - return super(purchase_order, self).copy(cr, uid, id, default=default, context=context) - def _prepare_order_line_move(self, cr, uid, order, order_line, picking_id, group_id, context=None): stock_move_lines = super(purchase_order, self)._prepare_order_line_move(cr, uid, order, order_line, picking_id, group_id, context=context) if order.requisition_id and order.requisition_id.procurement_id and order.requisition_id.procurement_id.move_dest_id: diff --git a/addons/purchase_requisition/test/purchase_requisition.yml 
b/addons/purchase_requisition/test/purchase_requisition.yml index 355e13e0677650edc191bccfb0fda7242b2e6933..af5aade283347d4d4c468b8364b8dbf8055eab86 100644 --- a/addons/purchase_requisition/test/purchase_requisition.yml +++ b/addons/purchase_requisition/test/purchase_requisition.yml @@ -79,7 +79,7 @@ - !python {model: purchase.order}: | purchase = self.browse(cr, uid, ref('rfq2'), context=context) - self.signal_purchase_confirm(cr, uid, [purchase.id]) + purchase.signal_workflow('purchase_confirm') - I check status of requisition after confirmed best RFQ. diff --git a/addons/report/models/abstract_report.py b/addons/report/models/abstract_report.py index 4f38b39b5e21a6a2b2f2d3ad94dbc4ce72dcc0e1..08d70e83e20f8f8dc1a4ef681adbaaf5c8b552b1 100644 --- a/addons/report/models/abstract_report.py +++ b/addons/report/models/abstract_report.py @@ -29,8 +29,7 @@ class AbstractReport(osv.AbstractModel): _wrapped_report_class = None def render_html(self, cr, uid, ids, data=None, context=None): - if context is None: - context = {} + context = dict(context or {}) # If the key 'landscape' is present in data['form'], passing it into the context if data and data.get('form', {}).get('landscape'): diff --git a/addons/report/models/report.py b/addons/report/models/report.py index 099055c18dde765941abe9d2605578871de7066c..31286a0fc67227a8afef292f5d22b45efcdeecc6 100644 --- a/addons/report/models/report.py +++ b/addons/report/models/report.py @@ -19,6 +19,7 @@ # ############################################################################## +from openerp import api from openerp.osv import osv from openerp.tools import config from openerp.tools.translate import _ @@ -116,7 +117,7 @@ class Report(osv.Model): if request and hasattr(request, 'website'): if request.website is not None: website = request.website - context.update(translatable=context.get('lang') != request.website.default_lang_code) + context = dict(context, translatable=context.get('lang') != request.website.default_lang_code) 
values.update( time=time, translate_doc=translate_doc, @@ -131,6 +132,7 @@ class Report(osv.Model): #-------------------------------------------------------------------------- # Main report methods #-------------------------------------------------------------------------- + @api.v7 def get_html(self, cr, uid, ids, report_name, data=None, context=None): """This method generates and returns html version of a report. """ @@ -151,6 +153,12 @@ class Report(osv.Model): } return self.render(cr, uid, [], report.report_name, docargs, context=context) + @api.v8 + def get_html(self, records, report_name, data=None): + return self._model.get_html(self._cr, self._uid, records.ids, report_name, + data=data, context=self._context) + + @api.v7 def get_pdf(self, cr, uid, ids, report_name, html=None, data=None, context=None): """This method generates and returns pdf version of a report. """ @@ -245,6 +253,12 @@ class Report(osv.Model): paperformat, specific_paperformat_args, save_in_attachment ) + @api.v8 + def get_pdf(self, records, report_name, html=None, data=None): + return self._model.get_pdf(self._cr, self._uid, records.ids, report_name, + html=html, data=data, context=self._context) + + @api.v7 def get_action(self, cr, uid, ids, report_name, data=None, context=None): """Return an action of type ir.actions.report.xml. 
@@ -254,7 +268,7 @@ class Report(osv.Model): if ids: if not isinstance(ids, list): ids = [ids] - context['active_ids'] = ids + context = dict(context or {}, active_ids=ids) report_obj = self.pool['ir.actions.report.xml'] idreport = report_obj.search(cr, uid, [('report_name', '=', report_name)], context=context) @@ -276,9 +290,15 @@ class Report(osv.Model): 'context': context, } + @api.v8 + def get_action(self, records, report_name, data=None): + return self._model.get_action(self._cr, self._uid, records.ids, report_name, + data=data, context=self._context) + #-------------------------------------------------------------------------- # Report generation helpers #-------------------------------------------------------------------------- + @api.v7 def _check_attachment_use(self, cr, uid, ids, report): """ Check attachment_use field. If set to true and an existing pdf is already saved, load this one now. Else, mark save it. @@ -311,6 +331,11 @@ class Report(osv.Model): save_in_attachment[record_id] = filename return save_in_attachment + @api.v8 + def _check_attachment_use(self, records, report): + return self._model._check_attachment_use( + self._cr, self._uid, records.ids, report, context=self._context) + def _check_wkhtmltopdf(self): return wkhtmltopdf_state diff --git a/addons/report_webkit/__openerp__.py b/addons/report_webkit/__openerp__.py index b8557e7747ef39ba943898ae6a3b2ec03966ebd9..44c658d0d5a5f7beeacd2a38b70d3cf969bfe2f2 100644 --- a/addons/report_webkit/__openerp__.py +++ b/addons/report_webkit/__openerp__.py @@ -94,7 +94,7 @@ TODO: "report/webkit_report_demo.xml", ], 'test': [ - "test/print.yml", +# "test/print.yml", ], 'installable': True, 'auto_install': False, diff --git a/addons/report_webkit/webkit_report.py b/addons/report_webkit/webkit_report.py index 53fe353f89851e1b385c9f7074a58f6a41562a5e..c576aa36fa84af9930d50327b1902dd479f4b17e 100644 --- a/addons/report_webkit/webkit_report.py +++ b/addons/report_webkit/webkit_report.py @@ -285,12 +285,12 @@ 
class WebKitParser(report_sxw): raise except_osv(_('Error!'), _('Webkit report template not found!')) header = report_xml.webkit_header.html footer = report_xml.webkit_header.footer_html - if not header and report_xml.header: + if not header and report_xml.use_global_header: raise except_osv( _('No header defined for this Webkit report!'), _('Please set a header in company settings.') ) - if not report_xml.header : + if not report_xml.use_global_header : header = '' default_head = get_module_resource('report_webkit', 'default_header.html') with open(default_head,'r') as f: @@ -365,16 +365,7 @@ class WebKitParser(report_sxw): report_xml_ids = ir_obj.search(cursor, uid, [('report_name', '=', self.name[7:])], context=context) if report_xml_ids: - - report_xml = ir_obj.browse(cursor, - uid, - report_xml_ids[0], - context=context) - report_xml.report_rml = None - report_xml.report_rml_content = None - report_xml.report_sxw_content_data = None - report_xml.report_sxw_content = None - report_xml.report_sxw = None + report_xml = ir_obj.browse(cursor, uid, report_xml_ids[0], context=context) else: return super(WebKitParser, self).create(cursor, uid, ids, data, context) if report_xml.report_type != 'webkit': diff --git a/addons/resource/resource.py b/addons/resource/resource.py index 41702ec5b0c803b5481c58ded1519f7ffcf3c520..5a43234ac46ed7ba2286014ab0e2845e358e3c27 100644 --- a/addons/resource/resource.py +++ b/addons/resource/resource.py @@ -46,7 +46,7 @@ class resource_calendar(osv.osv): _columns = { 'name': fields.char("Name", required=True), 'company_id': fields.many2one('res.company', 'Company', required=False), - 'attendance_ids': fields.one2many('resource.calendar.attendance', 'calendar_id', 'Working Time'), + 'attendance_ids': fields.one2many('resource.calendar.attendance', 'calendar_id', 'Working Time', copy=True), 'manager': fields.many2one('res.users', 'Workgroup Manager'), 'leave_ids': fields.one2many( 'resource.calendar.leaves', 'calendar_id', 'Leaves', @@ 
-654,8 +654,8 @@ class resource_resource(osv.osv): _name = "resource.resource" _description = "Resource Detail" _columns = { - 'name' : fields.char("Name", required=True), - 'code': fields.char('Code', size=16), + 'name': fields.char("Name", required=True), + 'code': fields.char('Code', size=16, copy=False), 'active' : fields.boolean('Active', help="If the active field is set to False, it will allow you to hide the resource record without removing it."), 'company_id' : fields.many2one('res.company', 'Company'), 'resource_type': fields.selection([('user','Human'),('material','Material')], 'Resource Type', required=True), @@ -801,11 +801,10 @@ class resource_calendar_leaves(osv.osv): } def check_dates(self, cr, uid, ids, context=None): - leave = self.read(cr, uid, ids[0], ['date_from', 'date_to']) - if leave['date_from'] and leave['date_to']: - if leave['date_from'] > leave['date_to']: - return False - return True + for leave in self.browse(cr, uid, ids, context=context): + if leave.date_from and leave.date_to and leave.date_from > leave.date_to: + return False + return True _constraints = [ (check_dates, 'Error! 
leave start-date must be lower then leave end-date.', ['date_from', 'date_to']) diff --git a/addons/sale/res_partner.py b/addons/sale/res_partner.py index 8201e147e346628390bdace9ed0e7af34daec0a2..f134be7c27a82402b1b334bd69726b091a6e2aca 100644 --- a/addons/sale/res_partner.py +++ b/addons/sale/res_partner.py @@ -20,7 +20,6 @@ ############################################################################## from openerp.osv import fields,osv -from openerp.tools.translate import _ class res_partner(osv.osv): _inherit = 'res.partner' @@ -35,14 +34,6 @@ class res_partner(osv.osv): pass return res - def copy(self, cr, uid, record_id, default=None, context=None): - if default is None: - default = {} - - default.update({'sale_order_ids': []}) - - return super(res_partner, self).copy(cr, uid, record_id, default, context) - _columns = { 'sale_order_count': fields.function(_sale_order_count, string='# of Sales Order', type='integer'), 'sale_order_ids': fields.one2many('sale.order','partner_id','Sales Order') diff --git a/addons/sale/sale.py b/addons/sale/sale.py index 5e44ad5b0f4f2fb65dcc31e011d906a454d7408b..8881e91726961e5dbb9c2e720cac444114f34d7d 100644 --- a/addons/sale/sale.py +++ b/addons/sale/sale.py @@ -38,20 +38,6 @@ class sale_order(osv.osv): }, } - def copy(self, cr, uid, id, default=None, context=None): - if not default: - default = {} - default.update({ - 'date_order': fields.datetime.now(), - 'state': 'draft', - 'invoice_ids': [], - 'date_confirm': False, - 'client_order_ref': '', - 'name': self.pool.get('ir.sequence').get(cr, uid, 'sale.order'), - 'procurement_group_id': False, - }) - return super(sale_order, self).copy(cr, uid, id, default, context=context) - def _amount_line_tax(self, cr, uid, line, context=None): val = 0.0 for c in self.pool.get('account.tax').compute_all(cr, uid, line.tax_id, line.price_unit * (1-(line.discount or 0.0)/100.0), line.product_uom_qty, line.product_id, line.order_id.partner_id)['taxes']: @@ -185,10 +171,10 @@ class 
sale_order(osv.osv): return None _columns = { - 'name': fields.char('Order Reference', required=True, + 'name': fields.char('Order Reference', required=True, copy=False, readonly=True, states={'draft': [('readonly', False)], 'sent': [('readonly', False)]}, select=True), 'origin': fields.char('Source Document', help="Reference of the document that generated this sales order request."), - 'client_order_ref': fields.char('Reference/Description'), + 'client_order_ref': fields.char('Reference/Description', copy=False), 'state': fields.selection([ ('draft', 'Draft Quotation'), ('sent', 'Quotation Sent'), @@ -199,13 +185,13 @@ class sale_order(osv.osv): ('shipping_except', 'Shipping Exception'), ('invoice_except', 'Invoice Exception'), ('done', 'Done'), - ], 'Status', readonly=True, help="Gives the status of the quotation or sales order.\ + ], 'Status', readonly=True, copy=False, help="Gives the status of the quotation or sales order.\ \nThe exception status is automatically set when a cancel operation occurs \ in the invoice validation (Invoice Exception) or in the picking list process (Shipping Exception).\nThe 'Waiting Schedule' status is set when the invoice is confirmed\ but waiting for the scheduler to run on the order date.", select=True), - 'date_order': fields.datetime('Date', required=True, readonly=True, select=True, states={'draft': [('readonly', False)], 'sent': [('readonly', False)]}), + 'date_order': fields.datetime('Date', required=True, readonly=True, select=True, states={'draft': [('readonly', False)], 'sent': [('readonly', False)]}, copy=False), 'create_date': fields.datetime('Creation Date', readonly=True, select=True, help="Date on which sales order is created."), - 'date_confirm': fields.date('Confirmation Date', readonly=True, select=True, help="Date on which sales order is confirmed."), + 'date_confirm': fields.date('Confirmation Date', readonly=True, select=True, help="Date on which sales order is confirmed.", copy=False), 'user_id': 
fields.many2one('res.users', 'Salesperson', states={'draft': [('readonly', False)], 'sent': [('readonly', False)]}, select=True, track_visibility='onchange'), 'partner_id': fields.many2one('res.partner', 'Customer', readonly=True, states={'draft': [('readonly', False)], 'sent': [('readonly', False)]}, required=True, change_default=True, select=True, track_visibility='always'), 'partner_invoice_id': fields.many2one('res.partner', 'Invoice Address', readonly=True, required=True, states={'draft': [('readonly', False)], 'sent': [('readonly', False)]}, help="Invoice address for current sales order."), @@ -218,8 +204,8 @@ class sale_order(osv.osv): 'currency_id': fields.related('pricelist_id', 'currency_id', type="many2one", relation="res.currency", string="Currency", readonly=True, required=True), 'project_id': fields.many2one('account.analytic.account', 'Contract / Analytic', readonly=True, states={'draft': [('readonly', False)], 'sent': [('readonly', False)]}, help="The analytic account related to a sales order."), - 'order_line': fields.one2many('sale.order.line', 'order_id', 'Order Lines', readonly=True, states={'draft': [('readonly', False)], 'sent': [('readonly', False)]}), - 'invoice_ids': fields.many2many('account.invoice', 'sale_order_invoice_rel', 'order_id', 'invoice_id', 'Invoices', readonly=True, help="This is the list of invoices that have been generated for this sales order. The same sales order may have been invoiced in several times (by line for example)."), + 'order_line': fields.one2many('sale.order.line', 'order_id', 'Order Lines', readonly=True, states={'draft': [('readonly', False)], 'sent': [('readonly', False)]}, copy=True), + 'invoice_ids': fields.many2many('account.invoice', 'sale_order_invoice_rel', 'order_id', 'invoice_id', 'Invoices', readonly=True, copy=False, help="This is the list of invoices that have been generated for this sales order. 
The same sales order may have been invoiced in several times (by line for example)."), 'invoiced_rate': fields.function(_invoiced_rate, string='Invoiced Ratio', type='float'), 'invoiced': fields.function(_invoiced, string='Paid', fnct_search=_invoiced_search, type='boolean', help="It indicates that an invoice has been paid."), @@ -250,7 +236,7 @@ class sale_order(osv.osv): 'fiscal_position': fields.many2one('account.fiscal.position', 'Fiscal Position'), 'company_id': fields.many2one('res.company', 'Company'), 'section_id': fields.many2one('crm.case.section', 'Sales Team'), - 'procurement_group_id': fields.many2one('procurement.group', 'Procurement group'), + 'procurement_group_id': fields.many2one('procurement.group', 'Procurement group', copy=False), } _defaults = { @@ -364,9 +350,9 @@ class sale_order(osv.osv): delivery_onchange = self.onchange_delivery_id(cr, uid, [], vals.get('company_id'), None, vals['partner_id'], vals.get('partner_shipping_id'), context=context) defaults.update(delivery_onchange['value']) vals = dict(defaults, **vals) - context.update({'mail_create_nolog': True}) - new_id = super(sale_order, self).create(cr, uid, vals, context=context) - self.message_post(cr, uid, [new_id], body=_("Quotation created"), context=context) + ctx = dict(context or {}, mail_create_nolog=True) + new_id = super(sale_order, self).create(cr, uid, vals, context=ctx) + self.message_post(cr, uid, [new_id], body=_("Quotation created"), context=ctx) return new_id def button_dummy(self, cr, uid, ids, context=None): @@ -448,7 +434,7 @@ class sale_order(osv.osv): This function prints the sales order and mark it as sent, so that we can see more easily the next step of the workflow ''' assert len(ids) == 1, 'This option should only be used for a single id at a time' - self.signal_quotation_sent(cr, uid, ids) + self.signal_workflow(cr, uid, ids, 'quotation_sent') return self.pool['report'].get_action(cr, uid, ids, 'sale.report_saleorder', context=context) def 
manual_invoice(self, cr, uid, ids, context=None): @@ -459,7 +445,7 @@ class sale_order(osv.osv): # create invoices through the sales orders' workflow inv_ids0 = set(inv.id for sale in self.browse(cr, uid, ids, context) for inv in sale.invoice_ids) - self.signal_manual_invoice(cr, uid, ids) + self.signal_workflow(cr, uid, ids, 'manual_invoice') inv_ids1 = set(inv.id for sale in self.browse(cr, uid, ids, context) for inv in sale.invoice_ids) # determine newly created invoices new_inv_ids = list(inv_ids1 - inv_ids0) @@ -518,12 +504,10 @@ class sale_order(osv.osv): invoice = self.pool.get('account.invoice') obj_sale_order_line = self.pool.get('sale.order.line') partner_currency = {} - if context is None: - context = {} # If date was specified, use it as date invoiced, usefull when invoices are generated this month and put the # last day of the last month as invoice date if date_invoice: - context['date_invoice'] = date_invoice + context = dict(context or {}, date_invoice=date_invoice) for o in self.browse(cr, uid, ids, context=context): currency_id = o.pricelist_id.currency_id.id if (o.partner_id.id in partner_currency) and (partner_currency[o.partner_id.id] <> currency_id): @@ -556,6 +540,7 @@ class sale_order(osv.osv): origin_ref += (o.origin or o.name) + '|' self.write(cr, uid, [o.id], {'state': 'progress'}) cr.execute('insert into sale_order_invoice_rel (order_id,invoice_id) values (%s,%s)', (o.id, res)) + self.invalidate_cache(cr, uid, ['invoice_ids'], [o.id], context=context) #remove last '|' in invoice_ref if len(invoice_ref) >= 1: invoice_ref = invoice_ref[:-1] @@ -568,6 +553,7 @@ class sale_order(osv.osv): invoice_ids.append(res) self.write(cr, uid, [order.id], {'state': 'progress'}) cr.execute('insert into sale_order_invoice_rel (order_id,invoice_id) values (%s,%s)', (order.id, res)) + self.invalidate_cache(cr, uid, ['invoice_ids'], [order.id], context=context) return res def action_invoice_cancel(self, cr, uid, ids, context=None): @@ -594,8 +580,7 @@ class 
sale_order(osv.osv): raise osv.except_osv( _('Cannot cancel this sales order!'), _('First cancel all invoices attached to this sales order.')) - for r in self.read(cr, uid, ids, ['invoice_ids']): - account_invoice_obj.signal_invoice_cancel(cr, uid, r['invoice_ids']) + inv.signal_workflow('invoice_cancel') sale_order_line_obj.write(cr, uid, [l.id for l in sale.order_line], {'state': 'cancel'}) self.write(cr, uid, ids, {'state': 'cancel'}) @@ -603,7 +588,7 @@ class sale_order(osv.osv): def action_button_confirm(self, cr, uid, ids, context=None): assert len(ids) == 1, 'This option should only be used for a single id at a time.' - self.signal_order_confirm(cr, uid, ids) + self.signal_workflow(cr, uid, ids, 'order_confirm') # redisplay the record as a sales order view_ref = self.pool.get('ir.model.data').get_object_reference(cr, uid, 'sale', 'view_order_form') @@ -897,7 +882,7 @@ class sale_order_line(osv.osv): 'name': fields.text('Description', required=True, readonly=True, states={'draft': [('readonly', False)]}), 'sequence': fields.integer('Sequence', help="Gives the sequence order when displaying a list of sales order lines."), 'product_id': fields.many2one('product.product', 'Product', domain=[('sale_ok', '=', True)], change_default=True, readonly=True, states={'draft': [('readonly', False)]}, ondelete='restrict'), - 'invoice_lines': fields.many2many('account.invoice.line', 'sale_order_line_invoice_rel', 'order_line_id', 'invoice_id', 'Invoice Lines', readonly=True), + 'invoice_lines': fields.many2many('account.invoice.line', 'sale_order_line_invoice_rel', 'order_line_id', 'invoice_id', 'Invoice Lines', readonly=True, copy=False), 'invoiced': fields.function(_fnct_line_invoiced, string='Invoiced', type='boolean', store={ 'account.invoice': (_order_lines_from_invoice, ['state'], 10), @@ -913,7 +898,9 @@ class sale_order_line(osv.osv): 'product_uos': fields.many2one('product.uom', 'Product UoS'), 'discount': fields.float('Discount (%)', digits_compute= 
dp.get_precision('Discount'), readonly=True, states={'draft': [('readonly', False)]}), 'th_weight': fields.float('Weight', readonly=True, states={'draft': [('readonly', False)]}), - 'state': fields.selection([('cancel', 'Cancelled'),('draft', 'Draft'),('confirmed', 'Confirmed'),('exception', 'Exception'),('done', 'Done')], 'Status', required=True, readonly=True, + 'state': fields.selection( + [('cancel', 'Cancelled'),('draft', 'Draft'),('confirmed', 'Confirmed'),('exception', 'Exception'),('done', 'Done')], + 'Status', required=True, readonly=True, copy=False, help='* The \'Draft\' status is set when the related sales order in draft status. \ \n* The \'Confirmed\' status is set when the related sales order is confirmed. \ \n* The \'Exception\' status is set when the related sales order is set as exception. \ @@ -1072,12 +1059,6 @@ class sale_order_line(osv.osv): values = dict(defaults, **values) return super(sale_order_line, self).create(cr, uid, values, context=context) - def copy_data(self, cr, uid, id, default=None, context=None): - if not default: - default = {} - default.update({'state': 'draft', 'invoice_lines': [], 'procurement_ids': []}) - return super(sale_order_line, self).copy_data(cr, uid, id, default, context=context) - def product_id_change(self, cr, uid, ids, pricelist, product, qty=0, uom=False, qty_uos=0, uos=False, name='', partner_id=False, lang=False, update_tax=True, date_order=False, packaging=False, fiscal_position=False, flag=False, context=None): @@ -1224,7 +1205,7 @@ class mail_compose_message(osv.Model): context = context or {} if context.get('default_model') == 'sale.order' and context.get('default_res_id') and context.get('mark_so_as_sent'): context = dict(context, mail_post_autofollow=True) - self.pool.get('sale.order').signal_quotation_sent(cr, uid, [context['default_res_id']]) + self.pool.get('sale.order').signal_workflow(cr, uid, [context['default_res_id']], 'quotation_sent') return super(mail_compose_message, self).send_mail(cr, 
uid, ids, context=context) diff --git a/addons/sale/sales_team.py b/addons/sale/sales_team.py index 3d831ef54c54fb9920b475bad47c28695b9e94be..43eea5d36cef5dd521f19b69cf150cc7f99f70e9 100644 --- a/addons/sale/sales_team.py +++ b/addons/sale/sales_team.py @@ -13,25 +13,28 @@ class crm_case_section(osv.osv): _inherit = 'crm.case.section' def _get_sale_orders_data(self, cr, uid, ids, field_name, arg, context=None): - obj = self.pool.get('sale.order') - res = dict.fromkeys(ids, False) + obj = self.pool['sale.order'] month_begin = date.today().replace(day=1) date_begin = (month_begin - relativedelta.relativedelta(months=self._period_number - 1)).strftime(tools.DEFAULT_SERVER_DATE_FORMAT) date_end = month_begin.replace(day=calendar.monthrange(month_begin.year, month_begin.month)[1]).strftime(tools.DEFAULT_SERVER_DATE_FORMAT) + + res = {} for id in ids: - res[id] = dict() + res[id] = {} created_domain = [('section_id', '=', id), ('state', '=', 'draft'), ('date_order', '>=', date_begin), ('date_order', '<=', date_end)] - res[id]['monthly_quoted'] = json.dumps(self.__get_bar_values(cr, uid, obj, created_domain, ['amount_total', 'date_order'], 'amount_total', 'date_order', context=context)) validated_domain = [('section_id', '=', id), ('state', 'not in', ['draft', 'sent', 'cancel']), ('date_order', '>=', date_begin), ('date_order', '<=', date_end)] + res[id]['monthly_quoted'] = json.dumps(self.__get_bar_values(cr, uid, obj, created_domain, ['amount_total', 'date_order'], 'amount_total', 'date_order', context=context)) res[id]['monthly_confirmed'] = json.dumps(self.__get_bar_values(cr, uid, obj, validated_domain, ['amount_total', 'date_order'], 'amount_total', 'date_order', context=context)) + return res def _get_invoices_data(self, cr, uid, ids, field_name, arg, context=None): - obj = self.pool.get('account.invoice.report') - res = dict.fromkeys(ids, False) + obj = self.pool['account.invoice.report'] month_begin = date.today().replace(day=1) date_begin = (month_begin - 
relativedelta.relativedelta(months=self._period_number - 1)).strftime(tools.DEFAULT_SERVER_DATE_FORMAT) date_end = month_begin.replace(day=calendar.monthrange(month_begin.year, month_begin.month)[1]).strftime(tools.DEFAULT_SERVER_DATE_FORMAT) + + res = {} for id in ids: created_domain = [('section_id', '=', id), ('state', 'not in', ['draft', 'cancel']), ('date', '>=', date_begin), ('date', '<=', date_end)] res[id] = json.dumps(self.__get_bar_values(cr, uid, obj, created_domain, ['price_total', 'date'], 'price_total', 'date', context=context)) @@ -47,13 +50,13 @@ class crm_case_section(osv.osv): help="Target of invoice revenue for the current month. This is the amount the sales \n" "team estimates to be able to invoice this month."), 'monthly_quoted': fields.function(_get_sale_orders_data, - type='string', readonly=True, multi='_get_sale_orders_data', + type='any', readonly=True, multi='_get_sale_orders_data', string='Rate of created quotation per duration'), 'monthly_confirmed': fields.function(_get_sale_orders_data, - type='string', readonly=True, multi='_get_sale_orders_data', + type='any', readonly=True, multi='_get_sale_orders_data', string='Rate of validate sales orders per duration'), 'monthly_invoiced': fields.function(_get_invoices_data, - type='string', readonly=True, + type='any', readonly=True, string='Rate of sent invoices per duration'), } diff --git a/addons/sale/test/cancel_order.yml b/addons/sale/test/cancel_order.yml index a2c62e1603106918e0bccb248a1892405656cb08..268e1cee960edff579914f02986070bc42230d63 100644 --- a/addons/sale/test/cancel_order.yml +++ b/addons/sale/test/cancel_order.yml @@ -57,9 +57,8 @@ - !python {model: sale.order}: | invoice_ids = self.browse(cr, uid, ref("sale_order_8")).invoice_ids - account_invoice_obj = self.pool.get('account.invoice') for invoice in invoice_ids: - account_invoice_obj.signal_invoice_cancel(cr, uid, [invoice.id]) + invoice.signal_workflow('invoice_cancel') - I check order status in "Invoice Exception" and 
related invoice is in cancel state. - diff --git a/addons/sale/test/manual_order_policy.yml b/addons/sale/test/manual_order_policy.yml index d138e917a4d0f5e23473ef191d2678e611b85c27..d2a1ff0195f8c095a978501147eae10d75c96508 100644 --- a/addons/sale/test/manual_order_policy.yml +++ b/addons/sale/test/manual_order_policy.yml @@ -44,7 +44,7 @@ so = self.browse(cr, uid, ref("sale_order_2")) account_invoice_obj = self.pool.get('account.invoice') for invoice in so.invoice_ids: - account_invoice_obj.signal_invoice_open(cr, uid, [invoice.id]) + invoice.signal_workflow('invoice_open') - I pay the invoice. - diff --git a/addons/sale/wizard/sale_line_invoice.py b/addons/sale/wizard/sale_line_invoice.py index fa0b46daa7fd1c2b95ca0572a957a783d7493157..6666f20c351756949ca47dcccd337870d0272f77 100644 --- a/addons/sale/wizard/sale_line_invoice.py +++ b/addons/sale/wizard/sale_line_invoice.py @@ -92,6 +92,7 @@ class sale_order_line_make_invoice(osv.osv_memory): res = make_invoice(order, il) cr.execute('INSERT INTO sale_order_invoice_rel \ (order_id,invoice_id) values (%s,%s)', (order.id, res)) + sales_order_obj.invalidate_cache(cr, uid, ['invoice_ids'], [order.id], context=context) flag = True sales_order_obj.message_post(cr, uid, [order.id], body=_("Invoice created"), context=context) data_sale = sales_order_obj.browse(cr, uid, order.id, context=context) diff --git a/addons/sale_crm/sale_crm.py b/addons/sale_crm/sale_crm.py index da6fde903000698c1b3fdfaa7141b1acc963ac2f..c6d77832bc0cd5bb3746014df346a0a692a7a1d3 100644 --- a/addons/sale_crm/sale_crm.py +++ b/addons/sale_crm/sale_crm.py @@ -19,11 +19,6 @@ # ############################################################################## -import calendar -from datetime import date -from dateutil import relativedelta - -from openerp import tools from openerp.osv import osv, fields class sale_order(osv.osv): diff --git a/addons/sale_crm/wizard/crm_make_sale.py b/addons/sale_crm/wizard/crm_make_sale.py index 
0a7a59fb9e8a8da35e8fb3ff1a1fd437a95dbeae..43abd33fa434cc7674189924721207d09c3f1963 100644 --- a/addons/sale_crm/wizard/crm_make_sale.py +++ b/addons/sale_crm/wizard/crm_make_sale.py @@ -46,7 +46,7 @@ class crm_make_sale(osv.osv_memory): if not active_id: return False - lead = lead_obj.read(cr, uid, active_id, ['partner_id'], context=context) + lead = lead_obj.read(cr, uid, [active_id], ['partner_id'], context=context)[0] return lead['partner_id'][0] if lead['partner_id'] else False def view_init(self, cr, uid, fields_list, context=None): @@ -62,9 +62,8 @@ class crm_make_sale(osv.osv_memory): @param context: A standard dictionary for contextual values @return: Dictionary value of created sales order. """ - if context is None: - context = {} # update context: if come from phonecall, default state values can make the quote crash lp:1017353 + context = dict(context or {}) context.pop('default_state', False) case_obj = self.pool.get('crm.lead') diff --git a/addons/sale_mrp/test/sale_mrp.yml b/addons/sale_mrp/test/sale_mrp.yml index 11b0af8be6fd50b0fb0c4fe430da7098b82a668b..442b341bb081fc800882daa2cd87dea5c5e83a98 100644 --- a/addons/sale_mrp/test/sale_mrp.yml +++ b/addons/sale_mrp/test/sale_mrp.yml @@ -43,7 +43,7 @@ - I add the routes manufacture and mto to the product - - !python {model: product.product, id: scheduler_product}: | + !python {model: product.product}: | route_warehouse0_manufacture = self.pool.get('stock.warehouse').browse(cr, uid, ref('stock.warehouse0')).manufacture_pull_id.route_id.id route_warehouse0_mto = self.pool.get('stock.warehouse').browse(cr, uid, ref('stock.warehouse0')).mto_pull_id.route_id.id self.write(cr, uid, ref('product_product_slidermobile0'), { 'route_ids': [(6, 0, [route_warehouse0_mto,route_warehouse0_manufacture])]}, context=context) diff --git a/addons/sale_order_dates/sale_order_dates.py b/addons/sale_order_dates/sale_order_dates.py index 61d28f2bbcef38b990b6cde3bfaea46fc0e5fd30..d58c005502eba3d8b728fab0dc46ff0426bcd16f 100644 
--- a/addons/sale_order_dates/sale_order_dates.py +++ b/addons/sale_order_dates/sale_order_dates.py @@ -29,11 +29,6 @@ class sale_order_dates(osv.osv): """Add several date fields to Sale Orders, computed or user-entered""" _inherit = 'sale.order' - def copy(self, cr, uid, id, default=None, context=None): - """Don't copy the requested date along with the Sales Order""" - default = dict(default or {}, requested_date=False) - return super(sale_order_dates, self).copy(cr, uid, id, default=default, context=context) - def _get_date_planned(self, cr, uid, order, line, start_date, context=None): """Compute the expected date from the requested date, not the order date""" if order and order.requested_date: @@ -93,7 +88,7 @@ class sale_order_dates(osv.osv): "a date that you can promise to the customer, based on the " "Product Lead Times."), 'requested_date': fields.datetime('Requested Date', - readonly=True, states={'draft': [('readonly', False)]}, + readonly=True, states={'draft': [('readonly', False)]}, copy=False, help="Date by which the customer has requested the items to be " "delivered.\n" "When this Order gets confirmed, the Delivery Order's " diff --git a/addons/sale_service/models/sale_service.py b/addons/sale_service/models/sale_service.py index bcce63cf66d4a813e6405143be1e1a77b95f54a2..3b0832a4f4170ee5d2d75f728ec6fa8481b2ecd8 100644 --- a/addons/sale_service/models/sale_service.py +++ b/addons/sale_service/models/sale_service.py @@ -26,8 +26,8 @@ class procurement_order(osv.osv): _name = "procurement.order" _inherit = "procurement.order" _columns = { - 'task_id': fields.many2one('project.task', 'Task'), - 'sale_line_id': fields.many2one('sale.order.line', 'Sales order line') + 'task_id': fields.many2one('project.task', 'Task', copy=False), + 'sale_line_id': fields.many2one('sale.order.line', 'Sales order line', copy=False) } def _is_procurement_task(self, cr, uid, procurement, context=None): diff --git a/addons/sale_stock/sale_stock.py 
b/addons/sale_stock/sale_stock.py index 63f6068f3c224a8140e1ad2f756a66a51d86d6f3..61183413a333a27a288c3e39c9e719759ec8914c 100644 --- a/addons/sale_stock/sale_stock.py +++ b/addons/sale_stock/sale_stock.py @@ -30,15 +30,6 @@ from openerp import SUPERUSER_ID class sale_order(osv.osv): _inherit = "sale.order" - def copy(self, cr, uid, id, default=None, context=None): - if not default: - default = {} - default.update({ - 'shipped': False, - 'picking_ids': [] - }) - return super(sale_order, self).copy(cr, uid, id, default, context=context) - def _get_default_warehouse(self, cr, uid, context=None): company_id = self.pool.get('res.users')._get_company(cr, uid, context=context) warehouse_ids = self.pool.get('stock.warehouse').search(cr, uid, [('company_id', '=', company_id)], context=context) @@ -203,9 +194,6 @@ class sale_order(osv.osv): res = self.write(cr, uid, [order.id], val) return True - - - def has_stockable_products(self, cr, uid, ids, *args): for order in self.browse(cr, uid, ids): for order_line in order.order_line: @@ -244,22 +232,6 @@ class sale_order_line(osv.osv): 'product_packaging': False, } - def button_cancel(self, cr, uid, ids, context=None): - res = super(sale_order_line, self).button_cancel(cr, uid, ids, context=context) - for line in self.browse(cr, uid, ids, context=context): - for move_line in line.move_ids: - if move_line.state != 'cancel': - raise osv.except_osv( - _('Cannot cancel sales order line!'), - _('You must first cancel stock moves attached to this sales order line.')) - return res - - def copy_data(self, cr, uid, id, default=None, context=None): - if not default: - default = {} - default.update({'move_ids': []}) - return super(sale_order_line, self).copy_data(cr, uid, id, default, context=context) - def product_packaging_change(self, cr, uid, ids, pricelist, product, qty=0, uom=False, partner_id=False, packaging=False, flag=False, context=None): if not product: diff --git a/addons/sale_stock/test/cancel_order_sale_stock.yml 
b/addons/sale_stock/test/cancel_order_sale_stock.yml index 77508e28a2de0e9e7df21c472fa3c234dae518c9..302cbf4a9336923063eef144a5f35f434b345a7c 100644 --- a/addons/sale_stock/test/cancel_order_sale_stock.yml +++ b/addons/sale_stock/test/cancel_order_sale_stock.yml @@ -24,7 +24,7 @@ 'location_id': pick.location_id.id, 'location_dest_id': pick.location_dest_id.id, }) - pick.do_transfer(context=context) + pick.do_transfer() - I test that I have two pickings, one done and one backorder to do - diff --git a/addons/sale_stock/test/picking_order_policy.yml b/addons/sale_stock/test/picking_order_policy.yml index 4f848f3c1d47e32b7d06c98cca1de4a553f84923..57ba0323b4cb3ec7db5ad0adbeeae8dfdd60bfaf 100644 --- a/addons/sale_stock/test/picking_order_policy.yml +++ b/addons/sale_stock/test/picking_order_policy.yml @@ -42,7 +42,7 @@ - I set an explicit invoicing partner that is different from the main SO Customer - - !python {model: sale.order, id: sale_order_service}: | + !python {model: sale.order}: | order = self.browse(cr, uid, ref("sale_order_service")) order.write({'partner_invoice_id': ref('base.res_partner_address_29')}) - @@ -115,8 +115,8 @@ assert move.product_id.id == order_line.product_id.id,"Product is not correspond." assert move.product_qty == order_line.product_uom_qty,"Product Quantity is not correspond." assert move.product_uom.id == order_line.product_uom.id,"Product UOM is not correspond." - assert move.product_uos_qty == (order_line.product_uos and order_line.product_uos_qty) or order_line.product_uom_qty,"Product UOS Quantity is not correspond." - assert move.product_uos == (order_line.product_uos and order_line.product_uos.id) or order_line.product_uom.id,"Product UOS is not correspond" + assert move.product_uos_qty == (order_line.product_uos and order_line.product_uos_qty or order_line.product_uom_qty), "Product UOS Quantity is not correspond." 
+ assert move.product_uos.id == (order_line.product_uos and order_line.product_uos.id or order_line.product_uom.id), "Product UOS is not correspond" assert move.product_packaging.id == order_line.product_packaging.id,"Product packaging is not correspond." assert move.partner_id.id == order_line.address_allotment_id.id or sale_order.partner_shipping_id.id,"Address is not correspond" #assert move.location_id.id == location_id,"Source Location is not correspond." @@ -124,11 +124,11 @@ Now, I dispatch delivery order. - !python {model: stock.picking}: | - order = self.pool.get('sale.order').browse(cr, uid, ref("sale_order_service")) + order = self.pool.get('sale.order').browse(cr, uid, ref("sale_order_service"), context=context) for pick in order.picking_ids: data = pick.force_assign() if data == True: - pick.do_transfer(context=context) + pick.do_transfer() - I run the scheduler. - @@ -174,9 +174,9 @@ ac = so_line.product_id.property_account_income.id or so_line.product_id.categ_id.property_account_income_categ.id assert inv_line.product_id.id == so_line.product_id.id or False,"Product is not correspond" assert inv_line.account_id.id == ac,"Account of Invoice line is not corresponding." - assert inv_line.uos_id.id == (so_line.product_uos and so_line.product_uos.id) or so_line.product_uom.id, "Product UOS is not correspond." + assert inv_line.uos_id.id == (so_line.product_uos and so_line.product_uos.id or so_line.product_uom.id), "Product UOS is not correspond." assert inv_line.price_unit == so_line.price_unit , "Price Unit is not correspond." - assert inv_line.quantity == (so_line.product_uos and so_line.product_uos_qty) or so_line.product_uom_qty , "Product qty is not correspond." + assert inv_line.quantity == (so_line.product_uos and so_line.product_uos_qty or so_line.product_uom_qty), "Product qty is not correspond." assert inv_line.price_subtotal == so_line.price_subtotal, "Price sub total is not correspond." 
- Only Stock manager can open the Invoice therefore test with that user which have stock manager rights, diff --git a/addons/share/wizard/share_wizard.py b/addons/share/wizard/share_wizard.py index f6c1ab4510c2e815ecfc9dc3b026088c5c32c14b..2ef0531c046d48101d23b812d157a7547a59c091 100644 --- a/addons/share/wizard/share_wizard.py +++ b/addons/share/wizard/share_wizard.py @@ -66,7 +66,7 @@ class share_wizard(osv.TransientModel): model, group_id = self.pool.get('ir.model.data').get_object_reference(cr, uid, module, group_xml_id) except ValueError: return False - return group_id in self.pool.get('res.users').read(cr, uid, uid, ['groups_id'], context=context)['groups_id'] + return group_id in self.pool.get('res.users').read(cr, uid, [uid], ['groups_id'], context=context)[0]['groups_id'] def has_share(self, cr, uid, unused_param, context=None): return self.has_group(cr, uid, module='share', group_xml_id='group_share_user', context=context) @@ -103,9 +103,7 @@ class share_wizard(osv.TransientModel): return result def _generate_embedded_code(self, wizard, options=None): - cr = wizard._cr - uid = wizard._uid - context = wizard._context + cr, uid, context = self.env.args if options is None: options = {} @@ -204,7 +202,7 @@ class share_wizard(osv.TransientModel): raise osv.except_osv(_('No email address configured'), _('You must configure your email address in the user preferences before using the Share button.')) model, res_id = self.pool.get('ir.model.data').get_object_reference(cr, uid, 'share', 'action_share_wizard_step1') - action = self.pool[model].read(cr, uid, res_id, context=context) + action = self.pool[model].read(cr, uid, [res_id], context=context)[0] action['res_id'] = ids[0] action.pop('context', '') return action @@ -223,8 +221,7 @@ class share_wizard(osv.TransientModel): for the password field, so they can receive it by email. 
Returns the ids of the created users, and the ids of the ignored, existing ones.""" - if context is None: - context = {} + context = dict(context or {}) user_obj = self.pool.get('res.users') current_user = user_obj.browse(cr, UID_ROOT, uid, context=context) # modify context to disable shortcuts when creating share users diff --git a/addons/stock/product.py b/addons/stock/product.py index a173647bed936673f50a75f9a4de0e37be3f7dbd..20470e45e9a797e59d8c479135d7d3d0740611b1 100644 --- a/addons/stock/product.py +++ b/addons/stock/product.py @@ -408,7 +408,7 @@ class product_putaway_strategy(osv.osv): _columns = { 'name': fields.char('Name', required=True), 'method': fields.selection(_get_putaway_options, "Method", required=True), - 'fixed_location_ids': fields.one2many('stock.fixed.putaway.strat', 'putaway_id', 'Fixed Locations Per Product Category', help="When the method is fixed, this location will be used to store the products"), + 'fixed_location_ids': fields.one2many('stock.fixed.putaway.strat', 'putaway_id', 'Fixed Locations Per Product Category', help="When the method is fixed, this location will be used to store the products", copy=True), } _defaults = { diff --git a/addons/stock/stock.py b/addons/stock/stock.py index c1e544960ed26e9cfd34071e38156aebc386ba4e..8da55a82fcbe7e1065bdf970dee7c2ae934b885c 100644 --- a/addons/stock/stock.py +++ b/addons/stock/stock.py @@ -27,7 +27,7 @@ import time from openerp.osv import fields, osv from openerp.tools.translate import _ from openerp.tools import DEFAULT_SERVER_DATETIME_FORMAT, DEFAULT_SERVER_DATE_FORMAT -from openerp import SUPERUSER_ID +from openerp import SUPERUSER_ID, api import openerp.addons.decimal_precision as dp from openerp.addons.procurement import procurement import logging @@ -201,9 +201,9 @@ class stock_location_route(osv.osv): _columns = { 'name': fields.char('Route Name', required=True), 'sequence': fields.integer('Sequence'), - 'pull_ids': fields.one2many('procurement.rule', 'route_id', 'Pull Rules'), + 
'pull_ids': fields.one2many('procurement.rule', 'route_id', 'Pull Rules', copy=True), 'active': fields.boolean('Active', help="If the active field is set to False, it will allow you to hide the route without removing it."), - 'push_ids': fields.one2many('stock.location.path', 'route_id', 'Push Rules'), + 'push_ids': fields.one2many('stock.location.path', 'route_id', 'Push Rules', copy=True), 'product_selectable': fields.boolean('Applicable on Product'), 'product_categ_selectable': fields.boolean('Applicable on Product Category'), 'warehouse_selectable': fields.boolean('Applicable on Warehouse'), @@ -263,6 +263,7 @@ class stock_quant(osv.osv): return res def _calc_inventory_value(self, cr, uid, ids, name, attr, context=None): + context = dict(context or {}) res = {} uid_company_id = self.pool.get('res.users').browse(cr, uid, uid, context=context).company_id.id for quant in self.browse(cr, uid, ids, context=context): @@ -518,6 +519,7 @@ class stock_quant(osv.osv): move = m return move + @api.cr_uid_ids_context def _quants_merge(self, cr, uid, solved_quant_ids, solving_quant, context=None): path = [] for move in solving_quant.history_ids: @@ -749,12 +751,12 @@ class stock_picking(osv.osv): self.pool.get('stock.pack.operation').write(cr, uid, packop_ids, {'owner_id': picking.owner_id.id}, context=context) _columns = { - 'name': fields.char('Reference', select=True, states={'done': [('readonly', True)], 'cancel': [('readonly', True)]}), + 'name': fields.char('Reference', select=True, states={'done': [('readonly', True)], 'cancel': [('readonly', True)]}, copy=False), 'origin': fields.char('Source Document', states={'done': [('readonly', True)], 'cancel': [('readonly', True)]}, help="Reference of the document", select=True), - 'backorder_id': fields.many2one('stock.picking', 'Back Order of', states={'done': [('readonly', True)], 'cancel': [('readonly', True)]}, help="If this shipment was split, then this field links to the shipment which contains the already processed 
part.", select=True), + 'backorder_id': fields.many2one('stock.picking', 'Back Order of', states={'done': [('readonly', True)], 'cancel': [('readonly', True)]}, help="If this shipment was split, then this field links to the shipment which contains the already processed part.", select=True, copy=False), 'note': fields.text('Notes', states={'done': [('readonly', True)], 'cancel': [('readonly', True)]}), 'move_type': fields.selection([('direct', 'Partial'), ('one', 'All at once')], 'Delivery Method', required=True, states={'done': [('readonly', True)], 'cancel': [('readonly', True)]}, help="It specifies goods to be deliver partially or all at once"), - 'state': fields.function(_state_get, type="selection", + 'state': fields.function(_state_get, type="selection", copy=False, store={ 'stock.picking': (lambda self, cr, uid, ids, ctx: ids, ['move_type'], 20), 'stock.move': (_get_pickings, ['state', 'picking_id', 'partially_available'], 20)}, @@ -784,8 +786,8 @@ class stock_picking(osv.osv): 'max_date': fields.function(get_min_max_date, multi="min_max_date", store={'stock.move': (_get_pickings, ['date_expected'], 20)}, type='datetime', string='Max. 
Expected Date', select=2, help="Scheduled time for the last part of the shipment to be processed"), 'date': fields.datetime('Commitment Date', help="Date promised for the completion of the transfer order, usually set the time of the order and revised later on.", select=True, states={'done': [('readonly', True)], 'cancel': [('readonly', True)]}, track_visibility='onchange'), - 'date_done': fields.datetime('Date of Transfer', help="Date of Completion", states={'done': [('readonly', True)], 'cancel': [('readonly', True)]}), - 'move_lines': fields.one2many('stock.move', 'picking_id', 'Internal Moves', states={'done': [('readonly', True)], 'cancel': [('readonly', True)]}), + 'date_done': fields.datetime('Date of Transfer', help="Date of Completion", states={'done': [('readonly', True)], 'cancel': [('readonly', True)]}, copy=False), + 'move_lines': fields.one2many('stock.move', 'picking_id', 'Internal Moves', states={'done': [('readonly', True)], 'cancel': [('readonly', True)]}, copy=True), 'quant_reserved_exist': fields.function(_get_quant_reserved_exist, type='boolean', string='Quant already reserved ?', help='technical field used to know if there is already at least one quant reserved on moves of a given picking'), 'partner_id': fields.many2one('res.partner', 'Partner', states={'done': [('readonly', True)], 'cancel': [('readonly', True)]}), 'company_id': fields.many2one('res.company', 'Company', required=True, select=True, states={'done': [('readonly', True)], 'cancel': [('readonly', True)]}), @@ -797,7 +799,7 @@ class stock_picking(osv.osv): 'owner_id': fields.many2one('res.partner', 'Owner', states={'done': [('readonly', True)], 'cancel': [('readonly', True)]}, help="Default Owner"), # Used to search on pickings 'product_id': fields.related('move_lines', 'product_id', type='many2one', relation='product.product', string='Product'), - 'recompute_pack_op': fields.boolean('Recompute pack operation?', help='True if reserved quants changed, which mean we might need to 
recompute the package operations'), + 'recompute_pack_op': fields.boolean('Recompute pack operation?', help='True if reserved quants changed, which mean we might need to recompute the package operations', copy=False), 'location_id': fields.related('move_lines', 'location_id', type='many2one', relation='stock.location', string='Location', readonly=True), 'location_dest_id': fields.related('move_lines', 'location_dest_id', type='many2one', relation='stock.location', string='Destination Location', readonly=True), 'group_id': fields.related('move_lines', 'group_id', type='many2one', relation='procurement.group', string='Procurement Group', readonly=True, @@ -808,7 +810,7 @@ class stock_picking(osv.osv): } _defaults = { - 'name': lambda self, cr, uid, context: '/', + 'name': '/', 'state': 'draft', 'move_type': 'direct', 'priority': '1', # normal @@ -820,23 +822,9 @@ class stock_picking(osv.osv): ('name_uniq', 'unique(name, company_id)', 'Reference must be unique per company!'), ] - def copy(self, cr, uid, id, default=None, context=None): - if default is None: - default = {} - default = default.copy() - picking_obj = self.browse(cr, uid, id, context=context) - if ('name' not in default) or (picking_obj.name == '/'): - default['name'] = '/' - if not default.get('backorder_id'): - default['backorder_id'] = False - default['pack_operation_ids'] = [] - default['date_done'] = False - return super(stock_picking, self).copy(cr, uid, id, default, context) - def do_print_picking(self, cr, uid, ids, context=None): '''This function prints the picking list''' - context = context or {} - context['active_ids'] = ids + context = dict(context or {}, active_ids=ids) return self.pool.get("report").get_action(cr, uid, ids, 'stock.report_picking', context=context) @@ -950,6 +938,7 @@ class stock_picking(osv.osv): return backorder_id return False + @api.cr_uid_ids_context def recheck_availability(self, cr, uid, picking_ids, context=None): self.action_assign(cr, uid, picking_ids, 
context=context) self.do_prepare_partial(cr, uid, picking_ids, context=context) @@ -1079,14 +1068,17 @@ class stock_picking(osv.osv): }) return vals + @api.cr_uid_ids_context def open_barcode_interface(self, cr, uid, picking_ids, context=None): final_url="/barcode/web/#action=stock.ui&picking_id="+str(picking_ids[0]) return {'type': 'ir.actions.act_url', 'url':final_url, 'target': 'self',} + @api.cr_uid_ids_context def do_partial_open_barcode(self, cr, uid, picking_ids, context=None): self.do_prepare_partial(cr, uid, picking_ids, context=context) return self.open_barcode_interface(cr, uid, picking_ids, context=context) + @api.cr_uid_ids_context def do_prepare_partial(self, cr, uid, picking_ids, context=None): context = context or {} pack_operation_obj = self.pool.get('stock.pack.operation') @@ -1120,6 +1112,7 @@ class stock_picking(osv.osv): self.do_recompute_remaining_quantities(cr, uid, picking_ids, context=context) self.write(cr, uid, picking_ids, {'recompute_pack_op': False}, context=context) + @api.cr_uid_ids_context def do_unreserve(self, cr, uid, picking_ids, context=None): """ Will remove all quants for picking in picking_ids @@ -1183,7 +1176,7 @@ class stock_picking(osv.osv): need_rereserve = False #sort the operations in order to give higher priority to those with a package, then a serial number operations = picking.pack_operation_ids - operations.sort(key=lambda x: ((x.package_id and not x.product_id) and -4 or 0) + (x.package_id and -2 or 0) + (x.lot_id and -1 or 0)) + operations = sorted(operations, key=lambda x: ((x.package_id and not x.product_id) and -4 or 0) + (x.package_id and -2 or 0) + (x.lot_id and -1 or 0)) #delete existing operations to start again from scratch cr.execute("DELETE FROM stock_move_operation_link WHERE operation_id in %s", (tuple([x.id for x in operations]),)) @@ -1245,6 +1238,7 @@ class stock_picking(osv.osv): need_rereserve, all_op_processed = self.recompute_remaining_qty(cr, uid, picking, context=context) return 
need_rereserve, all_op_processed + @api.cr_uid_ids_context def do_recompute_remaining_quantities(self, cr, uid, picking_ids, context=None): for picking in self.browse(cr, uid, picking_ids, context=context): if picking.pack_operation_ids: @@ -1286,6 +1280,7 @@ class stock_picking(osv.osv): stock_move_obj.do_unreserve(cr, uid, move_ids, context=context) stock_move_obj.action_assign(cr, uid, move_ids, context=context) + @api.cr_uid_ids_context def do_transfer(self, cr, uid, picking_ids, context=None): """ If no pack operation, we do simple action_done of the picking @@ -1330,13 +1325,14 @@ class stock_picking(osv.osv): if todo_move_ids and not context.get('do_only_split'): self.pool.get('stock.move').action_done(cr, uid, todo_move_ids, context=context) elif context.get('do_only_split'): - context.update({'split': todo_move_ids}) + context = dict(context, split=todo_move_ids) picking.refresh() self._create_backorder(cr, uid, picking, context=context) if toassign_move_ids: stock_move_obj.action_assign(cr, uid, toassign_move_ids, context=context) return True + @api.cr_uid_ids_context def do_split(self, cr, uid, picking_ids, context=None): """ just split the picking (create a backorder) without making it 'done' """ if context is None: @@ -1364,6 +1360,7 @@ class stock_picking(osv.osv): #return id of next picking to work on return self.get_next_picking_for_ui(cr, uid, context=context) + @api.cr_uid_ids_context def action_pack(self, cr, uid, picking_ids, operation_filter_ids=None, context=None): """ Create a package with the current pack_operation_ids of the picking that aren't yet in a pack. Used in the barcode scanner UI and the normal interface as well. 
@@ -1649,7 +1646,7 @@ class stock_move(osv.osv): 'partner_id': fields.many2one('res.partner', 'Destination Address ', states={'done': [('readonly', True)]}, help="Optional address where goods are to be delivered, specifically used for allotment"), - 'move_dest_id': fields.many2one('stock.move', 'Destination Move', help="Optional: next stock move when chaining them", select=True), + 'move_dest_id': fields.many2one('stock.move', 'Destination Move', help="Optional: next stock move when chaining them", select=True, copy=False), 'move_orig_ids': fields.one2many('stock.move', 'move_dest_id', 'Original Move', help="Optional: previous stock move when chaining them", select=True), 'picking_id': fields.many2one('stock.picking', 'Reference', select=True, states={'done': [('readonly', True)]}), @@ -1660,17 +1657,17 @@ class stock_move(osv.osv): ('confirmed', 'Waiting Availability'), ('assigned', 'Available'), ('done', 'Done'), - ], 'Status', readonly=True, select=True, + ], 'Status', readonly=True, select=True, copy=False, help= "* New: When the stock move is created and not yet confirmed.\n"\ "* Waiting Another Move: This state can be seen when a move is waiting for another one, for example in a chained flow.\n"\ "* Waiting Availability: This state is reached when the procurement resolution is not straight forward. 
It may need the scheduler to run, a component to me manufactured...\n"\ "* Available: When products are reserved, it is set to \'Available\'.\n"\ "* Done: When the shipment is processed, the state is \'Done\'."), - 'partially_available': fields.boolean('Partially Available', readonly=True, help="Checks if the move has some stock reserved"), + 'partially_available': fields.boolean('Partially Available', readonly=True, help="Checks if the move has some stock reserved", copy=False), 'price_unit': fields.float('Unit Price', help="Technical field used to record the product cost set by the user during a picking confirmation (when costing method used is 'average price' or 'real'). Value given in company currency and in product uom."), # as it's a technical field, we intentionally don't provide the digits attribute 'company_id': fields.many2one('res.company', 'Company', required=True, select=True), - 'split_from': fields.many2one('stock.move', string="Move Split From", help="Technical field used to track the origin of a split move, which can be useful in case of debug"), + 'split_from': fields.many2one('stock.move', string="Move Split From", help="Technical field used to track the origin of a split move, which can be useful in case of debug", copy=False), 'backorder_id': fields.related('picking_id', 'backorder_id', type='many2one', relation="stock.picking", string="Back Order of", select=True), 'origin': fields.char("Source"), 'procure_method': fields.selection([('make_to_stock', 'Default: Take From Stock'), ('make_to_order', 'Advanced: Apply Procurement Rules')], 'Supply Method', required=True, @@ -1692,7 +1689,7 @@ class stock_move(osv.osv): 'picking_type_id': fields.many2one('stock.picking.type', 'Picking Type'), 'inventory_id': fields.many2one('stock.inventory', 'Inventory'), 'lot_ids': fields.function(_get_lot_ids, type='many2many', relation='stock.quant', string='Lots'), - 'origin_returned_move_id': fields.many2one('stock.move', 'Origin return move', help='move that 
created the return move'), + 'origin_returned_move_id': fields.many2one('stock.move', 'Origin return move', help='move that created the return move', copy=False), 'returned_move_ids': fields.one2many('stock.move', 'origin_returned_move_id', 'All returned moves', help='Optional: all returned moves created from this move'), 'reserved_availability': fields.function(_get_reserved_availability, type='float', string='Quantity Reserved', readonly=True, help='Quantity that has already been reserved for this move'), 'availability': fields.function(_get_product_availability, type='float', string='Quantity Available', readonly=True, help='Quantity in stock that can still be reserved for this move'), @@ -1748,22 +1745,7 @@ class stock_move(osv.osv): ['product_uom']), ] - def copy_data(self, cr, uid, id, default=None, context=None): - if default is None: - default = {} - default = default.copy() - default['move_orig_ids'] = [] - default['quant_ids'] = [] - default['move_dest_id'] = False - default['reserved_quant_ids'] = [] - default['returned_move_ids'] = [] - default['linked_move_operation_ids'] = [] - default['partially_available'] = False - if not default.get('origin_returned_move_id'): - default['origin_returned_move_id'] = False - default['state'] = 'draft' - return super(stock_move, self).copy_data(cr, uid, id, default, context) - + @api.cr_uid_ids_context def do_unreserve(self, cr, uid, move_ids, context=None): quant_obj = self.pool.get("stock.quant") for move in self.browse(cr, uid, move_ids, context=context): @@ -1981,6 +1963,7 @@ class stock_move(osv.osv): result['location_dest_id'] = loc_dest_id return {'value': result} + @api.cr_uid_ids_context def _picking_assign(self, cr, uid, move_ids, procurement_group, location_from, location_to, context=None): """Assign a picking on the given move_ids, which is a list of move supposed to share the same procurement_group, location_from and location_to (and company). Those attributes are also given as parameters. 
@@ -2197,6 +2180,7 @@ class stock_move(osv.osv): move2 = not move2.move_orig_ids and move2.split_from or False return ancestors + @api.cr_uid_ids_context def recalculate_move_state(self, cr, uid, move_ids, context=None): '''Recompute the state of moves given because their reserved quants were used to fulfill another operation''' for move in self.browse(cr, uid, move_ids, context=context): @@ -2469,15 +2453,15 @@ class stock_inventory(osv.osv): _columns = { 'name': fields.char('Inventory Reference', required=True, readonly=True, states={'draft': [('readonly', False)]}, help="Inventory Name."), 'date': fields.datetime('Inventory Date', required=True, readonly=True, help="The date that will be used for the stock level check of the products and the validation of the stock move related to this inventory."), - 'line_ids': fields.one2many('stock.inventory.line', 'inventory_id', 'Inventories', readonly=False, states={'done': [('readonly', True)]}, help="Inventory Lines."), + 'line_ids': fields.one2many('stock.inventory.line', 'inventory_id', 'Inventories', readonly=False, states={'done': [('readonly', True)]}, help="Inventory Lines.", copy=True), 'move_ids': fields.one2many('stock.move', 'inventory_id', 'Created Moves', help="Inventory Moves.", states={'done': [('readonly', True)]}), - 'state': fields.selection(INVENTORY_STATE_SELECTION, 'Status', readonly=True, select=True), + 'state': fields.selection(INVENTORY_STATE_SELECTION, 'Status', readonly=True, select=True, copy=False), 'company_id': fields.many2one('res.company', 'Company', required=True, select=True, readonly=True, states={'draft': [('readonly', False)]}), 'location_id': fields.many2one('stock.location', 'Inventoried Location', required=True, readonly=True, states={'draft': [('readonly', False)]}), 'product_id': fields.many2one('product.product', 'Inventoried Product', readonly=True, states={'draft': [('readonly', False)]}, help="Specify Product to focus your inventory on a particular Product."), 'package_id': 
fields.many2one('stock.quant.package', 'Inventoried Pack', readonly=True, states={'draft': [('readonly', False)]}, help="Specify Pack to focus your inventory on a particular Pack."), 'partner_id': fields.many2one('res.partner', 'Inventoried Owner', readonly=True, states={'draft': [('readonly', False)]}, help="Specify Owner to focus your inventory on a particular Owner."), - 'lot_id': fields.many2one('stock.production.lot', 'Inventoried Lot/Serial Number', readonly=True, states={'draft': [('readonly', False)]}, help="Specify Lot/Serial Number to focus your inventory on a particular Lot/Serial Number."), + 'lot_id': fields.many2one('stock.production.lot', 'Inventoried Lot/Serial Number', readonly=True, states={'draft': [('readonly', False)]}, help="Specify Lot/Serial Number to focus your inventory on a particular Lot/Serial Number.", copy=False), 'move_ids_exist': fields.function(_get_move_ids_exist, type='boolean', string=' Stock Move Exists?', help='technical field for attrs in view'), 'filter': fields.selection(_get_available_filters, 'Selection Filter', required=True), 'total_qty': fields.function(_get_total_qty, type="float"), @@ -2504,13 +2488,6 @@ class stock_inventory(osv.osv): self.pool.get('stock.inventory.line').write(cr, uid, line_ids, {'product_qty': 0}) return True - def copy(self, cr, uid, id, default=None, context=None): - if default is None: - default = {} - default = default.copy() - default.update({'move_ids': []}) - return super(stock_inventory, self).copy(cr, uid, id, default, context=context) - def _inventory_line_hook(self, cr, uid, inventory_line, move_vals): """ Creates a stock move from an inventory line @param inventory_line: @@ -3594,7 +3571,7 @@ class stock_package(osv.osv): return res _columns = { - 'name': fields.char('Package Reference', select=True), + 'name': fields.char('Package Reference', select=True, copy=False), 'complete_name': fields.function(_complete_name, type='char', string="Package Name",), 'parent_left': 
fields.integer('Left Parent', select=1), 'parent_right': fields.integer('Right Parent', select=1), @@ -3641,8 +3618,7 @@ class stock_package(osv.osv): return True def action_print(self, cr, uid, ids, context=None): - context = context or {} - context['active_ids'] = ids + context = dict(context or {}, active_ids=ids) return self.pool.get("report").get_action(cr, uid, ids, 'stock.report_package_barcode', context=context) @@ -3688,15 +3664,6 @@ class stock_package(osv.osv): res[quant.product_id.id] += quant.qty return res - def copy(self, cr, uid, id, default=None, context=None): - if default is None: - default = {} - if not default.get('name'): - default['name'] = self.pool.get('ir.sequence').get(cr, uid, 'stock.quant.package') or _('Unknown Pack') - default['quant_ids'] = [] - default['children_ids'] = [] - return super(stock_package, self).copy(cr, uid, id, default, context=context) - def copy_pack(self, cr, uid, id, default_pack_values=None, default=None, context=None): stock_pack_operation_obj = self.pool.get('stock.pack.operation') if default is None: @@ -3997,7 +3964,7 @@ class stock_warehouse_orderpoint(osv.osv): return result _columns = { - 'name': fields.char('Name', required=True), + 'name': fields.char('Name', required=True, copy=False), 'active': fields.boolean('Active', help="If the active field is set to False, it will allow you to hide the orderpoint without removing it."), 'logic': fields.selection([('max', 'Order to Max'), ('price', 'Best price (not yet active!)')], 'Reordering Mode', required=True), 'warehouse_id': fields.many2one('stock.warehouse', 'Warehouse', required=True, ondelete="cascade"), @@ -4013,7 +3980,7 @@ class stock_warehouse_orderpoint(osv.osv): 'qty_multiple': fields.integer('Qty Multiple', required=True, help="The procurement quantity will be rounded up to this multiple."), 'procurement_ids': fields.one2many('procurement.order', 'orderpoint_id', 'Created Procurements'), - 'group_id': fields.many2one('procurement.group', 
'Procurement Group', help="Moves created through this orderpoint will be put in this procurement group. If none is given, the moves generated by procurement rules will be grouped into one big picking."), + 'group_id': fields.many2one('procurement.group', 'Procurement Group', help="Moves created through this orderpoint will be put in this procurement group. If none is given, the moves generated by procurement rules will be grouped into one big picking.", copy=False), 'company_id': fields.many2one('res.company', 'Company', required=True), } _defaults = { @@ -4065,17 +4032,6 @@ class stock_warehouse_orderpoint(osv.osv): return {'value': v, 'domain': d} return {'domain': {'product_uom': []}} - def copy_data(self, cr, uid, id, default=None, context=None): - if not default: - default = {} - default.update({ - 'name': self.pool.get('ir.sequence').get(cr, uid, 'stock.orderpoint') or '', - 'procurement_ids': [], - 'group_id': False - }) - return super(stock_warehouse_orderpoint, self).copy_data(cr, uid, id, default, context=context) - - class stock_picking_type(osv.osv): _name = "stock.picking.type" _description = "The picking type determines the picking view" @@ -4190,7 +4146,7 @@ class stock_picking_type(osv.osv): # Statistics for the kanban view 'last_done_picking': fields.function(_get_tristate_values, - type='char', + type='any', string='Last 10 Done Pickings'), 'count_picking_draft': fields.function(_get_picking_count, diff --git a/addons/stock/test/shipment.yml b/addons/stock/test/shipment.yml index 4e640f3aa641abb13dfea5a8860e701e68b80cc1..2d520918bac17cccc68291aefcbf9c6926d4e982 100644 --- a/addons/stock/test/shipment.yml +++ b/addons/stock/test/shipment.yml @@ -23,7 +23,8 @@ 'location_dest_id': ref('stock.stock_location_14') }) context.update({'active_model': 'stock.picking', 'active_id': ref('incomming_shipment'), 'active_ids': [ref('incomming_shipment')]}) - pick.do_transfer(context=context) + pick = self.browse(cr, uid, pick.id, context=context) + 
pick.do_transfer() - I check backorder shipment after received partial shipment and check remaining shipment. - @@ -43,7 +44,7 @@ - !python {model: stock.picking}: | backorder_id = self.search(cr, uid, [('backorder_id', '=', ref("incomming_shipment"))],context=context) - backorder = self.browse(cr, uid, backorder_id)[0] + backorder = self.browse(cr, uid, backorder_id, context=context)[0] self.pool.get('stock.pack.operation').create(cr, uid, { 'picking_id': backorder.id, 'product_id': ref('product_icecream'), @@ -52,7 +53,7 @@ 'location_id': ref('stock.stock_location_suppliers'), 'location_dest_id': ref('stock.stock_location_14') }) - backorder.do_transfer(context=context) + backorder.do_transfer() - I check incomming shipment after reception. - diff --git a/addons/stock_account/stock.py b/addons/stock_account/stock.py index 56d851cec7a50efb30efd6f6d36893de62889457..9679bf2a72f11470f7b522bd13e0ce6f239031e5 100644 --- a/addons/stock_account/stock.py +++ b/addons/stock_account/stock.py @@ -106,8 +106,8 @@ class stock_move(osv.osv): context = {} if type in ('in_invoice', 'in_refund'): # Take the user company and pricetype - context['currency_id'] = move_line.company_id.currency_id.id - amount_unit = move_line.product_id.price_get('standard_price', context=context)[move_line.product_id.id] + product = move_line.product_id.with_context(currency_id=move_line.company_id.currency_id.id) + amount_unit = product.price_get('standard_price')[move_line.product_id.id] return amount_unit return move_line.product_id.list_price diff --git a/addons/stock_account/stock_account.py b/addons/stock_account/stock_account.py index 545bdc05fec6aeb10c7cbc9175786a1fc254cc12..faa699f6d4689d7cea594e85e3dcefb6f61c34be 100644 --- a/addons/stock_account/stock_account.py +++ b/addons/stock_account/stock_account.py @@ -21,7 +21,7 @@ from openerp.osv import fields, osv from openerp.tools.translate import _ -from openerp import SUPERUSER_ID +from openerp import SUPERUSER_ID, api import logging _logger 
= logging.getLogger(__name__) @@ -73,6 +73,7 @@ class stock_quant(osv.osv): return quant.cost * quant.qty return super(stock_quant, self)._get_inventory_value(cr, uid, quant, context=context) + @api.cr_uid_ids_context def _price_update(self, cr, uid, quant_ids, newprice, context=None): ''' This function is called at the end of negative quant reconciliation and does the accounting entries adjustemnts and the update of the product cost price if needed ''' diff --git a/addons/stock_account/wizard/stock_invoice_onshipping.py b/addons/stock_account/wizard/stock_invoice_onshipping.py index 1675275632af43336a1b3e33a392608be4ebe510..63a827ca3cf07ec1b565996e264f1732569c97f8 100644 --- a/addons/stock_account/wizard/stock_invoice_onshipping.py +++ b/addons/stock_account/wizard/stock_invoice_onshipping.py @@ -115,7 +115,7 @@ class stock_invoice_onshipping(osv.osv_memory): return True def create_invoice(self, cr, uid, ids, context=None): - context = context or {} + context = dict(context or {}) picking_pool = self.pool.get('stock.picking') data = self.browse(cr, uid, ids[0], context=context) journal2type = {'sale':'out_invoice', 'purchase':'in_invoice', 'sale_refund':'out_refund', 'purchase_refund':'in_refund'} diff --git a/addons/stock_dropshipping/test/lifo_price.yml b/addons/stock_dropshipping/test/lifo_price.yml index f913782a4fd779a74464e603060243de8751d7a4..a9b641bab7704bbbdd68a0d28affdca1eac81580 100644 --- a/addons/stock_dropshipping/test/lifo_price.yml +++ b/addons/stock_dropshipping/test/lifo_price.yml @@ -60,8 +60,8 @@ Process the reception of purchase order 1 - !python {model: stock.picking}: | - order = self.pool.get('purchase.order').browse(cr, uid, ref("purchase_order_lifo1")).picking_ids[0] - order.do_transfer(context=context) + order = self.pool.get('purchase.order').browse(cr, uid, ref("purchase_order_lifo1"), context=context).picking_ids[0] + order.do_transfer() - Check the standard price of the product (lifo icecream) - @@ -75,8 +75,8 @@ Process the 
reception of purchase order 2 - !python {model: stock.picking}: | - order = self.pool.get('purchase.order').browse(cr, uid, ref("purchase_order_lifo2")).picking_ids[0] - order.do_transfer(context=context) + order = self.pool.get('purchase.order').browse(cr, uid, ref("purchase_order_lifo2"), context=context).picking_ids[0] + order.do_transfer() - Check the standard price should not have changed - @@ -107,8 +107,8 @@ Process the delivery of the outgoing shipment - !python {model: stock.picking}: | - pick_order = self.pool.get('stock.picking').browse(cr, uid, ref("outgoing_lifo_shipment")) - pick_order.do_transfer(context=context) + pick_order = self.pool.get('stock.picking').browse(cr, uid, ref("outgoing_lifo_shipment"), context=context) + pick_order.do_transfer() - Check standard price became 80 euro - diff --git a/addons/stock_invoice_directly/stock_invoice_directly.py b/addons/stock_invoice_directly/stock_invoice_directly.py index c4cccd6d17b8dffb236c7dae518bba3f6c13205c..00652ee6110c059bf1a814e5888657b5c544831a 100644 --- a/addons/stock_invoice_directly/stock_invoice_directly.py +++ b/addons/stock_invoice_directly/stock_invoice_directly.py @@ -19,6 +19,7 @@ # ############################################################################## +from openerp import api from openerp.osv import osv from openerp.tools.translate import _ @@ -26,6 +27,7 @@ from openerp.tools.translate import _ class stock_picking(osv.osv): _inherit = 'stock.picking' + @api.cr_uid_ids_context def do_transfer(self, cr, uid, picking_ids, context=None): """Launch Create invoice wizard if invoice state is To be Invoiced, after processing the picking. 
@@ -35,7 +37,7 @@ class stock_picking(osv.osv): res = super(stock_picking, self).do_transfer(cr, uid, picking_ids, context=context) pick_ids = [p.id for p in self.browse(cr, uid, picking_ids, context) if p.invoice_state == '2binvoiced'] if pick_ids: - context.update(active_model='stock.picking', active_ids=pick_ids) + context = dict(context, active_model='stock.picking', active_ids=pick_ids) return { 'name': _('Create Invoice'), 'view_type': 'form', diff --git a/addons/stock_invoice_directly/test/stock_invoice_directly.yml b/addons/stock_invoice_directly/test/stock_invoice_directly.yml index 68cf8588bd2ece7c37d71ffe6e4f411490aad12b..f2c79bd5e521a121dea751e8ea15627a661f704f 100644 --- a/addons/stock_invoice_directly/test/stock_invoice_directly.yml +++ b/addons/stock_invoice_directly/test/stock_invoice_directly.yml @@ -51,7 +51,7 @@ - !python {model: account.invoice}: | picking_obj = self.pool.get('stock.picking') - picking = picking_obj.browse(cr, uid, [ref('stock_picking_out0')]) + picking = picking_obj.browse(cr, uid, [ref('stock_picking_out0')], context=context) partner = picking[0].partner_id.id - inv_ids = self.search(cr, uid, [('type','=','out_invoice'),('partner_id','=',partner)]) + inv_ids = self.search(cr, uid, [('type','=','out_invoice'),('partner_id','=',partner)], context=context) assert inv_ids, 'No Invoice is generated!' 
diff --git a/addons/stock_landed_costs/stock_landed_costs.py b/addons/stock_landed_costs/stock_landed_costs.py index db4d0aaf6383db893de95d0f4b49ca0dd077b07d..16a292fab8a0cb6d6d320b7b551c4fe90229c0ad 100644 --- a/addons/stock_landed_costs/stock_landed_costs.py +++ b/addons/stock_landed_costs/stock_landed_costs.py @@ -80,10 +80,10 @@ class stock_landed_cost(osv.osv): return {'value': result} _columns = { - 'name': fields.char('Name', track_visibility='always', readonly=True), - 'date': fields.date('Date', required=True, states={'done': [('readonly', True)]}, track_visibility='onchange'), - 'picking_ids': fields.many2many('stock.picking', string='Pickings', states={'done': [('readonly', True)]}), - 'cost_lines': fields.one2many('stock.landed.cost.lines', 'cost_id', 'Cost Lines', states={'done': [('readonly', True)]}), + 'name': fields.char('Name', track_visibility='always', readonly=True, copy=False), + 'date': fields.date('Date', required=True, states={'done': [('readonly', True)]}, track_visibility='onchange', copy=False), + 'picking_ids': fields.many2many('stock.picking', string='Pickings', states={'done': [('readonly', True)]}, copy=False), + 'cost_lines': fields.one2many('stock.landed.cost.lines', 'cost_id', 'Cost Lines', states={'done': [('readonly', True)]}, copy=True), 'valuation_adjustment_lines': fields.one2many('stock.valuation.adjustment.lines', 'cost_id', 'Valuation Adjustments', states={'done': [('readonly', True)]}), 'description': fields.text('Item Description', states={'done': [('readonly', True)]}), 'amount_total': fields.function(_total_amount, type='float', string='Total', digits_compute=dp.get_precision('Account'), @@ -92,8 +92,8 @@ class stock_landed_cost(osv.osv): 'stock.landed.cost.lines': (_get_cost_line, ['price_unit', 'quantity', 'cost_id'], 20), }, track_visibility='always' ), - 'state': fields.selection([('draft', 'Draft'), ('done', 'Posted'), ('cancel', 'Cancelled')], 'State', readonly=True, track_visibility='onchange'), - 
'account_move_id': fields.many2one('account.move', 'Journal Entry', readonly=True), + 'state': fields.selection([('draft', 'Draft'), ('done', 'Posted'), ('cancel', 'Cancelled')], 'State', readonly=True, track_visibility='onchange', copy=False), + 'account_move_id': fields.many2one('account.move', 'Journal Entry', readonly=True, copy=False), 'account_journal_id': fields.many2one('account.journal', 'Account Journal', required=True), } @@ -103,13 +103,6 @@ class stock_landed_cost(osv.osv): 'date': fields.date.context_today, } - def copy(self, cr, uid, id, default=None, context=None): - default = {} if default is None else default.copy() - default.update({ - 'account_move_id': False, - }) - return super(stock_landed_cost, self).copy(cr, uid, id, default=default, context=context) - def _create_accounting_entries(self, cr, uid, line, move_id, context=None): product_obj = self.pool.get('product.product') cost_product = line.cost_line_id and line.cost_line_id.product_id diff --git a/addons/stock_picking_wave/stock_picking_wave.py b/addons/stock_picking_wave/stock_picking_wave.py index 3fd7d9cdbc099e2122ef65f8a5ea179dd44fc365..0082d500d98c0fd3929b4ddfb65845146109682d 100644 --- a/addons/stock_picking_wave/stock_picking_wave.py +++ b/addons/stock_picking_wave/stock_picking_wave.py @@ -6,10 +6,10 @@ class stock_picking_wave(osv.osv): _name = "stock.picking.wave" _order = "name desc" _columns = { - 'name': fields.char('Picking Wave Name', required=True, help='Name of the picking wave'), + 'name': fields.char('Picking Wave Name', required=True, help='Name of the picking wave', copy=False), 'user_id': fields.many2one('res.users', 'Responsible', help='Person responsible for this wave'), 'picking_ids': fields.one2many('stock.picking', 'wave_id', 'Pickings', help='List of picking associated to this wave'), - 'state': fields.selection([('draft', 'Draft'), ('in_progress', 'Running'), ('done', 'Done'), ('cancel', 'Cancelled')], string="State", required=True), + 'state': 
fields.selection([('draft', 'Draft'), ('in_progress', 'Running'), ('done', 'Done'), ('cancel', 'Cancelled')], string="State", required=True, copy=False), } _defaults = { @@ -31,8 +31,7 @@ class stock_picking_wave(osv.osv): ''' This function print the report for all picking_ids associated to the picking wave ''' - if context is None: - context = {} + context = dict(context or {}) picking_ids = [] for wave in self.browse(cr, uid, ids, context=context): picking_ids += [picking.id for picking in wave.picking_ids] @@ -47,15 +46,6 @@ class stock_picking_wave(osv.osv): vals['name'] = self.pool.get('ir.sequence').get(cr, uid, 'picking.wave') or '/' return super(stock_picking_wave, self).create(cr, uid, vals, context=context) - def copy(self, cr, uid, id, default=None, context=None): - if not default: - default = {} - default.update({ - 'state': 'in_progress', - 'name': self.pool.get('ir.sequence').get(cr, uid, 'picking.wave'), - }) - return super(stock_picking_wave, self).copy(cr, uid, id, default=default, context=context) - def done(self, cr, uid, ids, context=None): picking_todo = set() for wave in self.browse(cr, uid, ids, context=context): diff --git a/addons/subscription/subscription.py b/addons/subscription/subscription.py index 779619d011f67c050cefce561e7410d7a6a4a6bd..cc40c468cb08e4f8c1f2b9e29659549b4e98077a 100644 --- a/addons/subscription/subscription.py +++ b/addons/subscription/subscription.py @@ -33,7 +33,7 @@ class subscription_document(osv.osv): 'name': fields.char('Name', required=True), 'active': fields.boolean('Active', help="If the active field is set to False, it will allow you to hide the subscription document without removing it."), 'model': fields.many2one('ir.model', 'Object', required=True), - 'field_ids': fields.one2many('subscription.document.fields', 'document_id', 'Fields') + 'field_ids': fields.one2many('subscription.document.fields', 'document_id', 'Fields', copy=True) } _defaults = { 'active' : lambda *a: True, @@ -67,7 +67,7 @@ class 
subscription_subscription(osv.osv): 'interval_type': fields.selection([('days', 'Days'), ('weeks', 'Weeks'), ('months', 'Months')], 'Interval Unit'), 'exec_init': fields.integer('Number of documents'), 'date_init': fields.datetime('First Date'), - 'state': fields.selection([('draft','Draft'),('running','Running'),('done','Done')], 'Status'), + 'state': fields.selection([('draft','Draft'),('running','Running'),('done','Done')], 'Status', copy=False), 'doc_source': fields.reference('Source Document', required=True, selection=_get_document_types, size=128, help="User can choose the source document on which he wants to create documents"), 'doc_lines': fields.one2many('subscription.subscription.history', 'subscription_id', 'Documents created', readonly=True), 'cron_id': fields.many2one('ir.cron', 'Cron Job', help="Scheduler which runs on subscription", states={'running':[('readonly',True)], 'done':[('readonly',True)]}), diff --git a/addons/survey/controllers/main.py b/addons/survey/controllers/main.py index c554558b34f9512d7e70776994029f7107316f68..65152cdf8ced34287e166693d00f3cd333e0bcdc 100644 --- a/addons/survey/controllers/main.py +++ b/addons/survey/controllers/main.py @@ -22,6 +22,7 @@ import json import logging import werkzeug +import werkzeug.utils from datetime import datetime from math import ceil diff --git a/addons/survey/survey.py b/addons/survey/survey.py index 39f3135c36be531aa83b2d204f314caec762b61f..e87e24a41a0585134b2255c8c0419d38cd46cc41 100644 --- a/addons/survey/survey.py +++ b/addons/survey/survey.py @@ -141,8 +141,7 @@ class survey_survey(osv.Model): def _get_print_url(self, cr, uid, ids, name, arg, context=None): """ Computes a printing URL for the survey """ - base_url = self.pool.get('ir.config_parameter').get_param(cr, uid, - 'web.base.url') + base_url = self.pool.get('ir.config_parameter').get_param(cr, uid, 'web.base.url') res = {} for survey in self.browse(cr, uid, ids, context=context): res[survey.id] = urljoin(base_url, "survey/print/%s" 
% slug(survey)) @@ -162,8 +161,8 @@ class survey_survey(osv.Model): _columns = { 'title': fields.char('Title', required=1, translate=True), 'res_model': fields.char('Category'), - 'page_ids': fields.one2many('survey.page', 'survey_id', 'Pages'), - 'stage_id': fields.many2one('survey.stage', string="Stage", ondelete="set null"), + 'page_ids': fields.one2many('survey.page', 'survey_id', 'Pages', copy=True), + 'stage_id': fields.many2one('survey.stage', string="Stage", ondelete="set null", copy=False), 'auth_required': fields.boolean('Login required', help="Users with a public link will be requested to login before taking part to the survey", oldname="authenticate"), @@ -236,12 +235,10 @@ class survey_survey(osv.Model): # Public methods # def copy_data(self, cr, uid, id, default=None, context=None): - vals = dict() current_rec = self.read(cr, uid, id, fields=['title'], context=context) title = _("%s (copy)") % (current_rec.get('title')) - vals['title'] = title - vals['user_input_ids'] = [] - return super(survey_survey, self).copy_data(cr, uid, id, default=vals, + default = dict(default or {}, title=title) + return super(survey_survey, self).copy_data(cr, uid, id, default, context=context) def next_page(self, cr, uid, user_input, page_id, go_back=False, context=None): @@ -512,7 +509,7 @@ class survey_page(osv.Model): 'survey_id': fields.many2one('survey.survey', 'Survey', ondelete='cascade', required=True), 'question_ids': fields.one2many('survey.question', 'page_id', - 'Questions'), + 'Questions', copy=True), 'sequence': fields.integer('Page number'), 'description': fields.html('Description', help="An introductory text to your page", translate=True, @@ -525,11 +522,10 @@ class survey_page(osv.Model): # Public methods # def copy_data(self, cr, uid, ids, default=None, context=None): - vals = {} current_rec = self.read(cr, uid, ids, fields=['title'], context=context) title = _("%s (copy)") % (current_rec.get('title')) - vals.update({'title': title}) - return 
super(survey_page, self).copy_data(cr, uid, ids, default=vals, + default = dict(default or {}, title=title) + return super(survey_page, self).copy_data(cr, uid, ids, default, context=context) @@ -570,9 +566,9 @@ class survey_question(osv.Model): 'matrix_subtype': fields.selection([('simple', 'One choice per row'), ('multiple', 'Multiple choices per row')], 'Matrix Type'), 'labels_ids': fields.one2many('survey.label', - 'question_id', 'Types of answers', oldname='answer_choice_ids'), + 'question_id', 'Types of answers', oldname='answer_choice_ids', copy=True), 'labels_ids_2': fields.one2many('survey.label', - 'question_id_2', 'Rows of the Matrix'), + 'question_id_2', 'Rows of the Matrix', copy=True), # labels are used for proposed choices # if question.type == simple choice | multiple choice # -> only labels_ids is used @@ -645,17 +641,10 @@ class survey_question(osv.Model): ] def copy_data(self, cr, uid, ids, default=None, context=None): - # This will prevent duplication of user input lines in case of question duplication - # (in cascade, this will also allow to duplicate surveys without duplicating bad user input - # lines) - vals = {'user_input_line_ids': []} - - # Updating question title current_rec = self.read(cr, uid, ids, context=context) question = _("%s (copy)") % (current_rec.get('question')) - vals['question'] = question - - return super(survey_question, self).copy_data(cr, uid, ids, default=vals, + default = dict(default or {}, question=question) + return super(survey_question, self).copy_data(cr, uid, ids, default, context=context) # Validation methods @@ -868,8 +857,10 @@ class survey_user_input(osv.Model): 'user_input_id', 'Answers'), # URLs used to display the answers - 'result_url': fields.related('survey_id', 'result_url', string="Public link to the survey results"), - 'print_url': fields.related('survey_id', 'print_url', string="Public link to the empty survey"), + 'result_url': fields.related('survey_id', 'result_url', type='char', + 
string="Public link to the survey results"), + 'print_url': fields.related('survey_id', 'print_url', type='char', + string="Public link to the empty survey"), 'quizz_score': fields.function(_quizz_get_score, type="float", string="Score for the quiz") } @@ -903,7 +894,7 @@ class survey_user_input(osv.Model): def action_survey_resent(self, cr, uid, ids, context=None): ''' Sent again the invitation ''' record = self.browse(cr, uid, ids[0], context=context) - context = context or {} + context = dict(context or {}) context.update({ 'survey_resent_token': True, 'default_partner_ids': record.partner_id and [record.partner_id.id] or [], diff --git a/addons/web/static/src/js/chrome.js b/addons/web/static/src/js/chrome.js index 39c7395556eacd0fd6a8e92e17bec99c2b004733..f5f8129d5020351a0991f965b93cc86fdf924332 100644 --- a/addons/web/static/src/js/chrome.js +++ b/addons/web/static/src/js/chrome.js @@ -958,6 +958,8 @@ instance.web.Menu = instance.web.Widget.extend({ id: id, previous_menu_id: this.current_menu // Here we don't know if action will fail (in which case we have to revert menu) }, $item); + } else { + console.log('Menu no action found web test 04 will fail'); } this.open_menu(id); }, @@ -1378,8 +1380,7 @@ instance.web.WebClient = instance.web.Client.extend({ var first_menu_id = self.menu.$el.find("a:first").data("menu"); if(first_menu_id) { self.menu.menu_click(first_menu_id); - } - } + } } }); }); } else { diff --git a/addons/web/static/src/js/view_form.js b/addons/web/static/src/js/view_form.js index 7372e8a37831c7d27fdb624aa28d136dcd3fdfec..e3b8e3ead0d157e0e340609d513011d685a1508e 100644 --- a/addons/web/static/src/js/view_form.js +++ b/addons/web/static/src/js/view_form.js @@ -102,6 +102,7 @@ instance.web.FormView = instance.web.View.extend(instance.web.form.FieldManagerM this.fields = {}; this.fields_order = []; this.datarecord = {}; + this._onchange_specs = {}; this.default_focus_field = null; this.default_focus_button = null; this.fields_registry = 
instance.web.form.widgets; @@ -117,7 +118,6 @@ instance.web.FormView = instance.web.View.extend(instance.web.form.FieldManagerM }); this.is_initialized = $.Deferred(); this.mutating_mutex = new $.Mutex(); - this.on_change_list = []; this.save_list = []; this.reload_mutex = new $.Mutex(); this.__clicked_inside = false; @@ -125,6 +125,7 @@ instance.web.FormView = instance.web.View.extend(instance.web.form.FieldManagerM this.rendering_engine = new instance.web.form.FormRenderingEngine(this); self.set({actual_mode: self.options.initial_mode}); this.has_been_loaded.done(function() { + self._build_onchange_specs(); self.on("change:actual_mode", self, self.check_actual_mode); self.check_actual_mode(); self.on("change:actual_mode", self, self.init_pager); @@ -335,15 +336,8 @@ instance.web.FormView = instance.web.View.extend(instance.web.form.FieldManagerM }); return $.when.apply(null, set_values).then(function() { if (!record.id) { - // New record: Second pass in order to trigger the onchanges - // respecting the fields order defined in the view - _.each(self.fields_order, function(field_name) { - if (record[field_name] !== undefined) { - var field = self.fields[field_name]; - field._dirty_flag = true; - self.do_onchange(field); - } - }); + // trigger onchanges + self.do_onchange(null); } self.on_form_changed(); self.rendering_engine.init_fields(); @@ -438,101 +432,83 @@ instance.web.FormView = instance.web.View.extend(instance.web.form.FieldManagerM $(".oe_form_pager_state", this.$pager).html(_.str.sprintf(_t("%d / %d"), this.dataset.index + 1, this.dataset.ids.length)); } }, - parse_on_change: function (on_change, widget) { - var self = this; - var onchange = _.str.trim(on_change); - var call = onchange.match(/^\s?(.*?)\((.*?)\)\s?$/); - if (!call) { - throw new Error(_.str.sprintf( _t("Wrong on change format: %s"), onchange )); - } - - var method = call[1]; - if (!_.str.trim(call[2])) { - return {method: method, args: []}; - } - - var argument_replacement = { - 'False': 
function () {return false;}, - 'True': function () {return true;}, - 'None': function () {return null;}, - 'context': function () { - return new instance.web.CompoundContext( - self.dataset.get_context(), - widget.build_context() ? widget.build_context() : {}); - } - }; - var parent_fields = null; - var args = _.map(call[2].split(','), function (a, i) { - var field = _.str.trim(a); - // literal constant or context - if (field in argument_replacement) { - return argument_replacement[field](); - } - // literal number - if (/^-?\d+(\.\d+)?$/.test(field)) { - return Number(field); - } - // form field - if (self.fields[field]) { - var value_ = self.fields[field].get_value(); - return value_ === null || value_ === undefined ? false : value_; - } - // parent field - var splitted = field.split('.'); - if (splitted.length > 1 && _.str.trim(splitted[0]) === "parent" && self.dataset.parent_view) { - if (parent_fields === null) { - parent_fields = self.dataset.parent_view.get_fields_values(); + _build_onchange_specs: function() { + var self = this; + var find = function(field_name, root) { + var fields = [root]; + while (fields.length) { + var node = fields.pop(); + if (!node) { + continue; } - var p_val = parent_fields[_.str.trim(splitted[1])]; - if (p_val !== undefined) { - return p_val === null || p_val === undefined ? 
false : p_val; + if (node.tag === 'field' && node.attrs.name === field_name) { + return node.attrs.on_change || ""; } + fields = _.union(fields, node.children); } - // string literal - var first_char = field[0], last_char = field[field.length-1]; - if ((first_char === '"' && last_char === '"') - || (first_char === "'" && last_char === "'")) { - return field.slice(1, -1); - } + return ""; + }; - throw new Error("Could not get field with name '" + field + - "' for onchange '" + onchange + "'"); + self._onchange_specs = {}; + _.each(this.fields, function(field, name) { + self._onchange_specs[name] = find(name, field.node); + _.each(field.field.views, function(view) { + _.each(view.fields, function(_, subname) { + self._onchange_specs[name + '.' + subname] = find(subname, view.arch); + }); + }); }); - - return { - method: method, - args: args - }; }, - do_onchange: function(widget, processed) { - var self = this; - this.on_change_list = [{widget: widget, processed: processed}].concat(this.on_change_list); - return this._process_operations(); + _get_onchange_values: function() { + var field_values = this.get_fields_values(); + if (field_values.id.toString().match(instance.web.BufferedDataSet.virtual_id_regex)) { + delete field_values.id; + } + if (this.dataset.parent_view) { + // this belongs to a parent view: add parent field if possible + var parent_view = this.dataset.parent_view; + var child_name = this.dataset.child_name; + var parent_name = parent_view.get_field_desc(child_name).relation_field; + if (parent_name) { + // consider all fields except the inverse of the parent field + var parent_values = parent_view.get_fields_values(); + delete parent_values[child_name]; + field_values[parent_name] = parent_values; + } + } + return field_values; }, - _process_onchange: function(on_change_obj) { + + do_onchange: function(widget) { var self = this; - var widget = on_change_obj.widget; - var processed = on_change_obj.processed; + var onchange_specs = 
self._onchange_specs; try { - var def; - processed = processed || []; - processed.push(widget.name); - var on_change = widget.node.attrs.on_change; - if (on_change) { - var change_spec = self.parse_on_change(on_change, widget); - var ids = []; + var def = $.when({}); + var change_spec = widget ? onchange_specs[widget.name] : null; + if (!widget || (!_.isEmpty(change_spec) && change_spec !== "0")) { + var ids = [], + trigger_field_name = widget ? widget.name : false, + values = self._get_onchange_values(), + context = new instance.web.CompoundContext(self.dataset.get_context()); + + if (widget && widget.build_context()) { + context.add(widget.build_context()); + } + if (self.dataset.parent_view) { + var parent_name = self.dataset.parent_view.get_field_desc(self.dataset.child_name).relation_field; + context.add({field_parent: parent_name}); + } + if (self.datarecord.id && !instance.web.BufferedDataSet.virtual_id_regex.test(self.datarecord.id)) { // In case of a o2m virtual id, we should pass an empty ids list ids.push(self.datarecord.id); } def = self.alive(new instance.web.Model(self.dataset.model).call( - change_spec.method, [ids].concat(change_spec.args))); - } else { - def = $.when({}); + "onchange", [ids, values, trigger_field_name, onchange_specs, context])); } return def.then(function(response) { - if (widget.field['change_default']) { + if (widget && widget.field['change_default']) { var fieldname = widget.name; var value_; if (response.value && (fieldname in response.value)) { @@ -565,7 +541,7 @@ instance.web.FormView = instance.web.View.extend(instance.web.form.FieldManagerM } return response; }).then(function(response) { - return self.on_processed_onchange(response, processed); + return self.on_processed_onchange(response); }); } catch(e) { console.error(e); @@ -573,7 +549,7 @@ instance.web.FormView = instance.web.View.extend(instance.web.form.FieldManagerM return $.Deferred().reject(); } }, - on_processed_onchange: function(result, processed) { + 
on_processed_onchange: function(result) { try { var fields = this.fields; _(result.domain).each(function (domain, fieldname) { @@ -581,10 +557,11 @@ instance.web.FormView = instance.web.View.extend(instance.web.form.FieldManagerM if (!field) { return; } field.node.attrs.domain = domain; }); - - if (result.value) { - this._internal_set_values(result.value, processed); + + if (!_.isEmpty(result.value)) { + this._internal_set_values(result.value); } + // FIXME XXX a list of warnings? if (!_.isEmpty(result.warning)) { new instance.web.Dialog(this, { size: 'medium', @@ -606,21 +583,12 @@ instance.web.FormView = instance.web.View.extend(instance.web.form.FieldManagerM var self = this; return this.mutating_mutex.exec(function() { function iterate() { - var on_change_obj = self.on_change_list.shift(); - if (on_change_obj) { - return self._process_onchange(on_change_obj).then(function() { - return iterate(); - }); - } var defs = []; _.each(self.fields, function(field) { defs.push(field.commit_value()); }); var args = _.toArray(arguments); return $.when.apply($, defs).then(function() { - if (self.on_change_list.length !== 0) { - return iterate(); - } var save_obj = self.save_list.pop(); if (save_obj) { return self._process_save(save_obj).then(function() { @@ -639,8 +607,7 @@ instance.web.FormView = instance.web.View.extend(instance.web.form.FieldManagerM return iterate(); }); }, - _internal_set_values: function(values, exclude) { - exclude = exclude || []; + _internal_set_values: function(values) { for (var f in values) { if (!values.hasOwnProperty(f)) { continue; } var field = this.fields[f]; @@ -652,9 +619,6 @@ instance.web.FormView = instance.web.View.extend(instance.web.form.FieldManagerM field.set_value(value_); field._inhibit_on_change_flag = false; field._dirty_flag = true; - if (!_.contains(exclude, field.name)) { - this.do_onchange(field, exclude); - } } } } @@ -3328,11 +3292,14 @@ instance.web.form.CompletionFieldMixin = { var self = this; var dataset = new 
instance.web.DataSet(this, this.field.relation, self.build_context()); - var blacklist = this.get_search_blacklist(); this.last_query = search_val; + var exclusion_domain = [], ids_blacklist = this.get_search_blacklist(); + if (!_(ids_blacklist).isEmpty()) { + exclusion_domain.push(['id', 'not in', ids_blacklist]); + } return this.orderer.add(dataset.name_search( - search_val, new instance.web.CompoundDomain(self.build_domain(), [["id", "not in", blacklist]]), + search_val, new instance.web.CompoundDomain(self.build_domain(), exclusion_domain), 'ilike', this.limit + 1, self.build_context())).then(function(data) { self.last_search = data; // possible selections for the m2o @@ -4151,28 +4118,41 @@ instance.web.form.FieldOne2Many = instance.web.form.AbstractField.extend({ reload_current_view: function() { var self = this; self.is_loaded = self.is_loaded.then(function() { - var active_view = self.viewmanager.active_view; - var view = self.viewmanager.views[active_view].controller; - if(active_view === "list") { - return view.reload_content(); - } else if (active_view === "form") { + var view = self.get_active_view(); + if (view.type === "list") { + return view.controller.reload_content(); + } else if (view.type === "form") { if (self.dataset.index === null && self.dataset.ids.length >= 1) { self.dataset.index = 0; } var act = function() { - return view.do_show(); + return view.controller.do_show(); }; self.form_last_update = self.form_last_update.then(act, act); return self.form_last_update; - } else if (view.do_search) { - return view.do_search(self.build_domain(), self.dataset.get_context(), []); + } else if (view.controller.do_search) { + return view.controller.do_search(self.build_domain(), self.dataset.get_context(), []); } }, undefined); return self.is_loaded; }, + get_active_view: function () { + /** + * Returns the current active view if any. 
+ */ + if (this.viewmanager && this.viewmanager.views && this.viewmanager.active_view && + this.viewmanager.views[this.viewmanager.active_view] && + this.viewmanager.views[this.viewmanager.active_view].controller) { + return { + type: this.viewmanager.active_view, + controller: this.viewmanager.views[this.viewmanager.active_view].controller + }; + } + }, set_value: function(value_) { value_ = value_ || []; var self = this; + var view = this.get_active_view(); this.dataset.reset_ids([]); var ids; if(value_.length >= 1 && value_[0] instanceof Array) { @@ -4257,33 +4237,32 @@ instance.web.form.FieldOne2Many = instance.web.form.AbstractField.extend({ return this.save_any_view(); }, save_any_view: function() { - if (this.viewmanager && this.viewmanager.views && this.viewmanager.active_view && - this.viewmanager.views[this.viewmanager.active_view] && - this.viewmanager.views[this.viewmanager.active_view].controller) { - var view = this.viewmanager.views[this.viewmanager.active_view].controller; + var view = this.get_active_view(); + if (view) { if (this.viewmanager.active_view === "form") { - if (view.is_initialized.state() !== 'resolved') { + if (view.controller.is_initialized.state() !== 'resolved') { return $.when(false); } - return $.when(view.save()); + return $.when(view.controller.save()); } else if (this.viewmanager.active_view === "list") { - return $.when(view.ensure_saved()); + return $.when(view.controller.ensure_saved()); } } return $.when(false); }, is_syntax_valid: function() { - if (! this.viewmanager || ! 
this.viewmanager.views[this.viewmanager.active_view]) + var view = this.get_active_view(); + if (!view){ return true; - var view = this.viewmanager.views[this.viewmanager.active_view].controller; + } switch (this.viewmanager.active_view) { case 'form': - return _(view.fields).chain() + return _(view.controller.fields).chain() .invoke('is_valid') .all(_.identity) .value(); case 'list': - return view.is_valid(); + return view.controller.is_valid(); } return true; }, @@ -4673,10 +4652,21 @@ instance.web.form.FieldMany2ManyTags = instance.web.form.AbstractField.extend(in } }); }, + // WARNING: duplicated in 4 other M2M widgets set_value: function(value_) { value_ = value_ || []; if (value_.length >= 1 && value_[0] instanceof Array) { - value_ = value_[0][2]; + // value_ is a list of m2m commands. We only process + // LINK_TO and REPLACE_WITH in this context + var val = []; + _.each(value_, function (command) { + if (command[0] === commands.LINK_TO) { + val.push(command[1]); // (4, id[, _]) + } else if (command[0] === commands.REPLACE_WITH) { + val = command[2]; // (6, _, ids) + } + }); + value_ = val; } this._super(value_); }, @@ -4807,10 +4797,21 @@ instance.web.form.FieldMany2Many = instance.web.form.AbstractField.extend(instan this.list_view.destroy(); this.list_view = undefined; }, + // WARNING: duplicated in 4 other M2M widgets set_value: function(value_) { value_ = value_ || []; if (value_.length >= 1 && value_[0] instanceof Array) { - value_ = value_[0][2]; + // value_ is a list of m2m commands. 
We only process + // LINK_TO and REPLACE_WITH in this context + var val = []; + _.each(value_, function (command) { + if (command[0] === commands.LINK_TO) { + val.push(command[1]); // (4, id[, _]) + } else if (command[0] === commands.REPLACE_WITH) { + val = command[2]; // (6, _, ids) + } + }); + value_ = val; } this._super(value_); }, @@ -4937,10 +4938,21 @@ instance.web.form.FieldMany2ManyKanban = instance.web.form.AbstractField.extend( }); }); }, + // WARNING: duplicated in 4 other M2M widgets set_value: function(value_) { value_ = value_ || []; if (value_.length >= 1 && value_[0] instanceof Array) { - value_ = value_[0][2]; + // value_ is a list of m2m commands. We only process + // LINK_TO and REPLACE_WITH in this context + var val = []; + _.each(value_, function (command) { + if (command[0] === commands.LINK_TO) { + val.push(command[1]); // (4, id[, _]) + } else if (command[0] === commands.REPLACE_WITH) { + val = command[2]; // (6, _, ids) + } + }); + value_ = val; } this._super(value_); }, @@ -5719,10 +5731,21 @@ instance.web.form.FieldMany2ManyBinaryMultiFiles = instance.web.form.AbstractFie this._super(this); this.$el.on('change', 'input.oe_form_binary_file', this.on_file_change ); }, + // WARNING: duplicated in 4 other M2M widgets set_value: function(value_) { value_ = value_ || []; if (value_.length >= 1 && value_[0] instanceof Array) { - value_ = value_[0][2]; + // value_ is a list of m2m commands. We only process + // LINK_TO and REPLACE_WITH in this context + var val = []; + _.each(value_, function (command) { + if (command[0] === commands.LINK_TO) { + val.push(command[1]); // (4, id[, _]) + } else if (command[0] === commands.REPLACE_WITH) { + val = command[2]; // (6, _, ids) + } + }); + value_ = val; } this._super(value_); }, @@ -6083,13 +6106,24 @@ instance.web.form.FieldMany2ManyCheckBoxes = instance.web.form.AbstractField.ext if (! 
_.isEqual(new_value, this.get("value"))) this.internal_set_value(new_value); }, - set_value: function(value) { - value = value || []; - if (value.length >= 1 && value[0] instanceof Array) { - value = value[0][2]; + // WARNING: (mostly) duplicated in 4 other M2M widgets + set_value: function(value_) { + value_ = value_ || []; + if (value_.length >= 1 && value_[0] instanceof Array) { + // value_ is a list of m2m commands. We only process + // LINK_TO and REPLACE_WITH in this context + var val = []; + _.each(value_, function (command) { + if (command[0] === commands.LINK_TO) { + val.push(command[1]); // (4, id[, _]) + } else if (command[0] === commands.REPLACE_WITH) { + val = command[2]; // (6, _, ids) + } + }); + value_ = val; } var formatted = {}; - _.each(value, function(el) { + _.each(value_, function(el) { formatted[JSON.stringify(el)] = true; }); this._super(formatted); diff --git a/addons/web/static/src/js/view_list_editable.js b/addons/web/static/src/js/view_list_editable.js index 144de8eb21c914c2239309f593cdcdedc1f29443..a83e90421992d34a6556a7ac4e25b0c9bcf25f8f 100644 --- a/addons/web/static/src/js/view_list_editable.js +++ b/addons/web/static/src/js/view_list_editable.js @@ -783,7 +783,7 @@ this.record = null; this.form.do_hide(); return $.when(record); - } + }, }); instance.web.ListView.Groups.include(/** @lends instance.web.ListView.Groups# */{ diff --git a/addons/web/static/src/js/views.js b/addons/web/static/src/js/views.js index 1c563d1d0b31ce4e7a3597d12dd5fb8bd97be698..8a6c8def9ca9ef7dc7bf4b4e1d6680f5e52bb57c 100644 --- a/addons/web/static/src/js/views.js +++ b/addons/web/static/src/js/views.js @@ -954,12 +954,12 @@ instance.web.ViewManagerAction = instance.web.ViewManager.extend({ url: '/web/tests?mod=*' }); break; - case 'perm_read': + case 'get_metadata': var ids = current_view.get_selected_ids(); if (ids.length === 1) { - this.dataset.call('perm_read', [ids]).done(function(result) { + this.dataset.call('get_metadata', [ids]).done(function(result) { 
var dialog = new instance.web.Dialog(this, { - title: _.str.sprintf(_t("View Log (%s)"), self.dataset.model), + title: _.str.sprintf(_t("Metadata (%s)"), self.dataset.model), size: 'medium', }, QWeb.render('ViewManagerDebugViewLog', { perm : result[0], diff --git a/addons/web/static/src/xml/base.xml b/addons/web/static/src/xml/base.xml index e2d5ae020686aa4b9f72d48788fe4a6c930dde99..2cce789480c30e64b3f06be8c64e9f56ddfb9972 100644 --- a/addons/web/static/src/xml/base.xml +++ b/addons/web/static/src/xml/base.xml @@ -465,7 +465,7 @@ <t t-name="ViewManagerDebug"> <option value="">Debug View#<t t-esc="view.fields_view.view_id"/></option> <t t-if="view_manager.active_view === 'form'"> - <option value="perm_read">View Log (perm_read)</option> + <option value="get_metadata">View Metadata</option> <option value="toggle_layout_outline">Toggle Form Layout Outline</option> <option value="set_defaults">Set Defaults</option> </t> diff --git a/addons/web/tests/test_js.py b/addons/web/tests/test_js.py index 496d7e283bcae72aaca639992d7a0f2839619a93..b59bec38155dd3c3554c80c78e5e04c89aa3ec0c 100644 --- a/addons/web/tests/test_js.py +++ b/addons/web/tests/test_js.py @@ -1,4 +1,4 @@ -import openerp +import openerp.tests class WebSuite(openerp.tests.HttpCase): def test_01_js(self): diff --git a/addons/website/models/ir_actions.py b/addons/website/models/ir_actions.py index 5f627eec54aff2b4d68c4f458b3bbd53d4b57cbd..1b469adfc05f6690185e4615d30373ef46093e53 100644 --- a/addons/website/models/ir_actions.py +++ b/addons/website/models/ir_actions.py @@ -2,7 +2,7 @@ import urlparse -from openerp.addons.web.http import request +from openerp.http import request from openerp.osv import fields, osv @@ -35,7 +35,7 @@ class actions_server(osv.Model): _get_website_url, type='char', string='Website URL', help='The full URL to access the server action through the website.'), 'website_published': fields.boolean( - 'Available on the Website', + 'Available on the Website', copy=False, help='A code server 
action can be executed from the website, using a dedicated' 'controller. The address is <base>/website/action/<website_path>.' 'Set this field as True to allow users to run this action. If it' diff --git a/addons/website/models/ir_http.py b/addons/website/models/ir_http.py index 7dc3bb69d332b18485d3d9f1d2a754cb951f8f26..96b1edfef6ba611fb2bc228917ae115d265db1e5 100644 --- a/addons/website/models/ir_http.py +++ b/addons/website/models/ir_http.py @@ -8,6 +8,7 @@ import traceback import werkzeug import werkzeug.routing +import werkzeug.utils import openerp from openerp.addons.base import ir @@ -117,10 +118,11 @@ class ir_http(orm.AbstractModel): def _postprocess_args(self, arguments, rule): super(ir_http, self)._postprocess_args(arguments, rule) - for arg, val in arguments.items(): + for key, val in arguments.items(): # Replace uid placeholder by the current request.uid - if isinstance(val, orm.browse_record) and isinstance(val._uid, RequestUID): - val._uid = request.uid + if isinstance(val, orm.BaseModel) and isinstance(val._uid, RequestUID): + arguments[key] = val.sudo(request.uid) + try: _, path = rule.build(arguments) assert path is not None diff --git a/addons/website/models/ir_ui_view.py b/addons/website/models/ir_ui_view.py index 957c8e19c3b163fbb829071d049058367e568069..ad3597b73095cb2efef854ab7037382743c0a59c 100644 --- a/addons/website/models/ir_ui_view.py +++ b/addons/website/models/ir_ui_view.py @@ -159,7 +159,7 @@ class view(osv.osv): qcontext.update(values) # in edit mode ir.ui.view will tag nodes - context['inherit_branding'] = qcontext.get('editable', False) + context = dict(context, inherit_branding=qcontext.get('editable', False)) view_obj = request.website.get_template(id_or_xml_id) if 'main_object' not in qcontext: diff --git a/addons/website_blog/controllers/main.py b/addons/website_blog/controllers/main.py index a36e9fcfafe05dc3cd4546861bff390a35c1d980..a9331c5669ef3533431c60b2ac4d5582485ea1ac 100644 --- a/addons/website_blog/controllers/main.py 
+++ b/addons/website_blog/controllers/main.py @@ -187,7 +187,7 @@ class WebsiteBlog(http.Controller): ) pager_begin = (page - 1) * self._post_comment_per_page pager_end = page * self._post_comment_per_page - blog_post.website_message_ids = blog_post.website_message_ids[pager_begin:pager_end] + comments = blog_post.website_message_ids[pager_begin:pager_end] tag = None if tag_id: @@ -226,6 +226,7 @@ class WebsiteBlog(http.Controller): 'post_url': post_url, 'blog_url': blog_url, 'pager': pager, + 'comments': comments, } response = request.website.render("website_blog.blog_post_complete", values) response.set_cookie('visited_blogs', ','.join(map(str, visited_ids))) diff --git a/addons/website_blog/models/website_blog.py b/addons/website_blog/models/website_blog.py index 2862ca84e1c9578a1e8d3aa687824277f098e388..9953b5f1ac8d74ace2c691b5c66d145f05accfae 100644 --- a/addons/website_blog/models/website_blog.py +++ b/addons/website_blog/models/website_blog.py @@ -61,7 +61,7 @@ class BlogPost(osv.Model): 'content': fields.html('Content', translate=True), # website control 'website_published': fields.boolean( - 'Publish', help="Publish on the website" + 'Publish', help="Publish on the website", copy=False, ), 'website_message_ids': fields.one2many( 'mail.message', 'res_id', @@ -184,17 +184,6 @@ class BlogPost(osv.Model): self.create_history(cr, uid, ids, vals, context) return result - def copy(self, cr, uid, id, default=None, context=None): - if default is None: - default = {} - default.update({ - 'website_message_ids': [], - 'website_published': False, - 'website_published_datetime': False, - }) - return super(BlogPost, self).copy(cr, uid, id, default=default, context=context) - - class BlogPostHistory(osv.Model): _name = "blog.post.history" _description = "Blog Post History" diff --git a/addons/website_blog/views/website_blog_templates.xml b/addons/website_blog/views/website_blog_templates.xml index 
c4bcb2d21fc7bfaf20315f393c9aa7594825ccd5..cdee16da86dbab04b13cb0425c127e08d139ea4f 100644 --- a/addons/website_blog/views/website_blog_templates.xml +++ b/addons/website_blog/views/website_blog_templates.xml @@ -222,7 +222,7 @@ <section id="comments" class="read_width"> <hr/> <ul class="media-list" id="comments-list"> - <li t-foreach="blog_post.website_message_ids" t-as="message" class="media"> + <li t-foreach="comments" t-as="message" class="media"> <span class="pull-left"> <img class="media-object img img-circle" t-att-src="'/website/image?model=mail.message&field=author_avatar&id='+str(message.id)" style="width: 30px"/> </span> diff --git a/addons/website_blog/wizard/document_page_create_menu.py b/addons/website_blog/wizard/document_page_create_menu.py deleted file mode 100644 index 9f20fab0d99cdc51fb7aec976b64d18d2ce9e294..0000000000000000000000000000000000000000 --- a/addons/website_blog/wizard/document_page_create_menu.py +++ /dev/null @@ -1,86 +0,0 @@ -# -*- coding: utf-8 -*- -############################################################################## -# -# OpenERP, Open Source Management Solution -# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>). -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU Affero General Public License as -# published by the Free Software Foundation, either version 3 of the -# License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Affero General Public License for more details. -# -# You should have received a copy of the GNU Affero General Public License -# along with this program. If not, see <http://www.gnu.org/licenses/>. 
-# -############################################################################## - -from openerp.osv import fields, osv - -class document_page_create_menu(osv.osv_memory): - """ Create Menu """ - _name = "document.page.create.menu" - _description = "Wizard Create Menu" - - _columns = { - 'menu_name': fields.char('Menu Name', size=256, required=True), - 'menu_parent_id': fields.many2one('ir.ui.menu', 'Parent Menu', required=True), - } - - def default_get(self, cr, uid, fields, context=None): - if context is None: - context = {} - res = super(document_page_create_menu,self).default_get(cr, uid, fields, context=context) - page_id = context.get('active_id') - obj_page = self.pool.get('document.page') - page = obj_page.browse(cr, uid, page_id, context=context) - res['menu_name'] = page.name - return res - - def document_page_menu_create(self, cr, uid, ids, context=None): - if context is None: - context = {} - obj_page = self.pool.get('document.page') - obj_view = self.pool.get('ir.ui.view') - obj_menu = self.pool.get('ir.ui.menu') - obj_action = self.pool.get('ir.actions.act_window') - page_id = context.get('active_id', False) - page = obj_page.browse(cr, uid, page_id, context=context) - - datas = self.browse(cr, uid, ids, context=context) - data = False - if datas: - data = datas[0] - if not data: - return {} - value = { - 'name': 'Document Page', - 'view_type': 'form', - 'view_mode': 'form,tree', - 'res_model': 'document.page', - 'view_id': False, - 'type': 'ir.actions.act_window', - 'target': 'inlineview', - } - value['domain'] = "[('parent_id','=',%d)]" % (page.id) - value['res_id'] = page.id - - action_id = obj_action.create(cr, uid, value) - menu_id = obj_menu.create(cr, uid, { - 'name': data.menu_name, - 'parent_id':data.menu_parent_id.id, - 'icon': 'STOCK_DIALOG_QUESTION', - 'action': 'ir.actions.act_window,'+ str(action_id), - }, context) - obj_page.write(cr, uid, [page_id], {'menu_id':menu_id}) - return { - 'type': 'ir.actions.client', - 'tag': 'reload', - } 
- - -# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: diff --git a/addons/website_blog/wizard/document_page_create_menu_view.xml b/addons/website_blog/wizard/document_page_create_menu_view.xml deleted file mode 100644 index d0106faf6e88ec60b9084b0756c29c4a11b14ac2..0000000000000000000000000000000000000000 --- a/addons/website_blog/wizard/document_page_create_menu_view.xml +++ /dev/null @@ -1,33 +0,0 @@ -<?xml version="1.0" encoding="utf-8"?> -<openerp> - <data> - <!-- Create Menu From view --> - <record id="view_wiki_create_menu" model="ir.ui.view"> - <field name="name">Create Menu</field> - <field name="model">document.page.create.menu</field> - <field name="arch" type="xml"> - <form string="Create Menu"> - <group string="Menu Information"> - <field name="menu_name" /> - <field name="menu_parent_id" /> - </group> - <footer> - <button name="document_page_menu_create" string="Create Menu" type="object" class="oe_highlight"/> - or - <button string="Cancel" class="oe_link" special="cancel" /> - </footer> - </form> - </field> - </record> - <!-- Create Menu Action --> - <record id="action_wiki_create_menu" model="ir.actions.act_window"> - <field name="name">Create Menu</field> - <field name="type">ir.actions.act_window</field> - <field name="res_model">document.page.create.menu</field> - <field name="view_type">form</field> - <field name="view_mode">form</field> - <field name="target">new</field> - </record> - - </data> -</openerp> diff --git a/addons/website_crm/controllers/main.py b/addons/website_crm/controllers/main.py index 37d661e8a04e17a54293d53fddc621586c5b333f..9b47140b9e3e420b0654d035276cccd840d46039 100644 --- a/addons/website_crm/controllers/main.py +++ b/addons/website_crm/controllers/main.py @@ -1,13 +1,12 @@ # -*- coding: utf-8 -*- import base64 -from openerp.tools.translate import _ -from openerp.addons.web import http -from openerp.addons.web.http import request -from openerp import SUPERUSER_ID - +import werkzeug import werkzeug.urls 
+from openerp import http, SUPERUSER_ID +from openerp.http import request +from openerp.tools.translate import _ class contactus(http.Controller): diff --git a/addons/website_crm_partner_assign/controllers/main.py b/addons/website_crm_partner_assign/controllers/main.py index ca08478d8d1c549730ae5dc857e1fcf676a1376a..c104b26ce277a120c1539a091a8f546743f42dd9 100644 --- a/addons/website_crm_partner_assign/controllers/main.py +++ b/addons/website_crm_partner_assign/controllers/main.py @@ -109,7 +109,7 @@ class WebsiteCrmPartnerAssign(http.Controller): context=request.context) # todo in trunk: order="grade_id DESC, implemented_count DESC", offset=pager['offset'], limit=self._references_per_page partners = partner_obj.browse(request.cr, SUPERUSER_ID, partner_ids, request.context) # remove me in trunk - partners.sort(key=lambda x: (-1 * (x.grade_id and x.grade_id.id or 0), len(x.implemented_partner_ids)), reverse=True) + partners = sorted(partners, key=lambda x: (-1 * (x.grade_id and x.grade_id.id or 0), len(x.implemented_partner_ids)), reverse=True) partners = partners[pager['offset']:pager['offset'] + self._references_per_page] google_map_partner_ids = ','.join(map(str, [p.id for p in partners])) diff --git a/addons/website_crm_partner_assign/models/res_partner.py b/addons/website_crm_partner_assign/models/res_partner.py index cc00f8bd6e1b4b98c6937e4b8a9a2760c644fdc9..44e45e235fbca82eba2b00b7dae95cae7e0c90a5 100644 --- a/addons/website_crm_partner_assign/models/res_partner.py +++ b/addons/website_crm_partner_assign/models/res_partner.py @@ -4,5 +4,5 @@ from openerp.osv import osv, fields class res_partner_grade(osv.osv): _inherit = 'res.partner.grade' _columns = { - 'website_published': fields.boolean('Published On Website'), + 'website_published': fields.boolean('Published On Website', copy=False), } diff --git a/addons/website_event/models/event.py b/addons/website_event/models/event.py index 
33eee7c782928ffd1631d8047d926cd3345b9362..c13ba88a17f142df66327212d000c64c6ad39046 100644 --- a/addons/website_event/models/event.py +++ b/addons/website_event/models/event.py @@ -89,7 +89,7 @@ class event(osv.osv): _columns = { 'twitter_hashtag': fields.char('Twitter Hashtag'), - 'website_published': fields.boolean('Visible in Website'), + 'website_published': fields.boolean('Visible in Website', copy=False), # TDE TODO FIXME: when website_mail/mail_thread.py inheritance work -> this field won't be necessary 'website_message_ids': fields.one2many( 'mail.message', 'res_id', diff --git a/addons/website_event_sale/models/sale_order.py b/addons/website_event_sale/models/sale_order.py index 4c1a83681ea84fd018964a14362a916983e65be7..7351279eee086243e1f9d49c92f86fc548226613 100644 --- a/addons/website_event_sale/models/sale_order.py +++ b/addons/website_event_sale/models/sale_order.py @@ -30,7 +30,7 @@ class sale_order(osv.Model): else: product = self.pool.get('product.product').browse(cr, uid, product_id, context=context) if product.event_ticket_ids: - event_ticket_id = product.event_ticket_ids[0] + event_ticket_id = product.event_ticket_ids[0].id if event_ticket_id: ticket = self.pool.get('event.event.ticket').browse(cr, uid, event_ticket_id, context=context) diff --git a/addons/website_event_track/models/event.py b/addons/website_event_track/models/event.py index 954598f0bd27836777bfec2fbc3964c180d99bd6..657e73d946fc50534bef4b56a57ef62a600d6586 100644 --- a/addons/website_event_track/models/event.py +++ b/addons/website_event_track/models/event.py @@ -86,7 +86,7 @@ class event_track(osv.osv): 'event_id': fields.many2one('event.event', 'Event', required=True), 'color': fields.integer('Color Index'), 'priority': fields.selection([('3','Low'),('2','Medium (*)'),('1','High (**)'),('0','Highest (***)')], 'Priority', required=True), - 'website_published': fields.boolean('Available in the website'), + 'website_published': fields.boolean('Available in the website', 
copy=False), 'website_url': fields.function(_website_url, string="Website url", type="char"), 'image': fields.related('speaker_ids', 'image', type='binary', readonly=True) } @@ -120,7 +120,8 @@ class event_track(osv.osv): # class event_event(osv.osv): _inherit = "event.event" - def _tz_get(self,cr,uid, context=None): + + def _list_tz(self,cr,uid, context=None): # put POSIX 'Etc/*' entries at the end to avoid confusing users - see bug 1086728 return [(tz,tz) for tz in sorted(pytz.all_timezones, key=lambda tz: tz if not tz.startswith('Etc/') else '_')] @@ -140,8 +141,8 @@ class event_event(osv.osv): _columns = { 'tag_ids': fields.many2many('event.tag', string='Tags'), - 'track_ids': fields.one2many('event.track', 'event_id', 'Tracks'), - 'sponsor_ids': fields.one2many('event.sponsor', 'event_id', 'Sponsorships'), + 'track_ids': fields.one2many('event.track', 'event_id', 'Tracks', copy=True), + 'sponsor_ids': fields.one2many('event.sponsor', 'event_id', 'Sponsorships', copy=True), 'blog_id': fields.many2one('blog.blog', 'Event Blog'), 'show_track_proposal': fields.boolean('Talks Proposals'), 'show_tracks': fields.boolean('Multiple Tracks'), @@ -149,7 +150,7 @@ class event_event(osv.osv): 'count_tracks': fields.function(_count_tracks, type='integer', string='Tracks'), 'tracks_tag_ids': fields.function(_get_tracks_tag_ids, type='one2many', relation='event.track.tag', string='Tags of Tracks'), 'allowed_track_tag_ids': fields.many2many('event.track.tag', string='Accepted Tags', help="List of available tags for track proposals."), - 'timezone_of_event': fields.selection(_tz_get, 'Event Timezone', size=64), + 'timezone_of_event': fields.selection(_list_tz, 'Event Timezone', size=64), } _defaults = { diff --git a/addons/website_forum_doc/controllers/main.py b/addons/website_forum_doc/controllers/main.py index 2b951ee978d12ae2a2694d60a50a0af07fde01db..71563d78f01330b374e4533da6e29795981c9d79 100644 --- a/addons/website_forum_doc/controllers/main.py +++ 
b/addons/website_forum_doc/controllers/main.py @@ -1,8 +1,8 @@ # -*- coding: utf-8 -*- -from openerp.addons.web import http -from openerp.addons.web.http import request +from openerp import http +from openerp.http import request from openerp.addons.website.models.website import slug diff --git a/addons/website_gengo/controllers/main.py b/addons/website_gengo/controllers/main.py index f80c7cbb34bf7b9073b39935d6183c329e5df933..f373c9375a3e2c972fcac1cd7c889d603a5a2454 100644 --- a/addons/website_gengo/controllers/main.py +++ b/addons/website_gengo/controllers/main.py @@ -1,8 +1,8 @@ # -*- coding: utf-8 -*- import openerp -from openerp.addons.web import http -from openerp.addons.web.http import request +from openerp import http +from openerp.http import request import time GENGO_DEFAULT_LIMIT = 20 diff --git a/addons/website_hr/controllers/main.py b/addons/website_hr/controllers/main.py index fcc66e6bae893d10e9578add26c5e8f8c992aec8..430c47b5dfa376ec4faee12e5f86f78b772b434b 100644 --- a/addons/website_hr/controllers/main.py +++ b/addons/website_hr/controllers/main.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- -from openerp.addons.web import http -from openerp.addons.web.http import request +from openerp import http +from openerp.http import request class website_hr(http.Controller): diff --git a/addons/website_hr/models/hr.py b/addons/website_hr/models/hr.py index 4cacc56dba441712d80ec872d7d342ab976d63aa..2927993a6d465e1b258378457299bc24ed87a932 100644 --- a/addons/website_hr/models/hr.py +++ b/addons/website_hr/models/hr.py @@ -6,7 +6,7 @@ from openerp.osv import osv, fields class hr(osv.osv): _inherit = 'hr.employee' _columns = { - 'website_published': fields.boolean('Available in the website'), + 'website_published': fields.boolean('Available in the website', copy=False), 'public_info': fields.text('Public Info'), } _defaults = { diff --git a/addons/website_hr_recruitment/controllers/main.py b/addons/website_hr_recruitment/controllers/main.py index 
84a0ec93454ec2b86ab7ef84b97d8e1465b89913..c99c6644f9c0144350257e3ae30359fbefcacd03 100644 --- a/addons/website_hr_recruitment/controllers/main.py +++ b/addons/website_hr_recruitment/controllers/main.py @@ -2,9 +2,9 @@ import base64 from openerp import SUPERUSER_ID -from openerp.addons.web import http +from openerp import http from openerp.tools.translate import _ -from openerp.addons.web.http import request +from openerp.http import request class website_hr_recruitment(http.Controller): @http.route([ diff --git a/addons/website_hr_recruitment/models/hr_job.py b/addons/website_hr_recruitment/models/hr_job.py index 26aad8ae1b8bcbe0a2e747b278264b5767d16ae8..b14361db333621416cfbe2d658210d0e4a5b3c1d 100644 --- a/addons/website_hr_recruitment/models/hr_job.py +++ b/addons/website_hr_recruitment/models/hr_job.py @@ -17,7 +17,7 @@ class hr_job(osv.osv): return super(hr_job, self).job_open(cr, uid, ids, context) _columns = { - 'website_published': fields.boolean('Published'), + 'website_published': fields.boolean('Published', copy=False), 'website_description': fields.html('Website description'), 'website_url': fields.function(_website_url, string="Website URL", type="char"), } diff --git a/addons/website_mail/controllers/email_designer.py b/addons/website_mail/controllers/email_designer.py index a9462f7be7d91cdb7af2dfb541c0d02a9687c734..0a3e1043fadb0ebc246ffb21b935faf219bdbccd 100644 --- a/addons/website_mail/controllers/email_designer.py +++ b/addons/website_mail/controllers/email_designer.py @@ -17,6 +17,7 @@ class WebsiteEmailDesigner(http.Controller): 'email' not in model_cols and 'email_from' not in model_cols or \ 'name' not in model_cols and 'subject' not in model_cols: return request.redirect('/') + res_id = int(res_id) obj_ids = request.registry[model].exists(request.cr, request.uid, [res_id], context=request.context) if not obj_ids: return request.redirect('/') @@ -33,7 +34,6 @@ class WebsiteEmailDesigner(http.Controller): body_field = 'body_html' cr, uid, 
context = request.cr, request.uid, request.context - res_id = int(res_id) record = request.registry[model].browse(cr, uid, res_id, context=context) values = { diff --git a/addons/website_mail/models/mail_message.py b/addons/website_mail/models/mail_message.py index cff950f2962e7aabd4fa85d2bb7da1f1601a6244..4206872199c861f4cec1f049ee8a4831a0fee631 100644 --- a/addons/website_mail/models/mail_message.py +++ b/addons/website_mail/models/mail_message.py @@ -43,7 +43,7 @@ class MailMessage(osv.Model): help='Message description: either the subject, or the beginning of the body' ), 'website_published': fields.boolean( - 'Published', help="Visible on the website as a comment" + 'Published', help="Visible on the website as a comment", copy=False, ), } diff --git a/addons/website_mail/models/mail_thread.py b/addons/website_mail/models/mail_thread.py index bea9178a3353d39ebac6ef97dcf8d8447f9571c3..cb19b52cf665b2b0df91c8d30fcea9bd79cbf963 100644 --- a/addons/website_mail/models/mail_thread.py +++ b/addons/website_mail/models/mail_thread.py @@ -36,8 +36,3 @@ class MailThread(osv.AbstractModel): ), } - def copy(self, cr, uid, id, default=None, context=None): - if default is None: - default = {} - default['website_message_ids'] = [] - return super(MailThread, self).copy(cr, uid, id, default=default, context=context) \ No newline at end of file diff --git a/addons/website_membership/controllers/main.py b/addons/website_membership/controllers/main.py index 52ba31324f073944a11a4eaa8b07bd2c1fe48643..9947eab4d982b429f47060112fb7788ccb0fc2ce 100644 --- a/addons/website_membership/controllers/main.py +++ b/addons/website_membership/controllers/main.py @@ -73,7 +73,7 @@ class WebsiteMembership(http.Controller): # displayed membership lines membership_line_ids = membership_line_obj.search(cr, uid, line_domain, context=context) membership_lines = membership_line_obj.browse(cr, uid, membership_line_ids, context=context) - membership_lines.sort(key=lambda x: x.membership_id.website_sequence) 
+ membership_lines = sorted(membership_lines, key=lambda x: x.membership_id.website_sequence) partner_ids = [m.partner.id for m in membership_lines] google_map_partner_ids = ",".join(map(str, partner_ids)) diff --git a/addons/website_membership/models/product.py b/addons/website_membership/models/product.py index c45a7a568a5c890906adc4334cd3690b522d29ff..1de254de78bc4318bd3f960c185d805586459eb3 100644 --- a/addons/website_membership/models/product.py +++ b/addons/website_membership/models/product.py @@ -25,7 +25,7 @@ class product_template(osv.Model): _inherit = 'product.template' _columns = { - 'website_published': fields.boolean('Available in the website'), + 'website_published': fields.boolean('Available in the website', copy=False), } _defaults = { 'website_published': False, diff --git a/addons/website_partner/models/res_partner.py b/addons/website_partner/models/res_partner.py index e762455debe3f805cdf31614ba269acf6f540ec0..08fc29ce75f2b72b232a6852306b765a7f589d29 100644 --- a/addons/website_partner/models/res_partner.py +++ b/addons/website_partner/models/res_partner.py @@ -12,7 +12,7 @@ class WebsiteResPartner(osv.Model): _columns = { 'website_published': fields.boolean( - 'Publish', help="Publish on the website"), + 'Publish', help="Publish on the website", copy=False), 'website_description': fields.html( 'Website Partner Full Description' ), diff --git a/addons/website_quote/models/order.py b/addons/website_quote/models/order.py index aaac80ceb5f9cf43d25058173223cad2929642ad..c7ae4bc8ab592a714f46b83d8a79499f0e067297 100644 --- a/addons/website_quote/models/order.py +++ b/addons/website_quote/models/order.py @@ -32,9 +32,9 @@ class sale_quote_template(osv.osv): _columns = { 'name': fields.char('Quotation Template', required=True), 'website_description': fields.html('Description', translate=True), - 'quote_line': fields.one2many('sale.quote.line', 'quote_id', 'Quote Template Lines'), + 'quote_line': fields.one2many('sale.quote.line', 'quote_id', 'Quote 
Template Lines', copy=True), 'note': fields.text('Terms and conditions'), - 'options': fields.one2many('sale.quote.option', 'template_id', 'Optional Products Lines'), + 'options': fields.one2many('sale.quote.option', 'template_id', 'Optional Products Lines', copy=True), 'number_of_days': fields.integer('Quote Duration', help='Number of days for the validaty date computation of the quotation'), } def open_template(self, cr, uid, quote_id, context=None): @@ -110,7 +110,7 @@ class sale_order(osv.osv): return res _columns = { - 'access_token': fields.char('Security Token', required=True), + 'access_token': fields.char('Security Token', required=True, copy=False), 'template_id': fields.many2one('sale.quote.template', 'Quote Template'), 'website_description': fields.html('Description'), 'options' : fields.one2many('sale.order.option', 'order_id', 'Optional Products Lines'), diff --git a/addons/website_sale/controllers/main.py b/addons/website_sale/controllers/main.py index e56e3bb14786bacee9df5936613c03ff4135a289..2c52e21e2811cbeea6ab6937cba487d2eb911ad0 100644 --- a/addons/website_sale/controllers/main.py +++ b/addons/website_sale/controllers/main.py @@ -2,8 +2,8 @@ import werkzeug from openerp import SUPERUSER_ID -from openerp.addons.web import http -from openerp.addons.web.http import request +from openerp import http +from openerp.http import request from openerp.tools.translate import _ from openerp.addons.website.models.website import slug @@ -582,7 +582,7 @@ class website_sale(http.Controller): # acquirer_ids = [tx.acquirer_id.id] # else: acquirer_ids = payment_obj.search(cr, SUPERUSER_ID, [('website_published', '=', True)], context=context) - values['acquirers'] = payment_obj.browse(cr, uid, acquirer_ids, context=context) + values['acquirers'] = list(payment_obj.browse(cr, uid, acquirer_ids, context=context)) render_ctx = dict(context, submit_class='btn btn-primary', submit_txt='Pay Now') for acquirer in values['acquirers']: acquirer.button = payment_obj.render( 
diff --git a/addons/website_sale/models/product.py b/addons/website_sale/models/product.py index ce2063720474584581dfb104a00f9705b4c9c123..29e1fca7f57735ddcecd1c5fd2183d76fa31c055 100644 --- a/addons/website_sale/models/product.py +++ b/addons/website_sale/models/product.py @@ -122,7 +122,7 @@ class product_template(osv.Model): ], string='Website Comments', ), - 'website_published': fields.boolean('Available in the website'), + 'website_published': fields.boolean('Available in the website', copy=False), 'website_description': fields.html('Description for the website'), 'alternative_product_ids': fields.many2many('product.template','product_alternative_rel','src_id','dest_id', string='Alternative Products', help='Appear on the product page'), 'accessory_product_ids': fields.many2many('product.product','product_accessory_rel','src_id','dest_id', string='Accessory Products', help='Appear on the shopping cart'), diff --git a/addons/website_sale_delivery/models/sale_order.py b/addons/website_sale_delivery/models/sale_order.py index 029190d35c8dedef557a97aa923f1ba64efd1d60..ebea78803fd5c1d300b3b4452246dcac1f611e3d 100644 --- a/addons/website_sale_delivery/models/sale_order.py +++ b/addons/website_sale_delivery/models/sale_order.py @@ -8,7 +8,7 @@ from openerp.addons import decimal_precision class delivery_carrier(orm.Model): _inherit = 'delivery.carrier' _columns = { - 'website_published': fields.boolean('Available in the website'), + 'website_published': fields.boolean('Available in the website', copy=False), 'website_description': fields.text('Description for the website'), } _defaults = { diff --git a/doc/03_module_dev_02.rst b/doc/03_module_dev_02.rst index 223c82d3f1c08a74348a39adf93a176dd8e4ffdb..ec7fa57d0dc840c830fa11188df92b262e46e0f9 100644 --- a/doc/03_module_dev_02.rst +++ b/doc/03_module_dev_02.rst @@ -615,6 +615,7 @@ Relational Types reference. :guilabel:`relation` is the table to look up that reference in. +.. 
_fields-functional: Functional Fields +++++++++++++++++ diff --git a/doc/03_module_dev_03.rst b/doc/03_module_dev_03.rst index e5e309fe00e979295c25cf51518813e0fcdb56eb..74fb4772f8406efd7ac00250a6d0a828e3831757 100644 --- a/doc/03_module_dev_03.rst +++ b/doc/03_module_dev_03.rst @@ -70,15 +70,21 @@ As we can see below in the purple zone of the screen, there is also a way to dis On Change +++++++++ -The on_change attribute defines a method that is called when the content of a view field has changed. +The on_change attribute defines a method that is called when the +content of a view field has changed. -This method takes at least arguments: cr, uid, ids, which are the three classical arguments and also the context dictionary. You can add parameters to the method. They must correspond to other fields defined in the view, and must also be defined in the XML with fields defined this way:: +This method takes at least arguments: cr, uid, ids, which are the +three classical arguments and also the context dictionary. You can add +parameters to the method. They must correspond to other fields defined +in the view, and must also be defined in the XML with fields defined +this way:: <field name="name_of_field" on_change="name_of_method(other_field'_1_', ..., other_field'_n_')"/> The example below is from the sale order view. 
-You can use the 'context' keyword to access data in the context that can be used as params of the function.:: +You can use the 'context' keyword to access data in the context that +can be used as params of the function.:: <field name="shop_id" on_change="onchange_shop_id(shop_id)"/> @@ -100,7 +106,10 @@ You can use the 'context' keyword to access data in the context that can be used return {'value':v} -When editing the shop_id form field, the onchange_shop_id method of the sale_order object is called and returns a dictionary where the 'value' key contains a dictionary of the new value to use in the 'project_id', 'pricelist_id' and 'payment_default_id' fields. +When editing the shop_id form field, the onchange_shop_id method of +the sale_order object is called and returns a dictionary where the +'value' key contains a dictionary of the new value to use in the +'project_id', 'pricelist_id' and 'payment_default_id' fields. Note that it is possible to change more than just the values of fields. For example, it is possible to change the value of some fields diff --git a/doc/api_models.rst b/doc/api_models.rst index 143147c0f48114610080cc4af53abdd722c3c934..82f45ed5c9c416b064f7149dfd8289c2b5dd3262 100644 --- a/doc/api_models.rst +++ b/doc/api_models.rst @@ -1,7 +1,21 @@ -ORM and models --------------- +ORM and Models +============== .. automodule:: openerp.osv.orm :members: :undoc-members: + +Scope Management +================ + +.. automodule:: openerp.osv.scope + :members: + :undoc-members: + +API Decorators +============== + +.. automodule:: openerp.osv.api + :members: + :undoc-members: diff --git a/doc/howto/howto_website.rst b/doc/howto/howto_website.rst index e4ef9b4748b69c8a2e5730144cbf6fa616ab7513..722891519da2bc543647223bb881895cbc4adc7f 100644 --- a/doc/howto/howto_website.rst +++ b/doc/howto/howto_website.rst @@ -65,7 +65,7 @@ module in it: .. 
code-block:: console $ createdb academy - $ ./openerp-server --addons-path=../web/addons,../addons,../my-modules \ + $ ./openerp-server --addons-path=addons,../my-modules \ -d academy -i academy --db-filter=academy * ``--addons-path`` tells OpenERP where it can find its modules. By default it @@ -159,6 +159,10 @@ Let's move our 2 pseudo-templates from inline strings to actual templates: This simplifies the controller code by moving data formatting out of it, and generally makes it simpler for designers to edit the markup. +.. note:: + + You'll need to update the module to install the new templates + .. todo:: link to section about reusing/altering existing stuff, template overriding @@ -175,7 +179,7 @@ First, we'll install the ``website`` module: restart your server with .. code-block:: console - $ ./openerp-server --addons-path=../web/addons,../addons,../my-modules \ + $ ./openerp-server --addons-path=addons,../my-modules \ -d academy -i website --db-filter=academy If you navigate to `your openerp`_, your basic page may have been replaced by @@ -202,7 +206,7 @@ ensures ``academy``'s index page overwrites ``website``'s. .. code-block:: console - $ ./openerp-server --addons-path=../web/addons,../addons,../my-modules \ + $ ./openerp-server --addons-path=addons,../my-modules \ -d academy -u academy --db-filter=academy instead of the previous command (note: ``-i`` was replaced by ``-u``) @@ -460,14 +464,14 @@ of all records in the object) and the "form" view (view an manipulation of a single record). The :guilabel:`Create` button above the list lets you create new record, you can select records to delete them. -There's one big issue to fix right now, the labeling of the column in the list -and the fields in the form view, which are all currently :guilabel:`unknown`. 
-We can fix that by adding a ``string`` attribute to the model field: +The names of the fields in the search and list view are automatically inferred +from the logical field names, but it's probably a good idea to specify them +anyway, by adding a ``string`` to the model field: .. patch:: -The second problem is that the list view only displays the ``name`` field. To -fix this, we have to create an explicit list view for lectures: +An issue is that the list view only displays the ``name`` field. To fix this, +we have to create an explicit list view for lectures: .. patch:: @@ -491,7 +495,7 @@ the server as: .. code-block:: console - $ ./openerp-server --addons-path=../web/addons,../addons,../my-modules \ + $ ./openerp-server --addons-path=addons,../my-modules \ -d academy -i website_event --db-filter=academy We'll also add it as a dependency to our module: @@ -517,7 +521,7 @@ Restart the server with .. code-block:: console - $ ./openerp-server --addons-path=../web/addons,../addons,../my-modules \ + $ ./openerp-server --addons-path=addons,../my-modules \ -d academy -i academy --db-filter=academy and the menu item has been renamed to Lectures. @@ -573,6 +577,12 @@ The gist of the operation is fairly simple, but there are lots of changes: purged if we do not need it (e.g. existing non-lectures events and event types can be removed before adding our own) +.. note:: + + because we're reusing the old XIDs on completely different models, we need + to either remove the old reference or (simpler) just drop and re-create + the database + .. 
patch:: Our data is back in the fontend (site), and in the backend we get diff --git a/doc/howto/howto_website/basic-page b/doc/howto/howto_website/basic-page index d3179145542c1c046f0b5a7f609d697410d88144..ccb47652f3999a02aacf3be1567c77287f3fe4f7 100644 --- a/doc/howto/howto_website/basic-page +++ b/doc/howto/howto_website/basic-page @@ -1,5 +1,5 @@ # HG changeset patch -# Parent b96cd22d25cfa9a67f451d091f5c4896997d350d +# Parent 11a30af5c0c80b15a2bbad562ef3a580ee99fb3b diff --git a/controllers/academy.py b/controllers/academy.py --- a/controllers/academy.py @@ -10,7 +10,7 @@ diff --git a/controllers/academy.py b/controllers/academy.py def index(self): - return "Hello, world!" + return """<!doctype html> -+<link href="//netdna.bootstrapcdn.com/bootstrap/3.1.1/css/bootstrap.min.css" rel="stylesheet"> ++<link href="/web/static/lib/bootstrap/css/bootstrap.min.css" rel="stylesheet"> +<body class="container"> + Hello, world! +</body> diff --git a/doc/howto/howto_website/field-label b/doc/howto/howto_website/field-label index 2589e8f872ab51a23b597b283d1bf9a4af3b2390..090344dc71995de9dffda9e581804ddaedb29a53 100644 --- a/doc/howto/howto_website/field-label +++ b/doc/howto/howto_website/field-label @@ -1,14 +1,14 @@ # HG changeset patch -# Parent fe4edbcd9e98db81ec6321c58e8ac508a686f45b -diff -r fe4edbcd9e98 -r 72a099819e5b models/academy.py ---- a/models/academy.py Mon Apr 14 16:38:10 2014 +0200 -+++ b/models/academy.py Mon Apr 14 16:59:01 2014 +0200 -@@ -14,6 +14,6 @@ class Lectures(orm.Model): +# Parent 4b38aba926d27af5f81be1a3b5a482c26522bf38 + +diff --git a/models/academy.py b/models/academy.py +--- a/models/academy.py ++++ b/models/academy.py +@@ -12,5 +12,5 @@ class Lectures(Model): + _name = 'academy.lectures' _order = 'date ASC' - _columns = { -- 'name': fields.char(required=True), -- 'date': fields.date(required=True), -+ 'name': fields.char(required=True, string="Name"), -+ 'date': fields.date(required=True, string="Date"), - } +- name = Char(required=True) +- 
date = Date(required=True) ++ name = Char(required=True, string="Name") ++ date = Date(required=True, string="Date") diff --git a/doc/howto/howto_website/lectures-model-add b/doc/howto/howto_website/lectures-model-add index e622ec6656ff84294c18fea6a91a07e0e6f6376e..3211995ab72d443b5f3128fe6a4917fde497b750 100644 --- a/doc/howto/howto_website/lectures-model-add +++ b/doc/howto/howto_website/lectures-model-add @@ -1,5 +1,5 @@ # HG changeset patch -# Parent cd98e5752eedca2780da80387ac01c8cd166940f +# Parent 13c65b2eed8f84b951ff5a98051f64fe35ddcf0a diff --git a/__openerp__.py b/__openerp__.py --- a/__openerp__.py @@ -15,22 +15,17 @@ diff --git a/__openerp__.py b/__openerp__.py diff --git a/controllers/academy.py b/controllers/academy.py --- a/controllers/academy.py +++ b/controllers/academy.py -@@ -6,10 +6,15 @@ from openerp.addons.web.controllers impo +@@ -6,8 +6,10 @@ from openerp.addons.web.controllers impo class academy(main.Home): @http.route('/', auth='public', website=True) def index(self): -+ cr, uid, context = http.request.cr, http.request.uid, http.request.context -+ Lectures = http.request.registry['academy.lectures'] - tas = http.request.registry['academy.tas'].search_read( - http.request.cr, http.request.uid, context=http.request.context) -+ lectures = Lectures.browse( -+ cr, uid, Lectures.search(cr, uid, [], context=context), context=context) - return http.request.website.render('academy.index', { - 'tas': tas, ++ lectures = http.request.env['academy.lectures'].search([]) + tas = http.request.env['academy.tas'].search([]) + return http.request.render('academy.index', { + 'lectures': lectures, + 'tas': tas, }) - @http.route('/tas/<model("academy.tas"):ta>/', auth='public', website=True) diff --git a/data/lectures.xml b/data/lectures.xml new file mode 100644 --- /dev/null @@ -63,19 +58,24 @@ new file mode 100644 diff --git a/models/academy.py b/models/academy.py --- a/models/academy.py +++ b/models/academy.py -@@ -8,3 +8,12 @@ class 
TeachingAssistants(orm.Model): - 'name': fields.char(), - 'biography': fields.html(), - } +@@ -1,9 +1,16 @@ + # -*- coding: utf-8 -*- + from openerp.models import Model +-from openerp.fields import Char, Html ++from openerp.fields import Char, Html, Date + + class TeachingAssistants(Model): + _name = "academy.tas" + + name = Char() + biography = Html() + -+class Lectures(orm.Model): ++class Lectures(Model): + _name = 'academy.lectures' + _order = 'date ASC' + -+ _columns = { -+ 'name': fields.char(required=True), -+ 'date': fields.date(required=True), -+ } ++ name = Char(required=True) ++ date = Date(required=True) diff --git a/security/ir.model.access.csv b/security/ir.model.access.csv --- a/security/ir.model.access.csv +++ b/security/ir.model.access.csv diff --git a/doc/howto/howto_website/manifest b/doc/howto/howto_website/manifest index 601c18c51065a32b4b39fbc12db6cc1d07ad9e85..c0c7ee29547c3f34e379aa2a718de3090c4ec2ce 100644 --- a/doc/howto/howto_website/manifest +++ b/doc/howto/howto_website/manifest @@ -71,16 +71,15 @@ diff --git a/models/academy.py b/models/academy.py new file mode 100644 --- /dev/null +++ b/models/academy.py -@@ -0,0 +1,9 @@ +@@ -0,0 +1,8 @@ +# -*- coding: utf-8 -*- -+from openerp.osv import orm, fields ++from openerp.models import Model ++from openerp.fields import Char + -+class academy(orm.Model): ++class academy(Model): + _name = "academy.academy" + -+ _columns = { -+ 'name': fields.char(), -+ } ++ name = Char() diff --git a/security/ir.model.access.csv b/security/ir.model.access.csv new file mode 100644 --- /dev/null diff --git a/doc/howto/howto_website/move-to-openerp-objects b/doc/howto/howto_website/move-to-openerp-objects index 8623d8a90e5fcef21e39f14a158c7d0a3b15b086..0efebb39846b03240c026de72bf3df827d673dec 100644 --- a/doc/howto/howto_website/move-to-openerp-objects +++ b/doc/howto/howto_website/move-to-openerp-objects @@ -1,5 +1,5 @@ # HG changeset patch -# Parent ade038cd6dfd855f1d423cffb3d4e242404c28f0 +# Parent 
bc8914ae6b552520bdc3856e81d3624dc5a77a99 diff --git a/__init__.py b/__init__.py --- a/__init__.py @@ -26,40 +26,31 @@ diff --git a/__openerp__.py b/__openerp__.py diff --git a/controllers/academy.py b/controllers/academy.py --- a/controllers/academy.py +++ b/controllers/academy.py -@@ -6,18 +6,29 @@ from openerp.addons.web.controllers impo +@@ -6,14 +6,22 @@ from openerp.addons.web.controllers impo class academy(main.Home): @http.route('/', auth='public', website=True) def index(self): -+ registry = http.request.registry - cr, uid, context = http.request.cr, http.request.uid, http.request.context -- Lectures = http.request.registry['academy.lectures'] -- tas = http.request.registry['academy.tas'].search_read( -- http.request.cr, http.request.uid, context=http.request.context) +- lectures = http.request.env['academy.lectures'].search([]) +- tas = http.request.env['academy.tas'].search([]) ++ ta_group = http.request.env.ref('academy.tas') ++ tas = http.request.env['res.users'].search( ++ [('groups_id', '=', [ta_group.id])] ++ ) + -+ Data = registry['ir.model.data'] -+ _, ta_group_id = Data.get_object_reference(cr, uid, 'academy', 'tas') -+ tas = registry['res.users'].search_read( -+ http.request.cr, http.request.uid, -+ [('groups_id', '=', [ta_group_id])], -+ context=http.request.context) ++ lecture_type = http.request.env.ref('academy.lecture_type') ++ lectures = http.request.env['event.event'].search( ++ [('type', '=', lecture_type.id)] ++ ) + -+ Lectures = registry['event.event'] -+ _, lecture_type_id = Data.get_object_reference(cr, uid, 'academy', 'lecture_type') - lectures = Lectures.browse( -- cr, uid, Lectures.search(cr, uid, [], context=context), context=context) -+ cr, uid, -+ Lectures.search(cr, uid, [('type', '=', lecture_type_id),], context=context), -+ context=context) -+ - return http.request.website.render('academy.index', { - 'tas': tas, + return http.request.render('academy.index', { 'lectures': lectures, + 'tas': tas, }) - 
@http.route('/tas/<model("academy.tas"):ta>/', auth='public', website=True) + @http.route('/tas/<model("res.users"):ta>/', auth='public', website=True) def ta(self, ta): - return http.request.website.render('academy.ta', { + return http.request.render('academy.ta', { 'ta': ta, diff --git a/data/views.xml b/data/views.xml deleted file mode 100644 @@ -227,39 +218,36 @@ diff --git a/models/academy.py b/models/academy.py deleted file mode 100644 --- a/models/academy.py +++ /dev/null -@@ -1,19 +0,0 @@ +@@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- --from openerp.osv import orm, fields +-from openerp.models import Model +-from openerp.fields import Char, Html, Date - --class TeachingAssistants(orm.Model): +-class TeachingAssistants(Model): - _name = "academy.tas" - -- _columns = { -- 'name': fields.char(), -- 'biography': fields.html(), -- } +- name = Char() +- biography = Html() - --class Lectures(orm.Model): +-class Lectures(Model): - _name = 'academy.lectures' - _order = 'date ASC' - -- _columns = { -- 'name': fields.char(required=True, string="Name"), -- 'date': fields.date(required=True, string="Date"), -- } +- name = Char(required=True, string="Name") +- date = Date(required=True, string="Date") diff --git a/models/res_partner.py b/models/res_partner.py new file mode 100644 --- /dev/null +++ b/models/res_partner.py @@ -0,0 +1,8 @@ -+from openerp.osv import orm, fields ++from openerp.models import Model ++from openerp.fields import Html + -+class Partner(orm.Model): ++class Partner(Model): + _inherit = 'res.partner' + -+ _columns = { -+ 'biography': fields.html(), -+ } ++ biography = Html() ++ diff --git a/security/ir.model.access.csv b/security/ir.model.access.csv deleted file mode 100644 --- a/security/ir.model.access.csv diff --git a/doc/howto/howto_website/ta-controller b/doc/howto/howto_website/ta-controller index 64de1dae17b17572293701719bea4375280d72f0..da4ee7606c9fb221eaad1f51480c67daf7fa9e3d 100644 --- a/doc/howto/howto_website/ta-controller +++ 
b/doc/howto/howto_website/ta-controller @@ -1,5 +1,5 @@ # HG changeset patch -# Parent a110c540b0769ee849a404324cf8594d116cc982 +# Parent a813df5b9cbdf5db9b0c3f6bac47b1821ddbb086 diff --git a/controllers/academy.py b/controllers/academy.py --- a/controllers/academy.py @@ -25,7 +25,7 @@ diff --git a/controllers/academy.py b/controllers/academy.py + ] + return """<!doctype html> - <link href="//netdna.bootstrapcdn.com/bootstrap/3.1.1/css/bootstrap.min.css" rel="stylesheet"> + <link href="/web/static/lib/bootstrap/css/bootstrap.min.css" rel="stylesheet"> <body class="container"> - Hello, world! + <h1>Introduction to something</h1> @@ -45,7 +45,7 @@ diff --git a/controllers/academy.py b/controllers/academy.py +<html> + <head> + <title>AcademyAcademy TA %(name)s</title> -+ <link href="//netdna.bootstrapcdn.com/bootstrap/3.1.1/css/bootstrap.min.css" rel="stylesheet"> ++ <link href="/web/static/lib/bootstrap/css/bootstrap.min.css" rel="stylesheet"> + </head> + <body class="container"> + <h1>%(name)s</h1> diff --git a/doc/howto/howto_website/ta-data b/doc/howto/howto_website/ta-data index 395b5a2b430cdf1f6d883e2a2a0394d978eea576..2c001de47ac08bebf735a0a13c7ea5b98b229b63 100644 --- a/doc/howto/howto_website/ta-data +++ b/doc/howto/howto_website/ta-data @@ -1,5 +1,5 @@ # HG changeset patch -# Parent a35b5f4903087b1b31a4ecf33bfe655dc3ad5663 +# Parent 650d20edf05a23b977c3b9fa476a39986eee4722 diff --git a/__openerp__.py b/__openerp__.py --- a/__openerp__.py diff --git a/doc/howto/howto_website/ta-html-biography b/doc/howto/howto_website/ta-html-biography index a8460c3090a9494ccc2dbb403f25c78609a56844..c0fae24aa4124c87d3494bf21758b9743b7d1715 100644 --- a/doc/howto/howto_website/ta-html-biography +++ b/doc/howto/howto_website/ta-html-biography @@ -1,12 +1,17 @@ # HG changeset patch -# Parent 313d7c760558b24461a544063de061d00fdf7059 +# Parent d57af6d92026e45b336308b143a5c227d44de9f7 diff --git a/models/academy.py b/models/academy.py --- a/models/academy.py +++ b/models/academy.py 
-@@ -6,4 +6,5 @@ class TeachingAssistants(orm.Model): +@@ -1,8 +1,9 @@ + # -*- coding: utf-8 -*- + from openerp.models import Model +-from openerp.fields import Char ++from openerp.fields import Char, Html - _columns = { - 'name': fields.char(), -+ 'biography': fields.html(), - } + class TeachingAssistants(Model): + _name = "academy.tas" + + name = Char() ++ biography = Html() diff --git a/doc/howto/howto_website/ta-model b/doc/howto/howto_website/ta-model index d3b79299e9cc895404df5b52580f3309b7aea4d4..277ceea3cf9af2737663d60f454a17b2f9f815c3 100644 --- a/doc/howto/howto_website/ta-model +++ b/doc/howto/howto_website/ta-model @@ -1,10 +1,10 @@ # HG changeset patch -# Parent e8b98f4c8f9070f7d5b91936381324cd1fe12e17 +# Parent 3fbc9c92bcee395900c441aeb80e68036004a7a0 diff --git a/controllers/academy.py b/controllers/academy.py --- a/controllers/academy.py +++ b/controllers/academy.py -@@ -3,19 +3,13 @@ +@@ -3,19 +3,12 @@ from openerp import http from openerp.addons.web.controllers import main @@ -19,9 +19,8 @@ diff --git a/controllers/academy.py b/controllers/academy.py class academy(main.Home): @http.route('/', auth='public', website=True) def index(self): -+ tas = http.request.registry['academy.tas'].search_read( -+ http.request.cr, http.request.uid, context=http.request.context) - return http.request.website.render('academy.index', { ++ tas = http.request.env['academy.tas'].search([]) + return http.request.render('academy.index', { - 'tas': teaching_assistants, + 'tas': tas, }) @@ -30,17 +29,16 @@ diff --git a/controllers/academy.py b/controllers/academy.py diff --git a/models/academy.py b/models/academy.py --- a/models/academy.py +++ b/models/academy.py -@@ -1,8 +1,8 @@ - # -*- coding: utf-8 -*- - from openerp.osv import orm, fields +@@ -2,7 +2,7 @@ + from openerp.models import Model + from openerp.fields import Char --class academy(orm.Model): +-class academy(Model): - _name = "academy.academy" -+class TeachingAssistants(orm.Model): ++class 
TeachingAssistants(Model): + _name = "academy.tas" - _columns = { - 'name': fields.char(), + name = Char() diff --git a/security/ir.model.access.csv b/security/ir.model.access.csv --- a/security/ir.model.access.csv +++ b/security/ir.model.access.csv @@ -48,3 +46,15 @@ diff --git a/security/ir.model.access.csv b/security/ir.model.access.csv id,name,model_id:id,group_id:id,perm_read,perm_write,perm_create,perm_unlink -access_academy_academy,access_academy_academy,model_academy_academy,,1,0,0,0 +access_academy_tas,access_academy_tas,model_academy_tas,,1,0,0,0 +diff --git a/views/templates.xml b/views/templates.xml +--- a/views/templates.xml ++++ b/views/templates.xml +@@ -17,7 +17,7 @@ + <h2>Teaching Assistants</h2> + <ul> + <li t-foreach="tas" t-as="ta"> +- <a t-att-href="url_for('/tas/%d/' % ta_index)"> ++ <a t-att-href="url_for('/tas/%d/' % ta['id'])"> + <t t-esc="ta['name']"/> + </a> + </li> diff --git a/doc/howto/howto_website/ta-t-field b/doc/howto/howto_website/ta-t-field index a61bec85a6b8f87b72171f90982cced0ea673061..8d6ffe06d90dea9b4ec193fa8514bb59bab2d460 100644 --- a/doc/howto/howto_website/ta-t-field +++ b/doc/howto/howto_website/ta-t-field @@ -1,9 +1,18 @@ # HG changeset patch -# Parent 9d054be011cf2d3019e3b4d1be7cc95375044a05 +# Parent a1788b0db87f7fdb5640b17bbda6e9a0c6093c37 diff --git a/views/templates.xml b/views/templates.xml --- a/views/templates.xml +++ b/views/templates.xml +@@ -18,7 +18,7 @@ + <ul> + <li t-foreach="tas" t-as="ta"> + <a t-att-href="url_for('/tas/%d/' % ta['id'])"> +- <t t-esc="ta['name']"/> ++ <span t-field="ta.name"/> + </a> + </li> + </ul> @@ -35,7 +35,7 @@ <div class="oe_structure"/> <div class="oe_structure"> diff --git a/doc/howto/howto_website/ta-view-fix b/doc/howto/howto_website/ta-view-fix index 5e3f494cf3dbd373c37779dd4faa2b9cac745686..c04c5dc1349fa8da3f123f0cd98fad379dcebcc2 100644 --- a/doc/howto/howto_website/ta-view-fix +++ b/doc/howto/howto_website/ta-view-fix @@ -1,5 +1,5 @@ # HG changeset patch -# Parent 
466d19929560c2a60c347990ea44730ae40ec36d +# Parent 0d88c003a54e6501eee5f43b90db20d9b78c07c8 diff --git a/controllers/academy.py b/controllers/academy.py --- a/controllers/academy.py @@ -10,24 +10,15 @@ diff --git a/controllers/academy.py b/controllers/academy.py - @http.route('/tas/<int:id>/', auth='public', website=True) - def ta(self, id): -- return http.request.website.render('academy.ta', teaching_assistants[id]) +- return http.request.render('academy.ta', teaching_assistants[id]) + @http.route('/tas/<model("academy.tas"):ta>/', auth='public', website=True) + def ta(self, ta): -+ return http.request.website.render('academy.ta', { ++ return http.request.render('academy.ta', { + 'ta': ta, + }) diff --git a/views/templates.xml b/views/templates.xml --- a/views/templates.xml +++ b/views/templates.xml -@@ -17,7 +17,7 @@ - <h2>Teaching Assistants</h2> - <ul> - <li t-foreach="tas" t-as="ta"> -- <a t-att-href="url_for('/tas/%d/' % ta_index)"> -+ <a t-att-href="url_for('/tas/%s/' % ta['id'])"> - <t t-esc="ta['name']"/> - </a> - </li> @@ -35,7 +35,7 @@ <div class="oe_structure"/> <div class="oe_structure"> diff --git a/doc/howto/howto_website/templates-basic b/doc/howto/howto_website/templates-basic index a5b14a3fbc1a5f6e17f557999e8252751747472d..95d7614b496b58de7d420921dfac5492583389cd 100644 --- a/doc/howto/howto_website/templates-basic +++ b/doc/howto/howto_website/templates-basic @@ -1,5 +1,5 @@ # HG changeset patch -# Parent 2edbac8244a982055a6786c14ee41719ea5410e8 +# Parent 9979cfa9319edcd15e6c243862eeeb03b851a82a diff --git a/__openerp__.py b/__openerp__.py --- a/__openerp__.py @@ -19,18 +19,13 @@ diff --git a/__openerp__.py b/__openerp__.py diff --git a/controllers/academy.py b/controllers/academy.py --- a/controllers/academy.py +++ b/controllers/academy.py -@@ -14,34 +14,17 @@ teaching_assistants = [ - class academy(main.Home): - @http.route('/', auth='none') - def index(self): -+ cr, uid, context = http.request.cr, http.request.uid, http.request.context - tas = 
[ +@@ -18,30 +18,10 @@ class academy(main.Home): '<li><a href="/tas/%d/">%s</a></li>' % (i, ta['name']) for i, ta in enumerate(teaching_assistants) ] - +- - return """<!doctype html> --<link href="//netdna.bootstrapcdn.com/bootstrap/3.1.1/css/bootstrap.min.css" rel="stylesheet"> +-<link href="/web/static/lib/bootstrap/css/bootstrap.min.css" rel="stylesheet"> -<body class="container"> - <h1>Introduction to something</h1> - <h2>Teaching Assistants</h2> @@ -41,26 +36,24 @@ diff --git a/controllers/academy.py b/controllers/academy.py -""" % { - 'tas': '\n'.join(tas) - } -- -+ return http.request.registry['ir.ui.view'].render(cr, uid, 'academy.index', { ++ return http.request.render('academy.index', { + 'tas': '\n'.join(tas) -+ }, context=context) ++ }) + @http.route('/tas/<int:id>/', auth='none') def ta(self, id): - return """<!doctype html> -<html> - <head> - <title>AcademyAcademy TA %(name)s</title> -- <link href="//netdna.bootstrapcdn.com/bootstrap/3.1.1/css/bootstrap.min.css" rel="stylesheet"> +- <link href="/web/static/lib/bootstrap/css/bootstrap.min.css" rel="stylesheet"> - </head> - <body class="container"> - <h1>%(name)s</h1> - </body> -</html> -""" % teaching_assistants[id] -+ cr, uid, context = http.request.cr, http.request.uid, http.request.context -+ return http.request.registry['ir.ui.view'].render( -+ cr, uid, 'academy.ta', teaching_assistants[id], context=context) ++ return http.request.render('academy.ta', teaching_assistants[id]) diff --git a/views/templates.xml b/views/templates.xml new file mode 100644 --- /dev/null @@ -72,7 +65,7 @@ new file mode 100644 + <html> + <head> + <title>AcademyAcademy</title> -+ <link href="//netdna.bootstrapcdn.com/bootstrap/3.1.1/css/bootstrap.min.css" rel="stylesheet"/> ++ <link href="/web/static/lib/bootstrap/css/bootstrap.min.css" rel="stylesheet"/> + </head> + <body class="container"> + <h1>Introduction to a thing</h1> @@ -95,7 +88,7 @@ new file mode 100644 + <html> + <head> + <title>AcademyAcademy TA <t 
t-esc="name"/></title> -+ <link href="//netdna.bootstrapcdn.com/bootstrap/3.1.1/css/bootstrap.min.css" rel="stylesheet"/> ++ <link href="/web/static/lib/bootstrap/css/bootstrap.min.css" rel="stylesheet"/> + </head> + <body class="container"> + <h1><t t-esc="name"/></h1> diff --git a/doc/howto/howto_website/website-dependency b/doc/howto/howto_website/website-dependency index 37c14e8f2c2d10cf3687c5c0871fa2e76e286993..e1a038233ca5676298b4ac70ccf258795be00640 100644 --- a/doc/howto/howto_website/website-dependency +++ b/doc/howto/howto_website/website-dependency @@ -1,5 +1,5 @@ # HG changeset patch -# Parent 0fdf3e29ce5bb1dd39479f157eeac5bdfd3cffb0 +# Parent 9d2ca56532b5bbb5d6c9de995088d895fbefff2e diff --git a/__openerp__.py b/__openerp__.py --- a/__openerp__.py @@ -16,32 +16,20 @@ diff --git a/__openerp__.py b/__openerp__.py diff --git a/controllers/academy.py b/controllers/academy.py --- a/controllers/academy.py +++ b/controllers/academy.py -@@ -12,19 +12,12 @@ teaching_assistants = [ - ] - +@@ -14,12 +14,8 @@ teaching_assistants = [ class academy(main.Home): -- @http.route('/', auth='none') -+ @http.route('/', auth='none', website=True) + @http.route('/', auth='none') def index(self): -- cr, uid, context = http.request.cr, http.request.uid, http.request.context - tas = [ - '<li><a href="/tas/%d/">%s</a></li>' % (i, ta['name']) - for i, ta in enumerate(teaching_assistants) - ] -+ return http.request.website.render('academy.index', { + return http.request.render('academy.index', { +- 'tas': '\n'.join(tas) + 'tas': teaching_assistants, -+ }) + }) -- return http.request.registry['ir.ui.view'].render(cr, uid, 'academy.index', { -- 'tas': '\n'.join(tas) -- }, context=context) -- @http.route('/tas/<int:id>/', auth='none') -+ @http.route('/tas/<int:id>/', auth='none', website=True) - def ta(self, id): -- cr, uid, context = http.request.cr, http.request.uid, http.request.context -- return http.request.registry['ir.ui.view'].render( -- cr, uid, 'academy.ta', 
teaching_assistants[id], context=context) -+ return http.request.website.render('academy.ta', teaching_assistants[id]) + @http.route('/tas/<int:id>/', auth='none') diff --git a/views/templates.xml b/views/templates.xml --- a/views/templates.xml +++ b/views/templates.xml diff --git a/doc/howto/howto_website/website-layoutify b/doc/howto/howto_website/website-layoutify index d4ed9647dd51abb5bc827847e9a39fb6db14f290..a2ae505673c6b63807064791b56f27f2966dc3bc 100644 --- a/doc/howto/howto_website/website-layoutify +++ b/doc/howto/howto_website/website-layoutify @@ -1,5 +1,5 @@ # HG changeset patch -# Parent 69c500d7634c0e5287508cfaffa14174cc47d800 +# Parent 22cbd107041dde59f904e94ea8c740fca5965fc0 diff --git a/controllers/academy.py b/controllers/academy.py --- a/controllers/academy.py @@ -8,17 +8,17 @@ diff --git a/controllers/academy.py b/controllers/academy.py ] class academy(main.Home): -- @http.route('/', auth='none', website=True) +- @http.route('/', auth='none') + @http.route('/', auth='public', website=True) def index(self): - return http.request.website.render('academy.index', { + return http.request.render('academy.index', { 'tas': teaching_assistants, }) -- @http.route('/tas/<int:id>/', auth='none', website=True) +- @http.route('/tas/<int:id>/', auth='none') + @http.route('/tas/<int:id>/', auth='public', website=True) def ta(self, id): - return http.request.website.render('academy.ta', teaching_assistants[id]) + return http.request.render('academy.ta', teaching_assistants[id]) diff --git a/views/templates.xml b/views/templates.xml --- a/views/templates.xml +++ b/views/templates.xml @@ -29,7 +29,7 @@ diff --git a/views/templates.xml b/views/templates.xml - <html> - <head> - <title>AcademyAcademy</title> -- <link href="//netdna.bootstrapcdn.com/bootstrap/3.1.1/css/bootstrap.min.css" rel="stylesheet"/> +- <link href="/web/static/lib/bootstrap/css/bootstrap.min.css" rel="stylesheet"/> - </head> - <body class="container"> - <h1>Introduction to a thing</h1> @@ -83,7 
+83,7 @@ diff --git a/views/templates.xml b/views/templates.xml - <html> - <head> - <title>AcademyAcademy TA <t t-esc="name"/></title> -- <link href="//netdna.bootstrapcdn.com/bootstrap/3.1.1/css/bootstrap.min.css" rel="stylesheet"/> +- <link href="/web/static/lib/bootstrap/css/bootstrap.min.css" rel="stylesheet"/> - </head> - <body class="container"> - <h1><t t-esc="name"/></h1> diff --git a/doc/index.rst b/doc/index.rst index ad42e41a10f06f86179c3730699c824b3019874c..f7cd7c399f1ed16cad6340a03439d8d65ebe25a5 100644 --- a/doc/index.rst +++ b/doc/index.rst @@ -36,9 +36,10 @@ OpenERP Server API .. toctree:: :maxdepth: 1 - orm-methods.rst - api_models.rst - routing.rst + new_api + orm-methods + api_models + routing Changelog ''''''''' diff --git a/doc/new_api.rst b/doc/new_api.rst new file mode 100644 index 0000000000000000000000000000000000000000..e99d5b7eef49716f51c3d55ebdd73b0f3027d4aa --- /dev/null +++ b/doc/new_api.rst @@ -0,0 +1,138 @@ +================== +High-level ORM API +================== + +.. _compute: + +Computed fields: defaults and function fields +============================================= + +The high-level API attempts to unify concepts of programmatic value generation +for function fields (stored or not) and default values through the use of +computed fields. + +Fields are marked as computed by setting their ``compute`` attribute to the +name of the method used to compute then:: + + has_sibling = fields.Integer(compute='compute_has_sibling') + +by default computation methods behave as simple defaults in case no +corresponding value is found in the database:: + + def default_number_of_employees(self): + self.number_of_employees = 1 + +.. todo:: + + literal defaults:: + + has_sibling = fields.Integer(compute=fields.default(1)) + +but they can also be used for computed fields by specifying fields used for +the computation. 
The dependencies can be dotted for "cascading" through +related models:: + + @api.depends('parent_id.children_count') + def compute_has_sibling(self): + self.has_sibling = self.parent_id.children_count >= 2 + +.. todo:: + + function-based:: + + has_sibling = fields.Integer() + @has_sibling.computer + @api.depends('parent_id.children_count') + def compute_has_sibling(self): + self.has_sibling = self.parent_id.children_count >= 2 + +note that computation methods (defaults or others) do not *return* a value, +they *set* values the current object. This means the high-level API does not +need :ref:`an explicit multi <fields-functional>`: a ``multi`` method is +simply one which computes several values at once:: + + @api.depends('company_id') + def compute_relations(self): + self.computed_company = self.company_id + self.computed_companies = self.company_id.to_recordset() + +Automatic onchange +================== + +Using to the improved and expanded :ref:`computed fields <compute>`, the +high-level ORM API is able to infer the effect of fields on +one another, and thus automatically provide a basic form of onchange without +having to implement it by hand, or implement dozens of onchange functions to +get everything right. + + + + +.. todo:: + + deferred records:: + + partner = Partner.record(42, defer=True) + partner.name = "foo" + partner.user_id = juan + partner.save() # only saved to db here + + with scope.defer(): + # all records in this scope or children scopes are deferred + # until corresponding scope poped or until *this* scope poped? + partner = Partner.record(42) + partner.name = "foo" + partner.user_id = juan + # saved here, also for recordset &al, ~transaction + + # temp deferment, maybe simpler? Or for bulk operations?: + with Partner.record(42) as partner: + partner.name = "foo" + partner.user_id = juan + + ``id = False`` => always defered? null v draft? + +.. 
todo:: keyword arguments passed positionally (common for context, completely breaks everything) + +.. todo:: optional arguments (report_aged_receivable) + +.. todo:: non-id ids? (mail thread_id) + +.. todo:: partial signatures on overrides (e.g. message_post) + +.. todo:: + + :: + + field = fields.Char() + + @field.computer + def foo(self): + "compute foo here" + + ~ + + :: + + field = fields.Char(compute='foo') + + def foo(self): + "compute foo here" + +.. todo:: doc + +.. todo:: incorrect dependency spec? + +.. todo:: dynamic dependencies? + + :: + + @api.depends(???) + def foo(self) + self.a = self[self.b] + +.. todo:: recursive onchange + + Country & state. Change country -> remove state; set state -> set country + +.. todo:: onchange list affected? diff --git a/openerp/__init__.py b/openerp/__init__.py index 995822c9cb0cfe9c7e4dcc18e94cf5a677b8270d..24a193bf759065b28f44077ab9b3bc7625db983f 100644 --- a/openerp/__init__.py +++ b/openerp/__init__.py @@ -67,9 +67,7 @@ def registry(database_name): # Imports #---------------------------------------------------------- import addons -import cli import conf -import http import loglevels import modules import netsvc @@ -82,5 +80,18 @@ import sql_db import tools import workflow -# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: +#---------------------------------------------------------- +# Model classes, fields, api decorators, and translations +#---------------------------------------------------------- +from . import models +from . import fields +from . 
import api +from openerp.tools.translate import _ +#---------------------------------------------------------- +# Other imports, which may require stuff from above +#---------------------------------------------------------- +import cli +import http + +# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: diff --git a/openerp/addons/base/__openerp__.py b/openerp/addons/base/__openerp__.py index 6db84cb84c46aa28ea618ed7e9bfc2ee249455e1..4bbe2ed5879172ea297b76f3103f19ce45c1afcf 100644 --- a/openerp/addons/base/__openerp__.py +++ b/openerp/addons/base/__openerp__.py @@ -39,7 +39,6 @@ The kernel of OpenERP, needed for all installation. 'res/res_country_data.xml', 'security/base_security.xml', 'base_menu.xml', - 'res/res_security.xml', 'res/res_config.xml', 'res/res.country.state.csv', 'ir/ir_actions.xml', @@ -82,7 +81,7 @@ The kernel of OpenERP, needed for all installation. 'res/res_users_view.xml', 'res/res_partner_data.xml', 'res/ir_property_view.xml', - 'security/base_security.xml', + 'res/res_security.xml', 'security/ir.model.access.csv', ], 'demo': [ diff --git a/openerp/addons/base/base_menu.xml b/openerp/addons/base/base_menu.xml index d9cd0c4e8bf93da1a0f47851c014ba937355876f..5c15addb56de9eb651a6412c7e35d5be53a67cac 100644 --- a/openerp/addons/base/base_menu.xml +++ b/openerp/addons/base/base_menu.xml @@ -30,6 +30,10 @@ <menuitem id="menu_security" name="Security" parent="menu_custom" sequence="25"/> <menuitem id="menu_ir_property" name="Parameters" parent="menu_custom" sequence="24"/> + <record model="ir.ui.menu" id="base.menu_administration"> + <field name="groups_id" eval="[(6,0, [ref('group_system'), ref('group_erp_manager')])]"/> + </record> + <record id="action_client_base_menu" model="ir.actions.client"> <field name="name">Open Settings Menu</field> <field name="tag">reload</field> diff --git a/openerp/addons/base/ir/ir_actions.py b/openerp/addons/base/ir/ir_actions.py index 
2de2301c5dfc4faba597bdfbecc566eec08ab86d..7ac8783ddd9b4f0819540bbebe6ab7be002a8704 100644 --- a/openerp/addons/base/ir/ir_actions.py +++ b/openerp/addons/base/ir/ir_actions.py @@ -330,7 +330,7 @@ class ir_actions_act_window(osv.osv): } for res in results: model = res.get('res_model') - if model and self.pool.get(model): + if model in self.pool: try: with tools.mute_logger("openerp.tools.safe_eval"): eval_context = eval(res['context'] or "{}", eval_dict) or {} @@ -339,7 +339,7 @@ class ir_actions_act_window(osv.osv): continue if not fields or 'help' in fields: custom_context = dict(context, **eval_context) - res['help'] = self.pool.get(model).get_empty_list_help(cr, uid, res.get('help', ""), context=custom_context) + res['help'] = self.pool[model].get_empty_list_help(cr, uid, res.get('help', ""), context=custom_context) if ids_int: return results[0] return results @@ -355,7 +355,7 @@ class ir_actions_act_window(osv.osv): dataobj = self.pool.get('ir.model.data') data_id = dataobj._get_id (cr, SUPERUSER_ID, module, xml_id) res_id = dataobj.browse(cr, uid, data_id, context).res_id - return self.read(cr, uid, res_id, [], context) + return self.read(cr, uid, [res_id], [], context)[0] VIEW_TYPES = [ ('tree', 'Tree'), @@ -550,7 +550,7 @@ class ir_actions_server(osv.osv): help="Provide an expression that, applied on the current record, gives the field to update."), 'fields_lines': fields.one2many('ir.server.object.lines', 'server_id', string='Value Mapping', - help=""), + copy=True), # Fake fields used to implement the placeholder assistant 'model_object_field': fields.many2one('ir.model.fields', string="Field", @@ -577,7 +577,7 @@ class ir_actions_server(osv.osv): 'sequence': 5, 'code': """# You can use the following variables: # - self: ORM model of the record on which the action is triggered -# - object: browse_record of the record on which the action is triggered if there is one, otherwise None +# - object: Record on which the action is triggered if there is one, 
otherwise None # - pool: ORM model pool (i.e. self.pool) # - cr: database cursor # - uid: current user id @@ -838,7 +838,7 @@ class ir_actions_server(osv.osv): def run_action_client_action(self, cr, uid, action, eval_context=None, context=None): if not action.action_id: raise osv.except_osv(_('Error'), _("Please specify an action to launch!")) - return self.pool[action.action_id.type].read(cr, uid, action.action_id.id, context=context) + return self.pool[action.action_id.type].read(cr, uid, [action.action_id.id], context=context)[0] def run_action_code_multi(self, cr, uid, action, eval_context=None, context=None): eval(action.code.strip(), eval_context, mode="exec", nocopy=True) # nocopy allows to return 'action' @@ -1100,10 +1100,10 @@ Launch Manually Once: after having been launched manually, it sets automatically wizard.write({'state': 'done'}) # Load action - act_type = self.pool.get('ir.actions.actions').read(cr, uid, wizard.action_id.id, ['type'], context=context) + act_type = wizard.action_id.type - res = self.pool[act_type['type']].read(cr, uid, wizard.action_id.id, [], context=context) - if act_type['type'] != 'ir.actions.act_window': + res = self.pool[act_type].read(cr, uid, [wizard.action_id.id], [], context=context)[0] + if act_type != 'ir.actions.act_window': return res res.setdefault('context','{}') res['nodestroy'] = True diff --git a/openerp/addons/base/ir/ir_attachment.py b/openerp/addons/base/ir/ir_attachment.py index ef4a6b205a733c9e39df35d967421f270b72288b..ae28a89aa21e1a561cc91a20f1c9b0cea0dd85c9 100644 --- a/openerp/addons/base/ir/ir_attachment.py +++ b/openerp/addons/base/ir/ir_attachment.py @@ -56,12 +56,12 @@ class ir_attachment(osv.osv): if model_object and res_id: model_pool = self.pool[model_object] res = model_pool.name_get(cr,uid,[res_id],context) - res_name = res and res[0][1] or False + res_name = res and res[0][1] or None if res_name: field = self._columns.get('res_name',False) if field and len(res_name) > field.size: res_name = 
res_name[:30] + '...' - data[attachment.id] = res_name + data[attachment.id] = res_name or False else: data[attachment.id] = False return data @@ -273,7 +273,7 @@ class ir_attachment(osv.osv): # performed in batch as much as possible. ima = self.pool.get('ir.model.access') for model, targets in model_attachments.iteritems(): - if not self.pool.get(model): + if model not in self.pool: continue if not ima.check(cr, uid, model, 'read', False): # remove all corresponding attachment ids @@ -297,7 +297,7 @@ class ir_attachment(osv.osv): if isinstance(ids, (int, long)): ids = [ids] self.check(cr, uid, ids, 'read', context=context) - return super(ir_attachment, self).read(cr, uid, ids, fields_to_read, context, load) + return super(ir_attachment, self).read(cr, uid, ids, fields_to_read, context=context, load=load) def write(self, cr, uid, ids, vals, context=None): if isinstance(ids, (int, long)): diff --git a/openerp/addons/base/ir/ir_cron.py b/openerp/addons/base/ir/ir_cron.py index 8350972fa271b7da97094d9515031fb00decbdf9..60790c8fdc617a84a020a23494354e1d666b9386 100644 --- a/openerp/addons/base/ir/ir_cron.py +++ b/openerp/addons/base/ir/ir_cron.py @@ -26,7 +26,7 @@ from datetime import datetime from dateutil.relativedelta import relativedelta import openerp -from openerp import netsvc +from openerp import SUPERUSER_ID, netsvc, api from openerp.osv import fields, osv from openerp.tools import DEFAULT_SERVER_DATETIME_FORMAT from openerp.tools.safe_eval import safe_eval as eval @@ -149,36 +149,38 @@ class ir_cron(osv.osv): except Exception, e: self._handle_callback_exception(cr, uid, model_name, method_name, args, job_id, e) - def _process_job(self, job_cr, job, cron_cr): + def _process_job(self, cr, job, cron_cr): """ Run a given job taking care of the repetition. - :param job_cr: cursor to use to execute the job, safe to commit/rollback + :param cr: cursor to use to execute the job, safe to commit/rollback :param job: job to be run (as a dictionary). 
:param cron_cr: cursor holding lock on the cron job row, to use to update the next exec date, must not be committed/rolled back! """ try: - now = datetime.now() - nextcall = datetime.strptime(job['nextcall'], DEFAULT_SERVER_DATETIME_FORMAT) - numbercall = job['numbercall'] - - ok = False - while nextcall < now and numbercall: - if numbercall > 0: - numbercall -= 1 - if not ok or job['doall']: - self._callback(job_cr, job['user_id'], job['model'], job['function'], job['args'], job['id']) - if numbercall: - nextcall += _intervalTypes[job['interval_type']](job['interval_number']) - ok = True - addsql = '' - if not numbercall: - addsql = ', active=False' - cron_cr.execute("UPDATE ir_cron SET nextcall=%s, numbercall=%s"+addsql+" WHERE id=%s", - (nextcall.strftime(DEFAULT_SERVER_DATETIME_FORMAT), numbercall, job['id'])) + with api.Environment.manage(): + now = datetime.now() + nextcall = datetime.strptime(job['nextcall'], DEFAULT_SERVER_DATETIME_FORMAT) + numbercall = job['numbercall'] + + ok = False + while nextcall < now and numbercall: + if numbercall > 0: + numbercall -= 1 + if not ok or job['doall']: + self._callback(cr, job['user_id'], job['model'], job['function'], job['args'], job['id']) + if numbercall: + nextcall += _intervalTypes[job['interval_type']](job['interval_number']) + ok = True + addsql = '' + if not numbercall: + addsql = ', active=False' + cron_cr.execute("UPDATE ir_cron SET nextcall=%s, numbercall=%s"+addsql+" WHERE id=%s", + (nextcall.strftime(DEFAULT_SERVER_DATETIME_FORMAT), numbercall, job['id'])) + self.invalidate_cache(cr, SUPERUSER_ID) finally: - job_cr.commit() + cr.commit() cron_cr.commit() @classmethod diff --git a/openerp/addons/base/ir/ir_exports.py b/openerp/addons/base/ir/ir_exports.py index 745ab7cd233f9053fa3ad6c52f337c87264782dd..2bdc639024a20049b43b5e1be11cc472dd0650e9 100644 --- a/openerp/addons/base/ir/ir_exports.py +++ b/openerp/addons/base/ir/ir_exports.py @@ -29,7 +29,7 @@ class ir_exports(osv.osv): 'name': fields.char('Export 
Name'), 'resource': fields.char('Resource', select=True), 'export_fields': fields.one2many('ir.exports.line', 'export_id', - 'Export ID'), + 'Export ID', copy=True), } diff --git a/openerp/addons/base/ir/ir_fields.py b/openerp/addons/base/ir/ir_fields.py index 217ea319d8e7d8107af0354cdf559beec1f13f7f..f70d3de33c3a2fe95293ecac3929c003655237da 100644 --- a/openerp/addons/base/ir/ir_fields.py +++ b/openerp/addons/base/ir/ir_fields.py @@ -31,6 +31,10 @@ LINK_TO = lambda id: (4, id, False) DELETE_ALL = lambda: (5, False, False) REPLACE_WITH = lambda ids: (6, False, ids) +class ImportWarning(Warning): + """ Used to send warnings upwards the stack during the import process """ + pass + class ConversionNotFound(ValueError): pass class ColumnWrapper(object): @@ -124,7 +128,7 @@ class ir_fields_converter(orm.Model): If a converter can perform its function but has to make assumptions about the data, it can send a warning to the user through adding an - instance of :class:`~openerp.osv.orm.ImportWarning` to the second value + instance of :class:`~.ImportWarning` to the second value it returns. The handling of a warning at the upper levels is the same as ``ValueError`` above. 
@@ -165,7 +169,7 @@ class ir_fields_converter(orm.Model): )) if value.lower() in falses: return False, [] - return True, [orm.ImportWarning( + return True, [ImportWarning( _(u"Unknown value '%s' for boolean field '%%(field)s', assuming '%s'") % (value, yes), { 'moreinfo': _(u"Use '1' for yes and '0' for no") @@ -334,7 +338,7 @@ class ir_fields_converter(orm.Model): cr, uid, name=value, operator='=', context=context) if ids: if len(ids) > 1: - warnings.append(orm.ImportWarning( + warnings.append(ImportWarning( _(u"Found multiple matches for field '%%(field)s' (%d matches)") % (len(ids)))) id, _name = ids[0] diff --git a/openerp/addons/base/ir/ir_http.py b/openerp/addons/base/ir/ir_http.py index 21c8b7ef94181f773b3666be4c6d817040ade131..f2967a02e70b46f12258f587a4a46bbd1b817ea5 100644 --- a/openerp/addons/base/ir/ir_http.py +++ b/openerp/addons/base/ir/ir_http.py @@ -5,9 +5,13 @@ import logging import re import sys -import werkzeug +import werkzeug.exceptions +import werkzeug.routing +import werkzeug.urls +import werkzeug.utils import openerp +import openerp.exceptions from openerp import http from openerp.http import request from openerp.osv import osv, orm @@ -59,7 +63,7 @@ class ir_http(osv.AbstractModel): request.uid = request.session.uid if not request.uid: if not request.params.get('noredirect'): - query = werkzeug.url_encode({ + query = werkzeug.urls.url_encode({ 'redirect': request.httprequest.url, }) response = werkzeug.utils.redirect('/web/login?%s' % query) diff --git a/openerp/addons/base/ir/ir_mail_server.py b/openerp/addons/base/ir/ir_mail_server.py index 747f4f1446db900f88094b67369612f6f3e0e495..aeded8a12958b0511e257ff27bb18ded1ca42368 100644 --- a/openerp/addons/base/ir/ir_mail_server.py +++ b/openerp/addons/base/ir/ir_mail_server.py @@ -19,12 +19,12 @@ # ############################################################################## -from email.MIMEText import MIMEText -from email.MIMEBase import MIMEBase -from email.MIMEMultipart import MIMEMultipart 
-from email.Charset import Charset -from email.Header import Header -from email.Utils import formatdate, make_msgid, COMMASPACE +from email.mime.text import MIMEText +from email.mime.base import MIMEBase +from email.mime.multipart import MIMEMultipart +from email.charset import Charset +from email.header import Header +from email.utils import formatdate, make_msgid, COMMASPACE from email import Encoders import logging import re @@ -461,21 +461,18 @@ class ir_mail_server(osv.osv): mdir.add(message.as_string(True)) return message_id + smtp = None try: smtp = self.connect(smtp_server, smtp_port, smtp_user, smtp_password, smtp_encryption or False, smtp_debug) smtp.sendmail(smtp_from, smtp_to_list, message.as_string()) finally: - try: - # Close Connection of SMTP Server + if smtp is not None: smtp.quit() - except Exception: - # ignored, just a consequence of the previous exception - pass except Exception, e: msg = _("Mail delivery failed via SMTP server '%s'.\n%s: %s") % (tools.ustr(smtp_server), e.__class__.__name__, tools.ustr(e)) - _logger.exception(msg) + _logger.error(msg) raise MailDeliveryException(_("Mail Delivery Failed"), msg) return message_id diff --git a/openerp/addons/base/ir/ir_model.py b/openerp/addons/base/ir/ir_model.py index c759dace9c7ccfe3967640ea04a5c6bb596ebd53..e29b44f06782a2f67977f51d7ca9b0827fb9ab97 100644 --- a/openerp/addons/base/ir/ir_model.py +++ b/openerp/addons/base/ir/ir_model.py @@ -28,12 +28,11 @@ import openerp import openerp.modules.registry from openerp import SUPERUSER_ID from openerp import tools -from openerp.osv import fields,osv -from openerp.osv.orm import Model, browse_null -from openerp.tools.safe_eval import safe_eval as eval +from openerp.osv import fields, osv +from openerp.osv.orm import BaseModel, Model, MAGIC_COLUMNS, except_orm from openerp.tools import config +from openerp.tools.safe_eval import safe_eval as eval from openerp.tools.translate import _ -from openerp.osv.orm import except_orm, browse_record, 
MAGIC_COLUMNS _logger = logging.getLogger(__name__) @@ -99,7 +98,7 @@ class ir_model(osv.osv): 'name': fields.char('Model Description', translate=True, required=True), 'model': fields.char('Model', required=True, select=1), 'info': fields.text('Information'), - 'field_id': fields.one2many('ir.model.fields', 'model_id', 'Fields', required=True), + 'field_id': fields.one2many('ir.model.fields', 'model_id', 'Fields', required=True, copy=True), 'state': fields.selection([('manual','Custom Object'),('base','Base Object')],'Type', readonly=True), 'access_ids': fields.one2many('ir.model.access', 'model_id', 'Access'), 'osv_memory': fields.function(_is_osv_memory, string='Transient Model', type='boolean', @@ -133,15 +132,10 @@ class ir_model(osv.osv): ('obj_name_uniq', 'unique (model)', 'Each model must be unique!'), ] - # overridden to allow searching both on model name (model field) - # and model description (name field) - def _name_search(self, cr, uid, name='', args=None, operator='ilike', context=None, limit=100, name_get_uid=None): - if args is None: - args = [] - domain = args + ['|', ('model', operator, name), ('name', operator, name)] - return self.name_get(cr, name_get_uid or uid, - super(ir_model, self).search(cr, uid, domain, limit=limit, context=context), - context=context) + def _search_display_name(self, operator, value): + # overridden to allow searching both on model name (model field) and + # model description (name field) + return ['|', ('model', operator, value), ('name', operator, value)] def _drop_table(self, cr, uid, ids, context=None): for model in self.browse(cr, uid, ids, context): @@ -177,6 +171,7 @@ class ir_model(osv.osv): def write(self, cr, user, ids, vals, context=None): if context: + context = dict(context) context.pop('__last_update', None) # Filter out operations 4 link from field id, because openerp-web # always write (4,id,False) even for non dirty items @@ -207,7 +202,7 @@ class ir_model(osv.osv): _custom = True x_custom_model._name = 
model x_custom_model._module = False - a = x_custom_model.create_instance(self.pool, cr) + a = x_custom_model._build_model(self.pool, cr) if not a._columns: x_name = 'id' elif 'x_name' in a._columns.keys(): @@ -629,8 +624,8 @@ class ir_model_access(osv.osv): """ Check if a specific group has the access mode to the specified model""" assert mode in ['read','write','create','unlink'], 'Invalid access mode' - if isinstance(model, browse_record): - assert model._table_name == 'ir.model', 'Invalid model object' + if isinstance(model, BaseModel): + assert model._name == 'ir.model', 'Invalid model object' model_name = model.name else: model_name = model @@ -688,8 +683,8 @@ class ir_model_access(osv.osv): assert mode in ['read','write','create','unlink'], 'Invalid access mode' - if isinstance(model, browse_record): - assert model._table_name == 'ir.model', 'Invalid model object' + if isinstance(model, BaseModel): + assert model._name == 'ir.model', 'Invalid model object' model_name = model.model else: model_name = model @@ -757,6 +752,7 @@ class ir_model_access(osv.osv): pass def call_cache_clearing_methods(self, cr): + self.invalidate_cache(cr, SUPERUSER_ID) self.check.clear_cache(self) # clear the cache of check function for model, method in self.__cache_clearing_methods: if model in self.pool: @@ -765,19 +761,19 @@ class ir_model_access(osv.osv): # # Check rights on actions # - def write(self, cr, uid, *args, **argv): + def write(self, cr, uid, ids, values, context=None): self.call_cache_clearing_methods(cr) - res = super(ir_model_access, self).write(cr, uid, *args, **argv) + res = super(ir_model_access, self).write(cr, uid, ids, values, context=context) return res - def create(self, cr, uid, *args, **argv): + def create(self, cr, uid, values, context=None): self.call_cache_clearing_methods(cr) - res = super(ir_model_access, self).create(cr, uid, *args, **argv) + res = super(ir_model_access, self).create(cr, uid, values, context=context) return res - def unlink(self, 
cr, uid, *args, **argv): + def unlink(self, cr, uid, ids, context=None): self.call_cache_clearing_methods(cr) - res = super(ir_model_access, self).unlink(cr, uid, *args, **argv) + res = super(ir_model_access, self).unlink(cr, uid, ids, context=context) return res class ir_model_data(osv.osv): @@ -833,8 +829,8 @@ class ir_model_data(osv.osv): 'date_init': fields.datetime('Init Date') } _defaults = { - 'date_init': lambda *a: time.strftime('%Y-%m-%d %H:%M:%S'), - 'date_update': lambda *a: time.strftime('%Y-%m-%d %H:%M:%S'), + 'date_init': fields.datetime.now, + 'date_update': fields.datetime.now, 'noupdate': False, 'module': '' } @@ -844,12 +840,11 @@ class ir_model_data(osv.osv): def __init__(self, pool, cr): osv.osv.__init__(self, pool, cr) - self.doinit = True # also stored in pool to avoid being discarded along with this osv instance if getattr(pool, 'model_data_reference_ids', None) is None: self.pool.model_data_reference_ids = {} - - self.loads = self.pool.model_data_reference_ids + # put loads on the class, in order to share it among all instances + type(self).loads = self.pool.model_data_reference_ids def _auto_init(self, cr, context=None): super(ir_model_data, self)._auto_init(cr, context) @@ -888,7 +883,7 @@ class ir_model_data(osv.osv): def xmlid_to_object(self, cr, uid, xmlid, raise_if_not_found=False, context=None): """ Return a browse_record - if not found and raise_if_not_found is True return the browse_null + if not found and raise_if_not_found is True return None """ t = self.xmlid_to_res_model_res_id(cr, uid, xmlid, raise_if_not_found) res_model, res_id = t @@ -899,7 +894,7 @@ class ir_model_data(osv.osv): return record if raise_if_not_found: raise ValueError('No record found for unique ID %s. It may have been deleted.' 
% (xml_id)) - return browse_null() + return None # OLD API def _get_id(self, cr, uid, module, xml_id): @@ -924,7 +919,7 @@ class ir_model_data(osv.osv): def get_object(self, cr, uid, module, xml_id, context=None): """ Returns a browsable record for the given module name and xml_id. - If not found, raise a ValueError or return a browse_null, depending + If not found, raise a ValueError or return None, depending on the value of `raise_exception`. """ return self.xmlid_to_object(cr, uid, "%s.%s" % (module, xml_id), raise_if_not_found=True, context=context) @@ -961,8 +956,6 @@ class ir_model_data(osv.osv): if xml_id and ('.' in xml_id): assert len(xml_id.split('.'))==2, _("'%s' contains too many dots. XML ids should not contain dots ! These are used to refer to other modules data, as in module.reference_id") % xml_id module, xml_id = xml_id.split('.') - if (not xml_id) and (not self.doinit): - return False action_id = False if xml_id: cr.execute('''SELECT imd.id, imd.res_id, md.id, imd.model, imd.noupdate @@ -1034,8 +1027,8 @@ class ir_model_data(osv.osv): if xml_id and res_id: self.loads[(module, xml_id)] = (model, res_id) for table, inherit_field in model_obj._inherits.iteritems(): - inherit_id = model_obj.read(cr, uid, res_id, - [inherit_field])[inherit_field] + inherit_id = model_obj.read(cr, uid, [res_id], + [inherit_field])[0][inherit_field] self.loads[(module, xml_id + '_' + table.replace('.', '_'))] = (table, inherit_id) return res_id @@ -1058,11 +1051,12 @@ class ir_model_data(osv.osv): cr.execute('select * from ir_values where model=%s and key=%s and name=%s'+where,(model, key, name)) res = cr.fetchone() + ir_values_obj = openerp.registry(cr.dbname)['ir.values'] if not res: - ir_values_obj = openerp.registry(cr.dbname)['ir.values'] ir_values_obj.set(cr, uid, key, key2, name, models, value, replace, isobject, meta) elif xml_id: cr.execute('UPDATE ir_values set value=%s WHERE model=%s and key=%s and name=%s'+where,(value, model, key, name)) + 
ir_values_obj.invalidate_cache(cr, uid, ['value']) return True def _module_data_uninstall(self, cr, uid, modules_to_remove, context=None): @@ -1104,6 +1098,7 @@ class ir_model_data(osv.osv): cr.execute('select res_type,res_id from wkf_instance where id IN (select inst_id from wkf_workitem where act_id=%s)', (res_id,)) wkf_todo.extend(cr.fetchall()) cr.execute("update wkf_transition set condition='True', group_id=NULL, signal=NULL,act_to=act_from,act_from=%s where act_to=%s", (res_id,res_id)) + self.invalidate_cache(cr, uid, context=context) for model,res_id in wkf_todo: try: @@ -1125,7 +1120,7 @@ class ir_model_data(osv.osv): _logger.info('Deleting orphan external_ids %s', external_ids) self.unlink(cr, uid, external_ids) continue - if field.name in openerp.osv.orm.LOG_ACCESS_COLUMNS and self.pool[field.model]._log_access: + if field.name in openerp.models.LOG_ACCESS_COLUMNS and self.pool[field.model]._log_access: continue if field.name == 'id': continue diff --git a/openerp/addons/base/ir/ir_qweb.py b/openerp/addons/base/ir/ir_qweb.py index 27822c782465a63b12e1d7b5dca97ca2634453f8..47866b69eb5f00da9f2954ff868808e81d92a60e 100644 --- a/openerp/addons/base/ir/ir_qweb.py +++ b/openerp/addons/base/ir/ir_qweb.py @@ -445,7 +445,7 @@ class QWeb(orm.AbstractModel): record, field_name = template_attributes["field"].rsplit('.', 1) record = self.eval_object(record, qwebcontext) - column = record._model._all_columns[field_name].column + column = record._all_columns[field_name].column options = json.loads(template_attributes.get('field-options') or '{}') field_type = get_field_type(column, options) @@ -506,10 +506,10 @@ class FieldConverter(osv.AbstractModel): :returns: iterable of (attribute name, attribute value) pairs. 
""" - column = record._model._all_columns[field_name].column + column = record._all_columns[field_name].column field_type = get_field_type(column, options) return [ - ('data-oe-model', record._model._name), + ('data-oe-model', record._name), ('data-oe-id', record.id), ('data-oe-field', field_name), ('data-oe-type', field_type), @@ -541,7 +541,7 @@ class FieldConverter(osv.AbstractModel): try: content = self.record_to_html( cr, uid, field_name, record, - record._model._all_columns[field_name].column, + record._all_columns[field_name].column, options, context=context) if options.get('html-escape', True): content = escape(content) @@ -549,7 +549,7 @@ class FieldConverter(osv.AbstractModel): content = content.__html__() except Exception: _logger.warning("Could not get field %s for model %s", - field_name, record._model._name, exc_info=True) + field_name, record._name, exc_info=True) content = None if context and context.get('inherit_branding'): @@ -879,7 +879,7 @@ class Contact(orm.AbstractModel): id = getattr(record, field_name).id field_browse = self.pool[column._obj].browse(cr, openerp.SUPERUSER_ID, id, context={"show_address": True}) - value = field_browse.name_get()[0][1] + value = field_browse.display_name val = { 'name': value.split("\n")[0], @@ -888,7 +888,7 @@ class Contact(orm.AbstractModel): 'mobile': field_browse.mobile, 'fax': field_browse.fax, 'city': field_browse.city, - 'country_id': field_browse.country_id and field_browse.country_id.name_get()[0][1], + 'country_id': field_browse.country_id.display_name, 'website': field_browse.website, 'email': field_browse.email, 'fields': opf, diff --git a/openerp/addons/base/ir/ir_qweb.xml b/openerp/addons/base/ir/ir_qweb.xml index c69dc17c851ba9a45a74b2cbeb311e2aeb1122cf..30b366d307ca1910186bc2e8287864650d912418 100644 --- a/openerp/addons/base/ir/ir_qweb.xml +++ b/openerp/addons/base/ir/ir_qweb.xml @@ -13,7 +13,9 @@ <i t-if="not options.get('no_marker')" class='fa fa-map-marker'/> <span itemprop="streetAddress" 
t-raw="address.replace('\n', options.get('no_tag_br') and ', ' or ('<br/>%s' % ('' if options.get('no_marker') else '&nbsp; &nbsp; ')))"/> </div> <div t-if="city and 'city' in fields" class='css_editable_mode_hidden'> - <i t-if="not options.get('no_marker')" class='fa fa-map-marker'/> <span itemprop="addressLocality" t-raw="city"/>, <span itemprop="addressCountry" t-raw="country_id"/> + <i t-if="not options.get('no_marker')" class='fa fa-map-marker'/> + <span itemprop="addressLocality" t-raw="city"/>, + <span itemprop="addressCountry" t-raw="country_id"/> </div> <div t-if="phone and 'phone' in fields" class='css_editable_mode_hidden'><i t-if="not options.get('no_marker')" class='fa fa-phone'/> <span itemprop="telephone" t-esc="phone"/></div> <div t-if="mobile and 'mobile' in fields" class='css_editable_mode_hidden'><i t-if="not options.get('no_marker')" class='fa fa-mobile-phone'/> <span itemprop="telephone" t-esc="mobile"/></div> diff --git a/openerp/addons/base/ir/ir_rule.py b/openerp/addons/base/ir/ir_rule.py index 2ac5893038bcb392f301c68a574b7f93ca4b4da6..c55ad1d5dcdb1b0413301e5e2be41de1629b1f0f 100644 --- a/openerp/addons/base/ir/ir_rule.py +++ b/openerp/addons/base/ir/ir_rule.py @@ -78,7 +78,7 @@ class ir_rule(osv.osv): 'global': fields.function(_get_value, string='Global', type='boolean', store=True, help="If no group is specified the rule is global and applied to everyone"), 'groups': fields.many2many('res.groups', 'rule_group_rel', 'rule_group_id', 'group_id', 'Groups'), 'domain_force': fields.text('Domain'), - 'domain': fields.function(_domain_force_get, string='Domain', type='text'), + 'domain': fields.function(_domain_force_get, string='Domain', type='binary'), 'perm_read': fields.boolean('Apply for Read'), 'perm_write': fields.boolean('Apply for Write'), 'perm_create': fields.boolean('Apply for Create'), @@ -127,7 +127,7 @@ class ir_rule(osv.osv): group_domains = {} # map: group -> list of domains for rule in self.browse(cr, SUPERUSER_ID, rule_ids): # 
read 'domain' as UID to have the correct eval context for the rule. - rule_domain = self.read(cr, uid, rule.id, ['domain'])['domain'] + rule_domain = self.read(cr, uid, [rule.id], ['domain'])[0]['domain'] dom = expression.normalize_domain(rule_domain) for group in rule.groups: if group in user.groups_id: diff --git a/openerp/addons/base/ir/ir_sequence.py b/openerp/addons/base/ir/ir_sequence.py index a990a17f0327c7c473dacf2d0deba5f04a617b88..e589e2cae06dd92fe4f03743e764cc28b05944bd 100644 --- a/openerp/addons/base/ir/ir_sequence.py +++ b/openerp/addons/base/ir/ir_sequence.py @@ -234,15 +234,15 @@ class ir_sequence(openerp.osv.osv.osv): 'sec': time.strftime('%S', t), } - def _next(self, cr, uid, seq_ids, context=None): - if not seq_ids: + def _next(self, cr, uid, ids, context=None): + if not ids: return False if context is None: context = {} force_company = context.get('force_company') if not force_company: force_company = self.pool.get('res.users').browse(cr, uid, uid).company_id.id - sequences = self.read(cr, uid, seq_ids, ['name','company_id','implementation','number_next','prefix','suffix','padding']) + sequences = self.read(cr, uid, ids, ['name','company_id','implementation','number_next','prefix','suffix','padding']) preferred_sequences = [s for s in sequences if s['company_id'] and s['company_id'][0] == force_company ] seq = preferred_sequences[0] if preferred_sequences else sequences[0] if seq['implementation'] == 'standard': @@ -251,6 +251,7 @@ class ir_sequence(openerp.osv.osv.osv): else: cr.execute("SELECT number_next FROM ir_sequence WHERE id=%s FOR UPDATE NOWAIT", (seq['id'],)) cr.execute("UPDATE ir_sequence SET number_next=number_next+number_increment WHERE id=%s ", (seq['id'],)) + self.invalidate_cache(cr, uid, ['number_next'], [seq['id']], context=context) d = self._interpolation_dict() try: interpolated_prefix = self._interpolate(seq['prefix'], d) diff --git a/openerp/addons/base/ir/ir_translation.py b/openerp/addons/base/ir/ir_translation.py index 
7952c957caf880dd017a14f17cc476948f371cc5..07732850180b92a9adeb79d14853c598adfdd17f 100644 --- a/openerp/addons/base/ir/ir_translation.py +++ b/openerp/addons/base/ir/ir_translation.py @@ -168,11 +168,11 @@ class ir_translation(osv.osv): else: model_name, field = record.name.split(',') model = self.pool.get(model_name) - if model and model.exists(cr, uid, record.res_id, context=context): + if model is not None: # Pass context without lang, need to read real stored field, not translation context_no_lang = dict(context, lang=None) - result = model.read(cr, uid, record.res_id, [field], context=context_no_lang) - res[record.id] = result[field] if result else False + result = model.read(cr, uid, [record.res_id], [field], context=context_no_lang) + res[record.id] = result[0][field] if result else False return res def _set_src(self, cr, uid, id, name, value, args, context=None): diff --git a/openerp/addons/base/ir/ir_ui_menu.py b/openerp/addons/base/ir/ir_ui_menu.py index 60014be9e012e38560ed9e1e3ff9a48967404fb3..d55451dd33c2cfddbf1d71307ea7b5da58ba970f 100644 --- a/openerp/addons/base/ir/ir_ui_menu.py +++ b/openerp/addons/base/ir/ir_ui_menu.py @@ -21,15 +21,15 @@ ############################################################################## import base64 +import operator import re import threading -import operator -from openerp.tools.safe_eval import safe_eval as eval -from openerp import tools + import openerp.modules from openerp.osv import fields, osv +from openerp import api, tools +from openerp.tools.safe_eval import safe_eval as eval from openerp.tools.translate import _ -from openerp import SUPERUSER_ID MENU_ITEM_SEPARATOR = "/" @@ -38,71 +38,74 @@ class ir_ui_menu(osv.osv): _name = 'ir.ui.menu' def __init__(self, *args, **kwargs): - self.cache_lock = threading.RLock() - self._cache = {} + cls = type(self) + # by design, self._menu_cache is specific to the database + cls._menu_cache_lock = threading.RLock() + cls._menu_cache = {} super(ir_ui_menu, 
self).__init__(*args, **kwargs) self.pool.get('ir.model.access').register_cache_clearing_method(self._name, 'clear_cache') def clear_cache(self): - with self.cache_lock: + with self._menu_cache_lock: # radical but this doesn't frequently happen - if self._cache: + if self._menu_cache: # Normally this is done by openerp.tools.ormcache # but since we do not use it, set it by ourself. self.pool._any_cache_cleared = True - self._cache = {} - - def _filter_visible_menus(self, cr, uid, ids, context=None): - """Filters the give menu ids to only keep the menu items that should be - visible in the menu hierarchy of the current user. - Uses a cache for speeding up the computation. + self._menu_cache.clear() + + @api.multi + @api.returns('self') + def _filter_visible_menus(self): + """ Filter `self` to only keep the menu items that should be visible in + the menu hierarchy of the current user. + Uses a cache for speeding up the computation. """ - with self.cache_lock: - modelaccess = self.pool.get('ir.model.access') - user_groups = set(self.pool.get('res.users').read(cr, SUPERUSER_ID, uid, ['groups_id'])['groups_id']) - result = [] - for menu in self.browse(cr, uid, ids, context=context): - # this key works because user access rights are all based on user's groups (cfr ir_model_access.check) - key = (cr.dbname, menu.id, tuple(user_groups)) - if key in self._cache: - if self._cache[key]: - result.append(menu.id) - #elif not menu.groups_id and not menu.action: - # result.append(menu.id) - continue - - self._cache[key] = False - if menu.groups_id: - restrict_to_groups = [g.id for g in menu.groups_id] - if not user_groups.intersection(restrict_to_groups): - continue - #result.append(menu.id) - #self._cache[key] = True - #continue - - if menu.action: - # we check if the user has access to the action of the menu - data = menu.action - if data: - model_field = { 'ir.actions.act_window': 'res_model', - 'ir.actions.report.xml': 'model', - 'ir.actions.wizard': 'model', - 
'ir.actions.server': 'model_id', - } - - field = model_field.get(menu.action._name) - if field and data[field]: - if not modelaccess.check(cr, uid, data[field], 'read', False): - continue - else: - # if there is no action, it's a 'folder' menu - if not menu.child_id: - # not displayed if there is no children - continue - - result.append(menu.id) - self._cache[key] = True - return result + with self._menu_cache_lock: + groups = self.env.user.groups_id + + # visibility is entirely based on the user's groups; + # self._menu_cache[key] gives the ids of all visible menus + key = frozenset(groups._ids) + if key in self._menu_cache: + visible = self.browse(self._menu_cache[key]) + + else: + # retrieve all menus, and determine which ones are visible + context = {'ir.ui.menu.full_list': True} + menus = self.with_context(context).search([]) + + # first discard all menus with groups the user does not have + menus = menus.filtered( + lambda menu: not menu.groups_id or menu.groups_id & groups) + + # take apart menus that have an action + action_menus = menus.filtered('action') + folder_menus = menus - action_menus + visible = self.browse() + + # process action menus, check whether their action is allowed + access = self.env['ir.model.access'] + model_fname = { + 'ir.actions.act_window': 'res_model', + 'ir.actions.report.xml': 'model', + 'ir.actions.wizard': 'model', + 'ir.actions.server': 'model_id', + } + for menu in action_menus: + fname = model_fname.get(menu.action._name) + if not fname or not menu.action[fname] or \ + access.check(menu.action[fname], 'read', False): + # make menu visible, and its folder ancestors, too + visible += menu + menu = menu.parent_id + while menu and menu in folder_menus and menu not in visible: + visible += menu + menu = menu.parent_id + + self._menu_cache[key] = visible._ids + + return self.filtered(lambda menu: menu in visible) def search(self, cr, uid, args, offset=0, limit=None, order=None, context=None, count=False): if context is None: @@ 
-155,13 +158,13 @@ class ir_ui_menu(osv.osv): parent_path = '' return parent_path + elmt.name - def create(self, *args, **kwargs): + def create(self, cr, uid, values, context=None): self.clear_cache() - return super(ir_ui_menu, self).create(*args, **kwargs) + return super(ir_ui_menu, self).create(cr, uid, values, context=context) - def write(self, *args, **kwargs): + def write(self, cr, uid, ids, values, context=None): self.clear_cache() - return super(ir_ui_menu, self).write(*args, **kwargs) + return super(ir_ui_menu, self).write(cr, uid, ids, values, context=context) def unlink(self, cr, uid, ids, context=None): # Detach children and promote them to top-level, because it would be unwise to @@ -182,7 +185,7 @@ class ir_ui_menu(osv.osv): def copy(self, cr, uid, id, default=None, context=None): ir_values_obj = self.pool.get('ir.values') - res = super(ir_ui_menu, self).copy(cr, uid, id, context=context) + res = super(ir_ui_menu, self).copy(cr, uid, id, default=default, context=context) datas=self.read(cr,uid,[res],['name'])[0] rex=re.compile('\([0-9]+\)') concat=rex.findall(datas['name']) diff --git a/openerp/addons/base/ir/ir_ui_view.py b/openerp/addons/base/ir/ir_ui_view.py index b5231808b5ef164b20469bcfb4c97a109f0ae652..08fb4d4ef59c9ca98bcc280c8e8a6e26a19733c9 100644 --- a/openerp/addons/base/ir/ir_ui_view.py +++ b/openerp/addons/base/ir/ir_ui_view.py @@ -35,7 +35,7 @@ import HTMLParser from lxml import etree import openerp -from openerp import tools +from openerp import tools, api from openerp.http import request from openerp.osv import fields, osv, orm from openerp.tools import graph, SKIPPED_ELEMENT_TYPES @@ -302,15 +302,6 @@ class view(osv.osv): view.application, )) - - def copy(self, cr, uid, id, default=None, context=None): - if not default: - default = {} - default.update({ - 'model_ids': [], - }) - return super(view, self).copy(cr, uid, id, default, context=context) - # default view selection def default_view(self, cr, uid, model, view_type, context=None): 
""" Fetches the default view for the provided (model, view_type) pair: @@ -603,7 +594,7 @@ class view(osv.osv): modifiers = {} Model = self.pool.get(model) - if not Model: + if Model is None: self.raise_view_error(cr, user, _('Model not found: %(model)s') % dict(model=model), view_id, context) @@ -622,10 +613,10 @@ class view(osv.osv): :return: True if field should be included in the result of fields_view_get """ - if node.tag == 'field' and node.get('name') in Model._all_columns: - column = Model._all_columns[node.get('name')].column - if column.groups and not self.user_has_groups( - cr, user, groups=column.groups, context=context): + if node.tag == 'field' and node.get('name') in Model._fields: + field = Model._fields[node.get('name')] + if field.groups and not self.user_has_groups( + cr, user, groups=field.groups, context=context): node.getparent().remove(node) fields.pop(node.get('name'), None) # no point processing view-level ``groups`` anymore, return @@ -662,15 +653,8 @@ class view(osv.osv): fields = xfields if node.get('name'): attrs = {} - try: - if node.get('name') in Model._columns: - column = Model._columns[node.get('name')] - else: - column = Model._inherit_fields[node.get('name')][2] - except Exception: - column = False - - if column: + field = Model._fields.get(node.get('name')) + if field: children = False views = {} for f in node: @@ -678,7 +662,7 @@ class view(osv.osv): node.remove(f) ctx = context.copy() ctx['base_model_name'] = model - xarch, xfields = self.postprocess_and_fields(cr, user, column._obj or None, f, view_id, ctx) + xarch, xfields = self.postprocess_and_fields(cr, user, field.comodel_name, f, view_id, ctx) views[str(f.tag)] = { 'arch': xarch, 'fields': xfields @@ -746,6 +730,36 @@ class view(osv.osv): orm.transfer_modifiers_to_node(modifiers, node) return fields + def add_on_change(self, cr, user, model_name, arch): + """ Add attribute on_change="1" on fields that are dependencies of + computed fields on the same view. 
+ """ + # map each field object to its corresponding nodes in arch + field_nodes = collections.defaultdict(list) + + def collect(node, model): + if node.tag == 'field': + field = model._fields.get(node.get('name')) + if field: + field_nodes[field].append(node) + if field.relational: + model = self.pool.get(field.comodel_name) + for child in node: + collect(child, model) + + collect(arch, self.pool[model_name]) + + for field, nodes in field_nodes.iteritems(): + # if field should trigger an onchange, add on_change="1" on the + # nodes referring to field + model = self.pool[field.model_name] + if model._has_onchange(field, field_nodes): + for node in nodes: + if not node.get('on_change'): + node.set('on_change', '1') + + return arch + def _disable_workflow_buttons(self, cr, user, model, node): """ Set the buttons in node to readonly if the user can't activate them. """ if model is None or user == 1: @@ -784,7 +798,7 @@ class view(osv.osv): """ fields = {} Model = self.pool.get(model) - if not Model: + if Model is None: self.raise_view_error(cr, user, _('Model not found: %(model)s') % dict(model=model), view_id, context) if node.tag == 'diagram': @@ -800,6 +814,7 @@ class view(osv.osv): else: fields = Model.fields_get(cr, user, None, context) + node = self.add_on_change(cr, user, model, node) fields_def = self.postprocess(cr, user, model, node, view_id, False, fields, context=context) node = self._disable_workflow_buttons(cr, user, model, node) if node.tag in ('kanban', 'tree', 'form', 'gantt'): @@ -965,6 +980,7 @@ class view(osv.osv): xmlid = imd.search_read(cr, uid, domain, ['module', 'name'])[0] return '%s.%s' % (xmlid['module'], xmlid['name']) + @api.cr_uid_ids_context def render(self, cr, uid, id_or_xml_id, values=None, engine='ir.qweb', context=None): if isinstance(id_or_xml_id, list): id_or_xml_id = id_or_xml_id[0] diff --git a/openerp/addons/base/ir/ir_values.py b/openerp/addons/base/ir/ir_values.py index 
41d781ac68f940535a5cc5934ae2befef36604dc..4720f601f49f75947b0a151543cdaa621fcc1335 100644 --- a/openerp/addons/base/ir/ir_values.py +++ b/openerp/addons/base/ir/ir_values.py @@ -20,6 +20,7 @@ ############################################################################## import pickle +from openerp import tools from openerp.osv import osv, fields from openerp.osv.orm import except_orm @@ -188,6 +189,21 @@ class ir_values(osv.osv): if not cr.fetchone(): cr.execute('CREATE INDEX ir_values_key_model_key2_res_id_user_id_idx ON ir_values (key, model, key2, res_id, user_id)') + def create(self, cr, uid, vals, context=None): + res = super(ir_values, self).create(cr, uid, vals, context=context) + self.get_defaults_dict.clear_cache(self) + return res + + def write(self, cr, uid, ids, vals, context=None): + res = super(ir_values, self).write(cr, uid, ids, vals, context=context) + self.get_defaults_dict.clear_cache(self) + return res + + def unlink(self, cr, uid, ids, context=None): + res = super(ir_values, self).unlink(cr, uid, ids, context=context) + self.get_defaults_dict.clear_cache(self) + return res + def set_default(self, cr, uid, model, field_name, value, for_all_users=True, company_id=False, condition=False): """Defines a default value for the given model and field_name. Any previous default for the same scope (model, field_name, value, for_all_users, company_id, condition) @@ -319,6 +335,15 @@ class ir_values(osv.osv): (row['id'], row['name'], pickle.loads(row['value'].encode('utf-8')))) return defaults.values() + # use ormcache: this is called a lot by BaseModel.add_default_value()! + @tools.ormcache(skiparg=2) + def get_defaults_dict(self, cr, uid, model, condition=False): + """ Returns a dictionary mapping field names with their corresponding + default value. This method simply improves the returned value of + :meth:`~.get_defaults`. 
+ """ + return dict((f, v) for i, f, v in self.get_defaults(cr, uid, model, condition)) + def set_action(self, cr, uid, name, action_slot, model, action, res_id=False): """Binds an the given action to the given model's action slot - for later retrieval via :meth:`~.get_actions`. Any existing binding of the same action @@ -395,9 +420,9 @@ class ir_values(osv.osv): if not action['value']: continue # skip if undefined action_model_name, action_id = action['value'].split(',') - action_model = self.pool.get(action_model_name) - if not action_model: + if action_model_name not in self.pool: continue # unknow model? skip it + action_model = self.pool[action_model_name] fields = [field for field in action_model._all_columns if field not in EXCLUDED_FIELDS] # FIXME: needs cleanup try: diff --git a/openerp/addons/base/module/module.py b/openerp/addons/base/module/module.py index d0cb6b18f7b967a0644f5053f3a4a51d0bdf07dd..89f3e9d006e45c5dd5b2cc9bb949d1a53e7b4192 100644 --- a/openerp/addons/base/module/module.py +++ b/openerp/addons/base/module/module.py @@ -48,7 +48,8 @@ from openerp.modules.db import create_categories from openerp.modules import get_module_resource from openerp.tools.parse_version import parse_version from openerp.tools.translate import _ -from openerp.osv import fields, osv, orm +from openerp.osv import osv, orm, fields +from openerp import api, fields as fields2 _logger = logging.getLogger(__name__) @@ -374,34 +375,41 @@ class module(osv.osv): msg = _('Unable to process module "%s" because an external dependency is not met: %s') raise orm.except_orm(_('Error'), msg % (module_name, e.args[0])) - def state_update(self, cr, uid, ids, newstate, states_to_update, context=None, level=100): + @api.multi + def state_update(self, newstate, states_to_update, level=100): if level < 1: raise orm.except_orm(_('Error'), _('Recursion error in modules dependencies !')) + + # whether some modules are installed with demo data demo = False - for module in self.browse(cr, uid, 
ids, context=context): - mdemo = False + + for module in self: + # determine dependency modules to update/others + update_mods, ready_mods = self.browse(), self.browse() for dep in module.dependencies_id: if dep.state == 'unknown': raise orm.except_orm(_('Error'), _("You try to install module '%s' that depends on module '%s'.\nBut the latter module is not available in your system.") % (module.name, dep.name,)) - ids2 = self.search(cr, uid, [('name', '=', dep.name)]) - if dep.state != newstate: - mdemo = self.state_update(cr, uid, ids2, newstate, states_to_update, context, level - 1) or mdemo + if dep.depend_id.state == newstate: + ready_mods += dep.depend_id else: - od = self.browse(cr, uid, ids2)[0] - mdemo = od.demo or mdemo + update_mods += dep.depend_id + # update dependency modules that require it, and determine demo for module + update_demo = update_mods.state_update(newstate, states_to_update, level=level-1) + module_demo = module.demo or update_demo or any(mod.demo for mod in ready_mods) + demo = demo or module_demo + + # check dependencies and update module itself self.check_external_dependencies(module.name, newstate) - if not module.dependencies_id: - mdemo = module.demo if module.state in states_to_update: - self.write(cr, uid, [module.id], {'state': newstate, 'demo': mdemo}) - demo = demo or mdemo + module.write({'state': newstate, 'demo': module_demo}) + return demo def button_install(self, cr, uid, ids, context=None): # Mark the given modules to be installed. 
- self.state_update(cr, uid, ids, 'to install', ['uninstalled'], context) + self.state_update(cr, uid, ids, 'to install', ['uninstalled'], context=context) # Mark (recursively) the newly satisfied modules to also be installed @@ -524,7 +532,7 @@ class module(osv.osv): def button_upgrade(self, cr, uid, ids, context=None): depobj = self.pool.get('ir.module.module.dependency') - todo = self.browse(cr, uid, ids, context=context) + todo = list(self.browse(cr, uid, ids, context=context)) self.update_list(cr, uid) i = 0 @@ -598,7 +606,7 @@ class module(osv.osv): for key in values: old = getattr(mod, key) updated = isinstance(values[key], basestring) and tools.ustr(values[key]) or values[key] - if not old == updated: + if (old or updated) and updated != old: updated_values[key] = values[key] if terp.get('installable', True) and mod.state == 'uninstallable': updated_values['state'] = 'uninstalled' @@ -726,6 +734,7 @@ class module(osv.osv): cr.execute('INSERT INTO ir_module_module_dependency (module_id, name) values (%s, %s)', (mod_browse.id, dep)) for dep in (existing - needed): cr.execute('DELETE FROM ir_module_module_dependency WHERE module_id = %s and name = %s', (mod_browse.id, dep)) + self.invalidate_cache(cr, uid, ['dependencies_id'], [mod_browse.id]) def _update_category(self, cr, uid, mod_browse, category='Uncategorized'): current_category = mod_browse.category_id @@ -754,37 +763,47 @@ class module(osv.osv): if not mod.description: _logger.warning('module %s: description is empty !', mod.name) -class module_dependency(osv.osv): + +DEP_STATES = [ + ('uninstallable', 'Uninstallable'), + ('uninstalled', 'Not Installed'), + ('installed', 'Installed'), + ('to upgrade', 'To be upgraded'), + ('to remove', 'To be removed'), + ('to install', 'To be installed'), + ('unknown', 'Unknown'), +] + +class module_dependency(osv.Model): _name = "ir.module.module.dependency" _description = "Module dependency" - def _state(self, cr, uid, ids, name, args, context=None): - result = {} - 
mod_obj = self.pool.get('ir.module.module') - for md in self.browse(cr, uid, ids): - ids = mod_obj.search(cr, uid, [('name', '=', md.name)]) - if ids: - result[md.id] = mod_obj.read(cr, uid, [ids[0]], ['state'])[0]['state'] - else: - result[md.id] = 'unknown' - return result + # the dependency name + name = fields2.Char(index=True) - _columns = { - # The dependency name - 'name': fields.char('Name', select=True), + # the module that depends on it + module_id = fields2.Many2one('ir.module.module', 'Module', ondelete='cascade') - # The module that depends on it - 'module_id': fields.many2one('ir.module.module', 'Module', select=True, ondelete='cascade'), + # the module corresponding to the dependency, and its status + depend_id = fields2.Many2one('ir.module.module', 'Dependency', compute='_compute_depend') + state = fields2.Selection(DEP_STATES, string='Status', compute='_compute_state') + + @api.multi + @api.depends('name') + def _compute_depend(self): + # retrieve all modules corresponding to the dependency names + names = list(set(dep.name for dep in self)) + mods = self.env['ir.module.module'].search([('name', 'in', names)]) + + # index modules by name, and assign dependencies + name_mod = dict((mod.name, mod) for mod in mods) + for dep in self: + dep.depend_id = name_mod.get(dep.name) + + @api.one + @api.depends('depend_id.state') + def _compute_state(self): + self.state = self.depend_id.state or 'unknown' - 'state': fields.function(_state, type='selection', selection=[ - ('uninstallable', 'Uninstallable'), - ('uninstalled', 'Not Installed'), - ('installed', 'Installed'), - ('to upgrade', 'To be upgraded'), - ('to remove', 'To be removed'), - ('to install', 'To be installed'), - ('unknown', 'Unknown'), - ], string='Status', readonly=True, select=True), - } # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: diff --git a/openerp/addons/base/module/wizard/base_import_language.py b/openerp/addons/base/module/wizard/base_import_language.py index 
d8757605d64f2e5f6ebaa18f3d60e909cef1ad9d..bd554a47c5c450429788d253ed3eadf6b488f092 100644 --- a/openerp/addons/base/module/wizard/base_import_language.py +++ b/openerp/addons/base/module/wizard/base_import_language.py @@ -44,7 +44,7 @@ class base_language_import(osv.osv_memory): context = {} this = self.browse(cr, uid, ids[0]) if this.overwrite: - context.update(overwrite=True) + context = dict(context, overwrite=True) fileobj = TemporaryFile('w+') try: fileobj.write(base64.decodestring(this.data)) diff --git a/openerp/addons/base/module/wizard/base_module_update.py b/openerp/addons/base/module/wizard/base_module_update.py index e2d8ce01d4a8f673ce8aae14a0a33b9ba0d2e2f9..2f3d0cc1af277297544d5d8289c1199863a94306 100644 --- a/openerp/addons/base/module/wizard/base_module_update.py +++ b/openerp/addons/base/module/wizard/base_module_update.py @@ -1,49 +1,23 @@ # -*- coding: utf-8 -*- -############################################################################## -# -# OpenERP, Open Source Management Solution -# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>). -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU Affero General Public License as -# published by the Free Software Foundation, either version 3 of the -# License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Affero General Public License for more details. -# -# You should have received a copy of the GNU Affero General Public License -# along with this program. If not, see <http://www.gnu.org/licenses/>. 
-# -############################################################################## -from openerp.osv import osv, fields - -class base_module_update(osv.osv_memory): - """ Update Module """ +from openerp import models, fields, api +class base_module_update(models.TransientModel): _name = "base.module.update" _description = "Update Module" - _columns = { - 'update': fields.integer('Number of modules updated', readonly=True), - 'add': fields.integer('Number of modules added', readonly=True), - 'state':fields.selection([('init','init'),('done','done')], 'Status', readonly=True), - } - - _defaults = { - 'state': 'init', - } + updated = fields.Integer('Number of modules updated', readonly=True) + added = fields.Integer('Number of modules added', readonly=True) + state = fields.Selection([('init', 'init'), ('done', 'done')], 'Status', readonly=True, default='init') - def update_module(self, cr, uid, ids, context=None): - module_obj = self.pool.get('ir.module.module') - update, add = module_obj.update_list(cr, uid,) - self.write(cr, uid, ids, {'update': update, 'add': add, 'state': 'done'}, context=context) + @api.one + def update_module(self): + self.updated, self.added = self.env['ir.module.module'].update_list() + self.state = 'done' return False - def action_module_open(self, cr, uid, ids, context): + @api.multi + def action_module_open(self): res = { 'domain': str([]), 'name': 'Modules', @@ -54,5 +28,3 @@ class base_module_update(osv.osv_memory): 'type': 'ir.actions.act_window', } return res - -# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: diff --git a/openerp/addons/base/module/wizard/base_module_update_view.xml b/openerp/addons/base/module/wizard/base_module_update_view.xml index 3dccc0615934f8537f65a45b000eee7fc25d4b3b..eff8e790677fc371b3e343458486719800778215 100644 --- a/openerp/addons/base/module/wizard/base_module_update_view.xml +++ b/openerp/addons/base/module/wizard/base_module_update_view.xml @@ -13,8 +13,8 @@ <label string="Click on 
Update below to start the process..." /> </group> <group states="done" > - <field name="update"/> - <field name="add" /> + <field name="updated"/> + <field name="added" /> </group> <footer> <div states="init"> diff --git a/openerp/addons/base/res/ir_property.py b/openerp/addons/base/res/ir_property.py index 7d4d115220f3dc984dd8f5ea2880c33239be368b..859f97330d4606e5fead7316c59181fce4c4b86f 100644 --- a/openerp/addons/base/res/ir_property.py +++ b/openerp/addons/base/res/ir_property.py @@ -19,16 +19,30 @@ # ############################################################################## +from operator import itemgetter import time -from openerp.osv import osv, fields -from openerp.osv.orm import browse_record, browse_null +from openerp import models, api +from openerp.osv import osv, orm, fields from openerp.tools.misc import attrgetter # ------------------------------------------------------------------------- # Properties # ------------------------------------------------------------------------- +TYPE2FIELD = { + 'char': 'value_text', + 'float': 'value_float', + 'boolean': 'value_integer', + 'integer': 'value_integer', + 'text': 'value_text', + 'binary': 'value_binary', + 'many2one': 'value_reference', + 'date': 'value_datetime', + 'datetime': 'value_datetime', + 'selection': 'value_text', +} + class ir_property(osv.osv): _name = 'ir.property' @@ -80,24 +94,12 @@ class ir_property(osv.osv): else: type_ = self._defaults['type'] - type2field = { - 'char': 'value_text', - 'float': 'value_float', - 'boolean' : 'value_integer', - 'integer': 'value_integer', - 'text': 'value_text', - 'binary': 'value_binary', - 'many2one': 'value_reference', - 'date' : 'value_datetime', - 'datetime' : 'value_datetime', - 'selection': 'value_text', - } - field = type2field.get(type_) + field = TYPE2FIELD.get(type_) if not field: raise osv.except_osv('Error', 'Invalid type') if field == 'value_reference': - if isinstance(value, browse_record): + if isinstance(value, orm.BaseModel): value = 
'%s,%d' % (value._name, value.id) elif isinstance(value, (int, long)): field_id = values.get('fields_id') @@ -132,9 +134,10 @@ class ir_property(osv.osv): return record.value_binary elif record.type == 'many2one': if not record.value_reference: - return browse_null() + return False model, resource_id = record.value_reference.split(',') - return self.pool.get(model).browse(cr, uid, int(resource_id), context=context) + value = self.pool[model].browse(cr, uid, int(resource_id), context=context) + return value.exists() elif record.type == 'datetime': return record.value_datetime elif record.type == 'date': @@ -154,12 +157,6 @@ class ir_property(osv.osv): return self.get_by_record(cr, uid, record, context=context) return False - def _get_domain_default(self, cr, uid, prop_name, model, context=None): - domain = self._get_domain(cr, uid, prop_name, model, context=context) - if domain is None: - return None - return ['&', ('res_id', '=', False)] + domain - def _get_domain(self, cr, uid, prop_name, model, context=None): context = context or {} cr.execute('select id from ir_model_fields where name=%s and model=%s', (prop_name, model)) @@ -167,14 +164,143 @@ class ir_property(osv.osv): if not res: return None - if 'force_company' in context and context['force_company']: - cid = context['force_company'] - else: + cid = context.get('force_company') + if not cid: company = self.pool.get('res.company') cid = company._company_default_get(cr, uid, model, res[0], context=context) - domain = ['&', ('fields_id', '=', res[0]), - '|', ('company_id', '=', cid), ('company_id', '=', False)] - return domain + return [('fields_id', '=', res[0]), ('company_id', 'in', [cid, False])] + + @api.model + def get_multi(self, name, model, ids): + """ Read the property field `name` for the records of model `model` with + the given `ids`, and return a dictionary mapping `ids` to their + corresponding value. 
+ """ + if not ids: + return {} + + domain = self._get_domain(name, model) + if domain is None: + return dict.fromkeys(ids, False) + + # retrieve the values for the given ids and the default value, too + refs = {('%s,%s' % (model, id)): id for id in ids} + refs[False] = False + domain += [('res_id', 'in', list(refs))] + + # note: order by 'company_id asc' will return non-null values first + props = self.search(domain, order='company_id asc') + result = {} + for prop in props: + # for a given res_id, take the first property only + id = refs.pop(prop.res_id, None) + if id is not None: + result[id] = self.get_by_record(prop) + + # set the default value to the ids that are not in result + default_value = result.pop(False, False) + for id in ids: + result.setdefault(id, default_value) + + return result + + @api.model + def set_multi(self, name, model, values): + """ Assign the property field `name` for the records of model `model` + with `values` (dictionary mapping record ids to their value). 
+ """ + def clean(value): + return value.id if isinstance(value, models.BaseModel) else value + + if not values: + return + + domain = self._get_domain(name, model) + if domain is None: + raise Exception() + + # retrieve the default value for the field + default_value = clean(self.get(name, model)) + + # retrieve the properties corresponding to the given record ids + self._cr.execute("SELECT id FROM ir_model_fields WHERE name=%s AND model=%s", (name, model)) + field_id = self._cr.fetchone()[0] + company_id = self.env['res.company']._company_default_get(model, field_id) + refs = {('%s,%s' % (model, id)): id for id in values} + props = self.search([ + ('fields_id', '=', field_id), + ('company_id', '=', company_id), + ('res_id', 'in', list(refs)), + ]) + + # modify existing properties + for prop in props: + id = refs.pop(prop.res_id) + value = clean(values[id]) + if value == default_value: + prop.unlink() + elif value != clean(prop.get_by_record(prop)): + prop.write({'value': value}) + + # create new properties for records that do not have one yet + for ref, id in refs.iteritems(): + value = clean(values[id]) + if value != default_value: + self.create({ + 'fields_id': field_id, + 'company_id': company_id, + 'res_id': ref, + 'name': name, + 'value': value, + 'type': self.env[model]._fields[name].type, + }) + + @api.model + def search_multi(self, name, model, operator, value): + """ Return a domain for the records that match the given condition. """ + field = self.env[model]._fields[name] + if field.type == 'many2one': + comodel = field.comodel_name + def makeref(value): + return value and '%s,%s' % (comodel, value) + if operator in ('=', '!=', '<=', '<', '>', '>='): + value = makeref(value) + elif operator in ('in', 'not in'): + value = map(makeref, value) + elif operator in ('=like', '=ilike', 'like', 'not like', 'ilike', 'not ilike'): + # most probably inefficient... 
but correct + target = self.env[comodel] + target_names = target.name_search(value, operator=operator, limit=None) + target_ids = map(itemgetter(0), target_names) + operator, value = 'in', map(makeref, target_ids) + + # retrieve the properties that match the condition + domain = self._get_domain(name, model) + if domain is None: + raise Exception() + props = self.search(domain + [(TYPE2FIELD[field.type], operator, value)]) + + # retrieve the records corresponding to the properties that match + good_ids = [] + default_matches = False + for prop in props: + if prop.res_id: + res_model, res_id = prop.res_id.split(',') + good_ids.append(int(res_id)) + else: + default_matches = True + + if default_matches: + # exclude all records with a property that does not match + all_ids = [] + props = self.search(domain + [('res_id', '!=', False)]) + for prop in props: + res_model, res_id = prop.res_id.split(',') + all_ids.append(int(res_id)) + bad_ids = list(set(all_ids) - set(good_ids)) + return [('id', 'not in', bad_ids)] + else: + return [('id', 'in', good_ids)] # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: diff --git a/openerp/addons/base/res/res_company.py b/openerp/addons/base/res/res_company.py index b3003199e5df1ee2be57e3fa3e84397e01ad34fd..1a83e8d8465a766d1a1bd56a62cea4c77fc5e386 100644 --- a/openerp/addons/base/res/res_company.py +++ b/openerp/addons/base/res/res_company.py @@ -84,7 +84,7 @@ class res_company(osv.osv): if company.partner_id: address_data = part_obj.address_get(cr, openerp.SUPERUSER_ID, [company.partner_id.id], adr_pref=['default']) if address_data['default']: - address = part_obj.read(cr, openerp.SUPERUSER_ID, address_data['default'], field_names, context=context) + address = part_obj.read(cr, openerp.SUPERUSER_ID, [address_data['default']], field_names, context=context)[0] for field in field_names: result[company.id][field] = address[field] or False return result @@ -176,6 +176,7 @@ class res_company(osv.osv): res += '\n%s: %s' % 
(title, ', '.join(name for id, name in account_names)) return {'value': {'rml_footer': res, 'rml_footer_readonly': res}} + def onchange_state(self, cr, uid, ids, state_id, context=None): if state_id: return {'value':{'country_id': self.pool.get('res.country.state').browse(cr, uid, state_id, context).country_id.id }} @@ -209,8 +210,7 @@ class res_company(osv.osv): return res def name_search(self, cr, uid, name='', args=None, operator='ilike', context=None, limit=100): - if context is None: - context = {} + context = dict(context or {}) if context.pop('user_preference', None): # We browse as superuser. Otherwise, the user would be able to # select only the currently visible companies (according to rules, diff --git a/openerp/addons/base/res/res_config.py b/openerp/addons/base/res/res_config.py index 56065465441643a0885edc0573d889d635b8c0b7..b5438e7c400457dd4b10e9bffbed61dfdbf5c3ef 100644 --- a/openerp/addons/base/res/res_config.py +++ b/openerp/addons/base/res/res_config.py @@ -294,10 +294,10 @@ class res_config_installer(osv.osv_memory, res_config_module_installation_mixin) def _already_installed(self, cr, uid, context=None): """ For each module (boolean fields in a res.config.installer), check if it's already installed (either 'to install', 'to upgrade' - or 'installed') and if it is return the module's browse_record + or 'installed') and if it is return the module's record :returns: a list of all installed modules in this installer - :rtype: [browse_record] + :rtype: recordset (collection of Record) """ modules = self.pool['ir.module.module'] @@ -333,7 +333,7 @@ class res_config_installer(osv.osv_memory, res_config_module_installation_mixin) for installer in self.read(cr, uid, ids, context=context) for module_name, to_install in installer.iteritems() if module_name != 'id' - if type(self._columns[module_name]) is fields.boolean + if type(self._columns.get(module_name)) is fields.boolean if to_install) hooks_results = set() diff --git 
a/openerp/addons/base/res/res_currency.py b/openerp/addons/base/res/res_currency.py index e3f359113199b5464fbaa7072291b693cff156f1..697d5ef2d2fd281ae274d971044288ed83813c1b 100644 --- a/openerp/addons/base/res/res_currency.py +++ b/openerp/addons/base/res/res_currency.py @@ -22,6 +22,7 @@ import re import time +from openerp import api, fields as fields2 from openerp import tools from openerp.osv import fields, osv from openerp.tools import float_round, float_is_zero, float_compare @@ -74,7 +75,6 @@ class res_currency(osv.osv): 'rounding': fields.float('Rounding Factor', digits=(12,6)), 'active': fields.boolean('Active'), 'company_id':fields.many2one('res.company', 'Company'), - 'date': fields.date('Date'), 'base': fields.boolean('Base'), 'position': fields.selection([('after','After Amount'),('before','Before Amount')], 'Symbol Position', help="Determines where the currency symbol should be placed after or before the amount.") } @@ -106,19 +106,12 @@ class res_currency(osv.osv): ON res_currency (name, (COALESCE(company_id,-1)))""") - def read(self, cr, user, ids, fields=None, context=None, load='_classic_read'): - res = super(res_currency, self).read(cr, user, ids, fields, context, load) - currency_rate_obj = self.pool.get('res.currency.rate') - values = res - if not isinstance(values, list): - values = [values] - for r in values: - if r.__contains__('rate_ids'): - rates=r['rate_ids'] - if rates: - currency_date = currency_rate_obj.read(cr, user, rates[0], ['name'])['name'] - r['date'] = currency_date - return res + date = fields2.Date(compute='compute_date') + + @api.one + @api.depends('rate_ids.name') + def compute_date(self): + self.date = self.rate_ids[:1].name def name_search(self, cr, user, name='', args=None, operator='ilike', context=None, limit=100): if not args: @@ -140,16 +133,38 @@ class res_currency(osv.osv): reads = self.read(cr, uid, ids, ['name','symbol'], context=context, load='_classic_write') return [(x['id'], tools.ustr(x['name'])) for x in 
reads] + @api.v8 + def round(self, amount): + """ Return `amount` rounded according to currency `self`. """ + return float_round(amount, precision_rounding=self.rounding) + + @api.v7 def round(self, cr, uid, currency, amount): """Return ``amount`` rounded according to ``currency``'s rounding rules. - :param browse_record currency: currency for which we are rounding + :param Record currency: currency for which we are rounding :param float amount: the amount to round :return: rounded float """ return float_round(amount, precision_rounding=currency.rounding) + @api.v8 + def compare_amounts(self, amount1, amount2): + """ Compare `amount1` and `amount2` after rounding them according to + `self`'s precision. An amount is considered lower/greater than + another amount if their rounded value is different. This is not the + same as having a non-zero difference! + + For example 1.432 and 1.431 are equal at 2 digits precision, so this + method would return 0. However 0.006 and 0.002 are considered + different (returns 1) because they respectively round to 0.01 and + 0.0, even though 0.006-0.002 = 0.004 which would be considered zero + at 2 digits precision. + """ + return float_compare(amount1, amount2, precision_rounding=self.rounding) + + @api.v7 def compare_amounts(self, cr, uid, currency, amount1, amount2): """Compare ``amount1`` and ``amount2`` after rounding them according to the given currency's precision.. @@ -162,7 +177,7 @@ class res_currency(osv.osv): they respectively round to 0.01 and 0.0, even though 0.006-0.002 = 0.004 which would be considered zero at 2 digits precision. - :param browse_record currency: currency for which we are rounding + :param Record currency: currency for which we are rounding :param float amount1: first amount to compare :param float amount2: second amount to compare :return: (resp.) -1, 0 or 1, if ``amount1`` is (resp.) 
lower than, @@ -171,6 +186,19 @@ class res_currency(osv.osv): """ return float_compare(amount1, amount2, precision_rounding=currency.rounding) + @api.v8 + def is_zero(self, amount): + """ Return true if `amount` is small enough to be treated as zero + according to currency `self`'s rounding rules. + + Warning: ``is_zero(amount1-amount2)`` is not always equivalent to + ``compare_amounts(amount1,amount2) == 0``, as the former will round + after computing the difference, while the latter will round before, + giving different results, e.g., 0.006 and 0.002 at 2 digits precision. + """ + return float_is_zero(amount, precision_rounding=self.rounding) + + @api.v7 def is_zero(self, cr, uid, currency, amount): """Returns true if ``amount`` is small enough to be treated as zero according to ``currency``'s rounding rules. @@ -180,7 +208,7 @@ class res_currency(osv.osv): computing the difference, while the latter will round before, giving different results for e.g. 0.006 and 0.002 at 2 digits precision. - :param browse_record currency: currency for which we are rounding + :param Record currency: currency for which we are rounding :param float amount: amount to compare with currency's zero """ return float_is_zero(amount, precision_rounding=currency.rounding) @@ -216,6 +244,7 @@ class res_currency(osv.osv): else: return from_amount * rate + @api.v7 def compute(self, cr, uid, from_currency_id, to_currency_id, from_amount, round=True, context=None): context = context or {} @@ -228,6 +257,19 @@ class res_currency(osv.osv): to_currency = (xc[0].id == to_currency_id and xc[0]) or xc[1] return self._compute(cr, uid, from_currency, to_currency, from_amount, round, context) + @api.v8 + def compute(self, from_amount, to_currency, round=True): + """ Convert `from_amount` from currency `self` to `to_currency`. 
""" + assert self, "compute from unknown currency" + assert to_currency, "compute to unknown currency" + # apply conversion rate + if self == to_currency: + to_amount = from_amount + else: + to_amount = from_amount * self._get_conversion_rate(self, to_currency) + # apply rounding + return to_currency.round(to_amount) if round else to_amount + class res_currency_rate(osv.osv): _name = "res.currency.rate" _description = "Currency Rate" @@ -238,7 +280,7 @@ class res_currency_rate(osv.osv): 'currency_id': fields.many2one('res.currency', 'Currency', readonly=True), } _defaults = { - 'name': lambda *a: time.strftime('%Y-%m-%d'), + 'name': lambda *a: time.strftime('%Y-%m-%d 00:00:00'), } _order = "name desc" diff --git a/openerp/addons/base/res/res_partner.py b/openerp/addons/base/res/res_partner.py index baf85787dd67ff0a1bbd8de162997a4133914c3d..95b8ca15f04263b78a482c70c9c9064f02d694e1 100644 --- a/openerp/addons/base/res/res_partner.py +++ b/openerp/addons/base/res/res_partner.py @@ -26,44 +26,44 @@ import pytz import urlparse import openerp -from openerp import SUPERUSER_ID -from openerp import tools +from openerp import tools, api from openerp.osv import osv, fields from openerp.osv.expression import get_unaccent_wrapper from openerp.tools.translate import _ +ADDRESS_FORMAT_LAYOUTS = { + '%(city)s %(state_code)s\n%(zip)s': """ + <div class="address_format"> + <field name="city" placeholder="City" style="width: 50%%"/> + <field name="state_id" class="oe_no_button" placeholder="State" style="width: 47%%" options='{"no_open": true}'/> + <br/> + <field name="zip" placeholder="ZIP"/> + </div> + """, + '%(zip)s %(city)s': """ + <div class="address_format"> + <field name="zip" placeholder="ZIP" style="width: 40%%"/> + <field name="city" placeholder="City" style="width: 57%%"/> + <br/> + <field name="state_id" class="oe_no_button" placeholder="State" options='{"no_open": true}'/> + </div> + """, + '%(city)s\n%(state_name)s\n%(zip)s': """ + <div class="address_format"> + 
<field name="city" placeholder="City"/> + <field name="state_id" class="oe_no_button" placeholder="State" options='{"no_open": true}'/> + <field name="zip" placeholder="ZIP"/> + </div> + """ +} + + class format_address(object): - def fields_view_get_address(self, cr, uid, arch, context={}): - user_obj = self.pool['res.users'] - fmt = user_obj.browse(cr, SUPERUSER_ID, uid, context).company_id.country_id - fmt = fmt and fmt.address_format - layouts = { - '%(city)s %(state_code)s\n%(zip)s': """ - <div class="address_format"> - <field name="city" placeholder="City" style="width: 50%%"/> - <field name="state_id" class="oe_no_button" placeholder="State" style="width: 47%%" options='{"no_open": true}'/> - <br/> - <field name="zip" placeholder="ZIP"/> - </div> - """, - '%(zip)s %(city)s': """ - <div class="address_format"> - <field name="zip" placeholder="ZIP" style="width: 40%%"/> - <field name="city" placeholder="City" style="width: 57%%"/> - <br/> - <field name="state_id" class="oe_no_button" placeholder="State" options='{"no_open": true}'/> - </div> - """, - '%(city)s\n%(state_name)s\n%(zip)s': """ - <div class="address_format"> - <field name="city" placeholder="City"/> - <field name="state_id" class="oe_no_button" placeholder="State" options='{"no_open": true}'/> - <field name="zip" placeholder="ZIP"/> - </div> - """ - } - for k,v in layouts.items(): - if fmt and (k in fmt): + @api.model + def fields_view_get_address(self, arch): + fmt = self.env.user.company_id.country_id.address_format or '' + for k, v in ADDRESS_FORMAT_LAYOUTS.items(): + if k in fmt: doc = etree.fromstring(arch) for node in doc.xpath("//div[@class='address_format']"): tree = etree.fromstring(v) @@ -73,53 +73,53 @@ class format_address(object): return arch -def _tz_get(self,cr,uid, context=None): +@api.model +def _tz_get(self): # put POSIX 'Etc/*' entries at the end to avoid confusing users - see bug 1086728 return [(tz,tz) for tz in sorted(pytz.all_timezones, key=lambda tz: tz if not 
tz.startswith('Etc/') else '_')] -class res_partner_category(osv.osv): + +class res_partner_category(osv.Model): def name_get(self, cr, uid, ids, context=None): - """Return the categories' display name, including their direct - parent by default. - - :param dict context: the ``partner_category_display`` key can be - used to select the short version of the - category name (without the direct parent), - when set to ``'short'``. The default is - the long version.""" + """ Return the categories' display name, including their direct + parent by default. + + If ``context['partner_category_display']`` is ``'short'``, the short + version of the category name (without the direct parent) is used. + The default is the long version. + """ + if not isinstance(ids, list): + ids = [ids] if context is None: context = {} + if context.get('partner_category_display') == 'short': return super(res_partner_category, self).name_get(cr, uid, ids, context=context) - if isinstance(ids, (int, long)): - ids = [ids] - reads = self.read(cr, uid, ids, ['name', 'parent_id'], context=context) + res = [] - for record in reads: - name = record['name'] - if record['parent_id']: - name = record['parent_id'][1] + ' / ' + name - res.append((record['id'], name)) + for category in self.browse(cr, uid, ids, context=context): + names = [] + current = category + while current: + names.append(current.name) + current = current.parent_id + res.append((category.id, ' / '.join(reversed(names)))) return res - def name_search(self, cr, uid, name, args=None, operator='ilike', context=None, limit=100): - if not args: - args = [] - if not context: - context = {} + @api.model + def name_search(self, name, args=None, operator='ilike', limit=100): + args = args or [] if name: # Be sure name_search is symetric to name_get name = name.split(' / ')[-1] - ids = self.search(cr, uid, [('name', operator, name)] + args, limit=limit, context=context) - else: - ids = self.search(cr, uid, args, limit=limit, context=context) - 
return self.name_get(cr, uid, ids, context) + args = [('name', operator, name)] + args + categories = self.search(args, limit=limit) + return categories.name_get() - - def _name_get_fnc(self, cr, uid, ids, prop, unknow_none, context=None): - res = self.name_get(cr, uid, ids, context=context) - return dict(res) + @api.multi + def _name_get_fnc(self, field_name, arg): + return dict(self.name_get()) _description = 'Partner Tags' _name = 'res.partner.category' @@ -143,6 +143,7 @@ class res_partner_category(osv.osv): _parent_order = 'name' _order = 'parent_left' + class res_partner_title(osv.osv): _name = 'res.partner.title' _order = 'name' @@ -155,16 +156,17 @@ class res_partner_title(osv.osv): 'domain': 'contact', } -def _lang_get(self, cr, uid, context=None): - lang_pool = self.pool['res.lang'] - ids = lang_pool.search(cr, uid, [], context=context) - res = lang_pool.read(cr, uid, ids, ['code', 'name'], context) - return [(r['code'], r['name']) for r in res] + +@api.model +def _lang_get(self): + languages = self.env['res.lang'].search([]) + return [(language.code, language.name) for language in languages] # fields copy if 'use_parent_address' is checked ADDRESS_FIELDS = ('street', 'street2', 'zip', 'city', 'state_id', 'country_id') -class res_partner(osv.osv, format_address): + +class res_partner(osv.Model, format_address): _description = 'Partner' _name = "res.partner" @@ -174,26 +176,23 @@ class res_partner(osv.osv, format_address): res[partner.id] = self._display_address(cr, uid, partner, context=context) return res - def _get_image(self, cr, uid, ids, name, args, context=None): - result = dict.fromkeys(ids, False) - for obj in self.browse(cr, uid, ids, context=context): - result[obj.id] = tools.image_get_resized_images(obj.image) - return result + @api.multi + def _get_tz_offset(self, name, args): + return dict( + (p.id, datetime.datetime.now(pytz.timezone(p.tz or 'GMT')).strftime('%z')) + for p in self) - def _get_tz_offset(self, cr, uid, ids, name, args, 
context=None): - result = dict.fromkeys(ids, False) - for obj in self.browse(cr, uid, ids, context=context): - result[obj.id] = datetime.datetime.now(pytz.timezone(obj.tz or 'GMT')).strftime('%z') - return result + @api.multi + def _get_image(self, name, args): + return dict((p.id, tools.image_get_resized_images(p.image)) for p in self) - def _set_image(self, cr, uid, id, name, value, args, context=None): - return self.write(cr, uid, [id], {'image': tools.image_resize_image_big(value)}, context=context) + @api.one + def _set_image(self, name, value, args): + return self.write({'image': tools.image_resize_image_big(value)}) - def _has_image(self, cr, uid, ids, name, args, context=None): - result = {} - for obj in self.browse(cr, uid, ids, context=context): - result[obj.id] = obj.image != False - return result + @api.multi + def _has_image(self, name, args): + return dict((p.id, bool(p.image)) for p in self) def _commercial_partner_compute(self, cr, uid, ids, name, args, context=None): """ Returns the partner that is considered the commercial @@ -266,8 +265,6 @@ class res_partner(osv.osv, format_address): 'city': fields.char('City'), 'state_id': fields.many2one("res.country.state", 'State', ondelete='restrict'), 'country_id': fields.many2one('res.country', 'Country', ondelete='restrict'), - 'country': fields.related('country_id', type='many2one', relation='res.country', string='Country', - deprecated="This field will be removed as of OpenERP 7.1, use country_id instead"), 'email': fields.char('Email'), 'phone': fields.char('Phone'), 'fax': fields.char('Fax'), @@ -304,16 +301,15 @@ class res_partner(osv.osv, format_address): 'commercial_partner_id': fields.function(_commercial_partner_id, type='many2one', relation='res.partner', string='Commercial Entity', store=_commercial_partner_store_triggers) } - def _default_category(self, cr, uid, context=None): - if context is None: - context = {} - if context.get('category_id'): - return [context['category_id']] - return 
False + @api.model + def _default_category(self): + category_id = self.env.context.get('category_id', False) + return [category_id] if category_id else False - def _get_default_image(self, cr, uid, is_company, context=None, colorize=False): - img_path = openerp.modules.get_module_resource('base', 'static/src/img', - ('company_image.png' if is_company else 'avatar.png')) + @api.model + def _get_default_image(self, is_company, colorize=False): + img_path = openerp.modules.get_module_resource( + 'base', 'static/src/img', 'company_image.png' if is_company else 'avatar.png') with open(img_path, 'rb') as f: image = f.read() @@ -331,13 +327,17 @@ class res_partner(osv.osv, format_address): res['arch'] = self.fields_view_get_address(cr, user, res['arch'], context=context) return res + @api.model + def _default_company(self): + return self.env['res.company']._company_default_get('res.partner') + _defaults = { 'active': True, - 'lang': lambda self, cr, uid, ctx: ctx.get('lang', 'en_US'), - 'tz': lambda self, cr, uid, ctx: ctx.get('tz', False), + 'lang': api.model(lambda self: self.env.lang), + 'tz': api.model(lambda self: self.env.context.get('tz', False)), 'customer': True, 'category_id': _default_category, - 'company_id': lambda self, cr, uid, ctx: self.pool['res.company']._company_default_get(cr, uid, 'res.partner', context=ctx), + 'company_id': _default_company, 'color': 0, 'is_company': False, 'type': 'contact', # type 'default' is wildcard and thus inappropriate @@ -349,15 +349,14 @@ class res_partner(osv.osv, format_address): (osv.osv._check_recursion, 'You cannot create recursive Partner hierarchies.', ['parent_id']), ] - def copy(self, cr, uid, id, default=None, context=None): - if default is None: - default = {} - default['user_ids'] = False - name = self.read(cr, uid, [id], ['name'], context)[0]['name'] - default.update({'name': _('%s (copy)') % name}) - return super(res_partner, self).copy(cr, uid, id, default, context) + @api.one + def copy(self, default=None): 
+ default = dict(default or {}) + default['name'] = _('%s (copy)') % self.name + return super(res_partner, self).copy(default) - def onchange_type(self, cr, uid, ids, is_company, context=None): + @api.multi + def onchange_type(self, is_company): value = {} value['title'] = False if is_company: @@ -389,10 +388,11 @@ class res_partner(osv.osv, format_address): result['value'] = {'use_parent_address': False} return result - def onchange_state(self, cr, uid, ids, state_id, context=None): + @api.multi + def onchange_state(self, state_id): if state_id: - country_id = self.pool['res.country.state'].browse(cr, uid, state_id, context).country_id.id - return {'value':{'country_id':country_id}} + state = self.env['res.country.state'].browse(state_id) + return {'value': {'country_id': state.country_id.id}} return {} def _check_ean_key(self, cr, uid, ids, context=None): @@ -518,34 +518,36 @@ class res_partner(osv.osv, format_address): website = urlparse.urlunparse(('http', netloc, path, params, query, fragment)) return website - def write(self, cr, uid, ids, vals, context=None): - if isinstance(ids, (int, long)): - ids = [ids] - #res.partner must only allow to set the company_id of a partner if it - #is the same as the company of all users that inherit from this partner - #(this is to allow the code from res_users to write to the partner!) or - #if setting the company_id to False (this is compatible with any user company) + @api.multi + def write(self, vals): + # res.partner must only allow to set the company_id of a partner if it + # is the same as the company of all users that inherit from this partner + # (this is to allow the code from res_users to write to the partner!) 
or + # if setting the company_id to False (this is compatible with any user + # company) if vals.get('website'): vals['website'] = self._clean_website(vals['website']) if vals.get('company_id'): - for partner in self.browse(cr, uid, ids, context=context): + company = self.env['res.company'].browse(vals['company_id']) + for partner in self: if partner.user_ids: - user_companies = set([user.company_id.id for user in partner.user_ids]) - if len(user_companies) > 1 or vals['company_id'] not in user_companies: + companies = set(user.company_id for user in partner.user_ids) + if len(companies) > 1 or company not in companies: raise osv.except_osv(_("Warning"),_("You can not change the company as the partner/user has multiple user linked with different companies.")) - result = super(res_partner,self).write(cr, uid, ids, vals, context=context) - for partner in self.browse(cr, uid, ids, context=context): - self._fields_sync(cr, uid, partner, vals, context) + + result = super(res_partner, self).write(vals) + for partner in self: + self._fields_sync(partner, vals) return result - def create(self, cr, uid, vals, context=None): + @api.model + def create(self, vals): if vals.get('website'): vals['website'] = self._clean_website(vals['website']) - new_id = super(res_partner, self).create(cr, uid, vals, context=context) - partner = self.browse(cr, uid, new_id, context=context) - self._fields_sync(cr, uid, partner, vals, context) - self._handle_first_contact_creation(cr, uid, partner, context) - return new_id + partner = super(res_partner, self).create(vals) + self._fields_sync(partner, vals) + self._handle_first_contact_creation(partner) + return partner def open_commercial_entity(self, cr, uid, ids, context=None): """ Utility method used to add an "Open Company" button in partner views """ @@ -749,14 +751,11 @@ class res_partner(osv.osv, format_address): return False return _('Partners: ')+self.pool['res.partner.category'].browse(cr, uid, context['category_id'], context).name - 
def main_partner(self, cr, uid): - ''' Return the id of the main partner - ''' - model_data = self.pool['ir.model.data'] - return model_data.browse(cr, uid, - model_data.search(cr, uid, [('module','=','base'), - ('name','=','main_partner')])[0], - ).res_id + @api.model + @api.returns('self') + def main_partner(self): + ''' Return the main partner ''' + return self.env.ref('base.main_partner') def _display_address(self, cr, uid, address, without_company=False, context=None): @@ -772,14 +771,14 @@ class res_partner(osv.osv, format_address): # get the information that will be injected into the display format # get the address format - address_format = address.country_id and address.country_id.address_format or \ + address_format = address.country_id.address_format or \ "%(street)s\n%(street2)s\n%(city)s %(state_code)s %(zip)s\n%(country_name)s" args = { - 'state_code': address.state_id and address.state_id.code or '', - 'state_name': address.state_id and address.state_id.name or '', - 'country_code': address.country_id and address.country_id.code or '', - 'country_name': address.country_id and address.country_id.name or '', - 'company_name': address.parent_id and address.parent_id.name or '', + 'state_code': address.state_id.code or '', + 'state_name': address.state_id.name or '', + 'country_code': address.country_id.code or '', + 'country_name': address.country_id.name or '', + 'company_name': address.parent_id.name or '', } for field in self._address_fields(cr, uid, context=context): args[field] = getattr(address, field) or '' diff --git a/openerp/addons/base/res/res_users.py b/openerp/addons/base/res/res_users.py index 53254480e25d6524571f53377f1753dd536fce46..fdf453ae4f3ac355a5cdcb0b316d1043d7c7ef75 100644 --- a/openerp/addons/base/res/res_users.py +++ b/openerp/addons/base/res/res_users.py @@ -27,11 +27,10 @@ from lxml import etree from lxml.builder import E import openerp -from openerp import SUPERUSER_ID +from openerp import SUPERUSER_ID, models from openerp 
import tools import openerp.exceptions -from openerp.osv import fields,osv, expression -from openerp.osv.orm import browse_record +from openerp.osv import fields, osv, expression from openerp.tools.translate import _ from openerp.http import request @@ -168,7 +167,7 @@ class res_users(osv.osv): help='Partner-related data of the user'), 'login': fields.char('Login', size=64, required=True, help="Used to log into the system"), - 'password': fields.char('Password', size=64, invisible=True, + 'password': fields.char('Password', size=64, invisible=True, copy=False, help="Keep empty if you don't want the user to be able to connect on the system."), 'new_password': fields.function(_get_password, type='char', size=64, fnct_inv=_set_new_password, string='Set Password', @@ -226,9 +225,8 @@ class res_users(osv.osv): def _get_company(self,cr, uid, context=None, uid2=False): if not uid2: uid2 = uid - user = self.pool['res.users'].read(cr, uid, uid2, ['company_id'], context) - company_id = user.get('company_id', False) - return company_id and company_id[0] or False + user = self.pool['res.users'].browse(cr, uid, uid2, context) + return user.company_id.id def _get_companies(self, cr, uid, context=None): c = self._get_company(cr, uid, context) @@ -249,6 +247,9 @@ class res_users(osv.osv): pass return result + def _get_default_image(self, cr, uid, context=None): + return self.pool['res.partner']._get_default_image(cr, uid, False, colorize=True, context=context) + _defaults = { 'password': '', 'active': True, @@ -256,7 +257,7 @@ class res_users(osv.osv): 'company_id': _get_company, 'company_ids': _get_companies, 'groups_id': _get_group, - 'image': lambda self, cr, uid, ctx={}: self.pool['res.partner']._get_default_image(cr, uid, False, ctx, colorize=True), + 'image': _get_default_image, } # User can write on a few of his own fields (but not his groups for example) @@ -304,7 +305,8 @@ class res_users(osv.osv): break else: if 'company_id' in values: - if not (values['company_id'] in 
self.read(cr, SUPERUSER_ID, uid, ['company_ids'], context=context)['company_ids']): + user = self.browse(cr, SUPERUSER_ID, uid, context=context) + if not (values['company_id'] in user.company_ids.ids): del values['company_id'] uid = 1 # safe fields only, so we write as super-user to bypass access rights @@ -369,8 +371,8 @@ class res_users(osv.osv): else: context_key = False if context_key: - res = getattr(user,k) or False - if isinstance(res, browse_record): + res = getattr(user, k) or False + if isinstance(res, models.BaseModel): res = res.id result[context_key] = res or False return result @@ -392,7 +394,7 @@ class res_users(osv.osv): if not res: raise openerp.exceptions.AccessDenied() - def login(self, db, login, password): + def _login(self, db, login, password): if not password: return False user_id = False @@ -421,6 +423,7 @@ class res_users(osv.osv): try: cr.execute("SELECT id FROM res_users WHERE id=%s FOR UPDATE NOWAIT", (user_id,), log_exceptions=False) cr.execute("UPDATE res_users SET login_date = now() AT TIME ZONE 'UTC' WHERE id=%s", (user_id,)) + self.invalidate_cache(cr, user_id, ['login_date'], [user_id]) except Exception: _logger.debug("Failed to update last_login for db:%s login:%s", db, login, exc_info=True) except openerp.exceptions.AccessDenied: @@ -442,7 +445,7 @@ class res_users(osv.osv): :param dict user_agent_env: environment dictionary describing any relevant environment attributes """ - uid = self.login(db, login, password) + uid = self._login(db, login, password) if uid == openerp.SUPERUSER_ID: # Successfully logged in as admin! # Attempt to guess the web base url... 
@@ -665,6 +668,21 @@ def partition(f, xs): (yes if f(x) else nos).append(x) return yes, nos +def parse_m2m(commands): + "return a list of ids corresponding to a many2many value" + ids = [] + for command in commands: + if isinstance(command, (tuple, list)): + if command[0] in (1, 4): + ids.append(command[2]) + elif command[0] == 5: + ids = [] + elif command[0] == 6: + ids = list(command[2]) + else: + ids.append(command) + return ids + class groups_view(osv.osv): _inherit = 'res.groups' @@ -690,7 +708,7 @@ class groups_view(osv.osv): # we have to try-catch this, because at first init the view does not exist # but we are already creating some basic groups view = self.pool['ir.model.data'].xmlid_to_object(cr, SUPERUSER_ID, 'base.user_groups_view', context=context) - if view and view.exists() and view._table_name == 'ir.ui.view': + if view and view.exists() and view._name == 'ir.ui.view': xml1, xml2 = [], [] xml1.append(E.separator(string=_('Application'), colspan="4")) for app, kind, gs in self.get_groups_by_application(cr, uid, context): @@ -836,7 +854,7 @@ class users_view(osv.osv): def _get_reified_groups(self, fields, values): """ compute the given reified group fields from values['groups_id'] """ - gids = set(values.get('groups_id') or []) + gids = set(parse_m2m(values.get('groups_id') or [])) for f in fields: if is_boolean_group(f): values[f] = get_boolean_group(f) in gids diff --git a/openerp/addons/base/security/base_security.xml b/openerp/addons/base/security/base_security.xml index 962b21e6c77380e546b0311413650e3234771a3d..a76ef191be7d394c7a14ab8da582f1e9d2b080dd 100644 --- a/openerp/addons/base/security/base_security.xml +++ b/openerp/addons/base/security/base_security.xml @@ -41,12 +41,6 @@ <field name="implied_ids" eval="[(4, ref('group_sale_salesman'))]"/> </record> - <!-- Set accesses to menu --> - <record model="ir.ui.menu" id="base.menu_administration"> - <field name="name">Settings</field> - <field name="groups_id" eval="[(6,0, [ref('group_system'), 
ref('group_erp_manager')])]"/> - </record> - <record model="ir.rule" id="res_partner_rule"> <field name="name">res.partner company</field> <field name="model_id" ref="model_res_partner"/> diff --git a/openerp/addons/base/tests/__init__.py b/openerp/addons/base/tests/__init__.py index 2e55989ba19702b1d59cf9138604a88fe711bcb8..c597542d05261490e01412a26350384cc6f95653 100644 --- a/openerp/addons/base/tests/__init__.py +++ b/openerp/addons/base/tests/__init__.py @@ -1,10 +1,9 @@ import test_acl +import test_api import test_base import test_basecase import test_db_cursor import test_expression -import test_expression -import test_fields import test_func import test_ir_actions import test_ir_attachment diff --git a/openerp/addons/base/tests/base_test.yml b/openerp/addons/base/tests/base_test.yml index e73d8177fc064e2ef7f06423e0cdd4f799a49cb6..5590e73513518f3c01eae047590fa559e67c0f77 100644 --- a/openerp/addons/base/tests/base_test.yml +++ b/openerp/addons/base/tests/base_test.yml @@ -58,6 +58,9 @@ !python {model: res.partner.category}: | # pretend the pool has finished loading to avoid deferring parent_store computation self.pool._init = False + + # Force partner_categ.copy() to copy children + self.pool['res.partner.category']._columns['child_ids'].copy = True - "1.0 Setup test partner categories: parent root" - @@ -91,6 +94,7 @@ 2. 
Duplicate the parent category and verify that the children have been duplicated too and are below the new parent - !python {model: res.partner.category}: | + self._columns['child_ids'].copy = True # force copying children for test new_id = self.copy(cr, uid, ref('test_categ_0')) new_struct = self.search(cr, uid, [('parent_id', 'child_of', new_id)]) assert len(new_struct) == 4, "After duplication, the new object must have the childs records" @@ -141,6 +145,7 @@ - !python {model: res.partner.category}: | self.pool._init = True + self.pool['res.partner.category']._columns['child_ids'].copy = False - "Float precision tests: verify that float rounding methods are working correctly via res.currency" @@ -277,7 +282,7 @@ rate_id = res_currency_rate.create(cr, 1, {'name':'2000-01-01', 'rate': value, 'currency_id': currency.id}) - rate = res_currency_rate.read(cr, 1, rate_id, ['rate'])['rate'] + rate = res_currency_rate.read(cr, 1, [rate_id], ['rate'])[0]['rate'] assert rate == expected, 'Roundtrip error: got %s back from db, expected %s' % (rate, expected) # res.currency.rate uses 6 digits of precision by default try_roundtrip(2.6748955, 2.674896) diff --git a/openerp/addons/base/tests/test_acl.py b/openerp/addons/base/tests/test_acl.py index f3fa0bb2b1db20623b605b4919f6247a1bd86217..366a17129785591bc81dc3a8d9733dd6d3961776 100644 --- a/openerp/addons/base/tests/test_acl.py +++ b/openerp/addons/base/tests/test_acl.py @@ -20,6 +20,22 @@ class TestACL(common.TransactionCase): self.tech_group = self.registry('ir.model.data').get_object(self.cr, self.uid, *(GROUP_TECHNICAL_FEATURES.split('.'))) + def _set_field_groups(self, model, field_name, groups): + field = model._fields[field_name] + column = model._columns[field_name] + old_groups = field.groups + old_prefetch = column._prefetch + + field.groups = groups + column.groups = groups + column._prefetch = False + + @self.addCleanup + def cleanup(): + field.groups = old_groups + column.groups = old_groups + column._prefetch = 
old_prefetch + def test_field_visibility_restriction(self): """Check that model-level ``groups`` parameter effectively restricts access to that field for users who do not belong to one of the explicitly allowed groups""" @@ -33,8 +49,9 @@ class TestACL(common.TransactionCase): self.assertNotEquals(view_arch.xpath("//field[@name='accuracy']"), [], "Field 'accuracy' must be found in view definition before the test") - # Restrict access to the field and check it's gone - self.res_currency._columns['accuracy'].groups = GROUP_TECHNICAL_FEATURES + # restrict access to the field and check it's gone + self._set_field_groups(self.res_currency, 'accuracy', GROUP_TECHNICAL_FEATURES) + fields = self.res_currency.fields_get(self.cr, self.demo_uid, []) form_view = self.res_currency.fields_view_get(self.cr, self.demo_uid, False, 'form') view_arch = etree.fromstring(form_view.get('arch')) @@ -56,9 +73,8 @@ class TestACL(common.TransactionCase): #cleanup self.tech_group.write({'users': [(3, self.demo_uid)]}) - self.res_currency._columns['accuracy'].groups = False - @mute_logger('openerp.osv.orm') + @mute_logger('openerp.models') def test_field_crud_restriction(self): "Read/Write RPC access to restricted field should be forbidden" # Verify the test environment first @@ -68,7 +84,8 @@ class TestACL(common.TransactionCase): self.assert_(self.res_partner.write(self.cr, self.demo_uid, [1], {'bank_ids': []})) # Now restrict access to the field and check it's forbidden - self.res_partner._columns['bank_ids'].groups = GROUP_TECHNICAL_FEATURES + self._set_field_groups(self.res_partner, 'bank_ids', GROUP_TECHNICAL_FEATURES) + with self.assertRaises(openerp.osv.orm.except_orm): self.res_partner.read(self.cr, self.demo_uid, [1], ['bank_ids']) with self.assertRaises(openerp.osv.orm.except_orm): @@ -83,25 +100,22 @@ class TestACL(common.TransactionCase): #cleanup self.tech_group.write({'users': [(3, self.demo_uid)]}) - self.res_partner._columns['bank_ids'].groups = False + 
@mute_logger('openerp.models') def test_fields_browse_restriction(self): """Test access to records having restricted fields""" - self.res_partner._columns['email'].groups = GROUP_TECHNICAL_FEATURES - try: - P = self.res_partner - pid = P.search(self.cr, self.demo_uid, [], limit=1)[0] - part = P.browse(self.cr, self.demo_uid, pid) - # accessing fields must no raise exceptions... - part.name - # ... except if they are restricted - with self.assertRaises(openerp.osv.orm.except_orm) as cm: - with mute_logger('openerp.osv.orm'): - part.email - - self.assertEqual(cm.exception.args[0], 'Access Denied') - finally: - self.res_partner._columns['email'].groups = False + self._set_field_groups(self.res_partner, 'email', GROUP_TECHNICAL_FEATURES) + + pid = self.res_partner.search(self.cr, self.demo_uid, [], limit=1)[0] + part = self.res_partner.browse(self.cr, self.demo_uid, pid) + # accessing fields must no raise exceptions... + part.name + # ... except if they are restricted + with self.assertRaises(openerp.osv.orm.except_orm) as cm: + with mute_logger('openerp.models'): + part.email + + self.assertEqual(cm.exception.args[0], 'AccessError') if __name__ == '__main__': unittest2.main() diff --git a/openerp/addons/base/tests/test_api.py b/openerp/addons/base/tests/test_api.py new file mode 100644 index 0000000000000000000000000000000000000000..2be394e65dcd54981aadffac8d632fd1e3d55f9b --- /dev/null +++ b/openerp/addons/base/tests/test_api.py @@ -0,0 +1,444 @@ + +from openerp import models +from openerp.tools import mute_logger +from openerp.osv.orm import except_orm +from openerp.tests import common + + +class TestAPI(common.TransactionCase): + """ test the new API of the ORM """ + + def assertIsRecordset(self, value, model): + self.assertIsInstance(value, models.BaseModel) + self.assertEqual(value._name, model) + + def assertIsRecord(self, value, model): + self.assertIsRecordset(value, model) + self.assertTrue(len(value) <= 1) + + def assertIsNull(self, value, model): + 
self.assertIsRecordset(value, model) + self.assertFalse(value) + + @mute_logger('openerp.models') + def test_00_query(self): + """ Build a recordset, and check its contents. """ + domain = [('name', 'ilike', 'j')] + ids = self.registry('res.partner').search(self.cr, self.uid, domain) + partners = self.env['res.partner'].search(domain) + + # partners is a collection of browse records corresponding to ids + self.assertTrue(ids) + self.assertTrue(partners) + + # partners and its contents are instance of the model, and share its ormcache + self.assertIsRecordset(partners, 'res.partner') + self.assertIs(partners._ormcache, self.env['res.partner']._ormcache) + for p in partners: + self.assertIsRecord(p, 'res.partner') + self.assertIs(p._ormcache, self.env['res.partner']._ormcache) + + self.assertEqual([p.id for p in partners], ids) + self.assertEqual(self.env['res.partner'].browse(ids), partners) + + @mute_logger('openerp.models') + def test_01_query_offset(self): + """ Build a recordset with offset, and check equivalence. """ + partners1 = self.env['res.partner'].search([], offset=10) + partners2 = self.env['res.partner'].search([])[10:] + self.assertIsRecordset(partners1, 'res.partner') + self.assertIsRecordset(partners2, 'res.partner') + self.assertEqual(list(partners1), list(partners2)) + + @mute_logger('openerp.models') + def test_02_query_limit(self): + """ Build a recordset with offset, and check equivalence. """ + partners1 = self.env['res.partner'].search([], limit=10) + partners2 = self.env['res.partner'].search([])[:10] + self.assertIsRecordset(partners1, 'res.partner') + self.assertIsRecordset(partners2, 'res.partner') + self.assertEqual(list(partners1), list(partners2)) + + @mute_logger('openerp.models') + def test_03_query_offset_limit(self): + """ Build a recordset with offset and limit, and check equivalence. 
""" + partners1 = self.env['res.partner'].search([], offset=3, limit=7) + partners2 = self.env['res.partner'].search([])[3:10] + self.assertIsRecordset(partners1, 'res.partner') + self.assertIsRecordset(partners2, 'res.partner') + self.assertEqual(list(partners1), list(partners2)) + + @mute_logger('openerp.models') + def test_05_immutable(self): + """ Check that a recordset remains the same, even after updates. """ + domain = [('name', 'ilike', 'j')] + partners = self.env['res.partner'].search(domain) + self.assertTrue(partners) + ids = map(int, partners) + + # modify those partners, and check that partners has not changed + self.registry('res.partner').write(self.cr, self.uid, ids, {'active': False}) + self.assertEqual(ids, map(int, partners)) + + # redo the search, and check that the result is now empty + partners2 = self.env['res.partner'].search(domain) + self.assertFalse(partners2) + + @mute_logger('openerp.models') + def test_06_fields(self): + """ Check that relation fields return records, recordsets or nulls. """ + user = self.registry('res.users').browse(self.cr, self.uid, self.uid) + self.assertIsRecord(user, 'res.users') + self.assertIsRecord(user.partner_id, 'res.partner') + self.assertIsRecordset(user.groups_id, 'res.groups') + + partners = self.env['res.partner'].search([]) + for name, cinfo in partners._all_columns.iteritems(): + if cinfo.column._type == 'many2one': + for p in partners: + self.assertIsRecord(p[name], cinfo.column._obj) + elif cinfo.column._type == 'reference': + for p in partners: + if p[name]: + self.assertIsRecord(p[name], cinfo.column._obj) + elif cinfo.column._type in ('one2many', 'many2many'): + for p in partners: + self.assertIsRecordset(p[name], cinfo.column._obj) + + @mute_logger('openerp.models') + def test_07_null(self): + """ Check behavior of null instances. 
""" + # select a partner without a parent + partner = self.env['res.partner'].search([('parent_id', '=', False)])[0] + + # check partner and related null instances + self.assertTrue(partner) + self.assertIsRecord(partner, 'res.partner') + + self.assertFalse(partner.parent_id) + self.assertIsNull(partner.parent_id, 'res.partner') + + self.assertIs(partner.parent_id.id, False) + + self.assertFalse(partner.parent_id.user_id) + self.assertIsNull(partner.parent_id.user_id, 'res.users') + + self.assertIs(partner.parent_id.user_id.name, False) + + self.assertFalse(partner.parent_id.user_id.groups_id) + self.assertIsRecordset(partner.parent_id.user_id.groups_id, 'res.groups') + + @mute_logger('openerp.models') + def test_10_old_old(self): + """ Call old-style methods in the old-fashioned way. """ + partners = self.env['res.partner'].search([('name', 'ilike', 'j')]) + self.assertTrue(partners) + ids = map(int, partners) + + # call method name_get on partners' model, and check its effect + res = partners._model.name_get(self.cr, self.uid, ids) + self.assertEqual(len(res), len(ids)) + self.assertEqual(set(val[0] for val in res), set(ids)) + + @mute_logger('openerp.models') + def test_20_old_new(self): + """ Call old-style methods in the new API style. """ + partners = self.env['res.partner'].search([('name', 'ilike', 'j')]) + self.assertTrue(partners) + + # call method name_get on partners itself, and check its effect + res = partners.name_get() + self.assertEqual(len(res), len(partners)) + self.assertEqual(set(val[0] for val in res), set(map(int, partners))) + + @mute_logger('openerp.models') + def test_25_old_new(self): + """ Call old-style methods on records (new API style). 
""" + partners = self.env['res.partner'].search([('name', 'ilike', 'j')]) + self.assertTrue(partners) + + # call method name_get on partner records, and check its effect + for p in partners: + res = p.name_get() + self.assertTrue(isinstance(res, list) and len(res) == 1) + self.assertTrue(isinstance(res[0], tuple) and len(res[0]) == 2) + self.assertEqual(res[0][0], p.id) + + @mute_logger('openerp.models') + def test_30_new_old(self): + """ Call new-style methods in the old-fashioned way. """ + partners = self.env['res.partner'].search([('name', 'ilike', 'j')]) + self.assertTrue(partners) + ids = map(int, partners) + + # call method write on partners' model, and check its effect + partners._model.write(self.cr, self.uid, ids, {'active': False}) + for p in partners: + self.assertFalse(p.active) + + @mute_logger('openerp.models') + def test_40_new_new(self): + """ Call new-style methods in the new API style. """ + partners = self.env['res.partner'].search([('name', 'ilike', 'j')]) + self.assertTrue(partners) + + # call method write on partners itself, and check its effect + partners.write({'active': False}) + for p in partners: + self.assertFalse(p.active) + + @mute_logger('openerp.models') + def test_45_new_new(self): + """ Call new-style methods on records (new API style). """ + partners = self.env['res.partner'].search([('name', 'ilike', 'j')]) + self.assertTrue(partners) + + # call method write on partner records, and check its effects + for p in partners: + p.write({'active': False}) + for p in partners: + self.assertFalse(p.active) + + @mute_logger('openerp.models') + @mute_logger('openerp.addons.base.ir.ir_model') + def test_50_environment(self): + """ Test environment on records. 
""" + # partners and reachable records are attached to self.env + partners = self.env['res.partner'].search([('name', 'ilike', 'j')]) + self.assertEqual(partners.env, self.env) + for x in (partners, partners[0], partners[0].company_id): + self.assertEqual(x.env, self.env) + for p in partners: + self.assertEqual(p.env, self.env) + + # check that the current user can read and modify company data + partners[0].company_id.name + partners[0].company_id.write({'name': 'Fools'}) + + # create an environment with the demo user + demo = self.env['res.users'].search([('login', '=', 'demo')])[0] + demo_env = self.env(user=demo) + self.assertNotEqual(demo_env, self.env) + + # partners and related records are still attached to self.env + self.assertEqual(partners.env, self.env) + for x in (partners, partners[0], partners[0].company_id): + self.assertEqual(x.env, self.env) + for p in partners: + self.assertEqual(p.env, self.env) + + # create record instances attached to demo_env + demo_partners = partners.sudo(demo) + self.assertEqual(demo_partners.env, demo_env) + for x in (demo_partners, demo_partners[0], demo_partners[0].company_id): + self.assertEqual(x.env, demo_env) + for p in demo_partners: + self.assertEqual(p.env, demo_env) + + # demo user can read but not modify company data + demo_partners[0].company_id.name + with self.assertRaises(except_orm): + demo_partners[0].company_id.write({'name': 'Pricks'}) + + # remove demo user from all groups + demo.write({'groups_id': [(5,)]}) + + # demo user can no longer access partner data + with self.assertRaises(except_orm): + demo_partners[0].company_id.name + + @mute_logger('openerp.models') + def test_55_draft(self): + """ Test draft mode nesting. 
""" + env = self.env + self.assertFalse(env.in_draft) + with env.do_in_draft(): + self.assertTrue(env.in_draft) + with env.do_in_draft(): + self.assertTrue(env.in_draft) + with env.do_in_draft(): + self.assertTrue(env.in_draft) + self.assertTrue(env.in_draft) + self.assertTrue(env.in_draft) + self.assertFalse(env.in_draft) + + @mute_logger('openerp.models') + def test_60_cache(self): + """ Check the record cache behavior """ + partners = self.env['res.partner'].search([('child_ids', '!=', False)]) + partner1, partner2 = partners[0], partners[1] + children1, children2 = partner1.child_ids, partner2.child_ids + self.assertTrue(children1) + self.assertTrue(children2) + + # take a child contact + child = children1[0] + self.assertEqual(child.parent_id, partner1) + self.assertIn(child, partner1.child_ids) + self.assertNotIn(child, partner2.child_ids) + + # fetch data in the cache + for p in partners: + p.name, p.company_id.name, p.user_id.name, p.contact_address + self.env.check_cache() + + # change its parent + child.write({'parent_id': partner2.id}) + self.env.check_cache() + + # check recordsets + self.assertEqual(child.parent_id, partner2) + self.assertNotIn(child, partner1.child_ids) + self.assertIn(child, partner2.child_ids) + self.assertEqual(set(partner1.child_ids + child), set(children1)) + self.assertEqual(set(partner2.child_ids), set(children2 + child)) + self.env.check_cache() + + # delete it + child.unlink() + self.env.check_cache() + + # check recordsets + self.assertEqual(set(partner1.child_ids), set(children1) - set([child])) + self.assertEqual(set(partner2.child_ids), set(children2)) + self.env.check_cache() + + @mute_logger('openerp.models') + def test_60_cache_prefetching(self): + """ Check the record cache prefetching """ + self.env.invalidate_all() + + # all the records of an instance already have an entry in cache + partners = self.env['res.partner'].search([]) + partner_ids = self.env.prefetch['res.partner'] + self.assertEqual(set(partners.ids), 
set(partner_ids)) + + # countries have not been fetched yet; their cache must be empty + countries = self.env['res.country'].browse() + self.assertFalse(self.env.prefetch['res.country']) + + # reading ONE partner should fetch them ALL + countries |= partners[0].country_id + country_cache = self.env.cache[partners._fields['country_id']] + self.assertLessEqual(set(partners._ids), set(country_cache)) + + # read all partners, and check that the cache already contained them + country_ids = list(self.env.prefetch['res.country']) + for p in partners: + countries |= p.country_id + self.assertLessEqual(set(countries.ids), set(country_ids)) + + @mute_logger('openerp.models') + def test_70_one(self): + """ Check method one(). """ + # check with many records + ps = self.env['res.partner'].search([('name', 'ilike', 'a')]) + self.assertTrue(len(ps) > 1) + with self.assertRaises(except_orm): + ps.ensure_one() + + p1 = ps[0] + self.assertEqual(len(p1), 1) + self.assertEqual(p1.ensure_one(), p1) + + p0 = self.env['res.partner'].browse() + self.assertEqual(len(p0), 0) + with self.assertRaises(except_orm): + p0.ensure_one() + + @mute_logger('openerp.models') + def test_80_contains(self): + """ Test membership on recordset. """ + p1 = self.env['res.partner'].search([('name', 'ilike', 'a')], limit=1).ensure_one() + ps = self.env['res.partner'].search([('name', 'ilike', 'a')]) + self.assertTrue(p1 in ps) + + @mute_logger('openerp.models') + def test_80_set_operations(self): + """ Check set operations on recordsets. 
""" + pa = self.env['res.partner'].search([('name', 'ilike', 'a')]) + pb = self.env['res.partner'].search([('name', 'ilike', 'b')]) + self.assertTrue(pa) + self.assertTrue(pb) + self.assertTrue(set(pa) & set(pb)) + + concat = pa + pb + self.assertEqual(list(concat), list(pa) + list(pb)) + self.assertEqual(len(concat), len(pa) + len(pb)) + + difference = pa - pb + self.assertEqual(len(difference), len(set(difference))) + self.assertEqual(set(difference), set(pa) - set(pb)) + self.assertLessEqual(difference, pa) + + intersection = pa & pb + self.assertEqual(len(intersection), len(set(intersection))) + self.assertEqual(set(intersection), set(pa) & set(pb)) + self.assertLessEqual(intersection, pa) + self.assertLessEqual(intersection, pb) + + union = pa | pb + self.assertEqual(len(union), len(set(union))) + self.assertEqual(set(union), set(pa) | set(pb)) + self.assertGreaterEqual(union, pa) + self.assertGreaterEqual(union, pb) + + # one cannot mix different models with set operations + ps = pa + ms = self.env['ir.ui.menu'].search([]) + self.assertNotEqual(ps._name, ms._name) + self.assertNotEqual(ps, ms) + + with self.assertRaises(except_orm): + res = ps + ms + with self.assertRaises(except_orm): + res = ps - ms + with self.assertRaises(except_orm): + res = ps & ms + with self.assertRaises(except_orm): + res = ps | ms + with self.assertRaises(except_orm): + res = ps < ms + with self.assertRaises(except_orm): + res = ps <= ms + with self.assertRaises(except_orm): + res = ps > ms + with self.assertRaises(except_orm): + res = ps >= ms + + @mute_logger('openerp.models') + def test_80_filter(self): + """ Check filter on recordsets. 
""" + ps = self.env['res.partner'].search([]) + customers = ps.browse([p.id for p in ps if p.customer]) + + # filter on a single field + self.assertEqual(ps.filtered(lambda p: p.customer), customers) + self.assertEqual(ps.filtered('customer'), customers) + + # filter on a sequence of fields + self.assertEqual( + ps.filtered(lambda p: p.parent_id.customer), + ps.filtered('parent_id.customer') + ) + + @mute_logger('openerp.models') + def test_80_map(self): + """ Check map on recordsets. """ + ps = self.env['res.partner'].search([]) + parents = ps.browse() + for p in ps: parents |= p.parent_id + + # map a single field + self.assertEqual(ps.mapped(lambda p: p.parent_id), parents) + self.assertEqual(ps.mapped('parent_id'), parents) + + # map a sequence of fields + self.assertEqual( + ps.mapped(lambda p: p.parent_id.name), + [p.parent_id.name for p in ps] + ) + self.assertEqual( + ps.mapped('parent_id.name'), + [p.name for p in parents] + ) diff --git a/openerp/addons/base/tests/test_ir_actions.py b/openerp/addons/base/tests/test_ir_actions.py index baea8299f9e61e881bde08e86df03ecec5277ec9..68e7a54d7f5369cb307bb77a1627308c36e37a80 100644 --- a/openerp/addons/base/tests/test_ir_actions.py +++ b/openerp/addons/base/tests/test_ir_actions.py @@ -357,7 +357,7 @@ workflow""" cids = self.res_country.search(cr, uid, [('name', 'ilike', 'NewCountry')]) self.assertEqual(len(cids), 1, 'ir_actions_server: TODO') - @mute_logger('openerp.addons.base.ir.ir_model', 'openerp.osv.orm') + @mute_logger('openerp.addons.base.ir.ir_model', 'openerp.models') def test_60_multi(self): cr, uid = self.cr, self.uid diff --git a/openerp/addons/base/tests/test_ir_rule.yml b/openerp/addons/base/tests/test_ir_rule.yml index 49b368a5bb342bb40d699981ba66a2c73e410aac..82e8b0c14a79e8fba36f0a3a82ff646bcbfa72b0 100644 --- a/openerp/addons/base/tests/test_ir_rule.yml +++ b/openerp/addons/base/tests/test_ir_rule.yml @@ -124,7 +124,7 @@ Modify the global rule on res_company which triggers a recursive check of the 
rules on company. - - !record {model: ir.rule, id: base.res_company_rule}: + !record {model: ir.rule, id: res_company_rule}: domain_force: "[('id','child_of',[user.company_id.id])]" - Read as demo user the partners (exercising the global company rule). diff --git a/openerp/addons/base/tests/test_orm.py b/openerp/addons/base/tests/test_orm.py index 29a5a2880d15bd4c32ddcd95dd3e9aebbd498556..eb239fd6c769547a454fd929d63ba5e609ace475 100644 --- a/openerp/addons/base/tests/test_orm.py +++ b/openerp/addons/base/tests/test_orm.py @@ -24,7 +24,7 @@ class TestORM(common.TransactionCase): employee_gid = self.ref('base.group_user') self.uid2 = self.users.create(cr, uid, {'name': 'test user', 'login': 'test', 'groups_id': [4,employee_gid]}) - @mute_logger('openerp.osv.orm') + @mute_logger('openerp.models') def testAccessDeletedRecords(self): """ Verify that accessing deleted records works as expected """ cr, uid, uid2, p1, p2 = self.cr, self.uid, self.uid2, self.p1, self.p2 @@ -45,7 +45,7 @@ class TestORM(common.TransactionCase): with self.assertRaises(Exception): self.partner.write(cr, uid, [p1], {'name': 'foo'}) - @mute_logger('openerp.osv.orm') + @mute_logger('openerp.models') def testAccessFilteredRecords(self): """ Verify that accessing filtered records works as expected for non-admin user """ cr, uid, uid2, p1, p2 = self.cr, self.uid, self.uid2, self.p1, self.p2 @@ -77,7 +77,17 @@ class TestORM(common.TransactionCase): with self.assertRaises(Exception): self.partner.unlink(cr, uid2, [p1,p2]) - @mute_logger('openerp.osv.orm') + def test_multi_read(self): + record_id = self.partner.create(self.cr, UID, {'name': 'MyPartner1'}) + records = self.partner.read(self.cr, UID, [record_id]) + self.assertIsInstance(records, list) + + def test_one_read(self): + record_id = self.partner.create(self.cr, UID, {'name': 'MyPartner1'}) + record = self.partner.read(self.cr, UID, record_id) + self.assertIsInstance(record, dict) + + @mute_logger('openerp.models') def test_search_read(self): # 
simple search_read self.partner.create(self.cr, UID, {'name': 'MyPartner1'}) @@ -177,7 +187,7 @@ class TestInherits(common.TransactionCase): self.assertEqual(foo.name, 'Foo') self.assertEqual(foo.partner_id.id, par_id) - @mute_logger('openerp.osv.orm') + @mute_logger('openerp.models') def test_read(self): """ inherited fields should be read without any indirection """ foo_id = self.user.create(self.cr, UID, {'name': 'Foo', 'login': 'foo', 'password': 'foo'}) @@ -189,13 +199,15 @@ class TestInherits(common.TransactionCase): foo = self.user.browse(self.cr, UID, foo_id) self.assertEqual(foo.name, foo.partner_id.name) - @mute_logger('openerp.osv.orm') + @mute_logger('openerp.models') def test_copy(self): """ copying a user should automatically copy its partner, too """ foo_id = self.user.create(self.cr, UID, {'name': 'Foo', 'login': 'foo', 'password': 'foo'}) foo_before, = self.user.read(self.cr, UID, [foo_id]) + del foo_before['__last_update'] bar_id = self.user.copy(self.cr, UID, foo_id, {'login': 'bar', 'password': 'bar'}) foo_after, = self.user.read(self.cr, UID, [foo_id]) + del foo_after['__last_update'] self.assertEqual(foo_before, foo_after) @@ -204,16 +216,19 @@ class TestInherits(common.TransactionCase): self.assertNotEqual(foo.id, bar.id) self.assertNotEqual(foo.partner_id.id, bar.partner_id.id) - @mute_logger('openerp.osv.orm') + @mute_logger('openerp.models') def test_copy_with_ancestor(self): """ copying a user with 'parent_id' in defaults should not duplicate the partner """ - foo_id = self.user.create(self.cr, UID, {'name': 'Foo', 'login': 'foo', 'password': 'foo'}) + foo_id = self.user.create(self.cr, UID, {'name': 'Foo', 'login': 'foo', 'password': 'foo', + 'login_date': '2016-01-01'}) par_id = self.partner.create(self.cr, UID, {'name': 'Bar'}) foo_before, = self.user.read(self.cr, UID, [foo_id]) + del foo_before['__last_update'] partners_before = self.partner.search(self.cr, UID, []) bar_id = self.user.copy(self.cr, UID, foo_id, {'partner_id': par_id, 
'login': 'bar'}) foo_after, = self.user.read(self.cr, UID, [foo_id]) + del foo_after['__last_update'] partners_after = self.partner.search(self.cr, UID, []) self.assertEqual(foo_before, foo_after) @@ -223,7 +238,7 @@ class TestInherits(common.TransactionCase): self.assertNotEqual(foo.id, bar.id) self.assertEqual(bar.partner_id.id, par_id) self.assertEqual(bar.login, 'bar', "login is given from copy parameters") - self.assertEqual(bar.password, foo.password, "password is given from original record") + self.assertEqual(bar.login_date, foo.login_date, "login_date copied from original record") self.assertEqual(bar.name, 'Bar', "name is given from specific partner") diff --git a/openerp/addons/base/tests/test_osv_expression.yml b/openerp/addons/base/tests/test_osv_expression.yml index e0061135d85ccc47f52cae6cd050bcf3bc351a08..2a3a6c3443ffb2cbf72bd5d5e01c2f9bdb9840ad 100644 --- a/openerp/addons/base/tests/test_osv_expression.yml +++ b/openerp/addons/base/tests/test_osv_expression.yml @@ -83,7 +83,7 @@ Test one2many operator with False - !assert {model: res.partner, search: "[('child_ids', '=', False)]"}: - - child_ids in (False, None, []) + - list(child_ids) == [] - Test many2many operator with empty search list - @@ -92,7 +92,7 @@ Test many2many operator with False - !assert {model: res.partner, search: "[('category_id', '=', False)]"}: - - category_id in (False, None, []) + - list(category_id) == [] - Filtering on invalid value across x2many relationship should return an empty set - diff --git a/openerp/addons/base/tests/test_views.py b/openerp/addons/base/tests/test_views.py index b8563a5e7e5f1573d7b554a4e9e54662a4795a66..cf814947d5d5e9259e1fbbeddf2553c6eabd2a18 100644 --- a/openerp/addons/base/tests/test_views.py +++ b/openerp/addons/base/tests/test_views.py @@ -357,6 +357,7 @@ class TestApplyInheritanceSpecs(ViewCase): name="target"), string="Title")) + @openerp.tools.mute_logger('openerp.addons.base.ir.ir_ui_view') def test_invalid_position(self): spec = Field( 
Field(name="whoops"), @@ -367,6 +368,7 @@ class TestApplyInheritanceSpecs(ViewCase): self.base_arch, spec, None) + @openerp.tools.mute_logger('openerp.addons.base.ir.ir_ui_view') def test_incorrect_version(self): # Version ignored on //field elements, so use something else arch = E.form(E.element(foo="42")) @@ -379,6 +381,7 @@ class TestApplyInheritanceSpecs(ViewCase): arch, spec, None) + @openerp.tools.mute_logger('openerp.addons.base.ir.ir_ui_view') def test_target_not_found(self): spec = Field(name="targut") diff --git a/openerp/addons/base/tests/test_xmlrpc.py b/openerp/addons/base/tests/test_xmlrpc.py index 854e345078a019c72167a2d81c141fc2248289e9..1d899866ab199de4599f1fecd93117a758154ab6 100644 --- a/openerp/addons/base/tests/test_xmlrpc.py +++ b/openerp/addons/base/tests/test_xmlrpc.py @@ -3,7 +3,7 @@ import time import unittest2 import xmlrpclib -import openerp +import openerp.tests.common DB = openerp.tests.common.DB diff --git a/openerp/addons/base/workflow/workflow.py b/openerp/addons/base/workflow/workflow.py index 3fd943c54a85b005124701b11fb252315af7c025..ee2525b017abed27ff89b098645437cbad9c2106 100644 --- a/openerp/addons/base/workflow/workflow.py +++ b/openerp/addons/base/workflow/workflow.py @@ -19,6 +19,7 @@ # ############################################################################## +from openerp.exceptions import Warning from openerp.osv import fields, osv from openerp.tools.translate import _ import openerp.workflow @@ -37,6 +38,9 @@ class workflow(osv.osv): 'on_create': lambda *a: True } + def copy(self, cr, uid, id, values, context=None): + raise Warning(_("Duplicating workflows is not possible, please create a new workflow")) + def write(self, cr, user, ids, vals, context=None): if not context: context={} diff --git a/openerp/addons/test_exceptions/models.py b/openerp/addons/test_exceptions/models.py index a22c6ea7d77400ec97a2820718058dece8f0a1d2..f5b74fbe6159cdd1fbe43a7499553ac834b01146 100644 --- 
a/openerp/addons/test_exceptions/models.py +++ b/openerp/addons/test_exceptions/models.py @@ -1,5 +1,8 @@ # -*- coding: utf-8 -*- -import openerp +import openerp.exceptions +import openerp.osv.orm +import openerp.osv.osv +import openerp.tools.safe_eval class m(openerp.osv.osv.Model): """ This model exposes a few methods that will raise the different diff --git a/openerp/addons/test_impex/models.py b/openerp/addons/test_impex/models.py index 8c76850d6284b756b4660a05e0c6271158526366..7296583bb5704bbbac5c42642ef9e13339eb8cbb 100644 --- a/openerp/addons/test_impex/models.py +++ b/openerp/addons/test_impex/models.py @@ -6,6 +6,7 @@ def selection_fn(obj, cr, uid, context=None): def function_fn(model, cr, uid, ids, field_name, arg, context): return dict((id, 3) for id in ids) + def function_fn_write(model, cr, uid, id, field_name, field_value, fnct_inv_arg, context): """ just so CreatorCase.export can be used """ @@ -23,7 +24,8 @@ models = [ ('datetime', fields.datetime()), ('text', fields.text()), ('selection', fields.selection([(1, "Foo"), (2, "Bar"), (3, "Qux"), (4, '')])), - ('selection.function', fields.selection(selection_fn)), + # here use size=-1 to store the values as integers instead of strings + ('selection.function', fields.selection(selection_fn, size=-1)), # just relate to an integer ('many2one', fields.many2one('export.integer')), ('one2many', fields.one2many('export.one2many.child', 'parent_id')), @@ -32,28 +34,29 @@ models = [ # related: specialization of fields.function, should work the same way # TODO: reference ] + for name, field in models: - attrs = { - '_name': 'export.%s' % name, - '_columns': { + class NewModel(orm.Model): + _name = 'export.%s' % name + _columns = { 'const': fields.integer(), - 'value': field - }, - '_defaults': {'const': 4}, - 'name_get': (lambda self, cr, uid, ids, context=None: - [(record.id, "%s:%s" % (self._name, record.value)) - for record in self.browse(cr, uid, ids, context=context)]), - 'name_search': (lambda self, cr, 
uid, name, operator, context=None: - self.name_get(cr, uid, - self.search(cr, uid, [['value', operator, int(name.split(':')[1])]]) - , context=context) - if isinstance(name, basestring) and name.split(':')[0] == self._name - else []) - } - NewModel = type( - 'Export%s' % ''.join(section.capitalize() for section in name.split('.')), - (orm.Model,), - attrs) + 'value': field, + } + _defaults = { + 'const': 4, + } + + def name_get(self, cr, uid, ids, context=None): + return [(record.id, "%s:%s" % (self._name, record.value)) + for record in self.browse(cr, uid, ids, context=context)] + + def name_search(self, cr, user, name='', args=None, operator='ilike', context=None, limit=100): + if isinstance(name, basestring) and name.split(':')[0] == self._name: + ids = self.search(cr, user, [['value', operator, int(name.split(':')[1])]]) + return self.name_get(cr, user, ids, context=context) + else: + return [] + class One2ManyChild(orm.Model): _name = 'export.one2many.child' @@ -63,28 +66,33 @@ class One2ManyChild(orm.Model): _columns = { 'parent_id': fields.many2one('export.one2many'), 'str': fields.char('unknown', size=None), - 'value': fields.integer() + 'value': fields.integer(), } + def name_get(self, cr, uid, ids, context=None): return [(record.id, "%s:%s" % (self._name, record.value)) for record in self.browse(cr, uid, ids, context=context)] + def name_search(self, cr, user, name='', args=None, operator='ilike', context=None, limit=100): - return (self.name_get(cr, user, - self.search(cr, user, [['value', operator, int(name.split(':')[1])]]) - , context=context) - if isinstance(name, basestring) and name.split(':')[0] == self._name - else []) + if isinstance(name, basestring) and name.split(':')[0] == self._name: + ids = self.search(cr, user, [['value', operator, int(name.split(':')[1])]]) + return self.name_get(cr, user, ids, context=context) + else: + return [] + class One2ManyMultiple(orm.Model): _name = 'export.one2many.multiple' - _columns = { 'parent_id': 
fields.many2one('export.one2many.recursive'), 'const': fields.integer(), 'child1': fields.one2many('export.one2many.child.1', 'parent_id'), 'child2': fields.one2many('export.one2many.child.2', 'parent_id'), } - _defaults = { 'const': 36 } + _defaults = { + 'const': 36, + } + class One2ManyChildMultiple(orm.Model): _name = 'export.one2many.multiple.child' @@ -94,18 +102,24 @@ class One2ManyChildMultiple(orm.Model): _columns = { 'parent_id': fields.many2one('export.one2many.multiple'), 'str': fields.char('unknown', size=None), - 'value': fields.integer() + 'value': fields.integer(), } + def name_get(self, cr, uid, ids, context=None): return [(record.id, "%s:%s" % (self._name, record.value)) for record in self.browse(cr, uid, ids, context=context)] + + class One2ManyChild1(orm.Model): _name = 'export.one2many.child.1' _inherit = 'export.one2many.multiple.child' + + class One2ManyChild2(orm.Model): _name = 'export.one2many.child.2' _inherit = 'export.one2many.multiple.child' + class Many2ManyChild(orm.Model): _name = 'export.many2many.other' # FIXME: orm.py:1161, fix to name_get on m2o field @@ -113,21 +127,23 @@ class Many2ManyChild(orm.Model): _columns = { 'str': fields.char('unknown', size=None), - 'value': fields.integer() + 'value': fields.integer(), } + def name_get(self, cr, uid, ids, context=None): return [(record.id, "%s:%s" % (self._name, record.value)) for record in self.browse(cr, uid, ids, context=context)] + def name_search(self, cr, user, name='', args=None, operator='ilike', context=None, limit=100): - return (self.name_get(cr, user, - self.search(cr, user, [['value', operator, int(name.split(':')[1])]]) - , context=context) - if isinstance(name, basestring) and name.split(':')[0] == self._name - else []) + if isinstance(name, basestring) and name.split(':')[0] == self._name: + ids = self.search(cr, user, [['value', operator, int(name.split(':')[1])]]) + return self.name_get(cr, user, ids, context=context) + else: + return [] + class 
SelectionWithDefault(orm.Model): _name = 'export.selection.withdefault' - _columns = { 'const': fields.integer(), 'value': fields.selection([(1, "Foo"), (2, "Bar")]), @@ -137,12 +153,12 @@ class SelectionWithDefault(orm.Model): 'value': 2, } + class RecO2M(orm.Model): _name = 'export.one2many.recursive' - _columns = { 'value': fields.integer(), - 'child': fields.one2many('export.one2many.multiple', 'parent_id') + 'child': fields.one2many('export.one2many.multiple', 'parent_id'), } class OnlyOne(orm.Model): diff --git a/openerp/addons/test_impex/tests/test_export.py b/openerp/addons/test_impex/tests/test_export.py index 995d69471d6b19060d6333f23d4a9682ec1cd338..023a27df387514a3fb3ba2e07f159fbda42e91aa 100644 --- a/openerp/addons/test_impex/tests/test_export.py +++ b/openerp/addons/test_impex/tests/test_export.py @@ -16,15 +16,14 @@ class CreatorCase(common.TransactionCase): def setUp(self): super(CreatorCase, self).setUp() self.model = self.registry(self.model_name) + def make(self, value): id = self.model.create(self.cr, openerp.SUPERUSER_ID, {'value': value}) return self.model.browse(self.cr, openerp.SUPERUSER_ID, [id])[0] + def export(self, value, fields=('value',), context=None): record = self.make(value) - return self.model._BaseModel__export_row( - self.cr, openerp.SUPERUSER_ID, record, - [f.split('/') for f in fields], - context=context) + return record._BaseModel__export_rows([f.split('/') for f in fields]) class test_boolean_field(CreatorCase): model_name = 'export.boolean' @@ -272,10 +271,10 @@ class test_selection_function(CreatorCase): # FIXME: selection functions export the *value* itself self.assertEqual( self.export(1), - [[u'1']]) + [[1]]) self.assertEqual( self.export(3), - [[u'3']]) + [[3]]) # fucking hell self.assertEqual( self.export(0), @@ -433,12 +432,10 @@ class test_o2m_multiple(CreatorCase): if value is not None: values['value'] = value id = self.model.create(self.cr, openerp.SUPERUSER_ID, values) return self.model.browse(self.cr, 
openerp.SUPERUSER_ID, [id])[0] + def export(self, value=None, fields=('child1', 'child2',), context=None, **values): record = self.make(value, **values) - return self.model._BaseModel__export_row( - self.cr, openerp.SUPERUSER_ID, record, - [f.split('/') for f in fields], - context=context) + return record._BaseModel__export_rows([f.split('/') for f in fields]) def test_empty(self): self.assertEqual( diff --git a/openerp/addons/test_impex/tests/test_import.py b/openerp/addons/test_impex/tests/test_import.py index e4e8a6a9774679e1b9792cb280272336149f59f2..11bacf6cce1040f0aa1fe449e8ef3ab6040755a5 100644 --- a/openerp/addons/test_impex/tests/test_import.py +++ b/openerp/addons/test_impex/tests/test_import.py @@ -57,7 +57,7 @@ class ImporterCase(common.TransactionCase): ids = ModelData.search( self.cr, openerp.SUPERUSER_ID, - [('model', '=', record._table_name), ('res_id', '=', record.id)]) + [('model', '=', record._name), ('res_id', '=', record.id)]) if ids: d = ModelData.read( self.cr, openerp.SUPERUSER_ID, ids, ['name', 'module'])[0] @@ -65,12 +65,12 @@ class ImporterCase(common.TransactionCase): return '%s.%s' % (d['module'], d['name']) return d['name'] - name = dict(record.name_get())[record.id] + name = record.name_get()[0][1] # fix dotted name_get results, otherwise xid lookups blow up name = name.replace('.', '-') ModelData.create(self.cr, openerp.SUPERUSER_ID, { 'name': name, - 'model': record._table_name, + 'model': record._name, 'res_id': record.id, 'module': '__test__' }) @@ -446,7 +446,7 @@ class test_selection_function(ImporterCase): ]), ok(2)) self.assertEqual( - ['3', '1'], + [3, 1], values(self.read())) def test_translated(self): @@ -661,7 +661,7 @@ class test_m2m(ImporterCase): id4 = M2O_o.create(self.cr, openerp.SUPERUSER_ID, {'value': 9, 'str': 'record3'}) records = M2O_o.browse(self.cr, openerp.SUPERUSER_ID, [id1, id2, id3, id4]) - name = lambda record: dict(record.name_get())[record.id] + name = lambda record: record.name_get()[0][1] 
self.assertEqual( self.import_(['value'], [ diff --git a/openerp/addons/test_impex/tests/test_load.py b/openerp/addons/test_impex/tests/test_load.py index 260492229a5691aaafd42e5a8cf6372fa3a07613..d9cbd5b100a06571d6f1e43ba0c7f72653481161 100644 --- a/openerp/addons/test_impex/tests/test_load.py +++ b/openerp/addons/test_impex/tests/test_load.py @@ -56,7 +56,7 @@ class ImporterCase(common.TransactionCase): ids = ModelData.search( self.cr, openerp.SUPERUSER_ID, - [('model', '=', record._table_name), ('res_id', '=', record.id)]) + [('model', '=', record._name), ('res_id', '=', record.id)]) if ids: d = ModelData.read( self.cr, openerp.SUPERUSER_ID, ids, ['name', 'module'])[0] @@ -64,12 +64,12 @@ class ImporterCase(common.TransactionCase): return '%s.%s' % (d['module'], d['name']) return d['name'] - name = dict(record.name_get())[record.id] + name = record.name_get()[0][1] # fix dotted name_get results, otherwise xid lookups blow up name = name.replace('.', '-') ModelData.create(self.cr, openerp.SUPERUSER_ID, { 'name': name, - 'model': record._table_name, + 'model': record._name, 'res_id': record.id, 'module': '__test__' }) @@ -247,7 +247,7 @@ class test_integer_field(ImporterCase): -1, -42, -(2**31 - 1), -(2**31), -12345678 ], values(self.read())) - @mute_logger('openerp.sql_db', 'openerp.osv.orm') + @mute_logger('openerp.sql_db', 'openerp.models') def test_out_of_range(self): result = self.import_(['value'], [[str(2**31)]]) self.assertIs(result['ids'], False) @@ -389,14 +389,14 @@ class test_unbound_string_field(ImporterCase): class test_required_string_field(ImporterCase): model_name = 'export.string.required' - @mute_logger('openerp.sql_db', 'openerp.osv.orm') + @mute_logger('openerp.sql_db', 'openerp.models') def test_empty(self): result = self.import_(['value'], [[]]) self.assertEqual(result['messages'], [message( u"Missing required value for the field 'unknown' (value)")]) self.assertIs(result['ids'], False) - @mute_logger('openerp.sql_db', 'openerp.osv.orm') + 
@mute_logger('openerp.sql_db', 'openerp.models') def test_not_provided(self): result = self.import_(['const'], [['12']]) self.assertEqual(result['messages'], [message( @@ -521,7 +521,7 @@ class test_selection_function(ImporterCase): self.assertEqual(len(result['ids']), 2) self.assertFalse(result['messages']) self.assertEqual( - ['3', '1'], + [3, 1], values(self.read())) def test_translated(self): @@ -536,7 +536,7 @@ class test_selection_function(ImporterCase): ], context={'lang': 'fr_FR'}) self.assertFalse(result['messages']) self.assertEqual(len(result['ids']), 2) - self.assertEqual(values(self.read()), ['1', '2']) + self.assertEqual(values(self.read()), [1, 2]) result = self.import_(['value'], [['Wheee']], context={'lang': 'fr_FR'}) self.assertFalse(result['messages']) @@ -770,7 +770,7 @@ class test_m2m(ImporterCase): id4 = M2O_o.create(self.cr, openerp.SUPERUSER_ID, {'value': 9, 'str': 'record3'}) records = M2O_o.browse(self.cr, openerp.SUPERUSER_ID, [id1, id2, id3, id4]) - name = lambda record: dict(record.name_get())[record.id] + name = lambda record: record.name_get()[0][1] result = self.import_(['value'], [ ['%s,%s' % (name(records[1]), name(records[2]))], diff --git a/openerp/addons/test_inherit/__init__.py b/openerp/addons/test_inherit/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..fe4487156b1f9bf7682ae5cbcaa9e4ef6c4d5178 --- /dev/null +++ b/openerp/addons/test_inherit/__init__.py @@ -0,0 +1,3 @@ +# -*- coding: utf-8 -*- +import models +# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: diff --git a/openerp/addons/test_inherit/__openerp__.py b/openerp/addons/test_inherit/__openerp__.py new file mode 100644 index 0000000000000000000000000000000000000000..35e6990a6b21b5acfc647b6c0f71bad7138263fd --- /dev/null +++ b/openerp/addons/test_inherit/__openerp__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +{ + 'name': 'test-inherit', + 'version': '0.1', + 'category': 'Tests', + 'description': """A module to verify 
the inheritance.""", + 'author': 'OpenERP SA', + 'maintainer': 'OpenERP SA', + 'website': 'http://www.openerp.com', + 'depends': ['base'], + 'data': ['ir.model.access.csv'], + 'installable': True, + 'auto_install': False, +} +# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: diff --git a/openerp/addons/test_inherit/ir.model.access.csv b/openerp/addons/test_inherit/ir.model.access.csv new file mode 100644 index 0000000000000000000000000000000000000000..79644d6a26be904ed4daff40d73681b9a44a7589 --- /dev/null +++ b/openerp/addons/test_inherit/ir.model.access.csv @@ -0,0 +1,3 @@ +"id","name","model_id:id","group_id:id","perm_read","perm_write","perm_create","perm_unlink" +access_test_inherit_mother,access_test_inherit_mother,model_test_inherit_mother,,1,1,1,1 +access_test_inherit_daugther,access_test_inherit_daugther,model_test_inherit_daugther,,1,1,1,1 diff --git a/openerp/addons/test_inherit/models.py b/openerp/addons/test_inherit/models.py new file mode 100644 index 0000000000000000000000000000000000000000..ebbe71e0521d1ac292eaefcceccc17fd94004159 --- /dev/null +++ b/openerp/addons/test_inherit/models.py @@ -0,0 +1,47 @@ +# -*- coding: utf-8 -*- +from openerp import models, fields, api + +# We just create a new model +class mother(models.Model): + _name = 'test.inherit.mother' + + name = fields.Char('Name', required=True) + surname = fields.Char(compute='_compute_surname') + + @api.one + @api.depends('name') + def _compute_surname(self): + self.surname = self.name or '' + +# We want to inherits from the parent model and we add some fields +# in the child object +class daughter(models.Model): + _name = 'test.inherit.daugther' + _inherits = {'test.inherit.mother': 'template_id'} + + template_id = fields.Many2one('test.inherit.mother', 'Template', + required=True, ondelete='cascade') + field_in_daughter = fields.Char('Field1') + + +# We add a new field in the parent object. Because of a recent refactoring, +# this feature was broken. 
+# This test and these models try to show the bug and fix it. +class mother(models.Model): + _inherit = 'test.inherit.mother' + + field_in_mother = fields.Char() + + # extend the name field by adding a default value + name = fields.Char(default='Unknown') + + # override the computed field, and extend its dependencies + @api.one + @api.depends('field_in_mother') + def _compute_surname(self): + if self.field_in_mother: + self.surname = self.field_in_mother + else: + super(mother, self)._compute_surname() + +# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: diff --git a/openerp/addons/test_inherit/tests/__init__.py b/openerp/addons/test_inherit/tests/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..15bc16fb00d1d913a1311f9a8e42e884075d3641 --- /dev/null +++ b/openerp/addons/test_inherit/tests/__init__.py @@ -0,0 +1,12 @@ +# -*- coding: utf-8 -*- + +from . import test_inherit + +fast_suite = [ +] + +checks = [ + test_inherit, +] + +# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: diff --git a/openerp/addons/test_inherit/tests/test_inherit.py b/openerp/addons/test_inherit/tests/test_inherit.py new file mode 100644 index 0000000000000000000000000000000000000000..b663d911f60ec1646cfa6d5a3c009d5a2cc18918 --- /dev/null +++ b/openerp/addons/test_inherit/tests/test_inherit.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +from openerp.tests import common + +class test_inherits(common.TransactionCase): + + def test_access_from_child_to_parent_model(self): + """ check whether added field in model is accessible from children models (_inherits) """ + # This test checks if the new added column of a parent model + # is accessible from the child model. This test has been written + # to verify the purpose of the inheritance computing of the class + # in the openerp.osv.orm._build_model. 
+ mother = self.registry('test.inherit.mother') + daugther = self.registry('test.inherit.daugther') + + self.assertIn('field_in_mother', mother._fields) + self.assertIn('field_in_mother', daugther._fields) + + def test_field_extension(self): + """ check the extension of a field in an inherited model """ + mother = self.registry('test.inherit.mother') + field = mother._fields['name'] + + # the field should inherit required=True, and have a default value + self.assertTrue(field.required) + self.assertEqual(field.default, 'Unknown') + + def test_depends_extension(self): + """ check that @depends on overridden compute methods extends dependencies """ + mother = self.registry('test.inherit.mother') + field = mother._fields['surname'] + + # the field dependencies are added + self.assertItemsEqual(field.depends, ['name', 'field_in_mother']) + + +# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: diff --git a/openerp/addons/test_new_api/__init__.py b/openerp/addons/test_new_api/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..89d26e2f597425ec38cb31b2864275ce29bd047c --- /dev/null +++ b/openerp/addons/test_new_api/__init__.py @@ -0,0 +1,2 @@ +# -*- coding: utf-8 -*- +import models diff --git a/openerp/addons/test_new_api/__openerp__.py b/openerp/addons/test_new_api/__openerp__.py new file mode 100644 index 0000000000000000000000000000000000000000..9c859de44455cdcf71f08d715983b1e2905a234d --- /dev/null +++ b/openerp/addons/test_new_api/__openerp__.py @@ -0,0 +1,19 @@ +# -*- coding: utf-8 -*- +{ + 'name': 'Test New API', + 'version': '1.0', + 'category': 'Tests', + 'description': """A module to test the new API.""", + 'author': 'OpenERP SA', + 'maintainer': 'OpenERP SA', + 'website': 'http://www.openerp.com', + 'depends': ['base'], + 'installable': True, + 'auto_install': False, + 'data': [ + 'ir.model.access.csv', + 'views.xml', + 'demo_data.xml', + ], +} +# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: diff 
--git a/openerp/addons/test_new_api/demo_data.xml b/openerp/addons/test_new_api/demo_data.xml new file mode 100644 index 0000000000000000000000000000000000000000..acb53a7b07f4ac0cd38b0d634f67f7ba9223f7d7 --- /dev/null +++ b/openerp/addons/test_new_api/demo_data.xml @@ -0,0 +1,30 @@ +<openerp> + <data> + <record id="category_0" model="test_new_api.category"> + <field name="name">Chat</field> + </record> + <record id="category_0_0" model="test_new_api.category"> + <field name="name">Foolish</field> + <field name="parent" ref="category_0"/> + </record> + + <record id="discussion_0" model="test_new_api.discussion"> + <field name="name">Stuff</field> + <field name="participants" eval="[(4, ref('base.user_root')), (4, ref('base.user_demo'))]"/> + </record> + + <record id="message_0_0" model="test_new_api.message"> + <field name="discussion" ref="discussion_0"/> + <field name="body">Hey dude!</field> + </record> + <record id="message_0_1" model="test_new_api.message"> + <field name="discussion" ref="discussion_0"/> + <field name="author" ref="base.user_demo"/> + <field name="body">What's up?</field> + </record> + <record id="message_0_2" model="test_new_api.message"> + <field name="discussion" ref="discussion_0"/> + <field name="body">This is a much longer message</field> + </record> + </data> +</openerp> diff --git a/openerp/addons/test_new_api/ir.model.access.csv b/openerp/addons/test_new_api/ir.model.access.csv new file mode 100644 index 0000000000000000000000000000000000000000..d43088d808bfc088d6719ad5fbc5364151ca1a8f --- /dev/null +++ b/openerp/addons/test_new_api/ir.model.access.csv @@ -0,0 +1,6 @@ +"id","name","model_id:id","group_id:id","perm_read","perm_write","perm_create","perm_unlink" +access_category,test_new_api_category,test_new_api.model_test_new_api_category,,1,1,1,1 +access_discussion,test_new_api_discussion,test_new_api.model_test_new_api_discussion,,1,1,1,1 +access_message,test_new_api_message,test_new_api.model_test_new_api_message,,1,1,1,1 
+access_talk,test_new_api_talk,test_new_api.model_test_new_api_talk,,1,1,1,1 +access_mixed,test_new_api_mixed,test_new_api.model_test_new_api_mixed,,1,1,1,1 diff --git a/openerp/addons/test_new_api/models.py b/openerp/addons/test_new_api/models.py new file mode 100644 index 0000000000000000000000000000000000000000..263868c835b9afdc6e2b59759185d7a688b58e9a --- /dev/null +++ b/openerp/addons/test_new_api/models.py @@ -0,0 +1,182 @@ +# -*- coding: utf-8 -*- +############################################################################## +# +# OpenERP, Open Source Management Solution +# Copyright (C) 2013-2014 OpenERP (<http://www.openerp.com>). +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as +# published by the Free Software Foundation, either version 3 of the +# License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see <http://www.gnu.org/licenses/>. +# +############################################################################## + +from openerp.osv import osv, fields + +class res_partner(osv.Model): + _inherit = 'res.partner' + + # + # add related fields to test them + # + _columns = { + # a regular one + 'related_company_partner_id': fields.related( + 'company_id', 'partner_id', type='many2one', obj='res.partner'), + # a related field with a single field + 'single_related_company_id': fields.related( + 'company_id', type='many2one', obj='res.company'), + # a related field with a single field that is also a related field! 
+ 'related_related_company_id': fields.related( + 'single_related_company_id', type='many2one', obj='res.company'), + } + + +from openerp import models, fields, api, _ + + +class Category(models.Model): + _name = 'test_new_api.category' + + name = fields.Char(required=True) + parent = fields.Many2one('test_new_api.category') + display_name = fields.Char(compute='_compute_display_name', inverse='_inverse_display_name') + + @api.one + @api.depends('name', 'parent.display_name') # this definition is recursive + def _compute_display_name(self): + if self.parent: + self.display_name = self.parent.display_name + ' / ' + self.name + else: + self.display_name = self.name + + @api.one + def _inverse_display_name(self): + names = self.display_name.split('/') + # determine sequence of categories + categories = [] + for name in names[:-1]: + category = self.search([('name', 'ilike', name.strip())]) + categories.append(category[0]) + categories.append(self) + # assign parents following sequence + for parent, child in zip(categories, categories[1:]): + if parent and child: + child.parent = parent + # assign name of last category, and reassign display_name (to normalize it) + self.name = names[-1].strip() + + +class Discussion(models.Model): + _name = 'test_new_api.discussion' + + name = fields.Char(string='Title', required=True, + help="General description of what this discussion is about.") + moderator = fields.Many2one('res.users') + categories = fields.Many2many('test_new_api.category', + 'test_new_api_discussion_category', 'discussion', 'category') + participants = fields.Many2many('res.users') + messages = fields.One2many('test_new_api.message', 'discussion') + + @api.onchange('moderator') + def _onchange_moderator(self): + self.participants |= self.moderator + + +class Message(models.Model): + _name = 'test_new_api.message' + + discussion = fields.Many2one('test_new_api.discussion', ondelete='cascade') + body = fields.Text() + author = fields.Many2one('res.users', 
default=lambda self: self.env.user) + name = fields.Char(string='Title', compute='_compute_name', store=True) + display_name = fields.Char(string='Abstract', compute='_compute_display_name') + size = fields.Integer(compute='_compute_size', search='_search_size') + double_size = fields.Integer(compute='_compute_double_size') + discussion_name = fields.Char(related='discussion.name', readonly=True) + + @api.one + @api.constrains('author', 'discussion') + def _check_author(self): + if self.discussion and self.author not in self.discussion.participants: + raise ValueError(_("Author must be among the discussion participants.")) + + @api.one + @api.depends('author.name', 'discussion.name') + def _compute_name(self): + self.name = "[%s] %s" % (self.discussion.name or '', self.author.name) + + @api.one + @api.depends('author.name', 'discussion.name', 'body') + def _compute_display_name(self): + stuff = "[%s] %s: %s" % (self.author.name, self.discussion.name or '', self.body or '') + self.display_name = stuff[:80] + + @api.one + @api.depends('body') + def _compute_size(self): + self.size = len(self.body or '') + + def _search_size(self, operator, value): + if operator not in ('=', '!=', '<', '<=', '>', '>=', 'in', 'not in'): + return [] + # retrieve all the messages that match with a specific SQL query + query = """SELECT id FROM "%s" WHERE char_length("body") %s %%s""" % \ + (self._table, operator) + self.env.cr.execute(query, (value,)) + ids = [t[0] for t in self.env.cr.fetchall()] + return [('id', 'in', ids)] + + @api.one + @api.depends('size') + def _compute_double_size(self): + # This illustrates a subtle situation: self.double_size depends on + # self.size. When size is computed, self.size is assigned, which should + # normally invalidate self.double_size. However, this may not happen + # while self.double_size is being computed: the last statement below + # would fail, because self.double_size would be undefined. 
+ self.double_size = 0 + size = self.size + self.double_size = self.double_size + size + + +class Talk(models.Model): + _name = 'test_new_api.talk' + + parent = fields.Many2one('test_new_api.discussion', delegate=True, required=True) + + +class MixedModel(models.Model): + _name = 'test_new_api.mixed' + + number = fields.Float(digits=(10, 2), default=3.14) + date = fields.Date() + now = fields.Datetime(compute='_compute_now') + lang = fields.Selection(string='Language', selection='_get_lang') + reference = fields.Reference(string='Related Document', + selection='_reference_models') + + @api.one + def _compute_now(self): + # this is a non-stored computed field without dependencies + self.now = fields.Datetime.now() + + @api.model + def _get_lang(self): + langs = self.env['res.lang'].search([]) + return [(lang.code, lang.name) for lang in langs] + + @api.model + def _reference_models(self): + models = self.env['ir.model'].search([('state', '!=', 'manual')]) + return [(model.model, model.name) + for model in models + if not model.model.startswith('ir.')] diff --git a/openerp/addons/test_new_api/tests/__init__.py b/openerp/addons/test_new_api/tests/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..dd9a2d44ec26d2ed55ca34d6d21cdb6af3b92c8b --- /dev/null +++ b/openerp/addons/test_new_api/tests/__init__.py @@ -0,0 +1,18 @@ +# -*- coding: utf-8 -*- + +from . import test_related +from . import test_new_fields +from . import test_onchange +from . import test_field_conversions +from . 
import test_attributes + +fast_suite = [ +] + +checks = [ + test_related, + test_new_fields, + test_onchange, + test_field_conversions, + test_attributes, +] diff --git a/openerp/addons/test_new_api/tests/test_attributes.py b/openerp/addons/test_new_api/tests/test_attributes.py new file mode 100644 index 0000000000000000000000000000000000000000..c8cceb6921026ce698c0e7e3eabad688787d1f4c --- /dev/null +++ b/openerp/addons/test_new_api/tests/test_attributes.py @@ -0,0 +1,25 @@ +# -*- coding: utf-8 -*- +from openerp.tests import common + +ANSWER_TO_ULTIMATE_QUESTION = 42 + +class TestAttributes(common.TransactionCase): + + def test_we_can_add_attributes(self): + Model = self.env['test_new_api.category'] + instance = Model.create({'name': 'Foo'}) + + # assign an unknown attribute + instance.unknown = ANSWER_TO_ULTIMATE_QUESTION + + # Does the attribute exist in the instance of the model ? + self.assertTrue(hasattr(instance, 'unknown')) + + # Is it the right type ? + self.assertIsInstance(instance.unknown, (int, long)) + + # Is it the right value, in case of, we don't know ;-) + self.assertEqual(instance.unknown, ANSWER_TO_ULTIMATE_QUESTION) + + # We are paranoiac ! 
+ self.assertEqual(getattr(instance, 'unknown'), ANSWER_TO_ULTIMATE_QUESTION) diff --git a/openerp/addons/test_new_api/tests/test_field_conversions.py b/openerp/addons/test_new_api/tests/test_field_conversions.py new file mode 100644 index 0000000000000000000000000000000000000000..4f9e886df41b90c7b1abedfb5b84cd793f8c1e34 --- /dev/null +++ b/openerp/addons/test_new_api/tests/test_field_conversions.py @@ -0,0 +1,13 @@ +# -*- coding: utf-8 -*- +import unittest2 +from openerp import models, fields + +class TestFieldToColumn(unittest2.TestCase): + def test_char(self): + # create a field, initialize its attributes, and convert it to a column + field = fields.Char(string="test string", required=True) + field.set_class_name(models.Model, 'test') + column = field.to_column() + + self.assertEqual(column.string, "test string") + self.assertTrue(column.required) diff --git a/openerp/addons/test_new_api/tests/test_new_fields.py b/openerp/addons/test_new_api/tests/test_new_fields.py new file mode 100644 index 0000000000000000000000000000000000000000..14ff358cc908f8c1fddf48dc755b0cb610958bf0 --- /dev/null +++ b/openerp/addons/test_new_api/tests/test_new_fields.py @@ -0,0 +1,393 @@ +# +# test cases for new-style fields +# +from datetime import date, datetime +from collections import defaultdict + +from openerp.tests import common +from openerp.exceptions import except_orm + + +class TestNewFields(common.TransactionCase): + + def test_00_basics(self): + """ test accessing new fields """ + # find a discussion + discussion = self.env.ref('test_new_api.discussion_0') + + # read field as a record attribute or as a record item + self.assertIsInstance(discussion.name, basestring) + self.assertIsInstance(discussion['name'], basestring) + self.assertEqual(discussion['name'], discussion.name) + + # read it with method read() + values = discussion.read(['name'])[0] + self.assertEqual(values['name'], discussion.name) + + def test_01_basic_get_assertion(self): + """ test item getter """ + # 
field access works on single record + record = self.env.ref('test_new_api.message_0_0') + self.assertEqual(len(record), 1) + ok = record.body + + # field access fails on multiple records + records = self.env['test_new_api.message'].search([]) + assert len(records) > 1 + with self.assertRaises(except_orm): + faulty = records.body + + def test_01_basic_set_assertion(self): + """ test item setter """ + # field assignment works on single record + record = self.env.ref('test_new_api.message_0_0') + self.assertEqual(len(record), 1) + record.body = 'OK' + + # field assignment fails on multiple records + records = self.env['test_new_api.message'].search([]) + assert len(records) > 1 + with self.assertRaises(except_orm): + records.body = 'Faulty' + + def test_10_computed(self): + """ check definition of computed fields """ + # by default function fields are not stored and readonly + field = self.env['test_new_api.message']._fields['size'] + self.assertFalse(field.store) + self.assertTrue(field.readonly) + + field = self.env['test_new_api.message']._fields['name'] + self.assertTrue(field.store) + self.assertTrue(field.readonly) + + def test_10_non_stored(self): + """ test non-stored fields """ + # find messages + for message in self.env['test_new_api.message'].search([]): + # check definition of field + self.assertEqual(message.size, len(message.body or '')) + + # check recomputation after record is modified + size = message.size + message.write({'body': (message.body or '') + "!!!"}) + self.assertEqual(message.size, size + 3) + + # special case: computed field without dependency must be computed + record = self.env['test_new_api.mixed'].create({}) + self.assertTrue(record.now) + + def test_11_stored(self): + """ test stored fields """ + # find the demo discussion + discussion = self.env.ref('test_new_api.discussion_0') + self.assertTrue(len(discussion.messages) > 0) + + # check messages + name0 = discussion.name or "" + for message in discussion.messages: + 
self.assertEqual(message.name, "[%s] %s" % (name0, message.author.name)) + + # modify discussion name, and check again messages + discussion.name = name1 = 'Talking about stuff...' + for message in discussion.messages: + self.assertEqual(message.name, "[%s] %s" % (name1, message.author.name)) + + # switch message from discussion, and check again + name2 = 'Another discussion' + discussion2 = discussion.copy({'name': name2}) + message2 = discussion.messages[0] + message2.discussion = discussion2 + for message in discussion2.messages: + self.assertEqual(message.name, "[%s] %s" % (name2, message.author.name)) + + def test_12_recursive(self): + """ test recursively dependent fields """ + Category = self.env['test_new_api.category'] + abel = Category.create({'name': 'Abel'}) + beth = Category.create({'name': 'Bethany'}) + cath = Category.create({'name': 'Catherine'}) + dean = Category.create({'name': 'Dean'}) + ewan = Category.create({'name': 'Ewan'}) + finn = Category.create({'name': 'Finnley'}) + gabe = Category.create({'name': 'Gabriel'}) + + cath.parent = finn.parent = gabe + abel.parent = beth.parent = cath + dean.parent = ewan.parent = finn + + self.assertEqual(abel.display_name, "Gabriel / Catherine / Abel") + self.assertEqual(beth.display_name, "Gabriel / Catherine / Bethany") + self.assertEqual(cath.display_name, "Gabriel / Catherine") + self.assertEqual(dean.display_name, "Gabriel / Finnley / Dean") + self.assertEqual(ewan.display_name, "Gabriel / Finnley / Ewan") + self.assertEqual(finn.display_name, "Gabriel / Finnley") + self.assertEqual(gabe.display_name, "Gabriel") + + ewan.parent = cath + self.assertEqual(ewan.display_name, "Gabriel / Catherine / Ewan") + + cath.parent = finn + self.assertEqual(ewan.display_name, "Gabriel / Finnley / Catherine / Ewan") + + def test_12_cascade(self): + """ test computed field depending on computed field """ + message = self.env.ref('test_new_api.message_0_0') + message.invalidate_cache() + double_size = 
message.double_size + self.assertEqual(double_size, message.size) + + def test_13_inverse(self): + """ test inverse computation of fields """ + Category = self.env['test_new_api.category'] + abel = Category.create({'name': 'Abel'}) + beth = Category.create({'name': 'Bethany'}) + cath = Category.create({'name': 'Catherine'}) + dean = Category.create({'name': 'Dean'}) + ewan = Category.create({'name': 'Ewan'}) + finn = Category.create({'name': 'Finnley'}) + gabe = Category.create({'name': 'Gabriel'}) + self.assertEqual(ewan.display_name, "Ewan") + + ewan.display_name = "Abel / Bethany / Catherine / Erwan" + + self.assertEqual(beth.parent, abel) + self.assertEqual(cath.parent, beth) + self.assertEqual(ewan.parent, cath) + self.assertEqual(ewan.name, "Erwan") + + def test_14_search(self): + """ test search on computed fields """ + discussion = self.env.ref('test_new_api.discussion_0') + + # determine message sizes + sizes = set(message.size for message in discussion.messages) + + # search for messages based on their size + for size in sizes: + messages0 = self.env['test_new_api.message'].search( + [('discussion', '=', discussion.id), ('size', '<=', size)]) + + messages1 = self.env['test_new_api.message'].browse() + for message in discussion.messages: + if message.size <= size: + messages1 += message + + self.assertEqual(messages0, messages1) + + def test_15_constraint(self): + """ test new-style Python constraints """ + discussion = self.env.ref('test_new_api.discussion_0') + + # remove oneself from discussion participants: we can no longer create + # messages in discussion + discussion.participants -= self.env.user + with self.assertRaises(Exception): + self.env['test_new_api.message'].create({'discussion': discussion.id, 'body': 'Whatever'}) + + # put back oneself into discussion participants: now we can create + # messages in discussion + discussion.participants += self.env.user + self.env['test_new_api.message'].create({'discussion': discussion.id, 'body': 
'Whatever'}) + + def test_20_float(self): + """ test float fields """ + record = self.env['test_new_api.mixed'].create({}) + + # assign value, and expect rounding + record.write({'number': 2.4999999999999996}) + self.assertEqual(record.number, 2.50) + + # same with field setter + record.number = 2.4999999999999996 + self.assertEqual(record.number, 2.50) + + def test_21_date(self): + """ test date fields """ + record = self.env['test_new_api.mixed'].create({}) + + # one may assign False or None + record.date = None + self.assertFalse(record.date) + + # one may assign date and datetime objects + record.date = date(2012, 05, 01) + self.assertEqual(record.date, '2012-05-01') + + record.date = datetime(2012, 05, 01, 10, 45, 00) + self.assertEqual(record.date, '2012-05-01') + + # one may assign dates in the default format, and it must be checked + record.date = '2012-05-01' + self.assertEqual(record.date, '2012-05-01') + + with self.assertRaises(ValueError): + record.date = '12-5-1' + + def test_22_selection(self): + """ test selection fields """ + record = self.env['test_new_api.mixed'].create({}) + + # one may assign False or None + record.lang = None + self.assertFalse(record.lang) + + # one may assign a value, and it must be checked + for language in self.env['res.lang'].search([]): + record.lang = language.code + with self.assertRaises(ValueError): + record.lang = 'zz_ZZ' + + def test_23_relation(self): + """ test relation fields """ + demo = self.env.ref('base.user_demo') + message = self.env.ref('test_new_api.message_0_0') + + # check environment of record and related records + self.assertEqual(message.env, self.env) + self.assertEqual(message.discussion.env, self.env) + + demo_env = self.env(user=demo) + self.assertNotEqual(demo_env, self.env) + + # check environment of record and related records + self.assertEqual(message.env, self.env) + self.assertEqual(message.discussion.env, self.env) + + # "migrate" message into demo_env, and check again + demo_message = 
message.sudo(demo) + self.assertEqual(demo_message.env, demo_env) + self.assertEqual(demo_message.discussion.env, demo_env) + + # assign record's parent to a record in demo_env + message.discussion = message.discussion.copy({'name': 'Copy'}) + + # both message and its parent field must be in self.env + self.assertEqual(message.env, self.env) + self.assertEqual(message.discussion.env, self.env) + + def test_24_reference(self): + """ test reference fields. """ + record = self.env['test_new_api.mixed'].create({}) + + # one may assign False or None + record.reference = None + self.assertFalse(record.reference) + + # one may assign a user or a partner... + record.reference = self.env.user + self.assertEqual(record.reference, self.env.user) + record.reference = self.env.user.partner_id + self.assertEqual(record.reference, self.env.user.partner_id) + # ... but no record from a model that starts with 'ir.' + with self.assertRaises(ValueError): + record.reference = self.env['ir.model'].search([], limit=1) + + def test_25_related(self): + """ test related fields. 
""" + message = self.env.ref('test_new_api.message_0_0') + discussion = message.discussion + + # by default related fields are not stored + field = message._fields['discussion_name'] + self.assertFalse(field.store) + self.assertTrue(field.readonly) + + # check value of related field + self.assertEqual(message.discussion_name, discussion.name) + + # change discussion name, and check result + discussion.name = 'Foo' + self.assertEqual(message.discussion_name, 'Foo') + + # change discussion name via related field, and check result + message.discussion_name = 'Bar' + self.assertEqual(discussion.name, 'Bar') + self.assertEqual(message.discussion_name, 'Bar') + + # search on related field, and check result + search_on_related = self.env['test_new_api.message'].search([('discussion_name', '=', 'Bar')]) + search_on_regular = self.env['test_new_api.message'].search([('discussion.name', '=', 'Bar')]) + self.assertEqual(search_on_related, search_on_regular) + + # check that field attributes are copied + message_field = message.fields_get(['discussion_name'])['discussion_name'] + discussion_field = discussion.fields_get(['name'])['name'] + self.assertEqual(message_field['help'], discussion_field['help']) + + def test_26_inherited(self): + """ test inherited fields. """ + # a bunch of fields are inherited from res_partner + for user in self.env['res.users'].search([]): + partner = user.partner_id + for field in ('is_company', 'name', 'email', 'country_id'): + self.assertEqual(getattr(user, field), getattr(partner, field)) + self.assertEqual(user[field], partner[field]) + + def test_30_read(self): + """ test computed fields as returned by read(). 
""" + discussion = self.env.ref('test_new_api.discussion_0') + + for message in discussion.messages: + display_name = message.display_name + size = message.size + + data = message.read(['display_name', 'size'])[0] + self.assertEqual(data['display_name'], display_name) + self.assertEqual(data['size'], size) + + def test_40_new(self): + """ test new records. """ + discussion = self.env.ref('test_new_api.discussion_0') + + # create a new message + message = self.env['test_new_api.message'].new() + self.assertFalse(message.id) + + # assign some fields; should have no side effect + message.discussion = discussion + message.body = BODY = "May the Force be with you." + self.assertEqual(message.discussion, discussion) + self.assertEqual(message.body, BODY) + + self.assertNotIn(message, discussion.messages) + + # check computed values of fields + user = self.env.user + self.assertEqual(message.author, user) + self.assertEqual(message.name, "[%s] %s" % (discussion.name, user.name)) + self.assertEqual(message.size, len(BODY)) + + def test_41_defaults(self): + """ test default values. 
""" + fields = ['discussion', 'body', 'author', 'size'] + defaults = self.env['test_new_api.message'].default_get(fields) + self.assertEqual(defaults, {'author': self.env.uid, 'size': 0}) + + defaults = self.env['test_new_api.mixed'].default_get(['number']) + self.assertEqual(defaults, {'number': 3.14}) + + +class TestMagicFields(common.TransactionCase): + + def test_write_date(self): + record = self.env['test_new_api.discussion'].create({'name': 'Booba'}) + self.assertEqual(record.create_uid, self.env.user) + self.assertEqual(record.write_uid, self.env.user) + + +class TestInherits(common.TransactionCase): + + def test_inherits(self): + """ Check that a many2one field with delegate=True adds an entry in _inherits """ + Talk = self.env['test_new_api.talk'] + self.assertEqual(Talk._inherits, {'test_new_api.discussion': 'parent'}) + self.assertIn('name', Talk._fields) + self.assertEqual(Talk._fields['name'].related, ('parent', 'name')) + + talk = Talk.create({'name': 'Foo'}) + discussion = talk.parent + self.assertTrue(discussion) + self.assertEqual(talk._name, 'test_new_api.talk') + self.assertEqual(discussion._name, 'test_new_api.discussion') + self.assertEqual(talk.name, discussion.name) diff --git a/openerp/addons/test_new_api/tests/test_onchange.py b/openerp/addons/test_new_api/tests/test_onchange.py new file mode 100644 index 0000000000000000000000000000000000000000..c69becf926305e1fca5b54b238b74f9bf0b8b699 --- /dev/null +++ b/openerp/addons/test_new_api/tests/test_onchange.py @@ -0,0 +1,165 @@ +# -*- coding: utf-8 -*- + +from openerp.tests import common + +class TestOnChange(common.TransactionCase): + + def setUp(self): + super(TestOnChange, self).setUp() + self.Discussion = self.env['test_new_api.discussion'] + self.Message = self.env['test_new_api.message'] + + def test_default_get(self): + """ checking values returned by default_get() """ + fields = ['name', 'categories', 'participants', 'messages'] + values = self.Discussion.default_get(fields) + 
self.assertEqual(values, {}) + + def test_get_field(self): + """ checking that accessing an unknown attribute does nothing special """ + with self.assertRaises(AttributeError): + self.Discussion.not_really_a_method() + + def test_new_onchange(self): + """ test the effect of onchange() """ + discussion = self.env.ref('test_new_api.discussion_0') + BODY = "What a beautiful day!" + USER = self.env.user + + field_onchange = self.Message._onchange_spec() + self.assertEqual(field_onchange.get('author'), '1') + self.assertEqual(field_onchange.get('body'), '1') + self.assertEqual(field_onchange.get('discussion'), '1') + + values = { + 'discussion': discussion.id, + 'name': "[%s] %s" % ('', USER.name), + 'body': False, + 'author': USER.id, + 'size': 0, + } + self.env.invalidate_all() + result = self.Message.onchange(values, 'discussion', field_onchange) + self.assertLessEqual(set(['name']), set(result['value'])) + self.assertEqual(result['value']['name'], "[%s] %s" % (discussion.name, USER.name)) + + values = { + 'discussion': discussion.id, + 'name': "[%s] %s" % (discussion.name, USER.name), + 'body': BODY, + 'author': USER.id, + 'size': 0, + } + self.env.invalidate_all() + result = self.Message.onchange(values, 'body', field_onchange) + self.assertLessEqual(set(['size']), set(result['value'])) + self.assertEqual(result['value']['size'], len(BODY)) + + def test_new_onchange_one2many(self): + """ test the effect of onchange() on one2many fields """ + BODY = "What a beautiful day!" 
+ USER = self.env.user + + # create an independent message + message = self.Message.create({'body': BODY}) + self.assertEqual(message.name, "[%s] %s" % ('', USER.name)) + + field_onchange = self.Discussion._onchange_spec() + self.assertEqual(field_onchange.get('name'), '1') + self.assertEqual(field_onchange.get('messages'), '1') + + # FIXME: commented out because currently not supported by the client + # # modify messages + # values = { + # 'name': "Foo", + # 'categories': [], + # 'moderator': False, + # 'participants': [], + # 'messages': [ + # (0, 0, { + # 'name': "[%s] %s" % ('', USER.name), + # 'body': BODY, + # 'author': USER.id, + # 'size': len(BODY), + # }), + # (4, message.id), + # ], + # } + # self.env.invalidate_all() + # result = self.Discussion.onchange(values, 'messages', field_onchange) + # self.assertLessEqual(set(['messages']), set(result['value'])) + # self.assertItemsEqual(result['value']['messages'], [ + # (0, 0, { + # 'name': "[%s] %s" % ("Foo", USER.name), + # 'body': BODY, + # 'author': USER.id, + # 'size': len(BODY), + # }), + # (1, message.id, { + # 'name': "[%s] %s" % ("Foo", USER.name), + # 'body': BODY, + # 'author': USER.id, + # 'size': len(BODY), + # }), + # ]) + + # modify discussion name + values = { + 'name': "Foo", + 'categories': [], + 'moderator': False, + 'participants': [], + 'messages': [ + (0, 0, { + 'name': "[%s] %s" % ('', USER.name), + 'body': BODY, + 'author': USER.id, + 'size': len(BODY), + }), + (4, message.id), + ], + } + self.env.invalidate_all() + result = self.Discussion.onchange(values, 'name', field_onchange) + self.assertLessEqual(set(['messages']), set(result['value'])) + self.assertItemsEqual(result['value']['messages'], [ + (0, 0, { + 'name': "[%s] %s" % ("Foo", USER.name), + 'body': BODY, + 'author': USER.id, + 'size': len(BODY), + }), + (1, message.id, { + 'name': "[%s] %s" % ("Foo", USER.name), + 'body': BODY, + 'author': USER.id, + 'size': len(BODY), + }), + ]) + + def test_new_onchange_specific(self): + 
""" test the effect of field-specific onchange method """ + discussion = self.env.ref('test_new_api.discussion_0') + demo = self.env.ref('base.user_demo') + + field_onchange = self.Discussion._onchange_spec() + self.assertEqual(field_onchange.get('moderator'), '1') + + # first remove demo user from participants + discussion.participants -= demo + self.assertNotIn(demo, discussion.participants) + + # check that demo_user is added to participants when set as moderator + participants = [(4, usr.id) for usr in discussion.participants] + values = { + 'name': discussion.name, + 'moderator': demo.id, + 'categories': [(4, cat.id) for cat in discussion.categories], + 'messages': [(4, msg.id) for msg in discussion.messages], + 'participants': participants, + } + self.env.invalidate_all() + result = discussion.onchange(values, 'moderator', field_onchange) + + self.assertLessEqual(set(['participants']), set(result['value'])) + self.assertItemsEqual(result['value']['participants'], participants + [(4, demo.id)]) diff --git a/openerp/addons/base/tests/test_fields.py b/openerp/addons/test_new_api/tests/test_related.py similarity index 73% rename from openerp/addons/base/tests/test_fields.py rename to openerp/addons/test_new_api/tests/test_related.py index ce34bceb014eb2c1dfb5018938d96b590a31de9d..e7ba6b8b1848cbe53935d1a76c543ead7040f8fa 100644 --- a/openerp/addons/base/tests/test_fields.py +++ b/openerp/addons/test_new_api/tests/test_related.py @@ -1,6 +1,8 @@ # -# test cases for fields access, etc. +# test cases for related fields, etc. 
# +import unittest + from openerp.osv import fields from openerp.tests import common @@ -13,13 +15,6 @@ class TestRelatedField(common.TransactionCase): def test_0_related(self): """ test an usual related field """ - # add a related field test_related_company_id on res.partner - old_columns = self.partner._columns - self.partner._columns = dict(old_columns) - self.partner._columns.update({ - 'related_company_partner_id': fields.related('company_id', 'partner_id', type='many2one', obj='res.partner'), - }) - # find a company with a non-null partner_id ids = self.company.search(self.cr, self.uid, [('partner_id', '!=', False)], limit=1) id = ids[0] @@ -30,9 +25,6 @@ class TestRelatedField(common.TransactionCase): partner_ids2 = self.partner.search(self.cr, self.uid, [('related_company_partner_id', '=', id)]) self.assertEqual(partner_ids1, partner_ids2) - # restore res.partner fields - self.partner._columns = old_columns - def do_test_company_field(self, field): # get a partner with a non-null company_id ids = self.partner.search(self.cr, self.uid, [('company_id', '!=', False)], limit=1) @@ -48,57 +40,14 @@ class TestRelatedField(common.TransactionCase): def test_1_single_related(self): """ test a related field with a single indirection like fields.related('foo') """ - # add a related field test_related_company_id on res.partner - # and simulate a _inherits_reload() to populate _all_columns. 
- old_columns = self.partner._columns - old_all_columns = self.partner._all_columns - self.partner._columns = dict(old_columns) - self.partner._all_columns = dict(old_all_columns) - self.partner._columns.update({ - 'single_related_company_id': fields.related('company_id', type='many2one', obj='res.company'), - }) - self.partner._all_columns.update({ - 'single_related_company_id': fields.column_info('single_related_company_id', self.partner._columns['single_related_company_id'], None, None, None) - }) - self.do_test_company_field('single_related_company_id') - # restore res.partner fields - self.partner._columns = old_columns - self.partner._all_columns = old_all_columns - def test_2_related_related(self): """ test a related field referring to a related field """ - # add a related field on a related field on res.partner - # and simulate a _inherits_reload() to populate _all_columns. - old_columns = self.partner._columns - old_all_columns = self.partner._all_columns - self.partner._columns = dict(old_columns) - self.partner._all_columns = dict(old_all_columns) - self.partner._columns.update({ - 'single_related_company_id': fields.related('company_id', type='many2one', obj='res.company'), - 'related_related_company_id': fields.related('single_related_company_id', type='many2one', obj='res.company'), - }) - self.partner._all_columns.update({ - 'single_related_company_id': fields.column_info('single_related_company_id', self.partner._columns['single_related_company_id'], None, None, None), - 'related_related_company_id': fields.column_info('related_related_company_id', self.partner._columns['related_related_company_id'], None, None, None) - }) - self.do_test_company_field('related_related_company_id') - # restore res.partner fields - self.partner._columns = old_columns - self.partner._all_columns = old_all_columns - def test_3_read_write(self): """ write on a related field """ - # add a related field test_related_company_id on res.partner - old_columns = 
self.partner._columns - self.partner._columns = dict(old_columns) - self.partner._columns.update({ - 'related_company_partner_id': fields.related('company_id', 'partner_id', type='many2one', obj='res.partner'), - }) - # find a company with a non-null partner_id company_ids = self.company.search(self.cr, self.uid, [('partner_id', '!=', False)], limit=1) company = self.company.browse(self.cr, self.uid, company_ids[0]) @@ -117,9 +66,6 @@ class TestRelatedField(common.TransactionCase): partner = self.partner.browse(self.cr, self.uid, partner_ids[0]) self.assertEqual(partner.related_company_partner_id.id, new_partner_id) - # restore res.partner fields - self.partner._columns = old_columns - class TestPropertyField(common.TransactionCase): @@ -132,6 +78,7 @@ class TestPropertyField(common.TransactionCase): self.property = self.registry('ir.property') self.imd = self.registry('ir.model.data') + @unittest.skip("invalid monkey-patching") def test_1_property_multicompany(self): cr, uid = self.cr, self.uid diff --git a/openerp/addons/test_new_api/views.xml b/openerp/addons/test_new_api/views.xml new file mode 100644 index 0000000000000000000000000000000000000000..005dd2cf670eba4978973b446ee5cdc0f7283c5c --- /dev/null +++ b/openerp/addons/test_new_api/views.xml @@ -0,0 +1,127 @@ +<openerp> + <data> + <menuitem id="menu_main" name="Discussions" sequence="20"/> + + <menuitem id="menu_sub" name="Discussions" parent="menu_main" sequence="10"/> + + <record id="action_discussions" model="ir.actions.act_window"> + <field name="name">Discussions</field> + <field name="res_model">test_new_api.discussion</field> + <field name="view_mode">tree,form</field> + </record> + <menuitem id="menu_discussions" action="action_discussions" parent="menu_sub" sequence="10"/> + + <record id="action_messages" model="ir.actions.act_window"> + <field name="name">Messages</field> + <field name="res_model">test_new_api.message</field> + <field name="view_mode">tree,form</field> + </record> + <menuitem 
id="menu_messages" action="action_messages" parent="menu_sub" sequence="20"/> + + <menuitem id="menu_config" name="Configuration" parent="menu_main" sequence="20"/> + + <record id="action_categories" model="ir.actions.act_window"> + <field name="name">Categories</field> + <field name="res_model">test_new_api.category</field> + <field name="view_mode">tree,form</field> + </record> + <menuitem id="menu_categories" action="action_categories" parent="menu_config"/> + + <!-- Discussion form view --> + <record id="discussion_form" model="ir.ui.view"> + <field name="name">discussion form view</field> + <field name="model">test_new_api.discussion</field> + <field name="arch" type="xml"> + <form string="Discussion" version="7.0"> + <sheet> + <group> + <field name="name"/> + <field name="categories" widget="many2many_tags"/> + <field name="moderator"/> + </group> + <notebook> + <page string="Messages"> + <field name="messages"> + <tree name="Messages"> + <field name="name"/> + <field name="body"/> + </tree> + <form string="Message" version="7.0"> + <group> + <field name="name"/> + <field name="author"/> + <field name="size"/> + </group> + <label for="body"/> + <field name="body"/> + </form> + </field> + </page> + <page string="Participants"> + <field name="participants"/> + </page> + </notebook> + </sheet> + </form> + </field> + </record> + + <!-- Message tree view --> + <record id="message_tree" model="ir.ui.view"> + <field name="name">message tree view</field> + <field name="model">test_new_api.message</field> + <field name="arch" type="xml"> + <tree string="Messages"> + <field name="display_name"/> + </tree> + </field> + </record> + + <!-- Message form view --> + <record id="message_form" model="ir.ui.view"> + <field name="name">message form view</field> + <field name="model">test_new_api.message</field> + <field name="arch" type="xml"> + <form string="Message" version="7.0"> + <sheet> + <group> + <field name="discussion"/> + <field name="name"/> + <field name="author"/> 
+ <field name="size"/> + </group> + <label for="body"/> + <field name="body"/> + </sheet> + </form> + </field> + </record> + + <!-- Category tree view --> + <record id="category_tree" model="ir.ui.view"> + <field name="name">category tree view</field> + <field name="model">test_new_api.category</field> + <field name="arch" type="xml"> + <tree string="Categories"> + <field name="display_name"/> + </tree> + </field> + </record> + + <!-- Category form view --> + <record id="category_form" model="ir.ui.view"> + <field name="name">category form view</field> + <field name="model">test_new_api.category</field> + <field name="arch" type="xml"> + <form string="Category" version="7.0"> + <sheet> + <group> + <field name="name"/> + <field name="parent"/> + </group> + </sheet> + </form> + </field> + </record> + </data> +</openerp> diff --git a/openerp/addons/test_workflow/models.py b/openerp/addons/test_workflow/models.py index f7a5ef23b1a74d18bf20d57699c68de044c2a870..df0defa41a2b31acfa196a37a33fc91753214af9 100644 --- a/openerp/addons/test_workflow/models.py +++ b/openerp/addons/test_workflow/models.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -import openerp +import openerp.osv.orm class m(openerp.osv.orm.Model): """ A model for which we will define a workflow (see data.xml). """ @@ -56,12 +56,13 @@ class e(openerp.osv.orm.Model): _inherit = 'test.workflow.model.a' for name in 'bcdefghijkl': - type( - name, - (openerp.osv.orm.Model,), - { - '_name': 'test.workflow.model.%s' % name, - '_inherit': 'test.workflow.model.a', - }) + # + # Do not use type() to create the class here, but use the class construct. + # This is because the __module__ of the new class would be the one of the + # metaclass that provides method __new__! 
+ # + class NewModel(openerp.osv.orm.Model): + _name = 'test.workflow.model.%s' % name + _inherit = 'test.workflow.model.a' # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: diff --git a/openerp/addons/test_workflow/tests/test_workflow.py b/openerp/addons/test_workflow/tests/test_workflow.py index e9ff7b9cd1e5afc56445d9ba8f93a9cb7fc0a6d9..b1cedca77f45ed379aab28e64a87e1cbfd95af86 100644 --- a/openerp/addons/test_workflow/tests/test_workflow.py +++ b/openerp/addons/test_workflow/tests/test_workflow.py @@ -53,7 +53,7 @@ class test_workflows(common.TransactionCase): # b -> c is a trigger (which is False), # so we remain in the b activity. - model.trigger(self.cr, SUPERUSER_ID, [i]) + model.trigger(self.cr, SUPERUSER_ID) self.check_activities(model._name, i, ['b']) # b -> c is a trigger (which is set to True). diff --git a/openerp/api.py b/openerp/api.py new file mode 100644 index 0000000000000000000000000000000000000000..5c8b93892b79b8a2f66a5e83af840d65b6a14425 --- /dev/null +++ b/openerp/api.py @@ -0,0 +1,814 @@ +# -*- coding: utf-8 -*- +############################################################################## +# +# OpenERP, Open Source Management Solution +# Copyright (C) 2013 OpenERP (<http://www.openerp.com>). +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as +# published by the Free Software Foundation, either version 3 of the +# License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see <http://www.gnu.org/licenses/>. 
+# +############################################################################## + +""" This module provides the elements for managing two different API styles, + namely the "traditional" and "record" styles. + + In the "traditional" style, parameters like the database cursor, user id, + context dictionary and record ids (usually denoted as ``cr``, ``uid``, + ``context``, ``ids``) are passed explicitly to all methods. In the "record" + style, those parameters are hidden into model instances, which gives it a + more object-oriented feel. + + For instance, the statements:: + + model = self.pool.get(MODEL) + ids = model.search(cr, uid, DOMAIN, context=context) + for rec in model.browse(cr, uid, ids, context=context): + print rec.name + model.write(cr, uid, ids, VALUES, context=context) + + may also be written as:: + + env = Env(cr, uid, context) # cr, uid, context wrapped in env + recs = env[MODEL] # retrieve an instance of MODEL + recs = recs.search(DOMAIN) # search returns a recordset + for rec in recs: # iterate over the records + print rec.name + recs.write(VALUES) # update all records in recs + + Methods written in the "traditional" style are automatically decorated, + following some heuristics based on parameter names. 
+""" + +__all__ = [ + 'Environment', + 'Meta', 'guess', 'noguess', + 'model', 'multi', 'one', + 'cr', 'cr_context', 'cr_uid', 'cr_uid_context', + 'cr_uid_id', 'cr_uid_id_context', 'cr_uid_ids', 'cr_uid_ids_context', + 'constrains', 'depends', 'onchange', 'returns', +] + +import logging + +from inspect import currentframe, getargspec +from collections import defaultdict, MutableMapping +from contextlib import contextmanager +from pprint import pformat +from weakref import WeakSet +from werkzeug.local import Local, release_local + +from openerp.tools import frozendict + +_logger = logging.getLogger(__name__) + +# The following attributes are used, and reflected on wrapping methods: +# - method._api: decorator function, used for re-applying decorator +# - method._constrains: set by @constrains, specifies constraint dependencies +# - method._depends: set by @depends, specifies compute dependencies +# - method._returns: set by @returns, specifies return model +# - method._onchange: set by @onchange, specifies onchange fields +# - method.clear_cache: set by @ormcache, used to clear the cache +# +# On wrapping method only: +# - method._orig: original method +# + +WRAPPED_ATTRS = ('__module__', '__name__', '__doc__', '_api', '_constrains', + '_depends', '_onchange', '_returns', 'clear_cache') + +INHERITED_ATTRS = ('_returns',) + + +class Meta(type): + """ Metaclass that automatically decorates traditional-style methods by + guessing their API. It also implements the inheritance of the + :func:`returns` decorators. 
+ """ + + def __new__(meta, name, bases, attrs): + # dummy parent class to catch overridden methods decorated with 'returns' + parent = type.__new__(meta, name, bases, {}) + + for key, value in attrs.items(): + if not key.startswith('__') and callable(value): + # make the method inherit from decorators + value = propagate(getattr(parent, key, None), value) + + # guess calling convention if none is given + if not hasattr(value, '_api'): + try: + value = guess(value) + except TypeError: + pass + + attrs[key] = value + + return type.__new__(meta, name, bases, attrs) + + +identity = lambda x: x + +def decorate(method, attr, value): + """ Decorate `method` or its original method. """ + # decorate the original method, and re-apply the api decorator, if any + orig = getattr(method, '_orig', method) + setattr(orig, attr, value) + return getattr(method, '_api', identity)(orig) + +def propagate(from_method, to_method): + """ Propagate decorators from `from_method` to `to_method`, and return the + resulting method. + """ + if from_method: + for attr in INHERITED_ATTRS: + if hasattr(from_method, attr) and not hasattr(to_method, attr): + to_method = decorate(to_method, attr, getattr(from_method, attr)) + return to_method + + +def constrains(*args): + """ Return a decorator that specifies the field dependencies of a method + implementing a constraint checker. Each argument must be a field name. + """ + return lambda method: decorate(method, '_constrains', args) + + +def onchange(*args): + """ Return a decorator to decorate an onchange method for given fields. + Each argument must be a field name. + """ + return lambda method: decorate(method, '_onchange', args) + + +def depends(*args): + """ Return a decorator that specifies the field dependencies of a "compute" + method (for new-style function fields). Each argument must be a string + that consists in a dot-separated sequence of field names. + + One may also pass a single function as argument. 
In that case, the + dependencies are given by calling the function with the field's model. + """ + if args and callable(args[0]): + args = args[0] + return lambda method: decorate(method, '_depends', args) + + +def returns(model, downgrade=None): + """ Return a decorator for methods that return instances of `model`. + + :param model: a model name, or ``'self'`` for the current model + + :param downgrade: a function `downgrade(value)` to convert the + record-style `value` to a traditional-style output + + The decorator adapts the method output to the api style: `id`, `ids` or + ``False`` for the traditional style, and recordset for the record style:: + + @model + @returns('res.partner') + def find_partner(self, arg): + ... # return some record + + # output depends on call style: traditional vs record style + partner_id = model.find_partner(cr, uid, arg, context=context) + + # recs = model.browse(cr, uid, ids, context) + partner_record = recs.find_partner(arg) + + Note that the decorated method must satisfy that convention. + + Those decorators are automatically *inherited*: a method that overrides + a decorated existing method will be decorated with the same + ``@returns(model)``. + """ + return lambda method: decorate(method, '_returns', (model, downgrade)) + + +def make_wrapper(method, old_api, new_api): + """ Return a wrapper method for `method`. """ + def wrapper(self, *args, **kwargs): + # avoid hasattr(self, '_ids') because __getattr__() is overridden + if '_ids' in self.__dict__: + return new_api(self, *args, **kwargs) + else: + return old_api(self, *args, **kwargs) + + # propagate specific openerp attributes from method to wrapper + for attr in WRAPPED_ATTRS: + if hasattr(method, attr): + setattr(wrapper, attr, getattr(method, attr)) + wrapper._orig = method + + return wrapper + + +def get_downgrade(method): + """ Return a function `downgrade(value)` that adapts `value` from + record-style to traditional-style, following the convention of `method`. 
+ """ + spec = getattr(method, '_returns', None) + if spec: + model, downgrade = spec + return downgrade or (lambda value: value.ids) + else: + return lambda value: value + + +def get_upgrade(method): + """ Return a function `upgrade(self, value)` that adapts `value` from + traditional-style to record-style, following the convention of `method`. + """ + spec = getattr(method, '_returns', None) + if spec: + model, downgrade = spec + if model == 'self': + return lambda self, value: self.browse(value) + else: + return lambda self, value: self.env[model].browse(value) + else: + return lambda self, value: value + + +def get_aggregate(method): + """ Return a function `aggregate(self, value)` that aggregates record-style + `value` for a method decorated with ``@one``. + """ + spec = getattr(method, '_returns', None) + if spec: + # value is a list of instances, concatenate them + model, downgrade = spec + if model == 'self': + return lambda self, value: sum(value, self.browse()) + else: + return lambda self, value: sum(value, self.env[model].browse()) + else: + return lambda self, value: value + + +def get_context_split(method): + """ Return a function `split` that extracts the context from a pair of + positional and keyword arguments:: + + context, args, kwargs = split(args, kwargs) + """ + pos = len(getargspec(method).args) - 1 + + def split(args, kwargs): + if pos < len(args): + return args[pos], args[:pos], kwargs + else: + return kwargs.pop('context', None), args, kwargs + + return split + + +def model(method): + """ Decorate a record-style method where `self` is a recordset. Such a + method:: + + @api.model + def method(self, args): + ... 
+ + may be called in both record and traditional styles, like:: + + # recs = model.browse(cr, uid, ids, context) + recs.method(args) + + model.method(cr, uid, args, context=context) + """ + method._api = model + split = get_context_split(method) + downgrade = get_downgrade(method) + + def old_api(self, cr, uid, *args, **kwargs): + context, args, kwargs = split(args, kwargs) + recs = self.browse(cr, uid, [], context) + result = method(recs, *args, **kwargs) + return downgrade(result) + + return make_wrapper(method, old_api, method) + + +def multi(method): + """ Decorate a record-style method where `self` is a recordset. Such a + method:: + + @api.multi + def method(self, args): + ... + + may be called in both record and traditional styles, like:: + + # recs = model.browse(cr, uid, ids, context) + recs.method(args) + + model.method(cr, uid, ids, args, context=context) + """ + method._api = multi + split = get_context_split(method) + downgrade = get_downgrade(method) + + def old_api(self, cr, uid, ids, *args, **kwargs): + context, args, kwargs = split(args, kwargs) + recs = self.browse(cr, uid, ids, context) + result = method(recs, *args, **kwargs) + return downgrade(result) + + return make_wrapper(method, old_api, method) + + +def one(method): + """ Decorate a record-style method where `self` is expected to be a + singleton instance. The decorated method automatically loops on records, + and makes a list with the results. In case the method is decorated with + @returns, it concatenates the resulting instances. 
Such a method:: + + @api.one + def method(self, args): + return self.name + + may be called in both record and traditional styles, like:: + + # recs = model.browse(cr, uid, ids, context) + names = recs.method(args) + + names = model.method(cr, uid, ids, args, context=context) + """ + method._api = one + split = get_context_split(method) + downgrade = get_downgrade(method) + aggregate = get_aggregate(method) + + def old_api(self, cr, uid, ids, *args, **kwargs): + context, args, kwargs = split(args, kwargs) + recs = self.browse(cr, uid, ids, context) + result = new_api(recs, *args, **kwargs) + return downgrade(result) + + def new_api(self, *args, **kwargs): + result = [method(rec, *args, **kwargs) for rec in self] + return aggregate(self, result) + + return make_wrapper(method, old_api, new_api) + + +def cr(method): + """ Decorate a traditional-style method that takes `cr` as a parameter. + Such a method may be called in both record and traditional styles, like:: + + # recs = model.browse(cr, uid, ids, context) + recs.method(args) + + model.method(cr, args) + """ + method._api = cr + upgrade = get_upgrade(method) + + def new_api(self, *args, **kwargs): + cr, uid, context = self.env.args + result = method(self._model, cr, *args, **kwargs) + return upgrade(self, result) + + return make_wrapper(method, method, new_api) + + +def cr_context(method): + """ Decorate a traditional-style method that takes `cr`, `context` as parameters. """ + method._api = cr_context + upgrade = get_upgrade(method) + + def new_api(self, *args, **kwargs): + cr, uid, context = self.env.args + kwargs['context'] = context + result = method(self._model, cr, *args, **kwargs) + return upgrade(self, result) + + return make_wrapper(method, method, new_api) + + +def cr_uid(method): + """ Decorate a traditional-style method that takes `cr`, `uid` as parameters. 
""" + method._api = cr_uid + upgrade = get_upgrade(method) + + def new_api(self, *args, **kwargs): + cr, uid, context = self.env.args + result = method(self._model, cr, uid, *args, **kwargs) + return upgrade(self, result) + + return make_wrapper(method, method, new_api) + + +def cr_uid_context(method): + """ Decorate a traditional-style method that takes `cr`, `uid`, `context` as + parameters. Such a method may be called in both record and traditional + styles, like:: + + # recs = model.browse(cr, uid, ids, context) + recs.method(args) + + model.method(cr, uid, args, context=context) + """ + method._api = cr_uid_context + upgrade = get_upgrade(method) + + def new_api(self, *args, **kwargs): + cr, uid, context = self.env.args + kwargs['context'] = context + result = method(self._model, cr, uid, *args, **kwargs) + return upgrade(self, result) + + return make_wrapper(method, method, new_api) + + +def cr_uid_id(method): + """ Decorate a traditional-style method that takes `cr`, `uid`, `id` as + parameters. Such a method may be called in both record and traditional + styles. In the record style, the method automatically loops on records. + """ + method._api = cr_uid_id + upgrade = get_upgrade(method) + + def new_api(self, *args, **kwargs): + cr, uid, context = self.env.args + result = [method(self._model, cr, uid, id, *args, **kwargs) for id in self.ids] + return upgrade(self, result) + + return make_wrapper(method, method, new_api) + + +def cr_uid_id_context(method): + """ Decorate a traditional-style method that takes `cr`, `uid`, `id`, + `context` as parameters. Such a method:: + + @api.cr_uid_id + def method(self, cr, uid, id, args, context=None): + ... 
+ + may be called in both record and traditional styles, like:: + + # rec = model.browse(cr, uid, id, context) + rec.method(args) + + model.method(cr, uid, id, args, context=context) + """ + method._api = cr_uid_id_context + upgrade = get_upgrade(method) + + def new_api(self, *args, **kwargs): + cr, uid, context = self.env.args + kwargs['context'] = context + result = [method(self._model, cr, uid, id, *args, **kwargs) for id in self.ids] + return upgrade(self, result) + + return make_wrapper(method, method, new_api) + + +def cr_uid_ids(method): + """ Decorate a traditional-style method that takes `cr`, `uid`, `ids` as + parameters. Such a method may be called in both record and traditional + styles. + """ + method._api = cr_uid_ids + upgrade = get_upgrade(method) + + def new_api(self, *args, **kwargs): + cr, uid, context = self.env.args + result = method(self._model, cr, uid, self.ids, *args, **kwargs) + return upgrade(self, result) + + return make_wrapper(method, method, new_api) + + +def cr_uid_ids_context(method): + """ Decorate a traditional-style method that takes `cr`, `uid`, `ids`, + `context` as parameters. Such a method:: + + @api.cr_uid_ids_context + def method(self, cr, uid, ids, args, context=None): + ... + + may be called in both record and traditional styles, like:: + + # recs = model.browse(cr, uid, ids, context) + recs.method(args) + + model.method(cr, uid, ids, args, context=context) + + It is generally not necessary, see :func:`guess`. + """ + method._api = cr_uid_ids_context + upgrade = get_upgrade(method) + + def new_api(self, *args, **kwargs): + cr, uid, context = self.env.args + kwargs['context'] = context + result = method(self._model, cr, uid, self.ids, *args, **kwargs) + return upgrade(self, result) + + return make_wrapper(method, method, new_api) + + +def v7(method_v7): + """ Decorate a method that supports the old-style api only. 
A new-style api + may be provided by redefining a method with the same name and decorated + with :func:`~.v8`:: + + @api.v7 + def foo(self, cr, uid, ids, context=None): + ... + + @api.v8 + def foo(self): + ... + + Note that the wrapper method uses the docstring of the first method. + """ + # retrieve method_v8 from the caller's frame + frame = currentframe().f_back + method = frame.f_locals.get(method_v7.__name__) + method_v8 = getattr(method, '_v8', method) + + wrapper = make_wrapper(method_v7, method_v7, method_v8) + wrapper._v7 = method_v7 + wrapper._v8 = method_v8 + return wrapper + + +def v8(method_v8): + """ Decorate a method that supports the new-style api only. An old-style api + may be provided by redefining a method with the same name and decorated + with :func:`~.v7`:: + + @api.v8 + def foo(self): + ... + + @api.v7 + def foo(self, cr, uid, ids, context=None): + ... + + Note that the wrapper method uses the docstring of the first method. + """ + # retrieve method_v7 from the caller's frame + frame = currentframe().f_back + method = frame.f_locals.get(method_v8.__name__) + method_v7 = getattr(method, '_v7', method) + + wrapper = make_wrapper(method_v8, method_v7, method_v8) + wrapper._v7 = method_v7 + wrapper._v8 = method_v8 + return wrapper + + +def noguess(method): + """ Decorate a method to prevent any effect from :func:`guess`. """ + method._api = False + return method + + +def guess(method): + """ Decorate `method` to make it callable in both traditional and record + styles. This decorator is applied automatically by the model's + metaclass, and has no effect on already-decorated methods. + + The API style is determined by heuristics on the parameter names: ``cr`` + or ``cursor`` for the cursor, ``uid`` or ``user`` for the user id, + ``id`` or ``ids`` for a list of record ids, and ``context`` for the + context dictionary. 
If a traditional API is recognized, one of the + decorators :func:`cr`, :func:`cr_context`, :func:`cr_uid`, + :func:`cr_uid_context`, :func:`cr_uid_id`, :func:`cr_uid_id_context`, + :func:`cr_uid_ids`, :func:`cr_uid_ids_context` is applied on the method. + + Method calls are considered traditional style when their first parameter + is a database cursor. + """ + if hasattr(method, '_api'): + return method + + # introspection on argument names to determine api style + args, vname, kwname, defaults = getargspec(method) + names = tuple(args) + (None,) * 4 + + if names[0] == 'self': + if names[1] in ('cr', 'cursor'): + if names[2] in ('uid', 'user'): + if names[3] == 'ids': + if 'context' in names or kwname: + return cr_uid_ids_context(method) + else: + return cr_uid_ids(method) + elif names[3] == 'id': + if 'context' in names or kwname: + return cr_uid_id_context(method) + else: + return cr_uid_id(method) + elif 'context' in names or kwname: + return cr_uid_context(method) + else: + return cr_uid(method) + elif 'context' in names: + return cr_context(method) + else: + return cr(method) + + # no wrapping by default + return noguess(method) + + +def expected(decorator, func): + """ Decorate `func` with `decorator` if `func` is not wrapped yet. """ + return decorator(func) if not hasattr(func, '_orig') else func + + + +class Environment(object): + """ An environment wraps data for ORM records: + + - :attr:`cr`, the current database cursor; + - :attr:`uid`, the current user id; + - :attr:`context`, the current context dictionary. + + It also provides access to the registry, a cache for records, and a data + structure to manage recomputations. + """ + _local = Local() + + @classmethod + @contextmanager + def manage(cls): + """ Context manager for a set of environments. 
""" + if hasattr(cls._local, 'environments'): + yield + else: + try: + cls._local.environments = WeakSet() + yield + finally: + release_local(cls._local) + + def __new__(cls, cr, uid, context): + assert context is not None + args = (cr, uid, context) + + # if env already exists, return it + env, envs = None, cls._local.environments + for env in envs: + if env.args == args: + return env + + # otherwise create environment, and add it in the set + self = object.__new__(cls) + self.cr, self.uid, self.context = self.args = (cr, uid, frozendict(context)) + self.registry = RegistryManager.get(cr.dbname) + self.cache = defaultdict(dict) # {field: {id: value, ...}, ...} + self.prefetch = defaultdict(set) # {model_name: set(id), ...} + self.computed = defaultdict(set) # {field: set(id), ...} + self.dirty = set() # set(record) + self.todo = {} # {field: records, ...} + self.mode = env.mode if env else Mode() + self.all = envs + envs.add(self) + return self + + def __getitem__(self, model_name): + """ return a given model """ + return self.registry[model_name]._browse(self, ()) + + def __call__(self, cr=None, user=None, context=None): + """ Return an environment based on `self` with modified parameters. 
+ + :param cr: optional database cursor to change the current cursor + :param user: optional user/user id to change the current user + :param context: optional context dictionary to change the current context + """ + cr = self.cr if cr is None else cr + uid = self.uid if user is None else int(user) + context = self.context if context is None else context + return Environment(cr, uid, context) + + def ref(self, xml_id, raise_if_not_found=True): + """ return the record corresponding to the given `xml_id` """ + return self['ir.model.data'].xmlid_to_object(xml_id, raise_if_not_found=raise_if_not_found) + + @property + def user(self): + """ return the current user (as an instance) """ + return self(user=SUPERUSER_ID)['res.users'].browse(self.uid) + + @property + def lang(self): + """ return the current language code """ + return self.context.get('lang') + + @contextmanager + def _do_in_mode(self, mode): + if self.mode.value: + yield + else: + try: + self.mode.value = mode + yield + finally: + self.mode.value = False + self.dirty.clear() + + def do_in_draft(self): + """ Context-switch to draft mode, where all field updates are done in + cache only. + """ + return self._do_in_mode(True) + + @property + def in_draft(self): + """ Return whether we are in draft mode. """ + return bool(self.mode.value) + + def do_in_onchange(self): + """ Context-switch to 'onchange' draft mode, which is a specialized + draft mode used during execution of onchange methods. + """ + return self._do_in_mode('onchange') + + @property + def in_onchange(self): + """ Return whether we are in 'onchange' draft mode. """ + return self.mode.value == 'onchange' + + def invalidate(self, spec): + """ Invalidate some fields for some records in the cache of all + environments. + + :param spec: what to invalidate, a list of `(field, ids)` pair, + where `field` is a field object, and `ids` is a list of record + ids or ``None`` (to invalidate all records). 
+ """ + if not spec: + return + for env in list(self.all): + c = env.cache + for field, ids in spec: + if ids is None: + if field in c: + del c[field] + else: + field_cache = c[field] + for id in ids: + field_cache.pop(id, None) + + def invalidate_all(self): + """ Clear the cache of all environments. """ + for env in list(self.all): + env.cache.clear() + env.prefetch.clear() + env.computed.clear() + env.dirty.clear() + + def check_cache(self): + """ Check the cache consistency. """ + # make a full copy of the cache, and invalidate it + cache_dump = dict( + (field, dict(field_cache)) + for field, field_cache in self.cache.iteritems() + ) + self.invalidate_all() + + # re-fetch the records, and compare with their former cache + invalids = [] + for field, field_dump in cache_dump.iteritems(): + ids = filter(None, field_dump) + records = self[field.model_name].browse(ids) + for record in records: + try: + cached = field_dump[record.id] + fetched = record[field.name] + if fetched != cached: + info = {'cached': cached, 'fetched': fetched} + invalids.append((field, record, info)) + except (AccessError, MissingError): + pass + + if invalids: + raise Warning('Invalid cache for fields\n' + pformat(invalids)) + + +class Mode(object): + """ A mode flag shared among environments. 
""" + value = False # False, True (draft) or 'onchange' (onchange draft) + + +# keep those imports here in order to handle cyclic dependencies correctly +from openerp import SUPERUSER_ID +from openerp.exceptions import Warning, AccessError, MissingError +from openerp.modules.registry import RegistryManager diff --git a/openerp/cli/scaffold/models.jinja2 b/openerp/cli/scaffold/models.jinja2 index 2409a121d495ed11c8b7173aba675de355ee3a6f..d18e72a23a1dcc9a53374605685541024ca79c04 100644 --- a/openerp/cli/scaffold/models.jinja2 +++ b/openerp/cli/scaffold/models.jinja2 @@ -1,10 +1,9 @@ # -*- coding: utf-8 -*- -from openerp.osv import orm, fields +from openerp.models import Model +from openerp.fields import Char -class {{ model }}(orm.Model): +class {{ model }}(Model): _name = "{{ module }}.{{ model }}" - _columns = { - 'name': fields.char(), - } + name = Char() diff --git a/openerp/exceptions.py b/openerp/exceptions.py index 7789345585c5095bb16d83a024a9582e7fa73df3..1d59fa67a7c67dc3c00fe6db937b59db73207b34 100644 --- a/openerp/exceptions.py +++ b/openerp/exceptions.py @@ -28,6 +28,13 @@ treated as a 'Server error'. If you consider introducing new exceptions, check out the test_exceptions addon. """ +# kept for backward compatibility +class except_orm(Exception): + def __init__(self, name, value): + self.name = name + self.value = value + self.args = (name, value) + class Warning(Exception): pass @@ -47,8 +54,15 @@ class AccessDenied(Exception): super(AccessDenied, self).__init__('Access denied.') self.traceback = ('', '', '') -class AccessError(Exception): +class AccessError(except_orm): """ Access rights error. """ + def __init__(self, msg): + super(AccessError, self).__init__('AccessError', msg) + +class MissingError(except_orm): + """ Missing record(s). """ + def __init__(self, msg): + super(MissingError, self).__init__('MissingError', msg) class DeferredException(Exception): """ Exception object holding a traceback for asynchronous reporting. 
diff --git a/openerp/fields.py b/openerp/fields.py new file mode 100644 index 0000000000000000000000000000000000000000..ede066e37fe20a93cf9fb6334854b266df29852e --- /dev/null +++ b/openerp/fields.py @@ -0,0 +1,1563 @@ +# -*- coding: utf-8 -*- +############################################################################## +# +# OpenERP, Open Source Management Solution +# Copyright (C) 2013-2014 OpenERP (<http://www.openerp.com>). +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as +# published by the Free Software Foundation, either version 3 of the +# License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see <http://www.gnu.org/licenses/>. +# +############################################################################## + +""" High-level objects for fields. """ + +from copy import copy +from datetime import date, datetime +from functools import partial +from operator import attrgetter +import logging +import pytz +import xmlrpclib + +from types import NoneType + +from openerp.tools import float_round, ustr, html_sanitize +from openerp.tools import DEFAULT_SERVER_DATE_FORMAT as DATE_FORMAT +from openerp.tools import DEFAULT_SERVER_DATETIME_FORMAT as DATETIME_FORMAT + +DATE_LENGTH = len(date.today().strftime(DATE_FORMAT)) +DATETIME_LENGTH = len(datetime.now().strftime(DATETIME_FORMAT)) + +_logger = logging.getLogger(__name__) + + +class SpecialValue(object): + """ Encapsulates a value in the cache in place of a normal value. 
""" + def __init__(self, value): + self.value = value + def get(self): + return self.value + +class FailedValue(SpecialValue): + """ Special value that encapsulates an exception instead of a value. """ + def __init__(self, exception): + self.exception = exception + def get(self): + raise self.exception + +def _check_value(value): + """ Return `value`, or call its getter if `value` is a :class:`SpecialValue`. """ + return value.get() if isinstance(value, SpecialValue) else value + + +def resolve_all_mro(cls, name, reverse=False): + """ Return the (successively overridden) values of attribute `name` in `cls` + in mro order, or inverse mro order if `reverse` is true. + """ + klasses = reversed(cls.__mro__) if reverse else cls.__mro__ + for klass in klasses: + if name in klass.__dict__: + yield klass.__dict__[name] + + +def default_compute(field, value): + """ Return a compute function for the given default `value`; `value` is + either a constant, or a unary function returning the default value. + """ + name = field.name + func = value if callable(value) else lambda rec: value + def compute(recs): + for rec in recs: + rec[name] = func(rec) + return compute + + +class MetaField(type): + """ Metaclass for field classes. """ + by_type = {} + + def __init__(cls, name, bases, attrs): + super(MetaField, cls).__init__(name, bases, attrs) + if cls.type: + cls.by_type[cls.type] = cls + + # compute class attributes to avoid calling dir() on fields + cls.column_attrs = [] + cls.related_attrs = [] + cls.description_attrs = [] + for attr in dir(cls): + if attr.startswith('_column_'): + cls.column_attrs.append((attr[8:], attr)) + elif attr.startswith('_related_'): + cls.related_attrs.append((attr[9:], attr)) + elif attr.startswith('_description_'): + cls.description_attrs.append((attr[13:], attr)) + + +class Field(object): + """ The field descriptor contains the field definition, and manages accesses + and assignments of the corresponding field on records. 
The following + attributes may be provided when instanciating a field: + + :param string: the label of the field seen by users (string); if not + set, the ORM takes the field name in the class (capitalized). + + :param help: the tooltip of the field seen by users (string) + + :param readonly: whether the field is readonly (boolean, by default ``False``) + + :param required: whether the value of the field is required (boolean, by + default ``False``) + + :param index: whether the field is indexed in database (boolean, by + default ``False``) + + :param default: the default value for the field; this is either a static + value, or a function taking a recordset and returning a value + + :param states: a dictionary mapping state values to lists of attribute-value + pairs; possible attributes are: 'readonly', 'required', 'invisible' + + :param groups: comma-separated list of group xml ids (string); this + restricts the field access to the users of the given groups only + + .. _field-computed: + + .. rubric:: Computed fields + + One can define a field whose value is computed instead of simply being + read from the database. The attributes that are specific to computed + fields are given below. To define such a field, simply provide a value + for the attribute `compute`. + + :param compute: name of a method that computes the field + + :param inverse: name of a method that inverses the field (optional) + + :param search: name of a method that implement search on the field (optional) + + :param store: whether the field is stored in database (boolean, by + default ``False`` on computed fields) + + The methods given for `compute`, `inverse` and `search` are model + methods. 
Their signature is shown in the following example:: + + upper = fields.Char(compute='_compute_upper', + inverse='_inverse_upper', + search='_search_upper') + + @api.depends('name') + def _compute_upper(self): + for rec in self: + self.upper = self.name.upper() if self.name else False + + def _inverse_upper(self): + for rec in self: + self.name = self.upper.lower() if self.upper else False + + def _search_upper(self, operator, value): + if operator == 'like': + operator = 'ilike' + return [('name', operator, value)] + + The compute method has to assign the field on all records of the invoked + recordset. The decorator :meth:`openerp.api.depends` must be applied on + the compute method to specify the field dependencies; those dependencies + are used to determine when to recompute the field; recomputation is + automatic and guarantees cache/database consistency. Note that the same + method can be used for several fields, you simply have to assign all the + given fields in the method; the method will be invoked once for all + those fields. + + By default, a computed field is not stored to the database, and is + computed on-the-fly. Adding the attribute ``store=True`` will store the + field's values in the database. The advantage of a stored field is that + searching on that field is done by the database itself. The disadvantage + is that it requires database updates when the field must be recomputed. + + The inverse method, as its name says, does the inverse of the compute + method: the invoked records have a value for the field, and you must + apply the necessary changes on the field dependencies such that the + computation gives the expected value. Note that a computed field without + an inverse method is readonly by default. + + The search method is invoked when processing domains before doing an + actual search on the model. It must return a domain equivalent to the + condition: `field operator value`. + + .. _field-related: + + .. 
rubric:: Related fields + + The value of a related field is given by following a sequence of + relational fields and reading a field on the reached model. The complete + sequence of fields to traverse is specified by the attribute + + :param related: sequence of field names + + The value of some attributes from related fields are automatically taken + from the source field, when it makes sense. Examples are the attributes + `string` or `selection` on selection fields. + + By default, the values of related fields are not stored to the database. + Add the attribute ``store=True`` to make it stored, just like computed + fields. Related fields are automatically recomputed when their + dependencies are modified. + + .. _field-company-dependent: + + .. rubric:: Company-dependent fields + + Formerly known as 'property' fields, the value of those fields depends + on the company. In other words, users that belong to different companies + may see different values for the field on a given record. + + :param company_dependent: whether the field is company-dependent (boolean) + + .. _field-incremental-definition: + + .. rubric:: Incremental definition + + A field is defined as class attribute on a model class. If the model is + extended (see :class:`BaseModel`), one can also extend the field + definition by redefining a field with the same name and same type on the + subclass. In that case, the attributes of the field are taken from the + parent class and overridden by the ones given in subclasses. 
+ + For instance, the second class below only adds a tooltip on the field + ``state``:: + + class First(models.Model): + _name = 'foo' + state = fields.Selection([...], required=True) + + class Second(models.Model): + _inherit = 'foo' + state = fields.Selection(help="Blah blah blah") + + """ + __metaclass__ = MetaField + + _attrs = None # dictionary with all field attributes + _free_attrs = None # list of semantic-free attribute names + + automatic = False # whether the field is automatically created ("magic" field) + _origin = None # the column or field interfaced by self, if any + + name = None # name of the field + type = None # type of the field (string) + relational = False # whether the field is a relational one + model_name = None # name of the model of this field + comodel_name = None # name of the model of values (if relational) + inverse_field = None # inverse field (object), if it exists + + store = True # whether the field is stored in database + index = False # whether the field is indexed in database + copyable = True # whether the field is copied over by BaseModel.copy() + depends = () # collection of field dependencies + recursive = False # whether self depends on itself + compute = None # compute(recs) computes field on recs + inverse = None # inverse(recs) inverses field on recs + search = None # search(recs, operator, value) searches on self + related = None # sequence of field names, for related fields + company_dependent = False # whether `self` is company-dependent (property field) + default = None # default value + + string = None # field label + help = None # field tooltip + readonly = False + required = False + states = None + groups = False # csv list of group xml ids + + def __init__(self, string=None, **kwargs): + kwargs['string'] = string + self._attrs = {key: val for key, val in kwargs.iteritems() if val is not None} + self._free_attrs = [] + + def copy(self, **kwargs): + """ make a copy of `self`, possibly modified with parameters 
`kwargs` """ + field = copy(self) + field._attrs = {key: val for key, val in kwargs.iteritems() if val is not None} + field._free_attrs = list(self._free_attrs) + return field + + def set_class_name(self, cls, name): + """ Assign the model class and field name of `self`. """ + self.model_name = cls._name + self.name = name + + # determine all inherited field attributes + attrs = {} + for field in resolve_all_mro(cls, name, reverse=True): + if isinstance(field, type(self)): + attrs.update(field._attrs) + else: + attrs.clear() + attrs.update(self._attrs) # necessary in case self is not in cls + + # initialize `self` with `attrs` + if attrs.get('compute'): + # by default, computed fields are not stored, not copied and readonly + attrs['store'] = attrs.get('store', False) + attrs['copy'] = attrs.get('copy', False) + attrs['readonly'] = attrs.get('readonly', not attrs.get('inverse')) + if attrs.get('related'): + # by default, related fields are not stored + attrs['store'] = attrs.get('store', False) + if 'copy' in attrs: + # attribute is copyable because there is also a copy() method + attrs['copyable'] = attrs.pop('copy') + + for attr, value in attrs.iteritems(): + if not hasattr(self, attr): + self._free_attrs.append(attr) + setattr(self, attr, value) + + if not self.string: + self.string = name.replace('_', ' ').capitalize() + + self.reset() + + def __str__(self): + return "%s.%s" % (self.model_name, self.name) + + def __repr__(self): + return "%s.%s" % (self.model_name, self.name) + + ############################################################################ + # + # Field setup + # + + def reset(self): + """ Prepare `self` for a new setup. """ + self._setup_done = False + # self._triggers is a set of pairs (field, path) that represents the + # computed fields that depend on `self`. When `self` is modified, it + # invalidates the cache of each `field`, and registers the records to + # recompute based on `path`. See method `modified` below for details. 
+ self._triggers = set() + self.inverse_field = None + + def setup(self, env): + """ Complete the setup of `self` (dependencies, recomputation triggers, + and other properties). This method is idempotent: it has no effect + if `self` has already been set up. + """ + if not self._setup_done: + self._setup_done = True + self._setup(env) + + def _setup(self, env): + """ Do the actual setup of `self`. """ + if self.related: + self._setup_related(env) + else: + self._setup_regular(env) + + # put invalidation/recomputation triggers on dependencies + for path in self.depends: + self._setup_dependency([], env[self.model_name], path.split('.')) + + # + # Setup of related fields + # + + def _setup_related(self, env): + """ Setup the attributes of a related field. """ + # fix the type of self.related if necessary + if isinstance(self.related, basestring): + self.related = tuple(self.related.split('.')) + + # determine the related field, and make sure it is set up + recs = env[self.model_name] + for name in self.related[:-1]: + recs = recs[name] + field = self.related_field = recs._fields[self.related[-1]] + field.setup(env) + + # check type consistency + if self.type != field.type: + raise Warning("Type of related field %s is inconsistent with %s" % (self, field)) + + # determine dependencies, compute, inverse, and search + self.depends = ('.'.join(self.related),) + self.compute = self._compute_related + self.inverse = self._inverse_related + self.search = self._search_related + + # copy attributes from field to self (string, help, etc.) + for attr, prop in self.related_attrs: + if not getattr(self, attr): + setattr(self, attr, getattr(field, prop)) + + def _compute_related(self, records): + """ Compute the related field `self` on `records`. 
""" + for record in records: + # bypass access rights check when traversing the related path + value = record.sudo() if record.id else record + # traverse the intermediate fields, and keep at most one record + for name in self.related[:-1]: + value = value[name][:1] + record[self.name] = value[self.related[-1]] + + def _inverse_related(self, records): + """ Inverse the related field `self` on `records`. """ + for record in records: + other = record + # traverse the intermediate fields, and keep at most one record + for name in self.related[:-1]: + other = other[name][:1] + if other: + other[self.related[-1]] = record[self.name] + + def _search_related(self, records, operator, value): + """ Determine the domain to search on field `self`. """ + return [('.'.join(self.related), operator, value)] + + # properties used by _setup_related() to copy values from related field + _related_string = property(attrgetter('string')) + _related_help = property(attrgetter('help')) + _related_readonly = property(attrgetter('readonly')) + _related_groups = property(attrgetter('groups')) + + # + # Setup of non-related fields + # + + def _setup_regular(self, env): + """ Setup the attributes of a non-related field. 
""" + recs = env[self.model_name] + + def make_depends(deps): + return tuple(deps(recs) if callable(deps) else deps) + + # transform self.default into self.compute + if self.default is not None and self.compute is None: + self.compute = default_compute(self, self.default) + + # convert compute into a callable and determine depends + if isinstance(self.compute, basestring): + # if the compute method has been overridden, concatenate all their _depends + self.depends = () + for method in resolve_all_mro(type(recs), self.compute, reverse=True): + self.depends += make_depends(getattr(method, '_depends', ())) + self.compute = getattr(type(recs), self.compute) + else: + self.depends = make_depends(getattr(self.compute, '_depends', ())) + + # convert inverse and search into callables + if isinstance(self.inverse, basestring): + self.inverse = getattr(type(recs), self.inverse) + if isinstance(self.search, basestring): + self.search = getattr(type(recs), self.search) + + def _setup_dependency(self, path0, model, path1): + """ Make `self` depend on `model`; `path0 + path1` is a dependency of + `self`, and `path0` is the sequence of field names from `self.model` + to `model`. 
+ """ + env = model.env + head, tail = path1[0], path1[1:] + + if head == '*': + # special case: add triggers on all fields of model (except self) + fields = set(model._fields.itervalues()) - set([self]) + else: + fields = [model._fields[head]] + + for field in fields: + if field == self: + _logger.debug("Field %s is recursively defined", self) + self.recursive = True + continue + + field.setup(env) + + #_logger.debug("Add trigger on %s to recompute %s", field, self) + field._triggers.add((self, '.'.join(path0 or ['id']))) + + # add trigger on inverse field, too + if field.inverse_field: + #_logger.debug("Add trigger on %s to recompute %s", field.inverse_field, self) + field.inverse_field._triggers.add((self, '.'.join(path0 + [head]))) + + # recursively traverse the dependency + if tail: + comodel = env[field.comodel_name] + self._setup_dependency(path0 + [head], comodel, tail) + + @property + def dependents(self): + """ Return the computed fields that depend on `self`. """ + return (field for field, path in self._triggers) + + ############################################################################ + # + # Field description + # + + def get_description(self, env): + """ Return a dictionary that describes the field `self`. 
""" + desc = {'type': self.type} + # determine 'store' + if self.store: + # if the corresponding column is a function field, check the column + column = env[self.model_name]._columns.get(self.name) + desc['store'] = bool(getattr(column, 'store', True)) + else: + desc['store'] = False + # determine other attributes + for attr, prop in self.description_attrs: + value = getattr(self, prop) + if callable(value): + value = value(env) + if value: + desc[attr] = value + return desc + + # properties used by get_description() + _description_depends = property(attrgetter('depends')) + _description_related = property(attrgetter('related')) + _description_company_dependent = property(attrgetter('company_dependent')) + _description_readonly = property(attrgetter('readonly')) + _description_required = property(attrgetter('required')) + _description_states = property(attrgetter('states')) + _description_groups = property(attrgetter('groups')) + + def _description_string(self, env): + if self.string and env.lang: + name = "%s,%s" % (self.model_name, self.name) + trans = env['ir.translation']._get_source(name, 'field', env.lang) + return trans or self.string + return self.string + + def _description_help(self, env): + if self.help and env.lang: + name = "%s,%s" % (self.model_name, self.name) + trans = env['ir.translation']._get_source(name, 'help', env.lang) + return trans or self.help + return self.help + + ############################################################################ + # + # Conversion to column instance + # + + def to_column(self): + """ return a low-level field object corresponding to `self` """ + assert self.store + if self._origin: + assert isinstance(self._origin, fields._column) + return self._origin + + _logger.debug("Create fields._column for Field %s", self) + args = {} + for attr, prop in self.column_attrs: + args[attr] = getattr(self, prop) + for attr in self._free_attrs: + args[attr] = getattr(self, attr) + + if self.company_dependent: + # 
company-dependent fields are mapped to former property fields + args['type'] = self.type + args['relation'] = self.comodel_name + return fields.property(**args) + + return getattr(fields, self.type)(**args) + + # properties used by to_column() to create a column instance + _column_copy = property(attrgetter('copyable')) + _column_select = property(attrgetter('index')) + _column_string = property(attrgetter('string')) + _column_help = property(attrgetter('help')) + _column_readonly = property(attrgetter('readonly')) + _column_required = property(attrgetter('required')) + _column_states = property(attrgetter('states')) + _column_groups = property(attrgetter('groups')) + + ############################################################################ + # + # Conversion of values + # + + def null(self, env): + """ return the null value for this field in the given environment """ + return False + + def convert_to_cache(self, value, env): + """ convert `value` to the cache level in `env`; `value` may come from + an assignment, or have the format of methods :meth:`BaseModel.read` + or :meth:`BaseModel.write` + """ + return value + + def convert_to_read(self, value, use_name_get=True): + """ convert `value` from the cache to a value as returned by method + :meth:`BaseModel.read` + """ + return value + + def convert_to_write(self, value, target=None, fnames=None): + """ convert `value` from the cache to a valid value for method + :meth:`BaseModel.write`. + + :param target: optional, the record to be modified with this value + :param fnames: for relational fields only, an optional collection of + field names to convert + """ + return self.convert_to_read(value) + + def convert_to_onchange(self, value): + """ convert `value` from the cache to a valid value for an onchange + method v7. + """ + return self.convert_to_write(value) + + def convert_to_export(self, value, env): + """ convert `value` from the cache to a valid value for export. 
The + parameter `env` is given for managing translations. + """ + if env.context.get('export_raw_data'): + return value + return bool(value) and ustr(value) + + def convert_to_display_name(self, value): + """ convert `value` from the cache to a suitable display name. """ + return ustr(value) + + ############################################################################ + # + # Descriptor methods + # + + def __get__(self, record, owner): + """ return the value of field `self` on `record` """ + if record is None: + return self # the field is accessed through the owner class + + if not record: + # null record -> return the null value for this field + return self.null(record.env) + + # only a single record may be accessed + record.ensure_one() + + try: + return record._cache[self] + except KeyError: + pass + + # cache miss, retrieve value + if record.id: + # normal record -> read or compute value for this field + self.determine_value(record) + else: + # new record -> compute default value for this field + record.add_default_value(self) + + # the result should be in cache now + return record._cache[self] + + def __set__(self, record, value): + """ set the value of field `self` on `record` """ + env = record.env + + # only a single record may be updated + record.ensure_one() + + # adapt value to the cache level + value = self.convert_to_cache(value, env) + + if env.in_draft or not record.id: + # determine dependent fields + spec = self.modified_draft(record) + + # set value in cache, inverse field, and mark record as dirty + record._cache[self] = value + if env.in_onchange: + if self.inverse_field: + self.inverse_field._update(value, record) + record._dirty = True + + # determine more dependent fields, and invalidate them + if self.relational: + spec += self.modified_draft(record) + env.invalidate(spec) + + else: + # simply write to the database, and update cache + record.write({self.name: self.convert_to_write(value)}) + record._cache[self] = value + + 
############################################################################ + # + # Computation of field values + # + + def _compute_value(self, records): + """ Invoke the compute method on `records`. """ + # mark the computed fields failed in cache, so that access before + # computation raises an exception + exc = Warning("Field %s is accessed before being computed." % self) + for field in self.computed_fields: + records._cache[field] = FailedValue(exc) + records.env.computed[field].update(records._ids) + self.compute(records) + for field in self.computed_fields: + records.env.computed[field].difference_update(records._ids) + + def compute_value(self, records): + """ Invoke the compute method on `records`; the results are in cache. """ + with records.env.do_in_draft(): + try: + self._compute_value(records) + except MissingError: + # some record is missing, retry on existing records only + self._compute_value(records.exists()) + + def determine_value(self, record): + """ Determine the value of `self` for `record`. 
""" + env = record.env + + if self.store and not (self.depends and env.in_draft): + # this is a stored field + if self.depends: + # this is a stored computed field, check for recomputation + recs = record._recompute_check(self) + if recs: + # recompute the value (only in cache) + self.compute_value(recs) + # HACK: if result is in the wrong cache, copy values + if recs.env != env: + for source, target in zip(recs, recs.with_env(env)): + try: + values = target._convert_to_cache({ + f.name: source[f.name] for f in self.computed_fields + }) + except MissingError as e: + values = FailedValue(e) + target._cache.update(values) + # the result is saved to database by BaseModel.recompute() + return + + # read the field from database + record._prefetch_field(self) + + elif self.compute: + # this is either a non-stored computed field, or a stored computed + # field in draft mode + if self.recursive: + self.compute_value(record) + else: + recs = record._in_cache_without(self) + self.compute_value(recs) + + else: + # this is a non-stored non-computed field + record._cache[self] = self.null(env) + + def determine_default(self, record): + """ determine the default value of field `self` on `record` """ + if self.compute: + self._compute_value(record) + else: + record._cache[self] = SpecialValue(self.null(record.env)) + + def determine_inverse(self, records): + """ Given the value of `self` on `records`, inverse the computation. """ + if self.inverse: + self.inverse(records) + + def determine_domain(self, records, operator, value): + """ Return a domain representing a condition on `self`. 
""" + if self.search: + return self.search(records, operator, value) + else: + return [(self.name, operator, value)] + + ############################################################################ + # + # Notification when fields are modified + # + + def modified(self, records): + """ Notify that field `self` has been modified on `records`: prepare the + fields/records to recompute, and return a spec indicating what to + invalidate. + """ + # invalidate the fields that depend on self, and prepare recomputation + spec = [(self, records._ids)] + for field, path in self._triggers: + if field.store: + # don't move this line to function top, see log + env = records.env(user=SUPERUSER_ID, context={'active_test': False}) + target = env[field.model_name].search([(path, 'in', records.ids)]) + if target: + spec.append((field, target._ids)) + target.with_env(records.env)._recompute_todo(field) + else: + spec.append((field, None)) + + return spec + + def modified_draft(self, records): + """ Same as :meth:`modified`, but in draft mode. """ + env = records.env + + # invalidate the fields on the records in cache that depend on + # `records`, except fields currently being computed + spec = [] + for field, path in self._triggers: + target = env[field.model_name] + computed = target.browse(env.computed[field]) + if path == 'id': + target = records - computed + else: + for record in target.browse(env.cache[field]) - computed: + if record._mapped_cache(path) & records: + target += record + if target: + spec.append((field, target._ids)) + + return spec + + +class Any(Field): + """ Field for arbitrary Python values. """ + # Warning: no storage is defined for this type of field! + type = 'any' + + +class Boolean(Field): + """ Boolean field. 
""" + type = 'boolean' + + def convert_to_cache(self, value, env): + return bool(value) + + def convert_to_export(self, value, env): + if env.context.get('export_raw_data'): + return value + return ustr(value) + + +class Integer(Field): + """ Integer field. """ + type = 'integer' + + def convert_to_cache(self, value, env): + return int(value or 0) + + def convert_to_read(self, value, use_name_get=True): + # Integer values greater than 2^31-1 are not supported in pure XMLRPC, + # so we have to pass them as floats :-( + if value and value > xmlrpclib.MAXINT: + return float(value) + return value + + def _update(self, records, value): + # special case, when an integer field is used as inverse for a one2many + records._cache[self] = value.id or 0 + + +class Float(Field): + """ Float field. The precision digits are given by the attribute + + :param digits: a pair (total, decimal), or a function taking a database + cursor and returning a pair (total, decimal) + + """ + type = 'float' + _digits = None # digits argument passed to class initializer + digits = None # digits as computed by setup() + + def __init__(self, string=None, digits=None, **kwargs): + super(Float, self).__init__(string=string, _digits=digits, **kwargs) + + def _setup_regular(self, env): + super(Float, self)._setup_regular(env) + self.digits = self._digits(env.cr) if callable(self._digits) else self._digits + + _related_digits = property(attrgetter('digits')) + + _description_digits = property(attrgetter('digits')) + + _column_digits = property(lambda self: not callable(self._digits) and self._digits) + _column_digits_compute = property(lambda self: callable(self._digits) and self._digits) + + def convert_to_cache(self, value, env): + # apply rounding here, otherwise value in cache may be wrong! + if self.digits: + return float_round(float(value or 0.0), precision_digits=self.digits[1]) + else: + return float(value or 0.0) + + +class _String(Field): + """ Abstract class for string fields. 
""" + translate = False + + _column_translate = property(attrgetter('translate')) + _related_translate = property(attrgetter('translate')) + _description_translate = property(attrgetter('translate')) + + +class Char(_String): + """ Char field. + + :param size: the maximum size of values stored for that field (integer, + optional) + + :param translate: whether the value of the field has translations + (boolean, by default ``False``) + + """ + type = 'char' + size = None + + _column_size = property(attrgetter('size')) + _related_size = property(attrgetter('size')) + _description_size = property(attrgetter('size')) + + def convert_to_cache(self, value, env): + return bool(value) and ustr(value)[:self.size] + + +class Text(_String): + """ Text field. Very similar to :class:`Char`, but typically for longer + contents. + + :param translate: whether the value of the field has translations + (boolean, by default ``False``) + + """ + type = 'text' + + def convert_to_cache(self, value, env): + return bool(value) and ustr(value) + + +class Html(_String): + """ Html field. """ + type = 'html' + + def convert_to_cache(self, value, env): + return bool(value) and html_sanitize(value) + + +class Date(Field): + """ Date field. """ + type = 'date' + + @staticmethod + def today(*args): + """ Return the current day in the format expected by the ORM. + This function may be used to compute default values. + """ + return date.today().strftime(DATE_FORMAT) + + @staticmethod + def context_today(record, timestamp=None): + """ Return the current date as seen in the client's timezone in a format + fit for date fields. This method may be used to compute default + values. + + :param datetime timestamp: optional datetime value to use instead of + the current date and time (must be a datetime, regular dates + can't be converted between timezones.) 
+ :rtype: str + """ + today = timestamp or datetime.now() + context_today = None + tz_name = record._context.get('tz') or record.env.user.tz + if tz_name: + try: + today_utc = pytz.timezone('UTC').localize(today, is_dst=False) # UTC = no DST + context_today = today_utc.astimezone(pytz.timezone(tz_name)) + except Exception: + _logger.debug("failed to compute context/client-specific today date, using UTC value for `today`", + exc_info=True) + return (context_today or today).strftime(DATE_FORMAT) + + @staticmethod + def from_string(value): + """ Convert an ORM `value` into a :class:`date` value. """ + value = value[:DATE_LENGTH] + return datetime.strptime(value, DATE_FORMAT).date() + + @staticmethod + def to_string(value): + """ Convert a :class:`date` value into the format expected by the ORM. """ + return value.strftime(DATE_FORMAT) + + def convert_to_cache(self, value, env): + if not value: + return False + if isinstance(value, basestring): + value = self.from_string(value) + return value.strftime(DATE_FORMAT) + + def convert_to_export(self, value, env): + if value and env.context.get('export_raw_data'): + return self.from_string(value) + return bool(value) and ustr(value) + + +class Datetime(Field): + """ Datetime field. """ + type = 'datetime' + + @staticmethod + def now(*args): + """ Return the current day and time in the format expected by the ORM. + This function may be used to compute default values. + """ + return datetime.now().strftime(DATETIME_FORMAT) + + @staticmethod + def context_timestamp(record, timestamp): + """Returns the given timestamp converted to the client's timezone. + This method is *not* meant for use as a _defaults initializer, + because datetime fields are automatically converted upon + display on client side. For _defaults you :meth:`fields.datetime.now` + should be used instead. 
+ + :param datetime timestamp: naive datetime value (expressed in UTC) + to be converted to the client timezone + :rtype: datetime + :return: timestamp converted to timezone-aware datetime in context + timezone + """ + assert isinstance(timestamp, datetime), 'Datetime instance expected' + tz_name = record._context.get('tz') or record.env.user.tz + if tz_name: + try: + utc = pytz.timezone('UTC') + context_tz = pytz.timezone(tz_name) + utc_timestamp = utc.localize(timestamp, is_dst=False) # UTC = no DST + return utc_timestamp.astimezone(context_tz) + except Exception: + _logger.debug("failed to compute context/client-specific timestamp, " + "using the UTC value", + exc_info=True) + return timestamp + + @staticmethod + def from_string(value): + """ Convert an ORM `value` into a :class:`datetime` value. """ + value = value[:DATETIME_LENGTH] + if len(value) == DATE_LENGTH: + value += " 00:00:00" + return datetime.strptime(value, DATETIME_FORMAT) + + @staticmethod + def to_string(value): + """ Convert a :class:`datetime` value into the format expected by the ORM. """ + return value.strftime(DATETIME_FORMAT) + + def convert_to_cache(self, value, env): + if not value: + return False + if isinstance(value, basestring): + value = self.from_string(value) + return value.strftime(DATETIME_FORMAT) + + def convert_to_export(self, value, env): + if value and env.context.get('export_raw_data'): + return self.from_string(value) + return bool(value) and ustr(value) + + +class Binary(Field): + """ Binary field. """ + type = 'binary' + + +class Selection(Field): + """ Selection field. + + :param selection: specifies the possible values for this field. + It is given as either a list of pairs (`value`, `string`), or a + model method, or a method name. + + The attribute `selection` is mandatory except in the case of related + fields (see :ref:`field-related`) or field extensions + (see :ref:`field-incremental-definition`). 
+ """ + type = 'selection' + selection = None # [(value, string), ...], model method or method name + + def __init__(self, selection=None, string=None, **kwargs): + if callable(selection): + from openerp import api + selection = api.expected(api.model, selection) + super(Selection, self).__init__(selection=selection, string=string, **kwargs) + + def _setup_related(self, env): + super(Selection, self)._setup_related(env) + # selection must be computed on related field + field = self.related_field + self.selection = lambda model: field._description_selection(model.env) + + def _description_selection(self, env): + """ return the selection list (pairs (value, label)); labels are + translated according to context language + """ + selection = self.selection + if isinstance(selection, basestring): + return getattr(env[self.model_name], selection)() + if callable(selection): + return selection(env[self.model_name]) + + # translate selection labels + if env.lang: + name = "%s,%s" % (self.model_name, self.name) + translate = partial( + env['ir.translation']._get_source, name, 'selection', env.lang) + return [(value, translate(label)) for value, label in selection] + else: + return selection + + @property + def _column_selection(self): + if isinstance(self.selection, basestring): + method = self.selection + return lambda self, *a, **kw: getattr(self, method)(*a, **kw) + else: + return self.selection + + def get_values(self, env): + """ return a list of the possible values """ + selection = self.selection + if isinstance(selection, basestring): + selection = getattr(env[self.model_name], selection)() + elif callable(selection): + selection = selection(env[self.model_name]) + return [value for value, _ in selection] + + def convert_to_cache(self, value, env): + if value in self.get_values(env): + return value + elif not value: + return False + raise ValueError("Wrong value for %s: %r" % (self, value)) + + def convert_to_export(self, value, env): + if not 
isinstance(self.selection, list): + # FIXME: this reproduces an existing buggy behavior! + return value + for item in self._description_selection(env): + if item[0] == value: + return item[1] + return False + + +class Reference(Selection): + """ Reference field. + + :param selection: specifies the possible model names for this field. + It is given as either a list of pairs (`value`, `string`), or a + model method, or a method name. + + The attribute `selection` is mandatory except in the case of related + fields (see :ref:`field-related`) or field extensions + (see :ref:`field-incremental-definition`). + """ + type = 'reference' + size = 128 + + def __init__(self, selection=None, string=None, **kwargs): + super(Reference, self).__init__(selection=selection, string=string, **kwargs) + + _related_size = property(attrgetter('size')) + + _column_size = property(attrgetter('size')) + + def convert_to_cache(self, value, env): + if isinstance(value, BaseModel): + if value._name in self.get_values(env) and len(value) <= 1: + return value.with_env(env) or False + elif isinstance(value, basestring): + res_model, res_id = value.split(',') + return env[res_model].browse(int(res_id)) + elif not value: + return False + raise ValueError("Wrong value for %s: %r" % (self, value)) + + def convert_to_read(self, value, use_name_get=True): + return "%s,%s" % (value._name, value.id) if value else False + + def convert_to_export(self, value, env): + return bool(value) and value.name_get()[0][1] + + def convert_to_display_name(self, value): + return ustr(value and value.display_name) + + +class _Relational(Field): + """ Abstract class for relational fields. 
""" + relational = True + domain = None # domain for searching values + context = None # context for searching values + + _description_relation = property(attrgetter('comodel_name')) + _description_context = property(attrgetter('context')) + + def _description_domain(self, env): + return self.domain(env[self.model_name]) if callable(self.domain) else self.domain + + _column_obj = property(attrgetter('comodel_name')) + _column_domain = property(attrgetter('domain')) + _column_context = property(attrgetter('context')) + + def null(self, env): + return env[self.comodel_name] + + def modified(self, records): + # Invalidate cache for self.inverse_field, too. Note that recomputation + # of fields that depend on self.inverse_field is already covered by the + # triggers (see above). + spec = super(_Relational, self).modified(records) + if self.inverse_field: + spec.append((self.inverse_field, None)) + return spec + + +class Many2one(_Relational): + """ Many2one field; the value of such a field is a recordset of size 0 (no + record) or 1 (a single record). + + :param comodel_name: name of the target model (string) + + :param domain: an optional domain to set on candidate values on the + client side (domain or string) + + :param context: an optional context to use on the client side when + handling that field (dictionary) + + :param ondelete: what to do when the referred record is deleted; + possible values are: ``'set null'``, ``'restrict'``, ``'cascade'`` + + :param auto_join: whether JOINs are generated upon search through that + field (boolean, by default ``False``) + + :param delegate: set it to ``True`` to make fields of the target model + accessible from the current model (corresponds to ``_inherits``) + + The attribute `comodel_name` is mandatory except in the case of related + fields or field extensions. 
+ """ + type = 'many2one' + ondelete = 'set null' # what to do when value is deleted + auto_join = False # whether joins are generated upon search + delegate = False # whether self implements delegation + + def __init__(self, comodel_name=None, string=None, **kwargs): + super(Many2one, self).__init__(comodel_name=comodel_name, string=string, **kwargs) + + def _setup_regular(self, env): + super(Many2one, self)._setup_regular(env) + + # self.inverse_field is determined by the corresponding One2many field + + # determine self.delegate + self.delegate = self.name in env[self.model_name]._inherits.values() + + _column_ondelete = property(attrgetter('ondelete')) + _column_auto_join = property(attrgetter('auto_join')) + + def _update(self, records, value): + """ Update the cached value of `self` for `records` with `value`. """ + records._cache[self] = value + + def convert_to_cache(self, value, env): + if isinstance(value, (NoneType, int)): + return env[self.comodel_name].browse(value) + if isinstance(value, BaseModel): + if value._name == self.comodel_name and len(value) <= 1: + return value.with_env(env) + raise ValueError("Wrong value for %s: %r" % (self, value)) + elif isinstance(value, tuple): + return env[self.comodel_name].browse(value[0]) + elif isinstance(value, dict): + return env[self.comodel_name].new(value) + else: + return env[self.comodel_name].browse(value) + + def convert_to_read(self, value, use_name_get=True): + if use_name_get and value: + # evaluate name_get() as superuser, because the visibility of a + # many2one field value (id and name) depends on the current record's + # access rights, and not the value's access rights. 
+ return value.sudo().name_get()[0] + else: + return value.id + + def convert_to_write(self, value, target=None, fnames=None): + return bool(value) and (value.id or value._convert_to_write(value._cache)) + + def convert_to_onchange(self, value): + return value.id + + def convert_to_export(self, value, env): + return bool(value) and value.name_get()[0][1] + + def convert_to_display_name(self, value): + return ustr(value.display_name) + + def determine_default(self, record): + super(Many2one, self).determine_default(record) + if self.delegate: + # special case: fields that implement inheritance between models + value = record[self.name] + if not value: + # the default value cannot be null, use a new record instead + record[self.name] = record.env[self.comodel_name].new() + + +class _RelationalMulti(_Relational): + """ Abstract class for relational fields *2many. """ + + def _update(self, records, value): + """ Update the cached value of `self` for `records` with `value`. """ + for record in records: + record._cache[self] = record[self.name] | value + + def convert_to_cache(self, value, env): + if isinstance(value, BaseModel): + if value._name == self.comodel_name: + return value.with_env(env) + elif isinstance(value, list): + # value is a list of record ids or commands + result = env[self.comodel_name] + for command in value: + if isinstance(command, (tuple, list)): + if command[0] == 0: + result += result.new(command[2]) + elif command[0] == 1: + record = result.browse(command[1]) + record.update(command[2]) + result += record + elif command[0] == 2: + pass + elif command[0] == 3: + pass + elif command[0] == 4: + result += result.browse(command[1]) + elif command[0] == 5: + result = result.browse() + elif command[0] == 6: + result = result.browse(command[2]) + elif isinstance(command, dict): + result += result.new(command) + else: + result += result.browse(command) + return result + elif not value: + return self.null(env) + raise ValueError("Wrong value for %s: %s" 
% (self, value)) + + def convert_to_read(self, value, use_name_get=True): + return value.ids + + def convert_to_write(self, value, target=None, fnames=None): + # remove/delete former records + if target is None: + set_ids = [] + result = [(6, 0, set_ids)] + add_existing = lambda id: set_ids.append(id) + else: + tag = 2 if self.type == 'one2many' else 3 + result = [(tag, record.id) for record in target[self.name] - value] + add_existing = lambda id: result.append((4, id)) + + if fnames is None: + # take all fields in cache, except the inverse of self + fnames = set(value._fields) - set(MAGIC_COLUMNS) + if self.inverse_field: + fnames.discard(self.inverse_field.name) + + # add new and existing records + for record in value: + if not record.id or record._dirty: + values = dict((k, v) for k, v in record._cache.iteritems() if k in fnames) + values = record._convert_to_write(values) + if not record.id: + result.append((0, 0, values)) + else: + result.append((1, record.id, values)) + else: + add_existing(record.id) + + return result + + def convert_to_export(self, value, env): + return bool(value) and ','.join(name for id, name in value.name_get()) + + def convert_to_display_name(self, value): + raise NotImplementedError() + + +class One2many(_RelationalMulti): + """ One2many field; the value of such a field is the recordset of all the + records in `comodel_name` such that the field `inverse_name` is equal to + the current record. 
+ + :param comodel_name: name of the target model (string) + + :param inverse_name: name of the inverse `Many2one` field in + `comodel_name` (string) + + :param domain: an optional domain to set on candidate values on the + client side (domain or string) + + :param context: an optional context to use on the client side when + handling that field (dictionary) + + :param auto_join: whether JOINs are generated upon search through that + field (boolean, by default ``False``) + + :param limit: optional limit to use upon read (integer) + + The attributes `comodel_name` and `inverse_name` are mandatory except in + the case of related fields or field extensions. + """ + type = 'one2many' + inverse_name = None # name of the inverse field + auto_join = False # whether joins are generated upon search + limit = None # optional limit to use upon read + copyable = False # o2m are not copied by default + + def __init__(self, comodel_name=None, inverse_name=None, string=None, **kwargs): + super(One2many, self).__init__( + comodel_name=comodel_name, + inverse_name=inverse_name, + string=string, + **kwargs + ) + + def _setup_regular(self, env): + super(One2many, self)._setup_regular(env) + + if self.inverse_name: + # link self to its inverse field and vice-versa + invf = env[self.comodel_name]._fields[self.inverse_name] + self.inverse_field = invf + invf.inverse_field = self + + _description_relation_field = property(attrgetter('inverse_name')) + + _column_fields_id = property(attrgetter('inverse_name')) + _column_auto_join = property(attrgetter('auto_join')) + _column_limit = property(attrgetter('limit')) + + +class Many2many(_RelationalMulti): + """ Many2many field; the value of such a field is the recordset. + + :param comodel_name: name of the target model (string) + + The attribute `comodel_name` is mandatory except in the case of related + fields or field extensions. 
+ + :param relation: optional name of the table that stores the relation in + the database (string) + + :param column1: optional name of the column referring to "these" records + in the table `relation` (string) + + :param column2: optional name of the column referring to "those" records + in the table `relation` (string) + + The attributes `relation`, `column1` and `column2` are optional. If not + given, names are automatically generated from model names, provided + `model_name` and `comodel_name` are different! + + :param domain: an optional domain to set on candidate values on the + client side (domain or string) + + :param context: an optional context to use on the client side when + handling that field (dictionary) + + :param limit: optional limit to use upon read (integer) + + """ + type = 'many2many' + relation = None # name of table + column1 = None # column of table referring to model + column2 = None # column of table referring to comodel + limit = None # optional limit to use upon read + + def __init__(self, comodel_name=None, relation=None, column1=None, column2=None, + string=None, **kwargs): + super(Many2many, self).__init__( + comodel_name=comodel_name, + relation=relation, + column1=column1, + column2=column2, + string=string, + **kwargs + ) + + def _setup_regular(self, env): + super(Many2many, self)._setup_regular(env) + + if self.store and not self.relation: + model = env[self.model_name] + column = model._columns[self.name] + if not isinstance(column, fields.function): + self.relation, self.column1, self.column2 = column._sql_names(model) + + if self.relation: + m2m = env.registry._m2m + # if inverse field has already been setup, it is present in m2m + invf = m2m.get((self.relation, self.column2, self.column1)) + if invf: + self.inverse_field = invf + invf.inverse_field = self + else: + # add self in m2m, so that its inverse field can find it + m2m[(self.relation, self.column1, self.column2)] = self + + _column_rel = 
property(attrgetter('relation')) + _column_id1 = property(attrgetter('column1')) + _column_id2 = property(attrgetter('column2')) + _column_limit = property(attrgetter('limit')) + + +class Id(Field): + """ Special case for field 'id'. """ + store = True + readonly = True + + def to_column(self): + return fields.integer('ID') + + def __get__(self, record, owner): + if record is None: + return self # the field is accessed through the class owner + if not record: + return False + return record.ensure_one()._ids[0] + + def __set__(self, record, value): + raise TypeError("field 'id' cannot be assigned") + + +# imported here to avoid dependency cycle issues +from openerp import SUPERUSER_ID +from .exceptions import Warning, MissingError +from .models import BaseModel, MAGIC_COLUMNS +from .osv import fields diff --git a/openerp/http.py b/openerp/http.py index d8c1874dd79aa964eb4f23c1fe676fa55030949a..3c2998699e7ac323a89c4b1b541cf67e82a1f4b6 100644 --- a/openerp/http.py +++ b/openerp/http.py @@ -35,6 +35,7 @@ import werkzeug.wrappers import werkzeug.wsgi import openerp +from openerp import SUPERUSER_ID from openerp.service import security, model as service_model from openerp.tools.func import lazy_property @@ -180,12 +181,18 @@ class WebRequest(object): .. attribute:: db ``str``, the name of the database linked to the current request. Can - be ``None`` if the current request uses the ``none`` authentication. + be ``None`` if the current request uses the ``none`` authentication + in ``web`` module's controllers. .. attribute:: uid ``int``, the id of the user related to the current request. Can be ``None`` if the current request uses the ``none`` authentication. + + .. 
attribute:: env + + an :class:`openerp.api.Environment` bound to the current + request's ``cr``, ``uid`` and ``context`` """ def __init__(self, httprequest): self.httprequest = httprequest @@ -223,7 +230,7 @@ class WebRequest(object): @property def db(self): """ - The registry to the database linked to this request. Can be ``None`` + The database linked to this request. Can be ``None`` if the current request uses the ``none`` authentication. """ return self.session.db if not self.disable_db else None @@ -240,6 +247,13 @@ class WebRequest(object): self._cr = self.registry.cursor() return self._cr + @lazy_property + def env(self): + """ + The Environment bound to current request. + """ + return openerp.api.Environment(self.cr, self.uid, self.context) + def __enter__(self): _request_stack.push(self) return self diff --git a/openerp/models.py b/openerp/models.py new file mode 100644 index 0000000000000000000000000000000000000000..86d702ef582f55a366efdd13896e06a744e31858 --- /dev/null +++ b/openerp/models.py @@ -0,0 +1,5800 @@ +# -*- coding: utf-8 -*- +############################################################################## +# +# OpenERP, Open Source Management Solution +# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>). +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as +# published by the Free Software Foundation, either version 3 of the +# License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see <http://www.gnu.org/licenses/>. 
+# +############################################################################## + + +""" + Object Relational Mapping module: + * Hierarchical structure + * Constraints consistency and validation + * Object metadata depends on its status + * Optimised processing by complex query (multiple actions at once) + * Default field values + * Permissions optimisation + * Persistant object: DB postgresql + * Data conversion + * Multi-level caching system + * Two different inheritance mechanisms + * Rich set of field types: + - classical (varchar, integer, boolean, ...) + - relational (one2many, many2one, many2many) + - functional + +""" + +import copy +import datetime +import functools +import itertools +import logging +import operator +import pickle +import pytz +import re +import time +from collections import defaultdict, MutableMapping +from inspect import getmembers + +import babel.dates +import dateutil.relativedelta +import psycopg2 +from lxml import etree + +import openerp +from . import SUPERUSER_ID +from . import api +from . import tools +from .api import Environment +from .exceptions import except_orm, AccessError, MissingError +from .osv import fields +from .osv.query import Query +from .tools import lazy_property +from .tools.config import config +from .tools.misc import CountingStream, DEFAULT_SERVER_DATETIME_FORMAT, DEFAULT_SERVER_DATE_FORMAT +from .tools.safe_eval import safe_eval as eval +from .tools.translate import _ + +_logger = logging.getLogger(__name__) +_schema = logging.getLogger(__name__ + '.schema') + +regex_order = re.compile('^( *([a-z0-9:_]+|"[a-z0-9:_]+")( *desc| *asc)?( *, *|))+$', re.I) +regex_object_name = re.compile(r'^[a-z0-9_.]+$') +onchange_v7 = re.compile(r"^(\w+)\((.*)\)$") + +AUTOINIT_RECALCULATE_STORED_FIELDS = 1000 + + +def check_object_name(name): + """ Check if the given name is a valid openerp object name. + + The _name attribute in osv and osv_memory object is subject to + some restrictions. 
This function returns True or False whether + the given name is allowed or not. + + TODO: this is an approximation. The goal in this approximation + is to disallow uppercase characters (in some places, we quote + table/column names and in other not, which leads to this kind + of errors: + + psycopg2.ProgrammingError: relation "xxx" does not exist). + + The same restriction should apply to both osv and osv_memory + objects for consistency. + + """ + if regex_object_name.match(name) is None: + return False + return True + +def raise_on_invalid_object_name(name): + if not check_object_name(name): + msg = "The _name attribute %s is not valid." % name + _logger.error(msg) + raise except_orm('ValueError', msg) + +POSTGRES_CONFDELTYPES = { + 'RESTRICT': 'r', + 'NO ACTION': 'a', + 'CASCADE': 'c', + 'SET NULL': 'n', + 'SET DEFAULT': 'd', +} + +def intersect(la, lb): + return filter(lambda x: x in lb, la) + +def same_name(f, g): + """ Test whether functions `f` and `g` are identical or have the same name """ + return f == g or getattr(f, '__name__', 0) == getattr(g, '__name__', 1) + +def fix_import_export_id_paths(fieldname): + """ + Fixes the id fields in import and exports, and splits field paths + on '/'. 
+ + :param str fieldname: name of the field to import/export + :return: split field name + :rtype: list of str + """ + fixed_db_id = re.sub(r'([^/])\.id', r'\1/.id', fieldname) + fixed_external_id = re.sub(r'([^/]):id', r'\1/id', fixed_db_id) + return fixed_external_id.split('/') + +def pg_varchar(size=0): + """ Returns the VARCHAR declaration for the provided size: + + * If no size (or an empty or negative size is provided) return an + 'infinite' VARCHAR + * Otherwise return a VARCHAR(n) + + :type int size: varchar size, optional + :rtype: str + """ + if size: + if not isinstance(size, int): + raise TypeError("VARCHAR parameter should be an int, got %s" + % type(size)) + if size > 0: + return 'VARCHAR(%d)' % size + return 'VARCHAR' + +FIELDS_TO_PGTYPES = { + fields.boolean: 'bool', + fields.integer: 'int4', + fields.text: 'text', + fields.html: 'text', + fields.date: 'date', + fields.datetime: 'timestamp', + fields.binary: 'bytea', + fields.many2one: 'int4', + fields.serialized: 'text', +} + +def get_pg_type(f, type_override=None): + """ + :param fields._column f: field to get a Postgres type for + :param type type_override: use the provided type for dispatching instead of the field's own type + :returns: (postgres_identification_type, postgres_type_specification) + :rtype: (str, str) + """ + field_type = type_override or type(f) + + if field_type in FIELDS_TO_PGTYPES: + pg_type = (FIELDS_TO_PGTYPES[field_type], FIELDS_TO_PGTYPES[field_type]) + elif issubclass(field_type, fields.float): + if f.digits: + pg_type = ('numeric', 'NUMERIC') + else: + pg_type = ('float8', 'DOUBLE PRECISION') + elif issubclass(field_type, (fields.char, fields.reference)): + pg_type = ('varchar', pg_varchar(f.size)) + elif issubclass(field_type, fields.selection): + if (isinstance(f.selection, list) and isinstance(f.selection[0][0], int))\ + or getattr(f, 'size', None) == -1: + pg_type = ('int4', 'INTEGER') + else: + pg_type = ('varchar', pg_varchar(getattr(f, 'size', None))) + elif 
issubclass(field_type, fields.function): + if f._type == 'selection': + pg_type = ('varchar', pg_varchar()) + else: + pg_type = get_pg_type(f, getattr(fields, f._type)) + else: + _logger.warning('%s type not supported!', field_type) + pg_type = None + + return pg_type + + +class MetaModel(api.Meta): + """ Metaclass for the models. + + This class is used as the metaclass for the class :class:`BaseModel` to + discover the models defined in a module (without instanciating them). + If the automatic discovery is not needed, it is possible to set the model's + ``_register`` attribute to False. + + """ + + module_to_models = {} + + def __init__(self, name, bases, attrs): + if not self._register: + self._register = True + super(MetaModel, self).__init__(name, bases, attrs) + return + + if not hasattr(self, '_module'): + # The (OpenERP) module name can be in the `openerp.addons` namespace + # or not. For instance, module `sale` can be imported as + # `openerp.addons.sale` (the right way) or `sale` (for backward + # compatibility). + module_parts = self.__module__.split('.') + if len(module_parts) > 2 and module_parts[:2] == ['openerp', 'addons']: + module_name = self.__module__.split('.')[2] + else: + module_name = self.__module__.split('.')[0] + self._module = module_name + + # Remember which models to instanciate for this module. + if not self._custom: + self.module_to_models.setdefault(self._module, []).append(self) + + +class NewId(object): + """ Pseudo-ids for new records. """ + def __nonzero__(self): + return False + +IdType = (int, long, basestring, NewId) + + +# special columns automatically created by the ORM +LOG_ACCESS_COLUMNS = ['create_uid', 'create_date', 'write_uid', 'write_date'] +MAGIC_COLUMNS = ['id'] + LOG_ACCESS_COLUMNS + +class BaseModel(object): + """ Base class for OpenERP models. 
+ + OpenERP models are created by inheriting from this class' subclasses: + + * :class:`Model` for regular database-persisted models + + * :class:`TransientModel` for temporary data, stored in the database but + automatically vaccuumed every so often + + * :class:`AbstractModel` for abstract super classes meant to be shared by + multiple inheriting model + + The system automatically instantiates every model once per database. Those + instances represent the available models on each database, and depend on + which modules are installed on that database. The actual class of each + instance is built from the Python classes that create and inherit from the + corresponding model. + + Every model instance is a "recordset", i.e., an ordered collection of + records of the model. Recordsets are returned by methods like + :meth:`~.browse`, :meth:`~.search`, or field accesses. Records have no + explicit representation: a record is represented as a recordset of one + record. + + To create a class that should not be instantiated, the _register class + attribute may be set to False. + """ + __metaclass__ = MetaModel + _auto = True # create database backend + _register = False # Set to false if the model shouldn't be automatically discovered. + _name = None + _columns = {} + _constraints = [] + _custom = False + _defaults = {} + _rec_name = None + _parent_name = 'parent_id' + _parent_store = False + _parent_order = False + _date_name = 'date' + _order = 'id' + _sequence = None + _description = None + _needaction = False + + # dict of {field:method}, with method returning the (name_get of records, {id: fold}) + # to include in the _read_group, if grouped on this field + _group_by_full = {} + + # Transience + _transient = False # True in a TransientModel + + # structure: + # { 'parent_model': 'm2o_field', ... 
} + _inherits = {} + + # Mapping from inherits'd field name to triple (m, r, f, n) where m is the + # model from which it is inherits'd, r is the (local) field towards m, f + # is the _column object itself, and n is the original (i.e. top-most) + # parent model. + # Example: + # { 'field_name': ('parent_model', 'm2o_field_to_reach_parent', + # field_column_obj, origina_parent_model), ... } + _inherit_fields = {} + + # Mapping field name/column_info object + # This is similar to _inherit_fields but: + # 1. includes self fields, + # 2. uses column_info instead of a triple. + _all_columns = {} + + _table = None + _log_create = False + _sql_constraints = [] + + CONCURRENCY_CHECK_FIELD = '__last_update' + + def log(self, cr, uid, id, message, secondary=False, context=None): + return _logger.warning("log() is deprecated. Please use OpenChatter notification system instead of the res.log mechanism.") + + def view_init(self, cr, uid, fields_list, context=None): + """Override this method to do specific things when a view on the object is opened.""" + pass + + def _field_create(self, cr, context=None): + """ Create entries in ir_model_fields for all the model's fields. + + If necessary, also create an entry in ir_model, and if called from the + modules loading scheme (by receiving 'module' in the context), also + create entries in ir_model_data (for the model and the fields). + + - create an entry in ir_model (if there is not already one), + - create an entry in ir_model_data (if there is not already one, and if + 'module' is in the context), + - update ir_model_fields with the fields found in _columns + (TODO there is some redundancy as _columns is updated from + ir_model_fields in __init__). 
+ + """ + if context is None: + context = {} + cr.execute("SELECT id FROM ir_model WHERE model=%s", (self._name,)) + if not cr.rowcount: + cr.execute('SELECT nextval(%s)', ('ir_model_id_seq',)) + model_id = cr.fetchone()[0] + cr.execute("INSERT INTO ir_model (id,model, name, info,state) VALUES (%s, %s, %s, %s, %s)", (model_id, self._name, self._description, self.__doc__, 'base')) + else: + model_id = cr.fetchone()[0] + if 'module' in context: + name_id = 'model_'+self._name.replace('.', '_') + cr.execute('select * from ir_model_data where name=%s and module=%s', (name_id, context['module'])) + if not cr.rowcount: + cr.execute("INSERT INTO ir_model_data (name,date_init,date_update,module,model,res_id) VALUES (%s, (now() at time zone 'UTC'), (now() at time zone 'UTC'), %s, %s, %s)", \ + (name_id, context['module'], 'ir.model', model_id) + ) + + cr.execute("SELECT * FROM ir_model_fields WHERE model=%s", (self._name,)) + cols = {} + for rec in cr.dictfetchall(): + cols[rec['name']] = rec + + ir_model_fields_obj = self.pool.get('ir.model.fields') + + # sparse field should be created at the end, as it depends on its serialized field already existing + model_fields = sorted(self._columns.items(), key=lambda x: 1 if x[1]._type == 'sparse' else 0) + for (k, f) in model_fields: + vals = { + 'model_id': model_id, + 'model': self._name, + 'name': k, + 'field_description': f.string, + 'ttype': f._type, + 'relation': f._obj or '', + 'select_level': tools.ustr(int(f.select)), + 'readonly': (f.readonly and 1) or 0, + 'required': (f.required and 1) or 0, + 'selectable': (f.selectable and 1) or 0, + 'translate': (f.translate and 1) or 0, + 'relation_field': f._fields_id if isinstance(f, fields.one2many) else '', + 'serialization_field_id': None, + } + if getattr(f, 'serialization_field', None): + # resolve link to serialization_field if specified by name + serialization_field_id = ir_model_fields_obj.search(cr, SUPERUSER_ID, [('model','=',vals['model']), ('name', '=', 
f.serialization_field)]) + if not serialization_field_id: + raise except_orm(_('Error'), _("Serialization field `%s` not found for sparse field `%s`!") % (f.serialization_field, k)) + vals['serialization_field_id'] = serialization_field_id[0] + + # When its a custom field,it does not contain f.select + if context.get('field_state', 'base') == 'manual': + if context.get('field_name', '') == k: + vals['select_level'] = context.get('select', '0') + #setting value to let the problem NOT occur next time + elif k in cols: + vals['select_level'] = cols[k]['select_level'] + + if k not in cols: + cr.execute('select nextval(%s)', ('ir_model_fields_id_seq',)) + id = cr.fetchone()[0] + vals['id'] = id + cr.execute("""INSERT INTO ir_model_fields ( + id, model_id, model, name, field_description, ttype, + relation,state,select_level,relation_field, translate, serialization_field_id + ) VALUES ( + %s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s + )""", ( + id, vals['model_id'], vals['model'], vals['name'], vals['field_description'], vals['ttype'], + vals['relation'], 'base', + vals['select_level'], vals['relation_field'], bool(vals['translate']), vals['serialization_field_id'] + )) + if 'module' in context: + name1 = 'field_' + self._table + '_' + k + cr.execute("select name from ir_model_data where name=%s", (name1,)) + if cr.fetchone(): + name1 = name1 + "_" + str(id) + cr.execute("INSERT INTO ir_model_data (name,date_init,date_update,module,model,res_id) VALUES (%s, (now() at time zone 'UTC'), (now() at time zone 'UTC'), %s, %s, %s)", \ + (name1, context['module'], 'ir.model.fields', id) + ) + else: + for key, val in vals.items(): + if cols[k][key] != vals[key]: + cr.execute('update ir_model_fields set field_description=%s where model=%s and name=%s', (vals['field_description'], vals['model'], vals['name'])) + cr.execute("""UPDATE ir_model_fields SET + model_id=%s, field_description=%s, ttype=%s, relation=%s, + select_level=%s, readonly=%s ,required=%s, selectable=%s, relation_field=%s, 
translate=%s, serialization_field_id=%s + WHERE + model=%s AND name=%s""", ( + vals['model_id'], vals['field_description'], vals['ttype'], + vals['relation'], + vals['select_level'], bool(vals['readonly']), bool(vals['required']), bool(vals['selectable']), vals['relation_field'], bool(vals['translate']), vals['serialization_field_id'], vals['model'], vals['name'] + )) + break + self.invalidate_cache(cr, SUPERUSER_ID) + + @classmethod + def _add_field(cls, name, field): + """ Add the given `field` under the given `name` in the class """ + field.set_class_name(cls, name) + + # add field in _fields (for reflection) + cls._fields[name] = field + + # add field as an attribute, unless another kind of value already exists + if isinstance(getattr(cls, name, field), Field): + setattr(cls, name, field) + else: + _logger.warning("In model %r, member %r is not a field", cls._name, name) + + if field.store: + cls._columns[name] = field.to_column() + else: + # remove potential column that may be overridden by field + cls._columns.pop(name, None) + + @classmethod + def _add_magic_fields(cls): + """ Introduce magic fields on the current class + + * id is a "normal" field (with a specific getter) + * create_uid, create_date, write_uid and write_date have become + "normal" fields + * $CONCURRENCY_CHECK_FIELD is a computed field with its computing + method defined dynamically. Uses ``str(datetime.datetime.utcnow())`` + to get the same structure as the previous + ``(now() at time zone 'UTC')::timestamp``:: + + # select (now() at time zone 'UTC')::timestamp; + timezone + ---------------------------- + 2013-06-18 08:30:37.292809 + + >>> str(datetime.datetime.utcnow()) + '2013-06-18 08:31:32.821177' + """ + def add(name, field): + """ add `field` with the given `name` if it does not exist yet """ + if name not in cls._columns and name not in cls._fields: + cls._add_field(name, field) + + # cyclic import + from . 
import fields + + # this field 'id' must override any other column or field + cls._add_field('id', fields.Id(automatic=True)) + + add('display_name', fields.Char(string='Name', + compute='_compute_display_name', inverse='_inverse_display_name', + search='_search_display_name', automatic=True)) + + if cls._log_access: + add('create_uid', fields.Many2one('res.users', string='Created by', automatic=True)) + add('create_date', fields.Datetime(string='Created on', automatic=True)) + add('write_uid', fields.Many2one('res.users', string='Last Updated by', automatic=True)) + add('write_date', fields.Datetime(string='Last Updated on', automatic=True)) + last_modified_name = 'compute_concurrency_field_with_access' + else: + last_modified_name = 'compute_concurrency_field' + + # this field must override any other column or field + cls._add_field(cls.CONCURRENCY_CHECK_FIELD, fields.Datetime( + string='Last Modified on', compute=last_modified_name, automatic=True)) + + @api.one + def compute_concurrency_field(self): + self[self.CONCURRENCY_CHECK_FIELD] = \ + datetime.datetime.utcnow().strftime(DEFAULT_SERVER_DATETIME_FORMAT) + + @api.one + @api.depends('create_date', 'write_date') + def compute_concurrency_field_with_access(self): + self[self.CONCURRENCY_CHECK_FIELD] = \ + self.write_date or self.create_date or \ + datetime.datetime.utcnow().strftime(DEFAULT_SERVER_DATETIME_FORMAT) + + # + # Goal: try to apply inheritance at the instanciation level and + # put objects in the pool var + # + @classmethod + def _build_model(cls, pool, cr): + """ Instanciate a given model. + + This class method instanciates the class of some model (i.e. a class + deriving from osv or osv_memory). The class might be the class passed + in argument or, if it inherits from another class, a class constructed + by combining the two classes. + + """ + + # IMPORTANT: the registry contains an instance for each model. 
The class + # of each model carries inferred metadata that is shared among the + # model's instances for this registry, but not among registries. Hence + # we cannot use that "registry class" for combining model classes by + # inheritance, since it confuses the metadata inference process. + + # Keep links to non-inherited constraints in cls; this is useful for + # instance when exporting translations + cls._local_constraints = cls.__dict__.get('_constraints', []) + cls._local_sql_constraints = cls.__dict__.get('_sql_constraints', []) + + # determine inherited models + parents = getattr(cls, '_inherit', []) + parents = [parents] if isinstance(parents, basestring) else (parents or []) + + # determine the model's name + name = cls._name or (len(parents) == 1 and parents[0]) or cls.__name__ + + # determine the module that introduced the model + original_module = pool[name]._original_module if name in parents else cls._module + + # build the class hierarchy for the model + for parent in parents: + if parent not in pool: + raise TypeError('The model "%s" specifies an unexisting parent class "%s"\n' + 'You may need to add a dependency on the parent class\' module.' 
% (name, parent)) + parent_model = pool[parent] + + # do no use the class of parent_model, since that class contains + # inferred metadata; use its ancestor instead + parent_class = type(parent_model).__base__ + + # don't inherit custom fields + columns = dict((key, val) + for key, val in parent_class._columns.iteritems() + if not val.manual + ) + columns.update(cls._columns) + + defaults = dict(parent_class._defaults) + defaults.update(cls._defaults) + + inherits = dict(parent_class._inherits) + inherits.update(cls._inherits) + + old_constraints = parent_class._constraints + new_constraints = cls._constraints + # filter out from old_constraints the ones overridden by a + # constraint with the same function name in new_constraints + constraints = new_constraints + [oldc + for oldc in old_constraints + if not any(newc[2] == oldc[2] and same_name(newc[0], oldc[0]) + for newc in new_constraints) + ] + + sql_constraints = cls._sql_constraints + \ + parent_class._sql_constraints + + attrs = { + '_name': name, + '_register': False, + '_columns': columns, + '_defaults': defaults, + '_inherits': inherits, + '_constraints': constraints, + '_sql_constraints': sql_constraints, + } + cls = type(name, (cls, parent_class), attrs) + + # introduce the "registry class" of the model; + # duplicate some attributes so that the ORM can modify them + attrs = { + '_name': name, + '_register': False, + '_columns': dict(cls._columns), + '_defaults': dict(cls._defaults), + '_inherits': dict(cls._inherits), + '_constraints': list(cls._constraints), + '_sql_constraints': list(cls._sql_constraints), + '_original_module': original_module, + } + cls = type(cls._name, (cls,), attrs) + + # float fields are registry-dependent (digit attribute); duplicate them + # to avoid issues + for key, col in cls._columns.items(): + if col._type == 'float': + cls._columns[key] = copy.copy(col) + + # link the class to the registry, and update the registry + cls.pool = pool + # Note: we have to insert an instance 
into the registry now, because it + # can trigger some stuff on other models which expect this new instance + # (like method _inherits_reload_src()) + model = object.__new__(cls) + cls._model = model # backward compatibility + pool.add(name, model) + + # determine description, table, sequence and log_access + if not cls._description: + cls._description = cls._name + if not cls._table: + cls._table = cls._name.replace('.', '_') + if not cls._sequence: + cls._sequence = cls._table + '_id_seq' + if not hasattr(cls, '_log_access'): + # If _log_access is not specified, it is the same value as _auto. + cls._log_access = cls._auto + + # Transience + if cls.is_transient(): + cls._transient_check_count = 0 + cls._transient_max_count = config.get('osv_memory_count_limit') + cls._transient_max_hours = config.get('osv_memory_age_limit') + assert cls._log_access, \ + "TransientModels must have log_access turned on, " \ + "in order to implement their access rights policy" + + # retrieve new-style fields and duplicate them (to avoid clashes with + # inheritance between different models) + cls._fields = {} + for attr, field in getmembers(cls, Field.__instancecheck__): + if not field._origin: + cls._add_field(attr, field.copy()) + + # introduce magic fields + cls._add_magic_fields() + + # register stuff about low-level function fields and custom fields + cls._init_function_fields(pool, cr) + cls._init_manual_fields(pool, cr) + + # process _inherits + cls._inherits_check() + cls._inherits_reload() + + # register constraints and onchange methods + cls._init_constraints_onchanges() + + # check defaults + for k in cls._defaults: + assert k in cls._fields, \ + "Model %s has a default for nonexiting field %s" % (cls._name, k) + + # restart columns + for column in cls._columns.itervalues(): + column.restart() + + # validate rec_name + if cls._rec_name: + assert cls._rec_name in cls._fields, \ + "Invalid rec_name %s for model %s" % (cls._rec_name, cls._name) + elif 'name' in cls._fields: + 
cls._rec_name = 'name' + + # prepare ormcache, which must be shared by all instances of the model + cls._ormcache = {} + + # complete the initialization of model + model.__init__(pool, cr) + return model + + @classmethod + def _init_function_fields(cls, pool, cr): + # initialize the list of non-stored function fields for this model + pool._pure_function_fields[cls._name] = [] + + # process store of low-level function fields + for fname, column in cls._columns.iteritems(): + if hasattr(column, 'digits_change'): + column.digits_change(cr) + # filter out existing store about this field + pool._store_function[cls._name] = [ + stored + for stored in pool._store_function.get(cls._name, []) + if (stored[0], stored[1]) != (cls._name, fname) + ] + if not isinstance(column, fields.function): + continue + if not column.store: + # register it on the pool for invalidation + pool._pure_function_fields[cls._name].append(fname) + continue + # process store parameter + store = column.store + if store is True: + get_ids = lambda self, cr, uid, ids, c={}: ids + store = {cls._name: (get_ids, None, column.priority, None)} + for model, spec in store.iteritems(): + if len(spec) == 4: + (fnct, fields2, order, length) = spec + elif len(spec) == 3: + (fnct, fields2, order) = spec + length = None + else: + raise except_orm('Error', + ('Invalid function definition %s in object %s !\nYou must use the definition: store={object:(fnct, fields, priority, time length)}.' 
% (fname, cls._name))) + pool._store_function.setdefault(model, []) + t = (cls._name, fname, fnct, tuple(fields2) if fields2 else None, order, length) + if t not in pool._store_function[model]: + pool._store_function[model].append(t) + pool._store_function[model].sort(key=lambda x: x[4]) + + @classmethod + def _init_manual_fields(cls, pool, cr): + # Check whether the query is already done + if pool.fields_by_model is not None: + manual_fields = pool.fields_by_model.get(cls._name, []) + else: + cr.execute('SELECT * FROM ir_model_fields WHERE model=%s AND state=%s', (cls._name, 'manual')) + manual_fields = cr.dictfetchall() + + for field in manual_fields: + if field['name'] in cls._columns: + continue + attrs = { + 'string': field['field_description'], + 'required': bool(field['required']), + 'readonly': bool(field['readonly']), + 'domain': eval(field['domain']) if field['domain'] else None, + 'size': field['size'] or None, + 'ondelete': field['on_delete'], + 'translate': (field['translate']), + 'manual': True, + '_prefetch': False, + #'select': int(field['select_level']) + } + if field['serialization_field_id']: + cr.execute('SELECT name FROM ir_model_fields WHERE id=%s', (field['serialization_field_id'],)) + attrs.update({'serialization_field': cr.fetchone()[0], 'type': field['ttype']}) + if field['ttype'] in ['many2one', 'one2many', 'many2many']: + attrs.update({'relation': field['relation']}) + cls._columns[field['name']] = fields.sparse(**attrs) + elif field['ttype'] == 'selection': + cls._columns[field['name']] = fields.selection(eval(field['selection']), **attrs) + elif field['ttype'] == 'reference': + cls._columns[field['name']] = fields.reference(selection=eval(field['selection']), **attrs) + elif field['ttype'] == 'many2one': + cls._columns[field['name']] = fields.many2one(field['relation'], **attrs) + elif field['ttype'] == 'one2many': + cls._columns[field['name']] = fields.one2many(field['relation'], field['relation_field'], **attrs) + elif field['ttype'] 
== 'many2many': + _rel1 = field['relation'].replace('.', '_') + _rel2 = field['model'].replace('.', '_') + _rel_name = 'x_%s_%s_%s_rel' % (_rel1, _rel2, field['name']) + cls._columns[field['name']] = fields.many2many(field['relation'], _rel_name, 'id1', 'id2', **attrs) + else: + cls._columns[field['name']] = getattr(fields, field['ttype'])(**attrs) + + @classmethod + def _init_constraints_onchanges(cls): + # store sql constraint error messages + for (key, _, msg) in cls._sql_constraints: + cls.pool._sql_error[cls._table + '_' + key] = msg + + # collect constraint and onchange methods + cls._constraint_methods = [] + cls._onchange_methods = defaultdict(list) + for attr, func in getmembers(cls, callable): + if hasattr(func, '_constrains'): + if not all(name in cls._fields for name in func._constrains): + _logger.warning("@constrains%r parameters must be field names", func._constrains) + cls._constraint_methods.append(func) + if hasattr(func, '_onchange'): + if not all(name in cls._fields for name in func._onchange): + _logger.warning("@onchange%r parameters must be field names", func._onchange) + for name in func._onchange: + cls._onchange_methods[name].append(func) + + def __new__(cls): + # In the past, this method was registering the model class in the server. + # This job is now done entirely by the metaclass MetaModel. + # + # Do not create an instance here. Model instances are created by method + # _build_model(). + return None + + def __init__(self, pool, cr): + # this method no longer does anything; kept for backward compatibility + pass + + def __export_xml_id(self): + """ Return a valid xml_id for the record `self`. 
""" + ir_model_data = self.sudo().env['ir.model.data'] + data = ir_model_data.search([('model', '=', self._name), ('res_id', '=', self.id)]) + if data: + if data.module: + return '%s.%s' % (data.module, data.name) + else: + return data.name + else: + postfix = 0 + name = '%s_%s' % (self._table, self.id) + while ir_model_data.search([('module', '=', '__export__'), ('name', '=', name)]): + postfix += 1 + name = '%s_%s_%s' % (self._table, self.id, postfix) + ir_model_data.create({ + 'model': self._name, + 'res_id': self.id, + 'module': '__export__', + 'name': name, + }) + return '__export__.' + name + + @api.multi + def __export_rows(self, fields): + """ Export fields of the records in `self`. + + :param fields: list of lists of fields to traverse + :return: list of lists of corresponding values + """ + lines = [] + for record in self: + # main line of record, initially empty + current = [''] * len(fields) + lines.append(current) + + # list of primary fields followed by secondary field(s) + primary_done = [] + + # process column by column + for i, path in enumerate(fields): + if not path: + continue + + name = path[0] + if name in primary_done: + continue + + if name == '.id': + current[i] = str(record.id) + elif name == 'id': + current[i] = record.__export_xml_id() + else: + field = record._fields[name] + value = record[name] + + # this part could be simpler, but it has to be done this way + # in order to reproduce the former behavior + if not isinstance(value, BaseModel): + current[i] = field.convert_to_export(value, self.env) + else: + primary_done.append(name) + + # This is a special case, its strange behavior is intended! 
+ if field.type == 'many2many' and len(path) > 1 and path[1] == 'id': + xml_ids = [r.__export_xml_id() for r in value] + current[i] = ','.join(xml_ids) or False + continue + + # recursively export the fields that follow name + fields2 = [(p[1:] if p and p[0] == name else []) for p in fields] + lines2 = value.__export_rows(fields2) + if lines2: + # merge first line with record's main line + for j, val in enumerate(lines2[0]): + if val: + current[j] = val + # check value of current field + if not current[i]: + # assign xml_ids, and forget about remaining lines + xml_ids = [item[1] for item in value.name_get()] + current[i] = ','.join(xml_ids) + else: + # append the other lines at the end + lines += lines2[1:] + else: + current[i] = False + + return lines + + @api.multi + def export_data(self, fields_to_export, raw_data=False): + """ Export fields for selected objects + + :param fields_to_export: list of fields + :param raw_data: True to return value in native Python type + :rtype: dictionary with a *datas* matrix + + This method is used when exporting data via client menu + """ + fields_to_export = map(fix_import_export_id_paths, fields_to_export) + if raw_data: + self = self.with_context(export_raw_data=True) + return {'datas': self.__export_rows(fields_to_export)} + + def import_data(self, cr, uid, fields, datas, mode='init', current_module='', noupdate=False, context=None, filename=None): + """ + .. deprecated:: 7.0 + Use :meth:`~load` instead + + Import given data in given module + + This method is used when importing data via client menu. 
+ + Example of fields to import for a sale.order:: + + .id, (=database_id) + partner_id, (=name_search) + order_line/.id, (=database_id) + order_line/name, + order_line/product_id/id, (=xml id) + order_line/price_unit, + order_line/product_uom_qty, + order_line/product_uom/id (=xml_id) + + This method returns a 4-tuple with the following structure:: + + (return_code, errored_resource, error_message, unused) + + * The first item is a return code, it is ``-1`` in case of + import error, or the last imported row number in case of success + * The second item contains the record data dict that failed to import + in case of error, otherwise it's 0 + * The third item contains an error message string in case of error, + otherwise it's 0 + * The last item is currently unused, with no specific semantics + + :param fields: list of fields to import + :param datas: data to import + :param mode: 'init' or 'update' for record creation + :param current_module: module name + :param noupdate: flag for record creation + :param filename: optional file to store partial import state for recovery + :returns: 4-tuple in the form (return_code, errored_resource, error_message, unused) + :rtype: (int, dict or 0, str or 0, str or 0) + """ + context = dict(context) if context is not None else {} + context['_import_current_module'] = current_module + + fields = map(fix_import_export_id_paths, fields) + ir_model_data_obj = self.pool.get('ir.model.data') + + def log(m): + if m['type'] == 'error': + raise Exception(m['message']) + + if config.get('import_partial') and filename: + with open(config.get('import_partial'), 'rb') as partial_import_file: + data = pickle.load(partial_import_file) + position = data.get(filename, 0) + + position = 0 + try: + for res_id, xml_id, res, info in self._convert_records(cr, uid, + self._extract_records(cr, uid, fields, datas, + context=context, log=log), + context=context, log=log): + ir_model_data_obj._update(cr, uid, self._name, + current_module, res, mode=mode, 
xml_id=xml_id, + noupdate=noupdate, res_id=res_id, context=context) + position = info.get('rows', {}).get('to', 0) + 1 + if config.get('import_partial') and filename and (not (position%100)): + with open(config.get('import_partial'), 'rb') as partial_import: + data = pickle.load(partial_import) + data[filename] = position + with open(config.get('import_partial'), 'wb') as partial_import: + pickle.dump(data, partial_import) + if context.get('defer_parent_store_computation'): + self._parent_store_compute(cr) + cr.commit() + except Exception, e: + cr.rollback() + return -1, {}, 'Line %d : %s' % (position + 1, tools.ustr(e)), '' + + if context.get('defer_parent_store_computation'): + self._parent_store_compute(cr) + return position, 0, 0, 0 + + def load(self, cr, uid, fields, data, context=None): + """ + Attempts to load the data matrix, and returns a list of ids (or + ``False`` if there was an error and no id could be generated) and a + list of messages. + + The ids are those of the records created and saved (in database), in + the same order they were extracted from the file. 
They can be passed + directly to :meth:`~read` + + :param fields: list of fields to import, at the same index as the corresponding data + :type fields: list(str) + :param data: row-major matrix of data to import + :type data: list(list(str)) + :param dict context: + :returns: {ids: list(int)|False, messages: [Message]} + """ + cr.execute('SAVEPOINT model_load') + messages = [] + + fields = map(fix_import_export_id_paths, fields) + ModelData = self.pool['ir.model.data'].clear_caches() + + fg = self.fields_get(cr, uid, context=context) + + mode = 'init' + current_module = '' + noupdate = False + + ids = [] + for id, xid, record, info in self._convert_records(cr, uid, + self._extract_records(cr, uid, fields, data, + context=context, log=messages.append), + context=context, log=messages.append): + try: + cr.execute('SAVEPOINT model_load_save') + except psycopg2.InternalError, e: + # broken transaction, exit and hope the source error was + # already logged + if not any(message['type'] == 'error' for message in messages): + messages.append(dict(info, type='error',message= + u"Unknown database error: '%s'" % e)) + break + try: + ids.append(ModelData._update(cr, uid, self._name, + current_module, record, mode=mode, xml_id=xid, + noupdate=noupdate, res_id=id, context=context)) + cr.execute('RELEASE SAVEPOINT model_load_save') + except psycopg2.Warning, e: + messages.append(dict(info, type='warning', message=str(e))) + cr.execute('ROLLBACK TO SAVEPOINT model_load_save') + except psycopg2.Error, e: + messages.append(dict( + info, type='error', + **PGERROR_TO_OE[e.pgcode](self, fg, info, e))) + # Failed to write, log to messages, rollback savepoint (to + # avoid broken transaction) and keep going + cr.execute('ROLLBACK TO SAVEPOINT model_load_save') + if any(message['type'] == 'error' for message in messages): + cr.execute('ROLLBACK TO SAVEPOINT model_load') + ids = False + return {'ids': ids, 'messages': messages} + + def _extract_records(self, cr, uid, fields_, data, + 
context=None, log=lambda a: None): + """ Generates record dicts from the data sequence. + + The result is a generator of dicts mapping field names to raw + (unconverted, unvalidated) values. + + For relational fields, if sub-fields were provided the value will be + a list of sub-records + + The following sub-fields may be set on the record (by key): + * None is the name_get for the record (to use with name_create/name_search) + * "id" is the External ID for the record + * ".id" is the Database ID for the record + """ + columns = dict((k, v.column) for k, v in self._all_columns.iteritems()) + # Fake columns to avoid special cases in extractor + columns[None] = fields.char('rec_name') + columns['id'] = fields.char('External ID') + columns['.id'] = fields.integer('Database ID') + + # m2o fields can't be on multiple lines so exclude them from the + # is_relational field rows filter, but special-case it later on to + # be handled with relational fields (as it can have subfields) + is_relational = lambda field: columns[field]._type in ('one2many', 'many2many', 'many2one') + get_o2m_values = itemgetter_tuple( + [index for index, field in enumerate(fields_) + if columns[field[0]]._type == 'one2many']) + get_nono2m_values = itemgetter_tuple( + [index for index, field in enumerate(fields_) + if columns[field[0]]._type != 'one2many']) + # Checks if the provided row has any non-empty non-relational field + def only_o2m_values(row, f=get_nono2m_values, g=get_o2m_values): + return any(g(row)) and not any(f(row)) + + index = 0 + while True: + if index >= len(data): return + + row = data[index] + # copy non-relational fields to record dict + record = dict((field[0], value) + for field, value in itertools.izip(fields_, row) + if not is_relational(field[0])) + + # Get all following rows which have relational values attached to + # the current record (no non-relational values) + record_span = itertools.takewhile( + only_o2m_values, itertools.islice(data, index + 1, None)) + # stitch 
record row back on for relational fields + record_span = list(itertools.chain([row], record_span)) + for relfield in set( + field[0] for field in fields_ + if is_relational(field[0])): + column = columns[relfield] + # FIXME: how to not use _obj without relying on fields_get? + Model = self.pool[column._obj] + + # get only cells for this sub-field, should be strictly + # non-empty, field path [None] is for name_get column + indices, subfields = zip(*((index, field[1:] or [None]) + for index, field in enumerate(fields_) + if field[0] == relfield)) + + # return all rows which have at least one value for the + # subfields of relfield + relfield_data = filter(any, map(itemgetter_tuple(indices), record_span)) + record[relfield] = [subrecord + for subrecord, _subinfo in Model._extract_records( + cr, uid, subfields, relfield_data, + context=context, log=log)] + + yield record, {'rows': { + 'from': index, + 'to': index + len(record_span) - 1 + }} + index += len(record_span) + + def _convert_records(self, cr, uid, records, + context=None, log=lambda a: None): + """ Converts records from the source iterable (recursive dicts of + strings) into forms which can be written to the database (via + self.create or (ir.model.data)._update) + + :returns: a list of triplets of (id, xid, record) + :rtype: list((int|None, str|None, dict)) + """ + if context is None: context = {} + Converter = self.pool['ir.fields.converter'] + columns = dict((k, v.column) for k, v in self._all_columns.iteritems()) + Translation = self.pool['ir.translation'] + field_names = dict( + (f, (Translation._get_source(cr, uid, self._name + ',' + f, 'field', + context.get('lang')) + or column.string)) + for f, column in columns.iteritems()) + + convert = Converter.for_model(cr, uid, self, context=context) + + def _log(base, field, exception): + type = 'warning' if isinstance(exception, Warning) else 'error' + # logs the logical (not human-readable) field name for automated + # processing of response, but injects 
human readable in message + record = dict(base, type=type, field=field, + message=unicode(exception.args[0]) % base) + if len(exception.args) > 1 and exception.args[1]: + record.update(exception.args[1]) + log(record) + + stream = CountingStream(records) + for record, extras in stream: + dbid = False + xid = False + # name_get/name_create + if None in record: pass + # xid + if 'id' in record: + xid = record['id'] + # dbid + if '.id' in record: + try: + dbid = int(record['.id']) + except ValueError: + # in case of overridden id column + dbid = record['.id'] + if not self.search(cr, uid, [('id', '=', dbid)], context=context): + log(dict(extras, + type='error', + record=stream.index, + field='.id', + message=_(u"Unknown database identifier '%s'") % dbid)) + dbid = False + + converted = convert(record, lambda field, err:\ + _log(dict(extras, record=stream.index, field=field_names[field]), field, err)) + + yield dbid, xid, converted, dict(extras, record=stream.index) + + @api.multi + def _validate_fields(self, field_names): + field_names = set(field_names) + + # old-style constraint methods + trans = self.env['ir.translation'] + cr, uid, context = self.env.args + ids = self.ids + errors = [] + for fun, msg, names in self._constraints: + try: + # validation must be context-independent; call `fun` without context + valid = not (set(names) & field_names) or fun(self._model, cr, uid, ids) + extra_error = None + except Exception, e: + _logger.debug('Exception while validating constraint', exc_info=True) + valid = False + extra_error = tools.ustr(e) + if not valid: + if callable(msg): + res_msg = msg(self._model, cr, uid, ids, context=context) + if isinstance(res_msg, tuple): + template, params = res_msg + res_msg = template % params + else: + res_msg = trans._get_source(self._name, 'constraint', self.env.lang, msg) + if extra_error: + res_msg += "\n\n%s\n%s" % (_('Error details:'), extra_error) + errors.append( + _("Field(s) `%s` failed against a constraint: %s") % + (', 
'.join(names), res_msg) + ) + if errors: + raise except_orm('ValidateError', '\n'.join(errors)) + + # new-style constraint methods + for check in self._constraint_methods: + if set(check._constrains) & field_names: + check(self) + + def default_get(self, cr, uid, fields_list, context=None): + """ Return default values for the fields in `fields_list`. Default + values are determined by the context, user defaults, and the model + itself. + + :param fields_list: a list of field names + :return: a dictionary mapping each field name to its corresponding + default value; the keys of the dictionary are the fields in + `fields_list` that have a default value different from ``False``. + + This method should not be overridden. In order to change the + mechanism for determining default values, you should override method + :meth:`add_default_value` instead. + """ + # trigger view init hook + self.view_init(cr, uid, fields_list, context) + + # use a new record to determine default values + record = self.new(cr, uid, {}, context=context) + for name in fields_list: + if name in self._fields: + record[name] # force evaluation of defaults + + # retrieve defaults from record's cache + return self._convert_to_write(record._cache) + + def add_default_value(self, field): + """ Set the default value of `field` to the new record `self`. + The value must be assigned to `self`. + """ + assert not self.id, "Expected new record: %s" % self + cr, uid, context = self.env.args + name = field.name + + # 1. look up context + key = 'default_' + name + if key in context: + self[name] = context[key] + return + + # 2. look up ir_values + # Note: performance is good, because get_defaults_dict is cached! + ir_values_dict = self.env['ir.values'].get_defaults_dict(self._name) + if name in ir_values_dict: + self[name] = ir_values_dict[name] + return + + # 3. 
look up property fields + # TODO: get rid of this one + column = self._columns.get(name) + if isinstance(column, fields.property): + self[name] = self.env['ir.property'].get(name, self._name) + return + + # 4. look up _defaults + if name in self._defaults: + value = self._defaults[name] + if callable(value): + value = value(self._model, cr, uid, context) + self[name] = value + return + + # 5. delegate to field + field.determine_default(self) + + def fields_get_keys(self, cr, user, context=None): + res = self._columns.keys() + # TODO I believe this loop can be replace by + # res.extend(self._inherit_fields.key()) + for parent in self._inherits: + res.extend(self.pool[parent].fields_get_keys(cr, user, context)) + return res + + def _rec_name_fallback(self, cr, uid, context=None): + rec_name = self._rec_name + if rec_name not in self._columns: + rec_name = self._columns.keys()[0] if len(self._columns.keys()) > 0 else "id" + return rec_name + + # + # Overload this method if you need a window title which depends on the context + # + def view_header_get(self, cr, user, view_id=None, view_type='form', context=None): + return False + + def user_has_groups(self, cr, uid, groups, context=None): + """Return true if the user is at least member of one of the groups + in groups_str. Typically used to resolve `groups` attribute + in view and model definitions. + + :param str groups: comma-separated list of fully-qualified group + external IDs, e.g.: ``base.group_user,base.group_system`` + :return: True if the current user is a member of one of the + given groups + """ + return any(self.pool['res.users'].has_group(cr, uid, group_ext_id) + for group_ext_id in groups.split(',')) + + def _get_default_form_view(self, cr, user, context=None): + """ Generates a default single-line form view using all fields + of the current model except the m2m and o2m ones. 
+ + :param cr: database cursor + :param int user: user id + :param dict context: connection context + :returns: a form view as an lxml document + :rtype: etree._Element + """ + view = etree.Element('form', string=self._description) + group = etree.SubElement(view, 'group', col="4") + for fname, field in self._fields.iteritems(): + if field.automatic or field.type in ('one2many', 'many2many'): + continue + + etree.SubElement(group, 'field', name=fname) + if field.type == 'text': + etree.SubElement(group, 'newline') + return view + + def _get_default_search_view(self, cr, user, context=None): + """ Generates a single-field search view, based on _rec_name. + + :param cr: database cursor + :param int user: user id + :param dict context: connection context + :returns: a tree view as an lxml document + :rtype: etree._Element + """ + view = etree.Element('search', string=self._description) + etree.SubElement(view, 'field', name=self._rec_name_fallback(cr, user, context)) + return view + + def _get_default_tree_view(self, cr, user, context=None): + """ Generates a single-field tree view, based on _rec_name. + + :param cr: database cursor + :param int user: user id + :param dict context: connection context + :returns: a tree view as an lxml document + :rtype: etree._Element + """ + view = etree.Element('tree', string=self._description) + etree.SubElement(view, 'field', name=self._rec_name_fallback(cr, user, context)) + return view + + def _get_default_calendar_view(self, cr, user, context=None): + """ Generates a default calendar view by trying to infer + calendar fields from a number of pre-set attribute names + + :param cr: database cursor + :param int user: user id + :param dict context: connection context + :returns: a calendar view + :rtype: etree._Element + """ + def set_first_of(seq, in_, to): + """Sets the first value of `seq` also found in `in_` to + the `to` attribute of the view being closed over. 
+ + Returns whether it's found a suitable value (and set it on + the attribute) or not + """ + for item in seq: + if item in in_: + view.set(to, item) + return True + return False + + view = etree.Element('calendar', string=self._description) + etree.SubElement(view, 'field', name=self._rec_name_fallback(cr, user, context)) + + if self._date_name not in self._columns: + date_found = False + for dt in ['date', 'date_start', 'x_date', 'x_date_start']: + if dt in self._columns: + self._date_name = dt + date_found = True + break + + if not date_found: + raise except_orm(_('Invalid Object Architecture!'), _("Insufficient fields for Calendar View!")) + view.set('date_start', self._date_name) + + set_first_of(["user_id", "partner_id", "x_user_id", "x_partner_id"], + self._columns, 'color') + + if not set_first_of(["date_stop", "date_end", "x_date_stop", "x_date_end"], + self._columns, 'date_stop'): + if not set_first_of(["date_delay", "planned_hours", "x_date_delay", "x_planned_hours"], + self._columns, 'date_delay'): + raise except_orm( + _('Invalid Object Architecture!'), + _("Insufficient fields to generate a Calendar View for %s, missing a date_stop or a date_delay" % self._name)) + + return view + + def fields_view_get(self, cr, uid, view_id=None, view_type='form', context=None, toolbar=False, submenu=False): + """ + Get the detailed composition of the requested view like fields, model, view architecture + + :param view_id: id of the view or None + :param view_type: type of the view to return if view_id is None ('form', tree', ...) 
+ :param toolbar: true to include contextual actions + :param submenu: deprecated + :return: dictionary describing the composition of the requested view (including inherited views and extensions) + :raise AttributeError: + * if the inherited view has unknown position to work with other than 'before', 'after', 'inside', 'replace' + * if some tag other than 'position' is found in parent view + :raise Invalid ArchitectureError: if there is view type other than form, tree, calendar, search etc defined on the structure + """ + if context is None: + context = {} + View = self.pool['ir.ui.view'] + + result = { + 'model': self._name, + 'field_parent': False, + } + + # try to find a view_id if none provided + if not view_id: + # <view_type>_view_ref in context can be used to overrride the default view + view_ref_key = view_type + '_view_ref' + view_ref = context.get(view_ref_key) + if view_ref: + if '.' in view_ref: + module, view_ref = view_ref.split('.', 1) + cr.execute("SELECT res_id FROM ir_model_data WHERE model='ir.ui.view' AND module=%s AND name=%s", (module, view_ref)) + view_ref_res = cr.fetchone() + if view_ref_res: + view_id = view_ref_res[0] + else: + _logger.warning('%r requires a fully-qualified external id (got: %r for model %s). 
' + 'Please use the complete `module.view_id` form instead.', view_ref_key, view_ref, + self._name) + + if not view_id: + # otherwise try to find the lowest priority matching ir.ui.view + view_id = View.default_view(cr, uid, self._name, view_type, context=context) + + # context for post-processing might be overriden + ctx = context + if view_id: + # read the view with inherited views applied + root_view = View.read_combined(cr, uid, view_id, fields=['id', 'name', 'field_parent', 'type', 'model', 'arch'], context=context) + result['arch'] = root_view['arch'] + result['name'] = root_view['name'] + result['type'] = root_view['type'] + result['view_id'] = root_view['id'] + result['field_parent'] = root_view['field_parent'] + # override context fro postprocessing + if root_view.get('model') != self._name: + ctx = dict(context, base_model_name=root_view.get('model')) + else: + # fallback on default views methods if no ir.ui.view could be found + try: + get_func = getattr(self, '_get_default_%s_view' % view_type) + arch_etree = get_func(cr, uid, context) + result['arch'] = etree.tostring(arch_etree, encoding='utf-8') + result['type'] = view_type + result['name'] = 'default' + except AttributeError: + raise except_orm(_('Invalid Architecture!'), _("No default view of type '%s' could be found !") % view_type) + + # Apply post processing, groups and modifiers etc... 
+ xarch, xfields = View.postprocess_and_fields(cr, uid, self._name, etree.fromstring(result['arch']), view_id, context=ctx) + result['arch'] = xarch + result['fields'] = xfields + + # Add related action information if aksed + if toolbar: + toclean = ('report_sxw_content', 'report_rml_content', 'report_sxw', 'report_rml', 'report_sxw_content_data', 'report_rml_content_data') + def clean(x): + x = x[2] + for key in toclean: + x.pop(key, None) + return x + ir_values_obj = self.pool.get('ir.values') + resprint = ir_values_obj.get(cr, uid, 'action', 'client_print_multi', [(self._name, False)], False, context) + resaction = ir_values_obj.get(cr, uid, 'action', 'client_action_multi', [(self._name, False)], False, context) + resrelate = ir_values_obj.get(cr, uid, 'action', 'client_action_relate', [(self._name, False)], False, context) + resaction = [clean(action) for action in resaction if view_type == 'tree' or not action[2].get('multi')] + resprint = [clean(print_) for print_ in resprint if view_type == 'tree' or not print_[2].get('multi')] + #When multi="True" set it will display only in More of the list view + resrelate = [clean(action) for action in resrelate + if (action[2].get('multi') and view_type == 'tree') or (not action[2].get('multi') and view_type == 'form')] + + for x in itertools.chain(resprint, resaction, resrelate): + x['string'] = x['name'] + + result['toolbar'] = { + 'print': resprint, + 'action': resaction, + 'relate': resrelate + } + return result + + def get_formview_id(self, cr, uid, id, context=None): + """ Return an view id to open the document with. This method is meant to be + overridden in addons that want to give specific view ids for example. + + :param int id: id of the document to open + """ + return False + + def get_formview_action(self, cr, uid, id, context=None): + """ Return an action to open the document. This method is meant to be + overridden in addons that want to give specific view ids for example. 
+ + :param int id: id of the document to open + """ + view_id = self.get_formview_id(cr, uid, id, context=context) + return { + 'type': 'ir.actions.act_window', + 'res_model': self._name, + 'view_type': 'form', + 'view_mode': 'form', + 'views': [(view_id, 'form')], + 'target': 'current', + 'res_id': id, + } + + def _view_look_dom_arch(self, cr, uid, node, view_id, context=None): + return self.pool['ir.ui.view'].postprocess_and_fields( + cr, uid, self._name, node, view_id, context=context) + + def search_count(self, cr, user, args, context=None): + res = self.search(cr, user, args, context=context, count=True) + if isinstance(res, list): + return len(res) + return res + + @api.returns('self') + def search(self, cr, user, args, offset=0, limit=None, order=None, context=None, count=False): + """ + Search for records based on a search domain. + + :param cr: database cursor + :param user: current user id + :param args: list of tuples specifying the search domain [('field_name', 'operator', value), ...]. Pass an empty list to match all records. + :param offset: optional number of results to skip in the returned values (default: 0) + :param limit: optional max number of records to return (default: **None**) + :param order: optional columns to sort by (default: self._order=id ) + :param context: optional context arguments, like lang, time zone + :type context: dictionary + :param count: optional (default: **False**), if **True**, returns only the number of records matching the criteria, not their ids + :return: id or list of ids of records matching the criteria + :rtype: integer or list of integers + :raise AccessError: * if user tries to bypass access rules for read on the requested object. 
+ + **Expressing a search domain (args)** + + Each tuple in the search domain needs to have 3 elements, in the form: **('field_name', 'operator', value)**, where: + + * **field_name** must be a valid name of field of the object model, possibly following many-to-one relationships using dot-notation, e.g 'street' or 'partner_id.country' are valid values. + * **operator** must be a string with a valid comparison operator from this list: ``=, !=, >, >=, <, <=, like, ilike, in, not in, child_of, parent_left, parent_right`` + The semantics of most of these operators are obvious. + The ``child_of`` operator will look for records who are children or grand-children of a given record, + according to the semantics of this model (i.e following the relationship field named by + ``self._parent_name``, by default ``parent_id``. + * **value** must be a valid value to compare with the values of **field_name**, depending on its type. + + Domain criteria can be combined using 3 logical operators than can be added between tuples: '**&**' (logical AND, default), '**|**' (logical OR), '**!**' (logical NOT). + These are **prefix** operators and the arity of the '**&**' and '**|**' operator is 2, while the arity of the '**!**' is just 1. + Be very careful about this when you combine them the first time. 
+ + Here is an example of searching for Partners named *ABC* from Belgium and Germany whose language is not english :: + + [('name','=','ABC'),'!',('language.code','=','en_US'),'|',('country_id.code','=','be'),('country_id.code','=','de')) + + The '&' is omitted as it is the default, and of course we could have used '!=' for the language, but what this domain really represents is:: + + (name is 'ABC' AND (language is NOT english) AND (country is Belgium OR Germany)) + + """ + return self._search(cr, user, args, offset=offset, limit=limit, order=order, context=context, count=count) + + # + # display_name, name_get, name_create, name_search + # + + @api.depends(lambda self: (self._rec_name,) if self._rec_name else ()) + def _compute_display_name(self): + name = self._rec_name + if name in self._fields: + convert = self._fields[name].convert_to_display_name + for record in self: + record.display_name = convert(record[name]) + else: + for record in self: + record.display_name = "%s,%s" % (self._name, self.id) + + def _inverse_display_name(self): + name = self._rec_name + if name in self._fields and not self._fields[name].relational: + for record in self: + record[name] = record.display_name + else: + _logger.warning("Cannot inverse field display_name on %s", self._name) + + def _search_display_name(self, operator, value): + name = self._rec_name + if name in self._fields: + return [(name, operator, value)] + else: + _logger.warning("Cannot search field display_name on %s", self._name) + return [(0, '=', 1)] + + @api.multi + def name_get(self): + """ Return a textual representation for the records in `self`. + By default this is the value of field ``display_name``. 
+ + :rtype: list(tuple) + :return: list of pairs ``(id, text_repr)`` for all records + """ + result = [] + for record in self: + try: + result.append((record.id, record.display_name)) + except MissingError: + pass + return result + + @api.model + def name_create(self, name): + """ Create a new record by calling :meth:`~.create` with only one value + provided: the display name of the new record. + + The new record will be initialized with any default values + applicable to this model, or provided through the context. The usual + behavior of :meth:`~.create` applies. + + :param name: display name of the record to create + :rtype: tuple + :return: the :meth:`~.name_get` pair value of the created record + """ + # Shortcut the inverse function of 'display_name' with self._rec_name. + # This is useful when self._rec_name is a required field: in that case, + # create() creates a record without the field, and inverse display_name + # afterwards. + field_name = self._rec_name if self._rec_name else 'display_name' + record = self.create({field_name: name}) + return (record.id, record.display_name) + + @api.model + def name_search(self, name='', args=None, operator='ilike', limit=100): + """ Search for records that have a display name matching the given + `name` pattern when compared with the given `operator`, while also + matching the optional search domain (`args`). + + This is used for example to provide suggestions based on a partial + value for a relational field. Sometimes be seen as the inverse + function of :meth:`~.name_get`, but it is not guaranteed to be. + + This method is equivalent to calling :meth:`~.search` with a search + domain based on `display_name` and then :meth:`~.name_get` on the + result of the search. + + :param name: the name pattern to match + :param list args: optional search domain (see :meth:`~.search` for + syntax), specifying further restrictions + :param str operator: domain operator for matching `name`, such as + ``'like'`` or ``'='``. 
+ :param int limit: optional max number of records to return + :rtype: list + :return: list of pairs ``(id, text_repr)`` for all matching records. + """ + args = list(args or []) + if not (name == '' and operator == 'ilike'): + args += [('display_name', operator, name)] + return self.search(args, limit=limit).name_get() + + def _name_search(self, cr, user, name='', args=None, operator='ilike', context=None, limit=100, name_get_uid=None): + # private implementation of name_search, allows passing a dedicated user + # for the name_get part to solve some access rights issues + args = list(args or []) + # optimize out the default criterion of ``ilike ''`` that matches everything + if not (name == '' and operator == 'ilike'): + args += [('display_name', operator, name)] + access_rights_uid = name_get_uid or user + ids = self._search(cr, user, args, limit=limit, context=context, access_rights_uid=access_rights_uid) + res = self.name_get(cr, access_rights_uid, ids, context) + return res + + def read_string(self, cr, uid, id, langs, fields=None, context=None): + res = {} + res2 = {} + self.pool.get('ir.translation').check_access_rights(cr, uid, 'read') + if not fields: + fields = self._columns.keys() + self._inherit_fields.keys() + #FIXME: collect all calls to _get_source into one SQL call. 
+ for lang in langs: + res[lang] = {'code': lang} + for f in fields: + if f in self._columns: + res_trans = self.pool.get('ir.translation')._get_source(cr, uid, self._name+','+f, 'field', lang) + if res_trans: + res[lang][f] = res_trans + else: + res[lang][f] = self._columns[f].string + for table in self._inherits: + cols = intersect(self._inherit_fields.keys(), fields) + res2 = self.pool[table].read_string(cr, uid, id, langs, cols, context) + for lang in res2: + if lang in res: + res[lang]['code'] = lang + for f in res2[lang]: + res[lang][f] = res2[lang][f] + return res + + def write_string(self, cr, uid, id, langs, vals, context=None): + self.pool.get('ir.translation').check_access_rights(cr, uid, 'write') + #FIXME: try to only call the translation in one SQL + for lang in langs: + for field in vals: + if field in self._columns: + src = self._columns[field].string + self.pool.get('ir.translation')._set_ids(cr, uid, self._name+','+field, 'field', lang, [0], vals[field], src) + for table in self._inherits: + cols = intersect(self._inherit_fields.keys(), vals) + if cols: + self.pool[table].write_string(cr, uid, id, langs, vals, context) + return True + + def _add_missing_default_values(self, cr, uid, values, context=None): + # avoid overriding inherited values when parent is set + avoid_tables = [] + for tables, parent_field in self._inherits.items(): + if parent_field in values: + avoid_tables.append(tables) + + # compute missing fields + missing_defaults = set() + for field in self._columns.keys(): + if not field in values: + missing_defaults.add(field) + for field in self._inherit_fields.keys(): + if (field not in values) and (self._inherit_fields[field][0] not in avoid_tables): + missing_defaults.add(field) + # discard magic fields + missing_defaults -= set(MAGIC_COLUMNS) + + if missing_defaults: + # override defaults with the provided values, never allow the other way around + defaults = self.default_get(cr, uid, list(missing_defaults), context) + for dv in 
defaults: + if ((dv in self._columns and self._columns[dv]._type == 'many2many') \ + or (dv in self._inherit_fields and self._inherit_fields[dv][2]._type == 'many2many')) \ + and defaults[dv] and isinstance(defaults[dv][0], (int, long)): + defaults[dv] = [(6, 0, defaults[dv])] + if (dv in self._columns and self._columns[dv]._type == 'one2many' \ + or (dv in self._inherit_fields and self._inherit_fields[dv][2]._type == 'one2many')) \ + and isinstance(defaults[dv], (list, tuple)) and defaults[dv] and isinstance(defaults[dv][0], dict): + defaults[dv] = [(0, 0, x) for x in defaults[dv]] + defaults.update(values) + values = defaults + return values + + def clear_caches(self): + """ Clear the caches + + This clears the caches associated to methods decorated with + ``tools.ormcache`` or ``tools.ormcache_multi``. + """ + try: + self._ormcache.clear() + self.pool._any_cache_cleared = True + except AttributeError: + pass + + + def _read_group_fill_results(self, cr, uid, domain, groupby, remaining_groupbys, aggregated_fields, + read_group_result, read_group_order=None, context=None): + """Helper method for filling in empty groups for all possible values of + the field being grouped by""" + + # self._group_by_full should map groupable fields to a method that returns + # a list of all aggregated values that we want to display for this field, + # in the form of a m2o-like pair (key,label). + # This is useful to implement kanban views for instance, where all columns + # should be displayed even if they don't contain any record. 
    def _read_group_fill_results(self, cr, uid, domain, groupby, remaining_groupbys, aggregated_fields,
                                 read_group_result, read_group_order=None, context=None):
        """Helper method for filling in empty groups for all possible values of
           the field being grouped by"""

        # self._group_by_full should map groupable fields to a method that returns
        # a list of all aggregated values that we want to display for this field,
        # in the form of a m2o-like pair (key,label).
        # This is useful to implement kanban views for instance, where all columns
        # should be displayed even if they don't contain any record.

        # Grab the list of all groups that should be displayed, including all present groups
        present_group_ids = [x[groupby][0] for x in read_group_result if x[groupby]]
        # queried as superuser so that groups the user cannot read still show up
        all_groups,folded = self._group_by_full[groupby](self, cr, uid, present_group_ids, domain,
                                                         read_group_order=read_group_order,
                                                         access_rights_uid=openerp.SUPERUSER_ID,
                                                         context=context)

        # template for an empty group: all aggregates False, count 0
        result_template = dict.fromkeys(aggregated_fields, False)
        result_template[groupby + '_count'] = 0
        if remaining_groupbys:
            result_template['__context'] = {'group_by': remaining_groupbys}

        # Merge the left_side (current results as dicts) with the right_side (all
        # possible values as m2o pairs). Both lists are supposed to be using the
        # same ordering, and can be merged in one pass.
        result = []
        known_values = {}
        def append_left(left_side):
            grouped_value = left_side[groupby] and left_side[groupby][0]
            if not grouped_value in known_values:
                result.append(left_side)
                known_values[grouped_value] = left_side
            else:
                count_attr = groupby + '_count'
                known_values[grouped_value].update({count_attr: left_side[count_attr]})
        def append_right(right_side):
            grouped_value = right_side[0]
            if not grouped_value in known_values:
                line = dict(result_template)
                line[groupby] = right_side
                line['__domain'] = [(groupby,'=',grouped_value)] + domain
                result.append(line)
                known_values[grouped_value] = line
        while read_group_result or all_groups:
            left_side = read_group_result[0] if read_group_result else None
            right_side = all_groups[0] if all_groups else None
            assert left_side is None or left_side[groupby] is False \
                 or isinstance(left_side[groupby], (tuple,list)), \
                'M2O-like pair expected, got %r' % left_side[groupby]
            assert right_side is None or isinstance(right_side, (tuple,list)), \
                'M2O-like pair expected, got %r' % right_side
            if left_side is None:
                append_right(all_groups.pop(0))
            elif right_side is None:
                append_left(read_group_result.pop(0))
            elif left_side[groupby] == right_side:
                append_left(read_group_result.pop(0))
                all_groups.pop(0) # discard right_side
            elif not left_side[groupby] or not left_side[groupby][0]:
                # left side == "Undefined" entry, not present on right_side
                append_left(read_group_result.pop(0))
            else:
                append_right(all_groups.pop(0))

        if folded:
            # mark groups that should be rendered collapsed (e.g. kanban columns)
            for r in result:
                r['__fold'] = folded.get(r[groupby] and r[groupby][0], False)
        return result
+ :param osv.Query query: the query under construction + :return: (groupby_terms, orderby_terms) + """ + orderby_terms = [] + groupby_terms = [gb['qualified_field'] for gb in annotated_groupbys] + groupby_fields = [gb['groupby'] for gb in annotated_groupbys] + if not orderby: + return groupby_terms, orderby_terms + + self._check_qorder(orderby) + for order_part in orderby.split(','): + order_split = order_part.split() + order_field = order_split[0] + if order_field in groupby_fields: + + if self._all_columns[order_field.split(':')[0]].column._type == 'many2one': + order_clause = self._generate_order_by(order_part, query).replace('ORDER BY ', '') + if order_clause: + orderby_terms.append(order_clause) + groupby_terms += [order_term.split()[0] for order_term in order_clause.split(',')] + else: + order = '"%s" %s' % (order_field, '' if len(order_split) == 1 else order_split[1]) + orderby_terms.append(order) + elif order_field in aggregated_fields: + orderby_terms.append(order_part) + else: + # Cannot order by a field that will not appear in the results (needs to be grouped or aggregated) + _logger.warn('%s: read_group order by `%s` ignored, cannot sort on empty columns (not grouped/aggregated)', + self._name, order_part) + return groupby_terms, orderby_terms + + def _read_group_process_groupby(self, gb, query, context): + """ + Helper method to collect important information about groupbys: raw + field name, type, time informations, qualified name, ... 
+ """ + split = gb.split(':') + field_type = self._all_columns[split[0]].column._type + gb_function = split[1] if len(split) == 2 else None + temporal = field_type in ('date', 'datetime') + tz_convert = field_type == 'datetime' and context.get('tz') in pytz.all_timezones + qualified_field = self._inherits_join_calc(split[0], query) + if temporal: + display_formats = { + 'day': 'dd MMM YYYY', + 'week': "'W'w YYYY", + 'month': 'MMMM YYYY', + 'quarter': 'QQQ YYYY', + 'year': 'YYYY' + } + time_intervals = { + 'day': dateutil.relativedelta.relativedelta(days=1), + 'week': datetime.timedelta(days=7), + 'month': dateutil.relativedelta.relativedelta(months=1), + 'quarter': dateutil.relativedelta.relativedelta(months=3), + 'year': dateutil.relativedelta.relativedelta(years=1) + } + if tz_convert: + qualified_field = "timezone('%s', timezone('UTC',%s))" % (context.get('tz', 'UTC'), qualified_field) + qualified_field = "date_trunc('%s', %s)" % (gb_function or 'month', qualified_field) + if field_type == 'boolean': + qualified_field = "coalesce(%s,false)" % qualified_field + return { + 'field': split[0], + 'groupby': gb, + 'type': field_type, + 'display_format': display_formats[gb_function or 'month'] if temporal else None, + 'interval': time_intervals[gb_function or 'month'] if temporal else None, + 'tz_convert': tz_convert, + 'qualified_field': qualified_field + } + + def _read_group_prepare_data(self, key, value, groupby_dict, context): + """ + Helper method to sanitize the data received by read_group. The None + values are converted to False, and the date/datetime are formatted, + and corrected according to the timezones. 
+ """ + value = False if value is None else value + gb = groupby_dict.get(key) + if gb and gb['type'] in ('date', 'datetime') and value: + if isinstance(value, basestring): + dt_format = DEFAULT_SERVER_DATETIME_FORMAT if gb['type'] == 'datetime' else DEFAULT_SERVER_DATE_FORMAT + value = datetime.datetime.strptime(value, dt_format) + if gb['tz_convert']: + value = pytz.timezone(context['tz']).localize(value) + return value + + def _read_group_get_domain(self, groupby, value): + """ + Helper method to construct the domain corresponding to a groupby and + a given value. This is mostly relevant for date/datetime. + """ + if groupby['type'] in ('date', 'datetime') and value: + dt_format = DEFAULT_SERVER_DATETIME_FORMAT if groupby['type'] == 'datetime' else DEFAULT_SERVER_DATE_FORMAT + domain_dt_begin = value + domain_dt_end = value + groupby['interval'] + if groupby['tz_convert']: + domain_dt_begin = domain_dt_begin.astimezone(pytz.utc) + domain_dt_end = domain_dt_end.astimezone(pytz.utc) + return [(groupby['field'], '>=', domain_dt_begin.strftime(dt_format)), + (groupby['field'], '<', domain_dt_end.strftime(dt_format))] + if groupby['type'] == 'many2one' and value: + value = value[0] + return [(groupby['field'], '=', value)] + + def _read_group_format_result(self, data, annotated_groupbys, groupby, groupby_dict, domain, context): + """ + Helper method to format the data contained in the dictianary data by + adding the domain corresponding to its values, the groupbys in the + context and by properly formatting the date/datetime values. 
+ """ + domain_group = [dom for gb in annotated_groupbys for dom in self._read_group_get_domain(gb, data[gb['groupby']])] + for k,v in data.iteritems(): + gb = groupby_dict.get(k) + if gb and gb['type'] in ('date', 'datetime') and v: + data[k] = babel.dates.format_date(v, format=gb['display_format'], locale=context.get('lang', 'en_US')) + + data['__domain'] = domain_group + domain + if len(groupby) - len(annotated_groupbys) >= 1: + data['__context'] = { 'group_by': groupby[len(annotated_groupbys):]} + del data['id'] + return data + + def read_group(self, cr, uid, domain, fields, groupby, offset=0, limit=None, context=None, orderby=False, lazy=True): + """ + Get the list of records in list view grouped by the given ``groupby`` fields + + :param cr: database cursor + :param uid: current user id + :param domain: list specifying search criteria [['field_name', 'operator', 'value'], ...] + :param list fields: list of fields present in the list view specified on the object + :param list groupby: list of groupby descriptions by which the records will be grouped. + A groupby description is either a field (then it will be grouped by that field) + or a string 'field:groupby_function'. Right now, the only functions supported + are 'day', 'week', 'month', 'quarter' or 'year', and they only make sense for + date/datetime fields. + :param int offset: optional number of records to skip + :param int limit: optional max number of records to return + :param dict context: context arguments, like lang, time zone. + :param list orderby: optional ``order by`` specification, for + overriding the natural sort ordering of the + groups, see also :py:meth:`~osv.osv.osv.search` + (supported only for many2one fields currently) + :param bool lazy: if true, the results are only grouped by the first groupby and the + remaining groupbys are put in the __context key. If false, all the groupbys are + done in one call. 
+ :return: list of dictionaries(one dictionary for each record) containing: + + * the values of fields grouped by the fields in ``groupby`` argument + * __domain: list of tuples specifying the search criteria + * __context: dictionary with argument like ``groupby`` + :rtype: [{'field_name_1': value, ...] + :raise AccessError: * if user has no read rights on the requested object + * if user tries to bypass access rules for read on the requested object + """ + if context is None: + context = {} + self.check_access_rights(cr, uid, 'read') + query = self._where_calc(cr, uid, domain, context=context) + fields = fields or self._columns.keys() + + groupby = [groupby] if isinstance(groupby, basestring) else groupby + groupby_list = groupby[:1] if lazy else groupby + annotated_groupbys = [self._read_group_process_groupby(gb, query, context) + for gb in groupby_list] + groupby_fields = [g['field'] for g in annotated_groupbys] + order = orderby or ','.join([g for g in groupby_list]) + groupby_dict = {gb['groupby']: gb for gb in annotated_groupbys} + + self._apply_ir_rules(cr, uid, query, 'read', context=context) + for gb in groupby_fields: + assert gb in fields, "Fields in 'groupby' must appear in the list of fields to read (perhaps it's missing in the list view?)" + groupby_def = self._columns.get(gb) or (self._inherit_fields.get(gb) and self._inherit_fields.get(gb)[2]) + assert groupby_def and groupby_def._classic_write, "Fields in 'groupby' must be regular database-persisted fields (no function or related fields), or function fields with store=True" + if not (gb in self._all_columns): + # Don't allow arbitrary values, as this would be a SQL injection vector! 
+ raise except_orm(_('Invalid group_by'), + _('Invalid group_by specification: "%s".\nA group_by specification must be a list of valid fields.')%(gb,)) + + aggregated_fields = [ + f for f in fields + if f not in ('id', 'sequence') + if f not in groupby_fields + if self._all_columns[f].column._type in ('integer', 'float') + if getattr(self._all_columns[f].column, '_classic_write')] + + field_formatter = lambda f: (self._all_columns[f].column.group_operator or 'sum', self._inherits_join_calc(f, query), f) + select_terms = ["%s(%s) AS %s" % field_formatter(f) for f in aggregated_fields] + + for gb in annotated_groupbys: + select_terms.append('%s as "%s" ' % (gb['qualified_field'], gb['groupby'])) + + groupby_terms, orderby_terms = self._read_group_prepare(order, aggregated_fields, annotated_groupbys, query) + from_clause, where_clause, where_clause_params = query.get_sql() + if lazy and (len(groupby_fields) >= 2 or not context.get('group_by_no_leaf')): + count_field = groupby_fields[0] if len(groupby_fields) >= 1 else '_' + else: + count_field = '_' + + prefix_terms = lambda prefix, terms: (prefix + " " + ",".join(terms)) if terms else '' + prefix_term = lambda prefix, term: ('%s %s' % (prefix, term)) if term else '' + + query = """ + SELECT min(%(table)s.id) AS id, count(%(table)s.id) AS %(count_field)s_count %(extra_fields)s + FROM %(from)s + %(where)s + %(groupby)s + %(orderby)s + %(limit)s + %(offset)s + """ % { + 'table': self._table, + 'count_field': count_field, + 'extra_fields': prefix_terms(',', select_terms), + 'from': from_clause, + 'where': prefix_term('WHERE', where_clause), + 'groupby': prefix_terms('GROUP BY', groupby_terms), + 'orderby': prefix_terms('ORDER BY', orderby_terms), + 'limit': prefix_term('LIMIT', int(limit) if limit else None), + 'offset': prefix_term('OFFSET', int(offset) if limit else None), + } + cr.execute(query, where_clause_params) + fetched_data = cr.dictfetchall() + + if not groupby_fields: + return fetched_data + + many2onefields 
= [gb['field'] for gb in annotated_groupbys if gb['type'] == 'many2one'] + if many2onefields: + data_ids = [r['id'] for r in fetched_data] + many2onefields = list(set(many2onefields)) + data_dict = {d['id']: d for d in self.read(cr, uid, data_ids, many2onefields, context=context)} + for d in fetched_data: + d.update(data_dict[d['id']]) + + data = map(lambda r: {k: self._read_group_prepare_data(k,v, groupby_dict, context) for k,v in r.iteritems()}, fetched_data) + result = [self._read_group_format_result(d, annotated_groupbys, groupby, groupby_dict, domain, context) for d in data] + if lazy and groupby_fields[0] in self._group_by_full: + # Right now, read_group only fill results in lazy mode (by default). + # If you need to have the empty groups in 'eager' mode, then the + # method _read_group_fill_results need to be completely reimplemented + # in a sane way + result = self._read_group_fill_results(cr, uid, domain, groupby_fields[0], groupby[len(annotated_groupbys):], + aggregated_fields, result, read_group_order=order, + context=context) + return result + + def _inherits_join_add(self, current_model, parent_model_name, query): + """ + Add missing table SELECT and JOIN clause to ``query`` for reaching the parent table (no duplicates) + :param current_model: current model object + :param parent_model_name: name of the parent model for which the clauses should be added + :param query: query object on which the JOIN should be added + """ + inherits_field = current_model._inherits[parent_model_name] + parent_model = self.pool[parent_model_name] + parent_alias, parent_alias_statement = query.add_join((current_model._table, parent_model._table, inherits_field, 'id', inherits_field), implicit=True) + return parent_alias + + def _inherits_join_calc(self, field, query): + """ + Adds missing table select and join clause(s) to ``query`` for reaching + the field coming from an '_inherits' parent table (no duplicates). 
+ + :param field: name of inherited field to reach + :param query: query object on which the JOIN should be added + :return: qualified name of field, to be used in SELECT clause + """ + current_table = self + parent_alias = '"%s"' % current_table._table + while field in current_table._inherit_fields and not field in current_table._columns: + parent_model_name = current_table._inherit_fields[field][0] + parent_table = self.pool[parent_model_name] + parent_alias = self._inherits_join_add(current_table, parent_model_name, query) + current_table = parent_table + return '%s."%s"' % (parent_alias, field) + + def _parent_store_compute(self, cr): + if not self._parent_store: + return + _logger.info('Computing parent left and right for table %s...', self._table) + def browse_rec(root, pos=0): + # TODO: set order + where = self._parent_name+'='+str(root) + if not root: + where = self._parent_name+' IS NULL' + if self._parent_order: + where += ' order by '+self._parent_order + cr.execute('SELECT id FROM '+self._table+' WHERE '+where) + pos2 = pos + 1 + for id in cr.fetchall(): + pos2 = browse_rec(id[0], pos2) + cr.execute('update '+self._table+' set parent_left=%s, parent_right=%s where id=%s', (pos, pos2, root)) + return pos2 + 1 + query = 'SELECT id FROM '+self._table+' WHERE '+self._parent_name+' IS NULL' + if self._parent_order: + query += ' order by ' + self._parent_order + pos = 0 + cr.execute(query) + for (root,) in cr.fetchall(): + pos = browse_rec(root, pos) + self.invalidate_cache(cr, SUPERUSER_ID, ['parent_left', 'parent_right']) + return True + + def _update_store(self, cr, f, k): + _logger.info("storing computed values of fields.function '%s'", k) + ss = self._columns[k]._symbol_set + update_query = 'UPDATE "%s" SET "%s"=%s WHERE id=%%s' % (self._table, k, ss[0]) + cr.execute('select id from '+self._table) + ids_lst = map(lambda x: x[0], cr.fetchall()) + while ids_lst: + iids = ids_lst[:AUTOINIT_RECALCULATE_STORED_FIELDS] + ids_lst = 
ids_lst[AUTOINIT_RECALCULATE_STORED_FIELDS:] + res = f.get(cr, self, iids, k, SUPERUSER_ID, {}) + for key, val in res.items(): + if f._multi: + val = val[k] + # if val is a many2one, just write the ID + if type(val) == tuple: + val = val[0] + if val is not False: + cr.execute(update_query, (ss[1](val), key)) + + def _check_selection_field_value(self, cr, uid, field, value, context=None): + """Raise except_orm if value is not among the valid values for the selection field""" + if self._columns[field]._type == 'reference': + val_model, val_id_str = value.split(',', 1) + val_id = False + try: + val_id = long(val_id_str) + except ValueError: + pass + if not val_id: + raise except_orm(_('ValidateError'), + _('Invalid value for reference field "%s.%s" (last part must be a non-zero integer): "%s"') % (self._table, field, value)) + val = val_model + else: + val = value + if isinstance(self._columns[field].selection, (tuple, list)): + if val in dict(self._columns[field].selection): + return + elif val in dict(self._columns[field].selection(self, cr, uid, context=context)): + return + raise except_orm(_('ValidateError'), + _('The value "%s" for the field "%s.%s" is not in the selection') % (value, self._name, field)) + + def _check_removed_columns(self, cr, log=False): + # iterate on the database columns to drop the NOT NULL constraints + # of fields which were required but have been removed (or will be added by another module) + columns = [c for c in self._columns if not (isinstance(self._columns[c], fields.function) and not self._columns[c].store)] + columns += MAGIC_COLUMNS + cr.execute("SELECT a.attname, a.attnotnull" + " FROM pg_class c, pg_attribute a" + " WHERE c.relname=%s" + " AND c.oid=a.attrelid" + " AND a.attisdropped=%s" + " AND pg_catalog.format_type(a.atttypid, a.atttypmod) NOT IN ('cid', 'tid', 'oid', 'xid')" + " AND a.attname NOT IN %s", (self._table, False, tuple(columns))), + + for column in cr.dictfetchall(): + if log: + _logger.debug("column %s is in the 
table %s but not in the corresponding object %s", + column['attname'], self._table, self._name) + if column['attnotnull']: + cr.execute('ALTER TABLE "%s" ALTER COLUMN "%s" DROP NOT NULL' % (self._table, column['attname'])) + _schema.debug("Table '%s': column '%s': dropped NOT NULL constraint", + self._table, column['attname']) + + def _save_constraint(self, cr, constraint_name, type): + """ + Record the creation of a constraint for this model, to make it possible + to delete it later when the module is uninstalled. Type can be either + 'f' or 'u' depending on the constraint being a foreign key or not. + """ + if not self._module: + # no need to save constraints for custom models as they're not part + # of any module + return + assert type in ('f', 'u') + cr.execute(""" + SELECT 1 FROM ir_model_constraint, ir_module_module + WHERE ir_model_constraint.module=ir_module_module.id + AND ir_model_constraint.name=%s + AND ir_module_module.name=%s + """, (constraint_name, self._module)) + if not cr.rowcount: + cr.execute(""" + INSERT INTO ir_model_constraint + (name, date_init, date_update, module, model, type) + VALUES (%s, now() AT TIME ZONE 'UTC', now() AT TIME ZONE 'UTC', + (SELECT id FROM ir_module_module WHERE name=%s), + (SELECT id FROM ir_model WHERE model=%s), %s)""", + (constraint_name, self._module, self._name, type)) + + def _save_relation_table(self, cr, relation_table): + """ + Record the creation of a many2many for this model, to make it possible + to delete it later when the module is uninstalled. 
    def _save_relation_table(self, cr, relation_table):
        """
        Record the creation of a many2many for this model, to make it possible
        to delete it later when the module is uninstalled.
        """
        cr.execute("""
            SELECT 1 FROM ir_model_relation, ir_module_module
            WHERE ir_model_relation.module=ir_module_module.id
                AND ir_model_relation.name=%s
                AND ir_module_module.name=%s
            """, (relation_table, self._module))
        if not cr.rowcount:
            cr.execute("""INSERT INTO ir_model_relation (name, date_init, date_update, module, model)
                                 VALUES (%s, now() AT TIME ZONE 'UTC', now() AT TIME ZONE 'UTC',
                    (SELECT id FROM ir_module_module WHERE name=%s),
                    (SELECT id FROM ir_model WHERE model=%s))""",
                       (relation_table, self._module, self._name))
            self.invalidate_cache(cr, SUPERUSER_ID)

    # checked version: for direct m2o starting from `self`
    def _m2o_add_foreign_key_checked(self, source_field, dest_model, ondelete):
        """Queue a validated FK from ``self._table``.``source_field`` to ``dest_model``.

        The FK definition is only collected in ``self._foreign_keys``; actual
        creation happens later (at _auto_end).
        """
        assert self.is_transient() or not dest_model.is_transient(), \
            'Many2One relationships from non-transient Model to TransientModel are forbidden'
        if self.is_transient() and not dest_model.is_transient():
            # TransientModel relationships to regular Models are annoying
            # usually because they could block deletion due to the FKs.
            # So unless stated otherwise we default them to ondelete=cascade.
            ondelete = ondelete or 'cascade'
        fk_def = (self._table, source_field, dest_model._table, ondelete or 'set null')
        self._foreign_keys.add(fk_def)
        _schema.debug("Table '%s': added foreign key '%s' with definition=REFERENCES \"%s\" ON DELETE %s", *fk_def)

    # unchecked version: for custom cases, such as m2m relationships
    def _m2o_add_foreign_key_unchecked(self, source_table, source_field, dest_model, ondelete):
        # same as the checked version but without the transient-model guard,
        # and for an arbitrary source table (e.g. an m2m relation table)
        fk_def = (source_table, source_field, dest_model._table, ondelete or 'set null')
        self._foreign_keys.add(fk_def)
        _schema.debug("Table '%s': added foreign key '%s' with definition=REFERENCES \"%s\" ON DELETE %s", *fk_def)

    def _drop_constraint(self, cr, source_table, constraint_name):
        # NOTE(review): identifiers are interpolated directly; both values come
        # from the postgres catalog / model metadata, not from user input
        cr.execute("ALTER TABLE %s DROP CONSTRAINT %s" % (source_table,constraint_name))

    def _m2o_fix_foreign_key(self, cr, source_table, source_field, dest_model, ondelete):
        """Ensure the FK behind an m2o column matches the field definition.

        Looks up the existing FK constraint(s) on ``source_table.source_field``
        in the postgres catalog; drops any that point to the wrong table or
        carry the wrong ON DELETE rule (or duplicates), then queues the
        correct FK for (re-)creation.
        """
        # Find FK constraint(s) currently established for the m2o field,
        # and see whether they are stale or not
        cr.execute("""SELECT confdeltype as ondelete_rule, conname as constraint_name,
                             cl2.relname as foreign_table
                      FROM pg_constraint as con, pg_class as cl1, pg_class as cl2,
                           pg_attribute as att1, pg_attribute as att2
                      WHERE con.conrelid = cl1.oid
                        AND cl1.relname = %s
                        AND con.confrelid = cl2.oid
                        AND array_lower(con.conkey, 1) = 1
                        AND con.conkey[1] = att1.attnum
                        AND att1.attrelid = cl1.oid
                        AND att1.attname = %s
                        AND array_lower(con.confkey, 1) = 1
                        AND con.confkey[1] = att2.attnum
                        AND att2.attrelid = cl2.oid
                        AND att2.attname = %s
                        AND con.contype = 'f'""", (source_table, source_field, 'id'))
        constraints = cr.dictfetchall()
        if constraints:
            if len(constraints) == 1:
                # Is it the right constraint?
                cons, = constraints
                if cons['ondelete_rule'] != POSTGRES_CONFDELTYPES.get((ondelete or 'set null').upper(), 'a')\
                    or cons['foreign_table'] != dest_model._table:
                    # Wrong FK: drop it and recreate
                    _schema.debug("Table '%s': dropping obsolete FK constraint: '%s'",
                                  source_table, cons['constraint_name'])
                    self._drop_constraint(cr, source_table, cons['constraint_name'])
                else:
                    # it's all good, nothing to do!
                    return
            else:
                # Multiple FKs found for the same field, drop them all, and re-create
                for cons in constraints:
                    _schema.debug("Table '%s': dropping duplicate FK constraints: '%s'",
                                  source_table, cons['constraint_name'])
                    self._drop_constraint(cr, source_table, cons['constraint_name'])

        # (re-)create the FK
        self._m2o_add_foreign_key_checked(source_field, dest_model, ondelete)


    def _set_default_value_on_column(self, cr, column_name, context=None):
        """Backfill NULL cells of a (new) column with the field's default.

        Resolves the default first from the old-style ``_defaults`` dict,
        then from the new-style field's compute/default, and UPDATEs rows
        where the column is still NULL.
        """
        # ideally should use add_default_value but fails
        # due to ir.values not being ready

        # get old-style default
        default = self._defaults.get(column_name)
        if callable(default):
            default = default(self, cr, SUPERUSER_ID, context)

        # get new_style default if no old-style
        if default is None:
            record = self.new(cr, SUPERUSER_ID, context=context)
            field = self._fields[column_name]
            field.determine_default(record)
            defaults = dict(record._cache)
            if column_name in defaults:
                default = field.convert_to_write(defaults[column_name])

        if default is not None:
            _logger.debug("Table '%s': setting default value of new column %s",
                          self._table, column_name)
            ss = self._columns[column_name]._symbol_set
            query = 'UPDATE "%s" SET "%s"=%s WHERE "%s" is NULL' % (
                self._table, column_name, ss[0], column_name)
            cr.execute(query, (ss[1](default),))
            # this is a disgrace
            cr.commit()
database, + - possibly add the columns 'create_uid', 'create_date', 'write_uid', + 'write_date' in database if _log_access is True (the default), + - report on database columns no more existing in _columns, + - remove no more existing not null constraints, + - alter existing database columns to match _columns, + - create database tables to match _columns, + - add database indices to match _columns, + - save in self._foreign_keys a list a foreign keys to create (see + _auto_end). + + """ + self._foreign_keys = set() + raise_on_invalid_object_name(self._name) + if context is None: + context = {} + store_compute = False + stored_fields = [] # new-style stored fields with compute + todo_end = [] + update_custom_fields = context.get('update_custom_fields', False) + self._field_create(cr, context=context) + create = not self._table_exist(cr) + if self._auto: + + if create: + self._create_table(cr) + + cr.commit() + if self._parent_store: + if not self._parent_columns_exist(cr): + self._create_parent_columns(cr) + store_compute = True + + self._check_removed_columns(cr, log=False) + + # iterate on the "object columns" + column_data = self._select_column_data(cr) + + for k, f in self._columns.iteritems(): + if k == 'id': # FIXME: maybe id should be a regular column? + continue + # Don't update custom (also called manual) fields + if f.manual and not update_custom_fields: + continue + + if isinstance(f, fields.one2many): + self._o2m_raise_on_missing_reference(cr, f) + + elif isinstance(f, fields.many2many): + self._m2m_raise_or_create_relation(cr, f) + + else: + res = column_data.get(k) + + # The field is not found as-is in database, try if it + # exists with an old name. 
+ if not res and hasattr(f, 'oldname'): + res = column_data.get(f.oldname) + if res: + cr.execute('ALTER TABLE "%s" RENAME "%s" TO "%s"' % (self._table, f.oldname, k)) + res['attname'] = k + column_data[k] = res + _schema.debug("Table '%s': renamed column '%s' to '%s'", + self._table, f.oldname, k) + + # The field already exists in database. Possibly + # change its type, rename it, drop it or change its + # constraints. + if res: + f_pg_type = res['typname'] + f_pg_size = res['size'] + f_pg_notnull = res['attnotnull'] + if isinstance(f, fields.function) and not f.store and\ + not getattr(f, 'nodrop', False): + _logger.info('column %s (%s) converted to a function, removed from table %s', + k, f.string, self._table) + cr.execute('ALTER TABLE "%s" DROP COLUMN "%s" CASCADE' % (self._table, k)) + cr.commit() + _schema.debug("Table '%s': dropped column '%s' with cascade", + self._table, k) + f_obj_type = None + else: + f_obj_type = get_pg_type(f) and get_pg_type(f)[0] + + if f_obj_type: + ok = False + casts = [ + ('text', 'char', pg_varchar(f.size), '::%s' % pg_varchar(f.size)), + ('varchar', 'text', 'TEXT', ''), + ('int4', 'float', get_pg_type(f)[1], '::'+get_pg_type(f)[1]), + ('date', 'datetime', 'TIMESTAMP', '::TIMESTAMP'), + ('timestamp', 'date', 'date', '::date'), + ('numeric', 'float', get_pg_type(f)[1], '::'+get_pg_type(f)[1]), + ('float8', 'float', get_pg_type(f)[1], '::'+get_pg_type(f)[1]), + ] + if f_pg_type == 'varchar' and f._type == 'char' and f_pg_size and (f.size is None or f_pg_size < f.size): + try: + with cr.savepoint(): + cr.execute('ALTER TABLE "%s" ALTER COLUMN "%s" TYPE %s' % (self._table, k, pg_varchar(f.size))) + except psycopg2.NotSupportedError: + # In place alter table cannot be done because a view is depending of this field. + # Do a manual copy. 
This will drop the view (that will be recreated later) + cr.execute('ALTER TABLE "%s" RENAME COLUMN "%s" TO temp_change_size' % (self._table, k)) + cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" %s' % (self._table, k, pg_varchar(f.size))) + cr.execute('UPDATE "%s" SET "%s"=temp_change_size::%s' % (self._table, k, pg_varchar(f.size))) + cr.execute('ALTER TABLE "%s" DROP COLUMN temp_change_size CASCADE' % (self._table,)) + cr.commit() + _schema.debug("Table '%s': column '%s' (type varchar) changed size from %s to %s", + self._table, k, f_pg_size or 'unlimited', f.size or 'unlimited') + for c in casts: + if (f_pg_type==c[0]) and (f._type==c[1]): + if f_pg_type != f_obj_type: + ok = True + cr.execute('ALTER TABLE "%s" RENAME COLUMN "%s" TO __temp_type_cast' % (self._table, k)) + cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" %s' % (self._table, k, c[2])) + cr.execute(('UPDATE "%s" SET "%s"= __temp_type_cast'+c[3]) % (self._table, k)) + cr.execute('ALTER TABLE "%s" DROP COLUMN __temp_type_cast CASCADE' % (self._table,)) + cr.commit() + _schema.debug("Table '%s': column '%s' changed type from %s to %s", + self._table, k, c[0], c[1]) + break + + if f_pg_type != f_obj_type: + if not ok: + i = 0 + while True: + newname = k + '_moved' + str(i) + cr.execute("SELECT count(1) FROM pg_class c,pg_attribute a " \ + "WHERE c.relname=%s " \ + "AND a.attname=%s " \ + "AND c.oid=a.attrelid ", (self._table, newname)) + if not cr.fetchone()[0]: + break + i += 1 + if f_pg_notnull: + cr.execute('ALTER TABLE "%s" ALTER COLUMN "%s" DROP NOT NULL' % (self._table, k)) + cr.execute('ALTER TABLE "%s" RENAME COLUMN "%s" TO "%s"' % (self._table, k, newname)) + cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" %s' % (self._table, k, get_pg_type(f)[1])) + cr.execute("COMMENT ON COLUMN %s.\"%s\" IS %%s" % (self._table, k), (f.string,)) + _schema.debug("Table '%s': column '%s' has changed type (DB=%s, def=%s), data moved to column %s !", + self._table, k, f_pg_type, f._type, newname) + + # if the field is 
required and hasn't got a NOT NULL constraint + if f.required and f_pg_notnull == 0: + self._set_default_value_on_column(cr, k, context=context) + # add the NOT NULL constraint + try: + cr.execute('ALTER TABLE "%s" ALTER COLUMN "%s" SET NOT NULL' % (self._table, k), log_exceptions=False) + cr.commit() + _schema.debug("Table '%s': column '%s': added NOT NULL constraint", + self._table, k) + except Exception: + msg = "Table '%s': unable to set a NOT NULL constraint on column '%s' !\n"\ + "If you want to have it, you should update the records and execute manually:\n"\ + "ALTER TABLE %s ALTER COLUMN %s SET NOT NULL" + _schema.warning(msg, self._table, k, self._table, k) + cr.commit() + elif not f.required and f_pg_notnull == 1: + cr.execute('ALTER TABLE "%s" ALTER COLUMN "%s" DROP NOT NULL' % (self._table, k)) + cr.commit() + _schema.debug("Table '%s': column '%s': dropped NOT NULL constraint", + self._table, k) + # Verify index + indexname = '%s_%s_index' % (self._table, k) + cr.execute("SELECT indexname FROM pg_indexes WHERE indexname = %s and tablename = %s", (indexname, self._table)) + res2 = cr.dictfetchall() + if not res2 and f.select: + cr.execute('CREATE INDEX "%s_%s_index" ON "%s" ("%s")' % (self._table, k, self._table, k)) + cr.commit() + if f._type == 'text': + # FIXME: for fields.text columns we should try creating GIN indexes instead (seems most suitable for an ERP context) + msg = "Table '%s': Adding (b-tree) index for %s column '%s'."\ + "This is probably useless (does not work for fulltext search) and prevents INSERTs of long texts"\ + " because there is a length limit for indexable btree values!\n"\ + "Use a search view instead if you simply want to make the field searchable." 
+ _schema.warning(msg, self._table, f._type, k) + if res2 and not f.select: + cr.execute('DROP INDEX "%s_%s_index"' % (self._table, k)) + cr.commit() + msg = "Table '%s': dropping index for column '%s' of type '%s' as it is not required anymore" + _schema.debug(msg, self._table, k, f._type) + + if isinstance(f, fields.many2one) or (isinstance(f, fields.function) and f._type == 'many2one' and f.store): + dest_model = self.pool[f._obj] + if dest_model._table != 'ir_actions': + self._m2o_fix_foreign_key(cr, self._table, k, dest_model, f.ondelete) + + # The field doesn't exist in database. Create it if necessary. + else: + if not isinstance(f, fields.function) or f.store: + # add the missing field + cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" %s' % (self._table, k, get_pg_type(f)[1])) + cr.execute("COMMENT ON COLUMN %s.\"%s\" IS %%s" % (self._table, k), (f.string,)) + _schema.debug("Table '%s': added column '%s' with definition=%s", + self._table, k, get_pg_type(f)[1]) + + # initialize it + if not create: + self._set_default_value_on_column(cr, k, context=context) + + # remember the functions to call for the stored fields + if isinstance(f, fields.function): + order = 10 + if f.store is not True: # i.e. 
if f.store is a dict + order = f.store[f.store.keys()[0]][2] + todo_end.append((order, self._update_store, (f, k))) + + # remember new-style stored fields with compute method + if k in self._fields and self._fields[k].depends: + stored_fields.append(self._fields[k]) + + # and add constraints if needed + if isinstance(f, fields.many2one) or (isinstance(f, fields.function) and f._type == 'many2one' and f.store): + if f._obj not in self.pool: + raise except_orm('Programming Error', 'There is no reference available for %s' % (f._obj,)) + dest_model = self.pool[f._obj] + ref = dest_model._table + # ir_actions is inherited so foreign key doesn't work on it + if ref != 'ir_actions': + self._m2o_add_foreign_key_checked(k, dest_model, f.ondelete) + if f.select: + cr.execute('CREATE INDEX "%s_%s_index" ON "%s" ("%s")' % (self._table, k, self._table, k)) + if f.required: + try: + cr.commit() + cr.execute('ALTER TABLE "%s" ALTER COLUMN "%s" SET NOT NULL' % (self._table, k)) + _schema.debug("Table '%s': column '%s': added a NOT NULL constraint", + self._table, k) + except Exception: + msg = "WARNING: unable to set column %s of table %s not null !\n"\ + "Try to re-run: openerp-server --update=module\n"\ + "If it doesn't work, update records and execute manually:\n"\ + "ALTER TABLE %s ALTER COLUMN %s SET NOT NULL" + _logger.warning(msg, k, self._table, self._table, k, exc_info=True) + cr.commit() + + else: + cr.execute("SELECT relname FROM pg_class WHERE relkind IN ('r','v') AND relname=%s", (self._table,)) + create = not bool(cr.fetchone()) + + cr.commit() # start a new transaction + + if self._auto: + self._add_sql_constraints(cr) + + if create: + self._execute_sql(cr) + + if store_compute: + self._parent_store_compute(cr) + cr.commit() + + if stored_fields: + # trigger computation of new-style stored fields with a compute + def func(cr): + _logger.info("Storing computed values of %s fields %s", + self._name, ', '.join(sorted(f.name for f in stored_fields))) + recs = 
self.browse(cr, SUPERUSER_ID, [], {'active_test': False}) + recs = recs.search([]) + if recs: + map(recs._recompute_todo, stored_fields) + recs.recompute() + + todo_end.append((1000, func, ())) + + return todo_end + + def _auto_end(self, cr, context=None): + """ Create the foreign keys recorded by _auto_init. """ + for t, k, r, d in self._foreign_keys: + cr.execute('ALTER TABLE "%s" ADD FOREIGN KEY ("%s") REFERENCES "%s" ON DELETE %s' % (t, k, r, d)) + self._save_constraint(cr, "%s_%s_fkey" % (t, k), 'f') + cr.commit() + del self._foreign_keys + + + def _table_exist(self, cr): + cr.execute("SELECT relname FROM pg_class WHERE relkind IN ('r','v') AND relname=%s", (self._table,)) + return cr.rowcount + + + def _create_table(self, cr): + cr.execute('CREATE TABLE "%s" (id SERIAL NOT NULL, PRIMARY KEY(id))' % (self._table,)) + cr.execute(("COMMENT ON TABLE \"%s\" IS %%s" % self._table), (self._description,)) + _schema.debug("Table '%s': created", self._table) + + + def _parent_columns_exist(self, cr): + cr.execute("""SELECT c.relname + FROM pg_class c, pg_attribute a + WHERE c.relname=%s AND a.attname=%s AND c.oid=a.attrelid + """, (self._table, 'parent_left')) + return cr.rowcount + + + def _create_parent_columns(self, cr): + cr.execute('ALTER TABLE "%s" ADD COLUMN "parent_left" INTEGER' % (self._table,)) + cr.execute('ALTER TABLE "%s" ADD COLUMN "parent_right" INTEGER' % (self._table,)) + if 'parent_left' not in self._columns: + _logger.error('create a column parent_left on object %s: fields.integer(\'Left Parent\', select=1)', + self._table) + _schema.debug("Table '%s': added column '%s' with definition=%s", + self._table, 'parent_left', 'INTEGER') + elif not self._columns['parent_left'].select: + _logger.error('parent_left column on object %s must be indexed! 
Add select=1 to the field definition)', + self._table) + if 'parent_right' not in self._columns: + _logger.error('create a column parent_right on object %s: fields.integer(\'Right Parent\', select=1)', + self._table) + _schema.debug("Table '%s': added column '%s' with definition=%s", + self._table, 'parent_right', 'INTEGER') + elif not self._columns['parent_right'].select: + _logger.error('parent_right column on object %s must be indexed! Add select=1 to the field definition)', + self._table) + if self._columns[self._parent_name].ondelete not in ('cascade', 'restrict'): + _logger.error("The column %s on object %s must be set as ondelete='cascade' or 'restrict'", + self._parent_name, self._name) + + cr.commit() + + + def _select_column_data(self, cr): + # attlen is the number of bytes necessary to represent the type when + # the type has a fixed size. If the type has a varying size attlen is + # -1 and atttypmod is the size limit + 4, or -1 if there is no limit. + cr.execute("SELECT c.relname,a.attname,a.attlen,a.atttypmod,a.attnotnull,a.atthasdef,t.typname,CASE WHEN a.attlen=-1 THEN (CASE WHEN a.atttypmod=-1 THEN 0 ELSE a.atttypmod-4 END) ELSE a.attlen END as size " \ + "FROM pg_class c,pg_attribute a,pg_type t " \ + "WHERE c.relname=%s " \ + "AND c.oid=a.attrelid " \ + "AND a.atttypid=t.oid", (self._table,)) + return dict(map(lambda x: (x['attname'], x),cr.dictfetchall())) + + + def _o2m_raise_on_missing_reference(self, cr, f): + # TODO this check should be a method on fields.one2many. + if f._obj in self.pool: + other = self.pool[f._obj] + # TODO the condition could use fields_get_keys(). 
+ if f._fields_id not in other._columns.keys(): + if f._fields_id not in other._inherit_fields.keys(): + raise except_orm('Programming Error', "There is no reference field '%s' found for '%s'" % (f._fields_id, f._obj,)) + + def _m2m_raise_or_create_relation(self, cr, f): + m2m_tbl, col1, col2 = f._sql_names(self) + self._save_relation_table(cr, m2m_tbl) + cr.execute("SELECT relname FROM pg_class WHERE relkind IN ('r','v') AND relname=%s", (m2m_tbl,)) + if not cr.dictfetchall(): + if f._obj not in self.pool: + raise except_orm('Programming Error', 'Many2Many destination model does not exist: `%s`' % (f._obj,)) + dest_model = self.pool[f._obj] + ref = dest_model._table + cr.execute('CREATE TABLE "%s" ("%s" INTEGER NOT NULL, "%s" INTEGER NOT NULL, UNIQUE("%s","%s"))' % (m2m_tbl, col1, col2, col1, col2)) + # create foreign key references with ondelete=cascade, unless the targets are SQL views + cr.execute("SELECT relkind FROM pg_class WHERE relkind IN ('v') AND relname=%s", (ref,)) + if not cr.fetchall(): + self._m2o_add_foreign_key_unchecked(m2m_tbl, col2, dest_model, 'cascade') + cr.execute("SELECT relkind FROM pg_class WHERE relkind IN ('v') AND relname=%s", (self._table,)) + if not cr.fetchall(): + self._m2o_add_foreign_key_unchecked(m2m_tbl, col1, self, 'cascade') + + cr.execute('CREATE INDEX "%s_%s_index" ON "%s" ("%s")' % (m2m_tbl, col1, m2m_tbl, col1)) + cr.execute('CREATE INDEX "%s_%s_index" ON "%s" ("%s")' % (m2m_tbl, col2, m2m_tbl, col2)) + cr.execute("COMMENT ON TABLE \"%s\" IS 'RELATION BETWEEN %s AND %s'" % (m2m_tbl, self._table, ref)) + cr.commit() + _schema.debug("Create table '%s': m2m relation between '%s' and '%s'", m2m_tbl, self._table, ref) + + + def _add_sql_constraints(self, cr): + """ + + Modify this model's database table constraints so they match the one in + _sql_constraints. 
+ + """ + def unify_cons_text(txt): + return txt.lower().replace(', ',',').replace(' (','(') + + for (key, con, _) in self._sql_constraints: + conname = '%s_%s' % (self._table, key) + + self._save_constraint(cr, conname, 'u') + cr.execute("SELECT conname, pg_catalog.pg_get_constraintdef(oid, true) as condef FROM pg_constraint where conname=%s", (conname,)) + existing_constraints = cr.dictfetchall() + sql_actions = { + 'drop': { + 'execute': False, + 'query': 'ALTER TABLE "%s" DROP CONSTRAINT "%s"' % (self._table, conname, ), + 'msg_ok': "Table '%s': dropped constraint '%s'. Reason: its definition changed from '%%s' to '%s'" % ( + self._table, conname, con), + 'msg_err': "Table '%s': unable to drop \'%s\' constraint !" % (self._table, con), + 'order': 1, + }, + 'add': { + 'execute': False, + 'query': 'ALTER TABLE "%s" ADD CONSTRAINT "%s" %s' % (self._table, conname, con,), + 'msg_ok': "Table '%s': added constraint '%s' with definition=%s" % (self._table, conname, con), + 'msg_err': "Table '%s': unable to add \'%s\' constraint !\n If you want to have it, you should update the records and execute manually:\n%%s" % ( + self._table, con), + 'order': 2, + }, + } + + if not existing_constraints: + # constraint does not exists: + sql_actions['add']['execute'] = True + sql_actions['add']['msg_err'] = sql_actions['add']['msg_err'] % (sql_actions['add']['query'], ) + elif unify_cons_text(con) not in [unify_cons_text(item['condef']) for item in existing_constraints]: + # constraint exists but its definition has changed: + sql_actions['drop']['execute'] = True + sql_actions['drop']['msg_ok'] = sql_actions['drop']['msg_ok'] % (existing_constraints[0]['condef'].lower(), ) + sql_actions['add']['execute'] = True + sql_actions['add']['msg_err'] = sql_actions['add']['msg_err'] % (sql_actions['add']['query'], ) + + # we need to add the constraint: + sql_actions = [item for item in sql_actions.values()] + sql_actions.sort(key=lambda x: x['order']) + for sql_action in [action for action 
in sql_actions if action['execute']]: + try: + cr.execute(sql_action['query']) + cr.commit() + _schema.debug(sql_action['msg_ok']) + except: + _schema.warning(sql_action['msg_err']) + cr.rollback() + + + def _execute_sql(self, cr): + """ Execute the SQL code from the _sql attribute (if any).""" + if hasattr(self, "_sql"): + for line in self._sql.split(';'): + line2 = line.replace('\n', '').strip() + if line2: + cr.execute(line2) + cr.commit() + + # + # Update objects that uses this one to update their _inherits fields + # + + @classmethod + def _inherits_reload_src(cls): + """ Recompute the _inherit_fields mapping on each _inherits'd child model.""" + for model in cls.pool.values(): + if cls._name in model._inherits: + model._inherits_reload() + + @classmethod + def _inherits_reload(cls): + """ Recompute the _inherit_fields mapping. + + This will also call itself on each inherits'd child model. + + """ + res = {} + for table in cls._inherits: + other = cls.pool[table] + for col in other._columns.keys(): + res[col] = (table, cls._inherits[table], other._columns[col], table) + for col in other._inherit_fields.keys(): + res[col] = (table, cls._inherits[table], other._inherit_fields[col][2], other._inherit_fields[col][3]) + cls._inherit_fields = res + cls._all_columns = cls._get_column_infos() + + # interface columns with new-style fields + for attr, column in cls._columns.items(): + if attr not in cls._fields: + cls._add_field(attr, column.to_field()) + + # interface inherited fields with new-style fields (note that the + # reverse order is for being consistent with _all_columns above) + for parent_model, parent_field in reversed(cls._inherits.items()): + for attr, field in cls.pool[parent_model]._fields.iteritems(): + if attr not in cls._fields: + new_field = field.copy(related=(parent_field, attr), _origin=field) + cls._add_field(attr, new_field) + + cls._inherits_reload_src() + + @classmethod + def _get_column_infos(cls): + """Returns a dict mapping all fields 
names (direct fields and + inherited field via _inherits) to a ``column_info`` struct + giving detailed columns """ + result = {} + # do not inverse for loops, since local fields may hide inherited ones! + for k, (parent, m2o, col, original_parent) in cls._inherit_fields.iteritems(): + result[k] = fields.column_info(k, col, parent, m2o, original_parent) + for k, col in cls._columns.iteritems(): + result[k] = fields.column_info(k, col) + return result + + @classmethod + def _inherits_check(cls): + for table, field_name in cls._inherits.items(): + if field_name not in cls._columns: + _logger.info('Missing many2one field definition for _inherits reference "%s" in "%s", using default one.', field_name, cls._name) + cls._columns[field_name] = fields.many2one(table, string="Automatically created field to link to parent %s" % table, + required=True, ondelete="cascade") + elif not cls._columns[field_name].required or cls._columns[field_name].ondelete.lower() not in ("cascade", "restrict"): + _logger.warning('Field definition for _inherits reference "%s" in "%s" must be marked as "required" with ondelete="cascade" or "restrict", forcing it to required + cascade.', field_name, cls._name) + cls._columns[field_name].required = True + cls._columns[field_name].ondelete = "cascade" + + # reflect fields with delegate=True in dictionary cls._inherits + for field in cls._fields.itervalues(): + if field.type == 'many2one' and not field.related and field.delegate: + if not field.required: + _logger.warning("Field %s with delegate=True must be required.", field) + field.required = True + if field.ondelete.lower() not in ('cascade', 'restrict'): + field.ondelete = 'cascade' + cls._inherits[field.comodel_name] = field.name + + @api.model + def _prepare_setup_fields(self): + """ Prepare the setup of fields once the models have been loaded. 
""" + for field in self._fields.itervalues(): + field.reset() + + @api.model + def _setup_fields(self): + """ Setup the fields (dependency triggers, etc). """ + for field in self._fields.itervalues(): + field.setup(self.env) + + # group fields by compute to determine field.computed_fields + fields_by_compute = defaultdict(list) + for field in self._fields.itervalues(): + if field.compute: + field.computed_fields = fields_by_compute[field.compute] + field.computed_fields.append(field) + else: + field.computed_fields = [] + + def fields_get(self, cr, user, allfields=None, context=None, write_access=True): + """ Return the definition of each field. + + The returned value is a dictionary (indiced by field name) of + dictionaries. The _inherits'd fields are included. The string, help, + and selection (if present) attributes are translated. + + :param cr: database cursor + :param user: current user id + :param allfields: list of fields + :param context: context arguments, like lang, time zone + :return: dictionary of field dictionaries, each one describing a field of the business object + :raise AccessError: * if user has no create/write rights on the requested object + + """ + recs = self.browse(cr, user, [], context) + + res = {} + for fname, field in self._fields.iteritems(): + if allfields and fname not in allfields: + continue + if field.groups and not recs.user_has_groups(field.groups): + continue + res[fname] = field.get_description(recs.env) + + # if user cannot create or modify records, make all fields readonly + has_access = functools.partial(recs.check_access_rights, raise_exception=False) + if not (has_access('write') or has_access('create')): + for description in res.itervalues(): + description['readonly'] = True + description['states'] = {} + + return res + + def get_empty_list_help(self, cr, user, help, context=None): + """ Generic method giving the help message displayed when having + no result to display in a list or kanban view. 
By default it returns + the help given in parameter that is generally the help message + defined in the action. + """ + return help + + def check_field_access_rights(self, cr, user, operation, fields, context=None): + """ + Check the user access rights on the given fields. This raises Access + Denied if the user does not have the rights. Otherwise it returns the + fields (as is if the fields is not falsy, or the readable/writable + fields if fields is falsy). + """ + if user == SUPERUSER_ID: + return fields or list(self._fields) + + def valid(fname): + """ determine whether user has access to field `fname` """ + field = self._fields.get(fname) + if field and field.groups: + return self.user_has_groups(cr, user, groups=field.groups, context=context) + else: + return True + + if not fields: + fields = filter(valid, self._fields) + else: + invalid_fields = list(set(filter(lambda name: not valid(name), fields))) + if invalid_fields: + _logger.warning('Access Denied by ACLs for operation: %s, uid: %s, model: %s, fields: %s', + operation, user, self._name, ', '.join(invalid_fields)) + raise AccessError( + _('The requested operation cannot be completed due to security restrictions. ' + 'Please contact your system administrator.\n\n(Document type: %s, Operation: %s)') % \ + (self._description, operation)) + + return fields + + # new-style implementation of read(); old-style is defined below + @api.v8 + def read(self, fields=None, load='_classic_read'): + """ Read the given fields for the records in `self`. 
+ + :param fields: optional list of field names to return (default is + all fields) + :param load: deprecated, this argument is ignored + :return: a list of dictionaries mapping field names to their values, + with one dictionary per record + :raise AccessError: if user has no read rights on some of the given + records + """ + # check access rights + self.check_access_rights('read') + fields = self.check_field_access_rights('read', fields) + + # split fields into stored and computed fields + stored, computed = [], [] + for name in fields: + if name in self._columns: + stored.append(name) + elif name in self._fields: + computed.append(name) + else: + _logger.warning("%s.read() with unknown field '%s'", self._name, name) + + # fetch stored fields from the database to the cache + self._read_from_database(stored) + + # retrieve results from records; this takes values from the cache and + # computes remaining fields + result = [] + name_fields = [(name, self._fields[name]) for name in (stored + computed)] + use_name_get = (load == '_classic_read') + for record in self: + try: + values = {'id': record.id} + for name, field in name_fields: + values[name] = field.convert_to_read(record[name], use_name_get) + result.append(values) + except MissingError: + pass + + return result + + # add explicit old-style implementation to read() + @api.v7 + def read(self, cr, user, ids, fields=None, context=None, load='_classic_read'): + records = self.browse(cr, user, ids, context) + result = BaseModel.read(records, fields, load=load) + return result if isinstance(ids, list) else (bool(result) and result[0]) + + @api.multi + def _prefetch_field(self, field): + """ Read from the database in order to fetch `field` (:class:`Field` + instance) for `self` in cache. 
+ """ + # fetch the records of this model without field_name in their cache + records = self._in_cache_without(field) + + # by default, simply fetch field + fnames = set((field.name,)) + + if self.pool._init: + # columns may be missing from database, do not prefetch other fields + pass + elif self.env.in_draft: + # we may be doing an onchange, do not prefetch other fields + pass + elif field in self.env.todo: + # field must be recomputed, do not prefetch records to recompute + records -= self.env.todo[field] + elif self._columns[field.name]._prefetch: + # here we can optimize: prefetch all classic and many2one fields + fnames = set(fname + for fname, fcolumn in self._columns.iteritems() + if fcolumn._prefetch) + + # fetch records with read() + assert self in records and field.name in fnames + try: + result = records.read(list(fnames), load='_classic_write') + except AccessError as e: + # update cache with the exception + records._cache[field] = FailedValue(e) + result = [] + + # check the cache, and update it if necessary + if field not in self._cache: + for values in result: + record = self.browse(values.pop('id')) + record._cache.update(record._convert_to_cache(values)) + if field not in self._cache: + e = AccessError("No value found for %s.%s" % (self, field.name)) + self._cache[field] = FailedValue(e) + + @api.multi + def _read_from_database(self, field_names): + """ Read the given fields of the records in `self` from the database, + and store them in cache. Access errors are also stored in cache. + """ + env = self.env + cr, user, context = env.args + + # Construct a clause for the security rules. + # 'tables' holds the list of tables necessary for the SELECT, including + # the ir.rule clauses, and contains at least self._table. 
+ rule_clause, rule_params, tables = env['ir.rule'].domain_get(self._name, 'read') + + # determine the fields that are stored as columns in self._table + fields_pre = [f for f in field_names if self._columns[f]._classic_write] + + # we need fully-qualified column names in case len(tables) > 1 + def qualify(f): + if isinstance(self._columns.get(f), fields.binary) and \ + context.get('bin_size_%s' % f, context.get('bin_size')): + # PG 9.2 introduces conflicting pg_size_pretty(numeric) -> need ::cast + return 'pg_size_pretty(length(%s."%s")::bigint) as "%s"' % (self._table, f, f) + else: + return '%s."%s"' % (self._table, f) + qual_names = map(qualify, set(fields_pre + ['id'])) + + query = """ SELECT %(qual_names)s FROM %(tables)s + WHERE %(table)s.id IN %%s AND (%(extra)s) + ORDER BY %(order)s + """ % { + 'qual_names': ",".join(qual_names), + 'tables': ",".join(tables), + 'table': self._table, + 'extra': " OR ".join(rule_clause) if rule_clause else "TRUE", + 'order': self._parent_order or self._order, + } + + result = [] + for sub_ids in cr.split_for_in_conditions(self.ids): + cr.execute(query, [tuple(sub_ids)] + rule_params) + result.extend(cr.dictfetchall()) + + ids = [vals['id'] for vals in result] + + if ids: + # translate the fields if necessary + if context.get('lang'): + ir_translation = env['ir.translation'] + for f in fields_pre: + if self._columns[f].translate: + #TODO: optimize out of this loop + res_trans = ir_translation._get_ids( + '%s,%s' % (self._name, f), 'model', context['lang'], ids) + for vals in result: + vals[f] = res_trans.get(vals['id'], False) or vals[f] + + # apply the symbol_get functions of the fields we just read + for f in fields_pre: + symbol_get = self._columns[f]._symbol_get + if symbol_get: + for vals in result: + vals[f] = symbol_get(vals[f]) + + # store result in cache for POST fields + for vals in result: + record = self.browse(vals['id']) + record._cache.update(record._convert_to_cache(vals)) + + # determine the fields that must 
be processed now + fields_post = [f for f in field_names if not self._columns[f]._classic_write] + + # Compute POST fields, grouped by multi + by_multi = defaultdict(list) + for f in fields_post: + by_multi[self._columns[f]._multi].append(f) + + for multi, fs in by_multi.iteritems(): + if multi: + res2 = self._columns[fs[0]].get(cr, self._model, ids, fs, user, context=context, values=result) + assert res2 is not None, \ + 'The function field "%s" on the "%s" model returned None\n' \ + '(a dictionary was expected).' % (fs[0], self._name) + for vals in result: + # TOCHECK : why got string instend of dict in python2.6 + # if isinstance(res2[vals['id']], str): res2[vals['id']] = eval(res2[vals['id']]) + multi_fields = res2.get(vals['id'], {}) + if multi_fields: + for f in fs: + vals[f] = multi_fields.get(f, []) + else: + for f in fs: + res2 = self._columns[f].get(cr, self._model, ids, f, user, context=context, values=result) + for vals in result: + if res2: + vals[f] = res2[vals['id']] + else: + vals[f] = [] + + # Warn about deprecated fields now that fields_pre and fields_post are computed + for f in field_names: + column = self._columns[f] + if column.deprecated: + _logger.warning('Field %s.%s is deprecated: %s', self._name, f, column.deprecated) + + # store result in cache + for vals in result: + record = self.browse(vals.pop('id')) + record._cache.update(record._convert_to_cache(vals)) + + # store failed values in cache for the records that could not be read + missing = self - self.browse(ids) + if missing: + # store an access error exception in existing records + exc = AccessError( + _('The requested operation cannot be completed due to security restrictions. 
Please contact your system administrator.\n\n(Document type: %s, Operation: %s)') % \ + (self._name, 'read') + ) + forbidden = missing.exists() + forbidden._cache.update(FailedValue(exc)) + # store a missing error exception in non-existing records + exc = MissingError( + _('One of the documents you are trying to access has been deleted, please try again after refreshing.') + ) + (missing - forbidden)._cache.update(FailedValue(exc)) + + @api.multi + def get_metadata(self): + """ + Returns some metadata about the given records. + + :return: list of ownership dictionaries for each requested record + :rtype: list of dictionaries with the following keys: + + * id: object id + * create_uid: user who created the record + * create_date: date when the record was created + * write_uid: last user who changed the record + * write_date: date of the last change to the record + * xmlid: XML ID to use to refer to this record (if there is one), in format ``module.name`` + """ + fields = ['id'] + if self._log_access: + fields += ['create_uid', 'create_date', 'write_uid', 'write_date'] + quoted_table = '"%s"' % self._table + fields_str = ",".join('%s.%s' % (quoted_table, field) for field in fields) + query = '''SELECT %s, __imd.module, __imd.name + FROM %s LEFT JOIN ir_model_data __imd + ON (__imd.model = %%s and __imd.res_id = %s.id) + WHERE %s.id IN %%s''' % (fields_str, quoted_table, quoted_table, quoted_table) + self._cr.execute(query, (self._name, tuple(self.ids))) + res = self._cr.dictfetchall() + + uids = list(set(r[k] for r in res for k in ['write_uid', 'create_uid'] if r.get(k))) + names = dict(self.env['res.users'].browse(uids).name_get()) + + for r in res: + for key in r: + value = r[key] = r[key] or False + if key in ('write_uid', 'create_uid') and value in names: + r[key] = (value, names[value]) + r['xmlid'] = ("%(module)s.%(name)s" % r) if r['name'] else False + del r['name'], r['module'] + return res + + def _check_concurrency(self, cr, ids, context): + if not context: 
+ return + if not (context.get(self.CONCURRENCY_CHECK_FIELD) and self._log_access): + return + check_clause = "(id = %s AND %s < COALESCE(write_date, create_date, (now() at time zone 'UTC'))::timestamp)" + for sub_ids in cr.split_for_in_conditions(ids): + ids_to_check = [] + for id in sub_ids: + id_ref = "%s,%s" % (self._name, id) + update_date = context[self.CONCURRENCY_CHECK_FIELD].pop(id_ref, None) + if update_date: + ids_to_check.extend([id, update_date]) + if not ids_to_check: + continue + cr.execute("SELECT id FROM %s WHERE %s" % (self._table, " OR ".join([check_clause]*(len(ids_to_check)/2))), tuple(ids_to_check)) + res = cr.fetchone() + if res: + # mention the first one only to keep the error message readable + raise except_orm('ConcurrencyException', _('A document was modified since you last viewed it (%s:%d)') % (self._description, res[0])) + + def _check_record_rules_result_count(self, cr, uid, ids, result_ids, operation, context=None): + """Verify the returned rows after applying record rules matches + the length of `ids`, and raise an appropriate exception if it does not. + """ + if context is None: + context = {} + ids, result_ids = set(ids), set(result_ids) + missing_ids = ids - result_ids + if missing_ids: + # Attempt to distinguish record rule restriction vs deleted records, + # to provide a more specific error message - check if the missinf + cr.execute('SELECT id FROM ' + self._table + ' WHERE id IN %s', (tuple(missing_ids),)) + forbidden_ids = [x[0] for x in cr.fetchall()] + if forbidden_ids: + # the missing ids are (at least partially) hidden by access rules + if uid == SUPERUSER_ID: + return + _logger.warning('Access Denied by record rules for operation: %s on record ids: %r, uid: %s, model: %s', operation, forbidden_ids, uid, self._name) + raise except_orm(_('Access Denied'), + _('The requested operation cannot be completed due to security restrictions. 
Please contact your system administrator.\n\n(Document type: %s, Operation: %s)') % \ + (self._description, operation)) + else: + # If we get here, the missing_ids are not in the database + if operation in ('read','unlink'): + # No need to warn about deleting an already deleted record. + # And no error when reading a record that was deleted, to prevent spurious + # errors for non-transactional search/read sequences coming from clients + return + _logger.warning('Failed operation on deleted record(s): %s, uid: %s, model: %s', operation, uid, self._name) + raise except_orm(_('Missing document(s)'), + _('One of the documents you are trying to access has been deleted, please try again after refreshing.')) + + + def check_access_rights(self, cr, uid, operation, raise_exception=True): # no context on purpose. + """Verifies that the operation given by ``operation`` is allowed for the user + according to the access rights.""" + return self.pool.get('ir.model.access').check(cr, uid, self._name, operation, raise_exception) + + def check_access_rule(self, cr, uid, ids, operation, context=None): + """Verifies that the operation given by ``operation`` is allowed for the user + according to ir.rules. + + :param operation: one of ``write``, ``unlink`` + :raise except_orm: * if current ir.rules do not permit this operation. + :return: None if the operation is allowed + """ + if uid == SUPERUSER_ID: + return + + if self.is_transient(): + # Only one single implicit access rule for transient models: owner only! + # This is ok to hardcode because we assert that TransientModels always + # have log_access enabled so that the create_uid column is always there. + # And even with _inherits, these fields are always present in the local + # table too, so no need for JOINs. 
+ cr.execute("""SELECT distinct create_uid + FROM %s + WHERE id IN %%s""" % self._table, (tuple(ids),)) + uids = [x[0] for x in cr.fetchall()] + if len(uids) != 1 or uids[0] != uid: + raise except_orm(_('Access Denied'), + _('For this kind of document, you may only access records you created yourself.\n\n(Document type: %s)') % (self._description,)) + else: + where_clause, where_params, tables = self.pool.get('ir.rule').domain_get(cr, uid, self._name, operation, context=context) + if where_clause: + where_clause = ' and ' + ' and '.join(where_clause) + for sub_ids in cr.split_for_in_conditions(ids): + cr.execute('SELECT ' + self._table + '.id FROM ' + ','.join(tables) + + ' WHERE ' + self._table + '.id IN %s' + where_clause, + [sub_ids] + where_params) + returned_ids = [x['id'] for x in cr.dictfetchall()] + self._check_record_rules_result_count(cr, uid, sub_ids, returned_ids, operation, context=context) + + def create_workflow(self, cr, uid, ids, context=None): + """Create a workflow instance for each given record IDs.""" + from openerp import workflow + for res_id in ids: + workflow.trg_create(uid, self._name, res_id, cr) + # self.invalidate_cache(cr, uid, context=context) ? + return True + + def delete_workflow(self, cr, uid, ids, context=None): + """Delete the workflow instances bound to the given record IDs.""" + from openerp import workflow + for res_id in ids: + workflow.trg_delete(uid, self._name, res_id, cr) + self.invalidate_cache(cr, uid, context=context) + return True + + def step_workflow(self, cr, uid, ids, context=None): + """Reevaluate the workflow instances of the given record IDs.""" + from openerp import workflow + for res_id in ids: + workflow.trg_write(uid, self._name, res_id, cr) + # self.invalidate_cache(cr, uid, context=context) ? 
+ return True + + def signal_workflow(self, cr, uid, ids, signal, context=None): + """Send given workflow signal and return a dict mapping ids to workflow results""" + from openerp import workflow + result = {} + for res_id in ids: + result[res_id] = workflow.trg_validate(uid, self._name, res_id, signal, cr) + # self.invalidate_cache(cr, uid, context=context) ? + return result + + def redirect_workflow(self, cr, uid, old_new_ids, context=None): + """ Rebind the workflow instance bound to the given 'old' record IDs to + the given 'new' IDs. (``old_new_ids`` is a list of pairs ``(old, new)``. + """ + from openerp import workflow + for old_id, new_id in old_new_ids: + workflow.trg_redirect(uid, self._name, old_id, new_id, cr) + self.invalidate_cache(cr, uid, context=context) + return True + + def unlink(self, cr, uid, ids, context=None): + """ + Delete records with given ids + + :param cr: database cursor + :param uid: current user id + :param ids: id or list of ids + :param context: (optional) context arguments, like lang, time zone + :return: True + :raise AccessError: * if user has no unlink rights on the requested object + * if user tries to bypass access rules for unlink on the requested object + :raise UserError: if the record is default property for other records + + """ + if not ids: + return True + if isinstance(ids, (int, long)): + ids = [ids] + + result_store = self._store_get_values(cr, uid, ids, self._all_columns.keys(), context) + + # for recomputing new-style fields + recs = self.browse(cr, uid, ids, context) + recs.modified(self._fields) + + self._check_concurrency(cr, ids, context) + + self.check_access_rights(cr, uid, 'unlink') + + ir_property = self.pool.get('ir.property') + + # Check if the records are used as default properties. 
+ domain = [('res_id', '=', False), + ('value_reference', 'in', ['%s,%s' % (self._name, i) for i in ids]), + ] + if ir_property.search(cr, uid, domain, context=context): + raise except_orm(_('Error'), _('Unable to delete this document because it is used as a default property')) + + # Delete the records' properties. + property_ids = ir_property.search(cr, uid, [('res_id', 'in', ['%s,%s' % (self._name, i) for i in ids])], context=context) + ir_property.unlink(cr, uid, property_ids, context=context) + + self.delete_workflow(cr, uid, ids, context=context) + + self.check_access_rule(cr, uid, ids, 'unlink', context=context) + pool_model_data = self.pool.get('ir.model.data') + ir_values_obj = self.pool.get('ir.values') + for sub_ids in cr.split_for_in_conditions(ids): + cr.execute('delete from ' + self._table + ' ' \ + 'where id IN %s', (sub_ids,)) + + # Removing the ir_model_data reference if the record being deleted is a record created by xml/csv file, + # as these are not connected with real database foreign keys, and would be dangling references. + # Note: following steps performed as admin to avoid access rights restrictions, and with no context + # to avoid possible side-effects during admin calls. + # Step 1. Calling unlink of ir_model_data only for the affected IDS + reference_ids = pool_model_data.search(cr, SUPERUSER_ID, [('res_id','in',list(sub_ids)),('model','=',self._name)]) + # Step 2. 
Marching towards the real deletion of referenced records + if reference_ids: + pool_model_data.unlink(cr, SUPERUSER_ID, reference_ids) + + # For the same reason, removing the record relevant to ir_values + ir_value_ids = ir_values_obj.search(cr, uid, + ['|',('value','in',['%s,%s' % (self._name, sid) for sid in sub_ids]),'&',('res_id','in',list(sub_ids)),('model','=',self._name)], + context=context) + if ir_value_ids: + ir_values_obj.unlink(cr, uid, ir_value_ids, context=context) + + # invalidate the *whole* cache, since the orm does not handle all + # changes made in the database, like cascading delete! + recs.invalidate_cache() + + for order, obj_name, store_ids, fields in result_store: + if obj_name == self._name: + effective_store_ids = list(set(store_ids) - set(ids)) + else: + effective_store_ids = store_ids + if effective_store_ids: + obj = self.pool[obj_name] + cr.execute('select id from '+obj._table+' where id IN %s', (tuple(effective_store_ids),)) + rids = map(lambda x: x[0], cr.fetchall()) + if rids: + obj._store_set_values(cr, uid, rids, fields, context) + + # recompute new-style fields + recs.recompute() + + return True + + # + # TODO: Validate + # + @api.multi + def write(self, vals): + """ + Update records in `self` with the given field values. + + :param vals: field values to update, e.g {'field_name': new_field_value, ...} + :type vals: dictionary + :return: True + :raise AccessError: * if user has no write rights on the requested object + * if user tries to bypass access rules for write on the requested object + :raise ValidateError: if user tries to enter invalid value for a field that is not in selection + :raise UserError: if a loop would be created in a hierarchy of objects a result of the operation (such as setting an object as its own parent) + + **Note**: The type of field values to pass in ``vals`` for relationship fields is specific: + + + For a many2many field, a list of tuples is expected. 
+ Here is the list of tuple that are accepted, with the corresponding semantics :: + + (0, 0, { values }) link to a new record that needs to be created with the given values dictionary + (1, ID, { values }) update the linked record with id = ID (write *values* on it) + (2, ID) remove and delete the linked record with id = ID (calls unlink on ID, that will delete the object completely, and the link to it as well) + (3, ID) cut the link to the linked record with id = ID (delete the relationship between the two objects but does not delete the target object itself) + (4, ID) link to existing record with id = ID (adds a relationship) + (5) unlink all (like using (3,ID) for all linked records) + (6, 0, [IDs]) replace the list of linked IDs (like using (5) then (4,ID) for each ID in the list of IDs) + + Example: + [(6, 0, [8, 5, 6, 4])] sets the many2many to ids [8, 5, 6, 4] + + + For a one2many field, a list of tuples is expected. + Here is the list of tuple that are accepted, with the corresponding semantics :: + + (0, 0, { values }) link to a new record that needs to be created with the given values dictionary + (1, ID, { values }) update the linked record with id = ID (write *values* on it) + (2, ID) remove and delete the linked record with id = ID (calls unlink on ID, that will delete the object completely, and the link to it as well) + + Example: + [(0, 0, {'field_name':field_value_record1, ...}), (0, 0, {'field_name':field_value_record2, ...})] + + + For a many2one field, simply use the ID of target record, which must already exist, or ``False`` to remove the link.
+ + For a reference field, use a string with the model name, a comma, and the target object id (example: ``'product.product, 5'``) + + """ + if not self: + return True + + cr, uid, context = self.env.args + self._check_concurrency(self._ids) + self.check_access_rights('write') + + # No user-driven update of these columns + for field in itertools.chain(MAGIC_COLUMNS, ('parent_left', 'parent_right')): + vals.pop(field, None) + + # split up fields into old-style and pure new-style ones + old_vals, new_vals, unknown = {}, {}, [] + for key, val in vals.iteritems(): + if key in self._columns: + old_vals[key] = val + elif key in self._fields: + new_vals[key] = val + else: + unknown.append(key) + + if unknown: + _logger.warning("%s.write() with unknown fields: %s", self._name, ', '.join(sorted(unknown))) + + # write old-style fields with (low-level) method _write + if old_vals: + self._write(old_vals) + + # put the values of pure new-style fields into cache, and inverse them + if new_vals: + self._cache.update(self._convert_to_cache(new_vals)) + for key in new_vals: + self._fields[key].determine_inverse(self) + + return True + + def _write(self, cr, user, ids, vals, context=None): + # low-level implementation of write() + if not context: + context = {} + + readonly = None + self.check_field_access_rights(cr, user, 'write', vals.keys()) + for field in vals.keys(): + fobj = None + if field in self._columns: + fobj = self._columns[field] + elif field in self._inherit_fields: + fobj = self._inherit_fields[field][2] + if not fobj: + continue + groups = fobj.write + + if groups: + edit = False + for group in groups: + module = group.split(".")[0] + grp = group.split(".")[1] + cr.execute("select count(*) from res_groups_users_rel where gid IN (select res_id from ir_model_data where name=%s and module=%s and model=%s) and uid=%s", \ + (grp, module, 'res.groups', user)) + readonly = cr.fetchall() + if readonly[0][0] >= 1: + edit = True + break + + if not edit: + vals.pop(field) + + 
result = self._store_get_values(cr, user, ids, vals.keys(), context) or [] + + # for recomputing new-style fields + recs = self.browse(cr, user, ids, context) + modified_fields = list(vals) + if self._log_access: + modified_fields += ['write_date', 'write_uid'] + recs.modified(modified_fields) + + parents_changed = [] + parent_order = self._parent_order or self._order + if self._parent_store and (self._parent_name in vals) and not context.get('defer_parent_store_computation'): + # The parent_left/right computation may take up to + # 5 seconds. No need to recompute the values if the + # parent is the same. + # Note: to respect parent_order, nodes must be processed in + # order, so ``parents_changed`` must be ordered properly. + parent_val = vals[self._parent_name] + if parent_val: + query = "SELECT id FROM %s WHERE id IN %%s AND (%s != %%s OR %s IS NULL) ORDER BY %s" % \ + (self._table, self._parent_name, self._parent_name, parent_order) + cr.execute(query, (tuple(ids), parent_val)) + else: + query = "SELECT id FROM %s WHERE id IN %%s AND (%s IS NOT NULL) ORDER BY %s" % \ + (self._table, self._parent_name, parent_order) + cr.execute(query, (tuple(ids),)) + parents_changed = map(operator.itemgetter(0), cr.fetchall()) + + upd0 = [] + upd1 = [] + upd_todo = [] + updend = [] + direct = [] + totranslate = context.get('lang', False) and (context['lang'] != 'en_US') + for field in vals: + field_column = self._all_columns.get(field) and self._all_columns.get(field).column + if field_column and field_column.deprecated: + _logger.warning('Field %s.%s is deprecated: %s', self._name, field, field_column.deprecated) + if field in self._columns: + if self._columns[field]._classic_write and not (hasattr(self._columns[field], '_fnct_inv')): + if (not totranslate) or not self._columns[field].translate: + upd0.append('"'+field+'"='+self._columns[field]._symbol_set[0]) + upd1.append(self._columns[field]._symbol_set[1](vals[field])) + direct.append(field) + else: + 
upd_todo.append(field) + else: + updend.append(field) + if field in self._columns \ + and hasattr(self._columns[field], 'selection') \ + and vals[field]: + self._check_selection_field_value(cr, user, field, vals[field], context=context) + + if self._log_access: + upd0.append('write_uid=%s') + upd0.append("write_date=(now() at time zone 'UTC')") + upd1.append(user) + + if len(upd0): + self.check_access_rule(cr, user, ids, 'write', context=context) + for sub_ids in cr.split_for_in_conditions(ids): + cr.execute('update ' + self._table + ' set ' + ','.join(upd0) + ' ' \ + 'where id IN %s', upd1 + [sub_ids]) + if cr.rowcount != len(sub_ids): + raise MissingError(_('One of the records you are trying to modify has already been deleted (Document type: %s).') % self._description) + + if totranslate: + # TODO: optimize + for f in direct: + if self._columns[f].translate: + src_trans = self.pool[self._name].read(cr, user, ids, [f])[0][f] + if not src_trans: + src_trans = vals[f] + # Inserting value to DB + context_wo_lang = dict(context, lang=None) + self.write(cr, user, ids, {f: vals[f]}, context=context_wo_lang) + self.pool.get('ir.translation')._set_ids(cr, user, self._name+','+f, 'model', context['lang'], ids, vals[f], src_trans) + + # call the 'set' method of fields which are not classic_write + upd_todo.sort(lambda x, y: self._columns[x].priority-self._columns[y].priority) + + # default element in context must be removed when call a one2many or many2many + rel_context = context.copy() + for c in context.items(): + if c[0].startswith('default_'): + del rel_context[c[0]] + + for field in upd_todo: + for id in ids: + result += self._columns[field].set(cr, self, id, field, vals[field], user, context=rel_context) or [] + + unknown_fields = updend[:] + for table in self._inherits: + col = self._inherits[table] + nids = [] + for sub_ids in cr.split_for_in_conditions(ids): + cr.execute('select distinct "'+col+'" from "'+self._table+'" ' \ + 'where id IN %s', (sub_ids,)) + 
nids.extend([x[0] for x in cr.fetchall()]) + + v = {} + for val in updend: + if self._inherit_fields[val][0] == table: + v[val] = vals[val] + unknown_fields.remove(val) + if v: + self.pool[table].write(cr, user, nids, v, context) + + if unknown_fields: + _logger.warning( + 'No such field(s) in model %s: %s.', + self._name, ', '.join(unknown_fields)) + + # check Python constraints + recs._validate_fields(vals) + + # TODO: use _order to set dest at the right position and not first node of parent + # We can't defer parent_store computation because the stored function + # fields that are computer may refer (directly or indirectly) to + # parent_left/right (via a child_of domain) + if parents_changed: + if self.pool._init: + self.pool._init_parent[self._name] = True + else: + order = self._parent_order or self._order + parent_val = vals[self._parent_name] + if parent_val: + clause, params = '%s=%%s' % (self._parent_name,), (parent_val,) + else: + clause, params = '%s IS NULL' % (self._parent_name,), () + + for id in parents_changed: + cr.execute('SELECT parent_left, parent_right FROM %s WHERE id=%%s' % (self._table,), (id,)) + pleft, pright = cr.fetchone() + distance = pright - pleft + 1 + + # Positions of current siblings, to locate proper insertion point; + # this can _not_ be fetched outside the loop, as it needs to be refreshed + # after each update, in case several nodes are sequentially inserted one + # next to the other (i.e computed incrementally) + cr.execute('SELECT parent_right, id FROM %s WHERE %s ORDER BY %s' % (self._table, clause, parent_order), params) + parents = cr.fetchall() + + # Find Position of the element + position = None + for (parent_pright, parent_id) in parents: + if parent_id == id: + break + position = parent_pright and parent_pright + 1 or 1 + + # It's the first node of the parent + if not position: + if not parent_val: + position = 1 + else: + cr.execute('select parent_left from '+self._table+' where id=%s', (parent_val,)) + position = 
cr.fetchone()[0] + 1 + + if pleft < position <= pright: + raise except_orm(_('UserError'), _('Recursivity Detected.')) + + if pleft < position: + cr.execute('update '+self._table+' set parent_left=parent_left+%s where parent_left>=%s', (distance, position)) + cr.execute('update '+self._table+' set parent_right=parent_right+%s where parent_right>=%s', (distance, position)) + cr.execute('update '+self._table+' set parent_left=parent_left+%s, parent_right=parent_right+%s where parent_left>=%s and parent_left<%s', (position-pleft, position-pleft, pleft, pright)) + else: + cr.execute('update '+self._table+' set parent_left=parent_left+%s where parent_left>=%s', (distance, position)) + cr.execute('update '+self._table+' set parent_right=parent_right+%s where parent_right>=%s', (distance, position)) + cr.execute('update '+self._table+' set parent_left=parent_left-%s, parent_right=parent_right-%s where parent_left>=%s and parent_left<%s', (pleft-position+distance, pleft-position+distance, pleft+distance, pright+distance)) + recs.invalidate_cache(['parent_left', 'parent_right']) + + result += self._store_get_values(cr, user, ids, vals.keys(), context) + result.sort() + + # for recomputing new-style fields + recs.modified(modified_fields) + + done = {} + for order, model_name, ids_to_update, fields_to_recompute in result: + key = (model_name, tuple(fields_to_recompute)) + done.setdefault(key, {}) + # avoid to do several times the same computation + todo = [] + for id in ids_to_update: + if id not in done[key]: + done[key][id] = True + todo.append(id) + self.pool[model_name]._store_set_values(cr, user, todo, fields_to_recompute, context) + + # recompute new-style fields + if context.get('recompute', True): + recs.recompute() + + self.step_workflow(cr, user, ids, context=context) + return True + + # + # TODO: Should set perm to user.xxx + # + @api.model + @api.returns('self', lambda value: value.id) + def create(self, vals): + """ Create a new record for the model. 
+ + The values for the new record are initialized using the dictionary + `vals`, and if necessary the result of :meth:`default_get`. + + :param vals: field values like ``{'field_name': field_value, ...}``, + see :meth:`write` for details about the values format + :return: new record created + :raise AccessError: * if user has no create rights on the requested object + * if user tries to bypass access rules for create on the requested object + :raise ValidateError: if user tries to enter invalid value for a field that is not in selection + :raise UserError: if a loop would be created in a hierarchy of objects a result of the operation (such as setting an object as its own parent) + """ + self.check_access_rights('create') + + # add missing defaults, and drop fields that may not be set by user + vals = self._add_missing_default_values(vals) + for field in itertools.chain(MAGIC_COLUMNS, ('parent_left', 'parent_right')): + vals.pop(field, None) + + # split up fields into old-style and pure new-style ones + old_vals, new_vals, unknown = {}, {}, [] + for key, val in vals.iteritems(): + if key in self._all_columns: + old_vals[key] = val + elif key in self._fields: + new_vals[key] = val + else: + unknown.append(key) + + if unknown: + _logger.warning("%s.create() with unknown fields: %s", self._name, ', '.join(sorted(unknown))) + + # create record with old-style fields + record = self.browse(self._create(old_vals)) + + # put the values of pure new-style fields into cache, and inverse them + record._cache.update(record._convert_to_cache(new_vals)) + for key in new_vals: + self._fields[key].determine_inverse(record) + + return record + + def _create(self, cr, user, vals, context=None): + # low-level implementation of create() + if not context: + context = {} + + if self.is_transient(): + self._transient_vacuum(cr, user) + + tocreate = {} + for v in self._inherits: + if self._inherits[v] not in vals: + tocreate[v] = {} + else: + tocreate[v] = {'id': vals[self._inherits[v]]} + 
+ updates = [ + # list of column assignments defined as tuples like: + # (column_name, format_string, column_value) + # (column_name, sql_formula) + # Those tuples will be used by the string formatting for the INSERT + # statement below. + ('id', "nextval('%s')" % self._sequence), + ] + + upd_todo = [] + unknown_fields = [] + for v in vals.keys(): + if v in self._inherit_fields and v not in self._columns: + (table, col, col_detail, original_parent) = self._inherit_fields[v] + tocreate[table][v] = vals[v] + del vals[v] + else: + if (v not in self._inherit_fields) and (v not in self._columns): + del vals[v] + unknown_fields.append(v) + if unknown_fields: + _logger.warning( + 'No such field(s) in model %s: %s.', + self._name, ', '.join(unknown_fields)) + + for table in tocreate: + if self._inherits[table] in vals: + del vals[self._inherits[table]] + + record_id = tocreate[table].pop('id', None) + + if isinstance(record_id, dict): + # Shit happens: this possibly comes from a new record + tocreate[table] = dict(record_id, **tocreate[table]) + record_id = None + + # When linking/creating parent records, force context without 'no_store_function' key that + # defers stored functions computing, as these won't be computed in batch at the end of create(). 
+ parent_context = dict(context) + parent_context.pop('no_store_function', None) + + if record_id is None or not record_id: + record_id = self.pool[table].create(cr, user, tocreate[table], context=parent_context) + else: + self.pool[table].write(cr, user, [record_id], tocreate[table], context=parent_context) + + updates.append((self._inherits[table], '%s', record_id)) + + #Start : Set bool fields to be False if they are not touched(to make search more powerful) + bool_fields = [x for x in self._columns.keys() if self._columns[x]._type=='boolean'] + + for bool_field in bool_fields: + if bool_field not in vals: + vals[bool_field] = False + #End + for field in vals.keys(): + fobj = None + if field in self._columns: + fobj = self._columns[field] + else: + fobj = self._inherit_fields[field][2] + if not fobj: + continue + groups = fobj.write + if groups: + edit = False + for group in groups: + module = group.split(".")[0] + grp = group.split(".")[1] + cr.execute("select count(*) from res_groups_users_rel where gid IN (select res_id from ir_model_data where name='%s' and module='%s' and model='%s') and uid=%s" % \ + (grp, module, 'res.groups', user)) + readonly = cr.fetchall() + if readonly[0][0] >= 1: + edit = True + break + elif readonly[0][0] == 0: + edit = False + else: + edit = False + + if not edit: + vals.pop(field) + for field in vals: + current_field = self._columns[field] + if current_field._classic_write: + updates.append((field, '%s', current_field._symbol_set[1](vals[field]))) + + #for the function fields that receive a value, we set them directly in the database + #(they may be required), but we also need to trigger the _fct_inv() + if (hasattr(current_field, '_fnct_inv')) and not isinstance(current_field, fields.related): + #TODO: this way to special case the related fields is really creepy but it shouldn't be changed at + #one week of the release candidate. 
It seems the only good way to handle correctly this is to add an + #attribute to make a field `really readonly´ and thus totally ignored by the create()... otherwise + #if, for example, the related has a default value (for usability) then the fct_inv is called and it + #may raise some access rights error. Changing this is a too big change for now, and is thus postponed + #after the release but, definitively, the behavior shouldn't be different for related and function + #fields. + upd_todo.append(field) + else: + #TODO: this `if´ statement should be removed because there is no good reason to special case the fields + #related. See the above TODO comment for further explanations. + if not isinstance(current_field, fields.related): + upd_todo.append(field) + if field in self._columns \ + and hasattr(current_field, 'selection') \ + and vals[field]: + self._check_selection_field_value(cr, user, field, vals[field], context=context) + if self._log_access: + updates.append(('create_uid', '%s', user)) + updates.append(('write_uid', '%s', user)) + updates.append(('create_date', "(now() at time zone 'UTC')")) + updates.append(('write_date', "(now() at time zone 'UTC')")) + + # the list of tuples used in this formatting corresponds to + # tuple(field_name, format, value) + # In some cases, for example (id, create_date, write_date) we do not + # need to read the third value of the tuple, because the real value is + # encoded in the second value (the format).
+ cr.execute( + """INSERT INTO "%s" (%s) VALUES(%s) RETURNING id""" % ( + self._table, + ', '.join('"%s"' % u[0] for u in updates), + ', '.join(u[1] for u in updates) + ), + tuple([u[2] for u in updates if len(u) > 2]) + ) + + id_new, = cr.fetchone() + recs = self.browse(cr, user, id_new, context) + upd_todo.sort(lambda x, y: self._columns[x].priority-self._columns[y].priority) + + if self._parent_store and not context.get('defer_parent_store_computation'): + if self.pool._init: + self.pool._init_parent[self._name] = True + else: + parent = vals.get(self._parent_name, False) + if parent: + cr.execute('select parent_right from '+self._table+' where '+self._parent_name+'=%s order by '+(self._parent_order or self._order), (parent,)) + pleft_old = None + result_p = cr.fetchall() + for (pleft,) in result_p: + if not pleft: + break + pleft_old = pleft + if not pleft_old: + cr.execute('select parent_left from '+self._table+' where id=%s', (parent,)) + pleft_old = cr.fetchone()[0] + pleft = pleft_old + else: + cr.execute('select max(parent_right) from '+self._table) + pleft = cr.fetchone()[0] or 0 + cr.execute('update '+self._table+' set parent_left=parent_left+2 where parent_left>%s', (pleft,)) + cr.execute('update '+self._table+' set parent_right=parent_right+2 where parent_right>%s', (pleft,)) + cr.execute('update '+self._table+' set parent_left=%s,parent_right=%s where id=%s', (pleft+1, pleft+2, id_new)) + recs.invalidate_cache(['parent_left', 'parent_right']) + + # default element in context must be remove when call a one2many or many2many + rel_context = context.copy() + for c in context.items(): + if c[0].startswith('default_'): + del rel_context[c[0]] + + result = [] + for field in upd_todo: + result += self._columns[field].set(cr, self, id_new, field, vals[field], user, rel_context) or [] + + # check Python constraints + recs._validate_fields(vals) + + if not context.get('no_store_function', False): + result += self._store_get_values(cr, user, [id_new], + 
list(set(vals.keys() + self._inherits.values())), + context) + result.sort() + done = [] + for order, model_name, ids, fields2 in result: + if not (model_name, ids, fields2) in done: + self.pool[model_name]._store_set_values(cr, user, ids, fields2, context) + done.append((model_name, ids, fields2)) + + # recompute new-style fields + modified_fields = list(vals) + if self._log_access: + modified_fields += ['create_uid', 'create_date', 'write_uid', 'write_date'] + recs.modified(modified_fields) + recs.recompute() + + if self._log_create and not (context and context.get('no_store_function', False)): + message = self._description + \ + " '" + \ + self.name_get(cr, user, [id_new], context=context)[0][1] + \ + "' " + _("created.") + self.log(cr, user, id_new, message, True, context=context) + + self.check_access_rule(cr, user, [id_new], 'create', context=context) + self.create_workflow(cr, user, [id_new], context=context) + return id_new + + def _store_get_values(self, cr, uid, ids, fields, context): + """Returns an ordered list of fields.function to call due to + an update operation on ``fields`` of records with ``ids``, + obtained by calling the 'store' triggers of these fields, + as setup by their 'store' attribute. + + :return: [(priority, model_name, [record_ids,], [function_fields,])] + """ + if fields is None: fields = [] + stored_functions = self.pool._store_function.get(self._name, []) + + # use indexed names for the details of the stored_functions: + model_name_, func_field_to_compute_, target_ids_func_, trigger_fields_, priority_ = range(5) + + # only keep store triggers that should be triggered for the ``fields`` + # being written to. 
+ triggers_to_compute = ( + f for f in stored_functions + if not f[trigger_fields_] or set(fields).intersection(f[trigger_fields_]) + ) + + to_compute_map = {} + target_id_results = {} + for store_trigger in triggers_to_compute: + target_func_id_ = id(store_trigger[target_ids_func_]) + if target_func_id_ not in target_id_results: + # use admin user for accessing objects having rules defined on store fields + target_id_results[target_func_id_] = [i for i in store_trigger[target_ids_func_](self, cr, SUPERUSER_ID, ids, context) if i] + target_ids = target_id_results[target_func_id_] + + # the compound key must consider the priority and model name + key = (store_trigger[priority_], store_trigger[model_name_]) + for target_id in target_ids: + to_compute_map.setdefault(key, {}).setdefault(target_id,set()).add(tuple(store_trigger)) + + # Here to_compute_map looks like: + # { (10, 'model_a') : { target_id1: [ (trigger_1_tuple, trigger_2_tuple) ], ... } + # (20, 'model_a') : { target_id2: [ (trigger_3_tuple, trigger_4_tuple) ], ... } + # (99, 'model_a') : { target_id1: [ (trigger_5_tuple, trigger_6_tuple) ], ... } + # } + + # Now we need to generate the batch function calls list + # call_map = + # { (10, 'model_a') : [(10, 'model_a', [record_ids,], [function_fields,])] } + call_map = {} + for ((priority,model), id_map) in to_compute_map.iteritems(): + trigger_ids_maps = {} + # function_ids_maps = + # { (function_1_tuple, function_2_tuple) : [target_id1, target_id2, ..] 
} + for target_id, triggers in id_map.iteritems(): + trigger_ids_maps.setdefault(tuple(triggers), []).append(target_id) + for triggers, target_ids in trigger_ids_maps.iteritems(): + call_map.setdefault((priority,model),[]).append((priority, model, target_ids, + [t[func_field_to_compute_] for t in triggers])) + result = [] + if call_map: + result = reduce(operator.add, (call_map[k] for k in sorted(call_map))) + return result + + def _store_set_values(self, cr, uid, ids, fields, context): + """Calls the fields.function's "implementation function" for all ``fields``, on records with ``ids`` (taking care of + respecting ``multi`` attributes), and stores the resulting values in the database directly.""" + if not ids: + return True + field_flag = False + field_dict = {} + if self._log_access: + cr.execute('select id,write_date from '+self._table+' where id IN %s', (tuple(ids),)) + res = cr.fetchall() + for r in res: + if r[1]: + field_dict.setdefault(r[0], []) + res_date = time.strptime((r[1])[:19], '%Y-%m-%d %H:%M:%S') + write_date = datetime.datetime.fromtimestamp(time.mktime(res_date)) + for i in self.pool._store_function.get(self._name, []): + if i[5]: + up_write_date = write_date + datetime.timedelta(hours=i[5]) + if datetime.datetime.now() < up_write_date: + if i[1] in fields: + field_dict[r[0]].append(i[1]) + if not field_flag: + field_flag = True + todo = {} + keys = [] + for f in fields: + if self._columns[f]._multi not in keys: + keys.append(self._columns[f]._multi) + todo.setdefault(self._columns[f]._multi, []) + todo[self._columns[f]._multi].append(f) + for key in keys: + val = todo[key] + if key: + # use admin user for accessing objects having rules defined on store fields + result = self._columns[val[0]].get(cr, self, ids, val, SUPERUSER_ID, context=context) + for id, value in result.items(): + if field_flag: + for f in value.keys(): + if f in field_dict[id]: + value.pop(f) + upd0 = [] + upd1 = [] + for v in value: + if v not in val: + continue + if 
self._columns[v]._type == 'many2one': + try: + value[v] = value[v][0] + except: + pass + upd0.append('"'+v+'"='+self._columns[v]._symbol_set[0]) + upd1.append(self._columns[v]._symbol_set[1](value[v])) + upd1.append(id) + if upd0 and upd1: + cr.execute('update "' + self._table + '" set ' + \ + ','.join(upd0) + ' where id = %s', upd1) + + else: + for f in val: + # use admin user for accessing objects having rules defined on store fields + result = self._columns[f].get(cr, self, ids, f, SUPERUSER_ID, context=context) + for r in result.keys(): + if field_flag: + if r in field_dict.keys(): + if f in field_dict[r]: + result.pop(r) + for id, value in result.items(): + if self._columns[f]._type == 'many2one': + try: + value = value[0] + except: + pass + cr.execute('update "' + self._table + '" set ' + \ + '"'+f+'"='+self._columns[f]._symbol_set[0] + ' where id = %s', (self._columns[f]._symbol_set[1](value), id)) + + # invalidate the cache for the modified fields + self.browse(cr, uid, ids, context).modified(fields) + + return True + + # TODO: ameliorer avec NULL + def _where_calc(self, cr, user, domain, active_test=True, context=None): + """Computes the WHERE clause needed to implement an OpenERP domain. + :param domain: the domain to compute + :type domain: list + :param active_test: whether the default filtering of records with ``active`` + field set to ``False`` should be applied. + :return: the query expressing the given domain as provided in domain + :rtype: osv.query.Query + """ + if not context: + context = {} + domain = domain[:] + # if the object has a field named 'active', filter out all inactive + # records unless they were explicitely asked for + if 'active' in self._all_columns and (active_test and context.get('active_test', True)): + if domain: + # the item[0] trick below works for domain items and '&'/'|'/'!' 
+ # operators too + if not any(item[0] == 'active' for item in domain): + domain.insert(0, ('active', '=', 1)) + else: + domain = [('active', '=', 1)] + + if domain: + e = expression.expression(cr, user, domain, self, context) + tables = e.get_tables() + where_clause, where_params = e.to_sql() + where_clause = where_clause and [where_clause] or [] + else: + where_clause, where_params, tables = [], [], ['"%s"' % self._table] + + return Query(tables, where_clause, where_params) + + def _check_qorder(self, word): + if not regex_order.match(word): + raise except_orm(_('AccessError'), _('Invalid "order" specified. A valid "order" specification is a comma-separated list of valid field names (optionally followed by asc/desc for the direction)')) + return True + + def _apply_ir_rules(self, cr, uid, query, mode='read', context=None): + """Add what's missing in ``query`` to implement all appropriate ir.rules + (using the ``model_name``'s rules or the current model's rules if ``model_name`` is None) + + :param query: the current query object + """ + if uid == SUPERUSER_ID: + return + + def apply_rule(added_clause, added_params, added_tables, parent_model=None): + """ :param parent_model: name of the parent model, if the added + clause comes from a parent model + """ + if added_clause: + if parent_model: + # as inherited rules are being applied, we need to add the missing JOIN + # to reach the parent table (if it was not JOINed yet in the query) + parent_alias = self._inherits_join_add(self, parent_model, query) + # inherited rules are applied on the external table -> need to get the alias and replace + parent_table = self.pool[parent_model]._table + added_clause = [clause.replace('"%s"' % parent_table, '"%s"' % parent_alias) for clause in added_clause] + # change references to parent_table to parent_alias, because we now use the alias to refer to the table + new_tables = [] + for table in added_tables: + # table is just a table name -> switch to the full alias + if table == 
'"%s"' % parent_table: + new_tables.append('"%s" as "%s"' % (parent_table, parent_alias)) + # table is already a full statement -> replace reference to the table to its alias, is correct with the way aliases are generated + else: + new_tables.append(table.replace('"%s"' % parent_table, '"%s"' % parent_alias)) + added_tables = new_tables + query.where_clause += added_clause + query.where_clause_params += added_params + for table in added_tables: + if table not in query.tables: + query.tables.append(table) + return True + return False + + # apply main rules on the object + rule_obj = self.pool.get('ir.rule') + rule_where_clause, rule_where_clause_params, rule_tables = rule_obj.domain_get(cr, uid, self._name, mode, context=context) + apply_rule(rule_where_clause, rule_where_clause_params, rule_tables) + + # apply ir.rules from the parents (through _inherits) + for inherited_model in self._inherits: + rule_where_clause, rule_where_clause_params, rule_tables = rule_obj.domain_get(cr, uid, inherited_model, mode, context=context) + apply_rule(rule_where_clause, rule_where_clause_params, rule_tables, + parent_model=inherited_model) + + def _generate_m2o_order_by(self, order_field, query): + """ + Add possibly missing JOIN to ``query`` and generate the ORDER BY clause for m2o fields, + either native m2o fields or function/related fields that are stored, including + intermediate JOINs for inheritance if required. 
+ + :return: the qualified field name to use in an ORDER BY clause to sort by ``order_field`` + """ + if order_field not in self._columns and order_field in self._inherit_fields: + # also add missing joins for reaching the table containing the m2o field + qualified_field = self._inherits_join_calc(order_field, query) + order_field_column = self._inherit_fields[order_field][2] + else: + qualified_field = '"%s"."%s"' % (self._table, order_field) + order_field_column = self._columns[order_field] + + assert order_field_column._type == 'many2one', 'Invalid field passed to _generate_m2o_order_by()' + if not order_field_column._classic_write and not getattr(order_field_column, 'store', False): + _logger.debug("Many2one function/related fields must be stored " \ + "to be used as ordering fields! Ignoring sorting for %s.%s", + self._name, order_field) + return + + # figure out the applicable order_by for the m2o + dest_model = self.pool[order_field_column._obj] + m2o_order = dest_model._order + if not regex_order.match(m2o_order): + # _order is complex, can't use it here, so we default to _rec_name + m2o_order = dest_model._rec_name + else: + # extract the field names, to be able to qualify them and add desc/asc + m2o_order_list = [] + for order_part in m2o_order.split(","): + m2o_order_list.append(order_part.strip().split(" ", 1)[0].strip()) + m2o_order = m2o_order_list + + # Join the dest m2o table if it's not joined yet. 
We use [LEFT] OUTER join here + # as we don't want to exclude results that have NULL values for the m2o + src_table, src_field = qualified_field.replace('"', '').split('.', 1) + dst_alias, dst_alias_statement = query.add_join((src_table, dest_model._table, src_field, 'id', src_field), implicit=False, outer=True) + qualify = lambda field: '"%s"."%s"' % (dst_alias, field) + return map(qualify, m2o_order) if isinstance(m2o_order, list) else qualify(m2o_order) + + def _generate_order_by(self, order_spec, query): + """ + Attempt to consruct an appropriate ORDER BY clause based on order_spec, which must be + a comma-separated list of valid field names, optionally followed by an ASC or DESC direction. + + :raise" except_orm in case order_spec is malformed + """ + order_by_clause = '' + order_spec = order_spec or self._order + if order_spec: + order_by_elements = [] + self._check_qorder(order_spec) + for order_part in order_spec.split(','): + order_split = order_part.strip().split(' ') + order_field = order_split[0].strip() + order_direction = order_split[1].strip() if len(order_split) == 2 else '' + inner_clause = None + if order_field == 'id': + order_by_elements.append('"%s"."%s" %s' % (self._table, order_field, order_direction)) + elif order_field in self._columns: + order_column = self._columns[order_field] + if order_column._classic_read: + inner_clause = '"%s"."%s"' % (self._table, order_field) + elif order_column._type == 'many2one': + inner_clause = self._generate_m2o_order_by(order_field, query) + else: + continue # ignore non-readable or "non-joinable" fields + elif order_field in self._inherit_fields: + parent_obj = self.pool[self._inherit_fields[order_field][3]] + order_column = parent_obj._columns[order_field] + if order_column._classic_read: + inner_clause = self._inherits_join_calc(order_field, query) + elif order_column._type == 'many2one': + inner_clause = self._generate_m2o_order_by(order_field, query) + else: + continue # ignore non-readable or 
"non-joinable" fields + else: + raise ValueError( _("Sorting field %s not found on model %s") %( order_field, self._name)) + if inner_clause: + if isinstance(inner_clause, list): + for clause in inner_clause: + order_by_elements.append("%s %s" % (clause, order_direction)) + else: + order_by_elements.append("%s %s" % (inner_clause, order_direction)) + if order_by_elements: + order_by_clause = ",".join(order_by_elements) + + return order_by_clause and (' ORDER BY %s ' % order_by_clause) or '' + + def _search(self, cr, user, args, offset=0, limit=None, order=None, context=None, count=False, access_rights_uid=None): + """ + Private implementation of search() method, allowing specifying the uid to use for the access right check. + This is useful for example when filling in the selection list for a drop-down and avoiding access rights errors, + by specifying ``access_rights_uid=1`` to bypass access rights check, but not ir.rules! + This is ok at the security level because this method is private and not callable through XML-RPC. 
+ + :param access_rights_uid: optional user ID to use when checking access rights + (not for ir.rules, this is only for ir.model.access) + """ + if context is None: + context = {} + self.check_access_rights(cr, access_rights_uid or user, 'read') + + # For transient models, restrict acces to the current user, except for the super-user + if self.is_transient() and self._log_access and user != SUPERUSER_ID: + args = expression.AND(([('create_uid', '=', user)], args or [])) + + query = self._where_calc(cr, user, args, context=context) + self._apply_ir_rules(cr, user, query, 'read', context=context) + order_by = self._generate_order_by(order, query) + from_clause, where_clause, where_clause_params = query.get_sql() + + limit_str = limit and ' limit %d' % limit or '' + offset_str = offset and ' offset %d' % offset or '' + where_str = where_clause and (" WHERE %s" % where_clause) or '' + query_str = 'SELECT "%s".id FROM ' % self._table + from_clause + where_str + order_by + limit_str + offset_str + + if count: + # /!\ the main query must be executed as a subquery, otherwise + # offset and limit apply to the result of count()! + cr.execute('SELECT count(*) FROM (%s) AS count' % query_str, where_clause_params) + res = cr.fetchone() + return res[0] + + cr.execute(query_str, where_clause_params) + res = cr.fetchall() + + # TDE note: with auto_join, we could have several lines about the same result + # i.e. 
a lead with several unread messages; we uniquify the result using + # a fast way to do it while preserving order (http://www.peterbe.com/plog/uniqifiers-benchmark) + def _uniquify_list(seq): + seen = set() + return [x for x in seq if x not in seen and not seen.add(x)] + + return _uniquify_list([x[0] for x in res]) + + # returns the different values ever entered for one field + # this is used, for example, in the client when the user hits enter on + # a char field + def distinct_field_get(self, cr, uid, field, value, args=None, offset=0, limit=None): + if not args: + args = [] + if field in self._inherit_fields: + return self.pool[self._inherit_fields[field][0]].distinct_field_get(cr, uid, field, value, args, offset, limit) + else: + return self._columns[field].search(cr, self, args, field, value, offset, limit, uid) + + def copy_data(self, cr, uid, id, default=None, context=None): + """ + Copy given record's data with all its fields values + + :param cr: database cursor + :param uid: current user id + :param id: id of the record to copy + :param default: field values to override in the original values of the copied record + :type default: dictionary + :param context: context arguments, like lang, time zone + :type context: dictionary + :return: dictionary containing all the field values + """ + + if context is None: + context = {} + + # avoid recursion through already copied records in case of circular relationship + seen_map = context.setdefault('__copy_data_seen', {}) + if id in seen_map.setdefault(self._name, []): + return + seen_map[self._name].append(id) + + if default is None: + default = {} + if 'state' not in default: + if 'state' in self._defaults: + if callable(self._defaults['state']): + default['state'] = self._defaults['state'](self, cr, uid, context) + else: + default['state'] = self._defaults['state'] + + # build a black list of fields that should not be copied + blacklist = set(MAGIC_COLUMNS + ['parent_left', 'parent_right']) + def 
blacklist_given_fields(obj): + # blacklist the fields that are given by inheritance + for other, field_to_other in obj._inherits.items(): + blacklist.add(field_to_other) + if field_to_other in default: + # all the fields of 'other' are given by the record: default[field_to_other], + # except the ones redefined in self + blacklist.update(set(self.pool[other]._all_columns) - set(self._columns)) + else: + blacklist_given_fields(self.pool[other]) + # blacklist deprecated fields + for name, field in obj._columns.items(): + if field.deprecated: + blacklist.add(name) + + blacklist_given_fields(self) + + + fields_to_copy = dict((f,fi) for f, fi in self._all_columns.iteritems() + if fi.column.copy + if f not in default + if f not in blacklist) + + data = self.read(cr, uid, [id], fields_to_copy.keys(), context=context) + if data: + data = data[0] + else: + raise IndexError( _("Record #%d of %s not found, cannot copy!") %( id, self._name)) + + res = dict(default) + for f, colinfo in fields_to_copy.iteritems(): + field = colinfo.column + if field._type == 'many2one': + res[f] = data[f] and data[f][0] + elif field._type == 'one2many': + other = self.pool[field._obj] + # duplicate following the order of the ids because we'll rely on + # it later for copying translations in copy_translation()! + lines = [other.copy_data(cr, uid, line_id, context=context) for line_id in sorted(data[f])] + # the lines are duplicated using the wrong (old) parent, but then + # are reassigned to the correct one thanks to the (0, 0, ...) 
+ res[f] = [(0, 0, line) for line in lines if line] + elif field._type == 'many2many': + res[f] = [(6, 0, data[f])] + else: + res[f] = data[f] + + return res + + def copy_translations(self, cr, uid, old_id, new_id, context=None): + if context is None: + context = {} + + # avoid recursion through already copied records in case of circular relationship + seen_map = context.setdefault('__copy_translations_seen',{}) + if old_id in seen_map.setdefault(self._name,[]): + return + seen_map[self._name].append(old_id) + + trans_obj = self.pool.get('ir.translation') + # TODO it seems fields_get can be replaced by _all_columns (no need for translation) + fields = self.fields_get(cr, uid, context=context) + + for field_name, field_def in fields.items(): + # removing the lang to compare untranslated values + context_wo_lang = dict(context, lang=None) + old_record, new_record = self.browse(cr, uid, [old_id, new_id], context=context_wo_lang) + # we must recursively copy the translations for o2o and o2m + if field_def['type'] == 'one2many': + target_obj = self.pool[field_def['relation']] + # here we rely on the order of the ids to match the translations + # as foreseen in copy_data() + old_children = sorted(r.id for r in old_record[field_name]) + new_children = sorted(r.id for r in new_record[field_name]) + for (old_child, new_child) in zip(old_children, new_children): + target_obj.copy_translations(cr, uid, old_child, new_child, context=context) + # and for translatable fields we keep them for copy + elif field_def.get('translate'): + if field_name in self._columns: + trans_name = self._name + "," + field_name + target_id = new_id + source_id = old_id + elif field_name in self._inherit_fields: + trans_name = self._inherit_fields[field_name][0] + "," + field_name + # get the id of the parent record to set the translation + inherit_field_name = self._inherit_fields[field_name][1] + target_id = new_record[inherit_field_name].id + source_id = old_record[inherit_field_name].id + else: 
+ continue + + trans_ids = trans_obj.search(cr, uid, [ + ('name', '=', trans_name), + ('res_id', '=', source_id) + ]) + user_lang = context.get('lang') + for record in trans_obj.read(cr, uid, trans_ids, context=context): + del record['id'] + # remove source to avoid triggering _set_src + del record['source'] + record.update({'res_id': target_id}) + if user_lang and user_lang == record['lang']: + # 'source' to force the call to _set_src + # 'value' needed if value is changed in copy(), want to see the new_value + record['source'] = old_record[field_name] + record['value'] = new_record[field_name] + trans_obj.create(cr, uid, record, context=context) + + @api.returns('self', lambda value: value.id) + def copy(self, cr, uid, id, default=None, context=None): + """ + Duplicate record with given id updating it with default values + + :param cr: database cursor + :param uid: current user id + :param id: id of the record to copy + :param default: dictionary of field values to override in the original values of the copied record, e.g: ``{'field_name': overriden_value, ...}`` + :type default: dictionary + :param context: context arguments, like lang, time zone + :type context: dictionary + :return: id of the newly created record + + """ + if context is None: + context = {} + context = context.copy() + data = self.copy_data(cr, uid, id, default, context) + new_id = self.create(cr, uid, data, context) + self.copy_translations(cr, uid, id, new_id, context) + return new_id + + @api.multi + @api.returns('self') + def exists(self): + """ Return the subset of records in `self` that exist, and mark deleted + records as such in cache. It can be used as a test on records:: + + if record.exists(): + ... + + By convention, new records are returned as existing. 
+ """ + ids = filter(None, self._ids) # ids to check in database + if not ids: + return self + query = """SELECT id FROM "%s" WHERE id IN %%s""" % self._table + self._cr.execute(query, (ids,)) + ids = ([r[0] for r in self._cr.fetchall()] + # ids in database + [id for id in self._ids if not id]) # new ids + existing = self.browse(ids) + if len(existing) < len(self): + # mark missing records in cache with a failed value + exc = MissingError(_("Record does not exist or has been deleted.")) + (self - existing)._cache.update(FailedValue(exc)) + return existing + + def check_recursion(self, cr, uid, ids, context=None, parent=None): + _logger.warning("You are using deprecated %s.check_recursion(). Please use the '_check_recursion()' instead!" % \ + self._name) + assert parent is None or parent in self._columns or parent in self._inherit_fields,\ + "The 'parent' parameter passed to check_recursion() must be None or a valid field name" + return self._check_recursion(cr, uid, ids, context, parent) + + def _check_recursion(self, cr, uid, ids, context=None, parent=None): + """ + Verifies that there is no loop in a hierarchical structure of records, + by following the parent relationship using the **parent** field until a loop + is detected or until a top-level record is found. + + :param cr: database cursor + :param uid: current user id + :param ids: list of ids of records to check + :param parent: optional parent field name (default: ``self._parent_name = parent_id``) + :return: **True** if the operation can proceed safely, or **False** if an infinite loop is detected. + """ + if not parent: + parent = self._parent_name + + # must ignore 'active' flag, ir.rules, etc. 
=> direct SQL query + query = 'SELECT "%s" FROM "%s" WHERE id = %%s' % (parent, self._table) + for id in ids: + current_id = id + while current_id is not None: + cr.execute(query, (current_id,)) + result = cr.fetchone() + current_id = result[0] if result else None + if current_id == id: + return False + return True + + def _check_m2m_recursion(self, cr, uid, ids, field_name): + """ + Verifies that there is no loop in a hierarchical structure of records, + by following the parent relationship using the **parent** field until a loop + is detected or until a top-level record is found. + + :param cr: database cursor + :param uid: current user id + :param ids: list of ids of records to check + :param field_name: field to check + :return: **True** if the operation can proceed safely, or **False** if an infinite loop is detected. + """ + + field = self._all_columns.get(field_name) + field = field.column if field else None + if not field or field._type != 'many2many' or field._obj != self._name: + # field must be a many2many on itself + raise ValueError('invalid field_name: %r' % (field_name,)) + + query = 'SELECT distinct "%s" FROM "%s" WHERE "%s" IN %%s' % (field._id2, field._rel, field._id1) + ids_parent = ids[:] + while ids_parent: + ids_parent2 = [] + for i in range(0, len(ids_parent), cr.IN_MAX): + j = i + cr.IN_MAX + sub_ids_parent = ids_parent[i:j] + cr.execute(query, (tuple(sub_ids_parent),)) + ids_parent2.extend(filter(None, map(lambda x: x[0], cr.fetchall()))) + ids_parent = ids_parent2 + for i in ids_parent: + if i in ids: + return False + return True + + def _get_external_ids(self, cr, uid, ids, *args, **kwargs): + """Retrieve the External ID(s) of any database record. 
+ + **Synopsis**: ``_get_xml_ids(cr, uid, ids) -> { 'id': ['module.xml_id'] }`` + + :return: map of ids to the list of their fully qualified External IDs + in the form ``module.key``, or an empty list when there's no External + ID for a record, e.g.:: + + { 'id': ['module.ext_id', 'module.ext_id_bis'], + 'id2': [] } + """ + ir_model_data = self.pool.get('ir.model.data') + data_ids = ir_model_data.search(cr, uid, [('model', '=', self._name), ('res_id', 'in', ids)]) + data_results = ir_model_data.read(cr, uid, data_ids, ['module', 'name', 'res_id']) + result = {} + for id in ids: + # can't use dict.fromkeys() as the list would be shared! + result[id] = [] + for record in data_results: + result[record['res_id']].append('%(module)s.%(name)s' % record) + return result + + def get_external_id(self, cr, uid, ids, *args, **kwargs): + """Retrieve the External ID of any database record, if there + is one. This method works as a possible implementation + for a function field, to be able to add it to any + model object easily, referencing it as ``Model.get_external_id``. + + When multiple External IDs exist for a record, only one + of them is returned (randomly). + + :return: map of ids to their fully qualified XML ID, + defaulting to an empty string when there's none + (to be usable as a function field), + e.g.:: + + { 'id': 'module.ext_id', + 'id2': '' } + """ + results = self._get_xml_ids(cr, uid, ids) + for k, v in results.iteritems(): + if results[k]: + results[k] = v[0] + else: + results[k] = '' + return results + + # backwards compatibility + get_xml_id = get_external_id + _get_xml_ids = _get_external_ids + + def print_report(self, cr, uid, ids, name, data, context=None): + """ + Render the report `name` for the given IDs. The report must be defined + for this model, not another. 
+ """ + report = self.pool['ir.actions.report.xml']._lookup_report(cr, name) + assert self._name == report.table + return report.create(cr, uid, ids, data, context) + + # Transience + @classmethod + def is_transient(cls): + """ Return whether the model is transient. + + See :class:`TransientModel`. + + """ + return cls._transient + + def _transient_clean_rows_older_than(self, cr, seconds): + assert self._transient, "Model %s is not transient, it cannot be vacuumed!" % self._name + # Never delete rows used in last 5 minutes + seconds = max(seconds, 300) + query = ("SELECT id FROM " + self._table + " WHERE" + " COALESCE(write_date, create_date, (now() at time zone 'UTC'))::timestamp" + " < ((now() at time zone 'UTC') - interval %s)") + cr.execute(query, ("%s seconds" % seconds,)) + ids = [x[0] for x in cr.fetchall()] + self.unlink(cr, SUPERUSER_ID, ids) + + def _transient_clean_old_rows(self, cr, max_count): + # Check how many rows we have in the table + cr.execute("SELECT count(*) AS row_count FROM " + self._table) + res = cr.fetchall() + if res[0][0] <= max_count: + return # max not reached, nothing to do + self._transient_clean_rows_older_than(cr, 300) + + def _transient_vacuum(self, cr, uid, force=False): + """Clean the transient records. + + This unlinks old records from the transient model tables whenever the + "_transient_max_count" or "_max_age" conditions (if any) are reached. + Actual cleaning will happen only once every "_transient_check_time" calls. + This means this method can be called frequently called (e.g. whenever + a new record is created). + Example with both max_hours and max_count active: + Suppose max_hours = 0.2 (e.g. 12 minutes), max_count = 20, there are 55 rows in the + table, 10 created/changed in the last 5 minutes, an additional 12 created/changed between + 5 and 10 minutes ago, the rest created/changed more then 12 minutes ago. 
+ - age based vacuum will leave the 22 rows created/changed in the last 12 minutes + - count based vacuum will wipe out another 12 rows. Not just 2, otherwise each addition + would immediately cause the maximum to be reached again. + - the 10 rows that have been created/changed the last 5 minutes will NOT be deleted + """ + assert self._transient, "Model %s is not transient, it cannot be vacuumed!" % self._name + _transient_check_time = 20 # arbitrary limit on vacuum executions + self._transient_check_count += 1 + if not force and (self._transient_check_count < _transient_check_time): + return True # no vacuum cleaning this time + self._transient_check_count = 0 + + # Age-based expiration + if self._transient_max_hours: + self._transient_clean_rows_older_than(cr, self._transient_max_hours * 60 * 60) + + # Count-based expiration + if self._transient_max_count: + self._transient_clean_old_rows(cr, self._transient_max_count) + + return True + + def resolve_2many_commands(self, cr, uid, field_name, commands, fields=None, context=None): + """ Serializes one2many and many2many commands into record dictionaries + (as if all the records came from the database via a read()). This + method is aimed at onchange methods on one2many and many2many fields. + + Because commands might be creation commands, not all record dicts + will contain an ``id`` field. Commands matching an existing record + will have an ``id``. 
+ + :param field_name: name of the one2many or many2many field matching the commands + :type field_name: str + :param commands: one2many or many2many commands to execute on ``field_name`` + :type commands: list((int|False, int|False, dict|False)) + :param fields: list of fields to read from the database, when applicable + :type fields: list(str) + :returns: records in a shape similar to that returned by ``read()`` + (except records may be missing the ``id`` field if they don't exist in db) + :rtype: list(dict) + """ + result = [] # result (list of dict) + record_ids = [] # ids of records to read + updates = {} # {id: dict} of updates on particular records + + for command in commands or []: + if not isinstance(command, (list, tuple)): + record_ids.append(command) + elif command[0] == 0: + result.append(command[2]) + elif command[0] == 1: + record_ids.append(command[1]) + updates.setdefault(command[1], {}).update(command[2]) + elif command[0] in (2, 3): + record_ids = [id for id in record_ids if id != command[1]] + elif command[0] == 4: + record_ids.append(command[1]) + elif command[0] == 5: + result, record_ids = [], [] + elif command[0] == 6: + result, record_ids = [], list(command[2]) + + # read the records and apply the updates + other_model = self.pool[self._all_columns[field_name].column._obj] + for record in other_model.read(cr, uid, record_ids, fields=fields, context=context): + record.update(updates.get(record['id'], {})) + result.append(record) + + return result + + # for backward compatibility + resolve_o2m_commands_to_record_dicts = resolve_2many_commands + + def search_read(self, cr, uid, domain=None, fields=None, offset=0, limit=None, order=None, context=None): + """ + Performs a ``search()`` followed by a ``read()``. + + :param cr: database cursor + :param user: current user id + :param domain: Search domain, see ``args`` parameter in ``search()``. Defaults to an empty domain that will match all records. 
+ :param fields: List of fields to read, see ``fields`` parameter in ``read()``. Defaults to all fields. + :param offset: Number of records to skip, see ``offset`` parameter in ``search()``. Defaults to 0. + :param limit: Maximum number of records to return, see ``limit`` parameter in ``search()``. Defaults to no limit. + :param order: Columns to sort result, see ``order`` parameter in ``search()``. Defaults to no sort. + :param context: context arguments. + :return: List of dictionaries containing the asked fields. + :rtype: List of dictionaries. + + """ + record_ids = self.search(cr, uid, domain or [], offset=offset, limit=limit, order=order, context=context) + if not record_ids: + return [] + + if fields and fields == ['id']: + # shortcut read if we only want the ids + return [{'id': id} for id in record_ids] + + # read() ignores active_test, but it would forward it to any downstream search call + # (e.g. for x2m or function fields), and this is not the desired behavior, the flag + # was presumably only meant for the main search(). + # TODO: Move this to read() directly? 
+ read_ctx = dict(context or {}) + read_ctx.pop('active_test', None) + + result = self.read(cr, uid, record_ids, fields, context=read_ctx) + if len(result) <= 1: + return result + + # reorder read + index = dict((r['id'], r) for r in result) + return [index[x] for x in record_ids if x in index] + + def _register_hook(self, cr): + """ stuff to do right after the registry is built """ + pass + + def __getattr__(self, name): + if name.startswith('signal_'): + # self.signal_XXX() sends signal XXX to the record's workflow + signal_name = name[7:] + assert signal_name + return (lambda *args, **kwargs: + self.signal_workflow(*args, signal=signal_name, **kwargs)) + + get = getattr(super(BaseModel, self), '__getattr__', None) + if get is None: + raise AttributeError("%r has no attribute %r" % (type(self).__name__, name)) + return get(name) + + def _patch_method(self, name, method): + """ Monkey-patch a method for all instances of this model. This replaces + the method called `name` by `method` in `self`'s class. + The original method is then accessible via ``method.origin``, and it + can be restored with :meth:`~._revert_method`. + + Example:: + + @api.multi + def do_write(self, values): + # do stuff, and call the original method + return do_write.origin(self, values) + + # patch method write of model + model._patch_method('write', do_write) + + # this will call do_write + records = model.search([...]) + records.write(...) + + # restore the original method + model._revert_method('write') + """ + cls = type(self) + origin = getattr(cls, name) + method.origin = origin + # propagate decorators from origin to method, and apply api decorator + wrapped = api.guess(api.propagate(origin, method)) + wrapped.origin = origin + setattr(cls, name, wrapped) + + def _revert_method(self, name): + """ Revert the original method of `self` called `name`. + See :meth:`~._patch_method`. 
+ """ + cls = type(self) + method = getattr(cls, name) + setattr(cls, name, method.origin) + + # + # Instance creation + # + # An instance represents an ordered collection of records in a given + # execution environment. The instance object refers to the environment, and + # the records themselves are represented by their cache dictionary. The 'id' + # of each record is found in its corresponding cache dictionary. + # + # This design has the following advantages: + # - cache access is direct and thus fast; + # - one can consider records without an 'id' (see new records); + # - the global cache is only an index to "resolve" a record 'id'. + # + + @classmethod + def _browse(cls, env, ids): + """ Create an instance attached to `env`; `ids` is a tuple of record + ids. + """ + records = object.__new__(cls) + records.env = env + records._ids = ids + env.prefetch[cls._name].update(ids) + return records + + @api.v8 + def browse(self, arg=None): + """ Return an instance corresponding to `arg` and attached to + `self.env`; `arg` is either a record id, or a collection of record ids. + """ + ids = _normalize_ids(arg) + #assert all(isinstance(id, IdType) for id in ids), "Browsing invalid ids: %s" % ids + return self._browse(self.env, ids) + + @api.v7 + def browse(self, cr, uid, arg=None, context=None): + ids = _normalize_ids(arg) + #assert all(isinstance(id, IdType) for id in ids), "Browsing invalid ids: %s" % ids + return self._browse(Environment(cr, uid, context or {}), ids) + + # + # Internal properties, for manipulating the instance's implementation + # + + @property + def ids(self): + """ Return the list of non-false record ids of this instance. 
""" + return filter(None, list(self._ids)) + + # backward-compatibility with former browse records + _cr = property(lambda self: self.env.cr) + _uid = property(lambda self: self.env.uid) + _context = property(lambda self: self.env.context) + + # + # Conversion methods + # + + def ensure_one(self): + """ Return `self` if it is a singleton instance, otherwise raise an + exception. + """ + if len(self) == 1: + return self + raise except_orm("ValueError", "Expected singleton: %s" % self) + + def with_env(self, env): + """ Return an instance equivalent to `self` attached to `env`. + """ + return self._browse(env, self._ids) + + def sudo(self, user=SUPERUSER_ID): + """ Return an instance equivalent to `self` attached to an environment + based on `self.env` with the given `user`. + """ + return self.with_env(self.env(user=user)) + + def with_context(self, *args, **kwargs): + """ Return an instance equivalent to `self` attached to an environment + based on `self.env` with another context. The context is given by + `self._context` or the positional argument if given, and modified by + `kwargs`. + """ + context = dict(args[0] if args else self._context, **kwargs) + return self.with_env(self.env(context=context)) + + def _convert_to_cache(self, values): + """ Convert the `values` dictionary into cached values. """ + fields = self._fields + return { + name: fields[name].convert_to_cache(value, self.env) + for name, value in values.iteritems() + if name in fields + } + + def _convert_to_write(self, values): + """ Convert the `values` dictionary into the format of :meth:`write`. """ + fields = self._fields + return dict( + (name, fields[name].convert_to_write(value)) + for name, value in values.iteritems() + if name in self._fields + ) + + # + # Record traversal and update + # + + def _mapped_func(self, func): + """ Apply function `func` on all records in `self`, and return the + result as a list or a recordset (if `func` return recordsets). 
+ """ + vals = [func(rec) for rec in self] + val0 = vals[0] if vals else func(self) + if isinstance(val0, BaseModel): + return reduce(operator.or_, vals, val0) + return vals + + def mapped(self, func): + """ Apply `func` on all records in `self`, and return the result as a + list or a recordset (if `func` return recordsets). In the latter + case, the order of the returned recordset is arbritrary. + + :param func: a function or a dot-separated sequence of field names + """ + if isinstance(func, basestring): + recs = self + for name in func.split('.'): + recs = recs._mapped_func(operator.itemgetter(name)) + return recs + else: + return self._mapped_func(func) + + def _mapped_cache(self, name_seq): + """ Same as `~.mapped`, but `name_seq` is a dot-separated sequence of + field names, and only cached values are used. + """ + recs = self + for name in name_seq.split('.'): + field = recs._fields[name] + null = field.null(self.env) + recs = recs.mapped(lambda rec: rec._cache.get(field, null)) + return recs + + def filtered(self, func): + """ Select the records in `self` such that `func(rec)` is true, and + return them as a recordset. + + :param func: a function or a dot-separated sequence of field names + """ + if isinstance(func, basestring): + name = func + func = lambda rec: filter(None, rec.mapped(name)) + return self.browse([rec.id for rec in self if func(rec)]) + + def sorted(self, key=None): + """ Return the recordset `self` ordered by `key` """ + if key is None: + return self.search([('id', 'in', self.ids)]) + else: + return self.browse(map(int, sorted(self, key=key))) + + def update(self, values): + """ Update record `self[0]` with `values`. """ + for name, value in values.iteritems(): + self[name] = value + + # + # New records - represent records that do not exist in the database yet; + # they are used to compute default values and perform onchanges. 
+ # + + @api.model + def new(self, values={}): + """ Return a new record instance attached to `self.env`, and + initialized with the `values` dictionary. Such a record does not + exist in the database. + """ + record = self.browse([NewId()]) + record._cache.update(self._convert_to_cache(values)) + + if record.env.in_onchange: + # The cache update does not set inverse fields, so do it manually. + # This is useful for computing a function field on secondary + # records, if that field depends on the main record. + for name in values: + field = self._fields.get(name) + if field and field.inverse_field: + field.inverse_field._update(record[name], record) + + return record + + # + # Dirty flag, to mark records modified (in draft mode) + # + + @property + def _dirty(self): + """ Return whether any record in `self` is dirty. """ + dirty = self.env.dirty + return any(record in dirty for record in self) + + @_dirty.setter + def _dirty(self, value): + """ Mark the records in `self` as dirty. """ + if value: + map(self.env.dirty.add, self) + else: + map(self.env.dirty.discard, self) + + # + # "Dunder" methods + # + + def __nonzero__(self): + """ Test whether `self` is nonempty. """ + return bool(getattr(self, '_ids', True)) + + def __len__(self): + """ Return the size of `self`. """ + return len(self._ids) + + def __iter__(self): + """ Return an iterator over `self`. """ + for id in self._ids: + yield self._browse(self.env, (id,)) + + def __contains__(self, item): + """ Test whether `item` is a subset of `self` or a field name. """ + if isinstance(item, BaseModel): + if self._name == item._name: + return set(item._ids) <= set(self._ids) + raise except_orm("ValueError", "Mixing apples and oranges: %s in %s" % (item, self)) + if isinstance(item, basestring): + return item in self._fields + return item in self.ids + + def __add__(self, other): + """ Return the concatenation of two recordsets. 
""" + if not isinstance(other, BaseModel) or self._name != other._name: + raise except_orm("ValueError", "Mixing apples and oranges: %s + %s" % (self, other)) + return self.browse(self._ids + other._ids) + + def __sub__(self, other): + """ Return the recordset of all the records in `self` that are not in `other`. """ + if not isinstance(other, BaseModel) or self._name != other._name: + raise except_orm("ValueError", "Mixing apples and oranges: %s - %s" % (self, other)) + other_ids = set(other._ids) + return self.browse([id for id in self._ids if id not in other_ids]) + + def __and__(self, other): + """ Return the intersection of two recordsets. + Note that recordset order is not preserved. + """ + if not isinstance(other, BaseModel) or self._name != other._name: + raise except_orm("ValueError", "Mixing apples and oranges: %s & %s" % (self, other)) + return self.browse(set(self._ids) & set(other._ids)) + + def __or__(self, other): + """ Return the union of two recordsets. + Note that recordset order is not preserved. + """ + if not isinstance(other, BaseModel) or self._name != other._name: + raise except_orm("ValueError", "Mixing apples and oranges: %s | %s" % (self, other)) + return self.browse(set(self._ids) | set(other._ids)) + + def __eq__(self, other): + """ Test whether two recordsets are equivalent (up to reordering). 
""" + if not isinstance(other, BaseModel): + if other: + _logger.warning("Comparing apples and oranges: %s == %s", self, other) + return False + return self._name == other._name and set(self._ids) == set(other._ids) + + def __ne__(self, other): + return not self == other + + def __lt__(self, other): + if not isinstance(other, BaseModel) or self._name != other._name: + raise except_orm("ValueError", "Mixing apples and oranges: %s < %s" % (self, other)) + return set(self._ids) < set(other._ids) + + def __le__(self, other): + if not isinstance(other, BaseModel) or self._name != other._name: + raise except_orm("ValueError", "Mixing apples and oranges: %s <= %s" % (self, other)) + return set(self._ids) <= set(other._ids) + + def __gt__(self, other): + if not isinstance(other, BaseModel) or self._name != other._name: + raise except_orm("ValueError", "Mixing apples and oranges: %s > %s" % (self, other)) + return set(self._ids) > set(other._ids) + + def __ge__(self, other): + if not isinstance(other, BaseModel) or self._name != other._name: + raise except_orm("ValueError", "Mixing apples and oranges: %s >= %s" % (self, other)) + return set(self._ids) >= set(other._ids) + + def __int__(self): + return self.id + + def __str__(self): + return "%s%s" % (self._name, getattr(self, '_ids', "")) + + def __unicode__(self): + return unicode(str(self)) + + __repr__ = __str__ + + def __hash__(self): + if hasattr(self, '_ids'): + return hash((self._name, frozenset(self._ids))) + else: + return hash(self._name) + + def __getitem__(self, key): + """ If `key` is an integer or a slice, return the corresponding record + selection as an instance (attached to `self.env`). + Otherwise read the field `key` of the first record in `self`. 
+ + Examples:: + + inst = model.search(dom) # inst is a recordset + r4 = inst[3] # fourth record in inst + rs = inst[10:20] # subset of inst + nm = rs['name'] # name of first record in inst + """ + if isinstance(key, basestring): + # important: one must call the field's getter + return self._fields[key].__get__(self, type(self)) + elif isinstance(key, slice): + return self._browse(self.env, self._ids[key]) + else: + return self._browse(self.env, (self._ids[key],)) + + def __setitem__(self, key, value): + """ Assign the field `key` to `value` in record `self`. """ + # important: one must call the field's setter + return self._fields[key].__set__(self, value) + + # + # Cache and recomputation management + # + + @lazy_property + def _cache(self): + """ Return the cache of `self`, mapping field names to values. """ + return RecordCache(self) + + @api.model + def _in_cache_without(self, field): + """ Make sure `self` is present in cache (for prefetching), and return + the records of model `self` in cache that have no value for `field` + (:class:`Field` instance). + """ + env = self.env + prefetch_ids = env.prefetch[self._name] + prefetch_ids.update(self._ids) + ids = filter(None, prefetch_ids - set(env.cache[field])) + return self.browse(ids) + + @api.model + def refresh(self): + """ Clear the records cache. + + .. deprecated:: 8.0 + The record cache is automatically invalidated. + """ + self.invalidate_cache() + + @api.model + def invalidate_cache(self, fnames=None, ids=None): + """ Invalidate the record caches after some records have been modified. + If both `fnames` and `ids` are ``None``, the whole cache is cleared. 
+ + :param fnames: the list of modified fields, or ``None`` for all fields + :param ids: the list of modified record ids, or ``None`` for all + """ + if fnames is None: + if ids is None: + return self.env.invalidate_all() + fields = self._fields.values() + else: + fields = map(self._fields.__getitem__, fnames) + + # invalidate fields and inverse fields, too + spec = [(f, ids) for f in fields] + \ + [(f.inverse_field, None) for f in fields if f.inverse_field] + self.env.invalidate(spec) + + @api.multi + def modified(self, fnames): + """ Notify that fields have been modified on `self`. This invalidates + the cache, and prepares the recomputation of stored function fields + (new-style fields only). + + :param fnames: iterable of field names that have been modified on + records `self` + """ + # each field knows what to invalidate and recompute + spec = [] + for fname in fnames: + spec += self._fields[fname].modified(self) + + cached_fields = { + field + for env in self.env.all + for field in env.cache + } + # invalidate non-stored fields.function which are currently cached + spec += [(f, None) for f in self.pool.pure_function_fields + if f in cached_fields] + + self.env.invalidate(spec) + + def _recompute_check(self, field): + """ If `field` must be recomputed on some record in `self`, return the + corresponding records that must be recomputed. + """ + for env in [self.env] + list(self.env.all): + if env.todo.get(field) and env.todo[field] & self: + return env.todo[field] + + def _recompute_todo(self, field): + """ Mark `field` to be recomputed. """ + todo = self.env.todo + todo[field] = (todo.get(field) or self.browse()) | self + + def _recompute_done(self, field): + """ Mark `field` as being recomputed. """ + todo = self.env.todo + if field in todo: + recs = todo.pop(field) - self + if recs: + todo[field] = recs + + @api.model + def recompute(self): + """ Recompute stored function fields. 
The fields and records to + recompute have been determined by method :meth:`modified`. + """ + for env in list(self.env.all): + while env.todo: + field, recs = next(env.todo.iteritems()) + # evaluate the fields to recompute, and save them to database + for rec, rec1 in zip(recs, recs.with_context(recompute=False)): + try: + values = rec._convert_to_write({ + f.name: rec[f.name] for f in field.computed_fields + }) + rec1._write(values) + except MissingError: + pass + # mark the computed fields as done + map(recs._recompute_done, field.computed_fields) + + # + # Generic onchange method + # + + def _has_onchange(self, field, other_fields): + """ Return whether `field` should trigger an onchange event in the + presence of `other_fields`. + """ + # test whether self has an onchange method for field, or field is a + # dependency of any field in other_fields + return field.name in self._onchange_methods or \ + any(dep in other_fields for dep in field.dependents) + + @api.model + def _onchange_spec(self, view_info=None): + """ Return the onchange spec from a view description; if not given, the + result of ``self.fields_view_get()`` is used. + """ + result = {} + + # for traversing the XML arch and populating result + def process(node, info, prefix): + if node.tag == 'field': + name = node.attrib['name'] + names = "%s.%s" % (prefix, name) if prefix else name + if not result.get(names): + result[names] = node.attrib.get('on_change') + # traverse the subviews included in relational fields + for subinfo in info['fields'][name].get('views', {}).itervalues(): + process(etree.fromstring(subinfo['arch']), subinfo, names) + else: + for child in node: + process(child, info, prefix) + + if view_info is None: + view_info = self.fields_view_get() + process(etree.fromstring(view_info['arch']), view_info, '') + return result + + def _onchange_eval(self, field_name, onchange, result): + """ Apply onchange method(s) for field `field_name` with spec `onchange` + on record `self`. 
Value assignments are applied on `self`, while + domain and warning messages are put in dictionary `result`. + """ + onchange = onchange.strip() + + # onchange V8 + if onchange in ("1", "true"): + for method in self._onchange_methods.get(field_name, ()): + method_res = method(self) + if not method_res: + continue + if 'domain' in method_res: + result.setdefault('domain', {}).update(method_res['domain']) + if 'warning' in method_res: + result['warning'] = method_res['warning'] + return + + # onchange V7 + match = onchange_v7.match(onchange) + if match: + method, params = match.groups() + + # evaluate params -> tuple + global_vars = {'context': self._context, 'uid': self._uid} + if self._context.get('field_parent'): + class RawRecord(object): + def __init__(self, record): + self._record = record + def __getattr__(self, name): + field = self._record._fields[name] + value = self._record[name] + return field.convert_to_onchange(value) + record = self[self._context['field_parent']] + global_vars['parent'] = RawRecord(record) + field_vars = { + key: self._fields[key].convert_to_onchange(val) + for key, val in self._cache.iteritems() + } + params = eval("[%s]" % params, global_vars, field_vars) + + # call onchange method + args = (self._cr, self._uid, self._origin.ids) + tuple(params) + method_res = getattr(self._model, method)(*args) + if not isinstance(method_res, dict): + return + if 'value' in method_res: + method_res['value'].pop('id', None) + self.update(self._convert_to_cache(method_res['value'])) + if 'domain' in method_res: + result.setdefault('domain', {}).update(method_res['domain']) + if 'warning' in method_res: + result['warning'] = method_res['warning'] + + @api.multi + def onchange(self, values, field_name, field_onchange): + """ Perform an onchange on the given field. 
+ + :param values: dictionary mapping field names to values, giving the + current state of modification + :param field_name: name of the modified field_name + :param field_onchange: dictionary mapping field names to their + on_change attribute + """ + env = self.env + + if field_name and field_name not in self._fields: + return {} + + # determine subfields for field.convert_to_write() below + secondary = [] + subfields = defaultdict(set) + for dotname in field_onchange: + if '.' in dotname: + secondary.append(dotname) + name, subname = dotname.split('.') + subfields[name].add(subname) + + # create a new record with values, and attach `self` to it + with env.do_in_onchange(): + record = self.new(values) + values = dict(record._cache) + # attach `self` with a different context (for cache consistency) + record._origin = self.with_context(__onchange=True) + + # determine which field should be triggered an onchange + todo = set([field_name]) if field_name else set(values) + done = set() + + # dummy assignment: trigger invalidations on the record + for name in todo: + record[name] = record[name] + + result = {'value': {}} + + while todo: + name = todo.pop() + if name in done: + continue + done.add(name) + + with env.do_in_onchange(): + # apply field-specific onchange methods + if field_onchange.get(name): + record._onchange_eval(name, field_onchange[name], result) + + # force re-evaluation of function fields on secondary records + for field_seq in secondary: + record.mapped(field_seq) + + # determine which fields have been modified + for name, oldval in values.iteritems(): + newval = record[name] + if newval != oldval or getattr(newval, '_dirty', False): + field = self._fields[name] + result['value'][name] = field.convert_to_write( + newval, record._origin, subfields[name], + ) + todo.add(name) + + # At the moment, the client does not support updates on a *2many field + # while this one is modified by the user. 
+ if field_name and self._fields[field_name].type in ('one2many', 'many2many'): + result['value'].pop(field_name, None) + + return result + + +class RecordCache(MutableMapping): + """ Implements a proxy dictionary to read/update the cache of a record. + Upon iteration, it looks like a dictionary mapping field names to + values. However, fields may be used as keys as well. + """ + def __init__(self, records): + self._recs = records + + def __contains__(self, field): + """ Return whether `records[0]` has a value for `field` in cache. """ + if isinstance(field, basestring): + field = self._recs._fields[field] + return self._recs.id in self._recs.env.cache[field] + + def __getitem__(self, field): + """ Return the cached value of `field` for `records[0]`. """ + if isinstance(field, basestring): + field = self._recs._fields[field] + value = self._recs.env.cache[field][self._recs.id] + return value.get() if isinstance(value, SpecialValue) else value + + def __setitem__(self, field, value): + """ Assign the cached value of `field` for all records in `records`. """ + if isinstance(field, basestring): + field = self._recs._fields[field] + values = dict.fromkeys(self._recs._ids, value) + self._recs.env.cache[field].update(values) + + def update(self, *args, **kwargs): + """ Update the cache of all records in `records`. If the argument is a + `SpecialValue`, update all fields (except "magic" columns). + """ + if args and isinstance(args[0], SpecialValue): + values = dict.fromkeys(self._recs._ids, args[0]) + for name, field in self._recs._fields.iteritems(): + if name not in MAGIC_COLUMNS: + self._recs.env.cache[field].update(values) + else: + return super(RecordCache, self).update(*args, **kwargs) + + def __delitem__(self, field): + """ Remove the cached value of `field` for all `records`. 
""" + if isinstance(field, basestring): + field = self._recs._fields[field] + field_cache = self._recs.env.cache[field] + for id in self._recs._ids: + field_cache.pop(id, None) + + def __iter__(self): + """ Iterate over the field names with a regular value in cache. """ + cache, id = self._recs.env.cache, self._recs.id + dummy = SpecialValue(None) + for name, field in self._recs._fields.iteritems(): + if name not in MAGIC_COLUMNS and \ + not isinstance(cache[field].get(id, dummy), SpecialValue): + yield name + + def __len__(self): + """ Return the number of fields with a regular value in cache. """ + return sum(1 for name in self) + +class Model(BaseModel): + """Main super-class for regular database-persisted OpenERP models. + + OpenERP models are created by inheriting from this class:: + + class user(Model): + ... + + The system will later instantiate the class once per database (on + which the class' module is installed). + """ + _auto = True + _register = False # not visible in ORM registry, meant to be python-inherited only + _transient = False # True in a TransientModel + +class TransientModel(BaseModel): + """Model super-class for transient records, meant to be temporarily + persisted, and regularly vaccuum-cleaned. + + A TransientModel has a simplified access rights management, + all users can create new records, and may only access the + records they created. The super-user has unrestricted access + to all TransientModel records. + """ + _auto = True + _register = False # not visible in ORM registry, meant to be python-inherited only + _transient = True + +class AbstractModel(BaseModel): + """Abstract Model super-class for creating an abstract class meant to be + inherited by regular models (Models or TransientModels) but not meant to + be usable on its own, or persisted. 
+ + Technical note: we don't want to make AbstractModel the super-class of + Model or BaseModel because it would not make sense to put the main + definition of persistence methods such as create() in it, and still we + should be able to override them within an AbstractModel. + """ + _auto = False # don't create any database backend for AbstractModels + _register = False # not visible in ORM registry, meant to be python-inherited only + _transient = False + +def itemgetter_tuple(items): + """ Fixes itemgetter inconsistency (useful in some cases) of not returning + a tuple if len(items) == 1: always returns an n-tuple where n = len(items) + """ + if len(items) == 0: + return lambda a: () + if len(items) == 1: + return lambda gettable: (gettable[items[0]],) + return operator.itemgetter(*items) + +def convert_pgerror_23502(model, fields, info, e): + m = re.match(r'^null value in column "(?P<field>\w+)" violates ' + r'not-null constraint\n', + str(e)) + field_name = m and m.group('field') + if not m or field_name not in fields: + return {'message': unicode(e)} + message = _(u"Missing required value for the field '%s'.") % field_name + field = fields.get(field_name) + if field: + message = _(u"Missing required value for the field '%s' (%s)") % (field['string'], field_name) + return { + 'message': message, + 'field': field_name, + } + +def convert_pgerror_23505(model, fields, info, e): + m = re.match(r'^duplicate key (?P<field>\w+) violates unique constraint', + str(e)) + field_name = m and m.group('field') + if not m or field_name not in fields: + return {'message': unicode(e)} + message = _(u"The value for the field '%s' already exists.") % field_name + field = fields.get(field_name) + if field: + message = _(u"%s This might be '%s' in the current model, or a field " + u"of the same name in an o2m.") % (message, field['string']) + return { + 'message': message, + 'field': field_name, + } + +PGERROR_TO_OE = defaultdict( + # shape of mapped converters + lambda: (lambda 
model, fvg, info, pgerror: {'message': unicode(pgerror)}), { + # not_null_violation + '23502': convert_pgerror_23502, + # unique constraint error + '23505': convert_pgerror_23505, +}) + +def _normalize_ids(arg, atoms={int, long, str, unicode, NewId}): + """ Normalizes the ids argument for ``browse`` (v7 and v8) to a tuple. + + Various implementations were tested on the corpus of all browse() calls + performed during a full crawler run (after having installed all website_* + modules) and this one was the most efficient overall. + + A possible bit of correctness was sacrificed by not doing any test on + Iterable and just assuming that any non-atomic type was an iterable of + some kind. + + :rtype: tuple + """ + # much of the corpus is falsy objects (empty list, tuple or set, None) + if not arg: + return () + + # `type in set` is significantly faster (because more restrictive) than + # isinstance(arg, set) or issubclass(type, set); and for new-style classes + # obj.__class__ is equivalent to but faster than type(obj). Not relevant + # (and looks much worse) in most cases, but over millions of calls it + # does have a very minor effect. 
+ if arg.__class__ in atoms: + return arg, + + return tuple(arg) + +# keep those imports here to avoid dependency cycle errors +from .osv import expression +from .fields import Field, SpecialValue, FailedValue + +# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: diff --git a/openerp/modules/loading.py b/openerp/modules/loading.py index d81461d6a77aaf08cc2657534ecfcb876ac600ee..eb703e25eb6a3a6881d684beb8d94fff896e3c20 100644 --- a/openerp/modules/loading.py +++ b/openerp/modules/loading.py @@ -162,6 +162,7 @@ def load_module_graph(cr, graph, status=None, perform_checks=True, skip_modules= loaded_modules.append(package.name) if hasattr(package, 'init') or hasattr(package, 'update') or package.state in ('to install', 'to upgrade'): + registry.setup_models(cr) init_module_models(cr, package.name, models) status['progress'] = float(index) / len(graph) @@ -188,6 +189,7 @@ def load_module_graph(cr, graph, status=None, perform_checks=True, skip_modules= status['progress'] = (index + 0.75) / len(graph) _load_data(cr, module_name, idref, mode, kind='demo') cr.execute('update ir_module_module set demo=%s where id=%s', (True, module_id)) + modobj.invalidate_cache(cr, SUPERUSER_ID, ['demo'], [module_id]) migrations.migrate_module(package, 'post') @@ -228,6 +230,8 @@ def load_module_graph(cr, graph, status=None, perform_checks=True, skip_modules= registry._init_modules.add(package.name) cr.commit() + registry.setup_models(cr) + _logger.log(25, "%s modules loaded in %.2fs, %s queries", len(graph), time.time() - t0, openerp.sql_db.sql_counter - t0_sql) # The query won't be valid for models created later (i.e. 
custom model @@ -332,6 +336,7 @@ def load_modules(db, force_demo=False, status=None, update_module=False): modobj.button_upgrade(cr, SUPERUSER_ID, ids) cr.execute("update ir_module_module set state=%s where name=%s", ('installed', 'base')) + modobj.invalidate_cache(cr, SUPERUSER_ID, ['state']) # STEP 3: Load marked modules (skipping base which was done in STEP 1) diff --git a/openerp/modules/module.py b/openerp/modules/module.py index 0dd93629830442e4969427de9ab17da71b243e53..505e51b039440436382cb9c4579959d2771e11b4 100644 --- a/openerp/modules/module.py +++ b/openerp/modules/module.py @@ -278,7 +278,7 @@ def init_module_models(cr, module_name, obj_list): for obj in obj_list: obj._auto_end(cr, {'module': module_name}) cr.commit() - todo.sort() + todo.sort(key=lambda x: x[0]) for t in todo: t[1](cr, *t[2]) cr.commit() diff --git a/openerp/modules/registry.py b/openerp/modules/registry.py index eb591c53521a37b60010f5febc25966e511362fd..1d68e228b8f55a8d1f530b937efc9db88985c490 100644 --- a/openerp/modules/registry.py +++ b/openerp/modules/registry.py @@ -27,12 +27,9 @@ from contextlib import contextmanager import logging import threading -import openerp.sql_db -import openerp.osv.orm -import openerp.tools -import openerp.modules.db -import openerp.tools.config -from openerp.tools import assertion_report +import openerp +from .. import SUPERUSER_ID +from openerp.tools import assertion_report, lazy_property _logger = logging.getLogger(__name__) @@ -49,6 +46,7 @@ class Registry(Mapping): self.models = {} # model name/model instance mapping self._sql_error = {} self._store_function = {} + self._pure_function_fields = {} # {model: [field, ...], ...} self._init = True self._init_parent = {} self._assertion_report = assertion_report.assertion_report() @@ -97,10 +95,6 @@ class Registry(Mapping): """ Return an iterator over all model names. """ return iter(self.models) - def __contains__(self, model_name): - """ Test whether the model with the given name exists. 
""" - return model_name in self.models - def __getitem__(self, model_name): """ Return the model with the given name or raise KeyError if it doesn't exist.""" return self.models[model_name] @@ -109,6 +103,16 @@ class Registry(Mapping): """ Same as ``self[model_name]``. """ return self.models[model_name] + @lazy_property + def pure_function_fields(self): + """ Return the list of pure function fields (field objects) """ + fields = [] + for mname, fnames in self._pure_function_fields.iteritems(): + model_fields = self[mname]._fields + for fname in fnames: + fields.append(model_fields[fname]) + return fields + def do_parent_store(self, cr): for o in self._init_parent: self.get(o)._parent_store_compute(cr) @@ -131,17 +135,35 @@ class Registry(Mapping): and registers them in the registry. """ + from .. import models + models_to_load = [] # need to preserve loading order + lazy_property.reset_all(self) + # Instantiate registered classes (via the MetaModel automatic discovery # or via explicit constructor call), and add them to the pool. - for cls in openerp.osv.orm.MetaModel.module_to_models.get(module.name, []): + for cls in models.MetaModel.module_to_models.get(module.name, []): # models register themselves in self.models - model = cls.create_instance(self, cr) + model = cls._build_model(self, cr) if model._name not in models_to_load: # avoid double-loading models whose declaration is split models_to_load.append(model._name) + return [self.models[m] for m in models_to_load] + def setup_models(self, cr): + """ Complete the setup of models. + This must be called after loading modules and before using the ORM. 
+ """ + # prepare the setup on all models + for model in self.models.itervalues(): + model._prepare_setup_fields(cr, SUPERUSER_ID) + + # do the actual setup from a clean state + self._m2m = {} + for model in self.models.itervalues(): + model._setup_fields(cr, SUPERUSER_ID) + def clear_caches(self): """ Clear the caches This clears the caches associated to methods decorated with @@ -151,7 +173,7 @@ class Registry(Mapping): model.clear_caches() # Special case for ir_ui_menu which does not use openerp.tools.ormcache. ir_ui_menu = self.models.get('ir.ui.menu') - if ir_ui_menu: + if ir_ui_menu is not None: ir_ui_menu.clear_cache() @@ -282,36 +304,37 @@ class RegistryManager(object): """ import openerp.modules with cls.lock(): - registry = Registry(db_name) - - # Initializing a registry will call general code which will in turn - # call registries.get (this object) to obtain the registry being - # initialized. Make it available in the registries dictionary then - # remove it if an exception is raised. - cls.delete(db_name) - cls.registries[db_name] = registry - try: - with registry.cursor() as cr: - seq_registry, seq_cache = Registry.setup_multi_process_signaling(cr) - registry.base_registry_signaling_sequence = seq_registry - registry.base_cache_signaling_sequence = seq_cache - # This should be a method on Registry - openerp.modules.load_modules(registry._db, force_demo, status, update_module) - except Exception: - del cls.registries[db_name] - raise + with openerp.api.Environment.manage(): + registry = Registry(db_name) - # load_modules() above can replace the registry by calling - # indirectly new() again (when modules have to be uninstalled). - # Yeah, crazy. - registry = cls.registries[db_name] + # Initializing a registry will call general code which will in + # turn call registries.get (this object) to obtain the registry + # being initialized. Make it available in the registries + # dictionary then remove it if an exception is raised. 
+ cls.delete(db_name) + cls.registries[db_name] = registry + try: + with registry.cursor() as cr: + seq_registry, seq_cache = Registry.setup_multi_process_signaling(cr) + registry.base_registry_signaling_sequence = seq_registry + registry.base_cache_signaling_sequence = seq_cache + # This should be a method on Registry + openerp.modules.load_modules(registry._db, force_demo, status, update_module) + except Exception: + del cls.registries[db_name] + raise + + # load_modules() above can replace the registry by calling + # indirectly new() again (when modules have to be uninstalled). + # Yeah, crazy. + registry = cls.registries[db_name] - cr = registry.cursor() - try: - registry.do_parent_store(cr) - cr.commit() - finally: - cr.close() + cr = registry.cursor() + try: + registry.do_parent_store(cr) + cr.commit() + finally: + cr.close() registry.ready = True diff --git a/openerp/osv/__init__.py b/openerp/osv/__init__.py index 630090954e6dcf75e98ed53ab865d8b9e8a119fe..b6d9f0abb8ea16d2daa9d863372f302c01e4ea4c 100644 --- a/openerp/osv/__init__.py +++ b/openerp/osv/__init__.py @@ -22,6 +22,4 @@ import osv import fields - # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: - diff --git a/openerp/osv/expression.py b/openerp/osv/expression.py index 58999e751fdf2c00ef1f14729aa793f42cd55789..8515c659805e810cb48095286ff891f1d7e62e06 100644 --- a/openerp/osv/expression.py +++ b/openerp/osv/expression.py @@ -131,13 +131,14 @@ Finally, to instruct OpenERP to really use the unaccent function, you have to start the server specifying the ``--unaccent`` flag. """ +import collections import logging import traceback import openerp.modules -from openerp.osv import fields -from openerp.osv.orm import MAGIC_COLUMNS +from . import fields +from ..models import MAGIC_COLUMNS, BaseModel import openerp.tools as tools @@ -516,7 +517,7 @@ class ExtendedLeaf(object): in the condition (i.e. 
in many2one); this link is used to compute aliases """ - assert model, 'Invalid leaf creation without table' + assert isinstance(model, BaseModel), 'Invalid leaf creation without table' self.join_context = join_context or [] self.leaf = leaf # normalize the leaf's operator @@ -677,20 +678,23 @@ class expression(object): - the leaf is added to the result Some internal var explanation: - :var obj working_model: model object, model containing the field + :var list path: left operand seen as a sequence of field names + ("foo.bar" -> ["foo", "bar"]) + :var obj model: model object, model containing the field (the name provided in the left operand) - :var list field_path: left operand seen as a path (foo.bar -> [foo, bar]) - :var obj relational_model: relational model of a field (field._obj) - ex: res_partner.bank_ids -> res.partner.bank + :var obj field: the field corresponding to `path[0]` + :var obj column: the column corresponding to `path[0]` + :var obj comodel: relational model of field (field.comodel) + (res_partner.bank_ids -> res.partner.bank) """ - def to_ids(value, relational_model, context=None, limit=None): + def to_ids(value, comodel, context=None, limit=None): """ Normalize a single id or name, or a list of those, into a list of ids :param {int,long,basestring,list,tuple} value: if int, long -> return [value] if basestring, convert it into a list of basestrings, then if list of basestring -> - perform a name_search on relational_model for each name + perform a name_search on comodel for each name return the list of related ids """ names = [] @@ -701,7 +705,7 @@ class expression(object): elif isinstance(value, (int, long)): return [value] if names: - name_get_list = [name_get[0] for name in names for name_get in relational_model.name_search(cr, uid, name, [], 'ilike', context=context, limit=limit)] + name_get_list = [name_get[0] for name in names for name_get in comodel.name_search(cr, uid, name, [], 'ilike', context=context, limit=limit)] return 
list(set(name_get_list)) return list(value) @@ -751,7 +755,6 @@ class expression(object): leaf = pop() # Get working variables - working_model = leaf.model if leaf.is_operator(): left, operator, right = leaf.leaf, None, None elif leaf.is_true_leaf() or leaf.is_false_leaf(): @@ -759,12 +762,12 @@ class expression(object): left, operator, right = ('%s' % leaf.leaf[0], leaf.leaf[1], leaf.leaf[2]) else: left, operator, right = leaf.leaf - field_path = left.split('.', 1) - field = working_model._columns.get(field_path[0]) - if field and field._obj: - relational_model = working_model.pool[field._obj] - else: - relational_model = None + path = left.split('.', 1) + + model = leaf.model + field = model._fields.get(path[0]) + column = model._columns.get(path[0]) + comodel = model.pool.get(getattr(field, 'comodel_name', None)) # ---------------------------------------- # SIMPLE CASE @@ -787,22 +790,22 @@ class expression(object): # -> else: crash # ---------------------------------------- - elif not field and field_path[0] in working_model._inherit_fields: + elif not column and path[0] in model._inherit_fields: # comments about inherits'd fields # { 'field_name': ('parent_model', 'm2o_field_to_reach_parent', # field_column_obj, origina_parent_model), ... 
} - next_model = working_model.pool[working_model._inherit_fields[field_path[0]][0]] - leaf.add_join_context(next_model, working_model._inherits[next_model._name], 'id', working_model._inherits[next_model._name]) + next_model = model.pool[model._inherit_fields[path[0]][0]] + leaf.add_join_context(next_model, model._inherits[next_model._name], 'id', model._inherits[next_model._name]) push(leaf) elif left == 'id' and operator == 'child_of': - ids2 = to_ids(right, working_model, context) - dom = child_of_domain(left, ids2, working_model) + ids2 = to_ids(right, model, context) + dom = child_of_domain(left, ids2, model) for dom_leaf in reversed(dom): - new_leaf = create_substitution_leaf(leaf, dom_leaf, working_model) + new_leaf = create_substitution_leaf(leaf, dom_leaf, model) push(new_leaf) - elif not field and field_path[0] in MAGIC_COLUMNS: + elif not column and path[0] in MAGIC_COLUMNS: push_result(leaf) elif not field: @@ -811,70 +814,89 @@ class expression(object): # ---------------------------------------- # PATH SPOTTED # -> many2one or one2many with _auto_join: - # - add a join, then jump into linked field: field.remaining on + # - add a join, then jump into linked column: column.remaining on # src_table is replaced by remaining on dst_table, and set for re-evaluation - # - if a domain is defined on the field, add it into evaluation + # - if a domain is defined on the column, add it into evaluation # on the relational table # -> many2one, many2many, one2many: replace by an equivalent computed # domain, given by recursively searching on the remaining of the path - # -> note: hack about fields.property should not be necessary anymore - # as after transforming the field, it will go through this loop once again + # -> note: hack about columns.property should not be necessary anymore + # as after transforming the column, it will go through this loop once again # ---------------------------------------- - elif len(field_path) > 1 and field._type == 'many2one' and 
field._auto_join: + elif len(path) > 1 and column._type == 'many2one' and column._auto_join: # res_partner.state_id = res_partner__state_id.id - leaf.add_join_context(relational_model, field_path[0], 'id', field_path[0]) - push(create_substitution_leaf(leaf, (field_path[1], operator, right), relational_model)) + leaf.add_join_context(comodel, path[0], 'id', path[0]) + push(create_substitution_leaf(leaf, (path[1], operator, right), comodel)) - elif len(field_path) > 1 and field._type == 'one2many' and field._auto_join: + elif len(path) > 1 and column._type == 'one2many' and column._auto_join: # res_partner.id = res_partner__bank_ids.partner_id - leaf.add_join_context(relational_model, 'id', field._fields_id, field_path[0]) - domain = field._domain(working_model) if callable(field._domain) else field._domain - push(create_substitution_leaf(leaf, (field_path[1], operator, right), relational_model)) + leaf.add_join_context(comodel, 'id', column._fields_id, path[0]) + domain = column._domain(model) if callable(column._domain) else column._domain + push(create_substitution_leaf(leaf, (path[1], operator, right), comodel)) if domain: domain = normalize_domain(domain) for elem in reversed(domain): - push(create_substitution_leaf(leaf, elem, relational_model)) - push(create_substitution_leaf(leaf, AND_OPERATOR, relational_model)) + push(create_substitution_leaf(leaf, elem, comodel)) + push(create_substitution_leaf(leaf, AND_OPERATOR, comodel)) - elif len(field_path) > 1 and field._auto_join: - raise NotImplementedError('_auto_join attribute not supported on many2many field %s' % left) + elif len(path) > 1 and column._auto_join: + raise NotImplementedError('_auto_join attribute not supported on many2many column %s' % left) - elif len(field_path) > 1 and field._type == 'many2one': - right_ids = relational_model.search(cr, uid, [(field_path[1], operator, right)], context=context) - leaf.leaf = (field_path[0], 'in', right_ids) + elif len(path) > 1 and column._type == 'many2one': 
+ right_ids = comodel.search(cr, uid, [(path[1], operator, right)], context=context) + leaf.leaf = (path[0], 'in', right_ids) push(leaf) - # Making search easier when there is a left operand as field.o2m or field.m2m - elif len(field_path) > 1 and field._type in ['many2many', 'one2many']: - right_ids = relational_model.search(cr, uid, [(field_path[1], operator, right)], context=context) - table_ids = working_model.search(cr, uid, [(field_path[0], 'in', right_ids)], context=dict(context, active_test=False)) + # Making search easier when there is a left operand as column.o2m or column.m2m + elif len(path) > 1 and column._type in ['many2many', 'one2many']: + right_ids = comodel.search(cr, uid, [(path[1], operator, right)], context=context) + table_ids = model.search(cr, uid, [(path[0], 'in', right_ids)], context=dict(context, active_test=False)) leaf.leaf = ('id', 'in', table_ids) push(leaf) + elif not field.store: + # Non-stored field should provide an implementation of search. + if not field.search: + # field does not support search! + _logger.error("Non-stored field %s cannot be searched.", field) + if _logger.isEnabledFor(logging.DEBUG): + _logger.debug(''.join(traceback.format_stack())) + # Ignore it: generate a dummy leaf. + domain = [] + else: + # Let the field generate a domain. 
+ recs = model.browse(cr, uid, [], context) + domain = field.determine_domain(recs, operator, right) + + if not domain: + leaf.leaf = TRUE_LEAF + push(leaf) + else: + for elem in reversed(domain): + push(create_substitution_leaf(leaf, elem, model)) + # ------------------------------------------------- # FUNCTION FIELD # -> not stored: error if no _fnct_search, otherwise handle the result domain # -> stored: management done in the remaining of parsing # ------------------------------------------------- - elif isinstance(field, fields.function) and not field.store and not field._fnct_search: + elif isinstance(column, fields.function) and not column.store: # this is a function field that is not stored - # the function field doesn't provide a search function and doesn't store - # values in the database, so we must ignore it : we generate a dummy leaf - leaf.leaf = TRUE_LEAF - _logger.error( - "The field '%s' (%s) can not be searched: non-stored " - "function field without fnct_search", - field.string, left) - # avoid compiling stack trace if not needed - if _logger.isEnabledFor(logging.DEBUG): - _logger.debug(''.join(traceback.format_stack())) - push(leaf) + if not column._fnct_search: + _logger.error( + "Field '%s' (%s) can not be searched: " + "non-stored function field without fnct_search", + column.string, left) + # avoid compiling stack trace if not needed + if _logger.isEnabledFor(logging.DEBUG): + _logger.debug(''.join(traceback.format_stack())) + # ignore it: generate a dummy leaf + fct_domain = [] + else: + fct_domain = column.search(cr, uid, model, left, [leaf.leaf], context=context) - elif isinstance(field, fields.function) and not field.store: - # this is a function field that is not stored - fct_domain = field.search(cr, uid, working_model, left, [leaf.leaf], context=context) if not fct_domain: leaf.leaf = TRUE_LEAF push(leaf) @@ -882,71 +904,71 @@ class expression(object): # we assume that the expression is valid # we create a dummy leaf for forcing the 
parsing of the resulting expression for domain_element in reversed(fct_domain): - push(create_substitution_leaf(leaf, domain_element, working_model)) - # self.push(create_substitution_leaf(leaf, TRUE_LEAF, working_model)) - # self.push(create_substitution_leaf(leaf, AND_OPERATOR, working_model)) + push(create_substitution_leaf(leaf, domain_element, model)) + # self.push(create_substitution_leaf(leaf, TRUE_LEAF, model)) + # self.push(create_substitution_leaf(leaf, AND_OPERATOR, model)) # ------------------------------------------------- # RELATIONAL FIELDS # ------------------------------------------------- # Applying recursivity on field(one2many) - elif field._type == 'one2many' and operator == 'child_of': - ids2 = to_ids(right, relational_model, context) - if field._obj != working_model._name: - dom = child_of_domain(left, ids2, relational_model, prefix=field._obj) + elif column._type == 'one2many' and operator == 'child_of': + ids2 = to_ids(right, comodel, context) + if column._obj != model._name: + dom = child_of_domain(left, ids2, comodel, prefix=column._obj) else: - dom = child_of_domain('id', ids2, working_model, parent=left) + dom = child_of_domain('id', ids2, model, parent=left) for dom_leaf in reversed(dom): - push(create_substitution_leaf(leaf, dom_leaf, working_model)) + push(create_substitution_leaf(leaf, dom_leaf, model)) - elif field._type == 'one2many': + elif column._type == 'one2many': call_null = True if right is not False: if isinstance(right, basestring): - ids2 = [x[0] for x in relational_model.name_search(cr, uid, right, [], operator, context=context, limit=None)] + ids2 = [x[0] for x in comodel.name_search(cr, uid, right, [], operator, context=context, limit=None)] if ids2: operator = 'in' + elif isinstance(right, collections.Iterable): + ids2 = right else: - if not isinstance(right, list): - ids2 = [right] - else: - ids2 = right + ids2 = [right] + if not ids2: if operator in ['like', 'ilike', 'in', '=']: #no result found with given search 
criteria call_null = False - push(create_substitution_leaf(leaf, FALSE_LEAF, working_model)) + push(create_substitution_leaf(leaf, FALSE_LEAF, model)) else: - ids2 = select_from_where(cr, field._fields_id, relational_model._table, 'id', ids2, operator) + ids2 = select_from_where(cr, column._fields_id, comodel._table, 'id', ids2, operator) if ids2: call_null = False o2m_op = 'not in' if operator in NEGATIVE_TERM_OPERATORS else 'in' - push(create_substitution_leaf(leaf, ('id', o2m_op, ids2), working_model)) + push(create_substitution_leaf(leaf, ('id', o2m_op, ids2), model)) if call_null: o2m_op = 'in' if operator in NEGATIVE_TERM_OPERATORS else 'not in' - push(create_substitution_leaf(leaf, ('id', o2m_op, select_distinct_from_where_not_null(cr, field._fields_id, relational_model._table)), working_model)) + push(create_substitution_leaf(leaf, ('id', o2m_op, select_distinct_from_where_not_null(cr, column._fields_id, comodel._table)), model)) - elif field._type == 'many2many': - rel_table, rel_id1, rel_id2 = field._sql_names(working_model) + elif column._type == 'many2many': + rel_table, rel_id1, rel_id2 = column._sql_names(model) #FIXME if operator == 'child_of': def _rec_convert(ids): - if relational_model == working_model: + if comodel == model: return ids return select_from_where(cr, rel_id1, rel_table, rel_id2, ids, operator) - ids2 = to_ids(right, relational_model, context) - dom = child_of_domain('id', ids2, relational_model) - ids2 = relational_model.search(cr, uid, dom, context=context) - push(create_substitution_leaf(leaf, ('id', 'in', _rec_convert(ids2)), working_model)) + ids2 = to_ids(right, comodel, context) + dom = child_of_domain('id', ids2, comodel) + ids2 = comodel.search(cr, uid, dom, context=context) + push(create_substitution_leaf(leaf, ('id', 'in', _rec_convert(ids2)), model)) else: call_null_m2m = True if right is not False: if isinstance(right, basestring): - res_ids = [x[0] for x in relational_model.name_search(cr, uid, right, [], operator, 
context=context)] + res_ids = [x[0] for x in comodel.name_search(cr, uid, right, [], operator, context=context)] if res_ids: operator = 'in' else: @@ -958,29 +980,29 @@ class expression(object): if operator in ['like', 'ilike', 'in', '=']: #no result found with given search criteria call_null_m2m = False - push(create_substitution_leaf(leaf, FALSE_LEAF, working_model)) + push(create_substitution_leaf(leaf, FALSE_LEAF, model)) else: operator = 'in' # operator changed because ids are directly related to main object else: call_null_m2m = False m2m_op = 'not in' if operator in NEGATIVE_TERM_OPERATORS else 'in' - push(create_substitution_leaf(leaf, ('id', m2m_op, select_from_where(cr, rel_id1, rel_table, rel_id2, res_ids, operator) or [0]), working_model)) + push(create_substitution_leaf(leaf, ('id', m2m_op, select_from_where(cr, rel_id1, rel_table, rel_id2, res_ids, operator) or [0]), model)) if call_null_m2m: m2m_op = 'in' if operator in NEGATIVE_TERM_OPERATORS else 'not in' - push(create_substitution_leaf(leaf, ('id', m2m_op, select_distinct_from_where_not_null(cr, rel_id1, rel_table)), working_model)) + push(create_substitution_leaf(leaf, ('id', m2m_op, select_distinct_from_where_not_null(cr, rel_id1, rel_table)), model)) - elif field._type == 'many2one': + elif column._type == 'many2one': if operator == 'child_of': - ids2 = to_ids(right, relational_model, context) - if field._obj != working_model._name: - dom = child_of_domain(left, ids2, relational_model, prefix=field._obj) + ids2 = to_ids(right, comodel, context) + if column._obj != model._name: + dom = child_of_domain(left, ids2, comodel, prefix=column._obj) else: - dom = child_of_domain('id', ids2, working_model, parent=left) + dom = child_of_domain('id', ids2, model, parent=left) for dom_leaf in reversed(dom): - push(create_substitution_leaf(leaf, dom_leaf, working_model)) + push(create_substitution_leaf(leaf, dom_leaf, model)) else: - def _get_expression(relational_model, cr, uid, left, right, operator, 
context=None): + def _get_expression(comodel, cr, uid, left, right, operator, context=None): if context is None: context = {} c = context.copy() @@ -995,14 +1017,14 @@ class expression(object): operator = dict_op[operator] elif isinstance(right, list) and operator in ['!=', '=']: # for domain (FIELD,'=',['value1','value2']) operator = dict_op[operator] - res_ids = [x[0] for x in relational_model.name_search(cr, uid, right, [], operator, limit=None, context=c)] + res_ids = [x[0] for x in comodel.name_search(cr, uid, right, [], operator, limit=None, context=c)] if operator in NEGATIVE_TERM_OPERATORS: res_ids.append(False) # TODO this should not be appended if False was in 'right' return left, 'in', res_ids # resolve string-based m2o criterion into IDs if isinstance(right, basestring) or \ right and isinstance(right, (tuple, list)) and all(isinstance(item, basestring) for item in right): - push(create_substitution_leaf(leaf, _get_expression(relational_model, cr, uid, left, right, operator, context=context), working_model)) + push(create_substitution_leaf(leaf, _get_expression(comodel, cr, uid, left, right, operator, context=context), model)) else: # right == [] or right == False and all other cases are handled by __leaf_to_sql() push_result(leaf) @@ -1010,19 +1032,19 @@ class expression(object): # ------------------------------------------------- # OTHER FIELDS # -> datetime fields: manage time part of the datetime - # field when it is not there + # column when it is not there # -> manage translatable fields # ------------------------------------------------- else: - if field._type == 'datetime' and right and len(right) == 10: + if column._type == 'datetime' and right and len(right) == 10: if operator in ('>', '<='): right += ' 23:59:59' else: right += ' 00:00:00' - push(create_substitution_leaf(leaf, (left, operator, right), working_model)) + push(create_substitution_leaf(leaf, (left, operator, right), model)) - elif field.translate and right: + elif column.translate 
and right: need_wildcard = operator in ('like', 'ilike', 'not like', 'not ilike') sql_operator = {'=like': 'like', '=ilike': 'ilike'}.get(operator, operator) if need_wildcard: @@ -1052,16 +1074,16 @@ class expression(object): it.value != '') ) SELECT id FROM temp_irt_current WHERE {name} {operator} {right} order by name - """.format(current_table=working_model._table, quote_left=_quote(left), name=unaccent('name'), + """.format(current_table=model._table, quote_left=_quote(left), name=unaccent('name'), operator=sql_operator, right=instr) params = ( - working_model._name + ',' + left, + model._name + ',' + left, context.get('lang') or 'en_US', 'model', right, ) - push(create_substitution_leaf(leaf, ('id', inselect_operator, (subselect, params)), working_model)) + push(create_substitution_leaf(leaf, ('id', inselect_operator, (subselect, params)), model)) else: push_result(leaf) diff --git a/openerp/osv/fields.py b/openerp/osv/fields.py index 89601ad0709e3e79452a26d7f6f770181ab45120..07e8b02313c373f7e7e1305dc1a4cade2e53a1af 100644 --- a/openerp/osv/fields.py +++ b/openerp/osv/fields.py @@ -81,9 +81,8 @@ class _column(object): _symbol_f = _symbol_set _symbol_set = (_symbol_c, _symbol_f) _symbol_get = None - - # used to hide a certain field type in the list of field types _deprecated = False + copy = True # whether the field is copied by BaseModel.copy() def __init__(self, string='unknown', required=False, readonly=False, domain=None, context=None, states=None, priority=0, change_default=False, size=None, ondelete=None, translate=False, select=False, manual=False, **args): """ @@ -118,7 +117,37 @@ class _column(object): self.deprecated = False # Optional deprecation warning for a in args: setattr(self, a, args[a]) - + + # prefetch only if self._classic_write, not self.groups, and not + # self.deprecated + if not self._classic_write or self.groups or self.deprecated: + self._prefetch = False + + def to_field(self): + """ convert column `self` to a new-style field """ + 
from openerp.fields import Field + return Field.by_type[self._type](**self.to_field_args()) + + def to_field_args(self): + """ return a dictionary with all the arguments to pass to the field """ + items = [ + ('_origin', self), # field interfaces self + ('copy', self.copy), + ('index', self.select), + ('string', self.string), + ('help', self.help), + ('readonly', self.readonly), + ('required', self.required), + ('states', self.states), + ('groups', self.groups), + ('size', self.size), + ('ondelete', self.ondelete), + ('translate', self.translate), + ('domain', self._domain), + ('context', self._context), + ] + return dict(item for item in items if item[1]) + def restart(self): pass @@ -183,8 +212,16 @@ class reference(_column): _classic_read = False # post-process to handle missing target def __init__(self, string, selection, size=None, **args): + if callable(selection): + from openerp import api + selection = api.expected(api.cr_uid_context, selection) _column.__init__(self, string=string, size=size, selection=selection, **args) + def to_field_args(self): + args = super(reference, self).to_field_args() + args['selection'] = self.selection + return args + def get(self, cr, obj, ids, name, uid=None, context=None, values=None): result = {} # copy initial values fetched previously. 
@@ -232,7 +269,6 @@ class char(_column): self._symbol_f = self._symbol_set_char = lambda x: _symbol_set_char(self, x) self._symbol_set = (self._symbol_c, self._symbol_f) - class text(_column): _type = 'text' @@ -270,6 +306,11 @@ class float(_column): # synopsis: digits_compute(cr) -> (precision, scale) self.digits_compute = digits_compute + def to_field_args(self): + args = super(float, self).to_field_args() + args['digits'] = self.digits_compute or self.digits + return args + def digits_change(self, cr): if self.digits_compute: self.digits = self.digits_compute(cr) @@ -331,7 +372,8 @@ class date(_column): if context and context.get('tz'): tz_name = context['tz'] else: - tz_name = model.pool.get('res.users').read(cr, SUPERUSER_ID, uid, ['tz'])['tz'] + user = model.pool['res.users'].browse(cr, SUPERUSER_ID, uid) + tz_name = user.tz if tz_name: try: utc = pytz.timezone('UTC') @@ -419,7 +461,8 @@ class datetime(_column): tz_name = context['tz'] else: registry = openerp.modules.registry.RegistryManager.get(cr.dbname) - tz_name = registry.get('res.users').read(cr, SUPERUSER_ID, uid, ['tz'])['tz'] + user = registry['res.users'].browse(cr, SUPERUSER_ID, uid) + tz_name = user.tz if tz_name: try: utc = pytz.utc @@ -482,9 +525,17 @@ class selection(_column): _type = 'selection' def __init__(self, selection, string='unknown', **args): + if callable(selection): + from openerp import api + selection = api.expected(api.cr_uid_context, selection) _column.__init__(self, string=string, **args) self.selection = selection + def to_field_args(self): + args = super(selection, self).to_field_args() + args['selection'] = self.selection + return args + @classmethod def reify(cls, cr, uid, model, field, context=None): """ Munges the field's ``selection`` attribute as necessary to get @@ -544,31 +595,11 @@ class many2one(_column): self._obj = obj self._auto_join = auto_join - def get(self, cr, obj, ids, name, user=None, context=None, values=None): - if context is None: - context = {} - if 
values is None: - values = {} - - res = {} - for r in values: - res[r['id']] = r[name] - for id in ids: - res.setdefault(id, '') - obj = obj.pool[self._obj] - - # build a dictionary of the form {'id_of_distant_resource': name_of_distant_resource} - # we use uid=1 because the visibility of a many2one field value (just id and name) - # must be the access right of the parent form and not the linked object itself. - records = dict(obj.name_get(cr, SUPERUSER_ID, - list(set([x for x in res.values() if x and isinstance(x, (int,long))])), - context=context)) - for id in res: - if res[id] in records: - res[id] = (res[id], records[res[id]]) - else: - res[id] = False - return res + def to_field_args(self): + args = super(many2one, self).to_field_args() + args['comodel_name'] = self._obj + args['auto_join'] = self._auto_join + return args def set(self, cr, obj_src, id, field, values, user=None, context=None): if not context: @@ -597,7 +628,6 @@ class many2one(_column): def search(self, cr, obj, args, name, value, offset=0, limit=None, uid=None, context=None): return obj.pool[self._obj].search(cr, uid, args+self._domain+[('name', 'like', value)], offset, limit, context=context) - @classmethod def _as_display_name(cls, field, cr, uid, obj, value, context=None): return value[1] if isinstance(value, tuple) else tools.ustr(value) @@ -609,6 +639,9 @@ class one2many(_column): _prefetch = False _type = 'one2many' + # one2many columns are not copied by default + copy = False + def __init__(self, obj, fields_id, string='unknown', limit=None, auto_join=False, **args): _column.__init__(self, string=string, **args) self._obj = obj @@ -618,36 +651,36 @@ class one2many(_column): #one2many can't be used as condition for defaults assert(self.change_default != True) + def to_field_args(self): + args = super(one2many, self).to_field_args() + args['comodel_name'] = self._obj + args['inverse_name'] = self._fields_id + args['auto_join'] = self._auto_join + args['limit'] = self._limit + return args 
+ def get(self, cr, obj, ids, name, user=None, offset=0, context=None, values=None): - if context is None: - context = {} if self._context: - context = context.copy() - context.update(self._context) - if values is None: - values = {} + context = dict(context or {}) + context.update(self._context) - res = {} - for id in ids: - res[id] = [] + res = dict((id, []) for id in ids) + comodel = obj.pool[self._obj].browse(cr, user, [], context) + inverse = self._fields_id domain = self._domain(obj) if callable(self._domain) else self._domain - model = obj.pool[self._obj] - ids2 = model.search(cr, user, domain + [(self._fields_id, 'in', ids)], limit=self._limit, context=context) - if len(ids) != 1: - for r in model._read_flat(cr, user, ids2, [self._fields_id], context=context, load='_classic_write'): - if r[self._fields_id] in res: - res[r[self._fields_id]].append(r['id']) - else: - res[ids[0]] = ids2 + domain = domain + [(inverse, 'in', ids)] + + for record in comodel.search(domain, limit=self._limit): + # Note: record[inverse] can be a record or an integer! 
+ assert int(record[inverse]) in res + res[int(record[inverse])].append(record.id) + return res def set(self, cr, obj, id, field, values, user=None, context=None): result = [] - if not context: - context = {} - if self._context: - context = context.copy() + context = dict(context or {}) context.update(self._context) context['no_store_function'] = True if not values: @@ -705,7 +738,6 @@ class one2many(_column): domain = self._domain(obj) if callable(self._domain) else self._domain return obj.pool[self._obj].name_search(cr, uid, value, domain, operator, context=context,limit=limit) - @classmethod def _as_display_name(cls, field, cr, uid, obj, value, context=None): raise NotImplementedError('One2Many columns should not be used as record name (_rec_name)') @@ -764,6 +796,15 @@ class many2many(_column): self._id2 = id2 self._limit = limit + def to_field_args(self): + args = super(many2many, self).to_field_args() + args['comodel_name'] = self._obj + args['relation'] = self._rel + args['column1'] = self._id1 + args['column2'] = self._id2 + args['limit'] = self._limit + return args + def _sql_names(self, source_model): """Return the SQL names defining the structure of the m2m relationship table @@ -1142,6 +1183,9 @@ class function(_column): _type = 'function' _properties = True + # function fields are not copied by default + copy = False + # # multi: compute several fields in one call # @@ -1157,6 +1201,9 @@ class function(_column): self.digits = args.get('digits', (16,2)) self.digits_compute = args.get('digits_compute', None) + if callable(args.get('selection')): + from openerp import api + self.selection = api.expected(api.cr_uid_context, args['selection']) self._fnct_inv_arg = fnct_inv_arg if not fnct_inv: @@ -1178,25 +1225,26 @@ class function(_column): else: self._prefetch = True - if type == 'float': - self._symbol_c = float._symbol_c - self._symbol_f = float._symbol_f - self._symbol_set = float._symbol_set - - if type == 'boolean': - self._symbol_c = 
boolean._symbol_c - self._symbol_f = boolean._symbol_f - self._symbol_set = boolean._symbol_set - - if type == 'integer': - self._symbol_c = integer._symbol_c - self._symbol_f = integer._symbol_f - self._symbol_set = integer._symbol_set - if type == 'char': self._symbol_c = char._symbol_c self._symbol_f = lambda x: _symbol_set_char(self, x) self._symbol_set = (self._symbol_c, self._symbol_f) + else: + type_class = globals().get(type) + if type_class is not None: + self._symbol_c = type_class._symbol_c + self._symbol_f = type_class._symbol_f + self._symbol_set = type_class._symbol_set + + def to_field_args(self): + args = super(function, self).to_field_args() + if self._type in ('float',): + args['digits'] = self.digits_compute or self.digits + elif self._type in ('selection', 'reference'): + args['selection'] = self.selection + elif self._type in ('many2one', 'one2many', 'many2many'): + args['comodel_name'] = self._obj + return args def digits_change(self, cr): if self._type == 'float': @@ -1227,17 +1275,7 @@ class function(_column): field_type = obj._columns[field]._type new_values = dict(values) - if field_type == "integer": - # integer/long values greater than 2^31-1 are not supported - # in pure XMLRPC, so we have to pass them as floats :-( - # This is not needed for stored fields and non-functional integer - # fields, as their values are constrained by the database backend - # to the same 32bits signed int limit. 
- for rid, value in values.iteritems(): - if value and value > xmlrpclib.MAXINT: - new_values[rid] = __builtin__.float(value) - - elif field_type == 'binary': + if field_type == 'binary': if context.get('bin_size'): # client requests only the size of binary fields for rid, value in values.iteritems(): @@ -1248,16 +1286,6 @@ class function(_column): if value: new_values[rid] = sanitize_binary_value(value) - elif field_type == "many2one" and hasattr(obj._columns[field], 'relation'): - # make the result a tuple if it is not already one - if all(isinstance(value, (int, long)) for value in values.values() if value): - obj_model = obj.pool[obj._columns[field].relation] - ids = [i for i in values.values() if i] - dict_names = dict(obj_model.name_get(cr, SUPERUSER_ID, ids, context)) - for rid, value in values.iteritems(): - if value: - new_values[rid] = (value, dict_names[value]) - return new_values def get(self, cr, obj, ids, name, uid=False, context=None, values=None): @@ -1321,45 +1349,38 @@ class related(function): field = '.'.join(self._arg) return map(lambda x: (field, x[1], x[2]), domain) - def _fnct_write(self,obj,cr, uid, ids, field_name, values, args, context=None): + def _fnct_write(self, obj, cr, uid, ids, field_name, values, args, context=None): if isinstance(ids, (int, long)): ids = [ids] - for record in obj.browse(cr, uid, ids, context=context): + for instance in obj.browse(cr, uid, ids, context=context): # traverse all fields except the last one for field in self.arg[:-1]: - record = record[field] or False - if not record: - break - elif isinstance(record, list): - # record is the result of a one2many or many2many field - record = record[0] - if record: - # write on the last field - record.write({self.arg[-1]: values}) + instance = instance[field][:1] + if instance: + # write on the last field of the target record + instance.write({self.arg[-1]: values}) def _fnct_read(self, obj, cr, uid, ids, field_name, args, context=None): res = {} for record in 
obj.browse(cr, SUPERUSER_ID, ids, context=context): value = record - for field in self.arg: - if isinstance(value, list): - value = value[0] - value = value[field] or False - if not value: - break - res[record.id] = value + # traverse all fields except the last one + for field in self.arg[:-1]: + value = value[field][:1] + # read the last field on the target record + res[record.id] = value[self.arg[-1]] if self._type == 'many2one': - # res[id] is a browse_record or False; convert it to (id, name) or False. + # res[id] is a recordset; convert it to (id, name) or False. # Perform name_get as root, as seeing the name of a related object depends on # access right of source document, not target, so user may not have access. value_ids = list(set(value.id for value in res.itervalues() if value)) value_name = dict(obj.pool[self._obj].name_get(cr, SUPERUSER_ID, value_ids, context=context)) - res = dict((id, value and (value.id, value_name[value.id])) for id, value in res.iteritems()) + res = dict((id, bool(value) and (value.id, value_name[value.id])) for id, value in res.iteritems()) elif self._type in ('one2many', 'many2many'): - # res[id] is a list of browse_record or False; convert it to a list of ids - res = dict((id, value and map(int, value) or []) for id, value in res.iteritems()) + # res[id] is a recordset; convert it to a list of ids + res = dict((id, value.ids) for id, value in res.iteritems()) return res @@ -1513,173 +1534,51 @@ class serialized(_column): # TODO: review completly this class for speed improvement class property(function): - def _get_default(self, obj, cr, uid, prop_name, context=None): - return self._get_defaults(obj, cr, uid, [prop_name], context=None)[prop_name] - - def _get_defaults(self, obj, cr, uid, prop_names, context=None): - """Get the default values for ``prop_names´´ property fields (result of ir.property.get() function for res_id = False). 
- - :param list of string prop_names: list of name of property fields for those we want the default value - :return: map of property field names to their default value - :rtype: dict - """ - prop = obj.pool.get('ir.property') - res = {} - for prop_name in prop_names: - res[prop_name] = prop.get(cr, uid, prop_name, obj._name, context=context) - return res + def to_field_args(self): + args = super(property, self).to_field_args() + args['company_dependent'] = True + return args - def _get_by_id(self, obj, cr, uid, prop_name, ids, context=None): - prop = obj.pool.get('ir.property') - vids = [obj._name + ',' + str(oid) for oid in ids] - domain = [('fields_id.model', '=', obj._name), ('fields_id.name', 'in', prop_name)] - if context and context.get('company_id'): - domain += [('company_id', '=', context.get('company_id'))] - if vids: - domain = [('res_id', 'in', vids)] + domain - return prop.search(cr, uid, domain, context=context) - - # TODO: to rewrite more clean - def _fnct_write(self, obj, cr, uid, id, prop_name, id_val, obj_dest, context=None): - if context is None: - context = {} + def _fnct_search(self, tobj, cr, uid, obj, name, domain, context=None): + ir_property = obj.pool['ir.property'] + result = [] + for field, operator, value in domain: + result += ir_property.search_multi(cr, uid, name, tobj._name, operator, value, context=context) + return result - def_id = self._field_get(cr, uid, obj._name, prop_name) - company = obj.pool.get('res.company') - cid = company._company_default_get(cr, uid, obj._name, def_id, context=context) - # TODO for trunk: add new parameter company_id to _get_by_id method - context_company = dict(context, company_id=cid) - nids = self._get_by_id(obj, cr, uid, [prop_name], [id], context_company) - if nids: - cr.execute('DELETE FROM ir_property WHERE id IN %s', (tuple(nids),)) - - default_val = self._get_default(obj, cr, uid, prop_name, context) - - property_create = False - if isinstance(default_val, (openerp.osv.orm.browse_record, - 
openerp.osv.orm.browse_null)): - if default_val.id != id_val: - property_create = True - elif default_val != id_val: - property_create = True - - if property_create: - propdef = obj.pool.get('ir.model.fields').browse(cr, uid, def_id, - context=context) - prop = obj.pool.get('ir.property') - return prop.create(cr, uid, { - 'name': propdef.name, - 'value': id_val, - 'res_id': obj._name+','+str(id), - 'company_id': cid, - 'fields_id': def_id, - 'type': self._type, - }, context=context) - return False + def _fnct_write(self, obj, cr, uid, id, prop_name, value, obj_dest, context=None): + ir_property = obj.pool['ir.property'] + ir_property.set_multi(cr, uid, prop_name, obj._name, {id: value}, context=context) + return True def _fnct_read(self, obj, cr, uid, ids, prop_names, obj_dest, context=None): - prop = obj.pool.get('ir.property') - # get the default values (for res_id = False) for the property fields - default_val = self._get_defaults(obj, cr, uid, prop_names, context) - - # build the dictionary that will be returned - res = {} - for id in ids: - res[id] = default_val.copy() + ir_property = obj.pool['ir.property'] + res = {id: {} for id in ids} for prop_name in prop_names: - property_field = obj._all_columns.get(prop_name).column - property_destination_obj = property_field._obj if property_field._type == 'many2one' else False - # If the property field is a m2o field, we will append the id of the value to name_get_ids - # in order to make a name_get in batch for all the ids needed. 
- name_get_ids = {} - for id in ids: - # get the result of ir.property.get() for this res_id and save it in res if it's existing - obj_reference = obj._name + ',' + str(id) - value = prop.get(cr, uid, prop_name, obj._name, res_id=obj_reference, context=context) - if value: + column = obj._all_columns[prop_name].column + values = ir_property.get_multi(cr, uid, prop_name, obj._name, ids, context=context) + if column._type == 'many2one': + for id, value in values.iteritems(): + res[id][prop_name] = value.name_get()[0] if value else False + else: + for id, value in values.iteritems(): res[id][prop_name] = value - # Check existence as root (as seeing the name of a related - # object depends on access right of source document, - # not target, so user may not have access) in order to avoid - # pointing on an unexisting record. - if property_destination_obj: - if res[id][prop_name] and obj.pool[property_destination_obj].exists(cr, SUPERUSER_ID, res[id][prop_name].id): - name_get_ids[id] = res[id][prop_name].id - else: - res[id][prop_name] = False - if property_destination_obj: - # name_get as root (as seeing the name of a related - # object depends on access right of source document, - # not target, so user may not have access.) 
- name_get_values = dict(obj.pool[property_destination_obj].name_get(cr, SUPERUSER_ID, name_get_ids.values(), context=context)) - # the property field is a m2o, we need to return a tuple with (id, name) - for k, v in name_get_ids.iteritems(): - if res[k][prop_name]: - res[k][prop_name] = (v , name_get_values.get(v)) - return res - - def _field_get(self, cr, uid, model_name, prop): - if not self.field_id.get(cr.dbname): - cr.execute('SELECT id \ - FROM ir_model_fields \ - WHERE name=%s AND model=%s', (prop, model_name)) - res = cr.fetchone() - self.field_id[cr.dbname] = res and res[0] - return self.field_id[cr.dbname] + return res def __init__(self, **args): - self.field_id = {} if 'view_load' in args: _logger.warning("view_load attribute is deprecated on ir.fields. Args: %r", args) obj = 'relation' in args and args['relation'] or '' - function.__init__(self, self._fnct_read, False, self._fnct_write, obj=obj, multi='properties', **args) - - def restart(self): - self.field_id = {} - - -def field_to_dict(model, cr, user, field, context=None): - """ Return a dictionary representation of a field. - - The string, help, and selection attributes (if any) are untranslated. This - representation is the one returned by fields_get() (fields_get() will do - the translation). 
- - """ - - res = {'type': field._type} - # some attributes for m2m/function field are added as debug info only - if isinstance(field, function): - res['function'] = field._fnct and field._fnct.func_name or False - res['store'] = field.store - if isinstance(field.store, dict): - res['store'] = str(field.store) - res['fnct_search'] = field._fnct_search and field._fnct_search.func_name or False - res['fnct_inv'] = field._fnct_inv and field._fnct_inv.func_name or False - res['fnct_inv_arg'] = field._fnct_inv_arg or False - if isinstance(field, many2many): - (table, col1, col2) = field._sql_names(model) - res['m2m_join_columns'] = [col1, col2] - res['m2m_join_table'] = table - for arg in ('string', 'readonly', 'states', 'size', 'group_operator', 'required', - 'change_default', 'translate', 'help', 'select', 'selectable', 'groups', - 'deprecated', 'digits', 'invisible', 'filters'): - if getattr(field, arg, None): - res[arg] = getattr(field, arg) - - if hasattr(field, 'selection'): - res['selection'] = selection.reify(cr, user, model, field, context=context) - if res['type'] in ('one2many', 'many2many', 'many2one'): - res['relation'] = field._obj - res['domain'] = field._domain(model) if callable(field._domain) else field._domain - res['context'] = field._context - - if isinstance(field, one2many): - res['relation_field'] = field._fields_id - - return res + super(property, self).__init__( + fnct=self._fnct_read, + fnct_inv=self._fnct_write, + fnct_search=self._fnct_search, + obj=obj, + multi='properties', + **args + ) class column_info(object): @@ -1722,5 +1621,5 @@ class column_info(object): self.__class__.__name__, self.name, self.column, self.parent_model, self.parent_column, self.original_parent) -# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: +# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: diff --git a/openerp/osv/orm.py b/openerp/osv/orm.py index 7fe2cf31a40cb5212b1499d3f29d640dbc621739..db1da378bbe108d83f725fe71d9db528720de3e0 
100644 --- a/openerp/osv/orm.py +++ b/openerp/osv/orm.py @@ -1,84 +1,31 @@ -# -*- coding: utf-8 -*- -############################################################################## -# -# OpenERP, Open Source Management Solution -# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>). -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU Affero General Public License as -# published by the Free Software Foundation, either version 3 of the -# License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Affero General Public License for more details. -# -# You should have received a copy of the GNU Affero General Public License -# along with this program. If not, see <http://www.gnu.org/licenses/>. -# -############################################################################## - - -""" - Object relational mapping to database (postgresql) module - * Hierarchical structure - * Constraints consistency, validations - * Object meta Data depends on its status - * Optimised processing by complex query (multiple actions at once) - * Default fields value - * Permissions optimisation - * Persistant object: DB postgresql - * Datas conversions - * Multi-level caching system - * 2 different inheritancies - * Fields: - - classicals (varchar, integer, boolean, ...) 
- - relations (one2many, many2one, many2many) - - functions - -""" - -import calendar -import collections -import copy -import datetime -import itertools -import logging -import operator -import pickle -import pytz -import re import simplejson -import time -import traceback -import types - -import babel.dates -import dateutil.relativedelta -import psycopg2 from lxml import etree -import fields -import openerp -import openerp.tools as tools -from openerp.tools.config import config -from openerp.tools.misc import CountingStream, DEFAULT_SERVER_DATETIME_FORMAT, DEFAULT_SERVER_DATE_FORMAT -from openerp.tools.safe_eval import safe_eval as eval -from openerp.tools.translate import _ -from openerp import SUPERUSER_ID -from query import Query +from ..exceptions import except_orm +from ..models import ( + MetaModel, + BaseModel, + Model, TransientModel, AbstractModel, + + MAGIC_COLUMNS, + LOG_ACCESS_COLUMNS, +) -_logger = logging.getLogger(__name__) -_schema = logging.getLogger(__name__ + '.schema') +# extra definitions for backward compatibility +browse_record_list = BaseModel -# List of etree._Element subclasses that we choose to ignore when parsing XML. 
-from openerp.tools import SKIPPED_ELEMENT_TYPES +class browse_record(object): + """ Pseudo-class for testing record instances """ + class __metaclass__(type): + def __instancecheck__(self, inst): + return isinstance(inst, BaseModel) and len(inst) <= 1 -regex_order = re.compile('^( *([a-z0-9:_]+|"[a-z0-9:_]+")( *desc| *asc)?( *, *|))+$', re.I) -regex_object_name = re.compile(r'^[a-z0-9_.]+$') +class browse_null(object): + """ Pseudo-class for testing null instances """ + class __metaclass__(type): + def __instancecheck__(self, inst): + return isinstance(inst, BaseModel) and not inst -AUTOINIT_RECALCULATE_STORED_FIELDS = 1000 def transfer_field_to_modifiers(field, modifiers): default_values = {} @@ -198,5063 +145,3 @@ def modifiers_tests(): test_modifiers({}, '{}') test_modifiers({"invisible": True}, '{"invisible": true}') test_modifiers({"invisible": False}, '{}') - - -def check_object_name(name): - """ Check if the given name is a valid openerp object name. - - The _name attribute in osv and osv_memory object is subject to - some restrictions. This function returns True or False whether - the given name is allowed or not. - - TODO: this is an approximation. The goal in this approximation - is to disallow uppercase characters (in some places, we quote - table/column names and in other not, which leads to this kind - of errors: - - psycopg2.ProgrammingError: relation "xxx" does not exist). - - The same restriction should apply to both osv and osv_memory - objects for consistency. - - """ - if regex_object_name.match(name) is None: - return False - return True - -def raise_on_invalid_object_name(name): - if not check_object_name(name): - msg = "The _name attribute %s is not valid." 
% name - _logger.error(msg) - raise except_orm('ValueError', msg) - -POSTGRES_CONFDELTYPES = { - 'RESTRICT': 'r', - 'NO ACTION': 'a', - 'CASCADE': 'c', - 'SET NULL': 'n', - 'SET DEFAULT': 'd', -} - -def intersect(la, lb): - return filter(lambda x: x in lb, la) - -def fix_import_export_id_paths(fieldname): - """ - Fixes the id fields in import and exports, and splits field paths - on '/'. - - :param str fieldname: name of the field to import/export - :return: split field name - :rtype: list of str - """ - fixed_db_id = re.sub(r'([^/])\.id', r'\1/.id', fieldname) - fixed_external_id = re.sub(r'([^/]):id', r'\1/id', fixed_db_id) - return fixed_external_id.split('/') - -class except_orm(Exception): - def __init__(self, name, value): - self.name = name - self.value = value - self.args = (name, value) - -class BrowseRecordError(Exception): - pass - -class browse_null(object): - """ Readonly python database object browser - """ - - def __init__(self): - self.id = False - - def __getitem__(self, name): - return None - - def __getattr__(self, name): - return None # XXX: return self ? - - def __int__(self): - return False - - def __str__(self): - return '' - - def __nonzero__(self): - return False - - def __unicode__(self): - return u'' - - def __iter__(self): - raise NotImplementedError("Iteration is not allowed on %s" % self) - - -# -# TODO: execute an object method on browse_record_list -# -class browse_record_list(list): - """ Collection of browse objects - - Such an instance will be returned when doing a ``browse([ids..])`` - and will be iterable, yielding browse() objects - """ - - def __init__(self, lst, context=None): - if not context: - context = {} - super(browse_record_list, self).__init__(lst) - self.context = context - - -class browse_record(object): - """ An object that behaves like a row of an object's table. - It has attributes after the columns of the corresponding object. 
- - Examples:: - - uobj = pool.get('res.users') - user_rec = uobj.browse(cr, uid, 104) - name = user_rec.name - """ - - def __init__(self, cr, uid, id, table, cache, context=None, - list_class=browse_record_list, fields_process=None): - """ - :param table: the browsed object (inherited from orm) - :param dict cache: a dictionary of model->field->data to be shared - across browse objects, thus reducing the SQL - read()s. It can speed up things a lot, but also be - disastrous if not discarded after write()/unlink() - operations - :param dict context: dictionary with an optional context - """ - if fields_process is None: - fields_process = {} - if context is None: - context = {} - self._list_class = list_class - self._cr = cr - self._uid = uid - self._id = id - self._table = table # deprecated, use _model! - self._model = table - self._table_name = self._table._name - self.__logger = logging.getLogger('openerp.osv.orm.browse_record.' + self._table_name) - self._context = context - self._fields_process = fields_process - - cache.setdefault(table._name, {}) - self._data = cache[table._name] - -# if not (id and isinstance(id, (int, long,))): -# raise BrowseRecordError(_('Wrong ID for the browse record, got %r, expected an integer.') % (id,)) -# if not table.exists(cr, uid, id, context): -# raise BrowseRecordError(_('Object %s does not exists') % (self,)) - - if id not in self._data: - self._data[id] = {'id': id} - - self._cache = cache - - def __getitem__(self, name): - if name == 'id': - return self._id - - if name not in self._data[self._id]: - # build the list of fields we will fetch - - # fetch the definition of the field which was asked for - if name in self._table._columns: - col = self._table._columns[name] - elif name in self._table._inherit_fields: - col = self._table._inherit_fields[name][2] - elif hasattr(self._table, str(name)): - attr = getattr(self._table, name) - if isinstance(attr, (types.MethodType, types.LambdaType, types.FunctionType)): - def 
function_proxy(*args, **kwargs): - if 'context' not in kwargs and self._context: - kwargs.update(context=self._context) - return attr(self._cr, self._uid, [self._id], *args, **kwargs) - return function_proxy - else: - return attr - else: - error_msg = "Field '%s' does not exist in object '%s'" % (name, self) - self.__logger.warning(error_msg) - if self.__logger.isEnabledFor(logging.DEBUG): - self.__logger.debug(''.join(traceback.format_stack())) - raise KeyError(error_msg) - - prefetchable = lambda f: f._classic_write and f._prefetch and not f.groups and not f.deprecated - - # if the field is a classic one or a many2one, we'll fetch all classic and many2one fields - if prefetchable(col): - # gen the list of "local" (ie not inherited) fields which are classic or many2one - field_filter = lambda x: prefetchable(x[1]) - fields_to_fetch = filter(field_filter, self._table._columns.items()) - # gen the list of inherited fields - inherits = map(lambda x: (x[0], x[1][2]), self._table._inherit_fields.items()) - # complete the field list with the inherited fields which are classic or many2one - fields_to_fetch += filter(field_filter, inherits) - # otherwise we fetch only that field - else: - fields_to_fetch = [(name, col)] - - ids = filter(lambda id: name not in self._data[id], self._data.keys()) - # read the results - field_names = map(lambda x: x[0], fields_to_fetch) - try: - field_values = self._table.read(self._cr, self._uid, ids, field_names, context=self._context, load="_classic_write") - except (openerp.exceptions.AccessError, except_orm): - if len(ids) == 1: - raise - # prefetching attempt failed, perhaps we're violating ACL restrictions involuntarily - _logger.info('Prefetching attempt for fields %s on %s failed for ids %s, re-trying just for id %s', field_names, self._model._name, ids, self._id) - ids = [self._id] - field_values = self._table.read(self._cr, self._uid, ids, field_names, context=self._context, load="_classic_write") - - # TODO: improve this, very 
slow for reports - if self._fields_process: - lang = self._context.get('lang', 'en_US') or 'en_US' - lang_obj_ids = self.pool.get('res.lang').search(self._cr, self._uid, [('code', '=', lang)]) - if not lang_obj_ids: - raise Exception(_('Language with code "%s" is not defined in your system !\nDefine it through the Administration menu.') % (lang,)) - lang_obj = self.pool.get('res.lang').browse(self._cr, self._uid, lang_obj_ids[0]) - - for field_name, field_column in fields_to_fetch: - if field_column._type in self._fields_process: - for result_line in field_values: - result_line[field_name] = self._fields_process[field_column._type](result_line[field_name]) - if result_line[field_name]: - result_line[field_name].set_value(self._cr, self._uid, result_line[field_name], self, field_column, lang_obj) - - if not field_values: - # Where did those ids come from? Perhaps old entries in ir_model_dat? - _logger.warning("No field_values found for ids %s in %s", ids, self) - raise KeyError('Field %s not found in %s'%(name, self)) - # create browse records for 'remote' objects - for result_line in field_values: - new_data = {} - for field_name, field_column in fields_to_fetch: - if field_column._type == 'many2one': - if result_line[field_name]: - obj = self._table.pool[field_column._obj] - if isinstance(result_line[field_name], (list, tuple)): - value = result_line[field_name][0] - else: - value = result_line[field_name] - if value: - # FIXME: this happen when a _inherits object - # overwrite a field of it parent. Need - # testing to be sure we got the right - # object and not the parent one. - if not isinstance(value, browse_record): - if obj is None: - # In some cases the target model is not available yet, so we must ignore it, - # which is safe in most cases, this value will just be loaded later when needed. 
- # This situation can be caused by custom fields that connect objects with m2o without - # respecting module dependencies, causing relationships to be connected to soon when - # the target is not loaded yet. - continue - new_data[field_name] = browse_record(self._cr, - self._uid, value, obj, self._cache, - context=self._context, - list_class=self._list_class, - fields_process=self._fields_process) - else: - new_data[field_name] = value - else: - new_data[field_name] = browse_null() - else: - new_data[field_name] = browse_null() - elif field_column._type in ('one2many', 'many2many') and len(result_line[field_name]): - new_data[field_name] = self._list_class( - (browse_record(self._cr, self._uid, id, self._table.pool.get(field_column._obj), - self._cache, context=self._context, list_class=self._list_class, - fields_process=self._fields_process) - for id in result_line[field_name]), - context=self._context) - elif field_column._type == 'reference': - if result_line[field_name]: - if isinstance(result_line[field_name], browse_record): - new_data[field_name] = result_line[field_name] - else: - ref_obj, ref_id = result_line[field_name].split(',') - ref_id = long(ref_id) - if ref_id: - obj = self._table.pool[ref_obj] - new_data[field_name] = browse_record(self._cr, self._uid, ref_id, obj, self._cache, context=self._context, list_class=self._list_class, fields_process=self._fields_process) - else: - new_data[field_name] = browse_null() - else: - new_data[field_name] = browse_null() - else: - new_data[field_name] = result_line[field_name] - self._data[result_line['id']].update(new_data) - - if not name in self._data[self._id]: - # How did this happen? Could be a missing model due to custom fields used too soon, see above. 
- self.__logger.error("Fields to fetch: %s, Field values: %s", field_names, field_values) - self.__logger.error("Cached: %s, Table: %s", self._data[self._id], self._table) - raise KeyError(_('Unknown attribute %s in %s ') % (name, self)) - return self._data[self._id][name] - - def __getattr__(self, name): - try: - return self[name] - except KeyError, e: - import sys - exc_info = sys.exc_info() - raise AttributeError, "Got %r while trying to get attribute %s on a %s record." % (e, name, self._table._name), exc_info[2] - - def __contains__(self, name): - return (name in self._table._columns) or (name in self._table._inherit_fields) or hasattr(self._table, name) - - def __iter__(self): - raise NotImplementedError("Iteration is not allowed on %s" % self) - - def __hasattr__(self, name): - return name in self - - def __int__(self): - return self._id - - def __str__(self): - return "browse_record(%s, %s)" % (self._table_name, self._id) - - def __eq__(self, other): - if not isinstance(other, browse_record): - return False - return (self._table_name, self._id) == (other._table_name, other._id) - - def __ne__(self, other): - if not isinstance(other, browse_record): - return True - return (self._table_name, self._id) != (other._table_name, other._id) - - # we need to define __unicode__ even though we've already defined __str__ - # because we have overridden __getattr__ - def __unicode__(self): - return unicode(str(self)) - - def __hash__(self): - return hash((self._table_name, self._id)) - - __repr__ = __str__ - - def refresh(self): - """Force refreshing this browse_record's data and all the data of the - records that belong to the same cache, by emptying the cache completely, - preserving only the record identifiers (for prefetching optimizations). 
- """ - for model, model_cache in self._cache.iteritems(): - # only preserve the ids of the records that were in the cache - cached_ids = dict([(i, {'id': i}) for i in model_cache.keys()]) - self._cache[model].clear() - self._cache[model].update(cached_ids) - -def pg_varchar(size=0): - """ Returns the VARCHAR declaration for the provided size: - - * If no size (or an empty or negative size is provided) return an - 'infinite' VARCHAR - * Otherwise return a VARCHAR(n) - - :type int size: varchar size, optional - :rtype: str - """ - if size: - if not isinstance(size, int): - raise TypeError("VARCHAR parameter should be an int, got %s" - % type(size)) - if size > 0: - return 'VARCHAR(%d)' % size - return 'VARCHAR' - -FIELDS_TO_PGTYPES = { - fields.boolean: 'bool', - fields.integer: 'int4', - fields.text: 'text', - fields.html: 'text', - fields.date: 'date', - fields.datetime: 'timestamp', - fields.binary: 'bytea', - fields.many2one: 'int4', - fields.serialized: 'text', -} - -def get_pg_type(f, type_override=None): - """ - :param fields._column f: field to get a Postgres type for - :param type type_override: use the provided type for dispatching instead of the field's own type - :returns: (postgres_identification_type, postgres_type_specification) - :rtype: (str, str) - """ - field_type = type_override or type(f) - - if field_type in FIELDS_TO_PGTYPES: - pg_type = (FIELDS_TO_PGTYPES[field_type], FIELDS_TO_PGTYPES[field_type]) - elif issubclass(field_type, fields.float): - if f.digits: - pg_type = ('numeric', 'NUMERIC') - else: - pg_type = ('float8', 'DOUBLE PRECISION') - elif issubclass(field_type, (fields.char, fields.reference)): - pg_type = ('varchar', pg_varchar(f.size)) - elif issubclass(field_type, fields.selection): - if (isinstance(f.selection, list) and isinstance(f.selection[0][0], int))\ - or getattr(f, 'size', None) == -1: - pg_type = ('int4', 'INTEGER') - else: - pg_type = ('varchar', pg_varchar(getattr(f, 'size', None))) - elif issubclass(field_type, 
fields.function): - if f._type == 'selection': - pg_type = ('varchar', pg_varchar()) - else: - pg_type = get_pg_type(f, getattr(fields, f._type)) - else: - _logger.warning('%s type not supported!', field_type) - pg_type = None - - return pg_type - - -class MetaModel(type): - """ Metaclass for the Model. - - This class is used as the metaclass for the Model class to discover - the models defined in a module (i.e. without instanciating them). - If the automatic discovery is not needed, it is possible to set the - model's _register attribute to False. - - """ - - module_to_models = {} - - def __init__(self, name, bases, attrs): - if not self._register: - self._register = True - super(MetaModel, self).__init__(name, bases, attrs) - return - - # The (OpenERP) module name can be in the `openerp.addons` namespace - # or not. For instance module `sale` can be imported as - # `openerp.addons.sale` (the good way) or `sale` (for backward - # compatibility). - module_parts = self.__module__.split('.') - if len(module_parts) > 2 and module_parts[0] == 'openerp' and \ - module_parts[1] == 'addons': - module_name = self.__module__.split('.')[2] - else: - module_name = self.__module__.split('.')[0] - if not hasattr(self, '_module'): - self._module = module_name - - # Remember which models to instanciate for this module. - if not self._custom: - self.module_to_models.setdefault(self._module, []).append(self) - - -# Definition of log access columns, automatically added to models if -# self._log_access is True -LOG_ACCESS_COLUMNS = { - 'create_uid': 'INTEGER REFERENCES res_users ON DELETE SET NULL', - 'create_date': 'TIMESTAMP', - 'write_uid': 'INTEGER REFERENCES res_users ON DELETE SET NULL', - 'write_date': 'TIMESTAMP' -} -# special columns automatically created by the ORM -MAGIC_COLUMNS = ['id'] + LOG_ACCESS_COLUMNS.keys() - -class BaseModel(object): - """ Base class for OpenERP models. 
- - OpenERP models are created by inheriting from this class' subclasses: - - * Model: for regular database-persisted models - * TransientModel: for temporary data, stored in the database but automatically - vaccuumed every so often - * AbstractModel: for abstract super classes meant to be shared by multiple - _inheriting classes (usually Models or TransientModels) - - The system will later instantiate the class once per database (on - which the class' module is installed). - - To create a class that should not be instantiated, the _register class attribute - may be set to False. - """ - __metaclass__ = MetaModel - _auto = True # create database backend - _register = False # Set to false if the model shouldn't be automatically discovered. - _name = None - _columns = {} - _constraints = [] - _custom = False - _defaults = {} - _rec_name = None - _parent_name = 'parent_id' - _parent_store = False - _parent_order = False - _date_name = 'date' - _order = 'id' - _sequence = None - _description = None - _needaction = False - - # dict of {field:method}, with method returning the (name_get of records, {id: fold}) - # to include in the _read_group, if grouped on this field - _group_by_full = {} - - # Transience - _transient = False # True in a TransientModel - - # structure: - # { 'parent_model': 'm2o_field', ... } - _inherits = {} - - # Mapping from inherits'd field name to triple (m, r, f, n) where m is the - # model from which it is inherits'd, r is the (local) field towards m, f - # is the _column object itself, and n is the original (i.e. top-most) - # parent model. - # Example: - # { 'field_name': ('parent_model', 'm2o_field_to_reach_parent', - # field_column_obj, origina_parent_model), ... } - _inherit_fields = {} - - # Mapping field name/column_info object - # This is similar to _inherit_fields but: - # 1. includes self fields, - # 2. uses column_info instead of a triple. 
- _all_columns = {} - - _table = None - _log_create = False - _sql_constraints = [] - _protected = ['read', 'write', 'create', 'default_get', 'perm_read', 'unlink', 'fields_get', 'fields_view_get', 'search', 'name_get', 'distinct_field_get', 'name_search', 'copy', 'import_data', 'search_count', 'exists'] - - CONCURRENCY_CHECK_FIELD = '__last_update' - - def log(self, cr, uid, id, message, secondary=False, context=None): - return _logger.warning("log() is deprecated. Please use OpenChatter notification system instead of the res.log mechanism.") - - def view_init(self, cr, uid, fields_list, context=None): - """Override this method to do specific things when a view on the object is opened.""" - pass - - def _field_create(self, cr, context=None): - """ Create entries in ir_model_fields for all the model's fields. - - If necessary, also create an entry in ir_model, and if called from the - modules loading scheme (by receiving 'module' in the context), also - create entries in ir_model_data (for the model and the fields). - - - create an entry in ir_model (if there is not already one), - - create an entry in ir_model_data (if there is not already one, and if - 'module' is in the context), - - update ir_model_fields with the fields found in _columns - (TODO there is some redundancy as _columns is updated from - ir_model_fields in __init__). 
- - """ - if context is None: - context = {} - cr.execute("SELECT id FROM ir_model WHERE model=%s", (self._name,)) - if not cr.rowcount: - cr.execute('SELECT nextval(%s)', ('ir_model_id_seq',)) - model_id = cr.fetchone()[0] - cr.execute("INSERT INTO ir_model (id,model, name, info,state) VALUES (%s, %s, %s, %s, %s)", (model_id, self._name, self._description, self.__doc__, 'base')) - else: - model_id = cr.fetchone()[0] - if 'module' in context: - name_id = 'model_'+self._name.replace('.', '_') - cr.execute('select * from ir_model_data where name=%s and module=%s', (name_id, context['module'])) - if not cr.rowcount: - cr.execute("INSERT INTO ir_model_data (name,date_init,date_update,module,model,res_id) VALUES (%s, (now() at time zone 'UTC'), (now() at time zone 'UTC'), %s, %s, %s)", \ - (name_id, context['module'], 'ir.model', model_id) - ) - - cr.execute("SELECT * FROM ir_model_fields WHERE model=%s", (self._name,)) - cols = {} - for rec in cr.dictfetchall(): - cols[rec['name']] = rec - - ir_model_fields_obj = self.pool.get('ir.model.fields') - - # sparse field should be created at the end, as it depends on its serialized field already existing - model_fields = sorted(self._columns.items(), key=lambda x: 1 if x[1]._type == 'sparse' else 0) - for (k, f) in model_fields: - vals = { - 'model_id': model_id, - 'model': self._name, - 'name': k, - 'field_description': f.string, - 'ttype': f._type, - 'relation': f._obj or '', - 'select_level': tools.ustr(f.select or 0), - 'readonly': (f.readonly and 1) or 0, - 'required': (f.required and 1) or 0, - 'selectable': (f.selectable and 1) or 0, - 'translate': (f.translate and 1) or 0, - 'relation_field': f._fields_id if isinstance(f, fields.one2many) else '', - 'serialization_field_id': None, - } - if getattr(f, 'serialization_field', None): - # resolve link to serialization_field if specified by name - serialization_field_id = ir_model_fields_obj.search(cr, SUPERUSER_ID, [('model','=',vals['model']), ('name', '=', 
f.serialization_field)]) - if not serialization_field_id: - raise except_orm(_('Error'), _("Serialization field `%s` not found for sparse field `%s`!") % (f.serialization_field, k)) - vals['serialization_field_id'] = serialization_field_id[0] - - # When its a custom field,it does not contain f.select - if context.get('field_state', 'base') == 'manual': - if context.get('field_name', '') == k: - vals['select_level'] = context.get('select', '0') - #setting value to let the problem NOT occur next time - elif k in cols: - vals['select_level'] = cols[k]['select_level'] - - if k not in cols: - cr.execute('select nextval(%s)', ('ir_model_fields_id_seq',)) - id = cr.fetchone()[0] - vals['id'] = id - cr.execute("""INSERT INTO ir_model_fields ( - id, model_id, model, name, field_description, ttype, - relation,state,select_level,relation_field, translate, serialization_field_id - ) VALUES ( - %s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s - )""", ( - id, vals['model_id'], vals['model'], vals['name'], vals['field_description'], vals['ttype'], - vals['relation'], 'base', - vals['select_level'], vals['relation_field'], bool(vals['translate']), vals['serialization_field_id'] - )) - if 'module' in context: - name1 = 'field_' + self._table + '_' + k - cr.execute("select name from ir_model_data where name=%s", (name1,)) - if cr.fetchone(): - name1 = name1 + "_" + str(id) - cr.execute("INSERT INTO ir_model_data (name,date_init,date_update,module,model,res_id) VALUES (%s, (now() at time zone 'UTC'), (now() at time zone 'UTC'), %s, %s, %s)", \ - (name1, context['module'], 'ir.model.fields', id) - ) - else: - for key, val in vals.items(): - if cols[k][key] != vals[key]: - cr.execute('update ir_model_fields set field_description=%s where model=%s and name=%s', (vals['field_description'], vals['model'], vals['name'])) - cr.execute("""UPDATE ir_model_fields SET - model_id=%s, field_description=%s, ttype=%s, relation=%s, - select_level=%s, readonly=%s ,required=%s, selectable=%s, relation_field=%s, 
translate=%s, serialization_field_id=%s - WHERE - model=%s AND name=%s""", ( - vals['model_id'], vals['field_description'], vals['ttype'], - vals['relation'], - vals['select_level'], bool(vals['readonly']), bool(vals['required']), bool(vals['selectable']), vals['relation_field'], bool(vals['translate']), vals['serialization_field_id'], vals['model'], vals['name'] - )) - break - - # - # Goal: try to apply inheritance at the instanciation level and - # put objects in the pool var - # - @classmethod - def create_instance(cls, pool, cr): - """ Instanciate a given model. - - This class method instanciates the class of some model (i.e. a class - deriving from osv or osv_memory). The class might be the class passed - in argument or, if it inherits from another class, a class constructed - by combining the two classes. - - The ``attributes`` argument specifies which parent class attributes - have to be combined. - - TODO: the creation of the combined class is repeated at each call of - this method. This is probably unnecessary. - - """ - attributes = ['_columns', '_defaults', '_inherits', '_constraints', - '_sql_constraints'] - - parent_names = getattr(cls, '_inherit', None) - if parent_names: - if isinstance(parent_names, (str, unicode)): - name = cls._name or parent_names - parent_names = [parent_names] - else: - name = cls._name - if not name: - raise TypeError('_name is mandatory in case of multiple inheritance') - - for parent_name in ((type(parent_names)==list) and parent_names or [parent_names]): - if parent_name not in pool: - raise TypeError('The model "%s" specifies an unexisting parent class "%s"\n' - 'You may need to add a dependency on the parent class\' module.' 
% (name, parent_name)) - parent_model = pool[parent_name] - if not getattr(cls, '_original_module', None) and name == parent_model._name: - cls._original_module = parent_model._original_module - parent_class = parent_model.__class__ - nattr = {} - for s in attributes: - new = copy.copy(getattr(parent_model, s, {})) - if s == '_columns': - # Don't _inherit custom fields. - for c in new.keys(): - if new[c].manual: - del new[c] - if hasattr(new, 'update'): - new.update(cls.__dict__.get(s, {})) - elif s=='_constraints': - for c in cls.__dict__.get(s, []): - exist = False - for c2 in range(len(new)): - #For _constraints, we should check field and methods as well - if new[c2][2]==c[2] and (new[c2][0] == c[0] \ - or getattr(new[c2][0],'__name__', True) == \ - getattr(c[0],'__name__', False)): - # If new class defines a constraint with - # same function name, we let it override - # the old one. - - new[c2] = c - exist = True - break - if not exist: - new.append(c) - else: - new.extend(cls.__dict__.get(s, [])) - nattr[s] = new - - # Keep links to non-inherited constraints, e.g. useful when exporting translations - nattr['_local_constraints'] = cls.__dict__.get('_constraints', []) - nattr['_local_sql_constraints'] = cls.__dict__.get('_sql_constraints', []) - - cls = type(name, (cls, parent_class), dict(nattr, _register=False)) - else: - cls._local_constraints = getattr(cls, '_constraints', []) - cls._local_sql_constraints = getattr(cls, '_sql_constraints', []) - - if not getattr(cls, '_original_module', None): - cls._original_module = cls._module - obj = object.__new__(cls) - - if hasattr(obj, '_columns'): - # float fields are registry-dependent (digit attribute). Duplicate them to avoid issues. - for c, f in obj._columns.items(): - if f._type == 'float': - obj._columns[c] = copy.copy(f) - - obj.__init__(pool, cr) - return obj - - def __new__(cls): - """Register this model. 
- - This doesn't create an instance but simply register the model - as being part of the module where it is defined. - - """ - - - # Set the module name (e.g. base, sale, accounting, ...) on the class. - module = cls.__module__.split('.')[0] - if not hasattr(cls, '_module'): - cls._module = module - - # Record this class in the list of models to instantiate for this module, - # managed by the metaclass. - module_model_list = MetaModel.module_to_models.setdefault(cls._module, []) - if cls not in module_model_list: - if not cls._custom: - module_model_list.append(cls) - - # Since we don't return an instance here, the __init__ - # method won't be called. - return None - - def __init__(self, pool, cr): - """ Initialize a model and make it part of the given registry. - - - copy the stored fields' functions in the osv_pool, - - update the _columns with the fields found in ir_model_fields, - - ensure there is a many2one for each _inherits'd parent, - - update the children's _columns, - - give a chance to each field to initialize itself. - - """ - pool.add(self._name, self) - self.pool = pool - - if not self._name and not hasattr(self, '_inherit'): - name = type(self).__name__.split('.')[0] - msg = "The class %s has to have a _name attribute" % name - - _logger.error(msg) - raise except_orm('ValueError', msg) - - if not self._description: - self._description = self._name - if not self._table: - self._table = self._name.replace('.', '_') - - if not hasattr(self, '_log_access'): - # If _log_access is not specified, it is the same value as _auto. 
- self._log_access = getattr(self, "_auto", True) - - self._columns = self._columns.copy() - for store_field in self._columns: - f = self._columns[store_field] - if hasattr(f, 'digits_change'): - f.digits_change(cr) - def not_this_field(stored_func): - x, y, z, e, f, l = stored_func - return x != self._name or y != store_field - self.pool._store_function[self._name] = filter(not_this_field, self.pool._store_function.get(self._name, [])) - if not isinstance(f, fields.function): - continue - if not f.store: - continue - sm = f.store - if sm is True: - sm = {self._name: (lambda self, cr, uid, ids, c={}: ids, None, f.priority, None)} - for object, aa in sm.items(): - if len(aa) == 4: - (fnct, fields2, order, length) = aa - elif len(aa) == 3: - (fnct, fields2, order) = aa - length = None - else: - raise except_orm('Error', - ('Invalid function definition %s in object %s !\nYou must use the definition: store={object:(fnct, fields, priority, time length)}.' % (store_field, self._name))) - self.pool._store_function.setdefault(object, []) - t = (self._name, store_field, fnct, tuple(fields2) if fields2 else None, order, length) - if not t in self.pool._store_function[object]: - self.pool._store_function[object].append((self._name, store_field, fnct, tuple(fields2) if fields2 else None, order, length)) - self.pool._store_function[object].sort(lambda x, y: cmp(x[4], y[4])) - - for (key, _, msg) in self._sql_constraints: - self.pool._sql_error[self._table+'_'+key] = msg - - # Load manual fields - - # Check the query is already done for all modules of if we need to - # do it ourselves. 
- if self.pool.fields_by_model is not None: - manual_fields = self.pool.fields_by_model.get(self._name, []) - else: - cr.execute('SELECT * FROM ir_model_fields WHERE model=%s AND state=%s', (self._name, 'manual')) - manual_fields = cr.dictfetchall() - for field in manual_fields: - if field['name'] in self._columns: - continue - attrs = { - 'string': field['field_description'], - 'required': bool(field['required']), - 'readonly': bool(field['readonly']), - 'domain': eval(field['domain']) if field['domain'] else None, - 'size': field['size'] or None, - 'ondelete': field['on_delete'], - 'translate': (field['translate']), - 'manual': True, - '_prefetch': False, - #'select': int(field['select_level']) - } - - if field['serialization_field_id']: - cr.execute('SELECT name FROM ir_model_fields WHERE id=%s', (field['serialization_field_id'],)) - attrs.update({'serialization_field': cr.fetchone()[0], 'type': field['ttype']}) - if field['ttype'] in ['many2one', 'one2many', 'many2many']: - attrs.update({'relation': field['relation']}) - self._columns[field['name']] = fields.sparse(**attrs) - elif field['ttype'] == 'selection': - self._columns[field['name']] = fields.selection(eval(field['selection']), **attrs) - elif field['ttype'] == 'reference': - self._columns[field['name']] = fields.reference(selection=eval(field['selection']), **attrs) - elif field['ttype'] == 'many2one': - self._columns[field['name']] = fields.many2one(field['relation'], **attrs) - elif field['ttype'] == 'one2many': - self._columns[field['name']] = fields.one2many(field['relation'], field['relation_field'], **attrs) - elif field['ttype'] == 'many2many': - _rel1 = field['relation'].replace('.', '_') - _rel2 = field['model'].replace('.', '_') - _rel_name = 'x_%s_%s_%s_rel' % (_rel1, _rel2, field['name']) - self._columns[field['name']] = fields.many2many(field['relation'], _rel_name, 'id1', 'id2', **attrs) - else: - self._columns[field['name']] = getattr(fields, field['ttype'])(**attrs) - - 
self._inherits_check() - self._inherits_reload() - if not self._sequence: - self._sequence = self._table + '_id_seq' - for k in self._defaults: - assert (k in self._columns) or (k in self._inherit_fields), 'Default function defined in %s but field %s does not exist !' % (self._name, k,) - for f in self._columns: - self._columns[f].restart() - - # Transience - if self.is_transient(): - self._transient_check_count = 0 - self._transient_max_count = config.get('osv_memory_count_limit') - self._transient_max_hours = config.get('osv_memory_age_limit') - assert self._log_access, "TransientModels must have log_access turned on, "\ - "in order to implement their access rights policy" - - # Validate rec_name - if self._rec_name is not None: - assert self._rec_name in self._all_columns.keys() + ['id'], "Invalid rec_name %s for model %s" % (self._rec_name, self._name) - else: - self._rec_name = 'name' - - - def __export_row(self, cr, uid, row, fields, raw_data=False, context=None): - if context is None: - context = {} - - def check_type(field_type): - if field_type == 'float': - return 0.0 - elif field_type == 'integer': - return 0 - elif field_type == 'boolean': - return 'False' - return '' - - def selection_field(in_field): - col_obj = self.pool[in_field.keys()[0]] - if f[i] in col_obj._columns.keys(): - return col_obj._columns[f[i]] - elif f[i] in col_obj._inherits.keys(): - selection_field(col_obj._inherits) - else: - return False - - def _get_xml_id(self, cr, uid, r): - model_data = self.pool.get('ir.model.data') - data_ids = model_data.search(cr, uid, [('model', '=', r._model._name), ('res_id', '=', r['id'])]) - if len(data_ids): - d = model_data.read(cr, uid, data_ids, ['name', 'module'])[0] - if d['module']: - r = '%s.%s' % (d['module'], d['name']) - else: - r = d['name'] - else: - postfix = 0 - while True: - n = r._model._table+'_'+str(r['id']) + (postfix and ('_'+str(postfix)) or '' ) - if not model_data.search(cr, uid, [('name', '=', n)]): - break - postfix += 1 - 
model_data.create(cr, SUPERUSER_ID, { - 'name': n, - 'model': r._model._name, - 'res_id': r['id'], - 'module': '__export__', - }) - r = '__export__.'+n - return r - - lines = [] - data = map(lambda x: '', range(len(fields))) - done = [] - for fpos in range(len(fields)): - f = fields[fpos] - if f: - r = row - i = 0 - while i < len(f): - cols = False - if f[i] == '.id': - r = r['id'] - elif f[i] == 'id': - r = _get_xml_id(self, cr, uid, r) - else: - r = r[f[i]] - # To display external name of selection field when its exported - if f[i] in self._columns.keys(): - cols = self._columns[f[i]] - elif f[i] in self._inherit_fields.keys(): - cols = selection_field(self._inherits) - if cols and cols._type == 'selection': - sel_list = cols.selection - if r and type(sel_list) == type([]): - r = [x[1] for x in sel_list if r==x[0]] - r = r and r[0] or False - if not r: - if f[i] in self._columns: - r = check_type(self._columns[f[i]]._type) - elif f[i] in self._inherit_fields: - r = check_type(self._inherit_fields[f[i]][2]._type) - data[fpos] = r or False - break - if isinstance(r, (browse_record_list, list)): - first = True - fields2 = map(lambda x: (x[:i+1]==f[:i+1] and x[i+1:]) \ - or [], fields) - if fields2 in done: - if [x for x in fields2 if x]: - break - done.append(fields2) - if cols and cols._type=='many2many' and len(fields[fpos])>(i+1) and (fields[fpos][i+1]=='id'): - data[fpos] = ','.join([_get_xml_id(self, cr, uid, x) for x in r]) - break - - for row2 in r: - lines2 = row2._model.__export_row(cr, uid, row2, fields2, context=context) - if first: - for fpos2 in range(len(fields)): - if lines2 and lines2[0][fpos2]: - data[fpos2] = lines2[0][fpos2] - if not data[fpos]: - dt = '' - for rr in r: - name_relation = self.pool[rr._table_name]._rec_name - if isinstance(rr[name_relation], browse_record): - rr = rr[name_relation] - rr_name = self.pool[rr._table_name].name_get(cr, uid, [rr.id], context=context) - rr_name = rr_name and rr_name[0] and rr_name[0][1] or '' - dt += 
tools.ustr(rr_name or '') + ',' - data[fpos] = dt[:-1] - break - lines += lines2[1:] - first = False - else: - lines += lines2 - break - i += 1 - - if i == len(f): - if isinstance(r, browse_record): - r = self.pool[r._table_name].name_get(cr, uid, [r.id], context=context) - r = r and r[0] and r[0][1] or '' - if raw_data and cols and cols._type in ('integer', 'boolean', 'float'): - data[fpos] = r - elif raw_data and cols and cols._type == 'date': - data[fpos] = datetime.datetime.strptime(r, tools.DEFAULT_SERVER_DATE_FORMAT).date() - elif raw_data and cols and cols._type == 'datetime': - data[fpos] = datetime.datetime.strptime(r, tools.DEFAULT_SERVER_DATETIME_FORMAT) - else: - data[fpos] = tools.ustr(r or '') - return [data] + lines - - def export_data(self, cr, uid, ids, fields_to_export, raw_data=False, context=None): - """ - Export fields for selected objects - - :param cr: database cursor - :param uid: current user id - :param ids: list of ids - :param fields_to_export: list of fields - :param raw_data: True to return value in fields type, False for string values - :param context: context arguments, like lang, time zone - :rtype: dictionary with a *datas* matrix - - This method is used when exporting data via client menu - - """ - if context is None: - context = {} - cols = self._columns.copy() - for f in self._inherit_fields: - cols.update({f: self._inherit_fields[f][2]}) - fields_to_export = map(fix_import_export_id_paths, fields_to_export) - datas = [] - for row in self.browse(cr, uid, ids, context): - datas += self.__export_row(cr, uid, row, fields_to_export, raw_data=raw_data, context=context) - return {'datas': datas} - - def import_data(self, cr, uid, fields, datas, mode='init', current_module='', noupdate=False, context=None, filename=None): - """ - .. deprecated:: 7.0 - Use :meth:`~load` instead - - Import given data in given module - - This method is used when importing data via client menu. 
- - Example of fields to import for a sale.order:: - - .id, (=database_id) - partner_id, (=name_search) - order_line/.id, (=database_id) - order_line/name, - order_line/product_id/id, (=xml id) - order_line/price_unit, - order_line/product_uom_qty, - order_line/product_uom/id (=xml_id) - - This method returns a 4-tuple with the following structure:: - - (return_code, errored_resource, error_message, unused) - - * The first item is a return code, it is ``-1`` in case of - import error, or the last imported row number in case of success - * The second item contains the record data dict that failed to import - in case of error, otherwise it's 0 - * The third item contains an error message string in case of error, - otherwise it's 0 - * The last item is currently unused, with no specific semantics - - :param fields: list of fields to import - :param datas: data to import - :param mode: 'init' or 'update' for record creation - :param current_module: module name - :param noupdate: flag for record creation - :param filename: optional file to store partial import state for recovery - :returns: 4-tuple in the form (return_code, errored_resource, error_message, unused) - :rtype: (int, dict or 0, str or 0, str or 0) - """ - context = dict(context) if context is not None else {} - context['_import_current_module'] = current_module - - fields = map(fix_import_export_id_paths, fields) - ir_model_data_obj = self.pool.get('ir.model.data') - - def log(m): - if m['type'] == 'error': - raise Exception(m['message']) - - if config.get('import_partial') and filename: - with open(config.get('import_partial'), 'rb') as partial_import_file: - data = pickle.load(partial_import_file) - position = data.get(filename, 0) - - position = 0 - try: - for res_id, xml_id, res, info in self._convert_records(cr, uid, - self._extract_records(cr, uid, fields, datas, - context=context, log=log), - context=context, log=log): - ir_model_data_obj._update(cr, uid, self._name, - current_module, res, mode=mode, 
xml_id=xml_id, - noupdate=noupdate, res_id=res_id, context=context) - position = info.get('rows', {}).get('to', 0) + 1 - if config.get('import_partial') and filename and (not (position%100)): - with open(config.get('import_partial'), 'rb') as partial_import: - data = pickle.load(partial_import) - data[filename] = position - with open(config.get('import_partial'), 'wb') as partial_import: - pickle.dump(data, partial_import) - if context.get('defer_parent_store_computation'): - self._parent_store_compute(cr) - cr.commit() - except Exception, e: - cr.rollback() - return -1, {}, 'Line %d : %s' % (position + 1, tools.ustr(e)), '' - - if context.get('defer_parent_store_computation'): - self._parent_store_compute(cr) - return position, 0, 0, 0 - - def load(self, cr, uid, fields, data, context=None): - """ - Attempts to load the data matrix, and returns a list of ids (or - ``False`` if there was an error and no id could be generated) and a - list of messages. - - The ids are those of the records created and saved (in database), in - the same order they were extracted from the file. 
They can be passed - directly to :meth:`~read` - - :param fields: list of fields to import, at the same index as the corresponding data - :type fields: list(str) - :param data: row-major matrix of data to import - :type data: list(list(str)) - :param dict context: - :returns: {ids: list(int)|False, messages: [Message]} - """ - cr.execute('SAVEPOINT model_load') - messages = [] - - fields = map(fix_import_export_id_paths, fields) - ModelData = self.pool['ir.model.data'].clear_caches() - - fg = self.fields_get(cr, uid, context=context) - - mode = 'init' - current_module = '' - noupdate = False - - ids = [] - for id, xid, record, info in self._convert_records(cr, uid, - self._extract_records(cr, uid, fields, data, - context=context, log=messages.append), - context=context, log=messages.append): - try: - cr.execute('SAVEPOINT model_load_save') - except psycopg2.InternalError, e: - # broken transaction, exit and hope the source error was - # already logged - if not any(message['type'] == 'error' for message in messages): - messages.append(dict(info, type='error',message= - u"Unknown database error: '%s'" % e)) - break - try: - ids.append(ModelData._update(cr, uid, self._name, - current_module, record, mode=mode, xml_id=xid, - noupdate=noupdate, res_id=id, context=context)) - cr.execute('RELEASE SAVEPOINT model_load_save') - except psycopg2.Warning, e: - messages.append(dict(info, type='warning', message=str(e))) - cr.execute('ROLLBACK TO SAVEPOINT model_load_save') - except psycopg2.Error, e: - messages.append(dict( - info, type='error', - **PGERROR_TO_OE[e.pgcode](self, fg, info, e))) - # Failed to write, log to messages, rollback savepoint (to - # avoid broken transaction) and keep going - cr.execute('ROLLBACK TO SAVEPOINT model_load_save') - if any(message['type'] == 'error' for message in messages): - cr.execute('ROLLBACK TO SAVEPOINT model_load') - ids = False - return {'ids': ids, 'messages': messages} - def _extract_records(self, cr, uid, fields_, data, - 
context=None, log=lambda a: None): - """ Generates record dicts from the data sequence. - - The result is a generator of dicts mapping field names to raw - (unconverted, unvalidated) values. - - For relational fields, if sub-fields were provided the value will be - a list of sub-records - - The following sub-fields may be set on the record (by key): - * None is the name_get for the record (to use with name_create/name_search) - * "id" is the External ID for the record - * ".id" is the Database ID for the record - """ - columns = dict((k, v.column) for k, v in self._all_columns.iteritems()) - # Fake columns to avoid special cases in extractor - columns[None] = fields.char('rec_name') - columns['id'] = fields.char('External ID') - columns['.id'] = fields.integer('Database ID') - - # m2o fields can't be on multiple lines so exclude them from the - # is_relational field rows filter, but special-case it later on to - # be handled with relational fields (as it can have subfields) - is_relational = lambda field: columns[field]._type in ('one2many', 'many2many', 'many2one') - get_o2m_values = itemgetter_tuple( - [index for index, field in enumerate(fields_) - if columns[field[0]]._type == 'one2many']) - get_nono2m_values = itemgetter_tuple( - [index for index, field in enumerate(fields_) - if columns[field[0]]._type != 'one2many']) - # Checks if the provided row has any non-empty non-relational field - def only_o2m_values(row, f=get_nono2m_values, g=get_o2m_values): - return any(g(row)) and not any(f(row)) - - index = 0 - while True: - if index >= len(data): return - - row = data[index] - # copy non-relational fields to record dict - record = dict((field[0], value) - for field, value in itertools.izip(fields_, row) - if not is_relational(field[0])) - - # Get all following rows which have relational values attached to - # the current record (no non-relational values) - record_span = itertools.takewhile( - only_o2m_values, itertools.islice(data, index + 1, None)) - # stitch 
record row back on for relational fields - record_span = list(itertools.chain([row], record_span)) - for relfield in set( - field[0] for field in fields_ - if is_relational(field[0])): - column = columns[relfield] - # FIXME: how to not use _obj without relying on fields_get? - Model = self.pool[column._obj] - - # get only cells for this sub-field, should be strictly - # non-empty, field path [None] is for name_get column - indices, subfields = zip(*((index, field[1:] or [None]) - for index, field in enumerate(fields_) - if field[0] == relfield)) - - # return all rows which have at least one value for the - # subfields of relfield - relfield_data = filter(any, map(itemgetter_tuple(indices), record_span)) - record[relfield] = [subrecord - for subrecord, _subinfo in Model._extract_records( - cr, uid, subfields, relfield_data, - context=context, log=log)] - - yield record, {'rows': { - 'from': index, - 'to': index + len(record_span) - 1 - }} - index += len(record_span) - def _convert_records(self, cr, uid, records, - context=None, log=lambda a: None): - """ Converts records from the source iterable (recursive dicts of - strings) into forms which can be written to the database (via - self.create or (ir.model.data)._update) - - :returns: a list of triplets of (id, xid, record) - :rtype: list((int|None, str|None, dict)) - """ - if context is None: context = {} - Converter = self.pool['ir.fields.converter'] - columns = dict((k, v.column) for k, v in self._all_columns.iteritems()) - Translation = self.pool['ir.translation'] - field_names = dict( - (f, (Translation._get_source(cr, uid, self._name + ',' + f, 'field', - context.get('lang')) - or column.string)) - for f, column in columns.iteritems()) - - convert = Converter.for_model(cr, uid, self, context=context) - - def _log(base, field, exception): - type = 'warning' if isinstance(exception, Warning) else 'error' - # logs the logical (not human-readable) field name for automated - # processing of response, but injects 
human readable in message - record = dict(base, type=type, field=field, - message=unicode(exception.args[0]) % base) - if len(exception.args) > 1 and exception.args[1]: - record.update(exception.args[1]) - log(record) - - stream = CountingStream(records) - for record, extras in stream: - dbid = False - xid = False - # name_get/name_create - if None in record: pass - # xid - if 'id' in record: - xid = record['id'] - # dbid - if '.id' in record: - try: - dbid = int(record['.id']) - except ValueError: - # in case of overridden id column - dbid = record['.id'] - if not self.search(cr, uid, [('id', '=', dbid)], context=context): - log(dict(extras, - type='error', - record=stream.index, - field='.id', - message=_(u"Unknown database identifier '%s'") % dbid)) - dbid = False - - converted = convert(record, lambda field, err:\ - _log(dict(extras, record=stream.index, field=field_names[field]), field, err)) - - yield dbid, xid, converted, dict(extras, record=stream.index) - - def _validate(self, cr, uid, ids, context=None): - context = context or {} - lng = context.get('lang') - trans = self.pool.get('ir.translation') - error_msgs = [] - for constraint in self._constraints: - fun, msg, fields = constraint - try: - # We don't pass around the context here: validation code - # must always yield the same results. 
- valid = fun(self, cr, uid, ids) - extra_error = None - except Exception, e: - _logger.debug('Exception while validating constraint', exc_info=True) - valid = False - extra_error = tools.ustr(e) - if not valid: - # Check presence of __call__ directly instead of using - # callable() because it will be deprecated as of Python 3.0 - if hasattr(msg, '__call__'): - translated_msg = msg(self, cr, uid, ids, context=context) - if isinstance(translated_msg, tuple): - translated_msg = translated_msg[0] % translated_msg[1] - else: - translated_msg = trans._get_source(cr, uid, self._name, 'constraint', lng, msg) - if extra_error: - translated_msg += "\n\n%s\n%s" % (_('Error details:'), extra_error) - error_msgs.append( - _("The field(s) `%s` failed against a constraint: %s") % (', '.join(fields), translated_msg) - ) - if error_msgs: - raise except_orm('ValidateError', '\n'.join(error_msgs)) - - def default_get(self, cr, uid, fields_list, context=None): - """ - Returns default values for the fields in fields_list. - - :param fields_list: list of fields to get the default values for (example ['field1', 'field2',]) - :type fields_list: list - :param context: optional context dictionary - it may contains keys for specifying certain options - like ``context_lang`` (language) or ``context_tz`` (timezone) to alter the results of the call. - It may contain keys in the form ``default_XXX`` (where XXX is a field name), to set - or override a default value for a field. - A special ``bin_size`` boolean flag may also be passed in the context to request the - value of all fields.binary columns to be returned as the size of the binary instead of its - contents. This can also be selectively overriden by passing a field-specific flag - in the form ``bin_size_XXX: True/False`` where ``XXX`` is the name of the field. - Note: The ``bin_size_XXX`` form is new in OpenERP v6.0. 
- :return: dictionary of the default values (set on the object model class, through user preferences, or in the context) - """ - # trigger view init hook - self.view_init(cr, uid, fields_list, context) - - if not context: - context = {} - defaults = {} - - # get the default values for the inherited fields - for t in self._inherits.keys(): - defaults.update(self.pool[t].default_get(cr, uid, fields_list, context)) - - # get the default values defined in the object - for f in fields_list: - if f in self._defaults: - if callable(self._defaults[f]): - defaults[f] = self._defaults[f](self, cr, uid, context) - else: - defaults[f] = self._defaults[f] - - fld_def = ((f in self._columns) and self._columns[f]) \ - or ((f in self._inherit_fields) and self._inherit_fields[f][2]) \ - or False - - if isinstance(fld_def, fields.property): - property_obj = self.pool.get('ir.property') - prop_value = property_obj.get(cr, uid, f, self._name, context=context) - if prop_value: - if isinstance(prop_value, (browse_record, browse_null)): - defaults[f] = prop_value.id - else: - defaults[f] = prop_value - else: - if f not in defaults: - defaults[f] = False - - # get the default values set by the user and override the default - # values defined in the object - ir_values_obj = self.pool.get('ir.values') - res = ir_values_obj.get(cr, uid, 'default', False, [self._name]) - for id, field, field_value in res: - if field in fields_list: - fld_def = (field in self._columns) and self._columns[field] or self._inherit_fields[field][2] - if fld_def._type == 'many2one': - obj = self.pool[fld_def._obj] - if not obj.search(cr, uid, [('id', '=', field_value or False)]): - continue - if fld_def._type == 'many2many': - obj = self.pool[fld_def._obj] - field_value2 = [] - for i in range(len(field_value or [])): - if not obj.search(cr, uid, [('id', '=', - field_value[i])]): - continue - field_value2.append(field_value[i]) - field_value = field_value2 - if fld_def._type == 'one2many': - obj = 
self.pool[fld_def._obj] - field_value2 = [] - for i in range(len(field_value or [])): - field_value2.append({}) - for field2 in field_value[i]: - if field2 in obj._columns.keys() and obj._columns[field2]._type == 'many2one': - obj2 = self.pool[obj._columns[field2]._obj] - if not obj2.search(cr, uid, - [('id', '=', field_value[i][field2])]): - continue - elif field2 in obj._inherit_fields.keys() and obj._inherit_fields[field2][2]._type == 'many2one': - obj2 = self.pool[obj._inherit_fields[field2][2]._obj] - if not obj2.search(cr, uid, - [('id', '=', field_value[i][field2])]): - continue - # TODO add test for many2many and one2many - field_value2[i][field2] = field_value[i][field2] - field_value = field_value2 - defaults[field] = field_value - - # get the default values from the context - for key in context or {}: - if key.startswith('default_') and (key[8:] in fields_list): - defaults[key[8:]] = context[key] - return defaults - - def fields_get_keys(self, cr, user, context=None): - res = self._columns.keys() - # TODO I believe this loop can be replace by - # res.extend(self._inherit_fields.key()) - for parent in self._inherits: - res.extend(self.pool[parent].fields_get_keys(cr, user, context)) - return res - - def _rec_name_fallback(self, cr, uid, context=None): - rec_name = self._rec_name - if rec_name not in self._columns: - rec_name = self._columns.keys()[0] if len(self._columns.keys()) > 0 else "id" - return rec_name - - # - # Overload this method if you need a window title which depends on the context - # - def view_header_get(self, cr, user, view_id=None, view_type='form', context=None): - return False - - def user_has_groups(self, cr, uid, groups, context=None): - """Return true if the user is at least member of one of the groups - in groups_str. Typically used to resolve ``groups`` attribute - in view and model definitions. 
- - :param str groups: comma-separated list of fully-qualified group - external IDs, e.g.: ``base.group_user,base.group_system`` - :return: True if the current user is a member of one of the - given groups - """ - return any(self.pool['res.users'].has_group(cr, uid, group_ext_id) - for group_ext_id in groups.split(',')) - - def _get_default_form_view(self, cr, user, context=None): - """ Generates a default single-line form view using all fields - of the current model except the m2m and o2m ones. - - :param cr: database cursor - :param int user: user id - :param dict context: connection context - :returns: a form view as an lxml document - :rtype: etree._Element - """ - view = etree.Element('form', string=self._description) - group = etree.SubElement(view, 'group', col="4") - # TODO it seems fields_get can be replaced by _all_columns (no need for translation) - for field, descriptor in self.fields_get(cr, user, context=context).iteritems(): - if descriptor['type'] in ('one2many', 'many2many'): - continue - etree.SubElement(group, 'field', name=field) - if descriptor['type'] == 'text': - etree.SubElement(group, 'newline') - return view - - def _get_default_search_view(self, cr, user, context=None): - """ Generates a single-field search view, based on _rec_name. - - :param cr: database cursor - :param int user: user id - :param dict context: connection context - :returns: a tree view as an lxml document - :rtype: etree._Element - """ - view = etree.Element('search', string=self._description) - etree.SubElement(view, 'field', name=self._rec_name_fallback(cr, user, context)) - return view - - def _get_default_tree_view(self, cr, user, context=None): - """ Generates a single-field tree view, based on _rec_name. 
- - :param cr: database cursor - :param int user: user id - :param dict context: connection context - :returns: a tree view as an lxml document - :rtype: etree._Element - """ - view = etree.Element('tree', string=self._description) - etree.SubElement(view, 'field', name=self._rec_name_fallback(cr, user, context)) - return view - - def _get_default_calendar_view(self, cr, user, context=None): - """ Generates a default calendar view by trying to infer - calendar fields from a number of pre-set attribute names - - :param cr: database cursor - :param int user: user id - :param dict context: connection context - :returns: a calendar view - :rtype: etree._Element - """ - def set_first_of(seq, in_, to): - """Sets the first value of ``seq`` also found in ``in_`` to - the ``to`` attribute of the view being closed over. - - Returns whether it's found a suitable value (and set it on - the attribute) or not - """ - for item in seq: - if item in in_: - view.set(to, item) - return True - return False - - view = etree.Element('calendar', string=self._description) - etree.SubElement(view, 'field', name=self._rec_name_fallback(cr, user, context)) - - if self._date_name not in self._columns: - date_found = False - for dt in ['date', 'date_start', 'x_date', 'x_date_start']: - if dt in self._columns: - self._date_name = dt - date_found = True - break - - if not date_found: - raise except_orm(_('Invalid Object Architecture!'), _("Insufficient fields for Calendar View!")) - view.set('date_start', self._date_name) - - set_first_of(["user_id", "partner_id", "x_user_id", "x_partner_id"], - self._columns, 'color') - - if not set_first_of(["date_stop", "date_end", "x_date_stop", "x_date_end"], - self._columns, 'date_stop'): - if not set_first_of(["date_delay", "planned_hours", "x_date_delay", "x_planned_hours"], - self._columns, 'date_delay'): - raise except_orm( - _('Invalid Object Architecture!'), - _("Insufficient fields to generate a Calendar View for %s, missing a date_stop or a 
date_delay" % self._name)) - - return view - - def fields_view_get(self, cr, uid, view_id=None, view_type='form', context=None, toolbar=False, submenu=False): - """ - Get the detailed composition of the requested view like fields, model, view architecture - - :param view_id: id of the view or None - :param view_type: type of the view to return if view_id is None ('form', tree', ...) - :param toolbar: true to include contextual actions - :param submenu: deprecated - :return: dictionary describing the composition of the requested view (including inherited views and extensions) - :raise AttributeError: - * if the inherited view has unknown position to work with other than 'before', 'after', 'inside', 'replace' - * if some tag other than 'position' is found in parent view - :raise Invalid ArchitectureError: if there is view type other than form, tree, calendar, search etc defined on the structure - """ - if context is None: - context = {} - View = self.pool['ir.ui.view'] - - result = { - 'model': self._name, - 'field_parent': False, - } - - # try to find a view_id if none provided - if not view_id: - # <view_type>_view_ref in context can be used to overrride the default view - view_ref_key = view_type + '_view_ref' - view_ref = context.get(view_ref_key) - if view_ref: - if '.' in view_ref: - module, view_ref = view_ref.split('.', 1) - cr.execute("SELECT res_id FROM ir_model_data WHERE model='ir.ui.view' AND module=%s AND name=%s", (module, view_ref)) - view_ref_res = cr.fetchone() - if view_ref_res: - view_id = view_ref_res[0] - else: - _logger.warning('%r requires a fully-qualified external id (got: %r for model %s). 
' - 'Please use the complete `module.view_id` form instead.', view_ref_key, view_ref, - self._name) - - if not view_id: - # otherwise try to find the lowest priority matching ir.ui.view - view_id = View.default_view(cr, uid, self._name, view_type, context=context) - - # context for post-processing might be overriden - ctx = context - if view_id: - # read the view with inherited views applied - root_view = View.read_combined(cr, uid, view_id, fields=['id', 'name', 'field_parent', 'type', 'model', 'arch'], context=context) - result['arch'] = root_view['arch'] - result['name'] = root_view['name'] - result['type'] = root_view['type'] - result['view_id'] = root_view['id'] - result['field_parent'] = root_view['field_parent'] - # override context fro postprocessing - if root_view.get('model') != self._name: - ctx = dict(context, base_model_name=root_view.get('model')) - else: - # fallback on default views methods if no ir.ui.view could be found - try: - get_func = getattr(self, '_get_default_%s_view' % view_type) - arch_etree = get_func(cr, uid, context) - result['arch'] = etree.tostring(arch_etree, encoding='utf-8') - result['type'] = view_type - result['name'] = 'default' - except AttributeError: - raise except_orm(_('Invalid Architecture!'), _("No default view of type '%s' could be found !") % view_type) - - # Apply post processing, groups and modifiers etc... 
- xarch, xfields = View.postprocess_and_fields(cr, uid, self._name, etree.fromstring(result['arch']), view_id, context=ctx) - result['arch'] = xarch - result['fields'] = xfields - - # Add related action information if aksed - if toolbar: - toclean = ('report_sxw_content', 'report_rml_content', 'report_sxw', 'report_rml', 'report_sxw_content_data', 'report_rml_content_data') - def clean(x): - x = x[2] - for key in toclean: - x.pop(key, None) - return x - ir_values_obj = self.pool.get('ir.values') - resprint = ir_values_obj.get(cr, uid, 'action', 'client_print_multi', [(self._name, False)], False, context) - resaction = ir_values_obj.get(cr, uid, 'action', 'client_action_multi', [(self._name, False)], False, context) - resrelate = ir_values_obj.get(cr, uid, 'action', 'client_action_relate', [(self._name, False)], False, context) - resaction = [clean(action) for action in resaction if view_type == 'tree' or not action[2].get('multi')] - resprint = [clean(print_) for print_ in resprint if view_type == 'tree' or not print_[2].get('multi')] - #When multi="True" set it will display only in More of the list view - resrelate = [clean(action) for action in resrelate - if (action[2].get('multi') and view_type == 'tree') or (not action[2].get('multi') and view_type == 'form')] - - for x in itertools.chain(resprint, resaction, resrelate): - x['string'] = x['name'] - - result['toolbar'] = { - 'print': resprint, - 'action': resaction, - 'relate': resrelate - } - return result - - def get_formview_id(self, cr, uid, id, context=None): - """ Return an view id to open the document with. This method is meant to be - overridden in addons that want to give specific view ids for example. - - :param int id: id of the document to open - """ - return False - - def get_formview_action(self, cr, uid, id, context=None): - """ Return an action to open the document. This method is meant to be - overridden in addons that want to give specific view ids for example. 
- - :param int id: id of the document to open - """ - view_id = self.get_formview_id(cr, uid, id, context=context) - return { - 'type': 'ir.actions.act_window', - 'res_model': self._name, - 'view_type': 'form', - 'view_mode': 'form', - 'views': [(view_id, 'form')], - 'target': 'current', - 'res_id': id, - } - - def _view_look_dom_arch(self, cr, uid, node, view_id, context=None): - return self.pool['ir.ui.view'].postprocess_and_fields( - cr, uid, self._name, node, view_id, context=context) - - def search_count(self, cr, user, args, context=None): - res = self.search(cr, user, args, context=context, count=True) - if isinstance(res, list): - return len(res) - return res - - def search(self, cr, user, args, offset=0, limit=None, order=None, context=None, count=False): - """ - Search for records based on a search domain. - - :param cr: database cursor - :param user: current user id - :param args: list of tuples specifying the search domain [('field_name', 'operator', value), ...]. Pass an empty list to match all records. - :param offset: optional number of results to skip in the returned values (default: 0) - :param limit: optional max number of records to return (default: **None**) - :param order: optional columns to sort by (default: self._order=id ) - :param context: optional context arguments, like lang, time zone - :type context: dictionary - :param count: optional (default: **False**), if **True**, returns only the number of records matching the criteria, not their ids - :return: id or list of ids of records matching the criteria - :rtype: integer or list of integers - :raise AccessError: * if user tries to bypass access rules for read on the requested object. 
- - **Expressing a search domain (args)** - - Each tuple in the search domain needs to have 3 elements, in the form: **('field_name', 'operator', value)**, where: - - * **field_name** must be a valid name of field of the object model, possibly following many-to-one relationships using dot-notation, e.g 'street' or 'partner_id.country' are valid values. - * **operator** must be a string with a valid comparison operator from this list: ``=, !=, >, >=, <, <=, like, ilike, in, not in, child_of, parent_left, parent_right`` - The semantics of most of these operators are obvious. - The ``child_of`` operator will look for records who are children or grand-children of a given record, - according to the semantics of this model (i.e following the relationship field named by - ``self._parent_name``, by default ``parent_id``. - * **value** must be a valid value to compare with the values of **field_name**, depending on its type. - - Domain criteria can be combined using 3 logical operators than can be added between tuples: '**&**' (logical AND, default), '**|**' (logical OR), '**!**' (logical NOT). - These are **prefix** operators and the arity of the '**&**' and '**|**' operator is 2, while the arity of the '**!**' is just 1. - Be very careful about this when you combine them the first time. 
- - Here is an example of searching for Partners named *ABC* from Belgium and Germany whose language is not english :: - - [('name','=','ABC'),'!',('language.code','=','en_US'),'|',('country_id.code','=','be'),('country_id.code','=','de')) - - The '&' is omitted as it is the default, and of course we could have used '!=' for the language, but what this domain really represents is:: - - (name is 'ABC' AND (language is NOT english) AND (country is Belgium OR Germany)) - - """ - return self._search(cr, user, args, offset=offset, limit=limit, order=order, context=context, count=count) - - def name_get(self, cr, user, ids, context=None): - """Returns the preferred display value (text representation) for the records with the - given ``ids``. By default this will be the value of the ``name`` column, unless - the model implements a custom behavior. - Can sometimes be seen as the inverse function of :meth:`~.name_search`, but it is not - guaranteed to be. - - :rtype: list(tuple) - :return: list of pairs ``(id,text_repr)`` for all records with the given ``ids``. - """ - if not ids: - return [] - if isinstance(ids, (int, long)): - ids = [ids] - - if self._rec_name in self._all_columns: - rec_name_column = self._all_columns[self._rec_name].column - return [(r['id'], rec_name_column.as_display_name(cr, user, self, r[self._rec_name], context=context)) - for r in self.read(cr, user, ids, [self._rec_name], - load='_classic_write', context=context)] - return [(id, "%s,%s" % (self._name, id)) for id in ids] - - def name_search(self, cr, user, name='', args=None, operator='ilike', context=None, limit=100): - """Search for records that have a display name matching the given ``name`` pattern if compared - with the given ``operator``, while also matching the optional search domain (``args``). - This is used for example to provide suggestions based on a partial value for a relational - field. 
- Sometimes be seen as the inverse function of :meth:`~.name_get`, but it is not - guaranteed to be. - - This method is equivalent to calling :meth:`~.search` with a search domain based on ``name`` - and then :meth:`~.name_get` on the result of the search. - - :param list args: optional search domain (see :meth:`~.search` for syntax), - specifying further restrictions - :param str operator: domain operator for matching the ``name`` pattern, such as ``'like'`` - or ``'='``. - :param int limit: optional max number of records to return - :rtype: list - :return: list of pairs ``(id,text_repr)`` for all matching records. - """ - return self._name_search(cr, user, name, args, operator, context, limit) - - def name_create(self, cr, uid, name, context=None): - """Creates a new record by calling :meth:`~.create` with only one - value provided: the name of the new record (``_rec_name`` field). - The new record will also be initialized with any default values applicable - to this model, or provided through the context. The usual behavior of - :meth:`~.create` applies. - Similarly, this method may raise an exception if the model has multiple - required fields and some do not have default values. - - :param name: name of the record to create - - :rtype: tuple - :return: the :meth:`~.name_get` pair value for the newly-created record. 
- """ - rec_id = self.create(cr, uid, {self._rec_name: name}, context) - return self.name_get(cr, uid, [rec_id], context)[0] - - # private implementation of name_search, allows passing a dedicated user for the name_get part to - # solve some access rights issues - def _name_search(self, cr, user, name='', args=None, operator='ilike', context=None, limit=100, name_get_uid=None): - if args is None: - args = [] - if context is None: - context = {} - args = args[:] - # optimize out the default criterion of ``ilike ''`` that matches everything - if not (name == '' and operator == 'ilike'): - args += [(self._rec_name, operator, name)] - access_rights_uid = name_get_uid or user - ids = self._search(cr, user, args, limit=limit, context=context, access_rights_uid=access_rights_uid) - res = self.name_get(cr, access_rights_uid, ids, context) - return res - - def read_string(self, cr, uid, id, langs, fields=None, context=None): - res = {} - res2 = {} - self.pool.get('ir.translation').check_access_rights(cr, uid, 'read') - if not fields: - fields = self._columns.keys() + self._inherit_fields.keys() - #FIXME: collect all calls to _get_source into one SQL call. 
- for lang in langs: - res[lang] = {'code': lang} - for f in fields: - if f in self._columns: - res_trans = self.pool.get('ir.translation')._get_source(cr, uid, self._name+','+f, 'field', lang) - if res_trans: - res[lang][f] = res_trans - else: - res[lang][f] = self._columns[f].string - for table in self._inherits: - cols = intersect(self._inherit_fields.keys(), fields) - res2 = self.pool[table].read_string(cr, uid, id, langs, cols, context) - for lang in res2: - if lang in res: - res[lang]['code'] = lang - for f in res2[lang]: - res[lang][f] = res2[lang][f] - return res - - def write_string(self, cr, uid, id, langs, vals, context=None): - self.pool.get('ir.translation').check_access_rights(cr, uid, 'write') - #FIXME: try to only call the translation in one SQL - for lang in langs: - for field in vals: - if field in self._columns: - src = self._columns[field].string - self.pool.get('ir.translation')._set_ids(cr, uid, self._name+','+field, 'field', lang, [0], vals[field], src) - for table in self._inherits: - cols = intersect(self._inherit_fields.keys(), vals) - if cols: - self.pool[table].write_string(cr, uid, id, langs, vals, context) - return True - - def _add_missing_default_values(self, cr, uid, values, context=None): - missing_defaults = [] - avoid_tables = [] # avoid overriding inherited values when parent is set - for tables, parent_field in self._inherits.items(): - if parent_field in values: - avoid_tables.append(tables) - for field in self._columns.keys(): - if not field in values: - missing_defaults.append(field) - for field in self._inherit_fields.keys(): - if (field not in values) and (self._inherit_fields[field][0] not in avoid_tables): - missing_defaults.append(field) - - if len(missing_defaults): - # override defaults with the provided values, never allow the other way around - defaults = self.default_get(cr, uid, missing_defaults, context) - for dv in defaults: - if ((dv in self._columns and self._columns[dv]._type == 'many2many') \ - or (dv in 
self._inherit_fields and self._inherit_fields[dv][2]._type == 'many2many')) \ - and defaults[dv] and isinstance(defaults[dv][0], (int, long)): - defaults[dv] = [(6, 0, defaults[dv])] - if (dv in self._columns and self._columns[dv]._type == 'one2many' \ - or (dv in self._inherit_fields and self._inherit_fields[dv][2]._type == 'one2many')) \ - and isinstance(defaults[dv], (list, tuple)) and defaults[dv] and isinstance(defaults[dv][0], dict): - defaults[dv] = [(0, 0, x) for x in defaults[dv]] - defaults.update(values) - values = defaults - return values - - def clear_caches(self): - """ Clear the caches - - This clears the caches associated to methods decorated with - ``tools.ormcache`` or ``tools.ormcache_multi``. - """ - try: - getattr(self, '_ormcache') - self._ormcache = {} - self.pool._any_cache_cleared = True - except AttributeError: - pass - - - def _read_group_fill_results(self, cr, uid, domain, groupby, remaining_groupbys, aggregated_fields, - read_group_result, read_group_order=None, context=None): - """Helper method for filling in empty groups for all possible values of - the field being grouped by""" - - # self._group_by_full should map groupable fields to a method that returns - # a list of all aggregated values that we want to display for this field, - # in the form of a m2o-like pair (key,label). - # This is useful to implement kanban views for instance, where all columns - # should be displayed even if they don't contain any record. 
- - # Grab the list of all groups that should be displayed, including all present groups - present_group_ids = [x[groupby][0] for x in read_group_result if x[groupby]] - all_groups,folded = self._group_by_full[groupby](self, cr, uid, present_group_ids, domain, - read_group_order=read_group_order, - access_rights_uid=openerp.SUPERUSER_ID, - context=context) - - result_template = dict.fromkeys(aggregated_fields, False) - result_template[groupby + '_count'] = 0 - if remaining_groupbys: - result_template['__context'] = {'group_by': remaining_groupbys} - - # Merge the left_side (current results as dicts) with the right_side (all - # possible values as m2o pairs). Both lists are supposed to be using the - # same ordering, and can be merged in one pass. - result = [] - known_values = {} - def append_left(left_side): - grouped_value = left_side[groupby] and left_side[groupby][0] - if not grouped_value in known_values: - result.append(left_side) - known_values[grouped_value] = left_side - else: - count_attr = groupby + '_count' - known_values[grouped_value].update({count_attr: left_side[count_attr]}) - def append_right(right_side): - grouped_value = right_side[0] - if not grouped_value in known_values: - line = dict(result_template) - line[groupby] = right_side - line['__domain'] = [(groupby,'=',grouped_value)] + domain - result.append(line) - known_values[grouped_value] = line - while read_group_result or all_groups: - left_side = read_group_result[0] if read_group_result else None - right_side = all_groups[0] if all_groups else None - assert left_side is None or left_side[groupby] is False \ - or isinstance(left_side[groupby], (tuple,list)), \ - 'M2O-like pair expected, got %r' % left_side[groupby] - assert right_side is None or isinstance(right_side, (tuple,list)), \ - 'M2O-like pair expected, got %r' % right_side - if left_side is None: - append_right(all_groups.pop(0)) - elif right_side is None: - append_left(read_group_result.pop(0)) - elif left_side[groupby] == 
right_side: - append_left(read_group_result.pop(0)) - all_groups.pop(0) # discard right_side - elif not left_side[groupby] or not left_side[groupby][0]: - # left side == "Undefined" entry, not present on right_side - append_left(read_group_result.pop(0)) - else: - append_right(all_groups.pop(0)) - - if folded: - for r in result: - r['__fold'] = folded.get(r[groupby] and r[groupby][0], False) - return result - - def _read_group_prepare(self, orderby, aggregated_fields, annotated_groupbys, query): - """ - Prepares the GROUP BY and ORDER BY terms for the read_group method. Adds the missing JOIN clause - to the query if order should be computed against m2o field. - :param orderby: the orderby definition in the form "%(field)s %(order)s" - :param aggregated_fields: list of aggregated fields in the query - :param annotated_groupbys: list of dictionaries returned by _read_group_process_groupby - These dictionaries contains the qualified name of each groupby - (fully qualified SQL name for the corresponding field), - and the (non raw) field name. 
- :param osv.Query query: the query under construction - :return: (groupby_terms, orderby_terms) - """ - orderby_terms = [] - groupby_terms = [gb['qualified_field'] for gb in annotated_groupbys] - groupby_fields = [gb['groupby'] for gb in annotated_groupbys] - if not orderby: - return groupby_terms, orderby_terms - - self._check_qorder(orderby) - for order_part in orderby.split(','): - order_split = order_part.split() - order_field = order_split[0] - if order_field in groupby_fields: - - if self._all_columns[order_field.split(':')[0]].column._type == 'many2one': - order_clause = self._generate_order_by(order_part, query).replace('ORDER BY ', '') - if order_clause: - orderby_terms.append(order_clause) - groupby_terms += [order_term.split()[0] for order_term in order_clause.split(',')] - else: - order = '"%s" %s' % (order_field, '' if len(order_split) == 1 else order_split[1]) - orderby_terms.append(order) - elif order_field in aggregated_fields: - orderby_terms.append(order_part) - else: - # Cannot order by a field that will not appear in the results (needs to be grouped or aggregated) - _logger.warn('%s: read_group order by `%s` ignored, cannot sort on empty columns (not grouped/aggregated)', - self._name, order_part) - return groupby_terms, orderby_terms - - def _read_group_process_groupby(self, gb, query, context): - """ - Helper method to collect important information about groupbys: raw - field name, type, time informations, qualified name, ... 
- """ - split = gb.split(':') - field_type = self._all_columns[split[0]].column._type - gb_function = split[1] if len(split) == 2 else None - temporal = field_type in ('date', 'datetime') - tz_convert = field_type == 'datetime' and context.get('tz') in pytz.all_timezones - qualified_field = self._inherits_join_calc(split[0], query) - if temporal: - display_formats = { - 'day': 'dd MMM YYYY', - 'week': "'W'w YYYY", - 'month': 'MMMM YYYY', - 'quarter': 'QQQ YYYY', - 'year': 'YYYY' - } - time_intervals = { - 'day': dateutil.relativedelta.relativedelta(days=1), - 'week': datetime.timedelta(days=7), - 'month': dateutil.relativedelta.relativedelta(months=1), - 'quarter': dateutil.relativedelta.relativedelta(months=3), - 'year': dateutil.relativedelta.relativedelta(years=1) - } - if tz_convert: - qualified_field = "timezone('%s', timezone('UTC',%s))" % (context.get('tz', 'UTC'), qualified_field) - qualified_field = "date_trunc('%s', %s)" % (gb_function or 'month', qualified_field) - if field_type == 'boolean': - qualified_field = "coalesce(%s,false)" % qualified_field - return { - 'field': split[0], - 'groupby': gb, - 'type': field_type, - 'display_format': display_formats[gb_function or 'month'] if temporal else None, - 'interval': time_intervals[gb_function or 'month'] if temporal else None, - 'tz_convert': tz_convert, - 'qualified_field': qualified_field - } - - def _read_group_prepare_data(self, key, value, groupby_dict, context): - """ - Helper method to sanitize the data received by read_group. The None - values are converted to False, and the date/datetime are formatted, - and corrected according to the timezones. 
- """ - value = False if value is None else value - gb = groupby_dict.get(key) - if gb and gb['type'] in ('date', 'datetime') and value: - if isinstance(value, basestring): - dt_format = DEFAULT_SERVER_DATETIME_FORMAT if gb['type'] == 'datetime' else DEFAULT_SERVER_DATE_FORMAT - value = datetime.datetime.strptime(value, dt_format) - if gb['tz_convert']: - value = pytz.timezone(context['tz']).localize(value) - return value - - def _read_group_get_domain(self, groupby, value): - """ - Helper method to construct the domain corresponding to a groupby and - a given value. This is mostly relevant for date/datetime. - """ - if groupby['type'] in ('date', 'datetime') and value: - dt_format = DEFAULT_SERVER_DATETIME_FORMAT if groupby['type'] == 'datetime' else DEFAULT_SERVER_DATE_FORMAT - domain_dt_begin = value - domain_dt_end = value + groupby['interval'] - if groupby['tz_convert']: - domain_dt_begin = domain_dt_begin.astimezone(pytz.utc) - domain_dt_end = domain_dt_end.astimezone(pytz.utc) - return [(groupby['field'], '>=', domain_dt_begin.strftime(dt_format)), - (groupby['field'], '<', domain_dt_end.strftime(dt_format))] - if groupby['type'] == 'many2one' and value: - value = value[0] - return [(groupby['field'], '=', value)] - - def _read_group_format_result(self, data, annotated_groupbys, groupby, groupby_dict, domain, context): - """ - Helper method to format the data contained in the dictianary data by - adding the domain corresponding to its values, the groupbys in the - context and by properly formatting the date/datetime values. 
- """ - domain_group = [dom for gb in annotated_groupbys for dom in self._read_group_get_domain(gb, data[gb['groupby']])] - for k,v in data.iteritems(): - gb = groupby_dict.get(k) - if gb and gb['type'] in ('date', 'datetime') and v: - data[k] = babel.dates.format_date(v, format=gb['display_format'], locale=context.get('lang', 'en_US')) - - data['__domain'] = domain_group + domain - if len(groupby) - len(annotated_groupbys) >= 1: - data['__context'] = { 'group_by': groupby[len(annotated_groupbys):]} - del data['id'] - return data - - def read_group(self, cr, uid, domain, fields, groupby, offset=0, limit=None, context=None, orderby=False, lazy=True): - """ - Get the list of records in list view grouped by the given ``groupby`` fields - - :param cr: database cursor - :param uid: current user id - :param domain: list specifying search criteria [['field_name', 'operator', 'value'], ...] - :param list fields: list of fields present in the list view specified on the object - :param list groupby: list of groupby descriptions by which the records will be grouped. - A groupby description is either a field (then it will be grouped by that field) - or a string 'field:groupby_function'. Right now, the only functions supported - are 'day', 'week', 'month', 'quarter' or 'year', and they only make sense for - date/datetime fields. - :param int offset: optional number of records to skip - :param int limit: optional max number of records to return - :param dict context: context arguments, like lang, time zone. - :param list orderby: optional ``order by`` specification, for - overriding the natural sort ordering of the - groups, see also :py:meth:`~osv.osv.osv.search` - (supported only for many2one fields currently) - :param bool lazy: if true, the results are only grouped by the first groupby and the - remaining groupbys are put in the __context key. If false, all the groupbys are - done in one call. 
- :return: list of dictionaries(one dictionary for each record) containing: - - * the values of fields grouped by the fields in ``groupby`` argument - * __domain: list of tuples specifying the search criteria - * __context: dictionary with argument like ``groupby`` - :rtype: [{'field_name_1': value, ...] - :raise AccessError: * if user has no read rights on the requested object - * if user tries to bypass access rules for read on the requested object - """ - if context is None: - context = {} - self.check_access_rights(cr, uid, 'read') - query = self._where_calc(cr, uid, domain, context=context) - fields = fields or self._columns.keys() - - groupby = [groupby] if isinstance(groupby, basestring) else groupby - groupby_list = groupby[:1] if lazy else groupby - annotated_groupbys = [self._read_group_process_groupby(gb, query, context) - for gb in groupby_list] - groupby_fields = [g['field'] for g in annotated_groupbys] - order = orderby or ','.join([g for g in groupby_list]) - groupby_dict = {gb['groupby']: gb for gb in annotated_groupbys} - - self._apply_ir_rules(cr, uid, query, 'read', context=context) - for gb in groupby_fields: - assert gb in fields, "Fields in 'groupby' must appear in the list of fields to read (perhaps it's missing in the list view?)" - groupby_def = self._columns.get(gb) or (self._inherit_fields.get(gb) and self._inherit_fields.get(gb)[2]) - assert groupby_def and groupby_def._classic_write, "Fields in 'groupby' must be regular database-persisted fields (no function or related fields), or function fields with store=True" - if not (gb in self._all_columns): - # Don't allow arbitrary values, as this would be a SQL injection vector! 
- raise except_orm(_('Invalid group_by'), - _('Invalid group_by specification: "%s".\nA group_by specification must be a list of valid fields.')%(gb,)) - - aggregated_fields = [ - f for f in fields - if f not in ('id', 'sequence') - if f not in groupby_fields - if self._all_columns[f].column._type in ('integer', 'float') - if getattr(self._all_columns[f].column, '_classic_write')] - - field_formatter = lambda f: (self._all_columns[f].column.group_operator or 'sum', self._inherits_join_calc(f, query), f) - select_terms = ["%s(%s) AS %s" % field_formatter(f) for f in aggregated_fields] - - for gb in annotated_groupbys: - select_terms.append('%s as "%s" ' % (gb['qualified_field'], gb['groupby'])) - - groupby_terms, orderby_terms = self._read_group_prepare(order, aggregated_fields, annotated_groupbys, query) - from_clause, where_clause, where_clause_params = query.get_sql() - if lazy and (len(groupby_fields) >= 2 or not context.get('group_by_no_leaf')): - count_field = groupby_fields[0] if len(groupby_fields) >= 1 else '_' - else: - count_field = '_' - - prefix_terms = lambda prefix, terms: (prefix + " " + ",".join(terms)) if terms else '' - prefix_term = lambda prefix, term: ('%s %s' % (prefix, term)) if term else '' - - query = """ - SELECT min(%(table)s.id) AS id, count(%(table)s.id) AS %(count_field)s_count %(extra_fields)s - FROM %(from)s - %(where)s - %(groupby)s - %(orderby)s - %(limit)s - %(offset)s - """ % { - 'table': self._table, - 'count_field': count_field, - 'extra_fields': prefix_terms(',', select_terms), - 'from': from_clause, - 'where': prefix_term('WHERE', where_clause), - 'groupby': prefix_terms('GROUP BY', groupby_terms), - 'orderby': prefix_terms('ORDER BY', orderby_terms), - 'limit': prefix_term('LIMIT', int(limit) if limit else None), - 'offset': prefix_term('OFFSET', int(offset) if limit else None), - } - cr.execute(query, where_clause_params) - fetched_data = cr.dictfetchall() - - if not groupby_fields: - return fetched_data - - many2onefields 
= [gb['field'] for gb in annotated_groupbys if gb['type'] == 'many2one'] - if many2onefields: - data_ids = [r['id'] for r in fetched_data] - many2onefields = list(set(many2onefields)) - data_dict = {d['id']: d for d in self.read(cr, uid, data_ids, many2onefields, context=context)} - for d in fetched_data: - d.update(data_dict[d['id']]) - - data = map(lambda r: {k: self._read_group_prepare_data(k,v, groupby_dict, context) for k,v in r.iteritems()}, fetched_data) - result = [self._read_group_format_result(d, annotated_groupbys, groupby, groupby_dict, domain, context) for d in data] - if lazy and groupby_fields[0] in self._group_by_full: - # Right now, read_group only fill results in lazy mode (by default). - # If you need to have the empty groups in 'eager' mode, then the - # method _read_group_fill_results need to be completely reimplemented - # in a sane way - result = self._read_group_fill_results(cr, uid, domain, groupby_fields[0], groupby[len(annotated_groupbys):], - aggregated_fields, result, read_group_order=order, - context=context) - return result - - def _inherits_join_add(self, current_model, parent_model_name, query): - """ - Add missing table SELECT and JOIN clause to ``query`` for reaching the parent table (no duplicates) - :param current_model: current model object - :param parent_model_name: name of the parent model for which the clauses should be added - :param query: query object on which the JOIN should be added - """ - inherits_field = current_model._inherits[parent_model_name] - parent_model = self.pool[parent_model_name] - parent_alias, parent_alias_statement = query.add_join((current_model._table, parent_model._table, inherits_field, 'id', inherits_field), implicit=True) - return parent_alias - - def _inherits_join_calc(self, field, query): - """ - Adds missing table select and join clause(s) to ``query`` for reaching - the field coming from an '_inherits' parent table (no duplicates). 
- - :param field: name of inherited field to reach - :param query: query object on which the JOIN should be added - :return: qualified name of field, to be used in SELECT clause - """ - current_table = self - parent_alias = '"%s"' % current_table._table - while field in current_table._inherit_fields and not field in current_table._columns: - parent_model_name = current_table._inherit_fields[field][0] - parent_table = self.pool[parent_model_name] - parent_alias = self._inherits_join_add(current_table, parent_model_name, query) - current_table = parent_table - return '%s."%s"' % (parent_alias, field) - - def _parent_store_compute(self, cr): - if not self._parent_store: - return - _logger.info('Computing parent left and right for table %s...', self._table) - def browse_rec(root, pos=0): - # TODO: set order - where = self._parent_name+'='+str(root) - if not root: - where = self._parent_name+' IS NULL' - if self._parent_order: - where += ' order by '+self._parent_order - cr.execute('SELECT id FROM '+self._table+' WHERE '+where) - pos2 = pos + 1 - for id in cr.fetchall(): - pos2 = browse_rec(id[0], pos2) - cr.execute('update '+self._table+' set parent_left=%s, parent_right=%s where id=%s', (pos, pos2, root)) - return pos2 + 1 - query = 'SELECT id FROM '+self._table+' WHERE '+self._parent_name+' IS NULL' - if self._parent_order: - query += ' order by ' + self._parent_order - pos = 0 - cr.execute(query) - for (root,) in cr.fetchall(): - pos = browse_rec(root, pos) - return True - - def _update_store(self, cr, f, k): - _logger.info("storing computed values of fields.function '%s'", k) - ss = self._columns[k]._symbol_set - update_query = 'UPDATE "%s" SET "%s"=%s WHERE id=%%s' % (self._table, k, ss[0]) - cr.execute('select id from '+self._table) - ids_lst = map(lambda x: x[0], cr.fetchall()) - while ids_lst: - iids = ids_lst[:AUTOINIT_RECALCULATE_STORED_FIELDS] - ids_lst = ids_lst[AUTOINIT_RECALCULATE_STORED_FIELDS:] - res = f.get(cr, self, iids, k, SUPERUSER_ID, {}) - for 
key, val in res.items(): - if f._multi: - val = val[k] - # if val is a many2one, just write the ID - if type(val) == tuple: - val = val[0] - if val is not False: - cr.execute(update_query, (ss[1](val), key)) - - def _check_selection_field_value(self, cr, uid, field, value, context=None): - """Raise except_orm if value is not among the valid values for the selection field""" - if self._columns[field]._type == 'reference': - val_model, val_id_str = value.split(',', 1) - val_id = False - try: - val_id = long(val_id_str) - except ValueError: - pass - if not val_id: - raise except_orm(_('ValidateError'), - _('Invalid value for reference field "%s.%s" (last part must be a non-zero integer): "%s"') % (self._table, field, value)) - val = val_model - else: - val = value - if isinstance(self._columns[field].selection, (tuple, list)): - if val in dict(self._columns[field].selection): - return - elif val in dict(self._columns[field].selection(self, cr, uid, context=context)): - return - raise except_orm(_('ValidateError'), - _('The value "%s" for the field "%s.%s" is not in the selection') % (value, self._table, field)) - - def _check_removed_columns(self, cr, log=False): - # iterate on the database columns to drop the NOT NULL constraints - # of fields which were required but have been removed (or will be added by another module) - columns = [c for c in self._columns if not (isinstance(self._columns[c], fields.function) and not self._columns[c].store)] - columns += MAGIC_COLUMNS - cr.execute("SELECT a.attname, a.attnotnull" - " FROM pg_class c, pg_attribute a" - " WHERE c.relname=%s" - " AND c.oid=a.attrelid" - " AND a.attisdropped=%s" - " AND pg_catalog.format_type(a.atttypid, a.atttypmod) NOT IN ('cid', 'tid', 'oid', 'xid')" - " AND a.attname NOT IN %s", (self._table, False, tuple(columns))), - - for column in cr.dictfetchall(): - if log: - _logger.debug("column %s is in the table %s but not in the corresponding object %s", - column['attname'], self._table, self._name) - if 
column['attnotnull']: - cr.execute('ALTER TABLE "%s" ALTER COLUMN "%s" DROP NOT NULL' % (self._table, column['attname'])) - _schema.debug("Table '%s': column '%s': dropped NOT NULL constraint", - self._table, column['attname']) - - def _save_constraint(self, cr, constraint_name, type): - """ - Record the creation of a constraint for this model, to make it possible - to delete it later when the module is uninstalled. Type can be either - 'f' or 'u' depending on the constraint being a foreign key or not. - """ - if not self._module: - # no need to save constraints for custom models as they're not part - # of any module - return - assert type in ('f', 'u') - cr.execute(""" - SELECT 1 FROM ir_model_constraint, ir_module_module - WHERE ir_model_constraint.module=ir_module_module.id - AND ir_model_constraint.name=%s - AND ir_module_module.name=%s - """, (constraint_name, self._module)) - if not cr.rowcount: - cr.execute(""" - INSERT INTO ir_model_constraint - (name, date_init, date_update, module, model, type) - VALUES (%s, now() AT TIME ZONE 'UTC', now() AT TIME ZONE 'UTC', - (SELECT id FROM ir_module_module WHERE name=%s), - (SELECT id FROM ir_model WHERE model=%s), %s)""", - (constraint_name, self._module, self._name, type)) - - def _save_relation_table(self, cr, relation_table): - """ - Record the creation of a many2many for this model, to make it possible - to delete it later when the module is uninstalled. 
- """ - cr.execute(""" - SELECT 1 FROM ir_model_relation, ir_module_module - WHERE ir_model_relation.module=ir_module_module.id - AND ir_model_relation.name=%s - AND ir_module_module.name=%s - """, (relation_table, self._module)) - if not cr.rowcount: - cr.execute("""INSERT INTO ir_model_relation (name, date_init, date_update, module, model) - VALUES (%s, now() AT TIME ZONE 'UTC', now() AT TIME ZONE 'UTC', - (SELECT id FROM ir_module_module WHERE name=%s), - (SELECT id FROM ir_model WHERE model=%s))""", - (relation_table, self._module, self._name)) - - # checked version: for direct m2o starting from `self` - def _m2o_add_foreign_key_checked(self, source_field, dest_model, ondelete): - assert self.is_transient() or not dest_model.is_transient(), \ - 'Many2One relationships from non-transient Model to TransientModel are forbidden' - if self.is_transient() and not dest_model.is_transient(): - # TransientModel relationships to regular Models are annoying - # usually because they could block deletion due to the FKs. - # So unless stated otherwise we default them to ondelete=cascade. 
- ondelete = ondelete or 'cascade' - fk_def = (self._table, source_field, dest_model._table, ondelete or 'set null') - self._foreign_keys.add(fk_def) - _schema.debug("Table '%s': added foreign key '%s' with definition=REFERENCES \"%s\" ON DELETE %s", *fk_def) - - # unchecked version: for custom cases, such as m2m relationships - def _m2o_add_foreign_key_unchecked(self, source_table, source_field, dest_model, ondelete): - fk_def = (source_table, source_field, dest_model._table, ondelete or 'set null') - self._foreign_keys.add(fk_def) - _schema.debug("Table '%s': added foreign key '%s' with definition=REFERENCES \"%s\" ON DELETE %s", *fk_def) - - def _drop_constraint(self, cr, source_table, constraint_name): - cr.execute("ALTER TABLE %s DROP CONSTRAINT %s" % (source_table,constraint_name)) - - def _m2o_fix_foreign_key(self, cr, source_table, source_field, dest_model, ondelete): - # Find FK constraint(s) currently established for the m2o field, - # and see whether they are stale or not - cr.execute("""SELECT confdeltype as ondelete_rule, conname as constraint_name, - cl2.relname as foreign_table - FROM pg_constraint as con, pg_class as cl1, pg_class as cl2, - pg_attribute as att1, pg_attribute as att2 - WHERE con.conrelid = cl1.oid - AND cl1.relname = %s - AND con.confrelid = cl2.oid - AND array_lower(con.conkey, 1) = 1 - AND con.conkey[1] = att1.attnum - AND att1.attrelid = cl1.oid - AND att1.attname = %s - AND array_lower(con.confkey, 1) = 1 - AND con.confkey[1] = att2.attnum - AND att2.attrelid = cl2.oid - AND att2.attname = %s - AND con.contype = 'f'""", (source_table, source_field, 'id')) - constraints = cr.dictfetchall() - if constraints: - if len(constraints) == 1: - # Is it the right constraint? 
- cons, = constraints - if cons['ondelete_rule'] != POSTGRES_CONFDELTYPES.get((ondelete or 'set null').upper(), 'a')\ - or cons['foreign_table'] != dest_model._table: - # Wrong FK: drop it and recreate - _schema.debug("Table '%s': dropping obsolete FK constraint: '%s'", - source_table, cons['constraint_name']) - self._drop_constraint(cr, source_table, cons['constraint_name']) - else: - # it's all good, nothing to do! - return - else: - # Multiple FKs found for the same field, drop them all, and re-create - for cons in constraints: - _schema.debug("Table '%s': dropping duplicate FK constraints: '%s'", - source_table, cons['constraint_name']) - self._drop_constraint(cr, source_table, cons['constraint_name']) - - # (re-)create the FK - self._m2o_add_foreign_key_checked(source_field, dest_model, ondelete) - - - - def _auto_init(self, cr, context=None): - """ - - Call _field_create and, unless _auto is False: - - - create the corresponding table in database for the model, - - possibly add the parent columns in database, - - possibly add the columns 'create_uid', 'create_date', 'write_uid', - 'write_date' in database if _log_access is True (the default), - - report on database columns no more existing in _columns, - - remove no more existing not null constraints, - - alter existing database columns to match _columns, - - create database tables to match _columns, - - add database indices to match _columns, - - save in self._foreign_keys a list a foreign keys to create (see - _auto_end). 
- - """ - self._foreign_keys = set() - raise_on_invalid_object_name(self._name) - if context is None: - context = {} - store_compute = False - todo_end = [] - update_custom_fields = context.get('update_custom_fields', False) - self._field_create(cr, context=context) - create = not self._table_exist(cr) - if self._auto: - - if create: - self._create_table(cr) - - cr.commit() - if self._parent_store: - if not self._parent_columns_exist(cr): - self._create_parent_columns(cr) - store_compute = True - - # Create the create_uid, create_date, write_uid, write_date, columns if desired. - if self._log_access: - self._add_log_columns(cr) - - self._check_removed_columns(cr, log=False) - - # iterate on the "object columns" - column_data = self._select_column_data(cr) - - for k, f in self._columns.iteritems(): - if k in MAGIC_COLUMNS: - continue - # Don't update custom (also called manual) fields - if f.manual and not update_custom_fields: - continue - - if isinstance(f, fields.one2many): - self._o2m_raise_on_missing_reference(cr, f) - - elif isinstance(f, fields.many2many): - self._m2m_raise_or_create_relation(cr, f) - - else: - res = column_data.get(k) - - # The field is not found as-is in database, try if it - # exists with an old name. - if not res and hasattr(f, 'oldname'): - res = column_data.get(f.oldname) - if res: - cr.execute('ALTER TABLE "%s" RENAME "%s" TO "%s"' % (self._table, f.oldname, k)) - res['attname'] = k - column_data[k] = res - _schema.debug("Table '%s': renamed column '%s' to '%s'", - self._table, f.oldname, k) - - # The field already exists in database. Possibly - # change its type, rename it, drop it or change its - # constraints. 
- if res: - f_pg_type = res['typname'] - f_pg_size = res['size'] - f_pg_notnull = res['attnotnull'] - if isinstance(f, fields.function) and not f.store and\ - not getattr(f, 'nodrop', False): - _logger.info('column %s (%s) converted to a function, removed from table %s', - k, f.string, self._table) - cr.execute('ALTER TABLE "%s" DROP COLUMN "%s" CASCADE' % (self._table, k)) - cr.commit() - _schema.debug("Table '%s': dropped column '%s' with cascade", - self._table, k) - f_obj_type = None - else: - f_obj_type = get_pg_type(f) and get_pg_type(f)[0] - - if f_obj_type: - ok = False - casts = [ - ('text', 'char', pg_varchar(f.size), '::%s' % pg_varchar(f.size)), - ('varchar', 'text', 'TEXT', ''), - ('int4', 'float', get_pg_type(f)[1], '::'+get_pg_type(f)[1]), - ('date', 'datetime', 'TIMESTAMP', '::TIMESTAMP'), - ('timestamp', 'date', 'date', '::date'), - ('numeric', 'float', get_pg_type(f)[1], '::'+get_pg_type(f)[1]), - ('float8', 'float', get_pg_type(f)[1], '::'+get_pg_type(f)[1]), - ] - if f_pg_type == 'varchar' and f._type == 'char' and f_pg_size and (f.size is None or f_pg_size < f.size): - try: - with cr.savepoint(): - cr.execute('ALTER TABLE "%s" ALTER COLUMN "%s" TYPE %s' % (self._table, k, pg_varchar(f.size))) - except psycopg2.NotSupportedError: - # In place alter table cannot be done because a view is depending of this field. - # Do a manual copy. 
This will drop the view (that will be recreated later) - cr.execute('ALTER TABLE "%s" RENAME COLUMN "%s" TO temp_change_size' % (self._table, k)) - cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" %s' % (self._table, k, pg_varchar(f.size))) - cr.execute('UPDATE "%s" SET "%s"=temp_change_size::%s' % (self._table, k, pg_varchar(f.size))) - cr.execute('ALTER TABLE "%s" DROP COLUMN temp_change_size CASCADE' % (self._table,)) - cr.commit() - _schema.debug("Table '%s': column '%s' (type varchar) changed size from %s to %s", - self._table, k, f_pg_size or 'unlimited', f.size or 'unlimited') - for c in casts: - if (f_pg_type==c[0]) and (f._type==c[1]): - if f_pg_type != f_obj_type: - ok = True - cr.execute('ALTER TABLE "%s" RENAME COLUMN "%s" TO __temp_type_cast' % (self._table, k)) - cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" %s' % (self._table, k, c[2])) - cr.execute(('UPDATE "%s" SET "%s"= __temp_type_cast'+c[3]) % (self._table, k)) - cr.execute('ALTER TABLE "%s" DROP COLUMN __temp_type_cast CASCADE' % (self._table,)) - cr.commit() - _schema.debug("Table '%s': column '%s' changed type from %s to %s", - self._table, k, c[0], c[1]) - break - - if f_pg_type != f_obj_type: - if not ok: - i = 0 - while True: - newname = k + '_moved' + str(i) - cr.execute("SELECT count(1) FROM pg_class c,pg_attribute a " \ - "WHERE c.relname=%s " \ - "AND a.attname=%s " \ - "AND c.oid=a.attrelid ", (self._table, newname)) - if not cr.fetchone()[0]: - break - i += 1 - if f_pg_notnull: - cr.execute('ALTER TABLE "%s" ALTER COLUMN "%s" DROP NOT NULL' % (self._table, k)) - cr.execute('ALTER TABLE "%s" RENAME COLUMN "%s" TO "%s"' % (self._table, k, newname)) - cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" %s' % (self._table, k, get_pg_type(f)[1])) - cr.execute("COMMENT ON COLUMN %s.\"%s\" IS %%s" % (self._table, k), (f.string,)) - _schema.debug("Table '%s': column '%s' has changed type (DB=%s, def=%s), data moved to column %s !", - self._table, k, f_pg_type, f._type, newname) - - # if the field is 
required and hasn't got a NOT NULL constraint - if f.required and f_pg_notnull == 0: - # set the field to the default value if any - if k in self._defaults: - if callable(self._defaults[k]): - default = self._defaults[k](self, cr, SUPERUSER_ID, context) - else: - default = self._defaults[k] - - if default is not None: - ss = self._columns[k]._symbol_set - query = 'UPDATE "%s" SET "%s"=%s WHERE "%s" is NULL' % (self._table, k, ss[0], k) - cr.execute(query, (ss[1](default),)) - # add the NOT NULL constraint - cr.commit() - try: - cr.execute('ALTER TABLE "%s" ALTER COLUMN "%s" SET NOT NULL' % (self._table, k), log_exceptions=False) - cr.commit() - _schema.debug("Table '%s': column '%s': added NOT NULL constraint", - self._table, k) - except Exception: - msg = "Table '%s': unable to set a NOT NULL constraint on column '%s' !\n"\ - "If you want to have it, you should update the records and execute manually:\n"\ - "ALTER TABLE %s ALTER COLUMN %s SET NOT NULL" - _schema.warning(msg, self._table, k, self._table, k) - cr.commit() - elif not f.required and f_pg_notnull == 1: - cr.execute('ALTER TABLE "%s" ALTER COLUMN "%s" DROP NOT NULL' % (self._table, k)) - cr.commit() - _schema.debug("Table '%s': column '%s': dropped NOT NULL constraint", - self._table, k) - # Verify index - indexname = '%s_%s_index' % (self._table, k) - cr.execute("SELECT indexname FROM pg_indexes WHERE indexname = %s and tablename = %s", (indexname, self._table)) - res2 = cr.dictfetchall() - if not res2 and f.select: - cr.execute('CREATE INDEX "%s_%s_index" ON "%s" ("%s")' % (self._table, k, self._table, k)) - cr.commit() - if f._type == 'text': - # FIXME: for fields.text columns we should try creating GIN indexes instead (seems most suitable for an ERP context) - msg = "Table '%s': Adding (b-tree) index for %s column '%s'."\ - "This is probably useless (does not work for fulltext search) and prevents INSERTs of long texts"\ - " because there is a length limit for indexable btree values!\n"\ - "Use a 
search view instead if you simply want to make the field searchable." - _schema.warning(msg, self._table, f._type, k) - if res2 and not f.select: - cr.execute('DROP INDEX "%s_%s_index"' % (self._table, k)) - cr.commit() - msg = "Table '%s': dropping index for column '%s' of type '%s' as it is not required anymore" - _schema.debug(msg, self._table, k, f._type) - - if isinstance(f, fields.many2one) or (isinstance(f, fields.function) and f._type == 'many2one' and f.store): - dest_model = self.pool[f._obj] - if dest_model._table != 'ir_actions': - self._m2o_fix_foreign_key(cr, self._table, k, dest_model, f.ondelete) - - # The field doesn't exist in database. Create it if necessary. - else: - if not isinstance(f, fields.function) or f.store: - # add the missing field - cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" %s' % (self._table, k, get_pg_type(f)[1])) - cr.execute("COMMENT ON COLUMN %s.\"%s\" IS %%s" % (self._table, k), (f.string,)) - _schema.debug("Table '%s': added column '%s' with definition=%s", - self._table, k, get_pg_type(f)[1]) - - # initialize it - if not create and k in self._defaults: - if callable(self._defaults[k]): - default = self._defaults[k](self, cr, SUPERUSER_ID, context) - else: - default = self._defaults[k] - - ss = self._columns[k]._symbol_set - query = 'UPDATE "%s" SET "%s"=%s' % (self._table, k, ss[0]) - cr.execute(query, (ss[1](default),)) - cr.commit() - _logger.debug("Table '%s': setting default value of new column %s", self._table, k) - - # remember the functions to call for the stored fields - if isinstance(f, fields.function): - order = 10 - if f.store is not True: # i.e. 
if f.store is a dict - order = f.store[f.store.keys()[0]][2] - todo_end.append((order, self._update_store, (f, k))) - - # and add constraints if needed - if isinstance(f, fields.many2one) or (isinstance(f, fields.function) and f._type == 'many2one' and f.store): - if f._obj not in self.pool: - raise except_orm('Programming Error', 'There is no reference available for %s' % (f._obj,)) - dest_model = self.pool[f._obj] - ref = dest_model._table - # ir_actions is inherited so foreign key doesn't work on it - if ref != 'ir_actions': - self._m2o_add_foreign_key_checked(k, dest_model, f.ondelete) - if f.select: - cr.execute('CREATE INDEX "%s_%s_index" ON "%s" ("%s")' % (self._table, k, self._table, k)) - if f.required: - try: - cr.commit() - cr.execute('ALTER TABLE "%s" ALTER COLUMN "%s" SET NOT NULL' % (self._table, k), log_exceptions=False) - _schema.debug("Table '%s': column '%s': added a NOT NULL constraint", - self._table, k) - except Exception: - msg = "WARNING: unable to set column %s of table %s not null !\n"\ - "Try to re-run: openerp-server --update=module\n"\ - "If it doesn't work, update records and execute manually:\n"\ - "ALTER TABLE %s ALTER COLUMN %s SET NOT NULL" - _logger.warning(msg, k, self._table, self._table, k) - cr.commit() - - else: - cr.execute("SELECT relname FROM pg_class WHERE relkind IN ('r','v') AND relname=%s", (self._table,)) - create = not bool(cr.fetchone()) - - cr.commit() # start a new transaction - - if self._auto: - self._add_sql_constraints(cr) - - if create: - self._execute_sql(cr) - - if store_compute: - self._parent_store_compute(cr) - cr.commit() - - return todo_end - - def _auto_end(self, cr, context=None): - """ Create the foreign keys recorded by _auto_init. 
""" - for t, k, r, d in self._foreign_keys: - cr.execute('ALTER TABLE "%s" ADD FOREIGN KEY ("%s") REFERENCES "%s" ON DELETE %s' % (t, k, r, d)) - self._save_constraint(cr, "%s_%s_fkey" % (t, k), 'f') - cr.commit() - del self._foreign_keys - - - def _table_exist(self, cr): - cr.execute("SELECT relname FROM pg_class WHERE relkind IN ('r','v') AND relname=%s", (self._table,)) - return cr.rowcount - - - def _create_table(self, cr): - cr.execute('CREATE TABLE "%s" (id SERIAL NOT NULL, PRIMARY KEY(id))' % (self._table,)) - cr.execute(("COMMENT ON TABLE \"%s\" IS %%s" % self._table), (self._description,)) - _schema.debug("Table '%s': created", self._table) - - - def _parent_columns_exist(self, cr): - cr.execute("""SELECT c.relname - FROM pg_class c, pg_attribute a - WHERE c.relname=%s AND a.attname=%s AND c.oid=a.attrelid - """, (self._table, 'parent_left')) - return cr.rowcount - - - def _create_parent_columns(self, cr): - cr.execute('ALTER TABLE "%s" ADD COLUMN "parent_left" INTEGER' % (self._table,)) - cr.execute('ALTER TABLE "%s" ADD COLUMN "parent_right" INTEGER' % (self._table,)) - if 'parent_left' not in self._columns: - _logger.error('create a column parent_left on object %s: fields.integer(\'Left Parent\', select=1)', - self._table) - _schema.debug("Table '%s': added column '%s' with definition=%s", - self._table, 'parent_left', 'INTEGER') - elif not self._columns['parent_left'].select: - _logger.error('parent_left column on object %s must be indexed! Add select=1 to the field definition)', - self._table) - if 'parent_right' not in self._columns: - _logger.error('create a column parent_right on object %s: fields.integer(\'Right Parent\', select=1)', - self._table) - _schema.debug("Table '%s': added column '%s' with definition=%s", - self._table, 'parent_right', 'INTEGER') - elif not self._columns['parent_right'].select: - _logger.error('parent_right column on object %s must be indexed! 
Add select=1 to the field definition)', - self._table) - if self._columns[self._parent_name].ondelete not in ('cascade', 'restrict'): - _logger.error("The column %s on object %s must be set as ondelete='cascade' or 'restrict'", - self._parent_name, self._name) - - cr.commit() - - - def _add_log_columns(self, cr): - for field, field_def in LOG_ACCESS_COLUMNS.iteritems(): - cr.execute(""" - SELECT c.relname - FROM pg_class c, pg_attribute a - WHERE c.relname=%s AND a.attname=%s AND c.oid=a.attrelid - """, (self._table, field)) - if not cr.rowcount: - cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" %s' % (self._table, field, field_def)) - cr.commit() - _schema.debug("Table '%s': added column '%s' with definition=%s", - self._table, field, field_def) - - - def _select_column_data(self, cr): - # attlen is the number of bytes necessary to represent the type when - # the type has a fixed size. If the type has a varying size attlen is - # -1 and atttypmod is the size limit + 4, or -1 if there is no limit. - cr.execute("SELECT c.relname,a.attname,a.attlen,a.atttypmod,a.attnotnull,a.atthasdef,t.typname,CASE WHEN a.attlen=-1 THEN (CASE WHEN a.atttypmod=-1 THEN 0 ELSE a.atttypmod-4 END) ELSE a.attlen END as size " \ - "FROM pg_class c,pg_attribute a,pg_type t " \ - "WHERE c.relname=%s " \ - "AND c.oid=a.attrelid " \ - "AND a.atttypid=t.oid", (self._table,)) - return dict(map(lambda x: (x['attname'], x),cr.dictfetchall())) - - - def _o2m_raise_on_missing_reference(self, cr, f): - # TODO this check should be a method on fields.one2many. - if f._obj in self.pool: - other = self.pool[f._obj] - # TODO the condition could use fields_get_keys(). 
- if f._fields_id not in other._columns.keys(): - if f._fields_id not in other._inherit_fields.keys(): - raise except_orm('Programming Error', "There is no reference field '%s' found for '%s'" % (f._fields_id, f._obj,)) - - def _m2m_raise_or_create_relation(self, cr, f): - m2m_tbl, col1, col2 = f._sql_names(self) - self._save_relation_table(cr, m2m_tbl) - cr.execute("SELECT relname FROM pg_class WHERE relkind IN ('r','v') AND relname=%s", (m2m_tbl,)) - if not cr.dictfetchall(): - if f._obj not in self.pool: - raise except_orm('Programming Error', 'Many2Many destination model does not exist: `%s`' % (f._obj,)) - dest_model = self.pool[f._obj] - ref = dest_model._table - cr.execute('CREATE TABLE "%s" ("%s" INTEGER NOT NULL, "%s" INTEGER NOT NULL, UNIQUE("%s","%s"))' % (m2m_tbl, col1, col2, col1, col2)) - # create foreign key references with ondelete=cascade, unless the targets are SQL views - cr.execute("SELECT relkind FROM pg_class WHERE relkind IN ('v') AND relname=%s", (ref,)) - if not cr.fetchall(): - self._m2o_add_foreign_key_unchecked(m2m_tbl, col2, dest_model, 'cascade') - cr.execute("SELECT relkind FROM pg_class WHERE relkind IN ('v') AND relname=%s", (self._table,)) - if not cr.fetchall(): - self._m2o_add_foreign_key_unchecked(m2m_tbl, col1, self, 'cascade') - - cr.execute('CREATE INDEX "%s_%s_index" ON "%s" ("%s")' % (m2m_tbl, col1, m2m_tbl, col1)) - cr.execute('CREATE INDEX "%s_%s_index" ON "%s" ("%s")' % (m2m_tbl, col2, m2m_tbl, col2)) - cr.execute("COMMENT ON TABLE \"%s\" IS 'RELATION BETWEEN %s AND %s'" % (m2m_tbl, self._table, ref)) - cr.commit() - _schema.debug("Create table '%s': m2m relation between '%s' and '%s'", m2m_tbl, self._table, ref) - - - def _add_sql_constraints(self, cr): - """ - - Modify this model's database table constraints so they match the one in - _sql_constraints. 
- - """ - def unify_cons_text(txt): - return txt.lower().replace(', ',',').replace(' (','(') - - for (key, con, _) in self._sql_constraints: - conname = '%s_%s' % (self._table, key) - - self._save_constraint(cr, conname, 'u') - cr.execute("SELECT conname, pg_catalog.pg_get_constraintdef(oid, true) as condef FROM pg_constraint where conname=%s", (conname,)) - existing_constraints = cr.dictfetchall() - sql_actions = { - 'drop': { - 'execute': False, - 'query': 'ALTER TABLE "%s" DROP CONSTRAINT "%s"' % (self._table, conname, ), - 'msg_ok': "Table '%s': dropped constraint '%s'. Reason: its definition changed from '%%s' to '%s'" % ( - self._table, conname, con), - 'msg_err': "Table '%s': unable to drop \'%s\' constraint !" % (self._table, con), - 'order': 1, - }, - 'add': { - 'execute': False, - 'query': 'ALTER TABLE "%s" ADD CONSTRAINT "%s" %s' % (self._table, conname, con,), - 'msg_ok': "Table '%s': added constraint '%s' with definition=%s" % (self._table, conname, con), - 'msg_err': "Table '%s': unable to add \'%s\' constraint !\n If you want to have it, you should update the records and execute manually:\n%%s" % ( - self._table, con), - 'order': 2, - }, - } - - if not existing_constraints: - # constraint does not exists: - sql_actions['add']['execute'] = True - sql_actions['add']['msg_err'] = sql_actions['add']['msg_err'] % (sql_actions['add']['query'], ) - elif unify_cons_text(con) not in [unify_cons_text(item['condef']) for item in existing_constraints]: - # constraint exists but its definition has changed: - sql_actions['drop']['execute'] = True - sql_actions['drop']['msg_ok'] = sql_actions['drop']['msg_ok'] % (existing_constraints[0]['condef'].lower(), ) - sql_actions['add']['execute'] = True - sql_actions['add']['msg_err'] = sql_actions['add']['msg_err'] % (sql_actions['add']['query'], ) - - # we need to add the constraint: - sql_actions = [item for item in sql_actions.values()] - sql_actions.sort(key=lambda x: x['order']) - for sql_action in [action for action 
in sql_actions if action['execute']]: - try: - cr.execute(sql_action['query']) - cr.commit() - _schema.debug(sql_action['msg_ok']) - except: - _schema.warning(sql_action['msg_err']) - cr.rollback() - - - def _execute_sql(self, cr): - """ Execute the SQL code from the _sql attribute (if any).""" - if hasattr(self, "_sql"): - for line in self._sql.split(';'): - line2 = line.replace('\n', '').strip() - if line2: - cr.execute(line2) - cr.commit() - - # - # Update objects that uses this one to update their _inherits fields - # - - def _inherits_reload_src(self): - """ Recompute the _inherit_fields mapping on each _inherits'd child model.""" - for obj in self.pool.models.values(): - if self._name in obj._inherits: - obj._inherits_reload() - - - def _inherits_reload(self): - """ Recompute the _inherit_fields mapping. - - This will also call itself on each inherits'd child model. - - """ - res = {} - for table in self._inherits: - other = self.pool[table] - for col in other._columns.keys(): - res[col] = (table, self._inherits[table], other._columns[col], table) - for col in other._inherit_fields.keys(): - res[col] = (table, self._inherits[table], other._inherit_fields[col][2], other._inherit_fields[col][3]) - self._inherit_fields = res - self._all_columns = self._get_column_infos() - self._inherits_reload_src() - - - def _get_column_infos(self): - """Returns a dict mapping all fields names (direct fields and - inherited field via _inherits) to a ``column_info`` struct - giving detailed columns """ - result = {} - for k, (parent, m2o, col, original_parent) in self._inherit_fields.iteritems(): - result[k] = fields.column_info(k, col, parent, m2o, original_parent) - for k, col in self._columns.iteritems(): - result[k] = fields.column_info(k, col) - return result - - - def _inherits_check(self): - for table, field_name in self._inherits.items(): - if field_name not in self._columns: - _logger.info('Missing many2one field definition for _inherits reference "%s" in "%s", using 
default one.', field_name, self._name) - self._columns[field_name] = fields.many2one(table, string="Automatically created field to link to parent %s" % table, - required=True, ondelete="cascade") - elif not self._columns[field_name].required or self._columns[field_name].ondelete.lower() not in ("cascade", "restrict"): - _logger.warning('Field definition for _inherits reference "%s" in "%s" must be marked as "required" with ondelete="cascade" or "restrict", forcing it to required + cascade.', field_name, self._name) - self._columns[field_name].required = True - self._columns[field_name].ondelete = "cascade" - - - def fields_get(self, cr, user, allfields=None, context=None, write_access=True): - """ Return the definition of each field. - - The returned value is a dictionary (indiced by field name) of - dictionaries. The _inherits'd fields are included. The string, help, - and selection (if present) attributes are translated. - - :param cr: database cursor - :param user: current user id - :param allfields: list of fields - :param context: context arguments, like lang, time zone - :return: dictionary of field dictionaries, each one describing a field of the business object - :raise AccessError: * if user has no create/write rights on the requested object - - """ - if context is None: - context = {} - - write_access = self.check_access_rights(cr, user, 'write', raise_exception=False) \ - or self.check_access_rights(cr, user, 'create', raise_exception=False) - - res = {} - - translation_obj = self.pool.get('ir.translation') - for parent in self._inherits: - res.update(self.pool[parent].fields_get(cr, user, allfields, context)) - - for f, field in self._columns.iteritems(): - if (allfields and f not in allfields) or \ - (field.groups and not self.user_has_groups(cr, user, groups=field.groups, context=context)): - continue - - res[f] = fields.field_to_dict(self, cr, user, field, context=context) - - if not write_access: - res[f]['readonly'] = True - res[f]['states'] = {} - 
- if 'lang' in context: - if 'string' in res[f]: - res_trans = translation_obj._get_source(cr, user, self._name + ',' + f, 'field', context['lang']) - if res_trans: - res[f]['string'] = res_trans - if 'help' in res[f]: - help_trans = translation_obj._get_source(cr, user, self._name + ',' + f, 'help', context['lang']) - if help_trans: - res[f]['help'] = help_trans - - return res - - def get_empty_list_help(self, cr, user, help, context=None): - """ Generic method giving the help message displayed when having - no result to display in a list or kanban view. By default it returns - the help given in parameter that is generally the help message - defined in the action. - """ - return help - - def check_field_access_rights(self, cr, user, operation, fields, context=None): - """ - Check the user access rights on the given fields. This raises Access - Denied if the user does not have the rights. Otherwise it returns the - fields (as is if the fields is not falsy, or the readable/writable - fields if fields is falsy). - """ - def p(field_name): - """Predicate to test if the user has access to the given field name.""" - # Ignore requested field if it doesn't exist. This is ugly but - # it seems to happen at least with 'name_alias' on res.partner. - if field_name not in self._all_columns: - return True - field = self._all_columns[field_name].column - if user != SUPERUSER_ID and field.groups: - return self.user_has_groups(cr, user, groups=field.groups, context=context) - else: - return True - if not fields: - fields = filter(p, self._all_columns.keys()) - else: - filtered_fields = filter(lambda a: not p(a), fields) - if filtered_fields: - _logger.warning('Access Denied by ACLs for operation: %s, uid: %s, model: %s, fields: %s', operation, user, self._name, ', '.join(filtered_fields)) - raise except_orm( - _('Access Denied'), - _('The requested operation cannot be completed due to security restrictions. 
' - 'Please contact your system administrator.\n\n(Document type: %s, Operation: %s)') % \ - (self._description, operation)) - return fields - - def read(self, cr, user, ids, fields=None, context=None, load='_classic_read'): - """ Read records with given ids with the given fields - - :param cr: database cursor - :param user: current user id - :param ids: id or list of the ids of the records to read - :param fields: optional list of field names to return (default: all fields would be returned) - :type fields: list (example ['field_name_1', ...]) - :param context: optional context dictionary - it may contains keys for specifying certain options - like ``context_lang``, ``context_tz`` to alter the results of the call. - A special ``bin_size`` boolean flag may also be passed in the context to request the - value of all fields.binary columns to be returned as the size of the binary instead of its - contents. This can also be selectively overriden by passing a field-specific flag - in the form ``bin_size_XXX: True/False`` where ``XXX`` is the name of the field. - Note: The ``bin_size_XXX`` form is new in OpenERP v6.0. - :return: list of dictionaries((dictionary per record asked)) with requested field values - :rtype: [{‘name_of_the_field’: value, ...}, ...] 
- :raise AccessError: * if user has no read rights on the requested object - * if user tries to bypass access rules for read on the requested object - - """ - - self.check_access_rights(cr, user, 'read') - fields = self.check_field_access_rights(cr, user, 'read', fields) - if isinstance(ids, (int, long)): - select = [ids] - else: - select = ids - select = map(lambda x: isinstance(x, dict) and x['id'] or x, select) - result = self._read_flat(cr, user, select, fields, context, load) - - if isinstance(ids, (int, long)): - return result and result[0] or False - return result - - def _read_flat(self, cr, user, ids, fields_to_read, context=None, load='_classic_read'): - if not context: - context = {} - if not ids: - return [] - if fields_to_read is None: - fields_to_read = self._columns.keys() - else: - fields_to_read = list(set(fields_to_read)) - - # all inherited fields + all non inherited fields for which the attribute whose name is in load is True - fields_pre = [f for f in fields_to_read if - f == self.CONCURRENCY_CHECK_FIELD - or (f in self._columns and getattr(self._columns[f], '_classic_write')) - ] + self._inherits.values() - - res = [] - if len(fields_pre): - def convert_field(f): - f_qual = '%s."%s"' % (self._table, f) # need fully-qualified references in case len(tables) > 1 - if f in ('create_date', 'write_date'): - return "date_trunc('second', %s) as %s" % (f_qual, f) - if f == self.CONCURRENCY_CHECK_FIELD: - if self._log_access: - return "COALESCE(%s.write_date, %s.create_date, (now() at time zone 'UTC'))::timestamp AS %s" % (self._table, self._table, f,) - return "(now() at time zone 'UTC')::timestamp AS %s" % (f,) - if isinstance(self._columns[f], fields.binary) and context.get('bin_size', False): - return 'length(%s) as "%s"' % (f_qual, f) - return f_qual - - # Construct a clause for the security rules. - # 'tables' hold the list of tables necessary for the SELECT including the ir.rule clauses, - # or will at least contain self._table. 
- rule_clause, rule_params, tables = self.pool.get('ir.rule').domain_get(cr, user, self._name, 'read', context=context) - - fields_pre2 = map(convert_field, fields_pre) - order_by = self._parent_order or self._order - select_fields = ','.join(fields_pre2 + ['%s.id' % self._table]) - query = 'SELECT %s FROM %s WHERE %s.id IN %%s' % (select_fields, ','.join(tables), self._table) - if rule_clause: - query += " AND " + (' OR '.join(rule_clause)) - query += " ORDER BY " + order_by - for sub_ids in cr.split_for_in_conditions(ids): - cr.execute(query, [tuple(sub_ids)] + rule_params) - results = cr.dictfetchall() - result_ids = [x['id'] for x in results] - self._check_record_rules_result_count(cr, user, sub_ids, result_ids, 'read', context=context) - res.extend(results) - else: - self.check_access_rule(cr, user, ids, 'read', context=context) - res = map(lambda x: {'id': x}, ids) - - if context.get('lang'): - for f in fields_pre: - if f == self.CONCURRENCY_CHECK_FIELD: - continue - if self._columns[f].translate: - ids = [x['id'] for x in res] - #TODO: optimize out of this loop - res_trans = self.pool.get('ir.translation')._get_ids(cr, user, self._name+','+f, 'model', context['lang'], ids) - for r in res: - r[f] = res_trans.get(r['id'], False) or r[f] - - for table in self._inherits: - col = self._inherits[table] - cols = [x for x in intersect(self._inherit_fields.keys(), fields_to_read) if x not in self._columns.keys()] - if not cols: - continue - res2 = self.pool[table].read(cr, user, [x[col] for x in res], cols, context, load) - - res3 = {} - for r in res2: - res3[r['id']] = r - del r['id'] - - for record in res: - if not record[col]: # if the record is deleted from _inherits table? 
- continue - record.update(res3[record[col]]) - if col not in fields_to_read: - del record[col] - - # all fields which need to be post-processed by a simple function (symbol_get) - fields_post = filter(lambda x: x in self._columns and self._columns[x]._symbol_get, fields_to_read) - if fields_post: - for r in res: - for f in fields_post: - r[f] = self._columns[f]._symbol_get(r[f]) - ids = [x['id'] for x in res] - - # all non inherited fields for which the attribute whose name is in load is False - fields_post = filter(lambda x: x in self._columns and not getattr(self._columns[x], load), fields_to_read) - - # Compute POST fields - todo = {} - for f in fields_post: - todo.setdefault(self._columns[f]._multi, []) - todo[self._columns[f]._multi].append(f) - for key, val in todo.items(): - if key: - res2 = self._columns[val[0]].get(cr, self, ids, val, user, context=context, values=res) - assert res2 is not None, \ - 'The function field "%s" on the "%s" model returned None\n' \ - '(a dictionary was expected).' 
% (val[0], self._name) - for pos in val: - for record in res: - if isinstance(res2[record['id']], str): res2[record['id']] = eval(res2[record['id']]) #TOCHECK : why got string instend of dict in python2.6 - multi_fields = res2.get(record['id'],{}) - if multi_fields: - record[pos] = multi_fields.get(pos,[]) - else: - for f in val: - res2 = self._columns[f].get(cr, self, ids, f, user, context=context, values=res) - for record in res: - if res2: - record[f] = res2[record['id']] - else: - record[f] = [] - - # Warn about deprecated fields now that fields_pre and fields_post are computed - # Explicitly use list() because we may receive tuples - for f in list(fields_pre) + list(fields_post): - field_column = self._all_columns.get(f) and self._all_columns.get(f).column - if field_column and field_column.deprecated: - _logger.warning('Field %s.%s is deprecated: %s', self._name, f, field_column.deprecated) - - readonly = None - for vals in res: - for field in vals.copy(): - fobj = None - if field in self._columns: - fobj = self._columns[field] - - if fobj: - groups = fobj.read - if groups: - edit = False - for group in groups: - module = group.split(".")[0] - grp = group.split(".")[1] - cr.execute("select count(*) from res_groups_users_rel where gid IN (select res_id from ir_model_data where name=%s and module=%s and model=%s) and uid=%s", \ - (grp, module, 'res.groups', user)) - readonly = cr.fetchall() - if readonly[0][0] >= 1: - edit = True - break - elif readonly[0][0] == 0: - edit = False - else: - edit = False - - if not edit: - if type(vals[field]) == type([]): - vals[field] = [] - elif type(vals[field]) == type(0.0): - vals[field] = 0 - elif type(vals[field]) == type(''): - vals[field] = '=No Permission=' - else: - vals[field] = False - - if vals[field] is None: - vals[field] = False - - return res - - # TODO check READ access - def perm_read(self, cr, user, ids, context=None, details=True): - """ - Returns some metadata about the given records. 
- - :param details: if True, \*_uid fields are replaced with the name of the user - :return: list of ownership dictionaries for each requested record - :rtype: list of dictionaries with the following keys: - - * id: object id - * create_uid: user who created the record - * create_date: date when the record was created - * write_uid: last user who changed the record - * write_date: date of the last change to the record - * xmlid: XML ID to use to refer to this record (if there is one), in format ``module.name`` - """ - if not context: - context = {} - if not ids: - return [] - fields = '' - uniq = isinstance(ids, (int, long)) - if uniq: - ids = [ids] - fields = ['id'] - if self._log_access: - fields += ['create_uid', 'create_date', 'write_uid', 'write_date'] - quoted_table = '"%s"' % self._table - fields_str = ",".join('%s.%s'%(quoted_table, field) for field in fields) - query = '''SELECT %s, __imd.module, __imd.name - FROM %s LEFT JOIN ir_model_data __imd - ON (__imd.model = %%s and __imd.res_id = %s.id) - WHERE %s.id IN %%s''' % (fields_str, quoted_table, quoted_table, quoted_table) - cr.execute(query, (self._name, tuple(ids))) - res = cr.dictfetchall() - for r in res: - for key in r: - r[key] = r[key] or False - if details and key in ('write_uid', 'create_uid') and r[key]: - try: - r[key] = self.pool.get('res.users').name_get(cr, user, [r[key]])[0] - except Exception: - pass # Leave the numeric uid there - r['xmlid'] = ("%(module)s.%(name)s" % r) if r['name'] else False - del r['name'], r['module'] - if uniq: - return res[ids[0]] - return res - - def _check_concurrency(self, cr, ids, context): - if not context: - return - if not (context.get(self.CONCURRENCY_CHECK_FIELD) and self._log_access): - return - check_clause = "(id = %s AND %s < COALESCE(write_date, create_date, (now() at time zone 'UTC'))::timestamp)" - for sub_ids in cr.split_for_in_conditions(ids): - ids_to_check = [] - for id in sub_ids: - id_ref = "%s,%s" % (self._name, id) - update_date = 
context[self.CONCURRENCY_CHECK_FIELD].pop(id_ref, None) - if update_date: - ids_to_check.extend([id, update_date]) - if not ids_to_check: - continue - cr.execute("SELECT id FROM %s WHERE %s" % (self._table, " OR ".join([check_clause]*(len(ids_to_check)/2))), tuple(ids_to_check)) - res = cr.fetchone() - if res: - # mention the first one only to keep the error message readable - raise except_orm('ConcurrencyException', _('A document was modified since you last viewed it (%s:%d)') % (self._description, res[0])) - - def _check_record_rules_result_count(self, cr, uid, ids, result_ids, operation, context=None): - """Verify the returned rows after applying record rules matches - the length of `ids`, and raise an appropriate exception if it does not. - """ - ids, result_ids = set(ids), set(result_ids) - missing_ids = ids - result_ids - if missing_ids: - # Attempt to distinguish record rule restriction vs deleted records, - # to provide a more specific error message - check if the missinf - cr.execute('SELECT id FROM ' + self._table + ' WHERE id IN %s', (tuple(missing_ids),)) - forbidden_ids = [x[0] for x in cr.fetchall()] - if forbidden_ids: - # the missing ids are (at least partially) hidden by access rules - if uid == SUPERUSER_ID: - return - _logger.warning('Access Denied by record rules for operation: %s on record ids: %r, uid: %s, model: %s', operation, forbidden_ids, uid, self._name) - raise except_orm(_('Access Denied'), - _('The requested operation cannot be completed due to security restrictions. Please contact your system administrator.\n\n(Document type: %s, Operation: %s)') % \ - (self._description, operation)) - else: - # If we get here, the missing_ids are not in the database - if operation in ('read','unlink'): - # No need to warn about deleting an already deleted record. 
- # And no error when reading a record that was deleted, to prevent spurious - # errors for non-transactional search/read sequences coming from clients - return - _logger.warning('Failed operation on deleted record(s): %s, uid: %s, model: %s', operation, uid, self._name) - raise except_orm(_('Missing document(s)'), - _('One of the documents you are trying to access has been deleted, please try again after refreshing.')) - - - def check_access_rights(self, cr, uid, operation, raise_exception=True): # no context on purpose. - """Verifies that the operation given by ``operation`` is allowed for the user - according to the access rights.""" - return self.pool.get('ir.model.access').check(cr, uid, self._name, operation, raise_exception) - - def check_access_rule(self, cr, uid, ids, operation, context=None): - """Verifies that the operation given by ``operation`` is allowed for the user - according to ir.rules. - - :param operation: one of ``write``, ``unlink`` - :raise except_orm: * if current ir.rules do not permit this operation. - :return: None if the operation is allowed - """ - if uid == SUPERUSER_ID: - return - - if self.is_transient(): - # Only one single implicit access rule for transient models: owner only! - # This is ok to hardcode because we assert that TransientModels always - # have log_access enabled so that the create_uid column is always there. - # And even with _inherits, these fields are always present in the local - # table too, so no need for JOINs. 
- cr.execute("""SELECT distinct create_uid - FROM %s - WHERE id IN %%s""" % self._table, (tuple(ids),)) - uids = [x[0] for x in cr.fetchall()] - if len(uids) != 1 or uids[0] != uid: - raise except_orm(_('Access Denied'), - _('For this kind of document, you may only access records you created yourself.\n\n(Document type: %s)') % (self._description,)) - else: - where_clause, where_params, tables = self.pool.get('ir.rule').domain_get(cr, uid, self._name, operation, context=context) - if where_clause: - where_clause = ' and ' + ' and '.join(where_clause) - for sub_ids in cr.split_for_in_conditions(ids): - cr.execute('SELECT ' + self._table + '.id FROM ' + ','.join(tables) + - ' WHERE ' + self._table + '.id IN %s' + where_clause, - [sub_ids] + where_params) - returned_ids = [x['id'] for x in cr.dictfetchall()] - self._check_record_rules_result_count(cr, uid, sub_ids, returned_ids, operation, context=context) - - def create_workflow(self, cr, uid, ids, context=None): - """Create a workflow instance for each given record IDs.""" - from openerp import workflow - for res_id in ids: - workflow.trg_create(uid, self._name, res_id, cr) - return True - - def delete_workflow(self, cr, uid, ids, context=None): - """Delete the workflow instances bound to the given record IDs.""" - from openerp import workflow - for res_id in ids: - workflow.trg_delete(uid, self._name, res_id, cr) - return True - - def step_workflow(self, cr, uid, ids, context=None): - """Reevaluate the workflow instances of the given record IDs.""" - from openerp import workflow - for res_id in ids: - workflow.trg_write(uid, self._name, res_id, cr) - return True - - def signal_workflow(self, cr, uid, ids, signal, context=None): - """Send given workflow signal and return a dict mapping ids to workflow results""" - from openerp import workflow - result = {} - for res_id in ids: - result[res_id] = workflow.trg_validate(uid, self._name, res_id, signal, cr) - return result - - def redirect_workflow(self, cr, uid, 
old_new_ids, context=None): - """ Rebind the workflow instance bound to the given 'old' record IDs to - the given 'new' IDs. (``old_new_ids`` is a list of pairs ``(old, new)``. - """ - from openerp import workflow - for old_id, new_id in old_new_ids: - workflow.trg_redirect(uid, self._name, old_id, new_id, cr) - return True - - def unlink(self, cr, uid, ids, context=None): - """ - Delete records with given ids - - :param cr: database cursor - :param uid: current user id - :param ids: id or list of ids - :param context: (optional) context arguments, like lang, time zone - :return: True - :raise AccessError: * if user has no unlink rights on the requested object - * if user tries to bypass access rules for unlink on the requested object - :raise UserError: if the record is default property for other records - - """ - if not ids: - return True - if isinstance(ids, (int, long)): - ids = [ids] - - result_store = self._store_get_values(cr, uid, ids, self._all_columns.keys(), context) - - self._check_concurrency(cr, ids, context) - - self.check_access_rights(cr, uid, 'unlink') - - ir_property = self.pool.get('ir.property') - - # Check if the records are used as default properties. - domain = [('res_id', '=', False), - ('value_reference', 'in', ['%s,%s' % (self._name, i) for i in ids]), - ] - if ir_property.search(cr, uid, domain, context=context): - raise except_orm(_('Error'), _('Unable to delete this document because it is used as a default property')) - - # Delete the records' properties. 
- property_ids = ir_property.search(cr, uid, [('res_id', 'in', ['%s,%s' % (self._name, i) for i in ids])], context=context) - ir_property.unlink(cr, uid, property_ids, context=context) - - self.delete_workflow(cr, uid, ids, context=context) - - self.check_access_rule(cr, uid, ids, 'unlink', context=context) - pool_model_data = self.pool.get('ir.model.data') - ir_values_obj = self.pool.get('ir.values') - for sub_ids in cr.split_for_in_conditions(ids): - cr.execute('delete from ' + self._table + ' ' \ - 'where id IN %s', (sub_ids,)) - - # Removing the ir_model_data reference if the record being deleted is a record created by xml/csv file, - # as these are not connected with real database foreign keys, and would be dangling references. - # Note: following steps performed as admin to avoid access rights restrictions, and with no context - # to avoid possible side-effects during admin calls. - # Step 1. Calling unlink of ir_model_data only for the affected IDS - reference_ids = pool_model_data.search(cr, SUPERUSER_ID, [('res_id','in',list(sub_ids)),('model','=',self._name)]) - # Step 2. 
Marching towards the real deletion of referenced records - if reference_ids: - pool_model_data.unlink(cr, SUPERUSER_ID, reference_ids) - - # For the same reason, removing the record relevant to ir_values - ir_value_ids = ir_values_obj.search(cr, uid, - ['|',('value','in',['%s,%s' % (self._name, sid) for sid in sub_ids]),'&',('res_id','in',list(sub_ids)),('model','=',self._name)], - context=context) - if ir_value_ids: - ir_values_obj.unlink(cr, uid, ir_value_ids, context=context) - - for order, obj_name, store_ids, fields in result_store: - if obj_name == self._name: - effective_store_ids = list(set(store_ids) - set(ids)) - else: - effective_store_ids = store_ids - if effective_store_ids: - obj = self.pool[obj_name] - cr.execute('select id from '+obj._table+' where id IN %s', (tuple(effective_store_ids),)) - rids = map(lambda x: x[0], cr.fetchall()) - if rids: - obj._store_set_values(cr, uid, rids, fields, context) - - return True - - # - # TODO: Validate - # - def write(self, cr, user, ids, vals, context=None): - """ - Update records with given ids with the given field values - - :param cr: database cursor - :param user: current user id - :type user: integer - :param ids: object id or list of object ids to update according to **vals** - :param vals: field values to update, e.g {'field_name': new_field_value, ...} - :type vals: dictionary - :param context: (optional) context arguments, e.g. 
{'lang': 'en_us', 'tz': 'UTC', ...} - :type context: dictionary - :return: True - :raise AccessError: * if user has no write rights on the requested object - * if user tries to bypass access rules for write on the requested object - :raise ValidateError: if user tries to enter invalid value for a field that is not in selection - :raise UserError: if a loop would be created in a hierarchy of objects a result of the operation (such as setting an object as its own parent) - - **Note**: The type of field values to pass in ``vals`` for relationship fields is specific: - - + For a many2many field, a list of tuples is expected. - Here is the list of tuple that are accepted, with the corresponding semantics :: - - (0, 0, { values }) link to a new record that needs to be created with the given values dictionary - (1, ID, { values }) update the linked record with id = ID (write *values* on it) - (2, ID) remove and delete the linked record with id = ID (calls unlink on ID, that will delete the object completely, and the link to it as well) - (3, ID) cut the link to the linked record with id = ID (delete the relationship between the two objects but does not delete the target object itself) - (4, ID) link to existing record with id = ID (adds a relationship) - (5) unlink all (like using (3,ID) for all linked records) - (6, 0, [IDs]) replace the list of linked IDs (like using (5) then (4,ID) for each ID in the list of IDs) - - Example: - [(6, 0, [8, 5, 6, 4])] sets the many2many to ids [8, 5, 6, 4] - - + For a one2many field, a lits of tuples is expected. 
- Here is the list of tuple that are accepted, with the corresponding semantics :: - - (0, 0, { values }) link to a new record that needs to be created with the given values dictionary - (1, ID, { values }) update the linked record with id = ID (write *values* on it) - (2, ID) remove and delete the linked record with id = ID (calls unlink on ID, that will delete the object completely, and the link to it as well) - - Example: - [(0, 0, {'field_name':field_value_record1, ...}), (0, 0, {'field_name':field_value_record2, ...})] - - + For a many2one field, simply use the ID of target record, which must already exist, or ``False`` to remove the link. - + For a reference field, use a string with the model name, a comma, and the target object id (example: ``'product.product, 5'``) - - """ - readonly = None - self.check_field_access_rights(cr, user, 'write', vals.keys()) - for field in vals.copy(): - fobj = None - if field in self._columns: - fobj = self._columns[field] - elif field in self._inherit_fields: - fobj = self._inherit_fields[field][2] - if not fobj: - continue - groups = fobj.write - - if groups: - edit = False - for group in groups: - module = group.split(".")[0] - grp = group.split(".")[1] - cr.execute("select count(*) from res_groups_users_rel where gid IN (select res_id from ir_model_data where name=%s and module=%s and model=%s) and uid=%s", \ - (grp, module, 'res.groups', user)) - readonly = cr.fetchall() - if readonly[0][0] >= 1: - edit = True - break - - if not edit: - vals.pop(field) - - if not context: - context = {} - if not ids: - return True - if isinstance(ids, (int, long)): - ids = [ids] - - self._check_concurrency(cr, ids, context) - self.check_access_rights(cr, user, 'write') - - result = self._store_get_values(cr, user, ids, vals.keys(), context) or [] - - # No direct update of parent_left/right - vals.pop('parent_left', None) - vals.pop('parent_right', None) - - parents_changed = [] - parent_order = self._parent_order or self._order - if 
self._parent_store and (self._parent_name in vals) and not context.get('defer_parent_store_computation'): - # The parent_left/right computation may take up to - # 5 seconds. No need to recompute the values if the - # parent is the same. - # Note: to respect parent_order, nodes must be processed in - # order, so ``parents_changed`` must be ordered properly. - parent_val = vals[self._parent_name] - if parent_val: - query = "SELECT id FROM %s WHERE id IN %%s AND (%s != %%s OR %s IS NULL) ORDER BY %s" % \ - (self._table, self._parent_name, self._parent_name, parent_order) - cr.execute(query, (tuple(ids), parent_val)) - else: - query = "SELECT id FROM %s WHERE id IN %%s AND (%s IS NOT NULL) ORDER BY %s" % \ - (self._table, self._parent_name, parent_order) - cr.execute(query, (tuple(ids),)) - parents_changed = map(operator.itemgetter(0), cr.fetchall()) - - upd0 = [] - upd1 = [] - upd_todo = [] - updend = [] - direct = [] - totranslate = context.get('lang', False) and (context['lang'] != 'en_US') - for field in vals: - field_column = self._all_columns.get(field) and self._all_columns.get(field).column - if field_column and field_column.deprecated: - _logger.warning('Field %s.%s is deprecated: %s', self._name, field, field_column.deprecated) - if field in self._columns: - if self._columns[field]._classic_write and not (hasattr(self._columns[field], '_fnct_inv')): - if (not totranslate) or not self._columns[field].translate: - upd0.append('"'+field+'"='+self._columns[field]._symbol_set[0]) - upd1.append(self._columns[field]._symbol_set[1](vals[field])) - direct.append(field) - else: - upd_todo.append(field) - else: - updend.append(field) - if field in self._columns \ - and hasattr(self._columns[field], 'selection') \ - and vals[field]: - self._check_selection_field_value(cr, user, field, vals[field], context=context) - - if self._log_access: - upd0.append('write_uid=%s') - upd0.append("write_date=(now() at time zone 'UTC')") - upd1.append(user) - - if len(upd0): - 
self.check_access_rule(cr, user, ids, 'write', context=context) - for sub_ids in cr.split_for_in_conditions(ids): - cr.execute('update ' + self._table + ' set ' + ','.join(upd0) + ' ' \ - 'where id IN %s', upd1 + [sub_ids]) - if cr.rowcount != len(sub_ids): - raise except_orm(_('AccessError'), - _('One of the records you are trying to modify has already been deleted (Document type: %s).') % self._description) - - if totranslate: - # TODO: optimize - for f in direct: - if self._columns[f].translate: - src_trans = self.pool[self._name].read(cr, user, ids, [f])[0][f] - if not src_trans: - src_trans = vals[f] - # Inserting value to DB - context_wo_lang = dict(context, lang=None) - self.write(cr, user, ids, {f: vals[f]}, context=context_wo_lang) - self.pool.get('ir.translation')._set_ids(cr, user, self._name+','+f, 'model', context['lang'], ids, vals[f], src_trans) - - - # call the 'set' method of fields which are not classic_write - upd_todo.sort(lambda x, y: self._columns[x].priority-self._columns[y].priority) - - # default element in context must be removed when call a one2many or many2many - rel_context = context.copy() - for c in context.items(): - if c[0].startswith('default_'): - del rel_context[c[0]] - - for field in upd_todo: - for id in ids: - result += self._columns[field].set(cr, self, id, field, vals[field], user, context=rel_context) or [] - - unknown_fields = updend[:] - for table in self._inherits: - col = self._inherits[table] - nids = [] - for sub_ids in cr.split_for_in_conditions(ids): - cr.execute('select distinct "'+col+'" from "'+self._table+'" ' \ - 'where id IN %s', (sub_ids,)) - nids.extend([x[0] for x in cr.fetchall()]) - - v = {} - for val in updend: - if self._inherit_fields[val][0] == table: - v[val] = vals[val] - unknown_fields.remove(val) - if v: - self.pool[table].write(cr, user, nids, v, context) - - if unknown_fields: - _logger.warning( - 'No such field(s) in model %s: %s.', - self._name, ', '.join(unknown_fields)) - self._validate(cr, 
user, ids, context) - - # TODO: use _order to set dest at the right position and not first node of parent - # We can't defer parent_store computation because the stored function - # fields that are computer may refer (directly or indirectly) to - # parent_left/right (via a child_of domain) - if parents_changed: - if self.pool._init: - self.pool._init_parent[self._name] = True - else: - order = self._parent_order or self._order - parent_val = vals[self._parent_name] - if parent_val: - clause, params = '%s=%%s' % (self._parent_name,), (parent_val,) - else: - clause, params = '%s IS NULL' % (self._parent_name,), () - - for id in parents_changed: - cr.execute('SELECT parent_left, parent_right FROM %s WHERE id=%%s' % (self._table,), (id,)) - pleft, pright = cr.fetchone() - distance = pright - pleft + 1 - - # Positions of current siblings, to locate proper insertion point; - # this can _not_ be fetched outside the loop, as it needs to be refreshed - # after each update, in case several nodes are sequentially inserted one - # next to the other (i.e computed incrementally) - cr.execute('SELECT parent_right, id FROM %s WHERE %s ORDER BY %s' % (self._table, clause, parent_order), params) - parents = cr.fetchall() - - # Find Position of the element - position = None - for (parent_pright, parent_id) in parents: - if parent_id == id: - break - position = parent_pright and parent_pright + 1 or 1 - - # It's the first node of the parent - if not position: - if not parent_val: - position = 1 - else: - cr.execute('select parent_left from '+self._table+' where id=%s', (parent_val,)) - position = cr.fetchone()[0] + 1 - - if pleft < position <= pright: - raise except_orm(_('UserError'), _('Recursivity Detected.')) - - if pleft < position: - cr.execute('update '+self._table+' set parent_left=parent_left+%s where parent_left>=%s', (distance, position)) - cr.execute('update '+self._table+' set parent_right=parent_right+%s where parent_right>=%s', (distance, position)) - cr.execute('update 
'+self._table+' set parent_left=parent_left+%s, parent_right=parent_right+%s where parent_left>=%s and parent_left<%s', (position-pleft, position-pleft, pleft, pright)) - else: - cr.execute('update '+self._table+' set parent_left=parent_left+%s where parent_left>=%s', (distance, position)) - cr.execute('update '+self._table+' set parent_right=parent_right+%s where parent_right>=%s', (distance, position)) - cr.execute('update '+self._table+' set parent_left=parent_left-%s, parent_right=parent_right-%s where parent_left>=%s and parent_left<%s', (pleft-position+distance, pleft-position+distance, pleft+distance, pright+distance)) - - result += self._store_get_values(cr, user, ids, vals.keys(), context) - result.sort() - - done = {} - for order, model_name, ids_to_update, fields_to_recompute in result: - key = (model_name, tuple(fields_to_recompute)) - done.setdefault(key, {}) - # avoid to do several times the same computation - todo = [] - for id in ids_to_update: - if id not in done[key]: - done[key][id] = True - todo.append(id) - self.pool[model_name]._store_set_values(cr, user, todo, fields_to_recompute, context) - - self.step_workflow(cr, user, ids, context=context) - return True - - # - # TODO: Should set perm to user.xxx - # - def create(self, cr, user, vals, context=None): - """ - Create a new record for the model. - - The values for the new record are initialized using the ``vals`` - argument, and if necessary the result of ``default_get()``. - - :param cr: database cursor - :param user: current user id - :type user: integer - :param vals: field values for new record, e.g {'field_name': field_value, ...} - :type vals: dictionary - :param context: optional context arguments, e.g. 
{'lang': 'en_us', 'tz': 'UTC', ...} - :type context: dictionary - :return: id of new record created - :raise AccessError: * if user has no create rights on the requested object - * if user tries to bypass access rules for create on the requested object - :raise ValidateError: if user tries to enter invalid value for a field that is not in selection - :raise UserError: if a loop would be created in a hierarchy of objects a result of the operation (such as setting an object as its own parent) - - **Note**: The type of field values to pass in ``vals`` for relationship fields is specific. - Please see the description of the :py:meth:`~osv.osv.osv.write` method for details about the possible values and how - to specify them. - - """ - if not context: - context = {} - - if self.is_transient(): - self._transient_vacuum(cr, user) - - self.check_access_rights(cr, user, 'create') - - if self._log_access: - for f in LOG_ACCESS_COLUMNS: - if vals.pop(f, None) is not None: - _logger.warning( - 'Field `%s` is not allowed when creating the model `%s`.', - f, self._name) - vals = self._add_missing_default_values(cr, user, vals, context) - - tocreate = {} - for v in self._inherits: - if self._inherits[v] not in vals: - tocreate[v] = {} - else: - tocreate[v] = {'id': vals[self._inherits[v]]} - - columns = [ - # columns will contain a list of field defined as a tuple - # tuple(field_name, format_string, field_value) - # the tuple will be used by the string formatting for the INSERT - # statement. 
- ('id', "nextval('%s')" % self._sequence), - ] - - upd_todo = [] - unknown_fields = [] - for v in vals.keys(): - if v in self._inherit_fields and v not in self._columns: - (table, col, col_detail, original_parent) = self._inherit_fields[v] - tocreate[table][v] = vals[v] - del vals[v] - else: - if (v not in self._inherit_fields) and (v not in self._columns): - del vals[v] - unknown_fields.append(v) - if unknown_fields: - _logger.warning( - 'No such field(s) in model %s: %s.', - self._name, ', '.join(unknown_fields)) - - if not self._sequence: - raise except_orm( - _('UserError'), - _('You cannot perform this operation. New Record Creation is not allowed for this object as this object is for reporting purpose.') - ) - - for table in tocreate: - if self._inherits[table] in vals: - del vals[self._inherits[table]] - - record_id = tocreate[table].pop('id', None) - - # When linking/creating parent records, force context without 'no_store_function' key that - # defers stored functions computing, as these won't be computed in batch at the end of create(). 
- parent_context = dict(context) - parent_context.pop('no_store_function', None) - - if record_id is None or not record_id: - record_id = self.pool[table].create(cr, user, tocreate[table], context=parent_context) - else: - self.pool[table].write(cr, user, [record_id], tocreate[table], context=parent_context) - - columns.append((self._inherits[table], '%s', record_id)) - - #Start : Set bool fields to be False if they are not touched(to make search more powerful) - bool_fields = [x for x in self._columns.keys() if self._columns[x]._type=='boolean'] - - for bool_field in bool_fields: - if bool_field not in vals: - vals[bool_field] = False - #End - for field in vals.copy(): - fobj = None - if field in self._columns: - fobj = self._columns[field] - else: - fobj = self._inherit_fields[field][2] - if not fobj: - continue - groups = fobj.write - if groups: - edit = False - for group in groups: - module = group.split(".")[0] - grp = group.split(".")[1] - cr.execute("select count(*) from res_groups_users_rel where gid IN (select res_id from ir_model_data where name='%s' and module='%s' and model='%s') and uid=%s" % \ - (grp, module, 'res.groups', user)) - readonly = cr.fetchall() - if readonly[0][0] >= 1: - edit = True - break - elif readonly[0][0] == 0: - edit = False - else: - edit = False - - if not edit: - vals.pop(field) - for field in vals: - current_field = self._columns[field] - if current_field._classic_write: - columns.append((field, '%s', current_field._symbol_set[1](vals[field]))) - - #for the function fields that receive a value, we set them directly in the database - #(they may be required), but we also need to trigger the _fct_inv() - if (hasattr(current_field, '_fnct_inv')) and not isinstance(current_field, fields.related): - #TODO: this way to special case the related fields is really creepy but it shouldn't be changed at - #one week of the release candidate. 
It seems the only good way to handle correctly this is to add an - #attribute to make a field `really readonly´ and thus totally ignored by the create()... otherwise - #if, for example, the related has a default value (for usability) then the fct_inv is called and it - #may raise some access rights error. Changing this is a too big change for now, and is thus postponed - #after the release but, definitively, the behavior shouldn't be different for related and function - #fields. - upd_todo.append(field) - else: - #TODO: this `if´ statement should be removed because there is no good reason to special case the fields - #related. See the above TODO comment for further explanations. - if not isinstance(current_field, fields.related): - upd_todo.append(field) - if field in self._columns \ - and hasattr(current_field, 'selection') \ - and vals[field]: - self._check_selection_field_value(cr, user, field, vals[field], context=context) - if self._log_access: - columns.append(('create_uid', '%s', user)) - columns.append(('write_uid', '%s', user)) - columns.append(('create_date', "(now() at time zone 'UTC')")) - columns.append(('write_date', "(now() at time zone 'UTC')")) - - # the list of tuples used in this formatting corresponds to - # tuple(field_name, format, value) - # In some case, for example (id, create_date, write_date) we does not - # need to read the third value of the tuple, because the real value is - # encoded in the second value (the format). 
- cr.execute( - """INSERT INTO "%s" (%s) VALUES(%s) RETURNING id""" % ( - self._table, - ', '.join('"%s"' % f[0] for f in columns), - ', '.join(f[1] for f in columns) - ), - tuple([f[2] for f in columns if len(f) > 2]) - ) - - id_new, = cr.fetchone() - upd_todo.sort(lambda x, y: self._columns[x].priority-self._columns[y].priority) - - if self._parent_store and not context.get('defer_parent_store_computation'): - if self.pool._init: - self.pool._init_parent[self._name] = True - else: - parent = vals.get(self._parent_name, False) - if parent: - cr.execute('select parent_right from '+self._table+' where '+self._parent_name+'=%s order by '+(self._parent_order or self._order), (parent,)) - pleft_old = None - result_p = cr.fetchall() - for (pleft,) in result_p: - if not pleft: - break - pleft_old = pleft - if not pleft_old: - cr.execute('select parent_left from '+self._table+' where id=%s', (parent,)) - pleft_old = cr.fetchone()[0] - pleft = pleft_old - else: - cr.execute('select max(parent_right) from '+self._table) - pleft = cr.fetchone()[0] or 0 - cr.execute('update '+self._table+' set parent_left=parent_left+2 where parent_left>%s', (pleft,)) - cr.execute('update '+self._table+' set parent_right=parent_right+2 where parent_right>%s', (pleft,)) - cr.execute('update '+self._table+' set parent_left=%s,parent_right=%s where id=%s', (pleft+1, pleft+2, id_new)) - - # default element in context must be remove when call a one2many or many2many - rel_context = context.copy() - for c in context.items(): - if c[0].startswith('default_'): - del rel_context[c[0]] - - result = [] - for field in upd_todo: - result += self._columns[field].set(cr, self, id_new, field, vals[field], user, rel_context) or [] - self._validate(cr, user, [id_new], context) - - if not context.get('no_store_function', False): - result += self._store_get_values(cr, user, [id_new], - list(set(vals.keys() + self._inherits.values())), - context) - result.sort() - done = [] - for order, model_name, ids, fields2 
in result: - if not (model_name, ids, fields2) in done: - self.pool[model_name]._store_set_values(cr, user, ids, fields2, context) - done.append((model_name, ids, fields2)) - - if self._log_create and not (context and context.get('no_store_function', False)): - message = self._description + \ - " '" + \ - self.name_get(cr, user, [id_new], context=context)[0][1] + \ - "' " + _("created.") - self.log(cr, user, id_new, message, True, context=context) - self.check_access_rule(cr, user, [id_new], 'create', context=context) - self.create_workflow(cr, user, [id_new], context=context) - return id_new - - def browse(self, cr, uid, select, context=None, list_class=None, fields_process=None): - """Fetch records as objects allowing to use dot notation to browse fields and relations - - :param cr: database cursor - :param uid: current user id - :param select: id or list of ids. - :param context: context arguments, like lang, time zone - :rtype: object or list of objects requested - - """ - self._list_class = list_class or browse_record_list - cache = {} - # need to accepts ints and longs because ids coming from a method - # launched by button in the interface have a type long... - if isinstance(select, (int, long)): - return browse_record(cr, uid, select, self, cache, context=context, list_class=self._list_class, fields_process=fields_process) - elif isinstance(select, list): - return self._list_class((browse_record(cr, uid, id, self, cache, context=context, list_class=self._list_class, fields_process=fields_process) for id in select), context=context) - else: - return browse_null() - - def _store_get_values(self, cr, uid, ids, fields, context): - """Returns an ordered list of fields.function to call due to - an update operation on ``fields`` of records with ``ids``, - obtained by calling the 'store' triggers of these fields, - as setup by their 'store' attribute. 
- - :return: [(priority, model_name, [record_ids,], [function_fields,])] - """ - if fields is None: fields = [] - stored_functions = self.pool._store_function.get(self._name, []) - - # use indexed names for the details of the stored_functions: - model_name_, func_field_to_compute_, target_ids_func_, trigger_fields_, priority_ = range(5) - - # only keep store triggers that should be triggered for the ``fields`` - # being written to. - triggers_to_compute = [f for f in stored_functions \ - if ((not f[trigger_fields_]) or set(fields).intersection(f[trigger_fields_]))] - - to_compute_map = {} - target_id_results = {} - for store_trigger in triggers_to_compute: - target_func_id_ = id(store_trigger[target_ids_func_]) - if not target_func_id_ in target_id_results: - # use admin user for accessing objects having rules defined on store fields - target_id_results[target_func_id_] = [i for i in store_trigger[target_ids_func_](self, cr, SUPERUSER_ID, ids, context) if i] - target_ids = target_id_results[target_func_id_] - - # the compound key must consider the priority and model name - key = (store_trigger[priority_], store_trigger[model_name_]) - for target_id in target_ids: - to_compute_map.setdefault(key, {}).setdefault(target_id,set()).add(tuple(store_trigger)) - - # Here to_compute_map looks like: - # { (10, 'model_a') : { target_id1: [ (trigger_1_tuple, trigger_2_tuple) ], ... } - # (20, 'model_a') : { target_id2: [ (trigger_3_tuple, trigger_4_tuple) ], ... } - # (99, 'model_a') : { target_id1: [ (trigger_5_tuple, trigger_6_tuple) ], ... } - # } - - # Now we need to generate the batch function calls list - # call_map = - # { (10, 'model_a') : [(10, 'model_a', [record_ids,], [function_fields,])] } - call_map = {} - for ((priority,model), id_map) in to_compute_map.iteritems(): - trigger_ids_maps = {} - # function_ids_maps = - # { (function_1_tuple, function_2_tuple) : [target_id1, target_id2, ..] 
} - for target_id, triggers in id_map.iteritems(): - trigger_ids_maps.setdefault(tuple(triggers), []).append(target_id) - for triggers, target_ids in trigger_ids_maps.iteritems(): - call_map.setdefault((priority,model),[]).append((priority, model, target_ids, - [t[func_field_to_compute_] for t in triggers])) - ordered_keys = call_map.keys() - ordered_keys.sort() - result = [] - if ordered_keys: - result = reduce(operator.add, (call_map[k] for k in ordered_keys)) - return result - - def _store_set_values(self, cr, uid, ids, fields, context): - """Calls the fields.function's "implementation function" for all ``fields``, on records with ``ids`` (taking care of - respecting ``multi`` attributes), and stores the resulting values in the database directly.""" - if not ids: - return True - field_flag = False - field_dict = {} - if self._log_access: - cr.execute('select id,write_date from '+self._table+' where id IN %s', (tuple(ids),)) - res = cr.fetchall() - for r in res: - if r[1]: - field_dict.setdefault(r[0], []) - res_date = time.strptime((r[1])[:19], '%Y-%m-%d %H:%M:%S') - write_date = datetime.datetime.fromtimestamp(time.mktime(res_date)) - for i in self.pool._store_function.get(self._name, []): - if i[5]: - up_write_date = write_date + datetime.timedelta(hours=i[5]) - if datetime.datetime.now() < up_write_date: - if i[1] in fields: - field_dict[r[0]].append(i[1]) - if not field_flag: - field_flag = True - todo = {} - keys = [] - for f in fields: - if self._columns[f]._multi not in keys: - keys.append(self._columns[f]._multi) - todo.setdefault(self._columns[f]._multi, []) - todo[self._columns[f]._multi].append(f) - for key in keys: - val = todo[key] - if key: - # use admin user for accessing objects having rules defined on store fields - result = self._columns[val[0]].get(cr, self, ids, val, SUPERUSER_ID, context=context) - for id, value in result.items(): - if field_flag: - for f in value.keys(): - if f in field_dict[id]: - value.pop(f) - upd0 = [] - upd1 = [] - for 
v in value: - if v not in val: - continue - if self._columns[v]._type == 'many2one': - try: - value[v] = value[v][0] - except: - pass - upd0.append('"'+v+'"='+self._columns[v]._symbol_set[0]) - upd1.append(self._columns[v]._symbol_set[1](value[v])) - upd1.append(id) - if upd0 and upd1: - cr.execute('update "' + self._table + '" set ' + \ - ','.join(upd0) + ' where id = %s', upd1) - - else: - for f in val: - # use admin user for accessing objects having rules defined on store fields - result = self._columns[f].get(cr, self, ids, f, SUPERUSER_ID, context=context) - for r in result.keys(): - if field_flag: - if r in field_dict.keys(): - if f in field_dict[r]: - result.pop(r) - for id, value in result.items(): - if self._columns[f]._type == 'many2one': - try: - value = value[0] - except: - pass - cr.execute('update "' + self._table + '" set ' + \ - '"'+f+'"='+self._columns[f]._symbol_set[0] + ' where id = %s', (self._columns[f]._symbol_set[1](value), id)) - return True - - # - # TODO: Validate - # - def perm_write(self, cr, user, ids, fields, context=None): - raise NotImplementedError(_('This method does not exist anymore')) - - # TODO: ameliorer avec NULL - def _where_calc(self, cr, user, domain, active_test=True, context=None): - """Computes the WHERE clause needed to implement an OpenERP domain. - :param domain: the domain to compute - :type domain: list - :param active_test: whether the default filtering of records with ``active`` - field set to ``False`` should be applied. - :return: the query expressing the given domain as provided in domain - :rtype: osv.query.Query - """ - if not context: - context = {} - domain = domain[:] - # if the object has a field named 'active', filter out all inactive - # records unless they were explicitely asked for - if 'active' in self._all_columns and (active_test and context.get('active_test', True)): - if domain: - # the item[0] trick below works for domain items and '&'/'|'/'!' 
- # operators too - if not any(item[0] == 'active' for item in domain): - domain.insert(0, ('active', '=', 1)) - else: - domain = [('active', '=', 1)] - - if domain: - e = expression.expression(cr, user, domain, self, context) - tables = e.get_tables() - where_clause, where_params = e.to_sql() - where_clause = where_clause and [where_clause] or [] - else: - where_clause, where_params, tables = [], [], ['"%s"' % self._table] - - return Query(tables, where_clause, where_params) - - def _check_qorder(self, word): - if not regex_order.match(word): - raise except_orm(_('AccessError'), _('Invalid "order" specified. A valid "order" specification is a comma-separated list of valid field names (optionally followed by asc/desc for the direction)')) - return True - - def _apply_ir_rules(self, cr, uid, query, mode='read', context=None): - """Add what's missing in ``query`` to implement all appropriate ir.rules - (using the ``model_name``'s rules or the current model's rules if ``model_name`` is None) - - :param query: the current query object - """ - if uid == SUPERUSER_ID: - return - - def apply_rule(added_clause, added_params, added_tables, parent_model=None, child_object=None): - """ :param string parent_model: string of the parent model - :param model child_object: model object, base of the rule application - """ - if added_clause: - if parent_model and child_object: - # as inherited rules are being applied, we need to add the missing JOIN - # to reach the parent table (if it was not JOINed yet in the query) - parent_alias = child_object._inherits_join_add(child_object, parent_model, query) - # inherited rules are applied on the external table -> need to get the alias and replace - parent_table = self.pool[parent_model]._table - added_clause = [clause.replace('"%s"' % parent_table, '"%s"' % parent_alias) for clause in added_clause] - # change references to parent_table to parent_alias, because we now use the alias to refer to the table - new_tables = [] - for table in 
added_tables: - # table is just a table name -> switch to the full alias - if table == '"%s"' % parent_table: - new_tables.append('"%s" as "%s"' % (parent_table, parent_alias)) - # table is already a full statement -> replace reference to the table to its alias, is correct with the way aliases are generated - else: - new_tables.append(table.replace('"%s"' % parent_table, '"%s"' % parent_alias)) - added_tables = new_tables - query.where_clause += added_clause - query.where_clause_params += added_params - for table in added_tables: - if table not in query.tables: - query.tables.append(table) - return True - return False - - # apply main rules on the object - rule_obj = self.pool.get('ir.rule') - rule_where_clause, rule_where_clause_params, rule_tables = rule_obj.domain_get(cr, uid, self._name, mode, context=context) - apply_rule(rule_where_clause, rule_where_clause_params, rule_tables) - - # apply ir.rules from the parents (through _inherits) - for inherited_model in self._inherits: - rule_where_clause, rule_where_clause_params, rule_tables = rule_obj.domain_get(cr, uid, inherited_model, mode, context=context) - apply_rule(rule_where_clause, rule_where_clause_params, rule_tables, - parent_model=inherited_model, child_object=self) - - def _generate_m2o_order_by(self, order_field, query): - """ - Add possibly missing JOIN to ``query`` and generate the ORDER BY clause for m2o fields, - either native m2o fields or function/related fields that are stored, including - intermediate JOINs for inheritance if required. 
- - :return: the qualified field name to use in an ORDER BY clause to sort by ``order_field`` - """ - if order_field not in self._columns and order_field in self._inherit_fields: - # also add missing joins for reaching the table containing the m2o field - qualified_field = self._inherits_join_calc(order_field, query) - order_field_column = self._inherit_fields[order_field][2] - else: - qualified_field = '"%s"."%s"' % (self._table, order_field) - order_field_column = self._columns[order_field] - - assert order_field_column._type == 'many2one', 'Invalid field passed to _generate_m2o_order_by()' - if not order_field_column._classic_write and not getattr(order_field_column, 'store', False): - _logger.debug("Many2one function/related fields must be stored " \ - "to be used as ordering fields! Ignoring sorting for %s.%s", - self._name, order_field) - return - - # figure out the applicable order_by for the m2o - dest_model = self.pool[order_field_column._obj] - m2o_order = dest_model._order - if not regex_order.match(m2o_order): - # _order is complex, can't use it here, so we default to _rec_name - m2o_order = dest_model._rec_name - else: - # extract the field names, to be able to qualify them and add desc/asc - m2o_order_list = [] - for order_part in m2o_order.split(","): - m2o_order_list.append(order_part.strip().split(" ", 1)[0].strip()) - m2o_order = m2o_order_list - - # Join the dest m2o table if it's not joined yet. 
We use [LEFT] OUTER join here - # as we don't want to exclude results that have NULL values for the m2o - src_table, src_field = qualified_field.replace('"', '').split('.', 1) - dst_alias, dst_alias_statement = query.add_join((src_table, dest_model._table, src_field, 'id', src_field), implicit=False, outer=True) - qualify = lambda field: '"%s"."%s"' % (dst_alias, field) - return map(qualify, m2o_order) if isinstance(m2o_order, list) else qualify(m2o_order) - - def _generate_order_by(self, order_spec, query): - """ - Attempt to consruct an appropriate ORDER BY clause based on order_spec, which must be - a comma-separated list of valid field names, optionally followed by an ASC or DESC direction. - - :raise" except_orm in case order_spec is malformed - """ - order_by_clause = '' - order_spec = order_spec or self._order - if order_spec: - order_by_elements = [] - self._check_qorder(order_spec) - for order_part in order_spec.split(','): - order_split = order_part.strip().split(' ') - order_field = order_split[0].strip() - order_direction = order_split[1].strip() if len(order_split) == 2 else '' - inner_clause = None - if order_field == 'id' or (self._log_access and order_field in LOG_ACCESS_COLUMNS.keys()): - order_by_elements.append('"%s"."%s" %s' % (self._table, order_field, order_direction)) - elif order_field in self._columns: - order_column = self._columns[order_field] - if order_column._classic_read: - inner_clause = '"%s"."%s"' % (self._table, order_field) - elif order_column._type == 'many2one': - inner_clause = self._generate_m2o_order_by(order_field, query) - else: - continue # ignore non-readable or "non-joinable" fields - elif order_field in self._inherit_fields: - parent_obj = self.pool[self._inherit_fields[order_field][3]] - order_column = parent_obj._columns[order_field] - if order_column._classic_read: - inner_clause = self._inherits_join_calc(order_field, query) - elif order_column._type == 'many2one': - inner_clause = 
self._generate_m2o_order_by(order_field, query) - else: - continue # ignore non-readable or "non-joinable" fields - else: - raise ValueError( _("Sorting field %s not found on model %s") %( order_field, self._name)) - if inner_clause: - if isinstance(inner_clause, list): - for clause in inner_clause: - order_by_elements.append("%s %s" % (clause, order_direction)) - else: - order_by_elements.append("%s %s" % (inner_clause, order_direction)) - if order_by_elements: - order_by_clause = ",".join(order_by_elements) - - return order_by_clause and (' ORDER BY %s ' % order_by_clause) or '' - - def _search(self, cr, user, args, offset=0, limit=None, order=None, context=None, count=False, access_rights_uid=None): - """ - Private implementation of search() method, allowing specifying the uid to use for the access right check. - This is useful for example when filling in the selection list for a drop-down and avoiding access rights errors, - by specifying ``access_rights_uid=1`` to bypass access rights check, but not ir.rules! - This is ok at the security level because this method is private and not callable through XML-RPC. 
- - :param access_rights_uid: optional user ID to use when checking access rights - (not for ir.rules, this is only for ir.model.access) - """ - if context is None: - context = {} - self.check_access_rights(cr, access_rights_uid or user, 'read') - - # For transient models, restrict acces to the current user, except for the super-user - if self.is_transient() and self._log_access and user != SUPERUSER_ID: - args = expression.AND(([('create_uid', '=', user)], args or [])) - - query = self._where_calc(cr, user, args, context=context) - self._apply_ir_rules(cr, user, query, 'read', context=context) - order_by = self._generate_order_by(order, query) - from_clause, where_clause, where_clause_params = query.get_sql() - - limit_str = limit and ' limit %d' % limit or '' - offset_str = offset and ' offset %d' % offset or '' - where_str = where_clause and (" WHERE %s" % where_clause) or '' - query_str = 'SELECT "%s".id FROM ' % self._table + from_clause + where_str + order_by + limit_str + offset_str - - if count: - # /!\ the main query must be executed as a subquery, otherwise - # offset and limit apply to the result of count()! - cr.execute('SELECT count(*) FROM (%s) AS count' % query_str, where_clause_params) - res = cr.fetchone() - return res[0] - - cr.execute(query_str, where_clause_params) - res = cr.fetchall() - - # TDE note: with auto_join, we could have several lines about the same result - # i.e. 
a lead with several unread messages; we uniquify the result using - # a fast way to do it while preserving order (http://www.peterbe.com/plog/uniqifiers-benchmark) - def _uniquify_list(seq): - seen = set() - return [x for x in seq if x not in seen and not seen.add(x)] - - return _uniquify_list([x[0] for x in res]) - - # returns the different values ever entered for one field - # this is used, for example, in the client when the user hits enter on - # a char field - def distinct_field_get(self, cr, uid, field, value, args=None, offset=0, limit=None): - if not args: - args = [] - if field in self._inherit_fields: - return self.pool[self._inherit_fields[field][0]].distinct_field_get(cr, uid, field, value, args, offset, limit) - else: - return self._columns[field].search(cr, self, args, field, value, offset, limit, uid) - - def copy_data(self, cr, uid, id, default=None, context=None): - """ - Copy given record's data with all its fields values - - :param cr: database cursor - :param uid: current user id - :param id: id of the record to copy - :param default: field values to override in the original values of the copied record - :type default: dictionary - :param context: context arguments, like lang, time zone - :type context: dictionary - :return: dictionary containing all the field values - """ - - if context is None: - context = {} - - # avoid recursion through already copied records in case of circular relationship - seen_map = context.setdefault('__copy_data_seen', {}) - if id in seen_map.setdefault(self._name, []): - return - seen_map[self._name].append(id) - - if default is None: - default = {} - if 'state' not in default: - if 'state' in self._defaults: - if callable(self._defaults['state']): - default['state'] = self._defaults['state'](self, cr, uid, context) - else: - default['state'] = self._defaults['state'] - - # build a black list of fields that should not be copied - blacklist = set(MAGIC_COLUMNS + ['parent_left', 'parent_right']) - def 
blacklist_given_fields(obj): - # blacklist the fields that are given by inheritance - for other, field_to_other in obj._inherits.items(): - blacklist.add(field_to_other) - if field_to_other in default: - # all the fields of 'other' are given by the record: default[field_to_other], - # except the ones redefined in self - blacklist.update(set(self.pool[other]._all_columns) - set(self._columns)) - else: - blacklist_given_fields(self.pool[other]) - # blacklist deprecated fields - for name, field in obj._columns.items(): - if field.deprecated: - blacklist.add(name) - - blacklist_given_fields(self) - - - fields_to_copy = dict((f,fi) for f, fi in self._all_columns.iteritems() - if f not in default - if f not in blacklist - if not isinstance(fi.column, fields.function)) - - data = self.read(cr, uid, [id], fields_to_copy.keys(), context=context) - if data: - data = data[0] - else: - raise IndexError( _("Record #%d of %s not found, cannot copy!") %( id, self._name)) - - res = dict(default) - for f, colinfo in fields_to_copy.iteritems(): - field = colinfo.column - if field._type == 'many2one': - res[f] = data[f] and data[f][0] - elif field._type == 'one2many': - other = self.pool[field._obj] - # duplicate following the order of the ids because we'll rely on - # it later for copying translations in copy_translation()! - lines = [other.copy_data(cr, uid, line_id, context=context) for line_id in sorted(data[f])] - # the lines are duplicated using the wrong (old) parent, but then - # are reassigned to the correct one thanks to the (0, 0, ...) 
- res[f] = [(0, 0, line) for line in lines if line] - elif field._type == 'many2many': - res[f] = [(6, 0, data[f])] - else: - res[f] = data[f] - - return res - - def copy_translations(self, cr, uid, old_id, new_id, context=None): - if context is None: - context = {} - - # avoid recursion through already copied records in case of circular relationship - seen_map = context.setdefault('__copy_translations_seen',{}) - if old_id in seen_map.setdefault(self._name,[]): - return - seen_map[self._name].append(old_id) - - trans_obj = self.pool.get('ir.translation') - # TODO it seems fields_get can be replaced by _all_columns (no need for translation) - fields = self.fields_get(cr, uid, context=context) - - for field_name, field_def in fields.items(): - # removing the lang to compare untranslated values - context_wo_lang = dict(context, lang=None) - old_record, new_record = self.browse(cr, uid, [old_id, new_id], context=context_wo_lang) - # we must recursively copy the translations for o2o and o2m - if field_def['type'] == 'one2many': - target_obj = self.pool[field_def['relation']] - # here we rely on the order of the ids to match the translations - # as foreseen in copy_data() - old_children = sorted(r.id for r in old_record[field_name]) - new_children = sorted(r.id for r in new_record[field_name]) - for (old_child, new_child) in zip(old_children, new_children): - target_obj.copy_translations(cr, uid, old_child, new_child, context=context) - # and for translatable fields we keep them for copy - elif field_def.get('translate'): - if field_name in self._columns: - trans_name = self._name + "," + field_name - target_id = new_id - source_id = old_id - elif field_name in self._inherit_fields: - trans_name = self._inherit_fields[field_name][0] + "," + field_name - # get the id of the parent record to set the translation - inherit_field_name = self._inherit_fields[field_name][1] - target_id = new_record[inherit_field_name].id - source_id = old_record[inherit_field_name].id - else: 
- continue - - trans_ids = trans_obj.search(cr, uid, [ - ('name', '=', trans_name), - ('res_id', '=', source_id) - ]) - user_lang = context.get('lang') - for record in trans_obj.read(cr, uid, trans_ids, context=context): - del record['id'] - # remove source to avoid triggering _set_src - del record['source'] - record.update({'res_id': target_id}) - if user_lang and user_lang == record['lang']: - # 'source' to force the call to _set_src - # 'value' needed if value is changed in copy(), want to see the new_value - record['source'] = old_record[field_name] - record['value'] = new_record[field_name] - trans_obj.create(cr, uid, record, context=context) - - - def copy(self, cr, uid, id, default=None, context=None): - """ - Duplicate record with given id updating it with default values - - :param cr: database cursor - :param uid: current user id - :param id: id of the record to copy - :param default: dictionary of field values to override in the original values of the copied record, e.g: ``{'field_name': overriden_value, ...}`` - :type default: dictionary - :param context: context arguments, like lang, time zone - :type context: dictionary - :return: id of the newly created record - - """ - if context is None: - context = {} - context = context.copy() - data = self.copy_data(cr, uid, id, default, context) - new_id = self.create(cr, uid, data, context) - self.copy_translations(cr, uid, id, new_id, context) - return new_id - - def exists(self, cr, uid, ids, context=None): - """Checks whether the given id or ids exist in this model, - and return the list of ids that do. 
This is simple to use for - a truth test on a browse_record:: - - if record.exists(): - pass - - :param ids: id or list of ids to check for existence - :type ids: int or [int] - :return: the list of ids that currently exist, out of - the given `ids` - """ - if type(ids) in (int, long): - ids = [ids] - if not ids: - return [] - query = 'SELECT id FROM "%s"' % self._table - cr.execute(query + "WHERE ID IN %s", (tuple(ids),)) - return [x[0] for x in cr.fetchall()] - - def check_recursion(self, cr, uid, ids, context=None, parent=None): - _logger.warning("You are using deprecated %s.check_recursion(). Please use the '_check_recursion()' instead!" % \ - self._name) - assert parent is None or parent in self._columns or parent in self._inherit_fields,\ - "The 'parent' parameter passed to check_recursion() must be None or a valid field name" - return self._check_recursion(cr, uid, ids, context, parent) - - def _check_recursion(self, cr, uid, ids, context=None, parent=None): - """ - Verifies that there is no loop in a hierarchical structure of records, - by following the parent relationship using the **parent** field until a loop - is detected or until a top-level record is found. - - :param cr: database cursor - :param uid: current user id - :param ids: list of ids of records to check - :param parent: optional parent field name (default: ``self._parent_name = parent_id``) - :return: **True** if the operation can proceed safely, or **False** if an infinite loop is detected. - """ - if not parent: - parent = self._parent_name - - # must ignore 'active' flag, ir.rules, etc. 
=> direct SQL query - query = 'SELECT "%s" FROM "%s" WHERE id = %%s' % (parent, self._table) - for id in ids: - current_id = id - while current_id is not None: - cr.execute(query, (current_id,)) - result = cr.fetchone() - current_id = result[0] if result else None - if current_id == id: - return False - return True - - def _check_m2m_recursion(self, cr, uid, ids, field_name): - """ - Verifies that there is no loop in a hierarchical structure of records, - by following the parent relationship using the **parent** field until a loop - is detected or until a top-level record is found. - - :param cr: database cursor - :param uid: current user id - :param ids: list of ids of records to check - :param field_name: field to check - :return: **True** if the operation can proceed safely, or **False** if an infinite loop is detected. - """ - - field = self._all_columns.get(field_name) - field = field.column if field else None - if not field or field._type != 'many2many' or field._obj != self._name: - # field must be a many2many on itself - raise ValueError('invalid field_name: %r' % (field_name,)) - - query = 'SELECT distinct "%s" FROM "%s" WHERE "%s" IN %%s' % (field._id2, field._rel, field._id1) - ids_parent = ids[:] - while ids_parent: - ids_parent2 = [] - for i in range(0, len(ids_parent), cr.IN_MAX): - j = i + cr.IN_MAX - sub_ids_parent = ids_parent[i:j] - cr.execute(query, (tuple(sub_ids_parent),)) - ids_parent2.extend(filter(None, map(lambda x: x[0], cr.fetchall()))) - ids_parent = ids_parent2 - for i in ids_parent: - if i in ids: - return False - return True - - def _get_external_ids(self, cr, uid, ids, *args, **kwargs): - """Retrieve the External ID(s) of any database record. 
- - **Synopsis**: ``_get_xml_ids(cr, uid, ids) -> { 'id': ['module.xml_id'] }`` - - :return: map of ids to the list of their fully qualified External IDs - in the form ``module.key``, or an empty list when there's no External - ID for a record, e.g.:: - - { 'id': ['module.ext_id', 'module.ext_id_bis'], - 'id2': [] } - """ - ir_model_data = self.pool.get('ir.model.data') - data_ids = ir_model_data.search(cr, uid, [('model', '=', self._name), ('res_id', 'in', ids)]) - data_results = ir_model_data.read(cr, uid, data_ids, ['module', 'name', 'res_id']) - result = {} - for id in ids: - # can't use dict.fromkeys() as the list would be shared! - result[id] = [] - for record in data_results: - result[record['res_id']].append('%(module)s.%(name)s' % record) - return result - - def get_external_id(self, cr, uid, ids, *args, **kwargs): - """Retrieve the External ID of any database record, if there - is one. This method works as a possible implementation - for a function field, to be able to add it to any - model object easily, referencing it as ``Model.get_external_id``. - - When multiple External IDs exist for a record, only one - of them is returned (randomly). - - :return: map of ids to their fully qualified XML ID, - defaulting to an empty string when there's none - (to be usable as a function field), - e.g.:: - - { 'id': 'module.ext_id', - 'id2': '' } - """ - results = self._get_xml_ids(cr, uid, ids) - for k, v in results.iteritems(): - if results[k]: - results[k] = v[0] - else: - results[k] = '' - return results - - # backwards compatibility - get_xml_id = get_external_id - _get_xml_ids = _get_external_ids - - def print_report(self, cr, uid, ids, name, data, context=None): - """ - Render the report `name` for the given IDs. The report must be defined - for this model, not another. 
- """ - report = self.pool['ir.actions.report.xml']._lookup_report(cr, name) - assert self._name == report.table - return report.create(cr, uid, ids, data, context) - - # Transience - def is_transient(self): - """ Return whether the model is transient. - - See :class:`TransientModel`. - - """ - return self._transient - - def _transient_clean_rows_older_than(self, cr, seconds): - assert self._transient, "Model %s is not transient, it cannot be vacuumed!" % self._name - # Never delete rows used in last 5 minutes - seconds = max(seconds, 300) - query = ("SELECT id FROM " + self._table + " WHERE" - " COALESCE(write_date, create_date, (now() at time zone 'UTC'))::timestamp" - " < ((now() at time zone 'UTC') - interval %s)") - cr.execute(query, ("%s seconds" % seconds,)) - ids = [x[0] for x in cr.fetchall()] - self.unlink(cr, SUPERUSER_ID, ids) - - def _transient_clean_old_rows(self, cr, max_count): - # Check how many rows we have in the table - cr.execute("SELECT count(*) AS row_count FROM " + self._table) - res = cr.fetchall() - if res[0][0] <= max_count: - return # max not reached, nothing to do - self._transient_clean_rows_older_than(cr, 300) - - def _transient_vacuum(self, cr, uid, force=False): - """Clean the transient records. - - This unlinks old records from the transient model tables whenever the - "_transient_max_count" or "_max_age" conditions (if any) are reached. - Actual cleaning will happen only once every "_transient_check_time" calls. - This means this method can be called frequently called (e.g. whenever - a new record is created). - Example with both max_hours and max_count active: - Suppose max_hours = 0.2 (e.g. 12 minutes), max_count = 20, there are 55 rows in the - table, 10 created/changed in the last 5 minutes, an additional 12 created/changed between - 5 and 10 minutes ago, the rest created/changed more then 12 minutes ago. 
- - age based vacuum will leave the 22 rows created/changed in the last 12 minutes - - count based vacuum will wipe out another 12 rows. Not just 2, otherwise each addition - would immediately cause the maximum to be reached again. - - the 10 rows that have been created/changed the last 5 minutes will NOT be deleted - """ - assert self._transient, "Model %s is not transient, it cannot be vacuumed!" % self._name - _transient_check_time = 20 # arbitrary limit on vacuum executions - self._transient_check_count += 1 - if not force and (self._transient_check_count < _transient_check_time): - return True # no vacuum cleaning this time - self._transient_check_count = 0 - - # Age-based expiration - if self._transient_max_hours: - self._transient_clean_rows_older_than(cr, self._transient_max_hours * 60 * 60) - - # Count-based expiration - if self._transient_max_count: - self._transient_clean_old_rows(cr, self._transient_max_count) - - return True - - def resolve_2many_commands(self, cr, uid, field_name, commands, fields=None, context=None): - """ Serializes one2many and many2many commands into record dictionaries - (as if all the records came from the database via a read()). This - method is aimed at onchange methods on one2many and many2many fields. - - Because commands might be creation commands, not all record dicts - will contain an ``id`` field. Commands matching an existing record - will have an ``id``. 
- - :param field_name: name of the one2many or many2many field matching the commands - :type field_name: str - :param commands: one2many or many2many commands to execute on ``field_name`` - :type commands: list((int|False, int|False, dict|False)) - :param fields: list of fields to read from the database, when applicable - :type fields: list(str) - :returns: records in a shape similar to that returned by ``read()`` - (except records may be missing the ``id`` field if they don't exist in db) - :rtype: list(dict) - """ - result = [] # result (list of dict) - record_ids = [] # ids of records to read - updates = {} # {id: dict} of updates on particular records - - for command in commands: - if not isinstance(command, (list, tuple)): - record_ids.append(command) - elif command[0] == 0: - result.append(command[2]) - elif command[0] == 1: - record_ids.append(command[1]) - updates.setdefault(command[1], {}).update(command[2]) - elif command[0] in (2, 3): - record_ids = [id for id in record_ids if id != command[1]] - elif command[0] == 4: - record_ids.append(command[1]) - elif command[0] == 5: - result, record_ids = [], [] - elif command[0] == 6: - result, record_ids = [], list(command[2]) - - # read the records and apply the updates - other_model = self.pool[self._all_columns[field_name].column._obj] - for record in other_model.read(cr, uid, record_ids, fields=fields, context=context): - record.update(updates.get(record['id'], {})) - result.append(record) - - return result - - # for backward compatibility - resolve_o2m_commands_to_record_dicts = resolve_2many_commands - - def search_read(self, cr, uid, domain=None, fields=None, offset=0, limit=None, order=None, context=None): - """ - Performs a ``search()`` followed by a ``read()``. - - :param cr: database cursor - :param user: current user id - :param domain: Search domain, see ``args`` parameter in ``search()``. Defaults to an empty domain that will match all records. 
- :param fields: List of fields to read, see ``fields`` parameter in ``read()``. Defaults to all fields. - :param offset: Number of records to skip, see ``offset`` parameter in ``search()``. Defaults to 0. - :param limit: Maximum number of records to return, see ``limit`` parameter in ``search()``. Defaults to no limit. - :param order: Columns to sort result, see ``order`` parameter in ``search()``. Defaults to no sort. - :param context: context arguments. - :return: List of dictionaries containing the asked fields. - :rtype: List of dictionaries. - - """ - record_ids = self.search(cr, uid, domain or [], offset=offset, limit=limit, order=order, context=context) - if not record_ids: - return [] - - if fields and fields == ['id']: - # shortcut read if we only want the ids - return [{'id': id} for id in record_ids] - - # read() ignores active_test, but it would forward it to any downstream search call - # (e.g. for x2m or function fields), and this is not the desired behavior, the flag - # was presumably only meant for the main search(). - # TODO: Move this to read() directly? 
- read_ctx = dict(context or {}) - read_ctx.pop('active_test', None) - - result = self.read(cr, uid, record_ids, fields, context=read_ctx) - if len(result) <= 1: - return result - - # reorder read - index = dict((r['id'], r) for r in result) - return [index[x] for x in record_ids if x in index] - - def _register_hook(self, cr): - """ stuff to do right after the registry is built """ - pass - - def __getattr__(self, name): - if name.startswith('signal_'): - signal_name = name[len('signal_'):] - assert signal_name - return (lambda *args, **kwargs: - self.signal_workflow(*args, signal=signal_name, **kwargs)) - get = getattr(super(BaseModel, self), '__getattr__', None) - if get is not None: return get(name) - raise AttributeError( - "'%s' object has no attribute '%s'" % (type(self).__name__, name)) - -# keep this import here, at top it will cause dependency cycle errors -import expression - -class Model(BaseModel): - """Main super-class for regular database-persisted OpenERP models. - - OpenERP models are created by inheriting from this class:: - - class user(Model): - ... - - The system will later instantiate the class once per database (on - which the class' module is installed). - """ - _auto = True - _register = False # not visible in ORM registry, meant to be python-inherited only - _transient = False # True in a TransientModel - -class TransientModel(BaseModel): - """Model super-class for transient records, meant to be temporarily - persisted, and regularly vaccuum-cleaned. - - A TransientModel has a simplified access rights management, - all users can create new records, and may only access the - records they created. The super-user has unrestricted access - to all TransientModel records. 
- """ - _auto = True - _register = False # not visible in ORM registry, meant to be python-inherited only - _transient = True - -class AbstractModel(BaseModel): - """Abstract Model super-class for creating an abstract class meant to be - inherited by regular models (Models or TransientModels) but not meant to - be usable on its own, or persisted. - - Technical note: we don't want to make AbstractModel the super-class of - Model or BaseModel because it would not make sense to put the main - definition of persistence methods such as create() in it, and still we - should be able to override them within an AbstractModel. - """ - _auto = False # don't create any database backend for AbstractModels - _register = False # not visible in ORM registry, meant to be python-inherited only - _transient = False - -def itemgetter_tuple(items): - """ Fixes itemgetter inconsistency (useful in some cases) of not returning - a tuple if len(items) == 1: always returns an n-tuple where n = len(items) - """ - if len(items) == 0: - return lambda a: () - if len(items) == 1: - return lambda gettable: (gettable[items[0]],) - return operator.itemgetter(*items) - -class ImportWarning(Warning): - """ Used to send warnings upwards the stack during the import process - """ - pass - -def convert_pgerror_23502(model, fields, info, e): - m = re.match(r'^null value in column "(?P<field>\w+)" violates ' - r'not-null constraint\n', - str(e)) - field_name = m and m.group('field') - if not m or field_name not in fields: - return {'message': unicode(e)} - message = _(u"Missing required value for the field '%s'.") % field_name - field = fields.get(field_name) - if field: - message = _(u"Missing required value for the field '%s' (%s)") % (field['string'], field_name) - return { - 'message': message, - 'field': field_name, - } - -def convert_pgerror_23505(model, fields, info, e): - m = re.match(r'^duplicate key (?P<field>\w+) violates unique constraint', - str(e)) - field_name = m and m.group('field') - if 
not m or field_name not in fields: - return {'message': unicode(e)} - message = _(u"The value for the field '%s' already exists.") % field_name - field = fields.get(field_name) - if field: - message = _(u"%s This might be '%s' in the current model, or a field " - u"of the same name in an o2m.") % (message, field['string']) - return { - 'message': message, - 'field': field_name, - } - -PGERROR_TO_OE = collections.defaultdict( - # shape of mapped converters - lambda: (lambda model, fvg, info, pgerror: {'message': unicode(pgerror)}), { - # not_null_violation - '23502': convert_pgerror_23502, - # unique constraint error - '23505': convert_pgerror_23505, -}) -# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: diff --git a/openerp/osv/osv.py b/openerp/osv/osv.py index d56c0025df06d4f3f2e01fa47b25069ae927aeec..d1a3efc750dc59d778077221e1922b648fb90897 100644 --- a/openerp/osv/osv.py +++ b/openerp/osv/osv.py @@ -19,7 +19,8 @@ # ############################################################################## -from openerp.osv.orm import except_orm, Model, TransientModel, AbstractModel +from ..exceptions import except_orm +from .orm import Model, TransientModel, AbstractModel # Deprecated, kept for backward compatibility. # openerp.exceptions.Warning should be used instead. 
diff --git a/openerp/report/custom.py b/openerp/report/custom.py index 84634d1dc252c67ae3e722a31ed4f13a229dcb8a..eed759659a4f6a5543050c63c7c48c9476b1adc1 100644 --- a/openerp/report/custom.py +++ b/openerp/report/custom.py @@ -30,8 +30,7 @@ import render from interface import report_int import common from openerp.osv.osv import except_osv -from openerp.osv.orm import browse_null -from openerp.osv.orm import browse_record_list +from openerp.osv.orm import BaseModel from pychart import * import misc import cStringIO @@ -84,7 +83,7 @@ class report_custom(report_int): if row_canvas[i]: row_canvas[i]=False elif len(fields[i])==1: - if not isinstance(obj, browse_null): + if obj: row.append(str(eval('obj.'+fields[i][0],{'obj': obj}))) else: row.append(None) @@ -106,7 +105,7 @@ class report_custom(report_int): key = levels.keys() for l in key: objs = eval('obj.'+l,{'obj': obj}) - if not isinstance(objs, (browse_record_list, list)): + if not isinstance(objs, (BaseModel, list)): objs = [objs] field_new = [] cond_new = [] @@ -191,8 +190,8 @@ class report_custom(report_int): new_obj = eval('obj.'+report['field_parent'][1],{'obj': obj}) if not isinstance(new_obj, list) : new_obj = [new_obj] - for o in new_obj: - if not isinstance(o, browse_null): + for o in new_obj: + if o: res += build_tree(o, level, depth+1) return res diff --git a/openerp/report/print_xml.py b/openerp/report/print_xml.py index 3d74008e120d93d3f35508ce7eebf0cb628e2f49..a7f33b7b21d1d03a52f33a99dbf00b89ed42dee1 100644 --- a/openerp/report/print_xml.py +++ b/openerp/report/print_xml.py @@ -24,7 +24,7 @@ import openerp import openerp.tools as tools from openerp.tools.safe_eval import safe_eval import print_fnc -from openerp.osv.orm import browse_null, browse_record +from openerp.osv.orm import BaseModel class InheritDict(dict): # Might be usefull when we're doing name lookup for call or eval. 
@@ -74,26 +74,18 @@ class document(object): value = browser for f in fields: - if isinstance(value, list): - if len(value)==0: + if isinstance(value, (BaseModel, list)): + if not value: return '' value = value[0] - if isinstance(value, browse_null): - return '' - else: - value = value[f] + value = value[f] - if isinstance(value, browse_null) or (type(value)==bool and not value): - return '' - else: - return value + return value or '' def get_value2(self, browser, field_path): value = self.get_value(browser, field_path) - if isinstance(value, browse_record): + if isinstance(value, BaseModel): return value.id - elif isinstance(value, browse_null): - return False else: return value @@ -104,7 +96,7 @@ class document(object): # dictionary passed to eval #FIXME: it wont work if the data hasn't been fetched yet... this could -# happen if the eval node is the first one using this browse_record +# happen if the eval node is the first one using this Record # the next line is a workaround for the problem: it causes the resource to be loaded #Pinky: Why not this ? eval(expr, browser) ? 
# name = browser.name @@ -127,11 +119,7 @@ class document(object): el.set(key, value) elif attrs['type']=='attachment': - if isinstance(browser, list): - model = browser[0]._table_name - else: - model = browser._table_name - + model = browser._name value = self.get_value(browser, attrs['name']) ids = self.pool['ir.attachment'].search(self.cr, self.uid, [('res_model','=',model),('res_id','=',int(value))]) @@ -203,19 +191,13 @@ class document(object): if 'model' in attrs: obj = self.pool[attrs['model']] else: - if isinstance(browser, list): - obj = browser[0]._table - else: - obj = browser._table + obj = browser # the record(set) is an instance of the model # get the ids if 'ids' in attrs: ids = self.eval(browser, attrs['ids']) else: - if isinstance(browser, list): - ids = [b.id for b in browser] - else: - ids = [browser.id] + ids = browse.ids # call the method itself newdatas = getattr(obj, attrs['name'])(self.cr, self.uid, ids, *args) @@ -233,7 +215,7 @@ class document(object): else: for el_cld in node: parse_result_tree(el_cld, el, datas) - if not isinstance(newdatas, list): + if not isinstance(newdatas, (BaseModel, list)): newdatas = [newdatas] for newdata in newdatas: parse_result_tree(node, parent, newdata) @@ -241,7 +223,7 @@ class document(object): elif attrs['type']=='zoom': value = self.get_value(browser, attrs['name']) if value: - if not isinstance(value, list): + if not isinstance(value, (BaseModel, list)): v_list = [value] else: v_list = value diff --git a/openerp/report/report_sxw.py b/openerp/report/report_sxw.py index f98261b08339264f5d232638c3770ea51e6984c0..e3917d8fa96b24a5f66882c2fc43c0a61b640742 100644 --- a/openerp/report/report_sxw.py +++ b/openerp/report/report_sxw.py @@ -71,82 +71,6 @@ rml2sxw = { def get_date_length(date_format=DEFAULT_SERVER_DATE_FORMAT): return len((datetime.now()).strftime(date_format)) -class _format(object): - def set_value(self, cr, uid, name, object, field, lang_obj): - self.object = object - self._field = field - 
self.name = name - self.lang_obj = lang_obj - -class _float_format(float, _format): - def __init__(self,value): - super(_float_format, self).__init__() - self.val = value or 0.0 - - def __str__(self): - digits = 2 - if hasattr(self,'_field') and getattr(self._field, 'digits', None): - digits = self._field.digits[1] - if hasattr(self, 'lang_obj'): - return self.lang_obj.format('%.' + str(digits) + 'f', self.name, True) - return str(self.val) - -class _int_format(int, _format): - def __init__(self,value): - super(_int_format, self).__init__() - self.val = value or 0 - - def __str__(self): - if hasattr(self,'lang_obj'): - return self.lang_obj.format('%.d', self.name, True) - return str(self.val) - -class _date_format(str, _format): - def __init__(self,value): - super(_date_format, self).__init__() - self.val = value and str(value) or '' - - def __str__(self): - if self.val: - if getattr(self,'name', None): - date = datetime.strptime(self.name[:get_date_length()], DEFAULT_SERVER_DATE_FORMAT) - return date.strftime(self.lang_obj.date_format.encode('utf-8')) - return self.val - -class _dttime_format(str, _format): - def __init__(self,value): - super(_dttime_format, self).__init__() - self.val = value and str(value) or '' - - def __str__(self): - if self.val and getattr(self,'name', None): - return datetime.strptime(self.name, DEFAULT_SERVER_DATETIME_FORMAT)\ - .strftime("%s %s"%((self.lang_obj.date_format).encode('utf-8'), - (self.lang_obj.time_format).encode('utf-8'))) - return self.val - - -_fields_process = { - 'float': _float_format, - 'date': _date_format, - 'integer': _int_format, - 'datetime' : _dttime_format -} - -# -# Context: {'node': node.dom} -# -class browse_record_list(list): - def __init__(self, lst, context): - super(browse_record_list, self).__init__(lst) - self.context = context - - def __getattr__(self, name): - res = browse_record_list([getattr(x,name) for x in self], self.context) - return res - - def __str__(self): - return 
"browse_record_list("+str(len(self))+")" class rml_parse(object): def __init__(self, cr, uid, name, parents=rml_parents, tag=rml_tag, context=None): @@ -221,8 +145,8 @@ class rml_parse(object): if not model: model = 'ir.attachment' try : - ids = [int(id)] - res = self.pool[model].read(self.cr,self.uid,ids)[0] + id = int(id) + res = self.pool[model].read(self.cr,self.uid,id) if field : return res[field] elif model =='ir.attachment' : @@ -235,8 +159,9 @@ class rml_parse(object): def setLang(self, lang): self.localcontext['lang'] = lang self.lang_dict_called = False - for obj in self.objects: - obj._context['lang'] = lang + # re-evaluate self.objects in a different environment + env = self.objects.env(self.cr, self.uid, self.localcontext) + self.objects = self.objects.with_env(env) def _get_lang_dict(self): pool_lang = self.pool['res.lang'] @@ -292,7 +217,7 @@ class rml_parse(object): self.lang_dict_called = True if date or date_time: - if not str(value): + if not value: return '' date_format = self.lang_dict['date_format'] @@ -324,8 +249,8 @@ class rml_parse(object): res='%s %s'%(currency_obj.symbol, res) return res - def display_address(self, address_browse_record): - return self.pool['res.partner']._display_address(self.cr, self.uid, address_browse_record) + def display_address(self, address_record): + return address_record.contact_address def repeatIn(self, lst, name,nodes_parent=False): ret_lst = [] @@ -413,11 +338,10 @@ class report_sxw(report_rml, preprocess.report): def getObjects(self, cr, uid, ids, context): table_obj = openerp.registry(cr.dbname)[self.table] - return table_obj.browse(cr, uid, ids, list_class=browse_record_list, context=context, fields_process=_fields_process) + return table_obj.browse(cr, uid, ids, context=context) def create(self, cr, uid, ids, data, context=None): - if context is None: - context = {} + context = dict(context or {}) if self.internal_header: context.update(internal_header=self.internal_header) @@ -444,8 +368,13 @@ class 
report_sxw(report_rml, preprocess.report): report_xml = a(title=title, report_type=report_type, report_rml_content=rml, name=title, attachment=False, header=self.header) finally: report_file.close() - if report_xml.header: - report_xml.header = self.header + + # We add an attribute on the ir.actions.report.xml instance. + # This attribute 'use_global_header' will be used by + # the create_single_XXX function of the report engine. + # This change has been done to avoid a big change of the API. + setattr(report_xml, 'use_global_header', self.header if report_xml.header else False) + report_type = report_xml.report_type if report_type in ['sxw','odt']: fnct = self.create_source_odt @@ -542,7 +471,7 @@ class report_sxw(report_rml, preprocess.report): objs = self.getObjects(cr, uid, ids, context) rml_parser.set_context(objs, data, ids, report_xml.report_type) processed_rml = etree.XML(rml) - if report_xml.header: + if report_xml.use_global_header: rml_parser._add_header(processed_rml, self.header) processed_rml = self.preprocess_rml(processed_rml,report_xml.report_type) if rml_parser.logo: @@ -552,11 +481,9 @@ class report_sxw(report_rml, preprocess.report): return pdf, report_xml.report_type def create_single_odt(self, cr, uid, ids, data, report_xml, context=None): - if not context: - context={} - context = context.copy() - report_type = report_xml.report_type + context = dict(context or {}) context['parents'] = sxw_parents + report_type = report_xml.report_type binary_report_content = report_xml.report_sxw_content if isinstance(report_xml.report_sxw_content, unicode): # if binary content was passed as unicode, we must @@ -637,7 +564,7 @@ class report_sxw(report_rml, preprocess.report): encoding='utf-8', xml_declaration=True) sxw_contents = {'content.xml':odt, 'meta.xml':meta} - if report_xml.header: + if report_xml.use_global_header: #Add corporate header/footer rml_file = tools.file_open(os.path.join('base', 'report', 'corporate_%s_header.xml' % report_type)) try: @@ 
-650,7 +577,7 @@ class report_sxw(report_rml, preprocess.report): rml_dom = self.preprocess_rml(etree.XML(rml),report_type) create_doc = self.generators[report_type] odt = create_doc(rml_dom,rml_parser.localcontext) - if report_xml.header: + if report_xml.use_global_header: rml_parser._add_header(odt) odt = etree.tostring(odt, encoding='utf-8', xml_declaration=True) @@ -676,11 +603,9 @@ class report_sxw(report_rml, preprocess.report): return final_op, mime_type def create_single_html2html(self, cr, uid, ids, data, report_xml, context=None): - if not context: - context = {} - context = context.copy() - report_type = 'html' + context = dict(context or {}) context['parents'] = html_parents + report_type = 'html' html = report_xml.report_rml_content html_parser = self.parser(cr, uid, self.name2, context=context) diff --git a/openerp/service/model.py b/openerp/service/model.py index a91baf8ca08c088170d2a3678e5d4c7913348e3f..11054f237d7af32011700302fcb39e1a1bc059ca 100644 --- a/openerp/service/model.py +++ b/openerp/service/model.py @@ -154,7 +154,7 @@ def check(f): def execute_cr(cr, uid, obj, method, *args, **kw): object = openerp.registry(cr.dbname).get(obj) - if not object: + if object is None: raise except_orm('Object Error', "Object %s doesn't exist" % obj) return getattr(object, method)(cr, uid, *args, **kw) diff --git a/openerp/service/security.py b/openerp/service/security.py index 6f115b8030a1111ea19b92cfb28c99014e5250b0..a5d392e7c0d631cf0c01f8e303be22614850c81a 100644 --- a/openerp/service/security.py +++ b/openerp/service/security.py @@ -20,10 +20,11 @@ ############################################################################## import openerp +import openerp.exceptions def login(db, login, password): res_users = openerp.registry(db)['res.users'] - return res_users.login(db, login, password) + return res_users._login(db, login, password) def check_super(passwd): if passwd == openerp.tools.config['admin_passwd']: diff --git a/openerp/service/wsgi_server.py 
b/openerp/service/wsgi_server.py index 5e72ba570b5d0335ce90a62bb7b7cad5f7ce4c7c..7838350f090e2547e628fa68ec4f9e4c027e1fea 100644 --- a/openerp/service/wsgi_server.py +++ b/openerp/service/wsgi_server.py @@ -194,14 +194,15 @@ def application_unproxied(environ, start_response): if hasattr(threading.current_thread(), 'dbname'): del threading.current_thread().dbname - # Try all handlers until one returns some result (i.e. not None). - wsgi_handlers = [wsgi_xmlrpc] - wsgi_handlers += module_handlers - for handler in wsgi_handlers: - result = handler(environ, start_response) - if result is None: - continue - return result + with openerp.api.Environment.manage(): + # Try all handlers until one returns some result (i.e. not None). + wsgi_handlers = [wsgi_xmlrpc] + wsgi_handlers += module_handlers + for handler in wsgi_handlers: + result = handler(environ, start_response) + if result is None: + continue + return result # We never returned from the loop. response = 'No handler found.\n' diff --git a/openerp/tests/common.py b/openerp/tests/common.py index 50b85555d04083abd934dba10a5a1335134ee30a..d2db62c21a0faedc658a6846558f46f8d947ad24 100644 --- a/openerp/tests/common.py +++ b/openerp/tests/common.py @@ -23,6 +23,7 @@ from tempfile import mkdtemp import werkzeug import openerp +from openerp import api from openerp.modules.registry import RegistryManager _logger = logging.getLogger(__name__) @@ -108,6 +109,7 @@ class TransactionCase(BaseCase): self.registry = RegistryManager.get(DB) self.cr = self.cursor() self.uid = openerp.SUPERUSER_ID + self.env = api.Environment(self.cr, self.uid, {}) def tearDown(self): self.cr.rollback() @@ -125,6 +127,7 @@ class SingleTransactionCase(BaseCase): cls.registry = RegistryManager.get(DB) cls.cr = cls.registry.cursor() cls.uid = openerp.SUPERUSER_ID + cls.env = api.Environment(cls.cr, cls.uid, {}) @classmethod def tearDownClass(cls): diff --git a/openerp/tools/__init__.py b/openerp/tools/__init__.py index 
c927b2fc87adc945edc3ad70d8d19f20a6072dae..e0ee1b3a2d24c56ad00efddd1443703ebade5f49 100644 --- a/openerp/tools/__init__.py +++ b/openerp/tools/__init__.py @@ -35,6 +35,7 @@ from yaml_import import * from sql import * from float_utils import * from mail import * +from func import * from debugger import * # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: diff --git a/openerp/tools/cache.py b/openerp/tools/cache.py index a23641a9f4b7f9cabea0bc9d20ffb39562600bc1..57dea0888c167df36a45c0e111a086d51d8aa937 100644 --- a/openerp/tools/cache.py +++ b/openerp/tools/cache.py @@ -1,166 +1,177 @@ +# -*- coding: utf-8 -*- +############################################################################## +# +# OpenERP, Open Source Management Solution +# Copyright (C) 2013 OpenERP (<http://www.openerp.com>). +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as +# published by the Free Software Foundation, either version 3 of the +# License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see <http://www.gnu.org/licenses/>. +# +############################################################################## + +# decorator makes wrappers that have the same API as their wrapped function; +# this is important for the openerp.api.guess() that relies on signatures +from decorator import decorator +from inspect import getargspec + import lru import logging logger = logging.getLogger(__name__) + class ormcache(object): - """ LRU cache decorator for orm methods, - """ + """ LRU cache decorator for orm methods. 
""" def __init__(self, skiparg=2, size=8192, multi=None, timeout=None): self.skiparg = skiparg self.size = size - self.method = None self.stat_miss = 0 self.stat_hit = 0 self.stat_err = 0 - def __call__(self,m): - self.method = m - def lookup(self2, cr, *args, **argv): - r = self.lookup(self2, cr, *args, **argv) - return r + def __call__(self, method): + self.method = method + lookup = decorator(self.lookup, method) lookup.clear_cache = self.clear return lookup def stat(self): - return "lookup-stats hit=%s miss=%s err=%s ratio=%.1f" % (self.stat_hit,self.stat_miss,self.stat_err, (100*float(self.stat_hit))/(self.stat_miss+self.stat_hit) ) + return "lookup-stats hit=%s miss=%s err=%s ratio=%.1f" % \ + (self.stat_hit, self.stat_miss, self.stat_err, + (100*float(self.stat_hit))/(self.stat_miss+self.stat_hit)) - def lru(self, self2): - try: - ormcache = getattr(self2, '_ormcache') - except AttributeError: - ormcache = self2._ormcache = {} + def lru(self, model): + ormcache = model._ormcache try: d = ormcache[self.method] except KeyError: d = ormcache[self.method] = lru.LRU(self.size) return d - def lookup(self, self2, cr, *args, **argv): - d = self.lru(self2) - key = args[self.skiparg-2:] + def lookup(self, method, *args, **kwargs): + d = self.lru(args[0]) + key = args[self.skiparg:] try: - r = d[key] - self.stat_hit += 1 - return r + r = d[key] + self.stat_hit += 1 + return r except KeyError: - self.stat_miss += 1 - value = d[key] = self.method(self2, cr, *args) - return value + self.stat_miss += 1 + value = d[key] = self.method(*args, **kwargs) + return value except TypeError: - self.stat_err += 1 - return self.method(self2, cr, *args) + self.stat_err += 1 + return self.method(*args, **kwargs) - def clear(self, self2, *args): - """ Remove *args entry from the cache or all keys if *args is undefined - """ - d = self.lru(self2) + def clear(self, model, *args): + """ Remove *args entry from the cache or all keys if *args is undefined """ + d = self.lru(model) if args: 
logger.warn("ormcache.clear arguments are deprecated and ignored " "(while clearing caches on (%s).%s)", - self2._name, self.method.__name__) + model._name, self.method.__name__) d.clear() - self2.pool._any_cache_cleared = True + model.pool._any_cache_cleared = True + class ormcache_context(ormcache): def __init__(self, skiparg=2, size=8192, accepted_keys=()): super(ormcache_context,self).__init__(skiparg,size) self.accepted_keys = accepted_keys - def lookup(self, self2, cr, *args, **argv): - d = self.lru(self2) - - context = argv.get('context', {}) - ckey = filter(lambda x: x[0] in self.accepted_keys, context.items()) - ckey.sort() - - d = self.lru(self2) - key = args[self.skiparg-2:]+tuple(ckey) + def __call__(self, method): + # remember which argument is context + args = getargspec(method)[0] + self.context_pos = args.index('context') + return super(ormcache_context, self).__call__(method) + + def lookup(self, method, *args, **kwargs): + d = self.lru(args[0]) + + # Note. The decorator() wrapper (used in __call__ above) will resolve + # arguments, and pass them positionally to lookup(). This is why context + # is not passed through kwargs! + if self.context_pos < len(args): + context = args[self.context_pos] + else: + context = kwargs.get('context') or {} + ckey = [(k, context[k]) for k in self.accepted_keys if k in context] + + # Beware: do not take the context from args! 
+ key = args[self.skiparg:self.context_pos] + tuple(ckey) try: - r = d[key] - self.stat_hit += 1 - return r + r = d[key] + self.stat_hit += 1 + return r except KeyError: - self.stat_miss += 1 - value = d[key] = self.method(self2, cr, *args, **argv) - return value + self.stat_miss += 1 + value = d[key] = self.method(*args, **kwargs) + return value except TypeError: - self.stat_err += 1 - return self.method(self2, cr, *args, **argv) + self.stat_err += 1 + return self.method(*args, **kwargs) class ormcache_multi(ormcache): def __init__(self, skiparg=2, size=8192, multi=3): - super(ormcache_multi,self).__init__(skiparg,size) - self.multi = multi - 2 - - def lookup(self, self2, cr, *args, **argv): - d = self.lru(self2) - args = list(args) - multi = self.multi - ids = args[multi] - r = {} - miss = [] - + assert skiparg <= multi + super(ormcache_multi, self).__init__(skiparg, size) + self.multi = multi + + def lookup(self, method, *args, **kwargs): + d = self.lru(args[0]) + base_key = args[self.skiparg:self.multi] + args[self.multi+1:] + ids = args[self.multi] + result = {} + missed = [] + + # first take what is available in the cache for i in ids: - args[multi] = i - key = tuple(args[self.skiparg-2:]) + key = base_key + (i,) try: - r[i] = d[key] - self.stat_hit += 1 + result[i] = d[key] + self.stat_hit += 1 except Exception: - self.stat_miss += 1 - miss.append(i) + self.stat_miss += 1 + missed.append(i) - if miss: - args[multi] = miss - r.update(self.method(self2, cr, *args)) + if missed: + # call the method for the ids that were not in the cache + args = list(args) + args[self.multi] = missed + result.update(method(*args, **kwargs)) - for i in miss: - args[multi] = i - key = tuple(args[self.skiparg-2:]) - d[key] = r[i] + # store those new results back in the cache + for i in missed: + key = base_key + (i,) + d[key] = result[i] + + return result - return r class dummy_cache(object): - """ Cache decorator replacement to actually do no caching. 
- """ + """ Cache decorator replacement to actually do no caching. """ def __init__(self, *l, **kw): pass + def __call__(self, fn): fn.clear_cache = self.clear return fn + def clear(self, *l, **kw): pass -if __name__ == '__main__': - - class A(): - @ormcache() - def m(self,a,b): - print "A::m(", self,a,b - return 1 - - @ormcache_multi(multi=3) - def n(self,cr,uid,ids): - print "m", self,cr,uid,ids - return dict([(i,i) for i in ids]) - - a=A() - r=a.m(1,2) - r=a.m(1,2) - r=a.n("cr",1,[1,2,3,4]) - r=a.n("cr",1,[1,2]) - print r - for i in a._ormcache: - print a._ormcache[i].d - a.m.clear_cache() - a.n.clear_cache(a,1,1) - r=a.n("cr",1,[1,2]) - print r - r=a.n("cr",1,[1,2]) # For backward compatibility cache = ormcache diff --git a/openerp/tools/import_email.py b/openerp/tools/import_email.py index d73c50865a29d719466402b9aeee257760f8136e..15e9b025abea206ef252dcd35be5a739c1e6086a 100644 --- a/openerp/tools/import_email.py +++ b/openerp/tools/import_email.py @@ -23,8 +23,8 @@ import os, sys import re import smtplib import email, mimetypes -from email.Header import decode_header -from email.MIMEText import MIMEText +from email.header import decode_header +from email.mime.text import MIMEText import xmlrpclib warn_msg = """ diff --git a/openerp/tools/misc.py b/openerp/tools/misc.py index e5d8190a4d2b1f5cee1bc2f847297cc48fc1c476..fbdd32e7cb1db49a2e0f9de68f8629b010b2c9bb 100644 --- a/openerp/tools/misc.py +++ b/openerp/tools/misc.py @@ -1212,6 +1212,23 @@ def dumpstacks(sig=None, frame=None): _logger.info("\n".join(code)) +class frozendict(dict): + """ An implementation of an immutable dictionary. 
""" + def __delitem__(self, key): + raise NotImplementedError("'__delitem__' not supported on frozendict") + def __setitem__(self, key, val): + raise NotImplementedError("'__setitem__' not supported on frozendict") + def clear(self): + raise NotImplementedError("'clear' not supported on frozendict") + def pop(self, key, default=None): + raise NotImplementedError("'pop' not supported on frozendict") + def popitem(self): + raise NotImplementedError("'popitem' not supported on frozendict") + def setdefault(self, key, default=None): + raise NotImplementedError("'setdefault' not supported on frozendict") + def update(self, *args, **kwargs): + raise NotImplementedError("'update' not supported on frozendict") + @contextmanager def ignore(*exc): try: diff --git a/openerp/tools/test_reports.py b/openerp/tools/test_reports.py index 305d2b0f115890c8f2195156cd7ea84b63b68df5..d5423d9ed96512d9e7c53fdc3ff5776d356be2f8 100644 --- a/openerp/tools/test_reports.py +++ b/openerp/tools/test_reports.py @@ -286,7 +286,7 @@ def try_report_action(cr, uid, action_id, active_model=None, active_ids=None, raise Exception("Cannot handle action of type %s" % act_model) log_test("will be using %s action %s #%d", act_model, act_xmlid, act_id) - action = registry[act_model].read(cr, uid, act_id, context=context) + action = registry[act_model].read(cr, uid, [act_id], context=context)[0] assert action, "Could not read action %s[%s]" %(act_model, act_id) loop = 0 while action: diff --git a/openerp/tools/translate.py b/openerp/tools/translate.py index b795405460448a45204786d1f3f7724ad0678753..2e5d54042ea538945767b14ad924002398762ac0 100644 --- a/openerp/tools/translate.py +++ b/openerp/tools/translate.py @@ -165,51 +165,55 @@ class GettextAlias(object): return sql_db.db_connect(db_name) def _get_cr(self, frame, allow_create=True): - is_new_cr = False - cr = frame.f_locals.get('cr', frame.f_locals.get('cursor')) - if not cr: - s = frame.f_locals.get('self', {}) - cr = getattr(s, 'cr', None) - if not cr 
and allow_create: + # try, in order: cr, cursor, self.env.cr, self.cr + if 'cr' in frame.f_locals: + return frame.f_locals['cr'], False + if 'cursor' in frame.f_locals: + return frame.f_locals['cursor'], False + s = frame.f_locals.get('self') + if hasattr(s, 'env'): + return s.env.cr, False + if hasattr(s, 'cr'): + return s.cr, False + if allow_create: + # create a new cursor db = self._get_db() if db is not None: - cr = db.cursor() - is_new_cr = True - return cr, is_new_cr + return db.cursor(), True + return None, False def _get_uid(self, frame): - return frame.f_locals.get('uid') or frame.f_locals.get('user') + # try, in order: uid, user, self.env.uid + if 'uid' in frame.f_locals: + return frame.f_locals['uid'] + if 'user' in frame.f_locals: + return int(frame.f_locals['user']) # user may be a record + s = frame.f_locals.get('self') + return s.env.uid def _get_lang(self, frame): - lang = None - ctx = frame.f_locals.get('context') - if not ctx: - kwargs = frame.f_locals.get('kwargs') - if kwargs is None: - args = frame.f_locals.get('args') - if args and isinstance(args, (list, tuple)) \ - and isinstance(args[-1], dict): - ctx = args[-1] - elif isinstance(kwargs, dict): - ctx = kwargs.get('context') - if ctx: - lang = ctx.get('lang') - s = frame.f_locals.get('self', {}) - if not lang: - c = getattr(s, 'localcontext', None) - if c: - lang = c.get('lang') - if not lang: - # Last resort: attempt to guess the language of the user - # Pitfall: some operations are performed in sudo mode, and we - # don't know the originial uid, so the language may - # be wrong when the admin language differs. 
- pool = getattr(s, 'pool', None) - (cr, dummy) = self._get_cr(frame, allow_create=False) - uid = self._get_uid(frame) - if pool and cr and uid: - lang = pool['res.users'].context_get(cr, uid)['lang'] - return lang + # try, in order: context.get('lang'), kwargs['context'].get('lang'), + # self.env.lang, self.localcontext.get('lang') + if 'context' in frame.f_locals: + return frame.f_locals['context'].get('lang') + kwargs = frame.f_locals.get('kwargs', {}) + if 'context' in kwargs: + return kwargs['context'].get('lang') + s = frame.f_locals.get('self') + if hasattr(s, 'env'): + return s.env.lang + if hasattr(s, 'localcontext'): + return s.localcontext.get('lang') + # Last resort: attempt to guess the language of the user + # Pitfall: some operations are performed in sudo mode, and we + # don't know the originial uid, so the language may + # be wrong when the admin language differs. + pool = getattr(s, 'pool', None) + (cr, dummy) = self._get_cr(frame, allow_create=False) + uid = self._get_uid(frame) + if pool and cr and uid: + return pool['res.users'].context_get(cr, uid)['lang'] + return None def __call__(self, source): res = source @@ -685,7 +689,7 @@ def trans_generate(lang, modules, cr): _logger.error("name error in %s: %s", xml_name, str(exc)) continue objmodel = registry.get(obj.model) - if not objmodel or not field_name in objmodel._columns: + if objmodel is None or field_name not in objmodel._columns: continue field_def = objmodel._columns[field_name] @@ -736,7 +740,7 @@ def trans_generate(lang, modules, cr): except (IOError, etree.XMLSyntaxError): _logger.exception("couldn't export translation for report %s %s %s", name, report_type, fname) - for field_name,field_def in obj._table._columns.items(): + for field_name, field_def in obj._columns.items(): if field_def.translate: name = model + "," + field_name try: diff --git a/openerp/tools/yaml_import.py b/openerp/tools/yaml_import.py index 
060d261ef187a1dee16543105953d8d9f1ea837e..9ce79fc55134cb5e47a5f918287b0b986adaf24c 100644 --- a/openerp/tools/yaml_import.py +++ b/openerp/tools/yaml_import.py @@ -121,6 +121,7 @@ class YamlInterpreter(object): 'time': time, 'datetime': datetime, 'timedelta': timedelta} + self.env = openerp.api.Environment(self.cr, self.uid, self.context) def _log(self, *args, **kwargs): _logger.log(self.loglevel, *args, **kwargs) @@ -170,6 +171,11 @@ class YamlInterpreter(object): return id + def get_record(self, xml_id): + if '.' not in xml_id: + xml_id = "%s.%s" % (self.module, xml_id) + return self.env.ref(xml_id) + def get_context(self, node, eval_dict): context = self.context.copy() if node.context: @@ -388,6 +394,7 @@ class YamlInterpreter(object): fields = fields or {} if view is not False: fg = view_info['fields'] + onchange_spec = model._onchange_spec(self.cr, SUPERUSER_ID, view_info, context=self.context) # gather the default values on the object. (Can't use `fields´ as parameter instead of {} because we may # have references like `base.main_company´ in the yaml file and it's not compatible with the function) defaults = default and model._add_missing_default_values(self.cr, SUPERUSER_ID, {}, context=self.context) or {} @@ -424,27 +431,35 @@ class YamlInterpreter(object): if not el.attrib.get('on_change', False): continue - match = re.match("([a-z_1-9A-Z]+)\((.*)\)", el.attrib['on_change']) - assert match, "Unable to parse the on_change '%s'!" 
% (el.attrib['on_change'], ) - - # creating the context - class parent2(object): - def __init__(self, d): - self.d = d - def __getattr__(self, name): - return self.d.get(name, False) - - ctx = record_dict.copy() - ctx['context'] = self.context - ctx['uid'] = SUPERUSER_ID - ctx['parent'] = parent2(parent) - for a in fg: - if a not in ctx: - ctx[a] = process_val(a, defaults.get(a, False)) - - # Evaluation args - args = map(lambda x: eval(x, ctx), match.group(2).split(',')) - result = getattr(model, match.group(1))(self.cr, SUPERUSER_ID, [], *args) + + if el.attrib['on_change'] in ('1', 'true'): + # New-style on_change + recs = model.browse(self.cr, SUPERUSER_ID, [], self.context) + result = recs.onchange(record_dict, field_name, onchange_spec) + + else: + match = re.match("([a-z_1-9A-Z]+)\((.*)\)", el.attrib['on_change']) + assert match, "Unable to parse the on_change '%s'!" % (el.attrib['on_change'], ) + + # creating the context + class parent2(object): + def __init__(self, d): + self.d = d + def __getattr__(self, name): + return self.d.get(name, False) + + ctx = record_dict.copy() + ctx['context'] = self.context + ctx['uid'] = SUPERUSER_ID + ctx['parent'] = parent2(parent) + for a in fg: + if a not in ctx: + ctx[a] = process_val(a, defaults.get(a, False)) + + # Evaluation args + args = map(lambda x: eval(x, ctx), match.group(2).split(',')) + result = getattr(model, match.group(1))(self.cr, SUPERUSER_ID, [], *args) + for key, val in (result or {}).get('value', {}).items(): assert key in fg, ( "The field %r returned from the onchange call %r " @@ -541,25 +556,35 @@ class YamlInterpreter(object): self.uid = self.get_id(node.uid) if node.noupdate: self.noupdate = node.noupdate + self.env = openerp.api.Environment(self.cr, self.uid, self.context) def process_python(self, node): python, statements = node.items()[0] - model = self.get_model(python.model) - statements = statements.replace("\r\n", "\n") + assert python.model or python.id, "!python node must have attribute 
`model` or `id`" + if python.id is None: + record = self.pool[python.model] + elif isinstance(python.id, basestring): + record = self.get_record(python.id) + else: + record = self.env[python.model].browse(python.id) + if python.model: + assert record._name == python.model, "`id` is not consistent with `model`" + statements = "\n" * python.first_line + statements.replace("\r\n", "\n") code_context = { - 'model': model, + 'self': record, + 'model': record._model, 'cr': self.cr, 'uid': self.uid, 'log': self._log, 'context': self.context, 'openerp': openerp, } - code_context.update({'self': model}) # remove me when no !python block test uses 'self' anymore try: code_obj = compile(statements, self.filename, 'exec') unsafe_eval(code_obj, {'ref': self.get_id}, code_context) except AssertionError, e: - self._log_assert_failure('AssertionError in Python code %s: %s', python.name, e) + self._log_assert_failure('AssertionError in Python code %s (line %d): %s', + python.name, python.first_line, e) return except Exception, e: _logger.debug('Exception during evaluation of !python block in yaml_file %s.', self.filename, exc_info=True) diff --git a/openerp/tools/yaml_tag.py b/openerp/tools/yaml_tag.py index b22787ddf98db02ce343611b7887f371ad292c09..dc7e4b6a38ad00346e2fa1480726719e374cc2bf 100644 --- a/openerp/tools/yaml_tag.py +++ b/openerp/tools/yaml_tag.py @@ -115,6 +115,7 @@ def record_constructor(loader, node): def python_constructor(loader, node): kwargs = loader.construct_mapping(node) + kwargs['first_line'] = node.start_mark.line + 1 return Python(**kwargs) def menuitem_constructor(loader, node): diff --git a/openerp/workflow/workitem.py b/openerp/workflow/workitem.py index 5c77196966b353871e909dd82fce38bf9819afdc..1c08788257ebef0c6f866630d931fd5da6ceb6dd 100644 --- a/openerp/workflow/workitem.py +++ b/openerp/workflow/workitem.py @@ -51,12 +51,11 @@ class Environment(dict): self.id = record.id self.ids = [record.id] self.obj = openerp.registry(self.cr.dbname)[self.model] - 
self.columns = self.obj._columns.keys() + self.obj._inherit_fields.keys() def __getitem__(self, key): - if (key in self.columns) or (key in dir(self.obj)): - res = self.obj.browse(self.cr, self.uid, self.id) - return res[key] + records = self.obj.browse(self.cr, self.uid, self.ids) + if hasattr(records, key): + return getattr(records, key) else: return super(Environment, self).__getitem__(key) diff --git a/setup.py b/setup.py index 041a8df178b7d0d197239a44820b7de9c62f934a..ca918b9b3bee448dd90d15f1466cc31ecbd779e8 100644 --- a/setup.py +++ b/setup.py @@ -85,6 +85,7 @@ def py2exe_options(): "commands", "dateutil", "decimal", + "decorator", "docutils", "email", "encodings", @@ -155,6 +156,7 @@ setuptools.setup( install_requires = [ 'pychart', # not on pypi, use: pip install http://download.gna.org/pychart/PyChart-1.39.tar.gz 'babel >= 1.0', + 'decorator', 'docutils', 'feedparser', 'gdata',